Rewrite stack map statistics printing in oatdump.
The code was repetitive and had a tendency to get out of sync.
Test: Check the output of oatdump
Change-Id: Ied68a6c9add7ac234ede68d4adbc097bb8b82d6f
diff --git a/libartbase/base/bit_table.h b/libartbase/base/bit_table.h
index 0ae60b9..6a714e6 100644
--- a/libartbase/base/bit_table.h
+++ b/libartbase/base/bit_table.h
@@ -120,6 +120,7 @@
ALWAYS_INLINE void Decode(BitMemoryRegion region, size_t* bit_offset) {
// Decode row count and column sizes from the table header.
+ size_t initial_bit_offset = *bit_offset;
num_rows_ = DecodeVarintBits(region, bit_offset);
if (num_rows_ != 0) {
column_offset_[0] = 0;
@@ -128,6 +129,7 @@
column_offset_[i + 1] = dchecked_integral_cast<uint16_t>(column_end);
}
}
+ header_bit_size_ = *bit_offset - initial_bit_offset;
// Record the region which contains the table data and skip past it.
table_data_ = region.Subregion(*bit_offset, num_rows_ * NumRowBits());
@@ -158,24 +160,26 @@
return column_offset_[column + 1] - column_offset_[column];
}
- size_t DataBitSize() const { return num_rows_ * column_offset_[kNumColumns]; }
+ size_t HeaderBitSize() const { return header_bit_size_; }
+ size_t BitSize() const { return header_bit_size_ + table_data_.size_in_bits(); }
protected:
BitMemoryRegion table_data_;
size_t num_rows_ = 0;
uint16_t column_offset_[kNumColumns + 1] = {};
+ uint16_t header_bit_size_ = 0;
};
// Template meta-programming helper.
template<typename Accessor, size_t... Columns>
-static const char** GetBitTableColumnNamesImpl(std::index_sequence<Columns...>) {
+static const char* const* GetBitTableColumnNamesImpl(std::index_sequence<Columns...>) {
static const char* names[] = { Accessor::template ColumnName<Columns, 0>::Value... };
return names;
}
template<typename Accessor>
-static const char** GetBitTableColumnNames() {
+static const char* const* GetBitTableColumnNames() {
return GetBitTableColumnNamesImpl<Accessor>(std::make_index_sequence<Accessor::kCount>());
}
diff --git a/libartbase/base/indenter.h b/libartbase/base/indenter.h
index 06e7340..a479b7d 100644
--- a/libartbase/base/indenter.h
+++ b/libartbase/base/indenter.h
@@ -122,6 +122,10 @@
return indented_os_;
}
+ size_t GetIndentation() const {
+ return indenter_.count_;
+ }
+
void IncreaseIndentation(size_t adjustment) {
indenter_.count_ += adjustment;
}
diff --git a/libartbase/base/stats.h b/libartbase/base/stats.h
new file mode 100644
index 0000000..4dcbfe8
--- /dev/null
+++ b/libartbase/base/stats.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_LIBARTBASE_BASE_STATS_H_
+#define ART_LIBARTBASE_BASE_STATS_H_
+
+#include <unordered_map>
+
+#include "globals.h"
+
+namespace art {
+
+// Simple structure to record tree of statistical values.
+class Stats {
+ public:
+ double Value() const { return value_; }
+ size_t Count() const { return count_; }
+ Stats* Child(const char* name) { return &children_[name]; }
+ const std::unordered_map<const char*, Stats>& Children() const { return children_; }
+
+ void AddBytes(double bytes, size_t count = 1) { Add(bytes, count); }
+ void AddBits(double bits, size_t count = 1) { Add(bits / kBitsPerByte, count); }
+ void AddSeconds(double s, size_t count = 1) { Add(s, count); }
+ void AddNanoSeconds(double ns, size_t count = 1) { Add(ns / 1000000000.0, count); }
+
+ double SumChildrenValues() const {
+ double sum = 0.0;
+ for (auto it : children_) {
+ sum += it.second.Value();
+ }
+ return sum;
+ }
+
+ private:
+ void Add(double value, size_t count = 1) {
+ value_ += value;
+ count_ += count;
+ }
+
+ double value_ = 0.0; // Commutative sum of the collected statistic in basic units.
+ size_t count_ = 0; // The number of samples for this node.
+ std::unordered_map<const char*, Stats> children_;
+};
+
+} // namespace art
+
+#endif // ART_LIBARTBASE_BASE_STATS_H_
diff --git a/oatdump/oatdump.cc b/oatdump/oatdump.cc
index 453e9da..2db0283 100644
--- a/oatdump/oatdump.cc
+++ b/oatdump/oatdump.cc
@@ -18,6 +18,7 @@
#include <stdlib.h>
#include <fstream>
+#include <iomanip>
#include <iostream>
#include <map>
#include <set>
@@ -37,6 +38,7 @@
#include "base/indenter.h"
#include "base/os.h"
#include "base/safe_map.h"
+#include "base/stats.h"
#include "base/stl_util.h"
#include "base/unix_file/fd_file.h"
#include "class_linker-inl.h"
@@ -639,7 +641,8 @@
{
os << "OAT FILE STATS:\n";
VariableIndentationOutputStream vios(&os);
- stats_.Dump(vios);
+ stats_.AddBytes(oat_file_.Size());
+ DumpStats(vios, "OatFile", stats_, stats_.Value());
}
os << std::flush;
@@ -737,156 +740,42 @@
return vdex_file;
}
- struct Stats {
- enum ByteKind {
- kByteKindCode,
- kByteKindQuickMethodHeader,
- kByteKindCodeInfoLocationCatalog,
- kByteKindCodeInfoDexRegisterMask,
- kByteKindCodeInfoDexRegisterMap,
- kByteKindCodeInfo,
- kByteKindCodeInfoInvokeInfo,
- kByteKindCodeInfoStackMasks,
- kByteKindCodeInfoRegisterMasks,
- kByteKindStackMapNativePc,
- kByteKindStackMapDexPc,
- kByteKindStackMapDexRegisterMap,
- kByteKindStackMapInlineInfoIndex,
- kByteKindStackMapRegisterMaskIndex,
- kByteKindStackMapStackMaskIndex,
- kByteKindInlineInfoMethodIndexIdx,
- kByteKindInlineInfoDexPc,
- kByteKindInlineInfoArtMethod,
- kByteKindInlineInfoNumDexRegisters,
- kByteKindInlineInfoIsLast,
- kByteKindCount,
- // Special ranges for std::accumulate convenience.
- kByteKindStackMapFirst = kByteKindStackMapNativePc,
- kByteKindStackMapLast = kByteKindStackMapStackMaskIndex,
- kByteKindInlineInfoFirst = kByteKindInlineInfoMethodIndexIdx,
- kByteKindInlineInfoLast = kByteKindInlineInfoIsLast,
- };
- int64_t bits[kByteKindCount] = {};
- // Since code has deduplication, seen tracks already seen pointers to avoid double counting
- // deduplicated code and tables.
- std::unordered_set<const void*> seen;
+ bool AddStatsObject(const void* address) {
+ return seen_stats_objects_.insert(address).second; // Inserted new entry.
+ }
- // Returns true if it was newly added.
- bool AddBitsIfUnique(ByteKind kind, int64_t count, const void* address) {
- if (seen.insert(address).second == true) {
- // True means the address was not already in the set.
- AddBits(kind, count);
- return true;
+ void DumpStats(VariableIndentationOutputStream& os,
+ const std::string& name,
+ const Stats& stats,
+ double total) {
+ if (std::fabs(stats.Value()) > 0 || !stats.Children().empty()) {
+ double percent = 100.0 * stats.Value() / total;
+ os.Stream()
+ << std::setw(40 - os.GetIndentation()) << std::left << name << std::right << " "
+ << std::setw(8) << stats.Count() << " "
+ << std::setw(12) << std::fixed << std::setprecision(3) << stats.Value() / KB << "KB "
+ << std::setw(8) << std::fixed << std::setprecision(1) << percent << "%\n";
+
+    // Sort all children by largest value first, then by name.
+ std::map<std::pair<double, std::string>, const Stats&> sorted_children;
+ for (const auto& it : stats.Children()) {
+ sorted_children.emplace(std::make_pair(-it.second.Value(), it.first), it.second);
}
- return false;
- }
- void AddBits(ByteKind kind, int64_t count) {
- bits[kind] += count;
- }
-
- void Dump(VariableIndentationOutputStream& os) {
- const int64_t sum = std::accumulate(bits, bits + kByteKindCount, 0u);
- os.Stream() << "Dumping cumulative use of " << sum / kBitsPerByte << " accounted bytes\n";
- if (sum > 0) {
- Dump(os, "Code ", bits[kByteKindCode], sum);
- Dump(os, "QuickMethodHeader ", bits[kByteKindQuickMethodHeader], sum);
- Dump(os, "CodeInfo ", bits[kByteKindCodeInfo], sum);
- Dump(os, "CodeInfoLocationCatalog ", bits[kByteKindCodeInfoLocationCatalog], sum);
- Dump(os, "CodeInfoDexRegisterMask ", bits[kByteKindCodeInfoDexRegisterMask], sum);
- Dump(os, "CodeInfoDexRegisterMap ", bits[kByteKindCodeInfoDexRegisterMap], sum);
- Dump(os, "CodeInfoStackMasks ", bits[kByteKindCodeInfoStackMasks], sum);
- Dump(os, "CodeInfoRegisterMasks ", bits[kByteKindCodeInfoRegisterMasks], sum);
- Dump(os, "CodeInfoInvokeInfo ", bits[kByteKindCodeInfoInvokeInfo], sum);
- // Stack map section.
- const int64_t stack_map_bits = std::accumulate(bits + kByteKindStackMapFirst,
- bits + kByteKindStackMapLast + 1,
- 0u);
- Dump(os, "CodeInfoStackMap ", stack_map_bits, sum);
- {
- ScopedIndentation indent1(&os);
- Dump(os,
- "StackMapNativePc ",
- bits[kByteKindStackMapNativePc],
- stack_map_bits,
- "stack map");
- Dump(os,
- "StackMapDexPc ",
- bits[kByteKindStackMapDexPc],
- stack_map_bits,
- "stack map");
- Dump(os,
- "StackMapDexRegisterMap ",
- bits[kByteKindStackMapDexRegisterMap],
- stack_map_bits,
- "stack map");
- Dump(os,
- "StackMapInlineInfoIndex ",
- bits[kByteKindStackMapInlineInfoIndex],
- stack_map_bits,
- "stack map");
- Dump(os,
- "StackMapRegisterMaskIndex ",
- bits[kByteKindStackMapRegisterMaskIndex],
- stack_map_bits,
- "stack map");
- Dump(os,
- "StackMapStackMaskIndex ",
- bits[kByteKindStackMapStackMaskIndex],
- stack_map_bits,
- "stack map");
- }
- // Inline info section.
- const int64_t inline_info_bits = std::accumulate(bits + kByteKindInlineInfoFirst,
- bits + kByteKindInlineInfoLast + 1,
- 0u);
- Dump(os, "CodeInfoInlineInfo ", inline_info_bits, sum);
- {
- ScopedIndentation indent1(&os);
- Dump(os,
- "InlineInfoMethodIndexIdx ",
- bits[kByteKindInlineInfoMethodIndexIdx],
- inline_info_bits,
- "inline info");
- Dump(os,
- "InlineInfoDexPc ",
- bits[kByteKindStackMapDexPc],
- inline_info_bits,
- "inline info");
- Dump(os,
- "InlineInfoArtMethod ",
- bits[kByteKindInlineInfoArtMethod],
- inline_info_bits,
- "inline info");
- Dump(os,
- "InlineInfoNumDexRegisters ",
- bits[kByteKindInlineInfoNumDexRegisters],
- inline_info_bits,
- "inline info");
- Dump(os,
- "InlineInfoIsLast ",
- bits[kByteKindInlineInfoIsLast],
- inline_info_bits,
- "inline info");
- }
+    // Add "other" row to represent any amount not accounted for by the children.
+ Stats other;
+ other.AddBytes(stats.Value() - stats.SumChildrenValues(), stats.Count());
+ if (std::fabs(other.Value()) > 0 && !stats.Children().empty()) {
+ sorted_children.emplace(std::make_pair(-other.Value(), "(other)"), other);
}
- os.Stream() << "\n" << std::flush;
- }
- private:
- void Dump(VariableIndentationOutputStream& os,
- const char* name,
- int64_t size,
- int64_t total,
- const char* sum_of = "total") {
- const double percent = (static_cast<double>(size) / static_cast<double>(total)) * 100;
- os.Stream() << StringPrintf("%s = %8" PRId64 " (%2.0f%% of %s)\n",
- name,
- size / kBitsPerByte,
- percent,
- sum_of);
+ // Print the data.
+ ScopedIndentation indent1(&os);
+ for (const auto& it : sorted_children) {
+ DumpStats(os, it.first.second, it.second, total);
+ }
}
- };
+ }
private:
void AddAllOffsets() {
@@ -1266,9 +1155,9 @@
vios->Stream() << "OatQuickMethodHeader ";
uint32_t method_header_offset = oat_method.GetOatQuickMethodHeaderOffset();
const OatQuickMethodHeader* method_header = oat_method.GetOatQuickMethodHeader();
- stats_.AddBitsIfUnique(Stats::kByteKindQuickMethodHeader,
- sizeof(*method_header) * kBitsPerByte,
- method_header);
+ if (AddStatsObject(method_header)) {
+ stats_.Child("QuickMethodHeader")->AddBytes(sizeof(*method_header));
+ }
if (options_.absolute_addresses_) {
vios->Stream() << StringPrintf("%p ", method_header);
}
@@ -1340,7 +1229,9 @@
const void* code = oat_method.GetQuickCode();
uint32_t aligned_code_begin = AlignCodeOffset(code_offset);
uint64_t aligned_code_end = aligned_code_begin + code_size;
- stats_.AddBitsIfUnique(Stats::kByteKindCode, code_size * kBitsPerByte, code);
+ if (AddStatsObject(code)) {
+ stats_.Child("Code")->AddBytes(code_size);
+ }
if (options_.absolute_addresses_) {
vios->Stream() << StringPrintf("%p ", code);
@@ -1690,78 +1581,15 @@
} else if (!bad_input && IsMethodGeneratedByOptimizingCompiler(oat_method,
code_item_accessor)) {
// The optimizing compiler outputs its CodeInfo data in the vmap table.
+ const OatQuickMethodHeader* method_header = oat_method.GetOatQuickMethodHeader();
StackMapsHelper helper(oat_method.GetVmapTable(), instruction_set_);
- MethodInfo method_info(oat_method.GetOatQuickMethodHeader()->GetOptimizedMethodInfo());
- {
- const CodeInfo code_info = helper.GetCodeInfo();
- const BitTable<StackMap::kCount>& stack_maps = code_info.stack_maps_;
- const size_t num_stack_maps = stack_maps.NumRows();
- if (stats_.AddBitsIfUnique(Stats::kByteKindCodeInfo,
- code_info.size_ * kBitsPerByte,
- oat_method.GetVmapTable())) {
- // Stack maps
- stats_.AddBits(
- Stats::kByteKindStackMapNativePc,
- stack_maps.NumColumnBits(StackMap::kPackedNativePc) * num_stack_maps);
- stats_.AddBits(
- Stats::kByteKindStackMapDexPc,
- stack_maps.NumColumnBits(StackMap::kDexPc) * num_stack_maps);
- stats_.AddBits(
- Stats::kByteKindStackMapDexRegisterMap,
- stack_maps.NumColumnBits(StackMap::kDexRegisterMapIndex) * num_stack_maps);
- stats_.AddBits(
- Stats::kByteKindStackMapInlineInfoIndex,
- stack_maps.NumColumnBits(StackMap::kInlineInfoIndex) * num_stack_maps);
- stats_.AddBits(
- Stats::kByteKindStackMapRegisterMaskIndex,
- stack_maps.NumColumnBits(StackMap::kRegisterMaskIndex) * num_stack_maps);
- stats_.AddBits(
- Stats::kByteKindStackMapStackMaskIndex,
- stack_maps.NumColumnBits(StackMap::kStackMaskIndex) * num_stack_maps);
-
- // Stack masks
- stats_.AddBits(
- Stats::kByteKindCodeInfoStackMasks,
- code_info.stack_masks_.DataBitSize());
-
- // Register masks
- stats_.AddBits(
- Stats::kByteKindCodeInfoRegisterMasks,
- code_info.register_masks_.DataBitSize());
-
- // Invoke infos
- stats_.AddBits(
- Stats::kByteKindCodeInfoInvokeInfo,
- code_info.invoke_infos_.DataBitSize());
-
- // Location catalog
- stats_.AddBits(Stats::kByteKindCodeInfoLocationCatalog,
- code_info.dex_register_catalog_.DataBitSize());
- stats_.AddBits(Stats::kByteKindCodeInfoDexRegisterMask,
- code_info.dex_register_masks_.DataBitSize());
- stats_.AddBits(Stats::kByteKindCodeInfoDexRegisterMap,
- code_info.dex_register_maps_.DataBitSize());
-
- // Inline infos.
- const BitTable<InlineInfo::kCount>& inline_infos = code_info.inline_infos_;
- const size_t num_inline_infos = inline_infos.NumRows();
- if (num_inline_infos > 0u) {
- stats_.AddBits(
- Stats::kByteKindInlineInfoMethodIndexIdx,
- inline_infos.NumColumnBits(InlineInfo::kMethodInfoIndex) * num_inline_infos);
- stats_.AddBits(
- Stats::kByteKindInlineInfoDexPc,
- inline_infos.NumColumnBits(InlineInfo::kDexPc) * num_inline_infos);
- stats_.AddBits(
- Stats::kByteKindInlineInfoArtMethod,
- inline_infos.NumColumnBits(InlineInfo::kArtMethodHi) * num_inline_infos +
- inline_infos.NumColumnBits(InlineInfo::kArtMethodLo) * num_inline_infos);
- stats_.AddBits(
- Stats::kByteKindInlineInfoNumDexRegisters,
- inline_infos.NumColumnBits(InlineInfo::kNumberOfDexRegisters) * num_inline_infos);
- stats_.AddBits(Stats::kByteKindInlineInfoIsLast, num_inline_infos);
- }
- }
+ if (AddStatsObject(oat_method.GetVmapTable())) {
+ helper.GetCodeInfo().AddSizeStats(&stats_);
+ }
+ MethodInfo method_info(method_header->GetOptimizedMethodInfo());
+ if (AddStatsObject(method_header->GetOptimizedMethodInfoPtr())) {
+ size_t method_info_size = MethodInfo::ComputeSize(method_info.NumMethodIndices());
+ stats_.Child("MethodInfo")->AddBytes(method_info_size);
}
const uint8_t* quick_native_pc = reinterpret_cast<const uint8_t*>(quick_code);
size_t offset = 0;
@@ -1894,6 +1722,7 @@
std::set<uintptr_t> offsets_;
Disassembler* disassembler_;
Stats stats_;
+ std::unordered_set<const void*> seen_stats_objects_;
};
class ImageDumper {
diff --git a/oatdump/oatdump_test.h b/oatdump/oatdump_test.h
index 293acdc..231163b 100644
--- a/oatdump/oatdump_test.h
+++ b/oatdump/oatdump_test.h
@@ -158,7 +158,7 @@
// Code and dex code do not show up if list only.
expected_prefixes.push_back("DEX CODE:");
expected_prefixes.push_back("CODE:");
- expected_prefixes.push_back("CodeInfoInlineInfo");
+ expected_prefixes.push_back("InlineInfos");
}
if (mode == kModeArt) {
exec_argv.push_back("--image=" + core_art_location_);
diff --git a/runtime/stack_map.cc b/runtime/stack_map.cc
index a25c9fd..23cc1d6 100644
--- a/runtime/stack_map.cc
+++ b/runtime/stack_map.cc
@@ -21,6 +21,7 @@
#include "art_method.h"
#include "base/indenter.h"
+#include "base/stats.h"
#include "scoped_thread_state_change-inl.h"
namespace art {
@@ -113,9 +114,39 @@
}
}
+template<typename Accessor>
+static void AddTableSizeStats(const char* table_name,
+ const BitTable<Accessor::kCount>& table,
+ /*out*/ Stats* parent) {
+ Stats* table_stats = parent->Child(table_name);
+ table_stats->AddBits(table.BitSize());
+ table_stats->Child("Header")->AddBits(table.HeaderBitSize());
+ const char* const* column_names = GetBitTableColumnNames<Accessor>();
+ for (size_t c = 0; c < table.NumColumns(); c++) {
+ if (table.NumColumnBits(c) > 0) {
+ Stats* column_stats = table_stats->Child(column_names[c]);
+ column_stats->AddBits(table.NumRows() * table.NumColumnBits(c), table.NumRows());
+ }
+ }
+}
+
+void CodeInfo::AddSizeStats(/*out*/ Stats* parent) const {
+ Stats* stats = parent->Child("CodeInfo");
+ stats->AddBytes(size_);
+ stats->Child("Header")->AddBytes(UnsignedLeb128Size(size_));
+ AddTableSizeStats<StackMap>("StackMaps", stack_maps_, stats);
+ AddTableSizeStats<RegisterMask>("RegisterMasks", register_masks_, stats);
+ AddTableSizeStats<MaskInfo>("StackMasks", stack_masks_, stats);
+ AddTableSizeStats<InvokeInfo>("InvokeInfos", invoke_infos_, stats);
+ AddTableSizeStats<InlineInfo>("InlineInfos", inline_infos_, stats);
+ AddTableSizeStats<MaskInfo>("DexRegisterMasks", dex_register_masks_, stats);
+ AddTableSizeStats<DexRegisterMapInfo>("DexRegisterMaps", dex_register_maps_, stats);
+ AddTableSizeStats<DexRegisterInfo>("DexRegisterCatalog", dex_register_catalog_, stats);
+}
+
static void DumpDexRegisterMap(VariableIndentationOutputStream* vios,
const DexRegisterMap& map) {
- if (!map.empty()) {
+ if (map.HasAnyLiveDexRegisters()) {
ScopedIndentation indent1(vios);
for (size_t i = 0; i < map.size(); ++i) {
if (map.IsDexRegisterLive(i)) {
@@ -126,18 +157,19 @@
}
}
-template<uint32_t kNumColumns>
+template<typename Accessor>
static void DumpTable(VariableIndentationOutputStream* vios,
const char* table_name,
- const BitTable<kNumColumns>& table,
+ const BitTable<Accessor::kCount>& table,
bool verbose,
bool is_mask = false) {
if (table.NumRows() != 0) {
- vios->Stream() << table_name << " BitSize=" << table.NumRows() * table.NumRowBits();
+ vios->Stream() << table_name << " BitSize=" << table.BitSize();
vios->Stream() << " Rows=" << table.NumRows() << " Bits={";
+ const char* const* column_names = GetBitTableColumnNames<Accessor>();
for (size_t c = 0; c < table.NumColumns(); c++) {
vios->Stream() << (c != 0 ? " " : "");
- vios->Stream() << table.NumColumnBits(c);
+ vios->Stream() << column_names[c] << "=" << table.NumColumnBits(c);
}
vios->Stream() << "}\n";
if (verbose) {
@@ -171,14 +203,14 @@
<< " BitSize=" << size_ * kBitsPerByte
<< "\n";
ScopedIndentation indent1(vios);
- DumpTable(vios, "StackMaps", stack_maps_, verbose);
- DumpTable(vios, "RegisterMasks", register_masks_, verbose);
- DumpTable(vios, "StackMasks", stack_masks_, verbose, true /* is_mask */);
- DumpTable(vios, "InvokeInfos", invoke_infos_, verbose);
- DumpTable(vios, "InlineInfos", inline_infos_, verbose);
- DumpTable(vios, "DexRegisterMasks", dex_register_masks_, verbose, true /* is_mask */);
- DumpTable(vios, "DexRegisterMaps", dex_register_maps_, verbose);
- DumpTable(vios, "DexRegisterCatalog", dex_register_catalog_, verbose);
+ DumpTable<StackMap>(vios, "StackMaps", stack_maps_, verbose);
+ DumpTable<RegisterMask>(vios, "RegisterMasks", register_masks_, verbose);
+ DumpTable<MaskInfo>(vios, "StackMasks", stack_masks_, verbose, true /* is_mask */);
+ DumpTable<InvokeInfo>(vios, "InvokeInfos", invoke_infos_, verbose);
+ DumpTable<InlineInfo>(vios, "InlineInfos", inline_infos_, verbose);
+ DumpTable<MaskInfo>(vios, "DexRegisterMasks", dex_register_masks_, verbose, true /* is_mask */);
+ DumpTable<DexRegisterMapInfo>(vios, "DexRegisterMaps", dex_register_maps_, verbose);
+ DumpTable<DexRegisterInfo>(vios, "DexRegisterCatalog", dex_register_catalog_, verbose);
// Display stack maps along with (live) Dex register maps.
if (verbose) {
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
index 53f80e5..ea358c6 100644
--- a/runtime/stack_map.h
+++ b/runtime/stack_map.h
@@ -47,6 +47,7 @@
class ArtMethod;
class CodeInfo;
+class Stats;
std::ostream& operator<<(std::ostream& stream, const DexRegisterLocation& reg);
@@ -252,6 +253,18 @@
}
};
+class MaskInfo : public BitTable<1>::Accessor {
+ public:
+ BIT_TABLE_HEADER()
+ BIT_TABLE_COLUMN(0, Mask)
+};
+
+class DexRegisterMapInfo : public BitTable<1>::Accessor {
+ public:
+ BIT_TABLE_HEADER()
+ BIT_TABLE_COLUMN(0, CatalogueIndex)
+};
+
class DexRegisterInfo : public BitTable<2>::Accessor {
public:
BIT_TABLE_HEADER()
@@ -480,6 +493,9 @@
InstructionSet instruction_set,
const MethodInfo& method_info) const;
+ // Accumulate code info size statistics into the given Stats tree.
+ void AddSizeStats(/*out*/ Stats* parent) const;
+
private:
// Scan backward to determine dex register locations at given stack map.
void DecodeDexRegisterMap(uint32_t stack_map_index,
@@ -506,15 +522,13 @@
size_t size_;
BitTable<StackMap::kCount> stack_maps_;
BitTable<RegisterMask::kCount> register_masks_;
- BitTable<1> stack_masks_;
+ BitTable<MaskInfo::kCount> stack_masks_;
BitTable<InvokeInfo::kCount> invoke_infos_;
BitTable<InlineInfo::kCount> inline_infos_;
- BitTable<1> dex_register_masks_;
- BitTable<1> dex_register_maps_;
+ BitTable<MaskInfo::kCount> dex_register_masks_;
+ BitTable<DexRegisterMapInfo::kCount> dex_register_maps_;
BitTable<DexRegisterInfo::kCount> dex_register_catalog_;
uint32_t number_of_dex_registers_; // Excludes any inlined methods.
-
- friend class OatDumper;
};
#undef ELEMENT_BYTE_OFFSET_AFTER