Add helper method to iterate over BitTable fields in CodeInfo.
Avoid repetitive code patterns and simplify the code.
Test: test-art-host-gtest-stack_map_test
Test: checked output of oatdump
Change-Id: I2354bc652837eb34efeecf4de56a027384544034
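
Note: the pattern this change introduces, shown in isolation below — a static
ForEach* helper enumerates pointers-to-members once, and every call site
(decode, dedupe, dump, stats) passes a generic lambda that receives each field
in turn. This is a minimal standalone sketch of the idea only; FakeCodeInfo
and FakeTable are invented stand-ins for illustration, not the real ART types.

#include <cstdint>
#include <iostream>

// Invented stand-in for BitTable<Accessor>; not the real ART type.
struct FakeTable {
  uint32_t num_rows = 0;
};

// Invented stand-in for CodeInfo; not the real ART type.
struct FakeCodeInfo {
  uint32_t frame_size = 0;
  uint32_t spill_mask = 0;
  FakeTable stack_maps;
  FakeTable register_masks;

  // Invokes the callback with a pointer-to-member for each header field.
  // A generic lambda (auto parameter) lets one callback handle all fields.
  template <typename Callback>
  static void ForEachHeaderField(Callback callback) {
    callback(&FakeCodeInfo::frame_size);
    callback(&FakeCodeInfo::spill_mask);
  }

  // Same pattern for the table fields.
  template <typename Callback>
  static void ForEachTableField(Callback callback) {
    callback(&FakeCodeInfo::stack_maps);
    callback(&FakeCodeInfo::register_masks);
  }
};

int main() {
  FakeCodeInfo info;
  uint32_t fake_input = 42;  // Stands in for values read from a BitMemoryReader.
  // One lambda handles every header field; `info.*member_pointer`
  // dereferences the pointer-to-member on a concrete object.
  FakeCodeInfo::ForEachHeaderField([&](auto member_pointer) {
    info.*member_pointer = fake_input++;
  });
  FakeCodeInfo::ForEachTableField([&](auto member_pointer) {
    (info.*member_pointer).num_rows = 1;
  });
  std::cout << info.frame_size << " " << info.spill_mask << "\n";  // Prints: 42 43
  return 0;
}

The payoff, as in this patch, is that adding a field means touching one list
instead of every loop that walks the fields.
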
diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h
index df11709..01c6bf9 100644
--- a/compiler/optimizing/stack_map_stream.h
+++ b/compiler/optimizing/stack_map_stream.h
@@ -109,7 +109,7 @@
BitTableBuilder<RegisterMask> register_masks_;
BitmapTableBuilder stack_masks_;
BitmapTableBuilder dex_register_masks_;
- BitTableBuilder<MaskInfo> dex_register_maps_;
+ BitTableBuilder<DexRegisterMapInfo> dex_register_maps_;
BitTableBuilder<DexRegisterInfo> dex_register_catalog_;
ScopedArenaVector<BitVector*> lazy_stack_masks_;
diff --git a/libartbase/base/bit_memory_region.h b/libartbase/base/bit_memory_region.h
index bcdc573..5668b6c 100644
--- a/libartbase/base/bit_memory_region.h
+++ b/libartbase/base/bit_memory_region.h
@@ -53,6 +53,11 @@
ALWAYS_INLINE bool IsValid() const { return data_ != nullptr; }
+ const uint8_t* data() const {
+ DCHECK_ALIGNED(bit_start_, kBitsPerByte);
+ return reinterpret_cast<const uint8_t*>(data_) + bit_start_ / kBitsPerByte;
+ }
+
size_t size_in_bits() const {
return bit_size_;
}
@@ -215,6 +220,8 @@
: finished_region_(const_cast<uint8_t*>(data), bit_offset, /* bit_length */ 0) {
}
+ const uint8_t* data() const { return finished_region_.data(); }
+
BitMemoryRegion GetReadRegion() const { return finished_region_; }
size_t NumberOfReadBits() const { return finished_region_.size_in_bits(); }
diff --git a/libartbase/base/bit_table.h b/libartbase/base/bit_table.h
index 7fec51e..54e8861 100644
--- a/libartbase/base/bit_table.h
+++ b/libartbase/base/bit_table.h
@@ -49,7 +49,6 @@
ALWAYS_INLINE void Decode(BitMemoryReader& reader) {
// Decode row count and column sizes from the table header.
- size_t initial_bit_offset = reader.NumberOfReadBits();
num_rows_ = reader.ReadVarint();
if (num_rows_ != 0) {
column_offset_[0] = 0;
@@ -58,14 +57,13 @@
column_offset_[i + 1] = dchecked_integral_cast<uint16_t>(column_end);
}
}
- header_bit_size_ = reader.NumberOfReadBits() - initial_bit_offset;
// Record the region which contains the table data and skip past it.
table_data_ = reader.ReadRegion(num_rows_ * NumRowBits());
}
ALWAYS_INLINE uint32_t Get(uint32_t row, uint32_t column = 0) const {
- DCHECK_NE(header_bit_size_, 0u) << "Table has not been loaded";
+ DCHECK(table_data_.IsValid()) << "Table has not been loaded";
DCHECK_LT(row, num_rows_);
DCHECK_LT(column, kNumColumns);
size_t offset = row * NumRowBits() + column_offset_[column];
@@ -73,7 +71,7 @@
}
ALWAYS_INLINE BitMemoryRegion GetBitMemoryRegion(uint32_t row, uint32_t column = 0) const {
- DCHECK_NE(header_bit_size_, 0u) << "Table has not been loaded";
+ DCHECK(table_data_.IsValid()) << "Table has not been loaded";
DCHECK_LT(row, num_rows_);
DCHECK_LT(column, kNumColumns);
size_t offset = row * NumRowBits() + column_offset_[column];
@@ -90,9 +88,7 @@
return column_offset_[column + 1] - column_offset_[column];
}
- size_t HeaderBitSize() const { return header_bit_size_; }
-
- size_t BitSize() const { return header_bit_size_ + table_data_.size_in_bits(); }
+ size_t DataBitSize() const { return table_data_.size_in_bits(); }
bool Equals(const BitTableBase& other) const {
return num_rows_ == other.num_rows_ &&
@@ -103,9 +99,9 @@
protected:
BitMemoryRegion table_data_;
size_t num_rows_ = 0;
-
uint16_t column_offset_[kNumColumns + 1] = {};
- uint16_t header_bit_size_ = 0;
+
+ DISALLOW_COPY_AND_ASSIGN(BitTableBase);
};
// Helper class which can be used to create BitTable accessors with named getters.
@@ -129,9 +125,10 @@
}
// Helper macro to create constructors and per-table utilities in derived class.
-#define BIT_TABLE_HEADER() \
+#define BIT_TABLE_HEADER(NAME) \
using BitTableAccessor<kNumColumns>::BitTableAccessor; /* inherit constructors */ \
template<int COLUMN, int UNUSED /*needed to compile*/> struct ColumnName; \
+ static constexpr const char* kTableName = #NAME; \
// Helper macro to create named column accessors in derived class.
#define BIT_TABLE_COLUMN(COLUMN, NAME) \
@@ -154,12 +151,6 @@
return names;
}
-// Returns the names of all columns in the given accessor.
-template<typename Accessor>
-static const char* const* GetBitTableColumnNames() {
- return GetBitTableColumnNamesImpl<Accessor>(std::make_index_sequence<Accessor::kNumColumns>());
-}
-
// Wrapper which makes it easier to use named accessors for the individual rows.
template<typename Accessor>
class BitTable : public BitTableBase<Accessor::kNumColumns> {
@@ -217,6 +208,14 @@
ALWAYS_INLINE Accessor GetInvalidRow() const {
return Accessor(this, static_cast<uint32_t>(-1));
}
+
+ const char* GetName() const {
+ return Accessor::kTableName;
+ }
+
+ const char* const* GetColumnNames() const {
+ return GetBitTableColumnNamesImpl<Accessor>(std::make_index_sequence<Accessor::kNumColumns>());
+ }
};
template<typename Accessor>
diff --git a/oatdump/oatdump.cc b/oatdump/oatdump.cc
index 9d73879..c04c50e 100644
--- a/oatdump/oatdump.cc
+++ b/oatdump/oatdump.cc
@@ -1578,7 +1578,7 @@
// The optimizing compiler outputs its CodeInfo data in the vmap table.
StackMapsHelper helper(oat_method.GetVmapTable(), instruction_set_);
if (AddStatsObject(oat_method.GetVmapTable())) {
- helper.GetCodeInfo().AddSizeStats(&stats_);
+ helper.GetCodeInfo().CollectSizeStats(oat_method.GetVmapTable(), &stats_);
}
const uint8_t* quick_native_pc = reinterpret_cast<const uint8_t*>(quick_code);
size_t offset = 0;
diff --git a/oatdump/oatdump_test.h b/oatdump/oatdump_test.h
index 2c28f06..4ee5101 100644
--- a/oatdump/oatdump_test.h
+++ b/oatdump/oatdump_test.h
@@ -171,7 +171,7 @@
// Code and dex code do not show up if list only.
expected_prefixes.push_back("DEX CODE:");
expected_prefixes.push_back("CODE:");
- expected_prefixes.push_back("InlineInfos");
+ expected_prefixes.push_back("InlineInfo");
}
if (mode == kModeArt) {
exec_argv.push_back("--image=" + core_art_location_);
diff --git a/runtime/stack_map.cc b/runtime/stack_map.cc
index 689904a..62dec15 100644
--- a/runtime/stack_map.cc
+++ b/runtime/stack_map.cc
@@ -31,93 +31,67 @@
: CodeInfo(header->GetOptimizedCodeInfoPtr(), flags) {
}
+// Returns true if the decoded table was deduped.
template<typename Accessor>
-ALWAYS_INLINE static void DecodeTable(BitTable<Accessor>& table,
- BitMemoryReader& reader,
- const uint8_t* reader_data) {
- if (reader.ReadBit() /* is_deduped */) {
+ALWAYS_INLINE static bool DecodeTable(BitTable<Accessor>& table, BitMemoryReader& reader) {
+ bool is_deduped = reader.ReadBit();
+ if (is_deduped) {
ssize_t bit_offset = reader.NumberOfReadBits() - reader.ReadVarint();
- BitMemoryReader reader2(reader_data, bit_offset); // The offset is negative.
+ BitMemoryReader reader2(reader.data(), bit_offset); // The offset is negative.
table.Decode(reader2);
} else {
table.Decode(reader);
}
+ return is_deduped;
}
void CodeInfo::Decode(const uint8_t* data, DecodeFlags flags) {
BitMemoryReader reader(data);
- packed_frame_size_ = reader.ReadVarint();
- core_spill_mask_ = reader.ReadVarint();
- fp_spill_mask_ = reader.ReadVarint();
- number_of_dex_registers_ = reader.ReadVarint();
- DecodeTable(stack_maps_, reader, data);
- DecodeTable(register_masks_, reader, data);
- DecodeTable(stack_masks_, reader, data);
- if (flags & DecodeFlags::GcMasksOnly) {
- return;
- }
- DecodeTable(inline_infos_, reader, data);
- DecodeTable(method_infos_, reader, data);
- if (flags & DecodeFlags::InlineInfoOnly) {
- return;
- }
- DecodeTable(dex_register_masks_, reader, data);
- DecodeTable(dex_register_maps_, reader, data);
- DecodeTable(dex_register_catalog_, reader, data);
+ ForEachHeaderField([this, &reader](auto member_pointer) {
+ this->*member_pointer = reader.ReadVarint();
+ });
+ ForEachBitTableField([this, &reader](auto member_pointer) {
+ DecodeTable(this->*member_pointer, reader);
+ }, flags);
size_in_bits_ = reader.NumberOfReadBits();
}
-template<typename Accessor>
-ALWAYS_INLINE void CodeInfo::Deduper::DedupeTable(BitMemoryReader& reader) {
- bool is_deduped = reader.ReadBit();
- DCHECK(!is_deduped);
- size_t bit_table_start = reader.NumberOfReadBits();
- BitTable<Accessor> bit_table(reader);
- BitMemoryRegion region = reader.GetReadRegion().Subregion(bit_table_start);
- auto it = dedupe_map_.insert(std::make_pair(region, /* placeholder */ 0));
- if (it.second /* new bit table */ || region.size_in_bits() < 32) {
- writer_.WriteBit(false); // Is not deduped.
- it.first->second = writer_.NumberOfWrittenBits();
- writer_.WriteRegion(region);
- } else {
- writer_.WriteBit(true); // Is deduped.
- size_t bit_offset = writer_.NumberOfWrittenBits();
- writer_.WriteVarint(bit_offset - it.first->second);
- }
-}
-
-size_t CodeInfo::Deduper::Dedupe(const uint8_t* code_info) {
+size_t CodeInfo::Deduper::Dedupe(const uint8_t* code_info_data) {
writer_.ByteAlign();
size_t deduped_offset = writer_.NumberOfWrittenBits() / kBitsPerByte;
- BitMemoryReader reader(code_info);
- writer_.WriteVarint(reader.ReadVarint()); // packed_frame_size_.
- writer_.WriteVarint(reader.ReadVarint()); // core_spill_mask_.
- writer_.WriteVarint(reader.ReadVarint()); // fp_spill_mask_.
- writer_.WriteVarint(reader.ReadVarint()); // number_of_dex_registers_.
- DedupeTable<StackMap>(reader);
- DedupeTable<RegisterMask>(reader);
- DedupeTable<MaskInfo>(reader);
- DedupeTable<InlineInfo>(reader);
- DedupeTable<MethodInfo>(reader);
- DedupeTable<MaskInfo>(reader);
- DedupeTable<DexRegisterMapInfo>(reader);
- DedupeTable<DexRegisterInfo>(reader);
+ BitMemoryReader reader(code_info_data);
+ CodeInfo code_info; // Temporary storage for decoded data.
+ ForEachHeaderField([this, &reader, &code_info](auto member_pointer) {
+ code_info.*member_pointer = reader.ReadVarint();
+ writer_.WriteVarint(code_info.*member_pointer);
+ });
+ ForEachBitTableField([this, &reader, &code_info](auto member_pointer) {
+ bool is_deduped = reader.ReadBit();
+ DCHECK(!is_deduped);
+ size_t bit_table_start = reader.NumberOfReadBits();
+ (code_info.*member_pointer).Decode(reader);
+ BitMemoryRegion region = reader.GetReadRegion().Subregion(bit_table_start);
+ auto it = dedupe_map_.insert(std::make_pair(region, /* placeholder */ 0));
+ if (it.second /* new bit table */ || region.size_in_bits() < 32) {
+ writer_.WriteBit(false); // Is not deduped.
+ it.first->second = writer_.NumberOfWrittenBits();
+ writer_.WriteRegion(region);
+ } else {
+ writer_.WriteBit(true); // Is deduped.
+ size_t bit_offset = writer_.NumberOfWrittenBits();
+ writer_.WriteVarint(bit_offset - it.first->second);
+ }
+ });
if (kIsDebugBuild) {
- CodeInfo old_code_info(code_info);
+ CodeInfo old_code_info(code_info_data);
CodeInfo new_code_info(writer_.data() + deduped_offset);
- DCHECK_EQ(old_code_info.packed_frame_size_, new_code_info.packed_frame_size_);
- DCHECK_EQ(old_code_info.core_spill_mask_, new_code_info.core_spill_mask_);
- DCHECK_EQ(old_code_info.fp_spill_mask_, new_code_info.fp_spill_mask_);
- DCHECK_EQ(old_code_info.number_of_dex_registers_, new_code_info.number_of_dex_registers_);
- DCHECK(old_code_info.stack_maps_.Equals(new_code_info.stack_maps_));
- DCHECK(old_code_info.register_masks_.Equals(new_code_info.register_masks_));
- DCHECK(old_code_info.stack_masks_.Equals(new_code_info.stack_masks_));
- DCHECK(old_code_info.inline_infos_.Equals(new_code_info.inline_infos_));
- DCHECK(old_code_info.method_infos_.Equals(new_code_info.method_infos_));
- DCHECK(old_code_info.dex_register_masks_.Equals(new_code_info.dex_register_masks_));
- DCHECK(old_code_info.dex_register_maps_.Equals(new_code_info.dex_register_maps_));
- DCHECK(old_code_info.dex_register_catalog_.Equals(new_code_info.dex_register_catalog_));
+ ForEachHeaderField([&old_code_info, &new_code_info](auto member_pointer) {
+ DCHECK_EQ(old_code_info.*member_pointer, new_code_info.*member_pointer);
+ });
+ ForEachBitTableField([&old_code_info, &new_code_info](auto member_pointer) {
+ DCHECK((old_code_info.*member_pointer).Equals(new_code_info.*member_pointer));
+ });
}
return deduped_offset;
@@ -207,33 +181,32 @@
}
}
-template<typename Accessor>
-static void AddTableSizeStats(const char* table_name,
- const BitTable<Accessor>& table,
- /*out*/ Stats* parent) {
- Stats* table_stats = parent->Child(table_name);
- table_stats->AddBits(table.BitSize());
- table_stats->Child("Header")->AddBits(table.HeaderBitSize());
- const char* const* column_names = GetBitTableColumnNames<Accessor>();
- for (size_t c = 0; c < table.NumColumns(); c++) {
- if (table.NumColumnBits(c) > 0) {
- Stats* column_stats = table_stats->Child(column_names[c]);
- column_stats->AddBits(table.NumRows() * table.NumColumnBits(c), table.NumRows());
+// Decode the CodeInfo while collecting size statistics.
+void CodeInfo::CollectSizeStats(const uint8_t* code_info_data, /*out*/ Stats* parent) {
+ Stats* codeinfo_stats = parent->Child("CodeInfo");
+ BitMemoryReader reader(code_info_data);
+ ForEachHeaderField([&reader](auto) { reader.ReadVarint(); });
+ codeinfo_stats->Child("Header")->AddBits(reader.NumberOfReadBits());
+ CodeInfo code_info; // Temporary storage for decoded tables.
+ ForEachBitTableField([codeinfo_stats, &reader, &code_info](auto member_pointer) {
+ auto& table = code_info.*member_pointer;
+ size_t bit_offset = reader.NumberOfReadBits();
+ bool deduped = DecodeTable(table, reader);
+ if (deduped) {
+ codeinfo_stats->Child("DedupeOffset")->AddBits(reader.NumberOfReadBits() - bit_offset);
+ } else {
+ Stats* table_stats = codeinfo_stats->Child(table.GetName());
+ table_stats->AddBits(reader.NumberOfReadBits() - bit_offset);
+ const char* const* column_names = table.GetColumnNames();
+ for (size_t c = 0; c < table.NumColumns(); c++) {
+ if (table.NumColumnBits(c) > 0) {
+ Stats* column_stats = table_stats->Child(column_names[c]);
+ column_stats->AddBits(table.NumRows() * table.NumColumnBits(c), table.NumRows());
+ }
+ }
}
- }
-}
-
-void CodeInfo::AddSizeStats(/*out*/ Stats* parent) const {
- Stats* stats = parent->Child("CodeInfo");
- stats->AddBytes(Size());
- AddTableSizeStats<StackMap>("StackMaps", stack_maps_, stats);
- AddTableSizeStats<RegisterMask>("RegisterMasks", register_masks_, stats);
- AddTableSizeStats<MaskInfo>("StackMasks", stack_masks_, stats);
- AddTableSizeStats<InlineInfo>("InlineInfos", inline_infos_, stats);
- AddTableSizeStats<MethodInfo>("MethodInfo", method_infos_, stats);
- AddTableSizeStats<MaskInfo>("DexRegisterMasks", dex_register_masks_, stats);
- AddTableSizeStats<DexRegisterMapInfo>("DexRegisterMaps", dex_register_maps_, stats);
- AddTableSizeStats<DexRegisterInfo>("DexRegisterCatalog", dex_register_catalog_, stats);
+ });
+ codeinfo_stats->AddBytes(BitsToBytesRoundUp(reader.NumberOfReadBits()));
}
void DexRegisterMap::Dump(VariableIndentationOutputStream* vios) const {
@@ -249,56 +222,49 @@
}
}
-template<typename Accessor>
-static void DumpTable(VariableIndentationOutputStream* vios,
- const char* table_name,
- const BitTable<Accessor>& table,
- bool verbose,
- bool is_mask = false) {
- if (table.NumRows() != 0) {
- vios->Stream() << table_name << " BitSize=" << table.BitSize();
- vios->Stream() << " Rows=" << table.NumRows() << " Bits={";
- const char* const* column_names = GetBitTableColumnNames<Accessor>();
- for (size_t c = 0; c < table.NumColumns(); c++) {
- vios->Stream() << (c != 0 ? " " : "");
- vios->Stream() << column_names[c] << "=" << table.NumColumnBits(c);
- }
- vios->Stream() << "}\n";
- if (verbose) {
- ScopedIndentation indent1(vios);
- for (size_t r = 0; r < table.NumRows(); r++) {
- vios->Stream() << "[" << std::right << std::setw(3) << r << "]={";
- for (size_t c = 0; c < table.NumColumns(); c++) {
- vios->Stream() << (c != 0 ? " " : "");
- if (is_mask) {
- BitMemoryRegion bits = table.GetBitMemoryRegion(r, c);
- for (size_t b = 0, e = bits.size_in_bits(); b < e; b++) {
- vios->Stream() << bits.LoadBit(e - b - 1);
- }
- } else {
- vios->Stream() << std::right << std::setw(8) << static_cast<int32_t>(table.Get(r, c));
- }
- }
- vios->Stream() << "}\n";
- }
- }
- }
-}
-
void CodeInfo::Dump(VariableIndentationOutputStream* vios,
uint32_t code_offset,
bool verbose,
InstructionSet instruction_set) const {
- vios->Stream() << "CodeInfo\n";
+ vios->Stream() << "CodeInfo BitSize=" << size_in_bits_
+ << " FrameSize:" << packed_frame_size_ * kStackAlignment
+ << " CoreSpillMask:" << std::hex << core_spill_mask_
+ << " FpSpillMask:" << std::hex << fp_spill_mask_
+ << " NumberOfDexRegisters:" << std::dec << number_of_dex_registers_
+ << "\n";
ScopedIndentation indent1(vios);
- DumpTable<StackMap>(vios, "StackMaps", stack_maps_, verbose);
- DumpTable<RegisterMask>(vios, "RegisterMasks", register_masks_, verbose);
- DumpTable<MaskInfo>(vios, "StackMasks", stack_masks_, verbose, true /* is_mask */);
- DumpTable<InlineInfo>(vios, "InlineInfos", inline_infos_, verbose);
- DumpTable<MethodInfo>(vios, "MethodInfo", method_infos_, verbose);
- DumpTable<MaskInfo>(vios, "DexRegisterMasks", dex_register_masks_, verbose, true /* is_mask */);
- DumpTable<DexRegisterMapInfo>(vios, "DexRegisterMaps", dex_register_maps_, verbose);
- DumpTable<DexRegisterInfo>(vios, "DexRegisterCatalog", dex_register_catalog_, verbose);
+ ForEachBitTableField([this, &vios, verbose](auto member_pointer) {
+ const auto& table = this->*member_pointer;
+ if (table.NumRows() != 0) {
+ vios->Stream() << table.GetName() << " BitSize=" << table.DataBitSize();
+ vios->Stream() << " Rows=" << table.NumRows() << " Bits={";
+ const char* const* column_names = table.GetColumnNames();
+ for (size_t c = 0; c < table.NumColumns(); c++) {
+ vios->Stream() << (c != 0 ? " " : "");
+ vios->Stream() << column_names[c] << "=" << table.NumColumnBits(c);
+ }
+ vios->Stream() << "}\n";
+ if (verbose) {
+ ScopedIndentation indent1(vios);
+ for (size_t r = 0; r < table.NumRows(); r++) {
+ vios->Stream() << "[" << std::right << std::setw(3) << r << "]={";
+ for (size_t c = 0; c < table.NumColumns(); c++) {
+ vios->Stream() << (c != 0 ? " " : "");
+ if (&table == static_cast<const void*>(&stack_masks_) ||
+ &table == static_cast<const void*>(&dex_register_masks_)) {
+ BitMemoryRegion bits = table.GetBitMemoryRegion(r, c);
+ for (size_t b = 0, e = bits.size_in_bits(); b < e; b++) {
+ vios->Stream() << bits.LoadBit(e - b - 1);
+ }
+ } else {
+ vios->Stream() << std::right << std::setw(8) << static_cast<int32_t>(table.Get(r, c));
+ }
+ }
+ vios->Stream() << "}\n";
+ }
+ }
+ }
+ });
// Display stack maps along with (live) Dex register maps.
if (verbose) {
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
index bfd646d..5f44286 100644
--- a/runtime/stack_map.h
+++ b/runtime/stack_map.h
@@ -128,7 +128,7 @@
OSR = 1,
Debug = 2,
};
- BIT_TABLE_HEADER()
+ BIT_TABLE_HEADER(StackMap)
BIT_TABLE_COLUMN(0, Kind)
BIT_TABLE_COLUMN(1, PackedNativePc)
BIT_TABLE_COLUMN(2, DexPc)
@@ -174,7 +174,7 @@
*/
class InlineInfo : public BitTableAccessor<6> {
public:
- BIT_TABLE_HEADER()
+ BIT_TABLE_HEADER(InlineInfo)
BIT_TABLE_COLUMN(0, IsLast) // Determines if there are further rows for further depths.
BIT_TABLE_COLUMN(1, DexPc)
BIT_TABLE_COLUMN(2, MethodInfoIndex)
@@ -201,21 +201,27 @@
const StackMap& stack_map) const;
};
-class MaskInfo : public BitTableAccessor<1> {
+class StackMask : public BitTableAccessor<1> {
public:
- BIT_TABLE_HEADER()
+ BIT_TABLE_HEADER(StackMask)
+ BIT_TABLE_COLUMN(0, Mask)
+};
+
+class DexRegisterMask : public BitTableAccessor<1> {
+ public:
+ BIT_TABLE_HEADER(DexRegisterMask)
BIT_TABLE_COLUMN(0, Mask)
};
class DexRegisterMapInfo : public BitTableAccessor<1> {
public:
- BIT_TABLE_HEADER()
+ BIT_TABLE_HEADER(DexRegisterMapInfo)
BIT_TABLE_COLUMN(0, CatalogueIndex)
};
class DexRegisterInfo : public BitTableAccessor<2> {
public:
- BIT_TABLE_HEADER()
+ BIT_TABLE_HEADER(DexRegisterInfo)
BIT_TABLE_COLUMN(0, Kind)
BIT_TABLE_COLUMN(1, PackedValue)
@@ -246,7 +252,7 @@
// therefore it is worth encoding the mask as value+shift.
class RegisterMask : public BitTableAccessor<2> {
public:
- BIT_TABLE_HEADER()
+ BIT_TABLE_HEADER(RegisterMask)
BIT_TABLE_COLUMN(0, Value)
BIT_TABLE_COLUMN(1, Shift)
@@ -259,7 +265,7 @@
// Separating them greatly improves dedup efficiency of the other tables.
class MethodInfo : public BitTableAccessor<1> {
public:
- BIT_TABLE_HEADER()
+ BIT_TABLE_HEADER(MethodInfo)
BIT_TABLE_COLUMN(0, MethodIndex)
};
@@ -280,9 +286,6 @@
size_t Dedupe(const uint8_t* code_info);
private:
- template<typename Accessor>
- void DedupeTable(BitMemoryReader& reader);
-
BitMemoryWriter<std::vector<uint8_t>> writer_;
// Deduplicate at BitTable level. The value is bit offset within the output.
@@ -290,7 +293,7 @@
};
enum DecodeFlags {
- Default = 0,
+ AllTables = 0,
// Limits the decoding only to the data needed by GC.
GcMasksOnly = 1,
// Limits the decoding only to the main stack map table and inline info table.
@@ -298,11 +301,11 @@
InlineInfoOnly = 2,
};
- explicit CodeInfo(const uint8_t* data, DecodeFlags flags = DecodeFlags::Default) {
+ explicit CodeInfo(const uint8_t* data, DecodeFlags flags = AllTables) {
Decode(reinterpret_cast<const uint8_t*>(data), flags);
}
- explicit CodeInfo(const OatQuickMethodHeader* header, DecodeFlags flags = DecodeFlags::Default);
+ explicit CodeInfo(const OatQuickMethodHeader* header, DecodeFlags flags = AllTables);
size_t Size() const {
return BitsToBytesRoundUp(size_in_bits_);
@@ -431,7 +434,7 @@
InstructionSet instruction_set) const;
// Accumulate code info size statistics into the given Stats tree.
- void AddSizeStats(/*out*/ Stats* parent) const;
+ static void CollectSizeStats(const uint8_t* code_info, /*out*/ Stats* parent);
ALWAYS_INLINE static QuickMethodFrameInfo DecodeFrameInfo(const uint8_t* data) {
BitMemoryReader reader(data);
@@ -442,6 +445,8 @@
}
private:
+ CodeInfo() {}
+
// Returns lower bound (first stack map whose pc is greater than or equal to the desired one).
// It ignores catch stack maps at the end (it is the same as if they had maximum pc value).
BitTable<StackMap>::const_iterator BinarySearchNativePc(uint32_t packed_pc) const;
@@ -453,16 +458,44 @@
void Decode(const uint8_t* data, DecodeFlags flags);
+ // Invokes the callback with member pointer of each header field.
+ template<typename Callback>
+ ALWAYS_INLINE static void ForEachHeaderField(Callback callback) {
+ callback(&CodeInfo::packed_frame_size_);
+ callback(&CodeInfo::core_spill_mask_);
+ callback(&CodeInfo::fp_spill_mask_);
+ callback(&CodeInfo::number_of_dex_registers_);
+ }
+
+ // Invokes the callback with member pointer of each BitTable field.
+ template<typename Callback>
+ ALWAYS_INLINE static void ForEachBitTableField(Callback callback, DecodeFlags flags = AllTables) {
+ callback(&CodeInfo::stack_maps_);
+ callback(&CodeInfo::register_masks_);
+ callback(&CodeInfo::stack_masks_);
+ if (flags & DecodeFlags::GcMasksOnly) {
+ return;
+ }
+ callback(&CodeInfo::inline_infos_);
+ callback(&CodeInfo::method_infos_);
+ if (flags & DecodeFlags::InlineInfoOnly) {
+ return;
+ }
+ callback(&CodeInfo::dex_register_masks_);
+ callback(&CodeInfo::dex_register_maps_);
+ callback(&CodeInfo::dex_register_catalog_);
+ }
+
uint32_t packed_frame_size_; // Frame size in kStackAlignment units.
uint32_t core_spill_mask_;
uint32_t fp_spill_mask_;
uint32_t number_of_dex_registers_;
BitTable<StackMap> stack_maps_;
BitTable<RegisterMask> register_masks_;
- BitTable<MaskInfo> stack_masks_;
+ BitTable<StackMask> stack_masks_;
BitTable<InlineInfo> inline_infos_;
BitTable<MethodInfo> method_infos_;
- BitTable<MaskInfo> dex_register_masks_;
+ BitTable<DexRegisterMask> dex_register_masks_;
BitTable<DexRegisterMapInfo> dex_register_maps_;
BitTable<DexRegisterInfo> dex_register_catalog_;
uint32_t size_in_bits_ = 0;
diff --git a/runtime/thread.cc b/runtime/thread.cc
index 0703a07..df7f19d 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -3605,7 +3605,7 @@
reinterpret_cast<uintptr_t>(cur_quick_frame));
uintptr_t native_pc_offset = method_header->NativeQuickPcOffset(GetCurrentQuickFramePc());
CodeInfo code_info(method_header, kPrecise
- ? CodeInfo::DecodeFlags::Default // We will need dex register maps.
+ ? CodeInfo::DecodeFlags::AllTables // We will need dex register maps.
: CodeInfo::DecodeFlags::GcMasksOnly);
StackMap map = code_info.GetStackMapForNativePcOffset(native_pc_offset);
DCHECK(map.IsValid());