-rw-r--r--  compiler/optimizing/stack_map_test.cc  |   6
-rw-r--r--  dex2oat/linker/oat_writer.cc            |  27
-rw-r--r--  libartbase/base/bit_memory_region.h     | 122
-rw-r--r--  libartbase/base/bit_table.h             |  20
-rw-r--r--  libartbase/base/bit_table_test.cc       |  12
-rw-r--r--  runtime/stack_map.cc                    |  87
-rw-r--r--  runtime/stack_map.h                     |  30
7 files changed, 170 insertions, 134 deletions
diff --git a/compiler/optimizing/stack_map_test.cc b/compiler/optimizing/stack_map_test.cc
index a281bb30f4..d28f09fbba 100644
--- a/compiler/optimizing/stack_map_test.cc
+++ b/compiler/optimizing/stack_map_test.cc
@@ -750,9 +750,9 @@ TEST(StackMapTest, TestDedupeBitTables) {
   ScopedArenaVector<uint8_t> memory = stream.Encode();
 
   std::vector<uint8_t> out;
-  CodeInfo::DedupeMap dedupe_map;
-  size_t deduped1 = CodeInfo::Dedupe(&out, memory.data(), &dedupe_map);
-  size_t deduped2 = CodeInfo::Dedupe(&out, memory.data(), &dedupe_map);
+  CodeInfo::Deduper deduper(&out);
+  size_t deduped1 = deduper.Dedupe(memory.data());
+  size_t deduped2 = deduper.Dedupe(memory.data());
 
   for (size_t deduped : { deduped1, deduped2 }) {
     CodeInfo code_info(out.data() + deduped);
diff --git a/dex2oat/linker/oat_writer.cc b/dex2oat/linker/oat_writer.cc
index 28942dad95..8bac7206c6 100644
--- a/dex2oat/linker/oat_writer.cc
+++ b/dex2oat/linker/oat_writer.cc
@@ -1438,10 +1438,10 @@ class OatWriter::LayoutReserveOffsetCodeMethodVisitor : public OrderedMethodVisi
 class OatWriter::InitMapMethodVisitor : public OatDexMethodVisitor {
  public:
-  static constexpr bool kDebugVerifyDedupedCodeInfo = false;
-
-  InitMapMethodVisitor(OatWriter* writer, size_t offset)
-      : OatDexMethodVisitor(writer, offset) {}
+  InitMapMethodVisitor(OatWriter* writer, size_t offset)
+      : OatDexMethodVisitor(writer, offset),
+        dedupe_bit_table_(&writer_->code_info_data_) {
+  }
 
   bool VisitMethod(size_t class_def_method_index,
                    const ClassAccessor::Method& method ATTRIBUTE_UNUSED)
@@ -1455,21 +1455,9 @@ class OatWriter::InitMapMethodVisitor : public OatDexMethodVisitor {
     ArrayRef<const uint8_t> map = compiled_method->GetVmapTable();
     if (map.size() != 0u) {
-      // Deduplicate the inner bittables within the CodeInfo.
-      std::vector<uint8_t>* data = &writer_->code_info_data_;
       size_t offset = dedupe_code_info_.GetOrCreate(map.data(), [=]() {
-        size_t deduped_offset = CodeInfo::Dedupe(data, map.data(), &dedupe_bit_table_);
-        if (kDebugVerifyDedupedCodeInfo) {
-          InstructionSet isa = writer_->GetCompilerOptions().GetInstructionSet();
-          std::stringstream old_code_info;
-          VariableIndentationOutputStream old_vios(&old_code_info);
-          std::stringstream new_code_info;
-          VariableIndentationOutputStream new_vios(&new_code_info);
-          CodeInfo(map.data()).Dump(&old_vios, 0, true, isa);
-          CodeInfo(data->data() + deduped_offset).Dump(&new_vios, 0, true, isa);
-          DCHECK_EQ(old_code_info.str(), new_code_info.str());
-        }
-        return offset_ + deduped_offset;
+        // Deduplicate the inner BitTable<>s within the CodeInfo.
+        return offset_ + dedupe_bit_table_.Dedupe(map.data());
       });
       // Code offset is not initialized yet, so set the map offset to 0u-offset.
       DCHECK_EQ(oat_class->method_offsets_[method_offsets_index_].code_offset_, 0u);
@@ -1487,8 +1475,8 @@ class OatWriter::InitMapMethodVisitor : public OatDexMethodVisitor {
   // The compiler already deduplicated the pointers but it did not dedupe the tables.
   SafeMap<const uint8_t*, size_t> dedupe_code_info_;
 
-  // Deduplicate at BitTable level. The value is bit offset within code_info_data_.
-  std::map<BitMemoryRegion, uint32_t, BitMemoryRegion::Less> dedupe_bit_table_;
+  // Deduplicate at BitTable level.
+  CodeInfo::Deduper dedupe_bit_table_;
 };
 
 class OatWriter::InitImageMethodVisitor : public OatDexMethodVisitor {
@@ -2082,6 +2070,7 @@ size_t OatWriter::InitOatMaps(size_t offset) {
     InitMapMethodVisitor visitor(this, offset);
     bool success = VisitDexMethods(&visitor);
     DCHECK(success);
+    code_info_data_.shrink_to_fit();
    offset += code_info_data_.size();
   }
   return offset;
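Note on the visitor above: it now layers two caches. `dedupe_code_info_` short-circuits on identical `CodeInfo` pointers, and only on a miss does the lambda run the table-level `CodeInfo::Deduper`. A minimal sketch of that get-or-create pattern, using a plain `std::map` and `std::function` in place of ART's `SafeMap::GetOrCreate()` (names and signature here are illustrative, not ART's API):

```cpp
#include <cstddef>
#include <cstdint>
#include <functional>
#include <map>

// Look up 'key' in 'cache'; run the (potentially expensive) 'create' callback
// only on a miss and remember its result. In the visitor above, the callback
// is the lambda that calls dedupe_bit_table_.Dedupe(map.data()).
size_t GetOrCreate(std::map<const uint8_t*, size_t>& cache,
                   const uint8_t* key,
                   const std::function<size_t()>& create) {
  auto it = cache.find(key);
  if (it != cache.end()) {
    return it->second;  // Same CodeInfo pointer as before: reuse its offset.
  }
  size_t value = create();
  cache.emplace(key, value);
  return value;
}
```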
diff --git a/libartbase/base/bit_memory_region.h b/libartbase/base/bit_memory_region.h
index 7d8de399b9..f9b643e2c2 100644
--- a/libartbase/base/bit_memory_region.h
+++ b/libartbase/base/bit_memory_region.h
@@ -29,39 +29,26 @@ namespace art {
 class BitMemoryRegion FINAL : public ValueObject {
  public:
   struct Less {
-    constexpr bool operator()(const BitMemoryRegion& lhs, const BitMemoryRegion& rhs) const {
-      if (lhs.size_in_bits() != rhs.size_in_bits()) {
-        return lhs.size_in_bits() < rhs.size_in_bits();
-      }
-      size_t bit = 0;
-      constexpr size_t kNumBits = BitSizeOf<uint32_t>();
-      for (; bit + kNumBits <= lhs.size_in_bits(); bit += kNumBits) {
-        uint32_t lhs_bits = lhs.LoadBits(bit, kNumBits);
-        uint32_t rhs_bits = rhs.LoadBits(bit, kNumBits);
-        if (lhs_bits != rhs_bits) {
-          return lhs_bits < rhs_bits;
-        }
-      }
-      size_t num_bits = lhs.size_in_bits() - bit;
-      return lhs.LoadBits(bit, num_bits) < rhs.LoadBits(bit, num_bits);
+    bool operator()(const BitMemoryRegion& lhs, const BitMemoryRegion& rhs) const {
+      return Compare(lhs, rhs) < 0;
     }
   };
 
   BitMemoryRegion() = default;
-  ALWAYS_INLINE BitMemoryRegion(void* data, size_t bit_start, size_t bit_size)
-      : data_(reinterpret_cast<uintptr_t*>(AlignDown(data, sizeof(uintptr_t)))),
-        bit_start_(bit_start + 8 * (reinterpret_cast<uintptr_t>(data) % sizeof(uintptr_t))),
-        bit_size_(bit_size) {
+  ALWAYS_INLINE BitMemoryRegion(uint8_t* data, ssize_t bit_start, size_t bit_size) {
+    // Normalize the data pointer. Note that bit_start may be negative.
+    uint8_t* aligned_data = AlignDown(data + (bit_start >> kBitsPerByteLog2), sizeof(uintptr_t));
+    data_ = reinterpret_cast<uintptr_t*>(aligned_data);
+    bit_start_ = bit_start + kBitsPerByte * (data - aligned_data);
+    bit_size_ = bit_size;
+    DCHECK_LT(bit_start_, static_cast<size_t>(kBitsPerIntPtrT));
   }
   ALWAYS_INLINE explicit BitMemoryRegion(MemoryRegion region)
     : BitMemoryRegion(region.begin(), /* bit_start */ 0, region.size_in_bits()) {
   }
   ALWAYS_INLINE BitMemoryRegion(MemoryRegion region, size_t bit_offset, size_t bit_length)
     : BitMemoryRegion(region) {
-    DCHECK_LE(bit_offset, bit_size_);
-    DCHECK_LE(bit_length, bit_size_ - bit_offset);
-    bit_start_ += bit_offset;
-    bit_size_ = bit_length;
+    *this = Subregion(bit_offset, bit_length);
   }
 
   ALWAYS_INLINE bool IsValid() const { return data_ != nullptr; }
@@ -70,6 +57,10 @@ class BitMemoryRegion FINAL : public ValueObject {
     return bit_size_;
   }
 
+  void Resize(size_t bit_size) {
+    bit_size_ = bit_size;
+  }
+
   ALWAYS_INLINE BitMemoryRegion Subregion(size_t bit_offset, size_t bit_length) const {
     DCHECK_LE(bit_offset, bit_size_);
     DCHECK_LE(bit_length, bit_size_ - bit_offset);
@@ -79,12 +70,11 @@ class BitMemoryRegion FINAL : public ValueObject {
     return result;
   }
 
-  // Increase the size of the region and return the newly added range (starting at the old end).
-  ALWAYS_INLINE BitMemoryRegion Extend(size_t bit_length) {
+  ALWAYS_INLINE BitMemoryRegion Subregion(size_t bit_offset) const {
+    DCHECK_LE(bit_offset, bit_size_);
     BitMemoryRegion result = *this;
-    result.bit_start_ += result.bit_size_;
-    result.bit_size_ = bit_length;
-    bit_size_ += bit_length;
+    result.bit_start_ += bit_offset;
+    result.bit_size_ -= bit_offset;
     return result;
   }
 
@@ -183,10 +173,26 @@
     return count;
   }
 
-  ALWAYS_INLINE bool Equals(const BitMemoryRegion& other) const {
-    return data_ == other.data_ &&
-           bit_start_ == other.bit_start_ &&
-           bit_size_ == other.bit_size_;
+  static int Compare(const BitMemoryRegion& lhs, const BitMemoryRegion& rhs) {
+    if (lhs.size_in_bits() != rhs.size_in_bits()) {
+      return (lhs.size_in_bits() < rhs.size_in_bits()) ? -1 : 1;
+    }
+    size_t bit = 0;
+    constexpr size_t kNumBits = BitSizeOf<uint32_t>();
+    for (; bit + kNumBits <= lhs.size_in_bits(); bit += kNumBits) {
+      uint32_t lhs_bits = lhs.LoadBits(bit, kNumBits);
+      uint32_t rhs_bits = rhs.LoadBits(bit, kNumBits);
+      if (lhs_bits != rhs_bits) {
+        return (lhs_bits < rhs_bits) ? -1 : 1;
+      }
+    }
+    size_t num_bits = lhs.size_in_bits() - bit;
+    uint32_t lhs_bits = lhs.LoadBits(bit, num_bits);
+    uint32_t rhs_bits = rhs.LoadBits(bit, num_bits);
+    if (lhs_bits != rhs_bits) {
+      return (lhs_bits < rhs_bits) ? -1 : 1;
+    }
+    return 0;
   }
 
  private:
@@ -198,28 +204,30 @@
 
 class BitMemoryReader {
  public:
-  explicit BitMemoryReader(const uint8_t* data, size_t bit_offset = 0)
-      : finished_region_(const_cast<uint8_t*>(data), /* bit_start */ 0, bit_offset) {
-    DCHECK_EQ(GetBitOffset(), bit_offset);
+  BitMemoryReader(BitMemoryReader&&) = default;
+  explicit BitMemoryReader(BitMemoryRegion data)
+      : finished_region_(data.Subregion(0, 0) /* set the length to zero */ ) {
+  }
+  explicit BitMemoryReader(const uint8_t* data, ssize_t bit_offset = 0)
+      : finished_region_(const_cast<uint8_t*>(data), bit_offset, /* bit_length */ 0) {
   }
 
-  size_t GetBitOffset() const { return finished_region_.size_in_bits(); }
+  BitMemoryRegion GetReadRegion() const { return finished_region_; }
 
-  ALWAYS_INLINE BitMemoryRegion Skip(size_t bit_length) {
-    return finished_region_.Extend(bit_length);
-  }
+  size_t NumberOfReadBits() const { return finished_region_.size_in_bits(); }
 
-  // Get the most recently read bits.
-  ALWAYS_INLINE BitMemoryRegion Tail(size_t bit_length) {
-    return finished_region_.Subregion(finished_region_.size_in_bits() - bit_length, bit_length);
+  ALWAYS_INLINE BitMemoryRegion ReadRegion(size_t bit_length) {
+    size_t bit_offset = finished_region_.size_in_bits();
+    finished_region_.Resize(bit_offset + bit_length);
+    return finished_region_.Subregion(bit_offset, bit_length);
   }
 
   ALWAYS_INLINE uint32_t ReadBits(size_t bit_length) {
-    return finished_region_.Extend(bit_length).LoadBits(0, bit_length);
+    return ReadRegion(bit_length).LoadBits(/* bit_offset */ 0, bit_length);
  }
 
   ALWAYS_INLINE bool ReadBit() {
-    return finished_region_.Extend(1).LoadBit(0);
+    return ReadRegion(/* bit_length */ 1).LoadBit(/* bit_offset */ 0);
   }
 
  private:
@@ -234,36 +242,46 @@ template<typename Vector>
 class BitMemoryWriter {
  public:
   explicit BitMemoryWriter(Vector* out, size_t bit_offset = 0)
-      : out_(out), bit_offset_(bit_offset) {
-    DCHECK_EQ(GetBitOffset(), bit_offset);
+      : out_(out), bit_start_(bit_offset), bit_offset_(bit_offset) {
+    DCHECK_EQ(NumberOfWrittenBits(), 0u);
+  }
+
+  BitMemoryRegion GetWrittenRegion() const {
+    return BitMemoryRegion(out_->data(), bit_start_, bit_offset_ - bit_start_);
   }
 
   const uint8_t* data() const { return out_->data(); }
 
-  size_t GetBitOffset() const { return bit_offset_; }
+  size_t NumberOfWrittenBits() const { return bit_offset_ - bit_start_; }
 
   ALWAYS_INLINE BitMemoryRegion Allocate(size_t bit_length) {
     out_->resize(BitsToBytesRoundUp(bit_offset_ + bit_length));
-    BitMemoryRegion region(MemoryRegion(out_->data(), out_->size()), bit_offset_, bit_length);
+    BitMemoryRegion region(out_->data(), bit_offset_, bit_length);
     DCHECK_LE(bit_length, std::numeric_limits<size_t>::max() - bit_offset_) << "Overflow";
     bit_offset_ += bit_length;
     return region;
   }
 
+  ALWAYS_INLINE void WriteRegion(const BitMemoryRegion& region) {
+    Allocate(region.size_in_bits()).StoreBits(/* bit_offset */ 0, region, region.size_in_bits());
+  }
+
   ALWAYS_INLINE void WriteBits(uint32_t value, size_t bit_length) {
-    Allocate(bit_length).StoreBits(0, value, bit_length);
+    Allocate(bit_length).StoreBits(/* bit_offset */ 0, value, bit_length);
   }
 
   ALWAYS_INLINE void WriteBit(bool value) {
-    Allocate(1).StoreBit(0, value);
+    Allocate(1).StoreBit(/* bit_offset */ 0, value);
   }
 
-  ALWAYS_INLINE void WriteRegion(const BitMemoryRegion& region) {
-    Allocate(region.size_in_bits()).StoreBits(0, region, region.size_in_bits());
+  ALWAYS_INLINE void ByteAlign() {
+    size_t end = bit_start_ + bit_offset_;
+    bit_offset_ += RoundUp(end, kBitsPerByte) - end;
   }
 
  private:
   Vector* out_;
+  size_t bit_start_;
   size_t bit_offset_;
 
   DISALLOW_COPY_AND_ASSIGN(BitMemoryWriter);
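The new `Compare()` replaces the old inline `Less` body: regions are ordered by bit length first, then by contents, read 32 bits at a time. A standalone sketch of the same length-then-contents three-way comparison over plain bit vectors (the chunked `LoadBits()` order in ART may tie-break differently, but any consistent total order is enough for the dedupe map):

```cpp
#include <cstddef>
#include <vector>

// Three-way compare of two bit sequences: the shorter sequence orders first,
// equal-length sequences are compared element by element from the start.
int CompareBits(const std::vector<bool>& lhs, const std::vector<bool>& rhs) {
  if (lhs.size() != rhs.size()) {
    return lhs.size() < rhs.size() ? -1 : 1;
  }
  for (size_t i = 0; i < lhs.size(); ++i) {
    if (lhs[i] != rhs[i]) {
      return lhs[i] < rhs[i] ? -1 : 1;
    }
  }
  return 0;  // Identical contents: the regions are candidates for dedupe.
}
```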
diff --git a/libartbase/base/bit_table.h b/libartbase/base/bit_table.h
index 1c7614b695..18de3d33d3 100644
--- a/libartbase/base/bit_table.h
+++ b/libartbase/base/bit_table.h
@@ -77,7 +77,7 @@ class BitTableBase {
   ALWAYS_INLINE void Decode(BitMemoryReader& reader) {
     // Decode row count and column sizes from the table header.
-    size_t initial_bit_offset = reader.GetBitOffset();
+    size_t initial_bit_offset = reader.NumberOfReadBits();
     num_rows_ = DecodeVarintBits(reader);
     if (num_rows_ != 0) {
       column_offset_[0] = 0;
@@ -86,10 +86,10 @@ class BitTableBase {
         column_offset_[i + 1] = dchecked_integral_cast<uint16_t>(column_end);
       }
     }
-    header_bit_size_ = reader.GetBitOffset() - initial_bit_offset;
+    header_bit_size_ = reader.NumberOfReadBits() - initial_bit_offset;
 
     // Record the region which contains the table data and skip past it.
-    table_data_ = reader.Skip(num_rows_ * NumRowBits());
+    table_data_ = reader.ReadRegion(num_rows_ * NumRowBits());
   }
 
   ALWAYS_INLINE uint32_t Get(uint32_t row, uint32_t column = 0) const {
@@ -122,6 +122,12 @@ class BitTableBase {
   size_t BitSize() const { return header_bit_size_ + table_data_.size_in_bits(); }
 
+  bool Equals(const BitTableBase& other) const {
+    return num_rows_ == other.num_rows_ &&
+        std::equal(column_offset_, column_offset_ + kNumColumns, other.column_offset_) &&
+        BitMemoryRegion::Compare(table_data_, other.table_data_) == 0;
+  }
+
  protected:
   BitMemoryRegion table_data_;
   size_t num_rows_ = 0;
@@ -376,7 +382,7 @@ class BitTableBuilderBase {
   // Encode the stored data into a BitTable.
   template<typename Vector>
   void Encode(BitMemoryWriter<Vector>& out) const {
-    size_t initial_bit_offset = out.GetBitOffset();
+    size_t initial_bit_offset = out.NumberOfWrittenBits();
 
     std::array<uint32_t, kNumColumns> column_bits;
     Measure(&column_bits);
@@ -398,7 +404,7 @@ class BitTableBuilderBase {
     // Verify the written data.
     if (kIsDebugBuild) {
       BitTableBase<kNumColumns> table;
-      BitMemoryReader reader(out.data(), initial_bit_offset);
+      BitMemoryReader reader(out.GetWrittenRegion().Subregion(initial_bit_offset));
       table.Decode(reader);
       DCHECK_EQ(size(), table.NumRows());
       for (uint32_t c = 0; c < kNumColumns; c++) {
@@ -467,7 +473,7 @@ class BitmapTableBuilder {
   // Encode the stored data into a BitTable.
   template<typename Vector>
   void Encode(BitMemoryWriter<Vector>& out) const {
-    size_t initial_bit_offset = out.GetBitOffset();
+    size_t initial_bit_offset = out.NumberOfWrittenBits();
 
     EncodeVarintBits(out, size());
     if (size() != 0) {
@@ -484,7 +490,7 @@ class BitmapTableBuilder {
     // Verify the written data.
     if (kIsDebugBuild) {
       BitTableBase<1> table;
-      BitMemoryReader reader(out.data(), initial_bit_offset);
+      BitMemoryReader reader(out.GetWrittenRegion().Subregion(initial_bit_offset));
       table.Decode(reader);
       DCHECK_EQ(size(), table.NumRows());
       DCHECK_EQ(max_num_bits_, table.NumColumnBits(0));
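The new `Equals()` compares the decoded header (`num_rows_`, per-column bit offsets) plus the raw table bits, which fully determines a `BitTableBase`: rows are fixed-width records packed back to back, and `Get(row, column)` just loads the right bit range. A toy version of that lookup, with illustrative types rather than ART's `BitMemoryRegion`:

```cpp
#include <cstdint>
#include <vector>

// Fixed-width columns packed back to back: row 'r' occupies bits
// [r * row_bits, (r + 1) * row_bits), and column 'c' starts at
// column_offset[c] within the row, column_offset[c + 1] - column_offset[c]
// bits wide.
struct PackedTable {
  std::vector<bool> bits;               // Table data, one element per bit (LSB first).
  std::vector<uint16_t> column_offset;  // One entry per column plus a terminator; [0] == 0.
  uint32_t row_bits;                    // Sum of all column widths.

  uint32_t Get(uint32_t row, uint32_t column) const {
    uint32_t begin = row * row_bits + column_offset[column];
    uint32_t width = column_offset[column + 1] - column_offset[column];
    uint32_t value = 0;
    for (uint32_t i = 0; i < width; ++i) {
      value |= static_cast<uint32_t>(bits[begin + i]) << i;
    }
    return value;
  }
};
```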
diff --git a/libartbase/base/bit_table_test.cc b/libartbase/base/bit_table_test.cc
index 2fd9052516..4f25730152 100644
--- a/libartbase/base/bit_table_test.cc
+++ b/libartbase/base/bit_table_test.cc
@@ -36,7 +36,7 @@ TEST(BitTableTest, TestVarint) {
       BitMemoryReader reader(buffer.data(), start_bit_offset);
       uint32_t result = DecodeVarintBits(reader);
-      EXPECT_EQ(writer.GetBitOffset(), reader.GetBitOffset());
+      EXPECT_EQ(writer.NumberOfWrittenBits(), reader.NumberOfReadBits());
       EXPECT_EQ(value, result);
     }
   }
@@ -54,7 +54,7 @@ TEST(BitTableTest, TestEmptyTable) {
   BitMemoryReader reader(buffer.data());
   BitTableBase<1> table(reader);
-  EXPECT_EQ(writer.GetBitOffset(), reader.GetBitOffset());
+  EXPECT_EQ(writer.NumberOfWrittenBits(), reader.NumberOfReadBits());
   EXPECT_EQ(0u, table.NumRows());
 }
 
@@ -75,7 +75,7 @@ TEST(BitTableTest, TestSingleColumnTable) {
   BitMemoryReader reader(buffer.data());
   BitTableBase<1> table(reader);
-  EXPECT_EQ(writer.GetBitOffset(), reader.GetBitOffset());
+  EXPECT_EQ(writer.NumberOfWrittenBits(), reader.NumberOfReadBits());
   EXPECT_EQ(4u, table.NumRows());
   EXPECT_EQ(42u, table.Get(0));
   EXPECT_EQ(kNoValue, table.Get(1));
@@ -98,7 +98,7 @@ TEST(BitTableTest, TestUnalignedTable) {
     BitMemoryReader reader(buffer.data(), start_bit_offset);
     BitTableBase<1> table(reader);
-    EXPECT_EQ(writer.GetBitOffset(), reader.GetBitOffset());
+    EXPECT_EQ(writer.NumberOfWrittenBits(), reader.NumberOfReadBits());
     EXPECT_EQ(1u, table.NumRows());
     EXPECT_EQ(42u, table.Get(0));
   }
@@ -119,7 +119,7 @@ TEST(BitTableTest, TestBigTable) {
   BitMemoryReader reader(buffer.data());
   BitTableBase<4> table(reader);
-  EXPECT_EQ(writer.GetBitOffset(), reader.GetBitOffset());
+  EXPECT_EQ(writer.NumberOfWrittenBits(), reader.NumberOfReadBits());
   EXPECT_EQ(2u, table.NumRows());
   EXPECT_EQ(42u, table.Get(0, 0));
   EXPECT_EQ(kNoValue, table.Get(0, 1));
@@ -169,7 +169,7 @@ TEST(BitTableTest, TestBitmapTable) {
   BitMemoryReader reader(buffer.data());
   BitTableBase<1> table(reader);
-  EXPECT_EQ(writer.GetBitOffset(), reader.GetBitOffset());
+  EXPECT_EQ(writer.NumberOfWrittenBits(), reader.NumberOfReadBits());
   for (auto it : indicies) {
     uint64_t expected = it.first;
     BitMemoryRegion actual = table.GetBitMemoryRegion(it.second);
diff --git a/runtime/stack_map.cc b/runtime/stack_map.cc
index d1000c5375..b0c59a67dd 100644
--- a/runtime/stack_map.cc
+++ b/runtime/stack_map.cc
@@ -34,15 +34,10 @@ CodeInfo::CodeInfo(const OatQuickMethodHeader* header, DecodeFlags flags)
 template<typename Accessor>
 ALWAYS_INLINE static void DecodeTable(BitTable<Accessor>& table,
                                       BitMemoryReader& reader,
-                                      const uint8_t* data) {
-  bool is_deduped = reader.ReadBit();
-  if (is_deduped) {
-    // 'data' points to the start of the reader's data.
-    uint32_t current_bit_offset = reader.GetBitOffset();
-    uint32_t bit_offset_backwards = DecodeVarintBits(reader) - current_bit_offset;
-    uint32_t byte_offset_backwards = BitsToBytesRoundUp(bit_offset_backwards);
-    BitMemoryReader reader2(data - byte_offset_backwards,
-                            byte_offset_backwards * kBitsPerByte - bit_offset_backwards);
+                                      const uint8_t* reader_data) {
+  if (reader.ReadBit() /* is_deduped */) {
+    ssize_t bit_offset = reader.NumberOfReadBits() - DecodeVarintBits(reader);
+    BitMemoryReader reader2(reader_data, bit_offset);  // The offset is negative.
     table.Decode(reader2);
   } else {
     table.Decode(reader);
@@ -69,45 +64,63 @@ void CodeInfo::Decode(const uint8_t* data, DecodeFlags flags) {
   DecodeTable(dex_register_masks_, reader, data);
   DecodeTable(dex_register_maps_, reader, data);
   DecodeTable(dex_register_catalog_, reader, data);
-  size_in_bits_ = reader.GetBitOffset();
+  size_in_bits_ = reader.NumberOfReadBits();
 }
 
 template<typename Accessor>
-ALWAYS_INLINE static void DedupeTable(BitMemoryWriter<std::vector<uint8_t>>& writer,
-                                      BitMemoryReader& reader,
-                                      CodeInfo::DedupeMap* dedupe_map) {
+ALWAYS_INLINE void CodeInfo::Deduper::DedupeTable(BitMemoryReader& reader) {
   bool is_deduped = reader.ReadBit();
   DCHECK(!is_deduped);
+  size_t bit_table_start = reader.NumberOfReadBits();
   BitTable<Accessor> bit_table(reader);
-  BitMemoryRegion region = reader.Tail(bit_table.BitSize());
-  auto it = dedupe_map->insert(std::make_pair(region, writer.GetBitOffset() + 1 /* dedupe bit */));
+  BitMemoryRegion region = reader.GetReadRegion().Subregion(bit_table_start);
+  auto it = dedupe_map_.insert(std::make_pair(region, /* placeholder */ 0));
   if (it.second /* new bit table */ || region.size_in_bits() < 32) {
-    writer.WriteBit(false);  // Is not deduped.
-    writer.WriteRegion(region);
+    writer_.WriteBit(false);  // Is not deduped.
+    it.first->second = writer_.NumberOfWrittenBits();
+    writer_.WriteRegion(region);
   } else {
-    writer.WriteBit(true);  // Is deduped.
-    EncodeVarintBits(writer, writer.GetBitOffset() - it.first->second);
+    writer_.WriteBit(true);  // Is deduped.
+    size_t bit_offset = writer_.NumberOfWrittenBits();
+    EncodeVarintBits(writer_, bit_offset - it.first->second);
  }
 }
 
-size_t CodeInfo::Dedupe(std::vector<uint8_t>* out, const uint8_t* in, DedupeMap* dedupe_map) {
-  // Remember the current offset in the output buffer so that we can return it later.
-  const size_t result = out->size();
-  BitMemoryReader reader(in);
-  BitMemoryWriter<std::vector<uint8_t>> writer(out, /* bit_offset */ out->size() * kBitsPerByte);
-  EncodeVarintBits(writer, DecodeVarintBits(reader));  // packed_frame_size_.
-  EncodeVarintBits(writer, DecodeVarintBits(reader));  // core_spill_mask_.
-  EncodeVarintBits(writer, DecodeVarintBits(reader));  // fp_spill_mask_.
-  EncodeVarintBits(writer, DecodeVarintBits(reader));  // number_of_dex_registers_.
-  DedupeTable<StackMap>(writer, reader, dedupe_map);
-  DedupeTable<RegisterMask>(writer, reader, dedupe_map);
-  DedupeTable<MaskInfo>(writer, reader, dedupe_map);
-  DedupeTable<InlineInfo>(writer, reader, dedupe_map);
-  DedupeTable<MethodInfo>(writer, reader, dedupe_map);
-  DedupeTable<MaskInfo>(writer, reader, dedupe_map);
-  DedupeTable<DexRegisterMapInfo>(writer, reader, dedupe_map);
-  DedupeTable<DexRegisterInfo>(writer, reader, dedupe_map);
-  return result;
+size_t CodeInfo::Deduper::Dedupe(const uint8_t* code_info) {
+  writer_.ByteAlign();
+  size_t deduped_offset = writer_.NumberOfWrittenBits() / kBitsPerByte;
+  BitMemoryReader reader(code_info);
+  EncodeVarintBits(writer_, DecodeVarintBits(reader));  // packed_frame_size_.
+  EncodeVarintBits(writer_, DecodeVarintBits(reader));  // core_spill_mask_.
+  EncodeVarintBits(writer_, DecodeVarintBits(reader));  // fp_spill_mask_.
+  EncodeVarintBits(writer_, DecodeVarintBits(reader));  // number_of_dex_registers_.
+  DedupeTable<StackMap>(reader);
+  DedupeTable<RegisterMask>(reader);
+  DedupeTable<MaskInfo>(reader);
+  DedupeTable<InlineInfo>(reader);
+  DedupeTable<MethodInfo>(reader);
+  DedupeTable<MaskInfo>(reader);
+  DedupeTable<DexRegisterMapInfo>(reader);
+  DedupeTable<DexRegisterInfo>(reader);
+
+  if (kIsDebugBuild) {
+    CodeInfo old_code_info(code_info);
+    CodeInfo new_code_info(writer_.data() + deduped_offset);
+    DCHECK_EQ(old_code_info.packed_frame_size_, new_code_info.packed_frame_size_);
+    DCHECK_EQ(old_code_info.core_spill_mask_, new_code_info.core_spill_mask_);
+    DCHECK_EQ(old_code_info.fp_spill_mask_, new_code_info.fp_spill_mask_);
+    DCHECK_EQ(old_code_info.number_of_dex_registers_, new_code_info.number_of_dex_registers_);
+    DCHECK(old_code_info.stack_maps_.Equals(new_code_info.stack_maps_));
+    DCHECK(old_code_info.register_masks_.Equals(new_code_info.register_masks_));
+    DCHECK(old_code_info.stack_masks_.Equals(new_code_info.stack_masks_));
+    DCHECK(old_code_info.inline_infos_.Equals(new_code_info.inline_infos_));
+    DCHECK(old_code_info.method_infos_.Equals(new_code_info.method_infos_));
+    DCHECK(old_code_info.dex_register_masks_.Equals(new_code_info.dex_register_masks_));
+    DCHECK(old_code_info.dex_register_maps_.Equals(new_code_info.dex_register_maps_));
+    DCHECK(old_code_info.dex_register_catalog_.Equals(new_code_info.dex_register_catalog_));
+  }
+
+  return deduped_offset;
 }
 
 BitTable<StackMap>::const_iterator CodeInfo::BinarySearchNativePc(uint32_t packed_pc) const {
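The encoding that `Deduper::DedupeTable()` writes and `DecodeTable()` reads is: one flag bit per table, `false` followed by the table bits inline, or `true` followed by a varint distance back to an earlier identical copy. A byte-level toy of the same back-reference idea, independent of ART's bit-granular varints and offset bookkeeping:

```cpp
#include <cstddef>
#include <cstdint>
#include <map>
#include <string>
#include <vector>

// Append 'blob' to 'out' either inline or as a back-reference to an identical
// blob written earlier. Returns the offset of the entry's flag byte.
size_t WriteDeduped(std::vector<uint8_t>* out,
                    std::map<std::string, size_t>* seen,
                    const std::string& blob) {
  size_t entry = out->size();
  auto it = seen->insert({blob, 0});
  if (it.second) {                     // First occurrence: store the payload inline.
    out->push_back(0);                 // "not deduped" flag.
    it.first->second = out->size();    // Remember where the payload starts.
    out->insert(out->end(), blob.begin(), blob.end());
  } else {                             // Repeat: store only the distance back.
    out->push_back(1);                 // "deduped" flag.
    size_t distance = out->size() - it.first->second;  // From just after the flag.
    out->push_back(static_cast<uint8_t>(distance));    // Toy: assumes distance <= 255.
  }
  return entry;
}
```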
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
index d6db05a3b8..cde08f312b 100644
--- a/runtime/stack_map.h
+++ b/runtime/stack_map.h
@@ -269,6 +269,26 @@ class MethodInfo : public BitTableAccessor<1> {
  */
 class CodeInfo {
  public:
+  class Deduper {
+   public:
+    explicit Deduper(std::vector<uint8_t>* output) : writer_(output) {
+      DCHECK_EQ(output->size(), 0u);
+    }
+
+    // Copy CodeInfo into output while de-duplicating the internal bit tables.
+    // It returns the byte offset of the copied CodeInfo within the output.
+    size_t Dedupe(const uint8_t* code_info);
+
+   private:
+    template<typename Accessor>
+    void DedupeTable(BitMemoryReader& reader);
+
+    BitMemoryWriter<std::vector<uint8_t>> writer_;
+
+    // Deduplicate at BitTable level. The value is bit offset within the output.
+    std::map<BitMemoryRegion, uint32_t, BitMemoryRegion::Less> dedupe_map_;
+  };
+
   enum DecodeFlags {
     Default = 0,
     // Limits the decoding only to the data needed by GC.
@@ -421,16 +441,6 @@ class CodeInfo {
                      DecodeVarintBits(reader));  // fp_spill_mask_.
   }
 
-  typedef std::map<BitMemoryRegion, uint32_t, BitMemoryRegion::Less> DedupeMap;
-
-  // Copy CodeInfo data while de-duplicating the internal bit tables.
-  // The 'out' vector must be reused between Dedupe calls (it does not have to be empty).
-  // The 'dedupe_map' stores the bit offsets of bit tables within the 'out' vector.
-  // It returns the byte offset of the copied CodeInfo within the 'out' vector.
-  static size_t Dedupe(std::vector<uint8_t>* out,
-                       const uint8_t* in,
-                       /*inout*/ DedupeMap* dedupe_map);
-
  private:
   // Returns lower bound (fist stack map which has pc greater or equal than the desired one).
   // It ignores catch stack maps at the end (it is the same as if they had maximum pc value).
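Putting the pieces together, the intended call pattern for the new API mirrors the updated stack_map_test.cc at the top of this change; roughly (ART types, so this only compiles inside the ART tree, and the variable names are illustrative):

```cpp
std::vector<uint8_t> out;
CodeInfo::Deduper deduper(&out);              // One Deduper per output buffer (must start empty).
size_t offset1 = deduper.Dedupe(code_info1);  // First blob: its tables are written inline.
size_t offset2 = deduper.Dedupe(code_info2);  // Repeated tables become back-references.
CodeInfo decoded(out.data() + offset1);       // Readers decode either form transparently.
```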