From a38e6cf2aaf4fd3d92b05c0a7a146fb5525ea72d Mon Sep 17 00:00:00 2001
From: David Srbecky
Date: Tue, 26 Jun 2018 18:13:49 +0100
Subject: Remove explicit size from CodeInfo.

It was mostly there since it was necessary to create the bound-checked
MemoryRegion for loading.

The new BitMemoryReader interface is much easier to tweak to avoid
needing to know the size ahead of time.

Keep the CHECK that the loader reads the expected number of bytes,
but move it to FillInCodeInfo.

This saves 0.2% of .oat file size.

Test: test-art-host-gtest-stack_map_test
Test: test-art-host-gtest-bit_table_test
Change-Id: I92ee936e9fd004da61b90841aff9c9f2029fcfbf
---
 runtime/stack_map.cc | 14 +++++---------
 1 file changed, 5 insertions(+), 9 deletions(-)

(limited to 'runtime/stack_map.cc')

diff --git a/runtime/stack_map.cc b/runtime/stack_map.cc
index 42d24784de..a3c6e05045 100644
--- a/runtime/stack_map.cc
+++ b/runtime/stack_map.cc
@@ -32,15 +32,12 @@ CodeInfo::CodeInfo(const OatQuickMethodHeader* header)
 }
 
 void CodeInfo::Decode(const uint8_t* data) {
-  size_t non_header_size = DecodeUnsignedLeb128(&data);
-  size_ = UnsignedLeb128Size(non_header_size) + non_header_size;
-  const uint8_t* end = data + non_header_size;
+  const uint8_t* begin = data;
   frame_size_in_bytes_ = DecodeUnsignedLeb128(&data);
   core_spill_mask_ = DecodeUnsignedLeb128(&data);
   fp_spill_mask_ = DecodeUnsignedLeb128(&data);
   number_of_dex_registers_ = DecodeUnsignedLeb128(&data);
-  MemoryRegion region(const_cast<uint8_t*>(data), end - data);
-  BitMemoryReader reader(BitMemoryRegion(region), /* bit_offset */ 0);
+  BitMemoryReader reader(data, /* bit_offset */ 0);
   stack_maps_.Decode(reader);
   register_masks_.Decode(reader);
   stack_masks_.Decode(reader);
@@ -49,7 +46,7 @@ void CodeInfo::Decode(const uint8_t* data) {
   dex_register_masks_.Decode(reader);
   dex_register_maps_.Decode(reader);
   dex_register_catalog_.Decode(reader);
-  CHECK_EQ(BitsToBytesRoundUp(reader.GetBitOffset()), region.size()) << "Invalid CodeInfo";
+  size_in_bits_ = (data - begin) * kBitsPerByte + reader.GetBitOffset();
 }
 
 BitTable::const_iterator CodeInfo::BinarySearchNativePc(uint32_t packed_pc) const {
@@ -154,8 +151,7 @@ static void AddTableSizeStats(const char* table_name,
 
 void CodeInfo::AddSizeStats(/*out*/ Stats* parent) const {
   Stats* stats = parent->Child("CodeInfo");
-  stats->AddBytes(size_);
-  stats->Child("Header")->AddBytes(UnsignedLeb128Size(size_));
+  stats->AddBytes(Size());
   AddTableSizeStats("StackMaps", stack_maps_, stats);
   AddTableSizeStats("RegisterMasks", register_masks_, stats);
   AddTableSizeStats("StackMasks", stack_masks_, stats);
@@ -222,7 +218,7 @@ void CodeInfo::Dump(VariableIndentationOutputStream* vios,
                     const MethodInfo& method_info) const {
   vios->Stream()
       << "CodeInfo"
-      << " BitSize=" << size_ * kBitsPerByte
+      << " BitSize=" << size_in_bits_
      << "\n";
   ScopedIndentation indent1(vios);
   DumpTable(vios, "StackMaps", stack_maps_, verbose);
-- 
cgit v1.2.3-59-g8ed1b
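
For readers unfamiliar with the pattern this change adopts, below is a minimal standalone C++ sketch. The BitReader and Payload types are hypothetical stand-ins, not ART's real BitMemoryReader or CodeInfo API: the encoded payload no longer carries a size prefix, the decoder derives the consumed size from the reader's final bit position, and the caller that already knows how many bytes it expected to consume (the role FillInCodeInfo plays in this change) performs the check.

// Minimal sketch of decoding without a size prefix. All names here are
// hypothetical illustrations, not ART's actual classes.
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

// Stand-in for a bit-granularity reader over a byte buffer.
class BitReader {
 public:
  explicit BitReader(const uint8_t* data) : data_(data), bit_offset_(0) {}

  // Read `count` bits, least-significant bit first within each byte.
  uint32_t ReadBits(size_t count) {
    uint32_t value = 0;
    for (size_t i = 0; i < count; ++i, ++bit_offset_) {
      uint32_t bit = (data_[bit_offset_ / 8] >> (bit_offset_ % 8)) & 1u;
      value |= bit << i;
    }
    return value;
  }

  size_t GetBitOffset() const { return bit_offset_; }

 private:
  const uint8_t* data_;
  size_t bit_offset_;
};

// Stand-in decoder: instead of reading its own size up front, it reports
// how many bits it actually consumed (like CodeInfo's size_in_bits_).
struct Payload {
  uint32_t a = 0;
  uint32_t b = 0;
  size_t size_in_bits = 0;

  void Decode(const uint8_t* data) {
    BitReader reader(data);
    a = reader.ReadBits(5);
    b = reader.ReadBits(11);
    size_in_bits = reader.GetBitOffset();
  }

  // Byte size rounded up, analogous to what a Size() accessor would return.
  size_t SizeInBytes() const { return (size_in_bits + 7) / 8; }
};

int main() {
  std::vector<uint8_t> buffer = {0xAB, 0xCD};  // 16 bits of encoded data.
  Payload payload;
  payload.Decode(buffer.data());

  // The bounds check moves to the caller, which still knows how many bytes
  // the decoder was expected to consume.
  assert(payload.SizeInBytes() == buffer.size());
  return 0;
}

The trade-off mirrors the commit message: dropping the LEB128 size prefix saves a small amount per method (the reported 0.2% of .oat file size) at the cost of having the decoder itself, rather than a bounds-checked region, report how far it read.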