path: root/runtime/stack_map.cc
author David Srbecky <dsrbecky@google.com> 2018-06-26 18:13:49 +0100
committer David Srbecky <dsrbecky@google.com> 2018-06-27 10:15:32 +0100
commit a38e6cf2aaf4fd3d92b05c0a7a146fb5525ea72d (patch)
tree   377c74c834776b9942bb7c2853e44447dac27f83 /runtime/stack_map.cc
parent f6ba5b316b51d0fb9f91cb51a42e51dfeee62ee4 (diff)
Remove explicit size from CodeInfo.
It was mostly there since it was necessary to create the bound-checked
MemoryRegion for loading. The new BitMemoryReader interface is much
easier to tweak to avoid needing to know the size ahead of time.

Keep the CHECK that the loader reads the expected number of bytes,
but move it to FillInCodeInfo.

This saves 0.2% of .oat file size.

Test: test-art-host-gtest-stack_map_test
Test: test-art-host-gtest-bit_table_test
Change-Id: I92ee936e9fd004da61b90841aff9c9f2029fcfbf
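To make the trade-off concrete, here is a minimal, self-contained sketch of
the "decode first, measure afterwards" pattern this change adopts. It does
not use ART's real BitMemoryReader or DecodeUnsignedLeb128; ToyBitReader,
DecodeUleb128, and DecodeAndMeasureBits are hypothetical stand-ins that only
illustrate deriving the consumed size from the reader's final bit offset
instead of a stored length prefix.

#include <cstddef>
#include <cstdint>
#include <cstdio>

// Toy ULEB128 decoder, standing in for art's DecodeUnsignedLeb128().
static uint32_t DecodeUleb128(const uint8_t** data) {
  uint32_t result = 0;
  int shift = 0;
  uint8_t byte;
  do {
    byte = *(*data)++;
    result |= static_cast<uint32_t>(byte & 0x7f) << shift;
    shift += 7;
  } while ((byte & 0x80) != 0);
  return result;
}

// Toy bit reader, standing in for art's BitMemoryReader: it needs no end
// pointer, only a cursor, so the caller can measure what was consumed.
class ToyBitReader {
 public:
  explicit ToyBitReader(const uint8_t* data) : data_(data) {}
  uint32_t ReadBits(size_t count) {
    uint32_t value = 0;
    for (size_t i = 0; i < count; ++i, ++bit_offset_) {
      uint32_t bit = (data_[bit_offset_ / 8] >> (bit_offset_ % 8)) & 1u;
      value |= bit << i;
    }
    return value;
  }
  size_t GetBitOffset() const { return bit_offset_; }
 private:
  const uint8_t* data_;
  size_t bit_offset_ = 0;
};

// After this change: no size prefix is stored. Decode first, then derive
// the size from how far the cursor advanced (mirroring Decode() below).
static size_t DecodeAndMeasureBits(const uint8_t* data) {
  const uint8_t* begin = data;
  uint32_t frame_size = DecodeUleb128(&data);      // header stays LEB128
  ToyBitReader reader(data);
  uint32_t field = reader.ReadBits(5);             // stand-in for bit tables
  std::printf("frame_size=%u field=%u\n", frame_size, field);
  return (data - begin) * 8 + reader.GetBitOffset();  // i.e. size_in_bits_
}

int main() {
  const uint8_t encoded[] = {0x90, 0x01, 0x1b};  // frame_size=144, then bits
  std::printf("consumed %zu bits\n", DecodeAndMeasureBits(encoded));
  return 0;
}

The caller that knows the true payload size can still CHECK the returned
bit count against it, which is why the commit keeps the bound check but
moves it out of the decoder, to FillInCodeInfo.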
Diffstat (limited to 'runtime/stack_map.cc')
-rw-r--r--  runtime/stack_map.cc | 14
1 file changed, 5 insertions(+), 9 deletions(-)
diff --git a/runtime/stack_map.cc b/runtime/stack_map.cc
index 42d24784de..a3c6e05045 100644
--- a/runtime/stack_map.cc
+++ b/runtime/stack_map.cc
@@ -32,15 +32,12 @@ CodeInfo::CodeInfo(const OatQuickMethodHeader* header)
}
void CodeInfo::Decode(const uint8_t* data) {
- size_t non_header_size = DecodeUnsignedLeb128(&data);
- size_ = UnsignedLeb128Size(non_header_size) + non_header_size;
- const uint8_t* end = data + non_header_size;
+ const uint8_t* begin = data;
frame_size_in_bytes_ = DecodeUnsignedLeb128(&data);
core_spill_mask_ = DecodeUnsignedLeb128(&data);
fp_spill_mask_ = DecodeUnsignedLeb128(&data);
number_of_dex_registers_ = DecodeUnsignedLeb128(&data);
- MemoryRegion region(const_cast<uint8_t*>(data), end - data);
- BitMemoryReader reader(BitMemoryRegion(region), /* bit_offset */ 0);
+ BitMemoryReader reader(data, /* bit_offset */ 0);
stack_maps_.Decode(reader);
register_masks_.Decode(reader);
stack_masks_.Decode(reader);
@@ -49,7 +46,7 @@ void CodeInfo::Decode(const uint8_t* data) {
dex_register_masks_.Decode(reader);
dex_register_maps_.Decode(reader);
dex_register_catalog_.Decode(reader);
- CHECK_EQ(BitsToBytesRoundUp(reader.GetBitOffset()), region.size()) << "Invalid CodeInfo";
+ size_in_bits_ = (data - begin) * kBitsPerByte + reader.GetBitOffset();
}
BitTable<StackMap>::const_iterator CodeInfo::BinarySearchNativePc(uint32_t packed_pc) const {
@@ -154,8 +151,7 @@ static void AddTableSizeStats(const char* table_name,
void CodeInfo::AddSizeStats(/*out*/ Stats* parent) const {
Stats* stats = parent->Child("CodeInfo");
- stats->AddBytes(size_);
- stats->Child("Header")->AddBytes(UnsignedLeb128Size(size_));
+ stats->AddBytes(Size());
AddTableSizeStats<StackMap>("StackMaps", stack_maps_, stats);
AddTableSizeStats<RegisterMask>("RegisterMasks", register_masks_, stats);
AddTableSizeStats<MaskInfo>("StackMasks", stack_masks_, stats);
@@ -222,7 +218,7 @@ void CodeInfo::Dump(VariableIndentationOutputStream* vios,
const MethodInfo& method_info) const {
vios->Stream()
<< "CodeInfo"
- << " BitSize=" << size_ * kBitsPerByte
+ << " BitSize=" << size_in_bits_
<< "\n";
ScopedIndentation indent1(vios);
DumpTable<StackMap>(vios, "StackMaps", stack_maps_, verbose);
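For reference, the Size() call in the AddSizeStats hunk above presumably
rounds the tracked bit count up to whole bytes. A minimal sketch of that
rounding, assuming it matches ART's BitsToBytesRoundUp helper seen in the
removed CHECK (the constant and asserts here are illustrative):

#include <cstddef>

static constexpr size_t kBitsPerByte = 8;

// Round a bit count up to whole bytes; this is what a Size() accessor
// over size_in_bits_ would return for the byte-based stats above.
constexpr size_t BitsToBytesRoundUp(size_t bits) {
  return (bits + kBitsPerByte - 1) / kBitsPerByte;
}

static_assert(BitsToBytesRoundUp(0) == 0, "empty stream takes no bytes");
static_assert(BitsToBytesRoundUp(21) == 3, "21 bits round up to 3 bytes");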