summaryrefslogtreecommitdiff
path: root/runtime/stack_map.cc
diff options
context:
space:
mode:
author David Srbecky <dsrbecky@google.com> 2018-06-21 15:36:48 +0100
committer David Srbecky <dsrbecky@google.com> 2018-06-22 13:24:28 +0100
commit 078d7ba2c20d7778f8988e89a70d47e07372cc5a (patch)
tree 5f9340183de479241e8c043a0996aadc41583b5a /runtime/stack_map.cc
parent 0b4e5a3a1275a4aa6955a0576ab9d57eedd5bdd2 (diff)
Implement BitMemory{Reader,Writer}
Two simple classes which replace the need to pass the (BitMemoryRegion, bit_offset) tuple everywhere. This slightly simplifies the code and it also makes it possible to optimize those classes in the future. Test: test-art-host-gtest-stack_map_test Test: test-art-host-gtest-bit_table_test Change-Id: I4806c805149a07e1a11b76405ca27960a0012c69
Diffstat (limited to 'runtime/stack_map.cc')
-rw-r--r-- runtime/stack_map.cc | 17
1 file changed, 17 insertions, 0 deletions
diff --git a/runtime/stack_map.cc b/runtime/stack_map.cc
index f40168b8b8..f9e2d27439 100644
--- a/runtime/stack_map.cc
+++ b/runtime/stack_map.cc
@@ -26,6 +26,23 @@
namespace art {
+void CodeInfo::Decode(const uint8_t* data) {
+ size_t non_header_size = DecodeUnsignedLeb128(&data);
+ size_ = UnsignedLeb128Size(non_header_size) + non_header_size;
+ MemoryRegion region(const_cast<uint8_t*>(data), non_header_size);
+ BitMemoryReader reader(BitMemoryRegion(region), /* bit_offset */ 0);
+ stack_maps_.Decode(reader);
+ register_masks_.Decode(reader);
+ stack_masks_.Decode(reader);
+ invoke_infos_.Decode(reader);
+ inline_infos_.Decode(reader);
+ dex_register_masks_.Decode(reader);
+ dex_register_maps_.Decode(reader);
+ dex_register_catalog_.Decode(reader);
+ number_of_dex_registers_ = DecodeVarintBits(reader);
+ CHECK_EQ(non_header_size, BitsToBytesRoundUp(reader.GetBitOffset())) << "Invalid CodeInfo";
+}
+
BitTable<StackMap>::const_iterator CodeInfo::BinarySearchNativePc(uint32_t packed_pc) const {
return std::partition_point(
stack_maps_.begin(),