-rw-r--r-- | compiler/optimizing/stack_map_stream.cc |  8
-rw-r--r-- | compiler/optimizing/stack_map_test.cc   | 27
-rw-r--r-- | oatdump/oatdump.cc                      |  4
-rw-r--r-- | runtime/bit_memory_region.h             | 69
-rw-r--r-- | runtime/memory_region.cc                |  4
-rw-r--r-- | runtime/memory_region_test.cc           | 32
-rw-r--r-- | runtime/oat.h                           |  2
-rw-r--r-- | runtime/quick_exception_handler.cc      |  2
-rw-r--r-- | runtime/stack_map.cc                    |  4
-rw-r--r-- | runtime/stack_map.h                     | 48
-rw-r--r-- | runtime/thread.cc                       |  2
11 files changed, 164 insertions, 38 deletions
diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc
index a9a1e6f592..1b9bd7eb31 100644
--- a/compiler/optimizing/stack_map_stream.cc
+++ b/compiler/optimizing/stack_map_stream.cc
@@ -165,7 +165,7 @@ size_t StackMapStream::PrepareForFillIn() {
                                                     inline_info_size_,
                                                     register_mask_max_,
                                                     stack_mask_number_of_bits);
-  stack_maps_size_ = stack_maps_.size() * stack_map_size;
+  stack_maps_size_ = RoundUp(stack_maps_.size() * stack_map_size, kBitsPerByte) / kBitsPerByte;
   dex_register_location_catalog_size_ = ComputeDexRegisterLocationCatalogSize();
 
   size_t non_header_size =
@@ -178,7 +178,7 @@ size_t StackMapStream::PrepareForFillIn() {
   CodeInfoEncoding code_info_encoding;
   code_info_encoding.non_header_size = non_header_size;
   code_info_encoding.number_of_stack_maps = stack_maps_.size();
-  code_info_encoding.stack_map_size_in_bytes = stack_map_size;
+  code_info_encoding.stack_map_size_in_bits = stack_map_size;
   code_info_encoding.stack_map_encoding = stack_map_encoding_;
   code_info_encoding.inline_info_encoding = inline_info_encoding_;
   code_info_encoding.number_of_location_catalog_entries = location_catalog_entries_.size();
@@ -322,7 +322,7 @@ void StackMapStream::FillIn(MemoryRegion region) {
     stack_map.SetDexPc(stack_map_encoding_, entry.dex_pc);
     stack_map.SetNativePcCodeOffset(stack_map_encoding_, entry.native_pc_code_offset);
     stack_map.SetRegisterMask(stack_map_encoding_, entry.register_mask);
-    size_t number_of_stack_mask_bits = stack_map.GetNumberOfStackMaskBits(stack_map_encoding_);
+    size_t number_of_stack_mask_bits = code_info.GetNumberOfStackMaskBits(encoding);
     if (entry.sp_mask != nullptr) {
       for (size_t bit = 0; bit < number_of_stack_mask_bits; bit++) {
         stack_map.SetStackMaskBit(stack_map_encoding_, bit, entry.sp_mask->IsBitSet(bit));
@@ -551,7 +551,7 @@ void StackMapStream::CheckCodeInfo(MemoryRegion region) const {
               entry.native_pc_code_offset.Uint32Value(instruction_set_));
     DCHECK_EQ(stack_map.GetDexPc(stack_map_encoding), entry.dex_pc);
     DCHECK_EQ(stack_map.GetRegisterMask(stack_map_encoding), entry.register_mask);
-    size_t num_stack_mask_bits = stack_map.GetNumberOfStackMaskBits(stack_map_encoding);
+    size_t num_stack_mask_bits = code_info.GetNumberOfStackMaskBits(encoding);
    if (entry.sp_mask != nullptr) {
       DCHECK_GE(num_stack_mask_bits, entry.sp_mask->GetNumberOfBits());
       for (size_t b = 0; b < num_stack_mask_bits; b++) {
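Note: the StackMapStream change above is the core of this CL. `stack_map_size` (returned by `StackMapEncoding::SetFromSizes()`) is now a bit count rather than a byte count, so stack map entries are packed back to back at bit granularity and only the total is rounded up to whole bytes. A minimal sketch of the new size math, using standalone names rather than the actual StackMapStream members:

```cpp
#include <cstddef>

// Sketch only: mirrors RoundUp(stack_maps_.size() * stack_map_size, kBitsPerByte) / kBitsPerByte
// from PrepareForFillIn(). kBitsPerByte matches ART's constant; the function name is invented.
constexpr size_t kBitsPerByte = 8;

size_t StackMapsSizeInBytes(size_t num_stack_maps, size_t stack_map_size_in_bits) {
  const size_t total_bits = num_stack_maps * stack_map_size_in_bits;
  return (total_bits + kBitsPerByte - 1) / kBitsPerByte;  // Round up once, at the end.
}

// Example: 100 entries of 37 bits each needed 100 * 5 = 500 bytes when every entry was
// byte-aligned; bit-packing them needs only (100 * 37 + 7) / 8 = 463 bytes.
```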
diff --git a/compiler/optimizing/stack_map_test.cc b/compiler/optimizing/stack_map_test.cc
index f68695bcbc..da4597e385 100644
--- a/compiler/optimizing/stack_map_test.cc
+++ b/compiler/optimizing/stack_map_test.cc
@@ -27,10 +27,10 @@ namespace art {
 // Check that the stack mask of given stack map is identical
 // to the given bit vector. Returns true if they are same.
 static bool CheckStackMask(
+    int number_of_bits,
     const StackMap& stack_map,
     StackMapEncoding& encoding,
     const BitVector& bit_vector) {
-  int number_of_bits = stack_map.GetNumberOfStackMaskBits(encoding);
   if (bit_vector.GetHighestBitSet() >= number_of_bits) {
     return false;
   }
@@ -81,7 +81,10 @@ TEST(StackMapTest, Test1) {
   ASSERT_EQ(64u, stack_map.GetNativePcOffset(encoding.stack_map_encoding, kRuntimeISA));
   ASSERT_EQ(0x3u, stack_map.GetRegisterMask(encoding.stack_map_encoding));
 
-  ASSERT_TRUE(CheckStackMask(stack_map, encoding.stack_map_encoding, sp_mask));
+  ASSERT_TRUE(CheckStackMask(code_info.GetNumberOfStackMaskBits(encoding),
+                             stack_map,
+                             encoding.stack_map_encoding,
+                             sp_mask));
 
   ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding.stack_map_encoding));
   DexRegisterMap dex_register_map =
@@ -196,7 +199,10 @@ TEST(StackMapTest, Test2) {
   ASSERT_EQ(64u, stack_map.GetNativePcOffset(encoding.stack_map_encoding, kRuntimeISA));
   ASSERT_EQ(0x3u, stack_map.GetRegisterMask(encoding.stack_map_encoding));
 
-  ASSERT_TRUE(CheckStackMask(stack_map, encoding.stack_map_encoding, sp_mask1));
+  ASSERT_TRUE(CheckStackMask(code_info.GetNumberOfStackMaskBits(encoding),
+                             stack_map,
+                             encoding.stack_map_encoding,
+                             sp_mask1));
 
   ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding.stack_map_encoding));
   DexRegisterMap dex_register_map =
@@ -255,7 +261,10 @@ TEST(StackMapTest, Test2) {
   ASSERT_EQ(128u, stack_map.GetNativePcOffset(encoding.stack_map_encoding, kRuntimeISA));
   ASSERT_EQ(0xFFu, stack_map.GetRegisterMask(encoding.stack_map_encoding));
 
-  ASSERT_TRUE(CheckStackMask(stack_map, encoding.stack_map_encoding, sp_mask2));
+  ASSERT_TRUE(CheckStackMask(code_info.GetNumberOfStackMaskBits(encoding),
+                             stack_map,
+                             encoding.stack_map_encoding,
+                             sp_mask2));
 
   ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding.stack_map_encoding));
   DexRegisterMap dex_register_map =
@@ -309,7 +318,10 @@ TEST(StackMapTest, Test2) {
   ASSERT_EQ(192u, stack_map.GetNativePcOffset(encoding.stack_map_encoding, kRuntimeISA));
   ASSERT_EQ(0xABu, stack_map.GetRegisterMask(encoding.stack_map_encoding));
 
-  ASSERT_TRUE(CheckStackMask(stack_map, encoding.stack_map_encoding, sp_mask3));
+  ASSERT_TRUE(CheckStackMask(code_info.GetNumberOfStackMaskBits(encoding),
+                             stack_map,
+                             encoding.stack_map_encoding,
+                             sp_mask3));
 
   ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding.stack_map_encoding));
   DexRegisterMap dex_register_map =
@@ -363,7 +375,10 @@ TEST(StackMapTest, Test2) {
   ASSERT_EQ(256u, stack_map.GetNativePcOffset(encoding.stack_map_encoding, kRuntimeISA));
   ASSERT_EQ(0xCDu, stack_map.GetRegisterMask(encoding.stack_map_encoding));
 
-  ASSERT_TRUE(CheckStackMask(stack_map, encoding.stack_map_encoding, sp_mask4));
+  ASSERT_TRUE(CheckStackMask(code_info.GetNumberOfStackMaskBits(encoding),
+                             stack_map,
+                             encoding.stack_map_encoding,
+                             sp_mask4));
 
   ASSERT_TRUE(stack_map.HasDexRegisterMap(encoding.stack_map_encoding));
   DexRegisterMap dex_register_map =
diff --git a/oatdump/oatdump.cc b/oatdump/oatdump.cc
index 69901c13cd..b6da6c13f3 100644
--- a/oatdump/oatdump.cc
+++ b/oatdump/oatdump.cc
@@ -1575,7 +1575,7 @@ class OatDumper {
       stats_.AddBits(
           Stats::kByteKindStackMapRegisterMask,
           stack_map_encoding.GetRegisterMaskEncoding().BitSize() * num_stack_maps);
-      const size_t stack_mask_bits = encoding.stack_map_size_in_bytes * kBitsPerByte -
+      const size_t stack_mask_bits = encoding.stack_map_size_in_bits -
           stack_map_encoding.GetStackMaskBitOffset();
       stats_.AddBits(
           Stats::kByteKindStackMapMask,
@@ -1584,7 +1584,7 @@ class OatDumper {
          stack_map_encoding.GetStackMaskBitOffset() + stack_mask_bits;
      stats_.AddBits(
          Stats::kByteKindStackMapOther,
-          (encoding.stack_map_size_in_bytes * kBitsPerByte - stack_map_bits) * num_stack_maps);
+          (encoding.stack_map_size_in_bits - stack_map_bits) * num_stack_maps);
      const size_t stack_map_bytes = helper.GetCodeInfo().GetStackMapsSize(encoding);
      const size_t location_catalog_bytes =
          helper.GetCodeInfo().GetDexRegisterLocationCatalogSize(encoding);
diff --git a/runtime/bit_memory_region.h b/runtime/bit_memory_region.h
new file mode 100644
index 0000000000..90a198193e
--- /dev/null
+++ b/runtime/bit_memory_region.h
@@ -0,0 +1,69 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_BIT_MEMORY_REGION_H_
+#define ART_RUNTIME_BIT_MEMORY_REGION_H_
+
+#include "memory_region.h"
+
+namespace art {
+
+// Bit memory region is a bit offset subregion of a normal memoryregion. This is useful for
+// abstracting away the bit start offset to avoid needing passing as an argument everywhere.
+class BitMemoryRegion FINAL : public ValueObject {
+ public:
+  BitMemoryRegion() = default;
+  BitMemoryRegion(MemoryRegion region, size_t bit_offset, size_t bit_size) {
+    bit_start_ = bit_offset % kBitsPerByte;
+    const size_t start = bit_offset / kBitsPerByte;
+    const size_t end = (bit_offset + bit_size + kBitsPerByte - 1) / kBitsPerByte;
+    region_ = region.Subregion(start, end - start);
+  }
+
+  void* pointer() const { return region_.pointer(); }
+  size_t size() const { return region_.size(); }
+  size_t BitOffset() const { return bit_start_; }
+  size_t size_in_bits() const {
+    return region_.size_in_bits();
+  }
+
+  // Load a single bit in the region. The bit at offset 0 is the least
+  // significant bit in the first byte.
+  ALWAYS_INLINE bool LoadBit(uintptr_t bit_offset) const {
+    return region_.LoadBit(bit_offset + bit_start_);
+  }
+
+  ALWAYS_INLINE void StoreBit(uintptr_t bit_offset, bool value) const {
+    region_.StoreBit(bit_offset + bit_start_, value);
+  }
+
+  ALWAYS_INLINE uint32_t LoadBits(uintptr_t bit_offset, size_t length) const {
+    return region_.LoadBits(bit_offset + bit_start_, length);
+  }
+
+  // Store at a bit offset from inside the bit memory region.
+  ALWAYS_INLINE void StoreBits(uintptr_t bit_offset, uint32_t value, size_t length) {
+    region_.StoreBits(bit_offset + bit_start_, value, length);
+  }
+
+ private:
+  MemoryRegion region_;
+  size_t bit_start_ = 0;
+};
+
+}  // namespace art
+
+#endif  // ART_RUNTIME_BIT_MEMORY_REGION_H_
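Note: `BitMemoryRegion` (the new file above) is a thin view that remembers an intra-byte bit offset over a byte-aligned `MemoryRegion` subregion, so a bit-packed stack map entry can be read and written with entry-relative bit offsets. A hypothetical usage sketch, with the buffer, offsets, and values invented for illustration:

```cpp
#include "bit_memory_region.h"

void Example() {
  uint8_t buffer[4] = {0, 0, 0, 0};
  art::MemoryRegion region(buffer, sizeof(buffer));
  // A 10-bit window starting 3 bits into the buffer; the constructor keeps the
  // byte-aligned subregion plus bit_start_ = 3.
  art::BitMemoryRegion bits(region, /* bit_offset */ 3, /* bit_size */ 10);
  bits.StoreBits(/* bit_offset */ 0, /* value */ 0x2AB, /* length */ 10);
  uint32_t loaded = bits.LoadBits(0, 10);  // Reads back 0x2AB.
  static_cast<void>(loaded);
}
```

`CodeInfo::GetStackMapAt()` further down uses exactly this pattern: `StackMap(BitMemoryRegion(GetStackMaps(encoding), i * map_size, map_size))` with `map_size` measured in bits.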
diff --git a/runtime/memory_region.cc b/runtime/memory_region.cc
index 5bf0f40eff..b0ecab40c5 100644
--- a/runtime/memory_region.cc
+++ b/runtime/memory_region.cc
@@ -43,8 +43,8 @@ void MemoryRegion::StoreBits(uintptr_t bit_offset, uint32_t value, size_t length
   // Bits are stored in this order {7 6 5 4 3 2 1 0}.
   // How many remaining bits in current byte is (bit_offset % kBitsPerByte) + 1.
   uint8_t* out = ComputeInternalPointer<uint8_t>(bit_offset >> kBitsPerByteLog2);
-  auto orig_len = length;
-  auto orig_value = value;
+  size_t orig_len = length;
+  uint32_t orig_value = value;
   uintptr_t bit_remainder = bit_offset % kBitsPerByte;
   while (true) {
     const uintptr_t remaining_bits = kBitsPerByte - bit_remainder;
diff --git a/runtime/memory_region_test.cc b/runtime/memory_region_test.cc
index 72e03a485a..6634c60193 100644
--- a/runtime/memory_region_test.cc
+++ b/runtime/memory_region_test.cc
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+#include "bit_memory_region.h"
 #include "memory_region.h"
 
 #include "gtest/gtest.h"
@@ -55,4 +56,35 @@ TEST(MemoryRegion, StoreUnaligned) {
   }
 }
 
+TEST(MemoryRegion, TestBits) {
+  const size_t n = 8;
+  uint8_t data[n] = { 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF };
+  MemoryRegion region(&data, n);
+  uint32_t value = 0xDEADBEEF;
+  // Try various offsets and lengths.
+  for (size_t bit_offset = 0; bit_offset < 2 * kBitsPerByte; ++bit_offset) {
+    for (size_t length = 0; length < 2 * kBitsPerByte; ++length) {
+      const uint32_t length_mask = (1 << length) - 1;
+      uint32_t masked_value = value & length_mask;
+      BitMemoryRegion bmr(region, bit_offset, length);
+      region.StoreBits(bit_offset, masked_value, length);
+      EXPECT_EQ(region.LoadBits(bit_offset, length), masked_value);
+      EXPECT_EQ(bmr.LoadBits(0, length), masked_value);
+      // Check adjacent bits to make sure they were not incorrectly cleared.
+      EXPECT_EQ(region.LoadBits(0, bit_offset), (1u << bit_offset) - 1);
+      EXPECT_EQ(region.LoadBits(bit_offset + length, length), length_mask);
+      region.StoreBits(bit_offset, length_mask, length);
+      // Store with bit memory region.
+      bmr.StoreBits(0, masked_value, length);
+      EXPECT_EQ(bmr.LoadBits(0, length), masked_value);
+      // Check adjacent bits to make sure they were not incorrectly cleared.
+      EXPECT_EQ(region.LoadBits(0, bit_offset), (1u << bit_offset) - 1);
+      EXPECT_EQ(region.LoadBits(bit_offset + length, length), length_mask);
+      region.StoreBits(bit_offset, length_mask, length);
+      // Flip the value to try different edge bit combinations.
+      value = ~value;
+    }
+  }
+}
+
 }  // namespace art
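Note: the new `MemoryRegion.TestBits` test exercises the bit layout documented in memory_region.cc: bit offset 0 is the least significant bit of the first byte, and a store of `length` bits must leave neighbouring bits untouched. A worked example, with the buffer and values chosen here for illustration:

```cpp
#include <cassert>
#include <cstdint>

#include "memory_region.h"

void BitLayoutExample() {
  uint8_t data[2] = {0xFF, 0xFF};
  art::MemoryRegion region(data, sizeof(data));
  // Store 0b101 into bits 4..6 of the first byte (offset 0 = LSB of data[0]).
  region.StoreBits(/* bit_offset */ 4, /* value */ 0x5, /* length */ 3);
  assert(region.LoadBits(4, 3) == 0x5u);
  // Only bits 4..6 changed: data[0] is now 0b11011111. Adjacent bits stay set,
  // which is what the test's neighbour checks assert for every offset/length pair.
  assert(data[0] == 0xDF && data[1] == 0xFF);
}
```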
diff --git a/runtime/oat.h b/runtime/oat.h
index 4a68036e00..106bd4096f 100644
--- a/runtime/oat.h
+++ b/runtime/oat.h
@@ -32,7 +32,7 @@ class InstructionSetFeatures;
 class PACKED(4) OatHeader {
  public:
   static constexpr uint8_t kOatMagic[] = { 'o', 'a', 't', '\n' };
-  static constexpr uint8_t kOatVersion[] = { '1', '0', '4', '\0' };  // Array allocation entrypoints
+  static constexpr uint8_t kOatVersion[] = { '1', '0', '5', '\0' };  // Stack map alignment change.
 
   static constexpr const char* kImageLocationKey = "image-location";
   static constexpr const char* kDex2OatCmdLineKey = "dex2oat-cmdline";
diff --git a/runtime/quick_exception_handler.cc b/runtime/quick_exception_handler.cc
index b809c3eb56..8d758a4a4b 100644
--- a/runtime/quick_exception_handler.cc
+++ b/runtime/quick_exception_handler.cc
@@ -438,7 +438,7 @@ class DeoptimizeStackVisitor FINAL : public StackVisitor {
           const uint8_t* addr = reinterpret_cast<const uint8_t*>(GetCurrentQuickFrame()) + offset;
           value = *reinterpret_cast<const uint32_t*>(addr);
           uint32_t bit = (offset >> 2);
-          if (stack_map.GetNumberOfStackMaskBits(encoding.stack_map_encoding) > bit &&
+          if (code_info.GetNumberOfStackMaskBits(encoding) > bit &&
               stack_map.GetStackMaskBit(encoding.stack_map_encoding, bit)) {
             is_reference = true;
           }
diff --git a/runtime/stack_map.cc b/runtime/stack_map.cc
index 690b069c8e..e093293e75 100644
--- a/runtime/stack_map.cc
+++ b/runtime/stack_map.cc
@@ -198,7 +198,7 @@ void StackMap::Dump(VariableIndentationOutputStream* vios,
       << "StackMap" << header_suffix
       << std::hex
       << " [native_pc=0x" << code_offset + pc_offset << "]"
-      << " [entry_size=0x" << encoding.stack_map_size_in_bytes << "]"
+      << " [entry_size=0x" << encoding.stack_map_size_in_bits << " bits]"
      << " (dex_pc=0x" << GetDexPc(stack_map_encoding)
      << ", native_pc_offset=0x" << pc_offset
      << ", dex_register_map_offset=0x" << GetDexRegisterMapOffset(stack_map_encoding)
@@ -206,7 +206,7 @@ void StackMap::Dump(VariableIndentationOutputStream* vios,
      << ", register_mask=0x" << GetRegisterMask(stack_map_encoding)
      << std::dec
      << ", stack_mask=0b";
-  for (size_t i = 0, e = GetNumberOfStackMaskBits(stack_map_encoding); i < e; ++i) {
+  for (size_t i = 0, e = code_info.GetNumberOfStackMaskBits(encoding); i < e; ++i) {
     vios->Stream() << GetStackMaskBit(stack_map_encoding, e - i - 1);
   }
   vios->Stream() << ")\n";
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
index 578252181d..679218d5be 100644
--- a/runtime/stack_map.h
+++ b/runtime/stack_map.h
@@ -20,6 +20,7 @@
 #include "arch/code_offset.h"
 #include "base/bit_vector.h"
 #include "base/bit_utils.h"
+#include "bit_memory_region.h"
 #include "dex_file.h"
 #include "memory_region.h"
 #include "leb128.h"
@@ -665,12 +666,14 @@ struct FieldEncoding {
   ALWAYS_INLINE size_t BitSize() const { return end_offset_ - start_offset_; }
 
-  ALWAYS_INLINE int32_t Load(const MemoryRegion& region) const {
+  template <typename Region>
+  ALWAYS_INLINE int32_t Load(const Region& region) const {
     DCHECK_LE(end_offset_, region.size_in_bits());
     return static_cast<int32_t>(region.LoadBits(start_offset_, BitSize())) + min_value_;
   }
 
-  ALWAYS_INLINE void Store(MemoryRegion region, int32_t value) const {
+  template <typename Region>
+  ALWAYS_INLINE void Store(Region region, int32_t value) const {
     region.StoreBits(start_offset_, value - min_value_, BitSize());
     DCHECK_EQ(Load(region), value);
   }
 
@@ -686,7 +689,7 @@ class StackMapEncoding {
   StackMapEncoding() {}
 
   // Set stack map bit layout based on given sizes.
-  // Returns the size of stack map in bytes.
+  // Returns the size of stack map in bits.
   size_t SetFromSizes(size_t native_pc_max,
                       size_t dex_pc_max,
                       size_t dex_register_map_size,
@@ -719,7 +722,7 @@ class StackMapEncoding {
     stack_mask_bit_offset_ = dchecked_integral_cast<uint8_t>(bit_offset);
     bit_offset += stack_mask_bit_size;
 
-    return RoundUp(bit_offset, kBitsPerByte) / kBitsPerByte;
+    return bit_offset;
   }
 
   ALWAYS_INLINE FieldEncoding GetNativePcEncoding() const {
@@ -741,6 +744,10 @@ class StackMapEncoding {
     // The end offset is not encoded. It is implicitly the end of stack map entry.
     return stack_mask_bit_offset_;
   }
+  ALWAYS_INLINE size_t GetNumberOfStackMaskBits(size_t stack_map_bits) const {
+    // Note that the stack mask bits are last.
+    return stack_map_bits - GetStackMaskBitOffset();
+  }
 
   void Dump(VariableIndentationOutputStream* vios) const;
 
@@ -769,7 +776,7 @@ class StackMapEncoding {
 class StackMap {
  public:
   StackMap() {}
-  explicit StackMap(MemoryRegion region) : region_(region) {}
+  explicit StackMap(BitMemoryRegion region) : region_(region) {}
 
   ALWAYS_INLINE bool IsValid() const { return region_.pointer() != nullptr; }
 
@@ -817,10 +824,6 @@ class StackMap {
     encoding.GetRegisterMaskEncoding().Store(region_, mask);
   }
 
-  ALWAYS_INLINE size_t GetNumberOfStackMaskBits(const StackMapEncoding& encoding) const {
-    return region_.size_in_bits() - encoding.GetStackMaskBitOffset();
-  }
-
   ALWAYS_INLINE bool GetStackMaskBit(const StackMapEncoding& encoding, size_t index) const {
     return region_.LoadBit(encoding.GetStackMaskBitOffset() + index);
   }
@@ -838,7 +841,9 @@ class StackMap {
   }
 
   ALWAYS_INLINE bool Equals(const StackMap& other) const {
-    return region_.pointer() == other.region_.pointer() && region_.size() == other.region_.size();
+    return region_.pointer() == other.region_.pointer() &&
+           region_.size() == other.region_.size() &&
+           region_.BitOffset() == other.region_.BitOffset();
   }
 
   void Dump(VariableIndentationOutputStream* vios,
@@ -860,7 +865,7 @@ class StackMap {
  private:
   static constexpr int kFixedSize = 0;
 
-  MemoryRegion region_;
+  BitMemoryRegion region_;
 
   friend class StackMapStream;
 };
@@ -1026,7 +1031,7 @@ class InlineInfo {
 struct CodeInfoEncoding {
   uint32_t non_header_size;
   uint32_t number_of_stack_maps;
-  uint32_t stack_map_size_in_bytes;
+  uint32_t stack_map_size_in_bits;
   uint32_t number_of_location_catalog_entries;
   StackMapEncoding stack_map_encoding;
   InlineInfoEncoding inline_info_encoding;
@@ -1038,7 +1043,7 @@ struct CodeInfoEncoding {
     const uint8_t* ptr = reinterpret_cast<const uint8_t*>(data);
     non_header_size = DecodeUnsignedLeb128(&ptr);
     number_of_stack_maps = DecodeUnsignedLeb128(&ptr);
-    stack_map_size_in_bytes = DecodeUnsignedLeb128(&ptr);
+    stack_map_size_in_bits = DecodeUnsignedLeb128(&ptr);
     number_of_location_catalog_entries = DecodeUnsignedLeb128(&ptr);
     static_assert(alignof(StackMapEncoding) == 1,
                   "StackMapEncoding should not require alignment");
@@ -1059,7 +1064,7 @@ struct CodeInfoEncoding {
   void Compress(Vector* dest) const {
     EncodeUnsignedLeb128(dest, non_header_size);
     EncodeUnsignedLeb128(dest, number_of_stack_maps);
-    EncodeUnsignedLeb128(dest, stack_map_size_in_bytes);
+    EncodeUnsignedLeb128(dest, stack_map_size_in_bits);
     EncodeUnsignedLeb128(dest, number_of_location_catalog_entries);
     const uint8_t* stack_map_ptr = reinterpret_cast<const uint8_t*>(&stack_map_encoding);
     dest->insert(dest->end(), stack_map_ptr, stack_map_ptr + sizeof(StackMapEncoding));
@@ -1078,7 +1083,7 @@ struct CodeInfoEncoding {
  *
  * where CodeInfoEncoding is of the form:
  *
- * [non_header_size, number_of_stack_maps, stack_map_size_in_bytes,
+ * [non_header_size, number_of_stack_maps, stack_map_size_in_bits,
  *  number_of_location_catalog_entries, StackMapEncoding]
  */
 class CodeInfo {
@@ -1108,9 +1113,13 @@ class CodeInfo {
                                    GetDexRegisterLocationCatalogSize(encoding)));
   }
 
+  ALWAYS_INLINE size_t GetNumberOfStackMaskBits(const CodeInfoEncoding& encoding) const {
+    return encoding.stack_map_encoding.GetNumberOfStackMaskBits(encoding.stack_map_size_in_bits);
+  }
+
   ALWAYS_INLINE StackMap GetStackMapAt(size_t i, const CodeInfoEncoding& encoding) const {
-    size_t stack_map_size = encoding.stack_map_size_in_bytes;
-    return StackMap(GetStackMaps(encoding).Subregion(i * stack_map_size, stack_map_size));
+    const size_t map_size = encoding.stack_map_size_in_bits;
+    return StackMap(BitMemoryRegion(GetStackMaps(encoding), i * map_size, map_size));
   }
 
   uint32_t GetNumberOfLocationCatalogEntries(const CodeInfoEncoding& encoding) const {
@@ -1128,7 +1137,8 @@ class CodeInfo {
 
   // Get the size of all the stack maps of this CodeInfo object, in bytes.
   size_t GetStackMapsSize(const CodeInfoEncoding& encoding) const {
-    return encoding.stack_map_size_in_bytes * GetNumberOfStackMaps(encoding);
+    return RoundUp(encoding.stack_map_size_in_bits * GetNumberOfStackMaps(encoding), kBitsPerByte) /
+        kBitsPerByte;
   }
 
   uint32_t GetDexRegisterLocationCatalogOffset(const CodeInfoEncoding& encoding) const {
@@ -1278,7 +1288,7 @@ class CodeInfo {
         << encoding.non_header_size << "\n"
         << encoding.number_of_location_catalog_entries << "\n"
        << encoding.number_of_stack_maps << "\n"
-        << encoding.stack_map_size_in_bytes;
+        << encoding.stack_map_size_in_bits;
    }
  }
 
diff --git a/runtime/thread.cc b/runtime/thread.cc
index 66a03a6826..ae87569e7e 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -3035,7 +3035,7 @@ class ReferenceMapVisitor : public StackVisitor {
       T vreg_info(m, code_info, encoding, map, visitor_);
 
       // Visit stack entries that hold pointers.
-      size_t number_of_bits = map.GetNumberOfStackMaskBits(encoding.stack_map_encoding);
+      size_t number_of_bits = code_info.GetNumberOfStackMaskBits(encoding);
       for (size_t i = 0; i < number_of_bits; ++i) {
         if (map.GetStackMaskBit(encoding.stack_map_encoding, i)) {
           auto* ref_addr = vreg_base + i;
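Note: with entries bit-packed, a StackMap's backing bytes can be shared with its neighbours, so the stack-mask width can no longer be derived from the region size alone. StackMap::GetNumberOfStackMaskBits is therefore removed in favour of CodeInfo::GetNumberOfStackMaskBits, which derives it from the encoding; the callers in thread.cc (ReferenceMapVisitor), quick_exception_handler.cc (DeoptimizeStackVisitor), stack_map.cc, oatdump.cc, and the stack map tests are updated accordingly. The relationship it relies on, as a standalone sketch rather than the actual class methods:

```cpp
#include <cstddef>

// The stack mask occupies the tail of every bit-packed entry, so its width is the
// entry width minus the bit offset where the mask starts (see
// StackMapEncoding::GetNumberOfStackMaskBits in the diff above).
size_t NumberOfStackMaskBits(size_t stack_map_size_in_bits, size_t stack_mask_bit_offset) {
  return stack_map_size_in_bits - stack_mask_bit_offset;
}
```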