Diffstat (limited to 'runtime/stack_map.h')
-rw-r--r--   runtime/stack_map.h   185
1 file changed, 106 insertions, 79 deletions
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
index cd9a3f04cf..062404dbf2 100644
--- a/runtime/stack_map.h
+++ b/runtime/stack_map.h
@@ -20,6 +20,7 @@
 #include "arch/code_offset.h"
 #include "base/bit_vector.h"
 #include "base/bit_utils.h"
+#include "bit_memory_region.h"
 #include "dex_file.h"
 #include "memory_region.h"
 #include "leb128.h"
@@ -665,37 +666,14 @@ struct FieldEncoding {
 
   ALWAYS_INLINE size_t BitSize() const { return end_offset_ - start_offset_; }
 
-  ALWAYS_INLINE int32_t Load(const MemoryRegion& region) const {
+  template <typename Region>
+  ALWAYS_INLINE int32_t Load(const Region& region) const {
     DCHECK_LE(end_offset_, region.size_in_bits());
-    const size_t bit_count = BitSize();
-    if (bit_count == 0) {
-      // Do not touch any memory if the range is empty.
-      return min_value_;
-    }
-    uint8_t* address = region.start() + start_offset_ / kBitsPerByte;
-    const uint32_t shift = start_offset_ & (kBitsPerByte - 1);
-    // Load the value (reading only the strictly needed bytes).
-    const uint32_t load_bit_count = shift + bit_count;
-    uint32_t value = *address++ >> shift;
-    if (load_bit_count > 8) {
-      value |= static_cast<uint32_t>(*address++) << (8 - shift);
-      if (load_bit_count > 16) {
-        value |= static_cast<uint32_t>(*address++) << (16 - shift);
-        if (load_bit_count > 24) {
-          value |= static_cast<uint32_t>(*address++) << (24 - shift);
-          if (load_bit_count > 32) {
-            value |= static_cast<uint32_t>(*address++) << (32 - shift);
-          }
-        }
-      }
-    }
-    // Clear unwanted most significant bits.
-    uint32_t clear_bit_count = 32 - bit_count;
-    value = (value << clear_bit_count) >> clear_bit_count;
-    return value + min_value_;
+    return static_cast<int32_t>(region.LoadBits(start_offset_, BitSize())) + min_value_;
   }
 
-  ALWAYS_INLINE void Store(MemoryRegion region, int32_t value) const {
+  template <typename Region>
+  ALWAYS_INLINE void Store(Region region, int32_t value) const {
     region.StoreBits(start_offset_, value - min_value_, BitSize());
     DCHECK_EQ(Load(region), value);
   }
@@ -711,40 +689,40 @@ class StackMapEncoding {
   StackMapEncoding() {}
 
   // Set stack map bit layout based on given sizes.
-  // Returns the size of stack map in bytes.
+  // Returns the size of stack map in bits.
   size_t SetFromSizes(size_t native_pc_max,
                       size_t dex_pc_max,
                       size_t dex_register_map_size,
                       size_t inline_info_size,
-                      size_t register_mask_max,
-                      size_t stack_mask_bit_size) {
-    size_t bit_offset = 0;
-    DCHECK_EQ(kNativePcBitOffset, bit_offset);
-    bit_offset += MinimumBitsToStore(native_pc_max);
+                      size_t number_of_register_masks,
+                      size_t number_of_stack_masks) {
+    total_bit_size_ = 0;
+    DCHECK_EQ(kNativePcBitOffset, total_bit_size_);
+    total_bit_size_ += MinimumBitsToStore(native_pc_max);
 
-    dex_pc_bit_offset_ = dchecked_integral_cast<uint8_t>(bit_offset);
-    bit_offset += MinimumBitsToStore(1 /* kNoDexPc */ + dex_pc_max);
+    dex_pc_bit_offset_ = total_bit_size_;
+    total_bit_size_ += MinimumBitsToStore(1 /* kNoDexPc */ + dex_pc_max);
 
     // We also need +1 for kNoDexRegisterMap, but since the size is strictly
     // greater than any offset we might try to encode, we already implicitly have it.
-    dex_register_map_bit_offset_ = dchecked_integral_cast<uint8_t>(bit_offset);
-    bit_offset += MinimumBitsToStore(dex_register_map_size);
+    dex_register_map_bit_offset_ = total_bit_size_;
+    total_bit_size_ += MinimumBitsToStore(dex_register_map_size);
 
     // We also need +1 for kNoInlineInfo, but since the inline_info_size is strictly
     // greater than the offset we might try to encode, we already implicitly have it.
     // If inline_info_size is zero, we can encode only kNoInlineInfo (in zero bits).
-    inline_info_bit_offset_ = dchecked_integral_cast<uint8_t>(bit_offset);
+    inline_info_bit_offset_ = total_bit_size_;
     if (inline_info_size != 0) {
-      bit_offset += MinimumBitsToStore(dex_register_map_size + inline_info_size);
+      total_bit_size_ += MinimumBitsToStore(dex_register_map_size + inline_info_size);
     }
 
-    register_mask_bit_offset_ = dchecked_integral_cast<uint8_t>(bit_offset);
-    bit_offset += MinimumBitsToStore(register_mask_max);
+    register_mask_index_bit_offset_ = total_bit_size_;
+    total_bit_size_ += MinimumBitsToStore(number_of_register_masks);
 
-    stack_mask_bit_offset_ = dchecked_integral_cast<uint8_t>(bit_offset);
-    bit_offset += stack_mask_bit_size;
+    stack_mask_index_bit_offset_ = total_bit_size_;
+    total_bit_size_ += MinimumBitsToStore(number_of_stack_masks);
 
-    return RoundUp(bit_offset, kBitsPerByte) / kBitsPerByte;
+    return total_bit_size_;
   }
 
   ALWAYS_INLINE FieldEncoding GetNativePcEncoding() const {
@@ -757,14 +735,18 @@ class StackMapEncoding {
     return FieldEncoding(dex_register_map_bit_offset_, inline_info_bit_offset_, -1 /* min_value */);
   }
   ALWAYS_INLINE FieldEncoding GetInlineInfoEncoding() const {
-    return FieldEncoding(inline_info_bit_offset_, register_mask_bit_offset_, -1 /* min_value */);
+    return FieldEncoding(inline_info_bit_offset_,
+                         register_mask_index_bit_offset_,
+                         -1 /* min_value */);
   }
-  ALWAYS_INLINE FieldEncoding GetRegisterMaskEncoding() const {
-    return FieldEncoding(register_mask_bit_offset_, stack_mask_bit_offset_);
+  ALWAYS_INLINE FieldEncoding GetRegisterMaskIndexEncoding() const {
+    return FieldEncoding(register_mask_index_bit_offset_, stack_mask_index_bit_offset_);
   }
-  ALWAYS_INLINE size_t GetStackMaskBitOffset() const {
-    // The end offset is not encoded. It is implicitly the end of stack map entry.
-    return stack_mask_bit_offset_;
+  ALWAYS_INLINE FieldEncoding GetStackMaskIndexEncoding() const {
+    return FieldEncoding(stack_mask_index_bit_offset_, total_bit_size_);
+  }
+  ALWAYS_INLINE size_t BitSize() const {
+    return total_bit_size_;
   }
 
   void Dump(VariableIndentationOutputStream* vios) const;
@@ -774,8 +756,9 @@ class StackMapEncoding {
   uint8_t dex_pc_bit_offset_;
   uint8_t dex_register_map_bit_offset_;
   uint8_t inline_info_bit_offset_;
-  uint8_t register_mask_bit_offset_;
-  uint8_t stack_mask_bit_offset_;
+  uint8_t register_mask_index_bit_offset_;
+  uint8_t stack_mask_index_bit_offset_;
+  uint8_t total_bit_size_;
 };
 
 /**
@@ -788,13 +771,13 @@ class StackMapEncoding {
  *
  * The information is of the form:
  *
- *   [native_pc_offset, dex_pc, dex_register_map_offset, inlining_info_offset, register_mask,
- *   stack_mask].
+ *   [native_pc_offset, dex_pc, dex_register_map_offset, inlining_info_offset, register_mask_index,
+ *   stack_mask_index].
  */
 class StackMap {
  public:
   StackMap() {}
-  explicit StackMap(MemoryRegion region) : region_(region) {}
+  explicit StackMap(BitMemoryRegion region) : region_(region) {}
 
   ALWAYS_INLINE bool IsValid() const { return region_.pointer() != nullptr; }
 
@@ -834,24 +817,20 @@ class StackMap {
     encoding.GetInlineInfoEncoding().Store(region_, offset);
   }
 
-  ALWAYS_INLINE uint32_t GetRegisterMask(const StackMapEncoding& encoding) const {
-    return encoding.GetRegisterMaskEncoding().Load(region_);
-  }
-
-  ALWAYS_INLINE void SetRegisterMask(const StackMapEncoding& encoding, uint32_t mask) {
-    encoding.GetRegisterMaskEncoding().Store(region_, mask);
+  ALWAYS_INLINE uint32_t GetRegisterMaskIndex(const StackMapEncoding& encoding) const {
+    return encoding.GetRegisterMaskIndexEncoding().Load(region_);
   }
 
-  ALWAYS_INLINE size_t GetNumberOfStackMaskBits(const StackMapEncoding& encoding) const {
-    return region_.size_in_bits() - encoding.GetStackMaskBitOffset();
+  ALWAYS_INLINE void SetRegisterMaskIndex(const StackMapEncoding& encoding, uint32_t mask) {
+    encoding.GetRegisterMaskIndexEncoding().Store(region_, mask);
   }
 
-  ALWAYS_INLINE bool GetStackMaskBit(const StackMapEncoding& encoding, size_t index) const {
-    return region_.LoadBit(encoding.GetStackMaskBitOffset() + index);
+  ALWAYS_INLINE uint32_t GetStackMaskIndex(const StackMapEncoding& encoding) const {
+    return encoding.GetStackMaskIndexEncoding().Load(region_);
   }
 
-  ALWAYS_INLINE void SetStackMaskBit(const StackMapEncoding& encoding, size_t index, bool value) {
-    region_.StoreBit(encoding.GetStackMaskBitOffset() + index, value);
+  ALWAYS_INLINE void SetStackMaskIndex(const StackMapEncoding& encoding, uint32_t mask) {
+    encoding.GetStackMaskIndexEncoding().Store(region_, mask);
   }
 
   ALWAYS_INLINE bool HasDexRegisterMap(const StackMapEncoding& encoding) const {
@@ -863,7 +842,9 @@ class StackMap {
   }
 
   ALWAYS_INLINE bool Equals(const StackMap& other) const {
-    return region_.pointer() == other.region_.pointer() && region_.size() == other.region_.size();
+    return region_.pointer() == other.region_.pointer() &&
+           region_.size() == other.region_.size() &&
+           region_.BitOffset() == other.region_.BitOffset();
   }
 
   void Dump(VariableIndentationOutputStream* vios,
@@ -885,7 +866,7 @@ class StackMap {
  private:
   static constexpr int kFixedSize = 0;
 
-  MemoryRegion region_;
+  BitMemoryRegion region_;
 
   friend class StackMapStream;
 };
@@ -1051,7 +1032,10 @@ class InlineInfo {
 struct CodeInfoEncoding {
   uint32_t non_header_size;
   uint32_t number_of_stack_maps;
-  uint32_t stack_map_size_in_bytes;
+  uint32_t number_of_stack_masks;
+  uint32_t number_of_register_masks;
+  uint32_t stack_mask_size_in_bits;
+  uint32_t register_mask_size_in_bits;
   uint32_t number_of_location_catalog_entries;
   StackMapEncoding stack_map_encoding;
   InlineInfoEncoding inline_info_encoding;
@@ -1063,7 +1047,10 @@ struct CodeInfoEncoding {
     const uint8_t* ptr = reinterpret_cast<const uint8_t*>(data);
     non_header_size = DecodeUnsignedLeb128(&ptr);
     number_of_stack_maps = DecodeUnsignedLeb128(&ptr);
-    stack_map_size_in_bytes = DecodeUnsignedLeb128(&ptr);
+    number_of_stack_masks = DecodeUnsignedLeb128(&ptr);
+    number_of_register_masks = DecodeUnsignedLeb128(&ptr);
+    stack_mask_size_in_bits = DecodeUnsignedLeb128(&ptr);
+    register_mask_size_in_bits = DecodeUnsignedLeb128(&ptr);
    number_of_location_catalog_entries = DecodeUnsignedLeb128(&ptr);
     static_assert(alignof(StackMapEncoding) == 1,
                   "StackMapEncoding should not require alignment");
@@ -1084,7 +1071,10 @@ struct CodeInfoEncoding {
   void Compress(Vector* dest) const {
     EncodeUnsignedLeb128(dest, non_header_size);
     EncodeUnsignedLeb128(dest, number_of_stack_maps);
-    EncodeUnsignedLeb128(dest, stack_map_size_in_bytes);
+    EncodeUnsignedLeb128(dest, number_of_stack_masks);
+    EncodeUnsignedLeb128(dest, number_of_register_masks);
+    EncodeUnsignedLeb128(dest, stack_mask_size_in_bits);
+    EncodeUnsignedLeb128(dest, register_mask_size_in_bits);
     EncodeUnsignedLeb128(dest, number_of_location_catalog_entries);
     const uint8_t* stack_map_ptr = reinterpret_cast<const uint8_t*>(&stack_map_encoding);
     dest->insert(dest->end(), stack_map_ptr, stack_map_ptr + sizeof(StackMapEncoding));
@@ -1103,7 +1093,7 @@ struct CodeInfoEncoding {
  *
  * where CodeInfoEncoding is of the form:
  *
- *   [non_header_size, number_of_stack_maps, stack_map_size_in_bytes,
+ *   [non_header_size, number_of_stack_maps, stack_map_size_in_bits,
  *    number_of_location_catalog_entries, StackMapEncoding]
  */
 class CodeInfo {
@@ -1118,7 +1108,7 @@ class CodeInfo {
   }
 
   CodeInfoEncoding ExtractEncoding() const {
-    CodeInfoEncoding encoding(region_.start());
+    CodeInfoEncoding encoding(region_.begin());
     AssertValidStackMap(encoding);
     return encoding;
   }
@@ -1133,9 +1123,41 @@ class CodeInfo {
         GetDexRegisterLocationCatalogSize(encoding)));
   }
 
+  ALWAYS_INLINE size_t GetNumberOfStackMaskBits(const CodeInfoEncoding& encoding) const {
+    return encoding.stack_mask_size_in_bits;
+  }
+
   ALWAYS_INLINE StackMap GetStackMapAt(size_t i, const CodeInfoEncoding& encoding) const {
-    size_t stack_map_size = encoding.stack_map_size_in_bytes;
-    return StackMap(GetStackMaps(encoding).Subregion(i * stack_map_size, stack_map_size));
+    const size_t map_size = encoding.stack_map_encoding.BitSize();
+    return StackMap(BitMemoryRegion(GetStackMaps(encoding), i * map_size, map_size));
+  }
+
+  BitMemoryRegion GetStackMask(const CodeInfoEncoding& encoding, size_t stack_mask_index) const {
+    // All stack mask data is stored before register map data (which is at the very end).
+    const size_t entry_size = GetNumberOfStackMaskBits(encoding);
+    const size_t register_mask_bits =
+        encoding.register_mask_size_in_bits * encoding.number_of_register_masks;
+    return BitMemoryRegion(region_,
+                           region_.size_in_bits() - register_mask_bits -
+                               entry_size * (stack_mask_index + 1),
+                           entry_size);
+  }
+
+  BitMemoryRegion GetStackMaskOf(const CodeInfoEncoding& encoding,
+                                 const StackMap& stack_map) const {
+    return GetStackMask(encoding, stack_map.GetStackMaskIndex(encoding.stack_map_encoding));
+  }
+
+  BitMemoryRegion GetRegisterMask(const CodeInfoEncoding& encoding, size_t index) const {
+    const size_t entry_size = encoding.register_mask_size_in_bits;
+    return BitMemoryRegion(region_,
+                           region_.size_in_bits() - entry_size * (index + 1),
+                           entry_size);
+  }
+
+  uint32_t GetRegisterMaskOf(const CodeInfoEncoding& encoding, const StackMap& stack_map) const {
+    size_t index = stack_map.GetRegisterMaskIndex(encoding.stack_map_encoding);
+    return GetRegisterMask(encoding, index).LoadBits(0u, encoding.register_mask_size_in_bits);
   }
 
   uint32_t GetNumberOfLocationCatalogEntries(const CodeInfoEncoding& encoding) const {
@@ -1151,9 +1173,14 @@ class CodeInfo {
     return encoding.number_of_stack_maps;
   }
 
+  // Get the size of all the stack maps of this CodeInfo object, in bits. Not byte aligned.
+  ALWAYS_INLINE size_t GetStackMapsSizeInBits(const CodeInfoEncoding& encoding) const {
+    return encoding.stack_map_encoding.BitSize() * GetNumberOfStackMaps(encoding);
+  }
+
   // Get the size of all the stack maps of this CodeInfo object, in bytes.
   size_t GetStackMapsSize(const CodeInfoEncoding& encoding) const {
-    return encoding.stack_map_size_in_bytes * GetNumberOfStackMaps(encoding);
+    return RoundUp(GetStackMapsSizeInBits(encoding), kBitsPerByte) / kBitsPerByte;
   }
 
   uint32_t GetDexRegisterLocationCatalogOffset(const CodeInfoEncoding& encoding) const {
@@ -1303,7 +1330,7 @@ class CodeInfo {
         << encoding.non_header_size << "\n"
         << encoding.number_of_location_catalog_entries << "\n"
        << encoding.number_of_stack_maps << "\n"
-        << encoding.stack_map_size_in_bytes;
+        << encoding.stack_map_encoding.BitSize();
     }
   }
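The change replaces the per-stack-map register mask and inline stack mask with small indices into deduplicated mask tables stored at the tail of the CodeInfo region: register masks sit at the very end, stack masks are packed immediately before them, and each StackMap only records which entry it uses. The two building blocks are a bit-granular load (BitMemoryRegion::LoadBits) and the end-of-region offset arithmetic in GetStackMask/GetRegisterMask. The following is a minimal sketch of both, assuming least-significant-bit-first packing; the helper names LoadBitsSketch and StackMaskBitOffsetSketch are illustrative only and are not part of the patch or of BitMemoryRegion's actual API.

// Sketch only: mirrors the idea behind BitMemoryRegion::LoadBits and the
// mask layout used by CodeInfo::GetStackMask/GetRegisterMask above.
#include <cassert>
#include <cstddef>
#include <cstdint>

// Load `bit_count` bits starting at `bit_offset` from a byte buffer,
// assuming least-significant-bit-first packing within each byte.
uint32_t LoadBitsSketch(const uint8_t* data, size_t bit_offset, size_t bit_count) {
  assert(bit_count <= 32u);
  uint32_t value = 0;
  for (size_t i = 0; i < bit_count; ++i) {
    const size_t bit = bit_offset + i;
    const uint32_t b = (data[bit / 8] >> (bit % 8)) & 1u;
    value |= b << i;
  }
  return value;
}

// Bit offset of the `stack_mask_index`-th deduplicated stack mask, given that
// register masks occupy the very end of the region and stack masks are packed
// immediately before them (matching the arithmetic in GetStackMask).
size_t StackMaskBitOffsetSketch(size_t region_size_in_bits,
                                size_t register_mask_size_in_bits,
                                size_t number_of_register_masks,
                                size_t stack_mask_size_in_bits,
                                size_t stack_mask_index) {
  const size_t register_mask_bits = register_mask_size_in_bits * number_of_register_masks;
  return region_size_in_bits - register_mask_bits -
         stack_mask_size_in_bits * (stack_mask_index + 1);
}

The index indirection is what enables deduplication: many safepoints share identical stack and register masks, so each distinct mask is stored once and a stack map spends only MinimumBitsToStore(number_of_masks) bits on the reference. That is also why SetFromSizes now returns the stack map size in bits rather than bytes, and why GetStackMapsSize rounds the total bit size up to whole bytes.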