Use variable encoding for StackMap.
dex_pc, native_pc_offset, dex_register_map_offset, inline_info_offset,
and register_mask can now be encoded in 0, 1, 2, 3, or 4 bytes.
Change-Id: I15f93e8226ce374204d44c5a80a9fd89bda2687c
diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h
index b77e604..a73c8d7 100644
--- a/compiler/optimizing/stack_map_stream.h
+++ b/compiler/optimizing/stack_map_stream.h
@@ -68,6 +68,7 @@
stack_mask_max_(-1),
dex_pc_max_(0),
native_pc_offset_max_(0),
+ register_mask_max_(0),
number_of_stack_maps_with_inline_info_(0),
dex_map_hash_to_stack_map_indices_(std::less<uint32_t>(), allocator->Adapter()) {}
@@ -128,6 +129,7 @@
dex_pc_max_ = std::max(dex_pc_max_, dex_pc);
native_pc_offset_max_ = std::max(native_pc_offset_max_, native_pc_offset);
+ register_mask_max_ = std::max(register_mask_max_, register_mask);
}
void AddInlineInfoEntry(uint32_t method_index) {
@@ -156,7 +158,8 @@
ComputeInlineInfoSize(),
ComputeDexRegisterMapsSize(),
dex_pc_max_,
- native_pc_offset_max_);
+ native_pc_offset_max_,
+ register_mask_max_);
}
// Compute the size of the Dex register location catalog of `entry`.
@@ -248,8 +251,11 @@
ComputeInlineInfoStart(),
inline_info_size);
- code_info.SetEncoding(
- inline_info_size, dex_register_map_size, dex_pc_max_, native_pc_offset_max_);
+ code_info.SetEncoding(inline_info_size,
+ dex_register_map_size,
+ dex_pc_max_,
+ native_pc_offset_max_,
+ register_mask_max_);
code_info.SetNumberOfStackMaps(stack_maps_.Size());
code_info.SetStackMaskSize(stack_mask_size);
DCHECK_EQ(code_info.GetStackMapsSize(), ComputeStackMapsSize());
@@ -476,6 +482,7 @@
int stack_mask_max_;
uint32_t dex_pc_max_;
uint32_t native_pc_offset_max_;
+ uint32_t register_mask_max_;
size_t number_of_stack_maps_with_inline_info_;
ArenaSafeMap<uint32_t, GrowableArray<uint32_t>> dex_map_hash_to_stack_map_indices_;
diff --git a/compiler/optimizing/stack_map_test.cc b/compiler/optimizing/stack_map_test.cc
index b9bf016..8d160bc 100644
--- a/compiler/optimizing/stack_map_test.cc
+++ b/compiler/optimizing/stack_map_test.cc
@@ -401,9 +401,8 @@
-// ...the offset of the second Dex register map (relative to the
-// beginning of the Dex register maps region) is 255 (i.e.,
-// kNoDexRegisterMapSmallEncoding).
+// ...the offset of the second Dex register map (relative to the
+// beginning of the Dex register maps region) is 255 (i.e., the
+// value reserved for kNoDexRegisterMap in a one-byte encoding).
- ASSERT_NE(StackMap::kNoDexRegisterMap, stack_map1.GetDexRegisterMapOffset(code_info));
- ASSERT_EQ(StackMap::kNoDexRegisterMapSmallEncoding,
- stack_map1.GetDexRegisterMapOffset(code_info));
+ ASSERT_NE(stack_map1.GetDexRegisterMapOffset(code_info), StackMap::kNoDexRegisterMap);
+ ASSERT_EQ(stack_map1.GetDexRegisterMapOffset(code_info), 0xFFu);
}
TEST(StackMapTest, TestShareDexRegisterMap) {
diff --git a/runtime/stack_map.cc b/runtime/stack_map.cc
index 28f42c1..11e7e44 100644
--- a/runtime/stack_map.cc
+++ b/runtime/stack_map.cc
@@ -16,12 +16,11 @@
#include "stack_map.h"
+#include <stdint.h>
+
namespace art {
constexpr size_t DexRegisterLocationCatalog::kNoLocationEntryIndex;
-
-constexpr uint32_t StackMap::kNoDexRegisterMapSmallEncoding;
-constexpr uint32_t StackMap::kNoInlineInfoSmallEncoding;
constexpr uint32_t StackMap::kNoDexRegisterMap;
constexpr uint32_t StackMap::kNoInlineInfo;
@@ -49,120 +48,150 @@
return dex_register_location_catalog.GetDexRegisterLocation(location_catalog_entry_index);
}
-uint32_t StackMap::GetDexPc(const CodeInfo& info) const {
- return info.HasSmallDexPc()
- ? region_.LoadUnaligned<kSmallEncoding>(info.ComputeStackMapDexPcOffset())
- : region_.LoadUnaligned<kLargeEncoding>(info.ComputeStackMapDexPcOffset());
-}
-
-void StackMap::SetDexPc(const CodeInfo& info, uint32_t dex_pc) {
- DCHECK(!info.HasSmallDexPc() || IsUint<kBitsForSmallEncoding>(dex_pc)) << dex_pc;
- info.HasSmallDexPc()
- ? region_.StoreUnaligned<kSmallEncoding>(info.ComputeStackMapDexPcOffset(), dex_pc)
- : region_.StoreUnaligned<kLargeEncoding>(info.ComputeStackMapDexPcOffset(), dex_pc);
-}
-
-uint32_t StackMap::GetNativePcOffset(const CodeInfo& info) const {
- return info.HasSmallNativePc()
- ? region_.LoadUnaligned<kSmallEncoding>(info.ComputeStackMapNativePcOffset())
- : region_.LoadUnaligned<kLargeEncoding>(info.ComputeStackMapNativePcOffset());
-}
-
-void StackMap::SetNativePcOffset(const CodeInfo& info, uint32_t native_pc_offset) {
- DCHECK(!info.HasSmallNativePc()
- || IsUint<kBitsForSmallEncoding>(native_pc_offset)) << native_pc_offset;
- uint32_t entry = info.ComputeStackMapNativePcOffset();
- info.HasSmallNativePc()
- ? region_.StoreUnaligned<kSmallEncoding>(entry, native_pc_offset)
- : region_.StoreUnaligned<kLargeEncoding>(entry, native_pc_offset);
-}
-
-uint32_t StackMap::GetDexRegisterMapOffset(const CodeInfo& info) const {
- if (info.HasSmallDexRegisterMap()) {
- uint8_t value = region_.LoadUnaligned<kSmallEncoding>(
- info.ComputeStackMapDexRegisterMapOffset());
- if (value == kNoDexRegisterMapSmallEncoding) {
- return kNoDexRegisterMap;
+// Loads `number_of_bytes` bytes at the given `offset` and assembles them into a uint32_t. If
+// `check_max` is true, the maximum value representable in `number_of_bytes` bytes is returned
+// as the uint32_t 0xFFFFFFFF (i.e. -1), so that it round-trips with kNoDexRegisterMap and
+// kNoInlineInfo.
+static uint32_t LoadAt(MemoryRegion region,
+ size_t number_of_bytes,
+ size_t offset,
+ bool check_max = false) {
+ if (number_of_bytes == 0u) {
+ DCHECK(!check_max);
+ return 0;
+ } else if (number_of_bytes == 1u) {
+ uint8_t value = region.LoadUnaligned<uint8_t>(offset);
+ if (check_max && value == 0xFF) {
+ return -1;
+ } else {
+ return value;
+ }
+ } else if (number_of_bytes == 2u) {
+ uint16_t value = region.LoadUnaligned<uint16_t>(offset);
+ if (check_max && value == 0xFFFF) {
+ return -1;
+ } else {
+ return value;
+ }
+ } else if (number_of_bytes == 3u) {
+ uint16_t low = region.LoadUnaligned<uint16_t>(offset);
+ uint16_t high = region.LoadUnaligned<uint8_t>(offset + sizeof(uint16_t));
+ uint32_t value = (high << 16) + low;
+ if (check_max && value == 0xFFFFFF) {
+ return -1;
} else {
return value;
}
} else {
- return region_.LoadUnaligned<kLargeEncoding>(info.ComputeStackMapDexRegisterMapOffset());
+ DCHECK_EQ(number_of_bytes, 4u);
+ return region.LoadUnaligned<uint32_t>(offset);
}
}
+static void StoreAt(MemoryRegion region, size_t number_of_bytes, size_t offset, uint32_t value) {
+ if (number_of_bytes == 0u) {
+ DCHECK_EQ(value, 0u);
+ } else if (number_of_bytes == 1u) {
+ region.StoreUnaligned<uint8_t>(offset, value);
+ } else if (number_of_bytes == 2u) {
+ region.StoreUnaligned<uint16_t>(offset, value);
+ } else if (number_of_bytes == 3u) {
+ region.StoreUnaligned<uint16_t>(offset, Low16Bits(value));
+ region.StoreUnaligned<uint8_t>(offset + sizeof(uint16_t), High16Bits(value));
+ } else {
+ region.StoreUnaligned<uint32_t>(offset, value);
+ DCHECK_EQ(number_of_bytes, 4u);
+ }
+}
+
+uint32_t StackMap::GetDexPc(const CodeInfo& info) const {
+ return LoadAt(region_, info.NumberOfBytesForDexPc(), info.ComputeStackMapDexPcOffset());
+}
+
+void StackMap::SetDexPc(const CodeInfo& info, uint32_t dex_pc) {
+ StoreAt(region_, info.NumberOfBytesForDexPc(), info.ComputeStackMapDexPcOffset(), dex_pc);
+}
+
+uint32_t StackMap::GetNativePcOffset(const CodeInfo& info) const {
+ return LoadAt(region_, info.NumberOfBytesForNativePc(), info.ComputeStackMapNativePcOffset());
+}
+
+void StackMap::SetNativePcOffset(const CodeInfo& info, uint32_t native_pc_offset) {
+ StoreAt(region_, info.NumberOfBytesForNativePc(), info.ComputeStackMapNativePcOffset(), native_pc_offset);
+}
+
+uint32_t StackMap::GetDexRegisterMapOffset(const CodeInfo& info) const {
+ return LoadAt(region_,
+ info.NumberOfBytesForDexRegisterMap(),
+ info.ComputeStackMapDexRegisterMapOffset(),
+ /* check_max */ true);
+}
+
void StackMap::SetDexRegisterMapOffset(const CodeInfo& info, uint32_t offset) {
- DCHECK(!info.HasSmallDexRegisterMap()
- || (IsUint<kBitsForSmallEncoding>(offset)
- || (offset == kNoDexRegisterMap))) << offset;
- size_t dex_register_map_entry = info.ComputeStackMapDexRegisterMapOffset();
- info.HasSmallDexRegisterMap()
- ? region_.StoreUnaligned<kSmallEncoding>(dex_register_map_entry, offset)
- : region_.StoreUnaligned<kLargeEncoding>(dex_register_map_entry, offset);
+ StoreAt(region_,
+ info.NumberOfBytesForDexRegisterMap(),
+ info.ComputeStackMapDexRegisterMapOffset(),
+ offset);
}
uint32_t StackMap::GetInlineDescriptorOffset(const CodeInfo& info) const {
if (!info.HasInlineInfo()) return kNoInlineInfo;
- if (info.HasSmallInlineInfo()) {
- uint8_t value = region_.LoadUnaligned<kSmallEncoding>(
- info.ComputeStackMapInlineInfoOffset());
- if (value == kNoInlineInfoSmallEncoding) {
- return kNoInlineInfo;
- } else {
- return value;
- }
- } else {
- return region_.LoadUnaligned<kLargeEncoding>(info.ComputeStackMapInlineInfoOffset());
- }
+ return LoadAt(region_,
+ info.NumberOfBytesForInlineInfo(),
+ info.ComputeStackMapInlineInfoOffset(),
+ /* check_max */ true);
}
void StackMap::SetInlineDescriptorOffset(const CodeInfo& info, uint32_t offset) {
DCHECK(info.HasInlineInfo());
- DCHECK(!info.HasSmallInlineInfo()
- || (IsUint<kBitsForSmallEncoding>(offset)
- || (offset == kNoInlineInfo))) << offset;
- size_t inline_entry = info.ComputeStackMapInlineInfoOffset();
- info.HasSmallInlineInfo()
- ? region_.StoreUnaligned<kSmallEncoding>(inline_entry, offset)
- : region_.StoreUnaligned<kLargeEncoding>(inline_entry, offset);
+ StoreAt(region_,
+ info.NumberOfBytesForInlineInfo(),
+ info.ComputeStackMapInlineInfoOffset(),
+ offset);
}
uint32_t StackMap::GetRegisterMask(const CodeInfo& info) const {
- return region_.LoadUnaligned<kLargeEncoding>(info.ComputeStackMapRegisterMaskOffset());
+ return LoadAt(region_,
+ info.NumberOfBytesForRegisterMask(),
+ info.ComputeStackMapRegisterMaskOffset());
}
void StackMap::SetRegisterMask(const CodeInfo& info, uint32_t mask) {
- region_.StoreUnaligned<kLargeEncoding>(info.ComputeStackMapRegisterMaskOffset(), mask);
+ StoreAt(region_,
+ info.NumberOfBytesForRegisterMask(),
+ info.ComputeStackMapRegisterMaskOffset(),
+ mask);
}
-size_t StackMap::ComputeStackMapSize(size_t stack_mask_size,
- bool has_inline_info,
- bool is_small_inline_info,
- bool is_small_dex_map,
- bool is_small_dex_pc,
- bool is_small_native_pc) {
- return StackMap::kFixedSize
- + stack_mask_size
- + (has_inline_info ? NumberOfBytesForEntry(is_small_inline_info) : 0)
- + NumberOfBytesForEntry(is_small_dex_map)
- + NumberOfBytesForEntry(is_small_dex_pc)
- + NumberOfBytesForEntry(is_small_native_pc);
+size_t StackMap::ComputeStackMapSizeInternal(size_t stack_mask_size,
+ size_t number_of_bytes_for_inline_info,
+ size_t number_of_bytes_for_dex_map,
+ size_t number_of_bytes_for_dex_pc,
+ size_t number_of_bytes_for_native_pc,
+ size_t number_of_bytes_for_register_mask) {
+ return stack_mask_size
+ + number_of_bytes_for_inline_info
+ + number_of_bytes_for_dex_map
+ + number_of_bytes_for_dex_pc
+ + number_of_bytes_for_native_pc
+ + number_of_bytes_for_register_mask;
}
size_t StackMap::ComputeStackMapSize(size_t stack_mask_size,
size_t inline_info_size,
size_t dex_register_map_size,
size_t dex_pc_max,
- size_t native_pc_max) {
- return ComputeStackMapSize(
+ size_t native_pc_max,
+ size_t register_mask_max) {
+ return ComputeStackMapSizeInternal(
stack_mask_size,
- inline_info_size != 0,
- // + 1 to also encode kNoInlineInfo.
- IsUint<kBitsForSmallEncoding>(inline_info_size + dex_register_map_size + 1),
+ inline_info_size == 0
+ ? 0
+ // + 1 to also encode kNoInlineInfo.
+ : CodeInfo::EncodingSizeInBytes(inline_info_size + dex_register_map_size + 1),
// + 1 to also encode kNoDexRegisterMap.
- IsUint<kBitsForSmallEncoding>(dex_register_map_size + 1),
- IsUint<kBitsForSmallEncoding>(dex_pc_max),
- IsUint<kBitsForSmallEncoding>(native_pc_max));
+ CodeInfo::EncodingSizeInBytes(dex_register_map_size + 1),
+ CodeInfo::EncodingSizeInBytes(dex_pc_max),
+ CodeInfo::EncodingSizeInBytes(native_pc_max),
+ CodeInfo::EncodingSizeInBytes(register_mask_max));
}
MemoryRegion StackMap::GetStackMask(const CodeInfo& info) const {
@@ -204,10 +233,11 @@
<< ", number_of_dex_registers=" << number_of_dex_registers
<< ", number_of_stack_maps=" << number_of_stack_maps
<< ", has_inline_info=" << HasInlineInfo()
- << ", has_small_inline_info=" << HasSmallInlineInfo()
- << ", has_small_dex_register_map=" << HasSmallDexRegisterMap()
- << ", has_small_dex_pc=" << HasSmallDexPc()
- << ", has_small_native_pc=" << HasSmallNativePc()
+ << ", number_of_bytes_for_inline_info=" << NumberOfBytesForInlineInfo()
+ << ", number_of_bytes_for_dex_register_map=" << NumberOfBytesForDexRegisterMap()
+ << ", number_of_bytes_for_dex_pc=" << NumberOfBytesForDexPc()
+ << ", number_of_bytes_for_native_pc=" << NumberOfBytesForNativePc()
+ << ", number_of_bytes_for_register_mask=" << NumberOfBytesForRegisterMask()
<< ")\n";
// Display the Dex register location catalog.
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
index ab7f926..f68cafe 100644
--- a/runtime/stack_map.h
+++ b/runtime/stack_map.h
@@ -726,49 +726,32 @@
}
static size_t ComputeStackMapSize(size_t stack_mask_size,
- bool has_inline_info,
- bool is_small_inline_info,
- bool is_small_dex_map,
- bool is_small_dex_pc,
- bool is_small_native_pc);
-
- static size_t ComputeStackMapSize(size_t stack_mask_size,
size_t inline_info_size,
size_t dex_register_map_size,
size_t dex_pc_max,
- size_t native_pc_max);
-
- // TODO: Revisit this abstraction if we allow 3 bytes encoding.
- typedef uint8_t kSmallEncoding;
- typedef uint32_t kLargeEncoding;
- static constexpr size_t kBytesForSmallEncoding = sizeof(kSmallEncoding);
- static constexpr size_t kBitsForSmallEncoding = kBitsPerByte * kBytesForSmallEncoding;
- static constexpr size_t kBytesForLargeEncoding = sizeof(kLargeEncoding);
- static constexpr size_t kBitsForLargeEncoding = kBitsPerByte * kBytesForLargeEncoding;
+ size_t native_pc_max,
+ size_t register_mask_max);
// Special (invalid) offset for the DexRegisterMapOffset field meaning
// that there is no Dex register map for this stack map.
static constexpr uint32_t kNoDexRegisterMap = -1;
- static constexpr uint32_t kNoDexRegisterMapSmallEncoding =
- std::numeric_limits<kSmallEncoding>::max();
// Special (invalid) offset for the InlineDescriptorOffset field meaning
// that there is no inline info for this stack map.
static constexpr uint32_t kNoInlineInfo = -1;
- static constexpr uint32_t kNoInlineInfoSmallEncoding =
- std::numeric_limits<kSmallEncoding>::max();
-
- // Returns the number of bytes needed for an entry in the StackMap.
- static size_t NumberOfBytesForEntry(bool small_encoding) {
- return small_encoding ? kBytesForSmallEncoding : kBytesForLargeEncoding;
- }
private:
+ static size_t ComputeStackMapSizeInternal(size_t stack_mask_size,
+ size_t number_of_bytes_for_inline_info,
+ size_t number_of_bytes_for_dex_map,
+ size_t number_of_bytes_for_dex_pc,
+ size_t number_of_bytes_for_native_pc,
+ size_t number_of_bytes_for_register_mask);
+
// TODO: Instead of plain types such as "uint32_t", introduce
// typedefs (and document the memory layout of StackMap).
static constexpr int kRegisterMaskOffset = 0;
- static constexpr int kFixedSize = kRegisterMaskOffset + sizeof(uint32_t);
- static constexpr int kStackMaskOffset = kFixedSize;
+ static constexpr int kFixedSize = 0;
MemoryRegion region_;
@@ -792,50 +775,77 @@
region_ = MemoryRegion(const_cast<void*>(data), size);
}
+ static size_t EncodingSizeInBytes(size_t max_element) {
+ DCHECK(IsUint<32>(max_element));
+ return (max_element == 0) ? 0
+ : IsUint<8>(max_element) ? 1
+ : IsUint<16>(max_element) ? 2
+ : IsUint<24>(max_element) ? 3
+ : 4;
+ }
+
void SetEncoding(size_t inline_info_size,
size_t dex_register_map_size,
size_t dex_pc_max,
- size_t native_pc_max) {
+ size_t native_pc_max,
+ size_t register_mask_max) {
if (inline_info_size != 0) {
region_.StoreBit(kHasInlineInfoBitOffset, 1);
- region_.StoreBit(kHasSmallInlineInfoBitOffset, IsUint<StackMap::kBitsForSmallEncoding>(
- // + 1 to also encode kNoInlineInfo: if an inline info offset
- // is at 0xFF, we want to overflow to a larger encoding, because it will
- // conflict with kNoInlineInfo.
- // The offset is relative to the dex register map. TODO: Change this.
- inline_info_size + dex_register_map_size + 1));
+ // + 1 to also encode kNoInlineInfo: if an inline info offset
+ // is at 0xFF, we want to overflow to a larger encoding, because it will
+ // conflict with kNoInlineInfo.
+ // The offset is relative to the dex register map. TODO: Change this.
+ SetEncodingAt(kInlineInfoBitOffset,
+ EncodingSizeInBytes(dex_register_map_size + inline_info_size + 1));
} else {
region_.StoreBit(kHasInlineInfoBitOffset, 0);
- region_.StoreBit(kHasSmallInlineInfoBitOffset, 0);
+ SetEncodingAt(kInlineInfoBitOffset, 0);
}
- region_.StoreBit(kHasSmallDexRegisterMapBitOffset,
- // + 1 to also encode kNoDexRegisterMap: if a dex register map offset
- // is at 0xFF, we want to overflow to a larger encoding, because it will
- // conflict with kNoDexRegisterMap.
- IsUint<StackMap::kBitsForSmallEncoding>(dex_register_map_size + 1));
- region_.StoreBit(kHasSmallDexPcBitOffset, IsUint<StackMap::kBitsForSmallEncoding>(dex_pc_max));
- region_.StoreBit(kHasSmallNativePcBitOffset,
- IsUint<StackMap::kBitsForSmallEncoding>(native_pc_max));
+ // + 1 to also encode kNoDexRegisterMap: if a dex register map offset
+ // is at 0xFF, we want to overflow to a larger encoding, because it will
+ // conflict with kNoDexRegisterMap.
+ SetEncodingAt(kDexRegisterMapBitOffset, EncodingSizeInBytes(dex_register_map_size + 1));
+ SetEncodingAt(kDexPcBitOffset, EncodingSizeInBytes(dex_pc_max));
+ SetEncodingAt(kNativePcBitOffset, EncodingSizeInBytes(native_pc_max));
+ SetEncodingAt(kRegisterMaskBitOffset, EncodingSizeInBytes(register_mask_max));
+ }
+
+ void SetEncodingAt(size_t bit_offset, size_t number_of_bytes) {
+    // We use 3 bits to encode the number of bytes (0 to 4) needed to write a value,
+    // for values that we know fit in at most 32 bits.
+ region_.StoreBit(bit_offset, (number_of_bytes & 1));
+ region_.StoreBit(bit_offset + 1, (number_of_bytes & 2));
+ region_.StoreBit(bit_offset + 2, (number_of_bytes & 4));
+ }
+
+ size_t GetNumberOfBytesForEncoding(size_t bit_offset) const {
+ return region_.LoadBit(bit_offset)
+ + (region_.LoadBit(bit_offset + 1) << 1)
+ + (region_.LoadBit(bit_offset + 2) << 2);
}
bool HasInlineInfo() const {
return region_.LoadBit(kHasInlineInfoBitOffset);
}
- bool HasSmallInlineInfo() const {
- return region_.LoadBit(kHasSmallInlineInfoBitOffset);
+ size_t NumberOfBytesForInlineInfo() const {
+ return GetNumberOfBytesForEncoding(kInlineInfoBitOffset);
}
- bool HasSmallDexRegisterMap() const {
- return region_.LoadBit(kHasSmallDexRegisterMapBitOffset);
+ size_t NumberOfBytesForDexRegisterMap() const {
+ return GetNumberOfBytesForEncoding(kDexRegisterMapBitOffset);
}
- bool HasSmallNativePc() const {
- return region_.LoadBit(kHasSmallNativePcBitOffset);
+ size_t NumberOfBytesForRegisterMask() const {
+ return GetNumberOfBytesForEncoding(kRegisterMaskBitOffset);
}
- bool HasSmallDexPc() const {
- return region_.LoadBit(kHasSmallDexPcBitOffset);
+ size_t NumberOfBytesForNativePc() const {
+ return GetNumberOfBytesForEncoding(kNativePcBitOffset);
+ }
+
+ size_t NumberOfBytesForDexPc() const {
+ return GetNumberOfBytesForEncoding(kDexPcBitOffset);
}
size_t ComputeStackMapRegisterMaskOffset() const {
@@ -843,7 +853,8 @@
}
size_t ComputeStackMapStackMaskOffset() const {
- return StackMap::kStackMaskOffset;
+ return ComputeStackMapRegisterMaskOffset()
+ + (NumberOfBytesForRegisterMask() * sizeof(uint8_t));
}
size_t ComputeStackMapDexPcOffset() const {
@@ -852,18 +863,18 @@
size_t ComputeStackMapNativePcOffset() const {
return ComputeStackMapDexPcOffset()
- + (HasSmallDexPc() ? sizeof(uint8_t) : sizeof(uint32_t));
+ + (NumberOfBytesForDexPc() * sizeof(uint8_t));
}
size_t ComputeStackMapDexRegisterMapOffset() const {
return ComputeStackMapNativePcOffset()
- + (HasSmallNativePc() ? sizeof(uint8_t) : sizeof(uint32_t));
+ + (NumberOfBytesForNativePc() * sizeof(uint8_t));
}
size_t ComputeStackMapInlineInfoOffset() const {
CHECK(HasInlineInfo());
return ComputeStackMapDexRegisterMapOffset()
- + (HasSmallDexRegisterMap() ? sizeof(uint8_t) : sizeof(uint32_t));
+ + (NumberOfBytesForDexRegisterMap() * sizeof(uint8_t));
}
uint32_t GetDexRegisterLocationCatalogOffset() const {
@@ -921,12 +932,12 @@
// Get the size of one stack map of this CodeInfo object, in bytes.
// All stack maps of a CodeInfo have the same size.
size_t StackMapSize() const {
- return StackMap::ComputeStackMapSize(GetStackMaskSize(),
- HasInlineInfo(),
- HasSmallInlineInfo(),
- HasSmallDexRegisterMap(),
- HasSmallDexPc(),
- HasSmallNativePc());
+ return StackMap::ComputeStackMapSizeInternal(GetStackMaskSize(),
+ NumberOfBytesForInlineInfo(),
+ NumberOfBytesForDexRegisterMap(),
+ NumberOfBytesForDexPc(),
+ NumberOfBytesForNativePc(),
+ NumberOfBytesForRegisterMask());
}
// Get the size all the stack maps of this CodeInfo object, in bytes.
@@ -989,17 +1000,18 @@
static constexpr int kOverallSizeOffset = 0;
static constexpr int kEncodingInfoOffset = kOverallSizeOffset + sizeof(uint32_t);
static constexpr int kNumberOfDexRegisterLocationCatalogEntriesOffset =
- kEncodingInfoOffset + sizeof(uint8_t);
+ kEncodingInfoOffset + sizeof(uint16_t);
static constexpr int kNumberOfStackMapsOffset =
kNumberOfDexRegisterLocationCatalogEntriesOffset + sizeof(uint32_t);
static constexpr int kStackMaskSizeOffset = kNumberOfStackMapsOffset + sizeof(uint32_t);
static constexpr int kFixedSize = kStackMaskSizeOffset + sizeof(uint32_t);
static constexpr int kHasInlineInfoBitOffset = (kEncodingInfoOffset * kBitsPerByte);
- static constexpr int kHasSmallInlineInfoBitOffset = kHasInlineInfoBitOffset + 1;
- static constexpr int kHasSmallDexRegisterMapBitOffset = kHasSmallInlineInfoBitOffset + 1;
- static constexpr int kHasSmallDexPcBitOffset = kHasSmallDexRegisterMapBitOffset + 1;
- static constexpr int kHasSmallNativePcBitOffset = kHasSmallDexPcBitOffset + 1;
+ static constexpr int kInlineInfoBitOffset = kHasInlineInfoBitOffset + 1;
+ static constexpr int kDexRegisterMapBitOffset = kInlineInfoBitOffset + 3;
+ static constexpr int kDexPcBitOffset = kDexRegisterMapBitOffset + 3;
+ static constexpr int kNativePcBitOffset = kDexPcBitOffset + 3;
+ static constexpr int kRegisterMaskBitOffset = kNativePcBitOffset + 3;
MemoryRegion GetStackMaps() const {
return region_.size() == 0