Diffstat (limited to 'runtime/stack_map.h')
-rw-r--r--  runtime/stack_map.h | 379
1 file changed, 165 insertions(+), 214 deletions(-)
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
index 6da002138c..2f2053a52a 100644
--- a/runtime/stack_map.h
+++ b/runtime/stack_map.h
@@ -19,6 +19,7 @@
#include <limits>
+#include "arch/instruction_set.h"
#include "base/bit_memory_region.h"
#include "base/bit_table.h"
#include "base/bit_utils.h"
@@ -28,10 +29,11 @@
#include "dex/dex_file_types.h"
#include "dex_register_location.h"
#include "method_info.h"
-#include "oat_quick_method_header.h"
+#include "quick/quick_method_frame_info.h"
namespace art {
+class OatQuickMethodHeader;
class VariableIndentationOutputStream;
// Size of a frame slot, in bytes. This constant is a signed value,
@@ -39,95 +41,69 @@ class VariableIndentationOutputStream;
// (signed) values.
static constexpr ssize_t kFrameSlotSize = 4;
+// The delta compression of dex register maps means we need to scan the stack maps backwards.
+// We compress the data in such a way that there is an upper bound on the search distance.
+// Max distance 0 means each stack map must be fully defined and no scanning back is allowed.
+// If this value is changed, the oat file version should be incremented (for DCHECK to pass).
+static constexpr size_t kMaxDexRegisterMapSearchDistance = 32;
+
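[Editor's note: to make the bound concrete, here is a minimal sketch of a backward scan that respects kMaxDexRegisterMapSearchDistance. The helper name and loop body are hypothetical; the real decoder in this change is CodeInfo::DecodeDexRegisterMap in stack_map.cc.]

    // Sketch: resolve register locations by walking backwards from a stack
    // map. The encoder guarantees the defining entry is never more than
    // kMaxDexRegisterMapSearchDistance stack maps away, so the loop is bounded.
    void ScanBackwards(uint32_t stack_map_index) {  // Hypothetical helper.
      uint32_t lowest = (stack_map_index >= kMaxDexRegisterMapSearchDistance)
          ? stack_map_index - kMaxDexRegisterMapSearchDistance
          : 0u;
      for (uint32_t i = stack_map_index; i > lowest; --i) {
        // Read the delta-encoded entries of stack map i - 1 and stop early
        // once every requested register has a location.
      }
    }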
class ArtMethod;
class CodeInfo;
+class Stats;
std::ostream& operator<<(std::ostream& stream, const DexRegisterLocation& reg);
// Information on Dex register locations for a specific PC.
// Effectively just a convenience wrapper for DexRegisterLocation vector.
// If the size is small enough, it keeps the data on the stack.
+// TODO: Replace this with a general-purpose "small-vector" implementation.
class DexRegisterMap {
public:
- // Create map for given number of registers and initialize all locations to None.
- explicit DexRegisterMap(size_t count) : count_(count), regs_small_{} {
+ using iterator = DexRegisterLocation*;
+ using const_iterator = const DexRegisterLocation*;
+
+ // Create map for given number of registers and initialize them to the given value.
+ DexRegisterMap(size_t count, DexRegisterLocation value) : count_(count), regs_small_{} {
if (count_ <= kSmallCount) {
- std::fill_n(regs_small_.begin(), count, DexRegisterLocation::None());
+ std::fill_n(regs_small_.begin(), count, value);
} else {
- regs_large_.resize(count, DexRegisterLocation::None());
+ regs_large_.resize(count, value);
}
}
DexRegisterLocation* data() {
return count_ <= kSmallCount ? regs_small_.data() : regs_large_.data();
}
+ const DexRegisterLocation* data() const {
+ return count_ <= kSmallCount ? regs_small_.data() : regs_large_.data();
+ }
+ iterator begin() { return data(); }
+ iterator end() { return data() + count_; }
+ const_iterator begin() const { return data(); }
+ const_iterator end() const { return data() + count_; }
size_t size() const { return count_; }
+ bool empty() const { return count_ == 0; }
- bool IsValid() const { return count_ != 0; }
-
- DexRegisterLocation Get(size_t index) const {
+ DexRegisterLocation& operator[](size_t index) {
DCHECK_LT(index, count_);
- return count_ <= kSmallCount ? regs_small_[index] : regs_large_[index];
- }
-
- DexRegisterLocation::Kind GetLocationKind(uint16_t dex_register_number) const {
- return Get(dex_register_number).GetKind();
- }
-
- // TODO: Remove.
- DexRegisterLocation::Kind GetLocationInternalKind(uint16_t dex_register_number) const {
- return Get(dex_register_number).GetKind();
+ return data()[index];
}
-
- DexRegisterLocation GetDexRegisterLocation(uint16_t dex_register_number) const {
- return Get(dex_register_number);
- }
-
- int32_t GetStackOffsetInBytes(uint16_t dex_register_number) const {
- DexRegisterLocation location = Get(dex_register_number);
- DCHECK(location.GetKind() == DexRegisterLocation::Kind::kInStack);
- return location.GetValue();
- }
-
- int32_t GetConstant(uint16_t dex_register_number) const {
- DexRegisterLocation location = Get(dex_register_number);
- DCHECK(location.GetKind() == DexRegisterLocation::Kind::kConstant);
- return location.GetValue();
- }
-
- int32_t GetMachineRegister(uint16_t dex_register_number) const {
- DexRegisterLocation location = Get(dex_register_number);
- DCHECK(location.GetKind() == DexRegisterLocation::Kind::kInRegister ||
- location.GetKind() == DexRegisterLocation::Kind::kInRegisterHigh ||
- location.GetKind() == DexRegisterLocation::Kind::kInFpuRegister ||
- location.GetKind() == DexRegisterLocation::Kind::kInFpuRegisterHigh);
- return location.GetValue();
- }
-
- ALWAYS_INLINE bool IsDexRegisterLive(uint16_t dex_register_number) const {
- return Get(dex_register_number).IsLive();
+ const DexRegisterLocation& operator[](size_t index) const {
+ DCHECK_LT(index, count_);
+ return data()[index];
}
size_t GetNumberOfLiveDexRegisters() const {
- size_t number_of_live_dex_registers = 0;
- for (size_t i = 0; i < count_; ++i) {
- if (IsDexRegisterLive(i)) {
- ++number_of_live_dex_registers;
- }
- }
- return number_of_live_dex_registers;
+ return std::count_if(begin(), end(), [](auto& loc) { return loc.IsLive(); });
}
bool HasAnyLiveDexRegisters() const {
- for (size_t i = 0; i < count_; ++i) {
- if (IsDexRegisterLive(i)) {
- return true;
- }
- }
- return false;
+ return std::any_of(begin(), end(), [](auto& loc) { return loc.IsLive(); });
}
+ void Dump(VariableIndentationOutputStream* vios) const;
+
private:
// Store the data inline if the number of registers is small to avoid memory allocations.
// If count_ <= kSmallCount, we use the regs_small_ array, and regs_large_ otherwise.
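[Editor's note: a hedged usage example of the new vector-like API added above; the count of 4 is arbitrary.]

    DexRegisterMap map(/* count */ 4, DexRegisterLocation::Invalid());
    map[0] = DexRegisterLocation::None();
    // Counts via the new begin()/end() pair; no heap allocation occurs
    // while count_ <= kSmallCount, since the data lives in regs_small_.
    size_t live = map.GetNumberOfLiveDexRegisters();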
@@ -145,19 +121,26 @@ class DexRegisterMap {
* - Knowing the inlining information,
* - Knowing the values of dex registers.
*/
-class StackMap : public BitTable<7>::Accessor {
+class StackMap : public BitTableAccessor<8> {
public:
+ enum Kind {
+ Default = -1,
+ Catch = 0,
+ OSR = 1,
+ Debug = 2,
+ };
BIT_TABLE_HEADER()
- BIT_TABLE_COLUMN(0, PackedNativePc)
- BIT_TABLE_COLUMN(1, DexPc)
- BIT_TABLE_COLUMN(2, RegisterMaskIndex)
- BIT_TABLE_COLUMN(3, StackMaskIndex)
- BIT_TABLE_COLUMN(4, InlineInfoIndex)
- BIT_TABLE_COLUMN(5, DexRegisterMaskIndex)
- BIT_TABLE_COLUMN(6, DexRegisterMapIndex)
+ BIT_TABLE_COLUMN(0, Kind)
+ BIT_TABLE_COLUMN(1, PackedNativePc)
+ BIT_TABLE_COLUMN(2, DexPc)
+ BIT_TABLE_COLUMN(3, RegisterMaskIndex)
+ BIT_TABLE_COLUMN(4, StackMaskIndex)
+ BIT_TABLE_COLUMN(5, InlineInfoIndex)
+ BIT_TABLE_COLUMN(6, DexRegisterMaskIndex)
+ BIT_TABLE_COLUMN(7, DexRegisterMapIndex)
ALWAYS_INLINE uint32_t GetNativePcOffset(InstructionSet instruction_set) const {
- return UnpackNativePc(Get<kPackedNativePc>(), instruction_set);
+ return UnpackNativePc(GetPackedNativePc(), instruction_set);
}
ALWAYS_INLINE bool HasInlineInfo() const {
@@ -183,7 +166,6 @@ class StackMap : public BitTable<7>::Accessor {
const CodeInfo& code_info,
const MethodInfo& method_info,
uint32_t code_offset,
- uint16_t number_of_dex_registers,
InstructionSet instruction_set) const;
};
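[Editor's note: the PackedNativePc column relies on instruction alignment to shed redundant low bits. Below is a sketch of the pack/unpack pair, assuming the alignment helper from the newly included arch/instruction_set.h; the actual bodies live elsewhere in this file and are not shown in this hunk.]

    static uint32_t PackNativePc(uint32_t native_pc, InstructionSet isa) {
      // Instructions are aligned, so the low bits carry no information and
      // dropping them keeps the bit-table column values small.
      return native_pc / GetInstructionSetInstructionAlignment(isa);
    }
    static uint32_t UnpackNativePc(uint32_t packed_native_pc, InstructionSet isa) {
      return packed_native_pc * GetInstructionSetInstructionAlignment(isa);
    }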
@@ -192,7 +174,7 @@ class StackMap : public BitTable<7>::Accessor {
* The row referenced from the StackMap holds information at depth 0.
* Following rows hold information for further depths.
*/
-class InlineInfo : public BitTable<7>::Accessor {
+class InlineInfo : public BitTableAccessor<6> {
public:
BIT_TABLE_HEADER()
BIT_TABLE_COLUMN(0, IsLast) // Determines if there are further rows for further depths.
@@ -200,7 +182,6 @@
BIT_TABLE_COLUMN(2, MethodInfoIndex)
BIT_TABLE_COLUMN(3, ArtMethodHi) // High bits of ArtMethod*.
BIT_TABLE_COLUMN(4, ArtMethodLo) // Low bits of ArtMethod*.
- BIT_TABLE_COLUMN(5, DexRegisterMaskIndex)
- BIT_TABLE_COLUMN(6, DexRegisterMapIndex)
+ BIT_TABLE_COLUMN(5, NumberOfDexRegisters) // Includes outer levels and the main method.
static constexpr uint32_t kLast = -1;
@@ -220,18 +202,13 @@ class InlineInfo : public BitTable<7>::Accessor {
return reinterpret_cast<ArtMethod*>((hi << 32) | lo);
}
- ALWAYS_INLINE bool HasDexRegisterMap() const {
- return HasDexRegisterMapIndex();
- }
-
void Dump(VariableIndentationOutputStream* vios,
const CodeInfo& info,
const StackMap& stack_map,
- const MethodInfo& method_info,
- uint16_t number_of_dex_registers) const;
+ const MethodInfo& method_info) const;
};
-class InvokeInfo : public BitTable<3>::Accessor {
+class InvokeInfo : public BitTableAccessor<3> {
public:
BIT_TABLE_HEADER()
BIT_TABLE_COLUMN(0, PackedNativePc)
@@ -239,7 +216,7 @@ class InvokeInfo : public BitTable<3>::Accessor {
BIT_TABLE_COLUMN(2, MethodInfoIndex)
ALWAYS_INLINE uint32_t GetNativePcOffset(InstructionSet instruction_set) const {
- return StackMap::UnpackNativePc(Get<kPackedNativePc>(), instruction_set);
+ return StackMap::UnpackNativePc(GetPackedNativePc(), instruction_set);
}
uint32_t GetMethodIndex(MethodInfo method_info) const {
@@ -247,7 +224,19 @@ class InvokeInfo : public BitTable<3>::Accessor {
}
};
-class DexRegisterInfo : public BitTable<2>::Accessor {
+class MaskInfo : public BitTableAccessor<1> {
+ public:
+ BIT_TABLE_HEADER()
+ BIT_TABLE_COLUMN(0, Mask)
+};
+
+class DexRegisterMapInfo : public BitTableAccessor<1> {
+ public:
+ BIT_TABLE_HEADER()
+ BIT_TABLE_COLUMN(0, CatalogueIndex)
+};
+
+class DexRegisterInfo : public BitTableAccessor<2> {
public:
BIT_TABLE_HEADER()
BIT_TABLE_COLUMN(0, Kind)
@@ -278,7 +267,7 @@ class DexRegisterInfo : public BitTable<2>::Accessor {
// Register masks tend to have many trailing zero bits (caller-saves are usually not encoded),
// therefore it is worth encoding the mask as value+shift.
-class RegisterMask : public BitTable<2>::Accessor {
+class RegisterMask : public BitTableAccessor<2> {
public:
BIT_TABLE_HEADER()
BIT_TABLE_COLUMN(0, Value)
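[Editor's note: the full mask is reconstructed by shifting the stored value; a sketch assuming a Shift column follows the Value column (the rest of the class sits outside this hunk's context).]

    ALWAYS_INLINE uint32_t GetMask() const {
      // Caller-save registers are usually not encoded, so masks have many
      // trailing zeros; storing value + shift keeps both columns narrow.
      return GetValue() << GetShift();
    }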
@@ -300,23 +289,21 @@ class CodeInfo {
}
explicit CodeInfo(MemoryRegion region) : CodeInfo(region.begin()) {
- DCHECK_EQ(size_, region.size());
+ DCHECK_EQ(Size(), region.size());
}
- explicit CodeInfo(const OatQuickMethodHeader* header)
- : CodeInfo(header->GetOptimizedCodeInfoPtr()) {
- }
+ explicit CodeInfo(const OatQuickMethodHeader* header);
size_t Size() const {
- return size_;
+ return BitsToBytesRoundUp(size_in_bits_);
}
- bool HasInlineInfo() const {
- return inline_infos_.NumRows() > 0;
+ ALWAYS_INLINE const BitTable<StackMap>& GetStackMaps() const {
+ return stack_maps_;
}
ALWAYS_INLINE StackMap GetStackMapAt(size_t index) const {
- return StackMap(&stack_maps_, index);
+ return stack_maps_.GetRow(index);
}
BitMemoryRegion GetStackMask(size_t index) const {
@@ -330,7 +317,7 @@ class CodeInfo {
uint32_t GetRegisterMaskOf(const StackMap& stack_map) const {
uint32_t index = stack_map.GetRegisterMaskIndex();
- return (index == StackMap::kNoValue) ? 0 : RegisterMask(&register_masks_, index).GetMask();
+ return (index == StackMap::kNoValue) ? 0 : register_masks_.GetRow(index).GetMask();
}
uint32_t GetNumberOfLocationCatalogEntries() const {
@@ -338,7 +325,13 @@ class CodeInfo {
}
ALWAYS_INLINE DexRegisterLocation GetDexRegisterCatalogEntry(size_t index) const {
- return DexRegisterInfo(&dex_register_catalog_, index).GetLocation();
+ return (index == StackMap::kNoValue)
+ ? DexRegisterLocation::None()
+ : dex_register_catalog_.GetRow(index).GetLocation();
+ }
+
+ bool HasInlineInfo() const {
+ return inline_infos_.NumRows() > 0;
}
uint32_t GetNumberOfStackMaps() const {
@@ -346,174 +339,132 @@ class CodeInfo {
}
InvokeInfo GetInvokeInfo(size_t index) const {
- return InvokeInfo(&invoke_infos_, index);
- }
-
- ALWAYS_INLINE DexRegisterMap GetDexRegisterMapOf(StackMap stack_map,
- size_t num_dex_registers) const {
- return DecodeDexRegisterMap(stack_map.GetDexRegisterMaskIndex(),
- stack_map.GetDexRegisterMapIndex(),
- num_dex_registers);
- }
-
- ALWAYS_INLINE DexRegisterMap GetDexRegisterMapAtDepth(uint8_t depth,
- StackMap stack_map,
- size_t num_dex_registers) const {
- InlineInfo inline_info = GetInlineInfoAtDepth(stack_map, depth);
- return DecodeDexRegisterMap(inline_info.GetDexRegisterMaskIndex(),
- inline_info.GetDexRegisterMapIndex(),
- num_dex_registers);
+ return invoke_infos_.GetRow(index);
}
- InlineInfo GetInlineInfo(size_t index) const {
- return InlineInfo(&inline_infos_, index);
+ ALWAYS_INLINE DexRegisterMap GetDexRegisterMapOf(StackMap stack_map) const {
+ if (stack_map.HasDexRegisterMap()) {
+ DexRegisterMap map(number_of_dex_registers_, DexRegisterLocation::Invalid());
+ DecodeDexRegisterMap(stack_map.Row(), /* first_dex_register */ 0, &map);
+ return map;
+ }
+ return DexRegisterMap(0, DexRegisterLocation::None());
+ }
+
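[Editor's note: a hedged usage sketch of the new no-count overload above; method_header, dex_pc, and the register index are illustrative.]

    CodeInfo code_info(method_header);
    StackMap stack_map = code_info.GetStackMapForDexPc(dex_pc);
    DexRegisterMap map = code_info.GetDexRegisterMapOf(stack_map);
    if (!map.empty() && map[2].IsLive()) {
      DexRegisterLocation loc = map[2];  // e.g. kInStack with a frame offset.
    }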
+ ALWAYS_INLINE DexRegisterMap GetInlineDexRegisterMapOf(StackMap stack_map,
+ InlineInfo inline_info) const {
+ if (stack_map.HasDexRegisterMap()) {
+ DCHECK(stack_map.HasInlineInfoIndex());
+ uint32_t depth = inline_info.Row() - stack_map.GetInlineInfoIndex();
+ // The register counts are cumulative and include all outer levels.
+ // This allows us to determine the range [first, last) in just two lookups.
+ // If we are at depth 0 (the first inlinee), the count from the main method is used.
+ uint32_t first = (depth == 0)
+ ? number_of_dex_registers_
+ : inline_infos_.GetRow(inline_info.Row() - 1).GetNumberOfDexRegisters();
+ uint32_t last = inline_info.GetNumberOfDexRegisters();
+ DexRegisterMap map(last - first, DexRegisterLocation::Invalid());
+ DecodeDexRegisterMap(stack_map.Row(), first, &map);
+ return map;
+ }
+ return DexRegisterMap(0, DexRegisterLocation::None());
}
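[Editor's note: a worked example of the cumulative counts; all numbers are illustrative.]

    // main method:    10 vregs  -> number_of_dex_registers_     = 10
    // inline depth 0:  3 vregs  -> NumberOfDexRegisters (row 0) = 10 + 3 = 13
    // inline depth 1:  5 vregs  -> NumberOfDexRegisters (row 1) = 13 + 5 = 18
    // For depth 1: first = 13 (previous row), last = 18 (current row),
    // so its registers occupy the range [13, 18) of the combined map.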
- uint32_t GetInlineDepthOf(StackMap stack_map) const {
- uint32_t depth = 0;
+ BitTableRange<InlineInfo> GetInlineInfosOf(StackMap stack_map) const {
uint32_t index = stack_map.GetInlineInfoIndex();
if (index != StackMap::kNoValue) {
- while (GetInlineInfo(index + depth++).GetIsLast() == InlineInfo::kMore) { }
+ auto begin = inline_infos_.begin() + index;
+ auto end = begin;
+ while ((*end++).GetIsLast() == InlineInfo::kMore) { }
+ return BitTableRange<InlineInfo>(begin, end);
+ } else {
+ return BitTableRange<InlineInfo>();
}
- return depth;
- }
-
- InlineInfo GetInlineInfoAtDepth(StackMap stack_map, uint32_t depth) const {
- DCHECK(stack_map.HasInlineInfo());
- DCHECK_LT(depth, GetInlineDepthOf(stack_map));
- return GetInlineInfo(stack_map.GetInlineInfoIndex() + depth);
}
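[Editor's note: callers that previously combined GetInlineDepthOf and GetInlineInfoAtDepth can now iterate the range directly; a hedged sketch.]

    for (InlineInfo inline_info : code_info.GetInlineInfosOf(stack_map)) {
      // Rows run from the outermost inlined frame to the innermost one.
      DexRegisterMap map =
          code_info.GetInlineDexRegisterMapOf(stack_map, inline_info);
    }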
StackMap GetStackMapForDexPc(uint32_t dex_pc) const {
- for (size_t i = 0, e = GetNumberOfStackMaps(); i < e; ++i) {
- StackMap stack_map = GetStackMapAt(i);
- if (stack_map.GetDexPc() == dex_pc) {
+ for (StackMap stack_map : stack_maps_) {
+ if (stack_map.GetDexPc() == dex_pc && stack_map.GetKind() != StackMap::Kind::Debug) {
return stack_map;
}
}
- return StackMap();
+ return stack_maps_.GetInvalidRow();
}
- // Searches the stack map list backwards because catch stack maps are stored
- // at the end.
+ // Searches the stack map list backwards because catch stack maps are stored at the end.
StackMap GetCatchStackMapForDexPc(uint32_t dex_pc) const {
for (size_t i = GetNumberOfStackMaps(); i > 0; --i) {
StackMap stack_map = GetStackMapAt(i - 1);
- if (stack_map.GetDexPc() == dex_pc) {
+ if (stack_map.GetDexPc() == dex_pc && stack_map.GetKind() == StackMap::Kind::Catch) {
return stack_map;
}
}
- return StackMap();
+ return stack_maps_.GetInvalidRow();
}
StackMap GetOsrStackMapForDexPc(uint32_t dex_pc) const {
- size_t e = GetNumberOfStackMaps();
- if (e == 0) {
- // There cannot be OSR stack map if there is no stack map.
- return StackMap();
- }
- // Walk over all stack maps. If two consecutive stack maps are identical, then we
- // have found a stack map suitable for OSR.
- for (size_t i = 0; i < e - 1; ++i) {
- StackMap stack_map = GetStackMapAt(i);
- if (stack_map.GetDexPc() == dex_pc) {
- StackMap other = GetStackMapAt(i + 1);
- if (other.GetDexPc() == dex_pc &&
- other.GetNativePcOffset(kRuntimeISA) ==
- stack_map.GetNativePcOffset(kRuntimeISA)) {
- DCHECK_EQ(other.GetDexRegisterMapIndex(),
- stack_map.GetDexRegisterMapIndex());
- if (i < e - 2) {
- // Make sure there are not three identical stack maps following each other.
- DCHECK_NE(
- stack_map.GetNativePcOffset(kRuntimeISA),
- GetStackMapAt(i + 2).GetNativePcOffset(kRuntimeISA));
- }
- return stack_map;
- }
- }
- }
- return StackMap();
- }
-
- StackMap GetStackMapForNativePcOffset(uint32_t native_pc_offset) const {
- // TODO: Safepoint stack maps are sorted by native_pc_offset but catch stack
- // maps are not. If we knew that the method does not have try/catch,
- // we could do binary search.
- for (size_t i = 0, e = GetNumberOfStackMaps(); i < e; ++i) {
- StackMap stack_map = GetStackMapAt(i);
- if (stack_map.GetNativePcOffset(kRuntimeISA) == native_pc_offset) {
+ for (StackMap stack_map : stack_maps_) {
+ if (stack_map.GetDexPc() == dex_pc && stack_map.GetKind() == StackMap::Kind::OSR) {
return stack_map;
}
}
- return StackMap();
+ return stack_maps_.GetInvalidRow();
}
+ StackMap GetStackMapForNativePcOffset(uint32_t pc, InstructionSet isa = kRuntimeISA) const;
+
InvokeInfo GetInvokeInfoForNativePcOffset(uint32_t native_pc_offset) {
- for (size_t index = 0; index < invoke_infos_.NumRows(); index++) {
- InvokeInfo item = GetInvokeInfo(index);
+ for (InvokeInfo item : invoke_infos_) {
if (item.GetNativePcOffset(kRuntimeISA) == native_pc_offset) {
return item;
}
}
- return InvokeInfo();
+ return invoke_infos_.GetInvalidRow();
}
// Dump this CodeInfo object on `vios`.
// `code_offset` is the (absolute) native PC of the compiled method.
void Dump(VariableIndentationOutputStream* vios,
uint32_t code_offset,
- uint16_t number_of_dex_registers,
bool verbose,
InstructionSet instruction_set,
const MethodInfo& method_info) const;
+ // Accumulate code info size statistics into the given Stats tree.
+ void AddSizeStats(/*out*/ Stats* parent) const;
+
+ ALWAYS_INLINE static QuickMethodFrameInfo DecodeFrameInfo(const uint8_t* data) {
+ return QuickMethodFrameInfo(
+ DecodeUnsignedLeb128(&data),
+ DecodeUnsignedLeb128(&data),
+ DecodeUnsignedLeb128(&data));
+ }
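[Editor's note: a hedged usage sketch; code_info_data is a hypothetical pointer to the encoded CodeInfo. The three LEB128 values map onto the QuickMethodFrameInfo fields in declaration order: frame size, core spill mask, FP spill mask.]

    QuickMethodFrameInfo frame_info = CodeInfo::DecodeFrameInfo(code_info_data);
    size_t frame_size = frame_info.FrameSizeInBytes();
    uint32_t core_spills = frame_info.CoreSpillMask();
    uint32_t fp_spills = frame_info.FpSpillMask();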
+
private:
- ALWAYS_INLINE DexRegisterMap DecodeDexRegisterMap(uint32_t mask_index,
- uint32_t map_index,
- uint32_t num_dex_registers) const {
- DexRegisterMap map(map_index == StackMap::kNoValue ? 0 : num_dex_registers);
- if (mask_index != StackMap::kNoValue) {
- BitMemoryRegion mask = dex_register_masks_.GetBitMemoryRegion(mask_index);
- num_dex_registers = std::min<uint32_t>(num_dex_registers, mask.size_in_bits());
- DexRegisterLocation* regs = map.data();
- for (uint32_t r = 0; r < mask.size_in_bits(); r++) {
- if (mask.LoadBit(r) /* is_live */) {
- DCHECK_LT(r, map.size());
- regs[r] = GetDexRegisterCatalogEntry(dex_register_maps_.Get(map_index++));
- }
- }
- }
- return map;
- }
-
- void Decode(const uint8_t* data) {
- size_t non_header_size = DecodeUnsignedLeb128(&data);
- BitMemoryRegion region(MemoryRegion(const_cast<uint8_t*>(data), non_header_size));
- size_t bit_offset = 0;
- size_ = UnsignedLeb128Size(non_header_size) + non_header_size;
- stack_maps_.Decode(region, &bit_offset);
- register_masks_.Decode(region, &bit_offset);
- stack_masks_.Decode(region, &bit_offset);
- invoke_infos_.Decode(region, &bit_offset);
- inline_infos_.Decode(region, &bit_offset);
- dex_register_masks_.Decode(region, &bit_offset);
- dex_register_maps_.Decode(region, &bit_offset);
- dex_register_catalog_.Decode(region, &bit_offset);
- CHECK_EQ(non_header_size, BitsToBytesRoundUp(bit_offset)) << "Invalid CodeInfo";
- }
-
- size_t size_;
- BitTable<StackMap::kCount> stack_maps_;
- BitTable<RegisterMask::kCount> register_masks_;
- BitTable<1> stack_masks_;
- BitTable<InvokeInfo::kCount> invoke_infos_;
- BitTable<InlineInfo::kCount> inline_infos_;
- BitTable<1> dex_register_masks_;
- BitTable<1> dex_register_maps_;
- BitTable<DexRegisterInfo::kCount> dex_register_catalog_;
-
- friend class OatDumper;
+ // Returns the lower bound (first stack map whose pc is greater than or equal to the desired one).
+ // It ignores catch stack maps at the end (it is the same as if they had the maximum pc value).
+ BitTable<StackMap>::const_iterator BinarySearchNativePc(uint32_t packed_pc) const;
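[Editor's note: only the declaration appears here; a hypothetical sketch of its shape (the definition lives in stack_map.cc).]

    // Classic lower bound over packed pcs; trailing catch stack maps behave
    // as if their pc were the maximum value, so they never compare less.
    uint32_t lo = 0;
    uint32_t hi = GetNumberOfStackMaps();
    while (lo < hi) {
      uint32_t mid = lo + (hi - lo) / 2;
      StackMap sm = GetStackMapAt(mid);
      bool less = sm.GetKind() != StackMap::Kind::Catch &&
                  sm.GetPackedNativePc() < packed_pc;
      if (less) {
        lo = mid + 1;
      } else {
        hi = mid;
      }
    }
    // stack_maps_.begin() + lo is the lower-bound iterator.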
+
+ // Scan backward to determine dex register locations at given stack map.
+ void DecodeDexRegisterMap(uint32_t stack_map_index,
+ uint32_t first_dex_register,
+ /*out*/ DexRegisterMap* map) const;
+
+ void Decode(const uint8_t* data);
+
+ uint32_t frame_size_in_bytes_;
+ uint32_t core_spill_mask_;
+ uint32_t fp_spill_mask_;
+ uint32_t number_of_dex_registers_;
+ BitTable<StackMap> stack_maps_;
+ BitTable<RegisterMask> register_masks_;
+ BitTable<MaskInfo> stack_masks_;
+ BitTable<InvokeInfo> invoke_infos_;
+ BitTable<InlineInfo> inline_infos_;
+ BitTable<MaskInfo> dex_register_masks_;
+ BitTable<DexRegisterMapInfo> dex_register_maps_;
+ BitTable<DexRegisterInfo> dex_register_catalog_;
+ uint32_t size_in_bits_;
};
#undef ELEMENT_BYTE_OFFSET_AFTER