Diffstat (limited to 'runtime/stack_map.h')
-rw-r--r--  runtime/stack_map.h  110
1 file changed, 56 insertions(+), 54 deletions(-)
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
index 87133cf59c..2065a79dd5 100644
--- a/runtime/stack_map.h
+++ b/runtime/stack_map.h
@@ -181,7 +181,6 @@ class InlineInfo : public BitTableAccessor<6> {
BIT_TABLE_COLUMN(3, ArtMethodHi) // High bits of ArtMethod*.
BIT_TABLE_COLUMN(4, ArtMethodLo) // Low bits of ArtMethod*.
BIT_TABLE_COLUMN(5, NumberOfDexRegisters) // Includes outer levels and the main method.
- BIT_TABLE_COLUMN(6, DexRegisterMapIndex)
static constexpr uint32_t kLast = -1;
static constexpr uint32_t kMore = 0;
@@ -292,26 +291,14 @@ class CodeInfo {
std::map<BitMemoryRegion, uint32_t, BitMemoryRegion::Less> dedupe_map_;
};
- enum DecodeFlags {
- AllTables = 0,
- // Limits the decoding only to the data needed by GC.
- GcMasksOnly = 1,
- // Limits the decoding only to the main stack map table and inline info table.
- // This is sufficient for many use cases and makes the header decoding faster.
- InlineInfoOnly = 2,
- };
-
- CodeInfo() {}
-
- explicit CodeInfo(const uint8_t* data, DecodeFlags flags = AllTables) {
- Decode(reinterpret_cast<const uint8_t*>(data), flags);
- }
+ ALWAYS_INLINE CodeInfo() {}
+ ALWAYS_INLINE explicit CodeInfo(const uint8_t* data, size_t* num_read_bits = nullptr);
+ ALWAYS_INLINE explicit CodeInfo(const OatQuickMethodHeader* header);
- explicit CodeInfo(const OatQuickMethodHeader* header, DecodeFlags flags = AllTables);
-
- size_t Size() const {
- return BitsToBytesRoundUp(size_in_bits_);
- }
+ // The following methods decode only part of the data.
+ static QuickMethodFrameInfo DecodeFrameInfo(const uint8_t* data);
+ static CodeInfo DecodeGcMasksOnly(const OatQuickMethodHeader* header);
+ static CodeInfo DecodeInlineInfoOnly(const OatQuickMethodHeader* header);
ALWAYS_INLINE const BitTable<StackMap>& GetStackMaps() const {
return stack_maps_;
@@ -438,58 +425,72 @@ class CodeInfo {
// Accumulate code info size statistics into the given Stats tree.
static void CollectSizeStats(const uint8_t* code_info, /*out*/ Stats* parent);
- ALWAYS_INLINE static QuickMethodFrameInfo DecodeFrameInfo(const uint8_t* data) {
- BitMemoryReader reader(data);
- return QuickMethodFrameInfo(
- reader.ReadVarint() * kStackAlignment, // Decode packed_frame_size_ and unpack.
- reader.ReadVarint(), // core_spill_mask_.
- reader.ReadVarint()); // fp_spill_mask_.
+ ALWAYS_INLINE static bool HasInlineInfo(const uint8_t* code_info_data) {
+ return (*code_info_data & kHasInlineInfo) != 0;
}
- private:
- // Returns lower bound (first stack map which has a pc greater than or equal to the desired one).
- // It ignores catch stack maps at the end (it is the same as if they had maximum pc value).
- BitTable<StackMap>::const_iterator BinarySearchNativePc(uint32_t packed_pc) const;
+ ALWAYS_INLINE static bool IsBaseline(const uint8_t* code_info_data) {
+ return (*code_info_data & kIsBaseline) != 0;
+ }
+ private:
// Scan backward to determine dex register locations at given stack map.
void DecodeDexRegisterMap(uint32_t stack_map_index,
uint32_t first_dex_register,
/*out*/ DexRegisterMap* map) const;
- void Decode(const uint8_t* data, DecodeFlags flags);
+ template<typename DecodeCallback> // (size_t index, BitTable<...>*, BitMemoryRegion).
+ ALWAYS_INLINE CodeInfo(const uint8_t* data, size_t* num_read_bits, DecodeCallback callback);
- // Invokes the callback with member pointer of each header field.
+ // Invokes the callback with index and member pointer of each header field.
template<typename Callback>
ALWAYS_INLINE static void ForEachHeaderField(Callback callback) {
- callback(&CodeInfo::packed_frame_size_);
- callback(&CodeInfo::core_spill_mask_);
- callback(&CodeInfo::fp_spill_mask_);
- callback(&CodeInfo::number_of_dex_registers_);
+ size_t index = 0;
+ callback(index++, &CodeInfo::flags_);
+ callback(index++, &CodeInfo::packed_frame_size_);
+ callback(index++, &CodeInfo::core_spill_mask_);
+ callback(index++, &CodeInfo::fp_spill_mask_);
+ callback(index++, &CodeInfo::number_of_dex_registers_);
+ callback(index++, &CodeInfo::bit_table_flags_);
+ DCHECK_EQ(index, kNumHeaders);
}
- // Invokes the callback with member pointer of each BitTable field.
+ // Invokes the callback with index and member pointer of each BitTable field.
template<typename Callback>
- ALWAYS_INLINE static void ForEachBitTableField(Callback callback, DecodeFlags flags = AllTables) {
- callback(&CodeInfo::stack_maps_);
- callback(&CodeInfo::register_masks_);
- callback(&CodeInfo::stack_masks_);
- if (flags & DecodeFlags::GcMasksOnly) {
- return;
- }
- callback(&CodeInfo::inline_infos_);
- callback(&CodeInfo::method_infos_);
- if (flags & DecodeFlags::InlineInfoOnly) {
- return;
- }
- callback(&CodeInfo::dex_register_masks_);
- callback(&CodeInfo::dex_register_maps_);
- callback(&CodeInfo::dex_register_catalog_);
- }
+ ALWAYS_INLINE static void ForEachBitTableField(Callback callback) {
+ size_t index = 0;
+ callback(index++, &CodeInfo::stack_maps_);
+ callback(index++, &CodeInfo::register_masks_);
+ callback(index++, &CodeInfo::stack_masks_);
+ callback(index++, &CodeInfo::inline_infos_);
+ callback(index++, &CodeInfo::method_infos_);
+ callback(index++, &CodeInfo::dex_register_masks_);
+ callback(index++, &CodeInfo::dex_register_maps_);
+ callback(index++, &CodeInfo::dex_register_catalog_);
+ DCHECK_EQ(index, kNumBitTables);
+ }
+
+ bool HasBitTable(size_t i) { return ((bit_table_flags_ >> i) & 1) != 0; }
+ bool IsBitTableDeduped(size_t i) { return ((bit_table_flags_ >> (kNumBitTables + i)) & 1) != 0; }
+ void SetBitTableDeduped(size_t i) { bit_table_flags_ |= 1 << (kNumBitTables + i); }
+
+ enum Flags {
+ kHasInlineInfo = 1 << 0,
+ kIsBaseline = 1 << 1,
+ };
+ // The CodeInfo starts with a sequence of variable-length bit-encoded integers.
+ static constexpr size_t kNumHeaders = 6;
+ uint32_t flags_ = 0;
uint32_t packed_frame_size_ = 0; // Frame size in kStackAlignment units.
uint32_t core_spill_mask_ = 0;
uint32_t fp_spill_mask_ = 0;
uint32_t number_of_dex_registers_ = 0;
+ uint32_t bit_table_flags_ = 0;
+
+ // The encoded bit-tables follow the header. Based on the above flags field,
+ // bit-tables might be omitted or replaced by relative bit-offset if deduped.
+ static constexpr size_t kNumBitTables = 8;
BitTable<StackMap> stack_maps_;
BitTable<RegisterMask> register_masks_;
BitTable<StackMask> stack_masks_;
@@ -498,7 +499,8 @@ class CodeInfo {
BitTable<DexRegisterMask> dex_register_masks_;
BitTable<DexRegisterMapInfo> dex_register_maps_;
BitTable<DexRegisterInfo> dex_register_catalog_;
- uint32_t size_in_bits_ = 0;
+
+ friend class StackMapStream;
};
#undef ELEMENT_BYTE_OFFSET_AFTER
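
For context on the API change above, a minimal caller-side sketch (not part of this patch; the function WalkStackMapsForGc and its body are illustrative) of how the removed DecodeFlags constructor maps onto the new named entry points:

// Illustrative sketch only: migrating a GC-side caller from the removed
// DecodeFlags constructor to the named partial-decode helpers declared above.
#include <cstddef>
#include "stack_map.h"  // runtime/stack_map.h in the ART tree.

void WalkStackMapsForGc(const art::OatQuickMethodHeader* header) {
  // Before this change:
  //   art::CodeInfo code_info(header, art::CodeInfo::DecodeFlags::GcMasksOnly);
  // After this change the decode scope is selected by a named factory; as in
  // the old GcMasksOnly path, only the stack map, register mask and stack
  // mask tables are decoded.
  art::CodeInfo code_info = art::CodeInfo::DecodeGcMasksOnly(header);
  size_t num_stack_maps = code_info.GetStackMaps().NumRows();
  (void)num_stack_maps;  // ... visit each stack map's masks here ...
}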
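
The new bit_table_flags_ header word packs two small bitsets, as the HasBitTable / IsBitTableDeduped accessors above imply: the low kNumBitTables bits record which of the eight tables are present, and the next kNumBitTables bits record which present tables are stored as a deduplication back-reference (a relative bit-offset) instead of inline data. A minimal standalone sketch of that layout (the main() driver and table-index comments are illustrative only):

#include <cstddef>
#include <cstdint>
#include <cstdio>

// Mirrors the accessors in the patch; kNumBitTables = 8.
constexpr size_t kNumBitTables = 8;

constexpr bool HasBitTable(uint32_t flags, size_t i) {
  return ((flags >> i) & 1) != 0;
}
constexpr bool IsBitTableDeduped(uint32_t flags, size_t i) {
  return ((flags >> (kNumBitTables + i)) & 1) != 0;
}

int main() {
  // Example: stack_maps_ (index 0) present inline; inline_infos_ (index 3)
  // present but deduped, i.e. stored as a back-reference to earlier data.
  uint32_t flags = (1u << 0) | (1u << 3) | (1u << (kNumBitTables + 3));
  std::printf("stack_maps_:   present=%d deduped=%d\n",
              HasBitTable(flags, 0), IsBitTableDeduped(flags, 0));
  std::printf("inline_infos_: present=%d deduped=%d\n",
              HasBitTable(flags, 3), IsBitTableDeduped(flags, 3));
  return 0;
}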