-rw-r--r--  compiler/optimizing/stack_map_stream.cc | 10
-rw-r--r--  libartbase/base/bit_table.h             | 43
-rw-r--r--  runtime/stack_map.cc                    | 21
-rw-r--r--  runtime/stack_map.h                     | 20
4 files changed, 79 insertions, 15 deletions
diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc
index fb5d9332d0..e8b333047d 100644
--- a/compiler/optimizing/stack_map_stream.cc
+++ b/compiler/optimizing/stack_map_stream.cc
@@ -61,6 +61,16 @@ void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
   current_stack_map_[StackMap::kPackedNativePc] =
       StackMap::PackNativePc(native_pc_offset, instruction_set_);
   current_stack_map_[StackMap::kDexPc] = dex_pc;
+  if (stack_maps_.size() > 0) {
+    // Check that non-catch stack maps are sorted by pc.
+    // Catch stack maps are at the end and may be unordered.
+    if (stack_maps_.back()[StackMap::kKind] == StackMap::Kind::Catch) {
+      DCHECK(current_stack_map_[StackMap::kKind] == StackMap::Kind::Catch);
+    } else if (current_stack_map_[StackMap::kKind] != StackMap::Kind::Catch) {
+      DCHECK_LE(stack_maps_.back()[StackMap::kPackedNativePc],
+                current_stack_map_[StackMap::kPackedNativePc]);
+    }
+  }
   if (register_mask != 0) {
     uint32_t shift = LeastSignificantBit(register_mask);
     BitTableBuilder<RegisterMask>::Entry entry;
diff --git a/libartbase/base/bit_table.h b/libartbase/base/bit_table.h
index 053bf1f783..8036db1a16 100644
--- a/libartbase/base/bit_table.h
+++ b/libartbase/base/bit_table.h
@@ -188,13 +188,55 @@ static const char* const* GetBitTableColumnNames() {
 template<typename Accessor>
 class BitTable : public BitTableBase<Accessor::kNumColumns> {
  public:
+  class const_iterator : public std::iterator<std::random_access_iterator_tag,
+                                              /* value_type */ Accessor,
+                                              /* difference_type */ int32_t,
+                                              /* pointer */ void,
+                                              /* reference */ void> {
+   public:
+    using difference_type = int32_t;
+    const_iterator() {}
+    const_iterator(const BitTable* table, uint32_t row) : table_(table), row_(row) {}
+    const_iterator operator+(difference_type n) { return const_iterator(table_, row_ + n); }
+    const_iterator operator-(difference_type n) { return const_iterator(table_, row_ - n); }
+    difference_type operator-(const const_iterator& other) { return row_ - other.row_; }
+    void operator+=(difference_type rows) { row_ += rows; }
+    void operator-=(difference_type rows) { row_ -= rows; }
+    const_iterator operator++() { return const_iterator(table_, ++row_); }
+    const_iterator operator--() { return const_iterator(table_, --row_); }
+    const_iterator operator++(int) { return const_iterator(table_, row_++); }
+    const_iterator operator--(int) { return const_iterator(table_, row_--); }
+    bool operator==(const_iterator i) const { DCHECK(table_ == i.table_); return row_ == i.row_; }
+    bool operator!=(const_iterator i) const { DCHECK(table_ == i.table_); return row_ != i.row_; }
+    bool operator<=(const_iterator i) const { DCHECK(table_ == i.table_); return row_ <= i.row_; }
+    bool operator>=(const_iterator i) const { DCHECK(table_ == i.table_); return row_ >= i.row_; }
+    bool operator<(const_iterator i) const { DCHECK(table_ == i.table_); return row_ < i.row_; }
+    bool operator>(const_iterator i) const { DCHECK(table_ == i.table_); return row_ > i.row_; }
+    Accessor operator*() { return Accessor(table_, row_); }
+    Accessor operator->() { return Accessor(table_, row_); }
+    Accessor operator[](size_t index) { return Accessor(table_, row_ + index); }
+   private:
+    const BitTable* table_ = nullptr;
+    uint32_t row_ = 0;
+  };
+
   using BitTableBase<Accessor::kNumColumns>::BitTableBase;  // Constructors.
 
+  ALWAYS_INLINE const_iterator begin() const { return const_iterator(this, 0); }
+  ALWAYS_INLINE const_iterator end() const { return const_iterator(this, this->NumRows()); }
+
   ALWAYS_INLINE Accessor GetRow(uint32_t row) const {
     return Accessor(this, row);
   }
 };
 
+template<typename Accessor>
+typename BitTable<Accessor>::const_iterator operator+(
+    typename BitTable<Accessor>::const_iterator::difference_type n,
+    typename BitTable<Accessor>::const_iterator a) {
+  return a + n;
+}
+
 // Helper class for encoding BitTable. It can optionally de-duplicate the inputs.
 template<uint32_t kNumColumns>
 class BitTableBuilderBase {
@@ -234,6 +276,7 @@ class BitTableBuilderBase {
 
   Entry& operator[](size_t row) { return rows_[row]; }
   const Entry& operator[](size_t row) const { return rows_[row]; }
+  const Entry& back() const { return rows_.back(); }
   size_t size() const { return rows_.size(); }
 
   // Append given value to the vector without de-duplication.
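The const_iterator added above is what makes the new binary search possible: BitTable rows are bit-packed and decoded on demand, so the iterator hands out Accessor objects by value (hence the void pointer/reference traits) while still advertising random access. A minimal standalone sketch of the same shape; PackedTable and everything in it are invented names, not ART code:

#include <algorithm>
#include <cassert>
#include <cstdint>
#include <iterator>
#include <vector>

// Invented stand-in for a bit-packed table: rows are materialized on demand.
class PackedTable {
 public:
  explicit PackedTable(std::vector<uint32_t> pcs) : pcs_(std::move(pcs)) {}

  class const_iterator {
   public:
    using iterator_category = std::random_access_iterator_tag;
    using value_type = uint32_t;
    using difference_type = int32_t;
    using pointer = void;        // rows are returned by value,
    using reference = uint32_t;  // so there is nothing to point into

    const_iterator(const PackedTable* t, uint32_t row) : table_(t), row_(row) {}
    uint32_t operator*() const { return table_->pcs_[row_]; }
    const_iterator& operator++() { ++row_; return *this; }
    const_iterator& operator+=(difference_type n) { row_ += n; return *this; }
    const_iterator operator+(difference_type n) const { return const_iterator(table_, row_ + n); }
    difference_type operator-(const const_iterator& o) const { return row_ - o.row_; }
    bool operator==(const const_iterator& o) const { return row_ == o.row_; }
    bool operator!=(const const_iterator& o) const { return row_ != o.row_; }

   private:
    const PackedTable* table_;
    uint32_t row_;
  };

  const_iterator begin() const { return const_iterator(this, 0); }
  const_iterator end() const { return const_iterator(this, static_cast<uint32_t>(pcs_.size())); }

 private:
  std::vector<uint32_t> pcs_;
};

int main() {
  PackedTable table({0, 8, 16, 16, 24});
  // partition_point only dereferences, compares, and advances iterators, so
  // the random-access traits above are enough for it to bisect in O(log n).
  auto it = std::partition_point(table.begin(), table.end(),
                                 [](uint32_t pc) { return pc < 16; });
  assert(*it == 16);
  return 0;
}

Strictly speaking, a value-returning operator* does not meet the pre-C++20 forward-iterator requirements, but the algorithms used here tolerate the proxy style; the same caveat applies to the real const_iterator above.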
diff --git a/runtime/stack_map.cc b/runtime/stack_map.cc
index f2418d0f1a..f40168b8b8 100644
--- a/runtime/stack_map.cc
+++ b/runtime/stack_map.cc
@@ -26,6 +26,27 @@
 
 namespace art {
 
+BitTable<StackMap>::const_iterator CodeInfo::BinarySearchNativePc(uint32_t packed_pc) const {
+  return std::partition_point(
+      stack_maps_.begin(),
+      stack_maps_.end(),
+      [packed_pc](const StackMap& sm) {
+        return sm.GetPackedNativePc() < packed_pc && sm.GetKind() != StackMap::Kind::Catch;
+      });
+}
+
+StackMap CodeInfo::GetStackMapForNativePcOffset(uint32_t pc, InstructionSet isa) const {
+  auto it = BinarySearchNativePc(StackMap::PackNativePc(pc, isa));
+  // Start at the lower bound and iterate over all stack maps with the given native pc.
+  for (; it != stack_maps_.end() && (*it).GetNativePcOffset(isa) == pc; ++it) {
+    StackMap::Kind kind = static_cast<StackMap::Kind>((*it).GetKind());
+    if (kind == StackMap::Kind::Default || kind == StackMap::Kind::OSR) {
+      return *it;
+    }
+  }
+  return StackMap();
+}
+
 // Scan backward to determine dex register locations at given stack map.
 // All registers for a stack map are combined - inlined registers are just appended,
 // therefore 'first_dex_register' allows us to select a sub-range to decode.
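BinarySearchNativePc leans on the layout that the new DCHECKs in StackMapStream::BeginStackMapEntry enforce: non-catch stack maps sorted by packed pc, catch stack maps grouped at the end. Its predicate treats Kind::Catch entries as never "less than" the target, so the sequence stays partitioned and std::partition_point still returns the lower bound. A standalone sketch of that predicate on plain data; Entry and this Kind enum are stand-ins, not the ART types:

#include <algorithm>
#include <cassert>
#include <cstdint>
#include <vector>

enum class Kind { Default, OSR, Catch };
struct Entry { uint32_t packed_pc; Kind kind; };

int main() {
  // Sorted non-catch maps, then catch maps (possibly out of order).
  std::vector<Entry> maps = {
      {4, Kind::Default}, {8, Kind::OSR}, {8, Kind::Default}, {12, Kind::Default},
      {6, Kind::Catch}, {2, Kind::Catch},  // catch maps: unordered tail
  };
  const uint32_t target = 8;
  // Predicate is true for sorted entries below the target and false for
  // everything after, including the whole catch tail, so the input is
  // partitioned exactly as partition_point requires.
  auto it = std::partition_point(maps.begin(), maps.end(), [target](const Entry& e) {
    return e.packed_pc < target && e.kind != Kind::Catch;
  });
  // The result is the first entry with packed_pc >= 8; duplicates with the same
  // pc follow contiguously, matching the linear scan in GetStackMapForNativePcOffset.
  assert(it->packed_pc == 8 && it->kind == Kind::OSR);
  return 0;
}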
diff --git a/runtime/stack_map.h b/runtime/stack_map.h
index 64a084f1c2..7aac792523 100644
--- a/runtime/stack_map.h
+++ b/runtime/stack_map.h
@@ -412,21 +412,7 @@ class CodeInfo {
     return StackMap();
   }
 
-  StackMap GetStackMapForNativePcOffset(uint32_t pc, InstructionSet isa = kRuntimeISA) const {
-    // TODO: Safepoint stack maps are sorted by native_pc_offset but catch stack
-    // maps are not. If we knew that the method does not have try/catch,
-    // we could do binary search.
-    for (size_t i = 0, e = GetNumberOfStackMaps(); i < e; ++i) {
-      StackMap stack_map = GetStackMapAt(i);
-      if (stack_map.GetNativePcOffset(isa) == pc) {
-        StackMap::Kind kind = static_cast<StackMap::Kind>(stack_map.GetKind());
-        if (kind == StackMap::Kind::Default || kind == StackMap::Kind::OSR) {
-          return stack_map;
-        }
-      }
-    }
-    return StackMap();
-  }
+  StackMap GetStackMapForNativePcOffset(uint32_t pc, InstructionSet isa = kRuntimeISA) const;
 
   InvokeInfo GetInvokeInfoForNativePcOffset(uint32_t native_pc_offset) {
     for (size_t index = 0; index < invoke_infos_.NumRows(); index++) {
@@ -450,6 +436,10 @@ class CodeInfo {
   void AddSizeStats(/*out*/ Stats* parent) const;
 
  private:
+  // Returns the lower bound (first stack map which has pc greater than or equal to the
+  // desired one). Ignores catch stack maps at the end (as if they had the maximum pc value).
+  BitTable<StackMap>::const_iterator BinarySearchNativePc(uint32_t packed_pc) const;
+
   // Scan backward to determine dex register locations at given stack map.
   void DecodeDexRegisterMap(uint32_t stack_map_index,
                             uint32_t first_dex_register,
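The sortedness DCHECKs in the first hunk and the binary search in stack_map.cc are two halves of one contract. A hypothetical checker, not part of this change, that states the precondition in one place (Entry and Kind are the same invented stand-ins as above):

#include <cassert>
#include <cstdint>
#include <vector>

enum class Kind { Default, OSR, Catch };
struct Entry { uint32_t packed_pc; Kind kind; };

// True iff non-catch maps are sorted by packed pc and all catch maps form the
// tail -- exactly the layout BinarySearchNativePc depends on.
bool IsBinarySearchable(const std::vector<Entry>& maps) {
  bool in_catch_tail = false;
  uint32_t last_pc = 0;
  for (const Entry& e : maps) {
    if (e.kind == Kind::Catch) {
      in_catch_tail = true;  // only catch maps may follow from here on
    } else if (in_catch_tail) {
      return false;  // non-catch map after a catch map
    } else if (e.packed_pc < last_pc) {
      return false;  // non-catch maps out of order
    } else {
      last_pc = e.packed_pc;
    }
  }
  return true;
}

int main() {
  std::vector<Entry> ok = {{4, Kind::Default}, {8, Kind::OSR}, {2, Kind::Catch}};
  std::vector<Entry> unsorted = {{8, Kind::Default}, {4, Kind::Default}};
  std::vector<Entry> catch_not_last = {{2, Kind::Catch}, {8, Kind::Default}};
  assert(IsBinarySearchable(ok));
  assert(!IsBinarySearchable(unsorted));
  assert(!IsBinarySearchable(catch_not_last));
  return 0;
}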