author     2019-06-03 14:35:22 +0100
committer  2019-06-25 11:03:34 +0000
commit     145a18a3771e8a3ba5105a759d274efab3685431
tree       9fdbf8746b465e6c34be07573bfc7b90f149c475
parent     89867bf1273fd76c6421b4f663076be08d6b51c0
Stack walk: Cache CodeInfo and StackMap for current PC.
This speeds up maps startup by 0.15%.
Test: test.py -b --host --64 --optimizing
Change-Id: Ic37eeba727148b877f21fdfacfa9f55558db88a7
-rw-r--r--  libartbase/base/bit_table.h |  1
-rw-r--r--  runtime/stack.cc            | 42
-rw-r--r--  runtime/stack.h             | 10
3 files changed, 42 insertions, 11 deletions
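
The patch replaces per-call decoding with two single-entry caches: the decoded CodeInfo is memoized against the current OatQuickMethodHeader, and the looked-up StackMap against the current frame PC, so repeated queries for the same frame (GetMethod(), GetDexPc(), WalkStack()) skip the redundant work. Below is a minimal sketch of the header-keyed level; Walker, MethodHeader, DecodedCodeInfo and Decode are hypothetical stand-ins, not the ART API:

    #include <utility>

    // Hypothetical stand-ins for OatQuickMethodHeader/CodeInfo; the real
    // CodeInfo decodes compressed metadata and is expensive to construct.
    struct MethodHeader {};
    struct DecodedCodeInfo { const MethodHeader* source; };

    class Walker {
     public:
      // Single-entry cache keyed by the method header: re-decode only when
      // the walk moves to a frame of a different method.
      const DecodedCodeInfo* GetCodeInfo(const MethodHeader* header) const {
        if (code_info_cache_.first != header) {
          code_info_cache_ = std::make_pair(header, Decode(header));
        }
        return &code_info_cache_.second;
      }

     private:
      static DecodedCodeInfo Decode(const MethodHeader* header) {
        return DecodedCodeInfo{header};  // stands in for the expensive decode
      }

      // 'mutable' lets the const getter refresh the cache, as in the patch.
      mutable std::pair<const MethodHeader*, DecodedCodeInfo> code_info_cache_{
          nullptr, DecodedCodeInfo{}};
    };

A std::pair serves as key plus value, mirroring cur_inline_info_ in the patch: comparing .first against the current key decides whether .second is still valid, so no separate validity flag is needed.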
diff --git a/libartbase/base/bit_table.h b/libartbase/base/bit_table.h
index 5ec162d1e8..0c1b04e720 100644
--- a/libartbase/base/bit_table.h
+++ b/libartbase/base/bit_table.h
@@ -108,6 +108,7 @@ class BitTableAccessor {
   static constexpr uint32_t kNumColumns = NumColumns;
   static constexpr uint32_t kNoValue = BitTableBase<kNumColumns>::kNoValue;
 
+  BitTableAccessor() = default;
   BitTableAccessor(const BitTableBase<kNumColumns>* table, uint32_t row)
       : table_(table), row_(row) {
     DCHECK(table_ != nullptr);
diff --git a/runtime/stack.cc b/runtime/stack.cc
index ec89d3f3a5..172fe3eb16 100644
--- a/runtime/stack.cc
+++ b/runtime/stack.cc
@@ -69,6 +69,8 @@ StackVisitor::StackVisitor(Thread* thread,
       cur_oat_quick_method_header_(nullptr),
       num_frames_(num_frames),
       cur_depth_(0),
+      cur_inline_info_(nullptr, CodeInfo()),
+      cur_stack_map_(0, StackMap()),
       context_(context),
       check_suspended_(check_suspended) {
   if (check_suspended_) {
@@ -76,15 +78,34 @@ StackVisitor::StackVisitor(Thread* thread,
   }
 }
 
+CodeInfo* StackVisitor::GetCurrentInlineInfo() const {
+  DCHECK(!(*cur_quick_frame_)->IsNative());
+  const OatQuickMethodHeader* header = GetCurrentOatQuickMethodHeader();
+  if (cur_inline_info_.first != header) {
+    cur_inline_info_ = std::make_pair(header, CodeInfo(header, CodeInfo::InlineInfoOnly));
+  }
+  return &cur_inline_info_.second;
+}
+
+StackMap* StackVisitor::GetCurrentStackMap() const {
+  DCHECK(!(*cur_quick_frame_)->IsNative());
+  const OatQuickMethodHeader* header = GetCurrentOatQuickMethodHeader();
+  if (cur_stack_map_.first != cur_quick_frame_pc_) {
+    uint32_t pc = header->NativeQuickPcOffset(cur_quick_frame_pc_);
+    cur_stack_map_ = std::make_pair(cur_quick_frame_pc_,
+                                    GetCurrentInlineInfo()->GetStackMapForNativePcOffset(pc));
+  }
+  return &cur_stack_map_.second;
+}
+
 ArtMethod* StackVisitor::GetMethod() const {
   if (cur_shadow_frame_ != nullptr) {
     return cur_shadow_frame_->GetMethod();
   } else if (cur_quick_frame_ != nullptr) {
     if (IsInInlinedFrame()) {
-      const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
-      CodeInfo code_info(method_header);
+      CodeInfo* code_info = GetCurrentInlineInfo();
       DCHECK(walk_kind_ != StackWalkKind::kSkipInlinedFrames);
-      return GetResolvedMethod(*GetCurrentQuickFrame(), code_info, current_inline_frames_);
+      return GetResolvedMethod(*GetCurrentQuickFrame(), *code_info, current_inline_frames_);
     } else {
       return *cur_quick_frame_;
     }
@@ -100,6 +121,10 @@ uint32_t StackVisitor::GetDexPc(bool abort_on_failure) const {
     return current_inline_frames_.back().GetDexPc();
   } else if (cur_oat_quick_method_header_ == nullptr) {
     return dex::kDexNoIndex;
+  } else if (!(*GetCurrentQuickFrame())->IsNative()) {
+    StackMap* stack_map = GetCurrentStackMap();
+    DCHECK(stack_map->IsValid());
+    return stack_map->GetDexPc();
   } else {
     return cur_oat_quick_method_header_->ToDexPc(
         GetMethod(), cur_quick_frame_pc_, abort_on_failure);
@@ -819,14 +844,11 @@ void StackVisitor::WalkStack(bool include_transitions) {
           && !method->IsNative()  // JNI methods cannot have any inlined frames.
           && CodeInfo::HasInlineInfo(cur_oat_quick_method_header_->GetOptimizedCodeInfoPtr())) {
         DCHECK_NE(cur_quick_frame_pc_, 0u);
-        current_code_info_ = CodeInfo(cur_oat_quick_method_header_,
-                                      CodeInfo::DecodeFlags::InlineInfoOnly);
-        uint32_t native_pc_offset =
-            cur_oat_quick_method_header_->NativeQuickPcOffset(cur_quick_frame_pc_);
-        StackMap stack_map = current_code_info_.GetStackMapForNativePcOffset(native_pc_offset);
-        if (stack_map.IsValid() && stack_map.HasInlineInfo()) {
+        CodeInfo* code_info = GetCurrentInlineInfo();
+        StackMap* stack_map = GetCurrentStackMap();
+        if (stack_map->IsValid() && stack_map->HasInlineInfo()) {
           DCHECK_EQ(current_inline_frames_.size(), 0u);
-          for (current_inline_frames_ = current_code_info_.GetInlineInfosOf(stack_map);
+          for (current_inline_frames_ = code_info->GetInlineInfosOf(*stack_map);
                !current_inline_frames_.empty();
                current_inline_frames_.pop_back()) {
             bool should_continue = VisitFrame();
diff --git a/runtime/stack.h b/runtime/stack.h
index ff80d132f2..aa741dfe81 100644
--- a/runtime/stack.h
+++ b/runtime/stack.h
@@ -339,6 +339,9 @@ class StackVisitor {
 
   void SanityCheckFrame() const REQUIRES_SHARED(Locks::mutator_lock_);
 
+  ALWAYS_INLINE CodeInfo* GetCurrentInlineInfo() const;
+  ALWAYS_INLINE StackMap* GetCurrentStackMap() const;
+
   Thread* const thread_;
   const StackWalkKind walk_kind_;
   ShadowFrame* cur_shadow_frame_;
@@ -351,9 +354,14 @@ class StackVisitor {
   size_t cur_depth_;
   // Current inlined frames of the method we are currently at.
   // We keep poping frames from the end as we visit the frames.
-  CodeInfo current_code_info_;
   BitTableRange<InlineInfo> current_inline_frames_;
 
+  // Cache the most recently decoded inline info data.
+  // The 'current_inline_frames_' refers to this data, so we need to keep it alive anyway.
+  // Marked mutable since the cache fields are updated from const getters.
+  mutable std::pair<const OatQuickMethodHeader*, CodeInfo> cur_inline_info_;
+  mutable std::pair<uintptr_t, StackMap> cur_stack_map_;
+
  protected:
   Context* const context_;
   const bool check_suspended_;
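
The StackMap side layers a second cache on top of the first: it is keyed by the raw cur_quick_frame_pc_, and a miss refills through GetCurrentInlineInfo(), so each frame costs at most one decode per method header and one stack-map search per PC. A sketch of that second level under the same assumptions (hypothetical names, not the ART types):

    #include <cstdint>
    #include <map>
    #include <utility>

    // Hypothetical stand-in for StackMap: the entry found for a native PC.
    struct Entry { uint32_t dex_pc = 0; bool valid = false; };

    class Walker {
     public:
      void AdvanceTo(uintptr_t pc) { current_pc_ = pc; }

      // Second-level cache keyed by the raw frame PC: repeated queries for
      // the same frame (e.g. GetDexPc() after GetMethod()) reuse the last
      // lookup instead of searching the decoded table again.
      const Entry* GetEntry() const {
        if (entry_cache_.first != current_pc_) {
          entry_cache_ = std::make_pair(current_pc_, Lookup(current_pc_));
        }
        return &entry_cache_.second;
      }

     private:
      Entry Lookup(uintptr_t pc) const {
        auto it = table_.find(static_cast<uint32_t>(pc));
        return it != table_.end() ? it->second : Entry{};
      }

      uintptr_t current_pc_ = 0;
      std::map<uint32_t, Entry> table_;  // stands in for the decoded stack maps
      mutable std::pair<uintptr_t, Entry> entry_cache_{0, Entry{}};
    };

One caveat of seeding the key with 0, which the patch guards against with DCHECK_NE(cur_quick_frame_pc_, 0u): a genuine PC of zero would read as a stale cache hit.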