Stack walk: Cache CodeInfo and StackMap for current PC.
This speeds up maps startup by 0.15%.
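
The cache lives in two new StackVisitor members whose declarations (in runtime/stack.h) are not part of this diff. Judging from how they are initialized and compared below, they are presumably declared roughly as follows; 'mutable' is an assumption, needed only because both accessors are const, and the uintptr_t key is inferred from cur_quick_frame_pc_:

    // Presumed additions to StackVisitor in runtime/stack.h (not shown in this diff).
    CodeInfo* GetCurrentInlineInfo() const;
    StackMap* GetCurrentStackMap() const;
    // One-entry caches: the decoded CodeInfo keyed by method header, and the
    // StackMap keyed by the frame PC it was looked up for.
    mutable std::pair<const OatQuickMethodHeader*, CodeInfo> cur_inline_info_;
    mutable std::pair<uintptr_t, StackMap> cur_stack_map_;

Keying each cache on the value that determines its contents means the entries invalidate themselves as the walk moves to a new method or PC, so no explicit reset is needed.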
Test: test.py -b --host --64 --optimizing
Change-Id: Ic37eeba727148b877f21fdfacfa9f55558db88a7
diff --git a/runtime/stack.cc b/runtime/stack.cc
index ec89d3f..172fe3e 100644
--- a/runtime/stack.cc
+++ b/runtime/stack.cc
@@ -69,6 +69,8 @@
cur_oat_quick_method_header_(nullptr),
num_frames_(num_frames),
cur_depth_(0),
+ cur_inline_info_(nullptr, CodeInfo()),
+ cur_stack_map_(0, StackMap()),
context_(context),
check_suspended_(check_suspended) {
if (check_suspended_) {
@@ -76,15 +78,34 @@
}
}

+CodeInfo* StackVisitor::GetCurrentInlineInfo() const {
+ DCHECK(!(*cur_quick_frame_)->IsNative());
+ const OatQuickMethodHeader* header = GetCurrentOatQuickMethodHeader();
+ if (cur_inline_info_.first != header) {
+ cur_inline_info_ = std::make_pair(header, CodeInfo(header, CodeInfo::DecodeFlags::InlineInfoOnly));
+ }
+ return &cur_inline_info_.second;
+}
+
+StackMap* StackVisitor::GetCurrentStackMap() const {
+ DCHECK(!(*cur_quick_frame_)->IsNative());
+ const OatQuickMethodHeader* header = GetCurrentOatQuickMethodHeader();
+ if (cur_stack_map_.first != cur_quick_frame_pc_) {
+ uint32_t pc = header->NativeQuickPcOffset(cur_quick_frame_pc_);
+ cur_stack_map_ = std::make_pair(cur_quick_frame_pc_,
+ GetCurrentInlineInfo()->GetStackMapForNativePcOffset(pc));
+ }
+ return &cur_stack_map_.second;
+}
+
ArtMethod* StackVisitor::GetMethod() const {
if (cur_shadow_frame_ != nullptr) {
return cur_shadow_frame_->GetMethod();
} else if (cur_quick_frame_ != nullptr) {
if (IsInInlinedFrame()) {
- const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
- CodeInfo code_info(method_header);
+ CodeInfo* code_info = GetCurrentInlineInfo();
DCHECK(walk_kind_ != StackWalkKind::kSkipInlinedFrames);
- return GetResolvedMethod(*GetCurrentQuickFrame(), code_info, current_inline_frames_);
+ return GetResolvedMethod(*GetCurrentQuickFrame(), *code_info, current_inline_frames_);
} else {
return *cur_quick_frame_;
}
@@ -100,6 +121,10 @@
return current_inline_frames_.back().GetDexPc();
} else if (cur_oat_quick_method_header_ == nullptr) {
return dex::kDexNoIndex;
+ } else if (!(*GetCurrentQuickFrame())->IsNative()) {
+ StackMap* stack_map = GetCurrentStackMap();
+ DCHECK(stack_map->IsValid());
+ return stack_map->GetDexPc();
} else {
return cur_oat_quick_method_header_->ToDexPc(
GetMethod(), cur_quick_frame_pc_, abort_on_failure);
@@ -819,14 +844,11 @@
&& !method->IsNative() // JNI methods cannot have any inlined frames.
&& CodeInfo::HasInlineInfo(cur_oat_quick_method_header_->GetOptimizedCodeInfoPtr())) {
DCHECK_NE(cur_quick_frame_pc_, 0u);
- current_code_info_ = CodeInfo(cur_oat_quick_method_header_,
- CodeInfo::DecodeFlags::InlineInfoOnly);
- uint32_t native_pc_offset =
- cur_oat_quick_method_header_->NativeQuickPcOffset(cur_quick_frame_pc_);
- StackMap stack_map = current_code_info_.GetStackMapForNativePcOffset(native_pc_offset);
- if (stack_map.IsValid() && stack_map.HasInlineInfo()) {
+ CodeInfo* code_info = GetCurrentInlineInfo();
+ StackMap* stack_map = GetCurrentStackMap();
+ if (stack_map->IsValid() && stack_map->HasInlineInfo()) {
DCHECK_EQ(current_inline_frames_.size(), 0u);
- for (current_inline_frames_ = current_code_info_.GetInlineInfosOf(stack_map);
+ for (current_inline_frames_ = code_info->GetInlineInfosOf(*stack_map);
!current_inline_frames_.empty();
current_inline_frames_.pop_back()) {
bool should_continue = VisitFrame();