Support for inlining methods that call/throw.
Mostly small fixes here and there to make it work.
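
With inlining enabled, code compiled by the optimizing compiler can reach a
runtime entrypoint (resolution, allocation, throwing) from within an inlined
callee. The runtime then has to attribute the call to the inlined method and
its dex pc rather than to the outer, physical method. A condensed sketch of
the mechanism used throughout this change follows; the helper name is
hypothetical, but every API call it makes also appears in the hunks below:

// Sketch only, not part of the patch: recover the logical caller for a return
// pc that lands in an optimized method, looking through inlined frames.
mirror::ArtMethod* GetLogicalCaller(mirror::ArtMethod* outer_method, uintptr_t caller_pc)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  uintptr_t native_pc_offset = outer_method->NativeQuickPcOffset(caller_pc);
  CodeInfo code_info = outer_method->GetOptimizedCodeInfo();
  StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset);
  if (!stack_map.HasInlineInfo(code_info)) {
    return outer_method;  // The call site was not inlined.
  }
  // The entry at GetDepth() - 1 is the innermost inlined frame, i.e. the
  // method that actually made the call or threw.
  InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
  uint32_t method_index = inline_info.GetMethodIndexAtDepth(inline_info.GetDepth() - 1);
  InvokeType invoke_type = static_cast<InvokeType>(
      inline_info.GetInvokeTypeAtDepth(inline_info.GetDepth() - 1));
  return GetResolvedMethod(outer_method, method_index, invoke_type);
}
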
Change-Id: I1b535e895105d78b65634636d675b818551f783e
diff --git a/runtime/entrypoints/entrypoint_utils-inl.h b/runtime/entrypoints/entrypoint_utils-inl.h
index 625e695..526fb8d 100644
--- a/runtime/entrypoints/entrypoint_utils-inl.h
+++ b/runtime/entrypoints/entrypoint_utils-inl.h
@@ -38,6 +38,29 @@
namespace art {
+inline mirror::ArtMethod* GetResolvedMethod(mirror::ArtMethod* outer_method,
+ uint32_t method_index,
+ InvokeType invoke_type)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ mirror::ArtMethod* caller = outer_method->GetDexCacheResolvedMethod(method_index);
+ if (!caller->IsRuntimeMethod()) {
+ return caller;
+ }
+
+ // The method in the dex cache can be the runtime method responsible for invoking
+ // the stub that will then update the dex cache. Therefore, we need to do the
+ // resolution ourselves.
+
+ StackHandleScope<3> hs(Thread::Current());
+ ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
+ Handle<mirror::ArtMethod> outer(hs.NewHandle(outer_method));
+ Handle<mirror::ClassLoader> class_loader(hs.NewHandle(outer->GetClassLoader()));
+ Handle<mirror::DexCache> dex_cache(hs.NewHandle(outer->GetDexCache()));
+ Handle<mirror::ArtMethod> referrer;
+ return class_linker->ResolveMethod(
+ *outer->GetDexFile(), method_index, dex_cache, class_loader, referrer, invoke_type);
+}
+
inline mirror::ArtMethod* GetCalleeSaveMethodCaller(StackReference<mirror::ArtMethod>* sp,
Runtime::CalleeSaveType type,
bool do_caller_check = false)
@@ -47,7 +70,25 @@
const size_t callee_frame_size = GetCalleeSaveFrameSize(kRuntimeISA, type);
auto* caller_sp = reinterpret_cast<StackReference<mirror::ArtMethod>*>(
reinterpret_cast<uintptr_t>(sp) + callee_frame_size);
- auto* caller = caller_sp->AsMirrorPtr();
+ mirror::ArtMethod* outer_method = caller_sp->AsMirrorPtr();
+ mirror::ArtMethod* caller = outer_method;
+
+ if ((outer_method != nullptr) && outer_method->IsOptimized(sizeof(void*))) {
+ const size_t callee_return_pc_offset = GetCalleeSaveReturnPcOffset(kRuntimeISA, type);
+ uintptr_t caller_pc = *reinterpret_cast<uintptr_t*>(
+ (reinterpret_cast<uint8_t*>(sp) + callee_return_pc_offset));
+ uintptr_t native_pc_offset = outer_method->NativeQuickPcOffset(caller_pc);
+ CodeInfo code_info = outer_method->GetOptimizedCodeInfo();
+ StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset);
+ DCHECK(stack_map.IsValid());
+ if (stack_map.HasInlineInfo(code_info)) {
+ InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
+ uint32_t method_index = inline_info.GetMethodIndexAtDepth(inline_info.GetDepth() - 1);
+ InvokeType invoke_type = static_cast<InvokeType>(
+ inline_info.GetInvokeTypeAtDepth(inline_info.GetDepth() - 1));
+ caller = GetResolvedMethod(outer_method, method_index, invoke_type);
+ }
+ }
if (kIsDebugBuild && do_caller_check) {
// Note that do_caller_check is optional, as this method can be called by
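
For reference, a hedged sketch (not part of the patch) of the per-depth inline
info consumed above. It assumes depth 0 is the outermost inlined frame and
GetDepth() - 1 the innermost, which is the entry the new code reads; every
accessor used here also appears in the hunks of this change:

// Hypothetical debugging helper, sketch only.
void DumpInlinedFrames(CodeInfo code_info, StackMap stack_map) {
  if (!stack_map.HasInlineInfo(code_info)) {
    return;  // No inlined frames at this stack map.
  }
  InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
  for (size_t depth = 0; depth < inline_info.GetDepth(); ++depth) {
    LOG(INFO) << "inlined frame " << depth
              << ": method_index=" << inline_info.GetMethodIndexAtDepth(depth)
              << ", dex_pc=" << inline_info.GetDexPcAtDepth(depth)
              << ", invoke_type="
              << static_cast<InvokeType>(inline_info.GetInvokeTypeAtDepth(depth));
  }
}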
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
index c029eeb..33d7065 100644
--- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
@@ -297,10 +297,37 @@
return GetCalleeSaveMethodCaller(sp, Runtime::kRefsAndArgs);
}
+ static mirror::ArtMethod* GetOuterMethod(StackReference<mirror::ArtMethod>* sp)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ DCHECK(sp->AsMirrorPtr()->IsCalleeSaveMethod());
+ uint8_t* previous_sp =
+ reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize;
+ return reinterpret_cast<StackReference<mirror::ArtMethod>*>(previous_sp)->AsMirrorPtr();
+ }
+
static uint32_t GetCallingDexPc(StackReference<mirror::ArtMethod>* sp)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
DCHECK(sp->AsMirrorPtr()->IsCalleeSaveMethod());
- return GetCallingMethod(sp)->ToDexPc(QuickArgumentVisitor::GetCallingPc(sp));
+ const size_t callee_frame_size = GetCalleeSaveFrameSize(kRuntimeISA, Runtime::kRefsAndArgs);
+ auto* caller_sp = reinterpret_cast<StackReference<mirror::ArtMethod>*>(
+ reinterpret_cast<uintptr_t>(sp) + callee_frame_size);
+ mirror::ArtMethod* outer_method = caller_sp->AsMirrorPtr();
+ uintptr_t outer_pc = QuickArgumentVisitor::GetCallingPc(sp);
+ uintptr_t outer_pc_offset = outer_method->NativeQuickPcOffset(outer_pc);
+
+ if (outer_method->IsOptimized(sizeof(void*))) {
+ CodeInfo code_info = outer_method->GetOptimizedCodeInfo();
+ StackMap stack_map = code_info.GetStackMapForNativePcOffset(outer_pc_offset);
+ DCHECK(stack_map.IsValid());
+ if (stack_map.HasInlineInfo(code_info)) {
+ InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
+ return inline_info.GetDexPcAtDepth(inline_info.GetDepth() - 1);
+ } else {
+ return stack_map.GetDexPc(code_info);
+ }
+ } else {
+ return outer_method->ToDexPc(outer_pc);
+ }
}
// For the given quick ref and args quick frame, return the caller's PC.
@@ -2068,7 +2095,11 @@
StackReference<mirror::ArtMethod>* sp)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
ScopedQuickEntrypointChecks sqec(self);
- mirror::ArtMethod* caller_method = QuickArgumentVisitor::GetCallingMethod(sp);
+ // The optimizing compiler currently does not inline methods that have an interface
+ // invocation. We use the outer method directly to avoid fetching a stack map, which is
+ // more expensive.
+ mirror::ArtMethod* caller_method = QuickArgumentVisitor::GetOuterMethod(sp);
+ DCHECK_EQ(caller_method, QuickArgumentVisitor::GetCallingMethod(sp));
mirror::ArtMethod* interface_method = caller_method->GetDexCacheResolvedMethod(dex_method_idx);
mirror::ArtMethod* method;
if (LIKELY(interface_method->GetDexMethodIndex() != DexFile::kDexNoIndex)) {
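
As a hedged usage sketch (not part of the patch, and assuming the
QuickArgumentVisitor helpers are accessible from the call site): the dex pc
returned by GetCallingDexPc() is typically used to re-decode the invoke
instruction at the call site, so with inlined frames the dex pc and the
calling method must refer to the same (inlined) method for the decoded opcode
to make sense.

// Sketch only: log the invoke opcode at the logical call site.
static void LogCallSiteOpcode(StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  mirror::ArtMethod* caller = QuickArgumentVisitor::GetCallingMethod(sp);
  uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
  const DexFile::CodeItem* code_item = caller->GetCodeItem();
  CHECK_LT(dex_pc, code_item->insns_size_in_code_units_);
  const Instruction* instr = Instruction::At(&code_item->insns_[dex_pc]);
  LOG(INFO) << "call site opcode: " << static_cast<int>(instr->Opcode());
}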
diff --git a/runtime/stack.cc b/runtime/stack.cc
index f7b96ea..09b56a1 100644
--- a/runtime/stack.cc
+++ b/runtime/stack.cc
@@ -18,6 +18,7 @@
#include "arch/context.h"
#include "base/hex_dump.h"
+#include "entrypoints/entrypoint_utils-inl.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc_map.h"
#include "mirror/art_method-inl.h"
@@ -119,8 +120,11 @@
} else if (cur_quick_frame_ != nullptr) {
if (IsInInlinedFrame()) {
size_t depth_in_stack_map = current_inlining_depth_ - 1;
- return GetCurrentQuickFrame()->AsMirrorPtr()->GetDexCacheResolvedMethod(
- GetCurrentInlineInfo().GetMethodIndexAtDepth(depth_in_stack_map));
+ InlineInfo inline_info = GetCurrentInlineInfo();
+ uint32_t method_index = inline_info.GetMethodIndexAtDepth(depth_in_stack_map);
+ InvokeType invoke_type =
+ static_cast<InvokeType>(inline_info.GetInvokeTypeAtDepth(depth_in_stack_map));
+ return GetResolvedMethod(GetCurrentQuickFrame()->AsMirrorPtr(), method_index, invoke_type);
} else {
return cur_quick_frame_->AsMirrorPtr();
}
@@ -761,6 +765,7 @@
if (UNLIKELY(!should_continue)) {
return;
}
+ cur_depth_++;
}
}
}
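
The last hunk sits inside the walk over inlined frames; an approximate sketch
of that loop (the actual WalkStack code differs in detail, member names taken
from the hunks above) shows what the added increment changes: the visitor's
depth now advances once per inlined frame, keeping GetFrameDepth() consistent
with the number of frames reported when inlined frames are included in the walk.

// Approximate shape, sketch only:
for (current_inlining_depth_ = inline_info.GetDepth();
     current_inlining_depth_ != 0;
     --current_inlining_depth_) {
  bool should_continue = VisitFrame();  // Reports one inlined frame.
  if (UNLIKELY(!should_continue)) {
    return;
  }
  cur_depth_++;  // New in this change: count the inlined frame in the depth.
}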