summaryrefslogtreecommitdiff
path: root/compiler/jni/quick
diff options
context:
space:
mode:
Diffstat (limited to 'compiler/jni/quick')
-rw-r--r--compiler/jni/quick/arm/calling_convention_arm.cc6
-rw-r--r--compiler/jni/quick/arm/calling_convention_arm.h4
-rw-r--r--compiler/jni/quick/arm64/calling_convention_arm64.cc6
-rw-r--r--compiler/jni/quick/arm64/calling_convention_arm64.h4
-rw-r--r--compiler/jni/quick/calling_convention.cc12
-rw-r--r--compiler/jni/quick/calling_convention.h8
-rw-r--r--compiler/jni/quick/jni_compiler.cc190
-rw-r--r--compiler/jni/quick/jni_compiler.h3
-rw-r--r--compiler/jni/quick/x86/calling_convention_x86.cc6
-rw-r--r--compiler/jni/quick/x86/calling_convention_x86.h4
-rw-r--r--compiler/jni/quick/x86_64/calling_convention_x86_64.cc6
-rw-r--r--compiler/jni/quick/x86_64/calling_convention_x86_64.h4
12 files changed, 204 insertions, 49 deletions
diff --git a/compiler/jni/quick/arm/calling_convention_arm.cc b/compiler/jni/quick/arm/calling_convention_arm.cc
index c1afdb8238..d81ca77b74 100644
--- a/compiler/jni/quick/arm/calling_convention_arm.cc
+++ b/compiler/jni/quick/arm/calling_convention_arm.cc
@@ -23,7 +23,7 @@
#include "base/macros.h"
#include "utils/arm/managed_register_arm.h"
-namespace art {
+namespace art HIDDEN {
namespace arm {
//
@@ -199,6 +199,10 @@ ManagedRegister ArmManagedRuntimeCallingConvention::MethodRegister() {
return ArmManagedRegister::FromCoreRegister(R0);
}
+ManagedRegister ArmManagedRuntimeCallingConvention::ArgumentRegisterForMethodExitHook() {
+ return ArmManagedRegister::FromCoreRegister(R2);
+}
+
void ArmManagedRuntimeCallingConvention::ResetIterator(FrameOffset displacement) {
ManagedRuntimeCallingConvention::ResetIterator(displacement);
gpr_index_ = 1u; // Skip r0 for ArtMethod*
diff --git a/compiler/jni/quick/arm/calling_convention_arm.h b/compiler/jni/quick/arm/calling_convention_arm.h
index 4526d9e759..3a09d4eaad 100644
--- a/compiler/jni/quick/arm/calling_convention_arm.h
+++ b/compiler/jni/quick/arm/calling_convention_arm.h
@@ -18,9 +18,10 @@
#define ART_COMPILER_JNI_QUICK_ARM_CALLING_CONVENTION_ARM_H_
#include "base/enums.h"
+#include "base/macros.h"
#include "jni/quick/calling_convention.h"
-namespace art {
+namespace art HIDDEN {
namespace arm {
class ArmManagedRuntimeCallingConvention final : public ManagedRuntimeCallingConvention {
@@ -39,6 +40,7 @@ class ArmManagedRuntimeCallingConvention final : public ManagedRuntimeCallingCon
void ResetIterator(FrameOffset displacement) override;
// Managed runtime calling convention
ManagedRegister MethodRegister() override;
+ ManagedRegister ArgumentRegisterForMethodExitHook() override;
void Next() override;
bool IsCurrentParamInRegister() override;
bool IsCurrentParamOnStack() override;
diff --git a/compiler/jni/quick/arm64/calling_convention_arm64.cc b/compiler/jni/quick/arm64/calling_convention_arm64.cc
index ec77db3dcb..e716502911 100644
--- a/compiler/jni/quick/arm64/calling_convention_arm64.cc
+++ b/compiler/jni/quick/arm64/calling_convention_arm64.cc
@@ -22,7 +22,7 @@
#include "arch/instruction_set.h"
#include "utils/arm64/managed_register_arm64.h"
-namespace art {
+namespace art HIDDEN {
namespace arm64 {
static constexpr ManagedRegister kXArgumentRegisters[] = {
@@ -174,6 +174,10 @@ ManagedRegister Arm64ManagedRuntimeCallingConvention::MethodRegister() {
return Arm64ManagedRegister::FromXRegister(X0);
}
+ManagedRegister Arm64ManagedRuntimeCallingConvention::ArgumentRegisterForMethodExitHook() {
+ return Arm64ManagedRegister::FromXRegister(X4);
+}
+
bool Arm64ManagedRuntimeCallingConvention::IsCurrentParamInRegister() {
if (IsCurrentParamAFloatOrDouble()) {
return itr_float_and_doubles_ < kMaxFloatOrDoubleRegisterArguments;
diff --git a/compiler/jni/quick/arm64/calling_convention_arm64.h b/compiler/jni/quick/arm64/calling_convention_arm64.h
index 176271e3dc..f29eb15fa8 100644
--- a/compiler/jni/quick/arm64/calling_convention_arm64.h
+++ b/compiler/jni/quick/arm64/calling_convention_arm64.h
@@ -18,9 +18,10 @@
#define ART_COMPILER_JNI_QUICK_ARM64_CALLING_CONVENTION_ARM64_H_
#include "base/enums.h"
+#include "base/macros.h"
#include "jni/quick/calling_convention.h"
-namespace art {
+namespace art HIDDEN {
namespace arm64 {
class Arm64ManagedRuntimeCallingConvention final : public ManagedRuntimeCallingConvention {
@@ -35,6 +36,7 @@ class Arm64ManagedRuntimeCallingConvention final : public ManagedRuntimeCallingC
ManagedRegister ReturnRegister() const override;
// Managed runtime calling convention
ManagedRegister MethodRegister() override;
+ ManagedRegister ArgumentRegisterForMethodExitHook() override;
bool IsCurrentParamInRegister() override;
bool IsCurrentParamOnStack() override;
ManagedRegister CurrentParamRegister() override;
diff --git a/compiler/jni/quick/calling_convention.cc b/compiler/jni/quick/calling_convention.cc
index eb4d3724ee..2b9da6ba1a 100644
--- a/compiler/jni/quick/calling_convention.cc
+++ b/compiler/jni/quick/calling_convention.cc
@@ -37,7 +37,7 @@
#include "jni/quick/x86_64/calling_convention_x86_64.h"
#endif
-namespace art {
+namespace art HIDDEN {
// Managed runtime calling convention
@@ -74,6 +74,10 @@ std::unique_ptr<ManagedRuntimeCallingConvention> ManagedRuntimeCallingConvention
is_static, is_synchronized, shorty));
#endif
default:
+ UNUSED(allocator);
+ UNUSED(is_static);
+ UNUSED(is_synchronized);
+ UNUSED(shorty);
LOG(FATAL) << "Unknown InstructionSet: " << instruction_set;
UNREACHABLE();
}
@@ -165,6 +169,12 @@ std::unique_ptr<JniCallingConvention> JniCallingConvention::Create(ArenaAllocato
is_static, is_synchronized, is_fast_native, is_critical_native, shorty));
#endif
default:
+ UNUSED(allocator);
+ UNUSED(is_static);
+ UNUSED(is_synchronized);
+ UNUSED(is_fast_native);
+ UNUSED(is_critical_native);
+ UNUSED(shorty);
LOG(FATAL) << "Unknown InstructionSet: " << instruction_set;
UNREACHABLE();
}
diff --git a/compiler/jni/quick/calling_convention.h b/compiler/jni/quick/calling_convention.h
index e2f3bfb78c..0187b14256 100644
--- a/compiler/jni/quick/calling_convention.h
+++ b/compiler/jni/quick/calling_convention.h
@@ -20,11 +20,12 @@
#include "base/arena_object.h"
#include "base/array_ref.h"
#include "base/enums.h"
+#include "base/macros.h"
#include "dex/primitive.h"
#include "thread.h"
#include "utils/managed_register.h"
-namespace art {
+namespace art HIDDEN {
enum class InstructionSet;
@@ -244,6 +245,11 @@ class ManagedRuntimeCallingConvention : public CallingConvention {
// Register that holds the incoming method argument
virtual ManagedRegister MethodRegister() = 0;
+ // Register that is used to pass frame size for method exit hook call. This
+ // shouldn't be the same as the return register since method exit hook also expects
+ // return values in the return register.
+ virtual ManagedRegister ArgumentRegisterForMethodExitHook() = 0;
+
// Iterator interface
bool HasNext();
virtual void Next();
diff --git a/compiler/jni/quick/jni_compiler.cc b/compiler/jni/quick/jni_compiler.cc
index 6cb50211e1..c60d97467e 100644
--- a/compiler/jni/quick/jni_compiler.cc
+++ b/compiler/jni/quick/jni_compiler.cc
@@ -36,7 +36,9 @@
#include "dex/dex_file-inl.h"
#include "driver/compiler_options.h"
#include "entrypoints/quick/quick_entrypoints.h"
+#include "instrumentation.h"
#include "jni/jni_env_ext.h"
+#include "runtime.h"
#include "thread.h"
#include "utils/arm/managed_register_arm.h"
#include "utils/arm64/managed_register_arm64.h"
@@ -47,7 +49,7 @@
#define __ jni_asm->
-namespace art {
+namespace art HIDDEN {
constexpr size_t kIRTCookieSize = JniCallingConvention::SavedLocalReferenceCookieSize();
@@ -68,6 +70,12 @@ static void SetNativeParameter(JNIMacroAssembler<kPointerSize>* jni_asm,
ManagedRegister in_reg);
template <PointerSize kPointerSize>
+static void CallDecodeReferenceResult(JNIMacroAssembler<kPointerSize>* jni_asm,
+ JniCallingConvention* jni_conv,
+ ManagedRegister mr_return_reg,
+ size_t main_out_arg_size);
+
+template <PointerSize kPointerSize>
static std::unique_ptr<JNIMacroAssembler<kPointerSize>> GetMacroAssembler(
ArenaAllocator* allocator, InstructionSet isa, const InstructionSetFeatures* features) {
return JNIMacroAssembler<kPointerSize>::Create(allocator, isa, features);
@@ -101,6 +109,24 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
// i.e. if the method was annotated with @CriticalNative
const bool is_critical_native = (access_flags & kAccCriticalNative) != 0u;
+ bool is_debuggable = compiler_options.GetDebuggable();
+ bool needs_entry_exit_hooks = is_debuggable && compiler_options.IsJitCompiler();
+ // We don't support JITing stubs for critical native methods in debuggable runtimes yet.
+ // TODO(mythria): Add support required for calling method entry / exit hooks from critical native
+ // methods.
+ DCHECK_IMPLIES(needs_entry_exit_hooks, !is_critical_native);
+
+ // The fast-path for decoding a reference skips CheckJNI checks, so we do not inline the
+ // decoding in debug build or for debuggable apps (both cases enable CheckJNI by default).
+ bool inline_decode_reference = !kIsDebugBuild && !is_debuggable;
+
+ // When walking the stack the top frame doesn't have a pc associated with it. We then depend on
+ // the invariant that we don't have JITed code when AOT code is available. In debuggable runtimes
+ // this invariant doesn't hold. So we tag the SP for JITed code to identify if we are executing
+ // JITed code or AOT code. Since tagging involves additional instructions we tag only in
+ // debuggable runtimes.
+ bool should_tag_sp = needs_entry_exit_hooks;
+
VLOG(jni) << "JniCompile: Method :: "
<< dex_file.PrettyMethod(method_idx, /* with signature */ true)
<< " :: access_flags = " << std::hex << access_flags << std::dec;
@@ -182,7 +208,7 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
// Skip this for @CriticalNative because we're not passing a `jclass` to the native method.
std::unique_ptr<JNIMacroLabel> jclass_read_barrier_slow_path;
std::unique_ptr<JNIMacroLabel> jclass_read_barrier_return;
- if (kUseReadBarrier && is_static && LIKELY(!is_critical_native)) {
+ if (gUseReadBarrier && is_static && LIKELY(!is_critical_native)) {
jclass_read_barrier_slow_path = __ CreateLabel();
jclass_read_barrier_return = __ CreateLabel();
@@ -219,7 +245,22 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
// because garbage collections are disabled within the execution of a
// @CriticalNative method.
if (LIKELY(!is_critical_native)) {
- __ StoreStackPointerToThread(Thread::TopOfManagedStackOffset<kPointerSize>());
+ __ StoreStackPointerToThread(Thread::TopOfManagedStackOffset<kPointerSize>(), should_tag_sp);
+ }
+
+ // 1.5. Call any method entry hooks if required.
+ // For critical native methods, we don't JIT stubs in debuggable runtimes (see
+ // OptimizingCompiler::JitCompile).
+ // TODO(mythria): Add support to call method entry / exit hooks for critical native methods too.
+ std::unique_ptr<JNIMacroLabel> method_entry_hook_slow_path;
+ std::unique_ptr<JNIMacroLabel> method_entry_hook_return;
+ if (UNLIKELY(needs_entry_exit_hooks)) {
+ uint64_t address = reinterpret_cast64<uint64_t>(Runtime::Current()->GetInstrumentation());
+ int offset = instrumentation::Instrumentation::HaveMethodEntryListenersOffset().Int32Value();
+ method_entry_hook_slow_path = __ CreateLabel();
+ method_entry_hook_return = __ CreateLabel();
+ __ TestByteAndJumpIfNotZero(address + offset, method_entry_hook_slow_path.get());
+ __ Bind(method_entry_hook_return.get());
}
// 2. Lock the object (if synchronized) and transition out of Runnable (if normal native).
@@ -442,8 +483,7 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
__ Bind(transition_to_runnable_resume.get());
}
- // 5.2. For methods that return a reference, do an early exception check so that the
- // `JniDecodeReferenceResult()` in the main path does not need to check for exceptions.
+ // 5.2. For methods that return a reference, do an exception check before decoding the reference.
std::unique_ptr<JNIMacroLabel> exception_slow_path =
LIKELY(!is_critical_native) ? __ CreateLabel() : nullptr;
if (reference_return) {
@@ -462,23 +502,23 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
__ Bind(suspend_check_resume.get());
}
- // 5.4 For methods with reference return, decode the `jobject` with `JniDecodeReferenceResult()`.
+ // 5.4 For methods with reference return, decode the `jobject`, either directly
+ // or with a call to `JniDecodeReferenceResult()`.
+ std::unique_ptr<JNIMacroLabel> decode_reference_slow_path;
+ std::unique_ptr<JNIMacroLabel> decode_reference_resume;
if (reference_return) {
DCHECK(!is_critical_native);
- // We abuse the JNI calling convention here, that is guaranteed to support passing
- // two pointer arguments, `JNIEnv*` and `jclass`/`jobject`.
- main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
- ThreadOffset<kPointerSize> jni_decode_reference_result =
- QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniDecodeReferenceResult);
- // Pass result.
- SetNativeParameter(jni_asm.get(), main_jni_conv.get(), mr_conv->ReturnRegister());
- main_jni_conv->Next();
- if (main_jni_conv->IsCurrentParamInRegister()) {
- __ GetCurrentThread(main_jni_conv->CurrentParamRegister());
- __ Call(main_jni_conv->CurrentParamRegister(), Offset(jni_decode_reference_result));
+ if (inline_decode_reference) {
+ // Decode local and JNI transition references in the main path.
+ decode_reference_slow_path = __ CreateLabel();
+ decode_reference_resume = __ CreateLabel();
+ __ DecodeJNITransitionOrLocalJObject(mr_conv->ReturnRegister(),
+ decode_reference_slow_path.get(),
+ decode_reference_resume.get());
+ __ Bind(decode_reference_resume.get());
} else {
- __ GetCurrentThread(main_jni_conv->CurrentParamStackOffset());
- __ CallFromThread(jni_decode_reference_result);
+ CallDecodeReferenceResult<kPointerSize>(
+ jni_asm.get(), main_jni_conv.get(), mr_conv->ReturnRegister(), main_out_arg_size);
}
} // if (!is_critical_native)
@@ -532,7 +572,21 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
__ Bind(suspend_check_resume.get());
}
- // 7.5. Remove activation - need to restore callee save registers since the GC
+ // 7.5. Check if method exit hooks need to be called
+ // For critical native methods, we don't JIT stubs in debuggable runtimes.
+ // TODO(mythria): Add support to call method entry / exit hooks for critical native methods too.
+ std::unique_ptr<JNIMacroLabel> method_exit_hook_slow_path;
+ std::unique_ptr<JNIMacroLabel> method_exit_hook_return;
+ if (UNLIKELY(needs_entry_exit_hooks)) {
+ uint64_t address = reinterpret_cast64<uint64_t>(Runtime::Current()->GetInstrumentation());
+ int offset = instrumentation::Instrumentation::RunExitHooksOffset().Int32Value();
+ method_exit_hook_slow_path = __ CreateLabel();
+ method_exit_hook_return = __ CreateLabel();
+ __ TestByteAndJumpIfNotZero(address + offset, method_exit_hook_slow_path.get());
+ __ Bind(method_exit_hook_return.get());
+ }
+
+ // 7.6. Remove activation - need to restore callee save registers since the GC
// may have changed them.
DCHECK_EQ(jni_asm->cfi().GetCurrentCFAOffset(), static_cast<int>(current_frame_size));
if (LIKELY(!is_critical_native) || !main_jni_conv->UseTailCall()) {
@@ -547,7 +601,7 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
// 8.1. Read barrier slow path for the declaring class in the method for a static call.
// Skip this for @CriticalNative because we're not passing a `jclass` to the native method.
- if (kUseReadBarrier && is_static && !is_critical_native) {
+ if (gUseReadBarrier && is_static && !is_critical_native) {
__ Bind(jclass_read_barrier_slow_path.get());
// Construct slow path for read barrier:
@@ -594,7 +648,37 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
__ Jump(transition_to_runnable_resume.get());
}
- // 8.4. Suspend check slow path.
+ // 8.4. Exception poll slow path(s).
+ if (LIKELY(!is_critical_native)) {
+ __ Bind(exception_slow_path.get());
+ if (reference_return) {
+ // We performed the exception check early, so we need to adjust SP and pop IRT frame.
+ if (main_out_arg_size != 0) {
+ jni_asm->cfi().AdjustCFAOffset(main_out_arg_size);
+ __ DecreaseFrameSize(main_out_arg_size);
+ }
+ PopLocalReferenceFrame<kPointerSize>(
+ jni_asm.get(), jni_env_reg, saved_cookie_reg, callee_save_temp);
+ }
+ DCHECK_EQ(jni_asm->cfi().GetCurrentCFAOffset(), static_cast<int>(current_frame_size));
+ __ DeliverPendingException();
+ }
+
+ // 8.5 Slow path for decoding the `jobject`.
+ if (reference_return && inline_decode_reference) {
+ __ Bind(decode_reference_slow_path.get());
+ if (main_out_arg_size != 0) {
+ jni_asm->cfi().AdjustCFAOffset(main_out_arg_size);
+ }
+ CallDecodeReferenceResult<kPointerSize>(
+ jni_asm.get(), main_jni_conv.get(), mr_conv->ReturnRegister(), main_out_arg_size);
+ __ Jump(decode_reference_resume.get());
+ if (main_out_arg_size != 0) {
+ jni_asm->cfi().AdjustCFAOffset(-main_out_arg_size);
+ }
+ }
+
+ // 8.6. Suspend check slow path.
if (UNLIKELY(is_fast_native)) {
__ Bind(suspend_check_slow_path.get());
if (reference_return && main_out_arg_size != 0) {
@@ -605,29 +689,34 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
if (reference_return) {
// Suspend check entry point overwrites top of managed stack and leaves it clobbered.
// We need to restore the top for subsequent runtime call to `JniDecodeReferenceResult()`.
- __ StoreStackPointerToThread(Thread::TopOfManagedStackOffset<kPointerSize>());
+ __ StoreStackPointerToThread(Thread::TopOfManagedStackOffset<kPointerSize>(), should_tag_sp);
}
if (reference_return && main_out_arg_size != 0) {
__ IncreaseFrameSize(main_out_arg_size);
- jni_asm->cfi().AdjustCFAOffset(-main_out_arg_size);
}
__ Jump(suspend_check_resume.get());
+ if (reference_return && main_out_arg_size != 0) {
+ jni_asm->cfi().AdjustCFAOffset(-main_out_arg_size);
+ }
}
- // 8.5. Exception poll slow path(s).
- if (LIKELY(!is_critical_native)) {
- __ Bind(exception_slow_path.get());
- if (reference_return) {
- // We performed the exception check early, so we need to adjust SP and pop IRT frame.
- if (main_out_arg_size != 0) {
- jni_asm->cfi().AdjustCFAOffset(main_out_arg_size);
- __ DecreaseFrameSize(main_out_arg_size);
- }
- PopLocalReferenceFrame<kPointerSize>(
- jni_asm.get(), jni_env_reg, saved_cookie_reg, callee_save_temp);
- }
- DCHECK_EQ(jni_asm->cfi().GetCurrentCFAOffset(), static_cast<int>(current_frame_size));
- __ DeliverPendingException();
+ // 8.7. Method entry / exit hooks slow paths.
+ if (UNLIKELY(needs_entry_exit_hooks)) {
+ __ Bind(method_entry_hook_slow_path.get());
+ // Use Jni specific method entry hook that saves all the arguments. We have only saved the
+ // callee save registers at this point. So go through Jni specific stub that saves the rest
+ // of the live registers.
+ __ CallFromThread(QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEntryHook));
+ __ ExceptionPoll(exception_slow_path.get());
+ __ Jump(method_entry_hook_return.get());
+
+ __ Bind(method_exit_hook_slow_path.get());
+ // The method exit hook is called just before tearing down the frame. So there are no live
+ // registers and we can directly call the method exit hook and don't need a Jni specific
+ // entrypoint.
+ __ Move(mr_conv->ArgumentRegisterForMethodExitHook(), managed_frame_size);
+ __ CallFromThread(QUICK_ENTRYPOINT_OFFSET(kPointerSize, pMethodExitHook));
+ __ Jump(method_exit_hook_return.get());
}
// 9. Finalize code generation.
@@ -693,6 +782,31 @@ static void SetNativeParameter(JNIMacroAssembler<kPointerSize>* jni_asm,
}
}
+template <PointerSize kPointerSize>
+static void CallDecodeReferenceResult(JNIMacroAssembler<kPointerSize>* jni_asm,
+ JniCallingConvention* jni_conv,
+ ManagedRegister mr_return_reg,
+ size_t main_out_arg_size) {
+ // We abuse the JNI calling convention here, that is guaranteed to support passing
+ // two pointer arguments, `JNIEnv*` and `jclass`/`jobject`.
+ jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
+ ThreadOffset<kPointerSize> jni_decode_reference_result =
+ QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniDecodeReferenceResult);
+ // Pass result.
+ SetNativeParameter(jni_asm, jni_conv, mr_return_reg);
+ jni_conv->Next();
+ if (jni_conv->IsCurrentParamInRegister()) {
+ __ GetCurrentThread(jni_conv->CurrentParamRegister());
+ __ Call(jni_conv->CurrentParamRegister(), Offset(jni_decode_reference_result));
+ } else {
+ __ GetCurrentThread(jni_conv->CurrentParamStackOffset());
+ __ CallFromThread(jni_decode_reference_result);
+ }
+ // Note: If the native ABI returns the pointer in a register different from
+ // `mr_return_register`, the `JniDecodeReferenceResult` entrypoint must be
+ // a stub that moves the result to `mr_return_register`.
+}
+
JniCompiledMethod ArtQuickJniCompileMethod(const CompilerOptions& compiler_options,
uint32_t access_flags,
uint32_t method_idx,
diff --git a/compiler/jni/quick/jni_compiler.h b/compiler/jni/quick/jni_compiler.h
index 52a6f3cf02..d43b2a9917 100644
--- a/compiler/jni/quick/jni_compiler.h
+++ b/compiler/jni/quick/jni_compiler.h
@@ -21,8 +21,9 @@
#include "arch/instruction_set.h"
#include "base/array_ref.h"
+#include "base/macros.h"
-namespace art {
+namespace art HIDDEN {
class ArenaAllocator;
class ArtMethod;
diff --git a/compiler/jni/quick/x86/calling_convention_x86.cc b/compiler/jni/quick/x86/calling_convention_x86.cc
index 65be92cdce..598e8e72ff 100644
--- a/compiler/jni/quick/x86/calling_convention_x86.cc
+++ b/compiler/jni/quick/x86/calling_convention_x86.cc
@@ -22,7 +22,7 @@
#include "arch/x86/jni_frame_x86.h"
#include "utils/x86/managed_register_x86.h"
-namespace art {
+namespace art HIDDEN {
namespace x86 {
static constexpr ManagedRegister kManagedCoreArgumentRegisters[] = {
@@ -143,6 +143,10 @@ ManagedRegister X86ManagedRuntimeCallingConvention::MethodRegister() {
return X86ManagedRegister::FromCpuRegister(EAX);
}
+ManagedRegister X86ManagedRuntimeCallingConvention::ArgumentRegisterForMethodExitHook() {
+ return X86ManagedRegister::FromCpuRegister(EBX);
+}
+
void X86ManagedRuntimeCallingConvention::ResetIterator(FrameOffset displacement) {
ManagedRuntimeCallingConvention::ResetIterator(displacement);
gpr_arg_count_ = 1u; // Skip EAX for ArtMethod*
diff --git a/compiler/jni/quick/x86/calling_convention_x86.h b/compiler/jni/quick/x86/calling_convention_x86.h
index cd7ef5b557..f0d663dd98 100644
--- a/compiler/jni/quick/x86/calling_convention_x86.h
+++ b/compiler/jni/quick/x86/calling_convention_x86.h
@@ -18,9 +18,10 @@
#define ART_COMPILER_JNI_QUICK_X86_CALLING_CONVENTION_X86_H_
#include "base/enums.h"
+#include "base/macros.h"
#include "jni/quick/calling_convention.h"
-namespace art {
+namespace art HIDDEN {
namespace x86 {
class X86ManagedRuntimeCallingConvention final : public ManagedRuntimeCallingConvention {
@@ -37,6 +38,7 @@ class X86ManagedRuntimeCallingConvention final : public ManagedRuntimeCallingCon
void ResetIterator(FrameOffset displacement) override;
// Managed runtime calling convention
ManagedRegister MethodRegister() override;
+ ManagedRegister ArgumentRegisterForMethodExitHook() override;
void Next() override;
bool IsCurrentParamInRegister() override;
bool IsCurrentParamOnStack() override;
diff --git a/compiler/jni/quick/x86_64/calling_convention_x86_64.cc b/compiler/jni/quick/x86_64/calling_convention_x86_64.cc
index 862ee5e2be..9d0761d2f7 100644
--- a/compiler/jni/quick/x86_64/calling_convention_x86_64.cc
+++ b/compiler/jni/quick/x86_64/calling_convention_x86_64.cc
@@ -23,7 +23,7 @@
#include "base/bit_utils.h"
#include "utils/x86_64/managed_register_x86_64.h"
-namespace art {
+namespace art HIDDEN {
namespace x86_64 {
static constexpr ManagedRegister kCoreArgumentRegisters[] = {
@@ -147,6 +147,10 @@ ManagedRegister X86_64ManagedRuntimeCallingConvention::MethodRegister() {
return X86_64ManagedRegister::FromCpuRegister(RDI);
}
+ManagedRegister X86_64ManagedRuntimeCallingConvention::ArgumentRegisterForMethodExitHook() {
+ return X86_64ManagedRegister::FromCpuRegister(R8);
+}
+
bool X86_64ManagedRuntimeCallingConvention::IsCurrentParamInRegister() {
if (IsCurrentParamAFloatOrDouble()) {
return itr_float_and_doubles_ < kMaxFloatOrDoubleRegisterArguments;
diff --git a/compiler/jni/quick/x86_64/calling_convention_x86_64.h b/compiler/jni/quick/x86_64/calling_convention_x86_64.h
index 483f1f5806..859a277c60 100644
--- a/compiler/jni/quick/x86_64/calling_convention_x86_64.h
+++ b/compiler/jni/quick/x86_64/calling_convention_x86_64.h
@@ -18,9 +18,10 @@
#define ART_COMPILER_JNI_QUICK_X86_64_CALLING_CONVENTION_X86_64_H_
#include "base/enums.h"
+#include "base/macros.h"
#include "jni/quick/calling_convention.h"
-namespace art {
+namespace art HIDDEN {
namespace x86_64 {
class X86_64ManagedRuntimeCallingConvention final : public ManagedRuntimeCallingConvention {
@@ -35,6 +36,7 @@ class X86_64ManagedRuntimeCallingConvention final : public ManagedRuntimeCalling
ManagedRegister ReturnRegister() const override;
// Managed runtime calling convention
ManagedRegister MethodRegister() override;
+ ManagedRegister ArgumentRegisterForMethodExitHook() override;
bool IsCurrentParamInRegister() override;
bool IsCurrentParamOnStack() override;
ManagedRegister CurrentParamRegister() override;