| author | 2022-05-24 14:49:25 +0000 |
|---|---|
| committer | 2022-06-24 08:49:01 +0000 |
| commit | 90f12677f80169dc3ef919c2067349f94b943e7f (patch) |
| tree | c9acb9042b82c2021f15048d847237c016da1c4b /compiler |
| parent | a2c4150bd4487e28348bb9b26b680f8f058d5ce7 (diff) |
Don't use instrumentation stubs for native methods in debuggable
Don't install instrumentation stubs for native methods in debuggable
runtimes. The GenericJniTrampoline is updated to call method entry /
exit hooks. When JITing JNI stubs in debuggable runtimes we also include
calls to method entry / exit hooks when required.
Bug: 206029744
Test: art/test.py
Change-Id: I1d92ddb1d03daed74d88f5c70d38427dc6055446
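To make the mechanism concrete, here is a minimal, self-contained C++ sketch of the guard pattern the JITed JNI stubs emit around the native call: test a single "needs entry/exit hooks" byte owned by the instrumentation, and only take the slow path that calls the hooks when it is non-zero. All names in the sketch (`FakeInstrumentation`, `JniStubForNativeMethod`, the hook functions) are illustrative stand-ins, not ART's real API.

```cpp
// Sketch of the entry/exit-hook guard pattern used by the JITed JNI stubs in
// this change. All types and names here are illustrative stand-ins, not ART's
// real Instrumentation / JNIMacroAssembler API.
#include <cstdint>
#include <iostream>

struct FakeInstrumentation {
  // The stub tests a single byte; non-zero means "call the hooks".
  uint8_t needs_entry_exit_hooks = 0;
};

FakeInstrumentation g_instrumentation;

void MethodEntryHook(const char* method) {
  std::cout << "entry hook: " << method << "\n";
}

void MethodExitHook(const char* method) {
  std::cout << "exit hook: " << method << "\n";
}

// Stand-in for a compiled JNI stub: load the byte at a fixed address and only
// branch to the hook calls when it is non-zero, mirroring what
// TestByteAndJumpIfNotZero emits in the per-architecture assemblers.
int JniStubForNativeMethod(int arg) {
  const uint8_t* flag = &g_instrumentation.needs_entry_exit_hooks;
  if (*flag != 0) {            // slow path, taken only when instrumentation is on
    MethodEntryHook("MyClass.nativeAdd");
  }
  int result = arg + 1;        // the actual native call would happen here
  if (*flag != 0) {
    MethodExitHook("MyClass.nativeAdd");
  }
  return result;
}

int main() {
  JniStubForNativeMethod(1);                     // hooks disabled: no output
  g_instrumentation.needs_entry_exit_hooks = 1;  // e.g. a debugger attached
  JniStubForNativeMethod(2);                     // hooks fire
}
```

The point of testing one byte is that the common, uninstrumented case costs only a load and a compare; the hook calls live on a slow path, which is exactly what the new `TestByteAndJumpIfNotZero` helper emits in the per-architecture assemblers in the diff below.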
Diffstat (limited to 'compiler')
| Mode | File | Lines changed |
|---|---|---|
| -rw-r--r-- | compiler/jni/quick/jni_compiler.cc | 68 |
| -rw-r--r-- | compiler/optimizing/optimizing_compiler.cc | 8 |
| -rw-r--r-- | compiler/utils/arm/jni_macro_assembler_arm_vixl.cc | 8 |
| -rw-r--r-- | compiler/utils/arm/jni_macro_assembler_arm_vixl.h | 2 |
| -rw-r--r-- | compiler/utils/arm64/jni_macro_assembler_arm64.cc | 8 |
| -rw-r--r-- | compiler/utils/arm64/jni_macro_assembler_arm64.h | 2 |
| -rw-r--r-- | compiler/utils/assembler_thumb_test_expected.cc.inc | 2 |
| -rw-r--r-- | compiler/utils/jni_macro_assembler.h | 2 |
| -rw-r--r-- | compiler/utils/x86/jni_macro_assembler_x86.cc | 6 |
| -rw-r--r-- | compiler/utils/x86/jni_macro_assembler_x86.h | 2 |
| -rw-r--r-- | compiler/utils/x86_64/jni_macro_assembler_x86_64.cc | 7 |
| -rw-r--r-- | compiler/utils/x86_64/jni_macro_assembler_x86_64.h | 2 |
12 files changed, 110 insertions, 7 deletions
diff --git a/compiler/jni/quick/jni_compiler.cc b/compiler/jni/quick/jni_compiler.cc
index b88ebaf956..d672500126 100644
--- a/compiler/jni/quick/jni_compiler.cc
+++ b/compiler/jni/quick/jni_compiler.cc
@@ -36,7 +36,9 @@
 #include "dex/dex_file-inl.h"
 #include "driver/compiler_options.h"
 #include "entrypoints/quick/quick_entrypoints.h"
+#include "instrumentation.h"
 #include "jni/jni_env_ext.h"
+#include "runtime.h"
 #include "thread.h"
 #include "utils/arm/managed_register_arm.h"
 #include "utils/arm64/managed_register_arm64.h"
@@ -95,6 +97,12 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
   const InstructionSetFeatures* instruction_set_features =
       compiler_options.GetInstructionSetFeatures();
 
+  // i.e. if the method was annotated with @FastNative
+  const bool is_fast_native = (access_flags & kAccFastNative) != 0u;
+
+  // i.e. if the method was annotated with @CriticalNative
+  const bool is_critical_native = (access_flags & kAccCriticalNative) != 0u;
+
   // When walking the stack the top frame doesn't have a pc associated with it. We then depend on
   // the invariant that we don't have JITed code when AOT code is available. In debuggable runtimes
   // this invariant doesn't hold. So we tag the SP for JITed code to indentify if we are executing
@@ -102,11 +110,12 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
   // debuggable runtimes.
   bool should_tag_sp = compiler_options.GetDebuggable() && compiler_options.IsJitCompiler();
 
-  // i.e. if the method was annotated with @FastNative
-  const bool is_fast_native = (access_flags & kAccFastNative) != 0u;
-
-  // i.e. if the method was annotated with @CriticalNative
-  const bool is_critical_native = (access_flags & kAccCriticalNative) != 0u;
+  // We don't JIT stubs for critical native methods in debuggable runtimes.
+  // TODO(mythria): Add support required for calling method entry / exit hooks from critical native
+  // methods.
+  bool needs_entry_exit_hooks = compiler_options.GetDebuggable() &&
+                                compiler_options.IsJitCompiler() &&
+                                !is_critical_native;
 
   VLOG(jni) << "JniCompile: Method :: "
             << dex_file.PrettyMethod(method_idx, /* with signature */ true)
@@ -229,6 +238,21 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
     __ StoreStackPointerToThread(Thread::TopOfManagedStackOffset<kPointerSize>(), should_tag_sp);
   }
 
+  // 1.5. Call any method entry hooks if required.
+  // For critical native methods, we don't JIT stubs in debuggable runtimes (see
+  // OptimizingCompiler::JitCompile).
+  // TODO(mythria): Add support to call method entry / exit hooks for critical native methods too.
+  std::unique_ptr<JNIMacroLabel> method_entry_hook_slow_path;
+  std::unique_ptr<JNIMacroLabel> method_entry_hook_return;
+  if (UNLIKELY(needs_entry_exit_hooks)) {
+    uint64_t address = reinterpret_cast64<uint64_t>(Runtime::Current()->GetInstrumentation());
+    int offset = instrumentation::Instrumentation::NeedsEntryExitHooksOffset().Int32Value();
+    method_entry_hook_slow_path = __ CreateLabel();
+    method_entry_hook_return = __ CreateLabel();
+    __ TestByteAndJumpIfNotZero(address + offset, method_entry_hook_slow_path.get());
+    __ Bind(method_entry_hook_return.get());
+  }
+
   // 2. Lock the object (if synchronized) and transition out of Runnable (if normal native).
 
   // 2.1. Lock the synchronization object (`this` or class) for synchronized methods.
@@ -539,7 +563,21 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
     __ Bind(suspend_check_resume.get());
   }
 
-  // 7.5. Remove activation - need to restore callee save registers since the GC
+  // 7.5. Check if method exit hooks needs to be called
+  // For critical native methods, we don't JIT stubs in debuggable runtimes.
+  // TODO(mythria): Add support to call method entry / exit hooks for critical native methods too.
+  std::unique_ptr<JNIMacroLabel> method_exit_hook_slow_path;
+  std::unique_ptr<JNIMacroLabel> method_exit_hook_return;
+  if (UNLIKELY(needs_entry_exit_hooks)) {
+    uint64_t address = reinterpret_cast64<uint64_t>(Runtime::Current()->GetInstrumentation());
+    int offset = instrumentation::Instrumentation::NeedsEntryExitHooksOffset().Int32Value();
+    method_exit_hook_slow_path = __ CreateLabel();
+    method_exit_hook_return = __ CreateLabel();
+    __ TestByteAndJumpIfNotZero(address + offset, method_exit_hook_slow_path.get());
+    __ Bind(method_exit_hook_return.get());
+  }
+
+  // 7.6. Remove activation - need to restore callee save registers since the GC
   // may have changed them.
   DCHECK_EQ(jni_asm->cfi().GetCurrentCFAOffset(), static_cast<int>(current_frame_size));
   if (LIKELY(!is_critical_native) || !main_jni_conv->UseTailCall()) {
@@ -637,6 +675,24 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
     __ DeliverPendingException();
   }
 
+  // 8.6. Method entry / exit hooks slow paths.
+  if (UNLIKELY(needs_entry_exit_hooks)) {
+    __ Bind(method_entry_hook_slow_path.get());
+    // Use Jni specific method entry hook that saves all the arguments. We have only saved the
+    // callee save registers at this point. So go through Jni specific stub that saves the rest
+    // of the live registers.
+    __ CallFromThread(QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEntryHook));
+    __ ExceptionPoll(exception_slow_path.get());
+    __ Jump(method_entry_hook_return.get());
+
+    __ Bind(method_exit_hook_slow_path.get());
+    // Method exit hooks is called just before tearing down the frame. So there are no live
+    // registers and we can directly call the method exit hook and don't need a Jni specific
+    // entrypoint.
+    __ CallFromThread(QUICK_ENTRYPOINT_OFFSET(kPointerSize, pMethodExitHook));
+    __ Jump(method_exit_hook_return.get());
+  }
+
   // 9. Finalize code generation.
   __ FinalizeCode();
   size_t cs = __ CodeSize();
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index 6eb3d01e42..a499c55757 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -1233,6 +1233,14 @@ bool OptimizingCompiler::JitCompile(Thread* self,
   ArenaAllocator allocator(runtime->GetJitArenaPool());
 
   if (UNLIKELY(method->IsNative())) {
+    // Use GenericJniTrampoline for critical native methods in debuggable runtimes. We don't
+    // support calling method entry / exit hooks for critical native methods yet.
+    // TODO(mythria): Add support for calling method entry / exit hooks in JITed stubs for critical
+    // native methods too.
+    if (runtime->IsJavaDebuggable() && method->IsCriticalNative()) {
+      DCHECK(compiler_options.IsJitCompiler());
+      return false;
+    }
     JniCompiledMethod jni_compiled_method = ArtQuickJniCompileMethod(
         compiler_options, access_flags, method_idx, *dex_file, &allocator);
     std::vector<Handle<mirror::Object>> roots;
diff --git a/compiler/utils/arm/jni_macro_assembler_arm_vixl.cc b/compiler/utils/arm/jni_macro_assembler_arm_vixl.cc
index 61151fe6be..a4fddbce20 100644
--- a/compiler/utils/arm/jni_macro_assembler_arm_vixl.cc
+++ b/compiler/utils/arm/jni_macro_assembler_arm_vixl.cc
@@ -1220,6 +1220,14 @@ void ArmVIXLJNIMacroAssembler::TestMarkBit(ManagedRegister mref,
   }
 }
+void ArmVIXLJNIMacroAssembler::TestByteAndJumpIfNotZero(uintptr_t address, JNIMacroLabel* label) {
+  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
+  vixl32::Register scratch = temps.Acquire();
+  ___ Mov(scratch, static_cast<uint32_t>(address));
+  ___ Ldrb(scratch, MemOperand(scratch, 0));
+  ___ CompareAndBranchIfNonZero(scratch, ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
+}
+
 void ArmVIXLJNIMacroAssembler::Bind(JNIMacroLabel* label) {
   CHECK(label != nullptr);
   ___ Bind(ArmVIXLJNIMacroLabel::Cast(label)->AsArm());
diff --git a/compiler/utils/arm/jni_macro_assembler_arm_vixl.h b/compiler/utils/arm/jni_macro_assembler_arm_vixl.h
index 980de41392..5965552980 100644
--- a/compiler/utils/arm/jni_macro_assembler_arm_vixl.h
+++ b/compiler/utils/arm/jni_macro_assembler_arm_vixl.h
@@ -213,6 +213,8 @@ class ArmVIXLJNIMacroAssembler final
   void TestGcMarking(JNIMacroLabel* label, JNIMacroUnaryCondition cond) override;
   // Emit a conditional jump to the label by applying a unary condition test to object's mark bit.
   void TestMarkBit(ManagedRegister ref, JNIMacroLabel* label, JNIMacroUnaryCondition cond) override;
+  // Emit a conditional jump to label if the loaded value from specified locations is not zero.
+  void TestByteAndJumpIfNotZero(uintptr_t address, JNIMacroLabel* label) override;
   // Code at this offset will serve as the target for the Jump call.
   void Bind(JNIMacroLabel* label) override;
diff --git a/compiler/utils/arm64/jni_macro_assembler_arm64.cc b/compiler/utils/arm64/jni_macro_assembler_arm64.cc
index 323a01e60b..c5fe3d632d 100644
--- a/compiler/utils/arm64/jni_macro_assembler_arm64.cc
+++ b/compiler/utils/arm64/jni_macro_assembler_arm64.cc
@@ -1040,6 +1040,14 @@ void Arm64JNIMacroAssembler::TestMarkBit(ManagedRegister m_ref,
   }
 }
+void Arm64JNIMacroAssembler::TestByteAndJumpIfNotZero(uintptr_t address, JNIMacroLabel* label) {
+  UseScratchRegisterScope temps(asm_.GetVIXLAssembler());
+  Register scratch = temps.AcquireW();
+  ___ Mov(scratch, address);
+  ___ Ldrb(scratch, MEM_OP(scratch, 0));
+  ___ Cbnz(scratch, Arm64JNIMacroLabel::Cast(label)->AsArm64());
+}
+
 void Arm64JNIMacroAssembler::Bind(JNIMacroLabel* label) {
   CHECK(label != nullptr);
   ___ Bind(Arm64JNIMacroLabel::Cast(label)->AsArm64());
diff --git a/compiler/utils/arm64/jni_macro_assembler_arm64.h b/compiler/utils/arm64/jni_macro_assembler_arm64.h
index daea95ded8..9d3e821a36 100644
--- a/compiler/utils/arm64/jni_macro_assembler_arm64.h
+++ b/compiler/utils/arm64/jni_macro_assembler_arm64.h
@@ -197,6 +197,8 @@ class Arm64JNIMacroAssembler final : public JNIMacroAssemblerFwd<Arm64Assembler,
   void TestGcMarking(JNIMacroLabel* label, JNIMacroUnaryCondition cond) override;
   // Emit a conditional jump to the label by applying a unary condition test to object's mark bit.
   void TestMarkBit(ManagedRegister ref, JNIMacroLabel* label, JNIMacroUnaryCondition cond) override;
+  // Emit a conditional jump to label if the loaded value from specified locations is not zero.
+  void TestByteAndJumpIfNotZero(uintptr_t address, JNIMacroLabel* label) override;
   // Code at this offset will serve as the target for the Jump call.
   void Bind(JNIMacroLabel* label) override;
diff --git a/compiler/utils/assembler_thumb_test_expected.cc.inc b/compiler/utils/assembler_thumb_test_expected.cc.inc
index dac21ae4dc..ae84338711 100644
--- a/compiler/utils/assembler_thumb_test_expected.cc.inc
+++ b/compiler/utils/assembler_thumb_test_expected.cc.inc
@@ -155,7 +155,7 @@ const char* const VixlJniHelpersResults = {
   " 224: d9 f8 24 80 ldr.w r8, [r9, #36]\n"
   " 228: 70 47 bx lr\n"
   " 22a: d9 f8 9c 00 ldr.w r0, [r9, #156]\n"
-  " 22e: d9 f8 d0 e2 ldr.w lr, [r9, #720]\n"
+  " 22e: d9 f8 d4 e2 ldr.w lr, [r9, #724]\n"
   " 232: f0 47 blx lr\n"
 };
diff --git a/compiler/utils/jni_macro_assembler.h b/compiler/utils/jni_macro_assembler.h
index c8c713ae67..36de012495 100644
--- a/compiler/utils/jni_macro_assembler.h
+++ b/compiler/utils/jni_macro_assembler.h
@@ -286,6 +286,8 @@ class JNIMacroAssembler : public DeletableArenaObject<kArenaAllocAssembler> {
   virtual void TestMarkBit(ManagedRegister ref,
                            JNIMacroLabel* label,
                            JNIMacroUnaryCondition cond) = 0;
+  // Emit a conditional jump to label if the loaded value from specified locations is not zero.
+  virtual void TestByteAndJumpIfNotZero(uintptr_t address, JNIMacroLabel* label) = 0;
   // Code at this offset will serve as the target for the Jump call.
   virtual void Bind(JNIMacroLabel* label) = 0;
diff --git a/compiler/utils/x86/jni_macro_assembler_x86.cc b/compiler/utils/x86/jni_macro_assembler_x86.cc
index 55d54283fa..e292c5bd58 100644
--- a/compiler/utils/x86/jni_macro_assembler_x86.cc
+++ b/compiler/utils/x86/jni_macro_assembler_x86.cc
@@ -734,6 +734,12 @@ void X86JNIMacroAssembler::TestMarkBit(ManagedRegister mref,
   __ j(UnaryConditionToX86Condition(cond), X86JNIMacroLabel::Cast(label)->AsX86());
 }
+
+void X86JNIMacroAssembler::TestByteAndJumpIfNotZero(uintptr_t address, JNIMacroLabel* label) {
+  __ cmpb(Address::Absolute(address), Immediate(0));
+  __ j(kNotZero, X86JNIMacroLabel::Cast(label)->AsX86());
+}
+
 void X86JNIMacroAssembler::Bind(JNIMacroLabel* label) {
   CHECK(label != nullptr);
   __ Bind(X86JNIMacroLabel::Cast(label)->AsX86());
diff --git a/compiler/utils/x86/jni_macro_assembler_x86.h b/compiler/utils/x86/jni_macro_assembler_x86.h
index f8ce38b541..571b213647 100644
--- a/compiler/utils/x86/jni_macro_assembler_x86.h
+++ b/compiler/utils/x86/jni_macro_assembler_x86.h
@@ -189,6 +189,8 @@ class X86JNIMacroAssembler final : public JNIMacroAssemblerFwd<X86Assembler, Poi
   void TestGcMarking(JNIMacroLabel* label, JNIMacroUnaryCondition cond) override;
   // Emit a conditional jump to the label by applying a unary condition test to object's mark bit.
   void TestMarkBit(ManagedRegister ref, JNIMacroLabel* label, JNIMacroUnaryCondition cond) override;
+  // Emit a conditional jump to label if the loaded value from specified locations is not zero.
+  void TestByteAndJumpIfNotZero(uintptr_t address, JNIMacroLabel* label) override;
   // Code at this offset will serve as the target for the Jump call.
   void Bind(JNIMacroLabel* label) override;
diff --git a/compiler/utils/x86_64/jni_macro_assembler_x86_64.cc b/compiler/utils/x86_64/jni_macro_assembler_x86_64.cc
index adc431f716..811591114d 100644
--- a/compiler/utils/x86_64/jni_macro_assembler_x86_64.cc
+++ b/compiler/utils/x86_64/jni_macro_assembler_x86_64.cc
@@ -810,6 +810,13 @@ void X86_64JNIMacroAssembler::TestMarkBit(ManagedRegister mref,
   __ j(UnaryConditionToX86_64Condition(cond), X86_64JNIMacroLabel::Cast(label)->AsX86_64());
 }
+void X86_64JNIMacroAssembler::TestByteAndJumpIfNotZero(uintptr_t address, JNIMacroLabel* label) {
+  CpuRegister scratch = GetScratchRegister();
+  __ movq(scratch, Immediate(address));
+  __ cmpb(Address(scratch, 0), Immediate(0));
+  __ j(kNotZero, X86_64JNIMacroLabel::Cast(label)->AsX86_64());
+}
+
 void X86_64JNIMacroAssembler::Bind(JNIMacroLabel* label) {
   CHECK(label != nullptr);
   __ Bind(X86_64JNIMacroLabel::Cast(label)->AsX86_64());
diff --git a/compiler/utils/x86_64/jni_macro_assembler_x86_64.h b/compiler/utils/x86_64/jni_macro_assembler_x86_64.h
index feaf27e53d..04c6bfcf96 100644
--- a/compiler/utils/x86_64/jni_macro_assembler_x86_64.h
+++ b/compiler/utils/x86_64/jni_macro_assembler_x86_64.h
@@ -209,6 +209,8 @@ class X86_64JNIMacroAssembler final : public JNIMacroAssemblerFwd<X86_64Assemble
   void TestGcMarking(JNIMacroLabel* label, JNIMacroUnaryCondition cond) override;
   // Emit a conditional jump to the label by applying a unary condition test to object's mark bit.
   void TestMarkBit(ManagedRegister ref, JNIMacroLabel* label, JNIMacroUnaryCondition cond) override;
+  // Emit a conditional jump to label if the loaded value from specified locations is not zero.
+  void TestByteAndJumpIfNotZero(uintptr_t address, JNIMacroLabel* label) override;
   // Code at this offset will serve as the target for the Jump call.
   void Bind(JNIMacroLabel* label) override;
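As a closing illustration of what callers pass to the new `TestByteAndJumpIfNotZero` helper, the jni_compiler.cc hunks form an absolute address as "Instrumentation object address + byte offset of the needs-entry/exit-hooks field". The sketch below models that address computation with illustrative types (`FakeInstrumentation` and its field are stand-ins, not ART's real layout); on x86-64 the emitted sequence is roughly `movq scratch, imm64; cmpb 0(scratch), 0; jnz slow_path`.

```cpp
// Models the address computation in the jni_compiler.cc hunks:
//   reinterpret_cast64<uint64_t>(Runtime::Current()->GetInstrumentation()) +
//   Instrumentation::NeedsEntryExitHooksOffset().Int32Value()
// Types and field names are illustrative stand-ins, not ART's real layout.
#include <cstddef>
#include <cstdint>
#include <cstdio>

struct FakeInstrumentation {
  uint32_t other_state = 0;
  uint8_t needs_entry_exit_hooks = 0;  // the single byte the generated code tests
};

int main() {
  static FakeInstrumentation instrumentation;  // stand-in for the runtime singleton

  uintptr_t address = reinterpret_cast<uintptr_t>(&instrumentation);
  uintptr_t offset = offsetof(FakeInstrumentation, needs_entry_exit_hooks);

  // The emitted stub loads one byte from this absolute address and branches if non-zero.
  const uint8_t* flag = reinterpret_cast<const uint8_t*>(address + offset);
  std::printf("entry/exit hooks needed: %s\n", (*flag != 0) ? "yes" : "no");

  instrumentation.needs_entry_exit_hooks = 1;  // e.g. a debugger requested method hooks
  std::printf("entry/exit hooks needed: %s\n", (*flag != 0) ? "yes" : "no");
  return 0;
}
```

Baking the resolved address into the stub keeps the fast path to a single byte compare, while x86 can use an absolute-address operand directly and the other architectures first materialize the address into a scratch register, as the per-architecture implementations above show.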