Diffstat (limited to 'compiler/jni/quick/jni_compiler.cc')
-rw-r--r-- | compiler/jni/quick/jni_compiler.cc | 122
1 file changed, 86 insertions(+), 36 deletions(-)
diff --git a/compiler/jni/quick/jni_compiler.cc b/compiler/jni/quick/jni_compiler.cc
index 58d11aed24..c60d97467e 100644
--- a/compiler/jni/quick/jni_compiler.cc
+++ b/compiler/jni/quick/jni_compiler.cc
@@ -70,6 +70,12 @@ static void SetNativeParameter(JNIMacroAssembler<kPointerSize>* jni_asm,
                                ManagedRegister in_reg);
 
 template <PointerSize kPointerSize>
+static void CallDecodeReferenceResult(JNIMacroAssembler<kPointerSize>* jni_asm,
+                                      JniCallingConvention* jni_conv,
+                                      ManagedRegister mr_return_reg,
+                                      size_t main_out_arg_size);
+
+template <PointerSize kPointerSize>
 static std::unique_ptr<JNIMacroAssembler<kPointerSize>> GetMacroAssembler(
     ArenaAllocator* allocator, InstructionSet isa, const InstructionSetFeatures* features) {
   return JNIMacroAssembler<kPointerSize>::Create(allocator, isa, features);
@@ -103,13 +109,17 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
   // i.e. if the method was annotated with @CriticalNative
   const bool is_critical_native = (access_flags & kAccCriticalNative) != 0u;
 
-  bool needs_entry_exit_hooks =
-      compiler_options.GetDebuggable() && compiler_options.IsJitCompiler();
+  bool is_debuggable = compiler_options.GetDebuggable();
+  bool needs_entry_exit_hooks = is_debuggable && compiler_options.IsJitCompiler();
   // We don't support JITing stubs for critical native methods in debuggable runtimes yet.
   // TODO(mythria): Add support required for calling method entry / exit hooks from critical native
   // methods.
   DCHECK_IMPLIES(needs_entry_exit_hooks, !is_critical_native);
 
+  // The fast-path for decoding a reference skips CheckJNI checks, so we do not inline the
+  // decoding in debug build or for debuggable apps (both cases enable CheckJNI by default).
+  bool inline_decode_reference = !kIsDebugBuild && !is_debuggable;
+
   // When walking the stack the top frame doesn't have a pc associated with it. We then depend on
   // the invariant that we don't have JITed code when AOT code is available. In debuggable runtimes
   // this invariant doesn't hold. So we tag the SP for JITed code to indentify if we are executing
@@ -473,8 +483,7 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
     __ Bind(transition_to_runnable_resume.get());
   }
 
-  // 5.2. For methods that return a reference, do an early exception check so that the
-  //      `JniDecodeReferenceResult()` in the main path does not need to check for exceptions.
+  // 5.2. For methods that return a reference, do an exception check before decoding the reference.
   std::unique_ptr<JNIMacroLabel> exception_slow_path =
       LIKELY(!is_critical_native) ? __ CreateLabel() : nullptr;
   if (reference_return) {
@@ -493,23 +502,23 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
     __ Bind(suspend_check_resume.get());
   }
 
-  // 5.4 For methods with reference return, decode the `jobject` with `JniDecodeReferenceResult()`.
+  // 5.4 For methods with reference return, decode the `jobject`, either directly
+  //     or with a call to `JniDecodeReferenceResult()`.
+  std::unique_ptr<JNIMacroLabel> decode_reference_slow_path;
+  std::unique_ptr<JNIMacroLabel> decode_reference_resume;
   if (reference_return) {
     DCHECK(!is_critical_native);
-    // We abuse the JNI calling convention here, that is guaranteed to support passing
-    // two pointer arguments, `JNIEnv*` and `jclass`/`jobject`.
-    main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
-    ThreadOffset<kPointerSize> jni_decode_reference_result =
-        QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniDecodeReferenceResult);
-    // Pass result.
-    SetNativeParameter(jni_asm.get(), main_jni_conv.get(), mr_conv->ReturnRegister());
-    main_jni_conv->Next();
-    if (main_jni_conv->IsCurrentParamInRegister()) {
-      __ GetCurrentThread(main_jni_conv->CurrentParamRegister());
-      __ Call(main_jni_conv->CurrentParamRegister(), Offset(jni_decode_reference_result));
+    if (inline_decode_reference) {
+      // Decode local and JNI transition references in the main path.
+      decode_reference_slow_path = __ CreateLabel();
+      decode_reference_resume = __ CreateLabel();
+      __ DecodeJNITransitionOrLocalJObject(mr_conv->ReturnRegister(),
+                                           decode_reference_slow_path.get(),
+                                           decode_reference_resume.get());
+      __ Bind(decode_reference_resume.get());
     } else {
-      __ GetCurrentThread(main_jni_conv->CurrentParamStackOffset());
-      __ CallFromThread(jni_decode_reference_result);
+      CallDecodeReferenceResult<kPointerSize>(
+          jni_asm.get(), main_jni_conv.get(), mr_conv->ReturnRegister(), main_out_arg_size);
     }
   }  // if (!is_critical_native)
 
@@ -639,7 +648,37 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
     __ Jump(transition_to_runnable_resume.get());
   }
 
-  // 8.4. Suspend check slow path.
+  // 8.4. Exception poll slow path(s).
+  if (LIKELY(!is_critical_native)) {
+    __ Bind(exception_slow_path.get());
+    if (reference_return) {
+      // We performed the exception check early, so we need to adjust SP and pop IRT frame.
+      if (main_out_arg_size != 0) {
+        jni_asm->cfi().AdjustCFAOffset(main_out_arg_size);
+        __ DecreaseFrameSize(main_out_arg_size);
+      }
+      PopLocalReferenceFrame<kPointerSize>(
+          jni_asm.get(), jni_env_reg, saved_cookie_reg, callee_save_temp);
+    }
+    DCHECK_EQ(jni_asm->cfi().GetCurrentCFAOffset(), static_cast<int>(current_frame_size));
+    __ DeliverPendingException();
+  }
+
+  // 8.5 Slow path for decoding the `jobject`.
+  if (reference_return && inline_decode_reference) {
+    __ Bind(decode_reference_slow_path.get());
+    if (main_out_arg_size != 0) {
+      jni_asm->cfi().AdjustCFAOffset(main_out_arg_size);
+    }
+    CallDecodeReferenceResult<kPointerSize>(
+        jni_asm.get(), main_jni_conv.get(), mr_conv->ReturnRegister(), main_out_arg_size);
+    __ Jump(decode_reference_resume.get());
+    if (main_out_arg_size != 0) {
+      jni_asm->cfi().AdjustCFAOffset(-main_out_arg_size);
+    }
+  }
+
+  // 8.6. Suspend check slow path.
   if (UNLIKELY(is_fast_native)) {
     __ Bind(suspend_check_slow_path.get());
     if (reference_return && main_out_arg_size != 0) {
@@ -654,28 +693,14 @@ static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& comp
     }
     if (reference_return && main_out_arg_size != 0) {
       __ IncreaseFrameSize(main_out_arg_size);
-      jni_asm->cfi().AdjustCFAOffset(-main_out_arg_size);
     }
     __ Jump(suspend_check_resume.get());
-  }
-
-  // 8.5. Exception poll slow path(s).
-  if (LIKELY(!is_critical_native)) {
-    __ Bind(exception_slow_path.get());
-    if (reference_return) {
-      // We performed the exception check early, so we need to adjust SP and pop IRT frame.
-      if (main_out_arg_size != 0) {
-        jni_asm->cfi().AdjustCFAOffset(main_out_arg_size);
-        __ DecreaseFrameSize(main_out_arg_size);
-      }
-      PopLocalReferenceFrame<kPointerSize>(
-          jni_asm.get(), jni_env_reg, saved_cookie_reg, callee_save_temp);
+    if (reference_return && main_out_arg_size != 0) {
+      jni_asm->cfi().AdjustCFAOffset(-main_out_arg_size);
     }
-    DCHECK_EQ(jni_asm->cfi().GetCurrentCFAOffset(), static_cast<int>(current_frame_size));
-    __ DeliverPendingException();
   }
 
-  // 8.6. Method entry / exit hooks slow paths.
+  // 8.7. Method entry / exit hooks slow paths.
   if (UNLIKELY(needs_entry_exit_hooks)) {
     __ Bind(method_entry_hook_slow_path.get());
     // Use Jni specific method entry hook that saves all the arguments. We have only saved the
@@ -757,6 +782,31 @@ static void SetNativeParameter(JNIMacroAssembler<kPointerSize>* jni_asm,
   }
 }
 
+template <PointerSize kPointerSize>
+static void CallDecodeReferenceResult(JNIMacroAssembler<kPointerSize>* jni_asm,
+                                      JniCallingConvention* jni_conv,
+                                      ManagedRegister mr_return_reg,
+                                      size_t main_out_arg_size) {
+  // We abuse the JNI calling convention here, that is guaranteed to support passing
+  // two pointer arguments, `JNIEnv*` and `jclass`/`jobject`.
+  jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
+  ThreadOffset<kPointerSize> jni_decode_reference_result =
+      QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniDecodeReferenceResult);
+  // Pass result.
+  SetNativeParameter(jni_asm, jni_conv, mr_return_reg);
+  jni_conv->Next();
+  if (jni_conv->IsCurrentParamInRegister()) {
+    __ GetCurrentThread(jni_conv->CurrentParamRegister());
+    __ Call(jni_conv->CurrentParamRegister(), Offset(jni_decode_reference_result));
+  } else {
+    __ GetCurrentThread(jni_conv->CurrentParamStackOffset());
+    __ CallFromThread(jni_decode_reference_result);
+  }
+  // Note: If the native ABI returns the pointer in a register different from
+  // `mr_return_register`, the `JniDecodeReferenceResult` entrypoint must be
+  // a stub that moves the result to `mr_return_register`.
+}
+
 JniCompiledMethod ArtQuickJniCompileMethod(const CompilerOptions& compiler_options,
                                            uint32_t access_flags,
                                            uint32_t method_idx,
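Note on scope: every hunk above concerns methods with a reference return, i.e. native methods whose result is a `jobject` that the generated JNI stub must decode back into a raw object pointer before resuming managed code. As a purely illustrative sketch (the class, method, and string below are hypothetical and not part of this change), a JNI implementation of such a method might look like this:

#include <jni.h>

// Hypothetical native method with a reference return. The stub that ART generates
// around the call to this function is what the patch changes: once the call returns,
// the stub decodes the returned local reference -- inline when `inline_decode_reference`
// is true (release build, non-debuggable app), otherwise through a call to the
// `JniDecodeReferenceResult` entrypoint.
extern "C" JNIEXPORT jstring JNICALL
Java_com_example_Widget_nativeName(JNIEnv* env, jobject /* thiz */) {
  return env->NewStringUTF("widget");  // Returns a new local reference.
}

Methods annotated with @CriticalNative cannot take or return reference types, which is why the new decode paths are guarded by `DCHECK(!is_critical_native)`.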