30 files changed, 190 insertions(+), 961 deletions(-)
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index 73c72fc57a..a281c4a310 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -1196,69 +1196,7 @@ bool OptimizingCompiler::JitCompile(Thread* self,
   Runtime* runtime = Runtime::Current();
   ArenaAllocator allocator(runtime->GetJitArenaPool());
-
-  if (UNLIKELY(method->IsNative())) {
-    JniCompiledMethod jni_compiled_method = ArtQuickJniCompileMethod(
-        GetCompilerDriver(), access_flags, method_idx, *dex_file);
-    ScopedNullHandle<mirror::ObjectArray<mirror::Object>> roots;
-    ArenaSet<ArtMethod*, std::less<ArtMethod*>> cha_single_implementation_list(
-        allocator.Adapter(kArenaAllocCHA));
-    const void* code = code_cache->CommitCode(
-        self,
-        method,
-        /* stack_map_data */ nullptr,
-        /* method_info_data */ nullptr,
-        /* roots_data */ nullptr,
-        jni_compiled_method.GetFrameSize(),
-        jni_compiled_method.GetCoreSpillMask(),
-        jni_compiled_method.GetFpSpillMask(),
-        jni_compiled_method.GetCode().data(),
-        jni_compiled_method.GetCode().size(),
-        /* data_size */ 0u,
-        osr,
-        roots,
-        /* has_should_deoptimize_flag */ false,
-        cha_single_implementation_list);
-    if (code == nullptr) {
-      return false;
-    }
-
-    const CompilerOptions& compiler_options = GetCompilerDriver()->GetCompilerOptions();
-    if (compiler_options.GetGenerateDebugInfo()) {
-      const auto* method_header = reinterpret_cast<const OatQuickMethodHeader*>(code);
-      const uintptr_t code_address = reinterpret_cast<uintptr_t>(method_header->GetCode());
-      debug::MethodDebugInfo info = {};
-      DCHECK(info.trampoline_name.empty());
-      info.dex_file = dex_file;
-      info.class_def_index = class_def_idx;
-      info.dex_method_index = method_idx;
-      info.access_flags = access_flags;
-      info.code_item = code_item;
-      info.isa = jni_compiled_method.GetInstructionSet();
-      info.deduped = false;
-      info.is_native_debuggable = compiler_options.GetNativeDebuggable();
-      info.is_optimized = true;
-      info.is_code_address_text_relative = false;
-      info.code_address = code_address;
-      info.code_size = jni_compiled_method.GetCode().size();
-      info.frame_size_in_bytes = method_header->GetFrameSizeInBytes();
-      info.code_info = nullptr;
-      info.cfi = jni_compiled_method.GetCfi();
-      std::vector<uint8_t> elf_file = debug::WriteDebugElfFileForMethods(
-          GetCompilerDriver()->GetInstructionSet(),
-          GetCompilerDriver()->GetInstructionSetFeatures(),
-          ArrayRef<const debug::MethodDebugInfo>(&info, 1));
-      CreateJITCodeEntryForAddress(code_address, std::move(elf_file));
-    }
-
-    Runtime::Current()->GetJit()->AddMemoryUsage(method, allocator.BytesUsed());
-    if (jit_logger != nullptr) {
-      jit_logger->WriteLog(code, jni_compiled_method.GetCode().size(), method);
-    }
-    return true;
-  }
-
-  ArenaStack arena_stack(runtime->GetJitArenaPool());
+  ArenaStack arena_stack(Runtime::Current()->GetJitArenaPool());
   CodeVectorAllocator code_allocator(&allocator);
   VariableSizedHandleScope handles(self);
diff --git a/runtime/arch/arm/quick_entrypoints_arm.S b/runtime/arch/arm/quick_entrypoints_arm.S
index 6ec9c48b92..6ff8dd60b8 100644
--- a/runtime/arch/arm/quick_entrypoints_arm.S
+++ b/runtime/arch/arm/quick_entrypoints_arm.S
@@ -1783,9 +1783,7 @@ ENTRY art_quick_generic_jni_trampoline
     .cfi_adjust_cfa_offset FRAME_SIZE_SAVE_REFS_AND_ARGS-FRAME_SIZE_SAVE_REFS_ONLY
 .Lexception_in_native:
-    ldr ip, [r9, #THREAD_TOP_QUICK_FRAME_OFFSET]
-    add ip, ip, #-1  // Remove the GenericJNI tag. ADD/SUB writing directly to SP is UNPREDICTABLE.
-    mov sp, ip
+    ldr sp, [r9, #THREAD_TOP_QUICK_FRAME_OFFSET]
     .cfi_def_cfa_register sp
     # This will create a new save-all frame, required by the runtime.
     DELIVER_PENDING_EXCEPTION
diff --git a/runtime/arch/arm64/quick_entrypoints_arm64.S b/runtime/arch/arm64/quick_entrypoints_arm64.S
index 47efeb9200..280e5937c6 100644
--- a/runtime/arch/arm64/quick_entrypoints_arm64.S
+++ b/runtime/arch/arm64/quick_entrypoints_arm64.S
@@ -2299,7 +2299,7 @@ ENTRY art_quick_generic_jni_trampoline
 .Lexception_in_native:
     // Move to x1 then sp to please assembler.
     ldr x1, [xSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]
-    add sp, x1, #-1  // Remove the GenericJNI tag.
+    mov sp, x1
     .cfi_def_cfa_register sp
     # This will create a new save-all frame, required by the runtime.
     DELIVER_PENDING_EXCEPTION
diff --git a/runtime/arch/mips/quick_entrypoints_mips.S b/runtime/arch/mips/quick_entrypoints_mips.S
index fc77a641b3..489c52c0d2 100644
--- a/runtime/arch/mips/quick_entrypoints_mips.S
+++ b/runtime/arch/mips/quick_entrypoints_mips.S
@@ -2283,8 +2283,7 @@ ENTRY art_quick_generic_jni_trampoline
     nop
 2:
-    lw      $t0, THREAD_TOP_QUICK_FRAME_OFFSET(rSELF)
-    addiu   $sp, $t0, -1  // Remove the GenericJNI tag.
+    lw      $sp, THREAD_TOP_QUICK_FRAME_OFFSET(rSELF)
     move    $gp, $s3       # restore $gp from $s3
     # This will create a new save-all frame, required by the runtime.
     DELIVER_PENDING_EXCEPTION
diff --git a/runtime/arch/mips64/quick_entrypoints_mips64.S b/runtime/arch/mips64/quick_entrypoints_mips64.S
index 3fb83d9232..98ffe6504a 100644
--- a/runtime/arch/mips64/quick_entrypoints_mips64.S
+++ b/runtime/arch/mips64/quick_entrypoints_mips64.S
@@ -2158,8 +2158,7 @@ ENTRY art_quick_generic_jni_trampoline
     dmtc1   $v0, $f0               # place return value to FP return value
 1:
-    ld      $t0, THREAD_TOP_QUICK_FRAME_OFFSET(rSELF)
-    daddiu  $sp, $t0, -1  // Remove the GenericJNI tag.
+    ld      $sp, THREAD_TOP_QUICK_FRAME_OFFSET(rSELF)
     # This will create a new save-all frame, required by the runtime.
     DELIVER_PENDING_EXCEPTION
 END art_quick_generic_jni_trampoline
diff --git a/runtime/arch/x86/quick_entrypoints_x86.S b/runtime/arch/x86/quick_entrypoints_x86.S
index a46ceeba12..25716dc1bb 100644
--- a/runtime/arch/x86/quick_entrypoints_x86.S
+++ b/runtime/arch/x86/quick_entrypoints_x86.S
@@ -1969,9 +1969,7 @@ DEFINE_FUNCTION art_quick_generic_jni_trampoline
     punpckldq %xmm1, %xmm0
     ret
 .Lexception_in_native:
-    pushl %fs:THREAD_TOP_QUICK_FRAME_OFFSET
-    addl LITERAL(-1), (%esp)  // Remove the GenericJNI tag.
-    movl (%esp), %esp
+    movl %fs:THREAD_TOP_QUICK_FRAME_OFFSET, %esp
     // Do a call to push a new save-all frame required by the runtime.
     call .Lexception_call
 .Lexception_call:
diff --git a/runtime/arch/x86_64/quick_entrypoints_x86_64.S b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
index 463e5a279f..2c3da90f25 100644
--- a/runtime/arch/x86_64/quick_entrypoints_x86_64.S
+++ b/runtime/arch/x86_64/quick_entrypoints_x86_64.S
@@ -1958,9 +1958,7 @@ DEFINE_FUNCTION art_quick_generic_jni_trampoline
     movq %rax, %xmm0
     ret
 .Lexception_in_native:
-    pushq %gs:THREAD_TOP_QUICK_FRAME_OFFSET
-    addq LITERAL(-1), (%rsp)  // Remove the GenericJNI tag.
-    movq (%rsp), %rsp
+    movq %gs:THREAD_TOP_QUICK_FRAME_OFFSET, %rsp
     CFI_DEF_CFA_REGISTER(rsp)
     // Do a call to push a new save-all frame required by the runtime.
     call .Lexception_call
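The assembly hunks above all delete the same idiom. Before this revert, the runtime stored the GenericJNI top-of-stack pointer with its low bit set as a tag (an ArtMethod** is at least 4-byte aligned, so bit 0 is free), and each architecture's exception path had to strip that bit before restoring SP. A minimal C++ sketch of that tagging scheme, with hypothetical names; this is an illustration of the removed convention, not the runtime's actual code:

#include <cassert>
#include <cstdint>

using ArtMethodPtrPtr = void**;  // stand-in for ArtMethod**

// Tag an aligned stack pointer as "pushed by the GenericJNI trampoline".
inline uintptr_t TagGenericJni(ArtMethodPtrPtr sp) {
  uintptr_t raw = reinterpret_cast<uintptr_t>(sp);
  assert((raw & 1u) == 0u);  // alignment guarantees bit 0 is clear
  return raw | 1u;           // set the tag bit
}

inline bool IsGenericJniFrame(uintptr_t tagged_sp) {
  return (tagged_sp & 1u) != 0u;  // test the tag bit
}

inline ArtMethodPtrPtr UntagSp(uintptr_t tagged_sp) {
  return reinterpret_cast<ArtMethodPtrPtr>(tagged_sp & ~uintptr_t{1});  // clear the tag bit
}

The deleted `add ip, ip, #-1` / `addl LITERAL(-1), (%esp)` instructions were the assembly counterpart of UntagSp(); with the tag gone, a single load into SP suffices, which is exactly what the `+` lines do.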
diff --git a/runtime/art_method-inl.h b/runtime/art_method-inl.h
index 31abf94889..50913def93 100644
--- a/runtime/art_method-inl.h
+++ b/runtime/art_method-inl.h
@@ -95,12 +95,10 @@ inline uint16_t ArtMethod::GetMethodIndexDuringLinking() {
   return method_index_;
 }
 
-template <ReadBarrierOption kReadBarrierOption>
 inline uint32_t ArtMethod::GetDexMethodIndex() {
   if (kCheckDeclaringClassState) {
-    CHECK(IsRuntimeMethod() ||
-          GetDeclaringClass<kReadBarrierOption>()->IsIdxLoaded() ||
-          GetDeclaringClass<kReadBarrierOption>()->IsErroneous());
+    CHECK(IsRuntimeMethod() || GetDeclaringClass()->IsIdxLoaded() ||
+          GetDeclaringClass()->IsErroneous());
   }
   return GetDexMethodIndexUnchecked();
 }
@@ -204,14 +202,7 @@ inline const char* ArtMethod::GetShorty() {
 inline const char* ArtMethod::GetShorty(uint32_t* out_length) {
   DCHECK(!IsProxyMethod());
   const DexFile* dex_file = GetDexFile();
-  // Don't do a read barrier in the DCHECK() inside GetDexMethodIndex() as GetShorty()
-  // can be called when the declaring class is about to be unloaded and cannot be added
-  // to the mark stack (subsequent GC assertion would fail).
-  // It is safe to avoid the read barrier as the ArtMethod is constructed with a declaring
-  // Class already satisfying the DCHECK() inside GetDexMethodIndex(), so even if that copy
-  // of declaring class becomes a from-space object, it shall satisfy the DCHECK().
-  return dex_file->GetMethodShorty(dex_file->GetMethodId(GetDexMethodIndex<kWithoutReadBarrier>()),
-                                   out_length);
+  return dex_file->GetMethodShorty(dex_file->GetMethodId(GetDexMethodIndex()), out_length);
 }
 
 inline const Signature ArtMethod::GetSignature() {
@@ -328,7 +319,7 @@ inline mirror::ClassLoader* ArtMethod::GetClassLoader() {
 
 template <ReadBarrierOption kReadBarrierOption>
 inline mirror::DexCache* ArtMethod::GetDexCache() {
-  if (LIKELY(!IsObsolete<kReadBarrierOption>())) {
+  if (LIKELY(!IsObsolete())) {
     mirror::Class* klass = GetDeclaringClass<kReadBarrierOption>();
     return klass->GetDexCache<kDefaultVerifyFlags, kReadBarrierOption>();
   } else {
diff --git a/runtime/art_method.cc b/runtime/art_method.cc
index bdbc4509f3..fa0c501e31 100644
--- a/runtime/art_method.cc
+++ b/runtime/art_method.cc
@@ -587,6 +587,11 @@ const OatQuickMethodHeader* ArtMethod::GetOatQuickMethodHeader(uintptr_t pc) {
   CHECK(existing_entry_point != nullptr) << PrettyMethod() << "@" << this;
   ClassLinker* class_linker = runtime->GetClassLinker();
 
+  if (class_linker->IsQuickGenericJniStub(existing_entry_point)) {
+    // The generic JNI does not have any method header.
+    return nullptr;
+  }
+
   if (existing_entry_point == GetQuickProxyInvokeHandler()) {
     DCHECK(IsProxyMethod() && !IsConstructor());
     // The proxy entry point does not have any method header.
@@ -594,8 +599,7 @@ const OatQuickMethodHeader* ArtMethod::GetOatQuickMethodHeader(uintptr_t pc) {
   }
 
   // Check whether the current entry point contains this pc.
-  if (!class_linker->IsQuickGenericJniStub(existing_entry_point) &&
-      !class_linker->IsQuickResolutionStub(existing_entry_point) &&
+  if (!class_linker->IsQuickResolutionStub(existing_entry_point) &&
       !class_linker->IsQuickToInterpreterBridge(existing_entry_point)) {
     OatQuickMethodHeader* method_header =
         OatQuickMethodHeader::FromEntryPoint(existing_entry_point);
@@ -628,13 +632,19 @@ const OatQuickMethodHeader* ArtMethod::GetOatQuickMethodHeader(uintptr_t pc) {
   OatFile::OatMethod oat_method =
       FindOatMethodFor(this, class_linker->GetImagePointerSize(), &found);
   if (!found) {
-    if (IsNative()) {
-      // We are running the GenericJNI stub. The entrypoint may point
-      // to different entrypoints or to a JIT-compiled JNI stub.
-      DCHECK(class_linker->IsQuickGenericJniStub(existing_entry_point) ||
-             class_linker->IsQuickResolutionStub(existing_entry_point) ||
-             existing_entry_point == GetQuickInstrumentationEntryPoint() ||
-             (jit != nullptr && jit->GetCodeCache()->ContainsPc(existing_entry_point)));
+    if (class_linker->IsQuickResolutionStub(existing_entry_point)) {
+      // We are running the generic jni stub, but the entry point of the method has not
+      // been updated yet.
+      DCHECK_EQ(pc, 0u) << "Should be a downcall";
+      DCHECK(IsNative());
+      return nullptr;
+    }
+    if (existing_entry_point == GetQuickInstrumentationEntryPoint()) {
+      // We are running the generic jni stub, but the method is being instrumented.
+      // NB We would normally expect the pc to be zero but we can have non-zero pc's if
+      // instrumentation is installed or removed during the call which is using the generic jni
+      // trampoline.
+      DCHECK(IsNative());
       return nullptr;
     }
     // Only for unit tests.
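The restored early return above relies on the JIT/AOT code layout: compiled code is preceded in memory by its OatQuickMethodHeader, so OatQuickMethodHeader::FromEntryPoint() can recover the header with pointer arithmetic, but only for real compiled code. Runtime stubs such as the generic JNI stub are shared by many methods and carry no header, hence the nullptr result. A simplified sketch of that layout assumption (field set reduced; not the real header definition):

#include <cstdint>

// Sketch: the header sits immediately before the code it describes.
struct QuickHeader {
  uint32_t frame_size_in_bytes;
  uint32_t code_size;
  // ... the method's machine code starts right after the header ...
};

inline QuickHeader* HeaderFromEntryPoint(const void* entry_point) {
  // The entry point addresses the first instruction, so step back by the
  // header size. Stub entrypoints must be filtered out first: they have no
  // such header, which is why the code above returns nullptr for them.
  uintptr_t code = reinterpret_cast<uintptr_t>(entry_point);
  return reinterpret_cast<QuickHeader*>(code - sizeof(QuickHeader));
}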
diff --git a/runtime/art_method.h b/runtime/art_method.h
index 0a592e0528..dca6f37254 100644
--- a/runtime/art_method.h
+++ b/runtime/art_method.h
@@ -242,9 +242,8 @@ class ArtMethod FINAL {
     return (GetAccessFlags() & kAccDefault) != 0;
   }
 
-  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
   bool IsObsolete() {
-    return (GetAccessFlags<kReadBarrierOption>() & kAccObsoleteMethod) != 0;
+    return (GetAccessFlags() & kAccObsoleteMethod) != 0;
   }
 
   void SetIsObsolete() {
@@ -377,7 +376,6 @@ class ArtMethod FINAL {
   ALWAYS_INLINE uint32_t GetDexMethodIndexUnchecked() {
     return dex_method_index_;
   }
-  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
   ALWAYS_INLINE uint32_t GetDexMethodIndex() REQUIRES_SHARED(Locks::mutator_lock_);
 
   void SetDexMethodIndex(uint32_t new_idx) {
@@ -462,11 +460,12 @@ class ArtMethod FINAL {
   }
 
   ProfilingInfo* GetProfilingInfo(PointerSize pointer_size) REQUIRES_SHARED(Locks::mutator_lock_) {
-    // Don't do a read barrier in the DCHECK() inside GetAccessFlags() called by IsNative(),
-    // as GetProfilingInfo is called in places where the declaring class is treated as a weak
-    // reference (accessing it with a read barrier would either prevent unloading the class,
-    // or crash the runtime if the GC wants to unload it).
-    if (UNLIKELY(IsNative<kWithoutReadBarrier>()) || UNLIKELY(IsProxyMethod())) {
+    // Don't do a read barrier in the DCHECK, as GetProfilingInfo is called in places
+    // where the declaring class is treated as a weak reference (accessing it with
+    // a read barrier would either prevent unloading the class, or crash the runtime if
+    // the GC wants to unload it).
+    DCHECK(!IsNative<kWithoutReadBarrier>());
+    if (UNLIKELY(IsProxyMethod())) {
       return nullptr;
     }
     return reinterpret_cast<ProfilingInfo*>(GetDataPtrSize(pointer_size));
diff --git a/runtime/entrypoints/entrypoint_utils.cc b/runtime/entrypoints/entrypoint_utils.cc
index f3450da306..2bf4372b1f 100644
--- a/runtime/entrypoints/entrypoint_utils.cc
+++ b/runtime/entrypoints/entrypoint_utils.cc
@@ -245,7 +245,7 @@ ArtMethod* GetCalleeSaveMethodCaller(ArtMethod** sp, CalleeSaveType type, bool d
 CallerAndOuterMethod GetCalleeSaveMethodCallerAndOuterMethod(Thread* self, CalleeSaveType type) {
   CallerAndOuterMethod result;
   ScopedAssertNoThreadSuspension ants(__FUNCTION__);
-  ArtMethod** sp = self->GetManagedStack()->GetTopQuickFrameKnownNotTagged();
+  ArtMethod** sp = self->GetManagedStack()->GetTopQuickFrame();
   auto outer_caller_and_pc = DoGetCalleeSaveMethodOuterCallerAndPc(sp, type);
   result.outer_method = outer_caller_and_pc.first;
   uintptr_t caller_pc = outer_caller_and_pc.second;
@@ -256,7 +256,7 @@ CallerAndOuterMethod GetCalleeSaveMethodCallerAndOuterMethod(Thread* self, Calle
 
 ArtMethod* GetCalleeSaveOuterMethod(Thread* self, CalleeSaveType type) {
   ScopedAssertNoThreadSuspension ants(__FUNCTION__);
-  ArtMethod** sp = self->GetManagedStack()->GetTopQuickFrameKnownNotTagged();
+  ArtMethod** sp = self->GetManagedStack()->GetTopQuickFrame();
   return DoGetCalleeSaveMethodOuterCallerAndPc(sp, type).first;
 }
 
diff --git a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
index 0a76cddf5e..2496aa0f58 100644
--- a/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
+++ b/runtime/entrypoints/quick/quick_trampoline_entrypoints.cc
@@ -31,7 +31,6 @@
 #include "index_bss_mapping.h"
 #include "instrumentation.h"
 #include "interpreter/interpreter.h"
-#include "jit/jit.h"
 #include "linear_alloc.h"
 #include "method_handles.h"
 #include "method_reference.h"
@@ -2168,11 +2167,6 @@ extern "C" TwoWordReturn artQuickGenericJniTrampoline(Thread* self, ArtMethod**
   // Note: We cannot walk the stack properly until fixed up below.
   ArtMethod* called = *sp;
   DCHECK(called->IsNative()) << called->PrettyMethod(true);
-  Runtime* runtime = Runtime::Current();
-  jit::Jit* jit = runtime->GetJit();
-  if (jit != nullptr) {
-    jit->AddSamples(self, called, 1u, /*with_backedges*/ false);
-  }
   uint32_t shorty_len = 0;
   const char* shorty = called->GetShorty(&shorty_len);
   bool critical_native = called->IsCriticalNative();
@@ -2194,7 +2188,7 @@ extern "C" TwoWordReturn artQuickGenericJniTrampoline(Thread* self, ArtMethod**
   }
 
   // Fix up managed-stack things in Thread. After this we can walk the stack.
-  self->SetTopOfStackTagged(sp);
+  self->SetTopOfStack(sp);
 
   self->VerifyStack();
 
@@ -2314,7 +2308,6 @@ extern "C" uint64_t artQuickGenericJniEndTrampoline(Thread* self,
   // anything that requires a mutator lock before that would cause problems as GC may have the
   // exclusive mutator lock and may be moving objects, etc.
   ArtMethod** sp = self->GetManagedStack()->GetTopQuickFrame();
-  DCHECK(self->GetManagedStack()->GetTopQuickFrameTag());
   uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
   ArtMethod* called = *sp;
   uint32_t cookie = *(sp32 - 1);
diff --git a/runtime/jit/jit.cc b/runtime/jit/jit.cc
index 0d95bc6e64..953e195480 100644
--- a/runtime/jit/jit.cc
+++ b/runtime/jit/jit.cc
@@ -643,7 +643,7 @@ void Jit::AddSamples(Thread* self, ArtMethod* method, uint16_t count, bool with_
     return;
   }
 
-  if (method->IsClassInitializer() || !method->IsCompilable()) {
+  if (method->IsClassInitializer() || method->IsNative() || !method->IsCompilable()) {
     // We do not want to compile such methods.
     return;
   }
@@ -659,8 +659,7 @@ void Jit::AddSamples(Thread* self, ArtMethod* method, uint16_t count, bool with_
     count *= priority_thread_weight_;
   }
   int32_t new_count = starting_count + count;   // int32 here to avoid wrap-around;
-  // Note: Native method have no "warm" state or profiling info.
-  if (LIKELY(!method->IsNative()) && starting_count < warm_method_threshold_) {
+  if (starting_count < warm_method_threshold_) {
     if ((new_count >= warm_method_threshold_) &&
         (method->GetProfilingInfo(kRuntimePointerSize) == nullptr)) {
       bool success = ProfilingInfo::Create(self, method, /* retry_allocation */ false);
@@ -697,7 +696,6 @@ void Jit::AddSamples(Thread* self, ArtMethod* method, uint16_t count, bool with_
       // If the samples don't contain any back edge, we don't increment the hotness.
       return;
     }
-    DCHECK(!method->IsNative());  // No back edges reported for native methods.
     if ((new_count >= osr_method_threshold_) &&  !code_cache_->IsOsrCompiled(method)) {
       DCHECK(thread_pool_ != nullptr);
       thread_pool_->AddTask(self, new JitCompileTask(method, JitCompileTask::kCompileOsr));
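For context on the jit.cc hunks: AddSamples() drives a per-method hotness counter across three thresholds — warm (allocate a ProfilingInfo), hot (queue a compile task), and OSR (queue an on-stack-replacement compile) — and the revert excludes native methods up front instead of special-casing them at each threshold. A condensed sketch of that decision ladder; the constants are illustrative placeholders, the real thresholds are configurable runtime options:

enum class Action { kNone, kCreateProfilingInfo, kCompile, kCompileOsr };

// Sketch of the threshold logic in Jit::AddSamples() (simplified).
Action OnSamples(int32_t old_count, int32_t new_count, bool with_backedges) {
  constexpr int32_t kWarm = 5000, kHot = 10000, kOsr = 60000;  // illustrative
  if (old_count < kWarm && new_count >= kWarm) {
    return Action::kCreateProfilingInfo;  // method became warm: start profiling
  }
  if (old_count < kHot && new_count >= kHot) {
    return Action::kCompile;              // method became hot: JIT-compile it
  }
  if (old_count < kOsr && new_count >= kOsr && with_backedges) {
    return Action::kCompileOsr;           // hot loop: compile for on-stack replacement
  }
  return Action::kNone;
}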
diff --git a/runtime/jit/jit_code_cache.cc b/runtime/jit/jit_code_cache.cc
index a5c167eee8..32205138bd 100644
--- a/runtime/jit/jit_code_cache.cc
+++ b/runtime/jit/jit_code_cache.cc
@@ -55,107 +55,6 @@ static constexpr int kProtCode = PROT_READ | PROT_EXEC;
 static constexpr size_t kCodeSizeLogThreshold = 50 * KB;
 static constexpr size_t kStackMapSizeLogThreshold = 50 * KB;
 
-class JitCodeCache::JniStubKey {
- public:
-  explicit JniStubKey(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_)
-      : shorty_(method->GetShorty()),
-        is_static_(method->IsStatic()),
-        is_fast_native_(method->IsFastNative()),
-        is_critical_native_(method->IsCriticalNative()),
-        is_synchronized_(method->IsSynchronized()) {
-    DCHECK(!(is_fast_native_ && is_critical_native_));
-  }
-
-  bool operator<(const JniStubKey& rhs) const {
-    if (is_static_ != rhs.is_static_) {
-      return rhs.is_static_;
-    }
-    if (is_synchronized_ != rhs.is_synchronized_) {
-      return rhs.is_synchronized_;
-    }
-    if (is_fast_native_ != rhs.is_fast_native_) {
-      return rhs.is_fast_native_;
-    }
-    if (is_critical_native_ != rhs.is_critical_native_) {
-      return rhs.is_critical_native_;
-    }
-    return strcmp(shorty_, rhs.shorty_) < 0;
-  }
-
-  // Update the shorty to point to another method's shorty. Call this function when removing
-  // the method that references the old shorty from JniCodeData and not removing the entire
-  // JniCodeData; the old shorty may become a dangling pointer when that method is unloaded.
-  void UpdateShorty(ArtMethod* method) const REQUIRES_SHARED(Locks::mutator_lock_) {
-    const char* shorty = method->GetShorty();
-    DCHECK_STREQ(shorty_, shorty);
-    shorty_ = shorty;
-  }
-
- private:
-  // The shorty points to a DexFile data and may need to change
-  // to point to the same shorty in a different DexFile.
-  mutable const char* shorty_;
-
-  const bool is_static_;
-  const bool is_fast_native_;
-  const bool is_critical_native_;
-  const bool is_synchronized_;
-};
-
-class JitCodeCache::JniStubData {
- public:
-  JniStubData() : code_(nullptr), methods_() {}
-
-  void SetCode(const void* code) {
-    DCHECK(code != nullptr);
-    code_ = code;
-  }
-
-  const void* GetCode() const {
-    return code_;
-  }
-
-  bool IsCompiled() const {
-    return GetCode() != nullptr;
-  }
-
-  void AddMethod(ArtMethod* method) {
-    if (!ContainsElement(methods_, method)) {
-      methods_.push_back(method);
-    }
-  }
-
-  const std::vector<ArtMethod*>& GetMethods() const {
-    return methods_;
-  }
-
-  void RemoveMethodsIn(const LinearAlloc& alloc) {
-    auto kept_end = std::remove_if(
-        methods_.begin(),
-        methods_.end(),
-        [&alloc](ArtMethod* method) { return alloc.ContainsUnsafe(method); });
-    methods_.erase(kept_end, methods_.end());
-  }
-
-  bool RemoveMethod(ArtMethod* method) {
-    auto it = std::find(methods_.begin(), methods_.end(), method);
-    if (it != methods_.end()) {
-      methods_.erase(it);
-      return true;
-    } else {
-      return false;
-    }
-  }
-
-  void MoveObsoleteMethod(ArtMethod* old_method, ArtMethod* new_method) {
-    std::replace(methods_.begin(), methods_.end(), old_method, new_method);
-  }
-
- private:
-  const void* code_;
-  std::vector<ArtMethod*> methods_;
-};
-
 JitCodeCache* JitCodeCache::Create(size_t initial_capacity,
                                    size_t max_capacity,
                                    bool generate_debug_info,
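The deleted JniStubKey/JniStubData pair implemented stub deduplication: all native methods that share a shorty and the same static/synchronized/fast/critical flags can share one compiled JNI stub, so the map key is exactly that tuple and operator< defines a strict weak ordering over it. A reduced sketch of the idea (two flags and a std::string shorty instead of the real five-field key with a raw C-string):

#include <map>
#include <string>
#include <tuple>

// Sketch of the stub-dedup key behind the removed JniStubKey (simplified).
struct StubKey {
  std::string shorty;
  bool is_static;
  bool is_synchronized;

  bool operator<(const StubKey& rhs) const {
    return std::tie(is_static, is_synchronized, shorty) <
           std::tie(rhs.is_static, rhs.is_synchronized, rhs.shorty);
  }
};

// Two native methods `int f(long)` and `int g(long)`, both static and
// non-synchronized, produce equal keys and therefore share one map entry
// (one compiled stub):
std::map<StubKey, const void*> stub_map;

Because keys compare equal across methods from different dex files, the real key stored the shorty as a pointer into dex data, and the removed UpdateShorty() had to repoint it when the method owning that data was unloaded — the dangling-pointer hazard its comment describes.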
@@ -294,36 +193,14 @@ bool JitCodeCache::ContainsPc(const void* ptr) const {
 
 bool JitCodeCache::ContainsMethod(ArtMethod* method) {
   MutexLock mu(Thread::Current(), lock_);
-  if (UNLIKELY(method->IsNative())) {
-    auto it = jni_stubs_map_.find(JniStubKey(method));
-    if (it != jni_stubs_map_.end() &&
-        it->second.IsCompiled() &&
-        ContainsElement(it->second.GetMethods(), method)) {
+  for (auto& it : method_code_map_) {
+    if (it.second == method) {
       return true;
     }
-  } else {
-    for (const auto& it : method_code_map_) {
-      if (it.second == method) {
-        return true;
-      }
-    }
   }
   return false;
 }
 
-const void* JitCodeCache::GetJniStubCode(ArtMethod* method) {
-  DCHECK(method->IsNative());
-  MutexLock mu(Thread::Current(), lock_);
-  auto it = jni_stubs_map_.find(JniStubKey(method));
-  if (it != jni_stubs_map_.end()) {
-    JniStubData& data = it->second;
-    if (data.IsCompiled() && ContainsElement(data.GetMethods(), method)) {
-      return data.GetCode();
-    }
-  }
-  return nullptr;
-}
-
 class ScopedCodeCacheWrite : ScopedTrace {
  public:
   explicit ScopedCodeCacheWrite(MemMap* code_map, bool only_for_tlb_shootdown = false)
@@ -549,9 +426,7 @@ void JitCodeCache::FreeCode(const void* code_ptr) {
   // Notify native debugger that we are about to remove the code.
   // It does nothing if we are not using native debugger.
   DeleteJITCodeEntryForAddress(reinterpret_cast<uintptr_t>(code_ptr));
-  if (OatQuickMethodHeader::FromCodePointer(code_ptr)->IsOptimized()) {
-    FreeData(GetRootTable(code_ptr));
-  }  // else this is a JNI stub without any data.
+  FreeData(GetRootTable(code_ptr));
   FreeCode(reinterpret_cast<uint8_t*>(allocation));
 }
 
@@ -588,16 +463,6 @@ void JitCodeCache::RemoveMethodsIn(Thread* self, const LinearAlloc& alloc) {
   // lead to a deadlock.
   {
     ScopedCodeCacheWrite scc(code_map_.get());
-    for (auto it = jni_stubs_map_.begin(); it != jni_stubs_map_.end();) {
-      it->second.RemoveMethodsIn(alloc);
-      if (it->second.GetMethods().empty()) {
-        method_headers.insert(OatQuickMethodHeader::FromCodePointer(it->second.GetCode()));
-        it = jni_stubs_map_.erase(it);
-      } else {
-        it->first.UpdateShorty(it->second.GetMethods().front());
-        ++it;
-      }
-    }
     for (auto it = method_code_map_.begin(); it != method_code_map_.end();) {
       if (alloc.ContainsUnsafe(it->second)) {
        method_headers.insert(OatQuickMethodHeader::FromCodePointer(it->first));
@@ -707,8 +572,7 @@ uint8_t* JitCodeCache::CommitCodeInternal(Thread* self,
     bool has_should_deoptimize_flag,
     const ArenaSet<ArtMethod*>& cha_single_implementation_list) {
-  DCHECK_NE(stack_map != nullptr, method->IsNative());
-  DCHECK(!method->IsNative() || !osr);
+  DCHECK(stack_map != nullptr);
   size_t alignment = GetInstructionSetAlignment(kRuntimeISA);
   // Ensure the header ends up at expected instruction alignment.
   size_t header_size = RoundUp(sizeof(OatQuickMethodHeader), alignment);
@@ -732,8 +596,8 @@ uint8_t* JitCodeCache::CommitCodeInternal(Thread* self,
       std::copy(code, code + code_size, code_ptr);
       method_header = OatQuickMethodHeader::FromCodePointer(code_ptr);
       new (method_header) OatQuickMethodHeader(
-          (stack_map != nullptr) ? code_ptr - stack_map : 0u,
-          (method_info != nullptr) ? code_ptr - method_info : 0u,
+          code_ptr - stack_map,
+          code_ptr - method_info,
           frame_size_in_bytes,
           core_spill_mask,
           fp_spill_mask,
@@ -788,40 +652,24 @@ uint8_t* JitCodeCache::CommitCodeInternal(Thread* self,
     // possible that the compiled code is considered invalidated by some class linking,
     // but below we still make the compiled code valid for the method.
     MutexLock mu(self, lock_);
-    if (UNLIKELY(method->IsNative())) {
-      DCHECK(stack_map == nullptr);
-      DCHECK(roots_data == nullptr);
-      auto it = jni_stubs_map_.find(JniStubKey(method));
-      DCHECK(it != jni_stubs_map_.end())
-          << "Entry inserted in NotifyCompilationOf() should be alive.";
-      JniStubData* data = &it->second;
-      DCHECK(ContainsElement(data->GetMethods(), method))
-          << "Entry inserted in NotifyCompilationOf() should contain this method.";
-      data->SetCode(code_ptr);
-      instrumentation::Instrumentation* instrum = Runtime::Current()->GetInstrumentation();
-      for (ArtMethod* m : data->GetMethods()) {
-        instrum->UpdateMethodsCode(m, method_header->GetEntryPoint());
-      }
+    // Fill the root table before updating the entry point.
+    DCHECK_EQ(FromStackMapToRoots(stack_map), roots_data);
+    DCHECK_LE(roots_data, stack_map);
+    FillRootTable(roots_data, roots);
+    {
+      // Flush data cache, as compiled code references literals in it.
+      // We also need a TLB shootdown to act as memory barrier across cores.
+      ScopedCodeCacheWrite ccw(code_map_.get(), /* only_for_tlb_shootdown */ true);
+      FlushDataCache(reinterpret_cast<char*>(roots_data),
+                     reinterpret_cast<char*>(roots_data + data_size));
+    }
+    method_code_map_.Put(code_ptr, method);
+    if (osr) {
+      number_of_osr_compilations_++;
+      osr_code_map_.Put(method, code_ptr);
     } else {
-      // Fill the root table before updating the entry point.
-      DCHECK_EQ(FromStackMapToRoots(stack_map), roots_data);
-      DCHECK_LE(roots_data, stack_map);
-      FillRootTable(roots_data, roots);
-      {
-        // Flush data cache, as compiled code references literals in it.
-        // We also need a TLB shootdown to act as memory barrier across cores.
-        ScopedCodeCacheWrite ccw(code_map_.get(), /* only_for_tlb_shootdown */ true);
-        FlushDataCache(reinterpret_cast<char*>(roots_data),
-                       reinterpret_cast<char*>(roots_data + data_size));
-      }
-      method_code_map_.Put(code_ptr, method);
-      if (osr) {
-        number_of_osr_compilations_++;
-        osr_code_map_.Put(method, code_ptr);
-      } else {
-        Runtime::Current()->GetInstrumentation()->UpdateMethodsCode(
-            method, method_header->GetEntryPoint());
-      }
+      Runtime::Current()->GetInstrumentation()->UpdateMethodsCode(
+          method, method_header->GetEntryPoint());
     }
     if (collection_in_progress_) {
       // We need to update the live bitmap if there is a GC to ensure it sees this new
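The retained logic in CommitCodeInternal() follows a publish protocol: write the root table and code into the cache, flush, and only then expose the new entry point. A hedged sketch of that ordering; ART uses its own FlushDataCache()/ScopedCodeCacheWrite helpers, and __builtin___clear_cache below is a portable compiler builtin used as a stand-in, not what the runtime actually calls:

#include <atomic>

// Sketch of the "write, flush, then publish" ordering.
void PublishCode(char* code_begin, char* code_end,
                 std::atomic<const void*>* entry_point_slot) {
  // 1. The code bytes were already copied into [code_begin, code_end).
  // 2. Flush so no core can execute stale instruction bytes.
  __builtin___clear_cache(code_begin, code_end);
  // 3. Only now publish the entry point; a thread that loads it with
  //    acquire semantics is guaranteed to see the flushed code.
  entry_point_slot->store(code_begin, std::memory_order_release);
}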
@@ -855,18 +703,45 @@ size_t JitCodeCache::CodeCacheSize() {
 }
 
 bool JitCodeCache::RemoveMethod(ArtMethod* method, bool release_memory) {
-  // This function is used only for testing and only with non-native methods.
-  CHECK(!method->IsNative());
-
   MutexLock mu(Thread::Current(), lock_);
+  if (method->IsNative()) {
+    return false;
+  }
+
+  bool in_cache = false;
+  {
+    ScopedCodeCacheWrite ccw(code_map_.get());
+    for (auto code_iter = method_code_map_.begin(); code_iter != method_code_map_.end();) {
+      if (code_iter->second == method) {
+        if (release_memory) {
+          FreeCode(code_iter->first);
+        }
+        code_iter = method_code_map_.erase(code_iter);
+        in_cache = true;
+        continue;
+      }
+      ++code_iter;
+    }
+  }
 
-  bool osr = osr_code_map_.find(method) != osr_code_map_.end();
-  bool in_cache = RemoveMethodLocked(method, release_memory);
+  bool osr = false;
+  auto code_map = osr_code_map_.find(method);
+  if (code_map != osr_code_map_.end()) {
+    osr_code_map_.erase(code_map);
+    osr = true;
+  }
 
   if (!in_cache) {
     return false;
   }
 
+  ProfilingInfo* info = method->GetProfilingInfo(kRuntimePointerSize);
+  if (info != nullptr) {
+    auto profile = std::find(profiling_infos_.begin(), profiling_infos_.end(), info);
+    DCHECK(profile != profiling_infos_.end());
+    profiling_infos_.erase(profile);
+  }
+  method->SetProfilingInfo(nullptr);
   method->ClearCounter();
   Runtime::Current()->GetInstrumentation()->UpdateMethodsCode(
       method, GetQuickToInterpreterBridge());
@@ -878,58 +753,34 @@ bool JitCodeCache::RemoveMethod(ArtMethod* method, bool release_memory) {
   return true;
 }
 
-bool JitCodeCache::RemoveMethodLocked(ArtMethod* method, bool release_memory) {
-  if (LIKELY(!method->IsNative())) {
-    ProfilingInfo* info = method->GetProfilingInfo(kRuntimePointerSize);
-    if (info != nullptr) {
-      RemoveElement(profiling_infos_, info);
-    }
-    method->SetProfilingInfo(nullptr);
-  }
-
-  bool in_cache = false;
-  ScopedCodeCacheWrite ccw(code_map_.get());
-  if (UNLIKELY(method->IsNative())) {
-    auto it = jni_stubs_map_.find(JniStubKey(method));
-    if (it != jni_stubs_map_.end() && it->second.RemoveMethod(method)) {
-      in_cache = true;
-      if (it->second.GetMethods().empty()) {
-        if (release_memory) {
-          FreeCode(it->second.GetCode());
-        }
-        jni_stubs_map_.erase(it);
-      } else {
-        it->first.UpdateShorty(it->second.GetMethods().front());
-      }
-    }
-  } else {
-    for (auto it = method_code_map_.begin(); it != method_code_map_.end();) {
-      if (it->second == method) {
-        in_cache = true;
-        if (release_memory) {
-          FreeCode(it->first);
-        }
-        it = method_code_map_.erase(it);
-      } else {
-        ++it;
-      }
-    }
-
-    auto osr_it = osr_code_map_.find(method);
-    if (osr_it != osr_code_map_.end()) {
-      osr_code_map_.erase(osr_it);
-    }
-  }
-
-  return in_cache;
-}
-
 // This notifies the code cache that the given method has been redefined and that it should remove
 // any cached information it has on the method. All threads must be suspended before calling this
 // method. The compiled code for the method (if there is any) must not be in any threads call stack.
 void JitCodeCache::NotifyMethodRedefined(ArtMethod* method) {
   MutexLock mu(Thread::Current(), lock_);
-  RemoveMethodLocked(method, /* release_memory */ true);
+  if (method->IsNative()) {
+    return;
+  }
+  ProfilingInfo* info = method->GetProfilingInfo(kRuntimePointerSize);
+  if (info != nullptr) {
+    auto profile = std::find(profiling_infos_.begin(), profiling_infos_.end(), info);
+    DCHECK(profile != profiling_infos_.end());
+    profiling_infos_.erase(profile);
+  }
+  method->SetProfilingInfo(nullptr);
+  ScopedCodeCacheWrite ccw(code_map_.get());
+  for (auto code_iter = method_code_map_.begin(); code_iter != method_code_map_.end();) {
+    if (code_iter->second == method) {
+      FreeCode(code_iter->first);
+      code_iter = method_code_map_.erase(code_iter);
+      continue;
+    }
+    ++code_iter;
+  }
+  auto code_map = osr_code_map_.find(method);
+  if (code_map != osr_code_map_.end()) {
+    osr_code_map_.erase(code_map);
+  }
 }
 
 // This invalidates old_method. Once this function returns one can no longer use old_method to
@@ -939,15 +790,11 @@ void JitCodeCache::NotifyMethodRedefined(ArtMethod* method) {
 // shouldn't be used since it is no longer logically in the jit code cache.
 // TODO We should add DCHECKS that validate that the JIT is paused when this method is entered.
 void JitCodeCache::MoveObsoleteMethod(ArtMethod* old_method, ArtMethod* new_method) {
-  MutexLock mu(Thread::Current(), lock_);
+  // Native methods have no profiling info and need no special handling from the JIT code cache.
   if (old_method->IsNative()) {
-    // Update methods in jni_stubs_map_.
-    for (auto& entry : jni_stubs_map_) {
-      JniStubData& data = entry.second;
-      data.MoveObsoleteMethod(old_method, new_method);
-    }
     return;
   }
+  MutexLock mu(Thread::Current(), lock_);
   // Update ProfilingInfo to the new one and remove it from the old_method.
   if (old_method->GetProfilingInfo(kRuntimePointerSize) != nullptr) {
     DCHECK_EQ(old_method->GetProfilingInfo(kRuntimePointerSize)->GetMethod(), old_method);
@@ -1089,7 +936,7 @@ class MarkCodeClosure FINAL : public Closure {
       // its stack frame, it is not the method owning return_pc_. We just pass null to
       // LookupMethodHeader: the method is only checked against in debug builds.
       OatQuickMethodHeader* method_header =
-          code_cache_->LookupMethodHeader(frame.return_pc_, /* method */ nullptr);
+          code_cache_->LookupMethodHeader(frame.return_pc_, nullptr);
       if (method_header != nullptr) {
         const void* code = method_header->GetCode();
         CHECK(code_cache_->GetLiveBitmap()->Test(FromCodeToAllocation(code)));
@@ -1242,7 +1089,7 @@ void JitCodeCache::GarbageCollectCache(Thread* self) {
       const void* entry_point = info->GetMethod()->GetEntryPointFromQuickCompiledCode();
       if (ContainsPc(entry_point)) {
         info->SetSavedEntryPoint(entry_point);
-        // Don't call Instrumentation::UpdateMethodsCode(), as it can check the declaring
+        // Don't call Instrumentation::UpdateMethods, as it can check the declaring
         // class of the method. We may be concurrently running a GC which makes accessing
         // the class unsafe. We know it is OK to bypass the instrumentation as we've just
         // checked that the current entry point is JIT compiled code.
@@ -1251,25 +1098,6 @@ void JitCodeCache::GarbageCollectCache(Thread* self) {
     }
 
     DCHECK(CheckLiveCompiledCodeHasProfilingInfo());
-
-    // Change entry points of native methods back to the GenericJNI entrypoint.
-    for (const auto& entry : jni_stubs_map_) {
-      const JniStubData& data = entry.second;
-      if (!data.IsCompiled()) {
-        continue;
-      }
-      // Make sure a single invocation of the GenericJNI trampoline tries to recompile.
-      uint16_t new_counter = Runtime::Current()->GetJit()->HotMethodThreshold() - 1u;
-      const OatQuickMethodHeader* method_header =
-          OatQuickMethodHeader::FromCodePointer(data.GetCode());
-      for (ArtMethod* method : data.GetMethods()) {
-        if (method->GetEntryPointFromQuickCompiledCode() == method_header->GetEntryPoint()) {
-          // Don't call Instrumentation::UpdateMethodsCode(), same as for normal methods above.
-          method->SetCounter(new_counter);
-          method->SetEntryPointFromQuickCompiledCode(GetQuickGenericJniStub());
-        }
-      }
-    }
   }
   live_bitmap_.reset(nullptr);
   NotifyCollectionDone(self);
@@ -1285,22 +1113,13 @@ void JitCodeCache::RemoveUnmarkedCode(Thread* self) {
   MutexLock mu(self, lock_);
   ScopedCodeCacheWrite scc(code_map_.get());
   // Iterate over all compiled code and remove entries that are not marked.
-  for (auto it = jni_stubs_map_.begin(); it != jni_stubs_map_.end();) {
-    JniStubData* data = &it->second;
-    if (!data->IsCompiled() || GetLiveBitmap()->Test(FromCodeToAllocation(data->GetCode()))) {
-      ++it;
-    } else {
-      method_headers.insert(OatQuickMethodHeader::FromCodePointer(data->GetCode()));
-      it = jni_stubs_map_.erase(it);
-    }
-  }
   for (auto it = method_code_map_.begin(); it != method_code_map_.end();) {
     const void* code_ptr = it->first;
     uintptr_t allocation = FromCodeToAllocation(code_ptr);
     if (GetLiveBitmap()->Test(allocation)) {
       ++it;
     } else {
-      method_headers.insert(OatQuickMethodHeader::FromCodePointer(code_ptr));
+      method_headers.insert(OatQuickMethodHeader::FromCodePointer(it->first));
       it = method_code_map_.erase(it);
     }
   }
@@ -1339,17 +1158,6 @@ void JitCodeCache::DoCollection(Thread* self, bool collect_profiling_info) {
     // an entry point is either:
     // - an osr compiled code, that will be removed if not in a thread call stack.
     // - discarded compiled code, that will be removed if not in a thread call stack.
-    for (const auto& entry : jni_stubs_map_) {
-      const JniStubData& data = entry.second;
-      const void* code_ptr = data.GetCode();
-      const OatQuickMethodHeader* method_header = OatQuickMethodHeader::FromCodePointer(code_ptr);
-      for (ArtMethod* method : data.GetMethods()) {
-        if (method_header->GetEntryPoint() == method->GetEntryPointFromQuickCompiledCode()) {
-          GetLiveBitmap()->AtomicTestAndSet(FromCodeToAllocation(code_ptr));
-          break;
-        }
-      }
-    }
     for (const auto& it : method_code_map_) {
       ArtMethod* method = it.second;
       const void* code_ptr = it.first;
@@ -1429,51 +1237,19 @@ OatQuickMethodHeader* JitCodeCache::LookupMethodHeader(uintptr_t pc, ArtMethod*
     return nullptr;
   }
 
-  if (!kIsDebugBuild) {
-    // Called with null `method` only from MarkCodeClosure::Run() in debug build.
-    CHECK(method != nullptr);
-  }
-
   MutexLock mu(Thread::Current(), lock_);
-  OatQuickMethodHeader* method_header = nullptr;
-  ArtMethod* found_method = nullptr;  // Only for DCHECK(), not for JNI stubs.
-  if (method != nullptr && UNLIKELY(method->IsNative())) {
-    auto it = jni_stubs_map_.find(JniStubKey(method));
-    if (it == jni_stubs_map_.end() || !ContainsElement(it->second.GetMethods(), method)) {
-      return nullptr;
-    }
-    const void* code_ptr = it->second.GetCode();
-    method_header = OatQuickMethodHeader::FromCodePointer(code_ptr);
-    if (!method_header->Contains(pc)) {
-      return nullptr;
-    }
-  } else {
-    auto it = method_code_map_.lower_bound(reinterpret_cast<const void*>(pc));
-    if (it != method_code_map_.begin()) {
-      --it;
-      const void* code_ptr = it->first;
-      if (OatQuickMethodHeader::FromCodePointer(code_ptr)->Contains(pc)) {
-        method_header = OatQuickMethodHeader::FromCodePointer(code_ptr);
-        found_method = it->second;
-      }
-    }
-    if (method_header == nullptr && method == nullptr) {
-      // Scan all compiled JNI stubs as well. This slow search is used only
-      // for checks in debug build, for release builds the `method` is not null.
-      for (auto&& entry : jni_stubs_map_) {
-        const JniStubData& data = entry.second;
-        if (data.IsCompiled() &&
-            OatQuickMethodHeader::FromCodePointer(data.GetCode())->Contains(pc)) {
-          method_header = OatQuickMethodHeader::FromCodePointer(data.GetCode());
-        }
-      }
-    }
-    if (method_header == nullptr) {
-      return nullptr;
-    }
+  if (method_code_map_.empty()) {
+    return nullptr;
   }
+  auto it = method_code_map_.lower_bound(reinterpret_cast<const void*>(pc));
+  --it;
 
-  if (kIsDebugBuild && method != nullptr && !method->IsNative()) {
+  const void* code_ptr = it->first;
+  OatQuickMethodHeader* method_header = OatQuickMethodHeader::FromCodePointer(code_ptr);
+  if (!method_header->Contains(pc)) {
+    return nullptr;
+  }
+  if (kIsDebugBuild && method != nullptr) {
     // When we are walking the stack to redefine classes and creating obsolete methods it is
     // possible that we might have updated the method_code_map by making this method obsolete in a
     // previous frame. Therefore we should just check that the non-obsolete version of this method
    // is the one we expect. We change to the non-obsolete versions in the error message since the
    // obsolete version of the method might not be fully initialized yet. This situation can only
     // occur when we are in the process of allocating and setting up obsolete methods. Otherwise
     // method and it->second should be identical. (See openjdkjvmti/ti_redefine.cc for more
     // information.)
-    DCHECK_EQ(found_method->GetNonObsoleteMethod(), method->GetNonObsoleteMethod())
+    DCHECK_EQ(it->second->GetNonObsoleteMethod(), method->GetNonObsoleteMethod())
         << ArtMethod::PrettyMethod(method->GetNonObsoleteMethod()) << " "
-        << ArtMethod::PrettyMethod(found_method->GetNonObsoleteMethod()) << " "
+        << ArtMethod::PrettyMethod(it->second->GetNonObsoleteMethod()) << " "
         << std::hex << pc;
   }
   return method_header;
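With the JNI-stub path gone, LookupMethodHeader() is again a single search in a map sorted by code address: lower_bound(pc) finds the first region starting at or after pc, and stepping back one entry yields the only region that could contain it. A self-contained sketch of the pattern (slightly more defensive than the restored code, which guards only the empty-map case before --it and relies on Contains(pc) to reject misses):

#include <cstdint>
#include <map>

struct Region { uintptr_t size; };  // stand-in for the method header info

// Sketch of the lower_bound-based PC lookup: the map is keyed by code start
// address, so the entry containing `pc` (if any) is the last entry whose
// key is <= pc.
const Region* FindRegion(const std::map<uintptr_t, Region>& regions, uintptr_t pc) {
  if (regions.empty()) {
    return nullptr;  // the guard the restored code adds before --it
  }
  auto it = regions.lower_bound(pc);  // first entry with key >= pc
  if (it == regions.begin() && it->first != pc) {
    return nullptr;  // pc lies below the lowest region
  }
  if (it == regions.end() || it->first > pc) {
    --it;  // step back to the region starting at or before pc
  }
  return (pc < it->first + it->second.size) ? &it->second : nullptr;
}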
@@ -1673,51 +1449,21 @@ bool JitCodeCache::NotifyCompilationOf(ArtMethod* method, Thread* self, bool osr
     return false;
   }
 
-  if (UNLIKELY(method->IsNative())) {
-    JniStubKey key(method);
-    auto it = jni_stubs_map_.find(key);
-    bool new_compilation = false;
-    if (it == jni_stubs_map_.end()) {
-      // Create a new entry to mark the stub as being compiled.
-      it = jni_stubs_map_.Put(key, JniStubData{});
-      new_compilation = true;
-    }
-    JniStubData* data = &it->second;
-    data->AddMethod(method);
-    if (data->IsCompiled()) {
-      OatQuickMethodHeader* method_header = OatQuickMethodHeader::FromCodePointer(data->GetCode());
-      const void* entrypoint = method_header->GetEntryPoint();
-      // Update also entrypoints of other methods held by the JniStubData.
-      // We could simply update the entrypoint of `method` but if the last JIT GC has
-      // changed these entrypoints to GenericJNI in preparation for a full GC, we may
-      // as well change them back as this stub shall not be collected anyway and this
-      // can avoid a few expensive GenericJNI calls.
-      instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
-      for (ArtMethod* m : data->GetMethods()) {
-        instrumentation->UpdateMethodsCode(m, entrypoint);
-      }
-      if (collection_in_progress_) {
-        GetLiveBitmap()->AtomicTestAndSet(FromCodeToAllocation(data->GetCode()));
-      }
-    }
-    return new_compilation;
-  } else {
-    ProfilingInfo* info = method->GetProfilingInfo(kRuntimePointerSize);
-    if (info == nullptr) {
-      VLOG(jit) << method->PrettyMethod() << " needs a ProfilingInfo to be compiled";
-      // Because the counter is not atomic, there are some rare cases where we may not hit the
-      // threshold for creating the ProfilingInfo. Reset the counter now to "correct" this.
-      ClearMethodCounter(method, /*was_warm*/ false);
-      return false;
-    }
-
-    if (info->IsMethodBeingCompiled(osr)) {
-      return false;
-    }
+  ProfilingInfo* info = method->GetProfilingInfo(kRuntimePointerSize);
+  if (info == nullptr) {
+    VLOG(jit) << method->PrettyMethod() << " needs a ProfilingInfo to be compiled";
+    // Because the counter is not atomic, there are some rare cases where we may not hit the
+    // threshold for creating the ProfilingInfo. Reset the counter now to "correct" this.
+    ClearMethodCounter(method, /*was_warm*/ false);
+    return false;
+  }
 
-    info->SetIsMethodBeingCompiled(true, osr);
-    return true;
+  if (info->IsMethodBeingCompiled(osr)) {
+    return false;
   }
+
+  info->SetIsMethodBeingCompiled(true, osr);
+  return true;
 }
 
 ProfilingInfo* JitCodeCache::NotifyCompilerUse(ArtMethod* method, Thread* self) {
@@ -1739,23 +1485,10 @@ void JitCodeCache::DoneCompilerUse(ArtMethod* method, Thread* self) {
   info->DecrementInlineUse();
 }
 
-void JitCodeCache::DoneCompiling(ArtMethod* method, Thread* self, bool osr) {
-  DCHECK_EQ(Thread::Current(), self);
-  MutexLock mu(self, lock_);
-  if (UNLIKELY(method->IsNative())) {
-    auto it = jni_stubs_map_.find(JniStubKey(method));
-    DCHECK(it != jni_stubs_map_.end());
-    JniStubData* data = &it->second;
-    DCHECK(ContainsElement(data->GetMethods(), method));
-    if (UNLIKELY(!data->IsCompiled())) {
-      // Failed to compile; the JNI compiler never fails, but the cache may be full.
-      jni_stubs_map_.erase(it);  // Remove the entry added in NotifyCompilationOf().
-    }  // else CommitCodeInternal() updated entrypoints of all methods in the JniStubData.
-  } else {
-    ProfilingInfo* info = method->GetProfilingInfo(kRuntimePointerSize);
-    DCHECK(info->IsMethodBeingCompiled(osr));
-    info->SetIsMethodBeingCompiled(false, osr);
-  }
+void JitCodeCache::DoneCompiling(ArtMethod* method, Thread* self ATTRIBUTE_UNUSED, bool osr) {
+  ProfilingInfo* info = method->GetProfilingInfo(kRuntimePointerSize);
+  DCHECK(info->IsMethodBeingCompiled(osr));
+  info->SetIsMethodBeingCompiled(false, osr);
 }
 
 size_t JitCodeCache::GetMemorySizeOfCodePointer(const void* ptr) {
@@ -1765,7 +1498,6 @@ size_t JitCodeCache::GetMemorySizeOfCodePointer(const void* ptr) {
 
 void JitCodeCache::InvalidateCompiledCodeFor(ArtMethod* method,
                                              const OatQuickMethodHeader* header) {
-  DCHECK(!method->IsNative());
   ProfilingInfo* profiling_info = method->GetProfilingInfo(kRuntimePointerSize);
   if ((profiling_info != nullptr) &&
       (profiling_info->GetSavedEntryPoint() == header->GetEntryPoint())) {
@@ -1821,7 +1553,6 @@ void JitCodeCache::Dump(std::ostream& os) {
   os << "Current JIT code cache size: " << PrettySize(used_memory_for_code_) << "\n"
      << "Current JIT data cache size: " << PrettySize(used_memory_for_data_) << "\n"
      << "Current JIT capacity: " << PrettySize(current_capacity_) << "\n"
-     << "Current number of JIT JNI stub entries: " << jni_stubs_map_.size() << "\n"
      << "Current number of JIT code cache entries: " << method_code_map_.size() << "\n"
      << "Total number of JIT compilations: " << number_of_compilations_ << "\n"
      << "Total number of JIT compilations for on stack replacement: "
diff --git a/runtime/jit/jit_code_cache.h b/runtime/jit/jit_code_cache.h
index fc011ddb96..46a408590b 100644
--- a/runtime/jit/jit_code_cache.h
+++ b/runtime/jit/jit_code_cache.h
@@ -35,23 +35,9 @@ template<class T> class Handle;
 class LinearAlloc;
 class InlineCache;
 class IsMarkedVisitor;
-class JitJniStubTestHelper;
 class OatQuickMethodHeader;
 struct ProfileMethodInfo;
 class ProfilingInfo;
-class Thread;
-
-namespace gc {
-namespace accounting {
-template<size_t kAlignment> class MemoryRangeBitmap;
-}  // namespace accounting
-}  // namespace gc
-
-namespace mirror {
-class Class;
-class Object;
-template<class T> class ObjectArray;
-}  // namespace mirror
 
 namespace gc {
 namespace accounting {
@@ -151,9 +137,6 @@ class JitCodeCache {
   // Return true if the code cache contains this method.
   bool ContainsMethod(ArtMethod* method) REQUIRES(!lock_);
 
-  // Return the code pointer for a JNI-compiled stub if the method is in the cache, null otherwise.
-  const void* GetJniStubCode(ArtMethod* method) REQUIRES(!lock_);
-
   // Allocate a region of data that contain `size` bytes, and potentially space
   // for storing `number_of_roots` roots. Returns null if there is no more room.
   // Return the number of bytes allocated.
@@ -177,6 +160,11 @@ class JitCodeCache {
     return live_bitmap_.get();
   }
 
+  // Return whether we should do a full collection given the current state of the cache.
+  bool ShouldDoFullCollection()
+      REQUIRES(lock_)
+      REQUIRES_SHARED(Locks::mutator_lock_);
+
   // Perform a collection on the code cache.
   void GarbageCollectCache(Thread* self)
       REQUIRES(!lock_)
@@ -308,12 +296,6 @@ class JitCodeCache {
       REQUIRES(!lock_)
       REQUIRES(!Locks::cha_lock_);
 
-  // Removes method from the cache. The caller must ensure that all threads
-  // are suspended and the method should not be in any thread's stack.
-  bool RemoveMethodLocked(ArtMethod* method, bool release_memory)
-      REQUIRES(lock_)
-      REQUIRES(Locks::mutator_lock_);
-
   // Free in the mspace allocations for `code_ptr`.
   void FreeCode(const void* code_ptr) REQUIRES(lock_);
 
@@ -333,11 +315,6 @@ class JitCodeCache {
   // Set the footprint limit of the code cache.
   void SetFootprintLimit(size_t new_footprint) REQUIRES(lock_);
 
-  // Return whether we should do a full collection given the current state of the cache.
-  bool ShouldDoFullCollection()
-      REQUIRES(lock_)
-      REQUIRES_SHARED(Locks::mutator_lock_);
-
   void DoCollection(Thread* self, bool collect_profiling_info)
       REQUIRES(!lock_)
       REQUIRES_SHARED(Locks::mutator_lock_);
@@ -364,9 +341,6 @@ class JitCodeCache {
       REQUIRES(!lock_)
       REQUIRES_SHARED(Locks::mutator_lock_);
 
-  class JniStubKey;
-  class JniStubData;
-
   // Lock for guarding allocations, collections, and the method_code_map_.
   Mutex lock_;
   // Condition to wait on during collection.
@@ -383,8 +357,6 @@ class JitCodeCache {
   void* data_mspace_ GUARDED_BY(lock_);
   // Bitmap for collecting code and data.
   std::unique_ptr<CodeCacheBitmap> live_bitmap_;
-  // Holds compiled code associated with the shorty for a JNI stub.
-  SafeMap<JniStubKey, JniStubData> jni_stubs_map_ GUARDED_BY(lock_);
   // Holds compiled code associated to the ArtMethod.
   SafeMap<const void*, ArtMethod*> method_code_map_ GUARDED_BY(lock_);
   // Holds osr compiled code associated to the ArtMethod.
@@ -446,7 +418,6 @@ class JitCodeCache {
   // Condition to wait on for accessing inline caches.
   ConditionVariable inline_cache_cond_ GUARDED_BY(lock_);
 
-  friend class art::JitJniStubTestHelper;
   DISALLOW_IMPLICIT_CONSTRUCTORS(JitCodeCache);
 };
 
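jit_code_cache.h leans heavily on Clang thread-safety annotations: GUARDED_BY(lock_) ties each map to the cache lock, REQUIRES(lock_) marks helpers whose callers must already hold it (the contract the deleted RemoveMethodLocked() had), and REQUIRES(!lock_) marks entry points that acquire it themselves. A minimal compilable sketch of the pattern, with the macro definitions inlined here for self-containment (ART gets them from base/mutex.h):

#include <map>

#if defined(__clang__)
#define CAPABILITY(x) __attribute__((capability(x)))
#define GUARDED_BY(x) __attribute__((guarded_by(x)))
#define REQUIRES(...) __attribute__((requires_capability(__VA_ARGS__)))
#else
#define CAPABILITY(x)
#define GUARDED_BY(x)
#define REQUIRES(...)
#endif

class CAPABILITY("mutex") Lock {};  // stand-in for art::Mutex

class Cache {
 public:
  // Public entry point: must be called WITHOUT the lock held; it would
  // acquire lock_ itself and then call the Locked variant (elided here).
  bool Remove(int key) REQUIRES(!lock_) { return false; }

 private:
  // Internal helper: the caller must already hold lock_.
  bool RemoveLocked(int key) REQUIRES(lock_) { return map_.erase(key) != 0; }

  Lock lock_;
  std::map<int, int> map_ GUARDED_BY(lock_);  // only touched under lock_
};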
diff --git a/runtime/jit/profile_saver.cc b/runtime/jit/profile_saver.cc
index acbc6e63a4..01853de403 100644
--- a/runtime/jit/profile_saver.cc
+++ b/runtime/jit/profile_saver.cc
@@ -357,8 +357,8 @@ static void SampleClassesAndExecutedMethods(pthread_t profiler_pthread,
         sampled_methods->AddReference(method.GetDexFile(), method.GetDexMethodIndex());
       }
     } else {
-      // We do not record native methods. Once we AOT-compile the app, all native
-      // methods shall have their thunks compiled.
+      CHECK_EQ(method.GetCounter(), 0u) << method.PrettyMethod()
+          << " access_flags=" << method.GetAccessFlags();
     }
   }
 }
diff --git a/runtime/managed_stack-inl.h b/runtime/managed_stack-inl.h
index 678be8e098..689dd8009a 100644
--- a/runtime/managed_stack-inl.h
+++ b/runtime/managed_stack-inl.h
@@ -24,7 +24,7 @@ namespace art {
 
 inline ShadowFrame* ManagedStack::PushShadowFrame(ShadowFrame* new_top_frame) {
-  DCHECK(!HasTopQuickFrame());
+  DCHECK(top_quick_frame_ == nullptr);
   ShadowFrame* old_frame = top_shadow_frame_;
   top_shadow_frame_ = new_top_frame;
   new_top_frame->SetLink(old_frame);
@@ -32,7 +32,7 @@ inline ShadowFrame* ManagedStack::PushShadowFrame(ShadowFrame* new_top_frame) {
 }
 
 inline ShadowFrame* ManagedStack::PopShadowFrame() {
-  DCHECK(!HasTopQuickFrame());
+  DCHECK(top_quick_frame_ == nullptr);
   CHECK(top_shadow_frame_ != nullptr);
   ShadowFrame* frame = top_shadow_frame_;
   top_shadow_frame_ = frame->GetLink();
diff --git a/runtime/managed_stack.h b/runtime/managed_stack.h
index 07078ecb13..4f1984d55a 100644
--- a/runtime/managed_stack.h
+++ b/runtime/managed_stack.h
@@ -24,7 +24,6 @@
 
 #include "base/logging.h"
 #include "base/macros.h"
 #include "base/mutex.h"
-#include "base/bit_utils.h"
 
 namespace art {
 
@@ -43,9 +42,7 @@ template <typename T> class StackReference;
 class PACKED(4) ManagedStack {
  public:
   ManagedStack()
-      : tagged_top_quick_frame_(TaggedTopQuickFrame::CreateNotTagged(nullptr)),
-        link_(nullptr),
-        top_shadow_frame_(nullptr) {}
+      : top_quick_frame_(nullptr), link_(nullptr), top_shadow_frame_(nullptr) {}
 
   void PushManagedStackFragment(ManagedStack* fragment) {
     // Copy this top fragment into given fragment.
@@ -66,36 +63,17 @@ class PACKED(4) ManagedStack {
     return link_;
   }
 
-  ArtMethod** GetTopQuickFrameKnownNotTagged() const {
-    return tagged_top_quick_frame_.GetSpKnownNotTagged();
-  }
-
   ArtMethod** GetTopQuickFrame() const {
-    return tagged_top_quick_frame_.GetSp();
-  }
-
-  bool GetTopQuickFrameTag() const {
-    return tagged_top_quick_frame_.GetTag();
-  }
-
-  bool HasTopQuickFrame() const {
-    return tagged_top_quick_frame_.GetTaggedSp() != 0u;
+    return top_quick_frame_;
   }
 
   void SetTopQuickFrame(ArtMethod** top) {
     DCHECK(top_shadow_frame_ == nullptr);
-    DCHECK_ALIGNED(top, 4u);
-    tagged_top_quick_frame_ = TaggedTopQuickFrame::CreateNotTagged(top);
+    top_quick_frame_ = top;
   }
 
-  void SetTopQuickFrameTagged(ArtMethod** top) {
-    DCHECK(top_shadow_frame_ == nullptr);
-    DCHECK_ALIGNED(top, 4u);
-    tagged_top_quick_frame_ = TaggedTopQuickFrame::CreateTagged(top);
-  }
-
-  static size_t TaggedTopQuickFrameOffset() {
-    return OFFSETOF_MEMBER(ManagedStack, tagged_top_quick_frame_);
+  static size_t TopQuickFrameOffset() {
+    return OFFSETOF_MEMBER(ManagedStack, top_quick_frame_);
   }
 
   ALWAYS_INLINE ShadowFrame* PushShadowFrame(ShadowFrame* new_top_frame);
@@ -105,12 +83,8 @@ class PACKED(4) ManagedStack {
     return top_shadow_frame_;
   }
 
-  bool HasTopShadowFrame() const {
-    return GetTopShadowFrame() != nullptr;
-  }
-
   void SetTopShadowFrame(ShadowFrame* top) {
-    DCHECK_EQ(tagged_top_quick_frame_.GetTaggedSp(), 0u);
+    DCHECK(top_quick_frame_ == nullptr);
     top_shadow_frame_ = top;
   }
 
@@ -123,47 +97,7 @@ class PACKED(4) ManagedStack {
   bool ShadowFramesContain(StackReference<mirror::Object>* shadow_frame_entry) const;
 
  private:
-  // Encodes the top quick frame (which must be at least 4-byte aligned)
-  // and a flag that marks the GenericJNI trampoline.
-  class TaggedTopQuickFrame {
-   public:
-    static TaggedTopQuickFrame CreateNotTagged(ArtMethod** sp) {
-      DCHECK_ALIGNED(sp, 4u);
-      return TaggedTopQuickFrame(reinterpret_cast<uintptr_t>(sp));
-    }
-
-    static TaggedTopQuickFrame CreateTagged(ArtMethod** sp) {
-      DCHECK_ALIGNED(sp, 4u);
-      return TaggedTopQuickFrame(reinterpret_cast<uintptr_t>(sp) | 1u);
-    }
-
-    // Get SP known to be not tagged and non-null.
-    ArtMethod** GetSpKnownNotTagged() const {
-      DCHECK(!GetTag());
-      DCHECK_NE(tagged_sp_, 0u);
-      return reinterpret_cast<ArtMethod**>(tagged_sp_);
-    }
-
-    ArtMethod** GetSp() const {
-      return reinterpret_cast<ArtMethod**>(tagged_sp_ & ~static_cast<uintptr_t>(1u));
-    }
-
-    bool GetTag() const {
-      return (tagged_sp_ & 1u) != 0u;
-    }
-
-    uintptr_t GetTaggedSp() const {
-      return tagged_sp_;
-    }
-
-   private:
-    explicit TaggedTopQuickFrame(uintptr_t tagged_sp) : tagged_sp_(tagged_sp) { }
-
-    uintptr_t tagged_sp_;
-  };
-  static_assert(sizeof(TaggedTopQuickFrame) == sizeof(uintptr_t), "TaggedTopQuickFrame size check");
-
-  TaggedTopQuickFrame tagged_top_quick_frame_;
+  ArtMethod** top_quick_frame_;
   ManagedStack* link_;
   ShadowFrame* top_shadow_frame_;
 };
diff --git a/runtime/stack.cc b/runtime/stack.cc
index 5ad1f7c9c5..ab9fb0d73f 100644
--- a/runtime/stack.cc
+++ b/runtime/stack.cc
@@ -735,19 +735,12 @@ QuickMethodFrameInfo StackVisitor::GetCurrentQuickFrameInfo() const {
     return runtime->GetCalleeSaveMethodFrameInfo(CalleeSaveType::kSaveRefsAndArgs);
   }
 
-  // The only remaining case is if the method is native and uses the generic JNI stub,
-  // called either directly or through some (resolution, instrumentation) trampoline.
+  // The only remaining case is if the method is native and uses the generic JNI stub.
   DCHECK(method->IsNative());
-  if (kIsDebugBuild) {
-    ClassLinker* class_linker = runtime->GetClassLinker();
-    const void* entry_point = runtime->GetInstrumentation()->GetQuickCodeFor(method,
-                                                                             kRuntimePointerSize);
-    CHECK(class_linker->IsQuickGenericJniStub(entry_point) ||
-          // The current entrypoint (after filtering out trampolines) may have changed
-          // from GenericJNI to JIT-compiled stub since we have entered this frame.
-          (runtime->GetJit() != nullptr &&
-           runtime->GetJit()->GetCodeCache()->ContainsPc(entry_point))) << method->PrettyMethod();
-  }
+  ClassLinker* class_linker = runtime->GetClassLinker();
+  const void* entry_point = runtime->GetInstrumentation()->GetQuickCodeFor(method,
+                                                                           kRuntimePointerSize);
+  DCHECK(class_linker->IsQuickGenericJniStub(entry_point)) << method->PrettyMethod();
   // Generic JNI frame.
   uint32_t handle_refs = GetNumberOfReferenceArgsWithoutReceiver(method) + 1;
   size_t scope_size = HandleScope::SizeOf(handle_refs);
@@ -783,48 +776,8 @@ void StackVisitor::WalkStack(bool include_transitions) {
       // Can't be both a shadow and a quick fragment.
       DCHECK(current_fragment->GetTopShadowFrame() == nullptr);
       ArtMethod* method = *cur_quick_frame_;
-      DCHECK(method != nullptr);
-      bool header_retrieved = false;
-      if (method->IsNative()) {
-        // We do not have a PC for the first frame, so we cannot simply use
-        // ArtMethod::GetOatQuickMethodHeader() as we're unable to distinguish there
-        // between GenericJNI frame and JIT-compiled JNI stub; the entrypoint may have
-        // changed since the frame was entered. The top quick frame tag indicates
-        // GenericJNI here, otherwise it's either AOT-compiled or JNI-compiled JNI stub.
-        if (UNLIKELY(current_fragment->GetTopQuickFrameTag())) {
-          // The generic JNI does not have any method header.
-          cur_oat_quick_method_header_ = nullptr;
-        } else {
-          const void* existing_entry_point = method->GetEntryPointFromQuickCompiledCode();
-          CHECK(existing_entry_point != nullptr);
-          Runtime* runtime = Runtime::Current();
-          ClassLinker* class_linker = runtime->GetClassLinker();
-          // Check whether we can quickly get the header from the current entrypoint.
-          if (!class_linker->IsQuickGenericJniStub(existing_entry_point) &&
-              !class_linker->IsQuickResolutionStub(existing_entry_point) &&
-              existing_entry_point != GetQuickInstrumentationEntryPoint()) {
-            cur_oat_quick_method_header_ =
-                OatQuickMethodHeader::FromEntryPoint(existing_entry_point);
-          } else {
-            const void* code = method->GetOatMethodQuickCode(class_linker->GetImagePointerSize());
-            if (code != nullptr) {
-              cur_oat_quick_method_header_ = OatQuickMethodHeader::FromEntryPoint(code);
-            } else {
-              // This must be a JITted JNI stub frame.
-              CHECK(runtime->GetJit() != nullptr);
-              code = runtime->GetJit()->GetCodeCache()->GetJniStubCode(method);
-              CHECK(code != nullptr) << method->PrettyMethod();
-              cur_oat_quick_method_header_ = OatQuickMethodHeader::FromCodePointer(code);
-            }
-          }
-        }
-        header_retrieved = true;
-      }
       while (method != nullptr) {
-        if (!header_retrieved) {
-          cur_oat_quick_method_header_ = method->GetOatQuickMethodHeader(cur_quick_frame_pc_);
-        }
-        header_retrieved = false;  // Force header retrieval in next iteration.
+        cur_oat_quick_method_header_ = method->GetOatQuickMethodHeader(cur_quick_frame_pc_);
 
         SanityCheckFrame();
 
         if ((walk_kind_ == StackWalkKind::kIncludeInlinedFrames)
diff --git a/runtime/stack.h b/runtime/stack.h
index a16930bba0..bd6204f8d2 100644
--- a/runtime/stack.h
+++ b/runtime/stack.h
@@ -140,7 +140,8 @@ class StackVisitor {
   };
 
   template <CountTransitions kCount = CountTransitions::kYes>
-  void WalkStack(bool include_transitions = false) REQUIRES_SHARED(Locks::mutator_lock_);
+  void WalkStack(bool include_transitions = false)
+      REQUIRES_SHARED(Locks::mutator_lock_);
 
   Thread* GetThread() const {
     return thread_;
diff --git a/runtime/thread.cc b/runtime/thread.cc
index bec1c908ad..712eabc888 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -1884,7 +1884,9 @@ static bool ShouldShowNativeStack(const Thread* thread)
   }
 
   // Threads with no managed stack frames should be shown.
-  if (!thread->HasManagedStack()) {
+  const ManagedStack* managed_stack = thread->GetManagedStack();
+  if (managed_stack == nullptr || (managed_stack->GetTopQuickFrame() == nullptr &&
+      managed_stack->GetTopShadowFrame() == nullptr)) {
     return true;
   }
 
diff --git a/runtime/thread.h b/runtime/thread.h
index 0803975d26..39be66d5c2 100644
--- a/runtime/thread.h
+++ b/runtime/thread.h
@@ -474,16 +474,13 @@ class Thread {
     tlsPtr_.managed_stack.SetTopQuickFrame(top_method);
   }
 
-  void SetTopOfStackTagged(ArtMethod** top_method) {
-    tlsPtr_.managed_stack.SetTopQuickFrameTagged(top_method);
-  }
-
   void SetTopOfShadowStack(ShadowFrame* top) {
     tlsPtr_.managed_stack.SetTopShadowFrame(top);
   }
 
   bool HasManagedStack() const {
-    return tlsPtr_.managed_stack.HasTopQuickFrame() || tlsPtr_.managed_stack.HasTopShadowFrame();
+    return (tlsPtr_.managed_stack.GetTopQuickFrame() != nullptr) ||
+        (tlsPtr_.managed_stack.GetTopShadowFrame() != nullptr);
   }
 
   // If 'msg' is null, no detail message is set.
diff --git a/runtime/stack.h b/runtime/stack.h
index a16930bba0..bd6204f8d2 100644
--- a/runtime/stack.h
+++ b/runtime/stack.h
@@ -140,7 +140,8 @@ class StackVisitor {
   };

   template <CountTransitions kCount = CountTransitions::kYes>
-  void WalkStack(bool include_transitions = false) REQUIRES_SHARED(Locks::mutator_lock_);
+  void WalkStack(bool include_transitions = false)
+      REQUIRES_SHARED(Locks::mutator_lock_);

   Thread* GetThread() const {
     return thread_;
diff --git a/runtime/thread.cc b/runtime/thread.cc
index bec1c908ad..712eabc888 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -1884,7 +1884,9 @@ static bool ShouldShowNativeStack(const Thread* thread)
   }

   // Threads with no managed stack frames should be shown.
-  if (!thread->HasManagedStack()) {
+  const ManagedStack* managed_stack = thread->GetManagedStack();
+  if (managed_stack == nullptr || (managed_stack->GetTopQuickFrame() == nullptr &&
+      managed_stack->GetTopShadowFrame() == nullptr)) {
     return true;
   }
diff --git a/runtime/thread.h b/runtime/thread.h
index 0803975d26..39be66d5c2 100644
--- a/runtime/thread.h
+++ b/runtime/thread.h
@@ -474,16 +474,13 @@ class Thread {
     tlsPtr_.managed_stack.SetTopQuickFrame(top_method);
   }

-  void SetTopOfStackTagged(ArtMethod** top_method) {
-    tlsPtr_.managed_stack.SetTopQuickFrameTagged(top_method);
-  }
-
   void SetTopOfShadowStack(ShadowFrame* top) {
     tlsPtr_.managed_stack.SetTopShadowFrame(top);
   }

   bool HasManagedStack() const {
-    return tlsPtr_.managed_stack.HasTopQuickFrame() || tlsPtr_.managed_stack.HasTopShadowFrame();
+    return (tlsPtr_.managed_stack.GetTopQuickFrame() != nullptr) ||
+        (tlsPtr_.managed_stack.GetTopShadowFrame() != nullptr);
   }

   // If 'msg' is null, no detail message is set.
@@ -836,7 +833,7 @@ class Thread {
   static ThreadOffset<pointer_size> TopOfManagedStackOffset() {
     return ThreadOffsetFromTlsPtr<pointer_size>(
         OFFSETOF_MEMBER(tls_ptr_sized_values, managed_stack) +
-        ManagedStack::TaggedTopQuickFrameOffset());
+        ManagedStack::TopQuickFrameOffset());
   }

   const ManagedStack* GetManagedStack() const {
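TopOfManagedStackOffset() composes two offsets, the managed stack's offset inside the thread-local block and the frame pointer's offset inside ManagedStack, so that assembly code can load the field from the thread register directly. A standalone sketch of this composed-offsetof pattern, with stand-in struct names rather than ART's:

    #include <cassert>
    #include <cstddef>

    // Stand-ins for ManagedStack and the thread-local block; standard-layout
    // types so offsetof() is well defined.
    struct ManagedStackLike {
      void* top_quick_frame;
      void* link;
      void* top_shadow_frame;
      static constexpr size_t TopQuickFrameOffset() {
        return offsetof(ManagedStackLike, top_quick_frame);
      }
    };

    struct ThreadLocalsLike {
      void* card_table;  // Some earlier field, to make the offset non-zero.
      ManagedStackLike managed_stack;
    };

    int main() {
      ThreadLocalsLike tls{};
      int frame = 0;
      size_t offset = offsetof(ThreadLocalsLike, managed_stack) +
                      ManagedStackLike::TopQuickFrameOffset();
      // Writing through base + offset reaches tls.managed_stack.top_quick_frame.
      *reinterpret_cast<void**>(reinterpret_cast<char*>(&tls) + offset) = &frame;
      assert(tls.managed_stack.top_quick_frame == &frame);
      return 0;
    }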
diff --git a/test/655-jit-clinit/src/Main.java b/test/655-jit-clinit/src/Main.java
index 2fb8f2a86e..44b315478f 100644
--- a/test/655-jit-clinit/src/Main.java
+++ b/test/655-jit-clinit/src/Main.java
@@ -23,7 +23,7 @@ public class Main {
     Foo.hotMethod();
   }

-  public native static boolean hasJitCompiledEntrypoint(Class<?> cls, String methodName);
+  public native static boolean isJitCompiled(Class<?> cls, String methodName);
   private native static boolean hasJit();
 }

@@ -36,7 +36,7 @@ class Foo {
   static {
     array = new Object[10000];
-    while (!Main.hasJitCompiledEntrypoint(Foo.class, "hotMethod")) {
+    while (!Main.isJitCompiled(Foo.class, "hotMethod")) {
       Foo.hotMethod();
       try {
         // Sleep to give a chance for the JIT to compile `hotMethod`.
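isJitCompiled() (implemented in test/common/runtime_state.cc, further down in this change) treats a method as JIT-compiled when its current entrypoint falls inside the JIT code cache. A simplified standalone sketch of that range-containment check; the cache layout here is a stand-in, not ART's JitCodeCache:

    #include <cassert>
    #include <cstdint>

    // A code cache reduced to its memory range: an entrypoint counts as
    // JIT-compiled when it points into [begin, end).
    struct CodeCacheLike {
      uintptr_t begin;
      uintptr_t end;
      bool ContainsPc(const void* pc) const {
        uintptr_t p = reinterpret_cast<uintptr_t>(pc);
        return begin <= p && p < end;
      }
    };

    int main() {
      unsigned char region[64];
      CodeCacheLike cache{reinterpret_cast<uintptr_t>(region),
                          reinterpret_cast<uintptr_t>(region) + sizeof(region)};
      assert(cache.ContainsPc(region + 8));
      assert(!cache.ContainsPc(region + sizeof(region)));
      return 0;
    }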
diff --git a/test/667-jit-jni-stub/expected.txt b/test/667-jit-jni-stub/expected.txt
deleted file mode 100644
index 6a5618ebc6..0000000000
--- a/test/667-jit-jni-stub/expected.txt
+++ /dev/null
@@ -1 +0,0 @@
-JNI_OnLoad called
diff --git a/test/667-jit-jni-stub/info.txt b/test/667-jit-jni-stub/info.txt
deleted file mode 100644
index 6f25c44592..0000000000
--- a/test/667-jit-jni-stub/info.txt
+++ /dev/null
@@ -1 +0,0 @@
-Tests for JITting and collecting JNI stubs.
diff --git a/test/667-jit-jni-stub/jit_jni_stub_test.cc b/test/667-jit-jni-stub/jit_jni_stub_test.cc
deleted file mode 100644
index 82e06fc018..0000000000
--- a/test/667-jit-jni-stub/jit_jni_stub_test.cc
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <jni.h>
-
-#include "jit/jit.h"
-#include "jit/jit_code_cache.h"
-#include "mirror/class.h"
-#include "mirror/string.h"
-#include "runtime.h"
-#include "scoped_thread_state_change-inl.h"
-
-namespace art {
-
-// Local class declared as a friend of JitCodeCache so that we can access its internals.
-class JitJniStubTestHelper {
- public:
-  static bool isNextJitGcFull(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_) {
-    CHECK(Runtime::Current()->GetJit() != nullptr);
-    jit::JitCodeCache* cache = Runtime::Current()->GetJit()->GetCodeCache();
-    MutexLock mu(self, cache->lock_);
-    return cache->ShouldDoFullCollection();
-  }
-};
-
-// Calls through to a static method with signature "()V".
-extern "C" JNIEXPORT
-void Java_Main_callThrough(JNIEnv* env, jclass, jclass klass, jstring methodName) {
-  ScopedObjectAccess soa(Thread::Current());
-  std::string name = soa.Decode<mirror::String>(methodName)->ToModifiedUtf8();
-  jmethodID method = env->GetStaticMethodID(klass, name.c_str(), "()V");
-  CHECK(method != nullptr) << soa.Decode<mirror::Class>(klass)->PrettyDescriptor() << "." << name;
-  env->CallStaticVoidMethod(klass, method);
-}
-
-extern "C" JNIEXPORT
-void Java_Main_jitGc(JNIEnv*, jclass) {
-  CHECK(Runtime::Current()->GetJit() != nullptr);
-  jit::JitCodeCache* cache = Runtime::Current()->GetJit()->GetCodeCache();
-  ScopedObjectAccess soa(Thread::Current());
-  cache->GarbageCollectCache(Thread::Current());
-}
-
-extern "C" JNIEXPORT
-jboolean Java_Main_isNextJitGcFull(JNIEnv*, jclass) {
-  ScopedObjectAccess soa(Thread::Current());
-  return JitJniStubTestHelper::isNextJitGcFull(soa.Self());
-}
-
-}  // namespace art
diff --git a/test/667-jit-jni-stub/run b/test/667-jit-jni-stub/run
deleted file mode 100755
index 1877be482e..0000000000
--- a/test/667-jit-jni-stub/run
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/bin/bash
-#
-# Copyright (C) 2017 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Disable AOT compilation of JNI stubs.
-${RUN} "${@}" --no-prebuild --no-dex2oat
diff --git a/test/667-jit-jni-stub/src/Main.java b/test/667-jit-jni-stub/src/Main.java
deleted file mode 100644
index b867970eab..0000000000
--- a/test/667-jit-jni-stub/src/Main.java
+++ /dev/null
@@ -1,180 +0,0 @@
-/*
- * Copyright (C) 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-public class Main {
-  public static void main(String[] args) throws Exception {
-    System.loadLibrary(args[0]);
-    if (isAotCompiled(Main.class, "hasJit")) {
-      throw new Error("This test must be run with --no-prebuild --no-dex2oat!");
-    }
-    if (!hasJit()) {
-      return;
-    }
-
-    testCompilationUseAndCollection();
-    testMixedFramesOnStack();
-  }
-
-  public static void testCompilationUseAndCollection() {
-    // Test that callThrough() can be JIT-compiled.
-    assertFalse(hasJitCompiledEntrypoint(Main.class, "callThrough"));
-    assertFalse(hasJitCompiledCode(Main.class, "callThrough"));
-    ensureCompiledCallThroughEntrypoint(/* call */ true);
-    assertTrue(hasJitCompiledEntrypoint(Main.class, "callThrough"));
-    assertTrue(hasJitCompiledCode(Main.class, "callThrough"));
-
-    // Use callThrough() once again now that the method has a JIT-compiled stub.
-    callThrough(Main.class, "doNothing");
-
-    // Test that GC with the JIT-compiled stub on the stack does not collect it.
-    // Also tests stack walk over the JIT-compiled stub.
-    callThrough(Main.class, "testGcWithCallThroughStubOnStack");
-
-    // Test that, when marking used methods before a full JIT GC, a single execution
-    // of the GenericJNI trampoline can save the compiled stub from being collected.
-    testSingleInvocationTriggersRecompilation();
-
-    // Test that the JNI compiled stub can actually be collected.
-    testStubCanBeCollected();
-  }
-
-  public static void testGcWithCallThroughStubOnStack() {
-    // Check that this method was called via JIT-compiled callThrough() stub.
-    assertTrue(hasJitCompiledEntrypoint(Main.class, "callThrough"));
-    // This assertion also exercises stack walk over the JIT-compiled callThrough() stub.
-    assertTrue(new Throwable().getStackTrace()[1].getMethodName().equals("callThrough"));
-
-    doJitGcsUntilFullJitGcIsScheduled();
-    // The callThrough() on the stack above this method is using the compiled stub,
-    // so the JIT GC should not remove the compiled code.
-    jitGc();
-    assertTrue(hasJitCompiledCode(Main.class, "callThrough"));
-  }
-
-  public static void testSingleInvocationTriggersRecompilation() {
-    // After scheduling a full JIT GC, single call through the GenericJNI
-    // trampoline should ensure that the compiled stub is used again.
-    doJitGcsUntilFullJitGcIsScheduled();
-    callThrough(Main.class, "doNothing");
-    ensureCompiledCallThroughEntrypoint(/* call */ false);  // Wait for the compilation task to run.
-    assertTrue(hasJitCompiledEntrypoint(Main.class, "callThrough"));
-    jitGc();  // This JIT GC should not collect the callThrough() stub.
-    assertTrue(hasJitCompiledCode(Main.class, "callThrough"));
-  }
-
-  public static void testMixedFramesOnStack() {
-    // Starts without a compiled JNI stub for callThrough().
-    assertFalse(hasJitCompiledEntrypoint(Main.class, "callThrough"));
-    assertFalse(hasJitCompiledCode(Main.class, "callThrough"));
-    callThrough(Main.class, "testMixedFramesOnStackStage2");
-    // We have just returned through the JIT-compiled JNI stub, so it must still
-    // be compiled (though not necessarily with the entrypoint pointing to it).
-    assertTrue(hasJitCompiledCode(Main.class, "callThrough"));
-    // Though the callThrough() is on the stack, that frame is using the GenericJNI
-    // and does not prevent the collection of the JNI stub.
-    testStubCanBeCollected();
-  }
-
-  public static void testMixedFramesOnStackStage2() {
-    // We cannot assert that callThrough() has no JIT compiled stub as that check
-    // may race against the compilation task. Just check the caller.
-    assertTrue(new Throwable().getStackTrace()[1].getMethodName().equals("callThrough"));
-    // Now ensure that the JNI stub is compiled and used.
-    ensureCompiledCallThroughEntrypoint(/* call */ true);
-    callThrough(Main.class, "testMixedFramesOnStackStage3");
-  }
-
-  public static void testMixedFramesOnStackStage3() {
-    // Check that this method was called via JIT-compiled callThrough() stub.
-    assertTrue(hasJitCompiledEntrypoint(Main.class, "callThrough"));
-    // This assertion also exercises stack walk over the JIT-compiled callThrough() stub.
-    assertTrue(new Throwable().getStackTrace()[1].getMethodName().equals("callThrough"));
-    // For a good measure, try a JIT GC.
-    jitGc();
-  }
-
-  public static void testStubCanBeCollected() {
-    assertTrue(hasJitCompiledCode(Main.class, "callThrough"));
-    doJitGcsUntilFullJitGcIsScheduled();
-    assertFalse(hasJitCompiledEntrypoint(Main.class, "callThrough"));
-    assertTrue(hasJitCompiledCode(Main.class, "callThrough"));
-    jitGc();  // JIT GC without callThrough() on the stack should collect the callThrough() stub.
-    assertFalse(hasJitCompiledEntrypoint(Main.class, "callThrough"));
-    assertFalse(hasJitCompiledCode(Main.class, "callThrough"));
-  }
-
-  public static void doJitGcsUntilFullJitGcIsScheduled() {
-    // We enter with a compiled stub for callThrough() but we also need the entrypoint to be set.
-    assertTrue(hasJitCompiledCode(Main.class, "callThrough"));
-    ensureCompiledCallThroughEntrypoint(/* call */ true);
-    // Perform JIT GC until the next GC is marked to do full collection.
-    do {
-      assertTrue(hasJitCompiledEntrypoint(Main.class, "callThrough"));
-      callThrough(Main.class, "jitGc");  // JIT GC with callThrough() safely on the stack.
-    } while (!isNextJitGcFull());
-    // The JIT GC before the full collection resets entrypoints and waits to see
-    // if the methods are still in use.
-    assertFalse(hasJitCompiledEntrypoint(Main.class, "callThrough"));
-    assertTrue(hasJitCompiledCode(Main.class, "callThrough"));
-  }
-
-  public static void ensureCompiledCallThroughEntrypoint(boolean call) {
-    int count = 0;
-    while (!hasJitCompiledEntrypoint(Main.class, "callThrough")) {
-      // If `call` is true, also exercise the `callThrough()` method to increase hotness.
-      int limit = call ? 1 << Math.min(count, 12) : 0;
-      for (int i = 0; i < limit; ++i) {
-        callThrough(Main.class, "doNothing");
-      }
-      try {
-        // Sleep to give a chance for the JIT to compile `hasJit` stub.
-        Thread.sleep(100);
-      } catch (Exception e) {
-        // Ignore
-      }
-      if (++count == 50) {
-        throw new Error("TIMEOUT");
-      }
-    };
-  }
-
-  public static void assertTrue(boolean value) {
-    if (!value) {
-      throw new AssertionError("Expected true!");
-    }
-  }
-
-  public static void assertFalse(boolean value) {
-    if (value) {
-      throw new AssertionError("Expected false!");
-    }
-  }
-
-  public static void doNothing() { }
-  public static void throwError() { throw new Error(); }
-
-  // Note that the callThrough()'s shorty differs from shorties of the other
-  // native methods used in this test because of the return type `void.`
-  public native static void callThrough(Class<?> cls, String methodName);
-
-  public native static void jitGc();
-  public native static boolean isNextJitGcFull();
-
-  public native static boolean isAotCompiled(Class<?> cls, String methodName);
-  public native static boolean hasJitCompiledEntrypoint(Class<?> cls, String methodName);
-  public native static boolean hasJitCompiledCode(Class<?> cls, String methodName);
-  private native static boolean hasJit();
-}
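The deleted ensureCompiledCallThroughEntrypoint() above illustrates a wait-for-JIT pattern: spin on a predicate, run an exponentially growing batch of calls per round to raise hotness, sleep briefly, and fail after a bounded number of rounds. A standalone sketch of the same pattern, with the predicate and workload as placeholder callables rather than the test's natives:

    #include <algorithm>
    #include <chrono>
    #include <functional>
    #include <stdexcept>
    #include <thread>

    // Spin until `done` holds. Each round runs an exponentially growing
    // amount of `work`, then sleeps; the bounded round count turns a JIT
    // that never compiles into a visible timeout instead of a hang.
    void WaitUntil(const std::function<bool()>& done,
                   const std::function<void()>& work) {
      int count = 0;
      while (!done()) {
        int limit = 1 << std::min(count, 12);  // Cap batches at 4096 calls.
        for (int i = 0; i < limit; ++i) {
          work();
        }
        std::this_thread::sleep_for(std::chrono::milliseconds(100));
        if (++count == 50) {
          throw std::runtime_error("TIMEOUT");
        }
      }
    }

    int main() {
      int calls = 0;
      WaitUntil([&] { return calls >= 10; }, [&] { ++calls; });
      return 0;
    }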
diff --git a/test/Android.bp b/test/Android.bp
index 01e424d5e3..ace62c23b8 100644
--- a/test/Android.bp
+++ b/test/Android.bp
@@ -385,7 +385,6 @@ cc_defaults {
         "656-annotation-lookup-generic-jni/test.cc",
         "661-oat-writer-layout/oat_writer_layout.cc",
         "664-aget-verifier/aget-verifier.cc",
-        "667-jit-jni-stub/jit_jni_stub_test.cc",
         "708-jit-cache-churn/jit.cc",
     ],
     shared_libs: [
diff --git a/test/common/runtime_state.cc b/test/common/runtime_state.cc
index 34580800cc..df497c1181 100644
--- a/test/common/runtime_state.cc
+++ b/test/common/runtime_state.cc
@@ -152,10 +152,10 @@ extern "C" JNIEXPORT jboolean JNICALL Java_Main_isAotCompiled(JNIEnv* env,
   return method->GetOatMethodQuickCode(kRuntimePointerSize) != nullptr;
 }

-extern "C" JNIEXPORT jboolean JNICALL Java_Main_hasJitCompiledEntrypoint(JNIEnv* env,
-                                                                         jclass,
-                                                                         jclass cls,
-                                                                         jstring method_name) {
+extern "C" JNIEXPORT jboolean JNICALL Java_Main_isJitCompiled(JNIEnv* env,
+                                                              jclass,
+                                                              jclass cls,
+                                                              jstring method_name) {
   jit::Jit* jit = GetJitIfEnabled();
   if (jit == nullptr) {
     return false;
@@ -169,23 +169,6 @@ extern "C" JNIEXPORT jboolean JNICALL Java_Main_hasJitCompiledEntrypoint(JNIEnv*
   return jit->GetCodeCache()->ContainsPc(method->GetEntryPointFromQuickCompiledCode());
 }

-extern "C" JNIEXPORT jboolean JNICALL Java_Main_hasJitCompiledCode(JNIEnv* env,
-                                                                   jclass,
-                                                                   jclass cls,
-                                                                   jstring method_name) {
-  jit::Jit* jit = GetJitIfEnabled();
-  if (jit == nullptr) {
-    return false;
-  }
-  Thread* self = Thread::Current();
-  ScopedObjectAccess soa(self);
-  ScopedUtfChars chars(env, method_name);
-  CHECK(chars.c_str() != nullptr);
-  ArtMethod* method = soa.Decode<mirror::Class>(cls)->FindDeclaredDirectMethodByName(
-      chars.c_str(), kRuntimePointerSize);
-  return jit->GetCodeCache()->ContainsMethod(method);
-}
-
 extern "C" JNIEXPORT void JNICALL Java_Main_ensureJitCompiled(JNIEnv* env,
                                                               jclass,
                                                               jclass cls,