author     2023-12-06 10:31:10 +0000
committer  2023-12-07 09:20:17 +0000
commit     fee0362d03e5ee1b0beaa4839e7f3653ed5ce72d
tree       af4d5634ac93669d644d174a380e9b9c2f2a784c
parent     c60b6f7754974809f7467a07d63475248274026b
Remove logic of polling liveness of compiled code.
Data shows the current logic has too many misses: it deletes
baseline-compiled code that could still be useful.
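
For context, the removed polling worked in two phases, shown here as a
minimal standalone C++ sketch distilled from the deleted code in the diff
below (the Method struct and function names are illustrative stand-ins,
not real ART types): counters were reset ahead of a full collection, and
any baseline-compiled method whose counter had not moved by the next
collection was presumed dead.

    // Sketch of the removed liveness-polling policy (illustrative only).
    #include <cstddef>
    #include <iostream>
    #include <vector>

    struct Method {
      int hotness_counter = 0;  // Bumped each time the method runs.
      bool has_baseline_code = true;
    };

    // Phase 1, before a full collection: zero every counter so that only
    // methods executed again will look live at the next collection.
    void StartLivenessPolling(std::vector<Method>& methods) {
      for (Method& m : methods) {
        m.hotness_counter = 0;
      }
    }

    // Phase 2, at the full collection: a baseline method whose counter
    // never moved is presumed dead and its compiled code is discarded.
    // This is a "miss" whenever the method was merely idle, which is the
    // pattern the data showed.
    void CollectUnusedBaselineCode(std::vector<Method>& methods) {
      for (Method& m : methods) {
        if (m.has_baseline_code && m.hotness_counter == 0) {
          m.has_baseline_code = false;
        }
      }
    }

    int main() {
      std::vector<Method> methods(3);
      StartLivenessPolling(methods);
      methods[0].hotness_counter++;  // Only method 0 runs in between.
      CollectUnusedBaselineCode(methods);
      for (std::size_t i = 0; i < methods.size(); ++i) {
        std::cout << "method " << i << " keeps baseline code: "
                  << std::boolalpha << methods[i].has_baseline_code << '\n';
      }
    }

The policy is cheap but coarse: a method that happens not to run during one
polling window loses compiled code it may need moments later.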
Reduces jank when scrolling a Compose view for 20 seconds, averaged over
50 runs (a quick arithmetic check follows the list):
- For Go Monkey:
  - Before: ~433 frames drawn / ~18.47% janky frames
  - After: ~485 frames drawn / ~17.02% janky frames
- For Pixel 8 Pro:
  - Before: ~2428 frames drawn / ~1.20% janky frames
  - After: ~2433 frames drawn / ~1.02% janky frames
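
To make the percentages concrete: assuming the usual definition, janky% =
janky frames / total frames drawn, the absolute janky-frame counts can be
backed out of the averages above (a hedged sanity check, not part of the
measurement):

    // Back out approximate janky-frame counts from the reported averages.
    #include <cstdio>

    int main() {
      struct Run { const char* label; double frames; double janky_pct; };
      const Run runs[] = {
          {"Go device, before", 433, 18.47},
          {"Go device, after", 485, 17.02},
          {"Pixel 8 Pro, before", 2428, 1.20},
          {"Pixel 8 Pro, after", 2433, 1.02},
      };
      for (const Run& r : runs) {
        std::printf("%-20s ~%.0f janky frames\n",
                    r.label, r.frames * r.janky_pct / 100.0);
      }
    }

On the Go device the janky-frame count stays roughly flat (~80 before, ~83
after) while total frames drawn rises by about 12%, so scrolling is smoother
overall; on the Pixel 8 Pro both the count (~29 to ~25) and the rate drop.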
Test: test.py
Change-Id: I06b24ce069be493394a1af5f7402f0a381e00dfb
-rw-r--r--  runtime/jit/jit_code_cache.cc              | 118
-rw-r--r--  runtime/jit/jit_code_cache.h               |  11
-rw-r--r--  test/667-jit-jni-stub/jit_jni_stub_test.cc |  17
-rw-r--r--  test/667-jit-jni-stub/src/Main.java        |  15
4 files changed, 26 insertions(+), 135 deletions(-)
diff --git a/runtime/jit/jit_code_cache.cc b/runtime/jit/jit_code_cache.cc
index 2474399f59..8ca80131f9 100644
--- a/runtime/jit/jit_code_cache.cc
+++ b/runtime/jit/jit_code_cache.cc
@@ -261,7 +261,6 @@ JitCodeCache::JitCodeCache()
       zygote_map_(&shared_region_),
       lock_cond_("Jit code cache condition variable", *Locks::jit_lock_),
       collection_in_progress_(false),
-      last_collection_increased_code_cache_(false),
       garbage_collect_code_(true),
       number_of_baseline_compilations_(0),
       number_of_optimized_compilations_(0),
@@ -1097,23 +1096,6 @@ bool JitCodeCache::IsAtMaxCapacity() const {
   return private_region_.GetCurrentCapacity() == private_region_.GetMaxCapacity();
 }
 
-bool JitCodeCache::ShouldDoFullCollection() {
-  if (IsAtMaxCapacity()) {
-    // Always do a full collection when the code cache is full.
-    return true;
-  } else if (private_region_.GetCurrentCapacity() < GetReservedCapacity()) {
-    // Always do partial collection when the code cache size is below the reserved
-    // capacity.
-    return false;
-  } else if (last_collection_increased_code_cache_) {
-    // This time do a full collection.
-    return true;
-  } else {
-    // This time do a partial collection.
-    return false;
-  }
-}
-
 void JitCodeCache::GarbageCollectCache(Thread* self) {
   ScopedTrace trace(__FUNCTION__);
   // Wait for an existing collection, or let everyone know we are starting one.
@@ -1140,19 +1122,11 @@ void JitCodeCache::GarbageCollectCache(Thread* self) {
   {
     TimingLogger::ScopedTiming st("Code cache collection", &logger);
 
-    bool do_full_collection = false;
-    {
-      MutexLock mu(self, *Locks::jit_lock_);
-      do_full_collection = ShouldDoFullCollection();
-    }
-
-    VLOG(jit) << "Do "
-              << (do_full_collection ? "full" : "partial")
-              << " code cache collection, code="
+    VLOG(jit) << "Do code cache collection, code="
               << PrettySize(CodeCacheSize())
               << ", data=" << PrettySize(DataCacheSize());
 
-    DoCollection(self, /* collect_profiling_info= */ do_full_collection);
+    DoCollection(self);
 
     VLOG(jit) << "After code cache collection, code="
               << PrettySize(CodeCacheSize())
@@ -1160,43 +1134,7 @@ void JitCodeCache::GarbageCollectCache(Thread* self) {
 
   {
     MutexLock mu(self, *Locks::jit_lock_);
-
-    // Increase the code cache only when we do partial collections.
-    // TODO: base this strategy on how full the code cache is?
-    if (do_full_collection) {
-      last_collection_increased_code_cache_ = false;
-    } else {
-      last_collection_increased_code_cache_ = true;
-      private_region_.IncreaseCodeCacheCapacity();
-    }
-
-    bool next_collection_will_be_full = ShouldDoFullCollection();
-
-    // Start polling the liveness of compiled code to prepare for the next full collection.
-    if (next_collection_will_be_full) {
-      ScopedDebugDisallowReadBarriers sddrb(self);
-      for (auto it : profiling_infos_) {
-        it.second->ResetCounter();
-      }
-
-      // Change entry points of native methods back to the GenericJNI entrypoint.
-      for (const auto& entry : jni_stubs_map_) {
-        const JniStubData& data = entry.second;
-        if (!data.IsCompiled() || IsInZygoteExecSpace(data.GetCode())) {
-          continue;
-        }
-        const OatQuickMethodHeader* method_header =
-            OatQuickMethodHeader::FromCodePointer(data.GetCode());
-        for (ArtMethod* method : data.GetMethods()) {
-          if (method->GetEntryPointFromQuickCompiledCode() == method_header->GetEntryPoint()) {
-            // Don't call Instrumentation::UpdateMethodsCode(), same as for normal methods above.
-            // Make sure a single invocation of the GenericJNI trampoline tries to recompile.
-            method->SetHotCounter();
-            method->SetEntryPointFromQuickCompiledCode(GetQuickGenericJniStub());
-          }
-        }
-      }
-    }
+    private_region_.IncreaseCodeCacheCapacity();
     live_bitmap_.reset(nullptr);
     NotifyCollectionDone(self);
   }
@@ -1340,35 +1278,11 @@ void JitCodeCache::ResetHotnessCounter(ArtMethod* method, Thread* self) {
 }
 
-void JitCodeCache::DoCollection(Thread* self, bool collect_profiling_info) {
+void JitCodeCache::DoCollection(Thread* self) {
   ScopedTrace trace(__FUNCTION__);
   {
     ScopedDebugDisallowReadBarriers sddrb(self);
     MutexLock mu(self, *Locks::jit_lock_);
-
-    // Update to interpreter the methods that have baseline entrypoints and whose baseline
-    // hotness count hasn't changed.
-    // Note that these methods may be in thread stack or concurrently revived
-    // between. That's OK, as the thread executing it will mark it.
-    uint16_t warmup_threshold = Runtime::Current()->GetJITOptions()->GetWarmupThreshold();
-    for (auto it : profiling_infos_) {
-      ProfilingInfo* info = it.second;
-      if (!info->CounterHasChanged()) {
-        const void* entry_point = info->GetMethod()->GetEntryPointFromQuickCompiledCode();
-        if (ContainsPc(entry_point)) {
-          OatQuickMethodHeader* method_header =
-              OatQuickMethodHeader::FromEntryPoint(entry_point);
-          if (CodeInfo::IsBaseline(method_header->GetOptimizedCodeInfoPtr())) {
-            info->GetMethod()->ResetCounter(warmup_threshold);
-            Runtime::Current()->GetInstrumentation()->InitializeMethodsCode(
-                info->GetMethod(), /*aot_code=*/ nullptr);
-          }
-        }
-      }
-    }
-    // TODO: collect profiling info
-    // TODO: collect optimized code
-
     // Mark compiled code that are entrypoints of ArtMethods. Compiled code that is not
     // an entry point is either:
     // - an osr compiled code, that will be removed if not in a thread call stack.
@@ -1411,10 +1325,6 @@ void JitCodeCache::DoCollection(Thread* self, bool collect_profiling_info) {
   // change. We do know they cannot change to a code cache entry that is not marked,
   // therefore we can safely remove those entries.
   RemoveUnmarkedCode(self);
-
-  if (collect_profiling_info) {
-    // TODO: Collect unused profiling infos.
-  }
 }
 
 OatQuickMethodHeader* JitCodeCache::LookupMethodHeader(uintptr_t pc, ArtMethod* method) {
@@ -1824,7 +1734,22 @@ void JitCodeCache::InvalidateAllCompiledCode() {
   Runtime* runtime = Runtime::Current();
   ClassLinker* linker = runtime->GetClassLinker();
   instrumentation::Instrumentation* instr = runtime->GetInstrumentation();
-  // TODO: Clear `jni_stubs_map_`?
+
+  // Change entry points of native methods back to the GenericJNI entrypoint.
+  for (const auto& entry : jni_stubs_map_) {
+    const JniStubData& data = entry.second;
+    if (!data.IsCompiled() || IsInZygoteExecSpace(data.GetCode())) {
+      continue;
+    }
+    const OatQuickMethodHeader* method_header =
+        OatQuickMethodHeader::FromCodePointer(data.GetCode());
+    for (ArtMethod* method : data.GetMethods()) {
+      if (method->GetEntryPointFromQuickCompiledCode() == method_header->GetEntryPoint()) {
+        ClearMethodCounter(method, /*was_warm=*/true);
+        instr->InitializeMethodsCode(method, /*aot_code=*/ nullptr);
+      }
+    }
+  }
   for (const auto& entry : method_code_map_) {
     ArtMethod* meth = entry.second;
     // We were compiled, so we must be warm.
@@ -1843,8 +1768,7 @@ void JitCodeCache::InvalidateAllCompiledCode() {
     if (entry.method->IsPreCompiled()) {
       entry.method->ClearPreCompiled();
     }
-    Runtime::Current()->GetInstrumentation()->InitializeMethodsCode(entry.method,
-                                                                    /*aot_code=*/nullptr);
+    instr->InitializeMethodsCode(entry.method, /*aot_code=*/nullptr);
   }
   saved_compiled_methods_map_.clear();
diff --git a/runtime/jit/jit_code_cache.h b/runtime/jit/jit_code_cache.h
index 88b54d5a5b..82510fbe3c 100644
--- a/runtime/jit/jit_code_cache.h
+++ b/runtime/jit/jit_code_cache.h
@@ -496,12 +496,7 @@ class JitCodeCache {
   // Return whether the code cache's capacity is at its maximum.
   bool IsAtMaxCapacity() const REQUIRES(Locks::jit_lock_);
 
-  // Return whether we should do a full collection given the current state of the cache.
-  bool ShouldDoFullCollection()
-      REQUIRES(Locks::jit_lock_)
-      REQUIRES_SHARED(Locks::mutator_lock_);
-
-  void DoCollection(Thread* self, bool collect_profiling_info)
+  void DoCollection(Thread* self)
       REQUIRES(!Locks::jit_lock_)
       REQUIRES_SHARED(Locks::mutator_lock_);
@@ -594,9 +589,6 @@ class JitCodeCache {
   // Bitmap for collecting code and data.
   std::unique_ptr<CodeCacheBitmap> live_bitmap_;
 
-  // Whether the last collection round increased the code cache.
-  bool last_collection_increased_code_cache_ GUARDED_BY(Locks::jit_lock_);
-
   // Whether we can do garbage collection. Not 'const' as tests may override this.
   bool garbage_collect_code_ GUARDED_BY(Locks::jit_lock_);
@@ -623,7 +615,6 @@ class JitCodeCache {
   // Histograms for keeping track of profiling info statistics.
   Histogram<uint64_t> histogram_profiling_info_memory_use_ GUARDED_BY(Locks::jit_lock_);
 
-  friend class art::JitJniStubTestHelper;
   friend class ScopedCodeCacheWrite;
   friend class MarkCodeClosure;
diff --git a/test/667-jit-jni-stub/jit_jni_stub_test.cc b/test/667-jit-jni-stub/jit_jni_stub_test.cc
index c21971fa8b..2b8e14c03d 100644
--- a/test/667-jit-jni-stub/jit_jni_stub_test.cc
+++ b/test/667-jit-jni-stub/jit_jni_stub_test.cc
@@ -25,17 +25,6 @@
 namespace art {
 
-// Local class declared as a friend of JitCodeCache so that we can access its internals.
-class JitJniStubTestHelper {
- public:
-  static bool isNextJitGcFull(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_) {
-    CHECK(Runtime::Current()->GetJit() != nullptr);
-    jit::JitCodeCache* cache = Runtime::Current()->GetJit()->GetCodeCache();
-    MutexLock mu(self, *Locks::jit_lock_);
-    return cache->ShouldDoFullCollection();
-  }
-};
-
 // Calls through to a static method with signature "()V".
 extern "C" JNIEXPORT void Java_Main_callThrough(JNIEnv* env, jclass, jclass klass, jstring methodName) {
@@ -51,13 +40,15 @@
 void Java_Main_jitGc(JNIEnv*, jclass) {
   CHECK(Runtime::Current()->GetJit() != nullptr);
   jit::JitCodeCache* cache = Runtime::Current()->GetJit()->GetCodeCache();
   ScopedObjectAccess soa(Thread::Current());
+  cache->InvalidateAllCompiledCode();
   cache->GarbageCollectCache(Thread::Current());
 }
 
 extern "C" JNIEXPORT jboolean Java_Main_isNextJitGcFull(JNIEnv*, jclass) {
-  ScopedObjectAccess soa(Thread::Current());
-  return JitJniStubTestHelper::isNextJitGcFull(soa.Self());
+  // Because we invalidate all compiled code above, we currently always do a
+  // full GC.
+  return true;
 }
 
 }  // namespace art
diff --git a/test/667-jit-jni-stub/src/Main.java b/test/667-jit-jni-stub/src/Main.java
index 234c5daf4c..179e186f6a 100644
--- a/test/667-jit-jni-stub/src/Main.java
+++ b/test/667-jit-jni-stub/src/Main.java
@@ -43,10 +43,6 @@ public class Main {
     // Also tests stack walk over the JIT-compiled stub.
     callThrough(Main.class, "testGcWithCallThroughStubOnStack");
 
-    // Test that, when marking used methods before a full JIT GC, a single execution
-    // of the GenericJNI trampoline can save the compiled stub from being collected.
-    testSingleInvocationTriggersRecompilation();
-
     // Test that the JNI compiled stub can actually be collected.
     testStubCanBeCollected();
   }
@@ -64,17 +60,6 @@ public class Main {
     assertTrue(hasJitCompiledCode(Main.class, "callThrough"));
   }
 
-  public static void testSingleInvocationTriggersRecompilation() {
-    // After scheduling a full JIT GC, single call through the GenericJNI
-    // trampoline should ensure that the compiled stub is used again.
-    doJitGcsUntilFullJitGcIsScheduled();
-    callThrough(Main.class, "doNothing");
-    ensureCompiledCallThroughEntrypoint(/* call */ false);  // Wait for the compilation task to run.
-    assertTrue(hasJitCompiledEntrypoint(Main.class, "callThrough"));
-    jitGc();  // This JIT GC should not collect the callThrough() stub.
-    assertTrue(hasJitCompiledCode(Main.class, "callThrough"));
-  }
-
   public static void testMixedFramesOnStack() {
     // Starts without a compiled JNI stub for callThrough().
     assertFalse(hasJitCompiledEntrypoint(Main.class, "callThrough"));