summary | refs | log | tree | commit | diff
path: root/runtime/jit/jit_code_cache.h
diff options
context:
space:
mode:
author Nicolas Geoffray <ngeoffray@google.com> 2024-05-02 17:49:12 +0000
committer Treehugger Robot <android-test-infra-autosubmit@system.gserviceaccount.com> 2024-05-03 09:57:14 +0000
commit b3d88b3f4b47b7635cbdab54dfdcbf866b9813ff (patch)
tree a77d0d9ca5d0026b2fc309e5230665304ae64193 /runtime/jit/jit_code_cache.h
parent 5a4566f39c3e22ecca1e88008af772c1a808ac9b (diff)
Revert "Reland^2 "Revamp JIT GC.""
This reverts commit eac7cd76d9a39a61c7fecbd3c3eb4f2932a3d55c. Reason for revert: Bot failure. Change-Id: Ia4632c06ec88cff218d64a8087f762337cabc79e
Diffstat (limited to 'runtime/jit/jit_code_cache.h')
-rw-r--r-- | runtime/jit/jit_code_cache.h | 50
1 file changed, 22 insertions, 28 deletions
diff --git a/runtime/jit/jit_code_cache.h b/runtime/jit/jit_code_cache.h
index 5202c2c37e..96fc7e2706 100644
--- a/runtime/jit/jit_code_cache.h
+++ b/runtime/jit/jit_code_cache.h
@@ -285,9 +285,11 @@ class JitCodeCache {
REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(!Locks::jit_lock_);
void FreeLocked(JitMemoryRegion* region, const uint8_t* code, const uint8_t* data)
+ REQUIRES_SHARED(Locks::mutator_lock_)
REQUIRES(Locks::jit_lock_);
- void IncreaseCodeCacheCapacity(Thread* self)
+ // Perform a collection on the code cache.
+ EXPORT void GarbageCollectCache(Thread* self)
REQUIRES(!Locks::jit_lock_)
REQUIRES_SHARED(Locks::mutator_lock_);
@@ -421,20 +423,9 @@ class JitCodeCache {
Thread* self)
REQUIRES_SHARED(Locks::mutator_lock_);
- // NO_THREAD_SAFETY_ANALYSIS because we may be called with the JIT lock held
- // or not. The implementation of this method handles the two cases.
- void AddZombieCode(ArtMethod* method, const void* code_ptr) NO_THREAD_SAFETY_ANALYSIS;
-
- EXPORT void DoCollection(Thread* self)
- REQUIRES(!Locks::jit_lock_);
-
private:
JitCodeCache();
- void AddZombieCodeInternal(ArtMethod* method, const void* code_ptr)
- REQUIRES(Locks::jit_lock_)
- REQUIRES_SHARED(Locks::mutator_lock_);
-
ProfilingInfo* AddProfilingInfoInternal(Thread* self,
ArtMethod* method,
const std::vector<uint32_t>& inline_cache_entries,
@@ -442,16 +433,21 @@ class JitCodeCache {
REQUIRES(Locks::jit_lock_)
REQUIRES_SHARED(Locks::mutator_lock_);
+ // If a collection is in progress, wait for it to finish. Must be called with the mutator lock.
+ // The non-mutator lock version should be used if possible. This method will release then
+ // re-acquire the mutator lock.
+ void WaitForPotentialCollectionToCompleteRunnable(Thread* self)
+ REQUIRES(Locks::jit_lock_, !Roles::uninterruptible_) REQUIRES_SHARED(Locks::mutator_lock_);
+
// If a collection is in progress, wait for it to finish. Return
// whether the thread actually waited.
bool WaitForPotentialCollectionToComplete(Thread* self)
- REQUIRES(Locks::jit_lock_) REQUIRES_SHARED(!Locks::mutator_lock_);
-
- // Notify all waiting threads that a collection is done.
- void NotifyCollectionDone(Thread* self) REQUIRES(Locks::jit_lock_);
+ REQUIRES(Locks::jit_lock_) REQUIRES(!Locks::mutator_lock_);
// Remove CHA dependents and underlying allocations for entries in `method_headers`.
void FreeAllMethodHeaders(const std::unordered_set<OatQuickMethodHeader*>& method_headers)
+ REQUIRES_SHARED(Locks::mutator_lock_)
+ REQUIRES(Locks::jit_lock_)
REQUIRES(!Locks::cha_lock_);
// Removes method from the cache. The caller must ensure that all threads
@@ -466,7 +462,8 @@ class JitCodeCache {
// Free code and data allocations for `code_ptr`.
void FreeCodeAndData(const void* code_ptr)
- REQUIRES(Locks::jit_lock_);
+ REQUIRES(Locks::jit_lock_)
+ REQUIRES_SHARED(Locks::mutator_lock_);
// Number of bytes allocated in the code cache.
size_t CodeCacheSize() REQUIRES(!Locks::jit_lock_);
@@ -480,9 +477,16 @@ class JitCodeCache {
// Number of bytes allocated in the data cache.
size_t DataCacheSizeLocked() REQUIRES(Locks::jit_lock_);
+ // Notify all waiting threads that a collection is done.
+ void NotifyCollectionDone(Thread* self) REQUIRES(Locks::jit_lock_);
+
// Return whether the code cache's capacity is at its maximum.
bool IsAtMaxCapacity() const REQUIRES(Locks::jit_lock_);
+ void DoCollection(Thread* self)
+ REQUIRES(!Locks::jit_lock_)
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
void RemoveUnmarkedCode(Thread* self)
REQUIRES(!Locks::jit_lock_)
REQUIRES_SHARED(Locks::mutator_lock_);
@@ -559,28 +563,18 @@ class JitCodeCache {
// ProfilingInfo objects we have allocated.
SafeMap<ArtMethod*, ProfilingInfo*> profiling_infos_ GUARDED_BY(Locks::jit_lock_);
- // Zombie code and JNI methods to consider for collection.
- std::set<const void*> zombie_code_ GUARDED_BY(Locks::jit_lock_);
- std::set<ArtMethod*> zombie_jni_code_ GUARDED_BY(Locks::jit_lock_);
-
- std::set<const void*> processed_zombie_code_ GUARDED_BY(Locks::jit_lock_);
- std::set<ArtMethod*> processed_zombie_jni_code_ GUARDED_BY(Locks::jit_lock_);
-
// Methods that the zygote has compiled and can be shared across processes
// forked from the zygote.
ZygoteMap zygote_map_;
// -------------- JIT GC related data structures ----------------------- //
- // Condition to wait on during collection and for accessing weak references in inline caches.
+ // Condition to wait on during collection.
ConditionVariable lock_cond_ GUARDED_BY(Locks::jit_lock_);
// Whether there is a code cache collection in progress.
bool collection_in_progress_ GUARDED_BY(Locks::jit_lock_);
- // Whether a GC task is already scheduled.
- bool gc_task_scheduled_ GUARDED_BY(Locks::jit_lock_);
-
// Bitmap for collecting code and data.
std::unique_ptr<CodeCacheBitmap> live_bitmap_;