Diffstat (limited to 'runtime/base/mutex.cc')
| Mode | File | Lines changed |
|------------|-----------------------|----|
| -rw-r--r-- | runtime/base/mutex.cc | 72 |
1 file changed, 58 insertions, 14 deletions
diff --git a/runtime/base/mutex.cc b/runtime/base/mutex.cc
index 24846e5ceb..b0394a5255 100644
--- a/runtime/base/mutex.cc
+++ b/runtime/base/mutex.cc
@@ -46,7 +46,6 @@ Mutex* Locks::deoptimization_lock_ = nullptr;
 ReaderWriterMutex* Locks::heap_bitmap_lock_ = nullptr;
 Mutex* Locks::instrument_entrypoints_lock_ = nullptr;
 Mutex* Locks::intern_table_lock_ = nullptr;
-Mutex* Locks::jdwp_event_list_lock_ = nullptr;
 Mutex* Locks::jni_function_table_lock_ = nullptr;
 Mutex* Locks::jni_libraries_lock_ = nullptr;
 Mutex* Locks::logging_lock_ = nullptr;
@@ -74,6 +73,7 @@ ReaderWriterMutex* Locks::jni_globals_lock_ = nullptr;
 Mutex* Locks::jni_weak_globals_lock_ = nullptr;
 ReaderWriterMutex* Locks::dex_lock_ = nullptr;
 std::vector<BaseMutex*> Locks::expected_mutexes_on_weak_ref_access_;
+Atomic<const BaseMutex*> Locks::expected_mutexes_on_weak_ref_access_guard_;
 
 struct AllMutexData {
   // A guard for all_mutexes_ that's not a mutex (Mutexes must CAS to acquire and busy wait).
@@ -118,6 +118,26 @@ class ScopedAllMutexesLock FINAL {
   const BaseMutex* const mutex_;
 };
 
+class Locks::ScopedExpectedMutexesOnWeakRefAccessLock FINAL {
+ public:
+  explicit ScopedExpectedMutexesOnWeakRefAccessLock(const BaseMutex* mutex) : mutex_(mutex) {
+    while (!Locks::expected_mutexes_on_weak_ref_access_guard_.CompareExchangeWeakAcquire(0,
+                                                                                         mutex)) {
+      NanoSleep(100);
+    }
+  }
+
+  ~ScopedExpectedMutexesOnWeakRefAccessLock() {
+    while (!Locks::expected_mutexes_on_weak_ref_access_guard_.CompareExchangeWeakRelease(mutex_,
+                                                                                         0)) {
+      NanoSleep(100);
+    }
+  }
+
+ private:
+  const BaseMutex* const mutex_;
+};
+
 // Scoped class that generates events at the beginning and end of lock contention.
 class ScopedContentionRecorder FINAL : public ValueObject {
  public:
@@ -999,7 +1019,6 @@ void Locks::Init() {
     DCHECK(verifier_deps_lock_ != nullptr);
     DCHECK(host_dlopen_handles_lock_ != nullptr);
     DCHECK(intern_table_lock_ != nullptr);
-    DCHECK(jdwp_event_list_lock_ != nullptr);
     DCHECK(jni_function_table_lock_ != nullptr);
     DCHECK(jni_libraries_lock_ != nullptr);
     DCHECK(logging_lock_ != nullptr);
@@ -1042,10 +1061,6 @@ void Locks::Init() {
     DCHECK(runtime_shutdown_lock_ == nullptr);
     runtime_shutdown_lock_ = new Mutex("runtime shutdown lock", current_lock_level);
 
-    UPDATE_CURRENT_LOCK_LEVEL(kJdwpEventListLock);
-    DCHECK(jdwp_event_list_lock_ == nullptr);
-    jdwp_event_list_lock_ = new Mutex("JDWP event list lock", current_lock_level);
-
     UPDATE_CURRENT_LOCK_LEVEL(kProfilerLock);
     DCHECK(profiler_lock_ == nullptr);
     profiler_lock_ = new Mutex("profiler lock", current_lock_level);
@@ -1169,14 +1184,9 @@ void Locks::Init() {
     #undef UPDATE_CURRENT_LOCK_LEVEL
 
     // List of mutexes that we may hold when accessing a weak ref.
-    dex_lock_->SetShouldRespondToEmptyCheckpointRequest(true);
-    expected_mutexes_on_weak_ref_access_.push_back(dex_lock_);
-    classlinker_classes_lock_->SetShouldRespondToEmptyCheckpointRequest(true);
-    expected_mutexes_on_weak_ref_access_.push_back(classlinker_classes_lock_);
-    jdwp_event_list_lock_->SetShouldRespondToEmptyCheckpointRequest(true);
-    expected_mutexes_on_weak_ref_access_.push_back(jdwp_event_list_lock_);
-    jni_libraries_lock_->SetShouldRespondToEmptyCheckpointRequest(true);
-    expected_mutexes_on_weak_ref_access_.push_back(jni_libraries_lock_);
+    AddToExpectedMutexesOnWeakRefAccess(dex_lock_, /*need_lock*/ false);
+    AddToExpectedMutexesOnWeakRefAccess(classlinker_classes_lock_, /*need_lock*/ false);
+    AddToExpectedMutexesOnWeakRefAccess(jni_libraries_lock_, /*need_lock*/ false);
 
     InitConditions();
   }
@@ -1196,4 +1206,38 @@ bool Locks::IsSafeToCallAbortRacy() {
   return safe_to_call_abort_cb != nullptr && safe_to_call_abort_cb();
 }
 
+void Locks::AddToExpectedMutexesOnWeakRefAccess(BaseMutex* mutex, bool need_lock) {
+  if (need_lock) {
+    ScopedExpectedMutexesOnWeakRefAccessLock mu(mutex);
+    mutex->SetShouldRespondToEmptyCheckpointRequest(true);
+    expected_mutexes_on_weak_ref_access_.push_back(mutex);
+  } else {
+    mutex->SetShouldRespondToEmptyCheckpointRequest(true);
+    expected_mutexes_on_weak_ref_access_.push_back(mutex);
+  }
+}
+
+void Locks::RemoveFromExpectedMutexesOnWeakRefAccess(BaseMutex* mutex, bool need_lock) {
+  if (need_lock) {
+    ScopedExpectedMutexesOnWeakRefAccessLock mu(mutex);
+    mutex->SetShouldRespondToEmptyCheckpointRequest(false);
+    std::vector<BaseMutex*>& list = expected_mutexes_on_weak_ref_access_;
+    auto it = std::find(list.begin(), list.end(), mutex);
+    DCHECK(it != list.end());
+    list.erase(it);
+  } else {
+    mutex->SetShouldRespondToEmptyCheckpointRequest(false);
+    std::vector<BaseMutex*>& list = expected_mutexes_on_weak_ref_access_;
+    auto it = std::find(list.begin(), list.end(), mutex);
+    DCHECK(it != list.end());
+    list.erase(it);
+  }
+}
+
+bool Locks::IsExpectedOnWeakRefAccess(BaseMutex* mutex) {
+  ScopedExpectedMutexesOnWeakRefAccessLock mu(mutex);
+  std::vector<BaseMutex*>& list = expected_mutexes_on_weak_ref_access_;
+  return std::find(list.begin(), list.end(), mutex) != list.end();
+}
+
 }  // namespace art
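The new ScopedExpectedMutexesOnWeakRefAccessLock follows the same pattern as the existing ScopedAllMutexesLock: the list is guarded not by a Mutex but by a plain atomic word that is acquired and released with a weak compare-and-swap, busy-waiting with NanoSleep(100) until the CAS succeeds. The sketch below is a minimal, standalone illustration of that spin-guard pattern using std::atomic rather than ART's Atomic<> wrapper; the names SpinGuard, g_guarded_list, AddGuarded, and ContainsGuarded are hypothetical and not part of the ART code.

```cpp
// Standalone sketch (not ART code): a scoped guard built from a CAS spin word
// instead of a Mutex, mirroring ScopedExpectedMutexesOnWeakRefAccessLock above.
#include <algorithm>
#include <atomic>
#include <chrono>
#include <cstdio>
#include <thread>
#include <vector>

namespace {

std::atomic<const void*> g_guard{nullptr};  // nullptr means "unlocked".
std::vector<int> g_guarded_list;            // Data protected by the guard.

class SpinGuard {
 public:
  explicit SpinGuard(const void* owner) : owner_(owner) {
    const void* expected = nullptr;
    // Acquire: spin until our owner token is installed as the lock word.
    while (!g_guard.compare_exchange_weak(expected, owner_,
                                          std::memory_order_acquire)) {
      expected = nullptr;  // compare_exchange_weak rewrites 'expected' on failure.
      std::this_thread::sleep_for(std::chrono::nanoseconds(100));
    }
  }

  ~SpinGuard() {
    const void* expected = owner_;
    // Release: swap our owner token back to nullptr (looped because the weak
    // CAS may fail spuriously, as in the ART destructor).
    while (!g_guard.compare_exchange_weak(expected, nullptr,
                                          std::memory_order_release)) {
      expected = owner_;
      std::this_thread::sleep_for(std::chrono::nanoseconds(100));
    }
  }

 private:
  const void* const owner_;
};

void AddGuarded(int value) {
  SpinGuard guard(&value);  // Any unique non-null token can identify the owner.
  g_guarded_list.push_back(value);
}

bool ContainsGuarded(int value) {
  SpinGuard guard(&value);
  return std::find(g_guarded_list.begin(), g_guarded_list.end(), value) !=
         g_guarded_list.end();
}

}  // namespace

int main() {
  std::thread t1([] { AddGuarded(1); });
  std::thread t2([] { AddGuarded(2); });
  t1.join();
  t2.join();
  std::printf("contains 1: %d, contains 2: %d\n", ContainsGuarded(1), ContainsGuarded(2));
  return 0;
}
```

As with the all_mutexes_ guard visible in the diff ("a guard ... that's not a mutex"), the point is to protect bookkeeping about mutexes without taking another Mutex; the trade-off is that critical sections must stay very short (a few vector operations), since waiters spin in a busy-wait loop instead of blocking.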