Shard the class loader classes lock
Used to guard adding and removing classes.
Previously we used the class linker classes lock, but this had
a deadlock issue: the reference processor might need to acquire
the lock to mark the classes of a class loader, while another
thread could be blocked trying to access weak globals with the
class linker classes lock already held.
Bug: 27946564
(cherry picked from commit 1609e3a42051769f4a8be3b6731e7bb2f828b3bb)
Change-Id: Ic5cfe573c4e6822d49ad0862ffdd9d036e439a96
diff --git a/runtime/base/mutex.h b/runtime/base/mutex.h
index 293451c..17e0339 100644
--- a/runtime/base/mutex.h
+++ b/runtime/base/mutex.h
@@ -88,6 +88,8 @@
kOatFileManagerLock,
kTracingUniqueMethodsLock,
kTracingStreamingLock,
+ kDeoptimizedMethodsLock,
+ kClassLoaderClassesLock,
kDefaultMutexLevel,
kMarkSweepLargeObjectLock,
kPinTableLock,
@@ -96,7 +98,7 @@
kAllocatedThreadIdsLock,
kMonitorPoolLock,
kMethodVerifiersLock,
- kClassLinkerClassesLock,
+ kClassLinkerClassesLock, // TODO rename.
kBreakpointLock,
kMonitorLock,
kMonitorListLock,
diff --git a/runtime/class_table-inl.h b/runtime/class_table-inl.h
index e512906..42e320a 100644
--- a/runtime/class_table-inl.h
+++ b/runtime/class_table-inl.h
@@ -23,6 +23,7 @@
template<class Visitor>
void ClassTable::VisitRoots(Visitor& visitor) {
+ ReaderMutexLock mu(Thread::Current(), lock_);
for (ClassSet& class_set : classes_) {
for (GcRoot<mirror::Class>& root : class_set) {
visitor.VisitRoot(root.AddressWithoutBarrier());
@@ -35,6 +36,7 @@
template<class Visitor>
void ClassTable::VisitRoots(const Visitor& visitor) {
+ ReaderMutexLock mu(Thread::Current(), lock_);
for (ClassSet& class_set : classes_) {
for (GcRoot<mirror::Class>& root : class_set) {
visitor.VisitRoot(root.AddressWithoutBarrier());
@@ -47,6 +49,7 @@
template <typename Visitor>
bool ClassTable::Visit(Visitor& visitor) {
+ ReaderMutexLock mu(Thread::Current(), lock_);
for (ClassSet& class_set : classes_) {
for (GcRoot<mirror::Class>& root : class_set) {
if (!visitor(root.Read())) {
diff --git a/runtime/class_table.cc b/runtime/class_table.cc
index d815b1a..8267c68 100644
--- a/runtime/class_table.cc
+++ b/runtime/class_table.cc
@@ -20,17 +20,19 @@
namespace art {
-ClassTable::ClassTable() {
+ClassTable::ClassTable() : lock_("Class loader classes", kClassLoaderClassesLock) {
Runtime* const runtime = Runtime::Current();
classes_.push_back(ClassSet(runtime->GetHashTableMinLoadFactor(),
runtime->GetHashTableMaxLoadFactor()));
}
void ClassTable::FreezeSnapshot() {
+ WriterMutexLock mu(Thread::Current(), lock_);
classes_.push_back(ClassSet());
}
bool ClassTable::Contains(mirror::Class* klass) {
+ ReaderMutexLock mu(Thread::Current(), lock_);
for (ClassSet& class_set : classes_) {
auto it = class_set.Find(GcRoot<mirror::Class>(klass));
if (it != class_set.end()) {
@@ -41,6 +43,7 @@
}
mirror::Class* ClassTable::LookupByDescriptor(mirror::Class* klass) {
+ ReaderMutexLock mu(Thread::Current(), lock_);
for (ClassSet& class_set : classes_) {
auto it = class_set.Find(GcRoot<mirror::Class>(klass));
if (it != class_set.end()) {
@@ -51,6 +54,7 @@
}
mirror::Class* ClassTable::UpdateClass(const char* descriptor, mirror::Class* klass, size_t hash) {
+ WriterMutexLock mu(Thread::Current(), lock_);
// Should only be updating latest table.
auto existing_it = classes_.back().FindWithHash(descriptor, hash);
if (kIsDebugBuild && existing_it == classes_.back().end()) {
@@ -74,6 +78,7 @@
}
size_t ClassTable::NumZygoteClasses() const {
+ ReaderMutexLock mu(Thread::Current(), lock_);
size_t sum = 0;
for (size_t i = 0; i < classes_.size() - 1; ++i) {
sum += classes_[i].Size();
@@ -82,10 +87,12 @@
}
size_t ClassTable::NumNonZygoteClasses() const {
+ ReaderMutexLock mu(Thread::Current(), lock_);
return classes_.back().Size();
}
mirror::Class* ClassTable::Lookup(const char* descriptor, size_t hash) {
+ ReaderMutexLock mu(Thread::Current(), lock_);
for (ClassSet& class_set : classes_) {
auto it = class_set.FindWithHash(descriptor, hash);
if (it != class_set.end()) {
@@ -96,14 +103,17 @@
}
void ClassTable::Insert(mirror::Class* klass) {
+ WriterMutexLock mu(Thread::Current(), lock_);
classes_.back().Insert(GcRoot<mirror::Class>(klass));
}
void ClassTable::InsertWithHash(mirror::Class* klass, size_t hash) {
+ WriterMutexLock mu(Thread::Current(), lock_);
classes_.back().InsertWithHash(GcRoot<mirror::Class>(klass), hash);
}
bool ClassTable::Remove(const char* descriptor) {
+ WriterMutexLock mu(Thread::Current(), lock_);
for (ClassSet& class_set : classes_) {
auto it = class_set.Find(descriptor);
if (it != class_set.end()) {
@@ -137,6 +147,7 @@
}
bool ClassTable::InsertDexFile(mirror::Object* dex_file) {
+ WriterMutexLock mu(Thread::Current(), lock_);
DCHECK(dex_file != nullptr);
for (GcRoot<mirror::Object>& root : dex_files_) {
if (root.Read() == dex_file) {
@@ -148,6 +159,7 @@
}
size_t ClassTable::WriteToMemory(uint8_t* ptr) const {
+ ReaderMutexLock mu(Thread::Current(), lock_);
ClassSet combined;
// Combine all the class sets in case there are multiple, also adjusts load factor back to
// default in case classes were pruned.
@@ -173,6 +185,7 @@
}
void ClassTable::AddClassSet(ClassSet&& set) {
+ WriterMutexLock mu(Thread::Current(), lock_);
classes_.insert(classes_.begin(), std::move(set));
}
diff --git a/runtime/class_table.h b/runtime/class_table.h
index 0e0e860..eb784b5 100644
--- a/runtime/class_table.h
+++ b/runtime/class_table.h
@@ -71,87 +71,96 @@
// Used by image writer for checking.
bool Contains(mirror::Class* klass)
- REQUIRES(Locks::classlinker_classes_lock_)
+ REQUIRES(!lock_)
SHARED_REQUIRES(Locks::mutator_lock_);
// Freeze the current class tables by allocating a new table and never updating or modifying the
// existing table. This helps prevents dirty pages after caused by inserting after zygote fork.
void FreezeSnapshot()
- REQUIRES(Locks::classlinker_classes_lock_)
+ REQUIRES(!lock_)
SHARED_REQUIRES(Locks::mutator_lock_);
// Returns the number of classes in previous snapshots.
- size_t NumZygoteClasses() const SHARED_REQUIRES(Locks::classlinker_classes_lock_);
+ size_t NumZygoteClasses() const REQUIRES(!lock_);
// Returns all off the classes in the lastest snapshot.
- size_t NumNonZygoteClasses() const SHARED_REQUIRES(Locks::classlinker_classes_lock_);
+ size_t NumNonZygoteClasses() const REQUIRES(!lock_);
// Update a class in the table with the new class. Returns the existing class which was replaced.
mirror::Class* UpdateClass(const char* descriptor, mirror::Class* new_klass, size_t hash)
- REQUIRES(Locks::classlinker_classes_lock_)
+ REQUIRES(!lock_)
SHARED_REQUIRES(Locks::mutator_lock_);
// NO_THREAD_SAFETY_ANALYSIS for object marking requiring heap bitmap lock.
template<class Visitor>
void VisitRoots(Visitor& visitor)
NO_THREAD_SAFETY_ANALYSIS
- SHARED_REQUIRES(Locks::classlinker_classes_lock_, Locks::mutator_lock_);
+ REQUIRES(!lock_)
+ SHARED_REQUIRES(Locks::mutator_lock_);
+
template<class Visitor>
void VisitRoots(const Visitor& visitor)
NO_THREAD_SAFETY_ANALYSIS
- SHARED_REQUIRES(Locks::classlinker_classes_lock_, Locks::mutator_lock_);
+ REQUIRES(!lock_)
+ SHARED_REQUIRES(Locks::mutator_lock_);
// Stops visit if the visitor returns false.
template <typename Visitor>
bool Visit(Visitor& visitor)
- SHARED_REQUIRES(Locks::classlinker_classes_lock_, Locks::mutator_lock_);
+ REQUIRES(!lock_)
+ SHARED_REQUIRES(Locks::mutator_lock_);
// Return the first class that matches the descriptor. Returns null if there are none.
mirror::Class* Lookup(const char* descriptor, size_t hash)
- SHARED_REQUIRES(Locks::classlinker_classes_lock_, Locks::mutator_lock_);
+ REQUIRES(!lock_)
+ SHARED_REQUIRES(Locks::mutator_lock_);
// Return the first class that matches the descriptor of klass. Returns null if there are none.
mirror::Class* LookupByDescriptor(mirror::Class* klass)
- SHARED_REQUIRES(Locks::classlinker_classes_lock_, Locks::mutator_lock_);
+ REQUIRES(!lock_)
+ SHARED_REQUIRES(Locks::mutator_lock_);
void Insert(mirror::Class* klass)
- REQUIRES(Locks::classlinker_classes_lock_)
+ REQUIRES(!lock_)
SHARED_REQUIRES(Locks::mutator_lock_);
+
void InsertWithHash(mirror::Class* klass, size_t hash)
- REQUIRES(Locks::classlinker_classes_lock_)
+ REQUIRES(!lock_)
SHARED_REQUIRES(Locks::mutator_lock_);
// Returns true if the class was found and removed, false otherwise.
bool Remove(const char* descriptor)
- REQUIRES(Locks::classlinker_classes_lock_)
+ REQUIRES(!lock_)
SHARED_REQUIRES(Locks::mutator_lock_);
// Return true if we inserted the dex file, false if it already exists.
bool InsertDexFile(mirror::Object* dex_file)
- REQUIRES(Locks::classlinker_classes_lock_)
+ REQUIRES(!lock_)
SHARED_REQUIRES(Locks::mutator_lock_);
// Combines all of the tables into one class set.
size_t WriteToMemory(uint8_t* ptr) const
- SHARED_REQUIRES(Locks::classlinker_classes_lock_, Locks::mutator_lock_);
+ REQUIRES(!lock_)
+ SHARED_REQUIRES(Locks::mutator_lock_);
// Read a table from ptr and put it at the front of the class set.
size_t ReadFromMemory(uint8_t* ptr)
- REQUIRES(Locks::classlinker_classes_lock_)
+ REQUIRES(!lock_)
SHARED_REQUIRES(Locks::mutator_lock_);
// Add a class set to the front of classes.
void AddClassSet(ClassSet&& set)
- REQUIRES(Locks::classlinker_classes_lock_)
+ REQUIRES(!lock_)
SHARED_REQUIRES(Locks::mutator_lock_);
private:
- // TODO: shard lock to have one per class loader.
+ // Lock to guard inserting and removing.
+ mutable ReaderWriterMutex lock_;
// We have a vector to help prevent dirty pages after the zygote forks by calling FreezeSnapshot.
- std::vector<ClassSet> classes_ GUARDED_BY(Locks::classlinker_classes_lock_);
+ std::vector<ClassSet> classes_ GUARDED_BY(lock_);
// Dex files used by the class loader which may not be owned by the class loader. We keep these
// live so that we do not have issues closing any of the dex files.
- std::vector<GcRoot<mirror::Object>> dex_files_ GUARDED_BY(Locks::classlinker_classes_lock_);
+ std::vector<GcRoot<mirror::Object>> dex_files_ GUARDED_BY(lock_);
};
} // namespace art
diff --git a/runtime/instrumentation.cc b/runtime/instrumentation.cc
index a0c6bfb..34bc458 100644
--- a/runtime/instrumentation.cc
+++ b/runtime/instrumentation.cc
@@ -80,7 +80,7 @@
have_exception_caught_listeners_(false),
have_branch_listeners_(false),
have_invoke_virtual_or_interface_listeners_(false),
- deoptimized_methods_lock_("deoptimized methods lock"),
+ deoptimized_methods_lock_("deoptimized methods lock", kDeoptimizedMethodsLock),
deoptimization_enabled_(false),
interpreter_handler_table_(kMainHandlerTable),
quick_alloc_entry_points_instrumentation_counter_(0),
diff --git a/runtime/mirror/class_loader-inl.h b/runtime/mirror/class_loader-inl.h
index 84fa80f..cc910b0 100644
--- a/runtime/mirror/class_loader-inl.h
+++ b/runtime/mirror/class_loader-inl.h
@@ -34,7 +34,6 @@
VisitInstanceFieldsReferences<kVerifyFlags, kReadBarrierOption>(klass, visitor);
if (kVisitClasses) {
// Visit classes loaded after.
- ReaderMutexLock mu(Thread::Current(), *Locks::classlinker_classes_lock_);
ClassTable* const class_table = GetClassTable();
if (class_table != nullptr) {
class_table->VisitRoots(visitor);