summary refs log tree commit diff
path: root/runtime/class_linker.cc
diff options
context:
space:
mode:
Diffstat (limited to 'runtime/class_linker.cc')
-rw-r--r--  runtime/class_linker.cc | 25
1 file changed, 17 insertions, 8 deletions
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index dbc5ceca0d..acb39c5402 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -79,6 +79,7 @@
#include "scoped_thread_state_change.h"
#include "handle_scope-inl.h"
#include "thread-inl.h"
+#include "trace.h"
#include "utils.h"
#include "utils/dex_cache_arrays_layout-inl.h"
#include "verifier/method_verifier.h"
@@ -1299,6 +1300,9 @@ bool ClassLinker::ClassInClassTable(mirror::Class* klass) {
}
void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
+ // Acquire tracing_enabled before locking class linker lock to prevent lock order violation. Since
+ // enabling tracing requires the mutator lock, there are no race conditions here.
+ const bool tracing_enabled = Trace::IsTracingEnabled();
Thread* const self = Thread::Current();
WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
BufferedRootVisitor<kDefaultBufferedRootCount> buffered_visitor(
@@ -1320,6 +1324,14 @@ void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
// Need to make sure to not copy ArtMethods without doing read barriers since the roots are
// marked concurrently and we don't hold the classlinker_classes_lock_ when we do the copy.
boot_class_table_.VisitRoots(buffered_visitor);
+
+ // If tracing is enabled, then mark all the class loaders to prevent unloading.
+ if (tracing_enabled) {
+ for (const ClassLoaderData& data : class_loaders_) {
+ GcRoot<mirror::Object> root(GcRoot<mirror::Object>(self->DecodeJObject(data.weak_root)));
+ root.VisitRoot(visitor, RootInfo(kRootVMInternal));
+ }
+ }
} else if ((flags & kVisitRootFlagNewRoots) != 0) {
for (auto& root : new_class_roots_) {
mirror::Class* old_ref = root.Read<kWithoutReadBarrier>();
@@ -2650,10 +2662,8 @@ mirror::DexCache* ClassLinker::FindDexCacheLocked(Thread* self,
const DexFile& dex_file,
bool allow_failure) {
// Search assuming unique-ness of dex file.
- JavaVMExt* const vm = self->GetJniEnv()->vm;
for (jweak weak_root : dex_caches_) {
- mirror::DexCache* dex_cache = down_cast<mirror::DexCache*>(
- vm->DecodeWeakGlobal(self, weak_root));
+ mirror::DexCache* dex_cache = down_cast<mirror::DexCache*>(self->DecodeJObject(weak_root));
if (dex_cache != nullptr && dex_cache->GetDexFile() == &dex_file) {
return dex_cache;
}
@@ -6202,10 +6212,9 @@ void ClassLinker::DropFindArrayClassCache() {
void ClassLinker::VisitClassLoaders(ClassLoaderVisitor* visitor) const {
Thread* const self = Thread::Current();
- JavaVMExt* const vm = self->GetJniEnv()->vm;
for (const ClassLoaderData& data : class_loaders_) {
- auto* const class_loader = down_cast<mirror::ClassLoader*>(
- vm->DecodeWeakGlobal(self, data.weak_root));
+ // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
+ auto* const class_loader = down_cast<mirror::ClassLoader*>(self->DecodeJObject(data.weak_root));
if (class_loader != nullptr) {
visitor->Visit(class_loader);
}
@@ -6218,8 +6227,8 @@ void ClassLinker::CleanupClassLoaders() {
JavaVMExt* const vm = Runtime::Current()->GetJavaVM();
for (auto it = class_loaders_.begin(); it != class_loaders_.end(); ) {
const ClassLoaderData& data = *it;
- auto* const class_loader = down_cast<mirror::ClassLoader*>(
- vm->DecodeWeakGlobal(self, data.weak_root));
+ // Need to use DecodeJObject so that we get null for cleared JNI weak globals.
+ auto* const class_loader = down_cast<mirror::ClassLoader*>(self->DecodeJObject(data.weak_root));
if (class_loader != nullptr) {
++it;
} else {