author Lokesh Gidra <lokeshgidra@google.com> 2023-10-17 21:38:44 +0000
committer Lokesh Gidra <lokeshgidra@google.com> 2023-10-17 21:42:50 +0000
commit 1d556df25837ed6e14a9cc630e0f6967056a8c51 (patch)
tree eb85136a933da7633ecb00cf995378dc2aff8526 /runtime/class_linker.cc
parent 9faffd5c4e062ca45bd6f29a3b6d1b276e6c9839 (diff)
Revert^2 "Update class-table and intern-table concurrently with uffd GC"
This reverts commit 9faffd5c4e062ca45bd6f29a3b6d1b276e6c9839.

Reason for revert: Reland after fixing null-pointer dereference

Bug: 160737021
Test: art/testrunner/testrunner.py
Change-Id: I80d3eda827ea805efc5a0e0eb0b80a9d8ceb9dd5
Diffstat (limited to 'runtime/class_linker.cc')
-rw-r--r--  runtime/class_linker.cc  20
1 file changed, 7 insertions, 13 deletions
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index 9d175cdba5..d4666e5f9c 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -2426,17 +2426,9 @@ void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
     boot_class_table_->VisitRoots(root_visitor);
     // If tracing is enabled, then mark all the class loaders to prevent unloading.
     if ((flags & kVisitRootFlagClassLoader) != 0 || tracing_enabled) {
-      gc::Heap* const heap = Runtime::Current()->GetHeap();
-      // Don't visit class-loaders if compacting with userfaultfd GC as these
-      // weaks are updated using Runtime::SweepSystemWeaks() and the GC doesn't
-      // tolerate double updates.
-      if (!heap->IsPerformingUffdCompaction()) {
-        for (const ClassLoaderData& data : class_loaders_) {
-          GcRoot<mirror::Object> root(GcRoot<mirror::Object>(self->DecodeJObject(data.weak_root)));
-          root.VisitRoot(visitor, RootInfo(kRootVMInternal));
-        }
-      } else {
-        DCHECK_EQ(heap->CurrentCollectorType(), gc::CollectorType::kCollectorTypeCMC);
+      for (const ClassLoaderData& data : class_loaders_) {
+        GcRoot<mirror::Object> root(GcRoot<mirror::Object>(self->DecodeJObject(data.weak_root)));
+        root.VisitRoot(visitor, RootInfo(kRootVMInternal));
       }
     }
   } else if (!gUseReadBarrier && (flags & kVisitRootFlagNewRoots) != 0) {
@@ -2476,9 +2468,11 @@ void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
 // Keep in sync with InitCallback. Anything we visit, we need to
 // reinit references to when reinitializing a ClassLinker from a
 // mapped image.
-void ClassLinker::VisitRoots(RootVisitor* visitor, VisitRootFlags flags) {
+void ClassLinker::VisitRoots(RootVisitor* visitor, VisitRootFlags flags, bool visit_class_roots) {
   class_roots_.VisitRootIfNonNull(visitor, RootInfo(kRootVMInternal));
-  VisitClassRoots(visitor, flags);
+  if (visit_class_roots) {
+    VisitClassRoots(visitor, flags);
+  }
   // Instead of visiting the find_array_class_cache_ drop it so that it doesn't prevent class
   // unloading if we are marking roots.
   DropFindArrayClassCache();
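
For illustration only, a minimal, self-contained sketch of the widened VisitRoots() signature added by this change. It is not part of the commit or of the ART sources: RootVisitor, VisitRootFlags, and both call sites below are simplified, hypothetical stand-ins; only the VisitRoots()/VisitClassRoots() names and the visit_class_roots flag come from the diff above.

// Sketch of how a caller could use the new bool parameter: pass false when the
// class-table roots are being updated elsewhere (e.g. concurrently by the
// userfaultfd/CMC compaction), true for an ordinary root visit.
#include <cstdio>

struct RootVisitor {};                                  // stand-in for art::RootVisitor
enum VisitRootFlags { kVisitRootFlagAllRoots = 0x1 };   // stand-in flag value

class ClassLinker {
 public:
  // Mirrors the shape of the function after this commit: class_roots_ would
  // always be visited here, the class-table roots only on request.
  void VisitRoots(RootVisitor* visitor, VisitRootFlags flags, bool visit_class_roots) {
    if (visit_class_roots) {
      VisitClassRoots(visitor, flags);                  // class-table / class-loader roots
    }
    // ... find_array_class_cache_ is dropped rather than visited in the real code.
  }

 private:
  void VisitClassRoots(RootVisitor* /*visitor*/, VisitRootFlags /*flags*/) {
    std::printf("visiting class-table roots\n");
  }
};

int main() {
  RootVisitor visitor;
  ClassLinker linker;
  // Ordinary root visit: include the class-table roots.
  linker.VisitRoots(&visitor, kVisitRootFlagAllRoots, /*visit_class_roots=*/true);
  // Hypothetical compaction-time call site: the class-table roots are updated
  // concurrently, so they are skipped here to avoid double updates.
  linker.VisitRoots(&visitor, kVisitRootFlagAllRoots, /*visit_class_roots=*/false);
  return 0;
}

The design choice matches the comment removed in the first hunk: these weak roots are swept via Runtime::SweepSystemWeaks(), and the userfaultfd GC does not tolerate updating them twice, so the caller can now skip them instead of ClassLinker checking the heap state itself.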