Merge "Add visiting for class loaders in StickyMarkSweep"
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index 6d45dad..c8875f4 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -1891,7 +1891,8 @@
boot_class_table_.VisitRoots(buffered_visitor);
- // If tracing is enabled, then mark all the class loaders to prevent unloading.
+ // Mark all the class loaders to prevent unloading, either because tracing is enabled or
+ // because the caller requested it via kVisitRootFlagClassLoader.
- if (tracing_enabled) {
+ if ((flags & kVisitRootFlagClassLoader) != 0 || tracing_enabled) {
for (const ClassLoaderData& data : class_loaders_) {
GcRoot<mirror::Object> root(GcRoot<mirror::Object>(self->DecodeJObject(data.weak_root)));
root.VisitRoot(visitor, RootInfo(kRootVMInternal));
diff --git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc
index 7b73e43..673a97e 100644
--- a/runtime/gc/collector/mark_sweep.cc
+++ b/runtime/gc/collector/mark_sweep.cc
@@ -608,8 +608,7 @@
void MarkSweep::MarkConcurrentRoots(VisitRootFlags flags) {
TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
// Visit all runtime roots and clear dirty flags.
- Runtime::Current()->VisitConcurrentRoots(
- this, static_cast<VisitRootFlags>(flags | kVisitRootFlagNonMoving));
+ Runtime::Current()->VisitConcurrentRoots(this, flags);
}
class MarkSweep::DelayReferenceReferentVisitor {
diff --git a/runtime/gc/collector/mark_sweep.h b/runtime/gc/collector/mark_sweep.h
index 19c2e9a..a94cb27 100644
--- a/runtime/gc/collector/mark_sweep.h
+++ b/runtime/gc/collector/mark_sweep.h
@@ -98,7 +98,7 @@
REQUIRES(!mark_stack_lock_)
REQUIRES_SHARED(Locks::mutator_lock_);
- void MarkConcurrentRoots(VisitRootFlags flags)
+ virtual void MarkConcurrentRoots(VisitRootFlags flags)
REQUIRES(Locks::heap_bitmap_lock_)
REQUIRES(!mark_stack_lock_)
REQUIRES_SHARED(Locks::mutator_lock_);
diff --git a/runtime/gc/collector/sticky_mark_sweep.cc b/runtime/gc/collector/sticky_mark_sweep.cc
index bb7e854..a2dbe3f 100644
--- a/runtime/gc/collector/sticky_mark_sweep.cc
+++ b/runtime/gc/collector/sticky_mark_sweep.cc
@@ -56,6 +56,19 @@
RecursiveMarkDirtyObjects(false, accounting::CardTable::kCardDirty - 1);
}
+void StickyMarkSweep::MarkConcurrentRoots(VisitRootFlags flags) {
+ TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
+ // Visit all runtime roots and clear dirty flags, including the class loader roots. This is
+ // done to prevent incorrect class unloading: the GC does not dirty the card when storing the
+ // class during object allocation (doing so for every allocation would be slow). Since the
+ // card stays clean, the object may never get re-scanned by the sticky GC. Class unloading
+ // could then occur even though the class and class loader are still reachable through the
+ // object's class.
+ Runtime::Current()->VisitConcurrentRoots(
+ this,
+ static_cast<VisitRootFlags>(flags | kVisitRootFlagClassLoader));
+}
+
void StickyMarkSweep::Sweep(bool swap_bitmaps ATTRIBUTE_UNUSED) {
SweepArray(GetHeap()->GetLiveStack(), false);
}
diff --git a/runtime/gc/collector/sticky_mark_sweep.h b/runtime/gc/collector/sticky_mark_sweep.h
index 100ca64..45f912f 100644
--- a/runtime/gc/collector/sticky_mark_sweep.h
+++ b/runtime/gc/collector/sticky_mark_sweep.h
@@ -33,6 +33,12 @@
StickyMarkSweep(Heap* heap, bool is_concurrent, const std::string& name_prefix = "");
~StickyMarkSweep() {}
+ virtual void MarkConcurrentRoots(VisitRootFlags flags)
+ OVERRIDE
+ REQUIRES(Locks::heap_bitmap_lock_)
+ REQUIRES(!mark_stack_lock_)
+ REQUIRES_SHARED(Locks::mutator_lock_);
+
protected:
// Bind the live bits to the mark bits of bitmaps for all spaces, all spaces other than the
// alloc space will be marked as immune.
diff --git a/runtime/runtime.h b/runtime/runtime.h
index 364290d..6806180 100644
--- a/runtime/runtime.h
+++ b/runtime/runtime.h
@@ -107,9 +107,7 @@
kVisitRootFlagStartLoggingNewRoots = 0x4,
kVisitRootFlagStopLoggingNewRoots = 0x8,
kVisitRootFlagClearRootLog = 0x10,
- // Non moving means we can have optimizations where we don't visit some roots if they are
- // definitely reachable from another location. E.g. ArtMethod and ArtField roots.
- kVisitRootFlagNonMoving = 0x20,
+ kVisitRootFlagClassLoader = 0x20,
};
class Runtime {
diff --git a/test/141-class-unload/expected.txt b/test/141-class-unload/expected.txt
index 2b77b29..0a03ecb 100644
--- a/test/141-class-unload/expected.txt
+++ b/test/141-class-unload/expected.txt
@@ -21,3 +21,4 @@
class null false test
JNI_OnUnload called
Number of loaded unload-ex maps 0
+Too small false
diff --git a/test/141-class-unload/src/Main.java b/test/141-class-unload/src/Main.java
index f9b6180..2a6e944 100644
--- a/test/141-class-unload/src/Main.java
+++ b/test/141-class-unload/src/Main.java
@@ -47,6 +47,8 @@
stressTest(constructor);
// Test that the oat files are unloaded.
testOatFilesUnloaded(getPid());
+ // Test that objects keep class loader live for sticky GC.
+ testStickyUnload(constructor);
} catch (Exception e) {
e.printStackTrace();
}
@@ -161,6 +163,30 @@
return intHolder;
}
+ private static Object allocObjectInOtherClassLoader(Constructor<?> constructor)
+ throws Exception {
+ ClassLoader loader = (ClassLoader) constructor.newInstance(
+ DEX_FILE, LIBRARY_SEARCH_PATH, ClassLoader.getSystemClassLoader());
+ return loader.loadClass("IntHolder").newInstance();
+ }
+
+ // Regression test for public issue 227182.
+ private static void testStickyUnload(Constructor<?> constructor) throws Exception {
+ String s = "";
+ for (int i = 0; i < 10; ++i) {
+ s = "";
+ // The object is the only thing preventing the class loader from being unloaded.
+ Object o = allocObjectInOtherClassLoader(constructor);
+ for (int j = 0; j < 1000; ++j) {
+ s += j + " ";
+ }
+ // Make sure the object still has a valid class (hasn't been incorrectly unloaded).
+ s += o.getClass().getName();
+ o = null;
+ }
+ System.out.println("Too small " + (s.length() < 1000));
+ }
+
private static WeakReference<Class> setUpUnloadClassWeak(Constructor<?> constructor)
throws Exception {
return new WeakReference<Class>(setUpUnloadClass(constructor));