summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--runtime/class_linker.cc8
-rw-r--r--runtime/mirror/class.cc32
2 files changed, 36 insertions, 4 deletions
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index 7821da3397..48dc88d2b1 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -4063,10 +4063,10 @@ bool ClassLinker::LinkClass(Thread* self, const char* descriptor, Handle<mirror:
// Retire the temporary class and create the correctly sized resolved class.
StackHandleScope<1> hs(self);
auto h_new_class = hs.NewHandle(klass->CopyOf(self, class_size, imt, image_pointer_size_));
- // Set array lengths to 0 since we don't want the GC to visit two different classes with the
- // same ArtFields with the same If this occurs, it causes bugs in remembered sets since the GC
- // may not see any references to the from space and clean the card. Though there was references
- // to the from space that got marked by the first class.
+ // Set arrays to null since we don't want to have multiple classes with the same ArtField or
+ // ArtMethod array pointers. If this occurs, it causes bugs in remembered sets since the GC
+ // may not see any references to the target space and clean the card for a class if another
+ // class had the same array pointer.
klass->SetDirectMethodsPtrUnchecked(nullptr);
klass->SetVirtualMethodsPtr(nullptr);
klass->SetSFieldsPtrUnchecked(nullptr);
diff --git a/runtime/mirror/class.cc b/runtime/mirror/class.cc
index 701ba4a78b..6af90bbc65 100644
--- a/runtime/mirror/class.cc
+++ b/runtime/mirror/class.cc
@@ -824,6 +824,34 @@ void Class::PopulateEmbeddedImtAndVTable(ArtMethod* const (&methods)[kImtSize],
}
}
+class ReadBarrierOnNativeRootsVisitor {
+ public:
+ void operator()(mirror::Object* obj ATTRIBUTE_UNUSED,
+ MemberOffset offset ATTRIBUTE_UNUSED,
+ bool is_static ATTRIBUTE_UNUSED) const {}
+
+ void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
+ SHARED_REQUIRES(Locks::mutator_lock_) {
+ if (!root->IsNull()) {
+ VisitRoot(root);
+ }
+ }
+
+ void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
+ SHARED_REQUIRES(Locks::mutator_lock_) {
+ mirror::Object* old_ref = root->AsMirrorPtr();
+ mirror::Object* new_ref = ReadBarrier::BarrierForRoot(root);
+ if (old_ref != new_ref) {
+      // Update the field atomically. This may fail if the mutator updates it before us, but it's ok.
+ auto* atomic_root =
+ reinterpret_cast<Atomic<mirror::CompressedReference<mirror::Object>>*>(root);
+ atomic_root->CompareExchangeStrongSequentiallyConsistent(
+ mirror::CompressedReference<mirror::Object>::FromMirrorPtr(old_ref),
+ mirror::CompressedReference<mirror::Object>::FromMirrorPtr(new_ref));
+ }
+ }
+};
+
// The pre-fence visitor for Class::CopyOf().
class CopyClassVisitor {
public:
@@ -842,6 +870,10 @@ class CopyClassVisitor {
mirror::Class::SetStatus(h_new_class_obj, Class::kStatusResolving, self_);
h_new_class_obj->PopulateEmbeddedImtAndVTable(imt_, pointer_size_);
h_new_class_obj->SetClassSize(new_length_);
+      // Visit all of the references to make sure there are no from-space references in the
+      // native roots.
+ h_new_class_obj->VisitReferences<true>(h_new_class_obj->GetClass(),
+ ReadBarrierOnNativeRootsVisitor());
}
private: