Enforce "disallow read barrier" checks for GC roots.
Fix a bug in read barrier option propagation: the recursive
ArtMethod::VisitRoots call for a proxy's interface method dropped the
caller's kReadBarrierOption and fell back to the defaulted template
argument.
Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Bug: 194017033
Change-Id: I571272cdf6afc5cdfda03e995281f9bc3ad8f8b9
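
For context, the invariant being enforced works roughly like the
standalone C++ sketch below. The names (DisallowReadBarrierScope,
tls_disallow_count) are illustrative only; in ART the count lives on
Thread (GetDebugDisallowReadBarrierCount, as seen in the hunks below)
and the check sits at the top of the root read barriers.

    // Hypothetical, minimal model of the "disallow read barrier" check.
    #include <cassert>
    #include <cstdint>

    // Per-thread count of active scopes in which read barriers are forbidden.
    thread_local uint32_t tls_disallow_count = 0;

    // RAII guard: while one is alive, taking a read barrier on this thread
    // is a bug that should CHECK-fail in debug builds.
    class DisallowReadBarrierScope {
     public:
      DisallowReadBarrierScope() { ++tls_disallow_count; }
      ~DisallowReadBarrierScope() { --tls_disallow_count; }
    };

    // Stand-in for a root read barrier: assert that no scope forbids
    // barriers before doing any marking/forwarding work (elided here).
    template <typename T>
    T* BarrierForRoot(T** root) {
      assert(tls_disallow_count == 0u && "read barrier inside disallow scope");
      return *root;
    }

    int main() {
      int obj = 42;
      int* root = &obj;
      (void)BarrierForRoot(&root);     // OK: no disallow scope active.
      DisallowReadBarrierScope no_rb;
      // (void)BarrierForRoot(&root);  // Would assert-fail here.
    }
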
diff --git a/runtime/art_method-inl.h b/runtime/art_method-inl.h
index 5c9c739..c86e248 100644
--- a/runtime/art_method-inl.h
+++ b/runtime/art_method-inl.h
@@ -398,7 +398,7 @@
// However, for proxies we need to keep the interface method alive, so we visit its roots.
ArtMethod* interface_method = GetInterfaceMethodForProxyUnchecked(pointer_size);
DCHECK(interface_method != nullptr);
- interface_method->VisitRoots(visitor, pointer_size);
+ interface_method->VisitRoots<kReadBarrierOption>(visitor, pointer_size);
}
}
}
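
The hunk above is the propagation fix: the recursive VisitRoots call
omitted the template argument, so it was always instantiated with the
defaulted read barrier option no matter what the outer caller asked
for. A hypothetical reduction of the failure mode (illustrative names,
not ART's exact signatures):

    enum ReadBarrierOption { kWithReadBarrier, kWithoutReadBarrier };

    template <ReadBarrierOption kOption = kWithReadBarrier>
    void VisitRoots() {
      // A real visitor would read roots through a barrier iff
      // kOption == kWithReadBarrier.
    }

    template <ReadBarrierOption kOption = kWithReadBarrier>
    void VisitProxyRoots() {
      VisitRoots();           // Bug: always VisitRoots<kWithReadBarrier>.
      VisitRoots<kOption>();  // Fix: the caller's option propagates.
    }

    int main() {
      // Before the fix, this still took read barriers internally.
      VisitProxyRoots<kWithoutReadBarrier>();
    }
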
diff --git a/runtime/read_barrier-inl.h b/runtime/read_barrier-inl.h
index 2ccb8a1..b0434d8 100644
--- a/runtime/read_barrier-inl.h
+++ b/runtime/read_barrier-inl.h
@@ -103,7 +103,7 @@
MirrorType* ref = *root;
const bool with_read_barrier = kReadBarrierOption == kWithReadBarrier;
if (kUseReadBarrier && with_read_barrier) {
- if (kIsDebugBuild) {
+ if (kCheckDebugDisallowReadBarrierCount) {
Thread* const self = Thread::Current();
if (self != nullptr) {
CHECK_EQ(self->GetDebugDisallowReadBarrierCount(), 0u);
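
Note that the gate here changes from a bare kIsDebugBuild check to a
dedicated kCheckDebugDisallowReadBarrierCount flag, presumably a
compile-time constant defined alongside the barrier code, so the count
check can be toggled independently of other debug-build behavior. The
hunk below applies the same gate to the CompressedReference overload of
BarrierForRoot, which previously had no such check at all.
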
@@ -147,31 +147,42 @@
GcRootSource* gc_root_source) {
MirrorType* ref = root->AsMirrorPtr();
const bool with_read_barrier = kReadBarrierOption == kWithReadBarrier;
- if (with_read_barrier && kUseBakerReadBarrier) {
- // TODO: separate the read barrier code from the collector code more.
- Thread* self = Thread::Current();
- if (self != nullptr && self->GetIsGcMarking()) {
- ref = reinterpret_cast<MirrorType*>(Mark(ref));
- }
- AssertToSpaceInvariant(gc_root_source, ref);
- return ref;
- } else if (with_read_barrier && kUseTableLookupReadBarrier) {
- Thread* self = Thread::Current();
- if (self != nullptr &&
- self->GetIsGcMarking() &&
- Runtime::Current()->GetHeap()->GetReadBarrierTable()->IsSet(ref)) {
- auto old_ref = mirror::CompressedReference<MirrorType>::FromMirrorPtr(ref);
- ref = reinterpret_cast<MirrorType*>(Mark(ref));
- auto new_ref = mirror::CompressedReference<MirrorType>::FromMirrorPtr(ref);
- // Update the field atomically. This may fail if mutator updates before us, but it's ok.
- if (new_ref.AsMirrorPtr() != old_ref.AsMirrorPtr()) {
- auto* atomic_root =
- reinterpret_cast<Atomic<mirror::CompressedReference<MirrorType>>*>(root);
- atomic_root->CompareAndSetStrongRelaxed(old_ref, new_ref);
+ if (kUseReadBarrier && with_read_barrier) {
+ if (kCheckDebugDisallowReadBarrierCount) {
+ Thread* const self = Thread::Current();
+ if (self != nullptr) {
+ CHECK_EQ(self->GetDebugDisallowReadBarrierCount(), 0u);
}
}
- AssertToSpaceInvariant(gc_root_source, ref);
- return ref;
+ if (kUseBakerReadBarrier) {
+ // TODO: separate the read barrier code from the collector code more.
+ Thread* self = Thread::Current();
+ if (self != nullptr && self->GetIsGcMarking()) {
+ ref = reinterpret_cast<MirrorType*>(Mark(ref));
+ }
+ AssertToSpaceInvariant(gc_root_source, ref);
+ return ref;
+ } else if (kUseTableLookupReadBarrier) {
+ Thread* self = Thread::Current();
+ if (self != nullptr &&
+ self->GetIsGcMarking() &&
+ Runtime::Current()->GetHeap()->GetReadBarrierTable()->IsSet(ref)) {
+ auto old_ref = mirror::CompressedReference<MirrorType>::FromMirrorPtr(ref);
+ ref = reinterpret_cast<MirrorType*>(Mark(ref));
+ auto new_ref = mirror::CompressedReference<MirrorType>::FromMirrorPtr(ref);
+        // Update the field atomically. This may fail if the mutator updates it first; that is ok.
+ if (new_ref.AsMirrorPtr() != old_ref.AsMirrorPtr()) {
+ auto* atomic_root =
+ reinterpret_cast<Atomic<mirror::CompressedReference<MirrorType>>*>(root);
+ atomic_root->CompareAndSetStrongRelaxed(old_ref, new_ref);
+ }
+ }
+ AssertToSpaceInvariant(gc_root_source, ref);
+ return ref;
+ } else {
+ LOG(FATAL) << "Unexpected read barrier type";
+ UNREACHABLE();
+ }
} else {
return ref;
}
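
Beyond adding the check, the restructuring nests the Baker and
table-lookup paths under a single kUseReadBarrier && with_read_barrier
test, matching the shape of the first overload, and the new trailing
else branch turns an unexpected barrier configuration into an explicit
LOG(FATAL) plus UNREACHABLE() instead of the previous silent return of
the unmarked reference.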