| Field | Value |
|---|---|
| author | 2015-07-24 18:58:57 +0000 |
| committer | 2015-07-24 18:58:57 +0000 |
| commit | 0fc01dd3725792a190f4c97cbdfefd2733b76e21 (patch) |
| tree | eb713496fee4a6885a3c240309e5efc78c6774cf |
| parent | 2a7ea34cda9bf776339dfc7f4a22ab01f9525f2d (diff) |
| parent | da7c650022a974be10e2f00fa07d5109e3d8826f (diff) |
Merge "Visit class native roots from VisitReferences"
| Mode | File | Lines changed |
|---|---|---|
| -rw-r--r-- | compiler/driver/compiler_driver.cc | 9 |
| -rw-r--r-- | compiler/image_writer.cc | 13 |
| -rw-r--r-- | patchoat/patchoat.h | 9 |
| -rw-r--r-- | runtime/art_field.h | 3 |
| -rw-r--r-- | runtime/art_method.h | 3 |
| -rw-r--r-- | runtime/class_linker.cc | 12 |
| -rw-r--r-- | runtime/gc/accounting/mod_union_table.cc | 40 |
| -rw-r--r-- | runtime/gc/accounting/remembered_set.cc | 17 |
| -rw-r--r-- | runtime/gc/collector/concurrent_copying.cc | 84 |
| -rw-r--r-- | runtime/gc/collector/concurrent_copying.h | 2 |
| -rw-r--r-- | runtime/gc/collector/mark_compact.cc | 26 |
| -rw-r--r-- | runtime/gc/collector/mark_sweep.cc | 52 |
| -rw-r--r-- | runtime/gc/collector/mark_sweep.h | 6 |
| -rw-r--r-- | runtime/gc/collector/semi_space.cc | 41 |
| -rw-r--r-- | runtime/gc/heap.cc | 35 |
| -rw-r--r-- | runtime/mirror/class-inl.h | 8 |
| -rw-r--r-- | runtime/mirror/object.cc | 5 |
17 files changed, 295 insertions, 70 deletions
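For review context, the sketch below illustrates the visitor surface this change relies on: mirror::Class::VisitReferences now also visits a class's native roots (ArtField and ArtMethod GC roots), so object-scanning visitors gain VisitRoot/VisitRootIfNonNull overloads taking mirror::CompressedReference<mirror::Object>. This is a hypothetical, no-op visitor assembled from the patterns visible in the diff below, not a class added by the change, and it assumes the ART-internal types and macros (mirror::Object, MemberOffset, SHARED_REQUIRES, ATTRIBUTE_UNUSED) from the surrounding tree.

```cpp
// Illustrative only: mirrors the callback shapes used by the visitors in this
// change. It would compile only inside the ART source tree.
class ExampleNoOpVisitor {
 public:
  // Instance and static reference fields.
  void operator()(mirror::Object* obj ATTRIBUTE_UNUSED, MemberOffset offset ATTRIBUTE_UNUSED,
                  bool is_static ATTRIBUTE_UNUSED) const
      SHARED_REQUIRES(Locks::mutator_lock_) {}

  // java.lang.ref.Reference referents.
  void operator()(mirror::Class* klass ATTRIBUTE_UNUSED, mirror::Reference* ref ATTRIBUTE_UNUSED)
      const SHARED_REQUIRES(Locks::mutator_lock_) {}

  // New requirement from this change: native roots held by a class (ArtField/ArtMethod
  // declaring classes, dex cache arrays) are now reported through these callbacks.
  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    if (!root->IsNull()) {
      VisitRoot(root);
    }
  }

  void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    // A real visitor would mark, update, or verify *root here; ignoring the root is
    // valid for visitors that only care about managed reference fields.
  }
};
```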
diff --git a/compiler/driver/compiler_driver.cc b/compiler/driver/compiler_driver.cc
index 05705a2d31..a35f306612 100644
--- a/compiler/driver/compiler_driver.cc
+++ b/compiler/driver/compiler_driver.cc
@@ -883,8 +883,13 @@ class ClinitImageUpdate {
   }
 
   // java.lang.Reference visitor for VisitReferences.
-  void operator()(mirror::Class* /* klass */, mirror::Reference* /* ref */) const {
-  }
+  void operator()(mirror::Class* klass ATTRIBUTE_UNUSED, mirror::Reference* ref ATTRIBUTE_UNUSED)
+      const {}
+
+  // Ignore class native roots.
+  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
+      const {}
+  void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
 
   void Walk() SHARED_REQUIRES(Locks::mutator_lock_) {
     // Use the initial classes as roots for a search.
diff --git a/compiler/image_writer.cc b/compiler/image_writer.cc
index 3ba1415d18..293a488ccf 100644
--- a/compiler/image_writer.cc
+++ b/compiler/image_writer.cc
@@ -1189,6 +1189,13 @@ class FixupVisitor {
   FixupVisitor(ImageWriter* image_writer, Object* copy) : image_writer_(image_writer), copy_(copy) {
   }
 
+  // Ignore class roots since we don't have a way to map them to the destination. These are handled
+  // with other logic.
+  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
+      const {}
+  void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
+
+
   void operator()(Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
       REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
     Object* ref = obj->GetFieldObject<Object, kVerifyNone>(offset);
@@ -1200,8 +1207,7 @@ class FixupVisitor {
 
   // java.lang.ref.Reference visitor.
   void operator()(mirror::Class* klass ATTRIBUTE_UNUSED, mirror::Reference* ref) const
-      SHARED_REQUIRES(Locks::mutator_lock_)
-      REQUIRES(Locks::heap_bitmap_lock_) {
+      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
     copy_->SetFieldObjectWithoutWriteBarrier<false, true, kVerifyNone>(
         mirror::Reference::ReferentOffset(), image_writer_->GetImageAddress(ref->GetReferent()));
   }
@@ -1224,8 +1230,7 @@ class FixupClassVisitor FINAL : public FixupVisitor {
 
   void operator()(mirror::Class* klass ATTRIBUTE_UNUSED, mirror::Reference* ref ATTRIBUTE_UNUSED)
       const
-      SHARED_REQUIRES(Locks::mutator_lock_)
-      REQUIRES(Locks::heap_bitmap_lock_) {
+      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
     LOG(FATAL) << "Reference not expected here.";
   }
 };
diff --git a/patchoat/patchoat.h b/patchoat/patchoat.h
index 6da516cbd9..466dacb492 100644
--- a/patchoat/patchoat.h
+++ b/patchoat/patchoat.h
@@ -177,10 +177,15 @@ class PatchOat {
     PatchVisitor(PatchOat* patcher, mirror::Object* copy) : patcher_(patcher), copy_(copy) {}
     ~PatchVisitor() {}
     void operator() (mirror::Object* obj, MemberOffset off, bool b) const
-      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
+        REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
     // For reference classes.
     void operator() (mirror::Class* cls, mirror::Reference* ref) const
-      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
+        REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
+    // TODO: Consider using these for updating native class roots?
+    void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
+        const {}
+    void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
+
    private:
     PatchOat* const patcher_;
     mirror::Object* const copy_;
diff --git a/runtime/art_field.h b/runtime/art_field.h
index 1a0ee0f63e..fa0694b8ca 100644
--- a/runtime/art_field.h
+++ b/runtime/art_field.h
@@ -151,8 +151,9 @@ class ArtField FINAL {
   void SetObj(mirror::Object* object, mirror::Object* new_value)
       SHARED_REQUIRES(Locks::mutator_lock_);
 
+  // NO_THREAD_SAFETY_ANALYSIS since we don't know what the callback requires.
   template<typename RootVisitorType>
-  void VisitRoots(RootVisitorType& visitor) SHARED_REQUIRES(Locks::mutator_lock_);
+  void VisitRoots(RootVisitorType& visitor) NO_THREAD_SAFETY_ANALYSIS;
 
   bool IsVolatile() SHARED_REQUIRES(Locks::mutator_lock_) {
     return (GetAccessFlags() & kAccVolatile) != 0;
diff --git a/runtime/art_method.h b/runtime/art_method.h
index a5bd2f0309..90352b7c08 100644
--- a/runtime/art_method.h
+++ b/runtime/art_method.h
@@ -447,8 +447,9 @@ class ArtMethod FINAL {
                               bool* has_no_move_exception)
       SHARED_REQUIRES(Locks::mutator_lock_);
 
+  // NO_THREAD_SAFETY_ANALYSIS since we don't know what the callback requires.
   template<typename RootVisitorType>
-  void VisitRoots(RootVisitorType& visitor) SHARED_REQUIRES(Locks::mutator_lock_);
+  void VisitRoots(RootVisitorType& visitor) NO_THREAD_SAFETY_ANALYSIS;
 
   const DexFile* GetDexFile() SHARED_REQUIRES(Locks::mutator_lock_);
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index fc59d50195..6a76bf7f07 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -1252,8 +1252,7 @@ void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
   // There is 3 GC cases to handle:
   // Non moving concurrent:
   // This case is easy to handle since the reference members of ArtMethod and ArtFields are held
-  // live by the class and class roots. In this case we probably don't even need to call
-  // VisitNativeRoots.
+  // live by the class and class roots.
   //
   // Moving non-concurrent:
   // This case needs to call visit VisitNativeRoots in case the classes or dex cache arrays move.
@@ -1266,23 +1265,14 @@ void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
     // marked concurrently and we don't hold the classlinker_classes_lock_ when we do the copy.
     for (GcRoot<mirror::Class>& root : class_table_) {
       buffered_visitor.VisitRoot(root);
-      if ((flags & kVisitRootFlagNonMoving) == 0) {
-        // Don't bother visiting ArtField and ArtMethod if kVisitRootFlagNonMoving is set since
-        // these roots are all reachable from the class or dex cache.
-        root.Read()->VisitNativeRoots(buffered_visitor, image_pointer_size_);
-      }
     }
     // PreZygote classes can't move so we won't need to update fields' declaring classes.
     for (GcRoot<mirror::Class>& root : pre_zygote_class_table_) {
       buffered_visitor.VisitRoot(root);
-      if ((flags & kVisitRootFlagNonMoving) == 0) {
-        root.Read()->VisitNativeRoots(buffered_visitor, image_pointer_size_);
-      }
     }
   } else if ((flags & kVisitRootFlagNewRoots) != 0) {
     for (auto& root : new_class_roots_) {
       mirror::Class* old_ref = root.Read<kWithoutReadBarrier>();
-      old_ref->VisitNativeRoots(buffered_visitor, image_pointer_size_);
       root.VisitRoot(visitor, RootInfo(kRootStickyClass));
       mirror::Class* new_ref = root.Read<kWithoutReadBarrier>();
       if (UNLIKELY(new_ref != old_ref)) {
diff --git a/runtime/gc/accounting/mod_union_table.cc b/runtime/gc/accounting/mod_union_table.cc
index 68e7fa03e3..157f609312 100644
--- a/runtime/gc/accounting/mod_union_table.cc
+++ b/runtime/gc/accounting/mod_union_table.cc
@@ -110,6 +110,18 @@ class ModUnionUpdateObjectReferencesVisitor {
     }
   }
 
+  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
+      SHARED_REQUIRES(Locks::mutator_lock_) {
+    if (kIsDebugBuild && !root->IsNull()) {
+      VisitRoot(root);
+    }
+  }
+
+  void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
+      SHARED_REQUIRES(Locks::mutator_lock_) {
+    DCHECK(from_space_->HasAddress(root->AsMirrorPtr()));
+  }
+
  private:
   MarkObjectVisitor* const visitor_;
   // Space which we are scanning
@@ -163,7 +175,7 @@ class AddToReferenceArrayVisitor {
   }
 
   // Extra parameters are required since we use this same visitor signature for checking objects.
-  void operator()(Object* obj, MemberOffset offset, bool /*is_static*/) const
+  void operator()(Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
       SHARED_REQUIRES(Locks::mutator_lock_) {
     mirror::HeapReference<Object>* ref_ptr = obj->GetFieldObjectReferenceAddr(offset);
     mirror::Object* ref = ref_ptr->AsMirrorPtr();
@@ -174,6 +186,18 @@ class AddToReferenceArrayVisitor {
     }
   }
 
+  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
+      SHARED_REQUIRES(Locks::mutator_lock_) {
+    if (kIsDebugBuild && !root->IsNull()) {
+      VisitRoot(root);
+    }
+  }
+
+  void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
+      SHARED_REQUIRES(Locks::mutator_lock_) {
+    DCHECK(!mod_union_table_->ShouldAddReference(root->AsMirrorPtr()));
+  }
+
  private:
   ModUnionTableReferenceCache* const mod_union_table_;
   std::vector<mirror::HeapReference<Object>*>* const references_;
@@ -208,7 +232,7 @@ class CheckReferenceVisitor {
   }
 
   // Extra parameters are required since we use this same visitor signature for checking objects.
-  void operator()(Object* obj, MemberOffset offset, bool /*is_static*/) const
+  void operator()(Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
      SHARED_REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    mirror::Object* ref = obj->GetFieldObject<mirror::Object>(offset);
    if (ref != nullptr && mod_union_table_->ShouldAddReference(ref) &&
@@ -228,6 +252,18 @@ class CheckReferenceVisitor {
     }
   }
 
+  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
+      SHARED_REQUIRES(Locks::mutator_lock_) {
+    if (kIsDebugBuild && !root->IsNull()) {
+      VisitRoot(root);
+    }
+  }
+
+  void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
+      SHARED_REQUIRES(Locks::mutator_lock_) {
+    DCHECK(!mod_union_table_->ShouldAddReference(root->AsMirrorPtr()));
+  }
+
  private:
   ModUnionTableReferenceCache* const mod_union_table_;
   const std::set<const Object*>& references_;
diff --git a/runtime/gc/accounting/remembered_set.cc b/runtime/gc/accounting/remembered_set.cc
index 994a0ad4ac..251b50d93b 100644
--- a/runtime/gc/accounting/remembered_set.cc
+++ b/runtime/gc/accounting/remembered_set.cc
@@ -67,7 +67,7 @@ class RememberedSetReferenceVisitor {
       : collector_(collector), target_space_(target_space),
         contains_reference_to_target_space_(contains_reference_to_target_space) {}
 
-  void operator()(mirror::Object* obj, MemberOffset offset, bool /* is_static */) const
+  void operator()(mirror::Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
       SHARED_REQUIRES(Locks::mutator_lock_) {
     DCHECK(obj != nullptr);
     mirror::HeapReference<mirror::Object>* ref_ptr = obj->GetFieldObjectReferenceAddr(offset);
@@ -79,14 +79,25 @@ class RememberedSetReferenceVisitor {
   }
 
   void operator()(mirror::Class* klass, mirror::Reference* ref) const
-      SHARED_REQUIRES(Locks::mutator_lock_)
-      REQUIRES(Locks::heap_bitmap_lock_) {
+      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
     if (target_space_->HasAddress(ref->GetReferent())) {
       *contains_reference_to_target_space_ = true;
       collector_->DelayReferenceReferent(klass, ref);
     }
   }
 
+  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
+      SHARED_REQUIRES(Locks::mutator_lock_) {
+    if (kIsDebugBuild && !root->IsNull()) {
+      VisitRoot(root);
+    }
+  }
+
+  void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
+      SHARED_REQUIRES(Locks::mutator_lock_) {
+    DCHECK(!target_space_->HasAddress(root->AsMirrorPtr()));
+  }
+
  private:
   collector::GarbageCollector* const collector_;
   space::ContinuousSpace* const target_space_;
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index baa33b398e..ec689f8528 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -638,7 +638,7 @@ class ConcurrentCopyingVerifyNoFromSpaceRefsFieldVisitor {
   explicit ConcurrentCopyingVerifyNoFromSpaceRefsFieldVisitor(ConcurrentCopying* collector)
       : collector_(collector) {}
 
-  void operator()(mirror::Object* obj, MemberOffset offset, bool /* is_static */) const
+  void operator()(mirror::Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
      SHARED_REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE {
    mirror::Object* ref =
        obj->GetFieldObject<mirror::Object, kDefaultVerifyFlags, kWithoutReadBarrier>(offset);
@@ -651,6 +651,19 @@ class ConcurrentCopyingVerifyNoFromSpaceRefsFieldVisitor {
     this->operator()(ref, mirror::Reference::ReferentOffset(), false);
   }
 
+  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
+      SHARED_REQUIRES(Locks::mutator_lock_) {
+    if (!root->IsNull()) {
+      VisitRoot(root);
+    }
+  }
+
+  void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
+      SHARED_REQUIRES(Locks::mutator_lock_) {
+    ConcurrentCopyingVerifyNoFromSpaceRefsVisitor visitor(collector_);
+    visitor(root->AsMirrorPtr());
+  }
+
  private:
   ConcurrentCopying* const collector_;
 };
@@ -750,18 +763,31 @@ class ConcurrentCopyingAssertToSpaceInvariantFieldVisitor {
   explicit ConcurrentCopyingAssertToSpaceInvariantFieldVisitor(ConcurrentCopying* collector)
       : collector_(collector) {}
 
-  void operator()(mirror::Object* obj, MemberOffset offset, bool /* is_static */) const
+  void operator()(mirror::Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
      SHARED_REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE {
    mirror::Object* ref =
        obj->GetFieldObject<mirror::Object, kDefaultVerifyFlags, kWithoutReadBarrier>(offset);
    ConcurrentCopyingAssertToSpaceInvariantRefsVisitor visitor(collector_);
    visitor(ref);
  }
-  void operator()(mirror::Class* klass, mirror::Reference* /* ref */) const
+  void operator()(mirror::Class* klass, mirror::Reference* ref ATTRIBUTE_UNUSED) const
      SHARED_REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE {
    CHECK(klass->IsTypeOfReferenceClass());
  }
 
+  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
+      SHARED_REQUIRES(Locks::mutator_lock_) {
+    if (!root->IsNull()) {
+      VisitRoot(root);
+    }
+  }
+
+  void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
+      SHARED_REQUIRES(Locks::mutator_lock_) {
+    ConcurrentCopyingAssertToSpaceInvariantRefsVisitor visitor(collector_);
+    visitor(root->AsMirrorPtr());
+  }
+
  private:
   ConcurrentCopying* const collector_;
 };
@@ -1500,6 +1526,18 @@ class ConcurrentCopyingRefFieldsVisitor {
     collector_->DelayReferenceReferent(klass, ref);
   }
 
+  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
+      SHARED_REQUIRES(Locks::mutator_lock_) {
+    if (!root->IsNull()) {
+      VisitRoot(root);
+    }
+  }
+
+  void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
+      SHARED_REQUIRES(Locks::mutator_lock_) {
+    collector_->MarkRoot(root);
+  }
+
  private:
   ConcurrentCopying* const collector_;
 };
@@ -1513,7 +1551,8 @@ void ConcurrentCopying::Scan(mirror::Object* to_ref) {
 
 // Process a field.
 inline void ConcurrentCopying::Process(mirror::Object* obj, MemberOffset offset) {
-  mirror::Object* ref = obj->GetFieldObject<mirror::Object, kVerifyNone, kWithoutReadBarrier, false>(offset);
+  mirror::Object* ref = obj->GetFieldObject<
+      mirror::Object, kVerifyNone, kWithoutReadBarrier, false>(offset);
   if (ref == nullptr || region_space_->IsInToSpace(ref)) {
     return;
   }
@@ -1530,8 +1569,8 @@ inline void ConcurrentCopying::Process(mirror::Object* obj, MemberOffset offset)
       // It was updated by the mutator.
       break;
     }
-  } while (!obj->CasFieldWeakSequentiallyConsistentObjectWithoutWriteBarrier<false, false, kVerifyNone>(
-      offset, expected_ref, new_ref));
+  } while (!obj->CasFieldWeakSequentiallyConsistentObjectWithoutWriteBarrier<
+      false, false, kVerifyNone>(offset, expected_ref, new_ref));
 }
 
 // Process some roots.
@@ -1559,22 +1598,18 @@ void ConcurrentCopying::VisitRoots(
   }
 }
 
-void ConcurrentCopying::VisitRoots(
-    mirror::CompressedReference<mirror::Object>** roots, size_t count,
-    const RootInfo& info ATTRIBUTE_UNUSED) {
-  for (size_t i = 0; i < count; ++i) {
-    mirror::CompressedReference<mirror::Object>* root = roots[i];
-    mirror::Object* ref = root->AsMirrorPtr();
-    if (ref == nullptr || region_space_->IsInToSpace(ref)) {
-      continue;
-    }
-    mirror::Object* to_ref = Mark(ref);
-    if (to_ref == ref) {
-      continue;
-    }
+void ConcurrentCopying::MarkRoot(mirror::CompressedReference<mirror::Object>* root) {
+  DCHECK(!root->IsNull());
+  mirror::Object* const ref = root->AsMirrorPtr();
+  if (region_space_->IsInToSpace(ref)) {
+    return;
+  }
+  mirror::Object* to_ref = Mark(ref);
+  if (to_ref != ref) {
     auto* addr = reinterpret_cast<Atomic<mirror::CompressedReference<mirror::Object>>*>(root);
     auto expected_ref = mirror::CompressedReference<mirror::Object>::FromMirrorPtr(ref);
     auto new_ref = mirror::CompressedReference<mirror::Object>::FromMirrorPtr(to_ref);
+    // If the cas fails, then it was updated by the mutator.
     do {
       if (ref != addr->LoadRelaxed().AsMirrorPtr()) {
         // It was updated by the mutator.
@@ -1584,6 +1619,17 @@ void ConcurrentCopying::VisitRoots(
   }
 }
 
+void ConcurrentCopying::VisitRoots(
+    mirror::CompressedReference<mirror::Object>** roots, size_t count,
+    const RootInfo& info ATTRIBUTE_UNUSED) {
+  for (size_t i = 0; i < count; ++i) {
+    mirror::CompressedReference<mirror::Object>* const root = roots[i];
+    if (!root->IsNull()) {
+      MarkRoot(root);
+    }
+  }
+}
+
 // Fill the given memory block with a dummy object. Used to fill in a
 // copy of objects that was lost in race.
 void ConcurrentCopying::FillWithDummyObject(mirror::Object* dummy_obj, size_t byte_size) {
diff --git a/runtime/gc/collector/concurrent_copying.h b/runtime/gc/collector/concurrent_copying.h
index d324ce1c2a..a4fd71c0a5 100644
--- a/runtime/gc/collector/concurrent_copying.h
+++ b/runtime/gc/collector/concurrent_copying.h
@@ -122,6 +122,8 @@ class ConcurrentCopying : public GarbageCollector {
   virtual void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info)
       OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_)
       REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_);
+  void MarkRoot(mirror::CompressedReference<mirror::Object>* root)
+      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_);
   virtual void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
                           const RootInfo& info)
       OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_)
diff --git a/runtime/gc/collector/mark_compact.cc b/runtime/gc/collector/mark_compact.cc
index c5ad613f24..4b2c588dae 100644
--- a/runtime/gc/collector/mark_compact.cc
+++ b/runtime/gc/collector/mark_compact.cc
@@ -438,6 +438,19 @@ class UpdateReferenceVisitor {
         ref->GetFieldObjectReferenceAddr<kVerifyNone>(mirror::Reference::ReferentOffset()));
   }
 
+  // TODO: Remove NO_THREAD_SAFETY_ANALYSIS when clang better understands visitors.
+  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
+      NO_THREAD_SAFETY_ANALYSIS {
+    if (!root->IsNull()) {
+      VisitRoot(root);
+    }
+  }
+
+  void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
+      NO_THREAD_SAFETY_ANALYSIS {
+    root->Assign(collector_->GetMarkedForwardAddress(root->AsMirrorPtr()));
+  }
+
  private:
   MarkCompact* const collector_;
 };
@@ -575,6 +588,19 @@ class MarkCompactMarkObjectVisitor {
     collector_->DelayReferenceReferent(klass, ref);
   }
 
+  // TODO: Remove NO_THREAD_SAFETY_ANALYSIS when clang better understands visitors.
+  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
+      NO_THREAD_SAFETY_ANALYSIS {
+    if (!root->IsNull()) {
+      VisitRoot(root);
+    }
+  }
+
+  void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
+      NO_THREAD_SAFETY_ANALYSIS {
+    collector_->MarkObject(root->AsMirrorPtr());
+  }
+
  private:
   MarkCompact* const collector_;
 };
diff --git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc
index 92dde51271..7f2c2048f6 100644
--- a/runtime/gc/collector/mark_sweep.cc
+++ b/runtime/gc/collector/mark_sweep.cc
@@ -365,7 +365,7 @@ class MarkSweepMarkObjectSlowPath {
       : mark_sweep_(mark_sweep), holder_(holder), offset_(offset) {
   }
 
-  void operator()(const mirror::Object* obj) const ALWAYS_INLINE NO_THREAD_SAFETY_ANALYSIS {
+  void operator()(const mirror::Object* obj) const NO_THREAD_SAFETY_ANALYSIS {
     if (kProfileLargeObjects) {
       // TODO: Differentiate between marking and testing somehow.
       ++mark_sweep_->large_object_test_;
@@ -597,8 +597,7 @@ class ScanObjectVisitor {
       : mark_sweep_(mark_sweep) {}
 
   void operator()(mirror::Object* obj) const ALWAYS_INLINE
-      SHARED_REQUIRES(Locks::mutator_lock_)
-      REQUIRES(Locks::heap_bitmap_lock_) {
+      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
     if (kCheckLocks) {
       Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
       Locks::heap_bitmap_lock_->AssertExclusiveHeld(Thread::Current());
@@ -649,13 +648,33 @@ class MarkStackTask : public Task {
  protected:
   class MarkObjectParallelVisitor {
    public:
-    explicit MarkObjectParallelVisitor(MarkStackTask<kUseFinger>* chunk_task,
-                                       MarkSweep* mark_sweep) ALWAYS_INLINE
-        : chunk_task_(chunk_task), mark_sweep_(mark_sweep) {}
+    ALWAYS_INLINE explicit MarkObjectParallelVisitor(MarkStackTask<kUseFinger>* chunk_task,
+                                                     MarkSweep* mark_sweep)
+        : chunk_task_(chunk_task), mark_sweep_(mark_sweep) {}
+
+    void operator()(mirror::Object* obj, MemberOffset offset, bool /* static */) const
+        ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) {
+      Mark(obj->GetFieldObject<mirror::Object>(offset));
+    }
 
-    void operator()(mirror::Object* obj, MemberOffset offset, bool /* static */) const ALWAYS_INLINE
+    void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
         SHARED_REQUIRES(Locks::mutator_lock_) {
-      mirror::Object* ref = obj->GetFieldObject<mirror::Object>(offset);
+      if (!root->IsNull()) {
+        VisitRoot(root);
+      }
+    }
+
+    void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
+        SHARED_REQUIRES(Locks::mutator_lock_) {
+      if (kCheckLocks) {
+        Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
+        Locks::heap_bitmap_lock_->AssertExclusiveHeld(Thread::Current());
+      }
+      Mark(root->AsMirrorPtr());
+    }
+
+   private:
+    void Mark(mirror::Object* ref) const ALWAYS_INLINE SHARED_REQUIRES(Locks::mutator_lock_) {
       if (ref != nullptr && mark_sweep_->MarkObjectParallel(ref)) {
         if (kUseFinger) {
           std::atomic_thread_fence(std::memory_order_seq_cst);
@@ -668,7 +687,6 @@ class MarkStackTask : public Task {
       }
     }
 
-   private:
     MarkStackTask<kUseFinger>* const chunk_task_;
     MarkSweep* const mark_sweep_;
   };
@@ -1268,6 +1286,22 @@ class MarkVisitor {
     mark_sweep_->MarkObject(obj->GetFieldObject<mirror::Object>(offset), obj, offset);
   }
 
+  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
+      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
+    if (!root->IsNull()) {
+      VisitRoot(root);
+    }
+  }
+
+  void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
+      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_) {
+    if (kCheckLocks) {
+      Locks::mutator_lock_->AssertSharedHeld(Thread::Current());
+      Locks::heap_bitmap_lock_->AssertExclusiveHeld(Thread::Current());
+    }
+    mark_sweep_->MarkObject(root->AsMirrorPtr());
+  }
+
  private:
   MarkSweep* const mark_sweep_;
 };
diff --git a/runtime/gc/collector/mark_sweep.h b/runtime/gc/collector/mark_sweep.h
index 99e00f9fd9..606be63d93 100644
--- a/runtime/gc/collector/mark_sweep.h
+++ b/runtime/gc/collector/mark_sweep.h
@@ -58,12 +58,12 @@ class MarkSweep : public GarbageCollector {
   ~MarkSweep() {}
 
-  virtual void RunPhases() OVERRIDE NO_THREAD_SAFETY_ANALYSIS;
+  virtual void RunPhases() OVERRIDE REQUIRES(!mark_stack_lock_);
   void InitializePhase();
-  void MarkingPhase() SHARED_REQUIRES(Locks::mutator_lock_, !mark_stack_lock_);
+  void MarkingPhase() SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
   void PausePhase() REQUIRES(Locks::mutator_lock_, !mark_stack_lock_);
   void ReclaimPhase() SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
-  void FinishPhase() SHARED_REQUIRES(Locks::mutator_lock_);
+  void FinishPhase();
 
   virtual void MarkReachableObjects()
       SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(Locks::heap_bitmap_lock_, !mark_stack_lock_);
diff --git a/runtime/gc/collector/semi_space.cc b/runtime/gc/collector/semi_space.cc
index e93ff05026..acc1d9bdea 100644
--- a/runtime/gc/collector/semi_space.cc
+++ b/runtime/gc/collector/semi_space.cc
@@ -295,8 +295,26 @@ class SemiSpaceVerifyNoFromSpaceReferencesVisitor {
       LOG(FATAL) << ref << " found in from space";
     }
   }
+
+  // TODO: Remove NO_THREAD_SAFETY_ANALYSIS when clang better understands visitors.
+  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
+      NO_THREAD_SAFETY_ANALYSIS {
+    if (!root->IsNull()) {
+      VisitRoot(root);
+    }
+  }
+
+  void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
+      NO_THREAD_SAFETY_ANALYSIS {
+    if (kIsDebugBuild) {
+      Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
+      Locks::heap_bitmap_lock_->AssertExclusiveHeld(Thread::Current());
+    }
+    CHECK(!from_space_->HasAddress(root->AsMirrorPtr()));
+  }
+
  private:
-  space::ContinuousMemMapAllocSpace* from_space_;
+  space::ContinuousMemMapAllocSpace* const from_space_;
 };
 
 void SemiSpace::VerifyNoFromSpaceReferences(Object* obj) {
@@ -313,6 +331,7 @@ class SemiSpaceVerifyNoFromSpaceReferencesObjectVisitor {
     DCHECK(obj != nullptr);
     semi_space_->VerifyNoFromSpaceReferences(obj);
   }
+
  private:
   SemiSpace* const semi_space_;
 };
@@ -670,11 +689,27 @@ class SemiSpaceMarkObjectVisitor {
   }
 
   void operator()(mirror::Class* klass, mirror::Reference* ref) const
-      SHARED_REQUIRES(Locks::mutator_lock_)
-      REQUIRES(Locks::heap_bitmap_lock_) {
+      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
     collector_->DelayReferenceReferent(klass, ref);
   }
 
+  // TODO: Remove NO_THREAD_SAFETY_ANALYSIS when clang better understands visitors.
+  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
+      NO_THREAD_SAFETY_ANALYSIS {
+    if (!root->IsNull()) {
+      VisitRoot(root);
+    }
+  }
+
+  void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
+      NO_THREAD_SAFETY_ANALYSIS {
+    if (kIsDebugBuild) {
+      Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
+      Locks::heap_bitmap_lock_->AssertExclusiveHeld(Thread::Current());
+    }
+    collector_->MarkObject(root);
+  }
+
  private:
   SemiSpace* const collector_;
 };
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 7a9e418ec9..07309d8eb7 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -1780,7 +1780,7 @@ class ReferringObjectsFinder {
   }
 
   // For Object::VisitReferences.
-  void operator()(mirror::Object* obj, MemberOffset offset, bool /* is_static */) const
+  void operator()(mirror::Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    mirror::Object* ref = obj->GetFieldObject<mirror::Object>(offset);
    if (ref == object_ && (max_count_ == 0 || referring_objects_.size() < max_count_)) {
@@ -1788,6 +1788,10 @@ class ReferringObjectsFinder {
     }
   }
 
+  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
+      const {}
+  void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
+
  private:
   const mirror::Object* const object_;
   const uint32_t max_count_;
@@ -2612,15 +2616,14 @@ class VerifyReferenceVisitor : public SingleRootVisitor {
     return fail_count_->LoadSequentiallyConsistent();
   }
 
-  void operator()(mirror::Class* klass, mirror::Reference* ref) const
+  void operator()(mirror::Class* klass ATTRIBUTE_UNUSED, mirror::Reference* ref) const
       SHARED_REQUIRES(Locks::mutator_lock_) {
-    UNUSED(klass);
     if (verify_referent_) {
       VerifyReference(ref, ref->GetReferent(), mirror::Reference::ReferentOffset());
     }
   }
 
-  void operator()(mirror::Object* obj, MemberOffset offset, bool /*is_static*/) const
+  void operator()(mirror::Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const
       SHARED_REQUIRES(Locks::mutator_lock_) {
     VerifyReference(obj, obj->GetFieldObject<mirror::Object>(offset), offset);
   }
@@ -2629,7 +2632,19 @@ class VerifyReferenceVisitor : public SingleRootVisitor {
     return heap_->IsLiveObjectLocked(obj, true, false, true);
   }
 
-  void VisitRoot(mirror::Object* root, const RootInfo& root_info) OVERRIDE
+  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root) const
+      SHARED_REQUIRES(Locks::mutator_lock_) {
+    if (!root->IsNull()) {
+      VisitRoot(root);
+    }
+  }
+  void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const
+      SHARED_REQUIRES(Locks::mutator_lock_) {
+    const_cast<VerifyReferenceVisitor*>(this)->VisitRoot(
+        root->AsMirrorPtr(), RootInfo(kRootVMInternal));
+  }
+
+  virtual void VisitRoot(mirror::Object* root, const RootInfo& root_info) OVERRIDE
       SHARED_REQUIRES(Locks::mutator_lock_) {
     if (root == nullptr) {
       LOG(ERROR) << "Root is null with info " << root_info.GetType();
@@ -2747,7 +2762,7 @@ class VerifyObjectVisitor {
       : heap_(heap), fail_count_(fail_count), verify_referent_(verify_referent) {
   }
 
-  void operator()(mirror::Object* obj) const
+  void operator()(mirror::Object* obj)
       SHARED_REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
     // Note: we are verifying the references in obj but not obj itself, this is because obj must
     // be live or else how did we find it in the live bitmap?
@@ -2762,8 +2777,7 @@ class VerifyObjectVisitor {
     visitor->operator()(obj);
   }
 
-  void VerifyRoots() SHARED_REQUIRES(Locks::mutator_lock_)
-      REQUIRES(!Locks::heap_bitmap_lock_) {
+  void VerifyRoots() SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!Locks::heap_bitmap_lock_) {
     ReaderMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
     VerifyReferenceVisitor visitor(heap_, fail_count_, verify_referent_);
     Runtime::Current()->VisitRoots(&visitor);
@@ -2860,6 +2874,11 @@ class VerifyReferenceCardVisitor {
       : heap_(heap), failed_(failed) {
   }
 
+  // There is no card marks for native roots on a class.
+  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
+      const {}
+  void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
+
   // TODO: Fix lock analysis to not use NO_THREAD_SAFETY_ANALYSIS, requires support for
   // annotalysis on visitors.
   void operator()(mirror::Object* obj, MemberOffset offset, bool is_static) const
diff --git a/runtime/mirror/class-inl.h b/runtime/mirror/class-inl.h
index fc273154d4..069e346a8d 100644
--- a/runtime/mirror/class-inl.h
+++ b/runtime/mirror/class-inl.h
@@ -680,6 +680,8 @@ inline void Class::VisitReferences(mirror::Class* klass, const Visitor& visitor)
     // linked yet.
     VisitStaticFieldsReferences<kVisitClass>(this, visitor);
   }
+  // Since this class is reachable, we must also visit the associated roots when we scan it.
+  VisitNativeRoots(visitor, Runtime::Current()->GetClassLinker()->GetImagePointerSize());
 }
 
 template<ReadBarrierOption kReadBarrierOption>
@@ -816,20 +818,22 @@ void mirror::Class::VisitNativeRoots(Visitor& visitor, size_t pointer_size) {
   if (sfields != nullptr) {
     for (size_t i = 0, count = NumStaticFields(); i < count; ++i) {
       auto* f = &sfields[i];
+      // Visit roots first in case the declaring class gets moved.
+      f->VisitRoots(visitor);
       if (kIsDebugBuild && IsResolved()) {
         CHECK_EQ(f->GetDeclaringClass(), this) << GetStatus();
       }
-      f->VisitRoots(visitor);
     }
   }
   ArtField* const ifields = GetIFieldsUnchecked();
   if (ifields != nullptr) {
     for (size_t i = 0, count = NumInstanceFields(); i < count; ++i) {
       auto* f = &ifields[i];
+      // Visit roots first in case the declaring class gets moved.
+      f->VisitRoots(visitor);
      if (kIsDebugBuild && IsResolved()) {
        CHECK_EQ(f->GetDeclaringClass(), this) << GetStatus();
      }
-      f->VisitRoots(visitor);
     }
   }
   // We may see GetDirectMethodsPtr() == null with NumDirectMethods() != 0 if the root marking
diff --git a/runtime/mirror/object.cc b/runtime/mirror/object.cc
index 27dd743f52..80decaa53d 100644
--- a/runtime/mirror/object.cc
+++ b/runtime/mirror/object.cc
@@ -63,6 +63,11 @@ class CopyReferenceFieldsWithReadBarrierVisitor {
     this->operator()(ref, mirror::Reference::ReferentOffset(), false);
   }
 
+  // Unused since we don't copy class native roots.
+  void VisitRootIfNonNull(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED)
+      const {}
+  void VisitRoot(mirror::CompressedReference<mirror::Object>* root ATTRIBUTE_UNUSED) const {}
+
  private:
   Object* const dest_obj_;
 };