| author | 2016-06-21 01:05:30 +0000 |
|---|---|
| committer | 2016-06-21 01:05:30 +0000 |
| commit | a6eb11d1bc3b174feeb64d06eb92cd10e81808d7 |
| tree | a6c09e70c9caedf7616ab4fd33043c066e662bab |
| parent | 794a76488fc78ce62fa7dfe79e81acd7fa511bc5 |
| parent | 8016bdee7ca1a066221a5d2fe5e60890de950a5b |
Merge changes from topic 'PHENOMENAL_CHANGE' into nyc-mr1-dev
* changes:
Use collector specific helper classes
Avoid duplicate class for mark compact
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | runtime/gc/collector/concurrent_copying.cc | 81 |
| -rw-r--r-- | runtime/gc/collector/concurrent_copying.h | 25 |
| -rw-r--r-- | runtime/gc/collector/mark_compact.cc | 92 |
| -rw-r--r-- | runtime/gc/collector/mark_compact.h | 11 |
| -rw-r--r-- | runtime/gc/collector/mark_sweep.cc | 34 |
| -rw-r--r-- | runtime/gc/collector/mark_sweep.h | 22 |
| -rw-r--r-- | runtime/gc/collector/semi_space-inl.h | 23 |
| -rw-r--r-- | runtime/gc/collector/semi_space.cc | 58 |
| -rw-r--r-- | runtime/gc/collector/semi_space.h | 4 |

9 files changed, 149 insertions(+), 201 deletions(-)
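The "Use collector specific helper classes" change is mostly a naming and visibility cleanup: the one-off visitor and closure helpers that were free-standing classes with long ConcurrentCopying*/MarkCompact* prefixes (and matching `friend class` lists inside each collector) become private nested classes, forward-declared in the header and defined with a qualified name in the .cc file. A minimal sketch of that pattern — with illustrative names (`Collector`, `ScanVisitor`, `MarkObject`), not the actual ART types:

```cpp
// collector.h
class Collector {
 public:
  void Run();

 private:
  void MarkObject(void* obj);

  // Forward declaration only; the definition lives in the .cc file, so the
  // helper never leaks into the enclosing namespace and needs no prefix.
  class ScanVisitor;
};

// collector.cc
// Defined with the qualified name. A nested class already has access to the
// enclosing class's private members, so the old `friend class ...Visitor;`
// declarations become unnecessary.
class Collector::ScanVisitor {
 public:
  explicit ScanVisitor(Collector* collector) : collector_(collector) {}

  void operator()(void* obj) const {
    collector_->MarkObject(obj);  // OK: private access via the enclosing class.
  }

 private:
  Collector* const collector_;
};

void Collector::Run() {
  ScanVisitor visitor(this);
  visitor(nullptr);
}

void Collector::MarkObject(void* /*obj*/) {}
```

Each collector can then reuse short names such as `MarkObjectVisitor` or `RefFieldsVisitor` without colliding with the other collectors' helpers, which is exactly what the header hunks below do by swapping the `friend class` lists for nested-class declarations.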
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc index d393f0b1d2..36efca1109 100644 --- a/runtime/gc/collector/concurrent_copying.cc +++ b/runtime/gc/collector/concurrent_copying.cc @@ -188,7 +188,7 @@ void ConcurrentCopying::InitializePhase() { } // Used to switch the thread roots of a thread from from-space refs to to-space refs. -class ThreadFlipVisitor : public Closure { +class ConcurrentCopying::ThreadFlipVisitor : public Closure { public: ThreadFlipVisitor(ConcurrentCopying* concurrent_copying, bool use_tlab) : concurrent_copying_(concurrent_copying), use_tlab_(use_tlab) { @@ -225,7 +225,7 @@ class ThreadFlipVisitor : public Closure { }; // Called back from Runtime::FlipThreadRoots() during a pause. -class FlipCallback : public Closure { +class ConcurrentCopying::FlipCallback : public Closure { public: explicit FlipCallback(ConcurrentCopying* concurrent_copying) : concurrent_copying_(concurrent_copying) { @@ -297,10 +297,9 @@ void ConcurrentCopying::RecordLiveStackFreezeSize(Thread* self) { } // Used to visit objects in the immune spaces. -class ConcurrentCopyingImmuneSpaceObjVisitor { +class ConcurrentCopying::ImmuneSpaceObjVisitor { public: - explicit ConcurrentCopyingImmuneSpaceObjVisitor(ConcurrentCopying* cc) - : collector_(cc) {} + explicit ImmuneSpaceObjVisitor(ConcurrentCopying* cc) : collector_(cc) {} void operator()(mirror::Object* obj) const SHARED_REQUIRES(Locks::mutator_lock_) SHARED_REQUIRES(Locks::heap_bitmap_lock_) { @@ -391,7 +390,7 @@ void ConcurrentCopying::MarkingPhase() { for (auto& space : immune_spaces_.GetSpaces()) { DCHECK(space->IsImageSpace() || space->IsZygoteSpace()); accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap(); - ConcurrentCopyingImmuneSpaceObjVisitor visitor(this); + ImmuneSpaceObjVisitor visitor(this); live_bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()), reinterpret_cast<uintptr_t>(space->Limit()), visitor); @@ -487,7 +486,7 @@ void ConcurrentCopying::ReenableWeakRefAccess(Thread* self) { Runtime::Current()->BroadcastForNewSystemWeaks(); } -class DisableMarkingCheckpoint : public Closure { +class ConcurrentCopying::DisableMarkingCheckpoint : public Closure { public: explicit DisableMarkingCheckpoint(ConcurrentCopying* concurrent_copying) : concurrent_copying_(concurrent_copying) { @@ -657,9 +656,9 @@ accounting::ObjectStack* ConcurrentCopying::GetLiveStack() { // The following visitors are that used to verify that there's no // references to the from-space left after marking. 
-class ConcurrentCopyingVerifyNoFromSpaceRefsVisitor : public SingleRootVisitor { +class ConcurrentCopying::VerifyNoFromSpaceRefsVisitor : public SingleRootVisitor { public: - explicit ConcurrentCopyingVerifyNoFromSpaceRefsVisitor(ConcurrentCopying* collector) + explicit VerifyNoFromSpaceRefsVisitor(ConcurrentCopying* collector) : collector_(collector) {} void operator()(mirror::Object* ref) const @@ -697,16 +696,16 @@ class ConcurrentCopyingVerifyNoFromSpaceRefsVisitor : public SingleRootVisitor { ConcurrentCopying* const collector_; }; -class ConcurrentCopyingVerifyNoFromSpaceRefsFieldVisitor { +class ConcurrentCopying::VerifyNoFromSpaceRefsFieldVisitor { public: - explicit ConcurrentCopyingVerifyNoFromSpaceRefsFieldVisitor(ConcurrentCopying* collector) + explicit VerifyNoFromSpaceRefsFieldVisitor(ConcurrentCopying* collector) : collector_(collector) {} void operator()(mirror::Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const SHARED_REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE { mirror::Object* ref = obj->GetFieldObject<mirror::Object, kDefaultVerifyFlags, kWithoutReadBarrier>(offset); - ConcurrentCopyingVerifyNoFromSpaceRefsVisitor visitor(collector_); + VerifyNoFromSpaceRefsVisitor visitor(collector_); visitor(ref); } void operator()(mirror::Class* klass, mirror::Reference* ref) const @@ -724,7 +723,7 @@ class ConcurrentCopyingVerifyNoFromSpaceRefsFieldVisitor { void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const SHARED_REQUIRES(Locks::mutator_lock_) { - ConcurrentCopyingVerifyNoFromSpaceRefsVisitor visitor(collector_); + VerifyNoFromSpaceRefsVisitor visitor(collector_); visitor(root->AsMirrorPtr()); } @@ -732,9 +731,9 @@ class ConcurrentCopyingVerifyNoFromSpaceRefsFieldVisitor { ConcurrentCopying* const collector_; }; -class ConcurrentCopyingVerifyNoFromSpaceRefsObjectVisitor { +class ConcurrentCopying::VerifyNoFromSpaceRefsObjectVisitor { public: - explicit ConcurrentCopyingVerifyNoFromSpaceRefsObjectVisitor(ConcurrentCopying* collector) + explicit VerifyNoFromSpaceRefsObjectVisitor(ConcurrentCopying* collector) : collector_(collector) {} void operator()(mirror::Object* obj) const SHARED_REQUIRES(Locks::mutator_lock_) { @@ -746,7 +745,7 @@ class ConcurrentCopyingVerifyNoFromSpaceRefsObjectVisitor { ConcurrentCopying* collector = reinterpret_cast<ConcurrentCopying*>(arg); space::RegionSpace* region_space = collector->RegionSpace(); CHECK(!region_space->IsInFromSpace(obj)) << "Scanning object " << obj << " in from space"; - ConcurrentCopyingVerifyNoFromSpaceRefsFieldVisitor visitor(collector); + VerifyNoFromSpaceRefsFieldVisitor visitor(collector); obj->VisitReferences(visitor, visitor); if (kUseBakerReadBarrier) { if (collector->RegionSpace()->IsInToSpace(obj)) { @@ -780,16 +779,15 @@ void ConcurrentCopying::VerifyNoFromSpaceReferences() { CHECK(!thread->GetIsGcMarking()); } } - ConcurrentCopyingVerifyNoFromSpaceRefsObjectVisitor visitor(this); + VerifyNoFromSpaceRefsObjectVisitor visitor(this); // Roots. { ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_); - ConcurrentCopyingVerifyNoFromSpaceRefsVisitor ref_visitor(this); + VerifyNoFromSpaceRefsVisitor ref_visitor(this); Runtime::Current()->VisitRoots(&ref_visitor); } // The to-space. - region_space_->WalkToSpace(ConcurrentCopyingVerifyNoFromSpaceRefsObjectVisitor::ObjectCallback, - this); + region_space_->WalkToSpace(VerifyNoFromSpaceRefsObjectVisitor::ObjectCallback, this); // Non-moving spaces. 
{ WriterMutexLock mu(self, *Locks::heap_bitmap_lock_); @@ -797,7 +795,7 @@ void ConcurrentCopying::VerifyNoFromSpaceReferences() { } // The alloc stack. { - ConcurrentCopyingVerifyNoFromSpaceRefsVisitor ref_visitor(this); + VerifyNoFromSpaceRefsVisitor ref_visitor(this); for (auto* it = heap_->allocation_stack_->Begin(), *end = heap_->allocation_stack_->End(); it < end; ++it) { mirror::Object* const obj = it->AsMirrorPtr(); @@ -812,9 +810,9 @@ void ConcurrentCopying::VerifyNoFromSpaceReferences() { } // The following visitors are used to assert the to-space invariant. -class ConcurrentCopyingAssertToSpaceInvariantRefsVisitor { +class ConcurrentCopying::AssertToSpaceInvariantRefsVisitor { public: - explicit ConcurrentCopyingAssertToSpaceInvariantRefsVisitor(ConcurrentCopying* collector) + explicit AssertToSpaceInvariantRefsVisitor(ConcurrentCopying* collector) : collector_(collector) {} void operator()(mirror::Object* ref) const @@ -830,16 +828,16 @@ class ConcurrentCopyingAssertToSpaceInvariantRefsVisitor { ConcurrentCopying* const collector_; }; -class ConcurrentCopyingAssertToSpaceInvariantFieldVisitor { +class ConcurrentCopying::AssertToSpaceInvariantFieldVisitor { public: - explicit ConcurrentCopyingAssertToSpaceInvariantFieldVisitor(ConcurrentCopying* collector) + explicit AssertToSpaceInvariantFieldVisitor(ConcurrentCopying* collector) : collector_(collector) {} void operator()(mirror::Object* obj, MemberOffset offset, bool is_static ATTRIBUTE_UNUSED) const SHARED_REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE { mirror::Object* ref = obj->GetFieldObject<mirror::Object, kDefaultVerifyFlags, kWithoutReadBarrier>(offset); - ConcurrentCopyingAssertToSpaceInvariantRefsVisitor visitor(collector_); + AssertToSpaceInvariantRefsVisitor visitor(collector_); visitor(ref); } void operator()(mirror::Class* klass, mirror::Reference* ref ATTRIBUTE_UNUSED) const @@ -856,7 +854,7 @@ class ConcurrentCopyingAssertToSpaceInvariantFieldVisitor { void VisitRoot(mirror::CompressedReference<mirror::Object>* root) const SHARED_REQUIRES(Locks::mutator_lock_) { - ConcurrentCopyingAssertToSpaceInvariantRefsVisitor visitor(collector_); + AssertToSpaceInvariantRefsVisitor visitor(collector_); visitor(root->AsMirrorPtr()); } @@ -864,9 +862,9 @@ class ConcurrentCopyingAssertToSpaceInvariantFieldVisitor { ConcurrentCopying* const collector_; }; -class ConcurrentCopyingAssertToSpaceInvariantObjectVisitor { +class ConcurrentCopying::AssertToSpaceInvariantObjectVisitor { public: - explicit ConcurrentCopyingAssertToSpaceInvariantObjectVisitor(ConcurrentCopying* collector) + explicit AssertToSpaceInvariantObjectVisitor(ConcurrentCopying* collector) : collector_(collector) {} void operator()(mirror::Object* obj) const SHARED_REQUIRES(Locks::mutator_lock_) { @@ -879,7 +877,7 @@ class ConcurrentCopyingAssertToSpaceInvariantObjectVisitor { space::RegionSpace* region_space = collector->RegionSpace(); CHECK(!region_space->IsInFromSpace(obj)) << "Scanning object " << obj << " in from space"; collector->AssertToSpaceInvariant(nullptr, MemberOffset(0), obj); - ConcurrentCopyingAssertToSpaceInvariantFieldVisitor visitor(collector); + AssertToSpaceInvariantFieldVisitor visitor(collector); obj->VisitReferences(visitor, visitor); } @@ -887,7 +885,7 @@ class ConcurrentCopyingAssertToSpaceInvariantObjectVisitor { ConcurrentCopying* const collector_; }; -class RevokeThreadLocalMarkStackCheckpoint : public Closure { +class ConcurrentCopying::RevokeThreadLocalMarkStackCheckpoint : public Closure { public: 
RevokeThreadLocalMarkStackCheckpoint(ConcurrentCopying* concurrent_copying, bool disable_weak_ref_access) @@ -1115,7 +1113,7 @@ inline void ConcurrentCopying::ProcessMarkStackRef(mirror::Object* to_ref) { DCHECK(!kUseBakerReadBarrier); #endif if (ReadBarrier::kEnableToSpaceInvariantChecks || kIsDebugBuild) { - ConcurrentCopyingAssertToSpaceInvariantObjectVisitor visitor(this); + AssertToSpaceInvariantObjectVisitor visitor(this); visitor(to_ref); } } @@ -1226,10 +1224,9 @@ void ConcurrentCopying::SweepLargeObjects(bool swap_bitmaps) { RecordFreeLOS(heap_->GetLargeObjectsSpace()->Sweep(swap_bitmaps)); } -class ConcurrentCopyingClearBlackPtrsVisitor { +class ConcurrentCopying::ClearBlackPtrsVisitor { public: - explicit ConcurrentCopyingClearBlackPtrsVisitor(ConcurrentCopying* cc) - : collector_(cc) {} + explicit ClearBlackPtrsVisitor(ConcurrentCopying* cc) : collector_(cc) {} void operator()(mirror::Object* obj) const SHARED_REQUIRES(Locks::mutator_lock_) SHARED_REQUIRES(Locks::heap_bitmap_lock_) { DCHECK(obj != nullptr); @@ -1247,7 +1244,7 @@ class ConcurrentCopyingClearBlackPtrsVisitor { void ConcurrentCopying::ClearBlackPtrs() { CHECK(kUseBakerReadBarrier); TimingLogger::ScopedTiming split("ClearBlackPtrs", GetTimings()); - ConcurrentCopyingClearBlackPtrsVisitor visitor(this); + ClearBlackPtrsVisitor visitor(this); for (auto& space : heap_->GetContinuousSpaces()) { if (space == region_space_) { continue; @@ -1373,9 +1370,9 @@ void ConcurrentCopying::ReclaimPhase() { } } -class ConcurrentCopyingComputeUnevacFromSpaceLiveRatioVisitor { +class ConcurrentCopying::ComputeUnevacFromSpaceLiveRatioVisitor { public: - explicit ConcurrentCopyingComputeUnevacFromSpaceLiveRatioVisitor(ConcurrentCopying* cc) + explicit ComputeUnevacFromSpaceLiveRatioVisitor(ConcurrentCopying* cc) : collector_(cc) {} void operator()(mirror::Object* ref) const SHARED_REQUIRES(Locks::mutator_lock_) SHARED_REQUIRES(Locks::heap_bitmap_lock_) { @@ -1400,7 +1397,7 @@ class ConcurrentCopyingComputeUnevacFromSpaceLiveRatioVisitor { // Compute how much live objects are left in regions. void ConcurrentCopying::ComputeUnevacFromSpaceLiveRatio() { region_space_->AssertAllRegionLiveBytesZeroOrCleared(); - ConcurrentCopyingComputeUnevacFromSpaceLiveRatioVisitor visitor(this); + ComputeUnevacFromSpaceLiveRatioVisitor visitor(this); region_space_bitmap_->VisitMarkedRange(reinterpret_cast<uintptr_t>(region_space_->Begin()), reinterpret_cast<uintptr_t>(region_space_->Limit()), visitor); @@ -1583,9 +1580,9 @@ void ConcurrentCopying::AssertToSpaceInvariantInNonMovingSpace(mirror::Object* o } // Used to scan ref fields of an object. -class ConcurrentCopyingRefFieldsVisitor { +class ConcurrentCopying::RefFieldsVisitor { public: - explicit ConcurrentCopyingRefFieldsVisitor(ConcurrentCopying* collector) + explicit RefFieldsVisitor(ConcurrentCopying* collector) : collector_(collector) {} void operator()(mirror::Object* obj, MemberOffset offset, bool /* is_static */) @@ -1621,7 +1618,7 @@ class ConcurrentCopyingRefFieldsVisitor { // Scan ref fields of an object. inline void ConcurrentCopying::Scan(mirror::Object* to_ref) { DCHECK(!region_space_->IsInFromSpace(to_ref)); - ConcurrentCopyingRefFieldsVisitor visitor(this); + RefFieldsVisitor visitor(this); // Disable the read barrier for a performance reason. 
to_ref->VisitReferences</*kVisitNativeRoots*/true, kDefaultVerifyFlags, kWithoutReadBarrier>( visitor, visitor); diff --git a/runtime/gc/collector/concurrent_copying.h b/runtime/gc/collector/concurrent_copying.h index 76315fe7cc..5ceab121b2 100644 --- a/runtime/gc/collector/concurrent_copying.h +++ b/runtime/gc/collector/concurrent_copying.h @@ -237,16 +237,21 @@ class ConcurrentCopying : public GarbageCollector { accounting::ReadBarrierTable* rb_table_; bool force_evacuate_all_; // True if all regions are evacuated. - friend class ConcurrentCopyingRefFieldsVisitor; - friend class ConcurrentCopyingImmuneSpaceObjVisitor; - friend class ConcurrentCopyingVerifyNoFromSpaceRefsVisitor; - friend class ConcurrentCopyingVerifyNoFromSpaceRefsObjectVisitor; - friend class ConcurrentCopyingClearBlackPtrsVisitor; - friend class ConcurrentCopyingLostCopyVisitor; - friend class ThreadFlipVisitor; - friend class FlipCallback; - friend class ConcurrentCopyingComputeUnevacFromSpaceLiveRatioVisitor; - friend class RevokeThreadLocalMarkStackCheckpoint; + class AssertToSpaceInvariantFieldVisitor; + class AssertToSpaceInvariantObjectVisitor; + class AssertToSpaceInvariantRefsVisitor; + class ClearBlackPtrsVisitor; + class ComputeUnevacFromSpaceLiveRatioVisitor; + class DisableMarkingCheckpoint; + class FlipCallback; + class ImmuneSpaceObjVisitor; + class LostCopyVisitor; + class RefFieldsVisitor; + class RevokeThreadLocalMarkStackCheckpoint; + class VerifyNoFromSpaceRefsFieldVisitor; + class VerifyNoFromSpaceRefsObjectVisitor; + class VerifyNoFromSpaceRefsVisitor; + class ThreadFlipVisitor; DISALLOW_IMPLICIT_CONSTRUCTORS(ConcurrentCopying); }; diff --git a/runtime/gc/collector/mark_compact.cc b/runtime/gc/collector/mark_compact.cc index 6beb60608c..43482eb7cc 100644 --- a/runtime/gc/collector/mark_compact.cc +++ b/runtime/gc/collector/mark_compact.cc @@ -52,8 +52,9 @@ void MarkCompact::BindBitmaps() { MarkCompact::MarkCompact(Heap* heap, const std::string& name_prefix) : GarbageCollector(heap, name_prefix + (name_prefix.empty() ? "" : " ") + "mark compact"), - space_(nullptr), collector_name_(name_), updating_references_(false) { -} + space_(nullptr), + collector_name_(name_), + updating_references_(false) {} void MarkCompact::RunPhases() { Thread* self = Thread::Current(); @@ -85,30 +86,20 @@ void MarkCompact::ForwardObject(mirror::Object* obj) { ++live_objects_in_space_; } -class CalculateObjectForwardingAddressVisitor { - public: - explicit CalculateObjectForwardingAddressVisitor(MarkCompact* collector) - : collector_(collector) {} - void operator()(mirror::Object* obj) const REQUIRES(Locks::mutator_lock_, - Locks::heap_bitmap_lock_) { - DCHECK_ALIGNED(obj, space::BumpPointerSpace::kAlignment); - DCHECK(collector_->IsMarked(obj) != nullptr); - collector_->ForwardObject(obj); - } - - private: - MarkCompact* const collector_; -}; void MarkCompact::CalculateObjectForwardingAddresses() { TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings()); // The bump pointer in the space where the next forwarding address will be. bump_pointer_ = reinterpret_cast<uint8_t*>(space_->Begin()); // Visit all the marked objects in the bitmap. 
- CalculateObjectForwardingAddressVisitor visitor(this); objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()), reinterpret_cast<uintptr_t>(space_->End()), - visitor); + [this](mirror::Object* obj) + REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) { + DCHECK_ALIGNED(obj, space::BumpPointerSpace::kAlignment); + DCHECK(IsMarked(obj) != nullptr); + ForwardObject(obj); + }); } void MarkCompact::InitializePhase() { @@ -129,17 +120,6 @@ void MarkCompact::ProcessReferences(Thread* self) { false, GetTimings(), GetCurrentIteration()->GetClearSoftReferences(), this); } -class BitmapSetSlowPathVisitor { - public: - void operator()(const mirror::Object* obj) const SHARED_REQUIRES(Locks::mutator_lock_) { - // Marking a large object, make sure its aligned as a sanity check. - if (!IsAligned<kPageSize>(obj)) { - Runtime::Current()->GetHeap()->DumpSpaces(LOG(ERROR)); - LOG(FATAL) << obj; - } - } -}; - inline mirror::Object* MarkCompact::MarkObject(mirror::Object* obj) { if (obj == nullptr) { return nullptr; @@ -155,8 +135,15 @@ inline mirror::Object* MarkCompact::MarkObject(mirror::Object* obj) { } } else { DCHECK(!space_->HasAddress(obj)); - BitmapSetSlowPathVisitor visitor; - if (!mark_bitmap_->Set(obj, visitor)) { + auto slow_path = [this](const mirror::Object* ref) + SHARED_REQUIRES(Locks::mutator_lock_) { + // Marking a large object, make sure its aligned as a sanity check. + if (!IsAligned<kPageSize>(ref)) { + Runtime::Current()->GetHeap()->DumpSpaces(LOG(ERROR)); + LOG(FATAL) << ref; + } + }; + if (!mark_bitmap_->Set(obj, slow_path)) { // This object was not previously marked. MarkStackPush(obj); } @@ -296,10 +283,9 @@ void MarkCompact::VisitRoots( } } -class UpdateRootVisitor : public RootVisitor { +class MarkCompact::UpdateRootVisitor : public RootVisitor { public: - explicit UpdateRootVisitor(MarkCompact* collector) : collector_(collector) { - } + explicit UpdateRootVisitor(MarkCompact* collector) : collector_(collector) {} void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED) OVERRIDE REQUIRES(Locks::mutator_lock_) @@ -332,10 +318,10 @@ class UpdateRootVisitor : public RootVisitor { MarkCompact* const collector_; }; -class UpdateObjectReferencesVisitor { +class MarkCompact::UpdateObjectReferencesVisitor { public: - explicit UpdateObjectReferencesVisitor(MarkCompact* collector) : collector_(collector) { - } + explicit UpdateObjectReferencesVisitor(MarkCompact* collector) : collector_(collector) {} + void operator()(mirror::Object* obj) const SHARED_REQUIRES(Locks::heap_bitmap_lock_) REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE { collector_->UpdateObjectReferences(obj); @@ -423,10 +409,9 @@ inline void MarkCompact::UpdateHeapReference(mirror::HeapReference<mirror::Objec } } -class UpdateReferenceVisitor { +class MarkCompact::UpdateReferenceVisitor { public: - explicit UpdateReferenceVisitor(MarkCompact* collector) : collector_(collector) { - } + explicit UpdateReferenceVisitor(MarkCompact* collector) : collector_(collector) {} void operator()(mirror::Object* obj, MemberOffset offset, bool /*is_static*/) const ALWAYS_INLINE REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) { @@ -501,19 +486,6 @@ bool MarkCompact::ShouldSweepSpace(space::ContinuousSpace* space) const { return space != space_ && !immune_spaces_.ContainsSpace(space); } -class MoveObjectVisitor { - public: - explicit MoveObjectVisitor(MarkCompact* collector) : collector_(collector) { - } - void operator()(mirror::Object* obj) const 
SHARED_REQUIRES(Locks::heap_bitmap_lock_) - REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE { - collector_->MoveObject(obj, obj->SizeOf()); - } - - private: - MarkCompact* const collector_; -}; - void MarkCompact::MoveObject(mirror::Object* obj, size_t len) { // Look at the forwarding address stored in the lock word to know where to copy. DCHECK(space_->HasAddress(obj)) << obj; @@ -534,10 +506,13 @@ void MarkCompact::MoveObject(mirror::Object* obj, size_t len) { void MarkCompact::MoveObjects() { TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings()); // Move the objects in the before forwarding bitmap. - MoveObjectVisitor visitor(this); objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()), reinterpret_cast<uintptr_t>(space_->End()), - visitor); + [this](mirror::Object* obj) + SHARED_REQUIRES(Locks::heap_bitmap_lock_) + REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE { + MoveObject(obj, obj->SizeOf()); + }); CHECK(lock_words_to_restore_.empty()); } @@ -572,10 +547,9 @@ void MarkCompact::DelayReferenceReferent(mirror::Class* klass, mirror::Reference heap_->GetReferenceProcessor()->DelayReferenceReferent(klass, reference, this); } -class MarkCompactMarkObjectVisitor { +class MarkCompact::MarkObjectVisitor { public: - explicit MarkCompactMarkObjectVisitor(MarkCompact* collector) : collector_(collector) { - } + explicit MarkObjectVisitor(MarkCompact* collector) : collector_(collector) {} void operator()(mirror::Object* obj, MemberOffset offset, bool /*is_static*/) const ALWAYS_INLINE REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) { @@ -608,7 +582,7 @@ class MarkCompactMarkObjectVisitor { // Visit all of the references of an object and update. void MarkCompact::ScanObject(mirror::Object* obj) { - MarkCompactMarkObjectVisitor visitor(this); + MarkObjectVisitor visitor(this); obj->VisitReferences(visitor, visitor); } diff --git a/runtime/gc/collector/mark_compact.h b/runtime/gc/collector/mark_compact.h index 48311570b5..16abfb73b8 100644 --- a/runtime/gc/collector/mark_compact.h +++ b/runtime/gc/collector/mark_compact.h @@ -222,13 +222,10 @@ class MarkCompact : public GarbageCollector { bool updating_references_; private: - friend class BitmapSetSlowPathVisitor; - friend class CalculateObjectForwardingAddressVisitor; - friend class MarkCompactMarkObjectVisitor; - friend class MoveObjectVisitor; - friend class UpdateObjectReferencesVisitor; - friend class UpdateReferenceVisitor; - friend class UpdateRootVisitor; + class MarkObjectVisitor; + class UpdateObjectReferencesVisitor; + class UpdateReferenceVisitor; + class UpdateRootVisitor; DISALLOW_IMPLICIT_CONSTRUCTORS(MarkCompact); }; diff --git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc index 894ceba216..24cbf10d64 100644 --- a/runtime/gc/collector/mark_sweep.cc +++ b/runtime/gc/collector/mark_sweep.cc @@ -266,7 +266,7 @@ void MarkSweep::MarkingPhase() { PreCleanCards(); } -class ScanObjectVisitor { +class MarkSweep::ScanObjectVisitor { public: explicit ScanObjectVisitor(MarkSweep* const mark_sweep) ALWAYS_INLINE : mark_sweep_(mark_sweep) {} @@ -393,12 +393,14 @@ bool MarkSweep::IsMarkedHeapReference(mirror::HeapReference<mirror::Object>* ref return IsMarked(ref->AsMirrorPtr()); } -class MarkSweepMarkObjectSlowPath { +class MarkSweep::MarkObjectSlowPath { public: - explicit MarkSweepMarkObjectSlowPath(MarkSweep* mark_sweep, - mirror::Object* holder = nullptr, - MemberOffset offset = MemberOffset(0)) - : mark_sweep_(mark_sweep), holder_(holder), offset_(offset) {} + explicit 
MarkObjectSlowPath(MarkSweep* mark_sweep, + mirror::Object* holder = nullptr, + MemberOffset offset = MemberOffset(0)) + : mark_sweep_(mark_sweep), + holder_(holder), + offset_(offset) {} void operator()(const mirror::Object* obj) const NO_THREAD_SAFETY_ANALYSIS { if (kProfileLargeObjects) { @@ -499,7 +501,7 @@ inline void MarkSweep::MarkObjectNonNull(mirror::Object* obj, if (kCountMarkedObjects) { ++mark_slowpath_count_; } - MarkSweepMarkObjectSlowPath visitor(this, holder, offset); + MarkObjectSlowPath visitor(this, holder, offset); // TODO: We already know that the object is not in the current_space_bitmap_ but MarkBitmap::Set // will check again. if (!mark_bitmap_->Set(obj, visitor)) { @@ -534,7 +536,7 @@ inline bool MarkSweep::MarkObjectParallel(mirror::Object* obj) { if (LIKELY(object_bitmap->HasAddress(obj))) { return !object_bitmap->AtomicTestAndSet(obj); } - MarkSweepMarkObjectSlowPath visitor(this); + MarkObjectSlowPath visitor(this); return !mark_bitmap_->AtomicTestAndSet(obj, visitor); } @@ -553,7 +555,7 @@ inline void MarkSweep::MarkObject(mirror::Object* obj, } } -class VerifyRootMarkedVisitor : public SingleRootVisitor { +class MarkSweep::VerifyRootMarkedVisitor : public SingleRootVisitor { public: explicit VerifyRootMarkedVisitor(MarkSweep* collector) : collector_(collector) { } @@ -582,7 +584,7 @@ void MarkSweep::VisitRoots(mirror::CompressedReference<mirror::Object>** roots, } } -class VerifyRootVisitor : public SingleRootVisitor { +class MarkSweep::VerifyRootVisitor : public SingleRootVisitor { public: void VisitRoot(mirror::Object* root, const RootInfo& info) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) { @@ -629,7 +631,7 @@ void MarkSweep::MarkConcurrentRoots(VisitRootFlags flags) { this, static_cast<VisitRootFlags>(flags | kVisitRootFlagNonMoving)); } -class DelayReferenceReferentVisitor { +class MarkSweep::DelayReferenceReferentVisitor { public: explicit DelayReferenceReferentVisitor(MarkSweep* collector) : collector_(collector) {} @@ -644,7 +646,7 @@ class DelayReferenceReferentVisitor { }; template <bool kUseFinger = false> -class MarkStackTask : public Task { +class MarkSweep::MarkStackTask : public Task { public: MarkStackTask(ThreadPool* thread_pool, MarkSweep* mark_sweep, @@ -802,7 +804,7 @@ class MarkStackTask : public Task { } }; -class CardScanTask : public MarkStackTask<false> { +class MarkSweep::CardScanTask : public MarkStackTask<false> { public: CardScanTask(ThreadPool* thread_pool, MarkSweep* mark_sweep, @@ -967,7 +969,7 @@ void MarkSweep::ScanGrayObjects(bool paused, uint8_t minimum_age) { } } -class RecursiveMarkTask : public MarkStackTask<false> { +class MarkSweep::RecursiveMarkTask : public MarkStackTask<false> { public: RecursiveMarkTask(ThreadPool* thread_pool, MarkSweep* mark_sweep, @@ -1080,7 +1082,7 @@ void MarkSweep::SweepSystemWeaks(Thread* self) { Runtime::Current()->SweepSystemWeaks(this); } -class VerifySystemWeakVisitor : public IsMarkedVisitor { +class MarkSweep::VerifySystemWeakVisitor : public IsMarkedVisitor { public: explicit VerifySystemWeakVisitor(MarkSweep* mark_sweep) : mark_sweep_(mark_sweep) {} @@ -1109,7 +1111,7 @@ void MarkSweep::VerifySystemWeaks() { Runtime::Current()->SweepSystemWeaks(&visitor); } -class CheckpointMarkThreadRoots : public Closure, public RootVisitor { +class MarkSweep::CheckpointMarkThreadRoots : public Closure, public RootVisitor { public: CheckpointMarkThreadRoots(MarkSweep* mark_sweep, bool revoke_ros_alloc_thread_local_buffers_at_checkpoint) diff --git 
a/runtime/gc/collector/mark_sweep.h b/runtime/gc/collector/mark_sweep.h index c19107a626..92d7cf6056 100644 --- a/runtime/gc/collector/mark_sweep.h +++ b/runtime/gc/collector/mark_sweep.h @@ -353,17 +353,17 @@ class MarkSweep : public GarbageCollector { std::unique_ptr<MemMap> sweep_array_free_buffer_mem_map_; private: - friend class CardScanTask; - friend class CheckBitmapVisitor; - friend class CheckReferenceVisitor; - friend class CheckpointMarkThreadRoots; - friend class Heap; - friend class FifoMarkStackChunk; - friend class MarkObjectVisitor; - template<bool kUseFinger> friend class MarkStackTask; - friend class MarkSweepMarkObjectSlowPath; - friend class VerifyRootMarkedVisitor; - friend class VerifyRootVisitor; + class CardScanTask; + class CheckpointMarkThreadRoots; + class DelayReferenceReferentVisitor; + template<bool kUseFinger> class MarkStackTask; + class MarkObjectSlowPath; + class RecursiveMarkTask; + class ScanObjectParallelVisitor; + class ScanObjectVisitor; + class VerifyRootMarkedVisitor; + class VerifyRootVisitor; + class VerifySystemWeakVisitor; DISALLOW_IMPLICIT_CONSTRUCTORS(MarkSweep); }; diff --git a/runtime/gc/collector/semi_space-inl.h b/runtime/gc/collector/semi_space-inl.h index e87b5ff332..78fb2d24ae 100644 --- a/runtime/gc/collector/semi_space-inl.h +++ b/runtime/gc/collector/semi_space-inl.h @@ -26,21 +26,6 @@ namespace art { namespace gc { namespace collector { -class BitmapSetSlowPathVisitor { - public: - explicit BitmapSetSlowPathVisitor(SemiSpace* semi_space) : semi_space_(semi_space) { - } - - void operator()(const mirror::Object* obj) const { - CHECK(!semi_space_->to_space_->HasAddress(obj)) << "Marking " << obj << " in to_space_"; - // Marking a large object, make sure its aligned as a sanity check. - CHECK_ALIGNED(obj, kPageSize); - } - - private: - SemiSpace* const semi_space_; -}; - inline mirror::Object* SemiSpace::GetForwardingAddressInFromSpace(mirror::Object* obj) const { DCHECK(from_space_->HasAddress(obj)); LockWord lock_word = obj->GetLockWord(false); @@ -76,8 +61,12 @@ inline void SemiSpace::MarkObject( obj_ptr->Assign(forward_address); } else if (!collect_from_space_only_ && !immune_spaces_.IsInImmuneRegion(obj)) { DCHECK(!to_space_->HasAddress(obj)) << "Tried to mark " << obj << " in to-space"; - BitmapSetSlowPathVisitor visitor(this); - if (!mark_bitmap_->Set(obj, visitor)) { + auto slow_path = [this](const mirror::Object* ref) { + CHECK(!to_space_->HasAddress(ref)) << "Marking " << ref << " in to_space_"; + // Marking a large object, make sure its aligned as a sanity check. + CHECK_ALIGNED(ref, kPageSize); + }; + if (!mark_bitmap_->Set(obj, slow_path)) { // This object was not previously marked. MarkStackPush(obj); } diff --git a/runtime/gc/collector/semi_space.cc b/runtime/gc/collector/semi_space.cc index f37daa54e9..7a4c025c30 100644 --- a/runtime/gc/collector/semi_space.cc +++ b/runtime/gc/collector/semi_space.cc @@ -282,22 +282,11 @@ void SemiSpace::MarkingPhase() { } } -class SemiSpaceScanObjectVisitor { - public: - explicit SemiSpaceScanObjectVisitor(SemiSpace* ss) : semi_space_(ss) {} - void operator()(Object* obj) const REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) { - DCHECK(obj != nullptr); - semi_space_->ScanObject(obj); - } - private: - SemiSpace* const semi_space_; -}; - // Used to verify that there's no references to the from-space. 
-class SemiSpaceVerifyNoFromSpaceReferencesVisitor { +class SemiSpace::VerifyNoFromSpaceReferencesVisitor { public: - explicit SemiSpaceVerifyNoFromSpaceReferencesVisitor(space::ContinuousMemMapAllocSpace* from_space) : - from_space_(from_space) {} + explicit VerifyNoFromSpaceReferencesVisitor(space::ContinuousMemMapAllocSpace* from_space) + : from_space_(from_space) {} void operator()(Object* obj, MemberOffset offset, bool /* is_static */) const SHARED_REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE { @@ -331,23 +320,10 @@ class SemiSpaceVerifyNoFromSpaceReferencesVisitor { void SemiSpace::VerifyNoFromSpaceReferences(Object* obj) { DCHECK(!from_space_->HasAddress(obj)) << "Scanning object " << obj << " in from space"; - SemiSpaceVerifyNoFromSpaceReferencesVisitor visitor(from_space_); + VerifyNoFromSpaceReferencesVisitor visitor(from_space_); obj->VisitReferences(visitor, VoidFunctor()); } -class SemiSpaceVerifyNoFromSpaceReferencesObjectVisitor { - public: - explicit SemiSpaceVerifyNoFromSpaceReferencesObjectVisitor(SemiSpace* ss) : semi_space_(ss) {} - void operator()(Object* obj) const - SHARED_REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_) { - DCHECK(obj != nullptr); - semi_space_->VerifyNoFromSpaceReferences(obj); - } - - private: - SemiSpace* const semi_space_; -}; - void SemiSpace::MarkReachableObjects() { TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings()); { @@ -390,10 +366,12 @@ void SemiSpace::MarkReachableObjects() { } else { TimingLogger::ScopedTiming t2("VisitLiveBits", GetTimings()); accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap(); - SemiSpaceScanObjectVisitor visitor(this); live_bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()), reinterpret_cast<uintptr_t>(space->End()), - visitor); + [this](mirror::Object* obj) + REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) { + ScanObject(obj); + }); } if (kIsDebugBuild) { // Verify that there are no from-space references that @@ -401,10 +379,13 @@ void SemiSpace::MarkReachableObjects() { // card table) didn't miss any from-space references in the // space. accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap(); - SemiSpaceVerifyNoFromSpaceReferencesObjectVisitor visitor(this); live_bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()), reinterpret_cast<uintptr_t>(space->End()), - visitor); + [this](Object* obj) + SHARED_REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_) { + DCHECK(obj != nullptr); + VerifyNoFromSpaceReferences(obj); + }); } } } @@ -424,10 +405,12 @@ void SemiSpace::MarkReachableObjects() { // classes (primitive array classes) that could move though they // don't contain any other references. accounting::LargeObjectBitmap* large_live_bitmap = los->GetLiveBitmap(); - SemiSpaceScanObjectVisitor visitor(this); large_live_bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(los->Begin()), reinterpret_cast<uintptr_t>(los->End()), - visitor); + [this](mirror::Object* obj) + REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) { + ScanObject(obj); + }); } // Recursively process the mark stack. 
ProcessMarkStack(); @@ -697,10 +680,9 @@ void SemiSpace::DelayReferenceReferent(mirror::Class* klass, mirror::Reference* heap_->GetReferenceProcessor()->DelayReferenceReferent(klass, reference, this); } -class SemiSpaceMarkObjectVisitor { +class SemiSpace::MarkObjectVisitor { public: - explicit SemiSpaceMarkObjectVisitor(SemiSpace* collector) : collector_(collector) { - } + explicit MarkObjectVisitor(SemiSpace* collector) : collector_(collector) {} void operator()(Object* obj, MemberOffset offset, bool /* is_static */) const ALWAYS_INLINE REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_) { @@ -739,7 +721,7 @@ class SemiSpaceMarkObjectVisitor { // Visit all of the references of an object and update. void SemiSpace::ScanObject(Object* obj) { DCHECK(!from_space_->HasAddress(obj)) << "Scanning object " << obj << " in from space"; - SemiSpaceMarkObjectVisitor visitor(this); + MarkObjectVisitor visitor(this); obj->VisitReferences(visitor, visitor); } diff --git a/runtime/gc/collector/semi_space.h b/runtime/gc/collector/semi_space.h index 0199e1ae56..694e536b7d 100644 --- a/runtime/gc/collector/semi_space.h +++ b/runtime/gc/collector/semi_space.h @@ -272,7 +272,9 @@ class SemiSpace : public GarbageCollector { bool swap_semi_spaces_; private: - friend class BitmapSetSlowPathVisitor; + class BitmapSetSlowPathVisitor; + class MarkObjectVisitor; + class VerifyNoFromSpaceReferencesVisitor; DISALLOW_IMPLICIT_CONSTRUCTORS(SemiSpace); }; |
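The companion change, "Avoid duplicate class for mark compact", removes the `BitmapSetSlowPathVisitor` that was defined once in mark_compact.cc and once (with a different body) in semi_space-inl.h — the same class name in the same namespace across translation units, an ODR problem waiting to surface. Both definitions disappear and each call site instead passes a local lambda to the bitmap's templated `Set()`; the single-use `VisitMarkedRange()` visitors in mark_compact.cc and semi_space.cc get the same lambda treatment. A rough sketch of why that works, with an illustrative `Bitmap` type standing in for ART's `accounting::ContinuousSpaceBitmap` and an assumed `Set()` signature:

```cpp
// Illustrative stand-in for a mark bitmap. The slow path is a template
// parameter, so a capturing lambda works as well as a named functor: there is
// no virtual dispatch and no shared class to keep unique across translation
// units.
class Bitmap {
 public:
  // Runs the slow-path check, then sets the mark; returns whether the object
  // was already marked. (Signature and semantics are assumptions for this
  // sketch, not the real ART API.)
  template <typename SlowPath>
  bool Set(const void* obj, SlowPath&& slow_path) {
    slow_path(obj);
    const bool was_marked = marked_;
    marked_ = true;
    return was_marked;
  }

 private:
  bool marked_ = false;
};

class MarkCompact {
 public:
  void MarkObject(const void* obj) {
    // Each collector keeps its own sanity checks in a local lambda instead of
    // sharing (and accidentally duplicating) a named visitor class.
    auto slow_path = [](const void* ref) {
      (void)ref;  // e.g. alignment and address-space checks would go here.
    };
    if (!mark_bitmap_.Set(obj, slow_path)) {
      // Object was not previously marked: push it on the mark stack (omitted).
    }
  }

 private:
  Bitmap mark_bitmap_;
};

int main() {
  MarkCompact collector;
  int dummy = 0;
  collector.MarkObject(&dummy);
}
```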