diffstat:
 runtime/gc/collector/concurrent_copying-inl.h | 56 ++++++++++++++-----------
 runtime/gc/collector/concurrent_copying.cc    |  4 ++--
 runtime/gc/space/region_space.h               | 17 ++++++++++---
 3 files changed, 46 insertions(+), 31 deletions(-)
diff --git a/runtime/gc/collector/concurrent_copying-inl.h b/runtime/gc/collector/concurrent_copying-inl.h
index d739ed2867..56983be8fa 100644
--- a/runtime/gc/collector/concurrent_copying-inl.h
+++ b/runtime/gc/collector/concurrent_copying-inl.h
@@ -23,6 +23,7 @@
 #include "gc/accounting/space_bitmap-inl.h"
 #include "gc/heap.h"
 #include "gc/space/region_space.h"
+#include "gc/verification.h"
 #include "lock_word.h"
 #include "mirror/object-readbarrier-inl.h"
 
@@ -123,34 +124,39 @@ inline mirror::Object* ConcurrentCopying::Mark(mirror::Object* from_ref,
     return from_ref;
   }
   DCHECK(region_space_ != nullptr) << "Read barrier slow path taken when CC isn't running?";
-  space::RegionSpace::RegionType rtype = region_space_->GetRegionType(from_ref);
-  switch (rtype) {
-    case space::RegionSpace::RegionType::kRegionTypeToSpace:
-      // It's already marked.
-      return from_ref;
-    case space::RegionSpace::RegionType::kRegionTypeFromSpace: {
-      mirror::Object* to_ref = GetFwdPtr(from_ref);
-      if (to_ref == nullptr) {
-        // It isn't marked yet. Mark it by copying it to the to-space.
-        to_ref = Copy(from_ref, holder, offset);
+  if (region_space_->HasAddress(from_ref)) {
+    space::RegionSpace::RegionType rtype = region_space_->GetRegionTypeUnsafe(from_ref);
+    switch (rtype) {
+      case space::RegionSpace::RegionType::kRegionTypeToSpace:
+        // It's already marked.
+        return from_ref;
+      case space::RegionSpace::RegionType::kRegionTypeFromSpace: {
+        mirror::Object* to_ref = GetFwdPtr(from_ref);
+        if (to_ref == nullptr) {
+          // It isn't marked yet. Mark it by copying it to the to-space.
+          to_ref = Copy(from_ref, holder, offset);
+        }
+        // The copy should either be in a to-space region, or in the
+        // non-moving space, if it could not fit in a to-space region.
+        DCHECK(region_space_->IsInToSpace(to_ref) || heap_->non_moving_space_->HasAddress(to_ref))
+            << "from_ref=" << from_ref << " to_ref=" << to_ref;
+        return to_ref;
       }
-      // The copy should either be in a to-space region, or in the
-      // non-moving space, if it could not fit in a to-space region.
-      DCHECK(region_space_->IsInToSpace(to_ref) || heap_->non_moving_space_->HasAddress(to_ref))
-          << "from_ref=" << from_ref << " to_ref=" << to_ref;
-      return to_ref;
+      case space::RegionSpace::RegionType::kRegionTypeUnevacFromSpace:
+        return MarkUnevacFromSpaceRegion(from_ref, region_space_bitmap_);
+      default:
+        // The reference is in an unused region.
+        region_space_->DumpNonFreeRegions(LOG_STREAM(FATAL_WITHOUT_ABORT));
+        LOG(FATAL_WITHOUT_ABORT) << DumpHeapReference(holder, offset, from_ref);
+        heap_->GetVerification()->LogHeapCorruption(holder, offset, from_ref, /* fatal */ true);
+        UNREACHABLE();
     }
-    case space::RegionSpace::RegionType::kRegionTypeUnevacFromSpace: {
-      return MarkUnevacFromSpaceRegion(from_ref, region_space_bitmap_);
+  } else {
+    if (immune_spaces_.ContainsObject(from_ref)) {
+      return MarkImmuneSpace<kGrayImmuneObject>(from_ref);
+    } else {
+      return MarkNonMoving(from_ref, holder, offset);
     }
-    case space::RegionSpace::RegionType::kRegionTypeNone:
-      if (immune_spaces_.ContainsObject(from_ref)) {
-        return MarkImmuneSpace<kGrayImmuneObject>(from_ref);
-      } else {
-        return MarkNonMoving(from_ref, holder, offset);
-      }
-    default:
-      UNREACHABLE();
   }
 }
 
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index 7304697188..a78813bf7c 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -1857,7 +1857,7 @@ void ConcurrentCopying::AssertToSpaceInvariant(mirror::Object* obj,
   if (region_space_->HasAddress(ref)) {
     // Check to-space invariant in region space (moving space).
     using RegionType = space::RegionSpace::RegionType;
-    space::RegionSpace::RegionType type = region_space_->GetRegionType(ref);
+    space::RegionSpace::RegionType type = region_space_->GetRegionTypeUnsafe(ref);
     if (type == RegionType::kRegionTypeToSpace) {
       // OK.
       return;
@@ -1935,7 +1935,7 @@ void ConcurrentCopying::AssertToSpaceInvariant(GcRootSource* gc_root_source,
   if (region_space_->HasAddress(ref)) {
     // Check to-space invariant in region space (moving space).
     using RegionType = space::RegionSpace::RegionType;
-    space::RegionSpace::RegionType type = region_space_->GetRegionType(ref);
+    space::RegionSpace::RegionType type = region_space_->GetRegionTypeUnsafe(ref);
     if (type == RegionType::kRegionTypeToSpace) {
       // OK.
       return;
diff --git a/runtime/gc/space/region_space.h b/runtime/gc/space/region_space.h
index c3b7ff72ef..d63257d928 100644
--- a/runtime/gc/space/region_space.h
+++ b/runtime/gc/space/region_space.h
@@ -231,14 +231,23 @@ class RegionSpace FINAL : public ContinuousMemMapAllocSpace {
     return false;
   }
 
+  // If `ref` is in the region space, return the type of its region;
+  // otherwise, return `RegionType::kRegionTypeNone`.
   RegionType GetRegionType(mirror::Object* ref) {
     if (HasAddress(ref)) {
-      Region* r = RefToRegionUnlocked(ref);
-      return r->Type();
+      return GetRegionTypeUnsafe(ref);
     }
     return RegionType::kRegionTypeNone;
   }
 
+  // Unsafe version of RegionSpace::GetRegionType.
+  // Precondition: `ref` is in the region space.
+  RegionType GetRegionTypeUnsafe(mirror::Object* ref) {
+    DCHECK(HasAddress(ref)) << ref;
+    Region* r = RefToRegionUnlocked(ref);
+    return r->Type();
+  }
+
   // Determine which regions to evacuate and tag them as
   // from-space. Tag the rest as unevacuated from-space.
   void SetFromSpace(accounting::ReadBarrierTable* rb_table, bool force_evacuate_all)
@@ -530,8 +539,8 @@ class RegionSpace FINAL : public ContinuousMemMapAllocSpace {
   // Return the object location following `obj` in the region space
   // (i.e., the object location at `obj + obj->SizeOf()`).
   //
-  // Note that
-  // - unless the region containing `obj` is fully used; and
+  // Note that unless
+  // - the region containing `obj` is fully used; and
   //   - `obj` is not the last object of that region;
   // the returned location is not guaranteed to be a valid object.
   mirror::Object* GetNextObject(mirror::Object* obj)