 runtime/gc/space/region_space.cc | 42 ++++++++++++++++++++++++++++++++++++++++++
 runtime/gc/space/region_space.h  |  5 +++++
 2 files changed, 47 insertions(+), 0 deletions(-)
diff --git a/runtime/gc/space/region_space.cc b/runtime/gc/space/region_space.cc
index 0701330e81..f97b976e33 100644
--- a/runtime/gc/space/region_space.cc
+++ b/runtime/gc/space/region_space.cc
@@ -42,6 +42,9 @@ static constexpr bool kPoisonDeadObjectsInUnevacuatedRegions = true;
// points to a valid, non-protected memory area.
static constexpr uint32_t kPoisonDeadObject = 0xBADDB01D; // "BADDROID"
+// Whether we check a region's live bytes count against the region bitmap.
+static constexpr bool kCheckLiveBytesAgainstRegionBitmap = kIsDebugBuild;
+
MemMap* RegionSpace::CreateMemMap(const std::string& name, size_t capacity,
                                  uint8_t* requested_begin) {
  CHECK_ALIGNED(capacity, kRegionSize);
@@ -316,6 +319,9 @@ void RegionSpace::ClearFromSpace(/* out */ uint64_t* cleared_bytes,
  };
  for (size_t i = 0; i < std::min(num_regions_, non_free_region_index_limit_); ++i) {
    Region* r = &regions_[i];
+    if (kCheckLiveBytesAgainstRegionBitmap) {
+      CheckLiveBytesAgainstRegionBitmap(r);
+    }
    if (r->IsInFromSpace()) {
      *cleared_bytes += r->BytesAllocated();
      *cleared_objects += r->ObjectsAllocated();
@@ -404,6 +410,42 @@ void RegionSpace::ClearFromSpace(/* out */ uint64_t* cleared_bytes,
  num_evac_regions_ = 0;
}
+void RegionSpace::CheckLiveBytesAgainstRegionBitmap(Region* r) {
+  if (r->LiveBytes() == static_cast<size_t>(-1)) {
+    // Live bytes count is undefined for `r`; nothing to check here.
+    return;
+  }
+
+  // Functor walking the region space bitmap for the range corresponding
+  // to region `r` and calculating the sum of live bytes.
+  size_t live_bytes_recount = 0u;
+  auto recount_live_bytes =
+      [&r, &live_bytes_recount](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
+    DCHECK_ALIGNED(obj, kAlignment);
+    if (r->IsLarge()) {
+      // If `r` is a large region, then it contains at most one object,
+      // which must start at the beginning of the region. The live byte
+      // count in that case is equal to the size of the allocated
+      // regions (large region + large tail regions).
+      DCHECK_EQ(reinterpret_cast<uint8_t*>(obj), r->Begin());
+      DCHECK_EQ(live_bytes_recount, 0u);
+      live_bytes_recount = r->Top() - r->Begin();
+    } else {
+      DCHECK(r->IsAllocated())
+          << "r->State()=" << r->State() << " r->LiveBytes()=" << r->LiveBytes();
+      size_t obj_size = obj->SizeOf<kDefaultVerifyFlags>();
+      size_t alloc_size = RoundUp(obj_size, space::RegionSpace::kAlignment);
+      live_bytes_recount += alloc_size;
+    }
+  };
+  // Visit live objects in `r` and recount the live bytes.
+  GetLiveBitmap()->VisitMarkedRange(reinterpret_cast<uintptr_t>(r->Begin()),
+                                    reinterpret_cast<uintptr_t>(r->Top()),
+                                    recount_live_bytes);
+  // Check that this recount matches the region's current live bytes count.
+  DCHECK_EQ(live_bytes_recount, r->LiveBytes());
+}
+
// Poison the memory area in range [`begin`, `end`) with value `kPoisonDeadObject`.
static void PoisonUnevacuatedRange(uint8_t* begin, uint8_t* end) {
  static constexpr size_t kPoisonDeadObjectSize = sizeof(kPoisonDeadObject);
diff --git a/runtime/gc/space/region_space.h b/runtime/gc/space/region_space.h
index fa33a8a21b..90f1f1dd2a 100644
--- a/runtime/gc/space/region_space.h
+++ b/runtime/gc/space/region_space.h
@@ -602,6 +602,11 @@ class RegionSpace FINAL : public ContinuousMemMapAllocSpace {
/* out */ size_t* bytes_tl_bulk_allocated,
/* out */ size_t* next_region = nullptr) REQUIRES(region_lock_);
+  // Check that the value of `r->LiveBytes()` matches the number of
+  // (allocated) bytes used by live objects according to the live bits
+  // in the region space bitmap range corresponding to region `r`.
+  void CheckLiveBytesAgainstRegionBitmap(Region* r);
+
  // Poison memory areas used by dead objects within unevacuated
  // region `r`. This is meant to detect dangling references to dead
  // objects earlier in debug mode.
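
Not part of the patch above, but for readers unfamiliar with the pattern: a minimal standalone sketch of the kind of consistency check CheckLiveBytesAgainstRegionBitmap performs, i.e. recomputing a region's live bytes by walking a mark bitmap over the region's range and asserting that the recount matches the cached counter. The names below (ToyRegion, ToyBitmap, kSlot) are illustrative stand-ins, not ART types, and the bitmap is simplified to one bool per allocation slot.

// Standalone sketch, not ART code: recount live bytes from a mark bitmap
// and compare the result against a cached per-region counter.
#include <cassert>
#include <cstddef>
#include <vector>

constexpr std::size_t kSlot = 16;  // allocation granularity (stand-in for kAlignment)

struct ToyBitmap {
  std::vector<bool> bits;              // one mark bit per kSlot-sized slot
  std::vector<std::size_t> obj_size;   // byte size recorded for each marked slot

  // Call `visit(slot, size)` for every marked slot in [begin_slot, end_slot).
  template <typename Visitor>
  void VisitMarkedRange(std::size_t begin_slot, std::size_t end_slot, Visitor&& visit) const {
    for (std::size_t i = begin_slot; i < end_slot; ++i) {
      if (bits[i]) {
        visit(i, obj_size[i]);
      }
    }
  }
};

struct ToyRegion {
  std::size_t begin_slot;
  std::size_t end_slot;
  std::size_t live_bytes;  // cached counter, maintained as objects are marked
};

// Recount live bytes from the bitmap, rounding each object up to the
// allocation granularity, and assert the result matches the cached counter.
void CheckLiveBytesAgainstBitmap(const ToyRegion& r, const ToyBitmap& bitmap) {
  std::size_t recount = 0;
  bitmap.VisitMarkedRange(r.begin_slot, r.end_slot,
                          [&recount](std::size_t /*slot*/, std::size_t size) {
                            recount += (size + kSlot - 1) / kSlot * kSlot;
                          });
  assert(recount == r.live_bytes && "cached live bytes disagree with the bitmap");
}

int main() {
  ToyBitmap bitmap{std::vector<bool>(8, false), std::vector<std::size_t>(8, 0)};
  ToyRegion region{0, 8, 0};
  // Mark two objects and maintain the cached counter the way a collector would.
  bitmap.bits[0] = true; bitmap.obj_size[0] = 20;  // rounds up to 32 bytes
  bitmap.bits[3] = true; bitmap.obj_size[3] = 16;  // stays 16 bytes
  region.live_bytes = 32 + 16;
  CheckLiveBytesAgainstBitmap(region, bitmap);     // passes: recount == 48
  return 0;
}

As in the patch, which gates the walk behind kCheckLiveBytesAgainstRegionBitmap = kIsDebugBuild, such a recount costs a pass over every live object in the region, so it is the kind of check one would normally compile into debug builds only.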