Implement dead object poisoning in unevacuated regions.
When an unevacuated region is cleared from the from-space,
replace memory areas used by dead objects with a poison
value, so as to catch dangling references to such objects
earlier.
This poisoning mechanism is only enabled in debug builds.
Test: art/test.py
Test: Device boot test with libartd
Bug: 74064045
Change-Id: I2f89a31648d292baae09859494410f88eca21759
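
For context, the core idea in a minimal standalone sketch (hypothetical
names, with a plain array standing in for a region; not the ART code
below): once a dead range is overwritten with the poison word, a stale
read returns a recognizably bad value instead of stale object data.

    #include <algorithm>
    #include <cinttypes>
    #include <cstdint>
    #include <cstdio>
    #include <iterator>

    static constexpr uint32_t kPoison = 0xBADDB01D;

    int main() {
      // Stand-in for region memory that held a now-dead object.
      uint32_t dead_object[4] = {0x10, 0x20, 0x30, 0x40};
      // "Clear" the dead range by filling it with the poison word.
      std::fill(std::begin(dead_object), std::end(dead_object), kPoison);
      // A dangling read now yields the poison word, not stale data.
      std::printf("stale read: 0x%08" PRIX32 "\n", dead_object[2]);
      return 0;
    }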
diff --git a/runtime/gc/space/region_space.cc b/runtime/gc/space/region_space.cc
index 6a01c88..74abe1c 100644
--- a/runtime/gc/space/region_space.cc
+++ b/runtime/gc/space/region_space.cc
@@ -29,10 +29,19 @@
// value of the region size, evacuate the region.
static constexpr uint kEvacuateLivePercentThreshold = 75U;
-// If we protect the cleared regions.
+// Whether we protect the cleared regions.
// Only protect for target builds to prevent flaky test failures (b/63131961).
static constexpr bool kProtectClearedRegions = kIsTargetBuild;
+// Whether we poison memory areas occupied by dead objects in unevacuated regions.
+static constexpr bool kPoisonDeadObjectsInUnevacuatedRegions = kIsDebugBuild;
+
+// Special 32-bit value used to poison memory areas occupied by dead
+// objects in unevacuated regions. Dereferencing this value is expected
+// to trigger a memory protection fault, as it is unlikely that it
+// points to a valid, non-protected memory area.
+static constexpr uint32_t kPoisonDeadObject = 0xBADDB01D; // "BADDROID"
+
MemMap* RegionSpace::CreateMemMap(const std::string& name, size_t capacity,
uint8_t* requested_begin) {
CHECK_ALIGNED(capacity, kRegionSize);
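
Why dereferencing this value is expected to fault: interpreted as an
object address, 0xBADDB01D is very unlikely to land in a mapped,
unprotected range of the process. A sketch (hypothetical, not ART code)
of a dangling reference surfacing through a poisoned slot:

    #include <cstdint>
    #include <cstdio>

    int main() {
      // A reference field read back from a poisoned dead object.
      uint32_t stale_field = 0xBADDB01D;
      void* dangling = reinterpret_cast<void*>(static_cast<uintptr_t>(stale_field));
      std::printf("dangling reference targets %p\n", dangling);
      // Dereferencing it would almost certainly raise SIGSEGV,
      // pinpointing the dangling use at the point of access:
      // uint32_t v = *static_cast<uint32_t*>(dangling);
      return 0;
    }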
@@ -370,6 +379,13 @@
// as they are unevac regions that are live.
// Subtract one for the for-loop.
i += regions_to_clear_bitmap - 1;
+ } else {
+ // Only some allocated bytes are live in this unevac region.
+ // This should only happen for an allocated non-large region.
+ DCHECK(r->IsAllocated()) << r->State();
+ if (kPoisonDeadObjectsInUnevacuatedRegions) {
+ PoisonDeadObjectsInUnevacuatedRegion(r);
+ }
}
}
// Note r != last_checked_region if r->IsInUnevacFromSpace() was true above.
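
For orientation: the new branch only fires for partially live regions,
since a fully live unevacuated region contains no dead gaps. A
standalone sketch of that distinction (hypothetical FakeRegion type,
assuming live/allocated byte counters like those of RegionSpace::Region):

    #include <cstddef>

    struct FakeRegion {
      size_t live_bytes;       // bytes occupied by live objects
      size_t allocated_bytes;  // bytes handed out by the allocator
    };

    // Mirrors the check above: only a partially live region has dead
    // objects worth poisoning.
    bool HasDeadObjectsToPoison(const FakeRegion& r) {
      return r.live_bytes != r.allocated_bytes;
    }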
@@ -388,6 +404,55 @@
num_evac_regions_ = 0;
}
+// Poison the memory area in range [`begin`, `end`) with value `kPoisonDeadObject`.
+static void PoisonUnevacuatedRange(uint8_t* begin, uint8_t* end) {
+ static constexpr size_t kPoisonDeadObjectSize = sizeof(kPoisonDeadObject);
+ static_assert(IsPowerOfTwo(kPoisonDeadObjectSize) &&
+ IsPowerOfTwo(RegionSpace::kAlignment) &&
+ (kPoisonDeadObjectSize < RegionSpace::kAlignment),
+ "RegionSpace::kAlignment should be a multiple of kPoisonDeadObjectSize"
+ " and both should be powers of 2");
+ DCHECK_ALIGNED(begin, kPoisonDeadObjectSize);
+ DCHECK_ALIGNED(end, kPoisonDeadObjectSize);
+ uint32_t* begin_addr = reinterpret_cast<uint32_t*>(begin);
+ uint32_t* end_addr = reinterpret_cast<uint32_t*>(end);
+ std::fill(begin_addr, end_addr, kPoisonDeadObject);
+}
+
+void RegionSpace::PoisonDeadObjectsInUnevacuatedRegion(Region* r) {
+ // The live byte count of `r` should be different from -1, as this
+ // region should neither be a newly allocated region nor an
+ // evacuated region.
+ DCHECK_NE(r->LiveBytes(), static_cast<size_t>(-1));
+
+ // Past-the-end address of the previously visited (live) object (or
+ // the beginning of the region, if `maybe_poison` has not run yet).
+ uint8_t* prev_obj_end = reinterpret_cast<uint8_t*>(r->Begin());
+
+ // Functor poisoning the space between `obj` and the previously
+// visited (live) object (or the beginning of the region), if any.
+ auto maybe_poison = [this, &prev_obj_end](mirror::Object* obj) REQUIRES(Locks::mutator_lock_) {
+ DCHECK_ALIGNED(obj, kAlignment);
+ uint8_t* cur_obj_begin = reinterpret_cast<uint8_t*>(obj);
+ if (cur_obj_begin != prev_obj_end) {
+ // There is a gap (dead object(s)) between the previously
+ // visited (live) object (or the beginning of the region) and
+ // `obj`; poison that space.
+ PoisonUnevacuatedRange(prev_obj_end, cur_obj_begin);
+ }
+ prev_obj_end = reinterpret_cast<uint8_t*>(GetNextObject(obj));
+ };
+
+ // Visit live objects in `r` and poison gaps (dead objects) between them.
+ GetLiveBitmap()->VisitMarkedRange(reinterpret_cast<uintptr_t>(r->Begin()),
+ reinterpret_cast<uintptr_t>(r->Top()),
+ maybe_poison);
+ // Poison memory between the last live object and the end of the region, if any.
+ if (prev_obj_end < r->Top()) {
+ PoisonUnevacuatedRange(prev_obj_end, r->Top());
+ }
+}
+
void RegionSpace::LogFragmentationAllocFailure(std::ostream& os,
size_t /* failed_alloc_bytes */) {
size_t max_contiguous_allocation = 0;
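
The walk in PoisonDeadObjectsInUnevacuatedRegion above reduces to a
classic sweep pattern: visit live objects in address order and fill
every gap between them. A standalone sketch (with a hypothetical
LiveObject/word-offset representation in place of ART's live bitmap
and object headers):

    #include <algorithm>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    static constexpr uint32_t kPoison = 0xBADDB01D;

    struct LiveObject {
      size_t begin;  // start offset within the region, in 32-bit words
      size_t size;   // object size, in 32-bit words
    };

    // `region` stands in for a region's memory; `live` lists live
    // objects sorted by address, as a live-bitmap walk would yield them.
    void PoisonGaps(std::vector<uint32_t>& region,
                    const std::vector<LiveObject>& live) {
      size_t prev_end = 0;  // past-the-end offset of the previous live object
      for (const LiveObject& obj : live) {
        if (obj.begin != prev_end) {
          // Dead gap between the previous live object and this one.
          std::fill(region.begin() + prev_end,
                    region.begin() + obj.begin,
                    kPoison);
        }
        prev_end = obj.begin + obj.size;
      }
      // Trailing dead space between the last live object and the top.
      std::fill(region.begin() + prev_end, region.end(), kPoison);
    }

Here `prev_end` plays the role of `prev_obj_end` above, and the sorted
`live` vector stands in for the objects VisitMarkedRange hands to
`maybe_poison`.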
diff --git a/runtime/gc/space/region_space.h b/runtime/gc/space/region_space.h
index c7e1888..ab18b1b 100644
--- a/runtime/gc/space/region_space.h
+++ b/runtime/gc/space/region_space.h
@@ -595,6 +595,11 @@
/* out */ size_t* bytes_tl_bulk_allocated,
/* out */ size_t* next_region = nullptr) REQUIRES(region_lock_);
+ // Poison memory areas used by dead objects within unevacuated
+ // region `r`. This is meant to detect dangling references to dead
+ // objects earlier in debug mode.
+ void PoisonDeadObjectsInUnevacuatedRegion(Region* r);
+
Mutex region_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
uint32_t time_; // The time as the number of collections since the startup.