author Hans Boehm <hboehm@google.com> 2024-08-19 21:32:19 +0000
committer Hans Boehm <hboehm@google.com> 2024-08-19 23:48:03 +0000
commit 7c89f49c2c542df1a5780fb851e2ef0e0909f48f (patch)
tree   02bc90403e3f56bbb59c7fb1d89b785cd8257b28 /runtime/gc/heap.cc
parent 806ace6dfa77cd65b05b20affb6a7e7072dcc845 (diff)
Revert "Object.clone() allocates more movable objects"
This reverts commit a5001fed23788c966fd87048d7f17ba8c0b51914.

Reason for revert: b/360363656

Change-Id: Ibfea46976bb6434d728c69160edb5904ab7708aa
Diffstat (limited to 'runtime/gc/heap.cc')
-rw-r--r--  runtime/gc/heap.cc  |  64
1 file changed, 8 insertions(+), 56 deletions(-)
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 0f5dd99dc0..ad3e690938 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -377,10 +377,11 @@ Heap::Heap(size_t initial_size,
* verification is enabled, we limit the size of allocation stacks to speed up their
* searching.
*/
- max_allocation_stack_size_(kGCALotMode ? kGcAlotAllocationStackSize :
- (kVerifyObjectSupport > kVerifyObjectModeFast) ?
- kVerifyObjectAllocationStackSize :
- kDefaultAllocationStackSize),
+ max_allocation_stack_size_(kGCALotMode
+ ? kGcAlotAllocationStackSize
+ : (kVerifyObjectSupport > kVerifyObjectModeFast)
+ ? kVerifyObjectAllocationStackSize
+ : kDefaultAllocationStackSize),
current_allocator_(kAllocatorTypeDlMalloc),
current_non_moving_allocator_(kAllocatorTypeNonMoving),
bump_pointer_space_(nullptr),
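
The first hunk only re-indents the nested conditional in the initializer list. For comparison, the same three-way selection can be spelled as a named constexpr helper; a minimal standalone sketch with stand-in values (the real constants are defined elsewhere in ART):

#include <cstddef>

// Stand-in modes and sizes; the real constants live elsewhere in the runtime.
enum VerifyObjectMode { kVerifyObjectModeFast, kVerifyObjectModeAll };
constexpr bool kGCALotMode = false;
constexpr VerifyObjectMode kVerifyObjectSupport = kVerifyObjectModeFast;
constexpr std::size_t kGcAlotAllocationStackSize = 4 * 1024;
constexpr std::size_t kVerifyObjectAllocationStackSize = 16 * 1024;
constexpr std::size_t kDefaultAllocationStackSize = 8 * 1024;

// Same three-way choice as the reformatted initializer, as an if/else chain.
constexpr std::size_t MaxAllocationStackSize() {
  if (kGCALotMode) {
    return kGcAlotAllocationStackSize;
  }
  if (kVerifyObjectSupport > kVerifyObjectModeFast) {
    return kVerifyObjectAllocationStackSize;
  }
  return kDefaultAllocationStackSize;
}
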
@@ -431,8 +432,7 @@ Heap::Heap(size_t initial_size,
boot_image_spaces_(),
boot_images_start_address_(0u),
boot_images_size_(0u),
- pre_oome_gc_count_(0u),
- non_movable_zygote_objects_() {
+ pre_oome_gc_count_(0u) {
if (VLOG_IS_ON(heap) || VLOG_IS_ON(startup)) {
LOG(INFO) << "Heap() entering";
}
@@ -2378,7 +2378,7 @@ class ZygoteCompactingCollector final : public collector::SemiSpace {
bin_live_bitmap_ = space->GetLiveBitmap();
bin_mark_bitmap_ = space->GetMarkBitmap();
uintptr_t prev = reinterpret_cast<uintptr_t>(space->Begin());
- Heap* heap = Runtime::Current()->GetHeap();
+ WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
// Note: This requires traversing the space in increasing order of object addresses.
auto visitor = [&](mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) {
uintptr_t object_addr = reinterpret_cast<uintptr_t>(obj);
@@ -2386,11 +2386,7 @@ class ZygoteCompactingCollector final : public collector::SemiSpace {
// Add the bin consisting of the end of the previous object to the start of the current object.
AddBin(bin_size, prev);
prev = object_addr + RoundUp(obj->SizeOf<kDefaultVerifyFlags>(), kObjectAlignment);
- if (!obj->IsClass()) {
- heap->AddNonMovableZygoteObject(obj);
- }
};
- WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
bin_live_bitmap_->Walk(visitor);
// Add the last bin which spans after the last object to the end of the space.
AddBin(reinterpret_cast<uintptr_t>(space->End()) - prev, prev);
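
The visitor above carries the core invariant: objects are visited in increasing address order, and each gap between the aligned end of one object and the start of the next becomes a bin, plus a final tail bin after the last object. A minimal standalone sketch of that computation (the alignment value and the size-to-address multimap shape are assumptions based on this hunk, not the full collector):

#include <cstddef>
#include <cstdint>
#include <map>
#include <utility>
#include <vector>

constexpr uintptr_t kAlignment = 8;  // stand-in for ART's kObjectAlignment

constexpr uintptr_t RoundUpTo(uintptr_t x, uintptr_t n) {
  return (x + n - 1) & ~(n - 1);
}

// objects: (address, size) pairs sorted by address, as the bitmap walk
// guarantees. Returns bin-size -> bin-start entries, mirroring AddBin().
std::multimap<std::size_t, uintptr_t> BuildBins(
    uintptr_t space_begin,
    uintptr_t space_end,
    const std::vector<std::pair<uintptr_t, std::size_t>>& objects) {
  std::multimap<std::size_t, uintptr_t> bins;
  uintptr_t prev = space_begin;
  for (const auto& [addr, size] : objects) {
    if (addr > prev) {
      bins.emplace(addr - prev, prev);  // gap before this object
    }
    prev = addr + RoundUpTo(size, kAlignment);
  }
  bins.emplace(space_end - prev, prev);  // tail bin after the last object
  return bins;
}
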
@@ -2496,10 +2492,6 @@ void Heap::IncrementFreedEver() {
// FIXME: BUT it did exceed... http://b/197647048
# pragma clang diagnostic ignored "-Wframe-larger-than="
void Heap::PreZygoteFork() {
- // Opportunistically log here; empirically logs from the initial PreZygoteFork() are lost.
- // But for the main zygote, this is typically entered at least twice.
- LOG(INFO) << "PreZygoteFork(): non_movable_zygote_objects_.size() = "
- << non_movable_zygote_objects_.size();
if (!HasZygoteSpace()) {
// We still want to GC in case there are some unreachable non-moving objects that could cause a
// suboptimal bin packing when we compact the zygote space.
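
The pragma above suppresses -Wframe-larger-than= from that point to the end of the translation unit. Where narrower scope is preferable, clang's push/pop form can bracket a single definition; an illustrative sketch:

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wframe-larger-than="
void BigFrame() {
  char scratch[64 * 1024];  // would otherwise trip a -Wframe-larger-than= limit
  (void)scratch;
}
#pragma clang diagnostic pop
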
@@ -2527,18 +2519,6 @@ void Heap::PreZygoteFork() {
// there.
non_moving_space_->GetMemMap()->Protect(PROT_READ | PROT_WRITE);
const bool same_space = non_moving_space_ == main_space_;
- // We create the ZygoteSpace by performing a semi-space collection to copy the main allocation
- // space into what was the non-moving space. We do so by ignoring and overwriting the meta-
- // information from the non-moving (dlmalloc) space. An initial pass identifies unused sections
- // of the heap that we usually try to copy into first. We copy any remaining objects past the
- previous end of the old non-moving space. Everything up to the last allocated object in the
- // old non-moving space then becomes ZygoteSpace. Everything after that becomes the new
- // non-moving space.
- // There is a subtlety here in that Object.clone() treats objects allocated as non-movable
- // differently from other objects, and this ZygoteSpace creation process doesn't automatically
- // preserve that distinction. Thus we must explicitly track this in non_movable_zygote_objects_.
- // Otherwise we have to treat the entire ZygoteSpace as non-movable, which could cause some
- // weird programming styles to eventually render most of the heap non-movable.
if (kCompactZygote) {
// Temporarily disable rosalloc verification because the zygote
// compaction will mess up the rosalloc internal metadata.
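
The removed comment documents the reverted design: zygote compaction copies movable and non-movable objects into a single ZygoteSpace, losing the allocation-time distinction that Object.clone() depends on, hence the explicit side table. A minimal sketch of that bookkeeping, with plain addresses standing in for mirror::CompressedReference:

#include <cstdint>
#include <unordered_set>

class NonMovableZygoteTracker {
 public:
  // Called during the compaction walk for each non-class object that was
  // originally allocated non-movable (cf. AddNonMovableZygoteObject above).
  void Add(uintptr_t obj) { objects_.insert(obj); }

  // Later query: was this zygote-space object allocated non-movable?
  bool Contains(uintptr_t obj) const { return objects_.count(obj) != 0; }

 private:
  std::unordered_set<uintptr_t> objects_;
};
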
@@ -2567,9 +2547,6 @@ void Heap::PreZygoteFork() {
zygote_collector.SetToSpace(&target_space);
zygote_collector.SetSwapSemiSpaces(false);
zygote_collector.Run(kGcCauseCollectorTransition, false);
- uint32_t num_nonmovable = non_movable_zygote_objects_.size();
- // For an AOSP boot, we saw num_nonmovable around a dozen.
- DCHECK_LT(num_nonmovable, 1000u) << " Too many nonmovable zygote objects?";
if (reset_main_space) {
main_space_->GetMemMap()->Protect(PROT_READ | PROT_WRITE);
madvise(main_space_->Begin(), main_space_->Capacity(), MADV_DONTNEED);
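
The reset path above re-enables access with Protect(PROT_READ | PROT_WRITE) and then uses madvise(MADV_DONTNEED) so the kernel can reclaim the physical pages without unmapping the reservation. The same pattern in a standalone program:

#include <sys/mman.h>
#include <cstddef>
#include <cstdio>

int main() {
  const std::size_t len = 1 << 20;
  void* p = mmap(nullptr, len, PROT_READ | PROT_WRITE,
                 MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
  if (p == MAP_FAILED) return 1;
  static_cast<char*>(p)[0] = 42;             // fault a page in
  mprotect(p, len, PROT_READ | PROT_WRITE);  // ensure the region is accessible
  madvise(p, len, MADV_DONTNEED);            // drop contents; pages become reclaimable
  // On Linux, an anonymous private mapping reads back as zeroes afterwards.
  std::printf("after MADV_DONTNEED: %d\n", static_cast<char*>(p)[0]);
  munmap(p, len);
  return 0;
}
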
@@ -3759,32 +3736,7 @@ void Heap::SetIdealFootprint(size_t target_footprint) {
target_footprint_.store(target_footprint, std::memory_order_relaxed);
}
-bool Heap::IsNonMovable(ObjPtr<mirror::Object> obj) const {
- DCHECK(!obj.Ptr()->IsClass()); // We do not correctly track classes in zygote space.
- if (GetNonMovingSpace()->Contains(obj.Ptr())) {
- return true;
- }
- if (zygote_space_ != nullptr && zygote_space_->Contains(obj.Ptr())) {
- return non_movable_zygote_objects_.contains(
- mirror::CompressedReference<mirror::Object>::FromMirrorPtr(obj.Ptr()));
- }
- return false; // E.g. in LargeObjectsSpace.
-}
-
-bool Heap::PossiblyAllocatedMovable(ObjPtr<mirror::Object> obj) const {
- // The CC collector may copy movable objects into NonMovingSpace. It does that only when it
- // runs out of space, so we assume this does not affect ZygoteSpace.
- if (!gUseReadBarrier && GetNonMovingSpace()->Contains(obj.Ptr())) {
- return false;
- }
- if (zygote_space_ != nullptr && zygote_space_->Contains(obj.Ptr())) {
- return !non_movable_zygote_objects_.contains(
- mirror::CompressedReference<mirror::Object>::FromMirrorPtr(obj.Ptr()));
- }
- return true;
-}
-
-bool Heap::ObjectMayMove(ObjPtr<mirror::Object> obj) const {
+bool Heap::IsMovableObject(ObjPtr<mirror::Object> obj) const {
if (kMovingCollector) {
space::Space* space = FindContinuousSpaceFromObject(obj.Ptr(), true);
if (space != nullptr) {
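
The final (truncated) hunk restores the IsMovableObject name. A purely hypothetical caller sketch, with stand-in types and allocator helpers that are not ART APIs, showing how clone-style code could mirror the source object's movability:

#include <cstddef>

// Illustrative stand-ins for ART types; only the shape of the decision matters.
struct Object {};
struct Heap {
  bool IsMovableObject(const Object* obj) const;  // name restored by this hunk
};
Object* AllocMovable(std::size_t bytes);     // hypothetical helper
Object* AllocNonMovable(std::size_t bytes);  // hypothetical helper

Object* CloneWithSameMovability(Heap* heap, const Object* src, std::size_t bytes) {
  return heap->IsMovableObject(src)
             ? AllocMovable(bytes)      // copy may be relocated by the GC
             : AllocNonMovable(bytes);  // copy must keep a stable address
}
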