Move SwapBitmaps to ContinuousMemMapAllocSpace.
Moved the SwapBitmaps function from MallocSpace to ContinuousMemMapAllocSpace
since the zygote space also needs this function during full GC.
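
For reference, a minimal sketch of the call pattern this enables (mirroring
the garbage_collector.cc hunk below); it works for malloc spaces and the
zygote space alike:

    // Sketch only: swap bitmaps via the common base class instead of
    // casting to MallocSpace, which the zygote space is not.
    if (space->IsContinuousMemMapAllocSpace()) {
      space->AsContinuousMemMapAllocSpace()->SwapBitmaps();
    }
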
Fixed a place where we were casting a ZygoteSpace to a MallocSpace; somehow
this didn't cause any issues in non-debug builds.
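
The non-debug behavior is explained by the accessor's shape (see the
space-inl.h hunk below): the type check is a DCHECK, so it compiles away
outside debug builds and the bad cast goes unnoticed. Simplified sketch:

    // Debug builds catch a ZygoteSpace -> MallocSpace cast here;
    // non-debug builds skip the checks and just down_cast.
    inline MallocSpace* Space::AsMallocSpace() {
      DCHECK(IsMallocSpace());
      DCHECK(IsDlMallocSpace() || IsRosAllocSpace());
      return down_cast<MallocSpace*>(down_cast<MemMapSpace*>(this));
    }
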
Moved the CollectGarbage call in PreZygoteFork before the lock to prevent
an occasional lock level violation caused by attempting to enqueue
java.lang references while holding the lock.
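
A condensed sketch of the new ordering (full version in the heap.cc hunk
below); the full GC now runs before zygote_creation_lock_ is acquired, so
any reference enqueueing done by the collection happens without that lock:

    void Heap::PreZygoteFork() {
      // Collect first, outside the zygote creation lock, so enqueueing
      // cleared references cannot trigger a lock level violation.
      CollectGarbageInternal(collector::kGcTypeFull, kGcCauseBackground, false);
      static Mutex zygote_creation_lock_("zygote creation lock", kZygoteCreationLock);
      Thread* self = Thread::Current();
      MutexLock mu(self, zygote_creation_lock_);
      // ... rest unchanged ...
    }
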
Bug: 12876255
Change-Id: I77439e46d5b26b37724bdcee3a0948410f1b0eb4
diff --git a/runtime/gc/collector/garbage_collector.cc b/runtime/gc/collector/garbage_collector.cc
index 25e8966..ae04074 100644
--- a/runtime/gc/collector/garbage_collector.cc
+++ b/runtime/gc/collector/garbage_collector.cc
@@ -151,10 +151,11 @@
space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect)) {
accounting::SpaceBitmap* live_bitmap = space->GetLiveBitmap();
accounting::SpaceBitmap* mark_bitmap = space->GetMarkBitmap();
- if (live_bitmap != mark_bitmap) {
+ if (live_bitmap != nullptr && live_bitmap != mark_bitmap) {
heap_->GetLiveBitmap()->ReplaceBitmap(live_bitmap, mark_bitmap);
heap_->GetMarkBitmap()->ReplaceBitmap(mark_bitmap, live_bitmap);
- space->AsMallocSpace()->SwapBitmaps();
+ CHECK(space->IsContinuousMemMapAllocSpace());
+ space->AsContinuousMemMapAllocSpace()->SwapBitmaps();
}
}
}
diff --git a/runtime/gc/collector/sticky_mark_sweep.cc b/runtime/gc/collector/sticky_mark_sweep.cc
index 30f3753..9e3adb4 100644
--- a/runtime/gc/collector/sticky_mark_sweep.cc
+++ b/runtime/gc/collector/sticky_mark_sweep.cc
@@ -38,7 +38,7 @@
// know what was allocated since the last GC. A side-effect of binding the allocation space mark
// and live bitmap is that marking the objects will place them in the live bitmap.
for (const auto& space : GetHeap()->GetContinuousSpaces()) {
- if (space->IsMallocSpace() &&
+ if (space->IsContinuousMemMapAllocSpace() &&
space->GetGcRetentionPolicy() == space::kGcRetentionPolicyAlwaysCollect) {
DCHECK(space->IsContinuousMemMapAllocSpace());
space->AsContinuousMemMapAllocSpace()->BindLiveToMarkBitmap();
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index eb38310..309adb7 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -1431,6 +1431,7 @@
}
void Heap::PreZygoteFork() {
+ CollectGarbageInternal(collector::kGcTypeFull, kGcCauseBackground, false);
static Mutex zygote_creation_lock_("zygote creation lock", kZygoteCreationLock);
Thread* self = Thread::Current();
MutexLock mu(self, zygote_creation_lock_);
@@ -1439,7 +1440,6 @@
return;
}
VLOG(heap) << "Starting PreZygoteFork";
- CollectGarbageInternal(collector::kGcTypeFull, kGcCauseBackground, false);
// Trim the pages at the end of the non moving space.
non_moving_space_->Trim();
non_moving_space_->GetMemMap()->Protect(PROT_READ | PROT_WRITE);
diff --git a/runtime/gc/space/malloc_space.cc b/runtime/gc/space/malloc_space.cc
index 9ca4eac..6c6cb97 100644
--- a/runtime/gc/space/malloc_space.cc
+++ b/runtime/gc/space/malloc_space.cc
@@ -95,14 +95,6 @@
return mem_map;
}
-void MallocSpace::SwapBitmaps() {
- live_bitmap_.swap(mark_bitmap_);
- // Swap names to get more descriptive diagnostics.
- std::string temp_name(live_bitmap_->GetName());
- live_bitmap_->SetName(mark_bitmap_->GetName());
- mark_bitmap_->SetName(temp_name);
-}
-
mirror::Class* MallocSpace::FindRecentFreedObject(const mirror::Object* obj) {
size_t pos = recent_free_pos_;
// Start at the most recently freed object and work our way back since there may be duplicates
diff --git a/runtime/gc/space/malloc_space.h b/runtime/gc/space/malloc_space.h
index 58cfe8b..9a42e2c 100644
--- a/runtime/gc/space/malloc_space.h
+++ b/runtime/gc/space/malloc_space.h
@@ -109,9 +109,6 @@
void SetGrowthLimit(size_t growth_limit);
- // Swap the live and mark bitmaps of this space. This is used by the GC for concurrent sweeping.
- void SwapBitmaps();
-
virtual MallocSpace* CreateInstance(const std::string& name, MemMap* mem_map, void* allocator,
byte* begin, byte* end, byte* limit, size_t growth_limit) = 0;
diff --git a/runtime/gc/space/space-inl.h b/runtime/gc/space/space-inl.h
index e94c44e..02a63f6 100644
--- a/runtime/gc/space/space-inl.h
+++ b/runtime/gc/space/space-inl.h
@@ -32,7 +32,7 @@
}
inline MallocSpace* Space::AsMallocSpace() {
- DCHECK(GetType() == kSpaceTypeMallocSpace);
+ DCHECK(IsMallocSpace());
DCHECK(IsDlMallocSpace() || IsRosAllocSpace());
return down_cast<MallocSpace*>(down_cast<MemMapSpace*>(this));
}
diff --git a/runtime/gc/space/space.cc b/runtime/gc/space/space.cc
index 5478d5b..32a00bc 100644
--- a/runtime/gc/space/space.cc
+++ b/runtime/gc/space/space.cc
@@ -77,10 +77,12 @@
void ContinuousMemMapAllocSpace::BindLiveToMarkBitmap() {
CHECK(!HasBoundBitmaps());
accounting::SpaceBitmap* live_bitmap = GetLiveBitmap();
- accounting::SpaceBitmap* mark_bitmap = mark_bitmap_.release();
- Runtime::Current()->GetHeap()->GetMarkBitmap()->ReplaceBitmap(mark_bitmap, live_bitmap);
- temp_bitmap_.reset(mark_bitmap);
- mark_bitmap_.reset(live_bitmap);
+ if (live_bitmap != mark_bitmap_.get()) {
+ accounting::SpaceBitmap* mark_bitmap = mark_bitmap_.release();
+ Runtime::Current()->GetHeap()->GetMarkBitmap()->ReplaceBitmap(mark_bitmap, live_bitmap);
+ temp_bitmap_.reset(mark_bitmap);
+ mark_bitmap_.reset(live_bitmap);
+ }
}
bool ContinuousMemMapAllocSpace::HasBoundBitmaps() const {
@@ -97,6 +99,14 @@
DCHECK(temp_bitmap_.get() == nullptr);
}
+void ContinuousMemMapAllocSpace::SwapBitmaps() {
+ live_bitmap_.swap(mark_bitmap_);
+ // Swap names to get more descriptive diagnostics.
+ std::string temp_name(live_bitmap_->GetName());
+ live_bitmap_->SetName(mark_bitmap_->GetName());
+ mark_bitmap_->SetName(temp_name);
+}
+
} // namespace space
} // namespace gc
} // namespace art
diff --git a/runtime/gc/space/space.h b/runtime/gc/space/space.h
index 32230b3..95a79ec 100644
--- a/runtime/gc/space/space.h
+++ b/runtime/gc/space/space.h
@@ -411,6 +411,8 @@
void BindLiveToMarkBitmap()
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
void UnBindBitmaps() EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
+ // Swap the live and mark bitmaps of this space. This is used by the GC for concurrent sweeping.
+ void SwapBitmaps();
virtual void Clear() {
LOG(FATAL) << "Unimplemented";
diff --git a/runtime/native/dalvik_system_VMDebug.cc b/runtime/native/dalvik_system_VMDebug.cc
index 4a84cfe..dceea5c 100644
--- a/runtime/native/dalvik_system_VMDebug.cc
+++ b/runtime/native/dalvik_system_VMDebug.cc
@@ -24,6 +24,7 @@
#include "gc/space/dlmalloc_space.h"
#include "gc/space/large_object_space.h"
#include "gc/space/space-inl.h"
+#include "gc/space/zygote_space.h"
#include "hprof/hprof.h"
#include "jni_internal.h"
#include "mirror/class.h"
@@ -265,9 +266,9 @@
if (space->IsImageSpace()) {
// Currently don't include the image space.
} else if (space->IsZygoteSpace()) {
- gc::space::MallocSpace* malloc_space = space->AsMallocSpace();
- zygoteSize += malloc_space->GetFootprint();
- zygoteUsed += malloc_space->GetBytesAllocated();
+ gc::space::ZygoteSpace* zygote_space = space->AsZygoteSpace();
+ zygoteSize += zygote_space->Size();
+ zygoteUsed += zygote_space->GetBytesAllocated();
} else if (space->IsMallocSpace()) {
// This is a malloc space.
gc::space::MallocSpace* malloc_space = space->AsMallocSpace();