Improve IsMovableObject logic.
The new logic is that an object in the main space is non-movable iff
neither the background collector nor the foreground collector is compacting.
This temporarily works around performance regressions and stability
issues caused by buggy JNI code.
Also addressed a few review comments from previous CLs.
Change-Id: Id5f91765b5fbdae387b566c0bda3392557218ba1
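To make the rule above concrete, here is a minimal standalone sketch of the movability predicate. The enum values mirror ART's CollectorType and IsCompactingGC matches the helper added in heap.h below, but MainSpaceObjectIsMovable is an invented name used purely for illustration:

    enum CollectorType { kCollectorTypeCMS, kCollectorTypeSS, kCollectorTypeGSS };

    // Semi-space (SS) and generational semi-space (GSS) collectors compact,
    // i.e. they may relocate objects.
    bool IsCompactingGC(CollectorType collector_type) {
      return collector_type == kCollectorTypeSS || collector_type == kCollectorTypeGSS;
    }

    // A main space object is movable iff the foreground or the background
    // collector compacts; equivalently, non-movable iff neither does.
    bool MainSpaceObjectIsMovable(CollectorType foreground, CollectorType background) {
      return IsCompactingGC(foreground) || IsCompactingGC(background);
    }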
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 3b129fc..14edc2f 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -223,16 +223,18 @@
// Compute heap capacity. Continuous spaces are sorted in order of Begin().
CHECK(!continuous_spaces_.empty());
- std::string error_str;
- post_zygote_non_moving_space_mem_map_.reset(
- MemMap::MapAnonymous("post zygote non-moving space", nullptr, 64 * MB,
- PROT_READ | PROT_WRITE, &error_str));
- CHECK(post_zygote_non_moving_space_mem_map_.get() != nullptr) << error_str;
// Relies on the spaces being sorted.
- byte* heap_begin = std::min(post_zygote_non_moving_space_mem_map_->Begin(),
- continuous_spaces_.front()->Begin());
- byte* heap_end = std::max(post_zygote_non_moving_space_mem_map_->End(),
- continuous_spaces_.back()->Limit());
+ byte* heap_begin = continuous_spaces_.front()->Begin();
+ byte* heap_end = continuous_spaces_.back()->Limit();
+ if (Runtime::Current()->IsZygote()) {
+ std::string error_str;
+ post_zygote_non_moving_space_mem_map_.reset(
+ MemMap::MapAnonymous("post zygote non-moving space", nullptr, 64 * MB,
+ PROT_READ | PROT_WRITE, &error_str));
+ CHECK(post_zygote_non_moving_space_mem_map_.get() != nullptr) << error_str;
+ heap_begin = std::min(post_zygote_non_moving_space_mem_map_->Begin(), heap_begin);
+ heap_end = std::max(post_zygote_non_moving_space_mem_map_->End(), heap_end);
+ }
size_t heap_capacity = heap_end - heap_begin;
// Allocate the card table.
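The bounds computation in the hunk above reduces to widening a [begin, end) range: only zygote processes reserve the post-zygote non-moving space, and only then do the heap bounds expand to cover it. A hedged sketch, with Range and ComputeHeapRange invented here purely for illustration:

    #include <algorithm>

    struct Range { char* begin; char* end; };

    // Continuous spaces are sorted by Begin(), so front/back bound the heap;
    // in the zygote, the reserved post-zygote non-moving space may extend
    // either end of that range.
    Range ComputeHeapRange(Range continuous_spaces, bool is_zygote, Range post_zygote_map) {
      Range heap = continuous_spaces;
      if (is_zygote) {
        heap.begin = std::min(post_zygote_map.begin, heap.begin);
        heap.end = std::max(post_zygote_map.end, heap.end);
      }
      return heap;  // heap_capacity = heap.end - heap.begin
    }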
@@ -795,6 +797,7 @@
if (bump_pointer_space_ != nullptr && bump_pointer_space_->HasAddress(obj)) {
mirror::Class* klass = obj->GetClass();
if (obj == klass) {
+ // This case happens for java.lang.Class, whose class is itself.
return true;
}
return VerifyClassClass(klass) && IsLiveObjectLocked(klass);
@@ -2118,7 +2121,11 @@
if (bump_pointer_space_->HasAddress(obj)) {
return true;
}
- if (main_space_ != nullptr && main_space_->HasAddress(obj)) {
+ // TODO: Refactor this logic into the space itself?
+ // Objects in the main space are only copied during background -> foreground transitions or
+ // vice versa.
+ if (main_space_ != nullptr && main_space_->HasAddress(obj) &&
+ (IsCompactingGC(background_collector_type_) || IsCompactingGC(collector_type_))) {
return true;
}
}
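The commit message cites buggy JNI code as the motivation: callers that hand out raw interior pointers must copy whenever the object might move. A hypothetical, self-contained sketch of that pin-or-copy pattern (all names invented; this is not ART's JNI implementation):

    #include <cstddef>
    #include <cstdlib>
    #include <cstring>

    // If the object may be relocated by a compacting GC, return a stable
    // heap copy; otherwise the raw pointer is safe to expose directly.
    void* GetElementsSketch(bool is_movable, void* data, size_t size, bool* is_copy) {
      if (is_movable) {
        *is_copy = true;
        void* copy = std::malloc(size);
        if (copy != nullptr) {
          std::memcpy(copy, data, size);
        }
        return copy;  // caller must free this snapshot when done
      }
      *is_copy = false;
      return data;
    }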
diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h
index 26bbacd..0232b4d 100644
--- a/runtime/gc/heap.h
+++ b/runtime/gc/heap.h
@@ -545,6 +545,9 @@
static ALWAYS_INLINE bool AllocatorMayHaveConcurrentGC(AllocatorType allocator_type) {
return AllocatorHasAllocationStack(allocator_type);
}
+ static bool IsCompactingGC(CollectorType collector_type) {
+ return collector_type == kCollectorTypeSS || collector_type == kCollectorTypeGSS;
+ }
bool ShouldAllocLargeObject(mirror::Class* c, size_t byte_count) const;
ALWAYS_INLINE void CheckConcurrentGC(Thread* self, size_t new_num_bytes_allocated,
mirror::Object* obj);