Use mutator lock to guard adding and removing heap spaces

It is too hard to add a new lock, since the dlmalloc ArtMoreCore
callback requires looping through the spaces while the allocator
lock is already held.
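
The fix instead reuses the mutator lock: readers already hold it
shared through ScopedObjectAccess, and spaces are only added or
removed while it is held exclusively. A minimal standalone sketch of
that reader/writer discipline, using std::shared_mutex as a stand-in
for the mutator lock (SpaceList, Add, and TrimAll are illustrative
names here, not ART APIs):

  #include <cstddef>
  #include <memory>
  #include <mutex>
  #include <shared_mutex>
  #include <vector>

  // Stand-in for a heap space; Trim() would return bytes reclaimed.
  struct Space {
    std::size_t Trim() { return 0; }
  };

  class SpaceList {
   public:
    // Mutations take the lock exclusively, like adding or removing
    // a space while the mutator lock is exclusively held.
    void Add(std::unique_ptr<Space> space) {
      std::unique_lock<std::shared_mutex> lock(lock_);
      spaces_.push_back(std::move(space));
    }

    // Read-only walks take the lock shared, like iterating
    // continuous_spaces_ under ScopedObjectAccess.
    std::size_t TrimAll() {
      std::shared_lock<std::shared_mutex> lock(lock_);
      std::size_t reclaimed = 0;
      for (const auto& space : spaces_) {
        reclaimed += space->Trim();
      }
      return reclaimed;
    }

   private:
    std::shared_mutex lock_;
    std::vector<std::unique_ptr<Space>> spaces_;
  };

  int main() {
    SpaceList list;
    list.Add(std::make_unique<Space>());
    return static_cast<int>(list.TrimAll());
  }
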
Bug: 22858531
Change-Id: Ieac2136da02c766b6795cd604a58798bee37ef2a
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 8cd8d73..137540a 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -801,6 +801,7 @@
   if (!Runtime::Current()->IsAotCompiler()) {
     return false;
   }
+  ScopedObjectAccess soa(Thread::Current());
   for (const auto& space : continuous_spaces_) {
     if (space->IsImageSpace() || space->IsZygoteSpace()) {
       return false;
@@ -1381,15 +1382,18 @@
   uint64_t total_alloc_space_allocated = 0;
   uint64_t total_alloc_space_size = 0;
   uint64_t managed_reclaimed = 0;
-  for (const auto& space : continuous_spaces_) {
-    if (space->IsMallocSpace()) {
-      gc::space::MallocSpace* malloc_space = space->AsMallocSpace();
-      if (malloc_space->IsRosAllocSpace() || !CareAboutPauseTimes()) {
-        // Don't trim dlmalloc spaces if we care about pauses since this can hold the space lock
-        // for a long period of time.
-        managed_reclaimed += malloc_space->Trim();
+  {
+    ScopedObjectAccess soa(self);
+    for (const auto& space : continuous_spaces_) {
+      if (space->IsMallocSpace()) {
+        gc::space::MallocSpace* malloc_space = space->AsMallocSpace();
+        if (malloc_space->IsRosAllocSpace() || !CareAboutPauseTimes()) {
+          // Don't trim dlmalloc spaces if we care about pauses since this can hold the space lock
+          // for a long period of time.
+          managed_reclaimed += malloc_space->Trim();
+        }
+        total_alloc_space_size += malloc_space->Size();
       }
-      total_alloc_space_size += malloc_space->Size();
     }
   }
   total_alloc_space_allocated = GetBytesAllocated();
@@ -1520,6 +1524,7 @@
 }
 
 void Heap::DumpSpaces(std::ostream& stream) const {
+  ScopedObjectAccess soa(Thread::Current());
   for (const auto& space : continuous_spaces_) {
     accounting::ContinuousSpaceBitmap* live_bitmap = space->GetLiveBitmap();
     accounting::ContinuousSpaceBitmap* mark_bitmap = space->GetMarkBitmap();
@@ -1598,6 +1603,9 @@
 }
 
 space::RosAllocSpace* Heap::GetRosAllocSpace(gc::allocator::RosAlloc* rosalloc) const {
+  if (rosalloc_space_ != nullptr && rosalloc_space_->GetRosAlloc() == rosalloc) {
+    return rosalloc_space_;
+  }
   for (const auto& space : continuous_spaces_) {
     if (space->AsContinuousSpace()->IsRosAllocSpace()) {
       if (space->AsContinuousSpace()->AsRosAllocSpace()->GetRosAlloc() == rosalloc) {
@@ -3530,7 +3538,8 @@
 
 void Heap::ClampGrowthLimit() {
   // Use heap bitmap lock to guard against races with BindLiveToMarkBitmap.
-  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
+  ScopedObjectAccess soa(Thread::Current());
+  WriterMutexLock mu(soa.Self(), *Locks::heap_bitmap_lock_);
   capacity_ = growth_limit_;
   for (const auto& space : continuous_spaces_) {
     if (space->IsMallocSpace()) {
@@ -3546,6 +3555,7 @@
 
 void Heap::ClearGrowthLimit() {
   growth_limit_ = capacity_;
+  ScopedObjectAccess soa(Thread::Current());
   for (const auto& space : continuous_spaces_) {
     if (space->IsMallocSpace()) {
       gc::space::MallocSpace* malloc_space = space->AsMallocSpace();
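
The GetRosAllocSpace hunk also adds a fast path: the heap's primary
rosalloc space is checked through the cached rosalloc_space_ field
before falling back to the walk over continuous_spaces_, so the
common case never touches the space list at all. A hedged sketch of
that lookup shape (Find and the field names are illustrative, not
ART code):

  #include <vector>

  struct RosAlloc {};
  struct RosAllocSpace {
    RosAlloc* allocator;
  };

  // Check the cached primary space first, then scan the full list.
  RosAllocSpace* Find(RosAllocSpace* cached,
                      const std::vector<RosAllocSpace*>& all,
                      RosAlloc* target) {
    if (cached != nullptr && cached->allocator == target) {
      return cached;  // common case: the heap's main rosalloc space
    }
    for (RosAllocSpace* space : all) {
      if (space->allocator == target) {
        return space;
      }
    }
    return nullptr;
  }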