author Roland Levillain <rpl@google.com> 2018-02-02 11:55:00 +0000
committer Gerrit Code Review <noreply-gerritcodereview@google.com> 2018-02-02 11:55:00 +0000
commit e01ec238cccd6a936d80d6fd09d924d566f3ad76 (patch)
tree 848f66deae13043353e13eaba8464e233217b6b8
parent e5bf41acab7cc5a798a4d19425493a4af8f20436 (diff)
parent 9b869ea9a0c65a3c4860768fae00f937ad969153 (diff)
Merge "Fix heap size tracing during bulk thread-local allocations."
-rw-r--r--  runtime/gc/heap-inl.h  11
1 file changed, 6 insertions(+), 5 deletions(-)
diff --git a/runtime/gc/heap-inl.h b/runtime/gc/heap-inl.h
index 52dd104ac8..6735961591 100644
--- a/runtime/gc/heap-inl.h
+++ b/runtime/gc/heap-inl.h
@@ -106,8 +106,8 @@ inline mirror::Object* Heap::AllocObjectWithAllocator(Thread* self,
pre_fence_visitor(obj, usable_size);
QuasiAtomic::ThreadFenceForConstructor();
} else {
- // bytes allocated that takes bulk thread-local buffer allocations into account.
- size_t bytes_tl_bulk_allocated = 0;
+ // Bytes allocated that takes bulk thread-local buffer allocations into account.
+ size_t bytes_tl_bulk_allocated = 0u;
obj = TryToAllocate<kInstrumented, false>(self, allocator, byte_count, &bytes_allocated,
&usable_size, &bytes_tl_bulk_allocated);
if (UNLIKELY(obj == nullptr)) {
@@ -154,12 +154,13 @@ inline mirror::Object* Heap::AllocObjectWithAllocator(Thread* self,
}
pre_fence_visitor(obj, usable_size);
QuasiAtomic::ThreadFenceForConstructor();
- new_num_bytes_allocated = num_bytes_allocated_.FetchAndAddRelaxed(bytes_tl_bulk_allocated) +
- bytes_tl_bulk_allocated;
+ size_t num_bytes_allocated_before =
+ num_bytes_allocated_.FetchAndAddRelaxed(bytes_tl_bulk_allocated);
+ new_num_bytes_allocated = num_bytes_allocated_before + bytes_tl_bulk_allocated;
if (bytes_tl_bulk_allocated > 0) {
// Only trace when we get an increase in the number of bytes allocated. This happens when
// obtaining a new TLAB and isn't often enough to hurt performance according to golem.
- TraceHeapSize(new_num_bytes_allocated + bytes_tl_bulk_allocated);
+ TraceHeapSize(new_num_bytes_allocated);
}
}
if (kIsDebugBuild && Runtime::Current()->IsStarted()) {
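
For context, a minimal standalone C++ sketch of the accounting bug this merge fixes (not ART code; the counter, TraceHeapSize, and the constants are stand-ins). ART's FetchAndAddRelaxed, like std::atomic's fetch_add, returns the counter's value *before* the addition, so new_num_bytes_allocated already equals the updated running total; adding bytes_tl_bulk_allocated to it again, as the old TraceHeapSize call did, reported the heap as one TLAB's worth of bytes larger than it actually is.

#include <atomic>
#include <cstddef>
#include <cstdio>

// Hypothetical running total of allocated bytes (ART keeps this in
// num_bytes_allocated_).
std::atomic<size_t> num_bytes_allocated{1000};

// Stand-in for ART's tracing hook.
void TraceHeapSize(size_t heap_size) {
  std::printf("heap size: %zu\n", heap_size);
}

int main() {
  // Bytes claimed in one bulk thread-local (TLAB) allocation.
  const size_t bytes_tl_bulk_allocated = 256;

  // fetch_add returns the value held *before* the addition, so the new
  // total is "before + delta".
  size_t before = num_bytes_allocated.fetch_add(bytes_tl_bulk_allocated,
                                                std::memory_order_relaxed);
  size_t new_num_bytes_allocated = before + bytes_tl_bulk_allocated;

  // Before the fix: prints 1512, counting the 256-byte TLAB twice.
  TraceHeapSize(new_num_bytes_allocated + bytes_tl_bulk_allocated);
  // After the fix: prints 1256, the actual running total.
  TraceHeapSize(new_num_bytes_allocated);
  return 0;
}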