 runtime/atomic.h      | 4 ++++
 runtime/gc/heap-inl.h | 4 ++--
 2 files changed, 6 insertions(+), 2 deletions(-)
diff --git a/runtime/atomic.h b/runtime/atomic.h
index 0faa3c69c6..d4a7f37bc6 100644
--- a/runtime/atomic.h
+++ b/runtime/atomic.h
@@ -278,6 +278,10 @@ class PACKED(sizeof(T)) Atomic : public std::atomic<T> {
     return this->fetch_add(value, std::memory_order_seq_cst);  // Return old_value.
   }
 
+  T FetchAndAddRelaxed(const T value) {
+    return this->fetch_add(value, std::memory_order_relaxed);  // Return old_value.
+  }
+
   T FetchAndSubSequentiallyConsistent(const T value) {
     return this->fetch_sub(value, std::memory_order_seq_cst);  // Return old value.
   }
diff --git a/runtime/gc/heap-inl.h b/runtime/gc/heap-inl.h
index d1ab587aea..f437830e11 100644
--- a/runtime/gc/heap-inl.h
+++ b/runtime/gc/heap-inl.h
@@ -145,9 +145,9 @@ inline mirror::Object* Heap::AllocObjectWithAllocator(Thread* self,
       WriteBarrierField(obj, mirror::Object::ClassOffset(), klass);
     }
     pre_fence_visitor(obj, usable_size);
+    QuasiAtomic::ThreadFenceForConstructor();
     new_num_bytes_allocated = static_cast<size_t>(
-        num_bytes_allocated_.FetchAndAddSequentiallyConsistent(bytes_tl_bulk_allocated))
-        + bytes_tl_bulk_allocated;
+        num_bytes_allocated_.FetchAndAddRelaxed(bytes_tl_bulk_allocated)) + bytes_tl_bulk_allocated;
   }
   if (kIsDebugBuild && Runtime::Current()->IsStarted()) {
     CHECK_LE(obj->SizeOf(), usable_size);
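For context, the pattern this change applies is: the allocated-bytes counter only needs atomicity, not ordering, so the sequentially consistent fetch_add can be downgraded to a relaxed one, while a dedicated release fence (QuasiAtomic::ThreadFenceForConstructor, placed just after the pre-fence visitor runs) still orders object initialization before the object becomes visible to other threads. Below is a minimal standalone C++ sketch of that pattern, not taken from the ART sources; the names Allocate and bytes_allocated are illustrative only.

#include <atomic>
#include <cstddef>
#include <new>

// Illustrative global counter: many threads bump it concurrently, and
// readers only need an approximate total, so relaxed ordering suffices.
std::atomic<size_t> bytes_allocated{0};

void* Allocate(size_t bytes) {
  void* obj = ::operator new(bytes);
  // ... initialize the object's fields here ...

  // Release fence: orders the initialization above before any later
  // store that publishes the object (the role ThreadFenceForConstructor
  // plays in this change).
  std::atomic_thread_fence(std::memory_order_release);

  // The counter update carries no synchronization obligations of its
  // own, so plain atomicity (memory_order_relaxed) is enough.
  bytes_allocated.fetch_add(bytes, std::memory_order_relaxed);
  return obj;
}

On x86 a relaxed fetch_add still compiles to a locked RMW instruction, but on weakly ordered targets such as ARM it drops the full barriers the seq_cst version required, which is the cost this commit removes from the allocation fast path.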