Diffstat (limited to 'runtime/gc/heap.cc')
-rw-r--r--  runtime/gc/heap.cc | 51 +++++++++++++++++++++++++++++++++++++++++++++--------
1 file changed, 43 insertions(+), 8 deletions(-)
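
For context: this change lets the AllocationListener's pre-allocation hook suspend the calling thread, so AllocateInternalWithGc must drop its no-thread-suspension assertion around every blocking operation and re-deliver the pre-alloc event afterwards. Below is a rough, hypothetical sketch of the listener interface this code talks to, reconstructed from the call sites in the diff (l->HasPreAlloc(), l->PreObjectAllocated(self, h_klass, &alloc_size)) rather than copied from runtime/gc/allocation_listener.h:

  #include <cstddef>

  // Hypothetical sketch, not the real runtime/gc/allocation_listener.h;
  // shapes inferred from the call sites in this diff.
  class Thread;
  namespace mirror { class Class; }
  template <typename T> class MutableHandle;

  class AllocationListener {
   public:
    virtual ~AllocationListener() = default;
    // Cheap query checked before delivering the pre-alloc event.
    virtual bool HasPreAlloc() const { return false; }
    // May block and suspend the caller; this patch exists so that can
    // happen safely from inside AllocateInternalWithGc. The listener may
    // also adjust the requested size through byte_count.
    virtual void PreObjectAllocated(Thread* self,
                                    MutableHandle<mirror::Class> type,
                                    size_t* byte_count) = 0;
  };
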
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 85b79da329..b462e294ba 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -17,6 +17,7 @@
#include "heap.h"
#include <limits>
+#include "android-base/thread_annotations.h"
#if defined(__BIONIC__) || defined(__GLIBC__)
#include <malloc.h> // For mallinfo()
#endif
@@ -1723,18 +1724,46 @@ mirror::Object* Heap::AllocateInternalWithGc(Thread* self,
size_t* bytes_allocated,
size_t* usable_size,
size_t* bytes_tl_bulk_allocated,
- ObjPtr<mirror::Class>* klass) {
+ ObjPtr<mirror::Class>* klass,
+ /*out*/const char** old_no_thread_suspend_cause) {
bool was_default_allocator = allocator == GetCurrentAllocator();
// Make sure there is no pending exception since we may need to throw an OOME.
self->AssertNoPendingException();
DCHECK(klass != nullptr);
+
StackHandleScope<1> hs(self);
- HandleWrapperObjPtr<mirror::Class> h(hs.NewHandleWrapper(klass));
+ HandleWrapperObjPtr<mirror::Class> h_klass(hs.NewHandleWrapper(klass));
+
+ auto release_no_suspend = [&]() RELEASE(Roles::uninterruptible_) {
+ self->EndAssertNoThreadSuspension(*old_no_thread_suspend_cause);
+ };
+ auto send_object_pre_alloc = [&]() ACQUIRE(Roles::uninterruptible_)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ if (UNLIKELY(instrumented)) {
+ AllocationListener* l = alloc_listener_.load(std::memory_order_seq_cst);
+ if (UNLIKELY(l != nullptr) && UNLIKELY(l->HasPreAlloc())) {
+ l->PreObjectAllocated(self, h_klass, &alloc_size);
+ }
+ }
+ *old_no_thread_suspend_cause =
+ self->StartAssertNoThreadSuspension("Called PreObjectAllocated, no suspend until alloc");
+  };
+#define PERFORM_SUSPENDING_OPERATION(op) \
+ [&]() REQUIRES(Roles::uninterruptible_) REQUIRES_SHARED(Locks::mutator_lock_) { \
+ release_no_suspend(); \
+ auto res = (op); \
+ send_object_pre_alloc(); \
+ return res; \
+ }()
+
// The allocation failed. If the GC is running, block until it completes, and then retry the
// allocation.
collector::GcType last_gc = WaitForGcToComplete(kGcCauseForAlloc, self);
+ // We just waited, call the pre-alloc again.
+ send_object_pre_alloc();
// If we were the default allocator but the allocator changed while we were suspended,
// abort the allocation.
if ((was_default_allocator && allocator != GetCurrentAllocator()) ||
(!instrumented && EntrypointsInstrumented())) {
return nullptr;
@@ -1749,8 +1778,9 @@ mirror::Object* Heap::AllocateInternalWithGc(Thread* self,
}
collector::GcType tried_type = next_gc_type_;
- const bool gc_ran =
- CollectGarbageInternal(tried_type, kGcCauseForAlloc, false) != collector::kGcTypeNone;
+ const bool gc_ran = PERFORM_SUSPENDING_OPERATION(
+ CollectGarbageInternal(tried_type, kGcCauseForAlloc, false) != collector::kGcTypeNone);
+
if ((was_default_allocator && allocator != GetCurrentAllocator()) ||
(!instrumented && EntrypointsInstrumented())) {
return nullptr;
@@ -1769,8 +1799,8 @@ mirror::Object* Heap::AllocateInternalWithGc(Thread* self,
continue;
}
// Attempt to run the collector, if we succeed, re-try the allocation.
- const bool plan_gc_ran =
- CollectGarbageInternal(gc_type, kGcCauseForAlloc, false) != collector::kGcTypeNone;
+ const bool plan_gc_ran = PERFORM_SUSPENDING_OPERATION(
+ CollectGarbageInternal(gc_type, kGcCauseForAlloc, false) != collector::kGcTypeNone);
if ((was_default_allocator && allocator != GetCurrentAllocator()) ||
(!instrumented && EntrypointsInstrumented())) {
return nullptr;
@@ -1800,7 +1830,7 @@ mirror::Object* Heap::AllocateInternalWithGc(Thread* self,
// TODO: Run finalization, but this may cause more allocations to occur.
// We don't need a WaitForGcToComplete here either.
DCHECK(!gc_plan_.empty());
- CollectGarbageInternal(gc_plan_.back(), kGcCauseForAlloc, true);
+ PERFORM_SUSPENDING_OPERATION(CollectGarbageInternal(gc_plan_.back(), kGcCauseForAlloc, true));
if ((was_default_allocator && allocator != GetCurrentAllocator()) ||
(!instrumented && EntrypointsInstrumented())) {
return nullptr;
@@ -1817,7 +1847,8 @@ mirror::Object* Heap::AllocateInternalWithGc(Thread* self,
current_time - last_time_homogeneous_space_compaction_by_oom_ >
min_interval_homogeneous_space_compaction_by_oom_) {
last_time_homogeneous_space_compaction_by_oom_ = current_time;
- HomogeneousSpaceCompactResult result = PerformHomogeneousSpaceCompact();
+ HomogeneousSpaceCompactResult result =
+ PERFORM_SUSPENDING_OPERATION(PerformHomogeneousSpaceCompact());
// Thread suspension could have occurred.
if ((was_default_allocator && allocator != GetCurrentAllocator()) ||
(!instrumented && EntrypointsInstrumented())) {
@@ -1862,9 +1893,13 @@ mirror::Object* Heap::AllocateInternalWithGc(Thread* self,
}
}
}
+#undef PERFORM_SUSPENDING_OPERATION
// If the allocation hasn't succeeded by this point, throw an OOM error.
if (ptr == nullptr) {
+ release_no_suspend();
ThrowOutOfMemoryError(self, alloc_size, allocator);
+ *old_no_thread_suspend_cause =
+ self->StartAssertNoThreadSuspension("Failed allocation fallback");
}
return ptr;
}
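
The core trick above is bracketing: every operation that may suspend first releases the "no thread suspension" assertion, runs, then re-delivers the pre-allocation event and re-arms the assertion, so the listener always sees a fresh pre-alloc notification after any wait. A minimal self-contained model of that pattern, with stand-in names (FakeThread, perform_suspending_operation) that are not part of ART:

  #include <cassert>
  #include <cstdio>

  // Stand-in for Thread's no-suspension bookkeeping; purely illustrative.
  struct FakeThread {
    const char* no_suspend_cause = nullptr;
    // Returns the previous cause so it can be restored later, mirroring
    // Thread::StartAssertNoThreadSuspension / EndAssertNoThreadSuspension.
    const char* StartAssertNoThreadSuspension(const char* cause) {
      const char* old = no_suspend_cause;
      no_suspend_cause = cause;
      return old;
    }
    void EndAssertNoThreadSuspension(const char* old_cause) {
      assert(no_suspend_cause != nullptr);
      no_suspend_cause = old_cause;
    }
    void CheckMaySuspend() { assert(no_suspend_cause == nullptr); }
  };

  int main() {
    FakeThread self;
    const char* old_cause = nullptr;

    auto send_pre_alloc = [&]() {
      std::printf("pre-alloc listener notified\n");
      old_cause = self.StartAssertNoThreadSuspension("no suspend until alloc");
    };
    auto release_no_suspend = [&]() { self.EndAssertNoThreadSuspension(old_cause); };

    // The PERFORM_SUSPENDING_OPERATION pattern: release, run, re-notify.
    auto perform_suspending_operation = [&](auto op) {
      release_no_suspend();
      auto res = op();
      send_pre_alloc();
      return res;
    };

    send_pre_alloc();  // initial notification, as the patched function does
    bool gc_ran = perform_suspending_operation([&] {
      self.CheckMaySuspend();  // a real GC wait may suspend here
      return true;
    });
    release_no_suspend();  // cleanup that the real caller does via the out-parameter
    return gc_ran ? 0 : 1;
  }
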
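On the caller side, the new /*out*/ parameter shifts responsibility: AllocateInternalWithGc now returns with the no-suspension assertion armed, and the caller must end it once the object has been published. A hedged sketch of the expected call shape follows; the surrounding variables are illustrative, since this diff does not show the caller:

  const char* old_no_suspend_cause = nullptr;
  mirror::Object* ptr = AllocateInternalWithGc(self,
                                               allocator,
                                               kInstrumented,
                                               alloc_size,
                                               &bytes_allocated,
                                               &usable_size,
                                               &bytes_tl_bulk_allocated,
                                               &klass,
                                               &old_no_suspend_cause);
  // ... record/publish the allocation; must not suspend in this window ...
  self->EndAssertNoThreadSuspension(old_no_suspend_cause);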