Diffstat (limited to 'runtime/gc/heap.cc')
 runtime/gc/heap.cc | 78
 1 file changed, 59 insertions(+), 19 deletions(-)
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 20e791d9f2..22207ee21c 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -110,6 +110,9 @@ static constexpr size_t kVerifyObjectAllocationStackSize = 16 * KB /
sizeof(mirror::HeapReference<mirror::Object>);
static constexpr size_t kDefaultAllocationStackSize = 8 * MB /
sizeof(mirror::HeapReference<mirror::Object>);
+// System.runFinalization can deadlock with native allocations. To deal with this, we have a
+// timeout on how long we wait for finalizers to run. b/21544853
+static constexpr uint64_t kNativeAllocationFinalizeTimeout = MsToNs(250u);
Heap::Heap(size_t initial_size, size_t growth_limit, size_t min_free, size_t max_free,
double target_utilization, double foreground_heap_growth_multiplier,
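
The new constant expresses a 250 ms finalization budget in nanoseconds. Below is a minimal standalone sketch of that conversion, assuming MsToNs simply multiplies milliseconds by 1,000,000 as the name suggests (the real helper lives in ART's time utilities, not here):

// Sketch only, not ART code: a MsToNs stand-in and the resulting constant.
#include <cstdint>
#include <iostream>

constexpr uint64_t MsToNs(uint64_t ms) { return ms * 1000000ULL; }

// Mirrors the new constant: a 250 ms finalization budget, stored in ns.
static constexpr uint64_t kNativeAllocationFinalizeTimeout = MsToNs(250u);

int main() {
  std::cout << kNativeAllocationFinalizeTimeout << " ns\n";  // prints 250000000 ns
}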
@@ -206,7 +209,8 @@ Heap::Heap(size_t initial_size, size_t growth_limit, size_t min_free, size_t max
blocking_gc_count_last_window_(0U),
gc_count_rate_histogram_("gc count rate histogram", 1U, kGcCountRateMaxBucketCount),
blocking_gc_count_rate_histogram_("blocking gc count rate histogram", 1U,
- kGcCountRateMaxBucketCount) {
+ kGcCountRateMaxBucketCount),
+ alloc_tracking_enabled_(false) {
if (VLOG_IS_ON(heap) || VLOG_IS_ON(startup)) {
LOG(INFO) << "Heap() entering";
}
@@ -232,10 +236,11 @@ Heap::Heap(size_t initial_size, size_t growth_limit, size_t min_free, size_t max
requested_alloc_space_begin = reinterpret_cast<uint8_t*>(300 * MB) - non_moving_space_capacity;
}
if (!image_file_name.empty()) {
+ ATRACE_BEGIN("ImageSpace::Create");
std::string error_msg;
- space::ImageSpace* image_space = space::ImageSpace::Create(image_file_name.c_str(),
- image_instruction_set,
- &error_msg);
+ auto* image_space = space::ImageSpace::Create(image_file_name.c_str(), image_instruction_set,
+ &error_msg);
+ ATRACE_END();
if (image_space != nullptr) {
AddSpace(image_space);
// Oat files referenced by image files immediately follow them in memory, ensure alloc space
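
The constructor's expensive phases are now bracketed with ATRACE_BEGIN/ATRACE_END so each one shows up as a named slice in systrace. A minimal sketch of the same idea using an RAII guard and placeholder trace hooks (not the real macros), which avoids a mismatched begin/end on early returns:

// Illustrative sketch with placeholder hooks standing in for ATRACE_BEGIN/ATRACE_END.
#include <cstdio>

static void TraceBegin(const char* name) { std::printf("B|%s\n", name); }  // placeholder
static void TraceEnd() { std::printf("E\n"); }                             // placeholder

struct ScopedTrace {
  explicit ScopedTrace(const char* name) { TraceBegin(name); }
  ~ScopedTrace() { TraceEnd(); }
};

static void CreateImageSpacePhase() {
  ScopedTrace trace("ImageSpace::Create");  // end emitted automatically at scope exit
  // ... load and map the boot image here ...
}

int main() { CreateImageSpacePhase(); }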
@@ -287,6 +292,7 @@ Heap::Heap(size_t initial_size, size_t growth_limit, size_t min_free, size_t max
}
std::string error_str;
std::unique_ptr<MemMap> non_moving_space_mem_map;
+ ATRACE_BEGIN("Create heap maps");
if (separate_non_moving_space) {
// If we are the zygote, the non moving space becomes the zygote space when we run
// PreZygoteFork the first time. In this case, call the map "zygote space" since we can't
@@ -323,6 +329,8 @@ Heap::Heap(size_t initial_size, size_t growth_limit, size_t min_free, size_t max
capacity_, &error_str));
CHECK(main_mem_map_2.get() != nullptr) << error_str;
}
+ ATRACE_END();
+ ATRACE_BEGIN("Create spaces");
// Create the non moving space first so that bitmaps don't take up the address range.
if (separate_non_moving_space) {
// Non moving space is always dlmalloc since we currently don't have support for multiple
@@ -340,7 +348,8 @@ Heap::Heap(size_t initial_size, size_t growth_limit, size_t min_free, size_t max
if (foreground_collector_type_ == kCollectorTypeCC) {
region_space_ = space::RegionSpace::Create("Region space", capacity_ * 2, request_begin);
AddSpace(region_space_);
- } else if (IsMovingGc(foreground_collector_type_) && foreground_collector_type_ != kCollectorTypeGSS) {
+ } else if (IsMovingGc(foreground_collector_type_) &&
+ foreground_collector_type_ != kCollectorTypeGSS) {
// Create bump pointer spaces.
// We only need to create the bump pointer spaces if the foreground collector is a compacting GC.
// TODO: Place bump-pointer spaces somewhere to minimize size of card table.
@@ -411,10 +420,12 @@ Heap::Heap(size_t initial_size, size_t growth_limit, size_t min_free, size_t max
if (main_space_backup_.get() != nullptr) {
RemoveSpace(main_space_backup_.get());
}
+ ATRACE_END();
// Allocate the card table.
+ ATRACE_BEGIN("Create card table");
card_table_.reset(accounting::CardTable::Create(heap_begin, heap_capacity));
CHECK(card_table_.get() != nullptr) << "Failed to create card table";
-
+ ATRACE_END();
if (foreground_collector_type_ == kCollectorTypeCC && kUseTableLookupReadBarrier) {
rb_table_.reset(new accounting::ReadBarrierTable());
DCHECK(rb_table_->IsAllCleared());
@@ -990,6 +1001,27 @@ void Heap::DumpGcPerformanceInfo(std::ostream& os) {
BaseMutex::DumpAll(os);
}
+void Heap::ResetGcPerformanceInfo() {
+ for (auto& collector : garbage_collectors_) {
+ collector->ResetMeasurements();
+ }
+ total_allocation_time_.StoreRelaxed(0);
+ total_bytes_freed_ever_ = 0;
+ total_objects_freed_ever_ = 0;
+ total_wait_time_ = 0;
+ blocking_gc_count_ = 0;
+ blocking_gc_time_ = 0;
+ gc_count_last_window_ = 0;
+ blocking_gc_count_last_window_ = 0;
+ last_update_time_gc_count_rate_histograms_ = // Round down by the window duration.
+ (NanoTime() / kGcCountRateHistogramWindowDuration) * kGcCountRateHistogramWindowDuration;
+ {
+ MutexLock mu(Thread::Current(), *gc_complete_lock_);
+ gc_count_rate_histogram_.Reset();
+ blocking_gc_count_rate_histogram_.Reset();
+ }
+}
+
uint64_t Heap::GetGcCount() const {
uint64_t gc_count = 0U;
for (auto& collector : garbage_collectors_) {
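
ResetGcPerformanceInfo re-aligns last_update_time_gc_count_rate_histograms_ to the start of the current histogram window by rounding NanoTime() down to a multiple of the window duration. A small self-contained sketch of that round-down arithmetic, with an assumed window length (the real kGcCountRateHistogramWindowDuration may differ):

// Standalone sketch: integer division then multiplication snaps a timestamp
// to the start of its window. Names and values here are placeholders.
#include <cstdint>
#include <iostream>

int main() {
  const uint64_t kWindowDurationNs = 10ULL * 1000 * 1000 * 1000;  // assume a 10 s window
  const uint64_t now_ns = 123456789012ULL;                        // pretend NanoTime() result
  const uint64_t window_start = (now_ns / kWindowDurationNs) * kWindowDurationNs;
  std::cout << "window starts at " << window_start << " ns\n";    // 120000000000
}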
@@ -1033,6 +1065,7 @@ Heap::~Heap() {
STLDeleteElements(&garbage_collectors_);
// If we don't reset then the mark stack complains in its destructor.
allocation_stack_->Reset();
+ allocation_records_.reset();
live_stack_->Reset();
STLDeleteValues(&mod_union_tables_);
STLDeleteValues(&remembered_sets_);
@@ -3531,22 +3564,16 @@ bool Heap::IsGCRequestPending() const {
return concurrent_gc_pending_.LoadRelaxed();
}
-void Heap::RunFinalization(JNIEnv* env) {
- // Can't do this in WellKnownClasses::Init since System is not properly set up at that point.
- if (WellKnownClasses::java_lang_System_runFinalization == nullptr) {
- CHECK(WellKnownClasses::java_lang_System != nullptr);
- WellKnownClasses::java_lang_System_runFinalization =
- CacheMethod(env, WellKnownClasses::java_lang_System, true, "runFinalization", "()V");
- CHECK(WellKnownClasses::java_lang_System_runFinalization != nullptr);
- }
- env->CallStaticVoidMethod(WellKnownClasses::java_lang_System,
- WellKnownClasses::java_lang_System_runFinalization);
+void Heap::RunFinalization(JNIEnv* env, uint64_t timeout) {
+ env->CallStaticVoidMethod(WellKnownClasses::dalvik_system_VMRuntime,
+ WellKnownClasses::dalvik_system_VMRuntime_runFinalization,
+ static_cast<jlong>(timeout));
}
void Heap::RegisterNativeAllocation(JNIEnv* env, size_t bytes) {
Thread* self = ThreadForEnv(env);
if (native_need_to_run_finalization_) {
- RunFinalization(env);
+ RunFinalization(env, kNativeAllocationFinalizeTimeout);
UpdateMaxNativeFootprint();
native_need_to_run_finalization_ = false;
}
@@ -3562,7 +3589,7 @@ void Heap::RegisterNativeAllocation(JNIEnv* env, size_t bytes) {
if (new_native_bytes_allocated > growth_limit_) {
if (WaitForGcToComplete(kGcCauseForNativeAlloc, self) != collector::kGcTypeNone) {
// Just finished a GC, attempt to run finalizers.
- RunFinalization(env);
+ RunFinalization(env, kNativeAllocationFinalizeTimeout);
CHECK(!env->ExceptionCheck());
// Native bytes allocated may be updated by finalization, refresh it.
new_native_bytes_allocated = native_bytes_allocated_.LoadRelaxed();
@@ -3570,7 +3597,7 @@ void Heap::RegisterNativeAllocation(JNIEnv* env, size_t bytes) {
// If we still are over the watermark, attempt a GC for alloc and run finalizers.
if (new_native_bytes_allocated > growth_limit_) {
CollectGarbageInternal(gc_type, kGcCauseForNativeAlloc, false);
- RunFinalization(env);
+ RunFinalization(env, kNativeAllocationFinalizeTimeout);
native_need_to_run_finalization_ = false;
CHECK(!env->ExceptionCheck());
}
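
Taken together, the three call sites above keep the existing native-allocation watermark logic but cap each finalization pass at the new timeout. A condensed, hedged sketch of that control flow with stub helpers (none of these names are ART's real JNI or GC plumbing):

// Sketch of the ordering only: wait for a pending GC, run finalizers with a
// timeout, re-read the native byte count, and collect again if still over.
#include <atomic>
#include <cstddef>
#include <cstdint>

constexpr uint64_t kFinalizeTimeoutNs = 250ULL * 1000 * 1000;

std::atomic<uint64_t> native_bytes_allocated{0};
uint64_t growth_limit = 64 * 1024 * 1024;

bool WaitForGcToComplete() { return true; }          // stub: true if a GC just finished
void RunFinalizationWithTimeout(uint64_t /*ns*/) {}  // stub: would call VMRuntime.runFinalization
void CollectGarbageForNativeAlloc() {}               // stub: would trigger a blocking GC

void RegisterNativeAllocation(size_t bytes) {
  uint64_t now = native_bytes_allocated.fetch_add(bytes) + bytes;
  if (now > growth_limit) {
    if (WaitForGcToComplete()) {
      RunFinalizationWithTimeout(kFinalizeTimeoutNs);  // finalizers may release native memory
      now = native_bytes_allocated.load();             // refresh after finalization
    }
    if (now > growth_limit) {
      CollectGarbageForNativeAlloc();                  // still over: force a GC, then finalize
      RunFinalizationWithTimeout(kFinalizeTimeoutNs);
    }
  }
}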
@@ -3649,5 +3676,18 @@ void Heap::ClearMarkedObjects() {
}
}
+void Heap::SetAllocationRecords(AllocRecordObjectMap* records) {
+ allocation_records_.reset(records);
+}
+
+void Heap::SweepAllocationRecords(IsMarkedCallback* visitor, void* arg) const {
+ if (IsAllocTrackingEnabled()) {
+ MutexLock mu(Thread::Current(), *Locks::alloc_tracker_lock_);
+ if (IsAllocTrackingEnabled()) {
+ GetAllocationRecords()->SweepAllocationRecords(visitor, arg);
+ }
+ }
+}
+
} // namespace gc
} // namespace art
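
The new SweepAllocationRecords helper closes the patch with a check-lock-recheck pattern: allocation tracking is usually off, so the lock is only taken when an unlocked read says tracking might be on, and the flag is re-read under the lock before the records are touched. A simplified sketch of that pattern outside ART's lock hierarchy (types and names are stand-ins, not ART's):

// Sketch of the check-lock-recheck fast path used above.
#include <mutex>

struct AllocRecordMap { void Sweep() {} };

std::mutex alloc_tracker_lock;
bool alloc_tracking_enabled = false;   // flipped while holding alloc_tracker_lock
AllocRecordMap* allocation_records = nullptr;

void SweepAllocationRecords() {
  if (alloc_tracking_enabled) {                      // cheap unlocked fast-path check
    std::lock_guard<std::mutex> lock(alloc_tracker_lock);
    if (alloc_tracking_enabled) {                    // recheck now that the lock is held
      allocation_records->Sweep();
    }
  }
}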