-rw-r--r--  runtime/gc/collector_type.h |  2 ++
-rw-r--r--  runtime/gc/heap.cc          | 12 ++++++++++--
-rw-r--r--  runtime/gc/heap.h           |  7 ++++---
-rw-r--r--  runtime/metrics/statsd.cc   |  3 +++
-rw-r--r--  runtime/runtime.cc          |  6 +++---
5 files changed, 22 insertions(+), 8 deletions(-)
diff --git a/runtime/gc/collector_type.h b/runtime/gc/collector_type.h
index c20e3a7347..290860136b 100644
--- a/runtime/gc/collector_type.h
+++ b/runtime/gc/collector_type.h
@@ -32,6 +32,8 @@ enum CollectorType {
   kCollectorTypeCMS,
   // Concurrent mark-compact.
   kCollectorTypeCMC,
+  // The background compaction of the Concurrent mark-compact GC.
+  kCollectorTypeCMCBackground,
   // Semi-space / mark-sweep hybrid, enables compaction.
   kCollectorTypeSS,
   // Heap trimming collector, doesn't do any actual collecting.
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index eabe692d7e..796f8fbf09 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -428,7 +428,10 @@ Heap::Heap(size_t initial_size,
   }
   LOG(INFO) << "Using " << foreground_collector_type_ << " GC.";
-  if (!gUseUserfaultfd) {
+  if (gUseUserfaultfd) {
+    CHECK_EQ(foreground_collector_type_, kCollectorTypeCMC);
+    CHECK_EQ(background_collector_type_, kCollectorTypeCMCBackground);
+  } else {
     // This ensures that userfaultfd syscall is done before any seccomp filter is installed.
     // TODO(b/266731037): Remove this when we no longer need to collect metric on userfaultfd
     // support.
@@ -1566,7 +1569,7 @@ void Heap::DoPendingCollectorTransition() {
       VLOG(gc) << "Homogeneous compaction ignored due to jank perceptible process state";
     }
   } else if (desired_collector_type == kCollectorTypeCCBackground ||
-             desired_collector_type == kCollectorTypeCMC) {
+             desired_collector_type == kCollectorTypeCMCBackground) {
     if (!CareAboutPauseTimes()) {
       // Invoke full compaction.
       CollectGarbageInternal(collector::kGcTypeFull,
@@ -3989,7 +3992,12 @@ void Heap::RequestCollectorTransition(CollectorType desired_collector_type, uint
     // doesn't change.
     DCHECK_EQ(desired_collector_type_, kCollectorTypeCCBackground);
   }
+  if (collector_type_ == kCollectorTypeCMC) {
+    // For CMC collector type doesn't change.
+    DCHECK_EQ(desired_collector_type_, kCollectorTypeCMCBackground);
+  }
   DCHECK_NE(collector_type_, kCollectorTypeCCBackground);
+  DCHECK_NE(collector_type_, kCollectorTypeCMCBackground);
   CollectorTransitionTask* added_task = nullptr;
   const uint64_t target_time = NanoTime() + delta_time;
   {
diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h
index 9f0b8613d1..6c2ac01332 100644
--- a/runtime/gc/heap.h
+++ b/runtime/gc/heap.h
@@ -1052,6 +1052,7 @@ class Heap {
            collector_type == kCollectorTypeSS ||
            collector_type == kCollectorTypeCMC ||
            collector_type == kCollectorTypeCCBackground ||
+           collector_type == kCollectorTypeCMCBackground ||
            collector_type == kCollectorTypeHomogeneousSpaceCompact;
   }
   bool ShouldAllocLargeObject(ObjPtr<mirror::Class> c, size_t byte_count) const
@@ -1235,13 +1236,13 @@ class Heap {
   void ClearPendingTrim(Thread* self) REQUIRES(!*pending_task_lock_);
   void ClearPendingCollectorTransition(Thread* self) REQUIRES(!*pending_task_lock_);

-  // What kind of concurrency behavior is the runtime after? Currently true for concurrent mark
-  // sweep GC, false for other GC types.
+  // What kind of concurrency behavior is the runtime after?
   bool IsGcConcurrent() const ALWAYS_INLINE {
     return collector_type_ == kCollectorTypeCC ||
           collector_type_ == kCollectorTypeCMC ||
           collector_type_ == kCollectorTypeCMS ||
-          collector_type_ == kCollectorTypeCCBackground;
+          collector_type_ == kCollectorTypeCCBackground ||
+          collector_type_ == kCollectorTypeCMCBackground;
   }

   // Trim the managed and native spaces by releasing unused memory back to the OS.
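For context, the pattern these hunks extend: each compacting foreground collector has a dedicated background counterpart that the heap transitions to when the process no longer cares about pause times (CC pairs with CCBackground; CMC now pairs with the new CMCBackground). The sketch below is a minimal, standalone illustration of that pairing, not ART source: the enum is a trimmed mirror of collector_type.h, and BackgroundCounterpartOf() is a hypothetical helper that does not exist in the patch.

// Standalone sketch, not ART source code.
#include <cassert>

enum CollectorType {
  kCollectorTypeCMS,            // Concurrent mark-sweep.
  kCollectorTypeCMC,            // Concurrent mark-compact (foreground).
  kCollectorTypeCMCBackground,  // Background compaction of the CMC GC (added by this change).
  kCollectorTypeCC,             // Concurrent copying (foreground).
  kCollectorTypeCCBackground,   // Background compaction of the CC GC.
};

// Hypothetical helper illustrating the foreground/background pairing that the
// new CHECK_EQs in Heap::Heap() assume.
CollectorType BackgroundCounterpartOf(CollectorType foreground) {
  switch (foreground) {
    case kCollectorTypeCC:  return kCollectorTypeCCBackground;
    case kCollectorTypeCMC: return kCollectorTypeCMCBackground;
    default:                return foreground;  // Non-compacting collectors keep their type.
  }
}

int main() {
  // With userfaultfd, the foreground collector is CMC and the background one is
  // CMCBackground, matching the checks added in heap.cc.
  assert(BackgroundCounterpartOf(kCollectorTypeCMC) == kCollectorTypeCMCBackground);
  assert(BackgroundCounterpartOf(kCollectorTypeCC) == kCollectorTypeCCBackground);
  return 0;
}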
diff --git a/runtime/metrics/statsd.cc b/runtime/metrics/statsd.cc
index 2fd261a548..716989131e 100644
--- a/runtime/metrics/statsd.cc
+++ b/runtime/metrics/statsd.cc
@@ -307,6 +307,9 @@ constexpr int32_t EncodeGcCollectorType(gc::CollectorType collector_type) {
       return statsd::ART_DATUM_REPORTED__GC__ART_GC_COLLECTOR_TYPE_CONCURRENT_MARK_SWEEP;
     case gc::CollectorType::kCollectorTypeCMC:
       return statsd::ART_DATUM_REPORTED__GC__ART_GC_COLLECTOR_TYPE_CONCURRENT_MARK_COMPACT;
+    case gc::CollectorType::kCollectorTypeCMCBackground:
+      return statsd::
+          ART_DATUM_REPORTED__GC__ART_GC_COLLECTOR_TYPE_CONCURRENT_MARK_COMPACT_BACKGROUND;
     case gc::CollectorType::kCollectorTypeSS:
       return statsd::ART_DATUM_REPORTED__GC__ART_GC_COLLECTOR_TYPE_SEMI_SPACE;
     case gc::kCollectorTypeCC:
diff --git a/runtime/runtime.cc b/runtime/runtime.cc
index 8e70de5445..2b3c148dfd 100644
--- a/runtime/runtime.cc
+++ b/runtime/runtime.cc
@@ -1642,9 +1642,9 @@ bool Runtime::Init(RuntimeArgumentMap&& runtime_options_in) {
   InitializeApexVersions();

   BackgroundGcOption background_gc =
-      gUseReadBarrier ? BackgroundGcOption(gc::kCollectorTypeCCBackground)
-                      : (gUseUserfaultfd ? BackgroundGcOption(xgc_option.collector_type_)
-                                         : runtime_options.GetOrDefault(Opt::BackgroundGc));
+      gUseReadBarrier ? BackgroundGcOption(gc::kCollectorTypeCCBackground) :
+      (gUseUserfaultfd ? BackgroundGcOption(gc::kCollectorTypeCMCBackground) :
+                         runtime_options.GetOrDefault(Opt::BackgroundGc));

   heap_ = new gc::Heap(runtime_options.GetOrDefault(Opt::MemoryInitialSize),
                        runtime_options.GetOrDefault(Opt::HeapGrowthLimit),
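The runtime.cc hunk changes which background collector Runtime::Init() selects when userfaultfd is in use: rather than reusing the foreground type from the -Xgc option, it pins the background GC to kCollectorTypeCMCBackground, matching the CHECK_EQ added in Heap::Heap(). Below is a minimal sketch of that selection order, assuming simplified stand-in names (ChooseBackgroundCollector and its parameters are not real ART symbols; they stand in for gUseReadBarrier, gUseUserfaultfd and runtime_options.GetOrDefault(Opt::BackgroundGc)).

// Standalone sketch, not ART source code: condenses the ternary in Runtime::Init()
// into a named function with stand-in parameters.
enum CollectorType {
  kCollectorTypeCMS,
  kCollectorTypeCCBackground,
  kCollectorTypeCMCBackground,
};

CollectorType ChooseBackgroundCollector(bool use_read_barrier,
                                        bool use_userfaultfd,
                                        CollectorType configured_background) {
  if (use_read_barrier) {
    // CC foreground always pairs with the CC background compaction type.
    return kCollectorTypeCCBackground;
  }
  if (use_userfaultfd) {
    // New in this change: CMC foreground pairs with CMCBackground instead of
    // reusing the foreground collector type.
    return kCollectorTypeCMCBackground;
  }
  // Otherwise honor whatever the BackgroundGc runtime option asked for.
  return configured_background;
}

If neither flag is set, the configured Opt::BackgroundGc value is used, exactly as in the original expression.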