Small documentation and stylistic changes: annotate opaque boolean and nullptr
arguments with parameter-name comments (e.g. `/* clear_soft_references */` at
CollectGarbage call sites), add a missing period in allocation_record.cc, and
clarify the mutator-lock comments on SuspendAll / ScopedSuspendAll in
thread_list.h.
Test: art/test.py
Change-Id: Ibc74ee4783314266bf1d027811715058626c57c8
diff --git a/runtime/debugger.cc b/runtime/debugger.cc
index 602e094..5066385 100644
--- a/runtime/debugger.cc
+++ b/runtime/debugger.cc
@@ -949,7 +949,7 @@
JDWP::JdwpError Dbg::GetInstanceCounts(const std::vector<JDWP::RefTypeId>& class_ids,
std::vector<uint64_t>* counts) {
gc::Heap* heap = Runtime::Current()->GetHeap();
- heap->CollectGarbage(false, gc::GcCause::kGcCauseDebugger);
+ heap->CollectGarbage(/* clear_soft_references */ false, gc::GcCause::kGcCauseDebugger);
VariableSizedHandleScope hs(Thread::Current());
std::vector<Handle<mirror::Class>> classes;
counts->clear();
@@ -970,7 +970,7 @@
std::vector<JDWP::ObjectId>* instances) {
gc::Heap* heap = Runtime::Current()->GetHeap();
// We only want reachable instances, so do a GC.
- heap->CollectGarbage(false, gc::GcCause::kGcCauseDebugger);
+ heap->CollectGarbage(/* clear_soft_references */ false, gc::GcCause::kGcCauseDebugger);
JDWP::JdwpError error;
ObjPtr<mirror::Class> c = DecodeClass(class_id, &error);
if (c == nullptr) {
@@ -992,7 +992,7 @@
JDWP::JdwpError Dbg::GetReferringObjects(JDWP::ObjectId object_id, int32_t max_count,
std::vector<JDWP::ObjectId>* referring_objects) {
gc::Heap* heap = Runtime::Current()->GetHeap();
- heap->CollectGarbage(false, gc::GcCause::kGcCauseDebugger);
+ heap->CollectGarbage(/* clear_soft_references */ false, gc::GcCause::kGcCauseDebugger);
JDWP::JdwpError error;
ObjPtr<mirror::Object> o = gRegistry->Get<mirror::Object*>(object_id, &error);
if (o == nullptr) {
diff --git a/runtime/gc/allocation_record.cc b/runtime/gc/allocation_record.cc
index 2ee4239..a1d1986 100644
--- a/runtime/gc/allocation_record.cc
+++ b/runtime/gc/allocation_record.cc
@@ -289,7 +289,7 @@
return;
}
- // Wait for GC's sweeping to complete and allow new records
+ // Wait for GC's sweeping to complete and allow new records.
while (UNLIKELY((!kUseReadBarrier && !allow_new_record_) ||
(kUseReadBarrier && !self->GetWeakRefAccessEnabled()))) {
// Check and run the empty checkpoint before blocking so the empty checkpoint will work in the
diff --git a/runtime/gc/collector/concurrent_copying.cc b/runtime/gc/collector/concurrent_copying.cc
index 3770085..7304697 100644
--- a/runtime/gc/collector/concurrent_copying.cc
+++ b/runtime/gc/collector/concurrent_copying.cc
@@ -1414,7 +1414,8 @@
MarkStackMode mark_stack_mode = mark_stack_mode_.LoadRelaxed();
if (mark_stack_mode == kMarkStackModeThreadLocal) {
// Process the thread-local mark stacks and the GC mark stack.
- count += ProcessThreadLocalMarkStacks(false, nullptr);
+ count += ProcessThreadLocalMarkStacks(/* disable_weak_ref_access */ false,
+ /* checkpoint_callback */ nullptr);
while (!gc_mark_stack_->IsEmpty()) {
mirror::Object* to_ref = gc_mark_stack_->PopBack();
ProcessMarkStackRef(to_ref);
@@ -1602,7 +1603,7 @@
DisableWeakRefAccessCallback dwrac(this);
// Process the thread local mark stacks one last time after switching to the shared mark stack
// mode and disable weak ref accesses.
- ProcessThreadLocalMarkStacks(true, &dwrac);
+ ProcessThreadLocalMarkStacks(/* disable_weak_ref_access */ true, &dwrac);
if (kVerboseMode) {
LOG(INFO) << "Switched to shared mark stack mode and disabled weak ref access";
}
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index cb43601..19b4acd 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -3931,7 +3931,7 @@
if (new_backtrace) {
StackHandleScope<1> hs(self);
auto h = hs.NewHandleWrapper(obj);
- CollectGarbage(false);
+ CollectGarbage(/* clear_soft_references */ false);
unique_backtrace_count_.FetchAndAddSequentiallyConsistent(1);
} else {
seen_backtrace_count_.FetchAndAddSequentiallyConsistent(1);
diff --git a/runtime/gc/heap_test.cc b/runtime/gc/heap_test.cc
index 6d426c2..2def524 100644
--- a/runtime/gc/heap_test.cc
+++ b/runtime/gc/heap_test.cc
@@ -59,7 +59,7 @@
}
}
}
- Runtime::Current()->GetHeap()->CollectGarbage(false);
+ Runtime::Current()->GetHeap()->CollectGarbage(/* clear_soft_references */ false);
}
TEST_F(HeapTest, HeapBitmapCapacityTest) {
diff --git a/runtime/gc/system_weak_test.cc b/runtime/gc/system_weak_test.cc
index dfbbd2a..21f5117 100644
--- a/runtime/gc/system_weak_test.cc
+++ b/runtime/gc/system_weak_test.cc
@@ -142,7 +142,7 @@
cswh.Set(GcRoot<mirror::Object>(s.Get()));
// Trigger a GC.
- Runtime::Current()->GetHeap()->CollectGarbage(false);
+ Runtime::Current()->GetHeap()->CollectGarbage(/* clear_soft_references */ false);
// Expect the holder to have been called.
EXPECT_EQ(CollectorDoesAllowOrBroadcast() ? 1U : 0U, cswh.allow_count_);
@@ -163,7 +163,7 @@
cswh.Set(GcRoot<mirror::Object>(mirror::String::AllocFromModifiedUtf8(soa.Self(), "ABC")));
// Trigger a GC.
- Runtime::Current()->GetHeap()->CollectGarbage(false);
+ Runtime::Current()->GetHeap()->CollectGarbage(/* clear_soft_references */ false);
// Expect the holder to have been called.
EXPECT_EQ(CollectorDoesAllowOrBroadcast() ? 1U : 0U, cswh.allow_count_);
@@ -187,7 +187,7 @@
cswh.Set(GcRoot<mirror::Object>(s.Get()));
// Trigger a GC.
- Runtime::Current()->GetHeap()->CollectGarbage(false);
+ Runtime::Current()->GetHeap()->CollectGarbage(/* clear_soft_references */ false);
// Expect the holder to have been called.
ASSERT_EQ(CollectorDoesAllowOrBroadcast() ? 1U : 0U, cswh.allow_count_);
@@ -202,7 +202,7 @@
Runtime::Current()->RemoveSystemWeakHolder(&cswh);
// Trigger another GC.
- Runtime::Current()->GetHeap()->CollectGarbage(false);
+ Runtime::Current()->GetHeap()->CollectGarbage(/* clear_soft_references */ false);
// Expectation: no change in the numbers.
EXPECT_EQ(CollectorDoesAllowOrBroadcast() ? 1U : 0U, cswh.allow_count_);
diff --git a/runtime/jni_internal_test.cc b/runtime/jni_internal_test.cc
index 6300038..293e18a 100644
--- a/runtime/jni_internal_test.cc
+++ b/runtime/jni_internal_test.cc
@@ -1484,7 +1484,8 @@
jweak weak_global = env_->NewWeakGlobalRef(local_ref);
ASSERT_NE(weak_global, nullptr);
env_->DeleteLocalRef(local_ref);
- Runtime::Current()->GetHeap()->CollectGarbage(false); // GC should clear the weak global.
+ // GC should clear the weak global.
+ Runtime::Current()->GetHeap()->CollectGarbage(/* clear_soft_references */ false);
jobject new_global_ref = env_->NewGlobalRef(weak_global);
EXPECT_EQ(new_global_ref, nullptr);
jobject new_local_ref = env_->NewLocalRef(weak_global);
diff --git a/runtime/signal_catcher.cc b/runtime/signal_catcher.cc
index d9c4da9..9c3afbb 100644
--- a/runtime/signal_catcher.cc
+++ b/runtime/signal_catcher.cc
@@ -207,7 +207,7 @@
void SignalCatcher::HandleSigUsr1() {
LOG(INFO) << "SIGUSR1 forcing GC (no HPROF) and profile save";
- Runtime::Current()->GetHeap()->CollectGarbage(false);
+ Runtime::Current()->GetHeap()->CollectGarbage(/* clear_soft_references */ false);
ProfileSaver::ForceProcessProfiles();
}
diff --git a/runtime/thread_list.h b/runtime/thread_list.h
index 7657fa8..895c1a4 100644
--- a/runtime/thread_list.h
+++ b/runtime/thread_list.h
@@ -68,7 +68,7 @@
bool Resume(Thread* thread, SuspendReason reason = SuspendReason::kInternal)
REQUIRES(!Locks::thread_suspend_count_lock_) WARN_UNUSED;
- // Suspends all threads and gets exclusive access to the mutator_lock_.
+ // Suspends all threads and gets exclusive access to the mutator lock.
// If long_suspend is true, then other threads who try to suspend will never timeout.
// long_suspend is currenly used for hprof since large heaps take a long time.
void SuspendAll(const char* cause, bool long_suspend = false)
@@ -240,7 +240,7 @@
DISALLOW_COPY_AND_ASSIGN(ThreadList);
};
-// Helper for suspending all threads and
+// Helper for suspending all threads and getting exclusive access to the mutator lock.
class ScopedSuspendAll : public ValueObject {
public:
explicit ScopedSuspendAll(const char* cause, bool long_suspend = false)