summary refs log tree commit diff
diff options
context:
space:
mode:
author Mathieu Chartier <mathieuc@google.com> 2015-10-05 19:48:24 +0000
committer Gerrit Code Review <noreply-gerritcodereview@google.com> 2015-10-05 19:48:24 +0000
commit 3a9122a5f9f057e9acb20083cd7e076f3bbf8577 (patch)
tree e2933664bf311e02d4a26ec6b26c38f9bdd96bfa
parent 002117f95896ffa5db74bee808ae61e876b6e8b0 (diff)
parent aa5168291c46f9b418d989bccf2d8e09338a83e6 (diff)
Merge "Add exclusion between instrumentation and GC"
-rw-r--r--  runtime/Android.mk                        1
-rw-r--r--  runtime/debugger.cc                       14
-rw-r--r--  runtime/debugger.h                        2
-rw-r--r--  runtime/gc/collector_type.h               2
-rw-r--r--  runtime/gc/gc_cause.cc                    1
-rw-r--r--  runtime/gc/gc_cause.h                     2
-rw-r--r--  runtime/gc/heap.cc                        12
-rw-r--r--  runtime/gc/heap.h                         3
-rw-r--r--  runtime/gc/scoped_gc_critical_section.cc  41
-rw-r--r--  runtime/gc/scoped_gc_critical_section.h   47
-rw-r--r--  runtime/instrumentation.h                 31
-rw-r--r--  runtime/instrumentation_test.cc           22
-rw-r--r--  runtime/trace.cc                          20
13 files changed, 179 insertions, 19 deletions
diff --git a/runtime/Android.mk b/runtime/Android.mk
index 059c4cdd57..8d81f2a7f6 100644
--- a/runtime/Android.mk
+++ b/runtime/Android.mk
@@ -67,6 +67,7 @@ LIBART_COMMON_SRC_FILES := \
gc/heap.cc \
gc/reference_processor.cc \
gc/reference_queue.cc \
+ gc/scoped_gc_critical_section.cc \
gc/space/bump_pointer_space.cc \
gc/space/dlmalloc_space.cc \
gc/space/image_space.cc \
diff --git a/runtime/debugger.cc b/runtime/debugger.cc
index d24b4fb542..b19381d879 100644
--- a/runtime/debugger.cc
+++ b/runtime/debugger.cc
@@ -30,6 +30,7 @@
#include "dex_instruction.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/allocation_record.h"
+#include "gc/scoped_gc_critical_section.h"
#include "gc/space/large_object_space.h"
#include "gc/space/space-inl.h"
#include "handle_scope.h"
@@ -559,14 +560,15 @@ void Dbg::GoActive() {
return;
}
+ Thread* const self = Thread::Current();
{
// TODO: dalvik only warned if there were breakpoints left over. clear in Dbg::Disconnected?
- ReaderMutexLock mu(Thread::Current(), *Locks::breakpoint_lock_);
+ ReaderMutexLock mu(self, *Locks::breakpoint_lock_);
CHECK_EQ(gBreakpoints.size(), 0U);
}
{
- MutexLock mu(Thread::Current(), *Locks::deoptimization_lock_);
+ MutexLock mu(self, *Locks::deoptimization_lock_);
CHECK_EQ(deoptimization_requests_.size(), 0U);
CHECK_EQ(full_deoptimization_event_count_, 0U);
CHECK_EQ(dex_pc_change_event_ref_count_, 0U);
@@ -598,6 +600,10 @@ void Dbg::Disconnected() {
Runtime* runtime = Runtime::Current();
Thread* self = Thread::Current();
{
+ // Required for DisableDeoptimization.
+ gc::ScopedGCCriticalSection gcs(self,
+ gc::kGcCauseInstrumentation,
+ gc::kCollectorTypeInstrumentation);
ScopedSuspendAll ssa(__FUNCTION__);
ThreadState old_state = self->SetStateUnsafe(kRunnable);
// Debugger may not be active at this point.
@@ -3162,6 +3168,10 @@ void Dbg::ManageDeoptimization() {
}
CHECK_EQ(self->GetState(), kRunnable);
ScopedThreadSuspension sts(self, kWaitingForDeoptimization);
+ // Required for ProcessDeoptimizationRequest.
+ gc::ScopedGCCriticalSection gcs(self,
+ gc::kGcCauseInstrumentation,
+ gc::kCollectorTypeInstrumentation);
// We need to suspend mutator threads first.
ScopedSuspendAll ssa(__FUNCTION__);
const ThreadState old_state = self->SetStateUnsafe(kRunnable);
diff --git a/runtime/debugger.h b/runtime/debugger.h
index b4d42de2bd..b3617e4bbb 100644
--- a/runtime/debugger.h
+++ b/runtime/debugger.h
@@ -731,7 +731,7 @@ class Dbg {
SHARED_REQUIRES(Locks::mutator_lock_);
static void ProcessDeoptimizationRequest(const DeoptimizationRequest& request)
- REQUIRES(Locks::mutator_lock_);
+ REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_);
static void RequestDeoptimizationLocked(const DeoptimizationRequest& req)
REQUIRES(Locks::deoptimization_lock_) SHARED_REQUIRES(Locks::mutator_lock_);
diff --git a/runtime/gc/collector_type.h b/runtime/gc/collector_type.h
index 95ba380a01..416510d73b 100644
--- a/runtime/gc/collector_type.h
+++ b/runtime/gc/collector_type.h
@@ -40,6 +40,8 @@ enum CollectorType {
kCollectorTypeHeapTrim,
// A (mostly) concurrent copying collector.
kCollectorTypeCC,
+ // Instrumentation critical section fake collector.
+ kCollectorTypeInstrumentation,
// A homogeneous space compaction collector used in background transition
// when both foreground and background collector are CMS.
kCollectorTypeHomogeneousSpaceCompact,
diff --git a/runtime/gc/gc_cause.cc b/runtime/gc/gc_cause.cc
index 6be683df48..84243dfe1e 100644
--- a/runtime/gc/gc_cause.cc
+++ b/runtime/gc/gc_cause.cc
@@ -33,6 +33,7 @@ const char* PrettyCause(GcCause cause) {
case kGcCauseDisableMovingGc: return "DisableMovingGc";
case kGcCauseHomogeneousSpaceCompact: return "HomogeneousSpaceCompact";
case kGcCauseTrim: return "HeapTrim";
+ case kGcCauseInstrumentation: return "Instrumentation";
default:
LOG(FATAL) << "Unreachable";
UNREACHABLE();
diff --git a/runtime/gc/gc_cause.h b/runtime/gc/gc_cause.h
index 0536f32df9..34c776622a 100644
--- a/runtime/gc/gc_cause.h
+++ b/runtime/gc/gc_cause.h
@@ -39,6 +39,8 @@ enum GcCause {
kGcCauseDisableMovingGc,
// Not a real GC cause, used when we trim the heap.
kGcCauseTrim,
+ // Not a real GC cause, used to implement exclusion between GC and instrumentation.
+ kGcCauseInstrumentation,
// GC triggered for background transition when both foreground and background collector are CMS.
kGcCauseHomogeneousSpaceCompact,
};
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 7d664faa40..657fcb5f08 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -1312,6 +1312,13 @@ void Heap::TrimIndirectReferenceTables(Thread* self) {
ATRACE_END();
}
+void Heap::StartGC(Thread* self, GcCause cause, CollectorType collector_type) {
+ MutexLock mu(self, *gc_complete_lock_);
+ // Ensure there is only one GC at a time.
+ WaitForGcToCompleteLocked(cause, self);
+ collector_type_running_ = collector_type;
+}
+
void Heap::TrimSpaces(Thread* self) {
{
// Need to do this before acquiring the locks since we don't want to get suspended while
@@ -1319,10 +1326,7 @@ void Heap::TrimSpaces(Thread* self) {
ScopedThreadStateChange tsc(self, kWaitingForGcToComplete);
// Pretend we are doing a GC to prevent background compaction from deleting the space we are
// trimming.
- MutexLock mu(self, *gc_complete_lock_);
- // Ensure there is only one GC at a time.
- WaitForGcToCompleteLocked(kGcCauseTrim, self);
- collector_type_running_ = kCollectorTypeHeapTrim;
+ StartGC(self, kGcCauseTrim, kCollectorTypeHeapTrim);
}
ATRACE_BEGIN(__FUNCTION__);
const uint64_t start_ns = NanoTime();
diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h
index d0d0be3826..cc48172f71 100644
--- a/runtime/gc/heap.h
+++ b/runtime/gc/heap.h
@@ -775,6 +775,8 @@ class Heap {
REQUIRES(Locks::mutator_lock_);
void LogGC(GcCause gc_cause, collector::GarbageCollector* collector);
+ void StartGC(Thread* self, GcCause cause, CollectorType collector_type)
+ REQUIRES(!*gc_complete_lock_);
void FinishGC(Thread* self, collector::GcType gc_type) REQUIRES(!*gc_complete_lock_);
// Create a mem map with a preferred base address.
@@ -1325,6 +1327,7 @@ class Heap {
friend class collector::MarkSweep;
friend class collector::SemiSpace;
friend class ReferenceQueue;
+ friend class ScopedGCCriticalSection;
friend class VerifyReferenceCardVisitor;
friend class VerifyReferenceVisitor;
friend class VerifyObjectVisitor;
diff --git a/runtime/gc/scoped_gc_critical_section.cc b/runtime/gc/scoped_gc_critical_section.cc
new file mode 100644
index 0000000000..e7786a1546
--- /dev/null
+++ b/runtime/gc/scoped_gc_critical_section.cc
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "scoped_gc_critical_section.h"
+
+#include "gc/collector_type.h"
+#include "gc/heap.h"
+#include "runtime.h"
+#include "thread-inl.h"
+
+namespace art {
+namespace gc {
+
+ScopedGCCriticalSection::ScopedGCCriticalSection(Thread* self,
+ GcCause cause,
+ CollectorType collector_type)
+ : self_(self) {
+ Runtime::Current()->GetHeap()->StartGC(self, cause, collector_type);
+ old_cause_ = self->StartAssertNoThreadSuspension("ScopedGCCriticalSection");
+}
+ScopedGCCriticalSection::~ScopedGCCriticalSection() {
+ self_->EndAssertNoThreadSuspension(old_cause_);
+ Runtime::Current()->GetHeap()->FinishGC(self_, collector::kGcTypeNone);
+}
+
+} // namespace gc
+} // namespace art
+
diff --git a/runtime/gc/scoped_gc_critical_section.h b/runtime/gc/scoped_gc_critical_section.h
new file mode 100644
index 0000000000..ec93bca802
--- /dev/null
+++ b/runtime/gc/scoped_gc_critical_section.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_GC_SCOPED_GC_CRITICAL_SECTION_H_
+#define ART_RUNTIME_GC_SCOPED_GC_CRITICAL_SECTION_H_
+
+#include "base/mutex.h"
+#include "collector_type.h"
+#include "gc_cause.h"
+
+namespace art {
+
+class Thread;
+
+namespace gc {
+
+// Wait until the GC is finished and then prevent GC from starting until the destructor. Used
+// to prevent deadlocks in places where we call ClassLinker::VisitClass with all the threads
+// suspended.
+class ScopedGCCriticalSection {
+ public:
+ ScopedGCCriticalSection(Thread* self, GcCause cause, CollectorType collector_type)
+ ACQUIRE(Roles::uninterruptible_);
+ ~ScopedGCCriticalSection() RELEASE(Roles::uninterruptible_);
+
+ private:
+ Thread* const self_;
+ const char* old_cause_;
+};
+
+} // namespace gc
+} // namespace art
+
+#endif // ART_RUNTIME_GC_SCOPED_GC_CRITICAL_SECTION_H_
diff --git a/runtime/instrumentation.h b/runtime/instrumentation.h
index 80460562eb..612ca14cf5 100644
--- a/runtime/instrumentation.h
+++ b/runtime/instrumentation.h
@@ -146,9 +146,13 @@ class Instrumentation {
// Deoptimization.
void EnableDeoptimization()
- REQUIRES(Locks::mutator_lock_, !deoptimized_methods_lock_);
+ REQUIRES(Locks::mutator_lock_)
+ REQUIRES(!deoptimized_methods_lock_);
+ // Calls UndeoptimizeEverything which may visit class linker classes through ConfigureStubs.
void DisableDeoptimization(const char* key)
- REQUIRES(Locks::mutator_lock_, !deoptimized_methods_lock_);
+ REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
+ REQUIRES(!deoptimized_methods_lock_);
+
bool AreAllMethodsDeoptimized() const {
return interpreter_stubs_installed_;
}
@@ -156,12 +160,17 @@ class Instrumentation {
// Executes everything with interpreter.
void DeoptimizeEverything(const char* key)
- REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_,
+ REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
+ REQUIRES(!Locks::thread_list_lock_,
+ !Locks::classlinker_classes_lock_,
!deoptimized_methods_lock_);
- // Executes everything with compiled code (or interpreter if there is no code).
+ // Executes everything with compiled code (or interpreter if there is no code). May visit class
+ // linker classes through ConfigureStubs.
void UndeoptimizeEverything(const char* key)
- REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_,
+ REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
+ REQUIRES(!Locks::thread_list_lock_,
+ !Locks::classlinker_classes_lock_,
!deoptimized_methods_lock_);
// Deoptimize a method by forcing its execution with the interpreter. Nevertheless, a static
@@ -183,12 +192,16 @@ class Instrumentation {
// Enable method tracing by installing instrumentation entry/exit stubs or interpreter.
void EnableMethodTracing(const char* key,
bool needs_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners)
- REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_,
+ REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
+ REQUIRES(!Locks::thread_list_lock_,
+ !Locks::classlinker_classes_lock_,
!deoptimized_methods_lock_);
// Disable method tracing by uninstalling instrumentation entry/exit stubs or interpreter.
void DisableMethodTracing(const char* key)
- REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_,
+ REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
+ REQUIRES(!Locks::thread_list_lock_,
+ !Locks::classlinker_classes_lock_,
!deoptimized_methods_lock_);
InterpreterHandlerTable GetInterpreterHandlerTable() const
@@ -393,7 +406,9 @@ class Instrumentation {
// instrumentation level it needs. Therefore the current instrumentation level
// becomes the highest instrumentation level required by a client.
void ConfigureStubs(const char* key, InstrumentationLevel desired_instrumentation_level)
- REQUIRES(Locks::mutator_lock_, !deoptimized_methods_lock_, !Locks::thread_list_lock_,
+ REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
+ REQUIRES(!deoptimized_methods_lock_,
+ !Locks::thread_list_lock_,
!Locks::classlinker_classes_lock_);
void UpdateInterpreterHandlerTable() REQUIRES(Locks::mutator_lock_) {
diff --git a/runtime/instrumentation_test.cc b/runtime/instrumentation_test.cc
index d98d246914..e4688a21dd 100644
--- a/runtime/instrumentation_test.cc
+++ b/runtime/instrumentation_test.cc
@@ -20,6 +20,7 @@
#include "common_throws.h"
#include "class_linker-inl.h"
#include "dex_file.h"
+#include "gc/scoped_gc_critical_section.h"
#include "handle_scope-inl.h"
#include "jvalue.h"
#include "runtime.h"
@@ -151,6 +152,9 @@ class InstrumentationTest : public CommonRuntimeTest {
ScopedObjectAccess soa(Thread::Current());
instrumentation::Instrumentation* instr = Runtime::Current()->GetInstrumentation();
ScopedThreadSuspension sts(soa.Self(), kSuspended);
+ gc::ScopedGCCriticalSection gcs(soa.Self(),
+ gc::kGcCauseInstrumentation,
+ gc::kCollectorTypeInstrumentation);
ScopedSuspendAll ssa("Instrumentation::ConfigureStubs");
instr->ConfigureStubs(key, level);
}
@@ -203,6 +207,9 @@ class InstrumentationTest : public CommonRuntimeTest {
Runtime* runtime = Runtime::Current();
instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation();
ScopedThreadSuspension sts(self, kSuspended);
+ gc::ScopedGCCriticalSection gcs(self,
+ gc::kGcCauseInstrumentation,
+ gc::kCollectorTypeInstrumentation);
ScopedSuspendAll ssa("Single method deoptimization");
if (enable_deoptimization) {
instrumentation->EnableDeoptimization();
@@ -216,6 +223,9 @@ class InstrumentationTest : public CommonRuntimeTest {
Runtime* runtime = Runtime::Current();
instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation();
ScopedThreadSuspension sts(self, kSuspended);
+ gc::ScopedGCCriticalSection gcs(self,
+ gc::kGcCauseInstrumentation,
+ gc::kCollectorTypeInstrumentation);
ScopedSuspendAll ssa("Single method undeoptimization");
instrumentation->Undeoptimize(method);
if (disable_deoptimization) {
@@ -228,6 +238,9 @@ class InstrumentationTest : public CommonRuntimeTest {
Runtime* runtime = Runtime::Current();
instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation();
ScopedThreadSuspension sts(self, kSuspended);
+ gc::ScopedGCCriticalSection gcs(self,
+ gc::kGcCauseInstrumentation,
+ gc::kCollectorTypeInstrumentation);
ScopedSuspendAll ssa("Full deoptimization");
if (enable_deoptimization) {
instrumentation->EnableDeoptimization();
@@ -240,6 +253,9 @@ class InstrumentationTest : public CommonRuntimeTest {
Runtime* runtime = Runtime::Current();
instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation();
ScopedThreadSuspension sts(self, kSuspended);
+ gc::ScopedGCCriticalSection gcs(self,
+ gc::kGcCauseInstrumentation,
+ gc::kCollectorTypeInstrumentation);
ScopedSuspendAll ssa("Full undeoptimization");
instrumentation->UndeoptimizeEverything(key);
if (disable_deoptimization) {
@@ -252,6 +268,9 @@ class InstrumentationTest : public CommonRuntimeTest {
Runtime* runtime = Runtime::Current();
instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation();
ScopedThreadSuspension sts(self, kSuspended);
+ gc::ScopedGCCriticalSection gcs(self,
+ gc::kGcCauseInstrumentation,
+ gc::kCollectorTypeInstrumentation);
ScopedSuspendAll ssa("EnableMethodTracing");
instrumentation->EnableMethodTracing(key, needs_interpreter);
}
@@ -261,6 +280,9 @@ class InstrumentationTest : public CommonRuntimeTest {
Runtime* runtime = Runtime::Current();
instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation();
ScopedThreadSuspension sts(self, kSuspended);
+ gc::ScopedGCCriticalSection gcs(self,
+ gc::kGcCauseInstrumentation,
+ gc::kCollectorTypeInstrumentation);
ScopedSuspendAll ssa("EnableMethodTracing");
instrumentation->DisableMethodTracing(key);
}
diff --git a/runtime/trace.cc b/runtime/trace.cc
index e2743ceb13..eb0ea6843e 100644
--- a/runtime/trace.cc
+++ b/runtime/trace.cc
@@ -31,6 +31,7 @@
#include "common_throws.h"
#include "debugger.h"
#include "dex_file-inl.h"
+#include "gc/scoped_gc_critical_section.h"
#include "instrumentation.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache-inl.h"
@@ -350,6 +351,10 @@ void Trace::Start(const char* trace_filename, int trace_fd, size_t buffer_size,
// Create Trace object.
{
+ // Required since EnableMethodTracing calls ConfigureStubs which visits class linker classes.
+ gc::ScopedGCCriticalSection gcs(self,
+ gc::kGcCauseInstrumentation,
+ gc::kCollectorTypeInstrumentation);
ScopedSuspendAll ssa(__FUNCTION__);
MutexLock mu(self, *Locks::trace_lock_);
if (the_trace_ != nullptr) {
@@ -464,9 +469,10 @@ void Trace::Pause() {
Runtime* runtime = Runtime::Current();
Trace* the_trace = nullptr;
+ Thread* const self = Thread::Current();
pthread_t sampling_pthread = 0U;
{
- MutexLock mu(Thread::Current(), *Locks::trace_lock_);
+ MutexLock mu(self, *Locks::trace_lock_);
if (the_trace_ == nullptr) {
LOG(ERROR) << "Trace pause requested, but no trace currently running";
return;
@@ -478,23 +484,26 @@ void Trace::Pause() {
if (sampling_pthread != 0U) {
{
- MutexLock mu(Thread::Current(), *Locks::trace_lock_);
+ MutexLock mu(self, *Locks::trace_lock_);
the_trace_ = nullptr;
}
CHECK_PTHREAD_CALL(pthread_join, (sampling_pthread, nullptr), "sampling thread shutdown");
sampling_pthread_ = 0U;
{
- MutexLock mu(Thread::Current(), *Locks::trace_lock_);
+ MutexLock mu(self, *Locks::trace_lock_);
the_trace_ = the_trace;
}
}
if (the_trace != nullptr) {
+ gc::ScopedGCCriticalSection gcs(self,
+ gc::kGcCauseInstrumentation,
+ gc::kCollectorTypeInstrumentation);
ScopedSuspendAll ssa(__FUNCTION__);
stop_alloc_counting = (the_trace->flags_ & Trace::kTraceCountAllocs) != 0;
if (the_trace->trace_mode_ == TraceMode::kSampling) {
- MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
+ MutexLock mu(self, *Locks::thread_list_lock_);
runtime->GetThreadList()->ForEach(ClearThreadStackTraceAndClockBase, nullptr);
} else {
runtime->GetInstrumentation()->DisableMethodTracing(kTracerInstrumentationKey);
@@ -530,6 +539,9 @@ void Trace::Resume() {
bool enable_stats = (the_trace->flags_ && kTraceCountAllocs) != 0;
{
+ gc::ScopedGCCriticalSection gcs(self,
+ gc::kGcCauseInstrumentation,
+ gc::kCollectorTypeInstrumentation);
ScopedSuspendAll ssa(__FUNCTION__);
// Reenable.