Merge "MIPS64r6 Assembler Tests"
diff --git a/runtime/Android.mk b/runtime/Android.mk
index 059c4cd..8d81f2a 100644
--- a/runtime/Android.mk
+++ b/runtime/Android.mk
@@ -67,6 +67,7 @@
   gc/heap.cc \
   gc/reference_processor.cc \
   gc/reference_queue.cc \
+  gc/scoped_gc_critical_section.cc \
   gc/space/bump_pointer_space.cc \
   gc/space/dlmalloc_space.cc \
   gc/space/image_space.cc \
diff --git a/runtime/class_linker.cc b/runtime/class_linker.cc
index b0590e2..acb39c5 100644
--- a/runtime/class_linker.cc
+++ b/runtime/class_linker.cc
@@ -79,6 +79,7 @@
 #include "scoped_thread_state_change.h"
 #include "handle_scope-inl.h"
 #include "thread-inl.h"
+#include "trace.h"
 #include "utils.h"
 #include "utils/dex_cache_arrays_layout-inl.h"
 #include "verifier/method_verifier.h"
@@ -1299,6 +1300,9 @@
 }
 
 void ClassLinker::VisitClassRoots(RootVisitor* visitor, VisitRootFlags flags) {
+  // Acquire tracing_enabled before locking the class linker lock to prevent a lock order
+  // violation. Since enabling tracing requires the mutator lock, there are no race conditions here.
+  const bool tracing_enabled = Trace::IsTracingEnabled();
   Thread* const self = Thread::Current();
   WriterMutexLock mu(self, *Locks::classlinker_classes_lock_);
   BufferedRootVisitor<kDefaultBufferedRootCount> buffered_visitor(
@@ -1320,6 +1324,14 @@
     // Need to make sure to not copy ArtMethods without doing read barriers since the roots are
     // marked concurrently and we don't hold the classlinker_classes_lock_ when we do the copy.
     boot_class_table_.VisitRoots(buffered_visitor);
+
+    // If tracing is enabled, then mark all the class loaders to prevent unloading.
+    if (tracing_enabled) {
+      for (const ClassLoaderData& data : class_loaders_) {
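+        // Visiting the class loader's weak root as a strong root here is what keeps it, and the
+        // classes it defined, from being unloaded while tracing is active.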
+        GcRoot<mirror::Object> root(self->DecodeJObject(data.weak_root));
+        root.VisitRoot(visitor, RootInfo(kRootVMInternal));
+      }
+    }
   } else if ((flags & kVisitRootFlagNewRoots) != 0) {
     for (auto& root : new_class_roots_) {
       mirror::Class* old_ref = root.Read<kWithoutReadBarrier>();
diff --git a/runtime/debugger.cc b/runtime/debugger.cc
index d24b4fb..b19381d 100644
--- a/runtime/debugger.cc
+++ b/runtime/debugger.cc
@@ -30,6 +30,7 @@
 #include "dex_instruction.h"
 #include "gc/accounting/card_table-inl.h"
 #include "gc/allocation_record.h"
+#include "gc/scoped_gc_critical_section.h"
 #include "gc/space/large_object_space.h"
 #include "gc/space/space-inl.h"
 #include "handle_scope.h"
@@ -559,14 +560,15 @@
     return;
   }
 
+  Thread* const self = Thread::Current();
   {
     // TODO: dalvik only warned if there were breakpoints left over. clear in Dbg::Disconnected?
-    ReaderMutexLock mu(Thread::Current(), *Locks::breakpoint_lock_);
+    ReaderMutexLock mu(self, *Locks::breakpoint_lock_);
     CHECK_EQ(gBreakpoints.size(), 0U);
   }
 
   {
-    MutexLock mu(Thread::Current(), *Locks::deoptimization_lock_);
+    MutexLock mu(self, *Locks::deoptimization_lock_);
     CHECK_EQ(deoptimization_requests_.size(), 0U);
     CHECK_EQ(full_deoptimization_event_count_, 0U);
     CHECK_EQ(dex_pc_change_event_ref_count_, 0U);
@@ -598,6 +600,10 @@
   Runtime* runtime = Runtime::Current();
   Thread* self = Thread::Current();
   {
+    // Required for DisableDeoptimization.
+    gc::ScopedGCCriticalSection gcs(self,
+                                    gc::kGcCauseInstrumentation,
+                                    gc::kCollectorTypeInstrumentation);
     ScopedSuspendAll ssa(__FUNCTION__);
     ThreadState old_state = self->SetStateUnsafe(kRunnable);
     // Debugger may not be active at this point.
@@ -3162,6 +3168,10 @@
   }
   CHECK_EQ(self->GetState(), kRunnable);
   ScopedThreadSuspension sts(self, kWaitingForDeoptimization);
+  // Required for ProcessDeoptimizationRequest.
+  gc::ScopedGCCriticalSection gcs(self,
+                                  gc::kGcCauseInstrumentation,
+                                  gc::kCollectorTypeInstrumentation);
   // We need to suspend mutator threads first.
   ScopedSuspendAll ssa(__FUNCTION__);
   const ThreadState old_state = self->SetStateUnsafe(kRunnable);
diff --git a/runtime/debugger.h b/runtime/debugger.h
index b4d42de..b3617e4 100644
--- a/runtime/debugger.h
+++ b/runtime/debugger.h
@@ -731,7 +731,7 @@
       SHARED_REQUIRES(Locks::mutator_lock_);
 
   static void ProcessDeoptimizationRequest(const DeoptimizationRequest& request)
-      REQUIRES(Locks::mutator_lock_);
+      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_);
 
   static void RequestDeoptimizationLocked(const DeoptimizationRequest& req)
       REQUIRES(Locks::deoptimization_lock_) SHARED_REQUIRES(Locks::mutator_lock_);
diff --git a/runtime/gc/collector_type.h b/runtime/gc/collector_type.h
index 95ba380..416510d 100644
--- a/runtime/gc/collector_type.h
+++ b/runtime/gc/collector_type.h
@@ -40,6 +40,8 @@
   kCollectorTypeHeapTrim,
   // A (mostly) concurrent copying collector.
   kCollectorTypeCC,
+  // Instrumentation critical section fake collector.
+  kCollectorTypeInstrumentation,
   // A homogeneous space compaction collector used in background transition
   // when both foreground and background collector are CMS.
   kCollectorTypeHomogeneousSpaceCompact,
diff --git a/runtime/gc/gc_cause.cc b/runtime/gc/gc_cause.cc
index 6be683d..84243df 100644
--- a/runtime/gc/gc_cause.cc
+++ b/runtime/gc/gc_cause.cc
@@ -33,6 +33,7 @@
     case kGcCauseDisableMovingGc: return "DisableMovingGc";
     case kGcCauseHomogeneousSpaceCompact: return "HomogeneousSpaceCompact";
     case kGcCauseTrim: return "HeapTrim";
+    case kGcCauseInstrumentation: return "Instrumentation";
     default:
       LOG(FATAL) << "Unreachable";
       UNREACHABLE();
diff --git a/runtime/gc/gc_cause.h b/runtime/gc/gc_cause.h
index 0536f32d..34c7766 100644
--- a/runtime/gc/gc_cause.h
+++ b/runtime/gc/gc_cause.h
@@ -39,6 +39,8 @@
   kGcCauseDisableMovingGc,
   // Not a real GC cause, used when we trim the heap.
   kGcCauseTrim,
+  // Not a real GC cause, used to implement exclusion between GC and instrumentation.
+  kGcCauseInstrumentation,
   // GC triggered for background transition when both foreground and background collector are CMS.
   kGcCauseHomogeneousSpaceCompact,
 };
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 7d664fa..657fcb5 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -1312,6 +1312,13 @@
   ATRACE_END();
 }
 
+void Heap::StartGC(Thread* self, GcCause cause, CollectorType collector_type) {
+  MutexLock mu(self, *gc_complete_lock_);
+  // Ensure there is only one GC at a time.
+  WaitForGcToCompleteLocked(cause, self);
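+  // Record the (possibly fake) collector type as running; FinishGC clears it and wakes waiters.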
+  collector_type_running_ = collector_type;
+}
+
 void Heap::TrimSpaces(Thread* self) {
   {
     // Need to do this before acquiring the locks since we don't want to get suspended while
@@ -1319,10 +1326,7 @@
     ScopedThreadStateChange tsc(self, kWaitingForGcToComplete);
     // Pretend we are doing a GC to prevent background compaction from deleting the space we are
     // trimming.
-    MutexLock mu(self, *gc_complete_lock_);
-    // Ensure there is only one GC at a time.
-    WaitForGcToCompleteLocked(kGcCauseTrim, self);
-    collector_type_running_ = kCollectorTypeHeapTrim;
+    StartGC(self, kGcCauseTrim, kCollectorTypeHeapTrim);
   }
   ATRACE_BEGIN(__FUNCTION__);
   const uint64_t start_ns = NanoTime();
diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h
index d0d0be3..cc48172 100644
--- a/runtime/gc/heap.h
+++ b/runtime/gc/heap.h
@@ -775,6 +775,8 @@
       REQUIRES(Locks::mutator_lock_);
 
   void LogGC(GcCause gc_cause, collector::GarbageCollector* collector);
+  void StartGC(Thread* self, GcCause cause, CollectorType collector_type)
+      REQUIRES(!*gc_complete_lock_);
   void FinishGC(Thread* self, collector::GcType gc_type) REQUIRES(!*gc_complete_lock_);
 
   // Create a mem map with a preferred base address.
@@ -1325,6 +1327,7 @@
   friend class collector::MarkSweep;
   friend class collector::SemiSpace;
   friend class ReferenceQueue;
+  friend class ScopedGCCriticalSection;
   friend class VerifyReferenceCardVisitor;
   friend class VerifyReferenceVisitor;
   friend class VerifyObjectVisitor;
diff --git a/runtime/gc/scoped_gc_critical_section.cc b/runtime/gc/scoped_gc_critical_section.cc
new file mode 100644
index 0000000..e7786a1
--- /dev/null
+++ b/runtime/gc/scoped_gc_critical_section.cc
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "scoped_gc_critical_section.h"
+
+#include "gc/collector_type.h"
+#include "gc/heap.h"
+#include "runtime.h"
+#include "thread-inl.h"
+
+namespace art {
+namespace gc {
+
+ScopedGCCriticalSection::ScopedGCCriticalSection(Thread* self,
+                                                 GcCause cause,
+                                                 CollectorType collector_type)
+    : self_(self) {
+  Runtime::Current()->GetHeap()->StartGC(self, cause, collector_type);
+  old_cause_ = self->StartAssertNoThreadSuspension("ScopedGCCriticalSection");
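+  // Between here and the destructor, this thread must not suspend and no new GC can start.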
+}
+
+ScopedGCCriticalSection::~ScopedGCCriticalSection() {
+  self_->EndAssertNoThreadSuspension(old_cause_);
+  Runtime::Current()->GetHeap()->FinishGC(self_, collector::kGcTypeNone);
+}
+
+}  // namespace gc
+}  // namespace art
+
diff --git a/runtime/gc/scoped_gc_critical_section.h b/runtime/gc/scoped_gc_critical_section.h
new file mode 100644
index 0000000..ec93bca
--- /dev/null
+++ b/runtime/gc/scoped_gc_critical_section.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ART_RUNTIME_GC_SCOPED_GC_CRITICAL_SECTION_H_
+#define ART_RUNTIME_GC_SCOPED_GC_CRITICAL_SECTION_H_
+
+#include "base/mutex.h"
+#include "collector_type.h"
+#include "gc_cause.h"
+
+namespace art {
+
+class Thread;
+
+namespace gc {
+
+// Wait until the GC is finished and then prevent the GC from starting until the destructor runs.
+// Used to prevent deadlocks in places where we call ClassLinker::VisitClasses with all the
+// threads suspended.
+class ScopedGCCriticalSection {
+ public:
+  ScopedGCCriticalSection(Thread* self, GcCause cause, CollectorType collector_type)
+      ACQUIRE(Roles::uninterruptible_);
+  ~ScopedGCCriticalSection() RELEASE(Roles::uninterruptible_);
+
+ private:
+  Thread* const self_;
+  const char* old_cause_;
+};
+
+}  // namespace gc
+}  // namespace art
+
+#endif  // ART_RUNTIME_GC_SCOPED_GC_CRITICAL_SECTION_H_
diff --git a/runtime/instrumentation.h b/runtime/instrumentation.h
index 8046056..612ca14 100644
--- a/runtime/instrumentation.h
+++ b/runtime/instrumentation.h
@@ -146,9 +146,13 @@
 
   // Deoptimization.
   void EnableDeoptimization()
-      REQUIRES(Locks::mutator_lock_, !deoptimized_methods_lock_);
+      REQUIRES(Locks::mutator_lock_)
+      REQUIRES(!deoptimized_methods_lock_);
+  // Calls UndeoptimizeEverything which may visit class linker classes through ConfigureStubs.
   void DisableDeoptimization(const char* key)
-      REQUIRES(Locks::mutator_lock_, !deoptimized_methods_lock_);
+      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
+      REQUIRES(!deoptimized_methods_lock_);
+
   bool AreAllMethodsDeoptimized() const {
     return interpreter_stubs_installed_;
   }
@@ -156,12 +160,17 @@
 
   // Executes everything with interpreter.
   void DeoptimizeEverything(const char* key)
-      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_,
+      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
+      REQUIRES(!Locks::thread_list_lock_,
+               !Locks::classlinker_classes_lock_,
                !deoptimized_methods_lock_);
 
-  // Executes everything with compiled code (or interpreter if there is no code).
+  // Executes everything with compiled code (or interpreter if there is no code). May visit class
+  // linker classes through ConfigureStubs.
   void UndeoptimizeEverything(const char* key)
-      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_,
+      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
+      REQUIRES(!Locks::thread_list_lock_,
+               !Locks::classlinker_classes_lock_,
                !deoptimized_methods_lock_);
 
   // Deoptimize a method by forcing its execution with the interpreter. Nevertheless, a static
@@ -183,12 +192,16 @@
   // Enable method tracing by installing instrumentation entry/exit stubs or interpreter.
   void EnableMethodTracing(const char* key,
                            bool needs_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners)
-      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_,
+      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
+      REQUIRES(!Locks::thread_list_lock_,
+               !Locks::classlinker_classes_lock_,
                !deoptimized_methods_lock_);
 
   // Disable method tracing by uninstalling instrumentation entry/exit stubs or interpreter.
   void DisableMethodTracing(const char* key)
-      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_,
+      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
+      REQUIRES(!Locks::thread_list_lock_,
+               !Locks::classlinker_classes_lock_,
                !deoptimized_methods_lock_);
 
   InterpreterHandlerTable GetInterpreterHandlerTable() const
@@ -393,7 +406,9 @@
   // instrumentation level it needs. Therefore the current instrumentation level
   // becomes the highest instrumentation level required by a client.
   void ConfigureStubs(const char* key, InstrumentationLevel desired_instrumentation_level)
-      REQUIRES(Locks::mutator_lock_, !deoptimized_methods_lock_, !Locks::thread_list_lock_,
+      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
+      REQUIRES(!deoptimized_methods_lock_,
+               !Locks::thread_list_lock_,
                !Locks::classlinker_classes_lock_);
 
   void UpdateInterpreterHandlerTable() REQUIRES(Locks::mutator_lock_) {
diff --git a/runtime/instrumentation_test.cc b/runtime/instrumentation_test.cc
index d98d246..e4688a2 100644
--- a/runtime/instrumentation_test.cc
+++ b/runtime/instrumentation_test.cc
@@ -20,6 +20,7 @@
 #include "common_throws.h"
 #include "class_linker-inl.h"
 #include "dex_file.h"
+#include "gc/scoped_gc_critical_section.h"
 #include "handle_scope-inl.h"
 #include "jvalue.h"
 #include "runtime.h"
@@ -151,6 +152,9 @@
     ScopedObjectAccess soa(Thread::Current());
     instrumentation::Instrumentation* instr = Runtime::Current()->GetInstrumentation();
     ScopedThreadSuspension sts(soa.Self(), kSuspended);
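+    // ConfigureStubs visits class linker classes, so it must be exclusive with the GC.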
+    gc::ScopedGCCriticalSection gcs(soa.Self(),
+                                    gc::kGcCauseInstrumentation,
+                                    gc::kCollectorTypeInstrumentation);
     ScopedSuspendAll ssa("Instrumentation::ConfigureStubs");
     instr->ConfigureStubs(key, level);
   }
@@ -203,6 +207,9 @@
     Runtime* runtime = Runtime::Current();
     instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation();
     ScopedThreadSuspension sts(self, kSuspended);
+    gc::ScopedGCCriticalSection gcs(self,
+                                    gc::kGcCauseInstrumentation,
+                                    gc::kCollectorTypeInstrumentation);
     ScopedSuspendAll ssa("Single method deoptimization");
     if (enable_deoptimization) {
       instrumentation->EnableDeoptimization();
@@ -216,6 +223,9 @@
     Runtime* runtime = Runtime::Current();
     instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation();
     ScopedThreadSuspension sts(self, kSuspended);
+    gc::ScopedGCCriticalSection gcs(self,
+                                    gc::kGcCauseInstrumentation,
+                                    gc::kCollectorTypeInstrumentation);
     ScopedSuspendAll ssa("Single method undeoptimization");
     instrumentation->Undeoptimize(method);
     if (disable_deoptimization) {
@@ -228,6 +238,9 @@
     Runtime* runtime = Runtime::Current();
     instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation();
     ScopedThreadSuspension sts(self, kSuspended);
+    gc::ScopedGCCriticalSection gcs(self,
+                                    gc::kGcCauseInstrumentation,
+                                    gc::kCollectorTypeInstrumentation);
     ScopedSuspendAll ssa("Full deoptimization");
     if (enable_deoptimization) {
       instrumentation->EnableDeoptimization();
@@ -240,6 +253,9 @@
     Runtime* runtime = Runtime::Current();
     instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation();
     ScopedThreadSuspension sts(self, kSuspended);
+    gc::ScopedGCCriticalSection gcs(self,
+                                    gc::kGcCauseInstrumentation,
+                                    gc::kCollectorTypeInstrumentation);
     ScopedSuspendAll ssa("Full undeoptimization");
     instrumentation->UndeoptimizeEverything(key);
     if (disable_deoptimization) {
@@ -252,6 +268,9 @@
     Runtime* runtime = Runtime::Current();
     instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation();
     ScopedThreadSuspension sts(self, kSuspended);
+    gc::ScopedGCCriticalSection gcs(self,
+                                    gc::kGcCauseInstrumentation,
+                                    gc::kCollectorTypeInstrumentation);
     ScopedSuspendAll ssa("EnableMethodTracing");
     instrumentation->EnableMethodTracing(key, needs_interpreter);
   }
@@ -261,6 +280,9 @@
     Runtime* runtime = Runtime::Current();
     instrumentation::Instrumentation* instrumentation = runtime->GetInstrumentation();
     ScopedThreadSuspension sts(self, kSuspended);
+    gc::ScopedGCCriticalSection gcs(self,
+                                    gc::kGcCauseInstrumentation,
+                                    gc::kCollectorTypeInstrumentation);
     ScopedSuspendAll ssa("EnableMethodTracing");
     instrumentation->DisableMethodTracing(key);
   }
diff --git a/runtime/native/dalvik_system_DexFile.cc b/runtime/native/dalvik_system_DexFile.cc
index 3b84bfa..4aebc2c 100644
--- a/runtime/native/dalvik_system_DexFile.cc
+++ b/runtime/native/dalvik_system_DexFile.cc
@@ -192,28 +192,38 @@
   }
 }
 
-static void DexFile_closeDexFile(JNIEnv* env, jclass, jobject cookie) {
-  std::unique_ptr<std::vector<const DexFile*>> dex_files = ConvertJavaArrayToNative(env, cookie);
-  if (dex_files.get() == nullptr) {
-    DCHECK(env->ExceptionCheck());
-    return;
-  }
-
+static jboolean DexFile_closeDexFile(JNIEnv* env, jclass, jobject cookie) {
   ScopedObjectAccess soa(env);
+  mirror::Object* dex_files_object = soa.Decode<mirror::Object*>(cookie);
+  if (dex_files_object == nullptr) {
+    ThrowNullPointerException("cookie == null");
+    return JNI_FALSE;
+  }
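+  // The cookie is a long[] whose elements are pointers to the native DexFile objects.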
+  mirror::LongArray* dex_files = dex_files_object->AsLongArray();
 
-  // The Runtime currently never unloads classes, which means any registered
-  // dex files must be kept around forever in case they are used. We
-  // accomplish this here by explicitly leaking those dex files that are
-  // registered.
-  //
-  // TODO: The Runtime should support unloading of classes and freeing of the
-  // dex files for those unloaded classes rather than leaking dex files here.
+  // Delete the native DexFiles associated with this dalvik.system.DexFile since there should be
+  // no running code still using them. dex_files is an array because of multidex.
   ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
-  for (const DexFile* dex_file : *dex_files) {
+  bool all_deleted = true;
+  for (int32_t i = 0, count = dex_files->GetLength(); i < count; ++i) {
+    auto* dex_file = reinterpret_cast<DexFile*>(dex_files->Get(i));
+    if (dex_file == nullptr) {
+      continue;
+    }
+    // Only delete the dex file if the dex cache is not found to prevent runtime crashes if there
+    // are calls to DexFile.close while the ART DexFile is still in use.
     if (class_linker->FindDexCache(soa.Self(), *dex_file, true) == nullptr) {
+      // Clear the element in the array so that we can call close again.
+      dex_files->Set(i, 0);
       delete dex_file;
+    } else {
+      all_deleted = false;
     }
   }
+
+  // TODO: Also unmap the OatFile for this dalvik.system.DexFile.
+
+  return all_deleted ? JNI_TRUE : JNI_FALSE;
 }
 
 static jclass DexFile_defineClassNative(JNIEnv* env, jclass, jstring javaName, jobject javaLoader,
@@ -379,7 +389,7 @@
 }
 
 static JNINativeMethod gMethods[] = {
-  NATIVE_METHOD(DexFile, closeDexFile, "(Ljava/lang/Object;)V"),
+  NATIVE_METHOD(DexFile, closeDexFile, "(Ljava/lang/Object;)Z"),
   NATIVE_METHOD(DexFile, defineClassNative,
                 "(Ljava/lang/String;Ljava/lang/ClassLoader;Ljava/lang/Object;)Ljava/lang/Class;"),
   NATIVE_METHOD(DexFile, getClassNameList, "(Ljava/lang/Object;)[Ljava/lang/String;"),
diff --git a/runtime/trace.cc b/runtime/trace.cc
index e2743ce..745aa63 100644
--- a/runtime/trace.cc
+++ b/runtime/trace.cc
@@ -31,6 +31,7 @@
 #include "common_throws.h"
 #include "debugger.h"
 #include "dex_file-inl.h"
+#include "gc/scoped_gc_critical_section.h"
 #include "instrumentation.h"
 #include "mirror/class-inl.h"
 #include "mirror/dex_cache-inl.h"
@@ -350,6 +351,10 @@
 
   // Create Trace object.
   {
+    // Required since EnableMethodTracing calls ConfigureStubs which visits class linker classes.
+    gc::ScopedGCCriticalSection gcs(self,
+                                    gc::kGcCauseInstrumentation,
+                                    gc::kCollectorTypeInstrumentation);
     ScopedSuspendAll ssa(__FUNCTION__);
     MutexLock mu(self, *Locks::trace_lock_);
     if (the_trace_ != nullptr) {
@@ -464,9 +469,10 @@
   Runtime* runtime = Runtime::Current();
   Trace* the_trace = nullptr;
 
+  Thread* const self = Thread::Current();
   pthread_t sampling_pthread = 0U;
   {
-    MutexLock mu(Thread::Current(), *Locks::trace_lock_);
+    MutexLock mu(self, *Locks::trace_lock_);
     if (the_trace_ == nullptr) {
       LOG(ERROR) << "Trace pause requested, but no trace currently running";
       return;
@@ -478,23 +484,26 @@
 
   if (sampling_pthread != 0U) {
     {
-      MutexLock mu(Thread::Current(), *Locks::trace_lock_);
+      MutexLock mu(self, *Locks::trace_lock_);
       the_trace_ = nullptr;
     }
     CHECK_PTHREAD_CALL(pthread_join, (sampling_pthread, nullptr), "sampling thread shutdown");
     sampling_pthread_ = 0U;
     {
-      MutexLock mu(Thread::Current(), *Locks::trace_lock_);
+      MutexLock mu(self, *Locks::trace_lock_);
       the_trace_ = the_trace;
     }
   }
 
   if (the_trace != nullptr) {
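+    // Required since DisableMethodTracing below (for non-sampling traces) goes through
+    // ConfigureStubs, which visits class linker classes.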
+    gc::ScopedGCCriticalSection gcs(self,
+                                    gc::kGcCauseInstrumentation,
+                                    gc::kCollectorTypeInstrumentation);
     ScopedSuspendAll ssa(__FUNCTION__);
     stop_alloc_counting = (the_trace->flags_ & Trace::kTraceCountAllocs) != 0;
 
     if (the_trace->trace_mode_ == TraceMode::kSampling) {
-      MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
+      MutexLock mu(self, *Locks::thread_list_lock_);
       runtime->GetThreadList()->ForEach(ClearThreadStackTraceAndClockBase, nullptr);
     } else {
       runtime->GetInstrumentation()->DisableMethodTracing(kTracerInstrumentationKey);
@@ -530,6 +539,9 @@
   bool enable_stats = (the_trace->flags_ & kTraceCountAllocs) != 0;
 
   {
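+    // Required since re-enabling method tracing may go through ConfigureStubs, which visits
+    // class linker classes.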
+    gc::ScopedGCCriticalSection gcs(self,
+                                    gc::kGcCauseInstrumentation,
+                                    gc::kCollectorTypeInstrumentation);
     ScopedSuspendAll ssa(__FUNCTION__);
 
     // Reenable.
@@ -1046,4 +1058,9 @@
   return the_trace_->buffer_size_;
 }
 
+bool Trace::IsTracingEnabled() {
+  MutexLock mu(Thread::Current(), *Locks::trace_lock_);
+  return the_trace_ != nullptr;
+}
+
 }  // namespace art
diff --git a/runtime/trace.h b/runtime/trace.h
index 87a691d..356a81f 100644
--- a/runtime/trace.h
+++ b/runtime/trace.h
@@ -183,6 +183,9 @@
   static TraceMode GetMode() REQUIRES(!Locks::trace_lock_);
   static size_t GetBufferSize() REQUIRES(!Locks::trace_lock_);
 
+  // Used by the class linker to prevent class unloading.
+  static bool IsTracingEnabled() REQUIRES(!Locks::trace_lock_);
+
  private:
   Trace(File* trace_file, const char* trace_name, size_t buffer_size, int flags,
         TraceOutputMode output_mode, TraceMode trace_mode);
diff --git a/test/087-gc-after-link/src/Main.java b/test/087-gc-after-link/src/Main.java
index e0a187a..2f6d496 100644
--- a/test/087-gc-after-link/src/Main.java
+++ b/test/087-gc-after-link/src/Main.java
@@ -155,6 +155,12 @@
      * See if we can GC after a failed load.
      */
     static void testFailLoadAndGc() throws TestFailed {
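+        // Run the failing load in a separate method so that no stale reference to the broken
+        // loader is still reachable from this frame when the GC below runs.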
+        processFailLoadAndGc();
+        Runtime.getRuntime().gc();
+        System.out.println("GC complete.");
+    }
+
+    private static void processFailLoadAndGc() throws TestFailed {
         try {
             BrokenDexLoader loader;
 
@@ -170,7 +176,5 @@
                 ite.printStackTrace();
             }
         }
-        Runtime.getRuntime().gc();
-        System.out.println("GC complete.");
     }
 }
diff --git a/test/Android.run-test.mk b/test/Android.run-test.mk
index a103eac..db16b97 100644
--- a/test/Android.run-test.mk
+++ b/test/Android.run-test.mk
@@ -369,6 +369,7 @@
 # This test dynamically enables tracing to force a deoptimization. This makes the test meaningless
 # when already tracing, and writes an error message that we do not want to check for.
 TEST_ART_BROKEN_TRACING_RUN_TESTS := \
+  087-gc-after-link \
   137-cfi \
   141-class-unload \
   802-deoptimization