Cleanup Instrumentation::IsActive

Instrumentation::IsActive is used to check if nterp should be disabled
because certain listeners are processed only when running in the
interpreter. Earlier this check included all listeners. However:
- method entry / exit / unwind events are handled by installing
  instrumentation stubs, so nterp need not be disabled.
- exception events are handled in nterp too (see
  art_quick_deliver_exception).
- dex_pc listeners are handled only in the interpreter, but the
  methods that need these events are deoptimized.

This CL removes these listeners from IsActive and renames it to
NeedsSlowInterpreterForListeners.
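
For illustration, a minimal standalone C++ sketch of the resulting
listener split (ListenerFlags and the free function below are
hypothetical stand-ins; the authoritative check is
Instrumentation::NeedsSlowInterpreterForListeners in the diff):

  #include <iostream>

  // Illustrative model only: which listener kinds still force the
  // switch interpreter after this change.
  struct ListenerFlags {
    // Serviced by instrumentation entry/exit stubs; nterp keeps running.
    bool method_entry = false;
    bool method_exit = false;
    bool method_unwind = false;
    // Serviced in nterp via art_quick_deliver_exception.
    bool exception_thrown = false;
    // Serviced only in the interpreter, but the affected methods are
    // deoptimized individually, so nterp need not be disabled globally.
    bool dex_pc = false;
    // These still require the switch interpreter.
    bool field_read = false;
    bool field_write = false;
    bool watched_frame_pop = false;
    bool exception_handled = false;
  };

  // Mirrors the new check: only listeners that cannot be serviced by
  // stubs, nterp, or per-method deoptimization count.
  bool NeedsSlowInterpreterForListeners(const ListenerFlags& f) {
    return f.field_read || f.field_write ||
           f.watched_frame_pop || f.exception_handled;
  }

  int main() {
    ListenerFlags tracing;
    tracing.method_entry = tracing.method_exit = true;
    // Prints 0: method tracing alone no longer disables nterp.
    std::cout << NeedsSlowInterpreterForListeners(tracing) << "\n";

    ListenerFlags field_watch;
    field_watch.field_write = true;
    // Prints 1: field watch events still need the switch interpreter.
    std::cout << NeedsSlowInterpreterForListeners(field_watch) << "\n";
    return 0;
  }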

Change-Id: Ie2e936207405d5c9057129b64ac382fcf9f0cbfb
diff --git a/runtime/debugger.h b/runtime/debugger.h
index 65dc13d..a79add9 100644
--- a/runtime/debugger.h
+++ b/runtime/debugger.h
@@ -28,6 +28,7 @@
 #include "base/locks.h"
 #include "base/logging.h"
 #include "jni.h"
+#include "runtime.h"
 #include "runtime_callbacks.h"
 #include "thread.h"
 #include "thread_state.h"
@@ -64,7 +65,10 @@
   // the deoptimized frames.
   static bool IsForcedInterpreterNeededForException(Thread* thread)
       REQUIRES_SHARED(Locks::mutator_lock_) {
-    if (LIKELY(!thread->HasDebuggerShadowFrames())) {
+    // A quick check to avoid walking the stack. If there are no shadow frames and no methods
+    // that need to be deoptimized, we can safely continue with optimized code.
+    if (LIKELY(!thread->HasDebuggerShadowFrames() &&
+               Runtime::Current()->GetInstrumentation()->IsDeoptimizedMethodsEmpty())) {
       return false;
     }
     return IsForcedInterpreterNeededForExceptionImpl(thread);
diff --git a/runtime/instrumentation.cc b/runtime/instrumentation.cc
index b4abf00..a9a8810 100644
--- a/runtime/instrumentation.cc
+++ b/runtime/instrumentation.cc
@@ -943,11 +943,7 @@
     InstallStubsClassVisitor visitor(this);
     runtime->GetClassLinker()->VisitClasses(&visitor);
     // Restore stack only if there is no method currently deoptimized.
-    bool empty;
-    {
-      ReaderMutexLock mu(self, *GetDeoptimizedMethodsLock());
-      empty = IsDeoptimizedMethodsEmpty();  // Avoid lock violation.
-    }
+    bool empty = IsDeoptimizedMethodsEmpty();
     if (empty) {
       MutexLock mu(self, *Locks::thread_list_lock_);
       bool no_remaining_deopts = true;
@@ -1146,7 +1142,7 @@
   return true;
 }
 
-bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
+bool Instrumentation::IsDeoptimizedMethodsEmptyLocked() const {
   return deoptimized_methods_.empty();
 }
 
@@ -1190,7 +1186,7 @@
     bool found_and_erased = RemoveDeoptimizedMethod(method);
     CHECK(found_and_erased) << "Method " << ArtMethod::PrettyMethod(method)
         << " is not deoptimized";
-    empty = IsDeoptimizedMethodsEmpty();
+    empty = IsDeoptimizedMethodsEmptyLocked();
   }
 
   // Restore code and possibly stack only if we did not deoptimize everything.
@@ -1214,6 +1210,11 @@
   }
 }
 
+bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
+  ReaderMutexLock mu(Thread::Current(), *GetDeoptimizedMethodsLock());
+  return deoptimized_methods_.empty();
+}
+
 bool Instrumentation::IsDeoptimized(ArtMethod* method) {
   DCHECK(method != nullptr);
   ReaderMutexLock mu(Thread::Current(), *GetDeoptimizedMethodsLock());
@@ -1229,7 +1230,7 @@
     ArtMethod* method;
     {
       ReaderMutexLock mu(Thread::Current(), *GetDeoptimizedMethodsLock());
-      if (IsDeoptimizedMethodsEmpty()) {
+      if (IsDeoptimizedMethodsEmptyLocked()) {
         break;
       }
       method = BeginDeoptimizedMethod();
diff --git a/runtime/instrumentation.h b/runtime/instrumentation.h
index e53ee82..572586e 100644
--- a/runtime/instrumentation.h
+++ b/runtime/instrumentation.h
@@ -260,6 +260,11 @@
   bool IsDeoptimized(ArtMethod* method)
       REQUIRES(!GetDeoptimizedMethodsLock()) REQUIRES_SHARED(Locks::mutator_lock_);
 
+  // Returns true if there are no currently deoptimized methods. This is used to avoid walking
+  // the stack to determine if a deoptimization is required.
+  bool IsDeoptimizedMethodsEmpty() const
+      REQUIRES(!GetDeoptimizedMethodsLock()) REQUIRES_SHARED(Locks::mutator_lock_);
+
   // Enable method tracing by installing instrumentation entry/exit stubs or interpreter.
   void EnableMethodTracing(const char* key,
                            bool needs_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners)
@@ -371,12 +376,11 @@
     return have_exception_handled_listeners_;
   }
 
-  bool IsActive() const REQUIRES_SHARED(Locks::mutator_lock_) {
-    return have_dex_pc_listeners_ || have_method_entry_listeners_ || have_method_exit_listeners_ ||
-        have_field_read_listeners_ || have_field_write_listeners_ ||
-        have_exception_thrown_listeners_ || have_method_unwind_listeners_ ||
-        have_branch_listeners_ || have_watched_frame_pop_listeners_ ||
-        have_exception_handled_listeners_;
+  bool NeedsSlowInterpreterForListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
+    return have_field_read_listeners_ ||
+           have_field_write_listeners_ ||
+           have_watched_frame_pop_listeners_ ||
+           have_exception_handled_listeners_;
   }
 
   // Inform listeners that a method has been entered. A dex PC is provided as we may install
@@ -619,7 +623,7 @@
       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(GetDeoptimizedMethodsLock());
   ArtMethod* BeginDeoptimizedMethod()
       REQUIRES_SHARED(Locks::mutator_lock_, GetDeoptimizedMethodsLock());
-  bool IsDeoptimizedMethodsEmpty() const
+  bool IsDeoptimizedMethodsEmptyLocked() const
       REQUIRES_SHARED(Locks::mutator_lock_, GetDeoptimizedMethodsLock());
   void UpdateMethodsCodeImpl(ArtMethod* method, const void* new_code)
       REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!GetDeoptimizedMethodsLock());
diff --git a/runtime/instrumentation_test.cc b/runtime/instrumentation_test.cc
index 9373751..b0a81b6 100644
--- a/runtime/instrumentation_test.cc
+++ b/runtime/instrumentation_test.cc
@@ -466,7 +466,7 @@
 
   EXPECT_FALSE(instr->AreExitStubsInstalled());
   EXPECT_FALSE(instr->AreAllMethodsDeoptimized());
-  EXPECT_FALSE(instr->IsActive());
+  EXPECT_FALSE(instr->NeedsSlowInterpreterForListeners());
   EXPECT_FALSE(instr->ShouldNotifyMethodEnterExitEvents());
 
 
@@ -478,7 +478,6 @@
   EXPECT_FALSE(instr->HasFieldWriteListeners());
   EXPECT_FALSE(instr->HasMethodEntryListeners());
   EXPECT_FALSE(instr->HasMethodExitListeners());
-  EXPECT_FALSE(instr->IsActive());
 }
 
 // Test instrumentation listeners for each event.
diff --git a/runtime/interpreter/interpreter.cc b/runtime/interpreter/interpreter.cc
index 4339120..dc7e23e 100644
--- a/runtime/interpreter/interpreter.cc
+++ b/runtime/interpreter/interpreter.cc
@@ -476,13 +476,16 @@
     const uint32_t dex_pc = shadow_frame->GetDexPC();
     uint32_t new_dex_pc = dex_pc;
     if (UNLIKELY(self->IsExceptionPending())) {
-      // If we deoptimize from the QuickExceptionHandler, we already reported the exception to
-      // the instrumentation. To prevent from reporting it a second time, we simply pass a
-      // null Instrumentation*.
-      const instrumentation::Instrumentation* const instrumentation =
-          frame_cnt == 0 ? nullptr : Runtime::Current()->GetInstrumentation();
-      new_dex_pc = MoveToExceptionHandler(
-          self, *shadow_frame, instrumentation) ? shadow_frame->GetDexPC() : dex::kDexNoIndex;
+      // If we deoptimize from the QuickExceptionHandler, we already reported the exception throw
+      // event to the instrumentation, so skip the throw listener for the first frame. The deopt
+      // check should happen after the throw listener is called, since the throw listener can
+      // trigger a deoptimization.
+      new_dex_pc = MoveToExceptionHandler(self,
+                                          *shadow_frame,
+                                          /* skip_listeners= */ false,
+                                          /* skip_throw_listener= */ frame_cnt == 0) ?
+                       shadow_frame->GetDexPC() :
+                       dex::kDexNoIndex;
     } else if (!from_code) {
       // Deoptimization is not called from code directly.
       const Instruction* instr = &accessor.InstructionAt(dex_pc);
diff --git a/runtime/interpreter/interpreter_common.cc b/runtime/interpreter/interpreter_common.cc
index cafe747..c8a87c1 100644
--- a/runtime/interpreter/interpreter_common.cc
+++ b/runtime/interpreter/interpreter_common.cc
@@ -150,11 +150,14 @@
 // behavior.
 bool MoveToExceptionHandler(Thread* self,
                             ShadowFrame& shadow_frame,
-                            const instrumentation::Instrumentation* instrumentation) {
+                            bool skip_listeners,
+                            bool skip_throw_listener) {
   self->VerifyStack();
   StackHandleScope<2> hs(self);
   Handle<mirror::Throwable> exception(hs.NewHandle(self->GetException()));
-  if (instrumentation != nullptr &&
+  const instrumentation::Instrumentation* instrumentation =
+      Runtime::Current()->GetInstrumentation();
+  if (!skip_throw_listener &&
       instrumentation->HasExceptionThrownListeners() &&
       self->IsExceptionThrownByCurrentMethod(exception.Get())) {
     // See b/65049545 for why we don't need to check to see if the exception has changed.
@@ -169,7 +172,7 @@
   uint32_t found_dex_pc = shadow_frame.GetMethod()->FindCatchBlock(
       hs.NewHandle(exception->GetClass()), shadow_frame.GetDexPC(), &clear_exception);
   if (found_dex_pc == dex::kDexNoIndex) {
-    if (instrumentation != nullptr) {
+    if (!skip_listeners) {
       if (shadow_frame.NeedsNotifyPop()) {
         instrumentation->WatchedFramePopped(self, shadow_frame);
         if (shadow_frame.GetForcePopFrame()) {
@@ -189,12 +192,12 @@
     return shadow_frame.GetForcePopFrame();
   } else {
     shadow_frame.SetDexPC(found_dex_pc);
-    if (instrumentation != nullptr && instrumentation->HasExceptionHandledListeners()) {
+    if (!skip_listeners && instrumentation->HasExceptionHandledListeners()) {
       self->ClearException();
       instrumentation->ExceptionHandledEvent(self, exception.Get());
       if (UNLIKELY(self->IsExceptionPending())) {
         // Exception handled event threw an exception. Try to find the handler for this one.
-        return MoveToExceptionHandler(self, shadow_frame, instrumentation);
+        return MoveToExceptionHandler(self, shadow_frame, skip_listeners, skip_throw_listener);
       } else if (!clear_exception) {
         self->SetException(exception.Get());
       }
diff --git a/runtime/interpreter/interpreter_common.h b/runtime/interpreter/interpreter_common.h
index e9b78f3..95e3bd5 100644
--- a/runtime/interpreter/interpreter_common.h
+++ b/runtime/interpreter/interpreter_common.h
@@ -706,8 +706,8 @@
 // TODO We might wish to reconsider how we cause some events to be ignored.
 bool MoveToExceptionHandler(Thread* self,
                             ShadowFrame& shadow_frame,
-                            const instrumentation::Instrumentation* instrumentation)
-    REQUIRES_SHARED(Locks::mutator_lock_);
+                            bool skip_listeners,
+                            bool skip_throw_listener) REQUIRES_SHARED(Locks::mutator_lock_);
 
 NO_RETURN void UnexpectedOpcode(const Instruction* inst, const ShadowFrame& shadow_frame)
   __attribute__((cold))
diff --git a/runtime/interpreter/interpreter_switch_impl-inl.h b/runtime/interpreter/interpreter_switch_impl-inl.h
index f58fb95..d95c507 100644
--- a/runtime/interpreter/interpreter_switch_impl-inl.h
+++ b/runtime/interpreter/interpreter_switch_impl-inl.h
@@ -95,8 +95,11 @@
     }
     bool skip_event = shadow_frame_.GetSkipNextExceptionEvent();
     shadow_frame_.SetSkipNextExceptionEvent(false);
-    if (!MoveToExceptionHandler(Self(), shadow_frame_, skip_event ? nullptr : Instrumentation())) {
-      /* Structured locking is to be enforced for abnormal termination, too. */
+    if (!MoveToExceptionHandler(Self(),
+                                shadow_frame_,
+                                /* skip_listeners= */ skip_event,
+                                /* skip_throw_listener= */ skip_event)) {
+      // Structured locking is to be enforced for abnormal termination, too.
       DoMonitorCheckOnExit<do_assignability_check>(Self(), &shadow_frame_);
       ctx_->result = JValue(); /* Handled in caller. */
       ExitInterpreterLoop();
diff --git a/runtime/interpreter/mterp/nterp.cc b/runtime/interpreter/mterp/nterp.cc
index 34c971c..89e6a13 100644
--- a/runtime/interpreter/mterp/nterp.cc
+++ b/runtime/interpreter/mterp/nterp.cc
@@ -46,7 +46,7 @@
   return IsNterpSupported() &&
       !instr->InterpretOnly() &&
       !runtime->IsAotCompiler() &&
-      !runtime->GetInstrumentation()->IsActive() &&
+      !runtime->GetInstrumentation()->NeedsSlowInterpreterForListeners() &&
       // An async exception has been thrown. We need to go to the switch interpreter. nterp doesn't
       // know how to deal with these so we could end up never dealing with it if we are in an
       // infinite loop.
diff --git a/runtime/thread.cc b/runtime/thread.cc
index 4b5ec18..8b2a411 100644
--- a/runtime/thread.cc
+++ b/runtime/thread.cc
@@ -3672,7 +3672,9 @@
 
   ReadBarrier::MaybeAssertToSpaceInvariant(exception.Ptr());
 
-  // This is a real exception: let the instrumentation know about it.
+  // This is a real exception: let the instrumentation know about it. The exception throw
+  // listener could set a breakpoint or install listeners that might require a deoptimization,
+  // so the deoptimization check needs to happen after calling the listener.
   instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
   if (instrumentation->HasExceptionThrownListeners() &&
       IsExceptionThrownByCurrentMethod(exception)) {