Remove the need for the intrinsics recognizer to be a pass.

Instead, recognize the intrinsic when creating the invoke
instruction, i.e. when its resolved method is set.

Also remove some old code related to compiler driver sharpening.
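
For context, a rough sketch of how the out-of-line SetResolvedMethod
(in nodes.cc, not shown in this diff) could fold intrinsic recognition
into invoke creation. The helper names NeedsEnvironmentOrCache,
GetSideEffects and GetExceptions are assumptions for illustration, not
verified parts of this change:

  void HInvoke::SetResolvedMethod(ArtMethod* method) {
    if (method != nullptr && method->IsIntrinsic()) {
      Intrinsics intrinsic = static_cast<Intrinsics>(method->GetIntrinsic());
      // Hypothetical helpers: look up the environment/side-effect/exception
      // flags for the recognized intrinsic; the real lookup may live elsewhere.
      SetIntrinsic(intrinsic,
                   NeedsEnvironmentOrCache(intrinsic),
                   GetSideEffects(intrinsic),
                   GetExceptions(intrinsic));
    }
    resolved_method_ = method;
  }

Reading the ArtMethod is presumably what makes SetResolvedMethod need
the shared mutator lock, hence the REQUIRES_SHARED annotation and the
lock assert added to the HInvoke constructor below.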

Test: test.py
Change-Id: Iecb668f30e95034970fcf57160ca12092c9c610d
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 68f1a24..4ae7915 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -29,6 +29,7 @@
 #include "base/quasi_atomic.h"
 #include "base/stl_util.h"
 #include "base/transform_array_ref.h"
+#include "art_method.h"
 #include "data_type.h"
 #include "deoptimization_kind.h"
 #include "dex/dex_file.h"
@@ -4322,7 +4323,7 @@
   bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }
 
   ArtMethod* GetResolvedMethod() const { return resolved_method_; }
-  void SetResolvedMethod(ArtMethod* method) { resolved_method_ = method; }
+  void SetResolvedMethod(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_);
 
   DECLARE_ABSTRACT_INSTRUCTION(Invoke);
 
@@ -4354,12 +4355,14 @@
           number_of_arguments + number_of_other_inputs,
           kArenaAllocInvokeInputs),
       number_of_arguments_(number_of_arguments),
-      resolved_method_(resolved_method),
       dex_method_index_(dex_method_index),
       intrinsic_(Intrinsics::kNone),
       intrinsic_optimizations_(0) {
     SetPackedField<InvokeTypeField>(invoke_type);
     SetPackedFlag<kFlagCanThrow>(true);
+    // Check the mutator lock here; constructors lack annotalysis support.
+    Locks::mutator_lock_->AssertNotExclusiveHeld(Thread::Current());
+    SetResolvedMethod(resolved_method);
   }
 
   DEFAULT_COPY_CONSTRUCTOR(Invoke);