Fix stats reporting over 100% of methods compiled.

Add statistics for intrinsic and native stub compilation,
and for the JIT failing to allocate memory when committing
code. Clean up the recording of compilation statistics.

New statistics when building the aosp_taimen-userdebug boot
image with --dump-stats:
  Attempted compilation of 94304 methods: 99.99% (94295) compiled.
  OptStat#AttemptBytecodeCompilation: 89487
  OptStat#AttemptIntrinsicCompilation: 160
  OptStat#CompiledNativeStub: 4733
  OptStat#CompiledIntrinsic: 84
  OptStat#CompiledBytecode: 89478
  ...
where 94304 = 89487 + 4733 + 84 and 94295 = 89478 + 4733 + 84.
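
For reference, a minimal standalone sketch (not part of the ART
sources; variable names are illustrative only) of how the new
summary line combines these counters:

  #include <cstdint>
  #include <cstdio>

  int main() {
    // Counters taken from the --dump-stats output above.
    uint32_t bytecode_attempts   = 89487;  // AttemptBytecodeCompilation
    uint32_t compiled_bytecode   = 89478;  // CompiledBytecode
    uint32_t compiled_stubs      = 4733;   // CompiledNativeStub
    uint32_t compiled_intrinsics = 84;     // CompiledIntrinsic

    // Intrinsics and native stubs count as both attempts and successes.
    uint32_t attempts  = bytecode_attempts + compiled_stubs + compiled_intrinsics;  // 94304
    uint32_t successes = compiled_bytecode + compiled_stubs + compiled_intrinsics;  // 94295
    printf("Attempted compilation of %u methods: %.2f%% (%u) compiled.\n",
           attempts, successes * 100.0f / attempts, successes);
    return 0;
  }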

Test: testrunner.py -b --host --optimizing
Test: Manually inspect output of building boot image
      with --dump-stats.
Bug: 69627511
Change-Id: I15eb2b062a96f09a7721948bcc77b83ee4f18efd
diff --git a/compiler/Android.bp b/compiler/Android.bp
index 3699d66..37a18cb 100644
--- a/compiler/Android.bp
+++ b/compiler/Android.bp
@@ -211,6 +211,7 @@
         "driver/compiler_options.h",
         "linker/linker_patch.h",
         "optimizing/locations.h",
+        "optimizing/optimizing_compiler_stats.h",
 
         "utils/arm/constants_arm.h",
         "utils/mips/assembler_mips.h",
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index 0bd3ce9..aff6f9f 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -1411,10 +1411,10 @@
 
 void CodeGenerator::GenerateNullCheck(HNullCheck* instruction) {
   if (compiler_options_.GetImplicitNullChecks()) {
-    MaybeRecordStat(stats_, kImplicitNullCheckGenerated);
+    MaybeRecordStat(stats_, MethodCompilationStat::kImplicitNullCheckGenerated);
     GenerateImplicitNullCheck(instruction);
   } else {
-    MaybeRecordStat(stats_, kExplicitNullCheckGenerated);
+    MaybeRecordStat(stats_, MethodCompilationStat::kExplicitNullCheckGenerated);
     GenerateExplicitNullCheck(instruction);
   }
 }
diff --git a/compiler/optimizing/inliner.cc b/compiler/optimizing/inliner.cc
index 1eb1f2e..2444e43 100644
--- a/compiler/optimizing/inliner.cc
+++ b/compiler/optimizing/inliner.cc
@@ -441,9 +441,9 @@
         // Add dependency due to devirtulization. We've assumed resolved_method
         // has single implementation.
         outermost_graph_->AddCHASingleImplementationDependency(resolved_method);
-        MaybeRecordStat(stats_, kCHAInline);
+        MaybeRecordStat(stats_, MethodCompilationStat::kCHAInline);
       } else {
-        MaybeRecordStat(stats_, kInlinedInvokeVirtualOrInterface);
+        MaybeRecordStat(stats_, MethodCompilationStat::kInlinedInvokeVirtualOrInterface);
       }
     }
     return result;
@@ -533,7 +533,7 @@
     }
 
     case kInlineCacheMonomorphic: {
-      MaybeRecordStat(stats_, kMonomorphicCall);
+      MaybeRecordStat(stats_, MethodCompilationStat::kMonomorphicCall);
       if (UseOnlyPolymorphicInliningWithNoDeopt()) {
         return TryInlinePolymorphicCall(invoke_instruction, resolved_method, inline_cache);
       } else {
@@ -542,7 +542,7 @@
     }
 
     case kInlineCachePolymorphic: {
-      MaybeRecordStat(stats_, kPolymorphicCall);
+      MaybeRecordStat(stats_, MethodCompilationStat::kPolymorphicCall);
       return TryInlinePolymorphicCall(invoke_instruction, resolved_method, inline_cache);
     }
 
@@ -551,7 +551,7 @@
           << "Interface or virtual call to "
           << caller_dex_file.PrettyMethod(invoke_instruction->GetDexMethodIndex())
           << " is megamorphic and not inlined";
-      MaybeRecordStat(stats_, kMegamorphicCall);
+      MaybeRecordStat(stats_, MethodCompilationStat::kMegamorphicCall);
       return false;
     }
 
@@ -755,7 +755,7 @@
   dex::TypeIndex class_index = FindClassIndexIn(
       GetMonomorphicType(classes), caller_compilation_unit_);
   if (!class_index.IsValid()) {
-    LOG_FAIL(stats_, kNotInlinedDexCache)
+    LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedDexCache)
         << "Call to " << ArtMethod::PrettyMethod(resolved_method)
         << " from inline cache is not inlined because its class is not"
         << " accessible to the caller";
@@ -804,7 +804,7 @@
                                      /* is_first_run */ false);
   rtp_fixup.Run();
 
-  MaybeRecordStat(stats_, kInlinedMonomorphicCall);
+  MaybeRecordStat(stats_, MethodCompilationStat::kInlinedMonomorphicCall);
   return true;
 }
 
@@ -994,7 +994,7 @@
     return false;
   }
 
-  MaybeRecordStat(stats_, kInlinedPolymorphicCall);
+  MaybeRecordStat(stats_, MethodCompilationStat::kInlinedPolymorphicCall);
 
   // Run type propagation to get the guards typed.
   ReferenceTypePropagation rtp_fixup(graph_,
@@ -1200,7 +1200,7 @@
                                      /* is_first_run */ false);
   rtp_fixup.Run();
 
-  MaybeRecordStat(stats_, kInlinedPolymorphicCall);
+  MaybeRecordStat(stats_, MethodCompilationStat::kInlinedPolymorphicCall);
 
   LOG_SUCCESS() << "Inlined same polymorphic target " << actual_method->PrettyMethod();
   return true;
@@ -1263,7 +1263,7 @@
       // these relative rare cases.
       bool wrong_invoke_type = false;
       if (IntrinsicsRecognizer::Recognize(new_invoke, &wrong_invoke_type)) {
-        MaybeRecordStat(stats_, kIntrinsicRecognized);
+        MaybeRecordStat(stats_, MethodCompilationStat::kIntrinsicRecognized);
       }
     } else {
       // TODO: Consider sharpening an invoke virtual once it is not dependent on the
@@ -1308,14 +1308,14 @@
                                  ReferenceTypeInfo receiver_type,
                                  HInstruction** return_replacement) {
   if (method->IsProxyMethod()) {
-    LOG_FAIL(stats_, kNotInlinedProxy)
+    LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedProxy)
         << "Method " << method->PrettyMethod()
         << " is not inlined because of unimplemented inline support for proxy methods.";
     return false;
   }
 
   if (CountRecursiveCallsOf(method) > kMaximumNumberOfRecursiveCalls) {
-    LOG_FAIL(stats_, kNotInlinedRecursiveBudget)
+    LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedRecursiveBudget)
         << "Method "
         << method->PrettyMethod()
         << " is not inlined because it has reached its recursive call budget.";
@@ -1329,10 +1329,10 @@
     if (TryPatternSubstitution(invoke_instruction, method, return_replacement)) {
       LOG_SUCCESS() << "Successfully replaced pattern of invoke "
                     << method->PrettyMethod();
-      MaybeRecordStat(stats_, kReplacedInvokeWithSimplePattern);
+      MaybeRecordStat(stats_, MethodCompilationStat::kReplacedInvokeWithSimplePattern);
       return true;
     }
-    LOG_FAIL(stats_, kNotInlinedWont)
+    LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedWont)
         << "Won't inline " << method->PrettyMethod() << " in "
         << outer_compilation_unit_.GetDexFile()->GetLocation() << " ("
         << caller_compilation_unit_.GetDexFile()->GetLocation() << ") from "
@@ -1352,7 +1352,7 @@
 
   size_t inline_max_code_units = compiler_driver_->GetCompilerOptions().GetInlineMaxCodeUnits();
   if (code_item->insns_size_in_code_units_ > inline_max_code_units) {
-    LOG_FAIL(stats_, kNotInlinedCodeItem)
+    LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedCodeItem)
         << "Method " << method->PrettyMethod()
         << " is not inlined because its code item is too big: "
         << code_item->insns_size_in_code_units_
@@ -1362,13 +1362,13 @@
   }
 
   if (code_item->tries_size_ != 0) {
-    LOG_FAIL(stats_, kNotInlinedTryCatch)
+    LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedTryCatch)
         << "Method " << method->PrettyMethod() << " is not inlined because of try block";
     return false;
   }
 
   if (!method->IsCompilable()) {
-    LOG_FAIL(stats_, kNotInlinedNotVerified)
+    LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedNotVerified)
         << "Method " << method->PrettyMethod()
         << " has soft failures un-handled by the compiler, so it cannot be inlined";
   }
@@ -1378,7 +1378,7 @@
     if (Runtime::Current()->UseJitCompilation() ||
         !compiler_driver_->IsMethodVerifiedWithoutFailures(
             method->GetDexMethodIndex(), class_def_idx, *method->GetDexFile())) {
-      LOG_FAIL(stats_, kNotInlinedNotVerified)
+      LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedNotVerified)
           << "Method " << method->PrettyMethod()
           << " couldn't be verified, so it cannot be inlined";
       return false;
@@ -1389,9 +1389,10 @@
       invoke_instruction->AsInvokeStaticOrDirect()->IsStaticWithImplicitClinitCheck()) {
     // Case of a static method that cannot be inlined because it implicitly
     // requires an initialization check of its declaring class.
-    LOG_FAIL(stats_, kNotInlinedDexCache) << "Method " << method->PrettyMethod()
-             << " is not inlined because it is static and requires a clinit"
-             << " check that cannot be emitted due to Dex cache limitations";
+    LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedDexCache)
+        << "Method " << method->PrettyMethod()
+        << " is not inlined because it is static and requires a clinit"
+        << " check that cannot be emitted due to Dex cache limitations";
     return false;
   }
 
@@ -1401,7 +1402,7 @@
   }
 
   LOG_SUCCESS() << method->PrettyMethod();
-  MaybeRecordStat(stats_, kInlinedInvoke);
+  MaybeRecordStat(stats_, MethodCompilationStat::kInlinedInvoke);
   return true;
 }
 
@@ -1684,7 +1685,7 @@
                         handles_);
 
   if (builder.BuildGraph() != kAnalysisSuccess) {
-    LOG_FAIL(stats_, kNotInlinedCannotBuild)
+    LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedCannotBuild)
         << "Method " << callee_dex_file.PrettyMethod(method_index)
         << " could not be built, so cannot be inlined";
     return false;
@@ -1692,7 +1693,7 @@
 
   if (!RegisterAllocator::CanAllocateRegistersFor(*callee_graph,
                                                   compiler_driver_->GetInstructionSet())) {
-    LOG_FAIL(stats_, kNotInlinedRegisterAllocator)
+    LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedRegisterAllocator)
         << "Method " << callee_dex_file.PrettyMethod(method_index)
         << " cannot be inlined because of the register allocator";
     return false;
@@ -1745,7 +1746,7 @@
 
   HBasicBlock* exit_block = callee_graph->GetExitBlock();
   if (exit_block == nullptr) {
-    LOG_FAIL(stats_, kNotInlinedInfiniteLoop)
+    LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedInfiniteLoop)
         << "Method " << callee_dex_file.PrettyMethod(method_index)
         << " could not be inlined because it has an infinite loop";
     return false;
@@ -1756,14 +1757,14 @@
     if (predecessor->GetLastInstruction()->IsThrow()) {
       if (invoke_instruction->GetBlock()->IsTryBlock()) {
         // TODO(ngeoffray): Support adding HTryBoundary in Hgraph::InlineInto.
-        LOG_FAIL(stats_, kNotInlinedTryCatch)
+        LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedTryCatch)
             << "Method " << callee_dex_file.PrettyMethod(method_index)
             << " could not be inlined because one branch always throws and"
             << " caller is in a try/catch block";
         return false;
       } else if (graph_->GetExitBlock() == nullptr) {
         // TODO(ngeoffray): Support adding HExit in the caller graph.
-        LOG_FAIL(stats_, kNotInlinedInfiniteLoop)
+        LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedInfiniteLoop)
             << "Method " << callee_dex_file.PrettyMethod(method_index)
             << " could not be inlined because one branch always throws and"
             << " caller does not have an exit block";
@@ -1782,7 +1783,7 @@
   }
 
   if (!has_one_return) {
-    LOG_FAIL(stats_, kNotInlinedAlwaysThrows)
+    LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedAlwaysThrows)
         << "Method " << callee_dex_file.PrettyMethod(method_index)
         << " could not be inlined because it always throws";
     return false;
@@ -1795,7 +1796,7 @@
       if (block->GetLoopInformation()->IsIrreducible()) {
         // Don't inline methods with irreducible loops, they could prevent some
         // optimizations to run.
-        LOG_FAIL(stats_, kNotInlinedIrreducibleLoop)
+        LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedIrreducibleLoop)
             << "Method " << callee_dex_file.PrettyMethod(method_index)
             << " could not be inlined because it contains an irreducible loop";
         return false;
@@ -1804,7 +1805,7 @@
         // Don't inline methods with loops without exit, since they cause the
         // loop information to be computed incorrectly when updating after
         // inlining.
-        LOG_FAIL(stats_, kNotInlinedLoopWithoutExit)
+        LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedLoopWithoutExit)
             << "Method " << callee_dex_file.PrettyMethod(method_index)
             << " could not be inlined because it contains a loop with no exit";
         return false;
@@ -1815,7 +1816,7 @@
          !instr_it.Done();
          instr_it.Advance()) {
       if (++number_of_instructions >= inlining_budget_) {
-        LOG_FAIL(stats_, kNotInlinedInstructionBudget)
+        LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedInstructionBudget)
             << "Method " << callee_dex_file.PrettyMethod(method_index)
             << " is not inlined because the outer method has reached"
             << " its instruction budget limit.";
@@ -1824,7 +1825,7 @@
       HInstruction* current = instr_it.Current();
       if (current->NeedsEnvironment() &&
           (total_number_of_dex_registers_ >= kMaximumNumberOfCumulatedDexRegisters)) {
-        LOG_FAIL(stats_, kNotInlinedEnvironmentBudget)
+        LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedEnvironmentBudget)
             << "Method " << callee_dex_file.PrettyMethod(method_index)
             << " is not inlined because its caller has reached"
             << " its environment budget limit.";
@@ -1834,7 +1835,7 @@
       if (current->NeedsEnvironment() &&
           !CanEncodeInlinedMethodInStackMap(*caller_compilation_unit_.GetDexFile(),
                                             resolved_method)) {
-        LOG_FAIL(stats_, kNotInlinedStackMaps)
+        LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedStackMaps)
             << "Method " << callee_dex_file.PrettyMethod(method_index)
             << " could not be inlined because " << current->DebugName()
             << " needs an environment, is in a different dex file"
@@ -1843,7 +1844,7 @@
       }
 
       if (!same_dex_file && current->NeedsDexCacheOfDeclaringClass()) {
-        LOG_FAIL(stats_, kNotInlinedDexCache)
+        LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedDexCache)
             << "Method " << callee_dex_file.PrettyMethod(method_index)
             << " could not be inlined because " << current->DebugName()
             << " it is in a different dex file and requires access to the dex cache";
@@ -1855,7 +1856,7 @@
           current->IsUnresolvedStaticFieldSet() ||
           current->IsUnresolvedInstanceFieldSet()) {
         // Entrypoint for unresolved fields does not handle inlined frames.
-        LOG_FAIL(stats_, kNotInlinedUnresolvedEntrypoint)
+        LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedUnresolvedEntrypoint)
             << "Method " << callee_dex_file.PrettyMethod(method_index)
             << " could not be inlined because it is using an unresolved"
             << " entrypoint";
diff --git a/compiler/optimizing/instruction_simplifier.cc b/compiler/optimizing/instruction_simplifier.cc
index 4c18e16..7fa0c2b 100644
--- a/compiler/optimizing/instruction_simplifier.cc
+++ b/compiler/optimizing/instruction_simplifier.cc
@@ -48,7 +48,7 @@
   void RecordSimplification() {
     simplification_occurred_ = true;
     simplifications_at_current_position_++;
-    MaybeRecordStat(stats_, kInstructionSimplifications);
+    MaybeRecordStat(stats_, MethodCompilationStat::kInstructionSimplifications);
   }
 
   bool ReplaceRotateWithRor(HBinaryOperation* op, HUShr* ushr, HShl* shl);
@@ -663,7 +663,7 @@
 
   HGraph* graph = GetGraph();
   if (object->IsNullConstant()) {
-    MaybeRecordStat(stats_, kRemovedInstanceOf);
+    MaybeRecordStat(stats_, MethodCompilationStat::kRemovedInstanceOf);
     instruction->ReplaceWith(graph->GetIntConstant(0));
     instruction->GetBlock()->RemoveInstruction(instruction);
     RecordSimplification();
@@ -674,7 +674,7 @@
   // the return value check with the `outcome` check, b/27651442 .
   bool outcome = false;
   if (TypeCheckHasKnownOutcome(load_class, object, &outcome)) {
-    MaybeRecordStat(stats_, kRemovedInstanceOf);
+    MaybeRecordStat(stats_, MethodCompilationStat::kRemovedInstanceOf);
     if (outcome && can_be_null) {
       // Type test will succeed, we just need a null test.
       HNotEqual* test = new (graph->GetAllocator()) HNotEqual(graph->GetNullConstant(), object);
diff --git a/compiler/optimizing/instruction_simplifier_arm.cc b/compiler/optimizing/instruction_simplifier_arm.cc
index d41e49a..92081e3 100644
--- a/compiler/optimizing/instruction_simplifier_arm.cc
+++ b/compiler/optimizing/instruction_simplifier_arm.cc
@@ -37,9 +37,7 @@
 
  private:
   void RecordSimplification() {
-    if (stats_ != nullptr) {
-      stats_->RecordStat(kInstructionSimplificationsArch);
-    }
+    MaybeRecordStat(stats_, MethodCompilationStat::kInstructionSimplificationsArch);
   }
 
   bool TryMergeIntoUsersShifterOperand(HInstruction* instruction);
diff --git a/compiler/optimizing/instruction_simplifier_arm64.cc b/compiler/optimizing/instruction_simplifier_arm64.cc
index 69e1463..1c44e5a 100644
--- a/compiler/optimizing/instruction_simplifier_arm64.cc
+++ b/compiler/optimizing/instruction_simplifier_arm64.cc
@@ -37,9 +37,7 @@
 
  private:
   void RecordSimplification() {
-    if (stats_ != nullptr) {
-      stats_->RecordStat(kInstructionSimplificationsArch);
-    }
+    MaybeRecordStat(stats_, MethodCompilationStat::kInstructionSimplificationsArch);
   }
 
   bool TryMergeIntoUsersShifterOperand(HInstruction* instruction);
diff --git a/compiler/optimizing/instruction_simplifier_mips.cc b/compiler/optimizing/instruction_simplifier_mips.cc
index 6a0d8a6..fa97401 100644
--- a/compiler/optimizing/instruction_simplifier_mips.cc
+++ b/compiler/optimizing/instruction_simplifier_mips.cc
@@ -33,9 +33,7 @@
 
  private:
   void RecordSimplification() {
-    if (stats_ != nullptr) {
-      stats_->RecordStat(kInstructionSimplificationsArch);
-    }
+    MaybeRecordStat(stats_, MethodCompilationStat::kInstructionSimplificationsArch);
   }
 
   bool TryExtractArrayAccessIndex(HInstruction* access,
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index 53f9ec4..095ca63 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -738,7 +738,7 @@
                                               ArtMethod* method,
                                               bool osr,
                                               VariableSizedHandleScope* handles) const {
-  MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kAttemptCompilation);
+  MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kAttemptBytecodeCompilation);
   CompilerDriver* compiler_driver = GetCompilerDriver();
   InstructionSet instruction_set = compiler_driver->GetInstructionSet();
   const DexFile& dex_file = *dex_compilation_unit.GetDexFile();
@@ -757,8 +757,7 @@
   }
 
   if (Compiler::IsPathologicalCase(*code_item, method_idx, dex_file)) {
-    MaybeRecordStat(compilation_stats_.get(),
-                    MethodCompilationStat::kNotCompiledPathological);
+    MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kNotCompiledPathological);
     return nullptr;
   }
 
@@ -768,8 +767,7 @@
   const CompilerOptions& compiler_options = compiler_driver->GetCompilerOptions();
   if ((compiler_options.GetCompilerFilter() == CompilerFilter::kSpace)
       && (code_item->insns_size_in_code_units_ > kSpaceFilterOptimizingThreshold)) {
-    MaybeRecordStat(compilation_stats_.get(),
-                    MethodCompilationStat::kNotCompiledSpaceFilter);
+    MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kNotCompiledSpaceFilter);
     return nullptr;
   }
 
@@ -800,8 +798,7 @@
                             compiler_driver->GetCompilerOptions(),
                             compilation_stats_.get()));
   if (codegen.get() == nullptr) {
-    MaybeRecordStat(compilation_stats_.get(),
-                    MethodCompilationStat::kNotCompiledNoCodegen);
+    MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kNotCompiledNoCodegen);
     return nullptr;
   }
   codegen->GetAssembler()->cfi().SetEnabled(
@@ -873,6 +870,7 @@
   codegen->Compile(code_allocator);
   pass_observer.DumpDisassembly();
 
+  MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kCompiledBytecode);
   return codegen.release();
 }
 
@@ -883,6 +881,7 @@
     const DexCompilationUnit& dex_compilation_unit,
     ArtMethod* method,
     VariableSizedHandleScope* handles) const {
+  MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kAttemptIntrinsicCompilation);
   CompilerDriver* compiler_driver = GetCompilerDriver();
   InstructionSet instruction_set = compiler_driver->GetInstructionSet();
   const DexFile& dex_file = *dex_compilation_unit.GetDexFile();
@@ -894,8 +893,6 @@
 
   // Do not attempt to compile on architectures we do not support.
   if (!IsInstructionSetSupported(instruction_set)) {
-    MaybeRecordStat(compilation_stats_.get(),
-                    MethodCompilationStat::kNotCompiledUnsupportedIsa);
     return nullptr;
   }
 
@@ -920,8 +917,6 @@
                             compiler_driver->GetCompilerOptions(),
                             compilation_stats_.get()));
   if (codegen.get() == nullptr) {
-    MaybeRecordStat(compilation_stats_.get(),
-                    MethodCompilationStat::kNotCompiledNoCodegen);
     return nullptr;
   }
   codegen->GetAssembler()->cfi().SetEnabled(
@@ -979,6 +974,7 @@
 
   VLOG(compiler) << "Compiled intrinsic: " << method->GetIntrinsic()
       << " " << graph->PrettyMethod();
+  MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kCompiledIntrinsic);
   return codegen.release();
 }
 
@@ -1046,8 +1042,6 @@
       }
     }
     if (codegen.get() != nullptr) {
-      MaybeRecordStat(compilation_stats_.get(),
-                      MethodCompilationStat::kCompiled);
       compiled_method = Emit(&allocator,
                              &code_allocator,
                              codegen.get(),
@@ -1139,10 +1133,12 @@
     }
   }
 
-  return ArtQuickJniCompileMethod(GetCompilerDriver(),
-                                  access_flags,
-                                  method_idx,
-                                  dex_file);
+  CompiledMethod* compiled_method = ArtQuickJniCompileMethod(GetCompilerDriver(),
+                                                             access_flags,
+                                                             method_idx,
+                                                             dex_file);
+  MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kCompiledNativeStub);
+  return compiled_method;
 }
 
 Compiler* CreateOptimizingCompiler(CompilerDriver* driver) {
@@ -1237,6 +1233,7 @@
           self, class_linker->GetClassRoot(ClassLinker::kObjectArrayClass), number_of_roots)));
   if (roots == nullptr) {
     // Out of memory, just clear the exception to avoid any Java exception uncaught problems.
+    MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kJitOutOfMemoryForCommit);
     DCHECK(self->IsExceptionPending());
     self->ClearException();
     return false;
@@ -1253,9 +1250,9 @@
                                                &method_info_data,
                                                &roots_data);
   if (stack_map_data == nullptr || roots_data == nullptr) {
+    MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kJitOutOfMemoryForCommit);
     return false;
   }
-  MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kCompiled);
   codegen->BuildStackMaps(MemoryRegion(stack_map_data, stack_map_size),
                           MemoryRegion(method_info_data, method_info_size),
                           code_item);
@@ -1279,6 +1276,7 @@
       codegen->GetGraph()->GetCHASingleImplementationList());
 
   if (code == nullptr) {
+    MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kJitOutOfMemoryForCommit);
     code_cache->ClearData(self, stack_map_data, roots_data);
     return false;
   }
diff --git a/compiler/optimizing/optimizing_compiler_stats.h b/compiler/optimizing/optimizing_compiler_stats.h
index 07f9635..a2e92d2 100644
--- a/compiler/optimizing/optimizing_compiler_stats.h
+++ b/compiler/optimizing/optimizing_compiler_stats.h
@@ -27,10 +27,13 @@
 
 namespace art {
 
-enum MethodCompilationStat {
-  kAttemptCompilation = 0,
+enum class MethodCompilationStat {
+  kAttemptBytecodeCompilation = 0,
+  kAttemptIntrinsicCompilation,
+  kCompiledNativeStub,
+  kCompiledIntrinsic,
+  kCompiledBytecode,
   kCHAInline,
-  kCompiled,
   kInlinedInvoke,
   kReplacedInvokeWithSimplePattern,
   kInstructionSimplifications,
@@ -94,8 +97,10 @@
   kConstructorFenceRemovedLSE,
   kConstructorFenceRemovedPFRA,
   kConstructorFenceRemovedCFRE,
+  kJitOutOfMemoryForCommit,
   kLastStat
 };
+std::ostream& operator<<(std::ostream& os, const MethodCompilationStat& rhs);
 
 class OptimizingCompilerStats {
  public:
@@ -105,7 +110,15 @@
   }
 
   void RecordStat(MethodCompilationStat stat, uint32_t count = 1) {
-    compile_stats_[stat] += count;
+    size_t stat_index = static_cast<size_t>(stat);
+    DCHECK_LT(stat_index, arraysize(compile_stats_));
+    compile_stats_[stat_index] += count;
+  }
+
+  uint32_t GetStat(MethodCompilationStat stat) const {
+    size_t stat_index = static_cast<size_t>(stat);
+    DCHECK_LT(stat_index, arraysize(compile_stats_));
+    return compile_stats_[stat_index];
   }
 
   void Log() const {
@@ -114,18 +127,29 @@
       return;
     }
 
-    if (compile_stats_[kAttemptCompilation] == 0) {
+    uint32_t compiled_intrinsics = GetStat(MethodCompilationStat::kCompiledIntrinsic);
+    uint32_t compiled_native_stubs = GetStat(MethodCompilationStat::kCompiledNativeStub);
+    uint32_t bytecode_attempts =
+        GetStat(MethodCompilationStat::kAttemptBytecodeCompilation);
+    if (compiled_intrinsics == 0u && compiled_native_stubs == 0u && bytecode_attempts == 0u) {
       LOG(INFO) << "Did not compile any method.";
     } else {
-      float compiled_percent =
-          compile_stats_[kCompiled] * 100.0f / compile_stats_[kAttemptCompilation];
-      LOG(INFO) << "Attempted compilation of " << compile_stats_[kAttemptCompilation]
-          << " methods: " << std::fixed << std::setprecision(2)
-          << compiled_percent << "% (" << compile_stats_[kCompiled] << ") compiled.";
+      uint32_t compiled_bytecode_methods =
+          GetStat(MethodCompilationStat::kCompiledBytecode);
+      // Successful intrinsic compilation preempts other compilation attempts but failed intrinsic
+      // compilation shall still count towards bytecode or native stub compilation attempts.
+      uint32_t num_compilation_attempts =
+          compiled_intrinsics + compiled_native_stubs + bytecode_attempts;
+      uint32_t num_successful_compilations =
+          compiled_intrinsics + compiled_native_stubs + compiled_bytecode_methods;
+      float compiled_percent = num_successful_compilations * 100.0f / num_compilation_attempts;
+      LOG(INFO) << "Attempted compilation of "
+          << num_compilation_attempts << " methods: " << std::fixed << std::setprecision(2)
+          << compiled_percent << "% (" << num_successful_compilations << ") compiled.";
 
-      for (size_t i = 0; i < kLastStat; i++) {
+      for (size_t i = 0; i < arraysize(compile_stats_); ++i) {
         if (compile_stats_[i] != 0) {
-          LOG(INFO) << PrintMethodCompilationStat(static_cast<MethodCompilationStat>(i)) << ": "
+          LOG(INFO) << "OptStat#" << static_cast<MethodCompilationStat>(i) << ": "
               << compile_stats_[i];
         }
       }
@@ -133,7 +157,7 @@
   }
 
   void AddTo(OptimizingCompilerStats* other_stats) {
-    for (size_t i = 0; i != kLastStat; ++i) {
+    for (size_t i = 0; i != arraysize(compile_stats_); ++i) {
       uint32_t count = compile_stats_[i];
       if (count != 0) {
         other_stats->RecordStat(static_cast<MethodCompilationStat>(i), count);
@@ -142,91 +166,13 @@
   }
 
   void Reset() {
-    for (size_t i = 0; i != kLastStat; ++i) {
-      compile_stats_[i] = 0u;
+    for (std::atomic<uint32_t>& stat : compile_stats_) {
+      stat = 0u;
     }
   }
 
  private:
-  std::string PrintMethodCompilationStat(MethodCompilationStat stat) const {
-    std::string name;
-    switch (stat) {
-      case kAttemptCompilation : name = "AttemptCompilation"; break;
-      case kCHAInline : name = "CHAInline"; break;
-      case kCompiled : name = "Compiled"; break;
-      case kInlinedInvoke : name = "InlinedInvoke"; break;
-      case kReplacedInvokeWithSimplePattern: name = "ReplacedInvokeWithSimplePattern"; break;
-      case kInstructionSimplifications: name = "InstructionSimplifications"; break;
-      case kInstructionSimplificationsArch: name = "InstructionSimplificationsArch"; break;
-      case kUnresolvedMethod : name = "UnresolvedMethod"; break;
-      case kUnresolvedField : name = "UnresolvedField"; break;
-      case kUnresolvedFieldNotAFastAccess : name = "UnresolvedFieldNotAFastAccess"; break;
-      case kRemovedCheckedCast: name = "RemovedCheckedCast"; break;
-      case kRemovedDeadInstruction: name = "RemovedDeadInstruction"; break;
-      case kRemovedNullCheck: name = "RemovedNullCheck"; break;
-      case kNotCompiledSkipped: name = "NotCompiledSkipped"; break;
-      case kNotCompiledInvalidBytecode: name = "NotCompiledInvalidBytecode"; break;
-      case kNotCompiledThrowCatchLoop : name = "NotCompiledThrowCatchLoop"; break;
-      case kNotCompiledAmbiguousArrayOp : name = "NotCompiledAmbiguousArrayOp"; break;
-      case kNotCompiledHugeMethod : name = "NotCompiledHugeMethod"; break;
-      case kNotCompiledLargeMethodNoBranches : name = "NotCompiledLargeMethodNoBranches"; break;
-      case kNotCompiledMalformedOpcode : name = "NotCompiledMalformedOpcode"; break;
-      case kNotCompiledNoCodegen : name = "NotCompiledNoCodegen"; break;
-      case kNotCompiledPathological : name = "NotCompiledPathological"; break;
-      case kNotCompiledSpaceFilter : name = "NotCompiledSpaceFilter"; break;
-      case kNotCompiledUnhandledInstruction : name = "NotCompiledUnhandledInstruction"; break;
-      case kNotCompiledUnsupportedIsa : name = "NotCompiledUnsupportedIsa"; break;
-      case kNotCompiledVerificationError : name = "NotCompiledVerificationError"; break;
-      case kNotCompiledVerifyAtRuntime : name = "NotCompiledVerifyAtRuntime"; break;
-      case kInlinedMonomorphicCall: name = "InlinedMonomorphicCall"; break;
-      case kInlinedPolymorphicCall: name = "InlinedPolymorphicCall"; break;
-      case kMonomorphicCall: name = "MonomorphicCall"; break;
-      case kPolymorphicCall: name = "PolymorphicCall"; break;
-      case kMegamorphicCall: name = "MegamorphicCall"; break;
-      case kBooleanSimplified : name = "BooleanSimplified"; break;
-      case kIntrinsicRecognized : name = "IntrinsicRecognized"; break;
-      case kLoopInvariantMoved : name = "LoopInvariantMoved"; break;
-      case kLoopVectorized : name = "LoopVectorized"; break;
-      case kLoopVectorizedIdiom : name = "LoopVectorizedIdiom"; break;
-      case kSelectGenerated : name = "SelectGenerated"; break;
-      case kRemovedInstanceOf: name = "RemovedInstanceOf"; break;
-      case kInlinedInvokeVirtualOrInterface: name = "InlinedInvokeVirtualOrInterface"; break;
-      case kImplicitNullCheckGenerated: name = "ImplicitNullCheckGenerated"; break;
-      case kExplicitNullCheckGenerated: name = "ExplicitNullCheckGenerated"; break;
-      case kSimplifyIf: name = "SimplifyIf"; break;
-      case kInstructionSunk: name = "InstructionSunk"; break;
-      case kNotInlinedUnresolvedEntrypoint: name = "NotInlinedUnresolvedEntrypoint"; break;
-      case kNotInlinedDexCache: name = "NotInlinedDexCache"; break;
-      case kNotInlinedStackMaps: name = "NotInlinedStackMaps"; break;
-      case kNotInlinedEnvironmentBudget: name = "NotInlinedEnvironmentBudget"; break;
-      case kNotInlinedInstructionBudget: name = "NotInlinedInstructionBudget"; break;
-      case kNotInlinedLoopWithoutExit: name = "NotInlinedLoopWithoutExit"; break;
-      case kNotInlinedIrreducibleLoop: name = "NotInlinedIrreducibleLoop"; break;
-      case kNotInlinedAlwaysThrows: name = "NotInlinedAlwaysThrows"; break;
-      case kNotInlinedInfiniteLoop: name = "NotInlinedInfiniteLoop"; break;
-      case kNotInlinedTryCatch: name = "NotInlinedTryCatch"; break;
-      case kNotInlinedRegisterAllocator: name = "NotInlinedRegisterAllocator"; break;
-      case kNotInlinedCannotBuild: name = "NotInlinedCannotBuild"; break;
-      case kNotInlinedNotVerified: name = "NotInlinedNotVerified"; break;
-      case kNotInlinedCodeItem: name = "NotInlinedCodeItem"; break;
-      case kNotInlinedWont: name = "NotInlinedWont"; break;
-      case kNotInlinedRecursiveBudget: name = "NotInlinedRecursiveBudget"; break;
-      case kNotInlinedProxy: name = "NotInlinedProxy"; break;
-      case kConstructorFenceGeneratedNew: name = "ConstructorFenceGeneratedNew"; break;
-      case kConstructorFenceGeneratedFinal: name = "ConstructorFenceGeneratedFinal"; break;
-      case kConstructorFenceRemovedLSE: name = "ConstructorFenceRemovedLSE"; break;
-      case kConstructorFenceRemovedPFRA: name = "ConstructorFenceRemovedPFRA"; break;
-      case kConstructorFenceRemovedCFRE: name = "ConstructorFenceRemovedCFRE"; break;
-
-      case kLastStat:
-        LOG(FATAL) << "invalid stat "
-            << static_cast<std::underlying_type<MethodCompilationStat>::type>(stat);
-        UNREACHABLE();
-    }
-    return "OptStat#" + name;
-  }
-
-  std::atomic<uint32_t> compile_stats_[kLastStat];
+  std::atomic<uint32_t> compile_stats_[static_cast<size_t>(MethodCompilationStat::kLastStat)];
 
   DISALLOW_COPY_AND_ASSIGN(OptimizingCompilerStats);
 };