Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/code_generator.cc                  4
-rw-r--r--  compiler/optimizing/inliner.cc                         71
-rw-r--r--  compiler/optimizing/instruction_simplifier.cc          6
-rw-r--r--  compiler/optimizing/instruction_simplifier_arm.cc      4
-rw-r--r--  compiler/optimizing/instruction_simplifier_arm64.cc    4
-rw-r--r--  compiler/optimizing/instruction_simplifier_mips.cc     4
-rw-r--r--  compiler/optimizing/optimizing_compiler.cc             34
-rw-r--r--  compiler/optimizing/optimizing_compiler_stats.h        136
8 files changed, 101 insertions, 162 deletions
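
Note on the shape of this change: most of the churn in the hunks below is mechanical, because MethodCompilationStat is converted from a plain enum to an enum class, so every recording site must spell out the MethodCompilationStat:: qualifier and the stats array must be indexed through an explicit cast. A minimal standalone illustration of that language-level difference follows; it reuses the enum name and one enumerator from this diff but is not the real ART declaration, and the uint32_t underlying type is assumed for the sketch only.

    #include <cstdint>

    // Scoped enums do not leak enumerator names into the enclosing scope and
    // do not implicitly convert to integers; both properties drive the edits
    // in this diff (the added "MethodCompilationStat::" prefixes and the
    // static_cast<size_t> in RecordStat/GetStat).
    enum class MethodCompilationStat : uint32_t { kCHAInline, kLastStat };

    int main() {
      MethodCompilationStat stat = MethodCompilationStat::kCHAInline;  // qualifier required
      uint32_t index = static_cast<uint32_t>(stat);                    // explicit cast required
      return static_cast<int>(index);
    }
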
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index 0bd3ce937a..aff6f9f64f 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -1411,10 +1411,10 @@ LocationSummary* CodeGenerator::CreateThrowingSlowPathLocations(HInstruction* in
void CodeGenerator::GenerateNullCheck(HNullCheck* instruction) {
if (compiler_options_.GetImplicitNullChecks()) {
- MaybeRecordStat(stats_, kImplicitNullCheckGenerated);
+ MaybeRecordStat(stats_, MethodCompilationStat::kImplicitNullCheckGenerated);
GenerateImplicitNullCheck(instruction);
} else {
- MaybeRecordStat(stats_, kExplicitNullCheckGenerated);
+ MaybeRecordStat(stats_, MethodCompilationStat::kExplicitNullCheckGenerated);
GenerateExplicitNullCheck(instruction);
}
}
diff --git a/compiler/optimizing/inliner.cc b/compiler/optimizing/inliner.cc
index 1eb1f2e46b..2444e43d64 100644
--- a/compiler/optimizing/inliner.cc
+++ b/compiler/optimizing/inliner.cc
@@ -441,9 +441,9 @@ bool HInliner::TryInline(HInvoke* invoke_instruction) {
// Add dependency due to devirtualization. We've assumed resolved_method
// has a single implementation.
outermost_graph_->AddCHASingleImplementationDependency(resolved_method);
- MaybeRecordStat(stats_, kCHAInline);
+ MaybeRecordStat(stats_, MethodCompilationStat::kCHAInline);
} else {
- MaybeRecordStat(stats_, kInlinedInvokeVirtualOrInterface);
+ MaybeRecordStat(stats_, MethodCompilationStat::kInlinedInvokeVirtualOrInterface);
}
}
return result;
@@ -533,7 +533,7 @@ bool HInliner::TryInlineFromInlineCache(const DexFile& caller_dex_file,
}
case kInlineCacheMonomorphic: {
- MaybeRecordStat(stats_, kMonomorphicCall);
+ MaybeRecordStat(stats_, MethodCompilationStat::kMonomorphicCall);
if (UseOnlyPolymorphicInliningWithNoDeopt()) {
return TryInlinePolymorphicCall(invoke_instruction, resolved_method, inline_cache);
} else {
@@ -542,7 +542,7 @@ bool HInliner::TryInlineFromInlineCache(const DexFile& caller_dex_file,
}
case kInlineCachePolymorphic: {
- MaybeRecordStat(stats_, kPolymorphicCall);
+ MaybeRecordStat(stats_, MethodCompilationStat::kPolymorphicCall);
return TryInlinePolymorphicCall(invoke_instruction, resolved_method, inline_cache);
}
@@ -551,7 +551,7 @@ bool HInliner::TryInlineFromInlineCache(const DexFile& caller_dex_file,
<< "Interface or virtual call to "
<< caller_dex_file.PrettyMethod(invoke_instruction->GetDexMethodIndex())
<< " is megamorphic and not inlined";
- MaybeRecordStat(stats_, kMegamorphicCall);
+ MaybeRecordStat(stats_, MethodCompilationStat::kMegamorphicCall);
return false;
}
@@ -755,7 +755,7 @@ bool HInliner::TryInlineMonomorphicCall(HInvoke* invoke_instruction,
dex::TypeIndex class_index = FindClassIndexIn(
GetMonomorphicType(classes), caller_compilation_unit_);
if (!class_index.IsValid()) {
- LOG_FAIL(stats_, kNotInlinedDexCache)
+ LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedDexCache)
<< "Call to " << ArtMethod::PrettyMethod(resolved_method)
<< " from inline cache is not inlined because its class is not"
<< " accessible to the caller";
@@ -804,7 +804,7 @@ bool HInliner::TryInlineMonomorphicCall(HInvoke* invoke_instruction,
/* is_first_run */ false);
rtp_fixup.Run();
- MaybeRecordStat(stats_, kInlinedMonomorphicCall);
+ MaybeRecordStat(stats_, MethodCompilationStat::kInlinedMonomorphicCall);
return true;
}
@@ -994,7 +994,7 @@ bool HInliner::TryInlinePolymorphicCall(HInvoke* invoke_instruction,
return false;
}
- MaybeRecordStat(stats_, kInlinedPolymorphicCall);
+ MaybeRecordStat(stats_, MethodCompilationStat::kInlinedPolymorphicCall);
// Run type propagation to get the guards typed.
ReferenceTypePropagation rtp_fixup(graph_,
@@ -1200,7 +1200,7 @@ bool HInliner::TryInlinePolymorphicCallToSameTarget(
/* is_first_run */ false);
rtp_fixup.Run();
- MaybeRecordStat(stats_, kInlinedPolymorphicCall);
+ MaybeRecordStat(stats_, MethodCompilationStat::kInlinedPolymorphicCall);
LOG_SUCCESS() << "Inlined same polymorphic target " << actual_method->PrettyMethod();
return true;
@@ -1263,7 +1263,7 @@ bool HInliner::TryInlineAndReplace(HInvoke* invoke_instruction,
// these relatively rare cases.
bool wrong_invoke_type = false;
if (IntrinsicsRecognizer::Recognize(new_invoke, &wrong_invoke_type)) {
- MaybeRecordStat(stats_, kIntrinsicRecognized);
+ MaybeRecordStat(stats_, MethodCompilationStat::kIntrinsicRecognized);
}
} else {
// TODO: Consider sharpening an invoke virtual once it is not dependent on the
@@ -1308,14 +1308,14 @@ bool HInliner::TryBuildAndInline(HInvoke* invoke_instruction,
ReferenceTypeInfo receiver_type,
HInstruction** return_replacement) {
if (method->IsProxyMethod()) {
- LOG_FAIL(stats_, kNotInlinedProxy)
+ LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedProxy)
<< "Method " << method->PrettyMethod()
<< " is not inlined because of unimplemented inline support for proxy methods.";
return false;
}
if (CountRecursiveCallsOf(method) > kMaximumNumberOfRecursiveCalls) {
- LOG_FAIL(stats_, kNotInlinedRecursiveBudget)
+ LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedRecursiveBudget)
<< "Method "
<< method->PrettyMethod()
<< " is not inlined because it has reached its recursive call budget.";
@@ -1329,10 +1329,10 @@ bool HInliner::TryBuildAndInline(HInvoke* invoke_instruction,
if (TryPatternSubstitution(invoke_instruction, method, return_replacement)) {
LOG_SUCCESS() << "Successfully replaced pattern of invoke "
<< method->PrettyMethod();
- MaybeRecordStat(stats_, kReplacedInvokeWithSimplePattern);
+ MaybeRecordStat(stats_, MethodCompilationStat::kReplacedInvokeWithSimplePattern);
return true;
}
- LOG_FAIL(stats_, kNotInlinedWont)
+ LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedWont)
<< "Won't inline " << method->PrettyMethod() << " in "
<< outer_compilation_unit_.GetDexFile()->GetLocation() << " ("
<< caller_compilation_unit_.GetDexFile()->GetLocation() << ") from "
@@ -1352,7 +1352,7 @@ bool HInliner::TryBuildAndInline(HInvoke* invoke_instruction,
size_t inline_max_code_units = compiler_driver_->GetCompilerOptions().GetInlineMaxCodeUnits();
if (code_item->insns_size_in_code_units_ > inline_max_code_units) {
- LOG_FAIL(stats_, kNotInlinedCodeItem)
+ LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedCodeItem)
<< "Method " << method->PrettyMethod()
<< " is not inlined because its code item is too big: "
<< code_item->insns_size_in_code_units_
@@ -1362,13 +1362,13 @@ bool HInliner::TryBuildAndInline(HInvoke* invoke_instruction,
}
if (code_item->tries_size_ != 0) {
- LOG_FAIL(stats_, kNotInlinedTryCatch)
+ LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedTryCatch)
<< "Method " << method->PrettyMethod() << " is not inlined because of try block";
return false;
}
if (!method->IsCompilable()) {
- LOG_FAIL(stats_, kNotInlinedNotVerified)
+ LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedNotVerified)
<< "Method " << method->PrettyMethod()
<< " has soft failures un-handled by the compiler, so it cannot be inlined";
}
@@ -1378,7 +1378,7 @@ bool HInliner::TryBuildAndInline(HInvoke* invoke_instruction,
if (Runtime::Current()->UseJitCompilation() ||
!compiler_driver_->IsMethodVerifiedWithoutFailures(
method->GetDexMethodIndex(), class_def_idx, *method->GetDexFile())) {
- LOG_FAIL(stats_, kNotInlinedNotVerified)
+ LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedNotVerified)
<< "Method " << method->PrettyMethod()
<< " couldn't be verified, so it cannot be inlined";
return false;
@@ -1389,9 +1389,10 @@ bool HInliner::TryBuildAndInline(HInvoke* invoke_instruction,
invoke_instruction->AsInvokeStaticOrDirect()->IsStaticWithImplicitClinitCheck()) {
// Case of a static method that cannot be inlined because it implicitly
// requires an initialization check of its declaring class.
- LOG_FAIL(stats_, kNotInlinedDexCache) << "Method " << method->PrettyMethod()
- << " is not inlined because it is static and requires a clinit"
- << " check that cannot be emitted due to Dex cache limitations";
+ LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedDexCache)
+ << "Method " << method->PrettyMethod()
+ << " is not inlined because it is static and requires a clinit"
+ << " check that cannot be emitted due to Dex cache limitations";
return false;
}
@@ -1401,7 +1402,7 @@ bool HInliner::TryBuildAndInline(HInvoke* invoke_instruction,
}
LOG_SUCCESS() << method->PrettyMethod();
- MaybeRecordStat(stats_, kInlinedInvoke);
+ MaybeRecordStat(stats_, MethodCompilationStat::kInlinedInvoke);
return true;
}
@@ -1684,7 +1685,7 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction,
handles_);
if (builder.BuildGraph() != kAnalysisSuccess) {
- LOG_FAIL(stats_, kNotInlinedCannotBuild)
+ LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedCannotBuild)
<< "Method " << callee_dex_file.PrettyMethod(method_index)
<< " could not be built, so cannot be inlined";
return false;
@@ -1692,7 +1693,7 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction,
if (!RegisterAllocator::CanAllocateRegistersFor(*callee_graph,
compiler_driver_->GetInstructionSet())) {
- LOG_FAIL(stats_, kNotInlinedRegisterAllocator)
+ LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedRegisterAllocator)
<< "Method " << callee_dex_file.PrettyMethod(method_index)
<< " cannot be inlined because of the register allocator";
return false;
@@ -1745,7 +1746,7 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction,
HBasicBlock* exit_block = callee_graph->GetExitBlock();
if (exit_block == nullptr) {
- LOG_FAIL(stats_, kNotInlinedInfiniteLoop)
+ LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedInfiniteLoop)
<< "Method " << callee_dex_file.PrettyMethod(method_index)
<< " could not be inlined because it has an infinite loop";
return false;
@@ -1756,14 +1757,14 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction,
if (predecessor->GetLastInstruction()->IsThrow()) {
if (invoke_instruction->GetBlock()->IsTryBlock()) {
// TODO(ngeoffray): Support adding HTryBoundary in Hgraph::InlineInto.
- LOG_FAIL(stats_, kNotInlinedTryCatch)
+ LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedTryCatch)
<< "Method " << callee_dex_file.PrettyMethod(method_index)
<< " could not be inlined because one branch always throws and"
<< " caller is in a try/catch block";
return false;
} else if (graph_->GetExitBlock() == nullptr) {
// TODO(ngeoffray): Support adding HExit in the caller graph.
- LOG_FAIL(stats_, kNotInlinedInfiniteLoop)
+ LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedInfiniteLoop)
<< "Method " << callee_dex_file.PrettyMethod(method_index)
<< " could not be inlined because one branch always throws and"
<< " caller does not have an exit block";
@@ -1782,7 +1783,7 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction,
}
if (!has_one_return) {
- LOG_FAIL(stats_, kNotInlinedAlwaysThrows)
+ LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedAlwaysThrows)
<< "Method " << callee_dex_file.PrettyMethod(method_index)
<< " could not be inlined because it always throws";
return false;
@@ -1795,7 +1796,7 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction,
if (block->GetLoopInformation()->IsIrreducible()) {
// Don't inline methods with irreducible loops, they could prevent some
// optimizations to run.
- LOG_FAIL(stats_, kNotInlinedIrreducibleLoop)
+ LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedIrreducibleLoop)
<< "Method " << callee_dex_file.PrettyMethod(method_index)
<< " could not be inlined because it contains an irreducible loop";
return false;
@@ -1804,7 +1805,7 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction,
// Don't inline methods with loops without exit, since they cause the
// loop information to be computed incorrectly when updating after
// inlining.
- LOG_FAIL(stats_, kNotInlinedLoopWithoutExit)
+ LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedLoopWithoutExit)
<< "Method " << callee_dex_file.PrettyMethod(method_index)
<< " could not be inlined because it contains a loop with no exit";
return false;
@@ -1815,7 +1816,7 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction,
!instr_it.Done();
instr_it.Advance()) {
if (++number_of_instructions >= inlining_budget_) {
- LOG_FAIL(stats_, kNotInlinedInstructionBudget)
+ LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedInstructionBudget)
<< "Method " << callee_dex_file.PrettyMethod(method_index)
<< " is not inlined because the outer method has reached"
<< " its instruction budget limit.";
@@ -1824,7 +1825,7 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction,
HInstruction* current = instr_it.Current();
if (current->NeedsEnvironment() &&
(total_number_of_dex_registers_ >= kMaximumNumberOfCumulatedDexRegisters)) {
- LOG_FAIL(stats_, kNotInlinedEnvironmentBudget)
+ LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedEnvironmentBudget)
<< "Method " << callee_dex_file.PrettyMethod(method_index)
<< " is not inlined because its caller has reached"
<< " its environment budget limit.";
@@ -1834,7 +1835,7 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction,
if (current->NeedsEnvironment() &&
!CanEncodeInlinedMethodInStackMap(*caller_compilation_unit_.GetDexFile(),
resolved_method)) {
- LOG_FAIL(stats_, kNotInlinedStackMaps)
+ LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedStackMaps)
<< "Method " << callee_dex_file.PrettyMethod(method_index)
<< " could not be inlined because " << current->DebugName()
<< " needs an environment, is in a different dex file"
@@ -1843,7 +1844,7 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction,
}
if (!same_dex_file && current->NeedsDexCacheOfDeclaringClass()) {
- LOG_FAIL(stats_, kNotInlinedDexCache)
+ LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedDexCache)
<< "Method " << callee_dex_file.PrettyMethod(method_index)
<< " could not be inlined because " << current->DebugName()
<< " it is in a different dex file and requires access to the dex cache";
@@ -1855,7 +1856,7 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction,
current->IsUnresolvedStaticFieldSet() ||
current->IsUnresolvedInstanceFieldSet()) {
// Entrypoint for unresolved fields does not handle inlined frames.
- LOG_FAIL(stats_, kNotInlinedUnresolvedEntrypoint)
+ LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedUnresolvedEntrypoint)
<< "Method " << callee_dex_file.PrettyMethod(method_index)
<< " could not be inlined because it is using an unresolved"
<< " entrypoint";
diff --git a/compiler/optimizing/instruction_simplifier.cc b/compiler/optimizing/instruction_simplifier.cc
index 4c18e16c48..7fa0c2be3d 100644
--- a/compiler/optimizing/instruction_simplifier.cc
+++ b/compiler/optimizing/instruction_simplifier.cc
@@ -48,7 +48,7 @@ class InstructionSimplifierVisitor : public HGraphDelegateVisitor {
void RecordSimplification() {
simplification_occurred_ = true;
simplifications_at_current_position_++;
- MaybeRecordStat(stats_, kInstructionSimplifications);
+ MaybeRecordStat(stats_, MethodCompilationStat::kInstructionSimplifications);
}
bool ReplaceRotateWithRor(HBinaryOperation* op, HUShr* ushr, HShl* shl);
@@ -663,7 +663,7 @@ void InstructionSimplifierVisitor::VisitInstanceOf(HInstanceOf* instruction) {
HGraph* graph = GetGraph();
if (object->IsNullConstant()) {
- MaybeRecordStat(stats_, kRemovedInstanceOf);
+ MaybeRecordStat(stats_, MethodCompilationStat::kRemovedInstanceOf);
instruction->ReplaceWith(graph->GetIntConstant(0));
instruction->GetBlock()->RemoveInstruction(instruction);
RecordSimplification();
@@ -674,7 +674,7 @@ void InstructionSimplifierVisitor::VisitInstanceOf(HInstanceOf* instruction) {
// the return value check with the `outcome` check, b/27651442 .
bool outcome = false;
if (TypeCheckHasKnownOutcome(load_class, object, &outcome)) {
- MaybeRecordStat(stats_, kRemovedInstanceOf);
+ MaybeRecordStat(stats_, MethodCompilationStat::kRemovedInstanceOf);
if (outcome && can_be_null) {
// Type test will succeed, we just need a null test.
HNotEqual* test = new (graph->GetAllocator()) HNotEqual(graph->GetNullConstant(), object);
diff --git a/compiler/optimizing/instruction_simplifier_arm.cc b/compiler/optimizing/instruction_simplifier_arm.cc
index d41e49a0f3..92081e30b1 100644
--- a/compiler/optimizing/instruction_simplifier_arm.cc
+++ b/compiler/optimizing/instruction_simplifier_arm.cc
@@ -37,9 +37,7 @@ class InstructionSimplifierArmVisitor : public HGraphVisitor {
private:
void RecordSimplification() {
- if (stats_ != nullptr) {
- stats_->RecordStat(kInstructionSimplificationsArch);
- }
+ MaybeRecordStat(stats_, MethodCompilationStat::kInstructionSimplificationsArch);
}
bool TryMergeIntoUsersShifterOperand(HInstruction* instruction);
diff --git a/compiler/optimizing/instruction_simplifier_arm64.cc b/compiler/optimizing/instruction_simplifier_arm64.cc
index 69e1463ac4..1c44e5ac49 100644
--- a/compiler/optimizing/instruction_simplifier_arm64.cc
+++ b/compiler/optimizing/instruction_simplifier_arm64.cc
@@ -37,9 +37,7 @@ class InstructionSimplifierArm64Visitor : public HGraphVisitor {
private:
void RecordSimplification() {
- if (stats_ != nullptr) {
- stats_->RecordStat(kInstructionSimplificationsArch);
- }
+ MaybeRecordStat(stats_, MethodCompilationStat::kInstructionSimplificationsArch);
}
bool TryMergeIntoUsersShifterOperand(HInstruction* instruction);
diff --git a/compiler/optimizing/instruction_simplifier_mips.cc b/compiler/optimizing/instruction_simplifier_mips.cc
index 6a0d8a60c4..fa97401a0c 100644
--- a/compiler/optimizing/instruction_simplifier_mips.cc
+++ b/compiler/optimizing/instruction_simplifier_mips.cc
@@ -33,9 +33,7 @@ class InstructionSimplifierMipsVisitor : public HGraphVisitor {
private:
void RecordSimplification() {
- if (stats_ != nullptr) {
- stats_->RecordStat(kInstructionSimplificationsArch);
- }
+ MaybeRecordStat(stats_, MethodCompilationStat::kInstructionSimplificationsArch);
}
bool TryExtractArrayAccessIndex(HInstruction* access,
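
The three architecture-specific simplifiers above all replace an open-coded "if (stats_ != nullptr) stats_->RecordStat(...)" guard with a call to MaybeRecordStat. The helper itself is not shown in this diff; the sketch below is a standalone approximation of the pattern, with simplified stand-in types rather than the real ART declarations.

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    // Stand-ins for the stat enum and stats holder from optimizing_compiler_stats.h;
    // the real declarations differ in detail.
    enum class MethodCompilationStat { kInstructionSimplificationsArch, kLastStat };

    struct OptimizingCompilerStats {
      std::atomic<uint32_t> compile_stats[static_cast<std::size_t>(MethodCompilationStat::kLastStat)] = {};
      void RecordStat(MethodCompilationStat stat, uint32_t count = 1) {
        compile_stats[static_cast<std::size_t>(stat)] += count;
      }
    };

    // Null-tolerant wrapper: recording sites no longer repeat the
    // "if (stats_ != nullptr)" check, and passing a null stats pointer
    // simply makes the call a no-op.
    inline void MaybeRecordStat(OptimizingCompilerStats* stats,
                                MethodCompilationStat stat,
                                uint32_t count = 1) {
      if (stats != nullptr) {
        stats->RecordStat(stat, count);
      }
    }

    int main() {
      OptimizingCompilerStats stats;
      MaybeRecordStat(&stats, MethodCompilationStat::kInstructionSimplificationsArch);
      MaybeRecordStat(nullptr, MethodCompilationStat::kInstructionSimplificationsArch);  // safe no-op
      return 0;
    }
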
diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc
index 53f9ec413b..095ca6372e 100644
--- a/compiler/optimizing/optimizing_compiler.cc
+++ b/compiler/optimizing/optimizing_compiler.cc
@@ -738,7 +738,7 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* allocator,
ArtMethod* method,
bool osr,
VariableSizedHandleScope* handles) const {
- MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kAttemptCompilation);
+ MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kAttemptBytecodeCompilation);
CompilerDriver* compiler_driver = GetCompilerDriver();
InstructionSet instruction_set = compiler_driver->GetInstructionSet();
const DexFile& dex_file = *dex_compilation_unit.GetDexFile();
@@ -757,8 +757,7 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* allocator,
}
if (Compiler::IsPathologicalCase(*code_item, method_idx, dex_file)) {
- MaybeRecordStat(compilation_stats_.get(),
- MethodCompilationStat::kNotCompiledPathological);
+ MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kNotCompiledPathological);
return nullptr;
}
@@ -768,8 +767,7 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* allocator,
const CompilerOptions& compiler_options = compiler_driver->GetCompilerOptions();
if ((compiler_options.GetCompilerFilter() == CompilerFilter::kSpace)
&& (code_item->insns_size_in_code_units_ > kSpaceFilterOptimizingThreshold)) {
- MaybeRecordStat(compilation_stats_.get(),
- MethodCompilationStat::kNotCompiledSpaceFilter);
+ MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kNotCompiledSpaceFilter);
return nullptr;
}
@@ -800,8 +798,7 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* allocator,
compiler_driver->GetCompilerOptions(),
compilation_stats_.get()));
if (codegen.get() == nullptr) {
- MaybeRecordStat(compilation_stats_.get(),
- MethodCompilationStat::kNotCompiledNoCodegen);
+ MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kNotCompiledNoCodegen);
return nullptr;
}
codegen->GetAssembler()->cfi().SetEnabled(
@@ -873,6 +870,7 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* allocator,
codegen->Compile(code_allocator);
pass_observer.DumpDisassembly();
+ MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kCompiledBytecode);
return codegen.release();
}
@@ -883,6 +881,7 @@ CodeGenerator* OptimizingCompiler::TryCompileIntrinsic(
const DexCompilationUnit& dex_compilation_unit,
ArtMethod* method,
VariableSizedHandleScope* handles) const {
+ MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kAttemptIntrinsicCompilation);
CompilerDriver* compiler_driver = GetCompilerDriver();
InstructionSet instruction_set = compiler_driver->GetInstructionSet();
const DexFile& dex_file = *dex_compilation_unit.GetDexFile();
@@ -894,8 +893,6 @@ CodeGenerator* OptimizingCompiler::TryCompileIntrinsic(
// Do not attempt to compile on architectures we do not support.
if (!IsInstructionSetSupported(instruction_set)) {
- MaybeRecordStat(compilation_stats_.get(),
- MethodCompilationStat::kNotCompiledUnsupportedIsa);
return nullptr;
}
@@ -920,8 +917,6 @@ CodeGenerator* OptimizingCompiler::TryCompileIntrinsic(
compiler_driver->GetCompilerOptions(),
compilation_stats_.get()));
if (codegen.get() == nullptr) {
- MaybeRecordStat(compilation_stats_.get(),
- MethodCompilationStat::kNotCompiledNoCodegen);
return nullptr;
}
codegen->GetAssembler()->cfi().SetEnabled(
@@ -979,6 +974,7 @@ CodeGenerator* OptimizingCompiler::TryCompileIntrinsic(
VLOG(compiler) << "Compiled intrinsic: " << method->GetIntrinsic()
<< " " << graph->PrettyMethod();
+ MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kCompiledIntrinsic);
return codegen.release();
}
@@ -1046,8 +1042,6 @@ CompiledMethod* OptimizingCompiler::Compile(const DexFile::CodeItem* code_item,
}
}
if (codegen.get() != nullptr) {
- MaybeRecordStat(compilation_stats_.get(),
- MethodCompilationStat::kCompiled);
compiled_method = Emit(&allocator,
&code_allocator,
codegen.get(),
@@ -1139,10 +1133,12 @@ CompiledMethod* OptimizingCompiler::JniCompile(uint32_t access_flags,
}
}
- return ArtQuickJniCompileMethod(GetCompilerDriver(),
- access_flags,
- method_idx,
- dex_file);
+ CompiledMethod* compiled_method = ArtQuickJniCompileMethod(GetCompilerDriver(),
+ access_flags,
+ method_idx,
+ dex_file);
+ MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kCompiledNativeStub);
+ return compiled_method;
}
Compiler* CreateOptimizingCompiler(CompilerDriver* driver) {
@@ -1237,6 +1233,7 @@ bool OptimizingCompiler::JitCompile(Thread* self,
self, class_linker->GetClassRoot(ClassLinker::kObjectArrayClass), number_of_roots)));
if (roots == nullptr) {
// Out of memory, just clear the exception to avoid any Java exception uncaught problems.
+ MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kJitOutOfMemoryForCommit);
DCHECK(self->IsExceptionPending());
self->ClearException();
return false;
@@ -1253,9 +1250,9 @@ bool OptimizingCompiler::JitCompile(Thread* self,
&method_info_data,
&roots_data);
if (stack_map_data == nullptr || roots_data == nullptr) {
+ MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kJitOutOfMemoryForCommit);
return false;
}
- MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kCompiled);
codegen->BuildStackMaps(MemoryRegion(stack_map_data, stack_map_size),
MemoryRegion(method_info_data, method_info_size),
code_item);
@@ -1279,6 +1276,7 @@ bool OptimizingCompiler::JitCompile(Thread* self,
codegen->GetGraph()->GetCHASingleImplementationList());
if (code == nullptr) {
+ MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kJitOutOfMemoryForCommit);
code_cache->ClearData(self, stack_map_data, roots_data);
return false;
}
diff --git a/compiler/optimizing/optimizing_compiler_stats.h b/compiler/optimizing/optimizing_compiler_stats.h
index 07f9635aba..a2e92d2931 100644
--- a/compiler/optimizing/optimizing_compiler_stats.h
+++ b/compiler/optimizing/optimizing_compiler_stats.h
@@ -27,10 +27,13 @@
namespace art {
-enum MethodCompilationStat {
- kAttemptCompilation = 0,
+enum class MethodCompilationStat {
+ kAttemptBytecodeCompilation = 0,
+ kAttemptIntrinsicCompilation,
+ kCompiledNativeStub,
+ kCompiledIntrinsic,
+ kCompiledBytecode,
kCHAInline,
- kCompiled,
kInlinedInvoke,
kReplacedInvokeWithSimplePattern,
kInstructionSimplifications,
@@ -94,8 +97,10 @@ enum MethodCompilationStat {
kConstructorFenceRemovedLSE,
kConstructorFenceRemovedPFRA,
kConstructorFenceRemovedCFRE,
+ kJitOutOfMemoryForCommit,
kLastStat
};
+std::ostream& operator<<(std::ostream& os, const MethodCompilationStat& rhs);
class OptimizingCompilerStats {
public:
@@ -105,7 +110,15 @@ class OptimizingCompilerStats {
}
void RecordStat(MethodCompilationStat stat, uint32_t count = 1) {
- compile_stats_[stat] += count;
+ size_t stat_index = static_cast<size_t>(stat);
+ DCHECK_LT(stat_index, arraysize(compile_stats_));
+ compile_stats_[stat_index] += count;
+ }
+
+ uint32_t GetStat(MethodCompilationStat stat) const {
+ size_t stat_index = static_cast<size_t>(stat);
+ DCHECK_LT(stat_index, arraysize(compile_stats_));
+ return compile_stats_[stat_index];
}
void Log() const {
@@ -114,18 +127,29 @@ class OptimizingCompilerStats {
return;
}
- if (compile_stats_[kAttemptCompilation] == 0) {
+ uint32_t compiled_intrinsics = GetStat(MethodCompilationStat::kCompiledIntrinsic);
+ uint32_t compiled_native_stubs = GetStat(MethodCompilationStat::kCompiledNativeStub);
+ uint32_t bytecode_attempts =
+ GetStat(MethodCompilationStat::kAttemptBytecodeCompilation);
+ if (compiled_intrinsics == 0u && compiled_native_stubs == 0u && bytecode_attempts == 0u) {
LOG(INFO) << "Did not compile any method.";
} else {
- float compiled_percent =
- compile_stats_[kCompiled] * 100.0f / compile_stats_[kAttemptCompilation];
- LOG(INFO) << "Attempted compilation of " << compile_stats_[kAttemptCompilation]
- << " methods: " << std::fixed << std::setprecision(2)
- << compiled_percent << "% (" << compile_stats_[kCompiled] << ") compiled.";
-
- for (size_t i = 0; i < kLastStat; i++) {
+ uint32_t compiled_bytecode_methods =
+ GetStat(MethodCompilationStat::kCompiledBytecode);
+ // Successful intrinsic compilation preempts other compilation attempts but failed intrinsic
+ // compilation shall still count towards bytecode or native stub compilation attempts.
+ uint32_t num_compilation_attempts =
+ compiled_intrinsics + compiled_native_stubs + bytecode_attempts;
+ uint32_t num_successful_compilations =
+ compiled_intrinsics + compiled_native_stubs + compiled_bytecode_methods;
+ float compiled_percent = num_successful_compilations * 100.0f / num_compilation_attempts;
+ LOG(INFO) << "Attempted compilation of "
+ << num_compilation_attempts << " methods: " << std::fixed << std::setprecision(2)
+ << compiled_percent << "% (" << num_successful_compilations << ") compiled.";
+
+ for (size_t i = 0; i < arraysize(compile_stats_); ++i) {
if (compile_stats_[i] != 0) {
- LOG(INFO) << PrintMethodCompilationStat(static_cast<MethodCompilationStat>(i)) << ": "
+ LOG(INFO) << "OptStat#" << static_cast<MethodCompilationStat>(i) << ": "
<< compile_stats_[i];
}
}
@@ -133,7 +157,7 @@ class OptimizingCompilerStats {
}
void AddTo(OptimizingCompilerStats* other_stats) {
- for (size_t i = 0; i != kLastStat; ++i) {
+ for (size_t i = 0; i != arraysize(compile_stats_); ++i) {
uint32_t count = compile_stats_[i];
if (count != 0) {
other_stats->RecordStat(static_cast<MethodCompilationStat>(i), count);
@@ -142,91 +166,13 @@ class OptimizingCompilerStats {
}
void Reset() {
- for (size_t i = 0; i != kLastStat; ++i) {
- compile_stats_[i] = 0u;
+ for (std::atomic<uint32_t>& stat : compile_stats_) {
+ stat = 0u;
}
}
private:
- std::string PrintMethodCompilationStat(MethodCompilationStat stat) const {
- std::string name;
- switch (stat) {
- case kAttemptCompilation : name = "AttemptCompilation"; break;
- case kCHAInline : name = "CHAInline"; break;
- case kCompiled : name = "Compiled"; break;
- case kInlinedInvoke : name = "InlinedInvoke"; break;
- case kReplacedInvokeWithSimplePattern: name = "ReplacedInvokeWithSimplePattern"; break;
- case kInstructionSimplifications: name = "InstructionSimplifications"; break;
- case kInstructionSimplificationsArch: name = "InstructionSimplificationsArch"; break;
- case kUnresolvedMethod : name = "UnresolvedMethod"; break;
- case kUnresolvedField : name = "UnresolvedField"; break;
- case kUnresolvedFieldNotAFastAccess : name = "UnresolvedFieldNotAFastAccess"; break;
- case kRemovedCheckedCast: name = "RemovedCheckedCast"; break;
- case kRemovedDeadInstruction: name = "RemovedDeadInstruction"; break;
- case kRemovedNullCheck: name = "RemovedNullCheck"; break;
- case kNotCompiledSkipped: name = "NotCompiledSkipped"; break;
- case kNotCompiledInvalidBytecode: name = "NotCompiledInvalidBytecode"; break;
- case kNotCompiledThrowCatchLoop : name = "NotCompiledThrowCatchLoop"; break;
- case kNotCompiledAmbiguousArrayOp : name = "NotCompiledAmbiguousArrayOp"; break;
- case kNotCompiledHugeMethod : name = "NotCompiledHugeMethod"; break;
- case kNotCompiledLargeMethodNoBranches : name = "NotCompiledLargeMethodNoBranches"; break;
- case kNotCompiledMalformedOpcode : name = "NotCompiledMalformedOpcode"; break;
- case kNotCompiledNoCodegen : name = "NotCompiledNoCodegen"; break;
- case kNotCompiledPathological : name = "NotCompiledPathological"; break;
- case kNotCompiledSpaceFilter : name = "NotCompiledSpaceFilter"; break;
- case kNotCompiledUnhandledInstruction : name = "NotCompiledUnhandledInstruction"; break;
- case kNotCompiledUnsupportedIsa : name = "NotCompiledUnsupportedIsa"; break;
- case kNotCompiledVerificationError : name = "NotCompiledVerificationError"; break;
- case kNotCompiledVerifyAtRuntime : name = "NotCompiledVerifyAtRuntime"; break;
- case kInlinedMonomorphicCall: name = "InlinedMonomorphicCall"; break;
- case kInlinedPolymorphicCall: name = "InlinedPolymorphicCall"; break;
- case kMonomorphicCall: name = "MonomorphicCall"; break;
- case kPolymorphicCall: name = "PolymorphicCall"; break;
- case kMegamorphicCall: name = "MegamorphicCall"; break;
- case kBooleanSimplified : name = "BooleanSimplified"; break;
- case kIntrinsicRecognized : name = "IntrinsicRecognized"; break;
- case kLoopInvariantMoved : name = "LoopInvariantMoved"; break;
- case kLoopVectorized : name = "LoopVectorized"; break;
- case kLoopVectorizedIdiom : name = "LoopVectorizedIdiom"; break;
- case kSelectGenerated : name = "SelectGenerated"; break;
- case kRemovedInstanceOf: name = "RemovedInstanceOf"; break;
- case kInlinedInvokeVirtualOrInterface: name = "InlinedInvokeVirtualOrInterface"; break;
- case kImplicitNullCheckGenerated: name = "ImplicitNullCheckGenerated"; break;
- case kExplicitNullCheckGenerated: name = "ExplicitNullCheckGenerated"; break;
- case kSimplifyIf: name = "SimplifyIf"; break;
- case kInstructionSunk: name = "InstructionSunk"; break;
- case kNotInlinedUnresolvedEntrypoint: name = "NotInlinedUnresolvedEntrypoint"; break;
- case kNotInlinedDexCache: name = "NotInlinedDexCache"; break;
- case kNotInlinedStackMaps: name = "NotInlinedStackMaps"; break;
- case kNotInlinedEnvironmentBudget: name = "NotInlinedEnvironmentBudget"; break;
- case kNotInlinedInstructionBudget: name = "NotInlinedInstructionBudget"; break;
- case kNotInlinedLoopWithoutExit: name = "NotInlinedLoopWithoutExit"; break;
- case kNotInlinedIrreducibleLoop: name = "NotInlinedIrreducibleLoop"; break;
- case kNotInlinedAlwaysThrows: name = "NotInlinedAlwaysThrows"; break;
- case kNotInlinedInfiniteLoop: name = "NotInlinedInfiniteLoop"; break;
- case kNotInlinedTryCatch: name = "NotInlinedTryCatch"; break;
- case kNotInlinedRegisterAllocator: name = "NotInlinedRegisterAllocator"; break;
- case kNotInlinedCannotBuild: name = "NotInlinedCannotBuild"; break;
- case kNotInlinedNotVerified: name = "NotInlinedNotVerified"; break;
- case kNotInlinedCodeItem: name = "NotInlinedCodeItem"; break;
- case kNotInlinedWont: name = "NotInlinedWont"; break;
- case kNotInlinedRecursiveBudget: name = "NotInlinedRecursiveBudget"; break;
- case kNotInlinedProxy: name = "NotInlinedProxy"; break;
- case kConstructorFenceGeneratedNew: name = "ConstructorFenceGeneratedNew"; break;
- case kConstructorFenceGeneratedFinal: name = "ConstructorFenceGeneratedFinal"; break;
- case kConstructorFenceRemovedLSE: name = "ConstructorFenceRemovedLSE"; break;
- case kConstructorFenceRemovedPFRA: name = "ConstructorFenceRemovedPFRA"; break;
- case kConstructorFenceRemovedCFRE: name = "ConstructorFenceRemovedCFRE"; break;
-
- case kLastStat:
- LOG(FATAL) << "invalid stat "
- << static_cast<std::underlying_type<MethodCompilationStat>::type>(stat);
- UNREACHABLE();
- }
- return "OptStat#" + name;
- }
-
- std::atomic<uint32_t> compile_stats_[kLastStat];
+ std::atomic<uint32_t> compile_stats_[static_cast<size_t>(MethodCompilationStat::kLastStat)];
DISALLOW_COPY_AND_ASSIGN(OptimizingCompilerStats);
};
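
The switch-based PrintMethodCompilationStat helper removed above is superseded by the operator<< declared next to the enum; its definition is not part of this diff. The standalone sketch below shows one plausible shape for such a printer, using only a truncated, assumed subset of the enumerators.

    #include <iostream>

    enum class MethodCompilationStat {
      kAttemptBytecodeCompilation = 0,
      kAttemptIntrinsicCompilation,
      kCompiledNativeStub,
      kLastStat
    };

    // Hypothetical printer: the real definition presumably names every
    // enumerator, e.g. via a switch like this or a generated name table.
    std::ostream& operator<<(std::ostream& os, const MethodCompilationStat& rhs) {
      switch (rhs) {
        case MethodCompilationStat::kAttemptBytecodeCompilation:
          return os << "AttemptBytecodeCompilation";
        case MethodCompilationStat::kAttemptIntrinsicCompilation:
          return os << "AttemptIntrinsicCompilation";
        case MethodCompilationStat::kCompiledNativeStub:
          return os << "CompiledNativeStub";
        case MethodCompilationStat::kLastStat:
          break;
      }
      return os << "Unknown(" << static_cast<int>(rhs) << ")";
    }

    int main() {
      // This is the form Log() now emits for each non-zero counter.
      std::cout << "OptStat#" << MethodCompilationStat::kCompiledNativeStub << ": 3\n";
      return 0;
    }
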