Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/code_generator.cc                |   4
-rw-r--r--  compiler/optimizing/data_type.h                      |  13
-rw-r--r--  compiler/optimizing/inliner.cc                       |  76
-rw-r--r--  compiler/optimizing/instruction_simplifier.cc        |  16
-rw-r--r--  compiler/optimizing/instruction_simplifier_arm.cc    |   4
-rw-r--r--  compiler/optimizing/instruction_simplifier_arm64.cc  |   4
-rw-r--r--  compiler/optimizing/instruction_simplifier_mips.cc   |   4
-rw-r--r--  compiler/optimizing/intrinsics.cc                    |  59
-rw-r--r--  compiler/optimizing/intrinsics.h                     |   5
-rw-r--r--  compiler/optimizing/optimizing_compiler.cc           |  42
-rw-r--r--  compiler/optimizing/optimizing_compiler_stats.h      | 136
11 files changed, 184 insertions, 179 deletions
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc index 0bd3ce937a..aff6f9f64f 100644 --- a/compiler/optimizing/code_generator.cc +++ b/compiler/optimizing/code_generator.cc @@ -1411,10 +1411,10 @@ LocationSummary* CodeGenerator::CreateThrowingSlowPathLocations(HInstruction* in void CodeGenerator::GenerateNullCheck(HNullCheck* instruction) { if (compiler_options_.GetImplicitNullChecks()) { - MaybeRecordStat(stats_, kImplicitNullCheckGenerated); + MaybeRecordStat(stats_, MethodCompilationStat::kImplicitNullCheckGenerated); GenerateImplicitNullCheck(instruction); } else { - MaybeRecordStat(stats_, kExplicitNullCheckGenerated); + MaybeRecordStat(stats_, MethodCompilationStat::kExplicitNullCheckGenerated); GenerateExplicitNullCheck(instruction); } } diff --git a/compiler/optimizing/data_type.h b/compiler/optimizing/data_type.h index 75a7fbe6ca..d253036479 100644 --- a/compiler/optimizing/data_type.h +++ b/compiler/optimizing/data_type.h @@ -186,6 +186,7 @@ class DataType { } static bool IsTypeConversionImplicit(Type input_type, Type result_type); + static bool IsTypeConversionImplicit(int64_t value, Type result_type); static const char* PrettyDescriptor(Type type); @@ -213,6 +214,18 @@ inline bool DataType::IsTypeConversionImplicit(Type input_type, Type result_type MaxValueOfIntegralType(input_type) <= MaxValueOfIntegralType(result_type)); } +inline bool DataType::IsTypeConversionImplicit(int64_t value, Type result_type) { + if (IsIntegralType(result_type) && result_type != Type::kInt64) { + // If the constant value falls in the range of the result_type, type + // conversion isn't needed. + return value >= MinValueOfIntegralType(result_type) && + value <= MaxValueOfIntegralType(result_type); + } + // Conversion isn't implicit if it's into non-integer types, or 64-bit int + // which may have different number of registers. + return false; +} + } // namespace art #endif // ART_COMPILER_OPTIMIZING_DATA_TYPE_H_ diff --git a/compiler/optimizing/inliner.cc b/compiler/optimizing/inliner.cc index 3f4a3d8b8e..2444e43d64 100644 --- a/compiler/optimizing/inliner.cc +++ b/compiler/optimizing/inliner.cc @@ -441,9 +441,9 @@ bool HInliner::TryInline(HInvoke* invoke_instruction) { // Add dependency due to devirtulization. We've assumed resolved_method // has single implementation. 
outermost_graph_->AddCHASingleImplementationDependency(resolved_method); - MaybeRecordStat(stats_, kCHAInline); + MaybeRecordStat(stats_, MethodCompilationStat::kCHAInline); } else { - MaybeRecordStat(stats_, kInlinedInvokeVirtualOrInterface); + MaybeRecordStat(stats_, MethodCompilationStat::kInlinedInvokeVirtualOrInterface); } } return result; @@ -533,7 +533,7 @@ bool HInliner::TryInlineFromInlineCache(const DexFile& caller_dex_file, } case kInlineCacheMonomorphic: { - MaybeRecordStat(stats_, kMonomorphicCall); + MaybeRecordStat(stats_, MethodCompilationStat::kMonomorphicCall); if (UseOnlyPolymorphicInliningWithNoDeopt()) { return TryInlinePolymorphicCall(invoke_instruction, resolved_method, inline_cache); } else { @@ -542,7 +542,7 @@ bool HInliner::TryInlineFromInlineCache(const DexFile& caller_dex_file, } case kInlineCachePolymorphic: { - MaybeRecordStat(stats_, kPolymorphicCall); + MaybeRecordStat(stats_, MethodCompilationStat::kPolymorphicCall); return TryInlinePolymorphicCall(invoke_instruction, resolved_method, inline_cache); } @@ -551,7 +551,7 @@ bool HInliner::TryInlineFromInlineCache(const DexFile& caller_dex_file, << "Interface or virtual call to " << caller_dex_file.PrettyMethod(invoke_instruction->GetDexMethodIndex()) << " is megamorphic and not inlined"; - MaybeRecordStat(stats_, kMegamorphicCall); + MaybeRecordStat(stats_, MethodCompilationStat::kMegamorphicCall); return false; } @@ -755,7 +755,7 @@ bool HInliner::TryInlineMonomorphicCall(HInvoke* invoke_instruction, dex::TypeIndex class_index = FindClassIndexIn( GetMonomorphicType(classes), caller_compilation_unit_); if (!class_index.IsValid()) { - LOG_FAIL(stats_, kNotInlinedDexCache) + LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedDexCache) << "Call to " << ArtMethod::PrettyMethod(resolved_method) << " from inline cache is not inlined because its class is not" << " accessible to the caller"; @@ -804,7 +804,7 @@ bool HInliner::TryInlineMonomorphicCall(HInvoke* invoke_instruction, /* is_first_run */ false); rtp_fixup.Run(); - MaybeRecordStat(stats_, kInlinedMonomorphicCall); + MaybeRecordStat(stats_, MethodCompilationStat::kInlinedMonomorphicCall); return true; } @@ -994,7 +994,7 @@ bool HInliner::TryInlinePolymorphicCall(HInvoke* invoke_instruction, return false; } - MaybeRecordStat(stats_, kInlinedPolymorphicCall); + MaybeRecordStat(stats_, MethodCompilationStat::kInlinedPolymorphicCall); // Run type propagation to get the guards typed. ReferenceTypePropagation rtp_fixup(graph_, @@ -1200,7 +1200,7 @@ bool HInliner::TryInlinePolymorphicCallToSameTarget( /* is_first_run */ false); rtp_fixup.Run(); - MaybeRecordStat(stats_, kInlinedPolymorphicCall); + MaybeRecordStat(stats_, MethodCompilationStat::kInlinedPolymorphicCall); LOG_SUCCESS() << "Inlined same polymorphic target " << actual_method->PrettyMethod(); return true; @@ -1258,6 +1258,13 @@ bool HInliner::TryInlineAndReplace(HInvoke* invoke_instruction, new_invoke->SetReferenceTypeInfo(invoke_instruction->GetReferenceTypeInfo()); } return_replacement = new_invoke; + // Directly check if the new virtual can be recognized as an intrinsic. + // This way, we avoid running a full recognition pass just to detect + // these relative rare cases. + bool wrong_invoke_type = false; + if (IntrinsicsRecognizer::Recognize(new_invoke, &wrong_invoke_type)) { + MaybeRecordStat(stats_, MethodCompilationStat::kIntrinsicRecognized); + } } else { // TODO: Consider sharpening an invoke virtual once it is not dependent on the // compiler driver. 
@@ -1301,14 +1308,14 @@ bool HInliner::TryBuildAndInline(HInvoke* invoke_instruction, ReferenceTypeInfo receiver_type, HInstruction** return_replacement) { if (method->IsProxyMethod()) { - LOG_FAIL(stats_, kNotInlinedProxy) + LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedProxy) << "Method " << method->PrettyMethod() << " is not inlined because of unimplemented inline support for proxy methods."; return false; } if (CountRecursiveCallsOf(method) > kMaximumNumberOfRecursiveCalls) { - LOG_FAIL(stats_, kNotInlinedRecursiveBudget) + LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedRecursiveBudget) << "Method " << method->PrettyMethod() << " is not inlined because it has reached its recursive call budget."; @@ -1322,10 +1329,10 @@ bool HInliner::TryBuildAndInline(HInvoke* invoke_instruction, if (TryPatternSubstitution(invoke_instruction, method, return_replacement)) { LOG_SUCCESS() << "Successfully replaced pattern of invoke " << method->PrettyMethod(); - MaybeRecordStat(stats_, kReplacedInvokeWithSimplePattern); + MaybeRecordStat(stats_, MethodCompilationStat::kReplacedInvokeWithSimplePattern); return true; } - LOG_FAIL(stats_, kNotInlinedWont) + LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedWont) << "Won't inline " << method->PrettyMethod() << " in " << outer_compilation_unit_.GetDexFile()->GetLocation() << " (" << caller_compilation_unit_.GetDexFile()->GetLocation() << ") from " @@ -1345,7 +1352,7 @@ bool HInliner::TryBuildAndInline(HInvoke* invoke_instruction, size_t inline_max_code_units = compiler_driver_->GetCompilerOptions().GetInlineMaxCodeUnits(); if (code_item->insns_size_in_code_units_ > inline_max_code_units) { - LOG_FAIL(stats_, kNotInlinedCodeItem) + LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedCodeItem) << "Method " << method->PrettyMethod() << " is not inlined because its code item is too big: " << code_item->insns_size_in_code_units_ @@ -1355,13 +1362,13 @@ bool HInliner::TryBuildAndInline(HInvoke* invoke_instruction, } if (code_item->tries_size_ != 0) { - LOG_FAIL(stats_, kNotInlinedTryCatch) + LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedTryCatch) << "Method " << method->PrettyMethod() << " is not inlined because of try block"; return false; } if (!method->IsCompilable()) { - LOG_FAIL(stats_, kNotInlinedNotVerified) + LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedNotVerified) << "Method " << method->PrettyMethod() << " has soft failures un-handled by the compiler, so it cannot be inlined"; } @@ -1371,7 +1378,7 @@ bool HInliner::TryBuildAndInline(HInvoke* invoke_instruction, if (Runtime::Current()->UseJitCompilation() || !compiler_driver_->IsMethodVerifiedWithoutFailures( method->GetDexMethodIndex(), class_def_idx, *method->GetDexFile())) { - LOG_FAIL(stats_, kNotInlinedNotVerified) + LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedNotVerified) << "Method " << method->PrettyMethod() << " couldn't be verified, so it cannot be inlined"; return false; @@ -1382,9 +1389,10 @@ bool HInliner::TryBuildAndInline(HInvoke* invoke_instruction, invoke_instruction->AsInvokeStaticOrDirect()->IsStaticWithImplicitClinitCheck()) { // Case of a static method that cannot be inlined because it implicitly // requires an initialization check of its declaring class. 
- LOG_FAIL(stats_, kNotInlinedDexCache) << "Method " << method->PrettyMethod() - << " is not inlined because it is static and requires a clinit" - << " check that cannot be emitted due to Dex cache limitations"; + LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedDexCache) + << "Method " << method->PrettyMethod() + << " is not inlined because it is static and requires a clinit" + << " check that cannot be emitted due to Dex cache limitations"; return false; } @@ -1394,7 +1402,7 @@ bool HInliner::TryBuildAndInline(HInvoke* invoke_instruction, } LOG_SUCCESS() << method->PrettyMethod(); - MaybeRecordStat(stats_, kInlinedInvoke); + MaybeRecordStat(stats_, MethodCompilationStat::kInlinedInvoke); return true; } @@ -1677,7 +1685,7 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction, handles_); if (builder.BuildGraph() != kAnalysisSuccess) { - LOG_FAIL(stats_, kNotInlinedCannotBuild) + LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedCannotBuild) << "Method " << callee_dex_file.PrettyMethod(method_index) << " could not be built, so cannot be inlined"; return false; @@ -1685,7 +1693,7 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction, if (!RegisterAllocator::CanAllocateRegistersFor(*callee_graph, compiler_driver_->GetInstructionSet())) { - LOG_FAIL(stats_, kNotInlinedRegisterAllocator) + LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedRegisterAllocator) << "Method " << callee_dex_file.PrettyMethod(method_index) << " cannot be inlined because of the register allocator"; return false; @@ -1738,7 +1746,7 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction, HBasicBlock* exit_block = callee_graph->GetExitBlock(); if (exit_block == nullptr) { - LOG_FAIL(stats_, kNotInlinedInfiniteLoop) + LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedInfiniteLoop) << "Method " << callee_dex_file.PrettyMethod(method_index) << " could not be inlined because it has an infinite loop"; return false; @@ -1749,14 +1757,14 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction, if (predecessor->GetLastInstruction()->IsThrow()) { if (invoke_instruction->GetBlock()->IsTryBlock()) { // TODO(ngeoffray): Support adding HTryBoundary in Hgraph::InlineInto. - LOG_FAIL(stats_, kNotInlinedTryCatch) + LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedTryCatch) << "Method " << callee_dex_file.PrettyMethod(method_index) << " could not be inlined because one branch always throws and" << " caller is in a try/catch block"; return false; } else if (graph_->GetExitBlock() == nullptr) { // TODO(ngeoffray): Support adding HExit in the caller graph. - LOG_FAIL(stats_, kNotInlinedInfiniteLoop) + LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedInfiniteLoop) << "Method " << callee_dex_file.PrettyMethod(method_index) << " could not be inlined because one branch always throws and" << " caller does not have an exit block"; @@ -1775,7 +1783,7 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction, } if (!has_one_return) { - LOG_FAIL(stats_, kNotInlinedAlwaysThrows) + LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedAlwaysThrows) << "Method " << callee_dex_file.PrettyMethod(method_index) << " could not be inlined because it always throws"; return false; @@ -1788,7 +1796,7 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction, if (block->GetLoopInformation()->IsIrreducible()) { // Don't inline methods with irreducible loops, they could prevent some // optimizations to run. 
- LOG_FAIL(stats_, kNotInlinedIrreducibleLoop) + LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedIrreducibleLoop) << "Method " << callee_dex_file.PrettyMethod(method_index) << " could not be inlined because it contains an irreducible loop"; return false; @@ -1797,7 +1805,7 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction, // Don't inline methods with loops without exit, since they cause the // loop information to be computed incorrectly when updating after // inlining. - LOG_FAIL(stats_, kNotInlinedLoopWithoutExit) + LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedLoopWithoutExit) << "Method " << callee_dex_file.PrettyMethod(method_index) << " could not be inlined because it contains a loop with no exit"; return false; @@ -1808,7 +1816,7 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction, !instr_it.Done(); instr_it.Advance()) { if (++number_of_instructions >= inlining_budget_) { - LOG_FAIL(stats_, kNotInlinedInstructionBudget) + LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedInstructionBudget) << "Method " << callee_dex_file.PrettyMethod(method_index) << " is not inlined because the outer method has reached" << " its instruction budget limit."; @@ -1817,7 +1825,7 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction, HInstruction* current = instr_it.Current(); if (current->NeedsEnvironment() && (total_number_of_dex_registers_ >= kMaximumNumberOfCumulatedDexRegisters)) { - LOG_FAIL(stats_, kNotInlinedEnvironmentBudget) + LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedEnvironmentBudget) << "Method " << callee_dex_file.PrettyMethod(method_index) << " is not inlined because its caller has reached" << " its environment budget limit."; @@ -1827,7 +1835,7 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction, if (current->NeedsEnvironment() && !CanEncodeInlinedMethodInStackMap(*caller_compilation_unit_.GetDexFile(), resolved_method)) { - LOG_FAIL(stats_, kNotInlinedStackMaps) + LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedStackMaps) << "Method " << callee_dex_file.PrettyMethod(method_index) << " could not be inlined because " << current->DebugName() << " needs an environment, is in a different dex file" @@ -1836,7 +1844,7 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction, } if (!same_dex_file && current->NeedsDexCacheOfDeclaringClass()) { - LOG_FAIL(stats_, kNotInlinedDexCache) + LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedDexCache) << "Method " << callee_dex_file.PrettyMethod(method_index) << " could not be inlined because " << current->DebugName() << " it is in a different dex file and requires access to the dex cache"; @@ -1848,7 +1856,7 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction, current->IsUnresolvedStaticFieldSet() || current->IsUnresolvedInstanceFieldSet()) { // Entrypoint for unresolved fields does not handle inlined frames. 
- LOG_FAIL(stats_, kNotInlinedUnresolvedEntrypoint) + LOG_FAIL(stats_, MethodCompilationStat::kNotInlinedUnresolvedEntrypoint) << "Method " << callee_dex_file.PrettyMethod(method_index) << " could not be inlined because it is using an unresolved" << " entrypoint"; diff --git a/compiler/optimizing/instruction_simplifier.cc b/compiler/optimizing/instruction_simplifier.cc index 4c18e16c48..bd20d28992 100644 --- a/compiler/optimizing/instruction_simplifier.cc +++ b/compiler/optimizing/instruction_simplifier.cc @@ -48,7 +48,7 @@ class InstructionSimplifierVisitor : public HGraphDelegateVisitor { void RecordSimplification() { simplification_occurred_ = true; simplifications_at_current_position_++; - MaybeRecordStat(stats_, kInstructionSimplifications); + MaybeRecordStat(stats_, MethodCompilationStat::kInstructionSimplifications); } bool ReplaceRotateWithRor(HBinaryOperation* op, HUShr* ushr, HShl* shl); @@ -663,7 +663,7 @@ void InstructionSimplifierVisitor::VisitInstanceOf(HInstanceOf* instruction) { HGraph* graph = GetGraph(); if (object->IsNullConstant()) { - MaybeRecordStat(stats_, kRemovedInstanceOf); + MaybeRecordStat(stats_, MethodCompilationStat::kRemovedInstanceOf); instruction->ReplaceWith(graph->GetIntConstant(0)); instruction->GetBlock()->RemoveInstruction(instruction); RecordSimplification(); @@ -674,7 +674,7 @@ void InstructionSimplifierVisitor::VisitInstanceOf(HInstanceOf* instruction) { // the return value check with the `outcome` check, b/27651442 . bool outcome = false; if (TypeCheckHasKnownOutcome(load_class, object, &outcome)) { - MaybeRecordStat(stats_, kRemovedInstanceOf); + MaybeRecordStat(stats_, MethodCompilationStat::kRemovedInstanceOf); if (outcome && can_be_null) { // Type test will succeed, we just need a null test. HNotEqual* test = new (graph->GetAllocator()) HNotEqual(graph->GetNullConstant(), object); @@ -1168,6 +1168,16 @@ void InstructionSimplifierVisitor::VisitTypeConversion(HTypeConversion* instruct RecordSimplification(); return; } + } else if (input->IsIntConstant()) { + // Try to eliminate type conversion on int constant whose value falls into + // the range of the result type. 
+ int32_t value = input->AsIntConstant()->GetValue(); + if (DataType::IsTypeConversionImplicit(value, result_type)) { + instruction->ReplaceWith(input); + instruction->GetBlock()->RemoveInstruction(instruction); + RecordSimplification(); + return; + } } } diff --git a/compiler/optimizing/instruction_simplifier_arm.cc b/compiler/optimizing/instruction_simplifier_arm.cc index d41e49a0f3..92081e30b1 100644 --- a/compiler/optimizing/instruction_simplifier_arm.cc +++ b/compiler/optimizing/instruction_simplifier_arm.cc @@ -37,9 +37,7 @@ class InstructionSimplifierArmVisitor : public HGraphVisitor { private: void RecordSimplification() { - if (stats_ != nullptr) { - stats_->RecordStat(kInstructionSimplificationsArch); - } + MaybeRecordStat(stats_, MethodCompilationStat::kInstructionSimplificationsArch); } bool TryMergeIntoUsersShifterOperand(HInstruction* instruction); diff --git a/compiler/optimizing/instruction_simplifier_arm64.cc b/compiler/optimizing/instruction_simplifier_arm64.cc index 69e1463ac4..1c44e5ac49 100644 --- a/compiler/optimizing/instruction_simplifier_arm64.cc +++ b/compiler/optimizing/instruction_simplifier_arm64.cc @@ -37,9 +37,7 @@ class InstructionSimplifierArm64Visitor : public HGraphVisitor { private: void RecordSimplification() { - if (stats_ != nullptr) { - stats_->RecordStat(kInstructionSimplificationsArch); - } + MaybeRecordStat(stats_, MethodCompilationStat::kInstructionSimplificationsArch); } bool TryMergeIntoUsersShifterOperand(HInstruction* instruction); diff --git a/compiler/optimizing/instruction_simplifier_mips.cc b/compiler/optimizing/instruction_simplifier_mips.cc index 6a0d8a60c4..fa97401a0c 100644 --- a/compiler/optimizing/instruction_simplifier_mips.cc +++ b/compiler/optimizing/instruction_simplifier_mips.cc @@ -33,9 +33,7 @@ class InstructionSimplifierMipsVisitor : public HGraphVisitor { private: void RecordSimplification() { - if (stats_ != nullptr) { - stats_->RecordStat(kInstructionSimplificationsArch); - } + MaybeRecordStat(stats_, MethodCompilationStat::kInstructionSimplificationsArch); } bool TryExtractArrayAccessIndex(HInstruction* access, diff --git a/compiler/optimizing/intrinsics.cc b/compiler/optimizing/intrinsics.cc index dfae534555..9bf10f58fd 100644 --- a/compiler/optimizing/intrinsics.cc +++ b/compiler/optimizing/intrinsics.cc @@ -139,9 +139,41 @@ static bool CheckInvokeType(Intrinsics intrinsic, HInvoke* invoke) { // Call might be devirtualized. return (invoke_type == kVirtual || invoke_type == kDirect); - default: + case kSuper: + case kInterface: + case kPolymorphic: return false; } + LOG(FATAL) << "Unknown intrinsic invoke type: " << intrinsic_type; + UNREACHABLE(); +} + +bool IntrinsicsRecognizer::Recognize(HInvoke* invoke, /*out*/ bool* wrong_invoke_type) { + ArtMethod* art_method = invoke->GetResolvedMethod(); + *wrong_invoke_type = false; + if (art_method == nullptr || !art_method->IsIntrinsic()) { + return false; + } + + { + // TODO: b/65872996 Polymorphic signature methods should be compiler intrinsics. 
+ ScopedObjectAccess soa(Thread::Current()); + if (art_method->IsPolymorphicSignature()) { + return false; + } + } + + Intrinsics intrinsic = static_cast<Intrinsics>(art_method->GetIntrinsic()); + if (CheckInvokeType(intrinsic, invoke) == false) { + *wrong_invoke_type = true; + return false; + } + + invoke->SetIntrinsic(intrinsic, + NeedsEnvironmentOrCache(intrinsic), + GetSideEffects(intrinsic), + GetExceptions(intrinsic)); + return true; } void IntrinsicsRecognizer::Run() { @@ -151,23 +183,14 @@ void IntrinsicsRecognizer::Run() { inst_it.Advance()) { HInstruction* inst = inst_it.Current(); if (inst->IsInvoke()) { - HInvoke* invoke = inst->AsInvoke(); - ArtMethod* art_method = invoke->GetResolvedMethod(); - if (art_method != nullptr && art_method->IsIntrinsic()) { - Intrinsics intrinsic = static_cast<Intrinsics>(art_method->GetIntrinsic()); - if (!CheckInvokeType(intrinsic, invoke)) { - LOG(WARNING) << "Found an intrinsic with unexpected invoke type: " - << static_cast<uint32_t>(intrinsic) << " for " - << art_method->PrettyMethod() - << invoke->DebugName(); - } else { - invoke->SetIntrinsic(intrinsic, - NeedsEnvironmentOrCache(intrinsic), - GetSideEffects(intrinsic), - GetExceptions(intrinsic)); - MaybeRecordStat(stats_, - MethodCompilationStat::kIntrinsicRecognized); - } + bool wrong_invoke_type = false; + if (Recognize(inst->AsInvoke(), &wrong_invoke_type)) { + MaybeRecordStat(stats_, MethodCompilationStat::kIntrinsicRecognized); + } else if (wrong_invoke_type) { + LOG(WARNING) + << "Found an intrinsic with unexpected invoke type: " + << inst->AsInvoke()->GetResolvedMethod()->PrettyMethod() << " " + << inst->DebugName(); } } } diff --git a/compiler/optimizing/intrinsics.h b/compiler/optimizing/intrinsics.h index 818d7f63a3..8088ab25a7 100644 --- a/compiler/optimizing/intrinsics.h +++ b/compiler/optimizing/intrinsics.h @@ -44,6 +44,11 @@ class IntrinsicsRecognizer : public HOptimization { void Run() OVERRIDE; + // Static helper that recognizes intrinsic call. Returns true on success. + // If it fails due to invoke type mismatch, wrong_invoke_type is set. + // Useful to recognize intrinsics on invidual calls outside this full pass. 
+ static bool Recognize(HInvoke* invoke, /*out*/ bool* wrong_invoke_type); + static constexpr const char* kIntrinsicsRecognizerPassName = "intrinsics_recognition"; private: diff --git a/compiler/optimizing/optimizing_compiler.cc b/compiler/optimizing/optimizing_compiler.cc index 47f7b125b7..a281c4a310 100644 --- a/compiler/optimizing/optimizing_compiler.cc +++ b/compiler/optimizing/optimizing_compiler.cc @@ -738,7 +738,7 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* allocator, ArtMethod* method, bool osr, VariableSizedHandleScope* handles) const { - MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kAttemptCompilation); + MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kAttemptBytecodeCompilation); CompilerDriver* compiler_driver = GetCompilerDriver(); InstructionSet instruction_set = compiler_driver->GetInstructionSet(); const DexFile& dex_file = *dex_compilation_unit.GetDexFile(); @@ -757,8 +757,7 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* allocator, } if (Compiler::IsPathologicalCase(*code_item, method_idx, dex_file)) { - MaybeRecordStat(compilation_stats_.get(), - MethodCompilationStat::kNotCompiledPathological); + MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kNotCompiledPathological); return nullptr; } @@ -768,8 +767,7 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* allocator, const CompilerOptions& compiler_options = compiler_driver->GetCompilerOptions(); if ((compiler_options.GetCompilerFilter() == CompilerFilter::kSpace) && (code_item->insns_size_in_code_units_ > kSpaceFilterOptimizingThreshold)) { - MaybeRecordStat(compilation_stats_.get(), - MethodCompilationStat::kNotCompiledSpaceFilter); + MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kNotCompiledSpaceFilter); return nullptr; } @@ -800,8 +798,7 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* allocator, compiler_driver->GetCompilerOptions(), compilation_stats_.get())); if (codegen.get() == nullptr) { - MaybeRecordStat(compilation_stats_.get(), - MethodCompilationStat::kNotCompiledNoCodegen); + MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kNotCompiledNoCodegen); return nullptr; } codegen->GetAssembler()->cfi().SetEnabled( @@ -873,6 +870,7 @@ CodeGenerator* OptimizingCompiler::TryCompile(ArenaAllocator* allocator, codegen->Compile(code_allocator); pass_observer.DumpDisassembly(); + MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kCompiledBytecode); return codegen.release(); } @@ -883,6 +881,7 @@ CodeGenerator* OptimizingCompiler::TryCompileIntrinsic( const DexCompilationUnit& dex_compilation_unit, ArtMethod* method, VariableSizedHandleScope* handles) const { + MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kAttemptIntrinsicCompilation); CompilerDriver* compiler_driver = GetCompilerDriver(); InstructionSet instruction_set = compiler_driver->GetInstructionSet(); const DexFile& dex_file = *dex_compilation_unit.GetDexFile(); @@ -894,8 +893,6 @@ CodeGenerator* OptimizingCompiler::TryCompileIntrinsic( // Do not attempt to compile on architectures we do not support. 
if (!IsInstructionSetSupported(instruction_set)) { - MaybeRecordStat(compilation_stats_.get(), - MethodCompilationStat::kNotCompiledUnsupportedIsa); return nullptr; } @@ -920,8 +917,6 @@ CodeGenerator* OptimizingCompiler::TryCompileIntrinsic( compiler_driver->GetCompilerOptions(), compilation_stats_.get())); if (codegen.get() == nullptr) { - MaybeRecordStat(compilation_stats_.get(), - MethodCompilationStat::kNotCompiledNoCodegen); return nullptr; } codegen->GetAssembler()->cfi().SetEnabled( @@ -979,6 +974,7 @@ CodeGenerator* OptimizingCompiler::TryCompileIntrinsic( VLOG(compiler) << "Compiled intrinsic: " << method->GetIntrinsic() << " " << graph->PrettyMethod(); + MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kCompiledIntrinsic); return codegen.release(); } @@ -1046,8 +1042,6 @@ CompiledMethod* OptimizingCompiler::Compile(const DexFile::CodeItem* code_item, } } if (codegen.get() != nullptr) { - MaybeRecordStat(compilation_stats_.get(), - MethodCompilationStat::kCompiled); compiled_method = Emit(&allocator, &code_allocator, codegen.get(), @@ -1139,10 +1133,20 @@ CompiledMethod* OptimizingCompiler::JniCompile(uint32_t access_flags, } } - return ArtQuickJniCompileMethod(GetCompilerDriver(), - access_flags, - method_idx, - dex_file); + JniCompiledMethod jni_compiled_method = ArtQuickJniCompileMethod( + GetCompilerDriver(), access_flags, method_idx, dex_file); + MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kCompiledNativeStub); + return CompiledMethod::SwapAllocCompiledMethod( + GetCompilerDriver(), + jni_compiled_method.GetInstructionSet(), + jni_compiled_method.GetCode(), + jni_compiled_method.GetFrameSize(), + jni_compiled_method.GetCoreSpillMask(), + jni_compiled_method.GetFpSpillMask(), + /* method_info */ ArrayRef<const uint8_t>(), + /* vmap_table */ ArrayRef<const uint8_t>(), + jni_compiled_method.GetCfi(), + /* patches */ ArrayRef<const linker::LinkerPatch>()); } Compiler* CreateOptimizingCompiler(CompilerDriver* driver) { @@ -1237,6 +1241,7 @@ bool OptimizingCompiler::JitCompile(Thread* self, self, class_linker->GetClassRoot(ClassLinker::kObjectArrayClass), number_of_roots))); if (roots == nullptr) { // Out of memory, just clear the exception to avoid any Java exception uncaught problems. 
+ MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kJitOutOfMemoryForCommit); DCHECK(self->IsExceptionPending()); self->ClearException(); return false; @@ -1253,9 +1258,9 @@ bool OptimizingCompiler::JitCompile(Thread* self, &method_info_data, &roots_data); if (stack_map_data == nullptr || roots_data == nullptr) { + MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kJitOutOfMemoryForCommit); return false; } - MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kCompiled); codegen->BuildStackMaps(MemoryRegion(stack_map_data, stack_map_size), MemoryRegion(method_info_data, method_info_size), code_item); @@ -1279,6 +1284,7 @@ bool OptimizingCompiler::JitCompile(Thread* self, codegen->GetGraph()->GetCHASingleImplementationList()); if (code == nullptr) { + MaybeRecordStat(compilation_stats_.get(), MethodCompilationStat::kJitOutOfMemoryForCommit); code_cache->ClearData(self, stack_map_data, roots_data); return false; } diff --git a/compiler/optimizing/optimizing_compiler_stats.h b/compiler/optimizing/optimizing_compiler_stats.h index 07f9635aba..a2e92d2931 100644 --- a/compiler/optimizing/optimizing_compiler_stats.h +++ b/compiler/optimizing/optimizing_compiler_stats.h @@ -27,10 +27,13 @@ namespace art { -enum MethodCompilationStat { - kAttemptCompilation = 0, +enum class MethodCompilationStat { + kAttemptBytecodeCompilation = 0, + kAttemptIntrinsicCompilation, + kCompiledNativeStub, + kCompiledIntrinsic, + kCompiledBytecode, kCHAInline, - kCompiled, kInlinedInvoke, kReplacedInvokeWithSimplePattern, kInstructionSimplifications, @@ -94,8 +97,10 @@ enum MethodCompilationStat { kConstructorFenceRemovedLSE, kConstructorFenceRemovedPFRA, kConstructorFenceRemovedCFRE, + kJitOutOfMemoryForCommit, kLastStat }; +std::ostream& operator<<(std::ostream& os, const MethodCompilationStat& rhs); class OptimizingCompilerStats { public: @@ -105,7 +110,15 @@ class OptimizingCompilerStats { } void RecordStat(MethodCompilationStat stat, uint32_t count = 1) { - compile_stats_[stat] += count; + size_t stat_index = static_cast<size_t>(stat); + DCHECK_LT(stat_index, arraysize(compile_stats_)); + compile_stats_[stat_index] += count; + } + + uint32_t GetStat(MethodCompilationStat stat) const { + size_t stat_index = static_cast<size_t>(stat); + DCHECK_LT(stat_index, arraysize(compile_stats_)); + return compile_stats_[stat_index]; } void Log() const { @@ -114,18 +127,29 @@ class OptimizingCompilerStats { return; } - if (compile_stats_[kAttemptCompilation] == 0) { + uint32_t compiled_intrinsics = GetStat(MethodCompilationStat::kCompiledIntrinsic); + uint32_t compiled_native_stubs = GetStat(MethodCompilationStat::kCompiledNativeStub); + uint32_t bytecode_attempts = + GetStat(MethodCompilationStat::kAttemptBytecodeCompilation); + if (compiled_intrinsics == 0u && compiled_native_stubs == 0u && bytecode_attempts == 0u) { LOG(INFO) << "Did not compile any method."; } else { - float compiled_percent = - compile_stats_[kCompiled] * 100.0f / compile_stats_[kAttemptCompilation]; - LOG(INFO) << "Attempted compilation of " << compile_stats_[kAttemptCompilation] - << " methods: " << std::fixed << std::setprecision(2) - << compiled_percent << "% (" << compile_stats_[kCompiled] << ") compiled."; - - for (size_t i = 0; i < kLastStat; i++) { + uint32_t compiled_bytecode_methods = + GetStat(MethodCompilationStat::kCompiledBytecode); + // Successful intrinsic compilation preempts other compilation attempts but failed intrinsic + // compilation shall still count towards bytecode or native stub 
compilation attempts. + uint32_t num_compilation_attempts = + compiled_intrinsics + compiled_native_stubs + bytecode_attempts; + uint32_t num_successful_compilations = + compiled_intrinsics + compiled_native_stubs + compiled_bytecode_methods; + float compiled_percent = num_successful_compilations * 100.0f / num_compilation_attempts; + LOG(INFO) << "Attempted compilation of " + << num_compilation_attempts << " methods: " << std::fixed << std::setprecision(2) + << compiled_percent << "% (" << num_successful_compilations << ") compiled."; + + for (size_t i = 0; i < arraysize(compile_stats_); ++i) { if (compile_stats_[i] != 0) { - LOG(INFO) << PrintMethodCompilationStat(static_cast<MethodCompilationStat>(i)) << ": " + LOG(INFO) << "OptStat#" << static_cast<MethodCompilationStat>(i) << ": " << compile_stats_[i]; } } @@ -133,7 +157,7 @@ class OptimizingCompilerStats { } void AddTo(OptimizingCompilerStats* other_stats) { - for (size_t i = 0; i != kLastStat; ++i) { + for (size_t i = 0; i != arraysize(compile_stats_); ++i) { uint32_t count = compile_stats_[i]; if (count != 0) { other_stats->RecordStat(static_cast<MethodCompilationStat>(i), count); @@ -142,91 +166,13 @@ class OptimizingCompilerStats { } void Reset() { - for (size_t i = 0; i != kLastStat; ++i) { - compile_stats_[i] = 0u; + for (std::atomic<uint32_t>& stat : compile_stats_) { + stat = 0u; } } private: - std::string PrintMethodCompilationStat(MethodCompilationStat stat) const { - std::string name; - switch (stat) { - case kAttemptCompilation : name = "AttemptCompilation"; break; - case kCHAInline : name = "CHAInline"; break; - case kCompiled : name = "Compiled"; break; - case kInlinedInvoke : name = "InlinedInvoke"; break; - case kReplacedInvokeWithSimplePattern: name = "ReplacedInvokeWithSimplePattern"; break; - case kInstructionSimplifications: name = "InstructionSimplifications"; break; - case kInstructionSimplificationsArch: name = "InstructionSimplificationsArch"; break; - case kUnresolvedMethod : name = "UnresolvedMethod"; break; - case kUnresolvedField : name = "UnresolvedField"; break; - case kUnresolvedFieldNotAFastAccess : name = "UnresolvedFieldNotAFastAccess"; break; - case kRemovedCheckedCast: name = "RemovedCheckedCast"; break; - case kRemovedDeadInstruction: name = "RemovedDeadInstruction"; break; - case kRemovedNullCheck: name = "RemovedNullCheck"; break; - case kNotCompiledSkipped: name = "NotCompiledSkipped"; break; - case kNotCompiledInvalidBytecode: name = "NotCompiledInvalidBytecode"; break; - case kNotCompiledThrowCatchLoop : name = "NotCompiledThrowCatchLoop"; break; - case kNotCompiledAmbiguousArrayOp : name = "NotCompiledAmbiguousArrayOp"; break; - case kNotCompiledHugeMethod : name = "NotCompiledHugeMethod"; break; - case kNotCompiledLargeMethodNoBranches : name = "NotCompiledLargeMethodNoBranches"; break; - case kNotCompiledMalformedOpcode : name = "NotCompiledMalformedOpcode"; break; - case kNotCompiledNoCodegen : name = "NotCompiledNoCodegen"; break; - case kNotCompiledPathological : name = "NotCompiledPathological"; break; - case kNotCompiledSpaceFilter : name = "NotCompiledSpaceFilter"; break; - case kNotCompiledUnhandledInstruction : name = "NotCompiledUnhandledInstruction"; break; - case kNotCompiledUnsupportedIsa : name = "NotCompiledUnsupportedIsa"; break; - case kNotCompiledVerificationError : name = "NotCompiledVerificationError"; break; - case kNotCompiledVerifyAtRuntime : name = "NotCompiledVerifyAtRuntime"; break; - case kInlinedMonomorphicCall: name = "InlinedMonomorphicCall"; break; - case 
kInlinedPolymorphicCall: name = "InlinedPolymorphicCall"; break; - case kMonomorphicCall: name = "MonomorphicCall"; break; - case kPolymorphicCall: name = "PolymorphicCall"; break; - case kMegamorphicCall: name = "MegamorphicCall"; break; - case kBooleanSimplified : name = "BooleanSimplified"; break; - case kIntrinsicRecognized : name = "IntrinsicRecognized"; break; - case kLoopInvariantMoved : name = "LoopInvariantMoved"; break; - case kLoopVectorized : name = "LoopVectorized"; break; - case kLoopVectorizedIdiom : name = "LoopVectorizedIdiom"; break; - case kSelectGenerated : name = "SelectGenerated"; break; - case kRemovedInstanceOf: name = "RemovedInstanceOf"; break; - case kInlinedInvokeVirtualOrInterface: name = "InlinedInvokeVirtualOrInterface"; break; - case kImplicitNullCheckGenerated: name = "ImplicitNullCheckGenerated"; break; - case kExplicitNullCheckGenerated: name = "ExplicitNullCheckGenerated"; break; - case kSimplifyIf: name = "SimplifyIf"; break; - case kInstructionSunk: name = "InstructionSunk"; break; - case kNotInlinedUnresolvedEntrypoint: name = "NotInlinedUnresolvedEntrypoint"; break; - case kNotInlinedDexCache: name = "NotInlinedDexCache"; break; - case kNotInlinedStackMaps: name = "NotInlinedStackMaps"; break; - case kNotInlinedEnvironmentBudget: name = "NotInlinedEnvironmentBudget"; break; - case kNotInlinedInstructionBudget: name = "NotInlinedInstructionBudget"; break; - case kNotInlinedLoopWithoutExit: name = "NotInlinedLoopWithoutExit"; break; - case kNotInlinedIrreducibleLoop: name = "NotInlinedIrreducibleLoop"; break; - case kNotInlinedAlwaysThrows: name = "NotInlinedAlwaysThrows"; break; - case kNotInlinedInfiniteLoop: name = "NotInlinedInfiniteLoop"; break; - case kNotInlinedTryCatch: name = "NotInlinedTryCatch"; break; - case kNotInlinedRegisterAllocator: name = "NotInlinedRegisterAllocator"; break; - case kNotInlinedCannotBuild: name = "NotInlinedCannotBuild"; break; - case kNotInlinedNotVerified: name = "NotInlinedNotVerified"; break; - case kNotInlinedCodeItem: name = "NotInlinedCodeItem"; break; - case kNotInlinedWont: name = "NotInlinedWont"; break; - case kNotInlinedRecursiveBudget: name = "NotInlinedRecursiveBudget"; break; - case kNotInlinedProxy: name = "NotInlinedProxy"; break; - case kConstructorFenceGeneratedNew: name = "ConstructorFenceGeneratedNew"; break; - case kConstructorFenceGeneratedFinal: name = "ConstructorFenceGeneratedFinal"; break; - case kConstructorFenceRemovedLSE: name = "ConstructorFenceRemovedLSE"; break; - case kConstructorFenceRemovedPFRA: name = "ConstructorFenceRemovedPFRA"; break; - case kConstructorFenceRemovedCFRE: name = "ConstructorFenceRemovedCFRE"; break; - - case kLastStat: - LOG(FATAL) << "invalid stat " - << static_cast<std::underlying_type<MethodCompilationStat>::type>(stat); - UNREACHABLE(); - } - return "OptStat#" + name; - } - - std::atomic<uint32_t> compile_stats_[kLastStat]; + std::atomic<uint32_t> compile_stats_[static_cast<size_t>(MethodCompilationStat::kLastStat)]; DISALLOW_COPY_AND_ASSIGN(OptimizingCompilerStats); }; |
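
For reference, here is a minimal, self-contained sketch of how the new constant overload of DataType::IsTypeConversionImplicit() behaves. The Type enum and range helpers below are reduced stand-ins for the real ART DataType class, written only for illustration; they are assumptions, not the actual implementation in data_type.h.

// Simplified mirror of the constant overload of
// DataType::IsTypeConversionImplicit() added by this change. The enum and
// range helpers are illustrative stand-ins, not the real ART types.
#include <cstdint>
#include <iostream>
#include <limits>

enum class Type { kBool, kInt8, kUint16, kInt16, kInt32, kInt64, kFloat32 };

static bool IsIntegralType(Type type) {
  return type == Type::kBool || type == Type::kInt8 || type == Type::kUint16 ||
         type == Type::kInt16 || type == Type::kInt32 || type == Type::kInt64;
}

static int64_t MinValueOfIntegralType(Type type) {
  switch (type) {
    case Type::kBool:   return 0;
    case Type::kInt8:   return std::numeric_limits<int8_t>::min();
    case Type::kUint16: return 0;
    case Type::kInt16:  return std::numeric_limits<int16_t>::min();
    case Type::kInt32:  return std::numeric_limits<int32_t>::min();
    case Type::kInt64:  return std::numeric_limits<int64_t>::min();
    default:            return 0;
  }
}

static int64_t MaxValueOfIntegralType(Type type) {
  switch (type) {
    case Type::kBool:   return 1;
    case Type::kInt8:   return std::numeric_limits<int8_t>::max();
    case Type::kUint16: return std::numeric_limits<uint16_t>::max();
    case Type::kInt16:  return std::numeric_limits<int16_t>::max();
    case Type::kInt32:  return std::numeric_limits<int32_t>::max();
    case Type::kInt64:  return std::numeric_limits<int64_t>::max();
    default:            return 0;
  }
}

// Same decision logic as the patch: converting a known constant is implicit
// only when the result type is integral, narrower than 64 bits, and the
// constant already fits in the result type's value range.
static bool IsTypeConversionImplicit(int64_t value, Type result_type) {
  if (IsIntegralType(result_type) && result_type != Type::kInt64) {
    return value >= MinValueOfIntegralType(result_type) &&
           value <= MaxValueOfIntegralType(result_type);
  }
  return false;
}

int main() {
  // 5 fits in int8, so a cast of the constant 5 to byte needs no conversion...
  std::cout << IsTypeConversionImplicit(5, Type::kInt8) << "\n";     // 1
  // ...but 300 does not, and conversions to int64 or float are never implicit.
  std::cout << IsTypeConversionImplicit(300, Type::kInt8) << "\n";   // 0
  std::cout << IsTypeConversionImplicit(5, Type::kInt64) << "\n";    // 0
  std::cout << IsTypeConversionImplicit(5, Type::kFloat32) << "\n";  // 0
  return 0;
}

This is the check that VisitTypeConversion in instruction_simplifier.cc now uses to drop an HTypeConversion entirely when its input is an HIntConstant that already fits the result type.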
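
Similarly, a small sketch of the indexing pattern introduced by turning MethodCompilationStat into an enum class: scoped enumerators no longer convert to an array index implicitly, so RecordStat()/GetStat() cast explicitly and the stats array is sized from the kLastStat sentinel. The Stats class below is a reduced stand-in for OptimizingCompilerStats (atomics and the full enumerator list are omitted), not the real implementation.

// Reduced stand-in showing the enum-class indexing used by the patch.
#include <array>
#include <cstdint>
#include <iostream>

enum class MethodCompilationStat {
  kAttemptBytecodeCompilation = 0,
  kCompiledBytecode,
  kCompiledIntrinsic,
  kCompiledNativeStub,
  kLastStat
};

class Stats {
 public:
  void RecordStat(MethodCompilationStat stat, uint32_t count = 1) {
    // A scoped enum does not convert to size_t implicitly, hence the cast.
    compile_stats_[static_cast<size_t>(stat)] += count;
  }
  uint32_t GetStat(MethodCompilationStat stat) const {
    return compile_stats_[static_cast<size_t>(stat)];
  }

 private:
  // Array sized from the sentinel enumerator, mirroring compile_stats_.
  std::array<uint32_t, static_cast<size_t>(MethodCompilationStat::kLastStat)>
      compile_stats_{};
};

int main() {
  Stats stats;
  stats.RecordStat(MethodCompilationStat::kAttemptBytecodeCompilation);
  stats.RecordStat(MethodCompilationStat::kCompiledBytecode);
  std::cout << stats.GetStat(MethodCompilationStat::kCompiledBytecode) << "\n";  // 1
  return 0;
}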