Diffstat (limited to 'compiler/optimizing/inliner.cc')
-rw-r--r-- | compiler/optimizing/inliner.cc | 119 |
1 file changed, 74 insertions, 45 deletions
diff --git a/compiler/optimizing/inliner.cc b/compiler/optimizing/inliner.cc
index a4dcb3aeba..48d32999b7 100644
--- a/compiler/optimizing/inliner.cc
+++ b/compiler/optimizing/inliner.cc
@@ -33,6 +33,7 @@
 #include "reference_type_propagation.h"
 #include "register_allocator.h"
 #include "sharpening.h"
+#include "ssa_builder.h"
 #include "ssa_phi_elimination.h"
 #include "scoped_thread_state_change.h"
 #include "thread.h"
@@ -41,7 +42,14 @@
 
 namespace art {
 
-static constexpr size_t kMaximumNumberOfHInstructions = 12;
+static constexpr size_t kMaximumNumberOfHInstructions = 32;
+
+// Limit the number of dex registers that we accumulate while inlining
+// to avoid creating large amount of nested environments.
+static constexpr size_t kMaximumNumberOfCumulatedDexRegisters = 64;
+
+// Avoid inlining within a huge method due to memory pressure.
+static constexpr size_t kMaximumCodeUnitSize = 4096;
 
 void HInliner::Run() {
   const CompilerOptions& compiler_options = compiler_driver_->GetCompilerOptions();
@@ -49,6 +57,9 @@ void HInliner::Run() {
       || (compiler_options.GetInlineMaxCodeUnits() == 0)) {
     return;
   }
+  if (caller_compilation_unit_.GetCodeItem()->insns_size_in_code_units_ > kMaximumCodeUnitSize) {
+    return;
+  }
   if (graph_->IsDebuggable()) {
     // For simplicity, we currently never inline when the graph is debuggable. This avoids
     // doing some logic in the runtime to discover if a method could have been inlined.
@@ -215,6 +226,7 @@ bool HInliner::TryInline(HInvoke* invoke_instruction) {
   ClassLinker* class_linker = caller_compilation_unit_.GetClassLinker();
   // We can query the dex cache directly. The verifier has populated it already.
   ArtMethod* resolved_method;
+  ArtMethod* actual_method = nullptr;
   if (invoke_instruction->IsInvokeStaticOrDirect()) {
     if (invoke_instruction->AsInvokeStaticOrDirect()->IsStringInit()) {
       VLOG(compiler) << "Not inlining a String.<init> method";
@@ -226,9 +238,15 @@ bool HInliner::TryInline(HInvoke* invoke_instruction) {
         : class_linker->FindDexCache(soa.Self(), *ref.dex_file);
     resolved_method = dex_cache->GetResolvedMethod(
         ref.dex_method_index, class_linker->GetImagePointerSize());
+    // actual_method == resolved_method for direct or static calls.
+    actual_method = resolved_method;
   } else {
     resolved_method = caller_compilation_unit_.GetDexCache().Get()->GetResolvedMethod(
         method_index, class_linker->GetImagePointerSize());
+    if (resolved_method != nullptr) {
+      // Check if we can statically find the method.
+      actual_method = FindVirtualOrInterfaceTarget(invoke_instruction, resolved_method);
+    }
   }
 
   if (resolved_method == nullptr) {
@@ -238,15 +256,10 @@ bool HInliner::TryInline(HInvoke* invoke_instruction) {
     return false;
   }
 
-  if (invoke_instruction->IsInvokeStaticOrDirect()) {
-    return TryInline(invoke_instruction, resolved_method);
-  }
-
-  // Check if we can statically find the method.
-  ArtMethod* actual_method = FindVirtualOrInterfaceTarget(invoke_instruction, resolved_method);
   if (actual_method != nullptr) {
     return TryInline(invoke_instruction, actual_method);
   }
+  DCHECK(!invoke_instruction->IsInvokeStaticOrDirect());
 
   // Check if we can use an inline cache.
   ArtMethod* caller = graph_->GetArtMethod();
@@ -372,6 +385,18 @@ bool HInliner::TryInlinePolymorphicCall(HInvoke* invoke_instruction ATTRIBUTE_UN
 
 bool HInliner::TryInline(HInvoke* invoke_instruction, ArtMethod* method, bool do_rtp) {
   const DexFile& caller_dex_file = *caller_compilation_unit_.GetDexFile();
+
+  // Check whether we're allowed to inline. The outermost compilation unit is the relevant
+  // dex file here (though the transitivity of an inline chain would allow checking the calller).
+  if (!compiler_driver_->MayInline(method->GetDexFile(),
+                                   outer_compilation_unit_.GetDexFile())) {
+    VLOG(compiler) << "Won't inline " << PrettyMethod(method) << " in "
+                   << outer_compilation_unit_.GetDexFile()->GetLocation() << " ("
+                   << caller_compilation_unit_.GetDexFile()->GetLocation() << ") from "
+                   << method->GetDexFile()->GetLocation();
+    return false;
+  }
+
   uint32_t method_index = FindMethodIndexIn(
       method, caller_dex_file, invoke_instruction->GetDexMethodIndex());
   if (method_index == DexFile::kDexNoIndex) {
@@ -514,7 +539,7 @@ bool HInliner::TryBuildAndInline(ArtMethod* resolved_method,
     return false;
   }
 
-  if (!callee_graph->TryBuildingSsa()) {
+  if (callee_graph->TryBuildingSsa(handles_) != kBuildSsaSuccess) {
     VLOG(compiler) << "Method " << PrettyMethod(method_index, callee_dex_file)
                    << " could not be transformed to SSA";
     return false;
@@ -549,14 +574,12 @@ bool HInliner::TryBuildAndInline(ArtMethod* resolved_method,
   // Run simple optimizations on the graph.
   HDeadCodeElimination dce(callee_graph, stats_);
   HConstantFolding fold(callee_graph);
-  ReferenceTypePropagation type_propagation(callee_graph, handles_);
   HSharpening sharpening(callee_graph, codegen_, dex_compilation_unit, compiler_driver_);
   InstructionSimplifier simplify(callee_graph, stats_);
   IntrinsicsRecognizer intrinsics(callee_graph, compiler_driver_);
 
   HOptimization* optimizations[] = {
     &intrinsics,
-    &type_propagation,
     &sharpening,
     &simplify,
     &fold,
@@ -578,6 +601,7 @@ bool HInliner::TryBuildAndInline(ArtMethod* resolved_method,
                      compiler_driver_,
                      handles_,
                      stats_,
+                     total_number_of_dex_registers_ + code_item->registers_size_,
                      depth_ + 1);
     inliner.Run();
     number_of_instructions_budget += inliner.number_of_inlined_instructions_;
@@ -609,6 +633,10 @@ bool HInliner::TryBuildAndInline(ArtMethod* resolved_method,
   HReversePostOrderIterator it(*callee_graph);
   it.Advance(); // Past the entry block, it does not contain instructions that prevent inlining.
   size_t number_of_instructions = 0;
+
+  bool can_inline_environment =
+      total_number_of_dex_registers_ < kMaximumNumberOfCumulatedDexRegisters;
+
   for (; !it.Done(); it.Advance()) {
     HBasicBlock* block = it.Current();
     if (block->IsLoopHeader()) {
@@ -622,10 +650,17 @@ bool HInliner::TryBuildAndInline(ArtMethod* resolved_method,
          instr_it.Advance()) {
       if (number_of_instructions++ == number_of_instructions_budget) {
         VLOG(compiler) << "Method " << PrettyMethod(method_index, callee_dex_file)
-                       << " could not be inlined because it is too big.";
+                       << " is not inlined because its caller has reached"
+                       << " its instruction budget limit.";
         return false;
       }
       HInstruction* current = instr_it.Current();
+      if (!can_inline_environment && current->NeedsEnvironment()) {
+        VLOG(compiler) << "Method " << PrettyMethod(method_index, callee_dex_file)
+                       << " is not inlined because its caller has reached"
+                       << " its environment budget limit.";
+        return false;
+      }
 
       if (current->IsInvokeInterface()) {
         // Disable inlining of interface calls. The cost in case of entering the
@@ -677,42 +712,36 @@ bool HInliner::TryBuildAndInline(ArtMethod* resolved_method,
     DCHECK_EQ(graph_, return_replacement->GetBlock()->GetGraph());
   }
 
-  // When merging the graph we might create a new NullConstant in the caller graph which does
-  // not have the chance to be typed. We assign the correct type here so that we can keep the
-  // assertion that every reference has a valid type. This also simplifies checks along the way.
-  HNullConstant* null_constant = graph_->GetNullConstant();
-  if (!null_constant->GetReferenceTypeInfo().IsValid()) {
-    ReferenceTypeInfo::TypeHandle obj_handle =
-        handles_->NewHandle(class_linker->GetClassRoot(ClassLinker::kJavaLangObject));
-    null_constant->SetReferenceTypeInfo(
-        ReferenceTypeInfo::Create(obj_handle, false /* is_exact */));
-  }
-  // Check the integrity of reference types and run another type propagation if needed.
-  if ((return_replacement != nullptr)
-      && (return_replacement->GetType() == Primitive::kPrimNot)) {
-    if (!return_replacement->GetReferenceTypeInfo().IsValid()) {
-      // Make sure that we have a valid type for the return. We may get an invalid one when
-      // we inline invokes with multiple branches and create a Phi for the result.
-      // TODO: we could be more precise by merging the phi inputs but that requires
-      // some functionality from the reference type propagation.
-      DCHECK(return_replacement->IsPhi());
-      size_t pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
-      ReferenceTypeInfo::TypeHandle return_handle =
-          handles_->NewHandle(resolved_method->GetReturnType(true /* resolve */, pointer_size));
-      return_replacement->SetReferenceTypeInfo(ReferenceTypeInfo::Create(
-          return_handle, return_handle->CannotBeAssignedFromOtherTypes() /* is_exact */));
-    }
+  if (return_replacement != nullptr) {
+    if (return_replacement->GetType() == Primitive::kPrimNot) {
+      if (!return_replacement->GetReferenceTypeInfo().IsValid()) {
+        // Make sure that we have a valid type for the return. We may get an invalid one when
+        // we inline invokes with multiple branches and create a Phi for the result.
+        // TODO: we could be more precise by merging the phi inputs but that requires
+        // some functionality from the reference type propagation.
+        DCHECK(return_replacement->IsPhi());
+        size_t pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
+        ReferenceTypeInfo::TypeHandle return_handle =
+            handles_->NewHandle(resolved_method->GetReturnType(true /* resolve */, pointer_size));
+        return_replacement->SetReferenceTypeInfo(ReferenceTypeInfo::Create(
+            return_handle, return_handle->CannotBeAssignedFromOtherTypes() /* is_exact */));
+      }
 
-    if (do_rtp) {
-      // If the return type is a refinement of the declared type run the type propagation again.
-      ReferenceTypeInfo return_rti = return_replacement->GetReferenceTypeInfo();
-      ReferenceTypeInfo invoke_rti = invoke_instruction->GetReferenceTypeInfo();
-      if (invoke_rti.IsStrictSupertypeOf(return_rti)
-          || (return_rti.IsExact() && !invoke_rti.IsExact())
-          || !return_replacement->CanBeNull()) {
-        ReferenceTypePropagation rtp_fixup(graph_, handles_);
-        rtp_fixup.Run();
+      if (do_rtp) {
+        // If the return type is a refinement of the declared type run the type propagation again.
+        ReferenceTypeInfo return_rti = return_replacement->GetReferenceTypeInfo();
+        ReferenceTypeInfo invoke_rti = invoke_instruction->GetReferenceTypeInfo();
+        if (invoke_rti.IsStrictSupertypeOf(return_rti)
+            || (return_rti.IsExact() && !invoke_rti.IsExact())
+            || !return_replacement->CanBeNull()) {
+          ReferenceTypePropagation(graph_, handles_).Run();
+        }
+      }
+    } else if (return_replacement->IsInstanceOf()) {
+      if (do_rtp) {
+        // Inlining InstanceOf into an If may put a tighter bound on reference types.
+        ReferenceTypePropagation(graph_, handles_).Run();
       }
     }
   }