Diffstat (limited to 'compiler/optimizing/inliner.cc')
-rw-r--r--  compiler/optimizing/inliner.cc  104
1 file changed, 62 insertions(+), 42 deletions(-)
diff --git a/compiler/optimizing/inliner.cc b/compiler/optimizing/inliner.cc
index 5a4478dc14..35582297f3 100644
--- a/compiler/optimizing/inliner.cc
+++ b/compiler/optimizing/inliner.cc
@@ -144,11 +144,6 @@ bool HInliner::Run() {
}
bool did_inline = false;
- // The inliner is the only phase that sets invokes as `always throwing`, and since we only run the
- // inliner once per graph this value should always be false at the beginning of the inlining
- // phase. This is important since we use `HasAlwaysThrowingInvokes` to know whether the inliner
- // phase performed a relevant change in the graph.
- DCHECK(!graph_->HasAlwaysThrowingInvokes());
// Initialize the number of instructions for the method being compiled. Recursive calls
// to HInliner::Run have already updated the instruction count.
@@ -180,7 +175,7 @@ bool HInliner::Run() {
for (HBasicBlock* block : blocks) {
for (HInstruction* instruction = block->GetFirstInstruction(); instruction != nullptr;) {
HInstruction* next = instruction->GetNext();
- HInvoke* call = instruction->AsInvoke();
+ HInvoke* call = instruction->AsInvokeOrNull();
// As long as the call is not intrinsified, it is worth trying to inline.
if (call != nullptr && !codegen_->IsImplementedIntrinsic(call)) {
if (honor_noinline_directives) {
@@ -210,6 +205,16 @@ bool HInliner::Run() {
// We return true if we either inlined at least one method, or we marked one of our methods as
// always throwing.
+ // To check if we added an always throwing method we can either:
+ // 1) Pass a boolean throughout the pipeline and get an accurate result, or
+ // 2) Just check that the `HasAlwaysThrowingInvokes()` flag is true now. This is not 100%
+ // accurate but the only other part where we set `HasAlwaysThrowingInvokes` is constant
+ // folding the DivideUnsigned intrinsics for when the divisor is known to be 0. This case is
+ // rare enough that changing the pipeline for this is not worth it. In the case of the false
+ // positive (i.e. A) we didn't inline at all, B) the graph already had an always throwing
+ // invoke, and C) we didn't set any new always throwing invokes), we will be running constant
+ // folding, instruction simplifier, and dead code elimination one more time even though it
+ // shouldn't change things. There's no false negative case.
return did_inline || graph_->HasAlwaysThrowingInvokes();
}
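
The comment above describes a flag-based change-detection idiom. A minimal standalone sketch of that idiom (hypothetical Graph type, not ART's HGraph):

    #include <cassert>

    // Hypothetical model: a pass reports "graph changed" either via its own
    // bookkeeping or via a side-effect flag that another pass may also set.
    struct Graph {
      bool has_always_throwing_invokes = false;
      bool HasAlwaysThrowingInvokes() const { return has_always_throwing_invokes; }
    };

    bool RunInlinerModel(Graph* graph, bool did_inline) {
      // A false positive only means cleanup passes (constant folding, simplifier,
      // dead code elimination) re-run without effect; a change is never missed.
      return did_inline || graph->HasAlwaysThrowingInvokes();
    }

    int main() {
      Graph g;
      g.has_always_throwing_invokes = true;  // e.g. set earlier by another pass
      assert(RunInlinerModel(&g, /*did_inline=*/false));  // possible false positive
    }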
@@ -223,7 +228,7 @@ static bool IsMethodOrDeclaringClassFinal(ArtMethod* method)
* the actual runtime target of an interface or virtual call.
* Return nullptr if the runtime target cannot be proven.
*/
-static ArtMethod* FindVirtualOrInterfaceTarget(HInvoke* invoke)
+static ArtMethod* FindVirtualOrInterfaceTarget(HInvoke* invoke, ReferenceTypeInfo info)
REQUIRES_SHARED(Locks::mutator_lock_) {
ArtMethod* resolved_method = invoke->GetResolvedMethod();
if (IsMethodOrDeclaringClassFinal(resolved_method)) {
@@ -231,20 +236,7 @@ static ArtMethod* FindVirtualOrInterfaceTarget(HInvoke* invoke)
return resolved_method;
}
- HInstruction* receiver = invoke->InputAt(0);
- if (receiver->IsNullCheck()) {
- // Due to multiple levels of inlining within the same pass, it might be that
- // null check does not have the reference type of the actual receiver.
- receiver = receiver->InputAt(0);
- }
- ReferenceTypeInfo info = receiver->GetReferenceTypeInfo();
- DCHECK(info.IsValid()) << "Invalid RTI for " << receiver->DebugName();
- if (!info.IsExact()) {
- // We currently only support inlining with known receivers.
- // TODO: Remove this check, we should be able to inline final methods
- // on unknown receivers.
- return nullptr;
- } else if (info.GetTypeHandle()->IsInterface()) {
+ if (info.GetTypeHandle()->IsInterface()) {
// Statically knowing that the receiver has an interface type cannot
// help us find what is the target method.
return nullptr;
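
Why an interface-typed receiver blocks devirtualization: the static type alone does not pick out a single override. A self-contained C++ illustration (not ART code):

    #include <iostream>

    struct Iface { virtual const char* Name() const = 0; virtual ~Iface() = default; };
    struct A : Iface { const char* Name() const override { return "A"; } };
    struct B : Iface { const char* Name() const override { return "B"; } };

    const char* Call(const Iface& receiver) {
      // The static type is only `Iface`; without the dynamic type there is
      // no single target method to inline.
      return receiver.Name();
    }

    int main() {
      A a; B b;
      std::cout << Call(a) << Call(b) << '\n';  // prints "AB"
    }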
@@ -336,8 +328,8 @@ static dex::TypeIndex FindClassIndexIn(ObjPtr<mirror::Class> cls,
HInliner::InlineCacheType HInliner::GetInlineCacheType(
const StackHandleScope<InlineCache::kIndividualCacheSize>& classes) {
- DCHECK_EQ(classes.NumberOfReferences(), InlineCache::kIndividualCacheSize);
- uint8_t number_of_types = InlineCache::kIndividualCacheSize - classes.RemainingSlots();
+ DCHECK_EQ(classes.Capacity(), InlineCache::kIndividualCacheSize);
+ uint8_t number_of_types = classes.Size();
if (number_of_types == 0) {
return kInlineCacheUninitialized;
} else if (number_of_types == 1) {
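
The replaced expression and its replacement count the same thing: filled slots. A sketch of the invariant with a simplified fixed-capacity scope (hypothetical type, not ART's StackHandleScope):

    #include <cassert>
    #include <cstddef>

    template <size_t kCapacity>
    struct FixedScopeModel {
      size_t used = 0;
      size_t Capacity() const { return kCapacity; }               // total slots
      size_t Size() const { return used; }                        // filled slots
      size_t RemainingSlots() const { return kCapacity - used; }  // free slots
    };

    int main() {
      FixedScopeModel<5> scope;
      scope.used = 3;
      // The new form `Size()` equals the old `Capacity() - RemainingSlots()`.
      assert(scope.Size() == scope.Capacity() - scope.RemainingSlots());
    }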
@@ -472,15 +464,31 @@ bool HInliner::TryInline(HInvoke* invoke_instruction) {
return false;
}
- ArtMethod* actual_method = invoke_instruction->IsInvokeStaticOrDirect()
- ? invoke_instruction->GetResolvedMethod()
- : FindVirtualOrInterfaceTarget(invoke_instruction);
+ ArtMethod* actual_method = nullptr;
+ ReferenceTypeInfo receiver_info = ReferenceTypeInfo::CreateInvalid();
+ if (invoke_instruction->GetInvokeType() == kStatic) {
+ actual_method = invoke_instruction->GetResolvedMethod();
+ } else {
+ HInstruction* receiver = invoke_instruction->InputAt(0);
+ while (receiver->IsNullCheck()) {
+ // Due to multiple levels of inlining within the same pass, it might be that
+ // the null check does not have the reference type of the actual receiver.
+ receiver = receiver->InputAt(0);
+ }
+ receiver_info = receiver->GetReferenceTypeInfo();
+ DCHECK(receiver_info.IsValid()) << "Invalid RTI for " << receiver->DebugName();
+ if (invoke_instruction->IsInvokeStaticOrDirect()) {
+ actual_method = invoke_instruction->GetResolvedMethod();
+ } else {
+ actual_method = FindVirtualOrInterfaceTarget(invoke_instruction, receiver_info);
+ }
+ }
if (actual_method != nullptr) {
// Single target.
bool result = TryInlineAndReplace(invoke_instruction,
actual_method,
- ReferenceTypeInfo::CreateInvalid(),
+ receiver_info,
/* do_rtp= */ true,
/* is_speculative= */ false);
if (result) {
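
Note the `if` from the old code became a `while`: nested inlining can wrap the receiver in more than one HNullCheck, and the receiver type info must come from the underlying value. A condensed model of the unwrapping loop (hypothetical mini-IR, not ART's HInstruction):

    struct Node {
      bool is_null_check = false;
      Node* input = nullptr;
      bool IsNullCheck() const { return is_null_check; }
      Node* InputAt(int) const { return input; }
    };

    // Walk through any stack of null-check wrappers so type info is read from
    // the underlying receiver rather than a wrapper node.
    Node* UnwrapReceiver(Node* receiver) {
      while (receiver->IsNullCheck()) {
        receiver = receiver->InputAt(0);
      }
      return receiver;
    }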
@@ -541,9 +549,10 @@ bool HInliner::TryInlineFromCHA(HInvoke* invoke_instruction) {
uint32_t dex_pc = invoke_instruction->GetDexPc();
HInstruction* cursor = invoke_instruction->GetPrevious();
HBasicBlock* bb_cursor = invoke_instruction->GetBlock();
+ Handle<mirror::Class> cls = graph_->GetHandleCache()->NewHandle(method->GetDeclaringClass());
if (!TryInlineAndReplace(invoke_instruction,
method,
- ReferenceTypeInfo::CreateInvalid(),
+ ReferenceTypeInfo::Create(cls),
/* do_rtp= */ true,
/* is_speculative= */ true)) {
return false;
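
Instead of invalid RTI, the CHA path now seeds the callee with the target's declaring class: any receiver reaching this call must be some subtype of that class, so it is a sound, non-exact bound. A tiny model of that reasoning (hypothetical types):

    struct Class {};
    struct Method {
      Class* declaring_class;
      Class* GetDeclaringClass() const { return declaring_class; }
    };

    struct TypeBound {
      const Class* upper_bound;  // receiver is-a upper_bound
      bool exact;                // false: subtypes remain possible
    };

    // The same pattern appears below in TryInlinePolymorphicCallToSameTarget.
    TypeBound ReceiverBoundFor(const Method& target) {
      return TypeBound{target.GetDeclaringClass(), /*exact=*/false};
    }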
@@ -660,17 +669,23 @@ HInliner::InlineCacheType HInliner::GetInlineCacheJIT(
return kInlineCacheNoData;
}
- Runtime::Current()->GetJit()->GetCodeCache()->CopyInlineCacheInto(
- *profiling_info->GetInlineCache(invoke_instruction->GetDexPc()),
- classes);
+ InlineCache* cache = profiling_info->GetInlineCache(invoke_instruction->GetDexPc());
+ if (cache == nullptr) {
+ // This shouldn't happen, but we don't guarantee that method resolution
+ // between baseline compilation and optimizing compilation is identical. Be robust,
+ // warn about it, and return that we don't have any inline cache data.
+ LOG(WARNING) << "No inline cache found for " << caller->PrettyMethod();
+ return kInlineCacheNoData;
+ }
+ Runtime::Current()->GetJit()->GetCodeCache()->CopyInlineCacheInto(*cache, classes);
return GetInlineCacheType(*classes);
}
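
The added null check trades a crash for a warning when the baseline tier recorded no cache at this dex pc. The same defensive shape, self-contained (hypothetical map-based lookup, not ART's code cache):

    #include <cstdint>
    #include <iostream>
    #include <unordered_map>

    struct InlineCacheModel { int classes_seen = 0; };
    enum Result { kNoData, kHasData };

    Result ReadCache(const std::unordered_map<uint32_t, InlineCacheModel>& caches,
                     uint32_t dex_pc) {
      auto it = caches.find(dex_pc);
      if (it == caches.end()) {
        // Unexpected but tolerated: tiers may have resolved methods differently.
        std::cerr << "No inline cache found for dex_pc " << dex_pc << '\n';
        return kNoData;
      }
      return kHasData;
    }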
HInliner::InlineCacheType HInliner::GetInlineCacheAOT(
HInvoke* invoke_instruction,
/*out*/StackHandleScope<InlineCache::kIndividualCacheSize>* classes) {
- DCHECK_EQ(classes->NumberOfReferences(), InlineCache::kIndividualCacheSize);
- DCHECK_EQ(classes->RemainingSlots(), InlineCache::kIndividualCacheSize);
+ DCHECK_EQ(classes->Capacity(), InlineCache::kIndividualCacheSize);
+ DCHECK_EQ(classes->Size(), 0u);
const ProfileCompilationInfo* pci = codegen_->GetCompilerOptions().GetProfileCompilationInfo();
if (pci == nullptr) {
@@ -702,19 +717,21 @@ HInliner::InlineCacheType HInliner::GetInlineCacheAOT(
// Walk over the class descriptors and look up the actual classes.
// If we cannot find a type we return kInlineCacheMissingTypes.
ClassLinker* class_linker = caller_compilation_unit_.GetClassLinker();
+ Thread* self = Thread::Current();
for (const dex::TypeIndex& type_index : dex_pc_data.classes) {
const DexFile* dex_file = caller_compilation_unit_.GetDexFile();
const char* descriptor = pci->GetTypeDescriptor(dex_file, type_index);
- ObjPtr<mirror::ClassLoader> class_loader = caller_compilation_unit_.GetClassLoader().Get();
- ObjPtr<mirror::Class> clazz = class_linker->LookupResolvedType(descriptor, class_loader);
+ ObjPtr<mirror::Class> clazz =
+ class_linker->FindClass(self, descriptor, caller_compilation_unit_.GetClassLoader());
if (clazz == nullptr) {
+ self->ClearException(); // Clean up the exception left by type resolution.
VLOG(compiler) << "Could not find class from inline cache in AOT mode "
<< invoke_instruction->GetMethodReference().PrettyMethod()
<< " : "
<< descriptor;
return kInlineCacheMissingTypes;
}
- DCHECK_NE(classes->RemainingSlots(), 0u);
+ DCHECK_LT(classes->Size(), classes->Capacity());
classes->NewHandle(clazz);
}
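
The switch from LookupResolvedType to FindClass matters because FindClass attempts resolution and, on failure, leaves a pending exception on the thread, which the caller must clear (hence the added ClearException()). A simplified model of that contract (hypothetical types, not ART's ClassLinker):

    #include <string>

    struct ThreadModel {
      std::string pending_exception;
      bool HasException() const { return !pending_exception.empty(); }
      void ClearException() { pending_exception.clear(); }
    };

    struct ClassModel {};

    // Failure both returns null and records an exception, mirroring resolution.
    ClassModel* FindClassModel(ThreadModel* self, const std::string& descriptor) {
      self->pending_exception = "NoClassDefFoundError: " + descriptor;
      return nullptr;
    }

    ClassModel* TryFind(ThreadModel* self, const std::string& descriptor) {
      ClassModel* klass = FindClassModel(self, descriptor);
      if (klass == nullptr) {
        self->ClearException();  // caller treats this as "missing type", not an error
      }
      return klass;
    }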
@@ -965,8 +982,8 @@ bool HInliner::TryInlinePolymorphicCall(
bool all_targets_inlined = true;
bool one_target_inlined = false;
- DCHECK_EQ(classes.NumberOfReferences(), InlineCache::kIndividualCacheSize);
- uint8_t number_of_types = InlineCache::kIndividualCacheSize - classes.RemainingSlots();
+ DCHECK_EQ(classes.Capacity(), InlineCache::kIndividualCacheSize);
+ uint8_t number_of_types = classes.Size();
for (size_t i = 0; i != number_of_types; ++i) {
DCHECK(classes.GetReference(i) != nullptr);
Handle<mirror::Class> handle =
@@ -1152,8 +1169,8 @@ bool HInliner::TryInlinePolymorphicCallToSameTarget(
// Check whether we are actually calling the same method among
// the different types seen.
- DCHECK_EQ(classes.NumberOfReferences(), InlineCache::kIndividualCacheSize);
- uint8_t number_of_types = InlineCache::kIndividualCacheSize - classes.RemainingSlots();
+ DCHECK_EQ(classes.Capacity(), InlineCache::kIndividualCacheSize);
+ uint8_t number_of_types = classes.Size();
for (size_t i = 0; i != number_of_types; ++i) {
DCHECK(classes.GetReference(i) != nullptr);
ArtMethod* new_method = nullptr;
@@ -1184,9 +1201,11 @@ bool HInliner::TryInlinePolymorphicCallToSameTarget(
HBasicBlock* bb_cursor = invoke_instruction->GetBlock();
HInstruction* return_replacement = nullptr;
+ Handle<mirror::Class> cls =
+ graph_->GetHandleCache()->NewHandle(actual_method->GetDeclaringClass());
if (!TryBuildAndInline(invoke_instruction,
actual_method,
- ReferenceTypeInfo::CreateInvalid(),
+ ReferenceTypeInfo::Create(cls),
&return_replacement,
/* is_speculative= */ true)) {
return false;
@@ -2062,7 +2081,8 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction,
ReferenceTypeInfo receiver_type,
HInstruction** return_replacement,
bool is_speculative) {
- DCHECK(!(resolved_method->IsStatic() && receiver_type.IsValid()));
+ DCHECK_IMPLIES(resolved_method->IsStatic(), !receiver_type.IsValid());
+ DCHECK_IMPLIES(!resolved_method->IsStatic(), receiver_type.IsValid());
const dex::CodeItem* code_item = resolved_method->GetCodeItem();
const DexFile& callee_dex_file = *resolved_method->GetDexFile();
uint32_t method_index = resolved_method->GetDexMethodIndex();
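
The two implications are strictly stronger than the old single check: together they force receiver_type.IsValid() == !resolved_method->IsStatic(), while the old form also admitted (non-static, invalid). A truth-table check, with DCHECK_IMPLIES(a, b) modeled as !(a) || (b):

    #include <cassert>

    constexpr bool Implies(bool a, bool b) { return !a || b; }

    int main() {
      for (bool is_static : {false, true}) {
        for (bool valid : {false, true}) {
          bool old_check = !(is_static && valid);
          bool new_check = Implies(is_static, !valid) && Implies(!is_static, valid);
          if (new_check) assert(old_check);  // the new pair implies the old check
        }
      }
      // Only (static, invalid) and (non-static, valid) pass the new pair.
    }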