Apply individual intrinsic recognition during inlining.
Rationale:
The inliner can introduce new method calls; in particular,
it can change an invoke-interface into an invoke-virtual,
which can expose new intrinsics. This happens, for example,
in Kotlin-generated code, where String operations first go
through the CharSequence interface. Rather than running a
full new recognition phase, we simply recognize intrinsics
at the point where an interface call is replaced by a
virtual call.
This optimization boosts KotlinMicroItems by 100%.
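As an illustration (not taken from the benchmark), a hedged sketch of
the kind of Kotlin pattern this targets; the function name is made up:

  // Illustrative only. isNotEmpty() is a CharSequence extension in the
  // Kotlin standard library, so the generated dex calls
  // CharSequence.length() via invoke-interface even though the receiver
  // is a String. Once the inliner devirtualizes that call into
  // invoke-virtual String.length(), the call can be flagged as an
  // intrinsic right away instead of waiting for a full recognition pass.
  fun hasText(s: String): Boolean = s.isNotEmpty()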
Test: test-art-host test-art-target
Change-Id: Ibd0519283d67ed6997b056e34b4eafdd49fcbc2d
diff --git a/compiler/optimizing/inliner.cc b/compiler/optimizing/inliner.cc
index 3f4a3d8..1eb1f2e 100644
--- a/compiler/optimizing/inliner.cc
+++ b/compiler/optimizing/inliner.cc
@@ -1258,6 +1258,13 @@
new_invoke->SetReferenceTypeInfo(invoke_instruction->GetReferenceTypeInfo());
}
return_replacement = new_invoke;
+ // Directly check if the new virtual call can be recognized as an intrinsic.
+ // This way, we avoid running a full recognition pass just to detect
+ // these relatively rare cases.
+ bool wrong_invoke_type = false;
+ if (IntrinsicsRecognizer::Recognize(new_invoke, &wrong_invoke_type)) {
+ MaybeRecordStat(stats_, MethodCompilationStat::kIntrinsicRecognized);
+ }
} else {
// TODO: Consider sharpening an invoke virtual once it is not dependent on the
// compiler driver.
diff --git a/compiler/optimizing/intrinsics.cc b/compiler/optimizing/intrinsics.cc
index dfae534..210607f 100644
--- a/compiler/optimizing/intrinsics.cc
+++ b/compiler/optimizing/intrinsics.cc
@@ -144,6 +144,23 @@
}
}
+bool IntrinsicsRecognizer::Recognize(HInvoke* invoke, /*out*/ bool* wrong_invoke_type) {
+ ArtMethod* art_method = invoke->GetResolvedMethod();
+ if (art_method != nullptr && art_method->IsIntrinsic()) {
+ Intrinsics intrinsic = static_cast<Intrinsics>(art_method->GetIntrinsic());
+ if (CheckInvokeType(intrinsic, invoke)) {
+ invoke->SetIntrinsic(intrinsic,
+ NeedsEnvironmentOrCache(intrinsic),
+ GetSideEffects(intrinsic),
+ GetExceptions(intrinsic));
+ return true;
+ } else {
+ *wrong_invoke_type = true;
+ }
+ }
+ return false;
+}
+
void IntrinsicsRecognizer::Run() {
ScopedObjectAccess soa(Thread::Current());
for (HBasicBlock* block : graph_->GetReversePostOrder()) {
@@ -151,23 +168,14 @@
inst_it.Advance()) {
HInstruction* inst = inst_it.Current();
if (inst->IsInvoke()) {
- HInvoke* invoke = inst->AsInvoke();
- ArtMethod* art_method = invoke->GetResolvedMethod();
- if (art_method != nullptr && art_method->IsIntrinsic()) {
- Intrinsics intrinsic = static_cast<Intrinsics>(art_method->GetIntrinsic());
- if (!CheckInvokeType(intrinsic, invoke)) {
- LOG(WARNING) << "Found an intrinsic with unexpected invoke type: "
- << static_cast<uint32_t>(intrinsic) << " for "
- << art_method->PrettyMethod()
- << invoke->DebugName();
- } else {
- invoke->SetIntrinsic(intrinsic,
- NeedsEnvironmentOrCache(intrinsic),
- GetSideEffects(intrinsic),
- GetExceptions(intrinsic));
- MaybeRecordStat(stats_,
- MethodCompilationStat::kIntrinsicRecognized);
- }
+ bool wrong_invoke_type = false;
+ if (Recognize(inst->AsInvoke(), &wrong_invoke_type)) {
+ MaybeRecordStat(stats_, MethodCompilationStat::kIntrinsicRecognized);
+ } else if (wrong_invoke_type) {
+ LOG(WARNING)
+ << "Found an intrinsic with unexpected invoke type: "
+ << inst->AsInvoke()->GetResolvedMethod()->PrettyMethod() << " "
+ << inst->DebugName();
}
}
}
diff --git a/compiler/optimizing/intrinsics.h b/compiler/optimizing/intrinsics.h
index 818d7f6..8088ab2 100644
--- a/compiler/optimizing/intrinsics.h
+++ b/compiler/optimizing/intrinsics.h
@@ -44,6 +44,11 @@
void Run() OVERRIDE;
+ // Static helper that recognizes an intrinsic call. Returns true on success.
+ // If it fails due to an invoke type mismatch, wrong_invoke_type is set.
+ // Useful to recognize intrinsics on individual calls outside this full pass.
+ static bool Recognize(HInvoke* invoke, /*out*/ bool* wrong_invoke_type);
+
static constexpr const char* kIntrinsicsRecognizerPassName = "intrinsics_recognition";
private: