Use offline inline caches during AOT compilation
Also:
- extend the testing script to understand profiles when --profile is
  passed
- filter out inline cache types which are not loaded by the caller's
  class loader
Test: m test-art-host-run-test-638-checker-inline-caches
Bug: 32434870
Change-Id: Ifcc27b3cebc79b84617412aaae64a73324151b55
diff --git a/compiler/optimizing/inliner.cc b/compiler/optimizing/inliner.cc
index 0b96005..664b95a 100644
--- a/compiler/optimizing/inliner.cc
+++ b/compiler/optimizing/inliner.cc
@@ -249,20 +249,25 @@
ProfilingInfo* const profiling_info_;
};
-static bool IsMonomorphic(Handle<mirror::ObjectArray<mirror::Class>> classes)
- REQUIRES_SHARED(Locks::mutator_lock_) {
- DCHECK_GE(InlineCache::kIndividualCacheSize, 2);
- return classes->Get(0) != nullptr && classes->Get(1) == nullptr;
-}
-
-static bool IsMegamorphic(Handle<mirror::ObjectArray<mirror::Class>> classes)
- REQUIRES_SHARED(Locks::mutator_lock_) {
- for (size_t i = 0; i < InlineCache::kIndividualCacheSize; ++i) {
- if (classes->Get(i) == nullptr) {
- return false;
+HInliner::InlineCacheType HInliner::GetInlineCacheType(
+ const Handle<mirror::ObjectArray<mirror::Class>>& classes)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ uint8_t number_of_types = 0;
+ for (; number_of_types < InlineCache::kIndividualCacheSize; ++number_of_types) {
+ if (classes->Get(number_of_types) == nullptr) {
+ break;
}
}
- return true;
+
+ if (number_of_types == 0) {
+ return kInlineCacheUninitialized;
+ } else if (number_of_types == 1) {
+ return kInlineCacheMonomorphic;
+ } else if (number_of_types == InlineCache::kIndividualCacheSize) {
+ return kInlineCacheMegamorphic;
+ } else {
+ return kInlineCachePolymorphic;
+ }
}
static mirror::Class* GetMonomorphicType(Handle<mirror::ObjectArray<mirror::Class>> classes)
@@ -271,18 +276,6 @@
return classes->Get(0);
}
-static bool IsUninitialized(Handle<mirror::ObjectArray<mirror::Class>> classes)
- REQUIRES_SHARED(Locks::mutator_lock_) {
- return classes->Get(0) == nullptr;
-}
-
-static bool IsPolymorphic(Handle<mirror::ObjectArray<mirror::Class>> classes)
- REQUIRES_SHARED(Locks::mutator_lock_) {
- DCHECK_GE(InlineCache::kIndividualCacheSize, 3);
- return classes->Get(1) != nullptr &&
- classes->Get(InlineCache::kIndividualCacheSize - 1) == nullptr;
-}
-
ArtMethod* HInliner::TryCHADevirtualization(ArtMethod* resolved_method) {
if (!resolved_method->HasSingleImplementation()) {
return nullptr;
@@ -353,67 +346,209 @@
}
return result;
}
-
DCHECK(!invoke_instruction->IsInvokeStaticOrDirect());
- // Check if we can use an inline cache.
- ArtMethod* caller = graph_->GetArtMethod();
- if (Runtime::Current()->UseJitCompilation()) {
- // Under JIT, we should always know the caller.
- DCHECK(caller != nullptr);
- ScopedProfilingInfoInlineUse spiis(caller, soa.Self());
- ProfilingInfo* profiling_info = spiis.GetProfilingInfo();
- if (profiling_info != nullptr) {
- StackHandleScope<1> hs(soa.Self());
- ClassLinker* class_linker = caller_compilation_unit_.GetClassLinker();
- Handle<mirror::ObjectArray<mirror::Class>> inline_cache = hs.NewHandle(
- mirror::ObjectArray<mirror::Class>::Alloc(
- soa.Self(),
- class_linker->GetClassRoot(ClassLinker::kClassArrayClass),
- InlineCache::kIndividualCacheSize));
- if (inline_cache == nullptr) {
- // We got an OOME. Just clear the exception, and don't inline.
- DCHECK(soa.Self()->IsExceptionPending());
- soa.Self()->ClearException();
- VLOG(compiler) << "Out of memory in the compiler when trying to inline";
- return false;
+ // Try using inline caches.
+ return TryInlineFromInlineCache(caller_dex_file, invoke_instruction, resolved_method);
+}
+
+static Handle<mirror::ObjectArray<mirror::Class>> AllocateInlineCacheHolder(
+ const DexCompilationUnit& compilation_unit,
+ StackHandleScope<1>* hs)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
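+ // Allocate the fixed-size object array used to hold the receiver classes;
+ // the holder is shared by both the JIT and the AOT extraction paths.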
+ Thread* self = Thread::Current();
+ ClassLinker* class_linker = compilation_unit.GetClassLinker();
+ Handle<mirror::ObjectArray<mirror::Class>> inline_cache = hs->NewHandle(
+ mirror::ObjectArray<mirror::Class>::Alloc(
+ self,
+ class_linker->GetClassRoot(ClassLinker::kClassArrayClass),
+ InlineCache::kIndividualCacheSize));
+ if (inline_cache == nullptr) {
+ // We got an OOME. Just clear the exception, and don't inline.
+ DCHECK(self->IsExceptionPending());
+ self->ClearException();
+ VLOG(compiler) << "Out of memory in the compiler when trying to inline";
+ }
+ return inline_cache;
+}
+
+bool HInliner::TryInlineFromInlineCache(const DexFile& caller_dex_file,
+ HInvoke* invoke_instruction,
+ ArtMethod* resolved_method)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ StackHandleScope<1> hs(Thread::Current());
+ Handle<mirror::ObjectArray<mirror::Class>> inline_cache;
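+ // Under JIT the inline cache is read from the caller's live ProfilingInfo;
+ // under AOT it is reconstructed from the offline profile, if one was given
+ // to the compiler.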
+ InlineCacheType inline_cache_type = Runtime::Current()->IsAotCompiler()
+ ? GetInlineCacheAOT(caller_dex_file, invoke_instruction, &hs, &inline_cache)
+ : GetInlineCacheJIT(invoke_instruction, &hs, &inline_cache);
+
+ switch (inline_cache_type) {
+ case kInlineCacheNoData:
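+ // No inline cache information is available for this call site; bail out below.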
+ break;
+
+ case kInlineCacheUninitialized:
+ VLOG(compiler) << "Interface or virtual call to "
+ << caller_dex_file.PrettyMethod(invoke_instruction->GetDexMethodIndex())
+ << " is not hit and not inlined";
+ return false;
+
+ case kInlineCacheMonomorphic:
+ MaybeRecordStat(kMonomorphicCall);
+ if (outermost_graph_->IsCompilingOsr()) {
+ // If we are compiling OSR, we pretend this call is polymorphic, as we may come from the
+ // interpreter and it may have seen different receiver types.
+ return TryInlinePolymorphicCall(invoke_instruction, resolved_method, inline_cache);
} else {
- Runtime::Current()->GetJit()->GetCodeCache()->CopyInlineCacheInto(
- *profiling_info->GetInlineCache(invoke_instruction->GetDexPc()),
- inline_cache);
- if (IsUninitialized(inline_cache)) {
- VLOG(compiler) << "Interface or virtual call to "
- << caller_dex_file.PrettyMethod(method_index)
- << " is not hit and not inlined";
- return false;
- } else if (IsMonomorphic(inline_cache)) {
- MaybeRecordStat(kMonomorphicCall);
- if (outermost_graph_->IsCompilingOsr()) {
- // If we are compiling OSR, we pretend this call is polymorphic, as we may come from the
- // interpreter and it may have seen different receiver types.
- return TryInlinePolymorphicCall(invoke_instruction, resolved_method, inline_cache);
- } else {
- return TryInlineMonomorphicCall(invoke_instruction, resolved_method, inline_cache);
- }
- } else if (IsPolymorphic(inline_cache)) {
- MaybeRecordStat(kPolymorphicCall);
- return TryInlinePolymorphicCall(invoke_instruction, resolved_method, inline_cache);
- } else {
- DCHECK(IsMegamorphic(inline_cache));
- VLOG(compiler) << "Interface or virtual call to "
- << caller_dex_file.PrettyMethod(method_index)
- << " is megamorphic and not inlined";
- MaybeRecordStat(kMegamorphicCall);
- return false;
- }
+ return TryInlineMonomorphicCall(invoke_instruction, resolved_method, inline_cache);
}
+
+ case kInlineCachePolymorphic:
+ MaybeRecordStat(kPolymorphicCall);
+ return TryInlinePolymorphicCall(invoke_instruction, resolved_method, inline_cache);
+
+ case kInlineCacheMegamorphic:
+ VLOG(compiler) << "Interface or virtual call to "
+ << caller_dex_file.PrettyMethod(invoke_instruction->GetDexMethodIndex())
+ << " is megamorphic and not inlined";
+ MaybeRecordStat(kMegamorphicCall);
+ return false;
+
+ case kInlineCacheMissingTypes:
+ VLOG(compiler) << "Interface or virtual call to "
+ << caller_dex_file.PrettyMethod(invoke_instruction->GetDexMethodIndex())
+ << " is missing types and not inlined";
+ return false;
+ }
+
+ VLOG(compiler) << "Interface or virtual call to "
+ << caller_dex_file.PrettyMethod(invoke_instruction->GetDexMethodIndex())
+ << " could not be statically determined";
+ return false;
+}
+
+HInliner::InlineCacheType HInliner::GetInlineCacheJIT(
+ HInvoke* invoke_instruction,
+ StackHandleScope<1>* hs,
+ /*out*/Handle<mirror::ObjectArray<mirror::Class>>* inline_cache)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ DCHECK(Runtime::Current()->UseJitCompilation());
+
+ ArtMethod* caller = graph_->GetArtMethod();
+ // Under JIT, we should always know the caller.
+ DCHECK(caller != nullptr);
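+ // The inline caches are recorded in the caller's ProfilingInfo, which is
+ // filled at runtime and may not exist yet.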
+ ScopedProfilingInfoInlineUse spiis(caller, Thread::Current());
+ ProfilingInfo* profiling_info = spiis.GetProfilingInfo();
+
+ if (profiling_info == nullptr) {
+ return kInlineCacheNoData;
+ }
+
+ *inline_cache = AllocateInlineCacheHolder(caller_compilation_unit_, hs);
+ if (inline_cache->Get() == nullptr) {
+ // We can't extract any data if we failed to allocate.
+ return kInlineCacheNoData;
+ } else {
+ Runtime::Current()->GetJit()->GetCodeCache()->CopyInlineCacheInto(
+ *profiling_info->GetInlineCache(invoke_instruction->GetDexPc()),
+ *inline_cache);
+ return GetInlineCacheType(*inline_cache);
+ }
+}
+
+HInliner::InlineCacheType HInliner::GetInlineCacheAOT(
+ const DexFile& caller_dex_file,
+ HInvoke* invoke_instruction,
+ StackHandleScope<1>* hs,
+ /*out*/Handle<mirror::ObjectArray<mirror::Class>>* inline_cache)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ DCHECK(Runtime::Current()->IsAotCompiler());
+ const ProfileCompilationInfo* pci = compiler_driver_->GetProfileCompilationInfo();
+ if (pci == nullptr) {
+ return kInlineCacheNoData;
+ }
+
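+ // Look up the caller in the profile. The offline profile stores, per dex pc
+ // of an invoke, the receiver types that were seen at runtime.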
+ ProfileCompilationInfo::OfflineProfileMethodInfo offline_profile;
+ bool found = pci->GetMethod(caller_dex_file.GetLocation(),
+ caller_dex_file.GetLocationChecksum(),
+ caller_compilation_unit_.GetDexMethodIndex(),
+ &offline_profile);
+ if (!found) {
+ return kInlineCacheNoData; // No profile information for this invocation.
+ }
+
+ *inline_cache = AllocateInlineCacheHolder(caller_compilation_unit_, hs);
+ if (inline_cache->Get() == nullptr) {
+ // We can't extract any data if we failed to allocate.
+ return kInlineCacheNoData;
+ } else {
+ return ExtractClassesFromOfflineProfile(invoke_instruction,
+ offline_profile,
+ *inline_cache);
+ }
+}
+
+HInliner::InlineCacheType HInliner::ExtractClassesFromOfflineProfile(
+ const HInvoke* invoke_instruction,
+ const ProfileCompilationInfo::OfflineProfileMethodInfo& offline_profile,
+ /*out*/Handle<mirror::ObjectArray<mirror::Class>> inline_cache)
+ REQUIRES_SHARED(Locks::mutator_lock_) {
+ const auto it = offline_profile.inline_caches.find(invoke_instruction->GetDexPc());
+ if (it == offline_profile.inline_caches.end()) {
+ return kInlineCacheUninitialized;
+ }
+
+ const ProfileCompilationInfo::DexPcData& dex_pc_data = it->second;
+
+ if (dex_pc_data.is_missing_types) {
+ return kInlineCacheMissingTypes;
+ }
+ if (dex_pc_data.is_megamorphic) {
+ return kInlineCacheMegamorphic;
+ }
+
+ DCHECK_LE(dex_pc_data.classes.size(), InlineCache::kIndividualCacheSize);
+ Thread* self = Thread::Current();
+ // We need to resolve the class relative to the containing dex file.
+ // So first, build a mapping from the index of the dex file in the profile to
+ // its dex cache. This avoids repeating the lookup when walking over the
+ // inline cache types.
+ std::vector<ObjPtr<mirror::DexCache>> dex_profile_index_to_dex_cache(
+ offline_profile.dex_references.size());
+ for (size_t i = 0; i < offline_profile.dex_references.size(); i++) {
+ bool found = false;
+ for (const DexFile* dex_file : compiler_driver_->GetDexFilesForOatFile()) {
+ if (offline_profile.dex_references[i].MatchesDex(dex_file)) {
+ dex_profile_index_to_dex_cache[i] =
+ caller_compilation_unit_.GetClassLinker()->FindDexCache(self, *dex_file);
+ found = true;
+ }
+ }
+ if (!found) {
+ VLOG(compiler) << "Could not find profiled dex file: "
+ << offline_profile.dex_references[i].dex_location;
+ return kInlineCacheMissingTypes;
}
}
- VLOG(compiler) << "Interface or virtual call to "
- << caller_dex_file.PrettyMethod(method_index)
- << " could not be statically determined";
- return false;
+ // Walk over the classes and resolve them. If we cannot find a type, we return
+ // kInlineCacheMissingTypes.
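+ // LookupResolvedType only returns classes that are already loaded through the
+ // caller's class loader, so this also filters out types the caller cannot see.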
+ int ic_index = 0;
+ for (const ProfileCompilationInfo::ClassReference& class_ref : dex_pc_data.classes) {
+ ObjPtr<mirror::DexCache> dex_cache =
+ dex_profile_index_to_dex_cache[class_ref.dex_profile_index];
+ DCHECK(dex_cache != nullptr);
+ ObjPtr<mirror::Class> clazz = ClassLinker::LookupResolvedType(
+ class_ref.type_index,
+ dex_cache,
+ caller_compilation_unit_.GetClassLoader().Get());
+ if (clazz != nullptr) {
+ inline_cache->Set(ic_index++, clazz);
+ } else {
+ VLOG(compiler) << "Could not resolve class from inline cache in AOT mode "
+ << caller_compilation_unit_.GetDexFile()->PrettyMethod(
+ invoke_instruction->GetDexMethodIndex()) << " : "
+ << caller_compilation_unit_
+ .GetDexFile()->StringByTypeIdx(class_ref.type_index);
+ return kInlineCacheMissingTypes;
+ }
+ }
+ return GetInlineCacheType(inline_cache);
}
HInstanceFieldGet* HInliner::BuildGetReceiverClass(ClassLinker* class_linker,
@@ -556,6 +691,13 @@
// Insert before setting the kind, as setting the kind affects the inputs.
bb_cursor->InsertInstructionAfter(load_class, receiver_class);
load_class->SetLoadKind(kind);
+ // In AOT mode, we will most likely load the class from BSS, which will involve a call
+ // to the runtime. In this case, the load instruction will need an environment, so copy
+ // it from the invoke instruction.
+ if (load_class->NeedsEnvironment()) {
+ DCHECK(Runtime::Current()->IsAotCompiler());
+ load_class->CopyEnvironmentFrom(invoke_instruction->GetEnvironment());
+ }
HNotEqual* compare = new (graph_->GetArena()) HNotEqual(load_class, receiver_class);
bb_cursor->InsertInstructionAfter(compare, load_class);
@@ -746,7 +888,10 @@
ArtMethod* resolved_method,
Handle<mirror::ObjectArray<mirror::Class>> classes) {
// This optimization only works under JIT for now.
- DCHECK(Runtime::Current()->UseJitCompilation());
+ if (!Runtime::Current()->UseJitCompilation()) {
+ return false;
+ }
+
if (graph_->GetInstructionSet() == kMips64) {
// TODO: Support HClassTableGet for mips64.
return false;