Ensure inlined static calls perform clinit checks in Optimizing.
Calls to static methods have implicit class initialization
(clinit) checks of the method's declaring class in
Optimizing. However, when such a static call is inlined,
the implicit clinit check vanishes, possibly leading to
incorrect behavior.
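As a hypothetical Java-level illustration (the classes below are
made up for this description, not taken from the CL), consider a
static call whose declaring class has an observable <clinit> side
effect:

  class Logger {  // illustrative only
    static { System.out.println("Logger initialized"); }

    static void log(String msg) { System.out.println(msg); }
  }

  class Caller {  // illustrative only
    static void run() {
      // The first execution of this call must run Logger.<clinit>
      // and print "Logger initialized" before "hello". If log()
      // were inlined without a class initialization check, that
      // side effect would be silently dropped.
      Logger.log("hello");
    }
  }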
To ensure that inlining static methods does not change the
behavior of a program, add explicit class initialization
checks (art::HClinitCheck) as well as load class
instructions (art::HLoadClass) as the last input of static
calls (art::HInvokeStaticOrDirect) in Optimizing's control
flow graphs, when the declaring class is reachable and not
known to be already initialized. Then, when considering the
inlining of a static method call, proceed only if the call
has no implicit clinit check requirement.
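A minimal sketch of the case needing no check (again a made-up
example): when the declaring class is the compiling (referrer)
class itself, its initializer has necessarily already run, so the
builder records no requirement and inlining remains safe:

  class Self {  // illustrative only
    static int helper() { return 42; }

    static int entry() {
      // Self is initialized by the time entry() executes, so this
      // call gets ClinitCheckRequirement::kNone and helper() can
      // be inlined without an extra check.
      return helper();
    }
  }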
The added explicit clinit checks are already removed by the
art::PrepareForRegisterAllocation visitor. This CL also
extends this visitor to turn explicit clinit checks from
static invokes into implicit ones after the inlining step,
by removing the added art::HLoadClass nodes mentioned
above.
Change-Id: I9ba452b8bd09ae1fdd9a3797ef556e3e7e19c651
diff --git a/compiler/driver/compiler_driver-inl.h b/compiler/driver/compiler_driver-inl.h
index b4d4695..3056051 100644
--- a/compiler/driver/compiler_driver-inl.h
+++ b/compiler/driver/compiler_driver-inl.h
@@ -127,34 +127,67 @@
return std::make_pair(fast_get, fast_put);
}
-inline std::pair<bool, bool> CompilerDriver::IsFastStaticField(
- mirror::DexCache* dex_cache, mirror::Class* referrer_class,
- ArtField* resolved_field, uint16_t field_idx, uint32_t* storage_index) {
- DCHECK(resolved_field->IsStatic());
+template <typename ArtMember>
+inline bool CompilerDriver::CanAccessResolvedMember(mirror::Class* referrer_class ATTRIBUTE_UNUSED,
+ mirror::Class* access_to ATTRIBUTE_UNUSED,
+ ArtMember* member ATTRIBUTE_UNUSED,
+ mirror::DexCache* dex_cache ATTRIBUTE_UNUSED,
+ uint32_t field_idx ATTRIBUTE_UNUSED) {
+ // Not defined for ArtMember values other than ArtField or mirror::ArtMethod.
+ UNREACHABLE();
+}
+
+template <>
+inline bool CompilerDriver::CanAccessResolvedMember<ArtField>(mirror::Class* referrer_class,
+ mirror::Class* access_to,
+ ArtField* field,
+ mirror::DexCache* dex_cache,
+ uint32_t field_idx) {
+ return referrer_class->CanAccessResolvedField(access_to, field, dex_cache, field_idx);
+}
+
+template <>
+inline bool CompilerDriver::CanAccessResolvedMember<mirror::ArtMethod>(
+ mirror::Class* referrer_class,
+ mirror::Class* access_to,
+ mirror::ArtMethod* method,
+ mirror::DexCache* dex_cache,
+ uint32_t field_idx) {
+ return referrer_class->CanAccessResolvedMethod(access_to, method, dex_cache, field_idx);
+}
+
+template <typename ArtMember>
+inline std::pair<bool, bool> CompilerDriver::IsClassOfStaticMemberAvailableToReferrer(
+ mirror::DexCache* dex_cache,
+ mirror::Class* referrer_class,
+ ArtMember* resolved_member,
+ uint16_t member_idx,
+ uint32_t* storage_index) {
+ DCHECK(resolved_member->IsStatic());
if (LIKELY(referrer_class != nullptr)) {
- mirror::Class* fields_class = resolved_field->GetDeclaringClass();
- if (fields_class == referrer_class) {
- *storage_index = fields_class->GetDexTypeIndex();
+ mirror::Class* members_class = resolved_member->GetDeclaringClass();
+ if (members_class == referrer_class) {
+ *storage_index = members_class->GetDexTypeIndex();
return std::make_pair(true, true);
}
- if (referrer_class->CanAccessResolvedField(fields_class, resolved_field,
- dex_cache, field_idx)) {
- // We have the resolved field, we must make it into a index for the referrer
+ if (CanAccessResolvedMember<ArtMember>(
+ referrer_class, members_class, resolved_member, dex_cache, member_idx)) {
+ // We have the resolved member, we must make it into an index for the referrer
// in its static storage (which may fail if it doesn't have a slot for it)
// TODO: for images we can elide the static storage base null check
// if we know there's a non-null entry in the image
const DexFile* dex_file = dex_cache->GetDexFile();
uint32_t storage_idx = DexFile::kDexNoIndex;
- if (LIKELY(fields_class->GetDexCache() == dex_cache)) {
- // common case where the dex cache of both the referrer and the field are the same,
+ if (LIKELY(members_class->GetDexCache() == dex_cache)) {
+ // common case where the dex cache of both the referrer and the member are the same,
// no need to search the dex file
- storage_idx = fields_class->GetDexTypeIndex();
+ storage_idx = members_class->GetDexTypeIndex();
} else {
- // Search dex file for localized ssb index, may fail if field's class is a parent
+ // Search dex file for localized ssb index, may fail if member's class is a parent
// of the class mentioned in the dex file and there is no dex cache entry.
std::string temp;
const DexFile::StringId* string_id =
- dex_file->FindStringId(resolved_field->GetDeclaringClass()->GetDescriptor(&temp));
+ dex_file->FindStringId(resolved_member->GetDeclaringClass()->GetDescriptor(&temp));
if (string_id != nullptr) {
const DexFile::TypeId* type_id =
dex_file->FindTypeId(dex_file->GetIndexForStringId(*string_id));
@@ -166,7 +199,7 @@
}
if (storage_idx != DexFile::kDexNoIndex) {
*storage_index = storage_idx;
- return std::make_pair(true, !resolved_field->IsFinal());
+ return std::make_pair(true, !resolved_member->IsFinal());
}
}
}
@@ -175,6 +208,23 @@
return std::make_pair(false, false);
}
+inline std::pair<bool, bool> CompilerDriver::IsFastStaticField(
+ mirror::DexCache* dex_cache, mirror::Class* referrer_class,
+ ArtField* resolved_field, uint16_t field_idx, uint32_t* storage_index) {
+ return IsClassOfStaticMemberAvailableToReferrer(
+ dex_cache, referrer_class, resolved_field, field_idx, storage_index);
+}
+
+inline bool CompilerDriver::IsClassOfStaticMethodAvailableToReferrer(
+ mirror::DexCache* dex_cache, mirror::Class* referrer_class,
+ mirror::ArtMethod* resolved_method, uint16_t method_idx, uint32_t* storage_index) {
+ std::pair<bool, bool> result = IsClassOfStaticMemberAvailableToReferrer(
+ dex_cache, referrer_class, resolved_method, method_idx, storage_index);
+ // Only the first member of `result` is meaningful, as there is no
+ // "write access" to a method.
+ return result.first;
+}
+
inline bool CompilerDriver::IsStaticFieldInReferrerClass(mirror::Class* referrer_class,
ArtField* resolved_field) {
DCHECK(resolved_field->IsStatic());
diff --git a/compiler/driver/compiler_driver.h b/compiler/driver/compiler_driver.h
index ce13a17..742e2e5 100644
--- a/compiler/driver/compiler_driver.h
+++ b/compiler/driver/compiler_driver.h
@@ -280,6 +280,18 @@
ArtField* resolved_field, uint16_t field_idx, uint32_t* storage_index)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ // Return whether the declaring class of `resolved_method` is
+ // available to `referrer_class`. If this is true, compute the type
+ // index of the declaring class in the referrer's dex file and
+ // return it through the out argument `storage_index`; otherwise
+ // return DexFile::kDexNoIndex through `storage_index`.
+ bool IsClassOfStaticMethodAvailableToReferrer(mirror::DexCache* dex_cache,
+ mirror::Class* referrer_class,
+ mirror::ArtMethod* resolved_method,
+ uint16_t method_idx,
+ uint32_t* storage_index)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
// Is static field's in referrer's class?
bool IsStaticFieldInReferrerClass(mirror::Class* referrer_class, ArtField* resolved_field)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -455,6 +467,33 @@
}
private:
+ // Return whether the declaring class of `resolved_member` is
+ // available to `referrer_class` for read or write access using two
+ // Boolean values returned as a pair. If it is true at least for read
+ // access, compute the type index of the declaring class in the
+ // referrer's dex file and return it through the out argument
+ // `storage_index`; otherwise return DexFile::kDexNoIndex through
+ // `storage_index`.
+ template <typename ArtMember>
+ std::pair<bool, bool> IsClassOfStaticMemberAvailableToReferrer(mirror::DexCache* dex_cache,
+ mirror::Class* referrer_class,
+ ArtMember* resolved_member,
+ uint16_t member_idx,
+ uint32_t* storage_index)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
+ // Can `referrer_class` access the resolved `member`?
+ // Dispatch call to mirror::Class::CanAccessResolvedField or
+ // mirror::Class::CanAccessResolvedMethod depending on the value of
+ // ArtMember.
+ template <typename ArtMember>
+ static bool CanAccessResolvedMember(mirror::Class* referrer_class,
+ mirror::Class* access_to,
+ ArtMember* member,
+ mirror::DexCache* dex_cache,
+ uint32_t field_idx)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
// These flags are internal to CompilerDriver for collecting INVOKE resolution statistics.
// The only external contract is that unresolved method has flags 0 and resolved non-0.
enum {
diff --git a/compiler/optimizing/builder.cc b/compiler/optimizing/builder.cc
index 818d671..bb20807 100644
--- a/compiler/optimizing/builder.cc
+++ b/compiler/optimizing/builder.cc
@@ -21,6 +21,7 @@
#include "class_linker.h"
#include "dex_file-inl.h"
#include "dex_instruction-inl.h"
+#include "dex/verified_method.h"
#include "driver/compiler_driver-inl.h"
#include "driver/compiler_options.h"
#include "mirror/class_loader.h"
@@ -587,7 +588,7 @@
const char* descriptor = dex_file_->StringDataByIdx(proto_id.shorty_idx_);
Primitive::Type return_type = Primitive::GetType(descriptor[0]);
bool is_instance_call = invoke_type != kStatic;
- const size_t number_of_arguments = strlen(descriptor) - (is_instance_call ? 0 : 1);
+ size_t number_of_arguments = strlen(descriptor) - (is_instance_call ? 0 : 1);
MethodReference target_method(dex_file_, method_idx);
uintptr_t direct_code;
@@ -605,7 +606,15 @@
}
DCHECK(optimized_invoke_type != kSuper);
+ // By default, consider that the called method implicitly requires
+ // an initialization check of its declaring class.
+ HInvokeStaticOrDirect::ClinitCheckRequirement clinit_check_requirement =
+ HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit;
+ // Potential class initialization check, in the case of a static method call.
+ HClinitCheck* clinit_check = nullptr;
+
HInvoke* invoke = nullptr;
+
if (optimized_invoke_type == kVirtual) {
invoke = new (arena_) HInvokeVirtual(
arena_, number_of_arguments, return_type, dex_pc, method_idx, table_index);
@@ -620,9 +629,75 @@
bool is_recursive =
(target_method.dex_method_index == dex_compilation_unit_->GetDexMethodIndex());
DCHECK(!is_recursive || (target_method.dex_file == dex_compilation_unit_->GetDexFile()));
+
+ if (optimized_invoke_type == kStatic) {
+ ScopedObjectAccess soa(Thread::Current());
+ StackHandleScope<4> hs(soa.Self());
+ Handle<mirror::DexCache> dex_cache(hs.NewHandle(
+ dex_compilation_unit_->GetClassLinker()->FindDexCache(
+ *dex_compilation_unit_->GetDexFile())));
+ Handle<mirror::ClassLoader> class_loader(hs.NewHandle(
+ soa.Decode<mirror::ClassLoader*>(dex_compilation_unit_->GetClassLoader())));
+ mirror::ArtMethod* resolved_method = compiler_driver_->ResolveMethod(
+ soa, dex_cache, class_loader, dex_compilation_unit_, method_idx,
+ optimized_invoke_type);
+
+ if (resolved_method == nullptr) {
+ MaybeRecordStat(MethodCompilationStat::kNotCompiledUnresolvedMethod);
+ return false;
+ }
+
+ const DexFile& outer_dex_file = *outer_compilation_unit_->GetDexFile();
+ Handle<mirror::DexCache> outer_dex_cache(hs.NewHandle(
+ outer_compilation_unit_->GetClassLinker()->FindDexCache(outer_dex_file)));
+ Handle<mirror::Class> referrer_class(hs.NewHandle(GetOutermostCompilingClass()));
+
+ // The index at which the method's class is stored in the DexCache's type array.
+ uint32_t storage_index = DexFile::kDexNoIndex;
+ bool is_referrer_class = (resolved_method->GetDeclaringClass() == referrer_class.Get());
+ if (is_referrer_class) {
+ storage_index = referrer_class->GetDexTypeIndex();
+ } else if (outer_dex_cache.Get() == dex_cache.Get()) {
+ // Get `storage_index` from IsClassOfStaticMethodAvailableToReferrer.
+ compiler_driver_->IsClassOfStaticMethodAvailableToReferrer(outer_dex_cache.Get(),
+ referrer_class.Get(),
+ resolved_method,
+ method_idx,
+ &storage_index);
+ }
+
+ if (is_referrer_class) {
+ // If the declaring class is the referrer class, no class
+ // initialization check is needed before the static method call.
+ clinit_check_requirement = HInvokeStaticOrDirect::ClinitCheckRequirement::kNone;
+ } else if (storage_index != DexFile::kDexNoIndex) {
+ // If the method's class type index is available, check
+ // whether we should add an explicit class initialization
+ // check for its declaring class before the static method call.
+
+ // TODO: find out why this check is needed.
+ bool is_in_dex_cache = compiler_driver_->CanAssumeTypeIsPresentInDexCache(
+ *outer_compilation_unit_->GetDexFile(), storage_index);
+ bool is_initialized =
+ resolved_method->GetDeclaringClass()->IsInitialized() && is_in_dex_cache;
+
+ if (is_initialized) {
+ clinit_check_requirement = HInvokeStaticOrDirect::ClinitCheckRequirement::kNone;
+ } else {
+ clinit_check_requirement = HInvokeStaticOrDirect::ClinitCheckRequirement::kExplicit;
+ HLoadClass* load_class =
+ new (arena_) HLoadClass(storage_index, is_referrer_class, dex_pc);
+ current_block_->AddInstruction(load_class);
+ clinit_check = new (arena_) HClinitCheck(load_class, dex_pc);
+ current_block_->AddInstruction(clinit_check);
+ ++number_of_arguments;
+ }
+ }
+ }
+
invoke = new (arena_) HInvokeStaticOrDirect(
arena_, number_of_arguments, return_type, dex_pc, target_method.dex_method_index,
- is_recursive, invoke_type, optimized_invoke_type);
+ is_recursive, invoke_type, optimized_invoke_type, clinit_check_requirement);
}
size_t start_index = 0;
@@ -655,6 +730,12 @@
}
}
+ if (clinit_check_requirement == HInvokeStaticOrDirect::ClinitCheckRequirement::kExplicit) {
+ // Add the class initialization check as last input of `invoke`.
+ DCHECK(clinit_check != nullptr);
+ invoke->SetArgumentAt(argument_index++, clinit_check);
+ }
+
DCHECK_EQ(argument_index, number_of_arguments);
current_block_->AddInstruction(invoke);
latest_result_ = invoke;
@@ -732,7 +813,6 @@
return compiling_class.Get() == cls.Get();
}
-
bool HGraphBuilder::BuildStaticFieldAccess(const Instruction& instruction,
uint32_t dex_pc,
bool is_put) {
@@ -764,7 +844,7 @@
if (is_referrer_class) {
storage_index = referrer_class->GetDexTypeIndex();
} else if (outer_dex_cache.Get() != dex_cache.Get()) {
- // The compiler driver cannot currently understand multple dex caches involved. Just bailout.
+ // The compiler driver cannot currently understand multiple dex caches involved. Just bailout.
return false;
} else {
std::pair<bool, bool> pair = compiler_driver_->IsFastStaticField(
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index 38fa043..41d8017 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -1243,6 +1243,10 @@
}
void LocationsBuilderARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
+ // Explicit clinit checks triggered by static invokes must have been
+ // pruned by art::PrepareForRegisterAllocation.
+ DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
+
IntrinsicLocationsBuilderARM intrinsic(GetGraph()->GetArena(),
codegen_->GetInstructionSetFeatures());
if (intrinsic.TryDispatch(invoke)) {
@@ -1267,6 +1271,10 @@
}
void InstructionCodeGeneratorARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
+ // Explicit clinit checks triggered by static invokes must have been
+ // pruned by art::PrepareForRegisterAllocation.
+ DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
+
if (TryGenerateIntrinsicCode(invoke, codegen_)) {
return;
}
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 23ba339..59f4cc2 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -1966,6 +1966,10 @@
}
void LocationsBuilderARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
+ // Explicit clinit checks triggered by static invokes must have been
+ // pruned by art::PrepareForRegisterAllocation.
+ DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
+
IntrinsicLocationsBuilderARM64 intrinsic(GetGraph()->GetArena());
if (intrinsic.TryDispatch(invoke)) {
return;
@@ -2016,6 +2020,10 @@
}
void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
+ // Explicit clinit checks triggered by static invokes must have been
+ // pruned by art::PrepareForRegisterAllocation.
+ DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
+
if (TryGenerateIntrinsicCode(invoke, codegen_)) {
return;
}
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 86e84ac..ad71d3a 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -1196,6 +1196,10 @@
}
void LocationsBuilderX86::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
+ // Explicit clinit checks triggered by static invokes must have been
+ // pruned by art::PrepareForRegisterAllocation.
+ DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
+
IntrinsicLocationsBuilderX86 intrinsic(codegen_);
if (intrinsic.TryDispatch(invoke)) {
return;
@@ -1214,6 +1218,10 @@
}
void InstructionCodeGeneratorX86::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
+ // Explicit clinit checks triggered by static invokes must have been
+ // pruned by art::PrepareForRegisterAllocation.
+ DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
+
if (TryGenerateIntrinsicCode(invoke, codegen_)) {
return;
}
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index d8d2ae3..e996e46 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -1270,6 +1270,10 @@
}
void LocationsBuilderX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
+ // Explicit clinit checks triggered by static invokes must have been
+ // pruned by art::PrepareForRegisterAllocation.
+ DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
+
IntrinsicLocationsBuilderX86_64 intrinsic(codegen_);
if (intrinsic.TryDispatch(invoke)) {
return;
@@ -1288,6 +1292,10 @@
}
void InstructionCodeGeneratorX86_64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
+ // Explicit clinit checks triggered by static invokes must have been
+ // pruned by art::PrepareForRegisterAllocation.
+ DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
+
if (TryGenerateIntrinsicCode(invoke, codegen_)) {
return;
}
diff --git a/compiler/optimizing/graph_checker.cc b/compiler/optimizing/graph_checker.cc
index 2216cec..7c3c379 100644
--- a/compiler/optimizing/graph_checker.cc
+++ b/compiler/optimizing/graph_checker.cc
@@ -178,6 +178,30 @@
}
}
+void GraphChecker::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
+ VisitInstruction(invoke);
+
+ if (invoke->IsStaticWithExplicitClinitCheck()) {
+ size_t last_input_index = invoke->InputCount() - 1;
+ HInstruction* last_input = invoke->InputAt(last_input_index);
+ if (last_input == nullptr) {
+ AddError(StringPrintf("Static invoke %s:%d marked as having an explicit clinit check "
+ "has a null pointer as last input.",
+ invoke->DebugName(),
+ invoke->GetId()));
+ } else if (!last_input->IsClinitCheck() &&
+ !last_input->IsLoadClass()) {
+ AddError(StringPrintf("Static invoke %s:%d marked as having an explicit clinit check "
+ "has a last instruction (%s:%d) which is neither a clinit check "
+ "nor a load class instruction.",
+ invoke->DebugName(),
+ invoke->GetId(),
+ last_input->DebugName(),
+ last_input->GetId()));
+ }
+ }
+}
+
void SSAChecker::VisitBasicBlock(HBasicBlock* block) {
super_type::VisitBasicBlock(block);
@@ -459,7 +483,7 @@
Primitive::PrettyDescriptor(op->InputAt(1)->GetType())));
}
} else {
- if (PrimitiveKind(op->InputAt(1)->GetType()) != PrimitiveKind(op->InputAt(0)->GetType())) {
+ if (PrimitiveKind(op->InputAt(0)->GetType()) != PrimitiveKind(op->InputAt(1)->GetType())) {
AddError(StringPrintf(
"Binary operation %s %d has inputs of different types: "
"%s, and %s.",
@@ -484,7 +508,7 @@
"from its input type: %s vs %s.",
op->DebugName(), op->GetId(),
Primitive::PrettyDescriptor(op->GetType()),
- Primitive::PrettyDescriptor(op->InputAt(1)->GetType())));
+ Primitive::PrettyDescriptor(op->InputAt(0)->GetType())));
}
}
}
diff --git a/compiler/optimizing/graph_checker.h b/compiler/optimizing/graph_checker.h
index 24fee37..45e8804 100644
--- a/compiler/optimizing/graph_checker.h
+++ b/compiler/optimizing/graph_checker.h
@@ -42,6 +42,9 @@
// Check `instruction`.
void VisitInstruction(HInstruction* instruction) OVERRIDE;
+ // Perform control-flow graph checks on the invoke instruction.
+ void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) OVERRIDE;
+
// Was the last visit of the graph valid?
bool IsValid() const {
return errors_.empty();
diff --git a/compiler/optimizing/inliner.cc b/compiler/optimizing/inliner.cc
index bffd639..37b5753 100644
--- a/compiler/optimizing/inliner.cc
+++ b/compiler/optimizing/inliner.cc
@@ -130,6 +130,16 @@
return false;
}
+ if (invoke_instruction->IsInvokeStaticOrDirect() &&
+ invoke_instruction->AsInvokeStaticOrDirect()->IsStaticWithImplicitClinitCheck()) {
+ // Case of a static method that cannot be inlined because it implicitly
+ // requires an initialization check of its declaring class.
+ VLOG(compiler) << "Method " << PrettyMethod(method_index, caller_dex_file)
+ << " is not inlined because it is static and requires a clinit"
+ << " check that cannot be emitted due to Dex cache limitations";
+ return false;
+ }
+
if (!TryBuildAndInline(resolved_method, invoke_instruction, method_index, can_use_dex_cache)) {
resolved_method->SetShouldNotInline();
return false;
diff --git a/compiler/optimizing/intrinsics_arm.cc b/compiler/optimizing/intrinsics_arm.cc
index 9a6062f..f67f24a 100644
--- a/compiler/optimizing/intrinsics_arm.cc
+++ b/compiler/optimizing/intrinsics_arm.cc
@@ -79,6 +79,7 @@
static void MoveArguments(HInvoke* invoke, ArenaAllocator* arena, CodeGeneratorARM* codegen) {
if (invoke->InputCount() == 0) {
+ // No argument to move.
return;
}
diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc
index d3a4e6c..a3cb9df 100644
--- a/compiler/optimizing/intrinsics_arm64.cc
+++ b/compiler/optimizing/intrinsics_arm64.cc
@@ -88,6 +88,7 @@
static void MoveArguments(HInvoke* invoke, ArenaAllocator* arena, CodeGeneratorARM64* codegen) {
if (invoke->InputCount() == 0) {
+ // No argument to move.
return;
}
diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc
index 95ab90d..89e931d 100644
--- a/compiler/optimizing/intrinsics_x86.cc
+++ b/compiler/optimizing/intrinsics_x86.cc
@@ -113,6 +113,7 @@
static void MoveArguments(HInvoke* invoke, ArenaAllocator* arena, CodeGeneratorX86* codegen) {
if (invoke->InputCount() == 0) {
+ // No argument to move.
return;
}
@@ -1038,7 +1039,7 @@
LocationSummary::kNoCall,
kIntrinsified);
locations->SetInAt(0, Location::RequiresRegister());
- HInstruction *value = invoke->InputAt(1);
+ HInstruction* value = invoke->InputAt(1);
if (size == Primitive::kPrimByte) {
locations->SetInAt(1, Location::ByteRegisterOrConstant(EDX, value));
} else {
diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc
index d9a1c31..2a9b507 100644
--- a/compiler/optimizing/intrinsics_x86_64.cc
+++ b/compiler/optimizing/intrinsics_x86_64.cc
@@ -105,6 +105,7 @@
static void MoveArguments(HInvoke* invoke, ArenaAllocator* arena, CodeGeneratorX86_64* codegen) {
if (invoke->InputCount() == 0) {
+ // No argument to move.
return;
}
diff --git a/compiler/optimizing/nodes.cc b/compiler/optimizing/nodes.cc
index 5fca4fa..360c04e 100644
--- a/compiler/optimizing/nodes.cc
+++ b/compiler/optimizing/nodes.cc
@@ -1097,7 +1097,7 @@
// - Remove suspend checks, that hold an environment.
// We must do this after the other blocks have been inlined, otherwise ids of
// constants could overlap with the inner graph.
- int parameter_index = 0;
+ size_t parameter_index = 0;
for (HInstructionIterator it(entry_block_->GetInstructions()); !it.Done(); it.Advance()) {
HInstruction* current = it.Current();
if (current->IsNullConstant()) {
@@ -1110,6 +1110,14 @@
// TODO: Don't duplicate floating-point constants.
current->MoveBefore(outer_graph->GetEntryBlock()->GetLastInstruction());
} else if (current->IsParameterValue()) {
+ if (kIsDebugBuild
+ && invoke->IsInvokeStaticOrDirect()
+ && invoke->AsInvokeStaticOrDirect()->IsStaticWithExplicitClinitCheck()) {
+ // Ensure we do not use the last input of `invoke`, as it
+ // contains a clinit check which is not an actual argument.
+ size_t last_input_index = invoke->InputCount() - 1;
+ DCHECK(parameter_index != last_input_index);
+ }
current->ReplaceWith(invoke->InputAt(parameter_index++));
} else {
DCHECK(current->IsGoto() || current->IsSuspendCheck());
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 1565f58..9e496df 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -2212,6 +2212,14 @@
class HInvokeStaticOrDirect : public HInvoke {
public:
+ // Requirements of this method call regarding the class
+ // initialization (clinit) check of its declaring class.
+ enum class ClinitCheckRequirement {
+ kNone, // Class already initialized.
+ kExplicit, // Static call having explicit clinit check as last input.
+ kImplicit, // Static call implicitly requiring a clinit check.
+ };
+
HInvokeStaticOrDirect(ArenaAllocator* arena,
uint32_t number_of_arguments,
Primitive::Type return_type,
@@ -2219,11 +2227,13 @@
uint32_t dex_method_index,
bool is_recursive,
InvokeType original_invoke_type,
- InvokeType invoke_type)
+ InvokeType invoke_type,
+ ClinitCheckRequirement clinit_check_requirement)
: HInvoke(arena, number_of_arguments, return_type, dex_pc, dex_method_index),
original_invoke_type_(original_invoke_type),
invoke_type_(invoke_type),
- is_recursive_(is_recursive) {}
+ is_recursive_(is_recursive),
+ clinit_check_requirement_(clinit_check_requirement) {}
bool CanDoImplicitNullCheck() const OVERRIDE {
// We access the method via the dex cache so we can't do an implicit null check.
@@ -2236,12 +2246,61 @@
bool IsRecursive() const { return is_recursive_; }
bool NeedsDexCache() const OVERRIDE { return !IsRecursive(); }
+ // Is this instruction a call to a static method?
+ bool IsStatic() const {
+ return GetInvokeType() == kStatic;
+ }
+
+ // Remove the art::HLoadClass instruction that was set as the last
+ // input by art::PrepareForRegisterAllocation::VisitClinitCheck in
+ // lieu of the initial art::HClinitCheck instruction (only relevant
+ // for static calls with explicit clinit check).
+ void RemoveLoadClassAsLastInput() {
+ DCHECK(IsStaticWithExplicitClinitCheck());
+ size_t last_input_index = InputCount() - 1;
+ HInstruction* last_input = InputAt(last_input_index);
+ DCHECK(last_input != nullptr);
+ DCHECK(last_input->IsLoadClass()) << last_input->DebugName();
+ RemoveAsUserOfInput(last_input_index);
+ inputs_.DeleteAt(last_input_index);
+ clinit_check_requirement_ = ClinitCheckRequirement::kImplicit;
+ DCHECK(IsStaticWithImplicitClinitCheck());
+ }
+
+ // Is this a call to a static method whose declaring class has an
+ // explicit initialization check in the graph?
+ bool IsStaticWithExplicitClinitCheck() const {
+ return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kExplicit);
+ }
+
+ // Is this a call to a static method whose declaring class has an
+ // implicit initialization check requirement?
+ bool IsStaticWithImplicitClinitCheck() const {
+ return IsStatic() && (clinit_check_requirement_ == ClinitCheckRequirement::kImplicit);
+ }
+
DECLARE_INSTRUCTION(InvokeStaticOrDirect);
+ protected:
+ const HUserRecord<HInstruction*> InputRecordAt(size_t i) const OVERRIDE {
+ const HUserRecord<HInstruction*> input_record = HInvoke::InputRecordAt(i);
+ if (kIsDebugBuild && IsStaticWithExplicitClinitCheck() && (i == InputCount() - 1)) {
+ HInstruction* input = input_record.GetInstruction();
+ // `input` is the last input of a static invoke marked as having
+ // an explicit clinit check. It must either be:
+ // - an art::HClinitCheck instruction, set by art::HGraphBuilder; or
+ // - an art::HLoadClass instruction, set by art::PrepareForRegisterAllocation.
+ DCHECK(input != nullptr);
+ DCHECK(input->IsClinitCheck() || input->IsLoadClass()) << input->DebugName();
+ }
+ return input_record;
+ }
+
private:
const InvokeType original_invoke_type_;
const InvokeType invoke_type_;
const bool is_recursive_;
+ ClinitCheckRequirement clinit_check_requirement_;
DISALLOW_COPY_AND_ASSIGN(HInvokeStaticOrDirect);
};
@@ -3210,7 +3269,6 @@
DISALLOW_COPY_AND_ASSIGN(HLoadString);
};
-// TODO: Pass this check to HInvokeStaticOrDirect nodes.
/**
* Performs an initialization check on its Class object input.
*/
diff --git a/compiler/optimizing/prepare_for_register_allocation.cc b/compiler/optimizing/prepare_for_register_allocation.cc
index f5d8d82..78d1185 100644
--- a/compiler/optimizing/prepare_for_register_allocation.cc
+++ b/compiler/optimizing/prepare_for_register_allocation.cc
@@ -79,4 +79,26 @@
}
}
+void PrepareForRegisterAllocation::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
+ if (invoke->IsStaticWithExplicitClinitCheck()) {
+ size_t last_input_index = invoke->InputCount() - 1;
+ HInstruction* last_input = invoke->InputAt(last_input_index);
+ DCHECK(last_input->IsLoadClass()) << last_input->DebugName();
+
+ // Remove the load class instruction that is now the last input of
+ // the static invoke. It was added by the graph builder (along with
+ // a clinit check, already pruned by
+ // PrepareForRegisterAllocation::VisitClinitCheck) when the static
+ // invoke instruction was created, and is no longer required at this
+ // stage (i.e., after inlining has been performed).
+ invoke->RemoveLoadClassAsLastInput();
+
+ // If the load class instruction is no longer used, remove it from
+ // the graph.
+ if (!last_input->HasUses()) {
+ last_input->GetBlock()->RemoveInstruction(last_input);
+ }
+ }
+}
+
} // namespace art
diff --git a/compiler/optimizing/prepare_for_register_allocation.h b/compiler/optimizing/prepare_for_register_allocation.h
index c28507c..d7f277f 100644
--- a/compiler/optimizing/prepare_for_register_allocation.h
+++ b/compiler/optimizing/prepare_for_register_allocation.h
@@ -39,6 +39,7 @@
void VisitBoundType(HBoundType* bound_type) OVERRIDE;
void VisitClinitCheck(HClinitCheck* check) OVERRIDE;
void VisitCondition(HCondition* condition) OVERRIDE;
+ void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) OVERRIDE;
DISALLOW_COPY_AND_ASSIGN(PrepareForRegisterAllocation);
};