Clean up sharpening and the compiler driver.
Remove the dependency on the compiler driver for sharpening
and dex2dex (the methods called on the compiler driver were
doing unnecessary work), and remove the now-unused methods
from the compiler driver.

Also remove a test that is now invalid, as sharpening always
succeeds.
test: m test-art-host, m test-art-target
Change-Id: I54e91c6839bd5b0b86182f2f43ba5d2c112ef908
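
For illustration, the pattern this change introduces reduces to the following
standalone sketch: the invoke node caches its resolved method and a single
invoke type at construction, so a sharpening-style pass can read them from the
node instead of re-querying the compiler driver. Method, Invoke, and
IsRecursiveCall below are hypothetical stand-ins, not the real ART classes.

    // Minimal sketch, assuming stand-in types rather than ART's HInvoke/ArtMethod.
    #include <cassert>
    #include <cstdint>

    enum class InvokeType { kStatic, kDirect, kVirtual, kInterface };

    struct Method {                  // stand-in for ArtMethod
      uint32_t dex_method_index;
    };

    class Invoke {                   // stand-in for HInvoke
     public:
      Invoke(uint32_t dex_method_index, Method* resolved_method, InvokeType type)
          : dex_method_index_(dex_method_index),
            resolved_method_(resolved_method),  // cached once, at graph-build time
            invoke_type_(type) {}

      uint32_t GetDexMethodIndex() const { return dex_method_index_; }
      Method* GetResolvedMethod() const { return resolved_method_; }  // no driver query
      InvokeType GetInvokeType() const { return invoke_type_; }       // single invoke type

     private:
      const uint32_t dex_method_index_;
      Method* const resolved_method_;
      const InvokeType invoke_type_;
    };

    // A sharpening-style pass can decide dispatch purely from the node.
    bool IsRecursiveCall(const Invoke& invoke, const Method* compiling_method) {
      return compiling_method != nullptr &&
             invoke.GetResolvedMethod() == compiling_method;
    }

    int main() {
      Method m{42u};
      Invoke invoke(/* dex_method_index= */ 42u, &m, InvokeType::kStatic);
      assert(IsRecursiveCall(invoke, &m));
      return 0;
    }

With the resolved method available on the node, GetSupportedInvokeStaticOrDirectDispatch
can take the HInvokeStaticOrDirect* directly, which is what the signature changes
in the diff below do.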
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index cf633df..0f8cdbb 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -346,7 +346,7 @@
// Initialize to anything to silent compiler warnings.
QuickEntrypointEnum entrypoint = kQuickInvokeStaticTrampolineWithAccessCheck;
- switch (invoke->GetOriginalInvokeType()) {
+ switch (invoke->GetInvokeType()) {
case kStatic:
entrypoint = kQuickInvokeStaticTrampolineWithAccessCheck;
break;
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index c0c798d..8500204 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -515,7 +515,7 @@
// otherwise return a fall-back info that should be used instead.
virtual HInvokeStaticOrDirect::DispatchInfo GetSupportedInvokeStaticOrDirectDispatch(
const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
- MethodReference target_method) = 0;
+ HInvokeStaticOrDirect* invoke) = 0;
// Generate a call to a static or direct method.
virtual void GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) = 0;
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index a052873..6be458c 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -6672,7 +6672,7 @@
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM::GetSupportedInvokeStaticOrDirectDispatch(
const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
- MethodReference target_method) {
+ HInvokeStaticOrDirect* invoke) {
HInvokeStaticOrDirect::DispatchInfo dispatch_info = desired_dispatch_info;
// We disable pc-relative load when there is an irreducible loop, as the optimization
// is incompatible with it.
@@ -6686,7 +6686,7 @@
if (dispatch_info.code_ptr_location == HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative) {
const DexFile& outer_dex_file = GetGraph()->GetDexFile();
- if (&outer_dex_file != target_method.dex_file) {
+ if (&outer_dex_file != invoke->GetTargetMethod().dex_file) {
// Calls across dex files are more likely to exceed the available BL range,
// so use absolute patch with fixup if available and kCallArtMethod otherwise.
HInvokeStaticOrDirect::CodePtrLocation code_ptr_location =
diff --git a/compiler/optimizing/code_generator_arm.h b/compiler/optimizing/code_generator_arm.h
index 424a1a1..6416d40 100644
--- a/compiler/optimizing/code_generator_arm.h
+++ b/compiler/optimizing/code_generator_arm.h
@@ -454,7 +454,7 @@
// otherwise return a fall-back info that should be used instead.
HInvokeStaticOrDirect::DispatchInfo GetSupportedInvokeStaticOrDirectDispatch(
const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
- MethodReference target_method) OVERRIDE;
+ HInvokeStaticOrDirect* invoke) OVERRIDE;
void GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) OVERRIDE;
void GenerateVirtualCall(HInvokeVirtual* invoke, Location temp) OVERRIDE;
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index a29e9f3..7160607 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -3544,7 +3544,7 @@
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM64::GetSupportedInvokeStaticOrDirectDispatch(
const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
- MethodReference target_method ATTRIBUTE_UNUSED) {
+ HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
// On ARM64 we support all dispatch types.
return desired_dispatch_info;
}
@@ -3588,7 +3588,7 @@
break;
case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
// Add ADRP with its PC-relative DexCache access patch.
- const DexFile& dex_file = *invoke->GetTargetMethod().dex_file;
+ const DexFile& dex_file = invoke->GetDexFile();
uint32_t element_offset = invoke->GetDexCacheArrayOffset();
vixl::aarch64::Label* adrp_label = NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
{
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index f1dc7ee..a152245 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -522,7 +522,7 @@
// otherwise return a fall-back info that should be used instead.
HInvokeStaticOrDirect::DispatchInfo GetSupportedInvokeStaticOrDirectDispatch(
const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
- MethodReference target_method) OVERRIDE;
+ HInvokeStaticOrDirect* invoke) OVERRIDE;
void GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) OVERRIDE;
void GenerateVirtualCall(HInvokeVirtual* invoke, Location temp) OVERRIDE;
diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc
index b06c84d..226f109 100644
--- a/compiler/optimizing/code_generator_arm_vixl.cc
+++ b/compiler/optimizing/code_generator_arm_vixl.cc
@@ -432,7 +432,7 @@
// otherwise return a fall-back info that should be used instead.
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARMVIXL::GetSupportedInvokeStaticOrDirectDispatch(
const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
- MethodReference target_method) {
+ HInvokeStaticOrDirect* invoke) {
TODO_VIXL32(FATAL);
return desired_dispatch_info;
}
diff --git a/compiler/optimizing/code_generator_arm_vixl.h b/compiler/optimizing/code_generator_arm_vixl.h
index d0c2c85..7b7118c 100644
--- a/compiler/optimizing/code_generator_arm_vixl.h
+++ b/compiler/optimizing/code_generator_arm_vixl.h
@@ -363,7 +363,7 @@
// otherwise return a fall-back info that should be used instead.
HInvokeStaticOrDirect::DispatchInfo GetSupportedInvokeStaticOrDirectDispatch(
const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
- MethodReference target_method) OVERRIDE;
+ HInvokeStaticOrDirect* invoke) OVERRIDE;
void GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) OVERRIDE;
void GenerateVirtualCall(HInvokeVirtual* invoke, Location temp) OVERRIDE;
diff --git a/compiler/optimizing/code_generator_mips.cc b/compiler/optimizing/code_generator_mips.cc
index 2211ea3..f560207 100644
--- a/compiler/optimizing/code_generator_mips.cc
+++ b/compiler/optimizing/code_generator_mips.cc
@@ -4327,7 +4327,7 @@
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS::GetSupportedInvokeStaticOrDirectDispatch(
const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
- MethodReference target_method ATTRIBUTE_UNUSED) {
+ HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
HInvokeStaticOrDirect::DispatchInfo dispatch_info = desired_dispatch_info;
// We disable PC-relative load when there is an irreducible loop, as the optimization
// is incompatible with it.
diff --git a/compiler/optimizing/code_generator_mips.h b/compiler/optimizing/code_generator_mips.h
index 553a7e6..f943978 100644
--- a/compiler/optimizing/code_generator_mips.h
+++ b/compiler/optimizing/code_generator_mips.h
@@ -395,7 +395,7 @@
// otherwise return a fall-back info that should be used instead.
HInvokeStaticOrDirect::DispatchInfo GetSupportedInvokeStaticOrDirectDispatch(
const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
- MethodReference target_method) OVERRIDE;
+ HInvokeStaticOrDirect* invoke) OVERRIDE;
void GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp);
void GenerateVirtualCall(HInvokeVirtual* invoke, Location temp) OVERRIDE;
diff --git a/compiler/optimizing/code_generator_mips64.cc b/compiler/optimizing/code_generator_mips64.cc
index 5039fad..a5e2351 100644
--- a/compiler/optimizing/code_generator_mips64.cc
+++ b/compiler/optimizing/code_generator_mips64.cc
@@ -2972,7 +2972,7 @@
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorMIPS64::GetSupportedInvokeStaticOrDirectDispatch(
const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
- MethodReference target_method ATTRIBUTE_UNUSED) {
+ HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
switch (desired_dispatch_info.method_load_kind) {
case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative:
diff --git a/compiler/optimizing/code_generator_mips64.h b/compiler/optimizing/code_generator_mips64.h
index 2dd409a..690eccb 100644
--- a/compiler/optimizing/code_generator_mips64.h
+++ b/compiler/optimizing/code_generator_mips64.h
@@ -343,7 +343,7 @@
// otherwise return a fall-back info that should be used instead.
HInvokeStaticOrDirect::DispatchInfo GetSupportedInvokeStaticOrDirectDispatch(
const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
- MethodReference target_method) OVERRIDE;
+ HInvokeStaticOrDirect* invoke) OVERRIDE;
void GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) OVERRIDE;
void GenerateVirtualCall(HInvokeVirtual* invoke, Location temp) OVERRIDE;
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index cc9fe83..47dfb2e 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -4217,7 +4217,7 @@
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86::GetSupportedInvokeStaticOrDirectDispatch(
const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
- MethodReference target_method ATTRIBUTE_UNUSED) {
+ HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
HInvokeStaticOrDirect::DispatchInfo dispatch_info = desired_dispatch_info;
// We disable pc-relative load when there is an irreducible loop, as the optimization
@@ -4297,7 +4297,7 @@
__ movl(temp.AsRegister<Register>(), Address(base_reg, kDummy32BitOffset));
// Bind a new fixup label at the end of the "movl" insn.
uint32_t offset = invoke->GetDexCacheArrayOffset();
- __ Bind(NewPcRelativeDexCacheArrayPatch(*invoke->GetTargetMethod().dex_file, offset));
+ __ Bind(NewPcRelativeDexCacheArrayPatch(invoke->GetDexFile(), offset));
break;
}
case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
diff --git a/compiler/optimizing/code_generator_x86.h b/compiler/optimizing/code_generator_x86.h
index 5866e65..1ae9af3 100644
--- a/compiler/optimizing/code_generator_x86.h
+++ b/compiler/optimizing/code_generator_x86.h
@@ -402,7 +402,7 @@
// otherwise return a fall-back info that should be used instead.
HInvokeStaticOrDirect::DispatchInfo GetSupportedInvokeStaticOrDirectDispatch(
const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
- MethodReference target_method) OVERRIDE;
+ HInvokeStaticOrDirect* invoke) OVERRIDE;
// Generate a call to a static or direct method.
Location GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp);
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 1d87bf6..59c0ca4 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -733,7 +733,7 @@
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
- MethodReference target_method ATTRIBUTE_UNUSED) {
+ HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
switch (desired_dispatch_info.code_ptr_location) {
case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
@@ -775,7 +775,7 @@
Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
// Bind a new fixup label at the end of the "movl" insn.
uint32_t offset = invoke->GetDexCacheArrayOffset();
- __ Bind(NewPcRelativeDexCacheArrayPatch(*invoke->GetTargetMethod().dex_file, offset));
+ __ Bind(NewPcRelativeDexCacheArrayPatch(invoke->GetDexFile(), offset));
break;
}
case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h
index 7108676..594f051 100644
--- a/compiler/optimizing/code_generator_x86_64.h
+++ b/compiler/optimizing/code_generator_x86_64.h
@@ -399,7 +399,7 @@
// otherwise return a fall-back info that should be used instead.
HInvokeStaticOrDirect::DispatchInfo GetSupportedInvokeStaticOrDirectDispatch(
const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
- MethodReference target_method) OVERRIDE;
+ HInvokeStaticOrDirect* invoke) OVERRIDE;
Location GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp);
void GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) OVERRIDE;
diff --git a/compiler/optimizing/dex_cache_array_fixups_arm.cc b/compiler/optimizing/dex_cache_array_fixups_arm.cc
index 6ad9b07..7010171 100644
--- a/compiler/optimizing/dex_cache_array_fixups_arm.cc
+++ b/compiler/optimizing/dex_cache_array_fixups_arm.cc
@@ -82,12 +82,10 @@
// we need to add the dex cache arrays base as the special input.
if (invoke->HasPcRelativeDexCache() &&
!IsCallFreeIntrinsic<IntrinsicLocationsBuilderARM>(invoke, codegen_)) {
- // Initialize base for target method dex file if needed.
- MethodReference target_method = invoke->GetTargetMethod();
- HArmDexCacheArraysBase* base = GetOrCreateDexCacheArrayBase(*target_method.dex_file);
+ HArmDexCacheArraysBase* base = GetOrCreateDexCacheArrayBase(invoke->GetDexFile());
// Update the element offset in base.
- DexCacheArraysLayout layout(kArmPointerSize, target_method.dex_file);
- base->UpdateElementOffset(layout.MethodOffset(target_method.dex_method_index));
+ DexCacheArraysLayout layout(kArmPointerSize, &invoke->GetDexFile());
+ base->UpdateElementOffset(layout.MethodOffset(invoke->GetDexMethodIndex()));
// Add the special argument base to the method.
DCHECK(!invoke->HasCurrentMethodInput());
invoke->AddSpecialInput(base);
diff --git a/compiler/optimizing/graph_visualizer.cc b/compiler/optimizing/graph_visualizer.cc
index b3d5341..912ee29 100644
--- a/compiler/optimizing/graph_visualizer.cc
+++ b/compiler/optimizing/graph_visualizer.cc
@@ -447,7 +447,7 @@
void VisitInvokeUnresolved(HInvokeUnresolved* invoke) OVERRIDE {
VisitInvoke(invoke);
- StartAttributeStream("invoke_type") << invoke->GetOriginalInvokeType();
+ StartAttributeStream("invoke_type") << invoke->GetInvokeType();
}
void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) OVERRIDE {
diff --git a/compiler/optimizing/inliner.cc b/compiler/optimizing/inliner.cc
index ce53134..f21dc0e 100644
--- a/compiler/optimizing/inliner.cc
+++ b/compiler/optimizing/inliner.cc
@@ -263,42 +263,24 @@
return false; // Don't bother to move further if we know the method is unresolved.
}
- uint32_t method_index = invoke_instruction->GetDexMethodIndex();
ScopedObjectAccess soa(Thread::Current());
+ uint32_t method_index = invoke_instruction->GetDexMethodIndex();
const DexFile& caller_dex_file = *caller_compilation_unit_.GetDexFile();
VLOG(compiler) << "Try inlining " << PrettyMethod(method_index, caller_dex_file);
- ClassLinker* class_linker = caller_compilation_unit_.GetClassLinker();
// We can query the dex cache directly. The verifier has populated it already.
- ArtMethod* resolved_method;
+ ArtMethod* resolved_method = invoke_instruction->GetResolvedMethod();
ArtMethod* actual_method = nullptr;
- if (invoke_instruction->IsInvokeStaticOrDirect()) {
- if (invoke_instruction->AsInvokeStaticOrDirect()->IsStringInit()) {
- VLOG(compiler) << "Not inlining a String.<init> method";
- return false;
- }
- MethodReference ref = invoke_instruction->AsInvokeStaticOrDirect()->GetTargetMethod();
- mirror::DexCache* const dex_cache = IsSameDexFile(caller_dex_file, *ref.dex_file)
- ? caller_compilation_unit_.GetDexCache().Get()
- : class_linker->FindDexCache(soa.Self(), *ref.dex_file);
- resolved_method = dex_cache->GetResolvedMethod(
- ref.dex_method_index, class_linker->GetImagePointerSize());
- // actual_method == resolved_method for direct or static calls.
+ if (resolved_method == nullptr) {
+ DCHECK(invoke_instruction->IsInvokeStaticOrDirect());
+ DCHECK(invoke_instruction->AsInvokeStaticOrDirect()->IsStringInit());
+ VLOG(compiler) << "Not inlining a String.<init> method";
+ return false;
+ } else if (invoke_instruction->IsInvokeStaticOrDirect()) {
actual_method = resolved_method;
} else {
- resolved_method = caller_compilation_unit_.GetDexCache().Get()->GetResolvedMethod(
- method_index, class_linker->GetImagePointerSize());
- if (resolved_method != nullptr) {
- // Check if we can statically find the method.
- actual_method = FindVirtualOrInterfaceTarget(invoke_instruction, resolved_method);
- }
- }
-
- if (resolved_method == nullptr) {
- // TODO: Can this still happen?
- // Method cannot be resolved if it is in another dex file we do not have access to.
- VLOG(compiler) << "Method cannot be resolved " << PrettyMethod(method_index, caller_dex_file);
- return false;
+ // Check if we can statically find the method.
+ actual_method = FindVirtualOrInterfaceTarget(invoke_instruction, resolved_method);
}
if (actual_method != nullptr) {
@@ -763,9 +745,9 @@
// 2) We will not go to the conflict trampoline with an invoke-virtual.
// TODO: Consider sharpening once it is not dependent on the compiler driver.
const DexFile& caller_dex_file = *caller_compilation_unit_.GetDexFile();
- uint32_t method_index = FindMethodIndexIn(
+ uint32_t dex_method_index = FindMethodIndexIn(
method, caller_dex_file, invoke_instruction->GetDexMethodIndex());
- if (method_index == DexFile::kDexNoIndex) {
+ if (dex_method_index == DexFile::kDexNoIndex) {
return false;
}
HInvokeVirtual* new_invoke = new (graph_->GetArena()) HInvokeVirtual(
@@ -773,7 +755,8 @@
invoke_instruction->GetNumberOfArguments(),
invoke_instruction->GetType(),
invoke_instruction->GetDexPc(),
- method_index,
+ dex_method_index,
+ method,
method->GetMethodIndex());
HInputsRef inputs = invoke_instruction->GetInputs();
for (size_t index = 0; index != inputs.size(); ++index) {
@@ -1122,7 +1105,7 @@
}
}
- InvokeType invoke_type = invoke_instruction->GetOriginalInvokeType();
+ InvokeType invoke_type = invoke_instruction->GetInvokeType();
if (invoke_type == kInterface) {
// We have statically resolved the dispatch. To please the class linker
// at runtime, we change this call as if it was a virtual call.
diff --git a/compiler/optimizing/instruction_builder.cc b/compiler/optimizing/instruction_builder.cc
index d7e4c53..5a6a212 100644
--- a/compiler/optimizing/instruction_builder.cc
+++ b/compiler/optimizing/instruction_builder.cc
@@ -785,8 +785,6 @@
number_of_arguments++;
}
- MethodReference target_method(dex_file_, method_idx);
-
// Special handling for string init.
int32_t string_init_offset = 0;
bool is_string_init = compiler_driver_->IsStringInit(method_idx,
@@ -800,16 +798,17 @@
dchecked_integral_cast<uint64_t>(string_init_offset),
0U
};
+ MethodReference target_method(dex_file_, method_idx);
HInvoke* invoke = new (arena_) HInvokeStaticOrDirect(
arena_,
number_of_arguments - 1,
Primitive::kPrimNot /*return_type */,
dex_pc,
method_idx,
- target_method,
+ nullptr,
dispatch_info,
invoke_type,
- kStatic /* optimized_invoke_type */,
+ target_method,
HInvokeStaticOrDirect::ClinitCheckRequirement::kImplicit);
return HandleStringInit(invoke,
number_of_vreg_arguments,
@@ -853,10 +852,9 @@
dex_pc, resolved_method, method_idx, &clinit_check_requirement);
} else if (invoke_type == kSuper) {
if (IsSameDexFile(*resolved_method->GetDexFile(), *dex_compilation_unit_->GetDexFile())) {
- // Update the target method to the one resolved. Note that this may be a no-op if
+ // Update the method index to the one resolved. Note that this may be a no-op if
// we resolved to the method referenced by the instruction.
method_idx = resolved_method->GetDexMethodIndex();
- target_method = MethodReference(dex_file_, method_idx);
}
}
@@ -866,15 +864,17 @@
0u,
0U
};
+ MethodReference target_method(resolved_method->GetDexFile(),
+ resolved_method->GetDexMethodIndex());
invoke = new (arena_) HInvokeStaticOrDirect(arena_,
number_of_arguments,
return_type,
dex_pc,
method_idx,
- target_method,
+ resolved_method,
dispatch_info,
invoke_type,
- invoke_type,
+ target_method,
clinit_check_requirement);
} else if (invoke_type == kVirtual) {
ScopedObjectAccess soa(Thread::Current()); // Needed for the method index
@@ -883,15 +883,17 @@
return_type,
dex_pc,
method_idx,
+ resolved_method,
resolved_method->GetMethodIndex());
} else {
DCHECK_EQ(invoke_type, kInterface);
- ScopedObjectAccess soa(Thread::Current()); // Needed for the method index
+ ScopedObjectAccess soa(Thread::Current()); // Needed for the IMT index.
invoke = new (arena_) HInvokeInterface(arena_,
number_of_arguments,
return_type,
dex_pc,
method_idx,
+ resolved_method,
resolved_method->GetImtIndex());
}
@@ -1103,7 +1105,7 @@
size_t start_index = 0;
size_t argument_index = 0;
- if (invoke->GetOriginalInvokeType() != InvokeType::kStatic) { // Instance call.
+ if (invoke->GetInvokeType() != InvokeType::kStatic) { // Instance call.
uint32_t obj_reg = is_range ? register_index : args[0];
HInstruction* arg = is_unresolved
? LoadLocal(obj_reg, Primitive::kPrimNot)
diff --git a/compiler/optimizing/instruction_simplifier.cc b/compiler/optimizing/instruction_simplifier.cc
index b787888..ff829af 100644
--- a/compiler/optimizing/instruction_simplifier.cc
+++ b/compiler/optimizing/instruction_simplifier.cc
@@ -1657,7 +1657,7 @@
bool is_left,
Primitive::Type type) {
DCHECK(invoke->IsInvokeStaticOrDirect());
- DCHECK_EQ(invoke->GetOriginalInvokeType(), InvokeType::kStatic);
+ DCHECK_EQ(invoke->GetInvokeType(), InvokeType::kStatic);
HInstruction* value = invoke->InputAt(0);
HInstruction* distance = invoke->InputAt(1);
// Replace the invoke with an HRor.
diff --git a/compiler/optimizing/intrinsics.cc b/compiler/optimizing/intrinsics.cc
index 418d59c..4d4bbcf 100644
--- a/compiler/optimizing/intrinsics.cc
+++ b/compiler/optimizing/intrinsics.cc
@@ -532,9 +532,7 @@
// inline. If the precise type is known, however, the instruction will be sharpened to an
// InvokeStaticOrDirect.
InvokeType intrinsic_type = GetIntrinsicInvokeType(intrinsic);
- InvokeType invoke_type = invoke->IsInvokeStaticOrDirect() ?
- invoke->AsInvokeStaticOrDirect()->GetOptimizedInvokeType() :
- invoke->IsInvokeVirtual() ? kVirtual : kSuper;
+ InvokeType invoke_type = invoke->GetInvokeType();
switch (intrinsic_type) {
case kStatic:
return (invoke_type == kStatic);
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 6d207765..57ae555 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -3742,8 +3742,8 @@
uint32_t GetDexMethodIndex() const { return dex_method_index_; }
const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); }
- InvokeType GetOriginalInvokeType() const {
- return GetPackedField<OriginalInvokeTypeField>();
+ InvokeType GetInvokeType() const {
+ return GetPackedField<InvokeTypeField>();
}
Intrinsics GetIntrinsic() const {
@@ -3777,21 +3777,22 @@
bool IsIntrinsic() const { return intrinsic_ != Intrinsics::kNone; }
+ ArtMethod* GetResolvedMethod() const { return resolved_method_; }
+
DECLARE_ABSTRACT_INSTRUCTION(Invoke);
protected:
- static constexpr size_t kFieldOriginalInvokeType = kNumberOfGenericPackedBits;
- static constexpr size_t kFieldOriginalInvokeTypeSize =
+ static constexpr size_t kFieldInvokeType = kNumberOfGenericPackedBits;
+ static constexpr size_t kFieldInvokeTypeSize =
MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
static constexpr size_t kFieldReturnType =
- kFieldOriginalInvokeType + kFieldOriginalInvokeTypeSize;
+ kFieldInvokeType + kFieldInvokeTypeSize;
static constexpr size_t kFieldReturnTypeSize =
MinimumBitsToStore(static_cast<size_t>(Primitive::kPrimLast));
static constexpr size_t kFlagCanThrow = kFieldReturnType + kFieldReturnTypeSize;
static constexpr size_t kNumberOfInvokePackedBits = kFlagCanThrow + 1;
static_assert(kNumberOfInvokePackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
- using OriginalInvokeTypeField =
- BitField<InvokeType, kFieldOriginalInvokeType, kFieldOriginalInvokeTypeSize>;
+ using InvokeTypeField = BitField<InvokeType, kFieldInvokeType, kFieldInvokeTypeSize>;
using ReturnTypeField = BitField<Primitive::Type, kFieldReturnType, kFieldReturnTypeSize>;
HInvoke(ArenaAllocator* arena,
@@ -3800,23 +3801,26 @@
Primitive::Type return_type,
uint32_t dex_pc,
uint32_t dex_method_index,
- InvokeType original_invoke_type)
+ ArtMethod* resolved_method,
+ InvokeType invoke_type)
: HInstruction(
SideEffects::AllExceptGCDependency(), dex_pc), // Assume write/read on all fields/arrays.
number_of_arguments_(number_of_arguments),
+ resolved_method_(resolved_method),
inputs_(number_of_arguments + number_of_other_inputs,
arena->Adapter(kArenaAllocInvokeInputs)),
dex_method_index_(dex_method_index),
intrinsic_(Intrinsics::kNone),
intrinsic_optimizations_(0) {
SetPackedField<ReturnTypeField>(return_type);
- SetPackedField<OriginalInvokeTypeField>(original_invoke_type);
+ SetPackedField<InvokeTypeField>(invoke_type);
SetPackedFlag<kFlagCanThrow>(true);
}
void SetCanThrow(bool can_throw) { SetPackedFlag<kFlagCanThrow>(can_throw); }
uint32_t number_of_arguments_;
+ ArtMethod* const resolved_method_;
ArenaVector<HUserRecord<HInstruction*>> inputs_;
const uint32_t dex_method_index_;
Intrinsics intrinsic_;
@@ -3842,6 +3846,7 @@
return_type,
dex_pc,
dex_method_index,
+ nullptr,
invoke_type) {
}
@@ -3935,10 +3940,10 @@
Primitive::Type return_type,
uint32_t dex_pc,
uint32_t method_index,
- MethodReference target_method,
+ ArtMethod* resolved_method,
DispatchInfo dispatch_info,
- InvokeType original_invoke_type,
- InvokeType optimized_invoke_type,
+ InvokeType invoke_type,
+ MethodReference target_method,
ClinitCheckRequirement clinit_check_requirement)
: HInvoke(arena,
number_of_arguments,
@@ -3950,10 +3955,10 @@
return_type,
dex_pc,
method_index,
- original_invoke_type),
+ resolved_method,
+ invoke_type),
target_method_(target_method),
dispatch_info_(dispatch_info) {
- SetPackedField<OptimizedInvokeTypeField>(optimized_invoke_type);
SetPackedField<ClinitCheckRequirementField>(clinit_check_requirement);
}
@@ -4017,14 +4022,6 @@
uint32_t GetSpecialInputIndex() const { return GetNumberOfArguments(); }
bool HasSpecialInput() const { return GetNumberOfArguments() != InputCount(); }
- InvokeType GetOptimizedInvokeType() const {
- return GetPackedField<OptimizedInvokeTypeField>();
- }
-
- void SetOptimizedInvokeType(InvokeType invoke_type) {
- SetPackedField<OptimizedInvokeTypeField>(invoke_type);
- }
-
MethodLoadKind GetMethodLoadKind() const { return dispatch_info_.method_load_kind; }
CodePtrLocation GetCodePtrLocation() const { return dispatch_info_.code_ptr_location; }
bool IsRecursive() const { return GetMethodLoadKind() == MethodLoadKind::kRecursive; }
@@ -4046,8 +4043,6 @@
}
}
bool HasDirectCodePtr() const { return GetCodePtrLocation() == CodePtrLocation::kCallDirect; }
- MethodReference GetTargetMethod() const { return target_method_; }
- void SetTargetMethod(MethodReference method) { target_method_ = method; }
int32_t GetStringInitOffset() const {
DCHECK(IsStringInit());
@@ -4075,7 +4070,11 @@
// Is this instruction a call to a static method?
bool IsStatic() const {
- return GetOriginalInvokeType() == kStatic;
+ return GetInvokeType() == kStatic;
+ }
+
+ MethodReference GetTargetMethod() const {
+ return target_method_;
}
// Remove the HClinitCheck or the replacement HLoadClass (set as last input by
@@ -4117,26 +4116,18 @@
void RemoveInputAt(size_t index);
private:
- static constexpr size_t kFieldOptimizedInvokeType = kNumberOfInvokePackedBits;
- static constexpr size_t kFieldOptimizedInvokeTypeSize =
- MinimumBitsToStore(static_cast<size_t>(kMaxInvokeType));
- static constexpr size_t kFieldClinitCheckRequirement =
- kFieldOptimizedInvokeType + kFieldOptimizedInvokeTypeSize;
+ static constexpr size_t kFieldClinitCheckRequirement = kNumberOfInvokePackedBits;
static constexpr size_t kFieldClinitCheckRequirementSize =
MinimumBitsToStore(static_cast<size_t>(ClinitCheckRequirement::kLast));
static constexpr size_t kNumberOfInvokeStaticOrDirectPackedBits =
kFieldClinitCheckRequirement + kFieldClinitCheckRequirementSize;
static_assert(kNumberOfInvokeStaticOrDirectPackedBits <= kMaxNumberOfPackedBits,
"Too many packed fields.");
- using OptimizedInvokeTypeField =
- BitField<InvokeType, kFieldOptimizedInvokeType, kFieldOptimizedInvokeTypeSize>;
using ClinitCheckRequirementField = BitField<ClinitCheckRequirement,
kFieldClinitCheckRequirement,
kFieldClinitCheckRequirementSize>;
- // The target method may refer to different dex file or method index than the original
- // invoke. This happens for sharpened calls and for calls where a method was redeclared
- // in derived class to increase visibility.
+ // Cached values of the resolved method, to avoid needing the mutator lock.
MethodReference target_method_;
DispatchInfo dispatch_info_;
@@ -4152,8 +4143,16 @@
Primitive::Type return_type,
uint32_t dex_pc,
uint32_t dex_method_index,
+ ArtMethod* resolved_method,
uint32_t vtable_index)
- : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kVirtual),
+ : HInvoke(arena,
+ number_of_arguments,
+ 0u,
+ return_type,
+ dex_pc,
+ dex_method_index,
+ resolved_method,
+ kVirtual),
vtable_index_(vtable_index) {}
bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
@@ -4166,6 +4165,7 @@
DECLARE_INSTRUCTION(InvokeVirtual);
private:
+ // Cached value of the resolved method, to avoid needing the mutator lock.
const uint32_t vtable_index_;
DISALLOW_COPY_AND_ASSIGN(HInvokeVirtual);
@@ -4178,8 +4178,16 @@
Primitive::Type return_type,
uint32_t dex_pc,
uint32_t dex_method_index,
+ ArtMethod* resolved_method,
uint32_t imt_index)
- : HInvoke(arena, number_of_arguments, 0u, return_type, dex_pc, dex_method_index, kInterface),
+ : HInvoke(arena,
+ number_of_arguments,
+ 0u,
+ return_type,
+ dex_pc,
+ dex_method_index,
+ resolved_method,
+ kInterface),
imt_index_(imt_index) {}
bool CanDoImplicitNullCheckOn(HInstruction* obj) const OVERRIDE {
@@ -4193,6 +4201,7 @@
DECLARE_INSTRUCTION(InvokeInterface);
private:
+ // Cached value of the resolved method, to avoid needing the mutator lock.
const uint32_t imt_index_;
DISALLOW_COPY_AND_ASSIGN(HInvokeInterface);
diff --git a/compiler/optimizing/sharpening.cc b/compiler/optimizing/sharpening.cc
index e64c005..abec55f 100644
--- a/compiler/optimizing/sharpening.cc
+++ b/compiler/optimizing/sharpening.cc
@@ -61,44 +61,28 @@
return;
}
- // TODO: Avoid CompilerDriver.
- InvokeType original_invoke_type = invoke->GetOriginalInvokeType();
- InvokeType optimized_invoke_type = original_invoke_type;
- MethodReference target_method(&graph_->GetDexFile(), invoke->GetDexMethodIndex());
- int vtable_idx;
- uintptr_t direct_code, direct_method;
- bool success = compiler_driver_->ComputeInvokeInfo(
- &compilation_unit_,
- invoke->GetDexPc(),
- false /* update_stats: already updated in builder */,
- true /* enable_devirtualization */,
- &optimized_invoke_type,
- &target_method,
- &vtable_idx,
- &direct_code,
- &direct_method);
- if (!success) {
- // TODO: try using kDexCachePcRelative. It's always a valid method load
- // kind as long as it's supported by the codegen
- return;
- }
- invoke->SetOptimizedInvokeType(optimized_invoke_type);
- invoke->SetTargetMethod(target_method);
+ HGraph* outer_graph = codegen_->GetGraph();
+ ArtMethod* compiling_method = graph_->GetArtMethod();
HInvokeStaticOrDirect::MethodLoadKind method_load_kind;
HInvokeStaticOrDirect::CodePtrLocation code_ptr_location;
uint64_t method_load_data = 0u;
uint64_t direct_code_ptr = 0u;
- HGraph* outer_graph = codegen_->GetGraph();
- if (target_method.dex_file == &outer_graph->GetDexFile() &&
- target_method.dex_method_index == outer_graph->GetMethodIdx()) {
+ if (invoke->GetResolvedMethod() == outer_graph->GetArtMethod()) {
+ DCHECK(outer_graph->GetArtMethod() != nullptr);
method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kRecursive;
code_ptr_location = HInvokeStaticOrDirect::CodePtrLocation::kCallSelf;
} else {
- bool use_pc_relative_instructions =
- ((direct_method == 0u || direct_code == static_cast<uintptr_t>(-1))) &&
- ContainsElement(compiler_driver_->GetDexFilesForOatFile(), target_method.dex_file);
+ uintptr_t direct_code, direct_method;
+ {
+ ScopedObjectAccess soa(Thread::Current());
+ compiler_driver_->GetCodeAndMethodForDirectCall(
+ (compiling_method == nullptr) ? nullptr : compiling_method->GetDeclaringClass(),
+ invoke->GetResolvedMethod(),
+ &direct_code,
+ &direct_method);
+ }
if (direct_method != 0u) { // Should we use a direct pointer to the method?
// Note: For JIT, kDirectAddressWithFixup doesn't make sense at all and while
// kDirectAddress would be fine for image methods, we don't support it at the moment.
@@ -110,13 +94,12 @@
method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup;
}
} else { // Use dex cache.
- DCHECK_EQ(target_method.dex_file, &graph_->GetDexFile());
- if (use_pc_relative_instructions) { // Can we use PC-relative access to the dex cache arrays?
- DCHECK(!Runtime::Current()->UseJitCompilation());
+ if (!Runtime::Current()->UseJitCompilation()) {
+ // Use PC-relative access to the dex cache arrays.
method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative;
DexCacheArraysLayout layout(GetInstructionSetPointerSize(codegen_->GetInstructionSet()),
&graph_->GetDexFile());
- method_load_data = layout.MethodOffset(target_method.dex_method_index);
+ method_load_data = layout.MethodOffset(invoke->GetDexMethodIndex());
} else { // We must go through the ArtMethod's pointer to resolved methods.
method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod;
}
@@ -125,10 +108,11 @@
// Note: For JIT, kCallPCRelative and kCallDirectWithFixup don't make sense at all and
// while kCallDirect would be fine for image methods, we don't support it at the moment.
DCHECK(!Runtime::Current()->UseJitCompilation());
+ const DexFile* dex_file_of_callee = invoke->GetTargetMethod().dex_file;
if (direct_code != static_cast<uintptr_t>(-1)) { // Is the code pointer known now?
code_ptr_location = HInvokeStaticOrDirect::CodePtrLocation::kCallDirect;
direct_code_ptr = direct_code;
- } else if (use_pc_relative_instructions) {
+ } else if (ContainsElement(compiler_driver_->GetDexFilesForOatFile(), dex_file_of_callee)) {
// Use PC-relative calls for invokes within a multi-dex oat file.
code_ptr_location = HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative;
} else { // The direct pointer will be known at link time.
@@ -151,8 +135,7 @@
method_load_kind, code_ptr_location, method_load_data, direct_code_ptr
};
HInvokeStaticOrDirect::DispatchInfo dispatch_info =
- codegen_->GetSupportedInvokeStaticOrDirectDispatch(desired_dispatch_info,
- invoke->GetTargetMethod());
+ codegen_->GetSupportedInvokeStaticOrDirectDispatch(desired_dispatch_info, invoke);
invoke->SetDispatchInfo(dispatch_info);
}