Remove soon-to-be-obsolete call kinds for direct calls.
Also remove CompilerDriver::GetCodeAndMethodForDirectCall in
preparation for removing non-PIC prebuild and non-PIC on-device
boot image compilation.
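With this change, HSharpening::ProcessInvokeStaticOrDirect essentially
picks between three dispatch shapes (string-init and debuggable
fallbacks aside): recursive calls (kRecursive + kCallSelf), a directly
embedded method pointer for JIT or for non-PIC AOT compilation when the
callee is already in the boot image (kDirectAddress + kCallArtMethod),
and a PC-relative dex cache arrays load otherwise (kDexCachePcRelative
+ kCallArtMethod). A rough standalone sketch of that decision, using
stand-in enums rather than ART's real HInvokeStaticOrDirect types:

    // Illustrative only; mirrors the new selection logic in sharpening.cc.
    enum class MethodLoadKind { kRecursive, kDirectAddress, kDexCachePcRelative };
    enum class CodePtrLocation { kCallSelf, kCallArtMethod };

    struct DispatchChoice {
      MethodLoadKind load;
      CodePtrLocation call;
    };

    DispatchChoice ChooseDispatch(bool is_recursive, bool jit_or_non_pic_boot_image_callee) {
      if (is_recursive) {
        // Known-safe direct call back into the current method.
        return {MethodLoadKind::kRecursive, CodePtrLocation::kCallSelf};
      }
      if (jit_or_non_pic_boot_image_callee) {
        // The ArtMethod* can be embedded, but the code pointer is still
        // loaded from the ArtMethod so deoptimization keeps working.
        return {MethodLoadKind::kDirectAddress, CodePtrLocation::kCallArtMethod};
      }
      // Fall back to the PC-relative dex cache arrays load.
      return {MethodLoadKind::kDexCachePcRelative, CodePtrLocation::kCallArtMethod};
    }

The direct_code_ptr field of DispatchInfo goes away together with the
call kinds that used it.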
Test: test-art-host test-art-target
Bug: 33192586
Change-Id: Ic48e3e8b9d7605dd0e66f31d458a182198ba9578
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index 8104613..8a7f6d3 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -1201,11 +1201,6 @@
isa_features_(isa_features),
uint32_literals_(std::less<uint32_t>(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
- method_patches_(MethodReferenceComparator(),
- graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
- call_patches_(MethodReferenceComparator(),
- graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
- relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
boot_image_string_patches_(StringReferenceValueComparator(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
@@ -7139,7 +7134,7 @@
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARM::GetSupportedInvokeStaticOrDirectDispatch(
const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
- HInvokeStaticOrDirect* invoke) {
+ HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
HInvokeStaticOrDirect::DispatchInfo dispatch_info = desired_dispatch_info;
// We disable pc-relative load when there is an irreducible loop, as the optimization
// is incompatible with it.
@@ -7151,24 +7146,6 @@
dispatch_info.method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod;
}
- if (dispatch_info.code_ptr_location == HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative) {
- const DexFile& outer_dex_file = GetGraph()->GetDexFile();
- if (&outer_dex_file != invoke->GetTargetMethod().dex_file) {
- // Calls across dex files are more likely to exceed the available BL range,
- // so use absolute patch with fixup if available and kCallArtMethod otherwise.
- HInvokeStaticOrDirect::CodePtrLocation code_ptr_location =
- (desired_dispatch_info.method_load_kind ==
- HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup)
- ? HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup
- : HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod;
- return HInvokeStaticOrDirect::DispatchInfo {
- dispatch_info.method_load_kind,
- code_ptr_location,
- dispatch_info.method_load_data,
- 0u
- };
- }
- }
return dispatch_info;
}
@@ -7199,20 +7176,6 @@
}
void CodeGeneratorARM::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
- // For better instruction scheduling we load the direct code pointer before the method pointer.
- switch (invoke->GetCodePtrLocation()) {
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
- // LR = code address from literal pool with link-time patch.
- __ LoadLiteral(LR, DeduplicateMethodCodeLiteral(invoke->GetTargetMethod()));
- break;
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
- // LR = invoke->GetDirectCodePtr();
- __ LoadImmediate(LR, invoke->GetDirectCodePtr());
- break;
- default:
- break;
- }
-
Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
switch (invoke->GetMethodLoadKind()) {
case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
@@ -7228,10 +7191,6 @@
case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
__ LoadImmediate(temp.AsRegister<Register>(), invoke->GetMethodAddress());
break;
- case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
- __ LoadLiteral(temp.AsRegister<Register>(),
- DeduplicateMethodAddressLiteral(invoke->GetTargetMethod()));
- break;
case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
HArmDexCacheArraysBase* base =
invoke->InputAt(invoke->GetSpecialInputIndex())->AsArmDexCacheArraysBase();
@@ -7270,19 +7229,6 @@
case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
__ bl(GetFrameEntryLabel());
break;
- case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative:
- relative_call_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
- invoke->GetTargetMethod().dex_method_index);
- __ BindTrackedLabel(&relative_call_patches_.back().label);
- // Arbitrarily branch to the BL itself, override at link time.
- __ bl(&relative_call_patches_.back().label);
- break;
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
- // LR prepared above for better instruction scheduling.
- // LR()
- __ blx(LR);
- break;
case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
// LR = callee_method->entry_point_from_quick_compiled_code_
__ LoadFromOffset(
@@ -7410,9 +7356,6 @@
void CodeGeneratorARM::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
DCHECK(linker_patches->empty());
size_t size =
- method_patches_.size() +
- call_patches_.size() +
- relative_call_patches_.size() +
/* MOVW+MOVT for each entry */ 2u * pc_relative_dex_cache_patches_.size() +
boot_image_string_patches_.size() +
/* MOVW+MOVT for each entry */ 2u * pc_relative_string_patches_.size() +
@@ -7420,29 +7363,6 @@
/* MOVW+MOVT for each entry */ 2u * pc_relative_type_patches_.size() +
boot_image_address_patches_.size();
linker_patches->reserve(size);
- for (const auto& entry : method_patches_) {
- const MethodReference& target_method = entry.first;
- Literal* literal = entry.second;
- DCHECK(literal->GetLabel()->IsBound());
- uint32_t literal_offset = literal->GetLabel()->Position();
- linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset,
- target_method.dex_file,
- target_method.dex_method_index));
- }
- for (const auto& entry : call_patches_) {
- const MethodReference& target_method = entry.first;
- Literal* literal = entry.second;
- DCHECK(literal->GetLabel()->IsBound());
- uint32_t literal_offset = literal->GetLabel()->Position();
- linker_patches->push_back(LinkerPatch::CodePatch(literal_offset,
- target_method.dex_file,
- target_method.dex_method_index));
- }
- for (const PatchInfo<Label>& info : relative_call_patches_) {
- uint32_t literal_offset = info.label.Position();
- linker_patches->push_back(
- LinkerPatch::RelativeCodePatch(literal_offset, &info.dex_file, info.index));
- }
EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
linker_patches);
for (const auto& entry : boot_image_string_patches_) {
@@ -7494,14 +7414,6 @@
[this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}
-Literal* CodeGeneratorARM::DeduplicateMethodAddressLiteral(MethodReference target_method) {
- return DeduplicateMethodLiteral(target_method, &method_patches_);
-}
-
-Literal* CodeGeneratorARM::DeduplicateMethodCodeLiteral(MethodReference target_method) {
- return DeduplicateMethodLiteral(target_method, &call_patches_);
-}
-
void LocationsBuilderARM::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
LocationSummary* locations =
new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall);
diff --git a/compiler/optimizing/code_generator_arm.h b/compiler/optimizing/code_generator_arm.h
index 605169d..6435851 100644
--- a/compiler/optimizing/code_generator_arm.h
+++ b/compiler/optimizing/code_generator_arm.h
@@ -607,8 +607,6 @@
Literal* DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map);
Literal* DeduplicateMethodLiteral(MethodReference target_method, MethodToLiteralMap* map);
- Literal* DeduplicateMethodAddressLiteral(MethodReference target_method);
- Literal* DeduplicateMethodCodeLiteral(MethodReference target_method);
PcRelativePatchInfo* NewPcRelativePatch(const DexFile& dex_file,
uint32_t offset_or_index,
ArenaDeque<PcRelativePatchInfo>* patches);
@@ -627,12 +625,6 @@
// Deduplication map for 32-bit literals, used for non-patchable boot image addresses.
Uint32ToLiteralMap uint32_literals_;
- // Method patch info, map MethodReference to a literal for method address and method code.
- MethodToLiteralMap method_patches_;
- MethodToLiteralMap call_patches_;
- // Relative call patch info.
- // Using ArenaDeque<> which retains element addresses on push/emplace_back().
- ArenaDeque<PatchInfo<Label>> relative_call_patches_;
// PC-relative patch info for each HArmDexCacheArraysBase.
ArenaDeque<PcRelativePatchInfo> pc_relative_dex_cache_patches_;
// Deduplication map for boot string literals for kBootImageLinkTimeAddress.
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 5cff303..13616db 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -1147,11 +1147,6 @@
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
uint64_literals_(std::less<uint64_t>(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
- method_patches_(MethodReferenceComparator(),
- graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
- call_patches_(MethodReferenceComparator(),
- graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
- relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
boot_image_string_patches_(StringReferenceValueComparator(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
@@ -3971,23 +3966,6 @@
}
void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) {
- // For better instruction scheduling we load the direct code pointer before the method pointer.
- bool direct_code_loaded = false;
- switch (invoke->GetCodePtrLocation()) {
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
- // LR = code address from literal pool with link-time patch.
- __ Ldr(lr, DeduplicateMethodCodeLiteral(invoke->GetTargetMethod()));
- direct_code_loaded = true;
- break;
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
- // LR = invoke->GetDirectCodePtr();
- __ Ldr(lr, DeduplicateUint64Literal(invoke->GetDirectCodePtr()));
- direct_code_loaded = true;
- break;
- default:
- break;
- }
-
// Make sure that ArtMethod* is passed in kArtMethodRegister as per the calling convention.
Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
switch (invoke->GetMethodLoadKind()) {
@@ -4005,11 +3983,6 @@
// Load method address from literal pool.
__ Ldr(XRegisterFrom(temp), DeduplicateUint64Literal(invoke->GetMethodAddress()));
break;
- case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
- // Load method address from literal pool with a link-time patch.
- __ Ldr(XRegisterFrom(temp),
- DeduplicateMethodAddressLiteral(invoke->GetTargetMethod()));
- break;
case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
// Add ADRP with its PC-relative DexCache access patch.
const DexFile& dex_file = invoke->GetDexFile();
@@ -4051,23 +4024,6 @@
case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
__ Bl(&frame_entry_label_);
break;
- case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
- relative_call_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
- invoke->GetTargetMethod().dex_method_index);
- vixl::aarch64::Label* label = &relative_call_patches_.back().label;
- SingleEmissionCheckScope guard(GetVIXLAssembler());
- __ Bind(label);
- // Branch and link to itself. This will be overriden at link time.
- __ bl(static_cast<int64_t>(0));
- break;
- }
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
- // LR prepared above for better instruction scheduling.
- DCHECK(direct_code_loaded);
- // lr()
- __ Blr(lr);
- break;
case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
// LR = callee_method->entry_point_from_quick_compiled_code_;
__ Ldr(lr, MemOperand(
@@ -4229,9 +4185,6 @@
void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
DCHECK(linker_patches->empty());
size_t size =
- method_patches_.size() +
- call_patches_.size() +
- relative_call_patches_.size() +
pc_relative_dex_cache_patches_.size() +
boot_image_string_patches_.size() +
pc_relative_string_patches_.size() +
@@ -4239,24 +4192,6 @@
pc_relative_type_patches_.size() +
boot_image_address_patches_.size();
linker_patches->reserve(size);
- for (const auto& entry : method_patches_) {
- const MethodReference& target_method = entry.first;
- vixl::aarch64::Literal<uint64_t>* literal = entry.second;
- linker_patches->push_back(LinkerPatch::MethodPatch(literal->GetOffset(),
- target_method.dex_file,
- target_method.dex_method_index));
- }
- for (const auto& entry : call_patches_) {
- const MethodReference& target_method = entry.first;
- vixl::aarch64::Literal<uint64_t>* literal = entry.second;
- linker_patches->push_back(LinkerPatch::CodePatch(literal->GetOffset(),
- target_method.dex_file,
- target_method.dex_method_index));
- }
- for (const PatchInfo<vixl::aarch64::Label>& info : relative_call_patches_) {
- linker_patches->push_back(
- LinkerPatch::RelativeCodePatch(info.label.GetLocation(), &info.dex_file, info.index));
- }
for (const PcRelativePatchInfo& info : pc_relative_dex_cache_patches_) {
linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(info.label.GetLocation(),
&info.target_dex_file,
@@ -4314,17 +4249,6 @@
[this]() { return __ CreateLiteralDestroyedWithPool<uint64_t>(/* placeholder */ 0u); });
}
-vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodAddressLiteral(
- MethodReference target_method) {
- return DeduplicateMethodLiteral(target_method, &method_patches_);
-}
-
-vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateMethodCodeLiteral(
- MethodReference target_method) {
- return DeduplicateMethodLiteral(target_method, &call_patches_);
-}
-
-
void InstructionCodeGeneratorARM64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
// Explicit clinit checks triggered by static invokes must have been pruned by
// art::PrepareForRegisterAllocation.
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index 85b6f9f..8f33b6b 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -693,8 +693,6 @@
vixl::aarch64::Literal<uint64_t>* DeduplicateUint64Literal(uint64_t value);
vixl::aarch64::Literal<uint64_t>* DeduplicateMethodLiteral(MethodReference target_method,
MethodToLiteralMap* map);
- vixl::aarch64::Literal<uint64_t>* DeduplicateMethodAddressLiteral(MethodReference target_method);
- vixl::aarch64::Literal<uint64_t>* DeduplicateMethodCodeLiteral(MethodReference target_method);
// The PcRelativePatchInfo is used for PC-relative addressing of dex cache arrays
// and boot image strings/types. The only difference is the interpretation of the
@@ -737,12 +735,6 @@
Uint32ToLiteralMap uint32_literals_;
// Deduplication map for 64-bit literals, used for non-patchable method address or method code.
Uint64ToLiteralMap uint64_literals_;
- // Method patch info, map MethodReference to a literal for method address and method code.
- MethodToLiteralMap method_patches_;
- MethodToLiteralMap call_patches_;
- // Relative call patch info.
- // Using ArenaDeque<> which retains element addresses on push/emplace_back().
- ArenaDeque<PatchInfo<vixl::aarch64::Label>> relative_call_patches_;
// PC-relative DexCache access info.
ArenaDeque<PcRelativePatchInfo> pc_relative_dex_cache_patches_;
// Deduplication map for boot string literals for kBootImageLinkTimeAddress.
diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc
index 1df00ff..33be80b 100644
--- a/compiler/optimizing/code_generator_arm_vixl.cc
+++ b/compiler/optimizing/code_generator_arm_vixl.cc
@@ -1245,11 +1245,6 @@
isa_features_(isa_features),
uint32_literals_(std::less<uint32_t>(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
- method_patches_(MethodReferenceComparator(),
- graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
- call_patches_(MethodReferenceComparator(),
- graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
- relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
boot_image_string_patches_(StringReferenceValueComparator(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
@@ -7233,7 +7228,7 @@
// otherwise return a fall-back info that should be used instead.
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARMVIXL::GetSupportedInvokeStaticOrDirectDispatch(
const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
- HInvokeStaticOrDirect* invoke) {
+ HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
HInvokeStaticOrDirect::DispatchInfo dispatch_info = desired_dispatch_info;
// We disable pc-relative load when there is an irreducible loop, as the optimization
// is incompatible with it.
@@ -7245,24 +7240,6 @@
dispatch_info.method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod;
}
- if (dispatch_info.code_ptr_location == HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative) {
- const DexFile& outer_dex_file = GetGraph()->GetDexFile();
- if (&outer_dex_file != invoke->GetTargetMethod().dex_file) {
- // Calls across dex files are more likely to exceed the available BL range,
- // so use absolute patch with fixup if available and kCallArtMethod otherwise.
- HInvokeStaticOrDirect::CodePtrLocation code_ptr_location =
- (desired_dispatch_info.method_load_kind ==
- HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup)
- ? HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup
- : HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod;
- return HInvokeStaticOrDirect::DispatchInfo {
- dispatch_info.method_load_kind,
- code_ptr_location,
- dispatch_info.method_load_data,
- 0u
- };
- }
- }
return dispatch_info;
}
@@ -7294,20 +7271,6 @@
void CodeGeneratorARMVIXL::GenerateStaticOrDirectCall(
HInvokeStaticOrDirect* invoke, Location temp) {
- // For better instruction scheduling we load the direct code pointer before the method pointer.
- switch (invoke->GetCodePtrLocation()) {
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
- // LR = code address from literal pool with link-time patch.
- __ Ldr(lr, DeduplicateMethodCodeLiteral(invoke->GetTargetMethod()));
- break;
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
- // LR = invoke->GetDirectCodePtr();
- __ Mov(lr, Operand::From(invoke->GetDirectCodePtr()));
- break;
- default:
- break;
- }
-
Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
switch (invoke->GetMethodLoadKind()) {
case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
@@ -7323,9 +7286,6 @@
case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
__ Mov(RegisterFrom(temp), Operand::From(invoke->GetMethodAddress()));
break;
- case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
- __ Ldr(RegisterFrom(temp), DeduplicateMethodAddressLiteral(invoke->GetTargetMethod()));
- break;
case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
HArmDexCacheArraysBase* base =
invoke->InputAt(invoke->GetSpecialInputIndex())->AsArmDexCacheArraysBase();
@@ -7365,30 +7325,6 @@
case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
__ Bl(GetFrameEntryLabel());
break;
- case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative:
- relative_call_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
- invoke->GetTargetMethod().dex_method_index);
- {
- ExactAssemblyScope aas(GetVIXLAssembler(),
- vixl32::kMaxInstructionSizeInBytes,
- CodeBufferCheckScope::kMaximumSize);
- __ bind(&relative_call_patches_.back().label);
- // Arbitrarily branch to the BL itself, override at link time.
- __ bl(&relative_call_patches_.back().label);
- }
- break;
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
- // LR prepared above for better instruction scheduling.
- // LR()
- {
- // blx in T32 has only 16bit encoding that's why a stricter check for the scope is used.
- ExactAssemblyScope aas(GetVIXLAssembler(),
- vixl32::k16BitT32InstructionSizeInBytes,
- CodeBufferCheckScope::kExactSize);
- __ blx(lr);
- }
- break;
case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
// LR = callee_method->entry_point_from_quick_compiled_code_
GetAssembler()->LoadFromOffset(
@@ -7552,9 +7488,6 @@
void CodeGeneratorARMVIXL::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
DCHECK(linker_patches->empty());
size_t size =
- method_patches_.size() +
- call_patches_.size() +
- relative_call_patches_.size() +
/* MOVW+MOVT for each entry */ 2u * pc_relative_dex_cache_patches_.size() +
boot_image_string_patches_.size() +
/* MOVW+MOVT for each entry */ 2u * pc_relative_string_patches_.size() +
@@ -7562,29 +7495,6 @@
/* MOVW+MOVT for each entry */ 2u * pc_relative_type_patches_.size() +
boot_image_address_patches_.size();
linker_patches->reserve(size);
- for (const auto& entry : method_patches_) {
- const MethodReference& target_method = entry.first;
- VIXLUInt32Literal* literal = entry.second;
- DCHECK(literal->IsBound());
- uint32_t literal_offset = literal->GetLocation();
- linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset,
- target_method.dex_file,
- target_method.dex_method_index));
- }
- for (const auto& entry : call_patches_) {
- const MethodReference& target_method = entry.first;
- VIXLUInt32Literal* literal = entry.second;
- DCHECK(literal->IsBound());
- uint32_t literal_offset = literal->GetLocation();
- linker_patches->push_back(LinkerPatch::CodePatch(literal_offset,
- target_method.dex_file,
- target_method.dex_method_index));
- }
- for (const PatchInfo<vixl32::Label>& info : relative_call_patches_) {
- uint32_t literal_offset = info.label.GetLocation();
- linker_patches->push_back(
- LinkerPatch::RelativeCodePatch(literal_offset, &info.dex_file, info.index));
- }
EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
linker_patches);
for (const auto& entry : boot_image_string_patches_) {
@@ -7643,16 +7553,6 @@
});
}
-VIXLUInt32Literal* CodeGeneratorARMVIXL::DeduplicateMethodAddressLiteral(
- MethodReference target_method) {
- return DeduplicateMethodLiteral(target_method, &method_patches_);
-}
-
-VIXLUInt32Literal* CodeGeneratorARMVIXL::DeduplicateMethodCodeLiteral(
- MethodReference target_method) {
- return DeduplicateMethodLiteral(target_method, &call_patches_);
-}
-
void LocationsBuilderARMVIXL::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
LocationSummary* locations =
new (GetGraph()->GetArena()) LocationSummary(instr, LocationSummary::kNoCall);
diff --git a/compiler/optimizing/code_generator_arm_vixl.h b/compiler/optimizing/code_generator_arm_vixl.h
index 45bd164..297d63c 100644
--- a/compiler/optimizing/code_generator_arm_vixl.h
+++ b/compiler/optimizing/code_generator_arm_vixl.h
@@ -700,8 +700,6 @@
VIXLUInt32Literal* DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map);
VIXLUInt32Literal* DeduplicateMethodLiteral(MethodReference target_method,
MethodToLiteralMap* map);
- VIXLUInt32Literal* DeduplicateMethodAddressLiteral(MethodReference target_method);
- VIXLUInt32Literal* DeduplicateMethodCodeLiteral(MethodReference target_method);
PcRelativePatchInfo* NewPcRelativePatch(const DexFile& dex_file,
uint32_t offset_or_index,
ArenaDeque<PcRelativePatchInfo>* patches);
@@ -724,12 +722,6 @@
// Deduplication map for 32-bit literals, used for non-patchable boot image addresses.
Uint32ToLiteralMap uint32_literals_;
- // Method patch info, map MethodReference to a literal for method address and method code.
- MethodToLiteralMap method_patches_;
- MethodToLiteralMap call_patches_;
- // Relative call patch info.
- // Using ArenaDeque<> which retains element addresses on push/emplace_back().
- ArenaDeque<PatchInfo<vixl::aarch32::Label>> relative_call_patches_;
// PC-relative patch info for each HArmDexCacheArraysBase.
ArenaDeque<PcRelativePatchInfo> pc_relative_dex_cache_patches_;
// Deduplication map for boot string literals for kBootImageLinkTimeAddress.
diff --git a/compiler/optimizing/code_generator_mips.cc b/compiler/optimizing/code_generator_mips.cc
index 897b719..c5029b3 100644
--- a/compiler/optimizing/code_generator_mips.cc
+++ b/compiler/optimizing/code_generator_mips.cc
@@ -458,10 +458,6 @@
isa_features_(isa_features),
uint32_literals_(std::less<uint32_t>(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
- method_patches_(MethodReferenceComparator(),
- graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
- call_patches_(MethodReferenceComparator(),
- graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
boot_image_string_patches_(StringReferenceValueComparator(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
@@ -1008,8 +1004,6 @@
void CodeGeneratorMIPS::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
DCHECK(linker_patches->empty());
size_t size =
- method_patches_.size() +
- call_patches_.size() +
pc_relative_dex_cache_patches_.size() +
pc_relative_string_patches_.size() +
pc_relative_type_patches_.size() +
@@ -1017,24 +1011,6 @@
boot_image_type_patches_.size() +
boot_image_address_patches_.size();
linker_patches->reserve(size);
- for (const auto& entry : method_patches_) {
- const MethodReference& target_method = entry.first;
- Literal* literal = entry.second;
- DCHECK(literal->GetLabel()->IsBound());
- uint32_t literal_offset = __ GetLabelLocation(literal->GetLabel());
- linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset,
- target_method.dex_file,
- target_method.dex_method_index));
- }
- for (const auto& entry : call_patches_) {
- const MethodReference& target_method = entry.first;
- Literal* literal = entry.second;
- DCHECK(literal->GetLabel()->IsBound());
- uint32_t literal_offset = __ GetLabelLocation(literal->GetLabel());
- linker_patches->push_back(LinkerPatch::CodePatch(literal_offset,
- target_method.dex_file,
- target_method.dex_method_index));
- }
EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
linker_patches);
if (!GetCompilerOptions().IsBootImage()) {
@@ -1107,14 +1083,6 @@
[this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}
-Literal* CodeGeneratorMIPS::DeduplicateMethodAddressLiteral(MethodReference target_method) {
- return DeduplicateMethodLiteral(target_method, &method_patches_);
-}
-
-Literal* CodeGeneratorMIPS::DeduplicateMethodCodeLiteral(MethodReference target_method) {
- return DeduplicateMethodLiteral(target_method, &call_patches_);
-}
-
Literal* CodeGeneratorMIPS::DeduplicateBootImageStringLiteral(const DexFile& dex_file,
dex::StringIndex string_index) {
return boot_image_string_patches_.GetOrCreate(
@@ -5157,22 +5125,7 @@
// art::PrepareForRegisterAllocation.
DCHECK(!invoke->IsStaticWithExplicitClinitCheck());
- HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
- HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();
- bool isR6 = codegen_->GetInstructionSetFeatures().IsR6();
-
- // kDirectAddressWithFixup and kCallDirectWithFixup need no extra input on R6 because
- // R6 has PC-relative addressing.
- bool has_extra_input = !isR6 &&
- ((method_load_kind == HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup) ||
- (code_ptr_location == HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup));
-
- if (invoke->HasPcRelativeDexCache()) {
- // kDexCachePcRelative is mutually exclusive with
- // kDirectAddressWithFixup/kCallDirectWithFixup.
- CHECK(!has_extra_input);
- has_extra_input = true;
- }
+ bool has_extra_input = invoke->HasPcRelativeDexCache();
IntrinsicLocationsBuilderMIPS intrinsic(codegen_);
if (intrinsic.TryDispatch(invoke)) {
@@ -5312,9 +5265,7 @@
// is incompatible with it.
bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops();
bool fallback_load = true;
- bool fallback_call = true;
switch (dispatch_info.method_load_kind) {
- case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative:
fallback_load = has_irreducible_loops;
break;
@@ -5322,25 +5273,10 @@
fallback_load = false;
break;
}
- switch (dispatch_info.code_ptr_location) {
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
- fallback_call = has_irreducible_loops;
- break;
- case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative:
- // TODO: Implement this type.
- break;
- default:
- fallback_call = false;
- break;
- }
if (fallback_load) {
dispatch_info.method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod;
dispatch_info.method_load_data = 0;
}
- if (fallback_call) {
- dispatch_info.code_ptr_location = HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod;
- dispatch_info.direct_code_ptr = 0;
- }
return dispatch_info;
}
@@ -5349,31 +5285,10 @@
Location callee_method = temp; // For all kinds except kRecursive, callee will be in temp.
HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();
- bool isR6 = isa_features_.IsR6();
- // kDirectAddressWithFixup and kCallDirectWithFixup have no extra input on R6 because
- // R6 has PC-relative addressing.
- bool has_extra_input = invoke->HasPcRelativeDexCache() ||
- (!isR6 &&
- ((method_load_kind == HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup) ||
- (code_ptr_location == HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup)));
- Register base_reg = has_extra_input
+ Register base_reg = invoke->HasPcRelativeDexCache()
? GetInvokeStaticOrDirectExtraParameter(invoke, temp.AsRegister<Register>())
: ZERO;
- // For better instruction scheduling we load the direct code pointer before the method pointer.
- switch (code_ptr_location) {
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
- // T9 = invoke->GetDirectCodePtr();
- __ LoadConst32(T9, invoke->GetDirectCodePtr());
- break;
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
- // T9 = code address from literal pool with link-time patch.
- __ LoadLiteral(T9, base_reg, DeduplicateMethodCodeLiteral(invoke->GetTargetMethod()));
- break;
- default:
- break;
- }
-
switch (method_load_kind) {
case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
// temp = thread->string_init_entrypoint
@@ -5391,11 +5306,6 @@
case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
__ LoadConst32(temp.AsRegister<Register>(), invoke->GetMethodAddress());
break;
- case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
- __ LoadLiteral(temp.AsRegister<Register>(),
- base_reg,
- DeduplicateMethodAddressLiteral(invoke->GetTargetMethod()));
- break;
case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
HMipsDexCacheArraysBase* base =
invoke->InputAt(invoke->GetSpecialInputIndex())->AsMipsDexCacheArraysBase();
@@ -5438,18 +5348,6 @@
case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
__ Bal(&frame_entry_label_);
break;
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
- // T9 prepared above for better instruction scheduling.
- // T9()
- __ Jalr(T9);
- __ NopIfNoReordering();
- break;
- case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative:
- // TODO: Implement this type.
- // Currently filtered out by GetSupportedInvokeStaticOrDirectDispatch().
- LOG(FATAL) << "Unsupported";
- UNREACHABLE();
case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
// T9 = callee_method->entry_point_from_quick_compiled_code_;
__ LoadFromOffset(kLoadWord,
diff --git a/compiler/optimizing/code_generator_mips.h b/compiler/optimizing/code_generator_mips.h
index f03f29c..9a9a838 100644
--- a/compiler/optimizing/code_generator_mips.h
+++ b/compiler/optimizing/code_generator_mips.h
@@ -474,8 +474,6 @@
Literal* DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map);
Literal* DeduplicateMethodLiteral(MethodReference target_method, MethodToLiteralMap* map);
- Literal* DeduplicateMethodAddressLiteral(MethodReference target_method);
- Literal* DeduplicateMethodCodeLiteral(MethodReference target_method);
PcRelativePatchInfo* NewPcRelativePatch(const DexFile& dex_file,
uint32_t offset_or_index,
ArenaDeque<PcRelativePatchInfo>* patches);
@@ -495,9 +493,6 @@
// Deduplication map for 32-bit literals, used for non-patchable boot image addresses.
Uint32ToLiteralMap uint32_literals_;
- // Method patch info, map MethodReference to a literal for method address and method code.
- MethodToLiteralMap method_patches_;
- MethodToLiteralMap call_patches_;
// PC-relative patch info for each HMipsDexCacheArraysBase.
ArenaDeque<PcRelativePatchInfo> pc_relative_dex_cache_patches_;
// Deduplication map for boot string literals for kBootImageLinkTimeAddress.
diff --git a/compiler/optimizing/code_generator_mips64.cc b/compiler/optimizing/code_generator_mips64.cc
index cf9f9d4..cdbc712 100644
--- a/compiler/optimizing/code_generator_mips64.cc
+++ b/compiler/optimizing/code_generator_mips64.cc
@@ -415,12 +415,7 @@
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
uint64_literals_(std::less<uint64_t>(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
- method_patches_(MethodReferenceComparator(),
- graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
- call_patches_(MethodReferenceComparator(),
- graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
- relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
boot_image_string_patches_(StringReferenceValueComparator(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
@@ -924,44 +919,15 @@
void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
DCHECK(linker_patches->empty());
size_t size =
- method_patches_.size() +
- call_patches_.size() +
pc_relative_dex_cache_patches_.size() +
- relative_call_patches_.size() +
pc_relative_string_patches_.size() +
pc_relative_type_patches_.size() +
boot_image_string_patches_.size() +
boot_image_type_patches_.size() +
boot_image_address_patches_.size();
linker_patches->reserve(size);
- for (const auto& entry : method_patches_) {
- const MethodReference& target_method = entry.first;
- Literal* literal = entry.second;
- DCHECK(literal->GetLabel()->IsBound());
- uint32_t literal_offset = __ GetLabelLocation(literal->GetLabel());
- linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset,
- target_method.dex_file,
- target_method.dex_method_index));
- }
- for (const auto& entry : call_patches_) {
- const MethodReference& target_method = entry.first;
- Literal* literal = entry.second;
- DCHECK(literal->GetLabel()->IsBound());
- uint32_t literal_offset = __ GetLabelLocation(literal->GetLabel());
- linker_patches->push_back(LinkerPatch::CodePatch(literal_offset,
- target_method.dex_file,
- target_method.dex_method_index));
- }
EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
linker_patches);
- for (const PcRelativePatchInfo& info : relative_call_patches_) {
- const DexFile& dex_file = info.target_dex_file;
- uint32_t method_index = info.offset_or_index;
- DCHECK(info.pc_rel_label.IsBound());
- uint32_t pc_rel_offset = __ GetLabelLocation(&info.pc_rel_label);
- linker_patches->push_back(
- LinkerPatch::RelativeCodePatch(pc_rel_offset, &dex_file, method_index));
- }
if (!GetCompilerOptions().IsBootImage()) {
EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
linker_patches);
@@ -1013,11 +979,6 @@
return NewPcRelativePatch(dex_file, element_offset, &pc_relative_dex_cache_patches_);
}
-CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeCallPatch(
- const DexFile& dex_file, uint32_t method_index) {
- return NewPcRelativePatch(dex_file, method_index, &relative_call_patches_);
-}
-
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativePatch(
const DexFile& dex_file, uint32_t offset_or_index, ArenaDeque<PcRelativePatchInfo>* patches) {
patches->emplace_back(dex_file, offset_or_index);
@@ -1043,14 +1004,6 @@
[this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
}
-Literal* CodeGeneratorMIPS64::DeduplicateMethodAddressLiteral(MethodReference target_method) {
- return DeduplicateMethodLiteral(target_method, &method_patches_);
-}
-
-Literal* CodeGeneratorMIPS64::DeduplicateMethodCodeLiteral(MethodReference target_method) {
- return DeduplicateMethodLiteral(target_method, &call_patches_);
-}
-
Literal* CodeGeneratorMIPS64::DeduplicateBootImageStringLiteral(const DexFile& dex_file,
dex::StringIndex string_index) {
return boot_image_string_patches_.GetOrCreate(
@@ -3276,22 +3229,6 @@
HInvokeStaticOrDirect::MethodLoadKind method_load_kind = invoke->GetMethodLoadKind();
HInvokeStaticOrDirect::CodePtrLocation code_ptr_location = invoke->GetCodePtrLocation();
- // For better instruction scheduling we load the direct code pointer before the method pointer.
- switch (code_ptr_location) {
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
- // T9 = invoke->GetDirectCodePtr();
- __ LoadLiteral(T9, kLoadDoubleword, DeduplicateUint64Literal(invoke->GetDirectCodePtr()));
- break;
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
- // T9 = code address from literal pool with link-time patch.
- __ LoadLiteral(T9,
- kLoadUnsignedWord,
- DeduplicateMethodCodeLiteral(invoke->GetTargetMethod()));
- break;
- default:
- break;
- }
-
switch (method_load_kind) {
case HInvokeStaticOrDirect::MethodLoadKind::kStringInit: {
// temp = thread->string_init_entrypoint
@@ -3311,11 +3248,6 @@
kLoadDoubleword,
DeduplicateUint64Literal(invoke->GetMethodAddress()));
break;
- case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
- __ LoadLiteral(temp.AsRegister<GpuRegister>(),
- kLoadUnsignedWord,
- DeduplicateMethodAddressLiteral(invoke->GetTargetMethod()));
- break;
case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
uint32_t offset = invoke->GetDexCacheArrayOffset();
CodeGeneratorMIPS64::PcRelativePatchInfo* info =
@@ -3358,21 +3290,6 @@
case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
__ Balc(&frame_entry_label_);
break;
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
- // T9 prepared above for better instruction scheduling.
- // T9()
- __ Jalr(T9);
- __ Nop();
- break;
- case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
- CodeGeneratorMIPS64::PcRelativePatchInfo* info =
- NewPcRelativeCallPatch(*invoke->GetTargetMethod().dex_file,
- invoke->GetTargetMethod().dex_method_index);
- EmitPcRelativeAddressPlaceholderHigh(info, AT);
- __ Jialc(AT, /* placeholder */ 0x5678);
- break;
- }
case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
// T9 = callee_method->entry_point_from_quick_compiled_code_;
__ LoadFromOffset(kLoadDoubleword,
diff --git a/compiler/optimizing/code_generator_mips64.h b/compiler/optimizing/code_generator_mips64.h
index cbd4957..5e049ae 100644
--- a/compiler/optimizing/code_generator_mips64.h
+++ b/compiler/optimizing/code_generator_mips64.h
@@ -412,8 +412,6 @@
Literal* DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map);
Literal* DeduplicateUint64Literal(uint64_t value);
Literal* DeduplicateMethodLiteral(MethodReference target_method, MethodToLiteralMap* map);
- Literal* DeduplicateMethodAddressLiteral(MethodReference target_method);
- Literal* DeduplicateMethodCodeLiteral(MethodReference target_method);
PcRelativePatchInfo* NewPcRelativePatch(const DexFile& dex_file,
uint32_t offset_or_index,
@@ -437,12 +435,8 @@
// Deduplication map for 64-bit literals, used for non-patchable method address or method code
// address.
Uint64ToLiteralMap uint64_literals_;
- // Method patch info, map MethodReference to a literal for method address and method code.
- MethodToLiteralMap method_patches_;
- MethodToLiteralMap call_patches_;
// PC-relative patch info.
ArenaDeque<PcRelativePatchInfo> pc_relative_dex_cache_patches_;
- ArenaDeque<PcRelativePatchInfo> relative_call_patches_;
// Deduplication map for boot string literals for kBootImageLinkTimeAddress.
BootStringToLiteralMap boot_image_string_patches_;
// PC-relative String patch info; type depends on configuration (app .bss or boot image PIC).
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 8612a67..0abe855 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -1006,8 +1006,6 @@
move_resolver_(graph->GetArena(), this),
assembler_(graph->GetArena()),
isa_features_(isa_features),
- method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
- relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
@@ -4454,20 +4452,7 @@
HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative)) {
dispatch_info.method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod;
}
- switch (dispatch_info.code_ptr_location) {
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
- // For direct code, we actually prefer to call via the code pointer from ArtMethod*.
- // (Though the direct CALL ptr16:32 is available for consideration).
- return HInvokeStaticOrDirect::DispatchInfo {
- dispatch_info.method_load_kind,
- HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
- dispatch_info.method_load_data,
- 0u
- };
- default:
- return dispatch_info;
- }
+ return dispatch_info;
}
Register CodeGeneratorX86::GetInvokeStaticOrDirectExtraParameter(HInvokeStaticOrDirect* invoke,
@@ -4514,12 +4499,6 @@
case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
__ movl(temp.AsRegister<Register>(), Immediate(invoke->GetMethodAddress()));
break;
- case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
- __ movl(temp.AsRegister<Register>(), Immediate(/* placeholder */ 0));
- method_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
- invoke->GetTargetMethod().dex_method_index);
- __ Bind(&method_patches_.back().label); // Bind the label at the end of the "movl" insn.
- break;
case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
Register base_reg = GetInvokeStaticOrDirectExtraParameter(invoke,
temp.AsRegister<Register>());
@@ -4561,19 +4540,6 @@
case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
__ call(GetFrameEntryLabel());
break;
- case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
- relative_call_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
- invoke->GetTargetMethod().dex_method_index);
- Label* label = &relative_call_patches_.back().label;
- __ call(label); // Bind to the patch label, override at link time.
- __ Bind(label); // Bind the label at the end of the "call" insn.
- break;
- }
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
- // Filtered out by GetSupportedInvokeStaticOrDirectDispatch().
- LOG(FATAL) << "Unsupported";
- UNREACHABLE();
case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
// (callee_method + offset_of_quick_compiled_code)()
__ call(Address(callee_method.AsRegister<Register>(),
@@ -4664,22 +4630,11 @@
void CodeGeneratorX86::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
DCHECK(linker_patches->empty());
size_t size =
- method_patches_.size() +
- relative_call_patches_.size() +
pc_relative_dex_cache_patches_.size() +
simple_patches_.size() +
string_patches_.size() +
type_patches_.size();
linker_patches->reserve(size);
- for (const PatchInfo<Label>& info : method_patches_) {
- uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
- linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset, &info.dex_file, info.index));
- }
- for (const PatchInfo<Label>& info : relative_call_patches_) {
- uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
- linker_patches->push_back(
- LinkerPatch::RelativeCodePatch(literal_offset, &info.dex_file, info.index));
- }
EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
linker_patches);
for (const Label& label : simple_patches_) {
diff --git a/compiler/optimizing/code_generator_x86.h b/compiler/optimizing/code_generator_x86.h
index c44da97..1af6850 100644
--- a/compiler/optimizing/code_generator_x86.h
+++ b/compiler/optimizing/code_generator_x86.h
@@ -613,9 +613,6 @@
X86Assembler assembler_;
const X86InstructionSetFeatures& isa_features_;
- // Method patch info. Using ArenaDeque<> which retains element addresses on push/emplace_back().
- ArenaDeque<PatchInfo<Label>> method_patches_;
- ArenaDeque<PatchInfo<Label>> relative_call_patches_;
// PC-relative DexCache access info.
ArenaDeque<PatchInfo<Label>> pc_relative_dex_cache_patches_;
// Patch locations for patchoat where the linker doesn't do any other work.
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 7dfc736..903844f 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -960,19 +960,7 @@
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorX86_64::GetSupportedInvokeStaticOrDirectDispatch(
const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
HInvokeStaticOrDirect* invoke ATTRIBUTE_UNUSED) {
- switch (desired_dispatch_info.code_ptr_location) {
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
- // For direct code, we actually prefer to call via the code pointer from ArtMethod*.
- return HInvokeStaticOrDirect::DispatchInfo {
- desired_dispatch_info.method_load_kind,
- HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
- desired_dispatch_info.method_load_data,
- 0u
- };
- default:
- return desired_dispatch_info;
- }
+ return desired_dispatch_info;
}
Location CodeGeneratorX86_64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticOrDirect* invoke,
@@ -993,12 +981,6 @@
case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
__ movq(temp.AsRegister<CpuRegister>(), Immediate(invoke->GetMethodAddress()));
break;
- case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
- __ movl(temp.AsRegister<CpuRegister>(), Immediate(0)); // Placeholder.
- method_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
- invoke->GetTargetMethod().dex_method_index);
- __ Bind(&method_patches_.back().label); // Bind the label at the end of the "movl" insn.
- break;
case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
__ movq(temp.AsRegister<CpuRegister>(),
Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
@@ -1042,19 +1024,6 @@
case HInvokeStaticOrDirect::CodePtrLocation::kCallSelf:
__ call(&frame_entry_label_);
break;
- case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: {
- relative_call_patches_.emplace_back(*invoke->GetTargetMethod().dex_file,
- invoke->GetTargetMethod().dex_method_index);
- Label* label = &relative_call_patches_.back().label;
- __ call(label); // Bind to the patch label, override at link time.
- __ Bind(label); // Bind the label at the end of the "call" insn.
- break;
- }
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
- case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
- // Filtered out by GetSupportedInvokeStaticOrDirectDispatch().
- LOG(FATAL) << "Unsupported";
- UNREACHABLE();
case HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod:
// (callee_method + offset_of_quick_compiled_code)()
__ call(Address(callee_method.AsRegister<CpuRegister>(),
@@ -1146,22 +1115,11 @@
void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
DCHECK(linker_patches->empty());
size_t size =
- method_patches_.size() +
- relative_call_patches_.size() +
pc_relative_dex_cache_patches_.size() +
simple_patches_.size() +
string_patches_.size() +
type_patches_.size();
linker_patches->reserve(size);
- for (const PatchInfo<Label>& info : method_patches_) {
- uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
- linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset, &info.dex_file, info.index));
- }
- for (const PatchInfo<Label>& info : relative_call_patches_) {
- uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
- linker_patches->push_back(
- LinkerPatch::RelativeCodePatch(literal_offset, &info.dex_file, info.index));
- }
EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
linker_patches);
for (const Label& label : simple_patches_) {
@@ -1253,8 +1211,6 @@
assembler_(graph->GetArena()),
isa_features_(isa_features),
constant_area_start_(0),
- method_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
- relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h
index 391a23b..f827e79 100644
--- a/compiler/optimizing/code_generator_x86_64.h
+++ b/compiler/optimizing/code_generator_x86_64.h
@@ -596,9 +596,6 @@
// Used for fixups to the constant area.
int constant_area_start_;
- // Method patch info. Using ArenaDeque<> which retains element addresses on push/emplace_back().
- ArenaDeque<PatchInfo<Label>> method_patches_;
- ArenaDeque<PatchInfo<Label>> relative_call_patches_;
// PC-relative DexCache access info.
ArenaDeque<PatchInfo<Label>> pc_relative_dex_cache_patches_;
// Patch locations for patchoat where the linker doesn't do any other work.
diff --git a/compiler/optimizing/instruction_builder.cc b/compiler/optimizing/instruction_builder.cc
index 1ca3218..af8e2c8 100644
--- a/compiler/optimizing/instruction_builder.cc
+++ b/compiler/optimizing/instruction_builder.cc
@@ -816,8 +816,7 @@
HInvokeStaticOrDirect::DispatchInfo dispatch_info = {
HInvokeStaticOrDirect::MethodLoadKind::kStringInit,
HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
- dchecked_integral_cast<uint64_t>(string_init_entry_point),
- 0U
+ dchecked_integral_cast<uint64_t>(string_init_entry_point)
};
MethodReference target_method(dex_file_, method_idx);
HInvoke* invoke = new (arena_) HInvokeStaticOrDirect(
@@ -862,8 +861,7 @@
HInvokeStaticOrDirect::DispatchInfo dispatch_info = {
HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod,
HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
- 0u,
- 0U
+ 0u
};
MethodReference target_method(resolved_method->GetDexFile(),
resolved_method->GetDexMethodIndex());
diff --git a/compiler/optimizing/nodes.cc b/compiler/optimizing/nodes.cc
index b9e284f..a599c2a 100644
--- a/compiler/optimizing/nodes.cc
+++ b/compiler/optimizing/nodes.cc
@@ -2406,8 +2406,6 @@
return os << "recursive";
case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress:
return os << "direct";
- case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
- return os << "direct_fixup";
case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative:
return os << "dex_cache_pc_relative";
case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod:
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 1f0c8e8..afa17ce 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -3982,12 +3982,6 @@
// Used for app->boot calls with non-relocatable image and for JIT-compiled calls.
kDirectAddress,
- // Use ArtMethod* at an address that will be known at link time, embed the direct
- // address in the code. If the image is relocatable, emit .patch_oat entry.
- // Used for app->boot calls with relocatable image and boot->boot calls, whether
- // the image relocatable or not.
- kDirectAddressWithFixup,
-
// Load from resolved methods array in the dex cache using a PC-relative load.
// Used when we need to use the dex cache, for example for invoke-static that
// may cause class initialization (the entry may point to a resolution method),
@@ -4006,20 +4000,6 @@
// Recursive call, use local PC-relative call instruction.
kCallSelf,
- // Use PC-relative call instruction patched at link time.
- // Used for calls within an oat file, boot->boot or app->app.
- kCallPCRelative,
-
- // Call to a known target address, embed the direct address in code.
- // Used for app->boot call with non-relocatable image and for JIT-compiled calls.
- kCallDirect,
-
- // Call to a target address that will be known at link time, embed the direct
- // address in code. If the image is relocatable, emit .patch_oat entry.
- // Used for app->boot calls with relocatable image and boot->boot calls, whether
- // the image relocatable or not.
- kCallDirectWithFixup,
-
// Use code pointer from the ArtMethod*.
// Used when we don't know the target code. This is also the last-resort-kind used when
// other kinds are unimplemented or impractical (i.e. slow) on a particular architecture.
@@ -4035,7 +4015,6 @@
// - the method address for kDirectAddress
// - the dex cache arrays offset for kDexCachePcRel.
uint64_t method_load_data;
- uint64_t direct_code_ptr;
};
HInvokeStaticOrDirect(ArenaAllocator* arena,
@@ -4145,7 +4124,6 @@
return false;
}
}
- bool HasDirectCodePtr() const { return GetCodePtrLocation() == CodePtrLocation::kCallDirect; }
QuickEntrypointEnum GetStringInitEntryPoint() const {
DCHECK(IsStringInit());
@@ -4162,11 +4140,6 @@
return dispatch_info_.method_load_data;
}
- uint64_t GetDirectCodePtr() const {
- DCHECK(HasDirectCodePtr());
- return dispatch_info_.direct_code_ptr;
- }
-
ClinitCheckRequirement GetClinitCheckRequirement() const {
return GetPackedField<ClinitCheckRequirementField>();
}
diff --git a/compiler/optimizing/pc_relative_fixups_mips.cc b/compiler/optimizing/pc_relative_fixups_mips.cc
index 82feb95..e321b9e 100644
--- a/compiler/optimizing/pc_relative_fixups_mips.cc
+++ b/compiler/optimizing/pc_relative_fixups_mips.cc
@@ -45,10 +45,6 @@
}
private:
- void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) OVERRIDE {
- HandleInvoke(invoke);
- }
-
void InitializePCRelativeBasePointer() {
// Ensure we only initialize the pointer once.
if (base_ != nullptr) {
@@ -112,38 +108,6 @@
block->ReplaceAndRemoveInstructionWith(switch_insn, mips_switch);
}
- void HandleInvoke(HInvoke* invoke) {
- // If this is an invoke-static/-direct with PC-relative dex cache array
- // addressing, we need the PC-relative address base.
- HInvokeStaticOrDirect* invoke_static_or_direct = invoke->AsInvokeStaticOrDirect();
- if (invoke_static_or_direct != nullptr) {
- HInvokeStaticOrDirect::MethodLoadKind method_load_kind =
- invoke_static_or_direct->GetMethodLoadKind();
- HInvokeStaticOrDirect::CodePtrLocation code_ptr_location =
- invoke_static_or_direct->GetCodePtrLocation();
-
- bool has_extra_input =
- (method_load_kind == HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup) ||
- (code_ptr_location == HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup);
-
- // We can't add a pointer to the constant area if we already have a current
- // method pointer. This may arise when sharpening doesn't remove the current
- // method pointer from the invoke.
- if (invoke_static_or_direct->HasCurrentMethodInput()) {
- DCHECK(!invoke_static_or_direct->HasPcRelativeDexCache());
- CHECK(!has_extra_input);
- return;
- }
-
- if (has_extra_input &&
- !IsCallFreeIntrinsic<IntrinsicLocationsBuilderMIPS>(invoke, codegen_)) {
- InitializePCRelativeBasePointer();
- // Add the extra parameter base_.
- invoke_static_or_direct->AddSpecialInput(base_);
- }
- }
- }
-
CodeGeneratorMIPS* codegen_;
// The generated HMipsComputeBaseMethodAddress in the entry block needed as an
diff --git a/compiler/optimizing/sharpening.cc b/compiler/optimizing/sharpening.cc
index 91826cf..9fdeccf 100644
--- a/compiler/optimizing/sharpening.cc
+++ b/compiler/optimizing/sharpening.cc
@@ -54,6 +54,24 @@
}
}
+static bool IsInBootImage(ArtMethod* method) {
+ const std::vector<gc::space::ImageSpace*>& image_spaces =
+ Runtime::Current()->GetHeap()->GetBootImageSpaces();
+ for (gc::space::ImageSpace* image_space : image_spaces) {
+ const auto& method_section = image_space->GetImageHeader().GetMethodsSection();
+ if (method_section.Contains(reinterpret_cast<uint8_t*>(method) - image_space->Begin())) {
+ return true;
+ }
+ }
+ return false;
+}
+
+static bool AOTCanEmbedMethod(ArtMethod* method, const CompilerOptions& options) {
+ // Including patch information means the AOT code will be patched, which we don't
+ // support in the compiler, and is anyways moving away b/33192586.
+ return IsInBootImage(method) && !options.GetCompilePic() && !options.GetIncludePatchInformation();
+}
+
void HSharpening::ProcessInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
if (invoke->IsStringInit()) {
// Not using the dex cache arrays. But we could still try to use a better dispatch...
@@ -61,68 +79,42 @@
return;
}
- HGraph* outer_graph = codegen_->GetGraph();
- ArtMethod* compiling_method = graph_->GetArtMethod();
+ ArtMethod* callee = invoke->GetResolvedMethod();
+ DCHECK(callee != nullptr);
HInvokeStaticOrDirect::MethodLoadKind method_load_kind;
HInvokeStaticOrDirect::CodePtrLocation code_ptr_location;
uint64_t method_load_data = 0u;
- uint64_t direct_code_ptr = 0u;
- if (invoke->GetResolvedMethod() == outer_graph->GetArtMethod()) {
- DCHECK(outer_graph->GetArtMethod() != nullptr);
+ // Note: we never call an ArtMethod through a known code pointer, as
+ // we do not want to keep on invoking it if it gets deoptimized. This
+ // applies to both AOT and JIT.
+ // This also avoids having to find out if the code pointer of an ArtMethod
+ // is the resolution trampoline (for ensuring the class is initialized), or
+ // the interpreter entrypoint. Such code pointers we do not want to call
+ // directly.
+ // Only in the case of a recursive call can we call directly, as we know the
+ // class is initialized already or being initialized, and the call will not
+ // be invoked once the method is deoptimized.
+
+ if (callee == codegen_->GetGraph()->GetArtMethod()) {
+ // Recursive call.
method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kRecursive;
code_ptr_location = HInvokeStaticOrDirect::CodePtrLocation::kCallSelf;
+ } else if (Runtime::Current()->UseJitCompilation() ||
+ AOTCanEmbedMethod(callee, codegen_->GetCompilerOptions())) {
+ // JIT or on-device AOT compilation referencing a boot image method.
+ // Use the method address directly.
+ method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress;
+ method_load_data = reinterpret_cast<uintptr_t>(callee);
+ code_ptr_location = HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod;
} else {
- uintptr_t direct_code, direct_method;
- {
- ScopedObjectAccess soa(Thread::Current());
- compiler_driver_->GetCodeAndMethodForDirectCall(
- (compiling_method == nullptr) ? nullptr : compiling_method->GetDeclaringClass(),
- invoke->GetResolvedMethod(),
- &direct_code,
- &direct_method);
- }
- if (direct_method != 0u) { // Should we use a direct pointer to the method?
- // Note: For JIT, kDirectAddressWithFixup doesn't make sense at all and while
- // kDirectAddress would be fine for image methods, we don't support it at the moment.
- DCHECK(!Runtime::Current()->UseJitCompilation());
- if (direct_method != static_cast<uintptr_t>(-1)) { // Is the method pointer known now?
- method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kDirectAddress;
- method_load_data = direct_method;
- } else { // The direct pointer will be known at link time.
- method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup;
- }
- } else { // Use dex cache.
- if (!Runtime::Current()->UseJitCompilation()) {
- // Use PC-relative access to the dex cache arrays.
- method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative;
- DexCacheArraysLayout layout(GetInstructionSetPointerSize(codegen_->GetInstructionSet()),
- &graph_->GetDexFile());
- method_load_data = layout.MethodOffset(invoke->GetDexMethodIndex());
- } else { // We must go through the ArtMethod's pointer to resolved methods.
- method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod;
- }
- }
- if (direct_code != 0u) { // Should we use a direct pointer to the code?
- // Note: For JIT, kCallPCRelative and kCallDirectWithFixup don't make sense at all and
- // while kCallDirect would be fine for image methods, we don't support it at the moment.
- DCHECK(!Runtime::Current()->UseJitCompilation());
- const DexFile* dex_file_of_callee = invoke->GetTargetMethod().dex_file;
- if (direct_code != static_cast<uintptr_t>(-1)) { // Is the code pointer known now?
- code_ptr_location = HInvokeStaticOrDirect::CodePtrLocation::kCallDirect;
- direct_code_ptr = direct_code;
- } else if (ContainsElement(compiler_driver_->GetDexFilesForOatFile(), dex_file_of_callee)) {
- // Use PC-relative calls for invokes within a multi-dex oat file.
- code_ptr_location = HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative;
- } else { // The direct pointer will be known at link time.
- // NOTE: This is used for app->boot calls when compiling an app against
- // a relocatable but not yet relocated image.
- code_ptr_location = HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup;
- }
- } else { // We must use the code pointer from the ArtMethod.
- code_ptr_location = HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod;
- }
+ // Use PC-relative access to the dex cache arrays.
+ method_load_kind = HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative;
+ DexCacheArraysLayout layout(GetInstructionSetPointerSize(codegen_->GetInstructionSet()),
+ &graph_->GetDexFile());
+ method_load_data = layout.MethodOffset(invoke->GetDexMethodIndex());
+ code_ptr_location = HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod;
}
if (graph_->IsDebuggable()) {
@@ -132,7 +124,7 @@
}
HInvokeStaticOrDirect::DispatchInfo desired_dispatch_info = {
- method_load_kind, code_ptr_location, method_load_data, direct_code_ptr
+ method_load_kind, code_ptr_location, method_load_data
};
HInvokeStaticOrDirect::DispatchInfo dispatch_info =
codegen_->GetSupportedInvokeStaticOrDirectDispatch(desired_dispatch_info, invoke);