Faster @CriticalNative for boot image.
The @CriticalNative call does not need the target method, so we can
avoid one instruction on x86, x86-64 and arm64 by loading the JNI
entrypoint directly through a new PC-relative patch. The current
approach for arm does not allow such an optimization.
Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Test: aosp_blueline-userdebug boots.
Test: run-gtests.sh
Test: testrunner.py --target --64 --optimizing
Bug: 112189621
Change-Id: I11b7e415be2697757cbb11c9cccf4058d1d72d7d
diff --git a/compiler/linker/linker_patch.h b/compiler/linker/linker_patch.h
index 4000fc2..7da1e82 100644
--- a/compiler/linker/linker_patch.h
+++ b/compiler/linker/linker_patch.h
@@ -47,6 +47,7 @@
kDataBimgRelRo,
kMethodRelative,
kMethodBssEntry,
+ kJniEntrypointRelative,
kCallRelative,
kTypeRelative,
kTypeBssEntry,
@@ -96,6 +97,16 @@
return patch;
}
+ static LinkerPatch RelativeJniEntrypointPatch(size_t literal_offset,
+ const DexFile* target_dex_file,
+ uint32_t pc_insn_offset,
+ uint32_t target_method_idx) {
+ LinkerPatch patch(literal_offset, Type::kJniEntrypointRelative, target_dex_file);
+ patch.method_idx_ = target_method_idx;
+ patch.pc_insn_offset_ = pc_insn_offset;
+ return patch;
+ }
+
static LinkerPatch RelativeCodePatch(size_t literal_offset,
const DexFile* target_dex_file,
uint32_t target_method_idx) {
@@ -208,6 +219,7 @@
MethodReference TargetMethod() const {
DCHECK(patch_type_ == Type::kMethodRelative ||
patch_type_ == Type::kMethodBssEntry ||
+ patch_type_ == Type::kJniEntrypointRelative ||
patch_type_ == Type::kCallRelative);
return MethodReference(target_dex_file_, method_idx_);
}
@@ -245,6 +257,7 @@
patch_type_ == Type::kDataBimgRelRo ||
patch_type_ == Type::kMethodRelative ||
patch_type_ == Type::kMethodBssEntry ||
+ patch_type_ == Type::kJniEntrypointRelative ||
patch_type_ == Type::kTypeRelative ||
patch_type_ == Type::kTypeBssEntry ||
patch_type_ == Type::kPublicTypeBssEntry ||
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 007aa43..5920a48 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -954,6 +954,7 @@
package_type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
+ boot_image_jni_entrypoint_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
boot_image_other_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
call_entrypoint_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
baker_read_barrier_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
@@ -4576,24 +4577,35 @@
GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
return; // No code pointer retrieval; the runtime performs the call directly.
}
+ case MethodLoadKind::kBootImageLinkTimePcRelative:
+ DCHECK(GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension());
+ if (invoke->GetCodePtrLocation() == CodePtrLocation::kCallCriticalNative) {
+        // Do not materialize the method pointer; load the JNI entrypoint directly.
+ // Add ADRP with its PC-relative JNI entrypoint patch.
+ vixl::aarch64::Label* adrp_label =
+ NewBootImageJniEntrypointPatch(invoke->GetResolvedMethodReference());
+ EmitAdrpPlaceholder(adrp_label, lr);
+        // Add the LDR with its PC-relative JNI entrypoint patch.
+ vixl::aarch64::Label* add_label =
+ NewBootImageJniEntrypointPatch(invoke->GetResolvedMethodReference(), adrp_label);
+ EmitLdrOffsetPlaceholder(add_label, lr, lr);
+ break;
+ }
+ FALLTHROUGH_INTENDED;
default: {
LoadMethod(invoke->GetMethodLoadKind(), temp, invoke);
break;
}
}
- auto call_code_pointer_member = [&](MemberOffset offset) {
- // LR = callee_method->member;
- __ Ldr(lr, MemOperand(XRegisterFrom(callee_method), offset.Int32Value()));
- {
- // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
- ExactAssemblyScope eas(GetVIXLAssembler(),
- kInstructionSize,
- CodeBufferCheckScope::kExactSize);
- // lr()
- __ blr(lr);
- RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
- }
+ auto call_lr = [&]() {
+ // Use a scope to help guarantee that `RecordPcInfo()` records the correct pc.
+ ExactAssemblyScope eas(GetVIXLAssembler(),
+ kInstructionSize,
+ CodeBufferCheckScope::kExactSize);
+ // lr()
+ __ blr(lr);
+ RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
};
switch (invoke->GetCodePtrLocation()) {
case CodePtrLocation::kCallSelf:
@@ -4611,7 +4623,15 @@
PrepareCriticalNativeCall<CriticalNativeCallingConventionVisitorARM64,
kAapcs64StackAlignment,
GetCriticalNativeDirectCallFrameSize>(invoke);
- call_code_pointer_member(ArtMethod::EntryPointFromJniOffset(kArm64PointerSize));
+ if (invoke->GetMethodLoadKind() == MethodLoadKind::kBootImageLinkTimePcRelative) {
+ call_lr();
+ } else {
+ // LR = callee_method->ptr_sized_fields_.data_; // EntryPointFromJni
+ MemberOffset offset = ArtMethod::EntryPointFromJniOffset(kArm64PointerSize);
+ __ Ldr(lr, MemOperand(XRegisterFrom(callee_method), offset.Int32Value()));
+ // lr()
+ call_lr();
+ }
// Zero-/sign-extend the result when needed due to native and managed ABI mismatch.
switch (invoke->GetType()) {
case DataType::Type::kBool:
@@ -4641,9 +4661,14 @@
}
break;
}
- case CodePtrLocation::kCallArtMethod:
- call_code_pointer_member(ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize));
+ case CodePtrLocation::kCallArtMethod: {
+ // LR = callee_method->ptr_sized_fields_.entry_point_from_quick_compiled_code_;
+ MemberOffset offset = ArtMethod::EntryPointFromQuickCompiledCodeOffset(kArm64PointerSize);
+ __ Ldr(lr, MemOperand(XRegisterFrom(callee_method), offset.Int32Value()));
+ // lr()
+ call_lr();
break;
+ }
}
DCHECK(!IsLeafMethod());
@@ -4814,6 +4839,13 @@
return NewPcRelativePatch(&dex_file, string_index.index_, adrp_label, &string_bss_entry_patches_);
}
+vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageJniEntrypointPatch(
+ MethodReference target_method,
+ vixl::aarch64::Label* adrp_label) {
+ return NewPcRelativePatch(
+ target_method.dex_file, target_method.index, adrp_label, &boot_image_jni_entrypoint_patches_);
+}
+
void CodeGeneratorARM64::EmitEntrypointThunkCall(ThreadOffset64 entrypoint_offset) {
DCHECK(!__ AllowMacroInstructions()); // In ExactAssemblyScope.
DCHECK(!GetCompilerOptions().IsJitCompiler());
@@ -4980,6 +5012,7 @@
package_type_bss_entry_patches_.size() +
boot_image_string_patches_.size() +
string_bss_entry_patches_.size() +
+ boot_image_jni_entrypoint_patches_.size() +
boot_image_other_patches_.size() +
call_entrypoint_patches_.size() +
baker_read_barrier_patches_.size();
@@ -5013,6 +5046,8 @@
package_type_bss_entry_patches_, linker_patches);
EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
string_bss_entry_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeJniEntrypointPatch>(
+ boot_image_jni_entrypoint_patches_, linker_patches);
for (const PatchInfo<vixl::aarch64::Label>& info : call_entrypoint_patches_) {
DCHECK(info.target_dex_file == nullptr);
linker_patches->push_back(linker::LinkerPatch::CallEntrypointPatch(
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index 7ae46d7..bebf762 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -756,6 +756,13 @@
dex::StringIndex string_index,
vixl::aarch64::Label* adrp_label = nullptr);
+ // Add a new boot image JNI entrypoint patch for an instruction and return the label
+ // to be bound before the instruction. The instruction will be either the
+ // ADRP (pass `adrp_label = null`) or the LDR (pass `adrp_label` pointing
+ // to the associated ADRP patch label).
+ vixl::aarch64::Label* NewBootImageJniEntrypointPatch(MethodReference target_method,
+ vixl::aarch64::Label* adrp_label = nullptr);
+
// Emit the BL instruction for entrypoint thunk call and record the associated patch for AOT.
void EmitEntrypointThunkCall(ThreadOffset64 entrypoint_offset);
@@ -1056,6 +1063,8 @@
ArenaDeque<PcRelativePatchInfo> boot_image_string_patches_;
// PC-relative String patch info for kBssEntry.
ArenaDeque<PcRelativePatchInfo> string_bss_entry_patches_;
+ // PC-relative method patch info for kBootImageLinkTimePcRelative+kCallCriticalNative.
+ ArenaDeque<PcRelativePatchInfo> boot_image_jni_entrypoint_patches_;
// PC-relative patch info for IntrinsicObjects for the boot image,
// and for method/type/string patches for kBootImageRelRo otherwise.
ArenaDeque<PcRelativePatchInfo> boot_image_other_patches_;
diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc
index 85337ed..76b8be1 100644
--- a/compiler/optimizing/code_generator_arm_vixl.cc
+++ b/compiler/optimizing/code_generator_arm_vixl.cc
@@ -9140,6 +9140,12 @@
GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
return; // No code pointer retrieval; the runtime performs the call directly.
}
+ case MethodLoadKind::kBootImageLinkTimePcRelative:
+      // Note: Unlike arm64, x86 and x86-64, we do not avoid materializing the method
+      // pointer for kCallCriticalNative because it would not save an instruction over the
+      // current MOVW+MOVT+ADD(pc)+LDR+BL sequence. The ADD(pc) separates the patched
+      // MOVW+MOVT offset instructions from the entrypoint load, so they cannot be fused.
+ FALLTHROUGH_INTENDED;
default: {
LoadMethod(invoke->GetMethodLoadKind(), temp, invoke);
break;
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index d05c2d9..0f68f3f 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -1065,6 +1065,7 @@
package_type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
+ boot_image_jni_entrypoint_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
boot_image_other_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
jit_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
jit_class_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
@@ -5259,6 +5260,12 @@
GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
return; // No code pointer retrieval; the runtime performs the call directly.
}
+ case MethodLoadKind::kBootImageLinkTimePcRelative:
+ // For kCallCriticalNative we skip loading the method and do the call directly.
+ if (invoke->GetCodePtrLocation() == CodePtrLocation::kCallCriticalNative) {
+ break;
+ }
+ FALLTHROUGH_INTENDED;
default: {
LoadMethod(invoke->GetMethodLoadKind(), callee_method, invoke);
}
@@ -5274,9 +5281,16 @@
PrepareCriticalNativeCall<CriticalNativeCallingConventionVisitorX86,
kNativeStackAlignment,
GetCriticalNativeDirectCallFrameSize>(invoke);
- // (callee_method + offset_of_jni_entry_point)()
- __ call(Address(callee_method.AsRegister<Register>(),
- ArtMethod::EntryPointFromJniOffset(kX86PointerSize).Int32Value()));
+ if (invoke->GetMethodLoadKind() == MethodLoadKind::kBootImageLinkTimePcRelative) {
+ DCHECK(GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension());
+ Register base_reg = GetInvokeExtraParameter(invoke, temp.AsRegister<Register>());
+ __ call(Address(base_reg, CodeGeneratorX86::kPlaceholder32BitOffset));
+ RecordBootImageJniEntrypointPatch(invoke);
+ } else {
+ // (callee_method + offset_of_jni_entry_point)()
+ __ call(Address(callee_method.AsRegister<Register>(),
+ ArtMethod::EntryPointFromJniOffset(kX86PointerSize).Int32Value()));
+ }
RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
if (out_frame_size == 0u && DataType::IsFloatingPointType(invoke->GetType())) {
// Create space for conversion.
@@ -5454,6 +5468,16 @@
return &string_bss_entry_patches_.back().label;
}
+void CodeGeneratorX86::RecordBootImageJniEntrypointPatch(HInvokeStaticOrDirect* invoke) {
+ HX86ComputeBaseMethodAddress* method_address =
+ invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress();
+ boot_image_jni_entrypoint_patches_.emplace_back(
+ method_address,
+ invoke->GetResolvedMethodReference().dex_file,
+ invoke->GetResolvedMethodReference().index);
+ __ Bind(&boot_image_jni_entrypoint_patches_.back().label);
+}
+
void CodeGeneratorX86::LoadBootImageAddress(Register reg,
uint32_t boot_image_reference,
HInvokeStaticOrDirect* invoke) {
@@ -5544,6 +5568,7 @@
package_type_bss_entry_patches_.size() +
boot_image_string_patches_.size() +
string_bss_entry_patches_.size() +
+ boot_image_jni_entrypoint_patches_.size() +
boot_image_other_patches_.size();
linker_patches->reserve(size);
if (GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension()) {
@@ -5575,6 +5600,8 @@
package_type_bss_entry_patches_, linker_patches);
EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
string_bss_entry_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeJniEntrypointPatch>(
+ boot_image_jni_entrypoint_patches_, linker_patches);
DCHECK_EQ(size, linker_patches->size());
}
diff --git a/compiler/optimizing/code_generator_x86.h b/compiler/optimizing/code_generator_x86.h
index 0368de5..119494c 100644
--- a/compiler/optimizing/code_generator_x86.h
+++ b/compiler/optimizing/code_generator_x86.h
@@ -490,6 +490,7 @@
Label* NewTypeBssEntryPatch(HLoadClass* load_class);
void RecordBootImageStringPatch(HLoadString* load_string);
Label* NewStringBssEntryPatch(HLoadString* load_string);
+ void RecordBootImageJniEntrypointPatch(HInvokeStaticOrDirect* invoke);
void LoadBootImageAddress(Register reg,
uint32_t boot_image_reference,
@@ -725,6 +726,8 @@
ArenaDeque<X86PcRelativePatchInfo> boot_image_string_patches_;
// PC-relative String patch info for kBssEntry.
ArenaDeque<X86PcRelativePatchInfo> string_bss_entry_patches_;
+ // PC-relative method patch info for kBootImageLinkTimePcRelative+kCallCriticalNative.
+ ArenaDeque<X86PcRelativePatchInfo> boot_image_jni_entrypoint_patches_;
// PC-relative patch info for IntrinsicObjects for the boot image,
// and for method/type/string patches for kBootImageRelRo otherwise.
ArenaDeque<X86PcRelativePatchInfo> boot_image_other_patches_;
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 3a39ee8..dac04a5 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -1060,6 +1060,12 @@
GenerateInvokeStaticOrDirectRuntimeCall(invoke, temp, slow_path);
return; // No code pointer retrieval; the runtime performs the call directly.
}
+ case MethodLoadKind::kBootImageLinkTimePcRelative:
+ // For kCallCriticalNative we skip loading the method and do the call directly.
+ if (invoke->GetCodePtrLocation() == CodePtrLocation::kCallCriticalNative) {
+ break;
+ }
+ FALLTHROUGH_INTENDED;
default: {
LoadMethod(invoke->GetMethodLoadKind(), temp, invoke);
break;
@@ -1076,9 +1082,15 @@
PrepareCriticalNativeCall<CriticalNativeCallingConventionVisitorX86_64,
kNativeStackAlignment,
GetCriticalNativeDirectCallFrameSize>(invoke);
- // (callee_method + offset_of_jni_entry_point)()
- __ call(Address(callee_method.AsRegister<CpuRegister>(),
- ArtMethod::EntryPointFromJniOffset(kX86_64PointerSize).SizeValue()));
+ if (invoke->GetMethodLoadKind() == MethodLoadKind::kBootImageLinkTimePcRelative) {
+ DCHECK(GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension());
+ __ call(Address::Absolute(kPlaceholder32BitOffset, /* no_rip= */ false));
+ RecordBootImageJniEntrypointPatch(invoke);
+ } else {
+ // (callee_method + offset_of_jni_entry_point)()
+ __ call(Address(callee_method.AsRegister<CpuRegister>(),
+ ArtMethod::EntryPointFromJniOffset(kX86_64PointerSize).SizeValue()));
+ }
RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
// Zero-/sign-extend the result when needed due to native and managed ABI mismatch.
switch (invoke->GetType()) {
@@ -1218,6 +1230,12 @@
return &string_bss_entry_patches_.back().label;
}
+void CodeGeneratorX86_64::RecordBootImageJniEntrypointPatch(HInvokeStaticOrDirect* invoke) {
+ boot_image_jni_entrypoint_patches_.emplace_back(invoke->GetResolvedMethodReference().dex_file,
+ invoke->GetResolvedMethodReference().index);
+ __ Bind(&boot_image_jni_entrypoint_patches_.back().label);
+}
+
void CodeGeneratorX86_64::LoadBootImageAddress(CpuRegister reg, uint32_t boot_image_reference) {
if (GetCompilerOptions().IsBootImage()) {
__ leal(reg,
@@ -1292,6 +1310,7 @@
package_type_bss_entry_patches_.size() +
boot_image_string_patches_.size() +
string_bss_entry_patches_.size() +
+ boot_image_jni_entrypoint_patches_.size() +
boot_image_other_patches_.size();
linker_patches->reserve(size);
if (GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsBootImageExtension()) {
@@ -1323,6 +1342,8 @@
package_type_bss_entry_patches_, linker_patches);
EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
string_bss_entry_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeJniEntrypointPatch>(
+ boot_image_jni_entrypoint_patches_, linker_patches);
DCHECK_EQ(size, linker_patches->size());
}
@@ -1394,35 +1415,36 @@
CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
const CompilerOptions& compiler_options,
OptimizingCompilerStats* stats)
- : CodeGenerator(graph,
- kNumberOfCpuRegisters,
- kNumberOfFloatRegisters,
- kNumberOfCpuRegisterPairs,
- ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
- arraysize(kCoreCalleeSaves))
- | (1 << kFakeReturnRegister),
- ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
- arraysize(kFpuCalleeSaves)),
- compiler_options,
- stats),
- block_labels_(nullptr),
- location_builder_(graph, this),
- instruction_visitor_(graph, this),
- move_resolver_(graph->GetAllocator(), this),
- assembler_(graph->GetAllocator()),
- constant_area_start_(0),
- boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
- method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
- boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
- type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
- public_type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
- package_type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
- boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
- string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
- boot_image_other_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
- jit_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
- jit_class_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
- fixups_to_jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
+ : CodeGenerator(graph,
+ kNumberOfCpuRegisters,
+ kNumberOfFloatRegisters,
+ kNumberOfCpuRegisterPairs,
+ ComputeRegisterMask(reinterpret_cast<const int*>(kCoreCalleeSaves),
+ arraysize(kCoreCalleeSaves))
+ | (1 << kFakeReturnRegister),
+ ComputeRegisterMask(reinterpret_cast<const int*>(kFpuCalleeSaves),
+ arraysize(kFpuCalleeSaves)),
+ compiler_options,
+ stats),
+ block_labels_(nullptr),
+ location_builder_(graph, this),
+ instruction_visitor_(graph, this),
+ move_resolver_(graph->GetAllocator(), this),
+ assembler_(graph->GetAllocator()),
+ constant_area_start_(0),
+ boot_image_method_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
+ method_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
+ boot_image_type_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
+ type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
+ public_type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
+ package_type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
+ boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
+ string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
+ boot_image_jni_entrypoint_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
+ boot_image_other_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
+ jit_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
+ jit_class_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
+ fixups_to_jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
AddAllocatedRegister(Location::RegisterLocation(kFakeReturnRegister));
}
diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h
index c69c80a..2c93f18 100644
--- a/compiler/optimizing/code_generator_x86_64.h
+++ b/compiler/optimizing/code_generator_x86_64.h
@@ -466,6 +466,7 @@
Label* NewTypeBssEntryPatch(HLoadClass* load_class);
void RecordBootImageStringPatch(HLoadString* load_string);
Label* NewStringBssEntryPatch(HLoadString* load_string);
+ void RecordBootImageJniEntrypointPatch(HInvokeStaticOrDirect* invoke);
Label* NewJitRootStringPatch(const DexFile& dex_file,
dex::StringIndex string_index,
Handle<mirror::String> handle);
@@ -677,6 +678,8 @@
ArenaDeque<PatchInfo<Label>> boot_image_string_patches_;
// PC-relative String patch info for kBssEntry.
ArenaDeque<PatchInfo<Label>> string_bss_entry_patches_;
+ // PC-relative method patch info for kBootImageLinkTimePcRelative+kCallCriticalNative.
+ ArenaDeque<PatchInfo<Label>> boot_image_jni_entrypoint_patches_;
// PC-relative patch info for IntrinsicObjects for the boot image,
// and for method/type/string patches for kBootImageRelRo otherwise.
ArenaDeque<PatchInfo<Label>> boot_image_other_patches_;
diff --git a/dex2oat/linker/arm64/relative_patcher_arm64.cc b/dex2oat/linker/arm64/relative_patcher_arm64.cc
index 494391b..4a73b83 100644
--- a/dex2oat/linker/arm64/relative_patcher_arm64.cc
+++ b/dex2oat/linker/arm64/relative_patcher_arm64.cc
@@ -65,6 +65,7 @@
case LinkerPatch::Type::kDataBimgRelRo:
case LinkerPatch::Type::kMethodRelative:
case LinkerPatch::Type::kMethodBssEntry:
+ case LinkerPatch::Type::kJniEntrypointRelative:
case LinkerPatch::Type::kTypeRelative:
case LinkerPatch::Type::kTypeBssEntry:
case LinkerPatch::Type::kPublicTypeBssEntry:
@@ -272,6 +273,7 @@
// LDR/STR 32-bit or 64-bit with imm12 == 0 (unset).
DCHECK(patch.GetType() == LinkerPatch::Type::kDataBimgRelRo ||
patch.GetType() == LinkerPatch::Type::kMethodBssEntry ||
+ patch.GetType() == LinkerPatch::Type::kJniEntrypointRelative ||
patch.GetType() == LinkerPatch::Type::kTypeBssEntry ||
patch.GetType() == LinkerPatch::Type::kPublicTypeBssEntry ||
patch.GetType() == LinkerPatch::Type::kPackageTypeBssEntry ||
diff --git a/dex2oat/linker/oat_writer.cc b/dex2oat/linker/oat_writer.cc
index 73ad9e9..88aac56 100644
--- a/dex2oat/linker/oat_writer.cc
+++ b/dex2oat/linker/oat_writer.cc
@@ -1887,6 +1887,17 @@
target_offset);
break;
}
+ case LinkerPatch::Type::kJniEntrypointRelative: {
+ DCHECK(GetTargetMethod(patch)->IsNative());
+ uint32_t target_offset =
+ GetTargetMethodOffset(GetTargetMethod(patch)) +
+ ArtMethod::EntryPointFromJniOffset(pointer_size_).Uint32Value();
+ writer_->relative_patcher_->PatchPcRelativeReference(&patched_code_,
+ patch,
+ offset_ + literal_offset,
+ target_offset);
+ break;
+ }
case LinkerPatch::Type::kCallEntrypoint: {
writer_->relative_patcher_->PatchEntrypointCall(&patched_code_,
patch,