author Almaz Mingaleev <mingaleev@google.com> 2023-12-11 17:59:58 +0000
committer Almaz Mingaleev <mingaleev@google.com> 2023-12-15 09:23:22 +0000
commit a627c7e71a59135daab7f2fb8505d4873f61e4ac (patch)
tree 4ed65b8174984237773521f1f34c4e9408d97ffc /compiler/optimizing
parent 9fedb9f473fd77f31285203f5baa9533b8e21ce6 (diff)
x86_64: Store resolved MethodType-s in .bss.
Bug: 297147201
Test: ./art/test/testrunner/testrunner.py --host --64 --optimizing -b
Test: ./art/test/testrunner/testrunner.py --jvm -b
Test: ./art/test.py --host -b
Change-Id: I4bc2478ff88b2002a60c7126c1a1c9201082e550
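For context: with this change, an HLoadMethodType on x86-64 first tries a PC-relative load from a linker-patched .bss slot and only falls back to the pResolveMethodType entrypoint when the slot is still null. A minimal C++ sketch of that caching pattern, with stand-in names (MethodType, ResolveMethodType, and bss_slot are placeholders for illustration, not ART API):

    #include <atomic>
    #include <cstdint>

    struct MethodType;                                    // opaque stand-in for mirror::MethodType
    MethodType* ResolveMethodType(uint32_t proto_index);  // stand-in for the runtime entrypoint

    // One slot per proto index lives in .bss; compiled code reads it with a
    // single PC-relative load and only takes the slow path while it is null.
    MethodType* LoadMethodType(std::atomic<MethodType*>& bss_slot, uint32_t proto_index) {
      MethodType* mt = bss_slot.load(std::memory_order_relaxed);  // fast path: one load
      if (mt == nullptr) {
        // Slow path; in ART the runtime itself fills the .bss slot once the
        // MethodType is resolved, so later executions skip this branch.
        mt = ResolveMethodType(proto_index);
        bss_slot.store(mt, std::memory_order_relaxed);
      }
      return mt;
    }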
Diffstat (limited to 'compiler/optimizing')
-rw-r--r-- compiler/optimizing/code_generator.cc         1
-rw-r--r-- compiler/optimizing/code_generator_x86_64.cc 72
-rw-r--r-- compiler/optimizing/code_generator_x86_64.h   3
3 files changed, 72 insertions, 4 deletions
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index 34400c9d22..b0e07e32ea 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -1682,6 +1682,7 @@ void CodeGenerator::ValidateInvokeRuntimeWithoutRecordingPcInfo(HInstruction* in
instruction->IsArrayGet() ||
instruction->IsArraySet() ||
instruction->IsLoadClass() ||
+ instruction->IsLoadMethodType() ||
instruction->IsLoadString() ||
instruction->IsInstanceOf() ||
instruction->IsCheckCast() ||
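The hunk above extends the list of instructions whose slow paths may call into the runtime without recording separate PC info, since HLoadMethodType's slow path now reuses the instruction's own dex pc. A minimal sketch of the idea behind the check (names illustrative, not ART's actual code):

    // Only whitelisted instruction kinds may reach InvokeRuntime from a slow
    // path without an extra stack-map entry; kLoadMethodType is newly allowed.
    enum class Kind { kArrayGet, kArraySet, kLoadClass, kLoadMethodType,
                      kLoadString, kInstanceOf, kCheckCast, kOther };

    bool CanInvokeRuntimeWithoutRecordingPcInfo(Kind kind) {
      switch (kind) {
        case Kind::kArrayGet:
        case Kind::kArraySet:
        case Kind::kLoadClass:
        case Kind::kLoadMethodType:
        case Kind::kLoadString:
        case Kind::kInstanceOf:
        case Kind::kCheckCast:
          return true;
        default:
          return false;
      }
    }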
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 24cb0c30b7..24630d2ae3 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -269,6 +269,38 @@ class BoundsCheckSlowPathX86_64 : public SlowPathCode {
DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};
+class LoadMethodTypeSlowPathX86_64 : public SlowPathCode {
+ public:
+ explicit LoadMethodTypeSlowPathX86_64(HLoadMethodType* mt) : SlowPathCode(mt) {}
+
+ void EmitNativeCode(CodeGenerator* codegen) override {
+ LocationSummary* locations = instruction_->GetLocations();
+ DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
+
+ CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
+ __ Bind(GetEntryLabel());
+ SaveLiveRegisters(codegen, locations);
+
+ const dex::ProtoIndex proto_index = instruction_->AsLoadMethodType()->GetProtoIndex();
+ // Custom calling convention: RAX serves as both input and output.
+ __ movl(CpuRegister(RAX), Immediate(proto_index.index_));
+ x86_64_codegen->InvokeRuntime(kQuickResolveMethodType,
+ instruction_,
+ instruction_->GetDexPc(),
+ this);
+ CheckEntrypointTypes<kQuickResolveMethodType, void*, uint32_t>();
+ x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
+ RestoreLiveRegisters(codegen, locations);
+
+ __ jmp(GetExitLabel());
+ }
+
+ const char* GetDescription() const override { return "LoadMethodTypeSlowPathX86_64"; }
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(LoadMethodTypeSlowPathX86_64);
+};
+
class LoadClassSlowPathX86_64 : public SlowPathCode {
public:
LoadClassSlowPathX86_64(HLoadClass* cls, HInstruction* at)
@@ -528,6 +560,7 @@ class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
instruction_->IsArrayGet() ||
instruction_->IsArraySet() ||
instruction_->IsLoadClass() ||
+ instruction_->IsLoadMethodType() ||
instruction_->IsLoadString() ||
instruction_->IsInstanceOf() ||
instruction_->IsCheckCast() ||
@@ -1318,6 +1351,12 @@ Label* CodeGeneratorX86_64::NewStringBssEntryPatch(HLoadString* load_string) {
return &string_bss_entry_patches_.back().label;
}
+Label* CodeGeneratorX86_64::NewMethodTypeBssEntryPatch(HLoadMethodType* load_method_type) {
+ method_type_bss_entry_patches_.emplace_back(
+ &load_method_type->GetDexFile(), load_method_type->GetProtoIndex().index_);
+ return &method_type_bss_entry_patches_.back().label;
+}
+
void CodeGeneratorX86_64::RecordBootImageJniEntrypointPatch(HInvokeStaticOrDirect* invoke) {
boot_image_jni_entrypoint_patches_.emplace_back(invoke->GetResolvedMethodReference().dex_file,
invoke->GetResolvedMethodReference().index);
@@ -1405,6 +1444,7 @@ void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* li
package_type_bss_entry_patches_.size() +
boot_image_string_patches_.size() +
string_bss_entry_patches_.size() +
+ method_type_bss_entry_patches_.size() +
boot_image_jni_entrypoint_patches_.size() +
boot_image_other_patches_.size();
linker_patches->reserve(size);
@@ -1437,6 +1477,8 @@ void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* li
package_type_bss_entry_patches_, linker_patches);
EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
string_bss_entry_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodTypeBssEntryPatch>(
+ method_type_bss_entry_patches_, linker_patches);
EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeJniEntrypointPatch>(
boot_image_jni_entrypoint_patches_, linker_patches);
DCHECK_EQ(size, linker_patches->size());
@@ -1561,6 +1603,7 @@ CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
package_type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
+ method_type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
boot_image_jni_entrypoint_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
boot_image_other_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
jit_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
@@ -6668,13 +6711,34 @@ void InstructionCodeGeneratorX86_64::VisitLoadMethodHandle(HLoadMethodHandle* lo
}
void LocationsBuilderX86_64::VisitLoadMethodType(HLoadMethodType* load) {
- // Custom calling convention: RAX serves as both input and output.
- Location location = Location::RegisterLocation(RAX);
- CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
+ LocationSummary* locations =
+ new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
+ locations->SetOut(Location::RequiresRegister());
+ if (codegen_->EmitNonBakerReadBarrier()) {
+ // For non-Baker read barrier we have a temp-clobbering call.
+ } else {
+ // Rely on the pResolveMethodType entrypoint to save everything.
+ locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
+ }
}
void InstructionCodeGeneratorX86_64::VisitLoadMethodType(HLoadMethodType* load) {
- codegen_->GenerateLoadMethodTypeRuntimeCall(load);
+ LocationSummary* locations = load->GetLocations();
+ Location out_loc = locations->Out();
+ CpuRegister out = out_loc.AsRegister<CpuRegister>();
+
+ Address address = Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset,
+ /* no_rip= */ false);
+ Label* fixup_label = codegen_->NewMethodTypeBssEntryPatch(load);
+ // /* GcRoot<mirror::MethodType> */ out = *address /* PC-relative */
+ GenerateGcRootFieldLoad(
+ load, out_loc, address, fixup_label, codegen_->GetCompilerReadBarrierOption());
+ // No need for memory fence, thanks to the x86-64 memory model.
+ SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) LoadMethodTypeSlowPathX86_64(load);
+ codegen_->AddSlowPath(slow_path);
+ __ testl(out, out);
+ __ j(kEqual, slow_path->GetEntryLabel());
+ __ Bind(slow_path->GetExitLabel());
}
void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
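A note on the slow path above: it uses a custom calling convention in which RAX carries the proto index in and the resolved MethodType out, consistent with CheckEntrypointTypes<kQuickResolveMethodType, void*, uint32_t>. A hedged sketch of that contract in C++ terms (the declaration is illustrative and simplified; the real entrypoint also receives the current Thread):

    #include <cstdint>

    namespace mirror { class MethodType; }

    // Illustrative declaration matching the void*(uint32_t) shape checked above.
    extern "C" mirror::MethodType* ResolveMethodTypeFromCode(uint32_t proto_idx);

    // What the emitted slow path effectively does:
    //   RAX = proto_index;                     // movl(CpuRegister(RAX), Immediate(...))
    //   RAX = ResolveMethodTypeFromCode(RAX);  // InvokeRuntime(kQuickResolveMethodType, ...)
    //   locations->Out() = RAX;                // Move(locations->Out(), RegisterLocation(RAX))

No memory fence is emitted after the fast-path .bss load because, as the code comment notes, the x86-64 memory model already orders the loads.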
diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h
index 7da2e39583..e4d3eac6bc 100644
--- a/compiler/optimizing/code_generator_x86_64.h
+++ b/compiler/optimizing/code_generator_x86_64.h
@@ -523,6 +523,7 @@ class CodeGeneratorX86_64 : public CodeGenerator {
Label* NewTypeBssEntryPatch(HLoadClass* load_class);
void RecordBootImageStringPatch(HLoadString* load_string);
Label* NewStringBssEntryPatch(HLoadString* load_string);
+ Label* NewMethodTypeBssEntryPatch(HLoadMethodType* load_method_type);
void RecordBootImageJniEntrypointPatch(HInvokeStaticOrDirect* invoke);
Label* NewJitRootStringPatch(const DexFile& dex_file,
dex::StringIndex string_index,
@@ -735,6 +736,8 @@ class CodeGeneratorX86_64 : public CodeGenerator {
ArenaDeque<PatchInfo<Label>> boot_image_string_patches_;
// PC-relative String patch info for kBssEntry.
ArenaDeque<PatchInfo<Label>> string_bss_entry_patches_;
+ // PC-relative MethodType patch info for kBssEntry.
+ ArenaDeque<PatchInfo<Label>> method_type_bss_entry_patches_;
// PC-relative method patch info for kBootImageLinkTimePcRelative+kCallCriticalNative.
ArenaDeque<PatchInfo<Label>> boot_image_jni_entrypoint_patches_;
// PC-relative patch info for IntrinsicObjects for the boot image,
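The header changes complete the usual recipe for a new PC-relative patch kind: a patch deque, a New...Patch() helper, inclusion in the EmitLinkerPatches() size reservation, and a matching EmitPcRelativeLinkerPatches() call. A minimal sketch of the helper's shape (stand-in types, not ART's):

    #include <cstdint>
    #include <deque>

    struct Label {};    // assembler label bound at the instruction to patch
    struct DexFile {};
    struct PatchInfo { const DexFile* dex_file; uint32_t index; Label label; };

    std::deque<PatchInfo> method_type_bss_entry_patches;

    // Append one record and hand back its label; the code generator binds the
    // label at the patched instruction and the linker later fills in the
    // offset of the MethodType's .bss slot.
    Label* NewMethodTypeBssEntryPatch(const DexFile* dex_file, uint32_t proto_index) {
      method_type_bss_entry_patches.push_back(PatchInfo{dex_file, proto_index, Label()});
      return &method_type_bss_entry_patches.back().label;
    }

A deque (ART uses ArenaDeque) fits this pattern because appending never invalidates references to earlier elements, so the returned label pointers stay valid while patches accumulate.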