author     Almaz Mingaleev <mingaleev@google.com>                        2023-12-15 11:48:05 +0000
committer  Gerrit Code Review <noreply-gerritcodereview@google.com>     2023-12-15 11:48:05 +0000
commit     d014fd019e84471665ac02f2de285541009892cd (patch)
tree       a2d729d072b1cdf01f2293f3e0b2f52bbc6f3291 /compiler
parent     a627c7e71a59135daab7f2fb8505d4873f61e4ac (diff)
Revert "x86_64: Store resolved MethodType-s in .bss."
This reverts commit a627c7e71a59135daab7f2fb8505d4873f61e4ac.

Reason for revert: 979-const-method-handle fails. Can repro with ITERATIONS_FOR_JIT set to 120_000 and

    ./art/test/testrunner/testrunner.py --host --64 --jit --no-jvmti --debug --prebuild --checkjni --cms --no-relocate --ntrace --cdex-fast -b -t 979-const

Change-Id: I653a83aa12f2c28b163e0d43dbc95b8e8a190436
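For context, the change being reverted cached resolved MethodType objects in .bss: the generated x86-64 fast path loaded a PC-relative .bss slot and only fell back to the kQuickResolveMethodType runtime call when the slot was still empty (see the removed LoadMethodTypeSlowPathX86_64 and VisitLoadMethodType code below). The following is a minimal, self-contained C++ sketch of that caching pattern only; MethodType, bss_slot, ResolveMethodTypeFromRuntime and LoadMethodType are illustrative stand-ins, not ART APIs.

    #include <cstdint>
    #include <iostream>

    // Hypothetical stand-ins for illustration; the real code works with
    // mirror::MethodType GC roots stored in the oat file's .bss section.
    struct MethodType { uint32_t proto_idx; };

    static MethodType* bss_slot = nullptr;  // models one per-proto-index .bss entry

    // Models the kQuickResolveMethodType entrypoint used by the slow path:
    // resolve the proto index, then publish the result into the .bss slot.
    MethodType* ResolveMethodTypeFromRuntime(uint32_t proto_idx) {
      bss_slot = new MethodType{proto_idx};
      return bss_slot;
    }

    // Models the removed fast path: PC-relative load of the .bss slot,
    // "testl out, out" and a jump to the slow path only when it is still null.
    MethodType* LoadMethodType(uint32_t proto_idx) {
      MethodType* mt = bss_slot;
      if (mt == nullptr) {
        mt = ResolveMethodTypeFromRuntime(proto_idx);
      }
      return mt;
    }

    int main() {
      std::cout << LoadMethodType(42)->proto_idx << '\n';  // resolves via the "runtime"
      std::cout << LoadMethodType(42)->proto_idx << '\n';  // served from the cached slot
      return 0;
    }

After this revert, VisitLoadMethodType goes back to GenerateLoadMethodTypeRuntimeCall, i.e. the equivalent of calling the resolver unconditionally instead of checking a cached slot first.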
Diffstat (limited to 'compiler')
-rw-r--r--  compiler/linker/linker_patch.h                | 23
-rw-r--r--  compiler/optimizing/code_generator.cc         |  1
-rw-r--r--  compiler/optimizing/code_generator_x86_64.cc  | 72
-rw-r--r--  compiler/optimizing/code_generator_x86_64.h   |  3
4 files changed, 4 insertions, 95 deletions
diff --git a/compiler/linker/linker_patch.h b/compiler/linker/linker_patch.h
index 133088240b..8ed7fce0ff 100644
--- a/compiler/linker/linker_patch.h
+++ b/compiler/linker/linker_patch.h
@@ -56,7 +56,6 @@ class LinkerPatch {
kPackageTypeBssEntry,
kStringRelative,
kStringBssEntry,
- kMethodTypeBssEntry,
kCallEntrypoint,
kBakerReadBarrierBranch,
};
@@ -177,16 +176,6 @@ class LinkerPatch {
return patch;
}
- static LinkerPatch MethodTypeBssEntryPatch(size_t literal_offset,
- const DexFile* target_dex_file,
- uint32_t pc_insn_offset,
- uint32_t target_proto_idx) {
- LinkerPatch patch(literal_offset, Type::kMethodTypeBssEntry, target_dex_file);
- patch.proto_idx_ = target_proto_idx;
- patch.pc_insn_offset_ = pc_insn_offset;
- return patch;
- }
-
static LinkerPatch CallEntrypointPatch(size_t literal_offset,
uint32_t entrypoint_offset) {
LinkerPatch patch(literal_offset,
@@ -264,16 +253,6 @@ class LinkerPatch {
return dex::StringIndex(string_idx_);
}
- const DexFile* TargetProtoDexFile() const {
- DCHECK(patch_type_ == Type::kMethodTypeBssEntry);
- return target_dex_file_;
- }
-
- dex::ProtoIndex TargetProtoIndex() const {
- DCHECK(patch_type_ == Type::kMethodTypeBssEntry);
- return dex::ProtoIndex(proto_idx_);
- }
-
uint32_t PcInsnOffset() const {
DCHECK(patch_type_ == Type::kIntrinsicReference ||
patch_type_ == Type::kDataBimgRelRo ||
@@ -326,14 +305,12 @@ class LinkerPatch {
uint32_t method_idx_; // Method index for Call/Method patches.
uint32_t type_idx_; // Type index for Type patches.
uint32_t string_idx_; // String index for String patches.
- uint32_t proto_idx_; // Proto index for MethodType patches.
uint32_t intrinsic_data_; // Data for IntrinsicObjects.
uint32_t entrypoint_offset_; // Entrypoint offset in the Thread object.
uint32_t baker_custom_value1_;
static_assert(sizeof(method_idx_) == sizeof(cmp1_), "needed by relational operators");
static_assert(sizeof(type_idx_) == sizeof(cmp1_), "needed by relational operators");
static_assert(sizeof(string_idx_) == sizeof(cmp1_), "needed by relational operators");
- static_assert(sizeof(proto_idx_) == sizeof(cmp1_), "needed by relational operators");
static_assert(sizeof(intrinsic_data_) == sizeof(cmp1_), "needed by relational operators");
static_assert(sizeof(baker_custom_value1_) == sizeof(cmp1_), "needed by relational operators");
};
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index b0e07e32ea..34400c9d22 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -1682,7 +1682,6 @@ void CodeGenerator::ValidateInvokeRuntimeWithoutRecordingPcInfo(HInstruction* in
instruction->IsArrayGet() ||
instruction->IsArraySet() ||
instruction->IsLoadClass() ||
- instruction->IsLoadMethodType() ||
instruction->IsLoadString() ||
instruction->IsInstanceOf() ||
instruction->IsCheckCast() ||
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 24630d2ae3..24cb0c30b7 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -269,38 +269,6 @@ class BoundsCheckSlowPathX86_64 : public SlowPathCode {
DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathX86_64);
};
-class LoadMethodTypeSlowPathX86_64: public SlowPathCode {
- public:
- explicit LoadMethodTypeSlowPathX86_64(HLoadMethodType* mt) : SlowPathCode(mt) {}
-
- void EmitNativeCode(CodeGenerator* codegen) override {
- LocationSummary* locations = instruction_->GetLocations();
- DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
-
- CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
- __ Bind(GetEntryLabel());
- SaveLiveRegisters(codegen, locations);
-
- const dex::ProtoIndex proto_index = instruction_->AsLoadMethodType()->GetProtoIndex();
- // Custom calling convention: RAX serves as both input and output.
- __ movl(CpuRegister(RAX), Immediate(proto_index.index_));
- x86_64_codegen->InvokeRuntime(kQuickResolveMethodType,
- instruction_,
- instruction_->GetDexPc(),
- this);
- CheckEntrypointTypes<kQuickResolveMethodType, void*, uint32_t>();
- x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX));
- RestoreLiveRegisters(codegen, locations);
-
- __ jmp(GetExitLabel());
- }
-
- const char* GetDescription() const override { return "LoadMethodTypeSlowPathX86_64"; }
-
- private:
- DISALLOW_COPY_AND_ASSIGN(LoadMethodTypeSlowPathX86_64);
-};
-
class LoadClassSlowPathX86_64 : public SlowPathCode {
public:
LoadClassSlowPathX86_64(HLoadClass* cls, HInstruction* at)
@@ -560,7 +528,6 @@ class ReadBarrierMarkSlowPathX86_64 : public SlowPathCode {
instruction_->IsArrayGet() ||
instruction_->IsArraySet() ||
instruction_->IsLoadClass() ||
- instruction_->IsLoadMethodType() ||
instruction_->IsLoadString() ||
instruction_->IsInstanceOf() ||
instruction_->IsCheckCast() ||
@@ -1351,12 +1318,6 @@ Label* CodeGeneratorX86_64::NewStringBssEntryPatch(HLoadString* load_string) {
return &string_bss_entry_patches_.back().label;
}
-Label* CodeGeneratorX86_64::NewMethodTypeBssEntryPatch(HLoadMethodType* load_method_type) {
- method_type_bss_entry_patches_.emplace_back(
- &load_method_type->GetDexFile(), load_method_type->GetProtoIndex().index_);
- return &method_type_bss_entry_patches_.back().label;
-}
-
void CodeGeneratorX86_64::RecordBootImageJniEntrypointPatch(HInvokeStaticOrDirect* invoke) {
boot_image_jni_entrypoint_patches_.emplace_back(invoke->GetResolvedMethodReference().dex_file,
invoke->GetResolvedMethodReference().index);
@@ -1444,7 +1405,6 @@ void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* li
package_type_bss_entry_patches_.size() +
boot_image_string_patches_.size() +
string_bss_entry_patches_.size() +
- method_type_bss_entry_patches_.size() +
boot_image_jni_entrypoint_patches_.size() +
boot_image_other_patches_.size();
linker_patches->reserve(size);
@@ -1477,8 +1437,6 @@ void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* li
package_type_bss_entry_patches_, linker_patches);
EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
string_bss_entry_patches_, linker_patches);
- EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodTypeBssEntryPatch>(
- method_type_bss_entry_patches_, linker_patches);
EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeJniEntrypointPatch>(
boot_image_jni_entrypoint_patches_, linker_patches);
DCHECK_EQ(size, linker_patches->size());
@@ -1603,7 +1561,6 @@ CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
package_type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
- method_type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
boot_image_jni_entrypoint_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
boot_image_other_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
jit_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
@@ -6711,34 +6668,13 @@ void InstructionCodeGeneratorX86_64::VisitLoadMethodHandle(HLoadMethodHandle* lo
}
void LocationsBuilderX86_64::VisitLoadMethodType(HLoadMethodType* load) {
- LocationSummary* locations =
- new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
- locations->SetOut(Location::RequiresRegister());
- if (codegen_->EmitNonBakerReadBarrier()) {
- // For non-Baker read barrier we have a temp-clobbering call.
- } else {
- // Rely on the pResolveMethodType to save everything.
- locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
- }
+ // Custom calling convention: RAX serves as both input and output.
+ Location location = Location::RegisterLocation(RAX);
+ CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
}
void InstructionCodeGeneratorX86_64::VisitLoadMethodType(HLoadMethodType* load) {
- LocationSummary* locations = load->GetLocations();
- Location out_loc = locations->Out();
- CpuRegister out = out_loc.AsRegister<CpuRegister>();
-
- Address address = Address::Absolute(CodeGeneratorX86_64::kPlaceholder32BitOffset,
- /* no_rip= */ false);
- Label* fixup_label = codegen_->NewMethodTypeBssEntryPatch(load);
- // /* GcRoot<mirror::MethodType> */ out = *address /* PC-relative */
- GenerateGcRootFieldLoad(
- load, out_loc, address, fixup_label, codegen_->GetCompilerReadBarrierOption());
- // No need for memory fence, thanks to the x86-64 memory model.
- SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) LoadMethodTypeSlowPathX86_64(load);
- codegen_->AddSlowPath(slow_path);
- __ testl(out, out);
- __ j(kEqual, slow_path->GetEntryLabel());
- __ Bind(slow_path->GetExitLabel());
+ codegen_->GenerateLoadMethodTypeRuntimeCall(load);
}
void InstructionCodeGeneratorX86_64::VisitClinitCheck(HClinitCheck* check) {
diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h
index e4d3eac6bc..7da2e39583 100644
--- a/compiler/optimizing/code_generator_x86_64.h
+++ b/compiler/optimizing/code_generator_x86_64.h
@@ -523,7 +523,6 @@ class CodeGeneratorX86_64 : public CodeGenerator {
Label* NewTypeBssEntryPatch(HLoadClass* load_class);
void RecordBootImageStringPatch(HLoadString* load_string);
Label* NewStringBssEntryPatch(HLoadString* load_string);
- Label* NewMethodTypeBssEntryPatch(HLoadMethodType* load_method_type);
void RecordBootImageJniEntrypointPatch(HInvokeStaticOrDirect* invoke);
Label* NewJitRootStringPatch(const DexFile& dex_file,
dex::StringIndex string_index,
@@ -736,8 +735,6 @@ class CodeGeneratorX86_64 : public CodeGenerator {
ArenaDeque<PatchInfo<Label>> boot_image_string_patches_;
// PC-relative String patch info for kBssEntry.
ArenaDeque<PatchInfo<Label>> string_bss_entry_patches_;
- // PC-relative MethodType patch info for kBssEntry.
- ArenaDeque<PatchInfo<Label>> method_type_bss_entry_patches_;
// PC-relative method patch info for kBootImageLinkTimePcRelative+kCallCriticalNative.
ArenaDeque<PatchInfo<Label>> boot_image_jni_entrypoint_patches_;
// PC-relative patch info for IntrinsicObjects for the boot image,