author    2024-11-06 12:42:18 +0000
committer 2024-11-06 15:08:37 +0000
commit    a687066b7043dbc1be8f85001eeb0f341cd25885 (patch)
tree      4a718fe531d822ab7b065b62549fdaff7d0ffcde /compiler
parent    6b32080cb3f09c786128043df88e53fb344b7a15 (diff)
arm64: Store resolved MethodType-s in .bss.
Bug: 297147201
Test: art/test/testrunner/testrunner.py --target --64 --jit
Test: art/test/testrunner/testrunner.py --target --64
Change-Id: Iecd5575e5eb0d161cbc338f63f29cb52b0c23177
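
A note on the mechanism (a sketch, not part of the commit message): a .bss
entry is a per-oat-file slot that caches the resolved MethodType. The generated
kBssEntry code loads the slot PC-relatively (ADRP+LDR) and falls back to the
pResolveMethodType entrypoint on a miss, which also fills the slot. A minimal
C++ sketch of that shape, using hypothetical stand-ins (MethodTypeObj,
ResolveMethodType, and LoadMethodTypeBss are illustrative names, not ART code):

  #include <cstdint>
  #include <unordered_map>

  struct MethodTypeObj { uint32_t proto_index; };  // Stand-in for mirror::MethodType.

  // Stand-in for the pResolveMethodType runtime entrypoint (assumption).
  MethodTypeObj* ResolveMethodType(uint32_t proto_index) {
    static std::unordered_map<uint32_t, MethodTypeObj> resolved;
    return &resolved.try_emplace(proto_index, MethodTypeObj{proto_index}).first->second;
  }

  // Shape of the kBssEntry fast/slow path emitted by VisitLoadMethodType below.
  MethodTypeObj* LoadMethodTypeBss(MethodTypeObj** bss_slot, uint32_t proto_index) {
    MethodTypeObj* mt = *bss_slot;        // Fast path: one PC-relative load (ADRP+LDR).
    if (mt == nullptr) {                  // CBZ branches to the slow path on a miss.
      mt = ResolveMethodType(proto_index);
      *bss_slot = mt;                     // The runtime caches the result in the slot.
    }
    return mt;
  }

Later executions then skip the runtime call entirely, which is the point of
moving MethodType resolution out of the kRuntimeCall-only path.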
Diffstat (limited to 'compiler')
-rw-r--r--  compiler/linker/linker_patch.h              |   3
-rw-r--r--  compiler/optimizing/code_generator_arm64.cc | 115
-rw-r--r--  compiler/optimizing/code_generator_arm64.h  |  16
-rw-r--r--  compiler/optimizing/jit_patches_arm64.cc    |  19
-rw-r--r--  compiler/optimizing/jit_patches_arm64.h     |  16
5 files changed, 163 insertions(+), 6 deletions(-)
diff --git a/compiler/linker/linker_patch.h b/compiler/linker/linker_patch.h
index 4eb0782d52..d73d88dcb2 100644
--- a/compiler/linker/linker_patch.h
+++ b/compiler/linker/linker_patch.h
@@ -295,7 +295,8 @@ class LinkerPatch {
            patch_type_ == Type::kPublicTypeBssEntry ||
            patch_type_ == Type::kPackageTypeBssEntry ||
            patch_type_ == Type::kStringRelative ||
-           patch_type_ == Type::kStringBssEntry);
+           patch_type_ == Type::kStringBssEntry ||
+           patch_type_ == Type::kMethodTypeBssEntry);
     return pc_insn_offset_;
   }

diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 6854c7266d..369d21af03 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -28,6 +28,7 @@
 #include "class_table.h"
 #include "code_generator_utils.h"
 #include "com_android_art_flags.h"
+#include "dex/dex_file_types.h"
 #include "entrypoints/quick/quick_entrypoints.h"
 #include "entrypoints/quick/quick_entrypoints_enum.h"
 #include "gc/accounting/card_table.h"
@@ -274,6 +275,42 @@ class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
   DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
 };

+class LoadMethodTypeSlowPathARM64 : public SlowPathCodeARM64 {
+ public:
+  explicit LoadMethodTypeSlowPathARM64(HLoadMethodType* mt) : SlowPathCodeARM64(mt) {}
+
+  void EmitNativeCode(CodeGenerator* codegen) override {
+    LocationSummary* locations = instruction_->GetLocations();
+    Location out = locations->Out();
+    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
+
+    __ Bind(GetEntryLabel());
+    SaveLiveRegisters(codegen, locations);
+
+    InvokeRuntimeCallingConvention calling_convention;
+    const dex::ProtoIndex proto_index = instruction_->AsLoadMethodType()->GetProtoIndex();
+    __ Mov(calling_convention.GetRegisterAt(0).W(), proto_index.index_);
+
+    arm64_codegen->InvokeRuntime(kQuickResolveMethodType,
+                                 instruction_,
+                                 instruction_->GetDexPc(),
+                                 this);
+    CheckEntrypointTypes<kQuickResolveMethodType, void*, uint32_t>();
+
+    DataType::Type type = instruction_->GetType();
+    arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
+    RestoreLiveRegisters(codegen, locations);
+
+    __ B(GetExitLabel());
+  }
+
+  const char* GetDescription() const override { return "LoadMethodTypeSlowPathARM64"; }
+
+ private:
+  DISALLOW_COPY_AND_ASSIGN(LoadMethodTypeSlowPathARM64);
+};
+
+
 class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
  public:
   LoadClassSlowPathARM64(HLoadClass* cls, HInstruction* at)
@@ -1058,6 +1095,7 @@ CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
       package_type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
+      method_type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       boot_image_jni_entrypoint_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       boot_image_other_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       call_entrypoint_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
@@ -5307,6 +5345,15 @@ vixl::aarch64::Label* CodeGeneratorARM64::NewStringBssEntryPatch(
   return NewPcRelativePatch(&dex_file, string_index.index_, adrp_label, &string_bss_entry_patches_);
 }

+vixl::aarch64::Label* CodeGeneratorARM64::NewMethodTypeBssEntryPatch(
+    HLoadMethodType* load_method_type,
+    vixl::aarch64::Label* adrp_label) {
+  return NewPcRelativePatch(&load_method_type->GetDexFile(),
+                            load_method_type->GetProtoIndex().index_,
+                            adrp_label,
+                            &method_type_bss_entry_patches_);
+}
+
 vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageJniEntrypointPatch(
     MethodReference target_method,
     vixl::aarch64::Label* adrp_label) {
@@ -5486,6 +5533,7 @@ void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* lin
       package_type_bss_entry_patches_.size() +
       boot_image_string_patches_.size() +
       string_bss_entry_patches_.size() +
+      method_type_bss_entry_patches_.size() +
       boot_image_jni_entrypoint_patches_.size() +
       boot_image_other_patches_.size() +
       call_entrypoint_patches_.size() +
@@ -5526,6 +5574,8 @@ void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* lin
       package_type_bss_entry_patches_, linker_patches);
   EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
       string_bss_entry_patches_, linker_patches);
+  EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodTypeBssEntryPatch>(
+      method_type_bss_entry_patches_, linker_patches);
   EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeJniEntrypointPatch>(
       boot_image_jni_entrypoint_patches_, linker_patches);
   for (const PatchInfo<vixl::aarch64::Label>& info : call_entrypoint_patches_) {
@@ -5824,13 +5874,70 @@ void InstructionCodeGeneratorARM64::VisitLoadMethodHandle(HLoadMethodHandle* loa
 }

 void LocationsBuilderARM64::VisitLoadMethodType(HLoadMethodType* load) {
-  InvokeRuntimeCallingConvention calling_convention;
-  Location location = LocationFrom(calling_convention.GetRegisterAt(0));
-  CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
+  if (load->GetLoadKind() == HLoadMethodType::LoadKind::kRuntimeCall) {
+    InvokeRuntimeCallingConvention calling_convention;
+    Location location = LocationFrom(calling_convention.GetRegisterAt(0));
+    CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
+  } else {
+    LocationSummary* locations =
+        new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
+    locations->SetOut(Location::RequiresRegister());
+    if (load->GetLoadKind() == HLoadMethodType::LoadKind::kBssEntry) {
+      if (codegen_->EmitNonBakerReadBarrier()) {
+        // For non-Baker read barrier we have a temp-clobbering call.
+      } else {
+        // Rely on the pResolveMethodType to save everything.
+        locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
+      }
+    }
+  }
 }

 void InstructionCodeGeneratorARM64::VisitLoadMethodType(HLoadMethodType* load) {
-  codegen_->GenerateLoadMethodTypeRuntimeCall(load);
+  Location out_loc = load->GetLocations()->Out();
+  Register out = OutputRegister(load);
+
+  switch (load->GetLoadKind()) {
+    case HLoadMethodType::LoadKind::kBssEntry: {
+      // Add ADRP with its PC-relative MethodType .bss entry patch.
+      vixl::aarch64::Register temp = XRegisterFrom(out_loc);
+      vixl::aarch64::Label* adrp_label = codegen_->NewMethodTypeBssEntryPatch(load);
+      codegen_->EmitAdrpPlaceholder(adrp_label, temp);
+      // Add LDR with its PC-relative MethodType .bss entry patch.
+      vixl::aarch64::Label* ldr_label = codegen_->NewMethodTypeBssEntryPatch(load, adrp_label);
+      // /* GcRoot<mirror::MethodType> */ out = *(base_address + offset)  /* PC-relative */
+      // All aligned loads are implicitly atomic consume operations on ARM64.
+      codegen_->GenerateGcRootFieldLoad(load,
+                                        out_loc,
+                                        temp,
+                                        /* offset placeholder */ 0u,
+                                        ldr_label,
+                                        codegen_->GetCompilerReadBarrierOption());
+      SlowPathCodeARM64* slow_path =
+          new (codegen_->GetScopedAllocator()) LoadMethodTypeSlowPathARM64(load);
+      codegen_->AddSlowPath(slow_path);
+      __ Cbz(out, slow_path->GetEntryLabel());
+      __ Bind(slow_path->GetExitLabel());
+      codegen_->MaybeGenerateMarkingRegisterCheck(/* code= */ __LINE__);
+      return;
+    }
+    case HLoadMethodType::LoadKind::kJitTableAddress: {
+      __ Ldr(out, codegen_->DeduplicateJitMethodTypeLiteral(load->GetDexFile(),
+                                                            load->GetProtoIndex(),
+                                                            load->GetMethodType()));
+      codegen_->GenerateGcRootFieldLoad(load,
+                                        out_loc,
+                                        out.X(),
+                                        /* offset= */ 0,
+                                        /* fixup_label= */ nullptr,
+                                        codegen_->GetCompilerReadBarrierOption());
+      return;
+    }
+    default:
+      DCHECK_EQ(load->GetLoadKind(), HLoadMethodType::LoadKind::kRuntimeCall);
+      codegen_->GenerateLoadMethodTypeRuntimeCall(load);
+      break;
+  }
 }

 static MemOperand GetExceptionTlsAddress() {
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index 07e1d43486..14e75fab48 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -901,6 +901,13 @@ class CodeGeneratorARM64 : public CodeGenerator {
                                                dex::StringIndex string_index,
                                                vixl::aarch64::Label* adrp_label = nullptr);

+  // Add a new .bss entry MethodType patch for an instruction and return the label
+  // to be bound before the instruction. The instruction will be either the
+  // ADRP (pass `adrp_label = null`) or the LDR (pass `adrp_label` pointing
+  // to the associated ADRP patch label).
+  vixl::aarch64::Label* NewMethodTypeBssEntryPatch(HLoadMethodType* load_method_type,
+                                                   vixl::aarch64::Label* adrp_label = nullptr);
+
   // Add a new boot image JNI entrypoint patch for an instruction and return the label
   // to be bound before the instruction. The instruction will be either the
   // ADRP (pass `adrp_label = null`) or the LDR (pass `adrp_label` pointing
   // to the associated ADRP patch label).
@@ -930,6 +937,13 @@ class CodeGeneratorARM64 : public CodeGenerator {
     return jit_patches_.DeduplicateJitClassLiteral(
         dex_file, class_index, handle, GetCodeGenerationData());
   }
+  vixl::aarch64::Literal<uint32_t>* DeduplicateJitMethodTypeLiteral(
+      const DexFile& dex_file,
+      dex::ProtoIndex proto_index,
+      Handle<mirror::MethodType> handle) {
+    return jit_patches_.DeduplicateJitMethodTypeLiteral(
+        dex_file, proto_index, handle, GetCodeGenerationData());
+  }

   void EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label, vixl::aarch64::Register reg);
   void EmitAddPlaceholder(vixl::aarch64::Label* fixup_label,
@@ -1226,6 +1240,8 @@ class CodeGeneratorARM64 : public CodeGenerator {
   ArenaDeque<PcRelativePatchInfo> boot_image_string_patches_;
   // PC-relative String patch info for kBssEntry.
   ArenaDeque<PcRelativePatchInfo> string_bss_entry_patches_;
+  // PC-relative MethodType patch info for kBssEntry.
+  ArenaDeque<PcRelativePatchInfo> method_type_bss_entry_patches_;
   // PC-relative method patch info for kBootImageLinkTimePcRelative+kCallCriticalNative.
   ArenaDeque<PcRelativePatchInfo> boot_image_jni_entrypoint_patches_;
   // PC-relative patch info for IntrinsicObjects for the boot image,
diff --git a/compiler/optimizing/jit_patches_arm64.cc b/compiler/optimizing/jit_patches_arm64.cc
index 76ba182acb..193e913945 100644
--- a/compiler/optimizing/jit_patches_arm64.cc
+++ b/compiler/optimizing/jit_patches_arm64.cc
@@ -67,6 +67,12 @@ void JitPatchesARM64::EmitJitRootPatches(
     uint64_t index_in_table = code_generation_data.GetJitClassRootIndex(type_reference);
     PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
   }
+  for (const auto& entry : jit_method_type_patches_) {
+    const ProtoReference& proto_reference = entry.first;
+    vixl::aarch64::Literal<uint32_t>* table_entry_literal = entry.second;
+    uint64_t index_in_table = code_generation_data.GetJitMethodTypeRootIndex(proto_reference);
+    PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
+  }
 }

 vixl::aarch64::Literal<uint32_t>* JitPatchesARM64::DeduplicateBootImageAddressLiteral(
@@ -100,5 +106,18 @@ vixl::aarch64::Literal<uint32_t>* JitPatchesARM64::DeduplicateJitClassLiteral(
       });
 }

+vixl::aarch64::Literal<uint32_t>* JitPatchesARM64::DeduplicateJitMethodTypeLiteral(
+    const DexFile& dex_file,
+    dex::ProtoIndex proto_index,
+    Handle<mirror::MethodType> handle,
+    CodeGenerationData* code_generation_data) {
+  code_generation_data->ReserveJitMethodTypeRoot(ProtoReference(&dex_file, proto_index), handle);
+  return jit_method_type_patches_.GetOrCreate(
+      ProtoReference(&dex_file, proto_index),
+      [this]() {
+        return GetVIXLAssembler()->CreateLiteralDestroyedWithPool<uint32_t>(/* value= */ 0u);
+      });
+}
+
 }  // namespace arm64
 }  // namespace art
diff --git a/compiler/optimizing/jit_patches_arm64.h b/compiler/optimizing/jit_patches_arm64.h
index f5d92804ac..e13060210a 100644
--- a/compiler/optimizing/jit_patches_arm64.h
+++ b/compiler/optimizing/jit_patches_arm64.h
@@ -20,10 +20,12 @@
 #include "base/arena_allocator.h"
 #include "base/arena_containers.h"
 #include "dex/dex_file.h"
+#include "dex/proto_reference.h"
 #include "dex/string_reference.h"
 #include "dex/type_reference.h"
 #include "handle.h"
 #include "mirror/class.h"
+#include "mirror/method_type.h"
 #include "mirror/string.h"
 #include "utils/arm64/assembler_arm64.h"

@@ -56,7 +58,9 @@ class JitPatchesARM64 {
       jit_string_patches_(StringReferenceValueComparator(),
                           allocator->Adapter(kArenaAllocCodeGenerator)),
       jit_class_patches_(TypeReferenceValueComparator(),
-                         allocator->Adapter(kArenaAllocCodeGenerator)) {
+                         allocator->Adapter(kArenaAllocCodeGenerator)),
+      jit_method_type_patches_(ProtoReferenceValueComparator(),
+                               allocator->Adapter(kArenaAllocCodeGenerator)) {
   }

   using Uint64ToLiteralMap = ArenaSafeMap<uint64_t, vixl::aarch64::Literal<uint64_t>*>;
@@ -67,6 +71,9 @@ class JitPatchesARM64 {
   using TypeToLiteralMap = ArenaSafeMap<TypeReference,
                                         vixl::aarch64::Literal<uint32_t>*,
                                         TypeReferenceValueComparator>;
+  using ProtoToLiteralMap = ArenaSafeMap<ProtoReference,
+                                         vixl::aarch64::Literal<uint32_t>*,
+                                         ProtoReferenceValueComparator>;

   vixl::aarch64::Literal<uint32_t>* DeduplicateUint32Literal(uint32_t value);
   vixl::aarch64::Literal<uint64_t>* DeduplicateUint64Literal(uint64_t value);
@@ -81,6 +88,11 @@ class JitPatchesARM64 {
                                                     dex::TypeIndex type_index,
                                                     Handle<mirror::Class> handle,
                                                     CodeGenerationData* code_generation_data);
+  vixl::aarch64::Literal<uint32_t>* DeduplicateJitMethodTypeLiteral(
+      const DexFile& dex_file,
+      dex::ProtoIndex proto_index,
+      Handle<mirror::MethodType> handle,
+      CodeGenerationData* code_generation_data);

   void EmitJitRootPatches(uint8_t* code,
                           const uint8_t* roots_data,
@@ -99,6 +111,8 @@ class JitPatchesARM64 {
   StringToLiteralMap jit_string_patches_;
   // Patches for class literals in JIT compiled code.
   TypeToLiteralMap jit_class_patches_;
+  // Patches for MethodType literals in JIT compiled code.
+  ProtoToLiteralMap jit_method_type_patches_;
 };

 }  // namespace arm64
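
A closing note on the JIT side (a sketch under simplified assumptions, not the
ART API): DeduplicateJitMethodTypeLiteral keys a map by (dex file, proto index)
so that repeated HLoadMethodType instructions for the same proto share one
zero-initialized pool literal, which EmitJitRootPatches later patches with the
root-table index. The stand-in types ProtoKey and Literal32 below are
illustrative, not ART's ProtoReference or VIXL's Literal:

  #include <cstdint>
  #include <map>
  #include <memory>
  #include <utility>

  using ProtoKey = std::pair<const void*, uint32_t>;  // (dex file, proto index).
  struct Literal32 { uint32_t value = 0u; };          // Placeholder patched later.

  class JitMethodTypePatches {
   public:
    // Mirrors the ArenaSafeMap::GetOrCreate idiom used in the patch above:
    // return the existing literal for this proto, or create one placeholder.
    Literal32* GetOrCreate(const ProtoKey& key) {
      auto it = patches_.find(key);
      if (it == patches_.end()) {
        it = patches_.emplace(key, std::make_unique<Literal32>()).first;
      }
      return it->second.get();
    }

   private:
    std::map<ProtoKey, std::unique_ptr<Literal32>> patches_;
  };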