-rw-r--r--  compiler/linker/linker_patch.h                  |   3
-rw-r--r--  compiler/optimizing/code_generator_arm64.cc     | 115
-rw-r--r--  compiler/optimizing/code_generator_arm64.h      |  16
-rw-r--r--  compiler/optimizing/jit_patches_arm64.cc        |  19
-rw-r--r--  compiler/optimizing/jit_patches_arm64.h         |  16
-rw-r--r--  dex2oat/linker/arm64/relative_patcher_arm64.cc  |   3
-rw-r--r--  runtime/oat/oat.h                               |   4
7 files changed, 9 insertions, 167 deletions
diff --git a/compiler/linker/linker_patch.h b/compiler/linker/linker_patch.h
index d73d88dcb2..4eb0782d52 100644
--- a/compiler/linker/linker_patch.h
+++ b/compiler/linker/linker_patch.h
@@ -295,8 +295,7 @@ class LinkerPatch {
            patch_type_ == Type::kPublicTypeBssEntry ||
            patch_type_ == Type::kPackageTypeBssEntry ||
            patch_type_ == Type::kStringRelative ||
-           patch_type_ == Type::kStringBssEntry ||
-           patch_type_ == Type::kMethodTypeBssEntry);
+           patch_type_ == Type::kStringBssEntry);
     return pc_insn_offset_;
   }
 
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 369d21af03..6854c7266d 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -28,7 +28,6 @@
 #include "class_table.h"
 #include "code_generator_utils.h"
 #include "com_android_art_flags.h"
-#include "dex/dex_file_types.h"
 #include "entrypoints/quick/quick_entrypoints.h"
 #include "entrypoints/quick/quick_entrypoints_enum.h"
 #include "gc/accounting/card_table.h"
@@ -275,42 +274,6 @@ class DivZeroCheckSlowPathARM64 : public SlowPathCodeARM64 {
   DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM64);
 };
 
-class LoadMethodTypeSlowPathARM64 : public SlowPathCodeARM64 {
- public:
-  explicit LoadMethodTypeSlowPathARM64(HLoadMethodType* mt) : SlowPathCodeARM64(mt) {}
-
-  void EmitNativeCode(CodeGenerator* codegen) override {
-    LocationSummary* locations = instruction_->GetLocations();
-    Location out = locations->Out();
-    CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
-
-    __ Bind(GetEntryLabel());
-    SaveLiveRegisters(codegen, locations);
-
-    InvokeRuntimeCallingConvention calling_convention;
-    const dex::ProtoIndex proto_index = instruction_->AsLoadMethodType()->GetProtoIndex();
-    __ Mov(calling_convention.GetRegisterAt(0).W(), proto_index.index_);
-
-    arm64_codegen->InvokeRuntime(kQuickResolveMethodType,
-                                 instruction_,
-                                 instruction_->GetDexPc(),
-                                 this);
-    CheckEntrypointTypes<kQuickResolveMethodType, void*, uint32_t>();
-
-    DataType::Type type = instruction_->GetType();
-    arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
-    RestoreLiveRegisters(codegen, locations);
-
-    __ B(GetExitLabel());
-  }
-
-  const char* GetDescription() const override { return "LoadMethodTypeSlowPathARM64"; }
-
- private:
-  DISALLOW_COPY_AND_ASSIGN(LoadMethodTypeSlowPathARM64);
-};
-
-
 class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
  public:
   LoadClassSlowPathARM64(HLoadClass* cls, HInstruction* at)
@@ -1095,7 +1058,6 @@ CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
       package_type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
-      method_type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       boot_image_jni_entrypoint_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       boot_image_other_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       call_entrypoint_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
@@ -5345,15 +5307,6 @@ vixl::aarch64::Label* CodeGeneratorARM64::NewStringBssEntryPatch(
   return NewPcRelativePatch(&dex_file, string_index.index_, adrp_label, &string_bss_entry_patches_);
 }
 
-vixl::aarch64::Label* CodeGeneratorARM64::NewMethodTypeBssEntryPatch(
-    HLoadMethodType* load_method_type,
-    vixl::aarch64::Label* adrp_label) {
-  return NewPcRelativePatch(&load_method_type->GetDexFile(),
-                            load_method_type->GetProtoIndex().index_,
-                            adrp_label,
-                            &method_type_bss_entry_patches_);
-}
-
 vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageJniEntrypointPatch(
     MethodReference target_method,
     vixl::aarch64::Label* adrp_label) {
@@ -5533,7 +5486,6 @@ void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* lin
       package_type_bss_entry_patches_.size() +
       boot_image_string_patches_.size() +
       string_bss_entry_patches_.size() +
-      method_type_bss_entry_patches_.size() +
       boot_image_jni_entrypoint_patches_.size() +
       boot_image_other_patches_.size() +
       call_entrypoint_patches_.size() +
@@ -5574,8 +5526,6 @@ void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* lin
       package_type_bss_entry_patches_, linker_patches);
   EmitPcRelativeLinkerPatches<linker::LinkerPatch::StringBssEntryPatch>(
       string_bss_entry_patches_, linker_patches);
-  EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodTypeBssEntryPatch>(
-      method_type_bss_entry_patches_, linker_patches);
   EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeJniEntrypointPatch>(
       boot_image_jni_entrypoint_patches_, linker_patches);
   for (const PatchInfo<vixl::aarch64::Label>& info : call_entrypoint_patches_) {
@@ -5874,70 +5824,13 @@ void InstructionCodeGeneratorARM64::VisitLoadMethodHandle(HLoadMethodHandle* loa
 }
 
 void LocationsBuilderARM64::VisitLoadMethodType(HLoadMethodType* load) {
-  if (load->GetLoadKind() == HLoadMethodType::LoadKind::kRuntimeCall) {
-    InvokeRuntimeCallingConvention calling_convention;
-    Location location = LocationFrom(calling_convention.GetRegisterAt(0));
-    CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
-  } else {
-    LocationSummary* locations =
-        new (GetGraph()->GetAllocator()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
-    locations->SetOut(Location::RequiresRegister());
-    if (load->GetLoadKind() == HLoadMethodType::LoadKind::kBssEntry) {
-      if (codegen_->EmitNonBakerReadBarrier()) {
-        // For non-Baker read barrier we have a temp-clobbering call.
-      } else {
-        // Rely on the pResolveMethodType to save everything.
-        locations->SetCustomSlowPathCallerSaves(OneRegInReferenceOutSaveEverythingCallerSaves());
-      }
-    }
-  }
+  InvokeRuntimeCallingConvention calling_convention;
+  Location location = LocationFrom(calling_convention.GetRegisterAt(0));
+  CodeGenerator::CreateLoadMethodTypeRuntimeCallLocationSummary(load, location, location);
 }
 
 void InstructionCodeGeneratorARM64::VisitLoadMethodType(HLoadMethodType* load) {
-  Location out_loc = load->GetLocations()->Out();
-  Register out = OutputRegister(load);
-
-  switch (load->GetLoadKind()) {
-    case HLoadMethodType::LoadKind::kBssEntry: {
-      // Add ADRP with its PC-relative Class .bss entry patch.
-      vixl::aarch64::Register temp = XRegisterFrom(out_loc);
-      vixl::aarch64::Label* adrp_label = codegen_->NewMethodTypeBssEntryPatch(load);
-      codegen_->EmitAdrpPlaceholder(adrp_label, temp);
-      // Add LDR with its PC-relative MethodType .bss entry patch.
-      vixl::aarch64::Label* ldr_label = codegen_->NewMethodTypeBssEntryPatch(load, adrp_label);
-      // /* GcRoot<mirror::MethodType> */ out = *(base_address + offset)  /* PC-relative */
-      // All aligned loads are implicitly atomic consume operations on ARM64.
-      codegen_->GenerateGcRootFieldLoad(load,
-                                        out_loc,
-                                        temp,
-                                        /* offset placeholder */ 0u,
-                                        ldr_label,
-                                        codegen_->GetCompilerReadBarrierOption());
-      SlowPathCodeARM64* slow_path =
-          new (codegen_->GetScopedAllocator()) LoadMethodTypeSlowPathARM64(load);
-      codegen_->AddSlowPath(slow_path);
-      __ Cbz(out, slow_path->GetEntryLabel());
-      __ Bind(slow_path->GetExitLabel());
-      codegen_->MaybeGenerateMarkingRegisterCheck(/* code = */ __LINE__);
-      return;
-    }
-    case HLoadMethodType::LoadKind::kJitTableAddress: {
-      __ Ldr(out, codegen_->DeduplicateJitMethodTypeLiteral(load->GetDexFile(),
-                                                            load->GetProtoIndex(),
-                                                            load->GetMethodType()));
-      codegen_->GenerateGcRootFieldLoad(load,
-                                        out_loc,
-                                        out.X(),
-                                        /* offset= */ 0,
-                                        /* fixup_label= */ nullptr,
-                                        codegen_->GetCompilerReadBarrierOption());
-      return;
-    }
-    default:
-      DCHECK_EQ(load->GetLoadKind(), HLoadMethodType::LoadKind::kRuntimeCall);
-      codegen_->GenerateLoadMethodTypeRuntimeCall(load);
-      break;
-  }
+  codegen_->GenerateLoadMethodTypeRuntimeCall(load);
 }
 
 static MemOperand GetExceptionTlsAddress() {
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index 14e75fab48..07e1d43486 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -901,13 +901,6 @@ class CodeGeneratorARM64 : public CodeGenerator {
                                              dex::StringIndex string_index,
                                              vixl::aarch64::Label* adrp_label = nullptr);
 
-  // Add a new .bss entry MethodType patch for an instruction and return the label
-  // to be bound before the instruction. The instruction will be either the
-  // ADRP (pass `adrp_label = null`) or the ADD (pass `adrp_label` pointing
-  // to the associated ADRP patch label).
-  vixl::aarch64::Label* NewMethodTypeBssEntryPatch(HLoadMethodType* load_method_type,
-                                                   vixl::aarch64::Label* adrp_label = nullptr);
-
   // Add a new boot image JNI entrypoint patch for an instruction and return the label
   // to be bound before the instruction. The instruction will be either the
   // ADRP (pass `adrp_label = null`) or the LDR (pass `adrp_label` pointing
@@ -937,13 +930,6 @@ class CodeGeneratorARM64 : public CodeGenerator {
     return jit_patches_.DeduplicateJitClassLiteral(
         dex_file, class_index, handle, GetCodeGenerationData());
   }
-  vixl::aarch64::Literal<uint32_t>* DeduplicateJitMethodTypeLiteral(
-      const DexFile& dex_file,
-      dex::ProtoIndex proto_index,
-      Handle<mirror::MethodType> handle) {
-    return jit_patches_.DeduplicateJitMethodTypeLiteral(
-        dex_file, proto_index, handle, GetCodeGenerationData());
-  }
 
   void EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label, vixl::aarch64::Register reg);
   void EmitAddPlaceholder(vixl::aarch64::Label* fixup_label,
@@ -1240,8 +1226,6 @@ class CodeGeneratorARM64 : public CodeGenerator {
   ArenaDeque<PcRelativePatchInfo> boot_image_string_patches_;
   // PC-relative String patch info for kBssEntry.
   ArenaDeque<PcRelativePatchInfo> string_bss_entry_patches_;
-  // PC-relative MethodType patch info for kBssEntry.
-  ArenaDeque<PcRelativePatchInfo> method_type_bss_entry_patches_;
   // PC-relative method patch info for kBootImageLinkTimePcRelative+kCallCriticalNative.
   ArenaDeque<PcRelativePatchInfo> boot_image_jni_entrypoint_patches_;
   // PC-relative patch info for IntrinsicObjects for the boot image,
diff --git a/compiler/optimizing/jit_patches_arm64.cc b/compiler/optimizing/jit_patches_arm64.cc
index 193e913945..76ba182acb 100644
--- a/compiler/optimizing/jit_patches_arm64.cc
+++ b/compiler/optimizing/jit_patches_arm64.cc
@@ -67,12 +67,6 @@ void JitPatchesARM64::EmitJitRootPatches(
     uint64_t index_in_table = code_generation_data.GetJitClassRootIndex(type_reference);
     PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
   }
-  for (const auto& entry : jit_method_type_patches_) {
-    const ProtoReference& proto_reference = entry.first;
-    vixl::aarch64::Literal<uint32_t>* table_entry_literal = entry.second;
-    uint64_t index_in_table = code_generation_data.GetJitMethodTypeRootIndex(proto_reference);
-    PatchJitRootUse(code, roots_data, table_entry_literal, index_in_table);
-  }
 }
 
 vixl::aarch64::Literal<uint32_t>* JitPatchesARM64::DeduplicateBootImageAddressLiteral(
@@ -106,18 +100,5 @@ vixl::aarch64::Literal<uint32_t>* JitPatchesARM64::DeduplicateJitClassLiteral(
       });
 }
 
-vixl::aarch64::Literal<uint32_t>* JitPatchesARM64::DeduplicateJitMethodTypeLiteral(
-    const DexFile& dex_file,
-    dex::ProtoIndex proto_index,
-    Handle<mirror::MethodType> handle,
-    CodeGenerationData* code_generation_data) {
-  code_generation_data->ReserveJitMethodTypeRoot(ProtoReference(&dex_file, proto_index), handle);
-  return jit_method_type_patches_.GetOrCreate(
-      ProtoReference(&dex_file, proto_index),
-      [this]() {
-        return GetVIXLAssembler()->CreateLiteralDestroyedWithPool<uint32_t>(/* value= */ 0u);
-      });
-}
-
 }  // namespace arm64
 }  // namespace art
diff --git a/compiler/optimizing/jit_patches_arm64.h b/compiler/optimizing/jit_patches_arm64.h
index e13060210a..f5d92804ac 100644
--- a/compiler/optimizing/jit_patches_arm64.h
+++ b/compiler/optimizing/jit_patches_arm64.h
@@ -20,12 +20,10 @@
 #include "base/arena_allocator.h"
 #include "base/arena_containers.h"
 #include "dex/dex_file.h"
-#include "dex/proto_reference.h"
 #include "dex/string_reference.h"
 #include "dex/type_reference.h"
 #include "handle.h"
 #include "mirror/class.h"
-#include "mirror/method_type.h"
 #include "mirror/string.h"
 #include "utils/arm64/assembler_arm64.h"
 
@@ -58,9 +56,7 @@ class JitPatchesARM64 {
         jit_string_patches_(StringReferenceValueComparator(),
                             allocator->Adapter(kArenaAllocCodeGenerator)),
         jit_class_patches_(TypeReferenceValueComparator(),
-                           allocator->Adapter(kArenaAllocCodeGenerator)),
-        jit_method_type_patches_(ProtoReferenceValueComparator(),
-                                 allocator->Adapter(kArenaAllocCodeGenerator)) {
+                           allocator->Adapter(kArenaAllocCodeGenerator)) {
   }
 
   using Uint64ToLiteralMap = ArenaSafeMap<uint64_t, vixl::aarch64::Literal<uint64_t>*>;
@@ -71,9 +67,6 @@ class JitPatchesARM64 {
   using TypeToLiteralMap = ArenaSafeMap<TypeReference,
                                         vixl::aarch64::Literal<uint32_t>*,
                                         TypeReferenceValueComparator>;
-  using ProtoToLiteralMap = ArenaSafeMap<ProtoReference,
-                                         vixl::aarch64::Literal<uint32_t>*,
-                                         ProtoReferenceValueComparator>;
 
   vixl::aarch64::Literal<uint32_t>* DeduplicateUint32Literal(uint32_t value);
   vixl::aarch64::Literal<uint64_t>* DeduplicateUint64Literal(uint64_t value);
@@ -88,11 +81,6 @@ class JitPatchesARM64 {
                                                          dex::TypeIndex type_index,
                                                          Handle<mirror::Class> handle,
                                                          CodeGenerationData* code_generation_data);
-  vixl::aarch64::Literal<uint32_t>* DeduplicateJitMethodTypeLiteral(
-      const DexFile& dex_file,
-      dex::ProtoIndex proto_index,
-      Handle<mirror::MethodType> handle,
-      CodeGenerationData* code_generation_data);
 
   void EmitJitRootPatches(uint8_t* code,
                           const uint8_t* roots_data,
@@ -111,8 +99,6 @@ class JitPatchesARM64 {
   StringToLiteralMap jit_string_patches_;
   // Patches for class literals in JIT compiled code.
   TypeToLiteralMap jit_class_patches_;
-  // Patches for MethodType literals in JIT compiled code.
-  ProtoToLiteralMap jit_method_type_patches_;
 };
 
 }  // namespace arm64
diff --git a/dex2oat/linker/arm64/relative_patcher_arm64.cc b/dex2oat/linker/arm64/relative_patcher_arm64.cc
index 79fcd18b00..ed700d0fd7 100644
--- a/dex2oat/linker/arm64/relative_patcher_arm64.cc
+++ b/dex2oat/linker/arm64/relative_patcher_arm64.cc
@@ -281,8 +281,7 @@ void Arm64RelativePatcher::PatchPcRelativeReference(std::vector<uint8_t>* code,
              patch.GetType() == LinkerPatch::Type::kTypeBssEntry ||
              patch.GetType() == LinkerPatch::Type::kPublicTypeBssEntry ||
              patch.GetType() == LinkerPatch::Type::kPackageTypeBssEntry ||
-             patch.GetType() == LinkerPatch::Type::kStringBssEntry ||
-             patch.GetType() == LinkerPatch::Type::kMethodTypeBssEntry) << patch.GetType();
+             patch.GetType() == LinkerPatch::Type::kStringBssEntry) << patch.GetType();
       DCHECK_EQ(insn & 0xbfbffc00, 0xb9000000) << std::hex << insn;
     }
     if (kIsDebugBuild) {
diff --git a/runtime/oat/oat.h b/runtime/oat/oat.h
index f9bb57cd72..c16d73be5f 100644
--- a/runtime/oat/oat.h
+++ b/runtime/oat/oat.h
@@ -44,8 +44,8 @@ std::ostream& operator<<(std::ostream& stream, StubType stub_type);
 class EXPORT PACKED(4) OatHeader {
  public:
   static constexpr std::array<uint8_t, 4> kOatMagic { { 'o', 'a', 't', '\n' } };
-  // Last oat version changed reason: Force 4K ELF alignment on art/odex files.
-  static constexpr std::array<uint8_t, 4> kOatVersion{{'2', '5', '2', '\0'}};
+  // Last oat version changed reason: Revert "arm64: Store resolved MethodType-s in .bss"
+  static constexpr std::array<uint8_t, 4> kOatVersion{{'2', '5', '3', '\0'}};
 
   static constexpr const char* kDex2OatCmdLineKey = "dex2oat-cmdline";
   static constexpr const char* kDebuggableKey = "debuggable";