Diffstat (limited to 'compiler/optimizing')
21 files changed, 649 insertions, 245 deletions
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 7fa272acee..26c9e9fa2b 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -1400,6 +1400,7 @@ CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
       type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
+      boot_image_intrinsic_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       baker_read_barrier_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       jit_string_patches_(StringReferenceValueComparator(),
                           graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
@@ -4678,6 +4679,13 @@ void InstructionCodeGeneratorARM64::VisitInvokeCustom(HInvokeCustom* invoke) {
   codegen_->MaybeGenerateMarkingRegisterCheck(/* code */ __LINE__);
 }
 
+vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageIntrinsicPatch(
+    uint32_t intrinsic_data,
+    vixl::aarch64::Label* adrp_label) {
+  return NewPcRelativePatch(
+      /* dex_file */ nullptr, intrinsic_data, adrp_label, &boot_image_intrinsic_patches_);
+}
+
 vixl::aarch64::Label* CodeGeneratorARM64::NewBootImageRelRoPatch(
     uint32_t boot_image_offset,
     vixl::aarch64::Label* adrp_label) {
@@ -4796,24 +4804,54 @@ void CodeGeneratorARM64::EmitLdrOffsetPlaceholder(vixl::aarch64::Label* fixup_la
 }
 
 void CodeGeneratorARM64::LoadBootImageAddress(vixl::aarch64::Register reg,
-                                              uint32_t boot_image_offset) {
-  DCHECK(!GetCompilerOptions().IsBootImage());
-  if (GetCompilerOptions().GetCompilePic()) {
+                                              uint32_t boot_image_reference) {
+  if (GetCompilerOptions().IsBootImage()) {
+    // Add ADRP with its PC-relative type patch.
+    vixl::aarch64::Label* adrp_label = NewBootImageIntrinsicPatch(boot_image_reference);
+    EmitAdrpPlaceholder(adrp_label, reg.X());
+    // Add ADD with its PC-relative type patch.
+    vixl::aarch64::Label* add_label = NewBootImageIntrinsicPatch(boot_image_reference, adrp_label);
+    EmitAddPlaceholder(add_label, reg.X(), reg.X());
+  } else if (GetCompilerOptions().GetCompilePic()) {
     DCHECK(Runtime::Current()->IsAotCompiler());
     // Add ADRP with its PC-relative .data.bimg.rel.ro patch.
-    vixl::aarch64::Label* adrp_label = NewBootImageRelRoPatch(boot_image_offset);
+    vixl::aarch64::Label* adrp_label = NewBootImageRelRoPatch(boot_image_reference);
     EmitAdrpPlaceholder(adrp_label, reg.X());
     // Add LDR with its PC-relative .data.bimg.rel.ro patch.
-    vixl::aarch64::Label* ldr_label = NewBootImageRelRoPatch(boot_image_offset, adrp_label);
+    vixl::aarch64::Label* ldr_label = NewBootImageRelRoPatch(boot_image_reference, adrp_label);
     EmitLdrOffsetPlaceholder(ldr_label, reg.W(), reg.X());
   } else {
     gc::Heap* heap = Runtime::Current()->GetHeap();
     DCHECK(!heap->GetBootImageSpaces().empty());
-    const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_offset;
+    const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference;
     __ Ldr(reg.W(), DeduplicateBootImageAddressLiteral(reinterpret_cast<uintptr_t>(address)));
   }
 }
 
+void CodeGeneratorARM64::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
+                                                      uint32_t boot_image_offset) {
+  DCHECK(invoke->IsStatic());
+  InvokeRuntimeCallingConvention calling_convention;
+  Register argument = calling_convention.GetRegisterAt(0);
+  if (GetCompilerOptions().IsBootImage()) {
+    DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
+    // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
+    MethodReference target_method = invoke->GetTargetMethod();
+    dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
+    // Add ADRP with its PC-relative type patch.
+    vixl::aarch64::Label* adrp_label = NewBootImageTypePatch(*target_method.dex_file, type_idx);
+    EmitAdrpPlaceholder(adrp_label, argument.X());
+    // Add ADD with its PC-relative type patch.
+    vixl::aarch64::Label* add_label =
+        NewBootImageTypePatch(*target_method.dex_file, type_idx, adrp_label);
+    EmitAddPlaceholder(add_label, argument.X(), argument.X());
+  } else {
+    LoadBootImageAddress(argument, boot_image_offset);
+  }
+  InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
+  CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
+}
+
 template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
 inline void CodeGeneratorARM64::EmitPcRelativeLinkerPatches(
     const ArenaDeque<PcRelativePatchInfo>& infos,
@@ -4826,12 +4864,13 @@ inline void CodeGeneratorARM64::EmitPcRelativeLinkerPatches(
   }
 }
 
-linker::LinkerPatch DataBimgRelRoPatchAdapter(size_t literal_offset,
-                                              const DexFile* target_dex_file,
-                                              uint32_t pc_insn_offset,
-                                              uint32_t boot_image_offset) {
-  DCHECK(target_dex_file == nullptr);  // Unused for DataBimgRelRoPatch(), should be null.
-  return linker::LinkerPatch::DataBimgRelRoPatch(literal_offset, pc_insn_offset, boot_image_offset);
+template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
+linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
+                                     const DexFile* target_dex_file,
+                                     uint32_t pc_insn_offset,
+                                     uint32_t boot_image_offset) {
+  DCHECK(target_dex_file == nullptr);  // Unused for these patches, should be null.
+  return Factory(literal_offset, pc_insn_offset, boot_image_offset);
 }
 
 void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
@@ -4843,6 +4882,7 @@ void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* lin
       type_bss_entry_patches_.size() +
       boot_image_string_patches_.size() +
       string_bss_entry_patches_.size() +
+      boot_image_intrinsic_patches_.size() +
       baker_read_barrier_patches_.size();
   linker_patches->reserve(size);
   if (GetCompilerOptions().IsBootImage()) {
@@ -4852,11 +4892,14 @@ void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* lin
         boot_image_type_patches_, linker_patches);
     EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
         boot_image_string_patches_, linker_patches);
+    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
+        boot_image_intrinsic_patches_, linker_patches);
   } else {
-    EmitPcRelativeLinkerPatches<DataBimgRelRoPatchAdapter>(
+    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
         boot_image_method_patches_, linker_patches);
     DCHECK(boot_image_type_patches_.empty());
     DCHECK(boot_image_string_patches_.empty());
+    DCHECK(boot_image_intrinsic_patches_.empty());
   }
   EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
       method_bss_entry_patches_, linker_patches);
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index 548c59ca35..c44fa48066 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -560,6 +560,13 @@ class CodeGeneratorARM64 : public CodeGenerator {
     UNIMPLEMENTED(FATAL);
   }
 
+  // Add a new boot image intrinsic patch for an instruction and return the label
+  // to be bound before the instruction. The instruction will be either the
+  // ADRP (pass `adrp_label = null`) or the ADD (pass `adrp_label` pointing
+  // to the associated ADRP patch label).
+  vixl::aarch64::Label* NewBootImageIntrinsicPatch(uint32_t intrinsic_data,
+                                                   vixl::aarch64::Label* adrp_label = nullptr);
+
   // Add a new boot image relocation patch for an instruction and return the label
   // to be bound before the instruction. The instruction will be either the
   // ADRP (pass `adrp_label = null`) or the LDR (pass `adrp_label` pointing
@@ -633,7 +640,8 @@ class CodeGeneratorARM64 : public CodeGenerator {
                                 vixl::aarch64::Register out,
                                 vixl::aarch64::Register base);
 
-  void LoadBootImageAddress(vixl::aarch64::Register reg, uint32_t boot_image_offset);
+  void LoadBootImageAddress(vixl::aarch64::Register reg, uint32_t boot_image_reference);
+  void AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke, uint32_t boot_image_offset);
 
   void EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) OVERRIDE;
   bool NeedsThunkCode(const linker::LinkerPatch& patch) const OVERRIDE;
@@ -911,6 +919,8 @@ class CodeGeneratorARM64 : public CodeGenerator {
   ArenaDeque<PcRelativePatchInfo> boot_image_string_patches_;
   // PC-relative String patch info for kBssEntry.
   ArenaDeque<PcRelativePatchInfo> string_bss_entry_patches_;
+  // PC-relative patch info for IntrinsicObjects.
+  ArenaDeque<PcRelativePatchInfo> boot_image_intrinsic_patches_;
   // Baker read barrier patch info.
   ArenaDeque<BakerReadBarrierPatchInfo> baker_read_barrier_patches_;
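The ADRP/ADD and ADRP/LDR placeholder pairs above are the AArch64 idiom for materializing a PC-relative address in two instructions; the linker later fills in the page delta and the low 12 bits. A minimal, self-contained sketch of the arithmetic such a relocation performs (the helper and names are hypothetical, for illustration only, not part of the change):

#include <cstdint>
#include <cstdio>

// Hypothetical helper: split a target address into the ADRP page delta and
// the low 12 bits supplied by the following ADD (or the LDR offset).
struct AdrpAddPair {
  int64_t page_delta;  // goes into the ADRP immediate (in 4KiB pages)
  uint32_t low12;      // goes into the ADD/LDR immediate
};

AdrpAddPair SplitForAdrp(uint64_t insn_pc, uint64_t target) {
  uint64_t pc_page = insn_pc & ~UINT64_C(0xfff);
  uint64_t target_page = target & ~UINT64_C(0xfff);
  return AdrpAddPair{
      static_cast<int64_t>(target_page - pc_page) >> 12,  // ADRP immediate
      static_cast<uint32_t>(target & 0xfff)               // ADD/LDR immediate
  };
}

int main() {
  // An ADRP at 0x1000 materializing the address 0x7fabc123:
  AdrpAddPair parts = SplitForAdrp(0x1000, 0x7fabc123);
  std::printf("pages=%lld low12=0x%x\n",
              static_cast<long long>(parts.page_delta), parts.low12);
}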
diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc
index 6c8d5636d5..9e1ef4002e 100644
--- a/compiler/optimizing/code_generator_arm_vixl.cc
+++ b/compiler/optimizing/code_generator_arm_vixl.cc
@@ -29,6 +29,7 @@
 #include "gc/accounting/card_table.h"
 #include "gc/space/image_space.h"
 #include "heap_poisoning.h"
+#include "intrinsics.h"
 #include "intrinsics_arm_vixl.h"
 #include "linker/linker_patch.h"
 #include "mirror/array-inl.h"
@@ -2347,6 +2348,7 @@ CodeGeneratorARMVIXL::CodeGeneratorARMVIXL(HGraph* graph,
       type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
+      boot_image_intrinsic_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       baker_read_barrier_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       jit_string_patches_(StringReferenceValueComparator(),
                           graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
@@ -9462,6 +9464,11 @@ void CodeGeneratorARMVIXL::GenerateVirtualCall(
   }
 }
 
+CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewBootImageIntrinsicPatch(
+    uint32_t intrinsic_data) {
+  return NewPcRelativePatch(/* dex_file */ nullptr, intrinsic_data, &boot_image_intrinsic_patches_);
+}
+
 CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewBootImageRelRoPatch(
     uint32_t boot_image_offset) {
   return NewPcRelativePatch(/* dex_file */ nullptr,
@@ -9539,22 +9546,46 @@ VIXLUInt32Literal* CodeGeneratorARMVIXL::DeduplicateJitClassLiteral(const DexFil
       });
 }
 
-void CodeGeneratorARMVIXL::LoadBootImageAddress(vixl32::Register reg, uint32_t boot_image_offset) {
-  DCHECK(!GetCompilerOptions().IsBootImage());
-  if (GetCompilerOptions().GetCompilePic()) {
+void CodeGeneratorARMVIXL::LoadBootImageAddress(vixl32::Register reg,
+                                                uint32_t boot_image_reference) {
+  if (GetCompilerOptions().IsBootImage()) {
+    CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
+        NewBootImageIntrinsicPatch(boot_image_reference);
+    EmitMovwMovtPlaceholder(labels, reg);
+  } else if (GetCompilerOptions().GetCompilePic()) {
     DCHECK(Runtime::Current()->IsAotCompiler());
-    CodeGeneratorARMVIXL::PcRelativePatchInfo* labels = NewBootImageRelRoPatch(boot_image_offset);
+    CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
+        NewBootImageRelRoPatch(boot_image_reference);
     EmitMovwMovtPlaceholder(labels, reg);
     __ Ldr(reg, MemOperand(reg, /* offset */ 0));
   } else {
     gc::Heap* heap = Runtime::Current()->GetHeap();
     DCHECK(!heap->GetBootImageSpaces().empty());
     uintptr_t address =
-        reinterpret_cast<uintptr_t>(heap->GetBootImageSpaces()[0]->Begin() + boot_image_offset);
+        reinterpret_cast<uintptr_t>(heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference);
     __ Ldr(reg, DeduplicateBootImageAddressLiteral(dchecked_integral_cast<uint32_t>(address)));
   }
 }
 
+void CodeGeneratorARMVIXL::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
+                                                        uint32_t boot_image_offset) {
+  DCHECK(invoke->IsStatic());
+  InvokeRuntimeCallingConventionARMVIXL calling_convention;
+  vixl32::Register argument = calling_convention.GetRegisterAt(0);
+  if (GetCompilerOptions().IsBootImage()) {
+    DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
+    // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
+    MethodReference target_method = invoke->GetTargetMethod();
+    dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
+    PcRelativePatchInfo* labels = NewBootImageTypePatch(*target_method.dex_file, type_idx);
+    EmitMovwMovtPlaceholder(labels, argument);
+  } else {
+    LoadBootImageAddress(argument, boot_image_offset);
+  }
+  InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
+  CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
+}
+
 template <linker::LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
 inline void CodeGeneratorARMVIXL::EmitPcRelativeLinkerPatches(
     const ArenaDeque<PcRelativePatchInfo>& infos,
@@ -9575,12 +9606,13 @@ inline void CodeGeneratorARMVIXL::EmitPcRelativeLinkerPatches(
   }
 }
 
-linker::LinkerPatch DataBimgRelRoPatchAdapter(size_t literal_offset,
-                                              const DexFile* target_dex_file,
-                                              uint32_t pc_insn_offset,
-                                              uint32_t boot_image_offset) {
-  DCHECK(target_dex_file == nullptr);  // Unused for DataBimgRelRoPatch(), should be null.
-  return linker::LinkerPatch::DataBimgRelRoPatch(literal_offset, pc_insn_offset, boot_image_offset);
+template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
+linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
+                                     const DexFile* target_dex_file,
+                                     uint32_t pc_insn_offset,
+                                     uint32_t boot_image_offset) {
+  DCHECK(target_dex_file == nullptr);  // Unused for these patches, should be null.
+  return Factory(literal_offset, pc_insn_offset, boot_image_offset);
 }
 
 void CodeGeneratorARMVIXL::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
@@ -9592,6 +9624,7 @@ void CodeGeneratorARMVIXL::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* l
       /* MOVW+MOVT for each entry */ 2u * type_bss_entry_patches_.size() +
       /* MOVW+MOVT for each entry */ 2u * boot_image_string_patches_.size() +
       /* MOVW+MOVT for each entry */ 2u * string_bss_entry_patches_.size() +
+      /* MOVW+MOVT for each entry */ 2u * boot_image_intrinsic_patches_.size() +
       baker_read_barrier_patches_.size();
   linker_patches->reserve(size);
   if (GetCompilerOptions().IsBootImage()) {
@@ -9601,11 +9634,14 @@ void CodeGeneratorARMVIXL::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* l
         boot_image_type_patches_, linker_patches);
     EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
         boot_image_string_patches_, linker_patches);
+    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
+        boot_image_intrinsic_patches_, linker_patches);
   } else {
-    EmitPcRelativeLinkerPatches<DataBimgRelRoPatchAdapter>(
+    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
        boot_image_method_patches_, linker_patches);
     DCHECK(boot_image_type_patches_.empty());
     DCHECK(boot_image_string_patches_.empty());
+    DCHECK(boot_image_intrinsic_patches_.empty());
   }
   EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
       method_bss_entry_patches_, linker_patches);
diff --git a/compiler/optimizing/code_generator_arm_vixl.h b/compiler/optimizing/code_generator_arm_vixl.h
index ae19cdbc50..fc8cf98173 100644
--- a/compiler/optimizing/code_generator_arm_vixl.h
+++ b/compiler/optimizing/code_generator_arm_vixl.h
@@ -578,6 +578,7 @@ class CodeGeneratorARMVIXL : public CodeGenerator {
     vixl::aarch32::Label add_pc_label;
   };
 
+  PcRelativePatchInfo* NewBootImageIntrinsicPatch(uint32_t intrinsic_data);
   PcRelativePatchInfo* NewBootImageRelRoPatch(uint32_t boot_image_offset);
   PcRelativePatchInfo* NewBootImageMethodPatch(MethodReference target_method);
   PcRelativePatchInfo* NewMethodBssEntryPatch(MethodReference target_method);
@@ -600,7 +601,8 @@ class CodeGeneratorARMVIXL : public CodeGenerator {
                                                dex::TypeIndex type_index,
                                                Handle<mirror::Class> handle);
 
-  void LoadBootImageAddress(vixl::aarch32::Register reg, uint32_t boot_image_offset);
+  void LoadBootImageAddress(vixl::aarch32::Register reg, uint32_t boot_image_reference);
+  void AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke, uint32_t boot_image_offset);
 
   void EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) OVERRIDE;
   bool NeedsThunkCode(const linker::LinkerPatch& patch) const OVERRIDE;
@@ -904,6 +906,8 @@ class CodeGeneratorARMVIXL : public CodeGenerator {
   ArenaDeque<PcRelativePatchInfo> boot_image_string_patches_;
   // PC-relative String patch info for kBssEntry.
   ArenaDeque<PcRelativePatchInfo> string_bss_entry_patches_;
+  // PC-relative patch info for IntrinsicObjects.
+  ArenaDeque<PcRelativePatchInfo> boot_image_intrinsic_patches_;
   // Baker read barrier patch info.
   ArenaDeque<BakerReadBarrierPatchInfo> baker_read_barrier_patches_;
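Every back end in this change gives LoadBootImageAddress the same three-way dispatch: compiling the boot image itself records an IntrinsicReferencePatch, PIC AOT compilation against an existing boot image loads through a .data.bimg.rel.ro entry, and JIT (or the non-PIC test config) embeds the absolute address of the mapped boot image. A schematic sketch of that decision with hypothetical stand-ins for the codegen types (illustration only):

// Hypothetical stand-ins for CompilerOptions and the emitted relocation kinds.
enum class RelocKind { kIntrinsicReference, kDataBimgRelRo, kAbsoluteAddress };

struct Options {
  bool is_boot_image;  // models GetCompilerOptions().IsBootImage()
  bool compile_pic;    // models GetCompilerOptions().GetCompilePic()
};

// Mirrors the if / else-if / else structure of LoadBootImageAddress().
RelocKind ChooseBootImageReloc(const Options& opts) {
  if (opts.is_boot_image) {
    // Compiling the boot image itself: the object's final address is known
    // only at link time, so record an IntrinsicReferencePatch.
    return RelocKind::kIntrinsicReference;
  } else if (opts.compile_pic) {
    // AOT app compilation: go through the .data.bimg.rel.ro entry, which is
    // fixed up when the boot image is relocated.
    return RelocKind::kDataBimgRelRo;
  } else {
    // JIT: the boot image is already mapped, embed the absolute address.
    return RelocKind::kAbsoluteAddress;
  }
}

int main() {
  Options jit{/* is_boot_image */ false, /* compile_pic */ false};
  return ChooseBootImageReloc(jit) == RelocKind::kAbsoluteAddress ? 0 : 1;
}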
diff --git a/compiler/optimizing/code_generator_mips.cc b/compiler/optimizing/code_generator_mips.cc
index 8c38824d12..f0ef30ee37 100644
--- a/compiler/optimizing/code_generator_mips.cc
+++ b/compiler/optimizing/code_generator_mips.cc
@@ -1023,6 +1023,7 @@ CodeGeneratorMIPS::CodeGeneratorMIPS(HGraph* graph,
       type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
+      boot_image_intrinsic_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       jit_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       jit_class_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       clobbered_ra_(false) {
@@ -1596,12 +1597,13 @@ inline void CodeGeneratorMIPS::EmitPcRelativeLinkerPatches(
   }
 }
 
-linker::LinkerPatch DataBimgRelRoPatchAdapter(size_t literal_offset,
-                                              const DexFile* target_dex_file,
-                                              uint32_t pc_insn_offset,
-                                              uint32_t boot_image_offset) {
-  DCHECK(target_dex_file == nullptr);  // Unused for DataBimgRelRoPatch(), should be null.
-  return linker::LinkerPatch::DataBimgRelRoPatch(literal_offset, pc_insn_offset, boot_image_offset);
+template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
+linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
+                                     const DexFile* target_dex_file,
+                                     uint32_t pc_insn_offset,
+                                     uint32_t boot_image_offset) {
+  DCHECK(target_dex_file == nullptr);  // Unused for these patches, should be null.
+  return Factory(literal_offset, pc_insn_offset, boot_image_offset);
 }
 
 void CodeGeneratorMIPS::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
@@ -1612,7 +1614,8 @@ void CodeGeneratorMIPS::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* link
       boot_image_type_patches_.size() +
       type_bss_entry_patches_.size() +
       boot_image_string_patches_.size() +
-      string_bss_entry_patches_.size();
+      string_bss_entry_patches_.size() +
+      boot_image_intrinsic_patches_.size();
   linker_patches->reserve(size);
   if (GetCompilerOptions().IsBootImage()) {
     EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
@@ -1621,11 +1624,14 @@ void CodeGeneratorMIPS::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* link
         boot_image_type_patches_, linker_patches);
     EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
         boot_image_string_patches_, linker_patches);
+    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
+        boot_image_intrinsic_patches_, linker_patches);
   } else {
-    EmitPcRelativeLinkerPatches<DataBimgRelRoPatchAdapter>(
+    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
        boot_image_method_patches_, linker_patches);
     DCHECK(boot_image_type_patches_.empty());
     DCHECK(boot_image_string_patches_.empty());
+    DCHECK(boot_image_intrinsic_patches_.empty());
   }
   EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
       method_bss_entry_patches_, linker_patches);
@@ -1636,6 +1642,13 @@ void CodeGeneratorMIPS::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* link
   DCHECK_EQ(size, linker_patches->size());
 }
 
+CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewBootImageIntrinsicPatch(
+    uint32_t intrinsic_data,
+    const PcRelativePatchInfo* info_high) {
+  return NewPcRelativePatch(
+      /* dex_file */ nullptr, intrinsic_data, info_high, &boot_image_intrinsic_patches_);
+}
+
 CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewBootImageRelRoPatch(
     uint32_t boot_image_offset,
     const PcRelativePatchInfo* info_high) {
@@ -1739,22 +1752,48 @@ void CodeGeneratorMIPS::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo
   // offset to `out` (e.g. lw, jialc, addiu).
 }
 
-void CodeGeneratorMIPS::LoadBootImageAddress(Register reg, uint32_t boot_image_offset) {
-  DCHECK(!GetCompilerOptions().IsBootImage());
-  if (GetCompilerOptions().GetCompilePic()) {
+void CodeGeneratorMIPS::LoadBootImageAddress(Register reg, uint32_t boot_image_reference) {
+  if (GetCompilerOptions().IsBootImage()) {
+    PcRelativePatchInfo* info_high = NewBootImageIntrinsicPatch(boot_image_reference);
+    PcRelativePatchInfo* info_low = NewBootImageIntrinsicPatch(boot_image_reference, info_high);
+    EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, /* base */ ZERO);
+    __ Addiu(reg, TMP, /* placeholder */ 0x5678, &info_low->label);
+  } else if (GetCompilerOptions().GetCompilePic()) {
     DCHECK(Runtime::Current()->IsAotCompiler());
-    PcRelativePatchInfo* info_high = NewBootImageRelRoPatch(boot_image_offset);
-    PcRelativePatchInfo* info_low = NewBootImageRelRoPatch(boot_image_offset, info_high);
+    PcRelativePatchInfo* info_high = NewBootImageRelRoPatch(boot_image_reference);
+    PcRelativePatchInfo* info_low = NewBootImageRelRoPatch(boot_image_reference, info_high);
     EmitPcRelativeAddressPlaceholderHigh(info_high, reg, /* base */ ZERO);
     __ Lw(reg, reg, /* placeholder */ 0x5678, &info_low->label);
   } else {
     gc::Heap* heap = Runtime::Current()->GetHeap();
     DCHECK(!heap->GetBootImageSpaces().empty());
-    const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_offset;
+    const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference;
     __ LoadConst32(reg, dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(address)));
   }
 }
 
+void CodeGeneratorMIPS::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
+                                                     uint32_t boot_image_offset) {
+  DCHECK(invoke->IsStatic());
+  InvokeRuntimeCallingConvention calling_convention;
+  Register argument = calling_convention.GetRegisterAt(0);
+  if (GetCompilerOptions().IsBootImage()) {
+    DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
+    // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
+    MethodReference target_method = invoke->GetTargetMethod();
+    dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
+    PcRelativePatchInfo* info_high = NewBootImageTypePatch(*target_method.dex_file, type_idx);
+    PcRelativePatchInfo* info_low =
+        NewBootImageTypePatch(*target_method.dex_file, type_idx, info_high);
+    EmitPcRelativeAddressPlaceholderHigh(info_high, argument, /* base */ ZERO);
+    __ Addiu(argument, argument, /* placeholder */ 0x5678, &info_low->label);
+  } else {
+    LoadBootImageAddress(argument, boot_image_offset);
+  }
+  InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
+  CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
+}
+
 CodeGeneratorMIPS::JitPatchInfo* CodeGeneratorMIPS::NewJitRootStringPatch(
     const DexFile& dex_file,
     dex::StringIndex string_index,
diff --git a/compiler/optimizing/code_generator_mips.h b/compiler/optimizing/code_generator_mips.h
index 9758d35335..4830ac9bc6 100644
--- a/compiler/optimizing/code_generator_mips.h
+++ b/compiler/optimizing/code_generator_mips.h
@@ -618,6 +618,8 @@ class CodeGeneratorMIPS : public CodeGenerator {
     DISALLOW_COPY_AND_ASSIGN(PcRelativePatchInfo);
   };
 
+  PcRelativePatchInfo* NewBootImageIntrinsicPatch(uint32_t intrinsic_data,
+                                                  const PcRelativePatchInfo* info_high = nullptr);
   PcRelativePatchInfo* NewBootImageRelRoPatch(uint32_t boot_image_offset,
                                               const PcRelativePatchInfo* info_high = nullptr);
   PcRelativePatchInfo* NewBootImageMethodPatch(MethodReference target_method,
@@ -642,7 +644,8 @@ class CodeGeneratorMIPS : public CodeGenerator {
                                              Register out,
                                              Register base);
 
-  void LoadBootImageAddress(Register reg, uint32_t boot_image_offset);
+  void LoadBootImageAddress(Register reg, uint32_t boot_image_reference);
+  void AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke, uint32_t boot_image_offset);
 
   // The JitPatchInfo is used for JIT string and class loads.
   struct JitPatchInfo {
@@ -708,6 +711,8 @@ class CodeGeneratorMIPS : public CodeGenerator {
   ArenaDeque<PcRelativePatchInfo> boot_image_string_patches_;
   // PC-relative String patch info for kBssEntry.
   ArenaDeque<PcRelativePatchInfo> string_bss_entry_patches_;
+  // PC-relative patch info for IntrinsicObjects.
+  ArenaDeque<PcRelativePatchInfo> boot_image_intrinsic_patches_;
   // Patches for string root accesses in JIT compiled code.
   ArenaDeque<JitPatchInfo> jit_string_patches_;
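The MIPS high/low placeholder pair plays the role of ADRP/ADD, with one twist: addiu and lw sign-extend their 16-bit immediate, so the high half must be computed from offset + 0x8000 for the pair to reassemble correctly. A small self-contained demonstration of that split (hypothetical helper, illustration only):

#include <cassert>
#include <cstdint>

// Split a 32-bit offset into MIPS %hi/%lo halves. Because addiu/lw
// sign-extend the 16-bit low part, the high part is taken from
// offset + 0x8000 so that (hi << 16) plus the sign-extended lo
// reconstructs the original offset.
struct HiLo { uint16_t hi; uint16_t lo; };

HiLo SplitHiLo(uint32_t offset) {
  return HiLo{static_cast<uint16_t>((offset + 0x8000u) >> 16),
              static_cast<uint16_t>(offset & 0xffffu)};
}

int main() {
  for (uint32_t offset : {0x12345678u, 0x0000ffffu, 0x7fff8000u}) {
    HiLo parts = SplitHiLo(offset);
    // Reassemble the way the hardware does: lui + sign-extended addiu.
    uint32_t reassembled =
        (static_cast<uint32_t>(parts.hi) << 16) +
        static_cast<uint32_t>(static_cast<int32_t>(static_cast<int16_t>(parts.lo)));
    assert(reassembled == offset);
  }
}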
diff --git a/compiler/optimizing/code_generator_mips64.cc b/compiler/optimizing/code_generator_mips64.cc
index 9682377f5e..6e72727f59 100644
--- a/compiler/optimizing/code_generator_mips64.cc
+++ b/compiler/optimizing/code_generator_mips64.cc
@@ -968,6 +968,7 @@ CodeGeneratorMIPS64::CodeGeneratorMIPS64(HGraph* graph,
       type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
+      boot_image_intrinsic_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       jit_string_patches_(StringReferenceValueComparator(),
                           graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       jit_class_patches_(TypeReferenceValueComparator(),
@@ -1508,12 +1509,13 @@ inline void CodeGeneratorMIPS64::EmitPcRelativeLinkerPatches(
   }
 }
 
-linker::LinkerPatch DataBimgRelRoPatchAdapter(size_t literal_offset,
-                                              const DexFile* target_dex_file,
-                                              uint32_t pc_insn_offset,
-                                              uint32_t boot_image_offset) {
-  DCHECK(target_dex_file == nullptr);  // Unused for DataBimgRelRoPatch(), should be null.
-  return linker::LinkerPatch::DataBimgRelRoPatch(literal_offset, pc_insn_offset, boot_image_offset);
+template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
+linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
+                                     const DexFile* target_dex_file,
+                                     uint32_t pc_insn_offset,
+                                     uint32_t boot_image_offset) {
+  DCHECK(target_dex_file == nullptr);  // Unused for these patches, should be null.
+  return Factory(literal_offset, pc_insn_offset, boot_image_offset);
 }
 
 void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
@@ -1524,7 +1526,8 @@ void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* li
       boot_image_type_patches_.size() +
       type_bss_entry_patches_.size() +
       boot_image_string_patches_.size() +
-      string_bss_entry_patches_.size();
+      string_bss_entry_patches_.size() +
+      boot_image_intrinsic_patches_.size();
   linker_patches->reserve(size);
   if (GetCompilerOptions().IsBootImage()) {
     EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
@@ -1533,11 +1536,14 @@ void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* li
         boot_image_type_patches_, linker_patches);
     EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
         boot_image_string_patches_, linker_patches);
+    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
+        boot_image_intrinsic_patches_, linker_patches);
   } else {
-    EmitPcRelativeLinkerPatches<DataBimgRelRoPatchAdapter>(
+    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
        boot_image_method_patches_, linker_patches);
     DCHECK(boot_image_type_patches_.empty());
     DCHECK(boot_image_string_patches_.empty());
+    DCHECK(boot_image_intrinsic_patches_.empty());
   }
   EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
       method_bss_entry_patches_, linker_patches);
@@ -1548,6 +1554,13 @@ void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* li
   DCHECK_EQ(size, linker_patches->size());
 }
 
+CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewBootImageIntrinsicPatch(
+    uint32_t intrinsic_data,
+    const PcRelativePatchInfo* info_high) {
+  return NewPcRelativePatch(
+      /* dex_file */ nullptr, intrinsic_data, info_high, &boot_image_intrinsic_patches_);
+}
+
 CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewBootImageRelRoPatch(
     uint32_t boot_image_offset,
     const PcRelativePatchInfo* info_high) {
@@ -1638,12 +1651,16 @@ void CodeGeneratorMIPS64::EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchIn
   }
 }
 
-void CodeGeneratorMIPS64::LoadBootImageAddress(GpuRegister reg, uint32_t boot_image_offset) {
-  DCHECK(!GetCompilerOptions().IsBootImage());
-  if (GetCompilerOptions().GetCompilePic()) {
+void CodeGeneratorMIPS64::LoadBootImageAddress(GpuRegister reg, uint32_t boot_image_reference) {
+  if (GetCompilerOptions().IsBootImage()) {
+    PcRelativePatchInfo* info_high = NewBootImageIntrinsicPatch(boot_image_reference);
+    PcRelativePatchInfo* info_low = NewBootImageIntrinsicPatch(boot_image_reference, info_high);
+    EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
+    __ Daddiu(reg, AT, /* placeholder */ 0x5678);
+  } else if (GetCompilerOptions().GetCompilePic()) {
     DCHECK(Runtime::Current()->IsAotCompiler());
-    PcRelativePatchInfo* info_high = NewBootImageRelRoPatch(boot_image_offset);
-    PcRelativePatchInfo* info_low = NewBootImageRelRoPatch(boot_image_offset, info_high);
+    PcRelativePatchInfo* info_high = NewBootImageRelRoPatch(boot_image_reference);
+    PcRelativePatchInfo* info_low = NewBootImageRelRoPatch(boot_image_reference, info_high);
     EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
     // Note: Boot image is in the low 4GiB and the entry is 32-bit, so emit a 32-bit load.
     __ Lwu(reg, AT, /* placeholder */ 0x5678);
@@ -1651,11 +1668,33 @@ void CodeGeneratorMIPS64::LoadBootImageAddress(GpuRegister reg, uint32_t boot_im
     gc::Heap* heap = Runtime::Current()->GetHeap();
     DCHECK(!heap->GetBootImageSpaces().empty());
     uintptr_t address =
-        reinterpret_cast<uintptr_t>(heap->GetBootImageSpaces()[0]->Begin() + boot_image_offset);
+        reinterpret_cast<uintptr_t>(heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference);
     __ LoadLiteral(reg, kLoadDoubleword, DeduplicateBootImageAddressLiteral(address));
   }
 }
 
+void CodeGeneratorMIPS64::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
+                                                       uint32_t boot_image_offset) {
+  DCHECK(invoke->IsStatic());
+  InvokeRuntimeCallingConvention calling_convention;
+  GpuRegister argument = calling_convention.GetRegisterAt(0);
+  if (GetCompilerOptions().IsBootImage()) {
+    DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
+    // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
+    MethodReference target_method = invoke->GetTargetMethod();
+    dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
+    PcRelativePatchInfo* info_high = NewBootImageTypePatch(*target_method.dex_file, type_idx);
+    PcRelativePatchInfo* info_low =
+        NewBootImageTypePatch(*target_method.dex_file, type_idx, info_high);
+    EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
+    __ Daddiu(argument, AT, /* placeholder */ 0x5678);
+  } else {
+    LoadBootImageAddress(argument, boot_image_offset);
+  }
+  InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
+  CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
+}
+
 Literal* CodeGeneratorMIPS64::DeduplicateJitStringLiteral(const DexFile& dex_file,
                                                           dex::StringIndex string_index,
                                                           Handle<mirror::String> handle) {
diff --git a/compiler/optimizing/code_generator_mips64.h b/compiler/optimizing/code_generator_mips64.h
index 96306d12c4..fc0908b2cb 100644
--- a/compiler/optimizing/code_generator_mips64.h
+++ b/compiler/optimizing/code_generator_mips64.h
@@ -588,6 +588,8 @@ class CodeGeneratorMIPS64 : public CodeGenerator {
     DISALLOW_COPY_AND_ASSIGN(PcRelativePatchInfo);
   };
 
+  PcRelativePatchInfo* NewBootImageIntrinsicPatch(uint32_t intrinsic_data,
+                                                  const PcRelativePatchInfo* info_high = nullptr);
   PcRelativePatchInfo* NewBootImageRelRoPatch(uint32_t boot_image_offset,
                                               const PcRelativePatchInfo* info_high = nullptr);
   PcRelativePatchInfo* NewBootImageMethodPatch(MethodReference target_method,
@@ -612,7 +614,8 @@ class CodeGeneratorMIPS64 : public CodeGenerator {
                                              GpuRegister out,
                                              PcRelativePatchInfo* info_low = nullptr);
 
-  void LoadBootImageAddress(GpuRegister reg, uint32_t boot_image_offset);
+  void LoadBootImageAddress(GpuRegister reg, uint32_t boot_image_reference);
+  void AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke, uint32_t boot_image_offset);
 
   void PatchJitRootUse(uint8_t* code,
                        const uint8_t* roots_data,
@@ -673,6 +676,8 @@ class CodeGeneratorMIPS64 : public CodeGenerator {
   ArenaDeque<PcRelativePatchInfo> boot_image_string_patches_;
   // PC-relative type patch info for kBssEntry.
   ArenaDeque<PcRelativePatchInfo> string_bss_entry_patches_;
+  // PC-relative patch info for IntrinsicObjects.
+  ArenaDeque<PcRelativePatchInfo> boot_image_intrinsic_patches_;
   // Patches for string root accesses in JIT compiled code.
   StringToLiteralMap jit_string_patches_;
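NoDexFileAdapter, repeated verbatim in every back end, bridges an arity mismatch: EmitPcRelativeLinkerPatches expects a four-argument factory taking a const DexFile*, while DataBimgRelRoPatch and IntrinsicReferencePatch take only three arguments because they carry no dex file. The same adapter trick, distilled with toy types (all names below are stand-ins, not ART APIs):

#include <cassert>
#include <cstddef>
#include <cstdint>

struct DexFile {};  // toy stand-in
struct Patch { size_t offset; uint32_t pc; uint32_t data; };

// A three-argument factory, as DataBimgRelRoPatch/IntrinsicReferencePatch are.
Patch MakeRelRo(size_t off, uint32_t pc, uint32_t data) { return {off, pc, data}; }

// The adapter: instantiated per factory, it matches the four-argument shape
// the generic patch-emission template requires and asserts that the unused
// DexFile* is null.
template <Patch (*Factory)(size_t, uint32_t, uint32_t)>
Patch NoDexFileAdapter(size_t off, const DexFile* dex_file, uint32_t pc, uint32_t data) {
  assert(dex_file == nullptr);  // These patch kinds carry no dex file.
  return Factory(off, pc, data);
}

// A consumer that only accepts four-argument factories.
template <Patch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
Patch EmitOne(size_t off, uint32_t pc, uint32_t data) {
  return Factory(off, nullptr, pc, data);
}

int main() {
  Patch p = EmitOne<NoDexFileAdapter<MakeRelRo>>(16u, 8u, 0x1234u);
  assert(p.offset == 16u && p.pc == 8u && p.data == 0x1234u);
}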
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index b03d72c0f3..d189476a48 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -1036,6 +1036,7 @@ CodeGeneratorX86::CodeGeneratorX86(HGraph* graph,
       type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
+      boot_image_intrinsic_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       jit_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       jit_class_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       constant_area_start_(-1),
@@ -4915,6 +4916,13 @@ void CodeGeneratorX86::GenerateVirtualCall(
   RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
 }
 
+void CodeGeneratorX86::RecordBootImageIntrinsicPatch(HX86ComputeBaseMethodAddress* method_address,
+                                                     uint32_t intrinsic_data) {
+  boot_image_intrinsic_patches_.emplace_back(
+      method_address, /* target_dex_file */ nullptr, intrinsic_data);
+  __ Bind(&boot_image_intrinsic_patches_.back().label);
+}
+
 void CodeGeneratorX86::RecordBootImageRelRoPatch(HX86ComputeBaseMethodAddress* method_address,
                                                  uint32_t boot_image_offset) {
   boot_image_method_patches_.emplace_back(
@@ -4975,10 +4983,18 @@ Label* CodeGeneratorX86::NewStringBssEntryPatch(HLoadString* load_string) {
 }
 
 void CodeGeneratorX86::LoadBootImageAddress(Register reg,
-                                            uint32_t boot_image_offset,
+                                            uint32_t boot_image_reference,
                                             HInvokeStaticOrDirect* invoke) {
-  DCHECK(!GetCompilerOptions().IsBootImage());
-  if (GetCompilerOptions().GetCompilePic()) {
+  if (GetCompilerOptions().IsBootImage()) {
+    DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
+    HX86ComputeBaseMethodAddress* method_address =
+        invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress();
+    DCHECK(method_address != nullptr);
+    Register method_address_reg =
+        invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex()).AsRegister<Register>();
+    __ leal(reg, Address(method_address_reg, CodeGeneratorX86::kDummy32BitOffset));
+    RecordBootImageIntrinsicPatch(method_address, boot_image_reference);
+  } else if (GetCompilerOptions().GetCompilePic()) {
     DCHECK(Runtime::Current()->IsAotCompiler());
     DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
     HX86ComputeBaseMethodAddress* method_address =
@@ -4987,15 +5003,41 @@ void CodeGeneratorX86::LoadBootImageAddress(Register reg,
     Register method_address_reg =
         invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex()).AsRegister<Register>();
     __ movl(reg, Address(method_address_reg, CodeGeneratorX86::kDummy32BitOffset));
-    RecordBootImageRelRoPatch(method_address, boot_image_offset);
+    RecordBootImageRelRoPatch(method_address, boot_image_reference);
   } else {
     gc::Heap* heap = Runtime::Current()->GetHeap();
     DCHECK(!heap->GetBootImageSpaces().empty());
-    const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_offset;
+    const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference;
     __ movl(reg, Immediate(dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(address))));
   }
 }
 
+void CodeGeneratorX86::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
+                                                    uint32_t boot_image_offset) {
+  DCHECK(invoke->IsStatic());
+  InvokeRuntimeCallingConvention calling_convention;
+  Register argument = calling_convention.GetRegisterAt(0);
+  if (GetCompilerOptions().IsBootImage()) {
+    DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
+    // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
+    DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u);
+    HX86ComputeBaseMethodAddress* method_address =
+        invoke->InputAt(invoke->GetSpecialInputIndex())->AsX86ComputeBaseMethodAddress();
+    DCHECK(method_address != nullptr);
+    Register method_address_reg =
+        invoke->GetLocations()->InAt(invoke->GetSpecialInputIndex()).AsRegister<Register>();
+    __ leal(argument, Address(method_address_reg, CodeGeneratorX86::kDummy32BitOffset));
+    MethodReference target_method = invoke->GetTargetMethod();
+    dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
+    boot_image_type_patches_.emplace_back(method_address, target_method.dex_file, type_idx.index_);
+    __ Bind(&boot_image_type_patches_.back().label);
+  } else {
+    LoadBootImageAddress(argument, boot_image_offset, invoke);
+  }
+  InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
+  CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
+}
+
 // The label points to the end of the "movl" or another instruction but the literal offset
 // for method patch needs to point to the embedded constant which occupies the last 4 bytes.
 constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
@@ -5013,12 +5055,13 @@ inline void CodeGeneratorX86::EmitPcRelativeLinkerPatches(
   }
 }
 
-linker::LinkerPatch DataBimgRelRoPatchAdapter(size_t literal_offset,
-                                              const DexFile* target_dex_file,
-                                              uint32_t pc_insn_offset,
-                                              uint32_t boot_image_offset) {
-  DCHECK(target_dex_file == nullptr);  // Unused for DataBimgRelRoPatch(), should be null.
-  return linker::LinkerPatch::DataBimgRelRoPatch(literal_offset, pc_insn_offset, boot_image_offset);
+template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
+linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
+                                     const DexFile* target_dex_file,
+                                     uint32_t pc_insn_offset,
+                                     uint32_t boot_image_offset) {
+  DCHECK(target_dex_file == nullptr);  // Unused for these patches, should be null.
+  return Factory(literal_offset, pc_insn_offset, boot_image_offset);
 }
 
 void CodeGeneratorX86::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
@@ -5029,7 +5072,8 @@ void CodeGeneratorX86::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linke
       boot_image_type_patches_.size() +
       type_bss_entry_patches_.size() +
       boot_image_string_patches_.size() +
-      string_bss_entry_patches_.size();
+      string_bss_entry_patches_.size() +
+      boot_image_intrinsic_patches_.size();
   linker_patches->reserve(size);
   if (GetCompilerOptions().IsBootImage()) {
     EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
@@ -5038,11 +5082,14 @@ void CodeGeneratorX86::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linke
         boot_image_type_patches_, linker_patches);
     EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
         boot_image_string_patches_, linker_patches);
+    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
+        boot_image_intrinsic_patches_, linker_patches);
   } else {
-    EmitPcRelativeLinkerPatches<DataBimgRelRoPatchAdapter>(
+    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
        boot_image_method_patches_, linker_patches);
     DCHECK(boot_image_type_patches_.empty());
     DCHECK(boot_image_string_patches_.empty());
+    DCHECK(boot_image_intrinsic_patches_.empty());
   }
   EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
       method_bss_entry_patches_, linker_patches);
diff --git a/compiler/optimizing/code_generator_x86.h b/compiler/optimizing/code_generator_x86.h
index e947b9dfc8..cb58e920ea 100644
--- a/compiler/optimizing/code_generator_x86.h
+++ b/compiler/optimizing/code_generator_x86.h
@@ -419,6 +419,8 @@ class CodeGeneratorX86 : public CodeGenerator {
   void GenerateVirtualCall(
       HInvokeVirtual* invoke, Location temp, SlowPathCode* slow_path = nullptr) OVERRIDE;
 
+  void RecordBootImageIntrinsicPatch(HX86ComputeBaseMethodAddress* method_address,
+                                     uint32_t intrinsic_data);
   void RecordBootImageRelRoPatch(HX86ComputeBaseMethodAddress* method_address,
                                  uint32_t boot_image_offset);
   void RecordBootImageMethodPatch(HInvokeStaticOrDirect* invoke);
@@ -429,8 +431,9 @@ class CodeGeneratorX86 : public CodeGenerator {
   Label* NewStringBssEntryPatch(HLoadString* load_string);
 
   void LoadBootImageAddress(Register reg,
-                            uint32_t boot_image_offset,
+                            uint32_t boot_image_reference,
                             HInvokeStaticOrDirect* invoke);
+  void AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke, uint32_t boot_image_offset);
 
   Label* NewJitRootStringPatch(const DexFile& dex_file,
                                dex::StringIndex string_index,
@@ -651,6 +654,8 @@ class CodeGeneratorX86 : public CodeGenerator {
   ArenaDeque<X86PcRelativePatchInfo> boot_image_string_patches_;
   // PC-relative String patch info for kBssEntry.
   ArenaDeque<X86PcRelativePatchInfo> string_bss_entry_patches_;
+  // PC-relative patch info for IntrinsicObjects.
+  ArenaDeque<X86PcRelativePatchInfo> boot_image_intrinsic_patches_;
   // Patches for string root accesses in JIT compiled code.
   ArenaDeque<PatchInfo<Label>> jit_string_patches_;
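32-bit x86 has no PC-relative data addressing, so this back end anchors on HX86ComputeBaseMethodAddress: a register holding the address of a known point in the method, against which the linker computes a displacement to substitute for kDummy32BitOffset. A toy model of why an anchor-relative displacement still lands on the target after the image is relocated (hypothetical numbers and helper, illustration only):

#include <cassert>
#include <cstdint>

// The displacement is computed at link time against the anchor's linked
// address; because the anchor and the target move together under a load
// slide, anchor + displacement is still correct at run time.
uint32_t Materialize(uint32_t anchor_runtime_address,
                     uint32_t anchor_link_time_address,
                     uint32_t target_link_time_address) {
  uint32_t patched_disp = target_link_time_address - anchor_link_time_address;
  return anchor_runtime_address + patched_disp;
}

int main() {
  // Image linked at 0x1000 but loaded at 0x5000 (slide of 0x4000):
  // anchor 0x1040 -> 0x5040, target 0x1234 -> 0x5234.
  assert(Materialize(/* anchor at runtime */ 0x5040u,
                     /* anchor at link time */ 0x1040u,
                     /* target at link time */ 0x1234u) == 0x5234u);
}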
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 28f3abff79..bea3da070a 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -1067,6 +1067,11 @@ void CodeGeneratorX86_64::GenerateVirtualCall(
   RecordPcInfo(invoke, invoke->GetDexPc(), slow_path);
 }
 
+void CodeGeneratorX86_64::RecordBootImageIntrinsicPatch(uint32_t intrinsic_data) {
+  boot_image_intrinsic_patches_.emplace_back(/* target_dex_file */ nullptr, intrinsic_data);
+  __ Bind(&boot_image_intrinsic_patches_.back().label);
+}
+
 void CodeGeneratorX86_64::RecordBootImageRelRoPatch(uint32_t boot_image_offset) {
   boot_image_method_patches_.emplace_back(/* target_dex_file */ nullptr, boot_image_offset);
   __ Bind(&boot_image_method_patches_.back().label);
@@ -1108,20 +1113,43 @@ Label* CodeGeneratorX86_64::NewStringBssEntryPatch(HLoadString* load_string) {
   return &string_bss_entry_patches_.back().label;
 }
 
-void CodeGeneratorX86_64::LoadBootImageAddress(CpuRegister reg, uint32_t boot_image_offset) {
-  DCHECK(!GetCompilerOptions().IsBootImage());
-  if (GetCompilerOptions().GetCompilePic()) {
+void CodeGeneratorX86_64::LoadBootImageAddress(CpuRegister reg, uint32_t boot_image_reference) {
+  if (GetCompilerOptions().IsBootImage()) {
+    __ leal(reg, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
+    RecordBootImageIntrinsicPatch(boot_image_reference);
+  } else if (GetCompilerOptions().GetCompilePic()) {
     DCHECK(Runtime::Current()->IsAotCompiler());
     __ movl(reg, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
-    RecordBootImageRelRoPatch(boot_image_offset);
+    RecordBootImageRelRoPatch(boot_image_reference);
   } else {
     gc::Heap* heap = Runtime::Current()->GetHeap();
     DCHECK(!heap->GetBootImageSpaces().empty());
-    const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_offset;
+    const uint8_t* address = heap->GetBootImageSpaces()[0]->Begin() + boot_image_reference;
     __ movl(reg, Immediate(dchecked_integral_cast<uint32_t>(reinterpret_cast<uintptr_t>(address))));
   }
 }
 
+void CodeGeneratorX86_64::AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke,
+                                                       uint32_t boot_image_offset) {
+  DCHECK(invoke->IsStatic());
+  InvokeRuntimeCallingConvention calling_convention;
+  CpuRegister argument = CpuRegister(calling_convention.GetRegisterAt(0));
+  if (GetCompilerOptions().IsBootImage()) {
+    DCHECK_EQ(boot_image_offset, IntrinsicVisitor::IntegerValueOfInfo::kInvalidReference);
+    // Load the class the same way as for HLoadClass::LoadKind::kBootImageLinkTimePcRelative.
+    __ leal(argument,
+            Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
+    MethodReference target_method = invoke->GetTargetMethod();
+    dex::TypeIndex type_idx = target_method.dex_file->GetMethodId(target_method.index).class_idx_;
+    boot_image_type_patches_.emplace_back(target_method.dex_file, type_idx.index_);
+    __ Bind(&boot_image_type_patches_.back().label);
+  } else {
+    LoadBootImageAddress(argument, boot_image_offset);
+  }
+  InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc());
+  CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
+}
+
 // The label points to the end of the "movl" or another instruction but the literal offset
 // for method patch needs to point to the embedded constant which occupies the last 4 bytes.
 constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;
@@ -1137,12 +1165,13 @@ inline void CodeGeneratorX86_64::EmitPcRelativeLinkerPatches(
   }
 }
 
-linker::LinkerPatch DataBimgRelRoPatchAdapter(size_t literal_offset,
-                                              const DexFile* target_dex_file,
-                                              uint32_t pc_insn_offset,
-                                              uint32_t boot_image_offset) {
-  DCHECK(target_dex_file == nullptr);  // Unused for DataBimgRelRoPatch(), should be null.
-  return linker::LinkerPatch::DataBimgRelRoPatch(literal_offset, pc_insn_offset, boot_image_offset);
+template <linker::LinkerPatch (*Factory)(size_t, uint32_t, uint32_t)>
+linker::LinkerPatch NoDexFileAdapter(size_t literal_offset,
+                                     const DexFile* target_dex_file,
+                                     uint32_t pc_insn_offset,
+                                     uint32_t boot_image_offset) {
+  DCHECK(target_dex_file == nullptr);  // Unused for these patches, should be null.
+  return Factory(literal_offset, pc_insn_offset, boot_image_offset);
 }
 
 void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) {
@@ -1153,7 +1182,8 @@ void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* li
       boot_image_type_patches_.size() +
       type_bss_entry_patches_.size() +
       boot_image_string_patches_.size() +
-      string_bss_entry_patches_.size();
+      string_bss_entry_patches_.size() +
+      boot_image_intrinsic_patches_.size();
   linker_patches->reserve(size);
   if (GetCompilerOptions().IsBootImage()) {
     EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeMethodPatch>(
@@ -1162,11 +1192,14 @@ void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* li
         boot_image_type_patches_, linker_patches);
     EmitPcRelativeLinkerPatches<linker::LinkerPatch::RelativeStringPatch>(
         boot_image_string_patches_, linker_patches);
+    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::IntrinsicReferencePatch>>(
+        boot_image_intrinsic_patches_, linker_patches);
   } else {
-    EmitPcRelativeLinkerPatches<DataBimgRelRoPatchAdapter>(
+    EmitPcRelativeLinkerPatches<NoDexFileAdapter<linker::LinkerPatch::DataBimgRelRoPatch>>(
        boot_image_method_patches_, linker_patches);
     DCHECK(boot_image_type_patches_.empty());
     DCHECK(boot_image_string_patches_.empty());
+    DCHECK(boot_image_intrinsic_patches_.empty());
   }
   EmitPcRelativeLinkerPatches<linker::LinkerPatch::MethodBssEntryPatch>(
       method_bss_entry_patches_, linker_patches);
@@ -1268,6 +1301,7 @@ CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
       type_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       boot_image_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       string_bss_entry_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
+      boot_image_intrinsic_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       jit_string_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       jit_class_patches_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)),
       fixups_to_jump_tables_(graph->GetAllocator()->Adapter(kArenaAllocCodeGenerator)) {
diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h
index 0937f55899..5ba7f9cb71 100644
--- a/compiler/optimizing/code_generator_x86_64.h
+++ b/compiler/optimizing/code_generator_x86_64.h
@@ -416,6 +416,7 @@ class CodeGeneratorX86_64 : public CodeGenerator {
   void GenerateVirtualCall(
       HInvokeVirtual* invoke, Location temp, SlowPathCode* slow_path = nullptr) OVERRIDE;
 
+  void RecordBootImageIntrinsicPatch(uint32_t intrinsic_data);
   void RecordBootImageRelRoPatch(uint32_t boot_image_offset);
   void RecordBootImageMethodPatch(HInvokeStaticOrDirect* invoke);
   void RecordMethodBssEntryPatch(HInvokeStaticOrDirect* invoke);
@@ -430,7 +431,8 @@ class CodeGeneratorX86_64 : public CodeGenerator {
                                dex::TypeIndex type_index,
                                Handle<mirror::Class> handle);
 
-  void LoadBootImageAddress(CpuRegister reg, uint32_t boot_image_offset);
+  void LoadBootImageAddress(CpuRegister reg, uint32_t boot_image_reference);
+  void AllocateInstanceForIntrinsic(HInvokeStaticOrDirect* invoke, uint32_t boot_image_offset);
 
   void EmitLinkerPatches(ArenaVector<linker::LinkerPatch>* linker_patches) OVERRIDE;
@@ -621,6 +623,8 @@ class CodeGeneratorX86_64 : public CodeGenerator {
   ArenaDeque<PatchInfo<Label>> boot_image_string_patches_;
   // PC-relative String patch info for kBssEntry.
   ArenaDeque<PatchInfo<Label>> string_bss_entry_patches_;
+  // PC-relative patch info for IntrinsicObjects.
+  ArenaDeque<PatchInfo<Label>> boot_image_intrinsic_patches_;
   // Patches for string literals in JIT compiled code.
   ArenaDeque<PatchInfo<Label>> jit_string_patches_;
diff --git a/compiler/optimizing/intrinsic_objects.h b/compiler/optimizing/intrinsic_objects.h
index ffadd03428..863017be38 100644
--- a/compiler/optimizing/intrinsic_objects.h
+++ b/compiler/optimizing/intrinsic_objects.h
@@ -17,6 +17,8 @@
 #ifndef ART_COMPILER_OPTIMIZING_INTRINSIC_OBJECTS_H_
 #define ART_COMPILER_OPTIMIZING_INTRINSIC_OBJECTS_H_
 
+#include "base/bit_field.h"
+#include "base/bit_utils.h"
 #include "base/mutex.h"
 
 namespace art {
@@ -33,6 +35,26 @@ template <class T> class ObjectArray;
 
 class IntrinsicObjects {
  public:
+  enum class PatchType {
+    kIntegerValueOfObject,
+    kIntegerValueOfArray,
+
+    kLast = kIntegerValueOfArray
+  };
+
+  static uint32_t EncodePatch(PatchType patch_type, uint32_t index = 0u) {
+    DCHECK(patch_type == PatchType::kIntegerValueOfObject || index == 0u);
+    return PatchTypeField::Encode(static_cast<uint32_t>(patch_type)) | IndexField::Encode(index);
+  }
+
+  static PatchType DecodePatchType(uint32_t intrinsic_data) {
+    return static_cast<PatchType>(PatchTypeField::Decode(intrinsic_data));
+  }
+
+  static uint32_t DecodePatchIndex(uint32_t intrinsic_data) {
+    return IndexField::Decode(intrinsic_data);
+  }
+
   static ObjPtr<mirror::ObjectArray<mirror::Object>> AllocateBootImageLiveObjects(
       Thread* self,
       ClassLinker* class_linker) REQUIRES_SHARED(Locks::mutator_lock_);
@@ -47,6 +69,13 @@ class IntrinsicObjects {
   static MemberOffset GetIntegerValueOfArrayDataOffset(
       ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects)
       REQUIRES_SHARED(Locks::mutator_lock_);
+
+ private:
+  static constexpr size_t kPatchTypeBits =
+      MinimumBitsToStore(static_cast<uint32_t>(PatchType::kLast));
+  static constexpr size_t kIndexBits = BitSizeOf<uint32_t>() - kPatchTypeBits;
+  using PatchTypeField = BitField<uint32_t, 0u, kPatchTypeBits>;
+  using IndexField = BitField<uint32_t, kPatchTypeBits, kIndexBits>;
 };
 
 }  // namespace art
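EncodePatch packs a PatchType tag and an optional cache index into the single uint32_t of patch data that the PC-relative patch plumbing already carries; with two patch types, MinimumBitsToStore yields a 1-bit tag, leaving 31 bits for the index. The same layout written out with plain shifts and masks (a self-contained sketch standing in for the BitField helpers, illustration only):

#include <cassert>
#include <cstdint>

enum class PatchType : uint32_t {
  kIntegerValueOfObject = 0u,
  kIntegerValueOfArray = 1u,
};

// 1 tag bit in the low position, 31 index bits above it -- the layout the
// BitField<uint32_t, ...> aliases in intrinsic_objects.h describe.
constexpr uint32_t kPatchTypeBits = 1u;
constexpr uint32_t kPatchTypeMask = (1u << kPatchTypeBits) - 1u;

constexpr uint32_t EncodePatch(PatchType type, uint32_t index = 0u) {
  return static_cast<uint32_t>(type) | (index << kPatchTypeBits);
}
constexpr PatchType DecodePatchType(uint32_t data) {
  return static_cast<PatchType>(data & kPatchTypeMask);
}
constexpr uint32_t DecodePatchIndex(uint32_t data) {
  return data >> kPatchTypeBits;
}

int main() {
  uint32_t data = EncodePatch(PatchType::kIntegerValueOfObject, 42u);
  assert(DecodePatchType(data) == PatchType::kIntegerValueOfObject);
  assert(DecodePatchIndex(data) == 42u);
  assert(DecodePatchIndex(EncodePatch(PatchType::kIntegerValueOfArray)) == 0u);
}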
= "value"; + static ObjPtr<mirror::ObjectArray<mirror::Object>> GetBootImageLiveObjects() REQUIRES_SHARED(Locks::mutator_lock_) { gc::Heap* heap = Runtime::Current()->GetHeap(); @@ -237,6 +244,31 @@ static ObjPtr<mirror::ObjectArray<mirror::Object>> GetBootImageLiveObjects() return boot_image_live_objects; } +static ObjPtr<mirror::Class> LookupInitializedClass(Thread* self, + ClassLinker* class_linker, + const char* descriptor) + REQUIRES_SHARED(Locks::mutator_lock_) { + ObjPtr<mirror::Class> klass = + class_linker->LookupClass(self, descriptor, /* class_loader */ nullptr); + DCHECK(klass != nullptr); + DCHECK(klass->IsInitialized()); + return klass; +} + +static ObjPtr<mirror::ObjectArray<mirror::Object>> GetIntegerCacheArray( + ObjPtr<mirror::Class> cache_class) REQUIRES_SHARED(Locks::mutator_lock_) { + ArtField* cache_field = cache_class->FindDeclaredStaticField("cache", kIntegerArrayDescriptor); + DCHECK(cache_field != nullptr); + return ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(cache_field->GetObject(cache_class)); +} + +static int32_t GetIntegerCacheField(ObjPtr<mirror::Class> cache_class, const char* field_name) + REQUIRES_SHARED(Locks::mutator_lock_) { + ArtField* field = cache_class->FindDeclaredStaticField(field_name, "I"); + DCHECK(field != nullptr); + return field->GetInt(cache_class); +} + static bool CheckIntegerCache(Thread* self, ClassLinker* class_linker, ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects, @@ -246,37 +278,26 @@ static bool CheckIntegerCache(Thread* self, // Since we have a cache in the boot image, both java.lang.Integer and // java.lang.Integer$IntegerCache must be initialized in the boot image. - ObjPtr<mirror::Class> cache_class = class_linker->LookupClass( - self, "Ljava/lang/Integer$IntegerCache;", /* class_loader */ nullptr); - DCHECK(cache_class != nullptr); - DCHECK(cache_class->IsInitialized()); + ObjPtr<mirror::Class> cache_class = + LookupInitializedClass(self, class_linker, kIntegerCacheDescriptor); ObjPtr<mirror::Class> integer_class = - class_linker->LookupClass(self, "Ljava/lang/Integer;", /* class_loader */ nullptr); - DCHECK(integer_class != nullptr); - DCHECK(integer_class->IsInitialized()); + LookupInitializedClass(self, class_linker, kIntegerDescriptor); // Check that the current cache is the same as the `boot_image_cache`. - ArtField* cache_field = cache_class->FindDeclaredStaticField("cache", "[Ljava/lang/Integer;"); - DCHECK(cache_field != nullptr); - ObjPtr<mirror::ObjectArray<mirror::Object>> current_cache = - ObjPtr<mirror::ObjectArray<mirror::Object>>::DownCast(cache_field->GetObject(cache_class)); + ObjPtr<mirror::ObjectArray<mirror::Object>> current_cache = GetIntegerCacheArray(cache_class); if (current_cache != boot_image_cache) { return false; // Messed up IntegerCache.cache. } // Check that the range matches the boot image cache length. - ArtField* low_field = cache_class->FindDeclaredStaticField("low", "I"); - DCHECK(low_field != nullptr); - int32_t low = low_field->GetInt(cache_class); - ArtField* high_field = cache_class->FindDeclaredStaticField("high", "I"); - DCHECK(high_field != nullptr); - int32_t high = high_field->GetInt(cache_class); + int32_t low = GetIntegerCacheField(cache_class, kLowFieldName); + int32_t high = GetIntegerCacheField(cache_class, kHighFieldName); if (boot_image_cache->GetLength() != high - low + 1) { return false; // Messed up IntegerCache.low or IntegerCache.high. } // Check that the elements match the boot image intrinsic objects and check their values as well. 
- ArtField* value_field = integer_class->FindDeclaredInstanceField("value", "I"); + ArtField* value_field = integer_class->FindDeclaredInstanceField(kValueFieldName, "I"); DCHECK(value_field != nullptr); for (int32_t i = 0, len = boot_image_cache->GetLength(); i != len; ++i) { ObjPtr<mirror::Object> boot_image_object = @@ -300,20 +321,64 @@ void IntrinsicVisitor::ComputeIntegerValueOfLocations(HInvoke* invoke, CodeGenerator* codegen, Location return_location, Location first_argument_location) { - if (codegen->GetCompilerOptions().IsBootImage()) { - // TODO: Implement for boot image. We need access to CompilerDriver::IsImageClass() - // to verify that the IntegerCache shall be in the image. - return; - } - Runtime* runtime = Runtime::Current(); - gc::Heap* heap = runtime->GetHeap(); - if (heap->GetBootImageSpaces().empty()) { - return; // Running without boot image, cannot use required boot image objects. - } - // The intrinsic will call if it needs to allocate a j.l.Integer. LocationSummary::CallKind call_kind = LocationSummary::kCallOnMainOnly; - { + const CompilerOptions& compiler_options = codegen->GetCompilerOptions(); + if (compiler_options.IsBootImage()) { + // Piggyback on the method load kind to determine whether we can use PC-relative addressing. + // This should cover both the testing config (non-PIC boot image) and codegens that reject + // PC-relative load kinds and fall back to the runtime call. + if (!invoke->AsInvokeStaticOrDirect()->HasPcRelativeMethodLoadKind()) { + return; + } + if (!compiler_options.IsImageClass(kIntegerCacheDescriptor) || + !compiler_options.IsImageClass(kIntegerDescriptor)) { + return; + } + ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); + Thread* self = Thread::Current(); + ScopedObjectAccess soa(self); + ObjPtr<mirror::Class> cache_class = class_linker->LookupClass( + self, kIntegerCacheDescriptor, /* class_loader */ nullptr); + DCHECK(cache_class != nullptr); + if (UNLIKELY(!cache_class->IsInitialized())) { + LOG(WARNING) << "Image class " << cache_class->PrettyDescriptor() << " is uninitialized."; + return; + } + ObjPtr<mirror::Class> integer_class = + class_linker->LookupClass(self, kIntegerDescriptor, /* class_loader */ nullptr); + DCHECK(integer_class != nullptr); + if (UNLIKELY(!integer_class->IsInitialized())) { + LOG(WARNING) << "Image class " << integer_class->PrettyDescriptor() << " is uninitialized."; + return; + } + int32_t low = GetIntegerCacheField(cache_class, kLowFieldName); + int32_t high = GetIntegerCacheField(cache_class, kHighFieldName); + if (kIsDebugBuild) { + ObjPtr<mirror::ObjectArray<mirror::Object>> current_cache = GetIntegerCacheArray(cache_class); + CHECK(current_cache != nullptr); + CHECK_EQ(current_cache->GetLength(), high - low + 1); + ArtField* value_field = integer_class->FindDeclaredInstanceField(kValueFieldName, "I"); + CHECK(value_field != nullptr); + for (int32_t i = 0, len = current_cache->GetLength(); i != len; ++i) { + ObjPtr<mirror::Object> current_object = current_cache->GetWithoutChecks(i); + CHECK(current_object != nullptr); + CHECK_EQ(value_field->GetInt(current_object), low + i); + } + } + if (invoke->InputAt(0)->IsIntConstant()) { + int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue(); + if (static_cast<uint32_t>(value) - static_cast<uint32_t>(low) < + static_cast<uint32_t>(high - low + 1)) { + // No call, we shall use direct pointer to the Integer object. 
+ call_kind = LocationSummary::kNoCall; + } + } + } else { + Runtime* runtime = Runtime::Current(); + if (runtime->GetHeap()->GetBootImageSpaces().empty()) { + return; // Running without boot image, cannot use required boot image objects. + } Thread* self = Thread::Current(); ScopedObjectAccess soa(self); ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects = GetBootImageLiveObjects(); @@ -336,7 +401,7 @@ void IntrinsicVisitor::ComputeIntegerValueOfLocations(HInvoke* invoke, IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, 0u); ObjPtr<mirror::Class> integer_class = low_integer->GetClass<kVerifyNone, kWithoutReadBarrier>(); - ArtField* value_field = integer_class->FindDeclaredInstanceField("value", "I"); + ArtField* value_field = integer_class->FindDeclaredInstanceField(kValueFieldName, "I"); DCHECK(value_field != nullptr); int32_t low = value_field->GetInt(low_integer); if (static_cast<uint32_t>(value) - static_cast<uint32_t>(low) < @@ -361,15 +426,11 @@ void IntrinsicVisitor::ComputeIntegerValueOfLocations(HInvoke* invoke, } } -static int32_t GetIntegerCacheLowFromIntegerCache(Thread* self) +static int32_t GetIntegerCacheLowFromIntegerCache(Thread* self, ClassLinker* class_linker) REQUIRES_SHARED(Locks::mutator_lock_) { - ObjPtr<mirror::Class> cache_class = Runtime::Current()->GetClassLinker()->LookupClass( - self, "Ljava/lang/Integer$IntegerCache;", /* class_loader */ nullptr); - DCHECK(cache_class != nullptr); - DCHECK(cache_class->IsInitialized()); - ArtField* low_field = cache_class->FindDeclaredStaticField("low", "I"); - DCHECK(low_field != nullptr); - return low_field->GetInt(cache_class); + ObjPtr<mirror::Class> cache_class = + LookupInitializedClass(self, class_linker, kIntegerCacheDescriptor); + return GetIntegerCacheField(cache_class, kLowFieldName); } static uint32_t CalculateBootImageOffset(ObjPtr<mirror::Object> object) @@ -380,13 +441,14 @@ static uint32_t CalculateBootImageOffset(ObjPtr<mirror::Object> object) } inline IntrinsicVisitor::IntegerValueOfInfo::IntegerValueOfInfo() - : integer_boot_image_offset(0u), - value_offset(0), + : value_offset(0), low(0), length(0u), - value_boot_image_offset(0u) {} + integer_boot_image_offset(kInvalidReference), + value_boot_image_reference(kInvalidReference) {} -IntrinsicVisitor::IntegerValueOfInfo IntrinsicVisitor::ComputeIntegerValueOfInfo(HInvoke* invoke) { +IntrinsicVisitor::IntegerValueOfInfo IntrinsicVisitor::ComputeIntegerValueOfInfo( + HInvoke* invoke, const CompilerOptions& compiler_options) { // Note that we could cache all of the data looked up here, but there's no good // location for it. We don't want to add it to WellKnownClasses, to avoid creating global // jni values. Adding it as state to the compiler singleton seems like wrong @@ -398,46 +460,76 @@ IntrinsicVisitor::IntegerValueOfInfo IntrinsicVisitor::ComputeIntegerValueOfInfo // modified through reflection since ComputeIntegerValueOfLocations() when JITting.
Runtime* runtime = Runtime::Current(); + ClassLinker* class_linker = runtime->GetClassLinker(); Thread* self = Thread::Current(); ScopedObjectAccess soa(self); - ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects = GetBootImageLiveObjects(); - ObjPtr<mirror::Object> low_integer = - IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, 0u); - ObjPtr<mirror::Class> integer_class = low_integer->GetClass<kVerifyNone, kWithoutReadBarrier>(); - ArtField* value_field = integer_class->FindDeclaredInstanceField("value", "I"); - DCHECK(value_field != nullptr); IntegerValueOfInfo info; - info.integer_boot_image_offset = CalculateBootImageOffset(integer_class); - info.value_offset = value_field->GetOffset().Uint32Value(); - if (runtime->UseJitCompilation()) { - // Use the current `IntegerCache.low` for JIT to avoid truly surprising behavior if the - // code messes up the `value` field in the lowest cached Integer using reflection. - info.low = GetIntegerCacheLowFromIntegerCache(self); - } else { - // For AOT, the `low_integer->value` should be the same as `IntegerCache.low`. - info.low = value_field->GetInt(low_integer); - DCHECK_EQ(info.low, GetIntegerCacheLowFromIntegerCache(self)); - } - // Do not look at `IntegerCache.high`, use the immutable length of the cache array instead. - info.length = dchecked_integral_cast<uint32_t>( - IntrinsicObjects::GetIntegerValueOfCache(boot_image_live_objects)->GetLength()); - - if (invoke->InputAt(0)->IsIntConstant()) { - int32_t input_value = invoke->InputAt(0)->AsIntConstant()->GetValue(); - uint32_t index = static_cast<uint32_t>(input_value) - static_cast<uint32_t>(info.low); - if (index < static_cast<uint32_t>(info.length)) { - ObjPtr<mirror::Object> integer = - IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, index); - DCHECK(runtime->GetHeap()->ObjectIsInBootImageSpace(integer)); - info.value_boot_image_offset = CalculateBootImageOffset(integer); + if (compiler_options.IsBootImage()) { + ObjPtr<mirror::Class> integer_class = + LookupInitializedClass(self, class_linker, kIntegerDescriptor); + ArtField* value_field = integer_class->FindDeclaredInstanceField(kValueFieldName, "I"); + DCHECK(value_field != nullptr); + info.value_offset = value_field->GetOffset().Uint32Value(); + ObjPtr<mirror::Class> cache_class = + LookupInitializedClass(self, class_linker, kIntegerCacheDescriptor); + info.low = GetIntegerCacheField(cache_class, kLowFieldName); + int32_t high = GetIntegerCacheField(cache_class, kHighFieldName); + info.length = dchecked_integral_cast<uint32_t>(high - info.low + 1); + + info.integer_boot_image_offset = IntegerValueOfInfo::kInvalidReference; + if (invoke->InputAt(0)->IsIntConstant()) { + int32_t input_value = invoke->InputAt(0)->AsIntConstant()->GetValue(); + uint32_t index = static_cast<uint32_t>(input_value) - static_cast<uint32_t>(info.low); + if (index < static_cast<uint32_t>(info.length)) { + info.value_boot_image_reference = IntrinsicObjects::EncodePatch( + IntrinsicObjects::PatchType::kIntegerValueOfObject, index); + } else { + // Not in the cache. + info.value_boot_image_reference = IntegerValueOfInfo::kInvalidReference; + } } else { - info.value_boot_image_offset = 0u; // Not in the cache. 
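When compiling the boot image itself, the cache objects have no known offset yet, so the code above stores a symbolic reference built by IntrinsicObjects::EncodePatch() to be resolved at link time. The sketch below illustrates one plausible packing of a patch type and a cache index into a single uint32_t; the shift and enum values are assumptions for illustration, not ART's actual bit layout.

    #include <cstdint>

    // Sketch of a symbolic patch encoding in the spirit of
    // IntrinsicObjects::EncodePatch(): patch type in the high bits,
    // cache index in the low bits (assumed layout).
    enum class PatchType : uint32_t {
      kIntegerValueOfObject = 0u,
      kIntegerValueOfArray = 1u,
    };

    constexpr uint32_t kPatchTypeShift = 24u;  // Assumption for illustration.

    constexpr uint32_t EncodePatch(PatchType type, uint32_t index = 0u) {
      return (static_cast<uint32_t>(type) << kPatchTypeShift) | index;
    }

    constexpr PatchType DecodePatchType(uint32_t data) {
      return static_cast<PatchType>(data >> kPatchTypeShift);
    }

    constexpr uint32_t DecodePatchIndex(uint32_t data) {
      return data & ((1u << kPatchTypeShift) - 1u);
    }

    // Round-trip checks: the link-time consumer can recover both fields.
    static_assert(DecodePatchType(EncodePatch(PatchType::kIntegerValueOfArray)) ==
                      PatchType::kIntegerValueOfArray,
                  "type round-trip");
    static_assert(DecodePatchIndex(EncodePatch(PatchType::kIntegerValueOfObject, 42u)) == 42u,
                  "index round-trip");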
+ info.array_data_boot_image_reference = + IntrinsicObjects::EncodePatch(IntrinsicObjects::PatchType::kIntegerValueOfArray); } } else { - info.array_data_boot_image_offset = - CalculateBootImageOffset(boot_image_live_objects) + - IntrinsicObjects::GetIntegerValueOfArrayDataOffset(boot_image_live_objects).Uint32Value(); + ObjPtr<mirror::ObjectArray<mirror::Object>> boot_image_live_objects = GetBootImageLiveObjects(); + ObjPtr<mirror::Object> low_integer = + IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, 0u); + ObjPtr<mirror::Class> integer_class = low_integer->GetClass<kVerifyNone, kWithoutReadBarrier>(); + ArtField* value_field = integer_class->FindDeclaredInstanceField(kValueFieldName, "I"); + DCHECK(value_field != nullptr); + info.value_offset = value_field->GetOffset().Uint32Value(); + if (runtime->UseJitCompilation()) { + // Use the current `IntegerCache.low` for JIT to avoid truly surprising behavior if the + // code messes up the `value` field in the lowest cached Integer using reflection. + info.low = GetIntegerCacheLowFromIntegerCache(self, class_linker); + } else { + // For app AOT, the `low_integer->value` should be the same as `IntegerCache.low`. + info.low = value_field->GetInt(low_integer); + DCHECK_EQ(info.low, GetIntegerCacheLowFromIntegerCache(self, class_linker)); + } + // Do not look at `IntegerCache.high`, use the immutable length of the cache array instead. + info.length = dchecked_integral_cast<uint32_t>( + IntrinsicObjects::GetIntegerValueOfCache(boot_image_live_objects)->GetLength()); + + info.integer_boot_image_offset = CalculateBootImageOffset(integer_class); + if (invoke->InputAt(0)->IsIntConstant()) { + int32_t input_value = invoke->InputAt(0)->AsIntConstant()->GetValue(); + uint32_t index = static_cast<uint32_t>(input_value) - static_cast<uint32_t>(info.low); + if (index < static_cast<uint32_t>(info.length)) { + ObjPtr<mirror::Object> integer = + IntrinsicObjects::GetIntegerValueOfObject(boot_image_live_objects, index); + info.value_boot_image_reference = CalculateBootImageOffset(integer); + } else { + // Not in the cache. + info.value_boot_image_reference = IntegerValueOfInfo::kInvalidReference; + } + } else { + info.array_data_boot_image_reference = + CalculateBootImageOffset(boot_image_live_objects) + + IntrinsicObjects::GetIntegerValueOfArrayDataOffset(boot_image_live_objects).Uint32Value(); + } } return info; diff --git a/compiler/optimizing/intrinsics.h b/compiler/optimizing/intrinsics.h index f2b78239d6..993648f765 100644 --- a/compiler/optimizing/intrinsics.h +++ b/compiler/optimizing/intrinsics.h @@ -129,10 +129,10 @@ class IntrinsicVisitor : public ValueObject { // Temporary data structure for holding Integer.valueOf data for generating code. // We only use it if the boot image contains the IntegerCache objects. struct IntegerValueOfInfo { + static constexpr uint32_t kInvalidReference = static_cast<uint32_t>(-1); + IntegerValueOfInfo(); - // Boot image offset of java.lang.Integer for allocating an instance. - uint32_t integer_boot_image_offset; // Offset of the Integer.value field for initializing a newly allocated instance. uint32_t value_offset; // The low value in the cache. @@ -140,18 +140,27 @@ class IntrinsicVisitor : public ValueObject { // The length of the cache array. uint32_t length; + // Boot image offset of java.lang.Integer for allocating an instance. + uint32_t integer_boot_image_offset; // Set to kInvalidReference when compiling the boot image. + + // This union contains references to the boot image. 
For app AOT or JIT compilation, + // these are the boot image offsets of the target. For boot image compilation, the + // location shall be known only at link time, so we encode a symbolic reference using + // IntrinsicObjects::EncodePatch(). union { - // Boot image offset of the target Integer object for constant input in the cache range. - // If the input is out of range, this is set to 0u and the code must allocate a new Integer. - uint32_t value_boot_image_offset; + // The target value for a constant input in the cache range. If the constant input + // is out of range (use `low` and `length` to check), this value is bogus (set to + // kInvalidReference) and the code must allocate a new Integer. + uint32_t value_boot_image_reference; - // Boot image offset of the cache array data used for non-constant input in the cache range. + // The cache array data used for a non-constant input in the cache range. // If the input is out of range, the code must allocate a new Integer. - uint32_t array_data_boot_image_offset; + uint32_t array_data_boot_image_reference; }; }; - static IntegerValueOfInfo ComputeIntegerValueOfInfo(HInvoke* invoke); + static IntegerValueOfInfo ComputeIntegerValueOfInfo( + HInvoke* invoke, const CompilerOptions& compiler_options); protected: IntrinsicVisitor() {} diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc index b4890e4850..4b2bcc8ca8 100644 --- a/compiler/optimizing/intrinsics_arm64.cc +++ b/compiler/optimizing/intrinsics_arm64.cc @@ -2791,28 +2791,27 @@ void IntrinsicLocationsBuilderARM64::VisitIntegerValueOf(HInvoke* invoke) { } void IntrinsicCodeGeneratorARM64::VisitIntegerValueOf(HInvoke* invoke) { - IntrinsicVisitor::IntegerValueOfInfo info = IntrinsicVisitor::ComputeIntegerValueOfInfo(invoke); + IntrinsicVisitor::IntegerValueOfInfo info = + IntrinsicVisitor::ComputeIntegerValueOfInfo(invoke, codegen_->GetCompilerOptions()); LocationSummary* locations = invoke->GetLocations(); MacroAssembler* masm = GetVIXLAssembler(); Register out = RegisterFrom(locations->Out(), DataType::Type::kReference); UseScratchRegisterScope temps(masm); Register temp = temps.AcquireW(); - InvokeRuntimeCallingConvention calling_convention; - Register argument = calling_convention.GetRegisterAt(0); if (invoke->InputAt(0)->IsConstant()) { int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue(); - if (info.value_boot_image_offset != 0u) { + if (static_cast<uint32_t>(value - info.low) < info.length) { // Just embed the j.l.Integer in the code. - codegen_->LoadBootImageAddress(out, info.value_boot_image_offset); + DCHECK_NE(info.value_boot_image_reference, IntegerValueOfInfo::kInvalidReference); + codegen_->LoadBootImageAddress(out, info.value_boot_image_reference); } else { DCHECK(locations->CanCall()); // Allocate and initialize a new j.l.Integer. // TODO: If we JIT, we could allocate the j.l.Integer now, and store it in the // JIT object table. 
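Stepping back to the IntegerValueOfInfo union declared in intrinsics.h above, here is a simplified standalone model of that pattern: a single uint32_t slot whose interpretation depends on whether the invoke input is a constant, with an all-ones sentinel meaning the code must allocate. The sentinel replaces the old 0u, presumably because 0 can be a legitimate value once the slot may hold an encoded patch reference. The union is safe because the constant and non-constant paths are mutually exclusive for any given invoke.

    #include <cstdint>

    // Hypothetical, simplified model of the IntegerValueOfInfo union; not ART code.
    struct ValueOfInfoModel {
      static constexpr uint32_t kInvalidReference = static_cast<uint32_t>(-1);

      union {
        uint32_t value_reference;       // Constant input: the cached object, or the sentinel.
        uint32_t array_data_reference;  // Non-constant input: the cache array data.
      };

      ValueOfInfoModel() : value_reference(kInvalidReference) {}
    };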
- codegen_->LoadBootImageAddress(argument, info.integer_boot_image_offset); - codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc()); - CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>(); + codegen_->AllocateInstanceForIntrinsic(invoke->AsInvokeStaticOrDirect(), + info.integer_boot_image_offset); __ Mov(temp.W(), value); __ Str(temp.W(), HeapOperand(out.W(), info.value_offset)); // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation @@ -2828,7 +2827,7 @@ void IntrinsicCodeGeneratorARM64::VisitIntegerValueOf(HInvoke* invoke) { vixl::aarch64::Label allocate, done; __ B(&allocate, hs); // If the value is within the bounds, load the j.l.Integer directly from the array. - codegen_->LoadBootImageAddress(temp, info.array_data_boot_image_offset); + codegen_->LoadBootImageAddress(temp, info.array_data_boot_image_reference); MemOperand source = HeapOperand( temp, out.X(), LSL, DataType::SizeShift(DataType::Type::kReference)); codegen_->Load(DataType::Type::kReference, out, source); @@ -2836,9 +2835,8 @@ void IntrinsicCodeGeneratorARM64::VisitIntegerValueOf(HInvoke* invoke) { __ B(&done); __ Bind(&allocate); // Otherwise allocate and initialize a new j.l.Integer. - codegen_->LoadBootImageAddress(argument, info.integer_boot_image_offset); - codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc()); - CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>(); + codegen_->AllocateInstanceForIntrinsic(invoke->AsInvokeStaticOrDirect(), + info.integer_boot_image_offset); __ Str(in.W(), HeapOperand(out.W(), info.value_offset)); // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation // one. diff --git a/compiler/optimizing/intrinsics_arm_vixl.cc b/compiler/optimizing/intrinsics_arm_vixl.cc index 0835060f5c..f11e5a1989 100644 --- a/compiler/optimizing/intrinsics_arm_vixl.cc +++ b/compiler/optimizing/intrinsics_arm_vixl.cc @@ -2940,28 +2940,27 @@ void IntrinsicLocationsBuilderARMVIXL::VisitIntegerValueOf(HInvoke* invoke) { } void IntrinsicCodeGeneratorARMVIXL::VisitIntegerValueOf(HInvoke* invoke) { - IntrinsicVisitor::IntegerValueOfInfo info = IntrinsicVisitor::ComputeIntegerValueOfInfo(invoke); + IntrinsicVisitor::IntegerValueOfInfo info = + IntrinsicVisitor::ComputeIntegerValueOfInfo(invoke, codegen_->GetCompilerOptions()); LocationSummary* locations = invoke->GetLocations(); ArmVIXLAssembler* const assembler = GetAssembler(); vixl32::Register out = RegisterFrom(locations->Out()); UseScratchRegisterScope temps(assembler->GetVIXLAssembler()); vixl32::Register temp = temps.Acquire(); - InvokeRuntimeCallingConventionARMVIXL calling_convention; - vixl32::Register argument = calling_convention.GetRegisterAt(0); if (invoke->InputAt(0)->IsConstant()) { int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue(); - if (info.value_boot_image_offset != 0u) { + if (static_cast<uint32_t>(value - info.low) < info.length) { // Just embed the j.l.Integer in the code. - codegen_->LoadBootImageAddress(out, info.value_boot_image_offset); + DCHECK_NE(info.value_boot_image_reference, IntegerValueOfInfo::kInvalidReference); + codegen_->LoadBootImageAddress(out, info.value_boot_image_reference); } else { DCHECK(locations->CanCall()); // Allocate and initialize a new j.l.Integer. // TODO: If we JIT, we could allocate the j.l.Integer now, and store it in the // JIT object table. 
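The recurring comment about `value` being a final field refers to the publication barrier emitted after the store: the field write must be visible to other threads before the newly allocated Integer escapes. A plain C++ analogy using a release fence (an illustration only, not ART code):

    #include <atomic>
    #include <cstdint>

    // Analogy for the barrier after the `value` store: initialize the
    // "final" field, fence, then publish the object.
    struct BoxedInt { int32_t value; };

    std::atomic<BoxedInt*> g_published{nullptr};

    void PublishBoxedInt(BoxedInt* obj, int32_t v) {
      obj->value = v;                                       // Initialize the "final" field.
      std::atomic_thread_fence(std::memory_order_release);  // The barrier in question.
      g_published.store(obj, std::memory_order_relaxed);    // Publish the object.
    }

    int main() {
      static BoxedInt boxed;
      PublishBoxedInt(&boxed, 42);
      return 0;
    }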
- codegen_->LoadBootImageAddress(argument, info.integer_boot_image_offset); - codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc()); - CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>(); + codegen_->AllocateInstanceForIntrinsic(invoke->AsInvokeStaticOrDirect(), + info.integer_boot_image_offset); __ Mov(temp, value); assembler->StoreToOffset(kStoreWord, temp, out, info.value_offset); // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation @@ -2977,15 +2976,14 @@ void IntrinsicCodeGeneratorARMVIXL::VisitIntegerValueOf(HInvoke* invoke) { vixl32::Label allocate, done; __ B(hs, &allocate, /* is_far_target */ false); // If the value is within the bounds, load the j.l.Integer directly from the array. - codegen_->LoadBootImageAddress(temp, info.array_data_boot_image_offset); + codegen_->LoadBootImageAddress(temp, info.array_data_boot_image_reference); codegen_->LoadFromShiftedRegOffset(DataType::Type::kReference, locations->Out(), temp, out); assembler->MaybeUnpoisonHeapReference(out); __ B(&done); __ Bind(&allocate); // Otherwise allocate and initialize a new j.l.Integer. - codegen_->LoadBootImageAddress(argument, info.integer_boot_image_offset); - codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc()); - CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>(); + codegen_->AllocateInstanceForIntrinsic(invoke->AsInvokeStaticOrDirect(), + info.integer_boot_image_offset); assembler->StoreToOffset(kStoreWord, in, out, info.value_offset); // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation // one. diff --git a/compiler/optimizing/intrinsics_mips.cc b/compiler/optimizing/intrinsics_mips.cc index a3eb42b4b7..01d9f962f2 100644 --- a/compiler/optimizing/intrinsics_mips.cc +++ b/compiler/optimizing/intrinsics_mips.cc @@ -2601,28 +2601,27 @@ void IntrinsicLocationsBuilderMIPS::VisitIntegerValueOf(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS::VisitIntegerValueOf(HInvoke* invoke) { - IntrinsicVisitor::IntegerValueOfInfo info = IntrinsicVisitor::ComputeIntegerValueOfInfo(invoke); + IntrinsicVisitor::IntegerValueOfInfo info = + IntrinsicVisitor::ComputeIntegerValueOfInfo(invoke, codegen_->GetCompilerOptions()); LocationSummary* locations = invoke->GetLocations(); MipsAssembler* assembler = GetAssembler(); InstructionCodeGeneratorMIPS* icodegen = down_cast<InstructionCodeGeneratorMIPS*>(codegen_->GetInstructionVisitor()); Register out = locations->Out().AsRegister<Register>(); - InvokeRuntimeCallingConvention calling_convention; - Register argument = calling_convention.GetRegisterAt(0); if (invoke->InputAt(0)->IsConstant()) { int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue(); - if (info.value_boot_image_offset != 0u) { + if (static_cast<uint32_t>(value - info.low) < info.length) { // Just embed the j.l.Integer in the code. - codegen_->LoadBootImageAddress(out, info.value_boot_image_offset); + DCHECK_NE(info.value_boot_image_reference, IntegerValueOfInfo::kInvalidReference); + codegen_->LoadBootImageAddress(out, info.value_boot_image_reference); } else { DCHECK(locations->CanCall()); // Allocate and initialize a new j.l.Integer. // TODO: If we JIT, we could allocate the j.l.Integer now, and store it in the // JIT object table. 
- codegen_->LoadBootImageAddress(argument, info.integer_boot_image_offset); - codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc()); - CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>(); + codegen_->AllocateInstanceForIntrinsic(invoke->AsInvokeStaticOrDirect(), + info.integer_boot_image_offset); __ StoreConstToOffset(kStoreWord, value, out, info.value_offset, TMP); // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation // one. @@ -2645,7 +2644,7 @@ void IntrinsicCodeGeneratorMIPS::VisitIntegerValueOf(HInvoke* invoke) { __ Beqz(AT, &allocate); // If the value is within the bounds, load the j.l.Integer directly from the array. - codegen_->LoadBootImageAddress(TMP, info.array_data_boot_image_offset); + codegen_->LoadBootImageAddress(TMP, info.array_data_boot_image_reference); __ ShiftAndAdd(out, out, TMP, TIMES_4); __ Lw(out, out, 0); __ MaybeUnpoisonHeapReference(out); @@ -2653,9 +2652,8 @@ void IntrinsicCodeGeneratorMIPS::VisitIntegerValueOf(HInvoke* invoke) { __ Bind(&allocate); // Otherwise allocate and initialize a new j.l.Integer. - codegen_->LoadBootImageAddress(argument, info.integer_boot_image_offset); - codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc()); - CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>(); + codegen_->AllocateInstanceForIntrinsic(invoke->AsInvokeStaticOrDirect(), + info.integer_boot_image_offset); __ StoreToOffset(kStoreWord, in, out, info.value_offset); // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation // one. diff --git a/compiler/optimizing/intrinsics_mips64.cc b/compiler/optimizing/intrinsics_mips64.cc index 510040bf38..0bd69c6ec8 100644 --- a/compiler/optimizing/intrinsics_mips64.cc +++ b/compiler/optimizing/intrinsics_mips64.cc @@ -2267,28 +2267,27 @@ void IntrinsicLocationsBuilderMIPS64::VisitIntegerValueOf(HInvoke* invoke) { } void IntrinsicCodeGeneratorMIPS64::VisitIntegerValueOf(HInvoke* invoke) { - IntrinsicVisitor::IntegerValueOfInfo info = IntrinsicVisitor::ComputeIntegerValueOfInfo(invoke); + IntrinsicVisitor::IntegerValueOfInfo info = + IntrinsicVisitor::ComputeIntegerValueOfInfo(invoke, codegen_->GetCompilerOptions()); LocationSummary* locations = invoke->GetLocations(); Mips64Assembler* assembler = GetAssembler(); InstructionCodeGeneratorMIPS64* icodegen = down_cast<InstructionCodeGeneratorMIPS64*>(codegen_->GetInstructionVisitor()); GpuRegister out = locations->Out().AsRegister<GpuRegister>(); - InvokeRuntimeCallingConvention calling_convention; - GpuRegister argument = calling_convention.GetRegisterAt(0); if (invoke->InputAt(0)->IsConstant()) { int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue(); - if (info.value_boot_image_offset != 0u) { + if (static_cast<uint32_t>(value - info.low) < info.length) { // Just embed the j.l.Integer in the code. - codegen_->LoadBootImageAddress(out, info.value_boot_image_offset); + DCHECK_NE(info.value_boot_image_reference, IntegerValueOfInfo::kInvalidReference); + codegen_->LoadBootImageAddress(out, info.value_boot_image_reference); } else { DCHECK(locations->CanCall()); // Allocate and initialize a new j.l.Integer. // TODO: If we JIT, we could allocate the j.l.Integer now, and store it in the // JIT object table. 
- codegen_->LoadBootImageAddress(argument, info.integer_boot_image_offset); - codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc()); - CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>(); + codegen_->AllocateInstanceForIntrinsic(invoke->AsInvokeStaticOrDirect(), + info.integer_boot_image_offset); __ StoreConstToOffset(kStoreWord, value, out, info.value_offset, TMP); // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation // one. @@ -2306,7 +2305,7 @@ void IntrinsicCodeGeneratorMIPS64::VisitIntegerValueOf(HInvoke* invoke) { __ Bgeuc(out, AT, &allocate); // If the value is within the bounds, load the j.l.Integer directly from the array. - codegen_->LoadBootImageAddress(TMP, info.array_data_boot_image_offset); + codegen_->LoadBootImageAddress(TMP, info.array_data_boot_image_reference); __ Dlsa(out, out, TMP, TIMES_4); __ Lwu(out, out, 0); __ MaybeUnpoisonHeapReference(out); @@ -2314,9 +2313,8 @@ void IntrinsicCodeGeneratorMIPS64::VisitIntegerValueOf(HInvoke* invoke) { __ Bind(&allocate); // Otherwise allocate and initialize a new j.l.Integer. - codegen_->LoadBootImageAddress(argument, info.integer_boot_image_offset); - codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc()); - CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>(); + codegen_->AllocateInstanceForIntrinsic(invoke->AsInvokeStaticOrDirect(), + info.integer_boot_image_offset); __ StoreToOffset(kStoreWord, in, out, info.value_offset); // `value` is a final field :-( Ideally, we'd merge this memory barrier with the allocation // one. diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc index 645ca49645..98cea35af1 100644 --- a/compiler/optimizing/intrinsics_x86.cc +++ b/compiler/optimizing/intrinsics_x86.cc @@ -2873,28 +2873,27 @@ void IntrinsicLocationsBuilderX86::VisitIntegerValueOf(HInvoke* invoke) { void IntrinsicCodeGeneratorX86::VisitIntegerValueOf(HInvoke* invoke) { DCHECK(invoke->IsInvokeStaticOrDirect()); - IntrinsicVisitor::IntegerValueOfInfo info = IntrinsicVisitor::ComputeIntegerValueOfInfo(invoke); + IntrinsicVisitor::IntegerValueOfInfo info = + IntrinsicVisitor::ComputeIntegerValueOfInfo(invoke, codegen_->GetCompilerOptions()); LocationSummary* locations = invoke->GetLocations(); X86Assembler* assembler = GetAssembler(); Register out = locations->Out().AsRegister<Register>(); InvokeRuntimeCallingConvention calling_convention; - Register argument = calling_convention.GetRegisterAt(0); if (invoke->InputAt(0)->IsConstant()) { int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue(); - if (info.value_boot_image_offset != 0u) { + if (static_cast<uint32_t>(value - info.low) < info.length) { // Just embed the j.l.Integer in the code. + DCHECK_NE(info.value_boot_image_reference, IntegerValueOfInfo::kInvalidReference); codegen_->LoadBootImageAddress( - out, info.value_boot_image_offset, invoke->AsInvokeStaticOrDirect()); + out, info.value_boot_image_reference, invoke->AsInvokeStaticOrDirect()); } else { DCHECK(locations->CanCall()); // Allocate and initialize a new j.l.Integer. // TODO: If we JIT, we could allocate the j.l.Integer now, and store it in the // JIT object table. 
- codegen_->LoadBootImageAddress( - argument, info.integer_boot_image_offset, invoke->AsInvokeStaticOrDirect()); - codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc()); - CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>(); + codegen_->AllocateInstanceForIntrinsic(invoke->AsInvokeStaticOrDirect(), + info.integer_boot_image_offset); __ movl(Address(out, info.value_offset), Immediate(value)); } } else { @@ -2907,22 +2906,34 @@ void IntrinsicCodeGeneratorX86::VisitIntegerValueOf(HInvoke* invoke) { __ j(kAboveEqual, &allocate); // If the value is within the bounds, load the j.l.Integer directly from the array. constexpr size_t kElementSize = sizeof(mirror::HeapReference<mirror::Object>); - uint32_t mid_array_boot_image_offset = - info.array_data_boot_image_offset - info.low * kElementSize; - codegen_->LoadBootImageAddress( - out, mid_array_boot_image_offset, invoke->AsInvokeStaticOrDirect()); - DCHECK_NE(out, in); static_assert((1u << TIMES_4) == sizeof(mirror::HeapReference<mirror::Object>), "Check heap reference size."); - __ movl(out, Address(out, in, TIMES_4, 0)); + if (codegen_->GetCompilerOptions().IsBootImage()) { + DCHECK_EQ(invoke->InputCount(), invoke->GetNumberOfArguments() + 1u); + size_t method_address_index = invoke->AsInvokeStaticOrDirect()->GetSpecialInputIndex(); + HX86ComputeBaseMethodAddress* method_address = + invoke->InputAt(method_address_index)->AsX86ComputeBaseMethodAddress(); + DCHECK(method_address != nullptr); + Register method_address_reg = + invoke->GetLocations()->InAt(method_address_index).AsRegister<Register>(); + __ movl(out, Address(method_address_reg, out, TIMES_4, CodeGeneratorX86::kDummy32BitOffset)); + codegen_->RecordBootImageIntrinsicPatch(method_address, info.array_data_boot_image_reference); + } else { + // Note: We're about to clobber the index in `out`, so we need to use `in` and + // adjust the offset accordingly. + uint32_t mid_array_boot_image_offset = + info.array_data_boot_image_reference - info.low * kElementSize; + codegen_->LoadBootImageAddress( + out, mid_array_boot_image_offset, invoke->AsInvokeStaticOrDirect()); + DCHECK_NE(out, in); + __ movl(out, Address(out, in, TIMES_4, 0)); + } __ MaybeUnpoisonHeapReference(out); __ jmp(&done); __ Bind(&allocate); // Otherwise allocate and initialize a new j.l.Integer. 
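The x86 boot-image path above addresses the cache array as Address(method_address_reg, out, TIMES_4, kDummy32BitOffset) and records a patch so the linker can substitute the real displacement later. The sketch below illustrates that record-placeholder-then-patch scheme with hypothetical structures; ART's real patch bookkeeping is more involved.

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Emit a recognizable dummy 32-bit displacement now, remember its position
    // and the symbolic target, and let the link-time step overwrite it.
    constexpr uint32_t kDummy32BitOffsetSketch = 0x12345678u;

    struct PatchRecord {
      size_t literal_offset;  // Where the 32-bit displacement sits in the code.
      uint32_t target_data;   // Symbolic reference to resolve at link time.
    };

    struct CodeBufferSketch {
      std::vector<uint8_t> code;
      std::vector<PatchRecord> patches;

      void EmitDisp32WithPatch(uint32_t target_data) {
        patches.push_back(PatchRecord{code.size(), target_data});
        EmitUint32(kDummy32BitOffsetSketch);  // Placeholder bytes.
      }

      void ApplyPatch(const PatchRecord& patch, uint32_t resolved) {
        for (size_t i = 0; i != 4u; ++i) {
          code[patch.literal_offset + i] =
              static_cast<uint8_t>((resolved >> (8u * i)) & 0xffu);  // Little-endian, like x86.
        }
      }

     private:
      void EmitUint32(uint32_t value) {
        for (size_t i = 0; i != 4u; ++i) {
          code.push_back(static_cast<uint8_t>((value >> (8u * i)) & 0xffu));
        }
      }
    };

    int main() {
      CodeBufferSketch buffer;
      buffer.EmitDisp32WithPatch(/* target_data */ 7u);
      buffer.ApplyPatch(buffer.patches[0], /* resolved */ 0x1000u);  // Link-time step.
      return 0;
    }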
- codegen_->LoadBootImageAddress( - argument, info.integer_boot_image_offset, invoke->AsInvokeStaticOrDirect()); - codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc()); - CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>(); + codegen_->AllocateInstanceForIntrinsic(invoke->AsInvokeStaticOrDirect(), + info.integer_boot_image_offset); __ movl(Address(out, info.value_offset), in); __ Bind(&done); } diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc index 6d85f3a1ac..ac6eab0834 100644 --- a/compiler/optimizing/intrinsics_x86_64.cc +++ b/compiler/optimizing/intrinsics_x86_64.cc @@ -2660,7 +2660,8 @@ void IntrinsicLocationsBuilderX86_64::VisitIntegerValueOf(HInvoke* invoke) { } void IntrinsicCodeGeneratorX86_64::VisitIntegerValueOf(HInvoke* invoke) { - IntrinsicVisitor::IntegerValueOfInfo info = IntrinsicVisitor::ComputeIntegerValueOfInfo(invoke); + IntrinsicVisitor::IntegerValueOfInfo info = + IntrinsicVisitor::ComputeIntegerValueOfInfo(invoke, codegen_->GetCompilerOptions()); LocationSummary* locations = invoke->GetLocations(); X86_64Assembler* assembler = GetAssembler(); @@ -2669,17 +2670,17 @@ void IntrinsicCodeGeneratorX86_64::VisitIntegerValueOf(HInvoke* invoke) { CpuRegister argument = CpuRegister(calling_convention.GetRegisterAt(0)); if (invoke->InputAt(0)->IsIntConstant()) { int32_t value = invoke->InputAt(0)->AsIntConstant()->GetValue(); - if (info.value_boot_image_offset != 0u) { + if (static_cast<uint32_t>(value - info.low) < info.length) { // Just embed the j.l.Integer in the code. - codegen_->LoadBootImageAddress(out, info.value_boot_image_offset); + DCHECK_NE(info.value_boot_image_reference, IntegerValueOfInfo::kInvalidReference); + codegen_->LoadBootImageAddress(out, info.value_boot_image_reference); } else { DCHECK(locations->CanCall()); // Allocate and initialize a new j.l.Integer. // TODO: If we JIT, we could allocate the j.l.Integer now, and store it in the // JIT object table. - codegen_->LoadBootImageAddress(argument, info.integer_boot_image_offset); - codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc()); - CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>(); + codegen_->AllocateInstanceForIntrinsic(invoke->AsInvokeStaticOrDirect(), + info.integer_boot_image_offset); __ movl(Address(out, info.value_offset), Immediate(value)); } } else { @@ -2692,7 +2693,7 @@ void IntrinsicCodeGeneratorX86_64::VisitIntegerValueOf(HInvoke* invoke) { __ j(kAboveEqual, &allocate); // If the value is within the bounds, load the j.l.Integer directly from the array. DCHECK_NE(out.AsRegister(), argument.AsRegister()); - codegen_->LoadBootImageAddress(argument, info.array_data_boot_image_offset); + codegen_->LoadBootImageAddress(argument, info.array_data_boot_image_reference); static_assert((1u << TIMES_4) == sizeof(mirror::HeapReference<mirror::Object>), "Check heap reference size."); __ movl(out, Address(argument, out, TIMES_4, 0)); @@ -2700,9 +2701,8 @@ void IntrinsicCodeGeneratorX86_64::VisitIntegerValueOf(HInvoke* invoke) { __ jmp(&done); __ Bind(&allocate); // Otherwise allocate and initialize a new j.l.Integer. 
- codegen_->LoadBootImageAddress(argument, info.integer_boot_image_offset); - codegen_->InvokeRuntime(kQuickAllocObjectInitialized, invoke, invoke->GetDexPc()); - CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>(); + codegen_->AllocateInstanceForIntrinsic(invoke->AsInvokeStaticOrDirect(), + info.integer_boot_image_offset); __ movl(Address(out, info.value_offset), in); __ Bind(&done); }