author    | 2016-09-30 17:04:49 +0000
committer | 2016-09-30 18:08:09 +0100
commit    | 5f926055cb88089d8ca27243f35a9dfd89d981f0 (patch)
tree      | 8d87d400e36301eb648e19bcd225f13c469648ad
parent    | 9e5739aaa690a8529c104f4c05035a657616c310 (diff)
Revert "Store resolved Strings for AOT code in .bss."
There are some issues with oat_test64 on host and aosp_mips-eng.
Also reverts "compiler_driver: Fix build."
Bug: 20323084
Bug: 30627598
This reverts commit 63dccbbefef3014c99c22748d18befcc7bcb3b41.
This reverts commit 04a44135ace10123f059373691594ae0f270a8a4.
Change-Id: I568ba3e58cf103987fdd63c8a21521010a9f27c4
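For context, the reverted change made AOT-compiled code resolve const-string values through GC-root slots in the oat file's .bss section instead of the DexCache. The following standalone sketch (not ART code; the types and the runtime call are stand-ins) shows the shape of that load path, which the removed LoadStringSlowPathARM/LoadStringSlowPathARM64 classes in the diff below implemented in generated code:

```cpp
#include <atomic>
#include <cstdint>

// Stand-ins for ART's mirror::String and the string-resolution entrypoint;
// purely illustrative, not ART's real types or API.
struct String {};
String* ResolveStringFromRuntime(uint32_t string_index) {
  static String resolved;
  (void)string_index;
  return &resolved;
}

// Conceptual shape of the reverted kBssEntry load-string path: compiled code
// reads a GC-root slot in the oat file's .bss; if the slot is still null it
// takes a slow path that resolves the String and stores it back, so later
// executions hit the fast path.
String* LoadStringViaBssEntry(std::atomic<String*>* bss_slot, uint32_t string_index) {
  String* str = bss_slot->load(std::memory_order_acquire);
  if (str == nullptr) {  // Slow path, taken until the entry is populated.
    str = ResolveStringFromRuntime(string_index);
    bss_slot->store(str, std::memory_order_release);
  }
  return str;
}
```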
46 files changed, 692 insertions, 915 deletions
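The core of the revert is in OatWriter: the dedicated InitBssLayout() step, which placed String GC roots after the dex-cache arrays and recorded bss_roots_offset_ for the "oatbssroots" ELF symbol, is folded back into PrepareLayout() without the roots part. A minimal, self-contained sketch of the removed layout computation (simplified types and inputs; not ART code):

```cpp
#include <cstddef>
#include <cstdint>
#include <map>
#include <vector>

constexpr size_t kPageSize = 4096;
constexpr size_t kStringRootSize = sizeof(uint64_t);  // Stand-in for GcRoot<mirror::String>.

size_t RoundUp(size_t value, size_t alignment) {
  return (value + alignment - 1) & ~(alignment - 1);
}

struct BssLayout {
  size_t bss_start;         // Page-aligned end of the oat data.
  size_t bss_size;          // Dex-cache arrays followed by String GC roots.
  size_t bss_roots_offset;  // Where the GC-root part begins ("oatbssroots").
};

// dex_cache_array_sizes: per-dex-file size of its DexCache arrays (hypothetical input).
// string_entries: map from a string key to its eventual .bss patch offset (filled in here).
BssLayout ComputeBssLayout(size_t oat_size,
                           const std::vector<size_t>& dex_cache_array_sizes,
                           std::map<uint32_t, size_t>* string_entries) {
  BssLayout layout{RoundUp(oat_size, kPageSize), 0u, 0u};
  for (size_t size : dex_cache_array_sizes) {
    layout.bss_size += size;  // One dex-cache-arrays block per dex file.
  }
  layout.bss_roots_offset = layout.bss_size;
  for (auto& entry : *string_entries) {
    entry.second = layout.bss_start + layout.bss_size;  // Patch target for this string.
    layout.bss_size += kStringRootSize;
  }
  return layout;
}
```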
diff --git a/compiler/common_compiler_test.cc b/compiler/common_compiler_test.cc index bc8facdb41..b726649138 100644 --- a/compiler/common_compiler_test.cc +++ b/compiler/common_compiler_test.cc @@ -174,12 +174,13 @@ void CommonCompilerTest::SetUp() { void CommonCompilerTest::CreateCompilerDriver(Compiler::Kind kind, InstructionSet isa, size_t number_of_threads) { - compiler_options_->boot_image_ = true; compiler_driver_.reset(new CompilerDriver(compiler_options_.get(), verification_results_.get(), kind, isa, instruction_set_features_.get(), + /* boot_image */ true, + /* app_image */ false, GetImageClasses(), GetCompiledClasses(), GetCompiledMethods(), diff --git a/compiler/compiled_method.h b/compiler/compiled_method.h index 99b0ac10d1..1a87448e80 100644 --- a/compiler/compiled_method.h +++ b/compiler/compiled_method.h @@ -177,7 +177,6 @@ class LinkerPatch { kTypeRelative, // NOTE: Actual patching is instruction_set-dependent. kString, kStringRelative, // NOTE: Actual patching is instruction_set-dependent. - kStringBssEntry, // NOTE: Actual patching is instruction_set-dependent. kDexCacheArray, // NOTE: Actual patching is instruction_set-dependent. }; @@ -245,20 +244,10 @@ class LinkerPatch { return patch; } - static LinkerPatch StringBssEntryPatch(size_t literal_offset, - const DexFile* target_dex_file, - uint32_t pc_insn_offset, - uint32_t target_string_idx) { - LinkerPatch patch(literal_offset, Type::kStringBssEntry, target_dex_file); - patch.string_idx_ = target_string_idx; - patch.pc_insn_offset_ = pc_insn_offset; - return patch; - } - static LinkerPatch DexCacheArrayPatch(size_t literal_offset, const DexFile* target_dex_file, uint32_t pc_insn_offset, - uint32_t element_offset) { + size_t element_offset) { DCHECK(IsUint<32>(element_offset)); LinkerPatch patch(literal_offset, Type::kDexCacheArray, target_dex_file); patch.pc_insn_offset_ = pc_insn_offset; @@ -282,7 +271,6 @@ class LinkerPatch { case Type::kCallRelative: case Type::kTypeRelative: case Type::kStringRelative: - case Type::kStringBssEntry: case Type::kDexCacheArray: return true; default: @@ -308,16 +296,12 @@ class LinkerPatch { } const DexFile* TargetStringDexFile() const { - DCHECK(patch_type_ == Type::kString || - patch_type_ == Type::kStringRelative || - patch_type_ == Type::kStringBssEntry); + DCHECK(patch_type_ == Type::kString || patch_type_ == Type::kStringRelative); return target_dex_file_; } uint32_t TargetStringIndex() const { - DCHECK(patch_type_ == Type::kString || - patch_type_ == Type::kStringRelative || - patch_type_ == Type::kStringBssEntry); + DCHECK(patch_type_ == Type::kString || patch_type_ == Type::kStringRelative); return string_idx_; } @@ -334,7 +318,6 @@ class LinkerPatch { uint32_t PcInsnOffset() const { DCHECK(patch_type_ == Type::kTypeRelative || patch_type_ == Type::kStringRelative || - patch_type_ == Type::kStringBssEntry || patch_type_ == Type::kDexCacheArray); return pc_insn_offset_; } diff --git a/compiler/driver/compiled_method_storage_test.cc b/compiler/driver/compiled_method_storage_test.cc index b72d0acb8e..5063d716d5 100644 --- a/compiler/driver/compiled_method_storage_test.cc +++ b/compiler/driver/compiled_method_storage_test.cc @@ -32,6 +32,8 @@ TEST(CompiledMethodStorage, Deduplicate) { Compiler::kOptimizing, /* instruction_set_ */ kNone, /* instruction_set_features */ nullptr, + /* boot_image */ false, + /* app_image */ false, /* image_classes */ nullptr, /* compiled_classes */ nullptr, /* compiled_methods */ nullptr, diff --git a/compiler/driver/compiler_driver.cc 
b/compiler/driver/compiler_driver.cc index e2f8d929c3..2ec3f164e3 100644 --- a/compiler/driver/compiler_driver.cc +++ b/compiler/driver/compiler_driver.cc @@ -355,6 +355,8 @@ CompilerDriver::CompilerDriver( Compiler::Kind compiler_kind, InstructionSet instruction_set, const InstructionSetFeatures* instruction_set_features, + bool boot_image, + bool app_image, std::unordered_set<std::string>* image_classes, std::unordered_set<std::string>* compiled_classes, std::unordered_set<std::string>* compiled_methods, @@ -375,6 +377,8 @@ CompilerDriver::CompilerDriver( compiled_methods_lock_("compiled method lock"), compiled_methods_(MethodTable::key_compare()), non_relative_linker_patch_count_(0u), + boot_image_(boot_image), + app_image_(app_image), image_classes_(image_classes), classes_to_compile_(compiled_classes), methods_to_compile_(compiled_methods), @@ -400,7 +404,7 @@ CompilerDriver::CompilerDriver( if (compiler_options->VerifyOnlyProfile()) { CHECK(profile_compilation_info_ != nullptr) << "Requires profile"; } - if (GetCompilerOptions().IsBootImage()) { + if (boot_image_) { CHECK(image_classes_.get() != nullptr) << "Expected image classes for boot image"; } } @@ -492,7 +496,7 @@ void CompilerDriver::CompileAll(jobject class_loader, // 3) Attempt to verify all classes // 4) Attempt to initialize image classes, and trivially initialized classes PreCompile(class_loader, dex_files, timings); - if (GetCompilerOptions().IsBootImage()) { + if (IsBootImage()) { // We don't need to setup the intrinsics for non boot image compilation, as // those compilations will pick up a boot image that have the ArtMethod already // set with the intrinsics flag. @@ -978,7 +982,7 @@ void CompilerDriver::PreCompile(jobject class_loader, return; } - if (GetCompilerOptions().IsForceDeterminism() && GetCompilerOptions().IsBootImage()) { + if (GetCompilerOptions().IsForceDeterminism() && IsBootImage()) { // Resolve strings from const-string. Do this now to have a deterministic image. ResolveConstStrings(this, dex_files, timings); VLOG(compiler) << "Resolve const-strings: " << GetMemoryUsageString(false); @@ -1006,7 +1010,7 @@ bool CompilerDriver::IsImageClass(const char* descriptor) const { } // No set of image classes, assume we include all the classes. // NOTE: Currently only reachable from InitImageMethodVisitor for the app image case. - return !GetCompilerOptions().IsBootImage(); + return !IsBootImage(); } bool CompilerDriver::IsClassToCompile(const char* descriptor) const { @@ -1130,7 +1134,7 @@ class RecordImageClassesVisitor : public ClassVisitor { // Make a list of descriptors for classes to include in the image void CompilerDriver::LoadImageClasses(TimingLogger* timings) { CHECK(timings != nullptr); - if (!GetCompilerOptions().IsBootImage()) { + if (!IsBootImage()) { return; } @@ -1358,7 +1362,7 @@ class ClinitImageUpdate { }; void CompilerDriver::UpdateImageClasses(TimingLogger* timings) { - if (GetCompilerOptions().IsBootImage()) { + if (IsBootImage()) { TimingLogger::ScopedTiming t("UpdateImageClasses", timings); Runtime* runtime = Runtime::Current(); @@ -1385,7 +1389,7 @@ bool CompilerDriver::CanAssumeClassIsLoaded(mirror::Class* klass) { // Having the klass reference here implies that the klass is already loaded. return true; } - if (!GetCompilerOptions().IsBootImage()) { + if (!IsBootImage()) { // Assume loaded only if klass is in the boot image. App classes cannot be assumed // loaded because we don't even know what class loader will be used to load them. 
bool class_in_image = runtime->GetHeap()->FindSpaceFromObject(klass, false)->IsImageSpace(); @@ -1410,7 +1414,7 @@ void CompilerDriver::MarkForDexToDexCompilation(Thread* self, const MethodRefere bool CompilerDriver::CanAssumeTypeIsPresentInDexCache(Handle<mirror::DexCache> dex_cache, uint32_t type_idx) { bool result = false; - if ((GetCompilerOptions().IsBootImage() && + if ((IsBootImage() && IsImageClass(dex_cache->GetDexFile()->StringDataByIdx( dex_cache->GetDexFile()->GetTypeId(type_idx).descriptor_idx_))) || Runtime::Current()->UseJitCompilation()) { @@ -1431,13 +1435,13 @@ bool CompilerDriver::CanAssumeStringIsPresentInDexCache(const DexFile& dex_file, // See also Compiler::ResolveDexFile bool result = false; - if (GetCompilerOptions().IsBootImage() || Runtime::Current()->UseJitCompilation()) { + if (IsBootImage() || Runtime::Current()->UseJitCompilation()) { ScopedObjectAccess soa(Thread::Current()); StackHandleScope<1> hs(soa.Self()); ClassLinker* const class_linker = Runtime::Current()->GetClassLinker(); Handle<mirror::DexCache> dex_cache(hs.NewHandle(class_linker->FindDexCache( soa.Self(), dex_file, false))); - if (GetCompilerOptions().IsBootImage()) { + if (IsBootImage()) { // We resolve all const-string strings when building for the image. class_linker->ResolveString(dex_file, string_idx, dex_cache); result = true; @@ -1540,7 +1544,7 @@ bool CompilerDriver::CanEmbedTypeInCode(const DexFile& dex_file, uint32_t type_i if (compiling_boot) { // boot -> boot class pointers. // True if the class is in the image at boot compiling time. - const bool is_image_class = GetCompilerOptions().IsBootImage() && IsImageClass( + const bool is_image_class = IsBootImage() && IsImageClass( dex_file.StringDataByIdx(dex_file.GetTypeId(type_idx).descriptor_idx_)); // True if pc relative load works. if (is_image_class && support_boot_image_fixup) { @@ -1729,7 +1733,7 @@ void CompilerDriver::GetCodeAndMethodForDirectCall(const mirror::Class* referrer if (!use_dex_cache && force_relocations) { bool is_in_image; - if (GetCompilerOptions().IsBootImage()) { + if (IsBootImage()) { is_in_image = IsImageClass(method->GetDeclaringClassDescriptor()); } else { is_in_image = instruction_set_ != kX86 && instruction_set_ != kX86_64 && @@ -2128,7 +2132,7 @@ void CompilerDriver::ResolveDexFile(jobject class_loader, ParallelCompilationManager context(class_linker, class_loader, this, &dex_file, dex_files, thread_pool); - if (GetCompilerOptions().IsBootImage()) { + if (IsBootImage()) { // For images we resolve all types, such as array, whereas for applications just those with // classdefs are resolved by ResolveClassFieldsAndMethods. TimingLogger::ScopedTiming t("Resolve Types", timings); @@ -2238,7 +2242,7 @@ class VerifyClassVisitor : public CompilationVisitor { // It is *very* problematic if there are verification errors in the boot classpath. For example, // we rely on things working OK without verification when the decryption dialog is brought up. // So abort in a debug build if we find this violated. 
- DCHECK(!manager_->GetCompiler()->GetCompilerOptions().IsBootImage() || klass->IsVerified()) + DCHECK(!manager_->GetCompiler()->IsBootImage() || klass->IsVerified()) << "Boot classpath class " << PrettyClass(klass.Get()) << " failed to fully verify."; } soa.Self()->AssertNoPendingException(); @@ -2369,8 +2373,7 @@ class InitializeClassVisitor : public CompilationVisitor { if (!klass->IsInitialized()) { // We need to initialize static fields, we only do this for image classes that aren't // marked with the $NoPreloadHolder (which implies this should not be initialized early). - bool can_init_static_fields = - manager_->GetCompiler()->GetCompilerOptions().IsBootImage() && + bool can_init_static_fields = manager_->GetCompiler()->IsBootImage() && manager_->GetCompiler()->IsImageClass(descriptor) && !StringPiece(descriptor).ends_with("$NoPreloadHolder;"); if (can_init_static_fields) { @@ -2442,7 +2445,7 @@ void CompilerDriver::InitializeClasses(jobject jni_class_loader, ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); ParallelCompilationManager context(class_linker, jni_class_loader, this, &dex_file, dex_files, init_thread_pool); - if (GetCompilerOptions().IsBootImage()) { + if (IsBootImage()) { // TODO: remove this when transactional mode supports multithreading. init_thread_count = 1U; } @@ -2496,7 +2499,7 @@ void CompilerDriver::InitializeClasses(jobject class_loader, CHECK(dex_file != nullptr); InitializeClasses(class_loader, *dex_file, dex_files, timings); } - if (GetCompilerOptions().IsBootImage() || GetCompilerOptions().IsAppImage()) { + if (boot_image_ || app_image_) { // Make sure that we call EnsureIntiailized on all the array classes to call // SetVerificationAttempted so that the access flags are set. If we do not do this they get // changed at runtime resulting in more dirty image pages. @@ -2506,7 +2509,7 @@ void CompilerDriver::InitializeClasses(jobject class_loader, InitializeArrayClassesAndCreateConflictTablesVisitor visitor; Runtime::Current()->GetClassLinker()->VisitClassesWithoutClassesLock(&visitor); } - if (GetCompilerOptions().IsBootImage()) { + if (IsBootImage()) { // Prune garbage objects created during aborted transactions. Runtime::Current()->GetHeap()->CollectGarbage(true); } diff --git a/compiler/driver/compiler_driver.h b/compiler/driver/compiler_driver.h index eb1222c315..52a04cc46b 100644 --- a/compiler/driver/compiler_driver.h +++ b/compiler/driver/compiler_driver.h @@ -90,6 +90,8 @@ class CompilerDriver { Compiler::Kind compiler_kind, InstructionSet instruction_set, const InstructionSetFeatures* instruction_set_features, + bool boot_image, + bool app_image, std::unordered_set<std::string>* image_classes, std::unordered_set<std::string>* compiled_classes, std::unordered_set<std::string>* compiled_methods, @@ -145,6 +147,11 @@ class CompilerDriver { return compiler_.get(); } + // Are we compiling and creating an image file? + bool IsBootImage() const { + return boot_image_; + } + const std::unordered_set<std::string>* GetImageClasses() const { return image_classes_.get(); } @@ -621,6 +628,9 @@ class CompilerDriver { // in the .oat_patches ELF section if requested in the compiler options. size_t non_relative_linker_patch_count_ GUARDED_BY(compiled_methods_lock_); + const bool boot_image_; + const bool app_image_; + // If image_ is true, specifies the classes that will be included in the image. // Note if image_classes_ is null, all classes are included in the image. 
std::unique_ptr<std::unordered_set<std::string>> image_classes_; diff --git a/compiler/driver/compiler_options.cc b/compiler/driver/compiler_options.cc index cbcc169f41..30ba8c9e74 100644 --- a/compiler/driver/compiler_options.cc +++ b/compiler/driver/compiler_options.cc @@ -30,8 +30,6 @@ CompilerOptions::CompilerOptions() inline_depth_limit_(kUnsetInlineDepthLimit), inline_max_code_units_(kUnsetInlineMaxCodeUnits), no_inline_from_(nullptr), - boot_image_(false), - app_image_(false), include_patch_information_(kDefaultIncludePatchInformation), top_k_profile_threshold_(kDefaultTopKProfileThreshold), debuggable_(false), @@ -80,35 +78,34 @@ CompilerOptions::CompilerOptions(CompilerFilter::Filter compiler_filter, bool dump_cfg_append, bool force_determinism, RegisterAllocator::Strategy regalloc_strategy, - const std::vector<std::string>* passes_to_run) - : compiler_filter_(compiler_filter), - huge_method_threshold_(huge_method_threshold), - large_method_threshold_(large_method_threshold), - small_method_threshold_(small_method_threshold), - tiny_method_threshold_(tiny_method_threshold), - num_dex_methods_threshold_(num_dex_methods_threshold), - inline_depth_limit_(inline_depth_limit), - inline_max_code_units_(inline_max_code_units), - no_inline_from_(no_inline_from), - boot_image_(false), - app_image_(false), - include_patch_information_(include_patch_information), - top_k_profile_threshold_(top_k_profile_threshold), - debuggable_(debuggable), - generate_debug_info_(generate_debug_info), - generate_mini_debug_info_(kDefaultGenerateMiniDebugInfo), - implicit_null_checks_(implicit_null_checks), - implicit_so_checks_(implicit_so_checks), - implicit_suspend_checks_(implicit_suspend_checks), - compile_pic_(compile_pic), - verbose_methods_(verbose_methods), - abort_on_hard_verifier_failure_(abort_on_hard_verifier_failure), - init_failure_output_(init_failure_output), - dump_cfg_file_name_(dump_cfg_file_name), - dump_cfg_append_(dump_cfg_append), - force_determinism_(force_determinism), - register_allocation_strategy_(regalloc_strategy), - passes_to_run_(passes_to_run) { + const std::vector<std::string>* passes_to_run + ) : // NOLINT(whitespace/parens) + compiler_filter_(compiler_filter), + huge_method_threshold_(huge_method_threshold), + large_method_threshold_(large_method_threshold), + small_method_threshold_(small_method_threshold), + tiny_method_threshold_(tiny_method_threshold), + num_dex_methods_threshold_(num_dex_methods_threshold), + inline_depth_limit_(inline_depth_limit), + inline_max_code_units_(inline_max_code_units), + no_inline_from_(no_inline_from), + include_patch_information_(include_patch_information), + top_k_profile_threshold_(top_k_profile_threshold), + debuggable_(debuggable), + generate_debug_info_(generate_debug_info), + generate_mini_debug_info_(kDefaultGenerateMiniDebugInfo), + implicit_null_checks_(implicit_null_checks), + implicit_so_checks_(implicit_so_checks), + implicit_suspend_checks_(implicit_suspend_checks), + compile_pic_(compile_pic), + verbose_methods_(verbose_methods), + abort_on_hard_verifier_failure_(abort_on_hard_verifier_failure), + init_failure_output_(init_failure_output), + dump_cfg_file_name_(dump_cfg_file_name), + dump_cfg_append_(dump_cfg_append), + force_determinism_(force_determinism), + register_allocation_strategy_(regalloc_strategy), + passes_to_run_(passes_to_run) { } void CompilerOptions::ParseHugeMethodMax(const StringPiece& option, UsageFn Usage) { diff --git a/compiler/driver/compiler_options.h b/compiler/driver/compiler_options.h index 
8e4a775558..abc58d7dda 100644 --- a/compiler/driver/compiler_options.h +++ b/compiler/driver/compiler_options.h @@ -203,14 +203,6 @@ class CompilerOptions FINAL { return include_patch_information_; } - bool IsBootImage() const { - return boot_image_; - } - - bool IsAppImage() const { - return app_image_; - } - // Should the code be compiled as position independent? bool GetCompilePic() const { return compile_pic_; @@ -289,8 +281,6 @@ class CompilerOptions FINAL { // prefer vector<> over a lookup-oriented container, such as set<>. const std::vector<const DexFile*>* no_inline_from_; - bool boot_image_; - bool app_image_; bool include_patch_information_; // When using a profile file only the top K% of the profiled samples will be compiled. double top_k_profile_threshold_; @@ -315,7 +305,7 @@ class CompilerOptions FINAL { std::string dump_cfg_file_name_; bool dump_cfg_append_; - // Whether the compiler should trade performance for determinism to guarantee exactly reproducible + // Whether the compiler should trade performance for determinism to guarantee exactly reproducable // outcomes. bool force_determinism_; @@ -330,7 +320,6 @@ class CompilerOptions FINAL { const std::vector<std::string>* passes_to_run_; friend class Dex2Oat; - friend class CommonCompilerTest; DISALLOW_COPY_AND_ASSIGN(CompilerOptions); }; diff --git a/compiler/elf_builder.h b/compiler/elf_builder.h index 73240bed03..02831c9dc7 100644 --- a/compiler/elf_builder.h +++ b/compiler/elf_builder.h @@ -619,8 +619,7 @@ class ElfBuilder FINAL { void PrepareDynamicSection(const std::string& elf_file_path, Elf_Word rodata_size, Elf_Word text_size, - Elf_Word bss_size, - Elf_Word bss_roots_offset) { + Elf_Word bss_size) { std::string soname(elf_file_path); size_t directory_separator_pos = soname.rfind('/'); if (directory_separator_pos != std::string::npos) { @@ -660,20 +659,10 @@ class ElfBuilder FINAL { Elf_Word oatlastword_address = rodata_address + rodata_size - 4; dynsym_.Add(oatlastword, rodata_index, oatlastword_address, 4, STB_GLOBAL, STT_OBJECT); } - DCHECK_LE(bss_roots_offset, bss_size); if (bss_size != 0u) { Elf_Word bss_index = rodata_index + 1u + (text_size != 0 ? 1u : 0u); Elf_Word oatbss = dynstr_.Add("oatbss"); - dynsym_.Add(oatbss, bss_index, bss_address, bss_roots_offset, STB_GLOBAL, STT_OBJECT); - // Add a symbol marking the start of the GC roots part of the .bss, if not empty. 
- if (bss_roots_offset != bss_size) { - DCHECK_LT(bss_roots_offset, bss_size); - Elf_Word bss_roots_address = bss_address + bss_roots_offset; - Elf_Word bss_roots_size = bss_size - bss_roots_offset; - Elf_Word oatbssroots = dynstr_.Add("oatbssroots"); - dynsym_.Add( - oatbssroots, bss_index, bss_roots_address, bss_roots_size, STB_GLOBAL, STT_OBJECT); - } + dynsym_.Add(oatbss, bss_index, bss_address, bss_size, STB_GLOBAL, STT_OBJECT); Elf_Word oatbsslastword = dynstr_.Add("oatbsslastword"); Elf_Word bsslastword_address = bss_address + bss_size - 4; dynsym_.Add(oatbsslastword, bss_index, bsslastword_address, 4, STB_GLOBAL, STT_OBJECT); diff --git a/compiler/elf_writer.h b/compiler/elf_writer.h index d55f7458b2..f8f91029d4 100644 --- a/compiler/elf_writer.h +++ b/compiler/elf_writer.h @@ -52,10 +52,7 @@ class ElfWriter { virtual ~ElfWriter() {} virtual void Start() = 0; - virtual void PrepareDynamicSection(size_t rodata_size, - size_t text_size, - size_t bss_size, - size_t bss_roots_offset) = 0; + virtual void SetLoadedSectionSizes(size_t rodata_size, size_t text_size, size_t bss_size) = 0; virtual void PrepareDebugInfo(const ArrayRef<const debug::MethodDebugInfo>& method_infos) = 0; virtual OutputStream* StartRoData() = 0; virtual void EndRoData(OutputStream* rodata) = 0; diff --git a/compiler/elf_writer_quick.cc b/compiler/elf_writer_quick.cc index 36cd2327c4..bed864b534 100644 --- a/compiler/elf_writer_quick.cc +++ b/compiler/elf_writer_quick.cc @@ -93,10 +93,7 @@ class ElfWriterQuick FINAL : public ElfWriter { ~ElfWriterQuick(); void Start() OVERRIDE; - void PrepareDynamicSection(size_t rodata_size, - size_t text_size, - size_t bss_size, - size_t bss_roots_offset) OVERRIDE; + void SetLoadedSectionSizes(size_t rodata_size, size_t text_size, size_t bss_size) OVERRIDE; void PrepareDebugInfo(const ArrayRef<const debug::MethodDebugInfo>& method_infos) OVERRIDE; OutputStream* StartRoData() OVERRIDE; void EndRoData(OutputStream* rodata) OVERRIDE; @@ -170,21 +167,16 @@ void ElfWriterQuick<ElfTypes>::Start() { } template <typename ElfTypes> -void ElfWriterQuick<ElfTypes>::PrepareDynamicSection(size_t rodata_size, +void ElfWriterQuick<ElfTypes>::SetLoadedSectionSizes(size_t rodata_size, size_t text_size, - size_t bss_size, - size_t bss_roots_offset) { + size_t bss_size) { DCHECK_EQ(rodata_size_, 0u); rodata_size_ = rodata_size; DCHECK_EQ(text_size_, 0u); text_size_ = text_size; DCHECK_EQ(bss_size_, 0u); bss_size_ = bss_size; - builder_->PrepareDynamicSection(elf_file_->GetPath(), - rodata_size_, - text_size_, - bss_size_, - bss_roots_offset); + builder_->PrepareDynamicSection(elf_file_->GetPath(), rodata_size_, text_size_, bss_size_); } template <typename ElfTypes> diff --git a/compiler/image_test.cc b/compiler/image_test.cc index 9e94b9d861..4689c9d300 100644 --- a/compiler/image_test.cc +++ b/compiler/image_test.cc @@ -263,10 +263,7 @@ void CompilationHelper::Compile(CompilerDriver* driver, oat_writer->PrepareLayout(driver, writer.get(), cur_dex_files, &patcher); size_t rodata_size = oat_writer->GetOatHeader().GetExecutableOffset(); size_t text_size = oat_writer->GetOatSize() - rodata_size; - elf_writer->PrepareDynamicSection(rodata_size, - text_size, - oat_writer->GetBssSize(), - oat_writer->GetBssRootsOffset()); + elf_writer->SetLoadedSectionSizes(rodata_size, text_size, oat_writer->GetBssSize()); writer->UpdateOatFileLayout(i, elf_writer->GetLoadedSize(), diff --git a/compiler/jit/jit_compiler.cc b/compiler/jit/jit_compiler.cc index 4ef2db8b91..4f8690530b 100644 --- 
a/compiler/jit/jit_compiler.cc +++ b/compiler/jit/jit_compiler.cc @@ -156,6 +156,8 @@ JitCompiler::JitCompiler() { Compiler::kOptimizing, instruction_set, instruction_set_features_.get(), + /* boot_image */ false, + /* app_image */ false, /* image_classes */ nullptr, /* compiled_classes */ nullptr, /* compiled_methods */ nullptr, diff --git a/compiler/linker/arm64/relative_patcher_arm64.cc b/compiler/linker/arm64/relative_patcher_arm64.cc index 3b7788068e..4c8788e30d 100644 --- a/compiler/linker/arm64/relative_patcher_arm64.cc +++ b/compiler/linker/arm64/relative_patcher_arm64.cc @@ -222,10 +222,9 @@ void Arm64RelativePatcher::PatchPcRelativeReference(std::vector<uint8_t>* code, } shift = 0u; // No shift for ADD. } else { - // LDR/STR 32-bit or 64-bit with imm12 == 0 (unset). - DCHECK(patch.GetType() == LinkerPatch::Type::kDexCacheArray || - patch.GetType() == LinkerPatch::Type::kStringBssEntry) << patch.GetType(); - DCHECK_EQ(insn & 0xbfbffc00, 0xb9000000) << std::hex << insn; + // LDR 32-bit or 64-bit with imm12 == 0 (unset). + DCHECK(patch.GetType() == LinkerPatch::Type::kDexCacheArray) << patch.GetType(); + DCHECK_EQ(insn & 0xbffffc00, 0xb9400000) << std::hex << insn; } if (kIsDebugBuild) { uint32_t adrp = GetInsn(code, pc_insn_offset); diff --git a/compiler/linker/relative_patcher_test.h b/compiler/linker/relative_patcher_test.h index 015178980c..62b3a0a167 100644 --- a/compiler/linker/relative_patcher_test.h +++ b/compiler/linker/relative_patcher_test.h @@ -47,6 +47,8 @@ class RelativePatcherTest : public testing::Test { Compiler::kQuick, instruction_set, /* instruction_set_features*/ nullptr, + /* boot_image */ false, + /* app_image */ false, /* image_classes */ nullptr, /* compiled_classes */ nullptr, /* compiled_methods */ nullptr, diff --git a/compiler/oat_test.cc b/compiler/oat_test.cc index 3fddbde4b7..e8bc67d967 100644 --- a/compiler/oat_test.cc +++ b/compiler/oat_test.cc @@ -108,6 +108,8 @@ class OatTest : public CommonCompilerTest { compiler_kind, insn_set, insn_features_.get(), + /* boot_image */ false, + /* app_image */ false, /* image_classes */ nullptr, /* compiled_classes */ nullptr, /* compiled_methods */ nullptr, @@ -205,10 +207,7 @@ class OatTest : public CommonCompilerTest { oat_writer.PrepareLayout(compiler_driver_.get(), nullptr, dex_files, &patcher); size_t rodata_size = oat_writer.GetOatHeader().GetExecutableOffset(); size_t text_size = oat_writer.GetOatSize() - rodata_size; - elf_writer->PrepareDynamicSection(rodata_size, - text_size, - oat_writer.GetBssSize(), - oat_writer.GetBssRootsOffset()); + elf_writer->SetLoadedSectionSizes(rodata_size, text_size, oat_writer.GetBssSize()); if (!oat_writer.WriteRodata(oat_rodata)) { return false; diff --git a/compiler/oat_writer.cc b/compiler/oat_writer.cc index 44c26edd71..54ec7c1edb 100644 --- a/compiler/oat_writer.cc +++ b/compiler/oat_writer.cc @@ -300,10 +300,7 @@ OatWriter::OatWriter(bool compiling_boot_image, TimingLogger* timings) vdex_dex_files_offset_(0u), vdex_verifier_deps_offset_(0u), oat_size_(0u), - bss_start_(0u), bss_size_(0u), - bss_roots_offset_(0u), - bss_string_entries_(), oat_data_offset_(0u), oat_header_(nullptr), size_vdex_header_(0), @@ -557,8 +554,15 @@ void OatWriter::PrepareLayout(const CompilerDriver* compiler, oat_size_ = offset; if (!HasBootImage()) { - TimingLogger::ScopedTiming split("InitBssLayout", timings_); - InitBssLayout(instruction_set); + // Allocate space for app dex cache arrays in the .bss section. 
+ size_t bss_start = RoundUp(oat_size_, kPageSize); + PointerSize pointer_size = GetInstructionSetPointerSize(instruction_set); + bss_size_ = 0u; + for (const DexFile* dex_file : *dex_files_) { + dex_cache_arrays_offsets_.Put(dex_file, bss_start + bss_size_); + DexCacheArraysLayout layout(pointer_size, dex_file); + bss_size_ += layout.Size(); + } } CHECK_EQ(dex_files_->size(), oat_dex_files_.size()); @@ -801,10 +805,6 @@ class OatWriter::InitCodeMethodVisitor : public OatDexMethodVisitor { if (!patch.IsPcRelative()) { writer_->absolute_patch_locations_.push_back(base_loc + patch.LiteralOffset()); } - if (patch.GetType() == LinkerPatch::Type::kStringBssEntry) { - StringReference ref(patch.TargetStringDexFile(), patch.TargetStringIndex()); - writer_->bss_string_entries_.Overwrite(ref, /* placeholder */ 0u); - } } } } @@ -1115,15 +1115,6 @@ class OatWriter::WriteCodeMethodVisitor : public OatDexMethodVisitor { target_offset); break; } - case LinkerPatch::Type::kStringBssEntry: { - StringReference ref(patch.TargetStringDexFile(), patch.TargetStringIndex()); - uint32_t target_offset = writer_->bss_string_entries_.Get(ref); - writer_->relative_patcher_->PatchPcRelativeReference(&patched_code_, - patch, - offset_ + literal_offset, - target_offset); - break; - } case LinkerPatch::Type::kTypeRelative: { uint32_t target_offset = GetTargetObjectOffset(GetTargetType(patch)); writer_->relative_patcher_->PatchPcRelativeReference(&patched_code_, @@ -1509,7 +1500,7 @@ size_t OatWriter::InitOatCode(size_t offset) { offset = RoundUp(offset, kPageSize); oat_header_->SetExecutableOffset(offset); size_executable_offset_alignment_ = offset - old_offset; - if (compiler_driver_->GetCompilerOptions().IsBootImage()) { + if (compiler_driver_->IsBootImage()) { InstructionSet instruction_set = compiler_driver_->GetInstructionSet(); #define DO_TRAMPOLINE(field, fn_name) \ @@ -1557,29 +1548,6 @@ size_t OatWriter::InitOatCodeDexFiles(size_t offset) { return offset; } -void OatWriter::InitBssLayout(InstructionSet instruction_set) { - DCHECK(!HasBootImage()); - - // Allocate space for app dex cache arrays in the .bss section. - bss_start_ = RoundUp(oat_size_, kPageSize); - PointerSize pointer_size = GetInstructionSetPointerSize(instruction_set); - bss_size_ = 0u; - for (const DexFile* dex_file : *dex_files_) { - dex_cache_arrays_offsets_.Put(dex_file, bss_start_ + bss_size_); - DexCacheArraysLayout layout(pointer_size, dex_file); - bss_size_ += layout.Size(); - } - - bss_roots_offset_ = bss_size_; - - // Prepare offsets for .bss String entries. 
- for (auto& entry : bss_string_entries_) { - DCHECK_EQ(entry.second, 0u); - entry.second = bss_start_ + bss_size_; - bss_size_ += sizeof(GcRoot<mirror::String>); - } -} - bool OatWriter::WriteRodata(OutputStream* out) { CHECK(write_state_ == WriteState::kWriteRoData); @@ -1768,7 +1736,7 @@ bool OatWriter::WriteHeader(OutputStream* out, oat_header_->SetImageFileLocationOatChecksum(image_file_location_oat_checksum); oat_header_->SetImageFileLocationOatDataBegin(image_file_location_oat_begin); - if (compiler_driver_->GetCompilerOptions().IsBootImage()) { + if (compiler_driver_->IsBootImage()) { CHECK_EQ(image_patch_delta, 0); CHECK_EQ(oat_header_->GetImagePatchDelta(), 0); } else { @@ -1858,7 +1826,7 @@ size_t OatWriter::WriteMaps(OutputStream* out, const size_t file_offset, size_t } size_t OatWriter::WriteCode(OutputStream* out, const size_t file_offset, size_t relative_offset) { - if (compiler_driver_->GetCompilerOptions().IsBootImage()) { + if (compiler_driver_->IsBootImage()) { InstructionSet instruction_set = compiler_driver_->GetInstructionSet(); #define DO_TRAMPOLINE(field) \ diff --git a/compiler/oat_writer.h b/compiler/oat_writer.h index 1cc193b341..670accbbaf 100644 --- a/compiler/oat_writer.h +++ b/compiler/oat_writer.h @@ -30,7 +30,6 @@ #include "oat.h" #include "os.h" #include "safe_map.h" -#include "string_reference.h" namespace art { @@ -195,10 +194,6 @@ class OatWriter { return bss_size_; } - size_t GetBssRootsOffset() const { - return bss_roots_offset_; - } - size_t GetOatDataOffset() const { return oat_data_offset_; } @@ -270,7 +265,6 @@ class OatWriter { size_t InitOatMaps(size_t offset); size_t InitOatCode(size_t offset); size_t InitOatCodeDexFiles(size_t offset); - void InitBssLayout(InstructionSet instruction_set); bool WriteClassOffsets(OutputStream* out); bool WriteClasses(OutputStream* out); @@ -328,20 +322,9 @@ class OatWriter { // Size required for Oat data structures. size_t oat_size_; - // The start of the required .bss section. - size_t bss_start_; - - // The size of the required .bss section holding the DexCache data and GC roots. + // The size of the required .bss section holding the DexCache data. size_t bss_size_; - // The offset of the GC roots in .bss section. - size_t bss_roots_offset_; - - // Map for allocating String entries in .bss. Indexed by StringReference for the source - // string in the dex file with the "string value comparator" for deduplication. The value - // is the target offset for patching, starting at `bss_start_ + bss_roots_offset_`. - SafeMap<StringReference, size_t, StringReferenceValueComparator> bss_string_entries_; - // Offsets of the dex cache arrays for each app dex file. For the // boot image, this information is provided by the ImageWriter. SafeMap<const DexFile*, size_t> dex_cache_arrays_offsets_; // DexFiles not owned. diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h index 49f4f18390..85002045a3 100644 --- a/compiler/optimizing/code_generator.h +++ b/compiler/optimizing/code_generator.h @@ -531,15 +531,40 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> { uint32_t GetReferenceDisableFlagOffset() const; protected: - // Patch info used for recording locations of required linker patches and their targets, - // i.e. target method, string, type or code identified by their dex file and index. + // Method patch info used for recording locations of required linker patches and + // target methods. 
The target method can be used for various purposes, whether for + // patching the address of the method or the code pointer or a PC-relative call. template <typename LabelType> - struct PatchInfo { - PatchInfo(const DexFile& target_dex_file, uint32_t target_index) - : dex_file(target_dex_file), index(target_index) { } + struct MethodPatchInfo { + explicit MethodPatchInfo(MethodReference m) : target_method(m), label() { } + + MethodReference target_method; + LabelType label; + }; + + // String patch info used for recording locations of required linker patches and + // target strings. The actual string address can be absolute or PC-relative. + template <typename LabelType> + struct StringPatchInfo { + StringPatchInfo(const DexFile& df, uint32_t index) + : dex_file(df), string_index(index), label() { } + + const DexFile& dex_file; + uint32_t string_index; + LabelType label; + }; + + // Type patch info used for recording locations of required linker patches and + // target types. The actual type address can be absolute or PC-relative. + // TODO: Consider merging with MethodPatchInfo and StringPatchInfo - all these + // classes contain the dex file, some index and the label. + template <typename LabelType> + struct TypePatchInfo { + TypePatchInfo(const DexFile& df, uint32_t index) + : dex_file(df), type_index(index), label() { } const DexFile& dex_file; - uint32_t index; + uint32_t type_index; LabelType label; }; diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc index e343657f29..55e122150e 100644 --- a/compiler/optimizing/code_generator_arm.cc +++ b/compiler/optimizing/code_generator_arm.cc @@ -422,50 +422,6 @@ class LoadClassSlowPathARM : public SlowPathCodeARM { DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM); }; -class LoadStringSlowPathARM : public SlowPathCodeARM { - public: - explicit LoadStringSlowPathARM(HLoadString* instruction) : SlowPathCodeARM(instruction) {} - - void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { - LocationSummary* locations = instruction_->GetLocations(); - DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg())); - - CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen); - __ Bind(GetEntryLabel()); - SaveLiveRegisters(codegen, locations); - - InvokeRuntimeCallingConvention calling_convention; - HLoadString* load = instruction_->AsLoadString(); - const uint32_t string_index = load->GetStringIndex(); - __ LoadImmediate(calling_convention.GetRegisterAt(0), string_index); - arm_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this); - CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>(); - arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0)); - - RestoreLiveRegisters(codegen, locations); - - // Store the resolved String to the BSS entry. - // TODO: Change art_quick_resolve_string to kSaveEverything and use a temporary for the - // .bss entry address in the fast path, so that we can avoid another calculation here. 
- CodeGeneratorARM::PcRelativePatchInfo* labels = - arm_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index); - __ BindTrackedLabel(&labels->movw_label); - __ movw(IP, /* placeholder */ 0u); - __ BindTrackedLabel(&labels->movt_label); - __ movt(IP, /* placeholder */ 0u); - __ BindTrackedLabel(&labels->add_pc_label); - __ add(IP, IP, ShifterOperand(PC)); - __ str(locations->Out().AsRegister<Register>(), Address(IP)); - - __ b(GetExitLabel()); - } - - const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM"; } - - private: - DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM); -}; - class TypeCheckSlowPathARM : public SlowPathCodeARM { public: TypeCheckSlowPathARM(HInstruction* instruction, bool is_fatal) @@ -5639,8 +5595,15 @@ HLoadString::LoadKind CodeGeneratorARM::GetSupportedLoadStringKind( case HLoadString::LoadKind::kDexCacheAddress: DCHECK(Runtime::Current()->UseJitCompilation()); break; - case HLoadString::LoadKind::kBssEntry: + case HLoadString::LoadKind::kDexCachePcRelative: DCHECK(!Runtime::Current()->UseJitCompilation()); + // We disable pc-relative load when there is an irreducible loop, as the optimization + // is incompatible with it. + // TODO: Create as many ArmDexCacheArraysBase instructions as needed for methods + // with irreducible loops. + if (GetGraph()->HasIrreducibleLoops()) { + return HLoadString::LoadKind::kDexCacheViaMethod; + } break; case HLoadString::LoadKind::kDexCacheViaMethod: break; @@ -5650,13 +5613,12 @@ HLoadString::LoadKind CodeGeneratorARM::GetSupportedLoadStringKind( void LocationsBuilderARM::VisitLoadString(HLoadString* load) { LocationSummary::CallKind call_kind = load->NeedsEnvironment() - ? ((load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) - ? LocationSummary::kCallOnMainOnly - : LocationSummary::kCallOnSlowPath) + ? LocationSummary::kCallOnMainOnly : LocationSummary::kNoCall; LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind); HLoadString::LoadKind load_kind = load->GetLoadKind(); + DCHECK(load_kind != HLoadString::LoadKind::kDexCachePcRelative) << "Not supported"; if (load_kind == HLoadString::LoadKind::kDexCacheViaMethod) { locations->SetInAt(0, Location::RequiresRegister()); locations->SetOut(Location::RegisterLocation(R0)); @@ -5678,7 +5640,6 @@ void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) { return; // No dex cache slow path. } case HLoadString::LoadKind::kBootImageLinkTimePcRelative: { - DCHECK(codegen_->GetCompilerOptions().IsBootImage()); CodeGeneratorARM::PcRelativePatchInfo* labels = codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex()); __ BindTrackedLabel(&labels->movw_label); @@ -5695,23 +5656,6 @@ void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) { __ LoadLiteral(out, codegen_->DeduplicateBootImageAddressLiteral(address)); return; // No dex cache slow path. 
} - case HLoadString::LoadKind::kBssEntry: { - DCHECK(!codegen_->GetCompilerOptions().IsBootImage()); - CodeGeneratorARM::PcRelativePatchInfo* labels = - codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex()); - __ BindTrackedLabel(&labels->movw_label); - __ movw(out, /* placeholder */ 0u); - __ BindTrackedLabel(&labels->movt_label); - __ movt(out, /* placeholder */ 0u); - __ BindTrackedLabel(&labels->add_pc_label); - __ add(out, out, ShifterOperand(PC)); - GenerateGcRootFieldLoad(load, out_loc, out, 0); - SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load); - codegen_->AddSlowPath(slow_path); - __ CompareAndBranchIfZero(out, slow_path->GetEntryLabel()); - __ Bind(slow_path->GetExitLabel()); - return; - } default: break; } @@ -6860,8 +6804,7 @@ void CodeGeneratorARM::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, __ bl(GetFrameEntryLabel()); break; case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: - relative_call_patches_.emplace_back(*invoke->GetTargetMethod().dex_file, - invoke->GetTargetMethod().dex_method_index); + relative_call_patches_.emplace_back(invoke->GetTargetMethod()); __ BindTrackedLabel(&relative_call_patches_.back().label); // Arbitrarily branch to the BL itself, override at link time. __ bl(&relative_call_patches_.back().label); @@ -6963,37 +6906,17 @@ Literal* CodeGeneratorARM::DeduplicateDexCacheAddressLiteral(uint32_t address) { return DeduplicateUint32Literal(address, &uint32_literals_); } -template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)> -inline void CodeGeneratorARM::EmitPcRelativeLinkerPatches( - const ArenaDeque<PcRelativePatchInfo>& infos, - ArenaVector<LinkerPatch>* linker_patches) { - for (const PcRelativePatchInfo& info : infos) { - const DexFile& dex_file = info.target_dex_file; - size_t offset_or_index = info.offset_or_index; - DCHECK(info.add_pc_label.IsBound()); - uint32_t add_pc_offset = dchecked_integral_cast<uint32_t>(info.add_pc_label.Position()); - // Add MOVW patch. - DCHECK(info.movw_label.IsBound()); - uint32_t movw_offset = dchecked_integral_cast<uint32_t>(info.movw_label.Position()); - linker_patches->push_back(Factory(movw_offset, &dex_file, add_pc_offset, offset_or_index)); - // Add MOVT patch. 
- DCHECK(info.movt_label.IsBound()); - uint32_t movt_offset = dchecked_integral_cast<uint32_t>(info.movt_label.Position()); - linker_patches->push_back(Factory(movt_offset, &dex_file, add_pc_offset, offset_or_index)); - } -} - void CodeGeneratorARM::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) { DCHECK(linker_patches->empty()); size_t size = method_patches_.size() + call_patches_.size() + relative_call_patches_.size() + - /* MOVW+MOVT for each entry */ 2u * pc_relative_dex_cache_patches_.size() + + /* MOVW+MOVT for each base */ 2u * pc_relative_dex_cache_patches_.size() + boot_image_string_patches_.size() + - /* MOVW+MOVT for each entry */ 2u * pc_relative_string_patches_.size() + + /* MOVW+MOVT for each base */ 2u * pc_relative_string_patches_.size() + boot_image_type_patches_.size() + - /* MOVW+MOVT for each entry */ 2u * pc_relative_type_patches_.size() + + /* MOVW+MOVT for each base */ 2u * pc_relative_type_patches_.size() + boot_image_address_patches_.size(); linker_patches->reserve(size); for (const auto& entry : method_patches_) { @@ -7014,13 +6937,32 @@ void CodeGeneratorARM::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patche target_method.dex_file, target_method.dex_method_index)); } - for (const PatchInfo<Label>& info : relative_call_patches_) { + for (const MethodPatchInfo<Label>& info : relative_call_patches_) { uint32_t literal_offset = info.label.Position(); - linker_patches->push_back( - LinkerPatch::RelativeCodePatch(literal_offset, &info.dex_file, info.index)); + linker_patches->push_back(LinkerPatch::RelativeCodePatch(literal_offset, + info.target_method.dex_file, + info.target_method.dex_method_index)); + } + for (const PcRelativePatchInfo& info : pc_relative_dex_cache_patches_) { + const DexFile& dex_file = info.target_dex_file; + size_t base_element_offset = info.offset_or_index; + DCHECK(info.add_pc_label.IsBound()); + uint32_t add_pc_offset = dchecked_integral_cast<uint32_t>(info.add_pc_label.Position()); + // Add MOVW patch. + DCHECK(info.movw_label.IsBound()); + uint32_t movw_offset = dchecked_integral_cast<uint32_t>(info.movw_label.Position()); + linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(movw_offset, + &dex_file, + add_pc_offset, + base_element_offset)); + // Add MOVT patch. + DCHECK(info.movt_label.IsBound()); + uint32_t movt_offset = dchecked_integral_cast<uint32_t>(info.movt_label.Position()); + linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(movt_offset, + &dex_file, + add_pc_offset, + base_element_offset)); } - EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_, - linker_patches); for (const auto& entry : boot_image_string_patches_) { const StringReference& target_string = entry.first; Literal* literal = entry.second; @@ -7030,12 +6972,25 @@ void CodeGeneratorARM::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patche target_string.dex_file, target_string.string_index)); } - if (!GetCompilerOptions().IsBootImage()) { - EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_, - linker_patches); - } else { - EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_, - linker_patches); + for (const PcRelativePatchInfo& info : pc_relative_string_patches_) { + const DexFile& dex_file = info.target_dex_file; + uint32_t string_index = info.offset_or_index; + DCHECK(info.add_pc_label.IsBound()); + uint32_t add_pc_offset = dchecked_integral_cast<uint32_t>(info.add_pc_label.Position()); + // Add MOVW patch. 
+ DCHECK(info.movw_label.IsBound()); + uint32_t movw_offset = dchecked_integral_cast<uint32_t>(info.movw_label.Position()); + linker_patches->push_back(LinkerPatch::RelativeStringPatch(movw_offset, + &dex_file, + add_pc_offset, + string_index)); + // Add MOVT patch. + DCHECK(info.movt_label.IsBound()); + uint32_t movt_offset = dchecked_integral_cast<uint32_t>(info.movt_label.Position()); + linker_patches->push_back(LinkerPatch::RelativeStringPatch(movt_offset, + &dex_file, + add_pc_offset, + string_index)); } for (const auto& entry : boot_image_type_patches_) { const TypeReference& target_type = entry.first; @@ -7046,8 +7001,26 @@ void CodeGeneratorARM::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patche target_type.dex_file, target_type.type_index)); } - EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_, - linker_patches); + for (const PcRelativePatchInfo& info : pc_relative_type_patches_) { + const DexFile& dex_file = info.target_dex_file; + uint32_t type_index = info.offset_or_index; + DCHECK(info.add_pc_label.IsBound()); + uint32_t add_pc_offset = dchecked_integral_cast<uint32_t>(info.add_pc_label.Position()); + // Add MOVW patch. + DCHECK(info.movw_label.IsBound()); + uint32_t movw_offset = dchecked_integral_cast<uint32_t>(info.movw_label.Position()); + linker_patches->push_back(LinkerPatch::RelativeTypePatch(movw_offset, + &dex_file, + add_pc_offset, + type_index)); + // Add MOVT patch. + DCHECK(info.movt_label.IsBound()); + uint32_t movt_offset = dchecked_integral_cast<uint32_t>(info.movt_label.Position()); + linker_patches->push_back(LinkerPatch::RelativeTypePatch(movt_offset, + &dex_file, + add_pc_offset, + type_index)); + } for (const auto& entry : boot_image_address_patches_) { DCHECK(GetCompilerOptions().GetIncludePatchInformation()); Literal* literal = entry.second; diff --git a/compiler/optimizing/code_generator_arm.h b/compiler/optimizing/code_generator_arm.h index ef2e23f258..6416d40f7f 100644 --- a/compiler/optimizing/code_generator_arm.h +++ b/compiler/optimizing/code_generator_arm.h @@ -593,10 +593,6 @@ class CodeGeneratorARM : public CodeGenerator { uint32_t offset_or_index, ArenaDeque<PcRelativePatchInfo>* patches); - template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)> - static void EmitPcRelativeLinkerPatches(const ArenaDeque<PcRelativePatchInfo>& infos, - ArenaVector<LinkerPatch>* linker_patches); - // Labels for each block that will be compiled. Label* block_labels_; // Indexed by block id. Label frame_entry_label_; @@ -613,12 +609,12 @@ class CodeGeneratorARM : public CodeGenerator { MethodToLiteralMap call_patches_; // Relative call patch info. // Using ArenaDeque<> which retains element addresses on push/emplace_back(). - ArenaDeque<PatchInfo<Label>> relative_call_patches_; + ArenaDeque<MethodPatchInfo<Label>> relative_call_patches_; // PC-relative patch info for each HArmDexCacheArraysBase. ArenaDeque<PcRelativePatchInfo> pc_relative_dex_cache_patches_; // Deduplication map for boot string literals for kBootImageLinkTimeAddress. BootStringToLiteralMap boot_image_string_patches_; - // PC-relative String patch info; type depends on configuration (app .bss or boot image PIC). + // PC-relative String patch info. ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_; // Deduplication map for boot type literals for kBootImageLinkTimeAddress. 
BootTypeToLiteralMap boot_image_type_patches_; diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc index 5d002674d8..a2a2e426b6 100644 --- a/compiler/optimizing/code_generator_arm64.cc +++ b/compiler/optimizing/code_generator_arm64.cc @@ -329,55 +329,6 @@ class LoadClassSlowPathARM64 : public SlowPathCodeARM64 { DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM64); }; -class LoadStringSlowPathARM64 : public SlowPathCodeARM64 { - public: - explicit LoadStringSlowPathARM64(HLoadString* instruction) : SlowPathCodeARM64(instruction) {} - - void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { - LocationSummary* locations = instruction_->GetLocations(); - DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg())); - CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen); - - __ Bind(GetEntryLabel()); - SaveLiveRegisters(codegen, locations); - - InvokeRuntimeCallingConvention calling_convention; - const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex(); - __ Mov(calling_convention.GetRegisterAt(0).W(), string_index); - arm64_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this); - CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>(); - Primitive::Type type = instruction_->GetType(); - arm64_codegen->MoveLocation(locations->Out(), calling_convention.GetReturnLocation(type), type); - - RestoreLiveRegisters(codegen, locations); - - // Store the resolved String to the BSS entry. - UseScratchRegisterScope temps(arm64_codegen->GetVIXLAssembler()); - Register temp = temps.AcquireX(); - const DexFile& dex_file = instruction_->AsLoadString()->GetDexFile(); - // TODO: Change art_quick_resolve_string to kSaveEverything and use a temporary - // for the ADRP in the fast path, so that we can avoid the ADRP here. - vixl::aarch64::Label* adrp_label = - arm64_codegen->NewPcRelativeStringPatch(dex_file, string_index); - arm64_codegen->EmitAdrpPlaceholder(adrp_label, temp); - vixl::aarch64::Label* strp_label = - arm64_codegen->NewPcRelativeStringPatch(dex_file, string_index, adrp_label); - { - SingleEmissionCheckScope guard(arm64_codegen->GetVIXLAssembler()); - __ Bind(strp_label); - __ str(RegisterFrom(locations->Out(), Primitive::kPrimNot), - MemOperand(temp, /* offset placeholder */ 0)); - } - - __ B(GetExitLabel()); - } - - const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathARM64"; } - - private: - DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM64); -}; - class NullCheckSlowPathARM64 : public SlowPathCodeARM64 { public: explicit NullCheckSlowPathARM64(HNullCheck* instr) : SlowPathCodeARM64(instr) {} @@ -3643,11 +3594,19 @@ void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invok const DexFile& dex_file = invoke->GetDexFile(); uint32_t element_offset = invoke->GetDexCacheArrayOffset(); vixl::aarch64::Label* adrp_label = NewPcRelativeDexCacheArrayPatch(dex_file, element_offset); - EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp)); + { + SingleEmissionCheckScope guard(GetVIXLAssembler()); + __ Bind(adrp_label); + __ adrp(XRegisterFrom(temp), /* offset placeholder */ 0); + } // Add LDR with its PC-relative DexCache access patch. 
vixl::aarch64::Label* ldr_label = NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label); - EmitLdrOffsetPlaceholder(ldr_label, XRegisterFrom(temp), XRegisterFrom(temp)); + { + SingleEmissionCheckScope guard(GetVIXLAssembler()); + __ Bind(ldr_label); + __ ldr(XRegisterFrom(temp), MemOperand(XRegisterFrom(temp), /* offset placeholder */ 0)); + } break; } case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: { @@ -3680,8 +3639,7 @@ void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invok __ Bl(&frame_entry_label_); break; case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: { - relative_call_patches_.emplace_back(*invoke->GetTargetMethod().dex_file, - invoke->GetTargetMethod().dex_method_index); + relative_call_patches_.emplace_back(invoke->GetTargetMethod()); vixl::aarch64::Label* label = &relative_call_patches_.back().label; SingleEmissionCheckScope guard(GetVIXLAssembler()); __ Bind(label); @@ -3803,45 +3761,6 @@ vixl::aarch64::Literal<uint64_t>* CodeGeneratorARM64::DeduplicateDexCacheAddress return DeduplicateUint64Literal(address); } -void CodeGeneratorARM64::EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label, - vixl::aarch64::Register reg) { - DCHECK(reg.IsX()); - SingleEmissionCheckScope guard(GetVIXLAssembler()); - __ Bind(fixup_label); - __ adrp(reg, /* offset placeholder */ 0); -} - -void CodeGeneratorARM64::EmitAddPlaceholder(vixl::aarch64::Label* fixup_label, - vixl::aarch64::Register out, - vixl::aarch64::Register base) { - DCHECK(out.IsX()); - DCHECK(base.IsX()); - SingleEmissionCheckScope guard(GetVIXLAssembler()); - __ Bind(fixup_label); - __ add(out, base, Operand(/* offset placeholder */ 0)); -} - -void CodeGeneratorARM64::EmitLdrOffsetPlaceholder(vixl::aarch64::Label* fixup_label, - vixl::aarch64::Register out, - vixl::aarch64::Register base) { - DCHECK(base.IsX()); - SingleEmissionCheckScope guard(GetVIXLAssembler()); - __ Bind(fixup_label); - __ ldr(out, MemOperand(base, /* offset placeholder */ 0)); -} - -template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)> -inline void CodeGeneratorARM64::EmitPcRelativeLinkerPatches( - const ArenaDeque<PcRelativePatchInfo>& infos, - ArenaVector<LinkerPatch>* linker_patches) { - for (const PcRelativePatchInfo& info : infos) { - linker_patches->push_back(Factory(info.label.GetLocation(), - &info.target_dex_file, - info.pc_insn_label->GetLocation(), - info.offset_or_index)); - } -} - void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) { DCHECK(linker_patches->empty()); size_t size = @@ -3869,9 +3788,10 @@ void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patc target_method.dex_file, target_method.dex_method_index)); } - for (const PatchInfo<vixl::aarch64::Label>& info : relative_call_patches_) { - linker_patches->push_back( - LinkerPatch::RelativeCodePatch(info.label.GetLocation(), &info.dex_file, info.index)); + for (const MethodPatchInfo<vixl::aarch64::Label>& info : relative_call_patches_) { + linker_patches->push_back(LinkerPatch::RelativeCodePatch(info.label.GetLocation(), + info.target_method.dex_file, + info.target_method.dex_method_index)); } for (const PcRelativePatchInfo& info : pc_relative_dex_cache_patches_) { linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(info.label.GetLocation(), @@ -3886,12 +3806,11 @@ void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patc target_string.dex_file, target_string.string_index)); } - if 
(!GetCompilerOptions().IsBootImage()) { - EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_, - linker_patches); - } else { - EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_, - linker_patches); + for (const PcRelativePatchInfo& info : pc_relative_string_patches_) { + linker_patches->push_back(LinkerPatch::RelativeStringPatch(info.label.GetLocation(), + &info.target_dex_file, + info.pc_insn_label->GetLocation(), + info.offset_or_index)); } for (const auto& entry : boot_image_type_patches_) { const TypeReference& target_type = entry.first; @@ -3900,8 +3819,12 @@ void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patc target_type.dex_file, target_type.type_index)); } - EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_, - linker_patches); + for (const PcRelativePatchInfo& info : pc_relative_type_patches_) { + linker_patches->push_back(LinkerPatch::RelativeTypePatch(info.label.GetLocation(), + &info.target_dex_file, + info.pc_insn_label->GetLocation(), + info.offset_or_index)); + } for (const auto& entry : boot_image_address_patches_) { DCHECK(GetCompilerOptions().GetIncludePatchInformation()); vixl::aarch64::Literal<uint32_t>* literal = entry.second; @@ -4058,11 +3981,19 @@ void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) { const DexFile& dex_file = cls->GetDexFile(); uint32_t type_index = cls->GetTypeIndex(); vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeTypePatch(dex_file, type_index); - codegen_->EmitAdrpPlaceholder(adrp_label, out.X()); + { + SingleEmissionCheckScope guard(GetVIXLAssembler()); + __ Bind(adrp_label); + __ adrp(out.X(), /* offset placeholder */ 0); + } // Add ADD with its PC-relative type patch. vixl::aarch64::Label* add_label = codegen_->NewPcRelativeTypePatch(dex_file, type_index, adrp_label); - codegen_->EmitAddPlaceholder(add_label, out.X(), out.X()); + { + SingleEmissionCheckScope guard(GetVIXLAssembler()); + __ Bind(add_label); + __ add(out.X(), out.X(), Operand(/* offset placeholder */ 0)); + } break; } case HLoadClass::LoadKind::kBootImageAddress: { @@ -4099,7 +4030,11 @@ void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) { uint32_t element_offset = cls->GetDexCacheElementOffset(); vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset); - codegen_->EmitAdrpPlaceholder(adrp_label, out.X()); + { + SingleEmissionCheckScope guard(GetVIXLAssembler()); + __ Bind(adrp_label); + __ adrp(out.X(), /* offset placeholder */ 0); + } // Add LDR with its PC-relative DexCache access patch. vixl::aarch64::Label* ldr_label = codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label); @@ -4184,7 +4119,7 @@ HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind( case HLoadString::LoadKind::kDexCacheAddress: DCHECK(Runtime::Current()->UseJitCompilation()); break; - case HLoadString::LoadKind::kBssEntry: + case HLoadString::LoadKind::kDexCachePcRelative: DCHECK(!Runtime::Current()->UseJitCompilation()); break; case HLoadString::LoadKind::kDexCacheViaMethod: @@ -4195,9 +4130,7 @@ HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind( void LocationsBuilderARM64::VisitLoadString(HLoadString* load) { LocationSummary::CallKind call_kind = load->NeedsEnvironment() - ? ((load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) - ? LocationSummary::kCallOnMainOnly - : LocationSummary::kCallOnSlowPath) + ? 
LocationSummary::kCallOnMainOnly : LocationSummary::kNoCall; LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind); if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) { @@ -4221,13 +4154,20 @@ void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) { // Add ADRP with its PC-relative String patch. const DexFile& dex_file = load->GetDexFile(); uint32_t string_index = load->GetStringIndex(); - DCHECK(codegen_->GetCompilerOptions().IsBootImage()); vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index); - codegen_->EmitAdrpPlaceholder(adrp_label, out.X()); + { + SingleEmissionCheckScope guard(GetVIXLAssembler()); + __ Bind(adrp_label); + __ adrp(out.X(), /* offset placeholder */ 0); + } // Add ADD with its PC-relative String patch. vixl::aarch64::Label* add_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label); - codegen_->EmitAddPlaceholder(add_label, out.X(), out.X()); + { + SingleEmissionCheckScope guard(GetVIXLAssembler()); + __ Bind(add_label); + __ add(out.X(), out.X(), Operand(/* offset placeholder */ 0)); + } return; // No dex cache slow path. } case HLoadString::LoadKind::kBootImageAddress: { @@ -4235,28 +4175,6 @@ void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) { __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(load->GetAddress())); return; // No dex cache slow path. } - case HLoadString::LoadKind::kBssEntry: { - // Add ADRP with its PC-relative String .bss entry patch. - const DexFile& dex_file = load->GetDexFile(); - uint32_t string_index = load->GetStringIndex(); - DCHECK(!codegen_->GetCompilerOptions().IsBootImage()); - vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index); - codegen_->EmitAdrpPlaceholder(adrp_label, out.X()); - // Add LDR with its PC-relative String patch. 
- vixl::aarch64::Label* ldr_label = - codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label); - // /* GcRoot<mirror::Class> */ out = *(base_address + offset) /* PC-relative */ - GenerateGcRootFieldLoad(load, - load->GetLocations()->Out(), - out.X(), - /* placeholder */ 0u, - ldr_label); - SlowPathCodeARM64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM64(load); - codegen_->AddSlowPath(slow_path); - __ Cbz(out.X(), slow_path->GetEntryLabel()); - __ Bind(slow_path->GetExitLabel()); - return; - } default: break; } @@ -5026,7 +4944,6 @@ void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(HInstruction* instru uint32_t offset, vixl::aarch64::Label* fixup_label, bool requires_read_barrier) { - DCHECK(fixup_label == nullptr || offset == 0u); Register root_reg = RegisterFrom(root, Primitive::kPrimNot); if (requires_read_barrier) { DCHECK(kEmitCompilerReadBarrier); @@ -5043,7 +4960,9 @@ void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(HInstruction* instru if (fixup_label == nullptr) { __ Ldr(root_reg, MemOperand(obj, offset)); } else { - codegen_->EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj); + SingleEmissionCheckScope guard(GetVIXLAssembler()); + __ Bind(fixup_label); + __ ldr(root_reg, MemOperand(obj, offset)); } static_assert( sizeof(mirror::CompressedReference<mirror::Object>) == sizeof(GcRoot<mirror::Object>), @@ -5072,7 +4991,9 @@ void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(HInstruction* instru if (fixup_label == nullptr) { __ Add(root_reg.X(), obj.X(), offset); } else { - codegen_->EmitAddPlaceholder(fixup_label, root_reg.X(), obj.X()); + SingleEmissionCheckScope guard(GetVIXLAssembler()); + __ Bind(fixup_label); + __ add(root_reg.X(), obj.X(), offset); } // /* mirror::Object* */ root = root->Read() codegen_->GenerateReadBarrierForRootSlow(instruction, root, root); @@ -5083,7 +5004,9 @@ void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(HInstruction* instru if (fixup_label == nullptr) { __ Ldr(root_reg, MemOperand(obj, offset)); } else { - codegen_->EmitLdrOffsetPlaceholder(fixup_label, root_reg, obj.X()); + SingleEmissionCheckScope guard(GetVIXLAssembler()); + __ Bind(fixup_label); + __ ldr(root_reg, MemOperand(obj, offset)); } // Note that GC roots are not affected by heap poisoning, thus we // do not have to unpoison `root_reg` here. 
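Background sketch (standalone, illustrative C++ — not part of this commit or of ART): the adrp/add placeholder pairs emitted in the hunks above are rewritten by the linker once the target address is known. ADRP materializes the 4 KiB page of the target relative to the ADRP instruction's own PC, and the following ADD supplies the low 12 bits, so the patch boils down to the arithmetic below; all names here are invented for illustration.

#include <cstdint>
#include <cstdio>

// Immediates a linker would encode into an ADRP/ADD placeholder pair.
struct AdrpAddImmediates {
  int64_t page_delta;  // signed 4 KiB page count, encoded in ADRP
  uint32_t low12;      // low 12 bits of the target, encoded in ADD
};

AdrpAddImmediates ComputeAdrpAdd(uint64_t adrp_pc, uint64_t target) {
  int64_t page_delta =
      static_cast<int64_t>(target >> 12) - static_cast<int64_t>(adrp_pc >> 12);
  return { page_delta, static_cast<uint32_t>(target & 0xFFFu) };
}

int main() {
  AdrpAddImmediates imm = ComputeAdrpAdd(/* adrp_pc */ 0x70001234u, /* target */ 0x70105678u);
  std::printf("page_delta=%lld low12=0x%x\n",
              static_cast<long long>(imm.page_delta),
              static_cast<unsigned>(imm.low12));
  return 0;
}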
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h index eb28ecb427..a15224578d 100644 --- a/compiler/optimizing/code_generator_arm64.h +++ b/compiler/optimizing/code_generator_arm64.h @@ -564,14 +564,6 @@ class CodeGeneratorARM64 : public CodeGenerator { vixl::aarch64::Literal<uint32_t>* DeduplicateBootImageAddressLiteral(uint64_t address); vixl::aarch64::Literal<uint64_t>* DeduplicateDexCacheAddressLiteral(uint64_t address); - void EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label, vixl::aarch64::Register reg); - void EmitAddPlaceholder(vixl::aarch64::Label* fixup_label, - vixl::aarch64::Register out, - vixl::aarch64::Register base); - void EmitLdrOffsetPlaceholder(vixl::aarch64::Label* fixup_label, - vixl::aarch64::Register out, - vixl::aarch64::Register base); - void EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) OVERRIDE; // Fast path implementation of ReadBarrier::Barrier for a heap @@ -699,10 +691,6 @@ class CodeGeneratorARM64 : public CodeGenerator { void EmitJumpTables(); - template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)> - static void EmitPcRelativeLinkerPatches(const ArenaDeque<PcRelativePatchInfo>& infos, - ArenaVector<LinkerPatch>* linker_patches); - // Labels for each block that will be compiled. // We use a deque so that the `vixl::aarch64::Label` objects do not move in memory. ArenaDeque<vixl::aarch64::Label> block_labels_; // Indexed by block id. @@ -725,12 +713,12 @@ class CodeGeneratorARM64 : public CodeGenerator { MethodToLiteralMap call_patches_; // Relative call patch info. // Using ArenaDeque<> which retains element addresses on push/emplace_back(). - ArenaDeque<PatchInfo<vixl::aarch64::Label>> relative_call_patches_; + ArenaDeque<MethodPatchInfo<vixl::aarch64::Label>> relative_call_patches_; // PC-relative DexCache access info. ArenaDeque<PcRelativePatchInfo> pc_relative_dex_cache_patches_; // Deduplication map for boot string literals for kBootImageLinkTimeAddress. BootStringToLiteralMap boot_image_string_patches_; - // PC-relative String patch info; type depends on configuration (app .bss or boot image PIC). + // PC-relative String patch info. ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_; // Deduplication map for boot type literals for kBootImageLinkTimeAddress. BootTypeToLiteralMap boot_image_type_patches_; diff --git a/compiler/optimizing/code_generator_mips.cc b/compiler/optimizing/code_generator_mips.cc index f0118a465b..5c0ca85c78 100644 --- a/compiler/optimizing/code_generator_mips.cc +++ b/compiler/optimizing/code_generator_mips.cc @@ -279,8 +279,7 @@ class LoadStringSlowPathMIPS : public SlowPathCodeMIPS { SaveLiveRegisters(codegen, locations); InvokeRuntimeCallingConvention calling_convention; - HLoadString* load = instruction_->AsLoadString(); - const uint32_t string_index = load->GetStringIndex(); + const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex(); __ LoadConst32(calling_convention.GetRegisterAt(0), string_index); mips_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this); CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>(); @@ -290,19 +289,6 @@ class LoadStringSlowPathMIPS : public SlowPathCodeMIPS { type); RestoreLiveRegisters(codegen, locations); - - // Store the resolved String to the BSS entry. 
- // TODO: Change art_quick_resolve_string to kSaveEverything and use a temporary for the - // .bss entry address in the fast path, so that we can avoid another calculation here. - bool isR6 = mips_codegen->GetInstructionSetFeatures().IsR6(); - Register base = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>(); - Register out = locations->Out().AsRegister<Register>(); - DCHECK_NE(out, AT); - CodeGeneratorMIPS::PcRelativePatchInfo* info = - mips_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index); - mips_codegen->EmitPcRelativeAddressPlaceholder(info, AT, base); - __ StoreToOffset(kStoreWord, out, AT, 0); - __ B(GetExitLabel()); } @@ -971,24 +957,6 @@ void CodeGeneratorMIPS::AddLocationAsTemp(Location location, LocationSummary* lo } } -template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)> -inline void CodeGeneratorMIPS::EmitPcRelativeLinkerPatches( - const ArenaDeque<PcRelativePatchInfo>& infos, - ArenaVector<LinkerPatch>* linker_patches) { - for (const PcRelativePatchInfo& info : infos) { - const DexFile& dex_file = info.target_dex_file; - size_t offset_or_index = info.offset_or_index; - DCHECK(info.high_label.IsBound()); - uint32_t high_offset = __ GetLabelLocation(&info.high_label); - // On R2 we use HMipsComputeBaseMethodAddress and patch relative to - // the assembler's base label used for PC-relative addressing. - uint32_t pc_rel_offset = info.pc_rel_label.IsBound() - ? __ GetLabelLocation(&info.pc_rel_label) - : __ GetPcRelBaseLabelLocation(); - linker_patches->push_back(Factory(high_offset, &dex_file, pc_rel_offset, offset_or_index)); - } -} - void CodeGeneratorMIPS::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) { DCHECK(linker_patches->empty()); size_t size = @@ -1019,17 +987,48 @@ void CodeGeneratorMIPS::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patch target_method.dex_file, target_method.dex_method_index)); } - EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_, - linker_patches); - if (!GetCompilerOptions().IsBootImage()) { - EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_, - linker_patches); - } else { - EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_, - linker_patches); + for (const PcRelativePatchInfo& info : pc_relative_dex_cache_patches_) { + const DexFile& dex_file = info.target_dex_file; + size_t base_element_offset = info.offset_or_index; + DCHECK(info.high_label.IsBound()); + uint32_t high_offset = __ GetLabelLocation(&info.high_label); + DCHECK(info.pc_rel_label.IsBound()); + uint32_t pc_rel_offset = __ GetLabelLocation(&info.pc_rel_label); + linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(high_offset, + &dex_file, + pc_rel_offset, + base_element_offset)); + } + for (const PcRelativePatchInfo& info : pc_relative_string_patches_) { + const DexFile& dex_file = info.target_dex_file; + size_t string_index = info.offset_or_index; + DCHECK(info.high_label.IsBound()); + uint32_t high_offset = __ GetLabelLocation(&info.high_label); + // On R2 we use HMipsComputeBaseMethodAddress and patch relative to + // the assembler's base label used for PC-relative literals. + uint32_t pc_rel_offset = info.pc_rel_label.IsBound() + ? 
__ GetLabelLocation(&info.pc_rel_label) + : __ GetPcRelBaseLabelLocation(); + linker_patches->push_back(LinkerPatch::RelativeStringPatch(high_offset, + &dex_file, + pc_rel_offset, + string_index)); + } + for (const PcRelativePatchInfo& info : pc_relative_type_patches_) { + const DexFile& dex_file = info.target_dex_file; + size_t type_index = info.offset_or_index; + DCHECK(info.high_label.IsBound()); + uint32_t high_offset = __ GetLabelLocation(&info.high_label); + // On R2 we use HMipsComputeBaseMethodAddress and patch relative to + // the assembler's base label used for PC-relative literals. + uint32_t pc_rel_offset = info.pc_rel_label.IsBound() + ? __ GetLabelLocation(&info.pc_rel_label) + : __ GetPcRelBaseLabelLocation(); + linker_patches->push_back(LinkerPatch::RelativeTypePatch(high_offset, + &dex_file, + pc_rel_offset, + type_index)); } - EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_, - linker_patches); for (const auto& entry : boot_image_string_patches_) { const StringReference& target_string = entry.first; Literal* literal = entry.second; @@ -1119,36 +1118,6 @@ Literal* CodeGeneratorMIPS::DeduplicateBootImageAddressLiteral(uint32_t address) return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), map); } -void CodeGeneratorMIPS::EmitPcRelativeAddressPlaceholder( - PcRelativePatchInfo* info, Register out, Register base) { - bool reordering = __ SetReorder(false); - if (GetInstructionSetFeatures().IsR6()) { - DCHECK_EQ(base, ZERO); - __ Bind(&info->high_label); - __ Bind(&info->pc_rel_label); - // Add a 32-bit offset to PC. - __ Auipc(out, /* placeholder */ 0x1234); - __ Addiu(out, out, /* placeholder */ 0x5678); - } else { - // If base is ZERO, emit NAL to obtain the actual base. - if (base == ZERO) { - // Generate a dummy PC-relative call to obtain PC. - __ Nal(); - } - __ Bind(&info->high_label); - __ Lui(out, /* placeholder */ 0x1234); - // If we emitted the NAL, bind the pc_rel_label, otherwise base is a register holding - // the HMipsComputeBaseMethodAddress which has its own label stored in MipsAssembler. - if (base == ZERO) { - __ Bind(&info->pc_rel_label); - } - __ Ori(out, out, /* placeholder */ 0x5678); - // Add a 32-bit offset to PC. - __ Addu(out, out, (base == ZERO) ? RA : base); - } - __ SetReorder(reordering); -} - void CodeGeneratorMIPS::MarkGCCard(Register object, Register value) { MipsLabel done; Register card = AT; @@ -4260,8 +4229,6 @@ HLoadString::LoadKind CodeGeneratorMIPS::GetSupportedLoadStringKind( } // We disable PC-relative load when there is an irreducible loop, as the optimization // is incompatible with it. - // TODO: Create as many MipsDexCacheArraysBase instructions as needed for methods - // with irreducible loops. bool has_irreducible_loops = GetGraph()->HasIrreducibleLoops(); bool fallback_load = has_irreducible_loops; switch (desired_string_load_kind) { @@ -4277,8 +4244,10 @@ HLoadString::LoadKind CodeGeneratorMIPS::GetSupportedLoadStringKind( DCHECK(Runtime::Current()->UseJitCompilation()); fallback_load = false; break; - case HLoadString::LoadKind::kBssEntry: + case HLoadString::LoadKind::kDexCachePcRelative: DCHECK(!Runtime::Current()->UseJitCompilation()); + // TODO: Create as many MipsDexCacheArraysBase instructions as needed for methods + // with irreducible loops. 
break; case HLoadString::LoadKind::kDexCacheViaMethod: fallback_load = false; @@ -4658,7 +4627,23 @@ void InstructionCodeGeneratorMIPS::VisitLoadClass(HLoadClass* cls) { DCHECK(!kEmitCompilerReadBarrier); CodeGeneratorMIPS::PcRelativePatchInfo* info = codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex()); - codegen_->EmitPcRelativeAddressPlaceholder(info, out, base_or_current_method_reg); + bool reordering = __ SetReorder(false); + if (isR6) { + __ Bind(&info->high_label); + __ Bind(&info->pc_rel_label); + // Add a 32-bit offset to PC. + __ Auipc(out, /* placeholder */ 0x1234); + __ Addiu(out, out, /* placeholder */ 0x5678); + } else { + __ Bind(&info->high_label); + __ Lui(out, /* placeholder */ 0x1234); + // We do not bind info->pc_rel_label here, we'll use the assembler's label + // for PC-relative literals and the base from HMipsComputeBaseMethodAddress. + __ Ori(out, out, /* placeholder */ 0x5678); + // Add a 32-bit offset to PC. + __ Addu(out, out, base_or_current_method_reg); + } + __ SetReorder(reordering); break; } case HLoadClass::LoadKind::kBootImageAddress: { @@ -4747,9 +4732,7 @@ void InstructionCodeGeneratorMIPS::VisitClearException(HClearException* clear AT void LocationsBuilderMIPS::VisitLoadString(HLoadString* load) { LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier) - ? ((load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) - ? LocationSummary::kCallOnMainOnly - : LocationSummary::kCallOnSlowPath) + ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall; LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind); HLoadString::LoadKind load_kind = load->GetLoadKind(); @@ -4763,7 +4746,7 @@ void LocationsBuilderMIPS::VisitLoadString(HLoadString* load) { } FALLTHROUGH_INTENDED; // We need an extra register for PC-relative dex cache accesses. - case HLoadString::LoadKind::kBssEntry: + case HLoadString::LoadKind::kDexCachePcRelative: case HLoadString::LoadKind::kDexCacheViaMethod: locations->SetInAt(0, Location::RequiresRegister()); break; @@ -4785,7 +4768,6 @@ void InstructionCodeGeneratorMIPS::VisitLoadString(HLoadString* load) { case HLoadString::LoadKind::kBootImageLinkTimeAddress: case HLoadString::LoadKind::kBootImageAddress: case HLoadString::LoadKind::kBootImageLinkTimePcRelative: - case HLoadString::LoadKind::kBssEntry: base_or_current_method_reg = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>(); break; default: @@ -4803,10 +4785,25 @@ void InstructionCodeGeneratorMIPS::VisitLoadString(HLoadString* load) { return; // No dex cache slow path. case HLoadString::LoadKind::kBootImageLinkTimePcRelative: { DCHECK(!kEmitCompilerReadBarrier); - DCHECK(codegen_->GetCompilerOptions().IsBootImage()); CodeGeneratorMIPS::PcRelativePatchInfo* info = codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex()); - codegen_->EmitPcRelativeAddressPlaceholder(info, out, base_or_current_method_reg); + bool reordering = __ SetReorder(false); + if (isR6) { + __ Bind(&info->high_label); + __ Bind(&info->pc_rel_label); + // Add a 32-bit offset to PC. + __ Auipc(out, /* placeholder */ 0x1234); + __ Addiu(out, out, /* placeholder */ 0x5678); + } else { + __ Bind(&info->high_label); + __ Lui(out, /* placeholder */ 0x1234); + // We do not bind info->pc_rel_label here, we'll use the assembler's label + // for PC-relative literals and the base from HMipsComputeBaseMethodAddress. + __ Ori(out, out, /* placeholder */ 0x5678); + // Add a 32-bit offset to PC. 
+ __ Addu(out, out, base_or_current_method_reg); + } + __ SetReorder(reordering); return; // No dex cache slow path. } case HLoadString::LoadKind::kBootImageAddress: { @@ -4818,18 +4815,6 @@ void InstructionCodeGeneratorMIPS::VisitLoadString(HLoadString* load) { codegen_->DeduplicateBootImageAddressLiteral(address)); return; // No dex cache slow path. } - case HLoadString::LoadKind::kBssEntry: { - DCHECK(!codegen_->GetCompilerOptions().IsBootImage()); - CodeGeneratorMIPS::PcRelativePatchInfo* info = - codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex()); - codegen_->EmitPcRelativeAddressPlaceholder(info, out, base_or_current_method_reg); - __ LoadFromOffset(kLoadWord, out, out, 0); - SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathMIPS(load); - codegen_->AddSlowPath(slow_path); - __ Beqz(out, slow_path->GetEntryLabel()); - __ Bind(slow_path->GetExitLabel()); - return; - } default: break; } @@ -6026,8 +6011,25 @@ void InstructionCodeGeneratorMIPS::VisitMipsDexCacheArraysBase(HMipsDexCacheArra Register reg = base->GetLocations()->Out().AsRegister<Register>(); CodeGeneratorMIPS::PcRelativePatchInfo* info = codegen_->NewPcRelativeDexCacheArrayPatch(base->GetDexFile(), base->GetElementOffset()); - // TODO: Reuse MipsComputeBaseMethodAddress on R2 instead of passing ZERO to force emitting NAL. - codegen_->EmitPcRelativeAddressPlaceholder(info, reg, ZERO); + bool reordering = __ SetReorder(false); + if (codegen_->GetInstructionSetFeatures().IsR6()) { + __ Bind(&info->high_label); + __ Bind(&info->pc_rel_label); + // Add a 32-bit offset to PC. + __ Auipc(reg, /* placeholder */ 0x1234); + __ Addiu(reg, reg, /* placeholder */ 0x5678); + } else { + // Generate a dummy PC-relative call to obtain PC. + __ Nal(); + __ Bind(&info->high_label); + __ Lui(reg, /* placeholder */ 0x1234); + __ Bind(&info->pc_rel_label); + __ Ori(reg, reg, /* placeholder */ 0x5678); + // Add a 32-bit offset to PC. + __ Addu(reg, reg, RA); + // TODO: Can we share this code with that of VisitMipsComputeBaseMethodAddress()? + } + __ SetReorder(reordering); } void LocationsBuilderMIPS::VisitInvokeUnresolved(HInvokeUnresolved* invoke) { diff --git a/compiler/optimizing/code_generator_mips.h b/compiler/optimizing/code_generator_mips.h index 0e8d8d40cf..f943978b3b 100644 --- a/compiler/optimizing/code_generator_mips.h +++ b/compiler/optimizing/code_generator_mips.h @@ -435,8 +435,6 @@ class CodeGeneratorMIPS : public CodeGenerator { Literal* DeduplicateBootImageTypeLiteral(const DexFile& dex_file, uint32_t type_index); Literal* DeduplicateBootImageAddressLiteral(uint32_t address); - void EmitPcRelativeAddressPlaceholder(PcRelativePatchInfo* info, Register out, Register base); - private: Register GetInvokeStaticOrDirectExtraParameter(HInvokeStaticOrDirect* invoke, Register temp); @@ -457,10 +455,6 @@ class CodeGeneratorMIPS : public CodeGenerator { uint32_t offset_or_index, ArenaDeque<PcRelativePatchInfo>* patches); - template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)> - void EmitPcRelativeLinkerPatches(const ArenaDeque<PcRelativePatchInfo>& infos, - ArenaVector<LinkerPatch>* linker_patches); - // Labels for each block that will be compiled. MipsLabel* block_labels_; MipsLabel frame_entry_label_; @@ -479,7 +473,7 @@ class CodeGeneratorMIPS : public CodeGenerator { ArenaDeque<PcRelativePatchInfo> pc_relative_dex_cache_patches_; // Deduplication map for boot string literals for kBootImageLinkTimeAddress. 
BootStringToLiteralMap boot_image_string_patches_; - // PC-relative String patch info; type depends on configuration (app .bss or boot image PIC). + // PC-relative String patch info. ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_; // Deduplication map for boot type literals for kBootImageLinkTimeAddress. BootTypeToLiteralMap boot_image_type_patches_; diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc index 49426440f6..c3000805d1 100644 --- a/compiler/optimizing/code_generator_x86.cc +++ b/compiler/optimizing/code_generator_x86.cc @@ -209,42 +209,6 @@ class SuspendCheckSlowPathX86 : public SlowPathCode { DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathX86); }; -class LoadStringSlowPathX86 : public SlowPathCode { - public: - explicit LoadStringSlowPathX86(HLoadString* instruction): SlowPathCode(instruction) {} - - void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { - LocationSummary* locations = instruction_->GetLocations(); - DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg())); - - CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen); - __ Bind(GetEntryLabel()); - SaveLiveRegisters(codegen, locations); - - InvokeRuntimeCallingConvention calling_convention; - const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex(); - __ movl(calling_convention.GetRegisterAt(0), Immediate(string_index)); - x86_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this); - CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>(); - x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX)); - RestoreLiveRegisters(codegen, locations); - - // Store the resolved String to the BSS entry. - Register method_address = locations->InAt(0).AsRegister<Register>(); - __ movl(Address(method_address, CodeGeneratorX86::kDummy32BitOffset), - locations->Out().AsRegister<Register>()); - Label* fixup_label = x86_codegen->NewStringBssEntryPatch(instruction_->AsLoadString()); - __ Bind(fixup_label); - - __ jmp(GetExitLabel()); - } - - const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86"; } - - private: - DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86); -}; - class LoadClassSlowPathX86 : public SlowPathCode { public: LoadClassSlowPathX86(HLoadClass* cls, @@ -4327,8 +4291,7 @@ Location CodeGeneratorX86::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticO break; case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup: __ movl(temp.AsRegister<Register>(), Immediate(/* placeholder */ 0)); - method_patches_.emplace_back(*invoke->GetTargetMethod().dex_file, - invoke->GetTargetMethod().dex_method_index); + method_patches_.emplace_back(invoke->GetTargetMethod()); __ Bind(&method_patches_.back().label); // Bind the label at the end of the "movl" insn. break; case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: { @@ -4373,8 +4336,7 @@ void CodeGeneratorX86::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, __ call(GetFrameEntryLabel()); break; case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: { - relative_call_patches_.emplace_back(*invoke->GetTargetMethod().dex_file, - invoke->GetTargetMethod().dex_method_index); + relative_call_patches_.emplace_back(invoke->GetTargetMethod()); Label* label = &relative_call_patches_.back().label; __ call(label); // Bind to the patch label, override at link time. __ Bind(label); // Bind the label at the end of the "call" insn. 
@@ -4433,8 +4395,7 @@ void CodeGeneratorX86::RecordSimplePatch() { } } -void CodeGeneratorX86::RecordBootStringPatch(HLoadString* load_string) { - DCHECK(GetCompilerOptions().IsBootImage()); +void CodeGeneratorX86::RecordStringPatch(HLoadString* load_string) { string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex()); __ Bind(&string_patches_.back().label); } @@ -4444,12 +4405,6 @@ void CodeGeneratorX86::RecordTypePatch(HLoadClass* load_class) { __ Bind(&type_patches_.back().label); } -Label* CodeGeneratorX86::NewStringBssEntryPatch(HLoadString* load_string) { - DCHECK(!GetCompilerOptions().IsBootImage()); - string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex()); - return &string_patches_.back().label; -} - Label* CodeGeneratorX86::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file, uint32_t element_offset) { // Add the patch entry and bind its label at the end of the instruction. @@ -4457,21 +4412,6 @@ Label* CodeGeneratorX86::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file return &pc_relative_dex_cache_patches_.back().label; } -// The label points to the end of the "movl" or another instruction but the literal offset -// for method patch needs to point to the embedded constant which occupies the last 4 bytes. -constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u; - -template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)> -inline void CodeGeneratorX86::EmitPcRelativeLinkerPatches( - const ArenaDeque<PatchInfo<Label>>& infos, - ArenaVector<LinkerPatch>* linker_patches) { - for (const PatchInfo<Label>& info : infos) { - uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment; - linker_patches->push_back( - Factory(literal_offset, &info.dex_file, GetMethodAddressOffset(), info.index)); - } -} - void CodeGeneratorX86::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) { DCHECK(linker_patches->empty()); size_t size = @@ -4482,38 +4422,59 @@ void CodeGeneratorX86::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patche string_patches_.size() + type_patches_.size(); linker_patches->reserve(size); - for (const PatchInfo<Label>& info : method_patches_) { + // The label points to the end of the "movl" insn but the literal offset for method + // patch needs to point to the embedded constant which occupies the last 4 bytes. 
+ constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u; + for (const MethodPatchInfo<Label>& info : method_patches_) { + uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment; + linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset, + info.target_method.dex_file, + info.target_method.dex_method_index)); + } + for (const MethodPatchInfo<Label>& info : relative_call_patches_) { uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment; - linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset, &info.dex_file, info.index)); + linker_patches->push_back(LinkerPatch::RelativeCodePatch(literal_offset, + info.target_method.dex_file, + info.target_method.dex_method_index)); } - for (const PatchInfo<Label>& info : relative_call_patches_) { + for (const PcRelativeDexCacheAccessInfo& info : pc_relative_dex_cache_patches_) { uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment; - linker_patches->push_back( - LinkerPatch::RelativeCodePatch(literal_offset, &info.dex_file, info.index)); + linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(literal_offset, + &info.target_dex_file, + GetMethodAddressOffset(), + info.element_offset)); } - EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_, - linker_patches); for (const Label& label : simple_patches_) { uint32_t literal_offset = label.Position() - kLabelPositionToLiteralOffsetAdjustment; linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset)); } - if (!GetCompilerOptions().IsBootImage()) { - EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_patches_, linker_patches); - } else if (GetCompilerOptions().GetCompilePic()) { - EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(string_patches_, linker_patches); - } else { - for (const PatchInfo<Label>& info : string_patches_) { + if (GetCompilerOptions().GetCompilePic()) { + for (const StringPatchInfo<Label>& info : string_patches_) { uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment; - linker_patches->push_back( - LinkerPatch::StringPatch(literal_offset, &info.dex_file, info.index)); + linker_patches->push_back(LinkerPatch::RelativeStringPatch(literal_offset, + &info.dex_file, + GetMethodAddressOffset(), + info.string_index)); + } + for (const TypePatchInfo<Label>& info : type_patches_) { + uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment; + linker_patches->push_back(LinkerPatch::RelativeTypePatch(literal_offset, + &info.dex_file, + GetMethodAddressOffset(), + info.type_index)); } - } - if (GetCompilerOptions().GetCompilePic()) { - EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(type_patches_, linker_patches); } else { - for (const PatchInfo<Label>& info : type_patches_) { + for (const StringPatchInfo<Label>& info : string_patches_) { + uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment; + linker_patches->push_back(LinkerPatch::StringPatch(literal_offset, + &info.dex_file, + info.string_index)); + } + for (const TypePatchInfo<Label>& info : type_patches_) { uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment; - linker_patches->push_back(LinkerPatch::TypePatch(literal_offset, &info.dex_file, info.index)); + linker_patches->push_back(LinkerPatch::TypePatch(literal_offset, + &info.dex_file, + info.type_index)); } } } @@ 
-5991,7 +5952,7 @@ HLoadString::LoadKind CodeGeneratorX86::GetSupportedLoadStringKind( case HLoadString::LoadKind::kBootImageLinkTimePcRelative: DCHECK(GetCompilerOptions().GetCompilePic()); FALLTHROUGH_INTENDED; - case HLoadString::LoadKind::kBssEntry: + case HLoadString::LoadKind::kDexCachePcRelative: DCHECK(!Runtime::Current()->UseJitCompilation()); // Note: boot image is also non-JIT. // We disable pc-relative load when there is an irreducible loop, as the optimization // is incompatible with it. @@ -6014,15 +5975,13 @@ HLoadString::LoadKind CodeGeneratorX86::GetSupportedLoadStringKind( void LocationsBuilderX86::VisitLoadString(HLoadString* load) { LocationSummary::CallKind call_kind = (load->NeedsEnvironment() || kEmitCompilerReadBarrier) - ? ((load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) - ? LocationSummary::kCallOnMainOnly - : LocationSummary::kCallOnSlowPath) + ? LocationSummary::kCallOnMainOnly : LocationSummary::kNoCall; LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind); HLoadString::LoadKind load_kind = load->GetLoadKind(); if (load_kind == HLoadString::LoadKind::kDexCacheViaMethod || load_kind == HLoadString::LoadKind::kBootImageLinkTimePcRelative || - load_kind == HLoadString::LoadKind::kBssEntry) { + load_kind == HLoadString::LoadKind::kDexCachePcRelative) { locations->SetInAt(0, Location::RequiresRegister()); } if (load_kind == HLoadString::LoadKind::kDexCacheViaMethod) { @@ -6040,13 +5999,13 @@ void InstructionCodeGeneratorX86::VisitLoadString(HLoadString* load) { switch (load->GetLoadKind()) { case HLoadString::LoadKind::kBootImageLinkTimeAddress: { __ movl(out, Immediate(/* placeholder */ 0)); - codegen_->RecordBootStringPatch(load); + codegen_->RecordStringPatch(load); return; // No dex cache slow path. } case HLoadString::LoadKind::kBootImageLinkTimePcRelative: { Register method_address = locations->InAt(0).AsRegister<Register>(); __ leal(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset)); - codegen_->RecordBootStringPatch(load); + codegen_->RecordStringPatch(load); return; // No dex cache slow path. } case HLoadString::LoadKind::kBootImageAddress: { @@ -6056,19 +6015,6 @@ void InstructionCodeGeneratorX86::VisitLoadString(HLoadString* load) { codegen_->RecordSimplePatch(); return; // No dex cache slow path. 
} - case HLoadString::LoadKind::kBssEntry: { - Register method_address = locations->InAt(0).AsRegister<Register>(); - Address address = Address(method_address, CodeGeneratorX86::kDummy32BitOffset); - Label* fixup_label = codegen_->NewStringBssEntryPatch(load); - // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */ - GenerateGcRootFieldLoad(load, out_loc, address, fixup_label); - SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86(load); - codegen_->AddSlowPath(slow_path); - __ testl(out, out); - __ j(kEqual, slow_path->GetEntryLabel()); - __ Bind(slow_path->GetExitLabel()); - return; - } default: break; } diff --git a/compiler/optimizing/code_generator_x86.h b/compiler/optimizing/code_generator_x86.h index 27ea3bfc0a..1ae9af3b94 100644 --- a/compiler/optimizing/code_generator_x86.h +++ b/compiler/optimizing/code_generator_x86.h @@ -411,9 +411,8 @@ class CodeGeneratorX86 : public CodeGenerator { void GenerateVirtualCall(HInvokeVirtual* invoke, Location temp) OVERRIDE; void RecordSimplePatch(); - void RecordBootStringPatch(HLoadString* load_string); + void RecordStringPatch(HLoadString* load_string); void RecordTypePatch(HLoadClass* load_class); - Label* NewStringBssEntryPatch(HLoadString* load_string); Label* NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file, uint32_t element_offset); void MoveFromReturnRegister(Location trg, Primitive::Type type) OVERRIDE; @@ -580,9 +579,15 @@ class CodeGeneratorX86 : public CodeGenerator { private: Register GetInvokeStaticOrDirectExtraParameter(HInvokeStaticOrDirect* invoke, Register temp); - template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)> - void EmitPcRelativeLinkerPatches(const ArenaDeque<PatchInfo<Label>>& infos, - ArenaVector<LinkerPatch>* linker_patches); + struct PcRelativeDexCacheAccessInfo { + PcRelativeDexCacheAccessInfo(const DexFile& dex_file, uint32_t element_off) + : target_dex_file(dex_file), element_offset(element_off), label() { } + + const DexFile& target_dex_file; + uint32_t element_offset; + // NOTE: Label is bound to the end of the instruction that has an embedded 32-bit offset. + Label label; + }; // Labels for each block that will be compiled. Label* block_labels_; // Indexed by block id. @@ -594,16 +599,16 @@ class CodeGeneratorX86 : public CodeGenerator { const X86InstructionSetFeatures& isa_features_; // Method patch info. Using ArenaDeque<> which retains element addresses on push/emplace_back(). - ArenaDeque<PatchInfo<Label>> method_patches_; - ArenaDeque<PatchInfo<Label>> relative_call_patches_; + ArenaDeque<MethodPatchInfo<Label>> method_patches_; + ArenaDeque<MethodPatchInfo<Label>> relative_call_patches_; // PC-relative DexCache access info. - ArenaDeque<PatchInfo<Label>> pc_relative_dex_cache_patches_; + ArenaDeque<PcRelativeDexCacheAccessInfo> pc_relative_dex_cache_patches_; // Patch locations for patchoat where the linker doesn't do any other work. ArenaDeque<Label> simple_patches_; - // String patch locations; type depends on configuration (app .bss or boot image PIC/non-PIC). - ArenaDeque<PatchInfo<Label>> string_patches_; + // String patch locations. + ArenaDeque<StringPatchInfo<Label>> string_patches_; // Type patch locations. - ArenaDeque<PatchInfo<Label>> type_patches_; + ArenaDeque<TypePatchInfo<Label>> type_patches_; // Offset to the start of the constant area in the assembled code. // Used for fixups to the constant area. 
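Aside on the x86 patch bookkeeping above (a minimal sketch, not a verbatim excerpt): the code generator binds each patch Label at the end of an instruction whose 32-bit immediate or displacement occupies the last four bytes, so the byte offset reported to the linker is the label position minus kLabelPositionToLiteralOffsetAdjustment (4). Only that constant name is taken from the diff; the helper and the worked example are illustrative.

#include <cassert>
#include <cstdint>

constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u;

// Byte offset of the embedded 32-bit constant, given a label bound at the
// end of the instruction that contains it.
uint32_t LiteralOffsetFromLabelPosition(uint32_t label_position) {
  assert(label_position >= kLabelPositionToLiteralOffsetAdjustment);
  return label_position - kLabelPositionToLiteralOffsetAdjustment;
}

int main() {
  // A 5-byte "movl $imm32, %eax" (opcode B8 + imm32) starting at code offset 0x40
  // ends, and binds its patch label, at 0x45; the immediate to patch starts at 0x41.
  return LiteralOffsetFromLabelPosition(0x45) == 0x41 ? 0 : 1;
}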
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc index 95569a7903..f9a3e429d7 100644 --- a/compiler/optimizing/code_generator_x86_64.cc +++ b/compiler/optimizing/code_generator_x86_64.cc @@ -284,44 +284,6 @@ class LoadClassSlowPathX86_64 : public SlowPathCode { DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathX86_64); }; -class LoadStringSlowPathX86_64 : public SlowPathCode { - public: - explicit LoadStringSlowPathX86_64(HLoadString* instruction) : SlowPathCode(instruction) {} - - void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { - LocationSummary* locations = instruction_->GetLocations(); - DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg())); - - CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen); - __ Bind(GetEntryLabel()); - SaveLiveRegisters(codegen, locations); - - InvokeRuntimeCallingConvention calling_convention; - const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex(); - __ movl(CpuRegister(calling_convention.GetRegisterAt(0)), Immediate(string_index)); - x86_64_codegen->InvokeRuntime(kQuickResolveString, - instruction_, - instruction_->GetDexPc(), - this); - CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>(); - x86_64_codegen->Move(locations->Out(), Location::RegisterLocation(RAX)); - RestoreLiveRegisters(codegen, locations); - - // Store the resolved String to the BSS entry. - __ movl(Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false), - locations->Out().AsRegister<CpuRegister>()); - Label* fixup_label = x86_64_codegen->NewStringBssEntryPatch(instruction_->AsLoadString()); - __ Bind(fixup_label); - - __ jmp(GetExitLabel()); - } - - const char* GetDescription() const OVERRIDE { return "LoadStringSlowPathX86_64"; } - - private: - DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathX86_64); -}; - class TypeCheckSlowPathX86_64 : public SlowPathCode { public: TypeCheckSlowPathX86_64(HInstruction* instruction, bool is_fatal) @@ -807,8 +769,7 @@ Location CodeGeneratorX86_64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStat break; case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup: __ movl(temp.AsRegister<CpuRegister>(), Immediate(0)); // Placeholder. - method_patches_.emplace_back(*invoke->GetTargetMethod().dex_file, - invoke->GetTargetMethod().dex_method_index); + method_patches_.emplace_back(invoke->GetTargetMethod()); __ Bind(&method_patches_.back().label); // Bind the label at the end of the "movl" insn. break; case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: { @@ -855,8 +816,7 @@ void CodeGeneratorX86_64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invo __ call(&frame_entry_label_); break; case HInvokeStaticOrDirect::CodePtrLocation::kCallPCRelative: { - relative_call_patches_.emplace_back(*invoke->GetTargetMethod().dex_file, - invoke->GetTargetMethod().dex_method_index); + relative_call_patches_.emplace_back(invoke->GetTargetMethod()); Label* label = &relative_call_patches_.back().label; __ call(label); // Bind to the patch label, override at link time. __ Bind(label); // Bind the label at the end of the "call" insn. 
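Note on the kCallPCRelative path just above (illustrative sketch, not ART code): an x86 "call rel32" (opcode E8) encodes a displacement measured from the end of the call instruction, which is exactly where the patch label is bound, so at link time the displacement can be computed as below. The function name and argument layout are assumptions made for the example.

#include <cstdint>

// rel32 for "call rel32" (opcode E8): relative to the instruction's end,
// i.e. to the bound label position within the method's code.
int32_t ComputeCallRel32(uint64_t code_address, uint32_t call_end_offset, uint64_t target) {
  int64_t delta = static_cast<int64_t>(target) -
                  static_cast<int64_t>(code_address + call_end_offset);
  return static_cast<int32_t>(delta);  // caller must keep the target within +/-2 GiB
}

int main() {
  // A target 0x100 bytes past the end of the call yields rel32 = 0x100.
  return ComputeCallRel32(0x1000, 0x25, 0x1125) == 0x100 ? 0 : 1;
}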
@@ -916,8 +876,7 @@ void CodeGeneratorX86_64::RecordSimplePatch() { } } -void CodeGeneratorX86_64::RecordBootStringPatch(HLoadString* load_string) { - DCHECK(GetCompilerOptions().IsBootImage()); +void CodeGeneratorX86_64::RecordStringPatch(HLoadString* load_string) { string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex()); __ Bind(&string_patches_.back().label); } @@ -927,12 +886,6 @@ void CodeGeneratorX86_64::RecordTypePatch(HLoadClass* load_class) { __ Bind(&type_patches_.back().label); } -Label* CodeGeneratorX86_64::NewStringBssEntryPatch(HLoadString* load_string) { - DCHECK(!GetCompilerOptions().IsBootImage()); - string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex()); - return &string_patches_.back().label; -} - Label* CodeGeneratorX86_64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file, uint32_t element_offset) { // Add a patch entry and return the label. @@ -940,21 +893,6 @@ Label* CodeGeneratorX86_64::NewPcRelativeDexCacheArrayPatch(const DexFile& dex_f return &pc_relative_dex_cache_patches_.back().label; } -// The label points to the end of the "movl" or another instruction but the literal offset -// for method patch needs to point to the embedded constant which occupies the last 4 bytes. -constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u; - -template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)> -inline void CodeGeneratorX86_64::EmitPcRelativeLinkerPatches( - const ArenaDeque<PatchInfo<Label>>& infos, - ArenaVector<LinkerPatch>* linker_patches) { - for (const PatchInfo<Label>& info : infos) { - uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment; - linker_patches->push_back( - Factory(literal_offset, &info.dex_file, info.label.Position(), info.index)); - } -} - void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) { DCHECK(linker_patches->empty()); size_t size = @@ -965,29 +903,48 @@ void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_pat string_patches_.size() + type_patches_.size(); linker_patches->reserve(size); - for (const PatchInfo<Label>& info : method_patches_) { + // The label points to the end of the "movl" insn but the literal offset for method + // patch needs to point to the embedded constant which occupies the last 4 bytes. 
+ constexpr uint32_t kLabelPositionToLiteralOffsetAdjustment = 4u; + for (const MethodPatchInfo<Label>& info : method_patches_) { + uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment; + linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset, + info.target_method.dex_file, + info.target_method.dex_method_index)); + } + for (const MethodPatchInfo<Label>& info : relative_call_patches_) { uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment; - linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset, &info.dex_file, info.index)); + linker_patches->push_back(LinkerPatch::RelativeCodePatch(literal_offset, + info.target_method.dex_file, + info.target_method.dex_method_index)); } - for (const PatchInfo<Label>& info : relative_call_patches_) { + for (const PcRelativeDexCacheAccessInfo& info : pc_relative_dex_cache_patches_) { uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment; - linker_patches->push_back( - LinkerPatch::RelativeCodePatch(literal_offset, &info.dex_file, info.index)); + linker_patches->push_back(LinkerPatch::DexCacheArrayPatch(literal_offset, + &info.target_dex_file, + info.label.Position(), + info.element_offset)); } - EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_, - linker_patches); for (const Label& label : simple_patches_) { uint32_t literal_offset = label.Position() - kLabelPositionToLiteralOffsetAdjustment; linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset)); } - if (!GetCompilerOptions().IsBootImage()) { - EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_patches_, linker_patches); - } else { + for (const StringPatchInfo<Label>& info : string_patches_) { // These are always PC-relative, see GetSupportedLoadStringKind(). - EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(string_patches_, linker_patches); + uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment; + linker_patches->push_back(LinkerPatch::RelativeStringPatch(literal_offset, + &info.dex_file, + info.label.Position(), + info.string_index)); + } + for (const TypePatchInfo<Label>& info : type_patches_) { + // These are always PC-relative, see GetSupportedLoadClassKind(). + uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment; + linker_patches->push_back(LinkerPatch::RelativeTypePatch(literal_offset, + &info.dex_file, + info.label.Position(), + info.type_index)); } - // These are always PC-relative, see GetSupportedLoadClassKind(). - EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(type_patches_, linker_patches); } void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const { @@ -5402,7 +5359,7 @@ HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind( case HLoadString::LoadKind::kDexCacheAddress: DCHECK(Runtime::Current()->UseJitCompilation()); break; - case HLoadString::LoadKind::kBssEntry: + case HLoadString::LoadKind::kDexCachePcRelative: DCHECK(!Runtime::Current()->UseJitCompilation()); break; case HLoadString::LoadKind::kDexCacheViaMethod: @@ -5413,9 +5370,7 @@ HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind( void LocationsBuilderX86_64::VisitLoadString(HLoadString* load) { LocationSummary::CallKind call_kind = load->NeedsEnvironment() - ? ((load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) - ? 
LocationSummary::kCallOnMainOnly - : LocationSummary::kCallOnSlowPath) + ? LocationSummary::kCallOnMainOnly : LocationSummary::kNoCall; LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind); if (load->GetLoadKind() == HLoadString::LoadKind::kDexCacheViaMethod) { @@ -5434,7 +5389,7 @@ void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) { switch (load->GetLoadKind()) { case HLoadString::LoadKind::kBootImageLinkTimePcRelative: { __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false)); - codegen_->RecordBootStringPatch(load); + codegen_->RecordStringPatch(load); return; // No dex cache slow path. } case HLoadString::LoadKind::kBootImageAddress: { @@ -5444,19 +5399,6 @@ void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) { codegen_->RecordSimplePatch(); return; // No dex cache slow path. } - case HLoadString::LoadKind::kBssEntry: { - Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, - /* no_rip */ false); - Label* fixup_label = codegen_->NewStringBssEntryPatch(load); - // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */ - GenerateGcRootFieldLoad(load, out_loc, address, fixup_label); - SlowPathCode* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathX86_64(load); - codegen_->AddSlowPath(slow_path); - __ testl(out, out); - __ j(kEqual, slow_path->GetEntryLabel()); - __ Bind(slow_path->GetExitLabel()); - return; - } default: break; } diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h index 74b065d317..594f05157b 100644 --- a/compiler/optimizing/code_generator_x86_64.h +++ b/compiler/optimizing/code_generator_x86_64.h @@ -406,9 +406,8 @@ class CodeGeneratorX86_64 : public CodeGenerator { void GenerateVirtualCall(HInvokeVirtual* invoke, Location temp) OVERRIDE; void RecordSimplePatch(); - void RecordBootStringPatch(HLoadString* load_string); + void RecordStringPatch(HLoadString* load_string); void RecordTypePatch(HLoadClass* load_class); - Label* NewStringBssEntryPatch(HLoadString* load_string); Label* NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file, uint32_t element_offset); void MoveFromReturnRegister(Location trg, Primitive::Type type) OVERRIDE; @@ -555,9 +554,14 @@ class CodeGeneratorX86_64 : public CodeGenerator { static constexpr int32_t kDummy32BitOffset = 256; private: - template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)> - static void EmitPcRelativeLinkerPatches(const ArenaDeque<PatchInfo<Label>>& infos, - ArenaVector<LinkerPatch>* linker_patches); + struct PcRelativeDexCacheAccessInfo { + PcRelativeDexCacheAccessInfo(const DexFile& dex_file, uint32_t element_off) + : target_dex_file(dex_file), element_offset(element_off), label() { } + + const DexFile& target_dex_file; + uint32_t element_offset; + Label label; + }; // Labels for each block that will be compiled. Label* block_labels_; // Indexed by block id. @@ -573,16 +577,16 @@ class CodeGeneratorX86_64 : public CodeGenerator { int constant_area_start_; // Method patch info. Using ArenaDeque<> which retains element addresses on push/emplace_back(). - ArenaDeque<PatchInfo<Label>> method_patches_; - ArenaDeque<PatchInfo<Label>> relative_call_patches_; + ArenaDeque<MethodPatchInfo<Label>> method_patches_; + ArenaDeque<MethodPatchInfo<Label>> relative_call_patches_; // PC-relative DexCache access info. 
- ArenaDeque<PatchInfo<Label>> pc_relative_dex_cache_patches_; + ArenaDeque<PcRelativeDexCacheAccessInfo> pc_relative_dex_cache_patches_; // Patch locations for patchoat where the linker doesn't do any other work. ArenaDeque<Label> simple_patches_; - // String patch locations; type depends on configuration (app .bss or boot image PIC). - ArenaDeque<PatchInfo<Label>> string_patches_; + // String patch locations. + ArenaDeque<StringPatchInfo<Label>> string_patches_; // Type patch locations. - ArenaDeque<PatchInfo<Label>> type_patches_; + ArenaDeque<TypePatchInfo<Label>> type_patches_; // Fixups for jump tables need to be handled specially. ArenaVector<JumpTableRIPFixup*> fixups_to_jump_tables_; diff --git a/compiler/optimizing/dex_cache_array_fixups_arm.cc b/compiler/optimizing/dex_cache_array_fixups_arm.cc index 82b81238ab..7010171c80 100644 --- a/compiler/optimizing/dex_cache_array_fixups_arm.cc +++ b/compiler/optimizing/dex_cache_array_fixups_arm.cc @@ -62,6 +62,21 @@ class DexCacheArrayFixupsVisitor : public HGraphVisitor { } } + void VisitLoadString(HLoadString* load_string) OVERRIDE { + // If this is a load with PC-relative access to the dex cache strings array, + // we need to add the dex cache arrays base as the special input. + if (load_string->GetLoadKind() == HLoadString::LoadKind::kDexCachePcRelative) { + // Initialize base for target dex file if needed. + const DexFile& dex_file = load_string->GetDexFile(); + HArmDexCacheArraysBase* base = GetOrCreateDexCacheArrayBase(dex_file); + // Update the element offset in base. + DexCacheArraysLayout layout(kArmPointerSize, &dex_file); + base->UpdateElementOffset(layout.StringOffset(load_string->GetStringIndex())); + // Add the special argument base to the load. + load_string->AddSpecialInput(base); + } + } + void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) OVERRIDE { // If this is an invoke with PC-relative access to the dex cache methods array, // we need to add the dex cache arrays base as the special input. diff --git a/compiler/optimizing/dex_cache_array_fixups_mips.cc b/compiler/optimizing/dex_cache_array_fixups_mips.cc index 31fff26dd5..4456b49e87 100644 --- a/compiler/optimizing/dex_cache_array_fixups_mips.cc +++ b/compiler/optimizing/dex_cache_array_fixups_mips.cc @@ -68,6 +68,21 @@ class DexCacheArrayFixupsVisitor : public HGraphVisitor { } } + void VisitLoadString(HLoadString* load_string) OVERRIDE { + // If this is a load with PC-relative access to the dex cache strings array, + // we need to add the dex cache arrays base as the special input. + if (load_string->GetLoadKind() == HLoadString::LoadKind::kDexCachePcRelative) { + // Initialize base for target dex file if needed. + const DexFile& dex_file = load_string->GetDexFile(); + HMipsDexCacheArraysBase* base = GetOrCreateDexCacheArrayBase(dex_file); + // Update the element offset in base. + DexCacheArraysLayout layout(kMipsPointerSize, &dex_file); + base->UpdateElementOffset(layout.StringOffset(load_string->GetStringIndex())); + // Add the special argument base to the load. + load_string->AddSpecialInput(base); + } + } + void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) OVERRIDE { // If this is an invoke with PC-relative access to the dex cache methods array, // we need to add the dex cache arrays base as the special input. 
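The two fixup visitors above feed DexCacheArraysLayout::StringOffset(index) into the dex-cache-arrays base instruction. As a simplified model only (the real layout interleaves several per-dex-file arrays and is not reproduced here), the element offset is the start of the resolved-strings array plus index times the slot size, sketched below with invented parameters.

#include <cstddef>
#include <cstdint>

// Simplified stand-in for DexCacheArraysLayout::StringOffset(): assumes the
// resolved-strings array begins strings_begin bytes into the dex cache arrays
// region and that each entry occupies slot_size bytes.
size_t StringElementOffset(size_t strings_begin, size_t slot_size, uint32_t string_index) {
  return strings_begin + static_cast<size_t>(string_index) * slot_size;
}

int main() {
  // With a hypothetical 0x100-byte strings_begin and 4-byte slots,
  // string index 10 lives at offset 0x128.
  return StringElementOffset(0x100, 4, 10) == 0x128 ? 0 : 1;
}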
diff --git a/compiler/optimizing/nodes.cc b/compiler/optimizing/nodes.cc index cea29bca2b..ef9bf23a17 100644 --- a/compiler/optimizing/nodes.cc +++ b/compiler/optimizing/nodes.cc @@ -2607,8 +2607,12 @@ bool HLoadString::InstructionDataEquals(const HInstruction* other) const { LoadKind load_kind = GetLoadKind(); if (HasAddress(load_kind)) { return GetAddress() == other_load_string->GetAddress(); + } else if (HasStringReference(load_kind)) { + return IsSameDexFile(GetDexFile(), other_load_string->GetDexFile()); } else { - DCHECK(HasStringReference(load_kind)) << load_kind; + DCHECK(HasDexCacheReference(load_kind)) << load_kind; + // If the string indexes and dex files are the same, dex cache element offsets + // must also be the same, so we don't need to compare them. return IsSameDexFile(GetDexFile(), other_load_string->GetDexFile()); } } @@ -2638,8 +2642,8 @@ std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs) { return os << "BootImageAddress"; case HLoadString::LoadKind::kDexCacheAddress: return os << "DexCacheAddress"; - case HLoadString::LoadKind::kBssEntry: - return os << "BssEntry"; + case HLoadString::LoadKind::kDexCachePcRelative: + return os << "DexCachePcRelative"; case HLoadString::LoadKind::kDexCacheViaMethod: return os << "DexCacheViaMethod"; default: diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h index 077e867d64..397abded27 100644 --- a/compiler/optimizing/nodes.h +++ b/compiler/optimizing/nodes.h @@ -5650,9 +5650,10 @@ class HLoadString FINAL : public HInstruction { // Used for strings outside the boot image referenced by JIT-compiled code. kDexCacheAddress, - // Load from an entry in the .bss section using a PC-relative load. - // Used for strings outside boot image when .bss is accessible with a PC-relative load. - kBssEntry, + // Load from resolved strings array in the dex cache using a PC-relative load. + // Used for strings outside boot image when we know that we can access + // the dex cache arrays using a PC-relative load. + kDexCachePcRelative, // Load from resolved strings array accessed through the class loaded from // the compiled method's own ArtMethod*. 
This is the default access type when @@ -5671,7 +5672,7 @@ class HLoadString FINAL : public HInstruction { string_index_(string_index) { SetPackedFlag<kFlagIsInDexCache>(false); SetPackedField<LoadKindField>(LoadKind::kDexCacheViaMethod); - load_data_.dex_file_ = &dex_file; + load_data_.ref.dex_file = &dex_file; } void SetLoadKindWithAddress(LoadKind load_kind, uint64_t address) { @@ -5684,11 +5685,20 @@ class HLoadString FINAL : public HInstruction { const DexFile& dex_file, uint32_t string_index) { DCHECK(HasStringReference(load_kind)); - load_data_.dex_file_ = &dex_file; + load_data_.ref.dex_file = &dex_file; string_index_ = string_index; SetLoadKindInternal(load_kind); } + void SetLoadKindWithDexCacheReference(LoadKind load_kind, + const DexFile& dex_file, + uint32_t element_index) { + DCHECK(HasDexCacheReference(load_kind)); + load_data_.ref.dex_file = &dex_file; + load_data_.ref.dex_cache_element_index = element_index; + SetLoadKindInternal(load_kind); + } + LoadKind GetLoadKind() const { return GetPackedField<LoadKindField>(); } @@ -5700,6 +5710,8 @@ class HLoadString FINAL : public HInstruction { return string_index_; } + uint32_t GetDexCacheElementOffset() const; + uint64_t GetAddress() const { DCHECK(HasAddress(GetLoadKind())); return load_data_.address; @@ -5769,7 +5781,6 @@ class HLoadString FINAL : public HInstruction { static bool HasStringReference(LoadKind load_kind) { return load_kind == LoadKind::kBootImageLinkTimeAddress || load_kind == LoadKind::kBootImageLinkTimePcRelative || - load_kind == LoadKind::kBssEntry || load_kind == LoadKind::kDexCacheViaMethod; } @@ -5777,6 +5788,10 @@ class HLoadString FINAL : public HInstruction { return load_kind == LoadKind::kBootImageAddress || load_kind == LoadKind::kDexCacheAddress; } + static bool HasDexCacheReference(LoadKind load_kind) { + return load_kind == LoadKind::kDexCachePcRelative; + } + void SetLoadKindInternal(LoadKind load_kind); // The special input is the HCurrentMethod for kDexCacheViaMethod. @@ -5789,7 +5804,10 @@ class HLoadString FINAL : public HInstruction { uint32_t string_index_; union { - const DexFile* dex_file_; // For string reference. + struct { + const DexFile* dex_file; // For string reference and dex cache reference. + uint32_t dex_cache_element_index; // Only for dex cache reference. + } ref; uint64_t address; // Up to 64-bit, needed for kDexCacheAddress on 64-bit targets. } load_data_; @@ -5799,8 +5817,15 @@ std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs); // Note: defined outside class to see operator<<(., HLoadString::LoadKind). inline const DexFile& HLoadString::GetDexFile() const { - DCHECK(HasStringReference(GetLoadKind())) << GetLoadKind(); - return *load_data_.dex_file_; + DCHECK(HasStringReference(GetLoadKind()) || HasDexCacheReference(GetLoadKind())) + << GetLoadKind(); + return *load_data_.ref.dex_file; +} + +// Note: defined outside class to see operator<<(., HLoadString::LoadKind). +inline uint32_t HLoadString::GetDexCacheElementOffset() const { + DCHECK(HasDexCacheReference(GetLoadKind())) << GetLoadKind(); + return load_data_.ref.dex_cache_element_index; } // Note: defined outside class to see operator<<(., HLoadString::LoadKind). @@ -5808,7 +5833,7 @@ inline void HLoadString::AddSpecialInput(HInstruction* special_input) { // The special input is used for PC-relative loads on some architectures, // including literal pool loads, which are PC-relative too. 
DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative || - GetLoadKind() == LoadKind::kBssEntry || + GetLoadKind() == LoadKind::kDexCachePcRelative || GetLoadKind() == LoadKind::kBootImageLinkTimeAddress || GetLoadKind() == LoadKind::kBootImageAddress) << GetLoadKind(); // HLoadString::GetInputRecords() returns an empty array at this point, diff --git a/compiler/optimizing/pc_relative_fixups_mips.cc b/compiler/optimizing/pc_relative_fixups_mips.cc index 82feb95a2f..6006e6cf5d 100644 --- a/compiler/optimizing/pc_relative_fixups_mips.cc +++ b/compiler/optimizing/pc_relative_fixups_mips.cc @@ -83,7 +83,6 @@ class PCRelativeHandlerVisitor : public HGraphVisitor { case HLoadString::LoadKind::kBootImageLinkTimeAddress: case HLoadString::LoadKind::kBootImageAddress: case HLoadString::LoadKind::kBootImageLinkTimePcRelative: - case HLoadString::LoadKind::kBssEntry: // Add a base register for PC-relative literals on R2. InitializePCRelativeBasePointer(); load_string->AddSpecialInput(base_); diff --git a/compiler/optimizing/pc_relative_fixups_x86.cc b/compiler/optimizing/pc_relative_fixups_x86.cc index b1fdb1792d..75587af7a1 100644 --- a/compiler/optimizing/pc_relative_fixups_x86.cc +++ b/compiler/optimizing/pc_relative_fixups_x86.cc @@ -92,7 +92,7 @@ class PCRelativeHandlerVisitor : public HGraphVisitor { void VisitLoadString(HLoadString* load_string) OVERRIDE { HLoadString::LoadKind load_kind = load_string->GetLoadKind(); if (load_kind == HLoadString::LoadKind::kBootImageLinkTimePcRelative || - load_kind == HLoadString::LoadKind::kBssEntry) { + load_kind == HLoadString::LoadKind::kDexCachePcRelative) { InitializePCRelativeBasePointer(); load_string->AddSpecialInput(base_); } diff --git a/compiler/optimizing/sharpening.cc b/compiler/optimizing/sharpening.cc index c1cfe8d00f..a4a3e0695d 100644 --- a/compiler/optimizing/sharpening.cc +++ b/compiler/optimizing/sharpening.cc @@ -163,7 +163,7 @@ void HSharpening::ProcessLoadClass(HLoadClass* load_class) { : hs.NewHandle(class_linker->FindDexCache(soa.Self(), dex_file)); mirror::Class* klass = dex_cache->GetResolvedType(type_index); - if (codegen_->GetCompilerOptions().IsBootImage()) { + if (compiler_driver_->IsBootImage()) { // Compiling boot image. Check if the class is a boot image class. DCHECK(!runtime->UseJitCompilation()); if (!compiler_driver_->GetSupportBootImageFixup()) { @@ -281,7 +281,7 @@ void HSharpening::ProcessLoadString(HLoadString* load_string) { ? compilation_unit_.GetDexCache() : hs.NewHandle(class_linker->FindDexCache(soa.Self(), dex_file)); - if (codegen_->GetCompilerOptions().IsBootImage()) { + if (compiler_driver_->IsBootImage()) { // Compiling boot image. Resolve the string and allocate it if needed. 
DCHECK(!runtime->UseJitCompilation()); mirror::String* string = class_linker->ResolveString(dex_file, string_index, dex_cache); @@ -311,8 +311,6 @@ void HSharpening::ProcessLoadString(HLoadString* load_string) { !codegen_->GetCompilerOptions().GetCompilePic()) { desired_load_kind = HLoadString::LoadKind::kBootImageAddress; address = reinterpret_cast64<uint64_t>(string); - } else { - desired_load_kind = HLoadString::LoadKind::kBssEntry; } } } @@ -321,7 +319,6 @@ void HSharpening::ProcessLoadString(HLoadString* load_string) { switch (load_kind) { case HLoadString::LoadKind::kBootImageLinkTimeAddress: case HLoadString::LoadKind::kBootImageLinkTimePcRelative: - case HLoadString::LoadKind::kBssEntry: case HLoadString::LoadKind::kDexCacheViaMethod: load_string->SetLoadKindWithStringReference(load_kind, dex_file, string_index); break; @@ -330,6 +327,13 @@ void HSharpening::ProcessLoadString(HLoadString* load_string) { DCHECK_NE(address, 0u); load_string->SetLoadKindWithAddress(load_kind, address); break; + case HLoadString::LoadKind::kDexCachePcRelative: { + PointerSize pointer_size = InstructionSetPointerSize(codegen_->GetInstructionSet()); + DexCacheArraysLayout layout(pointer_size, &dex_file); + size_t element_index = layout.StringOffset(string_index); + load_string->SetLoadKindWithDexCacheReference(load_kind, dex_file, element_index); + break; + } } } diff --git a/compiler/utils/arm/assembler_arm.h b/compiler/utils/arm/assembler_arm.h index 0ed8a35338..ee5811c3c0 100644 --- a/compiler/utils/arm/assembler_arm.h +++ b/compiler/utils/arm/assembler_arm.h @@ -262,6 +262,12 @@ class Address : public ValueObject { CHECK_NE(rm, PC); } + // LDR(literal) - pc relative load. + explicit Address(int32_t offset) : + rn_(PC), rm_(R0), offset_(offset), + am_(Offset), is_immed_offset_(false), shift_(LSL) { + } + static bool CanHoldLoadOffsetArm(LoadOperandType type, int offset); static bool CanHoldStoreOffsetArm(StoreOperandType type, int offset); diff --git a/compiler/utils/arm/assembler_thumb2.cc b/compiler/utils/arm/assembler_thumb2.cc index 61b7f08518..2269ba2d20 100644 --- a/compiler/utils/arm/assembler_thumb2.cc +++ b/compiler/utils/arm/assembler_thumb2.cc @@ -2461,36 +2461,58 @@ void Thumb2Assembler::EmitLoadStore(Condition cond, } } else { // Register shift. - CHECK_NE(ad.GetRegister(), PC); - if (ad.GetShiftCount() != 0) { - // If there is a shift count this must be 32 bit. - must_be_32bit = true; - } else if (IsHighRegister(ad.GetRegisterOffset())) { - must_be_32bit = true; - } - - if (must_be_32bit) { - int32_t encoding = 0x1f << 27 | (load ? B20 : 0) | static_cast<uint32_t>(rd) << 12 | - ad.encodingThumb(true); - if (half) { - encoding |= B21; - } else if (!byte) { - encoding |= B22; - } - if (load && is_signed && (byte || half)) { - encoding |= B24; + if (ad.GetRegister() == PC) { + // PC relative literal encoding. + int32_t offset = ad.GetOffset(); + if (must_be_32bit || offset < 0 || offset >= (1 << 10) || !load) { + int32_t up = B23; + if (offset < 0) { + offset = -offset; + up = 0; + } + CHECK_LT(offset, (1 << 12)); + int32_t encoding = 0x1f << 27 | 0xf << 16 | B22 | (load ? B20 : 0) | + offset | up | + static_cast<uint32_t>(rd) << 12; + Emit32(encoding); + } else { + // 16 bit literal load. + CHECK_GE(offset, 0); + CHECK_LT(offset, (1 << 10)); + int32_t encoding = B14 | (load ? B11 : 0) | static_cast<uint32_t>(rd) << 8 | offset >> 2; + Emit16(encoding); } - Emit32(encoding); } else { - // 16 bit register offset. - int32_t encoding = B14 | B12 | (load ? 
B11 : 0) | static_cast<uint32_t>(rd) | - ad.encodingThumb(false); - if (byte) { - encoding |= B10; - } else if (half) { - encoding |= B9; + if (ad.GetShiftCount() != 0) { + // If there is a shift count this must be 32 bit. + must_be_32bit = true; + } else if (IsHighRegister(ad.GetRegisterOffset())) { + must_be_32bit = true; + } + + if (must_be_32bit) { + int32_t encoding = 0x1f << 27 | (load ? B20 : 0) | static_cast<uint32_t>(rd) << 12 | + ad.encodingThumb(true); + if (half) { + encoding |= B21; + } else if (!byte) { + encoding |= B22; + } + if (load && is_signed && (byte || half)) { + encoding |= B24; + } + Emit32(encoding); + } else { + // 16 bit register offset. + int32_t encoding = B14 | B12 | (load ? B11 : 0) | static_cast<uint32_t>(rd) | + ad.encodingThumb(false); + if (byte) { + encoding |= B10; + } else if (half) { + encoding |= B9; + } + Emit16(encoding); } - Emit16(encoding); } } } diff --git a/compiler/utils/assembler_thumb_test.cc b/compiler/utils/assembler_thumb_test.cc index 86a4aa2245..3b05173d88 100644 --- a/compiler/utils/assembler_thumb_test.cc +++ b/compiler/utils/assembler_thumb_test.cc @@ -1245,6 +1245,22 @@ TEST_F(Thumb2AssemblerTest, LoadStoreRegOffset) { EmitAndCheck(&assembler, "LoadStoreRegOffset"); } +TEST_F(Thumb2AssemblerTest, LoadStoreLiteral) { + __ ldr(R0, Address(4)); + __ str(R0, Address(4)); + + __ ldr(R0, Address(-8)); + __ str(R0, Address(-8)); + + // Limits. + __ ldr(R0, Address(0x3ff)); // 10 bits (16 bit). + __ ldr(R0, Address(0x7ff)); // 11 bits (32 bit). + __ str(R0, Address(0x3ff)); // 32 bit (no 16 bit str(literal)). + __ str(R0, Address(0x7ff)); // 11 bits (32 bit). + + EmitAndCheck(&assembler, "LoadStoreLiteral"); +} + TEST_F(Thumb2AssemblerTest, LoadStoreLimits) { __ ldr(R0, Address(R4, 124)); // 16 bit. __ ldr(R0, Address(R4, 128)); // 32 bit. 
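Note on the assembler change above: the restored Thumb2 LDR/STR (literal) support picks between the 16-bit and 32-bit encodings based on the sign and size of the PC-relative offset and on whether a 16-bit form exists at all (there is no 16-bit STR literal). The stand-alone C++ sketch below mirrors that selection and the bit patterns from the assembler_thumb2.cc hunk; it is illustrative only, the function names are not ART code, and word alignment of the offset is assumed.

// Illustrative sketch (assumptions: low destination register, word-aligned offset).
#include <cstdint>

// The 16-bit T1 encoding exists only for loads with a non-negative offset below 1 KiB.
static bool FitsShortLiteralLoad(bool is_load, bool must_be_32bit, int32_t offset) {
  return is_load && !must_be_32bit && offset >= 0 && offset < (1 << 10);
}

static uint32_t EncodeLiteralLoadStore(bool is_load, bool must_be_32bit,
                                       uint32_t rd, int32_t offset) {
  if (FitsShortLiteralLoad(is_load, must_be_32bit, offset)) {
    // 16-bit T1 encoding: 01001 Rt imm8, where imm8 = offset / 4.
    return 0x4800u | (rd << 8) | (static_cast<uint32_t>(offset) >> 2);
  }
  // 32-bit T2 encoding: Rn = PC (0b1111); the U bit (bit 23) selects whether the
  // 12-bit offset is added to or subtracted from the aligned PC.
  uint32_t up = (offset >= 0) ? (1u << 23) : 0u;
  uint32_t imm12 = static_cast<uint32_t>(offset >= 0 ? offset : -offset);
  return 0xF8000000u | (0xFu << 16) | (1u << 22) | (is_load ? (1u << 20) : 0u) |
         up | (rd << 12) | imm12;
}

With these encodings, ldr r0, [pc, #4] assembles to 0x4801 and str.w r0, [pc, #4] to 0xf8cf0004, matching the LoadStoreLiteral expectations added in assembler_thumb_test_expected.cc.inc below.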
diff --git a/compiler/utils/assembler_thumb_test_expected.cc.inc b/compiler/utils/assembler_thumb_test_expected.cc.inc index 91f397087c..81c6ec5fac 100644 --- a/compiler/utils/assembler_thumb_test_expected.cc.inc +++ b/compiler/utils/assembler_thumb_test_expected.cc.inc @@ -5012,6 +5012,17 @@ const char* const LoadStoreRegOffsetResults[] = { " 28: f841 0008 str.w r0, [r1, r8]\n", nullptr }; +const char* const LoadStoreLiteralResults[] = { + " 0: 4801 ldr r0, [pc, #4] ; (8 <LoadStoreLiteral+0x8>)\n", + " 2: f8cf 0004 str.w r0, [pc, #4] ; 8 <LoadStoreLiteral+0x8>\n", + " 6: f85f 0008 ldr.w r0, [pc, #-8] ; 0 <LoadStoreLiteral>\n", + " a: f84f 0008 str.w r0, [pc, #-8] ; 4 <LoadStoreLiteral+0x4>\n", + " e: 48ff ldr r0, [pc, #1020] ; (40c <LoadStoreLiteral+0x40c>)\n", + " 10: f8df 07ff ldr.w r0, [pc, #2047] ; 813 <LoadStoreLiteral+0x813>\n", + " 14: f8cf 03ff str.w r0, [pc, #1023] ; 417 <LoadStoreLiteral+0x417>\n", + " 18: f8cf 07ff str.w r0, [pc, #2047] ; 81b <LoadStoreLiteral+0x81b>\n", + nullptr +}; const char* const LoadStoreLimitsResults[] = { " 0: 6fe0 ldr r0, [r4, #124] ; 0x7c\n", " 2: f8d4 0080 ldr.w r0, [r4, #128] ; 0x80\n", @@ -5697,6 +5708,7 @@ void setup_results() { test_results["MixedBranch32"] = MixedBranch32Results; test_results["Shifts"] = ShiftsResults; test_results["LoadStoreRegOffset"] = LoadStoreRegOffsetResults; + test_results["LoadStoreLiteral"] = LoadStoreLiteralResults; test_results["LoadStoreLimits"] = LoadStoreLimitsResults; test_results["CompareAndBranch"] = CompareAndBranchResults; test_results["AddConstant"] = AddConstantResults; diff --git a/dex2oat/dex2oat.cc b/dex2oat/dex2oat.cc index 9cdd24b30b..5d44cc1b9f 100644 --- a/dex2oat/dex2oat.cc +++ b/dex2oat/dex2oat.cc @@ -523,6 +523,8 @@ class Dex2Oat FINAL { compiled_methods_zip_filename_(nullptr), compiled_methods_filename_(nullptr), passes_to_run_filename_(nullptr), + app_image_(false), + boot_image_(false), multi_image_(false), is_host_(false), class_loader_(nullptr), @@ -689,8 +691,8 @@ class Dex2Oat FINAL { } void ProcessOptions(ParserOptions* parser_options) { - compiler_options_->boot_image_ = !image_filenames_.empty(); - compiler_options_->app_image_ = app_image_fd_ != -1 || !app_image_file_name_.empty(); + boot_image_ = !image_filenames_.empty(); + app_image_ = app_image_fd_ != -1 || !app_image_file_name_.empty(); if (IsAppImage() && IsBootImage()) { Usage("Can't have both --image and (--app-image-fd or --app-image-file)"); @@ -742,7 +744,7 @@ class Dex2Oat FINAL { android_root_ += android_root_env_var; } - if (!IsBootImage() && parser_options->boot_image_filename.empty()) { + if (!boot_image_ && parser_options->boot_image_filename.empty()) { parser_options->boot_image_filename += android_root_; parser_options->boot_image_filename += "/framework/boot.art"; } @@ -1323,7 +1325,7 @@ class Dex2Oat FINAL { } void LoadClassProfileDescriptors() { - if (profile_compilation_info_ != nullptr && IsAppImage()) { + if (profile_compilation_info_ != nullptr && app_image_) { Runtime* runtime = Runtime::Current(); CHECK(runtime != nullptr); std::set<DexCacheResolvedClasses> resolved_classes( @@ -1630,6 +1632,8 @@ class Dex2Oat FINAL { compiler_kind_, instruction_set_, instruction_set_features_.get(), + IsBootImage(), + IsAppImage(), image_classes_.release(), compiled_classes_.release(), compiled_methods_.release(), @@ -1720,7 +1724,7 @@ class Dex2Oat FINAL { } if (IsImage()) { - if (IsAppImage() && image_base_ == 0) { + if (app_image_ && image_base_ == 0) { gc::Heap* const heap = Runtime::Current()->GetHeap(); for 
(gc::space::ImageSpace* image_space : heap->GetBootImageSpaces()) { image_base_ = std::max(image_base_, RoundUp( @@ -1788,10 +1792,7 @@ class Dex2Oat FINAL { size_t rodata_size = oat_writer->GetOatHeader().GetExecutableOffset(); size_t text_size = oat_writer->GetOatSize() - rodata_size; - elf_writer->PrepareDynamicSection(rodata_size, - text_size, - oat_writer->GetBssSize(), - oat_writer->GetBssRootsOffset()); + elf_writer->SetLoadedSectionSizes(rodata_size, text_size, oat_writer->GetBssSize()); if (IsImage()) { // Update oat layout. @@ -1974,11 +1975,11 @@ class Dex2Oat FINAL { } bool IsAppImage() const { - return compiler_options_->IsAppImage(); + return app_image_; } bool IsBootImage() const { - return compiler_options_->IsBootImage(); + return boot_image_; } bool IsHost() const { @@ -2572,6 +2573,8 @@ class Dex2Oat FINAL { std::unique_ptr<std::unordered_set<std::string>> compiled_classes_; std::unique_ptr<std::unordered_set<std::string>> compiled_methods_; std::unique_ptr<std::vector<std::string>> passes_to_run_; + bool app_image_; + bool boot_image_; bool multi_image_; bool is_host_; std::string android_root_; diff --git a/oatdump/oatdump.cc b/oatdump/oatdump.cc index f2062c39cb..f3f418d32f 100644 --- a/oatdump/oatdump.cc +++ b/oatdump/oatdump.cc @@ -154,11 +154,8 @@ class OatSymbolizer FINAL { if (isa == kMips || isa == kMips64) { builder_->WriteMIPSabiflagsSection(); } - builder_->PrepareDynamicSection(elf_file->GetPath(), - rodata_size, - text_size, - oat_file_->BssSize(), - oat_file_->BssRootsOffset()); + builder_->PrepareDynamicSection( + elf_file->GetPath(), rodata_size, text_size, oat_file_->BssSize()); builder_->WriteDynamicSection(); Walk(); diff --git a/runtime/mirror/dex_cache-inl.h b/runtime/mirror/dex_cache-inl.h index 477402a6dc..359462d21a 100644 --- a/runtime/mirror/dex_cache-inl.h +++ b/runtime/mirror/dex_cache-inl.h @@ -25,7 +25,6 @@ #include "base/enums.h" #include "base/logging.h" #include "mirror/class.h" -#include "oat_file.h" #include "runtime.h" #include <atomic> @@ -168,11 +167,6 @@ inline void DexCache::VisitReferences(mirror::Class* klass, const Visitor& visit for (size_t i = 0, num_types = NumResolvedTypes(); i != num_types; ++i) { visitor.VisitRootIfNonNull(resolved_types[i].AddressWithoutBarrier()); } - if (GetDexFile() != nullptr && GetDexFile()->GetOatDexFile() != nullptr) { - for (GcRoot<Object>& root : OatFile::GetBssRoots(GetDexFile()->GetOatDexFile())) { - visitor.VisitRootIfNonNull(root.AddressWithoutBarrier()); - } - } } } diff --git a/runtime/oat_file.cc b/runtime/oat_file.cc index 52baddde6d..ea692cdaae 100644 --- a/runtime/oat_file.cc +++ b/runtime/oat_file.cc @@ -40,7 +40,6 @@ #include "base/unix_file/fd_file.h" #include "elf_file.h" #include "elf_utils.h" -#include "gc_root.h" #include "oat.h" #include "mem_map.h" #include "mirror/class.h" @@ -240,8 +239,6 @@ bool OatFileBase::ComputeFields(uint8_t* requested_base, } // Readjust to be non-inclusive upper bound. bss_end_ += sizeof(uint32_t); - // Find bss roots if present. 
- bss_roots_ = const_cast<uint8_t*>(FindDynamicSymbolAddress("oatbssroots", &symbol_error_msg)); } return true; @@ -294,31 +291,8 @@ bool OatFileBase::Setup(const char* abs_dex_location, std::string* error_msg) { return false; } - if (!IsAligned<alignof(GcRoot<mirror::Object>)>(bss_begin_) || - !IsAligned<alignof(GcRoot<mirror::Object>)>(bss_roots_) || - !IsAligned<alignof(GcRoot<mirror::Object>)>(bss_end_)) { - *error_msg = StringPrintf("In oat file '%s' found unaligned bss symbol(s): " - "begin = %p, roots = %p, end = %p", - GetLocation().c_str(), - bss_begin_, - bss_roots_, - bss_end_); - return false; - } - - if (bss_roots_ != nullptr && (bss_roots_ < bss_begin_ || bss_roots_ > bss_end_)) { - *error_msg = StringPrintf("In oat file '%s' found bss roots outside .bss: " - "%p is outside range [%p, %p]", - GetLocation().c_str(), - bss_roots_, - bss_begin_, - bss_end_); - return false; - } - PointerSize pointer_size = GetInstructionSetPointerSize(GetOatHeader().GetInstructionSet()); uint8_t* dex_cache_arrays = bss_begin_; - uint8_t* dex_cache_arrays_end = (bss_roots_ != nullptr) ? bss_roots_ : bss_end_; uint32_t dex_file_count = GetOatHeader().GetDexFileCount(); oat_dex_files_storage_.reserve(dex_file_count); for (size_t i = 0; i < dex_file_count; i++) { @@ -495,13 +469,13 @@ bool OatFileBase::Setup(const char* abs_dex_location, std::string* error_msg) { if (dex_cache_arrays != nullptr) { DexCacheArraysLayout layout(pointer_size, *header); if (layout.Size() != 0u) { - if (static_cast<size_t>(dex_cache_arrays_end - dex_cache_arrays) < layout.Size()) { + if (static_cast<size_t>(bss_end_ - dex_cache_arrays) < layout.Size()) { *error_msg = StringPrintf("In oat file '%s' found OatDexFile #%zu for '%s' with " "truncated dex cache arrays, %zu < %zu.", GetLocation().c_str(), i, dex_file_location.c_str(), - static_cast<size_t>(dex_cache_arrays_end - dex_cache_arrays), + static_cast<size_t>(bss_end_ - dex_cache_arrays), layout.Size()); return false; } @@ -532,9 +506,9 @@ bool OatFileBase::Setup(const char* abs_dex_location, std::string* error_msg) { } } - if (dex_cache_arrays != dex_cache_arrays_end) { + if (dex_cache_arrays != bss_end_) { // We expect the bss section to be either empty (dex_cache_arrays and bss_end_ - // both null) or contain just the dex cache arrays and optionally some GC roots. + // both null) or contain just the dex cache arrays and nothing else. *error_msg = StringPrintf("In oat file '%s' found unexpected bss size bigger by %zu bytes.", GetLocation().c_str(), static_cast<size_t>(bss_end_ - dex_cache_arrays)); @@ -1108,7 +1082,6 @@ OatFile::OatFile(const std::string& location, bool is_executable) end_(nullptr), bss_begin_(nullptr), bss_end_(nullptr), - bss_roots_(nullptr), is_executable_(is_executable), secondary_lookup_lock_("OatFile secondary lookup lock", kOatFileSecondaryLookupLock) { CHECK(!location_.empty()); @@ -1524,18 +1497,4 @@ bool OatFile::GetDexLocationsFromDependencies(const char* dex_dependencies, return true; } -ArrayRef<GcRoot<mirror::Object>> OatFile::GetBssRoots(const OatDexFile* oat_dex_file) { - const OatFile* oat_file = oat_dex_file->GetOatFile(); - DCHECK(ContainsElement(oat_file->oat_dex_files_storage_, oat_dex_file)); - // Arbitrarily attribute all the roots to the first oat_dex_file. 
- if (oat_file->bss_roots_ != nullptr && - oat_file->oat_dex_files_storage_.front() == oat_dex_file) { - auto* roots = reinterpret_cast<GcRoot<mirror::Object>*>(oat_file->bss_roots_); - auto* roots_end = reinterpret_cast<GcRoot<mirror::Object>*>(oat_file->bss_end_); - return ArrayRef<GcRoot<mirror::Object>>(roots, roots_end - roots); - } else { - return ArrayRef<GcRoot<mirror::Object>>(); - } -} - } // namespace art diff --git a/runtime/oat_file.h b/runtime/oat_file.h index a4aef3a88e..a61b941862 100644 --- a/runtime/oat_file.h +++ b/runtime/oat_file.h @@ -21,7 +21,6 @@ #include <string> #include <vector> -#include "base/array_ref.h" #include "base/mutex.h" #include "base/stringpiece.h" #include "dex_file.h" @@ -39,7 +38,6 @@ namespace art { class BitVector; class ElfFile; -template <class MirrorType> class GcRoot; class MemMap; class OatMethodOffsets; class OatHeader; @@ -255,10 +253,6 @@ class OatFile { return BssEnd() - BssBegin(); } - size_t BssRootsOffset() const { - return bss_roots_ - BssBegin(); - } - size_t DexSize() const { return DexEnd() - DexBegin(); } @@ -296,8 +290,6 @@ class OatFile { static bool GetDexLocationsFromDependencies(const char* dex_dependencies, std::vector<std::string>* locations); - static ArrayRef<GcRoot<mirror::Object>> GetBssRoots(const OatDexFile* oat_dex_file); - protected: OatFile(const std::string& filename, bool executable); @@ -322,9 +314,6 @@ class OatFile { // Pointer to the end of the .bss section, if present, otherwise null. uint8_t* bss_end_; - // Pointer to the beginning of the GC roots in .bss section, if present, otherwise null. - uint8_t* bss_roots_; - // Was this oat_file loaded executable? const bool is_executable_; diff --git a/test/552-checker-sharpening/src/Main.java b/test/552-checker-sharpening/src/Main.java index 3c053cf5ea..2232ff43d2 100644 --- a/test/552-checker-sharpening/src/Main.java +++ b/test/552-checker-sharpening/src/Main.java @@ -285,27 +285,31 @@ public class Main { /// CHECK: LoadString load_kind:DexCacheViaMethod /// CHECK-START-X86: java.lang.String Main.$noinline$getNonBootImageString() sharpening (after) - /// CHECK: LoadString load_kind:BssEntry + /// CHECK: LoadString load_kind:DexCachePcRelative /// CHECK-START-X86: java.lang.String Main.$noinline$getNonBootImageString() pc_relative_fixups_x86 (after) /// CHECK-DAG: X86ComputeBaseMethodAddress - /// CHECK-DAG: LoadString load_kind:BssEntry + /// CHECK-DAG: LoadString load_kind:DexCachePcRelative /// CHECK-START-X86_64: java.lang.String Main.$noinline$getNonBootImageString() sharpening (after) - /// CHECK: LoadString load_kind:BssEntry + /// CHECK: LoadString load_kind:DexCachePcRelative /// CHECK-START-ARM: java.lang.String Main.$noinline$getNonBootImageString() sharpening (after) - /// CHECK: LoadString load_kind:BssEntry + /// CHECK: LoadString load_kind:DexCachePcRelative + + /// CHECK-START-ARM: java.lang.String Main.$noinline$getNonBootImageString() dex_cache_array_fixups_arm (after) + /// CHECK-DAG: ArmDexCacheArraysBase + /// CHECK-DAG: LoadString load_kind:DexCachePcRelative /// CHECK-START-ARM64: java.lang.String Main.$noinline$getNonBootImageString() sharpening (after) - /// CHECK: LoadString load_kind:BssEntry + /// CHECK: LoadString load_kind:DexCachePcRelative /// CHECK-START-MIPS: java.lang.String Main.$noinline$getNonBootImageString() sharpening (after) - /// CHECK: LoadString load_kind:BssEntry + /// CHECK: LoadString load_kind:DexCachePcRelative - /// CHECK-START-MIPS: java.lang.String Main.$noinline$getNonBootImageString() pc_relative_fixups_mips 
(after) - /// CHECK-DAG: MipsComputeBaseMethodAddress - /// CHECK-DAG: LoadString load_kind:BssEntry + /// CHECK-START-MIPS: java.lang.String Main.$noinline$getNonBootImageString() dex_cache_array_fixups_mips (after) + /// CHECK-DAG: MipsDexCacheArraysBase + /// CHECK-DAG: LoadString load_kind:DexCachePcRelative public static String $noinline$getNonBootImageString() { // Prevent inlining to avoid the string comparison being optimized away. diff --git a/test/Android.run-test.mk b/test/Android.run-test.mk index d5a93b2639..211a69fa76 100644 --- a/test/Android.run-test.mk +++ b/test/Android.run-test.mk @@ -233,9 +233,11 @@ ART_TEST_KNOWN_BROKEN += $(call all-run-test-names,$(TARGET_TYPES),$(RUN_TYPES), # Disable 149-suspend-all-stress, its output is flaky (b/28988206). # Disable 577-profile-foreign-dex (b/27454772). +# Disable 552-checker-sharpening, until compiler component of new string dex cache is added (@cwadsworth, @vmarko) TEST_ART_BROKEN_ALL_TARGET_TESTS := \ 149-suspend-all-stress \ 577-profile-foreign-dex \ + 552-checker-sharpening \ ART_TEST_KNOWN_BROKEN += $(call all-run-test-names,$(TARGET_TYPES),$(RUN_TYPES),$(PREBUILD_TYPES), \ $(COMPILER_TYPES), $(RELOCATE_TYPES),$(TRACE_TYPES),$(GC_TYPES),$(JNI_TYPES), \
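For reference, the load-kind specific payload that the nodes.h hunk earlier in this change restores can be summarized by the following simplified, stand-alone sketch. It is not the actual ART class; field and enumerator names follow the diff, and only the kinds mentioned there are listed.

// Simplified sketch of the restored HLoadString load data (kDexCachePcRelative replaces kBssEntry).
#include <cstdint>

class DexFile;  // Opaque stand-in for art::DexFile.

enum class LoadKind {
  kBootImageLinkTimeAddress,
  kBootImageLinkTimePcRelative,
  kBootImageAddress,
  kDexCacheAddress,
  kDexCachePcRelative,
  kDexCacheViaMethod,
};

struct LoadStringPayload {
  uint32_t string_index;
  union {
    struct {
      const DexFile* dex_file;           // For string reference and dex cache reference kinds.
      uint32_t dex_cache_element_index;  // Only for kDexCachePcRelative; sharpening fills it
                                         // from DexCacheArraysLayout::StringOffset().
    } ref;
    uint64_t address;  // Up to 64-bit, needed for kDexCacheAddress on 64-bit targets.
  } load_data;
};

Keeping the dex file pointer as the first member of the ref struct lets the string-reference kinds and the dex-cache PC-relative kind share GetDexFile(), while only the PC-relative kind reads the element index, which is why GetDexFile() in the diff accepts both HasStringReference and HasDexCacheReference kinds.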