author Vladimir Marko <vmarko@google.com> 2017-07-25 13:26:39 +0100
committer Vladimir Marko <vmarko@google.com> 2017-09-07 17:52:35 +0100
commit 6cfbdbc359ec5414d3e49f70d28f8c0e65b98d63 (patch)
tree f92b309ddc43c2254b6067346a653170fbbf7316 /compiler/optimizing
parent 0f3c7003e08a42a4ed8c9f8dfffb1bee1118de59 (diff)
Use mmapped boot image intern table for PIC app HLoadString.
Implement a new HLoadString load kind for boot image strings referenced by PIC-compiled apps (i.e. prebuilts) that uses a PC-relative load from a boot image InternTable mmapped into the app's .bss. Compared to kBssEntry, this reduces the size of PIC prebuilts that reference boot image strings because we can completely avoid the slow path and stack map.

We separate the InternedStrings and ClassTable sections of the boot image (.art) file from the rest, aligning the start of the InternedStrings section to a page boundary. This may increase the size of the boot image file by a page, but it also allows mprotect()-ing these tables as read-only. The ClassTable section is included in anticipation of a similar load kind for HLoadClass.

Prebuilt services.odex for aosp_angler-userdebug (arm64):
  - before: 20862776
  - after:  20308512 (-541KiB)

Note that 92KiB of the savings could have been achieved by simply avoiding the read barrier, similar to the HLoadClass flag IsInBootImage(). Such a flag is now unnecessary.

Test: m test-art-host-gtest
Test: testrunner.py --host
Test: testrunner.py --host --pictest
Test: testrunner.py --target on Nexus 6P.
Test: testrunner.py --target --pictest on Nexus 6P.
Test: Nexus 6P boots.
Bug: 31951624
Change-Id: I5f2bf1fc0bb36a8483244317cfdfa69e192ef6c5
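For orientation, a rough C++ sketch (not part of the patch) of what the two app-side load kinds amount to at runtime; BssSlot and ResolveStringSlowPath are illustrative names for this sketch only, not real ART symbols.

#include <cstdint>

// Compressed String reference stored in a slot of the oat file's .bss.
using BssSlot = uint32_t;

// Stand-in for the runtime string-resolution entrypoint; returns a dummy
// nonzero "reference" here just to keep the sketch self-contained.
uint32_t ResolveStringSlowPath(uint32_t string_index) {
  return 0x1000u + string_index;
}

// kBootImageInternTable: the slot lies inside the boot image intern table
// mmapped into .bss, so it is always populated -- a single PC-relative load
// suffices, with no slow path and no stack map.
inline uint32_t LoadViaInternTable(const BssSlot* slot) {
  return *slot;
}

// kBssEntry: the slot starts out empty and is filled lazily, so the generated
// code needs a null check plus a slow-path call, which requires a stack map.
inline uint32_t LoadViaBssEntry(BssSlot* slot, uint32_t string_index) {
  uint32_t ref = *slot;
  if (ref == 0u) {
    ref = ResolveStringSlowPath(string_index);  // runtime call on first use
    *slot = ref;                                // cache for subsequent loads
  }
  return ref;
}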
Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/code_generator_arm64.cc       45
-rw-r--r--  compiler/optimizing/code_generator_arm64.h        12
-rw-r--r--  compiler/optimizing/code_generator_arm_vixl.cc    30
-rw-r--r--  compiler/optimizing/code_generator_arm_vixl.h      6
-rw-r--r--  compiler/optimizing/code_generator_mips.cc        44
-rw-r--r--  compiler/optimizing/code_generator_mips.h          7
-rw-r--r--  compiler/optimizing/code_generator_mips64.cc      44
-rw-r--r--  compiler/optimizing/code_generator_mips64.h        7
-rw-r--r--  compiler/optimizing/code_generator_x86.cc         27
-rw-r--r--  compiler/optimizing/code_generator_x86.h           4
-rw-r--r--  compiler/optimizing/code_generator_x86_64.cc      26
-rw-r--r--  compiler/optimizing/code_generator_x86_64.h        4
-rw-r--r--  compiler/optimizing/nodes.cc                       3
-rw-r--r--  compiler/optimizing/nodes.h                       10
-rw-r--r--  compiler/optimizing/pc_relative_fixups_mips.cc     3
-rw-r--r--  compiler/optimizing/pc_relative_fixups_x86.cc      1
-rw-r--r--  compiler/optimizing/sharpening.cc                 10
17 files changed, 220 insertions, 63 deletions
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 3be774a421..1b628688ec 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -435,11 +435,11 @@ class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
// The string entry page address was preserved in temp_ thanks to kSaveEverything.
} else {
// For non-Baker read barrier, we need to re-calculate the address of the string entry page.
- adrp_label_ = arm64_codegen->NewPcRelativeStringPatch(dex_file, string_index);
+ adrp_label_ = arm64_codegen->NewStringBssEntryPatch(dex_file, string_index);
arm64_codegen->EmitAdrpPlaceholder(adrp_label_, temp_);
}
vixl::aarch64::Label* strp_label =
- arm64_codegen->NewPcRelativeStringPatch(dex_file, string_index, adrp_label_);
+ arm64_codegen->NewStringBssEntryPatch(dex_file, string_index, adrp_label_);
{
SingleEmissionCheckScope guard(arm64_codegen->GetVIXLAssembler());
__ Bind(strp_label);
@@ -1463,6 +1463,7 @@ CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ string_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
baker_read_barrier_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_string_patches_(StringReferenceValueComparator(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
@@ -4675,6 +4676,13 @@ vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeStringPatch(
NewPcRelativePatch(dex_file, string_index.index_, adrp_label, &pc_relative_string_patches_);
}
+vixl::aarch64::Label* CodeGeneratorARM64::NewStringBssEntryPatch(
+ const DexFile& dex_file,
+ dex::StringIndex string_index,
+ vixl::aarch64::Label* adrp_label) {
+ return NewPcRelativePatch(dex_file, string_index.index_, adrp_label, &string_bss_entry_patches_);
+}
+
vixl::aarch64::Label* CodeGeneratorARM64::NewBakerReadBarrierPatch(uint32_t custom_data) {
baker_read_barrier_patches_.emplace_back(custom_data);
return &baker_read_barrier_patches_.back().label;
@@ -4764,6 +4772,7 @@ void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patc
pc_relative_type_patches_.size() +
type_bss_entry_patches_.size() +
pc_relative_string_patches_.size() +
+ string_bss_entry_patches_.size() +
baker_read_barrier_patches_.size();
linker_patches->reserve(size);
if (GetCompilerOptions().IsBootImage()) {
@@ -4776,13 +4785,15 @@ void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patc
} else {
DCHECK(pc_relative_method_patches_.empty());
DCHECK(pc_relative_type_patches_.empty());
- EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
- linker_patches);
+ EmitPcRelativeLinkerPatches<LinkerPatch::StringInternTablePatch>(pc_relative_string_patches_,
+ linker_patches);
}
EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
linker_patches);
EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
linker_patches);
+ EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_bss_entry_patches_,
+ linker_patches);
for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
linker_patches->push_back(LinkerPatch::BakerReadBarrierBranchPatch(info.label.GetLocation(),
info.custom_data));
@@ -5043,6 +5054,7 @@ HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
HLoadString::LoadKind desired_string_load_kind) {
switch (desired_string_load_kind) {
case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
+ case HLoadString::LoadKind::kBootImageInternTable:
case HLoadString::LoadKind::kBssEntry:
DCHECK(!Runtime::Current()->UseJitCompilation());
break;
@@ -5090,24 +5102,37 @@ void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) NO_THREAD
switch (load->GetLoadKind()) {
case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
+ DCHECK(codegen_->GetCompilerOptions().IsBootImage());
// Add ADRP with its PC-relative String patch.
const DexFile& dex_file = load->GetDexFile();
const dex::StringIndex string_index = load->GetStringIndex();
- DCHECK(codegen_->GetCompilerOptions().IsBootImage());
vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
// Add ADD with its PC-relative String patch.
vixl::aarch64::Label* add_label =
codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
codegen_->EmitAddPlaceholder(add_label, out.X(), out.X());
- return; // No dex cache slow path.
+ return;
}
case HLoadString::LoadKind::kBootImageAddress: {
uint32_t address = dchecked_integral_cast<uint32_t>(
reinterpret_cast<uintptr_t>(load->GetString().Get()));
DCHECK_NE(address, 0u);
__ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
- return; // No dex cache slow path.
+ return;
+ }
+ case HLoadString::LoadKind::kBootImageInternTable: {
+ DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
+ // Add ADRP with its PC-relative String patch.
+ const DexFile& dex_file = load->GetDexFile();
+ const dex::StringIndex string_index = load->GetStringIndex();
+ vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
+ codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
+ // Add LDR with its PC-relative String patch.
+ vixl::aarch64::Label* ldr_label =
+ codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
+ codegen_->EmitLdrOffsetPlaceholder(ldr_label, out.W(), out.X());
+ return;
}
case HLoadString::LoadKind::kBssEntry: {
// Add ADRP with its PC-relative String .bss entry patch.
@@ -5115,11 +5140,11 @@ void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) NO_THREAD
const dex::StringIndex string_index = load->GetStringIndex();
DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Register temp = XRegisterFrom(load->GetLocations()->GetTemp(0));
- vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
+ vixl::aarch64::Label* adrp_label = codegen_->NewStringBssEntryPatch(dex_file, string_index);
codegen_->EmitAdrpPlaceholder(adrp_label, temp);
- // Add LDR with its PC-relative String patch.
+ // Add LDR with its .bss entry String patch.
vixl::aarch64::Label* ldr_label =
- codegen_->NewPcRelativeStringPatch(dex_file, string_index, adrp_label);
+ codegen_->NewStringBssEntryPatch(dex_file, string_index, adrp_label);
// /* GcRoot<mirror::String> */ out = *(base_address + offset) /* PC-relative */
GenerateGcRootFieldLoad(load,
out_loc,
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index c3392097a2..69c511907e 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -599,6 +599,14 @@ class CodeGeneratorARM64 : public CodeGenerator {
dex::StringIndex string_index,
vixl::aarch64::Label* adrp_label = nullptr);
+ // Add a new .bss entry string patch for an instruction and return the label
+ // to be bound before the instruction. The instruction will be either the
+ // ADRP (pass `adrp_label = null`) or the ADD (pass `adrp_label` pointing
+ // to the associated ADRP patch label).
+ vixl::aarch64::Label* NewStringBssEntryPatch(const DexFile& dex_file,
+ dex::StringIndex string_index,
+ vixl::aarch64::Label* adrp_label = nullptr);
+
// Add a new baker read barrier patch and return the label to be bound
// before the CBNZ instruction.
vixl::aarch64::Label* NewBakerReadBarrierPatch(uint32_t custom_data);
@@ -825,8 +833,10 @@ class CodeGeneratorARM64 : public CodeGenerator {
ArenaDeque<PcRelativePatchInfo> pc_relative_type_patches_;
// PC-relative type patch info for kBssEntry.
ArenaDeque<PcRelativePatchInfo> type_bss_entry_patches_;
- // PC-relative String patch info; type depends on configuration (app .bss or boot image PIC).
+ // PC-relative String patch info; type depends on configuration (intern table or boot image PIC).
ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_;
+ // PC-relative String patch info for kBssEntry.
+ ArenaDeque<PcRelativePatchInfo> string_bss_entry_patches_;
// Baker read barrier patch info.
ArenaDeque<BakerReadBarrierPatchInfo> baker_read_barrier_patches_;
diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc
index d78756e964..8288141954 100644
--- a/compiler/optimizing/code_generator_arm_vixl.cc
+++ b/compiler/optimizing/code_generator_arm_vixl.cc
@@ -598,7 +598,7 @@ class LoadStringSlowPathARMVIXL : public SlowPathCodeARMVIXL {
down_cast<CodeGeneratorARMVIXL*>(codegen)->GetVIXLAssembler());
vixl32::Register temp = temps.Acquire();
CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
- arm_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index);
+ arm_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index);
arm_codegen->EmitMovwMovtPlaceholder(labels, temp);
__ Str(r0, MemOperand(temp));
}
@@ -2380,6 +2380,7 @@ CodeGeneratorARMVIXL::CodeGeneratorARMVIXL(HGraph* graph,
pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ string_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
baker_read_barrier_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_string_patches_(StringReferenceValueComparator(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
@@ -7315,6 +7316,7 @@ HLoadString::LoadKind CodeGeneratorARMVIXL::GetSupportedLoadStringKind(
HLoadString::LoadKind desired_string_load_kind) {
switch (desired_string_load_kind) {
case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
+ case HLoadString::LoadKind::kBootImageInternTable:
case HLoadString::LoadKind::kBssEntry:
DCHECK(!Runtime::Current()->UseJitCompilation());
break;
@@ -7372,14 +7374,22 @@ void InstructionCodeGeneratorARMVIXL::VisitLoadString(HLoadString* load) NO_THRE
CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
codegen_->EmitMovwMovtPlaceholder(labels, out);
- return; // No dex cache slow path.
+ return;
}
case HLoadString::LoadKind::kBootImageAddress: {
uint32_t address = dchecked_integral_cast<uint32_t>(
reinterpret_cast<uintptr_t>(load->GetString().Get()));
DCHECK_NE(address, 0u);
__ Ldr(out, codegen_->DeduplicateBootImageAddressLiteral(address));
- return; // No dex cache slow path.
+ return;
+ }
+ case HLoadString::LoadKind::kBootImageInternTable: {
+ DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
+ CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
+ codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
+ codegen_->EmitMovwMovtPlaceholder(labels, out);
+ __ Ldr(out, MemOperand(out, /* offset */ 0));
+ return;
}
case HLoadString::LoadKind::kBssEntry: {
DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
@@ -7387,7 +7397,7 @@ void InstructionCodeGeneratorARMVIXL::VisitLoadString(HLoadString* load) NO_THRE
? RegisterFrom(locations->GetTemp(0))
: out;
CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
- codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
+ codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex());
codegen_->EmitMovwMovtPlaceholder(labels, temp);
GenerateGcRootFieldLoad(load, out_loc, temp, /* offset */ 0, kCompilerReadBarrierOption);
LoadStringSlowPathARMVIXL* slow_path =
@@ -9119,6 +9129,11 @@ CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewPcRelativeSt
return NewPcRelativePatch(dex_file, string_index.index_, &pc_relative_string_patches_);
}
+CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewStringBssEntryPatch(
+ const DexFile& dex_file, dex::StringIndex string_index) {
+ return NewPcRelativePatch(dex_file, string_index.index_, &string_bss_entry_patches_);
+}
+
CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewPcRelativePatch(
const DexFile& dex_file, uint32_t offset_or_index, ArenaDeque<PcRelativePatchInfo>* patches) {
patches->emplace_back(dex_file, offset_or_index);
@@ -9187,6 +9202,7 @@ void CodeGeneratorARMVIXL::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_pa
/* MOVW+MOVT for each entry */ 2u * pc_relative_type_patches_.size() +
/* MOVW+MOVT for each entry */ 2u * type_bss_entry_patches_.size() +
/* MOVW+MOVT for each entry */ 2u * pc_relative_string_patches_.size() +
+ /* MOVW+MOVT for each entry */ 2u * string_bss_entry_patches_.size() +
baker_read_barrier_patches_.size();
linker_patches->reserve(size);
if (GetCompilerOptions().IsBootImage()) {
@@ -9199,13 +9215,15 @@ void CodeGeneratorARMVIXL::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_pa
} else {
DCHECK(pc_relative_method_patches_.empty());
DCHECK(pc_relative_type_patches_.empty());
- EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
- linker_patches);
+ EmitPcRelativeLinkerPatches<LinkerPatch::StringInternTablePatch>(pc_relative_string_patches_,
+ linker_patches);
}
EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
linker_patches);
EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
linker_patches);
+ EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_bss_entry_patches_,
+ linker_patches);
for (const BakerReadBarrierPatchInfo& info : baker_read_barrier_patches_) {
linker_patches->push_back(LinkerPatch::BakerReadBarrierBranchPatch(info.label.GetLocation(),
info.custom_data));
diff --git a/compiler/optimizing/code_generator_arm_vixl.h b/compiler/optimizing/code_generator_arm_vixl.h
index 5feb33b1e1..e78bc15614 100644
--- a/compiler/optimizing/code_generator_arm_vixl.h
+++ b/compiler/optimizing/code_generator_arm_vixl.h
@@ -579,6 +579,8 @@ class CodeGeneratorARMVIXL : public CodeGenerator {
PcRelativePatchInfo* NewTypeBssEntryPatch(const DexFile& dex_file, dex::TypeIndex type_index);
PcRelativePatchInfo* NewPcRelativeStringPatch(const DexFile& dex_file,
dex::StringIndex string_index);
+ PcRelativePatchInfo* NewStringBssEntryPatch(const DexFile& dex_file,
+ dex::StringIndex string_index);
// Add a new baker read barrier patch and return the label to be bound
// before the BNE instruction.
@@ -803,8 +805,10 @@ class CodeGeneratorARMVIXL : public CodeGenerator {
ArenaDeque<PcRelativePatchInfo> pc_relative_type_patches_;
// PC-relative type patch info for kBssEntry.
ArenaDeque<PcRelativePatchInfo> type_bss_entry_patches_;
- // PC-relative String patch info; type depends on configuration (app .bss or boot image PIC).
+ // PC-relative String patch info; type depends on configuration (intern table or boot image PIC).
ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_;
+ // PC-relative String patch info for kBssEntry.
+ ArenaDeque<PcRelativePatchInfo> string_bss_entry_patches_;
// Baker read barrier patch info.
ArenaDeque<BakerReadBarrierPatchInfo> baker_read_barrier_patches_;
diff --git a/compiler/optimizing/code_generator_mips.cc b/compiler/optimizing/code_generator_mips.cc
index 51f5b969d5..ac8f675e2d 100644
--- a/compiler/optimizing/code_generator_mips.cc
+++ b/compiler/optimizing/code_generator_mips.cc
@@ -360,7 +360,7 @@ class LoadStringSlowPathMIPS : public SlowPathCodeMIPS {
// The string entry address was preserved in `entry_address` thanks to kSaveEverything.
DCHECK(bss_info_high_);
CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
- mips_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index, bss_info_high_);
+ mips_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index, bss_info_high_);
__ Sw(calling_convention.GetRegisterAt(0),
entry_address,
/* placeholder */ 0x5678,
@@ -380,9 +380,9 @@ class LoadStringSlowPathMIPS : public SlowPathCodeMIPS {
const bool isR6 = mips_codegen->GetInstructionSetFeatures().IsR6();
Register base = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
- mips_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index);
+ mips_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index);
CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
- mips_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index, info_high);
+ mips_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index, info_high);
mips_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, base);
__ Sw(out, TMP, /* placeholder */ 0x5678, &info_low->label);
}
@@ -1101,6 +1101,7 @@ CodeGeneratorMIPS::CodeGeneratorMIPS(HGraph* graph,
pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ string_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_class_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
clobbered_ra_(false) {
@@ -1651,7 +1652,8 @@ void CodeGeneratorMIPS::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patch
method_bss_entry_patches_.size() +
pc_relative_type_patches_.size() +
type_bss_entry_patches_.size() +
- pc_relative_string_patches_.size();
+ pc_relative_string_patches_.size() +
+ string_bss_entry_patches_.size();
linker_patches->reserve(size);
if (GetCompilerOptions().IsBootImage()) {
EmitPcRelativeLinkerPatches<LinkerPatch::RelativeMethodPatch>(pc_relative_method_patches_,
@@ -1663,13 +1665,15 @@ void CodeGeneratorMIPS::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patch
} else {
DCHECK(pc_relative_method_patches_.empty());
DCHECK(pc_relative_type_patches_.empty());
- EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
- linker_patches);
+ EmitPcRelativeLinkerPatches<LinkerPatch::StringInternTablePatch>(pc_relative_string_patches_,
+ linker_patches);
}
EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
linker_patches);
EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
linker_patches);
+ EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_bss_entry_patches_,
+ linker_patches);
DCHECK_EQ(size, linker_patches->size());
}
@@ -1712,6 +1716,13 @@ CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeStringPa
return NewPcRelativePatch(dex_file, string_index.index_, info_high, &pc_relative_string_patches_);
}
+CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewStringBssEntryPatch(
+ const DexFile& dex_file,
+ dex::StringIndex string_index,
+ const PcRelativePatchInfo* info_high) {
+ return NewPcRelativePatch(dex_file, string_index.index_, info_high, &string_bss_entry_patches_);
+}
+
CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativePatch(
const DexFile& dex_file,
uint32_t offset_or_index,
@@ -7365,6 +7376,7 @@ HLoadString::LoadKind CodeGeneratorMIPS::GetSupportedLoadStringKind(
bool fallback_load = has_irreducible_loops && !is_r6;
switch (desired_string_load_kind) {
case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
+ case HLoadString::LoadKind::kBootImageInternTable:
case HLoadString::LoadKind::kBssEntry:
DCHECK(!Runtime::Current()->UseJitCompilation());
break;
@@ -7817,6 +7829,7 @@ void LocationsBuilderMIPS::VisitLoadString(HLoadString* load) {
// We need an extra register for PC-relative literals on R2.
case HLoadString::LoadKind::kBootImageAddress:
case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
+ case HLoadString::LoadKind::kBootImageInternTable:
case HLoadString::LoadKind::kBssEntry:
if (isR6) {
break;
@@ -7863,6 +7876,7 @@ void InstructionCodeGeneratorMIPS::VisitLoadString(HLoadString* load) NO_THREAD_
// We need an extra register for PC-relative literals on R2.
case HLoadString::LoadKind::kBootImageAddress:
case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
+ case HLoadString::LoadKind::kBootImageInternTable:
case HLoadString::LoadKind::kBssEntry:
base_or_current_method_reg = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
break;
@@ -7882,7 +7896,7 @@ void InstructionCodeGeneratorMIPS::VisitLoadString(HLoadString* load) NO_THREAD_
out,
base_or_current_method_reg);
__ Addiu(out, out, /* placeholder */ 0x5678, &info_low->label);
- return; // No dex cache slow path.
+ return;
}
case HLoadString::LoadKind::kBootImageAddress: {
uint32_t address = dchecked_integral_cast<uint32_t>(
@@ -7891,14 +7905,26 @@ void InstructionCodeGeneratorMIPS::VisitLoadString(HLoadString* load) NO_THREAD_
__ LoadLiteral(out,
base_or_current_method_reg,
codegen_->DeduplicateBootImageAddressLiteral(address));
- return; // No dex cache slow path.
+ return;
}
- case HLoadString::LoadKind::kBssEntry: {
+ case HLoadString::LoadKind::kBootImageInternTable: {
DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
+ codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high,
+ out,
+ base_or_current_method_reg);
+ __ Lw(out, out, /* placeholder */ 0x5678, &info_low->label);
+ return;
+ }
+ case HLoadString::LoadKind::kBssEntry: {
+ DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
+ CodeGeneratorMIPS::PcRelativePatchInfo* info_high =
+ codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex());
+ CodeGeneratorMIPS::PcRelativePatchInfo* info_low =
+ codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
Register temp = non_baker_read_barrier ? out : locations->GetTemp(0).AsRegister<Register>();
codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high,
diff --git a/compiler/optimizing/code_generator_mips.h b/compiler/optimizing/code_generator_mips.h
index c0e1ec0fa2..f15f8c672a 100644
--- a/compiler/optimizing/code_generator_mips.h
+++ b/compiler/optimizing/code_generator_mips.h
@@ -633,6 +633,9 @@ class CodeGeneratorMIPS : public CodeGenerator {
PcRelativePatchInfo* NewPcRelativeStringPatch(const DexFile& dex_file,
dex::StringIndex string_index,
const PcRelativePatchInfo* info_high = nullptr);
+ PcRelativePatchInfo* NewStringBssEntryPatch(const DexFile& dex_file,
+ dex::StringIndex string_index,
+ const PcRelativePatchInfo* info_high = nullptr);
Literal* DeduplicateBootImageAddressLiteral(uint32_t address);
void EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info_high,
@@ -699,8 +702,10 @@ class CodeGeneratorMIPS : public CodeGenerator {
ArenaDeque<PcRelativePatchInfo> pc_relative_type_patches_;
// PC-relative type patch info for kBssEntry.
ArenaDeque<PcRelativePatchInfo> type_bss_entry_patches_;
- // PC-relative String patch info; type depends on configuration (app .bss or boot image PIC).
+ // PC-relative String patch info; type depends on configuration (intern table or boot image PIC).
ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_;
+ // PC-relative String patch info for kBssEntry.
+ ArenaDeque<PcRelativePatchInfo> string_bss_entry_patches_;
// Patches for string root accesses in JIT compiled code.
ArenaDeque<JitPatchInfo> jit_string_patches_;
diff --git a/compiler/optimizing/code_generator_mips64.cc b/compiler/optimizing/code_generator_mips64.cc
index 25fb1d05dc..71c2bfff19 100644
--- a/compiler/optimizing/code_generator_mips64.cc
+++ b/compiler/optimizing/code_generator_mips64.cc
@@ -318,9 +318,9 @@ class LoadStringSlowPathMIPS64 : public SlowPathCodeMIPS64 {
// The string entry address was preserved in `entry_address` thanks to kSaveEverything.
DCHECK(bss_info_high_);
CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
- mips64_codegen->NewPcRelativeStringPatch(load->GetDexFile(),
- string_index,
- bss_info_high_);
+ mips64_codegen->NewStringBssEntryPatch(load->GetDexFile(),
+ string_index,
+ bss_info_high_);
__ Bind(&info_low->label);
__ StoreToOffset(kStoreWord,
calling_convention.GetRegisterAt(0),
@@ -339,9 +339,9 @@ class LoadStringSlowPathMIPS64 : public SlowPathCodeMIPS64 {
// For non-Baker read barriers we need to re-calculate the address of
// the string entry.
CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
- mips64_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index);
+ mips64_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index);
CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
- mips64_codegen->NewPcRelativeStringPatch(load->GetDexFile(), string_index, info_high);
+ mips64_codegen->NewStringBssEntryPatch(load->GetDexFile(), string_index, info_high);
mips64_codegen->EmitPcRelativeAddressPlaceholderHigh(info_high, TMP, info_low);
__ StoreToOffset(kStoreWord, out, TMP, /* placeholder */ 0x5678);
}
@@ -1049,6 +1049,7 @@ CodeGeneratorMIPS64::CodeGeneratorMIPS64(HGraph* graph,
pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ string_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_string_patches_(StringReferenceValueComparator(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_class_patches_(TypeReferenceValueComparator(),
@@ -1560,7 +1561,8 @@ void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_pat
method_bss_entry_patches_.size() +
pc_relative_type_patches_.size() +
type_bss_entry_patches_.size() +
- pc_relative_string_patches_.size();
+ pc_relative_string_patches_.size() +
+ string_bss_entry_patches_.size();
linker_patches->reserve(size);
if (GetCompilerOptions().IsBootImage()) {
EmitPcRelativeLinkerPatches<LinkerPatch::RelativeMethodPatch>(pc_relative_method_patches_,
@@ -1572,13 +1574,15 @@ void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_pat
} else {
DCHECK(pc_relative_method_patches_.empty());
DCHECK(pc_relative_type_patches_.empty());
- EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
- linker_patches);
+ EmitPcRelativeLinkerPatches<LinkerPatch::StringInternTablePatch>(pc_relative_string_patches_,
+ linker_patches);
}
EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
linker_patches);
EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
linker_patches);
+ EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_bss_entry_patches_,
+ linker_patches);
DCHECK_EQ(size, linker_patches->size());
}
@@ -1621,6 +1625,13 @@ CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeStri
return NewPcRelativePatch(dex_file, string_index.index_, info_high, &pc_relative_string_patches_);
}
+CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewStringBssEntryPatch(
+ const DexFile& dex_file,
+ dex::StringIndex string_index,
+ const PcRelativePatchInfo* info_high) {
+ return NewPcRelativePatch(dex_file, string_index.index_, info_high, &string_bss_entry_patches_);
+}
+
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativePatch(
const DexFile& dex_file,
uint32_t offset_or_index,
@@ -5729,6 +5740,7 @@ HLoadString::LoadKind CodeGeneratorMIPS64::GetSupportedLoadStringKind(
bool fallback_load = false;
switch (desired_string_load_kind) {
case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
+ case HLoadString::LoadKind::kBootImageInternTable:
case HLoadString::LoadKind::kBssEntry:
DCHECK(!Runtime::Current()->UseJitCompilation());
break;
@@ -6117,7 +6129,7 @@ void InstructionCodeGeneratorMIPS64::VisitLoadString(HLoadString* load) NO_THREA
codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
__ Daddiu(out, AT, /* placeholder */ 0x5678);
- return; // No dex cache slow path.
+ return;
}
case HLoadString::LoadKind::kBootImageAddress: {
uint32_t address = dchecked_integral_cast<uint32_t>(
@@ -6126,14 +6138,24 @@ void InstructionCodeGeneratorMIPS64::VisitLoadString(HLoadString* load) NO_THREA
__ LoadLiteral(out,
kLoadUnsignedWord,
codegen_->DeduplicateBootImageAddressLiteral(address));
- return; // No dex cache slow path.
+ return;
}
- case HLoadString::LoadKind::kBssEntry: {
+ case HLoadString::LoadKind::kBootImageInternTable: {
DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
+ codegen_->EmitPcRelativeAddressPlaceholderHigh(info_high, AT, info_low);
+ __ Lwu(out, AT, /* placeholder */ 0x5678);
+ return;
+ }
+ case HLoadString::LoadKind::kBssEntry: {
+ DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
+ CodeGeneratorMIPS64::PcRelativePatchInfo* info_high =
+ codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex());
+ CodeGeneratorMIPS64::PcRelativePatchInfo* info_low =
+ codegen_->NewStringBssEntryPatch(load->GetDexFile(), load->GetStringIndex(), info_high);
constexpr bool non_baker_read_barrier = kUseReadBarrier && !kUseBakerReadBarrier;
GpuRegister temp = non_baker_read_barrier
? out
diff --git a/compiler/optimizing/code_generator_mips64.h b/compiler/optimizing/code_generator_mips64.h
index 72d474308e..3035621972 100644
--- a/compiler/optimizing/code_generator_mips64.h
+++ b/compiler/optimizing/code_generator_mips64.h
@@ -605,6 +605,9 @@ class CodeGeneratorMIPS64 : public CodeGenerator {
PcRelativePatchInfo* NewPcRelativeStringPatch(const DexFile& dex_file,
dex::StringIndex string_index,
const PcRelativePatchInfo* info_high = nullptr);
+ PcRelativePatchInfo* NewStringBssEntryPatch(const DexFile& dex_file,
+ dex::StringIndex string_index,
+ const PcRelativePatchInfo* info_high = nullptr);
Literal* DeduplicateBootImageAddressLiteral(uint64_t address);
void EmitPcRelativeAddressPlaceholderHigh(PcRelativePatchInfo* info_high,
@@ -666,8 +669,10 @@ class CodeGeneratorMIPS64 : public CodeGenerator {
ArenaDeque<PcRelativePatchInfo> pc_relative_type_patches_;
// PC-relative type patch info for kBssEntry.
ArenaDeque<PcRelativePatchInfo> type_bss_entry_patches_;
- // PC-relative String patch info; type depends on configuration (app .bss or boot image PIC).
+ // PC-relative String patch info; type depends on configuration (intern table or boot image PIC).
ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_;
+ // PC-relative String patch info for kBssEntry.
+ ArenaDeque<PcRelativePatchInfo> string_bss_entry_patches_;
// Patches for string root accesses in JIT compiled code.
StringToLiteralMap jit_string_patches_;
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 99b7793c81..512968f01d 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -1035,6 +1035,7 @@ CodeGeneratorX86::CodeGeneratorX86(HGraph* graph,
boot_image_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ string_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_class_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
constant_area_start_(-1),
@@ -4652,7 +4653,6 @@ Label* CodeGeneratorX86::NewTypeBssEntryPatch(HLoadClass* load_class) {
}
void CodeGeneratorX86::RecordBootStringPatch(HLoadString* load_string) {
- DCHECK(GetCompilerOptions().IsBootImage());
HX86ComputeBaseMethodAddress* address = load_string->InputAt(0)->AsX86ComputeBaseMethodAddress();
string_patches_.emplace_back(address,
load_string->GetDexFile(),
@@ -4664,9 +4664,9 @@ Label* CodeGeneratorX86::NewStringBssEntryPatch(HLoadString* load_string) {
DCHECK(!GetCompilerOptions().IsBootImage());
HX86ComputeBaseMethodAddress* address =
load_string->InputAt(0)->AsX86ComputeBaseMethodAddress();
- string_patches_.emplace_back(
+ string_bss_entry_patches_.emplace_back(
address, load_string->GetDexFile(), load_string->GetStringIndex().index_);
- return &string_patches_.back().label;
+ return &string_bss_entry_patches_.back().label;
}
// The label points to the end of the "movl" or another instruction but the literal offset
@@ -4691,7 +4691,8 @@ void CodeGeneratorX86::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patche
method_bss_entry_patches_.size() +
boot_image_type_patches_.size() +
type_bss_entry_patches_.size() +
- string_patches_.size();
+ string_patches_.size() +
+ string_bss_entry_patches_.size();
linker_patches->reserve(size);
if (GetCompilerOptions().IsBootImage()) {
EmitPcRelativeLinkerPatches<LinkerPatch::RelativeMethodPatch>(boot_image_method_patches_,
@@ -4702,12 +4703,15 @@ void CodeGeneratorX86::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patche
} else {
DCHECK(boot_image_method_patches_.empty());
DCHECK(boot_image_type_patches_.empty());
- EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<LinkerPatch::StringInternTablePatch>(string_patches_,
+ linker_patches);
}
EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
linker_patches);
EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
linker_patches);
+ EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_bss_entry_patches_,
+ linker_patches);
DCHECK_EQ(size, linker_patches->size());
}
@@ -6219,6 +6223,7 @@ HLoadString::LoadKind CodeGeneratorX86::GetSupportedLoadStringKind(
HLoadString::LoadKind desired_string_load_kind) {
switch (desired_string_load_kind) {
case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
+ case HLoadString::LoadKind::kBootImageInternTable:
case HLoadString::LoadKind::kBssEntry:
DCHECK(!Runtime::Current()->UseJitCompilation());
break;
@@ -6237,6 +6242,7 @@ void LocationsBuilderX86::VisitLoadString(HLoadString* load) {
LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(load, call_kind);
HLoadString::LoadKind load_kind = load->GetLoadKind();
if (load_kind == HLoadString::LoadKind::kBootImageLinkTimePcRelative ||
+ load_kind == HLoadString::LoadKind::kBootImageInternTable ||
load_kind == HLoadString::LoadKind::kBssEntry) {
locations->SetInAt(0, Location::RequiresRegister());
}
@@ -6282,14 +6288,21 @@ void InstructionCodeGeneratorX86::VisitLoadString(HLoadString* load) NO_THREAD_S
Register method_address = locations->InAt(0).AsRegister<Register>();
__ leal(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
codegen_->RecordBootStringPatch(load);
- return; // No dex cache slow path.
+ return;
}
case HLoadString::LoadKind::kBootImageAddress: {
uint32_t address = dchecked_integral_cast<uint32_t>(
reinterpret_cast<uintptr_t>(load->GetString().Get()));
DCHECK_NE(address, 0u);
__ movl(out, Immediate(address));
- return; // No dex cache slow path.
+ return;
+ }
+ case HLoadString::LoadKind::kBootImageInternTable: {
+ DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
+ Register method_address = locations->InAt(0).AsRegister<Register>();
+ __ movl(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
+ codegen_->RecordBootStringPatch(load);
+ return;
}
case HLoadString::LoadKind::kBssEntry: {
Register method_address = locations->InAt(0).AsRegister<Register>();
diff --git a/compiler/optimizing/code_generator_x86.h b/compiler/optimizing/code_generator_x86.h
index f48753b614..b32d57a774 100644
--- a/compiler/optimizing/code_generator_x86.h
+++ b/compiler/optimizing/code_generator_x86.h
@@ -640,8 +640,10 @@ class CodeGeneratorX86 : public CodeGenerator {
ArenaDeque<X86PcRelativePatchInfo> boot_image_type_patches_;
// Type patch locations for kBssEntry.
ArenaDeque<X86PcRelativePatchInfo> type_bss_entry_patches_;
- // String patch locations; type depends on configuration (app .bss or boot image).
+ // String patch locations; type depends on configuration (intern table or boot image PIC).
ArenaDeque<X86PcRelativePatchInfo> string_patches_;
+ // String patch locations for kBssEntry.
+ ArenaDeque<X86PcRelativePatchInfo> string_bss_entry_patches_;
// Patches for string root accesses in JIT compiled code.
ArenaDeque<PatchInfo<Label>> jit_string_patches_;
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 8283887a96..0c3b2ad742 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -1089,15 +1089,15 @@ Label* CodeGeneratorX86_64::NewTypeBssEntryPatch(HLoadClass* load_class) {
}
void CodeGeneratorX86_64::RecordBootStringPatch(HLoadString* load_string) {
- DCHECK(GetCompilerOptions().IsBootImage());
string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex().index_);
__ Bind(&string_patches_.back().label);
}
Label* CodeGeneratorX86_64::NewStringBssEntryPatch(HLoadString* load_string) {
DCHECK(!GetCompilerOptions().IsBootImage());
- string_patches_.emplace_back(load_string->GetDexFile(), load_string->GetStringIndex().index_);
- return &string_patches_.back().label;
+ string_bss_entry_patches_.emplace_back(
+ load_string->GetDexFile(), load_string->GetStringIndex().index_);
+ return &string_bss_entry_patches_.back().label;
}
// The label points to the end of the "movl" or another instruction but the literal offset
@@ -1122,7 +1122,8 @@ void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_pat
method_bss_entry_patches_.size() +
boot_image_type_patches_.size() +
type_bss_entry_patches_.size() +
- string_patches_.size();
+ string_patches_.size() +
+ string_bss_entry_patches_.size();
linker_patches->reserve(size);
if (GetCompilerOptions().IsBootImage()) {
EmitPcRelativeLinkerPatches<LinkerPatch::RelativeMethodPatch>(boot_image_method_patches_,
@@ -1133,12 +1134,15 @@ void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_pat
} else {
DCHECK(boot_image_method_patches_.empty());
DCHECK(boot_image_type_patches_.empty());
- EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<LinkerPatch::StringInternTablePatch>(string_patches_,
+ linker_patches);
}
EmitPcRelativeLinkerPatches<LinkerPatch::MethodBssEntryPatch>(method_bss_entry_patches_,
linker_patches);
EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
linker_patches);
+ EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_bss_entry_patches_,
+ linker_patches);
DCHECK_EQ(size, linker_patches->size());
}
@@ -1230,6 +1234,7 @@ CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
boot_image_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ string_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_class_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
@@ -5621,6 +5626,7 @@ HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
HLoadString::LoadKind desired_string_load_kind) {
switch (desired_string_load_kind) {
case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
+ case HLoadString::LoadKind::kBootImageInternTable:
case HLoadString::LoadKind::kBssEntry:
DCHECK(!Runtime::Current()->UseJitCompilation());
break;
@@ -5678,14 +5684,20 @@ void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) NO_THREA
DCHECK(codegen_->GetCompilerOptions().IsBootImage());
__ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
codegen_->RecordBootStringPatch(load);
- return; // No dex cache slow path.
+ return;
}
case HLoadString::LoadKind::kBootImageAddress: {
uint32_t address = dchecked_integral_cast<uint32_t>(
reinterpret_cast<uintptr_t>(load->GetString().Get()));
DCHECK_NE(address, 0u);
__ movl(out, Immediate(static_cast<int32_t>(address))); // Zero-extended.
- return; // No dex cache slow path.
+ return;
+ }
+ case HLoadString::LoadKind::kBootImageInternTable: {
+ DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
+ __ movl(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
+ codegen_->RecordBootStringPatch(load);
+ return;
}
case HLoadString::LoadKind::kBssEntry: {
Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h
index 33c64290d4..f5fa86bf23 100644
--- a/compiler/optimizing/code_generator_x86_64.h
+++ b/compiler/optimizing/code_generator_x86_64.h
@@ -611,8 +611,10 @@ class CodeGeneratorX86_64 : public CodeGenerator {
ArenaDeque<PatchInfo<Label>> boot_image_type_patches_;
// Type patch locations for kBssEntry.
ArenaDeque<PatchInfo<Label>> type_bss_entry_patches_;
- // String patch locations; type depends on configuration (app .bss or boot image).
+ // String patch locations; type depends on configuration (intern table or boot image PIC).
ArenaDeque<PatchInfo<Label>> string_patches_;
+ // String patch locations for kBssEntry.
+ ArenaDeque<PatchInfo<Label>> string_bss_entry_patches_;
// Patches for string literals in JIT compiled code.
ArenaDeque<PatchInfo<Label>> jit_string_patches_;
diff --git a/compiler/optimizing/nodes.cc b/compiler/optimizing/nodes.cc
index 1510eafa40..ebbea27e08 100644
--- a/compiler/optimizing/nodes.cc
+++ b/compiler/optimizing/nodes.cc
@@ -2791,6 +2791,7 @@ bool HLoadString::InstructionDataEquals(const HInstruction* other) const {
}
switch (GetLoadKind()) {
case LoadKind::kBootImageAddress:
+ case LoadKind::kBootImageInternTable:
case LoadKind::kJitTableAddress: {
ScopedObjectAccess soa(Thread::Current());
return GetString().Get() == other_load_string->GetString().Get();
@@ -2821,6 +2822,8 @@ std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs) {
return os << "BootImageLinkTimePcRelative";
case HLoadString::LoadKind::kBootImageAddress:
return os << "BootImageAddress";
+ case HLoadString::LoadKind::kBootImageInternTable:
+ return os << "BootImageInternTable";
case HLoadString::LoadKind::kBssEntry:
return os << "BssEntry";
case HLoadString::LoadKind::kJitTableAddress:
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index f60d532c37..5e8c77102a 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -5871,6 +5871,10 @@ class HLoadString FINAL : public HInstruction {
// Used for boot image strings referenced by apps in AOT- and JIT-compiled code.
kBootImageAddress,
+ // Use a PC-relative load from a boot image InternTable mmapped into the .bss
+ // of the oat file.
+ kBootImageInternTable,
+
// Load from an entry in the .bss section using a PC-relative load.
// Used for strings outside boot image when .bss is accessible with a PC-relative load.
kBssEntry,
@@ -5930,6 +5934,7 @@ class HLoadString FINAL : public HInstruction {
LoadKind load_kind = GetLoadKind();
if (load_kind == LoadKind::kBootImageLinkTimePcRelative ||
load_kind == LoadKind::kBootImageAddress ||
+ load_kind == LoadKind::kBootImageInternTable ||
load_kind == LoadKind::kJitTableAddress) {
return false;
}
@@ -5990,8 +5995,9 @@ inline void HLoadString::AddSpecialInput(HInstruction* special_input) {
// The special input is used for PC-relative loads on some architectures,
// including literal pool loads, which are PC-relative too.
DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
- GetLoadKind() == LoadKind::kBssEntry ||
- GetLoadKind() == LoadKind::kBootImageAddress) << GetLoadKind();
+ GetLoadKind() == LoadKind::kBootImageAddress ||
+ GetLoadKind() == LoadKind::kBootImageInternTable ||
+ GetLoadKind() == LoadKind::kBssEntry) << GetLoadKind();
// HLoadString::GetInputRecords() returns an empty array at this point,
// so use the GetInputRecords() from the base class to set the input record.
DCHECK(special_input_.GetInstruction() == nullptr);
diff --git a/compiler/optimizing/pc_relative_fixups_mips.cc b/compiler/optimizing/pc_relative_fixups_mips.cc
index 21b645279e..4cb99f9b5c 100644
--- a/compiler/optimizing/pc_relative_fixups_mips.cc
+++ b/compiler/optimizing/pc_relative_fixups_mips.cc
@@ -88,8 +88,9 @@ class PCRelativeHandlerVisitor : public HGraphVisitor {
void VisitLoadString(HLoadString* load_string) OVERRIDE {
HLoadString::LoadKind load_kind = load_string->GetLoadKind();
switch (load_kind) {
- case HLoadString::LoadKind::kBootImageAddress:
case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
+ case HLoadString::LoadKind::kBootImageAddress:
+ case HLoadString::LoadKind::kBootImageInternTable:
case HLoadString::LoadKind::kBssEntry:
// Add a base register for PC-relative literals on R2.
InitializePCRelativeBasePointer();
diff --git a/compiler/optimizing/pc_relative_fixups_x86.cc b/compiler/optimizing/pc_relative_fixups_x86.cc
index 2743df9dcf..c463ecdb0a 100644
--- a/compiler/optimizing/pc_relative_fixups_x86.cc
+++ b/compiler/optimizing/pc_relative_fixups_x86.cc
@@ -92,6 +92,7 @@ class PCRelativeHandlerVisitor : public HGraphVisitor {
void VisitLoadString(HLoadString* load_string) OVERRIDE {
HLoadString::LoadKind load_kind = load_string->GetLoadKind();
if (load_kind == HLoadString::LoadKind::kBootImageLinkTimePcRelative ||
+ load_kind == HLoadString::LoadKind::kBootImageInternTable ||
load_kind == HLoadString::LoadKind::kBssEntry) {
HX86ComputeBaseMethodAddress* method_address = GetPCRelativeBasePointer(load_string);
load_string->AddSpecialInput(method_address);
diff --git a/compiler/optimizing/sharpening.cc b/compiler/optimizing/sharpening.cc
index 9536d149f6..1ca63f4f86 100644
--- a/compiler/optimizing/sharpening.cc
+++ b/compiler/optimizing/sharpening.cc
@@ -278,10 +278,12 @@ void HSharpening::ProcessLoadString(HLoadString* load_string) {
} else {
// AOT app compilation. Try to lookup the string without allocating if not found.
string = class_linker->LookupString(dex_file, string_index, dex_cache.Get());
- if (string != nullptr &&
- runtime->GetHeap()->ObjectIsInBootImageSpace(string) &&
- !codegen_->GetCompilerOptions().GetCompilePic()) {
- desired_load_kind = HLoadString::LoadKind::kBootImageAddress;
+ if (string != nullptr && runtime->GetHeap()->ObjectIsInBootImageSpace(string)) {
+ if (codegen_->GetCompilerOptions().GetCompilePic()) {
+ desired_load_kind = HLoadString::LoadKind::kBootImageInternTable;
+ } else {
+ desired_load_kind = HLoadString::LoadKind::kBootImageAddress;
+ }
} else {
desired_load_kind = HLoadString::LoadKind::kBssEntry;
}