Diffstat (limited to 'compiler/optimizing')
 compiler/optimizing/code_generator_arm_vixl.cc | 248
 compiler/optimizing/code_generator_arm_vixl.h  |  57
 compiler/optimizing/nodes.cc                   |  34
 compiler/optimizing/nodes.h                    |  52
 4 files changed, 304 insertions(+), 87 deletions(-)
diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc
index 1c5aec01c6..1df00fff9a 100644
--- a/compiler/optimizing/code_generator_arm_vixl.cc
+++ b/compiler/optimizing/code_generator_arm_vixl.cc
@@ -1243,10 +1243,26 @@ CodeGeneratorARMVIXL::CodeGeneratorARMVIXL(HGraph* graph,
move_resolver_(graph->GetArena(), this),
assembler_(graph->GetArena()),
isa_features_(isa_features),
+ uint32_literals_(std::less<uint32_t>(),
+ graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ method_patches_(MethodReferenceComparator(),
+ graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ call_patches_(MethodReferenceComparator(),
+ graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
relative_call_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ boot_image_string_patches_(StringReferenceValueComparator(),
+ graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
pc_relative_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
- pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
+ boot_image_type_patches_(TypeReferenceValueComparator(),
+ graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ boot_image_address_patches_(std::less<uint32_t>(),
+ graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ jit_string_patches_(StringReferenceValueComparator(),
+ graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ jit_class_patches_(TypeReferenceValueComparator(),
+ graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
// Always save the LR register to mimic Quick.
AddAllocatedRegister(Location::RegisterLocation(LR));
// Give d14 and d15 as scratch registers to VIXL.
@@ -4415,7 +4431,7 @@ void LocationsBuilderARMVIXL::HandleFieldGet(HInstruction* instruction,
locations->AddTemp(Location::RequiresRegister());
} else if (object_field_get_with_read_barrier && kUseBakerReadBarrier) {
// We need a temporary register for the read barrier marking slow
- // path in CodeGeneratorARM::GenerateFieldLoadWithBakerReadBarrier.
+ // path in CodeGeneratorARMVIXL::GenerateFieldLoadWithBakerReadBarrier.
locations->AddTemp(Location::RequiresRegister());
}
}
@@ -4877,7 +4893,7 @@ void LocationsBuilderARMVIXL::VisitArrayGet(HArrayGet* instruction) {
object_array_get_with_read_barrier ? Location::kOutputOverlap : Location::kNoOutputOverlap);
}
// We need a temporary register for the read barrier marking slow
- // path in CodeGeneratorARM::GenerateArrayLoadWithBakerReadBarrier.
+ // path in CodeGeneratorARMVIXL::GenerateArrayLoadWithBakerReadBarrier.
// Also needed for the String compression feature.
if ((object_array_get_with_read_barrier && kUseBakerReadBarrier)
|| (mirror::kUseStringCompression && instruction->IsStringCharAt())) {
@@ -5772,17 +5788,15 @@ HLoadClass::LoadKind CodeGeneratorARMVIXL::GetSupportedLoadClassKind(
case HLoadClass::LoadKind::kReferrersClass:
break;
case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
- // TODO(VIXL): Enable it back when literal pools are fixed in VIXL.
- return HLoadClass::LoadKind::kDexCacheViaMethod;
+ DCHECK(!GetCompilerOptions().GetCompilePic());
+ break;
case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
DCHECK(GetCompilerOptions().GetCompilePic());
break;
case HLoadClass::LoadKind::kBootImageAddress:
- // TODO(VIXL): Enable it back when literal pools are fixed in VIXL.
- return HLoadClass::LoadKind::kDexCacheViaMethod;
+ break;
case HLoadClass::LoadKind::kJitTableAddress:
- // TODO(VIXL): Enable it back when literal pools are fixed in VIXL.
- return HLoadClass::LoadKind::kDexCacheViaMethod;
+ break;
case HLoadClass::LoadKind::kDexCachePcRelative:
DCHECK(!Runtime::Current()->UseJitCompilation());
// We disable pc-relative load when there is an irreducible loop, as the optimization
@@ -5858,7 +5872,9 @@ void InstructionCodeGeneratorARMVIXL::VisitLoadClass(HLoadClass* cls) {
break;
}
case HLoadClass::LoadKind::kBootImageLinkTimeAddress: {
- TODO_VIXL32(FATAL);
+ DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
+ __ Ldr(out, codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(),
+ cls->GetTypeIndex()));
break;
}
case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
@@ -5869,11 +5885,18 @@ void InstructionCodeGeneratorARMVIXL::VisitLoadClass(HLoadClass* cls) {
break;
}
case HLoadClass::LoadKind::kBootImageAddress: {
- TODO_VIXL32(FATAL);
+ DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
+ DCHECK_NE(cls->GetAddress(), 0u);
+ uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
+ __ Ldr(out, codegen_->DeduplicateBootImageAddressLiteral(address));
break;
}
case HLoadClass::LoadKind::kJitTableAddress: {
- TODO_VIXL32(FATAL);
+ __ Ldr(out, codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
+ cls->GetTypeIndex(),
+ cls->GetAddress()));
+ // /* GcRoot<mirror::Class> */ out = *out
+ GenerateGcRootFieldLoad(cls, out_loc, out, /* offset */ 0, kCompilerReadBarrierOption);
break;
}
case HLoadClass::LoadKind::kDexCachePcRelative: {
@@ -5958,21 +5981,19 @@ HLoadString::LoadKind CodeGeneratorARMVIXL::GetSupportedLoadStringKind(
HLoadString::LoadKind desired_string_load_kind) {
switch (desired_string_load_kind) {
case HLoadString::LoadKind::kBootImageLinkTimeAddress:
- // TODO(VIXL): Implement missing optimization.
- return HLoadString::LoadKind::kDexCacheViaMethod;
+ DCHECK(!GetCompilerOptions().GetCompilePic());
+ break;
case HLoadString::LoadKind::kBootImageLinkTimePcRelative:
DCHECK(GetCompilerOptions().GetCompilePic());
break;
case HLoadString::LoadKind::kBootImageAddress:
- // TODO(VIXL): Implement missing optimization.
- return HLoadString::LoadKind::kDexCacheViaMethod;
+ break;
case HLoadString::LoadKind::kBssEntry:
DCHECK(!Runtime::Current()->UseJitCompilation());
break;
case HLoadString::LoadKind::kJitTableAddress:
DCHECK(Runtime::Current()->UseJitCompilation());
- // TODO(VIXL): Implement missing optimization.
- return HLoadString::LoadKind::kDexCacheViaMethod;
+ break;
case HLoadString::LoadKind::kDexCacheViaMethod:
break;
}
@@ -6014,8 +6035,9 @@ void InstructionCodeGeneratorARMVIXL::VisitLoadString(HLoadString* load) {
switch (load_kind) {
case HLoadString::LoadKind::kBootImageLinkTimeAddress: {
- TODO_VIXL32(FATAL);
- break;
+ __ Ldr(out, codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
+ load->GetStringIndex()));
+ return; // No dex cache slow path.
}
case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
DCHECK(codegen_->GetCompilerOptions().IsBootImage());
@@ -6025,8 +6047,10 @@ void InstructionCodeGeneratorARMVIXL::VisitLoadString(HLoadString* load) {
return; // No dex cache slow path.
}
case HLoadString::LoadKind::kBootImageAddress: {
- TODO_VIXL32(FATAL);
- break;
+ DCHECK_NE(load->GetAddress(), 0u);
+ uint32_t address = dchecked_integral_cast<uint32_t>(load->GetAddress());
+ __ Ldr(out, codegen_->DeduplicateBootImageAddressLiteral(address));
+ return; // No dex cache slow path.
}
case HLoadString::LoadKind::kBssEntry: {
DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
@@ -6043,8 +6067,11 @@ void InstructionCodeGeneratorARMVIXL::VisitLoadString(HLoadString* load) {
return;
}
case HLoadString::LoadKind::kJitTableAddress: {
- TODO_VIXL32(FATAL);
- break;
+ __ Ldr(out, codegen_->DeduplicateJitStringLiteral(load->GetDexFile(),
+ load->GetStringIndex()));
+ // /* GcRoot<mirror::String> */ out = *out
+ GenerateGcRootFieldLoad(load, out_loc, out, /* offset */ 0, kCompilerReadBarrierOption);
+ return;
}
default:
break;
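
For context: each of these load kinds ultimately emits a PC-relative LDR from a 32-bit literal-pool slot whose payload is unknown at emit time and filled in later, either by the linker (the boot image cases) or by EmitJitRootPatches() (the JIT table case). A minimal standalone sketch of that placeholder-then-patch idea, using simplified types rather than the actual VIXL API:

    #include <cstddef>
    #include <cstdint>
    #include <cstring>
    #include <vector>

    // Simplified model of a literal-pool slot: a 32-bit hole in the code
    // buffer, reserved now and patched once the real value is known.
    struct Literal {
      size_t offset;  // Byte offset of the slot within the code buffer.
    };

    Literal EmitPlaceholderLiteral(std::vector<uint8_t>* code) {
      Literal lit{code->size()};
      code->insert(code->end(), 4, 0);  // Reserve a zeroed 32-bit slot.
      return lit;
    }

    void PatchLiteral(std::vector<uint8_t>* code, Literal lit, uint32_t value) {
      std::memcpy(code->data() + lit.offset, &value, sizeof(value));
    }

In the patch itself, the slot is created by CreateLiteralDestroyedWithPool<uint32_t>(), and the patching side lives in EmitLinkerPatches() and PatchJitRootUse() further down.
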
@@ -7207,19 +7234,6 @@ void CodeGeneratorARMVIXL::GenerateReadBarrierForRootSlow(HInstruction* instruct
HInvokeStaticOrDirect::DispatchInfo CodeGeneratorARMVIXL::GetSupportedInvokeStaticOrDirectDispatch(
const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
HInvokeStaticOrDirect* invoke) {
- // TODO(VIXL): Implement optimized code paths.
- if (desired_dispatch_info.method_load_kind ==
- HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup ||
- desired_dispatch_info.code_ptr_location ==
- HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup) {
- return {
- HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod,
- HInvokeStaticOrDirect::CodePtrLocation::kCallArtMethod,
- 0u,
- 0u
- };
- }
-
HInvokeStaticOrDirect::DispatchInfo dispatch_info = desired_dispatch_info;
// We disable pc-relative load when there is an irreducible loop, as the optimization
// is incompatible with it.
@@ -7284,7 +7298,7 @@ void CodeGeneratorARMVIXL::GenerateStaticOrDirectCall(
switch (invoke->GetCodePtrLocation()) {
case HInvokeStaticOrDirect::CodePtrLocation::kCallDirectWithFixup:
// LR = code address from literal pool with link-time patch.
- TODO_VIXL32(FATAL);
+ __ Ldr(lr, DeduplicateMethodCodeLiteral(invoke->GetTargetMethod()));
break;
case HInvokeStaticOrDirect::CodePtrLocation::kCallDirect:
// LR = invoke->GetDirectCodePtr();
@@ -7310,7 +7324,7 @@ void CodeGeneratorARMVIXL::GenerateStaticOrDirectCall(
__ Mov(RegisterFrom(temp), Operand::From(invoke->GetMethodAddress()));
break;
case HInvokeStaticOrDirect::MethodLoadKind::kDirectAddressWithFixup:
- TODO_VIXL32(FATAL);
+ __ Ldr(RegisterFrom(temp), DeduplicateMethodAddressLiteral(invoke->GetTargetMethod()));
break;
case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
HArmDexCacheArraysBase* base =
@@ -7464,6 +7478,57 @@ CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewPcRelativePa
return &patches->back();
}
+VIXLUInt32Literal* CodeGeneratorARMVIXL::DeduplicateBootImageStringLiteral(
+ const DexFile& dex_file,
+ dex::StringIndex string_index) {
+ return boot_image_string_patches_.GetOrCreate(
+ StringReference(&dex_file, string_index),
+ [this]() {
+ return GetAssembler()->CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u);
+ });
+}
+
+VIXLUInt32Literal* CodeGeneratorARMVIXL::DeduplicateBootImageTypeLiteral(
+ const DexFile& dex_file,
+ dex::TypeIndex type_index) {
+ return boot_image_type_patches_.GetOrCreate(
+ TypeReference(&dex_file, type_index),
+ [this]() {
+ return GetAssembler()->CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u);
+ });
+}
+
+VIXLUInt32Literal* CodeGeneratorARMVIXL::DeduplicateBootImageAddressLiteral(uint32_t address) {
+ bool needs_patch = GetCompilerOptions().GetIncludePatchInformation();
+ Uint32ToLiteralMap* map = needs_patch ? &boot_image_address_patches_ : &uint32_literals_;
+ return DeduplicateUint32Literal(dchecked_integral_cast<uint32_t>(address), map);
+}
+
+VIXLUInt32Literal* CodeGeneratorARMVIXL::DeduplicateDexCacheAddressLiteral(uint32_t address) {
+ return DeduplicateUint32Literal(address, &uint32_literals_);
+}
+
+VIXLUInt32Literal* CodeGeneratorARMVIXL::DeduplicateJitStringLiteral(const DexFile& dex_file,
+ dex::StringIndex string_index) {
+ jit_string_roots_.Overwrite(StringReference(&dex_file, string_index), /* placeholder */ 0u);
+ return jit_string_patches_.GetOrCreate(
+ StringReference(&dex_file, string_index),
+ [this]() {
+ return GetAssembler()->CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u);
+ });
+}
+
+VIXLUInt32Literal* CodeGeneratorARMVIXL::DeduplicateJitClassLiteral(const DexFile& dex_file,
+ dex::TypeIndex type_index,
+ uint64_t address) {
+ jit_class_roots_.Overwrite(TypeReference(&dex_file, type_index), address);
+ return jit_class_patches_.GetOrCreate(
+ TypeReference(&dex_file, type_index),
+ [this]() {
+ return GetAssembler()->CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u);
+ });
+}
+
template <LinkerPatch (*Factory)(size_t, const DexFile*, uint32_t, uint32_t)>
inline void CodeGeneratorARMVIXL::EmitPcRelativeLinkerPatches(
const ArenaDeque<PcRelativePatchInfo>& infos,
@@ -7487,11 +7552,34 @@ inline void CodeGeneratorARMVIXL::EmitPcRelativeLinkerPatches(
void CodeGeneratorARMVIXL::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) {
DCHECK(linker_patches->empty());
size_t size =
+ method_patches_.size() +
+ call_patches_.size() +
relative_call_patches_.size() +
/* MOVW+MOVT for each entry */ 2u * pc_relative_dex_cache_patches_.size() +
+ boot_image_string_patches_.size() +
/* MOVW+MOVT for each entry */ 2u * pc_relative_string_patches_.size() +
- /* MOVW+MOVT for each entry */ 2u * pc_relative_type_patches_.size();
+ boot_image_type_patches_.size() +
+ /* MOVW+MOVT for each entry */ 2u * pc_relative_type_patches_.size() +
+ boot_image_address_patches_.size();
linker_patches->reserve(size);
+ for (const auto& entry : method_patches_) {
+ const MethodReference& target_method = entry.first;
+ VIXLUInt32Literal* literal = entry.second;
+ DCHECK(literal->IsBound());
+ uint32_t literal_offset = literal->GetLocation();
+ linker_patches->push_back(LinkerPatch::MethodPatch(literal_offset,
+ target_method.dex_file,
+ target_method.dex_method_index));
+ }
+ for (const auto& entry : call_patches_) {
+ const MethodReference& target_method = entry.first;
+ VIXLUInt32Literal* literal = entry.second;
+ DCHECK(literal->IsBound());
+ uint32_t literal_offset = literal->GetLocation();
+ linker_patches->push_back(LinkerPatch::CodePatch(literal_offset,
+ target_method.dex_file,
+ target_method.dex_method_index));
+ }
for (const PatchInfo<vixl32::Label>& info : relative_call_patches_) {
uint32_t literal_offset = info.label.GetLocation();
linker_patches->push_back(
@@ -7499,6 +7587,15 @@ void CodeGeneratorARMVIXL::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_pa
}
EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
linker_patches);
+ for (const auto& entry : boot_image_string_patches_) {
+ const StringReference& target_string = entry.first;
+ VIXLUInt32Literal* literal = entry.second;
+ DCHECK(literal->IsBound());
+ uint32_t literal_offset = literal->GetLocation();
+ linker_patches->push_back(LinkerPatch::StringPatch(literal_offset,
+ target_string.dex_file,
+ target_string.string_index.index_));
+ }
if (!GetCompilerOptions().IsBootImage()) {
EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
linker_patches);
@@ -7506,8 +7603,54 @@ void CodeGeneratorARMVIXL::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_pa
EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
linker_patches);
}
+ for (const auto& entry : boot_image_type_patches_) {
+ const TypeReference& target_type = entry.first;
+ VIXLUInt32Literal* literal = entry.second;
+ DCHECK(literal->IsBound());
+ uint32_t literal_offset = literal->GetLocation();
+ linker_patches->push_back(LinkerPatch::TypePatch(literal_offset,
+ target_type.dex_file,
+ target_type.type_index.index_));
+ }
EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
linker_patches);
+ for (const auto& entry : boot_image_address_patches_) {
+ DCHECK(GetCompilerOptions().GetIncludePatchInformation());
+ VIXLUInt32Literal* literal = entry.second;
+ DCHECK(literal->IsBound());
+ uint32_t literal_offset = literal->GetLocation();
+ linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
+ }
+}
+
+VIXLUInt32Literal* CodeGeneratorARMVIXL::DeduplicateUint32Literal(
+ uint32_t value,
+ Uint32ToLiteralMap* map) {
+ return map->GetOrCreate(
+ value,
+ [this, value]() {
+ return GetAssembler()->CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ value);
+ });
+}
+
+VIXLUInt32Literal* CodeGeneratorARMVIXL::DeduplicateMethodLiteral(
+ MethodReference target_method,
+ MethodToLiteralMap* map) {
+ return map->GetOrCreate(
+ target_method,
+ [this]() {
+ return GetAssembler()->CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u);
+ });
+}
+
+VIXLUInt32Literal* CodeGeneratorARMVIXL::DeduplicateMethodAddressLiteral(
+ MethodReference target_method) {
+ return DeduplicateMethodLiteral(target_method, &method_patches_);
+}
+
+VIXLUInt32Literal* CodeGeneratorARMVIXL::DeduplicateMethodCodeLiteral(
+ MethodReference target_method) {
+ return DeduplicateMethodLiteral(target_method, &call_patches_);
}
void LocationsBuilderARMVIXL::VisitMultiplyAccumulate(HMultiplyAccumulate* instr) {
@@ -7703,6 +7846,31 @@ void InstructionCodeGeneratorARMVIXL::VisitClassTableGet(HClassTableGet* instruc
}
}
+static void PatchJitRootUse(uint8_t* code,
+ const uint8_t* roots_data,
+ VIXLUInt32Literal* literal,
+ uint64_t index_in_table) {
+ DCHECK(literal->IsBound());
+ uint32_t literal_offset = literal->GetLocation();
+ uintptr_t address =
+ reinterpret_cast<uintptr_t>(roots_data) + index_in_table * sizeof(GcRoot<mirror::Object>);
+ uint8_t* data = code + literal_offset;
+ reinterpret_cast<uint32_t*>(data)[0] = dchecked_integral_cast<uint32_t>(address);
+}
+
+void CodeGeneratorARMVIXL::EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) {
+ for (const auto& entry : jit_string_patches_) {
+ const auto& it = jit_string_roots_.find(entry.first);
+ DCHECK(it != jit_string_roots_.end());
+ PatchJitRootUse(code, roots_data, entry.second, it->second);
+ }
+ for (const auto& entry : jit_class_patches_) {
+ const auto& it = jit_class_roots_.find(entry.first);
+ DCHECK(it != jit_class_roots_.end());
+ PatchJitRootUse(code, roots_data, entry.second, it->second);
+ }
+}
+
void CodeGeneratorARMVIXL::EmitMovwMovtPlaceholder(
CodeGeneratorARMVIXL::PcRelativePatchInfo* labels,
vixl32::Register out) {
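
All of the Deduplicate*Literal() helpers added above share one idiom: a map keyed by the referenced entity, with GetOrCreate() returning the existing literal or running a factory lambda to allocate a fresh placeholder on first use. A self-contained sketch of that pattern, substituting std::map and heap allocation for ArenaSafeMap and arena allocation:

    #include <cstdint>
    #include <map>

    struct Literal { uint32_t placeholder; };

    // Mirrors the ArenaSafeMap::GetOrCreate() contract: look the key up and
    // only run the factory on the first request for that key.
    template <typename Map, typename Factory>
    typename Map::mapped_type GetOrCreate(Map* map,
                                          const typename Map::key_type& key,
                                          Factory factory) {
      auto it = map->find(key);
      if (it == map->end()) {
        it = map->emplace(key, factory()).first;
      }
      return it->second;
    }

    std::map<uint32_t, Literal*> uint32_literals;

    Literal* DeduplicateUint32(uint32_t value) {
      return GetOrCreate(&uint32_literals, value,
                         [value]() { return new Literal{value}; });
    }

Deduplication matters because every distinct literal costs a word in the literal pool plus a linker patch entry, so repeated references to the same string, type, method, or address should share a single slot.
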
diff --git a/compiler/optimizing/code_generator_arm_vixl.h b/compiler/optimizing/code_generator_arm_vixl.h
index 93ea601ed8..45bd164508 100644
--- a/compiler/optimizing/code_generator_arm_vixl.h
+++ b/compiler/optimizing/code_generator_arm_vixl.h
@@ -107,20 +107,20 @@ static const size_t kRuntimeParameterFpuRegistersLengthVIXL =
arraysize(kRuntimeParameterFpuRegistersVIXL);
class LoadClassSlowPathARMVIXL;
-
class CodeGeneratorARMVIXL;
+using VIXLInt32Literal = vixl::aarch32::Literal<int32_t>;
+using VIXLUInt32Literal = vixl::aarch32::Literal<uint32_t>;
+
class JumpTableARMVIXL : public DeletableArenaObject<kArenaAllocSwitchTable> {
public:
- typedef vixl::aarch32::Literal<int32_t> IntLiteral;
-
explicit JumpTableARMVIXL(HPackedSwitch* switch_instr)
: switch_instr_(switch_instr),
table_start_(),
bb_addresses_(switch_instr->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
uint32_t num_entries = switch_instr_->GetNumEntries();
for (uint32_t i = 0; i < num_entries; i++) {
- IntLiteral *lit = new IntLiteral(0, vixl32::RawLiteral::kManuallyPlaced);
+ VIXLInt32Literal *lit = new VIXLInt32Literal(0, vixl32::RawLiteral::kManuallyPlaced);
bb_addresses_.emplace_back(lit);
}
}
@@ -133,7 +133,7 @@ class JumpTableARMVIXL : public DeletableArenaObject<kArenaAllocSwitchTable> {
private:
HPackedSwitch* const switch_instr_;
vixl::aarch32::Label table_start_;
- ArenaVector<std::unique_ptr<IntLiteral>> bb_addresses_;
+ ArenaVector<std::unique_ptr<VIXLInt32Literal>> bb_addresses_;
DISALLOW_COPY_AND_ASSIGN(JumpTableARMVIXL);
};
@@ -566,8 +566,22 @@ class CodeGeneratorARMVIXL : public CodeGenerator {
PcRelativePatchInfo* NewPcRelativeTypePatch(const DexFile& dex_file, dex::TypeIndex type_index);
PcRelativePatchInfo* NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
uint32_t element_offset);
+ VIXLUInt32Literal* DeduplicateBootImageStringLiteral(const DexFile& dex_file,
+ dex::StringIndex string_index);
+ VIXLUInt32Literal* DeduplicateBootImageTypeLiteral(const DexFile& dex_file,
+ dex::TypeIndex type_index);
+ VIXLUInt32Literal* DeduplicateBootImageAddressLiteral(uint32_t address);
+ VIXLUInt32Literal* DeduplicateDexCacheAddressLiteral(uint32_t address);
+ VIXLUInt32Literal* DeduplicateJitStringLiteral(const DexFile& dex_file,
+ dex::StringIndex string_index);
+ VIXLUInt32Literal* DeduplicateJitClassLiteral(const DexFile& dex_file,
+ dex::TypeIndex type_index,
+ uint64_t address);
+
void EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) OVERRIDE;
+ void EmitJitRootPatches(uint8_t* code, const uint8_t* roots_data) OVERRIDE;
+
// Fast path implementation of ReadBarrier::Barrier for a heap
// reference field load when Baker's read barriers are used.
void GenerateFieldLoadWithBakerReadBarrier(HInstruction* instruction,
@@ -673,10 +687,21 @@ class CodeGeneratorARMVIXL : public CodeGenerator {
vixl::aarch32::Register GetInvokeStaticOrDirectExtraParameter(HInvokeStaticOrDirect* invoke,
vixl::aarch32::Register temp);
- using Uint32ToLiteralMap = ArenaSafeMap<uint32_t, vixl::aarch32::Literal<uint32_t>*>;
+ using Uint32ToLiteralMap = ArenaSafeMap<uint32_t, VIXLUInt32Literal*>;
using MethodToLiteralMap =
- ArenaSafeMap<MethodReference, vixl::aarch32::Literal<uint32_t>*, MethodReferenceComparator>;
-
+ ArenaSafeMap<MethodReference, VIXLUInt32Literal*, MethodReferenceComparator>;
+ using StringToLiteralMap = ArenaSafeMap<StringReference,
+ VIXLUInt32Literal*,
+ StringReferenceValueComparator>;
+ using TypeToLiteralMap = ArenaSafeMap<TypeReference,
+ VIXLUInt32Literal*,
+ TypeReferenceValueComparator>;
+
+ VIXLUInt32Literal* DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map);
+ VIXLUInt32Literal* DeduplicateMethodLiteral(MethodReference target_method,
+ MethodToLiteralMap* map);
+ VIXLUInt32Literal* DeduplicateMethodAddressLiteral(MethodReference target_method);
+ VIXLUInt32Literal* DeduplicateMethodCodeLiteral(MethodReference target_method);
PcRelativePatchInfo* NewPcRelativePatch(const DexFile& dex_file,
uint32_t offset_or_index,
ArenaDeque<PcRelativePatchInfo>* patches);
@@ -697,15 +722,31 @@ class CodeGeneratorARMVIXL : public CodeGenerator {
ArmVIXLAssembler assembler_;
const ArmInstructionSetFeatures& isa_features_;
+ // Deduplication map for 32-bit literals, used for non-patchable boot image addresses.
+ Uint32ToLiteralMap uint32_literals_;
+ // Method patch info, map MethodReference to a literal for method address and method code.
+ MethodToLiteralMap method_patches_;
+ MethodToLiteralMap call_patches_;
// Relative call patch info.
// Using ArenaDeque<> which retains element addresses on push/emplace_back().
ArenaDeque<PatchInfo<vixl::aarch32::Label>> relative_call_patches_;
// PC-relative patch info for each HArmDexCacheArraysBase.
ArenaDeque<PcRelativePatchInfo> pc_relative_dex_cache_patches_;
+ // Deduplication map for boot string literals for kBootImageLinkTimeAddress.
+ StringToLiteralMap boot_image_string_patches_;
// PC-relative String patch info; type depends on configuration (app .bss or boot image PIC).
ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_;
+ // Deduplication map for boot type literals for kBootImageLinkTimeAddress.
+ TypeToLiteralMap boot_image_type_patches_;
// PC-relative type patch info.
ArenaDeque<PcRelativePatchInfo> pc_relative_type_patches_;
+ // Deduplication map for patchable boot image addresses.
+ Uint32ToLiteralMap boot_image_address_patches_;
+
+ // Patches for string literals in JIT compiled code.
+ StringToLiteralMap jit_string_patches_;
+ // Patches for class literals in JIT compiled code.
+ TypeToLiteralMap jit_class_patches_;
DISALLOW_COPY_AND_ASSIGN(CodeGeneratorARMVIXL);
};
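
EmitJitRootPatches(), declared above, pairs each deduplicated JIT literal with its index in the runtime's roots table and rewrites the literal's payload to the address of that table slot. A sketch of just that arithmetic, assuming 4-byte GcRoot slots on a 32-bit target and using memcpy in place of the direct store:

    #include <cstdint>
    #include <cstring>

    using GcRootSlot = uint32_t;  // Stand-in for GcRoot<mirror::Object>.

    // Point the 32-bit literal at code + literal_offset to entry
    // index_in_table of the roots table, as PatchJitRootUse() does.
    void PatchJitRootUse(uint8_t* code,
                         const uint8_t* roots_data,
                         uint32_t literal_offset,
                         uint64_t index_in_table) {
      uintptr_t address = reinterpret_cast<uintptr_t>(roots_data) +
                          index_in_table * sizeof(GcRootSlot);
      uint32_t payload = static_cast<uint32_t>(address);  // 32-bit target assumed.
      std::memcpy(code + literal_offset, &payload, sizeof(payload));
    }

The patched literal holds the address of a GC root rather than the object itself, which is why VisitLoadClass() and VisitLoadString() follow the literal load with GenerateGcRootFieldLoad() to dereference it under the configured read barrier.
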
diff --git a/compiler/optimizing/nodes.cc b/compiler/optimizing/nodes.cc
index 1e946d67b6..cabc0782ca 100644
--- a/compiler/optimizing/nodes.cc
+++ b/compiler/optimizing/nodes.cc
@@ -1108,13 +1108,23 @@ size_t HInstruction::EnvironmentSize() const {
return HasEnvironment() ? environment_->Size() : 0;
}
-void HPhi::AddInput(HInstruction* input) {
+void HVariableInputSizeInstruction::AddInput(HInstruction* input) {
DCHECK(input->GetBlock() != nullptr);
inputs_.push_back(HUserRecord<HInstruction*>(input));
input->AddUseAt(this, inputs_.size() - 1);
}
-void HPhi::RemoveInputAt(size_t index) {
+void HVariableInputSizeInstruction::InsertInputAt(size_t index, HInstruction* input) {
+ inputs_.insert(inputs_.begin() + index, HUserRecord<HInstruction*>(input));
+ input->AddUseAt(this, index);
+ // Update indexes in use nodes of inputs that have been pushed further back by the insert().
+ for (size_t i = index + 1u, e = inputs_.size(); i < e; ++i) {
+ DCHECK_EQ(inputs_[i].GetUseNode()->GetIndex(), i - 1u);
+ inputs_[i].GetUseNode()->SetIndex(i);
+ }
+}
+
+void HVariableInputSizeInstruction::RemoveInputAt(size_t index) {
RemoveAsUserOfInput(index);
inputs_.erase(inputs_.begin() + index);
// Update indexes in use nodes of inputs that have been pulled forward by the erase().
@@ -2386,26 +2396,6 @@ bool HInvokeStaticOrDirect::NeedsDexCacheOfDeclaringClass() const {
return !opt.GetDoesNotNeedDexCache();
}
-void HInvokeStaticOrDirect::InsertInputAt(size_t index, HInstruction* input) {
- inputs_.insert(inputs_.begin() + index, HUserRecord<HInstruction*>(input));
- input->AddUseAt(this, index);
- // Update indexes in use nodes of inputs that have been pushed further back by the insert().
- for (size_t i = index + 1u, e = inputs_.size(); i < e; ++i) {
- DCHECK_EQ(inputs_[i].GetUseNode()->GetIndex(), i - 1u);
- inputs_[i].GetUseNode()->SetIndex(i);
- }
-}
-
-void HInvokeStaticOrDirect::RemoveInputAt(size_t index) {
- RemoveAsUserOfInput(index);
- inputs_.erase(inputs_.begin() + index);
- // Update indexes in use nodes of inputs that have been pulled forward by the erase().
- for (size_t i = index, e = inputs_.size(); i < e; ++i) {
- DCHECK_EQ(inputs_[i].GetUseNode()->GetIndex(), i + 1u);
- inputs_[i].GetUseNode()->SetIndex(i);
- }
-}
-
std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::MethodLoadKind rhs) {
switch (rhs) {
case HInvokeStaticOrDirect::MethodLoadKind::kStringInit:
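
The InsertInputAt()/RemoveInputAt() bodies hoisted into HVariableInputSizeInstruction above both preserve the same invariant: inputs_[i] holds a use node whose recorded index equals i. A minimal sketch of that back-pointer maintenance, with plain std::vector standing in for ArenaVector and trivial structs for HUserRecord/HUseListNode:

    #include <cstddef>
    #include <vector>

    struct Use { size_t index; };      // Stand-in for HUseListNode.
    struct InputRecord { Use* use; };  // Stand-in for HUserRecord.

    // After inserting at `index`, every later record has shifted right by
    // one, so each one's use node is bumped to match its new position.
    void InsertInputAt(std::vector<InputRecord>* inputs, size_t index, Use* use) {
      inputs->insert(inputs->begin() + index, InputRecord{use});
      use->index = index;
      for (size_t i = index + 1; i < inputs->size(); ++i) {
        (*inputs)[i].use->index = i;
      }
    }

    // After erasing at `index`, later records have shifted left by one.
    void RemoveInputAt(std::vector<InputRecord>* inputs, size_t index) {
      inputs->erase(inputs->begin() + index);
      for (size_t i = index; i < inputs->size(); ++i) {
        (*inputs)[i].use->index = i;
      }
    }

Hoisting these into the new base class lets HPhi and HInvoke (see the nodes.h changes below) share one implementation instead of each carrying its own inputs_ vector and index-fixup loops.
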
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 4a8cfcb158..4a77bed44a 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -2347,6 +2347,27 @@ class HBackwardInstructionIterator : public ValueObject {
DISALLOW_COPY_AND_ASSIGN(HBackwardInstructionIterator);
};
+class HVariableInputSizeInstruction : public HInstruction {
+ public:
+ void AddInput(HInstruction* input);
+ void InsertInputAt(size_t index, HInstruction* input);
+ void RemoveInputAt(size_t index);
+
+ protected:
+ HVariableInputSizeInstruction(SideEffects side_effects,
+ uint32_t dex_pc,
+ ArenaAllocator* arena,
+ size_t number_of_inputs,
+ ArenaAllocKind kind)
+ : HInstruction(side_effects, dex_pc),
+ inputs_(number_of_inputs, arena->Adapter(kind)) {}
+
+ ArenaVector<HUserRecord<HInstruction*>> inputs_;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(HVariableInputSizeInstruction);
+};
+
template<size_t N>
class HTemplateInstruction: public HInstruction {
public:
@@ -2438,15 +2459,19 @@ class HReturn FINAL : public HTemplateInstruction<1> {
DISALLOW_COPY_AND_ASSIGN(HReturn);
};
-class HPhi FINAL : public HInstruction {
+class HPhi FINAL : public HVariableInputSizeInstruction {
public:
HPhi(ArenaAllocator* arena,
uint32_t reg_number,
size_t number_of_inputs,
Primitive::Type type,
uint32_t dex_pc = kNoDexPc)
- : HInstruction(SideEffects::None(), dex_pc),
- inputs_(number_of_inputs, arena->Adapter(kArenaAllocPhiInputs)),
+ : HVariableInputSizeInstruction(
+ SideEffects::None(),
+ dex_pc,
+ arena,
+ number_of_inputs,
+ kArenaAllocPhiInputs),
reg_number_(reg_number) {
SetPackedField<TypeField>(ToPhiType(type));
DCHECK_NE(GetType(), Primitive::kPrimVoid);
@@ -2469,9 +2494,6 @@ class HPhi FINAL : public HInstruction {
return ArrayRef<HUserRecord<HInstruction*>>(inputs_);
}
- void AddInput(HInstruction* input);
- void RemoveInputAt(size_t index);
-
Primitive::Type GetType() const OVERRIDE { return GetPackedField<TypeField>(); }
void SetType(Primitive::Type new_type) {
// Make sure that only valid type changes occur. The following are allowed:
@@ -2527,7 +2549,6 @@ class HPhi FINAL : public HInstruction {
static_assert(kNumberOfPhiPackedBits <= kMaxNumberOfPackedBits, "Too many packed fields.");
using TypeField = BitField<Primitive::Type, kFieldType, kFieldTypeSize>;
- ArenaVector<HUserRecord<HInstruction*>> inputs_;
const uint32_t reg_number_;
DISALLOW_COPY_AND_ASSIGN(HPhi);
@@ -3791,7 +3812,7 @@ enum IntrinsicExceptions {
kCanThrow // Intrinsic may throw exceptions.
};
-class HInvoke : public HInstruction {
+class HInvoke : public HVariableInputSizeInstruction {
public:
bool NeedsEnvironment() const OVERRIDE;
@@ -3878,12 +3899,14 @@ class HInvoke : public HInstruction {
uint32_t dex_method_index,
ArtMethod* resolved_method,
InvokeType invoke_type)
- : HInstruction(
- SideEffects::AllExceptGCDependency(), dex_pc), // Assume write/read on all fields/arrays.
+ : HVariableInputSizeInstruction(
+ SideEffects::AllExceptGCDependency(), // Assume write/read on all fields/arrays.
+ dex_pc,
+ arena,
+ number_of_arguments + number_of_other_inputs,
+ kArenaAllocInvokeInputs),
number_of_arguments_(number_of_arguments),
resolved_method_(resolved_method),
- inputs_(number_of_arguments + number_of_other_inputs,
- arena->Adapter(kArenaAllocInvokeInputs)),
dex_method_index_(dex_method_index),
intrinsic_(Intrinsics::kNone),
intrinsic_optimizations_(0) {
@@ -3894,7 +3917,6 @@ class HInvoke : public HInstruction {
uint32_t number_of_arguments_;
ArtMethod* const resolved_method_;
- ArenaVector<HUserRecord<HInstruction*>> inputs_;
const uint32_t dex_method_index_;
Intrinsics intrinsic_;
@@ -4184,10 +4206,6 @@ class HInvokeStaticOrDirect FINAL : public HInvoke {
DECLARE_INSTRUCTION(InvokeStaticOrDirect);
- protected:
- void InsertInputAt(size_t index, HInstruction* input);
- void RemoveInputAt(size_t index);
-
private:
static constexpr size_t kFieldClinitCheckRequirement = kNumberOfInvokePackedBits;
static constexpr size_t kFieldClinitCheckRequirementSize =