Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/builder.h                          |   6
-rw-r--r--  compiler/optimizing/code_generator.cc                  |  55
-rw-r--r--  compiler/optimizing/code_generator.h                   |  17
-rw-r--r--  compiler/optimizing/code_generator_arm.cc              | 165
-rw-r--r--  compiler/optimizing/code_generator_arm.h               |  10
-rw-r--r--  compiler/optimizing/code_generator_arm64.cc            | 182
-rw-r--r--  compiler/optimizing/code_generator_arm64.h             |  16
-rw-r--r--  compiler/optimizing/code_generator_arm_vixl.cc         | 156
-rw-r--r--  compiler/optimizing/code_generator_arm_vixl.h          |  10
-rw-r--r--  compiler/optimizing/code_generator_mips.cc             | 158
-rw-r--r--  compiler/optimizing/code_generator_mips.h              |   8
-rw-r--r--  compiler/optimizing/code_generator_mips64.cc           | 143
-rw-r--r--  compiler/optimizing/code_generator_mips64.h            |   8
-rw-r--r--  compiler/optimizing/code_generator_x86.cc              | 174
-rw-r--r--  compiler/optimizing/code_generator_x86.h               |  13
-rw-r--r--  compiler/optimizing/code_generator_x86_64.cc           | 156
-rw-r--r--  compiler/optimizing/code_generator_x86_64.h            |  13
-rw-r--r--  compiler/optimizing/dex_cache_array_fixups_arm.cc      |  20
-rw-r--r--  compiler/optimizing/dex_cache_array_fixups_mips.cc     |  20
-rw-r--r--  compiler/optimizing/graph_visualizer.cc                |   5
-rw-r--r--  compiler/optimizing/induction_var_range.cc             |   8
-rw-r--r--  compiler/optimizing/induction_var_range.h              |   4
-rw-r--r--  compiler/optimizing/inliner.cc                         |  32
-rw-r--r--  compiler/optimizing/inliner.h                          |   2
-rw-r--r--  compiler/optimizing/instruction_builder.cc             | 202
-rw-r--r--  compiler/optimizing/instruction_builder.h              |  24
-rw-r--r--  compiler/optimizing/intrinsics.cc                      |   2
-rw-r--r--  compiler/optimizing/intrinsics_mips.cc                 |   5
-rw-r--r--  compiler/optimizing/load_store_elimination.cc          |   6
-rw-r--r--  compiler/optimizing/loop_optimization.cc               | 166
-rw-r--r--  compiler/optimizing/loop_optimization.h                |   8
-rw-r--r--  compiler/optimizing/nodes.cc                           |  67
-rw-r--r--  compiler/optimizing/nodes.h                            | 197
-rw-r--r--  compiler/optimizing/nodes_test.cc                      |   8
-rw-r--r--  compiler/optimizing/pc_relative_fixups_mips.cc         |   3
-rw-r--r--  compiler/optimizing/pc_relative_fixups_x86.cc          |   2
-rw-r--r--  compiler/optimizing/prepare_for_register_allocation.cc |  47
-rw-r--r--  compiler/optimizing/prepare_for_register_allocation.h  |   1
-rw-r--r--  compiler/optimizing/reference_type_propagation.cc      |  25
-rw-r--r--  compiler/optimizing/sharpening.cc                      |  96
-rw-r--r--  compiler/optimizing/sharpening.h                       |   2
-rw-r--r--  compiler/optimizing/stack_map_stream.cc                |  68
-rw-r--r--  compiler/optimizing/stack_map_stream.h                 |   8
-rw-r--r--  compiler/optimizing/stack_map_test.cc                  |  53
44 files changed, 1260 insertions, 1111 deletions
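
Taken together, the changes below (a) replace the HLoadClass kDexCachePcRelative load kind with a PC-relative .bss entry kind (kBssEntry), (b) fold the kDexCacheViaMethod path into shared runtime-call helpers on CodeGenerator, and (c) add HInvokePolymorphic plumbing to each backend. A condensed sketch of the per-backend VisitLoadClass dispatch that results, assembled from the hunks below ("InstructionCodeGenerator" is a backend-neutral stand-in name; guards simplified):

    void InstructionCodeGenerator::VisitLoadClass(HLoadClass* cls) {
      if (cls->GetLoadKind() == HLoadClass::LoadKind::kDexCacheViaMethod) {
        // Shared path: moves the type index into a temp and calls the
        // matching kQuickInitialize* entrypoint.
        codegen_->GenerateLoadClassRuntimeCall(cls);
        return;
      }
      DCHECK(!cls->NeedsAccessCheck());
      // ... per-load-kind fast paths: kReferrersClass, kBootImage*,
      // kBssEntry, kJitTableAddress ...
    }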
diff --git a/compiler/optimizing/builder.h b/compiler/optimizing/builder.h
index f896f1199e..8cf4089eba 100644
--- a/compiler/optimizing/builder.h
+++ b/compiler/optimizing/builder.h
@@ -63,7 +63,8 @@ class HGraphBuilder : public ValueObject {
driver,
interpreter_metadata,
compiler_stats,
- dex_cache) {}
+ dex_cache,
+ handles) {}
// Only for unit testing.
HGraphBuilder(HGraph* graph,
@@ -90,7 +91,8 @@ class HGraphBuilder : public ValueObject {
/* compiler_driver */ nullptr,
/* interpreter_metadata */ nullptr,
/* compiler_stats */ nullptr,
- null_dex_cache_) {}
+ null_dex_cache_,
+ handles) {}
GraphAnalysisResult BuildGraph();
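
Both HGraphBuilder constructors now forward a handle scope into the delegated constructor; this is what later lets HLoadClass carry a Handle<mirror::Class> (see the DeduplicateJitClassLiteral changes below) instead of a raw uint64_t address. A hypothetical call-site sketch — the surrounding constructor arguments are elided, and `soa` is assumed to be a ScopedObjectAccess:

    VariableSizedHandleScope handles(soa.Self());
    HGraphBuilder builder(graph, /* ... */, &handles);
    GraphAnalysisResult result = builder.BuildGraph();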
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index f00648f570..70c2738010 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -367,6 +367,12 @@ void CodeGenerator::GenerateInvokeUnresolvedRuntimeCall(HInvokeUnresolved* invok
InvokeRuntime(entrypoint, invoke, invoke->GetDexPc(), nullptr);
}
+void CodeGenerator::GenerateInvokePolymorphicCall(HInvokePolymorphic* invoke) {
+ MoveConstant(invoke->GetLocations()->GetTemp(0), static_cast<int32_t>(invoke->GetType()));
+ QuickEntrypointEnum entrypoint = kQuickInvokePolymorphic;
+ InvokeRuntime(entrypoint, invoke, invoke->GetDexPc(), nullptr);
+}
+
void CodeGenerator::CreateUnresolvedFieldLocationSummary(
HInstruction* field_access,
Primitive::Type field_type,
@@ -491,31 +497,34 @@ void CodeGenerator::GenerateUnresolvedFieldAccess(
}
}
-// TODO: Remove argument `code_generator_supports_read_barrier` when
-// all code generators have read barrier support.
-void CodeGenerator::CreateLoadClassLocationSummary(HLoadClass* cls,
- Location runtime_type_index_location,
- Location runtime_return_location,
- bool code_generator_supports_read_barrier) {
- ArenaAllocator* allocator = cls->GetBlock()->GetGraph()->GetArena();
- LocationSummary::CallKind call_kind = cls->NeedsAccessCheck()
- ? LocationSummary::kCallOnMainOnly
- : (((code_generator_supports_read_barrier && kEmitCompilerReadBarrier) ||
- cls->CanCallRuntime())
- ? LocationSummary::kCallOnSlowPath
- : LocationSummary::kNoCall);
- LocationSummary* locations = new (allocator) LocationSummary(cls, call_kind);
+void CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(HLoadClass* cls,
+ Location runtime_type_index_location,
+ Location runtime_return_location) {
+ DCHECK_EQ(cls->GetLoadKind(), HLoadClass::LoadKind::kDexCacheViaMethod);
+ DCHECK_EQ(cls->InputCount(), 1u);
+ LocationSummary* locations = new (cls->GetBlock()->GetGraph()->GetArena()) LocationSummary(
+ cls, LocationSummary::kCallOnMainOnly);
+ locations->SetInAt(0, Location::NoLocation());
+ locations->AddTemp(runtime_type_index_location);
+ locations->SetOut(runtime_return_location);
+}
+
+void CodeGenerator::GenerateLoadClassRuntimeCall(HLoadClass* cls) {
+ DCHECK_EQ(cls->GetLoadKind(), HLoadClass::LoadKind::kDexCacheViaMethod);
+ LocationSummary* locations = cls->GetLocations();
+ MoveConstant(locations->GetTemp(0), cls->GetTypeIndex().index_);
if (cls->NeedsAccessCheck()) {
- locations->SetInAt(0, Location::NoLocation());
- locations->AddTemp(runtime_type_index_location);
- locations->SetOut(runtime_return_location);
+ CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
+ InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
+ } else if (cls->MustGenerateClinitCheck()) {
+ CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
+ InvokeRuntime(kQuickInitializeStaticStorage, cls, cls->GetDexPc());
} else {
- locations->SetInAt(0, Location::RequiresRegister());
- locations->SetOut(Location::RequiresRegister());
+ CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
+ InvokeRuntime(kQuickInitializeType, cls, cls->GetDexPc());
}
}
-
void CodeGenerator::BlockIfInRegister(Location location, bool is_out) const {
// The DCHECKS below check that a register is not specified twice in
// the summary. The out location can overlap with an input, so we need
@@ -927,10 +936,10 @@ void CodeGenerator::EmitEnvironment(HEnvironment* environment, SlowPathCode* slo
if (environment->GetParent() != nullptr) {
// We emit the parent environment first.
EmitEnvironment(environment->GetParent(), slow_path);
- stack_map_stream_.BeginInlineInfoEntry(environment->GetMethodIdx(),
+ stack_map_stream_.BeginInlineInfoEntry(environment->GetMethod(),
environment->GetDexPc(),
- environment->GetInvokeType(),
- environment->Size());
+ environment->Size(),
+ &graph_->GetDexFile());
}
// Walk over the environment, and record the location of dex registers.
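
GenerateLoadClassRuntimeCall centralizes the entrypoint choice that each backend previously open-coded in its VisitLoadClass. The selection in the hunk above reduces to:

    // NeedsAccessCheck()        -> kQuickInitializeTypeAndVerifyAccess
    // MustGenerateClinitCheck() -> kQuickInitializeStaticStorage
    // otherwise                 -> kQuickInitializeType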
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index 6366b9838f..38d532e1e9 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -426,12 +426,12 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
}
- // Perfoms checks pertaining to an InvokeRuntime call.
+ // Performs checks pertaining to an InvokeRuntime call.
void ValidateInvokeRuntime(QuickEntrypointEnum entrypoint,
HInstruction* instruction,
SlowPathCode* slow_path);
- // Perfoms checks pertaining to an InvokeRuntimeWithoutRecordingPcInfo call.
+ // Performs checks pertaining to an InvokeRuntimeWithoutRecordingPcInfo call.
static void ValidateInvokeRuntimeWithoutRecordingPcInfo(HInstruction* instruction,
SlowPathCode* slow_path);
@@ -495,6 +495,8 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
void GenerateInvokeUnresolvedRuntimeCall(HInvokeUnresolved* invoke);
+ void GenerateInvokePolymorphicCall(HInvokePolymorphic* invoke);
+
void CreateUnresolvedFieldLocationSummary(
HInstruction* field_access,
Primitive::Type field_type,
@@ -507,11 +509,10 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
uint32_t dex_pc,
const FieldAccessCallingConvention& calling_convention);
- // TODO: This overlaps a bit with MoveFromReturnRegister. Refactor for a better design.
- static void CreateLoadClassLocationSummary(HLoadClass* cls,
- Location runtime_type_index_location,
- Location runtime_return_location,
- bool code_generator_supports_read_barrier = false);
+ static void CreateLoadClassRuntimeCallLocationSummary(HLoadClass* cls,
+ Location runtime_type_index_location,
+ Location runtime_return_location);
+ void GenerateLoadClassRuntimeCall(HLoadClass* cls);
static void CreateSystemArrayCopyLocationSummary(HInvoke* invoke);
@@ -521,7 +522,7 @@ class CodeGenerator : public DeletableArenaObject<kArenaAllocCodeGenerator> {
virtual void InvokeRuntime(QuickEntrypointEnum entrypoint,
HInstruction* instruction,
uint32_t dex_pc,
- SlowPathCode* slow_path) = 0;
+ SlowPathCode* slow_path = nullptr) = 0;
// Check if the desired_string_load_kind is supported. If it is, return it,
// otherwise return a fall-back kind that should be used instead.
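
Defaulting `slow_path` to nullptr lets the many call sites without a slow path drop the trailing argument, which the .cc hunks elsewhere in this change rely on; after this change the two calls below are equivalent:

    codegen_->InvokeRuntime(kQuickInitializeType, cls, cls->GetDexPc());
    codegen_->InvokeRuntime(kQuickInitializeType, cls, cls->GetDexPc(), nullptr);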
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index 3009103ac7..07b174698a 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -371,22 +371,23 @@ class LoadClassSlowPathARM : public SlowPathCodeARM {
HInstruction* at,
uint32_t dex_pc,
bool do_clinit)
- : SlowPathCodeARM(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
+ : SlowPathCodeARM(at), cls_(cls), dex_pc_(dex_pc), do_clinit_(do_clinit) {
DCHECK(at->IsLoadClass() || at->IsClinitCheck());
}
void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
- LocationSummary* locations = at_->GetLocations();
+ LocationSummary* locations = instruction_->GetLocations();
CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
__ Bind(GetEntryLabel());
SaveLiveRegisters(codegen, locations);
InvokeRuntimeCallingConvention calling_convention;
- __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex().index_);
+ dex::TypeIndex type_index = cls_->GetTypeIndex();
+ __ LoadImmediate(calling_convention.GetRegisterAt(0), type_index.index_);
QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
: kQuickInitializeType;
- arm_codegen->InvokeRuntime(entrypoint, at_, dex_pc_, this);
+ arm_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
if (do_clinit_) {
CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
} else {
@@ -400,6 +401,23 @@ class LoadClassSlowPathARM : public SlowPathCodeARM {
arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
}
RestoreLiveRegisters(codegen, locations);
+ // For HLoadClass/kBssEntry, store the resolved Class to the BSS entry.
+ DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
+ if (cls_ == instruction_ && cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
+ DCHECK(out.IsValid());
+ // TODO: Change art_quick_initialize_type/art_quick_initialize_static_storage to
+ // kSaveEverything and use a temporary for the .bss entry address in the fast path,
+ // so that we can avoid another calculation here.
+ CodeGeneratorARM::PcRelativePatchInfo* labels =
+ arm_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index);
+ __ BindTrackedLabel(&labels->movw_label);
+ __ movw(IP, /* placeholder */ 0u);
+ __ BindTrackedLabel(&labels->movt_label);
+ __ movt(IP, /* placeholder */ 0u);
+ __ BindTrackedLabel(&labels->add_pc_label);
+ __ add(IP, IP, ShifterOperand(PC));
+ __ str(locations->Out().AsRegister<Register>(), Address(IP));
+ }
__ b(GetExitLabel());
}
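
The MOVW/MOVT/ADD-PC triple emitted above is ARM32's standard position-independent address materialization; binding the tracked labels lets the linker rewrite the placeholder immediates later. A schematic of the patched result (relocation syntax is illustrative, not literal linker output):

    // movw ip, #:lower16:(type_bss_entry - anchor)
    // movt ip, #:upper16:(type_bss_entry - anchor)
    // add  ip, ip, pc   // ip = address of the type's .bss entry
    // str  r0, [ip]     // publish the freshly resolved Class*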
@@ -409,10 +427,6 @@ class LoadClassSlowPathARM : public SlowPathCodeARM {
// The class this slow path will load.
HLoadClass* const cls_;
- // The instruction where this slow path is happening.
- // (Might be the load class or an initialization check).
- HInstruction* const at_;
-
// The dex PC of `at_`.
const uint32_t dex_pc_;
@@ -430,7 +444,7 @@ class LoadStringSlowPathARM : public SlowPathCodeARM {
LocationSummary* locations = instruction_->GetLocations();
DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
HLoadString* load = instruction_->AsLoadString();
- const uint32_t string_index = load->GetStringIndex().index_;
+ const dex::StringIndex string_index = load->GetStringIndex();
Register out = locations->Out().AsRegister<Register>();
Register temp = locations->GetTemp(0).AsRegister<Register>();
constexpr bool call_saves_everything_except_r0 = (!kUseReadBarrier || kUseBakerReadBarrier);
@@ -449,7 +463,7 @@ class LoadStringSlowPathARM : public SlowPathCodeARM {
__ mov(entry_address, ShifterOperand(temp));
}
- __ LoadImmediate(calling_convention.GetRegisterAt(0), string_index);
+ __ LoadImmediate(calling_convention.GetRegisterAt(0), string_index.index_);
arm_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
@@ -1208,6 +1222,7 @@ CodeGeneratorARM::CodeGeneratorARM(HGraph* graph,
boot_image_type_patches_(TypeReferenceValueComparator(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
boot_image_address_patches_(std::less<uint32_t>(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_string_patches_(StringReferenceValueComparator(),
@@ -2370,6 +2385,14 @@ void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke)
codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
+void LocationsBuilderARM::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
+ HandleInvoke(invoke);
+}
+
+void InstructionCodeGeneratorARM::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
+ codegen_->GenerateInvokePolymorphicCall(invoke);
+}
+
void LocationsBuilderARM::VisitNeg(HNeg* neg) {
LocationSummary* locations =
new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
@@ -3936,7 +3959,6 @@ void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
} else {
InvokeRuntimeCallingConvention calling_convention;
locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
- locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}
locations->SetOut(Location::RegisterLocation(R0));
}
@@ -3954,7 +3976,7 @@ void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
} else {
codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
- CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
+ CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
}
}
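
The VisitNewInstance hunks drop the second runtime argument because the checked allocation entrypoint now receives the resolved class itself rather than a type index plus the referring ArtMethod, as the updated CheckEntrypointTypes<> assertion encodes. Schematically (runtime-internal parameters such as Thread* elided):

    // before: obj = artAllocObjectWithAccessCheck(type_idx, referrer);  // <void*, uint32_t, ArtMethod*>
    // after:  obj = artAllocObjectWithChecks(klass);                    // <void*, mirror::Class*>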
@@ -5709,17 +5731,11 @@ HLoadClass::LoadKind CodeGeneratorARM::GetSupportedLoadClassKind(
break;
case HLoadClass::LoadKind::kBootImageAddress:
break;
- case HLoadClass::LoadKind::kJitTableAddress:
- break;
- case HLoadClass::LoadKind::kDexCachePcRelative:
+ case HLoadClass::LoadKind::kBssEntry:
DCHECK(!Runtime::Current()->UseJitCompilation());
- // We disable pc-relative load when there is an irreducible loop, as the optimization
- // is incompatible with it.
- // TODO: Create as many ArmDexCacheArraysBase instructions as needed for methods
- // with irreducible loops.
- if (GetGraph()->HasIrreducibleLoops()) {
- return HLoadClass::LoadKind::kDexCacheViaMethod;
- }
+ break;
+ case HLoadClass::LoadKind::kJitTableAddress:
+ DCHECK(Runtime::Current()->UseJitCompilation());
break;
case HLoadClass::LoadKind::kDexCacheViaMethod:
break;
@@ -5728,15 +5744,16 @@ HLoadClass::LoadKind CodeGeneratorARM::GetSupportedLoadClassKind(
}
void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
- if (cls->NeedsAccessCheck()) {
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
InvokeRuntimeCallingConvention calling_convention;
- CodeGenerator::CreateLoadClassLocationSummary(
+ CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
cls,
Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
- Location::RegisterLocation(R0),
- /* code_generator_supports_read_barrier */ true);
+ Location::RegisterLocation(R0));
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
@@ -5747,24 +5764,23 @@ void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
}
- HLoadClass::LoadKind load_kind = cls->GetLoadKind();
- if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
- load_kind == HLoadClass::LoadKind::kDexCacheViaMethod ||
- load_kind == HLoadClass::LoadKind::kDexCachePcRelative) {
+ if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
locations->SetInAt(0, Location::RequiresRegister());
}
locations->SetOut(Location::RequiresRegister());
}
-void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
- LocationSummary* locations = cls->GetLocations();
- if (cls->NeedsAccessCheck()) {
- codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex().index_);
- codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
- CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
+// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
+// move.
+void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
+ codegen_->GenerateLoadClassRuntimeCall(cls);
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
+ LocationSummary* locations = cls->GetLocations();
Location out_loc = locations->Out();
Register out = out_loc.AsRegister<Register>();
@@ -5772,7 +5788,7 @@ void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
? kWithoutReadBarrier
: kCompilerReadBarrierOption;
bool generate_null_check = false;
- switch (cls->GetLoadKind()) {
+ switch (load_kind) {
case HLoadClass::LoadKind::kReferrersClass: {
DCHECK(!cls->CanCallRuntime());
DCHECK(!cls->MustGenerateClinitCheck());
@@ -5786,12 +5802,14 @@ void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
break;
}
case HLoadClass::LoadKind::kBootImageLinkTimeAddress: {
+ DCHECK(codegen_->GetCompilerOptions().IsBootImage());
DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
__ LoadLiteral(out, codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(),
cls->GetTypeIndex()));
break;
}
case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
+ DCHECK(codegen_->GetCompilerOptions().IsBootImage());
DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
CodeGeneratorARM::PcRelativePatchInfo* labels =
codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
@@ -5805,41 +5823,36 @@ void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
}
case HLoadClass::LoadKind::kBootImageAddress: {
DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
- DCHECK_NE(cls->GetAddress(), 0u);
- uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
+ uint32_t address = dchecked_integral_cast<uint32_t>(
+ reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
+ DCHECK_NE(address, 0u);
__ LoadLiteral(out, codegen_->DeduplicateBootImageAddressLiteral(address));
break;
}
+ case HLoadClass::LoadKind::kBssEntry: {
+ CodeGeneratorARM::PcRelativePatchInfo* labels =
+ codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
+ __ BindTrackedLabel(&labels->movw_label);
+ __ movw(out, /* placeholder */ 0u);
+ __ BindTrackedLabel(&labels->movt_label);
+ __ movt(out, /* placeholder */ 0u);
+ __ BindTrackedLabel(&labels->add_pc_label);
+ __ add(out, out, ShifterOperand(PC));
+ GenerateGcRootFieldLoad(cls, out_loc, out, 0, kCompilerReadBarrierOption);
+ generate_null_check = true;
+ break;
+ }
case HLoadClass::LoadKind::kJitTableAddress: {
__ LoadLiteral(out, codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
cls->GetTypeIndex(),
- cls->GetAddress()));
+ cls->GetClass()));
// /* GcRoot<mirror::Class> */ out = *out
GenerateGcRootFieldLoad(cls, out_loc, out, /* offset */ 0, kCompilerReadBarrierOption);
break;
}
- case HLoadClass::LoadKind::kDexCachePcRelative: {
- Register base_reg = locations->InAt(0).AsRegister<Register>();
- HArmDexCacheArraysBase* base = cls->InputAt(0)->AsArmDexCacheArraysBase();
- int32_t offset = cls->GetDexCacheElementOffset() - base->GetElementOffset();
- // /* GcRoot<mirror::Class> */ out = *(dex_cache_arrays_base + offset)
- GenerateGcRootFieldLoad(cls, out_loc, base_reg, offset, read_barrier_option);
- generate_null_check = !cls->IsInDexCache();
- break;
- }
- case HLoadClass::LoadKind::kDexCacheViaMethod: {
- // /* GcRoot<mirror::Class>[] */ out =
- // current_method.ptr_sized_fields_->dex_cache_resolved_types_
- Register current_method = locations->InAt(0).AsRegister<Register>();
- __ LoadFromOffset(kLoadWord,
- out,
- current_method,
- ArtMethod::DexCacheResolvedTypesOffset(kArmPointerSize).Int32Value());
- // /* GcRoot<mirror::Class> */ out = out[type_index]
- size_t offset = CodeGenerator::GetCacheOffset(cls->GetTypeIndex().index_);
- GenerateGcRootFieldLoad(cls, out_loc, out, offset, read_barrier_option);
- generate_null_check = !cls->IsInDexCache();
- }
+ case HLoadClass::LoadKind::kDexCacheViaMethod:
+ LOG(FATAL) << "UNREACHABLE";
+ UNREACHABLE();
}
if (generate_null_check || cls->MustGenerateClinitCheck()) {
@@ -5947,6 +5960,7 @@ void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) NO_THREAD_S
switch (load_kind) {
case HLoadString::LoadKind::kBootImageLinkTimeAddress: {
+ DCHECK(codegen_->GetCompilerOptions().IsBootImage());
__ LoadLiteral(out, codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
load->GetStringIndex()));
return; // No dex cache slow path.
@@ -5954,7 +5968,7 @@ void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) NO_THREAD_S
case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
DCHECK(codegen_->GetCompilerOptions().IsBootImage());
CodeGeneratorARM::PcRelativePatchInfo* labels =
- codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex().index_);
+ codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
__ BindTrackedLabel(&labels->movw_label);
__ movw(out, /* placeholder */ 0u);
__ BindTrackedLabel(&labels->movt_label);
@@ -5974,7 +5988,7 @@ void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) NO_THREAD_S
DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
Register temp = locations->GetTemp(0).AsRegister<Register>();
CodeGeneratorARM::PcRelativePatchInfo* labels =
- codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex().index_);
+ codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
__ BindTrackedLabel(&labels->movw_label);
__ movw(temp, /* placeholder */ 0u);
__ BindTrackedLabel(&labels->movt_label);
@@ -7280,8 +7294,8 @@ void CodeGeneratorARM::GenerateVirtualCall(HInvokeVirtual* invoke, Location temp
}
CodeGeneratorARM::PcRelativePatchInfo* CodeGeneratorARM::NewPcRelativeStringPatch(
- const DexFile& dex_file, uint32_t string_index) {
- return NewPcRelativePatch(dex_file, string_index, &pc_relative_string_patches_);
+ const DexFile& dex_file, dex::StringIndex string_index) {
+ return NewPcRelativePatch(dex_file, string_index.index_, &pc_relative_string_patches_);
}
CodeGeneratorARM::PcRelativePatchInfo* CodeGeneratorARM::NewPcRelativeTypePatch(
@@ -7289,6 +7303,11 @@ CodeGeneratorARM::PcRelativePatchInfo* CodeGeneratorARM::NewPcRelativeTypePatch(
return NewPcRelativePatch(dex_file, type_index.index_, &pc_relative_type_patches_);
}
+CodeGeneratorARM::PcRelativePatchInfo* CodeGeneratorARM::NewTypeBssEntryPatch(
+ const DexFile& dex_file, dex::TypeIndex type_index) {
+ return NewPcRelativePatch(dex_file, type_index.index_, &type_bss_entry_patches_);
+}
+
CodeGeneratorARM::PcRelativePatchInfo* CodeGeneratorARM::NewPcRelativeDexCacheArrayPatch(
const DexFile& dex_file, uint32_t element_offset) {
return NewPcRelativePatch(dex_file, element_offset, &pc_relative_dex_cache_patches_);
@@ -7332,8 +7351,9 @@ Literal* CodeGeneratorARM::DeduplicateJitStringLiteral(const DexFile& dex_file,
Literal* CodeGeneratorARM::DeduplicateJitClassLiteral(const DexFile& dex_file,
dex::TypeIndex type_index,
- uint64_t address) {
- jit_class_roots_.Overwrite(TypeReference(&dex_file, type_index), address);
+ Handle<mirror::Class> handle) {
+ jit_class_roots_.Overwrite(TypeReference(&dex_file, type_index),
+ reinterpret_cast64<uint64_t>(handle.GetReference()));
return jit_class_patches_.GetOrCreate(
TypeReference(&dex_file, type_index),
[this]() { return __ NewLiteral<uint32_t>(/* placeholder */ 0u); });
@@ -7367,6 +7387,7 @@ void CodeGeneratorARM::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patche
/* MOVW+MOVT for each entry */ 2u * pc_relative_string_patches_.size() +
boot_image_type_patches_.size() +
/* MOVW+MOVT for each entry */ 2u * pc_relative_type_patches_.size() +
+ /* MOVW+MOVT for each entry */ 2u * type_bss_entry_patches_.size() +
boot_image_address_patches_.size();
linker_patches->reserve(size);
EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
@@ -7381,12 +7402,17 @@ void CodeGeneratorARM::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patche
target_string.string_index.index_));
}
if (!GetCompilerOptions().IsBootImage()) {
+ DCHECK(pc_relative_type_patches_.empty());
EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
linker_patches);
} else {
+ EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
+ linker_patches);
EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
linker_patches);
}
+ EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
+ linker_patches);
for (const auto& entry : boot_image_type_patches_) {
const TypeReference& target_type = entry.first;
Literal* literal = entry.second;
@@ -7396,8 +7422,6 @@ void CodeGeneratorARM::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patche
target_type.dex_file,
target_type.type_index.index_));
}
- EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
- linker_patches);
for (const auto& entry : boot_image_address_patches_) {
DCHECK(GetCompilerOptions().GetIncludePatchInformation());
Literal* literal = entry.second;
@@ -7405,6 +7429,7 @@ void CodeGeneratorARM::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patche
uint32_t literal_offset = literal->GetLabel()->Position();
linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
}
+ DCHECK_EQ(size, linker_patches->size());
}
Literal* CodeGeneratorARM::DeduplicateUint32Literal(uint32_t value, Uint32ToLiteralMap* map) {
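
EmitLinkerPatches follows a reserve-then-verify discipline that the new patch kind slots into: the expected count is computed up front (two relocation sites per MOVW/MOVT entry on ARM), the vector is reserved once, and the new trailing DCHECK catches any category the tally missed. In outline:

    size_t size = /* ...other kinds... */ + 2u * type_bss_entry_patches_.size();
    linker_patches->reserve(size);
    // ... emit every category, including the new TypeBssEntryPatch ...
    DCHECK_EQ(size, linker_patches->size());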
diff --git a/compiler/optimizing/code_generator_arm.h b/compiler/optimizing/code_generator_arm.h
index d5968e0764..52d18575ff 100644
--- a/compiler/optimizing/code_generator_arm.h
+++ b/compiler/optimizing/code_generator_arm.h
@@ -481,8 +481,10 @@ class CodeGeneratorARM : public CodeGenerator {
Label add_pc_label;
};
- PcRelativePatchInfo* NewPcRelativeStringPatch(const DexFile& dex_file, uint32_t string_index);
+ PcRelativePatchInfo* NewPcRelativeStringPatch(const DexFile& dex_file,
+ dex::StringIndex string_index);
PcRelativePatchInfo* NewPcRelativeTypePatch(const DexFile& dex_file, dex::TypeIndex type_index);
+ PcRelativePatchInfo* NewTypeBssEntryPatch(const DexFile& dex_file, dex::TypeIndex type_index);
PcRelativePatchInfo* NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
uint32_t element_offset);
Literal* DeduplicateBootImageStringLiteral(const DexFile& dex_file,
@@ -494,7 +496,7 @@ class CodeGeneratorARM : public CodeGenerator {
Handle<mirror::String> handle);
Literal* DeduplicateJitClassLiteral(const DexFile& dex_file,
dex::TypeIndex type_index,
- uint64_t address);
+ Handle<mirror::Class> handle);
void EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) OVERRIDE;
@@ -635,8 +637,10 @@ class CodeGeneratorARM : public CodeGenerator {
ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_;
// Deduplication map for boot type literals for kBootImageLinkTimeAddress.
TypeToLiteralMap boot_image_type_patches_;
- // PC-relative type patch info.
+ // PC-relative type patch info for kBootImageLinkTimePcRelative.
ArenaDeque<PcRelativePatchInfo> pc_relative_type_patches_;
+ // PC-relative type patch info for kBssEntry.
+ ArenaDeque<PcRelativePatchInfo> type_bss_entry_patches_;
// Deduplication map for patchable boot image addresses.
Uint32ToLiteralMap boot_image_address_patches_;
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index 4b6a9bed61..b094e54f8a 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -276,22 +276,23 @@ class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
HInstruction* at,
uint32_t dex_pc,
bool do_clinit)
- : SlowPathCodeARM64(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
+ : SlowPathCodeARM64(at), cls_(cls), dex_pc_(dex_pc), do_clinit_(do_clinit) {
DCHECK(at->IsLoadClass() || at->IsClinitCheck());
}
void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
- LocationSummary* locations = at_->GetLocations();
+ LocationSummary* locations = instruction_->GetLocations();
CodeGeneratorARM64* arm64_codegen = down_cast<CodeGeneratorARM64*>(codegen);
__ Bind(GetEntryLabel());
SaveLiveRegisters(codegen, locations);
InvokeRuntimeCallingConvention calling_convention;
- __ Mov(calling_convention.GetRegisterAt(0).W(), cls_->GetTypeIndex().index_);
+ dex::TypeIndex type_index = cls_->GetTypeIndex();
+ __ Mov(calling_convention.GetRegisterAt(0).W(), type_index.index_);
QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
: kQuickInitializeType;
- arm64_codegen->InvokeRuntime(entrypoint, at_, dex_pc_, this);
+ arm64_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
if (do_clinit_) {
CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
} else {
@@ -302,11 +303,32 @@ class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
Location out = locations->Out();
if (out.IsValid()) {
DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
- Primitive::Type type = at_->GetType();
+ Primitive::Type type = instruction_->GetType();
arm64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
}
-
RestoreLiveRegisters(codegen, locations);
+ // For HLoadClass/kBssEntry, store the resolved Class to the BSS entry.
+ DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
+ if (cls_ == instruction_ && cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
+ DCHECK(out.IsValid());
+ UseScratchRegisterScope temps(arm64_codegen->GetVIXLAssembler());
+ Register temp = temps.AcquireX();
+ const DexFile& dex_file = cls_->GetDexFile();
+ // TODO: Change art_quick_initialize_type/art_quick_initialize_static_storage to
+ // kSaveEverything and use a temporary for the ADRP in the fast path, so that we
+ // can avoid the ADRP here.
+ vixl::aarch64::Label* adrp_label =
+ arm64_codegen->NewBssEntryTypePatch(dex_file, type_index);
+ arm64_codegen->EmitAdrpPlaceholder(adrp_label, temp);
+ vixl::aarch64::Label* strp_label =
+ arm64_codegen->NewBssEntryTypePatch(dex_file, type_index, adrp_label);
+ {
+ SingleEmissionCheckScope guard(arm64_codegen->GetVIXLAssembler());
+ __ Bind(strp_label);
+ __ str(RegisterFrom(locations->Out(), Primitive::kPrimNot),
+ MemOperand(temp, /* offset placeholder */ 0));
+ }
+ }
__ B(GetExitLabel());
}
@@ -316,10 +338,6 @@ class LoadClassSlowPathARM64 : public SlowPathCodeARM64 {
// The class this slow path will load.
HLoadClass* const cls_;
- // The instruction where this slow path is happening.
- // (Might be the load class or an initialization check).
- HInstruction* const at_;
-
// The dex PC of `at_`.
const uint32_t dex_pc_;
@@ -349,8 +367,8 @@ class LoadStringSlowPathARM64 : public SlowPathCodeARM64 {
SaveLiveRegisters(codegen, locations);
InvokeRuntimeCallingConvention calling_convention;
- const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex().index_;
- __ Mov(calling_convention.GetRegisterAt(0).W(), string_index);
+ const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
+ __ Mov(calling_convention.GetRegisterAt(0).W(), string_index.index_);
arm64_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Primitive::Type type = instruction_->GetType();
@@ -1154,6 +1172,7 @@ CodeGeneratorARM64::CodeGeneratorARM64(HGraph* graph,
boot_image_type_patches_(TypeReferenceValueComparator(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
boot_image_address_patches_(std::less<uint32_t>(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_string_patches_(StringReferenceValueComparator(),
@@ -3994,7 +4013,7 @@ void CodeGeneratorARM64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invok
break;
case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
// Add ADRP with its PC-relative DexCache access patch.
- const DexFile& dex_file = invoke->GetDexFile();
+ const DexFile& dex_file = invoke->GetDexFileForPcRelativeDexCache();
uint32_t element_offset = invoke->GetDexCacheArrayOffset();
vixl::aarch64::Label* adrp_label = NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
EmitAdrpPlaceholder(adrp_label, XRegisterFrom(temp));
@@ -4080,11 +4099,20 @@ void CodeGeneratorARM64::GenerateVirtualCall(HInvokeVirtual* invoke, Location te
__ Blr(lr);
}
+void LocationsBuilderARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
+ HandleInvoke(invoke);
+}
+
+void InstructionCodeGeneratorARM64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
+ codegen_->GenerateInvokePolymorphicCall(invoke);
+}
+
vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeStringPatch(
const DexFile& dex_file,
- uint32_t string_index,
+ dex::StringIndex string_index,
vixl::aarch64::Label* adrp_label) {
- return NewPcRelativePatch(dex_file, string_index, adrp_label, &pc_relative_string_patches_);
+ return
+ NewPcRelativePatch(dex_file, string_index.index_, adrp_label, &pc_relative_string_patches_);
}
vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeTypePatch(
@@ -4094,6 +4122,13 @@ vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeTypePatch(
return NewPcRelativePatch(dex_file, type_index.index_, adrp_label, &pc_relative_type_patches_);
}
+vixl::aarch64::Label* CodeGeneratorARM64::NewBssEntryTypePatch(
+ const DexFile& dex_file,
+ dex::TypeIndex type_index,
+ vixl::aarch64::Label* adrp_label) {
+ return NewPcRelativePatch(dex_file, type_index.index_, adrp_label, &type_bss_entry_patches_);
+}
+
vixl::aarch64::Label* CodeGeneratorARM64::NewPcRelativeDexCacheArrayPatch(
const DexFile& dex_file,
uint32_t element_offset,
@@ -4146,8 +4181,9 @@ vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitStringLitera
}
vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateJitClassLiteral(
- const DexFile& dex_file, dex::TypeIndex type_index, uint64_t address) {
- jit_class_roots_.Overwrite(TypeReference(&dex_file, type_index), address);
+ const DexFile& dex_file, dex::TypeIndex type_index, Handle<mirror::Class> handle) {
+ jit_class_roots_.Overwrite(TypeReference(&dex_file, type_index),
+ reinterpret_cast64<uint64_t>(handle.GetReference()));
return jit_class_patches_.GetOrCreate(
TypeReference(&dex_file, type_index),
[this]() { return __ CreateLiteralDestroyedWithPool<uint32_t>(/* placeholder */ 0u); });
@@ -4200,6 +4236,7 @@ void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patc
pc_relative_string_patches_.size() +
boot_image_type_patches_.size() +
pc_relative_type_patches_.size() +
+ type_bss_entry_patches_.size() +
boot_image_address_patches_.size();
linker_patches->reserve(size);
for (const PcRelativePatchInfo& info : pc_relative_dex_cache_patches_) {
@@ -4216,12 +4253,17 @@ void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patc
target_string.string_index.index_));
}
if (!GetCompilerOptions().IsBootImage()) {
+ DCHECK(pc_relative_type_patches_.empty());
EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
linker_patches);
} else {
+ EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
+ linker_patches);
EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
linker_patches);
}
+ EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
+ linker_patches);
for (const auto& entry : boot_image_type_patches_) {
const TypeReference& target_type = entry.first;
vixl::aarch64::Literal<uint32_t>* literal = entry.second;
@@ -4229,13 +4271,12 @@ void CodeGeneratorARM64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patc
target_type.dex_file,
target_type.type_index.index_));
}
- EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
- linker_patches);
for (const auto& entry : boot_image_address_patches_) {
DCHECK(GetCompilerOptions().GetIncludePatchInformation());
vixl::aarch64::Literal<uint32_t>* literal = entry.second;
linker_patches->push_back(LinkerPatch::RecordPosition(literal->GetOffset()));
}
+ DCHECK_EQ(size, linker_patches->size());
}
vixl::aarch64::Literal<uint32_t>* CodeGeneratorARM64::DeduplicateUint32Literal(uint32_t value,
@@ -4298,12 +4339,12 @@ HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind(
break;
case HLoadClass::LoadKind::kBootImageAddress:
break;
+ case HLoadClass::LoadKind::kBssEntry:
+ DCHECK(!Runtime::Current()->UseJitCompilation());
+ break;
case HLoadClass::LoadKind::kJitTableAddress:
DCHECK(Runtime::Current()->UseJitCompilation());
break;
- case HLoadClass::LoadKind::kDexCachePcRelative:
- DCHECK(!Runtime::Current()->UseJitCompilation());
- break;
case HLoadClass::LoadKind::kDexCacheViaMethod:
break;
}
@@ -4311,15 +4352,16 @@ HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind(
}
void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
- if (cls->NeedsAccessCheck()) {
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
InvokeRuntimeCallingConvention calling_convention;
- CodeGenerator::CreateLoadClassLocationSummary(
+ CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
cls,
LocationFrom(calling_convention.GetRegisterAt(0)),
- LocationFrom(vixl::aarch64::x0),
- /* code_generator_supports_read_barrier */ true);
+ LocationFrom(vixl::aarch64::x0));
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
@@ -4330,21 +4372,21 @@ void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
}
- HLoadClass::LoadKind load_kind = cls->GetLoadKind();
- if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
- load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
+ if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
locations->SetInAt(0, Location::RequiresRegister());
}
locations->SetOut(Location::RequiresRegister());
}
-void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
- if (cls->NeedsAccessCheck()) {
- codegen_->MoveConstant(cls->GetLocations()->GetTemp(0), cls->GetTypeIndex().index_);
- codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
- CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
+// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
+// move.
+void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
+ codegen_->GenerateLoadClassRuntimeCall(cls);
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
Location out_loc = cls->GetLocations()->Out();
Register out = OutputRegister(cls);
@@ -4353,7 +4395,7 @@ void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
? kWithoutReadBarrier
: kCompilerReadBarrierOption;
bool generate_null_check = false;
- switch (cls->GetLoadKind()) {
+ switch (load_kind) {
case HLoadClass::LoadKind::kReferrersClass: {
DCHECK(!cls->CanCallRuntime());
DCHECK(!cls->MustGenerateClinitCheck());
@@ -4387,59 +4429,46 @@ void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
}
case HLoadClass::LoadKind::kBootImageAddress: {
DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
- DCHECK(cls->GetAddress() != 0u && IsUint<32>(cls->GetAddress()));
- __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(cls->GetAddress()));
- break;
- }
- case HLoadClass::LoadKind::kJitTableAddress: {
- __ Ldr(out, codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
- cls->GetTypeIndex(),
- cls->GetAddress()));
- GenerateGcRootFieldLoad(cls,
- out_loc,
- out.X(),
- /* offset */ 0,
- /* fixup_label */ nullptr,
- kCompilerReadBarrierOption);
+ uint32_t address = dchecked_integral_cast<uint32_t>(
+ reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
+ DCHECK_NE(address, 0u);
+ __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(address));
break;
}
- case HLoadClass::LoadKind::kDexCachePcRelative: {
- // Add ADRP with its PC-relative DexCache access patch.
+ case HLoadClass::LoadKind::kBssEntry: {
+ // Add ADRP with its PC-relative Class .bss entry patch.
const DexFile& dex_file = cls->GetDexFile();
- uint32_t element_offset = cls->GetDexCacheElementOffset();
- vixl::aarch64::Label* adrp_label =
- codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset);
+ dex::TypeIndex type_index = cls->GetTypeIndex();
+ vixl::aarch64::Label* adrp_label = codegen_->NewBssEntryTypePatch(dex_file, type_index);
codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
- // Add LDR with its PC-relative DexCache access patch.
+ // Add LDR with its PC-relative Class patch.
vixl::aarch64::Label* ldr_label =
- codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label);
+ codegen_->NewBssEntryTypePatch(dex_file, type_index, adrp_label);
// /* GcRoot<mirror::Class> */ out = *(base_address + offset) /* PC-relative */
GenerateGcRootFieldLoad(cls,
- out_loc,
+ cls->GetLocations()->Out(),
out.X(),
- /* offset placeholder */ 0,
+ /* placeholder */ 0u,
ldr_label,
- read_barrier_option);
- generate_null_check = !cls->IsInDexCache();
+ kCompilerReadBarrierOption);
+ generate_null_check = true;
break;
}
- case HLoadClass::LoadKind::kDexCacheViaMethod: {
- MemberOffset resolved_types_offset =
- ArtMethod::DexCacheResolvedTypesOffset(kArm64PointerSize);
- // /* GcRoot<mirror::Class>[] */ out =
- // current_method.ptr_sized_fields_->dex_cache_resolved_types_
- Register current_method = InputRegisterAt(cls, 0);
- __ Ldr(out.X(), MemOperand(current_method, resolved_types_offset.Int32Value()));
- // /* GcRoot<mirror::Class> */ out = out[type_index]
+ case HLoadClass::LoadKind::kJitTableAddress: {
+ __ Ldr(out, codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
+ cls->GetTypeIndex(),
+ cls->GetClass()));
GenerateGcRootFieldLoad(cls,
out_loc,
out.X(),
- CodeGenerator::GetCacheOffset(cls->GetTypeIndex().index_),
+ /* offset */ 0,
/* fixup_label */ nullptr,
- read_barrier_option);
- generate_null_check = !cls->IsInDexCache();
+ kCompilerReadBarrierOption);
break;
}
+ case HLoadClass::LoadKind::kDexCacheViaMethod:
+ LOG(FATAL) << "UNREACHABLE";
+ UNREACHABLE();
}
if (generate_null_check || cls->MustGenerateClinitCheck()) {
@@ -4494,11 +4523,11 @@ HLoadString::LoadKind CodeGeneratorARM64::GetSupportedLoadStringKind(
case HLoadString::LoadKind::kBssEntry:
DCHECK(!Runtime::Current()->UseJitCompilation());
break;
- case HLoadString::LoadKind::kDexCacheViaMethod:
- break;
case HLoadString::LoadKind::kJitTableAddress:
DCHECK(Runtime::Current()->UseJitCompilation());
break;
+ case HLoadString::LoadKind::kDexCacheViaMethod:
+ break;
}
return desired_string_load_kind;
}
@@ -4542,7 +4571,7 @@ void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) NO_THREAD
case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
// Add ADRP with its PC-relative String patch.
const DexFile& dex_file = load->GetDexFile();
- uint32_t string_index = load->GetStringIndex().index_;
+ const dex::StringIndex string_index = load->GetStringIndex();
DCHECK(codegen_->GetCompilerOptions().IsBootImage());
vixl::aarch64::Label* adrp_label = codegen_->NewPcRelativeStringPatch(dex_file, string_index);
codegen_->EmitAdrpPlaceholder(adrp_label, out.X());
@@ -4562,7 +4591,7 @@ void InstructionCodeGeneratorARM64::VisitLoadString(HLoadString* load) NO_THREAD
case HLoadString::LoadKind::kBssEntry: {
// Add ADRP with its PC-relative String .bss entry patch.
const DexFile& dex_file = load->GetDexFile();
- uint32_t string_index = load->GetStringIndex().index_;
+ const dex::StringIndex string_index = load->GetStringIndex();
DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
UseScratchRegisterScope temps(codegen_->GetVIXLAssembler());
Register temp = temps.AcquireX();
@@ -4744,7 +4773,6 @@ void LocationsBuilderARM64::VisitNewInstance(HNewInstance* instruction) {
locations->AddTemp(LocationFrom(kArtMethodRegister));
} else {
locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
- locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
}
locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}
@@ -4762,7 +4790,7 @@ void InstructionCodeGeneratorARM64::VisitNewInstance(HNewInstance* instruction)
codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
} else {
codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
- CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
+ CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
}
}
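
On AArch64 the .bss entry is reached with the ADRP+load idiom instead of MOVW/MOVT: ADRP materializes the entry's 4 KiB page and the dependent LDR (or STR in the slow path) supplies the low 12 bits, each bound to a label returned by NewBssEntryTypePatch. A schematic of the patched shape (relocation syntax is illustrative):

    // adrp xN, :pg_hi21:type_bss_entry        // bound at adrp_label
    // ldr  wN, [xN, :lo12:type_bss_entry]     // bound at ldr_label; GC-root load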
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index d6a5f9d1fa..a9dca92980 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -540,7 +540,7 @@ class CodeGeneratorARM64 : public CodeGenerator {
// ADRP (pass `adrp_label = null`) or the ADD (pass `adrp_label` pointing
// to the associated ADRP patch label).
vixl::aarch64::Label* NewPcRelativeStringPatch(const DexFile& dex_file,
- uint32_t string_index,
+ dex::StringIndex string_index,
vixl::aarch64::Label* adrp_label = nullptr);
// Add a new PC-relative type patch for an instruction and return the label
@@ -551,6 +551,14 @@ class CodeGeneratorARM64 : public CodeGenerator {
dex::TypeIndex type_index,
vixl::aarch64::Label* adrp_label = nullptr);
+ // Add a new .bss entry type patch for an instruction and return the label
+ // to be bound before the instruction. The instruction will be either the
+ // ADRP (pass `adrp_label = null`) or the LDR/STR (pass `adrp_label` pointing
+ // to the associated ADRP patch label).
+ vixl::aarch64::Label* NewBssEntryTypePatch(const DexFile& dex_file,
+ dex::TypeIndex type_index,
+ vixl::aarch64::Label* adrp_label = nullptr);
+
// Add a new PC-relative dex cache array patch for an instruction and return
// the label to be bound before the instruction. The instruction will be
// either the ADRP (pass `adrp_label = null`) or the LDR (pass `adrp_label`
@@ -571,7 +579,7 @@ class CodeGeneratorARM64 : public CodeGenerator {
Handle<mirror::String> handle);
vixl::aarch64::Literal<uint32_t>* DeduplicateJitClassLiteral(const DexFile& dex_file,
dex::TypeIndex type_index,
- uint64_t address);
+ Handle<mirror::Class> handle);
void EmitAdrpPlaceholder(vixl::aarch64::Label* fixup_label, vixl::aarch64::Register reg);
void EmitAddPlaceholder(vixl::aarch64::Label* fixup_label,
@@ -744,8 +752,10 @@ class CodeGeneratorARM64 : public CodeGenerator {
ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_;
// Deduplication map for boot type literals for kBootImageLinkTimeAddress.
TypeToLiteralMap boot_image_type_patches_;
- // PC-relative type patch info.
+ // PC-relative type patch info for kBootImageLinkTimePcRelative.
ArenaDeque<PcRelativePatchInfo> pc_relative_type_patches_;
+ // PC-relative type patch info for kBssEntry.
+ ArenaDeque<PcRelativePatchInfo> type_bss_entry_patches_;
// Deduplication map for patchable boot image addresses.
Uint32ToLiteralMap boot_image_address_patches_;
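
NewBssEntryTypePatch is called twice per load site: first with the default null adrp_label to create the ADRP patch, then again with that label so the dependent LDR (fast path) or STR (slow path) is tied to the same .bss entry, exactly as the arm64.cc hunks above do:

    vixl::aarch64::Label* adrp_label = codegen->NewBssEntryTypePatch(dex_file, type_index);
    codegen->EmitAdrpPlaceholder(adrp_label, reg.X());
    vixl::aarch64::Label* ldr_label =
        codegen->NewBssEntryTypePatch(dex_file, type_index, adrp_label);
    // Bind ldr_label immediately before the dependent load/store.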
diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc
index b1f6d599ab..05a76e1105 100644
--- a/compiler/optimizing/code_generator_arm_vixl.cc
+++ b/compiler/optimizing/code_generator_arm_vixl.cc
@@ -394,22 +394,23 @@ class BoundsCheckSlowPathARMVIXL : public SlowPathCodeARMVIXL {
class LoadClassSlowPathARMVIXL : public SlowPathCodeARMVIXL {
public:
LoadClassSlowPathARMVIXL(HLoadClass* cls, HInstruction* at, uint32_t dex_pc, bool do_clinit)
- : SlowPathCodeARMVIXL(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
+ : SlowPathCodeARMVIXL(at), cls_(cls), dex_pc_(dex_pc), do_clinit_(do_clinit) {
DCHECK(at->IsLoadClass() || at->IsClinitCheck());
}
void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
- LocationSummary* locations = at_->GetLocations();
+ LocationSummary* locations = instruction_->GetLocations();
CodeGeneratorARMVIXL* arm_codegen = down_cast<CodeGeneratorARMVIXL*>(codegen);
__ Bind(GetEntryLabel());
SaveLiveRegisters(codegen, locations);
InvokeRuntimeCallingConventionARMVIXL calling_convention;
- __ Mov(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex().index_);
+ dex::TypeIndex type_index = cls_->GetTypeIndex();
+ __ Mov(calling_convention.GetRegisterAt(0), type_index.index_);
QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
: kQuickInitializeType;
- arm_codegen->InvokeRuntime(entrypoint, at_, dex_pc_, this);
+ arm_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
if (do_clinit_) {
CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
} else {
@@ -423,6 +424,18 @@ class LoadClassSlowPathARMVIXL : public SlowPathCodeARMVIXL {
arm_codegen->Move32(locations->Out(), LocationFrom(r0));
}
RestoreLiveRegisters(codegen, locations);
+ // For HLoadClass/kBssEntry, store the resolved Class to the BSS entry.
+ DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
+ if (cls_ == instruction_ && cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
+ DCHECK(out.IsValid());
+ // TODO: Change art_quick_initialize_type/art_quick_initialize_static_storage to
+ // kSaveEverything and use a temporary for the .bss entry address in the fast path,
+ // so that we can avoid another calculation here.
+ CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
+ arm_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index);
+ arm_codegen->EmitMovwMovtPlaceholder(labels, ip);
+ __ Str(OutputRegister(cls_), MemOperand(ip));
+ }
__ B(GetExitLabel());
}
@@ -432,10 +445,6 @@ class LoadClassSlowPathARMVIXL : public SlowPathCodeARMVIXL {
// The class this slow path will load.
HLoadClass* const cls_;
- // The instruction where this slow path is happening.
- // (Might be the load class or an initialization check).
- HInstruction* const at_;
-
// The dex PC of `at_`.
const uint32_t dex_pc_;
@@ -454,7 +463,7 @@ class LoadStringSlowPathARMVIXL : public SlowPathCodeARMVIXL {
LocationSummary* locations = instruction_->GetLocations();
DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));
HLoadString* load = instruction_->AsLoadString();
- const uint32_t string_index = load->GetStringIndex().index_;
+ const dex::StringIndex string_index = load->GetStringIndex();
vixl32::Register out = OutputRegister(load);
vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
constexpr bool call_saves_everything_except_r0 = (!kUseReadBarrier || kUseBakerReadBarrier);
@@ -473,7 +482,7 @@ class LoadStringSlowPathARMVIXL : public SlowPathCodeARMVIXL {
__ Mov(entry_address, temp);
}
- __ Mov(calling_convention.GetRegisterAt(0), string_index);
+ __ Mov(calling_convention.GetRegisterAt(0), string_index.index_);
arm_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
@@ -1252,6 +1261,7 @@ CodeGeneratorARMVIXL::CodeGeneratorARMVIXL(HGraph* graph,
boot_image_type_patches_(TypeReferenceValueComparator(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
boot_image_address_patches_(std::less<uint32_t>(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_string_patches_(StringReferenceValueComparator(),
@@ -2445,6 +2455,14 @@ void InstructionCodeGeneratorARMVIXL::VisitInvokeInterface(HInvokeInterface* inv
}
}
+void LocationsBuilderARMVIXL::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
+ HandleInvoke(invoke);
+}
+
+void InstructionCodeGeneratorARMVIXL::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
+ codegen_->GenerateInvokePolymorphicCall(invoke);
+}
+
void LocationsBuilderARMVIXL::VisitNeg(HNeg* neg) {
LocationSummary* locations =
new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
@@ -3948,7 +3966,6 @@ void LocationsBuilderARMVIXL::VisitNewInstance(HNewInstance* instruction) {
} else {
InvokeRuntimeCallingConventionARMVIXL calling_convention;
locations->SetInAt(0, LocationFrom(calling_convention.GetRegisterAt(0)));
- locations->SetInAt(1, LocationFrom(calling_convention.GetRegisterAt(1)));
}
locations->SetOut(LocationFrom(r0));
}
@@ -3970,7 +3987,7 @@ void InstructionCodeGeneratorARMVIXL::VisitNewInstance(HNewInstance* instruction
codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
} else {
codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
- CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
+ CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
}
}
@@ -5790,17 +5807,11 @@ HLoadClass::LoadKind CodeGeneratorARMVIXL::GetSupportedLoadClassKind(
break;
case HLoadClass::LoadKind::kBootImageAddress:
break;
- case HLoadClass::LoadKind::kJitTableAddress:
- break;
- case HLoadClass::LoadKind::kDexCachePcRelative:
+ case HLoadClass::LoadKind::kBssEntry:
DCHECK(!Runtime::Current()->UseJitCompilation());
- // We disable pc-relative load when there is an irreducible loop, as the optimization
- // is incompatible with it.
- // TODO: Create as many ArmDexCacheArraysBase instructions as needed for methods
- // with irreducible loops.
- if (GetGraph()->HasIrreducibleLoops()) {
- return HLoadClass::LoadKind::kDexCacheViaMethod;
- }
+ break;
+ case HLoadClass::LoadKind::kJitTableAddress:
+ DCHECK(Runtime::Current()->UseJitCompilation());
break;
case HLoadClass::LoadKind::kDexCacheViaMethod:
break;
@@ -5809,15 +5820,16 @@ HLoadClass::LoadKind CodeGeneratorARMVIXL::GetSupportedLoadClassKind(
}
void LocationsBuilderARMVIXL::VisitLoadClass(HLoadClass* cls) {
- if (cls->NeedsAccessCheck()) {
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
InvokeRuntimeCallingConventionARMVIXL calling_convention;
- CodeGenerator::CreateLoadClassLocationSummary(
+ CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
cls,
LocationFrom(calling_convention.GetRegisterAt(0)),
- LocationFrom(r0),
- /* code_generator_supports_read_barrier */ true);
+ LocationFrom(r0));
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
@@ -5828,24 +5840,23 @@ void LocationsBuilderARMVIXL::VisitLoadClass(HLoadClass* cls) {
locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
}
- HLoadClass::LoadKind load_kind = cls->GetLoadKind();
- if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
- load_kind == HLoadClass::LoadKind::kDexCacheViaMethod ||
- load_kind == HLoadClass::LoadKind::kDexCachePcRelative) {
+ if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
locations->SetInAt(0, Location::RequiresRegister());
}
locations->SetOut(Location::RequiresRegister());
}
-void InstructionCodeGeneratorARMVIXL::VisitLoadClass(HLoadClass* cls) {
- LocationSummary* locations = cls->GetLocations();
- if (cls->NeedsAccessCheck()) {
- codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex().index_);
- codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
- CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
+// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
+// move.
+void InstructionCodeGeneratorARMVIXL::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
+ codegen_->GenerateLoadClassRuntimeCall(cls);
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
+ LocationSummary* locations = cls->GetLocations();
Location out_loc = locations->Out();
vixl32::Register out = OutputRegister(cls);
@@ -5853,7 +5864,7 @@ void InstructionCodeGeneratorARMVIXL::VisitLoadClass(HLoadClass* cls) {
? kWithoutReadBarrier
: kCompilerReadBarrierOption;
bool generate_null_check = false;
- switch (cls->GetLoadKind()) {
+ switch (load_kind) {
case HLoadClass::LoadKind::kReferrersClass: {
DCHECK(!cls->CanCallRuntime());
DCHECK(!cls->MustGenerateClinitCheck());
@@ -5867,12 +5878,14 @@ void InstructionCodeGeneratorARMVIXL::VisitLoadClass(HLoadClass* cls) {
break;
}
case HLoadClass::LoadKind::kBootImageLinkTimeAddress: {
+ DCHECK(codegen_->GetCompilerOptions().IsBootImage());
DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
__ Ldr(out, codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(),
cls->GetTypeIndex()));
break;
}
case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
+ DCHECK(codegen_->GetCompilerOptions().IsBootImage());
DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
@@ -5881,43 +5894,31 @@ void InstructionCodeGeneratorARMVIXL::VisitLoadClass(HLoadClass* cls) {
}
case HLoadClass::LoadKind::kBootImageAddress: {
DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
- DCHECK_NE(cls->GetAddress(), 0u);
- uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
+ uint32_t address = dchecked_integral_cast<uint32_t>(
+ reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
+ DCHECK_NE(address, 0u);
__ Ldr(out, codegen_->DeduplicateBootImageAddressLiteral(address));
break;
}
+ case HLoadClass::LoadKind::kBssEntry: {
+ CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
+ codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
+ codegen_->EmitMovwMovtPlaceholder(labels, out);
+ GenerateGcRootFieldLoad(cls, out_loc, out, 0, kCompilerReadBarrierOption);
+ generate_null_check = true;
+ break;
+ }
case HLoadClass::LoadKind::kJitTableAddress: {
__ Ldr(out, codegen_->DeduplicateJitClassLiteral(cls->GetDexFile(),
cls->GetTypeIndex(),
- cls->GetAddress()));
+ cls->GetClass()));
// /* GcRoot<mirror::Class> */ out = *out
GenerateGcRootFieldLoad(cls, out_loc, out, /* offset */ 0, kCompilerReadBarrierOption);
break;
}
- case HLoadClass::LoadKind::kDexCachePcRelative: {
- vixl32::Register base_reg = InputRegisterAt(cls, 0);
- HArmDexCacheArraysBase* base = cls->InputAt(0)->AsArmDexCacheArraysBase();
- int32_t offset = cls->GetDexCacheElementOffset() - base->GetElementOffset();
- // /* GcRoot<mirror::Class> */ out = *(dex_cache_arrays_base + offset)
- GenerateGcRootFieldLoad(cls, out_loc, base_reg, offset, read_barrier_option);
- generate_null_check = !cls->IsInDexCache();
- break;
- }
- case HLoadClass::LoadKind::kDexCacheViaMethod: {
- // /* GcRoot<mirror::Class>[] */ out =
- // current_method.ptr_sized_fields_->dex_cache_resolved_types_
- vixl32::Register current_method = InputRegisterAt(cls, 0);
- const int32_t resolved_types_offset =
- ArtMethod::DexCacheResolvedTypesOffset(kArmPointerSize).Int32Value();
- GetAssembler()->LoadFromOffset(kLoadWord, out, current_method, resolved_types_offset);
- // /* GcRoot<mirror::Class> */ out = out[type_index]
- size_t offset = CodeGenerator::GetCacheOffset(cls->GetTypeIndex().index_);
- GenerateGcRootFieldLoad(cls, out_loc, out, offset, read_barrier_option);
- generate_null_check = !cls->IsInDexCache();
- break;
- }
- default:
- TODO_VIXL32(FATAL);
+ case HLoadClass::LoadKind::kDexCacheViaMethod:
+ LOG(FATAL) << "UNREACHABLE";
+ UNREACHABLE();
}
if (generate_null_check || cls->MustGenerateClinitCheck()) {
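[editor's note] kBssEntry is the replacement for kDexCachePcRelative: compiled code does one PC-relative load from a linker-reserved .bss word, null-checks it, and falls back to the type-resolution runtime call, whose slow path (see the MIPS, MIPS64 and x86 slow paths below) writes the resolved class back into the slot. A host-side simulation of that contract, all names hypothetical:

    #include <cassert>
    #include <string>

    struct Class { std::string descriptor; };

    // One .bss word per (dex file, type index) pair; zero-initialized by the loader.
    static Class* type_bss_entry = nullptr;

    static int slow_path_calls = 0;

    // Stands in for the kQuickInitializeType runtime call plus the slow path's
    // store back into the .bss slot.
    static Class* ResolveTypeSlowPath() {
      ++slow_path_calls;
      static Class resolved{"Lcom/example/Foo;"};
      type_bss_entry = &resolved;  // store-back: later loads skip the runtime
      return &resolved;
    }

    static Class* LoadClassBssEntry() {
      Class* klass = type_bss_entry;   // fast path: one PC-relative load
      if (klass == nullptr) {          // generate_null_check = true
        klass = ResolveTypeSlowPath();
      }
      return klass;
    }

    int main() {
      Class* a = LoadClassBssEntry();  // misses: takes the slow path once
      Class* b = LoadClassBssEntry();  // hits: fast path only
      assert(a == b);
      assert(slow_path_calls == 1);
    }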
@@ -6039,7 +6040,7 @@ void InstructionCodeGeneratorARMVIXL::VisitLoadString(HLoadString* load) NO_THRE
case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
DCHECK(codegen_->GetCompilerOptions().IsBootImage());
CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
- codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex().index_);
+ codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
codegen_->EmitMovwMovtPlaceholder(labels, out);
return; // No dex cache slow path.
}
@@ -6054,7 +6055,7 @@ void InstructionCodeGeneratorARMVIXL::VisitLoadString(HLoadString* load) NO_THRE
DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
vixl32::Register temp = RegisterFrom(locations->GetTemp(0));
CodeGeneratorARMVIXL::PcRelativePatchInfo* labels =
- codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex().index_);
+ codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
codegen_->EmitMovwMovtPlaceholder(labels, temp);
GenerateGcRootFieldLoad(load, out_loc, temp, /* offset */ 0, kCompilerReadBarrierOption);
LoadStringSlowPathARMVIXL* slow_path =
@@ -7398,8 +7399,8 @@ void CodeGeneratorARMVIXL::GenerateVirtualCall(HInvokeVirtual* invoke, Location
}
CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewPcRelativeStringPatch(
- const DexFile& dex_file, uint32_t string_index) {
- return NewPcRelativePatch(dex_file, string_index, &pc_relative_string_patches_);
+ const DexFile& dex_file, dex::StringIndex string_index) {
+ return NewPcRelativePatch(dex_file, string_index.index_, &pc_relative_string_patches_);
}
CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewPcRelativeTypePatch(
@@ -7407,6 +7408,11 @@ CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewPcRelativeTy
return NewPcRelativePatch(dex_file, type_index.index_, &pc_relative_type_patches_);
}
+CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewTypeBssEntryPatch(
+ const DexFile& dex_file, dex::TypeIndex type_index) {
+ return NewPcRelativePatch(dex_file, type_index.index_, &type_bss_entry_patches_);
+}
+
CodeGeneratorARMVIXL::PcRelativePatchInfo* CodeGeneratorARMVIXL::NewPcRelativeDexCacheArrayPatch(
const DexFile& dex_file, uint32_t element_offset) {
return NewPcRelativePatch(dex_file, element_offset, &pc_relative_dex_cache_patches_);
@@ -7463,8 +7469,9 @@ VIXLUInt32Literal* CodeGeneratorARMVIXL::DeduplicateJitStringLiteral(
VIXLUInt32Literal* CodeGeneratorARMVIXL::DeduplicateJitClassLiteral(const DexFile& dex_file,
dex::TypeIndex type_index,
- uint64_t address) {
- jit_class_roots_.Overwrite(TypeReference(&dex_file, type_index), address);
+ Handle<mirror::Class> handle) {
+ jit_class_roots_.Overwrite(TypeReference(&dex_file, type_index),
+ reinterpret_cast64<uint64_t>(handle.GetReference()));
return jit_class_patches_.GetOrCreate(
TypeReference(&dex_file, type_index),
[this]() {
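[editor's note] DeduplicateJitClassLiteral now records the JIT GC root via a Handle (stored through the handle's reference so the root table can later be updated) while still deduplicating literals with a GetOrCreate-style map keyed by TypeReference. A sketch of that map idiom using the standard library instead of ART's ArenaSafeMap:

    #include <cassert>
    #include <functional>
    #include <map>
    #include <utility>

    // Key: (dex file id, type index); value: the deduplicated literal.
    using TypeRef = std::pair<int, int>;
    struct Literal { int payload = 0; };

    template <typename K, typename V>
    V* GetOrCreate(std::map<K, V>& m, const K& key, const std::function<V()>& factory) {
      auto it = m.lower_bound(key);
      if (it == m.end() || it->first != key) {
        it = m.emplace_hint(it, key, factory());  // create exactly once per key
      }
      return &it->second;
    }

    int main() {
      std::map<TypeRef, Literal> jit_class_patches;
      int creations = 0;
      std::function<Literal()> factory = [&]() { ++creations; return Literal{7}; };
      Literal* first = GetOrCreate<TypeRef, Literal>(jit_class_patches, {1, 42}, factory);
      Literal* again = GetOrCreate<TypeRef, Literal>(jit_class_patches, {1, 42}, factory);
      assert(first == again && creations == 1);
    }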
@@ -7500,6 +7507,7 @@ void CodeGeneratorARMVIXL::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_pa
/* MOVW+MOVT for each entry */ 2u * pc_relative_string_patches_.size() +
boot_image_type_patches_.size() +
/* MOVW+MOVT for each entry */ 2u * pc_relative_type_patches_.size() +
+ /* MOVW+MOVT for each entry */ 2u * type_bss_entry_patches_.size() +
boot_image_address_patches_.size();
linker_patches->reserve(size);
EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
@@ -7514,12 +7522,17 @@ void CodeGeneratorARMVIXL::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_pa
target_string.string_index.index_));
}
if (!GetCompilerOptions().IsBootImage()) {
+ DCHECK(pc_relative_type_patches_.empty());
EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
linker_patches);
} else {
+ EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
+ linker_patches);
EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
linker_patches);
}
+ EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
+ linker_patches);
for (const auto& entry : boot_image_type_patches_) {
const TypeReference& target_type = entry.first;
VIXLUInt32Literal* literal = entry.second;
@@ -7529,8 +7542,6 @@ void CodeGeneratorARMVIXL::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_pa
target_type.dex_file,
target_type.type_index.index_));
}
- EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
- linker_patches);
for (const auto& entry : boot_image_address_patches_) {
DCHECK(GetCompilerOptions().GetIncludePatchInformation());
VIXLUInt32Literal* literal = entry.second;
@@ -7538,6 +7549,7 @@ void CodeGeneratorARMVIXL::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_pa
uint32_t literal_offset = literal->GetLocation();
linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
}
+ DCHECK_EQ(size, linker_patches->size());
}
VIXLUInt32Literal* CodeGeneratorARMVIXL::DeduplicateUint32Literal(
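[editor's note] The EmitLinkerPatches hunks above add exact bookkeeping: the expected patch count is computed up front (two entries per MOVW/MOVT pair), reserve()d once, and the new trailing DCHECK_EQ verifies that every reserved slot was emitted, so a future patch kind cannot be added to the emission loops without also being counted. The invariant in miniature:

    #include <cassert>
    #include <vector>

    struct LinkerPatch { int kind; unsigned offset; };

    int main() {
      std::vector<unsigned> string_patches{0x10, 0x20};
      std::vector<unsigned> type_bss_patches{0x30};

      // Each PC-relative entry expands to two patches (MOVW + MOVT on ARM).
      const size_t size = 2u * string_patches.size() + 2u * type_bss_patches.size();

      std::vector<LinkerPatch> linker_patches;
      linker_patches.reserve(size);  // single allocation, exact capacity
      for (unsigned off : string_patches) {
        linker_patches.push_back({1, off});      // MOVW
        linker_patches.push_back({1, off + 4});  // MOVT
      }
      for (unsigned off : type_bss_patches) {
        linker_patches.push_back({2, off});
        linker_patches.push_back({2, off + 4});
      }
      assert(size == linker_patches.size());  // the DCHECK_EQ in the patch
    }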
diff --git a/compiler/optimizing/code_generator_arm_vixl.h b/compiler/optimizing/code_generator_arm_vixl.h
index 200a463c75..be653535ea 100644
--- a/compiler/optimizing/code_generator_arm_vixl.h
+++ b/compiler/optimizing/code_generator_arm_vixl.h
@@ -562,8 +562,10 @@ class CodeGeneratorARMVIXL : public CodeGenerator {
vixl::aarch32::Label add_pc_label;
};
- PcRelativePatchInfo* NewPcRelativeStringPatch(const DexFile& dex_file, uint32_t string_index);
+ PcRelativePatchInfo* NewPcRelativeStringPatch(const DexFile& dex_file,
+ dex::StringIndex string_index);
PcRelativePatchInfo* NewPcRelativeTypePatch(const DexFile& dex_file, dex::TypeIndex type_index);
+ PcRelativePatchInfo* NewTypeBssEntryPatch(const DexFile& dex_file, dex::TypeIndex type_index);
PcRelativePatchInfo* NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
uint32_t element_offset);
VIXLUInt32Literal* DeduplicateBootImageStringLiteral(const DexFile& dex_file,
@@ -577,7 +579,7 @@ class CodeGeneratorARMVIXL : public CodeGenerator {
Handle<mirror::String> handle);
VIXLUInt32Literal* DeduplicateJitClassLiteral(const DexFile& dex_file,
dex::TypeIndex type_index,
- uint64_t address);
+ Handle<mirror::Class> handle);
void EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patches) OVERRIDE;
@@ -731,8 +733,10 @@ class CodeGeneratorARMVIXL : public CodeGenerator {
ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_;
// Deduplication map for boot type literals for kBootImageLinkTimeAddress.
TypeToLiteralMap boot_image_type_patches_;
- // PC-relative type patch info.
+ // PC-relative type patch info for kBootImageLinkTimePcRelative.
ArenaDeque<PcRelativePatchInfo> pc_relative_type_patches_;
+ // PC-relative type patch info for kBssEntry.
+ ArenaDeque<PcRelativePatchInfo> type_bss_entry_patches_;
// Deduplication map for patchable boot image addresses.
Uint32ToLiteralMap boot_image_address_patches_;
diff --git a/compiler/optimizing/code_generator_mips.cc b/compiler/optimizing/code_generator_mips.cc
index 9af03e8153..24234e18c1 100644
--- a/compiler/optimizing/code_generator_mips.cc
+++ b/compiler/optimizing/code_generator_mips.cc
@@ -213,23 +213,24 @@ class LoadClassSlowPathMIPS : public SlowPathCodeMIPS {
HInstruction* at,
uint32_t dex_pc,
bool do_clinit)
- : SlowPathCodeMIPS(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
+ : SlowPathCodeMIPS(at), cls_(cls), dex_pc_(dex_pc), do_clinit_(do_clinit) {
DCHECK(at->IsLoadClass() || at->IsClinitCheck());
}
void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
- LocationSummary* locations = at_->GetLocations();
+ LocationSummary* locations = instruction_->GetLocations();
CodeGeneratorMIPS* mips_codegen = down_cast<CodeGeneratorMIPS*>(codegen);
__ Bind(GetEntryLabel());
SaveLiveRegisters(codegen, locations);
InvokeRuntimeCallingConvention calling_convention;
- __ LoadConst32(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex().index_);
+ dex::TypeIndex type_index = cls_->GetTypeIndex();
+ __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_);
QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
: kQuickInitializeType;
- mips_codegen->InvokeRuntime(entrypoint, at_, dex_pc_, this);
+ mips_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
if (do_clinit_) {
CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
} else {
@@ -240,11 +241,26 @@ class LoadClassSlowPathMIPS : public SlowPathCodeMIPS {
Location out = locations->Out();
if (out.IsValid()) {
DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
- Primitive::Type type = at_->GetType();
+ Primitive::Type type = instruction_->GetType();
mips_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
}
RestoreLiveRegisters(codegen, locations);
+ // For HLoadClass/kBssEntry, store the resolved Class to the BSS entry.
+ DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
+ if (cls_ == instruction_ && cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
+ DCHECK(out.IsValid());
+ // TODO: Change art_quick_initialize_type/art_quick_initialize_static_storage to
+ // kSaveEverything and use a temporary for the .bss entry address in the fast path,
+ // so that we can avoid another calculation here.
+ bool isR6 = mips_codegen->GetInstructionSetFeatures().IsR6();
+ Register base = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
+ DCHECK_NE(out.AsRegister<Register>(), AT);
+ CodeGeneratorMIPS::PcRelativePatchInfo* info =
+ mips_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index);
+ mips_codegen->EmitPcRelativeAddressPlaceholder(info, TMP, base);
+ __ StoreToOffset(kStoreWord, out.AsRegister<Register>(), TMP, 0);
+ }
__ B(GetExitLabel());
}
@@ -254,10 +270,6 @@ class LoadClassSlowPathMIPS : public SlowPathCodeMIPS {
// The class this slow path will load.
HLoadClass* const cls_;
- // The instruction where this slow path is happening.
- // (Might be the load class or an initialization check).
- HInstruction* const at_;
-
- // The dex PC of `at_`.
+ // The dex PC of `instruction_`.
const uint32_t dex_pc_;
@@ -281,8 +293,8 @@ class LoadStringSlowPathMIPS : public SlowPathCodeMIPS {
InvokeRuntimeCallingConvention calling_convention;
HLoadString* load = instruction_->AsLoadString();
- const uint32_t string_index = load->GetStringIndex().index_;
- __ LoadConst32(calling_convention.GetRegisterAt(0), string_index);
+ const dex::StringIndex string_index = load->GetStringIndex();
+ __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_);
mips_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
Primitive::Type type = instruction_->GetType();
@@ -465,6 +477,7 @@ CodeGeneratorMIPS::CodeGeneratorMIPS(HGraph* graph,
boot_image_type_patches_(TypeReferenceValueComparator(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
boot_image_address_patches_(std::less<uint32_t>(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
clobbered_ra_(false) {
@@ -1007,6 +1020,7 @@ void CodeGeneratorMIPS::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patch
pc_relative_dex_cache_patches_.size() +
pc_relative_string_patches_.size() +
pc_relative_type_patches_.size() +
+ type_bss_entry_patches_.size() +
boot_image_string_patches_.size() +
boot_image_type_patches_.size() +
boot_image_address_patches_.size();
@@ -1014,13 +1028,16 @@ void CodeGeneratorMIPS::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patch
EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
linker_patches);
if (!GetCompilerOptions().IsBootImage()) {
+ DCHECK(pc_relative_type_patches_.empty());
EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
linker_patches);
} else {
+ EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
+ linker_patches);
EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
linker_patches);
}
- EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
+ EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
linker_patches);
for (const auto& entry : boot_image_string_patches_) {
const StringReference& target_string = entry.first;
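[editor's note] The emission hunk above also encodes the new routing rule: boot-image compiles may carry RelativeTypePatch entries, while app compiles must have none (hence the new DCHECK that pc_relative_type_patches_ is empty) and reach types only through TypeBssEntryPatch slots. Reduced to its skeleton:

    #include <cassert>
    #include <string>
    #include <vector>

    struct Patch { std::string kind; unsigned offset; };

    static void EmitTypePatches(bool is_boot_image,
                                const std::vector<unsigned>& pc_relative_type,
                                const std::vector<unsigned>& type_bss_entry,
                                std::vector<Patch>* out) {
      if (!is_boot_image) {
        // Classes outside the boot image are reached via .bss entries only.
        assert(pc_relative_type.empty());
      } else {
        for (unsigned off : pc_relative_type) out->push_back({"RelativeTypePatch", off});
      }
      for (unsigned off : type_bss_entry) out->push_back({"TypeBssEntryPatch", off});
    }

    int main() {
      std::vector<Patch> patches;
      EmitTypePatches(/*is_boot_image=*/false, {}, {0x40}, &patches);
      assert(patches.size() == 1 && patches[0].kind == "TypeBssEntryPatch");
    }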
@@ -1047,11 +1064,12 @@ void CodeGeneratorMIPS::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patch
uint32_t literal_offset = __ GetLabelLocation(literal->GetLabel());
linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
}
+ DCHECK_EQ(size, linker_patches->size());
}
CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeStringPatch(
- const DexFile& dex_file, uint32_t string_index) {
- return NewPcRelativePatch(dex_file, string_index, &pc_relative_string_patches_);
+ const DexFile& dex_file, dex::StringIndex string_index) {
+ return NewPcRelativePatch(dex_file, string_index.index_, &pc_relative_string_patches_);
}
CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeTypePatch(
@@ -1059,6 +1077,11 @@ CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeTypePatc
return NewPcRelativePatch(dex_file, type_index.index_, &pc_relative_type_patches_);
}
+CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewTypeBssEntryPatch(
+ const DexFile& dex_file, dex::TypeIndex type_index) {
+ return NewPcRelativePatch(dex_file, type_index.index_, &type_bss_entry_patches_);
+}
+
CodeGeneratorMIPS::PcRelativePatchInfo* CodeGeneratorMIPS::NewPcRelativeDexCacheArrayPatch(
const DexFile& dex_file, uint32_t element_offset) {
return NewPcRelativePatch(dex_file, element_offset, &pc_relative_dex_cache_patches_);
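[editor's note] NewTypeBssEntryPatch reuses the NewPcRelativePatch helper: append a record to an ArenaDeque and return a pointer into it. A deque (rather than a vector) matters here because push_back never relocates existing elements, so previously returned PcRelativePatchInfo pointers, and the labels they contain, stay valid as more patches accumulate. Illustrated with std::deque:

    #include <cassert>
    #include <cstdint>
    #include <deque>

    struct PcRelativePatchInfo {
      const void* dex_file;      // identity of the originating dex file
      uint32_t offset_or_index;
      // ... labels bound while emitting the instruction sequence ...
    };

    static PcRelativePatchInfo* NewPcRelativePatch(const void* dex_file,
                                                   uint32_t index,
                                                   std::deque<PcRelativePatchInfo>* patches) {
      patches->emplace_back(PcRelativePatchInfo{dex_file, index});
      return &patches->back();
    }

    int main() {
      std::deque<PcRelativePatchInfo> type_bss_entry_patches;
      PcRelativePatchInfo* first = NewPcRelativePatch("dex", 5u, &type_bss_entry_patches);
      for (uint32_t i = 0; i < 1000; ++i) {
        NewPcRelativePatch("dex", i, &type_bss_entry_patches);  // never relocates `first`
      }
      assert(first == &type_bss_entry_patches.front());  // pointer survived growth
      assert(first->offset_or_index == 5u);
    }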
@@ -5154,6 +5177,14 @@ void LocationsBuilderMIPS::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invo
}
}
+void LocationsBuilderMIPS::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
+ HandleInvoke(invoke);
+}
+
+void InstructionCodeGeneratorMIPS::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
+ codegen_->GenerateInvokePolymorphicCall(invoke);
+}
+
static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS* codegen) {
if (invoke->GetLocations()->Intrinsified()) {
IntrinsicCodeGeneratorMIPS intrinsic(codegen);
@@ -5186,14 +5217,14 @@ HLoadString::LoadKind CodeGeneratorMIPS::GetSupportedLoadStringKind(
case HLoadString::LoadKind::kBssEntry:
DCHECK(!Runtime::Current()->UseJitCompilation());
break;
- case HLoadString::LoadKind::kDexCacheViaMethod:
- fallback_load = false;
- break;
case HLoadString::LoadKind::kJitTableAddress:
DCHECK(Runtime::Current()->UseJitCompilation());
// TODO: implement.
fallback_load = true;
break;
+ case HLoadString::LoadKind::kDexCacheViaMethod:
+ fallback_load = false;
+ break;
}
if (fallback_load) {
desired_string_load_kind = HLoadString::LoadKind::kDexCacheViaMethod;
@@ -5222,15 +5253,13 @@ HLoadClass::LoadKind CodeGeneratorMIPS::GetSupportedLoadClassKind(
break;
case HLoadClass::LoadKind::kBootImageAddress:
break;
+ case HLoadClass::LoadKind::kBssEntry:
+ DCHECK(!Runtime::Current()->UseJitCompilation());
+ break;
case HLoadClass::LoadKind::kJitTableAddress:
DCHECK(Runtime::Current()->UseJitCompilation());
fallback_load = true;
break;
- case HLoadClass::LoadKind::kDexCachePcRelative:
- DCHECK(!Runtime::Current()->UseJitCompilation());
- // TODO: Create as many MipsDexCacheArraysBase instructions as needed for methods
- // with irreducible loops.
- break;
case HLoadClass::LoadKind::kDexCacheViaMethod:
fallback_load = false;
break;
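[editor's note] GetSupportedLoadClassKind keeps its clamp-to-supported shape: the backend vets the load kind chosen by sharpening and downgrades anything unimplemented (here kJitTableAddress on MIPS) to the universally available kDexCacheViaMethod runtime path. The skeleton of that policy:

    #include <cassert>

    enum class LoadKind {
      kReferrersClass,
      kBootImageAddress,
      kBssEntry,
      kJitTableAddress,
      kDexCacheViaMethod,  // always-available fallback through the runtime
    };

    static LoadKind GetSupportedLoadClassKind(LoadKind desired, bool jit_table_implemented) {
      bool fallback = false;
      switch (desired) {
        case LoadKind::kJitTableAddress:
          fallback = !jit_table_implemented;  // TODO in the patch: implement, then drop this
          break;
        default:
          break;
      }
      return fallback ? LoadKind::kDexCacheViaMethod : desired;
    }

    int main() {
      assert(GetSupportedLoadClassKind(LoadKind::kBssEntry, false) == LoadKind::kBssEntry);
      assert(GetSupportedLoadClassKind(LoadKind::kJitTableAddress, false) ==
             LoadKind::kDexCacheViaMethod);
    }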
@@ -5427,34 +5456,32 @@ void InstructionCodeGeneratorMIPS::VisitInvokeVirtual(HInvokeVirtual* invoke) {
}
void LocationsBuilderMIPS::VisitLoadClass(HLoadClass* cls) {
- if (cls->NeedsAccessCheck()) {
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
InvokeRuntimeCallingConvention calling_convention;
- CodeGenerator::CreateLoadClassLocationSummary(
+ CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
cls,
Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
- Location::RegisterLocation(V0),
- /* code_generator_supports_read_barrier */ false); // TODO: revisit this bool.
+ Location::RegisterLocation(V0));
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier)
? LocationSummary::kCallOnSlowPath
: LocationSummary::kNoCall;
LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
- HLoadClass::LoadKind load_kind = cls->GetLoadKind();
switch (load_kind) {
// We need an extra register for PC-relative literals on R2.
case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
- case HLoadClass::LoadKind::kBootImageAddress:
case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
+ case HLoadClass::LoadKind::kBootImageAddress:
+ case HLoadClass::LoadKind::kBssEntry:
if (codegen_->GetInstructionSetFeatures().IsR6()) {
break;
}
FALLTHROUGH_INTENDED;
- // We need an extra register for PC-relative dex cache accesses.
- case HLoadClass::LoadKind::kDexCachePcRelative:
case HLoadClass::LoadKind::kReferrersClass:
- case HLoadClass::LoadKind::kDexCacheViaMethod:
locations->SetInAt(0, Location::RequiresRegister());
break;
default:
@@ -5463,16 +5490,17 @@ void LocationsBuilderMIPS::VisitLoadClass(HLoadClass* cls) {
locations->SetOut(Location::RequiresRegister());
}
-void InstructionCodeGeneratorMIPS::VisitLoadClass(HLoadClass* cls) {
- LocationSummary* locations = cls->GetLocations();
- if (cls->NeedsAccessCheck()) {
- codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex().index_);
- codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
- CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
+// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
+// move.
+void InstructionCodeGeneratorMIPS::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
+ codegen_->GenerateLoadClassRuntimeCall(cls);
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
- HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ LocationSummary* locations = cls->GetLocations();
Location out_loc = locations->Out();
Register out = out_loc.AsRegister<Register>();
Register base_or_current_method_reg;
@@ -5480,12 +5508,11 @@ void InstructionCodeGeneratorMIPS::VisitLoadClass(HLoadClass* cls) {
switch (load_kind) {
// We need an extra register for PC-relative literals on R2.
case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
- case HLoadClass::LoadKind::kBootImageAddress:
case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
+ case HLoadClass::LoadKind::kBootImageAddress:
+ case HLoadClass::LoadKind::kBssEntry:
base_or_current_method_reg = isR6 ? ZERO : locations->InAt(0).AsRegister<Register>();
break;
- // We need an extra register for PC-relative dex cache accesses.
- case HLoadClass::LoadKind::kDexCachePcRelative:
case HLoadClass::LoadKind::kReferrersClass:
case HLoadClass::LoadKind::kDexCacheViaMethod:
base_or_current_method_reg = locations->InAt(0).AsRegister<Register>();
@@ -5508,14 +5535,14 @@ void InstructionCodeGeneratorMIPS::VisitLoadClass(HLoadClass* cls) {
break;
}
case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
- DCHECK(!kEmitCompilerReadBarrier);
+ DCHECK(codegen_->GetCompilerOptions().IsBootImage());
__ LoadLiteral(out,
base_or_current_method_reg,
codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(),
cls->GetTypeIndex()));
break;
case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
- DCHECK(!kEmitCompilerReadBarrier);
+ DCHECK(codegen_->GetCompilerOptions().IsBootImage());
CodeGeneratorMIPS::PcRelativePatchInfo* info =
codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
codegen_->EmitPcRelativeAddressPlaceholder(info, out, base_or_current_method_reg);
@@ -5523,38 +5550,29 @@ void InstructionCodeGeneratorMIPS::VisitLoadClass(HLoadClass* cls) {
}
case HLoadClass::LoadKind::kBootImageAddress: {
DCHECK(!kEmitCompilerReadBarrier);
- DCHECK_NE(cls->GetAddress(), 0u);
- uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
+ uint32_t address = dchecked_integral_cast<uint32_t>(
+ reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
+ DCHECK_NE(address, 0u);
__ LoadLiteral(out,
base_or_current_method_reg,
codegen_->DeduplicateBootImageAddressLiteral(address));
break;
}
- case HLoadClass::LoadKind::kJitTableAddress: {
- LOG(FATAL) << "Unimplemented";
+ case HLoadClass::LoadKind::kBssEntry: {
+ CodeGeneratorMIPS::PcRelativePatchInfo* info =
+ codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
+ codegen_->EmitPcRelativeAddressPlaceholder(info, out, base_or_current_method_reg);
+ __ LoadFromOffset(kLoadWord, out, out, 0);
+ generate_null_check = true;
break;
}
- case HLoadClass::LoadKind::kDexCachePcRelative: {
- HMipsDexCacheArraysBase* base = cls->InputAt(0)->AsMipsDexCacheArraysBase();
- int32_t offset =
- cls->GetDexCacheElementOffset() - base->GetElementOffset() - kDexCacheArrayLwOffset;
- // /* GcRoot<mirror::Class> */ out = *(dex_cache_arrays_base + offset)
- GenerateGcRootFieldLoad(cls, out_loc, base_or_current_method_reg, offset);
- generate_null_check = !cls->IsInDexCache();
+ case HLoadClass::LoadKind::kJitTableAddress: {
+ LOG(FATAL) << "Unimplemented";
break;
}
- case HLoadClass::LoadKind::kDexCacheViaMethod: {
- // /* GcRoot<mirror::Class>[] */ out =
- // current_method.ptr_sized_fields_->dex_cache_resolved_types_
- __ LoadFromOffset(kLoadWord,
- out,
- base_or_current_method_reg,
- ArtMethod::DexCacheResolvedTypesOffset(kArmPointerSize).Int32Value());
- // /* GcRoot<mirror::Class> */ out = out[type_index]
- size_t offset = CodeGenerator::GetCacheOffset(cls->GetTypeIndex().index_);
- GenerateGcRootFieldLoad(cls, out_loc, out, offset);
- generate_null_check = !cls->IsInDexCache();
- }
+ case HLoadClass::LoadKind::kDexCacheViaMethod:
+ LOG(FATAL) << "UNREACHABLE";
+ UNREACHABLE();
}
if (generate_null_check || cls->MustGenerateClinitCheck()) {
@@ -5649,6 +5667,7 @@ void InstructionCodeGeneratorMIPS::VisitLoadString(HLoadString* load) NO_THREAD_
switch (load_kind) {
case HLoadString::LoadKind::kBootImageLinkTimeAddress:
+ DCHECK(codegen_->GetCompilerOptions().IsBootImage());
__ LoadLiteral(out,
base_or_current_method_reg,
codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
@@ -5657,7 +5676,7 @@ void InstructionCodeGeneratorMIPS::VisitLoadString(HLoadString* load) NO_THREAD_
case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
DCHECK(codegen_->GetCompilerOptions().IsBootImage());
CodeGeneratorMIPS::PcRelativePatchInfo* info =
- codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex().index_);
+ codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
codegen_->EmitPcRelativeAddressPlaceholder(info, out, base_or_current_method_reg);
return; // No dex cache slow path.
}
@@ -5673,7 +5692,7 @@ void InstructionCodeGeneratorMIPS::VisitLoadString(HLoadString* load) NO_THREAD_
case HLoadString::LoadKind::kBssEntry: {
DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
CodeGeneratorMIPS::PcRelativePatchInfo* info =
- codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex().index_);
+ codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
codegen_->EmitPcRelativeAddressPlaceholder(info, out, base_or_current_method_reg);
__ LoadFromOffset(kLoadWord, out, out, 0);
SlowPathCodeMIPS* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathMIPS(load);
@@ -5903,7 +5922,6 @@ void LocationsBuilderMIPS::VisitNewInstance(HNewInstance* instruction) {
locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
} else {
locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
- locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}
locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}
@@ -5920,7 +5938,7 @@ void InstructionCodeGeneratorMIPS::VisitNewInstance(HNewInstance* instruction) {
codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
} else {
codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
- CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
+ CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
}
}
diff --git a/compiler/optimizing/code_generator_mips.h b/compiler/optimizing/code_generator_mips.h
index 7b0812cb7b..c8fd325999 100644
--- a/compiler/optimizing/code_generator_mips.h
+++ b/compiler/optimizing/code_generator_mips.h
@@ -452,8 +452,10 @@ class CodeGeneratorMIPS : public CodeGenerator {
MipsLabel pc_rel_label;
};
- PcRelativePatchInfo* NewPcRelativeStringPatch(const DexFile& dex_file, uint32_t string_index);
+ PcRelativePatchInfo* NewPcRelativeStringPatch(const DexFile& dex_file,
+ dex::StringIndex string_index);
PcRelativePatchInfo* NewPcRelativeTypePatch(const DexFile& dex_file, dex::TypeIndex type_index);
+ PcRelativePatchInfo* NewTypeBssEntryPatch(const DexFile& dex_file, dex::TypeIndex type_index);
PcRelativePatchInfo* NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
uint32_t element_offset);
Literal* DeduplicateBootImageStringLiteral(const DexFile& dex_file,
@@ -504,8 +506,10 @@ class CodeGeneratorMIPS : public CodeGenerator {
ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_;
// Deduplication map for boot type literals for kBootImageLinkTimeAddress.
BootTypeToLiteralMap boot_image_type_patches_;
- // PC-relative type patch info.
+ // PC-relative type patch info for kBootImageLinkTimePcRelative.
ArenaDeque<PcRelativePatchInfo> pc_relative_type_patches_;
+ // PC-relative type patch info for kBssEntry.
+ ArenaDeque<PcRelativePatchInfo> type_bss_entry_patches_;
// Deduplication map for patchable boot image addresses.
Uint32ToLiteralMap boot_image_address_patches_;
diff --git a/compiler/optimizing/code_generator_mips64.cc b/compiler/optimizing/code_generator_mips64.cc
index 046d59cee7..fc8fb7acb2 100644
--- a/compiler/optimizing/code_generator_mips64.cc
+++ b/compiler/optimizing/code_generator_mips64.cc
@@ -167,22 +167,23 @@ class LoadClassSlowPathMIPS64 : public SlowPathCodeMIPS64 {
HInstruction* at,
uint32_t dex_pc,
bool do_clinit)
- : SlowPathCodeMIPS64(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
+ : SlowPathCodeMIPS64(at), cls_(cls), dex_pc_(dex_pc), do_clinit_(do_clinit) {
DCHECK(at->IsLoadClass() || at->IsClinitCheck());
}
void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
- LocationSummary* locations = at_->GetLocations();
+ LocationSummary* locations = instruction_->GetLocations();
CodeGeneratorMIPS64* mips64_codegen = down_cast<CodeGeneratorMIPS64*>(codegen);
__ Bind(GetEntryLabel());
SaveLiveRegisters(codegen, locations);
InvokeRuntimeCallingConvention calling_convention;
- __ LoadConst32(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex().index_);
+ dex::TypeIndex type_index = cls_->GetTypeIndex();
+ __ LoadConst32(calling_convention.GetRegisterAt(0), type_index.index_);
QuickEntrypointEnum entrypoint = do_clinit_ ? kQuickInitializeStaticStorage
: kQuickInitializeType;
- mips64_codegen->InvokeRuntime(entrypoint, at_, dex_pc_, this);
+ mips64_codegen->InvokeRuntime(entrypoint, instruction_, dex_pc_, this);
if (do_clinit_) {
CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
} else {
@@ -193,11 +194,24 @@ class LoadClassSlowPathMIPS64 : public SlowPathCodeMIPS64 {
Location out = locations->Out();
if (out.IsValid()) {
DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
- Primitive::Type type = at_->GetType();
+ Primitive::Type type = instruction_->GetType();
mips64_codegen->MoveLocation(out, calling_convention.GetReturnLocation(type), type);
}
RestoreLiveRegisters(codegen, locations);
+ // For HLoadClass/kBssEntry, store the resolved Class to the BSS entry.
+ DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
+ if (cls_ == instruction_ && cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
+ DCHECK(out.IsValid());
+ // TODO: Change art_quick_initialize_type/art_quick_initialize_static_storage to
+ // kSaveEverything and use a temporary for the .bss entry address in the fast path,
+ // so that we can avoid another calculation here.
+ DCHECK_NE(out.AsRegister<GpuRegister>(), AT);
+ CodeGeneratorMIPS64::PcRelativePatchInfo* info =
+ mips64_codegen->NewTypeBssEntryPatch(cls_->GetDexFile(), type_index);
+ mips64_codegen->EmitPcRelativeAddressPlaceholderHigh(info, AT);
+ __ Sw(out.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
+ }
__ Bc(GetExitLabel());
}
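[editor's note] The `/* placeholder */ 0x5678` in the Sw above is a dummy low half: EmitPcRelativeAddressPlaceholderHigh emits the high part of the PC-relative address, and the linker later rewrites the instruction's 16-bit immediate. Because MIPS memory instructions sign-extend that immediate, the high half must compensate when bit 15 of the offset is set. A self-contained check of the decomposition (two's-complement int16_t narrowing assumed):

    #include <cassert>
    #include <cstdint>

    // Split an offset into the hi/lo pair such that (hi << 16) + lo == offset,
    // where lo is the sign-extended 16-bit immediate of the load/store.
    static void SplitHiLo(int32_t offset, int32_t* hi, int16_t* lo) {
      *lo = static_cast<int16_t>(offset & 0xFFFF);
      *hi = (offset - *lo) >> 16;
    }

    int main() {
      for (int32_t off : {0, 1, 0x7FFF, 0x8000, -1, 0x12345678, -0x1234}) {
        int32_t hi;
        int16_t lo;
        SplitHiLo(off, &hi, &lo);
        assert((hi << 16) + lo == off);  // 0x5678 in the diff is just such a `lo` stand-in
      }
    }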
@@ -207,10 +221,6 @@ class LoadClassSlowPathMIPS64 : public SlowPathCodeMIPS64 {
// The class this slow path will load.
HLoadClass* const cls_;
- // The instruction where this slow path is happening.
- // (Might be the load class or an initialization check).
- HInstruction* const at_;
-
- // The dex PC of `at_`.
+ // The dex PC of `instruction_`.
const uint32_t dex_pc_;
@@ -234,8 +244,8 @@ class LoadStringSlowPathMIPS64 : public SlowPathCodeMIPS64 {
InvokeRuntimeCallingConvention calling_convention;
HLoadString* load = instruction_->AsLoadString();
- const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex().index_;
- __ LoadConst32(calling_convention.GetRegisterAt(0), string_index);
+ const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
+ __ LoadConst32(calling_convention.GetRegisterAt(0), string_index.index_);
mips64_codegen->InvokeRuntime(kQuickResolveString,
instruction_,
instruction_->GetDexPc(),
@@ -422,6 +432,7 @@ CodeGeneratorMIPS64::CodeGeneratorMIPS64(HGraph* graph,
boot_image_type_patches_(TypeReferenceValueComparator(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
pc_relative_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
boot_image_address_patches_(std::less<uint32_t>(),
graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
// Save RA (containing the return address) to mimic Quick.
@@ -922,6 +933,7 @@ void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_pat
pc_relative_dex_cache_patches_.size() +
pc_relative_string_patches_.size() +
pc_relative_type_patches_.size() +
+ type_bss_entry_patches_.size() +
boot_image_string_patches_.size() +
boot_image_type_patches_.size() +
boot_image_address_patches_.size();
@@ -929,13 +941,16 @@ void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_pat
EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
linker_patches);
if (!GetCompilerOptions().IsBootImage()) {
+ DCHECK(pc_relative_type_patches_.empty());
EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(pc_relative_string_patches_,
linker_patches);
} else {
+ EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
+ linker_patches);
EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(pc_relative_string_patches_,
linker_patches);
}
- EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(pc_relative_type_patches_,
+ EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
linker_patches);
for (const auto& entry : boot_image_string_patches_) {
const StringReference& target_string = entry.first;
@@ -962,11 +977,12 @@ void CodeGeneratorMIPS64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_pat
uint32_t literal_offset = __ GetLabelLocation(literal->GetLabel());
linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
}
+ DCHECK_EQ(size, linker_patches->size());
}
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeStringPatch(
- const DexFile& dex_file, uint32_t string_index) {
- return NewPcRelativePatch(dex_file, string_index, &pc_relative_string_patches_);
+ const DexFile& dex_file, dex::StringIndex string_index) {
+ return NewPcRelativePatch(dex_file, string_index.index_, &pc_relative_string_patches_);
}
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeTypePatch(
@@ -974,6 +990,11 @@ CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeType
return NewPcRelativePatch(dex_file, type_index.index_, &pc_relative_type_patches_);
}
+CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewTypeBssEntryPatch(
+ const DexFile& dex_file, dex::TypeIndex type_index) {
+ return NewPcRelativePatch(dex_file, type_index.index_, &type_bss_entry_patches_);
+}
+
CodeGeneratorMIPS64::PcRelativePatchInfo* CodeGeneratorMIPS64::NewPcRelativeDexCacheArrayPatch(
const DexFile& dex_file, uint32_t element_offset) {
return NewPcRelativePatch(dex_file, element_offset, &pc_relative_dex_cache_patches_);
@@ -3095,7 +3116,7 @@ void InstructionCodeGeneratorMIPS64::GenerateGcRootFieldLoad(
Location root,
GpuRegister obj,
uint32_t offset) {
- // When handling HLoadClass::LoadKind::kDexCachePcRelative, the caller calls
+ // When handling PC-relative loads, the caller calls
// EmitPcRelativeAddressPlaceholderHigh() and then GenerateGcRootFieldLoad().
// The relative patcher expects the two methods to emit the following patchable
// sequence of instructions in this case:
@@ -3256,6 +3277,14 @@ void LocationsBuilderMIPS64::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* in
HandleInvoke(invoke);
}
+void LocationsBuilderMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
+ HandleInvoke(invoke);
+}
+
+void InstructionCodeGeneratorMIPS64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
+ codegen_->GenerateInvokePolymorphicCall(invoke);
+}
+
static bool TryGenerateIntrinsicCode(HInvoke* invoke, CodeGeneratorMIPS64* codegen) {
if (invoke->GetLocations()->Intrinsified()) {
IntrinsicCodeGeneratorMIPS64 intrinsic(codegen);
@@ -3314,14 +3343,14 @@ HLoadClass::LoadKind CodeGeneratorMIPS64::GetSupportedLoadClassKind(
break;
case HLoadClass::LoadKind::kBootImageAddress:
break;
+ case HLoadClass::LoadKind::kBssEntry:
+ DCHECK(!Runtime::Current()->UseJitCompilation());
+ break;
case HLoadClass::LoadKind::kJitTableAddress:
DCHECK(Runtime::Current()->UseJitCompilation());
// TODO: implement.
fallback_load = true;
break;
- case HLoadClass::LoadKind::kDexCachePcRelative:
- DCHECK(!Runtime::Current()->UseJitCompilation());
- break;
case HLoadClass::LoadKind::kDexCacheViaMethod:
break;
}
@@ -3366,7 +3395,7 @@ void CodeGeneratorMIPS64::GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invo
case HInvokeStaticOrDirect::MethodLoadKind::kDexCachePcRelative: {
uint32_t offset = invoke->GetDexCacheArrayOffset();
CodeGeneratorMIPS64::PcRelativePatchInfo* info =
- NewPcRelativeDexCacheArrayPatch(invoke->GetDexFile(), offset);
+ NewPcRelativeDexCacheArrayPatch(invoke->GetDexFileForPcRelativeDexCache(), offset);
EmitPcRelativeAddressPlaceholderHigh(info, AT);
__ Ld(temp.AsRegister<GpuRegister>(), AT, /* placeholder */ 0x5678);
break;
@@ -3474,38 +3503,38 @@ void InstructionCodeGeneratorMIPS64::VisitInvokeVirtual(HInvokeVirtual* invoke)
}
void LocationsBuilderMIPS64::VisitLoadClass(HLoadClass* cls) {
- if (cls->NeedsAccessCheck()) {
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
InvokeRuntimeCallingConvention calling_convention;
- CodeGenerator::CreateLoadClassLocationSummary(
+ CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
cls,
Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
- calling_convention.GetReturnLocation(Primitive::kPrimNot),
- /* code_generator_supports_read_barrier */ false);
+ calling_convention.GetReturnLocation(Primitive::kPrimNot));
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier)
? LocationSummary::kCallOnSlowPath
: LocationSummary::kNoCall;
LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
- HLoadClass::LoadKind load_kind = cls->GetLoadKind();
- if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
- load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
+ if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
locations->SetInAt(0, Location::RequiresRegister());
}
locations->SetOut(Location::RequiresRegister());
}
-void InstructionCodeGeneratorMIPS64::VisitLoadClass(HLoadClass* cls) {
- LocationSummary* locations = cls->GetLocations();
- if (cls->NeedsAccessCheck()) {
- codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex().index_);
- codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
- CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
+// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
+// move.
+void InstructionCodeGeneratorMIPS64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
+ codegen_->GenerateLoadClassRuntimeCall(cls);
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
- HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ LocationSummary* locations = cls->GetLocations();
Location out_loc = locations->Out();
GpuRegister out = out_loc.AsRegister<GpuRegister>();
GpuRegister current_method_reg = ZERO;
@@ -3526,14 +3555,14 @@ void InstructionCodeGeneratorMIPS64::VisitLoadClass(HLoadClass* cls) {
ArtMethod::DeclaringClassOffset().Int32Value());
break;
case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
- DCHECK(!kEmitCompilerReadBarrier);
+ DCHECK(codegen_->GetCompilerOptions().IsBootImage());
__ LoadLiteral(out,
kLoadUnsignedWord,
codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(),
cls->GetTypeIndex()));
break;
case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
- DCHECK(!kEmitCompilerReadBarrier);
+ DCHECK(codegen_->GetCompilerOptions().IsBootImage());
CodeGeneratorMIPS64::PcRelativePatchInfo* info =
codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex());
codegen_->EmitPcRelativeAddressPlaceholderHigh(info, AT);
@@ -3542,39 +3571,29 @@ void InstructionCodeGeneratorMIPS64::VisitLoadClass(HLoadClass* cls) {
}
case HLoadClass::LoadKind::kBootImageAddress: {
DCHECK(!kEmitCompilerReadBarrier);
- DCHECK_NE(cls->GetAddress(), 0u);
- uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
+ uint32_t address = dchecked_integral_cast<uint32_t>(
+ reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
+ DCHECK_NE(address, 0u);
__ LoadLiteral(out,
kLoadUnsignedWord,
codegen_->DeduplicateBootImageAddressLiteral(address));
break;
}
- case HLoadClass::LoadKind::kJitTableAddress: {
- LOG(FATAL) << "Unimplemented";
- break;
- }
- case HLoadClass::LoadKind::kDexCachePcRelative: {
- uint32_t element_offset = cls->GetDexCacheElementOffset();
+ case HLoadClass::LoadKind::kBssEntry: {
CodeGeneratorMIPS64::PcRelativePatchInfo* info =
- codegen_->NewPcRelativeDexCacheArrayPatch(cls->GetDexFile(), element_offset);
+ codegen_->NewTypeBssEntryPatch(cls->GetDexFile(), cls->GetTypeIndex());
codegen_->EmitPcRelativeAddressPlaceholderHigh(info, AT);
- // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
- GenerateGcRootFieldLoad(cls, out_loc, AT, /* placeholder */ 0x5678);
- generate_null_check = !cls->IsInDexCache();
+ __ Lwu(out, AT, /* placeholder */ 0x5678);
+ generate_null_check = true;
break;
}
- case HLoadClass::LoadKind::kDexCacheViaMethod: {
- // /* GcRoot<mirror::Class>[] */ out =
- // current_method.ptr_sized_fields_->dex_cache_resolved_types_
- __ LoadFromOffset(kLoadDoubleword,
- out,
- current_method_reg,
- ArtMethod::DexCacheResolvedTypesOffset(kMips64PointerSize).Int32Value());
- // /* GcRoot<mirror::Class> */ out = out[type_index]
- size_t offset = CodeGenerator::GetCacheOffset(cls->GetTypeIndex().index_);
- GenerateGcRootFieldLoad(cls, out_loc, out, offset);
- generate_null_check = !cls->IsInDexCache();
+ case HLoadClass::LoadKind::kJitTableAddress: {
+ LOG(FATAL) << "Unimplemented";
+ break;
}
+ case HLoadClass::LoadKind::kDexCacheViaMethod:
+ LOG(FATAL) << "UNREACHABLE";
+ UNREACHABLE();
}
if (generate_null_check || cls->MustGenerateClinitCheck()) {
@@ -3638,6 +3657,7 @@ void InstructionCodeGeneratorMIPS64::VisitLoadString(HLoadString* load) NO_THREA
switch (load_kind) {
case HLoadString::LoadKind::kBootImageLinkTimeAddress:
+ DCHECK(codegen_->GetCompilerOptions().IsBootImage());
__ LoadLiteral(out,
kLoadUnsignedWord,
codegen_->DeduplicateBootImageStringLiteral(load->GetDexFile(),
@@ -3646,7 +3666,7 @@ void InstructionCodeGeneratorMIPS64::VisitLoadString(HLoadString* load) NO_THREA
case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
DCHECK(codegen_->GetCompilerOptions().IsBootImage());
CodeGeneratorMIPS64::PcRelativePatchInfo* info =
- codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex().index_);
+ codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
codegen_->EmitPcRelativeAddressPlaceholderHigh(info, AT);
__ Daddiu(out, AT, /* placeholder */ 0x5678);
return; // No dex cache slow path.
@@ -3663,7 +3683,7 @@ void InstructionCodeGeneratorMIPS64::VisitLoadString(HLoadString* load) NO_THREA
case HLoadString::LoadKind::kBssEntry: {
DCHECK(!codegen_->GetCompilerOptions().IsBootImage());
CodeGeneratorMIPS64::PcRelativePatchInfo* info =
- codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex().index_);
+ codegen_->NewPcRelativeStringPatch(load->GetDexFile(), load->GetStringIndex());
codegen_->EmitPcRelativeAddressPlaceholderHigh(info, AT);
__ Lwu(out, AT, /* placeholder */ 0x5678);
SlowPathCodeMIPS64* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathMIPS64(load);
@@ -3844,7 +3864,6 @@ void LocationsBuilderMIPS64::VisitNewInstance(HNewInstance* instruction) {
locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
} else {
locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
- locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}
locations->SetOut(calling_convention.GetReturnLocation(Primitive::kPrimNot));
}
@@ -3862,7 +3881,7 @@ void InstructionCodeGeneratorMIPS64::VisitNewInstance(HNewInstance* instruction)
codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
} else {
codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
- CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
+ CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
}
}
diff --git a/compiler/optimizing/code_generator_mips64.h b/compiler/optimizing/code_generator_mips64.h
index 8ac919f47e..52b780c106 100644
--- a/compiler/optimizing/code_generator_mips64.h
+++ b/compiler/optimizing/code_generator_mips64.h
@@ -411,8 +411,10 @@ class CodeGeneratorMIPS64 : public CodeGenerator {
Mips64Label pc_rel_label;
};
- PcRelativePatchInfo* NewPcRelativeStringPatch(const DexFile& dex_file, uint32_t string_index);
+ PcRelativePatchInfo* NewPcRelativeStringPatch(const DexFile& dex_file,
+ dex::StringIndex string_index);
PcRelativePatchInfo* NewPcRelativeTypePatch(const DexFile& dex_file, dex::TypeIndex type_index);
+ PcRelativePatchInfo* NewTypeBssEntryPatch(const DexFile& dex_file, dex::TypeIndex type_index);
PcRelativePatchInfo* NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file,
uint32_t element_offset);
PcRelativePatchInfo* NewPcRelativeCallPatch(const DexFile& dex_file,
@@ -469,8 +471,10 @@ class CodeGeneratorMIPS64 : public CodeGenerator {
ArenaDeque<PcRelativePatchInfo> pc_relative_string_patches_;
// Deduplication map for boot type literals for kBootImageLinkTimeAddress.
BootTypeToLiteralMap boot_image_type_patches_;
- // PC-relative type patch info.
+ // PC-relative type patch info for kBootImageLinkTimePcRelative.
ArenaDeque<PcRelativePatchInfo> pc_relative_type_patches_;
+ // PC-relative type patch info for kBssEntry.
+ ArenaDeque<PcRelativePatchInfo> type_bss_entry_patches_;
// Deduplication map for patchable boot image addresses.
Uint32ToLiteralMap boot_image_address_patches_;
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index f13b60aebf..cc727d2068 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -225,8 +225,8 @@ class LoadStringSlowPathX86 : public SlowPathCode {
SaveLiveRegisters(codegen, locations);
InvokeRuntimeCallingConvention calling_convention;
- const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex().index_;
- __ movl(calling_convention.GetRegisterAt(0), Immediate(string_index));
+ const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
+ __ movl(calling_convention.GetRegisterAt(0), Immediate(string_index.index_));
x86_codegen->InvokeRuntime(kQuickResolveString, instruction_, instruction_->GetDexPc(), this);
CheckEntrypointTypes<kQuickResolveString, void*, uint32_t>();
x86_codegen->Move32(locations->Out(), Location::RegisterLocation(EAX));
@@ -254,21 +254,24 @@ class LoadClassSlowPathX86 : public SlowPathCode {
HInstruction* at,
uint32_t dex_pc,
bool do_clinit)
- : SlowPathCode(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
+ : SlowPathCode(at), cls_(cls), dex_pc_(dex_pc), do_clinit_(do_clinit) {
DCHECK(at->IsLoadClass() || at->IsClinitCheck());
}
void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
- LocationSummary* locations = at_->GetLocations();
+ LocationSummary* locations = instruction_->GetLocations();
CodeGeneratorX86* x86_codegen = down_cast<CodeGeneratorX86*>(codegen);
__ Bind(GetEntryLabel());
SaveLiveRegisters(codegen, locations);
InvokeRuntimeCallingConvention calling_convention;
- __ movl(calling_convention.GetRegisterAt(0), Immediate(cls_->GetTypeIndex().index_));
+ dex::TypeIndex type_index = cls_->GetTypeIndex();
+ __ movl(calling_convention.GetRegisterAt(0), Immediate(type_index.index_));
x86_codegen->InvokeRuntime(do_clinit_ ? kQuickInitializeStaticStorage
: kQuickInitializeType,
- at_, dex_pc_, this);
+ instruction_,
+ dex_pc_,
+ this);
if (do_clinit_) {
CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
} else {
@@ -281,8 +284,17 @@ class LoadClassSlowPathX86 : public SlowPathCode {
DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
x86_codegen->Move32(out, Location::RegisterLocation(EAX));
}
-
RestoreLiveRegisters(codegen, locations);
+ // For HLoadClass/kBssEntry, store the resolved Class to the BSS entry.
+ DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
+ if (cls_ == instruction_ && cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
+ DCHECK(out.IsValid());
+ Register method_address = locations->InAt(0).AsRegister<Register>();
+ __ movl(Address(method_address, CodeGeneratorX86::kDummy32BitOffset),
+ locations->Out().AsRegister<Register>());
+ Label* fixup_label = x86_codegen->NewTypeBssEntryPatch(cls_);
+ __ Bind(fixup_label);
+ }
__ jmp(GetExitLabel());
}
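[editor's note] On x86 the slow path stores through Address(method_address, kDummy32BitOffset) and binds the fixup label only afterwards, so the label marks the instruction's end and the patcher rewrites the 32-bit displacement sitting immediately before it (the kLabelPositionToLiteralOffsetAdjustment used by EmitLinkerPatches further down). A byte-buffer simulation of that bind-after-emit convention; the opcode bytes are illustrative only:

    #include <cassert>
    #include <cstdint>
    #include <cstring>
    #include <vector>

    // Emit a fake instruction: 2 opcode bytes followed by a 32-bit displacement.
    static void EmitStoreWithDummyDisp(std::vector<uint8_t>* code) {
      code->push_back(0x89);                       // illustrative opcode bytes
      code->push_back(0x86);
      uint32_t dummy = 0;                          // kDummy32BitOffset stand-in
      const uint8_t* p = reinterpret_cast<const uint8_t*>(&dummy);
      code->insert(code->end(), p, p + 4);
    }

    int main() {
      std::vector<uint8_t> code;
      EmitStoreWithDummyDisp(&code);
      size_t label = code.size();                  // bound *after* the instruction
      const size_t kAdjustment = 4;                // displacement is the last 4 bytes

      // Later, the "linker" rewrites the displacement at label - kAdjustment.
      uint32_t bss_offset = 0x1234;
      std::memcpy(&code[label - kAdjustment], &bss_offset, sizeof(bss_offset));

      uint32_t patched;
      std::memcpy(&patched, &code[label - kAdjustment], sizeof(patched));
      assert(patched == 0x1234);
    }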
@@ -292,10 +304,6 @@ class LoadClassSlowPathX86 : public SlowPathCode {
// The class this slow path will load.
HLoadClass* const cls_;
- // The instruction where this slow path is happening.
- // (Might be the load class or an initialization check).
- HInstruction* const at_;
-
- // The dex PC of `at_`.
+ // The dex PC of `instruction_`.
const uint32_t dex_pc_;
@@ -1009,7 +1017,8 @@ CodeGeneratorX86::CodeGeneratorX86(HGraph* graph,
pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
- type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ boot_image_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_class_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
constant_area_start_(-1),
@@ -2244,6 +2253,14 @@ void InstructionCodeGeneratorX86::VisitInvokeInterface(HInvokeInterface* invoke)
codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
+void LocationsBuilderX86::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
+ HandleInvoke(invoke);
+}
+
+void InstructionCodeGeneratorX86::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
+ codegen_->GenerateInvokePolymorphicCall(invoke);
+}
+
void LocationsBuilderX86::VisitNeg(HNeg* neg) {
LocationSummary* locations =
new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
@@ -4150,7 +4167,6 @@ void LocationsBuilderX86::VisitNewInstance(HNewInstance* instruction) {
} else {
InvokeRuntimeCallingConvention calling_convention;
locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
- locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}
}
@@ -4166,7 +4182,7 @@ void InstructionCodeGeneratorX86::VisitNewInstance(HNewInstance* instruction) {
codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
} else {
codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
- CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
+ CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
DCHECK(!codegen_->IsLeafMethod());
}
}
@@ -4505,7 +4521,7 @@ Location CodeGeneratorX86::GenerateCalleeMethodStaticOrDirectCall(HInvokeStaticO
__ movl(temp.AsRegister<Register>(), Address(base_reg, kDummy32BitOffset));
// Bind a new fixup label at the end of the "movl" insn.
uint32_t offset = invoke->GetDexCacheArrayOffset();
- __ Bind(NewPcRelativeDexCacheArrayPatch(invoke->GetDexFile(), offset));
+ __ Bind(NewPcRelativeDexCacheArrayPatch(invoke->GetDexFileForPcRelativeDexCache(), offset));
break;
}
case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
@@ -4594,9 +4610,15 @@ void CodeGeneratorX86::RecordBootStringPatch(HLoadString* load_string) {
__ Bind(&string_patches_.back().label);
}
-void CodeGeneratorX86::RecordTypePatch(HLoadClass* load_class) {
- type_patches_.emplace_back(load_class->GetDexFile(), load_class->GetTypeIndex().index_);
- __ Bind(&type_patches_.back().label);
+void CodeGeneratorX86::RecordBootTypePatch(HLoadClass* load_class) {
+ boot_image_type_patches_.emplace_back(load_class->GetDexFile(),
+ load_class->GetTypeIndex().index_);
+ __ Bind(&boot_image_type_patches_.back().label);
+}
+
+Label* CodeGeneratorX86::NewTypeBssEntryPatch(HLoadClass* load_class) {
+ type_bss_entry_patches_.emplace_back(load_class->GetDexFile(),
+ load_class->GetTypeIndex().index_);
+ return &type_bss_entry_patches_.back().label;
}
Label* CodeGeneratorX86::NewStringBssEntryPatch(HLoadString* load_string) {
@@ -4633,7 +4655,8 @@ void CodeGeneratorX86::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patche
pc_relative_dex_cache_patches_.size() +
simple_patches_.size() +
string_patches_.size() +
- type_patches_.size();
+ boot_image_type_patches_.size() +
+ type_bss_entry_patches_.size();
linker_patches->reserve(size);
EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
linker_patches);
@@ -4642,24 +4665,26 @@ void CodeGeneratorX86::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_patche
linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
}
if (!GetCompilerOptions().IsBootImage()) {
+ DCHECK(boot_image_type_patches_.empty());
EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_patches_, linker_patches);
} else if (GetCompilerOptions().GetCompilePic()) {
+ EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(boot_image_type_patches_,
+ linker_patches);
EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(string_patches_, linker_patches);
} else {
+ for (const PatchInfo<Label>& info : boot_image_type_patches_) {
+ uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
+ linker_patches->push_back(LinkerPatch::TypePatch(literal_offset, &info.dex_file, info.index));
+ }
for (const PatchInfo<Label>& info : string_patches_) {
uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
linker_patches->push_back(
LinkerPatch::StringPatch(literal_offset, &info.dex_file, info.index));
}
}
- if (GetCompilerOptions().GetCompilePic()) {
- EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(type_patches_, linker_patches);
- } else {
- for (const PatchInfo<Label>& info : type_patches_) {
- uint32_t literal_offset = info.label.Position() - kLabelPositionToLiteralOffsetAdjustment;
- linker_patches->push_back(LinkerPatch::TypePatch(literal_offset, &info.dex_file, info.index));
- }
- }
+ EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
+ linker_patches);
+ DCHECK_EQ(size, linker_patches->size());
}
void CodeGeneratorX86::MarkGCCard(Register temp,
@@ -5978,7 +6003,7 @@ HLoadClass::LoadKind CodeGeneratorX86::GetSupportedLoadClassKind(
case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
DCHECK(GetCompilerOptions().GetCompilePic());
FALLTHROUGH_INTENDED;
- case HLoadClass::LoadKind::kDexCachePcRelative:
+ case HLoadClass::LoadKind::kBssEntry:
DCHECK(!Runtime::Current()->UseJitCompilation()); // Note: boot image is also non-JIT.
// We disable pc-relative load when there is an irreducible loop, as the optimization
// is incompatible with it.
@@ -6000,15 +6025,16 @@ HLoadClass::LoadKind CodeGeneratorX86::GetSupportedLoadClassKind(
}
void LocationsBuilderX86::VisitLoadClass(HLoadClass* cls) {
- if (cls->NeedsAccessCheck()) {
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
InvokeRuntimeCallingConvention calling_convention;
- CodeGenerator::CreateLoadClassLocationSummary(
+ CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
cls,
Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
- Location::RegisterLocation(EAX),
- /* code_generator_supports_read_barrier */ true);
+ Location::RegisterLocation(EAX));
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
@@ -6019,11 +6045,9 @@ void LocationsBuilderX86::VisitLoadClass(HLoadClass* cls) {
locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
}
- HLoadClass::LoadKind load_kind = cls->GetLoadKind();
if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
- load_kind == HLoadClass::LoadKind::kDexCacheViaMethod ||
load_kind == HLoadClass::LoadKind::kBootImageLinkTimePcRelative ||
- load_kind == HLoadClass::LoadKind::kDexCachePcRelative) {
+ load_kind == HLoadClass::LoadKind::kBssEntry) {
locations->SetInAt(0, Location::RequiresRegister());
}
locations->SetOut(Location::RequiresRegister());
@@ -6031,23 +6055,26 @@ void LocationsBuilderX86::VisitLoadClass(HLoadClass* cls) {
Label* CodeGeneratorX86::NewJitRootClassPatch(const DexFile& dex_file,
dex::TypeIndex dex_index,
- uint64_t address) {
- jit_class_roots_.Overwrite(TypeReference(&dex_file, dex_index), address);
+ Handle<mirror::Class> handle) {
+ jit_class_roots_.Overwrite(TypeReference(&dex_file, dex_index),
+ reinterpret_cast64<uint64_t>(handle.GetReference()));
// Add a patch entry and return the label.
jit_class_patches_.emplace_back(dex_file, dex_index.index_);
PatchInfo<Label>* info = &jit_class_patches_.back();
return &info->label;
}
-void InstructionCodeGeneratorX86::VisitLoadClass(HLoadClass* cls) {
- LocationSummary* locations = cls->GetLocations();
- if (cls->NeedsAccessCheck()) {
- codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex().index_);
- codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
- CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
+// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
+// move.
+void InstructionCodeGeneratorX86::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
+ codegen_->GenerateLoadClassRuntimeCall(cls);
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
+ LocationSummary* locations = cls->GetLocations();
Location out_loc = locations->Out();
Register out = out_loc.AsRegister<Register>();
@@ -6055,7 +6082,7 @@ void InstructionCodeGeneratorX86::VisitLoadClass(HLoadClass* cls) {
const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
? kWithoutReadBarrier
: kCompilerReadBarrierOption;
- switch (cls->GetLoadKind()) {
+ switch (load_kind) {
case HLoadClass::LoadKind::kReferrersClass: {
DCHECK(!cls->CanCallRuntime());
DCHECK(!cls->MustGenerateClinitCheck());
@@ -6070,63 +6097,48 @@ void InstructionCodeGeneratorX86::VisitLoadClass(HLoadClass* cls) {
break;
}
case HLoadClass::LoadKind::kBootImageLinkTimeAddress: {
+ DCHECK(codegen_->GetCompilerOptions().IsBootImage());
DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
__ movl(out, Immediate(/* placeholder */ 0));
- codegen_->RecordTypePatch(cls);
+ codegen_->RecordBootTypePatch(cls);
break;
}
case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: {
+ DCHECK(codegen_->GetCompilerOptions().IsBootImage());
DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
Register method_address = locations->InAt(0).AsRegister<Register>();
__ leal(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
- codegen_->RecordTypePatch(cls);
+ codegen_->RecordBootTypePatch(cls);
break;
}
case HLoadClass::LoadKind::kBootImageAddress: {
DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
- DCHECK_NE(cls->GetAddress(), 0u);
- uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
+ uint32_t address = dchecked_integral_cast<uint32_t>(
+ reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
+ DCHECK_NE(address, 0u);
__ movl(out, Immediate(address));
codegen_->RecordSimplePatch();
break;
}
+ case HLoadClass::LoadKind::kBssEntry: {
+ Register method_address = locations->InAt(0).AsRegister<Register>();
+ Address address(method_address, CodeGeneratorX86::kDummy32BitOffset);
+ Label* fixup_label = codegen_->NewTypeBssEntryPatch(cls);
+ GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
+ generate_null_check = true;
+ break;
+ }
case HLoadClass::LoadKind::kJitTableAddress: {
Address address = Address::Absolute(CodeGeneratorX86::kDummy32BitOffset);
Label* fixup_label = codegen_->NewJitRootClassPatch(
- cls->GetDexFile(), cls->GetTypeIndex(), cls->GetAddress());
+ cls->GetDexFile(), cls->GetTypeIndex(), cls->GetClass());
// /* GcRoot<mirror::Class> */ out = *address
GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, kCompilerReadBarrierOption);
break;
}
- case HLoadClass::LoadKind::kDexCachePcRelative: {
- Register base_reg = locations->InAt(0).AsRegister<Register>();
- uint32_t offset = cls->GetDexCacheElementOffset();
- Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(cls->GetDexFile(), offset);
- // /* GcRoot<mirror::Class> */ out = *(base + offset) /* PC-relative */
- GenerateGcRootFieldLoad(cls,
- out_loc,
- Address(base_reg, CodeGeneratorX86::kDummy32BitOffset),
- fixup_label,
- read_barrier_option);
- generate_null_check = !cls->IsInDexCache();
- break;
- }
- case HLoadClass::LoadKind::kDexCacheViaMethod: {
- // /* GcRoot<mirror::Class>[] */ out =
- // current_method.ptr_sized_fields_->dex_cache_resolved_types_
- Register current_method = locations->InAt(0).AsRegister<Register>();
- __ movl(out, Address(current_method,
- ArtMethod::DexCacheResolvedTypesOffset(kX86PointerSize).Int32Value()));
- // /* GcRoot<mirror::Class> */ out = out[type_index]
- GenerateGcRootFieldLoad(cls,
- out_loc,
- Address(out,
- CodeGenerator::GetCacheOffset(cls->GetTypeIndex().index_)),
- /* fixup_label */ nullptr,
- read_barrier_option);
- generate_null_check = !cls->IsInDexCache();
- break;
- }
+ case HLoadClass::LoadKind::kDexCacheViaMethod:
+ LOG(FATAL) << "UNREACHABLE";
+ UNREACHABLE();
}
if (generate_null_check || cls->MustGenerateClinitCheck()) {
@@ -6196,11 +6208,11 @@ HLoadString::LoadKind CodeGeneratorX86::GetSupportedLoadStringKind(
break;
case HLoadString::LoadKind::kBootImageAddress:
break;
- case HLoadString::LoadKind::kDexCacheViaMethod:
- break;
case HLoadString::LoadKind::kJitTableAddress:
DCHECK(Runtime::Current()->UseJitCompilation());
break;
+ case HLoadString::LoadKind::kDexCacheViaMethod:
+ break;
}
return desired_string_load_kind;
}
@@ -6251,11 +6263,13 @@ void InstructionCodeGeneratorX86::VisitLoadString(HLoadString* load) NO_THREAD_S
switch (load->GetLoadKind()) {
case HLoadString::LoadKind::kBootImageLinkTimeAddress: {
+ DCHECK(codegen_->GetCompilerOptions().IsBootImage());
__ movl(out, Immediate(/* placeholder */ 0));
codegen_->RecordBootStringPatch(load);
return; // No dex cache slow path.
}
case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
+ DCHECK(codegen_->GetCompilerOptions().IsBootImage());
Register method_address = locations->InAt(0).AsRegister<Register>();
__ leal(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset));
codegen_->RecordBootStringPatch(load);
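Note on the kBssEntry flow added above: the fast path is a single PC-relative load through a .bss slot whose displacement the linker patches, and a null result diverts to the resolution slow path, which also fills the slot. A reduced host-side sketch of that shape, using stand-in types rather than the ART assembler or runtime:

#include <atomic>

// Stand-ins for the runtime pieces; these are not ART types or entrypoints.
struct Class {};
namespace {
std::atomic<Class*> type_bss_entry{nullptr};  // One GcRoot<Class> slot in .bss.
Class resolved_class;

Class* ResolveTypeSlowPath() {
  // The real slow path calls kQuickInitializeType, then stores the result
  // into the .bss slot (the movl added to LoadClassSlowPathX86_64 further down).
  type_bss_entry.store(&resolved_class, std::memory_order_release);
  return &resolved_class;
}
}  // namespace

// Shape of the kBssEntry fast path emitted by VisitLoadClass: one PC-relative
// load, a null check, and a slow path that fills the slot so that later
// executions take only the load.
Class* LoadClassBssEntry() {
  Class* klass = type_bss_entry.load(std::memory_order_acquire);
  if (klass == nullptr) {  // generate_null_check = true in the codegen.
    klass = ResolveTypeSlowPath();
  }
  return klass;
}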
diff --git a/compiler/optimizing/code_generator_x86.h b/compiler/optimizing/code_generator_x86.h
index dd1628c867..9eb97658da 100644
--- a/compiler/optimizing/code_generator_x86.h
+++ b/compiler/optimizing/code_generator_x86.h
@@ -412,13 +412,16 @@ class CodeGeneratorX86 : public CodeGenerator {
void RecordSimplePatch();
void RecordBootStringPatch(HLoadString* load_string);
- void RecordTypePatch(HLoadClass* load_class);
+ void RecordBootTypePatch(HLoadClass* load_class);
+ Label* NewTypeBssEntryPatch(HLoadClass* load_class);
Label* NewStringBssEntryPatch(HLoadString* load_string);
Label* NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file, uint32_t element_offset);
Label* NewJitRootStringPatch(const DexFile& dex_file,
dex::StringIndex dex_index,
Handle<mirror::String> handle);
- Label* NewJitRootClassPatch(const DexFile& dex_file, dex::TypeIndex dex_index, uint64_t address);
+ Label* NewJitRootClassPatch(const DexFile& dex_file,
+ dex::TypeIndex dex_index,
+ Handle<mirror::Class> handle);
void MoveFromReturnRegister(Location trg, Primitive::Type type) OVERRIDE;
@@ -621,8 +624,10 @@ class CodeGeneratorX86 : public CodeGenerator {
ArenaDeque<Label> simple_patches_;
// String patch locations; type depends on configuration (app .bss or boot image PIC/non-PIC).
ArenaDeque<PatchInfo<Label>> string_patches_;
- // Type patch locations.
- ArenaDeque<PatchInfo<Label>> type_patches_;
+ // Type patch locations for boot image; type depends on configuration (boot image PIC/non-PIC).
+ ArenaDeque<PatchInfo<Label>> boot_image_type_patches_;
+ // Type patch locations for kBssEntry.
+ ArenaDeque<PatchInfo<Label>> type_bss_entry_patches_;
// Patches for string root accesses in JIT compiled code.
ArenaDeque<PatchInfo<Label>> jit_string_patches_;
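The split of type_patches_ into two deques leans on a bookkeeping convention visible in EmitLinkerPatches: compute the exact total, reserve once, emit every category, and DCHECK the final size so a newly added patch kind cannot be silently dropped. A reduced sketch of that pattern with simplified stand-in types (not ART's PatchInfo/LinkerPatch):

#include <cassert>
#include <cstdint>
#include <deque>
#include <vector>

// Simplified stand-in for a recorded patch site; illustrative only.
struct Patch { uint32_t literal_offset; };

void EmitAll(const std::deque<Patch>& boot_image_type_patches,
             const std::deque<Patch>& type_bss_entry_patches,
             std::vector<Patch>* linker_patches) {
  // Reserve the exact total once, so the final size check catches any
  // category that was forgotten when a new patch kind is added.
  size_t size = boot_image_type_patches.size() + type_bss_entry_patches.size();
  linker_patches->reserve(size);
  for (const Patch& p : boot_image_type_patches) linker_patches->push_back(p);
  for (const Patch& p : type_bss_entry_patches) linker_patches->push_back(p);
  assert(size == linker_patches->size());  // DCHECK_EQ(size, ...) in the real code.
}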
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index 89f4ae04d7..9adedab130 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -234,12 +234,12 @@ class LoadClassSlowPathX86_64 : public SlowPathCode {
HInstruction* at,
uint32_t dex_pc,
bool do_clinit)
- : SlowPathCode(at), cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
+ : SlowPathCode(at), cls_(cls), dex_pc_(dex_pc), do_clinit_(do_clinit) {
DCHECK(at->IsLoadClass() || at->IsClinitCheck());
}
void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
- LocationSummary* locations = at_->GetLocations();
+ LocationSummary* locations = instruction_->GetLocations();
CodeGeneratorX86_64* x86_64_codegen = down_cast<CodeGeneratorX86_64*>(codegen);
__ Bind(GetEntryLabel());
@@ -249,7 +249,7 @@ class LoadClassSlowPathX86_64 : public SlowPathCode {
__ movl(CpuRegister(calling_convention.GetRegisterAt(0)),
Immediate(cls_->GetTypeIndex().index_));
x86_64_codegen->InvokeRuntime(do_clinit_ ? kQuickInitializeStaticStorage : kQuickInitializeType,
- at_,
+ instruction_,
dex_pc_,
this);
if (do_clinit_) {
@@ -266,6 +266,15 @@ class LoadClassSlowPathX86_64 : public SlowPathCode {
}
RestoreLiveRegisters(codegen, locations);
+ // For HLoadClass/kBssEntry, store the resolved Class to the BSS entry.
+ DCHECK_EQ(instruction_->IsLoadClass(), cls_ == instruction_);
+ if (cls_ == instruction_ && cls_->GetLoadKind() == HLoadClass::LoadKind::kBssEntry) {
+ DCHECK(out.IsValid());
+ __ movl(Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false),
+ locations->Out().AsRegister<CpuRegister>());
+ Label* fixup_label = x86_64_codegen->NewTypeBssEntryPatch(cls_);
+ __ Bind(fixup_label);
+ }
__ jmp(GetExitLabel());
}
@@ -275,10 +284,6 @@ class LoadClassSlowPathX86_64 : public SlowPathCode {
// The class this slow path will load.
HLoadClass* const cls_;
- // The instruction where this slow path is happening.
- // (Might be the load class or an initialization check).
- HInstruction* const at_;
-
- // The dex PC of `at_`.
+ // The dex PC of `instruction_`.
const uint32_t dex_pc_;
@@ -300,9 +305,9 @@ class LoadStringSlowPathX86_64 : public SlowPathCode {
__ Bind(GetEntryLabel());
SaveLiveRegisters(codegen, locations);
- const uint32_t string_index = instruction_->AsLoadString()->GetStringIndex().index_;
+ const dex::StringIndex string_index = instruction_->AsLoadString()->GetStringIndex();
// Custom calling convention: RAX serves as both input and output.
- __ movl(CpuRegister(RAX), Immediate(string_index));
+ __ movl(CpuRegister(RAX), Immediate(string_index.index_));
x86_64_codegen->InvokeRuntime(kQuickResolveString,
instruction_,
instruction_->GetDexPc(),
@@ -986,7 +991,7 @@ Location CodeGeneratorX86_64::GenerateCalleeMethodStaticOrDirectCall(HInvokeStat
Address::Absolute(kDummy32BitOffset, /* no_rip */ false));
// Bind a new fixup label at the end of the "movl" insn.
uint32_t offset = invoke->GetDexCacheArrayOffset();
- __ Bind(NewPcRelativeDexCacheArrayPatch(invoke->GetDexFile(), offset));
+ __ Bind(NewPcRelativeDexCacheArrayPatch(invoke->GetDexFileForPcRelativeDexCache(), offset));
break;
}
case HInvokeStaticOrDirect::MethodLoadKind::kDexCacheViaMethod: {
@@ -1079,9 +1084,15 @@ void CodeGeneratorX86_64::RecordBootStringPatch(HLoadString* load_string) {
__ Bind(&string_patches_.back().label);
}
-void CodeGeneratorX86_64::RecordTypePatch(HLoadClass* load_class) {
- type_patches_.emplace_back(load_class->GetDexFile(), load_class->GetTypeIndex().index_);
- __ Bind(&type_patches_.back().label);
+void CodeGeneratorX86_64::RecordBootTypePatch(HLoadClass* load_class) {
+ boot_image_type_patches_.emplace_back(load_class->GetDexFile(),
+ load_class->GetTypeIndex().index_);
+ __ Bind(&boot_image_type_patches_.back().label);
+}
+
+Label* CodeGeneratorX86_64::NewTypeBssEntryPatch(HLoadClass* load_class) {
+ type_bss_entry_patches_.emplace_back(load_class->GetDexFile(), load_class->GetTypeIndex().index_);
+ return &type_bss_entry_patches_.back().label;
}
Label* CodeGeneratorX86_64::NewStringBssEntryPatch(HLoadString* load_string) {
@@ -1118,7 +1129,8 @@ void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_pat
pc_relative_dex_cache_patches_.size() +
simple_patches_.size() +
string_patches_.size() +
- type_patches_.size();
+ boot_image_type_patches_.size() +
+ type_bss_entry_patches_.size();
linker_patches->reserve(size);
EmitPcRelativeLinkerPatches<LinkerPatch::DexCacheArrayPatch>(pc_relative_dex_cache_patches_,
linker_patches);
@@ -1127,13 +1139,17 @@ void CodeGeneratorX86_64::EmitLinkerPatches(ArenaVector<LinkerPatch>* linker_pat
linker_patches->push_back(LinkerPatch::RecordPosition(literal_offset));
}
if (!GetCompilerOptions().IsBootImage()) {
+ DCHECK(boot_image_type_patches_.empty());
EmitPcRelativeLinkerPatches<LinkerPatch::StringBssEntryPatch>(string_patches_, linker_patches);
} else {
- // These are always PC-relative, see GetSupportedLoadStringKind().
+ // These are always PC-relative, see GetSupportedLoadClassKind()/GetSupportedLoadStringKind().
+ EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(boot_image_type_patches_,
+ linker_patches);
EmitPcRelativeLinkerPatches<LinkerPatch::RelativeStringPatch>(string_patches_, linker_patches);
}
- // These are always PC-relative, see GetSupportedLoadClassKind().
- EmitPcRelativeLinkerPatches<LinkerPatch::RelativeTypePatch>(type_patches_, linker_patches);
+ EmitPcRelativeLinkerPatches<LinkerPatch::TypeBssEntryPatch>(type_bss_entry_patches_,
+ linker_patches);
+ DCHECK_EQ(size, linker_patches->size());
}
void CodeGeneratorX86_64::DumpCoreRegister(std::ostream& stream, int reg) const {
@@ -1214,7 +1230,8 @@ CodeGeneratorX86_64::CodeGeneratorX86_64(HGraph* graph,
pc_relative_dex_cache_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
simple_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
- type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ boot_image_type_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
+ type_bss_entry_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
fixups_to_jump_tables_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_string_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)),
jit_class_patches_(graph->GetArena()->Adapter(kArenaAllocCodeGenerator)) {
@@ -2423,6 +2440,14 @@ void InstructionCodeGeneratorX86_64::VisitInvokeInterface(HInvokeInterface* invo
codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}
+void LocationsBuilderX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
+ HandleInvoke(invoke);
+}
+
+void InstructionCodeGeneratorX86_64::VisitInvokePolymorphic(HInvokePolymorphic* invoke) {
+ codegen_->GenerateInvokePolymorphicCall(invoke);
+}
+
void LocationsBuilderX86_64::VisitNeg(HNeg* neg) {
LocationSummary* locations =
new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
@@ -4038,7 +4063,6 @@ void LocationsBuilderX86_64::VisitNewInstance(HNewInstance* instruction) {
locations->AddTemp(Location::RegisterLocation(kMethodRegisterArgument));
} else {
locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
- locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}
locations->SetOut(Location::RegisterLocation(RAX));
}
@@ -4055,7 +4079,7 @@ void InstructionCodeGeneratorX86_64::VisitNewInstance(HNewInstance* instruction)
codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
} else {
codegen_->InvokeRuntime(instruction->GetEntrypoint(), instruction, instruction->GetDexPc());
- CheckEntrypointTypes<kQuickAllocObjectWithAccessCheck, void*, uint32_t, ArtMethod*>();
+ CheckEntrypointTypes<kQuickAllocObjectWithChecks, void*, mirror::Class*>();
DCHECK(!codegen_->IsLeafMethod());
}
}
@@ -5417,11 +5441,12 @@ HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
break;
case HLoadClass::LoadKind::kBootImageAddress:
break;
- case HLoadClass::LoadKind::kJitTableAddress:
- break;
- case HLoadClass::LoadKind::kDexCachePcRelative:
+ case HLoadClass::LoadKind::kBssEntry:
DCHECK(!Runtime::Current()->UseJitCompilation());
break;
+ case HLoadClass::LoadKind::kJitTableAddress:
+ DCHECK(Runtime::Current()->UseJitCompilation());
+ break;
case HLoadClass::LoadKind::kDexCacheViaMethod:
break;
}
@@ -5429,15 +5454,16 @@ HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind(
}
void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
- if (cls->NeedsAccessCheck()) {
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
InvokeRuntimeCallingConvention calling_convention;
- CodeGenerator::CreateLoadClassLocationSummary(
+ CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
cls,
Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
- Location::RegisterLocation(RAX),
- /* code_generator_supports_read_barrier */ true);
+ Location::RegisterLocation(RAX));
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
@@ -5448,9 +5474,7 @@ void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
}
- HLoadClass::LoadKind load_kind = cls->GetLoadKind();
- if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
- load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
+ if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
locations->SetInAt(0, Location::RequiresRegister());
}
locations->SetOut(Location::RequiresRegister());
@@ -5458,23 +5482,26 @@ void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
Label* CodeGeneratorX86_64::NewJitRootClassPatch(const DexFile& dex_file,
dex::TypeIndex dex_index,
- uint64_t address) {
- jit_class_roots_.Overwrite(TypeReference(&dex_file, dex_index), address);
+ Handle<mirror::Class> handle) {
+ jit_class_roots_.Overwrite(
+ TypeReference(&dex_file, dex_index), reinterpret_cast64<uint64_t>(handle.GetReference()));
// Add a patch entry and return the label.
jit_class_patches_.emplace_back(dex_file, dex_index.index_);
PatchInfo<Label>* info = &jit_class_patches_.back();
return &info->label;
}
-void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
- LocationSummary* locations = cls->GetLocations();
- if (cls->NeedsAccessCheck()) {
- codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex().index_);
- codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
- CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
+// NO_THREAD_SAFETY_ANALYSIS as we manipulate handles whose internal object we know does not
+// move.
+void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) NO_THREAD_SAFETY_ANALYSIS {
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
+ codegen_->GenerateLoadClassRuntimeCall(cls);
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
+ LocationSummary* locations = cls->GetLocations();
Location out_loc = locations->Out();
CpuRegister out = out_loc.AsRegister<CpuRegister>();
@@ -5482,7 +5509,7 @@ void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
? kWithoutReadBarrier
: kCompilerReadBarrierOption;
bool generate_null_check = false;
- switch (cls->GetLoadKind()) {
+ switch (load_kind) {
case HLoadClass::LoadKind::kReferrersClass: {
DCHECK(!cls->CanCallRuntime());
DCHECK(!cls->MustGenerateClinitCheck());
@@ -5497,52 +5524,36 @@ void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
break;
}
case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
+ DCHECK(codegen_->GetCompilerOptions().IsBootImage());
DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
__ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
- codegen_->RecordTypePatch(cls);
+ codegen_->RecordBootTypePatch(cls);
break;
case HLoadClass::LoadKind::kBootImageAddress: {
DCHECK_EQ(read_barrier_option, kWithoutReadBarrier);
- DCHECK_NE(cls->GetAddress(), 0u);
- uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress());
+ uint32_t address = dchecked_integral_cast<uint32_t>(
+ reinterpret_cast<uintptr_t>(cls->GetClass().Get()));
+ DCHECK_NE(address, 0u);
__ movl(out, Immediate(address)); // Zero-extended.
codegen_->RecordSimplePatch();
break;
}
- case HLoadClass::LoadKind::kJitTableAddress: {
- Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
- /* no_rip */ true);
- Label* fixup_label =
- codegen_->NewJitRootClassPatch(cls->GetDexFile(), cls->GetTypeIndex(), cls->GetAddress());
- // /* GcRoot<mirror::Class> */ out = *address
- GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, kCompilerReadBarrierOption);
- break;
- }
- case HLoadClass::LoadKind::kDexCachePcRelative: {
- uint32_t offset = cls->GetDexCacheElementOffset();
- Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(cls->GetDexFile(), offset);
+ case HLoadClass::LoadKind::kBssEntry: {
Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
/* no_rip */ false);
+ Label* fixup_label = codegen_->NewTypeBssEntryPatch(cls);
// /* GcRoot<mirror::Class> */ out = *address /* PC-relative */
GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, read_barrier_option);
- generate_null_check = !cls->IsInDexCache();
+ generate_null_check = true;
break;
}
- case HLoadClass::LoadKind::kDexCacheViaMethod: {
- // /* GcRoot<mirror::Class>[] */ out =
- // current_method.ptr_sized_fields_->dex_cache_resolved_types_
- CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
- __ movq(out,
- Address(current_method,
- ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value()));
- // /* GcRoot<mirror::Class> */ out = out[type_index]
- GenerateGcRootFieldLoad(
- cls,
- out_loc,
- Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex().index_)),
- /* fixup_label */ nullptr,
- read_barrier_option);
- generate_null_check = !cls->IsInDexCache();
+ case HLoadClass::LoadKind::kJitTableAddress: {
+ Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset,
+ /* no_rip */ true);
+ Label* fixup_label =
+ codegen_->NewJitRootClassPatch(cls->GetDexFile(), cls->GetTypeIndex(), cls->GetClass());
+ // /* GcRoot<mirror::Class> */ out = *address
+ GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, kCompilerReadBarrierOption);
break;
}
default:
@@ -5600,11 +5611,11 @@ HLoadString::LoadKind CodeGeneratorX86_64::GetSupportedLoadStringKind(
case HLoadString::LoadKind::kBssEntry:
DCHECK(!Runtime::Current()->UseJitCompilation());
break;
- case HLoadString::LoadKind::kDexCacheViaMethod:
- break;
case HLoadString::LoadKind::kJitTableAddress:
DCHECK(Runtime::Current()->UseJitCompilation());
break;
+ case HLoadString::LoadKind::kDexCacheViaMethod:
+ break;
}
return desired_string_load_kind;
}
@@ -5650,6 +5661,7 @@ void InstructionCodeGeneratorX86_64::VisitLoadString(HLoadString* load) NO_THREA
switch (load->GetLoadKind()) {
case HLoadString::LoadKind::kBootImageLinkTimePcRelative: {
+ DCHECK(codegen_->GetCompilerOptions().IsBootImage());
__ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false));
codegen_->RecordBootStringPatch(load);
return; // No dex cache slow path.
diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h
index 32d006c5f3..3438b8159f 100644
--- a/compiler/optimizing/code_generator_x86_64.h
+++ b/compiler/optimizing/code_generator_x86_64.h
@@ -409,13 +409,16 @@ class CodeGeneratorX86_64 : public CodeGenerator {
void RecordSimplePatch();
void RecordBootStringPatch(HLoadString* load_string);
- void RecordTypePatch(HLoadClass* load_class);
+ void RecordBootTypePatch(HLoadClass* load_class);
+ Label* NewTypeBssEntryPatch(HLoadClass* load_class);
Label* NewStringBssEntryPatch(HLoadString* load_string);
Label* NewPcRelativeDexCacheArrayPatch(const DexFile& dex_file, uint32_t element_offset);
Label* NewJitRootStringPatch(const DexFile& dex_file,
dex::StringIndex dex_index,
Handle<mirror::String> handle);
- Label* NewJitRootClassPatch(const DexFile& dex_file, dex::TypeIndex dex_index, uint64_t address);
+ Label* NewJitRootClassPatch(const DexFile& dex_file,
+ dex::TypeIndex dex_index,
+ Handle<mirror::Class> handle);
void MoveFromReturnRegister(Location trg, Primitive::Type type) OVERRIDE;
@@ -604,8 +607,10 @@ class CodeGeneratorX86_64 : public CodeGenerator {
ArenaDeque<Label> simple_patches_;
// String patch locations; type depends on configuration (app .bss or boot image PIC).
ArenaDeque<PatchInfo<Label>> string_patches_;
- // Type patch locations.
- ArenaDeque<PatchInfo<Label>> type_patches_;
+ // Type patch locations for boot image (always PIC).
+ ArenaDeque<PatchInfo<Label>> boot_image_type_patches_;
+ // Type patch locations for kBssEntry.
+ ArenaDeque<PatchInfo<Label>> type_bss_entry_patches_;
// Fixups for jump tables need to be handled specially.
ArenaVector<JumpTableRIPFixup*> fixups_to_jump_tables_;
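The kBootImageAddress cases above rely on boot-image objects being mapped in the low 4 GiB, which is why a 64-bit mirror::Class pointer can be emitted as a zero-extended 32-bit immediate; dchecked_integral_cast asserts the narrowing is lossless. A minimal analogue (CheckedNarrow is a hypothetical stand-in, not the ART helper):

#include <cassert>
#include <cstdint>

// Analogue of the dchecked narrowing used above; illustrative only.
inline uint32_t CheckedNarrow(uintptr_t value) {
  assert(value == static_cast<uintptr_t>(static_cast<uint32_t>(value)) &&
         "boot image object must be mapped in the low 4 GiB");
  return static_cast<uint32_t>(value);
}

struct Class {};

// Mirrors the kBootImageAddress case: take the pinned boot-image Class,
// narrow its address, and (in real codegen) emit it as an immediate.
uint32_t EncodeBootImageAddress(const Class* cls) {
  uint32_t address = CheckedNarrow(reinterpret_cast<uintptr_t>(cls));
  assert(address != 0u);  // DCHECK_NE(address, 0u) in the real code.
  return address;
}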
diff --git a/compiler/optimizing/dex_cache_array_fixups_arm.cc b/compiler/optimizing/dex_cache_array_fixups_arm.cc
index 10a36c6ff4..9ddcd563ca 100644
--- a/compiler/optimizing/dex_cache_array_fixups_arm.cc
+++ b/compiler/optimizing/dex_cache_array_fixups_arm.cc
@@ -59,29 +59,15 @@ class DexCacheArrayFixupsVisitor : public HGraphVisitor {
}
private:
- void VisitLoadClass(HLoadClass* load_class) OVERRIDE {
- // If this is a load with PC-relative access to the dex cache types array,
- // we need to add the dex cache arrays base as the special input.
- if (load_class->GetLoadKind() == HLoadClass::LoadKind::kDexCachePcRelative) {
- // Initialize base for target dex file if needed.
- const DexFile& dex_file = load_class->GetDexFile();
- HArmDexCacheArraysBase* base = GetOrCreateDexCacheArrayBase(dex_file);
- // Update the element offset in base.
- DexCacheArraysLayout layout(kArmPointerSize, &dex_file);
- base->UpdateElementOffset(layout.TypeOffset(load_class->GetTypeIndex()));
- // Add the special argument base to the load.
- load_class->AddSpecialInput(base);
- }
- }
-
void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) OVERRIDE {
// If this is an invoke with PC-relative access to the dex cache methods array,
// we need to add the dex cache arrays base as the special input.
if (invoke->HasPcRelativeDexCache() &&
!IsCallFreeIntrinsic<IntrinsicLocationsBuilderARMType>(invoke, codegen_)) {
- HArmDexCacheArraysBase* base = GetOrCreateDexCacheArrayBase(invoke->GetDexFile());
+ HArmDexCacheArraysBase* base =
+ GetOrCreateDexCacheArrayBase(invoke->GetDexFileForPcRelativeDexCache());
// Update the element offset in base.
- DexCacheArraysLayout layout(kArmPointerSize, &invoke->GetDexFile());
+ DexCacheArraysLayout layout(kArmPointerSize, &invoke->GetDexFileForPcRelativeDexCache());
base->UpdateElementOffset(layout.MethodOffset(invoke->GetDexMethodIndex()));
// Add the special argument base to the method.
DCHECK(!invoke->HasCurrentMethodInput());
diff --git a/compiler/optimizing/dex_cache_array_fixups_mips.cc b/compiler/optimizing/dex_cache_array_fixups_mips.cc
index 31fff26dd5..04a4294c48 100644
--- a/compiler/optimizing/dex_cache_array_fixups_mips.cc
+++ b/compiler/optimizing/dex_cache_array_fixups_mips.cc
@@ -53,30 +53,16 @@ class DexCacheArrayFixupsVisitor : public HGraphVisitor {
}
private:
- void VisitLoadClass(HLoadClass* load_class) OVERRIDE {
- // If this is a load with PC-relative access to the dex cache types array,
- // we need to add the dex cache arrays base as the special input.
- if (load_class->GetLoadKind() == HLoadClass::LoadKind::kDexCachePcRelative) {
- // Initialize base for target dex file if needed.
- const DexFile& dex_file = load_class->GetDexFile();
- HMipsDexCacheArraysBase* base = GetOrCreateDexCacheArrayBase(dex_file);
- // Update the element offset in base.
- DexCacheArraysLayout layout(kMipsPointerSize, &dex_file);
- base->UpdateElementOffset(layout.TypeOffset(load_class->GetTypeIndex()));
- // Add the special argument base to the load.
- load_class->AddSpecialInput(base);
- }
- }
-
void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) OVERRIDE {
// If this is an invoke with PC-relative access to the dex cache methods array,
// we need to add the dex cache arrays base as the special input.
if (invoke->HasPcRelativeDexCache() &&
!IsCallFreeIntrinsic<IntrinsicLocationsBuilderMIPS>(invoke, codegen_)) {
// Initialize base for target method dex file if needed.
- HMipsDexCacheArraysBase* base = GetOrCreateDexCacheArrayBase(invoke->GetDexFile());
+ HMipsDexCacheArraysBase* base =
+ GetOrCreateDexCacheArrayBase(invoke->GetDexFileForPcRelativeDexCache());
// Update the element offset in base.
- DexCacheArraysLayout layout(kMipsPointerSize, &invoke->GetDexFile());
+ DexCacheArraysLayout layout(kMipsPointerSize, &invoke->GetDexFileForPcRelativeDexCache());
base->UpdateElementOffset(layout.MethodOffset(invoke->GetDexMethodIndex()));
// Add the special argument base to the method.
DCHECK(!invoke->HasCurrentMethodInput());
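Both fixup passes now derive the base and the layout from the same GetDexFileForPcRelativeDexCache() result. The per-dex-file base object they share works roughly as below; a reduced sketch with stand-in types (the real HMipsDexCacheArraysBase keeps the minimum element offset so the base lands near the accessed slots):

#include <cstdint>
#include <map>

// Stand-ins for the fixup-pass pieces; illustrative, not the ART classes.
struct DexFile {};

struct DexCacheArraysBase {
  uint32_t element_offset = UINT32_MAX;
  // Keep the smallest offset seen, so every access from this base stays
  // within the instruction's displacement range.
  void UpdateElementOffset(uint32_t offset) {
    if (offset < element_offset) element_offset = offset;
  }
};

class Fixups {
 public:
  // One lazily created base per dex file, shared by all users in the method.
  DexCacheArraysBase* GetOrCreateDexCacheArrayBase(const DexFile* file) {
    return &bases_[file];  // std::map default-constructs on first use.
  }
 private:
  std::map<const DexFile*, DexCacheArraysBase> bases_;
};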
diff --git a/compiler/optimizing/graph_visualizer.cc b/compiler/optimizing/graph_visualizer.cc
index 09dcefa02c..f6fba883bd 100644
--- a/compiler/optimizing/graph_visualizer.cc
+++ b/compiler/optimizing/graph_visualizer.cc
@@ -464,6 +464,11 @@ class HGraphVisualizerPrinter : public HGraphDelegateVisitor {
StartAttributeStream("intrinsic") << invoke->GetIntrinsic();
}
+ void VisitInvokePolymorphic(HInvokePolymorphic* invoke) OVERRIDE {
+ VisitInvoke(invoke);
+ StartAttributeStream("invoke_type") << "InvokePolymorphic";
+ }
+
void VisitInstanceFieldGet(HInstanceFieldGet* iget) OVERRIDE {
StartAttributeStream("field_name") <<
iget->GetFieldInfo().GetDexFile().PrettyField(iget->GetFieldInfo().GetFieldIndex(),
diff --git a/compiler/optimizing/induction_var_range.cc b/compiler/optimizing/induction_var_range.cc
index d5c4c2fa69..6d8ae75460 100644
--- a/compiler/optimizing/induction_var_range.cc
+++ b/compiler/optimizing/induction_var_range.cc
@@ -368,10 +368,14 @@ void InductionVarRange::Replace(HInstruction* instruction,
}
}
-bool InductionVarRange::IsFinite(HLoopInformation* loop) const {
+bool InductionVarRange::IsFinite(HLoopInformation* loop, /*out*/ int64_t* tc) const {
HInductionVarAnalysis::InductionInfo *trip =
induction_analysis_->LookupInfo(loop, GetLoopControl(loop));
- return trip != nullptr && !IsUnsafeTripCount(trip);
+ if (trip != nullptr && !IsUnsafeTripCount(trip)) {
+ IsConstant(trip->op_a, kExact, tc);
+ return true;
+ }
+ return false;
}
//
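Callers of the new IsFinite overload must seed the out-parameter themselves, because the trip count is written only when it is a known exact constant (SimplifyInnerLoop seeds it with 0, as shown later in this change). A small stand-alone illustration of that contract (IsFiniteStub is hypothetical):

#include <cstdint>
#include <iostream>

// Stand-in for InductionVarRange::IsFinite's out-parameter contract: the
// trip count is written only when it is a known exact constant, so callers
// must initialize *tc themselves.
bool IsFiniteStub(bool finite, bool known_constant, int64_t value, int64_t* tc) {
  if (!finite) return false;
  if (known_constant) *tc = value;  // IsConstant(trip->op_a, kExact, tc)
  return true;
}

int main() {
  int64_t tc = 0;
  if (IsFiniteStub(/*finite=*/true, /*known_constant=*/false, 100, &tc)) {
    std::cout << "finite, tc=" << tc << "\n";  // tc stays 0: count unknown.
  }
  return 0;
}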
diff --git a/compiler/optimizing/induction_var_range.h b/compiler/optimizing/induction_var_range.h
index ba14847d82..6c424b78b9 100644
--- a/compiler/optimizing/induction_var_range.h
+++ b/compiler/optimizing/induction_var_range.h
@@ -150,9 +150,9 @@ class InductionVarRange {
}
/**
- * Checks if header logic of a loop terminates.
+ * Checks if the header logic of a loop terminates. Sets the trip-count tc when it is known exactly.
*/
- bool IsFinite(HLoopInformation* loop) const;
+ bool IsFinite(HLoopInformation* loop, /*out*/ int64_t* tc) const;
private:
/*
diff --git a/compiler/optimizing/inliner.cc b/compiler/optimizing/inliner.cc
index e5d05e9e6d..50aa4425d9 100644
--- a/compiler/optimizing/inliner.cc
+++ b/compiler/optimizing/inliner.cc
@@ -308,8 +308,10 @@ ArtMethod* HInliner::TryCHADevirtualization(ArtMethod* resolved_method) {
}
bool HInliner::TryInline(HInvoke* invoke_instruction) {
- if (invoke_instruction->IsInvokeUnresolved()) {
- return false; // Don't bother to move further if we know the method is unresolved.
+ if (invoke_instruction->IsInvokeUnresolved() ||
+ invoke_instruction->IsInvokePolymorphic()) {
+ return false; // Don't bother to move further if we know the method is unresolved
+ // or the invoke is polymorphic.
}
ScopedObjectAccess soa(Thread::Current());
@@ -472,10 +474,10 @@ bool HInliner::TryInlineMonomorphicCall(HInvoke* invoke_instruction,
HInstruction* receiver = invoke_instruction->InputAt(0);
HInstruction* cursor = invoke_instruction->GetPrevious();
HBasicBlock* bb_cursor = invoke_instruction->GetBlock();
- Handle<mirror::Class> handle = handles_->NewHandle(GetMonomorphicType(classes));
+ Handle<mirror::Class> monomorphic_type = handles_->NewHandle(GetMonomorphicType(classes));
if (!TryInlineAndReplace(invoke_instruction,
resolved_method,
- ReferenceTypeInfo::Create(handle, /* is_exact */ true),
+ ReferenceTypeInfo::Create(monomorphic_type, /* is_exact */ true),
/* do_rtp */ false,
/* cha_devirtualize */ false)) {
return false;
@@ -486,7 +488,7 @@ bool HInliner::TryInlineMonomorphicCall(HInvoke* invoke_instruction,
cursor,
bb_cursor,
class_index,
- GetMonomorphicType(classes),
+ monomorphic_type,
invoke_instruction,
/* with_deoptimization */ true);
@@ -531,11 +533,9 @@ HInstruction* HInliner::AddTypeGuard(HInstruction* receiver,
HInstruction* cursor,
HBasicBlock* bb_cursor,
dex::TypeIndex class_index,
- mirror::Class* klass,
+ Handle<mirror::Class> klass,
HInstruction* invoke_instruction,
bool with_deoptimization) {
- ScopedAssertNoThreadSuspension sants("Adding compiler type guard");
-
ClassLinker* class_linker = caller_compilation_unit_.GetClassLinker();
HInstanceFieldGet* receiver_class = BuildGetReceiverClass(
class_linker, receiver, invoke_instruction->GetDexPc());
@@ -546,19 +546,20 @@ HInstruction* HInliner::AddTypeGuard(HInstruction* receiver,
}
const DexFile& caller_dex_file = *caller_compilation_unit_.GetDexFile();
- bool is_referrer = (klass == outermost_graph_->GetArtMethod()->GetDeclaringClass());
+ bool is_referrer = (klass.Get() == outermost_graph_->GetArtMethod()->GetDeclaringClass());
// Note that we will just compare the classes, so we don't need Java semantics access checks.
// Note that the type index and the dex file are relative to the method this type guard is
// inlined into.
HLoadClass* load_class = new (graph_->GetArena()) HLoadClass(graph_->GetCurrentMethod(),
class_index,
caller_dex_file,
+ klass,
is_referrer,
invoke_instruction->GetDexPc(),
/* needs_access_check */ false);
bb_cursor->InsertInstructionAfter(load_class, receiver_class);
// Sharpen after adding the instruction, as the sharpening may remove inputs.
- HSharpening::SharpenClass(load_class, klass, handles_, codegen_, compiler_driver_);
+ HSharpening::SharpenClass(load_class, codegen_, compiler_driver_);
// TODO: Extend reference type propagation to understand the guard.
HNotEqual* compare = new (graph_->GetArena()) HNotEqual(load_class, receiver_class);
@@ -635,7 +636,7 @@ bool HInliner::TryInlinePolymorphicCall(HInvoke* invoke_instruction,
cursor,
bb_cursor,
class_index,
- handle.Get(),
+ handle,
invoke_instruction,
deoptimize);
if (deoptimize) {
@@ -1428,15 +1429,6 @@ bool HInliner::TryBuildAndInlineHelper(HInvoke* invoke_instruction,
return false;
}
- if (current->IsNewInstance() &&
- (current->AsNewInstance()->GetEntrypoint() == kQuickAllocObjectWithAccessCheck)) {
- VLOG(compiler) << "Method " << callee_dex_file.PrettyMethod(method_index)
- << " could not be inlined because it is using an entrypoint"
- << " with access checks";
- // Allocation entrypoint does not handle inlined frames.
- return false;
- }
-
if (current->IsNewArray() &&
(current->AsNewArray()->GetEntrypoint() == kQuickAllocArrayWithAccessCheck)) {
VLOG(compiler) << "Method " << callee_dex_file.PrettyMethod(method_index)
diff --git a/compiler/optimizing/inliner.h b/compiler/optimizing/inliner.h
index 4c0b990f26..11aacab802 100644
--- a/compiler/optimizing/inliner.h
+++ b/compiler/optimizing/inliner.h
@@ -170,7 +170,7 @@ class HInliner : public HOptimization {
HInstruction* cursor,
HBasicBlock* bb_cursor,
dex::TypeIndex class_index,
- mirror::Class* klass,
+ Handle<mirror::Class> klass,
HInstruction* invoke_instruction,
bool with_deoptimization)
REQUIRES_SHARED(Locks::mutator_lock_);
diff --git a/compiler/optimizing/instruction_builder.cc b/compiler/optimizing/instruction_builder.cc
index 768b1d80a1..8ed0e7fa06 100644
--- a/compiler/optimizing/instruction_builder.cc
+++ b/compiler/optimizing/instruction_builder.cc
@@ -207,10 +208,8 @@ void HInstructionBuilder::InitializeInstruction(HInstruction* instruction) {
HEnvironment* environment = new (arena_) HEnvironment(
arena_,
current_locals_->size(),
- graph_->GetDexFile(),
- graph_->GetMethodIdx(),
+ graph_->GetArtMethod(),
instruction->GetDexPc(),
- graph_->GetInvokeType(),
instruction);
environment->CopyFrom(*current_locals_);
instruction->SetRawEnvironment(environment);
@@ -906,51 +905,69 @@ bool HInstructionBuilder::BuildInvoke(const Instruction& instruction,
false /* is_unresolved */);
}
+bool HInstructionBuilder::BuildInvokePolymorphic(const Instruction& instruction ATTRIBUTE_UNUSED,
+ uint32_t dex_pc,
+ uint32_t method_idx,
+ uint32_t proto_idx,
+ uint32_t number_of_vreg_arguments,
+ bool is_range,
+ uint32_t* args,
+ uint32_t register_index) {
+ const char* descriptor = dex_file_->GetShorty(proto_idx);
+ DCHECK_EQ(1 + ArtMethod::NumArgRegisters(descriptor), number_of_vreg_arguments);
+ Primitive::Type return_type = Primitive::GetType(descriptor[0]);
+ size_t number_of_arguments = strlen(descriptor);
+ HInvoke* invoke = new (arena_) HInvokePolymorphic(arena_,
+ number_of_arguments,
+ return_type,
+ dex_pc,
+ method_idx);
+ return HandleInvoke(invoke,
+ number_of_vreg_arguments,
+ args,
+ register_index,
+ is_range,
+ descriptor,
+ nullptr /* clinit_check */,
+ false /* is_unresolved */);
+}
+
bool HInstructionBuilder::BuildNewInstance(dex::TypeIndex type_index, uint32_t dex_pc) {
ScopedObjectAccess soa(Thread::Current());
- StackHandleScope<1> hs(soa.Self());
Handle<mirror::DexCache> dex_cache = dex_compilation_unit_->GetDexCache();
- Handle<mirror::Class> resolved_class(hs.NewHandle(dex_cache->GetResolvedType(type_index)));
- const DexFile& outer_dex_file = *outer_compilation_unit_->GetDexFile();
Handle<mirror::DexCache> outer_dex_cache = outer_compilation_unit_->GetDexCache();
- bool finalizable;
- bool needs_access_check = NeedsAccessCheck(type_index, dex_cache, &finalizable);
-
- // Only the non-resolved entrypoint handles the finalizable class case. If we
- // need access checks, then we haven't resolved the method and the class may
- // again be finalizable.
- QuickEntrypointEnum entrypoint = (finalizable || needs_access_check)
- ? kQuickAllocObject
- : kQuickAllocObjectInitialized;
-
if (outer_dex_cache.Get() != dex_cache.Get()) {
// We currently do not support inlining allocations across dex files.
return false;
}
- HLoadClass* load_class = new (arena_) HLoadClass(
- graph_->GetCurrentMethod(),
- type_index,
- outer_dex_file,
- IsOutermostCompilingClass(type_index),
- dex_pc,
- needs_access_check);
+ HLoadClass* load_class = BuildLoadClass(type_index, dex_pc, /* check_access */ true);
- AppendInstruction(load_class);
HInstruction* cls = load_class;
- if (!IsInitialized(resolved_class)) {
+ Handle<mirror::Class> klass = load_class->GetClass();
+
+ if (!IsInitialized(klass)) {
cls = new (arena_) HClinitCheck(load_class, dex_pc);
AppendInstruction(cls);
}
+ // Only the access check entrypoint handles the finalizable class case. If we
+ // need access checks, then we haven't resolved the class and it may again be
+ // finalizable.
+ QuickEntrypointEnum entrypoint = kQuickAllocObjectInitialized;
+ if (load_class->NeedsAccessCheck() || klass->IsFinalizable() || !klass->IsInstantiable()) {
+ entrypoint = kQuickAllocObjectWithChecks;
+ }
+
+ // Consider classes we haven't resolved as potentially finalizable.
+ bool finalizable = (klass.Get() == nullptr) || klass->IsFinalizable();
+
AppendInstruction(new (arena_) HNewInstance(
cls,
- graph_->GetCurrentMethod(),
dex_pc,
type_index,
*dex_compilation_unit_->GetDexFile(),
- needs_access_check,
finalizable,
entrypoint));
return true;
@@ -991,7 +1008,6 @@ HClinitCheck* HInstructionBuilder::ProcessClinitCheckForInvoke(
ArtMethod* resolved_method,
uint32_t method_idx,
HInvokeStaticOrDirect::ClinitCheckRequirement* clinit_check_requirement) {
- const DexFile& outer_dex_file = *outer_compilation_unit_->GetDexFile();
Thread* self = Thread::Current();
StackHandleScope<2> hs(self);
Handle<mirror::DexCache> dex_cache = dex_compilation_unit_->GetDexCache();
@@ -1019,15 +1035,9 @@ HClinitCheck* HInstructionBuilder::ProcessClinitCheckForInvoke(
*clinit_check_requirement = HInvokeStaticOrDirect::ClinitCheckRequirement::kNone;
} else if (storage_index.IsValid()) {
*clinit_check_requirement = HInvokeStaticOrDirect::ClinitCheckRequirement::kExplicit;
- HLoadClass* load_class = new (arena_) HLoadClass(
- graph_->GetCurrentMethod(),
- storage_index,
- outer_dex_file,
- is_outer_class,
- dex_pc,
- /*needs_access_check*/ false);
- AppendInstruction(load_class);
- clinit_check = new (arena_) HClinitCheck(load_class, dex_pc);
+ HLoadClass* cls = BuildLoadClass(
+ storage_index, dex_pc, /* check_access */ false, /* outer */ true);
+ clinit_check = new (arena_) HClinitCheck(cls, dex_pc);
AppendInstruction(clinit_check);
}
return clinit_check;
@@ -1349,7 +1359,6 @@ bool HInstructionBuilder::BuildStaticFieldAccess(const Instruction& instruction,
}
Primitive::Type field_type = resolved_field->GetTypeAsPrimitiveType();
- const DexFile& outer_dex_file = *outer_compilation_unit_->GetDexFile();
Handle<mirror::DexCache> outer_dex_cache = outer_compilation_unit_->GetDexCache();
Handle<mirror::Class> outer_class(hs.NewHandle(GetOutermostCompilingClass()));
@@ -1377,16 +1386,10 @@ bool HInstructionBuilder::BuildStaticFieldAccess(const Instruction& instruction,
}
}
- HLoadClass* constant = new (arena_) HLoadClass(graph_->GetCurrentMethod(),
- storage_index,
- outer_dex_file,
- is_outer_class,
- dex_pc,
- /*needs_access_check*/ false);
- AppendInstruction(constant);
+ HLoadClass* constant = BuildLoadClass(
+ storage_index, dex_pc, /* check_access */ false, /* outer */ true);
HInstruction* cls = constant;
-
Handle<mirror::Class> klass(hs.NewHandle(resolved_field->GetDeclaringClass()));
if (!IsInitialized(klass)) {
cls = new (arena_) HClinitCheck(constant, dex_pc);
@@ -1633,33 +1636,53 @@ static TypeCheckKind ComputeTypeCheckKind(Handle<mirror::Class> cls)
}
}
-void HInstructionBuilder::BuildTypeCheck(const Instruction& instruction,
- uint8_t destination,
- uint8_t reference,
- dex::TypeIndex type_index,
- uint32_t dex_pc) {
+HLoadClass* HInstructionBuilder::BuildLoadClass(dex::TypeIndex type_index,
+ uint32_t dex_pc,
+ bool check_access,
+ bool outer) {
ScopedObjectAccess soa(Thread::Current());
- StackHandleScope<1> hs(soa.Self());
- const DexFile& dex_file = *dex_compilation_unit_->GetDexFile();
- Handle<mirror::DexCache> dex_cache = dex_compilation_unit_->GetDexCache();
- Handle<mirror::Class> resolved_class(hs.NewHandle(dex_cache->GetResolvedType(type_index)));
-
- bool can_access = compiler_driver_->CanAccessTypeWithoutChecks(
- dex_compilation_unit_->GetDexMethodIndex(),
- dex_cache,
- type_index);
+ const DexCompilationUnit* compilation_unit =
+ outer ? outer_compilation_unit_ : dex_compilation_unit_;
+ const DexFile& dex_file = *compilation_unit->GetDexFile();
+ Handle<mirror::DexCache> dex_cache = compilation_unit->GetDexCache();
+ bool is_accessible = false;
+ Handle<mirror::Class> klass = handles_->NewHandle(dex_cache->GetResolvedType(type_index));
+ if (!check_access) {
+ is_accessible = true;
+ } else if (klass.Get() != nullptr) {
+ if (klass->IsPublic()) {
+ is_accessible = true;
+ } else {
+ mirror::Class* compiling_class = GetCompilingClass();
+ if (compiling_class != nullptr && compiling_class->CanAccess(klass.Get())) {
+ is_accessible = true;
+ }
+ }
+ }
- HInstruction* object = LoadLocal(reference, Primitive::kPrimNot);
- HLoadClass* cls = new (arena_) HLoadClass(
+ HLoadClass* load_class = new (arena_) HLoadClass(
graph_->GetCurrentMethod(),
type_index,
dex_file,
- IsOutermostCompilingClass(type_index),
+ klass,
+ klass.Get() != nullptr && (klass.Get() == GetOutermostCompilingClass()),
dex_pc,
- !can_access);
- AppendInstruction(cls);
+ !is_accessible);
- TypeCheckKind check_kind = ComputeTypeCheckKind(resolved_class);
+ AppendInstruction(load_class);
+ return load_class;
+}
+
+void HInstructionBuilder::BuildTypeCheck(const Instruction& instruction,
+ uint8_t destination,
+ uint8_t reference,
+ dex::TypeIndex type_index,
+ uint32_t dex_pc) {
+ HInstruction* object = LoadLocal(reference, Primitive::kPrimNot);
+ HLoadClass* cls = BuildLoadClass(type_index, dex_pc, /* check_access */ true);
+
+ ScopedObjectAccess soa(Thread::Current());
+ TypeCheckKind check_kind = ComputeTypeCheckKind(cls->GetClass());
if (instruction.Opcode() == Instruction::INSTANCE_OF) {
AppendInstruction(new (arena_) HInstanceOf(object, cls, check_kind, dex_pc));
UpdateLocal(destination, current_block_->GetLastInstruction());
@@ -1916,6 +1939,37 @@ bool HInstructionBuilder::ProcessDexInstruction(const Instruction& instruction,
break;
}
+ case Instruction::INVOKE_POLYMORPHIC: {
+ uint16_t method_idx = instruction.VRegB_45cc();
+ uint16_t proto_idx = instruction.VRegH_45cc();
+ uint32_t number_of_vreg_arguments = instruction.VRegA_45cc();
+ uint32_t args[5];
+ instruction.GetVarArgs(args);
+ return BuildInvokePolymorphic(instruction,
+ dex_pc,
+ method_idx,
+ proto_idx,
+ number_of_vreg_arguments,
+ false,
+ args,
+ -1);
+ }
+
+ case Instruction::INVOKE_POLYMORPHIC_RANGE: {
+ uint16_t method_idx = instruction.VRegB_4rcc();
+ uint16_t proto_idx = instruction.VRegH_4rcc();
+ uint32_t number_of_vreg_arguments = instruction.VRegA_4rcc();
+ uint32_t register_index = instruction.VRegC_4rcc();
+ return BuildInvokePolymorphic(instruction,
+ dex_pc,
+ method_idx,
+ proto_idx,
+ number_of_vreg_arguments,
+ true,
+ nullptr,
+ register_index);
+ }
+
case Instruction::NEG_INT: {
Unop_12x<HNeg>(instruction, Primitive::kPrimInt, dex_pc);
break;
@@ -2632,21 +2686,7 @@ bool HInstructionBuilder::ProcessDexInstruction(const Instruction& instruction,
case Instruction::CONST_CLASS: {
dex::TypeIndex type_index(instruction.VRegB_21c());
- // `CanAccessTypeWithoutChecks` will tell whether the method being
- // built is trying to access its own class, so that the generated
- // code can optimize for this case. However, the optimization does not
- // work for inlining, so we use `IsOutermostCompilingClass` instead.
- ScopedObjectAccess soa(Thread::Current());
- Handle<mirror::DexCache> dex_cache = dex_compilation_unit_->GetDexCache();
- bool can_access = compiler_driver_->CanAccessTypeWithoutChecks(
- dex_compilation_unit_->GetDexMethodIndex(), dex_cache, type_index);
- AppendInstruction(new (arena_) HLoadClass(
- graph_->GetCurrentMethod(),
- type_index,
- *dex_file_,
- IsOutermostCompilingClass(type_index),
- dex_pc,
- !can_access));
+ BuildLoadClass(type_index, dex_pc, /* check_access */ true);
UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction());
break;
}
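The sizing in BuildInvokePolymorphic comes straight from the proto's shorty: its first character is the return type, the rest are argument types, and strlen(shorty) conveniently equals receiver-plus-arguments because the return character stands in for the implicit MethodHandle receiver. A worked stand-alone example (NumArgRegisters reimplemented here for illustration; wide types take two vregs):

#include <cassert>
#include <cstring>

// Illustrative reimplementation of the vreg counting behind the DCHECK in
// BuildInvokePolymorphic: J (long) and D (double) occupy two vregs each.
size_t NumArgRegisters(const char* shorty) {
  size_t vregs = 0;
  for (const char* p = shorty + 1; *p != '\0'; ++p) {  // Skip return type.
    vregs += (*p == 'J' || *p == 'D') ? 2 : 1;
  }
  return vregs;
}

int main() {
  const char* descriptor = "LJL";  // Returns a reference, takes (long, reference).
  // strlen = 3 = 1 receiver + 2 explicit arguments; the return-type character
  // stands in for the implicit MethodHandle receiver in this count.
  size_t number_of_arguments = strlen(descriptor);
  // Vreg count: 1 (receiver) + 2 (long) + 1 (reference) = 4.
  size_t number_of_vreg_arguments = 1 + NumArgRegisters(descriptor);
  assert(number_of_arguments == 3);
  assert(number_of_vreg_arguments == 4);
  return 0;
}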
diff --git a/compiler/optimizing/instruction_builder.h b/compiler/optimizing/instruction_builder.h
index f29e522040..5efe95094c 100644
--- a/compiler/optimizing/instruction_builder.h
+++ b/compiler/optimizing/instruction_builder.h
@@ -46,9 +46,11 @@ class HInstructionBuilder : public ValueObject {
CompilerDriver* driver,
const uint8_t* interpreter_metadata,
OptimizingCompilerStats* compiler_stats,
- Handle<mirror::DexCache> dex_cache)
+ Handle<mirror::DexCache> dex_cache,
+ VariableSizedHandleScope* handles)
: arena_(graph->GetArena()),
graph_(graph),
+ handles_(handles),
dex_file_(dex_file),
code_item_(code_item),
return_type_(return_type),
@@ -175,6 +177,17 @@ class HInstructionBuilder : public ValueObject {
uint32_t* args,
uint32_t register_index);
+ // Builds an invocation node for invoke-polymorphic and returns whether the
+ // instruction is supported.
+ bool BuildInvokePolymorphic(const Instruction& instruction,
+ uint32_t dex_pc,
+ uint32_t method_idx,
+ uint32_t proto_idx,
+ uint32_t number_of_vreg_arguments,
+ bool is_range,
+ uint32_t* args,
+ uint32_t register_index);
+
// Builds a new array node and the instructions that fill it.
void BuildFilledNewArray(uint32_t dex_pc,
dex::TypeIndex type_index,
@@ -212,6 +225,14 @@ class HInstructionBuilder : public ValueObject {
// Builds an instruction sequence for a switch statement.
void BuildSwitch(const Instruction& instruction, uint32_t dex_pc);
+ // Builds an `HLoadClass` loading the given `type_index`. If `outer` is true,
+ // this method will use the outer class's dex file to look up the type at
+ // `type_index`.
+ HLoadClass* BuildLoadClass(dex::TypeIndex type_index,
+ uint32_t dex_pc,
+ bool check_access,
+ bool outer = false);
+
// Returns the outer-most compiling method's class.
mirror::Class* GetOutermostCompilingClass() const;
@@ -271,6 +292,7 @@ class HInstructionBuilder : public ValueObject {
ArenaAllocator* const arena_;
HGraph* const graph_;
+ VariableSizedHandleScope* handles_;
// The dex file where the method being compiled is, and the bytecode data.
const DexFile* const dex_file_;
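The is_accessible computation in BuildLoadClass (earlier in this change) reduces to three cases: unresolved classes keep the runtime check, public classes are always accessible, and anything else defers to the compiling class. A stand-alone restatement with visibility simplified to same-package (the real mirror::Class::CanAccess is richer):

// Illustrative stand-in for the pieces BuildLoadClass consults.
struct Class {
  bool is_public = false;
  const void* package = nullptr;  // Same-package access, simplified.
};

// Mirrors the check_access branch of BuildLoadClass: resolved and public, or
// resolved and visible from the compiling class; otherwise keep the runtime
// access check (the HLoadClass gets needs_access_check = !is_accessible).
bool IsAccessible(const Class* klass, const Class* compiling_class) {
  if (klass == nullptr) return false;  // Unresolved: must check at runtime.
  if (klass->is_public) return true;
  return compiling_class != nullptr && compiling_class->package == klass->package;
}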
diff --git a/compiler/optimizing/intrinsics.cc b/compiler/optimizing/intrinsics.cc
index fc6ff7b197..17d683f357 100644
--- a/compiler/optimizing/intrinsics.cc
+++ b/compiler/optimizing/intrinsics.cc
@@ -145,7 +145,7 @@ void IntrinsicsRecognizer::Run() {
if (!CheckInvokeType(intrinsic, invoke)) {
LOG(WARNING) << "Found an intrinsic with unexpected invoke type: "
<< intrinsic << " for "
- << invoke->GetDexFile().PrettyMethod(invoke->GetDexMethodIndex())
+ << art_method->PrettyMethod()
<< invoke->DebugName();
} else {
invoke->SetIntrinsic(intrinsic,
diff --git a/compiler/optimizing/intrinsics_mips.cc b/compiler/optimizing/intrinsics_mips.cc
index cda3185a45..f1ae549928 100644
--- a/compiler/optimizing/intrinsics_mips.cc
+++ b/compiler/optimizing/intrinsics_mips.cc
@@ -752,8 +752,9 @@ static void MathAbsFP(LocationSummary* locations,
FRegister in = locations->InAt(0).AsFpuRegister<FRegister>();
FRegister out = locations->Out().AsFpuRegister<FRegister>();
- // As a "quality of implementation", rather than pure "spec compliance", it is required that
- // Math.abs() clears the sign bit (but changes nothing else) for all numbers, including NaN.
+ // Note that, as a "quality of implementation" rather than pure "spec compliance", we
+ // require that Math.abs() clears the sign bit (but changes nothing else) for all
+ // numbers, including NaN (signaling NaN may become quiet though).
//
// The ABS.fmt instructions (abs.s and abs.d) do exactly that when NAN2008=1 (R6). For this case,
// both regular floating point numbers and NAN values are treated alike, only the sign bit is
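The contract the reworded comment describes, clear the sign bit and change nothing else, even for NaN, is a plain mask on the IEEE-754 encoding. A host-side illustration of the semantics (not the MIPS abs instruction; this pure bit version never quiets a signaling NaN, while the hardware path may):

#include <cassert>
#include <cstdint>
#include <cstring>

// Bit-exact Math.abs for doubles: clear bit 63, touch nothing else.
// This matches what abs.d provides under NAN2008=1 (R6).
double BitwiseAbs(double value) {
  uint64_t bits;
  std::memcpy(&bits, &value, sizeof(bits));
  bits &= ~(UINT64_C(1) << 63);  // Clear only the sign bit.
  double result;
  std::memcpy(&result, &bits, sizeof(result));
  return result;
}

int main() {
  assert(BitwiseAbs(-2.5) == 2.5);
  // A NaN stays a NaN with the same payload, only the sign bit cleared.
  uint64_t nan_bits = UINT64_C(0xFFF8000000001234);
  double nan;
  std::memcpy(&nan, &nan_bits, sizeof(nan));
  double abs_nan = BitwiseAbs(nan);
  uint64_t abs_bits;
  std::memcpy(&abs_bits, &abs_nan, sizeof(abs_bits));
  assert(abs_bits == UINT64_C(0x7FF8000000001234));
  return 0;
}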
diff --git a/compiler/optimizing/load_store_elimination.cc b/compiler/optimizing/load_store_elimination.cc
index 2856c3ea11..2d3c00fb97 100644
--- a/compiler/optimizing/load_store_elimination.cc
+++ b/compiler/optimizing/load_store_elimination.cc
@@ -943,6 +943,10 @@ class LSEVisitor : public HGraphVisitor {
HandleInvoke(invoke);
}
+ void VisitInvokePolymorphic(HInvokePolymorphic* invoke) OVERRIDE {
+ HandleInvoke(invoke);
+ }
+
void VisitClinitCheck(HClinitCheck* clinit) OVERRIDE {
HandleInvoke(clinit);
}
@@ -975,7 +979,7 @@ class LSEVisitor : public HGraphVisitor {
}
if (ref_info->IsSingletonAndRemovable() &&
!new_instance->IsFinalizable() &&
- !new_instance->NeedsAccessCheck()) {
+ !new_instance->NeedsChecks()) {
singleton_new_instances_.push_back(new_instance);
}
ArenaVector<HInstruction*>& heap_values =
diff --git a/compiler/optimizing/loop_optimization.cc b/compiler/optimizing/loop_optimization.cc
index 9d73e29602..95838380cc 100644
--- a/compiler/optimizing/loop_optimization.cc
+++ b/compiler/optimizing/loop_optimization.cc
@@ -161,26 +161,27 @@ void HLoopOptimization::RemoveLoop(LoopNode* node) {
void HLoopOptimization::TraverseLoopsInnerToOuter(LoopNode* node) {
for ( ; node != nullptr; node = node->next) {
+ // Visit inner loops first.
int current_induction_simplification_count = induction_simplication_count_;
if (node->inner != nullptr) {
TraverseLoopsInnerToOuter(node->inner);
}
- // Visit loop after its inner loops have been visited. If the induction of any inner
- // loop has been simplified, recompute the induction information of this loop first.
+ // Recompute induction information of this loop if the induction
+ // of any inner loop has been simplified.
if (current_induction_simplification_count != induction_simplication_count_) {
induction_range_.ReVisit(node->loop_info);
}
- // Repeat simplifications until no more changes occur. Note that since
- // each simplification consists of eliminating code (without introducing
- // new code), this process is always finite.
+ // Repeat simplifications in the body of this loop until no more changes occur.
+ // Note that since each simplification consists of eliminating code (without
+ // introducing new code), this process is always finite.
do {
simplified_ = false;
- SimplifyBlocks(node);
SimplifyInduction(node);
+ SimplifyBlocks(node);
} while (simplified_);
- // Remove inner loops when empty.
+ // Simplify inner loop.
if (node->inner == nullptr) {
- RemoveIfEmptyInnerLoop(node);
+ SimplifyInnerLoop(node);
}
}
}
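Reviewer note: the reordered traversal above pairs two ideas: process inner loops before outer ones, and re-run the per-loop simplifications to a local fixed point, which terminates because each pass only deletes code. A minimal standalone sketch of that shape, with hypothetical Node/SimplifyOnce stand-ins for the ART types:

struct Node {
  Node* next = nullptr;   // next loop at the same nesting depth
  Node* inner = nullptr;  // first nested loop
};

// Stand-in for SimplifyInduction/SimplifyBlocks; returns true if anything
// changed. A real pass only removes code, which bounds the iteration.
static bool SimplifyOnce(Node* /*node*/) { return false; }

static void TraverseInnerToOuter(Node* node) {
  for (; node != nullptr; node = node->next) {
    if (node->inner != nullptr) {
      TraverseInnerToOuter(node->inner);  // visit inner loops first
    }
    while (SimplifyOnce(node)) {
      // Repeat until no more changes occur (a local fixed point).
    }
  }
}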
@@ -198,7 +199,7 @@ void HLoopOptimization::SimplifyInduction(LoopNode* node) {
iset_->clear();
int32_t use_count = 0;
if (IsPhiInduction(phi) &&
- IsOnlyUsedAfterLoop(node->loop_info, phi, &use_count) &&
+ IsOnlyUsedAfterLoop(node->loop_info, phi, /*collect_loop_uses*/ false, &use_count) &&
// No uses, or no early-exit with proper replacement.
(use_count == 0 ||
(!IsEarlyExit(node->loop_info) && TryReplaceWithLastValue(phi, preheader)))) {
@@ -206,7 +207,6 @@ void HLoopOptimization::SimplifyInduction(LoopNode* node) {
RemoveFromCycle(i);
}
simplified_ = true;
- induction_simplication_count_++;
}
}
}
@@ -216,24 +216,14 @@ void HLoopOptimization::SimplifyBlocks(LoopNode* node) {
for (HBlocksInLoopIterator it(*node->loop_info); !it.Done(); it.Advance()) {
HBasicBlock* block = it.Current();
// Remove dead instructions from the loop-body.
- for (HBackwardInstructionIterator i(block->GetInstructions()); !i.Done(); i.Advance()) {
- HInstruction* instruction = i.Current();
- if (instruction->IsDeadAndRemovable()) {
- simplified_ = true;
- block->RemoveInstruction(instruction);
- }
- }
+ RemoveDeadInstructions(block->GetPhis());
+ RemoveDeadInstructions(block->GetInstructions());
// Remove trivial control flow blocks from the loop-body.
- HBasicBlock* succ = nullptr;
- if (IsGotoBlock(block, &succ) && succ->GetPredecessors().size() == 1) {
- // Trivial goto block can be removed.
- HBasicBlock* pred = block->GetSinglePredecessor();
+ if (block->GetPredecessors().size() == 1 &&
+ block->GetSuccessors().size() == 1 &&
+ block->GetSingleSuccessor()->GetPredecessors().size() == 1) {
simplified_ = true;
- pred->ReplaceSuccessor(block, succ);
- block->RemoveDominatedBlock(succ);
- block->DisconnectAndDelete();
- pred->AddDominatedBlock(succ);
- succ->SetDominator(pred);
+ block->MergeWith(block->GetSingleSuccessor());
} else if (block->GetSuccessors().size() == 2) {
// Trivial if block can be bypassed to either branch.
HBasicBlock* succ0 = block->GetSuccessors()[0];
@@ -258,55 +248,66 @@ void HLoopOptimization::SimplifyBlocks(LoopNode* node) {
}
}
-void HLoopOptimization::RemoveIfEmptyInnerLoop(LoopNode* node) {
+bool HLoopOptimization::SimplifyInnerLoop(LoopNode* node) {
HBasicBlock* header = node->loop_info->GetHeader();
HBasicBlock* preheader = node->loop_info->GetPreHeader();
// Ensure loop header logic is finite.
- if (!induction_range_.IsFinite(node->loop_info)) {
- return;
+ int64_t tc = 0;
+ if (!induction_range_.IsFinite(node->loop_info, &tc)) {
+ return false;
}
// Ensure there is only a single loop-body (besides the header).
HBasicBlock* body = nullptr;
for (HBlocksInLoopIterator it(*node->loop_info); !it.Done(); it.Advance()) {
if (it.Current() != header) {
if (body != nullptr) {
- return;
+ return false;
}
body = it.Current();
}
}
// Ensure there is only a single exit point.
if (header->GetSuccessors().size() != 2) {
- return;
+ return false;
}
HBasicBlock* exit = (header->GetSuccessors()[0] == body)
? header->GetSuccessors()[1]
: header->GetSuccessors()[0];
// Ensure exit can only be reached by exiting loop.
if (exit->GetPredecessors().size() != 1) {
- return;
+ return false;
}
- // Detect an empty loop: no side effects other than plain iteration. Replace
- // subsequent index uses, if any, with the last value and remove the loop.
+ // Detect either an empty loop (no side effects other than plain iteration) or
+ // a trivial loop (just iterating once). Replace subsequent index uses, if any,
+ // with the last value and remove the loop, possibly after unrolling its body.
+ HInstruction* phi = header->GetFirstPhi();
iset_->clear();
int32_t use_count = 0;
- if (IsEmptyHeader(header) &&
- IsEmptyBody(body) &&
- IsOnlyUsedAfterLoop(node->loop_info, header->GetFirstPhi(), &use_count) &&
- // No uses, or proper replacement.
- (use_count == 0 || TryReplaceWithLastValue(header->GetFirstPhi(), preheader))) {
- body->DisconnectAndDelete();
- exit->RemovePredecessor(header);
- header->RemoveSuccessor(exit);
- header->RemoveDominatedBlock(exit);
- header->DisconnectAndDelete();
- preheader->AddSuccessor(exit);
- preheader->AddInstruction(new (graph_->GetArena()) HGoto()); // global allocator
- preheader->AddDominatedBlock(exit);
- exit->SetDominator(preheader);
- // Update hierarchy.
- RemoveLoop(node);
+ if (IsEmptyHeader(header)) {
+ bool is_empty = IsEmptyBody(body);
+ if ((is_empty || tc == 1) &&
+ IsOnlyUsedAfterLoop(node->loop_info, phi, /*collect_loop_uses*/ true, &use_count) &&
+ // No uses, or proper replacement.
+ (use_count == 0 || TryReplaceWithLastValue(phi, preheader))) {
+ if (!is_empty) {
+ // Unroll the loop body, which sees the initial value of the index.
+ phi->ReplaceWith(phi->InputAt(0));
+ preheader->MergeInstructionsWith(body);
+ }
+ body->DisconnectAndDelete();
+ exit->RemovePredecessor(header);
+ header->RemoveSuccessor(exit);
+ header->RemoveDominatedBlock(exit);
+ header->DisconnectAndDelete();
+ preheader->AddSuccessor(exit);
+ preheader->AddInstruction(new (graph_->GetArena()) HGoto()); // global allocator
+ preheader->AddDominatedBlock(exit);
+ exit->SetDominator(preheader);
+ RemoveLoop(node); // update hierarchy
+ return true;
+ }
}
+ return false;
}
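Reviewer note: the new tc == 1 path generalizes the old empty-loop removal: a loop that provably runs exactly once can be unrolled by replacing the header phi with its initial input and splicing the body into the preheader. A source-level illustration of the effect, using stand-in functions rather than ART code:

#include <cstdio>

static void Body(int i) { std::printf("body(%d)\n", i); }

// Before: a loop whose trip-count analysis proves exactly one iteration.
static int SingleTripLoop() {
  int i = 0;
  do {
    Body(i);
    ++i;
  } while (i < 1);
  return i;  // the induction variable's last value
}

// After: the phi is replaced by its initial value inside the unrolled body,
// and uses after the loop get the last value from induction analysis.
static int Unrolled() {
  Body(0);   // the body sees the initial index
  return 1;  // last value
}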
bool HLoopOptimization::IsPhiInduction(HPhi* phi) {
@@ -374,12 +375,19 @@ bool HLoopOptimization::IsEmptyBody(HBasicBlock* block) {
bool HLoopOptimization::IsOnlyUsedAfterLoop(HLoopInformation* loop_info,
HInstruction* instruction,
+ bool collect_loop_uses,
/*out*/ int32_t* use_count) {
for (const HUseListNode<HInstruction*>& use : instruction->GetUses()) {
HInstruction* user = use.GetUser();
if (iset_->find(user) == iset_->end()) { // not excluded?
HLoopInformation* other_loop_info = user->GetBlock()->GetLoopInformation();
if (other_loop_info != nullptr && other_loop_info->IsIn(*loop_info)) {
+ // If collect_loop_uses is set, simply keep adding those uses to the set.
+ // Otherwise, reject uses inside the loop that were not already in the set.
+ if (collect_loop_uses) {
+ iset_->insert(user);
+ continue;
+ }
return false;
}
++*use_count;
@@ -388,40 +396,48 @@ bool HLoopOptimization::IsOnlyUsedAfterLoop(HLoopInformation* loop_info,
return true;
}
-void HLoopOptimization::ReplaceAllUses(HInstruction* instruction, HInstruction* replacement) {
- const HUseList<HInstruction*>& uses = instruction->GetUses();
- for (auto it = uses.begin(), end = uses.end(); it != end;) {
- HInstruction* user = it->GetUser();
- size_t index = it->GetIndex();
- ++it; // increment before replacing
- if (iset_->find(user) == iset_->end()) { // not excluded?
- user->ReplaceInput(replacement, index);
- induction_range_.Replace(user, instruction, replacement); // update induction
- }
- }
- const HUseList<HEnvironment*>& env_uses = instruction->GetEnvUses();
- for (auto it = env_uses.begin(), end = env_uses.end(); it != end;) {
- HEnvironment* user = it->GetUser();
- size_t index = it->GetIndex();
- ++it; // increment before replacing
- if (iset_->find(user->GetHolder()) == iset_->end()) { // not excluded?
- user->RemoveAsUserOfInput(index);
- user->SetRawEnvAt(index, replacement);
- replacement->AddEnvUseAt(user, index);
- }
- }
-}
-
bool HLoopOptimization::TryReplaceWithLastValue(HInstruction* instruction, HBasicBlock* block) {
// Try to replace outside uses with the last value. Environment uses can consume this
// value too, since any first true use is outside the loop (although this may imply
// that de-opting may look "ahead" a bit on the phi value). If there are only environment
// uses, the value is dropped altogether, since the computations have no effect.
if (induction_range_.CanGenerateLastValue(instruction)) {
- ReplaceAllUses(instruction, induction_range_.GenerateLastValue(instruction, graph_, block));
+ HInstruction* replacement = induction_range_.GenerateLastValue(instruction, graph_, block);
+ const HUseList<HInstruction*>& uses = instruction->GetUses();
+ for (auto it = uses.begin(), end = uses.end(); it != end;) {
+ HInstruction* user = it->GetUser();
+ size_t index = it->GetIndex();
+ ++it; // increment before replacing
+ if (iset_->find(user) == iset_->end()) { // not excluded?
+ user->ReplaceInput(replacement, index);
+ induction_range_.Replace(user, instruction, replacement); // update induction
+ }
+ }
+ const HUseList<HEnvironment*>& env_uses = instruction->GetEnvUses();
+ for (auto it = env_uses.begin(), end = env_uses.end(); it != end;) {
+ HEnvironment* user = it->GetUser();
+ size_t index = it->GetIndex();
+ ++it; // increment before replacing
+ if (iset_->find(user->GetHolder()) == iset_->end()) { // not excluded?
+ user->RemoveAsUserOfInput(index);
+ user->SetRawEnvAt(index, replacement);
+ replacement->AddEnvUseAt(user, index);
+ }
+ }
+ induction_simplication_count_++;
return true;
}
return false;
}
+void HLoopOptimization::RemoveDeadInstructions(const HInstructionList& list) {
+ for (HBackwardInstructionIterator i(list); !i.Done(); i.Advance()) {
+ HInstruction* instruction = i.Current();
+ if (instruction->IsDeadAndRemovable()) {
+ simplified_ = true;
+ instruction->GetBlock()->RemoveInstructionOrPhi(instruction);
+ }
+ }
+}
+
} // namespace art
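Reviewer note: RemoveDeadInstructions walks the list backwards so a consumer is visited before its inputs; once the consumer is gone, inputs that only fed it can be swept in the same pass. A simplified sketch over a plain vector (hypothetical Instr type; in ART, removing an instruction also decrements the use counts of its inputs, which this stand-in assumes is maintained externally):

#include <cstddef>
#include <vector>

struct Instr {
  int uses = 0;                  // non-environment uses
  bool has_side_effects = false;
};

static void RemoveDead(std::vector<Instr*>* list) {
  // Backward sweep: consumers go before their inputs, so chains that become
  // dead as their only user disappears can fall in one pass.
  for (size_t i = list->size(); i-- > 0;) {
    Instr* instr = (*list)[i];
    if (instr->uses == 0 && !instr->has_side_effects) {
      list->erase(list->begin() + i);  // only elements after i shift
    }
  }
}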
diff --git a/compiler/optimizing/loop_optimization.h b/compiler/optimizing/loop_optimization.h
index 0f05b24c37..9ddab4150c 100644
--- a/compiler/optimizing/loop_optimization.h
+++ b/compiler/optimizing/loop_optimization.h
@@ -60,19 +60,21 @@ class HLoopOptimization : public HOptimization {
void TraverseLoopsInnerToOuter(LoopNode* node);
+ // Simplification.
void SimplifyInduction(LoopNode* node);
void SimplifyBlocks(LoopNode* node);
- void RemoveIfEmptyInnerLoop(LoopNode* node);
+ bool SimplifyInnerLoop(LoopNode* node);
+ // Helpers.
bool IsPhiInduction(HPhi* phi);
bool IsEmptyHeader(HBasicBlock* block);
bool IsEmptyBody(HBasicBlock* block);
-
bool IsOnlyUsedAfterLoop(HLoopInformation* loop_info,
HInstruction* instruction,
+ bool collect_loop_uses,
/*out*/ int32_t* use_count);
- void ReplaceAllUses(HInstruction* instruction, HInstruction* replacement);
bool TryReplaceWithLastValue(HInstruction* instruction, HBasicBlock* block);
+ void RemoveDeadInstructions(const HInstructionList& list);
// Range information based on prior induction variable analysis.
InductionVarRange induction_range_;
diff --git a/compiler/optimizing/nodes.cc b/compiler/optimizing/nodes.cc
index d45fa11534..d15145e673 100644
--- a/compiler/optimizing/nodes.cc
+++ b/compiler/optimizing/nodes.cc
@@ -1853,6 +1853,14 @@ void HBasicBlock::DisconnectAndDelete() {
SetGraph(nullptr);
}
+void HBasicBlock::MergeInstructionsWith(HBasicBlock* other) {
+ DCHECK(EndsWithControlFlowInstruction());
+ RemoveInstruction(GetLastInstruction());
+ instructions_.Add(other->GetInstructions());
+ other->instructions_.SetBlockOfInstructions(this);
+ other->instructions_.Clear();
+}
+
void HBasicBlock::MergeWith(HBasicBlock* other) {
DCHECK_EQ(GetGraph(), other->GetGraph());
DCHECK(ContainsElement(dominated_blocks_, other));
@@ -1861,11 +1869,7 @@ void HBasicBlock::MergeWith(HBasicBlock* other) {
DCHECK(other->GetPhis().IsEmpty());
// Move instructions from `other` to `this`.
- DCHECK(EndsWithControlFlowInstruction());
- RemoveInstruction(GetLastInstruction());
- instructions_.Add(other->GetInstructions());
- other->instructions_.SetBlockOfInstructions(this);
- other->instructions_.Clear();
+ MergeInstructionsWith(other);
// Remove `other` from the loops it is included in.
for (HLoopInformationOutwardIterator it(*other); !it.Done(); it.Advance()) {
@@ -2387,6 +2391,14 @@ bool HInvoke::NeedsEnvironment() const {
return !opt.GetDoesNotNeedEnvironment();
}
+const DexFile& HInvokeStaticOrDirect::GetDexFileForPcRelativeDexCache() const {
+ ArtMethod* caller = GetEnvironment()->GetMethod();
+ ScopedObjectAccess soa(Thread::Current());
+ // `caller` is null for a top-level graph representing a method whose declaring
+ // class was not resolved.
+ return caller == nullptr ? GetBlock()->GetGraph()->GetDexFile() : *caller->GetDexFile();
+}
+
bool HInvokeStaticOrDirect::NeedsDexCacheOfDeclaringClass() const {
if (GetMethodLoadKind() != MethodLoadKind::kDexCacheViaMethod) {
return false;
@@ -2430,17 +2442,6 @@ std::ostream& operator<<(std::ostream& os, HInvokeStaticOrDirect::ClinitCheckReq
}
}
-// Helper for InstructionDataEquals to fetch the mirror Class out
-// from a kJitTableAddress LoadClass kind.
-// NO_THREAD_SAFETY_ANALYSIS because even though we're accessing
-// mirrors, they are stored in a variable size handle scope which is always
-// visited during a pause. Also, the only caller of this helper
-// only uses the mirror for pointer comparison.
-static inline mirror::Class* AsMirrorInternal(uint64_t address)
- NO_THREAD_SAFETY_ANALYSIS {
- return reinterpret_cast<StackReference<mirror::Class>*>(address)->AsMirrorPtr();
-}
-
bool HLoadClass::InstructionDataEquals(const HInstruction* other) const {
const HLoadClass* other_load_class = other->AsLoadClass();
// TODO: To allow GVN for HLoadClass from different dex files, we should compare the type
@@ -2451,11 +2452,12 @@ bool HLoadClass::InstructionDataEquals(const HInstruction* other) const {
}
switch (GetLoadKind()) {
case LoadKind::kBootImageAddress:
- return GetAddress() == other_load_class->GetAddress();
- case LoadKind::kJitTableAddress:
- return AsMirrorInternal(GetAddress()) == AsMirrorInternal(other_load_class->GetAddress());
+ case LoadKind::kJitTableAddress: {
+ ScopedObjectAccess soa(Thread::Current());
+ return GetClass().Get() == other_load_class->GetClass().Get();
+ }
default:
- DCHECK(HasTypeReference(GetLoadKind()) || HasDexCacheReference(GetLoadKind()));
+ DCHECK(HasTypeReference(GetLoadKind()));
return IsSameDexFile(GetDexFile(), other_load_class->GetDexFile());
}
}
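Reviewer note: the equality checks now go through handles under ScopedObjectAccess because, with a moving collector, a raw mirror pointer captured earlier may be stale; the comparison must read the current pointer while the heap cannot move objects. A generic sketch of that idea with stand-in types, not the ART API:

struct Obj {};

// Stand-in for a GC handle: an extra indirection whose slot the collector
// updates when it moves the object.
struct MyHandle {
  Obj** slot;
  Obj* Get() const { return *slot; }
};

// Stand-in for ScopedObjectAccess: while alive, objects cannot be moved.
struct ScopedAccess {
  ScopedAccess() { /* acquire shared mutator lock */ }
  ~ScopedAccess() { /* release it */ }
};

static bool SameObject(const MyHandle& a, const MyHandle& b) {
  ScopedAccess soa;           // pin a consistent view of the heap
  return a.Get() == b.Get();  // compare the current pointers, not stale ones
}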
@@ -2486,10 +2488,10 @@ std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs) {
return os << "BootImageLinkTimePcRelative";
case HLoadClass::LoadKind::kBootImageAddress:
return os << "BootImageAddress";
+ case HLoadClass::LoadKind::kBssEntry:
+ return os << "BssEntry";
case HLoadClass::LoadKind::kJitTableAddress:
return os << "JitTableAddress";
- case HLoadClass::LoadKind::kDexCachePcRelative:
- return os << "DexCachePcRelative";
case HLoadClass::LoadKind::kDexCacheViaMethod:
return os << "DexCacheViaMethod";
default:
@@ -2498,17 +2500,6 @@ std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs) {
}
}
-// Helper for InstructionDataEquals to fetch the mirror String out
-// from a kJitTableAddress LoadString kind.
-// NO_THREAD_SAFETY_ANALYSIS because even though we're accessing
-// mirrors, they are stored in a variable size handle scope which is always
-// visited during a pause. Also, the only caller of this helper
-// only uses the mirror for pointer comparison.
-static inline mirror::String* AsMirrorInternal(Handle<mirror::String> handle)
- NO_THREAD_SAFETY_ANALYSIS {
- return handle.Get();
-}
-
bool HLoadString::InstructionDataEquals(const HInstruction* other) const {
const HLoadString* other_load_string = other->AsLoadString();
// TODO: To allow GVN for HLoadString from different dex files, we should compare the strings
@@ -2519,8 +2510,10 @@ bool HLoadString::InstructionDataEquals(const HInstruction* other) const {
}
switch (GetLoadKind()) {
case LoadKind::kBootImageAddress:
- case LoadKind::kJitTableAddress:
- return AsMirrorInternal(GetString()) == AsMirrorInternal(other_load_string->GetString());
+ case LoadKind::kJitTableAddress: {
+ ScopedObjectAccess soa(Thread::Current());
+ return GetString().Get() == other_load_string->GetString().Get();
+ }
default:
return IsSameDexFile(GetDexFile(), other_load_string->GetDexFile());
}
@@ -2551,10 +2544,10 @@ std::ostream& operator<<(std::ostream& os, HLoadString::LoadKind rhs) {
return os << "BootImageAddress";
case HLoadString::LoadKind::kBssEntry:
return os << "BssEntry";
- case HLoadString::LoadKind::kDexCacheViaMethod:
- return os << "DexCacheViaMethod";
case HLoadString::LoadKind::kJitTableAddress:
return os << "JitTableAddress";
+ case HLoadString::LoadKind::kDexCacheViaMethod:
+ return os << "DexCacheViaMethod";
default:
LOG(FATAL) << "Unknown HLoadString::LoadKind: " << static_cast<int>(rhs);
UNREACHABLE();
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 7d6f6164ec..53b0fdde75 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -1097,6 +1097,9 @@ class HBasicBlock : public ArenaObject<kArenaAllocBasicBlock> {
// with a control flow instruction).
void ReplaceWith(HBasicBlock* other);
+ // Merges the instructions of `other` at the end of `this`.
+ void MergeInstructionsWith(HBasicBlock* other);
+
// Merge `other` at the end of `this`. This method updates loops, reverse post
// order, links to predecessors, successors, dominators and deletes the block
// from the graph. The two blocks must be successive, i.e. `this` the only
@@ -1291,6 +1294,7 @@ class HLoopInformationOutwardIterator : public ValueObject {
M(InvokeInterface, Invoke) \
M(InvokeStaticOrDirect, Invoke) \
M(InvokeVirtual, Invoke) \
+ M(InvokePolymorphic, Invoke) \
M(LessThan, Condition) \
M(LessThanOrEqual, Condition) \
M(LoadClass, Instruction) \
@@ -1720,28 +1724,22 @@ class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
public:
HEnvironment(ArenaAllocator* arena,
size_t number_of_vregs,
- const DexFile& dex_file,
- uint32_t method_idx,
+ ArtMethod* method,
uint32_t dex_pc,
- InvokeType invoke_type,
HInstruction* holder)
: vregs_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentVRegs)),
locations_(number_of_vregs, arena->Adapter(kArenaAllocEnvironmentLocations)),
parent_(nullptr),
- dex_file_(dex_file),
- method_idx_(method_idx),
+ method_(method),
dex_pc_(dex_pc),
- invoke_type_(invoke_type),
holder_(holder) {
}
HEnvironment(ArenaAllocator* arena, const HEnvironment& to_copy, HInstruction* holder)
: HEnvironment(arena,
to_copy.Size(),
- to_copy.GetDexFile(),
- to_copy.GetMethodIdx(),
+ to_copy.GetMethod(),
to_copy.GetDexPc(),
- to_copy.GetInvokeType(),
holder) {}
void SetAndCopyParentChain(ArenaAllocator* allocator, HEnvironment* parent) {
@@ -1790,16 +1788,8 @@ class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
return dex_pc_;
}
- uint32_t GetMethodIdx() const {
- return method_idx_;
- }
-
- InvokeType GetInvokeType() const {
- return invoke_type_;
- }
-
- const DexFile& GetDexFile() const {
- return dex_file_;
+ ArtMethod* GetMethod() const {
+ return method_;
}
HInstruction* GetHolder() const {
@@ -1815,10 +1805,8 @@ class HEnvironment : public ArenaObject<kArenaAllocEnvironment> {
ArenaVector<HUserRecord<HEnvironment*>> vregs_;
ArenaVector<Location> locations_;
HEnvironment* parent_;
- const DexFile& dex_file_;
- const uint32_t method_idx_;
+ ArtMethod* method_;
const uint32_t dex_pc_;
- const InvokeType invoke_type_;
// The instruction that holds this environment.
HInstruction* const holder_;
@@ -3774,24 +3762,20 @@ class HCompare FINAL : public HBinaryOperation {
DISALLOW_COPY_AND_ASSIGN(HCompare);
};
-class HNewInstance FINAL : public HExpression<2> {
+class HNewInstance FINAL : public HExpression<1> {
public:
HNewInstance(HInstruction* cls,
- HCurrentMethod* current_method,
uint32_t dex_pc,
dex::TypeIndex type_index,
const DexFile& dex_file,
- bool needs_access_check,
bool finalizable,
QuickEntrypointEnum entrypoint)
: HExpression(Primitive::kPrimNot, SideEffects::CanTriggerGC(), dex_pc),
type_index_(type_index),
dex_file_(dex_file),
entrypoint_(entrypoint) {
- SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
SetPackedFlag<kFlagFinalizable>(finalizable);
SetRawInputAt(0, cls);
- SetRawInputAt(1, current_method);
}
dex::TypeIndex GetTypeIndex() const { return type_index_; }
@@ -3803,8 +3787,9 @@ class HNewInstance FINAL : public HExpression<2> {
// Can throw errors when out-of-memory or if it's not instantiable/accessible.
bool CanThrow() const OVERRIDE { return true; }
- // Needs to call into runtime to make sure it's instantiable/accessible.
- bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
+ bool NeedsChecks() const {
+ return entrypoint_ == kQuickAllocObjectWithChecks;
+ }
bool IsFinalizable() const { return GetPackedFlag<kFlagFinalizable>(); }
@@ -3821,8 +3806,7 @@ class HNewInstance FINAL : public HExpression<2> {
DECLARE_INSTRUCTION(NewInstance);
private:
- static constexpr size_t kFlagNeedsAccessCheck = kNumberOfExpressionPackedBits;
- static constexpr size_t kFlagFinalizable = kFlagNeedsAccessCheck + 1;
+ static constexpr size_t kFlagFinalizable = kNumberOfExpressionPackedBits;
static constexpr size_t kNumberOfNewInstancePackedBits = kFlagFinalizable + 1;
static_assert(kNumberOfNewInstancePackedBits <= kMaxNumberOfPackedBits,
"Too many packed fields.");
@@ -3868,7 +3852,6 @@ class HInvoke : public HVariableInputSizeInstruction {
Primitive::Type GetType() const OVERRIDE { return GetPackedField<ReturnTypeField>(); }
uint32_t GetDexMethodIndex() const { return dex_method_index_; }
- const DexFile& GetDexFile() const { return GetEnvironment()->GetDexFile(); }
InvokeType GetInvokeType() const {
return GetPackedField<InvokeTypeField>();
@@ -3985,6 +3968,28 @@ class HInvokeUnresolved FINAL : public HInvoke {
DISALLOW_COPY_AND_ASSIGN(HInvokeUnresolved);
};
+class HInvokePolymorphic FINAL : public HInvoke {
+ public:
+ HInvokePolymorphic(ArenaAllocator* arena,
+ uint32_t number_of_arguments,
+ Primitive::Type return_type,
+ uint32_t dex_pc,
+ uint32_t dex_method_index)
+ : HInvoke(arena,
+ number_of_arguments,
+ 0u /* number_of_other_inputs */,
+ return_type,
+ dex_pc,
+ dex_method_index,
+ nullptr,
+ kVirtual) {}
+
+ DECLARE_INSTRUCTION(InvokePolymorphic);
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(HInvokePolymorphic);
+};
+
class HInvokeStaticOrDirect FINAL : public HInvoke {
public:
// Requirements of this method call regarding the class
@@ -4166,6 +4171,8 @@ class HInvokeStaticOrDirect FINAL : public HInvoke {
return dispatch_info_.method_load_data;
}
+ const DexFile& GetDexFileForPcRelativeDexCache() const;
+
ClinitCheckRequirement GetClinitCheckRequirement() const {
return GetPackedField<ClinitCheckRequirementField>();
}
@@ -5425,10 +5432,10 @@ class HBoundsCheck FINAL : public HExpression<2> {
HBoundsCheck(HInstruction* index,
HInstruction* length,
uint32_t dex_pc,
- uint32_t string_char_at_method_index = DexFile::kDexNoIndex)
- : HExpression(index->GetType(), SideEffects::CanTriggerGC(), dex_pc),
- string_char_at_method_index_(string_char_at_method_index) {
+ bool string_char_at = false)
+ : HExpression(index->GetType(), SideEffects::CanTriggerGC(), dex_pc) {
DCHECK_EQ(Primitive::kPrimInt, Primitive::PrimitiveKind(index->GetType()));
+ SetPackedFlag<kFlagIsStringCharAt>(string_char_at);
SetRawInputAt(0, index);
SetRawInputAt(1, length);
}
@@ -5442,22 +5449,14 @@ class HBoundsCheck FINAL : public HExpression<2> {
bool CanThrow() const OVERRIDE { return true; }
- bool IsStringCharAt() const { return GetStringCharAtMethodIndex() != DexFile::kDexNoIndex; }
- uint32_t GetStringCharAtMethodIndex() const { return string_char_at_method_index_; }
+ bool IsStringCharAt() const { return GetPackedFlag<kFlagIsStringCharAt>(); }
HInstruction* GetIndex() const { return InputAt(0); }
DECLARE_INSTRUCTION(BoundsCheck);
private:
- // We treat a String as an array, creating the HBoundsCheck from String.charAt()
- // intrinsic in the instruction simplifier. We want to include the String.charAt()
- // in the stack trace if we actually throw the StringIndexOutOfBoundsException,
- // so we need to create an HEnvironment which will be translated to an InlineInfo
- // indicating the extra stack frame. Since we add this HEnvironment quite late,
- // in the PrepareForRegisterAllocation pass, we need to remember the method index
- // from the invoke as we don't want to look again at the dex bytecode.
- uint32_t string_char_at_method_index_; // DexFile::kDexNoIndex if regular array.
+ static constexpr size_t kFlagIsStringCharAt = kNumberOfExpressionPackedBits;
DISALLOW_COPY_AND_ASSIGN(HBoundsCheck);
};
@@ -5525,14 +5524,13 @@ class HLoadClass FINAL : public HInstruction {
// GetIncludePatchInformation().
kBootImageAddress,
+ // Load from an entry in the .bss section using a PC-relative load.
+ // Used for classes outside boot image when .bss is accessible with a PC-relative load.
+ kBssEntry,
+
// Load from the root table associated with the JIT compiled method.
kJitTableAddress,
- // Load from resolved types array in the dex cache using a PC-relative load.
- // Used for classes outside boot image when we know that we can access
- // the dex cache arrays using a PC-relative load.
- kDexCachePcRelative,
-
// Load from resolved types array accessed through the class loaded from
// the compiled method's own ArtMethod*. This is the default access type when
// all other types are unavailable.
@@ -5544,6 +5542,7 @@ class HLoadClass FINAL : public HInstruction {
HLoadClass(HCurrentMethod* current_method,
dex::TypeIndex type_index,
const DexFile& dex_file,
+ Handle<mirror::Class> klass,
bool is_referrers_class,
uint32_t dex_pc,
bool needs_access_check)
@@ -5551,6 +5550,7 @@ class HLoadClass FINAL : public HInstruction {
special_input_(HUserRecord<HInstruction*>(current_method)),
type_index_(type_index),
dex_file_(dex_file),
+ klass_(klass),
loaded_class_rti_(ReferenceTypeInfo::CreateInvalid()) {
// Referrers class should not need access check. We never inline unverified
// methods so we can't possibly end up in this situation.
@@ -5559,14 +5559,11 @@ class HLoadClass FINAL : public HInstruction {
SetPackedField<LoadKindField>(
is_referrers_class ? LoadKind::kReferrersClass : LoadKind::kDexCacheViaMethod);
SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
- SetPackedFlag<kFlagIsInDexCache>(false);
SetPackedFlag<kFlagIsInBootImage>(false);
SetPackedFlag<kFlagGenerateClInitCheck>(false);
}
- void SetLoadKindWithAddress(LoadKind load_kind, uint64_t address) {
- DCHECK(HasAddress(load_kind));
- load_data_.address = address;
+ void SetLoadKind(LoadKind load_kind) {
SetLoadKindInternal(load_kind);
}
@@ -5579,15 +5576,6 @@ class HLoadClass FINAL : public HInstruction {
SetLoadKindInternal(load_kind);
}
- void SetLoadKindWithDexCacheReference(LoadKind load_kind,
- const DexFile& dex_file,
- uint32_t element_index) {
- DCHECK(HasDexCacheReference(load_kind));
- DCHECK(IsSameDexFile(dex_file_, dex_file));
- load_data_.dex_cache_element_index = element_index;
- SetLoadKindInternal(load_kind);
- }
-
LoadKind GetLoadKind() const {
return GetPackedField<LoadKindField>();
}
@@ -5612,13 +5600,21 @@ class HLoadClass FINAL : public HInstruction {
}
bool CanCallRuntime() const {
- return MustGenerateClinitCheck() ||
- (!IsReferrersClass() && !IsInDexCache()) ||
- NeedsAccessCheck();
+ return NeedsAccessCheck() ||
+ MustGenerateClinitCheck() ||
+ GetLoadKind() == LoadKind::kDexCacheViaMethod ||
+ GetLoadKind() == LoadKind::kBssEntry;
}
bool CanThrow() const OVERRIDE {
- return CanCallRuntime();
+ return NeedsAccessCheck() ||
+ MustGenerateClinitCheck() ||
+ // If the class is in the boot image, the lookup in the runtime call cannot throw.
+ // This keeps CanThrow() consistent between non-PIC (using kBootImageAddress) and
+ // PIC and subsequently avoids a DCE behavior dependency on the PIC option.
+ ((GetLoadKind() == LoadKind::kDexCacheViaMethod ||
+ GetLoadKind() == LoadKind::kBssEntry) &&
+ !IsInBootImage());
}
ReferenceTypeInfo GetLoadedClassRTI() {
@@ -5634,15 +5630,8 @@ class HLoadClass FINAL : public HInstruction {
dex::TypeIndex GetTypeIndex() const { return type_index_; }
const DexFile& GetDexFile() const { return dex_file_; }
- uint32_t GetDexCacheElementOffset() const;
-
- uint64_t GetAddress() const {
- DCHECK(HasAddress(GetLoadKind()));
- return load_data_.address;
- }
-
bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
- return !IsReferrersClass();
+ return GetLoadKind() == LoadKind::kDexCacheViaMethod;
}
static SideEffects SideEffectsForArchRuntimeCalls() {
@@ -5651,17 +5640,9 @@ class HLoadClass FINAL : public HInstruction {
bool IsReferrersClass() const { return GetLoadKind() == LoadKind::kReferrersClass; }
bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
- bool IsInDexCache() const { return GetPackedFlag<kFlagIsInDexCache>(); }
bool IsInBootImage() const { return GetPackedFlag<kFlagIsInBootImage>(); }
bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }
- void MarkInDexCache() {
- SetPackedFlag<kFlagIsInDexCache>(true);
- DCHECK(!NeedsEnvironment());
- RemoveEnvironment();
- SetSideEffects(SideEffects::None());
- }
-
void MarkInBootImage() {
SetPackedFlag<kFlagIsInBootImage>(true);
}
@@ -5678,12 +5659,15 @@ class HLoadClass FINAL : public HInstruction {
return Primitive::kPrimNot;
}
+ Handle<mirror::Class> GetClass() const {
+ return klass_;
+ }
+
DECLARE_INSTRUCTION(LoadClass);
private:
static constexpr size_t kFlagNeedsAccessCheck = kNumberOfGenericPackedBits;
- static constexpr size_t kFlagIsInDexCache = kFlagNeedsAccessCheck + 1;
- static constexpr size_t kFlagIsInBootImage = kFlagIsInDexCache + 1;
+ static constexpr size_t kFlagIsInBootImage = kFlagNeedsAccessCheck + 1;
// Whether this instruction must generate the initialization check.
// Used for code generation.
static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInBootImage + 1;
@@ -5695,35 +5679,24 @@ class HLoadClass FINAL : public HInstruction {
using LoadKindField = BitField<LoadKind, kFieldLoadKind, kFieldLoadKindSize>;
static bool HasTypeReference(LoadKind load_kind) {
- return load_kind == LoadKind::kBootImageLinkTimeAddress ||
+ return load_kind == LoadKind::kReferrersClass ||
+ load_kind == LoadKind::kBootImageLinkTimeAddress ||
load_kind == LoadKind::kBootImageLinkTimePcRelative ||
- load_kind == LoadKind::kDexCacheViaMethod ||
- load_kind == LoadKind::kReferrersClass;
- }
-
- static bool HasAddress(LoadKind load_kind) {
- return load_kind == LoadKind::kBootImageAddress ||
- load_kind == LoadKind::kJitTableAddress;
- }
-
- static bool HasDexCacheReference(LoadKind load_kind) {
- return load_kind == LoadKind::kDexCachePcRelative;
+ load_kind == LoadKind::kBssEntry ||
+ load_kind == LoadKind::kDexCacheViaMethod;
}
void SetLoadKindInternal(LoadKind load_kind);
// The special input is the HCurrentMethod for kDexCacheViaMethod or kReferrersClass.
// For other load kinds it's empty or possibly some architecture-specific instruction
- // for PC-relative loads, i.e. kDexCachePcRelative or kBootImageLinkTimePcRelative.
+ // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
HUserRecord<HInstruction*> special_input_;
const dex::TypeIndex type_index_;
const DexFile& dex_file_;
- union {
- uint32_t dex_cache_element_index; // Only for dex cache reference.
- uint64_t address; // Up to 64-bit, needed for kJitTableAddress on 64-bit targets.
- } load_data_;
+ Handle<mirror::Class> klass_;
ReferenceTypeInfo loaded_class_rti_;
@@ -5732,19 +5705,13 @@ class HLoadClass FINAL : public HInstruction {
std::ostream& operator<<(std::ostream& os, HLoadClass::LoadKind rhs);
// Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
-inline uint32_t HLoadClass::GetDexCacheElementOffset() const {
- DCHECK(HasDexCacheReference(GetLoadKind())) << GetLoadKind();
- return load_data_.dex_cache_element_index;
-}
-
-// Note: defined outside class to see operator<<(., HLoadClass::LoadKind).
inline void HLoadClass::AddSpecialInput(HInstruction* special_input) {
// The special input is used for PC-relative loads on some architectures,
// including literal pool loads, which are PC-relative too.
DCHECK(GetLoadKind() == LoadKind::kBootImageLinkTimePcRelative ||
- GetLoadKind() == LoadKind::kDexCachePcRelative ||
GetLoadKind() == LoadKind::kBootImageLinkTimeAddress ||
- GetLoadKind() == LoadKind::kBootImageAddress) << GetLoadKind();
+ GetLoadKind() == LoadKind::kBootImageAddress ||
+ GetLoadKind() == LoadKind::kBssEntry) << GetLoadKind();
DCHECK(special_input_.GetInstruction() == nullptr);
special_input_ = HUserRecord<HInstruction*>(special_input);
special_input->AddUseAt(this, 0);
@@ -5772,15 +5739,15 @@ class HLoadString FINAL : public HInstruction {
// Used for strings outside boot image when .bss is accessible with a PC-relative load.
kBssEntry,
+ // Load from the root table associated with the JIT compiled method.
+ kJitTableAddress,
+
// Load from resolved strings array accessed through the class loaded from
// the compiled method's own ArtMethod*. This is the default access type when
// all other types are unavailable.
kDexCacheViaMethod,
- // Load from the root table associated with the JIT compiled method.
- kJitTableAddress,
-
- kLast = kJitTableAddress,
+ kLast = kDexCacheViaMethod,
};
HLoadString(HCurrentMethod* current_method,
@@ -5872,7 +5839,7 @@ class HLoadString FINAL : public HInstruction {
// The special input is the HCurrentMethod for kDexCacheViaMethod.
// For other load kinds it's empty or possibly some architecture-specific instruction
- // for PC-relative loads, i.e. kDexCachePcRelative or kBootImageLinkTimePcRelative.
+ // for PC-relative loads, i.e. kBssEntry or kBootImageLinkTimePcRelative.
HUserRecord<HInstruction*> special_input_;
dex::StringIndex string_index_;
diff --git a/compiler/optimizing/nodes_test.cc b/compiler/optimizing/nodes_test.cc
index 5d9a6528ca..7686ba851b 100644
--- a/compiler/optimizing/nodes_test.cc
+++ b/compiler/optimizing/nodes_test.cc
@@ -52,7 +52,7 @@ TEST(Node, RemoveInstruction) {
exit_block->AddInstruction(new (&allocator) HExit());
HEnvironment* environment = new (&allocator) HEnvironment(
- &allocator, 1, graph->GetDexFile(), graph->GetMethodIdx(), 0, kStatic, null_check);
+ &allocator, 1, graph->GetArtMethod(), 0, null_check);
null_check->SetRawEnvironment(environment);
environment->SetRawEnvAt(0, parameter);
parameter->AddEnvUseAt(null_check->GetEnvironment(), 0);
@@ -137,7 +137,7 @@ TEST(Node, ParentEnvironment) {
ASSERT_TRUE(parameter1->GetUses().HasExactlyOneElement());
HEnvironment* environment = new (&allocator) HEnvironment(
- &allocator, 1, graph->GetDexFile(), graph->GetMethodIdx(), 0, kStatic, with_environment);
+ &allocator, 1, graph->GetArtMethod(), 0, with_environment);
ArenaVector<HInstruction*> array(allocator.Adapter());
array.push_back(parameter1);
@@ -148,13 +148,13 @@ TEST(Node, ParentEnvironment) {
ASSERT_TRUE(parameter1->GetEnvUses().HasExactlyOneElement());
HEnvironment* parent1 = new (&allocator) HEnvironment(
- &allocator, 1, graph->GetDexFile(), graph->GetMethodIdx(), 0, kStatic, nullptr);
+ &allocator, 1, graph->GetArtMethod(), 0, nullptr);
parent1->CopyFrom(array);
ASSERT_EQ(parameter1->GetEnvUses().SizeSlow(), 2u);
HEnvironment* parent2 = new (&allocator) HEnvironment(
- &allocator, 1, graph->GetDexFile(), graph->GetMethodIdx(), 0, kStatic, nullptr);
+ &allocator, 1, graph->GetArtMethod(), 0, nullptr);
parent2->CopyFrom(array);
parent1->SetAndCopyParentChain(&allocator, parent2);
diff --git a/compiler/optimizing/pc_relative_fixups_mips.cc b/compiler/optimizing/pc_relative_fixups_mips.cc
index e321b9e3aa..a0fdde169d 100644
--- a/compiler/optimizing/pc_relative_fixups_mips.cc
+++ b/compiler/optimizing/pc_relative_fixups_mips.cc
@@ -62,8 +62,9 @@ class PCRelativeHandlerVisitor : public HGraphVisitor {
HLoadClass::LoadKind load_kind = load_class->GetLoadKind();
switch (load_kind) {
case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
- case HLoadClass::LoadKind::kBootImageAddress:
case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
+ case HLoadClass::LoadKind::kBootImageAddress:
+ case HLoadClass::LoadKind::kBssEntry:
// Add a base register for PC-relative literals on R2.
InitializePCRelativeBasePointer();
load_class->AddSpecialInput(base_);
diff --git a/compiler/optimizing/pc_relative_fixups_x86.cc b/compiler/optimizing/pc_relative_fixups_x86.cc
index b1fdb1792d..2befc8ca4e 100644
--- a/compiler/optimizing/pc_relative_fixups_x86.cc
+++ b/compiler/optimizing/pc_relative_fixups_x86.cc
@@ -83,7 +83,7 @@ class PCRelativeHandlerVisitor : public HGraphVisitor {
void VisitLoadClass(HLoadClass* load_class) OVERRIDE {
HLoadClass::LoadKind load_kind = load_class->GetLoadKind();
if (load_kind == HLoadClass::LoadKind::kBootImageLinkTimePcRelative ||
- load_kind == HLoadClass::LoadKind::kDexCachePcRelative) {
+ load_kind == HLoadClass::LoadKind::kBssEntry) {
InitializePCRelativeBasePointer();
load_class->AddSpecialInput(base_);
}
diff --git a/compiler/optimizing/prepare_for_register_allocation.cc b/compiler/optimizing/prepare_for_register_allocation.cc
index f9ac3a0f72..efbaf6c221 100644
--- a/compiler/optimizing/prepare_for_register_allocation.cc
+++ b/compiler/optimizing/prepare_for_register_allocation.cc
@@ -16,6 +16,9 @@
#include "prepare_for_register_allocation.h"
+#include "jni_internal.h"
+#include "well_known_classes.h"
+
namespace art {
void PrepareForRegisterAllocation::Run() {
@@ -42,16 +45,12 @@ void PrepareForRegisterAllocation::VisitBoundsCheck(HBoundsCheck* check) {
if (check->IsStringCharAt()) {
// Add a fake environment for String.charAt() inline info as we want
// the exception to appear as being thrown from there.
- const DexFile& dex_file = check->GetEnvironment()->GetDexFile();
- DCHECK_STREQ(dex_file.PrettyMethod(check->GetStringCharAtMethodIndex()).c_str(),
- "char java.lang.String.charAt(int)");
+ ArtMethod* char_at_method = jni::DecodeArtMethod(WellKnownClasses::java_lang_String_charAt);
ArenaAllocator* arena = GetGraph()->GetArena();
HEnvironment* environment = new (arena) HEnvironment(arena,
/* number_of_vregs */ 0u,
- dex_file,
- check->GetStringCharAtMethodIndex(),
+ char_at_method,
/* dex_pc */ DexFile::kDexNoIndex,
- kVirtual,
check);
check->InsertRawEnvironment(environment);
}
@@ -134,39 +133,6 @@ void PrepareForRegisterAllocation::VisitClinitCheck(HClinitCheck* check) {
}
}
-void PrepareForRegisterAllocation::VisitNewInstance(HNewInstance* instruction) {
- HLoadClass* load_class = instruction->InputAt(0)->AsLoadClass();
- const bool has_only_one_use = load_class->HasOnlyOneNonEnvironmentUse();
- // Change the entrypoint to kQuickAllocObject if either:
- // - the class is finalizable (only kQuickAllocObject handles finalizable classes),
- // - the class needs access checks (we do not know if it's finalizable),
- // - or the load class has only one use.
- if (instruction->IsFinalizable() || has_only_one_use || load_class->NeedsAccessCheck()) {
- instruction->SetEntrypoint(kQuickAllocObject);
- instruction->ReplaceInput(GetGraph()->GetIntConstant(load_class->GetTypeIndex().index_), 0);
- if (has_only_one_use) {
- // We've just removed the only use of the HLoadClass. Since we don't run DCE after this pass,
- // do it manually if possible.
- if (!load_class->CanThrow()) {
- // If the load class can not throw, it has no side effects and can be removed if there is
- // only one use.
- load_class->GetBlock()->RemoveInstruction(load_class);
- } else if (!instruction->GetEnvironment()->IsFromInlinedInvoke() &&
- CanMoveClinitCheck(load_class, instruction)) {
- // The allocation entry point that deals with access checks does not work with inlined
- // methods, so we need to check whether this allocation comes from an inlined method.
- // We also need to make the same check as for moving clinit check, whether the HLoadClass
- // has the clinit check responsibility or not (HLoadClass can throw anyway).
- // If it needed access checks, we delegate the access check to the allocation.
- if (load_class->NeedsAccessCheck()) {
- instruction->SetEntrypoint(kQuickAllocObjectWithAccessCheck);
- }
- load_class->GetBlock()->RemoveInstruction(load_class);
- }
- }
- }
-}
-
bool PrepareForRegisterAllocation::CanEmitConditionAt(HCondition* condition,
HInstruction* user) const {
if (condition->GetNext() != user) {
@@ -232,8 +198,7 @@ bool PrepareForRegisterAllocation::CanMoveClinitCheck(HInstruction* input,
return false;
}
if (user_environment->GetDexPc() != input_environment->GetDexPc() ||
- user_environment->GetMethodIdx() != input_environment->GetMethodIdx() ||
- !IsSameDexFile(user_environment->GetDexFile(), input_environment->GetDexFile())) {
+ user_environment->GetMethod() != input_environment->GetMethod()) {
return false;
}
user_environment = user_environment->GetParent();
diff --git a/compiler/optimizing/prepare_for_register_allocation.h b/compiler/optimizing/prepare_for_register_allocation.h
index a6791482a7..c128227654 100644
--- a/compiler/optimizing/prepare_for_register_allocation.h
+++ b/compiler/optimizing/prepare_for_register_allocation.h
@@ -44,7 +44,6 @@ class PrepareForRegisterAllocation : public HGraphDelegateVisitor {
void VisitClinitCheck(HClinitCheck* check) OVERRIDE;
void VisitCondition(HCondition* condition) OVERRIDE;
void VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) OVERRIDE;
- void VisitNewInstance(HNewInstance* instruction) OVERRIDE;
bool CanMoveClinitCheck(HInstruction* input, HInstruction* user) const;
bool CanEmitConditionAt(HCondition* condition, HInstruction* user) const;
diff --git a/compiler/optimizing/reference_type_propagation.cc b/compiler/optimizing/reference_type_propagation.cc
index f8a4469712..a4d59ab587 100644
--- a/compiler/optimizing/reference_type_propagation.cc
+++ b/compiler/optimizing/reference_type_propagation.cc
@@ -295,13 +295,13 @@ static void BoundTypeForClassCheck(HInstruction* check) {
}
if (check->IsIf()) {
- HBasicBlock* trueBlock = check->IsEqual()
+ HBasicBlock* trueBlock = compare->IsEqual()
? check->AsIf()->IfTrueSuccessor()
: check->AsIf()->IfFalseSuccessor();
BoundTypeIn(receiver, trueBlock, /* start_instruction */ nullptr, class_rti);
} else {
DCHECK(check->IsDeoptimize());
- if (check->IsEqual()) {
+ if (compare->IsEqual()) {
BoundTypeIn(receiver, check->GetBlock(), check, class_rti);
}
}
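Reviewer note: the fix matters because the branch polarity comes from the compare feeding the check, not from the check itself: an HIf is never an HEqual, so check->IsEqual() was always false and the bound type could land in the wrong successor. A stand-in sketch of the corrected logic:

// Stand-ins, not the ART types.
struct Cond { bool is_equal; };                          // HEqual vs HNotEqual
struct IfInstr { Cond* input; int true_succ; int false_succ; };

// With `x.getClass() == Foo.class` the type is known in the true successor;
// with `!=`, only in the false successor. The polarity lives on the compare.
static int BlockWhereTypeIsKnown(const IfInstr& check) {
  return check.input->is_equal ? check.true_succ : check.false_succ;
}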
@@ -499,18 +499,19 @@ void ReferenceTypePropagation::RTPVisitor::SetClassAsTypeInfo(HInstruction* inst
if (instr->IsInvokeStaticOrDirect() && instr->AsInvokeStaticOrDirect()->IsStringInit()) {
// Calls to String.<init> are replaced with a StringFactory.
if (kIsDebugBuild) {
- HInvoke* invoke = instr->AsInvoke();
+ HInvokeStaticOrDirect* invoke = instr->AsInvokeStaticOrDirect();
ClassLinker* cl = Runtime::Current()->GetClassLinker();
Thread* self = Thread::Current();
StackHandleScope<2> hs(self);
+ const DexFile& dex_file = *invoke->GetTargetMethod().dex_file;
Handle<mirror::DexCache> dex_cache(
- hs.NewHandle(FindDexCacheWithHint(self, invoke->GetDexFile(), hint_dex_cache_)));
+ hs.NewHandle(FindDexCacheWithHint(self, dex_file, hint_dex_cache_)));
// Use a null loader. We should probably use the compiling method's class loader,
// but then we would need to pass it to RTPVisitor just for this debug check. Since
// the method is from the String class, the null loader is good enough.
Handle<mirror::ClassLoader> loader;
ArtMethod* method = cl->ResolveMethod<ClassLinker::kNoICCECheckForCache>(
- invoke->GetDexFile(), invoke->GetDexMethodIndex(), dex_cache, loader, nullptr, kDirect);
+ dex_file, invoke->GetDexMethodIndex(), dex_cache, loader, nullptr, kDirect);
DCHECK(method != nullptr);
mirror::Class* declaring_class = method->GetDeclaringClass();
DCHECK(declaring_class != nullptr);
@@ -619,14 +620,10 @@ void ReferenceTypePropagation::RTPVisitor::VisitUnresolvedStaticFieldGet(
void ReferenceTypePropagation::RTPVisitor::VisitLoadClass(HLoadClass* instr) {
ScopedObjectAccess soa(Thread::Current());
- // Get type from dex cache assuming it was populated by the verifier.
- mirror::Class* resolved_class = GetClassFromDexCache(soa.Self(),
- instr->GetDexFile(),
- instr->GetTypeIndex(),
- hint_dex_cache_);
- if (IsAdmissible(resolved_class)) {
+ Handle<mirror::Class> resolved_class = instr->GetClass();
+ if (IsAdmissible(resolved_class.Get())) {
instr->SetLoadedClassRTI(ReferenceTypeInfo::Create(
- handle_cache_->NewHandle(resolved_class), /* is_exact */ true));
+ resolved_class, /* is_exact */ true));
}
instr->SetReferenceTypeInfo(
ReferenceTypeInfo::Create(handle_cache_->GetClassClassHandle(), /* is_exact */ true));
@@ -844,10 +841,8 @@ void ReferenceTypePropagation::RTPVisitor::VisitInvoke(HInvoke* instr) {
ScopedObjectAccess soa(Thread::Current());
ClassLinker* cl = Runtime::Current()->GetClassLinker();
- mirror::DexCache* dex_cache =
- FindDexCacheWithHint(soa.Self(), instr->GetDexFile(), hint_dex_cache_);
PointerSize pointer_size = cl->GetImagePointerSize();
- ArtMethod* method = dex_cache->GetResolvedMethod(instr->GetDexMethodIndex(), pointer_size);
+ ArtMethod* method = instr->GetResolvedMethod();
mirror::Class* klass = (method == nullptr) ? nullptr : method->GetReturnType(false, pointer_size);
SetClassAsTypeInfo(instr, klass, /* is_exact */ false);
}
diff --git a/compiler/optimizing/sharpening.cc b/compiler/optimizing/sharpening.cc
index dc8ee23ba4..c5294107ae 100644
--- a/compiler/optimizing/sharpening.cc
+++ b/compiler/optimizing/sharpening.cc
@@ -133,72 +133,65 @@ void HSharpening::ProcessInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
void HSharpening::ProcessLoadClass(HLoadClass* load_class) {
ScopedObjectAccess soa(Thread::Current());
- StackHandleScope<1> hs(soa.Self());
- Runtime* runtime = Runtime::Current();
- ClassLinker* class_linker = runtime->GetClassLinker();
- const DexFile& dex_file = load_class->GetDexFile();
- dex::TypeIndex type_index = load_class->GetTypeIndex();
- Handle<mirror::DexCache> dex_cache = IsSameDexFile(dex_file, *compilation_unit_.GetDexFile())
- ? compilation_unit_.GetDexCache()
- : hs.NewHandle(class_linker->FindDexCache(soa.Self(), dex_file));
- mirror::Class* cls = dex_cache->GetResolvedType(type_index);
- SharpenClass(load_class, cls, handles_, codegen_, compiler_driver_);
+ SharpenClass(load_class, codegen_, compiler_driver_);
}
void HSharpening::SharpenClass(HLoadClass* load_class,
- mirror::Class* klass,
- VariableSizedHandleScope* handles,
CodeGenerator* codegen,
CompilerDriver* compiler_driver) {
- ScopedAssertNoThreadSuspension sants("Sharpening class in compiler");
+ Handle<mirror::Class> klass = load_class->GetClass();
DCHECK(load_class->GetLoadKind() == HLoadClass::LoadKind::kDexCacheViaMethod ||
load_class->GetLoadKind() == HLoadClass::LoadKind::kReferrersClass)
<< load_class->GetLoadKind();
- DCHECK(!load_class->IsInDexCache()) << "HLoadClass should not be optimized before sharpening.";
DCHECK(!load_class->IsInBootImage()) << "HLoadClass should not be optimized before sharpening.";
+ if (load_class->NeedsAccessCheck()) {
+ // We need to call the runtime anyway, so we simply get the class as that call's return value.
+ return;
+ }
+
+ if (load_class->GetLoadKind() == HLoadClass::LoadKind::kReferrersClass) {
+ // Loading from the ArtMethod* is the most efficient retrieval in code size.
+ // TODO: This may not actually be true for all architectures and
+ // locations of target classes. The additional register pressure
+ // for using the ArtMethod* should be considered.
+ return;
+ }
+
const DexFile& dex_file = load_class->GetDexFile();
dex::TypeIndex type_index = load_class->GetTypeIndex();
- bool is_in_dex_cache = false;
bool is_in_boot_image = false;
HLoadClass::LoadKind desired_load_kind = static_cast<HLoadClass::LoadKind>(-1);
- uint64_t address = 0u; // Class or dex cache element address.
Runtime* runtime = Runtime::Current();
if (codegen->GetCompilerOptions().IsBootImage()) {
// Compiling boot image. Check if the class is a boot image class.
DCHECK(!runtime->UseJitCompilation());
if (!compiler_driver->GetSupportBootImageFixup()) {
- // MIPS64 or compiler_driver_test. Do not sharpen.
+ // compiler_driver_test. Do not sharpen.
desired_load_kind = HLoadClass::LoadKind::kDexCacheViaMethod;
- } else if ((klass != nullptr) && compiler_driver->IsImageClass(
+ } else if ((klass.Get() != nullptr) && compiler_driver->IsImageClass(
dex_file.StringDataByIdx(dex_file.GetTypeId(type_index).descriptor_idx_))) {
is_in_boot_image = true;
- is_in_dex_cache = true;
desired_load_kind = codegen->GetCompilerOptions().GetCompilePic()
? HLoadClass::LoadKind::kBootImageLinkTimePcRelative
: HLoadClass::LoadKind::kBootImageLinkTimeAddress;
} else {
- // Not a boot image class. We must go through the dex cache.
+ // Not a boot image class.
DCHECK(ContainsElement(compiler_driver->GetDexFilesForOatFile(), &dex_file));
- desired_load_kind = HLoadClass::LoadKind::kDexCachePcRelative;
+ desired_load_kind = HLoadClass::LoadKind::kBssEntry;
}
} else {
- is_in_boot_image = (klass != nullptr) && runtime->GetHeap()->ObjectIsInBootImageSpace(klass);
+ is_in_boot_image = (klass.Get() != nullptr) &&
+ runtime->GetHeap()->ObjectIsInBootImageSpace(klass.Get());
if (runtime->UseJitCompilation()) {
// TODO: Make sure we don't set the "compile PIC" flag for JIT as that's bogus.
// DCHECK(!codegen_->GetCompilerOptions().GetCompilePic());
- is_in_dex_cache = (klass != nullptr);
if (is_in_boot_image) {
// TODO: Use direct pointers for all non-moving spaces, not just boot image. Bug: 29530787
desired_load_kind = HLoadClass::LoadKind::kBootImageAddress;
- address = reinterpret_cast64<uint64_t>(klass);
- } else if (is_in_dex_cache) {
+ } else if (klass.Get() != nullptr) {
desired_load_kind = HLoadClass::LoadKind::kJitTableAddress;
- // We store in the address field the location of the stack reference maintained
- // by the handle. We do this now so that the code generation does not need to figure
- // out which class loader to use.
- address = reinterpret_cast<uint64_t>(handles->NewHandle(klass).GetReference());
} else {
// Class not loaded yet. This happens when the dex code requesting
// this `HLoadClass` hasn't been executed in the interpreter.
@@ -209,15 +202,9 @@ void HSharpening::SharpenClass(HLoadClass* load_class,
} else if (is_in_boot_image && !codegen->GetCompilerOptions().GetCompilePic()) {
// AOT app compilation. Check if the class is in the boot image.
desired_load_kind = HLoadClass::LoadKind::kBootImageAddress;
- address = reinterpret_cast64<uint64_t>(klass);
} else {
// Not JIT and either the klass is not in boot image or we are compiling in PIC mode.
- // Use PC-relative load from the dex cache if the dex file belongs
- // to the oat file that we're currently compiling.
- desired_load_kind =
- ContainsElement(compiler_driver->GetDexFilesForOatFile(), &load_class->GetDexFile())
- ? HLoadClass::LoadKind::kDexCachePcRelative
- : HLoadClass::LoadKind::kDexCacheViaMethod;
+ desired_load_kind = HLoadClass::LoadKind::kBssEntry;
}
}
DCHECK_NE(desired_load_kind, static_cast<HLoadClass::LoadKind>(-1));
@@ -226,42 +213,18 @@ void HSharpening::SharpenClass(HLoadClass* load_class,
load_class->MarkInBootImage();
}
- if (load_class->NeedsAccessCheck()) {
- // We need to call the runtime anyway, so we simply get the class as that call's return value.
- return;
- }
-
- if (load_class->GetLoadKind() == HLoadClass::LoadKind::kReferrersClass) {
- // Loading from the ArtMethod* is the most efficient retrieval in code size.
- // TODO: This may not actually be true for all architectures and
- // locations of target classes. The additional register pressure
- // for using the ArtMethod* should be considered.
- return;
- }
-
- if (is_in_dex_cache) {
- load_class->MarkInDexCache();
- }
-
HLoadClass::LoadKind load_kind = codegen->GetSupportedLoadClassKind(desired_load_kind);
switch (load_kind) {
case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
case HLoadClass::LoadKind::kBootImageLinkTimePcRelative:
+ case HLoadClass::LoadKind::kBssEntry:
case HLoadClass::LoadKind::kDexCacheViaMethod:
load_class->SetLoadKindWithTypeReference(load_kind, dex_file, type_index);
break;
case HLoadClass::LoadKind::kBootImageAddress:
case HLoadClass::LoadKind::kJitTableAddress:
- DCHECK_NE(address, 0u);
- load_class->SetLoadKindWithAddress(load_kind, address);
- break;
- case HLoadClass::LoadKind::kDexCachePcRelative: {
- PointerSize pointer_size = InstructionSetPointerSize(codegen->GetInstructionSet());
- DexCacheArraysLayout layout(pointer_size, &dex_file);
- size_t element_index = layout.TypeOffset(type_index);
- load_class->SetLoadKindWithDexCacheReference(load_kind, dex_file, element_index);
+ load_class->SetLoadKind(load_kind);
break;
- }
default:
LOG(FATAL) << "Unexpected load kind: " << load_kind;
UNREACHABLE();
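Reviewer note: after this change the class-load-kind selection reduces to a small decision tree over the compilation mode. A condensed, illustrative sketch; the enum and the boolean predicates are stand-ins, and details such as the access-check and referrers-class early returns are omitted:

enum class LoadKind {
  kBootImageLinkTimeAddress,
  kBootImageLinkTimePcRelative,
  kBootImageAddress,
  kBssEntry,
  kJitTableAddress,
  kDexCacheViaMethod,
};

static LoadKind ChooseClassLoadKind(bool compiling_boot_image, bool pic,
                                    bool jit, bool class_resolved,
                                    bool in_boot_image) {
  if (compiling_boot_image) {
    if (in_boot_image) {
      return pic ? LoadKind::kBootImageLinkTimePcRelative
                 : LoadKind::kBootImageLinkTimeAddress;
    }
    return LoadKind::kBssEntry;  // same oat file, resolved at runtime via .bss
  }
  if (jit) {
    if (in_boot_image) return LoadKind::kBootImageAddress;
    return class_resolved ? LoadKind::kJitTableAddress
                          : LoadKind::kDexCacheViaMethod;
  }
  if (in_boot_image && !pic) return LoadKind::kBootImageAddress;
  return LoadKind::kBssEntry;  // PIC app code or class outside the boot image
}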
@@ -274,7 +237,7 @@ void HSharpening::ProcessLoadString(HLoadString* load_string) {
const DexFile& dex_file = load_string->GetDexFile();
dex::StringIndex string_index = load_string->GetStringIndex();
- HLoadString::LoadKind desired_load_kind = HLoadString::LoadKind::kDexCacheViaMethod;
+ HLoadString::LoadKind desired_load_kind = static_cast<HLoadString::LoadKind>(-1);
{
Runtime* runtime = Runtime::Current();
ClassLinker* class_linker = runtime->GetClassLinker();
@@ -297,8 +260,8 @@ void HSharpening::ProcessLoadString(HLoadString* load_string) {
? HLoadString::LoadKind::kBootImageLinkTimePcRelative
: HLoadString::LoadKind::kBootImageLinkTimeAddress;
} else {
- // MIPS64 or compiler_driver_test. Do not sharpen.
- DCHECK_EQ(desired_load_kind, HLoadString::LoadKind::kDexCacheViaMethod);
+ // compiler_driver_test. Do not sharpen.
+ desired_load_kind = HLoadString::LoadKind::kDexCacheViaMethod;
}
} else if (runtime->UseJitCompilation()) {
// TODO: Make sure we don't set the "compile PIC" flag for JIT as that's bogus.
@@ -310,6 +273,8 @@ void HSharpening::ProcessLoadString(HLoadString* load_string) {
} else {
desired_load_kind = HLoadString::LoadKind::kJitTableAddress;
}
+ } else {
+ desired_load_kind = HLoadString::LoadKind::kDexCacheViaMethod;
}
} else {
// AOT app compilation. Try to look up the string without allocating if not found.
@@ -326,6 +291,7 @@ void HSharpening::ProcessLoadString(HLoadString* load_string) {
load_string->SetString(handles_->NewHandle(string));
}
}
+ DCHECK_NE(desired_load_kind, static_cast<HLoadString::LoadKind>(-1));
HLoadString::LoadKind load_kind = codegen_->GetSupportedLoadStringKind(desired_load_kind);
load_string->SetLoadKind(load_kind);
diff --git a/compiler/optimizing/sharpening.h b/compiler/optimizing/sharpening.h
index ae5ccb33ab..ae3d83ef2c 100644
--- a/compiler/optimizing/sharpening.h
+++ b/compiler/optimizing/sharpening.h
@@ -49,8 +49,6 @@ class HSharpening : public HOptimization {
// Used internally but also by the inliner.
static void SharpenClass(HLoadClass* load_class,
- mirror::Class* klass,
- VariableSizedHandleScope* handles,
CodeGenerator* codegen,
CompilerDriver* compiler_driver)
REQUIRES_SHARED(Locks::mutator_lock_);
diff --git a/compiler/optimizing/stack_map_stream.cc b/compiler/optimizing/stack_map_stream.cc
index fc8af6462a..6087e36507 100644
--- a/compiler/optimizing/stack_map_stream.cc
+++ b/compiler/optimizing/stack_map_stream.cc
@@ -13,8 +13,13 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
#include "stack_map_stream.h"
+#include "art_method.h"
+#include "runtime.h"
+#include "scoped_thread_state_change-inl.h"
+
namespace art {
void StackMapStream::BeginStackMapEntry(uint32_t dex_pc,
@@ -98,15 +103,27 @@ void StackMapStream::AddDexRegisterEntry(DexRegisterLocation::Kind kind, int32_t
current_dex_register_++;
}
-void StackMapStream::BeginInlineInfoEntry(uint32_t method_index,
+static bool EncodeArtMethodInInlineInfo(ArtMethod* method ATTRIBUTE_UNUSED) {
+ // Note: the runtime is null only for unit testing.
+ return Runtime::Current() == nullptr || !Runtime::Current()->IsAotCompiler();
+}
+
+void StackMapStream::BeginInlineInfoEntry(ArtMethod* method,
uint32_t dex_pc,
- InvokeType invoke_type,
- uint32_t num_dex_registers) {
+ uint32_t num_dex_registers,
+ const DexFile* outer_dex_file) {
DCHECK(!in_inline_frame_);
in_inline_frame_ = true;
- current_inline_info_.method_index = method_index;
+ if (EncodeArtMethodInInlineInfo(method)) {
+ current_inline_info_.method = method;
+ } else {
+ if (dex_pc != static_cast<uint32_t>(-1) && kIsDebugBuild) {
+ ScopedObjectAccess soa(Thread::Current());
+ DCHECK(IsSameDexFile(*outer_dex_file, *method->GetDexFile()));
+ }
+ current_inline_info_.method_index = method->GetDexMethodIndexUnchecked();
+ }
current_inline_info_.dex_pc = dex_pc;
- current_inline_info_.invoke_type = invoke_type;
current_inline_info_.num_dex_registers = num_dex_registers;
current_inline_info_.dex_register_locations_start_index = dex_register_locations_.size();
if (num_dex_registers != 0) {
@@ -229,25 +246,32 @@ size_t StackMapStream::ComputeDexRegisterMapsSize() const {
void StackMapStream::ComputeInlineInfoEncoding() {
uint32_t method_index_max = 0;
uint32_t dex_pc_max = DexFile::kDexNoIndex;
- uint32_t invoke_type_max = 0;
+ uint32_t extra_data_max = 0;
uint32_t inline_info_index = 0;
for (const StackMapEntry& entry : stack_maps_) {
for (size_t j = 0; j < entry.inlining_depth; ++j) {
InlineInfoEntry inline_entry = inline_infos_[inline_info_index++];
- method_index_max = std::max(method_index_max, inline_entry.method_index);
+ if (inline_entry.method == nullptr) {
+ method_index_max = std::max(method_index_max, inline_entry.method_index);
+ extra_data_max = std::max(extra_data_max, 1u);
+ } else {
+ method_index_max = std::max(
+ method_index_max, High32Bits(reinterpret_cast<uintptr_t>(inline_entry.method)));
+ extra_data_max = std::max(
+ extra_data_max, Low32Bits(reinterpret_cast<uintptr_t>(inline_entry.method)));
+ }
if (inline_entry.dex_pc != DexFile::kDexNoIndex &&
(dex_pc_max == DexFile::kDexNoIndex || dex_pc_max < inline_entry.dex_pc)) {
dex_pc_max = inline_entry.dex_pc;
}
- invoke_type_max = std::max(invoke_type_max, static_cast<uint32_t>(inline_entry.invoke_type));
}
}
DCHECK_EQ(inline_info_index, inline_infos_.size());
inline_info_encoding_.SetFromSizes(method_index_max,
dex_pc_max,
- invoke_type_max,
+ extra_data_max,
dex_register_maps_size_);
}
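Because the encoding only sizes 32-bit fields, the (up to 64-bit) ArtMethod* is split across two of them: the method_index field carries the high half and the repurposed extra_data field carries the low half, and both maxima feed SetFromSizes so each field gets just enough bits. A standalone sketch of the accumulation, assuming helpers equivalent to ART's High32Bits/Low32Bits:

    #include <algorithm>
    #include <cstdint>

    // Assumed equivalents of ART's High32Bits/Low32Bits helpers.
    inline uint32_t High32Bits(uint64_t v) { return static_cast<uint32_t>(v >> 32); }
    inline uint32_t Low32Bits(uint64_t v)  { return static_cast<uint32_t>(v); }

    struct FieldMaxima {
      uint32_t method_index_max = 0;
      uint32_t extra_data_max = 0;
    };

    // Mirrors the loop above: pointer entries contribute their two halves,
    // index entries contribute the index plus the tag value 1 for extra_data.
    void Accumulate(FieldMaxima* m, const void* method, uint32_t method_index) {
      if (method == nullptr) {
        m->method_index_max = std::max(m->method_index_max, method_index);
        m->extra_data_max = std::max(m->extra_data_max, 1u);
      } else {
        const uint64_t p = reinterpret_cast<uintptr_t>(method);
        m->method_index_max = std::max(m->method_index_max, High32Bits(p));
        m->extra_data_max = std::max(m->extra_data_max, Low32Bits(p));
      }
    }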
@@ -354,9 +378,20 @@ void StackMapStream::FillIn(MemoryRegion region) {
DCHECK_LE(entry.inline_infos_start_index + entry.inlining_depth, inline_infos_.size());
for (size_t depth = 0; depth < entry.inlining_depth; ++depth) {
InlineInfoEntry inline_entry = inline_infos_[depth + entry.inline_infos_start_index];
- inline_info.SetMethodIndexAtDepth(inline_info_encoding_, depth, inline_entry.method_index);
+ if (inline_entry.method != nullptr) {
+ inline_info.SetMethodIndexAtDepth(
+ inline_info_encoding_,
+ depth,
+ High32Bits(reinterpret_cast<uintptr_t>(inline_entry.method)));
+ inline_info.SetExtraDataAtDepth(
+ inline_info_encoding_,
+ depth,
+ Low32Bits(reinterpret_cast<uintptr_t>(inline_entry.method)));
+ } else {
+ inline_info.SetMethodIndexAtDepth(inline_info_encoding_, depth, inline_entry.method_index);
+ inline_info.SetExtraDataAtDepth(inline_info_encoding_, depth, 1);
+ }
inline_info.SetDexPcAtDepth(inline_info_encoding_, depth, inline_entry.dex_pc);
- inline_info.SetInvokeTypeAtDepth(inline_info_encoding_, depth, inline_entry.invoke_type);
if (inline_entry.num_dex_registers == 0) {
// No dex map available.
inline_info.SetDexRegisterMapOffsetAtDepth(inline_info_encoding_,
@@ -544,10 +579,13 @@ void StackMapStream::CheckCodeInfo(MemoryRegion region) const {
InlineInfoEntry inline_entry = inline_infos_[inline_info_index];
DCHECK_EQ(inline_info.GetDexPcAtDepth(encoding.inline_info_encoding, d),
inline_entry.dex_pc);
- DCHECK_EQ(inline_info.GetMethodIndexAtDepth(encoding.inline_info_encoding, d),
- inline_entry.method_index);
- DCHECK_EQ(inline_info.GetInvokeTypeAtDepth(encoding.inline_info_encoding, d),
- inline_entry.invoke_type);
+ if (inline_info.EncodesArtMethodAtDepth(encoding.inline_info_encoding, d)) {
+ DCHECK_EQ(inline_info.GetArtMethodAtDepth(encoding.inline_info_encoding, d),
+ inline_entry.method);
+ } else {
+ DCHECK_EQ(inline_info.GetMethodIndexAtDepth(encoding.inline_info_encoding, d),
+ inline_entry.method_index);
+ }
CheckDexRegisterMap(code_info,
code_info.GetDexRegisterMapAtDepth(
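FillIn writes extra_data = 1 for method-index entries and Low32Bits(method) for pointer entries, which is presumably what EncodesArtMethodAtDepth keys off: an ArtMethod* is at least word-aligned, so its low half is always even, and an odd extra_data (the literal 1) can only mean a method-index entry. A decode-side sketch under that low-bit assumption; the real readers are InlineInfo's accessors in stack_map.h, which are not part of this diff:

    #include <cstdint>

    struct DecodedInlineFrame {
      void* method = nullptr;     // valid when encodes_method is true
      uint32_t method_index = 0;  // valid otherwise
      bool encodes_method = false;
    };

    // Reassemble what FillIn split apart. The low-bit test is an assumption
    // about stack_map.h, justified by pointer alignment as described above.
    DecodedInlineFrame Decode(uint32_t method_index_field, uint32_t extra_data_field) {
      DecodedInlineFrame out;
      out.encodes_method = (extra_data_field & 1u) == 0;
      if (out.encodes_method) {
        const uint64_t p =
            (static_cast<uint64_t>(method_index_field) << 32) | extra_data_field;
        out.method = reinterpret_cast<void*>(static_cast<uintptr_t>(p));
      } else {
        out.method_index = method_index_field;
      }
      return out;
    }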
diff --git a/compiler/optimizing/stack_map_stream.h b/compiler/optimizing/stack_map_stream.h
index 53a9795d52..d6f42b373c 100644
--- a/compiler/optimizing/stack_map_stream.h
+++ b/compiler/optimizing/stack_map_stream.h
@@ -109,8 +109,8 @@ class StackMapStream : public ValueObject {
struct InlineInfoEntry {
uint32_t dex_pc; // DexFile::kDexNoIndex for intrinsified native methods.
+ ArtMethod* method;
uint32_t method_index;
- InvokeType invoke_type;
uint32_t num_dex_registers;
BitVector* live_dex_registers_mask;
size_t dex_register_locations_start_index;
@@ -126,10 +126,10 @@ class StackMapStream : public ValueObject {
void AddDexRegisterEntry(DexRegisterLocation::Kind kind, int32_t value);
- void BeginInlineInfoEntry(uint32_t method_index,
+ void BeginInlineInfoEntry(ArtMethod* method,
uint32_t dex_pc,
- InvokeType invoke_type,
- uint32_t num_dex_registers);
+ uint32_t num_dex_registers,
+ const DexFile* outer_dex_file = nullptr);
void EndInlineInfoEntry();
size_t GetNumberOfStackMaps() const {
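InlineInfoEntry now carries both method and method_index, with BeginInlineInfoEntry deciding which one is live per entry; outer_dex_file defaults to nullptr because it is only consulted by the debug-build same-dex-file check on the index path, which keeps test call sites short. A hypothetical call sequence against the new signature; this fragment is not standalone and assumes the surrounding ART compiler tree provides stream, method, and outer_dex_file:

    // One inlined frame: method pointer (or index, chosen internally),
    // dex pc, register count, and the outer dex file for the debug check.
    stream.BeginInlineInfoEntry(method, /* dex_pc */ 3,
                                /* num_dex_registers */ 1, outer_dex_file);
    stream.AddDexRegisterEntry(DexRegisterLocation::Kind::kInStack, 8);
    stream.EndInlineInfoEntry();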
diff --git a/compiler/optimizing/stack_map_test.cc b/compiler/optimizing/stack_map_test.cc
index 967fd96561..22810ea4f7 100644
--- a/compiler/optimizing/stack_map_test.cc
+++ b/compiler/optimizing/stack_map_test.cc
@@ -16,6 +16,7 @@
#include "stack_map.h"
+#include "art_method.h"
#include "base/arena_bit_vector.h"
#include "stack_map_stream.h"
@@ -128,6 +129,7 @@ TEST(StackMapTest, Test2) {
ArenaPool pool;
ArenaAllocator arena(&pool);
StackMapStream stream(&arena);
+ ArtMethod art_method;
ArenaBitVector sp_mask1(&arena, 0, true);
sp_mask1.SetBit(2);
@@ -137,9 +139,9 @@ TEST(StackMapTest, Test2) {
stream.BeginStackMapEntry(0, 64, 0x3, &sp_mask1, number_of_dex_registers, 2);
stream.AddDexRegisterEntry(Kind::kInStack, 0); // Short location.
stream.AddDexRegisterEntry(Kind::kConstant, -2); // Large location.
- stream.BeginInlineInfoEntry(82, 3, kDirect, number_of_dex_registers_in_inline_info);
+ stream.BeginInlineInfoEntry(&art_method, 3, number_of_dex_registers_in_inline_info);
stream.EndInlineInfoEntry();
- stream.BeginInlineInfoEntry(42, 2, kStatic, number_of_dex_registers_in_inline_info);
+ stream.BeginInlineInfoEntry(&art_method, 2, number_of_dex_registers_in_inline_info);
stream.EndInlineInfoEntry();
stream.EndStackMapEntry();
@@ -238,12 +240,10 @@ TEST(StackMapTest, Test2) {
ASSERT_TRUE(stack_map.HasInlineInfo(encoding.stack_map_encoding));
InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map, encoding);
ASSERT_EQ(2u, inline_info.GetDepth(encoding.inline_info_encoding));
- ASSERT_EQ(82u, inline_info.GetMethodIndexAtDepth(encoding.inline_info_encoding, 0));
- ASSERT_EQ(42u, inline_info.GetMethodIndexAtDepth(encoding.inline_info_encoding, 1));
ASSERT_EQ(3u, inline_info.GetDexPcAtDepth(encoding.inline_info_encoding, 0));
ASSERT_EQ(2u, inline_info.GetDexPcAtDepth(encoding.inline_info_encoding, 1));
- ASSERT_EQ(kDirect, inline_info.GetInvokeTypeAtDepth(encoding.inline_info_encoding, 0));
- ASSERT_EQ(kStatic, inline_info.GetInvokeTypeAtDepth(encoding.inline_info_encoding, 1));
+ ASSERT_TRUE(inline_info.EncodesArtMethodAtDepth(encoding.inline_info_encoding, 0));
+ ASSERT_TRUE(inline_info.EncodesArtMethodAtDepth(encoding.inline_info_encoding, 1));
}
// Second stack map.
@@ -662,6 +662,7 @@ TEST(StackMapTest, InlineTest) {
ArenaPool pool;
ArenaAllocator arena(&pool);
StackMapStream stream(&arena);
+ ArtMethod art_method;
ArenaBitVector sp_mask1(&arena, 0, true);
sp_mask1.SetBit(2);
@@ -672,10 +673,10 @@ TEST(StackMapTest, InlineTest) {
stream.AddDexRegisterEntry(Kind::kInStack, 0);
stream.AddDexRegisterEntry(Kind::kConstant, 4);
- stream.BeginInlineInfoEntry(42, 2, kStatic, 1);
+ stream.BeginInlineInfoEntry(&art_method, 2, 1);
stream.AddDexRegisterEntry(Kind::kInStack, 8);
stream.EndInlineInfoEntry();
- stream.BeginInlineInfoEntry(82, 3, kStatic, 3);
+ stream.BeginInlineInfoEntry(&art_method, 3, 3);
stream.AddDexRegisterEntry(Kind::kInStack, 16);
stream.AddDexRegisterEntry(Kind::kConstant, 20);
stream.AddDexRegisterEntry(Kind::kInRegister, 15);
@@ -688,15 +689,15 @@ TEST(StackMapTest, InlineTest) {
stream.AddDexRegisterEntry(Kind::kInStack, 56);
stream.AddDexRegisterEntry(Kind::kConstant, 0);
- stream.BeginInlineInfoEntry(42, 2, kDirect, 1);
+ stream.BeginInlineInfoEntry(&art_method, 2, 1);
stream.AddDexRegisterEntry(Kind::kInStack, 12);
stream.EndInlineInfoEntry();
- stream.BeginInlineInfoEntry(82, 3, kStatic, 3);
+ stream.BeginInlineInfoEntry(&art_method, 3, 3);
stream.AddDexRegisterEntry(Kind::kInStack, 80);
stream.AddDexRegisterEntry(Kind::kConstant, 10);
stream.AddDexRegisterEntry(Kind::kInRegister, 5);
stream.EndInlineInfoEntry();
- stream.BeginInlineInfoEntry(52, 5, kVirtual, 0);
+ stream.BeginInlineInfoEntry(&art_method, 5, 0);
stream.EndInlineInfoEntry();
stream.EndStackMapEntry();
@@ -712,12 +713,12 @@ TEST(StackMapTest, InlineTest) {
stream.AddDexRegisterEntry(Kind::kInStack, 56);
stream.AddDexRegisterEntry(Kind::kConstant, 0);
- stream.BeginInlineInfoEntry(42, 2, kVirtual, 0);
+ stream.BeginInlineInfoEntry(&art_method, 2, 0);
stream.EndInlineInfoEntry();
- stream.BeginInlineInfoEntry(52, 5, kInterface, 1);
+ stream.BeginInlineInfoEntry(&art_method, 5, 1);
stream.AddDexRegisterEntry(Kind::kInRegister, 2);
stream.EndInlineInfoEntry();
- stream.BeginInlineInfoEntry(52, 10, kStatic, 2);
+ stream.BeginInlineInfoEntry(&art_method, 10, 2);
stream.AddDexRegisterEntry(Kind::kNone, 0);
stream.AddDexRegisterEntry(Kind::kInRegister, 3);
stream.EndInlineInfoEntry();
@@ -743,11 +744,9 @@ TEST(StackMapTest, InlineTest) {
InlineInfo if0 = ci.GetInlineInfoOf(sm0, encoding);
ASSERT_EQ(2u, if0.GetDepth(encoding.inline_info_encoding));
ASSERT_EQ(2u, if0.GetDexPcAtDepth(encoding.inline_info_encoding, 0));
- ASSERT_EQ(42u, if0.GetMethodIndexAtDepth(encoding.inline_info_encoding, 0));
- ASSERT_EQ(kStatic, if0.GetInvokeTypeAtDepth(encoding.inline_info_encoding, 0));
+ ASSERT_TRUE(if0.EncodesArtMethodAtDepth(encoding.inline_info_encoding, 0));
ASSERT_EQ(3u, if0.GetDexPcAtDepth(encoding.inline_info_encoding, 1));
- ASSERT_EQ(82u, if0.GetMethodIndexAtDepth(encoding.inline_info_encoding, 1));
- ASSERT_EQ(kStatic, if0.GetInvokeTypeAtDepth(encoding.inline_info_encoding, 1));
+ ASSERT_TRUE(if0.EncodesArtMethodAtDepth(encoding.inline_info_encoding, 1));
DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(0, if0, encoding, 1);
ASSERT_EQ(8, dex_registers1.GetStackOffsetInBytes(0, 1, ci, encoding));
@@ -769,14 +768,11 @@ TEST(StackMapTest, InlineTest) {
InlineInfo if1 = ci.GetInlineInfoOf(sm1, encoding);
ASSERT_EQ(3u, if1.GetDepth(encoding.inline_info_encoding));
ASSERT_EQ(2u, if1.GetDexPcAtDepth(encoding.inline_info_encoding, 0));
- ASSERT_EQ(42u, if1.GetMethodIndexAtDepth(encoding.inline_info_encoding, 0));
- ASSERT_EQ(kDirect, if1.GetInvokeTypeAtDepth(encoding.inline_info_encoding, 0));
+ ASSERT_TRUE(if1.EncodesArtMethodAtDepth(encoding.inline_info_encoding, 0));
ASSERT_EQ(3u, if1.GetDexPcAtDepth(encoding.inline_info_encoding, 1));
- ASSERT_EQ(82u, if1.GetMethodIndexAtDepth(encoding.inline_info_encoding, 1));
- ASSERT_EQ(kStatic, if1.GetInvokeTypeAtDepth(encoding.inline_info_encoding, 1));
+ ASSERT_TRUE(if1.EncodesArtMethodAtDepth(encoding.inline_info_encoding, 1));
ASSERT_EQ(5u, if1.GetDexPcAtDepth(encoding.inline_info_encoding, 2));
- ASSERT_EQ(52u, if1.GetMethodIndexAtDepth(encoding.inline_info_encoding, 2));
- ASSERT_EQ(kVirtual, if1.GetInvokeTypeAtDepth(encoding.inline_info_encoding, 2));
+ ASSERT_TRUE(if1.EncodesArtMethodAtDepth(encoding.inline_info_encoding, 2));
DexRegisterMap dex_registers1 = ci.GetDexRegisterMapAtDepth(0, if1, encoding, 1);
ASSERT_EQ(12, dex_registers1.GetStackOffsetInBytes(0, 1, ci, encoding));
@@ -810,14 +806,11 @@ TEST(StackMapTest, InlineTest) {
InlineInfo if2 = ci.GetInlineInfoOf(sm3, encoding);
ASSERT_EQ(3u, if2.GetDepth(encoding.inline_info_encoding));
ASSERT_EQ(2u, if2.GetDexPcAtDepth(encoding.inline_info_encoding, 0));
- ASSERT_EQ(42u, if2.GetMethodIndexAtDepth(encoding.inline_info_encoding, 0));
- ASSERT_EQ(kVirtual, if2.GetInvokeTypeAtDepth(encoding.inline_info_encoding, 0));
+ ASSERT_TRUE(if2.EncodesArtMethodAtDepth(encoding.inline_info_encoding, 0));
ASSERT_EQ(5u, if2.GetDexPcAtDepth(encoding.inline_info_encoding, 1));
- ASSERT_EQ(52u, if2.GetMethodIndexAtDepth(encoding.inline_info_encoding, 1));
- ASSERT_EQ(kInterface, if2.GetInvokeTypeAtDepth(encoding.inline_info_encoding, 1));
+ ASSERT_TRUE(if2.EncodesArtMethodAtDepth(encoding.inline_info_encoding, 1));
ASSERT_EQ(10u, if2.GetDexPcAtDepth(encoding.inline_info_encoding, 2));
- ASSERT_EQ(52u, if2.GetMethodIndexAtDepth(encoding.inline_info_encoding, 2));
- ASSERT_EQ(kStatic, if2.GetInvokeTypeAtDepth(encoding.inline_info_encoding, 2));
+ ASSERT_TRUE(if2.EncodesArtMethodAtDepth(encoding.inline_info_encoding, 2));
ASSERT_FALSE(if2.HasDexRegisterMapAtDepth(encoding.inline_info_encoding, 0));
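The test updates follow directly from the helper added in stack_map_stream.cc: gtests run without a Runtime, so EncodeArtMethodInInlineInfo() returns true and every entry takes the pointer path. That is why the method-index and invoke-type assertions are replaced wholesale by EncodesArtMethodAtDepth checks against the single stack-allocated art_method, rather than being updated to new expected index values.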