Make a runtime call on the main path for HLoadClass/kDexCacheViaMethod.
Remove the compiled code's dependency on the types dex cache
array, in preparation for changing that array to a hash-based layout.
Test: m test-art-host
Test: m test-art-target on Nexus 9
Bug: 30627598
Change-Id: I3c426ed762c12eb9eb4bb61ea9a23a0659abf0a2
diff --git a/compiler/optimizing/code_generator.cc b/compiler/optimizing/code_generator.cc
index 9ca7b19..6c680c8 100644
--- a/compiler/optimizing/code_generator.cc
+++ b/compiler/optimizing/code_generator.cc
@@ -497,30 +497,33 @@
}
}
-// TODO: Remove argument `code_generator_supports_read_barrier` when
-// all code generators have read barrier support.
-void CodeGenerator::CreateLoadClassLocationSummary(HLoadClass* cls,
- Location runtime_type_index_location,
- Location runtime_return_location,
- bool code_generator_supports_read_barrier) {
- ArenaAllocator* allocator = cls->GetBlock()->GetGraph()->GetArena();
- LocationSummary::CallKind call_kind = cls->NeedsAccessCheck()
- ? LocationSummary::kCallOnMainOnly
- : (((code_generator_supports_read_barrier && kEmitCompilerReadBarrier) ||
- cls->CanCallRuntime())
- ? LocationSummary::kCallOnSlowPath
- : LocationSummary::kNoCall);
- LocationSummary* locations = new (allocator) LocationSummary(cls, call_kind);
- if (cls->NeedsAccessCheck()) {
- locations->SetInAt(0, Location::NoLocation());
- locations->AddTemp(runtime_type_index_location);
- locations->SetOut(runtime_return_location);
- } else {
- locations->SetInAt(0, Location::RequiresRegister());
- locations->SetOut(Location::RequiresRegister());
- }
+void CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(HLoadClass* cls,
+ Location runtime_type_index_location,
+ Location runtime_return_location) {
+ DCHECK_EQ(cls->GetLoadKind(), HLoadClass::LoadKind::kDexCacheViaMethod);
+ DCHECK_EQ(cls->InputCount(), 1u);
+ LocationSummary* locations = new (cls->GetBlock()->GetGraph()->GetArena()) LocationSummary(
+ cls, LocationSummary::kCallOnMainOnly);
+ locations->SetInAt(0, Location::NoLocation());
+ locations->AddTemp(runtime_type_index_location);
+ locations->SetOut(runtime_return_location);
}
+void CodeGenerator::GenerateLoadClassRuntimeCall(HLoadClass* cls) {
+ DCHECK_EQ(cls->GetLoadKind(), HLoadClass::LoadKind::kDexCacheViaMethod);
+ LocationSummary* locations = cls->GetLocations();
+ MoveConstant(locations->GetTemp(0), cls->GetTypeIndex().index_);
+ if (cls->NeedsAccessCheck()) {
+ CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
+ InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
+ } else if (cls->MustGenerateClinitCheck()) {
+ CheckEntrypointTypes<kQuickInitializeStaticStorage, void*, uint32_t>();
+ InvokeRuntime(kQuickInitializeStaticStorage, cls, cls->GetDexPc());
+ } else {
+ CheckEntrypointTypes<kQuickInitializeType, void*, uint32_t>();
+ InvokeRuntime(kQuickInitializeType, cls, cls->GetDexPc());
+ }
+}
void CodeGenerator::BlockIfInRegister(Location location, bool is_out) const {
// The DCHECKS below check that a register is not specified twice in
diff --git a/compiler/optimizing/code_generator.h b/compiler/optimizing/code_generator.h
index 7e2dd48..38d532e 100644
--- a/compiler/optimizing/code_generator.h
+++ b/compiler/optimizing/code_generator.h
@@ -509,11 +509,10 @@
uint32_t dex_pc,
const FieldAccessCallingConvention& calling_convention);
- // TODO: This overlaps a bit with MoveFromReturnRegister. Refactor for a better design.
- static void CreateLoadClassLocationSummary(HLoadClass* cls,
- Location runtime_type_index_location,
- Location runtime_return_location,
- bool code_generator_supports_read_barrier = false);
+ static void CreateLoadClassRuntimeCallLocationSummary(HLoadClass* cls,
+ Location runtime_type_index_location,
+ Location runtime_return_location);
+ void GenerateLoadClassRuntimeCall(HLoadClass* cls);
static void CreateSystemArrayCopyLocationSummary(HInvoke* invoke);
@@ -523,7 +522,7 @@
virtual void InvokeRuntime(QuickEntrypointEnum entrypoint,
HInstruction* instruction,
uint32_t dex_pc,
- SlowPathCode* slow_path) = 0;
+ SlowPathCode* slow_path = nullptr) = 0;
// Check if the desired_string_load_kind is supported. If it is, return it,
// otherwise return a fall-back kind that should be used instead.
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc
index 4a771cb..6c9e83e 100644
--- a/compiler/optimizing/code_generator_arm.cc
+++ b/compiler/optimizing/code_generator_arm.cc
@@ -5725,15 +5725,16 @@
}
void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
- if (cls->NeedsAccessCheck()) {
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
InvokeRuntimeCallingConvention calling_convention;
- CodeGenerator::CreateLoadClassLocationSummary(
+ CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
cls,
Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
- Location::RegisterLocation(R0),
- /* code_generator_supports_read_barrier */ true);
+ Location::RegisterLocation(R0));
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
@@ -5744,23 +5745,21 @@
locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
}
- HLoadClass::LoadKind load_kind = cls->GetLoadKind();
- if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
- load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
+ if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
locations->SetInAt(0, Location::RequiresRegister());
}
locations->SetOut(Location::RequiresRegister());
}
void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
- LocationSummary* locations = cls->GetLocations();
- if (cls->NeedsAccessCheck()) {
- codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex().index_);
- codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
- CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
+ codegen_->GenerateLoadClassRuntimeCall(cls);
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
+ LocationSummary* locations = cls->GetLocations();
Location out_loc = locations->Out();
Register out = out_loc.AsRegister<Register>();
@@ -5768,7 +5767,7 @@
? kWithoutReadBarrier
: kCompilerReadBarrierOption;
bool generate_null_check = false;
- switch (cls->GetLoadKind()) {
+ switch (load_kind) {
case HLoadClass::LoadKind::kReferrersClass: {
DCHECK(!cls->CanCallRuntime());
DCHECK(!cls->MustGenerateClinitCheck());
@@ -5814,19 +5813,9 @@
GenerateGcRootFieldLoad(cls, out_loc, out, /* offset */ 0, kCompilerReadBarrierOption);
break;
}
- case HLoadClass::LoadKind::kDexCacheViaMethod: {
- // /* GcRoot<mirror::Class>[] */ out =
- // current_method.ptr_sized_fields_->dex_cache_resolved_types_
- Register current_method = locations->InAt(0).AsRegister<Register>();
- __ LoadFromOffset(kLoadWord,
- out,
- current_method,
- ArtMethod::DexCacheResolvedTypesOffset(kArmPointerSize).Int32Value());
- // /* GcRoot<mirror::Class> */ out = out[type_index]
- size_t offset = CodeGenerator::GetCacheOffset(cls->GetTypeIndex().index_);
- GenerateGcRootFieldLoad(cls, out_loc, out, offset, read_barrier_option);
- generate_null_check = !cls->IsInDexCache();
- }
+ case HLoadClass::LoadKind::kDexCacheViaMethod:
+ LOG(FATAL) << "UNREACHABLE";
+ UNREACHABLE();
}
if (generate_null_check || cls->MustGenerateClinitCheck()) {
diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc
index fb0871b..8c4503d 100644
--- a/compiler/optimizing/code_generator_arm64.cc
+++ b/compiler/optimizing/code_generator_arm64.cc
@@ -4316,15 +4316,16 @@
}
void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) {
- if (cls->NeedsAccessCheck()) {
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
InvokeRuntimeCallingConvention calling_convention;
- CodeGenerator::CreateLoadClassLocationSummary(
+ CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
cls,
LocationFrom(calling_convention.GetRegisterAt(0)),
- LocationFrom(vixl::aarch64::x0),
- /* code_generator_supports_read_barrier */ true);
+ LocationFrom(vixl::aarch64::x0));
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
@@ -4335,21 +4336,19 @@
locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
}
- HLoadClass::LoadKind load_kind = cls->GetLoadKind();
- if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
- load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
+ if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
locations->SetInAt(0, Location::RequiresRegister());
}
locations->SetOut(Location::RequiresRegister());
}
void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) {
- if (cls->NeedsAccessCheck()) {
- codegen_->MoveConstant(cls->GetLocations()->GetTemp(0), cls->GetTypeIndex().index_);
- codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
- CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
+ codegen_->GenerateLoadClassRuntimeCall(cls);
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
Location out_loc = cls->GetLocations()->Out();
Register out = OutputRegister(cls);
@@ -4358,7 +4357,7 @@
? kWithoutReadBarrier
: kCompilerReadBarrierOption;
bool generate_null_check = false;
- switch (cls->GetLoadKind()) {
+ switch (load_kind) {
case HLoadClass::LoadKind::kReferrersClass: {
DCHECK(!cls->CanCallRuntime());
DCHECK(!cls->MustGenerateClinitCheck());
@@ -4408,23 +4407,9 @@
kCompilerReadBarrierOption);
break;
}
- case HLoadClass::LoadKind::kDexCacheViaMethod: {
- MemberOffset resolved_types_offset =
- ArtMethod::DexCacheResolvedTypesOffset(kArm64PointerSize);
- // /* GcRoot<mirror::Class>[] */ out =
- // current_method.ptr_sized_fields_->dex_cache_resolved_types_
- Register current_method = InputRegisterAt(cls, 0);
- __ Ldr(out.X(), MemOperand(current_method, resolved_types_offset.Int32Value()));
- // /* GcRoot<mirror::Class> */ out = out[type_index]
- GenerateGcRootFieldLoad(cls,
- out_loc,
- out.X(),
- CodeGenerator::GetCacheOffset(cls->GetTypeIndex().index_),
- /* fixup_label */ nullptr,
- read_barrier_option);
- generate_null_check = !cls->IsInDexCache();
- break;
- }
+ case HLoadClass::LoadKind::kDexCacheViaMethod:
+ LOG(FATAL) << "UNREACHABLE";
+ UNREACHABLE();
}
if (generate_null_check || cls->MustGenerateClinitCheck()) {
diff --git a/compiler/optimizing/code_generator_arm_vixl.cc b/compiler/optimizing/code_generator_arm_vixl.cc
index d8a5c5e..877c698 100644
--- a/compiler/optimizing/code_generator_arm_vixl.cc
+++ b/compiler/optimizing/code_generator_arm_vixl.cc
@@ -5806,15 +5806,16 @@
}
void LocationsBuilderARMVIXL::VisitLoadClass(HLoadClass* cls) {
- if (cls->NeedsAccessCheck()) {
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
InvokeRuntimeCallingConventionARMVIXL calling_convention;
- CodeGenerator::CreateLoadClassLocationSummary(
+ CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
cls,
LocationFrom(calling_convention.GetRegisterAt(0)),
- LocationFrom(r0),
- /* code_generator_supports_read_barrier */ true);
+ LocationFrom(r0));
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
@@ -5825,23 +5826,21 @@
locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
}
- HLoadClass::LoadKind load_kind = cls->GetLoadKind();
- if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
- load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
+ if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
locations->SetInAt(0, Location::RequiresRegister());
}
locations->SetOut(Location::RequiresRegister());
}
void InstructionCodeGeneratorARMVIXL::VisitLoadClass(HLoadClass* cls) {
- LocationSummary* locations = cls->GetLocations();
- if (cls->NeedsAccessCheck()) {
- codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex().index_);
- codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
- CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
+ codegen_->GenerateLoadClassRuntimeCall(cls);
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
+ LocationSummary* locations = cls->GetLocations();
Location out_loc = locations->Out();
vixl32::Register out = OutputRegister(cls);
@@ -5849,7 +5848,7 @@
? kWithoutReadBarrier
: kCompilerReadBarrierOption;
bool generate_null_check = false;
- switch (cls->GetLoadKind()) {
+ switch (load_kind) {
case HLoadClass::LoadKind::kReferrersClass: {
DCHECK(!cls->CanCallRuntime());
DCHECK(!cls->MustGenerateClinitCheck());
@@ -5890,21 +5889,9 @@
GenerateGcRootFieldLoad(cls, out_loc, out, /* offset */ 0, kCompilerReadBarrierOption);
break;
}
- case HLoadClass::LoadKind::kDexCacheViaMethod: {
- // /* GcRoot<mirror::Class>[] */ out =
- // current_method.ptr_sized_fields_->dex_cache_resolved_types_
- vixl32::Register current_method = InputRegisterAt(cls, 0);
- const int32_t resolved_types_offset =
- ArtMethod::DexCacheResolvedTypesOffset(kArmPointerSize).Int32Value();
- GetAssembler()->LoadFromOffset(kLoadWord, out, current_method, resolved_types_offset);
- // /* GcRoot<mirror::Class> */ out = out[type_index]
- size_t offset = CodeGenerator::GetCacheOffset(cls->GetTypeIndex().index_);
- GenerateGcRootFieldLoad(cls, out_loc, out, offset, read_barrier_option);
- generate_null_check = !cls->IsInDexCache();
- break;
- }
- default:
- TODO_VIXL32(FATAL);
+ case HLoadClass::LoadKind::kDexCacheViaMethod:
+ LOG(FATAL) << "UNREACHABLE";
+ UNREACHABLE();
}
if (generate_null_check || cls->MustGenerateClinitCheck()) {
diff --git a/compiler/optimizing/code_generator_mips.cc b/compiler/optimizing/code_generator_mips.cc
index add9930..8498b73 100644
--- a/compiler/optimizing/code_generator_mips.cc
+++ b/compiler/optimizing/code_generator_mips.cc
@@ -5430,21 +5430,21 @@
}
void LocationsBuilderMIPS::VisitLoadClass(HLoadClass* cls) {
- if (cls->NeedsAccessCheck()) {
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
InvokeRuntimeCallingConvention calling_convention;
- CodeGenerator::CreateLoadClassLocationSummary(
+ CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
cls,
Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
- Location::RegisterLocation(V0),
- /* code_generator_supports_read_barrier */ false); // TODO: revisit this bool.
+ Location::RegisterLocation(V0));
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier)
? LocationSummary::kCallOnSlowPath
: LocationSummary::kNoCall;
LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
- HLoadClass::LoadKind load_kind = cls->GetLoadKind();
switch (load_kind) {
// We need an extra register for PC-relative literals on R2.
case HLoadClass::LoadKind::kBootImageLinkTimeAddress:
@@ -5455,7 +5455,6 @@
}
FALLTHROUGH_INTENDED;
case HLoadClass::LoadKind::kReferrersClass:
- case HLoadClass::LoadKind::kDexCacheViaMethod:
locations->SetInAt(0, Location::RequiresRegister());
break;
default:
@@ -5465,15 +5464,14 @@
}
void InstructionCodeGeneratorMIPS::VisitLoadClass(HLoadClass* cls) {
- LocationSummary* locations = cls->GetLocations();
- if (cls->NeedsAccessCheck()) {
- codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex().index_);
- codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
- CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
+ codegen_->GenerateLoadClassRuntimeCall(cls);
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
- HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ LocationSummary* locations = cls->GetLocations();
Location out_loc = locations->Out();
Register out = out_loc.AsRegister<Register>();
Register base_or_current_method_reg;
@@ -5533,18 +5531,9 @@
LOG(FATAL) << "Unimplemented";
break;
}
- case HLoadClass::LoadKind::kDexCacheViaMethod: {
- // /* GcRoot<mirror::Class>[] */ out =
- // current_method.ptr_sized_fields_->dex_cache_resolved_types_
- __ LoadFromOffset(kLoadWord,
- out,
- base_or_current_method_reg,
- ArtMethod::DexCacheResolvedTypesOffset(kArmPointerSize).Int32Value());
- // /* GcRoot<mirror::Class> */ out = out[type_index]
- size_t offset = CodeGenerator::GetCacheOffset(cls->GetTypeIndex().index_);
- GenerateGcRootFieldLoad(cls, out_loc, out, offset);
- generate_null_check = !cls->IsInDexCache();
- }
+ case HLoadClass::LoadKind::kDexCacheViaMethod:
+ LOG(FATAL) << "UNREACHABLE";
+ UNREACHABLE();
}
if (generate_null_check || cls->MustGenerateClinitCheck()) {
diff --git a/compiler/optimizing/code_generator_mips64.cc b/compiler/optimizing/code_generator_mips64.cc
index c28a3fe..4d8f7ec 100644
--- a/compiler/optimizing/code_generator_mips64.cc
+++ b/compiler/optimizing/code_generator_mips64.cc
@@ -3479,38 +3479,36 @@
}
void LocationsBuilderMIPS64::VisitLoadClass(HLoadClass* cls) {
- if (cls->NeedsAccessCheck()) {
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
InvokeRuntimeCallingConvention calling_convention;
- CodeGenerator::CreateLoadClassLocationSummary(
+ CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
cls,
Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
- calling_convention.GetReturnLocation(Primitive::kPrimNot),
- /* code_generator_supports_read_barrier */ false);
+ calling_convention.GetReturnLocation(Primitive::kPrimNot));
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier)
? LocationSummary::kCallOnSlowPath
: LocationSummary::kNoCall;
LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
- HLoadClass::LoadKind load_kind = cls->GetLoadKind();
- if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
- load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
+ if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
locations->SetInAt(0, Location::RequiresRegister());
}
locations->SetOut(Location::RequiresRegister());
}
void InstructionCodeGeneratorMIPS64::VisitLoadClass(HLoadClass* cls) {
- LocationSummary* locations = cls->GetLocations();
- if (cls->NeedsAccessCheck()) {
- codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex().index_);
- codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
- CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
+ codegen_->GenerateLoadClassRuntimeCall(cls);
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
- HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ LocationSummary* locations = cls->GetLocations();
Location out_loc = locations->Out();
GpuRegister out = out_loc.AsRegister<GpuRegister>();
GpuRegister current_method_reg = ZERO;
@@ -3558,18 +3556,9 @@
LOG(FATAL) << "Unimplemented";
break;
}
- case HLoadClass::LoadKind::kDexCacheViaMethod: {
- // /* GcRoot<mirror::Class>[] */ out =
- // current_method.ptr_sized_fields_->dex_cache_resolved_types_
- __ LoadFromOffset(kLoadDoubleword,
- out,
- current_method_reg,
- ArtMethod::DexCacheResolvedTypesOffset(kMips64PointerSize).Int32Value());
- // /* GcRoot<mirror::Class> */ out = out[type_index]
- size_t offset = CodeGenerator::GetCacheOffset(cls->GetTypeIndex().index_);
- GenerateGcRootFieldLoad(cls, out_loc, out, offset);
- generate_null_check = !cls->IsInDexCache();
- }
+ case HLoadClass::LoadKind::kDexCacheViaMethod:
+ LOG(FATAL) << "UNREACHABLE";
+ UNREACHABLE();
}
if (generate_null_check || cls->MustGenerateClinitCheck()) {
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 16a2725..8b14513 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -6005,15 +6005,16 @@
}
void LocationsBuilderX86::VisitLoadClass(HLoadClass* cls) {
- if (cls->NeedsAccessCheck()) {
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
InvokeRuntimeCallingConvention calling_convention;
- CodeGenerator::CreateLoadClassLocationSummary(
+ CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
cls,
Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
- Location::RegisterLocation(EAX),
- /* code_generator_supports_read_barrier */ true);
+ Location::RegisterLocation(EAX));
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
@@ -6024,9 +6025,7 @@
locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
}
- HLoadClass::LoadKind load_kind = cls->GetLoadKind();
if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
- load_kind == HLoadClass::LoadKind::kDexCacheViaMethod ||
load_kind == HLoadClass::LoadKind::kBootImageLinkTimePcRelative) {
locations->SetInAt(0, Location::RequiresRegister());
}
@@ -6044,14 +6043,14 @@
}
void InstructionCodeGeneratorX86::VisitLoadClass(HLoadClass* cls) {
- LocationSummary* locations = cls->GetLocations();
- if (cls->NeedsAccessCheck()) {
- codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex().index_);
- codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
- CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
+ codegen_->GenerateLoadClassRuntimeCall(cls);
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
+ LocationSummary* locations = cls->GetLocations();
Location out_loc = locations->Out();
Register out = out_loc.AsRegister<Register>();
@@ -6059,7 +6058,7 @@
const ReadBarrierOption read_barrier_option = cls->IsInBootImage()
? kWithoutReadBarrier
: kCompilerReadBarrierOption;
- switch (cls->GetLoadKind()) {
+ switch (load_kind) {
case HLoadClass::LoadKind::kReferrersClass: {
DCHECK(!cls->CanCallRuntime());
DCHECK(!cls->MustGenerateClinitCheck());
@@ -6102,22 +6101,9 @@
GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, kCompilerReadBarrierOption);
break;
}
- case HLoadClass::LoadKind::kDexCacheViaMethod: {
- // /* GcRoot<mirror::Class>[] */ out =
- // current_method.ptr_sized_fields_->dex_cache_resolved_types_
- Register current_method = locations->InAt(0).AsRegister<Register>();
- __ movl(out, Address(current_method,
- ArtMethod::DexCacheResolvedTypesOffset(kX86PointerSize).Int32Value()));
- // /* GcRoot<mirror::Class> */ out = out[type_index]
- GenerateGcRootFieldLoad(cls,
- out_loc,
- Address(out,
- CodeGenerator::GetCacheOffset(cls->GetTypeIndex().index_)),
- /* fixup_label */ nullptr,
- read_barrier_option);
- generate_null_check = !cls->IsInDexCache();
- break;
- }
+ case HLoadClass::LoadKind::kDexCacheViaMethod:
+ LOG(FATAL) << "UNREACHABLE";
+ UNREACHABLE();
}
if (generate_null_check || cls->MustGenerateClinitCheck()) {
diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc
index a1b9d92..32ade92 100644
--- a/compiler/optimizing/code_generator_x86_64.cc
+++ b/compiler/optimizing/code_generator_x86_64.cc
@@ -5433,15 +5433,16 @@
}
void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) {
- if (cls->NeedsAccessCheck()) {
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
InvokeRuntimeCallingConvention calling_convention;
- CodeGenerator::CreateLoadClassLocationSummary(
+ CodeGenerator::CreateLoadClassRuntimeCallLocationSummary(
cls,
Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
- Location::RegisterLocation(RAX),
- /* code_generator_supports_read_barrier */ true);
+ Location::RegisterLocation(RAX));
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage();
LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier)
@@ -5452,9 +5453,7 @@
locations->SetCustomSlowPathCallerSaves(RegisterSet::Empty()); // No caller-save registers.
}
- HLoadClass::LoadKind load_kind = cls->GetLoadKind();
- if (load_kind == HLoadClass::LoadKind::kReferrersClass ||
- load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
+ if (load_kind == HLoadClass::LoadKind::kReferrersClass) {
locations->SetInAt(0, Location::RequiresRegister());
}
locations->SetOut(Location::RequiresRegister());
@@ -5471,14 +5470,14 @@
}
void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) {
- LocationSummary* locations = cls->GetLocations();
- if (cls->NeedsAccessCheck()) {
- codegen_->MoveConstant(locations->GetTemp(0), cls->GetTypeIndex().index_);
- codegen_->InvokeRuntime(kQuickInitializeTypeAndVerifyAccess, cls, cls->GetDexPc());
- CheckEntrypointTypes<kQuickInitializeTypeAndVerifyAccess, void*, uint32_t>();
+ HLoadClass::LoadKind load_kind = cls->GetLoadKind();
+ if (load_kind == HLoadClass::LoadKind::kDexCacheViaMethod) {
+ codegen_->GenerateLoadClassRuntimeCall(cls);
return;
}
+ DCHECK(!cls->NeedsAccessCheck());
+ LocationSummary* locations = cls->GetLocations();
Location out_loc = locations->Out();
CpuRegister out = out_loc.AsRegister<CpuRegister>();
@@ -5486,7 +5485,7 @@
? kWithoutReadBarrier
: kCompilerReadBarrierOption;
bool generate_null_check = false;
- switch (cls->GetLoadKind()) {
+ switch (load_kind) {
case HLoadClass::LoadKind::kReferrersClass: {
DCHECK(!cls->CanCallRuntime());
DCHECK(!cls->MustGenerateClinitCheck());
@@ -5522,23 +5521,6 @@
GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, kCompilerReadBarrierOption);
break;
}
- case HLoadClass::LoadKind::kDexCacheViaMethod: {
- // /* GcRoot<mirror::Class>[] */ out =
- // current_method.ptr_sized_fields_->dex_cache_resolved_types_
- CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>();
- __ movq(out,
- Address(current_method,
- ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value()));
- // /* GcRoot<mirror::Class> */ out = out[type_index]
- GenerateGcRootFieldLoad(
- cls,
- out_loc,
- Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex().index_)),
- /* fixup_label */ nullptr,
- read_barrier_option);
- generate_null_check = !cls->IsInDexCache();
- break;
- }
default:
LOG(FATAL) << "Unexpected load kind: " << cls->GetLoadKind();
UNREACHABLE();
diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h
index 740affe..d92092c 100644
--- a/compiler/optimizing/nodes.h
+++ b/compiler/optimizing/nodes.h
@@ -5579,7 +5579,6 @@
SetPackedField<LoadKindField>(
is_referrers_class ? LoadKind::kReferrersClass : LoadKind::kDexCacheViaMethod);
SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check);
- SetPackedFlag<kFlagIsInDexCache>(false);
SetPackedFlag<kFlagIsInBootImage>(false);
SetPackedFlag<kFlagGenerateClInitCheck>(false);
}
@@ -5624,13 +5623,18 @@
}
bool CanCallRuntime() const {
- return MustGenerateClinitCheck() ||
- (!IsReferrersClass() && !IsInDexCache()) ||
- NeedsAccessCheck();
+ return NeedsAccessCheck() ||
+ MustGenerateClinitCheck() ||
+ GetLoadKind() == LoadKind::kDexCacheViaMethod;
}
bool CanThrow() const OVERRIDE {
- return CanCallRuntime();
+ return NeedsAccessCheck() ||
+ MustGenerateClinitCheck() ||
+ // If the class is in the boot image, the lookup in the runtime call cannot throw.
+ // This keeps CanThrow() consistent between non-PIC (using kBootImageAddress) and
+ // PIC and subsequently avoids a DCE behavior dependency on the PIC option.
+ (GetLoadKind() == LoadKind::kDexCacheViaMethod && !IsInBootImage());
}
ReferenceTypeInfo GetLoadedClassRTI() {
@@ -5652,7 +5656,7 @@
}
bool NeedsDexCacheOfDeclaringClass() const OVERRIDE {
- return !IsReferrersClass();
+ return GetLoadKind() == LoadKind::kDexCacheViaMethod;
}
static SideEffects SideEffectsForArchRuntimeCalls() {
@@ -5661,17 +5665,9 @@
bool IsReferrersClass() const { return GetLoadKind() == LoadKind::kReferrersClass; }
bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); }
- bool IsInDexCache() const { return GetPackedFlag<kFlagIsInDexCache>(); }
bool IsInBootImage() const { return GetPackedFlag<kFlagIsInBootImage>(); }
bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); }
- void MarkInDexCache() {
- SetPackedFlag<kFlagIsInDexCache>(true);
- DCHECK(!NeedsEnvironment());
- RemoveEnvironment();
- SetSideEffects(SideEffects::None());
- }
-
void MarkInBootImage() {
SetPackedFlag<kFlagIsInBootImage>(true);
}
@@ -5692,8 +5688,7 @@
private:
static constexpr size_t kFlagNeedsAccessCheck = kNumberOfGenericPackedBits;
- static constexpr size_t kFlagIsInDexCache = kFlagNeedsAccessCheck + 1;
- static constexpr size_t kFlagIsInBootImage = kFlagIsInDexCache + 1;
+ static constexpr size_t kFlagIsInBootImage = kFlagNeedsAccessCheck + 1;
// Whether this instruction must generate the initialization check.
// Used for code generation.
static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInBootImage + 1;
diff --git a/compiler/optimizing/sharpening.cc b/compiler/optimizing/sharpening.cc
index 70db383..122eedc 100644
--- a/compiler/optimizing/sharpening.cc
+++ b/compiler/optimizing/sharpening.cc
@@ -154,13 +154,24 @@
DCHECK(load_class->GetLoadKind() == HLoadClass::LoadKind::kDexCacheViaMethod ||
load_class->GetLoadKind() == HLoadClass::LoadKind::kReferrersClass)
<< load_class->GetLoadKind();
- DCHECK(!load_class->IsInDexCache()) << "HLoadClass should not be optimized before sharpening.";
DCHECK(!load_class->IsInBootImage()) << "HLoadClass should not be optimized before sharpening.";
+ if (load_class->NeedsAccessCheck()) {
+ // We need to call the runtime anyway, so we simply get the class as that call's return value.
+ return;
+ }
+
+ if (load_class->GetLoadKind() == HLoadClass::LoadKind::kReferrersClass) {
+ // Loading from the ArtMethod* is the most efficient retrieval in code size.
+ // TODO: This may not actually be true for all architectures and
+ // locations of target classes. The additional register pressure
+ // for using the ArtMethod* should be considered.
+ return;
+ }
+
const DexFile& dex_file = load_class->GetDexFile();
dex::TypeIndex type_index = load_class->GetTypeIndex();
- bool is_in_dex_cache = false;
bool is_in_boot_image = false;
HLoadClass::LoadKind desired_load_kind = static_cast<HLoadClass::LoadKind>(-1);
uint64_t address = 0u; // Class or dex cache element address.
@@ -174,12 +185,11 @@
} else if ((klass != nullptr) && compiler_driver->IsImageClass(
dex_file.StringDataByIdx(dex_file.GetTypeId(type_index).descriptor_idx_))) {
is_in_boot_image = true;
- is_in_dex_cache = true;
desired_load_kind = codegen->GetCompilerOptions().GetCompilePic()
? HLoadClass::LoadKind::kBootImageLinkTimePcRelative
: HLoadClass::LoadKind::kBootImageLinkTimeAddress;
} else {
- // Not a boot image class. We must go through the dex cache.
+ // Not a boot image class. We must call the runtime entrypoint.
// TODO: Implement kBssEntry similar to HLoadString::LoadKind::kBssEntry.
DCHECK(ContainsElement(compiler_driver->GetDexFilesForOatFile(), &dex_file));
desired_load_kind = HLoadClass::LoadKind::kDexCacheViaMethod;
@@ -189,12 +199,11 @@
if (runtime->UseJitCompilation()) {
// TODO: Make sure we don't set the "compile PIC" flag for JIT as that's bogus.
// DCHECK(!codegen_->GetCompilerOptions().GetCompilePic());
- is_in_dex_cache = (klass != nullptr);
if (is_in_boot_image) {
// TODO: Use direct pointers for all non-moving spaces, not just boot image. Bug: 29530787
desired_load_kind = HLoadClass::LoadKind::kBootImageAddress;
address = reinterpret_cast64<uint64_t>(klass);
- } else if (is_in_dex_cache) {
+ } else if (klass != nullptr) {
desired_load_kind = HLoadClass::LoadKind::kJitTableAddress;
// We store in the address field the location of the stack reference maintained
// by the handle. We do this now so that the code generation does not need to figure
@@ -213,7 +222,7 @@
address = reinterpret_cast64<uint64_t>(klass);
} else {
// Not JIT and either the klass is not in boot image or we are compiling in PIC mode.
- // We must go through the dex cache.
+ // We must call the runtime entrypoint.
// TODO: Implement kBssEntry similar to HLoadString::LoadKind::kBssEntry.
desired_load_kind = HLoadClass::LoadKind::kDexCacheViaMethod;
}
@@ -224,23 +233,6 @@
load_class->MarkInBootImage();
}
- if (load_class->NeedsAccessCheck()) {
- // We need to call the runtime anyway, so we simply get the class as that call's return value.
- return;
- }
-
- if (load_class->GetLoadKind() == HLoadClass::LoadKind::kReferrersClass) {
- // Loading from the ArtMethod* is the most efficient retrieval in code size.
- // TODO: This may not actually be true for all architectures and
- // locations of target classes. The additional register pressure
- // for using the ArtMethod* should be considered.
- return;
- }
-
- if (is_in_dex_cache) {
- load_class->MarkInDexCache();
- }
-
HLoadClass::LoadKind load_kind = codegen->GetSupportedLoadClassKind(desired_load_kind);
switch (load_kind) {
case HLoadClass::LoadKind::kBootImageLinkTimeAddress: