-rw-r--r--  compiler/optimizing/code_generator_arm.cc     | 42
-rw-r--r--  compiler/optimizing/code_generator_arm.h      |  5
-rw-r--r--  compiler/optimizing/code_generator_arm64.cc   | 59
-rw-r--r--  compiler/optimizing/code_generator_arm64.h    |  3
-rw-r--r--  compiler/optimizing/code_generator_x86.cc     | 55
-rw-r--r--  compiler/optimizing/code_generator_x86.h      |  3
-rw-r--r--  compiler/optimizing/code_generator_x86_64.cc  | 53
-rw-r--r--  compiler/optimizing/code_generator_x86_64.h   |  3
-rw-r--r--  compiler/optimizing/inliner.cc                |  3
-rw-r--r--  compiler/optimizing/instruction_builder.cc    | 15
-rw-r--r--  compiler/optimizing/nodes.h                   | 12
-rw-r--r--  compiler/optimizing/sharpening.cc             | 71
12 files changed, 183 insertions, 141 deletions
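The common thread in the four backend diffs below is a single gating condition: each `VisitLoadClass()` now computes `requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage()` and threads it into `GenerateGcRootFieldLoad()`, so classes known to live in the boot image (which never move) can use the boot image load kinds and skip the read barrier. The sketch below is a simplified, hypothetical illustration of that pattern, not the actual ART backend code; `GenerateGcRootFieldLoadSketch()` and `VisitLoadClassSketch()` are stand-in names.

```cpp
// Simplified sketch of the gating pattern this patch adds to each code
// generator. Names are stand-ins; the real functions emit machine code.
#include <cstdint>

constexpr bool kEmitCompilerReadBarrier = true;  // build-time GC configuration

struct LoadClassSketch {
  bool is_in_boot_image;  // set by HSharpening (see the sharpening.cc hunk)
  bool IsInBootImage() const { return is_in_boot_image; }
};

// Stand-in for GenerateGcRootFieldLoad(): the new trailing parameter decides
// whether a read barrier is emitted for this particular GC-root load.
void GenerateGcRootFieldLoadSketch(uint32_t offset, bool requires_read_barrier) {
  if (requires_read_barrier) {
    // kEmitCompilerReadBarrier must hold here (the real code DCHECKs this);
    // emit either the Baker fast path or the slow-path root barrier.
  } else {
    // Plain GC-root load: boot image objects are non-moving, so no barrier.
  }
  (void)offset;
}

void VisitLoadClassSketch(const LoadClassSketch& cls) {
  // The condition each VisitLoadClass() gains in this patch:
  const bool requires_read_barrier =
      kEmitCompilerReadBarrier && !cls.IsInBootImage();
  GenerateGcRootFieldLoadSketch(/* offset */ 0u, requires_read_barrier);
}
```

Keeping `requires_read_barrier = kEmitCompilerReadBarrier` as the default argument in the headers preserves the old behavior at every other existing call site; only the `HLoadClass` paths pass the narrower condition.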
diff --git a/compiler/optimizing/code_generator_arm.cc b/compiler/optimizing/code_generator_arm.cc index 2ef1802522..882a874819 100644 --- a/compiler/optimizing/code_generator_arm.cc +++ b/compiler/optimizing/code_generator_arm.cc @@ -5337,17 +5337,6 @@ void ParallelMoveResolverARM::RestoreScratch(int reg) { HLoadClass::LoadKind CodeGeneratorARM::GetSupportedLoadClassKind( HLoadClass::LoadKind desired_class_load_kind) { - if (kEmitCompilerReadBarrier) { - switch (desired_class_load_kind) { - case HLoadClass::LoadKind::kBootImageLinkTimeAddress: - case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: - case HLoadClass::LoadKind::kBootImageAddress: - // TODO: Implement for read barrier. - return HLoadClass::LoadKind::kDexCacheViaMethod; - default: - break; - } - } switch (desired_class_load_kind) { case HLoadClass::LoadKind::kReferrersClass: break; @@ -5389,11 +5378,12 @@ void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) { return; } - LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier) + const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage(); + LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier) ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall; LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind); - if (kUseBakerReadBarrier && !cls->NeedsEnvironment()) { + if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) { locations->SetCustomSlowPathCallerSaves(RegisterSet()); // No caller-save registers. } @@ -5418,6 +5408,7 @@ void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) { Location out_loc = locations->Out(); Register out = out_loc.AsRegister<Register>(); + const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage(); bool generate_null_check = false; switch (cls->GetLoadKind()) { case HLoadClass::LoadKind::kReferrersClass: { @@ -5425,18 +5416,21 @@ void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) { DCHECK(!cls->MustGenerateClinitCheck()); // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_ Register current_method = locations->InAt(0).AsRegister<Register>(); - GenerateGcRootFieldLoad( - cls, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value()); + GenerateGcRootFieldLoad(cls, + out_loc, + current_method, + ArtMethod::DeclaringClassOffset().Int32Value(), + requires_read_barrier); break; } case HLoadClass::LoadKind::kBootImageLinkTimeAddress: { - DCHECK(!kEmitCompilerReadBarrier); + DCHECK(!requires_read_barrier); __ LoadLiteral(out, codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(), cls->GetTypeIndex())); break; } case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: { - DCHECK(!kEmitCompilerReadBarrier); + DCHECK(!requires_read_barrier); CodeGeneratorARM::PcRelativePatchInfo* labels = codegen_->NewPcRelativeTypePatch(cls->GetDexFile(), cls->GetTypeIndex()); __ BindTrackedLabel(&labels->movw_label); @@ -5448,7 +5442,7 @@ void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) { break; } case HLoadClass::LoadKind::kBootImageAddress: { - DCHECK(!kEmitCompilerReadBarrier); + DCHECK(!requires_read_barrier); DCHECK_NE(cls->GetAddress(), 0u); uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress()); __ LoadLiteral(out, codegen_->DeduplicateBootImageAddressLiteral(address)); @@ -5468,7 +5462,7 @@ void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) { uint32_t offset = 
address & MaxInt<uint32_t>(offset_bits); __ LoadLiteral(out, codegen_->DeduplicateDexCacheAddressLiteral(base_address)); // /* GcRoot<mirror::Class> */ out = *(base_address + offset) - GenerateGcRootFieldLoad(cls, out_loc, out, offset); + GenerateGcRootFieldLoad(cls, out_loc, out, offset, requires_read_barrier); generate_null_check = !cls->IsInDexCache(); break; } @@ -5477,7 +5471,7 @@ void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) { HArmDexCacheArraysBase* base = cls->InputAt(0)->AsArmDexCacheArraysBase(); int32_t offset = cls->GetDexCacheElementOffset() - base->GetElementOffset(); // /* GcRoot<mirror::Class> */ out = *(dex_cache_arrays_base + offset) - GenerateGcRootFieldLoad(cls, out_loc, base_reg, offset); + GenerateGcRootFieldLoad(cls, out_loc, base_reg, offset, requires_read_barrier); generate_null_check = !cls->IsInDexCache(); break; } @@ -5491,7 +5485,7 @@ void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) { ArtMethod::DexCacheResolvedTypesOffset(kArmPointerSize).Int32Value()); // /* GcRoot<mirror::Class> */ out = out[type_index] size_t offset = CodeGenerator::GetCacheOffset(cls->GetTypeIndex()); - GenerateGcRootFieldLoad(cls, out_loc, out, offset); + GenerateGcRootFieldLoad(cls, out_loc, out, offset, requires_read_barrier); generate_null_check = !cls->IsInDexCache(); } } @@ -6403,9 +6397,11 @@ void InstructionCodeGeneratorARM::GenerateReferenceLoadTwoRegisters(HInstruction void InstructionCodeGeneratorARM::GenerateGcRootFieldLoad(HInstruction* instruction, Location root, Register obj, - uint32_t offset) { + uint32_t offset, + bool requires_read_barrier) { Register root_reg = root.AsRegister<Register>(); - if (kEmitCompilerReadBarrier) { + if (requires_read_barrier) { + DCHECK(kEmitCompilerReadBarrier); if (kUseBakerReadBarrier) { // Fast path implementation of art::ReadBarrier::BarrierForRoot when // Baker's read barrier are used: diff --git a/compiler/optimizing/code_generator_arm.h b/compiler/optimizing/code_generator_arm.h index ac10e2364a..ce9d7e6056 100644 --- a/compiler/optimizing/code_generator_arm.h +++ b/compiler/optimizing/code_generator_arm.h @@ -271,11 +271,12 @@ class InstructionCodeGeneratorARM : public InstructionCodeGenerator { // // root <- *(obj + offset) // - // while honoring read barriers (if any). + // while honoring read barriers if requires_read_barrier is true. void GenerateGcRootFieldLoad(HInstruction* instruction, Location root, Register obj, - uint32_t offset); + uint32_t offset, + bool requires_read_barrier = kEmitCompilerReadBarrier); void GenerateTestAndBranch(HInstruction* instruction, size_t condition_input_index, Label* true_target, diff --git a/compiler/optimizing/code_generator_arm64.cc b/compiler/optimizing/code_generator_arm64.cc index ceceedd793..36f7b4d914 100644 --- a/compiler/optimizing/code_generator_arm64.cc +++ b/compiler/optimizing/code_generator_arm64.cc @@ -4044,17 +4044,6 @@ void InstructionCodeGeneratorARM64::VisitInvokeVirtual(HInvokeVirtual* invoke) { HLoadClass::LoadKind CodeGeneratorARM64::GetSupportedLoadClassKind( HLoadClass::LoadKind desired_class_load_kind) { - if (kEmitCompilerReadBarrier) { - switch (desired_class_load_kind) { - case HLoadClass::LoadKind::kBootImageLinkTimeAddress: - case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: - case HLoadClass::LoadKind::kBootImageAddress: - // TODO: Implement for read barrier. 
- return HLoadClass::LoadKind::kDexCacheViaMethod; - default: - break; - } - } switch (desired_class_load_kind) { case HLoadClass::LoadKind::kReferrersClass: break; @@ -4089,11 +4078,12 @@ void LocationsBuilderARM64::VisitLoadClass(HLoadClass* cls) { return; } - LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier) + const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage(); + LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier) ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall; LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind); - if (kUseBakerReadBarrier && !cls->NeedsEnvironment()) { + if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) { locations->SetCustomSlowPathCallerSaves(RegisterSet()); // No caller-save registers. } @@ -4116,6 +4106,7 @@ void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) { Location out_loc = cls->GetLocations()->Out(); Register out = OutputRegister(cls); + const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage(); bool generate_null_check = false; switch (cls->GetLoadKind()) { case HLoadClass::LoadKind::kReferrersClass: { @@ -4123,17 +4114,21 @@ void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) { DCHECK(!cls->MustGenerateClinitCheck()); // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_ Register current_method = InputRegisterAt(cls, 0); - GenerateGcRootFieldLoad( - cls, out_loc, current_method, ArtMethod::DeclaringClassOffset().Int32Value()); + GenerateGcRootFieldLoad(cls, + out_loc, + current_method, + ArtMethod::DeclaringClassOffset().Int32Value(), + /*fixup_label*/ nullptr, + requires_read_barrier); break; } case HLoadClass::LoadKind::kBootImageLinkTimeAddress: - DCHECK(!kEmitCompilerReadBarrier); + DCHECK(!requires_read_barrier); __ Ldr(out, codegen_->DeduplicateBootImageTypeLiteral(cls->GetDexFile(), cls->GetTypeIndex())); break; case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: { - DCHECK(!kEmitCompilerReadBarrier); + DCHECK(!requires_read_barrier); // Add ADRP with its PC-relative type patch. 
const DexFile& dex_file = cls->GetDexFile(); uint32_t type_index = cls->GetTypeIndex(); @@ -4154,7 +4149,7 @@ void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) { break; } case HLoadClass::LoadKind::kBootImageAddress: { - DCHECK(!kEmitCompilerReadBarrier); + DCHECK(!requires_read_barrier); DCHECK(cls->GetAddress() != 0u && IsUint<32>(cls->GetAddress())); __ Ldr(out.W(), codegen_->DeduplicateBootImageAddressLiteral(cls->GetAddress())); break; @@ -4172,7 +4167,12 @@ void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) { uint32_t offset = cls->GetAddress() & MaxInt<uint64_t>(offset_bits); __ Ldr(out.X(), codegen_->DeduplicateDexCacheAddressLiteral(base_address)); // /* GcRoot<mirror::Class> */ out = *(base_address + offset) - GenerateGcRootFieldLoad(cls, out_loc, out.X(), offset); + GenerateGcRootFieldLoad(cls, + out_loc, + out.X(), + offset, + /*fixup_label*/ nullptr, + requires_read_barrier); generate_null_check = !cls->IsInDexCache(); break; } @@ -4191,7 +4191,12 @@ void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) { vixl::aarch64::Label* ldr_label = codegen_->NewPcRelativeDexCacheArrayPatch(dex_file, element_offset, adrp_label); // /* GcRoot<mirror::Class> */ out = *(base_address + offset) /* PC-relative */ - GenerateGcRootFieldLoad(cls, out_loc, out.X(), /* offset placeholder */ 0, ldr_label); + GenerateGcRootFieldLoad(cls, + out_loc, + out.X(), + /* offset placeholder */ 0, + ldr_label, + requires_read_barrier); generate_null_check = !cls->IsInDexCache(); break; } @@ -4203,8 +4208,12 @@ void InstructionCodeGeneratorARM64::VisitLoadClass(HLoadClass* cls) { Register current_method = InputRegisterAt(cls, 0); __ Ldr(out.X(), MemOperand(current_method, resolved_types_offset.Int32Value())); // /* GcRoot<mirror::Class> */ out = out[type_index] - GenerateGcRootFieldLoad( - cls, out_loc, out.X(), CodeGenerator::GetCacheOffset(cls->GetTypeIndex())); + GenerateGcRootFieldLoad(cls, + out_loc, + out.X(), + CodeGenerator::GetCacheOffset(cls->GetTypeIndex()), + /*fixup_label*/ nullptr, + requires_read_barrier); generate_null_check = !cls->IsInDexCache(); break; } @@ -5106,9 +5115,11 @@ void InstructionCodeGeneratorARM64::GenerateGcRootFieldLoad(HInstruction* instru Location root, Register obj, uint32_t offset, - vixl::aarch64::Label* fixup_label) { + vixl::aarch64::Label* fixup_label, + bool requires_read_barrier) { Register root_reg = RegisterFrom(root, Primitive::kPrimNot); - if (kEmitCompilerReadBarrier) { + if (requires_read_barrier) { + DCHECK(kEmitCompilerReadBarrier); if (kUseBakerReadBarrier) { // Fast path implementation of art::ReadBarrier::BarrierForRoot when // Baker's read barrier are used: diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h index 78db80307c..f0d79106dc 100644 --- a/compiler/optimizing/code_generator_arm64.h +++ b/compiler/optimizing/code_generator_arm64.h @@ -294,7 +294,8 @@ class InstructionCodeGeneratorARM64 : public InstructionCodeGenerator { Location root, vixl::aarch64::Register obj, uint32_t offset, - vixl::aarch64::Label* fixup_label = nullptr); + vixl::aarch64::Label* fixup_label = nullptr, + bool requires_read_barrier = kEmitCompilerReadBarrier); // Generate a floating-point comparison. 
void GenerateFcmp(HInstruction* instruction); diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc index 8858def40a..4689ccb05c 100644 --- a/compiler/optimizing/code_generator_x86.cc +++ b/compiler/optimizing/code_generator_x86.cc @@ -5961,17 +5961,6 @@ void ParallelMoveResolverX86::RestoreScratch(int reg) { HLoadClass::LoadKind CodeGeneratorX86::GetSupportedLoadClassKind( HLoadClass::LoadKind desired_class_load_kind) { - if (kEmitCompilerReadBarrier) { - switch (desired_class_load_kind) { - case HLoadClass::LoadKind::kBootImageLinkTimeAddress: - case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: - case HLoadClass::LoadKind::kBootImageAddress: - // TODO: Implement for read barrier. - return HLoadClass::LoadKind::kDexCacheViaMethod; - default: - break; - } - } switch (desired_class_load_kind) { case HLoadClass::LoadKind::kReferrersClass: break; @@ -6013,11 +6002,12 @@ void LocationsBuilderX86::VisitLoadClass(HLoadClass* cls) { return; } - LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier) + const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage(); + LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier) ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall; LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind); - if (kUseBakerReadBarrier && !cls->NeedsEnvironment()) { + if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) { locations->SetCustomSlowPathCallerSaves(RegisterSet()); // No caller-save registers. } @@ -6044,6 +6034,7 @@ void InstructionCodeGeneratorX86::VisitLoadClass(HLoadClass* cls) { Register out = out_loc.AsRegister<Register>(); bool generate_null_check = false; + const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage(); switch (cls->GetLoadKind()) { case HLoadClass::LoadKind::kReferrersClass: { DCHECK(!cls->CanCallRuntime()); @@ -6051,24 +6042,28 @@ void InstructionCodeGeneratorX86::VisitLoadClass(HLoadClass* cls) { // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_ Register current_method = locations->InAt(0).AsRegister<Register>(); GenerateGcRootFieldLoad( - cls, out_loc, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value())); + cls, + out_loc, + Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()), + /*fixup_label*/ nullptr, + requires_read_barrier); break; } case HLoadClass::LoadKind::kBootImageLinkTimeAddress: { - DCHECK(!kEmitCompilerReadBarrier); + DCHECK(!requires_read_barrier); __ movl(out, Immediate(/* placeholder */ 0)); codegen_->RecordTypePatch(cls); break; } case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: { - DCHECK(!kEmitCompilerReadBarrier); + DCHECK(!requires_read_barrier); Register method_address = locations->InAt(0).AsRegister<Register>(); __ leal(out, Address(method_address, CodeGeneratorX86::kDummy32BitOffset)); codegen_->RecordTypePatch(cls); break; } case HLoadClass::LoadKind::kBootImageAddress: { - DCHECK(!kEmitCompilerReadBarrier); + DCHECK(!requires_read_barrier); DCHECK_NE(cls->GetAddress(), 0u); uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress()); __ movl(out, Immediate(address)); @@ -6079,7 +6074,11 @@ void InstructionCodeGeneratorX86::VisitLoadClass(HLoadClass* cls) { DCHECK_NE(cls->GetAddress(), 0u); uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress()); // /* GcRoot<mirror::Class> */ out = 
*address - GenerateGcRootFieldLoad(cls, out_loc, Address::Absolute(address)); + GenerateGcRootFieldLoad(cls, + out_loc, + Address::Absolute(address), + /*fixup_label*/ nullptr, + requires_read_barrier); generate_null_check = !cls->IsInDexCache(); break; } @@ -6088,8 +6087,11 @@ void InstructionCodeGeneratorX86::VisitLoadClass(HLoadClass* cls) { uint32_t offset = cls->GetDexCacheElementOffset(); Label* fixup_label = codegen_->NewPcRelativeDexCacheArrayPatch(cls->GetDexFile(), offset); // /* GcRoot<mirror::Class> */ out = *(base + offset) /* PC-relative */ - GenerateGcRootFieldLoad( - cls, out_loc, Address(base_reg, CodeGeneratorX86::kDummy32BitOffset), fixup_label); + GenerateGcRootFieldLoad(cls, + out_loc, + Address(base_reg, CodeGeneratorX86::kDummy32BitOffset), + fixup_label, + requires_read_barrier); generate_null_check = !cls->IsInDexCache(); break; } @@ -6100,8 +6102,11 @@ void InstructionCodeGeneratorX86::VisitLoadClass(HLoadClass* cls) { __ movl(out, Address(current_method, ArtMethod::DexCacheResolvedTypesOffset(kX86PointerSize).Int32Value())); // /* GcRoot<mirror::Class> */ out = out[type_index] - GenerateGcRootFieldLoad( - cls, out_loc, Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()))); + GenerateGcRootFieldLoad(cls, + out_loc, + Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())), + /*fixup_label*/ nullptr, + requires_read_barrier); generate_null_check = !cls->IsInDexCache(); break; } @@ -6938,9 +6943,11 @@ void InstructionCodeGeneratorX86::GenerateReferenceLoadTwoRegisters(HInstruction void InstructionCodeGeneratorX86::GenerateGcRootFieldLoad(HInstruction* instruction, Location root, const Address& address, - Label* fixup_label) { + Label* fixup_label, + bool requires_read_barrier) { Register root_reg = root.AsRegister<Register>(); - if (kEmitCompilerReadBarrier) { + if (requires_read_barrier) { + DCHECK(kEmitCompilerReadBarrier); if (kUseBakerReadBarrier) { // Fast path implementation of art::ReadBarrier::BarrierForRoot when // Baker's read barrier are used: diff --git a/compiler/optimizing/code_generator_x86.h b/compiler/optimizing/code_generator_x86.h index df65fa2f4a..e2250981bb 100644 --- a/compiler/optimizing/code_generator_x86.h +++ b/compiler/optimizing/code_generator_x86.h @@ -263,7 +263,8 @@ class InstructionCodeGeneratorX86 : public InstructionCodeGenerator { void GenerateGcRootFieldLoad(HInstruction* instruction, Location root, const Address& address, - Label* fixup_label = nullptr); + Label* fixup_label = nullptr, + bool requires_read_barrier = kEmitCompilerReadBarrier); // Push value to FPU stack. `is_fp` specifies whether the value is floating point or not. // `is_wide` specifies whether it is long/double or not. diff --git a/compiler/optimizing/code_generator_x86_64.cc b/compiler/optimizing/code_generator_x86_64.cc index 5230269730..a21a09ee8a 100644 --- a/compiler/optimizing/code_generator_x86_64.cc +++ b/compiler/optimizing/code_generator_x86_64.cc @@ -5408,17 +5408,6 @@ void InstructionCodeGeneratorX86_64::GenerateClassInitializationCheck( HLoadClass::LoadKind CodeGeneratorX86_64::GetSupportedLoadClassKind( HLoadClass::LoadKind desired_class_load_kind) { - if (kEmitCompilerReadBarrier) { - switch (desired_class_load_kind) { - case HLoadClass::LoadKind::kBootImageLinkTimeAddress: - case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: - case HLoadClass::LoadKind::kBootImageAddress: - // TODO: Implement for read barrier. 
- return HLoadClass::LoadKind::kDexCacheViaMethod; - default: - break; - } - } switch (desired_class_load_kind) { case HLoadClass::LoadKind::kReferrersClass: break; @@ -5454,11 +5443,12 @@ void LocationsBuilderX86_64::VisitLoadClass(HLoadClass* cls) { return; } - LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || kEmitCompilerReadBarrier) + const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage(); + LocationSummary::CallKind call_kind = (cls->NeedsEnvironment() || requires_read_barrier) ? LocationSummary::kCallOnSlowPath : LocationSummary::kNoCall; LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(cls, call_kind); - if (kUseBakerReadBarrier && !cls->NeedsEnvironment()) { + if (kUseBakerReadBarrier && requires_read_barrier && !cls->NeedsEnvironment()) { locations->SetCustomSlowPathCallerSaves(RegisterSet()); // No caller-save registers. } @@ -5482,6 +5472,7 @@ void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) { Location out_loc = locations->Out(); CpuRegister out = out_loc.AsRegister<CpuRegister>(); + const bool requires_read_barrier = kEmitCompilerReadBarrier && !cls->IsInBootImage(); bool generate_null_check = false; switch (cls->GetLoadKind()) { case HLoadClass::LoadKind::kReferrersClass: { @@ -5490,16 +5481,20 @@ void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) { // /* GcRoot<mirror::Class> */ out = current_method->declaring_class_ CpuRegister current_method = locations->InAt(0).AsRegister<CpuRegister>(); GenerateGcRootFieldLoad( - cls, out_loc, Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value())); + cls, + out_loc, + Address(current_method, ArtMethod::DeclaringClassOffset().Int32Value()), + /*fixup_label*/nullptr, + requires_read_barrier); break; } case HLoadClass::LoadKind::kBootImageLinkTimePcRelative: - DCHECK(!kEmitCompilerReadBarrier); + DCHECK(!requires_read_barrier); __ leal(out, Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false)); codegen_->RecordTypePatch(cls); break; case HLoadClass::LoadKind::kBootImageAddress: { - DCHECK(!kEmitCompilerReadBarrier); + DCHECK(!requires_read_barrier); DCHECK_NE(cls->GetAddress(), 0u); uint32_t address = dchecked_integral_cast<uint32_t>(cls->GetAddress()); __ movl(out, Immediate(address)); // Zero-extended. @@ -5511,11 +5506,19 @@ void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) { // /* GcRoot<mirror::Class> */ out = *address if (IsUint<32>(cls->GetAddress())) { Address address = Address::Absolute(cls->GetAddress(), /* no_rip */ true); - GenerateGcRootFieldLoad(cls, out_loc, address); + GenerateGcRootFieldLoad(cls, + out_loc, + address, + /*fixup_label*/nullptr, + requires_read_barrier); } else { // TODO: Consider using opcode A1, i.e. movl eax, moff32 (with 64-bit address). 
__ movq(out, Immediate(cls->GetAddress())); - GenerateGcRootFieldLoad(cls, out_loc, Address(out, 0)); + GenerateGcRootFieldLoad(cls, + out_loc, + Address(out, 0), + /*fixup_label*/nullptr, + requires_read_barrier); } generate_null_check = !cls->IsInDexCache(); break; @@ -5526,7 +5529,7 @@ void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) { Address address = Address::Absolute(CodeGeneratorX86_64::kDummy32BitOffset, /* no_rip */ false); // /* GcRoot<mirror::Class> */ out = *address /* PC-relative */ - GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label); + GenerateGcRootFieldLoad(cls, out_loc, address, fixup_label, requires_read_barrier); generate_null_check = !cls->IsInDexCache(); break; } @@ -5539,7 +5542,11 @@ void InstructionCodeGeneratorX86_64::VisitLoadClass(HLoadClass* cls) { ArtMethod::DexCacheResolvedTypesOffset(kX86_64PointerSize).Int32Value())); // /* GcRoot<mirror::Class> */ out = out[type_index] GenerateGcRootFieldLoad( - cls, out_loc, Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()))); + cls, + out_loc, + Address(out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())), + /*fixup_label*/nullptr, + requires_read_barrier); generate_null_check = !cls->IsInDexCache(); break; } @@ -6387,9 +6394,11 @@ void InstructionCodeGeneratorX86_64::GenerateReferenceLoadTwoRegisters(HInstruct void InstructionCodeGeneratorX86_64::GenerateGcRootFieldLoad(HInstruction* instruction, Location root, const Address& address, - Label* fixup_label) { + Label* fixup_label, + bool requires_read_barrier) { CpuRegister root_reg = root.AsRegister<CpuRegister>(); - if (kEmitCompilerReadBarrier) { + if (requires_read_barrier) { + DCHECK(kEmitCompilerReadBarrier); if (kUseBakerReadBarrier) { // Fast path implementation of art::ReadBarrier::BarrierForRoot when // Baker's read barrier are used: diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h index f23bff5e1f..d93908343d 100644 --- a/compiler/optimizing/code_generator_x86_64.h +++ b/compiler/optimizing/code_generator_x86_64.h @@ -257,7 +257,8 @@ class InstructionCodeGeneratorX86_64 : public InstructionCodeGenerator { void GenerateGcRootFieldLoad(HInstruction* instruction, Location root, const Address& address, - Label* fixup_label = nullptr); + Label* fixup_label = nullptr, + bool requires_read_barrier = kEmitCompilerReadBarrier); void PushOntoFPStack(Location source, uint32_t temp_offset, uint32_t stack_adjustment, bool is_float); diff --git a/compiler/optimizing/inliner.cc b/compiler/optimizing/inliner.cc index 1e5f0b6c75..ce53134235 100644 --- a/compiler/optimizing/inliner.cc +++ b/compiler/optimizing/inliner.cc @@ -452,7 +452,8 @@ HInstruction* HInliner::AddTypeGuard(HInstruction* receiver, is_referrer, invoke_instruction->GetDexPc(), /* needs_access_check */ false, - /* is_in_dex_cache */ true); + /* is_in_dex_cache */ true, + /* is_in_boot_image */ false); HNotEqual* compare = new (graph_->GetArena()) HNotEqual(load_class, receiver_class); // TODO: Extend reference type propagation to understand the guard. 
diff --git a/compiler/optimizing/instruction_builder.cc b/compiler/optimizing/instruction_builder.cc index 32dcc2814b..d7e4c53df0 100644 --- a/compiler/optimizing/instruction_builder.cc +++ b/compiler/optimizing/instruction_builder.cc @@ -935,7 +935,8 @@ bool HInstructionBuilder::BuildNewInstance(uint16_t type_index, uint32_t dex_pc) IsOutermostCompilingClass(type_index), dex_pc, needs_access_check, - /* is_in_dex_cache */ false); + /* is_in_dex_cache */ false, + /* is_in_boot_image */ false); AppendInstruction(load_class); HInstruction* cls = load_class; @@ -1026,7 +1027,8 @@ HClinitCheck* HInstructionBuilder::ProcessClinitCheckForInvoke( is_outer_class, dex_pc, /*needs_access_check*/ false, - /* is_in_dex_cache */ false); + /* is_in_dex_cache */ false, + /* is_in_boot_image */ false); AppendInstruction(load_class); clinit_check = new (arena_) HClinitCheck(load_class, dex_pc); AppendInstruction(clinit_check); @@ -1384,7 +1386,8 @@ bool HInstructionBuilder::BuildStaticFieldAccess(const Instruction& instruction, is_outer_class, dex_pc, /*needs_access_check*/ false, - /* is_in_dex_cache */ false); + /* is_in_dex_cache */ false, + /* is_in_boot_image */ false); AppendInstruction(constant); HInstruction* cls = constant; @@ -1659,7 +1662,8 @@ void HInstructionBuilder::BuildTypeCheck(const Instruction& instruction, IsOutermostCompilingClass(type_index), dex_pc, !can_access, - /* is_in_dex_cache */ false); + /* is_in_dex_cache */ false, + /* is_in_boot_image */ false); AppendInstruction(cls); TypeCheckKind check_kind = ComputeTypeCheckKind(resolved_class); @@ -2634,7 +2638,8 @@ bool HInstructionBuilder::ProcessDexInstruction(const Instruction& instruction, IsOutermostCompilingClass(type_index), dex_pc, !can_access, - /* is_in_dex_cache */ false)); + /* is_in_dex_cache */ false, + /* is_in_boot_image */ false)); UpdateLocal(instruction.VRegA_21c(), current_block_->GetLastInstruction()); break; } diff --git a/compiler/optimizing/nodes.h b/compiler/optimizing/nodes.h index 19e499ba8c..149a71d1b9 100644 --- a/compiler/optimizing/nodes.h +++ b/compiler/optimizing/nodes.h @@ -5461,7 +5461,8 @@ class HLoadClass FINAL : public HInstruction { bool is_referrers_class, uint32_t dex_pc, bool needs_access_check, - bool is_in_dex_cache) + bool is_in_dex_cache, + bool is_in_boot_image) : HInstruction(SideEffectsForArchRuntimeCalls(), dex_pc), special_input_(HUserRecord<HInstruction*>(current_method)), type_index_(type_index), @@ -5475,6 +5476,7 @@ class HLoadClass FINAL : public HInstruction { is_referrers_class ? 
LoadKind::kReferrersClass : LoadKind::kDexCacheViaMethod); SetPackedFlag<kFlagNeedsAccessCheck>(needs_access_check); SetPackedFlag<kFlagIsInDexCache>(is_in_dex_cache); + SetPackedFlag<kFlagIsInBootImage>(is_in_boot_image); SetPackedFlag<kFlagGenerateClInitCheck>(false); } @@ -5565,6 +5567,7 @@ class HLoadClass FINAL : public HInstruction { bool IsReferrersClass() const { return GetLoadKind() == LoadKind::kReferrersClass; } bool NeedsAccessCheck() const { return GetPackedFlag<kFlagNeedsAccessCheck>(); } bool IsInDexCache() const { return GetPackedFlag<kFlagIsInDexCache>(); } + bool IsInBootImage() const { return GetPackedFlag<kFlagIsInBootImage>(); } bool MustGenerateClinitCheck() const { return GetPackedFlag<kFlagGenerateClInitCheck>(); } void MarkInDexCache() { @@ -5574,6 +5577,10 @@ class HLoadClass FINAL : public HInstruction { SetSideEffects(SideEffects::None()); } + void MarkInBootImage() { + SetPackedFlag<kFlagIsInBootImage>(true); + } + void AddSpecialInput(HInstruction* special_input); using HInstruction::GetInputRecords; // Keep the const version visible. @@ -5591,9 +5598,10 @@ class HLoadClass FINAL : public HInstruction { private: static constexpr size_t kFlagNeedsAccessCheck = kNumberOfGenericPackedBits; static constexpr size_t kFlagIsInDexCache = kFlagNeedsAccessCheck + 1; + static constexpr size_t kFlagIsInBootImage = kFlagIsInDexCache + 1; // Whether this instruction must generate the initialization check. // Used for code generation. - static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInDexCache + 1; + static constexpr size_t kFlagGenerateClInitCheck = kFlagIsInBootImage + 1; static constexpr size_t kFieldLoadKind = kFlagGenerateClInitCheck + 1; static constexpr size_t kFieldLoadKindSize = MinimumBitsToStore(static_cast<size_t>(LoadKind::kLast)); diff --git a/compiler/optimizing/sharpening.cc b/compiler/optimizing/sharpening.cc index 81163e296e..8d4d143696 100644 --- a/compiler/optimizing/sharpening.cc +++ b/compiler/optimizing/sharpening.cc @@ -176,6 +176,7 @@ void HSharpening::ProcessLoadClass(HLoadClass* load_class) { uint32_t type_index = load_class->GetTypeIndex(); bool is_in_dex_cache = false; + bool is_in_boot_image = false; HLoadClass::LoadKind desired_load_kind; uint64_t address = 0u; // Class or dex cache element address. { @@ -192,45 +193,42 @@ void HSharpening::ProcessLoadClass(HLoadClass* load_class) { // Compiling boot image. Check if the class is a boot image class. DCHECK(!runtime->UseJitCompilation()); if (!compiler_driver_->GetSupportBootImageFixup()) { - // MIPS/MIPS64 or compiler_driver_test. Do not sharpen. + // MIPS64 or compiler_driver_test. Do not sharpen. desired_load_kind = HLoadClass::LoadKind::kDexCacheViaMethod; + } else if ((klass != nullptr) && compiler_driver_->IsImageClass( + dex_file.StringDataByIdx(dex_file.GetTypeId(type_index).descriptor_idx_))) { + is_in_boot_image = true; + is_in_dex_cache = true; + desired_load_kind = codegen_->GetCompilerOptions().GetCompilePic() + ? HLoadClass::LoadKind::kBootImageLinkTimePcRelative + : HLoadClass::LoadKind::kBootImageLinkTimeAddress; } else { - if (klass != nullptr && - compiler_driver_->IsImageClass( - dex_file.StringDataByIdx(dex_file.GetTypeId(type_index).descriptor_idx_))) { - is_in_dex_cache = true; - desired_load_kind = codegen_->GetCompilerOptions().GetCompilePic() - ? HLoadClass::LoadKind::kBootImageLinkTimePcRelative - : HLoadClass::LoadKind::kBootImageLinkTimeAddress; - } else { - // Not a boot image class. We must go through the dex cache. 
- DCHECK(ContainsElement(compiler_driver_->GetDexFilesForOatFile(), &dex_file)); - desired_load_kind = HLoadClass::LoadKind::kDexCachePcRelative; - } - } - } else if (runtime->UseJitCompilation()) { - // TODO: Make sure we don't set the "compile PIC" flag for JIT as that's bogus. - // DCHECK(!codegen_->GetCompilerOptions().GetCompilePic()); - is_in_dex_cache = (klass != nullptr); - if (klass != nullptr && runtime->GetHeap()->ObjectIsInBootImageSpace(klass)) { - // TODO: Use direct pointers for all non-moving spaces, not just boot image. Bug: 29530787 - desired_load_kind = HLoadClass::LoadKind::kBootImageAddress; - address = reinterpret_cast64<uint64_t>(klass); - } else { - // Note: If the class is not in the dex cache or isn't initialized, the - // instruction needs environment and will not be inlined across dex files. - // Within a dex file, the slow-path helper loads the correct class and - // inlined frames are used correctly for OOM stack trace. - // TODO: Write a test for this. Bug: 29416588 - desired_load_kind = HLoadClass::LoadKind::kDexCacheAddress; - void* dex_cache_element_address = &dex_cache->GetResolvedTypes()[type_index]; - address = reinterpret_cast64<uint64_t>(dex_cache_element_address); + // Not a boot image class. We must go through the dex cache. + DCHECK(ContainsElement(compiler_driver_->GetDexFilesForOatFile(), &dex_file)); + desired_load_kind = HLoadClass::LoadKind::kDexCachePcRelative; } } else { - // AOT app compilation. Check if the class is in the boot image. - if ((klass != nullptr) && - runtime->GetHeap()->ObjectIsInBootImageSpace(klass) && - !codegen_->GetCompilerOptions().GetCompilePic()) { + is_in_boot_image = (klass != nullptr) && runtime->GetHeap()->ObjectIsInBootImageSpace(klass); + if (runtime->UseJitCompilation()) { + // TODO: Make sure we don't set the "compile PIC" flag for JIT as that's bogus. + // DCHECK(!codegen_->GetCompilerOptions().GetCompilePic()); + is_in_dex_cache = (klass != nullptr); + if (is_in_boot_image) { + // TODO: Use direct pointers for all non-moving spaces, not just boot image. Bug: 29530787 + desired_load_kind = HLoadClass::LoadKind::kBootImageAddress; + address = reinterpret_cast64<uint64_t>(klass); + } else { + // Note: If the class is not in the dex cache or isn't initialized, the + // instruction needs environment and will not be inlined across dex files. + // Within a dex file, the slow-path helper loads the correct class and + // inlined frames are used correctly for OOM stack trace. + // TODO: Write a test for this. Bug: 29416588 + desired_load_kind = HLoadClass::LoadKind::kDexCacheAddress; + void* dex_cache_element_address = &dex_cache->GetResolvedTypes()[type_index]; + address = reinterpret_cast64<uint64_t>(dex_cache_element_address); + } + // AOT app compilation. Check if the class is in the boot image. + } else if (is_in_boot_image && !codegen_->GetCompilerOptions().GetCompilePic()) { desired_load_kind = HLoadClass::LoadKind::kBootImageAddress; address = reinterpret_cast64<uint64_t>(klass); } else { @@ -247,6 +245,9 @@ void HSharpening::ProcessLoadClass(HLoadClass* load_class) { if (is_in_dex_cache) { load_class->MarkInDexCache(); } + if (is_in_boot_image) { + load_class->MarkInBootImage(); + } HLoadClass::LoadKind load_kind = codegen_->GetSupportedLoadClassKind(desired_load_kind); switch (load_kind) { |
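The nodes.h hunk inserts the new `kFlagIsInBootImage` bit between `kFlagIsInDexCache` and `kFlagGenerateClInitCheck`, which is why the two later offsets are rebased. A minimal sketch of the resulting bit layout follows; the base value of `kNumberOfGenericPackedBits` is assumed here purely for illustration (the real value comes from `HInstruction`).

```cpp
// Bit-offset layout of the HLoadClass packed flags after the patch.
#include <cstddef>

constexpr size_t kNumberOfGenericPackedBits = 2;  // assumed value, for illustration only
constexpr size_t kFlagNeedsAccessCheck      = kNumberOfGenericPackedBits;
constexpr size_t kFlagIsInDexCache          = kFlagNeedsAccessCheck + 1;
constexpr size_t kFlagIsInBootImage         = kFlagIsInDexCache + 1;        // new flag
constexpr size_t kFlagGenerateClInitCheck   = kFlagIsInBootImage + 1;       // shifted up by one
constexpr size_t kFieldLoadKind             = kFlagGenerateClInitCheck + 1; // shifted up by one

static_assert(kFlagGenerateClInitCheck == kFlagIsInDexCache + 2,
              "IsInBootImage occupies exactly one bit between the pre-existing flags");
```

Because the flag is only set by `HSharpening::ProcessLoadClass()` via the new `MarkInBootImage()` (all `HLoadClass` construction sites pass `/* is_in_boot_image */ false`), the code generators can trust `IsInBootImage()` when deciding to drop the read barrier.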