Diffstat (limited to 'compiler/optimizing/intrinsics_arm_vixl.cc')
-rw-r--r--  compiler/optimizing/intrinsics_arm_vixl.cc | 63
1 file changed, 62 insertions, 1 deletion
diff --git a/compiler/optimizing/intrinsics_arm_vixl.cc b/compiler/optimizing/intrinsics_arm_vixl.cc
index 2901c472bc..f629ad3c51 100644
--- a/compiler/optimizing/intrinsics_arm_vixl.cc
+++ b/compiler/optimizing/intrinsics_arm_vixl.cc
@@ -2991,6 +2991,68 @@ void IntrinsicCodeGeneratorARMVIXL::VisitIntegerValueOf(HInvoke* invoke) {
   }
 }
 
+void IntrinsicLocationsBuilderARMVIXL::VisitReferenceGetReferent(HInvoke* invoke) {
+  IntrinsicVisitor::CreateReferenceGetReferentLocations(invoke, codegen_);
+}
+
+void IntrinsicCodeGeneratorARMVIXL::VisitReferenceGetReferent(HInvoke* invoke) {
+  ArmVIXLAssembler* assembler = GetAssembler();
+  LocationSummary* locations = invoke->GetLocations();
+
+  Location obj = locations->InAt(0);
+  Location out = locations->Out();
+
+  SlowPathCodeARMVIXL* slow_path = new (GetAllocator()) IntrinsicSlowPathARMVIXL(invoke);
+  codegen_->AddSlowPath(slow_path);
+
+  if (kEmitCompilerReadBarrier) {
+    // Check self->GetWeakRefAccessEnabled().
+    UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
+    vixl32::Register temp = temps.Acquire();
+    __ Ldr(temp,
+           MemOperand(tr, Thread::WeakRefAccessEnabledOffset<kArmPointerSize>().Uint32Value()));
+    __ Cmp(temp, 0);
+    __ B(eq, slow_path->GetEntryLabel());
+  }
+
+  {
+    // Load the java.lang.ref.Reference class.
+    UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
+    vixl32::Register temp = temps.Acquire();
+    codegen_->LoadIntrinsicDeclaringClass(temp, invoke);
+
+    // Check static fields java.lang.ref.Reference.{disableIntrinsic,slowPathEnabled} together.
+    MemberOffset disable_intrinsic_offset = IntrinsicVisitor::GetReferenceDisableIntrinsicOffset();
+    DCHECK_ALIGNED(disable_intrinsic_offset.Uint32Value(), 2u);
+    DCHECK_EQ(disable_intrinsic_offset.Uint32Value() + 1u,
+              IntrinsicVisitor::GetReferenceSlowPathEnabledOffset().Uint32Value());
+    __ Ldrh(temp, MemOperand(temp, disable_intrinsic_offset.Uint32Value()));
+    __ Cmp(temp, 0);
+    __ B(ne, slow_path->GetEntryLabel());
+  }
+
+  // Load the value from the field.
+  uint32_t referent_offset = mirror::Reference::ReferentOffset().Uint32Value();
+  if (kEmitCompilerReadBarrier && kUseBakerReadBarrier) {
+    codegen_->GenerateFieldLoadWithBakerReadBarrier(invoke,
+                                                    out,
+                                                    RegisterFrom(obj),
+                                                    referent_offset,
+                                                    /*maybe_temp=*/ Location::NoLocation(),
+                                                    /*needs_null_check=*/ true);
+    codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);  // `referent` is volatile.
+  } else {
+    {
+      vixl::EmissionCheckScope guard(codegen_->GetVIXLAssembler(), kMaxMacroInstructionSizeInBytes);
+      __ Ldr(RegisterFrom(out), MemOperand(RegisterFrom(obj), referent_offset));
+      codegen_->MaybeRecordImplicitNullCheck(invoke);
+    }
+    codegen_->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);  // `referent` is volatile.
+    codegen_->MaybeGenerateReadBarrierSlow(invoke, out, out, obj, referent_offset);
+  }
+  __ Bind(slow_path->GetExitLabel());
+}
+
 void IntrinsicLocationsBuilderARMVIXL::VisitThreadInterrupted(HInvoke* invoke) {
   LocationSummary* locations =
       new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
@@ -3049,7 +3111,6 @@ void IntrinsicCodeGeneratorARMVIXL::VisitIntegerDivideUnsigned(HInvoke* invoke)
 UNIMPLEMENTED_INTRINSIC(ARMVIXL, MathRoundDouble)   // Could be done by changing rounding mode, maybe?
 UNIMPLEMENTED_INTRINSIC(ARMVIXL, UnsafeCASLong)     // High register pressure.
 UNIMPLEMENTED_INTRINSIC(ARMVIXL, SystemArrayCopyChar)
-UNIMPLEMENTED_INTRINSIC(ARMVIXL, ReferenceGetReferent)
 UNIMPLEMENTED_INTRINSIC(ARMVIXL, LongDivideUnsigned)
 UNIMPLEMENTED_INTRINSIC(ARMVIXL, CRC32Update)
 UNIMPLEMENTED_INTRINSIC(ARMVIXL, CRC32UpdateBytes)
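
For context beyond the patch itself: ReferenceGetReferent is the intrinsic behind java.lang.ref.Reference.get(), which in Android's libcore delegates to the native getReferent() that this visitor now inlines. The sketch below is illustrative only (the class and variable names are made up for the example); it shows the Java-level call that the new ARM VIXL code compiles to a volatile load of the referent field, guarded by the weak-ref-access and disableIntrinsic/slowPathEnabled checks seen above.

    import java.lang.ref.WeakReference;

    public class ReferentDemo {
        public static void main(String[] args) {
            Object value = new Object();
            WeakReference<Object> ref = new WeakReference<>(value);

            // Reference.get() is the call the intrinsic accelerates: on the fast
            // path it becomes a load of the referent field plus a LoadAny barrier,
            // falling back to the runtime slow path when a read barrier or the
            // debug flags require it.
            Object referent = ref.get();

            System.out.println(referent == value);  // true while `value` is strongly reachable
        }
    }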