ART: Reference.getReferent intrinsic for arm and arm64
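
Implement an intrinsic fast path for Reference.getReferent: unless the
flags in the Reference class request the slow path or disable the
intrinsic, the referent is read with a single field load instead of a
runtime call. The intrinsic is not emitted under the read barrier
configuration.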
Test: m test-art-host
Test: m test-art-target
Test: export ART_HEAP_POISONING=true; m test-art-host
Test: export ART_HEAP_POISONING=true; m test-art-target
Bug: 32535355
Change-Id: Ie63317689dd9e03a24e701c30411f8014970173a
diff --git a/compiler/optimizing/intrinsics_arm.cc b/compiler/optimizing/intrinsics_arm.cc
index 8f64fae..c262cf9 100644
--- a/compiler/optimizing/intrinsics_arm.cc
+++ b/compiler/optimizing/intrinsics_arm.cc
@@ -2592,6 +2592,58 @@
__ Lsr(out, out, 5);
}
+void IntrinsicLocationsBuilderARM::VisitReferenceGetReferent(HInvoke* invoke) {
+  if (kEmitCompilerReadBarrier) {
+    // Do not intrinsify this call with the read barrier configuration.
+    return;
+  }
+  LocationSummary* locations = new (arena_) LocationSummary(invoke,
+                                                            LocationSummary::kCallOnSlowPath,
+                                                            kIntrinsified);
+  locations->SetInAt(0, Location::RequiresRegister());
+  locations->SetOut(Location::SameAsFirstInput());
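+  // A single temp: it holds the ArtMethod* first, then its declaring class.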
+  locations->AddTemp(Location::RequiresRegister());
+}
+
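+// Rough sketch of the code generated below (pseudo-code; the field names
+// are illustrative, not the actual runtime API):
+//
+//   klass = callee_art_method->declaring_class;   // java.lang.ref.Reference
+//   if (klass->disable_flag | klass->slow_path_flag) {
+//     out = getReferent(obj);                     // runtime call (slow path)
+//   } else {
+//     out = obj->referent;                        // single field load
+//   }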
+void IntrinsicCodeGeneratorARM::VisitReferenceGetReferent(HInvoke* invoke) {
+  DCHECK(!kEmitCompilerReadBarrier);
+  ArmAssembler* const assembler = GetAssembler();
+  LocationSummary* locations = invoke->GetLocations();
+
+  Register obj = locations->InAt(0).AsRegister<Register>();
+  Register out = locations->Out().AsRegister<Register>();
+
+  SlowPathCode* slow_path = new (GetAllocator()) IntrinsicSlowPathARM(invoke);
+  codegen_->AddSlowPath(slow_path);
+
+  // Load the ArtMethod first.
+  HInvokeStaticOrDirect* invoke_direct = invoke->AsInvokeStaticOrDirect();
+  DCHECK(invoke_direct != nullptr);
+  Register temp = codegen_->GenerateCalleeMethodStaticOrDirectCall(
+      invoke_direct, locations->GetTemp(0)).AsRegister<Register>();
+
+  // Now get the declaring class.
+  __ ldr(temp, Address(temp, ArtMethod::DeclaringClassOffset().Int32Value()));
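+  // temp now holds the declaring class, java.lang.ref.Reference, whose
+  // static flag fields are tested below.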
+
+  uint32_t slow_path_flag_offset = codegen_->GetReferenceSlowFlagOffset();
+  uint32_t disable_flag_offset = codegen_->GetReferenceDisableFlagOffset();
+  DCHECK_NE(slow_path_flag_offset, 0u);
+  DCHECK_NE(disable_flag_offset, 0u);
+  DCHECK_NE(slow_path_flag_offset, disable_flag_offset);
+
+  // Check the static flags that prevent using the intrinsic.
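+  // Either flag forces the runtime call: one is set while the GC is
+  // processing references, the other disables the intrinsic entirely.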
+  __ ldr(IP, Address(temp, disable_flag_offset));
+  __ ldr(temp, Address(temp, slow_path_flag_offset));
+  __ orr(IP, IP, ShifterOperand(temp));
+  __ CompareAndBranchIfNonZero(IP, slow_path->GetEntryLabel());
+
+  // Fast path.
+  __ ldr(out, Address(obj, mirror::Reference::ReferentOffset().Int32Value()));
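+  // The load above doubles as an implicit null check for obj: record its
+  // PC so a fault there is turned into a NullPointerException.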
+  codegen_->MaybeRecordImplicitNullCheck(invoke);
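+  // With heap poisoning (ART_HEAP_POISONING=true), references are stored
+  // poisoned in the heap; undo that for the loaded referent.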
+  __ MaybeUnpoisonHeapReference(out);
+  __ Bind(slow_path->GetExitLabel());
+}
+
UNIMPLEMENTED_INTRINSIC(ARM, MathMinDoubleDouble)
UNIMPLEMENTED_INTRINSIC(ARM, MathMinFloatFloat)
UNIMPLEMENTED_INTRINSIC(ARM, MathMaxDoubleDouble)
@@ -2605,7 +2657,6 @@
UNIMPLEMENTED_INTRINSIC(ARM, MathRoundFloat) // Could be done by changing rounding mode, maybe?
UNIMPLEMENTED_INTRINSIC(ARM, UnsafeCASLong) // High register pressure.
UNIMPLEMENTED_INTRINSIC(ARM, SystemArrayCopyChar)
-UNIMPLEMENTED_INTRINSIC(ARM, ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(ARM, IntegerHighestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM, LongHighestOneBit)
UNIMPLEMENTED_INTRINSIC(ARM, IntegerLowestOneBit)