| field | value |
|---|---|
| author | 2018-03-09 17:05:28 -0800 |
| committer | 2018-03-29 10:42:58 -0700 |
| commit | c7b28de9f8bf407d91cff22de782d022492b45f7 (patch) |
| tree | 7d85879a528f21e25b5ab36f20de716b4cf5892d /compiler |
| parent | 8fd8cdc43e10a421f6f63afb87f6f99c086058de (diff) |
Add reachabilityFence intrinsics
Add intrinsics that generate no code on all architectures and do nothing
in the interpreter. Their only effect is to keep the argument live at
all suspend points preceding the call. To ensure the call is not moved
across other memory accesses, we declare it to have write side effects.
Add a minimal test.
Modify 036-finalizer to use a reachabilityFence, hopefully making it
more robust to dead reference elimination.
Bug: 72698200
Test: Build and boot AOSP.
art/test.py --host -r -t 072-reachability-fence
Look at generated code.
Change-Id: I0f298bf5cc375d8ebc19bb791cc05a8490d55430
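For background on why such a fence is useful (this context is not part of the commit, and the class and native methods below are purely hypothetical): `reachabilityFence` is exposed to Java code as `java.lang.ref.Reference.reachabilityFence(Object)`, and it keeps its argument strongly reachable up to the point of the call, even when the optimizing compiler could otherwise prove the reference dead. A minimal usage sketch:

```java
import java.lang.ref.Reference;

// Hypothetical example: an object whose finalizer releases a native handle.
// Without the fence, the compiler may treat `this` as dead after the last
// field read, so a GC could finalize the object (freeing `handle`) while
// nativeRead() is still using the raw handle value it extracted.
class NativeBuffer {
  private final long handle = allocateNative();  // hypothetical native allocation

  int read(int offset) {
    try {
      return nativeRead(handle, offset);         // uses only the raw handle, not `this`
    } finally {
      // Keep `this` reachable until nativeRead() has finished, so the
      // finalizer cannot free `handle` underneath it.
      Reference.reachabilityFence(this);
    }
  }

  @Override
  protected void finalize() {
    freeNative(handle);
  }

  // Stand-ins for native methods, included only to keep the sketch self-contained.
  private static long allocateNative() { return 1L; }
  private static int nativeRead(long handle, int offset) { return offset; }
  private static void freeNative(long handle) { }
}
```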
Diffstat (limited to 'compiler')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | compiler/optimizing/intrinsics_arm64.cc | 8 |
| -rw-r--r-- | compiler/optimizing/intrinsics_arm_vixl.cc | 8 |
| -rw-r--r-- | compiler/optimizing/intrinsics_mips.cc | 8 |
| -rw-r--r-- | compiler/optimizing/intrinsics_mips64.cc | 8 |
| -rw-r--r-- | compiler/optimizing/intrinsics_x86.cc | 7 |
| -rw-r--r-- | compiler/optimizing/intrinsics_x86_64.cc | 8 |
6 files changed, 47 insertions, 0 deletions
```diff
diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc
index 81c0b50932..c3d643a7d1 100644
--- a/compiler/optimizing/intrinsics_arm64.cc
+++ b/compiler/optimizing/intrinsics_arm64.cc
@@ -2875,6 +2875,14 @@ void IntrinsicCodeGeneratorARM64::VisitThreadInterrupted(HInvoke* invoke) {
   __ Bind(&done);
 }
 
+void IntrinsicLocationsBuilderARM64::VisitReachabilityFence(HInvoke* invoke) {
+  LocationSummary* locations =
+      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
+  locations->SetInAt(0, Location::Any());
+}
+
+void IntrinsicCodeGeneratorARM64::VisitReachabilityFence(HInvoke* invoke ATTRIBUTE_UNUSED) { }
+
 UNIMPLEMENTED_INTRINSIC(ARM64, ReferenceGetReferent)
 
 UNIMPLEMENTED_INTRINSIC(ARM64, StringStringIndexOf);
diff --git a/compiler/optimizing/intrinsics_arm_vixl.cc b/compiler/optimizing/intrinsics_arm_vixl.cc
index e61a0b0809..29aecbc097 100644
--- a/compiler/optimizing/intrinsics_arm_vixl.cc
+++ b/compiler/optimizing/intrinsics_arm_vixl.cc
@@ -3028,6 +3028,14 @@ void IntrinsicCodeGeneratorARMVIXL::VisitThreadInterrupted(HInvoke* invoke) {
   }
 }
 
+void IntrinsicLocationsBuilderARMVIXL::VisitReachabilityFence(HInvoke* invoke) {
+  LocationSummary* locations =
+      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
+  locations->SetInAt(0, Location::Any());
+}
+
+void IntrinsicCodeGeneratorARMVIXL::VisitReachabilityFence(HInvoke* invoke ATTRIBUTE_UNUSED) { }
+
 UNIMPLEMENTED_INTRINSIC(ARMVIXL, MathRoundDouble)   // Could be done by changing rounding mode, maybe?
 UNIMPLEMENTED_INTRINSIC(ARMVIXL, UnsafeCASLong)     // High register pressure.
 UNIMPLEMENTED_INTRINSIC(ARMVIXL, SystemArrayCopyChar)
diff --git a/compiler/optimizing/intrinsics_mips.cc b/compiler/optimizing/intrinsics_mips.cc
index bc1292b2b7..ae248a3e5c 100644
--- a/compiler/optimizing/intrinsics_mips.cc
+++ b/compiler/optimizing/intrinsics_mips.cc
@@ -2693,6 +2693,14 @@ void IntrinsicCodeGeneratorMIPS::VisitThreadInterrupted(HInvoke* invoke) {
   __ Bind(&done);
 }
 
+void IntrinsicLocationsBuilderMIPS::VisitReachabilityFence(HInvoke* invoke) {
+  LocationSummary* locations =
+      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
+  locations->SetInAt(0, Location::Any());
+}
+
+void IntrinsicCodeGeneratorMIPS::VisitReachabilityFence(HInvoke* invoke ATTRIBUTE_UNUSED) { }
+
 // Unimplemented intrinsics.
 
 UNIMPLEMENTED_INTRINSIC(MIPS, MathCeil)
diff --git a/compiler/optimizing/intrinsics_mips64.cc b/compiler/optimizing/intrinsics_mips64.cc
index f429afde2c..9a9ae714bc 100644
--- a/compiler/optimizing/intrinsics_mips64.cc
+++ b/compiler/optimizing/intrinsics_mips64.cc
@@ -2354,6 +2354,14 @@ void IntrinsicCodeGeneratorMIPS64::VisitThreadInterrupted(HInvoke* invoke) {
   __ Bind(&done);
 }
 
+void IntrinsicLocationsBuilderMIPS64::VisitReachabilityFence(HInvoke* invoke) {
+  LocationSummary* locations =
+      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
+  locations->SetInAt(0, Location::Any());
+}
+
+void IntrinsicCodeGeneratorMIPS64::VisitReachabilityFence(HInvoke* invoke ATTRIBUTE_UNUSED) { }
+
 UNIMPLEMENTED_INTRINSIC(MIPS64, ReferenceGetReferent)
 
 UNIMPLEMENTED_INTRINSIC(MIPS64, SystemArrayCopy)
diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc
index c4f322bf0c..f84a33bb8e 100644
--- a/compiler/optimizing/intrinsics_x86.cc
+++ b/compiler/optimizing/intrinsics_x86.cc
@@ -2928,6 +2928,13 @@ void IntrinsicCodeGeneratorX86::VisitThreadInterrupted(HInvoke* invoke) {
   __ Bind(&done);
 }
 
+void IntrinsicLocationsBuilderX86::VisitReachabilityFence(HInvoke* invoke) {
+  LocationSummary* locations =
+      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
+  locations->SetInAt(0, Location::Any());
+}
+
+void IntrinsicCodeGeneratorX86::VisitReachabilityFence(HInvoke* invoke ATTRIBUTE_UNUSED) { }
 
 UNIMPLEMENTED_INTRINSIC(X86, MathRoundDouble)
 UNIMPLEMENTED_INTRINSIC(X86, ReferenceGetReferent)
diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc
index 437bc3dd3c..7627dc9490 100644
--- a/compiler/optimizing/intrinsics_x86_64.cc
+++ b/compiler/optimizing/intrinsics_x86_64.cc
@@ -2737,6 +2737,14 @@ void IntrinsicCodeGeneratorX86_64::VisitThreadInterrupted(HInvoke* invoke) {
   __ Bind(&done);
 }
 
+void IntrinsicLocationsBuilderX86_64::VisitReachabilityFence(HInvoke* invoke) {
+  LocationSummary* locations =
+      new (allocator_) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
+  locations->SetInAt(0, Location::Any());
+}
+
+void IntrinsicCodeGeneratorX86_64::VisitReachabilityFence(HInvoke* invoke ATTRIBUTE_UNUSED) { }
+
 UNIMPLEMENTED_INTRINSIC(X86_64, ReferenceGetReferent)
 UNIMPLEMENTED_INTRINSIC(X86_64, FloatIsInfinite)
 UNIMPLEMENTED_INTRINSIC(X86_64, DoubleIsInfinite)
```
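The commit message mentions adding a minimal test (run as `art/test.py --host -r -t 072-reachability-fence`). The sketch below is a hypothetical illustration of the kind of behavior such a test can check; it is not the actual test source, and all class and method names are invented.

```java
import java.lang.ref.Reference;

public class ReachabilityFenceSketch {
  static volatile boolean finalized = false;

  static class Resource {
    int value = 42;
    @Override protected void finalize() { finalized = true; }
  }

  // Returns the resource's value, or -1 if the finalizer ran too early.
  static int useResource() {
    Resource r = new Resource();
    int v = r.value;                 // last "real" use of r in this method
    // Without a fence, the compiler may treat r as dead from here on, so a
    // GC at this point could finalize it prematurely. (gc()/runFinalization()
    // are best-effort, so this only makes early finalization likely.)
    Runtime.getRuntime().gc();
    System.runFinalization();
    boolean early = finalized;
    Reference.reachabilityFence(r);  // keeps r live up to this call
    return early ? -1 : v;
  }

  public static void main(String[] args) {
    System.out.println(useResource() == 42 ? "passed" : "failed");
  }
}
```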