summary refs log tree commit diff
path: root/compiler/optimizing
diff options
context:
space:
mode:
Diffstat (limited to 'compiler/optimizing')
-rw-r--r-- compiler/optimizing/intrinsics_arm64.cc    | 3
-rw-r--r-- compiler/optimizing/intrinsics_arm_vixl.cc | 4
-rw-r--r-- compiler/optimizing/intrinsics_x86.cc      | 5
-rw-r--r-- compiler/optimizing/intrinsics_x86_64.cc   | 5
4 files changed, 10 insertions, 7 deletions
diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc
index 77b55e455e..0236f0d5a9 100644
--- a/compiler/optimizing/intrinsics_arm64.cc
+++ b/compiler/optimizing/intrinsics_arm64.cc
@@ -3475,7 +3475,8 @@ void IntrinsicCodeGeneratorARM64::VisitReferenceGetReferent(HInvoke* invoke) {
Register temp = temps.AcquireW();
__ Ldr(temp,
MemOperand(tr, Thread::WeakRefAccessEnabledOffset<kArm64PointerSize>().Uint32Value()));
- __ Cbz(temp, slow_path->GetEntryLabel());
+ static_assert(enum_cast<int32_t>(WeakRefAccessState::kVisiblyEnabled) == 0);
+ __ Cbnz(temp, slow_path->GetEntryLabel());
}
{
diff --git a/compiler/optimizing/intrinsics_arm_vixl.cc b/compiler/optimizing/intrinsics_arm_vixl.cc
index a4a3457c37..303ac171a7 100644
--- a/compiler/optimizing/intrinsics_arm_vixl.cc
+++ b/compiler/optimizing/intrinsics_arm_vixl.cc
@@ -2517,8 +2517,8 @@ void IntrinsicCodeGeneratorARMVIXL::VisitReferenceGetReferent(HInvoke* invoke) {
vixl32::Register temp = temps.Acquire();
__ Ldr(temp,
MemOperand(tr, Thread::WeakRefAccessEnabledOffset<kArmPointerSize>().Uint32Value()));
- __ Cmp(temp, 0);
- __ B(eq, slow_path->GetEntryLabel());
+ __ Cmp(temp, enum_cast<int32_t>(WeakRefAccessState::kVisiblyEnabled));
+ __ B(ne, slow_path->GetEntryLabel());
}
{
diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc
index 7c2537495a..3a3886432a 100644
--- a/compiler/optimizing/intrinsics_x86.cc
+++ b/compiler/optimizing/intrinsics_x86.cc
@@ -3346,8 +3346,9 @@ void IntrinsicCodeGeneratorX86::VisitReferenceGetReferent(HInvoke* invoke) {
if (kEmitCompilerReadBarrier) {
// Check self->GetWeakRefAccessEnabled().
ThreadOffset32 offset = Thread::WeakRefAccessEnabledOffset<kX86PointerSize>();
- __ fs()->cmpl(Address::Absolute(offset), Immediate(0));
- __ j(kEqual, slow_path->GetEntryLabel());
+ __ fs()->cmpl(Address::Absolute(offset),
+ Immediate(enum_cast<int32_t>(WeakRefAccessState::kVisiblyEnabled)));
+ __ j(kNotEqual, slow_path->GetEntryLabel());
}
// Load the java.lang.ref.Reference class, use the output register as a temporary.
diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc
index d5a7cb10e1..e3be98732b 100644
--- a/compiler/optimizing/intrinsics_x86_64.cc
+++ b/compiler/optimizing/intrinsics_x86_64.cc
@@ -3098,8 +3098,9 @@ void IntrinsicCodeGeneratorX86_64::VisitReferenceGetReferent(HInvoke* invoke) {
if (kEmitCompilerReadBarrier) {
// Check self->GetWeakRefAccessEnabled().
ThreadOffset64 offset = Thread::WeakRefAccessEnabledOffset<kX86_64PointerSize>();
- __ gs()->cmpl(Address::Absolute(offset, /* no_rip= */ true), Immediate(0));
- __ j(kEqual, slow_path->GetEntryLabel());
+ __ gs()->cmpl(Address::Absolute(offset, /* no_rip= */ true),
+ Immediate(enum_cast<int32_t>(WeakRefAccessState::kVisiblyEnabled)));
+ __ j(kNotEqual, slow_path->GetEntryLabel());
}
// Load the java.lang.ref.Reference class, use the output register as a temporary.