summary refs log tree commit diff
path: root/compiler/optimizing
diff options
context:
space:
mode:
Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/intrinsics_arm64.cc    | 3
-rw-r--r--  compiler/optimizing/intrinsics_arm_vixl.cc | 4
-rw-r--r--  compiler/optimizing/intrinsics_x86.cc      | 5
-rw-r--r--  compiler/optimizing/intrinsics_x86_64.cc   | 5
4 files changed, 7 insertions, 10 deletions
diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc
index 0236f0d5a9..77b55e455e 100644
--- a/compiler/optimizing/intrinsics_arm64.cc
+++ b/compiler/optimizing/intrinsics_arm64.cc
@@ -3475,8 +3475,7 @@ void IntrinsicCodeGeneratorARM64::VisitReferenceGetReferent(HInvoke* invoke) {
Register temp = temps.AcquireW();
__ Ldr(temp,
MemOperand(tr, Thread::WeakRefAccessEnabledOffset<kArm64PointerSize>().Uint32Value()));
- static_assert(enum_cast<int32_t>(WeakRefAccessState::kVisiblyEnabled) == 0);
- __ Cbnz(temp, slow_path->GetEntryLabel());
+ __ Cbz(temp, slow_path->GetEntryLabel());
}
{
diff --git a/compiler/optimizing/intrinsics_arm_vixl.cc b/compiler/optimizing/intrinsics_arm_vixl.cc
index 303ac171a7..a4a3457c37 100644
--- a/compiler/optimizing/intrinsics_arm_vixl.cc
+++ b/compiler/optimizing/intrinsics_arm_vixl.cc
@@ -2517,8 +2517,8 @@ void IntrinsicCodeGeneratorARMVIXL::VisitReferenceGetReferent(HInvoke* invoke) {
vixl32::Register temp = temps.Acquire();
__ Ldr(temp,
MemOperand(tr, Thread::WeakRefAccessEnabledOffset<kArmPointerSize>().Uint32Value()));
- __ Cmp(temp, enum_cast<int32_t>(WeakRefAccessState::kVisiblyEnabled));
- __ B(ne, slow_path->GetEntryLabel());
+ __ Cmp(temp, 0);
+ __ B(eq, slow_path->GetEntryLabel());
}
{
diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc
index 3a3886432a..7c2537495a 100644
--- a/compiler/optimizing/intrinsics_x86.cc
+++ b/compiler/optimizing/intrinsics_x86.cc
@@ -3346,9 +3346,8 @@ void IntrinsicCodeGeneratorX86::VisitReferenceGetReferent(HInvoke* invoke) {
if (kEmitCompilerReadBarrier) {
// Check self->GetWeakRefAccessEnabled().
ThreadOffset32 offset = Thread::WeakRefAccessEnabledOffset<kX86PointerSize>();
- __ fs()->cmpl(Address::Absolute(offset),
- Immediate(enum_cast<int32_t>(WeakRefAccessState::kVisiblyEnabled)));
- __ j(kNotEqual, slow_path->GetEntryLabel());
+ __ fs()->cmpl(Address::Absolute(offset), Immediate(0));
+ __ j(kEqual, slow_path->GetEntryLabel());
}
// Load the java.lang.ref.Reference class, use the output register as a temporary.
diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc
index e3be98732b..d5a7cb10e1 100644
--- a/compiler/optimizing/intrinsics_x86_64.cc
+++ b/compiler/optimizing/intrinsics_x86_64.cc
@@ -3098,9 +3098,8 @@ void IntrinsicCodeGeneratorX86_64::VisitReferenceGetReferent(HInvoke* invoke) {
if (kEmitCompilerReadBarrier) {
// Check self->GetWeakRefAccessEnabled().
ThreadOffset64 offset = Thread::WeakRefAccessEnabledOffset<kX86_64PointerSize>();
- __ gs()->cmpl(Address::Absolute(offset, /* no_rip= */ true),
- Immediate(enum_cast<int32_t>(WeakRefAccessState::kVisiblyEnabled)));
- __ j(kNotEqual, slow_path->GetEntryLabel());
+ __ gs()->cmpl(Address::Absolute(offset, /* no_rip= */ true), Immediate(0));
+ __ j(kEqual, slow_path->GetEntryLabel());
}
// Load the java.lang.ref.Reference class, use the output register as a temporary.