X86: Add the other variants of VarHandle.compareAndSet
This commit implements VarHandle WeakCompareAndSet,
WeakCompareAndSetPlain, WeakCompareAndSetAcquire, and
WeakCompareAndSetRelease by reusing the existing CompareAndSet
locations-builder and code-generation paths. All variants emit
`lock cmpxchg`, since x86 has no CAS instruction with weak failure
semantics; the previously explicit kAnyAny memory barrier is dropped
because `lock cmpxchg` already provides full barrier semantics.
Test: art/test.py --host -r -t 712-varhandle-invocation --32
Change-Id: I1e0f5e7dcb21cd0f107d5a74cb534cef992ef9fc
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 4a0eb2e..1a83976 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -33,6 +33,7 @@
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
+#include "mirror/var_handle.h"
#include "scoped_thread_state_change-inl.h"
#include "thread.h"
#include "utils/assembler.h"
@@ -572,8 +573,10 @@
<< "Unexpected instruction in read barrier marking and field updating slow path: "
<< instruction_->DebugName();
DCHECK(instruction_->GetLocations()->Intrinsified());
- DCHECK(instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeCASObject ||
- instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kVarHandleCompareAndSet);
+ Intrinsics intrinsic = instruction_->AsInvoke()->GetIntrinsic();
+ static constexpr auto kVarHandleCAS = mirror::VarHandle::AccessModeTemplate::kCompareAndSet;
+ DCHECK(intrinsic == Intrinsics::kUnsafeCASObject ||
+ mirror::VarHandle::GetAccessModeTemplateByIntrinsic(intrinsic) == kVarHandleCAS);
__ Bind(GetEntryLabel());
if (unpoison_ref_before_marking_) {
diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc
index f10b511..1c1b4ea 100644
--- a/compiler/optimizing/intrinsics_x86.cc
+++ b/compiler/optimizing/intrinsics_x86.cc
@@ -3572,7 +3572,7 @@
__ Bind(slow_path->GetExitLabel());
}
-void IntrinsicLocationsBuilderX86::VisitVarHandleCompareAndSet(HInvoke* invoke) {
+static void CreateVarHandleCompareAndSetLocations(HInvoke* invoke) {
// The only read barrier implementation supporting the
// VarHandleGet intrinsic is the Baker-style read barriers.
if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
@@ -3598,7 +3598,8 @@
return;
}
- LocationSummary* locations = new (allocator_) LocationSummary(
+ ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
+ LocationSummary* locations = new (allocator) LocationSummary(
invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
locations->AddTemp(Location::RequiresRegister());
locations->AddTemp(Location::RequiresRegister());
@@ -3628,12 +3629,12 @@
locations->SetOut(Location::RegisterLocation(EAX));
}
-void IntrinsicCodeGeneratorX86::VisitVarHandleCompareAndSet(HInvoke* invoke) {
+static void GenerateVarHandleCompareAndSet(HInvoke* invoke, CodeGeneratorX86* codegen) {
// The only read barrier implementation supporting the
// VarHandleGet intrinsic is the Baker-style read barriers.
DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);
- X86Assembler* assembler = codegen_->GetAssembler();
+ X86Assembler* assembler = codegen->GetAssembler();
LocationSummary* locations = invoke->GetLocations();
uint32_t number_of_arguments = invoke->GetNumberOfArguments();
uint32_t expected_value_index = number_of_arguments - 2;
@@ -3646,8 +3647,8 @@
Register offset = locations->GetTemp(0).AsRegister<Register>();
Register temp = locations->GetTemp(1).AsRegister<Register>();
Register temp2 = locations->GetTemp(2).AsRegister<Register>();
- SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) IntrinsicSlowPathX86(invoke);
- codegen_->AddSlowPath(slow_path);
+ SlowPathCode* slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathX86(invoke);
+ codegen->AddSlowPath(slow_path);
GenerateVarHandleCommonChecks(invoke, temp, slow_path, assembler);
// Check the varType.primitiveType against the type of the expected value.
@@ -3672,7 +3673,7 @@
// Get the field referred by the VarHandle. The returned register contains the object reference
// or the declaring class. The field offset will be placed in 'offset'. For static fields, the
// declaring class will be placed in 'temp' register.
- Register reference = GenerateVarHandleFieldReference(invoke, codegen_, temp, offset);
+ Register reference = GenerateVarHandleFieldReference(invoke, codegen, temp, offset);
uint32_t expected_coordinates_count = GetExpectedVarHandleCoordinatesCount(invoke);
// For generating the compare and exchange, we need 2 temporaries. In case of a static field, the
@@ -3681,17 +3682,60 @@
temp = (expected_coordinates_count == 1u) ? temp : locations->GetTemp(3).AsRegister<Register>();
DCHECK_NE(temp, reference);
+ // We are using `lock cmpxchg` in all cases because there is no CAS equivalent that has weak
+ // failure semantics. `lock cmpxchg` has full barrier semantics, and we don't need scheduling
+ // barriers at this time.
+
if (type == DataType::Type::kReference) {
- GenReferenceCAS(invoke, codegen_, expected_value, new_value, reference, offset, temp, temp2);
+ GenReferenceCAS(invoke, codegen, expected_value, new_value, reference, offset, temp, temp2);
} else {
Location out = locations->Out();
- GenPrimitiveCAS(type, codegen_, expected_value, new_value, reference, offset, out, temp);
+ GenPrimitiveCAS(type, codegen, expected_value, new_value, reference, offset, out, temp);
}
- codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
__ Bind(slow_path->GetExitLabel());
}
+void IntrinsicLocationsBuilderX86::VisitVarHandleCompareAndSet(HInvoke* invoke) {
+ CreateVarHandleCompareAndSetLocations(invoke);
+}
+
+void IntrinsicCodeGeneratorX86::VisitVarHandleCompareAndSet(HInvoke* invoke) {
+ GenerateVarHandleCompareAndSet(invoke, codegen_);
+}
+
+void IntrinsicLocationsBuilderX86::VisitVarHandleWeakCompareAndSet(HInvoke* invoke) {
+ CreateVarHandleCompareAndSetLocations(invoke);
+}
+
+void IntrinsicCodeGeneratorX86::VisitVarHandleWeakCompareAndSet(HInvoke* invoke) {
+ GenerateVarHandleCompareAndSet(invoke, codegen_);
+}
+
+void IntrinsicLocationsBuilderX86::VisitVarHandleWeakCompareAndSetPlain(HInvoke* invoke) {
+ CreateVarHandleCompareAndSetLocations(invoke);
+}
+
+void IntrinsicCodeGeneratorX86::VisitVarHandleWeakCompareAndSetPlain(HInvoke* invoke) {
+ GenerateVarHandleCompareAndSet(invoke, codegen_);
+}
+
+void IntrinsicLocationsBuilderX86::VisitVarHandleWeakCompareAndSetAcquire(HInvoke* invoke) {
+ CreateVarHandleCompareAndSetLocations(invoke);
+}
+
+void IntrinsicCodeGeneratorX86::VisitVarHandleWeakCompareAndSetAcquire(HInvoke* invoke) {
+ GenerateVarHandleCompareAndSet(invoke, codegen_);
+}
+
+void IntrinsicLocationsBuilderX86::VisitVarHandleWeakCompareAndSetRelease(HInvoke* invoke) {
+ CreateVarHandleCompareAndSetLocations(invoke);
+}
+
+void IntrinsicCodeGeneratorX86::VisitVarHandleWeakCompareAndSetRelease(HInvoke* invoke) {
+ GenerateVarHandleCompareAndSet(invoke, codegen_);
+}
+
UNIMPLEMENTED_INTRINSIC(X86, MathRoundDouble)
UNIMPLEMENTED_INTRINSIC(X86, ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(X86, FloatIsInfinite)
@@ -3767,10 +3811,6 @@
UNIMPLEMENTED_INTRINSIC(X86, VarHandleSetOpaque)
UNIMPLEMENTED_INTRINSIC(X86, VarHandleSetRelease)
UNIMPLEMENTED_INTRINSIC(X86, VarHandleSetVolatile)
-UNIMPLEMENTED_INTRINSIC(X86, VarHandleWeakCompareAndSet)
-UNIMPLEMENTED_INTRINSIC(X86, VarHandleWeakCompareAndSetAcquire)
-UNIMPLEMENTED_INTRINSIC(X86, VarHandleWeakCompareAndSetPlain)
-UNIMPLEMENTED_INTRINSIC(X86, VarHandleWeakCompareAndSetRelease)
UNREACHABLE_INTRINSICS(X86)