author Andra Danciu <andradanciu@google.com> 2020-09-15 14:27:21 +0000
committer Orion Hodson <oth@google.com> 2020-09-16 14:47:30 +0000
commit 52d2c0c3af51b9da9e20cfe6b10d72b816ddb7af (patch)
tree 73545ea3b9948923bcd94ec7cdbf9b1b17d2edfb /compiler/optimizing
parent afad9f95c5321e2d6fe312c377dc8a11336f5fa4 (diff)
X86: Add the other variants of VarHandle.compareAndSet
This commit implements VarHandle WeakCompareAndSet, WeakCompareAndSetPlain, WeakCompareAndSetAcquire and WeakCompareAndSetRelease.

Test: art/test.py --host -r -t 712-varhandle-invocation --32
Change-Id: I1e0f5e7dcb21cd0f107d5a74cb534cef992ef9fc
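For context, the four weak variants intrinsified by this change correspond to the java.lang.invoke.VarHandle access modes shown in the minimal Java-level sketch below; the Counter class and its 'value' field are hypothetical and only illustrate the calls that now map to the x86 intrinsic.

import java.lang.invoke.MethodHandles;
import java.lang.invoke.VarHandle;

// Hypothetical example class; only the VarHandle calls are relevant here.
class Counter {
    volatile int value;

    static final VarHandle VALUE;
    static {
        try {
            VALUE = MethodHandles.lookup()
                    .findVarHandle(Counter.class, "value", int.class);
        } catch (ReflectiveOperationException e) {
            throw new ExceptionInInitializerError(e);
        }
    }

    boolean tryBump(int expected, int newValue) {
        // Unlike compareAndSet, each weak variant may fail spuriously even when
        // the current value equals 'expected'.
        return VALUE.weakCompareAndSet(this, expected, newValue)
            || VALUE.weakCompareAndSetPlain(this, expected, newValue)
            || VALUE.weakCompareAndSetAcquire(this, expected, newValue)
            || VALUE.weakCompareAndSetRelease(this, expected, newValue);
    }
}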
Diffstat (limited to 'compiler/optimizing')
-rw-r--r-- compiler/optimizing/code_generator_x86.cc 7
-rw-r--r-- compiler/optimizing/intrinsics_x86.cc 68
2 files changed, 59 insertions, 16 deletions
diff --git a/compiler/optimizing/code_generator_x86.cc b/compiler/optimizing/code_generator_x86.cc
index 4a0eb2e42e..1a83976d60 100644
--- a/compiler/optimizing/code_generator_x86.cc
+++ b/compiler/optimizing/code_generator_x86.cc
@@ -33,6 +33,7 @@
#include "lock_word.h"
#include "mirror/array-inl.h"
#include "mirror/class-inl.h"
+#include "mirror/var_handle.h"
#include "scoped_thread_state_change-inl.h"
#include "thread.h"
#include "utils/assembler.h"
@@ -572,8 +573,10 @@ class ReadBarrierMarkAndUpdateFieldSlowPathX86 : public SlowPathCode {
<< "Unexpected instruction in read barrier marking and field updating slow path: "
<< instruction_->DebugName();
DCHECK(instruction_->GetLocations()->Intrinsified());
- DCHECK(instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kUnsafeCASObject ||
- instruction_->AsInvoke()->GetIntrinsic() == Intrinsics::kVarHandleCompareAndSet);
+ Intrinsics intrinsic = instruction_->AsInvoke()->GetIntrinsic();
+ static constexpr auto kVarHandleCAS = mirror::VarHandle::AccessModeTemplate::kCompareAndSet;
+ DCHECK(intrinsic == Intrinsics::kUnsafeCASObject ||
+ mirror::VarHandle::GetAccessModeTemplateByIntrinsic(intrinsic) == kVarHandleCAS);
__ Bind(GetEntryLabel());
if (unpoison_ref_before_marking_) {
diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc
index f10b5112f9..1c1b4ea62e 100644
--- a/compiler/optimizing/intrinsics_x86.cc
+++ b/compiler/optimizing/intrinsics_x86.cc
@@ -3572,7 +3572,7 @@ void IntrinsicCodeGeneratorX86::VisitVarHandleSet(HInvoke* invoke) {
__ Bind(slow_path->GetExitLabel());
}
-void IntrinsicLocationsBuilderX86::VisitVarHandleCompareAndSet(HInvoke* invoke) {
+static void CreateVarHandleCompareAndSetLocations(HInvoke* invoke) {
// The only read barrier implementation supporting the
// VarHandleGet intrinsic is the Baker-style read barriers.
if (kEmitCompilerReadBarrier && !kUseBakerReadBarrier) {
@@ -3598,7 +3598,8 @@ void IntrinsicLocationsBuilderX86::VisitVarHandleCompareAndSet(HInvoke* invoke)
return;
}
- LocationSummary* locations = new (allocator_) LocationSummary(
+ ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
+ LocationSummary* locations = new (allocator) LocationSummary(
invoke, LocationSummary::kCallOnSlowPath, kIntrinsified);
locations->AddTemp(Location::RequiresRegister());
locations->AddTemp(Location::RequiresRegister());
@@ -3628,12 +3629,12 @@ void IntrinsicLocationsBuilderX86::VisitVarHandleCompareAndSet(HInvoke* invoke)
locations->SetOut(Location::RegisterLocation(EAX));
}
-void IntrinsicCodeGeneratorX86::VisitVarHandleCompareAndSet(HInvoke* invoke) {
+static void GenerateVarHandleCompareAndSet(HInvoke* invoke, CodeGeneratorX86* codegen) {
// The only read barrier implementation supporting the
// VarHandleGet intrinsic is the Baker-style read barriers.
DCHECK(!kEmitCompilerReadBarrier || kUseBakerReadBarrier);
- X86Assembler* assembler = codegen_->GetAssembler();
+ X86Assembler* assembler = codegen->GetAssembler();
LocationSummary* locations = invoke->GetLocations();
uint32_t number_of_arguments = invoke->GetNumberOfArguments();
uint32_t expected_value_index = number_of_arguments - 2;
@@ -3646,8 +3647,8 @@ void IntrinsicCodeGeneratorX86::VisitVarHandleCompareAndSet(HInvoke* invoke) {
Register offset = locations->GetTemp(0).AsRegister<Register>();
Register temp = locations->GetTemp(1).AsRegister<Register>();
Register temp2 = locations->GetTemp(2).AsRegister<Register>();
- SlowPathCode* slow_path = new (codegen_->GetScopedAllocator()) IntrinsicSlowPathX86(invoke);
- codegen_->AddSlowPath(slow_path);
+ SlowPathCode* slow_path = new (codegen->GetScopedAllocator()) IntrinsicSlowPathX86(invoke);
+ codegen->AddSlowPath(slow_path);
GenerateVarHandleCommonChecks(invoke, temp, slow_path, assembler);
// Check the varType.primitiveType against the type of the expected value.
@@ -3672,7 +3673,7 @@ void IntrinsicCodeGeneratorX86::VisitVarHandleCompareAndSet(HInvoke* invoke) {
// Get the field referred by the VarHandle. The returned register contains the object reference
// or the declaring class. The field offset will be placed in 'offset'. For static fields, the
// declaring class will be placed in 'temp' register.
- Register reference = GenerateVarHandleFieldReference(invoke, codegen_, temp, offset);
+ Register reference = GenerateVarHandleFieldReference(invoke, codegen, temp, offset);
uint32_t expected_coordinates_count = GetExpectedVarHandleCoordinatesCount(invoke);
// For generating the compare and exchange, we need 2 temporaries. In case of a static field, the
@@ -3681,17 +3682,60 @@ void IntrinsicCodeGeneratorX86::VisitVarHandleCompareAndSet(HInvoke* invoke) {
temp = (expected_coordinates_count == 1u) ? temp : locations->GetTemp(3).AsRegister<Register>();
DCHECK_NE(temp, reference);
+ // We are using `lock cmpxchg` in all cases because there is no CAS equivalent that has weak
+ // failure semantics. `lock cmpxchg` has full barrier semantics, and we don't need scheduling
+ // barriers at this time.
+
if (type == DataType::Type::kReference) {
- GenReferenceCAS(invoke, codegen_, expected_value, new_value, reference, offset, temp, temp2);
+ GenReferenceCAS(invoke, codegen, expected_value, new_value, reference, offset, temp, temp2);
} else {
Location out = locations->Out();
- GenPrimitiveCAS(type, codegen_, expected_value, new_value, reference, offset, out, temp);
+ GenPrimitiveCAS(type, codegen, expected_value, new_value, reference, offset, out, temp);
}
- codegen_->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
__ Bind(slow_path->GetExitLabel());
}
+void IntrinsicLocationsBuilderX86::VisitVarHandleCompareAndSet(HInvoke* invoke) {
+ CreateVarHandleCompareAndSetLocations(invoke);
+}
+
+void IntrinsicCodeGeneratorX86::VisitVarHandleCompareAndSet(HInvoke* invoke) {
+ GenerateVarHandleCompareAndSet(invoke, codegen_);
+}
+
+void IntrinsicLocationsBuilderX86::VisitVarHandleWeakCompareAndSet(HInvoke* invoke) {
+ CreateVarHandleCompareAndSetLocations(invoke);
+}
+
+void IntrinsicCodeGeneratorX86::VisitVarHandleWeakCompareAndSet(HInvoke* invoke) {
+ GenerateVarHandleCompareAndSet(invoke, codegen_);
+}
+
+void IntrinsicLocationsBuilderX86::VisitVarHandleWeakCompareAndSetPlain(HInvoke* invoke) {
+ CreateVarHandleCompareAndSetLocations(invoke);
+}
+
+void IntrinsicCodeGeneratorX86::VisitVarHandleWeakCompareAndSetPlain(HInvoke* invoke) {
+ GenerateVarHandleCompareAndSet(invoke, codegen_);
+}
+
+void IntrinsicLocationsBuilderX86::VisitVarHandleWeakCompareAndSetAcquire(HInvoke* invoke) {
+ CreateVarHandleCompareAndSetLocations(invoke);
+}
+
+void IntrinsicCodeGeneratorX86::VisitVarHandleWeakCompareAndSetAcquire(HInvoke* invoke) {
+ GenerateVarHandleCompareAndSet(invoke, codegen_);
+}
+
+void IntrinsicLocationsBuilderX86::VisitVarHandleWeakCompareAndSetRelease(HInvoke* invoke) {
+ CreateVarHandleCompareAndSetLocations(invoke);
+}
+
+void IntrinsicCodeGeneratorX86::VisitVarHandleWeakCompareAndSetRelease(HInvoke* invoke) {
+ GenerateVarHandleCompareAndSet(invoke, codegen_);
+}
+
UNIMPLEMENTED_INTRINSIC(X86, MathRoundDouble)
UNIMPLEMENTED_INTRINSIC(X86, ReferenceGetReferent)
UNIMPLEMENTED_INTRINSIC(X86, FloatIsInfinite)
@@ -3767,10 +3811,6 @@ UNIMPLEMENTED_INTRINSIC(X86, VarHandleGetVolatile)
UNIMPLEMENTED_INTRINSIC(X86, VarHandleSetOpaque)
UNIMPLEMENTED_INTRINSIC(X86, VarHandleSetRelease)
UNIMPLEMENTED_INTRINSIC(X86, VarHandleSetVolatile)
-UNIMPLEMENTED_INTRINSIC(X86, VarHandleWeakCompareAndSet)
-UNIMPLEMENTED_INTRINSIC(X86, VarHandleWeakCompareAndSetAcquire)
-UNIMPLEMENTED_INTRINSIC(X86, VarHandleWeakCompareAndSetPlain)
-UNIMPLEMENTED_INTRINSIC(X86, VarHandleWeakCompareAndSetRelease)
UNREACHABLE_INTRINSICS(X86)
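A note on the `lock cmpxchg` comment added in GenerateVarHandleCompareAndSet above: x86 has no compare-and-swap with weak failure semantics, and the weak VarHandle variants are permitted (but not required) to fail spuriously, so the strong `lock cmpxchg` is a correct implementation of all of them. Callers typically wrap a weak CAS in a retry loop anyway, as in this hypothetical Java-level helper (not part of the commit):

// Hypothetical helper showing the usual retry loop around a weak CAS:
// spurious failures are simply retried, so mapping the weak variants to a
// strong `lock cmpxchg` does not change observable behaviour.
static int getAndAdd(VarHandle vh, Object obj, int delta) {
    int current;
    do {
        current = (int) vh.getVolatile(obj);
    } while (!vh.weakCompareAndSet(obj, current, current + delta));
    return current;
}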