author    Santiago Aboy Solanes <solanes@google.com>  2024-10-11 15:40:57 +0000
committer Treehugger Robot <android-test-infra-autosubmit@system.gserviceaccount.com>  2024-10-11 18:16:08 +0000
commit    c1bd4376b18ff46e5156cc597bd3311dd86fabd9 (patch)
tree      290cc78dc2bc21f0eb2ad87deb524d2fc9d767b2 /compiler/optimizing
parent    23cfc82c8eaad0b7e613dcbc2447061c06f310a0 (diff)
Revert "Add intrinsics for the absolute forms of unsafe.{get,put}Int"
This reverts commit bcb5c19e5e200607fe76294aeb5273ddac5f04ae. Bug: 370098695 Reason for revert: Breaks LUCI builds for arm https://ci.chromium.org/ui/p/art/builders/ci/angler-armv7-non-gen-cc/4506/overview Change-Id: Ia72938c65c45db0ccd14f8901e715a9ec2930087
Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/intrinsics_arm64.cc     |  99
-rw-r--r--  compiler/optimizing/intrinsics_arm_vixl.cc  | 303
-rw-r--r--  compiler/optimizing/intrinsics_riscv64.cc   |  88
-rw-r--r--  compiler/optimizing/intrinsics_x86.cc       | 156
-rw-r--r--  compiler/optimizing/intrinsics_x86_64.cc    |  97
5 files changed, 61 insertions, 682 deletions
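
For orientation: the reverted intrinsics target the address-only Unsafe accessors, getInt(long address) and putInt(long address, int x), as opposed to the relative object-plus-offset forms such as getInt(Object o, long offset). The removed code below makes that difference visible in the HInvoke input layout; a condensed sketch, using only the index constants and helpers that appear in the hunks that follow:

    // Relative form, e.g. Unsafe.putInt(Object o, long offset, int x):
    //   InputAt(1) = object base, InputAt(2) = long offset, InputAt(3) = value.
    // Absolute form, e.g. Unsafe.putInt(long address, int x):
    //   InputAt(1) = raw native address, InputAt(2) = value.
    static constexpr int kAddressIndex = 1;
    static constexpr int kValueIndex = 2;
    Location address_loc = invoke->GetLocations()->InAt(kAddressIndex);
    Location value_loc = invoke->GetLocations()->InAt(kValueIndex);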
diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc
index 9ff34c74de..851b6e96ac 100644
--- a/compiler/optimizing/intrinsics_arm64.cc
+++ b/compiler/optimizing/intrinsics_arm64.cc
@@ -707,6 +707,7 @@ static bool ReadBarrierNeedsTemp(bool is_volatile, HInvoke* invoke) {
invoke->InputAt(2)->AsLongConstant()->GetValue() >= kReferenceLoadMinFarOffset;
}
+
static void GenUnsafeGet(HInvoke* invoke,
DataType::Type type,
bool is_volatile,
@@ -769,26 +770,6 @@ static void GenUnsafeGet(HInvoke* invoke,
}
}
-static void GenUnsafeGetAbsolute(HInvoke* invoke,
- DataType::Type type,
- bool is_volatile,
- CodeGeneratorARM64* codegen) {
- LocationSummary* locations = invoke->GetLocations();
- DCHECK((type == DataType::Type::kInt8) ||
- (type == DataType::Type::kInt32) ||
- (type == DataType::Type::kInt64));
- Location address_loc = locations->InAt(1);
- MemOperand mem_op = MemOperand(XRegisterFrom(address_loc));
- Location trg_loc = locations->Out();
- Register trg = RegisterFrom(trg_loc, type);
-
- if (is_volatile) {
- codegen->LoadAcquire(invoke, type, trg, mem_op, /* needs_null_check= */ true);
- } else {
- codegen->Load(type, trg, mem_op);
- }
-}
-
static void CreateUnsafeGetLocations(ArenaAllocator* allocator,
HInvoke* invoke,
CodeGeneratorARM64* codegen,
@@ -815,21 +796,9 @@ static void CreateUnsafeGetLocations(ArenaAllocator* allocator,
(can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap));
}
-static void CreateUnsafeGetAbsoluteLocations(ArenaAllocator* allocator,
- HInvoke* invoke) {
- LocationSummary* locations =
- new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
- locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
- locations->SetInAt(1, Location::RequiresRegister());
- locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
-}
-
void IntrinsicLocationsBuilderARM64::VisitUnsafeGet(HInvoke* invoke) {
VisitJdkUnsafeGet(invoke);
}
-void IntrinsicLocationsBuilderARM64::VisitUnsafeGetAbsolute(HInvoke* invoke) {
- VisitJdkUnsafeGetAbsolute(invoke);
-}
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
VisitJdkUnsafeGetVolatile(invoke);
}
@@ -848,12 +817,10 @@ void IntrinsicLocationsBuilderARM64::VisitUnsafeGetObjectVolatile(HInvoke* invok
void IntrinsicLocationsBuilderARM64::VisitUnsafeGetByte(HInvoke* invoke) {
VisitJdkUnsafeGetByte(invoke);
}
+
void IntrinsicLocationsBuilderARM64::VisitJdkUnsafeGet(HInvoke* invoke) {
CreateUnsafeGetLocations(allocator_, invoke, codegen_);
}
-void IntrinsicLocationsBuilderARM64::VisitJdkUnsafeGetAbsolute(HInvoke* invoke) {
- CreateUnsafeGetAbsoluteLocations(allocator_, invoke);
-}
void IntrinsicLocationsBuilderARM64::VisitJdkUnsafeGetVolatile(HInvoke* invoke) {
CreateUnsafeGetLocations(allocator_, invoke, codegen_, /* is_volatile= */ true);
}
@@ -885,9 +852,6 @@ void IntrinsicLocationsBuilderARM64::VisitJdkUnsafeGetByte(HInvoke* invoke) {
void IntrinsicCodeGeneratorARM64::VisitUnsafeGet(HInvoke* invoke) {
VisitJdkUnsafeGet(invoke);
}
-void IntrinsicCodeGeneratorARM64::VisitUnsafeGetAbsolute(HInvoke* invoke) {
- VisitJdkUnsafeGetAbsolute(invoke);
-}
void IntrinsicCodeGeneratorARM64::VisitUnsafeGetVolatile(HInvoke* invoke) {
VisitJdkUnsafeGetVolatile(invoke);
}
@@ -910,9 +874,6 @@ void IntrinsicCodeGeneratorARM64::VisitUnsafeGetByte(HInvoke* invoke) {
void IntrinsicCodeGeneratorARM64::VisitJdkUnsafeGet(HInvoke* invoke) {
GenUnsafeGet(invoke, DataType::Type::kInt32, /*is_volatile=*/ false, codegen_);
}
-void IntrinsicCodeGeneratorARM64::VisitJdkUnsafeGetAbsolute(HInvoke* invoke) {
- GenUnsafeGetAbsolute(invoke, DataType::Type::kInt32, /*is_volatile=*/ false, codegen_);
-}
void IntrinsicCodeGeneratorARM64::VisitJdkUnsafeGetVolatile(HInvoke* invoke) {
GenUnsafeGet(invoke, DataType::Type::kInt32, /*is_volatile=*/ true, codegen_);
}
@@ -961,29 +922,9 @@ static void CreateUnsafePutLocations(ArenaAllocator* allocator, HInvoke* invoke)
}
}
-static void CreateUnsafePutAbsoluteLocations(ArenaAllocator* allocator, HInvoke* invoke) {
- LocationSummary* locations =
- new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
- static constexpr int kAddressIndex = 1;
- static constexpr int kValueIndex = 2;
- // Unused receiver.
- locations->SetInAt(0, Location::NoLocation());
- // The address.
- locations->SetInAt(kAddressIndex, Location::RequiresRegister());
- // The value.
- if (IsZeroBitPattern(invoke->InputAt(kValueIndex))) {
- locations->SetInAt(kValueIndex, Location::ConstantLocation(invoke->InputAt(kValueIndex)));
- } else {
- locations->SetInAt(kValueIndex, Location::RequiresRegister());
- }
-}
-
void IntrinsicLocationsBuilderARM64::VisitUnsafePut(HInvoke* invoke) {
VisitJdkUnsafePut(invoke);
}
-void IntrinsicLocationsBuilderARM64::VisitUnsafePutAbsolute(HInvoke* invoke) {
- VisitJdkUnsafePutAbsolute(invoke);
-}
void IntrinsicLocationsBuilderARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
VisitJdkUnsafePutOrdered(invoke);
}
@@ -1015,9 +956,6 @@ void IntrinsicLocationsBuilderARM64::VisitUnsafePutByte(HInvoke* invoke) {
void IntrinsicLocationsBuilderARM64::VisitJdkUnsafePut(HInvoke* invoke) {
CreateUnsafePutLocations(allocator_, invoke);
}
-void IntrinsicLocationsBuilderARM64::VisitJdkUnsafePutAbsolute(HInvoke* invoke) {
- CreateUnsafePutAbsoluteLocations(allocator_, invoke);
-}
void IntrinsicLocationsBuilderARM64::VisitJdkUnsafePutOrdered(HInvoke* invoke) {
CreateUnsafePutLocations(allocator_, invoke);
}
@@ -1066,7 +1004,7 @@ static void GenUnsafePut(HInvoke* invoke,
static constexpr int kOffsetIndex = 2;
static constexpr int kValueIndex = 3;
Register base = WRegisterFrom(locations->InAt(1)); // Object pointer.
- Location offset = locations->InAt(kOffsetIndex); // Long offset.
+ Location offset = locations->InAt(kOffsetIndex); // Long offset.
CPURegister value = InputCPURegisterOrZeroRegAt(invoke, kValueIndex);
CPURegister source = value;
MemOperand mem_op;
@@ -1104,33 +1042,9 @@ static void GenUnsafePut(HInvoke* invoke,
}
}
-static void GenUnsafePutAbsolute(HInvoke* invoke,
- DataType::Type type,
- bool is_volatile,
- bool is_ordered,
- CodeGeneratorARM64* codegen) {
- LocationSummary* locations = invoke->GetLocations();
- MacroAssembler* masm = codegen->GetVIXLAssembler();
-
- static constexpr int kAddressIndex = 1;
- static constexpr int kValueIndex = 2;
- Location address_loc = locations->InAt(kAddressIndex);
- MemOperand mem_op = MemOperand(WRegisterFrom(address_loc).X());
- CPURegister value = InputCPURegisterOrZeroRegAt(invoke, kValueIndex);
-
- if (is_volatile || is_ordered) {
- codegen->StoreRelease(invoke, type, value, mem_op, /* needs_null_check= */ false);
- } else {
- codegen->Store(type, value, mem_op);
- }
-}
-
void IntrinsicCodeGeneratorARM64::VisitUnsafePut(HInvoke* invoke) {
VisitJdkUnsafePut(invoke);
}
-void IntrinsicCodeGeneratorARM64::VisitUnsafePutAbsolute(HInvoke* invoke) {
- VisitJdkUnsafePutAbsolute(invoke);
-}
void IntrinsicCodeGeneratorARM64::VisitUnsafePutOrdered(HInvoke* invoke) {
VisitJdkUnsafePutOrdered(invoke);
}
@@ -1166,13 +1080,6 @@ void IntrinsicCodeGeneratorARM64::VisitJdkUnsafePut(HInvoke* invoke) {
/*is_ordered=*/ false,
codegen_);
}
-void IntrinsicCodeGeneratorARM64::VisitJdkUnsafePutAbsolute(HInvoke* invoke) {
- GenUnsafePutAbsolute(invoke,
- DataType::Type::kInt32,
- /*is_volatile=*/ false,
- /*is_ordered=*/ false,
- codegen_);
-}
void IntrinsicCodeGeneratorARM64::VisitJdkUnsafePutOrdered(HInvoke* invoke) {
GenUnsafePut(invoke,
DataType::Type::kInt32,
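
A condensed view of the arm64 absolute-get path that the hunks above delete: the incoming long is already a complete address, so there is no base-plus-offset arithmetic, and a volatile access upgrades the load to an acquire load (on arm64, LoadAcquire emits an LDAR-family instruction rather than a separate fence). A sketch reusing only helpers visible in the removed code:

    Location address_loc = invoke->GetLocations()->InAt(1);
    MemOperand mem_op = MemOperand(XRegisterFrom(address_loc));  // raw address, no offset
    if (is_volatile) {
      codegen->LoadAcquire(invoke, type, trg, mem_op, /* needs_null_check= */ true);
    } else {
      codegen->Load(type, trg, mem_op);
    }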
diff --git a/compiler/optimizing/intrinsics_arm_vixl.cc b/compiler/optimizing/intrinsics_arm_vixl.cc
index c8cf1690e3..7f89f56d0c 100644
--- a/compiler/optimizing/intrinsics_arm_vixl.cc
+++ b/compiler/optimizing/intrinsics_arm_vixl.cc
@@ -17,7 +17,6 @@
#include "intrinsics_arm_vixl.h"
#include "aarch32/constants-aarch32.h"
-#include "aarch32/operands-aarch32.h"
#include "arch/arm/callee_save_frame_arm.h"
#include "arch/arm/instruction_set_features_arm.h"
#include "art_method.h"
@@ -2581,29 +2580,8 @@ static void GenerateIntrinsicGet(HInvoke* invoke,
DCHECK(acquire_barrier || order == std::memory_order_relaxed);
DCHECK(atomic || order == std::memory_order_relaxed);
- MemOperand address(base);
- MemOperand absolute_address = address;
ArmVIXLAssembler* assembler = codegen->GetAssembler();
- UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
-
- // If offset is valid then this is a get from a relative address.
- if (offset.IsValid()) {
- address = MemOperand(base, offset);
- switch (type) {
- case DataType::Type::kInt64:
- case DataType::Type::kFloat32:
- case DataType::Type::kFloat64: {
- vixl32::Register temp_reg_absolute_address = temps.Acquire();
- __ Add(temp_reg_absolute_address, base, offset);
- absolute_address = MemOperand(temp_reg_absolute_address);
- break;
- }
- default:
- // No need
- break;
- }
- }
-
+ MemOperand address(base, offset);
switch (type) {
case DataType::Type::kBool:
__ Ldrb(RegisterFrom(out), address);
@@ -2623,10 +2601,13 @@ static void GenerateIntrinsicGet(HInvoke* invoke,
case DataType::Type::kInt64:
if (Use64BitExclusiveLoadStore(atomic, codegen)) {
vixl32::Register strexd_tmp = RegisterFrom(maybe_temp);
+ UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
+ const vixl32::Register temp_reg = temps.Acquire();
+ __ Add(temp_reg, base, offset);
vixl32::Label loop;
__ Bind(&loop);
- __ Ldrexd(LowRegisterFrom(out), HighRegisterFrom(out), absolute_address);
- __ Strexd(strexd_tmp, LowRegisterFrom(out), HighRegisterFrom(out), absolute_address);
+ __ Ldrexd(LowRegisterFrom(out), HighRegisterFrom(out), MemOperand(temp_reg));
+ __ Strexd(strexd_tmp, LowRegisterFrom(out), HighRegisterFrom(out), MemOperand(temp_reg));
__ Cmp(strexd_tmp, 0);
__ B(ne, &loop);
} else {
@@ -2645,23 +2626,29 @@ static void GenerateIntrinsicGet(HInvoke* invoke,
}
break;
case DataType::Type::kFloat32: {
- __ Vldr(SRegisterFrom(out), absolute_address);
+ UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
+ const vixl32::Register temp_reg = temps.Acquire();
+ __ Add(temp_reg, base, offset);
+ __ Vldr(SRegisterFrom(out), MemOperand(temp_reg));
break;
}
case DataType::Type::kFloat64: {
+ UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
+ const vixl32::Register temp_reg = temps.Acquire();
+ __ Add(temp_reg, base, offset);
if (Use64BitExclusiveLoadStore(atomic, codegen)) {
vixl32::Register lo = RegisterFrom(maybe_temp);
vixl32::Register hi = RegisterFrom(maybe_temp2);
vixl32::Register strexd_tmp = RegisterFrom(maybe_temp3);
vixl32::Label loop;
__ Bind(&loop);
- __ Ldrexd(lo, hi, absolute_address);
- __ Strexd(strexd_tmp, lo, hi, absolute_address);
+ __ Ldrexd(lo, hi, MemOperand(temp_reg));
+ __ Strexd(strexd_tmp, lo, hi, MemOperand(temp_reg));
__ Cmp(strexd_tmp, 0);
__ B(ne, &loop);
__ Vmov(DRegisterFrom(out), lo, hi);
} else {
- __ Vldr(DRegisterFrom(out), absolute_address);
+ __ Vldr(DRegisterFrom(out), MemOperand(temp_reg));
}
break;
}
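
The restored lines above re-materialize base + offset into a scratch register for the 64-bit and floating-point cases. The reason, which the diff itself never states: AArch32 LDREXD accepts only a bare [Rn] base, and VLDR only [Rn, #imm], so a (base, register-offset) operand has to be flattened first. The idiom, using only calls present in the hunk:

    UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
    const vixl32::Register temp_reg = temps.Acquire();
    __ Add(temp_reg, base, offset);                      // temp_reg = base + offset
    __ Vldr(DRegisterFrom(out), MemOperand(temp_reg));   // VLDR needs a plain base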
@@ -2709,17 +2696,6 @@ static void CreateUnsafeGetLocations(HInvoke* invoke,
}
}
-static void CreateUnsafeGetAbsoluteLocations(HInvoke* invoke) {
- ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
- LocationSummary* locations =
- new (allocator) LocationSummary(invoke,
- LocationSummary::kNoCall,
- kIntrinsified);
- locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
- locations->SetInAt(1, Location::RequiresRegister());
- locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
-}
-
static void GenUnsafeGet(HInvoke* invoke,
CodeGeneratorARMVIXL* codegen,
DataType::Type type,
@@ -2747,47 +2723,14 @@ static void GenUnsafeGet(HInvoke* invoke,
/*maybe_temp3=*/ Location::NoLocation());
}
-static void GenUnsafeGetAbsolute(HInvoke* invoke,
- CodeGeneratorARMVIXL* codegen,
- DataType::Type type,
- std::memory_order order,
- bool atomic) {
- LocationSummary* locations = invoke->GetLocations();
- vixl32::Register address = LowRegisterFrom(locations->InAt(1)); // Long offset, lo part only.
- Location out = locations->Out();
- Location maybe_temp = Location::NoLocation();
- if (type == DataType::Type::kInt64 && Use64BitExclusiveLoadStore(atomic, codegen)) {
- maybe_temp = locations->GetTemp(0);
- }
- GenerateIntrinsicGet(invoke,
- codegen,
- type,
- order,
- atomic,
- address,
- NoReg, // No offset.
- out,
- maybe_temp,
- /*maybe_temp2=*/ Location::NoLocation(),
- /*maybe_temp3=*/ Location::NoLocation());
-}
-
void IntrinsicLocationsBuilderARMVIXL::VisitUnsafeGet(HInvoke* invoke) {
VisitJdkUnsafeGet(invoke);
}
-void IntrinsicLocationsBuilderARMVIXL::VisitUnsafeGetAbsolute(HInvoke* invoke) {
- VisitJdkUnsafeGetAbsolute(invoke);
-}
-
void IntrinsicCodeGeneratorARMVIXL::VisitUnsafeGet(HInvoke* invoke) {
VisitJdkUnsafeGet(invoke);
}
-void IntrinsicCodeGeneratorARMVIXL::VisitUnsafeGetAbsolute(HInvoke* invoke) {
- VisitJdkUnsafeGetAbsolute(invoke);
-}
-
void IntrinsicLocationsBuilderARMVIXL::VisitUnsafeGetVolatile(HInvoke* invoke) {
VisitJdkUnsafeGetVolatile(invoke);
}
@@ -2840,20 +2783,11 @@ void IntrinsicLocationsBuilderARMVIXL::VisitJdkUnsafeGet(HInvoke* invoke) {
CreateUnsafeGetLocations(invoke, codegen_, DataType::Type::kInt32, /*atomic=*/ false);
}
-void IntrinsicLocationsBuilderARMVIXL::VisitJdkUnsafeGetAbsolute(HInvoke* invoke) {
- CreateUnsafeGetAbsoluteLocations(invoke);
-}
-
void IntrinsicCodeGeneratorARMVIXL::VisitJdkUnsafeGet(HInvoke* invoke) {
GenUnsafeGet(
invoke, codegen_, DataType::Type::kInt32, std::memory_order_relaxed, /*atomic=*/ false);
}
-void IntrinsicCodeGeneratorARMVIXL::VisitJdkUnsafeGetAbsolute(HInvoke* invoke) {
- GenUnsafeGetAbsolute(
- invoke, codegen_, DataType::Type::kInt32, std::memory_order_relaxed, /*atomic=*/ false);
-}
-
void IntrinsicLocationsBuilderARMVIXL::VisitJdkUnsafeGetVolatile(HInvoke* invoke) {
CreateUnsafeGetLocations(invoke, codegen_, DataType::Type::kInt32, /*atomic=*/ true);
}
@@ -2935,16 +2869,38 @@ void IntrinsicCodeGeneratorARMVIXL::VisitJdkUnsafeGetByte(HInvoke* invoke) {
invoke, codegen_, DataType::Type::kInt8, std::memory_order_relaxed, /*atomic=*/ false);
}
-static void GenerateIntrinsicSetStore(CodeGeneratorARMVIXL* codegen,
- ArmVIXLAssembler* assembler,
- DataType::Type type,
- bool atomic,
- vixl32::MemOperand address,
- Location value,
- bool seq_cst_barrier,
- Location maybe_temp,
- Location maybe_temp2,
- Location maybe_temp3) {
+static void GenerateIntrinsicSet(CodeGeneratorARMVIXL* codegen,
+ DataType::Type type,
+ std::memory_order order,
+ bool atomic,
+ vixl32::Register base,
+ vixl32::Register offset,
+ Location value,
+ Location maybe_temp,
+ Location maybe_temp2,
+ Location maybe_temp3) {
+ bool seq_cst_barrier = (order == std::memory_order_seq_cst);
+ bool release_barrier = seq_cst_barrier || (order == std::memory_order_release);
+ DCHECK(release_barrier || order == std::memory_order_relaxed);
+ DCHECK(atomic || order == std::memory_order_relaxed);
+
+ ArmVIXLAssembler* assembler = codegen->GetAssembler();
+ if (release_barrier) {
+ codegen->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
+ }
+ UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
+ if (kPoisonHeapReferences && type == DataType::Type::kReference) {
+ vixl32::Register temp = temps.Acquire();
+ __ Mov(temp, RegisterFrom(value));
+ assembler->PoisonHeapReference(temp);
+ value = LocationFrom(temp);
+ }
+ MemOperand address = offset.IsValid() ? MemOperand(base, offset) : MemOperand(base);
+ if (offset.IsValid() && (DataType::Is64BitType(type) || type == DataType::Type::kFloat32)) {
+ const vixl32::Register temp_reg = temps.Acquire();
+ __ Add(temp_reg, base, offset);
+ address = MemOperand(temp_reg);
+ }
switch (type) {
case DataType::Type::kBool:
case DataType::Type::kInt8:
@@ -2995,103 +2951,11 @@ static void GenerateIntrinsicSetStore(CodeGeneratorARMVIXL* codegen,
LOG(FATAL) << "Unexpected type " << type;
UNREACHABLE();
}
-
if (seq_cst_barrier) {
codegen->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
}
}
-static void GenerateIntrinsicSet(CodeGeneratorARMVIXL* codegen,
- DataType::Type type,
- std::memory_order order,
- bool atomic,
- vixl32::Register address,
- Location value,
- Location maybe_temp,
- Location maybe_temp2,
- Location maybe_temp3) {
- bool seq_cst_barrier = order == std::memory_order_seq_cst;
- bool release_barrier = seq_cst_barrier || order == std::memory_order_release;
- DCHECK(release_barrier || order == std::memory_order_relaxed);
- DCHECK(atomic || order == std::memory_order_relaxed);
-
- ArmVIXLAssembler* assembler = codegen->GetAssembler();
- if (release_barrier) {
- codegen->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
- }
- UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
- GenerateIntrinsicSetStore(codegen,
- assembler,
- type,
- atomic,
- MemOperand(address),
- value,
- seq_cst_barrier,
- maybe_temp,
- maybe_temp2,
- maybe_temp3);
-}
-
-static void GenerateIntrinsicSet(CodeGeneratorARMVIXL* codegen,
- DataType::Type type,
- std::memory_order order,
- bool atomic,
- vixl32::Register base,
- vixl32::Register offset,
- Location value,
- Location maybe_temp,
- Location maybe_temp2,
- Location maybe_temp3) {
- bool seq_cst_barrier = (order == std::memory_order_seq_cst);
- bool release_barrier = seq_cst_barrier || (order == std::memory_order_release);
- DCHECK(release_barrier || order == std::memory_order_relaxed);
- DCHECK(atomic || order == std::memory_order_relaxed);
-
- ArmVIXLAssembler* assembler = codegen->GetAssembler();
- if (release_barrier) {
- codegen->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
- }
- UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
- if (kPoisonHeapReferences && type == DataType::Type::kReference) {
- vixl32::Register temp = temps.Acquire();
- __ Mov(temp, RegisterFrom(value));
- assembler->PoisonHeapReference(temp);
- value = LocationFrom(temp);
- }
- MemOperand address = offset.IsValid() ? MemOperand(base, offset) : MemOperand(base);
- if (offset.IsValid() && (DataType::Is64BitType(type) || type == DataType::Type::kFloat32)) {
- const vixl32::Register temp_reg = temps.Acquire();
- __ Add(temp_reg, base, offset);
- address = MemOperand(temp_reg);
- }
- GenerateIntrinsicSetStore(codegen,
- assembler,
- type,
- atomic,
- address,
- value,
- seq_cst_barrier,
- maybe_temp,
- maybe_temp2,
- maybe_temp3);
-}
-
-static void CreateUnsafePutTempLocations(CodeGeneratorARMVIXL* codegen,
- DataType::Type type,
- bool atomic,
- LocationSummary* locations) {
- if (type == DataType::Type::kInt64) {
- // Potentially need temps for ldrexd-strexd loop.
- if (Use64BitExclusiveLoadStore(atomic, codegen)) {
- locations->AddTemp(Location::RequiresRegister()); // Temp_lo.
- locations->AddTemp(Location::RequiresRegister()); // Temp_hi.
- }
- } else if (type == DataType::Type::kReference) {
- // Temp for card-marking.
- locations->AddTemp(Location::RequiresRegister()); // Temp.
- }
-}
-
static void CreateUnsafePutLocations(HInvoke* invoke,
CodeGeneratorARMVIXL* codegen,
DataType::Type type,
@@ -3103,20 +2967,17 @@ static void CreateUnsafePutLocations(HInvoke* invoke,
locations->SetInAt(1, Location::RequiresRegister());
locations->SetInAt(2, Location::RequiresRegister());
locations->SetInAt(3, Location::RequiresRegister());
- CreateUnsafePutTempLocations(codegen, type, atomic, locations);
-}
-static void CreateUnsafePutAbsoluteLocations(HInvoke* invoke,
- CodeGeneratorARMVIXL* codegen,
- DataType::Type type,
- bool atomic) {
- ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
- LocationSummary* locations =
- new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
- locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
- locations->SetInAt(1, Location::RequiresRegister());
- locations->SetInAt(2, Location::RequiresRegister());
- CreateUnsafePutTempLocations(codegen, type, atomic, locations);
+ if (type == DataType::Type::kInt64) {
+ // Potentially need temps for ldrexd-strexd loop.
+ if (Use64BitExclusiveLoadStore(atomic, codegen)) {
+ locations->AddTemp(Location::RequiresRegister()); // Temp_lo.
+ locations->AddTemp(Location::RequiresRegister()); // Temp_hi.
+ }
+ } else if (type == DataType::Type::kReference) {
+ // Temp for card-marking.
+ locations->AddTemp(Location::RequiresRegister()); // Temp.
+ }
}
static void GenUnsafePut(HInvoke* invoke,
@@ -3157,50 +3018,14 @@ static void GenUnsafePut(HInvoke* invoke,
}
}
-static void GenUnsafePutAbsolute(HInvoke* invoke,
- DataType::Type type,
- std::memory_order order,
- bool atomic,
- CodeGeneratorARMVIXL* codegen) {
- ArmVIXLAssembler* assembler = codegen->GetAssembler();
-
- LocationSummary* locations = invoke->GetLocations();
- vixl32::Register address = LowRegisterFrom(locations->InAt(1)); // Long offset, lo part only.
- Location value = locations->InAt(2);
- Location maybe_temp = Location::NoLocation();
- Location maybe_temp2 = Location::NoLocation();
- if (type == DataType::Type::kInt64 && Use64BitExclusiveLoadStore(atomic, codegen)) {
- maybe_temp = locations->GetTemp(0);
- maybe_temp2 = locations->GetTemp(1);
- }
-
- GenerateIntrinsicSet(codegen,
- type,
- order,
- atomic,
- address,
- value,
- maybe_temp,
- maybe_temp2,
- /*maybe_temp3=*/ Location::NoLocation());
-}
-
void IntrinsicLocationsBuilderARMVIXL::VisitUnsafePut(HInvoke* invoke) {
VisitJdkUnsafePut(invoke);
}
-void IntrinsicLocationsBuilderARMVIXL::VisitUnsafePutAbsolute(HInvoke* invoke) {
- VisitJdkUnsafePutAbsolute(invoke);
-}
-
void IntrinsicCodeGeneratorARMVIXL::VisitUnsafePut(HInvoke* invoke) {
VisitJdkUnsafePut(invoke);
}
-void IntrinsicCodeGeneratorARMVIXL::VisitUnsafePutAbsolute(HInvoke* invoke) {
- VisitJdkUnsafePutAbsolute(invoke);
-}
-
void IntrinsicLocationsBuilderARMVIXL::VisitUnsafePutOrdered(HInvoke* invoke) {
VisitJdkUnsafePutOrdered(invoke);
}
@@ -3276,10 +3101,6 @@ void IntrinsicLocationsBuilderARMVIXL::VisitJdkUnsafePut(HInvoke* invoke) {
CreateUnsafePutLocations(invoke, codegen_, DataType::Type::kInt32, /*atomic=*/ false);
}
-void IntrinsicLocationsBuilderARMVIXL::VisitJdkUnsafePutAbsolute(HInvoke* invoke) {
- CreateUnsafePutAbsoluteLocations(invoke, codegen_, DataType::Type::kInt32, /*atomic=*/ false);
-}
-
void IntrinsicCodeGeneratorARMVIXL::VisitJdkUnsafePut(HInvoke* invoke) {
GenUnsafePut(invoke,
DataType::Type::kInt32,
@@ -3288,14 +3109,6 @@ void IntrinsicCodeGeneratorARMVIXL::VisitJdkUnsafePut(HInvoke* invoke) {
codegen_);
}
-void IntrinsicCodeGeneratorARMVIXL::VisitJdkUnsafePutAbsolute(HInvoke* invoke) {
- GenUnsafePutAbsolute(invoke,
- DataType::Type::kInt32,
- std::memory_order_relaxed,
- /*atomic=*/false,
- codegen_);
-}
-
void IntrinsicLocationsBuilderARMVIXL::VisitJdkUnsafePutByte(HInvoke* invoke) {
CreateUnsafePutLocations(invoke, codegen_, DataType::Type::kInt8, /*atomic=*/ false);
}
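
The restored GenerateIntrinsicSet above also folds the store-side barrier discipline back into one place: release-or-stronger orders fence before the store (kAnyStore), and only seq_cst fences again afterwards (kAnyAny). Condensed from the hunk:

    bool seq_cst_barrier = (order == std::memory_order_seq_cst);
    bool release_barrier = seq_cst_barrier || (order == std::memory_order_release);
    if (release_barrier) {
      codegen->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);  // before the store
    }
    // ... emit the store itself ...
    if (seq_cst_barrier) {
      codegen->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);    // seq_cst only
    }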
diff --git a/compiler/optimizing/intrinsics_riscv64.cc b/compiler/optimizing/intrinsics_riscv64.cc
index 73118ca8c9..f705aa2a60 100644
--- a/compiler/optimizing/intrinsics_riscv64.cc
+++ b/compiler/optimizing/intrinsics_riscv64.cc
@@ -2331,15 +2331,6 @@ static void CreateUnsafeGetLocations(ArenaAllocator* allocator,
(can_call ? Location::kOutputOverlap : Location::kNoOutputOverlap));
}
-static void CreateUnsafeGetAbsoluteLocations(ArenaAllocator* allocator,
- HInvoke* invoke) {
- LocationSummary* locations =
- new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
- locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
- locations->SetInAt(1, Location::RequiresRegister());
- locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
-}
-
static void GenUnsafeGet(HInvoke* invoke,
CodeGeneratorRISCV64* codegen,
std::memory_order order,
@@ -2392,49 +2383,14 @@ static void GenUnsafeGet(HInvoke* invoke,
}
}
-static void GenUnsafeGetAbsolute(HInvoke* invoke,
- CodeGeneratorRISCV64* codegen,
- std::memory_order order,
- DataType::Type type) {
- DCHECK((type == DataType::Type::kInt8) ||
- (type == DataType::Type::kInt32) ||
- (type == DataType::Type::kInt64));
- LocationSummary* locations = invoke->GetLocations();
- Location address_loc = locations->InAt(1);
- XRegister address = address_loc.AsRegister<XRegister>();
- Location out_loc = locations->Out();
-
- bool seq_cst_barrier = order == std::memory_order_seq_cst;
- bool acquire_barrier = seq_cst_barrier || order == std::memory_order_acquire;
- DCHECK(acquire_barrier || order == std::memory_order_relaxed);
-
- if (seq_cst_barrier) {
- codegen->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
- }
-
- codegen->GetInstructionVisitor()->Load(out_loc, address, /*offset=*/ 0, type);
-
- if (acquire_barrier) {
- codegen->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
- }
-}
-
void IntrinsicLocationsBuilderRISCV64::VisitUnsafeGet(HInvoke* invoke) {
VisitJdkUnsafeGet(invoke);
}
-void IntrinsicLocationsBuilderRISCV64::VisitUnsafeGetAbsolute(HInvoke* invoke) {
- VisitJdkUnsafeGetAbsolute(invoke);
-}
-
void IntrinsicCodeGeneratorRISCV64::VisitUnsafeGet(HInvoke* invoke) {
VisitJdkUnsafeGet(invoke);
}
-void IntrinsicCodeGeneratorRISCV64::VisitUnsafeGetAbsolute(HInvoke* invoke) {
- VisitJdkUnsafeGetAbsolute(invoke);
-}
-
void IntrinsicLocationsBuilderRISCV64::VisitUnsafeGetVolatile(HInvoke* invoke) {
VisitJdkUnsafeGetVolatile(invoke);
}
@@ -2487,18 +2443,10 @@ void IntrinsicLocationsBuilderRISCV64::VisitJdkUnsafeGet(HInvoke* invoke) {
CreateUnsafeGetLocations(allocator_, invoke, codegen_);
}
-void IntrinsicLocationsBuilderRISCV64::VisitJdkUnsafeGetAbsolute(HInvoke* invoke) {
- CreateUnsafeGetAbsoluteLocations(allocator_, invoke);
-}
-
void IntrinsicCodeGeneratorRISCV64::VisitJdkUnsafeGet(HInvoke* invoke) {
GenUnsafeGet(invoke, codegen_, std::memory_order_relaxed, DataType::Type::kInt32);
}
-void IntrinsicCodeGeneratorRISCV64::VisitJdkUnsafeGetAbsolute(HInvoke* invoke) {
- GenUnsafeGetAbsolute(invoke, codegen_, std::memory_order_relaxed, DataType::Type::kInt32);
-}
-
void IntrinsicLocationsBuilderRISCV64::VisitJdkUnsafeGetAcquire(HInvoke* invoke) {
CreateUnsafeGetLocations(allocator_, invoke, codegen_);
}
@@ -2583,14 +2531,6 @@ static void CreateUnsafePutLocations(ArenaAllocator* allocator, HInvoke* invoke)
}
}
-static void CreateUnsafePutAbsoluteLocations(ArenaAllocator* allocator, HInvoke* invoke) {
- LocationSummary* locations =
- new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
- locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
- locations->SetInAt(1, Location::RequiresRegister());
- locations->SetInAt(2, Location::RequiresRegister());
-}
-
static void GenUnsafePut(HInvoke* invoke,
CodeGeneratorRISCV64* codegen,
std::memory_order order,
@@ -2619,34 +2559,14 @@ static void GenUnsafePut(HInvoke* invoke,
}
}
-static void GenUnsafePutAbsolute(HInvoke* invoke,
- CodeGeneratorRISCV64* codegen,
- std::memory_order order,
- DataType::Type type) {
- Riscv64Assembler* assembler = codegen->GetAssembler();
- LocationSummary* locations = invoke->GetLocations();
- XRegister address = locations->InAt(1).AsRegister<XRegister>();
- Location value = locations->InAt(2);
-
- GenerateSet(codegen, order, value, address, /*offset=*/ 0, type);
-}
-
void IntrinsicLocationsBuilderRISCV64::VisitUnsafePut(HInvoke* invoke) {
VisitJdkUnsafePut(invoke);
}
-void IntrinsicLocationsBuilderRISCV64::VisitUnsafePutAbsolute(HInvoke* invoke) {
- VisitJdkUnsafePutAbsolute(invoke);
-}
-
void IntrinsicCodeGeneratorRISCV64::VisitUnsafePut(HInvoke* invoke) {
VisitJdkUnsafePut(invoke);
}
-void IntrinsicCodeGeneratorRISCV64::VisitUnsafePutAbsolute(HInvoke* invoke) {
- VisitJdkUnsafePutAbsolute(invoke);
-}
-
void IntrinsicLocationsBuilderRISCV64::VisitUnsafePutOrdered(HInvoke* invoke) {
VisitJdkUnsafePutOrdered(invoke);
}
@@ -2723,18 +2643,10 @@ void IntrinsicLocationsBuilderRISCV64::VisitJdkUnsafePut(HInvoke* invoke) {
CreateUnsafePutLocations(allocator_, invoke);
}
-void IntrinsicLocationsBuilderRISCV64::VisitJdkUnsafePutAbsolute(HInvoke* invoke) {
- CreateUnsafePutAbsoluteLocations(allocator_, invoke);
-}
-
void IntrinsicCodeGeneratorRISCV64::VisitJdkUnsafePut(HInvoke* invoke) {
GenUnsafePut(invoke, codegen_, std::memory_order_relaxed, DataType::Type::kInt32);
}
-void IntrinsicCodeGeneratorRISCV64::VisitJdkUnsafePutAbsolute(HInvoke* invoke) {
- GenUnsafePutAbsolute(invoke, codegen_, std::memory_order_relaxed, DataType::Type::kInt32);
-}
-
void IntrinsicLocationsBuilderRISCV64::VisitJdkUnsafePutOrdered(HInvoke* invoke) {
CreateUnsafePutLocations(allocator_, invoke);
}
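
The deleted riscv64 getter above uses the fence-based mapping for ordered loads, since base RISC-V has no standalone acquire-load instruction (the aq/rl bits exist only on AMO and LR/SC): a seq_cst load issues a full fence first, and any acquire-or-stronger order issues a load-acquire fence after the load. Condensed from the removed code:

    if (seq_cst_barrier) {
      codegen->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);   // full fence before
    }
    codegen->GetInstructionVisitor()->Load(out_loc, address, /*offset=*/ 0, type);
    if (acquire_barrier) {
      codegen->GenerateMemoryBarrier(MemBarrierKind::kLoadAny);  // fence after the load
    }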
diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc
index f71689230d..c1ec8bda82 100644
--- a/compiler/optimizing/intrinsics_x86.cc
+++ b/compiler/optimizing/intrinsics_x86.cc
@@ -1731,74 +1731,6 @@ static void GenUnsafeGet(HInvoke* invoke,
}
}
-static void GenUnsafeGetAbsolute(HInvoke* invoke,
- DataType::Type type,
- bool is_volatile,
- CodeGeneratorX86* codegen) {
- X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler());
- LocationSummary* locations = invoke->GetLocations();
- Register address = locations->InAt(1).AsRegisterPairLow<Register>();
- Address address_offset(address, 0);
- Location output_loc = locations->Out();
-
- switch (type) {
- case DataType::Type::kInt8: {
- Register output = output_loc.AsRegister<Register>();
- __ movsxb(output, address_offset);
- break;
- }
-
- case DataType::Type::kInt32: {
- Register output = output_loc.AsRegister<Register>();
- __ movl(output, address_offset);
- break;
- }
-
- case DataType::Type::kInt64: {
- Register output_lo = output_loc.AsRegisterPairLow<Register>();
- Register output_hi = output_loc.AsRegisterPairHigh<Register>();
- if (is_volatile) {
- // Need to use a XMM to read atomically.
- XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
- __ movsd(temp, address_offset);
- __ movd(output_lo, temp);
- __ psrlq(temp, Immediate(32));
- __ movd(output_hi, temp);
- } else {
- Address address_hi(address, 4);
- __ movl(output_lo, address_offset);
- __ movl(output_hi, address_hi);
- }
- }
- break;
-
- default:
- LOG(FATAL) << "Unsupported op size " << type;
- UNREACHABLE();
- }
-}
-
-static void CreateIntIntToIntLocations(ArenaAllocator* allocator,
- HInvoke* invoke,
- DataType::Type type,
- bool is_volatile) {
- LocationSummary* locations =
- new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
- locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
- locations->SetInAt(1, Location::RequiresRegister());
- if (type == DataType::Type::kInt64) {
- if (is_volatile) {
- // Need to use XMM to read volatile.
- locations->AddTemp(Location::RequiresFpuRegister());
- locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
- } else {
- locations->SetOut(Location::RequiresRegister(), Location::kOutputOverlap);
- }
- } else {
- locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
- }
-}
-
static void CreateIntIntIntToIntLocations(ArenaAllocator* allocator,
HInvoke* invoke,
CodeGeneratorX86* codegen,
@@ -1834,9 +1766,6 @@ static void CreateIntIntIntToIntLocations(ArenaAllocator* allocator,
void IntrinsicLocationsBuilderX86::VisitUnsafeGet(HInvoke* invoke) {
VisitJdkUnsafeGet(invoke);
}
-void IntrinsicLocationsBuilderX86::VisitUnsafeGetAbsolute(HInvoke* invoke) {
- VisitJdkUnsafeGetAbsolute(invoke);
-}
void IntrinsicLocationsBuilderX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
VisitJdkUnsafeGetVolatile(invoke);
}
@@ -1859,9 +1788,6 @@ void IntrinsicLocationsBuilderX86::VisitUnsafeGetByte(HInvoke* invoke) {
void IntrinsicCodeGeneratorX86::VisitUnsafeGet(HInvoke* invoke) {
VisitJdkUnsafeGet(invoke);
}
-void IntrinsicCodeGeneratorX86::VisitUnsafeGetAbsolute(HInvoke* invoke) {
- VisitJdkUnsafeGetAbsolute(invoke);
-}
void IntrinsicCodeGeneratorX86::VisitUnsafeGetVolatile(HInvoke* invoke) {
VisitJdkUnsafeGetVolatile(invoke);
}
@@ -1885,9 +1811,6 @@ void IntrinsicLocationsBuilderX86::VisitJdkUnsafeGet(HInvoke* invoke) {
CreateIntIntIntToIntLocations(
allocator_, invoke, codegen_, DataType::Type::kInt32, /*is_volatile=*/ false);
}
-void IntrinsicLocationsBuilderX86::VisitJdkUnsafeGetAbsolute(HInvoke* invoke) {
- CreateIntIntToIntLocations(allocator_, invoke, DataType::Type::kInt32, /*is_volatile=*/false);
-}
void IntrinsicLocationsBuilderX86::VisitJdkUnsafeGetVolatile(HInvoke* invoke) {
CreateIntIntIntToIntLocations(
allocator_, invoke, codegen_, DataType::Type::kInt32, /*is_volatile=*/ true);
@@ -1928,9 +1851,6 @@ void IntrinsicLocationsBuilderX86::VisitJdkUnsafeGetByte(HInvoke* invoke) {
void IntrinsicCodeGeneratorX86::VisitJdkUnsafeGet(HInvoke* invoke) {
GenUnsafeGet(invoke, DataType::Type::kInt32, /*is_volatile=*/ false, codegen_);
}
-void IntrinsicCodeGeneratorX86::VisitJdkUnsafeGetAbsolute(HInvoke* invoke) {
- GenUnsafeGetAbsolute(invoke, DataType::Type::kInt32, /*is_volatile=*/ false, codegen_);
-}
void IntrinsicCodeGeneratorX86::VisitJdkUnsafeGetVolatile(HInvoke* invoke) {
GenUnsafeGet(invoke, DataType::Type::kInt32, /*is_volatile=*/ true, codegen_);
}
@@ -1959,26 +1879,6 @@ void IntrinsicCodeGeneratorX86::VisitJdkUnsafeGetByte(HInvoke* invoke) {
GenUnsafeGet(invoke, DataType::Type::kInt8, /*is_volatile=*/ false, codegen_);
}
-static void CreateIntIntIntToVoidPlusTempsLocations(ArenaAllocator* allocator,
- DataType::Type type,
- HInvoke* invoke,
- bool is_volatile) {
- LocationSummary* locations =
- new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
- locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
- locations->SetInAt(1, Location::RequiresRegister());
- if (type == DataType::Type::kInt8 || type == DataType::Type::kUint8) {
- // Ensure the value is in a byte register
- locations->SetInAt(2, Location::ByteRegisterOrConstant(EAX, invoke->InputAt(3)));
- } else {
- locations->SetInAt(2, Location::RequiresRegister());
- }
- if (type == DataType::Type::kInt64 && is_volatile) {
- locations->AddTemp(Location::RequiresFpuRegister());
- locations->AddTemp(Location::RequiresFpuRegister());
- }
-}
-
static void CreateIntIntIntIntToVoidPlusTempsLocations(ArenaAllocator* allocator,
DataType::Type type,
HInvoke* invoke,
@@ -2008,9 +1908,6 @@ static void CreateIntIntIntIntToVoidPlusTempsLocations(ArenaAllocator* allocator
void IntrinsicLocationsBuilderX86::VisitUnsafePut(HInvoke* invoke) {
VisitJdkUnsafePut(invoke);
}
-void IntrinsicLocationsBuilderX86::VisitUnsafePutAbsolute(HInvoke* invoke) {
- VisitJdkUnsafePutAbsolute(invoke);
-}
void IntrinsicLocationsBuilderX86::VisitUnsafePutOrdered(HInvoke* invoke) {
VisitJdkUnsafePutOrdered(invoke);
}
@@ -2043,10 +1940,6 @@ void IntrinsicLocationsBuilderX86::VisitJdkUnsafePut(HInvoke* invoke) {
CreateIntIntIntIntToVoidPlusTempsLocations(
allocator_, DataType::Type::kInt32, invoke, /*is_volatile=*/ false);
}
-void IntrinsicLocationsBuilderX86::VisitJdkUnsafePutAbsolute(HInvoke* invoke) {
- CreateIntIntIntToVoidPlusTempsLocations(
- allocator_, DataType::Type::kInt64, invoke, /*is_volatile=*/ false);
-}
void IntrinsicLocationsBuilderX86::VisitJdkUnsafePutOrdered(HInvoke* invoke) {
CreateIntIntIntIntToVoidPlusTempsLocations(
allocator_, DataType::Type::kInt32, invoke, /*is_volatile=*/ false);
@@ -2152,54 +2045,9 @@ static void GenUnsafePut(LocationSummary* locations,
}
}
-// We don't care for ordered: it requires an AnyStore barrier, which is already given by the x86
-// memory model.
-static void GenUnsafePutAbsolute(LocationSummary* locations,
- DataType::Type type,
- bool is_volatile,
- CodeGeneratorX86* codegen) {
- X86Assembler* assembler = down_cast<X86Assembler*>(codegen->GetAssembler());
- Register address = locations->InAt(1).AsRegisterPairLow<Register>();
- Address address_offset(address, 0);
- Location value_loc = locations->InAt(2);
-
- if (type == DataType::Type::kInt64) {
- Register value_lo = value_loc.AsRegisterPairLow<Register>();
- Register value_hi = value_loc.AsRegisterPairHigh<Register>();
- if (is_volatile) {
- XmmRegister temp1 = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
- XmmRegister temp2 = locations->GetTemp(1).AsFpuRegister<XmmRegister>();
- __ movd(temp1, value_lo);
- __ movd(temp2, value_hi);
- __ punpckldq(temp1, temp2);
- __ movsd(address_offset, temp1);
- } else {
- __ movl(address_offset, value_lo);
- __ movl(Address(address, 4), value_hi);
- }
- } else if (type == DataType::Type::kInt32) {
- __ movl(address_offset, value_loc.AsRegister<Register>());
- } else {
- CHECK_EQ(type, DataType::Type::kInt8) << "Unimplemented GenUnsafePut data type";
- if (value_loc.IsRegister()) {
- __ movb(address_offset, value_loc.AsRegister<ByteRegister>());
- } else {
- __ movb(address_offset,
- Immediate(CodeGenerator::GetInt8ValueOf(value_loc.GetConstant())));
- }
- }
-
- if (is_volatile) {
- codegen->MemoryFence();
- }
-}
-
void IntrinsicCodeGeneratorX86::VisitUnsafePut(HInvoke* invoke) {
VisitJdkUnsafePut(invoke);
}
-void IntrinsicCodeGeneratorX86::VisitUnsafePutAbsolute(HInvoke* invoke) {
- VisitJdkUnsafePutAbsolute(invoke);
-}
void IntrinsicCodeGeneratorX86::VisitUnsafePutOrdered(HInvoke* invoke) {
VisitJdkUnsafePutOrdered(invoke);
}
@@ -2231,10 +2079,6 @@ void IntrinsicCodeGeneratorX86::VisitUnsafePutByte(HInvoke* invoke) {
void IntrinsicCodeGeneratorX86::VisitJdkUnsafePut(HInvoke* invoke) {
GenUnsafePut(invoke->GetLocations(), DataType::Type::kInt32, /*is_volatile=*/ false, codegen_);
}
-void IntrinsicCodeGeneratorX86::VisitJdkUnsafePutAbsolute(HInvoke* invoke) {
- GenUnsafePutAbsolute(
- invoke->GetLocations(), DataType::Type::kInt32, /*is_volatile=*/false, codegen_);
-}
void IntrinsicCodeGeneratorX86::VisitJdkUnsafePutOrdered(HInvoke* invoke) {
GenUnsafePut(invoke->GetLocations(), DataType::Type::kInt32, /*is_volatile=*/ false, codegen_);
}
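
One point worth spelling out for the 32-bit x86 hunks above: a volatile Java long must be read and written as a single unit, and a pair of 32-bit movl accesses can tear, so the deleted code bounced the value through an XMM register, relying on an aligned 8-byte SSE access being atomic on this target. The read side, condensed from the deleted GenUnsafeGetAbsolute:

    XmmRegister temp = locations->GetTemp(0).AsFpuRegister<XmmRegister>();
    __ movsd(temp, address_offset);     // single atomic 8-byte load
    __ movd(output_lo, temp);           // low 32 bits
    __ psrlq(temp, Immediate(32));      // shift the high half down
    __ movd(output_hi, temp);           // high 32 bits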
diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc
index 849203e286..a8e7bf4881 100644
--- a/compiler/optimizing/intrinsics_x86_64.cc
+++ b/compiler/optimizing/intrinsics_x86_64.cc
@@ -1889,43 +1889,6 @@ static void GenUnsafeGet(HInvoke* invoke,
}
}
-static void GenUnsafeGetAbsolute(HInvoke* invoke,
- DataType::Type type,
- CodeGeneratorX86_64* codegen) {
- X86_64Assembler* assembler = down_cast<X86_64Assembler*>(codegen->GetAssembler());
- LocationSummary* locations = invoke->GetLocations();
- Location address_loc = locations->InAt(1);
- Address address = Address(address_loc.AsRegister<CpuRegister>(), 0);
- Location output_loc = locations->Out();
- CpuRegister output = output_loc.AsRegister<CpuRegister>();
-
- switch (type) {
- case DataType::Type::kInt8:
- __ movsxb(output, address);
- break;
-
- case DataType::Type::kInt32:
- __ movl(output, address);
- break;
-
- case DataType::Type::kInt64:
- __ movq(output, address);
- break;
-
- default:
- LOG(FATAL) << "Unsupported op size " << type;
- UNREACHABLE();
- }
-}
-
-static void CreateIntIntToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
- LocationSummary* locations =
- new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
- locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
- locations->SetInAt(1, Location::RequiresRegister());
- locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
-}
-
static void CreateIntIntIntToIntLocations(ArenaAllocator* allocator,
HInvoke* invoke,
CodeGeneratorX86_64* codegen) {
@@ -1949,9 +1912,6 @@ static void CreateIntIntIntToIntLocations(ArenaAllocator* allocator,
void IntrinsicLocationsBuilderX86_64::VisitUnsafeGet(HInvoke* invoke) {
VisitJdkUnsafeGet(invoke);
}
-void IntrinsicLocationsBuilderX86_64::VisitUnsafeGetAbsolute(HInvoke* invoke) {
- VisitJdkUnsafeGetAbsolute(invoke);
-}
void IntrinsicLocationsBuilderX86_64::VisitUnsafeGetVolatile(HInvoke* invoke) {
VisitJdkUnsafeGetVolatile(invoke);
}
@@ -1974,9 +1934,6 @@ void IntrinsicLocationsBuilderX86_64::VisitUnsafeGetByte(HInvoke* invoke) {
void IntrinsicLocationsBuilderX86_64::VisitJdkUnsafeGet(HInvoke* invoke) {
CreateIntIntIntToIntLocations(allocator_, invoke, codegen_);
}
-void IntrinsicLocationsBuilderX86_64::VisitJdkUnsafeGetAbsolute(HInvoke* invoke) {
- CreateIntIntToIntLocations(allocator_, invoke);
-}
void IntrinsicLocationsBuilderX86_64::VisitJdkUnsafeGetVolatile(HInvoke* invoke) {
CreateIntIntIntToIntLocations(allocator_, invoke, codegen_);
}
@@ -2008,9 +1965,6 @@ void IntrinsicLocationsBuilderX86_64::VisitJdkUnsafeGetByte(HInvoke* invoke) {
void IntrinsicCodeGeneratorX86_64::VisitUnsafeGet(HInvoke* invoke) {
VisitJdkUnsafeGet(invoke);
}
-void IntrinsicCodeGeneratorX86_64::VisitUnsafeGetAbsolute(HInvoke* invoke) {
- VisitJdkUnsafeGetAbsolute(invoke);
-}
void IntrinsicCodeGeneratorX86_64::VisitUnsafeGetVolatile(HInvoke* invoke) {
VisitJdkUnsafeGetVolatile(invoke);
}
@@ -2033,9 +1987,6 @@ void IntrinsicCodeGeneratorX86_64::VisitUnsafeGetByte(HInvoke* invoke) {
void IntrinsicCodeGeneratorX86_64::VisitJdkUnsafeGet(HInvoke* invoke) {
GenUnsafeGet(invoke, DataType::Type::kInt32, /*is_volatile=*/ false, codegen_);
}
-void IntrinsicCodeGeneratorX86_64::VisitJdkUnsafeGetAbsolute(HInvoke* invoke) {
- GenUnsafeGetAbsolute(invoke, DataType::Type::kInt32, codegen_);
-}
void IntrinsicCodeGeneratorX86_64::VisitJdkUnsafeGetVolatile(HInvoke* invoke) {
GenUnsafeGet(invoke, DataType::Type::kInt32, /*is_volatile=*/ true, codegen_);
}
@@ -2064,16 +2015,6 @@ void IntrinsicCodeGeneratorX86_64::VisitJdkUnsafeGetByte(HInvoke* invoke) {
GenUnsafeGet(invoke, DataType::Type::kInt8, /*is_volatile=*/false, codegen_);
}
-static void CreateIntIntIntToVoidPlusTempsLocations(ArenaAllocator* allocator,
- [[maybe_unused]] DataType::Type type,
- HInvoke* invoke) {
- LocationSummary* locations =
- new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
- locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
- locations->SetInAt(1, Location::RequiresRegister());
- locations->SetInAt(2, Location::RequiresRegister());
-}
-
static void CreateIntIntIntIntToVoidPlusTempsLocations(ArenaAllocator* allocator,
DataType::Type type,
HInvoke* invoke) {
@@ -2093,9 +2034,6 @@ static void CreateIntIntIntIntToVoidPlusTempsLocations(ArenaAllocator* allocator
void IntrinsicLocationsBuilderX86_64::VisitUnsafePut(HInvoke* invoke) {
VisitJdkUnsafePut(invoke);
}
-void IntrinsicLocationsBuilderX86_64::VisitUnsafePutAbsolute(HInvoke* invoke) {
- VisitJdkUnsafePutAbsolute(invoke);
-}
void IntrinsicLocationsBuilderX86_64::VisitUnsafePutOrdered(HInvoke* invoke) {
VisitJdkUnsafePutOrdered(invoke);
}
@@ -2127,9 +2065,6 @@ void IntrinsicLocationsBuilderX86_64::VisitUnsafePutByte(HInvoke* invoke) {
void IntrinsicLocationsBuilderX86_64::VisitJdkUnsafePut(HInvoke* invoke) {
CreateIntIntIntIntToVoidPlusTempsLocations(allocator_, DataType::Type::kInt32, invoke);
}
-void IntrinsicLocationsBuilderX86_64::VisitJdkUnsafePutAbsolute(HInvoke* invoke) {
- CreateIntIntIntToVoidPlusTempsLocations(allocator_, DataType::Type::kInt32, invoke);
-}
void IntrinsicLocationsBuilderX86_64::VisitJdkUnsafePutOrdered(HInvoke* invoke) {
CreateIntIntIntIntToVoidPlusTempsLocations(allocator_, DataType::Type::kInt32, invoke);
}
@@ -2204,37 +2139,9 @@ static void GenUnsafePut(LocationSummary* locations, DataType::Type type, bool i
}
}
-// We don't care for ordered: it requires an AnyStore barrier, which is already given by the x86
-// memory model.
-static void GenUnsafePutAbsolute(LocationSummary* locations,
- DataType::Type type,
- bool is_volatile,
- CodeGeneratorX86_64* codegen) {
- X86_64Assembler* assembler = down_cast<X86_64Assembler*>(codegen->GetAssembler());
- CpuRegister address_reg = locations->InAt(1).AsRegister<CpuRegister>();
- Address address = Address(address_reg, 0);
- CpuRegister value = locations->InAt(2).AsRegister<CpuRegister>();
-
- if (type == DataType::Type::kInt64) {
- __ movq(address, value);
- } else if (type == DataType::Type::kInt32) {
- __ movl(address, value);
- } else {
- CHECK_EQ(type, DataType::Type::kInt8) << "Unimplemented GenUnsafePut data type";
- __ movb(address, value);
- }
-
- if (is_volatile) {
- codegen->MemoryFence();
- }
-}
-
void IntrinsicCodeGeneratorX86_64::VisitUnsafePut(HInvoke* invoke) {
VisitJdkUnsafePut(invoke);
}
-void IntrinsicCodeGeneratorX86_64::VisitUnsafePutAbsolute(HInvoke* invoke) {
- VisitJdkUnsafePutAbsolute(invoke);
-}
void IntrinsicCodeGeneratorX86_64::VisitUnsafePutOrdered(HInvoke* invoke) {
VisitJdkUnsafePutOrdered(invoke);
}
@@ -2266,10 +2173,6 @@ void IntrinsicCodeGeneratorX86_64::VisitUnsafePutByte(HInvoke* invoke) {
void IntrinsicCodeGeneratorX86_64::VisitJdkUnsafePut(HInvoke* invoke) {
GenUnsafePut(invoke->GetLocations(), DataType::Type::kInt32, /*is_volatile=*/ false, codegen_);
}
-void IntrinsicCodeGeneratorX86_64::VisitJdkUnsafePutAbsolute(HInvoke* invoke) {
- GenUnsafePutAbsolute(
- invoke->GetLocations(), DataType::Type::kInt32, /*is_volatile=*/false, codegen_);
-}
void IntrinsicCodeGeneratorX86_64::VisitJdkUnsafePutOrdered(HInvoke* invoke) {
GenUnsafePut(invoke->GetLocations(), DataType::Type::kInt32, /*is_volatile=*/ false, codegen_);
}
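
As the deleted comment above notes, ordered (release-style) stores need no extra code on x86-64 because the TSO memory model already provides the AnyStore ordering; only a volatile (seq_cst) store pays for a fence, and it goes after the store. Condensed from the removed GenUnsafePutAbsolute:

    __ movq(address, value);   // plain store; x86-TSO orders it after prior stores
    if (is_volatile) {
      codegen->MemoryFence();  // StoreLoad fence, needed only for seq_cst
    }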