Diffstat (limited to 'compiler/optimizing/intrinsics_arm_vixl.cc')
-rw-r--r--  compiler/optimizing/intrinsics_arm_vixl.cc | 303
1 file changed, 58 insertions(+), 245 deletions(-)
diff --git a/compiler/optimizing/intrinsics_arm_vixl.cc b/compiler/optimizing/intrinsics_arm_vixl.cc
index c8cf1690e3..7f89f56d0c 100644
--- a/compiler/optimizing/intrinsics_arm_vixl.cc
+++ b/compiler/optimizing/intrinsics_arm_vixl.cc
@@ -17,7 +17,6 @@
#include "intrinsics_arm_vixl.h"
#include "aarch32/constants-aarch32.h"
-#include "aarch32/operands-aarch32.h"
#include "arch/arm/callee_save_frame_arm.h"
#include "arch/arm/instruction_set_features_arm.h"
#include "art_method.h"
@@ -2581,29 +2580,8 @@ static void GenerateIntrinsicGet(HInvoke* invoke,
DCHECK(acquire_barrier || order == std::memory_order_relaxed);
DCHECK(atomic || order == std::memory_order_relaxed);
- MemOperand address(base);
- MemOperand absolute_address = address;
ArmVIXLAssembler* assembler = codegen->GetAssembler();
- UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
-
- // If offset is valid then this is a get from a relative address.
- if (offset.IsValid()) {
- address = MemOperand(base, offset);
- switch (type) {
- case DataType::Type::kInt64:
- case DataType::Type::kFloat32:
- case DataType::Type::kFloat64: {
- vixl32::Register temp_reg_absolute_address = temps.Acquire();
- __ Add(temp_reg_absolute_address, base, offset);
- absolute_address = MemOperand(temp_reg_absolute_address);
- break;
- }
- default:
- // No need
- break;
- }
- }
-
+ MemOperand address(base, offset);
switch (type) {
case DataType::Type::kBool:
__ Ldrb(RegisterFrom(out), address);
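
The rewrite above drops the threaded-through absolute_address operand: narrow integer loads (LDRB/LDRH/LDR) accept a register-offset MemOperand directly, while the 64-bit and floating-point cases that follow materialize base + offset into a scratch register at the point of use, because AArch32 LDREXD and VLDR accept only a plain base register. A minimal sketch of the two addressing patterns, using only identifiers that appear in this diff:

    // Narrow integer loads: register-offset addressing is legal.
    MemOperand address(base, offset);
    __ Ldr(RegisterFrom(out), address);

    // Exclusive and FP loads: compute the absolute address first.
    UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
    const vixl32::Register temp_reg = temps.Acquire();
    __ Add(temp_reg, base, offset);
    __ Vldr(SRegisterFrom(out), MemOperand(temp_reg));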
@@ -2623,10 +2601,13 @@ static void GenerateIntrinsicGet(HInvoke* invoke,
case DataType::Type::kInt64:
if (Use64BitExclusiveLoadStore(atomic, codegen)) {
vixl32::Register strexd_tmp = RegisterFrom(maybe_temp);
+ UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
+ const vixl32::Register temp_reg = temps.Acquire();
+ __ Add(temp_reg, base, offset);
vixl32::Label loop;
__ Bind(&loop);
- __ Ldrexd(LowRegisterFrom(out), HighRegisterFrom(out), absolute_address);
- __ Strexd(strexd_tmp, LowRegisterFrom(out), HighRegisterFrom(out), absolute_address);
+ __ Ldrexd(LowRegisterFrom(out), HighRegisterFrom(out), MemOperand(temp_reg));
+ __ Strexd(strexd_tmp, LowRegisterFrom(out), HighRegisterFrom(out), MemOperand(temp_reg));
__ Cmp(strexd_tmp, 0);
__ B(ne, &loop);
} else {
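
When Use64BitExclusiveLoadStore() is true (presumably on cores without single-copy-atomic LDRD; the predicate's definition is outside this diff), an atomic 64-bit read is synthesized as an exclusive load/store pair that retries until the store-exclusive reports success, which guarantees the two words were read without an intervening write. The loop, as emitted above:

    vixl32::Label loop;
    __ Bind(&loop);
    __ Ldrexd(LowRegisterFrom(out), HighRegisterFrom(out), MemOperand(temp_reg));
    __ Strexd(strexd_tmp, LowRegisterFrom(out), HighRegisterFrom(out), MemOperand(temp_reg));
    __ Cmp(strexd_tmp, 0);   // 0 means the exclusive store succeeded.
    __ B(ne, &loop);         // Otherwise another agent wrote the line; retry.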
@@ -2645,23 +2626,29 @@ static void GenerateIntrinsicGet(HInvoke* invoke,
}
break;
case DataType::Type::kFloat32: {
- __ Vldr(SRegisterFrom(out), absolute_address);
+ UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
+ const vixl32::Register temp_reg = temps.Acquire();
+ __ Add(temp_reg, base, offset);
+ __ Vldr(SRegisterFrom(out), MemOperand(temp_reg));
break;
}
case DataType::Type::kFloat64: {
+ UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
+ const vixl32::Register temp_reg = temps.Acquire();
+ __ Add(temp_reg, base, offset);
if (Use64BitExclusiveLoadStore(atomic, codegen)) {
vixl32::Register lo = RegisterFrom(maybe_temp);
vixl32::Register hi = RegisterFrom(maybe_temp2);
vixl32::Register strexd_tmp = RegisterFrom(maybe_temp3);
vixl32::Label loop;
__ Bind(&loop);
- __ Ldrexd(lo, hi, absolute_address);
- __ Strexd(strexd_tmp, lo, hi, absolute_address);
+ __ Ldrexd(lo, hi, MemOperand(temp_reg));
+ __ Strexd(strexd_tmp, lo, hi, MemOperand(temp_reg));
__ Cmp(strexd_tmp, 0);
__ B(ne, &loop);
__ Vmov(DRegisterFrom(out), lo, hi);
} else {
- __ Vldr(DRegisterFrom(out), absolute_address);
+ __ Vldr(DRegisterFrom(out), MemOperand(temp_reg));
}
break;
}
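
The Float32 and Float64 cases now each acquire their own scratch register rather than sharing a precomputed absolute_address. With the exclusive path, the double is loaded into two core registers and reassembled via VMOV; without it, a single VLDR suffices. Sketch of the non-exclusive double load (identifiers from the diff):

    __ Add(temp_reg, base, offset);                    // VLDR has no register-offset form.
    __ Vldr(DRegisterFrom(out), MemOperand(temp_reg));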
@@ -2709,17 +2696,6 @@ static void CreateUnsafeGetLocations(HInvoke* invoke,
}
}
-static void CreateUnsafeGetAbsoluteLocations(HInvoke* invoke) {
- ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
- LocationSummary* locations =
- new (allocator) LocationSummary(invoke,
- LocationSummary::kNoCall,
- kIntrinsified);
- locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
- locations->SetInAt(1, Location::RequiresRegister());
- locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
-}
-
static void GenUnsafeGet(HInvoke* invoke,
CodeGeneratorARMVIXL* codegen,
DataType::Type type,
@@ -2747,47 +2723,14 @@ static void GenUnsafeGet(HInvoke* invoke,
/*maybe_temp3=*/ Location::NoLocation());
}
-static void GenUnsafeGetAbsolute(HInvoke* invoke,
- CodeGeneratorARMVIXL* codegen,
- DataType::Type type,
- std::memory_order order,
- bool atomic) {
- LocationSummary* locations = invoke->GetLocations();
- vixl32::Register address = LowRegisterFrom(locations->InAt(1)); // Long offset, lo part only.
- Location out = locations->Out();
- Location maybe_temp = Location::NoLocation();
- if (type == DataType::Type::kInt64 && Use64BitExclusiveLoadStore(atomic, codegen)) {
- maybe_temp = locations->GetTemp(0);
- }
- GenerateIntrinsicGet(invoke,
- codegen,
- type,
- order,
- atomic,
- address,
- NoReg, // No offset.
- out,
- maybe_temp,
- /*maybe_temp2=*/ Location::NoLocation(),
- /*maybe_temp3=*/ Location::NoLocation());
-}
-
void IntrinsicLocationsBuilderARMVIXL::VisitUnsafeGet(HInvoke* invoke) {
VisitJdkUnsafeGet(invoke);
}
-void IntrinsicLocationsBuilderARMVIXL::VisitUnsafeGetAbsolute(HInvoke* invoke) {
- VisitJdkUnsafeGetAbsolute(invoke);
-}
-
void IntrinsicCodeGeneratorARMVIXL::VisitUnsafeGet(HInvoke* invoke) {
VisitJdkUnsafeGet(invoke);
}
-void IntrinsicCodeGeneratorARMVIXL::VisitUnsafeGetAbsolute(HInvoke* invoke) {
- VisitJdkUnsafeGetAbsolute(invoke);
-}
-
void IntrinsicLocationsBuilderARMVIXL::VisitUnsafeGetVolatile(HInvoke* invoke) {
VisitJdkUnsafeGetVolatile(invoke);
}
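
With GenUnsafeGetAbsolute and its visitors removed, UnsafeGetAbsolute/JdkUnsafeGetAbsolute invokes are no longer intrinsified by this backend. Assuming the file follows its usual convention for unimplemented intrinsics (not visible in this hunk), the declarations would reduce to something like:

    // Assumption: the conventional marker used elsewhere in this file for
    // intrinsics the ARM32 backend does not implement.
    UNIMPLEMENTED_INTRINSIC(ARMVIXL, UnsafeGetAbsolute)
    UNIMPLEMENTED_INTRINSIC(ARMVIXL, JdkUnsafeGetAbsolute)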
@@ -2840,20 +2783,11 @@ void IntrinsicLocationsBuilderARMVIXL::VisitJdkUnsafeGet(HInvoke* invoke) {
CreateUnsafeGetLocations(invoke, codegen_, DataType::Type::kInt32, /*atomic=*/ false);
}
-void IntrinsicLocationsBuilderARMVIXL::VisitJdkUnsafeGetAbsolute(HInvoke* invoke) {
- CreateUnsafeGetAbsoluteLocations(invoke);
-}
-
void IntrinsicCodeGeneratorARMVIXL::VisitJdkUnsafeGet(HInvoke* invoke) {
GenUnsafeGet(
invoke, codegen_, DataType::Type::kInt32, std::memory_order_relaxed, /*atomic=*/ false);
}
-void IntrinsicCodeGeneratorARMVIXL::VisitJdkUnsafeGetAbsolute(HInvoke* invoke) {
- GenUnsafeGetAbsolute(
- invoke, codegen_, DataType::Type::kInt32, std::memory_order_relaxed, /*atomic=*/ false);
-}
-
void IntrinsicLocationsBuilderARMVIXL::VisitJdkUnsafeGetVolatile(HInvoke* invoke) {
CreateUnsafeGetLocations(invoke, codegen_, DataType::Type::kInt32, /*atomic=*/ true);
}
@@ -2935,16 +2869,38 @@ void IntrinsicCodeGeneratorARMVIXL::VisitJdkUnsafeGetByte(HInvoke* invoke) {
invoke, codegen_, DataType::Type::kInt8, std::memory_order_relaxed, /*atomic=*/ false);
}
-static void GenerateIntrinsicSetStore(CodeGeneratorARMVIXL* codegen,
- ArmVIXLAssembler* assembler,
- DataType::Type type,
- bool atomic,
- vixl32::MemOperand address,
- Location value,
- bool seq_cst_barrier,
- Location maybe_temp,
- Location maybe_temp2,
- Location maybe_temp3) {
+static void GenerateIntrinsicSet(CodeGeneratorARMVIXL* codegen,
+ DataType::Type type,
+ std::memory_order order,
+ bool atomic,
+ vixl32::Register base,
+ vixl32::Register offset,
+ Location value,
+ Location maybe_temp,
+ Location maybe_temp2,
+ Location maybe_temp3) {
+ bool seq_cst_barrier = (order == std::memory_order_seq_cst);
+ bool release_barrier = seq_cst_barrier || (order == std::memory_order_release);
+ DCHECK(release_barrier || order == std::memory_order_relaxed);
+ DCHECK(atomic || order == std::memory_order_relaxed);
+
+ ArmVIXLAssembler* assembler = codegen->GetAssembler();
+ if (release_barrier) {
+ codegen->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
+ }
+ UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
+ if (kPoisonHeapReferences && type == DataType::Type::kReference) {
+ vixl32::Register temp = temps.Acquire();
+ __ Mov(temp, RegisterFrom(value));
+ assembler->PoisonHeapReference(temp);
+ value = LocationFrom(temp);
+ }
+ MemOperand address = offset.IsValid() ? MemOperand(base, offset) : MemOperand(base);
+ if (offset.IsValid() && (DataType::Is64BitType(type) || type == DataType::Type::kFloat32)) {
+ const vixl32::Register temp_reg = temps.Acquire();
+ __ Add(temp_reg, base, offset);
+ address = MemOperand(temp_reg);
+ }
switch (type) {
case DataType::Type::kBool:
case DataType::Type::kInt8:
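
The merged GenerateIntrinsicSet above restores the single-function shape: release ordering is enforced with a barrier before the store, seq_cst with an additional full barrier after it, and heap references are poisoned in a scratch register when kPoisonHeapReferences is set so the caller's value register stays intact. The ordering skeleton, in outline:

    if (release_barrier) {
      codegen->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);  // before the store
    }
    // ... emit the store for the given DataType::Type ...
    if (seq_cst_barrier) {
      codegen->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);    // after, seq_cst only
    }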
@@ -2995,103 +2951,11 @@ static void GenerateIntrinsicSetStore(CodeGeneratorARMVIXL* codegen,
LOG(FATAL) << "Unexpected type " << type;
UNREACHABLE();
}
-
if (seq_cst_barrier) {
codegen->GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
}
}
-static void GenerateIntrinsicSet(CodeGeneratorARMVIXL* codegen,
- DataType::Type type,
- std::memory_order order,
- bool atomic,
- vixl32::Register address,
- Location value,
- Location maybe_temp,
- Location maybe_temp2,
- Location maybe_temp3) {
- bool seq_cst_barrier = order == std::memory_order_seq_cst;
- bool release_barrier = seq_cst_barrier || order == std::memory_order_release;
- DCHECK(release_barrier || order == std::memory_order_relaxed);
- DCHECK(atomic || order == std::memory_order_relaxed);
-
- ArmVIXLAssembler* assembler = codegen->GetAssembler();
- if (release_barrier) {
- codegen->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
- }
- UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
- GenerateIntrinsicSetStore(codegen,
- assembler,
- type,
- atomic,
- MemOperand(address),
- value,
- seq_cst_barrier,
- maybe_temp,
- maybe_temp2,
- maybe_temp3);
-}
-
-static void GenerateIntrinsicSet(CodeGeneratorARMVIXL* codegen,
- DataType::Type type,
- std::memory_order order,
- bool atomic,
- vixl32::Register base,
- vixl32::Register offset,
- Location value,
- Location maybe_temp,
- Location maybe_temp2,
- Location maybe_temp3) {
- bool seq_cst_barrier = (order == std::memory_order_seq_cst);
- bool release_barrier = seq_cst_barrier || (order == std::memory_order_release);
- DCHECK(release_barrier || order == std::memory_order_relaxed);
- DCHECK(atomic || order == std::memory_order_relaxed);
-
- ArmVIXLAssembler* assembler = codegen->GetAssembler();
- if (release_barrier) {
- codegen->GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
- }
- UseScratchRegisterScope temps(assembler->GetVIXLAssembler());
- if (kPoisonHeapReferences && type == DataType::Type::kReference) {
- vixl32::Register temp = temps.Acquire();
- __ Mov(temp, RegisterFrom(value));
- assembler->PoisonHeapReference(temp);
- value = LocationFrom(temp);
- }
- MemOperand address = offset.IsValid() ? MemOperand(base, offset) : MemOperand(base);
- if (offset.IsValid() && (DataType::Is64BitType(type) || type == DataType::Type::kFloat32)) {
- const vixl32::Register temp_reg = temps.Acquire();
- __ Add(temp_reg, base, offset);
- address = MemOperand(temp_reg);
- }
- GenerateIntrinsicSetStore(codegen,
- assembler,
- type,
- atomic,
- address,
- value,
- seq_cst_barrier,
- maybe_temp,
- maybe_temp2,
- maybe_temp3);
-}
-
-static void CreateUnsafePutTempLocations(CodeGeneratorARMVIXL* codegen,
- DataType::Type type,
- bool atomic,
- LocationSummary* locations) {
- if (type == DataType::Type::kInt64) {
- // Potentially need temps for ldrexd-strexd loop.
- if (Use64BitExclusiveLoadStore(atomic, codegen)) {
- locations->AddTemp(Location::RequiresRegister()); // Temp_lo.
- locations->AddTemp(Location::RequiresRegister()); // Temp_hi.
- }
- } else if (type == DataType::Type::kReference) {
- // Temp for card-marking.
- locations->AddTemp(Location::RequiresRegister()); // Temp.
- }
-}
-
static void CreateUnsafePutLocations(HInvoke* invoke,
CodeGeneratorARMVIXL* codegen,
DataType::Type type,
@@ -3103,20 +2967,17 @@ static void CreateUnsafePutLocations(HInvoke* invoke,
locations->SetInAt(1, Location::RequiresRegister());
locations->SetInAt(2, Location::RequiresRegister());
locations->SetInAt(3, Location::RequiresRegister());
- CreateUnsafePutTempLocations(codegen, type, atomic, locations);
-}
-static void CreateUnsafePutAbsoluteLocations(HInvoke* invoke,
- CodeGeneratorARMVIXL* codegen,
- DataType::Type type,
- bool atomic) {
- ArenaAllocator* allocator = invoke->GetBlock()->GetGraph()->GetAllocator();
- LocationSummary* locations =
- new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
- locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
- locations->SetInAt(1, Location::RequiresRegister());
- locations->SetInAt(2, Location::RequiresRegister());
- CreateUnsafePutTempLocations(codegen, type, atomic, locations);
+ if (type == DataType::Type::kInt64) {
+ // Potentially need temps for ldrexd-strexd loop.
+ if (Use64BitExclusiveLoadStore(atomic, codegen)) {
+ locations->AddTemp(Location::RequiresRegister()); // Temp_lo.
+ locations->AddTemp(Location::RequiresRegister()); // Temp_hi.
+ }
+ } else if (type == DataType::Type::kReference) {
+ // Temp for card-marking.
+ locations->AddTemp(Location::RequiresRegister()); // Temp.
+ }
}
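
CreateUnsafePutLocations now registers its temps inline: two core registers for the LDREXD/STREXD loop when a 64-bit put must be atomic, or one register for card marking when a reference is stored. On the consuming side the temps are picked up positionally, as the removed GenUnsafePutAbsolute did:

    Location maybe_temp = Location::NoLocation();
    Location maybe_temp2 = Location::NoLocation();
    if (type == DataType::Type::kInt64 && Use64BitExclusiveLoadStore(atomic, codegen)) {
      maybe_temp = locations->GetTemp(0);   // Temp_lo
      maybe_temp2 = locations->GetTemp(1);  // Temp_hi
    }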
static void GenUnsafePut(HInvoke* invoke,
@@ -3157,50 +3018,14 @@ static void GenUnsafePut(HInvoke* invoke,
}
}
-static void GenUnsafePutAbsolute(HInvoke* invoke,
- DataType::Type type,
- std::memory_order order,
- bool atomic,
- CodeGeneratorARMVIXL* codegen) {
- ArmVIXLAssembler* assembler = codegen->GetAssembler();
-
- LocationSummary* locations = invoke->GetLocations();
- vixl32::Register address = LowRegisterFrom(locations->InAt(1)); // Long offset, lo part only.
- Location value = locations->InAt(2);
- Location maybe_temp = Location::NoLocation();
- Location maybe_temp2 = Location::NoLocation();
- if (type == DataType::Type::kInt64 && Use64BitExclusiveLoadStore(atomic, codegen)) {
- maybe_temp = locations->GetTemp(0);
- maybe_temp2 = locations->GetTemp(1);
- }
-
- GenerateIntrinsicSet(codegen,
- type,
- order,
- atomic,
- address,
- value,
- maybe_temp,
- maybe_temp2,
- /*maybe_temp3=*/ Location::NoLocation());
-}
-
void IntrinsicLocationsBuilderARMVIXL::VisitUnsafePut(HInvoke* invoke) {
VisitJdkUnsafePut(invoke);
}
-void IntrinsicLocationsBuilderARMVIXL::VisitUnsafePutAbsolute(HInvoke* invoke) {
- VisitJdkUnsafePutAbsolute(invoke);
-}
-
void IntrinsicCodeGeneratorARMVIXL::VisitUnsafePut(HInvoke* invoke) {
VisitJdkUnsafePut(invoke);
}
-void IntrinsicCodeGeneratorARMVIXL::VisitUnsafePutAbsolute(HInvoke* invoke) {
- VisitJdkUnsafePutAbsolute(invoke);
-}
-
void IntrinsicLocationsBuilderARMVIXL::VisitUnsafePutOrdered(HInvoke* invoke) {
VisitJdkUnsafePutOrdered(invoke);
}
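
The surviving Visit* entry points stay thin forwarders, so the legacy Unsafe intrinsics and their JdkUnsafe counterparts share one locations builder and one code generator; the pattern, as kept by this change:

    void IntrinsicCodeGeneratorARMVIXL::VisitUnsafePut(HInvoke* invoke) {
      VisitJdkUnsafePut(invoke);  // Legacy Unsafe forwards to the JDK-internal path.
    }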
@@ -3276,10 +3101,6 @@ void IntrinsicLocationsBuilderARMVIXL::VisitJdkUnsafePut(HInvoke* invoke) {
CreateUnsafePutLocations(invoke, codegen_, DataType::Type::kInt32, /*atomic=*/ false);
}
-void IntrinsicLocationsBuilderARMVIXL::VisitJdkUnsafePutAbsolute(HInvoke* invoke) {
- CreateUnsafePutAbsoluteLocations(invoke, codegen_, DataType::Type::kInt32, /*atomic=*/ false);
-}
-
void IntrinsicCodeGeneratorARMVIXL::VisitJdkUnsafePut(HInvoke* invoke) {
GenUnsafePut(invoke,
DataType::Type::kInt32,
@@ -3288,14 +3109,6 @@ void IntrinsicCodeGeneratorARMVIXL::VisitJdkUnsafePut(HInvoke* invoke) {
codegen_);
}
-void IntrinsicCodeGeneratorARMVIXL::VisitJdkUnsafePutAbsolute(HInvoke* invoke) {
- GenUnsafePutAbsolute(invoke,
- DataType::Type::kInt32,
- std::memory_order_relaxed,
- /*atomic=*/false,
- codegen_);
-}
-
void IntrinsicLocationsBuilderARMVIXL::VisitJdkUnsafePutByte(HInvoke* invoke) {
CreateUnsafePutLocations(invoke, codegen_, DataType::Type::kInt8, /*atomic=*/ false);
}