summary | refs | log | tree | commit | diff
path: root/compiler/optimizing/intrinsics_x86_64.cc
diff options
context:
space:
mode:
author Shai Barack <shayba@google.com> 2024-10-16 13:29:02 +0000
committer Santiago Aboy Solanes <solanes@google.com> 2024-10-17 08:04:44 +0000
commit 8555b07360ec214671603c8648c1b1ff90417462 (patch)
tree 0d7d12167f843cc490f7f63452c9228ee7f53065 /compiler/optimizing/intrinsics_x86_64.cc
parent 7a21281cfc89ac8cc542f54b27c047cc20bd9d88 (diff)
Revert^2 "Add intrinsics for the absolute forms of unsafe.{get,put}Int"
This reverts commit c1bd4376b18ff46e5156cc597bd3311dd86fabd9.

Reason for revert: fixed original root cause for breakage.

Change-Id: Ic9c214ebd691abf7f163b6399e8a94fc6f277ea0
Diffstat (limited to 'compiler/optimizing/intrinsics_x86_64.cc')
-rw-r--r-- compiler/optimizing/intrinsics_x86_64.cc | 97
1 file changed, 97 insertions(+), 0 deletions(-)
diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc
index 5eb7ee6b25..c7ee518acb 100644
--- a/compiler/optimizing/intrinsics_x86_64.cc
+++ b/compiler/optimizing/intrinsics_x86_64.cc
@@ -1889,6 +1889,43 @@ static void GenUnsafeGet(HInvoke* invoke,
}
}
+// Emits code for an "absolute" Unsafe get: loads a value of |type| into the
+// output register from the raw memory address held in input 1 (input 0 is the
+// unused Unsafe receiver). Unlike GenUnsafeGet there is no object base and no
+// offset — the address register is dereferenced at displacement 0.
+// Only Int8, Int32 and Int64 are supported; anything else is a compiler bug.
+static void GenUnsafeGetAbsolute(HInvoke* invoke,
+ DataType::Type type,
+ CodeGeneratorX86_64* codegen) {
+ X86_64Assembler* assembler = down_cast<X86_64Assembler*>(codegen->GetAssembler());
+ LocationSummary* locations = invoke->GetLocations();
+ Location address_loc = locations->InAt(1);
+ // Plain [reg + 0] addressing: the caller supplies an absolute pointer.
+ Address address = Address(address_loc.AsRegister<CpuRegister>(), 0);
+ Location output_loc = locations->Out();
+ CpuRegister output = output_loc.AsRegister<CpuRegister>();
+
+ switch (type) {
+ case DataType::Type::kInt8:
+ // Sign-extending byte load.
+ __ movsxb(output, address);
+ break;
+
+ case DataType::Type::kInt32:
+ __ movl(output, address);
+ break;
+
+ case DataType::Type::kInt64:
+ __ movq(output, address);
+ break;
+
+ default:
+ LOG(FATAL) << "Unsupported op size " << type;
+ UNREACHABLE();
+ }
+}
+
+// Builds the LocationSummary for intrinsics of shape (receiver, address) -> value:
+// the raw address goes in a register, the result gets its own non-overlapping
+// register, and the receiver is unused. No runtime call is needed (kNoCall).
+static void CreateIntIntToIntLocations(ArenaAllocator* allocator, HInvoke* invoke) {
+ LocationSummary* locations =
+ new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
+ locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
+ locations->SetInAt(1, Location::RequiresRegister());
+ locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
+}
+
static void CreateIntIntIntToIntLocations(ArenaAllocator* allocator,
HInvoke* invoke,
CodeGeneratorX86_64* codegen) {
@@ -1912,6 +1949,9 @@ static void CreateIntIntIntToIntLocations(ArenaAllocator* allocator,
// sun.misc.Unsafe entry points delegate to their jdk.internal.misc.Unsafe
// counterparts below.
void IntrinsicLocationsBuilderX86_64::VisitUnsafeGet(HInvoke* invoke) {
VisitJdkUnsafeGet(invoke);
}
+// Absolute-address variant: raw pointer instead of object + offset.
+void IntrinsicLocationsBuilderX86_64::VisitUnsafeGetAbsolute(HInvoke* invoke) {
+ VisitJdkUnsafeGetAbsolute(invoke);
+}
void IntrinsicLocationsBuilderX86_64::VisitUnsafeGetVolatile(HInvoke* invoke) {
VisitJdkUnsafeGetVolatile(invoke);
}
@@ -1934,6 +1974,9 @@ void IntrinsicLocationsBuilderX86_64::VisitUnsafeGetByte(HInvoke* invoke) {
void IntrinsicLocationsBuilderX86_64::VisitJdkUnsafeGet(HInvoke* invoke) {
CreateIntIntIntToIntLocations(allocator_, invoke, codegen_);
}
+// Absolute get takes only (receiver, address), so it uses the two-input
+// location helper rather than the three-input object+offset one.
+void IntrinsicLocationsBuilderX86_64::VisitJdkUnsafeGetAbsolute(HInvoke* invoke) {
+ CreateIntIntToIntLocations(allocator_, invoke);
+}
void IntrinsicLocationsBuilderX86_64::VisitJdkUnsafeGetVolatile(HInvoke* invoke) {
CreateIntIntIntToIntLocations(allocator_, invoke, codegen_);
}
@@ -1965,6 +2008,9 @@ void IntrinsicLocationsBuilderX86_64::VisitJdkUnsafeGetByte(HInvoke* invoke) {
// Code-generation side of the sun.misc.Unsafe aliases: delegate to the
// jdk.internal.misc.Unsafe implementations.
void IntrinsicCodeGeneratorX86_64::VisitUnsafeGet(HInvoke* invoke) {
VisitJdkUnsafeGet(invoke);
}
+// Absolute-address variant: raw pointer instead of object + offset.
+void IntrinsicCodeGeneratorX86_64::VisitUnsafeGetAbsolute(HInvoke* invoke) {
+ VisitJdkUnsafeGetAbsolute(invoke);
+}
void IntrinsicCodeGeneratorX86_64::VisitUnsafeGetVolatile(HInvoke* invoke) {
VisitJdkUnsafeGetVolatile(invoke);
}
@@ -1987,6 +2033,9 @@ void IntrinsicCodeGeneratorX86_64::VisitUnsafeGetByte(HInvoke* invoke) {
void IntrinsicCodeGeneratorX86_64::VisitJdkUnsafeGet(HInvoke* invoke) {
GenUnsafeGet(invoke, DataType::Type::kInt32, /*is_volatile=*/ false, codegen_);
}
+// Plain (non-volatile) 32-bit load from an absolute address; no volatility
+// flag is needed because there is no volatile absolute-get entry point here.
+void IntrinsicCodeGeneratorX86_64::VisitJdkUnsafeGetAbsolute(HInvoke* invoke) {
+ GenUnsafeGetAbsolute(invoke, DataType::Type::kInt32, codegen_);
+}
void IntrinsicCodeGeneratorX86_64::VisitJdkUnsafeGetVolatile(HInvoke* invoke) {
GenUnsafeGet(invoke, DataType::Type::kInt32, /*is_volatile=*/ true, codegen_);
}
@@ -2015,6 +2064,16 @@ void IntrinsicCodeGeneratorX86_64::VisitJdkUnsafeGetByte(HInvoke* invoke) {
GenUnsafeGet(invoke, DataType::Type::kInt8, /*is_volatile=*/false, codegen_);
}
+// Builds the LocationSummary for absolute Unsafe put intrinsics of shape
+// (receiver, address, value) -> void: address and value each in a register,
+// receiver unused. |type| is accepted only for signature symmetry with the
+// four-input helper — no temps are needed for any supported type here.
+static void CreateIntIntIntToVoidPlusTempsLocations(ArenaAllocator* allocator,
+ [[maybe_unused]] DataType::Type type,
+ HInvoke* invoke) {
+ LocationSummary* locations =
+ new (allocator) LocationSummary(invoke, LocationSummary::kNoCall, kIntrinsified);
+ locations->SetInAt(0, Location::NoLocation()); // Unused receiver.
+ locations->SetInAt(1, Location::RequiresRegister());
+ locations->SetInAt(2, Location::RequiresRegister());
+}
+
static void CreateIntIntIntIntToVoidPlusTempsLocations(ArenaAllocator* allocator,
DataType::Type type,
HInvoke* invoke) {
@@ -2034,6 +2093,9 @@ static void CreateIntIntIntIntToVoidPlusTempsLocations(ArenaAllocator* allocator
// sun.misc.Unsafe put entry points delegate to their jdk.internal.misc.Unsafe
// counterparts below.
void IntrinsicLocationsBuilderX86_64::VisitUnsafePut(HInvoke* invoke) {
VisitJdkUnsafePut(invoke);
}
+// Absolute-address variant: raw pointer instead of object + offset.
+void IntrinsicLocationsBuilderX86_64::VisitUnsafePutAbsolute(HInvoke* invoke) {
+ VisitJdkUnsafePutAbsolute(invoke);
+}
void IntrinsicLocationsBuilderX86_64::VisitUnsafePutOrdered(HInvoke* invoke) {
VisitJdkUnsafePutOrdered(invoke);
}
@@ -2065,6 +2127,9 @@ void IntrinsicLocationsBuilderX86_64::VisitUnsafePutByte(HInvoke* invoke) {
void IntrinsicLocationsBuilderX86_64::VisitJdkUnsafePut(HInvoke* invoke) {
CreateIntIntIntIntToVoidPlusTempsLocations(allocator_, DataType::Type::kInt32, invoke);
}
+// Absolute put takes (receiver, address, value), so it uses the three-input
+// location helper rather than the four-input object+offset one.
+void IntrinsicLocationsBuilderX86_64::VisitJdkUnsafePutAbsolute(HInvoke* invoke) {
+ CreateIntIntIntToVoidPlusTempsLocations(allocator_, DataType::Type::kInt32, invoke);
+}
void IntrinsicLocationsBuilderX86_64::VisitJdkUnsafePutOrdered(HInvoke* invoke) {
CreateIntIntIntIntToVoidPlusTempsLocations(allocator_, DataType::Type::kInt32, invoke);
}
@@ -2139,9 +2204,37 @@ static void GenUnsafePut(LocationSummary* locations, DataType::Type type, bool i
}
}
+// We don't care for ordered: it requires an AnyStore barrier, which is already given by the x86
+// memory model.
+// Emits code for an "absolute" Unsafe put: stores the value from input 2
+// (of |type|: Int8, Int32 or Int64) to the raw memory address held in input 1.
+// A full memory fence follows the store when the access is volatile.
+static void GenUnsafePutAbsolute(LocationSummary* locations,
+ DataType::Type type,
+ bool is_volatile,
+ CodeGeneratorX86_64* codegen) {
+ X86_64Assembler* assembler = down_cast<X86_64Assembler*>(codegen->GetAssembler());
+ CpuRegister address_reg = locations->InAt(1).AsRegister<CpuRegister>();
+ // Plain [reg + 0] addressing: the caller supplies an absolute pointer, so
+ // there is no object base, offset, or write barrier.
+ Address address = Address(address_reg, 0);
+ CpuRegister value = locations->InAt(2).AsRegister<CpuRegister>();
+
+ if (type == DataType::Type::kInt64) {
+ __ movq(address, value);
+ } else if (type == DataType::Type::kInt32) {
+ __ movl(address, value);
+ } else {
+ CHECK_EQ(type, DataType::Type::kInt8) << "Unimplemented GenUnsafePut data type";
+ __ movb(address, value);
+ }
+
+ if (is_volatile) {
+ // Volatile stores need a StoreLoad barrier after the write.
+ codegen->MemoryFence();
+ }
+}
+
// Code-generation side of the sun.misc.Unsafe put aliases: delegate to the
// jdk.internal.misc.Unsafe implementations.
void IntrinsicCodeGeneratorX86_64::VisitUnsafePut(HInvoke* invoke) {
VisitJdkUnsafePut(invoke);
}
+// Absolute-address variant: raw pointer instead of object + offset.
+void IntrinsicCodeGeneratorX86_64::VisitUnsafePutAbsolute(HInvoke* invoke) {
+ VisitJdkUnsafePutAbsolute(invoke);
+}
void IntrinsicCodeGeneratorX86_64::VisitUnsafePutOrdered(HInvoke* invoke) {
VisitJdkUnsafePutOrdered(invoke);
}
@@ -2173,6 +2266,10 @@ void IntrinsicCodeGeneratorX86_64::VisitUnsafePutByte(HInvoke* invoke) {
void IntrinsicCodeGeneratorX86_64::VisitJdkUnsafePut(HInvoke* invoke) {
GenUnsafePut(invoke->GetLocations(), DataType::Type::kInt32, /*is_volatile=*/ false, codegen_);
}
+// Plain (non-volatile) 32-bit store to an absolute address.
+void IntrinsicCodeGeneratorX86_64::VisitJdkUnsafePutAbsolute(HInvoke* invoke) {
+ GenUnsafePutAbsolute(
+ invoke->GetLocations(), DataType::Type::kInt32, /*is_volatile=*/false, codegen_);
+}
void IntrinsicCodeGeneratorX86_64::VisitJdkUnsafePutOrdered(HInvoke* invoke) {
GenUnsafePut(invoke->GetLocations(), DataType::Type::kInt32, /*is_volatile=*/ false, codegen_);
}