path: root/compiler
author Roman Artemev <roman.artemev@syntacore.com> 2024-05-24 17:03:23 +0300
committer Vladimír Marko <vmarko@google.com> 2024-06-12 12:16:35 +0000
commit e159e4eacaff78180be0b11d087009b912e8d01d (patch)
tree f348c8f31deb3dacf456a620310f569722746289 /compiler
parent b658a268e2e76e429702c2929d24ebbbf909e947 (diff)
Implement {Integer|Long}.remainderUnsigned intrinsic
Implement intrinsic for RISC-V target

Integer.remainderUnsigned: +13.71%
Long.remainderUnsigned: +1623.19%

Test: testrunner.py --target --64 --ndebug --optimizing
Change-Id: I4c3c47bad8fcbdde723e68d19aad45be0d4ff8bb
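For context, the JDK methods being intrinsified treat both operands as unsigned; a minimal illustration of their semantics in plain Java (not part of this change):

    // Unsigned remainder differs from % whenever a signed input is negative.
    int r32  = Integer.remainderUnsigned(-7, 5);   // 4, i.e. (2^32 - 7) % 5; signed -7 % 5 == -2
    long r64 = Long.remainderUnsigned(-7L, 5L);    // 4, i.e. (2^64 - 7) % 5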
Diffstat (limited to 'compiler')
-rw-r--r--  compiler/optimizing/code_generator_arm64.h     2
-rw-r--r--  compiler/optimizing/code_generator_arm_vixl.h  2
-rw-r--r--  compiler/optimizing/code_generator_x86.h       2
-rw-r--r--  compiler/optimizing/code_generator_x86_64.h    2
-rw-r--r--  compiler/optimizing/intrinsics_riscv64.cc      36
5 files changed, 38 insertions, 6 deletions
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index 78049c5675..04694ece3e 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -124,6 +124,8 @@ const vixl::aarch64::CPURegList callee_saved_fp_registers(vixl::aarch64::CPURegi
Location ARM64ReturnLocation(DataType::Type return_type);
#define UNIMPLEMENTED_INTRINSIC_LIST_ARM64(V) \
+ V(IntegerRemainderUnsigned) \
+ V(LongRemainderUnsigned) \
V(StringStringIndexOf) \
V(StringStringIndexOfAfter) \
V(StringBufferAppend) \
diff --git a/compiler/optimizing/code_generator_arm_vixl.h b/compiler/optimizing/code_generator_arm_vixl.h
index 51bee1cd77..05e5bdf2b7 100644
--- a/compiler/optimizing/code_generator_arm_vixl.h
+++ b/compiler/optimizing/code_generator_arm_vixl.h
@@ -124,6 +124,8 @@ using VIXLUInt32Literal = vixl::aarch32::Literal<uint32_t>;
V(UnsafeCASLong) /* High register pressure */ \
V(SystemArrayCopyChar) \
V(LongDivideUnsigned) \
+ V(IntegerRemainderUnsigned) \
+ V(LongRemainderUnsigned) \
V(CRC32Update) \
V(CRC32UpdateBytes) \
V(CRC32UpdateByteBuffer) \
diff --git a/compiler/optimizing/code_generator_x86.h b/compiler/optimizing/code_generator_x86.h
index 93f8e6ed9b..6ce0c506a0 100644
--- a/compiler/optimizing/code_generator_x86.h
+++ b/compiler/optimizing/code_generator_x86.h
@@ -55,6 +55,8 @@ static constexpr size_t kRuntimeParameterFpuRegistersLength =
V(IntegerHighestOneBit) \
V(LongHighestOneBit) \
V(LongDivideUnsigned) \
+ V(IntegerRemainderUnsigned) \
+ V(LongRemainderUnsigned) \
V(CRC32Update) \
V(CRC32UpdateBytes) \
V(CRC32UpdateByteBuffer) \
diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h
index 7a5e4aa894..81c8ead32e 100644
--- a/compiler/optimizing/code_generator_x86_64.h
+++ b/compiler/optimizing/code_generator_x86_64.h
@@ -69,6 +69,8 @@ static constexpr FloatRegister non_volatile_xmm_regs[] = { XMM12, XMM13, XMM14,
V(FP16Compare) \
V(FP16Min) \
V(FP16Max) \
+ V(IntegerRemainderUnsigned) \
+ V(LongRemainderUnsigned) \
V(StringStringIndexOf) \
V(StringStringIndexOfAfter) \
V(StringBufferAppend) \
diff --git a/compiler/optimizing/intrinsics_riscv64.cc b/compiler/optimizing/intrinsics_riscv64.cc
index 0a9ac872db..b71232b4e4 100644
--- a/compiler/optimizing/intrinsics_riscv64.cc
+++ b/compiler/optimizing/intrinsics_riscv64.cc
@@ -595,7 +595,7 @@ void IntrinsicCodeGeneratorRISCV64::VisitLongNumberOfTrailingZeros(HInvoke* invo
EmitIntegralUnOp(invoke, [&](XRegister rd, XRegister rs1) { __ Ctz(rd, rs1); });
}
-static void GenerateDivideUnsigned(HInvoke* invoke, CodeGeneratorRISCV64* codegen) {
+static void GenerateDivRemUnsigned(HInvoke* invoke, bool is_div, CodeGeneratorRISCV64* codegen) {
LocationSummary* locations = invoke->GetLocations();
Riscv64Assembler* assembler = codegen->GetAssembler();
DataType::Type type = invoke->GetType();
@@ -611,10 +611,18 @@ static void GenerateDivideUnsigned(HInvoke* invoke, CodeGeneratorRISCV64* codege
codegen->AddSlowPath(slow_path);
__ Beqz(divisor, slow_path->GetEntryLabel());
- if (type == DataType::Type::kInt32) {
- __ Divuw(out, dividend, divisor);
+ if (is_div) {
+ if (type == DataType::Type::kInt32) {
+ __ Divuw(out, dividend, divisor);
+ } else {
+ __ Divu(out, dividend, divisor);
+ }
} else {
- __ Divu(out, dividend, divisor);
+ if (type == DataType::Type::kInt32) {
+ __ Remuw(out, dividend, divisor);
+ } else {
+ __ Remu(out, dividend, divisor);
+ }
}
__ Bind(slow_path->GetExitLabel());
@@ -625,7 +633,7 @@ void IntrinsicLocationsBuilderRISCV64::VisitIntegerDivideUnsigned(HInvoke* invok
}
void IntrinsicCodeGeneratorRISCV64::VisitIntegerDivideUnsigned(HInvoke* invoke) {
- GenerateDivideUnsigned(invoke, codegen_);
+ GenerateDivRemUnsigned(invoke, /*is_div=*/true, codegen_);
}
void IntrinsicLocationsBuilderRISCV64::VisitLongDivideUnsigned(HInvoke* invoke) {
@@ -633,7 +641,23 @@ void IntrinsicLocationsBuilderRISCV64::VisitLongDivideUnsigned(HInvoke* invoke)
}
void IntrinsicCodeGeneratorRISCV64::VisitLongDivideUnsigned(HInvoke* invoke) {
- GenerateDivideUnsigned(invoke, codegen_);
+ GenerateDivRemUnsigned(invoke, /*is_div=*/true, codegen_);
+}
+
+void IntrinsicLocationsBuilderRISCV64::VisitIntegerRemainderUnsigned(HInvoke* invoke) {
+ CreateIntIntToIntSlowPathCallLocations(allocator_, invoke);
+}
+
+void IntrinsicCodeGeneratorRISCV64::VisitIntegerRemainderUnsigned(HInvoke* invoke) {
+ GenerateDivRemUnsigned(invoke, /*is_div=*/false, codegen_);
+}
+
+void IntrinsicLocationsBuilderRISCV64::VisitLongRemainderUnsigned(HInvoke* invoke) {
+ CreateIntIntToIntSlowPathCallLocations(allocator_, invoke);
+}
+
+void IntrinsicCodeGeneratorRISCV64::VisitLongRemainderUnsigned(HInvoke* invoke) {
+ GenerateDivRemUnsigned(invoke, /*is_div=*/false, codegen_);
}
#define VISIT_INTRINSIC(name, low, high, type, start_index) \
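A note on the Beqz guard in GenerateDivRemUnsigned: RISC-V's M-extension divide/remainder instructions do not trap on a zero divisor (remu/remuw simply return the dividend), whereas the Java methods must throw ArithmeticException, so the zero-divisor case is routed to the slow path. A rough Java sketch of the combined fast-path/slow-path semantics for the 32-bit case (the helper name remainderUnsignedReference is hypothetical, for illustration only):

    static int remainderUnsignedReference(int dividend, int divisor) {
        if (divisor == 0) {
            // Slow path: remuw would silently return the dividend here,
            // but Java requires an exception.
            throw new ArithmeticException("/ by zero");
        }
        // Fast path: remuw computes this in one instruction. In plain Java,
        // zero-extend to long so % operates on the unsigned values.
        return (int) (Integer.toUnsignedLong(dividend) % Integer.toUnsignedLong(divisor));
    }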