-rw-r--r--  compiler/optimizing/code_generator_arm64.h      2
-rw-r--r--  compiler/optimizing/code_generator_arm_vixl.h   2
-rw-r--r--  compiler/optimizing/code_generator_x86.h        2
-rw-r--r--  compiler/optimizing/code_generator_x86_64.h     2
-rw-r--r--  compiler/optimizing/intrinsics_riscv64.cc      36
-rw-r--r--  runtime/intrinsics_list.h                       2
-rw-r--r--  runtime/oat/image.cc                            4
7 files changed, 42 insertions, 8 deletions
diff --git a/compiler/optimizing/code_generator_arm64.h b/compiler/optimizing/code_generator_arm64.h
index 78049c5675..04694ece3e 100644
--- a/compiler/optimizing/code_generator_arm64.h
+++ b/compiler/optimizing/code_generator_arm64.h
@@ -124,6 +124,8 @@ const vixl::aarch64::CPURegList callee_saved_fp_registers(vixl::aarch64::CPURegi
Location ARM64ReturnLocation(DataType::Type return_type);
#define UNIMPLEMENTED_INTRINSIC_LIST_ARM64(V) \
+ V(IntegerRemainderUnsigned) \
+ V(LongRemainderUnsigned) \
V(StringStringIndexOf) \
V(StringStringIndexOfAfter) \
V(StringBufferAppend) \
diff --git a/compiler/optimizing/code_generator_arm_vixl.h b/compiler/optimizing/code_generator_arm_vixl.h
index 51bee1cd77..05e5bdf2b7 100644
--- a/compiler/optimizing/code_generator_arm_vixl.h
+++ b/compiler/optimizing/code_generator_arm_vixl.h
@@ -124,6 +124,8 @@ using VIXLUInt32Literal = vixl::aarch32::Literal<uint32_t>;
V(UnsafeCASLong) /* High register pressure */ \
V(SystemArrayCopyChar) \
V(LongDivideUnsigned) \
+ V(IntegerRemainderUnsigned) \
+ V(LongRemainderUnsigned) \
V(CRC32Update) \
V(CRC32UpdateBytes) \
V(CRC32UpdateByteBuffer) \
diff --git a/compiler/optimizing/code_generator_x86.h b/compiler/optimizing/code_generator_x86.h
index 93f8e6ed9b..6ce0c506a0 100644
--- a/compiler/optimizing/code_generator_x86.h
+++ b/compiler/optimizing/code_generator_x86.h
@@ -55,6 +55,8 @@ static constexpr size_t kRuntimeParameterFpuRegistersLength =
V(IntegerHighestOneBit) \
V(LongHighestOneBit) \
V(LongDivideUnsigned) \
+ V(IntegerRemainderUnsigned) \
+ V(LongRemainderUnsigned) \
V(CRC32Update) \
V(CRC32UpdateBytes) \
V(CRC32UpdateByteBuffer) \
diff --git a/compiler/optimizing/code_generator_x86_64.h b/compiler/optimizing/code_generator_x86_64.h
index 7a5e4aa894..81c8ead32e 100644
--- a/compiler/optimizing/code_generator_x86_64.h
+++ b/compiler/optimizing/code_generator_x86_64.h
@@ -69,6 +69,8 @@ static constexpr FloatRegister non_volatile_xmm_regs[] = { XMM12, XMM13, XMM14,
V(FP16Compare) \
V(FP16Min) \
V(FP16Max) \
+ V(IntegerRemainderUnsigned) \
+ V(LongRemainderUnsigned) \
V(StringStringIndexOf) \
V(StringStringIndexOfAfter) \
V(StringBufferAppend) \
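Note (editorial, not part of the commit): the four hunks above only register the new entries in each backend's unimplemented-intrinsic list (UNIMPLEMENTED_INTRINSIC_LIST_ARM64 and its arm/x86/x86-64 counterparts), so on those architectures the calls are not inlined and still resolve to the java.lang library methods. As a point of reference, a minimal Java sketch of the semantics those methods provide; the class and variable names are illustrative only:

```java
public class RemainderUnsignedSemantics {
    public static void main(String[] args) {
        int a = -7;   // bit pattern 0xFFFFFFF9, i.e. 4294967289 when read as unsigned
        int b = 10;

        // Signed remainder keeps the dividend's sign; the unsigned variant does not.
        System.out.println(a % b);                            // -7
        System.out.println(Integer.remainderUnsigned(a, b));  // 9

        // Equivalent expression: widen both operands to unsigned 64-bit values first.
        int ref = (int) (Integer.toUnsignedLong(a) % Integer.toUnsignedLong(b));
        System.out.println(ref);                              // 9
    }
}
```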
diff --git a/compiler/optimizing/intrinsics_riscv64.cc b/compiler/optimizing/intrinsics_riscv64.cc
index 0a9ac872db..b71232b4e4 100644
--- a/compiler/optimizing/intrinsics_riscv64.cc
+++ b/compiler/optimizing/intrinsics_riscv64.cc
@@ -595,7 +595,7 @@ void IntrinsicCodeGeneratorRISCV64::VisitLongNumberOfTrailingZeros(HInvoke* invo
EmitIntegralUnOp(invoke, [&](XRegister rd, XRegister rs1) { __ Ctz(rd, rs1); });
}
-static void GenerateDivideUnsigned(HInvoke* invoke, CodeGeneratorRISCV64* codegen) {
+static void GenerateDivRemUnsigned(HInvoke* invoke, bool is_div, CodeGeneratorRISCV64* codegen) {
LocationSummary* locations = invoke->GetLocations();
Riscv64Assembler* assembler = codegen->GetAssembler();
DataType::Type type = invoke->GetType();
@@ -611,10 +611,18 @@ static void GenerateDivideUnsigned(HInvoke* invoke, CodeGeneratorRISCV64* codege
codegen->AddSlowPath(slow_path);
__ Beqz(divisor, slow_path->GetEntryLabel());
- if (type == DataType::Type::kInt32) {
- __ Divuw(out, dividend, divisor);
+ if (is_div) {
+ if (type == DataType::Type::kInt32) {
+ __ Divuw(out, dividend, divisor);
+ } else {
+ __ Divu(out, dividend, divisor);
+ }
} else {
- __ Divu(out, dividend, divisor);
+ if (type == DataType::Type::kInt32) {
+ __ Remuw(out, dividend, divisor);
+ } else {
+ __ Remu(out, dividend, divisor);
+ }
}
__ Bind(slow_path->GetExitLabel());
@@ -625,7 +633,7 @@ void IntrinsicLocationsBuilderRISCV64::VisitIntegerDivideUnsigned(HInvoke* invok
}
void IntrinsicCodeGeneratorRISCV64::VisitIntegerDivideUnsigned(HInvoke* invoke) {
- GenerateDivideUnsigned(invoke, codegen_);
+ GenerateDivRemUnsigned(invoke, /*is_div=*/true, codegen_);
}
void IntrinsicLocationsBuilderRISCV64::VisitLongDivideUnsigned(HInvoke* invoke) {
@@ -633,7 +641,23 @@ void IntrinsicLocationsBuilderRISCV64::VisitLongDivideUnsigned(HInvoke* invoke)
}
void IntrinsicCodeGeneratorRISCV64::VisitLongDivideUnsigned(HInvoke* invoke) {
- GenerateDivideUnsigned(invoke, codegen_);
+ GenerateDivRemUnsigned(invoke, /*is_div=*/true, codegen_);
+}
+
+void IntrinsicLocationsBuilderRISCV64::VisitIntegerRemainderUnsigned(HInvoke* invoke) {
+ CreateIntIntToIntSlowPathCallLocations(allocator_, invoke);
+}
+
+void IntrinsicCodeGeneratorRISCV64::VisitIntegerRemainderUnsigned(HInvoke* invoke) {
+ GenerateDivRemUnsigned(invoke, /*is_div=*/false, codegen_);
+}
+
+void IntrinsicLocationsBuilderRISCV64::VisitLongRemainderUnsigned(HInvoke* invoke) {
+ CreateIntIntToIntSlowPathCallLocations(allocator_, invoke);
+}
+
+void IntrinsicCodeGeneratorRISCV64::VisitLongRemainderUnsigned(HInvoke* invoke) {
+ GenerateDivRemUnsigned(invoke, /*is_div=*/false, codegen_);
}
#define VISIT_INTRINSIC(name, low, high, type, start_index) \
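One detail worth noting about GenerateDivRemUnsigned (my reading of the change, not text from the commit): Remuw/Remu compute the unsigned remainder directly, but the RISC-V M extension does not trap on a zero divisor (REMU simply returns the dividend), while the Java methods must throw ArithmeticException. Hence the Beqz branch to the slow path before the instruction is reached. A short Java sketch of the behaviour the compiled code has to preserve:

```java
public class RemainderUnsignedThrows {
    public static void main(String[] args) {
        try {
            // Must throw: a zero divisor cannot silently produce a result, even
            // though the underlying remu/remuw instruction would return the dividend.
            int r = Integer.remainderUnsigned(42, 0);
            System.out.println("unexpected result: " + r);
        } catch (ArithmeticException e) {
            System.out.println("caught: " + e);  // exact message may vary by runtime
        }
    }
}
```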
diff --git a/runtime/intrinsics_list.h b/runtime/intrinsics_list.h
index d6fa39f1fc..862c3cbd52 100644
--- a/runtime/intrinsics_list.h
+++ b/runtime/intrinsics_list.h
@@ -137,6 +137,7 @@
V(IntegerReverseBytes, kStatic, kNeedsEnvironment, kNoSideEffects, kNoThrow, "Ljava/lang/Integer;", "reverseBytes", "(I)I") \
V(IntegerBitCount, kStatic, kNeedsEnvironment, kNoSideEffects, kNoThrow, "Ljava/lang/Integer;", "bitCount", "(I)I") \
V(IntegerDivideUnsigned, kStatic, kNeedsEnvironment, kNoSideEffects, kCanThrow, "Ljava/lang/Integer;", "divideUnsigned", "(II)I") \
+ V(IntegerRemainderUnsigned, kStatic, kNeedsEnvironment, kNoSideEffects, kCanThrow, "Ljava/lang/Integer;", "remainderUnsigned", "(II)I") \
V(IntegerHighestOneBit, kStatic, kNeedsEnvironment, kNoSideEffects, kNoThrow, "Ljava/lang/Integer;", "highestOneBit", "(I)I") \
V(IntegerLowestOneBit, kStatic, kNeedsEnvironment, kNoSideEffects, kNoThrow, "Ljava/lang/Integer;", "lowestOneBit", "(I)I") \
V(IntegerNumberOfLeadingZeros, kStatic, kNeedsEnvironment, kNoSideEffects, kNoThrow, "Ljava/lang/Integer;", "numberOfLeadingZeros", "(I)I") \
@@ -145,6 +146,7 @@
V(LongReverseBytes, kStatic, kNeedsEnvironment, kNoSideEffects, kNoThrow, "Ljava/lang/Long;", "reverseBytes", "(J)J") \
V(LongBitCount, kStatic, kNeedsEnvironment, kNoSideEffects, kNoThrow, "Ljava/lang/Long;", "bitCount", "(J)I") \
V(LongDivideUnsigned, kStatic, kNeedsEnvironment, kNoSideEffects, kCanThrow, "Ljava/lang/Long;", "divideUnsigned", "(JJ)J") \
+ V(LongRemainderUnsigned, kStatic, kNeedsEnvironment, kNoSideEffects, kCanThrow, "Ljava/lang/Long;", "remainderUnsigned", "(JJ)J") \
V(LongHighestOneBit, kStatic, kNeedsEnvironment, kNoSideEffects, kNoThrow, "Ljava/lang/Long;", "highestOneBit", "(J)J") \
V(LongLowestOneBit, kStatic, kNeedsEnvironment, kNoSideEffects, kNoThrow, "Ljava/lang/Long;", "lowestOneBit", "(J)J") \
V(LongNumberOfLeadingZeros, kStatic, kNeedsEnvironment, kNoSideEffects, kNoThrow, "Ljava/lang/Long;", "numberOfLeadingZeros", "(J)I") \
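The two new intrinsics_list.h entries mirror the existing divideUnsigned ones: static methods on java.lang.Integer and java.lang.Long with descriptors (II)I and (JJ)J, kNoSideEffects, and kCanThrow for the zero-divisor case. For completeness, an illustrative check of the 64-bit variant:

```java
public class LongRemainderUnsignedDemo {
    public static void main(String[] args) {
        long a = -1L;        // 0xFFFFFFFFFFFFFFFF, the largest unsigned 64-bit value
        long b = 1L << 32;

        System.out.println(a % b);                          // -1 (signed remainder)
        System.out.println(Long.remainderUnsigned(a, b));   // 4294967295, i.e. 2^32 - 1
    }
}
```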
diff --git a/runtime/oat/image.cc b/runtime/oat/image.cc
index af221b993b..f69989b592 100644
--- a/runtime/oat/image.cc
+++ b/runtime/oat/image.cc
@@ -34,8 +34,8 @@
namespace art HIDDEN {
const uint8_t ImageHeader::kImageMagic[] = { 'a', 'r', 't', '\n' };
-// Last change: Add JniStubMethodsSection.
-const uint8_t ImageHeader::kImageVersion[] = { '1', '1', '0', '\0' };
+// Last change: Add remainderUnsigned intrinsics.
+const uint8_t ImageHeader::kImageVersion[] = { '1', '1', '1', '\0' };
ImageHeader::ImageHeader(uint32_t image_reservation_size,
uint32_t component_count,