Diffstat (limited to 'compiler/optimizing')
-rw-r--r--  compiler/optimizing/intrinsics.cc         50
-rw-r--r--  compiler/optimizing/intrinsics_arm.cc     24
-rw-r--r--  compiler/optimizing/intrinsics_arm64.cc   24
-rw-r--r--  compiler/optimizing/intrinsics_list.h     12
-rw-r--r--  compiler/optimizing/intrinsics_mips.cc    15
-rw-r--r--  compiler/optimizing/intrinsics_mips64.cc  14
-rw-r--r--  compiler/optimizing/intrinsics_x86.cc     18
-rw-r--r--  compiler/optimizing/intrinsics_x86_64.cc  16
8 files changed, 164 insertions, 9 deletions
diff --git a/compiler/optimizing/intrinsics.cc b/compiler/optimizing/intrinsics.cc
index 5caf077858..a6be324730 100644
--- a/compiler/optimizing/intrinsics.cc
+++ b/compiler/optimizing/intrinsics.cc
@@ -131,6 +131,16 @@ static Intrinsics GetIntrinsic(InlineMethod method) {
      return ((method.d.data & kIntrinsicFlagToFloatingPoint) == 0) ?
          Intrinsics::kFloatFloatToRawIntBits : Intrinsics::kFloatIntBitsToFloat;

+    // Floating-point tests.
+    case kIntrinsicFloatIsInfinite:
+      return Intrinsics::kFloatIsInfinite;
+    case kIntrinsicDoubleIsInfinite:
+      return Intrinsics::kDoubleIsInfinite;
+    case kIntrinsicFloatIsNaN:
+      return Intrinsics::kFloatIsNaN;
+    case kIntrinsicDoubleIsNaN:
+      return Intrinsics::kDoubleIsNaN;
+
    // Bit manipulations.
    case kIntrinsicReverseBits:
      switch (GetType(method.d.data, true)) {
@@ -186,6 +196,36 @@ static Intrinsics GetIntrinsic(InlineMethod method) {
          LOG(FATAL) << "Unknown/unsupported op size " << method.d.data;
          UNREACHABLE();
      }
+    case kIntrinsicCompare:
+      switch (GetType(method.d.data, true)) {
+        case Primitive::kPrimInt:
+          return Intrinsics::kIntegerCompare;
+        case Primitive::kPrimLong:
+          return Intrinsics::kLongCompare;
+        default:
+          LOG(FATAL) << "Unknown/unsupported op size " << method.d.data;
+          UNREACHABLE();
+      }
+    case kIntrinsicHighestOneBit:
+      switch (GetType(method.d.data, true)) {
+        case Primitive::kPrimInt:
+          return Intrinsics::kIntegerHighestOneBit;
+        case Primitive::kPrimLong:
+          return Intrinsics::kLongHighestOneBit;
+        default:
+          LOG(FATAL) << "Unknown/unsupported op size " << method.d.data;
+          UNREACHABLE();
+      }
+    case kIntrinsicLowestOneBit:
+      switch (GetType(method.d.data, true)) {
+        case Primitive::kPrimInt:
+          return Intrinsics::kIntegerLowestOneBit;
+        case Primitive::kPrimLong:
+          return Intrinsics::kLongLowestOneBit;
+        default:
+          LOG(FATAL) << "Unknown/unsupported op size " << method.d.data;
+          UNREACHABLE();
+      }
    case kIntrinsicNumberOfLeadingZeros:
      switch (GetType(method.d.data, true)) {
        case Primitive::kPrimInt:
@@ -206,6 +246,16 @@ static Intrinsics GetIntrinsic(InlineMethod method) {
          LOG(FATAL) << "Unknown/unsupported op size " << method.d.data;
          UNREACHABLE();
      }
+    case kIntrinsicSignum:
+      switch (GetType(method.d.data, true)) {
+        case Primitive::kPrimInt:
+          return Intrinsics::kIntegerSignum;
+        case Primitive::kPrimLong:
+          return Intrinsics::kLongSignum;
+        default:
+          LOG(FATAL) << "Unknown/unsupported op size " << method.d.data;
+          UNREACHABLE();
+      }

    // Abs.
    case kIntrinsicAbsDouble:
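
For reference, the Java-level semantics behind the newly recognized intrinsics can be sketched in standalone C++ as follows. This is an illustrative sketch only (ad-hoc names, 32-bit variants shown; the Long forms are analogous), not code from this change, and the per-architecture files below still list all of these as unimplemented.

// Illustrative reference semantics only; not ART code.
#include <cstdint>
#include <cstdio>
#include <limits>

int32_t RefIntegerCompare(int32_t x, int32_t y) {
  // Integer.compare(x, y): negative, zero, or positive by signed comparison.
  return (x < y) ? -1 : (x == y) ? 0 : 1;
}

int32_t RefIntegerSignum(int32_t x) {
  // Integer.signum(x): -1 for negative, 0 for zero, 1 for positive.
  return (x > 0) - (x < 0);
}

int32_t RefIntegerHighestOneBit(int32_t x) {
  // Integer.highestOneBit(x): keep only the most significant set bit.
  uint32_t v = static_cast<uint32_t>(x);
  v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16;
  return static_cast<int32_t>(v - (v >> 1));
}

int32_t RefIntegerLowestOneBit(int32_t x) {
  // Integer.lowestOneBit(x): keep only the least significant set bit.
  uint32_t v = static_cast<uint32_t>(x);
  return static_cast<int32_t>(v & (~v + 1u));
}

bool RefFloatIsNaN(float f) {
  // Float.isNaN(f): NaN is the only value that compares unequal to itself.
  return f != f;
}

bool RefFloatIsInfinite(float f) {
  // Float.isInfinite(f): true only for positive or negative infinity.
  return f == std::numeric_limits<float>::infinity() ||
         f == -std::numeric_limits<float>::infinity();
}

int main() {
  std::printf("compare=%d signum=%d highest=0x%x lowest=0x%x nan=%d inf=%d\n",
              RefIntegerCompare(3, 7),
              RefIntegerSignum(-42),
              static_cast<unsigned>(RefIntegerHighestOneBit(0x1234)),
              static_cast<unsigned>(RefIntegerLowestOneBit(0x1234)),
              RefFloatIsNaN(std::numeric_limits<float>::quiet_NaN()),
              RefFloatIsInfinite(std::numeric_limits<float>::infinity()));
  return 0;
}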
diff --git a/compiler/optimizing/intrinsics_arm.cc b/compiler/optimizing/intrinsics_arm.cc
index e72f927e44..97fe5872bf 100644
--- a/compiler/optimizing/intrinsics_arm.cc
+++ b/compiler/optimizing/intrinsics_arm.cc
@@ -1580,13 +1580,9 @@ void IntrinsicCodeGeneratorARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED)
UNIMPLEMENTED_INTRINSIC(IntegerBitCount)
UNIMPLEMENTED_INTRINSIC(IntegerReverse)
UNIMPLEMENTED_INTRINSIC(IntegerReverseBytes)
-UNIMPLEMENTED_INTRINSIC(IntegerRotateLeft)
-UNIMPLEMENTED_INTRINSIC(IntegerRotateRight)
UNIMPLEMENTED_INTRINSIC(LongBitCount)
UNIMPLEMENTED_INTRINSIC(LongReverse)
UNIMPLEMENTED_INTRINSIC(LongReverseBytes)
-UNIMPLEMENTED_INTRINSIC(LongRotateLeft)
-UNIMPLEMENTED_INTRINSIC(LongRotateRight)
UNIMPLEMENTED_INTRINSIC(ShortReverseBytes)
UNIMPLEMENTED_INTRINSIC(MathMinDoubleDouble)
UNIMPLEMENTED_INTRINSIC(MathMinFloatFloat)
@@ -1621,6 +1617,26 @@ UNIMPLEMENTED_INTRINSIC(MathSinh)
UNIMPLEMENTED_INTRINSIC(MathTan)
UNIMPLEMENTED_INTRINSIC(MathTanh)
+UNIMPLEMENTED_INTRINSIC(FloatIsInfinite)
+UNIMPLEMENTED_INTRINSIC(DoubleIsInfinite)
+UNIMPLEMENTED_INTRINSIC(FloatIsNaN)
+UNIMPLEMENTED_INTRINSIC(DoubleIsNaN)
+
+UNIMPLEMENTED_INTRINSIC(IntegerCompare)
+UNIMPLEMENTED_INTRINSIC(LongCompare)
+UNIMPLEMENTED_INTRINSIC(IntegerHighestOneBit)
+UNIMPLEMENTED_INTRINSIC(LongHighestOneBit)
+UNIMPLEMENTED_INTRINSIC(IntegerLowestOneBit)
+UNIMPLEMENTED_INTRINSIC(LongLowestOneBit)
+UNIMPLEMENTED_INTRINSIC(IntegerSignum)
+UNIMPLEMENTED_INTRINSIC(LongSignum)
+
+// Rotate operations are handled as HRor instructions.
+UNIMPLEMENTED_INTRINSIC(IntegerRotateLeft)
+UNIMPLEMENTED_INTRINSIC(IntegerRotateRight)
+UNIMPLEMENTED_INTRINSIC(LongRotateLeft)
+UNIMPLEMENTED_INTRINSIC(LongRotateRight)
+
#undef UNIMPLEMENTED_INTRINSIC
#undef __
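
Each UNIMPLEMENTED_INTRINSIC(Name) line above expands to empty visitor bodies, so a recognized-but-unimplemented intrinsic simply falls back to a regular invoke. Below is a minimal compilable sketch of that pattern, inferred from the hunk header above; HInvoke, ATTRIBUTE_UNUSED, and the two visitor classes are stand-ins here, not the real ART declarations.

// Toy sketch of the UNIMPLEMENTED_INTRINSIC pattern; every type here is a stand-in.
struct HInvoke {};  // Stand-in for ART's HIR invoke node.
#define ATTRIBUTE_UNUSED __attribute__((unused))

struct IntrinsicLocationsBuilderARM {  // Stand-in: one Visit per intrinsic name.
  void VisitIntegerCompare(HInvoke* invoke);
};
struct IntrinsicCodeGeneratorARM {     // Stand-in: one Visit per intrinsic name.
  void VisitIntegerCompare(HInvoke* invoke);
};

// UNIMPLEMENTED_INTRINSIC(Name) pastes Name into a pair of empty Visit##Name
// bodies: no special locations and no special code, so the compiler emits an
// ordinary call for the method instead.
#define UNIMPLEMENTED_INTRINSIC(Name)                                                \
void IntrinsicLocationsBuilderARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) { \
}                                                                                    \
void IntrinsicCodeGeneratorARM::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED) {    \
}

UNIMPLEMENTED_INTRINSIC(IntegerCompare)
#undef UNIMPLEMENTED_INTRINSIC
#undef ATTRIBUTE_UNUSED

int main() { return 0; }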
diff --git a/compiler/optimizing/intrinsics_arm64.cc b/compiler/optimizing/intrinsics_arm64.cc
index 8cf2d4f393..c888f01841 100644
--- a/compiler/optimizing/intrinsics_arm64.cc
+++ b/compiler/optimizing/intrinsics_arm64.cc
@@ -1465,11 +1465,7 @@ void IntrinsicCodeGeneratorARM64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED
}
UNIMPLEMENTED_INTRINSIC(IntegerBitCount)
-UNIMPLEMENTED_INTRINSIC(IntegerRotateLeft)
-UNIMPLEMENTED_INTRINSIC(IntegerRotateRight)
UNIMPLEMENTED_INTRINSIC(LongBitCount)
-UNIMPLEMENTED_INTRINSIC(LongRotateLeft)
-UNIMPLEMENTED_INTRINSIC(LongRotateRight)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopyChar)
UNIMPLEMENTED_INTRINSIC(SystemArrayCopy)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
@@ -1493,6 +1489,26 @@ UNIMPLEMENTED_INTRINSIC(MathSinh)
UNIMPLEMENTED_INTRINSIC(MathTan)
UNIMPLEMENTED_INTRINSIC(MathTanh)
+UNIMPLEMENTED_INTRINSIC(FloatIsInfinite)
+UNIMPLEMENTED_INTRINSIC(DoubleIsInfinite)
+UNIMPLEMENTED_INTRINSIC(FloatIsNaN)
+UNIMPLEMENTED_INTRINSIC(DoubleIsNaN)
+
+UNIMPLEMENTED_INTRINSIC(IntegerCompare)
+UNIMPLEMENTED_INTRINSIC(LongCompare)
+UNIMPLEMENTED_INTRINSIC(IntegerHighestOneBit)
+UNIMPLEMENTED_INTRINSIC(LongHighestOneBit)
+UNIMPLEMENTED_INTRINSIC(IntegerLowestOneBit)
+UNIMPLEMENTED_INTRINSIC(LongLowestOneBit)
+UNIMPLEMENTED_INTRINSIC(IntegerSignum)
+UNIMPLEMENTED_INTRINSIC(LongSignum)
+
+// Rotate operations are handled as HRor instructions.
+UNIMPLEMENTED_INTRINSIC(IntegerRotateLeft)
+UNIMPLEMENTED_INTRINSIC(IntegerRotateRight)
+UNIMPLEMENTED_INTRINSIC(LongRotateLeft)
+UNIMPLEMENTED_INTRINSIC(LongRotateRight)
+
#undef UNIMPLEMENTED_INTRINSIC
#undef __
diff --git a/compiler/optimizing/intrinsics_list.h b/compiler/optimizing/intrinsics_list.h
index ea380347da..88217b308e 100644
--- a/compiler/optimizing/intrinsics_list.h
+++ b/compiler/optimizing/intrinsics_list.h
@@ -23,23 +23,35 @@
#define INTRINSICS_LIST(V) \
V(DoubleDoubleToRawLongBits, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
+ V(DoubleIsInfinite, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
+ V(DoubleIsNaN, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
V(DoubleLongBitsToDouble, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
V(FloatFloatToRawIntBits, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
+ V(FloatIsInfinite, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
+ V(FloatIsNaN, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
V(FloatIntBitsToFloat, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
V(IntegerReverse, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
V(IntegerReverseBytes, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
V(IntegerBitCount, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
+ V(IntegerCompare, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
+ V(IntegerHighestOneBit, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
+ V(IntegerLowestOneBit, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
V(IntegerNumberOfLeadingZeros, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
V(IntegerNumberOfTrailingZeros, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
V(IntegerRotateRight, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
V(IntegerRotateLeft, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
+ V(IntegerSignum, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
V(LongReverse, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
V(LongReverseBytes, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
V(LongBitCount, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
+ V(LongCompare, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
+ V(LongHighestOneBit, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
+ V(LongLowestOneBit, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
V(LongNumberOfLeadingZeros, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
V(LongNumberOfTrailingZeros, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
V(LongRotateRight, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
V(LongRotateLeft, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
+ V(LongSignum, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
V(ShortReverseBytes, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
V(MathAbsDouble, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
V(MathAbsFloat, kStatic, kNeedsEnvironmentOrCache, kNoSideEffects, kNoThrow) \
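
intrinsics_list.h is an X-macro list: every V(Name, ...) row added here is expanded in several places (the Intrinsics enum, the recognizer, and each backend's Visit declarations), which is why the same names also appear in the per-architecture files. The following is a toy, self-contained illustration of the pattern; the short list, enum, and visitor class are invented for the example, and the real V() takes more columns.

#include <cstdio>

// Toy list with a few of the newly added names; the real V() row also carries
// kind, environment, side-effect, and throw flags.
#define TOY_INTRINSICS_LIST(V) \
  V(IntegerCompare)            \
  V(IntegerSignum)             \
  V(LongLowestOneBit)

// Expansion 1: one enum constant per row, mirroring names like kIntegerCompare.
enum class ToyIntrinsics {
#define ENUM_ENTRY(Name) k##Name,
  TOY_INTRINSICS_LIST(ENUM_ENTRY)
#undef ENUM_ENTRY
  kNumberOfIntrinsics
};

// Expansion 2: one Visit method per row on a code-generator-like class.
class ToyIntrinsicVisitor {
 public:
#define DECLARE_VISIT(Name) void Visit##Name() { std::printf("Visit%s\n", #Name); }
  TOY_INTRINSICS_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT
};

int main() {
  ToyIntrinsicVisitor visitor;
  visitor.VisitIntegerSignum();  // Prints "VisitIntegerSignum".
  return 0;
}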
diff --git a/compiler/optimizing/intrinsics_mips.cc b/compiler/optimizing/intrinsics_mips.cc
index 81112b1a34..0d9cf091cc 100644
--- a/compiler/optimizing/intrinsics_mips.cc
+++ b/compiler/optimizing/intrinsics_mips.cc
@@ -1013,6 +1013,21 @@ UNIMPLEMENTED_INTRINSIC(MathNextAfter)
UNIMPLEMENTED_INTRINSIC(MathSinh)
UNIMPLEMENTED_INTRINSIC(MathTan)
UNIMPLEMENTED_INTRINSIC(MathTanh)
+
+UNIMPLEMENTED_INTRINSIC(FloatIsInfinite)
+UNIMPLEMENTED_INTRINSIC(DoubleIsInfinite)
+UNIMPLEMENTED_INTRINSIC(FloatIsNaN)
+UNIMPLEMENTED_INTRINSIC(DoubleIsNaN)
+
+UNIMPLEMENTED_INTRINSIC(IntegerCompare)
+UNIMPLEMENTED_INTRINSIC(LongCompare)
+UNIMPLEMENTED_INTRINSIC(IntegerHighestOneBit)
+UNIMPLEMENTED_INTRINSIC(LongHighestOneBit)
+UNIMPLEMENTED_INTRINSIC(IntegerLowestOneBit)
+UNIMPLEMENTED_INTRINSIC(LongLowestOneBit)
+UNIMPLEMENTED_INTRINSIC(IntegerSignum)
+UNIMPLEMENTED_INTRINSIC(LongSignum)
+
#undef UNIMPLEMENTED_INTRINSIC
#undef __
diff --git a/compiler/optimizing/intrinsics_mips64.cc b/compiler/optimizing/intrinsics_mips64.cc
index 769c4228c4..cba84fa3e3 100644
--- a/compiler/optimizing/intrinsics_mips64.cc
+++ b/compiler/optimizing/intrinsics_mips64.cc
@@ -1760,6 +1760,20 @@ UNIMPLEMENTED_INTRINSIC(MathSinh)
UNIMPLEMENTED_INTRINSIC(MathTan)
UNIMPLEMENTED_INTRINSIC(MathTanh)
+UNIMPLEMENTED_INTRINSIC(FloatIsInfinite)
+UNIMPLEMENTED_INTRINSIC(DoubleIsInfinite)
+UNIMPLEMENTED_INTRINSIC(FloatIsNaN)
+UNIMPLEMENTED_INTRINSIC(DoubleIsNaN)
+
+UNIMPLEMENTED_INTRINSIC(IntegerCompare)
+UNIMPLEMENTED_INTRINSIC(LongCompare)
+UNIMPLEMENTED_INTRINSIC(IntegerHighestOneBit)
+UNIMPLEMENTED_INTRINSIC(LongHighestOneBit)
+UNIMPLEMENTED_INTRINSIC(IntegerLowestOneBit)
+UNIMPLEMENTED_INTRINSIC(LongLowestOneBit)
+UNIMPLEMENTED_INTRINSIC(IntegerSignum)
+UNIMPLEMENTED_INTRINSIC(LongSignum)
+
#undef UNIMPLEMENTED_INTRINSIC
#undef __
diff --git a/compiler/optimizing/intrinsics_x86.cc b/compiler/optimizing/intrinsics_x86.cc
index 3f5688b0db..acc40bc998 100644
--- a/compiler/optimizing/intrinsics_x86.cc
+++ b/compiler/optimizing/intrinsics_x86.cc
@@ -2593,11 +2593,27 @@ void IntrinsicCodeGeneratorX86::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSED)
UNIMPLEMENTED_INTRINSIC(MathRoundDouble)
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
+UNIMPLEMENTED_INTRINSIC(SystemArrayCopy)
+
+UNIMPLEMENTED_INTRINSIC(FloatIsInfinite)
+UNIMPLEMENTED_INTRINSIC(DoubleIsInfinite)
+UNIMPLEMENTED_INTRINSIC(FloatIsNaN)
+UNIMPLEMENTED_INTRINSIC(DoubleIsNaN)
+
+UNIMPLEMENTED_INTRINSIC(IntegerCompare)
+UNIMPLEMENTED_INTRINSIC(LongCompare)
+UNIMPLEMENTED_INTRINSIC(IntegerHighestOneBit)
+UNIMPLEMENTED_INTRINSIC(LongHighestOneBit)
+UNIMPLEMENTED_INTRINSIC(IntegerLowestOneBit)
+UNIMPLEMENTED_INTRINSIC(LongLowestOneBit)
+UNIMPLEMENTED_INTRINSIC(IntegerSignum)
+UNIMPLEMENTED_INTRINSIC(LongSignum)
+
+// Rotate operations are handled as HRor instructions.
UNIMPLEMENTED_INTRINSIC(IntegerRotateLeft)
UNIMPLEMENTED_INTRINSIC(IntegerRotateRight)
UNIMPLEMENTED_INTRINSIC(LongRotateRight)
UNIMPLEMENTED_INTRINSIC(LongRotateLeft)
-UNIMPLEMENTED_INTRINSIC(SystemArrayCopy)
#undef UNIMPLEMENTED_INTRINSIC
diff --git a/compiler/optimizing/intrinsics_x86_64.cc b/compiler/optimizing/intrinsics_x86_64.cc
index 23a628f243..6ccc5d1e01 100644
--- a/compiler/optimizing/intrinsics_x86_64.cc
+++ b/compiler/optimizing/intrinsics_x86_64.cc
@@ -2592,6 +2592,22 @@ void IntrinsicCodeGeneratorX86_64::Visit ## Name(HInvoke* invoke ATTRIBUTE_UNUSE
}
UNIMPLEMENTED_INTRINSIC(ReferenceGetReferent)
+
+UNIMPLEMENTED_INTRINSIC(FloatIsInfinite)
+UNIMPLEMENTED_INTRINSIC(DoubleIsInfinite)
+UNIMPLEMENTED_INTRINSIC(FloatIsNaN)
+UNIMPLEMENTED_INTRINSIC(DoubleIsNaN)
+
+UNIMPLEMENTED_INTRINSIC(IntegerCompare)
+UNIMPLEMENTED_INTRINSIC(LongCompare)
+UNIMPLEMENTED_INTRINSIC(IntegerHighestOneBit)
+UNIMPLEMENTED_INTRINSIC(LongHighestOneBit)
+UNIMPLEMENTED_INTRINSIC(IntegerLowestOneBit)
+UNIMPLEMENTED_INTRINSIC(LongLowestOneBit)
+UNIMPLEMENTED_INTRINSIC(IntegerSignum)
+UNIMPLEMENTED_INTRINSIC(LongSignum)
+
+// Rotate operations are handled as HRor instructions.
UNIMPLEMENTED_INTRINSIC(IntegerRotateLeft)
UNIMPLEMENTED_INTRINSIC(IntegerRotateRight)
UNIMPLEMENTED_INTRINSIC(LongRotateLeft)