Diffstat (limited to 'runtime/arch/stub_test.cc')
-rw-r--r--  runtime/arch/stub_test.cc  195
1 file changed, 28 insertions(+), 167 deletions(-)
diff --git a/runtime/arch/stub_test.cc b/runtime/arch/stub_test.cc
index c82b445f81..2b47cefdd3 100644
--- a/runtime/arch/stub_test.cc
+++ b/runtime/arch/stub_test.cc
@@ -322,138 +322,6 @@ class StubTest : public CommonRuntimeTest {
"d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
"d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
"memory");
-#elif defined(__mips__) && !defined(__LP64__)
- __asm__ __volatile__ (
- // Spill a0-a3 and t0-t7 which we say we don't clobber. May contain args.
- "addiu $sp, $sp, -64\n\t"
- "sw $a0, 0($sp)\n\t"
- "sw $a1, 4($sp)\n\t"
- "sw $a2, 8($sp)\n\t"
- "sw $a3, 12($sp)\n\t"
- "sw $t0, 16($sp)\n\t"
- "sw $t1, 20($sp)\n\t"
- "sw $t2, 24($sp)\n\t"
- "sw $t3, 28($sp)\n\t"
- "sw $t4, 32($sp)\n\t"
- "sw $t5, 36($sp)\n\t"
- "sw $t6, 40($sp)\n\t"
- "sw $t7, 44($sp)\n\t"
- // Spill gp register since it is caller save.
- "sw $gp, 52($sp)\n\t"
-
- "addiu $sp, $sp, -16\n\t" // Reserve stack space, 16B aligned.
- "sw %[referrer], 0($sp)\n\t"
-
- // Push everything on the stack, so we don't rely on the order.
- "addiu $sp, $sp, -24\n\t"
- "sw %[arg0], 0($sp)\n\t"
- "sw %[arg1], 4($sp)\n\t"
- "sw %[arg2], 8($sp)\n\t"
- "sw %[code], 12($sp)\n\t"
- "sw %[self], 16($sp)\n\t"
- "sw %[hidden], 20($sp)\n\t"
-
- // Load call params into the right registers.
- "lw $a0, 0($sp)\n\t"
- "lw $a1, 4($sp)\n\t"
- "lw $a2, 8($sp)\n\t"
- "lw $t9, 12($sp)\n\t"
- "lw $s1, 16($sp)\n\t"
- "lw $t7, 20($sp)\n\t"
- "addiu $sp, $sp, 24\n\t"
-
- "jalr $t9\n\t" // Call the stub.
- "nop\n\t"
- "addiu $sp, $sp, 16\n\t" // Drop the quick "frame".
-
- // Restore the registers not named in the clobber list.
- "lw $a0, 0($sp)\n\t"
- "lw $a1, 4($sp)\n\t"
- "lw $a2, 8($sp)\n\t"
- "lw $a3, 12($sp)\n\t"
- "lw $t0, 16($sp)\n\t"
- "lw $t1, 20($sp)\n\t"
- "lw $t2, 24($sp)\n\t"
- "lw $t3, 28($sp)\n\t"
- "lw $t4, 32($sp)\n\t"
- "lw $t5, 36($sp)\n\t"
- "lw $t6, 40($sp)\n\t"
- "lw $t7, 44($sp)\n\t"
- // Restore gp.
- "lw $gp, 52($sp)\n\t"
- "addiu $sp, $sp, 64\n\t" // Free stack space, now sp as on entry.
-
- "move %[result], $v0\n\t" // Store the call result.
- : [result] "=r" (result)
- : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
- [referrer] "r"(referrer), [hidden] "r"(hidden)
- : "at", "v0", "v1", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1",
- "fp", "ra",
- "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
- "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
- "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
- "memory"); // clobber.
-#elif defined(__mips__) && defined(__LP64__)
- __asm__ __volatile__ (
- // Spill a0-a7 which we say we don't clobber. May contain args.
- "daddiu $sp, $sp, -64\n\t"
- "sd $a0, 0($sp)\n\t"
- "sd $a1, 8($sp)\n\t"
- "sd $a2, 16($sp)\n\t"
- "sd $a3, 24($sp)\n\t"
- "sd $a4, 32($sp)\n\t"
- "sd $a5, 40($sp)\n\t"
- "sd $a6, 48($sp)\n\t"
- "sd $a7, 56($sp)\n\t"
-
- "daddiu $sp, $sp, -16\n\t" // Reserve stack space, 16B aligned.
- "sd %[referrer], 0($sp)\n\t"
-
- // Push everything on the stack, so we don't rely on the order.
- "daddiu $sp, $sp, -48\n\t"
- "sd %[arg0], 0($sp)\n\t"
- "sd %[arg1], 8($sp)\n\t"
- "sd %[arg2], 16($sp)\n\t"
- "sd %[code], 24($sp)\n\t"
- "sd %[self], 32($sp)\n\t"
- "sd %[hidden], 40($sp)\n\t"
-
- // Load call params into the right registers.
- "ld $a0, 0($sp)\n\t"
- "ld $a1, 8($sp)\n\t"
- "ld $a2, 16($sp)\n\t"
- "ld $t9, 24($sp)\n\t"
- "ld $s1, 32($sp)\n\t"
- "ld $t0, 40($sp)\n\t"
- "daddiu $sp, $sp, 48\n\t"
-
- "jalr $t9\n\t" // Call the stub.
- "nop\n\t"
- "daddiu $sp, $sp, 16\n\t" // Drop the quick "frame".
-
- // Restore the registers not named in the clobber list.
- "ld $a0, 0($sp)\n\t"
- "ld $a1, 8($sp)\n\t"
- "ld $a2, 16($sp)\n\t"
- "ld $a3, 24($sp)\n\t"
- "ld $a4, 32($sp)\n\t"
- "ld $a5, 40($sp)\n\t"
- "ld $a6, 48($sp)\n\t"
- "ld $a7, 56($sp)\n\t"
- "daddiu $sp, $sp, 64\n\t"
-
- "move %[result], $v0\n\t" // Store the call result.
- : [result] "=r" (result)
- : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
- [referrer] "r"(referrer), [hidden] "r"(hidden)
- // Instead of the aliases t0-t3, the register names $12-$15 have been used in the clobber
- // list because t0-t3 are ambiguous.
- : "at", "v0", "v1", "$12", "$13", "$14", "$15", "s0", "s1", "s2", "s3", "s4", "s5", "s6",
- "s7", "t8", "t9", "k0", "k1", "fp", "ra",
- "$f0", "$f1", "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9", "$f10", "$f11",
- "$f12", "$f13", "$f14", "$f15", "$f16", "$f17", "$f18", "$f19", "$f20", "$f21", "$f22",
- "$f23", "$f24", "$f25", "$f26", "$f27", "$f28", "$f29", "$f30", "$f31",
- "memory"); // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
#define PUSH(reg) "pushq " # reg "\n\t .cfi_adjust_cfa_offset 8\n\t"
#define POP(reg) "popq " # reg "\n\t .cfi_adjust_cfa_offset -8\n\t"
@@ -546,7 +414,7 @@ class StubTest : public CommonRuntimeTest {
TEST_F(StubTest, Memcpy) {
-#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__)) || defined(__mips__)
+#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__))
Thread* self = Thread::Current();
uint32_t orig[20];
@@ -583,7 +451,7 @@ TEST_F(StubTest, Memcpy) {
}
TEST_F(StubTest, LockObject) {
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || \
(defined(__x86_64__) && !defined(__APPLE__))
static constexpr size_t kThinLockLoops = 100;
@@ -657,7 +525,7 @@ class RandGen {
// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || \
(defined(__x86_64__) && !defined(__APPLE__))
static constexpr size_t kThinLockLoops = 100;
@@ -808,13 +676,13 @@ TEST_F(StubTest, UnlockObject) {
TestUnlockObject(this);
}
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || \
(defined(__x86_64__) && !defined(__APPLE__))
extern "C" void art_quick_check_instance_of(void);
#endif
TEST_F(StubTest, CheckCast) {
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || \
(defined(__x86_64__) && !defined(__APPLE__))
Thread* self = Thread::Current();
@@ -913,7 +781,7 @@ TEST_F(StubTest, CheckCast) {
}
TEST_F(StubTest, AllocObject) {
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || \
(defined(__x86_64__) && !defined(__APPLE__))
// This will lead to OOM error messages in the log.
ScopedLogSeverity sls(LogSeverity::FATAL);
@@ -1030,7 +898,7 @@ TEST_F(StubTest, AllocObject) {
}
TEST_F(StubTest, AllocObjectArray) {
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || \
(defined(__x86_64__) && !defined(__APPLE__))
// TODO: Check the "Unresolved" allocation stubs
@@ -1095,8 +963,7 @@ TEST_F(StubTest, AllocObjectArray) {
TEST_F(StubTest, StringCompareTo) {
TEST_DISABLED_FOR_STRING_COMPRESSION();
// There is no StringCompareTo runtime entrypoint for __arm__ or __aarch64__.
-#if defined(__i386__) || defined(__mips__) || \
- (defined(__x86_64__) && !defined(__APPLE__))
+#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__))
// TODO: Check the "Unresolved" allocation stubs
Thread* self = Thread::Current();
@@ -1178,7 +1045,7 @@ TEST_F(StubTest, StringCompareTo) {
static void GetSetBooleanStatic(ArtField* f, Thread* self,
ArtMethod* referrer, StubTest* test)
REQUIRES_SHARED(Locks::mutator_lock_) {
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || \
(defined(__x86_64__) && !defined(__APPLE__))
constexpr size_t num_values = 5;
uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
@@ -1209,7 +1076,7 @@ static void GetSetBooleanStatic(ArtField* f, Thread* self,
static void GetSetByteStatic(ArtField* f, Thread* self, ArtMethod* referrer,
StubTest* test)
REQUIRES_SHARED(Locks::mutator_lock_) {
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || \
(defined(__x86_64__) && !defined(__APPLE__))
int8_t values[] = { -128, -64, 0, 64, 127 };
@@ -1240,7 +1107,7 @@ static void GetSetByteStatic(ArtField* f, Thread* self, ArtMethod* referrer,
static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thread* self,
ArtMethod* referrer, StubTest* test)
REQUIRES_SHARED(Locks::mutator_lock_) {
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || \
(defined(__x86_64__) && !defined(__APPLE__))
uint8_t values[] = { 0, true, 2, 128, 0xFF };
@@ -1275,7 +1142,7 @@ static void GetSetBooleanInstance(Handle<mirror::Object>* obj, ArtField* f, Thre
static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
Thread* self, ArtMethod* referrer, StubTest* test)
REQUIRES_SHARED(Locks::mutator_lock_) {
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || \
(defined(__x86_64__) && !defined(__APPLE__))
int8_t values[] = { -128, -64, 0, 64, 127 };
@@ -1310,7 +1177,7 @@ static void GetSetByteInstance(Handle<mirror::Object>* obj, ArtField* f,
static void GetSetCharStatic(ArtField* f, Thread* self, ArtMethod* referrer,
StubTest* test)
REQUIRES_SHARED(Locks::mutator_lock_) {
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || \
(defined(__x86_64__) && !defined(__APPLE__))
uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
@@ -1340,7 +1207,7 @@ static void GetSetCharStatic(ArtField* f, Thread* self, ArtMethod* referrer,
static void GetSetShortStatic(ArtField* f, Thread* self,
ArtMethod* referrer, StubTest* test)
REQUIRES_SHARED(Locks::mutator_lock_) {
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || \
(defined(__x86_64__) && !defined(__APPLE__))
int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
@@ -1371,7 +1238,7 @@ static void GetSetShortStatic(ArtField* f, Thread* self,
static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
Thread* self, ArtMethod* referrer, StubTest* test)
REQUIRES_SHARED(Locks::mutator_lock_) {
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || \
(defined(__x86_64__) && !defined(__APPLE__))
uint16_t values[] = { 0, 1, 2, 255, 32768, 0xFFFF };
@@ -1405,7 +1272,7 @@ static void GetSetCharInstance(Handle<mirror::Object>* obj, ArtField* f,
static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
Thread* self, ArtMethod* referrer, StubTest* test)
REQUIRES_SHARED(Locks::mutator_lock_) {
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || \
(defined(__x86_64__) && !defined(__APPLE__))
int16_t values[] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
@@ -1440,7 +1307,7 @@ static void GetSetShortInstance(Handle<mirror::Object>* obj, ArtField* f,
static void GetSet32Static(ArtField* f, Thread* self, ArtMethod* referrer,
StubTest* test)
REQUIRES_SHARED(Locks::mutator_lock_) {
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || \
(defined(__x86_64__) && !defined(__APPLE__))
uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
@@ -1458,11 +1325,7 @@ static void GetSet32Static(ArtField* f, Thread* self, ArtMethod* referrer,
self,
referrer);
-#if defined(__mips__) && defined(__LP64__)
- EXPECT_EQ(static_cast<uint32_t>(res), values[i]) << "Iteration " << i;
-#else
EXPECT_EQ(res, values[i]) << "Iteration " << i;
-#endif
}
#else
UNUSED(f, self, referrer, test);
@@ -1476,7 +1339,7 @@ static void GetSet32Static(ArtField* f, Thread* self, ArtMethod* referrer,
static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
Thread* self, ArtMethod* referrer, StubTest* test)
REQUIRES_SHARED(Locks::mutator_lock_) {
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || \
(defined(__x86_64__) && !defined(__APPLE__))
uint32_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
@@ -1511,7 +1374,7 @@ static void GetSet32Instance(Handle<mirror::Object>* obj, ArtField* f,
}
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || \
(defined(__x86_64__) && !defined(__APPLE__))
static void set_and_check_static(uint32_t f_idx,
@@ -1543,7 +1406,7 @@ static void set_and_check_static(uint32_t f_idx,
static void GetSetObjStatic(ArtField* f, Thread* self, ArtMethod* referrer,
StubTest* test)
REQUIRES_SHARED(Locks::mutator_lock_) {
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || \
(defined(__x86_64__) && !defined(__APPLE__))
set_and_check_static(f->GetDexFieldIndex(), nullptr, self, referrer, test);
@@ -1561,7 +1424,7 @@ static void GetSetObjStatic(ArtField* f, Thread* self, ArtMethod* referrer,
}
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || \
(defined(__x86_64__) && !defined(__APPLE__))
static void set_and_check_instance(ArtField* f,
ObjPtr<mirror::Object> trg,
@@ -1596,7 +1459,7 @@ static void set_and_check_instance(ArtField* f,
static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
Thread* self, ArtMethod* referrer, StubTest* test)
REQUIRES_SHARED(Locks::mutator_lock_) {
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || \
(defined(__x86_64__) && !defined(__APPLE__))
set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
@@ -1619,8 +1482,7 @@ static void GetSetObjInstance(Handle<mirror::Object>* obj, ArtField* f,
static void GetSet64Static(ArtField* f, Thread* self, ArtMethod* referrer,
StubTest* test)
REQUIRES_SHARED(Locks::mutator_lock_) {
-#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) \
- || defined(__aarch64__)
+#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
for (size_t i = 0; i < arraysize(values); ++i) {
@@ -1652,8 +1514,7 @@ static void GetSet64Static(ArtField* f, Thread* self, ArtMethod* referrer,
static void GetSet64Instance(Handle<mirror::Object>* obj, ArtField* f,
Thread* self, ArtMethod* referrer, StubTest* test)
REQUIRES_SHARED(Locks::mutator_lock_) {
-#if (defined(__x86_64__) && !defined(__APPLE__)) || (defined(__mips__) && defined(__LP64__)) || \
- defined(__aarch64__)
+#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
uint64_t values[] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
for (size_t i = 0; i < arraysize(values); ++i) {
@@ -1842,7 +1703,7 @@ TEST_F(StubTest, Fields64) {
// and gets a bogus OatQuickMethodHeader* pointing into our assembly code just before
// the bridge and uses that to check for inlined frames, crashing in the process.
TEST_F(StubTest, DISABLED_IMT) {
-#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || defined(__mips__) || \
+#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || \
(defined(__x86_64__) && !defined(__APPLE__))
Thread* self = Thread::Current();
@@ -1981,7 +1842,7 @@ TEST_F(StubTest, DISABLED_IMT) {
}
TEST_F(StubTest, StringIndexOf) {
-#if defined(__arm__) || defined(__aarch64__) || defined(__mips__)
+#if defined(__arm__) || defined(__aarch64__)
Thread* self = Thread::Current();
ScopedObjectAccess soa(self);
// garbage is created during ClassLinker::Init
@@ -2058,7 +1919,7 @@ TEST_F(StubTest, StringIndexOf) {
TEST_F(StubTest, ReadBarrier) {
#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
- defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
+ defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__)))
Thread* self = Thread::Current();
const uintptr_t readBarrierSlow = StubTest::GetEntrypoint(self, kQuickReadBarrierSlow);
@@ -2094,7 +1955,7 @@ TEST_F(StubTest, ReadBarrier) {
TEST_F(StubTest, ReadBarrierForRoot) {
#if defined(ART_USE_READ_BARRIER) && (defined(__i386__) || defined(__arm__) || \
- defined(__aarch64__) || defined(__mips__) || (defined(__x86_64__) && !defined(__APPLE__)))
+ defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__)))
Thread* self = Thread::Current();
const uintptr_t readBarrierForRootSlow =