blob: 1215d8b322c29283c46d06ba9f8d473197153ae6 [file] [log] [blame]
Andreas Gampe525cde22014-04-22 15:44:50 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Ian Rogerse63db272014-07-15 15:36:11 -070017#include <cstdio>
18
Andreas Gampe525cde22014-04-22 15:44:50 -070019#include "common_runtime_test.h"
Andreas Gampe29b38412014-08-13 00:15:43 -070020#include "entrypoints/quick/quick_entrypoints_enum.h"
Andreas Gampe6e4e59c2014-05-05 20:11:02 -070021#include "mirror/art_field-inl.h"
Andreas Gampe51f76352014-05-21 08:28:48 -070022#include "mirror/art_method-inl.h"
23#include "mirror/class-inl.h"
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -070024#include "mirror/string-inl.h"
Ian Rogerse63db272014-07-15 15:36:11 -070025#include "scoped_thread_state_change.h"
Andreas Gampe525cde22014-04-22 15:44:50 -070026
27namespace art {
28
29
// Test fixture for exercising the quick entrypoint assembly stubs directly.
// It calls a stub's code pointer through per-architecture inline-assembly
// trampolines that set up the managed calling convention (arg registers,
// referrer slot on the stack, Thread register) without going through a
// compiled caller.
class StubTest : public CommonRuntimeTest {
 protected:
  // We need callee-save methods set up in the Runtime for exceptions.
  void SetUp() OVERRIDE {
    // Do the normal setup.
    CommonRuntimeTest::SetUp();

    {
      // Create callee-save methods
      ScopedObjectAccess soa(Thread::Current());
      runtime_->SetInstructionSet(kRuntimeISA);
      for (int i = 0; i < Runtime::kLastCalleeSaveType; i++) {
        Runtime::CalleeSaveType type = Runtime::CalleeSaveType(i);
        if (!runtime_->HasCalleeSaveMethod(type)) {
          runtime_->SetCalleeSaveMethod(runtime_->CreateCalleeSaveMethod(type), type);
        }
      }
    }
  }

  // Shrink the heap option and force interpreter-only mode before the
  // runtime starts.
  void SetUpRuntimeOptions(RuntimeOptions *options) OVERRIDE {
    // Use a smaller heap
    for (std::pair<std::string, const void*>& pair : *options) {
      if (pair.first.find("-Xmx") == 0) {
        pair.first = "-Xmx4M";  // Smallest we can go.
      }
    }
    options->push_back(std::make_pair("-Xint", nullptr));
  }

  // Helper function needed since TEST_F makes a new class.
  Thread::tls_ptr_sized_values* GetTlsPtr(Thread* self) {
    return &self->tlsPtr_;
  }

 public:
  // Invoke a stub with three word-sized arguments and no referrer method.
  size_t Invoke3(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self) {
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, nullptr);
  }

  // Invoke a stub with three word-sized arguments, pushing |referrer| where
  // the stub expects the calling method. On ARM64 the trampoline also seeds
  // the callee-save FPRs d8-d15 with known values and verifies the stub
  // preserved them (result lands in fp_result, checked to be 0 below).
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrer(size_t arg0, size_t arg1, size_t arg2, uintptr_t code, Thread* self,
                             mirror::ArtMethod* referrer) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer.
        "call *%%edi\n\t"           // Call the stub
        "addl $16, %%esp"           // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"r"(referrer)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"   // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #20\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "add sp, sp, #20\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop nullptr and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "str %[self], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldr x18, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"


        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"     // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"   // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"    // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"           // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"           // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop nullptr and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "m"(referrer)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Same as Invoke3WithReferrer, but additionally passes |hidden| in the
  // hidden-argument register used by the target convention (xmm0 on x86/
  // x86-64, r12 on ARM, x12 on ARM64).
  // TODO: Set up a frame according to referrer's specs.
  size_t Invoke3WithReferrerAndHidden(size_t arg0, size_t arg1, size_t arg2, uintptr_t code,
                                      Thread* self, mirror::ArtMethod* referrer, size_t hidden) {
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);

    size_t result;
    size_t fpr_result = 0;
#if defined(__i386__)
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movd %[hidden], %%xmm0\n\t"
        "subl $12, %%esp\n\t"       // Align stack.
        "pushl %[referrer]\n\t"     // Store referrer
        "call *%%edi\n\t"           // Call the stub
        "addl $16, %%esp"           // Pop referrer
        : "=a" (result)
          // Use the result from eax
        : "a"(arg0), "c"(arg1), "d"(arg2), "D"(code), [referrer]"m"(referrer), [hidden]"r"(hidden)
          // This places code into edi, arg0 into eax, arg1 into ecx, and arg2 into edx
        : "memory");  // clobber.
    // TODO: Should we clobber the other registers? EBX gets clobbered by some of the stubs,
    //       but compilation fails when declaring that.
#elif defined(__arm__)
    __asm__ __volatile__(
        "push {r1-r12, lr}\n\t"     // Save state, 13*4B = 52B
        ".cfi_adjust_cfa_offset 52\n\t"
        "push {r9}\n\t"
        ".cfi_adjust_cfa_offset 4\n\t"
        "mov r9, %[referrer]\n\n"
        "str r9, [sp, #-8]!\n\t"   // Push referrer, +8B padding so 16B aligned
        ".cfi_adjust_cfa_offset 8\n\t"
        "ldr r9, [sp, #8]\n\t"

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #24\n\t"
        "str %[arg0], [sp]\n\t"
        "str %[arg1], [sp, #4]\n\t"
        "str %[arg2], [sp, #8]\n\t"
        "str %[code], [sp, #12]\n\t"
        "str %[self], [sp, #16]\n\t"
        "str %[hidden], [sp, #20]\n\t"
        "ldr r0, [sp]\n\t"
        "ldr r1, [sp, #4]\n\t"
        "ldr r2, [sp, #8]\n\t"
        "ldr r3, [sp, #12]\n\t"
        "ldr r9, [sp, #16]\n\t"
        "ldr r12, [sp, #20]\n\t"
        "add sp, sp, #24\n\t"

        "blx r3\n\t"                // Call the stub
        "add sp, sp, #12\n\t"       // Pop nullptr and padding
        ".cfi_adjust_cfa_offset -12\n\t"
        "pop {r1-r12, lr}\n\t"      // Restore state
        ".cfi_adjust_cfa_offset -52\n\t"
        "mov %[result], r0\n\t"     // Save the result
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "r"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden)
        : "memory");  // clobber.
#elif defined(__aarch64__)
    __asm__ __volatile__(
        // Spill x0-x7 which we say we don't clobber. May contain args.
        "sub sp, sp, #64\n\t"
        ".cfi_adjust_cfa_offset 64\n\t"
        "stp x0, x1, [sp]\n\t"
        "stp x2, x3, [sp, #16]\n\t"
        "stp x4, x5, [sp, #32]\n\t"
        "stp x6, x7, [sp, #48]\n\t"

        "sub sp, sp, #16\n\t"          // Reserve stack space, 16B aligned
        ".cfi_adjust_cfa_offset 16\n\t"
        "str %[referrer], [sp]\n\t"    // referrer

        // Push everything on the stack, so we don't rely on the order. What a mess. :-(
        "sub sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset 48\n\t"
        // All things are "r" constraints, so direct str/stp should work.
        "stp %[arg0], %[arg1], [sp]\n\t"
        "stp %[arg2], %[code], [sp, #16]\n\t"
        "stp %[self], %[hidden], [sp, #32]\n\t"

        // Now we definitely have x0-x3 free, use it to garble d8 - d15
        "movk x0, #0xfad0\n\t"
        "movk x0, #0xebad, lsl #16\n\t"
        "movk x0, #0xfad0, lsl #32\n\t"
        "movk x0, #0xebad, lsl #48\n\t"
        "fmov d8, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d9, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d10, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d11, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d12, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d13, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d14, x0\n\t"
        "add x0, x0, 1\n\t"
        "fmov d15, x0\n\t"

        // Load call params into the right registers.
        "ldp x0, x1, [sp]\n\t"
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x18, x12, [sp, #32]\n\t"
        "add sp, sp, #48\n\t"
        ".cfi_adjust_cfa_offset -48\n\t"

        "blr x3\n\t"              // Call the stub
        "mov x8, x0\n\t"          // Store result
        "add sp, sp, #16\n\t"     // Drop the quick "frame"
        ".cfi_adjust_cfa_offset -16\n\t"

        // Test d8 - d15. We can use x1 and x2.
        "movk x1, #0xfad0\n\t"
        "movk x1, #0xebad, lsl #16\n\t"
        "movk x1, #0xfad0, lsl #32\n\t"
        "movk x1, #0xebad, lsl #48\n\t"
        "fmov x2, d8\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d9\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d10\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d11\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d12\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d13\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d14\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"
        "add x1, x1, 1\n\t"

        "fmov x2, d15\n\t"
        "cmp x1, x2\n\t"
        "b.ne 1f\n\t"

        "mov x9, #0\n\t"     // Use x9 as flag, in clobber list

        // Finish up.
        "2:\n\t"
        "ldp x0, x1, [sp]\n\t"   // Restore stuff not named clobbered, may contain fpr_result
        "ldp x2, x3, [sp, #16]\n\t"
        "ldp x4, x5, [sp, #32]\n\t"
        "ldp x6, x7, [sp, #48]\n\t"
        "add sp, sp, #64\n\t"    // Free stack space, now sp as on entry
        ".cfi_adjust_cfa_offset -64\n\t"

        "str x9, %[fpr_result]\n\t"  // Store the FPR comparison result
        "mov %[result], x8\n\t"      // Store the call result

        "b 3f\n\t"           // Goto end

        // Failed fpr verification.
        "1:\n\t"
        "mov x9, #1\n\t"
        "b 2b\n\t"           // Goto finish-up

        // End
        "3:\n\t"
        : [result] "=r" (result)
          // Use the result from r0
        : [arg0] "0"(arg0), [arg1] "r"(arg1), [arg2] "r"(arg2), [code] "r"(code), [self] "r"(self),
          [referrer] "r"(referrer), [hidden] "r"(hidden), [fpr_result] "m" (fpr_result)
        : "x8", "x9", "x10", "x11", "x12", "x13", "x14", "x15", "x16", "x17", "x18", "x19", "x20",
          "x21", "x22", "x23", "x24", "x25", "x26", "x27", "x28", "x30",
          "d0", "d1", "d2", "d3", "d4", "d5", "d6", "d7",
          "d8", "d9", "d10", "d11", "d12", "d13", "d14", "d15",
          "d16", "d17", "d18", "d19", "d20", "d21", "d22", "d23",
          "d24", "d25", "d26", "d27", "d28", "d29", "d30", "d31",
          "memory");  // clobber.
#elif defined(__x86_64__) && !defined(__APPLE__)
    // Note: Uses the native convention
    // TODO: Set the thread?
    __asm__ __volatile__(
        "movq %[hidden], %%r9\n\t"     // No need to save r9, listed as clobbered
        "movd %%r9, %%xmm0\n\t"
        "pushq %[referrer]\n\t"        // Push referrer
        "pushq (%%rsp)\n\t"            // & 16B alignment padding
        ".cfi_adjust_cfa_offset 16\n\t"
        "call *%%rax\n\t"              // Call the stub
        "addq $16, %%rsp\n\t"          // Pop nullptr and padding
        ".cfi_adjust_cfa_offset -16\n\t"
        : "=a" (result)
          // Use the result from rax
        : "D"(arg0), "S"(arg1), "d"(arg2), "a"(code), [referrer] "m"(referrer), [hidden] "m"(hidden)
          // This places arg0 into rdi, arg1 into rsi, arg2 into rdx, and code into rax
        : "rbx", "rcx", "rbp", "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15",
          "memory");  // clobber all
    // TODO: Should we clobber the other registers?
#else
    LOG(WARNING) << "Was asked to invoke for an architecture I do not understand.";
    result = 0;
#endif
    // Pop transition.
    self->PopManagedStackFragment(fragment);

    fp_result = fpr_result;
    EXPECT_EQ(0U, fp_result);

    return result;
  }

  // Method with 32b arg0, 64b arg1. On 64-bit targets the wide argument fits
  // in one register slot; on 32-bit targets it is split into two 32-bit halves.
  size_t Invoke3UWithReferrer(size_t arg0, uint64_t arg1, uintptr_t code, Thread* self,
                              mirror::ArtMethod* referrer) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, 0U, code, self, referrer);
#else
    // Need to split up arguments.
    uint32_t lower = static_cast<uint32_t>(arg1 & 0xFFFFFFFF);
    uint32_t upper = static_cast<uint32_t>((arg1 >> 32) & 0xFFFFFFFF);

    return Invoke3WithReferrer(arg0, lower, upper, code, self, referrer);
#endif
  }

  // Method with 32b arg0, 32b arg1, 64b arg2. Only works on 64-bit targets,
  // where the wide argument fits in one slot; 32-bit would need a 4-param
  // invoke, so it returns 0 there.
  size_t Invoke3UUWithReferrer(uint32_t arg0, uint32_t arg1, uint64_t arg2, uintptr_t code,
                               Thread* self, mirror::ArtMethod* referrer) {
#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
    // Just pass through.
    return Invoke3WithReferrer(arg0, arg1, arg2, code, self, referrer);
#else
    // TODO: Needs 4-param invoke.
    return 0;
#endif
  }

  // Reads the code pointer for |entrypoint| out of |self|'s entrypoint table,
  // using the pointer-size-appropriate thread offset.
  static uintptr_t GetEntrypoint(Thread* self, QuickEntrypointEnum entrypoint) {
    int32_t offset;
#ifdef __LP64__
    offset = GetThreadOffset<8>(entrypoint).Int32Value();
#else
    offset = GetThreadOffset<4>(entrypoint).Int32Value();
#endif
    return *reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(self) + offset);
  }

 protected:
  // Result of the callee-save FPR (d8-d15) preservation check done by the
  // aarch64 invoke trampolines; 0 means the registers survived the call.
  size_t fp_result;
};
560
561
Andreas Gampe525cde22014-04-22 15:44:50 -0700562TEST_F(StubTest, Memcpy) {
Ian Rogersc3ccc102014-06-25 11:52:14 -0700563#if defined(__i386__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe525cde22014-04-22 15:44:50 -0700564 Thread* self = Thread::Current();
565
566 uint32_t orig[20];
567 uint32_t trg[20];
568 for (size_t i = 0; i < 20; ++i) {
569 orig[i] = i;
570 trg[i] = 0;
571 }
572
573 Invoke3(reinterpret_cast<size_t>(&trg[4]), reinterpret_cast<size_t>(&orig[4]),
Andreas Gampe29b38412014-08-13 00:15:43 -0700574 10 * sizeof(uint32_t), StubTest::GetEntrypoint(self, kQuickMemcpy), self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700575
576 EXPECT_EQ(orig[0], trg[0]);
577
578 for (size_t i = 1; i < 4; ++i) {
579 EXPECT_NE(orig[i], trg[i]);
580 }
581
582 for (size_t i = 4; i < 14; ++i) {
583 EXPECT_EQ(orig[i], trg[i]);
584 }
585
586 for (size_t i = 14; i < 20; ++i) {
587 EXPECT_NE(orig[i], trg[i]);
588 }
589
590 // TODO: Test overlapping?
591
592#else
593 LOG(INFO) << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA;
594 // Force-print to std::cout so it's also outside the logcat.
595 std::cout << "Skipping memcpy as I don't know how to do that on " << kRuntimeISA << std::endl;
596#endif
597}
598
Andreas Gampe525cde22014-04-22 15:44:50 -0700599TEST_F(StubTest, LockObject) {
Ian Rogersc3ccc102014-06-25 11:52:14 -0700600#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700601 static constexpr size_t kThinLockLoops = 100;
602
Andreas Gampe525cde22014-04-22 15:44:50 -0700603 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700604
605 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
606
Andreas Gampe525cde22014-04-22 15:44:50 -0700607 // Create an object
608 ScopedObjectAccess soa(self);
609 // garbage is created during ClassLinker::Init
610
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700611 StackHandleScope<2> hs(soa.Self());
612 Handle<mirror::String> obj(
613 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe525cde22014-04-22 15:44:50 -0700614 LockWord lock = obj->GetLockWord(false);
615 LockWord::LockState old_state = lock.GetState();
616 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
617
Andreas Gampe29b38412014-08-13 00:15:43 -0700618 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700619
620 LockWord lock_after = obj->GetLockWord(false);
621 LockWord::LockState new_state = lock_after.GetState();
622 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700623 EXPECT_EQ(lock_after.ThinLockCount(), 0U); // Thin lock starts count at zero
624
625 for (size_t i = 1; i < kThinLockLoops; ++i) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700626 Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700627
628 // Check we're at lock count i
629
630 LockWord l_inc = obj->GetLockWord(false);
631 LockWord::LockState l_inc_state = l_inc.GetState();
632 EXPECT_EQ(LockWord::LockState::kThinLocked, l_inc_state);
633 EXPECT_EQ(l_inc.ThinLockCount(), i);
634 }
635
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700636 // Force a fat lock by running identity hashcode to fill up lock word.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700637 Handle<mirror::String> obj2(hs.NewHandle(
638 mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700639
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700640 obj2->IdentityHashCode();
641
Andreas Gampe29b38412014-08-13 00:15:43 -0700642 Invoke3(reinterpret_cast<size_t>(obj2.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700643
644 LockWord lock_after2 = obj2->GetLockWord(false);
645 LockWord::LockState new_state2 = lock_after2.GetState();
646 EXPECT_EQ(LockWord::LockState::kFatLocked, new_state2);
647 EXPECT_NE(lock_after2.FatLockMonitor(), static_cast<Monitor*>(nullptr));
648
649 // Test done.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700650#else
651 LOG(INFO) << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA;
652 // Force-print to std::cout so it's also outside the logcat.
653 std::cout << "Skipping lock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
654#endif
655}
656
Andreas Gampe6e4e59c2014-05-05 20:11:02 -0700657
// Tiny deterministic pseudo-random generator driving the lock/unlock stress
// test below. Lehmer-style step (multiplier 48271, modulus 2147483647) plus a
// small additive constant; quality doesn't matter here, only repeatability.
// uint32_t arithmetic wraps on overflow, which is well-defined and fine.
class RandGen {
 public:
  explicit RandGen(uint32_t seed) : val_(seed) {}

  uint32_t next() {
    const uint32_t stepped = val_ * 48271 % 2147483647 + 13;
    val_ = stepped;
    return stepped;
  }

  uint32_t val_;  // Current generator state; public, as in the original.
};
669
670
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700671// NO_THREAD_SAFETY_ANALYSIS as we do not want to grab exclusive mutator lock for MonitorInfo.
672static void TestUnlockObject(StubTest* test) NO_THREAD_SAFETY_ANALYSIS {
Ian Rogersc3ccc102014-06-25 11:52:14 -0700673#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe2ba8d4b2014-05-02 17:33:17 -0700674 static constexpr size_t kThinLockLoops = 100;
675
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700676 Thread* self = Thread::Current();
Andreas Gampe29b38412014-08-13 00:15:43 -0700677
678 const uintptr_t art_quick_lock_object = StubTest::GetEntrypoint(self, kQuickLockObject);
679 const uintptr_t art_quick_unlock_object = StubTest::GetEntrypoint(self, kQuickUnlockObject);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700680 // Create an object
681 ScopedObjectAccess soa(self);
682 // garbage is created during ClassLinker::Init
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700683 static constexpr size_t kNumberOfLocks = 10; // Number of objects = lock
684 StackHandleScope<kNumberOfLocks + 1> hs(self);
685 Handle<mirror::String> obj(
686 hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700687 LockWord lock = obj->GetLockWord(false);
688 LockWord::LockState old_state = lock.GetState();
689 EXPECT_EQ(LockWord::LockState::kUnlocked, old_state);
690
Andreas Gampe29b38412014-08-13 00:15:43 -0700691 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700692 // This should be an illegal monitor state.
693 EXPECT_TRUE(self->IsExceptionPending());
694 self->ClearException();
695
696 LockWord lock_after = obj->GetLockWord(false);
697 LockWord::LockState new_state = lock_after.GetState();
698 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state);
Andreas Gampe525cde22014-04-22 15:44:50 -0700699
Andreas Gampe29b38412014-08-13 00:15:43 -0700700 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_lock_object, self);
Andreas Gampe525cde22014-04-22 15:44:50 -0700701
702 LockWord lock_after2 = obj->GetLockWord(false);
703 LockWord::LockState new_state2 = lock_after2.GetState();
704 EXPECT_EQ(LockWord::LockState::kThinLocked, new_state2);
705
Andreas Gampe29b38412014-08-13 00:15:43 -0700706 test->Invoke3(reinterpret_cast<size_t>(obj.Get()), 0U, 0U, art_quick_unlock_object, self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700707
708 LockWord lock_after3 = obj->GetLockWord(false);
709 LockWord::LockState new_state3 = lock_after3.GetState();
710 EXPECT_EQ(LockWord::LockState::kUnlocked, new_state3);
711
712 // Stress test:
713 // Keep a number of objects and their locks in flight. Randomly lock or unlock one of them in
714 // each step.
715
716 RandGen r(0x1234);
717
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700718 constexpr size_t kIterations = 10000; // Number of iterations
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700719 constexpr size_t kMoveToFat = 1000; // Chance of 1:kMoveFat to make a lock fat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700720
721 size_t counts[kNumberOfLocks];
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700722 bool fat[kNumberOfLocks]; // Whether a lock should be thin or fat.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700723 Handle<mirror::String> objects[kNumberOfLocks];
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700724
725 // Initialize = allocate.
726 for (size_t i = 0; i < kNumberOfLocks; ++i) {
727 counts[i] = 0;
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700728 fat[i] = false;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700729 objects[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), ""));
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700730 }
731
732 for (size_t i = 0; i < kIterations; ++i) {
733 // Select which lock to update.
734 size_t index = r.next() % kNumberOfLocks;
735
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700736 // Make lock fat?
737 if (!fat[index] && (r.next() % kMoveToFat == 0)) {
738 fat[index] = true;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700739 objects[index]->IdentityHashCode();
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700740
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700741 LockWord lock_iter = objects[index]->GetLockWord(false);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700742 LockWord::LockState iter_state = lock_iter.GetState();
743 if (counts[index] == 0) {
744 EXPECT_EQ(LockWord::LockState::kHashCode, iter_state);
745 } else {
746 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state);
747 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700748 } else {
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700749 bool lock; // Whether to lock or unlock in this step.
750 if (counts[index] == 0) {
751 lock = true;
752 } else if (counts[index] == kThinLockLoops) {
753 lock = false;
754 } else {
755 // Randomly.
756 lock = r.next() % 2 == 0;
757 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700758
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700759 if (lock) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700760 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_lock_object,
761 self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700762 counts[index]++;
763 } else {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700764 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -0700765 art_quick_unlock_object, self);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700766 counts[index]--;
767 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700768
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700769 EXPECT_FALSE(self->IsExceptionPending());
770
771 // Check the new state.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700772 LockWord lock_iter = objects[index]->GetLockWord(true);
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700773 LockWord::LockState iter_state = lock_iter.GetState();
774 if (fat[index]) {
775 // Abuse MonitorInfo.
776 EXPECT_EQ(LockWord::LockState::kFatLocked, iter_state) << index;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700777 MonitorInfo info(objects[index].Get());
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700778 EXPECT_EQ(counts[index], info.entry_count_) << index;
779 } else {
780 if (counts[index] > 0) {
781 EXPECT_EQ(LockWord::LockState::kThinLocked, iter_state);
782 EXPECT_EQ(counts[index] - 1, lock_iter.ThinLockCount());
783 } else {
784 EXPECT_EQ(LockWord::LockState::kUnlocked, iter_state);
785 }
786 }
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700787 }
788 }
789
790 // Unlock the remaining count times and then check it's unlocked. Then deallocate.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700791 // Go reverse order to correctly handle Handles.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700792 for (size_t i = 0; i < kNumberOfLocks; ++i) {
793 size_t index = kNumberOfLocks - 1 - i;
794 size_t count = counts[index];
795 while (count > 0) {
Andreas Gampe29b38412014-08-13 00:15:43 -0700796 test->Invoke3(reinterpret_cast<size_t>(objects[index].Get()), 0U, 0U, art_quick_unlock_object,
797 self);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700798 count--;
799 }
800
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700801 LockWord lock_after4 = objects[index]->GetLockWord(false);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700802 LockWord::LockState new_state4 = lock_after4.GetState();
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700803 EXPECT_TRUE(LockWord::LockState::kUnlocked == new_state4
804 || LockWord::LockState::kFatLocked == new_state4);
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700805 }
806
Andreas Gampe4fc046e2014-05-06 16:56:39 -0700807 // Test done.
Andreas Gampe525cde22014-04-22 15:44:50 -0700808#else
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700809 LOG(INFO) << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA;
Andreas Gampe525cde22014-04-22 15:44:50 -0700810 // Force-print to std::cout so it's also outside the logcat.
Andreas Gampe7177d7c2014-05-02 12:10:02 -0700811 std::cout << "Skipping unlock_object as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe525cde22014-04-22 15:44:50 -0700812#endif
813}
814
// Driver for the unlock-object stub exercise; the actual work (including the
// thin/fat-lock stress loop) lives in TestUnlockObject() above so it can run
// outside the TEST_F macro body.
TEST_F(StubTest, UnlockObject) {
  TestUnlockObject(this);
}
Andreas Gampe525cde22014-04-22 15:44:50 -0700818
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
// Assembly entrypoint for the check-cast stub on the supported ISAs.
// NOTE(review): the CheckCast test below obtains this address via
// StubTest::GetEntrypoint(self, kQuickCheckCast) and shadows this name with a
// local uintptr_t, so this declaration looks unreferenced in this chunk —
// confirm against the rest of the file before removing.
extern "C" void art_quick_check_cast(void);
#endif
822
// Exercises the quick check-cast stub: same-class and assignable pairs must not
// raise, a failing cast must leave a pending exception on the calling thread.
TEST_F(StubTest, CheckCast) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  const uintptr_t art_quick_check_cast = StubTest::GetEntrypoint(self, kQuickCheckCast);

  // Find some classes.
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // c = Object[] class, c2 = String[] class; String[] is assignable to Object[].
  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));
  Handle<mirror::Class> c2(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  EXPECT_FALSE(self->IsExceptionPending());

  // Identical classes: must succeed (no exception).
  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  // Mixed pair in the assignable direction: must succeed.
  Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(c2.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_FALSE(self->IsExceptionPending());

  // TODO: Make the following work. But that would require correct managed frames.

  // Same pair with operands swapped: the cast fails and the stub must set a
  // pending exception (assumes the second operand is checked against the first
  // — TODO confirm operand order against the stub's implementation).
  Invoke3(reinterpret_cast<size_t>(c2.Get()), reinterpret_cast<size_t>(c.Get()), 0U,
          art_quick_check_cast, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

#else
  LOG(INFO) << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping check_cast as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
870
871
// Exercises the checked aput-object stub (null check + bounds check + element
// type check): valid stores succeed, bad indices and incompatible element
// types must leave a pending exception and not modify the array.
TEST_F(StubTest, APutObj) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  Thread* self = Thread::Current();

  // Do not check non-checked ones, we'd need handlers and stuff...
  const uintptr_t art_quick_aput_obj_with_null_and_bound_check =
      StubTest::GetEntrypoint(self, kQuickAputObjectWithNullAndBoundCheck);

  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<5> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));
  Handle<mirror::Class> ca(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/String;")));

  // Build a String[] of length 10 (index 10 is then used as the out-of-bounds case below).
  Handle<mirror::ObjectArray<mirror::Object>> array(
      hs.NewHandle(mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), 10)));

  // Build a string -> should be assignable
  Handle<mirror::String> str_obj(
      hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), "hello, world!")));

  // Build a generic object -> should fail assigning
  Handle<mirror::Object> obj_obj(hs.NewHandle(c->AllocObject(soa.Self())));

  // Play with it...

  // 1) Success cases
  // 1.1) Assign str_obj to array[0..3]

  EXPECT_FALSE(self->IsExceptionPending());

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(str_obj.Get(), array->Get(3));

  // 1.2) Assign null to array[0..3] — storing null is always type-compatible.

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(0));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 1U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(1));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 2U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(2));

  Invoke3(reinterpret_cast<size_t>(array.Get()), 3U, reinterpret_cast<size_t>(nullptr),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(nullptr, array->Get(3));

  // TODO: Check _which_ exception is thrown. Then make 3) check that it's the right check order.

  // 2) Failure cases (str into str[])
  // 2.1) Array = null
  // TODO: Throwing NPE needs actual DEX code

//  Invoke3(reinterpret_cast<size_t>(nullptr), 0U, reinterpret_cast<size_t>(str_obj.Get()),
//          reinterpret_cast<uintptr_t>(&art_quick_aput_obj_with_null_and_bound_check), self);
//
//  EXPECT_TRUE(self->IsExceptionPending());
//  self->ClearException();

  // 2.2) Index < 0 (negative index must raise)

  Invoke3(reinterpret_cast<size_t>(array.Get()), static_cast<size_t>(-1),
          reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 2.3) Index >= length (10 == array length, so this is out of bounds)

  Invoke3(reinterpret_cast<size_t>(array.Get()), 10U, reinterpret_cast<size_t>(str_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // 3) Failure cases (obj into str[]) — element type mismatch must raise.

  Invoke3(reinterpret_cast<size_t>(array.Get()), 0U, reinterpret_cast<size_t>(obj_obj.Get()),
          art_quick_aput_obj_with_null_and_bound_check, self);

  EXPECT_TRUE(self->IsExceptionPending());
  self->ClearException();

  // Tests done.
#else
  LOG(INFO) << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping aput_obj as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1004
// Exercises the three object-allocation stubs (AllocObject, AllocObjectResolved,
// AllocObjectInitialized): each must return a valid instance of the requested
// class, and allocation under memory exhaustion must fail with a pending
// exception and a null result.
TEST_F(StubTest, AllocObject) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(soa.Self());
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());
  {
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c->GetVirtualMethod(0)),  // arbitrary
                            0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObject),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectResolved),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
  }

  // Failure tests.

  // Out-of-memory: exhaust the heap with live handles, then expect the
  // allocation stub to raise (presumably OutOfMemoryError — not asserted here).
  {
    Runtime::Current()->GetHeap()->SetIdealFootprint(1 * GB);

    // Array helps to fill memory faster.
    Handle<mirror::Class> ca(
        hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

    // Use arbitrary large amount for now.
    // NOTE(review): the second fill loop below allocates until failure without
    // checking against kMaxHandles — assumes the heap fills up before 1000000
    // handles are used; confirm.
    static const size_t kMaxHandles = 1000000;
    std::unique_ptr<StackHandleScope<kMaxHandles>> hsp(new StackHandleScope<kMaxHandles>(self));

    std::vector<Handle<mirror::Object>> handles;
    // Start allocating with 128K
    size_t length = 128 * KB / 4;
    while (length > 10) {
      Handle<mirror::Object> h(hsp->NewHandle<mirror::Object>(
          mirror::ObjectArray<mirror::Object>::Alloc(soa.Self(), ca.Get(), length / 4)));
      if (self->IsExceptionPending() || h.Get() == nullptr) {
        self->ClearException();

        // Try a smaller length
        length = length / 8;
        // Use at most half the reported free space.
        size_t mem = Runtime::Current()->GetHeap()->GetFreeMemory();
        if (length * 8 > mem) {
          length = mem / 8;
        }
      } else {
        handles.push_back(h);
      }
    }
    LOG(INFO) << "Used " << handles.size() << " arrays to fill space.";

    // Allocate simple objects till it fails.
    while (!self->IsExceptionPending()) {
      Handle<mirror::Object> h = hsp->NewHandle(c->AllocObject(soa.Self()));
      if (!self->IsExceptionPending() && h.Get() != nullptr) {
        handles.push_back(h);
      }
    }
    self->ClearException();

    // With the heap full, the stub must fail: pending exception and null result.
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 0U,
                            StubTest::GetEntrypoint(self, kQuickAllocObjectInitialized),
                            self);
    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_object as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1126
// Exercises the array-allocation stubs: a resolved Object[] allocation of
// length 10 must succeed and verify, and a GB-sized request must fail with a
// pending exception and a null result.
TEST_F(StubTest, AllocObjectArray) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();
  // Create an object
  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  StackHandleScope<2> hs(self);
  Handle<mirror::Class> c(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "[Ljava/lang/Object;")));

  // Needed to have a linked method.
  Handle<mirror::Class> c_obj(
      hs.NewHandle(class_linker_->FindSystemClass(soa.Self(), "Ljava/lang/Object;")));

  // Play with it...

  EXPECT_FALSE(self->IsExceptionPending());

  // For some reason this does not work, as the type_idx is artificial and outside what the
  // resolved types of c_obj allow...

  if (false) {  // Deliberately disabled — see the comment above.
    // Use an arbitrary method from c to use as referrer
    size_t result = Invoke3(static_cast<size_t>(c->GetDexTypeIndex()),    // type_idx
                            reinterpret_cast<size_t>(c_obj->GetVirtualMethod(0)),  // arbitrary
                            10U,
                            StubTest::GetEntrypoint(self, kQuickAllocArray),
                            self);

    EXPECT_FALSE(self->IsExceptionPending());
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Array* obj = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    EXPECT_EQ(obj->GetLength(), 10);
  }

  {
    // We can use nullptr in the second argument as we do not need a method here (not used in
    // resolved/initialized cases)
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr), 10U,
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);
    EXPECT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));
    EXPECT_NE(reinterpret_cast<size_t>(nullptr), result);
    mirror::Object* obj = reinterpret_cast<mirror::Object*>(result);
    EXPECT_TRUE(obj->IsArrayInstance());
    EXPECT_TRUE(obj->IsObjectArray());
    EXPECT_EQ(c.Get(), obj->GetClass());
    VerifyObject(obj);
    mirror::Array* array = reinterpret_cast<mirror::Array*>(result);
    EXPECT_EQ(array->GetLength(), 10);
  }

  // Failure tests.

  // Out-of-memory: a GB-element request cannot be satisfied.
  {
    size_t result = Invoke3(reinterpret_cast<size_t>(c.Get()), reinterpret_cast<size_t>(nullptr),
                            GB,  // that should fail...
                            StubTest::GetEntrypoint(self, kQuickAllocArrayResolved),
                            self);

    EXPECT_TRUE(self->IsExceptionPending());
    self->ClearException();
    EXPECT_EQ(reinterpret_cast<size_t>(nullptr), result);
  }

  // Tests done.
#else
  LOG(INFO) << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping alloc_array as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
1207
Alexei Zavjalov315ccab2014-05-01 23:24:05 +07001208
// Exercises the quick string-compareto stub against String::CompareTo for all
// pairs drawn from a set of strings; the second half of the set gets random
// non-zero offsets/lengths poked directly into the String fields. Only the
// sign of the result is checked, since exact values with random offsets are
// established by String::CompareTo itself.
TEST_F(StubTest, StringCompareTo) {
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  // TODO: Check the "Unresolved" allocation stubs

  Thread* self = Thread::Current();

  const uintptr_t art_quick_string_compareto = StubTest::GetEntrypoint(self, kQuickStringCompareTo);

  ScopedObjectAccess soa(self);
  // garbage is created during ClassLinker::Init

  // Create some strings
  // Use array so we can index into it and use a matrix for expected results
  // Setup: The first half is standard. The second half uses a non-zero offset.
  // TODO: Shared backing arrays.
  static constexpr size_t kBaseStringCount = 8;
  const char* c[kBaseStringCount] = { "", "", "a", "aa", "ab",
      "aacaacaacaacaacaac",  // This one's under the default limit to go to __memcmp16.
      "aacaacaacaacaacaacaacaacaacaacaacaac",  // This one's over.
      "aacaacaacaacaacaacaacaacaacaacaacaaca" };  // As is this one. We need a separate one to
                                                  // defeat object-equal optimizations.

  static constexpr size_t kStringCount = 2 * kBaseStringCount;

  StackHandleScope<kStringCount> hs(self);
  Handle<mirror::String> s[kStringCount];

  for (size_t i = 0; i < kBaseStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i]));
  }

  RandGen r(0x1234);

  // Second half: same contents, but shrink each string to a random sub-range by
  // writing the count/offset fields directly (bypasses the String API on purpose).
  for (size_t i = kBaseStringCount; i < kStringCount; ++i) {
    s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c[i - kBaseStringCount]));
    int32_t length = s[i]->GetLength();
    if (length > 1) {
      // Set a random offset and length.
      int32_t new_offset = 1 + (r.next() % (length - 1));
      int32_t rest = length - new_offset - 1;
      int32_t new_length = 1 + (rest > 0 ? r.next() % rest : 0);

      s[i]->SetField32<false>(mirror::String::CountOffset(), new_length);
      s[i]->SetField32<false>(mirror::String::OffsetOffset(), new_offset);
    }
  }

  // TODO: wide characters

  // Matrix of expectations. First component is first parameter. Note we only check against the
  // sign, not the value. As we are testing random offsets, we need to compute this and need to
  // rely on String::CompareTo being correct.
  int32_t expected[kStringCount][kStringCount];
  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      expected[x][y] = s[x]->CompareTo(s[y].Get());
    }
  }

  // Play with it...

  for (size_t x = 0; x < kStringCount; ++x) {
    for (size_t y = 0; y < kStringCount; ++y) {
      // Test string_compareto x y
      size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()),
                              reinterpret_cast<size_t>(s[y].Get()), 0U,
                              art_quick_string_compareto, self);

      EXPECT_FALSE(self->IsExceptionPending());

      // The result is a 32b signed integer
      // (the stub's return value is truncated via a union; only the low 32 bits carry the sign).
      union {
        size_t r;
        int32_t i;
      } conv;
      conv.r = result;
      int32_t e = expected[x][y];
      // Sign-only checks; note c[x] indexes wrap for the offset copies (x >= kBaseStringCount
      // reads out of c[] bounds in the message — NOTE(review): looks like the messages should
      // use c[x % kBaseStringCount]; confirm).
      EXPECT_TRUE(e == 0 ? conv.i == 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e < 0 ? conv.i < 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
      EXPECT_TRUE(e > 0 ? conv.i > 0 : true) << "x=" << c[x] << " y=" << c[y] << " res=" <<
          conv.r;
    }
  }

  // TODO: Deallocate things.

  // Tests done.
#else
  LOG(INFO) << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping string_compareto as I don't know how to do that on " << kRuntimeISA <<
      std::endl;
#endif
}
1307
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001308
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
// Assembly entrypoints for the byte/boolean static-field stubs on the supported
// ISAs. NOTE(review): the helpers below fetch these via
// StubTest::GetEntrypoint(kQuickSet8Static / kQuickGetBooleanStatic), so these
// declarations appear unreferenced in this chunk — confirm before removing.
extern "C" void art_quick_set8_static(void);
extern "C" void art_quick_get_byte_static(void);
extern "C" void art_quick_get_boolean_static(void);
#endif
1314
1315static void GetSetBooleanStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
1316 mirror::ArtMethod* referrer, StubTest* test)
1317 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1318#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
1319 constexpr size_t num_values = 5;
1320 uint8_t values[num_values] = { 0, 1, 2, 128, 0xFF };
1321
1322 for (size_t i = 0; i < num_values; ++i) {
1323 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1324 static_cast<size_t>(values[i]),
1325 0U,
1326 StubTest::GetEntrypoint(self, kQuickSet8Static),
1327 self,
1328 referrer);
1329
1330 size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1331 0U, 0U,
1332 StubTest::GetEntrypoint(self, kQuickGetBooleanStatic),
1333 self,
1334 referrer);
1335 // Boolean currently stores bools as uint8_t, be more zealous about asserting correct writes/gets.
1336 EXPECT_EQ(values[i], static_cast<uint8_t>(res)) << "Iteration " << i;
1337 }
1338#else
1339 LOG(INFO) << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA;
1340 // Force-print to std::cout so it's also outside the logcat.
1341 std::cout << "Skipping set_boolean_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1342#endif
1343}
1344static void GetSetByteStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
1345 mirror::ArtMethod* referrer, StubTest* test)
1346 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1347#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
1348 constexpr size_t num_values = 5;
1349 int8_t values[num_values] = { -128, -64, 0, 64, 127 };
1350
1351 for (size_t i = 0; i < num_values; ++i) {
1352 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1353 static_cast<size_t>(values[i]),
1354 0U,
1355 StubTest::GetEntrypoint(self, kQuickSet8Static),
1356 self,
1357 referrer);
1358
1359 size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1360 0U, 0U,
1361 StubTest::GetEntrypoint(self, kQuickGetByteStatic),
1362 self,
1363 referrer);
1364 EXPECT_EQ(values[i], static_cast<int8_t>(res)) << "Iteration " << i;
1365 }
1366#else
1367 LOG(INFO) << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA;
1368 // Force-print to std::cout so it's also outside the logcat.
1369 std::cout << "Skipping set_byte_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1370#endif
1371}
1372
1373
1374#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
1375extern "C" void art_quick_set8_instance(void);
1376extern "C" void art_quick_get_byte_instance(void);
1377extern "C" void art_quick_get_boolean_instance(void);
1378#endif
1379
1380static void GetSetBooleanInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
1381 Thread* self, mirror::ArtMethod* referrer, StubTest* test)
1382 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1383#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
1384 constexpr size_t num_values = 5;
1385 uint8_t values[num_values] = { 0, true, 2, 128, 0xFF };
1386
1387 for (size_t i = 0; i < num_values; ++i) {
1388 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1389 reinterpret_cast<size_t>(obj->Get()),
1390 static_cast<size_t>(values[i]),
1391 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1392 self,
1393 referrer);
1394
1395 uint8_t res = f->Get()->GetBoolean(obj->Get());
1396 EXPECT_EQ(values[i], res) << "Iteration " << i;
1397
1398 f->Get()->SetBoolean<false>(obj->Get(), res);
1399
1400 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1401 reinterpret_cast<size_t>(obj->Get()),
1402 0U,
1403 StubTest::GetEntrypoint(self, kQuickGetBooleanInstance),
1404 self,
1405 referrer);
1406 EXPECT_EQ(res, static_cast<uint8_t>(res2));
1407 }
1408#else
1409 LOG(INFO) << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA;
1410 // Force-print to std::cout so it's also outside the logcat.
1411 std::cout << "Skipping set_boolean_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1412#endif
1413}
1414static void GetSetByteInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
1415 Thread* self, mirror::ArtMethod* referrer, StubTest* test)
1416 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1417#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
1418 constexpr size_t num_values = 5;
1419 int8_t values[num_values] = { -128, -64, 0, 64, 127 };
1420
1421 for (size_t i = 0; i < num_values; ++i) {
1422 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1423 reinterpret_cast<size_t>(obj->Get()),
1424 static_cast<size_t>(values[i]),
1425 StubTest::GetEntrypoint(self, kQuickSet8Instance),
1426 self,
1427 referrer);
1428
1429 int8_t res = f->Get()->GetByte(obj->Get());
1430 EXPECT_EQ(res, values[i]) << "Iteration " << i;
1431 f->Get()->SetByte<false>(obj->Get(), ++res);
1432
1433 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1434 reinterpret_cast<size_t>(obj->Get()),
1435 0U,
1436 StubTest::GetEntrypoint(self, kQuickGetByteInstance),
1437 self,
1438 referrer);
1439 EXPECT_EQ(res, static_cast<int8_t>(res2));
1440 }
1441#else
1442 LOG(INFO) << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA;
1443 // Force-print to std::cout so it's also outside the logcat.
1444 std::cout << "Skipping set_byte_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1445#endif
1446}
1447
1448#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
1449extern "C" void art_quick_set16_static(void);
1450extern "C" void art_quick_get_short_static(void);
1451extern "C" void art_quick_get_char_static(void);
1452#endif
1453
1454static void GetSetCharStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
1455 mirror::ArtMethod* referrer, StubTest* test)
1456 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1457#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
1458 constexpr size_t num_values = 6;
1459 uint16_t values[num_values] = { 0, 1, 2, 255, 32768, 0xFFFF };
1460
1461 for (size_t i = 0; i < num_values; ++i) {
1462 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1463 static_cast<size_t>(values[i]),
1464 0U,
1465 StubTest::GetEntrypoint(self, kQuickSet16Static),
1466 self,
1467 referrer);
1468
1469 size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1470 0U, 0U,
1471 StubTest::GetEntrypoint(self, kQuickGetCharStatic),
1472 self,
1473 referrer);
1474
1475 EXPECT_EQ(values[i], static_cast<uint16_t>(res)) << "Iteration " << i;
1476 }
1477#else
1478 LOG(INFO) << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA;
1479 // Force-print to std::cout so it's also outside the logcat.
1480 std::cout << "Skipping set_char_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1481#endif
1482}
1483static void GetSetShortStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
1484 mirror::ArtMethod* referrer, StubTest* test)
1485 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1486#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
1487 constexpr size_t num_values = 6;
1488 int16_t values[num_values] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
1489
1490 for (size_t i = 0; i < num_values; ++i) {
1491 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1492 static_cast<size_t>(values[i]),
1493 0U,
1494 StubTest::GetEntrypoint(self, kQuickSet16Static),
1495 self,
1496 referrer);
1497
1498 size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1499 0U, 0U,
1500 StubTest::GetEntrypoint(self, kQuickGetShortStatic),
1501 self,
1502 referrer);
1503
1504 EXPECT_EQ(static_cast<int16_t>(res), values[i]) << "Iteration " << i;
1505 }
1506#else
1507 LOG(INFO) << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA;
1508 // Force-print to std::cout so it's also outside the logcat.
1509 std::cout << "Skipping set_short_static as I don't know how to do that on " << kRuntimeISA << std::endl;
1510#endif
1511}
1512
1513#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
1514extern "C" void art_quick_set16_instance(void);
1515extern "C" void art_quick_get_short_instance(void);
1516extern "C" void art_quick_get_char_instance(void);
1517#endif
1518
1519static void GetSetCharInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
1520 Thread* self, mirror::ArtMethod* referrer, StubTest* test)
1521 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1522#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
1523 constexpr size_t num_values = 6;
1524 uint16_t values[num_values] = { 0, 1, 2, 255, 32768, 0xFFFF };
1525
1526 for (size_t i = 0; i < num_values; ++i) {
1527 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1528 reinterpret_cast<size_t>(obj->Get()),
1529 static_cast<size_t>(values[i]),
1530 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1531 self,
1532 referrer);
1533
1534 uint16_t res = f->Get()->GetChar(obj->Get());
1535 EXPECT_EQ(res, values[i]) << "Iteration " << i;
1536 f->Get()->SetChar<false>(obj->Get(), ++res);
1537
1538 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1539 reinterpret_cast<size_t>(obj->Get()),
1540 0U,
1541 StubTest::GetEntrypoint(self, kQuickGetCharInstance),
1542 self,
1543 referrer);
1544 EXPECT_EQ(res, static_cast<uint16_t>(res2));
1545 }
1546#else
1547 LOG(INFO) << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA;
1548 // Force-print to std::cout so it's also outside the logcat.
1549 std::cout << "Skipping set_char_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1550#endif
1551}
1552static void GetSetShortInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
1553 Thread* self, mirror::ArtMethod* referrer, StubTest* test)
1554 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1555#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
1556 constexpr size_t num_values = 6;
1557 int16_t values[num_values] = { -0x7FFF, -32768, 0, 255, 32767, 0x7FFE };
1558
1559 for (size_t i = 0; i < num_values; ++i) {
1560 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1561 reinterpret_cast<size_t>(obj->Get()),
1562 static_cast<size_t>(values[i]),
1563 StubTest::GetEntrypoint(self, kQuickSet16Instance),
1564 self,
1565 referrer);
1566
1567 int16_t res = f->Get()->GetShort(obj->Get());
1568 EXPECT_EQ(res, values[i]) << "Iteration " << i;
1569 f->Get()->SetShort<false>(obj->Get(), ++res);
1570
1571 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1572 reinterpret_cast<size_t>(obj->Get()),
1573 0U,
1574 StubTest::GetEntrypoint(self, kQuickGetShortInstance),
1575 self,
1576 referrer);
1577 EXPECT_EQ(res, static_cast<int16_t>(res2));
1578 }
1579#else
1580 LOG(INFO) << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA;
1581 // Force-print to std::cout so it's also outside the logcat.
1582 std::cout << "Skipping set_short_instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1583#endif
1584}
1585
1586#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
1587extern "C" void art_quick_set32_static(void);
1588extern "C" void art_quick_get32_static(void);
1589#endif
1590
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001591static void GetSet32Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001592 mirror::ArtMethod* referrer, StubTest* test)
1593 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogersc3ccc102014-06-25 11:52:14 -07001594#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001595 constexpr size_t num_values = 7;
1596 uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
1597
1598 for (size_t i = 0; i < num_values; ++i) {
1599 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1600 static_cast<size_t>(values[i]),
1601 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001602 StubTest::GetEntrypoint(self, kQuickSet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001603 self,
1604 referrer);
1605
1606 size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1607 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001608 StubTest::GetEntrypoint(self, kQuickGet32Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001609 self,
1610 referrer);
1611
1612 EXPECT_EQ(res, values[i]) << "Iteration " << i;
1613 }
1614#else
1615 LOG(INFO) << "Skipping set32static as I don't know how to do that on " << kRuntimeISA;
1616 // Force-print to std::cout so it's also outside the logcat.
1617 std::cout << "Skipping set32static as I don't know how to do that on " << kRuntimeISA << std::endl;
1618#endif
1619}
1620
1621
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001622static void GetSet32Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001623 Thread* self, mirror::ArtMethod* referrer, StubTest* test)
1624 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogersc3ccc102014-06-25 11:52:14 -07001625#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001626 constexpr size_t num_values = 7;
1627 uint32_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF };
1628
1629 for (size_t i = 0; i < num_values; ++i) {
1630 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001631 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001632 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001633 StubTest::GetEntrypoint(self, kQuickSet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001634 self,
1635 referrer);
1636
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001637 int32_t res = f->Get()->GetInt(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001638 EXPECT_EQ(res, static_cast<int32_t>(values[i])) << "Iteration " << i;
1639
1640 res++;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001641 f->Get()->SetInt<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001642
1643 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001644 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001645 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001646 StubTest::GetEntrypoint(self, kQuickGet32Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001647 self,
1648 referrer);
1649 EXPECT_EQ(res, static_cast<int32_t>(res2));
1650 }
1651#else
1652 LOG(INFO) << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA;
1653 // Force-print to std::cout so it's also outside the logcat.
1654 std::cout << "Skipping set32instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1655#endif
1656}
1657
1658
Ian Rogersc3ccc102014-06-25 11:52:14 -07001659#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001660
1661static void set_and_check_static(uint32_t f_idx, mirror::Object* val, Thread* self,
1662 mirror::ArtMethod* referrer, StubTest* test)
1663 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1664 test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1665 reinterpret_cast<size_t>(val),
1666 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001667 StubTest::GetEntrypoint(self, kQuickSetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001668 self,
1669 referrer);
1670
1671 size_t res = test->Invoke3WithReferrer(static_cast<size_t>(f_idx),
1672 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001673 StubTest::GetEntrypoint(self, kQuickGetObjStatic),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001674 self,
1675 referrer);
1676
1677 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1678}
1679#endif
1680
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001681static void GetSetObjStatic(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001682 mirror::ArtMethod* referrer, StubTest* test)
1683 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogersc3ccc102014-06-25 11:52:14 -07001684#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001685 set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);
1686
1687 // Allocate a string object for simplicity.
1688 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
1689 set_and_check_static((*f)->GetDexFieldIndex(), str, self, referrer, test);
1690
1691 set_and_check_static((*f)->GetDexFieldIndex(), nullptr, self, referrer, test);
1692#else
1693 LOG(INFO) << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA;
1694 // Force-print to std::cout so it's also outside the logcat.
1695 std::cout << "Skipping setObjstatic as I don't know how to do that on " << kRuntimeISA << std::endl;
1696#endif
1697}
1698
1699
Ian Rogersc3ccc102014-06-25 11:52:14 -07001700#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001701static void set_and_check_instance(Handle<mirror::ArtField>* f, mirror::Object* trg,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001702 mirror::Object* val, Thread* self, mirror::ArtMethod* referrer,
1703 StubTest* test)
1704 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1705 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1706 reinterpret_cast<size_t>(trg),
1707 reinterpret_cast<size_t>(val),
Andreas Gampe29b38412014-08-13 00:15:43 -07001708 StubTest::GetEntrypoint(self, kQuickSetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001709 self,
1710 referrer);
1711
1712 size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1713 reinterpret_cast<size_t>(trg),
1714 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001715 StubTest::GetEntrypoint(self, kQuickGetObjInstance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001716 self,
1717 referrer);
1718
1719 EXPECT_EQ(res, reinterpret_cast<size_t>(val)) << "Value " << val;
1720
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001721 EXPECT_EQ(val, f->Get()->GetObj(trg));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001722}
1723#endif
1724
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001725static void GetSetObjInstance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001726 Thread* self, mirror::ArtMethod* referrer, StubTest* test)
1727 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogersc3ccc102014-06-25 11:52:14 -07001728#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001729 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001730
1731 // Allocate a string object for simplicity.
1732 mirror::String* str = mirror::String::AllocFromModifiedUtf8(self, "Test");
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001733 set_and_check_instance(f, obj->Get(), str, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001734
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001735 set_and_check_instance(f, obj->Get(), nullptr, self, referrer, test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001736#else
1737 LOG(INFO) << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA;
1738 // Force-print to std::cout so it's also outside the logcat.
1739 std::cout << "Skipping setObjinstance as I don't know how to do that on " << kRuntimeISA << std::endl;
1740#endif
1741}
1742
1743
1744// TODO: Complete these tests for 32b architectures.
1745
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001746static void GetSet64Static(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f, Thread* self,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001747 mirror::ArtMethod* referrer, StubTest* test)
1748 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogersc3ccc102014-06-25 11:52:14 -07001749#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001750 constexpr size_t num_values = 8;
1751 uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
1752
1753 for (size_t i = 0; i < num_values; ++i) {
1754 test->Invoke3UWithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1755 values[i],
Andreas Gampe29b38412014-08-13 00:15:43 -07001756 StubTest::GetEntrypoint(self, kQuickSet64Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001757 self,
1758 referrer);
1759
1760 size_t res = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
1761 0U, 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001762 StubTest::GetEntrypoint(self, kQuickGet64Static),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001763 self,
1764 referrer);
1765
1766 EXPECT_EQ(res, values[i]) << "Iteration " << i;
1767 }
1768#else
1769 LOG(INFO) << "Skipping set64static as I don't know how to do that on " << kRuntimeISA;
1770 // Force-print to std::cout so it's also outside the logcat.
1771 std::cout << "Skipping set64static as I don't know how to do that on " << kRuntimeISA << std::endl;
1772#endif
1773}
1774
1775
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001776static void GetSet64Instance(Handle<mirror::Object>* obj, Handle<mirror::ArtField>* f,
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001777 Thread* self, mirror::ArtMethod* referrer, StubTest* test)
1778 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogersc3ccc102014-06-25 11:52:14 -07001779#if (defined(__x86_64__) && !defined(__APPLE__)) || defined(__aarch64__)
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001780 constexpr size_t num_values = 8;
1781 uint64_t values[num_values] = { 0, 1, 2, 255, 32768, 1000000, 0xFFFFFFFF, 0xFFFFFFFFFFFF };
1782
1783 for (size_t i = 0; i < num_values; ++i) {
1784 test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001785 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001786 static_cast<size_t>(values[i]),
Andreas Gampe29b38412014-08-13 00:15:43 -07001787 StubTest::GetEntrypoint(self, kQuickSet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001788 self,
1789 referrer);
1790
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001791 int64_t res = f->Get()->GetLong(obj->Get());
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001792 EXPECT_EQ(res, static_cast<int64_t>(values[i])) << "Iteration " << i;
1793
1794 res++;
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001795 f->Get()->SetLong<false>(obj->Get(), res);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001796
1797 size_t res2 = test->Invoke3WithReferrer(static_cast<size_t>((*f)->GetDexFieldIndex()),
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001798 reinterpret_cast<size_t>(obj->Get()),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001799 0U,
Andreas Gampe29b38412014-08-13 00:15:43 -07001800 StubTest::GetEntrypoint(self, kQuickGet64Instance),
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001801 self,
1802 referrer);
1803 EXPECT_EQ(res, static_cast<int64_t>(res2));
1804 }
1805#else
1806 LOG(INFO) << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA;
1807 // Force-print to std::cout so it's also outside the logcat.
1808 std::cout << "Skipping set64instance as I don't know how to do that on " << kRuntimeISA << std::endl;
1809#endif
1810}
1811
1812static void TestFields(Thread* self, StubTest* test, Primitive::Type test_type) {
1813 // garbage is created during ClassLinker::Init
1814
1815 JNIEnv* env = Thread::Current()->GetJniEnv();
1816 jclass jc = env->FindClass("AllFields");
1817 CHECK(jc != NULL);
1818 jobject o = env->AllocObject(jc);
1819 CHECK(o != NULL);
1820
1821 ScopedObjectAccess soa(self);
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001822 StackHandleScope<5> hs(self);
1823 Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(o)));
1824 Handle<mirror::Class> c(hs.NewHandle(obj->GetClass()));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001825 // Need a method as a referrer
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001826 Handle<mirror::ArtMethod> m(hs.NewHandle(c->GetDirectMethod(0)));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001827
1828 // Play with it...
1829
1830 // Static fields.
1831 {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001832 Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetSFields()));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001833 int32_t num_fields = fields->GetLength();
1834 for (int32_t i = 0; i < num_fields; ++i) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001835 StackHandleScope<1> hs(self);
1836 Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001837
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -07001838 Primitive::Type type = f->GetTypeAsPrimitiveType();
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001839 switch (type) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001840 case Primitive::Type::kPrimBoolean:
1841 if (test_type == type) {
1842 GetSetBooleanStatic(&obj, &f, self, m.Get(), test);
1843 }
1844 break;
1845 case Primitive::Type::kPrimByte:
1846 if (test_type == type) {
1847 GetSetByteStatic(&obj, &f, self, m.Get(), test);
1848 }
1849 break;
1850 case Primitive::Type::kPrimChar:
1851 if (test_type == type) {
1852 GetSetCharStatic(&obj, &f, self, m.Get(), test);
1853 }
1854 break;
1855 case Primitive::Type::kPrimShort:
1856 if (test_type == type) {
1857 GetSetShortStatic(&obj, &f, self, m.Get(), test);
1858 }
1859 break;
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001860 case Primitive::Type::kPrimInt:
1861 if (test_type == type) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001862 GetSet32Static(&obj, &f, self, m.Get(), test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001863 }
1864 break;
1865
1866 case Primitive::Type::kPrimLong:
1867 if (test_type == type) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001868 GetSet64Static(&obj, &f, self, m.Get(), test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001869 }
1870 break;
1871
1872 case Primitive::Type::kPrimNot:
1873 // Don't try array.
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -07001874 if (test_type == type && f->GetTypeDescriptor()[0] != '[') {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001875 GetSetObjStatic(&obj, &f, self, m.Get(), test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001876 }
1877 break;
1878
1879 default:
1880 break; // Skip.
1881 }
1882 }
1883 }
1884
1885 // Instance fields.
1886 {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001887 Handle<mirror::ObjectArray<mirror::ArtField>> fields(hs.NewHandle(c.Get()->GetIFields()));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001888 int32_t num_fields = fields->GetLength();
1889 for (int32_t i = 0; i < num_fields; ++i) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001890 StackHandleScope<1> hs(self);
1891 Handle<mirror::ArtField> f(hs.NewHandle(fields->Get(i)));
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001892
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -07001893 Primitive::Type type = f->GetTypeAsPrimitiveType();
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001894 switch (type) {
Fred Shih37f05ef2014-07-16 18:38:08 -07001895 case Primitive::Type::kPrimBoolean:
1896 if (test_type == type) {
1897 GetSetBooleanInstance(&obj, &f, self, m.Get(), test);
1898 }
1899 break;
1900 case Primitive::Type::kPrimByte:
1901 if (test_type == type) {
1902 GetSetByteInstance(&obj, &f, self, m.Get(), test);
1903 }
1904 break;
1905 case Primitive::Type::kPrimChar:
1906 if (test_type == type) {
1907 GetSetCharInstance(&obj, &f, self, m.Get(), test);
1908 }
1909 break;
1910 case Primitive::Type::kPrimShort:
1911 if (test_type == type) {
1912 GetSetShortInstance(&obj, &f, self, m.Get(), test);
1913 }
1914 break;
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001915 case Primitive::Type::kPrimInt:
1916 if (test_type == type) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001917 GetSet32Instance(&obj, &f, self, m.Get(), test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001918 }
1919 break;
1920
1921 case Primitive::Type::kPrimLong:
1922 if (test_type == type) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001923 GetSet64Instance(&obj, &f, self, m.Get(), test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001924 }
1925 break;
1926
1927 case Primitive::Type::kPrimNot:
1928 // Don't try array.
Mathieu Chartier61c5ebc2014-06-05 17:42:53 -07001929 if (test_type == type && f->GetTypeDescriptor()[0] != '[') {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001930 GetSetObjInstance(&obj, &f, self, m.Get(), test);
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001931 }
1932 break;
1933
1934 default:
1935 break; // Skip.
1936 }
1937 }
1938 }
1939
1940 // TODO: Deallocate things.
1941}
1942
Fred Shih37f05ef2014-07-16 18:38:08 -07001943TEST_F(StubTest, Fields8) {
1944 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1945
1946 Thread* self = Thread::Current();
1947
1948 self->TransitionFromSuspendedToRunnable();
1949 LoadDex("AllFields");
1950 bool started = runtime_->Start();
1951 CHECK(started);
1952
1953 TestFields(self, this, Primitive::Type::kPrimBoolean);
1954 TestFields(self, this, Primitive::Type::kPrimByte);
1955}
1956
1957TEST_F(StubTest, Fields16) {
1958 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1959
1960 Thread* self = Thread::Current();
1961
1962 self->TransitionFromSuspendedToRunnable();
1963 LoadDex("AllFields");
1964 bool started = runtime_->Start();
1965 CHECK(started);
1966
1967 TestFields(self, this, Primitive::Type::kPrimChar);
1968 TestFields(self, this, Primitive::Type::kPrimShort);
1969}
Andreas Gampe6e4e59c2014-05-05 20:11:02 -07001970
1971TEST_F(StubTest, Fields32) {
1972 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1973
1974 Thread* self = Thread::Current();
1975
1976 self->TransitionFromSuspendedToRunnable();
1977 LoadDex("AllFields");
1978 bool started = runtime_->Start();
1979 CHECK(started);
1980
1981 TestFields(self, this, Primitive::Type::kPrimInt);
1982}
1983
1984TEST_F(StubTest, FieldsObj) {
1985 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1986
1987 Thread* self = Thread::Current();
1988
1989 self->TransitionFromSuspendedToRunnable();
1990 LoadDex("AllFields");
1991 bool started = runtime_->Start();
1992 CHECK(started);
1993
1994 TestFields(self, this, Primitive::Type::kPrimNot);
1995}
1996
1997TEST_F(StubTest, Fields64) {
1998 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
1999
2000 Thread* self = Thread::Current();
2001
2002 self->TransitionFromSuspendedToRunnable();
2003 LoadDex("AllFields");
2004 bool started = runtime_->Start();
2005 CHECK(started);
2006
2007 TestFields(self, this, Primitive::Type::kPrimLong);
2008}
2009
Andreas Gampe51f76352014-05-21 08:28:48 -07002010
// Exercises the IMT (interface method table) conflict trampoline: invokes the
// interface method List.contains on an ArrayList whose embedded IMT slot for
// that method holds the conflict method, once before and once after adding the
// probe object, and checks the stub returns JNI_FALSE then JNI_TRUE.
TEST_F(StubTest, IMT) {
#if defined(__i386__) || defined(__arm__) || defined(__aarch64__) || (defined(__x86_64__) && !defined(__APPLE__))
  TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();

  Thread* self = Thread::Current();

  ScopedObjectAccess soa(self);
  // Seven handles are created below (two methods, three objects via
  // NewHandle plus the two object instances) — keep the count in sync.
  StackHandleScope<7> hs(self);

  JNIEnv* env = Thread::Current()->GetJniEnv();

  // ArrayList

  // Load ArrayList and used methods (JNI).
  jclass arraylist_jclass = env->FindClass("java/util/ArrayList");
  ASSERT_NE(nullptr, arraylist_jclass);
  jmethodID arraylist_constructor = env->GetMethodID(arraylist_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, arraylist_constructor);
  jmethodID contains_jmethod = env->GetMethodID(arraylist_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, contains_jmethod);
  jmethodID add_jmethod = env->GetMethodID(arraylist_jclass, "add", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, add_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> contains_amethod(hs.NewHandle(soa.DecodeMethod(contains_jmethod)));

  // Patch up ArrayList.contains: if it has no compiled code yet, route it
  // through the interpreter bridge so the trampoline has a valid target.
  if (contains_amethod.Get()->GetEntryPointFromQuickCompiledCode() == nullptr) {
    contains_amethod.Get()->SetEntryPointFromQuickCompiledCode(reinterpret_cast<void*>(
        StubTest::GetEntrypoint(self, kQuickQuickToInterpreterBridge)));
  }

  // List

  // Load List and used methods (JNI).
  jclass list_jclass = env->FindClass("java/util/List");
  ASSERT_NE(nullptr, list_jclass);
  jmethodID inf_contains_jmethod = env->GetMethodID(list_jclass, "contains", "(Ljava/lang/Object;)Z");
  ASSERT_NE(nullptr, inf_contains_jmethod);

  // Get mirror representation.
  Handle<mirror::ArtMethod> inf_contains(hs.NewHandle(soa.DecodeMethod(inf_contains_jmethod)));

  // Object

  jclass obj_jclass = env->FindClass("java/lang/Object");
  ASSERT_NE(nullptr, obj_jclass);
  jmethodID obj_constructor = env->GetMethodID(obj_jclass, "<init>", "()V");
  ASSERT_NE(nullptr, obj_constructor);

  // Sanity check: check that there is a conflict for List.contains in ArrayList.
  // The IMT slot is the interface method's dex index modulo the table size.

  mirror::Class* arraylist_class = soa.Decode<mirror::Class*>(arraylist_jclass);
  mirror::ArtMethod* m = arraylist_class->GetEmbeddedImTableEntry(
      inf_contains->GetDexMethodIndex() % mirror::Class::kImtSize);

  // If the boot image layout changed so the slot no longer conflicts, the
  // test would not cover the trampoline — warn and bail instead of failing.
  if (!m->IsImtConflictMethod()) {
    LOG(WARNING) << "Test is meaningless, no IMT conflict in setup: " <<
        PrettyMethod(m, true);
    LOG(WARNING) << "Please update StubTest.IMT.";
    return;
  }

  // Create instances.

  jobject jarray_list = env->NewObject(arraylist_jclass, arraylist_constructor);
  ASSERT_NE(nullptr, jarray_list);
  Handle<mirror::Object> array_list(hs.NewHandle(soa.Decode<mirror::Object*>(jarray_list)));

  jobject jobj = env->NewObject(obj_jclass, obj_constructor);
  ASSERT_NE(nullptr, jobj);
  Handle<mirror::Object> obj(hs.NewHandle(soa.Decode<mirror::Object*>(jobj)));

  // Invoke the conflict trampoline directly: receiver + argument, with the
  // interface method's dex index passed as the hidden argument.

  size_t result =
      Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                   reinterpret_cast<size_t>(obj.Get()),
                                   StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                   self, contains_amethod.Get(),
                                   static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  // The list is freshly constructed and empty, so contains(obj) is false.
  EXPECT_EQ(static_cast<size_t>(JNI_FALSE), result);

  // Add object.

  env->CallBooleanMethod(jarray_list, add_jmethod, jobj);

  ASSERT_FALSE(self->IsExceptionPending()) << PrettyTypeOf(self->GetException(nullptr));

  // Invoke again; now the object is present, so contains(obj) must be true.

  result = Invoke3WithReferrerAndHidden(0U, reinterpret_cast<size_t>(array_list.Get()),
                                        reinterpret_cast<size_t>(obj.Get()),
                                        StubTest::GetEntrypoint(self, kQuickQuickImtConflictTrampoline),
                                        self, contains_amethod.Get(),
                                        static_cast<size_t>(inf_contains.Get()->GetDexMethodIndex()));

  ASSERT_FALSE(self->IsExceptionPending());
  EXPECT_EQ(static_cast<size_t>(JNI_TRUE), result);
#else
  LOG(INFO) << "Skipping imt as I don't know how to do that on " << kRuntimeISA;
  // Force-print to std::cout so it's also outside the logcat.
  std::cout << "Skipping imt as I don't know how to do that on " << kRuntimeISA << std::endl;
#endif
}
2118
Andreas Gampe6aac3552014-06-09 14:55:53 -07002119TEST_F(StubTest, StringIndexOf) {
2120#if defined(__arm__) || defined(__aarch64__)
Hiroshi Yamauchi52fa8142014-06-16 12:59:49 -07002121 TEST_DISABLED_FOR_HEAP_REFERENCE_POISONING();
2122
Andreas Gampe6aac3552014-06-09 14:55:53 -07002123 Thread* self = Thread::Current();
2124 ScopedObjectAccess soa(self);
2125 // garbage is created during ClassLinker::Init
2126
2127 // Create some strings
2128 // Use array so we can index into it and use a matrix for expected results
2129 // Setup: The first half is standard. The second half uses a non-zero offset.
2130 // TODO: Shared backing arrays.
2131 static constexpr size_t kStringCount = 7;
2132 const char* c_str[kStringCount] = { "", "a", "ba", "cba", "dcba", "edcba", "asdfghjkl" };
2133 static constexpr size_t kCharCount = 5;
2134 const char c_char[kCharCount] = { 'a', 'b', 'c', 'd', 'e' };
2135
2136 StackHandleScope<kStringCount> hs(self);
2137 Handle<mirror::String> s[kStringCount];
2138
2139 for (size_t i = 0; i < kStringCount; ++i) {
2140 s[i] = hs.NewHandle(mirror::String::AllocFromModifiedUtf8(soa.Self(), c_str[i]));
2141 }
2142
2143 // Matrix of expectations. First component is first parameter. Note we only check against the
2144 // sign, not the value. As we are testing random offsets, we need to compute this and need to
2145 // rely on String::CompareTo being correct.
2146 static constexpr size_t kMaxLen = 9;
2147 DCHECK_LE(strlen(c_str[kStringCount-1]), kMaxLen) << "Please fix the indexof test.";
2148
2149 // Last dimension: start, offset by 1.
2150 int32_t expected[kStringCount][kCharCount][kMaxLen + 3];
2151 for (size_t x = 0; x < kStringCount; ++x) {
2152 for (size_t y = 0; y < kCharCount; ++y) {
2153 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2154 expected[x][y][z] = s[x]->FastIndexOf(c_char[y], static_cast<int32_t>(z) - 1);
2155 }
2156 }
2157 }
2158
2159 // Play with it...
2160
2161 for (size_t x = 0; x < kStringCount; ++x) {
2162 for (size_t y = 0; y < kCharCount; ++y) {
2163 for (size_t z = 0; z <= kMaxLen + 2; ++z) {
2164 int32_t start = static_cast<int32_t>(z) - 1;
2165
2166 // Test string_compareto x y
2167 size_t result = Invoke3(reinterpret_cast<size_t>(s[x].Get()), c_char[y], start,
Andreas Gampe29b38412014-08-13 00:15:43 -07002168 StubTest::GetEntrypoint(self, kQuickIndexOf), self);
Andreas Gampe6aac3552014-06-09 14:55:53 -07002169
2170 EXPECT_FALSE(self->IsExceptionPending());
2171
2172 // The result is a 32b signed integer
2173 union {
2174 size_t r;
2175 int32_t i;
2176 } conv;
2177 conv.r = result;
2178
2179 EXPECT_EQ(expected[x][y][z], conv.i) << "Wrong result for " << c_str[x] << " / " <<
2180 c_char[y] << " @ " << start;
2181 }
2182 }
2183 }
2184
2185 // TODO: Deallocate things.
2186
2187 // Tests done.
2188#else
2189 LOG(INFO) << "Skipping indexof as I don't know how to do that on " << kRuntimeISA;
2190 // Force-print to std::cout so it's also outside the logcat.
2191 std::cout << "Skipping indexof as I don't know how to do that on " << kRuntimeISA << std::endl;
Andreas Gampe51f76352014-05-21 08:28:48 -07002192#endif
2193}
2194
Andreas Gampe525cde22014-04-22 15:44:50 -07002195} // namespace art