blob: 3a448a54098c229f3354862e37943b5a972880a3 [file] [log] [blame]
Ian Rogersef7d42f2014-01-06 12:55:46 -08001/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "asm_support_x86_64.S"
18
Serguei Katkovc3801912014-07-08 17:21:53 +070019MACRO0(SETUP_FP_CALLEE_SAVE_FRAME)
20 // Create space for ART FP callee-saved registers
Christopher Ferrisae912072014-07-11 13:08:40 -070021 subq MACRO_LITERAL(4 * 8), %rsp
Serguei Katkovc3801912014-07-08 17:21:53 +070022 CFI_ADJUST_CFA_OFFSET(4 * 8)
23 movq %xmm12, 0(%rsp)
24 movq %xmm13, 8(%rsp)
25 movq %xmm14, 16(%rsp)
26 movq %xmm15, 24(%rsp)
27END_MACRO
28
29MACRO0(RESTORE_FP_CALLEE_SAVE_FRAME)
30 // Restore ART FP callee-saved registers
31 movq 0(%rsp), %xmm12
32 movq 8(%rsp), %xmm13
33 movq 16(%rsp), %xmm14
34 movq 24(%rsp), %xmm15
Christopher Ferrisae912072014-07-11 13:08:40 -070035 addq MACRO_LITERAL(4 * 8), %rsp
Serguei Katkovc3801912014-07-08 17:21:53 +070036 CFI_ADJUST_CFA_OFFSET(- 4 * 8)
37END_MACRO
38
Ian Rogersef7d42f2014-01-06 12:55:46 -080039// For x86, the CFA is esp+4, the address above the pushed return address on the stack.
40
41 /*
42 * Macro that sets up the callee save frame to conform with
43 * Runtime::CreateCalleeSaveMethod(kSaveAll)
44 */
45MACRO0(SETUP_SAVE_ALL_CALLEE_SAVE_FRAME)
Ian Rogersc3ccc102014-06-25 11:52:14 -070046#if defined(__APPLE__)
47 int3
48 int3
49#else
Andreas Gampebf6b92a2014-03-05 16:11:04 -080050 // R10 := Runtime::Current()
51 movq _ZN3art7Runtime9instance_E@GOTPCREL(%rip), %r10
52 movq (%r10), %r10
Ian Rogers47d00c02014-04-16 17:33:27 -070053 // Save callee save registers to agree with core spills bitmap.
Andreas Gampebf6b92a2014-03-05 16:11:04 -080054 PUSH r15 // Callee save.
55 PUSH r14 // Callee save.
56 PUSH r13 // Callee save.
57 PUSH r12 // Callee save.
58 PUSH rbp // Callee save.
59 PUSH rbx // Callee save.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -070060 // Create space for FPR args, plus space for StackReference<ArtMethod>.
61 subq MACRO_LITERAL(4 * 8 + 8), %rsp
62 CFI_ADJUST_CFA_OFFSET(4 * 8 + 8)
Serguei Katkovc3801912014-07-08 17:21:53 +070063 // Save FPRs.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -070064 movq %xmm12, 8(%rsp)
65 movq %xmm13, 16(%rsp)
66 movq %xmm14, 24(%rsp)
67 movq %xmm15, 32(%rsp)
Ian Rogers47d00c02014-04-16 17:33:27 -070068 // R10 := ArtMethod* for save all callee save frame method.
Hiroshi Yamauchiab088112014-07-14 13:00:14 -070069 THIS_LOAD_REQUIRES_READ_BARRIER
Andreas Gampebf6b92a2014-03-05 16:11:04 -080070 movq RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET(%r10), %r10
71 // Store ArtMethod* to bottom of stack.
72 movq %r10, 0(%rsp)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -070073 // Store rsp as the top quick frame.
74 movq %rsp, %gs:THREAD_TOP_QUICK_FRAME_OFFSET
Andreas Gampe5c1e4352014-04-21 19:28:24 -070075
76 // Ugly compile-time check, but we only have the preprocessor.
77 // Last +8: implicit return address pushed on stack when caller made call.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -070078#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVE != 6 * 8 + 4 * 8 + 8 + 8)
Andreas Gampe5c1e4352014-04-21 19:28:24 -070079#error "SAVE_ALL_CALLEE_SAVE_FRAME(X86_64) size not as expected."
80#endif
Ian Rogersc3ccc102014-06-25 11:52:14 -070081#endif // __APPLE__
Ian Rogersef7d42f2014-01-06 12:55:46 -080082END_MACRO
83
84 /*
85 * Macro that sets up the callee save frame to conform with
86 * Runtime::CreateCalleeSaveMethod(kRefsOnly)
87 */
Ian Rogers1d8cdbc2014-09-22 22:51:09 -070088MACRO0(SETUP_REFS_ONLY_CALLEE_SAVE_FRAME)
Ian Rogersc3ccc102014-06-25 11:52:14 -070089#if defined(__APPLE__)
90 int3
91 int3
92#else
Ian Rogers47d00c02014-04-16 17:33:27 -070093 // R10 := Runtime::Current()
94 movq _ZN3art7Runtime9instance_E@GOTPCREL(%rip), %r10
95 movq (%r10), %r10
96 // Save callee and GPR args, mixed together to agree with core spills bitmap.
97 PUSH r15 // Callee save.
98 PUSH r14 // Callee save.
99 PUSH r13 // Callee save.
100 PUSH r12 // Callee save.
101 PUSH rbp // Callee save.
102 PUSH rbx // Callee save.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700103 // Create space for FPR args, plus space for StackReference<ArtMethod>.
104 subq LITERAL(8 + 4 * 8), %rsp
105 CFI_ADJUST_CFA_OFFSET(8 + 4 * 8)
Serguei Katkovc3801912014-07-08 17:21:53 +0700106 // Save FPRs.
107 movq %xmm12, 8(%rsp)
108 movq %xmm13, 16(%rsp)
109 movq %xmm14, 24(%rsp)
110 movq %xmm15, 32(%rsp)
Ian Rogers47d00c02014-04-16 17:33:27 -0700111 // R10 := ArtMethod* for refs only callee save frame method.
Hiroshi Yamauchiab088112014-07-14 13:00:14 -0700112 THIS_LOAD_REQUIRES_READ_BARRIER
Ian Rogers47d00c02014-04-16 17:33:27 -0700113 movq RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET(%r10), %r10
114 // Store ArtMethod* to bottom of stack.
115 movq %r10, 0(%rsp)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700116 // Store rsp as the stop quick frame.
117 movq %rsp, %gs:THREAD_TOP_QUICK_FRAME_OFFSET
Andreas Gampe5c1e4352014-04-21 19:28:24 -0700118
119 // Ugly compile-time check, but we only have the preprocessor.
120 // Last +8: implicit return address pushed on stack when caller made call.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700121#if (FRAME_SIZE_REFS_ONLY_CALLEE_SAVE != 6 * 8 + 4 * 8 + 8 + 8)
Andreas Gampe5c1e4352014-04-21 19:28:24 -0700122#error "REFS_ONLY_CALLEE_SAVE_FRAME(X86_64) size not as expected."
123#endif
Ian Rogersc3ccc102014-06-25 11:52:14 -0700124#endif // __APPLE__
Ian Rogersef7d42f2014-01-06 12:55:46 -0800125END_MACRO
126
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700127MACRO0(RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME)
Serguei Katkovc3801912014-07-08 17:21:53 +0700128 movq 8(%rsp), %xmm12
129 movq 16(%rsp), %xmm13
130 movq 24(%rsp), %xmm14
131 movq 32(%rsp), %xmm15
132 addq LITERAL(8 + 4*8), %rsp
133 CFI_ADJUST_CFA_OFFSET(-8 - 4*8)
Ian Rogers47d00c02014-04-16 17:33:27 -0700134 // TODO: optimize by not restoring callee-saves restored by the ABI
135 POP rbx
136 POP rbp
137 POP r12
138 POP r13
139 POP r14
140 POP r15
Ian Rogersef7d42f2014-01-06 12:55:46 -0800141END_MACRO
142
143 /*
144 * Macro that sets up the callee save frame to conform with
145 * Runtime::CreateCalleeSaveMethod(kRefsAndArgs)
146 */
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700147MACRO0(SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME)
Ian Rogersc3ccc102014-06-25 11:52:14 -0700148#if defined(__APPLE__)
149 int3
150 int3
151#else
Ian Rogers936b37f2014-02-14 00:52:24 -0800152 // R10 := Runtime::Current()
153 movq _ZN3art7Runtime9instance_E@GOTPCREL(%rip), %r10
154 movq (%r10), %r10
155 // Save callee and GPR args, mixed together to agree with core spills bitmap.
156 PUSH r15 // Callee save.
157 PUSH r14 // Callee save.
158 PUSH r13 // Callee save.
159 PUSH r12 // Callee save.
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800160 PUSH r9 // Quick arg 5.
161 PUSH r8 // Quick arg 4.
162 PUSH rsi // Quick arg 1.
Ian Rogers936b37f2014-02-14 00:52:24 -0800163 PUSH rbp // Callee save.
164 PUSH rbx // Callee save.
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800165 PUSH rdx // Quick arg 2.
166 PUSH rcx // Quick arg 3.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700167 // Create space for FPR args and create 2 slots, 1 of padding and 1 for the
168 // StackReference<ArtMethod>.
Serguei Katkovc3801912014-07-08 17:21:53 +0700169 subq MACRO_LITERAL(80 + 4 * 8), %rsp
170 CFI_ADJUST_CFA_OFFSET(80 + 4 * 8)
Ian Rogers936b37f2014-02-14 00:52:24 -0800171 // R10 := ArtMethod* for ref and args callee save frame method.
Hiroshi Yamauchiab088112014-07-14 13:00:14 -0700172 THIS_LOAD_REQUIRES_READ_BARRIER
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700173 movq RUNTIME_REFS_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET(%r10), %r10
Ian Rogers936b37f2014-02-14 00:52:24 -0800174 // Save FPRs.
175 movq %xmm0, 16(%rsp)
176 movq %xmm1, 24(%rsp)
177 movq %xmm2, 32(%rsp)
178 movq %xmm3, 40(%rsp)
179 movq %xmm4, 48(%rsp)
180 movq %xmm5, 56(%rsp)
181 movq %xmm6, 64(%rsp)
182 movq %xmm7, 72(%rsp)
Serguei Katkovc3801912014-07-08 17:21:53 +0700183 movq %xmm12, 80(%rsp)
184 movq %xmm13, 88(%rsp)
185 movq %xmm14, 96(%rsp)
186 movq %xmm15, 104(%rsp)
Ian Rogers936b37f2014-02-14 00:52:24 -0800187 // Store ArtMethod* to bottom of stack.
188 movq %r10, 0(%rsp)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700189 // Store rsp as the top quick frame.
190 movq %rsp, %gs:THREAD_TOP_QUICK_FRAME_OFFSET
Andreas Gampe5c1e4352014-04-21 19:28:24 -0700191
192 // Ugly compile-time check, but we only have the preprocessor.
193 // Last +8: implicit return address pushed on stack when caller made call.
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700194#if (FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE != 11 * 8 + 4 * 8 + 80 + 8)
Andreas Gampe5c1e4352014-04-21 19:28:24 -0700195#error "REFS_AND_ARGS_CALLEE_SAVE_FRAME(X86_64) size not as expected."
196#endif
Ian Rogersc3ccc102014-06-25 11:52:14 -0700197#endif // __APPLE__
Ian Rogersef7d42f2014-01-06 12:55:46 -0800198END_MACRO
199
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700200MACRO0(SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_RDI)
201 // Save callee and GPR args, mixed together to agree with core spills bitmap.
202 PUSH r15 // Callee save.
203 PUSH r14 // Callee save.
204 PUSH r13 // Callee save.
205 PUSH r12 // Callee save.
206 PUSH r9 // Quick arg 5.
207 PUSH r8 // Quick arg 4.
208 PUSH rsi // Quick arg 1.
209 PUSH rbp // Callee save.
210 PUSH rbx // Callee save.
211 PUSH rdx // Quick arg 2.
212 PUSH rcx // Quick arg 3.
213 // Create space for FPR args and create 2 slots, 1 of padding and 1 for the
214 // StackReference<ArtMethod>.
215 subq LITERAL(80 + 4 * 8), %rsp
216 CFI_ADJUST_CFA_OFFSET(80 + 4 * 8)
217 // Save FPRs.
218 movq %xmm0, 16(%rsp)
219 movq %xmm1, 24(%rsp)
220 movq %xmm2, 32(%rsp)
221 movq %xmm3, 40(%rsp)
222 movq %xmm4, 48(%rsp)
223 movq %xmm5, 56(%rsp)
224 movq %xmm6, 64(%rsp)
225 movq %xmm7, 72(%rsp)
226 movq %xmm12, 80(%rsp)
227 movq %xmm13, 88(%rsp)
228 movq %xmm14, 96(%rsp)
229 movq %xmm15, 104(%rsp)
230 // Store ArtMethod to bottom of stack.
231 movq %rdi, 0(%rsp)
232 // Store rsp as the stop quick frame.
233 movq %rsp, %gs:THREAD_TOP_QUICK_FRAME_OFFSET
234END_MACRO
235
236MACRO0(RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME)
Ian Rogers936b37f2014-02-14 00:52:24 -0800237 // Restore FPRs.
238 movq 16(%rsp), %xmm0
239 movq 24(%rsp), %xmm1
240 movq 32(%rsp), %xmm2
241 movq 40(%rsp), %xmm3
242 movq 48(%rsp), %xmm4
243 movq 56(%rsp), %xmm5
244 movq 64(%rsp), %xmm6
245 movq 72(%rsp), %xmm7
Serguei Katkovc3801912014-07-08 17:21:53 +0700246 movq 80(%rsp), %xmm12
247 movq 88(%rsp), %xmm13
248 movq 96(%rsp), %xmm14
249 movq 104(%rsp), %xmm15
250 addq MACRO_LITERAL(80 + 4 * 8), %rsp
251 CFI_ADJUST_CFA_OFFSET(-(80 + 4 * 8))
Ian Rogersbefbd572014-03-06 01:13:39 -0800252 // Restore callee and GPR args, mixed together to agree with core spills bitmap.
Ian Rogers936b37f2014-02-14 00:52:24 -0800253 POP rcx
254 POP rdx
255 POP rbx
256 POP rbp
257 POP rsi
258 POP r8
259 POP r9
260 POP r12
261 POP r13
262 POP r14
263 POP r15
Ian Rogersef7d42f2014-01-06 12:55:46 -0800264END_MACRO
265
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800266
Ian Rogersef7d42f2014-01-06 12:55:46 -0800267 /*
268 * Macro that set calls through to artDeliverPendingExceptionFromCode, where the pending
269 * exception is Thread::Current()->exception_.
270 */
271MACRO0(DELIVER_PENDING_EXCEPTION)
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800272 SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save callee saves for throw
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700273 // (Thread*) setup
Andreas Gampebf6b92a2014-03-05 16:11:04 -0800274 movq %gs:THREAD_SELF_OFFSET, %rdi
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700275 call SYMBOL(artDeliverPendingExceptionFromCode) // artDeliverPendingExceptionFromCode(Thread*)
Ian Rogers47d00c02014-04-16 17:33:27 -0700276 UNREACHABLE
Ian Rogersef7d42f2014-01-06 12:55:46 -0800277END_MACRO
278
279MACRO2(NO_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
280 DEFINE_FUNCTION VAR(c_name, 0)
Ian Rogers47d00c02014-04-16 17:33:27 -0700281 SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context
282 // Outgoing argument set up
Ian Rogers47d00c02014-04-16 17:33:27 -0700283 movq %gs:THREAD_SELF_OFFSET, %rdi // pass Thread::Current()
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700284 call VAR(cxx_name, 1) // cxx_name(Thread*)
Ian Rogers47d00c02014-04-16 17:33:27 -0700285 UNREACHABLE
Ian Rogersef7d42f2014-01-06 12:55:46 -0800286 END_FUNCTION VAR(c_name, 0)
287END_MACRO
288
289MACRO2(ONE_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
290 DEFINE_FUNCTION VAR(c_name, 0)
Dmitry Petrochenkofca82202014-03-21 11:21:37 +0700291 SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context
292 // Outgoing argument set up
Ian Rogers47d00c02014-04-16 17:33:27 -0700293 movq %gs:THREAD_SELF_OFFSET, %rsi // pass Thread::Current()
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700294 call VAR(cxx_name, 1) // cxx_name(arg1, Thread*)
Ian Rogers47d00c02014-04-16 17:33:27 -0700295 UNREACHABLE
Ian Rogersef7d42f2014-01-06 12:55:46 -0800296 END_FUNCTION VAR(c_name, 0)
297END_MACRO
298
299MACRO2(TWO_ARG_RUNTIME_EXCEPTION, c_name, cxx_name)
300 DEFINE_FUNCTION VAR(c_name, 0)
Ian Rogers47d00c02014-04-16 17:33:27 -0700301 SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context
302 // Outgoing argument set up
Ian Rogers47d00c02014-04-16 17:33:27 -0700303 movq %gs:THREAD_SELF_OFFSET, %rdx // pass Thread::Current()
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700304 call VAR(cxx_name, 1) // cxx_name(Thread*)
Ian Rogers47d00c02014-04-16 17:33:27 -0700305 UNREACHABLE
Ian Rogersef7d42f2014-01-06 12:55:46 -0800306 END_FUNCTION VAR(c_name, 0)
307END_MACRO
308
309 /*
310 * Called by managed code to create and deliver a NullPointerException.
311 */
312NO_ARG_RUNTIME_EXCEPTION art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode
313
314 /*
315 * Called by managed code to create and deliver an ArithmeticException.
316 */
317NO_ARG_RUNTIME_EXCEPTION art_quick_throw_div_zero, artThrowDivZeroFromCode
318
319 /*
320 * Called by managed code to create and deliver a StackOverflowError.
321 */
322NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow, artThrowStackOverflowFromCode
323
324 /*
325 * Called by managed code, saves callee saves and then calls artThrowException
326 * that will place a mock Method* at the bottom of the stack. Arg1 holds the exception.
327 */
328ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception, artDeliverExceptionFromCode
329
330 /*
331 * Called by managed code to create and deliver a NoSuchMethodError.
332 */
333ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method, artThrowNoSuchMethodFromCode
334
335 /*
336 * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds
337 * index, arg2 holds limit.
338 */
339TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds, artThrowArrayBoundsFromCode
340
341 /*
342 * All generated callsites for interface invokes and invocation slow paths will load arguments
Andreas Gampe51f76352014-05-21 08:28:48 -0700343 * as usual - except instead of loading arg0/rdi with the target Method*, arg0/rdi will contain
Ian Rogersef7d42f2014-01-06 12:55:46 -0800344 * the method_idx. This wrapper will save arg1-arg3, load the caller's Method*, align the
345 * stack and call the appropriate C helper.
Andreas Gampe51f76352014-05-21 08:28:48 -0700346 * NOTE: "this" is first visible argument of the target, and so can be found in arg1/rsi.
Ian Rogersef7d42f2014-01-06 12:55:46 -0800347 *
Andreas Gampe51f76352014-05-21 08:28:48 -0700348 * The helper will attempt to locate the target and return a 128-bit result in rax/rdx consisting
349 * of the target Method* in rax and method->code_ in rdx.
Ian Rogersef7d42f2014-01-06 12:55:46 -0800350 *
Andreas Gampe51f76352014-05-21 08:28:48 -0700351 * If unsuccessful, the helper will return NULL/????. There will be a pending exception in the
Ian Rogersef7d42f2014-01-06 12:55:46 -0800352 * thread and we branch to another stub to deliver it.
353 *
Andreas Gampe51f76352014-05-21 08:28:48 -0700354 * On success this wrapper will restore arguments and *jump* to the target, leaving the return
355 * location on the stack.
356 *
357 * Adapted from x86 code.
Ian Rogersef7d42f2014-01-06 12:55:46 -0800358 */
359MACRO2(INVOKE_TRAMPOLINE, c_name, cxx_name)
360 DEFINE_FUNCTION VAR(c_name, 0)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700361 SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME // save callee saves in case allocation triggers GC
Andreas Gampe51f76352014-05-21 08:28:48 -0700362 // Helper signature is always
363 // (method_idx, *this_object, *caller_method, *self, sp)
364
Serguei Katkov7c748c12014-06-06 10:50:37 -0700365 movl FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE(%rsp), %edx // pass caller Method*
Andreas Gampe51f76352014-05-21 08:28:48 -0700366 movq %gs:THREAD_SELF_OFFSET, %rcx // pass Thread
367 movq %rsp, %r8 // pass SP
368
Andreas Gampe29b38412014-08-13 00:15:43 -0700369 call VAR(cxx_name, 1) // cxx_name(arg1, arg2, caller method*, Thread*, SP)
Andreas Gampe51f76352014-05-21 08:28:48 -0700370 // save the code pointer
371 movq %rax, %rdi
372 movq %rdx, %rax
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700373 RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
Andreas Gampe51f76352014-05-21 08:28:48 -0700374
375 testq %rdi, %rdi
376 jz 1f
377
378 // Tail call to intended method.
379 jmp *%rax
3801:
381 DELIVER_PENDING_EXCEPTION
Ian Rogersef7d42f2014-01-06 12:55:46 -0800382 END_FUNCTION VAR(c_name, 0)
383END_MACRO
384
385INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline, artInvokeInterfaceTrampoline
386INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck
387
388INVOKE_TRAMPOLINE art_quick_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck
389INVOKE_TRAMPOLINE art_quick_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck
390INVOKE_TRAMPOLINE art_quick_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck
391INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck
392
Ian Rogers936b37f2014-02-14 00:52:24 -0800393
394 /*
395 * Helper for quick invocation stub to set up XMM registers. Assumes r10 == shorty,
396 * r11 == arg_array. Clobbers r10, r11 and al. Branches to xmm_setup_finished if it encounters
397 * the end of the shorty.
398 */
399MACRO2(LOOP_OVER_SHORTY_LOADING_XMMS, xmm_reg, finished)
4001: // LOOP
401 movb (%r10), %al // al := *shorty
Ian Rogers44d6ff12014-03-06 23:11:11 -0800402 addq MACRO_LITERAL(1), %r10 // shorty++
403 cmpb MACRO_LITERAL(0), %al // if (al == '\0') goto xmm_setup_finished
Ian Rogers936b37f2014-02-14 00:52:24 -0800404 je VAR(finished, 1)
Ian Rogers44d6ff12014-03-06 23:11:11 -0800405 cmpb MACRO_LITERAL(68), %al // if (al == 'D') goto FOUND_DOUBLE
Ian Rogers936b37f2014-02-14 00:52:24 -0800406 je 2f
Ian Rogers44d6ff12014-03-06 23:11:11 -0800407 cmpb MACRO_LITERAL(70), %al // if (al == 'F') goto FOUND_FLOAT
Ian Rogers936b37f2014-02-14 00:52:24 -0800408 je 3f
Ian Rogers44d6ff12014-03-06 23:11:11 -0800409 addq MACRO_LITERAL(4), %r11 // arg_array++
Ian Rogers936b37f2014-02-14 00:52:24 -0800410 // Handle extra space in arg array taken by a long.
Ian Rogers44d6ff12014-03-06 23:11:11 -0800411 cmpb MACRO_LITERAL(74), %al // if (al != 'J') goto LOOP
Ian Rogers936b37f2014-02-14 00:52:24 -0800412 jne 1b
Ian Rogers44d6ff12014-03-06 23:11:11 -0800413 addq MACRO_LITERAL(4), %r11 // arg_array++
Ian Rogers936b37f2014-02-14 00:52:24 -0800414 jmp 1b // goto LOOP
4152: // FOUND_DOUBLE
416 movsd (%r11), REG_VAR(xmm_reg, 0)
Ian Rogers44d6ff12014-03-06 23:11:11 -0800417 addq MACRO_LITERAL(8), %r11 // arg_array+=2
Ian Rogers936b37f2014-02-14 00:52:24 -0800418 jmp 4f
4193: // FOUND_FLOAT
420 movss (%r11), REG_VAR(xmm_reg, 0)
Ian Rogers44d6ff12014-03-06 23:11:11 -0800421 addq MACRO_LITERAL(4), %r11 // arg_array++
Ian Rogers936b37f2014-02-14 00:52:24 -08004224:
423END_MACRO
424
425 /*
426 * Helper for quick invocation stub to set up GPR registers. Assumes r10 == shorty,
427 * r11 == arg_array. Clobbers r10, r11 and al. Branches to gpr_setup_finished if it encounters
428 * the end of the shorty.
429 */
430MACRO3(LOOP_OVER_SHORTY_LOADING_GPRS, gpr_reg64, gpr_reg32, finished)
4311: // LOOP
432 movb (%r10), %al // al := *shorty
Ian Rogers44d6ff12014-03-06 23:11:11 -0800433 addq MACRO_LITERAL(1), %r10 // shorty++
434 cmpb MACRO_LITERAL(0), %al // if (al == '\0') goto gpr_setup_finished
Ian Rogers936b37f2014-02-14 00:52:24 -0800435 je VAR(finished, 2)
Ian Rogers44d6ff12014-03-06 23:11:11 -0800436 cmpb MACRO_LITERAL(74), %al // if (al == 'J') goto FOUND_LONG
Ian Rogers936b37f2014-02-14 00:52:24 -0800437 je 2f
Ian Rogers44d6ff12014-03-06 23:11:11 -0800438 cmpb MACRO_LITERAL(70), %al // if (al == 'F') goto SKIP_FLOAT
Ian Rogers936b37f2014-02-14 00:52:24 -0800439 je 3f
Ian Rogers44d6ff12014-03-06 23:11:11 -0800440 cmpb MACRO_LITERAL(68), %al // if (al == 'D') goto SKIP_DOUBLE
Ian Rogers936b37f2014-02-14 00:52:24 -0800441 je 4f
442 movl (%r11), REG_VAR(gpr_reg32, 1)
Ian Rogers44d6ff12014-03-06 23:11:11 -0800443 addq MACRO_LITERAL(4), %r11 // arg_array++
Ian Rogers936b37f2014-02-14 00:52:24 -0800444 jmp 5f
4452: // FOUND_LONG
446 movq (%r11), REG_VAR(gpr_reg64, 0)
Ian Rogers44d6ff12014-03-06 23:11:11 -0800447 addq MACRO_LITERAL(8), %r11 // arg_array+=2
Ian Rogers936b37f2014-02-14 00:52:24 -0800448 jmp 5f
4493: // SKIP_FLOAT
Ian Rogers44d6ff12014-03-06 23:11:11 -0800450 addq MACRO_LITERAL(4), %r11 // arg_array++
Ian Rogers936b37f2014-02-14 00:52:24 -0800451 jmp 1b
4524: // SKIP_DOUBLE
Ian Rogers44d6ff12014-03-06 23:11:11 -0800453 addq MACRO_LITERAL(8), %r11 // arg_array+=2
Ian Rogers936b37f2014-02-14 00:52:24 -0800454 jmp 1b
4555:
456END_MACRO
457
Ian Rogersef7d42f2014-01-06 12:55:46 -0800458 /*
459 * Quick invocation stub.
Ian Rogers0177e532014-02-11 16:30:46 -0800460 * On entry:
461 * [sp] = return address
462 * rdi = method pointer
Ian Rogers936b37f2014-02-14 00:52:24 -0800463 * rsi = argument array that must at least contain the this pointer.
Ian Rogers0177e532014-02-11 16:30:46 -0800464 * rdx = size of argument array in bytes
465 * rcx = (managed) thread pointer
466 * r8 = JValue* result
467 * r9 = char* shorty
Ian Rogersef7d42f2014-01-06 12:55:46 -0800468 */
469DEFINE_FUNCTION art_quick_invoke_stub
Ian Rogersc3ccc102014-06-25 11:52:14 -0700470#if defined(__APPLE__)
471 int3
472 int3
473#else
Ian Rogers936b37f2014-02-14 00:52:24 -0800474 // Set up argument XMM registers.
475 leaq 1(%r9), %r10 // R10 := shorty + 1 ; ie skip return arg character.
476 leaq 4(%rsi), %r11 // R11 := arg_array + 4 ; ie skip this pointer.
477 LOOP_OVER_SHORTY_LOADING_XMMS xmm0, .Lxmm_setup_finished
478 LOOP_OVER_SHORTY_LOADING_XMMS xmm1, .Lxmm_setup_finished
479 LOOP_OVER_SHORTY_LOADING_XMMS xmm2, .Lxmm_setup_finished
480 LOOP_OVER_SHORTY_LOADING_XMMS xmm3, .Lxmm_setup_finished
481 LOOP_OVER_SHORTY_LOADING_XMMS xmm4, .Lxmm_setup_finished
482 LOOP_OVER_SHORTY_LOADING_XMMS xmm5, .Lxmm_setup_finished
483 LOOP_OVER_SHORTY_LOADING_XMMS xmm6, .Lxmm_setup_finished
484 LOOP_OVER_SHORTY_LOADING_XMMS xmm7, .Lxmm_setup_finished
485 .balign 16
486.Lxmm_setup_finished:
487 PUSH rbp // Save rbp.
488 PUSH r8 // Save r8/result*.
489 PUSH r9 // Save r9/shorty*.
Nicolas Geoffray48088462014-12-12 10:29:38 +0000490 PUSH rbx // Save native callee save rbx
491 PUSH r12 // Save native callee save r12
492 PUSH r13 // Save native callee save r13
493 PUSH r14 // Save native callee save r14
494 PUSH r15 // Save native callee save r15
Ian Rogers47d00c02014-04-16 17:33:27 -0700495 movq %rsp, %rbp // Copy value of stack pointer into base pointer.
Ian Rogers936b37f2014-02-14 00:52:24 -0800496 CFI_DEF_CFA_REGISTER(rbp)
Andreas Gampecf4035a2014-05-28 22:43:01 -0700497
Ian Rogers936b37f2014-02-14 00:52:24 -0800498 movl %edx, %r10d
Nicolas Geoffray48088462014-12-12 10:29:38 +0000499 addl LITERAL(100), %edx // Reserve space for return addr, StackReference<method>, rbp,
500 // r8, r9, rbx, r12, r13, r14, and r15 in frame.
501 andl LITERAL(0xFFFFFFF0), %edx // Align frame size to 16 bytes.
502 subl LITERAL(72), %edx // Remove space for return address, rbp, r8, r9, rbx, r12,
503 // r13, r14, and r15
504 subq %rdx, %rsp // Reserve stack space for argument array.
Andreas Gampecf4035a2014-05-28 22:43:01 -0700505
506#if (STACK_REFERENCE_SIZE != 4)
507#error "STACK_REFERENCE_SIZE(X86_64) size not as expected."
508#endif
509 movl LITERAL(0), (%rsp) // Store NULL for method*
510
Ian Rogers936b37f2014-02-14 00:52:24 -0800511 movl %r10d, %ecx // Place size of args in rcx.
Nicolas Geoffray48088462014-12-12 10:29:38 +0000512 movq %rdi, %rax // rax := method to be called
513 movq %rsi, %r11 // r11 := arg_array
514 leaq 4(%rsp), %rdi // rdi is pointing just above the StackReference<method> in the
Andreas Gampecf4035a2014-05-28 22:43:01 -0700515 // stack arguments.
Ian Rogers936b37f2014-02-14 00:52:24 -0800516 // Copy arg array into stack.
517 rep movsb // while (rcx--) { *rdi++ = *rsi++ }
Nicolas Geoffray48088462014-12-12 10:29:38 +0000518 leaq 1(%r9), %r10 // r10 := shorty + 1 ; ie skip return arg character
519 movq %rax, %rdi // rdi := method to be called
520 movl (%r11), %esi // rsi := this pointer
Ian Rogers936b37f2014-02-14 00:52:24 -0800521 addq LITERAL(4), %r11 // arg_array++
522 LOOP_OVER_SHORTY_LOADING_GPRS rdx, edx, .Lgpr_setup_finished
523 LOOP_OVER_SHORTY_LOADING_GPRS rcx, ecx, .Lgpr_setup_finished
524 LOOP_OVER_SHORTY_LOADING_GPRS r8, r8d, .Lgpr_setup_finished
525 LOOP_OVER_SHORTY_LOADING_GPRS r9, r9d, .Lgpr_setup_finished
526.Lgpr_setup_finished:
Mathieu Chartier2d721012014-11-10 11:08:06 -0800527 call *MIRROR_ART_METHOD_QUICK_CODE_OFFSET_64(%rdi) // Call the method.
Ian Rogers936b37f2014-02-14 00:52:24 -0800528 movq %rbp, %rsp // Restore stack pointer.
Nicolas Geoffray48088462014-12-12 10:29:38 +0000529 POP r15 // Pop r15
530 POP r14 // Pop r14
531 POP r13 // Pop r13
532 POP r12 // Pop r12
533 POP rbx // Pop rbx
534 POP r9 // Pop r9 - shorty*
Ian Rogers936b37f2014-02-14 00:52:24 -0800535 POP r8 // Pop r8 - result*.
536 POP rbp // Pop rbp
537 cmpb LITERAL(68), (%r9) // Test if result type char == 'D'.
538 je .Lreturn_double_quick
539 cmpb LITERAL(70), (%r9) // Test if result type char == 'F'.
540 je .Lreturn_float_quick
541 movq %rax, (%r8) // Store the result assuming its a long, int or Object*
542 ret
543.Lreturn_double_quick:
Nicolas Geoffray48088462014-12-12 10:29:38 +0000544 movsd %xmm0, (%r8) // Store the double floating point result.
Ian Rogers936b37f2014-02-14 00:52:24 -0800545 ret
546.Lreturn_float_quick:
Nicolas Geoffray48088462014-12-12 10:29:38 +0000547 movss %xmm0, (%r8) // Store the floating point result.
Ian Rogers936b37f2014-02-14 00:52:24 -0800548 ret
Ian Rogersc3ccc102014-06-25 11:52:14 -0700549#endif // __APPLE__
Ian Rogers936b37f2014-02-14 00:52:24 -0800550END_FUNCTION art_quick_invoke_stub
551
552 /*
553 * Quick invocation stub.
554 * On entry:
555 * [sp] = return address
556 * rdi = method pointer
557 * rsi = argument array or NULL if no arguments.
558 * rdx = size of argument array in bytes
559 * rcx = (managed) thread pointer
560 * r8 = JValue* result
561 * r9 = char* shorty
562 */
563DEFINE_FUNCTION art_quick_invoke_static_stub
Ian Rogersc3ccc102014-06-25 11:52:14 -0700564#if defined(__APPLE__)
565 int3
566 int3
567#else
Ian Rogers936b37f2014-02-14 00:52:24 -0800568 // Set up argument XMM registers.
569 leaq 1(%r9), %r10 // R10 := shorty + 1 ; ie skip return arg character
570 movq %rsi, %r11 // R11 := arg_array
571 LOOP_OVER_SHORTY_LOADING_XMMS xmm0, .Lxmm_setup_finished2
572 LOOP_OVER_SHORTY_LOADING_XMMS xmm1, .Lxmm_setup_finished2
573 LOOP_OVER_SHORTY_LOADING_XMMS xmm2, .Lxmm_setup_finished2
574 LOOP_OVER_SHORTY_LOADING_XMMS xmm3, .Lxmm_setup_finished2
575 LOOP_OVER_SHORTY_LOADING_XMMS xmm4, .Lxmm_setup_finished2
576 LOOP_OVER_SHORTY_LOADING_XMMS xmm5, .Lxmm_setup_finished2
577 LOOP_OVER_SHORTY_LOADING_XMMS xmm6, .Lxmm_setup_finished2
578 LOOP_OVER_SHORTY_LOADING_XMMS xmm7, .Lxmm_setup_finished2
579 .balign 16
580.Lxmm_setup_finished2:
581 PUSH rbp // Save rbp.
582 PUSH r8 // Save r8/result*.
583 PUSH r9 // Save r9/shorty*.
Nicolas Geoffray48088462014-12-12 10:29:38 +0000584 PUSH rbx // Save rbx
585 PUSH r12 // Save r12
586 PUSH r13 // Save r13
587 PUSH r14 // Save r14
588 PUSH r15 // Save r15
Ian Rogers47d00c02014-04-16 17:33:27 -0700589 movq %rsp, %rbp // Copy value of stack pointer into base pointer.
Ian Rogers936b37f2014-02-14 00:52:24 -0800590 CFI_DEF_CFA_REGISTER(rbp)
Andreas Gampecf4035a2014-05-28 22:43:01 -0700591
Ian Rogers936b37f2014-02-14 00:52:24 -0800592 movl %edx, %r10d
Nicolas Geoffray48088462014-12-12 10:29:38 +0000593 addl LITERAL(100), %edx // Reserve space for return addr, StackReference<method>, rbp,
594 // r8, r9, r12, r13, r14, and r15 in frame.
595 andl LITERAL(0xFFFFFFF0), %edx // Align frame size to 16 bytes.
596 subl LITERAL(72), %edx // Remove space for return address, rbp, r8, r9, rbx, r12,
597 // r13, r14, and r15.
598 subq %rdx, %rsp // Reserve stack space for argument array.
Andreas Gampecf4035a2014-05-28 22:43:01 -0700599
600#if (STACK_REFERENCE_SIZE != 4)
601#error "STACK_REFERENCE_SIZE(X86_64) size not as expected."
602#endif
Nicolas Geoffray48088462014-12-12 10:29:38 +0000603 movl LITERAL(0), (%rsp) // Store NULL for method*
Andreas Gampecf4035a2014-05-28 22:43:01 -0700604
Nicolas Geoffray48088462014-12-12 10:29:38 +0000605 movl %r10d, %ecx // Place size of args in rcx.
606 movq %rdi, %rax // rax := method to be called
607 movq %rsi, %r11 // r11 := arg_array
608 leaq 4(%rsp), %rdi // rdi is pointing just above the StackReference<method> in the
609 // stack arguments.
Ian Rogers936b37f2014-02-14 00:52:24 -0800610 // Copy arg array into stack.
Nicolas Geoffray48088462014-12-12 10:29:38 +0000611 rep movsb // while (rcx--) { *rdi++ = *rsi++ }
612 leaq 1(%r9), %r10 // r10 := shorty + 1 ; ie skip return arg character
613 movq %rax, %rdi // rdi := method to be called
Ian Rogers936b37f2014-02-14 00:52:24 -0800614 LOOP_OVER_SHORTY_LOADING_GPRS rsi, esi, .Lgpr_setup_finished2
615 LOOP_OVER_SHORTY_LOADING_GPRS rdx, edx, .Lgpr_setup_finished2
616 LOOP_OVER_SHORTY_LOADING_GPRS rcx, ecx, .Lgpr_setup_finished2
617 LOOP_OVER_SHORTY_LOADING_GPRS r8, r8d, .Lgpr_setup_finished2
618 LOOP_OVER_SHORTY_LOADING_GPRS r9, r9d, .Lgpr_setup_finished2
619.Lgpr_setup_finished2:
Mathieu Chartier2d721012014-11-10 11:08:06 -0800620 call *MIRROR_ART_METHOD_QUICK_CODE_OFFSET_64(%rdi) // Call the method.
Nicolas Geoffray48088462014-12-12 10:29:38 +0000621 movq %rbp, %rsp // Restore stack pointer.
622 POP r15 // Pop r15
623 POP r14 // Pop r14
624 POP r13 // Pop r13
625 POP r12 // Pop r12
626 POP rbx // Pop rbx
627 POP r9 // Pop r9 - shorty*.
628 POP r8 // Pop r8 - result*.
629 POP rbp // Pop rbp
630 cmpb LITERAL(68), (%r9) // Test if result type char == 'D'.
Ian Rogers936b37f2014-02-14 00:52:24 -0800631 je .Lreturn_double_quick2
Nicolas Geoffray48088462014-12-12 10:29:38 +0000632 cmpb LITERAL(70), (%r9) // Test if result type char == 'F'.
Ian Rogers936b37f2014-02-14 00:52:24 -0800633 je .Lreturn_float_quick2
Nicolas Geoffray48088462014-12-12 10:29:38 +0000634 movq %rax, (%r8) // Store the result assuming its a long, int or Object*
Ian Rogers936b37f2014-02-14 00:52:24 -0800635 ret
636.Lreturn_double_quick2:
Nicolas Geoffray48088462014-12-12 10:29:38 +0000637 movsd %xmm0, (%r8) // Store the double floating point result.
Ian Rogers936b37f2014-02-14 00:52:24 -0800638 ret
639.Lreturn_float_quick2:
Nicolas Geoffray48088462014-12-12 10:29:38 +0000640 movss %xmm0, (%r8) // Store the floating point result.
Ian Rogers936b37f2014-02-14 00:52:24 -0800641 ret
Ian Rogersc3ccc102014-06-25 11:52:14 -0700642#endif // __APPLE__
Ian Rogers1a570662014-03-12 01:02:21 -0700643END_FUNCTION art_quick_invoke_static_stub
Ian Rogersef7d42f2014-01-06 12:55:46 -0800644
Serguei Katkovc3801912014-07-08 17:21:53 +0700645 /*
646 * Long jump stub.
647 * On entry:
648 * rdi = gprs
649 * rsi = fprs
650 */
651DEFINE_FUNCTION art_quick_do_long_jump
652#if defined(__APPLE__)
653 int3
654 int3
655#else
656 // Restore FPRs.
657 movq 0(%rsi), %xmm0
658 movq 8(%rsi), %xmm1
659 movq 16(%rsi), %xmm2
660 movq 24(%rsi), %xmm3
661 movq 32(%rsi), %xmm4
662 movq 40(%rsi), %xmm5
663 movq 48(%rsi), %xmm6
664 movq 56(%rsi), %xmm7
665 movq 64(%rsi), %xmm8
666 movq 72(%rsi), %xmm9
667 movq 80(%rsi), %xmm10
668 movq 88(%rsi), %xmm11
669 movq 96(%rsi), %xmm12
670 movq 104(%rsi), %xmm13
671 movq 112(%rsi), %xmm14
672 movq 120(%rsi), %xmm15
673 // Restore FPRs.
674 movq %rdi, %rsp // RSP points to gprs.
675 // Load all registers except RSP and RIP with values in gprs.
676 popq %r15
677 popq %r14
678 popq %r13
679 popq %r12
680 popq %r11
681 popq %r10
682 popq %r9
683 popq %r8
684 popq %rdi
685 popq %rsi
686 popq %rbp
687 addq LITERAL(8), %rsp // Skip rsp
688 popq %rbx
689 popq %rdx
690 popq %rcx
691 popq %rax
692 popq %rsp // Load stack pointer.
693 ret // From higher in the stack pop rip.
694#endif // __APPLE__
695END_FUNCTION art_quick_do_long_jump
696
Ian Rogersef7d42f2014-01-06 12:55:46 -0800697MACRO3(NO_ARG_DOWNCALL, c_name, cxx_name, return_macro)
698 DEFINE_FUNCTION VAR(c_name, 0)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700699 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC
Ian Rogers47d00c02014-04-16 17:33:27 -0700700 // Outgoing argument set up
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700701 movq %gs:THREAD_SELF_OFFSET, %rdi // pass Thread::Current()
702 call VAR(cxx_name, 1) // cxx_name(Thread*)
703 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
704 CALL_MACRO(return_macro, 2) // return or deliver exception
Ian Rogersef7d42f2014-01-06 12:55:46 -0800705 END_FUNCTION VAR(c_name, 0)
706END_MACRO
707
708MACRO3(ONE_ARG_DOWNCALL, c_name, cxx_name, return_macro)
709 DEFINE_FUNCTION VAR(c_name, 0)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700710 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC
Ian Rogers47d00c02014-04-16 17:33:27 -0700711 // Outgoing argument set up
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700712 movq %gs:THREAD_SELF_OFFSET, %rsi // pass Thread::Current()
713 call VAR(cxx_name, 1) // cxx_name(arg0, Thread*)
714 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
715 CALL_MACRO(return_macro, 2) // return or deliver exception
Ian Rogersef7d42f2014-01-06 12:55:46 -0800716 END_FUNCTION VAR(c_name, 0)
717END_MACRO
718
719MACRO3(TWO_ARG_DOWNCALL, c_name, cxx_name, return_macro)
720 DEFINE_FUNCTION VAR(c_name, 0)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700721 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC
Ian Rogers47d00c02014-04-16 17:33:27 -0700722 // Outgoing argument set up
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700723 movq %gs:THREAD_SELF_OFFSET, %rdx // pass Thread::Current()
724 call VAR(cxx_name, 1) // cxx_name(arg0, arg1, Thread*)
725 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
726 CALL_MACRO(return_macro, 2) // return or deliver exception
Ian Rogersef7d42f2014-01-06 12:55:46 -0800727 END_FUNCTION VAR(c_name, 0)
728END_MACRO
729
730MACRO3(THREE_ARG_DOWNCALL, c_name, cxx_name, return_macro)
731 DEFINE_FUNCTION VAR(c_name, 0)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700732 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC
Ian Rogers47d00c02014-04-16 17:33:27 -0700733 // Outgoing argument set up
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700734 movq %gs:THREAD_SELF_OFFSET, %rcx // pass Thread::Current()
735 call VAR(cxx_name, 1) // cxx_name(arg0, arg1, arg2, Thread*)
736 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
737 CALL_MACRO(return_macro, 2) // return or deliver exception
Ian Rogersef7d42f2014-01-06 12:55:46 -0800738 END_FUNCTION VAR(c_name, 0)
739END_MACRO
740
Ian Rogersc3ccc102014-06-25 11:52:14 -0700741MACRO3(ONE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
742 DEFINE_FUNCTION VAR(c_name, 0)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700743 movl 8(%rsp), %esi // pass referrer
744 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME
745 // arg0 is in rdi
746 movq %gs:THREAD_SELF_OFFSET, %rdx // pass Thread::Current()
747 call VAR(cxx_name, 1) // cxx_name(arg0, referrer, Thread*)
748 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
Ian Rogersc3ccc102014-06-25 11:52:14 -0700749 CALL_MACRO(return_macro, 2)
750 END_FUNCTION VAR(c_name, 0)
751END_MACRO
752
753MACRO3(TWO_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
754 DEFINE_FUNCTION VAR(c_name, 0)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700755 movl 8(%rsp), %edx // pass referrer
756 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME
757 // arg0 and arg1 are in rdi/rsi
758 movq %gs:THREAD_SELF_OFFSET, %rcx // pass Thread::Current()
759 call VAR(cxx_name, 1) // (arg0, arg1, referrer, Thread*)
760 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
Ian Rogersc3ccc102014-06-25 11:52:14 -0700761 CALL_MACRO(return_macro, 2)
762 END_FUNCTION VAR(c_name, 0)
763END_MACRO
764
765MACRO3(THREE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
766 DEFINE_FUNCTION VAR(c_name, 0)
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700767 movl 8(%rsp), %ecx // pass referrer
768 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME
769 // arg0, arg1, and arg2 are in rdi/rsi/rdx
Ian Rogersc3ccc102014-06-25 11:52:14 -0700770 movq %gs:THREAD_SELF_OFFSET, %r8 // pass Thread::Current()
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700771 call VAR(cxx_name, 1) // cxx_name(arg0, arg1, arg2, referrer, Thread*)
772 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
773 CALL_MACRO(return_macro, 2) // return or deliver exception
Ian Rogersc3ccc102014-06-25 11:52:14 -0700774 END_FUNCTION VAR(c_name, 0)
775END_MACRO
776
Ian Rogersef7d42f2014-01-06 12:55:46 -0800777MACRO0(RETURN_IF_RESULT_IS_NON_ZERO)
Ian Rogers47d00c02014-04-16 17:33:27 -0700778 testq %rax, %rax // rax == 0 ?
779 jz 1f // if rax == 0 goto 1
Ian Rogersef7d42f2014-01-06 12:55:46 -0800780 ret // return
7811: // deliver exception on current thread
782 DELIVER_PENDING_EXCEPTION
783END_MACRO
784
785MACRO0(RETURN_IF_EAX_ZERO)
Ian Rogersef7d42f2014-01-06 12:55:46 -0800786 testl %eax, %eax // eax == 0 ?
787 jnz 1f // if eax != 0 goto 1
788 ret // return
7891: // deliver exception on current thread
790 DELIVER_PENDING_EXCEPTION
791END_MACRO
792
793MACRO0(RETURN_OR_DELIVER_PENDING_EXCEPTION)
Ian Rogers936b37f2014-02-14 00:52:24 -0800794 movq %gs:THREAD_EXCEPTION_OFFSET, %rcx // get exception field
795 testq %rcx, %rcx // rcx == 0 ?
796 jnz 1f // if rcx != 0 goto 1
797 ret // return
7981: // deliver exception on current thread
Ian Rogersef7d42f2014-01-06 12:55:46 -0800799 DELIVER_PENDING_EXCEPTION
800END_MACRO
801
802// Generate the allocation entrypoints for each allocator.
803// TODO: use arch/quick_alloc_entrypoints.S. Currently we don't as we need to use concatenation
804// macros to work around differences between OS/X's as and binutils as (OS/X lacks named arguments
805// to macros and the VAR macro won't concatenate arguments properly), this also breaks having
806// multi-line macros that use each other (hence using 1 macro per newline below).
807#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(c_suffix, cxx_suffix) \
808 TWO_ARG_DOWNCALL art_quick_alloc_object ## c_suffix, artAllocObjectFromCode ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
809#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(c_suffix, cxx_suffix) \
810 TWO_ARG_DOWNCALL art_quick_alloc_object_resolved ## c_suffix, artAllocObjectFromCodeResolved ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
811#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(c_suffix, cxx_suffix) \
812 TWO_ARG_DOWNCALL art_quick_alloc_object_initialized ## c_suffix, artAllocObjectFromCodeInitialized ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
813#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(c_suffix, cxx_suffix) \
814 TWO_ARG_DOWNCALL art_quick_alloc_object_with_access_check ## c_suffix, artAllocObjectFromCodeWithAccessCheck ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
815#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(c_suffix, cxx_suffix) \
816 THREE_ARG_DOWNCALL art_quick_alloc_array ## c_suffix, artAllocArrayFromCode ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
817#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(c_suffix, cxx_suffix) \
818 THREE_ARG_DOWNCALL art_quick_alloc_array_resolved ## c_suffix, artAllocArrayFromCodeResolved ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
819#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(c_suffix, cxx_suffix) \
820 THREE_ARG_DOWNCALL art_quick_alloc_array_with_access_check ## c_suffix, artAllocArrayFromCodeWithAccessCheck ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
821#define GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(c_suffix, cxx_suffix) \
822 THREE_ARG_DOWNCALL art_quick_check_and_alloc_array ## c_suffix, artCheckAndAllocArrayFromCode ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
823#define GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(c_suffix, cxx_suffix) \
824 THREE_ARG_DOWNCALL art_quick_check_and_alloc_array_with_access_check ## c_suffix, artCheckAndAllocArrayFromCodeWithAccessCheck ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
825
826GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_dlmalloc, DlMalloc)
827GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_dlmalloc, DlMalloc)
828GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_dlmalloc, DlMalloc)
829GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_dlmalloc, DlMalloc)
830GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_dlmalloc, DlMalloc)
831GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_dlmalloc, DlMalloc)
832GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_dlmalloc, DlMalloc)
833GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_dlmalloc, DlMalloc)
834GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_dlmalloc, DlMalloc)
835
836GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_dlmalloc_instrumented, DlMallocInstrumented)
837GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_dlmalloc_instrumented, DlMallocInstrumented)
838GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_dlmalloc_instrumented, DlMallocInstrumented)
839GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_dlmalloc_instrumented, DlMallocInstrumented)
840GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_dlmalloc_instrumented, DlMallocInstrumented)
841GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_dlmalloc_instrumented, DlMallocInstrumented)
842GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_dlmalloc_instrumented, DlMallocInstrumented)
843GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_dlmalloc_instrumented, DlMallocInstrumented)
844GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_dlmalloc_instrumented, DlMallocInstrumented)
845
846GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_rosalloc, RosAlloc)
847GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_rosalloc, RosAlloc)
848GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_rosalloc, RosAlloc)
849GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_rosalloc, RosAlloc)
850GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_rosalloc, RosAlloc)
851GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_rosalloc, RosAlloc)
852GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_rosalloc, RosAlloc)
853GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_rosalloc, RosAlloc)
854GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_rosalloc, RosAlloc)
855
856GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_rosalloc_instrumented, RosAllocInstrumented)
857GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_rosalloc_instrumented, RosAllocInstrumented)
858GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_rosalloc_instrumented, RosAllocInstrumented)
859GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_rosalloc_instrumented, RosAllocInstrumented)
860GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_rosalloc_instrumented, RosAllocInstrumented)
861GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_rosalloc_instrumented, RosAllocInstrumented)
862GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_rosalloc_instrumented, RosAllocInstrumented)
863GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_rosalloc_instrumented, RosAllocInstrumented)
864GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_rosalloc_instrumented, RosAllocInstrumented)
865
866GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_bump_pointer, BumpPointer)
867GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_bump_pointer, BumpPointer)
868GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_bump_pointer, BumpPointer)
869GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_bump_pointer, BumpPointer)
870GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_bump_pointer, BumpPointer)
871GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_bump_pointer, BumpPointer)
872GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_bump_pointer, BumpPointer)
873GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_bump_pointer, BumpPointer)
874GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_bump_pointer, BumpPointer)
875
876GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_bump_pointer_instrumented, BumpPointerInstrumented)
877GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_bump_pointer_instrumented, BumpPointerInstrumented)
878GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_bump_pointer_instrumented, BumpPointerInstrumented)
879GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_bump_pointer_instrumented, BumpPointerInstrumented)
880GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_bump_pointer_instrumented, BumpPointerInstrumented)
881GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_bump_pointer_instrumented, BumpPointerInstrumented)
882GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_bump_pointer_instrumented, BumpPointerInstrumented)
883GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_bump_pointer_instrumented, BumpPointerInstrumented)
884GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_bump_pointer_instrumented, BumpPointerInstrumented)
885
Hiroshi Yamauchie01a5202015-03-19 12:35:04 -0700886DEFINE_FUNCTION art_quick_alloc_object_tlab
887 // Fast path tlab allocation.
888 // RDI: uint32_t type_idx, RSI: ArtMethod*
889 // RDX, RCX, R8, R9: free. RAX: return val.
890 movl MIRROR_ART_METHOD_DEX_CACHE_TYPES_OFFSET(%rsi), %edx // Load dex cache resolved types array
891 // Load the class
892 movl MIRROR_OBJECT_ARRAY_DATA_OFFSET(%rdx, %rdi, MIRROR_OBJECT_ARRAY_COMPONENT_SIZE), %edx
893 testl %edx, %edx // Check null class
894 jz .Lart_quick_alloc_object_tlab_slow_path
895 // Check class status.
896 cmpl LITERAL(MIRROR_CLASS_STATUS_INITIALIZED), MIRROR_CLASS_STATUS_OFFSET(%rdx)
897 jne .Lart_quick_alloc_object_tlab_slow_path
898 // Check access flags has kAccClassIsFinalizable
899 testl LITERAL(ACCESS_FLAGS_CLASS_IS_FINALIZABLE), MIRROR_CLASS_ACCESS_FLAGS_OFFSET(%rdx)
900 jnz .Lart_quick_alloc_object_tlab_slow_path
901 movl MIRROR_CLASS_OBJECT_SIZE_OFFSET(%rdx), %ecx // Load the object size.
902 addl LITERAL(OBJECT_ALIGNMENT_MASK), %ecx // Align the size by 8. (addr + 7) & ~7.
903 andl LITERAL(OBJECT_ALIGNMENT_MASK_TOGGLED), %ecx
904 movq %gs:THREAD_SELF_OFFSET, %r8 // r8 = thread
905 movq THREAD_LOCAL_POS_OFFSET(%r8), %rax // Load thread_local_pos.
906 addq %rax, %rcx // Add the object size.
907 cmpq THREAD_LOCAL_END_OFFSET(%r8), %rcx // Check if it fits.
908 ja .Lart_quick_alloc_object_tlab_slow_path
909 movq %rcx, THREAD_LOCAL_POS_OFFSET(%r8) // Update thread_local_pos.
910 addq LITERAL(1), THREAD_LOCAL_OBJECTS_OFFSET(%r8) // Increment thread_local_objects.
911 // Store the class pointer in the header.
912 // No fence needed for x86.
913 movl %edx, MIRROR_OBJECT_CLASS_OFFSET(%rax)
914 ret // Fast path succeeded.
915.Lart_quick_alloc_object_tlab_slow_path:
916 SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC
917 // Outgoing argument set up
918 movq %gs:THREAD_SELF_OFFSET, %rdx // pass Thread::Current()
Hiroshi Yamauchi3d900a72015-03-20 17:56:45 -0700919 call SYMBOL(artAllocObjectFromCodeTLAB) // cxx_name(arg0, arg1, Thread*)
Hiroshi Yamauchie01a5202015-03-19 12:35:04 -0700920 RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address
921 RETURN_IF_RESULT_IS_NON_ZERO // return or deliver exception
922END_FUNCTION art_quick_alloc_object_tlab
923
Ian Rogersef7d42f2014-01-06 12:55:46 -0800924GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_tlab, TLAB)
925GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_tlab, TLAB)
926GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_tlab, TLAB)
927GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_tlab, TLAB)
928GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_tlab, TLAB)
929GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_tlab, TLAB)
930GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_tlab, TLAB)
931GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_tlab, TLAB)
932
933GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_tlab_instrumented, TLABInstrumented)
934GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_tlab_instrumented, TLABInstrumented)
935GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_tlab_instrumented, TLABInstrumented)
936GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_tlab_instrumented, TLABInstrumented)
937GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_tlab_instrumented, TLABInstrumented)
938GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_tlab_instrumented, TLABInstrumented)
939GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_tlab_instrumented, TLABInstrumented)
940GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_tlab_instrumented, TLABInstrumented)
941GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_tlab_instrumented, TLABInstrumented)
942
Hiroshi Yamauchi2cd334a2015-01-09 14:03:35 -0800943GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_region, Region)
944GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_region, Region)
945GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_region, Region)
946GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_region, Region)
947GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_region, Region)
948GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_region, Region)
949GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region, Region)
950GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_region, Region)
951GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region, Region)
952
953GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_region_instrumented, RegionInstrumented)
954GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_region_instrumented, RegionInstrumented)
955GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_region_instrumented, RegionInstrumented)
956GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_region_instrumented, RegionInstrumented)
957GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_region_instrumented, RegionInstrumented)
958GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_region_instrumented, RegionInstrumented)
959GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region_instrumented, RegionInstrumented)
960GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_region_instrumented, RegionInstrumented)
961GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region_instrumented, RegionInstrumented)
962
963GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_region_tlab, RegionTLAB)
964GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_region_tlab, RegionTLAB)
965GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_region_tlab, RegionTLAB)
966GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_region_tlab, RegionTLAB)
967GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_region_tlab, RegionTLAB)
968GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_region_tlab, RegionTLAB)
969GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region_tlab, RegionTLAB)
970GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_region_tlab, RegionTLAB)
971GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region_tlab, RegionTLAB)
972
973GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_region_tlab_instrumented, RegionTLABInstrumented)
974GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_region_tlab_instrumented, RegionTLABInstrumented)
975GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_region_tlab_instrumented, RegionTLABInstrumented)
976GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_region_tlab_instrumented, RegionTLABInstrumented)
977GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_region_tlab_instrumented, RegionTLABInstrumented)
978GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_region_tlab_instrumented, RegionTLABInstrumented)
979GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region_tlab_instrumented, RegionTLABInstrumented)
980GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_region_tlab_instrumented, RegionTLABInstrumented)
981GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region_tlab_instrumented, RegionTLABInstrumented)
982
Ian Rogersef7d42f2014-01-06 12:55:46 -0800983TWO_ARG_DOWNCALL art_quick_resolve_string, artResolveStringFromCode, RETURN_IF_RESULT_IS_NON_ZERO
984TWO_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO
985TWO_ARG_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode, RETURN_IF_RESULT_IS_NON_ZERO
986TWO_ARG_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode, RETURN_IF_RESULT_IS_NON_ZERO
987
Ian Rogers832336b2014-10-08 15:35:22 -0700988TWO_ARG_REF_DOWNCALL art_quick_handle_fill_data, artHandleFillArrayDataFromCode, RETURN_IF_EAX_ZERO

DEFINE_FUNCTION art_quick_lock_object
    testl %edi, %edi                      // Null check object/rdi.
    jz   .Lslow_lock
.Lretry_lock:
    movl MIRROR_OBJECT_LOCK_WORD_OFFSET(%edi), %ecx  // ecx := lock word.
    test LITERAL(LOCK_WORD_STATE_MASK), %ecx         // Test the 2 high bits.
    jne  .Lslow_lock                      // Slow path if either of the two high bits is set.
    movl %ecx, %edx                       // save lock word (edx) to keep read barrier bits.
    andl LITERAL(LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED), %ecx  // zero the read barrier bits.
    test %ecx, %ecx
    jnz  .Lalready_thin                   // Lock word contains a thin lock.
    // unlocked case - edx: original lock word, edi: obj.
    movl %edx, %eax                       // eax: lock word zero except for read barrier bits.
    movl %gs:THREAD_ID_OFFSET, %edx       // edx := thread id
    or   %eax, %edx                       // edx: thread id with count of 0 + read barrier bits.
    lock cmpxchg %edx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edi)
    jnz  .Lretry_lock                     // cmpxchg failed, retry
    ret
.Lalready_thin:  // edx: lock word (with high 2 bits zero and original rb bits), edi: obj.
    movl %gs:THREAD_ID_OFFSET, %ecx       // ecx := thread id
    cmpw %cx, %dx                         // do we hold the lock already?
    jne  .Lslow_lock
    movl %edx, %ecx                       // copy the lock word to check count overflow.
    andl LITERAL(LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED), %ecx  // zero the read barrier bits.
    addl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %ecx  // increment recursion count
    test LITERAL(LOCK_WORD_READ_BARRIER_STATE_MASK), %ecx  // overflowed if either of the upper two bits (28-29) is set
    jne  .Lslow_lock                      // count overflowed so go slow
    movl %edx, %eax                       // copy the lock word as the old val for cmpxchg.
    addl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %edx  // increment recursion count again for real.
    // update lockword, cmpxchg necessary for read barrier bits.
    lock cmpxchg %edx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edi)  // eax: old val, edx: new val.
    jnz  .Lretry_lock                     // cmpxchg failed, retry
    ret
.Lslow_lock:
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME
    movq %gs:THREAD_SELF_OFFSET, %rsi     // pass Thread::Current()
    call SYMBOL(artLockObjectFromCode)    // artLockObjectFromCode(object, Thread*)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME   // restore frame up to return address
    RETURN_IF_EAX_ZERO
END_FUNCTION art_quick_lock_object
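
    /*
     * Fast-path summary of the stub above (an illustrative C-like sketch; the accessor
     * names are pseudocode, not ART's real API):
     *
     *   uint32_t lw = obj->lock_word;
     *   if (lw & kStateMask) goto slow;                  // fat lock / other state.
     *   uint32_t rb = lw & kReadBarrierBits;
     *   if ((lw & ~kReadBarrierBits) == 0) {             // unlocked.
     *     CAS(&obj->lock_word, lw, rb | thread_id);      // owner := self, count := 0.
     *   } else if (owner(lw) == thread_id) {             // thin lock we already hold.
     *     if (count(lw) + 1 overflows) goto slow;
     *     CAS(&obj->lock_word, lw, lw + kCountOne);      // bump the recursion count.
     *   } else {
     *     goto slow;                                     // contended: artLockObjectFromCode.
     *   }
     */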

DEFINE_FUNCTION art_quick_unlock_object
    testl %edi, %edi                      // null check object/edi
    jz   .Lslow_unlock
.Lretry_unlock:
    movl MIRROR_OBJECT_LOCK_WORD_OFFSET(%edi), %ecx  // ecx := lock word
    movl %gs:THREAD_ID_OFFSET, %edx       // edx := thread id
    test LITERAL(LOCK_WORD_STATE_MASK), %ecx
    jnz  .Lslow_unlock                    // lock word contains a monitor
    cmpw %cx, %dx                         // does the thread id match?
    jne  .Lslow_unlock
    movl %ecx, %edx                       // copy the lock word to detect new count of 0.
    andl LITERAL(LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED), %edx  // zero the read barrier bits.
    cmpl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %edx
    jae  .Lrecursive_thin_unlock
    // update lockword, cmpxchg necessary for read barrier bits.
    movl %ecx, %eax                       // eax: old lock word.
    andl LITERAL(LOCK_WORD_READ_BARRIER_STATE_MASK), %ecx  // ecx: new lock word zero except original rb bits.
#ifndef USE_READ_BARRIER
    movl %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edi)
#else
    lock cmpxchg %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edi)  // eax: old val, ecx: new val.
    jnz  .Lretry_unlock                   // cmpxchg failed, retry
#endif
    ret
.Lrecursive_thin_unlock:  // ecx: original lock word, edi: obj
    // update lockword, cmpxchg necessary for read barrier bits.
    movl %ecx, %eax                       // eax: old lock word.
    subl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %ecx
#ifndef USE_READ_BARRIER
    mov  %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edi)
#else
    lock cmpxchg %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edi)  // eax: old val, ecx: new val.
    jnz  .Lretry_unlock                   // cmpxchg failed, retry
#endif
    ret
.Lslow_unlock:
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME
    movq %gs:THREAD_SELF_OFFSET, %rsi     // pass Thread::Current()
    call SYMBOL(artUnlockObjectFromCode)  // artUnlockObjectFromCode(object, Thread*)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME   // restore frame up to return address
    RETURN_IF_EAX_ZERO
END_FUNCTION art_quick_unlock_object
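
    /*
     * Fast-path summary of the stub above (illustrative pseudocode, not ART's real API):
     *
     *   uint32_t lw = obj->lock_word;
     *   if (lw & kStateMask) goto slow;                  // inflated monitor.
     *   if (owner(lw) != thread_id) goto slow;           // we do not hold the lock.
     *   if (count(lw) == 0)
     *     store/CAS lock word keeping only the read barrier bits;  // fully unlock.
     *   else
     *     store/CAS lw - kCountOne;                      // pop one recursion level.
     *
     * Without USE_READ_BARRIER a plain store is enough; with it, the cmpxchg avoids
     * clobbering read barrier bits that may be updated concurrently.
     */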

DEFINE_FUNCTION art_quick_check_cast
    PUSH rdi                              // Save args for exc
    PUSH rsi
    SETUP_FP_CALLEE_SAVE_FRAME
    call SYMBOL(artIsAssignableFromCode)  // (Class* klass, Class* ref_klass)
    testq %rax, %rax
    jz   1f                               // jump forward if not assignable
    RESTORE_FP_CALLEE_SAVE_FRAME
    addq LITERAL(16), %rsp                // pop arguments
    CFI_ADJUST_CFA_OFFSET(-16)

    ret
1:
    RESTORE_FP_CALLEE_SAVE_FRAME
    POP rsi                               // Pop arguments
    POP rdi
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME      // save all registers as basis for long jump context
    mov %gs:THREAD_SELF_OFFSET, %rdx      // pass Thread::Current()
    call SYMBOL(artThrowClassCastException)  // (Class* a, Class* b, Thread*)
    int3                                  // unreached
END_FUNCTION art_quick_check_cast
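
// Note on the FP frame above: artIsAssignableFromCode is a native C++ call, and the
// native ABI treats every XMM register as caller-save, while ART's managed ABI expects
// xmm12-xmm15 to be preserved; SETUP/RESTORE_FP_CALLEE_SAVE_FRAME spill and reload them
// around the call for that reason.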


    /*
     * Entry from managed code for array put operations of objects where the value being stored
     * needs to be checked for compatibility.
     *
     * Currently all the parameters should fit into the 32b portions of the registers. Index always
     * will. So we optimize for a tighter encoding. The 64b versions are in comments.
     *
     * rdi(edi) = array, rsi(esi) = index, rdx(edx) = value
     */
DEFINE_FUNCTION art_quick_aput_obj_with_null_and_bound_check
#if defined(__APPLE__)
    int3
    int3
#else
    testl %edi, %edi
//  testq %rdi, %rdi
    jnz art_quick_aput_obj_with_bound_check
    jmp art_quick_throw_null_pointer_exception
#endif  // __APPLE__
END_FUNCTION art_quick_aput_obj_with_null_and_bound_check


DEFINE_FUNCTION art_quick_aput_obj_with_bound_check
#if defined(__APPLE__)
    int3
    int3
#else
    movl MIRROR_ARRAY_LENGTH_OFFSET(%edi), %ecx
//  movl MIRROR_ARRAY_LENGTH_OFFSET(%rdi), %ecx  // This zero-extends, so value(%rcx)=value(%ecx)
    cmpl %ecx, %esi
    jb art_quick_aput_obj
    mov %esi, %edi
//  mov %rsi, %rdi
    mov %ecx, %esi
//  mov %rcx, %rsi
    jmp art_quick_throw_array_bounds
#endif  // __APPLE__
END_FUNCTION art_quick_aput_obj_with_bound_check


DEFINE_FUNCTION art_quick_aput_obj
    testl %edx, %edx                      // store of null
//  test %rdx, %rdx
    jz .Ldo_aput_null
    movl MIRROR_OBJECT_CLASS_OFFSET(%edi), %ecx
//  movq MIRROR_OBJECT_CLASS_OFFSET(%rdi), %rcx
    movl MIRROR_CLASS_COMPONENT_TYPE_OFFSET(%ecx), %ecx
//  movq MIRROR_CLASS_COMPONENT_TYPE_OFFSET(%rcx), %rcx
    cmpl MIRROR_OBJECT_CLASS_OFFSET(%edx), %ecx  // value's type == array's component type - trivial assignability
//  cmpq MIRROR_CLASS_OFFSET(%rdx), %rcx
    jne .Lcheck_assignability
.Ldo_aput:
    movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%edi, %esi, 4)
//  movq %rdx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%rdi, %rsi, 4)
    movq %gs:THREAD_CARD_TABLE_OFFSET, %rdx
    shrl LITERAL(7), %edi
//  shrl LITERAL(7), %rdi
    movb %dl, (%rdx, %rdi)                // Note: this assumes that top 32b of %rdi are zero
    ret
.Ldo_aput_null:
    movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%edi, %esi, 4)
//  movq %rdx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%rdi, %rsi, 4)
    ret
.Lcheck_assignability:
    // Save arguments.
    PUSH rdi
    PUSH rsi
    PUSH rdx
    subq LITERAL(8), %rsp                 // Alignment padding.
    CFI_ADJUST_CFA_OFFSET(8)
    SETUP_FP_CALLEE_SAVE_FRAME

    // "Uncompress" = do nothing, as already zero-extended on load.
    movl MIRROR_OBJECT_CLASS_OFFSET(%edx), %esi  // Pass arg2 = value's class.
    movq %rcx, %rdi                       // Pass arg1 = array's component type.

    call SYMBOL(artIsAssignableFromCode)  // (Class* a, Class* b)

    // Exception?
    testq %rax, %rax
    jz   .Lthrow_array_store_exception

    RESTORE_FP_CALLEE_SAVE_FRAME
    // Restore arguments.
    addq LITERAL(8), %rsp
    CFI_ADJUST_CFA_OFFSET(-8)
    POP  rdx
    POP  rsi
    POP  rdi

    movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%edi, %esi, 4)
//  movq %rdx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%rdi, %rsi, 4)
    movq %gs:THREAD_CARD_TABLE_OFFSET, %rdx
    shrl LITERAL(7), %edi
//  shrl LITERAL(7), %rdi
    movb %dl, (%rdx, %rdi)                // Note: this assumes that top 32b of %rdi are zero
//  movb %dl, (%rdx, %rdi)
    ret
.Lthrow_array_store_exception:
    RESTORE_FP_CALLEE_SAVE_FRAME
    // Restore arguments.
    addq LITERAL(8), %rsp
    CFI_ADJUST_CFA_OFFSET(-8)
    POP  rdx
    POP  rsi
    POP  rdi

    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME      // Save all registers as basis for long jump context.

    // Outgoing argument set up.
    movq %rdx, %rsi                       // Pass arg 2 = value.
    movq %gs:THREAD_SELF_OFFSET, %rdx     // Pass arg 3 = Thread::Current().
                                          // Pass arg 1 = array.
    call SYMBOL(artThrowArrayStoreException)  // (array, value, Thread*)
    int3                                  // unreached
END_FUNCTION art_quick_aput_obj
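
    /*
     * Write barrier sketch for the reference stores above (pseudocode; names are
     * illustrative):
     *
     *   array->data[index] = value;                      // 32-bit heap reference.
     *   uint8_t* card_table = self->card_table;          // from %gs:THREAD_CARD_TABLE_OFFSET.
     *   card_table[uintptr_t(array) >> 7] = dirty;       // 128-byte cards.
     *
     * The byte written (%dl) is the low byte of the card-table base pointer, which the
     * GC arranges to equal the dirty-card marker, so no separate constant load is needed.
     */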

// TODO: This is quite silly on X86_64 now.
DEFINE_FUNCTION art_quick_memcpy
    call PLT_SYMBOL(memcpy)               // (void*, const void*, size_t)
    ret
END_FUNCTION art_quick_memcpy

NO_ARG_DOWNCALL art_quick_test_suspend, artTestSuspendFromCode, ret

UNIMPLEMENTED art_quick_ldiv
UNIMPLEMENTED art_quick_lmod
UNIMPLEMENTED art_quick_lmul
UNIMPLEMENTED art_quick_lshl
UNIMPLEMENTED art_quick_lshr
UNIMPLEMENTED art_quick_lushr

THREE_ARG_REF_DOWNCALL art_quick_set8_instance, artSet8InstanceFromCode, RETURN_IF_EAX_ZERO
THREE_ARG_REF_DOWNCALL art_quick_set16_instance, artSet16InstanceFromCode, RETURN_IF_EAX_ZERO
THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCode, RETURN_IF_EAX_ZERO
THREE_ARG_REF_DOWNCALL art_quick_set64_instance, artSet64InstanceFromCode, RETURN_IF_EAX_ZERO
THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCode, RETURN_IF_EAX_ZERO

TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION

TWO_ARG_REF_DOWNCALL art_quick_set8_static, artSet8StaticFromCode, RETURN_IF_EAX_ZERO
TWO_ARG_REF_DOWNCALL art_quick_set16_static, artSet16StaticFromCode, RETURN_IF_EAX_ZERO
TWO_ARG_REF_DOWNCALL art_quick_set32_static, artSet32StaticFromCode, RETURN_IF_EAX_ZERO
TWO_ARG_REF_DOWNCALL art_quick_set_obj_static, artSetObjStaticFromCode, RETURN_IF_EAX_ZERO

ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION

// This is singled out as the argument order is different.
DEFINE_FUNCTION art_quick_set64_static
    movq %rsi, %rdx                       // pass new_val
    movl 8(%rsp), %esi                    // pass referrer
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME
                                          // field_idx is in rdi
    movq %gs:THREAD_SELF_OFFSET, %rcx     // pass Thread::Current()
    call SYMBOL(artSet64StaticFromCode)   // (field_idx, referrer, new_val, Thread*)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME   // restore frame up to return address
    RETURN_IF_EAX_ZERO                    // return or deliver exception
END_FUNCTION art_quick_set64_static
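
// The shuffle above exists because managed code delivers (field_idx, new_val) in RDI/RSI,
// while the C++ entrypoint expects (field_idx, referrer, new_val, Thread*): the 64-bit
// value is moved to RDX, the referrer is picked up from the caller's frame into ESI, and
// Thread::Current() lands in RCX.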


DEFINE_FUNCTION art_quick_proxy_invoke_handler
    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_RDI

    movq %gs:THREAD_SELF_OFFSET, %rdx     // Pass Thread::Current().
    movq %rsp, %rcx                       // Pass SP.
    call SYMBOL(artQuickProxyInvokeHandler)  // (proxy method, receiver, Thread*, SP)
    movq %rax, %xmm0                      // Copy return value in case of float returns.
    addq LITERAL(168 + 4*8), %rsp         // Pop arguments.
    CFI_ADJUST_CFA_OFFSET(-168 - 4*8)
    RETURN_OR_DELIVER_PENDING_EXCEPTION
END_FUNCTION art_quick_proxy_invoke_handler

    /*
     * Called to resolve an imt conflict.
     * rax is a hidden argument that holds the target method's dex method index.
     */
DEFINE_FUNCTION art_quick_imt_conflict_trampoline
#if defined(__APPLE__)
    int3
    int3
#else
    movl 8(%rsp), %edi                    // load caller Method*
    movl MIRROR_ART_METHOD_DEX_CACHE_METHODS_OFFSET(%rdi), %edi  // load dex_cache_resolved_methods
    movl MIRROR_OBJECT_ARRAY_DATA_OFFSET(%rdi, %rax, 4), %edi  // load the target method
    jmp art_quick_invoke_interface_trampoline
#endif  // __APPLE__
END_FUNCTION art_quick_imt_conflict_trampoline
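
// In effect (sketch): target = caller_method->dex_cache_resolved_methods_[rax], with the
// resolved target left in RDI before tail-jumping to the shared interface trampoline.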

DEFINE_FUNCTION art_quick_resolution_trampoline
    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME
    movq %gs:THREAD_SELF_OFFSET, %rdx
    movq %rsp, %rcx
    call SYMBOL(artQuickResolutionTrampoline)  // (called, receiver, Thread*, SP)
    movq %rax, %r10                       // Remember returned code pointer in R10.
    movq (%rsp), %rdi                     // Load called method into RDI.
    RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME
    testq %r10, %r10                      // If code pointer is NULL goto deliver pending exception.
    jz 1f
    jmp *%r10                             // Tail call into method.
1:
    DELIVER_PENDING_EXCEPTION
END_FUNCTION art_quick_resolution_trampoline
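
// Flow summary: artQuickResolutionTrampoline links the callee and returns its code
// pointer, leaving the resolved method at the bottom of the frame; a non-null result is
// tail-called with that method reloaded into RDI, while a null result means an exception
// is pending and is delivered instead.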

/* Generic JNI frame layout:
 *
 * #-------------------#
 * |                   |
 * | caller method...  |
 * #-------------------#    <--- SP on entry
 *
 *          |
 *          V
 *
 * #-------------------#
 * | caller method...  |
 * #-------------------#
 * | Return            |
 * | R15               |    callee save
 * | R14               |    callee save
 * | R13               |    callee save
 * | R12               |    callee save
 * | R9                |    arg5
 * | R8                |    arg4
 * | RSI/R6            |    arg1
 * | RBP/R5            |    callee save
 * | RBX/R3            |    callee save
 * | RDX/R2            |    arg2
 * | RCX/R1            |    arg3
 * | XMM7              |    float arg 8
 * | XMM6              |    float arg 7
 * | XMM5              |    float arg 6
 * | XMM4              |    float arg 5
 * | XMM3              |    float arg 4
 * | XMM2              |    float arg 3
 * | XMM1              |    float arg 2
 * | XMM0              |    float arg 1
 * | Padding           |
 * | RDI/Method*       |    <- sp
 * #-------------------#
 * | Scratch Alloca    |    5K scratch space
 * #---------#---------#
 * |         | sp*     |
 * | Tramp.  #---------#
 * | args    | thread  |
 * | Tramp.  #---------#
 * |         | method  |
 * #-------------------#    <--- SP on artQuickGenericJniTrampoline
 *
 *           |
 *           v              artQuickGenericJniTrampoline
 *
 * #-------------------#
 * | caller method...  |
 * #-------------------#
 * | Return            |
 * | Callee-Save Data  |
 * #-------------------#
 * | handle scope      |
 * #-------------------#
 * | Method*           |    <--- (1)
 * #-------------------#
 * | local ref cookie  |    // 4B
 * | handle scope size |    // 4B   TODO: roll into call stack alignment?
 * #-------------------#
 * | JNI Call Stack    |
 * #-------------------#    <--- SP on native call
 * |                   |
 * | Stack for Regs    |    The trampoline assembly will pop these values
 * |                   |    into registers for native call
 * #-------------------#
 * | Native code ptr   |
 * #-------------------#
 * | Free scratch      |
 * #-------------------#
 * | Ptr to (1)        |    <--- RSP
 * #-------------------#
 */
    /*
     * Called to do a generic JNI down-call
     */
DEFINE_FUNCTION art_quick_generic_jni_trampoline
    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_RDI

    movq %rsp, %rbp                       // save SP at (old) callee-save frame
    CFI_DEF_CFA_REGISTER(rbp)

    //
    // reserve a lot of space
    //
    //      4    local state ref
    //      4    padding
    //   4196    4k scratch space, enough for 2x 256 8-byte parameters (TODO: handle scope overhead?)
    //     16    handle scope member fields ?
    // +  112    14x 8-byte stack-2-register space
    // ------
    //   4332
    // 16-byte aligned: 4336
    // Note: 14x8 = 7*16, so the stack stays aligned for the native call...
    //       Also means: the padding is somewhere in the middle
    //
    //
    // New test: use 5K and release
    // 5k = 5120
    subq LITERAL(5120), %rsp
    // prepare for artQuickGenericJniTrampoline call
    // (Thread*,  SP)
    //    rdi    rsi      <= C calling convention
    //  gs:...   rbp      <= where they are
    movq %gs:THREAD_SELF_OFFSET, %rdi
    movq %rbp, %rsi
    call SYMBOL(artQuickGenericJniTrampoline)  // (Thread*, sp)

    // The C call will have registered the complete save-frame on success.
    // The result of the call is:
    // %rax: pointer to native code, 0 on error.
    // %rdx: pointer to the bottom of the used area of the alloca, can restore stack till there.

    // Check for error = 0.
    test %rax, %rax
    jz .Lexception_in_native

    // Release part of the alloca.
    movq %rdx, %rsp

    // pop from the register-passing alloca region
    // what's the right layout?
    popq %rdi
    popq %rsi
    popq %rdx
    popq %rcx
    popq %r8
    popq %r9
    // TODO: skip floating point if unused, some flag.
    movq 0(%rsp), %xmm0
    movq 8(%rsp), %xmm1
    movq 16(%rsp), %xmm2
    movq 24(%rsp), %xmm3
    movq 32(%rsp), %xmm4
    movq 40(%rsp), %xmm5
    movq 48(%rsp), %xmm6
    movq 56(%rsp), %xmm7
    addq LITERAL(64), %rsp                // floating-point done

    // native call
    call *%rax

    // result sign extension is handled in C code
    // prepare for artQuickGenericJniEndTrampoline call
    // (Thread*,  result, result_f)
    //   rdi      rsi     rdx       <= C calling convention
    //  gs:...    rax     xmm0      <= where they are
    movq %gs:THREAD_SELF_OFFSET, %rdi
    movq %rax, %rsi
    movq %xmm0, %rdx
    call SYMBOL(artQuickGenericJniEndTrampoline)

    // Pending exceptions possible.
    // TODO: use cmpq, needs direct encoding because of gas bug
    movq %gs:THREAD_EXCEPTION_OFFSET, %rcx
    test %rcx, %rcx
    jnz .Lexception_in_native

    // Tear down the alloca.
    movq %rbp, %rsp
    CFI_DEF_CFA_REGISTER(rsp)

    // Tear down the callee-save frame.
    // Load FPRs.
    // movq %xmm0, 16(%rsp)               // doesn't make sense!!!
    movq 24(%rsp), %xmm1                  // neither does this!!!
    movq 32(%rsp), %xmm2
    movq 40(%rsp), %xmm3
    movq 48(%rsp), %xmm4
    movq 56(%rsp), %xmm5
    movq 64(%rsp), %xmm6
    movq 72(%rsp), %xmm7
    movq 80(%rsp), %xmm12
    movq 88(%rsp), %xmm13
    movq 96(%rsp), %xmm14
    movq 104(%rsp), %xmm15
    // was 80 bytes
    addq LITERAL(80 + 4*8), %rsp
    CFI_ADJUST_CFA_OFFSET(-80 - 4*8)
    // Restore callee saves and GPR args, mixed together to agree with core spills bitmap.
    POP rcx                               // Arg.
    POP rdx                               // Arg.
    POP rbx                               // Callee save.
    POP rbp                               // Callee save.
    POP rsi                               // Arg.
    POP r8                                // Arg.
    POP r9                                // Arg.
    POP r12                               // Callee save.
    POP r13                               // Callee save.
    POP r14                               // Callee save.
    POP r15                               // Callee save.
    // store into fpr, for when it's a fpr return...
    movq %rax, %xmm0
    ret
.Lexception_in_native:
    movq %gs:THREAD_TOP_QUICK_FRAME_OFFSET, %rsp
    CFI_DEF_CFA_REGISTER(rsp)
    // Do a call to push a new save-all frame required by the runtime.
    call .Lexception_call
.Lexception_call:
    DELIVER_PENDING_EXCEPTION
END_FUNCTION art_quick_generic_jni_trampoline
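
    /*
     * Rough sequence of the trampoline above (a summary of the code, not additional behaviour):
     *   1. Save the refs-and-args frame and remember its SP in RBP.
     *   2. Reserve ~5KB of scratch space with a stack alloca.
     *   3. artQuickGenericJniTrampoline(Thread*, SP) builds the handle scope and the JNI
     *      call stack in that scratch area and returns the native code pointer in RAX and
     *      the new stack top in RDX; RAX == 0 means an exception is already pending.
     *   4. Pop the GPR/XMM argument area into registers and call the native code.
     *   5. artQuickGenericJniEndTrampoline(Thread*, result, result_f) runs the JNI epilogue
     *      and returns the possibly-converted result.
     *   6. Check the thread's pending-exception slot (%gs:THREAD_EXCEPTION_OFFSET), tear
     *      down the alloca and the callee-save frame, and return with the result mirrored
     *      into XMM0 for FP returns.
     */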

    /*
     * Called to bridge from the quick to interpreter ABI. On entry the arguments match those
     * of a quick call:
     * RDI = method being called / to bridge to.
     * RSI, RDX, RCX, R8, R9 are arguments to that method.
     */
DEFINE_FUNCTION art_quick_to_interpreter_bridge
    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME     // Set up frame and save arguments.
    movq %gs:THREAD_SELF_OFFSET, %rsi         // RSI := Thread::Current()
    movq %rsp, %rdx                           // RDX := sp
    call SYMBOL(artQuickToInterpreterBridge)  // (method, Thread*, SP)
    RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME   // TODO: no need to restore arguments in this case.
    movq %rax, %xmm0                          // Place return value also into floating point return value.
    RETURN_OR_DELIVER_PENDING_EXCEPTION       // return or deliver exception
END_FUNCTION art_quick_to_interpreter_bridge

    /*
     * Routine that intercepts method calls and returns.
     */
DEFINE_FUNCTION art_quick_instrumentation_entry
#if defined(__APPLE__)
    int3
    int3
#else
    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME

    movq %rdi, %r12                       // Preserve method pointer in a callee-save.

    movq %gs:THREAD_SELF_OFFSET, %rdx     // Pass thread.
    movq FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE-8(%rsp), %rcx   // Pass return PC.

    call SYMBOL(artInstrumentationMethodEntryFromCode)  // (Method*, Object*, Thread*, LR)

    // %rax = result of call.
    movq %r12, %rdi                       // Reload method pointer.

    leaq art_quick_instrumentation_exit(%rip), %r12   // Set up return through instrumentation
    movq %r12, FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE-8(%rsp)   // exit.

    RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME

    jmp *%rax                             // Tail call to intended method.
#endif  // __APPLE__
END_FUNCTION art_quick_instrumentation_entry

DEFINE_FUNCTION art_quick_instrumentation_exit
    pushq LITERAL(0)                      // Push a fake return PC as there will be none on the stack.

    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME

    // We need to save rax and xmm0. We could use a callee-save from SETUP_REF_ONLY, but then
    // we would need to fully restore it. As there are a good number of callee-save registers, it
    // seems easier to have an extra small stack area. But this should be revisited.

    movq  %rsp, %rsi                      // Pass SP.

    PUSH rax                              // Save integer result.
    subq LITERAL(8), %rsp                 // Save floating-point result.
    CFI_ADJUST_CFA_OFFSET(8)
    movq %xmm0, (%rsp)

    movq  %gs:THREAD_SELF_OFFSET, %rdi    // Pass Thread.
    movq  %rax, %rdx                      // Pass integer result.
    movq  %xmm0, %rcx                     // Pass floating-point result.

    call SYMBOL(artInstrumentationMethodExitFromCode)   // (Thread*, SP, gpr_res, fpr_res)

    movq  %rax, %rdi                      // Store return PC
    movq  %rdx, %rsi                      // Store second return PC in hidden arg.

    movq (%rsp), %xmm0                    // Restore floating-point result.
    addq LITERAL(8), %rsp
    CFI_ADJUST_CFA_OFFSET(-8)
    POP rax                               // Restore integer result.

    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME

    addq LITERAL(8), %rsp                 // Drop fake return pc.

    jmp   *%rdi                           // Return.
END_FUNCTION art_quick_instrumentation_exit
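
// Note on the return protocol above: artInstrumentationMethodExitFromCode hands back two
// values, the PC to jump to (RAX, moved to RDI) and a second PC (RDX, moved to RSI) that
// the jump target can consume as a hidden argument; the GPR/FPR results are saved and
// restored around the call so the original return value is preserved for the caller.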

    /*
     * Instrumentation has requested that we deoptimize into the interpreter. The deoptimization
     * will long jump to the upcall with a special exception of -1.
     */
DEFINE_FUNCTION art_quick_deoptimize
    pushq %rsi                            // Entry point for a jump. Fake that we were called.
                                          // Use hidden arg.
.globl SYMBOL(art_quick_deoptimize_from_compiled_slow_path)  // Entry point for real calls
                                                              // from compiled slow paths.
SYMBOL(art_quick_deoptimize_from_compiled_slow_path):
    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME
    // Stack should be aligned now.
    movq %gs:THREAD_SELF_OFFSET, %rdi     // Pass Thread.
    call SYMBOL(artDeoptimize)            // artDeoptimize(Thread*)
    int3                                  // Unreachable.
END_FUNCTION art_quick_deoptimize

    /*
     * String's compareTo.
     *
     * On entry:
     *    rdi:   this string object (known non-null)
     *    rsi:   comp string object (known non-null)
     */
DEFINE_FUNCTION art_quick_string_compareto
    movl MIRROR_STRING_COUNT_OFFSET(%edi), %r8d
    movl MIRROR_STRING_COUNT_OFFSET(%esi), %r9d
    movl MIRROR_STRING_VALUE_OFFSET(%edi), %r10d
    movl MIRROR_STRING_VALUE_OFFSET(%esi), %r11d
    movl MIRROR_STRING_OFFSET_OFFSET(%edi), %eax
    movl MIRROR_STRING_OFFSET_OFFSET(%esi), %ecx
    /* Build pointers to the start of string data */
    leal MIRROR_CHAR_ARRAY_DATA_OFFSET(%r10d, %eax, 2), %esi
    leal MIRROR_CHAR_ARRAY_DATA_OFFSET(%r11d, %ecx, 2), %edi
    /* Calculate min length and count diff */
    movl  %r8d, %ecx
    movl  %r8d, %eax
    subl  %r9d, %eax
    cmovg %r9d, %ecx
    /*
     * At this point we have:
     *   eax: value to return if first part of strings are equal
     *   ecx: minimum among the lengths of the two strings
     *   esi: pointer to this string data
     *   edi: pointer to comp string data
     */
    jecxz .Lkeep_length
    repe cmpsw                            // find nonmatching chars in [%esi] and [%edi], up to length %ecx
    jne .Lnot_equal
.Lkeep_length:
    ret
    .balign 16
.Lnot_equal:
    movzwl  -2(%esi), %eax                // get last compared char from this string
    movzwl  -2(%edi), %ecx                // get last compared char from comp string
    subl    %ecx, %eax                    // return the difference
    ret
END_FUNCTION art_quick_string_compareto
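
    /*
     * Equivalent logic of the stub above (illustrative C-like sketch):
     *
     *   int32_t min = (this.count < comp.count) ? this.count : comp.count;
     *   for (int32_t i = 0; i < min; ++i) {
     *     if (this.value[this.offset + i] != comp.value[comp.offset + i])
     *       return this_char - comp_char;        // first differing char decides.
     *   }
     *   return this.count - comp.count;          // common prefix equal: length difference.
     *
     * The repe cmpsw loop advances ESI/EDI one element past the mismatch, hence the
     * -2(%esi) / -2(%edi) reloads when computing the character difference.
     */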

UNIMPLEMENTED art_quick_memcmp16

DEFINE_FUNCTION art_quick_assignable_from_code
    SETUP_FP_CALLEE_SAVE_FRAME
    call SYMBOL(artIsAssignableFromCode)  // (const mirror::Class*, const mirror::Class*)
    RESTORE_FP_CALLEE_SAVE_FRAME
    ret
END_FUNCTION art_quick_assignable_from_code


// Return from a nested signal:
// Entry:
//      rdi: address of jmp_buf in TLS

DEFINE_FUNCTION art_nested_signal_return
    // first arg to longjmp is already in correct register
    movq LITERAL(1), %rsi                 // second arg to longjmp (1)
    call PLT_SYMBOL(longjmp)
    int3                                  // won't get here
END_FUNCTION art_nested_signal_return