Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2012 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #include "asm_support_x86_64.S" |
| 18 | |
/*
 * Spill ART's FP callee-save registers (xmm12-xmm15) into a fresh 32-byte
 * stack area. NOTE(review): the native SysV x86-64 ABI treats all xmm
 * registers as caller-saved; xmm12-15 are callee-save only under ART's
 * managed calling convention, hence the explicit spill here. Paired with
 * RESTORE_FP_CALLEE_SAVE_FRAME; CFI_ADJUST_CFA_OFFSET keeps unwind info
 * in sync with the rsp adjustment.
 */
Serguei Katkov | c380191 | 2014-07-08 17:21:53 +0700 | [diff] [blame] | 19 | MACRO0(SETUP_FP_CALLEE_SAVE_FRAME) |
| 20 | // Create space for ART FP callee-saved registers |
Christopher Ferris | ae91207 | 2014-07-11 13:08:40 -0700 | [diff] [blame] | 21 | subq MACRO_LITERAL(4 * 8), %rsp |
Serguei Katkov | c380191 | 2014-07-08 17:21:53 +0700 | [diff] [blame] | 22 | CFI_ADJUST_CFA_OFFSET(4 * 8) |
| 23 | movq %xmm12, 0(%rsp) |
| 24 | movq %xmm13, 8(%rsp) |
| 25 | movq %xmm14, 16(%rsp) |
| 26 | movq %xmm15, 24(%rsp) |
| 27 | END_MACRO |
| 28 | |
/*
 * Inverse of SETUP_FP_CALLEE_SAVE_FRAME: reload xmm12-xmm15 from the
 * 32-byte spill area and release it. The negative CFA adjustment mirrors
 * the positive one made at setup.
 */
| 29 | MACRO0(RESTORE_FP_CALLEE_SAVE_FRAME) |
| 30 | // Restore ART FP callee-saved registers |
| 31 | movq 0(%rsp), %xmm12 |
| 32 | movq 8(%rsp), %xmm13 |
| 33 | movq 16(%rsp), %xmm14 |
| 34 | movq 24(%rsp), %xmm15 |
Christopher Ferris | ae91207 | 2014-07-11 13:08:40 -0700 | [diff] [blame] | 35 | addq MACRO_LITERAL(4 * 8), %rsp |
Serguei Katkov | c380191 | 2014-07-08 17:21:53 +0700 | [diff] [blame] | 36 | CFI_ADJUST_CFA_OFFSET(- 4 * 8) |
| 37 | END_MACRO |
| 38 | |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 39 | // For x86_64, the CFA is rsp+8, the address above the pushed return address on the stack. |
| 40 | |
| 41 | /* |
| 42 | * Macro that sets up the callee save frame to conform with |
| 43 | * Runtime::CreateCalleeSaveMethod(kSaveAll) |
| 44 | */ |
| 45 | MACRO0(SETUP_SAVE_ALL_CALLEE_SAVE_FRAME) |
Ian Rogers | c3ccc10 | 2014-06-25 11:52:14 -0700 | [diff] [blame] | 46 | #if defined(__APPLE__) |
// Mac x86-64 builds are not supported by this path (no %gs-based Thread
// access as used below): emit breakpoints so any accidental use traps.
| 47 | int3 |
| 48 | int3 |
| 49 | #else |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 50 | // R10 := Runtime::Current() |
| 51 | movq _ZN3art7Runtime9instance_E@GOTPCREL(%rip), %r10 |
| 52 | movq (%r10), %r10 |
Ian Rogers | 47d00c0 | 2014-04-16 17:33:27 -0700 | [diff] [blame] | 53 | // Save callee save registers to agree with core spills bitmap. |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 54 | PUSH r15 // Callee save. |
| 55 | PUSH r14 // Callee save. |
| 56 | PUSH r13 // Callee save. |
| 57 | PUSH r12 // Callee save. |
| 58 | PUSH rbp // Callee save. |
| 59 | PUSH rbx // Callee save. |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 60 | // Create space for FPR args, plus space for StackReference<ArtMethod>. |
| 61 | subq MACRO_LITERAL(4 * 8 + 8), %rsp |
| 62 | CFI_ADJUST_CFA_OFFSET(4 * 8 + 8) |
Serguei Katkov | c380191 | 2014-07-08 17:21:53 +0700 | [diff] [blame] | 63 | // Save FPRs. |
// Layout below rsp after the subq: [0] StackReference<ArtMethod>,
// [8..32] xmm12-15. The 6 GPR pushes above sit between this area and the
// caller's return address, matching the FRAME_SIZE check at the bottom.
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 64 | movq %xmm12, 8(%rsp) |
| 65 | movq %xmm13, 16(%rsp) |
| 66 | movq %xmm14, 24(%rsp) |
| 67 | movq %xmm15, 32(%rsp) |
Ian Rogers | 47d00c0 | 2014-04-16 17:33:27 -0700 | [diff] [blame] | 68 | // R10 := ArtMethod* for save all callee save frame method. |
Hiroshi Yamauchi | ab08811 | 2014-07-14 13:00:14 -0700 | [diff] [blame] | 69 | THIS_LOAD_REQUIRES_READ_BARRIER |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 70 | movq RUNTIME_SAVE_ALL_CALLEE_SAVE_FRAME_OFFSET(%r10), %r10 |
| 71 | // Store ArtMethod* to bottom of stack. |
| 72 | movq %r10, 0(%rsp) |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 73 | // Store rsp as the top quick frame. |
| 74 | movq %rsp, %gs:THREAD_TOP_QUICK_FRAME_OFFSET |
Andreas Gampe | 5c1e435 | 2014-04-21 19:28:24 -0700 | [diff] [blame] | 75 | |
| 76 | // Ugly compile-time check, but we only have the preprocessor. |
| 77 | // Last +8: implicit return address pushed on stack when caller made call. |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 78 | #if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVE != 6 * 8 + 4 * 8 + 8 + 8) |
Andreas Gampe | 5c1e435 | 2014-04-21 19:28:24 -0700 | [diff] [blame] | 79 | #error "SAVE_ALL_CALLEE_SAVE_FRAME(X86_64) size not as expected." |
| 80 | #endif |
Ian Rogers | c3ccc10 | 2014-06-25 11:52:14 -0700 | [diff] [blame] | 81 | #endif // __APPLE__ |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 82 | END_MACRO |
| 83 | |
| 84 | /* |
| 85 | * Macro that sets up the callee save frame to conform with |
| 86 | * Runtime::CreateCalleeSaveMethod(kRefsOnly) |
| 87 | */ |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 88 | MACRO0(SETUP_REFS_ONLY_CALLEE_SAVE_FRAME) |
Ian Rogers | c3ccc10 | 2014-06-25 11:52:14 -0700 | [diff] [blame] | 89 | #if defined(__APPLE__) |
// Unsupported on Mac (see SETUP_SAVE_ALL_CALLEE_SAVE_FRAME): trap.
| 90 | int3 |
| 91 | int3 |
| 92 | #else |
Ian Rogers | 47d00c0 | 2014-04-16 17:33:27 -0700 | [diff] [blame] | 93 | // R10 := Runtime::Current() |
| 94 | movq _ZN3art7Runtime9instance_E@GOTPCREL(%rip), %r10 |
| 95 | movq (%r10), %r10 |
| 96 | // Save callee and GPR args, mixed together to agree with core spills bitmap. |
| 97 | PUSH r15 // Callee save. |
| 98 | PUSH r14 // Callee save. |
| 99 | PUSH r13 // Callee save. |
| 100 | PUSH r12 // Callee save. |
| 101 | PUSH rbp // Callee save. |
| 102 | PUSH rbx // Callee save. |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 103 | // Create space for FPR args, plus space for StackReference<ArtMethod>. |
| 104 | subq LITERAL(8 + 4 * 8), %rsp |
| 105 | CFI_ADJUST_CFA_OFFSET(8 + 4 * 8) |
Serguei Katkov | c380191 | 2014-07-08 17:21:53 +0700 | [diff] [blame] | 106 | // Save FPRs. |
| 107 | movq %xmm12, 8(%rsp) |
| 108 | movq %xmm13, 16(%rsp) |
| 109 | movq %xmm14, 24(%rsp) |
| 110 | movq %xmm15, 32(%rsp) |
Ian Rogers | 47d00c0 | 2014-04-16 17:33:27 -0700 | [diff] [blame] | 111 | // R10 := ArtMethod* for refs only callee save frame method. |
Hiroshi Yamauchi | ab08811 | 2014-07-14 13:00:14 -0700 | [diff] [blame] | 112 | THIS_LOAD_REQUIRES_READ_BARRIER |
Ian Rogers | 47d00c0 | 2014-04-16 17:33:27 -0700 | [diff] [blame] | 113 | movq RUNTIME_REFS_ONLY_CALLEE_SAVE_FRAME_OFFSET(%r10), %r10 |
| 114 | // Store ArtMethod* to bottom of stack. |
| 115 | movq %r10, 0(%rsp) |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 116 | // Store rsp as the top quick frame. |
| 117 | movq %rsp, %gs:THREAD_TOP_QUICK_FRAME_OFFSET |
Andreas Gampe | 5c1e435 | 2014-04-21 19:28:24 -0700 | [diff] [blame] | 118 | |
| 119 | // Ugly compile-time check, but we only have the preprocessor. |
| 120 | // Last +8: implicit return address pushed on stack when caller made call. |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 121 | #if (FRAME_SIZE_REFS_ONLY_CALLEE_SAVE != 6 * 8 + 4 * 8 + 8 + 8) |
Andreas Gampe | 5c1e435 | 2014-04-21 19:28:24 -0700 | [diff] [blame] | 122 | #error "REFS_ONLY_CALLEE_SAVE_FRAME(X86_64) size not as expected." |
| 123 | #endif |
Ian Rogers | c3ccc10 | 2014-06-25 11:52:14 -0700 | [diff] [blame] | 124 | #endif // __APPLE__ |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 125 | END_MACRO |
| 126 | |
/*
 * Tear down the frame built by SETUP_REFS_ONLY_CALLEE_SAVE_FRAME:
 * reload xmm12-15 from their spill slots, drop the FPR/method area,
 * then pop the GPR callee saves in reverse push order.
 */
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 127 | MACRO0(RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME) |
Serguei Katkov | c380191 | 2014-07-08 17:21:53 +0700 | [diff] [blame] | 128 | movq 8(%rsp), %xmm12 |
| 129 | movq 16(%rsp), %xmm13 |
| 130 | movq 24(%rsp), %xmm14 |
| 131 | movq 32(%rsp), %xmm15 |
| 132 | addq LITERAL(8 + 4*8), %rsp |
| 133 | CFI_ADJUST_CFA_OFFSET(-8 - 4*8) |
Ian Rogers | 47d00c0 | 2014-04-16 17:33:27 -0700 | [diff] [blame] | 134 | // TODO: optimize by not restoring callee-saves restored by the ABI |
| 135 | POP rbx |
| 136 | POP rbp |
| 137 | POP r12 |
| 138 | POP r13 |
| 139 | POP r14 |
| 140 | POP r15 |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 141 | END_MACRO |
| 142 | |
| 143 | /* |
| 144 | * Macro that sets up the callee save frame to conform with |
| 145 | * Runtime::CreateCalleeSaveMethod(kRefsAndArgs) |
| 146 | */ |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 147 | MACRO0(SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME) |
Ian Rogers | c3ccc10 | 2014-06-25 11:52:14 -0700 | [diff] [blame] | 148 | #if defined(__APPLE__) |
// Unsupported on Mac (see SETUP_SAVE_ALL_CALLEE_SAVE_FRAME): trap.
| 149 | int3 |
| 150 | int3 |
| 151 | #else |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 152 | // R10 := Runtime::Current() |
| 153 | movq _ZN3art7Runtime9instance_E@GOTPCREL(%rip), %r10 |
| 154 | movq (%r10), %r10 |
| 155 | // Save callee and GPR args, mixed together to agree with core spills bitmap. |
// Note: rdi (Quick arg 0, the method*) is deliberately NOT pushed here,
// which is why the frame-size check below counts 11 GPR slots (11 * 8).
| 156 | PUSH r15 // Callee save. |
| 157 | PUSH r14 // Callee save. |
| 158 | PUSH r13 // Callee save. |
| 159 | PUSH r12 // Callee save. |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 160 | PUSH r9 // Quick arg 5. |
| 161 | PUSH r8 // Quick arg 4. |
| 162 | PUSH rsi // Quick arg 1. |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 163 | PUSH rbp // Callee save. |
| 164 | PUSH rbx // Callee save. |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 165 | PUSH rdx // Quick arg 2. |
| 166 | PUSH rcx // Quick arg 3. |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 167 | // Create space for FPR args and create 2 slots, 1 of padding and 1 for the |
| 168 | // StackReference<ArtMethod>. |
Serguei Katkov | c380191 | 2014-07-08 17:21:53 +0700 | [diff] [blame] | 169 | subq MACRO_LITERAL(80 + 4 * 8), %rsp |
| 170 | CFI_ADJUST_CFA_OFFSET(80 + 4 * 8) |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 171 | // R10 := ArtMethod* for ref and args callee save frame method. |
Hiroshi Yamauchi | ab08811 | 2014-07-14 13:00:14 -0700 | [diff] [blame] | 172 | THIS_LOAD_REQUIRES_READ_BARRIER |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 173 | movq RUNTIME_REFS_AND_ARGS_CALLEE_SAVE_FRAME_OFFSET(%r10), %r10 |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 174 | // Save FPRs. |
// [0] method*, [8] padding, [16..72] FP arg regs xmm0-7,
// [80..104] ART FP callee saves xmm12-15.
| 175 | movq %xmm0, 16(%rsp) |
| 176 | movq %xmm1, 24(%rsp) |
| 177 | movq %xmm2, 32(%rsp) |
| 178 | movq %xmm3, 40(%rsp) |
| 179 | movq %xmm4, 48(%rsp) |
| 180 | movq %xmm5, 56(%rsp) |
| 181 | movq %xmm6, 64(%rsp) |
| 182 | movq %xmm7, 72(%rsp) |
Serguei Katkov | c380191 | 2014-07-08 17:21:53 +0700 | [diff] [blame] | 183 | movq %xmm12, 80(%rsp) |
| 184 | movq %xmm13, 88(%rsp) |
| 185 | movq %xmm14, 96(%rsp) |
| 186 | movq %xmm15, 104(%rsp) |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 187 | // Store ArtMethod* to bottom of stack. |
| 188 | movq %r10, 0(%rsp) |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 189 | // Store rsp as the top quick frame. |
| 190 | movq %rsp, %gs:THREAD_TOP_QUICK_FRAME_OFFSET |
Andreas Gampe | 5c1e435 | 2014-04-21 19:28:24 -0700 | [diff] [blame] | 191 | |
| 192 | // Ugly compile-time check, but we only have the preprocessor. |
| 193 | // Last +8: implicit return address pushed on stack when caller made call. |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 194 | #if (FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE != 11 * 8 + 4 * 8 + 80 + 8) |
Andreas Gampe | 5c1e435 | 2014-04-21 19:28:24 -0700 | [diff] [blame] | 195 | #error "REFS_AND_ARGS_CALLEE_SAVE_FRAME(X86_64) size not as expected." |
| 196 | #endif |
Ian Rogers | c3ccc10 | 2014-06-25 11:52:14 -0700 | [diff] [blame] | 197 | #endif // __APPLE__ |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 198 | END_MACRO |
| 199 | |
/*
 * Variant of SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME that stores the
 * ArtMethod* already held in rdi instead of loading the runtime's
 * saved-frame method — so no Runtime::Current() access (and no R10
 * clobber) is needed. Frame layout is otherwise identical.
 */
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 200 | MACRO0(SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_RDI) |
| 201 | // Save callee and GPR args, mixed together to agree with core spills bitmap. |
| 202 | PUSH r15 // Callee save. |
| 203 | PUSH r14 // Callee save. |
| 204 | PUSH r13 // Callee save. |
| 205 | PUSH r12 // Callee save. |
| 206 | PUSH r9 // Quick arg 5. |
| 207 | PUSH r8 // Quick arg 4. |
| 208 | PUSH rsi // Quick arg 1. |
| 209 | PUSH rbp // Callee save. |
| 210 | PUSH rbx // Callee save. |
| 211 | PUSH rdx // Quick arg 2. |
| 212 | PUSH rcx // Quick arg 3. |
| 213 | // Create space for FPR args and create 2 slots, 1 of padding and 1 for the |
| 214 | // StackReference<ArtMethod>. |
| 215 | subq LITERAL(80 + 4 * 8), %rsp |
| 216 | CFI_ADJUST_CFA_OFFSET(80 + 4 * 8) |
| 217 | // Save FPRs. |
| 218 | movq %xmm0, 16(%rsp) |
| 219 | movq %xmm1, 24(%rsp) |
| 220 | movq %xmm2, 32(%rsp) |
| 221 | movq %xmm3, 40(%rsp) |
| 222 | movq %xmm4, 48(%rsp) |
| 223 | movq %xmm5, 56(%rsp) |
| 224 | movq %xmm6, 64(%rsp) |
| 225 | movq %xmm7, 72(%rsp) |
| 226 | movq %xmm12, 80(%rsp) |
| 227 | movq %xmm13, 88(%rsp) |
| 228 | movq %xmm14, 96(%rsp) |
| 229 | movq %xmm15, 104(%rsp) |
| 230 | // Store ArtMethod* to bottom of stack. |
| 231 | movq %rdi, 0(%rsp) |
| 232 | // Store rsp as the top quick frame. |
| 233 | movq %rsp, %gs:THREAD_TOP_QUICK_FRAME_OFFSET |
| 234 | END_MACRO |
| 235 | |
/*
 * Tear down the frame built by either SETUP_REFS_AND_ARGS_* macro:
 * reload all FP arg regs and ART FP callee saves, drop the FPR/method
 * area, then pop the mixed GPR args/callee saves in reverse push order.
 * rdi is not popped (it was never pushed by the setup macros).
 */
| 236 | MACRO0(RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME) |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 237 | // Restore FPRs. |
| 238 | movq 16(%rsp), %xmm0 |
| 239 | movq 24(%rsp), %xmm1 |
| 240 | movq 32(%rsp), %xmm2 |
| 241 | movq 40(%rsp), %xmm3 |
| 242 | movq 48(%rsp), %xmm4 |
| 243 | movq 56(%rsp), %xmm5 |
| 244 | movq 64(%rsp), %xmm6 |
| 245 | movq 72(%rsp), %xmm7 |
Serguei Katkov | c380191 | 2014-07-08 17:21:53 +0700 | [diff] [blame] | 246 | movq 80(%rsp), %xmm12 |
| 247 | movq 88(%rsp), %xmm13 |
| 248 | movq 96(%rsp), %xmm14 |
| 249 | movq 104(%rsp), %xmm15 |
| 250 | addq MACRO_LITERAL(80 + 4 * 8), %rsp |
| 251 | CFI_ADJUST_CFA_OFFSET(-(80 + 4 * 8)) |
Ian Rogers | befbd57 | 2014-03-06 01:13:39 -0800 | [diff] [blame] | 252 | // Restore callee and GPR args, mixed together to agree with core spills bitmap. |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 253 | POP rcx |
| 254 | POP rdx |
| 255 | POP rbx |
| 256 | POP rbp |
| 257 | POP rsi |
| 258 | POP r8 |
| 259 | POP r9 |
| 260 | POP r12 |
| 261 | POP r13 |
| 262 | POP r14 |
| 263 | POP r15 |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 264 | END_MACRO |
| 265 | |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 266 | |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 267 | /* |
| 268 | * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending |
| 269 | * exception is Thread::Current()->exception_. |
| 270 | */ |
| 271 | MACRO0(DELIVER_PENDING_EXCEPTION) |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 272 | SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save callee saves for throw |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 273 | // (Thread*) setup |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 274 | movq %gs:THREAD_SELF_OFFSET, %rdi |
// Control does not return here (see UNREACHABLE below): the callee
// long-jumps to the exception handler using the saved register context.
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 275 | call SYMBOL(artDeliverPendingExceptionFromCode) // artDeliverPendingExceptionFromCode(Thread*) |
Ian Rogers | 47d00c0 | 2014-04-16 17:33:27 -0700 | [diff] [blame] | 276 | UNREACHABLE |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 277 | END_MACRO |
| 278 | |
/*
 * Emits a function `c_name` that raises a runtime exception taking no
 * managed arguments: saves the full register context, then calls
 * cxx_name(Thread*), which does not return.
 */
| 279 | MACRO2(NO_ARG_RUNTIME_EXCEPTION, c_name, cxx_name) |
| 280 | DEFINE_FUNCTION VAR(c_name, 0) |
Ian Rogers | 47d00c0 | 2014-04-16 17:33:27 -0700 | [diff] [blame] | 281 | SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context |
| 282 | // Outgoing argument set up |
Ian Rogers | 47d00c0 | 2014-04-16 17:33:27 -0700 | [diff] [blame] | 283 | movq %gs:THREAD_SELF_OFFSET, %rdi // pass Thread::Current() |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 284 | call VAR(cxx_name, 1) // cxx_name(Thread*) |
Ian Rogers | 47d00c0 | 2014-04-16 17:33:27 -0700 | [diff] [blame] | 285 | UNREACHABLE |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 286 | END_FUNCTION VAR(c_name, 0) |
| 287 | END_MACRO |
| 288 | |
/*
 * Emits a function `c_name` that raises a runtime exception taking one
 * managed argument. The argument is expected to already be in rdi
 * (first SysV argument register) on entry — Thread* is placed in rsi.
 */
| 289 | MACRO2(ONE_ARG_RUNTIME_EXCEPTION, c_name, cxx_name) |
| 290 | DEFINE_FUNCTION VAR(c_name, 0) |
Dmitry Petrochenko | fca8220 | 2014-03-21 11:21:37 +0700 | [diff] [blame] | 291 | SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context |
| 292 | // Outgoing argument set up |
Ian Rogers | 47d00c0 | 2014-04-16 17:33:27 -0700 | [diff] [blame] | 293 | movq %gs:THREAD_SELF_OFFSET, %rsi // pass Thread::Current() |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 294 | call VAR(cxx_name, 1) // cxx_name(arg1, Thread*) |
Ian Rogers | 47d00c0 | 2014-04-16 17:33:27 -0700 | [diff] [blame] | 295 | UNREACHABLE |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 296 | END_FUNCTION VAR(c_name, 0) |
| 297 | END_MACRO |
| 298 | |
/*
 * Emits a function `c_name` that raises a runtime exception taking two
 * managed arguments, expected in rdi/rsi on entry — Thread* goes in rdx.
 */
| 299 | MACRO2(TWO_ARG_RUNTIME_EXCEPTION, c_name, cxx_name) |
| 300 | DEFINE_FUNCTION VAR(c_name, 0) |
Ian Rogers | 47d00c0 | 2014-04-16 17:33:27 -0700 | [diff] [blame] | 301 | SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context |
| 302 | // Outgoing argument set up |
Ian Rogers | 47d00c0 | 2014-04-16 17:33:27 -0700 | [diff] [blame] | 303 | movq %gs:THREAD_SELF_OFFSET, %rdx // pass Thread::Current() |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 304 | call VAR(cxx_name, 1) // cxx_name(arg1, arg2, Thread*) |
Ian Rogers | 47d00c0 | 2014-04-16 17:33:27 -0700 | [diff] [blame] | 305 | UNREACHABLE |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 306 | END_FUNCTION VAR(c_name, 0) |
| 307 | END_MACRO |
| 308 | |
// Instantiations of the exception-throw stubs. Each line expands to a
// complete DEFINE_FUNCTION that saves all registers and never returns.
| 309 | /* |
| 310 | * Called by managed code to create and deliver a NullPointerException. |
| 311 | */ |
| 312 | NO_ARG_RUNTIME_EXCEPTION art_quick_throw_null_pointer_exception, artThrowNullPointerExceptionFromCode |
| 313 | |
| 314 | /* |
| 315 | * Called by managed code to create and deliver an ArithmeticException. |
| 316 | */ |
| 317 | NO_ARG_RUNTIME_EXCEPTION art_quick_throw_div_zero, artThrowDivZeroFromCode |
| 318 | |
| 319 | /* |
| 320 | * Called by managed code to create and deliver a StackOverflowError. |
| 321 | */ |
| 322 | NO_ARG_RUNTIME_EXCEPTION art_quick_throw_stack_overflow, artThrowStackOverflowFromCode |
| 323 | |
| 324 | /* |
| 325 | * Called by managed code, saves callee saves and then calls artThrowException |
| 326 | * that will place a mock Method* at the bottom of the stack. Arg1 holds the exception. |
| 327 | */ |
| 328 | ONE_ARG_RUNTIME_EXCEPTION art_quick_deliver_exception, artDeliverExceptionFromCode |
| 329 | |
| 330 | /* |
| 331 | * Called by managed code to create and deliver a NoSuchMethodError. |
| 332 | */ |
| 333 | ONE_ARG_RUNTIME_EXCEPTION art_quick_throw_no_such_method, artThrowNoSuchMethodFromCode |
| 334 | |
| 335 | /* |
| 336 | * Called by managed code to create and deliver an ArrayIndexOutOfBoundsException. Arg1 holds |
| 337 | * index, arg2 holds limit. |
| 338 | */ |
| 339 | TWO_ARG_RUNTIME_EXCEPTION art_quick_throw_array_bounds, artThrowArrayBoundsFromCode |
| 340 | |
| 341 | /* |
| 342 | * All generated callsites for interface invokes and invocation slow paths will load arguments |
Andreas Gampe | 51f7635 | 2014-05-21 08:28:48 -0700 | [diff] [blame] | 343 | * as usual - except instead of loading arg0/rdi with the target Method*, arg0/rdi will contain |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 344 | * the method_idx. This wrapper will save arg1-arg3, load the caller's Method*, align the |
| 345 | * stack and call the appropriate C helper. |
Andreas Gampe | 51f7635 | 2014-05-21 08:28:48 -0700 | [diff] [blame] | 346 | * NOTE: "this" is first visible argument of the target, and so can be found in arg1/rsi. |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 347 | * |
Andreas Gampe | 51f7635 | 2014-05-21 08:28:48 -0700 | [diff] [blame] | 348 | * The helper will attempt to locate the target and return a 128-bit result in rax/rdx consisting |
| 349 | * of the target Method* in rax and method->code_ in rdx. |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 350 | * |
Andreas Gampe | 51f7635 | 2014-05-21 08:28:48 -0700 | [diff] [blame] | 351 | * If unsuccessful, the helper will return NULL/????. There will be a pending exception in the |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 352 | * thread and we branch to another stub to deliver it. |
| 353 | * |
Andreas Gampe | 51f7635 | 2014-05-21 08:28:48 -0700 | [diff] [blame] | 354 | * On success this wrapper will restore arguments and *jump* to the target, leaving the return |
| 355 | * location on the stack. |
| 356 | * |
| 357 | * Adapted from x86 code. |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 358 | */ |
| 359 | MACRO2(INVOKE_TRAMPOLINE, c_name, cxx_name) |
| 360 | DEFINE_FUNCTION VAR(c_name, 0) |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 361 | SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME // save callee saves in case allocation triggers GC |
Andreas Gampe | 51f7635 | 2014-05-21 08:28:48 -0700 | [diff] [blame] | 362 | // Helper signature is always |
| 363 | // (method_idx, *this_object, *caller_method, *self, sp) |
| 364 | |
// method_idx/this are already in rdi/rsi from the managed caller. The
// caller's StackReference<ArtMethod> sits just above this frame; it is a
// 32-bit stack reference, hence the movl.
Serguei Katkov | 7c748c1 | 2014-06-06 10:50:37 -0700 | [diff] [blame] | 365 | movl FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE(%rsp), %edx // pass caller Method* |
Andreas Gampe | 51f7635 | 2014-05-21 08:28:48 -0700 | [diff] [blame] | 366 | movq %gs:THREAD_SELF_OFFSET, %rcx // pass Thread |
| 367 | movq %rsp, %r8 // pass SP |
| 368 | |
Andreas Gampe | 29b3841 | 2014-08-13 00:15:43 -0700 | [diff] [blame] | 369 | call VAR(cxx_name, 1) // cxx_name(arg1, arg2, caller method*, Thread*, SP) |
Andreas Gampe | 51f7635 | 2014-05-21 08:28:48 -0700 | [diff] [blame] | 370 | // On success: Method* in rax, code pointer in rdx (see header comment). |
| 371 | movq %rax, %rdi |
| 372 | movq %rdx, %rax |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 373 | RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME |
Andreas Gampe | 51f7635 | 2014-05-21 08:28:48 -0700 | [diff] [blame] | 374 | |
// rdi now holds the resolved Method* (managed arg0); NULL means lookup
// failed and a pending exception must be delivered instead.
| 375 | testq %rdi, %rdi |
| 376 | jz 1f |
| 377 | |
| 378 | // Tail call to intended method. |
| 379 | jmp *%rax |
| 380 | 1: |
| 381 | DELIVER_PENDING_EXCEPTION |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 382 | END_FUNCTION VAR(c_name, 0) |
| 383 | END_MACRO |
| 384 | |
// Instantiate one invoke trampoline per dispatch kind (interface, static,
// direct, super, virtual), each paired with its C++ resolution helper.
| 385 | INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline, artInvokeInterfaceTrampoline |
| 386 | INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck |
| 387 | |
| 388 | INVOKE_TRAMPOLINE art_quick_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck |
| 389 | INVOKE_TRAMPOLINE art_quick_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck |
| 390 | INVOKE_TRAMPOLINE art_quick_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck |
| 391 | INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck |
| 392 | |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 393 | |
| 394 | /* |
| 395 | * Helper for quick invocation stub to set up XMM registers. Assumes r10 == shorty, |
| 396 | * r11 == arg_array. Clobbers r10, r11 and al. Branches to xmm_setup_finished if it encounters |
| 397 | * the end of the shorty. |
| 398 | */ |
// Shorty characters are ASCII: 68 = 'D' (double), 70 = 'F' (float),
// 74 = 'J' (long). arg_array is laid out in 32-bit slots, so longs and
// doubles consume two slots (+8) while everything else consumes one (+4).
| 399 | MACRO2(LOOP_OVER_SHORTY_LOADING_XMMS, xmm_reg, finished) |
| 400 | 1: // LOOP |
| 401 | movb (%r10), %al // al := *shorty |
Ian Rogers | 44d6ff1 | 2014-03-06 23:11:11 -0800 | [diff] [blame] | 402 | addq MACRO_LITERAL(1), %r10 // shorty++ |
| 403 | cmpb MACRO_LITERAL(0), %al // if (al == '\0') goto xmm_setup_finished |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 404 | je VAR(finished, 1) |
Ian Rogers | 44d6ff1 | 2014-03-06 23:11:11 -0800 | [diff] [blame] | 405 | cmpb MACRO_LITERAL(68), %al // if (al == 'D') goto FOUND_DOUBLE |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 406 | je 2f |
Ian Rogers | 44d6ff1 | 2014-03-06 23:11:11 -0800 | [diff] [blame] | 407 | cmpb MACRO_LITERAL(70), %al // if (al == 'F') goto FOUND_FLOAT |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 408 | je 3f |
Ian Rogers | 44d6ff1 | 2014-03-06 23:11:11 -0800 | [diff] [blame] | 409 | addq MACRO_LITERAL(4), %r11 // arg_array++ |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 410 | // Handle extra space in arg array taken by a long. |
Ian Rogers | 44d6ff1 | 2014-03-06 23:11:11 -0800 | [diff] [blame] | 411 | cmpb MACRO_LITERAL(74), %al // if (al != 'J') goto LOOP |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 412 | jne 1b |
Ian Rogers | 44d6ff1 | 2014-03-06 23:11:11 -0800 | [diff] [blame] | 413 | addq MACRO_LITERAL(4), %r11 // arg_array++ |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 414 | jmp 1b // goto LOOP |
| 415 | 2: // FOUND_DOUBLE |
| 416 | movsd (%r11), REG_VAR(xmm_reg, 0) |
Ian Rogers | 44d6ff1 | 2014-03-06 23:11:11 -0800 | [diff] [blame] | 417 | addq MACRO_LITERAL(8), %r11 // arg_array+=2 |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 418 | jmp 4f |
| 419 | 3: // FOUND_FLOAT |
| 420 | movss (%r11), REG_VAR(xmm_reg, 0) |
Ian Rogers | 44d6ff1 | 2014-03-06 23:11:11 -0800 | [diff] [blame] | 421 | addq MACRO_LITERAL(4), %r11 // arg_array++ |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 422 | 4: |
| 423 | END_MACRO |
| 424 | |
| 425 | /* |
| 426 | * Helper for quick invocation stub to set up GPR registers. Assumes r10 == shorty, |
| 427 | * r11 == arg_array. Clobbers r10, r11 and al. Branches to gpr_setup_finished if it encounters |
| 428 | * the end of the shorty. |
| 429 | */ |
// GPR counterpart of LOOP_OVER_SHORTY_LOADING_XMMS: loads the next
// integral arg into gpr_reg64 ('J'/long, full 8 bytes) or gpr_reg32
// (everything else, 4-byte slot), skipping over FP args which were
// already consumed by the XMM pass. ASCII: 68='D', 70='F', 74='J'.
| 430 | MACRO3(LOOP_OVER_SHORTY_LOADING_GPRS, gpr_reg64, gpr_reg32, finished) |
| 431 | 1: // LOOP |
| 432 | movb (%r10), %al // al := *shorty |
Ian Rogers | 44d6ff1 | 2014-03-06 23:11:11 -0800 | [diff] [blame] | 433 | addq MACRO_LITERAL(1), %r10 // shorty++ |
| 434 | cmpb MACRO_LITERAL(0), %al // if (al == '\0') goto gpr_setup_finished |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 435 | je VAR(finished, 2) |
Ian Rogers | 44d6ff1 | 2014-03-06 23:11:11 -0800 | [diff] [blame] | 436 | cmpb MACRO_LITERAL(74), %al // if (al == 'J') goto FOUND_LONG |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 437 | je 2f |
Ian Rogers | 44d6ff1 | 2014-03-06 23:11:11 -0800 | [diff] [blame] | 438 | cmpb MACRO_LITERAL(70), %al // if (al == 'F') goto SKIP_FLOAT |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 439 | je 3f |
Ian Rogers | 44d6ff1 | 2014-03-06 23:11:11 -0800 | [diff] [blame] | 440 | cmpb MACRO_LITERAL(68), %al // if (al == 'D') goto SKIP_DOUBLE |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 441 | je 4f |
| 442 | movl (%r11), REG_VAR(gpr_reg32, 1) |
Ian Rogers | 44d6ff1 | 2014-03-06 23:11:11 -0800 | [diff] [blame] | 443 | addq MACRO_LITERAL(4), %r11 // arg_array++ |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 444 | jmp 5f |
| 445 | 2: // FOUND_LONG |
| 446 | movq (%r11), REG_VAR(gpr_reg64, 0) |
Ian Rogers | 44d6ff1 | 2014-03-06 23:11:11 -0800 | [diff] [blame] | 447 | addq MACRO_LITERAL(8), %r11 // arg_array+=2 |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 448 | jmp 5f |
| 449 | 3: // SKIP_FLOAT |
Ian Rogers | 44d6ff1 | 2014-03-06 23:11:11 -0800 | [diff] [blame] | 450 | addq MACRO_LITERAL(4), %r11 // arg_array++ |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 451 | jmp 1b |
| 452 | 4: // SKIP_DOUBLE |
Ian Rogers | 44d6ff1 | 2014-03-06 23:11:11 -0800 | [diff] [blame] | 453 | addq MACRO_LITERAL(8), %r11 // arg_array+=2 |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 454 | jmp 1b |
| 455 | 5: |
| 456 | END_MACRO |
| 457 | |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 458 | /* |
| 459 | * Quick invocation stub. |
Ian Rogers | 0177e53 | 2014-02-11 16:30:46 -0800 | [diff] [blame] | 460 | * On entry: |
| 461 | * [sp] = return address |
| 462 | * rdi = method pointer |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 463 | * rsi = argument array that must at least contain the this pointer. |
Ian Rogers | 0177e53 | 2014-02-11 16:30:46 -0800 | [diff] [blame] | 464 | * rdx = size of argument array in bytes |
| 465 | * rcx = (managed) thread pointer |
| 466 | * r8 = JValue* result |
| 467 | * r9 = char* shorty |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 468 | */ |
| 469 | DEFINE_FUNCTION art_quick_invoke_stub |
Ian Rogers | c3ccc10 | 2014-06-25 11:52:14 -0700 | [diff] [blame] | 470 | #if defined(__APPLE__) |
| 471 | int3 |
| 472 | int3 |
| 473 | #else |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 474 | // Set up argument XMM registers. |
| 475 | leaq 1(%r9), %r10 // R10 := shorty + 1 ; ie skip return arg character. |
| 476 | leaq 4(%rsi), %r11 // R11 := arg_array + 4 ; ie skip this pointer. |
| 477 | LOOP_OVER_SHORTY_LOADING_XMMS xmm0, .Lxmm_setup_finished |
| 478 | LOOP_OVER_SHORTY_LOADING_XMMS xmm1, .Lxmm_setup_finished |
| 479 | LOOP_OVER_SHORTY_LOADING_XMMS xmm2, .Lxmm_setup_finished |
| 480 | LOOP_OVER_SHORTY_LOADING_XMMS xmm3, .Lxmm_setup_finished |
| 481 | LOOP_OVER_SHORTY_LOADING_XMMS xmm4, .Lxmm_setup_finished |
| 482 | LOOP_OVER_SHORTY_LOADING_XMMS xmm5, .Lxmm_setup_finished |
| 483 | LOOP_OVER_SHORTY_LOADING_XMMS xmm6, .Lxmm_setup_finished |
| 484 | LOOP_OVER_SHORTY_LOADING_XMMS xmm7, .Lxmm_setup_finished |
| 485 | .balign 16 |
| 486 | .Lxmm_setup_finished: |
| 487 | PUSH rbp // Save rbp. |
| 488 | PUSH r8 // Save r8/result*. |
| 489 | PUSH r9 // Save r9/shorty*. |
Nicolas Geoffray | 4808846 | 2014-12-12 10:29:38 +0000 | [diff] [blame] | 490 | PUSH rbx // Save native callee save rbx |
| 491 | PUSH r12 // Save native callee save r12 |
| 492 | PUSH r13 // Save native callee save r13 |
| 493 | PUSH r14 // Save native callee save r14 |
| 494 | PUSH r15 // Save native callee save r15 |
Ian Rogers | 47d00c0 | 2014-04-16 17:33:27 -0700 | [diff] [blame] | 495 | movq %rsp, %rbp // Copy value of stack pointer into base pointer. |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 496 | CFI_DEF_CFA_REGISTER(rbp) |
Andreas Gampe | cf4035a | 2014-05-28 22:43:01 -0700 | [diff] [blame] | 497 | |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 498 | movl %edx, %r10d |
Nicolas Geoffray | 4808846 | 2014-12-12 10:29:38 +0000 | [diff] [blame] | 499 | addl LITERAL(100), %edx // Reserve space for return addr, StackReference<method>, rbp, |
| 500 | // r8, r9, rbx, r12, r13, r14, and r15 in frame. |
| 501 | andl LITERAL(0xFFFFFFF0), %edx // Align frame size to 16 bytes. |
| 502 | subl LITERAL(72), %edx // Remove space for return address, rbp, r8, r9, rbx, r12, |
| 503 | // r13, r14, and r15 |
| 504 | subq %rdx, %rsp // Reserve stack space for argument array. |
Andreas Gampe | cf4035a | 2014-05-28 22:43:01 -0700 | [diff] [blame] | 505 | |
| 506 | #if (STACK_REFERENCE_SIZE != 4) |
| 507 | #error "STACK_REFERENCE_SIZE(X86_64) size not as expected." |
| 508 | #endif |
| 509 | movl LITERAL(0), (%rsp) // Store NULL for method* |
| 510 | |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 511 | movl %r10d, %ecx // Place size of args in rcx. |
Nicolas Geoffray | 4808846 | 2014-12-12 10:29:38 +0000 | [diff] [blame] | 512 | movq %rdi, %rax // rax := method to be called |
| 513 | movq %rsi, %r11 // r11 := arg_array |
| 514 | leaq 4(%rsp), %rdi // rdi is pointing just above the StackReference<method> in the |
Andreas Gampe | cf4035a | 2014-05-28 22:43:01 -0700 | [diff] [blame] | 515 | // stack arguments. |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 516 | // Copy arg array into stack. |
| 517 | rep movsb // while (rcx--) { *rdi++ = *rsi++ } |
Nicolas Geoffray | 4808846 | 2014-12-12 10:29:38 +0000 | [diff] [blame] | 518 | leaq 1(%r9), %r10 // r10 := shorty + 1 ; ie skip return arg character |
| 519 | movq %rax, %rdi // rdi := method to be called |
| 520 | movl (%r11), %esi // rsi := this pointer |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 521 | addq LITERAL(4), %r11 // arg_array++ |
| 522 | LOOP_OVER_SHORTY_LOADING_GPRS rdx, edx, .Lgpr_setup_finished |
| 523 | LOOP_OVER_SHORTY_LOADING_GPRS rcx, ecx, .Lgpr_setup_finished |
| 524 | LOOP_OVER_SHORTY_LOADING_GPRS r8, r8d, .Lgpr_setup_finished |
| 525 | LOOP_OVER_SHORTY_LOADING_GPRS r9, r9d, .Lgpr_setup_finished |
| 526 | .Lgpr_setup_finished: |
Mathieu Chartier | 2d72101 | 2014-11-10 11:08:06 -0800 | [diff] [blame] | 527 | call *MIRROR_ART_METHOD_QUICK_CODE_OFFSET_64(%rdi) // Call the method. |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 528 | movq %rbp, %rsp // Restore stack pointer. |
Nicolas Geoffray | 4808846 | 2014-12-12 10:29:38 +0000 | [diff] [blame] | 529 | POP r15 // Pop r15 |
| 530 | POP r14 // Pop r14 |
| 531 | POP r13 // Pop r13 |
| 532 | POP r12 // Pop r12 |
| 533 | POP rbx // Pop rbx |
| 534 | POP r9 // Pop r9 - shorty* |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 535 | POP r8 // Pop r8 - result*. |
| 536 | POP rbp // Pop rbp |
| 537 | cmpb LITERAL(68), (%r9) // Test if result type char == 'D'. |
| 538 | je .Lreturn_double_quick |
| 539 | cmpb LITERAL(70), (%r9) // Test if result type char == 'F'. |
| 540 | je .Lreturn_float_quick |
| 541 | movq %rax, (%r8) // Store the result assuming its a long, int or Object* |
| 542 | ret |
| 543 | .Lreturn_double_quick: |
Nicolas Geoffray | 4808846 | 2014-12-12 10:29:38 +0000 | [diff] [blame] | 544 | movsd %xmm0, (%r8) // Store the double floating point result. |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 545 | ret |
| 546 | .Lreturn_float_quick: |
Nicolas Geoffray | 4808846 | 2014-12-12 10:29:38 +0000 | [diff] [blame] | 547 | movss %xmm0, (%r8) // Store the floating point result. |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 548 | ret |
Ian Rogers | c3ccc10 | 2014-06-25 11:52:14 -0700 | [diff] [blame] | 549 | #endif // __APPLE__ |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 550 | END_FUNCTION art_quick_invoke_stub |
| 551 | |
| 552 | /* |
| 553 | * Quick invocation stub. |
| 554 | * On entry: |
| 555 | * [sp] = return address |
| 556 | * rdi = method pointer |
| 557 | * rsi = argument array or NULL if no arguments. |
| 558 | * rdx = size of argument array in bytes |
| 559 | * rcx = (managed) thread pointer |
| 560 | * r8 = JValue* result |
| 561 | * r9 = char* shorty |
| 562 | */ |
DEFINE_FUNCTION art_quick_invoke_static_stub
#if defined(__APPLE__)
    int3
    int3
#else
    // Set up argument XMM registers from the shorty: FP args go in xmm0-xmm7.
    leaq 1(%r9), %r10                 // R10 := shorty + 1  ; ie skip return arg character
    movq %rsi, %r11                   // R11 := arg_array
    LOOP_OVER_SHORTY_LOADING_XMMS xmm0, .Lxmm_setup_finished2
    LOOP_OVER_SHORTY_LOADING_XMMS xmm1, .Lxmm_setup_finished2
    LOOP_OVER_SHORTY_LOADING_XMMS xmm2, .Lxmm_setup_finished2
    LOOP_OVER_SHORTY_LOADING_XMMS xmm3, .Lxmm_setup_finished2
    LOOP_OVER_SHORTY_LOADING_XMMS xmm4, .Lxmm_setup_finished2
    LOOP_OVER_SHORTY_LOADING_XMMS xmm5, .Lxmm_setup_finished2
    LOOP_OVER_SHORTY_LOADING_XMMS xmm6, .Lxmm_setup_finished2
    LOOP_OVER_SHORTY_LOADING_XMMS xmm7, .Lxmm_setup_finished2
    .balign 16
.Lxmm_setup_finished2:
    PUSH rbp                          // Save rbp.
    PUSH r8                           // Save r8/result*.
    PUSH r9                           // Save r9/shorty*.
    PUSH rbx                          // Save rbx (native callee save).
    PUSH r12                          // Save r12 (native callee save).
    PUSH r13                          // Save r13 (native callee save).
    PUSH r14                          // Save r14 (native callee save).
    PUSH r15                          // Save r15 (native callee save).
    movq %rsp, %rbp                   // Copy value of stack pointer into base pointer.
    CFI_DEF_CFA_REGISTER(rbp)

    movl %edx, %r10d                  // Keep raw arg-array size in r10d for the copy below.
    addl LITERAL(100), %edx           // Reserve space for return addr, StackReference<method>, rbp,
                                      // r8, r9, rbx, r12, r13, r14, and r15 in frame.
    andl LITERAL(0xFFFFFFF0), %edx    // Align frame size to 16 bytes.
    subl LITERAL(72), %edx            // Remove space for return address, rbp, r8, r9, rbx, r12,
                                      // r13, r14, and r15 (already pushed above).
    subq %rdx, %rsp                   // Reserve stack space for argument array.

#if (STACK_REFERENCE_SIZE != 4)
#error "STACK_REFERENCE_SIZE(X86_64) size not as expected."
#endif
    movl LITERAL(0), (%rsp)           // Store NULL for method*

    movl %r10d, %ecx                  // Place size of args in rcx.
    movq %rdi, %rax                   // rax := method to be called
    movq %rsi, %r11                   // r11 := arg_array
    leaq 4(%rsp), %rdi                // rdi is pointing just above the StackReference<method> in the
                                      // stack arguments.
    // Copy arg array into stack.
    rep movsb                         // while (rcx--) { *rdi++ = *rsi++ }
    leaq 1(%r9), %r10                 // r10 := shorty + 1  ; ie skip return arg character
    movq %rax, %rdi                   // rdi := method to be called
    // Static call: there is no implicit 'this', so rsi is filled from the
    // shorty loop like the other GPR argument registers.
    LOOP_OVER_SHORTY_LOADING_GPRS rsi, esi, .Lgpr_setup_finished2
    LOOP_OVER_SHORTY_LOADING_GPRS rdx, edx, .Lgpr_setup_finished2
    LOOP_OVER_SHORTY_LOADING_GPRS rcx, ecx, .Lgpr_setup_finished2
    LOOP_OVER_SHORTY_LOADING_GPRS r8, r8d, .Lgpr_setup_finished2
    LOOP_OVER_SHORTY_LOADING_GPRS r9, r9d, .Lgpr_setup_finished2
.Lgpr_setup_finished2:
    call *MIRROR_ART_METHOD_QUICK_CODE_OFFSET_64(%rdi)  // Call the method.
    movq %rbp, %rsp                   // Restore stack pointer.
    POP r15                           // Pop r15
    POP r14                           // Pop r14
    POP r13                           // Pop r13
    POP r12                           // Pop r12
    POP rbx                           // Pop rbx
    POP r9                            // Pop r9 - shorty*.
    POP r8                            // Pop r8 - result*.
    POP rbp                           // Pop rbp
    cmpb LITERAL(68), (%r9)           // Test if result type char == 'D' (ASCII 68).
    je .Lreturn_double_quick2
    cmpb LITERAL(70), (%r9)           // Test if result type char == 'F' (ASCII 70).
    je .Lreturn_float_quick2
    movq %rax, (%r8)                  // Store the result assuming its a long, int or Object*
    ret
.Lreturn_double_quick2:
    movsd %xmm0, (%r8)                // Store the double floating point result.
    ret
.Lreturn_float_quick2:
    movss %xmm0, (%r8)                // Store the floating point result.
    ret
#endif  // __APPLE__
END_FUNCTION art_quick_invoke_static_stub
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 644 | |
Serguei Katkov | c380191 | 2014-07-08 17:21:53 +0700 | [diff] [blame] | 645 | /* |
| 646 | * Long jump stub. |
| 647 | * On entry: |
| 648 | * rdi = gprs |
| 649 | * rsi = fprs |
| 650 | */ |
| 651 | DEFINE_FUNCTION art_quick_do_long_jump |
| 652 | #if defined(__APPLE__) |
| 653 | int3 |
| 654 | int3 |
| 655 | #else |
| 656 | // Restore FPRs. |
| 657 | movq 0(%rsi), %xmm0 |
| 658 | movq 8(%rsi), %xmm1 |
| 659 | movq 16(%rsi), %xmm2 |
| 660 | movq 24(%rsi), %xmm3 |
| 661 | movq 32(%rsi), %xmm4 |
| 662 | movq 40(%rsi), %xmm5 |
| 663 | movq 48(%rsi), %xmm6 |
| 664 | movq 56(%rsi), %xmm7 |
| 665 | movq 64(%rsi), %xmm8 |
| 666 | movq 72(%rsi), %xmm9 |
| 667 | movq 80(%rsi), %xmm10 |
| 668 | movq 88(%rsi), %xmm11 |
| 669 | movq 96(%rsi), %xmm12 |
| 670 | movq 104(%rsi), %xmm13 |
| 671 | movq 112(%rsi), %xmm14 |
| 672 | movq 120(%rsi), %xmm15 |
| 673 | // Restore FPRs. |
| 674 | movq %rdi, %rsp // RSP points to gprs. |
| 675 | // Load all registers except RSP and RIP with values in gprs. |
| 676 | popq %r15 |
| 677 | popq %r14 |
| 678 | popq %r13 |
| 679 | popq %r12 |
| 680 | popq %r11 |
| 681 | popq %r10 |
| 682 | popq %r9 |
| 683 | popq %r8 |
| 684 | popq %rdi |
| 685 | popq %rsi |
| 686 | popq %rbp |
| 687 | addq LITERAL(8), %rsp // Skip rsp |
| 688 | popq %rbx |
| 689 | popq %rdx |
| 690 | popq %rcx |
| 691 | popq %rax |
| 692 | popq %rsp // Load stack pointer. |
| 693 | ret // From higher in the stack pop rip. |
| 694 | #endif // __APPLE__ |
| 695 | END_FUNCTION art_quick_do_long_jump |
| 696 | |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 697 | MACRO3(NO_ARG_DOWNCALL, c_name, cxx_name, return_macro) |
| 698 | DEFINE_FUNCTION VAR(c_name, 0) |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 699 | SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC |
Ian Rogers | 47d00c0 | 2014-04-16 17:33:27 -0700 | [diff] [blame] | 700 | // Outgoing argument set up |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 701 | movq %gs:THREAD_SELF_OFFSET, %rdi // pass Thread::Current() |
| 702 | call VAR(cxx_name, 1) // cxx_name(Thread*) |
| 703 | RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address |
| 704 | CALL_MACRO(return_macro, 2) // return or deliver exception |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 705 | END_FUNCTION VAR(c_name, 0) |
| 706 | END_MACRO |
| 707 | |
MACRO3(ONE_ARG_DOWNCALL, c_name, cxx_name, return_macro)
    // Generates stub c_name: calls cxx_name(arg0, Thread*). arg0 is already
    // in rdi on entry; Thread::Current() goes in rsi (second SysV arg).
    DEFINE_FUNCTION VAR(c_name, 0)
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  // save ref containing registers for GC
    // Outgoing argument set up
    movq %gs:THREAD_SELF_OFFSET, %rsi  // pass Thread::Current()
    call VAR(cxx_name, 1)              // cxx_name(arg0, Thread*)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
    CALL_MACRO(return_macro, 2)        // return or deliver exception
    END_FUNCTION VAR(c_name, 0)
END_MACRO
| 718 | |
MACRO3(TWO_ARG_DOWNCALL, c_name, cxx_name, return_macro)
    // Generates stub c_name: calls cxx_name(arg0, arg1, Thread*). arg0/arg1
    // are already in rdi/rsi; Thread::Current() goes in rdx (third SysV arg).
    DEFINE_FUNCTION VAR(c_name, 0)
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  // save ref containing registers for GC
    // Outgoing argument set up
    movq %gs:THREAD_SELF_OFFSET, %rdx  // pass Thread::Current()
    call VAR(cxx_name, 1)              // cxx_name(arg0, arg1, Thread*)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
    CALL_MACRO(return_macro, 2)        // return or deliver exception
    END_FUNCTION VAR(c_name, 0)
END_MACRO
| 729 | |
MACRO3(THREE_ARG_DOWNCALL, c_name, cxx_name, return_macro)
    // Generates stub c_name: calls cxx_name(arg0, arg1, arg2, Thread*).
    // arg0-arg2 are already in rdi/rsi/rdx; Thread::Current() goes in rcx.
    DEFINE_FUNCTION VAR(c_name, 0)
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME  // save ref containing registers for GC
    // Outgoing argument set up
    movq %gs:THREAD_SELF_OFFSET, %rcx  // pass Thread::Current()
    call VAR(cxx_name, 1)              // cxx_name(arg0, arg1, arg2, Thread*)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
    CALL_MACRO(return_macro, 2)        // return or deliver exception
    END_FUNCTION VAR(c_name, 0)
END_MACRO
| 740 | |
Ian Rogers | c3ccc10 | 2014-06-25 11:52:14 -0700 | [diff] [blame] | 741 | MACRO3(ONE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro) |
| 742 | DEFINE_FUNCTION VAR(c_name, 0) |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 743 | movl 8(%rsp), %esi // pass referrer |
| 744 | SETUP_REFS_ONLY_CALLEE_SAVE_FRAME |
| 745 | // arg0 is in rdi |
| 746 | movq %gs:THREAD_SELF_OFFSET, %rdx // pass Thread::Current() |
| 747 | call VAR(cxx_name, 1) // cxx_name(arg0, referrer, Thread*) |
| 748 | RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address |
Ian Rogers | c3ccc10 | 2014-06-25 11:52:14 -0700 | [diff] [blame] | 749 | CALL_MACRO(return_macro, 2) |
| 750 | END_FUNCTION VAR(c_name, 0) |
| 751 | END_MACRO |
| 752 | |
MACRO3(TWO_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
    // Like TWO_ARG_DOWNCALL but also passes the referring method, read from
    // the caller's stack before the callee-save frame is set up.
    DEFINE_FUNCTION VAR(c_name, 0)
    movl 8(%rsp), %edx                 // pass referrer
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME
    // arg0 and arg1 are in rdi/rsi
    movq %gs:THREAD_SELF_OFFSET, %rcx  // pass Thread::Current()
    call VAR(cxx_name, 1)              // (arg0, arg1, referrer, Thread*)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
    CALL_MACRO(return_macro, 2)        // return or deliver exception
    END_FUNCTION VAR(c_name, 0)
END_MACRO
| 764 | |
MACRO3(THREE_ARG_REF_DOWNCALL, c_name, cxx_name, return_macro)
    // Like THREE_ARG_DOWNCALL but also passes the referring method, read from
    // the caller's stack before the callee-save frame is set up.
    DEFINE_FUNCTION VAR(c_name, 0)
    movl 8(%rsp), %ecx                 // pass referrer
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME
    // arg0, arg1, and arg2 are in rdi/rsi/rdx
    movq %gs:THREAD_SELF_OFFSET, %r8   // pass Thread::Current()
    call VAR(cxx_name, 1)              // cxx_name(arg0, arg1, arg2, referrer, Thread*)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME  // restore frame up to return address
    CALL_MACRO(return_macro, 2)        // return or deliver exception
    END_FUNCTION VAR(c_name, 0)
END_MACRO
| 776 | |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 777 | MACRO0(RETURN_IF_RESULT_IS_NON_ZERO) |
Ian Rogers | 47d00c0 | 2014-04-16 17:33:27 -0700 | [diff] [blame] | 778 | testq %rax, %rax // rax == 0 ? |
| 779 | jz 1f // if rax == 0 goto 1 |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 780 | ret // return |
| 781 | 1: // deliver exception on current thread |
| 782 | DELIVER_PENDING_EXCEPTION |
| 783 | END_MACRO |
| 784 | |
MACRO0(RETURN_IF_EAX_ZERO)
    // Return to the caller if the call result in EAX is zero (success);
    // otherwise deliver the pending exception on the current thread.
    testl %eax, %eax               // eax == 0 ?
    jnz 1f                         // if eax != 0 goto 1
    ret                            // return
1:                                 // deliver exception on current thread
    DELIVER_PENDING_EXCEPTION
END_MACRO
| 792 | |
MACRO0(RETURN_OR_DELIVER_PENDING_EXCEPTION)
    // Return to the caller if the current thread has no pending exception;
    // otherwise deliver it. Clobbers rcx.
    movq %gs:THREAD_EXCEPTION_OFFSET, %rcx  // get exception field
    testq %rcx, %rcx               // rcx == 0 ?
    jnz 1f                         // if rcx != 0 goto 1
    ret                            // return
1:                                 // deliver exception on current thread
    DELIVER_PENDING_EXCEPTION
END_MACRO
| 801 | |
// Generate the allocation entrypoints for each allocator.
// TODO: use arch/quick_alloc_entrypoints.S. Currently we don't as we need to use concatenation
// macros to work around differences between OS/X's as and binutils as (OS/X lacks named arguments
// to macros and the VAR macro won't concatenate arguments properly), this also breaks having
// multi-line macros that use each other (hence using 1 macro per newline below).
// Object allocators: two arguments plus Thread*, via TWO_ARG_DOWNCALL.
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(c_suffix, cxx_suffix) \
  TWO_ARG_DOWNCALL art_quick_alloc_object ## c_suffix, artAllocObjectFromCode ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(c_suffix, cxx_suffix) \
  TWO_ARG_DOWNCALL art_quick_alloc_object_resolved ## c_suffix, artAllocObjectFromCodeResolved ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(c_suffix, cxx_suffix) \
  TWO_ARG_DOWNCALL art_quick_alloc_object_initialized ## c_suffix, artAllocObjectFromCodeInitialized ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(c_suffix, cxx_suffix) \
  TWO_ARG_DOWNCALL art_quick_alloc_object_with_access_check ## c_suffix, artAllocObjectFromCodeWithAccessCheck ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
// Array allocators: three arguments plus Thread*, via THREE_ARG_DOWNCALL.
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(c_suffix, cxx_suffix) \
  THREE_ARG_DOWNCALL art_quick_alloc_array ## c_suffix, artAllocArrayFromCode ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(c_suffix, cxx_suffix) \
  THREE_ARG_DOWNCALL art_quick_alloc_array_resolved ## c_suffix, artAllocArrayFromCodeResolved ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
#define GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(c_suffix, cxx_suffix) \
  THREE_ARG_DOWNCALL art_quick_alloc_array_with_access_check ## c_suffix, artAllocArrayFromCodeWithAccessCheck ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
#define GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(c_suffix, cxx_suffix) \
  THREE_ARG_DOWNCALL art_quick_check_and_alloc_array ## c_suffix, artCheckAndAllocArrayFromCode ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
#define GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(c_suffix, cxx_suffix) \
  THREE_ARG_DOWNCALL art_quick_check_and_alloc_array_with_access_check ## c_suffix, artCheckAndAllocArrayFromCodeWithAccessCheck ## cxx_suffix, RETURN_IF_RESULT_IS_NON_ZERO
| 825 | |
// Allocation entrypoints for the DlMalloc allocator.
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_dlmalloc, DlMalloc)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_dlmalloc, DlMalloc)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_dlmalloc, DlMalloc)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_dlmalloc, DlMalloc)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_dlmalloc, DlMalloc)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_dlmalloc, DlMalloc)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_dlmalloc, DlMalloc)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_dlmalloc, DlMalloc)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_dlmalloc, DlMalloc)

// Allocation entrypoints for the instrumented DlMalloc allocator.
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_dlmalloc_instrumented, DlMallocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_dlmalloc_instrumented, DlMallocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_dlmalloc_instrumented, DlMallocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_dlmalloc_instrumented, DlMallocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_dlmalloc_instrumented, DlMallocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_dlmalloc_instrumented, DlMallocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_dlmalloc_instrumented, DlMallocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_dlmalloc_instrumented, DlMallocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_dlmalloc_instrumented, DlMallocInstrumented)

// Allocation entrypoints for the RosAlloc allocator.
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_rosalloc, RosAlloc)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_rosalloc, RosAlloc)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_rosalloc, RosAlloc)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_rosalloc, RosAlloc)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_rosalloc, RosAlloc)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_rosalloc, RosAlloc)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_rosalloc, RosAlloc)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_rosalloc, RosAlloc)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_rosalloc, RosAlloc)

// Allocation entrypoints for the instrumented RosAlloc allocator.
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_rosalloc_instrumented, RosAllocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_rosalloc_instrumented, RosAllocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_rosalloc_instrumented, RosAllocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_rosalloc_instrumented, RosAllocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_rosalloc_instrumented, RosAllocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_rosalloc_instrumented, RosAllocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_rosalloc_instrumented, RosAllocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_rosalloc_instrumented, RosAllocInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_rosalloc_instrumented, RosAllocInstrumented)

// Allocation entrypoints for the BumpPointer allocator.
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_bump_pointer, BumpPointer)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_bump_pointer, BumpPointer)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_bump_pointer, BumpPointer)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_bump_pointer, BumpPointer)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_bump_pointer, BumpPointer)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_bump_pointer, BumpPointer)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_bump_pointer, BumpPointer)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_bump_pointer, BumpPointer)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_bump_pointer, BumpPointer)

// Allocation entrypoints for the instrumented BumpPointer allocator.
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_bump_pointer_instrumented, BumpPointerInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_bump_pointer_instrumented, BumpPointerInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_bump_pointer_instrumented, BumpPointerInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_bump_pointer_instrumented, BumpPointerInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_bump_pointer_instrumented, BumpPointerInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_bump_pointer_instrumented, BumpPointerInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_bump_pointer_instrumented, BumpPointerInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_bump_pointer_instrumented, BumpPointerInstrumented)
GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_bump_pointer_instrumented, BumpPointerInstrumented)
| 885 | |
Hiroshi Yamauchi | e01a520 | 2015-03-19 12:35:04 -0700 | [diff] [blame] | 886 | DEFINE_FUNCTION art_quick_alloc_object_tlab |
| 887 | // Fast path tlab allocation. |
| 888 | // RDI: uint32_t type_idx, RSI: ArtMethod* |
| 889 | // RDX, RCX, R8, R9: free. RAX: return val. |
| 890 | movl MIRROR_ART_METHOD_DEX_CACHE_TYPES_OFFSET(%rsi), %edx // Load dex cache resolved types array |
| 891 | // Load the class |
| 892 | movl MIRROR_OBJECT_ARRAY_DATA_OFFSET(%rdx, %rdi, MIRROR_OBJECT_ARRAY_COMPONENT_SIZE), %edx |
| 893 | testl %edx, %edx // Check null class |
| 894 | jz .Lart_quick_alloc_object_tlab_slow_path |
| 895 | // Check class status. |
| 896 | cmpl LITERAL(MIRROR_CLASS_STATUS_INITIALIZED), MIRROR_CLASS_STATUS_OFFSET(%rdx) |
| 897 | jne .Lart_quick_alloc_object_tlab_slow_path |
| 898 | // Check access flags has kAccClassIsFinalizable |
| 899 | testl LITERAL(ACCESS_FLAGS_CLASS_IS_FINALIZABLE), MIRROR_CLASS_ACCESS_FLAGS_OFFSET(%rdx) |
| 900 | jnz .Lart_quick_alloc_object_tlab_slow_path |
| 901 | movl MIRROR_CLASS_OBJECT_SIZE_OFFSET(%rdx), %ecx // Load the object size. |
| 902 | addl LITERAL(OBJECT_ALIGNMENT_MASK), %ecx // Align the size by 8. (addr + 7) & ~7. |
| 903 | andl LITERAL(OBJECT_ALIGNMENT_MASK_TOGGLED), %ecx |
| 904 | movq %gs:THREAD_SELF_OFFSET, %r8 // r8 = thread |
| 905 | movq THREAD_LOCAL_POS_OFFSET(%r8), %rax // Load thread_local_pos. |
| 906 | addq %rax, %rcx // Add the object size. |
| 907 | cmpq THREAD_LOCAL_END_OFFSET(%r8), %rcx // Check if it fits. |
| 908 | ja .Lart_quick_alloc_object_tlab_slow_path |
| 909 | movq %rcx, THREAD_LOCAL_POS_OFFSET(%r8) // Update thread_local_pos. |
| 910 | addq LITERAL(1), THREAD_LOCAL_OBJECTS_OFFSET(%r8) // Increment thread_local_objects. |
| 911 | // Store the class pointer in the header. |
| 912 | // No fence needed for x86. |
| 913 | movl %edx, MIRROR_OBJECT_CLASS_OFFSET(%rax) |
| 914 | ret // Fast path succeeded. |
| 915 | .Lart_quick_alloc_object_tlab_slow_path: |
| 916 | SETUP_REFS_ONLY_CALLEE_SAVE_FRAME // save ref containing registers for GC |
| 917 | // Outgoing argument set up |
| 918 | movq %gs:THREAD_SELF_OFFSET, %rdx // pass Thread::Current() |
Hiroshi Yamauchi | 3d900a7 | 2015-03-20 17:56:45 -0700 | [diff] [blame] | 919 | call SYMBOL(artAllocObjectFromCodeTLAB) // cxx_name(arg0, arg1, Thread*) |
Hiroshi Yamauchi | e01a520 | 2015-03-19 12:35:04 -0700 | [diff] [blame] | 920 | RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address |
| 921 | RETURN_IF_RESULT_IS_NON_ZERO // return or deliver exception |
| 922 | END_FUNCTION art_quick_alloc_object_tlab |
| 923 | |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 924 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_tlab, TLAB) |
| 925 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_tlab, TLAB) |
| 926 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_tlab, TLAB) |
| 927 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_tlab, TLAB) |
| 928 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_tlab, TLAB) |
| 929 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_tlab, TLAB) |
| 930 | GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_tlab, TLAB) |
| 931 | GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_tlab, TLAB) |
| 932 | |
| 933 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_tlab_instrumented, TLABInstrumented) |
| 934 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_tlab_instrumented, TLABInstrumented) |
| 935 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_tlab_instrumented, TLABInstrumented) |
| 936 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_tlab_instrumented, TLABInstrumented) |
| 937 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_tlab_instrumented, TLABInstrumented) |
| 938 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_tlab_instrumented, TLABInstrumented) |
| 939 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_tlab_instrumented, TLABInstrumented) |
| 940 | GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_tlab_instrumented, TLABInstrumented) |
| 941 | GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_tlab_instrumented, TLABInstrumented) |
| 942 | |
Hiroshi Yamauchi | 2cd334a | 2015-01-09 14:03:35 -0800 | [diff] [blame] | 943 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_region, Region) |
| 944 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_region, Region) |
| 945 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_region, Region) |
| 946 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_region, Region) |
| 947 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_region, Region) |
| 948 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_region, Region) |
| 949 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region, Region) |
| 950 | GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_region, Region) |
| 951 | GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region, Region) |
| 952 | |
| 953 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_region_instrumented, RegionInstrumented) |
| 954 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_region_instrumented, RegionInstrumented) |
| 955 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_region_instrumented, RegionInstrumented) |
| 956 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_region_instrumented, RegionInstrumented) |
| 957 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_region_instrumented, RegionInstrumented) |
| 958 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_region_instrumented, RegionInstrumented) |
| 959 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region_instrumented, RegionInstrumented) |
| 960 | GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_region_instrumented, RegionInstrumented) |
| 961 | GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region_instrumented, RegionInstrumented) |
| 962 | |
| 963 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_region_tlab, RegionTLAB) |
| 964 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_region_tlab, RegionTLAB) |
| 965 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_region_tlab, RegionTLAB) |
| 966 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_region_tlab, RegionTLAB) |
| 967 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_region_tlab, RegionTLAB) |
| 968 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_region_tlab, RegionTLAB) |
| 969 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region_tlab, RegionTLAB) |
| 970 | GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_region_tlab, RegionTLAB) |
| 971 | GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region_tlab, RegionTLAB) |
| 972 | |
| 973 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT(_region_tlab_instrumented, RegionTLABInstrumented) |
| 974 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_region_tlab_instrumented, RegionTLABInstrumented) |
| 975 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_region_tlab_instrumented, RegionTLABInstrumented) |
| 976 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_region_tlab_instrumented, RegionTLABInstrumented) |
| 977 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY(_region_tlab_instrumented, RegionTLABInstrumented) |
| 978 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_region_tlab_instrumented, RegionTLABInstrumented) |
| 979 | GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region_tlab_instrumented, RegionTLABInstrumented) |
| 980 | GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY(_region_tlab_instrumented, RegionTLABInstrumented) |
| 981 | GENERATE_ALLOC_ENTRYPOINTS_CHECK_AND_ALLOC_ARRAY_WITH_ACCESS_CHECK(_region_tlab_instrumented, RegionTLABInstrumented) |
| 982 | |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 983 | TWO_ARG_DOWNCALL art_quick_resolve_string, artResolveStringFromCode, RETURN_IF_RESULT_IS_NON_ZERO |
| 984 | TWO_ARG_DOWNCALL art_quick_initialize_static_storage, artInitializeStaticStorageFromCode, RETURN_IF_RESULT_IS_NON_ZERO |
| 985 | TWO_ARG_DOWNCALL art_quick_initialize_type, artInitializeTypeFromCode, RETURN_IF_RESULT_IS_NON_ZERO |
| 986 | TWO_ARG_DOWNCALL art_quick_initialize_type_and_verify_access, artInitializeTypeAndVerifyAccessFromCode, RETURN_IF_RESULT_IS_NON_ZERO |
| 987 | |
Ian Rogers | 832336b | 2014-10-08 15:35:22 -0700 | [diff] [blame] | 988 | TWO_ARG_REF_DOWNCALL art_quick_handle_fill_data, artHandleFillArrayDataFromCode, RETURN_IF_EAX_ZERO |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 989 | |
// art_quick_lock_object: acquire the monitor of the object in rdi/edi.
// Fast path: thin-lock acquisition / recursive-count bump via lock cmpxchg
// on the object's lock word, preserving the read barrier bits.
// Falls back to artLockObjectFromCode for null, fat locks, contention or
// recursion-count overflow. On the fast path clobbers eax, ecx, edx, flags.
DEFINE_FUNCTION art_quick_lock_object
    testl %edi, %edi                      // Null check object/rdi.
    jz   .Lslow_lock
.Lretry_lock:
    movl MIRROR_OBJECT_LOCK_WORD_OFFSET(%edi), %ecx  // ecx := lock word.
    test LITERAL(LOCK_WORD_STATE_MASK), %ecx  // Test the 2 high (state) bits.
    jne  .Lslow_lock                      // Slow path if either of the two high bits are set
                                          // (fat lock or hash state).
    movl %ecx, %edx                       // save lock word (edx) to keep read barrier bits.
    andl LITERAL(LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED), %ecx  // zero the read barrier bits.
    test %ecx, %ecx                       // Anything but rb bits set?
    jnz  .Lalready_thin                   // Lock word contains a thin lock.
    // unlocked case - edx: original lock word, edi: obj.
    movl %edx, %eax                       // eax: lock word zero except for read barrier bits
                                          // (expected old value for cmpxchg).
    movl %gs:THREAD_ID_OFFSET, %edx       // edx := thread id
    or   %eax, %edx                       // edx: thread id with count of 0 + read barrier bits.
    lock cmpxchg %edx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edi)
    jnz  .Lretry_lock                     // cmpxchg failed (someone raced us) - retry.
    ret                                   // Lock acquired.
.Lalready_thin:  // edx: lock word (with high 2 bits zero and original rb bits), edi: obj.
    movl %gs:THREAD_ID_OFFSET, %ecx       // ecx := thread id
    cmpw %cx, %dx                         // do we hold the lock already? (low 16 bits = owner id)
    jne  .Lslow_lock                      // Owned by another thread - contended, go slow.
    movl %edx, %ecx                       // copy the lock word to check count overflow.
    andl LITERAL(LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED), %ecx  // zero the read barrier bits.
    addl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %ecx  // increment recursion count (trial add).
    test LITERAL(LOCK_WORD_READ_BARRIER_STATE_MASK), %ecx  // overflowed if either of the upper two bits (28-29) are set
    jne  .Lslow_lock                      // count overflowed so go slow (inflate the lock).
    movl %edx, %eax                       // copy the lock word as the old val for cmpxchg.
    addl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %edx  // increment recursion count again for real.
    // update lockword, cmpxchg necessary for read barrier bits.
    lock cmpxchg %edx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edi)  // eax: old val, edx: new val.
    jnz  .Lretry_lock                     // cmpxchg failed retry
    ret
.Lslow_lock:
    // Slow path: full call into the runtime; may block or inflate the lock.
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME
    movq %gs:THREAD_SELF_OFFSET, %rsi     // pass Thread::Current()
    call SYMBOL(artLockObjectFromCode)    // artLockObjectFromCode(object, Thread*)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME   // restore frame up to return address
    RETURN_IF_EAX_ZERO                    // eax == 0: locked; else deliver pending exception.
END_FUNCTION art_quick_lock_object
| 1030 | |
// art_quick_unlock_object: release the monitor of the object in rdi/edi.
// Fast path: thin-lock release (clear owner or decrement recursion count),
// preserving the read barrier bits. When USE_READ_BARRIER is defined the
// store must be a lock cmpxchg so concurrent rb-bit updates are not lost;
// otherwise a plain store suffices. Slow path: artUnlockObjectFromCode
// (null object, fat lock, or not owned by this thread).
DEFINE_FUNCTION art_quick_unlock_object
    testl %edi, %edi                      // null check object/edi
    jz   .Lslow_unlock
.Lretry_unlock:
    movl MIRROR_OBJECT_LOCK_WORD_OFFSET(%edi), %ecx  // ecx := lock word
    movl %gs:THREAD_ID_OFFSET, %edx       // edx := thread id
    test LITERAL(LOCK_WORD_STATE_MASK), %ecx  // high state bits set?
    jnz  .Lslow_unlock                    // lock word contains a monitor
    cmpw %cx, %dx                         // does the thread id match?
    jne  .Lslow_unlock                    // not the owner - runtime will throw.
    movl %ecx, %edx                       // copy the lock word to detect new count of 0.
    andl LITERAL(LOCK_WORD_READ_BARRIER_STATE_MASK_TOGGLED), %edx  // zero the read barrier bits.
    cmpl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %edx  // count >= 1 means recursively held.
    jae  .Lrecursive_thin_unlock
    // Final unlock: new lock word keeps only the original read barrier bits.
    // update lockword, cmpxchg necessary for read barrier bits.
    movl %ecx, %eax                       // eax: old lock word.
    andl LITERAL(LOCK_WORD_READ_BARRIER_STATE_MASK), %ecx  // ecx: new lock word zero except original rb bits.
#ifndef USE_READ_BARRIER
    movl %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edi)
#else
    lock cmpxchg %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edi)  // eax: old val, ecx: new val.
    jnz  .Lretry_unlock                   // cmpxchg failed retry
#endif
    ret
.Lrecursive_thin_unlock:  // ecx: original lock word, edi: obj
    // Recursive release: just decrement the count, owner id stays in place.
    // update lockword, cmpxchg necessary for read barrier bits.
    movl %ecx, %eax                       // eax: old lock word.
    subl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_ONE), %ecx  // decrement recursion count.
#ifndef USE_READ_BARRIER
    mov  %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edi)
#else
    lock cmpxchg %ecx, MIRROR_OBJECT_LOCK_WORD_OFFSET(%edi)  // eax: old val, ecx: new val.
    jnz  .Lretry_unlock                   // cmpxchg failed retry
#endif
    ret
.Lslow_unlock:
    SETUP_REFS_ONLY_CALLEE_SAVE_FRAME
    movq %gs:THREAD_SELF_OFFSET, %rsi     // pass Thread::Current()
    call SYMBOL(artUnlockObjectFromCode)  // artUnlockObjectFromCode(object, Thread*)
    RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME   // restore frame up to return address
    RETURN_IF_EAX_ZERO                    // eax == 0: unlocked; else deliver pending exception.
END_FUNCTION art_quick_unlock_object
| 1073 | |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 1074 | DEFINE_FUNCTION art_quick_check_cast |
Andreas Gampe | 525cde2 | 2014-04-22 15:44:50 -0700 | [diff] [blame] | 1075 | PUSH rdi // Save args for exc |
| 1076 | PUSH rsi |
Serguei Katkov | c380191 | 2014-07-08 17:21:53 +0700 | [diff] [blame] | 1077 | SETUP_FP_CALLEE_SAVE_FRAME |
Andreas Gampe | 29b3841 | 2014-08-13 00:15:43 -0700 | [diff] [blame] | 1078 | call SYMBOL(artIsAssignableFromCode) // (Class* klass, Class* ref_klass) |
Andreas Gampe | 525cde2 | 2014-04-22 15:44:50 -0700 | [diff] [blame] | 1079 | testq %rax, %rax |
| 1080 | jz 1f // jump forward if not assignable |
Serguei Katkov | c380191 | 2014-07-08 17:21:53 +0700 | [diff] [blame] | 1081 | RESTORE_FP_CALLEE_SAVE_FRAME |
Andreas Gampe | 525cde2 | 2014-04-22 15:44:50 -0700 | [diff] [blame] | 1082 | addq LITERAL(16), %rsp // pop arguments |
| 1083 | CFI_ADJUST_CFA_OFFSET(-16) |
Serguei Katkov | c380191 | 2014-07-08 17:21:53 +0700 | [diff] [blame] | 1084 | |
Andreas Gampe | 525cde2 | 2014-04-22 15:44:50 -0700 | [diff] [blame] | 1085 | ret |
| 1086 | 1: |
Serguei Katkov | c380191 | 2014-07-08 17:21:53 +0700 | [diff] [blame] | 1087 | RESTORE_FP_CALLEE_SAVE_FRAME |
Andreas Gampe | 525cde2 | 2014-04-22 15:44:50 -0700 | [diff] [blame] | 1088 | POP rsi // Pop arguments |
| 1089 | POP rdi |
| 1090 | SETUP_SAVE_ALL_CALLEE_SAVE_FRAME // save all registers as basis for long jump context |
Andreas Gampe | 525cde2 | 2014-04-22 15:44:50 -0700 | [diff] [blame] | 1091 | mov %gs:THREAD_SELF_OFFSET, %rdx // pass Thread::Current() |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1092 | call SYMBOL(artThrowClassCastException) // (Class* a, Class* b, Thread*) |
Andreas Gampe | 525cde2 | 2014-04-22 15:44:50 -0700 | [diff] [blame] | 1093 | int3 // unreached |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 1094 | END_FUNCTION art_quick_check_cast |
| 1095 | |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1096 | |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 1097 | /* |
| 1098 | * Entry from managed code for array put operations of objects where the value being stored |
| 1099 | * needs to be checked for compatibility. |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1100 | * |
| 1101 | * Currently all the parameters should fit into the 32b portions of the registers. Index always |
| 1102 | * will. So we optimize for a tighter encoding. The 64b versions are in comments. |
| 1103 | * |
| 1104 | * rdi(edi) = array, rsi(esi) = index, rdx(edx) = value |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 1105 | */ |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1106 | DEFINE_FUNCTION art_quick_aput_obj_with_null_and_bound_check |
Ian Rogers | c3ccc10 | 2014-06-25 11:52:14 -0700 | [diff] [blame] | 1107 | #if defined(__APPLE__) |
| 1108 | int3 |
| 1109 | int3 |
| 1110 | #else |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1111 | testl %edi, %edi |
| 1112 | // testq %rdi, %rdi |
Andreas Gampe | 29b3841 | 2014-08-13 00:15:43 -0700 | [diff] [blame] | 1113 | jnz art_quick_aput_obj_with_bound_check |
| 1114 | jmp art_quick_throw_null_pointer_exception |
Ian Rogers | c3ccc10 | 2014-06-25 11:52:14 -0700 | [diff] [blame] | 1115 | #endif // __APPLE__ |
Andreas Gampe | f4e910b | 2014-04-29 16:55:52 -0700 | [diff] [blame] | 1116 | END_FUNCTION art_quick_aput_obj_with_null_and_bound_check |
| 1117 | |
| 1118 | |
// art_quick_aput_obj_with_bound_check: bounds-check index (esi) against the
// array's length, then tail-jump to art_quick_aput_obj. Out-of-bounds:
// shuffle (index, length) into arg0/arg1 and tail-jump to the bounds throw.
// Unsigned jb also rejects negative indices (they compare as huge values).
DEFINE_FUNCTION art_quick_aput_obj_with_bound_check
#if defined(__APPLE__)
    int3
    int3
#else
    movl MIRROR_ARRAY_LENGTH_OFFSET(%edi), %ecx
//  movl MIRROR_ARRAY_LENGTH_OFFSET(%rdi), %ecx      // This zero-extends, so value(%rcx)=value(%ecx)
    cmpl %ecx, %esi
    jb art_quick_aput_obj                 // index < length: do the store.
    mov %esi, %edi                        // arg0 := index
//  mov %rsi, %rdi
    mov %ecx, %esi                        // arg1 := length
//  mov %rcx, %rsi
    jmp art_quick_throw_array_bounds
#endif  // __APPLE__
END_FUNCTION art_quick_aput_obj_with_bound_check
| 1135 | |
| 1136 | |
// art_quick_aput_obj: store object rdx(edx) into array rdi(edi) at index
// rsi(esi), with type check and card-table dirtying for the GC write
// barrier. Null stores skip both the check and the card mark. When the
// value's class is not exactly the component type, falls back to
// artIsAssignableFromCode; on failure raises ArrayStoreException (no
// return). Commented // movq/cmpq lines are the 64-bit forms kept for
// reference (see the function-group comment above about 32b encodings).
DEFINE_FUNCTION art_quick_aput_obj
    testl %edx, %edx                      // store of null
//  test %rdx, %rdx
    jz .Ldo_aput_null
    movl MIRROR_OBJECT_CLASS_OFFSET(%edi), %ecx       // ecx := array's class
//  movq MIRROR_OBJECT_CLASS_OFFSET(%rdi), %rcx
    movl MIRROR_CLASS_COMPONENT_TYPE_OFFSET(%ecx), %ecx  // ecx := component type
//  movq MIRROR_CLASS_COMPONENT_TYPE_OFFSET(%rcx), %rcx
    cmpl MIRROR_OBJECT_CLASS_OFFSET(%edx), %ecx // value's type == array's component type - trivial assignability
//  cmpq MIRROR_CLASS_OFFSET(%rdx), %rcx
    jne .Lcheck_assignability
.Ldo_aput:
    movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%edi, %esi, 4)
//  movq %rdx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%rdi, %rsi, 4)
    // Mark the card for this object: card_table[obj >> 7] = card value.
    movq %gs:THREAD_CARD_TABLE_OFFSET, %rdx
    shrl LITERAL(7), %edi
//  shrl LITERAL(7), %rdi
    movb %dl, (%rdx, %rdi)                // Note: this assumes that top 32b of %rdi are zero
    ret
.Ldo_aput_null:
    movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%edi, %esi, 4)
//  movq %rdx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%rdi, %rsi, 4)
    ret                                   // No card mark needed for a null store.
.Lcheck_assignability:
    // Save arguments (all volatile across the C call).
    PUSH rdi
    PUSH rsi
    PUSH rdx
    subq LITERAL(8), %rsp                 // Alignment padding.
    CFI_ADJUST_CFA_OFFSET(8)
    SETUP_FP_CALLEE_SAVE_FRAME            // spill ART FP callee-saves (xmm12-15)

    // "Uncompress" = do nothing, as already zero-extended on load.
    movl MIRROR_OBJECT_CLASS_OFFSET(%edx), %esi       // Pass arg2 = value's class.
    movq %rcx, %rdi                       // Pass arg1 = array's component type.

    call SYMBOL(artIsAssignableFromCode)  // (Class* a, Class* b)

    // Exception? (rax == 0 means not assignable.)
    testq %rax, %rax
    jz .Lthrow_array_store_exception

    RESTORE_FP_CALLEE_SAVE_FRAME
    // Restore arguments.
    addq LITERAL(8), %rsp
    CFI_ADJUST_CFA_OFFSET(-8)
    POP rdx
    POP rsi
    POP rdi

    // Assignable: repeat the store + card mark with restored registers.
    movl %edx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%edi, %esi, 4)
//  movq %rdx, MIRROR_OBJECT_ARRAY_DATA_OFFSET(%rdi, %rsi, 4)
    movq %gs:THREAD_CARD_TABLE_OFFSET, %rdx
    shrl LITERAL(7), %edi
//  shrl LITERAL(7), %rdi
    movb %dl, (%rdx, %rdi)                // Note: this assumes that top 32b of %rdi are zero
//  movb %dl, (%rdx, %rdi)
    ret
.Lthrow_array_store_exception:
    RESTORE_FP_CALLEE_SAVE_FRAME
    // Restore arguments.
    addq LITERAL(8), %rsp
    CFI_ADJUST_CFA_OFFSET(-8)
    POP rdx
    POP rsi
    POP rdi

    SETUP_SAVE_ALL_CALLEE_SAVE_FRAME      // Save all registers as basis for long jump context.

    // Outgoing argument set up.
    movq %rdx, %rsi                       // Pass arg 2 = value.
    movq %gs:THREAD_SELF_OFFSET, %rdx     // Pass arg 3 = Thread::Current().
                                          // Pass arg 1 = array (already in rdi).
    call SYMBOL(artThrowArrayStoreException) // (array, value, Thread*)
    int3                                  // unreached
END_FUNCTION art_quick_aput_obj
Andreas Gampe | 525cde2 | 2014-04-22 15:44:50 -0700 | [diff] [blame] | 1213 | |
// TODO: This is quite silly on X86_64 now.
// art_quick_memcpy: trampoline from managed code straight into libc memcpy.
// The managed args (rdi, rsi, rdx) already match the SysV C convention, so
// no shuffling is needed; PLT_SYMBOL routes the call through the PLT.
DEFINE_FUNCTION art_quick_memcpy
    call PLT_SYMBOL(memcpy)               // (void*, const void*, size_t)
    ret
END_FUNCTION art_quick_memcpy
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 1219 | |
// Suspend check: calls artTestSuspendFromCode(Thread*); the third macro arg
// ("ret") is the return sequence - no result value to propagate.
NO_ARG_DOWNCALL art_quick_test_suspend, artTestSuspendFromCode, ret

// 64-bit arithmetic entrypoints left unimplemented on x86-64 - presumably
// the compiler emits these inline here, so the stubs are never taken.
// TODO(review): confirm against the UNIMPLEMENTED macro definition.
UNIMPLEMENTED art_quick_ldiv
UNIMPLEMENTED art_quick_lmod
UNIMPLEMENTED art_quick_lmul
UNIMPLEMENTED art_quick_lshl
UNIMPLEMENTED art_quick_lshr
UNIMPLEMENTED art_quick_lushr
Alexei Zavjalov | 8d07e2d | 2014-05-05 23:36:14 +0700 | [diff] [blame] | 1228 | |
Fred Shih | 37f05ef | 2014-07-16 18:38:08 -0700 | [diff] [blame] | 1229 | THREE_ARG_REF_DOWNCALL art_quick_set8_instance, artSet8InstanceFromCode, RETURN_IF_EAX_ZERO |
| 1230 | THREE_ARG_REF_DOWNCALL art_quick_set16_instance, artSet16InstanceFromCode, RETURN_IF_EAX_ZERO |
Andreas Gampe | 9d4e5e2 | 2014-05-06 08:29:48 -0700 | [diff] [blame] | 1231 | THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCode, RETURN_IF_EAX_ZERO |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1232 | THREE_ARG_REF_DOWNCALL art_quick_set64_instance, artSet64InstanceFromCode, RETURN_IF_EAX_ZERO |
Andreas Gampe | 9d4e5e2 | 2014-05-06 08:29:48 -0700 | [diff] [blame] | 1233 | THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCode, RETURN_IF_EAX_ZERO |
Alexei Zavjalov | 8d07e2d | 2014-05-05 23:36:14 +0700 | [diff] [blame] | 1234 | |
Fred Shih | 37f05ef | 2014-07-16 18:38:08 -0700 | [diff] [blame] | 1235 | TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION |
| 1236 | TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION |
| 1237 | TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION |
| 1238 | TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION |
Andreas Gampe | 9d4e5e2 | 2014-05-06 08:29:48 -0700 | [diff] [blame] | 1239 | TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION |
| 1240 | TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION |
| 1241 | TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION |
Alexei Zavjalov | 8d07e2d | 2014-05-05 23:36:14 +0700 | [diff] [blame] | 1242 | |
Fred Shih | 37f05ef | 2014-07-16 18:38:08 -0700 | [diff] [blame] | 1243 | TWO_ARG_REF_DOWNCALL art_quick_set8_static, artSet8StaticFromCode, RETURN_IF_EAX_ZERO |
| 1244 | TWO_ARG_REF_DOWNCALL art_quick_set16_static, artSet16StaticFromCode, RETURN_IF_EAX_ZERO |
Andreas Gampe | 9d4e5e2 | 2014-05-06 08:29:48 -0700 | [diff] [blame] | 1245 | TWO_ARG_REF_DOWNCALL art_quick_set32_static, artSet32StaticFromCode, RETURN_IF_EAX_ZERO |
| 1246 | TWO_ARG_REF_DOWNCALL art_quick_set_obj_static, artSetObjStaticFromCode, RETURN_IF_EAX_ZERO |
Alexei Zavjalov | 8d07e2d | 2014-05-05 23:36:14 +0700 | [diff] [blame] | 1247 | |
Fred Shih | 37f05ef | 2014-07-16 18:38:08 -0700 | [diff] [blame] | 1248 | ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION |
| 1249 | ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION |
| 1250 | ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION |
| 1251 | ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION |
Andreas Gampe | 9d4e5e2 | 2014-05-06 08:29:48 -0700 | [diff] [blame] | 1252 | ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION |
| 1253 | ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION |
| 1254 | ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCode, RETURN_OR_DELIVER_PENDING_EXCEPTION |
Alexei Zavjalov | 8d07e2d | 2014-05-05 23:36:14 +0700 | [diff] [blame] | 1255 | |
Andreas Gampe | 9d4e5e2 | 2014-05-06 08:29:48 -0700 | [diff] [blame] | 1256 | // This is singled out as the argument order is different. |
Alexei Zavjalov | 8d07e2d | 2014-05-05 23:36:14 +0700 | [diff] [blame] | 1257 | DEFINE_FUNCTION art_quick_set64_static |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1258 | movq %rsi, %rdx // pass new_val |
| 1259 | movl 8(%rsp), %esi // pass referrer |
| 1260 | SETUP_REFS_ONLY_CALLEE_SAVE_FRAME |
| 1261 | // field_idx is in rdi |
| 1262 | movq %gs:THREAD_SELF_OFFSET, %rcx // pass Thread::Current() |
| 1263 | call SYMBOL(artSet64StaticFromCode) // (field_idx, referrer, new_val, Thread*) |
| 1264 | RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME // restore frame up to return address |
| 1265 | RETURN_IF_EAX_ZERO // return or deliver exception |
Alexei Zavjalov | 8d07e2d | 2014-05-05 23:36:14 +0700 | [diff] [blame] | 1266 | END_FUNCTION art_quick_set64_static |
| 1267 | |
Ian Rogers | b7dabf5 | 2014-03-12 12:11:54 -0700 | [diff] [blame] | 1268 | |
// art_quick_proxy_invoke_handler: entry for java.lang.reflect.Proxy method
// invocations. Saves the full refs-and-args frame (proxy method already in
// rdi), calls the C++ handler, then pops the frame by literal size
// (168 + 4*8 matches the frame built by the SETUP macro, incl. the FP
// callee-saves - keep in sync with that macro). The integer result is also
// copied into xmm0 so float/double returns are correct without branching.
DEFINE_FUNCTION art_quick_proxy_invoke_handler
    SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_RDI

    movq %gs:THREAD_SELF_OFFSET, %rdx       // Pass Thread::Current().
    movq %rsp, %rcx                         // Pass SP.
    call SYMBOL(artQuickProxyInvokeHandler) // (proxy method, receiver, Thread*, SP)
    movq %rax, %xmm0                        // Copy return value in case of float returns.
    addq LITERAL(168 + 4*8), %rsp           // Pop arguments.
    CFI_ADJUST_CFA_OFFSET(-168 - 4*8)
    RETURN_OR_DELIVER_PENDING_EXCEPTION
END_FUNCTION art_quick_proxy_invoke_handler
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 1280 | |
| 1281 | /* |
Mark Mendell | d3703d8 | 2014-06-09 15:10:50 -0400 | [diff] [blame] | 1282 | * Called to resolve an imt conflict. |
| 1283 | * rax is a hidden argument that holds the target method's dex method index. |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 1284 | */ |
Andreas Gampe | 51f7635 | 2014-05-21 08:28:48 -0700 | [diff] [blame] | 1285 | DEFINE_FUNCTION art_quick_imt_conflict_trampoline |
Ian Rogers | c3ccc10 | 2014-06-25 11:52:14 -0700 | [diff] [blame] | 1286 | #if defined(__APPLE__) |
| 1287 | int3 |
| 1288 | int3 |
| 1289 | #else |
Serguei Katkov | 7c748c1 | 2014-06-06 10:50:37 -0700 | [diff] [blame] | 1290 | movl 8(%rsp), %edi // load caller Method* |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1291 | movl MIRROR_ART_METHOD_DEX_CACHE_METHODS_OFFSET(%rdi), %edi // load dex_cache_resolved_methods |
| 1292 | movl MIRROR_OBJECT_ARRAY_DATA_OFFSET(%rdi, %rax, 4), %edi // load the target method |
Andreas Gampe | 29b3841 | 2014-08-13 00:15:43 -0700 | [diff] [blame] | 1293 | jmp art_quick_invoke_interface_trampoline |
Ian Rogers | c3ccc10 | 2014-06-25 11:52:14 -0700 | [diff] [blame] | 1294 | #endif // __APPLE__ |
Andreas Gampe | 51f7635 | 2014-05-21 08:28:48 -0700 | [diff] [blame] | 1295 | END_FUNCTION art_quick_imt_conflict_trampoline |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 1296 | |
Ian Rogers | befbd57 | 2014-03-06 01:13:39 -0800 | [diff] [blame] | 1297 | DEFINE_FUNCTION art_quick_resolution_trampoline |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1298 | SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME |
Ian Rogers | befbd57 | 2014-03-06 01:13:39 -0800 | [diff] [blame] | 1299 | movq %gs:THREAD_SELF_OFFSET, %rdx |
| 1300 | movq %rsp, %rcx |
Andreas Gampe | 29b3841 | 2014-08-13 00:15:43 -0700 | [diff] [blame] | 1301 | call SYMBOL(artQuickResolutionTrampoline) // (called, receiver, Thread*, SP) |
Ian Rogers | befbd57 | 2014-03-06 01:13:39 -0800 | [diff] [blame] | 1302 | movq %rax, %r10 // Remember returned code pointer in R10. |
Ian Rogers | 1a57066 | 2014-03-12 01:02:21 -0700 | [diff] [blame] | 1303 | movq (%rsp), %rdi // Load called method into RDI. |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1304 | RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME |
Ian Rogers | befbd57 | 2014-03-06 01:13:39 -0800 | [diff] [blame] | 1305 | testq %r10, %r10 // If code pointer is NULL goto deliver pending exception. |
| 1306 | jz 1f |
| 1307 | jmp *%r10 // Tail call into method. |
| 1308 | 1: |
Ian Rogers | befbd57 | 2014-03-06 01:13:39 -0800 | [diff] [blame] | 1309 | DELIVER_PENDING_EXCEPTION |
| 1310 | END_FUNCTION art_quick_resolution_trampoline |
| 1311 | |
| 1312 | /* Generic JNI frame layout: |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1313 | * |
| 1314 | * #-------------------# |
| 1315 | * | | |
| 1316 | * | caller method... | |
| 1317 | * #-------------------# <--- SP on entry |
Andreas Gampe | 36fea8d | 2014-03-10 13:37:40 -0700 | [diff] [blame] | 1318 | * |
| 1319 | * | |
| 1320 | * V |
| 1321 | * |
| 1322 | * #-------------------# |
| 1323 | * | caller method... | |
| 1324 | * #-------------------# |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1325 | * | Return | |
| 1326 | * | R15 | callee save |
| 1327 | * | R14 | callee save |
| 1328 | * | R13 | callee save |
| 1329 | * | R12 | callee save |
| 1330 | * | R9 | arg5 |
| 1331 | * | R8 | arg4 |
| 1332 | * | RSI/R6 | arg1 |
| 1333 | * | RBP/R5 | callee save |
| 1334 | * | RBX/R3 | callee save |
| 1335 | * | RDX/R2 | arg2 |
| 1336 | * | RCX/R1 | arg3 |
| 1337 | * | XMM7 | float arg 8 |
| 1338 | * | XMM6 | float arg 7 |
| 1339 | * | XMM5 | float arg 6 |
| 1340 | * | XMM4 | float arg 5 |
| 1341 | * | XMM3 | float arg 4 |
| 1342 | * | XMM2 | float arg 3 |
| 1343 | * | XMM1 | float arg 2 |
| 1344 | * | XMM0 | float arg 1 |
| 1345 | * | Padding | |
| 1346 | * | RDI/Method* | <- sp |
| 1347 | * #-------------------# |
Andreas Gampe | 36fea8d | 2014-03-10 13:37:40 -0700 | [diff] [blame] | 1348 | * | Scratch Alloca | 5K scratch space |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1349 | * #---------#---------# |
| 1350 | * | | sp* | |
| 1351 | * | Tramp. #---------# |
| 1352 | * | args | thread | |
| 1353 | * | Tramp. #---------# |
| 1354 | * | | method | |
| 1355 | * #-------------------# <--- SP on artQuickGenericJniTrampoline |
Andreas Gampe | 36fea8d | 2014-03-10 13:37:40 -0700 | [diff] [blame] | 1356 | * |
| 1357 | * | |
| 1358 | * v artQuickGenericJniTrampoline |
| 1359 | * |
| 1360 | * #-------------------# |
| 1361 | * | caller method... | |
| 1362 | * #-------------------# |
| 1363 | * | Return | |
| 1364 | * | Callee-Save Data | |
| 1365 | * #-------------------# |
Mathieu Chartier | 421c537 | 2014-05-14 14:11:40 -0700 | [diff] [blame] | 1366 | * | handle scope | |
Andreas Gampe | 36fea8d | 2014-03-10 13:37:40 -0700 | [diff] [blame] | 1367 | * #-------------------# |
| 1368 | * | Method* | <--- (1) |
| 1369 | * #-------------------# |
| 1370 | * | local ref cookie | // 4B |
Mathieu Chartier | 421c537 | 2014-05-14 14:11:40 -0700 | [diff] [blame] | 1371 | * | handle scope size | // 4B TODO: roll into call stack alignment? |
Andreas Gampe | 36fea8d | 2014-03-10 13:37:40 -0700 | [diff] [blame] | 1372 | * #-------------------# |
| 1373 | * | JNI Call Stack | |
| 1374 | * #-------------------# <--- SP on native call |
| 1375 | * | | |
| 1376 | * | Stack for Regs | The trampoline assembly will pop these values |
| 1377 | * | | into registers for native call |
| 1378 | * #-------------------# |
| 1379 | * | Native code ptr | |
| 1380 | * #-------------------# |
| 1381 | * | Free scratch | |
| 1382 | * #-------------------# |
| 1383 | * | Ptr to (1) | <--- RSP |
| 1384 | * #-------------------# |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1385 | */ |
Andreas Gampe | 2da8823 | 2014-02-27 12:26:20 -0800 | [diff] [blame] | 1386 | /* |
| 1387 | * Called to do a generic JNI down-call |
| 1388 | */ |
Ian Rogers | 6f3dbba | 2014-10-14 17:41:57 -0700 | [diff] [blame] | 1389 | DEFINE_FUNCTION art_quick_generic_jni_trampoline |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1390 | SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME_WITH_METHOD_IN_RDI |
| 1391 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1392 | movq %rsp, %rbp // save SP at (old) callee-save frame |
| 1393 | CFI_DEF_CFA_REGISTER(rbp) |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1394 | |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1395 | // |
| 1396 | // reserve a lot of space |
| 1397 | // |
| 1398 | // 4 local state ref |
| 1399 | // 4 padding |
Mathieu Chartier | eb8167a | 2014-05-07 15:43:14 -0700 | [diff] [blame] | 1400 | // 4196 4k scratch space, enough for 2x 256 8-byte parameters (TODO: handle scope overhead?) |
| 1401 | // 16 handle scope member fields ? |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1402 | // + 112 14x 8-byte stack-2-register space |
| 1403 | // ------ |
| 1404 | // 4332 |
| 1405 | // 16-byte aligned: 4336 |
| 1406 | // Note: 14x8 = 7*16, so the stack stays aligned for the native call... |
| 1407 | // Also means: the padding is somewhere in the middle |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1408 | // |
| 1409 | // |
| 1410 | // New test: use 5K and release |
| 1411 | // 5k = 5120 |
| 1412 | subq LITERAL(5120), %rsp |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1413 | // prepare for artQuickGenericJniTrampoline call |
| 1414 | // (Thread*, SP) |
| 1415 | // rdi rsi <= C calling convention |
| 1416 | // gs:... rbp <= where they are |
| 1417 | movq %gs:THREAD_SELF_OFFSET, %rdi |
| 1418 | movq %rbp, %rsi |
Andreas Gampe | 29b3841 | 2014-08-13 00:15:43 -0700 | [diff] [blame] | 1419 | call SYMBOL(artQuickGenericJniTrampoline) // (Thread*, sp) |
Andreas Gampe | 36fea8d | 2014-03-10 13:37:40 -0700 | [diff] [blame] | 1420 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1421 | // The C call will have registered the complete save-frame on success. |
| 1422 | // The result of the call is: |
| 1423 | // %rax: pointer to native code, 0 on error. |
| 1424 | // %rdx: pointer to the bottom of the used area of the alloca, can restore stack till there. |
Andreas Gampe | 36fea8d | 2014-03-10 13:37:40 -0700 | [diff] [blame] | 1425 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1426 | // Check for error = 0. |
Andreas Gampe | 36fea8d | 2014-03-10 13:37:40 -0700 | [diff] [blame] | 1427 | test %rax, %rax |
Nicolas Geoffray | 126d659 | 2015-03-03 14:28:35 +0000 | [diff] [blame] | 1428 | jz .Lexception_in_native |
Andreas Gampe | 36fea8d | 2014-03-10 13:37:40 -0700 | [diff] [blame] | 1429 | |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1430 | // Release part of the alloca. |
| 1431 | movq %rdx, %rsp |
Andreas Gampe | 36fea8d | 2014-03-10 13:37:40 -0700 | [diff] [blame] | 1432 | |
Andreas Gampe | c147b00 | 2014-03-06 18:11:06 -0800 | [diff] [blame] | 1433 | // pop from the register-passing alloca region |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1434 | // what's the right layout? |
| 1435 | popq %rdi |
| 1436 | popq %rsi |
| 1437 | popq %rdx |
| 1438 | popq %rcx |
| 1439 | popq %r8 |
| 1440 | popq %r9 |
| 1441 | // TODO: skip floating point if unused, some flag. |
| 1442 | movq 0(%rsp), %xmm0 |
| 1443 | movq 8(%rsp), %xmm1 |
| 1444 | movq 16(%rsp), %xmm2 |
| 1445 | movq 24(%rsp), %xmm3 |
| 1446 | movq 32(%rsp), %xmm4 |
| 1447 | movq 40(%rsp), %xmm5 |
| 1448 | movq 48(%rsp), %xmm6 |
| 1449 | movq 56(%rsp), %xmm7 |
| 1450 | addq LITERAL(64), %rsp // floating-point done |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1451 | |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1452 | // native call |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1453 | call *%rax |
| 1454 | |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1455 | // result sign extension is handled in C code |
| 1456 | // prepare for artQuickGenericJniEndTrampoline call |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1457 | // (Thread*, result, result_f) |
| 1458 | // rdi rsi rdx <= C calling convention |
| 1459 | // gs:... rax xmm0 <= where they are |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1460 | movq %gs:THREAD_SELF_OFFSET, %rdi |
Andreas Gampe | c200a4a | 2014-06-16 18:39:09 -0700 | [diff] [blame] | 1461 | movq %rax, %rsi |
| 1462 | movq %xmm0, %rdx |
Andreas Gampe | 29b3841 | 2014-08-13 00:15:43 -0700 | [diff] [blame] | 1463 | call SYMBOL(artQuickGenericJniEndTrampoline) |
Ian Rogers | e0dcd46 | 2014-03-08 15:21:04 -0800 | [diff] [blame] | 1464 | |
Ian Rogers | e0dcd46 | 2014-03-08 15:21:04 -0800 | [diff] [blame] | 1465 | // Pending exceptions possible. |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1466 | // TODO: use cmpq, needs direct encoding because of gas bug |
Ian Rogers | e0dcd46 | 2014-03-08 15:21:04 -0800 | [diff] [blame] | 1467 | movq %gs:THREAD_EXCEPTION_OFFSET, %rcx |
| 1468 | test %rcx, %rcx |
| 1469 | jnz .Lexception_in_native |
| 1470 | |
Nicolas Geoffray | 126d659 | 2015-03-03 14:28:35 +0000 | [diff] [blame] | 1471 | // Tear down the alloca. |
| 1472 | movq %rbp, %rsp |
| 1473 | CFI_DEF_CFA_REGISTER(rsp) |
| 1474 | |
Ian Rogers | e0dcd46 | 2014-03-08 15:21:04 -0800 | [diff] [blame] | 1475 | // Tear down the callee-save frame. |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1476 | // Load FPRs. |
| 1477 | // movq %xmm0, 16(%rsp) // doesn't make sense!!! |
| 1478 | movq 24(%rsp), %xmm1 // neither does this!!! |
| 1479 | movq 32(%rsp), %xmm2 |
| 1480 | movq 40(%rsp), %xmm3 |
| 1481 | movq 48(%rsp), %xmm4 |
| 1482 | movq 56(%rsp), %xmm5 |
| 1483 | movq 64(%rsp), %xmm6 |
| 1484 | movq 72(%rsp), %xmm7 |
Serguei Katkov | c380191 | 2014-07-08 17:21:53 +0700 | [diff] [blame] | 1485 | movq 80(%rsp), %xmm12 |
| 1486 | movq 88(%rsp), %xmm13 |
| 1487 | movq 96(%rsp), %xmm14 |
| 1488 | movq 104(%rsp), %xmm15 |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1489 | // was 80 bytes |
Serguei Katkov | c380191 | 2014-07-08 17:21:53 +0700 | [diff] [blame] | 1490 | addq LITERAL(80 + 4*8), %rsp |
| 1491 | CFI_ADJUST_CFA_OFFSET(-80 - 4*8) |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1492 | // Save callee and GPR args, mixed together to agree with core spills bitmap. |
| 1493 | POP rcx // Arg. |
| 1494 | POP rdx // Arg. |
| 1495 | POP rbx // Callee save. |
| 1496 | POP rbp // Callee save. |
| 1497 | POP rsi // Arg. |
| 1498 | POP r8 // Arg. |
| 1499 | POP r9 // Arg. |
| 1500 | POP r12 // Callee save. |
| 1501 | POP r13 // Callee save. |
| 1502 | POP r14 // Callee save. |
| 1503 | POP r15 // Callee save. |
| 1504 | // store into fpr, for when it's a fpr return... |
| 1505 | movq %rax, %xmm0 |
| 1506 | ret |
Ian Rogers | e0dcd46 | 2014-03-08 15:21:04 -0800 | [diff] [blame] | 1507 | .Lexception_in_native: |
Nicolas Geoffray | 126d659 | 2015-03-03 14:28:35 +0000 | [diff] [blame] | 1508 | movq %gs:THREAD_TOP_QUICK_FRAME_OFFSET, %rsp |
| 1509 | CFI_DEF_CFA_REGISTER(rsp) |
| 1510 | // Do a call to push a new save-all frame required by the runtime. |
| 1511 | call .Lexception_call |
| 1512 | .Lexception_call: |
Andreas Gampe | bf6b92a | 2014-03-05 16:11:04 -0800 | [diff] [blame] | 1513 | DELIVER_PENDING_EXCEPTION |
| 1514 | END_FUNCTION art_quick_generic_jni_trampoline |
Andreas Gampe | 2da8823 | 2014-02-27 12:26:20 -0800 | [diff] [blame] | 1515 | |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 1516 | /* |
| 1517 | * Called to bridge from the quick to interpreter ABI. On entry the arguments match those |
| 1518 | * of a quick call: |
| 1519 | * RDI = method being called / to bridge to. |
| 1520 | * RSI, RDX, RCX, R8, R9 are arguments to that method. |
| 1521 | */ |
Ian Rogers | 6f3dbba | 2014-10-14 17:41:57 -0700 | [diff] [blame] | 1522 | DEFINE_FUNCTION art_quick_to_interpreter_bridge |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1523 | SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME // Set up frame and save arguments. |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 1524 | movq %gs:THREAD_SELF_OFFSET, %rsi // RSI := Thread::Current() |
| 1525 | movq %rsp, %rdx // RDX := sp |
Andreas Gampe | 29b3841 | 2014-08-13 00:15:43 -0700 | [diff] [blame] | 1526 | call SYMBOL(artQuickToInterpreterBridge) // (method, Thread*, SP) |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1527 | RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME // TODO: no need to restore arguments in this case. |
Ian Rogers | 936b37f | 2014-02-14 00:52:24 -0800 | [diff] [blame] | 1528 | movq %rax, %xmm0 // Place return value also into floating point return value. |
| 1529 | RETURN_OR_DELIVER_PENDING_EXCEPTION // return or deliver exception |
| 1530 | END_FUNCTION art_quick_to_interpreter_bridge |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 1531 | |
| 1532 | /* |
| 1533 | * Routine that intercepts method calls and returns. |
| 1534 | */ |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 1535 | DEFINE_FUNCTION art_quick_instrumentation_entry |
Ian Rogers | c3ccc10 | 2014-06-25 11:52:14 -0700 | [diff] [blame] | 1536 | #if defined(__APPLE__) |
| 1537 | int3 |
| 1538 | int3 |
| 1539 | #else |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1540 | SETUP_REFS_AND_ARGS_CALLEE_SAVE_FRAME |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 1541 | |
| 1542 | movq %rdi, %r12 // Preserve method pointer in a callee-save. |
| 1543 | |
| 1544 | movq %gs:THREAD_SELF_OFFSET, %rdx // Pass thread. |
Sebastien Hertz | 32b12f8 | 2014-11-17 12:46:27 +0100 | [diff] [blame] | 1545 | movq FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE-8(%rsp), %rcx // Pass return PC. |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 1546 | |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1547 | call SYMBOL(artInstrumentationMethodEntryFromCode) // (Method*, Object*, Thread*, LR) |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 1548 | |
| 1549 | // %rax = result of call. |
| 1550 | movq %r12, %rdi // Reload method pointer. |
| 1551 | |
Andreas Gampe | 29b3841 | 2014-08-13 00:15:43 -0700 | [diff] [blame] | 1552 | leaq art_quick_instrumentation_exit(%rip), %r12 // Set up return through instrumentation |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 1553 | movq %r12, FRAME_SIZE_REFS_AND_ARGS_CALLEE_SAVE-8(%rsp) // exit. |
| 1554 | |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1555 | RESTORE_REFS_AND_ARGS_CALLEE_SAVE_FRAME |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 1556 | |
| 1557 | jmp *%rax // Tail call to intended method. |
Ian Rogers | c3ccc10 | 2014-06-25 11:52:14 -0700 | [diff] [blame] | 1558 | #endif // __APPLE__ |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 1559 | END_FUNCTION art_quick_instrumentation_entry |
| 1560 | |
| 1561 | DEFINE_FUNCTION art_quick_instrumentation_exit |
| 1562 | pushq LITERAL(0) // Push a fake return PC as there will be none on the stack. |
| 1563 | |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1564 | SETUP_REFS_ONLY_CALLEE_SAVE_FRAME |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 1565 | |
| 1566 | // We need to save rax and xmm0. We could use a callee-save from SETUP_REF_ONLY, but then |
| 1567 | // we would need to fully restore it. As there are a good number of callee-save registers, it |
| 1568 | // seems easier to have an extra small stack area. But this should be revisited. |
| 1569 | |
| 1570 | movq %rsp, %rsi // Pass SP. |
| 1571 | |
| 1572 | PUSH rax // Save integer result. |
| 1573 | subq LITERAL(8), %rsp // Save floating-point result. |
| 1574 | CFI_ADJUST_CFA_OFFSET(8) |
Andreas Gampe | fea2901 | 2014-07-23 10:05:02 -0700 | [diff] [blame] | 1575 | movq %xmm0, (%rsp) |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 1576 | |
| 1577 | movq %gs:THREAD_SELF_OFFSET, %rdi // Pass Thread. |
| 1578 | movq %rax, %rdx // Pass integer result. |
| 1579 | movq %xmm0, %rcx // Pass floating-point result. |
| 1580 | |
Andreas Gampe | 29b3841 | 2014-08-13 00:15:43 -0700 | [diff] [blame] | 1581 | call SYMBOL(artInstrumentationMethodExitFromCode) // (Thread*, SP, gpr_res, fpr_res) |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 1582 | |
| 1583 | movq %rax, %rdi // Store return PC |
| 1584 | movq %rdx, %rsi // Store second return PC in hidden arg. |
| 1585 | |
Andreas Gampe | fea2901 | 2014-07-23 10:05:02 -0700 | [diff] [blame] | 1586 | movq (%rsp), %xmm0 // Restore floating-point result. |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 1587 | addq LITERAL(8), %rsp |
| 1588 | CFI_ADJUST_CFA_OFFSET(-8) |
| 1589 | POP rax // Restore integer result. |
| 1590 | |
Sebastien Hertz | 20e7d60 | 2015-03-12 15:10:50 +0100 | [diff] [blame] | 1591 | RESTORE_REFS_ONLY_CALLEE_SAVE_FRAME |
| 1592 | |
| 1593 | addq LITERAL(8), %rsp // Drop fake return pc. |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 1594 | |
| 1595 | jmp *%rdi // Return. |
| 1596 | END_FUNCTION art_quick_instrumentation_exit |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 1597 | |
| 1598 | /* |
| 1599 | * Instrumentation has requested that we deoptimize into the interpreter. The deoptimization |
| 1600 | * will long jump to the upcall with a special exception of -1. |
| 1601 | */ |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 1602 | DEFINE_FUNCTION art_quick_deoptimize |
Mingyao Yang | d43b3ac | 2015-04-01 14:03:04 -0700 | [diff] [blame] | 1603 | pushq %rsi // Entry point for a jump. Fake that we were called. |
| 1604 | // Use hidden arg. |
| 1605 | .globl SYMBOL(art_quick_deoptimize_from_compiled_slow_path) // Entry point for real calls |
| 1606 | // from compiled slow paths. |
| 1607 | SYMBOL(art_quick_deoptimize_from_compiled_slow_path): |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 1608 | SETUP_SAVE_ALL_CALLEE_SAVE_FRAME |
| 1609 | // Stack should be aligned now. |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 1610 | movq %gs:THREAD_SELF_OFFSET, %rdi // Pass Thread. |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1611 | call SYMBOL(artDeoptimize) // artDeoptimize(Thread*) |
Andreas Gampe | d58342c | 2014-06-05 14:18:08 -0700 | [diff] [blame] | 1612 | int3 // Unreachable. |
| 1613 | END_FUNCTION art_quick_deoptimize |
| 1614 | |
Alexei Zavjalov | 315ccab | 2014-05-01 23:24:05 +0700 | [diff] [blame] | 1615 | /* |
| 1616 | * String's compareTo. |
| 1617 | * |
| 1618 | * On entry: |
| 1619 | * rdi: this string object (known non-null) |
| 1620 | * rsi: comp string object (known non-null) |
| 1621 | */ |
| 1622 | DEFINE_FUNCTION art_quick_string_compareto |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1623 | movl MIRROR_STRING_COUNT_OFFSET(%edi), %r8d |
| 1624 | movl MIRROR_STRING_COUNT_OFFSET(%esi), %r9d |
| 1625 | movl MIRROR_STRING_VALUE_OFFSET(%edi), %r10d |
| 1626 | movl MIRROR_STRING_VALUE_OFFSET(%esi), %r11d |
| 1627 | movl MIRROR_STRING_OFFSET_OFFSET(%edi), %eax |
| 1628 | movl MIRROR_STRING_OFFSET_OFFSET(%esi), %ecx |
Alexei Zavjalov | 315ccab | 2014-05-01 23:24:05 +0700 | [diff] [blame] | 1629 | /* Build pointers to the start of string data */ |
Ian Rogers | 1d8cdbc | 2014-09-22 22:51:09 -0700 | [diff] [blame] | 1630 | leal MIRROR_CHAR_ARRAY_DATA_OFFSET(%r10d, %eax, 2), %esi |
| 1631 | leal MIRROR_CHAR_ARRAY_DATA_OFFSET(%r11d, %ecx, 2), %edi |
Alexei Zavjalov | 315ccab | 2014-05-01 23:24:05 +0700 | [diff] [blame] | 1632 | /* Calculate min length and count diff */ |
| 1633 | movl %r8d, %ecx |
| 1634 | movl %r8d, %eax |
| 1635 | subl %r9d, %eax |
| 1636 | cmovg %r9d, %ecx |
| 1637 | /* |
| 1638 | * At this point we have: |
| 1639 | * eax: value to return if first part of strings are equal |
| 1640 | * ecx: minimum among the lengths of the two strings |
| 1641 | * esi: pointer to this string data |
| 1642 | * edi: pointer to comp string data |
| 1643 | */ |
| 1644 | jecxz .Lkeep_length |
| 1645 | repe cmpsw // find nonmatching chars in [%esi] and [%edi], up to length %ecx |
| 1646 | jne .Lnot_equal |
| 1647 | .Lkeep_length: |
| 1648 | ret |
| 1649 | .balign 16 |
| 1650 | .Lnot_equal: |
| 1651 | movzwl -2(%esi), %eax // get last compared char from this string |
| 1652 | movzwl -2(%edi), %ecx // get last compared char from comp string |
| 1653 | subl %ecx, %eax // return the difference |
| 1654 | ret |
| 1655 | END_FUNCTION art_quick_string_compareto |
| 1656 | |
Ian Rogers | ef7d42f | 2014-01-06 12:55:46 -0800 | [diff] [blame] | 1657 | UNIMPLEMENTED art_quick_memcmp16 |
Serguei Katkov | c380191 | 2014-07-08 17:21:53 +0700 | [diff] [blame] | 1658 | |
| 1659 | DEFINE_FUNCTION art_quick_assignable_from_code |
| 1660 | SETUP_FP_CALLEE_SAVE_FRAME |
Andreas Gampe | 29b3841 | 2014-08-13 00:15:43 -0700 | [diff] [blame] | 1661 | call SYMBOL(artIsAssignableFromCode) // (const mirror::Class*, const mirror::Class*) |
Serguei Katkov | c380191 | 2014-07-08 17:21:53 +0700 | [diff] [blame] | 1662 | RESTORE_FP_CALLEE_SAVE_FRAME |
| 1663 | ret |
| 1664 | END_FUNCTION art_quick_assignable_from_code |
Dave Allison | 8ce6b90 | 2014-08-26 11:07:58 -0700 | [diff] [blame] | 1665 | |
| 1666 | |
| 1667 | // Return from a nested signal: |
| 1668 | // Entry: |
| 1669 | // rdi: address of jmp_buf in TLS |
| 1670 | |
| 1671 | DEFINE_FUNCTION art_nested_signal_return |
| 1672 | // first arg to longjmp is already in correct register |
| 1673 | movq LITERAL(1), %rsi // second arg to longjmp (1) |
| 1674 | call PLT_SYMBOL(longjmp) |
| 1675 | int3 // won't get here |
| 1676 | END_FUNCTION art_nested_signal_return |