/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ARCH_ARM_ASM_SUPPORT_ARM_S_
#define ART_RUNTIME_ARCH_ARM_ASM_SUPPORT_ARM_S_

#include "asm_support_arm.h"
#include "interpreter/cfi_asm_support.h"

// Define special registers.

// Register holding suspend check count down.
#define rSUSPEND r4
// Register holding Thread::Current().
#define rSELF r9

#ifdef RESERVE_MARKING_REGISTER
// Marking Register, holding Thread::Current()->GetIsGcMarking().
#define rMR r8
#endif

.syntax unified
.arch armv7-a
.arch_extension idiv
.thumb

.macro CFI_EXPRESSION_BREG n, b, offset
    .if (-0x40 <= (\offset)) && ((\offset) < 0x40)
        CFI_EXPRESSION_BREG_1(\n, \b, \offset)
    .elseif (-0x2000 <= (\offset)) && ((\offset) < 0x2000)
        CFI_EXPRESSION_BREG_2(\n, \b, \offset)
    .else
        .error "Unsupported offset"
    .endif
.endm
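// Usage sketch (illustrative only, kept as a comment so nothing extra is assembled): record
// in CFI that DWARF register 14 (LR) is saved at [r11, #-8]. The register numbers and the
// offset below are hypothetical, not taken from any particular entrypoint.
//     CFI_EXPRESSION_BREG 14, 11, -8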

.macro CFI_DEF_CFA_BREG_PLUS_UCONST reg, offset, size
    .if ((\size) < 0)
        .error "Size should be positive"
    .endif
    .if (((\offset) < -0x40) || ((\offset) >= 0x40))
        .error "Unsupported offset"
    .endif
    .if ((\size) < 0x80)
        CFI_DEF_CFA_BREG_PLUS_UCONST_1_1(\reg, \offset, \size)
    .elseif ((\size) < 0x4000)
        CFI_DEF_CFA_BREG_PLUS_UCONST_1_2(\reg, \offset, \size)
    .else
        .error "Unsupported size"
    .endif
.endm

// The spec is not clear whether the CFA is part of the saved state and tools
// differ in the behaviour, so explicitly set the CFA to avoid any ambiguity.
// The restored CFA state should match the CFA state during CFI_REMEMBER_STATE.
.macro CFI_RESTORE_STATE_AND_DEF_CFA reg, offset
    .cfi_restore_state
    .cfi_def_cfa \reg, \offset
.endm
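// Usage sketch (illustrative only, kept as a comment so nothing extra is assembled): pair
// with CFI_REMEMBER_STATE around a fast-path return so the slow path resumes with the same
// unwind state; the CFA of sp + 16 below is a hypothetical frame size.
//     CFI_REMEMBER_STATE
//     DECREASE_FRAME 16                        @ Fast path: pop the frame and return.
//     bx lr
//     CFI_RESTORE_STATE_AND_DEF_CFA sp, 16     @ Slow path continues with the original frame.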

// Common ENTRY declaration code for ARM and Thumb; an ENTRY should always be paired with an END.
.macro DEF_ENTRY thumb_or_arm, name, alignment
    \thumb_or_arm
// Clang ignores .thumb_func and requires an explicit .thumb. Investigate whether we should still
// carry around the .thumb_func.
    .ifc \thumb_or_arm, .thumb_func
        .thumb
    .endif
    .type \name, #function
    .hidden \name  // Hide this as a global symbol, so we do not incur plt calls.
    .global \name
    // ART-compiled functions have OatQuickMethodHeader but assembly functions do not.
    // Prefix the assembly code with 0xFFs, which means there is no method header.
    .byte 0xFF, 0xFF, 0xFF, 0xFF
    // Cache alignment for function entry.
    // NB: 0xFF because there is a bug in balign where 0x00 creates nop instructions.
    .balign \alignment, 0xFF
\name:
    .cfi_startproc
    .fnstart
.endm

// A thumb2 style ENTRY.
.macro ENTRY name
    DEF_ENTRY .thumb_func, \name, 16
.endm
.macro ENTRY_ALIGNED name, alignment
    DEF_ENTRY .thumb_func, \name, \alignment
.endm
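// Usage sketch (illustrative only, kept as a comment so nothing extra is assembled): every
// ENTRY or ENTRY_ALIGNED must be closed by a matching END, which emits the .fnend,
// .cfi_endproc and symbol size directives; the entrypoint name below is hypothetical.
//     ENTRY art_quick_example_stub
//         bx lr
//     END art_quick_example_stub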

// An ARM style ENTRY.
.macro ARM_ENTRY name
    DEF_ENTRY .arm, \name, 16
.endm

// Terminate an ENTRY.
.macro END name
    .fnend
    .cfi_endproc
    .size \name, .-\name
.endm

// Declare an unimplemented ENTRY that will halt a debugger.
.macro UNIMPLEMENTED name
ENTRY \name
    bkpt
    bkpt
END \name
.endm

// Macro to poison (negate) the reference for heap poisoning.
.macro POISON_HEAP_REF rRef
#ifdef USE_HEAP_POISONING
    rsb \rRef, \rRef, #0
#endif // USE_HEAP_POISONING
.endm

// Macro to unpoison (negate) the reference for heap poisoning.
.macro UNPOISON_HEAP_REF rRef
#ifdef USE_HEAP_POISONING
    rsb \rRef, \rRef, #0
#endif // USE_HEAP_POISONING
.endm
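// Usage sketch (illustrative only, kept as a comment so nothing extra is assembled): with
// heap poisoning enabled, references are stored to the heap negated and must be unpoisoned
// after being loaded; the registers and the field offset below are hypothetical.
//     ldr r1, [r0, #8]                         @ Load a reference field.
//     UNPOISON_HEAP_REF r1                     @ No-op unless USE_HEAP_POISONING is defined.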

.macro INCREASE_FRAME frame_adjustment
    sub sp, sp, #(\frame_adjustment)
    .cfi_adjust_cfa_offset (\frame_adjustment)
.endm

.macro DECREASE_FRAME frame_adjustment
    add sp, sp, #(\frame_adjustment)
    .cfi_adjust_cfa_offset -(\frame_adjustment)
.endm
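// Usage sketch (illustrative only, kept as a comment so nothing extra is assembled): these
// keep the CFI CFA offset in sync with explicit stack adjustments; the 16-byte scratch area
// below is hypothetical.
//     INCREASE_FRAME 16                        @ sub sp, sp, #16 and adjust the CFA by +16.
//     str r0, [sp]
//     ldr r0, [sp]
//     DECREASE_FRAME 16                        @ add sp, sp, #16 and adjust the CFA by -16.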

.macro LOAD_RUNTIME_INSTANCE rDest
    movw \rDest, #:lower16:(_ZN3art7Runtime9instance_E - (. + 12))
    movt \rDest, #:upper16:(_ZN3art7Runtime9instance_E - (. + 8))
    add \rDest, pc
    ldr \rDest, [\rDest]
.endm
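// Note on the offsets above: in Thumb state the add reads PC as its own address plus 4,
// which is 12 bytes past the movw and 8 bytes past the movt, hence the ". + 12" and ". + 8"
// corrections. After the add, rDest holds the address of Runtime::instance_ and the final
// ldr loads the Runtime* stored there. Usage sketch (illustrative only, kept as a comment):
//     LOAD_RUNTIME_INSTANCE r0                 @ r0 := Runtime::Current().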

// Macro to refresh the Marking Register (R8).
//
// This macro must be called at the end of functions implementing
// entrypoints that possibly (directly or indirectly) perform a
// suspend check (before they return).
.macro REFRESH_MARKING_REGISTER
#ifdef RESERVE_MARKING_REGISTER
    ldr rMR, [rSELF, #THREAD_IS_GC_MARKING_OFFSET]
#endif
.endm
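// Usage sketch (illustrative only, kept as a comment so nothing extra is assembled): refresh
// rMR after any call that may have suspended the thread, before returning to managed code;
// the callee name below is hypothetical.
//     bl artExampleFromCode                    @ Hypothetical runtime call that may suspend.
//     RESTORE_SAVE_REFS_ONLY_FRAME
//     REFRESH_MARKING_REGISTER
//     bx lr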

.macro CONDITIONAL_CBZ reg, reg_if, dest
    .ifc \reg, \reg_if
        cbz \reg, \dest
    .endif
.endm

.macro CONDITIONAL_CMPBZ reg, reg_if, dest
    .ifc \reg, \reg_if
        cmp \reg, #0
        beq \dest
    .endif
.endm

// Use CBZ if the register is in {r0-r7}, otherwise compare and branch.
.macro SMART_CBZ reg, dest
    CONDITIONAL_CBZ \reg, r0, \dest
    CONDITIONAL_CBZ \reg, r1, \dest
    CONDITIONAL_CBZ \reg, r2, \dest
    CONDITIONAL_CBZ \reg, r3, \dest
    CONDITIONAL_CBZ \reg, r4, \dest
    CONDITIONAL_CBZ \reg, r5, \dest
    CONDITIONAL_CBZ \reg, r6, \dest
    CONDITIONAL_CBZ \reg, r7, \dest
    CONDITIONAL_CMPBZ \reg, r8, \dest
    CONDITIONAL_CMPBZ \reg, r9, \dest
    CONDITIONAL_CMPBZ \reg, r10, \dest
    CONDITIONAL_CMPBZ \reg, r11, \dest
    CONDITIONAL_CMPBZ \reg, r12, \dest
    CONDITIONAL_CMPBZ \reg, r13, \dest
    CONDITIONAL_CMPBZ \reg, r14, \dest
    CONDITIONAL_CMPBZ \reg, r15, \dest
.endm
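// Usage sketch (illustrative only, kept as a comment so nothing extra is assembled): exactly
// one of the conditional macros above matches, so this expands to a single cbz for a low
// register or a cmp/beq pair for a high one; the register and label are hypothetical.
//     SMART_CBZ r2, .Lexample_return_null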

/*
 * Macro that sets up the callee save frame to conform with
 * Runtime::CreateCalleeSaveMethod(kSaveRefsAndArgs), except for storing the method.
 */
.macro SETUP_SAVE_REFS_AND_ARGS_FRAME_REGISTERS_ONLY
    // Note: We could avoid saving R8 in the case of Baker read
    // barriers, as it is overwritten by REFRESH_MARKING_REGISTER
    // later; but it's not worth handling this special case.
    push {r1-r3, r5-r8, r10-r11, lr}             @ 10 words of callee saves and args.
    .cfi_adjust_cfa_offset 40
    .cfi_rel_offset r1, 0
    .cfi_rel_offset r2, 4
    .cfi_rel_offset r3, 8
    .cfi_rel_offset r5, 12
    .cfi_rel_offset r6, 16
    .cfi_rel_offset r7, 20
    .cfi_rel_offset r8, 24
    .cfi_rel_offset r10, 28
    .cfi_rel_offset r11, 32
    .cfi_rel_offset lr, 36
    vpush {s0-s15}                               @ 16 words of float args.
    .cfi_adjust_cfa_offset 64
    sub sp, #8                                   @ 2 words of space, alignment padding and Method*
    .cfi_adjust_cfa_offset 8
    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_SAVE_REFS_AND_ARGS != 40 + 64 + 8)
#error "FRAME_SIZE_SAVE_REFS_AND_ARGS(ARM) size not as expected."
#endif
.endm

.macro RESTORE_SAVE_REFS_AND_ARGS_FRAME
    add sp, #8                                   @ rewind sp
    .cfi_adjust_cfa_offset -8
    vpop {s0-s15}
    .cfi_adjust_cfa_offset -64
    // Note: Likewise, we could avoid restoring R8 in the case of Baker
    // read barriers, as it is overwritten by REFRESH_MARKING_REGISTER
    // later; but it's not worth handling this special case.
    pop {r1-r3, r5-r8, r10-r11, lr}              @ 10 words of callee saves and args.
    .cfi_restore r1
    .cfi_restore r2
    .cfi_restore r3
    .cfi_restore r5
    .cfi_restore r6
    .cfi_restore r7
    .cfi_restore r8
    .cfi_restore r10
    .cfi_restore r11
    .cfi_restore lr
    .cfi_adjust_cfa_offset -40
.endm
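// Usage sketch (illustrative only, kept as a comment so nothing extra is assembled): the two
// macros above are used as a matched pair around a runtime call, with a caller-provided
// ArtMethod* stored in the reserved bottom slot; the register use and callee name below are
// hypothetical.
//     SETUP_SAVE_REFS_AND_ARGS_FRAME_REGISTERS_ONLY
//     str r0, [sp, #0]                          @ r0 assumed to hold the calling ArtMethod*.
//     str sp, [rSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]  @ Publish the frame to the runtime.
//     bl  artExampleWithArgsFromCode            @ Hypothetical runtime call.
//     RESTORE_SAVE_REFS_AND_ARGS_FRAME
//     REFRESH_MARKING_REGISTER
//     bx  lr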

/*
 * Macro to spill the GPRs.
 */
.macro SPILL_ALL_CALLEE_SAVE_GPRS
    push {r4-r11, lr}                            @ 9 words (36 bytes) of callee saves.
    .cfi_adjust_cfa_offset 36
    .cfi_rel_offset r4, 0
    .cfi_rel_offset r5, 4
    .cfi_rel_offset r6, 8
    .cfi_rel_offset r7, 12
    .cfi_rel_offset r8, 16
    .cfi_rel_offset r9, 20
    .cfi_rel_offset r10, 24
    .cfi_rel_offset r11, 28
    .cfi_rel_offset lr, 32
.endm

/*
 * Macro that sets up the callee save frame to conform with
 * Runtime::CreateCalleeSaveMethod(kSaveAllCalleeSaves).
 */
.macro SETUP_SAVE_ALL_CALLEE_SAVES_FRAME rTemp
    SPILL_ALL_CALLEE_SAVE_GPRS                   @ 9 words (36 bytes) of callee saves.
    vpush {s16-s31}                              @ 16 words (64 bytes) of floats.
    .cfi_adjust_cfa_offset 64
    sub sp, #12                                  @ 3 words of space, bottom word will hold Method*
    .cfi_adjust_cfa_offset 12
    LOAD_RUNTIME_INSTANCE \rTemp                 @ Load Runtime::Current into rTemp.
                                                 @ Load kSaveAllCalleeSaves Method* into rTemp.
    ldr \rTemp, [\rTemp, #RUNTIME_SAVE_ALL_CALLEE_SAVES_METHOD_OFFSET]
    str \rTemp, [sp, #0]                         @ Place Method* at bottom of stack.
    str sp, [rSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]  @ Place sp in Thread::Current()->top_quick_frame.

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVES != 36 + 64 + 12)
#error "FRAME_SIZE_SAVE_ALL_CALLEE_SAVES(ARM) size not as expected."
#endif
.endm

/*
 * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
 * exception is Thread::Current()->exception_ when the runtime method frame is ready.
 */
.macro DELIVER_PENDING_EXCEPTION_FRAME_READY
    mov r0, rSELF                                @ pass Thread::Current
    bl artDeliverPendingExceptionFromCode        @ artDeliverPendingExceptionFromCode(Thread*)
.endm

/*
 * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
 * exception is Thread::Current()->exception_.
 */
.macro DELIVER_PENDING_EXCEPTION
    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME r0         @ save callee saves for throw
    DELIVER_PENDING_EXCEPTION_FRAME_READY
.endm

.macro RETURN_OR_DELIVER_PENDING_EXCEPTION_REG reg
    ldr \reg, [rSELF, #THREAD_EXCEPTION_OFFSET]  @ Get exception field.
    cbnz \reg, 1f
    bx lr
1:
    DELIVER_PENDING_EXCEPTION
.endm

.macro RETURN_OR_DELIVER_PENDING_EXCEPTION
    ldr ip, [rSELF, #THREAD_EXCEPTION_OFFSET]    @ Get exception field.
    cmp ip, #0
    bne 1f
    bx lr
1:
    DELIVER_PENDING_EXCEPTION
.endm
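// Usage sketch (illustrative only, kept as a comment so nothing extra is assembled): a
// typical void-returning runtime entrypoint checks for a pending exception on the way out;
// the entrypoint and callee names below are hypothetical.
//     ENTRY art_quick_example_entrypoint
//         SETUP_SAVE_REFS_ONLY_FRAME r1
//         mov r0, rSELF                         @ Pass Thread::Current.
//         bl  artExampleFromCode                @ Hypothetical C++ runtime entrypoint.
//         RESTORE_SAVE_REFS_ONLY_FRAME
//         REFRESH_MARKING_REGISTER
//         RETURN_OR_DELIVER_PENDING_EXCEPTION
//     END art_quick_example_entrypoint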

/*
 * Macro that sets up the callee save frame to conform with
 * Runtime::CreateCalleeSaveMethod(kSaveRefsOnly).
 */
.macro SETUP_SAVE_REFS_ONLY_FRAME rTemp
    // Note: We could avoid saving R8 in the case of Baker read
    // barriers, as it is overwritten by REFRESH_MARKING_REGISTER
    // later; but it's not worth handling this special case.
    push {r5-r8, r10-r11, lr}                    @ 7 words of callee saves
    .cfi_adjust_cfa_offset 28
    .cfi_rel_offset r5, 0
    .cfi_rel_offset r6, 4
    .cfi_rel_offset r7, 8
    .cfi_rel_offset r8, 12
    .cfi_rel_offset r10, 16
    .cfi_rel_offset r11, 20
    .cfi_rel_offset lr, 24
    sub sp, #4                                   @ bottom word will hold Method*
    .cfi_adjust_cfa_offset 4
    LOAD_RUNTIME_INSTANCE \rTemp                 @ Load Runtime::Current into rTemp.
                                                 @ Load kSaveRefsOnly Method* into rTemp.
    ldr \rTemp, [\rTemp, #RUNTIME_SAVE_REFS_ONLY_METHOD_OFFSET]
    str \rTemp, [sp, #0]                         @ Place Method* at bottom of stack.
    str sp, [rSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]  @ Place sp in Thread::Current()->top_quick_frame.

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_SAVE_REFS_ONLY != 28 + 4)
#error "FRAME_SIZE_SAVE_REFS_ONLY(ARM) size not as expected."
#endif
.endm

.macro RESTORE_SAVE_REFS_ONLY_FRAME
    add sp, #4                                   @ bottom word holds Method*
    .cfi_adjust_cfa_offset -4
    // Note: Likewise, we could avoid restoring R8 in the case of Baker
    // read barriers, as it is overwritten by REFRESH_MARKING_REGISTER
    // later; but it's not worth handling this special case.
    pop {r5-r8, r10-r11, lr}                     @ 7 words of callee saves
    .cfi_restore r5
    .cfi_restore r6
    .cfi_restore r7
    .cfi_restore r8
    .cfi_restore r10
    .cfi_restore r11
    .cfi_restore lr
    .cfi_adjust_cfa_offset -28
.endm

// Locking is needed for both managed code and JNI stubs.
.macro LOCK_OBJECT_FAST_PATH obj, tmp1, tmp2, tmp3, slow_lock, can_be_null
    ldr \tmp1, [rSELF, #THREAD_ID_OFFSET]
    .if \can_be_null
        cbz \obj, \slow_lock
    .endif
1:
    ldrex \tmp2, [\obj, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
    eor \tmp3, \tmp2, \tmp1                      @ Prepare the value to store if unlocked
                                                 @   (thread id, count of 0 and preserved read barrier bits),
                                                 @ or prepare to compare thread id for recursive lock check
                                                 @   (lock_word.ThreadId() ^ self->ThreadId()).
    ands ip, \tmp2, #LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED  @ Test the non-gc bits.
    bne 2f                                       @ Check if unlocked.
    @ unlocked case - store tmp3: original lock word plus thread id, preserved read barrier bits.
    strex \tmp2, \tmp3, [\obj, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
    cbnz \tmp2, 3f                               @ If store failed, retry.
    dmb ish                                      @ Full (LoadLoad|LoadStore) memory barrier.
    bx lr
2:  @ tmp2: original lock word, tmp1: thread_id, tmp3: tmp2 ^ tmp1
#if LOCK_WORD_THIN_LOCK_COUNT_SHIFT + LOCK_WORD_THIN_LOCK_COUNT_SIZE != LOCK_WORD_GC_STATE_SHIFT
#error "Expecting thin lock count and gc state in consecutive bits."
#endif
    @ Check lock word state and thread id together.
    bfc \tmp3, \
        #LOCK_WORD_THIN_LOCK_COUNT_SHIFT, \
        #(LOCK_WORD_THIN_LOCK_COUNT_SIZE + LOCK_WORD_GC_STATE_SIZE)
    cbnz \tmp3, \slow_lock                       @ if either of the top two bits are set, or the lock word's
                                                 @ thread id did not match, go slow path.
    add \tmp3, \tmp2, #LOCK_WORD_THIN_LOCK_COUNT_ONE  @ Increment the recursive lock count.
    @ Extract the new thin lock count for overflow check.
    ubfx \tmp2, \tmp3, #LOCK_WORD_THIN_LOCK_COUNT_SHIFT, #LOCK_WORD_THIN_LOCK_COUNT_SIZE
    cbz \tmp2, \slow_lock                        @ Zero as the new count indicates overflow, go slow path.
    @ strex necessary for read barrier bits.
    strex \tmp2, \tmp3, [\obj, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
    cbnz \tmp2, 3f                               @ If strex failed, retry.
    bx lr
3:
    b 1b                                         @ retry
.endm
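// Usage sketch (illustrative only, kept as a comment so nothing extra is assembled): the
// object to lock is typically passed in r0 and the slow-path label falls through to a runtime
// call; the temporaries and label name below are hypothetical.
//     LOCK_OBJECT_FAST_PATH r0, r1, r2, r3, .Lexample_lock_slow_path, /*can_be_null*/ 1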

// Unlocking is needed for both managed code and JNI stubs.
.macro UNLOCK_OBJECT_FAST_PATH obj, tmp1, tmp2, tmp3, slow_unlock, can_be_null
    ldr \tmp1, [rSELF, #THREAD_ID_OFFSET]
    .if \can_be_null
        cbz \obj, \slow_unlock
    .endif
1:
#ifndef USE_READ_BARRIER
    ldr \tmp2, [\obj, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
#else
    @ Need to use atomic instructions for read barrier.
    ldrex \tmp2, [\obj, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
#endif
    eor \tmp3, \tmp2, \tmp1                      @ Prepare the value to store if simply locked
                                                 @   (mostly 0s, and preserved read barrier bits),
                                                 @ or prepare to compare thread id for recursive lock check
                                                 @   (lock_word.ThreadId() ^ self->ThreadId()).
    ands ip, \tmp3, #LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED  @ Test the non-gc bits.
    bne 2f                                       @ Locked recursively or by other thread?
    @ Transition to unlocked.
    dmb ish                                      @ Full (LoadStore|StoreStore) memory barrier.
#ifndef USE_READ_BARRIER
    str \tmp3, [\obj, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
#else
    @ strex necessary for read barrier bits
    strex \tmp2, \tmp3, [\obj, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
    cbnz \tmp2, 3f                               @ If the store failed, retry.
#endif
    bx lr
2:  @ tmp2: original lock word, tmp1: thread_id, tmp3: tmp2 ^ tmp1
#if LOCK_WORD_THIN_LOCK_COUNT_SHIFT + LOCK_WORD_THIN_LOCK_COUNT_SIZE != LOCK_WORD_GC_STATE_SHIFT
#error "Expecting thin lock count and gc state in consecutive bits."
#endif
    @ Check lock word state and thread id together.
    bfc \tmp3, \
        #LOCK_WORD_THIN_LOCK_COUNT_SHIFT, \
        #(LOCK_WORD_THIN_LOCK_COUNT_SIZE + LOCK_WORD_GC_STATE_SIZE)
    cbnz \tmp3, \slow_unlock                     @ if either of the top two bits are set, or the lock word's
                                                 @ thread id did not match, go slow path.
    sub \tmp3, \tmp2, #LOCK_WORD_THIN_LOCK_COUNT_ONE  @ Decrement recursive lock count.
#ifndef USE_READ_BARRIER
    str \tmp3, [\obj, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
#else
    @ strex necessary for read barrier bits.
    strex \tmp2, \tmp3, [\obj, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
    cbnz \tmp2, 3f                               @ If the store failed, retry.
#endif
    bx lr
3:
    b 1b                                         @ retry
.endm
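// Usage sketch (illustrative only, kept as a comment so nothing extra is assembled): mirrors
// the locking fast path above, with the object typically in r0; the temporaries and label
// name below are hypothetical.
//     UNLOCK_OBJECT_FAST_PATH r0, r1, r2, r3, .Lexample_unlock_slow_path, /*can_be_null*/ 1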

#endif // ART_RUNTIME_ARCH_ARM_ASM_SUPPORT_ARM_S_