/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ARCH_ARM_ASM_SUPPORT_ARM_S_
#define ART_RUNTIME_ARCH_ARM_ASM_SUPPORT_ARM_S_

#include "asm_support_arm.h"
#include "interpreter/cfi_asm_support.h"

// Define special registers.

// Register holding suspend check count down.
#define rSUSPEND r4
// Register holding Thread::Current().
#define rSELF r9

#ifdef RESERVE_MARKING_REGISTER
// Marking Register, holding Thread::Current()->GetIsGcMarking().
#define rMR r8
#endif

.syntax unified
.arch armv7-a
.arch_extension idiv
.thumb

.macro CFI_EXPRESSION_BREG n, b, offset
    .if (-0x40 <= (\offset)) && ((\offset) < 0x40)
        CFI_EXPRESSION_BREG_1(\n, \b, \offset)
    .elseif (-0x2000 <= (\offset)) && ((\offset) < 0x2000)
        CFI_EXPRESSION_BREG_2(\n, \b, \offset)
    .else
        .error "Unsupported offset"
    .endif
.endm
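
// Usage sketch (values are illustrative): after spilling LR at [r11, #-8],
// the location of LR (DWARF register 14) can be described as an expression
// based on r11 (DWARF register 11):
//
//     str lr, [r11, #-8]
//     CFI_EXPRESSION_BREG 14, 11, -8
//
// The macro picks the one-byte encoding for small offsets, the two-byte one
// otherwise, and rejects offsets outside +/-0x2000 at assembly time.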

.macro CFI_DEF_CFA_BREG_PLUS_UCONST reg, offset, size
    .if ((\size) < 0)
        .error "Size should be positive"
    .endif
    .if (((\offset) < -0x40) || ((\offset) >= 0x40))
        .error "Unsupported offset"
    .endif
    .if ((\size) < 0x80)
        CFI_DEF_CFA_BREG_PLUS_UCONST_1_1(\reg, \offset, \size)
    .elseif ((\size) < 0x4000)
        CFI_DEF_CFA_BREG_PLUS_UCONST_1_2(\reg, \offset, \size)
    .else
        .error "Unsupported size"
    .endif
.endm
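
// Usage sketch (operand values are illustrative): describe the CFA via
// DWARF register 11 (r11), a zero offset and a 64-byte frame size:
//
//     CFI_DEF_CFA_BREG_PLUS_UCONST 11, 0, 64
//
// The helpers from interpreter/cfi_asm_support.h pick the shortest DWARF
// encoding that fits the operands; anything not encodable fails the build.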

// The spec is not clear whether the CFA is part of the saved state, and tools
// differ in behaviour, so explicitly set the CFA to avoid any ambiguity.
// The restored CFA state should match the CFA state during CFI_REMEMBER_STATE.
.macro CFI_RESTORE_STATE_AND_DEF_CFA reg, offset
    .cfi_restore_state
    .cfi_def_cfa \reg, \offset
.endm
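
// Typical pairing (a sketch; the slow-path contents are hypothetical):
// remember the CFI state before a slow path that tears down the frame, then
// restore it and re-state the CFA for the code that follows:
//
//     .cfi_remember_state
//     ...slow path that pops the frame and returns...
//     CFI_RESTORE_STATE_AND_DEF_CFA sp, 64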

// Common ENTRY declaration code for ARM and Thumb; an ENTRY should always be paired with an END.
.macro DEF_ENTRY thumb_or_arm, name, alignment
    \thumb_or_arm
// Clang ignores .thumb_func and requires an explicit .thumb. Investigate whether we should still
// carry around the .thumb_func.
    .ifc \thumb_or_arm, .thumb_func
        .thumb
    .endif
    .type \name, #function
    .hidden \name  // Hide this as a global symbol, so we do not incur plt calls.
    .global \name
    // ART-compiled functions have OatQuickMethodHeader but assembly functions do not.
    // Prefix the assembly code with 0xFFs, which means there is no method header.
    .byte 0xFF, 0xFF, 0xFF, 0xFF
    // Cache alignment for function entry.
    // NB: 0xFF because there is a bug in balign where 0x00 creates nop instructions.
    .balign \alignment, 0xFF
\name:
    .cfi_startproc
    .fnstart
.endm

// A Thumb2-style ENTRY.
.macro ENTRY name
    DEF_ENTRY .thumb_func, \name, 16
.endm
.macro ENTRY_ALIGNED name, alignment
    DEF_ENTRY .thumb_func, \name, \alignment
.endm

// An ARM-style ENTRY.
.macro ARM_ENTRY name
    DEF_ENTRY .arm, \name, 16
.endm

// Terminate an ENTRY.
.macro END name
    .fnend
    .cfi_endproc
    .size \name, .-\name
.endm
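
// Usage sketch (the symbol name is hypothetical): a minimal leaf entrypoint
// built from the macros above:
//
//     ENTRY art_quick_example
//         bx lr
//     END art_quick_example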

// Declare an unimplemented ENTRY that will halt a debugger.
.macro UNIMPLEMENTED name
    ENTRY \name
    bkpt
    bkpt
    END \name
.endm

// Macro to poison (negate) the reference for heap poisoning.
.macro POISON_HEAP_REF rRef
#ifdef USE_HEAP_POISONING
    rsb \rRef, \rRef, #0
#endif  // USE_HEAP_POISONING
.endm

// Macro to unpoison (negate) the reference for heap poisoning.
.macro UNPOISON_HEAP_REF rRef
#ifdef USE_HEAP_POISONING
    rsb \rRef, \rRef, #0
#endif  // USE_HEAP_POISONING
.endm
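
// Illustrative use (a sketch; FIELD_OFFSET is a hypothetical placeholder):
// poison a reference before storing it into the heap and unpoison one after
// loading it, so raw in-heap values are never valid pointers:
//
//     POISON_HEAP_REF r1
//     str r1, [r0, #FIELD_OFFSET]
//     ldr r2, [r0, #FIELD_OFFSET]
//     UNPOISON_HEAP_REF r2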

.macro INCREASE_FRAME frame_adjustment
    sub sp, sp, #(\frame_adjustment)
    .cfi_adjust_cfa_offset (\frame_adjustment)
.endm

.macro DECREASE_FRAME frame_adjustment
    add sp, sp, #(\frame_adjustment)
    .cfi_adjust_cfa_offset -(\frame_adjustment)
.endm
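
// Usage sketch: allocate and release 16 bytes of scratch stack space while
// keeping the recorded CFA offset in sync with the actual SP:
//
//     INCREASE_FRAME 16
//     ...use [sp, #0] through [sp, #12]...
//     DECREASE_FRAME 16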

.macro LOAD_RUNTIME_INSTANCE rDest
    movw \rDest, #:lower16:(_ZN3art7Runtime9instance_E - (. + 12))
    movt \rDest, #:upper16:(_ZN3art7Runtime9instance_E - (. + 8))
    add \rDest, pc
    ldr \rDest, [\rDest]
.endm
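
// A note on the offsets above: in Thumb state, `add \rDest, pc` reads PC as
// the address of the add plus 4. The movw sits 8 bytes before the add and
// the movt 4 bytes before it, so `. + 12` and `. + 8` both denote the PC
// value read by the add. The two halves therefore form the 32-bit offset of
// Runtime::instance_ from that PC; the add turns it into the absolute
// address and the final ldr loads the Runtime* stored there.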

// Macro to refresh the Marking Register (R8).
//
// This macro must be called at the end of functions implementing
// entrypoints that possibly (directly or indirectly) perform a
// suspend check (before they return).
.macro REFRESH_MARKING_REGISTER
#ifdef RESERVE_MARKING_REGISTER
    ldr rMR, [rSELF, #THREAD_IS_GC_MARKING_OFFSET]
#endif
.endm

.macro CONDITIONAL_CBZ reg, reg_if, dest
.ifc \reg, \reg_if
    cbz \reg, \dest
.endif
.endm

.macro CONDITIONAL_CMPBZ reg, reg_if, dest
.ifc \reg, \reg_if
    cmp \reg, #0
    beq \dest
.endif
.endm

// Use CBZ if the register is in the range r0-r7; otherwise compare and branch.
.macro SMART_CBZ reg, dest
    CONDITIONAL_CBZ \reg, r0, \dest
    CONDITIONAL_CBZ \reg, r1, \dest
    CONDITIONAL_CBZ \reg, r2, \dest
    CONDITIONAL_CBZ \reg, r3, \dest
    CONDITIONAL_CBZ \reg, r4, \dest
    CONDITIONAL_CBZ \reg, r5, \dest
    CONDITIONAL_CBZ \reg, r6, \dest
    CONDITIONAL_CBZ \reg, r7, \dest
    CONDITIONAL_CMPBZ \reg, r8, \dest
    CONDITIONAL_CMPBZ \reg, r9, \dest
    CONDITIONAL_CMPBZ \reg, r10, \dest
    CONDITIONAL_CMPBZ \reg, r11, \dest
    CONDITIONAL_CMPBZ \reg, r12, \dest
    CONDITIONAL_CMPBZ \reg, r13, \dest
    CONDITIONAL_CMPBZ \reg, r14, \dest
    CONDITIONAL_CMPBZ \reg, r15, \dest
.endm
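
// Usage sketch: exactly one of the conditionals above matches, so the macro
// expands to a single cbz for a low register and to a cmp/beq pair otherwise
// (cbz only encodes r0-r7 and short forward branches):
//
//     SMART_CBZ r0, 1f     @ -> cbz r0, 1f
//     SMART_CBZ r12, 1f    @ -> cmp r12, #0; beq 1f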

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveRefsAndArgs), except for storing the method.
     */
.macro SETUP_SAVE_REFS_AND_ARGS_FRAME_REGISTERS_ONLY
    // Note: We could avoid saving R8 in the case of Baker read
    // barriers, as it is overwritten by REFRESH_MARKING_REGISTER
    // later; but it's not worth handling this special case.
    push {r1-r3, r5-r8, r10-r11, lr}   @ 10 words of callee saves and args.
    .cfi_adjust_cfa_offset 40
    .cfi_rel_offset r1, 0
    .cfi_rel_offset r2, 4
    .cfi_rel_offset r3, 8
    .cfi_rel_offset r5, 12
    .cfi_rel_offset r6, 16
    .cfi_rel_offset r7, 20
    .cfi_rel_offset r8, 24
    .cfi_rel_offset r10, 28
    .cfi_rel_offset r11, 32
    .cfi_rel_offset lr, 36
    vpush {s0-s15}                     @ 16 words of float args.
    .cfi_adjust_cfa_offset 64
    sub sp, #8                         @ 2 words of space, alignment padding and Method*.
    .cfi_adjust_cfa_offset 8
    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_SAVE_REFS_AND_ARGS != 40 + 64 + 8)
#error "FRAME_SIZE_SAVE_REFS_AND_ARGS(ARM) size not as expected."
#endif
.endm
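
// Resulting frame layout (a sketch; offsets are relative to the new SP):
//
//     [sp, #0]   2 words: Method* slot (filled in by callers) and padding
//     [sp, #8]   s0-s15, 16 words of FP argument registers
//     [sp, #72]  r1-r3, r5-r8, r10-r11, lr, 10 words
//
// Total: 8 + 64 + 40 = 112 bytes = FRAME_SIZE_SAVE_REFS_AND_ARGS.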

.macro RESTORE_SAVE_REFS_AND_ARGS_FRAME
    add sp, #8                         @ rewind sp
    .cfi_adjust_cfa_offset -8
    vpop {s0-s15}
    .cfi_adjust_cfa_offset -64
    // Note: Likewise, we could avoid restoring R8 in the case of Baker
    // read barriers, as it is overwritten by REFRESH_MARKING_REGISTER
    // later; but it's not worth handling this special case.
    pop {r1-r3, r5-r8, r10-r11, lr}    @ 10 words of callee saves and args.
    .cfi_restore r1
    .cfi_restore r2
    .cfi_restore r3
    .cfi_restore r5
    .cfi_restore r6
    .cfi_restore r7
    .cfi_restore r8
    .cfi_restore r10
    .cfi_restore r11
    .cfi_restore lr
    .cfi_adjust_cfa_offset -40
.endm

    /*
     * Macro to spill the GPRs.
     */
.macro SPILL_ALL_CALLEE_SAVE_GPRS
    push {r4-r11, lr}                  @ 9 words (36 bytes) of callee saves.
    .cfi_adjust_cfa_offset 36
    .cfi_rel_offset r4, 0
    .cfi_rel_offset r5, 4
    .cfi_rel_offset r6, 8
    .cfi_rel_offset r7, 12
    .cfi_rel_offset r8, 16
    .cfi_rel_offset r9, 20
    .cfi_rel_offset r10, 24
    .cfi_rel_offset r11, 28
    .cfi_rel_offset lr, 32
.endm

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveAllCalleeSaves).
     */
.macro SETUP_SAVE_ALL_CALLEE_SAVES_FRAME rTemp
    SPILL_ALL_CALLEE_SAVE_GPRS         @ 9 words (36 bytes) of callee saves.
    vpush {s16-s31}                    @ 16 words (64 bytes) of floats.
    .cfi_adjust_cfa_offset 64
    sub sp, #12                        @ 3 words of space, bottom word will hold Method*.
    .cfi_adjust_cfa_offset 12
    LOAD_RUNTIME_INSTANCE \rTemp       @ Load Runtime::Current into rTemp.
                                       @ Load kSaveAllCalleeSaves Method* into rTemp.
    ldr \rTemp, [\rTemp, #RUNTIME_SAVE_ALL_CALLEE_SAVES_METHOD_OFFSET]
    str \rTemp, [sp, #0]               @ Place Method* at bottom of stack.
    str sp, [rSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]  @ Place sp in Thread::Current()->top_quick_frame.

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVES != 36 + 64 + 12)
#error "FRAME_SIZE_SAVE_ALL_CALLEE_SAVES(ARM) size not as expected."
#endif
.endm

    /*
     * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
     * exception is Thread::Current()->exception_ when the runtime method frame is ready.
     */
.macro DELIVER_PENDING_EXCEPTION_FRAME_READY
    mov r0, rSELF                      @ pass Thread::Current
    bl artDeliverPendingExceptionFromCode  @ artDeliverPendingExceptionFromCode(Thread*)
.endm

    /*
     * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
     * exception is Thread::Current()->exception_.
     */
.macro DELIVER_PENDING_EXCEPTION
    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME r0  @ save callee saves for throw
    DELIVER_PENDING_EXCEPTION_FRAME_READY
.endm

.macro RETURN_OR_DELIVER_PENDING_EXCEPTION_REG reg
    ldr \reg, [rSELF, #THREAD_EXCEPTION_OFFSET]  @ Get exception field.
    cbnz \reg, 1f
    bx lr
1:
    DELIVER_PENDING_EXCEPTION
.endm
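
// Usage sketch: at the end of an entrypoint whose result is in r0, pass a
// scratch register for the exception check so the result survives:
//
//     RETURN_OR_DELIVER_PENDING_EXCEPTION_REG r1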

.macro RETURN_OR_DELIVER_PENDING_EXCEPTION
    ldr ip, [rSELF, #THREAD_EXCEPTION_OFFSET]  @ Get exception field.
    cmp ip, #0
    bne 1f
    bx lr
1:
    DELIVER_PENDING_EXCEPTION
.endm

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveRefsOnly).
     */
.macro SETUP_SAVE_REFS_ONLY_FRAME rTemp
    // Note: We could avoid saving R8 in the case of Baker read
    // barriers, as it is overwritten by REFRESH_MARKING_REGISTER
    // later; but it's not worth handling this special case.
    push {r5-r8, r10-r11, lr}          @ 7 words of callee saves
    .cfi_adjust_cfa_offset 28
    .cfi_rel_offset r5, 0
    .cfi_rel_offset r6, 4
    .cfi_rel_offset r7, 8
    .cfi_rel_offset r8, 12
    .cfi_rel_offset r10, 16
    .cfi_rel_offset r11, 20
    .cfi_rel_offset lr, 24
    sub sp, #4                         @ bottom word will hold Method*
    .cfi_adjust_cfa_offset 4
    LOAD_RUNTIME_INSTANCE \rTemp       @ Load Runtime::Current into rTemp.
                                       @ Load kSaveRefsOnly Method* into rTemp.
    ldr \rTemp, [\rTemp, #RUNTIME_SAVE_REFS_ONLY_METHOD_OFFSET]
    str \rTemp, [sp, #0]               @ Place Method* at bottom of stack.
    str sp, [rSELF, #THREAD_TOP_QUICK_FRAME_OFFSET]  @ Place sp in Thread::Current()->top_quick_frame.

    // Ugly compile-time check, but we only have the preprocessor.
#if (FRAME_SIZE_SAVE_REFS_ONLY != 28 + 4)
#error "FRAME_SIZE_SAVE_REFS_ONLY(ARM) size not as expected."
#endif
.endm

.macro RESTORE_SAVE_REFS_ONLY_FRAME
    add sp, #4                         @ bottom word holds Method*
    .cfi_adjust_cfa_offset -4
    // Note: Likewise, we could avoid restoring R8 in the case of Baker
    // read barriers, as it is overwritten by REFRESH_MARKING_REGISTER
    // later; but it's not worth handling this special case.
    pop {r5-r8, r10-r11, lr}           @ 7 words of callee saves
    .cfi_restore r5
    .cfi_restore r6
    .cfi_restore r7
    .cfi_restore r8
    .cfi_restore r10
    .cfi_restore r11
    .cfi_restore lr
    .cfi_adjust_cfa_offset -28
.endm

// Locking is needed for both managed code and JNI stubs.
.macro LOCK_OBJECT_FAST_PATH obj, tmp1, tmp2, tmp3, slow_lock, can_be_null
    ldr \tmp1, [rSELF, #THREAD_ID_OFFSET]
    .if \can_be_null
        cbz \obj, \slow_lock
    .endif
1:
    ldrex \tmp2, [\obj, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
    eor \tmp3, \tmp2, \tmp1            @ Prepare the value to store if unlocked
                                       @   (thread id, count of 0 and preserved read barrier bits),
                                       @ or prepare to compare thread id for recursive lock check
                                       @   (lock_word.ThreadId() ^ self->ThreadId()).
    ands ip, \tmp2, #LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED  @ Test the non-gc bits.
    bne 2f                             @ Check if unlocked.
    @ Unlocked case - store tmp3: original lock word plus thread id, preserved read barrier bits.
    strex \tmp2, \tmp3, [\obj, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
    cbnz \tmp2, 3f                     @ If store failed, retry.
    dmb ish                            @ Full (LoadLoad|LoadStore) memory barrier.
    bx lr
2:  @ tmp2: original lock word, tmp1: thread_id, tmp3: tmp2 ^ tmp1
#if LOCK_WORD_THIN_LOCK_COUNT_SHIFT + LOCK_WORD_THIN_LOCK_COUNT_SIZE != LOCK_WORD_GC_STATE_SHIFT
#error "Expecting thin lock count and gc state in consecutive bits."
#endif
    @ Check lock word state and thread id together.
    bfc \tmp3, \
        #LOCK_WORD_THIN_LOCK_COUNT_SHIFT, \
        #(LOCK_WORD_THIN_LOCK_COUNT_SIZE + LOCK_WORD_GC_STATE_SIZE)
    cbnz \tmp3, \slow_lock             @ If either of the top two bits are set, or the lock word's
                                       @ thread id did not match, go slow path.
    add \tmp3, \tmp2, #LOCK_WORD_THIN_LOCK_COUNT_ONE  @ Increment the recursive lock count.
    @ Extract the new thin lock count for overflow check.
    ubfx \tmp2, \tmp3, #LOCK_WORD_THIN_LOCK_COUNT_SHIFT, #LOCK_WORD_THIN_LOCK_COUNT_SIZE
    cbz \tmp2, \slow_lock              @ Zero as the new count indicates overflow, go slow path.
    @ strex necessary for read barrier bits.
    strex \tmp2, \tmp3, [\obj, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
    cbnz \tmp2, 3f                     @ If strex failed, retry.
    bx lr
3:
    b 1b                               @ retry
.endm
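
// Usage sketch (the entry symbol and slow-path label are hypothetical): a
// lock entrypoint can try the fast path and fall through to a runtime call:
//
//     ENTRY art_quick_lock_object_example
//         LOCK_OBJECT_FAST_PATH r0, r1, r2, r3, .Lslow_lock, /*can_be_null*/ 1
//     .Lslow_lock:
//         ...set up a frame and call the runtime to lock the object in r0...
//     END art_quick_lock_object_example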

// Unlocking is needed for both managed code and JNI stubs.
.macro UNLOCK_OBJECT_FAST_PATH obj, tmp1, tmp2, tmp3, slow_unlock, can_be_null
    ldr \tmp1, [rSELF, #THREAD_ID_OFFSET]
    .if \can_be_null
        cbz \obj, \slow_unlock
    .endif
1:
#ifndef USE_READ_BARRIER
    ldr \tmp2, [\obj, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
#else
    @ Need to use atomic instructions for read barrier.
    ldrex \tmp2, [\obj, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
#endif
    eor \tmp3, \tmp2, \tmp1            @ Prepare the value to store if simply locked
                                       @   (mostly 0s, and preserved read barrier bits),
                                       @ or prepare to compare thread id for recursive lock check
                                       @   (lock_word.ThreadId() ^ self->ThreadId()).
    ands ip, \tmp3, #LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED  @ Test the non-gc bits.
    bne 2f                             @ Locked recursively or by other thread?
    @ Transition to unlocked.
    dmb ish                            @ Full (LoadStore|StoreStore) memory barrier.
#ifndef USE_READ_BARRIER
    str \tmp3, [\obj, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
#else
    @ strex necessary for read barrier bits.
    strex \tmp2, \tmp3, [\obj, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
    cbnz \tmp2, 3f                     @ If the store failed, retry.
#endif
    bx lr
2:  @ tmp2: original lock word, tmp1: thread_id, tmp3: tmp2 ^ tmp1
#if LOCK_WORD_THIN_LOCK_COUNT_SHIFT + LOCK_WORD_THIN_LOCK_COUNT_SIZE != LOCK_WORD_GC_STATE_SHIFT
#error "Expecting thin lock count and gc state in consecutive bits."
#endif
    @ Check lock word state and thread id together.
    bfc \tmp3, \
        #LOCK_WORD_THIN_LOCK_COUNT_SHIFT, \
        #(LOCK_WORD_THIN_LOCK_COUNT_SIZE + LOCK_WORD_GC_STATE_SIZE)
    cbnz \tmp3, \slow_unlock           @ If either of the top two bits are set, or the lock word's
                                       @ thread id did not match, go slow path.
    sub \tmp3, \tmp2, #LOCK_WORD_THIN_LOCK_COUNT_ONE  @ Decrement recursive lock count.
#ifndef USE_READ_BARRIER
    str \tmp3, [\obj, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
#else
    @ strex necessary for read barrier bits.
    strex \tmp2, \tmp3, [\obj, #MIRROR_OBJECT_LOCK_WORD_OFFSET]
    cbnz \tmp2, 3f                     @ If the store failed, retry.
#endif
    bx lr
3:
    b 1b                               @ retry
.endm

#endif  // ART_RUNTIME_ARCH_ARM_ASM_SUPPORT_ARM_S_