#include <linux/init.h>
#include <linux/linkage.h>

#include <asm/assembler.h>
#include <asm/asm-offsets.h>
#include <asm/errno.h>
#include <asm/thread_info.h>

@ Bad Abort numbers
@ -----------------
@
#define BAD_PREFETCH		0
#define BAD_DATA		1
#define BAD_ADDREXCPTN		2
#define BAD_IRQ			3
#define BAD_UNDEFINSTR		4

@
@ Most of the stack format comes from struct pt_regs, but with
@ the addition of 8 bytes for storing syscall args 5 and 6.
@ This _must_ remain a multiple of 8 for EABI.
@
#define S_OFF			8
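
@
@ (When the syscall entry code has pushed args 5 and 6, sp sits S_OFF
@ bytes below the saved pt_regs frame; the "offset" argument taken by
@ the user-register restore macros below is there to step over those
@ extra bytes.)
@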

/*
 * The SWI code relies on the fact that R0 is at the bottom of the stack
 * (due to slow/fast restore user regs).
 */
#if S_R0 != 0
#error "Please fix"
#endif

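	@
	@ zero_fp - clear the frame pointer on exception entry so that any
	@ stack trace started from here stops cleanly instead of running on
	@ into stale or user-mode state.  Compiled out unless the kernel is
	@ built with frame pointers.
	@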
	.macro	zero_fp
#ifdef CONFIG_FRAME_POINTER
	mov	fp, #0
#endif
	.endm

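	@
	@ alignment_trap - reload the CP15 control register from the saved
	@ copy that .LCcralign points at, so the alignment trap setting is
	@ in a known state before returning to user space.  Compiled out
	@ unless CONFIG_ALIGNMENT_TRAP is set.
	@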
	.macro	alignment_trap, rtemp
#ifdef CONFIG_ALIGNMENT_TRAP
	ldr	\rtemp, .LCcralign
	ldr	\rtemp, [\rtemp]
	mcr	p15, 0, \rtemp, c1, c0
#endif
	.endm

	@
	@ Store/load the USER SP and LR registers by switching to the SYS
	@ mode. Useful in Thumb-2 mode where "stm/ldm rd, {sp, lr}^" is not
	@ available. Should only be called from SVC mode.
	@
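	@ (This works because SYS mode is privileged but shares its sp and
	@ lr banks with USR mode, so the sp/lr accessed there are the user
	@ ones.)
	@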
	.macro	store_user_sp_lr, rd, rtemp, offset = 0
	mrs	\rtemp, cpsr
	eor	\rtemp, \rtemp, #(SVC_MODE ^ SYSTEM_MODE)
	msr	cpsr_c, \rtemp			@ switch to the SYS mode

	str	sp, [\rd, #\offset]		@ save sp_usr
	str	lr, [\rd, #\offset + 4]		@ save lr_usr

	eor	\rtemp, \rtemp, #(SVC_MODE ^ SYSTEM_MODE)
	msr	cpsr_c, \rtemp			@ switch back to the SVC mode
	.endm

	.macro	load_user_sp_lr, rd, rtemp, offset = 0
	mrs	\rtemp, cpsr
	eor	\rtemp, \rtemp, #(SVC_MODE ^ SYSTEM_MODE)
	msr	cpsr_c, \rtemp			@ switch to the SYS mode

	ldr	sp, [\rd, #\offset]		@ load sp_usr
	ldr	lr, [\rd, #\offset + 4]		@ load lr_usr

	eor	\rtemp, \rtemp, #(SVC_MODE ^ SYSTEM_MODE)
	msr	cpsr_c, \rtemp			@ switch back to the SVC mode
	.endm

#ifndef CONFIG_THUMB2_KERNEL
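	@
	@ svc_exit - return from an exception taken in SVC mode.  The
	@ exclusive monitor must be cleared on the way out: ARMv6K and
	@ later have clrex, while plain ARMv6 uses a dummy strex to [sp].
	@ Since that strex may overwrite the saved r0, r0 is reloaded
	@ first and the ldmib then skips over it.
	@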
	.macro	svc_exit, rpsr
	msr	spsr_cxsf, \rpsr
#if defined(CONFIG_CPU_32v6K)
	clrex					@ clear the exclusive monitor
	ldmia	sp, {r0 - pc}^			@ load r0 - pc, cpsr
#elif defined(CONFIG_CPU_V6)
	ldr	r0, [sp]
	strex	r1, r2, [sp]			@ clear the exclusive monitor
	ldmib	sp, {r1 - pc}^			@ load r1 - pc, cpsr
#else
	ldmia	sp, {r0 - pc}^			@ load r0 - pc, cpsr
#endif
	.endm

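	@
	@ restore_user_regs - return to user space from a pt_regs frame.
	@ The writeback on the S_PC load leaves sp pointing at the saved
	@ pc, so the ldmdb below restores the user registers sitting just
	@ beneath it; "movs pc, lr" then copies spsr_svc back into cpsr.
	@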
	.macro	restore_user_regs, fast = 0, offset = 0
	ldr	r1, [sp, #\offset + S_PSR]	@ get calling cpsr
	ldr	lr, [sp, #\offset + S_PC]!	@ get pc
	msr	spsr_cxsf, r1			@ save in spsr_svc
#if defined(CONFIG_CPU_32v6K)
	clrex					@ clear the exclusive monitor
#elif defined(CONFIG_CPU_V6)
	strex	r1, r2, [sp]			@ clear the exclusive monitor
#endif
	.if	\fast
	ldmdb	sp, {r1 - lr}^			@ get calling r1 - lr
	.else
	ldmdb	sp, {r0 - lr}^			@ get calling r0 - lr
	.endif
	add	sp, sp, #S_FRAME_SIZE - S_PC
	movs	pc, lr				@ return & move spsr_svc into cpsr
	.endm

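	@
	@ get_thread_info - thread_info sits at the base of the 8K kernel
	@ stack, so clearing the bottom 13 bits of sp gives its address
	@ (this assumes THREAD_SIZE is 8192 bytes).
	@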
	.macro	get_thread_info, rd
	mov	\rd, sp, lsr #13
	mov	\rd, \rd, lsl #13
	.endm
#else	/* CONFIG_THUMB2_KERNEL */
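	@
	@ svc_exit (Thumb-2 flavour) - the ARM "ldmia sp, {r0 - pc}^"
	@ return is not available in Thumb-2, so the return pc and psr are
	@ written just below the original SVC stack and popped with rfeia.
	@ The tst/addeq/addne sequence allows for the 4 bytes of padding
	@ the entry code may have inserted to keep the stack 8-byte
	@ aligned.
	@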
	.macro	svc_exit, rpsr
	clrex					@ clear the exclusive monitor
	ldr	r0, [sp, #S_SP]			@ top of the stack
	ldr	r1, [sp, #S_PC]			@ return address
	tst	r0, #4				@ orig stack 8-byte aligned?
	stmdb	r0, {r1, \rpsr}			@ rfe context
	ldmia	sp, {r0 - r12}
	ldr	lr, [sp, #S_LR]
	addeq	sp, sp, #S_FRAME_SIZE - 8	@ aligned
	addne	sp, sp, #S_FRAME_SIZE - 4	@ not aligned
	rfeia	sp!
	.endm

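	@
	@ restore_user_regs (Thumb-2 flavour) - the user sp and lr cannot
	@ be restored with a "^" ldm here, so they are loaded by switching
	@ to SYS mode via load_user_sp_lr; only r0-r12 come straight from
	@ the frame before the "movs pc, lr" return.
	@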
	.macro	restore_user_regs, fast = 0, offset = 0
	clrex					@ clear the exclusive monitor
	mov	r2, sp
	load_user_sp_lr r2, r3, \offset + S_SP	@ calling sp, lr
	ldr	r1, [sp, #\offset + S_PSR]	@ get calling cpsr
	ldr	lr, [sp, #\offset + S_PC]	@ get pc
	add	sp, sp, #\offset + S_SP
	msr	spsr_cxsf, r1			@ save in spsr_svc
	.if	\fast
	ldmdb	sp, {r1 - r12}			@ get calling r1 - r12
	.else
	ldmdb	sp, {r0 - r12}			@ get calling r0 - r12
	.endif
	add	sp, sp, #S_FRAME_SIZE - S_SP
	movs	pc, lr				@ return & move spsr_svc into cpsr
	.endm

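	@
	@ get_thread_info (Thumb-2 flavour) - the same 8K stack-base
	@ masking as the ARM version above, just split into separate
	@ mov/shift steps.
	@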
	.macro	get_thread_info, rd
	mov	\rd, sp
	lsr	\rd, \rd, #13
	mov	\rd, \rd, lsl #13
	.endm
#endif	/* !CONFIG_THUMB2_KERNEL */

/*
 * These are the registers used in the syscall handler, and allow us,
 * in theory, to pass up to 7 arguments to a function (r0 to r6).
 *
 * r7 is reserved for the system call number in Thumb mode.
 *
 * Note that tbl == why is intentional.
 *
 * We must set at least "tsk" and "why" when calling ret_with_reschedule.
 */
scno	.req	r7		@ syscall number
tbl	.req	r8		@ syscall table pointer
why	.req	r8		@ Linux syscall (!= 0)
tsk	.req	r9		@ current thread_info