path: root/arch/arm/kernel/entry-armv.S
Diffstat (limited to 'arch/arm/kernel/entry-armv.S')
-rw-r--r--  arch/arm/kernel/entry-armv.S  111
1 file changed, 73 insertions(+), 38 deletions(-)
diff --git a/arch/arm/kernel/entry-armv.S b/arch/arm/kernel/entry-armv.S
index a46d5b456765..7dca225752c1 100644
--- a/arch/arm/kernel/entry-armv.S
+++ b/arch/arm/kernel/entry-armv.S
@@ -166,12 +166,12 @@ __dabt_svc:
@ The abort handler must return the aborted address in r0, and
@ the fault status register in r1. r9 must be preserved.
@
-#ifdef MULTI_ABORT
+#ifdef MULTI_DABORT
ldr r4, .LCprocfns
mov lr, pc
- ldr pc, [r4]
+ ldr pc, [r4, #PROCESSOR_DABT_FUNC]
#else
- bl CPU_ABORT_HANDLER
+ bl CPU_DABORT_HANDLER
#endif
@
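
Note: the hunk above reworks the SVC-mode data abort dispatch. With MULTI_DABORT the handler is now fetched from the processor function table at the PROCESSOR_DABT_FUNC offset instead of through a single pointer, and the direct-call case is renamed to CPU_DABORT_HANDLER. A minimal C sketch of the indirect call, using stand-in names rather than the kernel's real struct processor declaration; the handler produces the aborted address and fault status (the r0/r1 return values the comment describes):

#include <stdint.h>

/* Illustrative only: models the r0/r1 values the abort handler returns. */
struct dabt_info {
	uint32_t addr;	/* aborted address (r0) */
	uint32_t fsr;	/* fault status register (r1) */
};

/* Stand-in for the per-CPU function table indexed by PROCESSOR_DABT_FUNC. */
struct processor_sketch {
	struct dabt_info (*dabt)(void);
};

/* MULTI_DABORT case: "ldr r4, .LCprocfns; mov lr, pc;
 * ldr pc, [r4, #PROCESSOR_DABT_FUNC]".  The non-MULTI build instead
 * branches directly to CPU_DABORT_HANDLER. */
static struct dabt_info dispatch_dabort(const struct processor_sketch *proc)
{
	return proc->dabt();
}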
@@ -209,14 +209,12 @@ __irq_svc:
irq_handler
#ifdef CONFIG_PREEMPT
+ str r8, [tsk, #TI_PREEMPT] @ restore preempt count
ldr r0, [tsk, #TI_FLAGS] @ get flags
+ teq r8, #0 @ if preempt count != 0
+ movne r0, #0 @ force flags to 0
tst r0, #_TIF_NEED_RESCHED
blne svc_preempt
-preempt_return:
- ldr r0, [tsk, #TI_PREEMPT] @ read preempt value
- str r8, [tsk, #TI_PREEMPT] @ restore preempt count
- teq r0, r7
- strne r0, [r0, -r0] @ bug()
#endif
ldr r0, [sp, #S_PSR] @ irqs are already disabled
msr spsr_cxsf, r0
@@ -230,19 +228,11 @@ preempt_return:
#ifdef CONFIG_PREEMPT
svc_preempt:
- teq r8, #0 @ was preempt count = 0
- ldreq r6, .LCirq_stat
- movne pc, lr @ no
- ldr r0, [r6, #4] @ local_irq_count
- ldr r1, [r6, #8] @ local_bh_count
- adds r0, r0, r1
- movne pc, lr
- mov r7, #0 @ preempt_schedule_irq
- str r7, [tsk, #TI_PREEMPT] @ expects preempt_count == 0
+ mov r8, lr
1: bl preempt_schedule_irq @ irq en/disable is done inside
ldr r0, [tsk, #TI_FLAGS] @ get new tasks TI_FLAGS
tst r0, #_TIF_NEED_RESCHED
- beq preempt_return @ go again
+ moveq pc, r8 @ go again
b 1b
#endif
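
Note: the preemption path above no longer consults irq_stat or open-codes the count checks. The saved preempt count in r8 is written back immediately, the need-resched test is suppressed when that count is non-zero, and svc_preempt simply calls preempt_schedule_irq() in a loop until TIF_NEED_RESCHED clears. A rough C equivalent, with a stand-in thread_info and an illustrative flag value:

#include <stdint.h>

#define TIF_NEED_RESCHED_SKETCH	(1u << 1)	/* illustrative bit only */

struct thread_info_sketch {		/* stand-in for thread_info      */
	uint32_t flags;			/* TI_FLAGS                      */
	int preempt_count;		/* TI_PREEMPT                    */
};

extern void preempt_schedule_irq(void);	/* real scheduler entry point */

static void irq_svc_exit_preempt(struct thread_info_sketch *tsk, int saved)
{
	uint32_t flags;

	tsk->preempt_count = saved;	/* str r8, [tsk, #TI_PREEMPT]    */
	flags = tsk->flags;		/* ldr r0, [tsk, #TI_FLAGS]      */
	if (saved != 0)			/* teq r8, #0                    */
		flags = 0;		/* movne r0, #0                  */

	while (flags & TIF_NEED_RESCHED_SKETCH) {	/* blne svc_preempt */
		preempt_schedule_irq();	/* IRQ enable/disable done inside */
		flags = tsk->flags;	/* re-read TI_FLAGS, loop at 1:   */
	}
}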
@@ -293,7 +283,6 @@ __pabt_svc:
mrs r9, cpsr
tst r3, #PSR_I_BIT
biceq r9, r9, #PSR_I_BIT
- msr cpsr_c, r9
@
@ set args, then call main handler
@@ -301,7 +290,15 @@ __pabt_svc:
@ r0 - address of faulting instruction
@ r1 - pointer to registers on stack
@
- mov r0, r2 @ address (pc)
+#ifdef MULTI_PABORT
+ mov r0, r2 @ pass address of aborted instruction.
+ ldr r4, .LCprocfns
+ mov lr, pc
+ ldr pc, [r4, #PROCESSOR_PABT_FUNC]
+#else
+ CPU_PABORT_HANDLER(r0, r2)
+#endif
+ msr cpsr_c, r9 @ Maybe enable interrupts
mov r1, sp @ regs
bl do_PrefetchAbort @ call abort handler
@@ -320,16 +317,12 @@ __pabt_svc:
.align 5
.LCcralign:
.word cr_alignment
-#ifdef MULTI_ABORT
+#ifdef MULTI_DABORT
.LCprocfns:
.word processor
#endif
.LCfp:
.word fp_enter
-#ifdef CONFIG_PREEMPT
-.LCirq_stat:
- .word irq_stat
-#endif
/*
* User mode handlers
@@ -404,12 +397,12 @@ __dabt_usr:
@ The abort handler must return the aborted address in r0, and
@ the fault status register in r1.
@
-#ifdef MULTI_ABORT
+#ifdef MULTI_DABORT
ldr r4, .LCprocfns
mov lr, pc
- ldr pc, [r4]
+ ldr pc, [r4, #PROCESSOR_DABT_FUNC]
#else
- bl CPU_ABORT_HANDLER
+ bl CPU_DABORT_HANDLER
#endif
@
@@ -455,10 +448,6 @@ __irq_usr:
__und_usr:
usr_entry
- tst r3, #PSR_T_BIT @ Thumb mode?
- bne __und_usr_unknown @ ignore FP
- sub r4, r2, #4
-
@
@ fall through to the emulation code, which returns using r9 if
@ it has emulated the instruction, or the more conventional lr
@@ -468,7 +457,24 @@ __und_usr:
@
adr r9, ret_from_exception
adr lr, __und_usr_unknown
-1: ldrt r0, [r4]
+ tst r3, #PSR_T_BIT @ Thumb mode?
+ subeq r4, r2, #4 @ ARM instr at LR - 4
+ subne r4, r2, #2 @ Thumb instr at LR - 2
+1: ldreqt r0, [r4]
+ beq call_fpe
+ @ Thumb instruction
+#if __LINUX_ARM_ARCH__ >= 7
+2: ldrht r5, [r4], #2
+ and r0, r5, #0xf800 @ mask bits 111x x... .... ....
+ cmp r0, #0xe800 @ 32bit instruction if xx != 0
+ blo __und_usr_unknown
+3: ldrht r0, [r4]
+ add r2, r2, #2 @ r2 is PC + 2, make it PC + 4
+ orr r0, r0, r5, lsl #16
+#else
+ b __und_usr_unknown
+#endif
+
@
@ fallthrough to call_fpe
@
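
Note: for undefined instructions the faulting opcode is now fetched according to the processor state: an ARM instruction sits at lr - 4, a Thumb instruction at lr - 2, and on ARMv7 a first halfword with (hw & 0xf800) >= 0xe800 marks a 32-bit Thumb-2 encoding whose two halfwords are merged with the first halfword in the upper 16 bits. A small C sketch of that width test and merge (assumed standalone helpers, not kernel functions):

#include <stdint.h>

/* Mirrors "and r0, r5, #0xf800; cmp r0, #0xe800; blo __und_usr_unknown":
 * top bits 0b11101, 0b11110 or 0b11111 start a 32-bit Thumb-2 insn. */
static int thumb_insn_is_32bit(uint16_t first_halfword)
{
	return (first_halfword & 0xf800u) >= 0xe800u;
}

/* Mirrors "orr r0, r0, r5, lsl #16": first halfword in the upper half. */
static uint32_t thumb32_combine(uint16_t first, uint16_t second)
{
	return ((uint32_t)first << 16) | second;
}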
@@ -477,10 +483,14 @@ __und_usr:
* The out of line fixup for the ldrt above.
*/
.section .fixup, "ax"
-2: mov pc, r9
+4: mov pc, r9
.previous
.section __ex_table,"a"
- .long 1b, 2b
+ .long 1b, 4b
+#if __LINUX_ARM_ARCH__ >= 7
+ .long 2b, 4b
+ .long 3b, 4b
+#endif
.previous
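
Note: the fixup label is renumbered from 2: to 4: so that the new Thumb halfword loads at 2: and 3: can reuse it. Each __ex_table entry pairs the address of one of the user-mode loads (the ldrt/ldrht at 1:, 2: and 3:) with the fixup address, so a fault during the load resumes at 4: and returns via r9. A rough C view of the two-word layout the .long directives emit (illustrative, matching ARM's exception table format):

struct exception_table_entry {
	unsigned long insn;	/* address of the faulting load (1b, 2b, 3b) */
	unsigned long fixup;	/* where to resume on a fault (4b)           */
};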
/*
@@ -507,9 +517,16 @@ __und_usr:
* r10 = this threads thread_info structure.
* lr = unrecognised instruction return address
*/
+ @
+ @ Fall-through from Thumb-2 __und_usr
+ @
+#ifdef CONFIG_NEON
+ adr r6, .LCneon_thumb_opcodes
+ b 2f
+#endif
call_fpe:
#ifdef CONFIG_NEON
- adr r6, .LCneon_opcodes
+ adr r6, .LCneon_arm_opcodes
2:
ldr r7, [r6], #4 @ mask value
cmp r7, #0 @ end mask?
@@ -526,6 +543,7 @@ call_fpe:
1:
#endif
tst r0, #0x08000000 @ only CDP/CPRT/LDC/STC have bit 27
+ tstne r0, #0x04000000 @ bit 26 set on both ARM and Thumb-2
#if defined(CONFIG_CPU_ARM610) || defined(CONFIG_CPU_ARM710)
and r8, r0, #0x0f000000 @ mask out op-code bits
teqne r8, #0x0f000000 @ SWI (ARM6/7 bug)?
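
Note: the added tstne extends the coprocessor check to both instruction sets: CDP/CPRT/LDC/STC encodings have bit 27 set, and in the combined ARM/Thumb-2 path bit 26 must be set as well. A short sketch of the resulting test (an illustration, not kernel code):

#include <stdint.h>

/* Z stays clear after "tst r0, #0x08000000; tstne r0, #0x04000000" only
 * when bits 27 and 26 are both set, i.e. a possible coprocessor
 * instruction in either ARM or Thumb-2 encoding. */
static int may_be_coprocessor_insn(uint32_t instr)
{
	return (instr & 0x0c000000u) == 0x0c000000u;
}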
@@ -577,7 +595,7 @@ call_fpe:
#ifdef CONFIG_NEON
.align 6
-.LCneon_opcodes:
+.LCneon_arm_opcodes:
.word 0xfe000000 @ mask
.word 0xf2000000 @ opcode
@@ -586,6 +604,16 @@ call_fpe:
.word 0x00000000 @ mask
.word 0x00000000 @ opcode
+
+.LCneon_thumb_opcodes:
+ .word 0xef000000 @ mask
+ .word 0xef000000 @ opcode
+
+ .word 0xff100000 @ mask
+ .word 0xf9000000 @ opcode
+
+ .word 0x00000000 @ mask
+ .word 0x00000000 @ opcode
#endif
do_fpe:
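
Note: the single .LCneon_opcodes table is split into ARM and Thumb-2 variants. Each table is a list of (mask, opcode) word pairs terminated by a zero mask, and call_fpe walks it until (instr & mask) == opcode or the terminator is reached. A hedged C rendering of that walk, using the Thumb-2 values shown above:

#include <stdint.h>

struct neon_opcode {
	uint32_t mask;
	uint32_t opcode;
};

/* Values copied from .LCneon_thumb_opcodes above; the zero mask ends the
 * table, exactly as the "cmp r7, #0 @ end mask?" test expects. */
static const struct neon_opcode neon_thumb_opcodes[] = {
	{ 0xef000000u, 0xef000000u },	/* NEON data processing          */
	{ 0xff100000u, 0xf9000000u },	/* NEON element/structure access */
	{ 0x00000000u, 0x00000000u },
};

static int is_neon_insn(uint32_t instr, const struct neon_opcode *tbl)
{
	for (; tbl->mask != 0; tbl++)
		if ((instr & tbl->mask) == tbl->opcode)
			return 1;	/* hand the instruction to do_vfp */
	return 0;			/* fall through to the CP decode  */
}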
@@ -619,8 +647,15 @@ __und_usr_unknown:
__pabt_usr:
usr_entry
+#ifdef MULTI_PABORT
+ mov r0, r2 @ pass address of aborted instruction.
+ ldr r4, .LCprocfns
+ mov lr, pc
+ ldr pc, [r4, #PROCESSOR_PABT_FUNC]
+#else
+ CPU_PABORT_HANDLER(r0, r2)
+#endif
enable_irq @ Enable interrupts
- mov r0, r2 @ address (pc)
mov r1, sp @ regs
bl do_PrefetchAbort @ call abort handler
/* fall through */