
Commit b86040a
Thumb-2: Implementation of the unified start-up and exceptions code
This patch implements the ARM/Thumb-2 unified kernel start-up and
exception handling code.

Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
1 parent 0becb08 commit b86040a

11 files changed: +263 −120 lines

arch/arm/include/asm/assembler.h

(+11 lines)

@@ -127,3 +127,14 @@
 #endif
 #endif
 	.endm
+
+#ifdef CONFIG_THUMB2_KERNEL
+	.macro	setmode, mode, reg
+	mov	\reg, #\mode
+	msr	cpsr_c, \reg
+	.endm
+#else
+	.macro	setmode, mode, reg
+	msr	cpsr_c, #\mode
+	.endm
+#endif
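
Two variants are needed because the ARM-state MSR instruction accepts an
immediate operand, while the Thumb-2 encoding of MSR is register-only, so
the mode value must pass through a scratch register. A minimal usage
sketch (the start-up code in this series calls the macro along these
lines; the exact head.S call site shown here is an assumption):

	setmode	PSR_F_BIT | PSR_I_BIT | SVC_MODE, r9	@ enter SVC mode with
							@ IRQs/FIQs masked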

arch/arm/include/asm/futex.h

(+1 line)

@@ -99,6 +99,7 @@ futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
 	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
 	"1:	ldrt	%0, [%3]\n"
 	"	teq	%0, %1\n"
+	"	it	eq	@ explicit IT needed for the 2b label\n"
 	"2:	streqt	%2, [%3]\n"
 	"3:\n"
 	"	.section __ex_table,\"a\"\n"

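Thumb-2 executes conditional instructions only inside an IT block. The
unified assembler can normally synthesise the IT itself, but here the 2:
label referenced by the __ex_table fixup sits on the conditional streqt,
so the IT must be written out (this restates the comment in the added
line). A generic sketch of the IT mechanics, not tied to this file:

	teq	r0, r1		@ compare, sets the Z flag
	it	eq		@ make the next instruction conditional on EQ
	streq	r2, [r3]	@ store happens only when r0 == r1
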
arch/arm/kernel/entry-armv.S

(+98 −67 lines)
@@ -34,7 +34,7 @@
 	@
 	@ routine called with r0 = irq number, r1 = struct pt_regs *
 	@
-	adrne	lr, 1b
+	adrne	lr, BSYM(1b)
 	bne	asm_do_IRQ
 
 #ifdef CONFIG_SMP
@@ -46,13 +46,13 @@
  */
 	test_for_ipi r0, r6, r5, lr
 	movne	r0, sp
-	adrne	lr, 1b
+	adrne	lr, BSYM(1b)
 	bne	do_IPI
 
 #ifdef CONFIG_LOCAL_TIMERS
 	test_for_ltirq r0, r6, r5, lr
 	movne	r0, sp
-	adrne	lr, 1b
+	adrne	lr, BSYM(1b)
 	bne	do_local_timer
 #endif
 #endif
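
BSYM() comes from arch/arm/include/asm/unified.h, introduced elsewhere in
this series. adr lr, 1b yields the plain address of the label; when the
callee returns with a mov pc, lr style branch, bit 0 of that address
selects the instruction set, so a Thumb-2 kernel must set it. A sketch of
the definition (paraphrased from unified.h):

/* arch/arm/include/asm/unified.h -- sketch */
#ifdef CONFIG_THUMB2_KERNEL
#define BSYM(sym)	sym + 1		/* bit 0 set: target is Thumb code */
#else
#define BSYM(sym)	sym		/* ARM: use the address unchanged */
#endif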
@@ -70,7 +70,10 @@
  */
 	.macro	inv_entry, reason
 	sub	sp, sp, #S_FRAME_SIZE
-	stmib	sp, {r1 - lr}
+ ARM(	stmib	sp, {r1 - lr}		)
+ THUMB(	stmia	sp, {r0 - r12}	)
+ THUMB(	str	sp, [sp, #S_SP]	)
+ THUMB(	str	lr, [sp, #S_LR]	)
 	mov	r1, #\reason
 	.endm
 
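ARM() and THUMB() are also from asm/unified.h: each emits its argument
only in the corresponding build, letting the two instruction sets sit
side by side in one source line. They are needed here because a Thumb-2
STM cannot store sp or pc in its register list and has no IB addressing
mode, so the Thumb path saves sp and lr with separate str instructions.
Roughly (sketch, paraphrased):

/* arch/arm/include/asm/unified.h -- sketch */
#ifdef CONFIG_THUMB2_KERNEL
#define ARM(x...)			/* dropped from Thumb-2 builds */
#define THUMB(x...)	x
#else
#define ARM(x...)	x
#define THUMB(x...)			/* dropped from ARM builds */
#endif
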
@@ -126,17 +129,24 @@ ENDPROC(__und_invalid)
 	.macro	svc_entry, stack_hole=0
 UNWIND(.fnstart		)
 UNWIND(.save {r0 - pc}		)
-	sub	sp, sp, #(S_FRAME_SIZE + \stack_hole)
+	sub	sp, sp, #(S_FRAME_SIZE + \stack_hole - 4)
+#ifdef CONFIG_THUMB2_KERNEL
+ SPFIX(	str	r0, [sp]	)	@ temporarily saved
+ SPFIX(	mov	r0, sp		)
+ SPFIX(	tst	r0, #4		)	@ test original stack alignment
+ SPFIX(	ldr	r0, [sp]	)	@ restored
+#else
 SPFIX(	tst	sp, #4		)
-SPFIX(	bicne	sp, sp, #4	)
-	stmib	sp, {r1 - r12}
+#endif
+ SPFIX(	subeq	sp, sp, #4	)
+	stmia	sp, {r1 - r12}
 
 	ldmia	r0, {r1 - r3}
-	add	r5, sp, #S_SP		@ here for interlock avoidance
+	add	r5, sp, #S_SP - 4	@ here for interlock avoidance
 	mov	r4, #-1			@  ""  ""      ""       ""
-	add	r0, sp, #(S_FRAME_SIZE + \stack_hole)
-SPFIX(	addne	r0, r0, #4	)
-	str	r1, [sp]		@ save the "real" r0 copied
+	add	r0, sp, #(S_FRAME_SIZE + \stack_hole - 4)
+ SPFIX(	addeq	r0, r0, #4	)
+	str	r1, [sp, #-4]!		@ save the "real" r0 copied
 					@ from the exception stack
 
 	mov	r1, lr
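
The reworked prologue exists because Thumb-2 cannot use sp freely as an
ALU operand: tst sp, #4 and a conditional bic of sp are not encodable, so
the alignment test runs on a copy of sp in r0 (r0 itself is parked on the
stack around the test). Allocating S_FRAME_SIZE - 4 up front and
finishing with a pre-indexed push keeps the arithmetic equivalent. A
comment-level sketch of the intent, assuming S_FRAME_SIZE is a multiple
of 8:

@ sub   sp, sp, #FRAME-4  @ sp now misaligned iff the original sp was aligned
@ tst   r0, #4            @ test a copy of sp; 'tst sp, #4' is not Thumb-2
@ subeq sp, sp, #4        @ original sp unaligned: drop the spare pad word
@ str   r1, [sp, #-4]!    @ final push; frame base is 8-byte aligned either way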
@@ -196,9 +206,8 @@ __dabt_svc:
 	@
 	@ restore SPSR and restart the instruction
 	@
-	ldr	r0, [sp, #S_PSR]
-	msr	spsr_cxsf, r0
-	ldmia	sp, {r0 - pc}^			@ load r0 - pc, cpsr
+	ldr	r2, [sp, #S_PSR]
+	svc_exit r2				@ return from exception
 UNWIND(.fnend		)
 ENDPROC(__dabt_svc)
 
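svc_exit is a new macro added by this series in entry-header.S. Its ARM
expansion is exactly the three lines it replaces here; the Thumb-2
expansion cannot use ldmia {..pc}^ (there is no exception-return LDM form
in Thumb-2), so it restores the registers piecewise and leaves via an
RFE-style return. ARM-side sketch:

	.macro	svc_exit, rpsr		@ ARM build -- sketch
	msr	spsr_cxsf, \rpsr
	ldmia	sp, {r0 - pc}^		@ load r0 - pc, cpsr
	.endm
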
@@ -225,13 +234,12 @@ __irq_svc:
 	tst	r0, #_TIF_NEED_RESCHED
 	blne	svc_preempt
 #endif
-	ldr	r0, [sp, #S_PSR]		@ irqs are already disabled
-	msr	spsr_cxsf, r0
+	ldr	r4, [sp, #S_PSR]		@ irqs are already disabled
 #ifdef CONFIG_TRACE_IRQFLAGS
-	tst	r0, #PSR_I_BIT
+	tst	r4, #PSR_I_BIT
 	bleq	trace_hardirqs_on
 #endif
-	ldmia	sp, {r0 - pc}^			@ load r0 - pc, cpsr
+	svc_exit r4				@ return from exception
 UNWIND(.fnend		)
 ENDPROC(__irq_svc)
 
@@ -266,7 +274,7 @@ __und_svc:
 	@  r0 - instruction
 	@
 	ldr	r0, [r2, #-4]
-	adr	r9, 1f
+	adr	r9, BSYM(1f)
 	bl	call_fpe
 
 	mov	r0, sp				@ struct pt_regs *regs
@@ -280,9 +288,8 @@ __und_svc:
 	@
 	@ restore SPSR and restart the instruction
 	@
-	ldr	lr, [sp, #S_PSR]		@ Get SVC cpsr
-	msr	spsr_cxsf, lr
-	ldmia	sp, {r0 - pc}^			@ Restore SVC registers
+	ldr	r2, [sp, #S_PSR]		@ Get SVC cpsr
+	svc_exit r2				@ return from exception
 UNWIND(.fnend		)
 ENDPROC(__und_svc)
 
@@ -323,9 +330,8 @@ __pabt_svc:
 	@
 	@ restore SPSR and restart the instruction
 	@
-	ldr	r0, [sp, #S_PSR]
-	msr	spsr_cxsf, r0
-	ldmia	sp, {r0 - pc}^			@ load r0 - pc, cpsr
+	ldr	r2, [sp, #S_PSR]
+	svc_exit r2				@ return from exception
 UNWIND(.fnend		)
 ENDPROC(__pabt_svc)
 
@@ -353,7 +359,8 @@ ENDPROC(__pabt_svc)
 UNWIND(.fnstart	)
 UNWIND(.cantunwind	)	@ don't unwind the user space
 	sub	sp, sp, #S_FRAME_SIZE
-	stmib	sp, {r1 - r12}
+ ARM(	stmib	sp, {r1 - r12}	)
+ THUMB(	stmia	sp, {r0 - r12}	)
 
 	ldmia	r0, {r1 - r3}
 	add	r0, sp, #S_PC		@ here for interlock avoidance
@@ -372,7 +379,8 @@ ENDPROC(__pabt_svc)
 	@ Also, separately save sp_usr and lr_usr
 	@
 	stmia	r0, {r2 - r4}
-	stmdb	r0, {sp, lr}^
+ ARM(	stmdb	r0, {sp, lr}^	)
+ THUMB(	store_user_sp_lr r0, r1, S_SP - S_PC	)
 
 	@
 	@ Enable the alignment trap while in kernel mode
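
stmdb r0, {sp, lr}^ (store the user-mode sp/lr from SVC mode) has no
Thumb-2 encoding. store_user_sp_lr, added by this series in
entry-header.S, reaches the banked registers by hopping through SYS mode,
which shares sp and lr with user mode. A sketch of that macro, to be
verified against entry-header.S:

	.macro	store_user_sp_lr, rd, rtemp, offset = 0
	mrs	\rtemp, cpsr
	eor	\rtemp, \rtemp, #(SVC_MODE ^ SYSTEM_MODE)
	msr	cpsr_c, \rtemp			@ switch to the SYS mode
	str	sp, [\rd, #\offset]		@ save sp_usr
	str	lr, [\rd, #\offset + 4]		@ save lr_usr
	eor	\rtemp, \rtemp, #(SVC_MODE ^ SYSTEM_MODE)
	msr	cpsr_c, \rtemp			@ switch back to the SVC mode
	.endm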
@@ -427,7 +435,7 @@ __dabt_usr:
 	@
 	enable_irq
 	mov	r2, sp
-	adr	lr, ret_from_exception
+	adr	lr, BSYM(ret_from_exception)
 	b	do_DataAbort
 UNWIND(.fnend		)
 ENDPROC(__dabt_usr)
@@ -452,7 +460,9 @@ __irq_usr:
 	ldr	r0, [tsk, #TI_PREEMPT]
 	str	r8, [tsk, #TI_PREEMPT]
 	teq	r0, r7
-	strne	r0, [r0, -r0]
+ ARM(	strne	r0, [r0, -r0]	)
+ THUMB(	movne	r0, #0		)
+ THUMB(	strne	r0, [r0]	)
 #endif
 #ifdef CONFIG_TRACE_IRQFLAGS
 	bl	trace_hardirqs_on
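
The ARM-only strne r0, [r0, -r0] computes the address r0 - r0 = 0 and
deliberately faults when the preempt count does not match. Thumb-2 STR
has no subtract-register addressing mode, so the Thumb path builds the
NULL pointer explicitly first; annotated restatement:

@ ARM:     strne r0, [r0, -r0]    @ one instruction, stores to address 0
@ Thumb-2: movne r0, #0           @ materialise the NULL pointer...
@          strne r0, [r0]         @ ...then fault on the store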
@@ -476,9 +486,10 @@ __und_usr:
 	@
 	@  r0 - instruction
 	@
-	adr	r9, ret_from_exception
-	adr	lr, __und_usr_unknown
+	adr	r9, BSYM(ret_from_exception)
+	adr	lr, BSYM(__und_usr_unknown)
 	tst	r3, #PSR_T_BIT			@ Thumb mode?
+	itet	eq				@ explicit IT needed for the 1f label
 	subeq	r4, r2, #4			@ ARM instr at LR - 4
 	subne	r4, r2, #2			@ Thumb instr at LR - 2
 1:	ldreqt	r0, [r4]
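
itet eq opens an IT block whose Then/Else/Then pattern matches the three
conditional instructions that follow; as in the futex change, the
assembler cannot auto-generate the IT because the 1: label used by the
exception table sits inside the block. Annotated:

	itet	eq		@ IT block: Then(eq), Else(ne), Then(eq)
	subeq	r4, r2, #4	@ 'T' slot -> runs when EQ (ARM-mode source)
	subne	r4, r2, #2	@ 'E' slot -> runs when NE (Thumb-mode source)
 1:	ldreqt	r0, [r4]	@ 'T' slot -> runs when EQ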
@@ -488,7 +499,10 @@ __und_usr:
 	beq	call_fpe
 	@ Thumb instruction
 #if __LINUX_ARM_ARCH__ >= 7
-2:	ldrht	r5, [r4], #2
+2:
+ ARM(	ldrht	r5, [r4], #2	)
+ THUMB(	ldrht	r5, [r4]	)
+ THUMB(	add	r4, r4, #2	)
 	and	r0, r5, #0xf800			@ mask bits 111x x... .... ....
 	cmp	r0, #0xe800			@ 32bit instruction if xx != 0
 	blo	__und_usr_unknown
@@ -577,46 +591,50 @@ call_fpe:
 	moveq	pc, lr
 	get_thread_info r10			@ get current thread
 	and	r8, r0, #0x00000f00		@ mask out CP number
+ THUMB(	lsr	r8, r8, #8		)
 	mov	r7, #1
 	add	r6, r10, #TI_USED_CP
-	strb	r7, [r6, r8, lsr #8]		@ set appropriate used_cp[]
+ ARM(	strb	r7, [r6, r8, lsr #8]	)	@ set appropriate used_cp[]
+ THUMB(	strb	r7, [r6, r8]		)	@ set appropriate used_cp[]
 #ifdef CONFIG_IWMMXT
 	@ Test if we need to give access to iWMMXt coprocessors
 	ldr	r5, [r10, #TI_FLAGS]
 	rsbs	r7, r8, #(1 << 8)		@ CP 0 or 1 only
 	movcss	r7, r5, lsr #(TIF_USING_IWMMXT + 1)
 	bcs	iwmmxt_task_enable
 #endif
-	add	pc, pc, r8, lsr #6
-	mov	r0, r0
-
-	mov	pc, lr				@ CP#0
-	b	do_fpe				@ CP#1 (FPE)
-	b	do_fpe				@ CP#2 (FPE)
-	mov	pc, lr				@ CP#3
+ ARM(	add	pc, pc, r8, lsr #6	)
+ THUMB(	lsl	r8, r8, #2		)
+ THUMB(	add	pc, r8			)
+	nop
+
+	W(mov)	pc, lr				@ CP#0
+	W(b)	do_fpe				@ CP#1 (FPE)
+	W(b)	do_fpe				@ CP#2 (FPE)
+	W(mov)	pc, lr				@ CP#3
 #ifdef CONFIG_CRUNCH
 	b	crunch_task_enable		@ CP#4 (MaverickCrunch)
 	b	crunch_task_enable		@ CP#5 (MaverickCrunch)
 	b	crunch_task_enable		@ CP#6 (MaverickCrunch)
 #else
-	mov	pc, lr				@ CP#4
-	mov	pc, lr				@ CP#5
-	mov	pc, lr				@ CP#6
+	W(mov)	pc, lr				@ CP#4
+	W(mov)	pc, lr				@ CP#5
+	W(mov)	pc, lr				@ CP#6
 #endif
-	mov	pc, lr				@ CP#7
-	mov	pc, lr				@ CP#8
-	mov	pc, lr				@ CP#9
+	W(mov)	pc, lr				@ CP#7
+	W(mov)	pc, lr				@ CP#8
+	W(mov)	pc, lr				@ CP#9
 #ifdef CONFIG_VFP
-	b	do_vfp				@ CP#10 (VFP)
-	b	do_vfp				@ CP#11 (VFP)
+	W(b)	do_vfp				@ CP#10 (VFP)
+	W(b)	do_vfp				@ CP#11 (VFP)
 #else
-	mov	pc, lr				@ CP#10 (VFP)
-	mov	pc, lr				@ CP#11 (VFP)
+	W(mov)	pc, lr				@ CP#10 (VFP)
+	W(mov)	pc, lr				@ CP#11 (VFP)
 #endif
-	mov	pc, lr				@ CP#12
-	mov	pc, lr				@ CP#13
-	mov	pc, lr				@ CP#14 (Debug)
-	mov	pc, lr				@ CP#15 (Control)
+	W(mov)	pc, lr				@ CP#12
+	W(mov)	pc, lr				@ CP#13
+	W(mov)	pc, lr				@ CP#14 (Debug)
+	W(mov)	pc, lr				@ CP#15 (Control)
 
 #ifdef CONFIG_NEON
 	.align	6
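
The dispatch relies on every table entry being exactly 4 bytes. The ARM
form add pc, pc, r8, lsr #6 indexes the table using the fact that pc
reads as the current instruction + 8 (hence the trailing nop); the
Thumb-2 form pre-shifts the CP number (r8 was already moved down by 8
above) and adds it to pc. W() forces the 32-bit encoding of instructions
that would otherwise assemble to 16-bit Thumb, keeping the entries
uniform. Sketch of the definition:

/* arch/arm/include/asm/unified.h -- sketch */
#ifdef CONFIG_THUMB2_KERNEL
#define W(instr)	instr.w		/* force the wide (32-bit) encoding */
#else
#define W(instr)	instr		/* ARM: every encoding is 32-bit */
#endif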
@@ -667,7 +685,7 @@ no_fp:	mov	pc, lr
 __und_usr_unknown:
 	enable_irq
 	mov	r0, sp
-	adr	lr, ret_from_exception
+	adr	lr, BSYM(ret_from_exception)
 	b	do_undefinstr
 ENDPROC(__und_usr_unknown)
 
@@ -711,7 +729,10 @@ ENTRY(__switch_to)
 UNWIND(.cantunwind	)
 	add	ip, r1, #TI_CPU_SAVE
 	ldr	r3, [r2, #TI_TP_VALUE]
-	stmia	ip!, {r4 - sl, fp, sp, lr}	@ Store most regs on stack
+ ARM(	stmia	ip!, {r4 - sl, fp, sp, lr} )	@ Store most regs on stack
+ THUMB(	stmia	ip!, {r4 - sl, fp}	   )	@ Store most regs on stack
+ THUMB(	str	sp, [ip], #4		   )
+ THUMB(	str	lr, [ip], #4		   )
 #ifdef CONFIG_MMU
 	ldr	r6, [r2, #TI_CPU_DOMAIN]
 #endif
@@ -736,8 +757,12 @@ ENTRY(__switch_to)
 	ldr	r0, =thread_notify_head
 	mov	r1, #THREAD_NOTIFY_SWITCH
 	bl	atomic_notifier_call_chain
+ THUMB(	mov	ip, r4			   )
 	mov	r0, r5
-	ldmia	r4, {r4 - sl, fp, sp, pc}	@ Load all regs saved previously
+ ARM(	ldmia	r4, {r4 - sl, fp, sp, pc}  )	@ Load all regs saved previously
+ THUMB(	ldmia	ip!, {r4 - sl, fp}	   )	@ Load all regs saved previously
+ THUMB(	ldr	sp, [ip], #4		   )
+ THUMB(	ldr	pc, [ip]		   )
 UNWIND(.fnend		)
 ENDPROC(__switch_to)
 
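Thumb-2 LDM/STM register lists are more restricted than ARM's: STM may
not store sp or pc, and LDM may not load sp, so the context-switch
save/restore splits those registers out with individual str/ldr. On the
restore side the base register r4 is itself reloaded by the ldmia, hence
the copy to ip (a scratch register that need not survive the switch)
before the loads. Annotated shape of the Thumb-2 restore path:

	mov	ip, r4			@ base must outlive the reload of r4
	ldmia	ip!, {r4 - sl, fp}	@ bulk-load the callee-saved registers
	ldr	sp, [ip], #4		@ sp cannot appear in a Thumb-2 LDM list
	ldr	pc, [ip]		@ final load resumes the next thread
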
@@ -772,6 +797,7 @@ ENDPROC(__switch_to)
  * if your compiled code is not going to use the new instructions for other
  * purpose.
  */
+ THUMB(	.arm	)
 
 	.macro	usr_ret, reg
 #ifdef CONFIG_ARM_THUMB
@@ -1020,6 +1046,7 @@ __kuser_helper_version: @ 0xffff0ffc
 	.globl	__kuser_helper_end
 __kuser_helper_end:
 
+ THUMB(	.thumb	)
 
 /*
  * Vector stubs.
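
The .arm/.thumb pair brackets the user helpers: they live at fixed
addresses at the top of the vector page (e.g. __kuser_helper_version at
0xffff0ffc, visible in the hunk header) and their ARM encodings are part
of the user ABI, so a Thumb-2 kernel still assembles them as ARM code and
switches back afterwards. Annotated restatement:

 THUMB(	.arm	)	@ before __kuser_helper_start: emit ARM encodings
	@ ...		@ kuser helpers, ABI-frozen ARM code
 THUMB(	.thumb	)	@ after __kuser_helper_end: back to Thumb-2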
@@ -1054,15 +1081,17 @@ vector_\name:
 	@ Prepare for SVC32 mode.  IRQs remain disabled.
 	@
 	mrs	r0, cpsr
-	eor	r0, r0, #(\mode ^ SVC_MODE)
+	eor	r0, r0, #(\mode ^ SVC_MODE | PSR_ISETSTATE)
 	msr	spsr_cxsf, r0
 
 	@
 	@ the branch table must immediately follow this code
 	@
 	and	lr, lr, #0x0f
+ THUMB(	adr	r0, 1f			)
+ THUMB(	ldr	lr, [r0, lr, lsl #2]	)
 	mov	r0, sp
-	ldr	lr, [pc, lr, lsl #2]
+ ARM(	ldr	lr, [pc, lr, lsl #2]	)
 	movs	pc, lr			@ branch to handler in SVC mode
 ENDPROC(vector_\name)
 
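Two Thumb-2 issues meet in the vector stub. First, the SPSR written for
the handler must select the kernel's instruction set: PSR_ISETSTATE,
defined by this series (in asm/ptrace.h, to the best of my reading), is
PSR_T_BIT on Thumb-2 builds and 0 otherwise, so the movs pc, lr exception
return lands in the right state. Second, ldr lr, [pc, lr, lsl #2] (a
pc-relative load with register offset) has no Thumb-2 encoding, so the
table address is formed with adr first. Sketch of the definition:

/* arch/arm/include/asm/ptrace.h -- sketch */
#ifdef CONFIG_THUMB2_KERNEL
#define PSR_ISETSTATE	PSR_T_BIT	/* exception handlers run in Thumb */
#else
#define PSR_ISETSTATE	0
#endif
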
@@ -1206,14 +1235,16 @@ __stubs_end:
 
 	.globl	__vectors_start
 __vectors_start:
-	swi	SYS_ERROR0
-	b	vector_und + stubs_offset
-	ldr	pc, .LCvswi + stubs_offset
-	b	vector_pabt + stubs_offset
-	b	vector_dabt + stubs_offset
-	b	vector_addrexcptn + stubs_offset
-	b	vector_irq + stubs_offset
-	b	vector_fiq + stubs_offset
+ ARM(	swi	SYS_ERROR0	)
+ THUMB(	svc	#0		)
+ THUMB(	nop			)
+	W(b)	vector_und + stubs_offset
+	W(ldr)	pc, .LCvswi + stubs_offset
+	W(b)	vector_pabt + stubs_offset
+	W(b)	vector_dabt + stubs_offset
+	W(b)	vector_addrexcptn + stubs_offset
+	W(b)	vector_irq + stubs_offset
+	W(b)	vector_fiq + stubs_offset
 
 	.globl	__vectors_end
 __vectors_end:
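
The hardware fetches each exception handler from a fixed 4-byte slot in
this page, so every entry must stay exactly one word wide: the 16-bit
Thumb svc #0 is padded with a nop, and W(b)/W(ldr) force 32-bit
encodings. The architectural slot layout, for reference:

@ 0x00 reset		0x04 undefined		0x08 svc	0x0c prefetch abort
@ 0x10 data abort	0x14 (reserved)		0x18 irq	0x1c fiq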
