Commit 450abd38 authored by Ard Biesheuvel

ARM: kernel: use relative references for UP/SMP alternatives

Currently, the .alt.smp.init section contains the virtual addresses
of the patch sites. Since patching may occur both before and after
switching into virtual mode, this requires some manual handling of
the address when applying the UP alternative.

Let's simplify this by using relative offsets in the table entries:
this allows us to simply add each entry's address to its contents,
regardless of whether we are running in virtual mode or not.
Reviewed-by: Nicolas Pitre <nico@fluxnic.net>
Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
parent 91580f0d
...@@ -259,7 +259,7 @@ ...@@ -259,7 +259,7 @@
*/ */
#define ALT_UP(instr...) \ #define ALT_UP(instr...) \
.pushsection ".alt.smp.init", "a" ;\ .pushsection ".alt.smp.init", "a" ;\
.long 9998b ;\ .long 9998b - . ;\
9997: instr ;\ 9997: instr ;\
.if . - 9997b == 2 ;\ .if . - 9997b == 2 ;\
nop ;\ nop ;\
...@@ -270,7 +270,7 @@ ...@@ -270,7 +270,7 @@
.popsection .popsection
#define ALT_UP_B(label) \ #define ALT_UP_B(label) \
.pushsection ".alt.smp.init", "a" ;\ .pushsection ".alt.smp.init", "a" ;\
.long 9998b ;\ .long 9998b - . ;\
W(b) . + (label - 9998b) ;\ W(b) . + (label - 9998b) ;\
.popsection .popsection
#else #else
......
...@@ -96,7 +96,7 @@ unsigned long get_wchan(struct task_struct *p); ...@@ -96,7 +96,7 @@ unsigned long get_wchan(struct task_struct *p);
#define __ALT_SMP_ASM(smp, up) \ #define __ALT_SMP_ASM(smp, up) \
"9998: " smp "\n" \ "9998: " smp "\n" \
" .pushsection \".alt.smp.init\", \"a\"\n" \ " .pushsection \".alt.smp.init\", \"a\"\n" \
" .long 9998b\n" \ " .long 9998b - .\n" \
" " up "\n" \ " " up "\n" \
" .popsection\n" " .popsection\n"
#else #else
......
...@@ -546,14 +546,15 @@ smp_on_up: ...@@ -546,14 +546,15 @@ smp_on_up:
__do_fixup_smp_on_up: __do_fixup_smp_on_up:
cmp r4, r5 cmp r4, r5
reths lr reths lr
ldmia r4!, {r0, r6} ldmia r4, {r0, r6}
ARM( str r6, [r0, r3] ) ARM( str r6, [r0, r4] )
THUMB( add r0, r0, r3 ) THUMB( add r0, r0, r4 )
add r4, r4, #8
#ifdef __ARMEB__ #ifdef __ARMEB__
THUMB( mov r6, r6, ror #16 ) @ Convert word order for big-endian. THUMB( mov r6, r6, ror #16 ) @ Convert word order for big-endian.
#endif #endif
THUMB( strh r6, [r0], #2 ) @ For Thumb-2, store as two halfwords THUMB( strh r6, [r0], #2 ) @ For Thumb-2, store as two halfwords
THUMB( mov r6, r6, lsr #16 ) @ to be robust against misaligned r3. THUMB( mov r6, r6, lsr #16 ) @ to be robust against misaligned r0.
THUMB( strh r6, [r0] ) THUMB( strh r6, [r0] )
b __do_fixup_smp_on_up b __do_fixup_smp_on_up
ENDPROC(__do_fixup_smp_on_up) ENDPROC(__do_fixup_smp_on_up)
...@@ -562,7 +563,6 @@ ENTRY(fixup_smp) ...@@ -562,7 +563,6 @@ ENTRY(fixup_smp)
stmfd sp!, {r4 - r6, lr} stmfd sp!, {r4 - r6, lr}
mov r4, r0 mov r4, r0
add r5, r0, r1 add r5, r0, r1
mov r3, #0
bl __do_fixup_smp_on_up bl __do_fixup_smp_on_up
ldmfd sp!, {r4 - r6, pc} ldmfd sp!, {r4 - r6, pc}
ENDPROC(fixup_smp) ENDPROC(fixup_smp)
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment