Commit 53617825 authored by Roman Zippel, committed by Linus Torvalds

[PATCH] m68k: fix uaccess.h for gcc-3.x

gcc-3.x has a few problems detecting a constant parameter.
Signed-off-by: Roman Zippel <zippel@linux-m68k.org>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
parent caad3c2a
...@@ -181,144 +181,164 @@ asm volatile ("\n" \ ...@@ -181,144 +181,164 @@ asm volatile ("\n" \
unsigned long __generic_copy_from_user(void *to, const void __user *from, unsigned long n); unsigned long __generic_copy_from_user(void *to, const void __user *from, unsigned long n);
unsigned long __generic_copy_to_user(void __user *to, const void *from, unsigned long n); unsigned long __generic_copy_to_user(void __user *to, const void *from, unsigned long n);
/*
 * NOTE(review): this span is the pre-patch (removed) side of the commit
 * diff; it is preserved byte-identical below.
 *
 * __constant_copy_from_user_asm() - inline-asm body for a small constant-
 * size copy from user space, built from up to three "moves" loads whose
 * sizes are the stringized suffixes s1/s2/s3 (b/w/l; s3 may be empty, in
 * which case the .ifnc guards drop the third move and its table entries).
 *
 * @res:  out: 0 on success, or the constant n on any fault
 * @to:   kernel destination pointer (advanced by the asm, "+&a")
 * @from: user source pointer (advanced by the asm, "+a")
 * @tmp:  scratch data register for each transferred unit
 * @n:    compile-time byte count, stringized into the fixup "moveq.l #n"
 *
 * Each faulting "moves" (labels 1/2/3) has an __ex_table entry pointing at
 * a fixup (10/20/30) that zero-fills the remaining destination bytes with
 * clr instructions, sets res = n, and jumps back to the success label 4.
 * No comment lines are inserted inside the macro: every line must end in
 * the '\' continuation, so documentation is kept up here instead.
 */
#define __constant_copy_from_user_asm(res, to, from, tmp, n, s1, s2, s3)\
asm volatile ("\n" \
"1: moves."#s1" (%2)+,%3\n" \
" move."#s1" %3,(%1)+\n" \
"2: moves."#s2" (%2)+,%3\n" \
" move."#s2" %3,(%1)+\n" \
" .ifnc \""#s3"\",\"\"\n" \
"3: moves."#s3" (%2)+,%3\n" \
" move."#s3" %3,(%1)+\n" \
" .endif\n" \
"4:\n" \
" .section __ex_table,\"a\"\n" \
" .align 4\n" \
" .long 1b,10f\n" \
" .long 2b,20f\n" \
" .ifnc \""#s3"\",\"\"\n" \
" .long 3b,30f\n" \
" .endif\n" \
" .previous\n" \
"\n" \
" .section .fixup,\"ax\"\n" \
" .even\n" \
"10: clr."#s1" (%1)+\n" \
"20: clr."#s2" (%1)+\n" \
" .ifnc \""#s3"\",\"\"\n" \
"30: clr."#s3" (%1)+\n" \
" .endif\n" \
" moveq.l #"#n",%0\n" \
" jra 4b\n" \
" .previous\n" \
: "+d" (res), "+&a" (to), "+a" (from), "=&d" (tmp) \
: : "memory")
/*
 * NOTE(review): this region is a side-by-side diff render in which the
 * pre-patch (left) and post-patch (right) columns are fused onto single
 * lines wherever both sides match (hence the visibly doubled text, e.g.
 * the repeated function header).  All tokens are kept byte-identical.
 *
 * __constant_copy_from_user() - inlined copy_from_user() for a
 * compile-time-constant byte count n.  Returns the number of bytes NOT
 * copied (0 on success), like the generic routine.
 *
 * Old side: a switch on n — 1/2/4 bytes go through __get_user_asm, the
 * other sizes up to 12 (except 11) through __constant_copy_from_user_asm,
 * and everything else falls back to __generic_copy_from_user().
 *
 * New side: the same 1/2/4 fast path, then one asm statement whose
 * assembler-level .macro "copy_from_user" is expanded three times; each
 * expansion emits the largest remaining 4/2/1-byte "moves" load (tracked
 * in the assembler symbol .Lcnt, seeded from the constant %c4 == n) —
 * three expansions cover every n <= 12 except 11.  The per-commit-message
 * reason: gcc-3.x failed to fold n as a constant through the old macro, so
 * the size decisions were moved into the assembler.
 */
static __always_inline unsigned long static __always_inline unsigned long
__constant_copy_from_user(void *to, const void __user *from, unsigned long n) __constant_copy_from_user(void *to, const void __user *from, unsigned long n)
{ {
unsigned long res = 0, tmp; unsigned long res = 0, tmp;
/* limit the inlined version to 3 moves */
if (n == 11 || n > 12)
return __generic_copy_from_user(to, from, n);
/* 1-, 2- and 4-byte copies map to a single moves insn on both sides. */
switch (n) { switch (n) {
case 1: case 1:
__get_user_asm(res, *(u8 *)to, (u8 *)from, u8, b, d, 1); __get_user_asm(res, *(u8 *)to, (u8 *)from, u8, b, d, 1);
return res; break;
case 2: case 2:
__get_user_asm(res, *(u16 *)to, (u16 *)from, u16, w, d, 2); __get_user_asm(res, *(u16 *)to, (u16 *)from, u16, w, d, 2);
return res; break;
case 3:
__constant_copy_from_user_asm(res, to, from, tmp, 3, w, b,);
break;
case 4: case 4:
__get_user_asm(res, *(u32 *)to, (u32 *)from, u32, l, r, 4); __get_user_asm(res, *(u32 *)to, (u32 *)from, u32, l, r, 4);
return res; break;
case 5:
__constant_copy_from_user_asm(res, to, from, tmp, 5, l, b,);
break;
case 6:
__constant_copy_from_user_asm(res, to, from, tmp, 6, l, w,);
break;
case 7:
__constant_copy_from_user_asm(res, to, from, tmp, 7, l, w, b);
break;
case 8:
__constant_copy_from_user_asm(res, to, from, tmp, 8, l, l,);
break;
case 9:
__constant_copy_from_user_asm(res, to, from, tmp, 9, l, l, b);
break;
case 10:
__constant_copy_from_user_asm(res, to, from, tmp, 10, l, l, w);
break;
case 12:
__constant_copy_from_user_asm(res, to, from, tmp, 12, l, l, l);
break;
default:
/* we limit the inlined version to 3 moves */
return __generic_copy_from_user(to, from, n);
} }
/*
 * New-side body: define the assembler macro once per object file
 * (.ifndef guard on .Lfrom_user), then expand it three times.  Each
 * faulting load (label 1) gets an __ex_table entry to fixup label 3,
 * which sets res = n, rewinds %1 to the original destination (%5),
 * zero-fills all n bytes, and resumes at label 2.
 */
asm volatile ("\n"
" .ifndef .Lfrom_user\n"
" .set .Lfrom_user,1\n"
" .macro copy_from_user to,from,tmp\n"
" .if .Lcnt >= 4\n"
"1: moves.l (\\from)+,\\tmp\n"
" move.l \\tmp,(\\to)+\n"
" .set .Lcnt,.Lcnt-4\n"
" .elseif .Lcnt & 2\n"
"1: moves.w (\\from)+,\\tmp\n"
" move.w \\tmp,(\\to)+\n"
" .set .Lcnt,.Lcnt-2\n"
" .elseif .Lcnt & 1\n"
"1: moves.b (\\from)+,\\tmp\n"
" move.b \\tmp,(\\to)+\n"
" .set .Lcnt,.Lcnt-1\n"
" .else\n"
" .exitm\n"
" .endif\n"
"\n"
" .section __ex_table,\"a\"\n"
" .align 4\n"
" .long 1b,3f\n"
" .previous\n"
" .endm\n"
" .endif\n"
"\n"
" .set .Lcnt,%c4\n"
" copy_from_user %1,%2,%3\n"
" copy_from_user %1,%2,%3\n"
" copy_from_user %1,%2,%3\n"
"2:\n"
" .section .fixup,\"ax\"\n"
" .even\n"
"3: moveq.l %4,%0\n"
" move.l %5,%1\n"
" .rept %c4 / 4\n"
" clr.l (%1)+\n"
" .endr\n"
" .if %c4 & 2\n"
" clr.w (%1)+\n"
" .endif\n"
" .if %c4 & 1\n"
" clr.b (%1)+\n"
" .endif\n"
" jra 2b\n"
" .previous\n"
: "+r" (res), "+a" (to), "+a" (from), "=&d" (tmp)
: "i" (n), "g" (to)
: "memory");
return res; return res;
} }
/*
 * NOTE(review): pre-patch (removed) side of the diff, preserved
 * byte-identical below.
 *
 * __constant_copy_to_user_asm() - inline-asm body for a small constant-
 * size copy to user space; mirror image of the _from_user variant, but
 * here the faulting instructions are the "moves" STORES (labels 11/21/31)
 * and, unlike the read path, the fixup (label 5) only sets res = n — there
 * is nothing to zero-fill on a failed write to user space.  The extra
 * fall-through labels 12/22/32 also get __ex_table entries so a fault
 * reported on the instruction after a store still lands in the fixup.
 * s1/s2/s3 are the b/w/l size suffixes; s3 may be empty (.ifnc guards).
 * Comments stay outside the macro because every macro line must end in
 * the '\' continuation.
 */
#define __constant_copy_to_user_asm(res, to, from, tmp, n, s1, s2, s3) \
asm volatile ("\n" \
" move."#s1" (%2)+,%3\n" \
"11: moves."#s1" %3,(%1)+\n" \
"12: move."#s2" (%2)+,%3\n" \
"21: moves."#s2" %3,(%1)+\n" \
"22:\n" \
" .ifnc \""#s3"\",\"\"\n" \
" move."#s3" (%2)+,%3\n" \
"31: moves."#s3" %3,(%1)+\n" \
"32:\n" \
" .endif\n" \
"4:\n" \
"\n" \
" .section __ex_table,\"a\"\n" \
" .align 4\n" \
" .long 11b,5f\n" \
" .long 12b,5f\n" \
" .long 21b,5f\n" \
" .long 22b,5f\n" \
" .ifnc \""#s3"\",\"\"\n" \
" .long 31b,5f\n" \
" .long 32b,5f\n" \
" .endif\n" \
" .previous\n" \
"\n" \
" .section .fixup,\"ax\"\n" \
" .even\n" \
"5: moveq.l #"#n",%0\n" \
" jra 4b\n" \
" .previous\n" \
: "+d" (res), "+a" (to), "+a" (from), "=&d" (tmp) \
: : "memory")
/*
 * NOTE(review): fused side-by-side diff render — pre-patch (left) and
 * post-patch (right) text share each matching line.  Tokens kept
 * byte-identical.
 *
 * __constant_copy_to_user() - inlined copy_to_user() for a compile-time-
 * constant byte count n; returns the number of bytes not copied (0 on
 * success).  Old side: switch on n dispatching to __put_user_asm (1/2/4)
 * or __constant_copy_to_user_asm (3, 5-10, 12), falling back to
 * __generic_copy_to_user() otherwise.  New side: three expansions of the
 * assembler .macro "copy_to_user", each emitting the largest remaining
 * 4/2/1-byte "moves" store as tracked by .Lcnt (seeded from %c4 == n).
 * Unlike the from-user path, the fixup (label 3) only sets res = n — a
 * failed user-space write leaves nothing to zero-fill, so no saved
 * destination operand is needed here.
 */
static __always_inline unsigned long static __always_inline unsigned long
__constant_copy_to_user(void __user *to, const void *from, unsigned long n) __constant_copy_to_user(void __user *to, const void *from, unsigned long n)
{ {
unsigned long res = 0, tmp; unsigned long res = 0, tmp;
/* limit the inlined version to 3 moves */
if (n == 11 || n > 12)
return __generic_copy_to_user(to, from, n);
/* 1-, 2- and 4-byte copies map to a single moves store on both sides. */
switch (n) { switch (n) {
case 1: case 1:
__put_user_asm(res, *(u8 *)from, (u8 *)to, b, d, 1); __put_user_asm(res, *(u8 *)from, (u8 *)to, b, d, 1);
return res; break;
case 2: case 2:
__put_user_asm(res, *(u16 *)from, (u16 *)to, w, d, 2); __put_user_asm(res, *(u16 *)from, (u16 *)to, w, d, 2);
return res; break;
case 3:
__constant_copy_to_user_asm(res, to, from, tmp, 3, w, b,);
break;
case 4: case 4:
__put_user_asm(res, *(u32 *)from, (u32 *)to, l, r, 4); __put_user_asm(res, *(u32 *)from, (u32 *)to, l, r, 4);
return res; break;
case 5:
__constant_copy_to_user_asm(res, to, from, tmp, 5, l, b,);
break;
case 6:
__constant_copy_to_user_asm(res, to, from, tmp, 6, l, w,);
break;
case 7:
__constant_copy_to_user_asm(res, to, from, tmp, 7, l, w, b);
break;
case 8:
__constant_copy_to_user_asm(res, to, from, tmp, 8, l, l,);
break;
case 9:
__constant_copy_to_user_asm(res, to, from, tmp, 9, l, l, b);
break;
case 10:
__constant_copy_to_user_asm(res, to, from, tmp, 10, l, l, w);
break;
case 12:
__constant_copy_to_user_asm(res, to, from, tmp, 12, l, l, l);
break;
default:
/* limit the inlined version to 3 moves */
return __generic_copy_to_user(to, from, n);
} }
/*
 * New-side body: the .macro is defined once per object file (.ifndef
 * guard on .Lto_user).  Each expansion labels the faulting "moves"
 * store (11) and the following instruction (12); both get __ex_table
 * entries to fixup label 3, which sets res = n and resumes at 2.
 */
asm volatile ("\n"
" .ifndef .Lto_user\n"
" .set .Lto_user,1\n"
" .macro copy_to_user to,from,tmp\n"
" .if .Lcnt >= 4\n"
" move.l (\\from)+,\\tmp\n"
"11: moves.l \\tmp,(\\to)+\n"
"12: .set .Lcnt,.Lcnt-4\n"
" .elseif .Lcnt & 2\n"
" move.w (\\from)+,\\tmp\n"
"11: moves.w \\tmp,(\\to)+\n"
"12: .set .Lcnt,.Lcnt-2\n"
" .elseif .Lcnt & 1\n"
" move.b (\\from)+,\\tmp\n"
"11: moves.b \\tmp,(\\to)+\n"
"12: .set .Lcnt,.Lcnt-1\n"
" .else\n"
" .exitm\n"
" .endif\n"
"\n"
" .section __ex_table,\"a\"\n"
" .align 4\n"
" .long 11b,3f\n"
" .long 12b,3f\n"
" .previous\n"
" .endm\n"
" .endif\n"
"\n"
" .set .Lcnt,%c4\n"
" copy_to_user %1,%2,%3\n"
" copy_to_user %1,%2,%3\n"
" copy_to_user %1,%2,%3\n"
"2:\n"
" .section .fixup,\"ax\"\n"
" .even\n"
"3: moveq.l %4,%0\n"
" jra 2b\n"
" .previous\n"
: "+r" (res), "+a" (to), "+a" (from), "=&d" (tmp)
: "i" (n)
: "memory");
return res; return res;
} }
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment