author     Al Viro <viro@zeniv.linux.org.uk>  2017-03-20 00:55:24 -0400
committer  Al Viro <viro@zeniv.linux.org.uk>  2017-03-28 18:23:40 -0400
commit     7cefa5a05dbda1f0bbbd98e9d2861b09a35cc6ea (patch)
tree       e2e75444c0097957954f60811bd2c148723758fb /arch/m68k/include
parent     68acfdcb477abdbf875e33e4a950094c8de08f41 (diff)
download   linux-7cefa5a05dbda1f0bbbd98e9d2861b09a35cc6ea.tar.bz2
m68k: get rid of zeroing
Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
Diffstat (limited to 'arch/m68k/include')
-rw-r--r--  arch/m68k/include/asm/uaccess_mm.h | 57
1 file changed, 40 insertions(+), 17 deletions(-)
diff --git a/arch/m68k/include/asm/uaccess_mm.h b/arch/m68k/include/asm/uaccess_mm.h
index 23be968a53fd..931bbba449e2 100644
--- a/arch/m68k/include/asm/uaccess_mm.h
+++ b/arch/m68k/include/asm/uaccess_mm.h
@@ -179,39 +179,55 @@ asm volatile ("\n" \
unsigned long __generic_copy_from_user(void *to, const void __user *from, unsigned long n);
unsigned long __generic_copy_to_user(void __user *to, const void *from, unsigned long n);
-#define __constant_copy_from_user_asm(res, to, from, tmp, n, s1, s2, s3)\
+#define __suffix0
+#define __suffix1 b
+#define __suffix2 w
+#define __suffix4 l
+
+#define ____constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)\
asm volatile ("\n" \
"1: "MOVES"."#s1" (%2)+,%3\n" \
" move."#s1" %3,(%1)+\n" \
+ " .ifnc \""#s2"\",\"\"\n" \
"2: "MOVES"."#s2" (%2)+,%3\n" \
" move."#s2" %3,(%1)+\n" \
" .ifnc \""#s3"\",\"\"\n" \
"3: "MOVES"."#s3" (%2)+,%3\n" \
" move."#s3" %3,(%1)+\n" \
" .endif\n" \
+ " .endif\n" \
"4:\n" \
" .section __ex_table,\"a\"\n" \
" .align 4\n" \
" .long 1b,10f\n" \
+ " .ifnc \""#s2"\",\"\"\n" \
" .long 2b,20f\n" \
" .ifnc \""#s3"\",\"\"\n" \
" .long 3b,30f\n" \
" .endif\n" \
+ " .endif\n" \
" .previous\n" \
"\n" \
" .section .fixup,\"ax\"\n" \
" .even\n" \
- "10: clr."#s1" (%1)+\n" \
- "20: clr."#s2" (%1)+\n" \
+ "10: addq.l #"#n1",%0\n" \
+ " .ifnc \""#s2"\",\"\"\n" \
+ "20: addq.l #"#n2",%0\n" \
" .ifnc \""#s3"\",\"\"\n" \
- "30: clr."#s3" (%1)+\n" \
+ "30: addq.l #"#n3",%0\n" \
+ " .endif\n" \
" .endif\n" \
- " moveq.l #"#n",%0\n" \
" jra 4b\n" \
" .previous\n" \
: "+d" (res), "+&a" (to), "+a" (from), "=&d" (tmp) \
: : "memory")
+#define ___constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)\
+ ____constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, s1, s2, s3)
+#define __constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3) \
+ ___constant_copy_from_user_asm(res, to, from, tmp, n1, n2, n3, \
+ __suffix##n1, __suffix##n2, __suffix##n3)
+
static __always_inline unsigned long
__constant_copy_from_user(void *to, const void __user *from, unsigned long n)
{
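
The three-level macro chain introduced above (__constant -> ___constant ->
____constant) is the standard double-expansion trick: the byte counts
n1/n2/n3 are pasted onto __suffix first, and the intermediate level then
forces __suffix4 to expand to l (and __suffix2 to w, and so on) before the
#s1 stringification inside the asm template sees it.  A minimal userspace
sketch of the same technique -- STR_, STR and SUFFIX_FOR are hypothetical
demo names, not kernel macros:

#include <stdio.h>

#define __suffix0
#define __suffix1 b
#define __suffix2 w
#define __suffix4 l

#define STR_(x) #x			/* stringify argument as-is */
#define STR(x) STR_(x)			/* expand argument first, then stringify */
#define SUFFIX_FOR(n) STR(__suffix##n)	/* paste, expand, stringify */

int main(void)
{
	/* prints: 0 -> ""  1 -> "b"  2 -> "w"  4 -> "l" */
	printf("0 -> \"%s\"\n", SUFFIX_FOR(0));
	printf("1 -> \"%s\"\n", SUFFIX_FOR(1));
	printf("2 -> \"%s\"\n", SUFFIX_FOR(2));
	printf("4 -> \"%s\"\n", SUFFIX_FOR(4));
	return 0;
}

Without the extra expansion level, #s1 would stringify the unexpanded token
__suffix4 instead of the l that moves.l needs.
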
@@ -219,37 +235,37 @@ __constant_copy_from_user(void *to, const void __user *from, unsigned long n)
switch (n) {
case 1:
- __get_user_asm(res, *(u8 *)to, (u8 __user *)from, u8, b, d, 1);
+ __constant_copy_from_user_asm(res, to, from, tmp, 1, 0, 0);
break;
case 2:
- __get_user_asm(res, *(u16 *)to, (u16 __user *)from, u16, w, r, 2);
+ __constant_copy_from_user_asm(res, to, from, tmp, 2, 0, 0);
break;
case 3:
- __constant_copy_from_user_asm(res, to, from, tmp, 3, w, b,);
+ __constant_copy_from_user_asm(res, to, from, tmp, 2, 1, 0);
break;
case 4:
- __get_user_asm(res, *(u32 *)to, (u32 __user *)from, u32, l, r, 4);
+ __constant_copy_from_user_asm(res, to, from, tmp, 4, 0, 0);
break;
case 5:
- __constant_copy_from_user_asm(res, to, from, tmp, 5, l, b,);
+ __constant_copy_from_user_asm(res, to, from, tmp, 4, 1, 0);
break;
case 6:
- __constant_copy_from_user_asm(res, to, from, tmp, 6, l, w,);
+ __constant_copy_from_user_asm(res, to, from, tmp, 4, 2, 0);
break;
case 7:
- __constant_copy_from_user_asm(res, to, from, tmp, 7, l, w, b);
+ __constant_copy_from_user_asm(res, to, from, tmp, 4, 2, 1);
break;
case 8:
- __constant_copy_from_user_asm(res, to, from, tmp, 8, l, l,);
+ __constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 0);
break;
case 9:
- __constant_copy_from_user_asm(res, to, from, tmp, 9, l, l, b);
+ __constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 1);
break;
case 10:
- __constant_copy_from_user_asm(res, to, from, tmp, 10, l, l, w);
+ __constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 2);
break;
case 12:
- __constant_copy_from_user_asm(res, to, from, tmp, 12, l, l, l);
+ __constant_copy_from_user_asm(res, to, from, tmp, 4, 4, 4);
break;
default:
/* we limit the inlined version to 3 moves */
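
Hand-expanding the seven-byte case -- __constant_copy_from_user_asm(res,
to, from, tmp, 4, 2, 1) -- makes the new fixup scheme concrete.  The sketch
below is an illustration of the macro's output in the same kernel-header
context, not verbatim preprocessor output; copy_7_from_user is a
hypothetical wrapper name, and MOVES is shown as "moves", which it only is
when CONFIG_CPU_HAS_ADDRESS_SPACES is set (plain "move" otherwise):

static inline unsigned long
copy_7_from_user(void *to, const void __user *from)
{
	unsigned long res = 0, tmp;

	asm volatile ("\n"
		"1:	moves.l	(%2)+,%3\n"	/* bytes 0-3 */
		"	move.l	%3,(%1)+\n"
		"2:	moves.w	(%2)+,%3\n"	/* bytes 4-5 */
		"	move.w	%3,(%1)+\n"
		"3:	moves.b	(%2)+,%3\n"	/* byte 6 */
		"	move.b	%3,(%1)+\n"
		"4:\n"
		"	.section __ex_table,\"a\"\n"
		"	.align	4\n"
		"	.long	1b,10f\n"
		"	.long	2b,20f\n"
		"	.long	3b,30f\n"
		"	.previous\n"
		"\n"
		"	.section .fixup,\"ax\"\n"
		"	.even\n"
		/* The fixup entries deliberately fall through into each
		 * other: a fault at 1: gives res = 4 + 2 + 1 = 7, a fault
		 * at 2: gives res = 2 + 1 = 3, and a fault at 3: gives
		 * res = 1 -- always the number of bytes left uncopied.
		 * The old fixups instead zeroed the remaining destination
		 * bytes and returned the full length. */
		"10:	addq.l	#4,%0\n"
		"20:	addq.l	#2,%0\n"
		"30:	addq.l	#1,%0\n"
		"	jra	4b\n"
		"	.previous\n"
		: "+d" (res), "+&a" (to), "+a" (from), "=&d" (tmp)
		: : "memory");
	return res;
}
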
@@ -353,7 +369,14 @@ __constant_copy_to_user(void __user *to, const void *from, unsigned long n)
#define __copy_to_user_inatomic __copy_to_user
#define __copy_from_user_inatomic __copy_from_user
-#define copy_from_user(to, from, n) __copy_from_user(to, from, n)
+static inline unsigned long
+copy_from_user(void *to, const void __user *from, unsigned long n)
+{
+ unsigned long res = __copy_from_user_inatomic(to, from, n);
+ if (unlikely(res))
+ memset(to + (n - res), 0, res);
+ return res;
+}
#define copy_to_user(to, from, n) __copy_to_user(to, from, n)
#define user_addr_max() \
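
With the zeroing gone from the asm fixups, the short-copy guarantee now
lives in a single place: the copy_from_user() wrapper above memset()s the
uncopied tail, so a caller that looks at the buffer after a partial copy
still never sees stale kernel memory.  A hypothetical caller sketch --
example_ioctl and uptr are illustration names, not kernel code:

#include <linux/errno.h>
#include <linux/uaccess.h>

static long example_ioctl(const char __user *uptr)
{
	char cmd[32];

	/* On a fault partway through, copy_from_user() returns the number
	 * of uncopied bytes and has already zero-filled that tail of cmd
	 * via the memset() in the wrapper. */
	if (copy_from_user(cmd, uptr, sizeof(cmd)))
		return -EFAULT;

	cmd[sizeof(cmd) - 1] = '\0';	/* use as a NUL-terminated string */
	return 0;
}
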