x86/uaccess: Remove .fixup usage
author     Peter Zijlstra <peterz@infradead.org>
           Wed, 10 Nov 2021 10:01:12 +0000 (11:01 +0100)
committer  Peter Zijlstra <peterz@infradead.org>
           Sat, 11 Dec 2021 08:09:47 +0000 (09:09 +0100)

For the !CC_HAS_ASM_GOTO_OUTPUT (aka. the legacy codepath) case, remove the
.fixup usage by employing both EX_TYPE_EFAULT_REG and EX_FLAG_CLEAR.
As was already done for X86_32's version of __get_user_asm_u64(), use
the "a" register for output, specifically so we can use CLEAR_AX.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Josh Poimboeuf <jpoimboe@redhat.com>
Link: https://lore.kernel.org/r/20211110101325.485154848@infradead.org
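
For context, these annotations move the recovery work out of inline .fixup
stubs and into the generic exception-table handler. The sketch below is a
loose illustration of that handler side, assuming the companion extable
rework from the same series: handle_efault_reg(), gpr_slot(), the includes
and the entry layout are illustrative stand-ins, not the literal
arch/x86/mm/extable.c code. Only EX_TYPE_EFAULT_REG, EX_FLAG_CLEAR_AX and
EX_FLAG_CLEAR_AX_DX actually appear in this patch; EX_FLAG_CLEAR_DX is
inferred from the combined flag.

#include <linux/errno.h>	/* EFAULT */
#include <linux/types.h>	/* bool */
#include <asm/ptrace.h>		/* struct pt_regs */
#include <asm/extable.h>	/* struct exception_table_entry */

/*
 * Hypothetical helper mapping an encoded register number to its
 * struct pt_regs slot (x86 GPR numbering: %rax = 0, %rdx = 2);
 * the real kernel has an equivalent lookup.
 */
static unsigned long *gpr_slot(struct pt_regs *regs, int reg)
{
	switch (reg) {
	case 0:  return &regs->ax;
	case 2:  return &regs->dx;
	default: return &regs->ax;	/* trimmed for illustration */
	}
}

/*
 * Illustrative sketch only -- not the kernel's actual fixup code.
 * On a fault at an annotated instruction, the handler decodes the
 * exception-table entry and does what the old .fixup stub did by hand.
 */
static bool handle_efault_reg(const struct exception_table_entry *e,
			      struct pt_regs *regs, int reg)
{
	/* EX_FLAG_CLEAR_AX / EX_FLAG_CLEAR_DX replace the stub's
	 * "xorl %eax,%eax" / "xorl %edx,%edx". */
	if (e->data & EX_FLAG_CLEAR_AX)
		regs->ax = 0;
	if (e->data & EX_FLAG_CLEAR_DX)
		regs->dx = 0;

	/* EX_TYPE_EFAULT_REG: write -EFAULT into the register that
	 * _ASM_EXTABLE_TYPE_REG(..., %[errout]) recorded. */
	*gpr_slot(regs, reg) = -EFAULT;

	/* Resume at the "2:"/"3:" label after the faulting access;
	 * ->fixup is assumed to be a self-relative offset here. */
	regs->ip = (unsigned long)&e->fixup + e->fixup;
	return true;
}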
arch/x86/include/asm/uaccess.h

index 33a6840..ac6233a 100644
@@ -351,24 +351,22 @@ do {                                                                      \
                     "1:        movl %[lowbits],%%eax\n"                \
                     "2:        movl %[highbits],%%edx\n"               \
                     "3:\n"                                             \
-                    ".section .fixup,\"ax\"\n"                         \
-                    "4:        mov %[efault],%[errout]\n"              \
-                    "  xorl %%eax,%%eax\n"                             \
-                    "  xorl %%edx,%%edx\n"                             \
-                    "  jmp 3b\n"                                       \
-                    ".previous\n"                                      \
-                    _ASM_EXTABLE_UA(1b, 4b)                            \
-                    _ASM_EXTABLE_UA(2b, 4b)                            \
+                    _ASM_EXTABLE_TYPE_REG(1b, 3b, EX_TYPE_EFAULT_REG | \
+                                          EX_FLAG_CLEAR_AX_DX,         \
+                                          %[errout])                   \
+                    _ASM_EXTABLE_TYPE_REG(2b, 3b, EX_TYPE_EFAULT_REG | \
+                                          EX_FLAG_CLEAR_AX_DX,         \
+                                          %[errout])                   \
                     : [errout] "=r" (retval),                          \
                       [output] "=&A"(x)                                \
                     : [lowbits] "m" (__m(__ptr)),                      \
                       [highbits] "m" __m(((u32 __user *)(__ptr)) + 1), \
-                      [efault] "i" (-EFAULT), "0" (retval));           \
+                      "0" (retval));                                   \
 })
 
 #else
 #define __get_user_asm_u64(x, ptr, retval) \
-        __get_user_asm(x, ptr, retval, "q", "=r")
+        __get_user_asm(x, ptr, retval, "q")
 #endif
 
 #define __get_user_size(x, ptr, size, retval)                          \
@@ -379,14 +377,14 @@ do {                                                                      \
        __chk_user_ptr(ptr);                                            \
        switch (size) {                                                 \
        case 1:                                                         \
-               __get_user_asm(x_u8__, ptr, retval, "b", "=q");         \
+               __get_user_asm(x_u8__, ptr, retval, "b");               \
                (x) = x_u8__;                                           \
                break;                                                  \
        case 2:                                                         \
-               __get_user_asm(x, ptr, retval, "w", "=r");              \
+               __get_user_asm(x, ptr, retval, "w");                    \
                break;                                                  \
        case 4:                                                         \
-               __get_user_asm(x, ptr, retval, "l", "=r");              \
+               __get_user_asm(x, ptr, retval, "l");                    \
                break;                                                  \
        case 8:                                                         \
                __get_user_asm_u64(x, ptr, retval);                     \
@@ -396,20 +394,17 @@ do {                                                                      \
        }                                                               \
 } while (0)
 
-#define __get_user_asm(x, addr, err, itype, ltype)                     \
+#define __get_user_asm(x, addr, err, itype)                            \
        asm volatile("\n"                                               \
                     "1:        mov"itype" %[umem],%[output]\n"         \
                     "2:\n"                                             \
-                    ".section .fixup,\"ax\"\n"                         \
-                    "3:        mov %[efault],%[errout]\n"              \
-                    "  xorl %k[output],%k[output]\n"                   \
-                    "  jmp 2b\n"                                       \
-                    ".previous\n"                                      \
-                    _ASM_EXTABLE_UA(1b, 3b)                            \
+                    _ASM_EXTABLE_TYPE_REG(1b, 2b, EX_TYPE_EFAULT_REG | \
+                                          EX_FLAG_CLEAR_AX,            \
+                                          %[errout])                   \
                     : [errout] "=r" (err),                             \
-                      [output] ltype(x)                                \
+                      [output] "=a" (x)                                \
                     : [umem] "m" (__m(addr)),                          \
-                      [efault] "i" (-EFAULT), "0" (err))
+                      "0" (err))
 
 #endif // CONFIG_CC_HAS_ASM_GOTO_OUTPUT
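
Caller-visible behaviour is meant to be unchanged by this conversion: on a
faulting user access the error value ends up as -EFAULT and the destination
is zeroed rather than left holding stale data. A minimal, hypothetical
caller going through the ordinary get_user() path (illustrative only, not
part of this patch):

#include <linux/types.h>
#include <linux/uaccess.h>

/* Hypothetical caller: get_user() still returns 0 on success and
 * -EFAULT on a fault; with the extable-based fixup the zeroing of
 * the destination is done by the fault handler instead of a .fixup
 * stub, but the result a caller sees is the same. */
static int fetch_value(u32 __user *uptr, u32 *out)
{
	u32 val;

	if (get_user(val, uptr))
		return -EFAULT;		/* val would be 0 here */

	*out = val;
	return 0;
}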