OSDN Git Service

x86: convert arch_futex_atomic_op_inuser() to user_access_begin/user_access_end()
Author:    Al Viro <viro@zeniv.linux.org.uk>
           Sun, 16 Feb 2020 18:10:42 +0000 (13:10 -0500)
Committer: Al Viro <viro@zeniv.linux.org.uk>
           Sat, 28 Mar 2020 03:58:53 +0000 (23:58 -0400)
Lift stac/clac pairs from __futex_atomic_op{1,2} into arch_futex_atomic_op_inuser(),
fold them with access_ok() in there.  The switch in arch_futex_atomic_op_inuser()
is what has required the previous (objtool) commit...

Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
arch/x86/include/asm/futex.h

index 6bcd1c1..53c07ab 100644 (file)
 #include <asm/processor.h>
 #include <asm/smap.h>
 
-#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg)    \
-       asm volatile("\t" ASM_STAC "\n"                         \
-                    "1:\t" insn "\n"                           \
-                    "2:\t" ASM_CLAC "\n"                       \
+#define unsafe_atomic_op1(insn, oval, uaddr, oparg, label)     \
+do {                                                           \
+       int oldval = 0, ret;                                    \
+       asm volatile("1:\t" insn "\n"                           \
+                    "2:\n"                                     \
                     "\t.section .fixup,\"ax\"\n"               \
                     "3:\tmov\t%3, %1\n"                        \
                     "\tjmp\t2b\n"                              \
                     "\t.previous\n"                            \
                     _ASM_EXTABLE_UA(1b, 3b)                    \
                     : "=r" (oldval), "=r" (ret), "+m" (*uaddr) \
-                    : "i" (-EFAULT), "0" (oparg), "1" (0))
+                    : "i" (-EFAULT), "0" (oparg), "1" (0));    \
+       if (ret)                                                \
+               goto label;                                     \
+       *oval = oldval;                                         \
+} while(0)
 
-#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg)    \
-       asm volatile("\t" ASM_STAC "\n"                         \
-                    "1:\tmovl  %2, %0\n"                       \
+
+#define unsafe_atomic_op2(insn, oval, uaddr, oparg, label)     \
+do {                                                           \
+       int oldval = 0, ret, tem;                               \
+       asm volatile("1:\tmovl  %2, %0\n"                       \
                     "\tmovl\t%0, %3\n"                         \
                     "\t" insn "\n"                             \
                     "2:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"     \
                     "\tjnz\t1b\n"                              \
-                    "3:\t" ASM_CLAC "\n"                       \
+                    "3:\n"                                     \
                     "\t.section .fixup,\"ax\"\n"               \
                     "4:\tmov\t%5, %1\n"                        \
                     "\tjmp\t3b\n"                          \
                     "\t.previous\n"                        \
                     _ASM_EXTABLE_UA(2b, 4b)                    \
                     : "=&a" (oldval), "=&r" (ret),             \
                       "+m" (*uaddr), "=&r" (tem)               \
-                    : "r" (oparg), "i" (-EFAULT), "1" (0))
+                    : "r" (oparg), "i" (-EFAULT), "1" (0));    \
+       if (ret)                                                \
+               goto label;                                     \
+       *oval = oldval;                                         \
+} while(0)
 
-static inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
+static __always_inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
                u32 __user *uaddr)
 {
-       int oldval = 0, ret, tem;
-
-       if (!access_ok(uaddr, sizeof(u32)))
+       if (!user_access_begin(uaddr, sizeof(u32)))
                return -EFAULT;
 
        switch (op) {
        case FUTEX_OP_SET:
-               __futex_atomic_op1("xchgl %0, %2", ret, oldval, uaddr, oparg);
+               unsafe_atomic_op1("xchgl %0, %2", oval, uaddr, oparg, Efault);
                break;
        case FUTEX_OP_ADD:
-               __futex_atomic_op1(LOCK_PREFIX "xaddl %0, %2", ret, oldval,
-                                  uaddr, oparg);
+               unsafe_atomic_op1(LOCK_PREFIX "xaddl %0, %2", oval,
+                                  uaddr, oparg, Efault);
                break;
        case FUTEX_OP_OR:
-               __futex_atomic_op2("orl %4, %3", ret, oldval, uaddr, oparg);
+               unsafe_atomic_op2("orl %4, %3", oval, uaddr, oparg, Efault);
                break;
        case FUTEX_OP_ANDN:
-               __futex_atomic_op2("andl %4, %3", ret, oldval, uaddr, ~oparg);
+               unsafe_atomic_op2("andl %4, %3", oval, uaddr, ~oparg, Efault);
                break;
        case FUTEX_OP_XOR:
-               __futex_atomic_op2("xorl %4, %3", ret, oldval, uaddr, oparg);
+               unsafe_atomic_op2("xorl %4, %3", oval, uaddr, oparg, Efault);
                break;
        default:
-               ret = -ENOSYS;
+               user_access_end();
+               return -ENOSYS;
        }
-
-       if (!ret)
-               *oval = oldval;
-
-       return ret;
+       user_access_end();
+       return 0;
+Efault:
+       user_access_end();
+       return -EFAULT;
 }
 
 static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,