Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 1 | /* SPDX-License-Identifier: GPL-2.0 */ |
| 2 | #ifndef _ASM_X86_FUTEX_H |
| 3 | #define _ASM_X86_FUTEX_H |
| 4 | |
| 5 | #ifdef __KERNEL__ |
| 6 | |
| 7 | #include <linux/futex.h> |
| 8 | #include <linux/uaccess.h> |
| 9 | |
| 10 | #include <asm/asm.h> |
| 11 | #include <asm/errno.h> |
| 12 | #include <asm/processor.h> |
| 13 | #include <asm/smap.h> |
| 14 | |
/*
 * unsafe_atomic_op1() - run a single atomic instruction @insn on the
 * user-space word at @uaddr, with @oparg as the register operand.
 *
 * @insn:  asm template; operand %0 is oldval (pre-loaded with @oparg via
 *         the "0" constraint), %2 is the user memory word.
 * @oval:  int pointer that receives the previous value on success.
 * @uaddr: user-space address being operated on (must be inside an open
 *         user_access_begin() section -- "unsafe" naming convention).
 * @oparg: operand value for the instruction.
 * @label: goto target taken when the access faults.
 *
 * Fault handling: if the access at label 1 faults, the exception table
 * entry (_ASM_EXTABLE_UA) redirects to the .fixup code at label 3, which
 * stores -EFAULT into ret (%1) and jumps back to label 2 to resume.
 */
#define unsafe_atomic_op1(insn, oval, uaddr, oparg, label)	\
do {								\
	int oldval = 0, ret;					\
	asm volatile("1:\t" insn "\n"				\
		     "2:\n"					\
		     "\t.section .fixup,\"ax\"\n"		\
		     "3:\tmov\t%3, %1\n"			\
		     "\tjmp\t2b\n"				\
		     "\t.previous\n"				\
		     _ASM_EXTABLE_UA(1b, 3b)			\
		     : "=r" (oldval), "=r" (ret), "+m" (*uaddr)	\
		     : "i" (-EFAULT), "0" (oparg), "1" (0));	\
	if (ret)						\
		goto label;					\
	*oval = oldval;						\
} while(0)
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 31 | |
Olivier Deprez | 157378f | 2022-04-04 15:47:50 +0200 | [diff] [blame^] | 32 | |
/*
 * unsafe_atomic_op2() - atomically apply a read-modify-write operation
 * @insn to the user-space word at @uaddr via a load + cmpxchg retry loop
 * (for operations that have no single-instruction atomic form, e.g.
 * or/and/xor on memory with the old value returned).
 *
 * Loop structure: label 1 loads the current value into oldval (%0),
 * label 2 copies it into the scratch register tem (%3), @insn computes
 * the new value in tem, then label 3 does LOCK cmpxchgl to publish it;
 * on contention (jnz) it retries from label 2 with the value cmpxchg
 * left in %0/eax.
 *
 * Fault handling: a fault at either user access (labels 1 and 3) is
 * redirected by the exception table to label 5, which stores -EFAULT
 * into ret (%1) and resumes at label 4; the macro then jumps to @label.
 * On success the pre-operation value is stored through @oval.
 */
#define unsafe_atomic_op2(insn, oval, uaddr, oparg, label)	\
do {								\
	int oldval = 0, ret, tem;				\
	asm volatile("1:\tmovl %2, %0\n"			\
		     "2:\tmovl\t%0, %3\n"			\
		     "\t" insn "\n"				\
		     "3:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"	\
		     "\tjnz\t2b\n"				\
		     "4:\n"					\
		     "\t.section .fixup,\"ax\"\n"		\
		     "5:\tmov\t%5, %1\n"			\
		     "\tjmp\t4b\n"				\
		     "\t.previous\n"				\
		     _ASM_EXTABLE_UA(1b, 5b)			\
		     _ASM_EXTABLE_UA(3b, 5b)			\
		     : "=&a" (oldval), "=&r" (ret),		\
		       "+m" (*uaddr), "=&r" (tem)		\
		     : "r" (oparg), "i" (-EFAULT), "1" (0));	\
	if (ret)						\
		goto label;					\
	*oval = oldval;						\
} while(0)
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 55 | |
/*
 * arch_futex_atomic_op_inuser() - atomically perform futex operation @op
 * with operand @oparg on the user-space word at @uaddr, returning the
 * previous value through @oval.
 *
 * Returns 0 on success, -EFAULT if the user access faults, or -ENOSYS
 * for an unrecognized @op.
 *
 * The whole switch runs inside a user_access_begin()/user_access_end()
 * section (required by the unsafe_atomic_op*() macros); note that every
 * exit path -- success, -ENOSYS and the Efault fault target -- must call
 * user_access_end() to close that section.
 */
static __always_inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
		u32 __user *uaddr)
{
	if (!user_access_begin(uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
		/* xchg is implicitly locked; single-insn form suffices */
		unsafe_atomic_op1("xchgl %0, %2", oval, uaddr, oparg, Efault);
		break;
	case FUTEX_OP_ADD:
		unsafe_atomic_op1(LOCK_PREFIX "xaddl %0, %2", oval,
				  uaddr, oparg, Efault);
		break;
	case FUTEX_OP_OR:
		unsafe_atomic_op2("orl %4, %3", oval, uaddr, oparg, Efault);
		break;
	case FUTEX_OP_ANDN:
		/* and-not: complement the operand, then a plain andl */
		unsafe_atomic_op2("andl %4, %3", oval, uaddr, ~oparg, Efault);
		break;
	case FUTEX_OP_XOR:
		unsafe_atomic_op2("xorl %4, %3", oval, uaddr, oparg, Efault);
		break;
	default:
		user_access_end();
		return -ENOSYS;
	}
	user_access_end();
	return 0;
Efault:
	/* fault target used by the unsafe_atomic_op*() macros */
	user_access_end();
	return -EFAULT;
}
| 89 | |
| 90 | static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, |
| 91 | u32 oldval, u32 newval) |
| 92 | { |
Olivier Deprez | 157378f | 2022-04-04 15:47:50 +0200 | [diff] [blame^] | 93 | int ret = 0; |
| 94 | |
| 95 | if (!user_access_begin(uaddr, sizeof(u32))) |
| 96 | return -EFAULT; |
| 97 | asm volatile("\n" |
| 98 | "1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n" |
| 99 | "2:\n" |
| 100 | "\t.section .fixup, \"ax\"\n" |
| 101 | "3:\tmov %3, %0\n" |
| 102 | "\tjmp 2b\n" |
| 103 | "\t.previous\n" |
| 104 | _ASM_EXTABLE_UA(1b, 3b) |
| 105 | : "+r" (ret), "=a" (oldval), "+m" (*uaddr) |
| 106 | : "i" (-EFAULT), "r" (newval), "1" (oldval) |
| 107 | : "memory" |
| 108 | ); |
| 109 | user_access_end(); |
| 110 | *uval = oldval; |
| 111 | return ret; |
Andrew Scull | b4b6d4a | 2019-01-02 15:54:55 +0000 | [diff] [blame] | 112 | } |
| 113 | |
| 114 | #endif |
| 115 | #endif /* _ASM_X86_FUTEX_H */ |