/* SPDX-License-Identifier: GPL-2.0 */
| 2 | #ifndef _ASM_GENERIC_FUTEX_H |
| 3 | #define _ASM_GENERIC_FUTEX_H |
| 4 | |
| 5 | #include <linux/futex.h> |
| 6 | #include <linux/uaccess.h> |
| 7 | #include <asm/errno.h> |
| 8 | |
| 9 | #ifndef CONFIG_SMP |
| 10 | /* |
| 11 | * The following implementation only for uniprocessor machines. |
| 12 | * It relies on preempt_disable() ensuring mutual exclusion. |
| 13 | * |
| 14 | */ |
| 15 | |
/**
 * arch_futex_atomic_op_inuser() - Atomic arithmetic operation on a user-space
 *			  futex word, returning the previous value.
 *
 * @op:	the operation to perform (FUTEX_OP_SET, FUTEX_OP_ADD, FUTEX_OP_OR,
 *	FUTEX_OP_ANDN or FUTEX_OP_XOR)
 * @oparg:	argument to @op
 * @oval:	on success, receives the futex value that was at @uaddr before
 *	the operation was applied
 * @uaddr:	pointer to the user space futex word
 *
 * Return:
 * 0 - On success
 * <0 - On error (-EFAULT on a faulting user access, -ENOSYS for an
 *      unknown @op)
 */
static inline int
arch_futex_atomic_op_inuser(int op, u32 oparg, int *oval, u32 __user *uaddr)
{
	int oldval, ret;
	u32 tmp;

	/*
	 * !SMP: disabling preemption is sufficient for mutual exclusion
	 * (see the comment at the top of this #ifndef branch); page faults
	 * are disabled so get_user()/put_user() fail fast instead of
	 * sleeping while we hold the "lock".
	 */
	preempt_disable();
	pagefault_disable();

	ret = -EFAULT;
	if (unlikely(get_user(oldval, uaddr) != 0))
		goto out_pagefault_enable;

	ret = 0;
	tmp = oldval;

	switch (op) {
	case FUTEX_OP_SET:
		tmp = oparg;
		break;
	case FUTEX_OP_ADD:
		tmp += oparg;
		break;
	case FUTEX_OP_OR:
		tmp |= oparg;
		break;
	case FUTEX_OP_ANDN:
		tmp &= ~oparg;
		break;
	case FUTEX_OP_XOR:
		tmp ^= oparg;
		break;
	default:
		ret = -ENOSYS;
	}

	/* Only write back when @op was recognized and the read succeeded. */
	if (ret == 0 && unlikely(put_user(tmp, uaddr) != 0))
		ret = -EFAULT;

out_pagefault_enable:
	pagefault_enable();
	preempt_enable();

	/* Hand the pre-operation value back to the caller on success. */
	if (ret == 0)
		*oval = oldval;

	return ret;
}
| 76 | |
| 77 | /** |
| 78 | * futex_atomic_cmpxchg_inatomic() - Compare and exchange the content of the |
| 79 | * uaddr with newval if the current value is |
| 80 | * oldval. |
| 81 | * @uval: pointer to store content of @uaddr |
| 82 | * @uaddr: pointer to user space address |
| 83 | * @oldval: old value |
| 84 | * @newval: new value to store to @uaddr |
| 85 | * |
| 86 | * Return: |
| 87 | * 0 - On success |
| 88 | * <0 - On error |
| 89 | */ |
| 90 | static inline int |
| 91 | futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, |
| 92 | u32 oldval, u32 newval) |
| 93 | { |
| 94 | u32 val; |
| 95 | |
| 96 | preempt_disable(); |
| 97 | if (unlikely(get_user(val, uaddr) != 0)) { |
| 98 | preempt_enable(); |
| 99 | return -EFAULT; |
| 100 | } |
| 101 | |
| 102 | if (val == oldval && unlikely(put_user(newval, uaddr) != 0)) { |
| 103 | preempt_enable(); |
| 104 | return -EFAULT; |
| 105 | } |
| 106 | |
| 107 | *uval = val; |
| 108 | preempt_enable(); |
| 109 | |
| 110 | return 0; |
| 111 | } |
| 112 | |
| 113 | #else |
| 114 | static inline int |
| 115 | arch_futex_atomic_op_inuser(int op, u32 oparg, int *oval, u32 __user *uaddr) |
| 116 | { |
| 117 | int oldval = 0, ret; |
| 118 | |
| 119 | pagefault_disable(); |
| 120 | |
| 121 | switch (op) { |
| 122 | case FUTEX_OP_SET: |
| 123 | case FUTEX_OP_ADD: |
| 124 | case FUTEX_OP_OR: |
| 125 | case FUTEX_OP_ANDN: |
| 126 | case FUTEX_OP_XOR: |
| 127 | default: |
| 128 | ret = -ENOSYS; |
| 129 | } |
| 130 | |
| 131 | pagefault_enable(); |
| 132 | |
| 133 | if (!ret) |
| 134 | *oval = oldval; |
| 135 | |
| 136 | return ret; |
| 137 | } |
| 138 | |
/*
 * Generic SMP fallback: a compare-and-exchange on user memory cannot be
 * implemented portably, so this stub always fails; architectures must
 * supply their own SMP-safe version.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	return -ENOSYS;
}
| 145 | |
| 146 | #endif /* CONFIG_SMP */ |
| 147 | #endif |