/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_GENERIC_BITOPS_ATOMIC_H_
#define _ASM_GENERIC_BITOPS_ATOMIC_H_

#include <linux/atomic.h>
#include <linux/compiler.h>
#include <asm/barrier.h>

/*
 * Implementation of atomic bitops using atomic-fetch ops.
 * See Documentation/atomic_bitops.txt for details.
 */
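
/*
 * Each op below locates its bit by splitting the bit number with
 * BIT_WORD() and BIT_MASK(): for example, assuming BITS_PER_LONG == 64,
 * nr = 70 gives BIT_WORD(70) == 1 and BIT_MASK(70) == 1UL << 6, so the
 * op acts on bit 6 of p[1].
 */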

static __always_inline void set_bit(unsigned int nr, volatile unsigned long *p)
{
	p += BIT_WORD(nr);
	atomic_long_or(BIT_MASK(nr), (atomic_long_t *)p);
}

static __always_inline void clear_bit(unsigned int nr, volatile unsigned long *p)
{
	p += BIT_WORD(nr);
	atomic_long_andnot(BIT_MASK(nr), (atomic_long_t *)p);
}

static __always_inline void change_bit(unsigned int nr, volatile unsigned long *p)
{
	p += BIT_WORD(nr);
	atomic_long_xor(BIT_MASK(nr), (atomic_long_t *)p);
}
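
/*
 * The test_and_*() variants below return the previous value of the bit
 * (0 or 1). When they reach the atomic RMW, that op is fully ordered
 * (see Documentation/atomic_bitops.txt); the READ_ONCE() early exits in
 * test_and_set_bit() and test_and_clear_bit() provide no such ordering.
 */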

static inline int test_and_set_bit(unsigned int nr, volatile unsigned long *p)
{
	long old;
	unsigned long mask = BIT_MASK(nr);

	p += BIT_WORD(nr);
	/*
	 * Fast path: if the bit is already set, skip the atomic RMW and
	 * the cacheline contention it would cause. This early return
	 * performs no atomic operation and hence provides no ordering.
	 */
	if (READ_ONCE(*p) & mask)
		return 1;

	old = atomic_long_fetch_or(mask, (atomic_long_t *)p);
	return !!(old & mask);
}

static inline int test_and_clear_bit(unsigned int nr, volatile unsigned long *p)
{
	long old;
	unsigned long mask = BIT_MASK(nr);

	p += BIT_WORD(nr);
	/* Likewise, bail out early if the bit is already clear. */
	if (!(READ_ONCE(*p) & mask))
		return 0;

	old = atomic_long_fetch_andnot(mask, (atomic_long_t *)p);
	return !!(old & mask);
}

static inline int test_and_change_bit(unsigned int nr, volatile unsigned long *p)
{
	long old;
	unsigned long mask = BIT_MASK(nr);

	p += BIT_WORD(nr);
	/* No early exit here: a toggle always has to perform the RMW. */
	old = atomic_long_fetch_xor(mask, (atomic_long_t *)p);
	return !!(old & mask);
}
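
/*
 * Illustrative sketch, not part of the original header: a hypothetical
 * helper showing how these ops compose into a minimal test-and-set
 * spinlock over one bit of a caller-supplied word. The loop only exits
 * through the fully ordered atomic RMW path of test_and_set_bit(), so
 * lock acquisition is ordered; the matching unlock would need release
 * semantics (clear_bit_unlock()), which plain clear_bit() does not give.
 */
static inline void example_bit_lock(unsigned int nr, volatile unsigned long *p)
{
	/* Spin until we are the caller that flips the bit 0 -> 1. */
	while (test_and_set_bit(nr, p))
		;
}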

#endif /* _ASM_GENERIC_BITOPS_ATOMIC_H_ */