// SPDX-License-Identifier: GPL-2.0
/*
 * bitops.c: atomic operations which got too long to be inlined all over
 * the place.
 *
 * Copyright 1999 Philipp Rumpf (prumpf@tux.org)
 * Copyright 2000 Grant Grundler (grundler@cup.hp.com)
 */

#include <linux/kernel.h>
#include <linux/spinlock.h>
#include <linux/atomic.h>

#ifdef CONFIG_SMP
arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned = {
	[0 ... (ATOMIC_HASH_SIZE-1)] = __ARCH_SPIN_LOCK_UNLOCKED
};
#endif
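
/*
 * On SMP parisc every helper in this file serializes through one of
 * the spinlocks above: the architecture's only native atomic primitive
 * is LDCW, so arbitrary read-modify-write sequences are protected by a
 * lock chosen by hashing the target address.  A minimal sketch of the
 * mapping (the real ATOMIC_HASH() lives in <asm/atomic.h>; the exact
 * shift and mask here are illustrative, not authoritative):
 *
 *	#define ATOMIC_HASH(a) \
 *		(&__atomic_hash[(((unsigned long)(a)) / L1_CACHE_BYTES) & \
 *				(ATOMIC_HASH_SIZE - 1)])
 *
 * _atomic_spin_lock_irqsave(ptr, flags) takes ATOMIC_HASH(ptr) with
 * interrupts disabled, so any two operations on the same word always
 * contend on the same lock.
 */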

#ifdef CONFIG_64BIT
unsigned long __xchg64(unsigned long x, volatile unsigned long *ptr)
{
	unsigned long temp, flags;

	_atomic_spin_lock_irqsave(ptr, flags);
	temp = *ptr;
	*ptr = x;
	_atomic_spin_unlock_irqrestore(ptr, flags);
	return temp;
}
#endif
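
/*
 * None of the __xchg*() helpers is called directly; the xchg() macro
 * in <asm/cmpxchg.h> dispatches on operand size at compile time,
 * roughly like this (a sketch, not the verbatim header):
 *
 *	switch (sizeof(*ptr)) {
 *	case 8: return __xchg64(x, ptr);	// CONFIG_64BIT only
 *	case 4: return __xchg32(x, ptr);
 *	case 1: return __xchg8(x, ptr);
 *	}
 */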

unsigned long __xchg32(int x, volatile int *ptr)
{
	unsigned long flags;
	long temp;

	_atomic_spin_lock_irqsave(ptr, flags);
	temp = (long) *ptr;	/* XXX - sign extension wanted? */
	*ptr = x;
	_atomic_spin_unlock_irqrestore(ptr, flags);
	return (unsigned long)temp;
}
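
/*
 * Because the old value is loaded through a signed long, a 64-bit
 * kernel hands back a sign-extended copy of the 32-bit word; the XXX
 * above questions whether that is intended.  Hypothetical illustration
 * (not a caller that exists in this file):
 *
 *	int flag = -1;
 *	unsigned long old = __xchg32(0, &flag);	// old == 0xffffffffffffffff
 */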


unsigned long __xchg8(char x, volatile char *ptr)
{
	unsigned long flags;
	long temp;

	_atomic_spin_lock_irqsave(ptr, flags);
	temp = (long) *ptr;	/* XXX - sign extension wanted? */
	*ptr = x;
	_atomic_spin_unlock_irqrestore(ptr, flags);
	return (unsigned long)temp;
}


u64 __cmpxchg_u64(volatile u64 *ptr, u64 old, u64 new)
{
	unsigned long flags;
	u64 prev;

	_atomic_spin_lock_irqsave(ptr, flags);
	if ((prev = *ptr) == old)
		*ptr = new;
	_atomic_spin_unlock_irqrestore(ptr, flags);
	return prev;
}
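
/*
 * cmpxchg semantics: the previous value is returned unconditionally,
 * and the store happened iff that return value equals 'old'.  Note
 * that these helpers are atomic only against each other (every access
 * to a given word hashes to the same lock), not against plain stores
 * that bypass the hashed locks.
 */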

unsigned long __cmpxchg_u32(volatile unsigned int *ptr, unsigned int old, unsigned int new)
{
	unsigned long flags;
	unsigned int prev;

	_atomic_spin_lock_irqsave(ptr, flags);
	if ((prev = *ptr) == old)
		*ptr = new;
	_atomic_spin_unlock_irqrestore(ptr, flags);
	return (unsigned long)prev;
}

u8 __cmpxchg_u8(volatile u8 *ptr, u8 old, u8 new)
{
	unsigned long flags;
	u8 prev;

	_atomic_spin_lock_irqsave(ptr, flags);
	if ((prev = *ptr) == old)
		*ptr = new;
	_atomic_spin_unlock_irqrestore(ptr, flags);
	return prev;
}
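
/*
 * Typical use is a compare-and-swap retry loop.  A minimal sketch of a
 * hypothetical caller (real code would go through the generic
 * cmpxchg() macro rather than calling the helper directly):
 *
 *	unsigned int old, new;
 *	do {
 *		old = READ_ONCE(*counter);
 *		new = old + 1;
 *	} while (__cmpxchg_u32(counter, old, new) != old);
 */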