// SPDX-License-Identifier: GPL-2.0
#include <linux/export.h>
#include <linux/spinlock.h>
#include <linux/atomic.h>

/*
 * This is an implementation of the notion of "decrement a
 * reference count, and return locked if it decremented to zero".
 *
 * NOTE NOTE NOTE! This is _not_ equivalent to
 *
 *	if (atomic_dec_and_test(&atomic)) {
 *		spin_lock(&lock);
 *		return 1;
 *	}
 *	return 0;
 *
 * because the spin-lock and the decrement must be
 * "atomic".
 */
| 21 | int _atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock) |
| 22 | { |
| 23 | /* Subtract 1 from counter unless that drops it to 0 (ie. it was 1) */ |
| 24 | if (atomic_add_unless(atomic, -1, 1)) |
| 25 | return 0; |
| 26 | |
| 27 | /* Otherwise do it the slow way */ |
| 28 | spin_lock(lock); |
| 29 | if (atomic_dec_and_test(atomic)) |
| 30 | return 1; |
| 31 | spin_unlock(lock); |
| 32 | return 0; |
| 33 | } |
| 34 | |
| 35 | EXPORT_SYMBOL(_atomic_dec_and_lock); |
| 36 | |
| 37 | int _atomic_dec_and_lock_irqsave(atomic_t *atomic, spinlock_t *lock, |
| 38 | unsigned long *flags) |
| 39 | { |
| 40 | /* Subtract 1 from counter unless that drops it to 0 (ie. it was 1) */ |
| 41 | if (atomic_add_unless(atomic, -1, 1)) |
| 42 | return 0; |
| 43 | |
| 44 | /* Otherwise do it the slow way */ |
| 45 | spin_lock_irqsave(lock, *flags); |
| 46 | if (atomic_dec_and_test(atomic)) |
| 47 | return 1; |
| 48 | spin_unlock_irqrestore(lock, *flags); |
| 49 | return 0; |
| 50 | } |
| 51 | EXPORT_SYMBOL(_atomic_dec_and_lock_irqsave); |