// SPDX-License-Identifier: GPL-2.0
#include <linux/export.h>
#include <linux/spinlock.h>
#include <linux/atomic.h>

/*
 * This is an implementation of the notion of "decrement a
 * reference count, and return locked if it decremented to zero".
 *
 * NOTE NOTE NOTE! This is _not_ equivalent to
 *
 *	if (atomic_dec_and_test(&atomic)) {
 *		spin_lock(&lock);
 *		return 1;
 *	}
 *	return 0;
 *
 * because the spin-lock and the decrement must be
 * "atomic".
 */
 | |
| int _atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock)
 | |
| {
 | |
| 	/* Subtract 1 from counter unless that drops it to 0 (ie. it was 1) */
 | |
| 	if (atomic_add_unless(atomic, -1, 1))
 | |
| 		return 0;
 | |
| 
 | |
| 	/* Otherwise do it the slow way */
 | |
| 	spin_lock(lock);
 | |
| 	if (atomic_dec_and_test(atomic))
 | |
| 		return 1;
 | |
| 	spin_unlock(lock);
 | |
| 	return 0;
 | |
| }
 | |
| 
 | |
| EXPORT_SYMBOL(_atomic_dec_and_lock);
| int _atomic_dec_and_lock_irqsave(atomic_t *atomic, spinlock_t *lock,
 | |
| 				 unsigned long *flags)
 | |
| {
 | |
| 	/* Subtract 1 from counter unless that drops it to 0 (ie. it was 1) */
 | |
| 	if (atomic_add_unless(atomic, -1, 1))
 | |
| 		return 0;
 | |
| 
 | |
| 	/* Otherwise do it the slow way */
 | |
| 	spin_lock_irqsave(lock, *flags);
 | |
| 	if (atomic_dec_and_test(atomic))
 | |
| 		return 1;
 | |
| 	spin_unlock_irqrestore(lock, *flags);
 | |
| 	return 0;
 | |
| }
 | |
| EXPORT_SYMBOL(_atomic_dec_and_lock_irqsave);
| int _atomic_dec_and_raw_lock(atomic_t *atomic, raw_spinlock_t *lock)
 | |
| {
 | |
| 	/* Subtract 1 from counter unless that drops it to 0 (ie. it was 1) */
 | |
| 	if (atomic_add_unless(atomic, -1, 1))
 | |
| 		return 0;
 | |
| 
 | |
| 	/* Otherwise do it the slow way */
 | |
| 	raw_spin_lock(lock);
 | |
| 	if (atomic_dec_and_test(atomic))
 | |
| 		return 1;
 | |
| 	raw_spin_unlock(lock);
 | |
| 	return 0;
 | |
| }
 | |
| EXPORT_SYMBOL(_atomic_dec_and_raw_lock);
| int _atomic_dec_and_raw_lock_irqsave(atomic_t *atomic, raw_spinlock_t *lock,
 | |
| 				     unsigned long *flags)
 | |
| {
 | |
| 	/* Subtract 1 from counter unless that drops it to 0 (ie. it was 1) */
 | |
| 	if (atomic_add_unless(atomic, -1, 1))
 | |
| 		return 0;
 | |
| 
 | |
| 	/* Otherwise do it the slow way */
 | |
| 	raw_spin_lock_irqsave(lock, *flags);
 | |
| 	if (atomic_dec_and_test(atomic))
 | |
| 		return 1;
 | |
| 	raw_spin_unlock_irqrestore(lock, *flags);
 | |
| 	return 0;
 | |
| }
 | |
| EXPORT_SYMBOL(_atomic_dec_and_raw_lock_irqsave);