/* $Id: atomic.h,v 1.22 2001/07/11 23:56:07 davem Exp $
 * atomic.h: Thankfully the V9 is at least reasonable for this
 *           stuff.
 *
 * Copyright (C) 1996, 1997, 2000 David S. Miller (davem@redhat.com)
 */

#ifndef __ARCH_SPARC64_ATOMIC__
#define __ARCH_SPARC64_ATOMIC__

#include <linux/config.h>
#include <linux/types.h>

typedef struct { volatile int counter; } atomic_t;
typedef struct { volatile __s64 counter; } atomic64_t;

#define ATOMIC_INIT(i)		{ (i) }
#define ATOMIC64_INIT(i)	{ (i) }
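
/* Initialization sketch (illustrative only, not part of this header):
 * counters may be initialized statically with the *_INIT macros, or at
 * run time with atomic_set()/atomic64_set() below.
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 *	static atomic64_t total_bytes = ATOMIC64_INIT(0);
 */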

#define atomic_read(v)		((v)->counter)
#define atomic64_read(v)	((v)->counter)

#define atomic_set(v, i)	(((v)->counter) = (i))
#define atomic64_set(v, i)	(((v)->counter) = (i))
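
/* Usage sketch (illustrative only): atomic_read()/atomic_set() are
 * plain loads and stores of the volatile counter; they imply no memory
 * barrier and are not read-modify-write operations.
 *
 *	atomic_t v;
 *	int n;
 *
 *	atomic_set(&v, 10);
 *	n = atomic_read(&v);	(n == 10, no barrier implied)
 */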

extern void atomic_add(int, atomic_t *);
extern void atomic64_add(int, atomic64_t *);
extern void atomic_sub(int, atomic_t *);
extern void atomic64_sub(int, atomic64_t *);

extern int atomic_add_ret(int, atomic_t *);
extern int atomic64_add_ret(int, atomic64_t *);
extern int atomic_sub_ret(int, atomic_t *);
extern int atomic64_sub_ret(int, atomic64_t *);

#define atomic_dec_return(v) atomic_sub_ret(1, v)
#define atomic64_dec_return(v) atomic64_sub_ret(1, v)

#define atomic_inc_return(v) atomic_add_ret(1, v)
#define atomic64_inc_return(v) atomic64_add_ret(1, v)

#define atomic_sub_return(i, v) atomic_sub_ret(i, v)
#define atomic64_sub_return(i, v) atomic64_sub_ret(i, v)

#define atomic_add_return(i, v) atomic_add_ret(i, v)
#define atomic64_add_return(i, v) atomic64_add_ret(i, v)
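
/* Return-value sketch (illustrative only): every *_ret/*_return
 * variant yields the counter value *after* the operation.
 *
 *	atomic_t v = ATOMIC_INIT(0);
 *	int n;
 *
 *	n = atomic_inc_return(&v);	(v == 1, n == 1)
 *	n = atomic_sub_return(3, &v);	(v == -2, n == -2)
 */
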
 | 47 |  | 
/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

#define atomic_sub_and_test(i, v) (atomic_sub_ret(i, v) == 0)
#define atomic64_sub_and_test(i, v) (atomic64_sub_ret(i, v) == 0)

#define atomic_dec_and_test(v) (atomic_sub_ret(1, v) == 0)
#define atomic64_dec_and_test(v) (atomic64_sub_ret(1, v) == 0)
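
/* Common reference-count pattern built on dec_and_test (illustrative
 * sketch; obj and release_object() are hypothetical, not part of this
 * header):
 *
 *	atomic_inc(&obj->refcnt);			(take a reference)
 *	...
 *	if (atomic_dec_and_test(&obj->refcnt))		(drop it)
 *		release_object(obj);			(last put frees)
 */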

#define atomic_inc(v) atomic_add(1, v)
#define atomic64_inc(v) atomic64_add(1, v)

#define atomic_dec(v) atomic_sub(1, v)
#define atomic64_dec(v) atomic64_sub(1, v)

#define atomic_add_negative(i, v) (atomic_add_ret(i, v) < 0)
#define atomic64_add_negative(i, v) (atomic64_add_ret(i, v) < 0)
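
/* add_negative sketch (illustrative only): true iff the counter is
 * negative after the add.
 *
 *	atomic_t v = ATOMIC_INIT(1);
 *
 *	atomic_add_negative(-2, &v);	(returns 1, v == -1)
 *	atomic_add_negative(2, &v);	(returns 0, v == 1)
 */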

/* These atomic ops are not themselves full barriers under the V9
 * memory model; callers needing ordering use the hooks below around
 * atomic_inc()/atomic_dec().  On UP a compiler barrier suffices. */
#ifdef CONFIG_SMP
#define smp_mb__before_atomic_dec()	membar("#StoreLoad | #LoadLoad")
#define smp_mb__after_atomic_dec()	membar("#StoreLoad | #StoreStore")
#define smp_mb__before_atomic_inc()	membar("#StoreLoad | #LoadLoad")
#define smp_mb__after_atomic_inc()	membar("#StoreLoad | #StoreStore")
#else
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
#endif
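
/* Barrier usage sketch (illustrative only; obj is hypothetical): order
 * a prior store before a subsequent atomic_dec() whose return value is
 * not used.
 *
 *	obj->dead = 1;
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->count);
 */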

#endif /* !(__ARCH_SPARC64_ATOMIC__) */