#ifndef _ASM_IA64_ATOMIC_H
#define _ASM_IA64_ATOMIC_H

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * NOTE: don't mess with the types below!  The "unsigned long" and
 * "int" types were carefully placed so as to ensure proper operation
 * of the macros.
 *
 * Copyright (C) 1998, 1999, 2002-2003 Hewlett-Packard Co
 *	David Mosberger-Tang <davidm@hpl.hp.com>
 */
#include <linux/types.h>

#include <asm/intrinsics.h>
#include <asm/system.h>

/*
 * On IA-64, counter must always be volatile to ensure that the
 * memory accesses are ordered.
 */
typedef struct { volatile __s32 counter; } atomic_t;
typedef struct { volatile __s64 counter; } atomic64_t;

#define ATOMIC_INIT(i)		((atomic_t) { (i) })
#define ATOMIC64_INIT(i)	((atomic64_t) { (i) })

#define atomic_read(v)		((v)->counter)
#define atomic64_read(v)	((v)->counter)

#define atomic_set(v,i)		(((v)->counter) = (i))
#define atomic64_set(v,i)	(((v)->counter) = (i))
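
/*
 * Illustrative usage only, not part of this header; the counter name
 * "nr_users" below is hypothetical.  A minimal sketch of init, set
 * and read:
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_users, 10);
 *	printk("%d users\n", atomic_read(&nr_users));
 */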

static __inline__ int
ia64_atomic_add (int i, atomic_t *v)
{
	__s32 old, new;
	CMPXCHG_BUGCHECK_DECL

	/*
	 * Keep retrying the compare-and-exchange until no other CPU
	 * changed the counter between our read and our update.
	 */
	do {
		CMPXCHG_BUGCHECK(v);
		old = atomic_read(v);
		new = old + i;
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old);
	return new;
}

static __inline__ long
ia64_atomic64_add (__s64 i, atomic64_t *v)
{
	__s64 old, new;
	CMPXCHG_BUGCHECK_DECL

	do {
		CMPXCHG_BUGCHECK(v);
		old = atomic64_read(v);
		new = old + i;
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old);
	return new;
}

static __inline__ int
ia64_atomic_sub (int i, atomic_t *v)
{
	__s32 old, new;
	CMPXCHG_BUGCHECK_DECL

	do {
		CMPXCHG_BUGCHECK(v);
		old = atomic_read(v);
		new = old - i;
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic_t)) != old);
	return new;
}

static __inline__ long
ia64_atomic64_sub (__s64 i, atomic64_t *v)
{
	__s64 old, new;
	CMPXCHG_BUGCHECK_DECL

	do {
		CMPXCHG_BUGCHECK(v);
		old = atomic64_read(v);
		new = old - i;
	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old);
	return new;
}

#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

#define atomic64_cmpxchg(v, old, new) \
	(cmpxchg(&((v)->counter), old, new))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
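
/*
 * Illustrative only, not part of this header: atomic_cmpxchg() is the
 * building block for operations the header does not provide.  A sketch
 * of an atomic maximum; atomic_max is a hypothetical name, not a
 * kernel API:
 *
 *	static inline void atomic_max(atomic_t *v, int i)
 *	{
 *		int old = atomic_read(v);
 *
 *		while (old < i) {
 *			int seen = atomic_cmpxchg(v, old, i);
 *			if (seen == old)
 *				break;
 *			old = seen;
 *		}
 *	}
 *
 * If the cmpxchg returns something other than the value we read, we
 * lost a race and retry with the fresher value.
 */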

/*
 * Atomically add a to v, unless v was already u; returns non-zero if
 * the add was done.
 */
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
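
/*
 * Illustrative only: atomic_inc_not_zero() is the usual way to take a
 * reference to an object that may be concurrently freed once its count
 * hits zero.  The names (obj, refs) below are hypothetical:
 *
 *	if (!atomic_inc_not_zero(&obj->refs))
 *		return NULL;
 *
 * If the count was already zero, the object is on its way out and must
 * not be touched.
 */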

static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

/*
 * IA-64's fetchadd instruction can only encode the increments
 * -16, -8, -4, -1, 1, 4, 8 and 16, so only compile-time constants
 * from that set go through ia64_fetch_and_add(); everything else
 * falls back to the cmpxchg loop.
 */
#define atomic_add_return(i,v)						\
({									\
	int __ia64_aar_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))		\
		? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)	\
		: ia64_atomic_add(__ia64_aar_i, v);			\
})

#define atomic64_add_return(i,v)					\
({									\
	long __ia64_aar_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_aar_i ==  1) || (__ia64_aar_i ==   4)		\
	     || (__ia64_aar_i ==  8) || (__ia64_aar_i ==  16)		\
	     || (__ia64_aar_i == -1) || (__ia64_aar_i ==  -4)		\
	     || (__ia64_aar_i == -8) || (__ia64_aar_i == -16)))		\
		? ia64_fetch_and_add(__ia64_aar_i, &(v)->counter)	\
		: ia64_atomic64_add(__ia64_aar_i, v);			\
})
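
/*
 * Illustrative only (the variable names are hypothetical): the same
 * macro takes two very different paths depending on the increment:
 *
 *	atomic_add_return(4, &count);	constant in the set: one fetchadd
 *	atomic_add_return(n, &count);	run-time n: cmpxchg loop
 */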

/*
 * Atomically add I to V and return TRUE if the resulting value is
 * negative.
 */
static __inline__ int
atomic_add_negative (int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}

static __inline__ int
atomic64_add_negative (__s64 i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}

#define atomic_sub_return(i,v)						\
({									\
	int __ia64_asr_i = (i);						\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==   1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==   8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i ==  -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i ==  -8) || (__ia64_asr_i == -16)))	\
		? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)	\
		: ia64_atomic_sub(__ia64_asr_i, v);			\
})

#define atomic64_sub_return(i,v)					\
({									\
	long __ia64_asr_i = (i);					\
	(__builtin_constant_p(i)					\
	 && (   (__ia64_asr_i ==   1) || (__ia64_asr_i ==   4)		\
	     || (__ia64_asr_i ==   8) || (__ia64_asr_i ==  16)		\
	     || (__ia64_asr_i ==  -1) || (__ia64_asr_i ==  -4)		\
	     || (__ia64_asr_i ==  -8) || (__ia64_asr_i == -16)))	\
		? ia64_fetch_and_add(-__ia64_asr_i, &(v)->counter)	\
		: ia64_atomic64_sub(__ia64_asr_i, v);			\
})

#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))
#define atomic64_dec_return(v)		atomic64_sub_return(1, (v))
#define atomic64_inc_return(v)		atomic64_add_return(1, (v))

#define atomic_sub_and_test(i,v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)
#define atomic64_sub_and_test(i,v)	(atomic64_sub_return((i), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_sub_return(1, (v)) == 0)
#define atomic64_inc_and_test(v)	(atomic64_add_return(1, (v)) == 0)
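
/*
 * Illustrative only: atomic_dec_and_test() is the classic way to drop
 * a reference and free the object on the last put.  The names (obj,
 * refs, free_obj) are hypothetical:
 *
 *	if (atomic_dec_and_test(&obj->refs))
 *		free_obj(obj);
 */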

#define atomic_add(i,v)			atomic_add_return((i), (v))
#define atomic_sub(i,v)			atomic_sub_return((i), (v))
#define atomic_inc(v)			atomic_add(1, (v))
#define atomic_dec(v)			atomic_sub(1, (v))

#define atomic64_add(i,v)		atomic64_add_return((i), (v))
#define atomic64_sub(i,v)		atomic64_sub_return((i), (v))
#define atomic64_inc(v)			atomic64_add(1, (v))
#define atomic64_dec(v)			atomic64_sub(1, (v))

/*
 * Atomic operations are already serializing on IA-64, so these only
 * need to be compiler barriers.
 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
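
/*
 * Illustrative only (hypothetical names): portable code that needs
 * ordering against an atomic_dec() writes it as
 *
 *	obj->dying = 1;
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->refs);
 *
 * which costs nothing extra here, since barrier() is all that is
 * required on IA-64.
 */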

#include <asm-generic/atomic.h>
#endif /* _ASM_IA64_ATOMIC_H */