#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

typedef struct { int counter; } atomic_t;

#ifdef __KERNEL__
#include <linux/compiler.h>
#include <asm/synch.h>
#include <asm/asm-compat.h>
#include <asm/system.h>

#define ATOMIC_INIT(i)		{ (i) }
| Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 17 |  | 
| Segher Boessenkool | 9f0cbea | 2007-08-11 10:15:30 +1000 | [diff] [blame] | 18 | static __inline__ int atomic_read(const atomic_t *v) | 
 | 19 | { | 
 | 20 | 	int t; | 
 | 21 |  | 
 | 22 | 	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter)); | 
 | 23 |  | 
 | 24 | 	return t; | 
 | 25 | } | 
 | 26 |  | 
 | 27 | static __inline__ void atomic_set(atomic_t *v, int i) | 
 | 28 | { | 
 | 29 | 	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i)); | 
 | 30 | } | 
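
/*
 * atomic_read() and atomic_set() are plain loads and stores with no
 * implied ordering; the %U/%X operand modifiers merely let the compiler
 * use update or indexed address forms for the "m" operand.
 *
 * Illustrative usage (the counter name below is hypothetical, not part
 * of this header):
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_users, 0);
 *	printk(KERN_DEBUG "users: %d\n", atomic_read(&nr_users));
 */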

static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}
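
/*
 * The non-value-returning operations follow the usual PowerPC
 * load-reserve/store-conditional pattern: lwarx loads the counter and
 * takes a reservation, the update is computed in a register, and stwcx.
 * succeeds only if the reservation is still held, otherwise bne- retries
 * from the top.  PPC405_ERR77() expands to the erratum #77 workaround
 * (a dcbt ahead of the stwcx.) on affected 405 cores and to nothing
 * elsewhere.
 */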

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
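
/*
 * The *_return variants are expected to behave as full memory barriers.
 * LWSYNC_ON_SMP emits a sync (lwsync where available) before the
 * reservation and ISYNC_ON_SMP emits an isync after the successful
 * store-conditional; both are empty on non-SMP builds, and the "memory"
 * clobber stops the compiler from reordering accesses around the
 * sequence.
 */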

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
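
/*
 * atomic_cmpxchg() and atomic_xchg() are thin wrappers around the
 * generic cmpxchg()/xchg() on the counter word.  A sketch of the usual
 * compare-and-swap update loop (the variable and flag are hypothetical):
 *
 *	int old, new;
 *
 *	do {
 *		old = atomic_read(&v);
 *		new = old | SOME_FLAG;
 *	} while (atomic_cmpxchg(&v, old, new) != old);
 */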

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	ISYNC_ON_SMP
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
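
/*
 * atomic_add_unless()/atomic_inc_not_zero() are typically used to take a
 * reference only while an object is still live, e.g. (hypothetical
 * structure, not defined in this header):
 *
 *	if (!atomic_inc_not_zero(&obj->refcount))
 *		return NULL;
 *
 * A zero return means the count was already zero and no reference was
 * taken.
 */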

#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
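
/*
 * atomic_dec_if_positive() suits "take one if any are left" paths, for
 * example (hypothetical counter, not part of this header):
 *
 *	if (atomic_dec_if_positive(&slots_free) < 0)
 *		return -EBUSY;
 *
 * A negative return means the count was already zero (or below) and
 * nothing was consumed.
 */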

#define smp_mb__before_atomic_dec()     smp_mb()
#define smp_mb__after_atomic_dec()      smp_mb()
#define smp_mb__before_atomic_inc()     smp_mb()
#define smp_mb__after_atomic_inc()      smp_mb()
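
/*
 * atomic_inc() and atomic_dec() themselves imply no memory barrier, so
 * callers needing ordering bracket them with the macros above; on
 * PowerPC each of those macros is a full smp_mb().
 */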

#ifdef __powerpc64__

typedef struct { long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }
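
/*
 * The 64-bit operations below mirror the 32-bit ones, using the
 * doubleword reservation instructions ldarx/stdcx. in place of
 * lwarx/stwcx.; the PPC405 erratum workaround is not needed here since
 * the 405 is a 32-bit core.
 */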

static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long t;

	__asm__ __volatile__ (
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_add_unless\n\
	cmpd	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
"	stdcx.	%0,0,%1 \n\
	bne-	1b \n"
	ISYNC_ON_SMP
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#endif /* __powerpc64__ */

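/*
 * asm-generic/atomic.h supplies the atomic_long_t wrappers layered on
 * top of the 32-bit or 64-bit operations defined above.
 */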
#include <asm-generic/atomic.h>
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */