blob: 14174e838ad9c443c5b68e71ebb1195a701d9c7b [file] [log] [blame]
Becky Brucefeaf7cf2005-09-22 14:20:04 -05001#ifndef _ASM_POWERPC_ATOMIC_H_
2#define _ASM_POWERPC_ATOMIC_H_
3
Linus Torvalds1da177e2005-04-16 15:20:36 -07004/*
5 * PowerPC atomic operations
6 */
7
Matthew Wilcoxea4354672009-01-06 14:40:39 -08008#include <linux/types.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -07009
10#ifdef __KERNEL__
Nick Pigginf055aff2006-02-20 10:41:40 +010011#include <linux/compiler.h>
Becky Brucefeaf7cf2005-09-22 14:20:04 -050012#include <asm/synch.h>
David Gibson3ddfbcf2005-11-10 12:56:55 +110013#include <asm/asm-compat.h>
Mathieu Desnoyers2856f5e2007-05-08 00:34:38 -070014#include <asm/system.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -070015
/* Static initializer for atomic_t, e.g.: static atomic_t x = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i)		{ (i) }
Linus Torvalds1da177e2005-04-16 15:20:36 -070017
/*
 * Atomically read v->counter (a plain 32-bit load; atomicity comes from
 * naturally aligned word access).  Done in asm so the compiler treats it
 * as volatile and cannot cache the value; the %U1%X1 modifiers let gcc
 * pick an update/indexed form of lwz when that suits the address.
 * No memory barriers are implied.
 */
static __inline__ int atomic_read(const atomic_t *v)
{
	int t;

	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}
26
/*
 * Atomically set v->counter to i (plain 32-bit store, volatile via asm
 * so it cannot be elided or reordered by the compiler).  No barriers.
 */
static __inline__ void atomic_set(atomic_t *v, int i)
{
	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
Linus Torvalds1da177e2005-04-16 15:20:36 -070031
/*
 * Atomically add @a to @v->counter.
 *
 * Classic lwarx/stwcx. retry loop: the store-conditional fails (and we
 * branch back to 1:) if another CPU stole the reservation.  No memory
 * barriers: non-value-returning atomics are unordered on powerpc.
 * PPC405_ERR77() expands to a PPC405 erratum-#77 workaround before the
 * stwcx. on affected cores, empty elsewhere (see asm-compat.h).
 */
static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}
46
/*
 * Atomically add @a to @v->counter and return the new value.
 *
 * Value-returning atomics are fully ordered: PPC_ATOMIC_ENTRY_BARRIER /
 * PPC_ATOMIC_EXIT_BARRIER bracket the lwarx/stwcx. loop, and "memory"
 * is clobbered so the compiler cannot move accesses across it.
 */
static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
65
/* True if v->counter is negative after atomically adding a. */
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
67
/*
 * Atomically subtract @a from @v->counter.
 * Same unordered lwarx/stwcx. retry loop as atomic_add(); note
 * subf computes %0 = %0 - %2 (subtract-from operand order).
 */
static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}
82
/*
 * Atomically subtract @a from @v->counter and return the new value.
 * Fully ordered (entry/exit barriers + "memory" clobber).
 */
static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
101
/*
 * Atomically increment @v->counter.  Unordered (no barriers).
 * addic is used (add immediate carrying), so XER[CA] is modified --
 * hence the "xer" clobber.
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}
116
/*
 * Atomically increment @v->counter and return the new value.
 * Fully ordered; "xer" clobbered because addic updates the carry bit.
 */
static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}
135
/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
145
/*
 * Atomically decrement @v->counter (implemented as addic of -1, which
 * is why "xer" is clobbered).  Unordered.  The trailing backslash after
 * PPC405_ERR77() is a harmless historical line splice.
 */
static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)\
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}
160
/*
 * Atomically decrement @v->counter and return the new value.
 * Fully ordered; "xer" clobbered by addic.
 */
static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}
179
/* cmpxchg/xchg on the embedded counter; ordering semantics come from
 * the generic cmpxchg()/xchg() implementations in asm/system.h. */
#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
Nick Piggin4a6dae62005-11-13 16:07:24 -0800182
/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	/*
	 * On success the loop leaves t = old + a; the "subf %0,%2,%0"
	 * after the exit barrier undoes the add so we return the OLD
	 * value.  On the equal-to-@u path we branch straight to 2: with
	 * t still holding the old (== @u) value.
	 */
	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# __atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t;
}
214
/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ int atomic_inc_not_zero(atomic_t *v)
{
	/* t1 holds the old value (the return), t2 the incremented value
	 * that gets stored -- so the zero test and the store use
	 * separate registers. */
	int t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_inc_not_zero\n\
	cmpwi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}
/* Self-#define tells generic code the arch provides its own version. */
#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))
Nick Piggin8426e1f2005-11-13 16:07:25 -0800245
/* True when the result of the atomic op is exactly zero. */
#define atomic_sub_and_test(a, v) (atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v) (atomic_dec_return((v)) == 0)
248
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	/*
	 * cmpwi records old<1 *before* the addi; blt- then skips the
	 * store, so t = old - 1 is returned either way.  The "b"
	 * constraint (any GPR except r0) is required because r0 in the
	 * RA slot of addi means the literal 0, not the register.
	 */
	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
275
/* Plain inc/dec are unordered on powerpc, so these fences must be a
 * full smp_mb(), not a no-op. */
#define smp_mb__before_atomic_dec()     smp_mb()
#define smp_mb__after_atomic_dec()      smp_mb()
#define smp_mb__before_atomic_inc()     smp_mb()
#define smp_mb__after_atomic_inc()      smp_mb()
Linus Torvalds1da177e2005-04-16 15:20:36 -0700280
Stephen Rothwell06a98db2005-11-10 15:51:14 +1100281#ifdef __powerpc64__
282
/* Static initializer for atomic64_t (64-bit ops: __powerpc64__ only). */
#define ATOMIC64_INIT(i)	{ (i) }
284
/*
 * Atomically read v->counter: a single aligned 64-bit ld, made
 * volatile by the asm.  No barriers implied.
 */
static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}
293
/* Atomically set v->counter: a single volatile 64-bit std. No barriers. */
static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}
Stephen Rothwell06a98db2005-11-10 15:51:14 +1100298
/*
 * Atomically add @a to the 64-bit @v->counter via an ldarx/stdcx.
 * retry loop.  Unordered.  No PPC405_ERR77 here: the 405 erratum
 * applies only to 32-bit cores which never run this 64-bit code.
 */
static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}
312
/*
 * Atomically add @a to @v->counter and return the new 64-bit value.
 * Fully ordered via the entry/exit barriers.
 */
static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
330
/* True if v->counter is negative after atomically adding a. */
#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)
332
/*
 * Atomically subtract @a from the 64-bit @v->counter.  Unordered
 * ldarx/stdcx. retry loop; subf computes %0 = %0 - %2.
 */
static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (a), "r" (&v->counter)
	: "cc");
}
346
/*
 * Atomically subtract @a from @v->counter and return the new value.
 * Fully ordered.
 */
static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}
364
/*
 * Atomically increment the 64-bit @v->counter.  Unordered; addic
 * updates the carry bit, hence the "xer" clobber.
 */
static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}
378
/*
 * Atomically increment @v->counter and return the new value.
 * Fully ordered; "xer" clobbered by addic.
 */
static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}
396
/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
406
/*
 * Atomically decrement the 64-bit @v->counter (addic of -1, which
 * touches XER[CA] -- hence "xer").  Unordered.
 */
static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}
420
/*
 * Atomically decrement @v->counter and return the new value.
 * Fully ordered; "xer" clobbered by addic.
 */
static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}
438
/* True when the result of the atomic op is exactly zero. */
#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v) 	(atomic64_dec_return((v)) == 0)
441
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	/*
	 * Unlike the 32-bit version this uses addic. (record form): the
	 * decremented result's own sign drives blt-, so the store is
	 * skipped whenever old - 1 < 0, i.e. old <= 0.
	 */
	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}
465
/* 64-bit cmpxchg/xchg on the embedded counter; ordering comes from the
 * generic cmpxchg()/xchg() implementations. */
#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
468
469/**
470 * atomic64_add_unless - add unless the number is a given value
471 * @v: pointer of type atomic64_t
472 * @a: the amount to add to v...
473 * @u: ...unless v is equal to u.
474 *
475 * Atomically adds @a to @v, so long as it was not @u.
Arun Sharmaf24219b2011-07-26 16:09:07 -0700476 * Returns the old value of @v.
Mathieu Desnoyers41806ef2007-01-25 11:15:52 -0500477 */
478static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
479{
480 long t;
481
482 __asm__ __volatile__ (
Benjamin Herrenschmidtb97021f2011-11-15 17:11:27 +0000483 PPC_ATOMIC_ENTRY_BARRIER
Arun Sharmaf24219b2011-07-26 16:09:07 -0700484"1: ldarx %0,0,%1 # __atomic_add_unless\n\
Mathieu Desnoyers41806ef2007-01-25 11:15:52 -0500485 cmpd 0,%0,%3 \n\
486 beq- 2f \n\
487 add %0,%2,%0 \n"
488" stdcx. %0,0,%1 \n\
489 bne- 1b \n"
Benjamin Herrenschmidtb97021f2011-11-15 17:11:27 +0000490 PPC_ATOMIC_EXIT_BARRIER
Mathieu Desnoyers41806ef2007-01-25 11:15:52 -0500491" subf %0,%2,%0 \n\
4922:"
493 : "=&r" (t)
494 : "r" (&v->counter), "r" (a), "r" (u)
495 : "cc", "memory");
496
497 return t != u;
498}
499
/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ long atomic64_inc_not_zero(atomic64_t *v)
{
	/* t1 = old value (returned), t2 = incremented value stored. */
	long t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_inc_not_zero\n\
	cmpdi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n\
	stdcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}
Mathieu Desnoyers41806ef2007-01-25 11:15:52 -0500528
Stephen Rothwell06a98db2005-11-10 15:51:14 +1100529#endif /* __powerpc64__ */
530
Linus Torvalds1da177e2005-04-16 15:20:36 -0700531#endif /* __KERNEL__ */
Becky Brucefeaf7cf2005-09-22 14:20:04 -0500532#endif /* _ASM_POWERPC_ATOMIC_H_ */