blob: 9c0b372a46e1ffb4f0160857ff0f21192f401692 [file] [log] [blame]
Becky Brucefeaf7cf2005-09-22 14:20:04 -05001#ifndef _ASM_POWERPC_ATOMIC_H_
2#define _ASM_POWERPC_ATOMIC_H_
3
Linus Torvalds1da177e2005-04-16 15:20:36 -07004/*
5 * PowerPC atomic operations
6 */
7
Linus Torvalds1da177e2005-04-16 15:20:36 -07008typedef struct { volatile int counter; } atomic_t;
9
10#ifdef __KERNEL__
Becky Brucefeaf7cf2005-09-22 14:20:04 -050011#include <asm/synch.h>
David Gibson3ddfbcf2005-11-10 12:56:55 +110012#include <asm/asm-compat.h>
Linus Torvalds1da177e2005-04-16 15:20:36 -070013
Becky Brucefeaf7cf2005-09-22 14:20:04 -050014#define ATOMIC_INIT(i) { (i) }
Linus Torvalds1da177e2005-04-16 15:20:36 -070015
16#define atomic_read(v) ((v)->counter)
17#define atomic_set(v,i) (((v)->counter) = (i))
18
Linus Torvalds1da177e2005-04-16 15:20:36 -070019static __inline__ void atomic_add(int a, atomic_t *v)
20{
21 int t;
22
23 __asm__ __volatile__(
24"1: lwarx %0,0,%3 # atomic_add\n\
25 add %0,%2,%0\n"
26 PPC405_ERR77(0,%3)
27" stwcx. %0,0,%3 \n\
28 bne- 1b"
29 : "=&r" (t), "=m" (v->counter)
30 : "r" (a), "r" (&v->counter), "m" (v->counter)
31 : "cc");
32}
33
34static __inline__ int atomic_add_return(int a, atomic_t *v)
35{
36 int t;
37
38 __asm__ __volatile__(
Becky Brucefeaf7cf2005-09-22 14:20:04 -050039 EIEIO_ON_SMP
Linus Torvalds1da177e2005-04-16 15:20:36 -070040"1: lwarx %0,0,%2 # atomic_add_return\n\
41 add %0,%1,%0\n"
42 PPC405_ERR77(0,%2)
43" stwcx. %0,0,%2 \n\
44 bne- 1b"
Becky Brucefeaf7cf2005-09-22 14:20:04 -050045 ISYNC_ON_SMP
Linus Torvalds1da177e2005-04-16 15:20:36 -070046 : "=&r" (t)
47 : "r" (a), "r" (&v->counter)
48 : "cc", "memory");
49
50 return t;
51}
52
53#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
54
55static __inline__ void atomic_sub(int a, atomic_t *v)
56{
57 int t;
58
59 __asm__ __volatile__(
60"1: lwarx %0,0,%3 # atomic_sub\n\
61 subf %0,%2,%0\n"
62 PPC405_ERR77(0,%3)
63" stwcx. %0,0,%3 \n\
64 bne- 1b"
65 : "=&r" (t), "=m" (v->counter)
66 : "r" (a), "r" (&v->counter), "m" (v->counter)
67 : "cc");
68}
69
70static __inline__ int atomic_sub_return(int a, atomic_t *v)
71{
72 int t;
73
74 __asm__ __volatile__(
Becky Brucefeaf7cf2005-09-22 14:20:04 -050075 EIEIO_ON_SMP
Linus Torvalds1da177e2005-04-16 15:20:36 -070076"1: lwarx %0,0,%2 # atomic_sub_return\n\
77 subf %0,%1,%0\n"
78 PPC405_ERR77(0,%2)
79" stwcx. %0,0,%2 \n\
80 bne- 1b"
Becky Brucefeaf7cf2005-09-22 14:20:04 -050081 ISYNC_ON_SMP
Linus Torvalds1da177e2005-04-16 15:20:36 -070082 : "=&r" (t)
83 : "r" (a), "r" (&v->counter)
84 : "cc", "memory");
85
86 return t;
87}
88
89static __inline__ void atomic_inc(atomic_t *v)
90{
91 int t;
92
93 __asm__ __volatile__(
94"1: lwarx %0,0,%2 # atomic_inc\n\
95 addic %0,%0,1\n"
96 PPC405_ERR77(0,%2)
97" stwcx. %0,0,%2 \n\
98 bne- 1b"
99 : "=&r" (t), "=m" (v->counter)
100 : "r" (&v->counter), "m" (v->counter)
101 : "cc");
102}
103
104static __inline__ int atomic_inc_return(atomic_t *v)
105{
106 int t;
107
108 __asm__ __volatile__(
Becky Brucefeaf7cf2005-09-22 14:20:04 -0500109 EIEIO_ON_SMP
Linus Torvalds1da177e2005-04-16 15:20:36 -0700110"1: lwarx %0,0,%1 # atomic_inc_return\n\
111 addic %0,%0,1\n"
112 PPC405_ERR77(0,%1)
113" stwcx. %0,0,%1 \n\
114 bne- 1b"
Becky Brucefeaf7cf2005-09-22 14:20:04 -0500115 ISYNC_ON_SMP
Linus Torvalds1da177e2005-04-16 15:20:36 -0700116 : "=&r" (t)
117 : "r" (&v->counter)
118 : "cc", "memory");
119
120 return t;
121}
122
123/*
124 * atomic_inc_and_test - increment and test
125 * @v: pointer of type atomic_t
126 *
127 * Atomically increments @v by 1
128 * and returns true if the result is zero, or false for all
129 * other cases.
130 */
131#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
132
133static __inline__ void atomic_dec(atomic_t *v)
134{
135 int t;
136
137 __asm__ __volatile__(
138"1: lwarx %0,0,%2 # atomic_dec\n\
139 addic %0,%0,-1\n"
140 PPC405_ERR77(0,%2)\
141" stwcx. %0,0,%2\n\
142 bne- 1b"
143 : "=&r" (t), "=m" (v->counter)
144 : "r" (&v->counter), "m" (v->counter)
145 : "cc");
146}
147
148static __inline__ int atomic_dec_return(atomic_t *v)
149{
150 int t;
151
152 __asm__ __volatile__(
Becky Brucefeaf7cf2005-09-22 14:20:04 -0500153 EIEIO_ON_SMP
Linus Torvalds1da177e2005-04-16 15:20:36 -0700154"1: lwarx %0,0,%1 # atomic_dec_return\n\
155 addic %0,%0,-1\n"
156 PPC405_ERR77(0,%1)
157" stwcx. %0,0,%1\n\
158 bne- 1b"
Becky Brucefeaf7cf2005-09-22 14:20:04 -0500159 ISYNC_ON_SMP
Linus Torvalds1da177e2005-04-16 15:20:36 -0700160 : "=&r" (t)
161 : "r" (&v->counter)
162 : "cc", "memory");
163
164 return t;
165}
166
167#define atomic_sub_and_test(a, v) (atomic_sub_return((a), (v)) == 0)
168#define atomic_dec_and_test(v) (atomic_dec_return((v)) == 0)
169
170/*
171 * Atomically test *v and decrement if it is greater than 0.
172 * The function returns the old value of *v minus 1.
173 */
174static __inline__ int atomic_dec_if_positive(atomic_t *v)
175{
176 int t;
177
178 __asm__ __volatile__(
Becky Brucefeaf7cf2005-09-22 14:20:04 -0500179 EIEIO_ON_SMP
Linus Torvalds1da177e2005-04-16 15:20:36 -0700180"1: lwarx %0,0,%1 # atomic_dec_if_positive\n\
181 addic. %0,%0,-1\n\
182 blt- 2f\n"
183 PPC405_ERR77(0,%1)
184" stwcx. %0,0,%1\n\
185 bne- 1b"
Becky Brucefeaf7cf2005-09-22 14:20:04 -0500186 ISYNC_ON_SMP
Linus Torvalds1da177e2005-04-16 15:20:36 -0700187 "\n\
1882:" : "=&r" (t)
189 : "r" (&v->counter)
190 : "cc", "memory");
191
192 return t;
193}
194
Becky Brucefeaf7cf2005-09-22 14:20:04 -0500195#define smp_mb__before_atomic_dec() smp_mb()
196#define smp_mb__after_atomic_dec() smp_mb()
197#define smp_mb__before_atomic_inc() smp_mb()
198#define smp_mb__after_atomic_inc() smp_mb()
Linus Torvalds1da177e2005-04-16 15:20:36 -0700199
Stephen Rothwell06a98db2005-11-10 15:51:14 +1100200#ifdef __powerpc64__
201
202typedef struct { volatile long counter; } atomic64_t;
203
204#define ATOMIC64_INIT(i) { (i) }
205
206#define atomic64_read(v) ((v)->counter)
207#define atomic64_set(v,i) (((v)->counter) = (i))
208
209static __inline__ void atomic64_add(long a, atomic64_t *v)
210{
211 long t;
212
213 __asm__ __volatile__(
214"1: ldarx %0,0,%3 # atomic64_add\n\
215 add %0,%2,%0\n\
216 stdcx. %0,0,%3 \n\
217 bne- 1b"
218 : "=&r" (t), "=m" (v->counter)
219 : "r" (a), "r" (&v->counter), "m" (v->counter)
220 : "cc");
221}
222
223static __inline__ long atomic64_add_return(long a, atomic64_t *v)
224{
225 long t;
226
227 __asm__ __volatile__(
228 EIEIO_ON_SMP
229"1: ldarx %0,0,%2 # atomic64_add_return\n\
230 add %0,%1,%0\n\
231 stdcx. %0,0,%2 \n\
232 bne- 1b"
233 ISYNC_ON_SMP
234 : "=&r" (t)
235 : "r" (a), "r" (&v->counter)
236 : "cc", "memory");
237
238 return t;
239}
240
241#define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)
242
243static __inline__ void atomic64_sub(long a, atomic64_t *v)
244{
245 long t;
246
247 __asm__ __volatile__(
248"1: ldarx %0,0,%3 # atomic64_sub\n\
249 subf %0,%2,%0\n\
250 stdcx. %0,0,%3 \n\
251 bne- 1b"
252 : "=&r" (t), "=m" (v->counter)
253 : "r" (a), "r" (&v->counter), "m" (v->counter)
254 : "cc");
255}
256
257static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
258{
259 long t;
260
261 __asm__ __volatile__(
262 EIEIO_ON_SMP
263"1: ldarx %0,0,%2 # atomic64_sub_return\n\
264 subf %0,%1,%0\n\
265 stdcx. %0,0,%2 \n\
266 bne- 1b"
267 ISYNC_ON_SMP
268 : "=&r" (t)
269 : "r" (a), "r" (&v->counter)
270 : "cc", "memory");
271
272 return t;
273}
274
275static __inline__ void atomic64_inc(atomic64_t *v)
276{
277 long t;
278
279 __asm__ __volatile__(
280"1: ldarx %0,0,%2 # atomic64_inc\n\
281 addic %0,%0,1\n\
282 stdcx. %0,0,%2 \n\
283 bne- 1b"
284 : "=&r" (t), "=m" (v->counter)
285 : "r" (&v->counter), "m" (v->counter)
286 : "cc");
287}
288
289static __inline__ long atomic64_inc_return(atomic64_t *v)
290{
291 long t;
292
293 __asm__ __volatile__(
294 EIEIO_ON_SMP
295"1: ldarx %0,0,%1 # atomic64_inc_return\n\
296 addic %0,%0,1\n\
297 stdcx. %0,0,%1 \n\
298 bne- 1b"
299 ISYNC_ON_SMP
300 : "=&r" (t)
301 : "r" (&v->counter)
302 : "cc", "memory");
303
304 return t;
305}
306
307/*
308 * atomic64_inc_and_test - increment and test
309 * @v: pointer of type atomic64_t
310 *
311 * Atomically increments @v by 1
312 * and returns true if the result is zero, or false for all
313 * other cases.
314 */
315#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
316
317static __inline__ void atomic64_dec(atomic64_t *v)
318{
319 long t;
320
321 __asm__ __volatile__(
322"1: ldarx %0,0,%2 # atomic64_dec\n\
323 addic %0,%0,-1\n\
324 stdcx. %0,0,%2\n\
325 bne- 1b"
326 : "=&r" (t), "=m" (v->counter)
327 : "r" (&v->counter), "m" (v->counter)
328 : "cc");
329}
330
331static __inline__ long atomic64_dec_return(atomic64_t *v)
332{
333 long t;
334
335 __asm__ __volatile__(
336 EIEIO_ON_SMP
337"1: ldarx %0,0,%1 # atomic64_dec_return\n\
338 addic %0,%0,-1\n\
339 stdcx. %0,0,%1\n\
340 bne- 1b"
341 ISYNC_ON_SMP
342 : "=&r" (t)
343 : "r" (&v->counter)
344 : "cc", "memory");
345
346 return t;
347}
348
349#define atomic64_sub_and_test(a, v) (atomic64_sub_return((a), (v)) == 0)
350#define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)
351
352/*
353 * Atomically test *v and decrement if it is greater than 0.
354 * The function returns the old value of *v minus 1.
355 */
356static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
357{
358 long t;
359
360 __asm__ __volatile__(
361 EIEIO_ON_SMP
362"1: ldarx %0,0,%1 # atomic64_dec_if_positive\n\
363 addic. %0,%0,-1\n\
364 blt- 2f\n\
365 stdcx. %0,0,%1\n\
366 bne- 1b"
367 ISYNC_ON_SMP
368 "\n\
3692:" : "=&r" (t)
370 : "r" (&v->counter)
371 : "cc", "memory");
372
373 return t;
374}
375
376#endif /* __powerpc64__ */
377
Linus Torvalds1da177e2005-04-16 15:20:36 -0700378#endif /* __KERNEL__ */
Becky Brucefeaf7cf2005-09-22 14:20:04 -0500379#endif /* _ASM_POWERPC_ATOMIC_H_ */