blob: d76275e5638c2285a60e52a7aec37d8c83eeaf06 [file] [log] [blame]
Bryan Wu1394f032007-05-06 14:50:22 -07001#ifndef __ARCH_BLACKFIN_ATOMIC__
2#define __ARCH_BLACKFIN_ATOMIC__
3
Matthew Wilcoxea4354672009-01-06 14:40:39 -08004#include <linux/types.h>
Bryan Wu1394f032007-05-06 14:50:22 -07005#include <asm/system.h> /* local_irq_XXX() */
6
7/*
8 * Atomic operations that C can't guarantee us. Useful for
9 * resource counting etc..
10 *
11 * Generally we do not concern about SMP BFIN systems, so we don't have
12 * to deal with that.
13 *
14 * Tony Kou (tonyko@lineo.ca) Lineo Inc. 2001
15 */
16
Bryan Wu1394f032007-05-06 14:50:22 -070017#define ATOMIC_INIT(i) { (i) }
Bryan Wu1394f032007-05-06 14:50:22 -070018#define atomic_set(v, i) (((v)->counter) = i)
19
#ifdef CONFIG_SMP

/* SMP: read through an uncached fetch so stores from other cores are seen. */
#define atomic_read(v)	__raw_uncached_fetch_asm(&(v)->counter)

/*
 * Atomic primitives implemented in assembly (defined elsewhere).
 * NOTE(review): based on the wrappers below, the update/clear/set/xor
 * helpers appear to apply the operation atomically and return the new
 * value, and *_test appears to return the masked bits — confirm against
 * the asm implementations, which are not visible in this file.
 */
asmlinkage int __raw_uncached_fetch_asm(const volatile int *ptr);

asmlinkage int __raw_atomic_update_asm(volatile int *ptr, int value);

asmlinkage int __raw_atomic_clear_asm(volatile int *ptr, int value);

asmlinkage int __raw_atomic_set_asm(volatile int *ptr, int value);

asmlinkage int __raw_atomic_xor_asm(volatile int *ptr, int value);

asmlinkage int __raw_atomic_test_asm(const volatile int *ptr, int value);

/* atomic_add - atomically add @i to @v->counter */
static inline void atomic_add(int i, atomic_t *v)
{
	__raw_atomic_update_asm(&v->counter, i);
}

/* atomic_sub - atomically subtract @i from @v->counter */
static inline void atomic_sub(int i, atomic_t *v)
{
	__raw_atomic_update_asm(&v->counter, -i);
}

/* atomic_add_return - atomically add @i to @v->counter; returns the result */
static inline int atomic_add_return(int i, atomic_t *v)
{
	return __raw_atomic_update_asm(&v->counter, i);
}

/* atomic_sub_return - atomically subtract @i from @v->counter; returns the result */
static inline int atomic_sub_return(int i, atomic_t *v)
{
	return __raw_atomic_update_asm(&v->counter, -i);
}

/* atomic_inc - atomically increment @v->counter by 1 */
static inline void atomic_inc(volatile atomic_t *v)
{
	__raw_atomic_update_asm(&v->counter, 1);
}

/* atomic_dec - atomically decrement @v->counter by 1 */
static inline void atomic_dec(volatile atomic_t *v)
{
	__raw_atomic_update_asm(&v->counter, -1);
}

/*
 * atomic_clear_mask - atomically clear the bits set in @mask.
 * NOTE(review): takes int here but unsigned int in the !SMP variant
 * below — consider unifying the signatures.
 */
static inline void atomic_clear_mask(int mask, atomic_t *v)
{
	__raw_atomic_clear_asm(&v->counter, mask);
}

/* atomic_set_mask - atomically set the bits set in @mask (see NOTE above on type) */
static inline void atomic_set_mask(int mask, atomic_t *v)
{
	__raw_atomic_set_asm(&v->counter, mask);
}

/* atomic_test_mask - test @v->counter against @mask (SMP-only helper) */
static inline int atomic_test_mask(int mask, atomic_t *v)
{
	return __raw_atomic_test_asm(&v->counter, mask);
}

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#else /* !CONFIG_SMP */
88
/* UP: a direct load suffices; no cross-core visibility to worry about. */
#define atomic_read(v)	((v)->counter)
90
91static inline void atomic_add(int i, atomic_t *v)
Bryan Wu1394f032007-05-06 14:50:22 -070092{
93 long flags;
94
95 local_irq_save(flags);
96 v->counter += i;
97 local_irq_restore(flags);
98}
99
Graf Yang6b3087c2009-01-07 23:14:39 +0800100static inline void atomic_sub(int i, atomic_t *v)
Bryan Wu1394f032007-05-06 14:50:22 -0700101{
102 long flags;
103
104 local_irq_save(flags);
105 v->counter -= i;
106 local_irq_restore(flags);
107
108}
109
Graf Yang6b3087c2009-01-07 23:14:39 +0800110static inline int atomic_add_return(int i, atomic_t *v)
Bryan Wu1394f032007-05-06 14:50:22 -0700111{
112 int __temp = 0;
113 long flags;
114
115 local_irq_save(flags);
116 v->counter += i;
117 __temp = v->counter;
118 local_irq_restore(flags);
119
120
121 return __temp;
122}
123
Graf Yang6b3087c2009-01-07 23:14:39 +0800124static inline int atomic_sub_return(int i, atomic_t *v)
Bryan Wu1394f032007-05-06 14:50:22 -0700125{
126 int __temp = 0;
127 long flags;
128
129 local_irq_save(flags);
130 v->counter -= i;
131 __temp = v->counter;
132 local_irq_restore(flags);
133
134 return __temp;
135}
136
Graf Yang6b3087c2009-01-07 23:14:39 +0800137static inline void atomic_inc(volatile atomic_t *v)
Bryan Wu1394f032007-05-06 14:50:22 -0700138{
139 long flags;
140
141 local_irq_save(flags);
142 v->counter++;
143 local_irq_restore(flags);
144}
145
Graf Yang6b3087c2009-01-07 23:14:39 +0800146static inline void atomic_dec(volatile atomic_t *v)
Bryan Wu1394f032007-05-06 14:50:22 -0700147{
148 long flags;
149
150 local_irq_save(flags);
151 v->counter--;
152 local_irq_restore(flags);
153}
154
Graf Yang6b3087c2009-01-07 23:14:39 +0800155static inline void atomic_clear_mask(unsigned int mask, atomic_t *v)
Bryan Wu1394f032007-05-06 14:50:22 -0700156{
157 long flags;
158
159 local_irq_save(flags);
160 v->counter &= ~mask;
161 local_irq_restore(flags);
162}
163
Graf Yang6b3087c2009-01-07 23:14:39 +0800164static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
Bryan Wu1394f032007-05-06 14:50:22 -0700165{
166 long flags;
167
168 local_irq_save(flags);
169 v->counter |= mask;
170 local_irq_restore(flags);
171}
172
/*
 * Atomic operations are already serializing, so these only need to be
 * compiler barriers (prevent reordering by the compiler, no hw fence).
 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#endif /* !CONFIG_SMP */
180
/* Helpers derived from the add/sub primitives defined above. */
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
#define atomic_dec_return(v)	atomic_sub_return(1,(v))
#define atomic_inc_return(v)	atomic_add_return(1,(v))

/* cmpxchg/xchg come from the surrounding kernel headers. */
#define atomic_cmpxchg(v, o, n)	((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new)	(xchg(&((v)->counter), new))

/*
 * atomic_add_unless - add @a to @v unless it currently equals @u.
 * Classic cmpxchg retry loop (GCC statement-expression macro):
 * re-reads the counter each time the compare-exchange loses a race.
 * Evaluates to true (non-zero) if the add was performed.
 */
#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
197
/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v)	(atomic_inc_return(v) == 0)

/* True when the subtraction/decrement leaves the counter at zero. */
#define atomic_sub_and_test(i,v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)	(atomic_sub_return(1, (v)) == 0)

/* Pull in the generic 64-bit / leftover atomic definitions. */
#include <asm-generic/atomic.h>

#endif	/* __ARCH_BLACKFIN_ATOMIC__ */
213#endif /* __ARCH_BLACKFIN_ATOMIC __ */