#ifndef __ARCH_H8300_ATOMIC__
#define __ARCH_H8300_ATOMIC__

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

typedef struct { int counter; } atomic_t;
#define ATOMIC_INIT(i)	{ (i) }

#define atomic_read(v)		((v)->counter)
#define atomic_set(v, i)	(((v)->counter) = (i))

#include <asm/system.h>
#include <linux/kernel.h>

static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	unsigned long flags;
	int ret;

	/* No SMP on H8/300: masking interrupts is enough to make the
	 * read-modify-write atomic. */
	local_irq_save(flags);
	ret = v->counter += i;
	local_irq_restore(flags);
	return ret;
}

#define atomic_add(i, v) atomic_add_return(i, v)
#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
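
/*
 * Illustrative sketch (hypothetical helper, not part of the original
 * header): atomic_add_negative() suits signed accounting, e.g. noticing
 * when a balance drops below zero after a charge.
 */
static inline int example_charge(atomic_t *balance, int cost)
{
	/* Add the negated cost and report whether the result went negative. */
	return atomic_add_negative(-cost, balance);
}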

static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	unsigned long flags;
	int ret;

	local_irq_save(flags);
	ret = v->counter -= i;
	local_irq_restore(flags);
	return ret;
}

#define atomic_sub(i, v) atomic_sub_return(i, v)

static __inline__ int atomic_inc_return(atomic_t *v)
{
	unsigned long flags;
	int ret;

	local_irq_save(flags);
	v->counter++;
	ret = v->counter;
	local_irq_restore(flags);
	return ret;
}

#define atomic_inc(v) atomic_inc_return(v)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static __inline__ int atomic_dec_return(atomic_t *v)
{
	unsigned long flags;
	int ret;

	local_irq_save(flags);
	--v->counter;
	ret = v->counter;
	local_irq_restore(flags);
	return ret;
}

#define atomic_dec(v) atomic_dec_return(v)

static __inline__ int atomic_dec_and_test(atomic_t *v)
{
	unsigned long flags;
	int ret;

	local_irq_save(flags);
	--v->counter;
	ret = v->counter;
	local_irq_restore(flags);
	return ret == 0;
}

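/*
 * Illustrative sketch (hypothetical type and helper, not part of the
 * original header): the usual reference-count release pattern built on
 * atomic_dec_and_test().
 */
struct example_obj {
	atomic_t refcount;
	void (*release)(struct example_obj *obj);
};

static inline void example_obj_put(struct example_obj *obj)
{
	/* Free the object only when the last reference goes away. */
	if (atomic_dec_and_test(&obj->refcount))
		obj->release(obj);
}
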
static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
	local_irq_restore(flags);
	return ret;
}

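/*
 * Illustrative sketch (hypothetical helper, not part of the original
 * header): the classic compare-and-swap retry loop built on
 * atomic_cmpxchg(), here clamping the counter to an upper bound.
 */
static inline int example_add_clamped(atomic_t *v, int a, int max)
{
	int old, new;

	do {
		old = atomic_read(v);
		new = old + a;
		if (new > max)
			return 0;	/* would exceed the limit, give up */
	} while (atomic_cmpxchg(v, old, new) != old);
	return 1;			/* update applied */
}
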
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int ret;
	unsigned long flags;

	local_irq_save(flags);
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	local_irq_restore(flags);
	return ret != u;
}
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
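
/*
 * Illustrative sketch (hypothetical helper, not part of the original
 * header): atomic_inc_not_zero() is the lookup-side companion of the
 * release pattern above.
 */
static inline int example_tryget(atomic_t *refcount)
{
	/*
	 * Takes a reference and returns non-zero only if the count was
	 * not already zero, i.e. the object has not started dying.
	 */
	return atomic_inc_not_zero(refcount);
}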

static __inline__ void atomic_clear_mask(unsigned long mask, unsigned long *v)
{
	/*
	 * Save CCR, set the I bit to block interrupts, clear the mask
	 * bits in *v with a read-modify-write, then restore CCR.
	 */
	__asm__ __volatile__("stc ccr,r1l\n\t"
			     "orc #0x80,ccr\n\t"
			     "mov.l %0,er0\n\t"
			     "and.l %1,er0\n\t"
			     "mov.l er0,%0\n\t"
			     "ldc r1l,ccr"
			     : "+m" (*v)
			     : "g" (~(mask))
			     : "er0", "er1");
}

static __inline__ void atomic_set_mask(unsigned long mask, unsigned long *v)
{
	/* Same trick as atomic_clear_mask(), but OR-ing the bits in. */
	__asm__ __volatile__("stc ccr,r1l\n\t"
			     "orc #0x80,ccr\n\t"
			     "mov.l %0,er0\n\t"
			     "or.l %1,er0\n\t"
			     "mov.l er0,%0\n\t"
			     "ldc r1l,ccr"
			     : "+m" (*v)
			     : "g" (mask)
			     : "er0", "er1");
}
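
/*
 * Illustrative sketch (hypothetical flag word and bit, not part of the
 * original header): atomic_set_mask()/atomic_clear_mask() operate on a
 * plain unsigned long bitmask rather than an atomic_t.
 */
static inline void example_mark_busy(unsigned long *status, int busy)
{
	if (busy)
		atomic_set_mask(0x1UL, status);		/* set the hypothetical BUSY bit */
	else
		atomic_clear_mask(0x1UL, status);	/* clear it again */
}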

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#include <asm-generic/atomic.h>
#endif /* __ARCH_H8300_ATOMIC__ */