#ifndef ASM_X86_CMPXCHG_H
#define ASM_X86_CMPXCHG_H

#include <linux/compiler.h>
#include <asm/alternative.h> /* Provides LOCK_PREFIX */

/*
 * Non-existent functions to indicate usage errors at link time
 * (or compile time if the compiler implements __compiletime_error()).
 */
extern void __xchg_wrong_size(void)
	__compiletime_error("Bad argument size for xchg");
extern void __cmpxchg_wrong_size(void)
	__compiletime_error("Bad argument size for cmpxchg");
extern void __xadd_wrong_size(void)
	__compiletime_error("Bad argument size for xadd");
extern void __add_wrong_size(void)
	__compiletime_error("Bad argument size for add");
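
/*
 * Example (illustrative): invoking one of the size-dispatched macros
 * below on a type with no matching case, e.g. a 3-byte struct, falls
 * through to the default case, references the undefined function, and
 * fails the build with "Bad argument size for xchg":
 *
 *	struct { char b[3]; } o, v;
 *	xchg(&o, v);
 */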

/*
 * Constants for operation sizes. On 32-bit, the 64-bit size is set to
 * -1 because sizeof will never return -1, thereby making those switch
 * case statements guaranteed dead code which the compiler will
 * eliminate, and allowing the "missing symbol in the default case" to
 * indicate a usage error.
 */
#define __X86_CASE_B	1
#define __X86_CASE_W	2
#define __X86_CASE_L	4
#ifdef CONFIG_64BIT
#define __X86_CASE_Q	8
#else
#define	__X86_CASE_Q	-1		/* sizeof will never return -1 */
#endif

/*
 * An exchange-type operation, which takes a value and a pointer, and
 * returns the old value.
 */
#define __xchg_op(ptr, arg, op, lock)					\
	({								\
		__typeof__ (*(ptr)) __ret = (arg);			\
		switch (sizeof(*(ptr))) {				\
		case __X86_CASE_B:					\
			asm volatile (lock #op "b %b0, %1\n"		\
				      : "+q" (__ret), "+m" (*(ptr))	\
				      : : "memory", "cc");		\
			break;						\
		case __X86_CASE_W:					\
			asm volatile (lock #op "w %w0, %1\n"		\
				      : "+r" (__ret), "+m" (*(ptr))	\
				      : : "memory", "cc");		\
			break;						\
		case __X86_CASE_L:					\
			asm volatile (lock #op "l %0, %1\n"		\
				      : "+r" (__ret), "+m" (*(ptr))	\
				      : : "memory", "cc");		\
			break;						\
		case __X86_CASE_Q:					\
			asm volatile (lock #op "q %q0, %1\n"		\
				      : "+r" (__ret), "+m" (*(ptr))	\
				      : : "memory", "cc");		\
			break;						\
		default:						\
			__ ## op ## _wrong_size();			\
		}							\
		__ret;							\
	})

/*
 * Note: no "lock" prefix even on SMP: xchg always implies lock anyway.
 * Since this is generally used to protect other memory information, we
 * use "asm volatile" and "memory" clobbers to prevent gcc from moving
 * information around.
 */
#define xchg(ptr, v)	__xchg_op((ptr), (v), xchg, "")
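
/*
 * Example (illustrative; "flag" is a hypothetical unsigned long):
 * atomically set a flag and learn whether it was already set:
 *
 *	unsigned long prev = xchg(&flag, 1UL);
 *	if (!prev)
 *		we_won_the_race();
 */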

/*
 * Atomic compare and exchange.  Compare OLD with MEM, if identical,
 * store NEW in MEM.  Return the initial value in MEM.  Success is
 * indicated by comparing RETURN with OLD.
 */
#define __raw_cmpxchg(ptr, old, new, size, lock)			\
({									\
	__typeof__(*(ptr)) __ret;					\
	__typeof__(*(ptr)) __old = (old);				\
	__typeof__(*(ptr)) __new = (new);				\
	switch (size) {							\
	case __X86_CASE_B:						\
	{								\
		volatile u8 *__ptr = (volatile u8 *)(ptr);		\
		asm volatile(lock "cmpxchgb %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "q" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_W:						\
	{								\
		volatile u16 *__ptr = (volatile u16 *)(ptr);		\
		asm volatile(lock "cmpxchgw %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_L:						\
	{								\
		volatile u32 *__ptr = (volatile u32 *)(ptr);		\
		asm volatile(lock "cmpxchgl %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	case __X86_CASE_Q:						\
	{								\
		volatile u64 *__ptr = (volatile u64 *)(ptr);		\
		asm volatile(lock "cmpxchgq %2,%1"			\
			     : "=a" (__ret), "+m" (*__ptr)		\
			     : "r" (__new), "0" (__old)			\
			     : "memory");				\
		break;							\
	}								\
	default:							\
		__cmpxchg_wrong_size();					\
	}								\
	__ret;								\
})

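/*
 * The three wrappers below differ only in the lock prefix passed down:
 *
 * __cmpxchg() is locked when multiple CPUs are online (LOCK_PREFIX is
 * patched to a no-op on UP kernels via asm/alternative.h)
 * __sync_cmpxchg() is always locked
 * __cmpxchg_local() is never locked
 */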
#define __cmpxchg(ptr, old, new, size)					\
	__raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)

#define __sync_cmpxchg(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "lock; ")

#define __cmpxchg_local(ptr, old, new, size)				\
	__raw_cmpxchg((ptr), (old), (new), (size), "")

#ifdef CONFIG_X86_32
# include <asm/cmpxchg_32.h>
#else
# include <asm/cmpxchg_64.h>
#endif

#ifdef __HAVE_ARCH_CMPXCHG
#define cmpxchg(ptr, old, new)						\
	__cmpxchg(ptr, old, new, sizeof(*(ptr)))

#define sync_cmpxchg(ptr, old, new)					\
	__sync_cmpxchg(ptr, old, new, sizeof(*(ptr)))

#define cmpxchg_local(ptr, old, new)					\
	__cmpxchg_local(ptr, old, new, sizeof(*(ptr)))
#endif
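
/*
 * Example (illustrative; "ptr" and SOME_BIT are hypothetical): the
 * classic lock-free update loop; retry whenever another CPU changed
 * *ptr between our read and the cmpxchg:
 *
 *	unsigned long old, new;
 *	do {
 *		old = *ptr;
 *		new = old | SOME_BIT;
 *	} while (cmpxchg(ptr, old, new) != old);
 */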

/*
 * xadd() adds "inc" to "*ptr" and atomically returns the previous
 * value of "*ptr".
 *
 * xadd() is locked when multiple CPUs are online
 * xadd_sync() is always locked
 * xadd_local() is never locked
 */
#define __xadd(ptr, inc, lock)	__xchg_op((ptr), (inc), xadd, lock)
#define xadd(ptr, inc)		__xadd((ptr), (inc), LOCK_PREFIX)
#define xadd_sync(ptr, inc)	__xadd((ptr), (inc), "lock; ")
#define xadd_local(ptr, inc)	__xadd((ptr), (inc), "")
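
/*
 * Example (illustrative; "counter" is a hypothetical int): fetch-and-add
 * in a single locked instruction; the return value is the pre-add
 * contents of the counter:
 *
 *	int prev = xadd(&counter, 1);
 */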

#define __add(ptr, inc, lock)						\
	({								\
		__typeof__ (*(ptr)) __ret = (inc);			\
		switch (sizeof(*(ptr))) {				\
		case __X86_CASE_B:					\
			asm volatile (lock "addb %b1, %0\n"		\
				      : "+m" (*(ptr)) : "qi" (inc)	\
				      : "memory", "cc");		\
			break;						\
		case __X86_CASE_W:					\
			asm volatile (lock "addw %w1, %0\n"		\
				      : "+m" (*(ptr)) : "ri" (inc)	\
				      : "memory", "cc");		\
			break;						\
		case __X86_CASE_L:					\
			asm volatile (lock "addl %1, %0\n"		\
				      : "+m" (*(ptr)) : "ri" (inc)	\
				      : "memory", "cc");		\
			break;						\
		case __X86_CASE_Q:					\
			asm volatile (lock "addq %1, %0\n"		\
				      : "+m" (*(ptr)) : "ri" (inc)	\
				      : "memory", "cc");		\
			break;						\
		default:						\
			__add_wrong_size();				\
		}							\
		__ret;							\
	})

/*
 * add_*() adds "inc" to "*ptr"
 *
 * __add() takes a lock prefix
 * add_smp() is locked when multiple CPUs are online
 * add_sync() is always locked
 */
#define add_smp(ptr, inc)	__add((ptr), (inc), LOCK_PREFIX)
#define add_sync(ptr, inc)	__add((ptr), (inc), "lock; ")
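
/*
 * Example (illustrative; "stats" is hypothetical): unlike xadd(),
 * __add() returns the increment rather than the old value, so the
 * add_*() family suits fire-and-forget updates of shared counters:
 *
 *	add_smp(&stats->hits, 1);
 */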

#define __cmpxchg_double(pfx, p1, p2, o1, o2, n1, n2)			\
({									\
	bool __ret;							\
	__typeof__(*(p1)) __old1 = (o1), __new1 = (n1);			\
	__typeof__(*(p2)) __old2 = (o2), __new2 = (n2);			\
	BUILD_BUG_ON(sizeof(*(p1)) != sizeof(long));			\
	BUILD_BUG_ON(sizeof(*(p2)) != sizeof(long));			\
	VM_BUG_ON((unsigned long)(p1) % (2 * sizeof(long)));		\
	VM_BUG_ON((unsigned long)((p1) + 1) != (unsigned long)(p2));	\
	asm volatile(pfx "cmpxchg%c4b %2; sete %0"			\
		     : "=a" (__ret), "+d" (__old2),			\
		       "+m" (*(p1)), "+m" (*(p2))			\
		     : "i" (2 * sizeof(long)), "a" (__old1),		\
		       "b" (__new1), "c" (__new2));			\
	__ret;								\
})

#define cmpxchg_double(p1, p2, o1, o2, n1, n2) \
	__cmpxchg_double(LOCK_PREFIX, p1, p2, o1, o2, n1, n2)

#define cmpxchg_double_local(p1, p2, o1, o2, n1, n2) \
	__cmpxchg_double(, p1, p2, o1, o2, n1, n2)
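
/*
 * Example (illustrative; the freelist names are hypothetical):
 * atomically replace an adjacent pointer/generation pair, the usual
 * ABA-avoidance pattern; the two words must be long-sized, contiguous
 * and 2*sizeof(long) aligned, as the BUILD_BUG_ON()/VM_BUG_ON() checks
 * above enforce:
 *
 *	if (cmpxchg_double(&list->head, &list->gen,
 *			   old_head, old_gen, new_head, old_gen + 1))
 *		success: both words matched and were replaced atomically
 */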

#endif	/* ASM_X86_CMPXCHG_H */