#ifndef __ASM_SH_CMPXCHG_GRB_H
#define __ASM_SH_CMPXCHG_GRB_H

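/*
 * These routines use a gUSA ("g" User Space Atomicity) style rollback
 * sequence: LOGIN loads r15 with minus the byte length of the critical
 * section while r0 (set up by mova) points at its end.  If an exception
 * or interrupt arrives while r15 is negative, the kernel restarts
 * execution at r0 + r15, i.e. at the start of the section, so the
 * load/store pair runs to completion as if it were atomic.  LOGOUT
 * restores the saved stack pointer and closes the section.
 */
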
static inline unsigned long xchg_u32(volatile u32 *m, unsigned long val)
{
	unsigned long retval;

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   nop                   \n\t"
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-4,   r15     \n\t" /* LOGIN */
		"   mov.l  @%1,   %0      \n\t" /* load  old value */
		"   mov.l   %2,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (retval),
		  "+r"  (m)
		: "r"   (val)
		: "memory", "r0", "r1");

	return retval;
}
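
/*
 * Usage sketch (not part of the original header): a test-and-set lock
 * primitive built on xchg_u32.  The helper name is hypothetical.
 */
static inline int __xchg_test_and_set_example(volatile u32 *lock)
{
	/* A previous value of 0 means the caller acquired the lock. */
	return xchg_u32(lock, 1) != 0;
}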

static inline unsigned long xchg_u8(volatile u8 *m, unsigned long val)
{
	unsigned long retval;

	__asm__ __volatile__ (
		"   .align  2             \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN */
		"   mov.b  @%1,   %0      \n\t" /* load  old value */
		"   extu.b  %0,   %0      \n\t" /* extend as unsigned */
		"   mov.b   %2,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (retval),
		  "+r"  (m)
		: "r"   (val)
		: "memory", "r0", "r1");

	return retval;
}
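
/*
 * Usage sketch (not part of the original header): atomically consume a
 * one-byte flag, returning its previous value zero-extended.  The
 * helper name is hypothetical.
 */
static inline unsigned long __xchg_consume_flag_example(volatile u8 *flag)
{
	return xchg_u8(flag, 0);
}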

static inline unsigned long __cmpxchg_u32(volatile int *m, unsigned long old,
					  unsigned long new)
{
	unsigned long retval;

	__asm__ __volatile__ (
		"   .align  2             \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   nop                   \n\t"
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-8,   r15     \n\t" /* LOGIN */
		"   mov.l  @%1,   %0      \n\t" /* load  current value */
		"   cmp/eq  %0,   %3      \n\t" /* compare with expected old */
		"   bf            1f      \n\t" /* if not equal, skip store */
		"   mov.l   %2,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (retval),
		  "+r"  (m)
		: "r"   (new), "r" (old)
		: "memory", "r0", "r1", "t");

	return retval;
}
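
/*
 * Usage sketch (not part of the original header): the classic
 * compare-and-swap retry loop, here adding a delta to a shared counter.
 * The helper name is hypothetical.
 */
static inline int __cmpxchg_add_example(volatile int *v, int delta)
{
	unsigned long cur;

	/* Retry until no other context modifies *v between load and swap. */
	do {
		cur = *v;
	} while (__cmpxchg_u32(v, cur, cur + delta) != cur);

	return cur + delta;
}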

#endif /* __ASM_SH_CMPXCHG_GRB_H */