arm: Convert spinlocks to new inline assembly style
Convert the inline assembly in spinlock.h to use named (symbolic)
operands instead of positional references, in preparation for
supporting the Krait safe WFE sequence.
Change-Id: I2db4f823c39b164e04673f44cea916e334a20c9a
Signed-off-by: Stepan Moskovchenko <stepanm@codeaurora.org>
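---

For reference, the "new style" referred to above is gcc's named
(symbolic) asm operand syntax: each %[name] in the template is bound to
an operand declared as [name] "constraint" (expression), so the template
no longer depends on positional %0/%1/%2 numbering and operands can be
added or reordered (as a later Krait WFE change may need) without
renumbering every reference. Below is a minimal stand-alone sketch of
the syntax, assuming an ARMv6+ build; the function name and the trylock
logic are illustrative only and are not part of spinlock.h:

static inline int example_trylock(volatile unsigned int *lock)
{
	unsigned int tmp;

	__asm__ __volatile__(
	/* Illustrative only, not kernel code: %[tmp], %[lock] and
	 * %[one] name the operands listed below instead of referring
	 * to them positionally as %0, %1 and %2. */
"	ldrex	%[tmp], [%[lock]]\n"		/* read current value      */
"	teq	%[tmp], #0\n"			/* already held?           */
"	strexeq	%[tmp], %[one], [%[lock]]\n"	/* if free, try to take it */
	: [tmp] "=&r" (tmp)
	: [lock] "r" (lock), [one] "r" (1)
	: "cc", "memory");

	return tmp == 0;	/* strex writes 0 on a successful store */
}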
diff --git a/arch/arm/include/asm/spinlock.h b/arch/arm/include/asm/spinlock.h
index 88c3f30..51be229 100644
--- a/arch/arm/include/asm/spinlock.h
+++ b/arch/arm/include/asm/spinlock.h
@@ -74,16 +74,16 @@
unsigned long tmp;

__asm__ __volatile__(
-"1: ldrex %0, [%1]\n"
-" teq %0, #0\n"
+"1: ldrex %[tmp], [%[lock]]\n"
+" teq %[tmp], #0\n"
" beq 2f\n"
WFE()
"2:\n"
-" strexeq %0, %2, [%1]\n"
-" teqeq %0, #0\n"
+" strexeq %[tmp], %[bit0], [%[lock]]\n"
+" teqeq %[tmp], #0\n"
" bne 1b"
- : "=&r" (tmp)
- : "r" (&lock->lock), "r" (1)
+ : [tmp] "=&r" (tmp)
+ : [lock] "r" (&lock->lock), [bit0] "r" (1)
: "cc");

smp_mb();
@@ -265,16 +265,16 @@
unsigned long tmp;

__asm__ __volatile__(
-"1: ldrex %0, [%1]\n"
-" teq %0, #0\n"
+"1: ldrex %[tmp], [%[lock]]\n"
+" teq %[tmp], #0\n"
" beq 2f\n"
WFE()
"2:\n"
-" strexeq %0, %2, [%1]\n"
-" teq %0, #0\n"
+" strexeq %[tmp], %[bit31], [%[lock]]\n"
+" teq %[tmp], #0\n"
" bne 1b"
- : "=&r" (tmp)
- : "r" (&rw->lock), "r" (0x80000000)
+ : [tmp] "=&r" (tmp)
+ : [lock] "r" (&rw->lock), [bit31] "r" (0x80000000)
: "cc");

smp_mb();
@@ -333,16 +333,16 @@
unsigned long tmp, tmp2;

__asm__ __volatile__(
-"1: ldrex %0, [%2]\n"
-" adds %0, %0, #1\n"
-" strexpl %1, %0, [%2]\n"
+"1: ldrex %[tmp], [%[lock]]\n"
+" adds %[tmp], %[tmp], #1\n"
+" strexpl %[tmp2], %[tmp], [%[lock]]\n"
" bpl 2f\n"
WFE()
"2:\n"
-" rsbpls %0, %1, #0\n"
+" rsbpls %[tmp], %[tmp2], #0\n"
" bmi 1b"
- : "=&r" (tmp), "=&r" (tmp2)
- : "r" (&rw->lock)
+ : [tmp] "=&r" (tmp), [tmp2] "=&r" (tmp2)
+ : [lock] "r" (&rw->lock)
: "cc");

smp_mb();