[ARM] 3103/1: ARM EABI: stack pointer must be 64-bit aligned (part 2)

Patch from Nicolas Pitre

We must make sure that any assembly code which modifies the stack
pointer before calling a C function does so in a way that keeps the
stack pointer 64-bit aligned.
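
As an illustrative sketch of the alignment arithmetic (not part of the
patch itself): stmfd pushes 4 bytes per register, so saving an odd
number of registers leaves sp only 4-byte aligned, while padding the
save with one extra register (r4 here) keeps the AAPCS-required 8-byte
alignment:

	stmfd	sp!, {r0 - r3, lr}	@ 5 words = 20 bytes, sp loses 64-bit alignment
	stmfd	sp!, {r0 - r4, lr}	@ 6 words = 24 bytes, 64-bit alignment preserved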

Signed-off-by: Nicolas Pitre <nico@cam.org>
Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
diff --git a/arch/arm/kernel/semaphore.c b/arch/arm/kernel/semaphore.c
index 4c31f29..981fe5c 100644
--- a/arch/arm/kernel/semaphore.c
+++ b/arch/arm/kernel/semaphore.c
@@ -177,41 +177,42 @@
  * ip contains the semaphore pointer on entry. Save the C-clobbered
  * registers (r0 to r3 and lr), but not ip, as we use it as a return
  * value in some cases..
+ * To remain AAPCS compliant (64-bit stack align) we save r4 as well.
  */
 asm("	.section .sched.text,\"ax\",%progbits	\n\
 	.align	5				\n\
 	.globl	__down_failed			\n\
 __down_failed:					\n\
-	stmfd	sp!, {r0 - r3, lr}		\n\
+	stmfd	sp!, {r0 - r4, lr}		\n\
 	mov	r0, ip				\n\
 	bl	__down				\n\
-	ldmfd	sp!, {r0 - r3, pc}		\n\
+	ldmfd	sp!, {r0 - r4, pc}		\n\
 						\n\
 	.align	5				\n\
 	.globl	__down_interruptible_failed	\n\
 __down_interruptible_failed:			\n\
-	stmfd	sp!, {r0 - r3, lr}		\n\
+	stmfd	sp!, {r0 - r4, lr}		\n\
 	mov	r0, ip				\n\
 	bl	__down_interruptible		\n\
 	mov	ip, r0				\n\
-	ldmfd	sp!, {r0 - r3, pc}		\n\
+	ldmfd	sp!, {r0 - r4, pc}		\n\
 						\n\
 	.align	5				\n\
 	.globl	__down_trylock_failed		\n\
 __down_trylock_failed:				\n\
-	stmfd	sp!, {r0 - r3, lr}		\n\
+	stmfd	sp!, {r0 - r4, lr}		\n\
 	mov	r0, ip				\n\
 	bl	__down_trylock			\n\
 	mov	ip, r0				\n\
-	ldmfd	sp!, {r0 - r3, pc}		\n\
+	ldmfd	sp!, {r0 - r4, pc}		\n\
 						\n\
 	.align	5				\n\
 	.globl	__up_wakeup			\n\
 __up_wakeup:					\n\
-	stmfd	sp!, {r0 - r3, lr}		\n\
+	stmfd	sp!, {r0 - r4, lr}		\n\
 	mov	r0, ip				\n\
 	bl	__up				\n\
-	ldmfd	sp!, {r0 - r3, pc}		\n\
+	ldmfd	sp!, {r0 - r4, pc}		\n\
 	");
 
 EXPORT_SYMBOL(__down_failed);