[S390] Inline assembly cleanup.

Major cleanup of all s390 inline assemblies. They now have a common
coding style. Quite a few have been shortened, mainly by using register
asm variables. Use of the EX_TABLE macro helps as well. The atomic ops,
bit ops and locking inlines now use the Q-constraint if a newer gcc
is used. That results in slightly better code.
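
As an illustration of the new style (this sketch is not part of the
patch; the function name and the exact gcc version check are made up),
a compare-and-swap loop that uses the Q-constraint on newer compilers
and falls back to the "m" constraint plus an address register on older
ones could look like this:

static inline int __example_add_return(int i, int *ptr)
{
	int old_val, new_val;

#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)
	/* newer gcc: "Q" describes the memory operand directly, no
	 * address register needs to be set up by hand */
	asm volatile(
		"	l	%0,%2\n"
		"0:	lr	%1,%0\n"
		"	ar	%1,%3\n"
		"	cs	%0,%1,%2\n"
		"	jl	0b"
		: "=&d" (old_val), "=&d" (new_val), "=Q" (*ptr)
		: "d" (i), "Q" (*ptr)
		: "cc", "memory");
#else
	/* older gcc: pass the address in a register and use "m" */
	asm volatile(
		"	l	%0,0(%3)\n"
		"0:	lr	%1,%0\n"
		"	ar	%1,%4\n"
		"	cs	%0,%1,0(%3)\n"
		"	jl	0b"
		: "=&d" (old_val), "=&d" (new_val), "=m" (*ptr)
		: "a" (ptr), "d" (i), "m" (*ptr)
		: "cc", "memory");
#endif
	return new_val;
}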

Thanks to Christian Borntraeger for proofreading the changes.

Signed-off-by: Martin Schwidefsky <schwidefsky@de.ibm.com>
diff --git a/include/asm-s390/string.h b/include/asm-s390/string.h
index 23a4c39..d074673 100644
--- a/include/asm-s390/string.h
+++ b/include/asm-s390/string.h
@@ -60,12 +60,13 @@
 	register int r0 asm("0") = (char) c;
 	const void *ret = s + n;
 
-	asm volatile ("0: srst  %0,%1\n"
-		      "   jo    0b\n"
-		      "   jl	1f\n"
-		      "   la    %0,0\n"
-		      "1:"
-		      : "+a" (ret), "+&a" (s) : "d" (r0) : "cc" );
+	asm volatile(
+		"0:	srst	%0,%1\n"
+		"	jo	0b\n"
+		"	jl	1f\n"
+		"	la	%0,0\n"
+		"1:"
+		: "+a" (ret), "+&a" (s) : "d" (r0) : "cc");
 	return (void *) ret;
 }
 
@@ -74,9 +75,10 @@
 	register int r0 asm("0") = (char) c;
 	const void *ret = s + n;
 
-	asm volatile ("0: srst  %0,%1\n"
-		      "   jo    0b\n"
-		      : "+a" (ret), "+&a" (s) : "d" (r0) : "cc" );
+	asm volatile(
+		"0:	srst	%0,%1\n"
+		"	jo	0b\n"
+		: "+a" (ret), "+&a" (s) : "d" (r0) : "cc");
 	return (void *) ret;
 }
 
@@ -86,12 +88,13 @@
 	unsigned long dummy;
 	char *ret = dst;
 
-	asm volatile ("0: srst  %0,%1\n"
-		      "   jo    0b\n"
-		      "1: mvst  %0,%2\n"
-		      "   jo    1b"
-		      : "=&a" (dummy), "+a" (dst), "+a" (src)
-		      : "d" (r0), "0" (0) : "cc", "memory" );
+	asm volatile(
+		"0:	srst	%0,%1\n"
+		"	jo	0b\n"
+		"1:	mvst	%0,%2\n"
+		"	jo	1b"
+		: "=&a" (dummy), "+a" (dst), "+a" (src)
+		: "d" (r0), "0" (0) : "cc", "memory");
 	return ret;
 }
 
@@ -100,10 +103,11 @@
 	register int r0 asm("0") = 0;
 	char *ret = dst;
 
-	asm volatile ("0: mvst  %0,%1\n"
-		      "   jo    0b"
-		      : "+&a" (dst), "+&a" (src) : "d" (r0)
-		      : "cc", "memory" );
+	asm volatile(
+		"0:	mvst	%0,%1\n"
+		"	jo	0b"
+		: "+&a" (dst), "+&a" (src) : "d" (r0)
+		: "cc", "memory");
 	return ret;
 }
 
@@ -112,9 +116,10 @@
 	register unsigned long r0 asm("0") = 0;
 	const char *tmp = s;
 
-	asm volatile ("0: srst  %0,%1\n"
-		      "   jo    0b"
-		      : "+d" (r0), "+a" (tmp) :  : "cc" );
+	asm volatile(
+		"0:	srst	%0,%1\n"
+		"	jo	0b"
+		: "+d" (r0), "+a" (tmp) : : "cc");
 	return r0 - (unsigned long) s;
 }
 
@@ -124,9 +129,10 @@
 	const char *tmp = s;
 	const char *end = s + n;
 
-	asm volatile ("0: srst  %0,%1\n"
-		      "   jo    0b"
-		      : "+a" (end), "+a" (tmp) : "d" (r0)  : "cc" );
+	asm volatile(
+		"0:	srst	%0,%1\n"
+		"	jo	0b"
+		: "+a" (end), "+a" (tmp) : "d" (r0) : "cc");
 	return end - s;
 }