ppc32: use L1_CACHE_SHIFT/L1_CACHE_BYTES

Use the generic L1_CACHE_BYTES and L1_CACHE_SHIFT macros in
arch/powerpc/kernel/misc_32.S instead of the ppc32-only
L1_CACHE_LINE_SIZE and LG_L1_CACHE_LINE_SIZE.
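
For reference, a minimal sketch of the intended mapping, assuming the
usual asm/cache.h convention of deriving the byte size from the shift;
the concrete values are configuration-dependent and the definitions
below are illustrative, not part of this patch:

	/* old ppc32-only name         new generic name                     */
	/* L1_CACHE_LINE_SIZE     ->   L1_CACHE_BYTES  (line size in bytes) */
	/* LG_L1_CACHE_LINE_SIZE  ->   L1_CACHE_SHIFT  (log2 of line size)  */

	#define L1_CACHE_SHIFT	5	/* e.g. 32-byte lines; CPU-dependent */
	#define L1_CACHE_BYTES	(1 << L1_CACHE_SHIFT)

The cache-range routines below keep the same line-count arithmetic,
i.e. in C terms:

	nr_lines = ((stop - (start & ~(L1_CACHE_BYTES - 1)))
		    + L1_CACHE_BYTES - 1) >> L1_CACHE_SHIFT;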

Signed-off-by: Stephen Rothwell <sfr@canb.auug.org.au>
diff --git a/arch/powerpc/kernel/misc_32.S b/arch/powerpc/kernel/misc_32.S
index 2727410..0b0e908 100644
--- a/arch/powerpc/kernel/misc_32.S
+++ b/arch/powerpc/kernel/misc_32.S
@@ -496,21 +496,21 @@
 BEGIN_FTR_SECTION
 	blr				/* for 601, do nothing */
 END_FTR_SECTION_IFCLR(CPU_FTR_SPLIT_ID_CACHE)
-	li	r5,L1_CACHE_LINE_SIZE-1
+	li	r5,L1_CACHE_BYTES-1
 	andc	r3,r3,r5
 	subf	r4,r3,r4
 	add	r4,r4,r5
-	srwi.	r4,r4,LG_L1_CACHE_LINE_SIZE
+	srwi.	r4,r4,L1_CACHE_SHIFT
 	beqlr
 	mtctr	r4
 	mr	r6,r3
 1:	dcbst	0,r3
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
 	bdnz	1b
 	sync				/* wait for dcbst's to get to ram */
 	mtctr	r4
 2:	icbi	0,r6
-	addi	r6,r6,L1_CACHE_LINE_SIZE
+	addi	r6,r6,L1_CACHE_BYTES
 	bdnz	2b
 	sync				/* additional sync needed on g4 */
 	isync
@@ -523,16 +523,16 @@
  * clean_dcache_range(unsigned long start, unsigned long stop)
  */
 _GLOBAL(clean_dcache_range)
-	li	r5,L1_CACHE_LINE_SIZE-1
+	li	r5,L1_CACHE_BYTES-1
 	andc	r3,r3,r5
 	subf	r4,r3,r4
 	add	r4,r4,r5
-	srwi.	r4,r4,LG_L1_CACHE_LINE_SIZE
+	srwi.	r4,r4,L1_CACHE_SHIFT
 	beqlr
 	mtctr	r4
 
 1:	dcbst	0,r3
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
 	bdnz	1b
 	sync				/* wait for dcbst's to get to ram */
 	blr
@@ -544,16 +544,16 @@
  * flush_dcache_range(unsigned long start, unsigned long stop)
  */
 _GLOBAL(flush_dcache_range)
-	li	r5,L1_CACHE_LINE_SIZE-1
+	li	r5,L1_CACHE_BYTES-1
 	andc	r3,r3,r5
 	subf	r4,r3,r4
 	add	r4,r4,r5
-	srwi.	r4,r4,LG_L1_CACHE_LINE_SIZE
+	srwi.	r4,r4,L1_CACHE_SHIFT
 	beqlr
 	mtctr	r4
 
 1:	dcbf	0,r3
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
 	bdnz	1b
 	sync				/* wait for dcbst's to get to ram */
 	blr
@@ -566,16 +566,16 @@
  * invalidate_dcache_range(unsigned long start, unsigned long stop)
  */
 _GLOBAL(invalidate_dcache_range)
-	li	r5,L1_CACHE_LINE_SIZE-1
+	li	r5,L1_CACHE_BYTES-1
 	andc	r3,r3,r5
 	subf	r4,r3,r4
 	add	r4,r4,r5
-	srwi.	r4,r4,LG_L1_CACHE_LINE_SIZE
+	srwi.	r4,r4,L1_CACHE_SHIFT
 	beqlr
 	mtctr	r4
 
 1:	dcbi	0,r3
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
 	bdnz	1b
 	sync				/* wait for dcbi's to get to ram */
 	blr
@@ -596,7 +596,7 @@
 	mtctr	r4
 	lis     r5, KERNELBASE@h
 1:	lwz	r3, 0(r5)		/* Load one word from every line */
-	addi	r5, r5, L1_CACHE_LINE_SIZE
+	addi	r5, r5, L1_CACHE_BYTES
 	bdnz    1b
 	blr
 #endif /* CONFIG_NOT_COHERENT_CACHE */
@@ -614,16 +614,16 @@
 	blr					/* for 601, do nothing */
 END_FTR_SECTION_IFCLR(CPU_FTR_SPLIT_ID_CACHE)
 	rlwinm	r3,r3,0,0,19			/* Get page base address */
-	li	r4,4096/L1_CACHE_LINE_SIZE	/* Number of lines in a page */
+	li	r4,4096/L1_CACHE_BYTES	/* Number of lines in a page */
 	mtctr	r4
 	mr	r6,r3
 0:	dcbst	0,r3				/* Write line to ram */
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
 	bdnz	0b
 	sync
 	mtctr	r4
 1:	icbi	0,r6
-	addi	r6,r6,L1_CACHE_LINE_SIZE
+	addi	r6,r6,L1_CACHE_BYTES
 	bdnz	1b
 	sync
 	isync
@@ -646,16 +646,16 @@
 	mtmsr	r0
 	isync
 	rlwinm	r3,r3,0,0,19			/* Get page base address */
-	li	r4,4096/L1_CACHE_LINE_SIZE	/* Number of lines in a page */
+	li	r4,4096/L1_CACHE_BYTES	/* Number of lines in a page */
 	mtctr	r4
 	mr	r6,r3
 0:	dcbst	0,r3				/* Write line to ram */
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
 	bdnz	0b
 	sync
 	mtctr	r4
 1:	icbi	0,r6
-	addi	r6,r6,L1_CACHE_LINE_SIZE
+	addi	r6,r6,L1_CACHE_BYTES
 	bdnz	1b
 	sync
 	mtmsr	r10				/* restore DR */
@@ -670,7 +670,7 @@
  * void clear_pages(void *page, int order) ;
  */
 _GLOBAL(clear_pages)
-	li	r0,4096/L1_CACHE_LINE_SIZE
+	li	r0,4096/L1_CACHE_BYTES
 	slw	r0,r0,r4
 	mtctr	r0
 #ifdef CONFIG_8xx
@@ -682,7 +682,7 @@
 #else
 1:	dcbz	0,r3
 #endif
-	addi	r3,r3,L1_CACHE_LINE_SIZE
+	addi	r3,r3,L1_CACHE_BYTES
 	bdnz	1b
 	blr
 
@@ -708,7 +708,7 @@
 
 #ifdef CONFIG_8xx
 	/* don't use prefetch on 8xx */
-    	li	r0,4096/L1_CACHE_LINE_SIZE
+    	li	r0,4096/L1_CACHE_BYTES
 	mtctr	r0
 1:	COPY_16_BYTES
 	bdnz	1b
@@ -722,13 +722,13 @@
 	li	r11,4
 	mtctr	r0
 11:	dcbt	r11,r4
-	addi	r11,r11,L1_CACHE_LINE_SIZE
+	addi	r11,r11,L1_CACHE_BYTES
 	bdnz	11b
 #else /* MAX_COPY_PREFETCH == 1 */
 	dcbt	r5,r4
-	li	r11,L1_CACHE_LINE_SIZE+4
+	li	r11,L1_CACHE_BYTES+4
 #endif /* MAX_COPY_PREFETCH */
-	li	r0,4096/L1_CACHE_LINE_SIZE - MAX_COPY_PREFETCH
+	li	r0,4096/L1_CACHE_BYTES - MAX_COPY_PREFETCH
 	crclr	4*cr0+eq
 2:
 	mtctr	r0
@@ -736,12 +736,12 @@
 	dcbt	r11,r4
 	dcbz	r5,r3
 	COPY_16_BYTES
-#if L1_CACHE_LINE_SIZE >= 32
+#if L1_CACHE_BYTES >= 32
 	COPY_16_BYTES
-#if L1_CACHE_LINE_SIZE >= 64
+#if L1_CACHE_BYTES >= 64
 	COPY_16_BYTES
 	COPY_16_BYTES
-#if L1_CACHE_LINE_SIZE >= 128
+#if L1_CACHE_BYTES >= 128
 	COPY_16_BYTES
 	COPY_16_BYTES
 	COPY_16_BYTES