[PATCH] Kill L1_CACHE_SHIFT_MAX

Kill L1_CACHE_SHIFT_MAX from all arches.  It is no longer used now that the
INTERNODE_CACHE_SHIFT infrastructure is in place: the dma_get_cache_alignment()
implementations that used it as a conservative upper bound now return
(1 << INTERNODE_CACHE_SHIFT) instead.
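
For reference, a minimal sketch of what this change relies on, assuming the
companion INTERNODE_CACHE patch leaves the generic fallback in
include/linux/cache.h roughly as below (the exact upstream definition and any
per-arch overrides, e.g. for systems with large internode cachelines, may
differ):

    /* include/linux/cache.h -- sketch of the assumed fallback; arches that
     * need a wider internode cacheline override INTERNODE_CACHE_SHIFT */
    #include <asm/cache.h>		/* provides L1_CACHE_SHIFT */

    #ifndef INTERNODE_CACHE_SHIFT
    #define INTERNODE_CACHE_SHIFT	L1_CACHE_SHIFT
    #endif
    #define INTERNODE_CACHE_BYTES	(1 << INTERNODE_CACHE_SHIFT)

    /* dma_get_cache_alignment() keeps returning a conservative upper
     * bound, now expressed via the internode cacheline rather than the
     * old per-arch L1_CACHE_SHIFT_MAX constant: */
    static inline int dma_get_cache_alignment(void)
    {
    	return (1 << INTERNODE_CACHE_SHIFT);
    }

On architectures that do not override INTERNODE_CACHE_SHIFT this is equivalent
to aligning to the L1 cacheline, so behaviour there should be unchanged.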

Signed-off-by: Ravikiran Thirumalai <kiran@scalex86.org>
Signed-off-by: Shai Fultheim <shai@scalex86.org>
Signed-off-by: Andi Kleen <ak@suse.de>
Signed-off-by: Andrew Morton <akpm@osdl.org>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
diff --git a/include/asm-alpha/cache.h b/include/asm-alpha/cache.h
index e69b295..e6d4d16 100644
--- a/include/asm-alpha/cache.h
+++ b/include/asm-alpha/cache.h
@@ -20,6 +20,5 @@
 
 #define L1_CACHE_ALIGN(x)  (((x)+(L1_CACHE_BYTES-1))&~(L1_CACHE_BYTES-1))
 #define SMP_CACHE_BYTES    L1_CACHE_BYTES
-#define L1_CACHE_SHIFT_MAX L1_CACHE_SHIFT
 
 #endif
diff --git a/include/asm-arm/cache.h b/include/asm-arm/cache.h
index 8d161f7..31332c8 100644
--- a/include/asm-arm/cache.h
+++ b/include/asm-arm/cache.h
@@ -7,9 +7,4 @@
 #define L1_CACHE_SHIFT		5
 #define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)
 
-/*
- * largest L1 which this arch supports
- */
-#define L1_CACHE_SHIFT_MAX	5
-
 #endif
diff --git a/include/asm-cris/arch-v10/cache.h b/include/asm-cris/arch-v10/cache.h
index 1d1d1ba..aea2718 100644
--- a/include/asm-cris/arch-v10/cache.h
+++ b/include/asm-cris/arch-v10/cache.h
@@ -4,6 +4,5 @@
 /* Etrax 100LX have 32-byte cache-lines. */
 #define L1_CACHE_BYTES 32
 #define L1_CACHE_SHIFT 5
-#define L1_CACHE_SHIFT_MAX 5
 
 #endif /* _ASM_ARCH_CACHE_H */
diff --git a/include/asm-cris/arch-v32/cache.h b/include/asm-cris/arch-v32/cache.h
index 4fed8d6..80b236b 100644
--- a/include/asm-cris/arch-v32/cache.h
+++ b/include/asm-cris/arch-v32/cache.h
@@ -4,6 +4,5 @@
 /* A cache-line is 32 bytes. */
 #define L1_CACHE_BYTES 32
 #define L1_CACHE_SHIFT 5
-#define L1_CACHE_SHIFT_MAX 5
 
 #endif /* _ASM_CRIS_ARCH_CACHE_H */
diff --git a/include/asm-cris/dma-mapping.h b/include/asm-cris/dma-mapping.h
index 8eff513..cbf1a98 100644
--- a/include/asm-cris/dma-mapping.h
+++ b/include/asm-cris/dma-mapping.h
@@ -153,7 +153,7 @@
 static inline int
 dma_get_cache_alignment(void)
 {
-	return (1 << L1_CACHE_SHIFT_MAX);
+	return (1 << INTERNODE_CACHE_SHIFT);
 }
 
 #define dma_is_consistent(d)	(1)
diff --git a/include/asm-generic/dma-mapping.h b/include/asm-generic/dma-mapping.h
index 747d790..1b35620 100644
--- a/include/asm-generic/dma-mapping.h
+++ b/include/asm-generic/dma-mapping.h
@@ -274,7 +274,7 @@
 {
 	/* no easy way to get cache size on all processors, so return
 	 * the maximum possible, to be safe */
-	return (1 << L1_CACHE_SHIFT_MAX);
+	return (1 << INTERNODE_CACHE_SHIFT);
 }
 
 static inline void
diff --git a/include/asm-i386/cache.h b/include/asm-i386/cache.h
index 8497887..615911e 100644
--- a/include/asm-i386/cache.h
+++ b/include/asm-i386/cache.h
@@ -10,6 +10,4 @@
 #define L1_CACHE_SHIFT	(CONFIG_X86_L1_CACHE_SHIFT)
 #define L1_CACHE_BYTES	(1 << L1_CACHE_SHIFT)
 
-#define L1_CACHE_SHIFT_MAX 7	/* largest L1 which this arch supports */
-
 #endif
diff --git a/include/asm-i386/dma-mapping.h b/include/asm-i386/dma-mapping.h
index e56c335..6c37a9a 100644
--- a/include/asm-i386/dma-mapping.h
+++ b/include/asm-i386/dma-mapping.h
@@ -150,7 +150,7 @@
 {
 	/* no easy way to get cache size on all x86, so return the
 	 * maximum possible, to be safe */
-	return (1 << L1_CACHE_SHIFT_MAX);
+	return (1 << INTERNODE_CACHE_SHIFT);
 }
 
 #define dma_is_consistent(d)	(1)
diff --git a/include/asm-ia64/cache.h b/include/asm-ia64/cache.h
index 666d8f1..40dd2519 100644
--- a/include/asm-ia64/cache.h
+++ b/include/asm-ia64/cache.h
@@ -12,8 +12,6 @@
 #define L1_CACHE_SHIFT		CONFIG_IA64_L1_CACHE_SHIFT
 #define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)
 
-#define L1_CACHE_SHIFT_MAX 7	/* largest L1 which this arch supports */
-
 #ifdef CONFIG_SMP
 # define SMP_CACHE_SHIFT	L1_CACHE_SHIFT
 # define SMP_CACHE_BYTES	L1_CACHE_BYTES
diff --git a/include/asm-m32r/cache.h b/include/asm-m32r/cache.h
index 7248205..9c2b2d9 100644
--- a/include/asm-m32r/cache.h
+++ b/include/asm-m32r/cache.h
@@ -7,6 +7,4 @@
 #define L1_CACHE_SHIFT		4
 #define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)
 
-#define L1_CACHE_SHIFT_MAX	4
-
 #endif  /* _ASM_M32R_CACHE_H */
diff --git a/include/asm-m68k/cache.h b/include/asm-m68k/cache.h
index 6161fd3..fed3fd3 100644
--- a/include/asm-m68k/cache.h
+++ b/include/asm-m68k/cache.h
@@ -8,6 +8,4 @@
 #define        L1_CACHE_SHIFT  4
 #define        L1_CACHE_BYTES  (1<< L1_CACHE_SHIFT)
 
-#define L1_CACHE_SHIFT_MAX 4	/* largest L1 which this arch supports */
-
 #endif
diff --git a/include/asm-mips/cache.h b/include/asm-mips/cache.h
index 1a5d1a6..55e19f2 100644
--- a/include/asm-mips/cache.h
+++ b/include/asm-mips/cache.h
@@ -15,7 +15,6 @@
 #define L1_CACHE_SHIFT		CONFIG_MIPS_L1_CACHE_SHIFT
 #define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)
 
-#define L1_CACHE_SHIFT_MAX	6
 #define SMP_CACHE_SHIFT		L1_CACHE_SHIFT
 #define SMP_CACHE_BYTES		L1_CACHE_BYTES
 
diff --git a/include/asm-parisc/cache.h b/include/asm-parisc/cache.h
index 5da72e3..38d201b 100644
--- a/include/asm-parisc/cache.h
+++ b/include/asm-parisc/cache.h
@@ -28,7 +28,6 @@
 #define L1_CACHE_ALIGN(x)       (((x)+(L1_CACHE_BYTES-1))&~(L1_CACHE_BYTES-1))
 
 #define SMP_CACHE_BYTES L1_CACHE_BYTES
-#define L1_CACHE_SHIFT_MAX 5	/* largest L1 which this arch supports */
 
 extern void flush_data_cache_local(void);  /* flushes local data-cache only */
 extern void flush_instruction_cache_local(void); /* flushes local code-cache only */
diff --git a/include/asm-powerpc/cache.h b/include/asm-powerpc/cache.h
index 26ce502..6379c2d 100644
--- a/include/asm-powerpc/cache.h
+++ b/include/asm-powerpc/cache.h
@@ -19,7 +19,6 @@
 #define	L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)
 
 #define	SMP_CACHE_BYTES		L1_CACHE_BYTES
-#define L1_CACHE_SHIFT_MAX	7 /* largest L1 which this arch supports */
 
 #if defined(__powerpc64__) && !defined(__ASSEMBLY__)
 struct ppc64_caches {
diff --git a/include/asm-powerpc/dma-mapping.h b/include/asm-powerpc/dma-mapping.h
index 59a8016..a96e574 100644
--- a/include/asm-powerpc/dma-mapping.h
+++ b/include/asm-powerpc/dma-mapping.h
@@ -229,7 +229,7 @@
 #ifdef CONFIG_PPC64
 	/* no easy way to get cache size on all processors, so return
 	 * the maximum possible, to be safe */
-	return (1 << L1_CACHE_SHIFT_MAX);
+	return (1 << INTERNODE_CACHE_SHIFT);
 #else
 	/*
 	 * Each processor family will define its own L1_CACHE_SHIFT,
diff --git a/include/asm-s390/cache.h b/include/asm-s390/cache.h
index 2984537..e20cdd9 100644
--- a/include/asm-s390/cache.h
+++ b/include/asm-s390/cache.h
@@ -13,7 +13,6 @@
 
 #define L1_CACHE_BYTES     256
 #define L1_CACHE_SHIFT     8
-#define L1_CACHE_SHIFT_MAX 8	/* largest L1 which this arch supports */
 
 #define ARCH_KMALLOC_MINALIGN	8
 
diff --git a/include/asm-sh/cache.h b/include/asm-sh/cache.h
index 9b4dd6d..656fdfe 100644
--- a/include/asm-sh/cache.h
+++ b/include/asm-sh/cache.h
@@ -22,8 +22,6 @@
 
 #define L1_CACHE_ALIGN(x)	(((x)+(L1_CACHE_BYTES-1))&~(L1_CACHE_BYTES-1))
 
-#define L1_CACHE_SHIFT_MAX 	5	/* largest L1 which this arch supports */
-
 struct cache_info {
 	unsigned int ways;
 	unsigned int sets;
diff --git a/include/asm-sh64/cache.h b/include/asm-sh64/cache.h
index f54e85e..a4f36f0 100644
--- a/include/asm-sh64/cache.h
+++ b/include/asm-sh64/cache.h
@@ -20,8 +20,6 @@
 #define L1_CACHE_ALIGN_MASK	(~(L1_CACHE_BYTES - 1))
 #define L1_CACHE_ALIGN(x)	(((x)+(L1_CACHE_BYTES - 1)) & L1_CACHE_ALIGN_MASK)
 #define L1_CACHE_SIZE_BYTES	(L1_CACHE_BYTES << 10)
-/* Largest L1 which this arch supports */
-#define L1_CACHE_SHIFT_MAX	5
 
 #ifdef MODULE
 #define __cacheline_aligned __attribute__((__aligned__(L1_CACHE_BYTES)))
diff --git a/include/asm-sparc/cache.h b/include/asm-sparc/cache.h
index a10522c..cb971e8 100644
--- a/include/asm-sparc/cache.h
+++ b/include/asm-sparc/cache.h
@@ -13,7 +13,6 @@
 #define L1_CACHE_SHIFT 5
 #define L1_CACHE_BYTES 32
 #define L1_CACHE_ALIGN(x) ((((x)+(L1_CACHE_BYTES-1))&~(L1_CACHE_BYTES-1)))
-#define L1_CACHE_SHIFT_MAX 5	/* largest L1 which this arch supports */
 
 #define SMP_CACHE_BYTES 32
 
diff --git a/include/asm-sparc64/cache.h b/include/asm-sparc64/cache.h
index ade5ec3..f7d35a2 100644
--- a/include/asm-sparc64/cache.h
+++ b/include/asm-sparc64/cache.h
@@ -9,7 +9,6 @@
 #define        L1_CACHE_BYTES	32 /* Two 16-byte sub-blocks per line. */
 
 #define        L1_CACHE_ALIGN(x)       (((x)+(L1_CACHE_BYTES-1))&~(L1_CACHE_BYTES-1))
-#define		L1_CACHE_SHIFT_MAX 5	/* largest L1 which this arch supports */
 
 #define        SMP_CACHE_BYTES_SHIFT	6
 #define        SMP_CACHE_BYTES		(1 << SMP_CACHE_BYTES_SHIFT) /* L2 cache line size. */
diff --git a/include/asm-um/cache.h b/include/asm-um/cache.h
index a10602a..3d05870 100644
--- a/include/asm-um/cache.h
+++ b/include/asm-um/cache.h
@@ -13,9 +13,6 @@
 # define L1_CACHE_SHIFT		5
 #endif
 
-/* XXX: this is valid for x86 and x86_64. */
-#define L1_CACHE_SHIFT_MAX	7	/* largest L1 which this arch supports */
-
 #define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)
 
 #endif
diff --git a/include/asm-v850/cache.h b/include/asm-v850/cache.h
index cbf9096..8832c7e 100644
--- a/include/asm-v850/cache.h
+++ b/include/asm-v850/cache.h
@@ -23,6 +23,4 @@
 #define L1_CACHE_SHIFT		4
 #endif
 
-#define L1_CACHE_SHIFT_MAX	L1_CACHE_SHIFT
-
 #endif /* __V850_CACHE_H__ */
diff --git a/include/asm-x86_64/cache.h b/include/asm-x86_64/cache.h
index 33e5342..b4a2401 100644
--- a/include/asm-x86_64/cache.h
+++ b/include/asm-x86_64/cache.h
@@ -9,6 +9,5 @@
 /* L1 cache line size */
 #define L1_CACHE_SHIFT	(CONFIG_X86_L1_CACHE_SHIFT)
 #define L1_CACHE_BYTES	(1 << L1_CACHE_SHIFT)
-#define L1_CACHE_SHIFT_MAX 7	/* largest L1 which this arch supports */
 
 #endif