Diffstat (limited to 'arch')

 -rw-r--r--  arch/arm/include/asm/cache.h                  | 2 +-
 -rw-r--r--  arch/avr32/include/asm/cache.h                | 2 +-
 -rw-r--r--  arch/blackfin/include/asm/cache.h             | 2 +-
 -rw-r--r--  arch/frv/include/asm/mem-layout.h             | 2 +-
 -rw-r--r--  arch/m68k/include/asm/cache.h                 | 2 +-
 -rw-r--r--  arch/microblaze/include/asm/page.h            | 2 +-
 -rw-r--r--  arch/mips/include/asm/mach-generic/kmalloc.h  | 2 +-
 -rw-r--r--  arch/mips/include/asm/mach-ip27/kmalloc.h     | 2 +-
 -rw-r--r--  arch/mips/include/asm/mach-ip32/kmalloc.h     | 4 ++--
 -rw-r--r--  arch/mn10300/include/asm/cache.h              | 2 +-
 -rw-r--r--  arch/powerpc/include/asm/page_32.h            | 2 +-
 -rw-r--r--  arch/sh/include/asm/page.h                    | 2 +-
 -rw-r--r--  arch/xtensa/include/asm/cache.h               | 2 +-
 13 files changed, 14 insertions(+), 14 deletions(-)
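
This diff, limited to 'arch', renames the per-architecture ARCH_KMALLOC_MINALIGN definitions to ARCH_DMA_MINALIGN. The generic slab side is not shown here; the expectation is that the common headers keep the old name available by deriving it from the new per-arch constant, roughly as in the sketch below (an illustration of the intended relationship, not a hunk from this diff):

/* Sketch: generic slab header deriving the kmalloc minimum alignment from
 * the per-arch DMA alignment when an architecture defines one. */
#ifdef ARCH_DMA_MINALIGN
#define ARCH_KMALLOC_MINALIGN	ARCH_DMA_MINALIGN
#else
#define ARCH_KMALLOC_MINALIGN	__alignof__(unsigned long long)
#endif

Architectures that never do non-coherent DMA simply leave ARCH_DMA_MINALIGN undefined and fall back to the natural alignment.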
diff --git a/arch/arm/include/asm/cache.h b/arch/arm/include/asm/cache.h
index 66c160b8547f..9d6122096fbe 100644
--- a/arch/arm/include/asm/cache.h
+++ b/arch/arm/include/asm/cache.h
@@ -14,7 +14,7 @@
* cache before the transfer is done, causing old data to be seen by
* the CPU.
*/
-#define ARCH_KMALLOC_MINALIGN L1_CACHE_BYTES
+#define ARCH_DMA_MINALIGN L1_CACHE_BYTES
/*
* With EABI on ARMv5 and above we must have 64-bit aligned slab pointers.
diff --git a/arch/avr32/include/asm/cache.h b/arch/avr32/include/asm/cache.h
index d3cf35ab11ab..c3a58a189a91 100644
--- a/arch/avr32/include/asm/cache.h
+++ b/arch/avr32/include/asm/cache.h
@@ -11,7 +11,7 @@
* cache before the transfer is done, causing old data to be seen by
* the CPU.
*/
-#define ARCH_KMALLOC_MINALIGN L1_CACHE_BYTES
+#define ARCH_DMA_MINALIGN L1_CACHE_BYTES
#ifndef __ASSEMBLER__
struct cache_info {
diff --git a/arch/blackfin/include/asm/cache.h b/arch/blackfin/include/asm/cache.h
index 93f6c634fdf4..bd0641a267f1 100644
--- a/arch/blackfin/include/asm/cache.h
+++ b/arch/blackfin/include/asm/cache.h
@@ -15,7 +15,7 @@
#define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)
#define SMP_CACHE_BYTES L1_CACHE_BYTES
-#define ARCH_KMALLOC_MINALIGN L1_CACHE_BYTES
+#define ARCH_DMA_MINALIGN L1_CACHE_BYTES
#ifdef CONFIG_SMP
#define __cacheline_aligned
diff --git a/arch/frv/include/asm/mem-layout.h b/arch/frv/include/asm/mem-layout.h
index ccae981876fa..e9a0ec85a402 100644
--- a/arch/frv/include/asm/mem-layout.h
+++ b/arch/frv/include/asm/mem-layout.h
@@ -35,7 +35,7 @@
* the slab must be aligned such that load- and store-double instructions don't
* fault if used
*/
-#define ARCH_KMALLOC_MINALIGN L1_CACHE_BYTES
+#define ARCH_DMA_MINALIGN L1_CACHE_BYTES
#define ARCH_SLAB_MINALIGN L1_CACHE_BYTES
/*****************************************************************************/
diff --git a/arch/m68k/include/asm/cache.h b/arch/m68k/include/asm/cache.h
index ecafbe1718c3..0395c51e46a6 100644
--- a/arch/m68k/include/asm/cache.h
+++ b/arch/m68k/include/asm/cache.h
@@ -8,6 +8,6 @@
#define L1_CACHE_SHIFT 4
#define L1_CACHE_BYTES (1<< L1_CACHE_SHIFT)
-#define ARCH_KMALLOC_MINALIGN L1_CACHE_BYTES
+#define ARCH_DMA_MINALIGN L1_CACHE_BYTES
#endif
diff --git a/arch/microblaze/include/asm/page.h b/arch/microblaze/include/asm/page.h
index 4f268faa0126..cf377d91da71 100644
--- a/arch/microblaze/include/asm/page.h
+++ b/arch/microblaze/include/asm/page.h
@@ -40,7 +40,7 @@
#ifndef __ASSEMBLY__
/* MS be sure that SLAB allocates aligned objects */
-#define ARCH_KMALLOC_MINALIGN L1_CACHE_BYTES
+#define ARCH_DMA_MINALIGN L1_CACHE_BYTES
#define ARCH_SLAB_MINALIGN L1_CACHE_BYTES
diff --git a/arch/mips/include/asm/mach-generic/kmalloc.h b/arch/mips/include/asm/mach-generic/kmalloc.h
index b8e6deba352f..a5d669086ed9 100644
--- a/arch/mips/include/asm/mach-generic/kmalloc.h
+++ b/arch/mips/include/asm/mach-generic/kmalloc.h
@@ -7,7 +7,7 @@
* Total overkill for most systems but need as a safe default.
* Set this one if any device in the system might do non-coherent DMA.
*/
-#define ARCH_KMALLOC_MINALIGN 128
+#define ARCH_DMA_MINALIGN 128
#endif
#endif /* __ASM_MACH_GENERIC_KMALLOC_H */
diff --git a/arch/mips/include/asm/mach-ip27/kmalloc.h b/arch/mips/include/asm/mach-ip27/kmalloc.h
index 426bd049b2d7..82c23ce2afa7 100644
--- a/arch/mips/include/asm/mach-ip27/kmalloc.h
+++ b/arch/mips/include/asm/mach-ip27/kmalloc.h
@@ -2,7 +2,7 @@
#define __ASM_MACH_IP27_KMALLOC_H
/*
- * All happy, no need to define ARCH_KMALLOC_MINALIGN
+ * All happy, no need to define ARCH_DMA_MINALIGN
*/
#endif /* __ASM_MACH_IP27_KMALLOC_H */
diff --git a/arch/mips/include/asm/mach-ip32/kmalloc.h b/arch/mips/include/asm/mach-ip32/kmalloc.h
index b1e0be60f720..042ca926c48f 100644
--- a/arch/mips/include/asm/mach-ip32/kmalloc.h
+++ b/arch/mips/include/asm/mach-ip32/kmalloc.h
@@ -3,9 +3,9 @@
#if defined(CONFIG_CPU_R5000) || defined(CONFIG_CPU_RM7000)
-#define ARCH_KMALLOC_MINALIGN 32
+#define ARCH_DMA_MINALIGN 32
#else
-#define ARCH_KMALLOC_MINALIGN 128
+#define ARCH_DMA_MINALIGN 128
#endif
#endif /* __ASM_MACH_IP32_KMALLOC_H */
diff --git a/arch/mn10300/include/asm/cache.h b/arch/mn10300/include/asm/cache.h
index 6e2fe28dde4e..781bf613366d 100644
--- a/arch/mn10300/include/asm/cache.h
+++ b/arch/mn10300/include/asm/cache.h
@@ -21,7 +21,7 @@
#define L1_CACHE_DISPARITY L1_CACHE_NENTRIES * L1_CACHE_BYTES
#endif
-#define ARCH_KMALLOC_MINALIGN L1_CACHE_BYTES
+#define ARCH_DMA_MINALIGN L1_CACHE_BYTES
/* data cache purge registers
* - read from the register to unconditionally purge that cache line
diff --git a/arch/powerpc/include/asm/page_32.h b/arch/powerpc/include/asm/page_32.h
index bd0849dbcaaa..68d73b2a7bfc 100644
--- a/arch/powerpc/include/asm/page_32.h
+++ b/arch/powerpc/include/asm/page_32.h
@@ -10,7 +10,7 @@
#define VM_DATA_DEFAULT_FLAGS VM_DATA_DEFAULT_FLAGS32
#ifdef CONFIG_NOT_COHERENT_CACHE
-#define ARCH_KMALLOC_MINALIGN L1_CACHE_BYTES
+#define ARCH_DMA_MINALIGN L1_CACHE_BYTES
#endif
#ifdef CONFIG_PTE_64BIT
diff --git a/arch/sh/include/asm/page.h b/arch/sh/include/asm/page.h
index fb703d120d09..c4e0b3d472b9 100644
--- a/arch/sh/include/asm/page.h
+++ b/arch/sh/include/asm/page.h
@@ -180,7 +180,7 @@ typedef struct page *pgtable_t;
* Some drivers need to perform DMA into kmalloc'ed buffers
* and so we have to increase the kmalloc minalign for this.
*/
-#define ARCH_KMALLOC_MINALIGN L1_CACHE_BYTES
+#define ARCH_DMA_MINALIGN L1_CACHE_BYTES
#ifdef CONFIG_SUPERH64
/*
diff --git a/arch/xtensa/include/asm/cache.h b/arch/xtensa/include/asm/cache.h
index ed8cd3cbd499..d2fd932fdb4d 100644
--- a/arch/xtensa/include/asm/cache.h
+++ b/arch/xtensa/include/asm/cache.h
@@ -29,6 +29,6 @@
# define CACHE_WAY_SIZE ICACHE_WAY_SIZE
#endif
-#define ARCH_KMALLOC_MINALIGN L1_CACHE_BYTES
+#define ARCH_DMA_MINALIGN L1_CACHE_BYTES
#endif /* _XTENSA_CACHE_H */
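
For context on why these architectures need the minimum alignment at all: on a cache-incoherent machine, a DMA transfer into a kmalloc'ed buffer that shares a cache line with an unrelated allocation can be corrupted when the neighbouring line is written back, or the CPU can read stale data after the transfer completes. The hypothetical driver helper below (using the standard DMA mapping API; the function itself is made up for illustration) shows the pattern the alignment protects:

#include <linux/dma-mapping.h>
#include <linux/slab.h>

/* Hypothetical receive path: DMA straight into a kmalloc'ed buffer.
 * kmalloc() returns memory aligned to at least ARCH_DMA_MINALIGN, so the
 * cache lines flushed/invalidated around the transfer cannot overlap an
 * unrelated neighbouring allocation. */
static void *rx_into_kmalloc(struct device *dev, size_t len)
{
	void *buf = kmalloc(len, GFP_KERNEL);
	dma_addr_t handle;

	if (!buf)
		return NULL;

	handle = dma_map_single(dev, buf, len, DMA_FROM_DEVICE);
	if (dma_mapping_error(dev, handle)) {
		kfree(buf);
		return NULL;
	}

	/* ... point the device at 'handle' and wait for the transfer ... */

	dma_unmap_single(dev, handle, len, DMA_FROM_DEVICE);
	return buf;
}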