author    David Woodhouse <dwmw2@infradead.org>    2010-05-19 12:01:42 +0100
committer Pekka Enberg <penberg@cs.helsinki.fi>    2010-05-19 22:03:13 +0300
commit    1f0ce8b3dd667dca720a47869f8110c298f0e5b8 (patch)
tree      c0e260b5d8e6d3fe55be2bf6772265ad48878efd /mm/slab.c
parent    e40152ee1e1c7a63f4777791863215e3faa37a86 (diff)
download  linux-1f0ce8b3dd667dca720a47869f8110c298f0e5b8.tar.bz2
mm: Move ARCH_SLAB_MINALIGN and ARCH_KMALLOC_MINALIGN to <linux/slab_def.h>
Acked-by: Herbert Xu <herbert@gondor.apana.org.au>
Signed-off-by: David Woodhouse <David.Woodhouse@intel.com>
Signed-off-by: Pekka Enberg <penberg@cs.helsinki.fi>
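The <linux/slab_def.h> side of the move is not part of this diffstat, but the guarded definitions deleted below give a good picture of what lands there. A minimal sketch, assuming the block is carried over essentially unchanged into <linux/slab_def.h>:

/*
 * Sketch of the fallback definitions after the move (assumed to mirror
 * the block removed below; the actual <linux/slab_def.h> hunk is not
 * shown in this diffstat).
 */
#ifndef ARCH_KMALLOC_MINALIGN
/* Architectures that DMA into kmalloc()ed buffers can raise this. */
#define ARCH_KMALLOC_MINALIGN __alignof__(unsigned long long)
#endif

#ifndef ARCH_SLAB_MINALIGN
/* Minimum alignment for all caches; 0 means no extra constraint. */
#define ARCH_SLAB_MINALIGN 0
#endif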
Diffstat (limited to 'mm/slab.c')
-rw-r--r--  mm/slab.c | 24 ------------------------
1 file changed, 0 insertions(+), 24 deletions(-)
diff --git a/mm/slab.c b/mm/slab.c
index bac0f4fcc216..7401ddc24306 100644
--- a/mm/slab.c
+++ b/mm/slab.c
@@ -144,30 +144,6 @@
#define BYTES_PER_WORD sizeof(void *)
#define REDZONE_ALIGN max(BYTES_PER_WORD, __alignof__(unsigned long long))
-#ifndef ARCH_KMALLOC_MINALIGN
-/*
- * Enforce a minimum alignment for the kmalloc caches.
- * Usually, the kmalloc caches are cache_line_size() aligned, except when
- * DEBUG and FORCED_DEBUG are enabled, then they are BYTES_PER_WORD aligned.
- * Some archs want to perform DMA into kmalloc caches and need a guaranteed
- * alignment larger than the alignment of a 64-bit integer.
- * ARCH_KMALLOC_MINALIGN allows that.
- * Note that increasing this value may disable some debug features.
- */
-#define ARCH_KMALLOC_MINALIGN __alignof__(unsigned long long)
-#endif
-
-#ifndef ARCH_SLAB_MINALIGN
-/*
- * Enforce a minimum alignment for all caches.
- * Intended for archs that get misalignment faults even for BYTES_PER_WORD
- * aligned buffers. Includes ARCH_KMALLOC_MINALIGN.
- * If possible: Do not enable this flag for CONFIG_DEBUG_SLAB, it disables
- * some debug features.
- */
-#define ARCH_SLAB_MINALIGN 0
-#endif
-
#ifndef ARCH_KMALLOC_FLAGS
#define ARCH_KMALLOC_FLAGS SLAB_HWCACHE_ALIGN
#endif
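
As the removed comments note, the #ifndef guards exist so that an architecture can force a larger minimum alignment, typically so devices can DMA straight into kmalloc() memory. A hedged illustration of how an arch header might override the default once the fallback lives in <linux/slab_def.h> (the header path and values are illustrative, not taken from this patch):

/* arch/<arch>/include/asm/cache.h (illustrative, not from this patch) */
#define L1_CACHE_SHIFT		6
#define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

/*
 * Make every kmalloc() allocation cache-line aligned so non-coherent
 * DMA never shares a cache line with unrelated data; the #ifndef
 * fallback in <linux/slab_def.h> then leaves this definition in place.
 */
#define ARCH_KMALLOC_MINALIGN	L1_CACHE_BYTES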