author    Christoph Lameter <cl@linux.com>    2013-01-10 23:14:19 +0400
committer Pekka Enberg <penberg@kernel.org>   2013-02-01 14:32:08 +0400
commit    2c59dd6544212faa5ce761920d2251f4152f408d (patch)
tree      c2547eb50205b72368e0b4758fc7c9a0111238a5 /mm/slab.h
parent    9e5e8deca74603357626471a9b44f05dea9e32b1 (diff)
download  linux-2c59dd6544212faa5ce761920d2251f4152f408d.tar.xz
slab: Common Kmalloc cache determination
Extract the optimized lookup functions from slub and put them into
slab_common.c. Then make slab use these functions as well.

Joonsoo notes that this fixes some issues with constant folding which
also reduces the code size for slub.

https://lkml.org/lkml/2012/10/20/82

Signed-off-by: Christoph Lameter <cl@linux.com>
Signed-off-by: Pekka Enberg <penberg@kernel.org>
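To make the constant-folding point concrete, below is a minimal, standalone C
sketch of the kind of size-to-cache-index lookup this commit moves into common
code: small requests are resolved through a byte table, larger ones through the
position of the highest set bit. The table values and names are illustrative,
not the kernel's actual slab_common.c code.

/*
 * Minimal userspace sketch of a size -> kmalloc-cache-index lookup,
 * in the spirit of the code this commit moves into slab_common.c.
 * Table values and names are illustrative, not the kernel's.
 */
#include <stddef.h>
#include <stdio.h>

/* index 1 -> 96 bytes, index 2 -> 192 bytes, index n -> 1 << n bytes */
static const unsigned char size_index[24] = {
	3, 4, 5, 5, 6, 6, 6, 6,		/* requests of   1 ..  64 bytes */
	1, 1, 1, 1, 7, 7, 7, 7,		/* requests of  65 .. 128 bytes */
	2, 2, 2, 2, 2, 2, 2, 2,		/* requests of 129 .. 192 bytes */
};

static unsigned int fls_sketch(size_t x)	/* highest set bit position */
{
	unsigned int r = 0;

	while (x) {
		x >>= 1;
		r++;
	}
	return r;
}

/* Map a nonzero kmalloc() request size to a kmalloc cache index. */
static unsigned int kmalloc_index_sketch(size_t size)
{
	if (size <= 192)
		return size_index[(size - 1) / 8];
	return fls_sketch(size - 1);
}

int main(void)
{
	size_t sizes[] = { 8, 100, 192, 200, 4096 };

	for (size_t i = 0; i < sizeof(sizes) / sizeof(sizes[0]); i++)
		printf("kmalloc(%zu) -> cache index %u\n",
		       sizes[i], kmalloc_index_sketch(sizes[i]));
	return 0;
}

Because the table and the power-of-two fallback are built from compile-time
constants, a constant size argument lets the compiler fold the whole lookup
away, which is the code-size benefit the commit message credits to Joonsoo's
observation.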
Diffstat (limited to 'mm/slab.h')
 -rw-r--r--  mm/slab.h | 3 +++
 1 file changed, 3 insertions(+), 0 deletions(-)
diff --git a/mm/slab.h b/mm/slab.h
index 44c0bd6dc19e..c01bc8921ac5 100644
--- a/mm/slab.h
+++ b/mm/slab.h
@@ -38,6 +38,9 @@ unsigned long calculate_alignment(unsigned long flags,
#ifndef CONFIG_SLOB
/* Kmalloc array related functions */
void create_kmalloc_caches(unsigned long);
+
+/* Find the kmalloc slab corresponding to a certain size */
+struct kmem_cache *kmalloc_slab(size_t, gfp_t);
#endif
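
For context, here is a kernel-style illustration (not part of this patch) of
how an allocator path could use the kmalloc_slab() declaration added above;
do_kmalloc_sketch() is a hypothetical helper name.

/*
 * Illustration only; do_kmalloc_sketch() is hypothetical.  With
 * kmalloc_slab() visible through mm/slab.h, both SLAB and SLUB can
 * resolve the cache for a request size the same way before allocating.
 */
static __always_inline void *do_kmalloc_sketch(size_t size, gfp_t flags)
{
	struct kmem_cache *cachep;

	cachep = kmalloc_slab(size, flags);
	if (unlikely(ZERO_OR_NULL_PTR(cachep)))
		return cachep;		/* zero-size or out-of-range request */

	return kmem_cache_alloc(cachep, flags);
}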