kmem_cache_node 89 include/linux/slab_def.h struct kmem_cache_node *node[MAX_NUMNODES];
kmem_cache_node 142 include/linux/slub_def.h struct kmem_cache_node *node[MAX_NUMNODES];
kmem_cache_node 205 mm/slab.c static struct kmem_cache_node __initdata init_kmem_cache_node[NUM_INIT_LISTS];
kmem_cache_node 210 mm/slab.c struct kmem_cache_node *n, int tofree);
kmem_cache_node 220 mm/slab.c struct kmem_cache_node *n, struct page *page,
kmem_cache_node 224 mm/slab.c #define INDEX_NODE kmalloc_index(sizeof(struct kmem_cache_node))
kmem_cache_node 226 mm/slab.c static void kmem_cache_node_init(struct kmem_cache_node *parent)
kmem_cache_node 554 mm/slab.c struct kmem_cache_node *n;
kmem_cache_node 688 mm/slab.c struct kmem_cache_node *n = get_node(cachep, node);
kmem_cache_node 709 mm/slab.c static void reap_alien(struct kmem_cache *cachep, struct kmem_cache_node *n)
kmem_cache_node 755 mm/slab.c struct kmem_cache_node *n;
kmem_cache_node 809 mm/slab.c struct kmem_cache_node *n;
kmem_cache_node 826 mm/slab.c n = kmalloc_node(sizeof(struct kmem_cache_node), gfp, node);
kmem_cache_node 876 mm/slab.c struct kmem_cache_node *n;
kmem_cache_node 943 mm/slab.c struct kmem_cache_node *n = NULL;
kmem_cache_node 1101 mm/slab.c struct kmem_cache_node *n;
kmem_cache_node 1154 mm/slab.c static void __init init_list(struct kmem_cache *cachep, struct kmem_cache_node *list,
kmem_cache_node 1157 mm/slab.c struct kmem_cache_node *ptr;
kmem_cache_node 1159 mm/slab.c ptr = kmalloc_node(sizeof(struct kmem_cache_node), GFP_NOWAIT, nodeid);
kmem_cache_node 1162 mm/slab.c memcpy(ptr, list, sizeof(struct kmem_cache_node));
kmem_cache_node 1239 mm/slab.c nr_node_ids * sizeof(struct kmem_cache_node *),
kmem_cache_node 1320 mm/slab.c struct kmem_cache_node *n;
kmem_cache_node 1762 mm/slab.c sizeof(struct kmem_cache_node), gfp, node);
kmem_cache_node 2141 mm/slab.c struct kmem_cache_node *n;
kmem_cache_node 2156 mm/slab.c struct kmem_cache_node *n;
kmem_cache_node 2182 mm/slab.c struct kmem_cache_node *n, int tofree)
kmem_cache_node 2218 mm/slab.c struct kmem_cache_node *n;
kmem_cache_node 2231 mm/slab.c struct kmem_cache_node *n;
kmem_cache_node 2264 mm/slab.c struct kmem_cache_node *n;
kmem_cache_node 2574 mm/slab.c struct kmem_cache_node *n;
kmem_cache_node 2649 mm/slab.c struct kmem_cache_node *n;
kmem_cache_node 2766 mm/slab.c struct kmem_cache_node *n, struct page *page,
kmem_cache_node 2790 mm/slab.c static noinline struct page *get_valid_first_slab(struct kmem_cache_node *n,
kmem_cache_node 2832 mm/slab.c static struct page *get_first_slab(struct kmem_cache_node *n, bool pfmemalloc)
kmem_cache_node 2854 mm/slab.c struct kmem_cache_node *n, gfp_t flags)
kmem_cache_node 2908 mm/slab.c struct kmem_cache_node *n;
kmem_cache_node 3174 mm/slab.c struct kmem_cache_node *n;
kmem_cache_node 3331 mm/slab.c struct kmem_cache_node *n = get_node(cachep, node);
kmem_cache_node 3374 mm/slab.c struct kmem_cache_node *n;
kmem_cache_node 3768 mm/slab.c struct kmem_cache_node *n;
kmem_cache_node 3828 mm/slab.c struct kmem_cache_node *n;
kmem_cache_node 3944 mm/slab.c static void drain_array(struct kmem_cache *cachep, struct kmem_cache_node *n,
kmem_cache_node 3982 mm/slab.c struct kmem_cache_node *n;
kmem_cache_node 4042 mm/slab.c struct kmem_cache_node *n;
kmem_cache_node 628 mm/slab.h static inline struct kmem_cache_node *get_node(struct kmem_cache *s, int node)
kmem_cache_node 1012 mm/slub.c struct kmem_cache_node *n, struct page *page)
kmem_cache_node 1021 mm/slub.c static void remove_full(struct kmem_cache *s, struct kmem_cache_node *n, struct page *page)
kmem_cache_node 1033 mm/slub.c struct kmem_cache_node *n = get_node(s, node);
kmem_cache_node 1038 mm/slub.c static inline unsigned long node_nr_slabs(struct kmem_cache_node *n)
kmem_cache_node 1045 mm/slub.c struct kmem_cache_node *n = get_node(s, node);
kmem_cache_node 1060 mm/slub.c struct kmem_cache_node *n = get_node(s, node);
kmem_cache_node 1173 mm/slub.c struct kmem_cache_node *n = get_node(s, page_to_nid(page));
kmem_cache_node 1359 mm/slub.c static inline void add_full(struct kmem_cache *s, struct kmem_cache_node *n,
kmem_cache_node 1361 mm/slub.c static inline void remove_full(struct kmem_cache *s, struct kmem_cache_node *n,
kmem_cache_node 1375 mm/slub.c static inline unsigned long node_nr_slabs(struct kmem_cache_node *n)
kmem_cache_node 1759 mm/slub.c __add_partial(struct kmem_cache_node *n, struct page *page, int tail)
kmem_cache_node 1768 mm/slub.c static inline void add_partial(struct kmem_cache_node *n,
kmem_cache_node 1775 mm/slub.c static inline void remove_partial(struct kmem_cache_node *n,
kmem_cache_node 1790 mm/slub.c struct kmem_cache_node *n, struct page *page,
kmem_cache_node 1835 mm/slub.c static void *get_partial_node(struct kmem_cache *s, struct kmem_cache_node *n,
kmem_cache_node 1921 mm/slub.c struct kmem_cache_node *n;
kmem_cache_node 2040 mm/slub.c struct kmem_cache_node *n = get_node(s, page_to_nid(page));
kmem_cache_node 2185 mm/slub.c struct kmem_cache_node *n = NULL, *n2 = NULL;
kmem_cache_node 2382 mm/slub.c static inline unsigned long node_nr_objs(struct kmem_cache_node *n)
kmem_cache_node 2389 mm/slub.c static unsigned long count_partial(struct kmem_cache_node *n,
kmem_cache_node 2411 mm/slub.c struct kmem_cache_node *n;
kmem_cache_node 2845 mm/slub.c struct kmem_cache_node *n = NULL;
kmem_cache_node 3342 mm/slub.c init_kmem_cache_node(struct kmem_cache_node *n)
kmem_cache_node 3374 mm/slub.c static struct kmem_cache *kmem_cache_node;
kmem_cache_node 3388 mm/slub.c struct kmem_cache_node *n;
kmem_cache_node 3390 mm/slub.c BUG_ON(kmem_cache_node->size < sizeof(struct kmem_cache_node));
kmem_cache_node 3392 mm/slub.c page = new_slab(kmem_cache_node, GFP_NOWAIT, node);
kmem_cache_node 3403 mm/slub.c init_object(kmem_cache_node, n, SLUB_RED_ACTIVE);
kmem_cache_node 3404 mm/slub.c init_tracking(kmem_cache_node, n);
kmem_cache_node 3406 mm/slub.c n = kasan_kmalloc(kmem_cache_node, n, sizeof(struct kmem_cache_node),
kmem_cache_node 3408 mm/slub.c page->freelist = get_freepointer(kmem_cache_node, n);
kmem_cache_node 3411 mm/slub.c kmem_cache_node->node[node] = n;
kmem_cache_node 3413 mm/slub.c inc_slabs_node(kmem_cache_node, node, page->objects);
kmem_cache_node 3425 mm/slub.c struct kmem_cache_node *n;
kmem_cache_node 3429 mm/slub.c kmem_cache_free(kmem_cache_node, n);
kmem_cache_node 3445 mm/slub.c struct kmem_cache_node *n;
kmem_cache_node 3451 mm/slub.c n = kmem_cache_alloc_node(kmem_cache_node,
kmem_cache_node 3720 mm/slub.c static void free_partial(struct kmem_cache *s, struct kmem_cache_node *n)
kmem_cache_node 3745 mm/slub.c struct kmem_cache_node *n;
kmem_cache_node 3759 mm/slub.c struct kmem_cache_node *n;
kmem_cache_node 3994 mm/slub.c struct kmem_cache_node *n;
kmem_cache_node 4096 mm/slub.c struct kmem_cache_node *n;
kmem_cache_node 4123 mm/slub.c kmem_cache_free(kmem_cache_node, n);
kmem_cache_node 4131 mm/slub.c struct kmem_cache_node *n;
kmem_cache_node 4156 mm/slub.c n = kmem_cache_alloc(kmem_cache_node, GFP_KERNEL);
kmem_cache_node 4215 mm/slub.c struct kmem_cache_node *n;
kmem_cache_node 4250 mm/slub.c kmem_cache_node = &boot_kmem_cache_node;
kmem_cache_node 4253 mm/slub.c create_boot_cache(kmem_cache_node, "kmem_cache_node",
kmem_cache_node 4254 mm/slub.c sizeof(struct kmem_cache_node), SLAB_HWCACHE_ALIGN, 0, 0);
kmem_cache_node 4263 mm/slub.c nr_node_ids * sizeof(struct kmem_cache_node *),
kmem_cache_node 4267 mm/slub.c kmem_cache_node = bootstrap(&boot_kmem_cache_node);
kmem_cache_node 4441 mm/slub.c struct kmem_cache_node *n, unsigned long *map)
kmem_cache_node 4477 mm/slub.c struct kmem_cache_node *n;
kmem_cache_node 4637 mm/slub.c struct kmem_cache_node *n;
kmem_cache_node 4870 mm/slub.c struct kmem_cache_node *n;
kmem_cache_node 4888 mm/slub.c struct kmem_cache_node *n;
kmem_cache_node 4916 mm/slub.c struct kmem_cache_node *n;
kmem_cache_node 5924 mm/slub.c struct kmem_cache_node *n;
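For orientation, the pattern the listing points at is a per-NUMA-node array: both slab_def.h:89 and slub_def.h:142 embed `struct kmem_cache_node *node[MAX_NUMNODES]` in the cache, and callers reach an entry through the `get_node()` accessor declared at mm/slab.h:628. Below is a minimal, self-contained userspace sketch of that pattern only; the field layout and function bodies are illustrative assumptions, not the kernel implementation.

```c
/*
 * Sketch: one kmem_cache_node pointer per NUMA node, indexed by node id.
 * Names are reused from the listing; contents are made up for illustration.
 */
#include <stdio.h>
#include <stdlib.h>

#define MAX_NUMNODES 4          /* kernel value is configuration-dependent */

struct kmem_cache_node {
	unsigned long nr_partial;   /* e.g. slabs on this node's partial list */
};

struct kmem_cache {
	const char *name;
	struct kmem_cache_node *node[MAX_NUMNODES]; /* per-node state */
};

/* Models the accessor declared at mm/slab.h:628 in the listing. */
static inline struct kmem_cache_node *get_node(struct kmem_cache *s, int node)
{
	return s->node[node];
}

int main(void)
{
	struct kmem_cache cache = { .name = "demo" };

	/* Populate one kmem_cache_node per (model) NUMA node. */
	for (int nid = 0; nid < MAX_NUMNODES; nid++) {
		cache.node[nid] = calloc(1, sizeof(struct kmem_cache_node));
		cache.node[nid]->nr_partial = nid;
	}

	/* Walk the per-node structures through the accessor. */
	for (int nid = 0; nid < MAX_NUMNODES; nid++) {
		struct kmem_cache_node *n = get_node(&cache, nid);
		printf("%s: node %d has %lu partial slabs\n",
		       cache.name, nid, n->nr_partial);
		free(n);
	}
	return 0;
}
```

This also explains the bootstrap entries near the end of the listing (mm/slub.c:4250-4267): the cache that backs `struct kmem_cache_node` allocations is itself a kmem_cache, so its first per-node structures have to be set up by hand before the allocator can serve them.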