Lines matching references to s

64 unsigned int kmem_cache_size(struct kmem_cache *s)  in kmem_cache_size()  argument
66 return s->object_size; in kmem_cache_size()
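
The two lines above show that kmem_cache_size() simply reports s->object_size, the usable object size, rather than the padded slot size kept in s->size. A minimal sketch of a caller (report_object_size() is an illustrative name, not from the source):

    #include <linux/slab.h>
    #include <linux/printk.h>

    /* Print the usable per-object size of an existing cache. */
    static void report_object_size(struct kmem_cache *cachep)
    {
            pr_info("usable object size: %u bytes\n",
                    kmem_cache_size(cachep));
    }
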
73 struct kmem_cache *s = NULL; in kmem_cache_sanity_check() local
81 list_for_each_entry(s, &slab_caches, list) { in kmem_cache_sanity_check()
90 res = probe_kernel_address(s->name, tmp); in kmem_cache_sanity_check()
93 s->object_size); in kmem_cache_sanity_check()
109 void slab_init_memcg_params(struct kmem_cache *s) in slab_init_memcg_params() argument
111 s->memcg_params.is_root_cache = true; in slab_init_memcg_params()
112 INIT_LIST_HEAD(&s->memcg_params.list); in slab_init_memcg_params()
113 RCU_INIT_POINTER(s->memcg_params.memcg_caches, NULL); in slab_init_memcg_params()
116 static int init_memcg_params(struct kmem_cache *s, in init_memcg_params() argument
122 s->memcg_params.is_root_cache = false; in init_memcg_params()
123 s->memcg_params.memcg = memcg; in init_memcg_params()
124 s->memcg_params.root_cache = root_cache; in init_memcg_params()
128 slab_init_memcg_params(s); in init_memcg_params()
139 RCU_INIT_POINTER(s->memcg_params.memcg_caches, arr); in init_memcg_params()
143 static void destroy_memcg_params(struct kmem_cache *s) in destroy_memcg_params() argument
145 if (is_root_cache(s)) in destroy_memcg_params()
146 kfree(rcu_access_pointer(s->memcg_params.memcg_caches)); in destroy_memcg_params()
149 static int update_memcg_params(struct kmem_cache *s, int new_array_size) in update_memcg_params() argument
153 if (!is_root_cache(s)) in update_memcg_params()
161 old = rcu_dereference_protected(s->memcg_params.memcg_caches, in update_memcg_params()
167 rcu_assign_pointer(s->memcg_params.memcg_caches, new); in update_memcg_params()
175 struct kmem_cache *s; in memcg_update_all_caches() local
179 list_for_each_entry(s, &slab_caches, list) { in memcg_update_all_caches()
180 ret = update_memcg_params(s, num_memcgs); in memcg_update_all_caches()
192 static inline int init_memcg_params(struct kmem_cache *s, in init_memcg_params() argument
198 static inline void destroy_memcg_params(struct kmem_cache *s) in destroy_memcg_params() argument
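
update_memcg_params() above follows the usual grow-an-RCU-protected-array pattern visible in its rcu_dereference_protected()/rcu_assign_pointer() lines: read the old array under the writer's lock, publish a larger copy, and free the old one after a grace period. A generic sketch of that pattern, assuming an array of plain pointers serialized by a mutex (entry_array, table, table_lock and grow_table() are illustrative names, not from the source):

    #include <linux/mutex.h>
    #include <linux/rcupdate.h>
    #include <linux/slab.h>
    #include <linux/string.h>

    struct entry_array {
            struct rcu_head rcu;
            void *entries[];                        /* flexible array of slots */
    };

    static struct entry_array __rcu *table;         /* RCU-protected array */
    static DEFINE_MUTEX(table_lock);                /* serializes writers */

    static int grow_table(int old_size, int new_size)
    {
            struct entry_array *old, *new;

            new = kzalloc(sizeof(*new) + new_size * sizeof(void *), GFP_KERNEL);
            if (!new)
                    return -ENOMEM;

            mutex_lock(&table_lock);
            old = rcu_dereference_protected(table,
                                            lockdep_is_held(&table_lock));
            if (old)
                    memcpy(new->entries, old->entries,
                           old_size * sizeof(void *));

            rcu_assign_pointer(table, new);         /* publish the new array */
            mutex_unlock(&table_lock);

            if (old)
                    kfree_rcu(old, rcu);            /* free after a grace period */
            return 0;
    }
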
206 int slab_unmergeable(struct kmem_cache *s) in slab_unmergeable() argument
208 if (slab_nomerge || (s->flags & SLAB_NEVER_MERGE)) in slab_unmergeable()
211 if (!is_root_cache(s)) in slab_unmergeable()
214 if (s->ctor) in slab_unmergeable()
220 if (s->refcount < 0) in slab_unmergeable()
229 struct kmem_cache *s; in find_mergeable() local
242 list_for_each_entry_reverse(s, &slab_caches, list) { in find_mergeable()
243 if (slab_unmergeable(s)) in find_mergeable()
246 if (size > s->size) in find_mergeable()
249 if ((flags & SLAB_MERGE_SAME) != (s->flags & SLAB_MERGE_SAME)) in find_mergeable()
255 if ((s->size & ~(align - 1)) != s->size) in find_mergeable()
258 if (s->size - size >= sizeof(void *)) in find_mergeable()
262 (align > s->align || s->align % align)) in find_mergeable()
265 return s; in find_mergeable()
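
slab_unmergeable() and find_mergeable() above decide whether a new cache may alias an existing one: merging is refused when slab_nomerge or SLAB_NEVER_MERGE is set, when the cache has a constructor, or when size, flags and alignment are incompatible. A hedged illustration of the constructor case (the cache names, my_ctor() and merge_demo_init() are made up; error unwinding is elided):

    #include <linux/init.h>
    #include <linux/slab.h>
    #include <linux/string.h>

    static struct kmem_cache *plain_cache, *ctor_cache;

    static void my_ctor(void *obj)
    {
            memset(obj, 0, 64);     /* one-time initialization of each object */
    }

    static int __init merge_demo_init(void)
    {
            /* No ctor, no special flags: a compatible same-size cache may be
             * reused via find_mergeable()/__kmem_cache_alias(). */
            plain_cache = kmem_cache_create("merge_demo", 64, 0, 0, NULL);

            /* A constructor makes slab_unmergeable() return true, so this
             * cache is always created as its own root cache. */
            ctor_cache = kmem_cache_create("merge_demo_ctor", 64, 0, 0, my_ctor);

            if (!plain_cache || !ctor_cache)
                    return -ENOMEM;
            return 0;
    }
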
302 struct kmem_cache *s; in do_kmem_cache_create() local
306 s = kmem_cache_zalloc(kmem_cache, GFP_KERNEL); in do_kmem_cache_create()
307 if (!s) in do_kmem_cache_create()
310 s->name = name; in do_kmem_cache_create()
311 s->object_size = object_size; in do_kmem_cache_create()
312 s->size = size; in do_kmem_cache_create()
313 s->align = align; in do_kmem_cache_create()
314 s->ctor = ctor; in do_kmem_cache_create()
316 err = init_memcg_params(s, memcg, root_cache); in do_kmem_cache_create()
320 err = __kmem_cache_create(s, flags); in do_kmem_cache_create()
324 s->refcount = 1; in do_kmem_cache_create()
325 list_add(&s->list, &slab_caches); in do_kmem_cache_create()
329 return s; in do_kmem_cache_create()
332 destroy_memcg_params(s); in do_kmem_cache_create()
333 kmem_cache_free(kmem_cache, s); in do_kmem_cache_create()
365 struct kmem_cache *s; in kmem_cache_create() local
377 s = NULL; /* suppress uninit var warning */ in kmem_cache_create()
389 s = __kmem_cache_alias(name, size, align, flags, ctor); in kmem_cache_create()
390 if (s) in kmem_cache_create()
399 s = do_kmem_cache_create(cache_name, size, size, in kmem_cache_create()
402 if (IS_ERR(s)) { in kmem_cache_create()
403 err = PTR_ERR(s); in kmem_cache_create()
425 return s; in kmem_cache_create()
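
kmem_cache_create() above either reuses a compatible existing cache through __kmem_cache_alias() or builds a new one via do_kmem_cache_create(), returning NULL to the caller on failure. The usual calling pattern elsewhere in the kernel looks roughly like this (struct my_object, my_cachep and the helper names are placeholders):

    #include <linux/init.h>
    #include <linux/slab.h>

    struct my_object {
            int id;
            char payload[48];
    };

    static struct kmem_cache *my_cachep;

    static int __init my_cache_setup(void)
    {
            my_cachep = kmem_cache_create("my_object_cache",
                                          sizeof(struct my_object), 0,
                                          SLAB_HWCACHE_ALIGN, NULL);
            if (!my_cachep)
                    return -ENOMEM;
            return 0;
    }

    static struct my_object *my_object_alloc(void)
    {
            return kmem_cache_alloc(my_cachep, GFP_KERNEL);
    }

    static void my_object_free(struct my_object *obj)
    {
            kmem_cache_free(my_cachep, obj);
    }
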
429 static int do_kmem_cache_shutdown(struct kmem_cache *s, in do_kmem_cache_shutdown() argument
432 if (__kmem_cache_shutdown(s) != 0) { in do_kmem_cache_shutdown()
434 "Slab cache still has objects\n", s->name); in do_kmem_cache_shutdown()
439 if (s->flags & SLAB_DESTROY_BY_RCU) in do_kmem_cache_shutdown()
443 if (!is_root_cache(s)) in do_kmem_cache_shutdown()
444 list_del(&s->memcg_params.list); in do_kmem_cache_shutdown()
446 list_move(&s->list, release); in do_kmem_cache_shutdown()
453 struct kmem_cache *s, *s2; in do_kmem_cache_release() local
458 list_for_each_entry_safe(s, s2, release, list) { in do_kmem_cache_release()
460 sysfs_slab_remove(s); in do_kmem_cache_release()
462 slab_kmem_cache_release(s); in do_kmem_cache_release()
483 struct kmem_cache *s = NULL; in memcg_create_kmem_cache() local
517 s = do_kmem_cache_create(cache_name, root_cache->object_size, in memcg_create_kmem_cache()
526 if (IS_ERR(s)) { in memcg_create_kmem_cache()
531 list_add(&s->memcg_params.list, &root_cache->memcg_params.list); in memcg_create_kmem_cache()
539 arr->entries[idx] = s; in memcg_create_kmem_cache()
552 struct kmem_cache *s, *c; in memcg_deactivate_kmem_caches() local
560 list_for_each_entry(s, &slab_caches, list) { in memcg_deactivate_kmem_caches()
561 if (!is_root_cache(s)) in memcg_deactivate_kmem_caches()
564 arr = rcu_dereference_protected(s->memcg_params.memcg_caches, in memcg_deactivate_kmem_caches()
583 struct kmem_cache *s, *s2; in memcg_destroy_kmem_caches() local
589 list_for_each_entry_safe(s, s2, &slab_caches, list) { in memcg_destroy_kmem_caches()
590 if (is_root_cache(s) || s->memcg_params.memcg != memcg) in memcg_destroy_kmem_caches()
596 BUG_ON(do_kmem_cache_shutdown(s, &release, &need_rcu_barrier)); in memcg_destroy_kmem_caches()
607 void slab_kmem_cache_release(struct kmem_cache *s) in slab_kmem_cache_release() argument
609 destroy_memcg_params(s); in slab_kmem_cache_release()
610 kfree_const(s->name); in slab_kmem_cache_release()
611 kmem_cache_free(kmem_cache, s); in slab_kmem_cache_release()
614 void kmem_cache_destroy(struct kmem_cache *s) in kmem_cache_destroy() argument
621 BUG_ON(!is_root_cache(s)); in kmem_cache_destroy()
628 s->refcount--; in kmem_cache_destroy()
629 if (s->refcount) in kmem_cache_destroy()
632 for_each_memcg_cache_safe(c, c2, s) { in kmem_cache_destroy()
638 do_kmem_cache_shutdown(s, &release, &need_rcu_barrier); in kmem_cache_destroy()
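
kmem_cache_destroy() above only shuts the cache down once s->refcount reaches zero, so destroying a cache that was merged with (aliased to) other users is harmless, and caches flagged SLAB_DESTROY_BY_RCU additionally trigger the rcu_barrier handling threaded through do_kmem_cache_shutdown(). A matching teardown for the sketch above:

    static void my_cache_teardown(void)
    {
            /* All objects must have been freed back to the cache first;
             * otherwise do_kmem_cache_shutdown() warns that the slab cache
             * still has objects and the cache is left in place. */
            kmem_cache_destroy(my_cachep);
            my_cachep = NULL;
    }
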
677 void __init create_boot_cache(struct kmem_cache *s, const char *name, size_t size, in create_boot_cache() argument
682 s->name = name; in create_boot_cache()
683 s->size = s->object_size = size; in create_boot_cache()
684 s->align = calculate_alignment(flags, ARCH_KMALLOC_MINALIGN, size); in create_boot_cache()
686 slab_init_memcg_params(s); in create_boot_cache()
688 err = __kmem_cache_create(s, flags); in create_boot_cache()
694 s->refcount = -1; /* Exempt from merging for now */ in create_boot_cache()
700 struct kmem_cache *s = kmem_cache_zalloc(kmem_cache, GFP_NOWAIT); in create_kmalloc_cache() local
702 if (!s) in create_kmalloc_cache()
705 create_boot_cache(s, name, size, flags); in create_kmalloc_cache()
706 list_add(&s->list, &slab_caches); in create_kmalloc_cache()
707 s->refcount = 1; in create_kmalloc_cache()
708 return s; in create_kmalloc_cache()
858 struct kmem_cache *s = kmalloc_caches[i]; in create_kmalloc_caches() local
861 if (s) { in create_kmalloc_caches()
865 s->name = n; in create_kmalloc_caches()
871 struct kmem_cache *s = kmalloc_caches[i]; in create_kmalloc_caches() local
873 if (s) { in create_kmalloc_caches()
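
create_kmalloc_cache() and create_kmalloc_caches() above populate the kmalloc_caches[] array of fixed-size caches that back kmalloc(). From a caller's point of view that machinery is invisible; for small requests kmalloc() simply picks the smallest fitting cache and kfree() returns the object to it (copy_blob() below is a hypothetical helper):

    #include <linux/slab.h>
    #include <linux/string.h>

    static void *copy_blob(const void *src, size_t len)
    {
            void *dst = kmalloc(len, GFP_KERNEL);

            if (dst)
                    memcpy(dst, src, len);
            return dst;
    }
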
964 memcg_accumulate_slabinfo(struct kmem_cache *s, struct slabinfo *info) in memcg_accumulate_slabinfo() argument
969 if (!is_root_cache(s)) in memcg_accumulate_slabinfo()
972 for_each_memcg_cache(c, s) { in memcg_accumulate_slabinfo()
984 static void cache_show(struct kmem_cache *s, struct seq_file *m) in cache_show() argument
989 get_slabinfo(s, &sinfo); in cache_show()
991 memcg_accumulate_slabinfo(s, &sinfo); in cache_show()
994 cache_name(s), sinfo.active_objs, sinfo.num_objs, s->size, in cache_show()
1001 slabinfo_show_stats(m, s); in cache_show()
1007 struct kmem_cache *s = list_entry(p, struct kmem_cache, list); in slab_show() local
1011 if (is_root_cache(s)) in slab_show()
1012 cache_show(s, m); in slab_show()
1019 struct kmem_cache *s = list_entry(p, struct kmem_cache, list); in memcg_slab_show() local
1024 if (!is_root_cache(s) && s->memcg_params.memcg == memcg) in memcg_slab_show()
1025 cache_show(s, m); in memcg_slab_show()
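
cache_show(), slab_show() and memcg_slab_show() above produce the per-cache rows of /proc/slabinfo (and its per-memcg variant): cache name, active_objs, num_objs and object size, followed by the allocator-specific columns from slabinfo_show_stats(). A trivial userspace reader of that output, assuming sufficient privileges (newer kernels restrict /proc/slabinfo to root):

    #include <stdio.h>

    int main(void)
    {
            char line[512];
            FILE *f = fopen("/proc/slabinfo", "r");

            if (!f) {
                    perror("/proc/slabinfo");
                    return 1;
            }
            while (fgets(line, sizeof(line), f))
                    fputs(line, stdout);
            fclose(f);
            return 0;
    }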