Lines matching references to objects (mm/slub.c, the SLUB allocator)
237 if (object < base || object >= base + page->objects * s->size || in check_valid_pointer()
361 page->objects = tmp.objects; in set_page_slub_counters()
587 page, page->objects, page->inuse, page->freelist, page->flags); in print_page_info()
877 if (page->objects > maxobj) { in check_slab()
879 page->objects, maxobj); in check_slab()
882 if (page->inuse > page->objects) { in check_slab()
884 page->inuse, page->objects); in check_slab()
904 while (fp && nr <= page->objects) { in on_freelist()
915 page->inuse = page->objects; in on_freelist()
930 if (page->objects != max_objects) { in on_freelist()
932 "should be %d", page->objects, max_objects); in on_freelist()
933 page->objects = max_objects; in on_freelist()
936 if (page->inuse != page->objects - nr) { in on_freelist()
938 "counted were %d", page->inuse, page->objects - nr); in on_freelist()
939 page->inuse = page->objects - nr; in on_freelist()
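
The check_slab() and on_freelist() hits above are SLUB's debug-time consistency checks: each slab page records its capacity in page->objects and its allocated count in page->inuse, and a walk of the freelist must account for exactly objects - inuse entries (the walk is bounded so a corrupted list cannot loop forever). A minimal userspace sketch of that rule, using hypothetical toy_slab/toy_object types rather than the kernel's structures:

/* Toy illustration of the invariant enforced by the on_freelist() hits:
 * freelist length must equal objects - inuse. Not kernel code. */
#include <stdio.h>

struct toy_object { struct toy_object *next; };

struct toy_slab {
	unsigned int objects;		/* capacity of the slab */
	unsigned int inuse;		/* objects handed out */
	struct toy_object *freelist;
};

/* Returns 1 if the counters agree with the free-list length, else 0. */
static int toy_check_slab(const struct toy_slab *slab)
{
	unsigned int nr = 0;
	const struct toy_object *fp = slab->freelist;

	if (slab->inuse > slab->objects)
		return 0;		/* inuse can never exceed capacity */

	while (fp && nr <= slab->objects) {	/* bounded walk, as in on_freelist() */
		fp = fp->next;
		nr++;
	}
	return slab->inuse == slab->objects - nr;
}

int main(void)
{
	struct toy_object a = { NULL }, b = { &a };
	struct toy_slab slab = { .objects = 4, .inuse = 2, .freelist = &b };

	printf("consistent: %d\n", toy_check_slab(&slab)); /* 2 free + 2 inuse = 4 */
	return 0;
}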
998 static inline void inc_slabs_node(struct kmem_cache *s, int node, int objects) in inc_slabs_node() argument
1010 atomic_long_add(objects, &n->total_objects); in inc_slabs_node()
1013 static inline void dec_slabs_node(struct kmem_cache *s, int node, int objects) in dec_slabs_node() argument
1018 atomic_long_sub(objects, &n->total_objects); in dec_slabs_node()
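
inc_slabs_node() and dec_slabs_node() above keep a per-NUMA-node running total of objects via atomic_long_add()/atomic_long_sub(). A rough userspace analogue using C11 atomics in place of the kernel's atomic_long_t; the function names here are illustrative:

/* Shared per-node total adjusted with atomic add/sub. Toy version. */
#include <stdatomic.h>
#include <stdio.h>

static atomic_long total_objects;

static void toy_inc_slabs_node(long objects) { atomic_fetch_add(&total_objects, objects); }
static void toy_dec_slabs_node(long objects) { atomic_fetch_sub(&total_objects, objects); }

int main(void)
{
	toy_inc_slabs_node(32);		/* a new slab with 32 objects came online */
	toy_dec_slabs_node(32);		/* ...and was later discarded */
	printf("total: %ld\n", atomic_load(&total_objects));
	return 0;
}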
1062 page->inuse = page->objects; in alloc_debug_processing()
1260 int objects) {} in inc_slabs_node() argument
1262 int objects) {} in dec_slabs_node() argument
1448 page->objects = oo_objects(oo); in allocate_slab()
1463 for_each_object_idx(p, idx, s, start, page->objects) { in allocate_slab()
1465 if (likely(idx < page->objects)) in allocate_slab()
1472 page->inuse = page->objects; in allocate_slab()
1486 inc_slabs_node(s, page_to_nid(page), page->objects); in allocate_slab()
1512 page->objects) in __free_slab()
1569 dec_slabs_node(s, page_to_nid(page), page->objects); in discard_slab()
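
The allocate_slab() hits show a fresh slab being initialized: page->objects is set from the order/objects encoding, and every object slot is chained into the initial freelist, with the last slot terminated by NULL (hence the idx < page->objects test). A self-contained sketch of that linking pass over a plain buffer; the layout is a toy stand-in, since real SLUB stores the free pointer inside each object at a cache-specific offset:

/* Build a freelist by chaining each slot in a raw buffer to the next.
 * Mirrors the for_each_object_idx() loop in allocate_slab(); toy layout. */
#include <stdio.h>
#include <stdlib.h>

#define OBJ_SIZE	sizeof(void *)
#define NOBJECTS	8

int main(void)
{
	void *start = calloc(NOBJECTS, OBJ_SIZE);
	void *p;
	size_t idx;

	if (!start)
		return 1;

	for (idx = 0, p = start; idx < NOBJECTS; idx++, p = (char *)p + OBJ_SIZE) {
		if (idx < NOBJECTS - 1)
			*(void **)p = (char *)p + OBJ_SIZE;	/* link to next slot */
		else
			*(void **)p = NULL;	/* last object terminates the list */
	}

	/* Walk the freshly built freelist to prove it covers all slots. */
	size_t nr = 0;
	for (p = start; p; p = *(void **)p)
		nr++;
	printf("freelist length: %zu\n", nr);

	free(start);
	return 0;
}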
1615 int mode, int *objects) in acquire_slab() argument
1631 *objects = new.objects - new.inuse; in acquire_slab()
1633 new.inuse = page->objects; in acquire_slab()
1665 int objects; in get_partial_node() local
1683 t = acquire_slab(s, n, page, object == NULL, &objects); in get_partial_node()
1687 available += objects; in get_partial_node()
2112 pobjects += page->objects - page->inuse; in put_cpu_partial()
2194 return page->objects - page->inuse; in count_free()
2321 new.inuse = page->objects; in get_freelist()
3165 inc_slabs_node(kmem_cache_node, node, page->objects); in early_kmem_cache_node_alloc()
3422 unsigned long *map = kzalloc(BITS_TO_LONGS(page->objects) * in list_slab_objects()
3430 for_each_object(p, s, addr, page->objects) { in list_slab_objects()
3689 int free = page->objects - page->inuse; in __kmem_cache_shrink()
3697 if (free == page->objects) { in __kmem_cache_shrink()
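
Several of the hits above reduce to the same idiom: a slab's free-slot count is page->objects - page->inuse. acquire_slab() reports it so get_partial_node() can stop once enough objects are available, put_cpu_partial() and count_free() use it for accounting, and __kmem_cache_shrink() treats free == page->objects as a completely empty slab that can be discarded. Sketched with a hypothetical toy struct:

/* The recurring objects - inuse idiom. Toy types, not kernel code. */
#include <stdio.h>

struct toy_slab { unsigned int objects, inuse; };

static unsigned int toy_count_free(const struct toy_slab *slab)
{
	return slab->objects - slab->inuse;	/* capacity minus allocated */
}

int main(void)
{
	struct toy_slab partial = { .objects = 16, .inuse = 11 };
	unsigned int available = 0;

	/* get_partial_node()-style accumulation until "enough" are grabbed. */
	available += toy_count_free(&partial);
	printf("available: %u\n", available);

	if (toy_count_free(&partial) == partial.objects)
		printf("slab is empty; discard it\n");	/* __kmem_cache_shrink() case */
	return 0;
}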
4083 return page->objects; in count_total()
4099 bitmap_zero(map, page->objects); in validate_slab()
4102 for_each_object(p, s, addr, page->objects) { in validate_slab()
4108 for_each_object(p, s, addr, page->objects) in validate_slab()
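
validate_slab() uses a bitmap with one bit per object: bitmap_zero() clears it, a freelist pass sets a bit for every free object, and a second for_each_object() pass treats clear bits as allocated objects to be verified. A userspace approximation with hand-rolled bit helpers (the kernel uses its own bitmap API):

/* One bit per object: set = free, clear = allocated. Toy version. */
#include <stdio.h>
#include <string.h>
#include <limits.h>

#define NOBJECTS	8
#define BITS_PER_LONG	(CHAR_BIT * sizeof(long))
#define BITS_TO_LONGS(n) (((n) + BITS_PER_LONG - 1) / BITS_PER_LONG)

static void set_bit_idx(unsigned long *map, unsigned int idx)
{
	map[idx / BITS_PER_LONG] |= 1UL << (idx % BITS_PER_LONG);
}

static int test_bit_idx(const unsigned long *map, unsigned int idx)
{
	return !!(map[idx / BITS_PER_LONG] & (1UL << (idx % BITS_PER_LONG)));
}

int main(void)
{
	unsigned long map[BITS_TO_LONGS(NOBJECTS)];
	unsigned int free_idx[] = { 1, 4, 6 };	/* pretend these sit on the freelist */
	unsigned int i;

	memset(map, 0, sizeof(map));		/* bitmap_zero(map, page->objects) */
	for (i = 0; i < sizeof(free_idx) / sizeof(free_idx[0]); i++)
		set_bit_idx(map, free_idx[i]);

	for (i = 0; i < NOBJECTS; i++)		/* for_each_object() equivalent */
		if (!test_bit_idx(map, i))
			printf("object %u is allocated\n", i);
	return 0;
}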
4306 bitmap_zero(map, page->objects); in process_slab()
4309 for_each_object(p, s, addr, page->objects) in process_slab()
4501 x = page->objects; in show_slab_objects()
4677 unsigned long objects; in cpu_partial_store() local
4680 err = kstrtoul(buf, 10, &objects); in cpu_partial_store()
4683 if (objects && !kmem_cache_has_cpu_partial(s)) in cpu_partial_store()
4686 s->cpu_partial = objects; in cpu_partial_store()
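
cpu_partial_store() is the sysfs write handler: it parses the user's decimal string with kstrtoul(), rejects a nonzero value when the cache has no per-CPU partial list support, and otherwise stores the new limit. A userspace analogue, with strtoul() standing in for kstrtoul() and a flag standing in for kmem_cache_has_cpu_partial():

/* Parse-validate-store, as in the cpu_partial_store() hits. Toy version. */
#include <stdio.h>
#include <stdlib.h>
#include <errno.h>

static unsigned long cpu_partial;
static int has_cpu_partial = 1;		/* pretend CONFIG_SLUB_CPU_PARTIAL=y */

static int toy_cpu_partial_store(const char *buf)
{
	char *end;
	unsigned long objects;

	errno = 0;
	objects = strtoul(buf, &end, 10);
	if (errno || end == buf)
		return -EINVAL;		/* not a valid decimal number */
	if (objects && !has_cpu_partial)
		return -EINVAL;		/* nonzero setting needs the feature */

	cpu_partial = objects;
	return 0;
}

int main(void)
{
	printf("store \"30\": %d, cpu_partial=%lu\n",
	       toy_cpu_partial_store("30"), cpu_partial);
	return 0;
}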
4722 SLAB_ATTR_RO(objects);
4732 int objects = 0; in slabs_cpu_partial_show() local
4742 objects += page->pobjects; in slabs_cpu_partial_show()
4746 len = sprintf(buf, "%d(%d)", objects, pages); in slabs_cpu_partial_show()