Lines matching refs:objects — symbol references to `objects` in the SLUB allocator (mm/slub.c)

237 	if (object < base || object >= base + page->objects * s->size ||  in check_valid_pointer()
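The hit at line 237 is SLUB's object-pointer sanity check: page->objects gives the slab's capacity, so a valid pointer must fall within objects * s->size bytes of the slab base and sit on an object boundary. A minimal userspace model of that test (struct fields reduced to what the check needs; not the kernel definitions):

    #include <stdbool.h>
    #include <stddef.h>

    struct kmem_cache { size_t size; };      /* object stride in bytes */
    struct page { unsigned int objects; };   /* slab capacity */

    /* Mirrors the condition quoted above: inside the slab, and aligned
     * to an object boundary. */
    static bool check_valid_pointer(const struct kmem_cache *s,
                                    const struct page *page,
                                    const char *base, const char *object)
    {
            if (object < base || object >= base + page->objects * s->size)
                    return false;
            return (size_t)(object - base) % s->size == 0;
    }
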
361 page->objects = tmp.objects; in set_page_slub_counters()
585 page, page->objects, page->inuse, page->freelist, page->flags); in print_page_info()
875 if (page->objects > maxobj) { in check_slab()
877 page->objects, maxobj); in check_slab()
880 if (page->inuse > page->objects) { in check_slab()
882 page->inuse, page->objects); in check_slab()
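Lines 875-882 enforce the two structural invariants on a slab: the recorded capacity can never exceed what the backing pages can hold, and the in-use count can never exceed the capacity. Sketched as a standalone check (simplified struct; maxobj is passed in rather than derived from the page order as the kernel does):

    struct page { unsigned int objects, inuse; };

    /* Returns 0 on corruption, 1 if the counters are consistent. */
    static int check_slab(const struct page *page, unsigned int maxobj)
    {
            if (page->objects > maxobj)
                    return 0;   /* capacity larger than the pages allow */
            if (page->inuse > page->objects)
                    return 0;   /* more objects in use than exist */
            return 1;
    }
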
902 while (fp && nr <= page->objects) { in on_freelist()
913 page->inuse = page->objects; in on_freelist()
928 if (page->objects != max_objects) { in on_freelist()
930 "should be %d", page->objects, max_objects); in on_freelist()
931 page->objects = max_objects; in on_freelist()
934 if (page->inuse != page->objects - nr) { in on_freelist()
936 "counted were %d", page->inuse, page->objects - nr); in on_freelist()
937 page->inuse = page->objects - nr; in on_freelist()
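The on_freelist() hits show the repair path: the freelist is walked to count free objects (nr), the walk is bounded by page->objects so a corrupted cyclic list still terminates, and inuse is then rewritten so that objects == inuse + free holds again. A simplified model of that walk:

    struct obj { struct obj *next; };
    struct page { unsigned int objects, inuse; struct obj *freelist; };

    /* Count free objects, bounded by capacity, then repair inuse. */
    static void fixup_inuse(struct page *page)
    {
            unsigned int nr = 0;
            struct obj *fp = page->freelist;

            while (fp && nr <= page->objects) {
                    fp = fp->next;
                    nr++;
            }
            if (nr > page->objects)
                    nr = page->objects;   /* corrupted list: clamp */
            if (page->inuse != page->objects - nr)
                    page->inuse = page->objects - nr;
    }
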
996 static inline void inc_slabs_node(struct kmem_cache *s, int node, int objects) in inc_slabs_node() argument
1008 atomic_long_add(objects, &n->total_objects); in inc_slabs_node()
1011 static inline void dec_slabs_node(struct kmem_cache *s, int node, int objects) in dec_slabs_node() argument
1016 atomic_long_sub(objects, &n->total_objects); in dec_slabs_node()
1060 page->inuse = page->objects; in alloc_debug_processing()
1239 int objects) {} in inc_slabs_node() argument
1241 int objects) {} in dec_slabs_node() argument
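Lines 996-1016 are the per-node bookkeeping: every slab contributes page->objects to the node's total_objects, so slab creation and destruction adjust the counter by the slab's full capacity. Lines 1239-1241 are the empty stubs compiled in when slab debugging is disabled. A C11 model of the counters (assumed reduced kmem_cache_node with only these two fields):

    #include <stdatomic.h>

    struct kmem_cache_node {
            atomic_long nr_slabs;
            atomic_long total_objects;
    };

    static void inc_slabs_node(struct kmem_cache_node *n, long objects)
    {
            atomic_fetch_add(&n->nr_slabs, 1);
            atomic_fetch_add(&n->total_objects, objects);
    }

    static void dec_slabs_node(struct kmem_cache_node *n, long objects)
    {
            atomic_fetch_sub(&n->nr_slabs, 1);
            atomic_fetch_sub(&n->total_objects, objects);
    }
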
1388 page->objects = oo_objects(oo); in allocate_slab()
1427 inc_slabs_node(s, page_to_nid(page), page->objects); in new_slab()
1440 for_each_object_idx(p, idx, s, start, page->objects) { in new_slab()
1442 if (likely(idx < page->objects)) in new_slab()
1449 page->inuse = page->objects; in new_slab()
1465 page->objects) in __free_slab()
1526 dec_slabs_node(s, page_to_nid(page), page->objects); in discard_slab()
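The allocate_slab()/new_slab() hits cover slab construction: the capacity is unpacked from the cache's order-objects word (line 1388), every object is threaded onto the initial freelist (lines 1440-1442), and inc/dec_slabs_node() account the whole capacity at creation and teardown. A sketch of the packing and the freelist threading, assuming SLUB's usual 16-bit split between page order and object count:

    #include <stddef.h>

    #define OO_SHIFT 16
    #define OO_MASK  ((1UL << OO_SHIFT) - 1)

    struct kmem_cache_order_objects { unsigned long x; };

    static unsigned long oo_objects(struct kmem_cache_order_objects x)
    {
            return x.x & OO_MASK;            /* low bits: capacity */
    }

    /* Thread all objects into the freelist as in the new_slab() loop:
     * each object points at the next; the last one points at NULL. */
    static void *init_freelist(char *start, size_t size, unsigned int objects)
    {
            unsigned int idx;
            char *p;

            if (!objects)
                    return NULL;
            for (idx = 1, p = start; idx <= objects; idx++, p += size) {
                    if (idx < objects)
                            *(void **)p = p + size;
                    else
                            *(void **)p = NULL;
            }
            return start;
    }
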
1572 int mode, int *objects) in acquire_slab() argument
1588 *objects = new.objects - new.inuse; in acquire_slab()
1590 new.inuse = page->objects; in acquire_slab()
1622 int objects; in get_partial_node() local
1640 t = acquire_slab(s, n, page, object == NULL, &objects); in get_partial_node()
1644 available += objects; in get_partial_node()
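acquire_slab() reports back how many free objects a partial slab contributes (capacity minus in-use, line 1588) and, when it takes the whole freelist, marks the slab fully claimed (line 1590); get_partial_node() accumulates those counts until it has enough. The accounting in miniature:

    struct page { unsigned int objects, inuse; };

    /* Claim a partial slab; returns the number of free objects gained. */
    static unsigned int acquire_slab(struct page *page)
    {
            unsigned int objects = page->objects - page->inuse;

            page->inuse = page->objects;    /* whole freelist taken */
            return objects;
    }
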
2069 pobjects += page->objects - page->inuse; in put_cpu_partial()
2151 return page->objects - page->inuse; in count_free()
2278 new.inuse = page->objects; in get_freelist()
2951 inc_slabs_node(kmem_cache_node, node, page->objects); in early_kmem_cache_node_alloc()
3208 unsigned long *map = kzalloc(BITS_TO_LONGS(page->objects) * in list_slab_objects()
3216 for_each_object(p, s, addr, page->objects) { in list_slab_objects()
3475 int free = page->objects - page->inuse; in __kmem_cache_shrink()
3483 if (free == page->objects) { in __kmem_cache_shrink()
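The same identity recurs at lines 2069, 2151 and 3475: a slab's free count is always objects - inuse. __kmem_cache_shrink() uses it to classify slabs, and the boundary case free == page->objects (line 3483) means no object is live, so the slab can be discarded. As a pair of helpers:

    struct page { unsigned int objects, inuse; };

    static unsigned int count_free(const struct page *page)
    {
            return page->objects - page->inuse;
    }

    /* An entirely free slab (equivalently, inuse == 0) is discardable. */
    static int slab_is_empty(const struct page *page)
    {
            return count_free(page) == page->objects;
    }
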
3868 return page->objects; in count_total()
3884 bitmap_zero(map, page->objects); in validate_slab()
3887 for_each_object(p, s, addr, page->objects) { in validate_slab()
3893 for_each_object(p, s, addr, page->objects) in validate_slab()
4091 bitmap_zero(map, page->objects); in process_slab()
4094 for_each_object(p, s, addr, page->objects) in process_slab()
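The debug and leak-tracking paths (lines 3208, 3884-3893, 4091-4094) share one pattern: allocate a bitmap with one bit per object slot, mark the slots reachable from the freelist, then treat every clear bit as a live object to validate or report. A self-contained version of that bitmap handling (BITS_TO_LONGS reimplemented here; the kernel gets it from its bitmap API):

    #include <stdlib.h>
    #include <limits.h>

    #define BITS_PER_LONG    (sizeof(long) * CHAR_BIT)
    #define BITS_TO_LONGS(n) (((n) + BITS_PER_LONG - 1) / BITS_PER_LONG)

    /* Zeroed map with one bit per object slot; NULL on failure. */
    static unsigned long *alloc_object_map(unsigned int objects)
    {
            return calloc(BITS_TO_LONGS(objects), sizeof(unsigned long));
    }

    static void mark_free(unsigned long *map, unsigned int idx)
    {
            map[idx / BITS_PER_LONG] |= 1UL << (idx % BITS_PER_LONG);
    }

    /* Clear bit => the slot was never found on the freelist, so it is
     * allocated (or leaked, in the process_slab() case). */
    static int is_allocated(const unsigned long *map, unsigned int idx)
    {
            return !(map[idx / BITS_PER_LONG] & (1UL << (idx % BITS_PER_LONG)));
    }
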
4286 x = page->objects; in show_slab_objects()
4462 unsigned long objects; in cpu_partial_store() local
4465 err = kstrtoul(buf, 10, &objects); in cpu_partial_store()
4468 if (objects && !kmem_cache_has_cpu_partial(s)) in cpu_partial_store()
4471 s->cpu_partial = objects; in cpu_partial_store()
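cpu_partial_store() is the sysfs write handler: it parses the user's decimal string and rejects a nonzero per-cpu partial limit for caches that cannot keep per-cpu partial slabs. A userspace model using strtoul in place of kstrtoul (the has_cpu_partial flag is a stand-in for kmem_cache_has_cpu_partial()):

    #include <errno.h>
    #include <stdlib.h>

    struct kmem_cache { unsigned int cpu_partial; int has_cpu_partial; };

    static int cpu_partial_store(struct kmem_cache *s, const char *buf)
    {
            char *end;
            unsigned long objects;

            errno = 0;
            objects = strtoul(buf, &end, 10);
            if (errno || end == buf)
                    return -EINVAL;         /* not a valid number */
            if (objects && !s->has_cpu_partial)
                    return -EINVAL;         /* feature not available */

            s->cpu_partial = objects;
            return 0;
    }
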
4507 SLAB_ATTR_RO(objects);
4517 int objects = 0; in slabs_cpu_partial_show() local
4527 objects += page->pobjects; in slabs_cpu_partial_show()
4531 len = sprintf(buf, "%d(%d)", objects, pages); in slabs_cpu_partial_show()
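Finally, slabs_cpu_partial_show() (lines 4517-4531) totals the per-cpu partial lists and reports them in the sysfs format "objects(pages)". Reduced to its arithmetic:

    #include <stdio.h>

    struct cpu_partial { int pages, pobjects; };

    /* Sum the per-cpu partial lists and format "objects(pages)". */
    static int show_cpu_partials(char *buf, size_t len,
                                 const struct cpu_partial *cp, int ncpus)
    {
            int pages = 0, objects = 0;

            for (int i = 0; i < ncpus; i++) {
                    pages += cp[i].pages;
                    objects += cp[i].pobjects;
            }
            return snprintf(buf, len, "%d(%d)", objects, pages);
    }
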