Lines Matching refs:partial
1538 list_add_tail(&page->lru, &n->partial); in __add_partial()
1540 list_add(&page->lru, &n->partial); in __add_partial()
1634 list_for_each_entry_safe(page, page2, &n->partial, lru) { in get_partial_node()
1973 while ((page = c->partial)) { in unfreeze_partials()
1977 c->partial = page->next; in unfreeze_partials()
2047 oldpage = this_cpu_read(s->cpu_slab->partial); in put_cpu_partial()
2075 } while (this_cpu_cmpxchg(s->cpu_slab->partial, oldpage, page) in put_cpu_partial()
2127 return c->page || c->partial; in has_cpu_slab()
2169 list_for_each_entry(page, &n->partial, lru) in count_partial()
2383 if (c->partial) { in __slab_alloc()
2384 page = c->page = c->partial; in __slab_alloc()
2385 c->partial = page->next; in __slab_alloc()
2885 INIT_LIST_HEAD(&n->partial); in init_kmem_cache_node()
3237 list_for_each_entry_safe(page, h, &n->partial, lru) { in free_partial()
3474 list_for_each_entry_safe(page, t, &n->partial, lru) { in __kmem_cache_shrink()
3495 list_splice(promote + i, &n->partial); in __kmem_cache_shrink()
3656 list_for_each_entry(p, &n->partial, lru) in bootstrap()
3917 list_for_each_entry(page, &n->partial, lru) { in validate_slab_node()
4126 list_for_each_entry(page, &n->partial, lru) in list_locations()
4295 page = READ_ONCE(c->partial); in show_slab_objects()
4495 SLAB_ATTR_RO(partial);
4523 struct page *page = per_cpu_ptr(s->cpu_slab, cpu)->partial; in slabs_cpu_partial_show()
4535 struct page *page = per_cpu_ptr(s->cpu_slab, cpu)->partial; in slabs_cpu_partial_show()