Lines Matching refs:gfpflags
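By the function names (slab_alloc_node, __slab_alloc, kmem_cache_alloc and friends) these matches are from the SLUB allocator, mm/slub.c: every hit is the same gfp_t parameter being threaded from the public allocation entry points through slab_alloc_node() and the __slab_alloc() slow path, out to OOM reporting and pfmemalloc policy checks.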
1611 static inline bool pfmemalloc_match(struct page *page, gfp_t gfpflags);
2177 slab_out_of_memory(struct kmem_cache *s, gfp_t gfpflags, int nid) in slab_out_of_memory() argument
2185 if ((gfpflags & __GFP_NOWARN) || !__ratelimit(&slub_oom_rs)) in slab_out_of_memory()
2189 nid, gfpflags); in slab_out_of_memory()
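Matches 2177-2189 are the flags' role in OOM reporting: a caller that passed __GFP_NOWARN gets no message, and everyone else is rate-limited. A minimal sketch of that gate, reconstructed around the visible fragments (the exact pr_warn() wording and the ratelimit parameters are assumptions):

    static void slab_out_of_memory(struct kmem_cache *s, gfp_t gfpflags, int nid)
    {
            static DEFINE_RATELIMIT_STATE(slub_oom_rs, DEFAULT_RATELIMIT_INTERVAL,
                                          DEFAULT_RATELIMIT_BURST);

            /* Stay silent for __GFP_NOWARN callers; rate-limit the rest. */
            if ((gfpflags & __GFP_NOWARN) || !__ratelimit(&slub_oom_rs))
                    return;

            pr_warn("SLUB: Unable to allocate memory on node %d (gfp=0x%x)\n",
                    nid, gfpflags);
            /* ... per-cache statistics are dumped here ... */
    }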
2247 static inline bool pfmemalloc_match(struct page *page, gfp_t gfpflags) in pfmemalloc_match() argument
2250 return gfp_pfmemalloc_allowed(gfpflags); in pfmemalloc_match()
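The declaration at 1611 and the return at 2250 bracket pfmemalloc_match(): slab pages allocated from the pfmemalloc emergency reserves may only serve callers that are themselves entitled to those reserves. A sketch consistent with those two lines (the PageSlabPfmemalloc() test is filled in from the standard SLUB pattern of this era):

    static inline bool pfmemalloc_match(struct page *page, gfp_t gfpflags)
    {
            /* A reserve-backed slab page only matches reserve-entitled flags. */
            if (unlikely(PageSlabPfmemalloc(page)))
                    return gfp_pfmemalloc_allowed(gfpflags);

            return true;    /* ordinary slab pages match any caller */
    }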
2305 static void *__slab_alloc(struct kmem_cache *s, gfp_t gfpflags, int node, in __slab_alloc() argument
2347 if (unlikely(!pfmemalloc_match(page, gfpflags))) { in __slab_alloc()
2391 freelist = new_slab_objects(s, gfpflags, node, &c); in __slab_alloc()
2394 slab_out_of_memory(s, gfpflags, node); in __slab_alloc()
2400 if (likely(!kmem_cache_debug(s) && pfmemalloc_match(page, gfpflags))) in __slab_alloc()
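Inside the __slab_alloc() slow path the flags are consulted three times: a cpu slab that fails pfmemalloc_match() is abandoned (2347), a replacement slab is requested with the same flags (2391), and failure triggers the OOM report (2394); line 2400 then re-checks the match before handing out the new freelist. A condensed sketch of that flow, assuming the era's deactivate_slab() helper:

    if (unlikely(!pfmemalloc_match(page, gfpflags))) {
            /* Reserve page, non-entitled caller: give the slab back. */
            deactivate_slab(s, page, c->freelist);
            c->page = NULL;
            c->freelist = NULL;
            goto new_slab;
    }
    /* ... */
    freelist = new_slab_objects(s, gfpflags, node, &c);
    if (unlikely(!freelist)) {
            slab_out_of_memory(s, gfpflags, node);
            return NULL;
    }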
2426 gfp_t gfpflags, int node, unsigned long addr) in slab_alloc_node() argument
2433 s = slab_pre_alloc_hook(s, gfpflags); in slab_alloc_node()
2473 object = __slab_alloc(s, gfpflags, node, addr, c); in slab_alloc_node()
2504 if (unlikely(gfpflags & __GFP_ZERO) && object) in slab_alloc_node()
2507 slab_post_alloc_hook(s, gfpflags, object); in slab_alloc_node()
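slab_alloc_node() is also where __GFP_ZERO is honored: SLUB zeroes the object explicitly after the allocation succeeds rather than relying on the page allocator (2504). The tail of the function, with the memset() filled in from context:

    /* Honor __GFP_ZERO on both the fast and the slow path. */
    if (unlikely(gfpflags & __GFP_ZERO) && object)
            memset(object, 0, s->object_size);

    slab_post_alloc_hook(s, gfpflags, object);
    return object;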
2513 gfp_t gfpflags, unsigned long addr) in slab_alloc() argument
2515 return slab_alloc_node(s, gfpflags, NUMA_NO_NODE, addr); in slab_alloc()
2518 void *kmem_cache_alloc(struct kmem_cache *s, gfp_t gfpflags) in kmem_cache_alloc() argument
2520 void *ret = slab_alloc(s, gfpflags, _RET_IP_); in kmem_cache_alloc()
2523 s->size, gfpflags); in kmem_cache_alloc()
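kmem_cache_alloc() is the public entry point, so the flags a driver passes here are exactly what reaches pfmemalloc_match() and slab_out_of_memory() above. A hypothetical caller (foo_cache and struct foo are illustrative, not from the listing):

    struct foo *f = kmem_cache_alloc(foo_cache, GFP_KERNEL | __GFP_ZERO);
    if (!f)
            return -ENOMEM; /* without __GFP_NOWARN, the failure was logged */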
2530 void *kmem_cache_alloc_trace(struct kmem_cache *s, gfp_t gfpflags, size_t size) in kmem_cache_alloc_trace() argument
2532 void *ret = slab_alloc(s, gfpflags, _RET_IP_); in kmem_cache_alloc_trace()
2533 trace_kmalloc(_RET_IP_, ret, size, s->size, gfpflags); in kmem_cache_alloc_trace()
2541 void *kmem_cache_alloc_node(struct kmem_cache *s, gfp_t gfpflags, int node) in kmem_cache_alloc_node() argument
2543 void *ret = slab_alloc_node(s, gfpflags, node, _RET_IP_); in kmem_cache_alloc_node()
2546 s->object_size, s->size, gfpflags, node); in kmem_cache_alloc_node()
2554 gfp_t gfpflags, in kmem_cache_alloc_node_trace() argument
2557 void *ret = slab_alloc_node(s, gfpflags, node, _RET_IP_); in kmem_cache_alloc_node_trace()
2560 size, s->size, gfpflags, node); in kmem_cache_alloc_node_trace()
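The _node variants (2541, 2554) add an explicit NUMA node; slab_alloc() itself is just slab_alloc_node() with NUMA_NO_NODE (2515). A hypothetical NUMA-aware caller, again with an illustrative cache:

    /* Place the object near a given CPU's memory node. */
    f = kmem_cache_alloc_node(foo_cache, GFP_KERNEL, cpu_to_node(cpu));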
3808 void *__kmalloc_track_caller(size_t size, gfp_t gfpflags, unsigned long caller) in __kmalloc_track_caller() argument
3814 return kmalloc_large(size, gfpflags); in __kmalloc_track_caller()
3816 s = kmalloc_slab(size, gfpflags); in __kmalloc_track_caller()
3821 ret = slab_alloc(s, gfpflags, caller); in __kmalloc_track_caller()
3824 trace_kmalloc(caller, ret, size, s->size, gfpflags); in __kmalloc_track_caller()
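__kmalloc_track_caller() is kmalloc() with the call site passed in for tracing. The matches imply the usual split: oversized requests bypass the slab caches via kmalloc_large(), everything else resolves a size-class cache with kmalloc_slab(). A reconstruction with the elided checks filled in from the standard pattern (the ZERO_OR_NULL_PTR() test is an assumption):

    void *__kmalloc_track_caller(size_t size, gfp_t gfpflags, unsigned long caller)
    {
            struct kmem_cache *s;
            void *ret;

            if (unlikely(size > KMALLOC_MAX_CACHE_SIZE))
                    return kmalloc_large(size, gfpflags);   /* page-allocator path */

            s = kmalloc_slab(size, gfpflags);
            if (unlikely(ZERO_OR_NULL_PTR(s)))
                    return s;       /* size 0, or no cache for this size */

            ret = slab_alloc(s, gfpflags, caller);
            trace_kmalloc(caller, ret, size, s->size, gfpflags);
            return ret;
    }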
3830 void *__kmalloc_node_track_caller(size_t size, gfp_t gfpflags, in __kmalloc_node_track_caller() argument
3837 ret = kmalloc_large_node(size, gfpflags, node); in __kmalloc_node_track_caller()
3841 gfpflags, node); in __kmalloc_node_track_caller()
3846 s = kmalloc_slab(size, gfpflags); in __kmalloc_node_track_caller()
3851 ret = slab_alloc_node(s, gfpflags, node, caller); in __kmalloc_node_track_caller()
3854 trace_kmalloc_node(caller, ret, size, s->size, gfpflags, node); in __kmalloc_node_track_caller()
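The node-aware twin has the same shape; its large-size branch (3837-3841) routes to kmalloc_large_node() and traces with the page-allocator size, plausibly along these lines (the get_order() computation is an assumption from the common pattern):

    if (unlikely(size > KMALLOC_MAX_CACHE_SIZE)) {
            ret = kmalloc_large_node(size, gfpflags, node);
            trace_kmalloc_node(caller, ret, size,
                               PAGE_SIZE << get_order(size),
                               gfpflags, node);
            return ret;
    }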