Lines Matching refs:size
264 static __always_inline int kmalloc_index(size_t size) in kmalloc_index() argument
266 if (!size) in kmalloc_index()
269 if (size <= KMALLOC_MIN_SIZE) in kmalloc_index()
272 if (KMALLOC_MIN_SIZE <= 32 && size > 64 && size <= 96) in kmalloc_index()
274 if (KMALLOC_MIN_SIZE <= 64 && size > 128 && size <= 192) in kmalloc_index()
276 if (size <= 8) return 3; in kmalloc_index()
277 if (size <= 16) return 4; in kmalloc_index()
278 if (size <= 32) return 5; in kmalloc_index()
279 if (size <= 64) return 6; in kmalloc_index()
280 if (size <= 128) return 7; in kmalloc_index()
281 if (size <= 256) return 8; in kmalloc_index()
282 if (size <= 512) return 9; in kmalloc_index()
283 if (size <= 1024) return 10; in kmalloc_index()
284 if (size <= 2 * 1024) return 11; in kmalloc_index()
285 if (size <= 4 * 1024) return 12; in kmalloc_index()
286 if (size <= 8 * 1024) return 13; in kmalloc_index()
287 if (size <= 16 * 1024) return 14; in kmalloc_index()
288 if (size <= 32 * 1024) return 15; in kmalloc_index()
289 if (size <= 64 * 1024) return 16; in kmalloc_index()
290 if (size <= 128 * 1024) return 17; in kmalloc_index()
291 if (size <= 256 * 1024) return 18; in kmalloc_index()
292 if (size <= 512 * 1024) return 19; in kmalloc_index()
293 if (size <= 1024 * 1024) return 20; in kmalloc_index()
294 if (size <= 2 * 1024 * 1024) return 21; in kmalloc_index()
295 if (size <= 4 * 1024 * 1024) return 22; in kmalloc_index()
296 if (size <= 8 * 1024 * 1024) return 23; in kmalloc_index()
297 if (size <= 16 * 1024 * 1024) return 24; in kmalloc_index()
298 if (size <= 32 * 1024 * 1024) return 25; in kmalloc_index()
299 if (size <= 64 * 1024 * 1024) return 26; in kmalloc_index()
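The ladder above maps a requested size to the index of the fixed-size kmalloc cache that serves it: index 3 covers allocations up to 8 bytes, index 4 up to 16 bytes, and so on up to index 26 for 64 MB, with two odd-sized 96- and 192-byte caches handled by the early branches. As a sanity check, here is a minimal userspace re-implementation of the same mapping; the return values of the special branches (0, KMALLOC_SHIFT_LOW, 1, 2) are not part of the matched lines and KMALLOC_MIN_SIZE is assumed to be 8, so treat this as an illustrative sketch rather than the kernel code itself.

/* Illustrative userspace sketch of the size-to-index ladder above.
 * Assumes KMALLOC_MIN_SIZE == 8; in the kernel it is architecture-dependent. */
#include <stdio.h>
#include <stddef.h>

#define KMALLOC_MIN_SIZE   8
#define KMALLOC_SHIFT_LOW  3

static int demo_kmalloc_index(size_t size)
{
	if (!size)
		return 0;
	if (size <= KMALLOC_MIN_SIZE)
		return KMALLOC_SHIFT_LOW;
	if (KMALLOC_MIN_SIZE <= 32 && size > 64 && size <= 96)
		return 1;	/* the odd 96-byte cache */
	if (KMALLOC_MIN_SIZE <= 64 && size > 128 && size <= 192)
		return 2;	/* the odd 192-byte cache */
	/* otherwise: log2 of the smallest power of two that fits the request */
	for (int i = 3; i <= 26; i++)
		if (size <= (size_t)1 << i)
			return i;
	return -1;	/* larger than 64 MB: not served by a kmalloc cache */
}

int main(void)
{
	size_t sizes[] = { 1, 8, 24, 100, 200, 4096, 70000 };

	for (size_t i = 0; i < sizeof(sizes) / sizeof(sizes[0]); i++)
		printf("size %zu -> index %d\n", sizes[i],
		       demo_kmalloc_index(sizes[i]));
	return 0;
}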
307 void *__kmalloc(size_t size, gfp_t flags) __assume_kmalloc_alignment;
322 void *__kmalloc_node(size_t size, gfp_t flags, int node) __assume_kmalloc_alignment;
325 static __always_inline void *__kmalloc_node(size_t size, gfp_t flags, int node) in __kmalloc_node() argument
327 return __kmalloc(size, flags); in __kmalloc_node()
342 int node, size_t size) __assume_slab_alignment;
347 int node, size_t size) in kmem_cache_alloc_node_trace() argument
349 return kmem_cache_alloc_trace(s, gfpflags, size); in kmem_cache_alloc_node_trace()
355 gfp_t flags, size_t size) in kmem_cache_alloc_trace() argument
359 kasan_kmalloc(s, ret, size); in kmem_cache_alloc_trace()
366 int node, size_t size) in kmem_cache_alloc_node_trace() argument
370 kasan_kmalloc(s, ret, size); in kmem_cache_alloc_node_trace()
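The kmem_cache_alloc_trace() and kmem_cache_alloc_node_trace() lines above are the wrappers the constant-size kmalloc paths land in: they allocate from a specific kmem_cache and then report the originally requested size to tracing and KASAN via kasan_kmalloc(). They sit on top of the plain kmem_cache API; the sketch below shows that underlying API for reference, with a made-up struct foo and cache name.

/* Hedged sketch: allocating from a dedicated kmem_cache, the facility the
 * *_trace wrappers above build on.  "struct foo" and the cache name are
 * invented for illustration. */
#include <linux/slab.h>
#include <linux/errno.h>

struct foo {
	int id;
	char name[32];
};

static struct kmem_cache *foo_cache;

static int foo_setup(void)
{
	foo_cache = kmem_cache_create("foo_cache", sizeof(struct foo),
				      0, SLAB_HWCACHE_ALIGN, NULL);
	if (!foo_cache)
		return -ENOMEM;
	return 0;
}

static struct foo *foo_new(gfp_t flags)
{
	/* Objects come back sized and aligned for struct foo. */
	return kmem_cache_alloc(foo_cache, flags);
}

static void foo_free(struct foo *f)
{
	kmem_cache_free(foo_cache, f);
}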
375 extern void *kmalloc_order(size_t size, gfp_t flags, unsigned int order) __assume_page_alignment;
378 extern void *kmalloc_order_trace(size_t size, gfp_t flags, unsigned int order) __assume_page_alignment;
381 kmalloc_order_trace(size_t size, gfp_t flags, unsigned int order) in kmalloc_order_trace() argument
383 return kmalloc_order(size, flags, order); in kmalloc_order_trace()
387 static __always_inline void *kmalloc_large(size_t size, gfp_t flags) in kmalloc_large() argument
389 unsigned int order = get_order(size); in kmalloc_large()
390 return kmalloc_order_trace(size, flags, order); in kmalloc_large()
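kmalloc_large() covers constant sizes too big for any kmalloc cache: it converts the byte count into a page order with get_order() and passes it to kmalloc_order(), so the memory comes from the page allocator in power-of-two blocks of pages. A userspace sketch of that order computation, assuming 4 KB pages (the kernel derives the shift from PAGE_SHIFT):

/* Userspace sketch of the order computation kmalloc_large() relies on.
 * Assumes 4 KB pages (DEMO_PAGE_SHIFT == 12). */
#include <stdio.h>
#include <stddef.h>

#define DEMO_PAGE_SHIFT 12	/* 4 KB pages (assumption) */

static unsigned int demo_get_order(size_t size)
{
	unsigned int order = 0;

	size = (size - 1) >> DEMO_PAGE_SHIFT;
	while (size) {
		order++;
		size >>= 1;
	}
	return order;
}

int main(void)
{
	printf("16 KB  -> order %u\n", demo_get_order(16 * 1024));	/* 2: 4 pages   */
	printf("100 KB -> order %u\n", demo_get_order(100 * 1024));	/* 5: 32 pages  */
	return 0;
}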
446 static __always_inline void *kmalloc(size_t size, gfp_t flags) in kmalloc() argument
448 if (__builtin_constant_p(size)) { in kmalloc()
449 if (size > KMALLOC_MAX_CACHE_SIZE) in kmalloc()
450 return kmalloc_large(size, flags); in kmalloc()
453 int index = kmalloc_index(size); in kmalloc()
459 flags, size); in kmalloc()
463 return __kmalloc(size, flags); in kmalloc()
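Putting the pieces together, kmalloc() resolves a compile-time-constant size into one of three paths: kmalloc_large() above KMALLOC_MAX_CACHE_SIZE, a direct kmem_cache_alloc_trace() call on the cache picked by kmalloc_index(), or the generic __kmalloc() fallback for non-constant sizes. The caller-visible contract is the same either way; a minimal usage sketch follows, with a hypothetical struct sensor:

/* Minimal kmalloc() usage sketch; "struct sensor" is hypothetical. */
#include <linux/slab.h>
#include <linux/types.h>

struct sensor {
	int id;
	u32 raw;
};

static struct sensor *sensor_alloc(void)
{
	struct sensor *s;

	s = kmalloc(sizeof(*s), GFP_KERNEL);	/* GFP_KERNEL may sleep */
	if (!s)
		return NULL;	/* always check: the allocation can fail */

	s->id = 0;
	s->raw = 0;
	return s;
}

static void sensor_release(struct sensor *s)
{
	kfree(s);	/* kfree(NULL) is a no-op */
}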
486 static __always_inline void *kmalloc_node(size_t size, gfp_t flags, int node) in kmalloc_node() argument
489 if (__builtin_constant_p(size) && in kmalloc_node()
490 size <= KMALLOC_MAX_CACHE_SIZE && !(flags & GFP_DMA)) { in kmalloc_node()
491 int i = kmalloc_index(size); in kmalloc_node()
497 flags, node, size); in kmalloc_node()
500 return __kmalloc_node(size, flags, node); in kmalloc_node()
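kmalloc_node() takes the same constant-size shortcut, but only for requests that fit a kmalloc cache and are not GFP_DMA, and it lets the caller bind the allocation to a particular NUMA node. A hedged sketch of keeping per-queue data node-local; struct queue_ctx is invented for illustration:

/* Hedged sketch: node-local allocation with kmalloc_node().
 * "struct queue_ctx" is made up for illustration. */
#include <linux/slab.h>
#include <linux/spinlock.h>
#include <linux/numa.h>

struct queue_ctx {
	spinlock_t lock;
	unsigned long stats;
};

static struct queue_ctx *queue_ctx_alloc(int node)
{
	struct queue_ctx *ctx;

	/* NUMA_NO_NODE means "no preference"; otherwise memory is taken
	 * from the requested node so the hot path stays node-local. */
	ctx = kmalloc_node(sizeof(*ctx), GFP_KERNEL, node);
	if (!ctx)
		return NULL;

	spin_lock_init(&ctx->lock);
	ctx->stats = 0;
	return ctx;
}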
544 static inline void *kmalloc_array(size_t n, size_t size, gfp_t flags) in kmalloc_array() argument
546 if (size != 0 && n > SIZE_MAX / size) in kmalloc_array()
548 return __kmalloc(n * size, flags); in kmalloc_array()
557 static inline void *kcalloc(size_t n, size_t size, gfp_t flags) in kcalloc() argument
559 return kmalloc_array(n, size, flags | __GFP_ZERO); in kcalloc()
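kmalloc_array() is the overflow-safe way to size an array allocation: if n * size would wrap past SIZE_MAX it returns NULL instead of quietly handing back a short buffer, and kcalloc() is the same call with __GFP_ZERO added. A short usage sketch with a hypothetical element type:

/* Overflow-safe array allocation sketch; "struct sample" is hypothetical. */
#include <linux/slab.h>
#include <linux/types.h>

struct sample {
	u64 timestamp;
	s32 value;
};

static struct sample *samples_alloc(size_t n)
{
	/* Never open-code kmalloc(n * sizeof(...)): if n is externally
	 * controlled the multiplication can overflow.  kcalloc() checks
	 * for that and also zeroes the buffer. */
	return kcalloc(n, sizeof(struct sample), GFP_KERNEL);
}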
571 #define kmalloc_track_caller(size, flags) \ argument
572 __kmalloc_track_caller(size, flags, _RET_IP_)
576 #define kmalloc_node_track_caller(size, flags, node) \ argument
577 __kmalloc_node_track_caller(size, flags, node, \
582 #define kmalloc_node_track_caller(size, flags, node) \ argument
583 kmalloc_track_caller(size, flags)
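The *_track_caller variants pass the caller's return address (_RET_IP_) down to the allocator, so slab debugging attributes the memory to the real call site rather than to a thin wrapper helper. A hedged sketch of such a wrapper; the buf_dup() name is invented:

/* Hedged sketch: a small duplication helper that uses
 * kmalloc_track_caller() so slab debugging blames the caller of
 * buf_dup(), not buf_dup() itself. */
#include <linux/slab.h>
#include <linux/string.h>

static void *buf_dup(const void *src, size_t len, gfp_t flags)
{
	void *dst;

	dst = kmalloc_track_caller(len, flags);
	if (dst)
		memcpy(dst, src, len);
	return dst;
}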
600 static inline void *kzalloc(size_t size, gfp_t flags) in kzalloc() argument
602 return kmalloc(size, flags | __GFP_ZERO); in kzalloc()
611 static inline void *kzalloc_node(size_t size, gfp_t flags, int node) in kzalloc_node() argument
613 return kmalloc_node(size, flags | __GFP_ZERO, node); in kzalloc_node()
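Finally, kzalloc() and kzalloc_node() are simply kmalloc() and kmalloc_node() with __GFP_ZERO folded in, the idiomatic way to get a structure whose fields all start out zeroed. A short sketch with a hypothetical struct widget:

/* kzalloc() usage sketch; "struct widget" is hypothetical. */
#include <linux/slab.h>

struct widget {
	int refcount;
	void *priv;
	char label[16];
};

static struct widget *widget_create(void)
{
	/* Every field starts out zero, so no explicit memset() is needed. */
	return kzalloc(sizeof(struct widget), GFP_KERNEL);
}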