Lines matching refs:object — cross-reference listing for the identifier "object" in the SLUB allocator (mm/slub.c). Each entry gives the source line number, the matching line, the enclosing function, and whether "object" is an argument or a local variable there.

229 				struct page *page, const void *object)  in check_valid_pointer()  argument
233 if (!object) in check_valid_pointer()
237 if (object < base || object >= base + page->objects * s->size || in check_valid_pointer()
238 (object - base) % s->size) { in check_valid_pointer()
245 static inline void *get_freepointer(struct kmem_cache *s, void *object) in get_freepointer() argument
247 return *(void **)(object + s->offset); in get_freepointer()
250 static void prefetch_freepointer(const struct kmem_cache *s, void *object) in prefetch_freepointer() argument
252 prefetch(object + s->offset); in prefetch_freepointer()
255 static inline void *get_freepointer_safe(struct kmem_cache *s, void *object) in get_freepointer_safe() argument
260 probe_kernel_read(&p, (void **)(object + s->offset), sizeof(p)); in get_freepointer_safe()
262 p = get_freepointer(s, object); in get_freepointer_safe()
267 static inline void set_freepointer(struct kmem_cache *s, void *object, void *fp) in set_freepointer() argument
269 *(void **)(object + s->offset) = fp; in set_freepointer()
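
The get_freepointer()/set_freepointer() lines above show the core SLUB trick: while an object is free, the pointer to the next free object is stored inside the object itself, at offset s->offset. Below is a minimal userspace sketch of that idea; the pool type, names, and sizes are illustrative and are not SLUB's.

#include <stdio.h>
#include <stdlib.h>

struct mini_pool {
        size_t size;    /* object size, including room for the link */
        size_t offset;  /* where the next-free pointer is stored    */
        void *freelist; /* first free object, NULL when exhausted   */
};

static void *get_free_ptr(struct mini_pool *p, void *object)
{
        return *(void **)((char *)object + p->offset);
}

static void set_free_ptr(struct mini_pool *p, void *object, void *fp)
{
        *(void **)((char *)object + p->offset) = fp;
}

static void *pool_alloc(struct mini_pool *p)
{
        void *object = p->freelist;

        if (object)
                p->freelist = get_free_ptr(p, object);
        return object;
}

static void pool_free(struct mini_pool *p, void *object)
{
        set_free_ptr(p, object, p->freelist);
        p->freelist = object;
}

int main(void)
{
        struct mini_pool pool = { .size = 64, .offset = 0, .freelist = NULL };
        static char slab[4 * 64];
        int i;

        /* Thread every object of the "slab" onto the freelist. */
        for (i = 0; i < 4; i++)
                pool_free(&pool, slab + i * pool.size);

        void *a = pool_alloc(&pool);
        void *b = pool_alloc(&pool);
        printf("a=%p b=%p\n", a, b);
        pool_free(&pool, a);
        pool_free(&pool, b);
        return 0;
}
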
500 static struct track *get_track(struct kmem_cache *s, void *object, in get_track() argument
506 p = object + s->offset + sizeof(void *); in get_track()
508 p = object + s->inuse; in get_track()
513 static void set_track(struct kmem_cache *s, void *object, in set_track() argument
516 struct track *p = get_track(s, object, alloc); in set_track()
547 static void init_tracking(struct kmem_cache *s, void *object) in init_tracking() argument
552 set_track(s, object, TRACK_FREE, 0UL); in init_tracking()
553 set_track(s, object, TRACK_ALLOC, 0UL); in init_tracking()
575 static void print_tracking(struct kmem_cache *s, void *object) in print_tracking() argument
580 print_track("Allocated", get_track(s, object, TRACK_ALLOC)); in print_tracking()
581 print_track("Freed", get_track(s, object, TRACK_FREE)); in print_tracking()
656 u8 *object, char *reason) in object_err() argument
659 print_trailer(s, page, object); in object_err()
676 static void init_object(struct kmem_cache *s, void *object, u8 val) in init_object() argument
678 u8 *p = object; in init_object()
697 u8 *object, char *what, in check_bytes_and_report() argument
716 print_trailer(s, page, object); in check_bytes_and_report()
814 void *object, u8 val) in check_object() argument
816 u8 *p = object; in check_object()
817 u8 *endobject = object + s->object_size; in check_object()
820 if (!check_bytes_and_report(s, page, object, "Redzone", in check_object()
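
The check_bytes_and_report()/check_object() lines above check the redzone: the bytes between the end of the payload and the end of the object slot hold a known pattern, and any mismatch means something wrote past the object. The pattern value and sizes below are illustrative, not the kernel's poison constants.

#include <stdio.h>
#include <string.h>

#define REDZONE_BYTE 0xcc

static int check_bytes(const char *what, const unsigned char *start,
                       size_t bytes, unsigned char value)
{
        size_t i;

        for (i = 0; i < bytes; i++) {
                if (start[i] != value) {
                        fprintf(stderr,
                                "%s overwritten at +%zu: %#x (expected %#x)\n",
                                what, i, (unsigned)start[i], (unsigned)value);
                        return 0;
                }
        }
        return 1;
}

int main(void)
{
        unsigned char slot[64];
        size_t payload = 48;

        memset(slot, 0, payload);
        memset(slot + payload, REDZONE_BYTE, sizeof(slot) - payload);

        slot[50] = 'X';  /* simulate an overflow into the redzone */

        if (!check_bytes("Redzone", slot + payload,
                         sizeof(slot) - payload, REDZONE_BYTE))
                fprintf(stderr, "object failed redzone check\n");
        return 0;
}
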
900 void *object = NULL; in on_freelist() local
908 if (object) { in on_freelist()
909 object_err(s, page, object, in on_freelist()
911 set_freepointer(s, object, NULL); in on_freelist()
921 object = fp; in on_freelist()
922 fp = get_freepointer(s, object); in on_freelist()
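
The on_freelist() lines above walk the chain of embedded free pointers, validating each link the way check_valid_pointer() does (inside the slab, on an object boundary) and truncating the chain at the first corrupt link instead of chasing a bogus pointer. The layout constants in this sketch are illustrative.

#include <stdio.h>
#include <stdint.h>

#define OBJ_SIZE 32
#define NR_OBJS  8

static void *get_fp(void *object) { return *(void **)object; }
static void set_fp(void *object, void *fp) { *(void **)object = fp; }

static int valid_pointer(char *base, void *object)
{
        uintptr_t lo = (uintptr_t)base;
        uintptr_t hi = lo + NR_OBJS * OBJ_SIZE;
        uintptr_t p  = (uintptr_t)object;

        return p >= lo && p < hi && (p - lo) % OBJ_SIZE == 0;
}

int main(void)
{
        static char slab[NR_OBJS * OBJ_SIZE];
        void *freelist = NULL, *fp;
        int nr = 0, i;

        /* Build a freelist over every second object, then corrupt a link. */
        for (i = NR_OBJS - 2; i >= 0; i -= 2) {
                set_fp(slab + i * OBJ_SIZE, freelist);
                freelist = slab + i * OBJ_SIZE;
        }
        set_fp(slab + 2 * OBJ_SIZE, (void *)0x1234);    /* corruption */

        fp = freelist;
        while (fp) {
                void *next = get_fp(fp);

                nr++;
                if (next && !valid_pointer(slab, next)) {
                        fprintf(stderr, "freelist corrupt after %p\n", fp);
                        set_fp(fp, NULL);       /* fix up: end the list here */
                        break;
                }
                fp = next;
        }
        printf("%d objects reachable on the freelist\n", nr);
        return 0;
}
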
945 static void trace(struct kmem_cache *s, struct page *page, void *object, in trace() argument
952 object, page->inuse, in trace()
956 print_section("Object ", (void *)object, in trace()
1023 void *object) in setup_object_debug() argument
1028 init_object(s, object, SLUB_RED_INACTIVE); in setup_object_debug()
1029 init_tracking(s, object); in setup_object_debug()
1034 void *object, unsigned long addr) in alloc_debug_processing() argument
1039 if (!check_valid_pointer(s, page, object)) { in alloc_debug_processing()
1040 object_err(s, page, object, "Freelist Pointer check fails"); in alloc_debug_processing()
1044 if (!check_object(s, page, object, SLUB_RED_INACTIVE)) in alloc_debug_processing()
1049 set_track(s, object, TRACK_ALLOC, addr); in alloc_debug_processing()
1050 trace(s, page, object, 1); in alloc_debug_processing()
1051 init_object(s, object, SLUB_RED_ACTIVE); in alloc_debug_processing()
1075 void *object = head; in free_debug_processing() local
1087 if (!check_valid_pointer(s, page, object)) { in free_debug_processing()
1088 slab_err(s, page, "Invalid object pointer 0x%p", object); in free_debug_processing()
1092 if (on_freelist(s, page, object)) { in free_debug_processing()
1093 object_err(s, page, object, "Object already free"); in free_debug_processing()
1097 if (!check_object(s, page, object, SLUB_RED_ACTIVE)) in free_debug_processing()
1103 "outside of slab", object); in free_debug_processing()
1106 object); in free_debug_processing()
1109 object_err(s, page, object, in free_debug_processing()
1115 set_track(s, object, TRACK_FREE, addr); in free_debug_processing()
1116 trace(s, page, object, 0); in free_debug_processing()
1118 init_object(s, object, SLUB_RED_INACTIVE); in free_debug_processing()
1121 if (object != tail) { in free_debug_processing()
1122 object = get_freepointer(s, object); in free_debug_processing()
1140 slab_fix(s, "Object at 0x%p not freed", object); in free_debug_processing()
1227 struct page *page, void *object) {} in setup_object_debug() argument
1230 struct page *page, void *object, unsigned long addr) { return 0; } in alloc_debug_processing() argument
1240 void *object, u8 val) { return 1; } in check_object() argument
1302 void *object = p[i]; in slab_post_alloc_hook() local
1304 kmemcheck_slab_alloc(s, flags, object, slab_ksize(s)); in slab_post_alloc_hook()
1305 kmemleak_alloc_recursive(object, s->object_size, 1, in slab_post_alloc_hook()
1307 kasan_slab_alloc(s, object); in slab_post_alloc_hook()
1350 void *object = head; in slab_free_freelist_hook() local
1354 slab_free_hook(s, object); in slab_free_freelist_hook()
1355 } while ((object != tail_obj) && in slab_free_freelist_hook()
1356 (object = get_freepointer(s, object))); in slab_free_freelist_hook()
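
The free_debug_processing() and slab_free_freelist_hook() lines above share one pattern: a bulk free hands over a short list of objects already linked through their embedded free pointers, and a hook runs on each object from head until tail has been processed. The names in this sketch are illustrative.

#include <stdio.h>

#define OBJ_SIZE 32
#define NR 4

static void *get_fp(void *object) { return *(void **)object; }
static void set_fp(void *object, void *fp) { *(void **)object = fp; }

static void free_hook(void *object)
{
        printf("hook on %p\n", object);
}

static void walk_freelist(void *head, void *tail)
{
        void *object = head;

        do {
                free_hook(object);
        } while (object != tail && (object = get_fp(object)));
}

int main(void)
{
        static char slab[NR * OBJ_SIZE];
        void *head = slab, *tail = slab + (NR - 1) * OBJ_SIZE;
        int i;

        /* Link object i to object i + 1; the tail terminates the chain. */
        for (i = 0; i < NR - 1; i++)
                set_fp(slab + i * OBJ_SIZE, slab + (i + 1) * OBJ_SIZE);
        set_fp(tail, NULL);

        walk_freelist(head, tail);
        return 0;
}
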
1361 void *object) in setup_object() argument
1363 setup_object_debug(s, page, object); in setup_object()
1365 kasan_unpoison_object_data(s, object); in setup_object()
1366 s->ctor(object); in setup_object()
1367 kasan_poison_object_data(s, object); in setup_object()
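
The setup_object() lines above show an ordering detail: when a cache has a constructor, the object is unpoisoned just long enough for the ctor to initialize it, then poisoned again until it is really allocated, so stray accesses in between can still be flagged. The shadow flag below is a stand-in for KASAN's shadow memory and is purely illustrative.

#include <assert.h>
#include <stdio.h>
#include <string.h>

struct obj {
        int refcount;
        char name[16];
};

static int poisoned;    /* 1 = accesses to the object would be flagged */

static void obj_ctor(void *object)
{
        struct obj *o = object;

        assert(!poisoned);      /* the ctor must see an unpoisoned object */
        o->refcount = 0;
        strcpy(o->name, "fresh");
}

static void setup_object(void *object, void (*ctor)(void *))
{
        if (ctor) {
                poisoned = 0;   /* cf. kasan_unpoison_object_data() */
                ctor(object);
                poisoned = 1;   /* cf. kasan_poison_object_data(): poisoned
                                 * again until the object is allocated */
        }
}

int main(void)
{
        struct obj o;

        poisoned = 1;           /* a freshly created slab starts poisoned */
        setup_object(&o, obj_ctor);
        printf("poisoned again after setup: %d\n", poisoned);
        return 0;
}
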
1663 void *object = NULL; in get_partial_node() local
1683 t = acquire_slab(s, n, page, object == NULL, &objects); in get_partial_node()
1688 if (!object) { in get_partial_node()
1691 object = t; in get_partial_node()
1702 return object; in get_partial_node()
1716 void *object; in get_any_partial() local
1751 object = get_partial_node(s, n, c, flags); in get_any_partial()
1752 if (object) { in get_any_partial()
1760 return object; in get_any_partial()
1775 void *object; in get_partial() local
1783 object = get_partial_node(s, get_node(s, searchnode), c, flags); in get_partial()
1784 if (object || node != NUMA_NO_NODE) in get_partial()
1785 return object; in get_partial()
2485 void *object; in slab_alloc_node() local
2527 object = c->freelist; in slab_alloc_node()
2529 if (unlikely(!object || !node_match(page, node))) { in slab_alloc_node()
2530 object = __slab_alloc(s, gfpflags, node, addr, c); in slab_alloc_node()
2533 void *next_object = get_freepointer_safe(s, object); in slab_alloc_node()
2551 object, tid, in slab_alloc_node()
2561 if (unlikely(gfpflags & __GFP_ZERO) && object) in slab_alloc_node()
2562 memset(object, 0, s->object_size); in slab_alloc_node()
2564 slab_post_alloc_hook(s, gfpflags, 1, &object); in slab_alloc_node()
2566 return object; in slab_alloc_node()
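
The slab_alloc_node() lines above are the lockless fastpath: snapshot the per-cpu freelist together with a transaction id, compute the next free object, and commit both with one double-word compare-and-swap, retrying if anything changed in between. The kernel does this with this_cpu_cmpxchg_double() on per-cpu data; the C11 atomics below only approximate the shape of that transaction (and may need -latomic to link).

#include <stdatomic.h>
#include <stdio.h>

#define OBJ_SIZE 32
#define NR 4

struct fast_state {
        void *freelist;
        unsigned long tid;
};

static _Atomic struct fast_state state;

static void *get_fp(void *object) { return *(void **)object; }
static void set_fp(void *object, void *fp) { *(void **)object = fp; }

static void *fast_alloc(void)
{
        struct fast_state old, new;

        do {
                old = atomic_load(&state);
                if (!old.freelist)
                        return NULL;    /* the kernel falls back to the slowpath */
                new.freelist = get_fp(old.freelist);
                new.tid = old.tid + 1;
        } while (!atomic_compare_exchange_weak(&state, &old, new));

        return old.freelist;
}

int main(void)
{
        static char slab[NR * OBJ_SIZE];
        struct fast_state init = { .freelist = slab, .tid = 0 };
        int i;

        for (i = 0; i < NR - 1; i++)
                set_fp(slab + i * OBJ_SIZE, slab + (i + 1) * OBJ_SIZE);
        set_fp(slab + (NR - 1) * OBJ_SIZE, NULL);
        atomic_store(&state, init);

        for (;;) {
                void *object = fast_alloc();

                if (!object)
                        break;
                printf("allocated %p\n", object);
        }
        return 0;
}
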
2843 void *object; in build_detached_freelist() local
2849 object = p[--size]; in build_detached_freelist()
2850 } while (!object && size); in build_detached_freelist()
2852 if (!object) in build_detached_freelist()
2856 df->s = cache_from_obj(s, object); in build_detached_freelist()
2859 set_freepointer(df->s, object, NULL); in build_detached_freelist()
2860 df->page = virt_to_head_page(object); in build_detached_freelist()
2861 df->tail = object; in build_detached_freelist()
2862 df->freelist = object; in build_detached_freelist()
2867 object = p[--size]; in build_detached_freelist()
2868 if (!object) in build_detached_freelist()
2872 if (df->page == virt_to_head_page(object)) { in build_detached_freelist()
2874 set_freepointer(df->s, object, df->freelist); in build_detached_freelist()
2875 df->freelist = object; in build_detached_freelist()
2931 void *object = c->freelist; in kmem_cache_alloc_bulk() local
2933 if (unlikely(!object)) { in kmem_cache_alloc_bulk()
2946 c->freelist = get_freepointer(s, object); in kmem_cache_alloc_bulk()
2947 p[i] = object; in kmem_cache_alloc_bulk()
3588 static size_t __ksize(const void *object) in __ksize() argument
3592 if (unlikely(object == ZERO_SIZE_PTR)) in __ksize()
3595 page = virt_to_head_page(object); in __ksize()
3605 size_t ksize(const void *object) in ksize() argument
3607 size_t size = __ksize(object); in ksize()
3610 kasan_krealloc(object, size); in ksize()
3618 void *object = (void *)x; in kfree() local
3632 slab_free(page->slab_cache, page, object, NULL, 1, _RET_IP_); in kfree()