Lines matching references to "size" (KASAN shadow-memory implementation)
43 static void kasan_poison_shadow(const void *address, size_t size, u8 value) in kasan_poison_shadow() argument
48 shadow_end = kasan_mem_to_shadow(address + size); in kasan_poison_shadow()
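These first two lines already show the core idea behind generic KASAN: every 8 bytes of memory are described by one shadow byte, so poisoning a range reduces to a memset over the shadow bytes between the shadow addresses of the range's start and end. Below is a minimal user-space sketch of that arithmetic; KASAN_SHADOW_SCALE_SHIFT = 3 matches the kernel, but the flat shadow[] array and the helper names are illustrative stand-ins for the real shadow offset and for kasan_mem_to_shadow()/kasan_poison_shadow(). The later sketches in this listing reuse these toy definitions.

/* Toy model of the generic-KASAN shadow mapping, not the kernel code itself. */
#include <stdint.h>
#include <string.h>

#define KASAN_SHADOW_SCALE_SHIFT 3                       /* 8 bytes of memory per shadow byte */
#define KASAN_SHADOW_SCALE_SIZE  (1UL << KASAN_SHADOW_SCALE_SHIFT)

static uint8_t shadow[1 << 16];                          /* stand-in for the kernel's shadow region */

/* Stand-in for kasan_mem_to_shadow(): shift by 3, then add an offset (here: the array base). */
static uint8_t *mem_to_shadow(uintptr_t addr)
{
        return &shadow[addr >> KASAN_SHADOW_SCALE_SHIFT];
}

/* Mirrors the shape of kasan_poison_shadow(): memset the shadow of [addr, addr + size).
 * The end pointer rounds down, so only whole 8-byte granules are written here. */
static void poison_shadow(uintptr_t addr, size_t size, uint8_t value)
{
        uint8_t *start = mem_to_shadow(addr);
        uint8_t *end   = mem_to_shadow(addr + size);

        memset(start, value, end - start);
}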
53 void kasan_unpoison_shadow(const void *address, size_t size) in kasan_unpoison_shadow() argument
55 kasan_poison_shadow(address, size, 0); in kasan_unpoison_shadow()
57 if (size & KASAN_SHADOW_MASK) { in kasan_unpoison_shadow()
58 u8 *shadow = (u8 *)kasan_mem_to_shadow(address + size); in kasan_unpoison_shadow()
59 *shadow = size & KASAN_SHADOW_MASK; in kasan_unpoison_shadow()
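Lines 57-59 handle a length that is not a multiple of 8: the shadow byte of the final, partially covered granule stores how many of its leading bytes are valid (1..7), while 0 still means the whole granule is accessible. A sketch of that encoding on top of the toy helpers above (KASAN_SHADOW_MASK = 7 matches the generic-KASAN definition):

#define KASAN_SHADOW_MASK (KASAN_SHADOW_SCALE_SIZE - 1)  /* 7 */

/* Mirrors kasan_unpoison_shadow(): clear whole granules, then encode the remainder. */
static void unpoison_shadow(uintptr_t addr, size_t size)
{
        poison_shadow(addr, size, 0);                    /* whole granules: 0 = all 8 bytes ok */

        if (size & KASAN_SHADOW_MASK) {
                uint8_t *last = mem_to_shadow(addr + size);

                *last = size & KASAN_SHADOW_MASK;        /* only the first (size % 8) bytes valid */
        }
}

For example, unpoison_shadow(p, 13) with an 8-byte-aligned p leaves two shadow bytes, 0 and 5: the first granule is fully accessible and only the first five bytes of the second one are.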
173 size_t size) in bytes_is_zero() argument
175 while (size) { in bytes_is_zero()
179 size--; in bytes_is_zero()
215 size_t size) in memory_is_poisoned_n() argument
220 kasan_mem_to_shadow((void *)addr + size - 1) + 1); in memory_is_poisoned_n()
223 unsigned long last_byte = addr + size - 1; in memory_is_poisoned_n()
233 static __always_inline bool memory_is_poisoned(unsigned long addr, size_t size) in memory_is_poisoned() argument
235 if (__builtin_constant_p(size)) { in memory_is_poisoned()
236 switch (size) { in memory_is_poisoned()
252 return memory_is_poisoned_n(addr, size); in memory_is_poisoned()
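memory_is_poisoned() dispatches on compile-time-constant sizes (this file has hand-rolled fast paths for 1-, 2-, 4-, 8- and 16-byte accesses) and falls back to memory_is_poisoned_n() for arbitrary lengths, which scans the shadow of the whole range (bytes_is_zero() above does the byte-wise scanning) and then re-checks the final shadow byte against the partial-granule encoding. A condensed sketch of that slow path under the toy model, with the fast paths elided:

#include <stdbool.h>

/* Any nonzero shadow byte means "poisoned", except that the very last byte may
 * legitimately hold 1..7 if the access still ends inside the valid prefix. */
static bool memory_is_poisoned_n(uintptr_t addr, size_t size)
{
        uint8_t *start = mem_to_shadow(addr);
        uint8_t *end   = mem_to_shadow(addr + size - 1) + 1;

        for (uint8_t *p = start; p < end; p++) {
                if (*p == 0)
                        continue;
                if (p == end - 1) {
                        uintptr_t last_byte = addr + size - 1;

                        if ((int8_t)*p > (int8_t)(last_byte & KASAN_SHADOW_MASK))
                                continue;                /* access ends inside the valid prefix */
                }
                return true;
        }
        return false;
}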
257 size_t size, bool write) in check_memory_region() argument
259 if (unlikely(size == 0)) in check_memory_region()
264 kasan_report(addr, size, write, _RET_IP_); in check_memory_region()
268 if (likely(!memory_is_poisoned(addr, size))) in check_memory_region()
271 kasan_report(addr, size, write, _RET_IP_); in check_memory_region()
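check_memory_region() is the single funnel for all the compiler-generated callbacks below: zero-size accesses are ignored, addresses that lie below the shadowed region are reported immediately, and everything else is reported only if its shadow says it is poisoned. A sketch of that control flow (report() and LOWEST_SHADOWED_ADDR are toy stand-ins for kasan_report() and the kernel's KASAN_SHADOW_START-based lower bound):

#include <stdio.h>

#define LOWEST_SHADOWED_ADDR 0x1000UL            /* illustrative lower bound */

/* Stand-in for kasan_report(), which prints the full KASAN bug report. */
static void report(uintptr_t addr, size_t size, bool write)
{
        fprintf(stderr, "bad %s of %zu bytes at %#lx\n",
                write ? "write" : "read", size, (unsigned long)addr);
}

static void check_memory_region(uintptr_t addr, size_t size, bool write)
{
        if (size == 0)
                return;                          /* zero-size accesses are ignored */

        if (addr < LOWEST_SHADOWED_ADDR) {       /* no shadow exists for this address */
                report(addr, size, write);
                return;
        }

        if (memory_is_poisoned_n(addr, size))    /* the kernel calls memory_is_poisoned() */
                report(addr, size, write);
}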
274 void __asan_loadN(unsigned long addr, size_t size);
275 void __asan_storeN(unsigned long addr, size_t size);
343 unsigned long size = cache->object_size; in kasan_slab_free() local
344 unsigned long rounded_up_size = round_up(size, KASAN_SHADOW_SCALE_SIZE); in kasan_slab_free()
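On slab free the whole object is poisoned so that later use-after-free accesses trip the shadow check; object_size is rounded up to the 8-byte granule because poisoning always writes whole shadow bytes. A small sketch of that rounding (OBJECT_FREED plays the role of the kernel's KASAN_KMALLOC_FREE marker; its value here is illustrative):

/* round_up() to the shadow granule, as kasan_slab_free() does. */
#define SHADOW_ROUND_UP(x) \
        (((x) + KASAN_SHADOW_SCALE_SIZE - 1) & ~(KASAN_SHADOW_SCALE_SIZE - 1))

#define OBJECT_FREED 0xFB        /* illustrative stand-in for KASAN_KMALLOC_FREE */

/* E.g. a 44-byte object poisons a 48-byte range, i.e. six shadow bytes. */
static void slab_free_poison(uintptr_t object, size_t object_size)
{
        size_t rounded_up_size = SHADOW_ROUND_UP(object_size);   /* 44 -> 48 */

        poison_shadow(object, rounded_up_size, OBJECT_FREED);
}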
353 void kasan_kmalloc(struct kmem_cache *cache, const void *object, size_t size) in kasan_kmalloc() argument
361 redzone_start = round_up((unsigned long)(object + size), in kasan_kmalloc()
366 kasan_unpoison_shadow(object, size); in kasan_kmalloc()
372 void kasan_kmalloc_large(const void *ptr, size_t size) in kasan_kmalloc_large() argument
382 redzone_start = round_up((unsigned long)(ptr + size), in kasan_kmalloc_large()
386 kasan_unpoison_shadow(ptr, size); in kasan_kmalloc_large()
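kasan_kmalloc() and kasan_kmalloc_large() share one pattern: unpoison exactly the requested size, then poison the slack between the rounded-up request and the end of the allocation as a redzone, so small overflows past the requested size are caught even though the underlying slab object or compound page is larger. A sketch of the slab-side layout (REDZONE stands in for KASAN_KMALLOC_REDZONE with an illustrative value; the large-allocation path does the same thing but rounds the redzone start to page granularity and ends at the compound page's last byte):

#define REDZONE 0xFC             /* illustrative stand-in for KASAN_KMALLOC_REDZONE */

/* E.g. kmalloc(37) from a 64-byte cache: bytes [0,37) of the object are unpoisoned
 * (four shadow bytes become 0, the fifth becomes 5), and [40,64) becomes redzone. */
static void kmalloc_poison(uintptr_t object, size_t requested, size_t object_size)
{
        uintptr_t redzone_start = SHADOW_ROUND_UP(object + requested);
        uintptr_t redzone_end   = SHADOW_ROUND_UP(object + object_size);

        unpoison_shadow(object, requested);
        poison_shadow(redzone_start, redzone_end - redzone_start, REDZONE);
}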
391 void kasan_krealloc(const void *object, size_t size) in kasan_krealloc() argument
401 kasan_kmalloc_large(object, size); in kasan_krealloc()
403 kasan_kmalloc(page->slab_cache, object, size); in kasan_krealloc()
427 int kasan_module_alloc(void *addr, size_t size) in kasan_module_alloc() argument
434 shadow_size = round_up(size >> KASAN_SHADOW_SCALE_SHIFT, in kasan_module_alloc()
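For module space the shadow is not statically mapped, so kasan_module_alloc() allocates it on demand: one shadow byte per 8 bytes of module memory, rounded up to whole pages because the shadow is mapped with page granularity. A worked example of that arithmetic (PAGE_SIZE = 4096 is the usual x86-64 value and is an assumption here):

#define TOY_PAGE_SIZE 4096UL                     /* assumption: typical 4 KiB pages */
#define PAGE_ROUND_UP(x) (((x) + TOY_PAGE_SIZE - 1) & ~(TOY_PAGE_SIZE - 1))

int main(void)
{
        size_t module_size = 24 * 1024;          /* 24 KiB of module text/data */
        size_t shadow_size = PAGE_ROUND_UP(module_size >> KASAN_SHADOW_SCALE_SHIFT);

        /* 24 KiB / 8 = 3 KiB of shadow, rounded up to one 4 KiB page. */
        printf("shadow_size = %zu bytes\n", shadow_size);
        return 0;
}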
463 size_t aligned_size = round_up(global->size, KASAN_SHADOW_SCALE_SIZE); in register_global()
465 kasan_unpoison_shadow(global->beg, global->size); in register_global()
472 void __asan_register_globals(struct kasan_global *globals, size_t size) in __asan_register_globals() argument
476 for (i = 0; i < size; i++) in __asan_register_globals()
481 void __asan_unregister_globals(struct kasan_global *globals, size_t size) in __asan_unregister_globals() argument
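Globals get the same treatment when a module is loaded (or at boot for built-in code): the compiler emits one struct kasan_global per instrumented variable, __asan_register_globals() receives an array of them plus a count (so size on line 472 is a number of descriptors, not bytes), and register_global() unpoisons the variable itself while poisoning the tail redzone the compiler reserved after it. A sketch of the per-global step, with the descriptor reduced to the three fields used here (the real struct kasan_global carries more, such as the variable's name):

struct toy_global {
        uintptr_t beg;                  /* address of the global */
        size_t size;                    /* its real size */
        size_t size_with_redzone;       /* size plus the compiler-added redzone */
};

#define GLOBAL_REDZONE 0xFA             /* illustrative stand-in for KASAN_GLOBAL_REDZONE */

static void register_global(const struct toy_global *g)
{
        size_t aligned_size = SHADOW_ROUND_UP(g->size);

        unpoison_shadow(g->beg, g->size);
        poison_shadow(g->beg + aligned_size,
                      g->size_with_redzone - aligned_size, GLOBAL_REDZONE);
}

static void register_globals(const struct toy_global *globals, size_t n)
{
        for (size_t i = 0; i < n; i++)  /* n counts descriptors, not bytes */
                register_global(&globals[i]);
}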
486 #define DEFINE_ASAN_LOAD_STORE(size) \ argument
487 void __asan_load##size(unsigned long addr) \
489 check_memory_region(addr, size, false); \
491 EXPORT_SYMBOL(__asan_load##size); \
492 __alias(__asan_load##size) \
493 void __asan_load##size##_noabort(unsigned long); \
494 EXPORT_SYMBOL(__asan_load##size##_noabort); \
495 void __asan_store##size(unsigned long addr) \
497 check_memory_region(addr, size, true); \
499 EXPORT_SYMBOL(__asan_store##size); \
500 __alias(__asan_store##size) \
501 void __asan_store##size##_noabort(unsigned long); \
502 EXPORT_SYMBOL(__asan_store##size##_noabort)
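The macro above stamps out the fixed-size callbacks the compiler inserts before 1-, 2-, 4-, 8- and 16-byte accesses; the _noabort variants are alias symbols for the same body, kept because the instrumentation ABI also defines non-aborting entry points. Roughly, DEFINE_ASAN_LOAD_STORE(4) expands to the following (paraphrased, with the EXPORT_SYMBOL lines omitted):

void __asan_load4(unsigned long addr)
{
        check_memory_region(addr, 4, false);    /* 4-byte read at addr */
}
__alias(__asan_load4)
void __asan_load4_noabort(unsigned long);       /* same code under the noabort name */

void __asan_store4(unsigned long addr)
{
        check_memory_region(addr, 4, true);     /* 4-byte write at addr */
}
__alias(__asan_store4)
void __asan_store4_noabort(unsigned long);

Accesses whose size is not one of the fixed variants go through __asan_loadN()/__asan_storeN() at lines 510 and 520 instead, which pass the size through to check_memory_region() at run time.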
510 void __asan_loadN(unsigned long addr, size_t size) in __asan_loadN() argument
512 check_memory_region(addr, size, false); in __asan_loadN()
520 void __asan_storeN(unsigned long addr, size_t size) in __asan_storeN() argument
522 check_memory_region(addr, size, true); in __asan_storeN()