shadow_start       23 include/linux/kasan.h int kasan_populate_early_shadow(const void *shadow_start,
shadow_start      134 mm/kasan/common.c 	void *shadow_start, *shadow_end;
shadow_start      143 mm/kasan/common.c 	shadow_start = kasan_mem_to_shadow(address);
shadow_start      146 mm/kasan/common.c 	__memset(shadow_start, value, shadow_end - shadow_start);
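
The three hits at mm/kasan/common.c:134, 143 and 146 all come from the small helper that writes poison values into shadow memory, kasan_poison_shadow() in kernels of that vintage. The function name, the shadow_end assignment and the intervening tag-reset step are not visible in the listing; the sketch below reconstructs them, assuming generic KASAN where one shadow byte tracks an 8-byte granule:

void kasan_poison_shadow(const void *address, size_t size, u8 value)
{
	void *shadow_start, *shadow_end;

	/* Translate both ends of the poisoned region into shadow space. */
	shadow_start = kasan_mem_to_shadow(address);
	shadow_end = kasan_mem_to_shadow(address + size);

	/* Each shadow byte written here describes 8 bytes of 'address'. */
	__memset(shadow_start, value, shadow_end - shadow_start);
}

In this version of the code the poison/unpoison paths funnel through this helper: map the region's bounds with kasan_mem_to_shadow() and memset the resulting shadow byte range to the chosen poison value.
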
shadow_start      598 mm/kasan/common.c 	unsigned long shadow_start;
shadow_start      600 mm/kasan/common.c 	shadow_start = (unsigned long)kasan_mem_to_shadow(addr);
shadow_start      604 mm/kasan/common.c 	if (WARN_ON(!PAGE_ALIGNED(shadow_start)))
shadow_start      607 mm/kasan/common.c 	ret = __vmalloc_node_range(shadow_size, 1, shadow_start,
shadow_start      608 mm/kasan/common.c 			shadow_start + shadow_size,
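
The hits at mm/kasan/common.c:598-608 belong to the helper that gives module mappings real shadow memory, kasan_module_alloc() in mainline of that era. The size calculation and the trailing __vmalloc_node_range() arguments are not part of the listing and are filled in below from that source as best remembered, so treat this as a sketch rather than a verbatim copy:

int kasan_module_alloc(void *addr, size_t size)
{
	void *ret;
	size_t scaled_size;
	size_t shadow_size;
	unsigned long shadow_start;

	shadow_start = (unsigned long)kasan_mem_to_shadow(addr);
	/* One shadow byte per 8 bytes of module space, rounded up to pages. */
	scaled_size = (size + KASAN_SHADOW_MASK) >> KASAN_SHADOW_SCALE_SHIFT;
	shadow_size = round_up(scaled_size, PAGE_SIZE);

	/* The shadow of the module region must itself start on a page. */
	if (WARN_ON(!PAGE_ALIGNED(shadow_start)))
		return -EINVAL;

	/* Back [shadow_start, shadow_start + shadow_size) with real pages. */
	ret = __vmalloc_node_range(shadow_size, 1, shadow_start,
			shadow_start + shadow_size,
			GFP_KERNEL,
			PAGE_KERNEL, VM_NO_GUARD, NUMA_NO_NODE,
			__builtin_return_address(0));

	return ret ? 0 : -ENOMEM;
}

The mainline function additionally initializes the freshly mapped shadow and does some bookkeeping on the module's vm_area so the shadow can be torn down with the module; those steps are left out here to keep the focus on how shadow_start and shadow_size bound the allocation.
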
shadow_start      677 mm/kasan/common.c 	unsigned long nr_shadow_pages, start_kaddr, shadow_start;
shadow_start      682 mm/kasan/common.c 	shadow_start = (unsigned long)kasan_mem_to_shadow((void *)start_kaddr);
shadow_start      684 mm/kasan/common.c 	shadow_end = shadow_start + shadow_size;
shadow_start      699 mm/kasan/common.c 		if (shadow_mapped(shadow_start))
shadow_start      702 mm/kasan/common.c 		ret = __vmalloc_node_range(shadow_size, PAGE_SIZE, shadow_start,
shadow_start      729 mm/kasan/common.c 		vm = find_vm_area((void *)shadow_start);
shadow_start      731 mm/kasan/common.c 			vfree((void *)shadow_start);
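
The cluster at mm/kasan/common.c:677-731 is the memory-hotplug notifier: when a block of RAM goes online its shadow must be backed by real pages, and when the block goes offline that shadow is torn down again. A condensed reconstruction follows; the function name kasan_mem_notifier(), the struct memory_notify plumbing and the exact set of handled actions are not in the listing and are assumptions based on the mainline source of that era:

static int __meminit kasan_mem_notifier(struct notifier_block *nb,
					unsigned long action, void *data)
{
	struct memory_notify *mem_data = data;
	unsigned long nr_shadow_pages, start_kaddr, shadow_start;
	unsigned long shadow_end, shadow_size;

	/* One shadow byte covers 8 bytes, so 1/8th as many shadow pages. */
	nr_shadow_pages = mem_data->nr_pages >> KASAN_SHADOW_SCALE_SHIFT;
	start_kaddr = (unsigned long)pfn_to_kaddr(mem_data->start_pfn);
	shadow_start = (unsigned long)kasan_mem_to_shadow((void *)start_kaddr);
	shadow_size = nr_shadow_pages << PAGE_SHIFT;
	shadow_end = shadow_start + shadow_size;

	if (action == MEM_GOING_ONLINE) {
		void *ret;

		/* Shadow mapped at boot already covers this range: done. */
		if (shadow_mapped(shadow_start))
			return NOTIFY_OK;

		/* Map real shadow pages for the hot-added block. */
		ret = __vmalloc_node_range(shadow_size, PAGE_SIZE, shadow_start,
					   shadow_end, GFP_KERNEL, PAGE_KERNEL,
					   VM_NO_GUARD,
					   pfn_to_nid(mem_data->start_pfn),
					   __builtin_return_address(0));
		return ret ? NOTIFY_OK : NOTIFY_BAD;
	}

	if (action == MEM_OFFLINE || action == MEM_CANCEL_ONLINE) {
		/* Only shadow vmalloc'ed above shows up in find_vm_area(). */
		struct vm_struct *vm = find_vm_area((void *)shadow_start);

		if (vm)
			vfree((void *)shadow_start);
	}

	return NOTIFY_OK;
}

The find_vm_area() check on the teardown side is what distinguishes shadow allocated by this notifier (freeable with vfree()) from shadow that was mapped early at boot and is left in place.
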
shadow_start      230 mm/kasan/init.c int __ref kasan_populate_early_shadow(const void *shadow_start,
shadow_start      233 mm/kasan/init.c 	unsigned long addr = (unsigned long)shadow_start;
shadow_start      490 mm/kasan/init.c 	void *shadow_start, *shadow_end;
shadow_start      492 mm/kasan/init.c 	shadow_start = kasan_mem_to_shadow(start);
shadow_start      493 mm/kasan/init.c 	shadow_end = shadow_start + (size >> KASAN_SHADOW_SCALE_SHIFT);
shadow_start      500 mm/kasan/init.c 	ret = kasan_populate_early_shadow(shadow_start, shadow_end);
shadow_start      502 mm/kasan/init.c 		kasan_remove_zero_shadow(shadow_start,
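
Finally, the hits at mm/kasan/init.c:490-502 form kasan_add_zero_shadow(), which covers the shadow of a new region with the shared zero shadow page by calling kasan_populate_early_shadow() (declared at include/linux/kasan.h:23 and defined at mm/kasan/init.c:230 above). The lines missing from the listing, the function name, the alignment check and the return path, are filled in below from the mainline source of that era, so they are reconstructions rather than quotes:

int kasan_add_zero_shadow(void *start, unsigned long size)
{
	int ret;
	void *shadow_start, *shadow_end;

	shadow_start = kasan_mem_to_shadow(start);
	/* Shadow is one eighth the size of the region it describes. */
	shadow_end = shadow_start + (size >> KASAN_SHADOW_SCALE_SHIFT);

	if (WARN_ON(!IS_ALIGNED((unsigned long)start,
				KASAN_SHADOW_SCALE_SIZE * PAGE_SIZE)))
		return -EINVAL;

	/* Point the whole shadow range at the shared zero shadow page. */
	ret = kasan_populate_early_shadow(shadow_start, shadow_end);
	/* On failure, roll back whatever was populated before the error. */
	if (ret)
		kasan_remove_zero_shadow(shadow_start,
					 size >> KASAN_SHADOW_SCALE_SHIFT);
	return ret;
}

kasan_remove_zero_shadow() is the undo step: if population fails partway through, the part of the shadow range that was already populated is unmapped again.
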