__init_end 695 arch/arm/mm/init.c poison_init_mem(__init_begin, __init_end - __init_begin);
__init_end 1454 arch/arm/mm/mmu.c phys_addr_t kernel_x_end = round_up(__pa(__init_end), SECTION_SIZE);
__init_end 573 arch/arm64/mm/init.c lm_alias(__init_end),
__init_end 580 arch/arm64/mm/init.c unmap_kernel_range((u64)__init_begin, (u64)(__init_end - __init_begin));
__init_end 64 arch/csky/mm/init.c extern char __init_begin[], __init_end[];
__init_end 72 arch/csky/mm/init.c while (addr < (unsigned long) &__init_end) {
__init_end 81 arch/csky/mm/init.c ((unsigned int)&__init_end - (unsigned int)&__init_begin) >> 10);
__init_end 153 arch/ia64/mm/init.c free_reserved_area(ia64_imva(__init_begin), ia64_imva(__init_end),
__init_end 110 arch/microblaze/kernel/setup.c romfs_base = (ram ? ram : (unsigned int)&__init_end);
__init_end 506 arch/mips/mm/init.c free_init_pages_eva((void *)&__init_begin, (void *)&__init_end);
__init_end 225 arch/nds32/mm/init.c (unsigned long)&__init_begin, (unsigned long)&__init_end,
__init_end 226 arch/nds32/mm/init.c ((unsigned long)&__init_end -
__init_end 491 arch/parisc/mm/init.c unsigned long init_end = (unsigned long)__init_end;
__init_end 627 arch/parisc/mm/init.c __init_begin, __init_end,
__init_end 628 arch/parisc/mm/init.c ((unsigned long)__init_end - (unsigned long)__init_begin) >> 10,
__init_end 21 arch/powerpc/include/asm/sections.h addr < (unsigned long)__init_end;
__init_end 43 arch/powerpc/include/asm/sections.h if (addr >= (unsigned long)_stext && addr < (unsigned long)__init_end)
__init_end 74 arch/powerpc/include/asm/sections.h return start < (unsigned long)__init_end &&
__init_end 455 arch/powerpc/mm/book3s64/hash_pgtable.c end = (unsigned long)__init_end;
__init_end 224 arch/powerpc/mm/book3s64/radix_pgtable.c unsigned long end = (unsigned long)__init_end;
__init_end 16 arch/s390/include/asm/sections.h addr < (unsigned long)__init_end;
__init_end 394 arch/sh/mm/init.c (unsigned long)&__init_begin, (unsigned long)&__init_end,
__init_end 395 arch/sh/mm/init.c ((unsigned long)&__init_end -
__init_end 299 arch/sparc/mm/fault_64.c if ((tpc >= KERNBASE && tpc < (unsigned long) __init_end) ||
__init_end 2561 arch/sparc/mm/init_64.c initend = (unsigned long)(__init_end) & PAGE_MASK;
__init_end 868 arch/x86/mm/init.c free_kernel_image_pages(&__init_begin, &__init_end);
__init_end 242 arch/x86/mm/init_32.c if (addr >= (unsigned long)_text && addr <= (unsigned long)__init_end)
__init_end 819 arch/x86/mm/init_32.c (unsigned long)&__init_begin, (unsigned long)&__init_end,
__init_end 820 arch/x86/mm/init_32.c ((unsigned long)&__init_end -
__init_end 913 arch/x86/mm/init_32.c unsigned long size = (((unsigned long)__init_end + HPAGE_SIZE) & HPAGE_MASK) - start;
__init_end 200 arch/xtensa/mm/init.c (unsigned long)__init_begin, (unsigned long)__init_end,
__init_end 201 arch/xtensa/mm/init.c (unsigned long)(__init_end - __init_begin) >> 10,
__init_end 38 include/asm-generic/sections.h extern char __init_begin[], __init_end[];
__init_end 141 include/asm-generic/sections.h return memory_contains(__init_begin, __init_end, virt, size);
__init_end 155 include/asm-generic/sections.h return memory_intersects(__init_begin, __init_end, virt, size);
__init_end 2129 include/linux/mm.h extern char __init_begin[], __init_end[];
__init_end 2131 include/linux/mm.h return free_reserved_area(&__init_begin, &__init_end,
__init_end 6191 kernel/trace/ftrace.c void *end = (void *)(&__init_end);
__init_end 7546 mm/page_alloc.c init_data_size = __init_end - __init_begin;
__init_end 7562 mm/page_alloc.c adj_init_size(__init_begin, __init_end, init_data_size,
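The references above all follow the same idiom: __init_begin and __init_end are linker-script symbols bracketing the kernel's .init sections, declared as incomplete char arrays (include/asm-generic/sections.h line 38), and are used either to release that region back to the page allocator once boot-time code has run, or to test whether an address falls inside it. A minimal sketch of the free path, modeled on the free_reserved_area() call visible at include/linux/mm.h line 2131; the trailing poison and label arguments are cut off in that snippet and are filled in here as an assumption, and the helper name is hypothetical:

/*
 * Sketch only: hand the pages between __init_begin and __init_end back
 * to the buddy allocator after boot-time initialization is finished.
 */
extern unsigned long free_reserved_area(void *start, void *end,
					int poison, const char *s);

static inline unsigned long free_initmem_sketch(int poison)
{
	/* Provided by the linker script around the .init sections. */
	extern char __init_begin[], __init_end[];

	/*
	 * Returns the number of pages released.  The poison value is
	 * written over the region to catch late references; the label
	 * string is only used for the log message (both assumed here,
	 * not shown in the listing above).
	 */
	return free_reserved_area(&__init_begin, &__init_end,
				  poison, "unused kernel");
}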