kernel_end 311 arch/alpha/kernel/setup.c setup_memory(void *kernel_end)
kernel_end 370 arch/alpha/kernel/setup.c kernel_size = virt_to_phys(kernel_end) - KERNEL_START_PHYS;
kernel_end 441 arch/alpha/kernel/setup.c void *kernel_end = _end; /* end of kernel */
kernel_end 480 arch/alpha/kernel/setup.c kernel_end = callback_init(kernel_end);
kernel_end 636 arch/alpha/kernel/setup.c setup_memory(kernel_end);
kernel_end 145 arch/alpha/mm/init.c callback_init(void * kernel_end)
kernel_end 182 arch/alpha/mm/init.c (((unsigned long)kernel_end + ~PAGE_MASK) & PAGE_MASK);
kernel_end 183 arch/alpha/mm/init.c kernel_end = two_pages + 2*PAGE_SIZE;
kernel_end 218 arch/alpha/mm/init.c memset(kernel_end, 0, PAGE_SIZE);
kernel_end 220 arch/alpha/mm/init.c pmd_set(pmd, (pte_t *)kernel_end);
kernel_end 221 arch/alpha/mm/init.c kernel_end += PAGE_SIZE;
kernel_end 232 arch/alpha/mm/init.c return kernel_end;
kernel_end 56 arch/alpha/mm/numa.c setup_memory_node(int nid, void *kernel_end)
kernel_end 142 arch/alpha/mm/numa.c end_kernel_pfn = PFN_UP(virt_to_phys(kernel_end));
kernel_end 162 arch/alpha/mm/numa.c setup_memory(void *kernel_end)
kernel_end 174 arch/alpha/mm/numa.c setup_memory_node(nid, kernel_end);
kernel_end 176 arch/alpha/mm/numa.c kernel_size = virt_to_phys(kernel_end) - KERNEL_START_PHYS;
kernel_end 462 arch/arm64/mm/mmu.c phys_addr_t kernel_end = __pa_symbol(__init_begin);
kernel_end 475 arch/arm64/mm/mmu.c memblock_mark_nomap(kernel_start, kernel_end - kernel_start);
kernel_end 505 arch/arm64/mm/mmu.c __map_memblock(pgdp, kernel_start, kernel_end,
kernel_end 507 arch/arm64/mm/mmu.c memblock_clear_nomap(kernel_start, kernel_end - kernel_start);
kernel_end 24 arch/mips/ar7/memory.c u32 *kernel_end = (u32 *)KSEG1ADDR(CPHYSADDR((u32)&_end));
kernel_end 27 arch/mips/ar7/memory.c while (tmpaddr > kernel_end) {
kernel_end 366 arch/parisc/mm/init.c unsigned long kernel_start, kernel_end;
kernel_end 371 arch/parisc/mm/init.c kernel_end = __pa((unsigned long)&_end);
kernel_end 438 arch/parisc/mm/init.c } else if (address < kernel_start || address >= kernel_end) {
kernel_end 492 arch/parisc/mm/init.c unsigned long kernel_end = (unsigned long)&_end;
kernel_end 496 arch/parisc/mm/init.c map_pages(init_end, __pa(init_end), kernel_end - init_end,
kernel_end 514 arch/parisc/mm/init.c __flush_tlb_range(0, init_begin, kernel_end);
kernel_end 204 arch/powerpc/kernel/machine_kexec.c static phys_addr_t kernel_end;
kernel_end 212 arch/powerpc/kernel/machine_kexec.c .value = &kernel_end,
kernel_end 271 arch/powerpc/kernel/machine_kexec.c kernel_end = cpu_to_be_ulong(__pa(_end));
kernel_end 674 arch/x86/mm/init.c unsigned long kernel_end = __pa_symbol(_end);
kernel_end 683 arch/x86/mm/init.c memory_map_bottom_up(kernel_end, end);
kernel_end 684 arch/x86/mm/init.c memory_map_bottom_up(ISA_END_ADDRESS, kernel_end);
kernel_end 284 arch/x86/mm/mem_encrypt_identity.c unsigned long kernel_start, kernel_end, kernel_len;
kernel_end 310 arch/x86/mm/mem_encrypt_identity.c kernel_end = ALIGN(__pa_symbol(_end), PMD_PAGE_SIZE);
kernel_end 311 arch/x86/mm/mem_encrypt_identity.c kernel_len = kernel_end - kernel_start;
kernel_end 420 arch/x86/mm/mem_encrypt_identity.c ppd.vaddr_end = kernel_end;
kernel_end 426 arch/x86/mm/mem_encrypt_identity.c ppd.vaddr_end = kernel_end + decrypted_base;
kernel_end 470 arch/x86/mm/mem_encrypt_identity.c ppd.vaddr_end = kernel_end + decrypted_base;
kernel_end 1096 arch/x86/xen/mmu_pv.c unsigned long kernel_end = roundup((unsigned long)_brk_end, PMD_SIZE) - 1;
kernel_end 1105 arch/x86/xen/mmu_pv.c if (vaddr < (unsigned long) _text || vaddr > kernel_end)
kernel_end 276 mm/memblock.c phys_addr_t kernel_end, ret;
kernel_end 286 mm/memblock.c kernel_end = __pa_symbol(_end);
kernel_end 292 mm/memblock.c if (memblock_bottom_up() && end > kernel_end) {
kernel_end 296 mm/memblock.c bottom_up_start = max(start, kernel_end);
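
Taken together, the mm/memblock.c hits above show the recurring idiom behind most of these call sites: compute the physical address of the end of the kernel image (typically via __pa_symbol(_end)), then use it as a lower bound so early bottom-up allocations land above the kernel rather than on top of it. Below is a minimal standalone C sketch of that clamping decision, assuming simplified semantics; the addresses and the find_in_range() helper are hypothetical stand-ins for illustration, not the kernel's actual memblock API.

/* Sketch of the bottom-up clamping seen in mm/memblock.c lines 276-296
 * above: when allocating bottom-up and the candidate window extends past
 * kernel_end, start the search at max(start, kernel_end) so the result
 * cannot overlap the kernel image.  All values here are hypothetical. */
#include <stdio.h>
#include <stdbool.h>
#include <stdint.h>

typedef uint64_t phys_addr_t;

#define MAX(a, b) ((a) > (b) ? (a) : (b))

/* Hypothetical stand-in for __pa_symbol(_end). */
static const phys_addr_t kernel_end = 0x1a00000ULL; /* 26 MiB */

static bool memblock_bottom_up(void) { return true; }

/* Simplified stand-in for memblock_find_in_range_node(): it only
 * reports which window would actually be searched. */
static phys_addr_t find_in_range(phys_addr_t start, phys_addr_t end)
{
	if (memblock_bottom_up() && end > kernel_end) {
		/* Mirrors: bottom_up_start = max(start, kernel_end); */
		phys_addr_t bottom_up_start = MAX(start, kernel_end);
		printf("bottom-up search in [%#llx, %#llx)\n",
		       (unsigned long long)bottom_up_start,
		       (unsigned long long)end);
		return bottom_up_start;
	}
	printf("top-down search in [%#llx, %#llx)\n",
	       (unsigned long long)start, (unsigned long long)end);
	return start;
}

int main(void)
{
	/* A request whose window overlaps the kernel image is clamped
	 * upward to kernel_end before any allocation is attempted. */
	find_in_range(0x100000, 0x40000000);
	return 0;
}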