vaddr_end         116 arch/x86/kernel/head64.c 	unsigned long vaddr, vaddr_end;
vaddr_end         268 arch/x86/kernel/head64.c 		vaddr_end = (unsigned long)__end_bss_decrypted;
vaddr_end         269 arch/x86/kernel/head64.c 		for (; vaddr < vaddr_end; vaddr += PMD_SIZE) {
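
In head64.c, vaddr_end bounds a PMD-granular walk over the .bss..decrypted section during early boot. A minimal standalone model of that half-open loop (the start address and the per-step handler are illustrative stand-ins, not the kernel's code):

    #include <stdio.h>

    #define PMD_SIZE (2UL << 20)   /* 2 MiB, as on x86-64 */

    /* Stand-in for the per-PMD work done on each step of the walk. */
    static void touch_pmd(unsigned long vaddr)
    {
            printf("pmd at %#lx\n", vaddr);
    }

    int main(void)
    {
            unsigned long vaddr = 0x1000000UL;              /* hypothetical start */
            unsigned long vaddr_end = vaddr + 4 * PMD_SIZE; /* hypothetical end */

            /* Half-open interval: the loop never touches vaddr_end itself. */
            for (; vaddr < vaddr_end; vaddr += PMD_SIZE)
                    touch_pmd(vaddr);
            return 0;
    }
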
vaddr_end        6518 arch/x86/kvm/svm.c 	unsigned long vaddr, vaddr_end, next_vaddr, npages, pages, size, i;
vaddr_end        6537 arch/x86/kvm/svm.c 	vaddr_end = vaddr + size;
vaddr_end        6554 arch/x86/kvm/svm.c 	for (i = 0; vaddr < vaddr_end; vaddr = next_vaddr, i += pages) {
vaddr_end        6867 arch/x86/kvm/svm.c 	unsigned long vaddr, vaddr_end, next_vaddr;
vaddr_end        6888 arch/x86/kvm/svm.c 	vaddr_end = vaddr + size;
vaddr_end        6891 arch/x86/kvm/svm.c 	for (; vaddr < vaddr_end; vaddr = next_vaddr) {
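
Both SEV ioctl handlers in svm.c share one shape: the stride is not fixed, so the loop variable is advanced to a next_vaddr computed inside the body, and (at line 6554) a page counter is accumulated alongside. A hedged model of that control flow, with a made-up chunk-size policy standing in for the real per-iteration work:

    #include <stdio.h>

    int main(void)
    {
            unsigned long vaddr = 0;
            unsigned long vaddr_end = vaddr + 10000;
            unsigned long next_vaddr, pages, i;

            for (i = 0; vaddr < vaddr_end; vaddr = next_vaddr, i += pages) {
                    /* Hypothetical stride: up to 4096 bytes, clamped to the end. */
                    unsigned long len = vaddr_end - vaddr;

                    if (len > 4096)
                            len = 4096;
                    pages = (len + 4095) / 4096;
                    next_vaddr = vaddr + len;
                    printf("chunk [%lu, %lu), %lu page(s)\n", vaddr, next_vaddr, pages);
            }
            printf("processed %lu page(s) total\n", i);
            return 0;
    }
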
vaddr_end         423 arch/x86/mm/init_64.c 	unsigned long vaddr_end = __START_KERNEL_map + KERNEL_IMAGE_SIZE;
vaddr_end         433 arch/x86/mm/init_64.c 		vaddr_end = __START_KERNEL_map + (max_pfn_mapped << PAGE_SHIFT);
vaddr_end         435 arch/x86/mm/init_64.c 	for (; vaddr + PMD_SIZE - 1 < vaddr_end; pmd++, vaddr += PMD_SIZE) {
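
The cleanup loop at init_64.c:435 is worth a second look: the guard is vaddr + PMD_SIZE - 1 < vaddr_end, not vaddr < vaddr_end, so a PMD that only partially overlaps the end of the range is skipped rather than visited. A small standalone demonstration of the difference (the 8-byte "PMD" is a toy size):

    #include <stdio.h>

    #define TOY_PMD_SIZE 8UL

    int main(void)
    {
            unsigned long vaddr_end = 20;   /* not a multiple of TOY_PMD_SIZE */
            unsigned long vaddr;

            /* Guard from init_64.c: visits only fully contained PMDs, 0 and 8. */
            for (vaddr = 0; vaddr + TOY_PMD_SIZE - 1 < vaddr_end; vaddr += TOY_PMD_SIZE)
                    printf("full  pmd at %2lu\n", vaddr);

            /* The plain guard would also visit 16, which straddles vaddr_end. */
            for (vaddr = 0; vaddr < vaddr_end; vaddr += TOY_PMD_SIZE)
                    printf("plain pmd at %2lu\n", vaddr);
            return 0;
    }
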
vaddr_end         674 arch/x86/mm/init_64.c 	unsigned long vaddr, vaddr_end, vaddr_next, paddr_next, paddr_last;
vaddr_end         678 arch/x86/mm/init_64.c 	vaddr_end = (unsigned long)__va(paddr_end);
vaddr_end         684 arch/x86/mm/init_64.c 	for (; vaddr < vaddr_end; vaddr = vaddr_next) {
vaddr_end         704 arch/x86/mm/init_64.c 			paddr_last = phys_pud_init(pud, paddr, __pa(vaddr_end),
vaddr_end         710 arch/x86/mm/init_64.c 		paddr_last = phys_pud_init(pud, paddr, __pa(vaddr_end),
vaddr_end         728 arch/x86/mm/init_64.c 	unsigned long vaddr, vaddr_start, vaddr_end, vaddr_next, paddr_last;
vaddr_end         732 arch/x86/mm/init_64.c 	vaddr_end = (unsigned long)__va(paddr_end);
vaddr_end         735 arch/x86/mm/init_64.c 	for (; vaddr < vaddr_end; vaddr = vaddr_next) {
vaddr_end         744 arch/x86/mm/init_64.c 						   __pa(vaddr_end),
vaddr_end         751 arch/x86/mm/init_64.c 		paddr_last = phys_p4d_init(p4d, __pa(vaddr), __pa(vaddr_end),
vaddr_end         766 arch/x86/mm/init_64.c 		sync_global_pgds(vaddr_start, vaddr_end - 1);
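
The two mapping walkers at init_64.c:674 and :728 advance by snapping up to the next page-table boundary rather than by a fixed stride: vaddr_next is the start of the next table-sized region, so the first iteration may cover less than a full entry. A minimal model of that boundary-snapping step (the 16-byte span is illustrative, not PGDIR_SIZE):

    #include <stdio.h>

    #define TOY_SPAN 16UL
    #define TOY_MASK (~(TOY_SPAN - 1))

    int main(void)
    {
            unsigned long vaddr = 5;        /* unaligned start */
            unsigned long vaddr_end = 53;
            unsigned long vaddr_next;

            for (; vaddr < vaddr_end; vaddr = vaddr_next) {
                    /* Snap to the start of the next table-sized region. */
                    vaddr_next = (vaddr & TOY_MASK) + TOY_SPAN;
                    printf("entry covers [%2lu, %2lu)\n", vaddr,
                           vaddr_next < vaddr_end ? vaddr_next : vaddr_end);
            }
            return 0;
    }
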
vaddr_end          42 arch/x86/mm/kaslr.c static const unsigned long vaddr_end = CPU_ENTRY_AREA_BASE;
vaddr_end          91 arch/x86/mm/kaslr.c 	BUILD_BUG_ON(vaddr_start >= vaddr_end);
vaddr_end          92 arch/x86/mm/kaslr.c 	BUILD_BUG_ON(vaddr_end != CPU_ENTRY_AREA_BASE);
vaddr_end          93 arch/x86/mm/kaslr.c 	BUILD_BUG_ON(vaddr_end > __START_KERNEL_map);
vaddr_end         122 arch/x86/mm/kaslr.c 	remain_entropy = vaddr_end - vaddr_start;
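
In kaslr.c, vaddr_end is the one compile-time-constant user in this list, which is what lets lines 91-93 pin the layout with BUILD_BUG_ON; the entropy budget at line 122 is then just the span between the two bounds. The same checks can be modelled outside the kernel with C11 _Static_assert (the address values below are placeholders, not the authoritative x86-64 layout):

    #include <stdio.h>

    /* Placeholder layout constants; the kernel's real values differ. */
    #define VADDR_START          0xff00000000000000UL
    #define CPU_ENTRY_AREA_BASE  0xfffffe0000000000UL
    #define START_KERNEL_MAP     0xffffffff80000000UL

    static const unsigned long vaddr_end = CPU_ENTRY_AREA_BASE;

    /* Build-time layout checks, mirroring the BUILD_BUG_ON()s at kaslr.c:91-93. */
    _Static_assert(VADDR_START < CPU_ENTRY_AREA_BASE, "regions out of order");
    _Static_assert(CPU_ENTRY_AREA_BASE <= START_KERNEL_MAP, "overlaps kernel map");

    int main(void)
    {
            /* The entropy budget is the span left for randomized regions. */
            unsigned long remain_entropy = vaddr_end - VADDR_START;

            printf("entropy budget: %#lx bytes\n", remain_entropy);
            return 0;
    }
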
vaddr_end         256 arch/x86/mm/mem_encrypt.c 	unsigned long vaddr_end, vaddr_next;
vaddr_end         263 arch/x86/mm/mem_encrypt.c 	vaddr_end = vaddr + size;
vaddr_end         265 arch/x86/mm/mem_encrypt.c 	for (; vaddr < vaddr_end; vaddr = vaddr_next) {
vaddr_end         288 arch/x86/mm/mem_encrypt.c 		    ((vaddr_end - vaddr) >= psize)) {
vaddr_end         310 arch/x86/mm/mem_encrypt.c 					       __pa((vaddr_end & pmask) + psize),
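
The in-place encrypt/decrypt walker at mem_encrypt.c:288 steps by a whole large page only when the address is suitably aligned and at least one full large page remains before vaddr_end; otherwise it falls back to small steps. A sketch of that size-selection logic with toy constants (4-byte page, 16-byte large page):

    #include <stdio.h>

    #define TOY_PAGE  4UL
    #define TOY_LARGE 16UL

    int main(void)
    {
            unsigned long vaddr = 8;        /* small-page aligned, not large */
            unsigned long vaddr_end = 44;
            unsigned long vaddr_next;

            for (; vaddr < vaddr_end; vaddr = vaddr_next) {
                    unsigned long psize = TOY_PAGE;

                    /* Use a large step only if aligned and fully inside range. */
                    if ((vaddr & (TOY_LARGE - 1)) == 0 &&
                        (vaddr_end - vaddr) >= TOY_LARGE)
                            psize = TOY_LARGE;

                    vaddr_next = vaddr + psize;
                    printf("step [%2lu, %2lu) size %2lu\n", vaddr, vaddr_next, psize);
            }
            return 0;
    }
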
vaddr_end         382 arch/x86/mm/mem_encrypt.c 	unsigned long vaddr, vaddr_end, npages;
vaddr_end         386 arch/x86/mm/mem_encrypt.c 	vaddr_end = (unsigned long)__end_bss_decrypted;
vaddr_end         387 arch/x86/mm/mem_encrypt.c 	npages = (vaddr_end - vaddr) >> PAGE_SHIFT;
vaddr_end         401 arch/x86/mm/mem_encrypt.c 	free_init_pages("unused decrypted", vaddr, vaddr_end);
vaddr_end          70 arch/x86/mm/mem_encrypt_identity.c 	unsigned long vaddr_end;
vaddr_end          96 arch/x86/mm/mem_encrypt_identity.c 	pgd_end = ppd->vaddr_end & PGDIR_MASK;
vaddr_end         186 arch/x86/mm/mem_encrypt_identity.c 	while (ppd->vaddr < ppd->vaddr_end) {
vaddr_end         196 arch/x86/mm/mem_encrypt_identity.c 	while (ppd->vaddr < ppd->vaddr_end) {
vaddr_end         207 arch/x86/mm/mem_encrypt_identity.c 	unsigned long vaddr_end;
vaddr_end         213 arch/x86/mm/mem_encrypt_identity.c 	vaddr_end = ppd->vaddr_end;
vaddr_end         216 arch/x86/mm/mem_encrypt_identity.c 	ppd->vaddr_end = ALIGN(ppd->vaddr, PMD_PAGE_SIZE);
vaddr_end         220 arch/x86/mm/mem_encrypt_identity.c 	ppd->vaddr_end = vaddr_end & PMD_PAGE_MASK;
vaddr_end         224 arch/x86/mm/mem_encrypt_identity.c 	ppd->vaddr_end = vaddr_end;
vaddr_end         386 arch/x86/mm/mem_encrypt_identity.c 	ppd.vaddr_end = workarea_end;
vaddr_end         420 arch/x86/mm/mem_encrypt_identity.c 	ppd.vaddr_end = kernel_end;
vaddr_end         426 arch/x86/mm/mem_encrypt_identity.c 	ppd.vaddr_end = kernel_end + decrypted_base;
vaddr_end         433 arch/x86/mm/mem_encrypt_identity.c 		ppd.vaddr_end = initrd_end;
vaddr_end         440 arch/x86/mm/mem_encrypt_identity.c 		ppd.vaddr_end = initrd_end + decrypted_base;
vaddr_end         447 arch/x86/mm/mem_encrypt_identity.c 	ppd.vaddr_end = workarea_end;
vaddr_end         452 arch/x86/mm/mem_encrypt_identity.c 	ppd.vaddr_end = workarea_end + decrypted_base;
vaddr_end         470 arch/x86/mm/mem_encrypt_identity.c 	ppd.vaddr_end = kernel_end + decrypted_base;
vaddr_end         475 arch/x86/mm/mem_encrypt_identity.c 		ppd.vaddr_end = initrd_end + decrypted_base;
vaddr_end         480 arch/x86/mm/mem_encrypt_identity.c 	ppd.vaddr_end = workarea_end + decrypted_base;
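
mem_encrypt_identity.c is the densest user: here vaddr_end lives in a struct (ppd) that is re-aimed repeatedly as the SME setup maps the workarea, kernel, and initrd ranges. The interesting part is the helper at lines 207-224, which splits one range into an unaligned head, a large-page middle, and an unaligned tail by rewriting ppd->vaddr_end three times. A standalone sketch of that split, with a toy struct and a toy 16-byte "PMD page":

    #include <stdio.h>

    #define TOY_PMD_PAGE_SIZE 16UL
    #define TOY_PMD_PAGE_MASK (~(TOY_PMD_PAGE_SIZE - 1))
    #define TOY_ALIGN(x, a)   (((x) + (a) - 1) & ~((a) - 1))

    struct toy_ppd {        /* stand-in for the kernel's ppd structure */
            unsigned long vaddr;
            unsigned long vaddr_end;
    };

    static void map_4k(struct toy_ppd *ppd)
    {
            if (ppd->vaddr < ppd->vaddr_end)
                    printf("4K  pages: [%2lu, %2lu)\n", ppd->vaddr, ppd->vaddr_end);
            ppd->vaddr = ppd->vaddr_end;
    }

    static void map_large(struct toy_ppd *ppd)
    {
            if (ppd->vaddr < ppd->vaddr_end)
                    printf("PMD pages: [%2lu, %2lu)\n", ppd->vaddr, ppd->vaddr_end);
            ppd->vaddr = ppd->vaddr_end;
    }

    int main(void)
    {
            struct toy_ppd ppd = { .vaddr = 5, .vaddr_end = 59 };
            unsigned long vaddr_end = ppd.vaddr_end;   /* save the real end */

            /* Unaligned head with small pages, up to the first PMD boundary. */
            ppd.vaddr_end = TOY_ALIGN(ppd.vaddr, TOY_PMD_PAGE_SIZE);
            map_4k(&ppd);

            /* Aligned middle with large pages. */
            ppd.vaddr_end = vaddr_end & TOY_PMD_PAGE_MASK;
            map_large(&ppd);

            /* Unaligned tail with small pages, out to the real end. */
            ppd.vaddr_end = vaddr_end;
            map_4k(&ppd);
            return 0;
    }
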
vaddr_end        1094 arch/x86/xen/mmu_pv.c 				    unsigned long vaddr_end)
vaddr_end        1101 arch/x86/xen/mmu_pv.c 	for (; vaddr <= vaddr_end && (pmd < (level2_kernel_pgt + PTRS_PER_PMD));
vaddr_end        1119 arch/x86/xen/mmu_pv.c 	void *vaddr_end = vaddr + size;
vaddr_end        1121 arch/x86/xen/mmu_pv.c 	for (; vaddr < vaddr_end; vaddr += PAGE_SIZE)
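
The last user, xen/mmu_pv.c:1119, is the one place where vaddr_end is a pointer rather than an unsigned long; the void * arithmetic there relies on the GNU C extension that treats sizeof(void) as 1. (Note also that the loop at :1101 is the only inclusive <= bound in this list.) A portable model of the byte-granular walk uses unsigned char * instead:

    #include <stdio.h>

    #define TOY_PAGE_SIZE 4096UL

    int main(void)
    {
            unsigned char buf[3 * 4096];
            unsigned char *vaddr = buf;
            unsigned char *vaddr_end = vaddr + sizeof(buf);  /* one past the end */

            /* Same half-open pointer loop as mmu_pv.c:1121. */
            for (; vaddr < vaddr_end; vaddr += TOY_PAGE_SIZE)
                    printf("page at offset %td\n", vaddr - buf);
            return 0;
    }
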