start_pte         359 arch/parisc/mm/init.c 	unsigned long start_pte;
start_pte         382 arch/parisc/mm/init.c 	start_pte = ((start_vaddr >> PAGE_SHIFT) & (PTRS_PER_PTE - 1));
start_pte         430 arch/parisc/mm/init.c 			pg_table = (pte_t *) __va(pg_table) + start_pte;
start_pte         431 arch/parisc/mm/init.c 			for (tmp2 = start_pte; tmp2 < PTRS_PER_PTE; tmp2++, pg_table++) {
start_pte         466 arch/parisc/mm/init.c 			start_pte = 0;
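
The parisc hits show the index-then-walk idiom: line 382 derives the PTE index of the first virtual address, lines 430-431 offset into the table and walk its remaining slots, and line 466 resets the index so every later table starts at entry 0. Below is a minimal sketch of that idiom only; the function name and parameters are illustrative, the per-entry work is reduced to a comment, and the __va() physical-to-virtual step from line 430 is dropped for brevity.

#include <linux/mm.h>

/* Illustrative sketch, not the real code from arch/parisc/mm/init.c:
 * walk several consecutive PTE tables, starting the first one at the
 * index that corresponds to start_vaddr and every later one at index 0. */
static void sketch_walk_pte_tables(pte_t **tables, int ntables,
				   unsigned long start_vaddr)
{
	unsigned long start_pte, tmp2;
	int t;

	start_pte = (start_vaddr >> PAGE_SHIFT) & (PTRS_PER_PTE - 1);

	for (t = 0; t < ntables; t++) {
		pte_t *pg_table = tables[t] + start_pte;

		for (tmp2 = start_pte; tmp2 < PTRS_PER_PTE; tmp2++, pg_table++) {
			/* init.c installs the mapping for this slot here */
		}
		start_pte = 0;	/* every following table starts at entry 0 */
	}
}
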
start_pte         238 arch/powerpc/mm/book3s64/hash_tlb.c 	pte_t *start_pte;
start_pte         252 arch/powerpc/mm/book3s64/hash_tlb.c 	start_pte = pte_offset_map(pmd, addr);
start_pte         253 arch/powerpc/mm/book3s64/hash_tlb.c 	for (pte = start_pte; pte < start_pte + PTRS_PER_PTE; pte++) {
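
In the powerpc book3s64 hash-TLB code the table is mapped once with pte_offset_map() and the cursor runs over all PTRS_PER_PTE entries relative to start_pte. A hedged sketch of that bounded walk follows; the per-entry hash flush is reduced to a comment, and the closing pte_unmap() is an assumed pairing rather than a line copied from hash_tlb.c.

#include <linux/mm.h>

/* Hedged sketch of the hash_tlb.c walk over one PTE table. */
static void sketch_flush_pte_table(pmd_t *pmd, unsigned long addr)
{
	pte_t *start_pte, *pte;

	start_pte = pte_offset_map(pmd, addr);	/* map the PTE page once */
	for (pte = start_pte; pte < start_pte + PTRS_PER_PTE; pte++) {
		if (pte_none(*pte))
			continue;
		/* the real code flushes the hashed translation for *pte */
	}
	pte_unmap(start_pte);			/* balance the pte_offset_map() */
}
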
start_pte        1295 mm/khugepaged.c 	pte_t *start_pte, *pte;
start_pte        1318 mm/khugepaged.c 	start_pte = pte_offset_map_lock(mm, pmd, haddr, &ptl);
start_pte        1321 mm/khugepaged.c 	for (i = 0, addr = haddr, pte = start_pte;
start_pte        1364 mm/khugepaged.c 	for (i = 0, addr = haddr, pte = start_pte;
start_pte        1374 mm/khugepaged.c 	pte_unmap_unlock(start_pte, ptl);
start_pte        1391 mm/khugepaged.c 	pte_unmap_unlock(start_pte, ptl);
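
mm/khugepaged.c pairs the two calls around a locked scan: pte_offset_map_lock() at line 1318 maps the table and takes the PTE lock, both loops (lines 1321 and 1364) restart from start_pte, and whichever exit path runs hands start_pte back to pte_unmap_unlock(), never the advanced cursor. A minimal sketch of that shape, assuming HPAGE_PMD_NR entries and replacing the collapse logic with a placeholder check:

#include <linux/mm.h>
#include <linux/huge_mm.h>

/* Hedged sketch of the khugepaged lock/scan/unlock shape. */
static void sketch_scan_locked_ptes(struct mm_struct *mm, pmd_t *pmd,
				    unsigned long haddr)
{
	pte_t *start_pte, *pte;
	spinlock_t *ptl;
	unsigned long addr;
	int i;

	start_pte = pte_offset_map_lock(mm, pmd, haddr, &ptl);
	for (i = 0, addr = haddr, pte = start_pte;
	     i < HPAGE_PMD_NR;
	     i++, addr += PAGE_SIZE, pte++) {
		if (pte_none(*pte))
			continue;	/* khugepaged inspects/updates the entry here */
	}
	pte_unmap_unlock(start_pte, ptl);	/* always the original pointer */
}
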
start_pte        1013 mm/memory.c    	pte_t *start_pte;
start_pte        1020 mm/memory.c    	start_pte = pte_offset_map_lock(mm, pmd, addr, &ptl);
start_pte        1021 mm/memory.c    	pte = start_pte;
start_pte        1118 mm/memory.c    	pte_unmap_unlock(start_pte, ptl);
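
The mm/memory.c hits keep the same pairing across a longer [addr, end) walk: start_pte exists solely so pte_unmap_unlock() at line 1118 can be handed the first mapped entry after the pte cursor has moved past it. A hedged sketch, with the per-entry work elided:

#include <linux/mm.h>

/* Hedged sketch of the memory.c pairing: start_pte is kept only for the
 * final unlock; the walk advances the separate pte cursor. */
static void sketch_walk_pte_range(struct mm_struct *mm, pmd_t *pmd,
				  unsigned long addr, unsigned long end)
{
	pte_t *start_pte, *pte;
	spinlock_t *ptl;

	start_pte = pte_offset_map_lock(mm, pmd, addr, &ptl);
	pte = start_pte;
	do {
		pte_t ptent = *pte;

		if (pte_none(ptent))
			continue;	/* the real code handles this entry here */
	} while (pte++, addr += PAGE_SIZE, addr != end);
	pte_unmap_unlock(start_pte, ptl);	/* unlock with the original pointer */
}
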
start_pte         246 virt/kvm/arm/mmu.c 	pte_t *pte, *start_pte;
start_pte         248 virt/kvm/arm/mmu.c 	start_pte = pte = pte_offset_kernel(pmd, addr);
start_pte         264 virt/kvm/arm/mmu.c 	if (stage2_pte_table_empty(kvm, start_pte))
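
In the KVM/ARM stage-2 teardown (lines 246-264) the base pointer survives the clearing loop so the whole table can be tested for emptiness afterwards with stage2_pte_table_empty(), a helper local to mmu.c. A hedged sketch of that shape only; clearing and the follow-up table free are reduced to comments:

#include <linux/kvm_host.h>
#include <linux/mm.h>

/* Hedged sketch of the stage-2 pattern: pte walks and clears the range,
 * start_pte remembers the table base for the emptiness check. */
static void sketch_unmap_stage2_ptes(struct kvm *kvm, pmd_t *pmd,
				     unsigned long addr, unsigned long end)
{
	pte_t *pte, *start_pte;

	start_pte = pte = pte_offset_kernel(pmd, addr);
	do {
		if (!pte_none(*pte)) {
			/* mmu.c clears the PTE and releases the mapped page */
		}
	} while (pte++, addr += PAGE_SIZE, addr != end);

	if (stage2_pte_table_empty(kvm, start_pte)) {
		/* mmu.c frees the now-empty PTE table and clears the PMD */
	}
}
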
start_pte         481 virt/kvm/arm/mmu.c 	pte_t *pte, *start_pte;
start_pte         483 virt/kvm/arm/mmu.c 	start_pte = pte = pte_offset_kernel(pmd, addr);
start_pte         491 virt/kvm/arm/mmu.c 	if (hyp_pte_table_empty(start_pte))
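
The hyp-mode variant at lines 481-491 follows the same shape; the visible difference in these hits is that the emptiness check, hyp_pte_table_empty(), takes only the table pointer. A short fragment-level sketch, with the clearing loop reduced to a comment:

	pte_t *pte, *start_pte;

	start_pte = pte = pte_offset_kernel(pmd, addr);
	/* ... clear each present entry via the pte cursor, as above ... */
	if (hyp_pte_table_empty(start_pte)) {
		/* mmu.c frees the hyp PTE table and clears the PMD */
	}
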