ptev               19 arch/nds32/kernel/pm.c 	pte_t *ptev;
ptev               26 arch/nds32/kernel/pm.c 	ptev = pte_offset_map(pmdv, (unsigned int)cpu_resume);
ptev               28 arch/nds32/kernel/pm.c 	resume_addr = ((*ptev) & TLB_DATA_mskPPN)
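
For context, these nds32 hits compute the physical resume address during suspend. A hedged sketch of the step they belong to follows; the pgd/pud/pmd walk above these lines, and the continuation of the resume_addr expression, are recalled from the source rather than shown in the listing, so treat the exact shape as an assumption:

    /* Map the PTE covering cpu_resume (the pgd/pud/pmd walk that
     * produced pmdv is elided here). */
    ptev = pte_offset_map(pmdv, (unsigned int)cpu_resume);

    /* Physical page number from the PTE, plus the low 12 offset bits
     * of the virtual address, yields the physical resume address. */
    resume_addr = ((*ptev) & TLB_DATA_mskPPN)
            | ((unsigned int)cpu_resume & 0x00000fff);
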
ptev              770 arch/powerpc/include/asm/book3s/64/pgtable.h static inline bool check_pte_access(unsigned long access, unsigned long ptev)
ptev              775 arch/powerpc/include/asm/book3s/64/pgtable.h 	if (access & ~ptev)
ptev              780 arch/powerpc/include/asm/book3s/64/pgtable.h 	if ((access & _PAGE_PRIVILEGED) != (ptev & _PAGE_PRIVILEGED))
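
The powerpc hits are the body of a complete inline helper, where ptev is a raw PTE value rather than a pointer. A plausible reconstruction of the whole function, with the return statements inferred from the two guard conditions shown above:

    static inline bool check_pte_access(unsigned long access, unsigned long ptev)
    {
        /* Any requested permission bit (read/write/execute/present)
         * missing from the PTE value denies the access. */
        if (access & ~ptev)
            return false;

        /* A user access must not hit a privileged mapping, and vice
         * versa: the _PAGE_PRIVILEGED bits have to agree. */
        if ((access & _PAGE_PRIVILEGED) != (ptev & _PAGE_PRIVILEGED))
            return false;

        return true;
    }
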
ptev             1125 arch/s390/kvm/priv.c 	unsigned long gfn, hva, res, pgstev, ptev;
ptev             1141 arch/s390/kvm/priv.c 	nappended = pgste_perform_essa(vcpu->kvm->mm, hva, orc, &ptev, &pgstev);
ptev             1154 arch/s390/kvm/priv.c 	if (ptev & _PAGE_INVALID) {
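
These s390 KVM hits are from the ESSA (extract-and-set-storage-attributes) intercept handler. A hedged sketch of the step they belong to; the surrounding helper name, the gfn-to-hva resolution, and the result-bit encoding are recalled from the kernel source, not shown in the listing:

    /* Resolve the guest frame to a host virtual address, then let the
     * pgste helper perform the ESSA operation on the backing PTE,
     * returning the PTE and pgste values through ptev and pgstev. */
    hva = gfn_to_hva(vcpu->kvm, gfn);
    nappended = pgste_perform_essa(vcpu->kvm->mm, hva, orc, &ptev, &pgstev);

    /* The usage state comes from the pgste; the block-content state
     * comes from the PTE: an invalid PTE means the page is not
     * resident, and a logically-zero non-resident page sets an
     * additional bit in the result. */
    res = (pgstev & _PGSTE_GPS_USAGE_MASK) >> 22;
    if (ptev & _PAGE_INVALID) {
        res |= 2;
        if (pgstev & _PGSTE_GPS_ZERO)
            res |= 1;
    }
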
ptev              709 arch/s390/mm/pgtable.c 	unsigned long ptev;
ptev              717 arch/s390/mm/pgtable.c 	ptev = pte_val(*ptep);
ptev              718 arch/s390/mm/pgtable.c 	if (!(ptev & _PAGE_INVALID) && (ptev & _PAGE_WRITE))
ptev              719 arch/s390/mm/pgtable.c 		page_set_storage_key(ptev & PAGE_MASK, PAGE_DEFAULT_KEY, 1);
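
Here the PTE value gates a storage-key reset. A plausible reconstruction of the relevant step; the enclosing function (likely ptep_zap_key) and the pgste locking around it are assumptions based on the file:

    /* Only a valid, writable mapping has a backing frame whose storage
     * key is worth resetting; PAGE_MASK strips the flag bits so only
     * the physical frame address is passed on. */
    ptev = pte_val(*ptep);
    if (!(ptev & _PAGE_INVALID) && (ptev & _PAGE_WRITE))
        page_set_storage_key(ptev & PAGE_MASK, PAGE_DEFAULT_KEY, 1);
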
ptev              138 arch/x86/xen/mmu_pv.c 	pte_t *pte, ptev;
ptev              146 arch/x86/xen/mmu_pv.c 	ptev = pte_wrprotect(*pte);
ptev              148 arch/x86/xen/mmu_pv.c 	if (HYPERVISOR_update_va_mapping(address, ptev, 0))
ptev              154 arch/x86/xen/mmu_pv.c 	pte_t *pte, ptev;
ptev              162 arch/x86/xen/mmu_pv.c 	ptev = pte_mkwrite(*pte);
ptev              164 arch/x86/xen/mmu_pv.c 	if (HYPERVISOR_update_va_mapping(address, ptev, 0))
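
The two Xen clusters are the symmetric pair make_lowmem_page_readonly()/make_lowmem_page_readwrite(). A sketch of the first, with lookup_address() and the early return recalled from the source, so treat the exact shape as an assumption:

    void make_lowmem_page_readonly(void *vaddr)
    {
        pte_t *pte, ptev;
        unsigned long address = (unsigned long)vaddr;
        unsigned int level;

        /* Find the PTE mapping this address; bail out if unmapped. */
        pte = lookup_address(address, &level);
        if (pte == NULL)
            return;

        /* Build the new PTE value in ptev, then ask the hypervisor to
         * install it: a PV guest cannot write page tables directly. */
        ptev = pte_wrprotect(*pte);
        if (HYPERVISOR_update_va_mapping(address, ptev, 0))
            BUG();
    }

    /* make_lowmem_page_readwrite() is identical except that it uses
     * pte_mkwrite(*pte) to set, rather than clear, the write bit. */
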