Lines Matching refs:hptep

315 __be64 *hptep; in kvmppc_mmu_book3s_64_hv_xlate() local
338 hptep = (__be64 *)(kvm->arch.hpt_virt + (index << 4)); in kvmppc_mmu_book3s_64_hv_xlate()
339 v = be64_to_cpu(hptep[0]) & ~HPTE_V_HVLOCK; in kvmppc_mmu_book3s_64_hv_xlate()
342 unlock_hpte(hptep, v); in kvmppc_mmu_book3s_64_hv_xlate()
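
The four fragments above from kvmppc_mmu_book3s_64_hv_xlate() show the basic HPT access pattern. A minimal sketch of what they add up to, assuming only what is visible here (kvm->arch.hpt_virt is the kernel virtual base of the guest hashed page table, each HPTE is two big-endian 64-bit words, and HPTE_V_HVLOCK is a software lock bit borrowed from the valid word); the lock acquisition that must precede the read is not part of these fragments:

    /* Each HPTE is 16 bytes (two __be64 words), hence index << 4. */
    __be64 *hptep = (__be64 *)(kvm->arch.hpt_virt + (index << 4));

    /* Read the valid ("V") word, masking off the software lock bit
     * so it never leaks into the translated result. */
    unsigned long v = be64_to_cpu(hptep[0]) & ~HPTE_V_HVLOCK;

    /* ... compute the translation from v and hptep[1] ... */

    unlock_hpte(hptep, v);  /* store v back, releasing the entry */
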
440 __be64 *hptep; in kvmppc_book3s_hv_page_fault() local
463 hptep = (__be64 *)(kvm->arch.hpt_virt + (index << 4)); in kvmppc_book3s_hv_page_fault()
466 while (!try_lock_hpte(hptep, HPTE_V_HVLOCK)) in kvmppc_book3s_hv_page_fault()
468 hpte[0] = be64_to_cpu(hptep[0]) & ~HPTE_V_HVLOCK; in kvmppc_book3s_hv_page_fault()
469 hpte[1] = be64_to_cpu(hptep[1]); in kvmppc_book3s_hv_page_fault()
471 unlock_hpte(hptep, hpte[0]); in kvmppc_book3s_hv_page_fault()
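
Lines 463-471 are the first half of the page-fault protocol: lock the entry, snapshot both words into CPU endianness, release. A sketch of that step; try_lock_hpte() atomically sets HPTE_V_HVLOCK and fails if it was already set, and the cpu_relax() in the spin loop is an assumption (the fragments show only the while condition):

    unsigned long hpte[2];

    /* Spin until we own the HVLOCK bit in the V word. */
    while (!try_lock_hpte(hptep, HPTE_V_HVLOCK))
            cpu_relax();            /* assumed loop body */

    /* Snapshot both words, lock bit stripped, so the entry can be
     * revalidated after the fault has been resolved (see below). */
    hpte[0] = be64_to_cpu(hptep[0]) & ~HPTE_V_HVLOCK;
    hpte[1] = be64_to_cpu(hptep[1]);

    unlock_hpte(hptep, hpte[0]);    /* drop the lock, keep the snapshot */
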
583 while (!try_lock_hpte(hptep, HPTE_V_HVLOCK)) in kvmppc_book3s_hv_page_fault()
585 if ((be64_to_cpu(hptep[0]) & ~HPTE_V_HVLOCK) != hpte[0] || in kvmppc_book3s_hv_page_fault()
586 be64_to_cpu(hptep[1]) != hpte[1] || in kvmppc_book3s_hv_page_fault()
607 if (be64_to_cpu(hptep[0]) & HPTE_V_VALID) { in kvmppc_book3s_hv_page_fault()
610 hptep[0] |= cpu_to_be64(HPTE_V_ABSENT); in kvmppc_book3s_hv_page_fault()
611 kvmppc_invalidate_hpte(kvm, hptep, index); in kvmppc_book3s_hv_page_fault()
613 r |= be64_to_cpu(hptep[1]) & (HPTE_R_R | HPTE_R_C); in kvmppc_book3s_hv_page_fault()
618 hptep[1] = cpu_to_be64(r); in kvmppc_book3s_hv_page_fault()
620 __unlock_hpte(hptep, hpte[0]); in kvmppc_book3s_hv_page_fault()
641 __unlock_hpte(hptep, be64_to_cpu(hptep[0])); in kvmppc_book3s_hv_page_fault()
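
Lines 583-641 are the second half: after the fault work (which may sleep), the entry is re-locked and compared against the snapshot, and only an unchanged entry is invalidated and rewritten. A sketch of that compare-and-update, with the fault-resolution code elided and the out_unlock label assumed from the bail-out at line 641; __unlock_hpte() is taken to clear HPTE_V_HVLOCK itself before storing:

    unsigned long r;    /* new R word computed during fault handling (elided) */

    while (!try_lock_hpte(hptep, HPTE_V_HVLOCK))
            cpu_relax();

    /* If the entry changed while we were faulting, give up and retry. */
    if ((be64_to_cpu(hptep[0]) & ~HPTE_V_HVLOCK) != hpte[0] ||
        be64_to_cpu(hptep[1]) != hpte[1])
            goto out_unlock;

    if (be64_to_cpu(hptep[0]) & HPTE_V_VALID) {
            /* Downgrade to "absent" and flush the hardware hash table
             * before touching the R word. */
            hptep[0] |= cpu_to_be64(HPTE_V_ABSENT);
            kvmppc_invalidate_hpte(kvm, hptep, index);
            /* Keep any reference/change bits hardware set meanwhile. */
            r |= be64_to_cpu(hptep[1]) & (HPTE_R_R | HPTE_R_C);
    }
    hptep[1] = cpu_to_be64(r);
    __unlock_hpte(hptep, hpte[0]);  /* publish the V word, drop the lock */
    /* success path continues here */

 out_unlock:
    /* Bail-out (line 641): write the current V word back; __unlock_hpte()
     * is assumed to clear HPTE_V_HVLOCK before the store. */
    __unlock_hpte(hptep, be64_to_cpu(hptep[0]));
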
717 __be64 *hptep; in kvm_unmap_rmapp() local
733 hptep = (__be64 *) (kvm->arch.hpt_virt + (i << 4)); in kvm_unmap_rmapp()
734 if (!try_lock_hpte(hptep, HPTE_V_HVLOCK)) { in kvm_unmap_rmapp()
737 while (be64_to_cpu(hptep[0]) & HPTE_V_HVLOCK) in kvm_unmap_rmapp()
756 psize = hpte_page_size(be64_to_cpu(hptep[0]), ptel); in kvm_unmap_rmapp()
757 if ((be64_to_cpu(hptep[0]) & HPTE_V_VALID) && in kvm_unmap_rmapp()
759 hptep[0] |= cpu_to_be64(HPTE_V_ABSENT); in kvm_unmap_rmapp()
760 kvmppc_invalidate_hpte(kvm, hptep, i); in kvm_unmap_rmapp()
762 rcbits = be64_to_cpu(hptep[1]) & (HPTE_R_R | HPTE_R_C); in kvm_unmap_rmapp()
770 __unlock_hpte(hptep, be64_to_cpu(hptep[0])); in kvm_unmap_rmapp()
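
kvm_unmap_rmapp() (lines 717-770) uses a non-blocking variant: if the entry is already locked it backs off, waits for the raw lock bit to clear, and retries. On success a valid entry is downgraded to absent, invalidated, and its R/C bits harvested for the reverse-map side. A sketch with the rmap bookkeeping elided; the retry shape is assumed from lines 734-737, and the second half of the condition at line 757 is not visible in the fragment:

    for (;;) {
            hptep = (__be64 *)(kvm->arch.hpt_virt + (i << 4));
            if (try_lock_hpte(hptep, HPTE_V_HVLOCK))
                    break;
            /* Busy: spin until the holder clears HVLOCK, then retry. */
            while (be64_to_cpu(hptep[0]) & HPTE_V_HVLOCK)
                    cpu_relax();
    }

    psize = hpte_page_size(be64_to_cpu(hptep[0]), ptel);
    if (be64_to_cpu(hptep[0]) & HPTE_V_VALID) {   /* 2nd condition elided */
            hptep[0] |= cpu_to_be64(HPTE_V_ABSENT);
            kvmppc_invalidate_hpte(kvm, hptep, i);
            /* Harvest referenced/changed bits before they are lost. */
            rcbits = be64_to_cpu(hptep[1]) & (HPTE_R_R | HPTE_R_C);
    }
    __unlock_hpte(hptep, be64_to_cpu(hptep[0]));
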
815 __be64 *hptep; in kvm_age_rmapp() local
831 hptep = (__be64 *) (kvm->arch.hpt_virt + (i << 4)); in kvm_age_rmapp()
835 if (!(be64_to_cpu(hptep[1]) & HPTE_R_R)) in kvm_age_rmapp()
838 if (!try_lock_hpte(hptep, HPTE_V_HVLOCK)) { in kvm_age_rmapp()
841 while (be64_to_cpu(hptep[0]) & HPTE_V_HVLOCK) in kvm_age_rmapp()
847 if ((be64_to_cpu(hptep[0]) & HPTE_V_VALID) && in kvm_age_rmapp()
848 (be64_to_cpu(hptep[1]) & HPTE_R_R)) { in kvm_age_rmapp()
849 kvmppc_clear_ref_hpte(kvm, hptep, i); in kvm_age_rmapp()
856 __unlock_hpte(hptep, be64_to_cpu(hptep[0])); in kvm_age_rmapp()
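
kvm_age_rmapp() (lines 815-856) adds a cheap unlocked pre-check: if HPTE_R_R (the referenced bit) is clear in the R word there is nothing to age, so the lock is never taken; otherwise the bit is rechecked under the lock before being cleared. A sketch of that fast-path-then-recheck shape (the continue-based retry and the surrounding loop are assumptions):

    hptep = (__be64 *)(kvm->arch.hpt_virt + (i << 4));

    /* Unlocked fast path: not referenced, nothing to do. */
    if (!(be64_to_cpu(hptep[1]) & HPTE_R_R))
            continue;

    if (!try_lock_hpte(hptep, HPTE_V_HVLOCK)) {
            while (be64_to_cpu(hptep[0]) & HPTE_V_HVLOCK)
                    cpu_relax();
            continue;
    }

    /* Recheck under the lock: the bit may have been cleared meanwhile. */
    if ((be64_to_cpu(hptep[0]) & HPTE_V_VALID) &&
        (be64_to_cpu(hptep[1]) & HPTE_R_R)) {
            kvmppc_clear_ref_hpte(kvm, hptep, i);
            /* ... report the page as young (elided) ... */
    }
    __unlock_hpte(hptep, be64_to_cpu(hptep[0]));
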
924 __be64 *hptep; in kvm_test_clear_dirty_npages() local
941 hptep = (__be64 *) (kvm->arch.hpt_virt + (i << 4)); in kvm_test_clear_dirty_npages()
958 hptep1 = be64_to_cpu(hptep[1]); in kvm_test_clear_dirty_npages()
963 if (!try_lock_hpte(hptep, HPTE_V_HVLOCK)) { in kvm_test_clear_dirty_npages()
966 while (hptep[0] & cpu_to_be64(HPTE_V_HVLOCK)) in kvm_test_clear_dirty_npages()
972 if (!(hptep[0] & cpu_to_be64(HPTE_V_VALID))) { in kvm_test_clear_dirty_npages()
973 __unlock_hpte(hptep, be64_to_cpu(hptep[0])); in kvm_test_clear_dirty_npages()
978 hptep[0] |= cpu_to_be64(HPTE_V_ABSENT); in kvm_test_clear_dirty_npages()
979 kvmppc_invalidate_hpte(kvm, hptep, i); in kvm_test_clear_dirty_npages()
980 v = be64_to_cpu(hptep[0]); in kvm_test_clear_dirty_npages()
981 r = be64_to_cpu(hptep[1]); in kvm_test_clear_dirty_npages()
983 hptep[1] = cpu_to_be64(r & ~HPTE_R_C); in kvm_test_clear_dirty_npages()
996 __unlock_hpte(hptep, v); in kvm_test_clear_dirty_npages()
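
kvm_test_clear_dirty_npages() (lines 924-996) handles the change bit: the R word is sampled unlocked first (line 958), then under the lock an invalid entry is skipped, and a valid one is pulled out of the hardware table before HPTE_R_C is cleared, so a concurrent guest store cannot re-dirty it mid-update. A sketch of the locked portion; recording r & HPTE_R_C into the dirty bitmap is elided:

    while (!try_lock_hpte(hptep, HPTE_V_HVLOCK))
            cpu_relax();

    /* Entry vanished: drop the lock and move on. */
    if (!(hptep[0] & cpu_to_be64(HPTE_V_VALID))) {
            __unlock_hpte(hptep, be64_to_cpu(hptep[0]));
            continue;
    }

    /* Invalidate first so hardware cannot set C behind our back. */
    hptep[0] |= cpu_to_be64(HPTE_V_ABSENT);
    kvmppc_invalidate_hpte(kvm, hptep, i);

    v = be64_to_cpu(hptep[0]);
    r = be64_to_cpu(hptep[1]);
    hptep[1] = cpu_to_be64(r & ~HPTE_R_C);  /* clear the change bit */
    /* ... fold r & HPTE_R_C into the dirty bitmap (elided) ... */
    __unlock_hpte(hptep, v);   /* __unlock_hpte() masks HVLOCK itself */
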