Lines matching refs:arch (each reference to a kvm->arch, vcpu->arch, or memslot->arch field, shown as source line number, matching code, and enclosing function):
53 if (kvm->arch.online_vcores == 1 && local_paca->kvm_hstate.kvm_vcpu) in global_invalidates()
61 cpumask_setall(&kvm->arch.need_tlb_flush); in global_invalidates()
63 &kvm->arch.need_tlb_flush); in global_invalidates()
81 head = &kvm->arch.revmap[i]; in kvmppc_add_revmap_chain()
84 tail = &kvm->arch.revmap[head->back]; in kvmppc_add_revmap_chain()
118 rmap = real_vmalloc_addr(&memslot->arch.rmap[gfn - memslot->base_gfn]); in remove_revmap_chain()
122 next = real_vmalloc_addr(&kvm->arch.revmap[rev->forw]); in remove_revmap_chain()
123 prev = real_vmalloc_addr(&kvm->arch.revmap[rev->back]); in remove_revmap_chain()
185 rmap = &memslot->arch.rmap[slot_fn]; in kvmppc_do_h_enter()
252 if (pte_index >= kvm->arch.hpt_npte) in kvmppc_do_h_enter()
256 hpte = (__be64 *)(kvm->arch.hpt_virt + (pte_index << 4)); in kvmppc_do_h_enter()
287 hpte = (__be64 *)(kvm->arch.hpt_virt + (pte_index << 4)); in kvmppc_do_h_enter()
304 rev = &kvm->arch.revmap[pte_index]; in kvmppc_do_h_enter()
348 vcpu->arch.pgdir, true, &vcpu->arch.gpr[4]); in kvmppc_h_enter()
381 while (!try_lock_tlbie(&kvm->arch.tlbie_lock)) in do_tlbies()
387 "r" (rbvalues[i]), "r" (kvm->arch.lpid)); in do_tlbies()
389 kvm->arch.tlbie_lock = 0; in do_tlbies()
408 if (pte_index >= kvm->arch.hpt_npte) in kvmppc_do_h_remove()
410 hpte = (__be64 *)(kvm->arch.hpt_virt + (pte_index << 4)); in kvmppc_do_h_remove()
421 rev = real_vmalloc_addr(&kvm->arch.revmap[pte_index]); in kvmppc_do_h_remove()
453 &vcpu->arch.gpr[4]); in kvmppc_h_remove()
459 unsigned long *args = &vcpu->arch.gpr[4]; in kvmppc_h_bulk_remove()
484 pte_index >= kvm->arch.hpt_npte) { in kvmppc_h_bulk_remove()
490 hp = (__be64 *) (kvm->arch.hpt_virt + (pte_index << 4)); in kvmppc_h_bulk_remove()
522 rev = real_vmalloc_addr(&kvm->arch.revmap[pte_index]); in kvmppc_h_bulk_remove()
576 if (pte_index >= kvm->arch.hpt_npte) in kvmppc_h_protect()
579 hpte = (__be64 *)(kvm->arch.hpt_virt + (pte_index << 4)); in kvmppc_h_protect()
597 rev = real_vmalloc_addr(&kvm->arch.revmap[pte_index]); in kvmppc_h_protect()
639 if (pte_index >= kvm->arch.hpt_npte) in kvmppc_h_read()
645 rev = real_vmalloc_addr(&kvm->arch.revmap[pte_index]); in kvmppc_h_read()
647 hpte = (__be64 *)(kvm->arch.hpt_virt + (pte_index << 4)); in kvmppc_h_read()
658 vcpu->arch.gpr[4 + i * 2] = v; in kvmppc_h_read()
659 vcpu->arch.gpr[5 + i * 2] = r; in kvmppc_h_read()
731 hash = (vsid ^ ((eaddr & somask) >> pshift)) & kvm->arch.hpt_mask; in kvmppc_hv_find_lock_hpte()
742 hpte = (__be64 *)(kvm->arch.hpt_virt + (hash << 7)); in kvmppc_hv_find_lock_hpte()
772 hash = hash ^ kvm->arch.hpt_mask; in kvmppc_hv_find_lock_hpte()
811 hpte = (__be64 *)(kvm->arch.hpt_virt + (index << 4)); in kvmppc_hpte_hv_fault()
814 rev = real_vmalloc_addr(&kvm->arch.revmap[index]); in kvmppc_hpte_hv_fault()
825 key = (vcpu->arch.shregs.msr & MSR_PR) ? SLB_VSID_KP : SLB_VSID_KS; in kvmppc_hpte_hv_fault()
842 if (data && (vcpu->arch.shregs.msr & MSR_DR)) { in kvmppc_hpte_hv_fault()
843 unsigned int perm = hpte_get_skey_perm(gr, vcpu->arch.amr); in kvmppc_hpte_hv_fault()
851 vcpu->arch.pgfault_addr = addr; in kvmppc_hpte_hv_fault()
852 vcpu->arch.pgfault_index = index; in kvmppc_hpte_hv_fault()
853 vcpu->arch.pgfault_hpte[0] = v; in kvmppc_hpte_hv_fault()
854 vcpu->arch.pgfault_hpte[1] = r; in kvmppc_hpte_hv_fault()
857 if (data && (vcpu->arch.shregs.msr & MSR_IR) && in kvmppc_hpte_hv_fault()