Lines Matching refs:vcpu in arch/powerpc/kvm/book3s_32_mmu_host.c

61 void kvmppc_mmu_invalidate_pte(struct kvm_vcpu *vcpu, struct hpte_cache *pte) in kvmppc_mmu_invalidate_pte() argument
78 static u16 kvmppc_sid_hash(struct kvm_vcpu *vcpu, u64 gvsid) in kvmppc_sid_hash() argument
91 static struct kvmppc_sid_map *find_sid_vsid(struct kvm_vcpu *vcpu, u64 gvsid) in find_sid_vsid() argument
96 if (kvmppc_get_msr(vcpu) & MSR_PR) in find_sid_vsid()
99 sid_map_mask = kvmppc_sid_hash(vcpu, gvsid); in find_sid_vsid()
100 map = &to_book3s(vcpu)->sid_map[sid_map_mask]; in find_sid_vsid()
107 map = &to_book3s(vcpu)->sid_map[SID_MAP_MASK - sid_map_mask]; in find_sid_vsid()
118 static u32 *kvmppc_mmu_get_pteg(struct kvm_vcpu *vcpu, u32 vsid, u32 eaddr, in kvmppc_mmu_get_pteg() argument
142 int kvmppc_mmu_map_page(struct kvm_vcpu *vcpu, struct kvmppc_pte *orig_pte, in kvmppc_mmu_map_page() argument
160 hpaddr = kvmppc_gpa_to_pfn(vcpu, orig_pte->raddr, iswrite, &writable); in kvmppc_mmu_map_page()
170 vcpu->arch.mmu.esid_to_vsid(vcpu, orig_pte->eaddr >> SID_SHIFT, &vsid); in kvmppc_mmu_map_page()
171 map = find_sid_vsid(vcpu, vsid); in kvmppc_mmu_map_page()
173 kvmppc_mmu_map_segment(vcpu, eaddr); in kvmppc_mmu_map_page()
174 map = find_sid_vsid(vcpu, vsid); in kvmppc_mmu_map_page()
188 pteg = kvmppc_mmu_get_pteg(vcpu, vsid, eaddr, primary); in kvmppc_mmu_map_page()
212 mark_page_dirty(vcpu->kvm, orig_pte->raddr >> PAGE_SHIFT); in kvmppc_mmu_map_page()
245 pte = kvmppc_mmu_hpte_cache_next(vcpu); in kvmppc_mmu_map_page()
263 kvmppc_mmu_hpte_cache_map(vcpu, pte); in kvmppc_mmu_map_page()
270 void kvmppc_mmu_unmap_page(struct kvm_vcpu *vcpu, struct kvmppc_pte *pte) in kvmppc_mmu_unmap_page() argument
272 kvmppc_mmu_pte_vflush(vcpu, pte->vpage, 0xfffffffffULL); in kvmppc_mmu_unmap_page()
275 static struct kvmppc_sid_map *create_sid_map(struct kvm_vcpu *vcpu, u64 gvsid) in create_sid_map() argument
278 struct kvmppc_vcpu_book3s *vcpu_book3s = to_book3s(vcpu); in create_sid_map()
282 if (kvmppc_get_msr(vcpu) & MSR_PR) in create_sid_map()
288 sid_map_mask = kvmppc_sid_hash(vcpu, gvsid); in create_sid_map()
292 map = &to_book3s(vcpu)->sid_map[sid_map_mask]; in create_sid_map()
302 kvmppc_mmu_pte_flush(vcpu, 0, 0); in create_sid_map()
303 kvmppc_mmu_flush_segments(vcpu); in create_sid_map()
314 int kvmppc_mmu_map_segment(struct kvm_vcpu *vcpu, ulong eaddr) in kvmppc_mmu_map_segment() argument
320 struct kvmppc_book3s_shadow_vcpu *svcpu = svcpu_get(vcpu); in kvmppc_mmu_map_segment()
323 if (vcpu->arch.mmu.esid_to_vsid(vcpu, esid, &gvsid)) { in kvmppc_mmu_map_segment()
330 map = find_sid_vsid(vcpu, gvsid); in kvmppc_mmu_map_segment()
332 map = create_sid_map(vcpu, gvsid); in kvmppc_mmu_map_segment()
345 void kvmppc_mmu_flush_segments(struct kvm_vcpu *vcpu) in kvmppc_mmu_flush_segments() argument
348 struct kvmppc_book3s_shadow_vcpu *svcpu = svcpu_get(vcpu); in kvmppc_mmu_flush_segments()
357 void kvmppc_mmu_destroy_pr(struct kvm_vcpu *vcpu) in kvmppc_mmu_destroy_pr() argument
361 kvmppc_mmu_hpte_destroy(vcpu); in kvmppc_mmu_destroy_pr()
364 __destroy_context(to_book3s(vcpu)->context_id[i]); in kvmppc_mmu_destroy_pr()
371 int kvmppc_mmu_init(struct kvm_vcpu *vcpu) in kvmppc_mmu_init() argument
373 struct kvmppc_vcpu_book3s *vcpu3s = to_book3s(vcpu); in kvmppc_mmu_init()
397 kvmppc_mmu_hpte_init(vcpu); in kvmppc_mmu_init()
406 __destroy_context(to_book3s(vcpu)->context_id[j]); in kvmppc_mmu_init()
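
Note on the pattern behind these hits: find_sid_vsid() and create_sid_map() implement a two-probe SID map. A guest VSID is XOR-folded into an index by kvmppc_sid_hash(), sid_map[hash] is probed first, and sid_map[SID_MAP_MASK - hash] serves as a mirror slot, so two VSIDs that hash together can coexist; create_sid_map() alternates which of the two slots it fills. The following is a minimal standalone C sketch of that scheme, not the kernel code: sid_map_entry, lookup_sid(), create_sid(), and the constant values are simplified stand-ins, and only the probe order and slot alternation mirror the file above.

#include <stdbool.h>
#include <stdint.h>
#include <stddef.h>

#define SID_MAP_BITS 9                     /* illustrative size, not the kernel's */
#define SID_MAP_NUM  (1 << SID_MAP_BITS)
#define SID_MAP_MASK (SID_MAP_NUM - 1)

struct sid_map_entry {
	uint64_t guest_vsid;
	uint64_t host_vsid;
	bool     valid;
};

static struct sid_map_entry sid_map[SID_MAP_NUM];
static int backwards_map;                  /* alternates the fill slot */

static uint16_t sid_hash(uint64_t gvsid)
{
	uint16_t h = 0;

	/* XOR-fold the guest VSID down to a table index. */
	while (gvsid) {
		h ^= gvsid & SID_MAP_MASK;
		gvsid >>= SID_MAP_BITS;
	}
	return h;
}

/* Probe the primary slot, then the mirrored slot. */
static struct sid_map_entry *lookup_sid(uint64_t gvsid)
{
	uint16_t h = sid_hash(gvsid);
	struct sid_map_entry *map = &sid_map[h];

	if (map->valid && map->guest_vsid == gvsid)
		return map;

	map = &sid_map[SID_MAP_MASK - h];
	if (map->valid && map->guest_vsid == gvsid)
		return map;

	return NULL;                       /* miss: caller must create a mapping */
}

/* Fill one of the two candidate slots on a miss, alternating so that
 * repeated collisions do not always evict the same entry. */
static struct sid_map_entry *create_sid(uint64_t gvsid, uint64_t hvsid)
{
	uint16_t h = sid_hash(gvsid);

	if (backwards_map)
		h = SID_MAP_MASK - h;
	backwards_map = !backwards_map;

	sid_map[h].guest_vsid = gvsid;
	sid_map[h].host_vsid  = hvsid;
	sid_map[h].valid      = true;
	return &sid_map[h];
}

int main(void)
{
	/* 0x005 and 0x204 XOR-fold to the same index (5), so the second
	 * entry lands in the mirrored slot and both stay resolvable. */
	create_sid(0x005, 0x9000);
	create_sid(0x204, 0x9001);
	return (lookup_sid(0x005) && lookup_sid(0x204)) ? 0 : 1;
}

The retry visible in kvmppc_mmu_map_page() above (find_sid_vsid(), then kvmppc_mmu_map_segment() on a miss, then find_sid_vsid() again) is the consumer side of this scheme: mapping the segment is what creates the entry the second lookup hits.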