Lines Matching refs: vcpu

26 static unsigned long get_pteg_addr(struct kvm_vcpu *vcpu, long pte_index)  in get_pteg_addr()  argument
28 struct kvmppc_vcpu_book3s *vcpu_book3s = to_book3s(vcpu); in get_pteg_addr()
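These matches appear to come from the PR-KVM PAPR hypercall emulation (arch/powerpc/kvm/book3s_pr_papr.c). The listing only shows the head of get_pteg_addr(); below is a minimal sketch of how such a helper typically turns a guest HPTE index into the guest-physical address of its PTEG, using the HTAB origin and size the guest programmed into SDR1. The exact mask widths are assumptions about the usual hashed-page-table layout, not something visible in the matches.

        /* Sketch: derive the guest-physical address of the PTEG holding
         * pte_index from the guest's SDR1 (HTABORG + HTABSIZE).  The mask
         * values below are illustrative assumptions. */
        static unsigned long get_pteg_addr(struct kvm_vcpu *vcpu, long pte_index)
        {
                struct kvmppc_vcpu_book3s *vcpu_book3s = to_book3s(vcpu);
                unsigned long pteg_addr;

                pte_index <<= 4;                /* 16 bytes per HPTE */
                pte_index &= ((1 << ((vcpu_book3s->sdr1 & 0x1f) + 11)) - 1) << 7 | 0x70;
                pteg_addr  = vcpu_book3s->sdr1 & 0xfffffffffffc0000ULL;   /* HTABORG */
                pteg_addr |= pte_index;

                return pteg_addr;
        }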
39 static int kvmppc_h_pr_enter(struct kvm_vcpu *vcpu) in kvmppc_h_pr_enter() argument
41 long flags = kvmppc_get_gpr(vcpu, 4); in kvmppc_h_pr_enter()
42 long pte_index = kvmppc_get_gpr(vcpu, 5); in kvmppc_h_pr_enter()
50 pteg_addr = get_pteg_addr(vcpu, pte_index); in kvmppc_h_pr_enter()
52 mutex_lock(&vcpu->kvm->arch.hpt_mutex); in kvmppc_h_pr_enter()
71 hpte[0] = cpu_to_be64(kvmppc_get_gpr(vcpu, 6)); in kvmppc_h_pr_enter()
72 hpte[1] = cpu_to_be64(kvmppc_get_gpr(vcpu, 7)); in kvmppc_h_pr_enter()
75 kvmppc_set_gpr(vcpu, 4, pte_index | i); in kvmppc_h_pr_enter()
79 mutex_unlock(&vcpu->kvm->arch.hpt_mutex); in kvmppc_h_pr_enter()
80 kvmppc_set_gpr(vcpu, 3, ret); in kvmppc_h_pr_enter()
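Put together, the H_ENTER matches show the handler's register convention: arguments arrive in GPRs 4..7, the guest hash table is serialized by kvm->arch.hpt_mutex, the chosen slot is returned in GPR 4 and the hcall status in GPR 3. The slot search, the H_EXACT handling and the guest-memory accesses are not in the listing; in the sketch below they are assumptions, and kvm_read_guest()/kvm_write_guest() are used purely as stand-ins for however the real code touches the guest PTEG. Symbols such as H_EXACT, H_PTEG_FULL and HPTE_V_VALID come from the kernel's hvcall/MMU headers.

        /* Sketch of the H_ENTER flow implied by the matched lines.  Anything
         * not shown in the listing is an assumption. */
        static int kvmppc_h_pr_enter(struct kvm_vcpu *vcpu)
        {
                long flags = kvmppc_get_gpr(vcpu, 4);
                long pte_index = kvmppc_get_gpr(vcpu, 5);
                __be64 pteg[2 * 8];             /* one PTEG: 8 HPTEs, 2 dwords each */
                unsigned long pteg_addr, i;
                long ret = H_PTEG_FULL;

                i = pte_index & 7;
                pte_index &= ~7UL;
                pteg_addr = get_pteg_addr(vcpu, pte_index);

                mutex_lock(&vcpu->kvm->arch.hpt_mutex);
                /* stand-in for however the real code reads the guest PTEG */
                if (kvm_read_guest(vcpu->kvm, pteg_addr, pteg, sizeof(pteg)))
                        goto done;

                if (!(flags & H_EXACT)) {
                        /* pick any invalid (free) slot in the PTEG */
                        for (i = 0; i < 8; i++)
                                if (!(be64_to_cpu(pteg[i * 2]) & HPTE_V_VALID))
                                        break;
                        if (i == 8)
                                goto done;
                } else if (be64_to_cpu(pteg[i * 2]) & HPTE_V_VALID) {
                        goto done;              /* requested slot already in use */
                }

                pteg[i * 2]     = cpu_to_be64(kvmppc_get_gpr(vcpu, 6)); /* HPTE dword 0 */
                pteg[i * 2 + 1] = cpu_to_be64(kvmppc_get_gpr(vcpu, 7)); /* HPTE dword 1 */
                /* stand-in for writing the single updated HPTE back */
                kvm_write_guest(vcpu->kvm, pteg_addr + i * 16, &pteg[i * 2], 16);

                kvmppc_set_gpr(vcpu, 4, pte_index | i); /* tell the guest which slot */
                ret = H_SUCCESS;

        done:
                mutex_unlock(&vcpu->kvm->arch.hpt_mutex);
                kvmppc_set_gpr(vcpu, 3, ret);           /* hcall return code */
                return EMULATE_DONE;
        }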
85 static int kvmppc_h_pr_remove(struct kvm_vcpu *vcpu) in kvmppc_h_pr_remove() argument
87 unsigned long flags = kvmppc_get_gpr(vcpu, 4); in kvmppc_h_pr_remove()
88 unsigned long pte_index = kvmppc_get_gpr(vcpu, 5); in kvmppc_h_pr_remove()
89 unsigned long avpn = kvmppc_get_gpr(vcpu, 6); in kvmppc_h_pr_remove()
94 pteg = get_pteg_addr(vcpu, pte_index); in kvmppc_h_pr_remove()
95 mutex_lock(&vcpu->kvm->arch.hpt_mutex); in kvmppc_h_pr_remove()
109 vcpu->arch.mmu.tlbie(vcpu, rb, rb & 1 ? true : false); in kvmppc_h_pr_remove()
112 kvmppc_set_gpr(vcpu, 4, pte[0]); in kvmppc_h_pr_remove()
113 kvmppc_set_gpr(vcpu, 5, pte[1]); in kvmppc_h_pr_remove()
116 mutex_unlock(&vcpu->kvm->arch.hpt_mutex); in kvmppc_h_pr_remove()
117 kvmppc_set_gpr(vcpu, 3, ret); in kvmppc_h_pr_remove()
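The H_REMOVE matches add two details to the same pattern: the old HPTE contents are handed back to the guest in GPRs 4/5, and the entry is flushed through the per-vcpu MMU hook vcpu->arch.mmu.tlbie(). In the sketch below the validity/AVPN/ANDCOND checks, compute_tlbie_rb() and the guest-memory stand-ins (kvm_read_guest()/kvm_write_guest()) are assumptions filled in around the listed lines.

        /* Sketch of the H_REMOVE flow: validate the target HPTE, clear it,
         * invalidate it in the TLB, and return the old contents. */
        static int kvmppc_h_pr_remove(struct kvm_vcpu *vcpu)
        {
                unsigned long flags = kvmppc_get_gpr(vcpu, 4);
                unsigned long pte_index = kvmppc_get_gpr(vcpu, 5);
                unsigned long avpn = kvmppc_get_gpr(vcpu, 6);
                unsigned long v = 0, pteg, rb;
                unsigned long pte[2];
                __be64 raw[2];
                long ret = H_NOT_FOUND;

                pteg = get_pteg_addr(vcpu, pte_index);
                mutex_lock(&vcpu->kvm->arch.hpt_mutex);
                if (kvm_read_guest(vcpu->kvm, pteg, raw, sizeof(raw)))  /* stand-in */
                        goto done;
                pte[0] = be64_to_cpu(raw[0]);
                pte[1] = be64_to_cpu(raw[1]);

                if (!(pte[0] & HPTE_V_VALID) ||
                    ((flags & H_AVPN) && (pte[0] & ~0x7fUL) != avpn) ||
                    ((flags & H_ANDCOND) && (pte[0] & avpn) != 0))
                        goto done;

                /* invalidate: zero dword 0 in guest memory, then tlbie the entry */
                kvm_write_guest(vcpu->kvm, pteg, &v, sizeof(v));        /* stand-in */
                rb = compute_tlbie_rb(pte[0], pte[1], pte_index);
                vcpu->arch.mmu.tlbie(vcpu, rb, rb & 1 ? true : false);

                ret = H_SUCCESS;
                kvmppc_set_gpr(vcpu, 4, pte[0]);        /* old HPTE back to the guest */
                kvmppc_set_gpr(vcpu, 5, pte[1]);

        done:
                mutex_unlock(&vcpu->kvm->arch.hpt_mutex);
                kvmppc_set_gpr(vcpu, 3, ret);
                return EMULATE_DONE;
        }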
140 static int kvmppc_h_pr_bulk_remove(struct kvm_vcpu *vcpu) in kvmppc_h_pr_bulk_remove() argument
146 mutex_lock(&vcpu->kvm->arch.hpt_mutex); in kvmppc_h_pr_bulk_remove()
148 unsigned long tsh = kvmppc_get_gpr(vcpu, paramnr+(2*i)); in kvmppc_h_pr_bulk_remove()
149 unsigned long tsl = kvmppc_get_gpr(vcpu, paramnr+(2*i)+1); in kvmppc_h_pr_bulk_remove()
168 kvmppc_set_gpr(vcpu, paramnr+(2*i), tsh); in kvmppc_h_pr_bulk_remove()
173 pteg = get_pteg_addr(vcpu, tsh & H_BULK_REMOVE_PTEX); in kvmppc_h_pr_bulk_remove()
191 vcpu->arch.mmu.tlbie(vcpu, rb, rb & 1 ? true : false); in kvmppc_h_pr_bulk_remove()
195 kvmppc_set_gpr(vcpu, paramnr+(2*i), tsh); in kvmppc_h_pr_bulk_remove()
197 mutex_unlock(&vcpu->kvm->arch.hpt_mutex); in kvmppc_h_pr_bulk_remove()
198 kvmppc_set_gpr(vcpu, 3, ret); in kvmppc_h_pr_bulk_remove()
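H_BULK_REMOVE packs up to H_BULK_REMOVE_MAX_BATCH request pairs into consecutive GPRs starting at r4 (paramnr = 4 in the matches above) and writes a response code back into the high word of each pair. The loop structure below is a schematic sketch; the per-entry lookup and removal are elided, and the H_BULK_REMOVE_* masks are taken from the kernel's hvcall header rather than from the listing.

        /* Sketch of the H_BULK_REMOVE loop: walk the (tsh, tsl) pairs, stop
         * at an END marker, remove each requested HPTE, report status in tsh. */
        static int kvmppc_h_pr_bulk_remove(struct kvm_vcpu *vcpu)
        {
                int i, paramnr = 4;
                int ret = H_SUCCESS;

                mutex_lock(&vcpu->kvm->arch.hpt_mutex);
                for (i = 0; i < H_BULK_REMOVE_MAX_BATCH; i++) {
                        unsigned long tsh = kvmppc_get_gpr(vcpu, paramnr + (2 * i));
                        unsigned long tsl = kvmppc_get_gpr(vcpu, paramnr + (2 * i) + 1);

                        (void)tsl;      /* the AVPN; consumed by the elided removal step */

                        if ((tsh & H_BULK_REMOVE_TYPE) == H_BULK_REMOVE_END)
                                break;                  /* end of the batch */
                        if ((tsh & H_BULK_REMOVE_TYPE) != H_BULK_REMOVE_REQUEST) {
                                ret = H_PARAMETER;
                                break;
                        }

                        tsh &= H_BULK_REMOVE_PTEX | H_BULK_REMOVE_FLAGS;
                        tsh |= H_BULK_REMOVE_RESPONSE;

                        /* ... look up the HPTE at (tsh & H_BULK_REMOVE_PTEX) via
                         * get_pteg_addr(), match it against tsl, clear and tlbie it
                         * as in the H_REMOVE sketch, then fold the per-entry result
                         * (H_BULK_REMOVE_SUCCESS / H_BULK_REMOVE_NOT_FOUND) into tsh ... */

                        kvmppc_set_gpr(vcpu, paramnr + (2 * i), tsh);
                }
                mutex_unlock(&vcpu->kvm->arch.hpt_mutex);
                kvmppc_set_gpr(vcpu, 3, ret);
                return EMULATE_DONE;
        }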
203 static int kvmppc_h_pr_protect(struct kvm_vcpu *vcpu) in kvmppc_h_pr_protect() argument
205 unsigned long flags = kvmppc_get_gpr(vcpu, 4); in kvmppc_h_pr_protect()
206 unsigned long pte_index = kvmppc_get_gpr(vcpu, 5); in kvmppc_h_pr_protect()
207 unsigned long avpn = kvmppc_get_gpr(vcpu, 6); in kvmppc_h_pr_protect()
212 pteg = get_pteg_addr(vcpu, pte_index); in kvmppc_h_pr_protect()
213 mutex_lock(&vcpu->kvm->arch.hpt_mutex); in kvmppc_h_pr_protect()
234 vcpu->arch.mmu.tlbie(vcpu, rb, rb & 1 ? true : false); in kvmppc_h_pr_protect()
241 mutex_unlock(&vcpu->kvm->arch.hpt_mutex); in kvmppc_h_pr_protect()
242 kvmppc_set_gpr(vcpu, 3, ret); in kvmppc_h_pr_protect()
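H_PROTECT mirrors the H_REMOVE sketch, except that the HPTE is rewritten rather than cleared. The sketch below reuses the same assumed stand-ins; the actual folding of the new pp/N/key bits from the flags argument into the second doubleword is elided because none of it is visible in the matches.

        /* Sketch of the H_PROTECT flow: locate and validate the HPTE, update
         * its protection bits (elided), tlbie it, write it back. */
        static int kvmppc_h_pr_protect(struct kvm_vcpu *vcpu)
        {
                unsigned long flags = kvmppc_get_gpr(vcpu, 4);
                unsigned long pte_index = kvmppc_get_gpr(vcpu, 5);
                unsigned long avpn = kvmppc_get_gpr(vcpu, 6);
                unsigned long pteg, rb;
                unsigned long pte[2];
                __be64 raw[2];
                long ret = H_NOT_FOUND;

                pteg = get_pteg_addr(vcpu, pte_index);
                mutex_lock(&vcpu->kvm->arch.hpt_mutex);
                if (kvm_read_guest(vcpu->kvm, pteg, raw, sizeof(raw)))  /* stand-in */
                        goto done;
                pte[0] = be64_to_cpu(raw[0]);
                pte[1] = be64_to_cpu(raw[1]);

                if (!(pte[0] & HPTE_V_VALID) ||
                    ((flags & H_AVPN) && (pte[0] & ~0x7fUL) != avpn))
                        goto done;

                /* ... fold the new pp/N/key bits from 'flags' into pte[1] ... */

                rb = compute_tlbie_rb(pte[0], pte[1], pte_index);
                vcpu->arch.mmu.tlbie(vcpu, rb, rb & 1 ? true : false);

                raw[0] = cpu_to_be64(pte[0]);
                raw[1] = cpu_to_be64(pte[1]);
                kvm_write_guest(vcpu->kvm, pteg, raw, sizeof(raw));     /* stand-in */
                ret = H_SUCCESS;

        done:
                mutex_unlock(&vcpu->kvm->arch.hpt_mutex);
                kvmppc_set_gpr(vcpu, 3, ret);
                return EMULATE_DONE;
        }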
247 static int kvmppc_h_pr_put_tce(struct kvm_vcpu *vcpu) in kvmppc_h_pr_put_tce() argument
249 unsigned long liobn = kvmppc_get_gpr(vcpu, 4); in kvmppc_h_pr_put_tce()
250 unsigned long ioba = kvmppc_get_gpr(vcpu, 5); in kvmppc_h_pr_put_tce()
251 unsigned long tce = kvmppc_get_gpr(vcpu, 6); in kvmppc_h_pr_put_tce()
254 rc = kvmppc_h_put_tce(vcpu, liobn, ioba, tce); in kvmppc_h_pr_put_tce()
257 kvmppc_set_gpr(vcpu, 3, rc); in kvmppc_h_pr_put_tce()
261 static int kvmppc_h_pr_logical_ci_load(struct kvm_vcpu *vcpu) in kvmppc_h_pr_logical_ci_load() argument
265 rc = kvmppc_h_logical_ci_load(vcpu); in kvmppc_h_pr_logical_ci_load()
268 kvmppc_set_gpr(vcpu, 3, rc); in kvmppc_h_pr_logical_ci_load()
272 static int kvmppc_h_pr_logical_ci_store(struct kvm_vcpu *vcpu) in kvmppc_h_pr_logical_ci_store() argument
276 rc = kvmppc_h_logical_ci_store(vcpu); in kvmppc_h_pr_logical_ci_store()
279 kvmppc_set_gpr(vcpu, 3, rc); in kvmppc_h_pr_logical_ci_store()
283 static int kvmppc_h_pr_xics_hcall(struct kvm_vcpu *vcpu, u32 cmd) in kvmppc_h_pr_xics_hcall() argument
285 long rc = kvmppc_xics_hcall(vcpu, cmd); in kvmppc_h_pr_xics_hcall()
286 kvmppc_set_gpr(vcpu, 3, rc); in kvmppc_h_pr_xics_hcall()
290 int kvmppc_h_pr(struct kvm_vcpu *vcpu, unsigned long cmd) in kvmppc_h_pr() argument
295 !test_bit(cmd/4, vcpu->kvm->arch.enabled_hcalls)) in kvmppc_h_pr()
300 return kvmppc_h_pr_enter(vcpu); in kvmppc_h_pr()
302 return kvmppc_h_pr_remove(vcpu); in kvmppc_h_pr()
304 return kvmppc_h_pr_protect(vcpu); in kvmppc_h_pr()
306 return kvmppc_h_pr_bulk_remove(vcpu); in kvmppc_h_pr()
308 return kvmppc_h_pr_put_tce(vcpu); in kvmppc_h_pr()
310 kvmppc_set_msr_fast(vcpu, kvmppc_get_msr(vcpu) | MSR_EE); in kvmppc_h_pr()
311 kvm_vcpu_block(vcpu); in kvmppc_h_pr()
312 clear_bit(KVM_REQ_UNHALT, &vcpu->requests); in kvmppc_h_pr()
313 vcpu->stat.halt_wakeup++; in kvmppc_h_pr()
316 return kvmppc_h_pr_logical_ci_load(vcpu); in kvmppc_h_pr()
318 return kvmppc_h_pr_logical_ci_store(vcpu); in kvmppc_h_pr()
325 if (kvmppc_xics_enabled(vcpu)) in kvmppc_h_pr()
326 return kvmppc_h_pr_xics_hcall(vcpu, cmd); in kvmppc_h_pr()
329 if (list_empty(&vcpu->kvm->arch.rtas_tokens)) in kvmppc_h_pr()
331 idx = srcu_read_lock(&vcpu->kvm->srcu); in kvmppc_h_pr()
332 rc = kvmppc_rtas_hcall(vcpu); in kvmppc_h_pr()
333 srcu_read_unlock(&vcpu->kvm->srcu, idx); in kvmppc_h_pr()
336 kvmppc_set_gpr(vcpu, 3, 0); in kvmppc_h_pr()
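Finally, the kvmppc_h_pr() matches outline the top-level dispatcher: a guard against hcalls that userspace has not enabled (the enabled_hcalls bitmap, indexed by cmd/4 because hcall numbers are multiples of 4), a switch over the hcall number, inline handling of H_CEDE (set MSR[EE], block the vcpu, clear the unhalt request, count a halt wakeup) and H_RTAS (only if RTAS tokens were registered, under SRCU), and EMULATE_FAIL for anything unhandled so it can be forwarded to userspace. In the sketch below the MAX_HCALL_OPCODE bound on the guard and the exact set of XICS opcodes are assumptions; everything else comes from the listed lines.

        /* Sketch of the top-level PAPR hcall dispatcher for PR KVM. */
        int kvmppc_h_pr(struct kvm_vcpu *vcpu, unsigned long cmd)
        {
                int rc, idx;

                /* hcall numbers are multiples of 4; honour the per-VM enable bitmap */
                if (cmd <= MAX_HCALL_OPCODE &&
                    !test_bit(cmd / 4, vcpu->kvm->arch.enabled_hcalls))
                        return EMULATE_FAIL;

                switch (cmd) {
                case H_ENTER:
                        return kvmppc_h_pr_enter(vcpu);
                case H_REMOVE:
                        return kvmppc_h_pr_remove(vcpu);
                case H_PROTECT:
                        return kvmppc_h_pr_protect(vcpu);
                case H_BULK_REMOVE:
                        return kvmppc_h_pr_bulk_remove(vcpu);
                case H_PUT_TCE:
                        return kvmppc_h_pr_put_tce(vcpu);
                case H_CEDE:
                        /* guest idles: enable external interrupts and block */
                        kvmppc_set_msr_fast(vcpu, kvmppc_get_msr(vcpu) | MSR_EE);
                        kvm_vcpu_block(vcpu);
                        clear_bit(KVM_REQ_UNHALT, &vcpu->requests);
                        vcpu->stat.halt_wakeup++;
                        return EMULATE_DONE;
                case H_LOGICAL_CI_LOAD:
                        return kvmppc_h_pr_logical_ci_load(vcpu);
                case H_LOGICAL_CI_STORE:
                        return kvmppc_h_pr_logical_ci_store(vcpu);
                case H_XIRR:
                case H_CPPR:
                case H_EOI:
                case H_IPI:
                case H_IPOLL:
                case H_XIRR_X:
                        if (kvmppc_xics_enabled(vcpu))
                                return kvmppc_h_pr_xics_hcall(vcpu, cmd);
                        break;
                case H_RTAS:
                        if (list_empty(&vcpu->kvm->arch.rtas_tokens))
                                break;                  /* no RTAS tokens registered */
                        idx = srcu_read_lock(&vcpu->kvm->srcu);
                        rc = kvmppc_rtas_hcall(vcpu);
                        srcu_read_unlock(&vcpu->kvm->srcu, idx);
                        if (rc)
                                break;
                        kvmppc_set_gpr(vcpu, 3, 0);     /* RTAS status: success */
                        return EMULATE_DONE;
                }

                return EMULATE_FAIL;                    /* let userspace handle it */
        }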