Lines matching refs:vcpu

Each entry below reads "<source line> <matched text> in <enclosing function>"; matches where vcpu appears as a function parameter carry a trailing "argument" tag. The function names (kvmppc_core_emulate_op_pr and friends) place these matches in the Book3S PR KVM instruction and SPR emulation code, apparently arch/powerpc/kvm/book3s_emulate.c.

76 static bool spr_allowed(struct kvm_vcpu *vcpu, enum priv_level level)  in spr_allowed()  argument
79 if (vcpu->arch.papr_enabled && (level > PRIV_SUPER)) in spr_allowed()
83 if ((kvmppc_get_msr(vcpu) & MSR_PR) && level > PRIV_PROBLEM) in spr_allowed()
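
Taken together, the three spr_allowed() matches reconstruct the whole privilege filter. A minimal sketch, assuming the conventional ordering PRIV_PROBLEM < PRIV_SUPER < PRIV_HYPER; the enum values themselves are not part of the listing:

    enum priv_level { PRIV_PROBLEM = 0, PRIV_SUPER = 1, PRIV_HYPER = 2 };

    static bool spr_allowed(struct kvm_vcpu *vcpu, enum priv_level level)
    {
            /* PAPR guests run deprivileged: no hypervisor resources. */
            if (vcpu->arch.papr_enabled && (level > PRIV_SUPER))
                    return false;

            /* Problem state (MSR_PR set) may only touch problem-state SPRs. */
            if ((kvmppc_get_msr(vcpu) & MSR_PR) && level > PRIV_PROBLEM)
                    return false;

            return true;
    }
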
89 int kvmppc_core_emulate_op_pr(struct kvm_run *run, struct kvm_vcpu *vcpu, in kvmppc_core_emulate_op_pr() argument
102 if ((kvmppc_get_msr(vcpu) & MSR_LE) && in kvmppc_core_emulate_op_pr()
111 kvmppc_set_gpr(vcpu, 3, EV_UNIMPLEMENTED); in kvmppc_core_emulate_op_pr()
112 kvmppc_set_pc(vcpu, kvmppc_get_pc(vcpu) + 4); in kvmppc_core_emulate_op_pr()
120 kvmppc_set_pc(vcpu, kvmppc_get_srr0(vcpu)); in kvmppc_core_emulate_op_pr()
121 kvmppc_set_msr(vcpu, kvmppc_get_srr1(vcpu)); in kvmppc_core_emulate_op_pr()
133 kvmppc_set_gpr(vcpu, rt, kvmppc_get_msr(vcpu)); in kvmppc_core_emulate_op_pr()
137 ulong rs_val = kvmppc_get_gpr(vcpu, rs); in kvmppc_core_emulate_op_pr()
139 ulong new_msr = kvmppc_get_msr(vcpu); in kvmppc_core_emulate_op_pr()
142 kvmppc_set_msr_fast(vcpu, new_msr); in kvmppc_core_emulate_op_pr()
144 kvmppc_set_msr(vcpu, rs_val); in kvmppc_core_emulate_op_pr()
148 kvmppc_set_msr(vcpu, kvmppc_get_gpr(vcpu, rs)); in kvmppc_core_emulate_op_pr()
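
Source lines 120-148 cover interrupt return and the MSR moves: rfi reloads PC and MSR from SRR0/SRR1, while mtmsrd with the L bit set may only change MSR[RI] and MSR[EE], which is why a cheap "fast" setter appears on line 142. A sketch of that split, assuming the L bit sits at mask 0x10000 in the instruction image as on real hardware:

    ulong rs_val = kvmppc_get_gpr(vcpu, rs);

    if (inst & 0x10000) {                   /* mtmsrd L=1 */
            ulong new_msr = kvmppc_get_msr(vcpu);
            new_msr &= ~(MSR_RI | MSR_EE);
            new_msr |= rs_val & (MSR_RI | MSR_EE);
            kvmppc_set_msr_fast(vcpu, new_msr);
    } else {
            kvmppc_set_msr(vcpu, rs_val);   /* full MSR write */
    }
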
155 if (vcpu->arch.mmu.mfsrin) { in kvmppc_core_emulate_op_pr()
157 sr = vcpu->arch.mmu.mfsrin(vcpu, srnum); in kvmppc_core_emulate_op_pr()
158 kvmppc_set_gpr(vcpu, rt, sr); in kvmppc_core_emulate_op_pr()
166 srnum = (kvmppc_get_gpr(vcpu, rb) >> 28) & 0xf; in kvmppc_core_emulate_op_pr()
167 if (vcpu->arch.mmu.mfsrin) { in kvmppc_core_emulate_op_pr()
169 sr = vcpu->arch.mmu.mfsrin(vcpu, srnum); in kvmppc_core_emulate_op_pr()
170 kvmppc_set_gpr(vcpu, rt, sr); in kvmppc_core_emulate_op_pr()
175 vcpu->arch.mmu.mtsrin(vcpu, in kvmppc_core_emulate_op_pr()
177 kvmppc_get_gpr(vcpu, rs)); in kvmppc_core_emulate_op_pr()
180 vcpu->arch.mmu.mtsrin(vcpu, in kvmppc_core_emulate_op_pr()
181 (kvmppc_get_gpr(vcpu, rb) >> 28) & 0xf, in kvmppc_core_emulate_op_pr()
182 kvmppc_get_gpr(vcpu, rs)); in kvmppc_core_emulate_op_pr()
188 ulong addr = kvmppc_get_gpr(vcpu, rb); in kvmppc_core_emulate_op_pr()
189 vcpu->arch.mmu.tlbie(vcpu, addr, large); in kvmppc_core_emulate_op_pr()
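
The mfsr/mfsrin/mtsr/mtsrin/tlbie matches all dispatch through per-MMU callbacks in vcpu->arch.mmu, each guarded against a missing implementation. For the *rin forms the segment index comes from the top nibble of (rb), as source line 166 shows; reassembled from the matches:

    srnum = (kvmppc_get_gpr(vcpu, rb) >> 28) & 0xf;   /* segment = top nibble */
    if (vcpu->arch.mmu.mfsrin) {
            u32 sr = vcpu->arch.mmu.mfsrin(vcpu, srnum);
            kvmppc_set_gpr(vcpu, rt, sr);
    }
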
196 ulong cmd = kvmppc_get_gpr(vcpu, 3); in kvmppc_core_emulate_op_pr()
199 if ((kvmppc_get_msr(vcpu) & MSR_PR) || in kvmppc_core_emulate_op_pr()
200 !vcpu->arch.papr_enabled) { in kvmppc_core_emulate_op_pr()
205 if (kvmppc_h_pr(vcpu, cmd) == EMULATE_DONE) in kvmppc_core_emulate_op_pr()
210 ulong gpr = kvmppc_get_gpr(vcpu, 4 + i); in kvmppc_core_emulate_op_pr()
215 vcpu->arch.hcall_needed = 1; in kvmppc_core_emulate_op_pr()
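
Source lines 196-215 are the sc-as-hypercall path: reject the call if the guest is in problem state or PAPR is disabled, try the in-kernel handler, and otherwise bounce the call to userspace. A sketch of the case body for that handoff, assuming the standard KVM_EXIT_PAPR_HCALL exit and nine argument slots in run->papr_hcall:

    ulong cmd = kvmppc_get_gpr(vcpu, 3);
    int i;

    /* Try the in-kernel hypercall handler first. */
    if (kvmppc_h_pr(vcpu, cmd) == EMULATE_DONE)
            break;

    /* Not handled in-kernel: hand the call to userspace. */
    run->papr_hcall.nr = cmd;
    for (i = 0; i < 9; ++i)
            run->papr_hcall.args[i] = kvmppc_get_gpr(vcpu, 4 + i);
    run->exit_reason = KVM_EXIT_PAPR_HCALL;
    vcpu->arch.hcall_needed = 1;
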
223 if (!vcpu->arch.mmu.slbmte) in kvmppc_core_emulate_op_pr()
226 vcpu->arch.mmu.slbmte(vcpu, in kvmppc_core_emulate_op_pr()
227 kvmppc_get_gpr(vcpu, rs), in kvmppc_core_emulate_op_pr()
228 kvmppc_get_gpr(vcpu, rb)); in kvmppc_core_emulate_op_pr()
231 if (!vcpu->arch.mmu.slbie) in kvmppc_core_emulate_op_pr()
234 vcpu->arch.mmu.slbie(vcpu, in kvmppc_core_emulate_op_pr()
235 kvmppc_get_gpr(vcpu, rb)); in kvmppc_core_emulate_op_pr()
238 if (!vcpu->arch.mmu.slbia) in kvmppc_core_emulate_op_pr()
241 vcpu->arch.mmu.slbia(vcpu); in kvmppc_core_emulate_op_pr()
244 if (!vcpu->arch.mmu.slbmfee) { in kvmppc_core_emulate_op_pr()
249 rb_val = kvmppc_get_gpr(vcpu, rb); in kvmppc_core_emulate_op_pr()
250 t = vcpu->arch.mmu.slbmfee(vcpu, rb_val); in kvmppc_core_emulate_op_pr()
251 kvmppc_set_gpr(vcpu, rt, t); in kvmppc_core_emulate_op_pr()
255 if (!vcpu->arch.mmu.slbmfev) { in kvmppc_core_emulate_op_pr()
260 rb_val = kvmppc_get_gpr(vcpu, rb); in kvmppc_core_emulate_op_pr()
261 t = vcpu->arch.mmu.slbmfev(vcpu, rb_val); in kvmppc_core_emulate_op_pr()
262 kvmppc_set_gpr(vcpu, rt, t); in kvmppc_core_emulate_op_pr()
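
All five SLB ops (source lines 223-262) share one guard pattern: bail out when the vcpu's MMU model does not provide the callback, otherwise forward the register operands. A sketch of one arm; the EMULATE_FAIL fallback is an assumption inferred from the shape of the guards:

    case OP_31_XOP_SLBMTE:
            if (!vcpu->arch.mmu.slbmte)
                    return EMULATE_FAIL;            /* no SLB on this MMU */
            vcpu->arch.mmu.slbmte(vcpu,
                                  kvmppc_get_gpr(vcpu, rs),
                                  kvmppc_get_gpr(vcpu, rb));
            break;
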
270 ulong rb_val = kvmppc_get_gpr(vcpu, rb); in kvmppc_core_emulate_op_pr()
278 ra_val = kvmppc_get_gpr(vcpu, ra); in kvmppc_core_emulate_op_pr()
281 if (!(kvmppc_get_msr(vcpu) & MSR_SF)) in kvmppc_core_emulate_op_pr()
285 r = kvmppc_st(vcpu, &addr, 32, zeros, true); in kvmppc_core_emulate_op_pr()
288 kvmppc_set_dar(vcpu, vaddr); in kvmppc_core_emulate_op_pr()
289 vcpu->arch.fault_dar = vaddr; in kvmppc_core_emulate_op_pr()
297 kvmppc_set_dsisr(vcpu, dsisr); in kvmppc_core_emulate_op_pr()
298 vcpu->arch.fault_dsisr = dsisr; in kvmppc_core_emulate_op_pr()
300 kvmppc_book3s_queue_irqprio(vcpu, in kvmppc_core_emulate_op_pr()
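
Source lines 270-300 reconstruct the dcbz emulation: compute the effective address, truncate it in 32-bit mode (!MSR_SF), store a line of zeroes through kvmppc_st(), and on failure load DAR/DSISR and queue a data storage interrupt. A sketch, with the 32-byte line size inferred from the 32-byte store on line 285 and the exact DSISR flag choice an assumption:

    ulong rb_val = kvmppc_get_gpr(vcpu, rb);
    ulong ra_val = ra ? kvmppc_get_gpr(vcpu, ra) : 0;
    u32 zeros[8] = { 0 };
    ulong addr, vaddr;
    int r;

    addr = (ra_val + rb_val) & ~31UL;           /* align to the 32-byte line */
    if (!(kvmppc_get_msr(vcpu) & MSR_SF))
            addr &= 0xffffffff;                 /* 32-bit mode truncation */
    vaddr = addr;

    r = kvmppc_st(vcpu, &addr, 32, zeros, true);
    if (r < 0) {
            u32 dsisr = DSISR_NOHPTE;           /* fault cause; flags assumed */

            /* Reflect the fault back as a guest data storage interrupt. */
            kvmppc_set_dar(vcpu, vaddr);
            vcpu->arch.fault_dar = vaddr;
            kvmppc_set_dsisr(vcpu, dsisr);
            vcpu->arch.fault_dsisr = dsisr;
            kvmppc_book3s_queue_irqprio(vcpu, BOOK3S_INTERRUPT_DATA_STORAGE);
    }
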
315 emulated = kvmppc_emulate_paired_single(run, vcpu); in kvmppc_core_emulate_op_pr()
320 void kvmppc_set_bat(struct kvm_vcpu *vcpu, struct kvmppc_bat *bat, bool upper, in kvmppc_set_bat() argument
340 static struct kvmppc_bat *kvmppc_find_bat(struct kvm_vcpu *vcpu, int sprn) in kvmppc_find_bat() argument
342 struct kvmppc_vcpu_book3s *vcpu_book3s = to_book3s(vcpu); in kvmppc_find_bat()
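
kvmppc_find_bat() maps an SPR number onto one of the shadow BAT entries in the book3s struct; each BAT occupies an upper/lower SPR pair, so the index is the offset from the first BAT SPR divided by two. A plausible reconstruction; the exact case ranges are assumptions:

    static struct kvmppc_bat *kvmppc_find_bat(struct kvm_vcpu *vcpu, int sprn)
    {
            struct kvmppc_vcpu_book3s *vcpu_book3s = to_book3s(vcpu);

            switch (sprn) {
            case SPRN_IBAT0U ... SPRN_IBAT3L:
                    return &vcpu_book3s->ibat[(sprn - SPRN_IBAT0U) / 2];
            case SPRN_IBAT4U ... SPRN_IBAT7L:
                    return &vcpu_book3s->ibat[4 + (sprn - SPRN_IBAT4U) / 2];
            case SPRN_DBAT0U ... SPRN_DBAT3L:
                    return &vcpu_book3s->dbat[(sprn - SPRN_DBAT0U) / 2];
            case SPRN_DBAT4U ... SPRN_DBAT7L:
                    return &vcpu_book3s->dbat[4 + (sprn - SPRN_DBAT4U) / 2];
            }
            return NULL;    /* unreachable for the SPRs routed here */
    }
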
365 int kvmppc_core_emulate_mtspr_pr(struct kvm_vcpu *vcpu, int sprn, ulong spr_val) in kvmppc_core_emulate_mtspr_pr() argument
371 if (!spr_allowed(vcpu, PRIV_HYPER)) in kvmppc_core_emulate_mtspr_pr()
373 to_book3s(vcpu)->sdr1 = spr_val; in kvmppc_core_emulate_mtspr_pr()
376 kvmppc_set_dsisr(vcpu, spr_val); in kvmppc_core_emulate_mtspr_pr()
379 kvmppc_set_dar(vcpu, spr_val); in kvmppc_core_emulate_mtspr_pr()
382 to_book3s(vcpu)->hior = spr_val; in kvmppc_core_emulate_mtspr_pr()
389 struct kvmppc_bat *bat = kvmppc_find_bat(vcpu, sprn); in kvmppc_core_emulate_mtspr_pr()
391 kvmppc_set_bat(vcpu, bat, !(sprn % 2), (u32)spr_val); in kvmppc_core_emulate_mtspr_pr()
394 kvmppc_mmu_pte_flush(vcpu, 0, 0); in kvmppc_core_emulate_mtspr_pr()
395 kvmppc_mmu_flush_segments(vcpu); in kvmppc_core_emulate_mtspr_pr()
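
The BAT branch of mtspr (source lines 389-395) writes the upper BAT word for even SPR numbers and the lower word for odd ones, then flushes cached translations; reassembled from the matches:

    struct kvmppc_bat *bat = kvmppc_find_bat(vcpu, sprn);

    /* Even SPR number = upper BAT word, odd = lower. */
    kvmppc_set_bat(vcpu, bat, !(sprn % 2), (u32)spr_val);

    /* BAT writes are rare; flushing everything keeps this path simple. */
    kvmppc_mmu_pte_flush(vcpu, 0, 0);
    kvmppc_mmu_flush_segments(vcpu);
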
399 to_book3s(vcpu)->hid[0] = spr_val; in kvmppc_core_emulate_mtspr_pr()
402 to_book3s(vcpu)->hid[1] = spr_val; in kvmppc_core_emulate_mtspr_pr()
405 to_book3s(vcpu)->hid[2] = spr_val; in kvmppc_core_emulate_mtspr_pr()
408 to_book3s(vcpu)->hid[2] = spr_val; in kvmppc_core_emulate_mtspr_pr()
410 switch (vcpu->arch.pvr) { in kvmppc_core_emulate_mtspr_pr()
420 if (vcpu->arch.hflags & BOOK3S_HFLAG_NATIVE_PS) { in kvmppc_core_emulate_mtspr_pr()
423 vcpu->arch.hflags |= BOOK3S_HFLAG_PAIRED_SINGLE; in kvmppc_core_emulate_mtspr_pr()
424 kvmppc_giveup_ext(vcpu, MSR_FP); in kvmppc_core_emulate_mtspr_pr()
426 vcpu->arch.hflags &= ~BOOK3S_HFLAG_PAIRED_SINGLE; in kvmppc_core_emulate_mtspr_pr()
433 to_book3s(vcpu)->hid[4] = spr_val; in kvmppc_core_emulate_mtspr_pr()
436 to_book3s(vcpu)->hid[5] = spr_val; in kvmppc_core_emulate_mtspr_pr()
438 if (vcpu->arch.mmu.is_dcbz32(vcpu) && in kvmppc_core_emulate_mtspr_pr()
440 vcpu->arch.hflags |= BOOK3S_HFLAG_DCBZ32; in kvmppc_core_emulate_mtspr_pr()
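
Source lines 438-440 show the HID5 write toggling 32-byte dcbz handling: if the guest MMU reports dcbz-32 behaviour, the DCBZ32 hflag is set so dcbz can be emulated with the smaller line size. A sketch; the mfmsr()/MSR_HV host-mode check follows the usual in-tree pattern and should be treated as an assumption:

    case SPRN_HID5:
            to_book3s(vcpu)->hid[5] = spr_val;
            /* A guest HID5 write can change is_dcbz32(). */
            if (vcpu->arch.mmu.is_dcbz32(vcpu) &&
                (mfmsr() & MSR_HV))
                    vcpu->arch.hflags |= BOOK3S_HFLAG_DCBZ32;
            break;
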
450 to_book3s(vcpu)->gqr[sprn - SPRN_GQR0] = spr_val; in kvmppc_core_emulate_mtspr_pr()
454 kvmppc_set_fscr(vcpu, spr_val); in kvmppc_core_emulate_mtspr_pr()
457 vcpu->arch.bescr = spr_val; in kvmppc_core_emulate_mtspr_pr()
460 vcpu->arch.ebbhr = spr_val; in kvmppc_core_emulate_mtspr_pr()
463 vcpu->arch.ebbrr = spr_val; in kvmppc_core_emulate_mtspr_pr()
467 vcpu->arch.tfhar = spr_val; in kvmppc_core_emulate_mtspr_pr()
470 vcpu->arch.texasr = spr_val; in kvmppc_core_emulate_mtspr_pr()
473 vcpu->arch.tfiar = spr_val; in kvmppc_core_emulate_mtspr_pr()
514 int kvmppc_core_emulate_mfspr_pr(struct kvm_vcpu *vcpu, int sprn, ulong *spr_val) in kvmppc_core_emulate_mfspr_pr() argument
524 struct kvmppc_bat *bat = kvmppc_find_bat(vcpu, sprn); in kvmppc_core_emulate_mfspr_pr()
534 if (!spr_allowed(vcpu, PRIV_HYPER)) in kvmppc_core_emulate_mfspr_pr()
536 *spr_val = to_book3s(vcpu)->sdr1; in kvmppc_core_emulate_mfspr_pr()
539 *spr_val = kvmppc_get_dsisr(vcpu); in kvmppc_core_emulate_mfspr_pr()
542 *spr_val = kvmppc_get_dar(vcpu); in kvmppc_core_emulate_mfspr_pr()
545 *spr_val = to_book3s(vcpu)->hior; in kvmppc_core_emulate_mfspr_pr()
548 *spr_val = to_book3s(vcpu)->hid[0]; in kvmppc_core_emulate_mfspr_pr()
551 *spr_val = to_book3s(vcpu)->hid[1]; in kvmppc_core_emulate_mfspr_pr()
555 *spr_val = to_book3s(vcpu)->hid[2]; in kvmppc_core_emulate_mfspr_pr()
559 *spr_val = to_book3s(vcpu)->hid[4]; in kvmppc_core_emulate_mfspr_pr()
562 *spr_val = to_book3s(vcpu)->hid[5]; in kvmppc_core_emulate_mfspr_pr()
572 *spr_val = vcpu->arch.purr; in kvmppc_core_emulate_mfspr_pr()
578 *spr_val = vcpu->arch.spurr; in kvmppc_core_emulate_mfspr_pr()
581 *spr_val = vcpu->arch.vtb; in kvmppc_core_emulate_mfspr_pr()
584 *spr_val = vcpu->arch.ic; in kvmppc_core_emulate_mfspr_pr()
594 *spr_val = to_book3s(vcpu)->gqr[sprn - SPRN_GQR0]; in kvmppc_core_emulate_mfspr_pr()
598 *spr_val = vcpu->arch.fscr; in kvmppc_core_emulate_mfspr_pr()
601 *spr_val = vcpu->arch.bescr; in kvmppc_core_emulate_mfspr_pr()
604 *spr_val = vcpu->arch.ebbhr; in kvmppc_core_emulate_mfspr_pr()
607 *spr_val = vcpu->arch.ebbrr; in kvmppc_core_emulate_mfspr_pr()
611 *spr_val = vcpu->arch.tfhar; in kvmppc_core_emulate_mfspr_pr()
614 *spr_val = vcpu->arch.texasr; in kvmppc_core_emulate_mfspr_pr()
617 *spr_val = vcpu->arch.tfiar; in kvmppc_core_emulate_mfspr_pr()
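
The mfspr side mirrors the mtspr cases one-for-one, reading the same vcpu->arch and book3s fields but returning through the *spr_val out-parameter, e.g.:

    case SPRN_BESCR:
            *spr_val = vcpu->arch.bescr;    /* read side of line 457's write */
            break;
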
658 u32 kvmppc_alignment_dsisr(struct kvm_vcpu *vcpu, unsigned int inst) in kvmppc_alignment_dsisr() argument
663 ulong kvmppc_alignment_dar(struct kvm_vcpu *vcpu, unsigned int inst) in kvmppc_alignment_dar() argument
669 return vcpu->arch.fault_dar; in kvmppc_alignment_dar()
681 dar = kvmppc_get_gpr(vcpu, ra); in kvmppc_alignment_dar()
686 dar = kvmppc_get_gpr(vcpu, ra); in kvmppc_alignment_dar()
687 dar += kvmppc_get_gpr(vcpu, rb); in kvmppc_alignment_dar()
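
kvmppc_alignment_dar() recomputes the faulting effective address from the instruction image: D-form loads/stores add a sign-extended 16-bit displacement to (ra|0), X-form (primary opcode 31) adds (rb). The early return of vcpu->arch.fault_dar on line 669 suggests a short-circuit where the hardware DAR is already valid. A sketch of both arms, with get_op/get_ra/get_rb as the usual instruction field extractors and the opcode set trimmed for illustration:

    ulong kvmppc_alignment_dar(struct kvm_vcpu *vcpu, unsigned int inst)
    {
            ulong dar = 0;
            int ra = get_ra(inst);
            int rb = get_rb(inst);

            switch (get_op(inst)) {
            case OP_LFS:            /* D-form: EA = (ra|0) + SEXT(d) */
            case OP_STFS:
                    if (ra)
                            dar = kvmppc_get_gpr(vcpu, ra);
                    dar += (s32)((s16)inst);    /* low 16 bits = displacement */
                    break;
            case 31:                /* X-form: EA = (ra|0) + (rb) */
                    if (ra)
                            dar = kvmppc_get_gpr(vcpu, ra);
                    dar += kvmppc_get_gpr(vcpu, rb);
                    break;
            default:
                    dar = vcpu->arch.fault_dar; /* fall back to the HW value */
                    break;
            }
            return dar;
    }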