Lines matching refs:vcpu. Each entry shows the source line number, the matching code, and the enclosing function; "argument" marks a match in a parameter list.

77 static bool spr_allowed(struct kvm_vcpu *vcpu, enum priv_level level)  in spr_allowed()  argument
80 if (vcpu->arch.papr_enabled && (level > PRIV_SUPER)) in spr_allowed()
84 if ((kvmppc_get_msr(vcpu) & MSR_PR) && level > PRIV_PROBLEM) in spr_allowed()
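
The two tests above are the whole of the privilege gate: a PAPR guest is never allowed hypervisor-level SPRs, and problem state is only allowed problem-level ones. A minimal compilable sketch of that logic, using toy stand-ins for the kernel's kvm_vcpu and kvmppc_get_msr() (MSR_PR is the architected problem-state bit):

    #include <stdbool.h>
    #include <stdint.h>

    enum priv_level { PRIV_PROBLEM = 0, PRIV_SUPER = 1, PRIV_HYPER = 2 };

    #define MSR_PR (1UL << 14)          /* problem (user) state */

    struct vcpu_model {
        bool papr_enabled;
        uint64_t msr;
    };

    static bool spr_allowed_model(const struct vcpu_model *vcpu,
                                  enum priv_level level)
    {
        /* PAPR guests never see hypervisor-level SPRs */
        if (vcpu->papr_enabled && level > PRIV_SUPER)
            return false;
        /* problem state may only touch problem-level SPRs */
        if ((vcpu->msr & MSR_PR) && level > PRIV_PROBLEM)
            return false;
        return true;
    }
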
90 int kvmppc_core_emulate_op_pr(struct kvm_run *run, struct kvm_vcpu *vcpu, in kvmppc_core_emulate_op_pr() argument
103 if ((kvmppc_get_msr(vcpu) & MSR_LE) && in kvmppc_core_emulate_op_pr()
112 kvmppc_set_gpr(vcpu, 3, EV_UNIMPLEMENTED); in kvmppc_core_emulate_op_pr()
113 kvmppc_set_pc(vcpu, kvmppc_get_pc(vcpu) + 4); in kvmppc_core_emulate_op_pr()
121 kvmppc_set_pc(vcpu, kvmppc_get_srr0(vcpu)); in kvmppc_core_emulate_op_pr()
122 kvmppc_set_msr(vcpu, kvmppc_get_srr1(vcpu)); in kvmppc_core_emulate_op_pr()
134 kvmppc_set_gpr(vcpu, rt, kvmppc_get_msr(vcpu)); in kvmppc_core_emulate_op_pr()
138 ulong rs_val = kvmppc_get_gpr(vcpu, rs); in kvmppc_core_emulate_op_pr()
140 ulong new_msr = kvmppc_get_msr(vcpu); in kvmppc_core_emulate_op_pr()
143 kvmppc_set_msr_fast(vcpu, new_msr); in kvmppc_core_emulate_op_pr()
145 kvmppc_set_msr(vcpu, rs_val); in kvmppc_core_emulate_op_pr()
149 kvmppc_set_msr(vcpu, kvmppc_get_gpr(vcpu, rs)); in kvmppc_core_emulate_op_pr()
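
The MSR-move matches above show the mtmsrd split: with L=1 only EE and RI may change, which is why the listing uses a fast setter for the masked path and the full kvmppc_set_msr() otherwise. A sketch of that split under toy types (bit positions are architected; the L-bit test at 0x10000 matches the mtmsrd encoding):

    #include <stdint.h>

    #define MSR_EE (1UL << 15)          /* external interrupt enable */
    #define MSR_RI (1UL << 1)           /* recoverable interrupt */

    struct vcpu_msr_model {
        uint64_t msr;
        uint64_t gpr[32];
    };

    static void emulate_mtmsrd(struct vcpu_msr_model *vcpu, uint32_t inst,
                               int rs)
    {
        uint64_t rs_val = vcpu->gpr[rs];

        if (inst & 0x10000) {           /* L=1: partial, "fast" update */
            uint64_t new_msr = vcpu->msr & ~(MSR_EE | MSR_RI);
            vcpu->msr = new_msr | (rs_val & (MSR_EE | MSR_RI));
        } else {                        /* L=0: full replacement */
            vcpu->msr = rs_val;
        }
    }
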
156 if (vcpu->arch.mmu.mfsrin) { in kvmppc_core_emulate_op_pr()
158 sr = vcpu->arch.mmu.mfsrin(vcpu, srnum); in kvmppc_core_emulate_op_pr()
159 kvmppc_set_gpr(vcpu, rt, sr); in kvmppc_core_emulate_op_pr()
167 srnum = (kvmppc_get_gpr(vcpu, rb) >> 28) & 0xf; in kvmppc_core_emulate_op_pr()
168 if (vcpu->arch.mmu.mfsrin) { in kvmppc_core_emulate_op_pr()
170 sr = vcpu->arch.mmu.mfsrin(vcpu, srnum); in kvmppc_core_emulate_op_pr()
171 kvmppc_set_gpr(vcpu, rt, sr); in kvmppc_core_emulate_op_pr()
176 vcpu->arch.mmu.mtsrin(vcpu, in kvmppc_core_emulate_op_pr()
178 kvmppc_get_gpr(vcpu, rs)); in kvmppc_core_emulate_op_pr()
181 vcpu->arch.mmu.mtsrin(vcpu, in kvmppc_core_emulate_op_pr()
182 (kvmppc_get_gpr(vcpu, rb) >> 28) & 0xf, in kvmppc_core_emulate_op_pr()
183 kvmppc_get_gpr(vcpu, rs)); in kvmppc_core_emulate_op_pr()
189 ulong addr = kvmppc_get_gpr(vcpu, rb); in kvmppc_core_emulate_op_pr()
190 vcpu->arch.mmu.tlbie(vcpu, addr, large); in kvmppc_core_emulate_op_pr()
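
The segment-register and tlbie matches all route through the per-MMU callback table (vcpu->arch.mmu). The SR index arrives two ways: mfsr/mtsr encode it in the instruction's SR field, while mfsrin/mtsrin take the top four bits of the EA in rb. A sketch of both index computations, with field positions per the Power ISA:

    #include <stdint.h>

    static int srnum_from_inst(uint32_t inst)
    {
        return (inst >> 16) & 0xf;      /* SR field of mfsr/mtsr */
    }

    static int srnum_from_rb(uint64_t rb_val)
    {
        return (rb_val >> 28) & 0xf;    /* top nibble of the 32-bit EA */
    }
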
197 ulong cmd = kvmppc_get_gpr(vcpu, 3); in kvmppc_core_emulate_op_pr()
200 if ((kvmppc_get_msr(vcpu) & MSR_PR) || in kvmppc_core_emulate_op_pr()
201 !vcpu->arch.papr_enabled) { in kvmppc_core_emulate_op_pr()
206 if (kvmppc_h_pr(vcpu, cmd) == EMULATE_DONE) in kvmppc_core_emulate_op_pr()
211 ulong gpr = kvmppc_get_gpr(vcpu, 4 + i); in kvmppc_core_emulate_op_pr()
216 vcpu->arch.hcall_needed = 1; in kvmppc_core_emulate_op_pr()
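
The hypercall matches show the PAPR sc-1 convention: the hypercall number sits in GPR3, arguments in the GPRs that follow, and anything kvmppc_h_pr() does not complete is handed to userspace with hcall_needed set. A toy model of that flow (the handler signature and the eight-argument copy are illustrative, not the real kvm_run layout):

    #include <stdint.h>

    enum hcall_disposition { HCALL_DONE, HCALL_TO_USERSPACE };

    struct hcall_exit {
        uint64_t nr;
        uint64_t args[8];
    };

    static enum hcall_disposition emulate_hcall(const uint64_t *gpr,
                                                int (*in_kernel)(uint64_t nr),
                                                struct hcall_exit *exit)
    {
        uint64_t nr = gpr[3];           /* hypercall number in r3 */
        int i;

        if (in_kernel && in_kernel(nr) == 0)
            return HCALL_DONE;          /* handled without exiting */

        exit->nr = nr;
        for (i = 0; i < 8; i++)         /* arguments start at r4 */
            exit->args[i] = gpr[4 + i];
        return HCALL_TO_USERSPACE;      /* vcpu->arch.hcall_needed = 1 */
    }
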
224 if (!vcpu->arch.mmu.slbmte) in kvmppc_core_emulate_op_pr()
227 vcpu->arch.mmu.slbmte(vcpu, in kvmppc_core_emulate_op_pr()
228 kvmppc_get_gpr(vcpu, rs), in kvmppc_core_emulate_op_pr()
229 kvmppc_get_gpr(vcpu, rb)); in kvmppc_core_emulate_op_pr()
232 if (!vcpu->arch.mmu.slbie) in kvmppc_core_emulate_op_pr()
235 vcpu->arch.mmu.slbie(vcpu, in kvmppc_core_emulate_op_pr()
236 kvmppc_get_gpr(vcpu, rb)); in kvmppc_core_emulate_op_pr()
239 if (!vcpu->arch.mmu.slbia) in kvmppc_core_emulate_op_pr()
242 vcpu->arch.mmu.slbia(vcpu); in kvmppc_core_emulate_op_pr()
245 if (!vcpu->arch.mmu.slbmfee) { in kvmppc_core_emulate_op_pr()
250 rb_val = kvmppc_get_gpr(vcpu, rb); in kvmppc_core_emulate_op_pr()
251 t = vcpu->arch.mmu.slbmfee(vcpu, rb_val); in kvmppc_core_emulate_op_pr()
252 kvmppc_set_gpr(vcpu, rt, t); in kvmppc_core_emulate_op_pr()
256 if (!vcpu->arch.mmu.slbmfev) { in kvmppc_core_emulate_op_pr()
261 rb_val = kvmppc_get_gpr(vcpu, rb); in kvmppc_core_emulate_op_pr()
262 t = vcpu->arch.mmu.slbmfev(vcpu, rb_val); in kvmppc_core_emulate_op_pr()
263 kvmppc_set_gpr(vcpu, rt, t); in kvmppc_core_emulate_op_pr()
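
Every SLB op in the listing starts with the same null-check on the MMU callback, because a 32-bit Book3S MMU backend has no SLB and leaves those pointers unset. A sketch of the guard pattern for slbmte, with simplified stand-ins for the ops table and the EMULATE_* results:

    #include <stddef.h>
    #include <stdint.h>

    enum emu_result { EMULATE_DONE, EMULATE_FAIL };

    struct mmu_ops_model {
        void (*slbmte)(void *vcpu, uint64_t rs_val, uint64_t rb_val);
    };

    static enum emu_result emulate_slbmte(void *vcpu,
                                          const struct mmu_ops_model *mmu,
                                          uint64_t rs_val, uint64_t rb_val)
    {
        if (!mmu->slbmte)
            return EMULATE_FAIL;        /* no SLB on this MMU flavour */

        mmu->slbmte(vcpu, rs_val, rb_val);
        return EMULATE_DONE;
    }
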
271 ulong rb_val = kvmppc_get_gpr(vcpu, rb); in kvmppc_core_emulate_op_pr()
279 ra_val = kvmppc_get_gpr(vcpu, ra); in kvmppc_core_emulate_op_pr()
282 if (!(kvmppc_get_msr(vcpu) & MSR_SF)) in kvmppc_core_emulate_op_pr()
286 r = kvmppc_st(vcpu, &addr, 32, zeros, true); in kvmppc_core_emulate_op_pr()
289 kvmppc_set_dar(vcpu, vaddr); in kvmppc_core_emulate_op_pr()
290 vcpu->arch.fault_dar = vaddr; in kvmppc_core_emulate_op_pr()
298 kvmppc_set_dsisr(vcpu, dsisr); in kvmppc_core_emulate_op_pr()
299 vcpu->arch.fault_dsisr = dsisr; in kvmppc_core_emulate_op_pr()
301 kvmppc_book3s_queue_irqprio(vcpu, in kvmppc_core_emulate_op_pr()
316 emulated = kvmppc_emulate_paired_single(run, vcpu); in kvmppc_core_emulate_op_pr()
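
The dcbz matches trace the 32-byte-cacheline emulation: form the EA from ra/rb, truncate it when MSR.SF is clear, attempt the zero-store through kvmppc_st(), and on failure record DAR/DSISR and queue a data storage interrupt. A toy version under those assumptions (the guest store is modelled as a callback; DSISR_ISSTORE matches the kernel constant, the remaining DSISR bits are left out):

    #include <stdint.h>

    #define DSISR_ISSTORE 0x02000000u

    struct fault_info {
        uint64_t dar;
        uint32_t dsisr;
        int dsi_pending;
    };

    /* returns 0 on success, -1 if the guest store faulted */
    typedef int (*guest_store_fn)(uint64_t ea, const void *buf, int len);

    static void emulate_dcbz32(uint64_t ra_val, uint64_t rb_val, int msr_sf,
                               guest_store_fn st, struct fault_info *f)
    {
        uint8_t zeros[32] = { 0 };
        uint64_t addr = ra_val + rb_val;

        if (!msr_sf)
            addr &= 0xffffffffUL;       /* 32-bit mode truncates the EA */

        if (st(addr, zeros, sizeof(zeros)) < 0) {
            f->dar = addr;              /* kvmppc_set_dar() in the listing */
            f->dsisr = DSISR_ISSTORE;   /* store-type fault */
            f->dsi_pending = 1;         /* kvmppc_book3s_queue_irqprio() */
        }
    }
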
321 void kvmppc_set_bat(struct kvm_vcpu *vcpu, struct kvmppc_bat *bat, bool upper, in kvmppc_set_bat() argument
341 static struct kvmppc_bat *kvmppc_find_bat(struct kvm_vcpu *vcpu, int sprn) in kvmppc_find_bat() argument
343 struct kvmppc_vcpu_book3s *vcpu_book3s = to_book3s(vcpu); in kvmppc_find_bat()
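
kvmppc_find_bat() maps a BAT SPR number to one of the vcpu_book3s BAT structs, and the !(sprn % 2) test further down the listing picks the upper or lower half of the pair: even SPR numbers address the upper word, odd the lower. A minimal sketch of the half-selection (field comments follow the classic 32-bit Book3S BAT layout); note the listing also shows the shadow MMU being flushed after a BAT write, since stale translations must not survive:

    #include <stdbool.h>
    #include <stdint.h>

    struct bat_model {
        uint32_t batu;                  /* BEPI, block length, Vs/Vp */
        uint32_t batl;                  /* BRPN, WIMG, PP */
    };

    static void set_bat_half(struct bat_model *bat, bool upper, uint32_t val)
    {
        if (upper)
            bat->batu = val;
        else
            bat->batl = val;
    }
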
366 int kvmppc_core_emulate_mtspr_pr(struct kvm_vcpu *vcpu, int sprn, ulong spr_val) in kvmppc_core_emulate_mtspr_pr() argument
372 if (!spr_allowed(vcpu, PRIV_HYPER)) in kvmppc_core_emulate_mtspr_pr()
374 to_book3s(vcpu)->sdr1 = spr_val; in kvmppc_core_emulate_mtspr_pr()
377 kvmppc_set_dsisr(vcpu, spr_val); in kvmppc_core_emulate_mtspr_pr()
380 kvmppc_set_dar(vcpu, spr_val); in kvmppc_core_emulate_mtspr_pr()
383 to_book3s(vcpu)->hior = spr_val; in kvmppc_core_emulate_mtspr_pr()
390 struct kvmppc_bat *bat = kvmppc_find_bat(vcpu, sprn); in kvmppc_core_emulate_mtspr_pr()
392 kvmppc_set_bat(vcpu, bat, !(sprn % 2), (u32)spr_val); in kvmppc_core_emulate_mtspr_pr()
395 kvmppc_mmu_pte_flush(vcpu, 0, 0); in kvmppc_core_emulate_mtspr_pr()
396 kvmppc_mmu_flush_segments(vcpu); in kvmppc_core_emulate_mtspr_pr()
400 to_book3s(vcpu)->hid[0] = spr_val; in kvmppc_core_emulate_mtspr_pr()
403 to_book3s(vcpu)->hid[1] = spr_val; in kvmppc_core_emulate_mtspr_pr()
406 to_book3s(vcpu)->hid[2] = spr_val; in kvmppc_core_emulate_mtspr_pr()
409 to_book3s(vcpu)->hid[2] = spr_val; in kvmppc_core_emulate_mtspr_pr()
411 switch (vcpu->arch.pvr) { in kvmppc_core_emulate_mtspr_pr()
421 if (vcpu->arch.hflags & BOOK3S_HFLAG_NATIVE_PS) { in kvmppc_core_emulate_mtspr_pr()
424 vcpu->arch.hflags |= BOOK3S_HFLAG_PAIRED_SINGLE; in kvmppc_core_emulate_mtspr_pr()
425 kvmppc_giveup_ext(vcpu, MSR_FP); in kvmppc_core_emulate_mtspr_pr()
427 vcpu->arch.hflags &= ~BOOK3S_HFLAG_PAIRED_SINGLE; in kvmppc_core_emulate_mtspr_pr()
434 to_book3s(vcpu)->hid[4] = spr_val; in kvmppc_core_emulate_mtspr_pr()
437 to_book3s(vcpu)->hid[5] = spr_val; in kvmppc_core_emulate_mtspr_pr()
439 if (vcpu->arch.mmu.is_dcbz32(vcpu) && in kvmppc_core_emulate_mtspr_pr()
441 vcpu->arch.hflags |= BOOK3S_HFLAG_DCBZ32; in kvmppc_core_emulate_mtspr_pr()
451 to_book3s(vcpu)->gqr[sprn - SPRN_GQR0] = spr_val; in kvmppc_core_emulate_mtspr_pr()
455 kvmppc_set_fscr(vcpu, spr_val); in kvmppc_core_emulate_mtspr_pr()
458 vcpu->arch.bescr = spr_val; in kvmppc_core_emulate_mtspr_pr()
461 vcpu->arch.ebbhr = spr_val; in kvmppc_core_emulate_mtspr_pr()
464 vcpu->arch.ebbrr = spr_val; in kvmppc_core_emulate_mtspr_pr()
468 vcpu->arch.tfhar = spr_val; in kvmppc_core_emulate_mtspr_pr()
471 vcpu->arch.texasr = spr_val; in kvmppc_core_emulate_mtspr_pr()
474 vcpu->arch.tfiar = spr_val; in kvmppc_core_emulate_mtspr_pr()
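
The mtspr path is one large dispatch on sprn: hypervisor-level targets are first gated through spr_allowed(), then the value lands in the matching piece of shadow state (to_book3s() fields or vcpu->arch). A compilable model of the shape of that dispatch, covering only SDR1/DSISR/DAR (the SPR numbers are the architected ones; the -1 return stands in for injecting a privilege fault):

    #include <stdbool.h>
    #include <stdint.h>

    enum { SPRN_SDR1_M = 25, SPRN_DSISR_M = 18, SPRN_DAR_M = 19 };

    struct book3s_shadow {
        uint64_t sdr1;
        uint32_t dsisr;
        uint64_t dar;
    };

    static int emulate_mtspr_model(struct book3s_shadow *s, bool hyper_ok,
                                   int sprn, uint64_t val)
    {
        switch (sprn) {
        case SPRN_SDR1_M:
            if (!hyper_ok)              /* spr_allowed(vcpu, PRIV_HYPER) */
                return -1;
            s->sdr1 = val;
            break;
        case SPRN_DSISR_M:
            s->dsisr = (uint32_t)val;
            break;
        case SPRN_DAR_M:
            s->dar = val;
            break;
        default:
            break;                      /* other SPRs handled or ignored */
        }
        return 0;
    }
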
515 int kvmppc_core_emulate_mfspr_pr(struct kvm_vcpu *vcpu, int sprn, ulong *spr_val) in kvmppc_core_emulate_mfspr_pr() argument
525 struct kvmppc_bat *bat = kvmppc_find_bat(vcpu, sprn); in kvmppc_core_emulate_mfspr_pr()
535 if (!spr_allowed(vcpu, PRIV_HYPER)) in kvmppc_core_emulate_mfspr_pr()
537 *spr_val = to_book3s(vcpu)->sdr1; in kvmppc_core_emulate_mfspr_pr()
540 *spr_val = kvmppc_get_dsisr(vcpu); in kvmppc_core_emulate_mfspr_pr()
543 *spr_val = kvmppc_get_dar(vcpu); in kvmppc_core_emulate_mfspr_pr()
546 *spr_val = to_book3s(vcpu)->hior; in kvmppc_core_emulate_mfspr_pr()
549 *spr_val = to_book3s(vcpu)->hid[0]; in kvmppc_core_emulate_mfspr_pr()
552 *spr_val = to_book3s(vcpu)->hid[1]; in kvmppc_core_emulate_mfspr_pr()
556 *spr_val = to_book3s(vcpu)->hid[2]; in kvmppc_core_emulate_mfspr_pr()
560 *spr_val = to_book3s(vcpu)->hid[4]; in kvmppc_core_emulate_mfspr_pr()
563 *spr_val = to_book3s(vcpu)->hid[5]; in kvmppc_core_emulate_mfspr_pr()
573 *spr_val = vcpu->arch.purr; in kvmppc_core_emulate_mfspr_pr()
579 *spr_val = vcpu->arch.spurr; in kvmppc_core_emulate_mfspr_pr()
582 *spr_val = vcpu->arch.vtb; in kvmppc_core_emulate_mfspr_pr()
585 *spr_val = vcpu->arch.ic; in kvmppc_core_emulate_mfspr_pr()
595 *spr_val = to_book3s(vcpu)->gqr[sprn - SPRN_GQR0]; in kvmppc_core_emulate_mfspr_pr()
599 *spr_val = vcpu->arch.fscr; in kvmppc_core_emulate_mfspr_pr()
602 *spr_val = vcpu->arch.bescr; in kvmppc_core_emulate_mfspr_pr()
605 *spr_val = vcpu->arch.ebbhr; in kvmppc_core_emulate_mfspr_pr()
608 *spr_val = vcpu->arch.ebbrr; in kvmppc_core_emulate_mfspr_pr()
612 *spr_val = vcpu->arch.tfhar; in kvmppc_core_emulate_mfspr_pr()
615 *spr_val = vcpu->arch.texasr; in kvmppc_core_emulate_mfspr_pr()
618 *spr_val = vcpu->arch.tfiar; in kvmppc_core_emulate_mfspr_pr()
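
mfspr is the mirror image: values come back from the same shadow state, never from hardware, and a number of SPRs that PR KVM does not model simply read as zero. A matching self-contained sketch with the same toy types as the mtspr model:

    #include <stdbool.h>
    #include <stdint.h>

    enum { SPRN_SDR1_R = 25, SPRN_DSISR_R = 18, SPRN_DAR_R = 19 };

    struct book3s_shadow_r {
        uint64_t sdr1;
        uint32_t dsisr;
        uint64_t dar;
    };

    static int emulate_mfspr_model(const struct book3s_shadow_r *s,
                                   bool hyper_ok, int sprn, uint64_t *spr_val)
    {
        switch (sprn) {
        case SPRN_SDR1_R:
            if (!hyper_ok)
                return -1;              /* privilege fault */
            *spr_val = s->sdr1;
            break;
        case SPRN_DSISR_R:
            *spr_val = s->dsisr;
            break;
        case SPRN_DAR_R:
            *spr_val = s->dar;
            break;
        default:
            *spr_val = 0;               /* unmodelled SPRs read as zero */
            break;
        }
        return 0;
    }
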
659 u32 kvmppc_alignment_dsisr(struct kvm_vcpu *vcpu, unsigned int inst) in kvmppc_alignment_dsisr() argument
664 ulong kvmppc_alignment_dar(struct kvm_vcpu *vcpu, unsigned int inst) in kvmppc_alignment_dar() argument
670 return vcpu->arch.fault_dar; in kvmppc_alignment_dar()
682 dar = kvmppc_get_gpr(vcpu, ra); in kvmppc_alignment_dar()
687 dar = kvmppc_get_gpr(vcpu, ra); in kvmppc_alignment_dar()
688 dar += kvmppc_get_gpr(vcpu, rb); in kvmppc_alignment_dar()
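
When the hardware does not supply fault_dar directly, kvmppc_alignment_dar() re-derives the effective address by decoding the instruction: D-form adds a sign-extended 16-bit displacement to (ra), X-form adds (rb), and ra == 0 means a zero base register. A sketch of both cases over a plain register file:

    #include <stdint.h>

    static uint64_t alignment_ea_dform(const uint64_t *gpr, int ra, int16_t d)
    {
        uint64_t base = ra ? gpr[ra] : 0;
        return base + (int64_t)d;       /* displacement sign-extends */
    }

    static uint64_t alignment_ea_xform(const uint64_t *gpr, int ra, int rb)
    {
        uint64_t base = ra ? gpr[ra] : 0;
        return base + gpr[rb];
    }
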