Lines matching refs:vcpu (cross-references to vcpu in the Book3S paired-singles emulation code, arch/powerpc/kvm/book3s_paired_singles.c)
161 static inline void kvmppc_sync_qpr(struct kvm_vcpu *vcpu, int rt) in kvmppc_sync_qpr() argument
163 kvm_cvt_df(&VCPU_FPR(vcpu, rt), &vcpu->arch.qpr[rt]); in kvmppc_sync_qpr()
166 static void kvmppc_inject_pf(struct kvm_vcpu *vcpu, ulong eaddr, bool is_store) in kvmppc_inject_pf() argument
169 u64 msr = kvmppc_get_msr(vcpu); in kvmppc_inject_pf()
173 kvmppc_set_msr(vcpu, msr); in kvmppc_inject_pf()
174 kvmppc_set_dar(vcpu, eaddr); in kvmppc_inject_pf()
179 kvmppc_set_dsisr(vcpu, dsisr); in kvmppc_inject_pf()
180 kvmppc_book3s_queue_irqprio(vcpu, BOOK3S_INTERRUPT_DATA_STORAGE); in kvmppc_inject_pf()
183 static int kvmppc_emulate_fpr_load(struct kvm_run *run, struct kvm_vcpu *vcpu, in kvmppc_emulate_fpr_load() argument
195 r = kvmppc_ld(vcpu, &addr, len, tmp, true); in kvmppc_emulate_fpr_load()
196 vcpu->arch.paddr_accessed = addr; in kvmppc_emulate_fpr_load()
199 kvmppc_inject_pf(vcpu, addr, false); in kvmppc_emulate_fpr_load()
202 emulated = kvmppc_handle_load(run, vcpu, KVM_MMIO_REG_FPR | rs, in kvmppc_emulate_fpr_load()
212 kvm_cvt_fd((u32*)tmp, &VCPU_FPR(vcpu, rs)); in kvmppc_emulate_fpr_load()
213 vcpu->arch.qpr[rs] = *((u32*)tmp); in kvmppc_emulate_fpr_load()
216 VCPU_FPR(vcpu, rs) = *((u64*)tmp); in kvmppc_emulate_fpr_load()
227 static int kvmppc_emulate_fpr_store(struct kvm_run *run, struct kvm_vcpu *vcpu, in kvmppc_emulate_fpr_store() argument
238 kvm_cvt_df(&VCPU_FPR(vcpu, rs), (u32*)tmp); in kvmppc_emulate_fpr_store()
243 *((u32*)tmp) = VCPU_FPR(vcpu, rs); in kvmppc_emulate_fpr_store()
244 val = VCPU_FPR(vcpu, rs) & 0xffffffff; in kvmppc_emulate_fpr_store()
248 *((u64*)tmp) = VCPU_FPR(vcpu, rs); in kvmppc_emulate_fpr_store()
249 val = VCPU_FPR(vcpu, rs); in kvmppc_emulate_fpr_store()
257 r = kvmppc_st(vcpu, &addr, len, tmp, true); in kvmppc_emulate_fpr_store()
258 vcpu->arch.paddr_accessed = addr; in kvmppc_emulate_fpr_store()
260 kvmppc_inject_pf(vcpu, addr, true); in kvmppc_emulate_fpr_store()
262 emulated = kvmppc_handle_store(run, vcpu, val, len, 1); in kvmppc_emulate_fpr_store()
273 static int kvmppc_emulate_psq_load(struct kvm_run *run, struct kvm_vcpu *vcpu, in kvmppc_emulate_psq_load() argument
283 r = kvmppc_ld(vcpu, &addr, sizeof(u32), tmp, true); in kvmppc_emulate_psq_load()
286 r = kvmppc_ld(vcpu, &addr, sizeof(u32) * 2, tmp, true); in kvmppc_emulate_psq_load()
288 vcpu->arch.paddr_accessed = addr; in kvmppc_emulate_psq_load()
290 kvmppc_inject_pf(vcpu, addr, false); in kvmppc_emulate_psq_load()
293 emulated = kvmppc_handle_load(run, vcpu, KVM_MMIO_REG_FPR | rs, in kvmppc_emulate_psq_load()
295 vcpu->arch.qpr[rs] = tmp[1]; in kvmppc_emulate_psq_load()
298 emulated = kvmppc_handle_load(run, vcpu, KVM_MMIO_REG_FQPR | rs, in kvmppc_emulate_psq_load()
306 kvm_cvt_fd(&tmp[0], &VCPU_FPR(vcpu, rs)); in kvmppc_emulate_psq_load()
307 vcpu->arch.qpr[rs] = tmp[1]; in kvmppc_emulate_psq_load()
316 static int kvmppc_emulate_psq_store(struct kvm_run *run, struct kvm_vcpu *vcpu, in kvmppc_emulate_psq_store() argument
324 kvm_cvt_df(&VCPU_FPR(vcpu, rs), &tmp[0]); in kvmppc_emulate_psq_store()
325 tmp[1] = vcpu->arch.qpr[rs]; in kvmppc_emulate_psq_store()
327 r = kvmppc_st(vcpu, &addr, len, tmp, true); in kvmppc_emulate_psq_store()
328 vcpu->arch.paddr_accessed = addr; in kvmppc_emulate_psq_store()
330 kvmppc_inject_pf(vcpu, addr, true); in kvmppc_emulate_psq_store()
332 emulated = kvmppc_handle_store(run, vcpu, tmp[0], 4, 1); in kvmppc_emulate_psq_store()
335 emulated = kvmppc_handle_store(run, vcpu, val, 8, 1); in kvmppc_emulate_psq_store()
355 static bool kvmppc_inst_is_paired_single(struct kvm_vcpu *vcpu, u32 inst) in kvmppc_inst_is_paired_single() argument
357 if (!(vcpu->arch.hflags & BOOK3S_HFLAG_PAIRED_SINGLE)) in kvmppc_inst_is_paired_single()
501 static int kvmppc_ps_three_in(struct kvm_vcpu *vcpu, bool rc, in kvmppc_ps_three_in() argument
508 u32 *qpr = vcpu->arch.qpr; in kvmppc_ps_three_in()
517 kvm_cvt_df(&VCPU_FPR(vcpu, reg_in1), &ps0_in1); in kvmppc_ps_three_in()
518 kvm_cvt_df(&VCPU_FPR(vcpu, reg_in2), &ps0_in2); in kvmppc_ps_three_in()
519 kvm_cvt_df(&VCPU_FPR(vcpu, reg_in3), &ps0_in3); in kvmppc_ps_three_in()
524 func(&vcpu->arch.fp.fpscr, &ps0_out, &ps0_in1, &ps0_in2, &ps0_in3); in kvmppc_ps_three_in()
530 kvm_cvt_fd(&ps0_out, &VCPU_FPR(vcpu, reg_out)); in kvmppc_ps_three_in()
541 func(&vcpu->arch.fp.fpscr, &qpr[reg_out], &ps1_in1, &ps1_in2, &ps1_in3); in kvmppc_ps_three_in()
549 static int kvmppc_ps_two_in(struct kvm_vcpu *vcpu, bool rc, in kvmppc_ps_two_in() argument
556 u32 *qpr = vcpu->arch.qpr; in kvmppc_ps_two_in()
566 kvm_cvt_df(&VCPU_FPR(vcpu, reg_in1), &ps0_in1); in kvmppc_ps_two_in()
571 kvm_cvt_df(&VCPU_FPR(vcpu, reg_in2), &ps0_in2); in kvmppc_ps_two_in()
573 func(&vcpu->arch.fp.fpscr, &ps0_out, &ps0_in1, &ps0_in2); in kvmppc_ps_two_in()
579 kvm_cvt_fd(&ps0_out, &VCPU_FPR(vcpu, reg_out)); in kvmppc_ps_two_in()
589 func(&vcpu->arch.fp.fpscr, &ps1_out, &ps1_in1, &ps1_in2); in kvmppc_ps_two_in()
601 static int kvmppc_ps_one_in(struct kvm_vcpu *vcpu, bool rc, in kvmppc_ps_one_in() argument
606 u32 *qpr = vcpu->arch.qpr; in kvmppc_ps_one_in()
614 kvm_cvt_df(&VCPU_FPR(vcpu, reg_in), &ps0_in); in kvmppc_ps_one_in()
615 func(&vcpu->arch.fp.fpscr, &ps0_out, &ps0_in); in kvmppc_ps_one_in()
620 kvm_cvt_fd(&ps0_out, &VCPU_FPR(vcpu, reg_out)); in kvmppc_ps_one_in()
624 func(&vcpu->arch.fp.fpscr, &qpr[reg_out], &ps1_in); in kvmppc_ps_one_in()
632 int kvmppc_emulate_paired_single(struct kvm_run *run, struct kvm_vcpu *vcpu) in kvmppc_emulate_paired_single() argument
646 emulated = kvmppc_get_last_inst(vcpu, INST_GENERIC, &inst); in kvmppc_emulate_paired_single()
656 fpr_d = &VCPU_FPR(vcpu, ax_rd); in kvmppc_emulate_paired_single()
657 fpr_a = &VCPU_FPR(vcpu, ax_ra); in kvmppc_emulate_paired_single()
658 fpr_b = &VCPU_FPR(vcpu, ax_rb); in kvmppc_emulate_paired_single()
659 fpr_c = &VCPU_FPR(vcpu, ax_rc); in kvmppc_emulate_paired_single()
662 cr = kvmppc_get_cr(vcpu); in kvmppc_emulate_paired_single()
664 if (!kvmppc_inst_is_paired_single(vcpu, inst)) in kvmppc_emulate_paired_single()
667 if (!(kvmppc_get_msr(vcpu) & MSR_FP)) { in kvmppc_emulate_paired_single()
668 kvmppc_book3s_queue_irqprio(vcpu, BOOK3S_INTERRUPT_FP_UNAVAIL); in kvmppc_emulate_paired_single()
672 kvmppc_giveup_ext(vcpu, MSR_FP); in kvmppc_emulate_paired_single()
678 for (i = 0; i < ARRAY_SIZE(vcpu->arch.fp.fpr); i++) { in kvmppc_emulate_paired_single()
680 kvm_cvt_df(&VCPU_FPR(vcpu, i), &f); in kvmppc_emulate_paired_single()
682 i, f, VCPU_FPR(vcpu, i), i, vcpu->arch.qpr[i]); in kvmppc_emulate_paired_single()
689 ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0; in kvmppc_emulate_paired_single()
694 emulated = kvmppc_emulate_psq_load(run, vcpu, ax_rd, addr, w, i); in kvmppc_emulate_paired_single()
699 ulong addr = kvmppc_get_gpr(vcpu, ax_ra); in kvmppc_emulate_paired_single()
704 emulated = kvmppc_emulate_psq_load(run, vcpu, ax_rd, addr, w, i); in kvmppc_emulate_paired_single()
707 kvmppc_set_gpr(vcpu, ax_ra, addr); in kvmppc_emulate_paired_single()
712 ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0; in kvmppc_emulate_paired_single()
717 emulated = kvmppc_emulate_psq_store(run, vcpu, ax_rd, addr, w, i); in kvmppc_emulate_paired_single()
722 ulong addr = kvmppc_get_gpr(vcpu, ax_ra); in kvmppc_emulate_paired_single()
727 emulated = kvmppc_emulate_psq_store(run, vcpu, ax_rd, addr, w, i); in kvmppc_emulate_paired_single()
730 kvmppc_set_gpr(vcpu, ax_ra, addr); in kvmppc_emulate_paired_single()
742 ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0; in kvmppc_emulate_paired_single()
746 addr += kvmppc_get_gpr(vcpu, ax_rb); in kvmppc_emulate_paired_single()
747 emulated = kvmppc_emulate_psq_load(run, vcpu, ax_rd, addr, w, i); in kvmppc_emulate_paired_single()
756 ulong addr = kvmppc_get_gpr(vcpu, ax_ra); in kvmppc_emulate_paired_single()
760 addr += kvmppc_get_gpr(vcpu, ax_rb); in kvmppc_emulate_paired_single()
761 emulated = kvmppc_emulate_psq_load(run, vcpu, ax_rd, addr, w, i); in kvmppc_emulate_paired_single()
764 kvmppc_set_gpr(vcpu, ax_ra, addr); in kvmppc_emulate_paired_single()
768 VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb); in kvmppc_emulate_paired_single()
769 VCPU_FPR(vcpu, ax_rd) ^= 0x8000000000000000ULL; in kvmppc_emulate_paired_single()
770 vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb]; in kvmppc_emulate_paired_single()
771 vcpu->arch.qpr[ax_rd] ^= 0x80000000; in kvmppc_emulate_paired_single()
779 VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb); in kvmppc_emulate_paired_single()
780 vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb]; in kvmppc_emulate_paired_single()
788 VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb); in kvmppc_emulate_paired_single()
789 VCPU_FPR(vcpu, ax_rd) |= 0x8000000000000000ULL; in kvmppc_emulate_paired_single()
790 vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb]; in kvmppc_emulate_paired_single()
791 vcpu->arch.qpr[ax_rd] |= 0x80000000; in kvmppc_emulate_paired_single()
795 VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rb); in kvmppc_emulate_paired_single()
796 VCPU_FPR(vcpu, ax_rd) &= ~0x8000000000000000ULL; in kvmppc_emulate_paired_single()
797 vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb]; in kvmppc_emulate_paired_single()
798 vcpu->arch.qpr[ax_rd] &= ~0x80000000; in kvmppc_emulate_paired_single()
802 VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_ra); in kvmppc_emulate_paired_single()
804 kvm_cvt_df(&VCPU_FPR(vcpu, ax_rb), in kvmppc_emulate_paired_single()
805 &vcpu->arch.qpr[ax_rd]); in kvmppc_emulate_paired_single()
809 VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_ra); in kvmppc_emulate_paired_single()
810 vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb]; in kvmppc_emulate_paired_single()
815 kvm_cvt_fd(&vcpu->arch.qpr[ax_ra], in kvmppc_emulate_paired_single()
816 &VCPU_FPR(vcpu, ax_rd)); in kvmppc_emulate_paired_single()
818 kvm_cvt_df(&VCPU_FPR(vcpu, ax_rb), in kvmppc_emulate_paired_single()
819 &vcpu->arch.qpr[ax_rd]); in kvmppc_emulate_paired_single()
824 kvm_cvt_fd(&vcpu->arch.qpr[ax_ra], in kvmppc_emulate_paired_single()
825 &VCPU_FPR(vcpu, ax_rd)); in kvmppc_emulate_paired_single()
826 vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rb]; in kvmppc_emulate_paired_single()
833 ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0; in kvmppc_emulate_paired_single()
837 addr += kvmppc_get_gpr(vcpu, ax_rb); in kvmppc_emulate_paired_single()
838 emulated = kvmppc_emulate_psq_store(run, vcpu, ax_rd, addr, w, i); in kvmppc_emulate_paired_single()
843 ulong addr = kvmppc_get_gpr(vcpu, ax_ra); in kvmppc_emulate_paired_single()
847 addr += kvmppc_get_gpr(vcpu, ax_rb); in kvmppc_emulate_paired_single()
848 emulated = kvmppc_emulate_psq_store(run, vcpu, ax_rd, addr, w, i); in kvmppc_emulate_paired_single()
851 kvmppc_set_gpr(vcpu, ax_ra, addr); in kvmppc_emulate_paired_single()
858 emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd, in kvmppc_emulate_paired_single()
860 VCPU_FPR(vcpu, ax_rd) = VCPU_FPR(vcpu, ax_rc); in kvmppc_emulate_paired_single()
863 emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd, in kvmppc_emulate_paired_single()
865 vcpu->arch.qpr[ax_rd] = vcpu->arch.qpr[ax_rc]; in kvmppc_emulate_paired_single()
868 emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd, in kvmppc_emulate_paired_single()
872 emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd, in kvmppc_emulate_paired_single()
876 emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd, in kvmppc_emulate_paired_single()
880 emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd, in kvmppc_emulate_paired_single()
884 emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd, in kvmppc_emulate_paired_single()
888 emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd, in kvmppc_emulate_paired_single()
892 emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd, in kvmppc_emulate_paired_single()
896 emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd, in kvmppc_emulate_paired_single()
900 emulated = kvmppc_ps_one_in(vcpu, rcomp, ax_rd, in kvmppc_emulate_paired_single()
904 emulated = kvmppc_ps_two_in(vcpu, rcomp, ax_rd, in kvmppc_emulate_paired_single()
908 emulated = kvmppc_ps_one_in(vcpu, rcomp, ax_rd, in kvmppc_emulate_paired_single()
912 emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd, in kvmppc_emulate_paired_single()
916 emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd, in kvmppc_emulate_paired_single()
920 emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd, in kvmppc_emulate_paired_single()
924 emulated = kvmppc_ps_three_in(vcpu, rcomp, ax_rd, in kvmppc_emulate_paired_single()
934 ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d; in kvmppc_emulate_paired_single()
936 emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, addr, in kvmppc_emulate_paired_single()
942 ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d; in kvmppc_emulate_paired_single()
944 emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, addr, in kvmppc_emulate_paired_single()
948 kvmppc_set_gpr(vcpu, ax_ra, addr); in kvmppc_emulate_paired_single()
953 ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d; in kvmppc_emulate_paired_single()
955 emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, addr, in kvmppc_emulate_paired_single()
961 ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d; in kvmppc_emulate_paired_single()
963 emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, addr, in kvmppc_emulate_paired_single()
967 kvmppc_set_gpr(vcpu, ax_ra, addr); in kvmppc_emulate_paired_single()
972 ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d; in kvmppc_emulate_paired_single()
974 emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, addr, in kvmppc_emulate_paired_single()
980 ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d; in kvmppc_emulate_paired_single()
982 emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, addr, in kvmppc_emulate_paired_single()
986 kvmppc_set_gpr(vcpu, ax_ra, addr); in kvmppc_emulate_paired_single()
991 ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + full_d; in kvmppc_emulate_paired_single()
993 emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, addr, in kvmppc_emulate_paired_single()
999 ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + full_d; in kvmppc_emulate_paired_single()
1001 emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, addr, in kvmppc_emulate_paired_single()
1005 kvmppc_set_gpr(vcpu, ax_ra, addr); in kvmppc_emulate_paired_single()
1012 ulong addr = ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0; in kvmppc_emulate_paired_single()
1014 addr += kvmppc_get_gpr(vcpu, ax_rb); in kvmppc_emulate_paired_single()
1015 emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, in kvmppc_emulate_paired_single()
1021 ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + in kvmppc_emulate_paired_single()
1022 kvmppc_get_gpr(vcpu, ax_rb); in kvmppc_emulate_paired_single()
1024 emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, in kvmppc_emulate_paired_single()
1028 kvmppc_set_gpr(vcpu, ax_ra, addr); in kvmppc_emulate_paired_single()
1033 ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + in kvmppc_emulate_paired_single()
1034 kvmppc_get_gpr(vcpu, ax_rb); in kvmppc_emulate_paired_single()
1036 emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, in kvmppc_emulate_paired_single()
1042 ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + in kvmppc_emulate_paired_single()
1043 kvmppc_get_gpr(vcpu, ax_rb); in kvmppc_emulate_paired_single()
1045 emulated = kvmppc_emulate_fpr_load(run, vcpu, ax_rd, in kvmppc_emulate_paired_single()
1049 kvmppc_set_gpr(vcpu, ax_ra, addr); in kvmppc_emulate_paired_single()
1054 ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + in kvmppc_emulate_paired_single()
1055 kvmppc_get_gpr(vcpu, ax_rb); in kvmppc_emulate_paired_single()
1057 emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, in kvmppc_emulate_paired_single()
1063 ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + in kvmppc_emulate_paired_single()
1064 kvmppc_get_gpr(vcpu, ax_rb); in kvmppc_emulate_paired_single()
1066 emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, in kvmppc_emulate_paired_single()
1070 kvmppc_set_gpr(vcpu, ax_ra, addr); in kvmppc_emulate_paired_single()
1075 ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + in kvmppc_emulate_paired_single()
1076 kvmppc_get_gpr(vcpu, ax_rb); in kvmppc_emulate_paired_single()
1078 emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, in kvmppc_emulate_paired_single()
1084 ulong addr = kvmppc_get_gpr(vcpu, ax_ra) + in kvmppc_emulate_paired_single()
1085 kvmppc_get_gpr(vcpu, ax_rb); in kvmppc_emulate_paired_single()
1087 emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, in kvmppc_emulate_paired_single()
1091 kvmppc_set_gpr(vcpu, ax_ra, addr); in kvmppc_emulate_paired_single()
1096 ulong addr = (ax_ra ? kvmppc_get_gpr(vcpu, ax_ra) : 0) + in kvmppc_emulate_paired_single()
1097 kvmppc_get_gpr(vcpu, ax_rb); in kvmppc_emulate_paired_single()
1099 emulated = kvmppc_emulate_fpr_store(run, vcpu, ax_rd, in kvmppc_emulate_paired_single()
1110 fpd_fadds(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b); in kvmppc_emulate_paired_single()
1111 kvmppc_sync_qpr(vcpu, ax_rd); in kvmppc_emulate_paired_single()
1114 fpd_fsubs(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b); in kvmppc_emulate_paired_single()
1115 kvmppc_sync_qpr(vcpu, ax_rd); in kvmppc_emulate_paired_single()
1118 fpd_fdivs(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b); in kvmppc_emulate_paired_single()
1119 kvmppc_sync_qpr(vcpu, ax_rd); in kvmppc_emulate_paired_single()
1122 fpd_fres(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b); in kvmppc_emulate_paired_single()
1123 kvmppc_sync_qpr(vcpu, ax_rd); in kvmppc_emulate_paired_single()
1126 fpd_frsqrtes(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b); in kvmppc_emulate_paired_single()
1127 kvmppc_sync_qpr(vcpu, ax_rd); in kvmppc_emulate_paired_single()
1132 fpd_fmuls(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c); in kvmppc_emulate_paired_single()
1133 kvmppc_sync_qpr(vcpu, ax_rd); in kvmppc_emulate_paired_single()
1136 fpd_fmsubs(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b); in kvmppc_emulate_paired_single()
1137 kvmppc_sync_qpr(vcpu, ax_rd); in kvmppc_emulate_paired_single()
1140 fpd_fmadds(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b); in kvmppc_emulate_paired_single()
1141 kvmppc_sync_qpr(vcpu, ax_rd); in kvmppc_emulate_paired_single()
1144 fpd_fnmsubs(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b); in kvmppc_emulate_paired_single()
1145 kvmppc_sync_qpr(vcpu, ax_rd); in kvmppc_emulate_paired_single()
1148 fpd_fnmadds(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b); in kvmppc_emulate_paired_single()
1149 kvmppc_sync_qpr(vcpu, ax_rd); in kvmppc_emulate_paired_single()
1163 *fpr_d = vcpu->arch.fp.fpscr; in kvmppc_emulate_paired_single()
1168 vcpu->arch.fp.fpscr = *fpr_b; in kvmppc_emulate_paired_single()
1176 fpd_fcmpu(&vcpu->arch.fp.fpscr, &tmp_cr, fpr_a, fpr_b); in kvmppc_emulate_paired_single()
1187 fpd_fcmpo(&vcpu->arch.fp.fpscr, &tmp_cr, fpr_a, fpr_b); in kvmppc_emulate_paired_single()
1193 fpd_fneg(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b); in kvmppc_emulate_paired_single()
1199 fpd_fabs(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b); in kvmppc_emulate_paired_single()
1202 fpd_fcpsgn(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b); in kvmppc_emulate_paired_single()
1205 fpd_fdiv(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b); in kvmppc_emulate_paired_single()
1208 fpd_fadd(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b); in kvmppc_emulate_paired_single()
1211 fpd_fsub(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_b); in kvmppc_emulate_paired_single()
1214 fpd_fctiw(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b); in kvmppc_emulate_paired_single()
1217 fpd_fctiwz(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b); in kvmppc_emulate_paired_single()
1220 fpd_frsp(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b); in kvmppc_emulate_paired_single()
1221 kvmppc_sync_qpr(vcpu, ax_rd); in kvmppc_emulate_paired_single()
1228 fpd_fsqrt(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_b); in kvmppc_emulate_paired_single()
1230 fpd_fdiv(&vcpu->arch.fp.fpscr, &cr, fpr_d, (u64*)&one, fpr_d); in kvmppc_emulate_paired_single()
1236 fpd_fmul(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c); in kvmppc_emulate_paired_single()
1239 fpd_fsel(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b); in kvmppc_emulate_paired_single()
1242 fpd_fmsub(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b); in kvmppc_emulate_paired_single()
1245 fpd_fmadd(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b); in kvmppc_emulate_paired_single()
1248 fpd_fnmsub(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b); in kvmppc_emulate_paired_single()
1251 fpd_fnmadd(&vcpu->arch.fp.fpscr, &cr, fpr_d, fpr_a, fpr_c, fpr_b); in kvmppc_emulate_paired_single()
1258 for (i = 0; i < ARRAY_SIZE(vcpu->arch.fp.fpr); i++) { in kvmppc_emulate_paired_single()
1260 kvm_cvt_df(&VCPU_FPR(vcpu, i), &f); in kvmppc_emulate_paired_single()
1266 kvmppc_set_cr(vcpu, cr); in kvmppc_emulate_paired_single()
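Every load/store emulation helper in this listing follows the same shape: resolve the guest effective address, record it in vcpu->arch.paddr_accessed, inject a data-storage interrupt via kvmppc_inject_pf() if translation fails, and otherwise either complete the access directly or hand it to the MMIO path with kvmppc_handle_load()/kvmppc_handle_store(). The sketch below reconstructs that pattern for a floating-point load from the fragments above; it is illustrative kernel-context code, not the file's verbatim source, and the EMULATE_* return-value convention of kvmppc_ld() and the exact trailing arguments of kvmppc_handle_load() are assumptions not visible in the listing.

```c
/*
 * Sketch of the FPR load-emulation pattern seen in the refs:vcpu hits
 * above (kvmppc_emulate_fpr_load).  Assumptions: kvmppc_ld() returns a
 * negative value on a translation fault and EMULATE_DO_MMIO when the
 * address maps to an emulated device; tmp is wide enough for a u64.
 */
static int fpr_load_sketch(struct kvm_run *run, struct kvm_vcpu *vcpu,
			   int rs, ulong addr, int len)
{
	int emulated = EMULATE_DONE;
	char tmp[8];
	int r;

	/* Walk the guest mapping and read the data. */
	r = kvmppc_ld(vcpu, &addr, len, tmp, true);
	vcpu->arch.paddr_accessed = addr;

	if (r < 0) {
		/* No valid mapping: raise BOOK3S_INTERRUPT_DATA_STORAGE. */
		kvmppc_inject_pf(vcpu, addr, false);
	} else if (r == EMULATE_DO_MMIO) {
		/* Emulated device: let the generic MMIO code finish it. */
		emulated = kvmppc_handle_load(run, vcpu,
					      KVM_MMIO_REG_FPR | rs, len, 1);
	} else if (len == sizeof(u32)) {
		/* Single-precision: widen into the FPR, mirror into the QPR. */
		kvm_cvt_fd((u32 *)tmp, &VCPU_FPR(vcpu, rs));
		vcpu->arch.qpr[rs] = *(u32 *)tmp;
	} else {
		/* Double-precision: store the raw 64-bit image. */
		VCPU_FPR(vcpu, rs) = *(u64 *)tmp;
	}

	return emulated;
}
```

The paired-single arithmetic helpers (kvmppc_ps_one_in(), kvmppc_ps_two_in(), kvmppc_ps_three_in()) follow an analogous shape, as the hits above show: convert the double-precision FPR operands down with kvm_cvt_df(), run the soft-FPU callback against vcpu->arch.fp.fpscr once for the ps0 halves and once for the ps1 (QPR) halves, then convert the ps0 result back into the FPR with kvm_cvt_fd().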