Lines Matching refs:arch (arch/mips/kvm/emulate.c, Linux KVM for MIPS)

47 	struct kvm_vcpu_arch *arch = &vcpu->arch;  in kvm_compute_return_epc()  local
65 arch->gprs[insn.r_format.rd] = epc + 8; in kvm_compute_return_epc()
68 nextpc = arch->gprs[insn.r_format.rs]; in kvm_compute_return_epc()
82 if ((long)arch->gprs[insn.i_format.rs] < 0) in kvm_compute_return_epc()
91 if ((long)arch->gprs[insn.i_format.rs] >= 0) in kvm_compute_return_epc()
100 arch->gprs[31] = epc + 8; in kvm_compute_return_epc()
101 if ((long)arch->gprs[insn.i_format.rs] < 0) in kvm_compute_return_epc()
110 arch->gprs[31] = epc + 8; in kvm_compute_return_epc()
111 if ((long)arch->gprs[insn.i_format.rs] >= 0) in kvm_compute_return_epc()
134 arch->gprs[31] = instpc + 8; in kvm_compute_return_epc()
146 if (arch->gprs[insn.i_format.rs] == in kvm_compute_return_epc()
147 arch->gprs[insn.i_format.rt]) in kvm_compute_return_epc()
156 if (arch->gprs[insn.i_format.rs] != in kvm_compute_return_epc()
157 arch->gprs[insn.i_format.rt]) in kvm_compute_return_epc()
167 if ((long)arch->gprs[insn.i_format.rs] <= 0) in kvm_compute_return_epc()
177 if ((long)arch->gprs[insn.i_format.rs] > 0) in kvm_compute_return_epc()
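The matches above are the branch/jump decoder in kvm_compute_return_epc(): it reads guest GPRs to evaluate the condition, the and-link variants store epc + 8 (the instruction after the delay slot) into gprs[31] or rd, and the computed target becomes the next PC. A minimal userspace sketch of the target arithmetic, with invented names, not the kernel code:

#include <stdint.h>
#include <stdio.h>

/* Illustrative model of MIPS branch-target arithmetic for a branch
 * located at `epc`.  The offset is a signed 16-bit instruction count. */
static unsigned long branch_target(unsigned long epc, int16_t simm, int taken)
{
	if (taken)
		/* Target is relative to the delay-slot address (epc + 4). */
		return epc + 4 + ((long)simm << 2);
	/* Not taken: fall through past the branch and its delay slot. */
	return epc + 8;
}

int main(void)
{
	unsigned long epc = 0x80001000;

	printf("taken:     %#lx\n", branch_target(epc, 3, 1)); /* 0x80001010 */
	printf("not taken: %#lx\n", branch_target(epc, 3, 0)); /* 0x80001008 */
	return 0;
}

The epc + 8 link value seen at lines 100, 110, and 134 is the same delay-slot rule: the return point is the instruction after the slot.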
207 branch_pc = kvm_compute_return_epc(vcpu, vcpu->arch.pc); in update_pc()
211 vcpu->arch.pc = branch_pc; in update_pc()
213 vcpu->arch.pc); in update_pc()
216 vcpu->arch.pc += 4; in update_pc()
218 kvm_debug("update_pc(): New PC: %#lx\n", vcpu->arch.pc); in update_pc()
233 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_count_disabled()
235 return (vcpu->arch.count_ctl & KVM_REG_MIPS_COUNT_CTL_DC) || in kvm_mips_count_disabled()
252 delta = now_ns + vcpu->arch.count_dyn_bias; in kvm_mips_ktime_to_count()
254 if (delta >= vcpu->arch.count_period) { in kvm_mips_ktime_to_count()
256 periods = div64_s64(now_ns, vcpu->arch.count_period); in kvm_mips_ktime_to_count()
257 vcpu->arch.count_dyn_bias = -periods * vcpu->arch.count_period; in kvm_mips_ktime_to_count()
259 delta = now_ns + vcpu->arch.count_dyn_bias; in kvm_mips_ktime_to_count()
272 return div_u64(delta * vcpu->arch.count_hz, NSEC_PER_SEC); in kvm_mips_ktime_to_count()
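kvm_mips_ktime_to_count() converts host nanoseconds into a guest CP0_Count value. count_dyn_bias keeps delta = now_ns + count_dyn_bias below one count_period (the wall-clock time for the 32-bit Count to wrap, about 42.95 s at 100 MHz), so the delta * count_hz multiply cannot overflow 64 bits. A runnable model of that arithmetic, reusing the field names from the matches (struct vcount itself is invented for the sketch):

#include <stdint.h>
#include <stdio.h>

#define NSEC_PER_SEC 1000000000ULL

/* count_period is the nanoseconds in one full 2^32-tick wrap of Count. */
struct vcount {
	uint64_t count_hz;       /* guest Count frequency */
	uint64_t count_period;   /* ns per 2^32 Count ticks */
	int64_t  count_dyn_bias; /* rebases delta to stay < count_period */
};

static uint32_t ktime_to_count(struct vcount *v, int64_t now_ns)
{
	int64_t delta = now_ns + v->count_dyn_bias;

	if (delta >= (int64_t)v->count_period) {
		/* Rebase so the multiply below cannot overflow 64 bits. */
		int64_t periods = now_ns / (int64_t)v->count_period;
		v->count_dyn_bias = -periods * (int64_t)v->count_period;
		delta = now_ns + v->count_dyn_bias;
	}
	return (uint32_t)((uint64_t)delta * v->count_hz / NSEC_PER_SEC);
}

int main(void)
{
	struct vcount v = {
		.count_hz = 100 * 1000 * 1000,                     /* 100 MHz */
		.count_period = (NSEC_PER_SEC << 32) / (100 * 1000 * 1000),
	};
	/* 1 ms of wall time is 100000 ticks at 100 MHz. */
	printf("%u\n", ktime_to_count(&v, 1000000));
	return 0;
}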
287 if (unlikely(vcpu->arch.count_ctl & KVM_REG_MIPS_COUNT_CTL_DC)) in kvm_mips_count_time()
288 return vcpu->arch.count_resume; in kvm_mips_count_time()
305 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_read_count_running()
311 count = vcpu->arch.count_bias + kvm_mips_ktime_to_count(vcpu, now); in kvm_mips_read_count_running()
327 expires = hrtimer_get_expires(&vcpu->arch.comparecount_timer); in kvm_mips_read_count_running()
328 threshold = ktime_add_ns(now, vcpu->arch.count_period / 4); in kvm_mips_read_count_running()
334 running = hrtimer_cancel(&vcpu->arch.comparecount_timer); in kvm_mips_read_count_running()
345 vcpu->arch.count_period); in kvm_mips_read_count_running()
346 hrtimer_start(&vcpu->arch.comparecount_timer, expires, in kvm_mips_read_count_running()
365 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_read_count()
396 hrtimer_cancel(&vcpu->arch.comparecount_timer); in kvm_mips_freeze_hrtimer()
424 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_resume_hrtimer()
432 delta = div_u64(delta * NSEC_PER_SEC, vcpu->arch.count_hz); in kvm_mips_resume_hrtimer()
436 hrtimer_cancel(&vcpu->arch.comparecount_timer); in kvm_mips_resume_hrtimer()
437 hrtimer_start(&vcpu->arch.comparecount_timer, expire, HRTIMER_MODE_ABS); in kvm_mips_resume_hrtimer()
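kvm_mips_resume_hrtimer() re-arms the comparecount timer at the wall-clock instant when Count next equals Compare: the 32-bit difference compare - count is scaled by NSEC_PER_SEC / count_hz. A simplified runnable sketch; it omits the kernel's exact wrap handling around compare == count:

#include <stdint.h>
#include <stdio.h>

#define NSEC_PER_SEC 1000000000ULL

/* Illustrative: nanoseconds until Count next equals Compare.  The
 * 32-bit subtraction naturally handles the Count wrap. */
static uint64_t ns_until_match(uint32_t count, uint32_t compare,
			       uint64_t count_hz)
{
	uint32_t delta = compare - count;	/* modulo 2^32 */

	return (uint64_t)delta * NSEC_PER_SEC / count_hz;
}

int main(void)
{
	/* 100000 ticks away at 100 MHz -> 1 ms. */
	printf("%llu ns\n",
	       (unsigned long long)ns_until_match(0, 100000, 100000000));
	/* Compare "behind" Count: wraps, nearly a full period away. */
	printf("%llu ns\n",
	       (unsigned long long)ns_until_match(100000, 0, 100000000));
	return 0;
}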
449 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_write_count()
454 vcpu->arch.count_bias = count - kvm_mips_ktime_to_count(vcpu, now); in kvm_mips_write_count()
474 vcpu->arch.count_hz = 100*1000*1000; in kvm_mips_init_count()
475 vcpu->arch.count_period = div_u64((u64)NSEC_PER_SEC << 32, in kvm_mips_init_count()
476 vcpu->arch.count_hz); in kvm_mips_init_count()
477 vcpu->arch.count_dyn_bias = 0; in kvm_mips_init_count()
496 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_set_count_hz()
505 if (vcpu->arch.count_hz == count_hz) in kvm_mips_set_count_hz()
518 vcpu->arch.count_hz = count_hz; in kvm_mips_set_count_hz()
519 vcpu->arch.count_period = div_u64((u64)NSEC_PER_SEC << 32, count_hz); in kvm_mips_set_count_hz()
520 vcpu->arch.count_dyn_bias = 0; in kvm_mips_set_count_hz()
523 vcpu->arch.count_bias = count - kvm_mips_ktime_to_count(vcpu, now); in kvm_mips_set_count_hz()
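kvm_mips_set_count_hz() samples Count with the old parameters and then re-derives count_bias, so the guest-visible Count does not jump across a frequency change. A self-contained model of that invariant (struct vtimer and its helpers are invented; the dynamic bias and disabled-count handling are left out):

#include <stdint.h>
#include <stdio.h>

#define NSEC_PER_SEC 1000000000ULL

/* The guest-visible Count is count_bias + ticks(now), with
 * ticks(t) = t * count_hz / 1e9. */
struct vtimer {
	uint64_t count_hz;
	uint32_t count_bias;
};

static uint32_t ticks(const struct vtimer *v, uint64_t now_ns)
{
	return (uint32_t)(now_ns * v->count_hz / NSEC_PER_SEC);
}

static uint32_t read_count(const struct vtimer *v, uint64_t now_ns)
{
	return v->count_bias + ticks(v, now_ns);
}

static void set_count_hz(struct vtimer *v, uint64_t new_hz, uint64_t now_ns)
{
	uint32_t count = read_count(v, now_ns);	  /* sample with old rate */

	v->count_hz = new_hz;
	v->count_bias = count - ticks(v, now_ns); /* re-derive the bias */
}

int main(void)
{
	struct vtimer v = { .count_hz = 100000000, .count_bias = 1234 };
	uint64_t now = 5000000;	/* 5 ms */

	uint32_t before = read_count(&v, now);
	set_count_hz(&v, 200000000, now);
	/* Count reads identically just before and just after the change. */
	printf("before=%u after=%u\n", before, read_count(&v, now));
	return 0;
}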
543 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_write_compare()
587 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_count_disable()
592 hrtimer_cancel(&vcpu->arch.comparecount_timer); in kvm_mips_count_disable()
614 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_count_disable_cause()
617 if (!(vcpu->arch.count_ctl & KVM_REG_MIPS_COUNT_CTL_DC)) in kvm_mips_count_disable_cause()
634 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_count_enable_cause()
660 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_set_count_ctl()
661 s64 changed = count_ctl ^ vcpu->arch.count_ctl; in kvm_mips_set_count_ctl()
671 vcpu->arch.count_ctl = count_ctl; in kvm_mips_set_count_ctl()
679 vcpu->arch.count_resume = ktime_get(); in kvm_mips_set_count_ctl()
682 vcpu->arch.count_resume = kvm_mips_count_disable(vcpu); in kvm_mips_set_count_ctl()
692 vcpu->arch.count_hz); in kvm_mips_set_count_ctl()
693 expire = ktime_add_ns(vcpu->arch.count_resume, delta); in kvm_mips_set_count_ctl()
730 vcpu->arch.count_resume = ns_to_ktime(count_resume); in kvm_mips_set_count_resume()
745 hrtimer_add_expires_ns(&vcpu->arch.comparecount_timer, in kvm_mips_count_timeout()
746 vcpu->arch.count_period); in kvm_mips_count_timeout()
752 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emul_eret()
756 kvm_debug("[%#lx] ERET to %#lx\n", vcpu->arch.pc, in kvm_mips_emul_eret()
759 vcpu->arch.pc = kvm_read_c0_guest_epc(cop0); in kvm_mips_emul_eret()
763 vcpu->arch.pc = kvm_read_c0_guest_errorepc(cop0); in kvm_mips_emul_eret()
766 vcpu->arch.pc); in kvm_mips_emul_eret()
775 kvm_debug("[%#lx] !!!WAIT!!! (%#lx)\n", vcpu->arch.pc, in kvm_mips_emul_wait()
776 vcpu->arch.pending_exceptions); in kvm_mips_emul_wait()
780 if (!vcpu->arch.pending_exceptions) { in kvm_mips_emul_wait()
781 vcpu->arch.wait = 1; in kvm_mips_emul_wait()
803 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emul_tlbr()
804 uint32_t pc = vcpu->arch.pc; in kvm_mips_emul_tlbr()
813 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emul_tlbwi()
816 uint32_t pc = vcpu->arch.pc; in kvm_mips_emul_tlbwi()
828 tlb = &vcpu->arch.guest_tlb[index]; in kvm_mips_emul_tlbwi()
852 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emul_tlbwr()
854 uint32_t pc = vcpu->arch.pc; in kvm_mips_emul_tlbwr()
860 tlb = &vcpu->arch.guest_tlb[index]; in kvm_mips_emul_tlbwr()
883 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emul_tlbp()
885 uint32_t pc = vcpu->arch.pc; in kvm_mips_emul_tlbp()
910 if (kvm_mips_guest_can_have_fpu(&vcpu->arch)) in kvm_mips_config1_wrmask()
929 if (kvm_mips_guest_can_have_msa(&vcpu->arch)) in kvm_mips_config3_wrmask()
960 if (kvm_mips_guest_has_msa(&vcpu->arch)) in kvm_mips_config5_wrmask()
967 if (kvm_mips_guest_has_fpu(&vcpu->arch)) { in kvm_mips_config5_wrmask()
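The kvm_mips_config*_wrmask() helpers compute which bits of each Config register the guest may change, widening the mask when the guest has FPU or MSA. A write is then presumably folded in so that only masked bits change; a tiny illustrative sketch with a hypothetical mask value:

#include <stdint.h>
#include <stdio.h>

/* Illustrative: only bits set in the wrmask may be changed by the
 * guest; everything else keeps its current value. */
static uint32_t apply_wrmask(uint32_t old, uint32_t guest_val, uint32_t wrmask)
{
	return (old & ~wrmask) | (guest_val & wrmask);
}

int main(void)
{
	uint32_t config5 = 0x00000004;
	uint32_t wrmask  = 0x00000001;	/* hypothetical: only bit 0 writable */

	/* Guest tries to flip every bit; only bit 0 actually changes. */
	printf("%#x\n", apply_wrmask(config5, 0xffffffff, wrmask)); /* 0x5 */
	return 0;
}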
980 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_CP0()
983 uint32_t pc = vcpu->arch.pc; in kvm_mips_emulate_CP0()
990 curr_pc = vcpu->arch.pc; in kvm_mips_emulate_CP0()
1036 vcpu->arch.gprs[rt] = kvm_mips_read_count(vcpu); in kvm_mips_emulate_CP0()
1038 vcpu->arch.gprs[rt] = 0x0; in kvm_mips_emulate_CP0()
1043 vcpu->arch.gprs[rt] = cop0->reg[rd][sel]; in kvm_mips_emulate_CP0()
1052 pc, rd, sel, rt, vcpu->arch.gprs[rt]); in kvm_mips_emulate_CP0()
1057 vcpu->arch.gprs[rt] = cop0->reg[rd][sel]; in kvm_mips_emulate_CP0()
1065 && (vcpu->arch.gprs[rt] >= in kvm_mips_emulate_CP0()
1068 vcpu->arch.gprs[rt]); in kvm_mips_emulate_CP0()
1077 vcpu->arch.gprs[rt]); in kvm_mips_emulate_CP0()
1082 vcpu->arch.gprs[rt] & ASID_MASK; in kvm_mips_emulate_CP0()
1083 if ((KSEGX(vcpu->arch.gprs[rt]) != CKSEG0) && in kvm_mips_emulate_CP0()
1089 vcpu->arch.gprs[rt] in kvm_mips_emulate_CP0()
1096 vcpu->arch.gprs[rt]); in kvm_mips_emulate_CP0()
1100 kvm_mips_write_count(vcpu, vcpu->arch.gprs[rt]); in kvm_mips_emulate_CP0()
1105 vcpu->arch.gprs[rt]); in kvm_mips_emulate_CP0()
1110 vcpu->arch.gprs[rt], in kvm_mips_emulate_CP0()
1116 val = vcpu->arch.gprs[rt]; in kvm_mips_emulate_CP0()
1127 if (!kvm_mips_guest_has_fpu(&vcpu->arch)) in kvm_mips_emulate_CP0()
1158 vcpu->arch.fpu_inuse & KVM_MIPS_FPU_MSA) in kvm_mips_emulate_CP0()
1169 vcpu->arch.fpu_inuse & KVM_MIPS_FPU_FPU) in kvm_mips_emulate_CP0()
1181 if (!kvm_mips_guest_has_fpu(&vcpu->arch)) in kvm_mips_emulate_CP0()
1188 val = vcpu->arch.gprs[rt]; in kvm_mips_emulate_CP0()
1204 vcpu->arch.fpu_inuse & KVM_MIPS_FPU_FPU) in kvm_mips_emulate_CP0()
1214 vcpu->arch.fpu_inuse & KVM_MIPS_FPU_MSA) in kvm_mips_emulate_CP0()
1225 new_cause = vcpu->arch.gprs[rt]; in kvm_mips_emulate_CP0()
1237 cop0->reg[rd][sel] = vcpu->arch.gprs[rt]; in kvm_mips_emulate_CP0()
1249 vcpu->arch.pc, rt, rd, sel); in kvm_mips_emulate_CP0()
1258 vcpu->arch.gprs[rt] = in kvm_mips_emulate_CP0()
1264 vcpu->arch.pc); in kvm_mips_emulate_CP0()
1268 vcpu->arch.pc); in kvm_mips_emulate_CP0()
1289 vcpu->arch.gprs[rt]); in kvm_mips_emulate_CP0()
1290 vcpu->arch.gprs[rd] = vcpu->arch.gprs[rt]; in kvm_mips_emulate_CP0()
1295 vcpu->arch.pc, copz); in kvm_mips_emulate_CP0()
1304 vcpu->arch.pc = curr_pc; in kvm_mips_emulate_CP0()
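kvm_mips_emulate_CP0() snapshots curr_pc = vcpu->arch.pc before advancing, and restores it (line 1304) when emulation cannot complete, so the instruction can fault again cleanly; kvm_mips_emulate_store() (lines 1330/1423) and kvm_mips_emulate_cache() (lines 1588/1721) use the same idiom. A runnable model of the save/advance/rollback pattern, with invented names:

#include <stdio.h>

struct vcpu { unsigned long pc; };

enum res { EMULATE_DONE, EMULATE_FAIL };

static enum res emulate_insn(struct vcpu *vcpu, int handled)
{
	unsigned long curr_pc = vcpu->pc;	/* snapshot before advancing */

	vcpu->pc += 4;				/* update_pc(): move past insn */

	if (!handled) {
		/* Unhandled case: rewind so the guest re-faults here. */
		vcpu->pc = curr_pc;
		return EMULATE_FAIL;
	}
	return EMULATE_DONE;
}

int main(void)
{
	struct vcpu v = { .pc = 0x80002000 };

	emulate_insn(&v, 1);
	printf("handled:   pc=%#lx\n", v.pc);	/* advanced to ...2004 */
	emulate_insn(&v, 0);
	printf("unhandled: pc=%#lx\n", v.pc);	/* rolled back to ...2004 */
	return 0;
}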
1330 curr_pc = vcpu->arch.pc; in kvm_mips_emulate_store()
1348 kvm_mips_callbacks->gva_to_gpa(vcpu->arch. in kvm_mips_emulate_store()
1358 *(u8 *) data = vcpu->arch.gprs[rt]; in kvm_mips_emulate_store()
1360 vcpu->arch.host_cp0_badvaddr, vcpu->arch.gprs[rt], in kvm_mips_emulate_store()
1372 kvm_mips_callbacks->gva_to_gpa(vcpu->arch. in kvm_mips_emulate_store()
1383 *(uint32_t *) data = vcpu->arch.gprs[rt]; in kvm_mips_emulate_store()
1386 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1387 vcpu->arch.gprs[rt], *(uint32_t *) data); in kvm_mips_emulate_store()
1397 kvm_mips_callbacks->gva_to_gpa(vcpu->arch. in kvm_mips_emulate_store()
1408 *(uint16_t *) data = vcpu->arch.gprs[rt]; in kvm_mips_emulate_store()
1411 vcpu->arch.pc, vcpu->arch.host_cp0_badvaddr, in kvm_mips_emulate_store()
1412 vcpu->arch.gprs[rt], *(uint32_t *) data); in kvm_mips_emulate_store()
1423 vcpu->arch.pc = curr_pc; in kvm_mips_emulate_store()
1441 vcpu->arch.pending_load_cause = cause; in kvm_mips_emulate_load()
1442 vcpu->arch.io_gpr = rt; in kvm_mips_emulate_load()
1454 kvm_mips_callbacks->gva_to_gpa(vcpu->arch. in kvm_mips_emulate_load()
1477 kvm_mips_callbacks->gva_to_gpa(vcpu->arch. in kvm_mips_emulate_load()
1506 kvm_mips_callbacks->gva_to_gpa(vcpu->arch. in kvm_mips_emulate_load()
1543 if (gfn >= kvm->arch.guest_pmap_npages) { in kvm_mips_sync_icache()
1549 pfn = kvm->arch.guest_pmap[gfn]; in kvm_mips_sync_icache()
1577 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_cache()
1580 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_cache() local
1588 curr_pc = vcpu->arch.pc; in kvm_mips_emulate_cache()
1599 va = arch->gprs[base] + offset; in kvm_mips_emulate_cache()
1602 cache, op, base, arch->gprs[base], offset); in kvm_mips_emulate_cache()
1611 vcpu->arch.pc, vcpu->arch.gprs[31], cache, op, base, in kvm_mips_emulate_cache()
1612 arch->gprs[base], offset); in kvm_mips_emulate_cache()
1651 vcpu->arch.host_cp0_entryhi = (va & VPN2_MASK); in kvm_mips_emulate_cache()
1652 vcpu->arch.host_cp0_badvaddr = va; in kvm_mips_emulate_cache()
1658 struct kvm_mips_tlb *tlb = &vcpu->arch.guest_tlb[index]; in kvm_mips_emulate_cache()
1680 cache, op, base, arch->gprs[base], offset); in kvm_mips_emulate_cache()
1711 cache, op, base, arch->gprs[base], offset); in kvm_mips_emulate_cache()
1721 vcpu->arch.pc = curr_pc; in kvm_mips_emulate_cache()
1778 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_syscall()
1779 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_syscall() local
1784 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_syscall()
1792 kvm_debug("Delivering SYSCALL @ pc %#lx\n", arch->pc); in kvm_mips_emulate_syscall()
1798 arch->pc = KVM_GUEST_KSEG0 + 0x180; in kvm_mips_emulate_syscall()
1813 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_tlbmiss_ld()
1814 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_tlbmiss_ld() local
1815 unsigned long entryhi = (vcpu->arch.host_cp0_badvaddr & VPN2_MASK) | in kvm_mips_emulate_tlbmiss_ld()
1820 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_tlbmiss_ld()
1829 arch->pc); in kvm_mips_emulate_tlbmiss_ld()
1832 arch->pc = KVM_GUEST_KSEG0 + 0x0; in kvm_mips_emulate_tlbmiss_ld()
1836 arch->pc); in kvm_mips_emulate_tlbmiss_ld()
1838 arch->pc = KVM_GUEST_KSEG0 + 0x180; in kvm_mips_emulate_tlbmiss_ld()
1845 kvm_write_c0_guest_badvaddr(cop0, vcpu->arch.host_cp0_badvaddr); in kvm_mips_emulate_tlbmiss_ld()
1859 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_tlbinv_ld()
1860 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_tlbinv_ld() local
1862 (vcpu->arch.host_cp0_badvaddr & VPN2_MASK) | in kvm_mips_emulate_tlbinv_ld()
1867 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_tlbinv_ld()
1876 arch->pc); in kvm_mips_emulate_tlbinv_ld()
1879 arch->pc = KVM_GUEST_KSEG0 + 0x180; in kvm_mips_emulate_tlbinv_ld()
1883 arch->pc); in kvm_mips_emulate_tlbinv_ld()
1884 arch->pc = KVM_GUEST_KSEG0 + 0x180; in kvm_mips_emulate_tlbinv_ld()
1891 kvm_write_c0_guest_badvaddr(cop0, vcpu->arch.host_cp0_badvaddr); in kvm_mips_emulate_tlbinv_ld()
1905 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_tlbmiss_st()
1906 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_tlbmiss_st() local
1907 unsigned long entryhi = (vcpu->arch.host_cp0_badvaddr & VPN2_MASK) | in kvm_mips_emulate_tlbmiss_st()
1912 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_tlbmiss_st()
1921 arch->pc); in kvm_mips_emulate_tlbmiss_st()
1924 arch->pc = KVM_GUEST_KSEG0 + 0x0; in kvm_mips_emulate_tlbmiss_st()
1927 arch->pc); in kvm_mips_emulate_tlbmiss_st()
1928 arch->pc = KVM_GUEST_KSEG0 + 0x180; in kvm_mips_emulate_tlbmiss_st()
1935 kvm_write_c0_guest_badvaddr(cop0, vcpu->arch.host_cp0_badvaddr); in kvm_mips_emulate_tlbmiss_st()
1949 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_tlbinv_st()
1950 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_tlbinv_st() local
1951 unsigned long entryhi = (vcpu->arch.host_cp0_badvaddr & VPN2_MASK) | in kvm_mips_emulate_tlbinv_st()
1956 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_tlbinv_st()
1965 arch->pc); in kvm_mips_emulate_tlbinv_st()
1968 arch->pc = KVM_GUEST_KSEG0 + 0x180; in kvm_mips_emulate_tlbinv_st()
1971 arch->pc); in kvm_mips_emulate_tlbinv_st()
1972 arch->pc = KVM_GUEST_KSEG0 + 0x180; in kvm_mips_emulate_tlbinv_st()
1979 kvm_write_c0_guest_badvaddr(cop0, vcpu->arch.host_cp0_badvaddr); in kvm_mips_emulate_tlbinv_st()
1995 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_handle_tlbmod()
1996 unsigned long entryhi = (vcpu->arch.host_cp0_badvaddr & VPN2_MASK) | in kvm_mips_handle_tlbmod()
2004 kvm_mips_host_tlb_inv(vcpu, vcpu->arch.host_cp0_badvaddr); in kvm_mips_handle_tlbmod()
2022 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_tlbmod()
2023 unsigned long entryhi = (vcpu->arch.host_cp0_badvaddr & VPN2_MASK) | in kvm_mips_emulate_tlbmod()
2025 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_tlbmod() local
2029 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_tlbmod()
2038 arch->pc); in kvm_mips_emulate_tlbmod()
2040 arch->pc = KVM_GUEST_KSEG0 + 0x180; in kvm_mips_emulate_tlbmod()
2043 arch->pc); in kvm_mips_emulate_tlbmod()
2044 arch->pc = KVM_GUEST_KSEG0 + 0x180; in kvm_mips_emulate_tlbmod()
2050 kvm_write_c0_guest_badvaddr(cop0, vcpu->arch.host_cp0_badvaddr); in kvm_mips_emulate_tlbmod()
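The kvm_mips_emulate_tlb*() helpers above share one delivery shape: when guest Status.EXL is clear, the current PC is saved to EPC, EXL is set, and execution vectors to KVM_GUEST_KSEG0 + 0x0 (TLB refill) or + 0x180 (general exception); when EXL is already set, the general vector at + 0x180 is always used. A runnable model of that control flow; KSEG0_BASE is a stand-in constant, not the real KVM_GUEST_KSEG0:

#include <stdint.h>
#include <stdio.h>

#define ST0_EXL    0x00000002
#define KSEG0_BASE 0x80000000UL	/* stand-in for KVM_GUEST_KSEG0 */

struct gcpu {
	uint32_t status;
	unsigned long epc;
	unsigned long pc;
};

static void deliver_tlb_refill(struct gcpu *c)
{
	if (!(c->status & ST0_EXL)) {
		/* First (non-nested) exception: save the return point and
		 * take the dedicated refill vector at offset 0x0. */
		c->epc = c->pc;
		c->status |= ST0_EXL;
		c->pc = KSEG0_BASE + 0x0;
	} else {
		/* Nested (EXL already set): general vector at 0x180. */
		c->pc = KSEG0_BASE + 0x180;
	}
}

int main(void)
{
	struct gcpu c = { .status = 0, .pc = 0x00400100 };

	deliver_tlb_refill(&c);
	printf("epc=%#lx pc=%#lx\n", c.epc, c.pc);
	deliver_tlb_refill(&c);		/* nested: goes to +0x180 */
	printf("pc=%#lx\n", c.pc);
	return 0;
}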
2064 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_fpu_exc()
2065 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_fpu_exc() local
2069 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_fpu_exc()
2079 arch->pc = KVM_GUEST_KSEG0 + 0x180; in kvm_mips_emulate_fpu_exc()
2093 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_ri_exc()
2094 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_ri_exc() local
2099 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_ri_exc()
2107 kvm_debug("Delivering RI @ pc %#lx\n", arch->pc); in kvm_mips_emulate_ri_exc()
2113 arch->pc = KVM_GUEST_KSEG0 + 0x180; in kvm_mips_emulate_ri_exc()
2128 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_bp_exc()
2129 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_bp_exc() local
2134 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_bp_exc()
2142 kvm_debug("Delivering BP @ pc %#lx\n", arch->pc); in kvm_mips_emulate_bp_exc()
2148 arch->pc = KVM_GUEST_KSEG0 + 0x180; in kvm_mips_emulate_bp_exc()
2163 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_trap_exc()
2164 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_trap_exc() local
2169 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_trap_exc()
2177 kvm_debug("Delivering TRAP @ pc %#lx\n", arch->pc); in kvm_mips_emulate_trap_exc()
2183 arch->pc = KVM_GUEST_KSEG0 + 0x180; in kvm_mips_emulate_trap_exc()
2198 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_msafpe_exc()
2199 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_msafpe_exc() local
2204 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_msafpe_exc()
2212 kvm_debug("Delivering MSAFPE @ pc %#lx\n", arch->pc); in kvm_mips_emulate_msafpe_exc()
2218 arch->pc = KVM_GUEST_KSEG0 + 0x180; in kvm_mips_emulate_msafpe_exc()
2233 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_fpe_exc()
2234 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_fpe_exc() local
2239 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_fpe_exc()
2247 kvm_debug("Delivering FPE @ pc %#lx\n", arch->pc); in kvm_mips_emulate_fpe_exc()
2253 arch->pc = KVM_GUEST_KSEG0 + 0x180; in kvm_mips_emulate_fpe_exc()
2268 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_msadis_exc()
2269 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_msadis_exc() local
2274 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_msadis_exc()
2282 kvm_debug("Delivering MSADIS @ pc %#lx\n", arch->pc); in kvm_mips_emulate_msadis_exc()
2288 arch->pc = KVM_GUEST_KSEG0 + 0x180; in kvm_mips_emulate_msadis_exc()
2317 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_handle_ri()
2318 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_handle_ri() local
2327 curr_pc = vcpu->arch.pc; in kvm_mips_handle_ri()
2355 arch->gprs[rt] = 0; in kvm_mips_handle_ri()
2358 arch->gprs[rt] = min(current_cpu_data.dcache.linesz, in kvm_mips_handle_ri()
2362 arch->gprs[rt] = kvm_mips_read_count(vcpu); in kvm_mips_handle_ri()
2368 arch->gprs[rt] = 1; in kvm_mips_handle_ri()
2371 arch->gprs[rt] = 2; in kvm_mips_handle_ri()
2375 arch->gprs[rt] = kvm_read_c0_guest_userlocal(cop0); in kvm_mips_handle_ri()
2394 vcpu->arch.pc = curr_pc; in kvm_mips_handle_ri()
2401 unsigned long *gpr = &vcpu->arch.gprs[vcpu->arch.io_gpr]; in kvm_mips_complete_mmio_load()
2410 er = update_pc(vcpu, vcpu->arch.pending_load_cause); in kvm_mips_complete_mmio_load()
2434 if (vcpu->arch.pending_load_cause & CAUSEF_BD) in kvm_mips_complete_mmio_load()
2436 vcpu->arch.pc, run->mmio.len, vcpu->arch.io_gpr, *gpr, in kvm_mips_complete_mmio_load()
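The load path records io_gpr and pending_load_cause (lines 1441-1442), exits to userspace for the MMIO access, and kvm_mips_complete_mmio_load() later writes the returned bytes into gprs[io_gpr], replaying update_pc() with the saved cause so delay-slot loads resume correctly. A small sketch of the width-dependent write-back; the sign-extension choices are illustrative (unsigned load variants would differ):

#include <stdint.h>
#include <stdio.h>

/* Illustrative: fold the MMIO result back into the saved target GPR
 * with width-appropriate sign extension. */
static long complete_load(const void *data, int len)
{
	long gpr = 0;

	switch (len) {
	case 1: gpr = *(const int8_t  *)data; break;	/* LB: sign-extend */
	case 2: gpr = *(const int16_t *)data; break;	/* LH: sign-extend */
	case 4: gpr = *(const int32_t *)data; break;	/* LW */
	}
	return gpr;
}

int main(void)
{
	uint8_t byte = 0x80;
	uint16_t half = 0x8000;

	/* prints -128 -32768 */
	printf("%ld %ld\n", complete_load(&byte, 1), complete_load(&half, 2));
	return 0;
}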
2449 struct mips_coproc *cop0 = vcpu->arch.cop0; in kvm_mips_emulate_exc()
2450 struct kvm_vcpu_arch *arch = &vcpu->arch; in kvm_mips_emulate_exc() local
2455 kvm_write_c0_guest_epc(cop0, arch->pc); in kvm_mips_emulate_exc()
2467 arch->pc = KVM_GUEST_KSEG0 + 0x180; in kvm_mips_emulate_exc()
2468 kvm_write_c0_guest_badvaddr(cop0, vcpu->arch.host_cp0_badvaddr); in kvm_mips_emulate_exc()
2488 unsigned long badvaddr = vcpu->arch.host_cp0_badvaddr; in kvm_mips_check_privilege()
2584 unsigned long va = vcpu->arch.host_cp0_badvaddr; in kvm_mips_handle_tlbmiss()
2588 vcpu->arch.host_cp0_badvaddr, vcpu->arch.host_cp0_entryhi); in kvm_mips_handle_tlbmiss()
2599 (vcpu->arch.cop0) & ASID_MASK)); in kvm_mips_handle_tlbmiss()
2611 struct kvm_mips_tlb *tlb = &vcpu->arch.guest_tlb[index]; in kvm_mips_handle_tlbmiss()