Lines matching refs: msr — references to the msr identifier in the Linux KVM PR-mode Book3S code (arch/powerpc/kvm/book3s_pr.c); the leading number on each hit is the source line, and the trailing "in ...()" note names the enclosing function.

55 			     ulong msr);
67 ulong msr = kvmppc_get_msr(vcpu); in kvmppc_is_split_real() local
68 return (msr & (MSR_IR|MSR_DR)) == MSR_DR; in kvmppc_is_split_real()
73 ulong msr = kvmppc_get_msr(vcpu); in kvmppc_fixup_split_real() local
77 if ((msr & (MSR_IR|MSR_DR)) != MSR_DR) in kvmppc_fixup_split_real()
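The hits at lines 67-77 implement the split-real test: the guest is in split real mode when data relocation (MSR_DR) is on but instruction relocation (MSR_IR) is off. A minimal standalone sketch of that predicate, using stand-in bit values (the kernel's definitions live in arch/powerpc/include/asm/reg.h):

#include <stdbool.h>

/* Stand-in values for the MSR relocation bits, for illustration only. */
#define MSR_IR 0x20UL   /* instruction relocation */
#define MSR_DR 0x10UL   /* data relocation */

/* Split real mode: data accesses are translated but instruction fetches
 * are not, i.e. DR is set while IR is clear. */
static bool is_split_real(unsigned long msr)
{
        return (msr & (MSR_IR | MSR_DR)) == MSR_DR;
}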
337 static void kvmppc_set_msr_pr(struct kvm_vcpu *vcpu, u64 msr) in kvmppc_set_msr_pr() argument
342 printk(KERN_INFO "KVM: Set MSR to 0x%llx\n", msr); in kvmppc_set_msr_pr()
345 msr &= to_book3s(vcpu)->msr_mask; in kvmppc_set_msr_pr()
346 kvmppc_set_msr_fast(vcpu, msr); in kvmppc_set_msr_pr()
349 if (msr & MSR_POW) { in kvmppc_set_msr_pr()
356 msr &= ~MSR_POW; in kvmppc_set_msr_pr()
357 kvmppc_set_msr_fast(vcpu, msr); in kvmppc_set_msr_pr()
372 if (!(msr & MSR_PR) && vcpu->arch.magic_page_pa) { in kvmppc_set_msr_pr()
375 if (msr & MSR_DR) in kvmppc_set_msr_pr()
391 !(old_msr & MSR_PR) && !(old_msr & MSR_SF) && (msr & MSR_SF)) { in kvmppc_set_msr_pr()
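Lines 337-391 trace the guest MSR write path in kvmppc_set_msr_pr(): the new value is masked with the vCPU's msr_mask, a set MSR_POW bit is treated as a power-save request and cleared again after wakeup, the magic (hypercall) page is considered when the guest is not in problem state (MSR_PR clear), and a privileged 32-bit to 64-bit (MSR_SF) transition is detected against the old MSR. A condensed, hypothetical model of that flow; the struct and helper below are stand-ins, not the kernel's types:

#include <stdint.h>

/* Stand-in MSR bits for illustration; real definitions are in asm/reg.h. */
#define MSR_POW (1ULL << 18)    /* power management / nap request */
#define MSR_PR  (1ULL << 14)    /* problem state (user mode) */
#define MSR_SF  (1ULL << 63)    /* 64-bit mode */

/* Hypothetical, condensed stand-in for the vCPU state used above. */
struct vcpu_model {
        uint64_t msr;            /* guest-visible MSR */
        uint64_t msr_mask;       /* MSR bits the guest is allowed to set */
        uint64_t magic_page_pa;  /* nonzero if a magic (hypercall) page exists */
};

/* Condensed model of the MSR write path shown above. */
static void set_msr_model(struct vcpu_model *v, uint64_t msr)
{
        uint64_t old_msr = v->msr;

        msr &= v->msr_mask;      /* drop bits the guest may not control */
        v->msr = msr;

        if (msr & MSR_POW) {
                /* POW set: the vCPU naps until an interrupt arrives,
                 * then POW is cleared again. */
                msr &= ~MSR_POW;
                v->msr = msr;
        }

        if (!(msr & MSR_PR) && v->magic_page_pa) {
                /* Privileged guest with a magic page: (re)map it, using
                 * MSR_DR to pick the real-mode or virtual-mode alias. */
        }

        if (!(old_msr & MSR_PR) && !(old_msr & MSR_SF) && (msr & MSR_SF)) {
                /* Privileged 32-bit -> 64-bit transition. */
        }
}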
598 u64 msr = kvmppc_get_msr(vcpu); in kvmppc_handle_pagefault() local
601 kvmppc_set_msr_fast(vcpu, msr | (ssrr1 & 0xf8000000ULL)); in kvmppc_handle_pagefault()
607 u64 msr = kvmppc_get_msr(vcpu); in kvmppc_handle_pagefault() local
611 kvmppc_set_msr_fast(vcpu, msr | (ssrr1 & 0xf8000000ULL)); in kvmppc_handle_pagefault()
648 void kvmppc_giveup_ext(struct kvm_vcpu *vcpu, ulong msr) in kvmppc_giveup_ext() argument
656 if (msr & MSR_VSX) in kvmppc_giveup_ext()
657 msr |= MSR_FP | MSR_VEC; in kvmppc_giveup_ext()
659 msr &= vcpu->arch.guest_owned_ext; in kvmppc_giveup_ext()
660 if (!msr) in kvmppc_giveup_ext()
664 printk(KERN_INFO "Giving up ext 0x%lx\n", msr); in kvmppc_giveup_ext()
667 if (msr & MSR_FP) { in kvmppc_giveup_ext()
673 if (t->regs->msr & MSR_FP) in kvmppc_giveup_ext()
679 if (msr & MSR_VEC) { in kvmppc_giveup_ext()
680 if (current->thread.regs->msr & MSR_VEC) in kvmppc_giveup_ext()
686 vcpu->arch.guest_owned_ext &= ~(msr | MSR_VSX); in kvmppc_giveup_ext()
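Lines 648-686 are kvmppc_giveup_ext(), the path that takes FP/Altivec/VSX state back from the guest: a VSX request implies FP and VEC, the request is intersected with what the guest actually owns, live register state is saved, and the ownership bits (plus MSR_VSX) are cleared. A small standalone model of the mask bookkeeping, with the actual save steps elided:

#include <stdint.h>

/* Stand-in facility bits for illustration; real values come from asm/reg.h. */
#define MSR_FP  (1ULL << 13)
#define MSR_VEC (1ULL << 25)
#define MSR_VSX (1ULL << 23)

/* Model of the mask handling in kvmppc_giveup_ext(); returns the new
 * guest_owned_ext value. The FP/VEC register saves are elided. */
static uint64_t giveup_ext_model(uint64_t guest_owned_ext, uint64_t msr)
{
        if (msr & MSR_VSX)              /* VSX state overlaps FP and VEC */
                msr |= MSR_FP | MSR_VEC;

        msr &= guest_owned_ext;         /* only give up what the guest owns */
        if (!msr)
                return guest_owned_ext; /* nothing to do */

        /* ... save FP state if (msr & MSR_FP), VEC state if (msr & MSR_VEC),
         *     but only when the host thread still has the facility live ... */

        return guest_owned_ext & ~(msr | MSR_VSX);
}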
711 ulong msr) in kvmppc_handle_ext() argument
719 if (!(kvmppc_get_msr(vcpu) & msr)) { in kvmppc_handle_ext()
724 if (msr == MSR_VSX) { in kvmppc_handle_ext()
738 msr = MSR_FP | MSR_VEC | MSR_VSX; in kvmppc_handle_ext()
742 msr &= ~vcpu->arch.guest_owned_ext; in kvmppc_handle_ext()
743 if (!msr) in kvmppc_handle_ext()
747 printk(KERN_INFO "Loading up ext 0x%lx\n", msr); in kvmppc_handle_ext()
750 if (msr & MSR_FP) { in kvmppc_handle_ext()
758 if (msr & MSR_VEC) { in kvmppc_handle_ext()
768 t->regs->msr |= msr; in kvmppc_handle_ext()
769 vcpu->arch.guest_owned_ext |= msr; in kvmppc_handle_ext()
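Lines 711-769 are the inverse path, kvmppc_handle_ext(): if the guest's MSR does not enable the facility, the unavailable interrupt is reflected back to the guest; otherwise a VSX request is widened to FP|VEC|VSX, already-owned bits are filtered out, the state is loaded, and ownership is recorded. A sketch of that decision logic, with the register loads elided and stand-in names:

#include <stdbool.h>
#include <stdint.h>

/* Stand-in facility bits for illustration. */
#define MSR_FP  (1ULL << 13)
#define MSR_VEC (1ULL << 25)
#define MSR_VSX (1ULL << 23)

/* Model of kvmppc_handle_ext()'s decision logic; returns true when the
 * facility-unavailable interrupt should be reflected back to the guest. */
static bool handle_ext_model(uint64_t guest_msr, uint64_t *guest_owned_ext,
                             uint64_t msr)
{
        if (!(guest_msr & msr))
                return true;            /* guest has the facility disabled */

        if (msr == MSR_VSX)             /* VSX needs FP and VEC loaded too */
                msr = MSR_FP | MSR_VEC | MSR_VSX;

        msr &= ~*guest_owned_ext;       /* skip what is already loaded */
        if (!msr)
                return false;

        /* ... load FP state if (msr & MSR_FP), VEC state if (msr & MSR_VEC) ... */

        *guest_owned_ext |= msr;
        return false;
}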
783 lost_ext = vcpu->arch.guest_owned_ext & ~current->thread.regs->msr; in kvmppc_handle_lost_ext()
801 current->thread.regs->msr |= lost_ext; in kvmppc_handle_lost_ext()
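Lines 783-801, kvmppc_handle_lost_ext(), re-enable facilities the guest owns but the host thread dropped (for example across preemption): the guest-owned set is compared with the current thread's MSR and the missing bits are reloaded and OR-ed back in. A minimal model of that mask:

#include <stdint.h>

/* Model of kvmppc_handle_lost_ext(): facilities the guest owns but the host
 * thread no longer has enabled are reloaded before re-entering the guest.
 * Returns the updated thread MSR. */
static uint64_t handle_lost_ext_model(uint64_t guest_owned_ext,
                                      uint64_t thread_msr)
{
        uint64_t lost_ext = guest_owned_ext & ~thread_msr;

        if (!lost_ext)
                return thread_msr;

        /* ... reload FP/VEC/VSX state for each bit set in lost_ext ... */

        return thread_msr | lost_ext;
}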
940 u64 msr = kvmppc_get_msr(vcpu); in kvmppc_handle_exit_pr() local
941 msr |= shadow_srr1 & 0x58000000; in kvmppc_handle_exit_pr()
942 kvmppc_set_msr_fast(vcpu, msr); in kvmppc_handle_exit_pr()
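Lines 598-611 and 940-942 share one pattern: before a storage interrupt is reflected to the guest, fault-cause bits from the host's shadow SRR1 are OR-ed into the guest MSR so they appear in the guest's SRR1 when the interrupt is delivered; the two call sites use different masks (0xf8000000ULL and 0x58000000). The pattern in isolation, as a hypothetical helper:

#include <stdint.h>

/* Hypothetical helper: copy fault-cause bits from the host's shadow SRR1
 * into the guest MSR so they end up in the guest's SRR1 on delivery.
 * The mask depends on the call site, as in the listing above. */
static uint64_t merge_fault_bits(uint64_t guest_msr, uint64_t shadow_srr1,
                                 uint64_t mask)
{
        return guest_msr | (shadow_srr1 & mask);
}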
1490 if (current->thread.regs->msr & MSR_FP) in kvmppc_vcpu_run_pr()
1495 if (current->thread.regs->msr & MSR_VEC) in kvmppc_vcpu_run_pr()
1501 if (current->thread.regs->msr & MSR_VSX) in kvmppc_vcpu_run_pr()
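Lines 1490-1501, in kvmppc_vcpu_run_pr(), flush any live host FP/Altivec/VSX state before switching to the guest so the guest can use those units. A sketch of the three checks, with stand-ins for the kernel's flush helpers (giveup_fpu() and friends):

#include <stdint.h>
#include <stdio.h>

/* Stand-in facility bits for illustration. */
#define MSR_FP  (1ULL << 13)
#define MSR_VEC (1ULL << 25)
#define MSR_VSX (1ULL << 23)

/* Stand-ins for the kernel's facility flush helpers. */
static void flush_fp(void)  { puts("flush host FP state");  }
static void flush_vec(void) { puts("flush host VEC state"); }
static void flush_vsx(void) { puts("flush host VSX state"); }

/* Flush any live host facility state before switching to the guest. */
static void flush_host_facilities(uint64_t thread_msr)
{
        if (thread_msr & MSR_FP)
                flush_fp();
        if (thread_msr & MSR_VEC)
                flush_vec();
        if (thread_msr & MSR_VSX)
                flush_vsx();
}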