Searched refs: smsr (results 1 - 2 of 2, sorted by relevance)

/linux-4.4.14/arch/powerpc/kvm/book3s_pr.c, in kvmppc_recalc_shadow_msr():
  322  ulong smsr = guest_msr;
  325  smsr &= MSR_FE0 | MSR_FE1 | MSR_SF | MSR_SE | MSR_BE | MSR_LE;
  327  smsr |= MSR_ME | MSR_RI | MSR_IR | MSR_DR | MSR_PR | MSR_EE;
  329  smsr |= (guest_msr & vcpu->arch.guest_owned_ext);
  332  smsr |= MSR_ISF | MSR_HV;
  334  vcpu->arch.shadow_msr = smsr;
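
The PowerPC hits above show kvmppc_recalc_shadow_msr() building the shadow MSR by masking and ORing groups of bits: keep only the guest-controllable bits of the guest MSR, force on the bits the host always needs, then pass through any facility bits the guest currently owns. The minimal userspace C sketch below illustrates that pattern only; the MSR_* values are placeholder constants rather than the real PowerPC bit positions, guest_owned_ext is passed in directly instead of being read from the vcpu, and the 64-bit-host step that ORs in MSR_ISF | MSR_HV is omitted.

  /*
   * Sketch of the masking pattern above. The MSR_* constants are
   * illustrative stand-ins, not the real PowerPC bit positions.
   */
  #include <stdint.h>
  #include <stdio.h>

  #define MSR_FE0 (1UL << 0)   /* placeholder bit values for illustration */
  #define MSR_FE1 (1UL << 1)
  #define MSR_SF  (1UL << 2)
  #define MSR_SE  (1UL << 3)
  #define MSR_BE  (1UL << 4)
  #define MSR_LE  (1UL << 5)
  #define MSR_ME  (1UL << 6)
  #define MSR_RI  (1UL << 7)
  #define MSR_IR  (1UL << 8)
  #define MSR_DR  (1UL << 9)
  #define MSR_PR  (1UL << 10)
  #define MSR_EE  (1UL << 11)
  #define MSR_FP  (1UL << 12)  /* example "guest owned extension" bit */

  static uint64_t recalc_shadow_msr(uint64_t guest_msr, uint64_t guest_owned_ext)
  {
          uint64_t smsr = guest_msr;

          /* Keep only the guest-controllable bits of the guest MSR. */
          smsr &= MSR_FE0 | MSR_FE1 | MSR_SF | MSR_SE | MSR_BE | MSR_LE;
          /* The host always runs the guest with these bits set. */
          smsr |= MSR_ME | MSR_RI | MSR_IR | MSR_DR | MSR_PR | MSR_EE;
          /* Pass through facility bits (e.g. FP) the guest currently owns. */
          smsr |= guest_msr & guest_owned_ext;
          return smsr;
  }

  int main(void)
  {
          uint64_t guest_msr = MSR_SF | MSR_LE | MSR_FP | MSR_SE;

          printf("shadow msr = %#llx\n",
                 (unsigned long long)recalc_shadow_msr(guest_msr, MSR_FP));
          return 0;
  }
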
/linux-4.4.14/arch/x86/kvm/x86.c, in shared_msr_update():
  218  struct kvm_shared_msrs *smsr = per_cpu_ptr(shared_msrs, cpu);
  227  smsr->values[slot].host = value;
  228  smsr->values[slot].curr = value;
in kvm_set_shared_msr():
  251  struct kvm_shared_msrs *smsr = per_cpu_ptr(shared_msrs, cpu);
  254  if (((value ^ smsr->values[slot].curr) & mask) == 0)
  256  smsr->values[slot].curr = value;
  261  if (!smsr->registered) {
  262  smsr->urn.on_user_return = kvm_on_user_return;
  263  user_return_notifier_register(&smsr->urn);
  264  smsr->registered = true;
in drop_user_return_notifiers():
  273  struct kvm_shared_msrs *smsr = per_cpu_ptr(shared_msrs, cpu);
  275  if (smsr->registered)
  276  kvm_on_user_return(&smsr->urn);
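
The x86 hits show the shared-MSR bookkeeping: shared_msr_update() records the host value of a slot, kvm_set_shared_msr() rewrites the MSR only when the masked value actually differs from what is currently loaded and registers a user-return notifier the first time a slot diverges, and drop_user_return_notifiers() forces an early restore. The standalone C sketch below models that pattern under simplified assumptions: a single static kvm_shared_msrs instance instead of per-CPU data, a printf stub in place of wrmsrl(), and a plain boolean plus an explicit call in main() in place of the user_return_notifier machinery. The masked compare is the point of the pattern: it skips the comparatively expensive MSR write when nothing relevant changed.

  /*
   * Simplified model of the shared-MSR pattern, not the kernel code:
   * remember host values, write a slot only when the masked value changes,
   * and restore host values before "returning to userspace".
   */
  #include <stdbool.h>
  #include <stdint.h>
  #include <stdio.h>

  #define KVM_NR_SHARED_MSRS 4

  struct kvm_shared_msr_values {
          uint64_t host;
          uint64_t curr;
  };

  struct kvm_shared_msrs {
          bool registered;
          struct kvm_shared_msr_values values[KVM_NR_SHARED_MSRS];
  };

  static struct kvm_shared_msrs shared_msrs;  /* one instance per CPU in the kernel */

  static void wrmsrl(unsigned slot, uint64_t value)  /* stub for the real MSR write */
  {
          printf("wrmsr slot %u <- %#llx\n", slot, (unsigned long long)value);
  }

  /* Restore every diverged slot to its host value (the user-return callback). */
  static void kvm_on_user_return(struct kvm_shared_msrs *smsr)
  {
          for (unsigned slot = 0; slot < KVM_NR_SHARED_MSRS; slot++) {
                  if (smsr->values[slot].curr != smsr->values[slot].host) {
                          wrmsrl(slot, smsr->values[slot].host);
                          smsr->values[slot].curr = smsr->values[slot].host;
                  }
          }
          smsr->registered = false;
  }

  /* Record the host's value for a slot (mirrors shared_msr_update()). */
  static void shared_msr_update(unsigned slot, uint64_t value)
  {
          struct kvm_shared_msrs *smsr = &shared_msrs;

          smsr->values[slot].host = value;
          smsr->values[slot].curr = value;
  }

  /* Write a guest value only if the masked bits differ from what is loaded. */
  static void kvm_set_shared_msr(unsigned slot, uint64_t value, uint64_t mask)
  {
          struct kvm_shared_msrs *smsr = &shared_msrs;

          if (((value ^ smsr->values[slot].curr) & mask) == 0)
                  return;
          smsr->values[slot].curr = value;
          wrmsrl(slot, value);
          if (!smsr->registered)
                  smsr->registered = true;  /* kernel: register the user-return notifier here */
  }

  int main(void)
  {
          shared_msr_update(0, 0x10);
          kvm_set_shared_msr(0, 0x20, ~0ULL);  /* differs: one real write */
          kvm_set_shared_msr(0, 0x20, ~0ULL);  /* unchanged under mask: skipped */
          kvm_on_user_return(&shared_msrs);    /* host value restored */
          return 0;
  }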
