Searched refs:smsr (Results 1 – 2 of 2) sorted by relevance
322  ulong smsr = guest_msr;                                          in kvmppc_recalc_shadow_msr() local
325  smsr &= MSR_FE0 | MSR_FE1 | MSR_SF | MSR_SE | MSR_BE | MSR_LE;   in kvmppc_recalc_shadow_msr()
327  smsr |= MSR_ME | MSR_RI | MSR_IR | MSR_DR | MSR_PR | MSR_EE;     in kvmppc_recalc_shadow_msr()
329  smsr |= (guest_msr & vcpu->arch.guest_owned_ext);                in kvmppc_recalc_shadow_msr()
332  smsr |= MSR_ISF | MSR_HV;                                        in kvmppc_recalc_shadow_msr()
334  vcpu->arch.shadow_msr = smsr;                                    in kvmppc_recalc_shadow_msr()
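This first result appears to be the shadow-MSR recalculation in KVM's PowerPC Book3S code: start from the guest MSR, keep only a whitelist of guest-controlled bits, force on the bits the host requires while the guest runs, and mirror any facilities the guest currently owns. Below is a minimal standalone C sketch of that bit manipulation, assuming made-up MSR_* bit positions (the real definitions and the 64-bit-only MSR_ISF | MSR_HV step live in the kernel headers and are not reproduced here):

/*
 * Standalone sketch, not the kernel source.  All MSR_* values are
 * illustrative placeholders, not the real PowerPC bit definitions.
 */
#include <stdint.h>
#include <stdio.h>

#define MSR_FE0  (1ULL << 11)   /* FP exception mode 0 (placeholder) */
#define MSR_FE1  (1ULL << 8)    /* FP exception mode 1 (placeholder) */
#define MSR_SF   (1ULL << 63)   /* 64-bit mode (placeholder) */
#define MSR_SE   (1ULL << 10)   /* single step (placeholder) */
#define MSR_BE   (1ULL << 9)    /* branch trace (placeholder) */
#define MSR_LE   (1ULL << 0)    /* little endian (placeholder) */
#define MSR_ME   (1ULL << 12)   /* machine check enable (placeholder) */
#define MSR_RI   (1ULL << 1)    /* recoverable interrupt (placeholder) */
#define MSR_IR   (1ULL << 5)    /* instruction relocate (placeholder) */
#define MSR_DR   (1ULL << 4)    /* data relocate (placeholder) */
#define MSR_PR   (1ULL << 14)   /* problem state (placeholder) */
#define MSR_EE   (1ULL << 15)   /* external interrupt enable (placeholder) */

static uint64_t recalc_shadow_msr(uint64_t guest_msr, uint64_t guest_owned_ext)
{
    uint64_t smsr = guest_msr;

    /* Keep only the guest bits the host is willing to pass through. */
    smsr &= MSR_FE0 | MSR_FE1 | MSR_SF | MSR_SE | MSR_BE | MSR_LE;

    /* Force the bits the host always needs while the guest runs. */
    smsr |= MSR_ME | MSR_RI | MSR_IR | MSR_DR | MSR_PR | MSR_EE;

    /* Mirror facility bits (FP/VEC/...) the guest currently owns. */
    smsr |= guest_msr & guest_owned_ext;

    return smsr;
}

int main(void)
{
    uint64_t shadow = recalc_shadow_msr(MSR_SF | MSR_LE | MSR_SE, 0);
    printf("shadow msr = 0x%016llx\n", (unsigned long long)shadow);
    return 0;
}

The net effect is that the value actually loaded into the hardware MSR never lets the guest clear host-mandatory protection bits, while still honouring its debug/endian/FP-exception settings.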
218  struct kvm_shared_msrs *smsr = per_cpu_ptr(shared_msrs, cpu);   in shared_msr_update() local
227  smsr->values[slot].host = value;                                in shared_msr_update()
228  smsr->values[slot].curr = value;                                in shared_msr_update()
251  struct kvm_shared_msrs *smsr = per_cpu_ptr(shared_msrs, cpu);   in kvm_set_shared_msr() local
254  if (((value ^ smsr->values[slot].curr) & mask) == 0)            in kvm_set_shared_msr()
256  smsr->values[slot].curr = value;                                in kvm_set_shared_msr()
261  if (!smsr->registered) {                                        in kvm_set_shared_msr()
262  smsr->urn.on_user_return = kvm_on_user_return;                  in kvm_set_shared_msr()
263  user_return_notifier_register(&smsr->urn);                      in kvm_set_shared_msr()
264  smsr->registered = true;                                        in kvm_set_shared_msr()
[all …]
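This second result appears to be the x86 KVM "shared MSR" machinery: each CPU caches a host value and a current value per MSR slot, a write is skipped when no bit covered by the mask has changed, and a user-return notifier is registered lazily the first time a shared MSR diverges from the host value. The following standalone C sketch illustrates that pattern under stated assumptions; the names, the hw_write_msr()/register_user_return_callback() stand-ins, and the slot count are hypothetical, not the real KVM API:

/*
 * Standalone sketch, not the kernel source: cache-and-skip MSR writes
 * plus lazy registration of a user-return callback.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define SHARED_MSR_SLOTS 4

struct shared_msr_value {
    uint64_t host;   /* value to restore on return to userspace */
    uint64_t curr;   /* value currently loaded in the MSR */
};

struct shared_msrs {
    bool registered;                              /* callback armed? */
    struct shared_msr_value values[SHARED_MSR_SLOTS];
};

static struct shared_msrs this_cpu_msrs;          /* stand-in for per-CPU data */

/* Stand-in for the hardware MSR write (wrmsr). */
static void hw_write_msr(unsigned slot, uint64_t value)
{
    printf("wrmsr slot %u <- 0x%llx\n", slot, (unsigned long long)value);
}

/* Stand-in for registering the restore-on-return-to-userspace hook. */
static void register_user_return_callback(struct shared_msrs *smsr)
{
    printf("user-return callback registered\n");
    smsr->registered = true;
}

static void set_shared_msr(unsigned slot, uint64_t value, uint64_t mask)
{
    struct shared_msrs *smsr = &this_cpu_msrs;

    /* Skip the expensive write if no masked bit actually changed. */
    if (((value ^ smsr->values[slot].curr) & mask) == 0)
        return;

    smsr->values[slot].curr = value;
    hw_write_msr(slot, value);

    /* Arm the restore path once, the first time we diverge from host. */
    if (!smsr->registered)
        register_user_return_callback(smsr);
}

int main(void)
{
    this_cpu_msrs.values[0].host = 0x10;
    this_cpu_msrs.values[0].curr = 0x10;

    set_shared_msr(0, 0x10, ~0ULL);   /* unchanged: write skipped */
    set_shared_msr(0, 0x20, ~0ULL);   /* changed: written, callback armed */
    return 0;
}

The design point the real code makes is the same as the sketch: MSR writes are slow, so the guest value is only programmed when it actually differs, and the host value is restored by the user-return hook rather than on every VM exit.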