Lines matching refs:efer (identifier references to "efer" in arch/x86/kvm/vmx.c)
1721 u64 guest_efer = vmx->vcpu.arch.efer; in update_transition_efer()
1755 (enable_ept && ((vmx->vcpu.arch.efer ^ host_efer) & EFER_NX))) { in update_transition_efer()
2235 if ((index >= 0) && (vmx->vcpu.arch.efer & EFER_SCE)) in setup_msrs()
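update_transition_efer() and setup_msrs() decide how the guest's EFER reaches hardware: when the "load IA32_EFER" controls are usable, or when EPT is enabled and guest and host disagree on EFER.NX, EFER is switched atomically at VM transitions; otherwise it goes through the guest MSR save/restore list, and the SYSCALL-related MSR setup is skipped unless the guest has EFER.SCE set. Below is a minimal, self-contained sketch of that decision, not kernel code: cpu_has_load_ia32_efer, enable_ept and host_efer are stand-ins for KVM's globals, and the bit values follow the architectural MSR_EFER layout.

/*
 * Hedged sketch (userspace demo, not KVM code) of the EFER-switch
 * decision visible in update_transition_efer() above.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define EFER_SCE (1ULL << 0)   /* SYSCALL enable */
#define EFER_LME (1ULL << 8)   /* long mode enable */
#define EFER_LMA (1ULL << 10)  /* long mode active */
#define EFER_NX  (1ULL << 11)  /* no-execute enable */

/* Stand-ins for the kernel's configuration state (assumptions). */
static bool cpu_has_load_ia32_efer = false;
static bool enable_ept = true;
static uint64_t host_efer = EFER_SCE | EFER_LME | EFER_LMA | EFER_NX;

/* Returns true if EFER must be switched atomically at VM-entry/VM-exit. */
static bool needs_atomic_efer_switch(uint64_t guest_efer)
{
        /* Mirrors the condition matched at vmx.c line 1755 above. */
        return cpu_has_load_ia32_efer ||
               (enable_ept && ((guest_efer ^ host_efer) & EFER_NX));
}

int main(void)
{
        uint64_t guest_efer = EFER_SCE | EFER_LME | EFER_LMA; /* NX off */

        printf("atomic EFER switch needed: %s\n",
               needs_atomic_efer_switch(guest_efer) ? "yes" : "no");
        return 0;
}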
3417 static void vmx_set_efer(struct kvm_vcpu *vcpu, u64 efer) in vmx_set_efer() argument
3430 vcpu->arch.efer = efer; in vmx_set_efer()
3431 if (efer & EFER_LMA) { in vmx_set_efer()
3433 msr->data = efer; in vmx_set_efer()
3437 msr->data = efer & ~EFER_LME; in vmx_set_efer()
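vmx_set_efer() records the new value in vcpu->arch.efer and then splits on EFER_LMA: with long mode active it keeps the full value for the guest's saved MSR_EFER image (and, in the elided lines, enables the IA-32e-mode VM-entry control); outside long mode it clears EFER_LME from that image. A minimal stand-alone sketch of that split follows; the efer_state struct and apply_efer() are hypothetical stand-ins for KVM's shared MSR entry and the real function.

/* Hedged sketch (not KVM code) of the LMA/LME split in vmx_set_efer(). */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define EFER_LME (1ULL << 8)
#define EFER_LMA (1ULL << 10)

struct efer_state {
        bool     ia32e_mode_entry;  /* IA-32e-mode VM-entry control */
        uint64_t guest_msr_data;    /* value kept for the guest's MSR_EFER */
};

static struct efer_state apply_efer(uint64_t efer)
{
        struct efer_state s;

        if (efer & EFER_LMA) {
                /* Long mode active: enter the guest in IA-32e mode. */
                s.ia32e_mode_entry = true;
                s.guest_msr_data = efer;
        } else {
                /* Outside long mode, hide LME from the saved MSR image. */
                s.ia32e_mode_entry = false;
                s.guest_msr_data = efer & ~EFER_LME;
        }
        return s;
}

int main(void)
{
        struct efer_state s = apply_efer(EFER_LMA | EFER_LME);

        printf("IA-32e entry: %d, MSR image: %#llx\n",
               s.ia32e_mode_entry, (unsigned long long)s.guest_msr_data);
        return 0;
}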
3458 vmx_set_efer(vcpu, vcpu->arch.efer | EFER_LMA); in enter_lmode()
3464 vmx_set_efer(vcpu, vcpu->arch.efer & ~EFER_LMA); in exit_lmode()
3584 if (vcpu->arch.efer & EFER_LME) { in vmx_set_cr0()
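enter_lmode() and exit_lmode() are reached from vmx_set_cr0() when the guest toggles CR0.PG while EFER_LME is set, and simply flip EFER_LMA through vmx_set_efer(). A self-contained sketch of that transition logic, assuming the usual CR0.PG and EFER bit positions; update_lma() is a hypothetical helper, not a KVM function.

/* Hedged sketch (not KVM code): recompute EFER_LMA for a CR0 change. */
#include <stdint.h>
#include <stdio.h>

#define X86_CR0_PG (1ULL << 31)
#define EFER_LME   (1ULL << 8)
#define EFER_LMA   (1ULL << 10)

static uint64_t update_lma(uint64_t efer, uint64_t old_cr0, uint64_t new_cr0)
{
        if (!(efer & EFER_LME))
                return efer;             /* long mode not enabled: nothing to do */

        if (!(old_cr0 & X86_CR0_PG) && (new_cr0 & X86_CR0_PG))
                return efer | EFER_LMA;  /* paging turned on: enter long mode */
        if ((old_cr0 & X86_CR0_PG) && !(new_cr0 & X86_CR0_PG))
                return efer & ~EFER_LMA; /* paging turned off: exit long mode */
        return efer;
}

int main(void)
{
        uint64_t efer = EFER_LME;

        efer = update_lma(efer, 0, X86_CR0_PG);
        printf("after enabling paging: LMA=%d\n", !!(efer & EFER_LMA));
        efer = update_lma(efer, X86_CR0_PG, 0);
        printf("after disabling paging: LMA=%d\n", !!(efer & EFER_LMA));
        return 0;
}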
9337 vcpu->arch.efer = vmcs12->guest_ia32_efer; in prepare_vmcs02()
9339 vcpu->arch.efer |= (EFER_LMA | EFER_LME); in prepare_vmcs02()
9341 vcpu->arch.efer &= ~(EFER_LMA | EFER_LME); in prepare_vmcs02()
9343 vmx_set_efer(vcpu, vcpu->arch.efer); in prepare_vmcs02()
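On nested VM-entry, prepare_vmcs02() picks L2's EFER: if vmcs12's VM-entry controls ask to load IA32_EFER it takes guest_ia32_efer verbatim, otherwise it derives EFER_LMA/EFER_LME from the IA-32e-mode guest entry control, then feeds the result to vmx_set_efer(). A minimal sketch of that selection; nested_entry_efer() and its boolean parameters are stand-ins for the VMCS control checks elided from the matched lines.

/* Hedged sketch (not KVM code) of the nested VM-entry EFER selection. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define EFER_LME (1ULL << 8)
#define EFER_LMA (1ULL << 10)

static uint64_t nested_entry_efer(uint64_t current_efer,
                                  uint64_t vmcs12_guest_efer,
                                  bool entry_load_efer,
                                  bool entry_ia32e_mode)
{
        uint64_t efer = current_efer;

        if (entry_load_efer)
                efer = vmcs12_guest_efer;      /* L1 supplies the full value */
        else if (entry_ia32e_mode)
                efer |= (EFER_LMA | EFER_LME); /* derive from the mode control */
        else
                efer &= ~(EFER_LMA | EFER_LME);
        return efer;
}

int main(void)
{
        uint64_t efer = nested_entry_efer(0, 0, false, true);

        printf("L2 EFER without the load-EFER control: %#llx\n",
               (unsigned long long)efer);
        return 0;
}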
9820 vmcs12->guest_ia32_efer = vcpu->arch.efer; in prepare_vmcs12()
9880 vcpu->arch.efer = vmcs12->host_ia32_efer; in load_vmcs12_host_state()
9882 vcpu->arch.efer |= (EFER_LMA | EFER_LME); in load_vmcs12_host_state()
9884 vcpu->arch.efer &= ~(EFER_LMA | EFER_LME); in load_vmcs12_host_state()
9885 vmx_set_efer(vcpu, vcpu->arch.efer); in load_vmcs12_host_state()
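load_vmcs12_host_state() mirrors this on nested VM-exit, keyed off vmcs12's VM-exit controls instead: "load IA32_EFER" takes host_ia32_efer, otherwise the "host address-space size" control decides whether EFER_LMA/EFER_LME are set for L1. Again a hedged, stand-alone sketch; nested_exit_efer() and its boolean parameters are hypothetical stand-ins for the elided control checks.

/* Hedged sketch (not KVM code) of the nested VM-exit EFER selection. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define EFER_LME (1ULL << 8)
#define EFER_LMA (1ULL << 10)

static uint64_t nested_exit_efer(uint64_t current_efer,
                                 uint64_t vmcs12_host_efer,
                                 bool exit_load_efer,
                                 bool exit_host_addr_space_size)
{
        uint64_t efer = current_efer;

        if (exit_load_efer)
                efer = vmcs12_host_efer;       /* L1's saved host EFER */
        else if (exit_host_addr_space_size)
                efer |= (EFER_LMA | EFER_LME); /* 64-bit L1 host */
        else
                efer &= ~(EFER_LMA | EFER_LME);
        return efer;
}

int main(void)
{
        uint64_t efer = nested_exit_efer(0, 0, false, true);

        printf("L1 EFER after VM-exit without the load-EFER control: %#llx\n",
               (unsigned long long)efer);
        return 0;
}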