EFER_LMA  813   arch/x86/kvm/emulate.c  if (efer & EFER_LMA)
EFER_LMA  1600  arch/x86/kvm/emulate.c  if (!(efer & EFER_LMA))
EFER_LMA  1751  arch/x86/kvm/emulate.c  if (efer & EFER_LMA)
EFER_LMA  2573  arch/x86/kvm/emulate.c  ctxt->ops->set_msr(ctxt, MSR_EFER, val & ~EFER_LMA);
EFER_LMA  2812  arch/x86/kvm/emulate.c  if (efer & EFER_LMA) {
EFER_LMA  2820  arch/x86/kvm/emulate.c  if (efer & EFER_LMA) {
EFER_LMA  2862  arch/x86/kvm/emulate.c  if ((ctxt->mode != X86EMUL_MODE_PROT64) && (efer & EFER_LMA)
EFER_LMA  2879  arch/x86/kvm/emulate.c  if (efer & EFER_LMA) {
EFER_LMA  2888  arch/x86/kvm/emulate.c  ctxt->_eip = (efer & EFER_LMA) ? msr_data : (u32)msr_data;
EFER_LMA  2891  arch/x86/kvm/emulate.c  *reg_write(ctxt, VCPU_REGS_RSP) = (efer & EFER_LMA) ? msr_data :
EFER_LMA  4270  arch/x86/kvm/emulate.c  if (efer & EFER_LMA) {
EFER_LMA  4294  arch/x86/kvm/emulate.c  if ((efer & EFER_LMA) && !(new_val & X86_CR4_PAE))
EFER_LMA  742   arch/x86/kvm/svm.c      if (!(efer & EFER_LMA))
EFER_LMA  2606  arch/x86/kvm/svm.c      vcpu->arch.efer |= EFER_LMA;
EFER_LMA  2607  arch/x86/kvm/svm.c      svm->vmcb->save.efer |= EFER_LMA | EFER_LME;
EFER_LMA  2611  arch/x86/kvm/svm.c      vcpu->arch.efer &= ~EFER_LMA;
EFER_LMA  2612  arch/x86/kvm/svm.c      svm->vmcb->save.efer &= ~(EFER_LMA | EFER_LME);
EFER_LMA  1967  arch/x86/kvm/vmx/nested.c  return vmx->vcpu.arch.efer | (EFER_LMA | EFER_LME);
EFER_LMA  1969  arch/x86/kvm/vmx/nested.c  return vmx->vcpu.arch.efer & ~(EFER_LMA | EFER_LME);
EFER_LMA  2154  arch/x86/kvm/vmx/nested.c  if (guest_efer & EFER_LMA)
EFER_LMA  2679  arch/x86/kvm/vmx/nested.c  ia32e = !!(vcpu->arch.efer & EFER_LMA);
EFER_LMA  2726  arch/x86/kvm/vmx/nested.c  CC(ia32e != !!(vmcs12->host_ia32_efer & EFER_LMA)) ||
EFER_LMA  2806  arch/x86/kvm/vmx/nested.c  CC(ia32e != !!(vmcs12->guest_ia32_efer & EFER_LMA)) ||
EFER_LMA  3805  arch/x86/kvm/vmx/nested.c  vcpu->arch.efer |= (EFER_LMA | EFER_LME);
EFER_LMA  3807  arch/x86/kvm/vmx/nested.c  vcpu->arch.efer &= ~(EFER_LMA | EFER_LME);
EFER_LMA  958   arch/x86/kvm/vmx/vmx.c  ignore_bits |= EFER_LMA | EFER_LME;
EFER_LMA  960   arch/x86/kvm/vmx/vmx.c  if (guest_efer & EFER_LMA)
EFER_LMA  971   arch/x86/kvm/vmx/vmx.c  if (!(guest_efer & EFER_LMA))
EFER_LMA  2774  arch/x86/kvm/vmx/vmx.c  if (efer & EFER_LMA) {
EFER_LMA  2801  arch/x86/kvm/vmx/vmx.c  vmx_set_efer(vcpu, vcpu->arch.efer | EFER_LMA);
EFER_LMA  2807  arch/x86/kvm/vmx/vmx.c  vmx_set_efer(vcpu, vcpu->arch.efer & ~EFER_LMA);
EFER_LMA  5712  arch/x86/kvm/vmx/vmx.c  (cr4 & X86_CR4_PAE) && !(efer & EFER_LMA))
EFER_LMA  90    arch/x86/kvm/x86.c      u64 __read_mostly efer_reserved_bits = ~((u64)(EFER_SCE | EFER_LME | EFER_LMA));
EFER_LMA  1439  arch/x86/kvm/x86.c      if (efer & (EFER_LME | EFER_LMA) &&
EFER_LMA  1475  arch/x86/kvm/x86.c      efer &= ~EFER_LMA;
EFER_LMA  1476  arch/x86/kvm/x86.c      efer |= vcpu->arch.efer & EFER_LMA;
EFER_LMA  8850  arch/x86/kvm/x86.c      || !(sregs->efer & EFER_LMA))
EFER_LMA  8857  arch/x86/kvm/x86.c      if (sregs->efer & EFER_LMA || sregs->cs.l)
EFER_LMA  87    arch/x86/kvm/x86.h      return vcpu->arch.efer & EFER_LMA;
EFER_LMA  106   arch/x86/kvm/x86.h      return (vcpu->arch.efer & EFER_LMA) &&
EFER_LMA  98    arch/x86/realmode/init.c  trampoline_header->efer = efer & ~EFER_LMA;
EFER_LMA  627   tools/testing/selftests/kvm/lib/x86_64/processor.c  sregs.efer |= (EFER_LME | EFER_LMA | EFER_NX);
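
As the arch/x86/kvm/x86.h entries above show, KVM's long-mode predicate reduces to testing the EFER.LMA bit of the vCPU's cached EFER value. Below is a minimal, self-contained C sketch of that check. The struct vcpu here is a hypothetical stand-in for KVM's real vCPU state (which lives in arch/x86/include/asm/kvm_host.h); the bit positions follow the architectural EFER layout (SCE = bit 0, LME = bit 8, LMA = bit 10).

    #include <stdint.h>
    #include <stdio.h>

    /* Architectural EFER bit positions (see arch/x86/include/asm/msr-index.h). */
    #define EFER_SCE (1ULL << 0)   /* SYSCALL enable */
    #define EFER_LME (1ULL << 8)   /* Long Mode Enable (set by software) */
    #define EFER_LMA (1ULL << 10)  /* Long Mode Active (status bit) */

    /* Hypothetical stand-in for KVM's vCPU state. */
    struct vcpu {
            uint64_t efer;
    };

    /* Mirrors the check listed at arch/x86/kvm/x86.h:87 above:
     * long mode is active iff EFER.LMA is set. */
    static int is_long_mode(const struct vcpu *vcpu)
    {
            return !!(vcpu->efer & EFER_LMA);
    }

    int main(void)
    {
            struct vcpu v = { .efer = EFER_SCE | EFER_LME | EFER_LMA };

            printf("long mode active: %d\n", is_long_mode(&v));
            return 0;
    }

Note the LME/LMA distinction visible throughout the listing: software sets EFER.LME to request long mode, while the processor (or here, KVM on the guest's behalf, as in the svm.c and vmx.c hits) sets and clears the read-only status bit EFER.LMA when paging is enabled or disabled. This is also why x86.c:1475-1476 strip LMA from a guest-written EFER value and re-apply the currently tracked LMA state.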