Searched refs:EFER_LMA (Results 1 – 7 of 7) sorted by relevance
80 trampoline_header->efer = efer & ~EFER_LMA; in setup_real_mode()
46 return vcpu->arch.efer & EFER_LMA; in is_long_mode()
749 if (efer & EFER_LMA) in assign_eip_far()
1496 if (!(efer & EFER_LMA)) in get_descriptor_ptr()
1636 if (efer & EFER_LMA) in __load_segment_descriptor()
2439 ctxt->ops->set_msr(ctxt, MSR_EFER, val & ~EFER_LMA); in rsm_load_state_64()
2648 if (efer & EFER_LMA) { in em_syscall()
2656 if (efer & EFER_LMA) { in em_syscall()
2697 if ((ctxt->mode != X86EMUL_MODE_PROT64) && (efer & EFER_LMA) in em_sysenter()
2714 if (efer & EFER_LMA) { in em_sysenter()
2723 ctxt->_eip = (efer & EFER_LMA) ? msr_data : (u32)msr_data; in em_sysenter()
2726 *reg_write(ctxt, VCPU_REGS_RSP) = (efer & EFER_LMA) ? msr_data : in em_sysenter()
[all …]
1815 ignore_bits |= EFER_LMA | EFER_LME; in update_transition_efer()
1817 if (guest_efer & EFER_LMA) in update_transition_efer()
1830 if (!(guest_efer & EFER_LMA)) in update_transition_efer()
3568 if (efer & EFER_LMA) { in vmx_set_efer()
3595 vmx_set_efer(vcpu, vcpu->arch.efer | EFER_LMA); in enter_lmode()
3601 vmx_set_efer(vcpu, vcpu->arch.efer & ~EFER_LMA); in exit_lmode()
7953 (cr4 & X86_CR4_PAE) && !(efer & EFER_LMA)) in dump_vmcs()
9751 vcpu->arch.efer |= (EFER_LMA | EFER_LME); in prepare_vmcs02()
9753 vcpu->arch.efer &= ~(EFER_LMA | EFER_LME); in prepare_vmcs02()
9909 ia32e != !!(vmcs12->guest_ia32_efer & EFER_LMA) || in nested_vmx_run()
[all …]
478 if (!npt_enabled && !(efer & EFER_LMA)) in svm_set_efer()
1475 vcpu->arch.efer |= EFER_LMA; in svm_set_cr0()
1476 svm->vmcb->save.efer |= EFER_LMA | EFER_LME; in svm_set_cr0()
1480 vcpu->arch.efer &= ~EFER_LMA; in svm_set_cr0()
1481 svm->vmcb->save.efer &= ~(EFER_LMA | EFER_LME); in svm_set_cr0()
84 u64 __read_mostly efer_reserved_bits = ~((u64)(EFER_SCE | EFER_LME | EFER_LMA));
1017 efer &= ~EFER_LMA; in set_efer()
1018 efer |= vcpu->arch.efer & EFER_LMA; in set_efer()
28 #define EFER_LMA (1<<_EFER_LMA) macro
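Taken together, the hits above follow one pattern: EFER.LME is what software sets to request long mode, while EFER.LMA is set by hardware once paging is enabled, so the hypervisor checks LMA to decide whether a vCPU is actually in long mode and does not let a guest WRMSR flip LMA directly. A minimal sketch of that pattern, assuming simplified stand-in types (struct vcpu, is_long_mode, set_efer here are illustrative, not the kernel's real definitions); the bit positions follow the architectural EFER layout:

```c
#include <stdint.h>
#include <stdbool.h>

#define _EFER_LME 8                    /* Long Mode Enable: set by software   */
#define _EFER_LMA 10                   /* Long Mode Active: set by hardware   */
#define EFER_LME  (1ULL << _EFER_LME)
#define EFER_LMA  (1ULL << _EFER_LMA)

struct vcpu { uint64_t efer; };        /* stand-in for the vCPU state struct */

/* Mirrors the is_long_mode() hit: LMA, not LME, says long mode is active. */
static bool is_long_mode(const struct vcpu *vcpu)
{
    return vcpu->efer & EFER_LMA;
}

/* Mirrors the set_efer() hits: the guest-written value's LMA bit is dropped
 * and the currently active LMA bit is carried over instead. */
static void set_efer(struct vcpu *vcpu, uint64_t efer)
{
    efer &= ~EFER_LMA;
    efer |= vcpu->efer & EFER_LMA;
    vcpu->efer = efer;
}
```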