MSR_EFER          115 arch/x86/include/asm/virtext.h 	rdmsrl(MSR_EFER, efer);
MSR_EFER          116 arch/x86/include/asm/virtext.h 	wrmsrl(MSR_EFER, efer & ~EFER_SVME);
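The two virtext.h hits are the tail of cpu_svm_disable(), which tears SVM down on a CPU by clearing EFER.SVME. A minimal sketch of that read-modify-write, reconstructed from the lines above (the MSR_VM_HSAVE_PA write is my assumption about the surrounding function, not shown in this listing):

static inline void cpu_svm_disable(void)
{
	uint64_t efer;

	wrmsrl(MSR_VM_HSAVE_PA, 0);		/* assumed: drop the host save area first */
	rdmsrl(MSR_EFER, efer);			/* read the current EFER value */
	wrmsrl(MSR_EFER, efer & ~EFER_SVME);	/* write it back with SVME cleared */
}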
MSR_EFER           71 arch/x86/kernel/acpi/sleep.c 	if (!rdmsr_safe(MSR_EFER,
MSR_EFER           74 arch/x86/kernel/acpi/sleep.c 	    !wrmsr_safe(MSR_EFER,
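The sleep.c pair probes EFER with the fault-tolerant accessors so a CPU without the MSR does not crash the suspend path. A sketch of the probe, with lo/hi as stand-in locals for the wakeup-header fields the real code uses:

	u32 lo, hi;

	/* rdmsr_safe()/wrmsr_safe() return nonzero on #GP instead of
	 * oopsing, so EFER support can be detected at runtime. */
	if (!rdmsr_safe(MSR_EFER, &lo, &hi) &&
	    !wrmsr_safe(MSR_EFER, lo, hi)) {
		/* EFER is present and writable; mark it for restore on wakeup */
	}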
MSR_EFER          159 arch/x86/kvm/cpuid.c 	rdmsrl_safe(MSR_EFER, &efer);
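cpuid.c reads the host's EFER with rdmsrl_safe(), the 64-bit variant of the safe accessor; a two-line sketch:

	u64 efer;

	rdmsrl_safe(MSR_EFER, &efer);	/* returns 0 on success; no fault if EFER is missing */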
MSR_EFER          812 arch/x86/kvm/emulate.c 			ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
MSR_EFER         1599 arch/x86/kvm/emulate.c 		ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
MSR_EFER         1750 arch/x86/kvm/emulate.c 			ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
MSR_EFER         2573 arch/x86/kvm/emulate.c 	ctxt->ops->set_msr(ctxt, MSR_EFER, val & ~EFER_LMA);
MSR_EFER         2666 arch/x86/kvm/emulate.c 		ctxt->ops->set_msr(ctxt, MSR_EFER, efer);
MSR_EFER         2801 arch/x86/kvm/emulate.c 	ops->get_msr(ctxt, MSR_EFER, &efer);
MSR_EFER         2853 arch/x86/kvm/emulate.c 	ops->get_msr(ctxt, MSR_EFER, &efer);
MSR_EFER         4258 arch/x86/kvm/emulate.c 		ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
MSR_EFER         4269 arch/x86/kvm/emulate.c 		ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
MSR_EFER         4292 arch/x86/kvm/emulate.c 		ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
MSR_EFER         4354 arch/x86/kvm/emulate.c 	ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);
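All of the emulate.c hits go through the emulator's callback table rather than touching hardware: the instruction emulator asks KVM for the guest's view of EFER via ctxt->ops->get_msr(), and writes it back with set_msr() (e.g. clearing EFER_LMA at line 2573 when the guest leaves long mode). A sketch of the recurring long-mode check, assuming a valid ctxt:

	u64 efer = 0;

	ctxt->ops->get_msr(ctxt, MSR_EFER, &efer);	/* guest EFER, not host */
	if (efer & EFER_LMA) {
		/* guest is in long mode: apply 64-bit decode and segmentation rules */
	}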
MSR_EFER          917 arch/x86/kvm/svm.c 	rdmsrl(MSR_EFER, efer);
MSR_EFER          939 arch/x86/kvm/svm.c 	wrmsrl(MSR_EFER, efer | EFER_SVME);
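The svm.c pair brackets per-CPU hardware enablement: EFER is read once to refuse a double enable, then written back with EFER_SVME set. A sketch of that bracket, eliding the host-save-area setup that sits between the two accesses:

	u64 efer;

	rdmsrl(MSR_EFER, efer);
	if (efer & EFER_SVME)
		return -EBUSY;		/* SVM already enabled on this CPU */

	/* ... configure the host save area ... */

	wrmsrl(MSR_EFER, efer | EFER_SVME);	/* switch SVM on */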
MSR_EFER         3949 arch/x86/kvm/vmx/nested.c 		if (vmx->msr_autoload.guest.val[i].index == MSR_EFER)
MSR_EFER         3953 arch/x86/kvm/vmx/nested.c 	efer_msr = find_msr_entry(vmx, MSR_EFER);
MSR_EFER          452 arch/x86/kvm/vmx/vmx.c 	MSR_EFER, MSR_TSC_AUX, MSR_STAR,
MSR_EFER          832 arch/x86/kvm/vmx/vmx.c 	case MSR_EFER:
MSR_EFER          885 arch/x86/kvm/vmx/vmx.c 	case MSR_EFER:
MSR_EFER          974 arch/x86/kvm/vmx/vmx.c 			add_atomic_switch_msr(vmx, MSR_EFER,
MSR_EFER          977 arch/x86/kvm/vmx/vmx.c 			clear_atomic_switch_msr(vmx, MSR_EFER);
MSR_EFER          980 arch/x86/kvm/vmx/vmx.c 		clear_atomic_switch_msr(vmx, MSR_EFER);
MSR_EFER         1665 arch/x86/kvm/vmx/vmx.c 	index = __find_msr_index(vmx, MSR_EFER);
MSR_EFER         1768 arch/x86/kvm/vmx/vmx.c 	case MSR_EFER:
MSR_EFER         1900 arch/x86/kvm/vmx/vmx.c 	case MSR_EFER:
MSR_EFER         2768 arch/x86/kvm/vmx/vmx.c 	struct shared_msr_entry *msr = find_msr_entry(vmx, MSR_EFER);
MSR_EFER         7616 arch/x86/kvm/vmx/vmx.c 	rdmsrl_safe(MSR_EFER, &host_efer);
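The vmx.c cluster handles EFER two ways: when guest and host values differ, it goes on the VMCS atomic MSR-switch lists so the CPU swaps it on every VM entry/exit; otherwise the switch entry is removed and the shared-MSR machinery is used. host_efer itself is captured once at setup (line 7616). A sketch of the decision, assuming this tree's five-argument add_atomic_switch_msr() and precomputed guest_efer/host_efer values:

	if (guest_efer != host_efer) {
		/* let the CPU load/restore EFER atomically at entry/exit */
		add_atomic_switch_msr(vmx, MSR_EFER, guest_efer, host_efer, false);
		return false;
	}
	/* values match: no per-entry switch needed */
	clear_atomic_switch_msr(vmx, MSR_EFER);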
MSR_EFER         2717 arch/x86/kvm/x86.c 	case MSR_EFER:
MSR_EFER         3086 arch/x86/kvm/x86.c 	case MSR_EFER:
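The two x86.c hits are the common MSR get/set dispatch arms. Sketches of what each case plausibly does in this tree (set_efer() as the internal validator is my assumption about the exact helper name):

	/* kvm_get_msr_common() */
	case MSR_EFER:
		msr_info->data = vcpu->arch.efer;
		break;

	/* kvm_set_msr_common() */
	case MSR_EFER:
		return set_efer(vcpu, msr_info);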
MSR_EFER          116 arch/x86/power/cpu.c 	rdmsrl(MSR_EFER, ctxt->efer);
MSR_EFER          207 arch/x86/power/cpu.c 	wrmsrl(MSR_EFER, ctxt->efer);
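power/cpu.c is the suspend/resume mirror image: EFER is stashed in the saved-context structure before sleep and written back verbatim on resume.

	rdmsrl(MSR_EFER, ctxt->efer);	/* save side, before suspend */
	/* ... system sleeps ... */
	wrmsrl(MSR_EFER, ctxt->efer);	/* restore side, on resume */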
MSR_EFER           97 arch/x86/realmode/init.c 	rdmsrl(MSR_EFER, efer);
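realmode/init.c copies the kernel's EFER into the real-mode trampoline header so secondary CPUs come up with matching settings; my recollection is that EFER_LMA is masked off since the trampoline starts outside long mode, but treat that detail as an assumption:

	rdmsrl(MSR_EFER, efer);
	trampoline_header->efer = efer & ~EFER_LMA;	/* assumed masking */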
MSR_EFER          281 tools/testing/selftests/kvm/lib/x86_64/vmx.c 		vmwrite(HOST_IA32_EFER, rdmsr(MSR_EFER));
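The selftest library runs where rdmsr() executes directly, and copies the host's EFER into the VMCS host-state area so a VM exit lands back in a correctly configured host:

	vmwrite(HOST_IA32_EFER, rdmsr(MSR_EFER));	/* host state must match reality at exit */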