msr_data         1035 arch/x86/include/asm/kvm_host.h 	int (*get_msr)(struct kvm_vcpu *vcpu, struct msr_data *msr);
msr_data         1036 arch/x86/include/asm/kvm_host.h 	int (*set_msr)(struct kvm_vcpu *vcpu, struct msr_data *msr);
msr_data         1393 arch/x86/include/asm/kvm_host.h int kvm_get_msr_common(struct kvm_vcpu *vcpu, struct msr_data *msr);
msr_data         1394 arch/x86/include/asm/kvm_host.h int kvm_set_msr_common(struct kvm_vcpu *vcpu, struct msr_data *msr);
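
The two kvm_x86_ops callbacks and the kvm_{get,set}_msr_common() helpers above all pass MSR accesses around as a struct msr_data rather than a bare (index, value) pair. For kernels of this vintage the structure is roughly:

	/* as declared in arch/x86/include/asm/kvm_host.h */
	struct msr_data {
		bool host_initiated;	/* access came from userspace (KVM_GET/SET_MSRS), not from the guest */
		u32 index;		/* MSR number, e.g. MSR_EFER */
		u64 data;		/* value read, or value to be written */
	};

Carrying host_initiated alongside the value is what lets the common code apply different validity checks to userspace restores than to guest WRMSR.
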
msr_data         2789 arch/x86/kvm/emulate.c 	u64 msr_data;
msr_data         2807 arch/x86/kvm/emulate.c 	ops->get_msr(ctxt, MSR_STAR, &msr_data);
msr_data         2808 arch/x86/kvm/emulate.c 	msr_data >>= 32;
msr_data         2809 arch/x86/kvm/emulate.c 	cs_sel = (u16)(msr_data & 0xfffc);
msr_data         2810 arch/x86/kvm/emulate.c 	ss_sel = (u16)(msr_data + 8);
msr_data         2826 arch/x86/kvm/emulate.c 			     MSR_LSTAR : MSR_CSTAR, &msr_data);
msr_data         2827 arch/x86/kvm/emulate.c 		ctxt->_eip = msr_data;
msr_data         2829 arch/x86/kvm/emulate.c 		ops->get_msr(ctxt, MSR_SYSCALL_MASK, &msr_data);
msr_data         2830 arch/x86/kvm/emulate.c 		ctxt->eflags &= ~msr_data;
msr_data         2835 arch/x86/kvm/emulate.c 		ops->get_msr(ctxt, MSR_STAR, &msr_data);
msr_data         2836 arch/x86/kvm/emulate.c 		ctxt->_eip = (u32)msr_data;
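
The emulate.c hits above are em_syscall(): the new CS/SS come from the top half of MSR_STAR, the target RIP from MSR_LSTAR or MSR_CSTAR (64-bit vs. compatibility mode) with RFLAGS masked by MSR_SYSCALL_MASK, and legacy (non-LMA) guests instead jump to the low 32 bits of MSR_STAR. The same arithmetic in a standalone sketch, with plain integer arguments standing in for the emulator's ops->get_msr() callback:

	#include <stdint.h>
	#include <stdbool.h>

	/* STAR[47:32] supplies the SYSCALL CS; SS is architecturally CS + 8. */
	static void syscall_selectors(uint64_t star, uint16_t *cs_sel, uint16_t *ss_sel)
	{
		uint64_t hi = star >> 32;

		*cs_sel = (uint16_t)(hi & 0xfffc);	/* clear the RPL bits */
		*ss_sel = (uint16_t)(hi + 8);
	}

	/* Long-mode guests take RIP from LSTAR (64-bit CS) or CSTAR (compat CS);
	 * legacy guests use the low 32 bits of STAR. */
	static uint64_t syscall_target(bool long_mode, bool cs_is_64bit,
				       uint64_t star, uint64_t lstar, uint64_t cstar)
	{
		if (long_mode)
			return cs_is_64bit ? lstar : cstar;
		return (uint32_t)star;
	}
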
msr_data         2849 arch/x86/kvm/emulate.c 	u64 msr_data;
msr_data         2872 arch/x86/kvm/emulate.c 	ops->get_msr(ctxt, MSR_IA32_SYSENTER_CS, &msr_data);
msr_data         2873 arch/x86/kvm/emulate.c 	if ((msr_data & 0xfffc) == 0x0)
msr_data         2877 arch/x86/kvm/emulate.c 	cs_sel = (u16)msr_data & ~SEGMENT_RPL_MASK;
msr_data         2887 arch/x86/kvm/emulate.c 	ops->get_msr(ctxt, MSR_IA32_SYSENTER_EIP, &msr_data);
msr_data         2888 arch/x86/kvm/emulate.c 	ctxt->_eip = (efer & EFER_LMA) ? msr_data : (u32)msr_data;
msr_data         2890 arch/x86/kvm/emulate.c 	ops->get_msr(ctxt, MSR_IA32_SYSENTER_ESP, &msr_data);
msr_data         2891 arch/x86/kvm/emulate.c 	*reg_write(ctxt, VCPU_REGS_RSP) = (efer & EFER_LMA) ? msr_data :
msr_data         2892 arch/x86/kvm/emulate.c 							      (u32)msr_data;
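
em_sysenter() is driven by the three MSR_IA32_SYSENTER_* MSRs: a null SYSENTER_CS selector is rejected with #GP, the RPL bits are stripped from the selector, and the EIP/ESP values are truncated to 32 bits unless EFER.LMA is set. A hedged restatement of that logic, with the segment loads and fault injection left out:

	#include <stdint.h>
	#include <stdbool.h>

	struct sysenter_state {
		uint16_t cs_sel;
		uint64_t rip;
		uint64_t rsp;
	};

	/* Returns false where em_sysenter() would inject #GP(0). */
	static bool compute_sysenter_state(bool long_mode, uint64_t sysenter_cs,
					   uint64_t sysenter_eip, uint64_t sysenter_esp,
					   struct sysenter_state *out)
	{
		if ((sysenter_cs & 0xfffc) == 0)
			return false;

		out->cs_sel = (uint16_t)sysenter_cs & ~3u;	/* ~SEGMENT_RPL_MASK */
		out->rip = long_mode ? sysenter_eip : (uint32_t)sysenter_eip;
		out->rsp = long_mode ? sysenter_esp : (uint32_t)sysenter_esp;
		return true;
	}
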
msr_data         2901 arch/x86/kvm/emulate.c 	u64 msr_data, rcx, rdx;
msr_data         2922 arch/x86/kvm/emulate.c 	ops->get_msr(ctxt, MSR_IA32_SYSENTER_CS, &msr_data);
msr_data         2925 arch/x86/kvm/emulate.c 		cs_sel = (u16)(msr_data + 16);
msr_data         2926 arch/x86/kvm/emulate.c 		if ((msr_data & 0xfffc) == 0x0)
msr_data         2928 arch/x86/kvm/emulate.c 		ss_sel = (u16)(msr_data + 24);
msr_data         2933 arch/x86/kvm/emulate.c 		cs_sel = (u16)(msr_data + 32);
msr_data         2934 arch/x86/kvm/emulate.c 		if (msr_data == 0x0)
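
em_sysexit() reuses MSR_IA32_SYSENTER_CS as the base for the return selectors: +16/+24 for a 32-bit return, +32 for a 64-bit return (with SS assumed to sit 8 above CS, as in the surrounding kernel code), and a null base is rejected. The offset selection in isolation:

	#include <stdint.h>
	#include <stdbool.h>

	/* Selector math of em_sysexit(); returns false where the kernel would
	 * inject #GP(0) for a null SYSENTER_CS. */
	static bool sysexit_selectors(bool return_to_64bit, uint64_t sysenter_cs,
				      uint16_t *cs_sel, uint16_t *ss_sel)
	{
		if (return_to_64bit) {
			*cs_sel = (uint16_t)(sysenter_cs + 32);
			*ss_sel = (uint16_t)(*cs_sel + 8);	/* assumption: +40, mirroring the kernel */
			return sysenter_cs != 0;		/* 64-bit path checks the whole MSR */
		}
		*cs_sel = (uint16_t)(sysenter_cs + 16);
		*ss_sel = (uint16_t)(sysenter_cs + 24);
		return (sysenter_cs & 0xfffc) != 0;		/* 32-bit path checks the selector bits */
	}
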
msr_data         3722 arch/x86/kvm/emulate.c 	u64 msr_data;
msr_data         3724 arch/x86/kvm/emulate.c 	msr_data = (u32)reg_read(ctxt, VCPU_REGS_RAX)
msr_data         3726 arch/x86/kvm/emulate.c 	if (ctxt->ops->set_msr(ctxt, reg_read(ctxt, VCPU_REGS_RCX), msr_data))
msr_data         3734 arch/x86/kvm/emulate.c 	u64 msr_data;
msr_data         3736 arch/x86/kvm/emulate.c 	if (ctxt->ops->get_msr(ctxt, reg_read(ctxt, VCPU_REGS_RCX), &msr_data))
msr_data         3739 arch/x86/kvm/emulate.c 	*reg_write(ctxt, VCPU_REGS_RAX) = (u32)msr_data;
msr_data         3740 arch/x86/kvm/emulate.c 	*reg_write(ctxt, VCPU_REGS_RDX) = msr_data >> 32;
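
em_wrmsr() and em_rdmsr() are the emulator's WRMSR/RDMSR handlers: the 64-bit value is assembled from EDX:EAX on a write and split back on a read. The packing on its own:

	#include <stdint.h>

	/* WRMSR: EDX carries bits 63:32, EAX bits 31:0. */
	static uint64_t msr_value_from_edx_eax(uint32_t edx, uint32_t eax)
	{
		return ((uint64_t)edx << 32) | eax;
	}

	/* RDMSR: the emulator stores the low half in RAX and the high half in RDX. */
	static void msr_value_to_edx_eax(uint64_t value, uint32_t *edx, uint32_t *eax)
	{
		*eax = (uint32_t)value;
		*edx = (uint32_t)(value >> 32);
	}
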
msr_data           98 arch/x86/kvm/lapic.h int kvm_set_apic_base(struct kvm_vcpu *vcpu, struct msr_data *msr_info);
msr_data          350 arch/x86/kvm/pmu.c int kvm_pmu_set_msr(struct kvm_vcpu *vcpu, struct msr_data *msr_info)
msr_data           35 arch/x86/kvm/pmu.h 	int (*set_msr)(struct kvm_vcpu *vcpu, struct msr_data *msr_info);
msr_data          126 arch/x86/kvm/pmu.h int kvm_pmu_set_msr(struct kvm_vcpu *vcpu, struct msr_data *msr_info);
msr_data          232 arch/x86/kvm/pmu_amd.c static int amd_pmu_set_msr(struct kvm_vcpu *vcpu, struct msr_data *msr_info)
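
kvm_pmu_set_msr() forwards performance-monitoring MSR writes to whichever vendor implementation is installed, amd_pmu_set_msr() or intel_pmu_set_msr(), via the set_msr hook declared in pmu.h. A rough sketch of that indirection, with the ops table trimmed to the one callback shown above (the real struct kvm_pmu_ops has many more members):

	struct kvm_vcpu;
	struct msr_data;

	/* Reduced stand-in for struct kvm_pmu_ops. */
	struct pmu_ops_sketch {
		int (*set_msr)(struct kvm_vcpu *vcpu, struct msr_data *msr_info);
	};

	/* In kernels of this vintage kvm_pmu_set_msr() is little more than this:
	 * hand the request to the vendor table selected when the module loaded. */
	static int pmu_set_msr_sketch(const struct pmu_ops_sketch *ops,
				      struct kvm_vcpu *vcpu, struct msr_data *msr_info)
	{
		return ops->set_msr(vcpu, msr_info);
	}
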
msr_data         4169 arch/x86/kvm/svm.c static int svm_get_msr(struct kvm_vcpu *vcpu, struct msr_data *msr_info)
msr_data         4302 arch/x86/kvm/svm.c static int svm_set_msr(struct kvm_vcpu *vcpu, struct msr_data *msr)
msr_data          209 arch/x86/kvm/vmx/pmu_intel.c static int intel_pmu_set_msr(struct kvm_vcpu *vcpu, struct msr_data *msr_info)
msr_data         1750 arch/x86/kvm/vmx/vmx.c static int vmx_get_msr(struct kvm_vcpu *vcpu, struct msr_data *msr_info)
msr_data         1890 arch/x86/kvm/vmx/vmx.c static int vmx_set_msr(struct kvm_vcpu *vcpu, struct msr_data *msr_info)
msr_data         4257 arch/x86/kvm/vmx/vmx.c 	struct msr_data apic_base_msr;
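
svm_{get,set}_msr() and vmx_{get,set}_msr() are the vendor ends of the get_msr/set_msr hooks from kvm_host.h: each handles the MSRs its hardware owns and defers everything else to kvm_{get,set}_msr_common(). The control flow reduced to a sketch, in which the MSR index and the fallback stub are placeholders rather than real kernel symbols:

	#include <stdint.h>
	#include <stdbool.h>

	struct kvm_vcpu;
	struct msr_data { bool host_initiated; uint32_t index; uint64_t data; };

	#define VENDOR_OWNED_MSR 0x12345678u	/* obviously fake index, for illustration only */

	/* Stand-in for kvm_set_msr_common(); the real one lives in x86.c. */
	static int common_set_msr_stub(struct kvm_vcpu *vcpu, struct msr_data *msr_info)
	{
		(void)vcpu; (void)msr_info;
		return 0;
	}

	/* Shared shape of svm_set_msr() and vmx_set_msr(): claim the vendor MSRs,
	 * fall through to the common switch for everything else. */
	static int vendor_set_msr_sketch(struct kvm_vcpu *vcpu, struct msr_data *msr_info)
	{
		switch (msr_info->index) {
		case VENDOR_OWNED_MSR:
			/* ... program VMCB/VMCS state from msr_info->data ... */
			return 0;
		default:
			return common_set_msr_stub(vcpu, msr_info);
		}
	}
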
msr_data          343 arch/x86/kvm/x86.c int kvm_set_apic_base(struct kvm_vcpu *vcpu, struct msr_data *msr_info)
msr_data         1458 arch/x86/kvm/x86.c static int set_efer(struct kvm_vcpu *vcpu, struct msr_data *msr_info)
msr_data         1502 arch/x86/kvm/x86.c 	struct msr_data msr;
msr_data         1546 arch/x86/kvm/x86.c 	struct msr_data msr;
msr_data         1939 arch/x86/kvm/x86.c void kvm_write_tsc(struct kvm_vcpu *vcpu, struct msr_data *msr)
msr_data         2528 arch/x86/kvm/x86.c static int set_msr_mce(struct kvm_vcpu *vcpu, struct msr_data *msr_info)
msr_data         2692 arch/x86/kvm/x86.c int kvm_set_msr_common(struct kvm_vcpu *vcpu, struct msr_data *msr_info)
msr_data         2990 arch/x86/kvm/x86.c int kvm_get_msr_common(struct kvm_vcpu *vcpu, struct msr_data *msr_info)
msr_data         8866 arch/x86/kvm/x86.c 	struct msr_data apic_base_msr;
msr_data         9181 arch/x86/kvm/x86.c 	struct msr_data msr;
msr_data          266 arch/x86/kvm/x86.h void kvm_write_tsc(struct kvm_vcpu *vcpu, struct msr_data *msr);
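
The x86.c and x86.h entries are the common layer those vendor hooks fall back to: kvm_set_msr_common() switches on msr_info->index and routes MSR_EFER to set_efer(), MSR_IA32_APICBASE to kvm_set_apic_base(), the MCE bank MSRs to set_msr_mce(), and guest TSC writes to kvm_write_tsc(), with host_initiated available so userspace restores can be treated more leniently than guest WRMSR. A hedged outline of that dispatch, using fake index values and empty stubs in place of the real helpers:

	#include <stdint.h>
	#include <stdbool.h>

	struct kvm_vcpu;
	struct msr_data { bool host_initiated; uint32_t index; uint64_t data; };

	/* Fake indices for illustration; the real numbers come from msr-index.h. */
	enum sketch_msr { SKETCH_EFER, SKETCH_APICBASE, SKETCH_TSC };

	static int set_efer_stub(struct kvm_vcpu *v, struct msr_data *m)      { (void)v; (void)m; return 0; }
	static int set_apic_base_stub(struct kvm_vcpu *v, struct msr_data *m) { (void)v; (void)m; return 0; }
	static void write_tsc_stub(struct kvm_vcpu *v, struct msr_data *m)    { (void)v; (void)m; }

	/* Rough shape of kvm_set_msr_common(): forward the whole msr_data so each
	 * helper can look at both the value and the host_initiated flag. */
	static int set_msr_common_sketch(struct kvm_vcpu *vcpu, struct msr_data *msr_info)
	{
		switch (msr_info->index) {
		case SKETCH_EFER:
			return set_efer_stub(vcpu, msr_info);		/* set_efer() */
		case SKETCH_APICBASE:
			return set_apic_base_stub(vcpu, msr_info);	/* kvm_set_apic_base() */
		case SKETCH_TSC:
			write_tsc_stub(vcpu, msr_info);			/* kvm_write_tsc() */
			return 0;
		default:
			return 1;	/* unhandled; the real code may ignore or report failure */
		}
	}
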