vcpu_vmx          336 arch/x86/kvm/vmx/evmcs.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx          352 arch/x86/kvm/vmx/evmcs.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx          173 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx          211 arch/x86/kvm/vmx/nested.c static void vmx_disable_shadow_vmcs(struct vcpu_vmx *vmx)
vcpu_vmx          220 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx          236 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx          274 arch/x86/kvm/vmx/nested.c static void vmx_sync_vmcs_host_state(struct vcpu_vmx *vmx,
vcpu_vmx          295 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx          329 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx          680 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx          881 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         1043 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         1056 arch/x86/kvm/vmx/nested.c static int vmx_restore_vmx_basic(struct vcpu_vmx *vmx, u64 data)
vcpu_vmx         1087 arch/x86/kvm/vmx/nested.c vmx_restore_control_msr(struct vcpu_vmx *vmx, u32 msr_index, u64 data)
vcpu_vmx         1132 arch/x86/kvm/vmx/nested.c static int vmx_restore_vmx_misc(struct vcpu_vmx *vmx, u64 data)
vcpu_vmx         1169 arch/x86/kvm/vmx/nested.c static int vmx_restore_vmx_ept_vpid_cap(struct vcpu_vmx *vmx, u64 data)
vcpu_vmx         1185 arch/x86/kvm/vmx/nested.c static int vmx_restore_fixed0_msr(struct vcpu_vmx *vmx, u32 msr_index, u64 data)
vcpu_vmx         1218 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         1366 arch/x86/kvm/vmx/nested.c static void copy_shadow_to_vmcs12(struct vcpu_vmx *vmx)
vcpu_vmx         1393 arch/x86/kvm/vmx/nested.c static void copy_vmcs12_to_shadow(struct vcpu_vmx *vmx)
vcpu_vmx         1427 arch/x86/kvm/vmx/nested.c static int copy_enlightened_to_vmcs12(struct vcpu_vmx *vmx)
vcpu_vmx         1647 arch/x86/kvm/vmx/nested.c static int copy_vmcs12_to_enlightened(struct vcpu_vmx *vmx)
vcpu_vmx         1821 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         1903 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         1927 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx =
vcpu_vmx         1928 arch/x86/kvm/vmx/nested.c 		container_of(timer, struct vcpu_vmx, nested.preemption_timer);
vcpu_vmx         1940 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         1961 arch/x86/kvm/vmx/nested.c static u64 nested_vmx_calc_efer(struct vcpu_vmx *vmx, struct vmcs12 *vmcs12)
vcpu_vmx         1972 arch/x86/kvm/vmx/nested.c static void prepare_vmcs02_constant_state(struct vcpu_vmx *vmx)
vcpu_vmx         2029 arch/x86/kvm/vmx/nested.c static void prepare_vmcs02_early_rare(struct vcpu_vmx *vmx,
vcpu_vmx         2044 arch/x86/kvm/vmx/nested.c static void prepare_vmcs02_early(struct vcpu_vmx *vmx, struct vmcs12 *vmcs12)
vcpu_vmx         2192 arch/x86/kvm/vmx/nested.c static void prepare_vmcs02_rare(struct vcpu_vmx *vmx, struct vmcs12 *vmcs12)
vcpu_vmx         2309 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         2464 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         2504 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         2563 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         2580 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         2825 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         2934 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         3056 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         3200 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         3408 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         3465 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         3585 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         3632 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         3660 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         3937 arch/x86/kvm/vmx/nested.c static inline u64 nested_vmx_get_vmcs01_guest_efer(struct vcpu_vmx *vmx)
vcpu_vmx         3963 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         4076 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         4356 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         4377 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         4437 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         4499 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         4541 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         4603 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         4691 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         4797 arch/x86/kvm/vmx/nested.c static void set_current_vmptr(struct vcpu_vmx *vmx, gpa_t vmptr)
vcpu_vmx         4812 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         4903 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         4960 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         5083 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         5327 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         5510 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx;
vcpu_vmx         5622 arch/x86/kvm/vmx/nested.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx           50 arch/x86/kvm/vmx/nested.h 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx          621 arch/x86/kvm/vmx/vmx.c static inline int __find_msr_index(struct vcpu_vmx *vmx, u32 msr)
vcpu_vmx          631 arch/x86/kvm/vmx/vmx.c struct shared_msr_entry *find_msr_entry(struct vcpu_vmx *vmx, u32 msr)
vcpu_vmx          700 arch/x86/kvm/vmx/vmx.c static bool vmx_segment_cache_test_set(struct vcpu_vmx *vmx, unsigned seg,
vcpu_vmx          715 arch/x86/kvm/vmx/vmx.c static u16 vmx_read_guest_seg_selector(struct vcpu_vmx *vmx, unsigned seg)
vcpu_vmx          724 arch/x86/kvm/vmx/vmx.c static ulong vmx_read_guest_seg_base(struct vcpu_vmx *vmx, unsigned seg)
vcpu_vmx          733 arch/x86/kvm/vmx/vmx.c static u32 vmx_read_guest_seg_limit(struct vcpu_vmx *vmx, unsigned seg)
vcpu_vmx          742 arch/x86/kvm/vmx/vmx.c static u32 vmx_read_guest_seg_ar(struct vcpu_vmx *vmx, unsigned seg)
vcpu_vmx          808 arch/x86/kvm/vmx/vmx.c static void clear_atomic_switch_msr_special(struct vcpu_vmx *vmx,
vcpu_vmx          826 arch/x86/kvm/vmx/vmx.c static void clear_atomic_switch_msr(struct vcpu_vmx *vmx, unsigned msr)
vcpu_vmx          866 arch/x86/kvm/vmx/vmx.c static void add_atomic_switch_msr_special(struct vcpu_vmx *vmx,
vcpu_vmx          878 arch/x86/kvm/vmx/vmx.c static void add_atomic_switch_msr(struct vcpu_vmx *vmx, unsigned msr,
vcpu_vmx          944 arch/x86/kvm/vmx/vmx.c static bool update_transition_efer(struct vcpu_vmx *vmx, int efer_offset)
vcpu_vmx         1049 arch/x86/kvm/vmx/vmx.c static void pt_guest_enter(struct vcpu_vmx *vmx)
vcpu_vmx         1066 arch/x86/kvm/vmx/vmx.c static void pt_guest_exit(struct vcpu_vmx *vmx)
vcpu_vmx         1109 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         1174 arch/x86/kvm/vmx/vmx.c static void vmx_prepare_switch_to_host(struct vcpu_vmx *vmx)
vcpu_vmx         1214 arch/x86/kvm/vmx/vmx.c static u64 vmx_read_guest_kernel_gs_base(struct vcpu_vmx *vmx)
vcpu_vmx         1223 arch/x86/kvm/vmx/vmx.c static void vmx_write_guest_kernel_gs_base(struct vcpu_vmx *vmx, u64 data)
vcpu_vmx         1292 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         1367 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         1468 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         1581 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         1629 arch/x86/kvm/vmx/vmx.c static void move_msr_up(struct vcpu_vmx *vmx, int from, int to)
vcpu_vmx         1643 arch/x86/kvm/vmx/vmx.c static void setup_msrs(struct vcpu_vmx *vmx)
vcpu_vmx         1752 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         1892 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         2646 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         2719 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         2767 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         2886 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         2910 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         3006 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         3081 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         3128 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         3160 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         3699 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         3713 arch/x86/kvm/vmx/vmx.c void pt_update_intercept_for_msr(struct vcpu_vmx *vmx)
vcpu_vmx         3742 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         3802 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         3828 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         3857 arch/x86/kvm/vmx/vmx.c void vmx_set_constant_host_state(struct vcpu_vmx *vmx)
vcpu_vmx         3914 arch/x86/kvm/vmx/vmx.c void set_cr4_guest_host_mask(struct vcpu_vmx *vmx)
vcpu_vmx         3925 arch/x86/kvm/vmx/vmx.c u32 vmx_pin_based_exec_ctrl(struct vcpu_vmx *vmx)
vcpu_vmx         3943 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         3961 arch/x86/kvm/vmx/vmx.c u32 vmx_exec_control(struct vcpu_vmx *vmx)
vcpu_vmx         3988 arch/x86/kvm/vmx/vmx.c static void vmx_compute_secondary_exec_control(struct vcpu_vmx *vmx)
vcpu_vmx         4147 arch/x86/kvm/vmx/vmx.c static void vmx_vcpu_setup(struct vcpu_vmx *vmx)
vcpu_vmx         4256 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         4371 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         4399 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         4430 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         4444 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         4596 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         5092 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         5221 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         5275 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         5291 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         5511 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         5608 arch/x86/kvm/vmx/vmx.c static void vmx_destroy_pml_buffer(struct vcpu_vmx *vmx)
vcpu_vmx         5618 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         5846 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         6034 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         6135 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         6192 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         6198 arch/x86/kvm/vmx/vmx.c static void handle_exception_nmi_irqoff(struct vcpu_vmx *vmx)
vcpu_vmx         6266 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         6298 arch/x86/kvm/vmx/vmx.c static void vmx_recover_nmi_blocking(struct vcpu_vmx *vmx)
vcpu_vmx         6395 arch/x86/kvm/vmx/vmx.c static void vmx_complete_interrupts(struct vcpu_vmx *vmx)
vcpu_vmx         6412 arch/x86/kvm/vmx/vmx.c static void atomic_switch_perf_msrs(struct vcpu_vmx *vmx)
vcpu_vmx         6430 arch/x86/kvm/vmx/vmx.c static void atomic_switch_umwait_control_msr(struct vcpu_vmx *vmx)
vcpu_vmx         6449 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         6473 arch/x86/kvm/vmx/vmx.c void vmx_update_host_rsp(struct vcpu_vmx *vmx, unsigned long host_rsp)
vcpu_vmx         6481 arch/x86/kvm/vmx/vmx.c bool __vmx_vcpu_run(struct vcpu_vmx *vmx, unsigned long *regs, bool launched);
vcpu_vmx         6485 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         6660 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         6677 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx;
vcpu_vmx         6681 arch/x86/kvm/vmx/vmx.c 	BUILD_BUG_ON_MSG(offsetof(struct vcpu_vmx, vcpu) != 0,
vcpu_vmx         6914 arch/x86/kvm/vmx/vmx.c static void vmcs_set_secondary_exec_control(struct vcpu_vmx *vmx)
vcpu_vmx         6940 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         6979 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         6996 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         7065 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         7204 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx;
vcpu_vmx         7278 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         7563 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         7577 arch/x86/kvm/vmx/vmx.c 	struct vcpu_vmx *vmx = to_vmx(vcpu);
vcpu_vmx         8023 arch/x86/kvm/vmx/vmx.c 	r = kvm_init(&vmx_x86_ops, sizeof(struct vcpu_vmx),
vcpu_vmx         8024 arch/x86/kvm/vmx/vmx.c 		     __alignof__(struct vcpu_vmx), THIS_MODULE);
vcpu_vmx          312 arch/x86/kvm/vmx/vmx.h void vmx_set_constant_host_state(struct vcpu_vmx *vmx);
vcpu_vmx          325 arch/x86/kvm/vmx/vmx.h void set_cr4_guest_host_mask(struct vcpu_vmx *vmx);
vcpu_vmx          335 arch/x86/kvm/vmx/vmx.h struct shared_msr_entry *find_msr_entry(struct vcpu_vmx *vmx, u32 msr);
vcpu_vmx          336 arch/x86/kvm/vmx/vmx.h void pt_update_intercept_for_msr(struct vcpu_vmx *vmx);
vcpu_vmx          337 arch/x86/kvm/vmx/vmx.h void vmx_update_host_rsp(struct vcpu_vmx *vmx, unsigned long host_rsp);
vcpu_vmx          406 arch/x86/kvm/vmx/vmx.h static inline void lname##_controls_set(struct vcpu_vmx *vmx, u32 val)	    \
vcpu_vmx          413 arch/x86/kvm/vmx/vmx.h static inline u32 lname##_controls_get(struct vcpu_vmx *vmx)		    \
vcpu_vmx          417 arch/x86/kvm/vmx/vmx.h static inline void lname##_controls_setbit(struct vcpu_vmx *vmx, u32 val)   \
vcpu_vmx          421 arch/x86/kvm/vmx/vmx.h static inline void lname##_controls_clearbit(struct vcpu_vmx *vmx, u32 val) \
vcpu_vmx          431 arch/x86/kvm/vmx/vmx.h static inline void vmx_segment_cache_clear(struct vcpu_vmx *vmx)
vcpu_vmx          458 arch/x86/kvm/vmx/vmx.h u32 vmx_exec_control(struct vcpu_vmx *vmx);
vcpu_vmx          459 arch/x86/kvm/vmx/vmx.h u32 vmx_pin_based_exec_ctrl(struct vcpu_vmx *vmx);
vcpu_vmx          466 arch/x86/kvm/vmx/vmx.h static inline struct vcpu_vmx *to_vmx(struct kvm_vcpu *vcpu)
vcpu_vmx          468 arch/x86/kvm/vmx/vmx.h 	return container_of(vcpu, struct vcpu_vmx, vcpu);
vcpu_vmx          509 arch/x86/kvm/vmx/vmx.h static inline void decache_tsc_multiplier(struct vcpu_vmx *vmx)
vcpu_vmx          515 arch/x86/kvm/vmx/vmx.h static inline bool vmx_has_waitpkg(struct vcpu_vmx *vmx)
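
Nearly every hit in the listing above is the idiom `struct vcpu_vmx *vmx = to_vmx(vcpu);`, where `to_vmx()` (vmx.h:466) recovers the VMX-specific vcpu from the generic `struct kvm_vcpu` via `container_of()`, and the `BUILD_BUG_ON_MSG(offsetof(struct vcpu_vmx, vcpu) != 0, ...)` check (vmx.c:6681) pins the embedded vcpu at offset zero. The following is a minimal, self-contained sketch of that embed-and-recover pattern; the struct fields other than `vcpu` and the simplified `container_of()` are illustrative stand-ins, not the kernel's actual definitions.

	#include <stddef.h>
	#include <stdio.h>

	/* Illustrative stand-in: the generic vcpu the rest of KVM passes around. */
	struct kvm_vcpu { int vcpu_id; };

	/*
	 * Illustrative stand-in for struct vcpu_vmx: kvm_vcpu is embedded as the
	 * first member, mirroring the offsetof(...) != 0 build check listed above.
	 */
	struct vcpu_vmx {
		struct kvm_vcpu vcpu;   /* must remain the first member */
		unsigned long host_rsp; /* placeholder for the real VMX state */
	};

	/* Simplified container_of(): step back from a member to its enclosing struct. */
	#define container_of(ptr, type, member) \
		((type *)((char *)(ptr) - offsetof(type, member)))

	/* Same shape as the to_vmx() shown at vmx.h:466-468. */
	static struct vcpu_vmx *to_vmx(struct kvm_vcpu *vcpu)
	{
		return container_of(vcpu, struct vcpu_vmx, vcpu);
	}

	int main(void)
	{
		struct vcpu_vmx vmx = { .vcpu = { .vcpu_id = 0 } };

		/* Generic code holds only a kvm_vcpu *; VMX code converts it back. */
		struct kvm_vcpu *generic = &vmx.vcpu;
		printf("round trip ok: %d\n", to_vmx(generic) == &vmx);
		return 0;
	}

Because the embedded `vcpu` sits at offset zero, the conversion is a no-op pointer cast at runtime; the `container_of()` form merely documents the relationship and lets the compiler enforce it.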