loaded_vmcs       275 arch/x86/kvm/vmx/nested.c 				     struct loaded_vmcs *prev)
loaded_vmcs       283 arch/x86/kvm/vmx/nested.c 	dest = &vmx->loaded_vmcs->host_state;
loaded_vmcs       293 arch/x86/kvm/vmx/nested.c static void vmx_switch_vmcs(struct kvm_vcpu *vcpu, struct loaded_vmcs *vmcs)
loaded_vmcs       296 arch/x86/kvm/vmx/nested.c 	struct loaded_vmcs *prev;
loaded_vmcs       299 arch/x86/kvm/vmx/nested.c 	if (vmx->loaded_vmcs == vmcs)
loaded_vmcs       303 arch/x86/kvm/vmx/nested.c 	prev = vmx->loaded_vmcs;
loaded_vmcs       304 arch/x86/kvm/vmx/nested.c 	vmx->loaded_vmcs = vmcs;
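The vmx_switch_vmcs() hits above (nested.c) show the pattern for swapping the active VMCS between vmcs01 and vmcs02: bail out early if the requested VMCS is already loaded, remember the previous one, and migrate the cached host state onto the new VMCS. A minimal userspace sketch of that early-out-and-copy shape, with hypothetical stand-in types (the real function also re-loads the VMCS on the current CPU via vmx_vcpu_load_vmcs() and calls put_cpu()):

	#include <string.h>

	struct vmcs_host_state { unsigned long cr3, cr4, rsp; };

	struct loaded_vmcs_sketch {          /* stand-in for struct loaded_vmcs */
		struct vmcs_host_state host_state;
		int cpu;
		int launched;
	};

	struct vcpu_vmx_sketch {             /* stand-in for struct vcpu_vmx */
		struct loaded_vmcs_sketch vmcs01, vmcs02;
		struct loaded_vmcs_sketch *loaded_vmcs;
	};

	static void switch_vmcs(struct vcpu_vmx_sketch *vmx,
				struct loaded_vmcs_sketch *vmcs)
	{
		struct loaded_vmcs_sketch *prev;

		if (vmx->loaded_vmcs == vmcs)   /* already active: nothing to do */
			return;

		prev = vmx->loaded_vmcs;
		vmx->loaded_vmcs = vmcs;
		/* the new VMCS inherits the host state cached on the old one */
		memcpy(&vmcs->host_state, &prev->host_state,
		       sizeof(prev->host_state));
	}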
loaded_vmcs      1388 arch/x86/kvm/vmx/nested.c 	vmcs_load(vmx->loaded_vmcs->vmcs);
loaded_vmcs      1424 arch/x86/kvm/vmx/nested.c 	vmcs_load(vmx->loaded_vmcs->vmcs);
loaded_vmcs      2185 arch/x86/kvm/vmx/nested.c 		vmx->loaded_vmcs->nmi_known_unmasked =
loaded_vmcs      2850 arch/x86/kvm/vmx/nested.c 	if (unlikely(cr3 != vmx->loaded_vmcs->host_state.cr3)) {
loaded_vmcs      2852 arch/x86/kvm/vmx/nested.c 		vmx->loaded_vmcs->host_state.cr3 = cr3;
loaded_vmcs      2856 arch/x86/kvm/vmx/nested.c 	if (unlikely(cr4 != vmx->loaded_vmcs->host_state.cr4)) {
loaded_vmcs      2858 arch/x86/kvm/vmx/nested.c 		vmx->loaded_vmcs->host_state.cr4 = cr4;
loaded_vmcs      2884 arch/x86/kvm/vmx/nested.c 		[loaded_vmcs]"r"(vmx->loaded_vmcs),
loaded_vmcs      2885 arch/x86/kvm/vmx/nested.c 		[launched]"i"(offsetof(struct loaded_vmcs, launched)),
loaded_vmcs      2886 arch/x86/kvm/vmx/nested.c 		[host_state_rsp]"i"(offsetof(struct loaded_vmcs, host_state.rsp)),
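The cr3/cr4 hits around nested.c:2850 (repeated in the vmx.c:6512 cluster below) show the write-avoidance idiom: the host's CR3/CR4 are mirrored in loaded_vmcs->host_state, and the HOST_CR3/HOST_CR4 VMCS fields are rewritten only when the live register value drifts from the cache, since VMWRITE is far more expensive than a compare. A hedged sketch of the idiom, with vmcs_writel() and the field encodings stubbed out:

	/* Sketch only: the real code reads CR3/CR4 from the hardware registers
	 * and writes the HOST_CR3/HOST_CR4 VMCS fields with vmcs_writel(). */
	struct host_state_cache { unsigned long cr3, cr4; };

	enum { HOST_CR3_FIELD, HOST_CR4_FIELD };   /* hypothetical field ids */

	static void stub_vmcs_writel(int field, unsigned long val)
	{ (void)field; (void)val; }

	static void sync_host_crs(struct host_state_cache *cache,
				  unsigned long cr3, unsigned long cr4)
	{
		if (cr3 != cache->cr3) {        /* unlikely(): cache usually holds */
			stub_vmcs_writel(HOST_CR3_FIELD, cr3);
			cache->cr3 = cr3;
		}
		if (cr4 != cache->cr4) {
			stub_vmcs_writel(HOST_CR4_FIELD, cr4);
			cache->cr4 = cr4;
		}
	}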
loaded_vmcs      3639 arch/x86/kvm/vmx/nested.c 	WARN_ON_ONCE(vmx->loaded_vmcs != &vmx->vmcs01);
loaded_vmcs      3642 arch/x86/kvm/vmx/nested.c 	vmx->loaded_vmcs = &vmx->nested.vmcs02;
loaded_vmcs      3647 arch/x86/kvm/vmx/nested.c 	vmx->loaded_vmcs = &vmx->vmcs01;
loaded_vmcs      4357 arch/x86/kvm/vmx/nested.c 	struct loaded_vmcs *loaded_vmcs = vmx->loaded_vmcs;
loaded_vmcs      4365 arch/x86/kvm/vmx/nested.c 	WARN_ON(loaded_vmcs == &vmx->vmcs01 && loaded_vmcs->shadow_vmcs);
loaded_vmcs      4367 arch/x86/kvm/vmx/nested.c 	if (!loaded_vmcs->shadow_vmcs) {
loaded_vmcs      4368 arch/x86/kvm/vmx/nested.c 		loaded_vmcs->shadow_vmcs = alloc_vmcs(true);
loaded_vmcs      4369 arch/x86/kvm/vmx/nested.c 		if (loaded_vmcs->shadow_vmcs)
loaded_vmcs      4370 arch/x86/kvm/vmx/nested.c 			vmcs_clear(loaded_vmcs->shadow_vmcs);
loaded_vmcs      4372 arch/x86/kvm/vmx/nested.c 	return loaded_vmcs->shadow_vmcs;
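The nested.c:4357 cluster is the lazy allocator for the shadow VMCS: it is created on first use, only for vmcs02 (the WARN_ON catches a shadow VMCS being attached to vmcs01), and later calls return the existing one. The shape of that lazy-init pattern, sketched with calloc() standing in for alloc_vmcs(true) plus vmcs_clear():

	#include <stdlib.h>

	struct vmcs_stub { char data[64]; };   /* stand-in for struct vmcs */

	struct loaded_vmcs_stub {
		struct vmcs_stub *vmcs;
		struct vmcs_stub *shadow_vmcs;
	};

	static struct vmcs_stub *get_shadow_vmcs(struct loaded_vmcs_stub *lv)
	{
		if (!lv->shadow_vmcs) {
			/* first use: allocate and clear, like alloc_vmcs(true)
			 * followed by vmcs_clear() in the real code */
			lv->shadow_vmcs = calloc(1, sizeof(*lv->shadow_vmcs));
		}
		/* may still be NULL if allocation failed; callers must check */
		return lv->shadow_vmcs;
	}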
loaded_vmcs      4788 arch/x86/kvm/vmx/nested.c 			vmcs_load(vmx->loaded_vmcs->vmcs);
loaded_vmcs       551 arch/x86/kvm/vmx/vmx.c 	evmcs = (struct hv_enlightened_vmcs *)to_vmx(vcpu)->loaded_vmcs->vmcs;
loaded_vmcs       641 arch/x86/kvm/vmx/vmx.c void loaded_vmcs_init(struct loaded_vmcs *loaded_vmcs)
loaded_vmcs       643 arch/x86/kvm/vmx/vmx.c 	vmcs_clear(loaded_vmcs->vmcs);
loaded_vmcs       644 arch/x86/kvm/vmx/vmx.c 	if (loaded_vmcs->shadow_vmcs && loaded_vmcs->launched)
loaded_vmcs       645 arch/x86/kvm/vmx/vmx.c 		vmcs_clear(loaded_vmcs->shadow_vmcs);
loaded_vmcs       646 arch/x86/kvm/vmx/vmx.c 	loaded_vmcs->cpu = -1;
loaded_vmcs       647 arch/x86/kvm/vmx/vmx.c 	loaded_vmcs->launched = 0;
loaded_vmcs       654 arch/x86/kvm/vmx/vmx.c 	struct loaded_vmcs *v;
loaded_vmcs       664 arch/x86/kvm/vmx/vmx.c 	struct loaded_vmcs *loaded_vmcs = arg;
loaded_vmcs       667 arch/x86/kvm/vmx/vmx.c 	if (loaded_vmcs->cpu != cpu)
loaded_vmcs       669 arch/x86/kvm/vmx/vmx.c 	if (per_cpu(current_vmcs, cpu) == loaded_vmcs->vmcs)
loaded_vmcs       672 arch/x86/kvm/vmx/vmx.c 	vmcs_clear(loaded_vmcs->vmcs);
loaded_vmcs       673 arch/x86/kvm/vmx/vmx.c 	if (loaded_vmcs->shadow_vmcs && loaded_vmcs->launched)
loaded_vmcs       674 arch/x86/kvm/vmx/vmx.c 		vmcs_clear(loaded_vmcs->shadow_vmcs);
loaded_vmcs       676 arch/x86/kvm/vmx/vmx.c 	list_del(&loaded_vmcs->loaded_vmcss_on_cpu_link);
loaded_vmcs       687 arch/x86/kvm/vmx/vmx.c 	loaded_vmcs->cpu = -1;
loaded_vmcs       688 arch/x86/kvm/vmx/vmx.c 	loaded_vmcs->launched = 0;
loaded_vmcs       691 arch/x86/kvm/vmx/vmx.c void loaded_vmcs_clear(struct loaded_vmcs *loaded_vmcs)
loaded_vmcs       693 arch/x86/kvm/vmx/vmx.c 	int cpu = loaded_vmcs->cpu;
loaded_vmcs       697 arch/x86/kvm/vmx/vmx.c 			 __loaded_vmcs_clear, loaded_vmcs, 1);
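loaded_vmcs_clear() at vmx.c:691 cannot simply VMCLEAR from any CPU: a VMCS must be cleared on the CPU where it was last loaded, so the function bounces __loaded_vmcs_clear() to loaded_vmcs->cpu with smp_call_function_single() (the trailing 1 means wait for completion). __loaded_vmcs_clear() then drops the per-CPU current_vmcs pointer if it matches, VMCLEARs the ordinary and shadow VMCS, unlinks the entry from that CPU's loaded_vmcss_on_cpu list, and resets cpu/launched, exactly as loaded_vmcs_init() does at setup. A rough model of the dispatch, using a direct function call where the kernel sends an IPI:

	/* Sketch: smp_call_function_single(cpu, fn, arg, wait) is modeled as a
	 * direct call; in the kernel it runs fn(arg) on the target CPU. */
	struct loaded_vmcs_model {
		int cpu;        /* CPU the VMCS was last loaded on, or -1 */
		int launched;
	};

	static void model_vmclear(struct loaded_vmcs_model *lv) { (void)lv; }

	static void __clear_on_owner_cpu(void *arg)
	{
		struct loaded_vmcs_model *lv = arg;

		model_vmclear(lv);   /* VMCLEAR ordinary (and shadow) VMCS */
		lv->cpu = -1;        /* no longer resident on any CPU */
		lv->launched = 0;    /* next entry must use VMLAUNCH again */
	}

	static void clear_loaded_vmcs(struct loaded_vmcs_model *lv)
	{
		int cpu = lv->cpu;

		if (cpu != -1)       /* real code: smp_call_function_single() */
			__clear_on_owner_cpu(lv);
	}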
loaded_vmcs       796 arch/x86/kvm/vmx/vmx.c 	msr_bitmap = to_vmx(vcpu)->loaded_vmcs->msr_bitmap;
loaded_vmcs      1136 arch/x86/kvm/vmx/vmx.c 	host_state = &vmx->loaded_vmcs->host_state;
loaded_vmcs      1181 arch/x86/kvm/vmx/vmx.c 	host_state = &vmx->loaded_vmcs->host_state;
loaded_vmcs      1290 arch/x86/kvm/vmx/vmx.c 			struct loaded_vmcs *buddy)
loaded_vmcs      1293 arch/x86/kvm/vmx/vmx.c 	bool already_loaded = vmx->loaded_vmcs->cpu == cpu;
loaded_vmcs      1297 arch/x86/kvm/vmx/vmx.c 		loaded_vmcs_clear(vmx->loaded_vmcs);
loaded_vmcs      1308 arch/x86/kvm/vmx/vmx.c 		list_add(&vmx->loaded_vmcs->loaded_vmcss_on_cpu_link,
loaded_vmcs      1314 arch/x86/kvm/vmx/vmx.c 	if (prev != vmx->loaded_vmcs->vmcs) {
loaded_vmcs      1315 arch/x86/kvm/vmx/vmx.c 		per_cpu(current_vmcs, cpu) = vmx->loaded_vmcs->vmcs;
loaded_vmcs      1316 arch/x86/kvm/vmx/vmx.c 		vmcs_load(vmx->loaded_vmcs->vmcs);
loaded_vmcs      1352 arch/x86/kvm/vmx/vmx.c 		vmx->loaded_vmcs->cpu = cpu;
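vmx_vcpu_load_vmcs() at vmx.c:1290 migrates a loaded_vmcs to the current CPU: if it was last resident elsewhere it is first cleared there, then linked onto the new CPU's loaded_vmcss_on_cpu list, and VMPTRLDed only when it differs from the per-CPU current_vmcs. A compressed sketch of the already_loaded / current_vmcs checks, with per-CPU variables modeled as plain arrays:

	#define NR_CPUS_SKETCH 8

	struct lv { int cpu; void *vmcs; };

	static void *current_vmcs_on[NR_CPUS_SKETCH]; /* per_cpu(current_vmcs) */

	static void lv_clear(struct lv *v) { v->cpu = -1; } /* loaded_vmcs_clear() stand-in */
	static void lv_vmptrld(void *vmcs) { (void)vmcs; }  /* vmcs_load() stand-in */

	static void load_vmcs_on(struct lv *v, int cpu)
	{
		int already_loaded = (v->cpu == cpu);

		if (!already_loaded)
			lv_clear(v);            /* VMCLEAR on the old CPU first */

		if (current_vmcs_on[cpu] != v->vmcs) {
			current_vmcs_on[cpu] = v->vmcs;
			lv_vmptrld(v->vmcs);    /* VMPTRLD only on a real switch */
		}

		if (!already_loaded)
			v->cpu = cpu;           /* now resident here */
	}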
loaded_vmcs      2261 arch/x86/kvm/vmx/vmx.c 	struct loaded_vmcs *v, *n;
loaded_vmcs      2533 arch/x86/kvm/vmx/vmx.c void free_loaded_vmcs(struct loaded_vmcs *loaded_vmcs)
loaded_vmcs      2535 arch/x86/kvm/vmx/vmx.c 	if (!loaded_vmcs->vmcs)
loaded_vmcs      2537 arch/x86/kvm/vmx/vmx.c 	loaded_vmcs_clear(loaded_vmcs);
loaded_vmcs      2538 arch/x86/kvm/vmx/vmx.c 	free_vmcs(loaded_vmcs->vmcs);
loaded_vmcs      2539 arch/x86/kvm/vmx/vmx.c 	loaded_vmcs->vmcs = NULL;
loaded_vmcs      2540 arch/x86/kvm/vmx/vmx.c 	if (loaded_vmcs->msr_bitmap)
loaded_vmcs      2541 arch/x86/kvm/vmx/vmx.c 		free_page((unsigned long)loaded_vmcs->msr_bitmap);
loaded_vmcs      2542 arch/x86/kvm/vmx/vmx.c 	WARN_ON(loaded_vmcs->shadow_vmcs != NULL);
loaded_vmcs      2545 arch/x86/kvm/vmx/vmx.c int alloc_loaded_vmcs(struct loaded_vmcs *loaded_vmcs)
loaded_vmcs      2547 arch/x86/kvm/vmx/vmx.c 	loaded_vmcs->vmcs = alloc_vmcs(false);
loaded_vmcs      2548 arch/x86/kvm/vmx/vmx.c 	if (!loaded_vmcs->vmcs)
loaded_vmcs      2551 arch/x86/kvm/vmx/vmx.c 	loaded_vmcs->shadow_vmcs = NULL;
loaded_vmcs      2552 arch/x86/kvm/vmx/vmx.c 	loaded_vmcs->hv_timer_soft_disabled = false;
loaded_vmcs      2553 arch/x86/kvm/vmx/vmx.c 	loaded_vmcs_init(loaded_vmcs);
loaded_vmcs      2556 arch/x86/kvm/vmx/vmx.c 		loaded_vmcs->msr_bitmap = (unsigned long *)
loaded_vmcs      2558 arch/x86/kvm/vmx/vmx.c 		if (!loaded_vmcs->msr_bitmap)
loaded_vmcs      2560 arch/x86/kvm/vmx/vmx.c 		memset(loaded_vmcs->msr_bitmap, 0xff, PAGE_SIZE);
loaded_vmcs      2566 arch/x86/kvm/vmx/vmx.c 				(struct hv_enlightened_vmcs *)loaded_vmcs->vmcs;
loaded_vmcs      2572 arch/x86/kvm/vmx/vmx.c 	memset(&loaded_vmcs->host_state, 0, sizeof(struct vmcs_host_state));
loaded_vmcs      2573 arch/x86/kvm/vmx/vmx.c 	memset(&loaded_vmcs->controls_shadow, 0,
loaded_vmcs      2579 arch/x86/kvm/vmx/vmx.c 	free_loaded_vmcs(loaded_vmcs);
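alloc_loaded_vmcs() (vmx.c:2545) and free_loaded_vmcs() (vmx.c:2533) bracket the lifetime: allocation sets up the VMCS and an all-ones MSR bitmap (every MSR intercepted by default), zeroes the cached host state and controls shadow, and unwinds through free_loaded_vmcs() on failure; freeing clears the VMCS via the IPI path above before releasing memory, and the WARN_ON insists the shadow VMCS is already gone. A sketch of the alloc-with-unwind shape, with malloc() in place of the page allocator and -1 in place of -ENOMEM:

	#include <stdlib.h>
	#include <string.h>

	#define PAGE_SIZE_SKETCH 4096

	struct loaded_vmcs_lite {
		void *vmcs;
		unsigned char *msr_bitmap;
	};

	static void free_loaded_vmcs_lite(struct loaded_vmcs_lite *lv)
	{
		/* real code calls loaded_vmcs_clear() here before freeing */
		free(lv->vmcs);
		lv->vmcs = NULL;
		free(lv->msr_bitmap);
		lv->msr_bitmap = NULL;
	}

	static int alloc_loaded_vmcs_lite(struct loaded_vmcs_lite *lv)
	{
		lv->vmcs = calloc(1, PAGE_SIZE_SKETCH);
		if (!lv->vmcs)
			return -1;

		lv->msr_bitmap = malloc(PAGE_SIZE_SKETCH);
		if (!lv->msr_bitmap)
			goto out_vmcs;
		/* all bits set: intercept every MSR until explicitly opened up */
		memset(lv->msr_bitmap, 0xff, PAGE_SIZE_SKETCH);
		return 0;

	out_vmcs:        /* unwind, mirroring the real error path */
		free_loaded_vmcs_lite(lv);
		return -1;
	}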
loaded_vmcs      3873 arch/x86/kvm/vmx/vmx.c 	vmx->loaded_vmcs->host_state.cr3 = cr3;
loaded_vmcs      3878 arch/x86/kvm/vmx/vmx.c 	vmx->loaded_vmcs->host_state.cr4 = cr4;
loaded_vmcs      4410 arch/x86/kvm/vmx/vmx.c 		vmx->loaded_vmcs->soft_vnmi_blocked = 1;
loaded_vmcs      4411 arch/x86/kvm/vmx/vmx.c 		vmx->loaded_vmcs->vnmi_blocked_time = 0;
loaded_vmcs      4415 arch/x86/kvm/vmx/vmx.c 	vmx->loaded_vmcs->nmi_known_unmasked = false;
loaded_vmcs      4434 arch/x86/kvm/vmx/vmx.c 		return vmx->loaded_vmcs->soft_vnmi_blocked;
loaded_vmcs      4435 arch/x86/kvm/vmx/vmx.c 	if (vmx->loaded_vmcs->nmi_known_unmasked)
loaded_vmcs      4438 arch/x86/kvm/vmx/vmx.c 	vmx->loaded_vmcs->nmi_known_unmasked = !masked;
loaded_vmcs      4447 arch/x86/kvm/vmx/vmx.c 		if (vmx->loaded_vmcs->soft_vnmi_blocked != masked) {
loaded_vmcs      4448 arch/x86/kvm/vmx/vmx.c 			vmx->loaded_vmcs->soft_vnmi_blocked = masked;
loaded_vmcs      4449 arch/x86/kvm/vmx/vmx.c 			vmx->loaded_vmcs->vnmi_blocked_time = 0;
loaded_vmcs      4452 arch/x86/kvm/vmx/vmx.c 		vmx->loaded_vmcs->nmi_known_unmasked = !masked;
loaded_vmcs      4468 arch/x86/kvm/vmx/vmx.c 	    to_vmx(vcpu)->loaded_vmcs->soft_vnmi_blocked)
loaded_vmcs      5514 arch/x86/kvm/vmx/vmx.c 	    !unlikely(vmx->loaded_vmcs->hv_timer_soft_disabled))
loaded_vmcs      5912 arch/x86/kvm/vmx/vmx.c 		     vmx->loaded_vmcs->soft_vnmi_blocked)) {
loaded_vmcs      5914 arch/x86/kvm/vmx/vmx.c 			vmx->loaded_vmcs->soft_vnmi_blocked = 0;
loaded_vmcs      5915 arch/x86/kvm/vmx/vmx.c 		} else if (vmx->loaded_vmcs->vnmi_blocked_time > 1000000000LL &&
loaded_vmcs      5926 arch/x86/kvm/vmx/vmx.c 			vmx->loaded_vmcs->soft_vnmi_blocked = 0;
loaded_vmcs      6308 arch/x86/kvm/vmx/vmx.c 		if (vmx->loaded_vmcs->nmi_known_unmasked)
loaded_vmcs      6332 arch/x86/kvm/vmx/vmx.c 			vmx->loaded_vmcs->nmi_known_unmasked =
loaded_vmcs      6335 arch/x86/kvm/vmx/vmx.c 	} else if (unlikely(vmx->loaded_vmcs->soft_vnmi_blocked))
loaded_vmcs      6336 arch/x86/kvm/vmx/vmx.c 		vmx->loaded_vmcs->vnmi_blocked_time +=
loaded_vmcs      6338 arch/x86/kvm/vmx/vmx.c 					      vmx->loaded_vmcs->entry_time));
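The soft_vnmi_blocked / vnmi_blocked_time hits (vmx.c:4410 onward) implement a software fallback for CPUs without virtual-NMI support: entry_time is stamped just before VM-entry (vmx.c:6491), the elapsed time is accumulated after VM-exit (vmx.c:6336), and the vmx.c:5915 check gives up the blocking after roughly one second (1000000000LL ns). A sketch of the accounting, with ktime_get() replaced by a plain nanosecond counter and the exit-path conditions simplified:

	#include <stdint.h>

	/* Hypothetical stand-in: now_ns() models ktime_get(). */
	static int64_t fake_clock_ns;
	static int64_t now_ns(void) { return fake_clock_ns; }

	struct soft_vnmi {
		int     blocked;        /* soft_vnmi_blocked */
		int64_t blocked_time;   /* accumulated ns while blocked */
		int64_t entry_time;     /* stamped just before VM-entry */
	};

	static void before_entry(struct soft_vnmi *s)
	{
		if (s->blocked)
			s->entry_time = now_ns();
	}

	static void after_exit(struct soft_vnmi *s)
	{
		if (s->blocked) {
			s->blocked_time += now_ns() - s->entry_time;
			/* safety valve: drop blocking after ~1s (vmx.c:5915) */
			if (s->blocked_time > 1000000000LL)
				s->blocked = 0;
		}
	}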
loaded_vmcs      6455 arch/x86/kvm/vmx/vmx.c 		vmx->loaded_vmcs->hv_timer_soft_disabled = false;
loaded_vmcs      6466 arch/x86/kvm/vmx/vmx.c 		vmx->loaded_vmcs->hv_timer_soft_disabled = false;
loaded_vmcs      6467 arch/x86/kvm/vmx/vmx.c 	} else if (!vmx->loaded_vmcs->hv_timer_soft_disabled) {
loaded_vmcs      6469 arch/x86/kvm/vmx/vmx.c 		vmx->loaded_vmcs->hv_timer_soft_disabled = true;
loaded_vmcs      6475 arch/x86/kvm/vmx/vmx.c 	if (unlikely(host_rsp != vmx->loaded_vmcs->host_state.rsp)) {
loaded_vmcs      6476 arch/x86/kvm/vmx/vmx.c 		vmx->loaded_vmcs->host_state.rsp = host_rsp;
loaded_vmcs      6490 arch/x86/kvm/vmx/vmx.c 		     vmx->loaded_vmcs->soft_vnmi_blocked))
loaded_vmcs      6491 arch/x86/kvm/vmx/vmx.c 		vmx->loaded_vmcs->entry_time = ktime_get();
loaded_vmcs      6512 arch/x86/kvm/vmx/vmx.c 	if (unlikely(cr3 != vmx->loaded_vmcs->host_state.cr3)) {
loaded_vmcs      6514 arch/x86/kvm/vmx/vmx.c 		vmx->loaded_vmcs->host_state.cr3 = cr3;
loaded_vmcs      6518 arch/x86/kvm/vmx/vmx.c 	if (unlikely(cr4 != vmx->loaded_vmcs->host_state.cr4)) {
loaded_vmcs      6520 arch/x86/kvm/vmx/vmx.c 		vmx->loaded_vmcs->host_state.cr4 = cr4;
loaded_vmcs      6563 arch/x86/kvm/vmx/vmx.c 				   vmx->loaded_vmcs->launched);
loaded_vmcs      6633 arch/x86/kvm/vmx/vmx.c 	vmx->loaded_vmcs->launched = 1;
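Around vmx.c:6475-6633 two more caches govern the entry path: host_state.rsp is rewritten into the VMCS only when the stack pointer actually moved, and the launched flag (also reached from the inline asm at nested.c:2885 via offsetof) selects VMRESUME over VMLAUNCH; it becomes 1 after the first successful entry and goes back to 0 whenever the VMCS is cleared. Sketched as a branch, where the real code instead feeds the flag to __vmx_vcpu_run() (vmx.c:6563):

	/* Sketch of the launched-flag dispatch; do_vmlaunch()/do_vmresume()
	 * are hypothetical stubs for the VMLAUNCH/VMRESUME instructions. */
	struct entry_state { int launched; unsigned long cached_rsp; };

	static void do_vmlaunch(void) { }
	static void do_vmresume(void) { }
	static void write_host_rsp(unsigned long rsp) { (void)rsp; }

	static void vcpu_enter(struct entry_state *st, unsigned long host_rsp)
	{
		if (host_rsp != st->cached_rsp) {  /* HOST_RSP rarely changes */
			st->cached_rsp = host_rsp;
			write_host_rsp(host_rsp);
		}

		if (st->launched)
			do_vmresume();    /* VMCS already launched on this CPU */
		else
			do_vmlaunch();    /* first entry after VMCLEAR */

		st->launched = 1;         /* as at vmx.c:6633 */
	}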
loaded_vmcs      6666 arch/x86/kvm/vmx/vmx.c 	free_loaded_vmcs(vmx->loaded_vmcs);
loaded_vmcs      6751 arch/x86/kvm/vmx/vmx.c 	vmx->loaded_vmcs = &vmx->vmcs01;
loaded_vmcs      6793 arch/x86/kvm/vmx/vmx.c 	free_loaded_vmcs(vmx->loaded_vmcs);
loaded_vmcs       147 arch/x86/kvm/vmx/vmx.h 	struct loaded_vmcs vmcs02;
loaded_vmcs       225 arch/x86/kvm/vmx/vmx.h 	struct loaded_vmcs    vmcs01;
loaded_vmcs       226 arch/x86/kvm/vmx/vmx.h 	struct loaded_vmcs   *loaded_vmcs;
loaded_vmcs       308 arch/x86/kvm/vmx/vmx.h 			struct loaded_vmcs *buddy);
loaded_vmcs       408 arch/x86/kvm/vmx/vmx.h 	if (vmx->loaded_vmcs->controls_shadow.lname != val) {		    \
loaded_vmcs       410 arch/x86/kvm/vmx/vmx.h 		vmx->loaded_vmcs->controls_shadow.lname = val;		    \
loaded_vmcs       415 arch/x86/kvm/vmx/vmx.h 	return vmx->loaded_vmcs->controls_shadow.lname;			    \
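The vmx.h hits at lines 408-415 come from the BUILD_CONTROLS_SHADOW macro, which generates set/get accessors that mirror each VMCS controls field in loaded_vmcs->controls_shadow, so reads never need a VMREAD and writes skip the VMWRITE when the value is unchanged. A reduced rendition of one generated accessor pair, with vmcs_write32() and the field encoding stubbed as illustrative names:

	/* Reduced model of one BUILD_CONTROLS_SHADOW expansion, e.g. for the
	 * pin-based controls; identifiers here are illustrative stubs. */
	struct controls_shadow { unsigned int pin; };
	struct vmx_lite { struct controls_shadow shadow; };

	static void stub_vmcs_write32(int field, unsigned int val)
	{ (void)field; (void)val; }
	#define PIN_CONTROLS_FIELD_STUB 0

	static void pin_controls_set(struct vmx_lite *vmx, unsigned int val)
	{
		if (vmx->shadow.pin != val) {  /* skip redundant VMWRITE */
			stub_vmcs_write32(PIN_CONTROLS_FIELD_STUB, val);
			vmx->shadow.pin = val;
		}
	}

	static unsigned int pin_controls_get(struct vmx_lite *vmx)
	{
		return vmx->shadow.pin;        /* no VMREAD needed */
	}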
loaded_vmcs       478 arch/x86/kvm/vmx/vmx.h int alloc_loaded_vmcs(struct loaded_vmcs *loaded_vmcs);
loaded_vmcs       479 arch/x86/kvm/vmx/vmx.h void free_loaded_vmcs(struct loaded_vmcs *loaded_vmcs);
loaded_vmcs       480 arch/x86/kvm/vmx/vmx.h void loaded_vmcs_init(struct loaded_vmcs *loaded_vmcs);
loaded_vmcs       481 arch/x86/kvm/vmx/vmx.h void loaded_vmcs_clear(struct loaded_vmcs *loaded_vmcs);