vmcs12 400 arch/x86/include/uapi/asm/kvm.h __u8 vmcs12[KVM_STATE_NESTED_VMX_VMCS_SIZE]; vmcs12 57 arch/x86/kvm/vmx/nested.c #define SHADOW_FIELD_RO(x, y) { x, offsetof(struct vmcs12, y) }, vmcs12 64 arch/x86/kvm/vmx/nested.c #define SHADOW_FIELD_RW(x, y) { x, offsetof(struct vmcs12, y) }, vmcs12 328 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12 = get_vmcs12(vcpu); vmcs12 343 arch/x86/kvm/vmx/nested.c vmcs12->guest_physical_address = fault->address; vmcs12 370 arch/x86/kvm/vmx/nested.c static bool nested_vmx_is_page_fault_vmexit(struct vmcs12 *vmcs12, vmcs12 375 arch/x86/kvm/vmx/nested.c bit = (vmcs12->exception_bitmap & (1u << PF_VECTOR)) != 0; vmcs12 377 arch/x86/kvm/vmx/nested.c (error_code & vmcs12->page_fault_error_code_mask) != vmcs12 378 arch/x86/kvm/vmx/nested.c vmcs12->page_fault_error_code_match; vmcs12 389 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12 = get_vmcs12(vcpu); vmcs12 399 arch/x86/kvm/vmx/nested.c if (nested_vmx_is_page_fault_vmexit(vmcs12, vmcs12 404 arch/x86/kvm/vmx/nested.c } else if (vmcs12->exception_bitmap & (1u << nr)) { vmcs12 424 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12 = get_vmcs12(vcpu); vmcs12 428 arch/x86/kvm/vmx/nested.c if (nested_vmx_is_page_fault_vmexit(vmcs12, fault->error_code) && vmcs12 430 arch/x86/kvm/vmx/nested.c vmcs12->vm_exit_intr_error_code = fault->error_code; vmcs12 446 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12) vmcs12 448 arch/x86/kvm/vmx/nested.c if (!nested_cpu_has(vmcs12, CPU_BASED_USE_IO_BITMAPS)) vmcs12 451 arch/x86/kvm/vmx/nested.c if (CC(!page_address_valid(vcpu, vmcs12->io_bitmap_a)) || vmcs12 452 arch/x86/kvm/vmx/nested.c CC(!page_address_valid(vcpu, vmcs12->io_bitmap_b))) vmcs12 459 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12) vmcs12 461 arch/x86/kvm/vmx/nested.c if (!nested_cpu_has(vmcs12, CPU_BASED_USE_MSR_BITMAPS)) vmcs12 464 arch/x86/kvm/vmx/nested.c if (CC(!page_address_valid(vcpu, vmcs12->msr_bitmap))) vmcs12 471 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12) vmcs12 473 arch/x86/kvm/vmx/nested.c if (!nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW)) vmcs12 476 arch/x86/kvm/vmx/nested.c if (CC(!page_address_valid(vcpu, vmcs12->virtual_apic_page_addr))) vmcs12 562 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12) vmcs12 571 arch/x86/kvm/vmx/nested.c !nested_cpu_has(vmcs12, CPU_BASED_USE_MSR_BITMAPS)) vmcs12 574 arch/x86/kvm/vmx/nested.c if (kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->msr_bitmap), map)) vmcs12 586 arch/x86/kvm/vmx/nested.c if (nested_cpu_has_virt_x2apic_mode(vmcs12)) { vmcs12 587 arch/x86/kvm/vmx/nested.c if (nested_cpu_has_apic_reg_virt(vmcs12)) { vmcs12 606 arch/x86/kvm/vmx/nested.c if (nested_cpu_has_vid(vmcs12)) { vmcs12 659 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12) vmcs12 662 arch/x86/kvm/vmx/nested.c struct vmcs12 *shadow; vmcs12 664 arch/x86/kvm/vmx/nested.c if (!nested_cpu_has_shadow_vmcs(vmcs12) || vmcs12 665 arch/x86/kvm/vmx/nested.c vmcs12->vmcs_link_pointer == -1ull) vmcs12 670 arch/x86/kvm/vmx/nested.c if (kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->vmcs_link_pointer), &map)) vmcs12 678 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12) vmcs12 682 arch/x86/kvm/vmx/nested.c if (!nested_cpu_has_shadow_vmcs(vmcs12) || vmcs12 683 arch/x86/kvm/vmx/nested.c vmcs12->vmcs_link_pointer == -1ull) vmcs12 686 arch/x86/kvm/vmx/nested.c kvm_write_guest(vmx->vcpu.kvm, vmcs12->vmcs_link_pointer, vmcs12 706 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12) vmcs12 708 arch/x86/kvm/vmx/nested.c if (nested_cpu_has2(vmcs12, SECONDARY_EXEC_VIRTUALIZE_APIC_ACCESSES) && vmcs12 709 arch/x86/kvm/vmx/nested.c 
CC(!page_address_valid(vcpu, vmcs12->apic_access_addr))) vmcs12 716 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12) vmcs12 718 arch/x86/kvm/vmx/nested.c if (!nested_cpu_has_virt_x2apic_mode(vmcs12) && vmcs12 719 arch/x86/kvm/vmx/nested.c !nested_cpu_has_apic_reg_virt(vmcs12) && vmcs12 720 arch/x86/kvm/vmx/nested.c !nested_cpu_has_vid(vmcs12) && vmcs12 721 arch/x86/kvm/vmx/nested.c !nested_cpu_has_posted_intr(vmcs12)) vmcs12 728 arch/x86/kvm/vmx/nested.c if (CC(nested_cpu_has_virt_x2apic_mode(vmcs12) && vmcs12 729 arch/x86/kvm/vmx/nested.c nested_cpu_has2(vmcs12, SECONDARY_EXEC_VIRTUALIZE_APIC_ACCESSES))) vmcs12 736 arch/x86/kvm/vmx/nested.c if (CC(nested_cpu_has_vid(vmcs12) && !nested_exit_on_intr(vcpu))) vmcs12 746 arch/x86/kvm/vmx/nested.c if (nested_cpu_has_posted_intr(vmcs12) && vmcs12 747 arch/x86/kvm/vmx/nested.c (CC(!nested_cpu_has_vid(vmcs12)) || vmcs12 749 arch/x86/kvm/vmx/nested.c CC((vmcs12->posted_intr_nv & 0xff00)) || vmcs12 750 arch/x86/kvm/vmx/nested.c CC((vmcs12->posted_intr_desc_addr & 0x3f)) || vmcs12 751 arch/x86/kvm/vmx/nested.c CC((vmcs12->posted_intr_desc_addr >> cpuid_maxphyaddr(vcpu))))) vmcs12 755 arch/x86/kvm/vmx/nested.c if (CC(!nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW))) vmcs12 777 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12) vmcs12 780 arch/x86/kvm/vmx/nested.c vmcs12->vm_exit_msr_load_count, vmcs12 781 arch/x86/kvm/vmx/nested.c vmcs12->vm_exit_msr_load_addr)) || vmcs12 783 arch/x86/kvm/vmx/nested.c vmcs12->vm_exit_msr_store_count, vmcs12 784 arch/x86/kvm/vmx/nested.c vmcs12->vm_exit_msr_store_addr))) vmcs12 791 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12) vmcs12 794 arch/x86/kvm/vmx/nested.c vmcs12->vm_entry_msr_load_count, vmcs12 795 arch/x86/kvm/vmx/nested.c vmcs12->vm_entry_msr_load_addr))) vmcs12 802 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12) vmcs12 804 arch/x86/kvm/vmx/nested.c if (!nested_cpu_has_pml(vmcs12)) vmcs12 807 arch/x86/kvm/vmx/nested.c if (CC(!nested_cpu_has_ept(vmcs12)) || vmcs12 808 arch/x86/kvm/vmx/nested.c CC(!page_address_valid(vcpu, vmcs12->pml_address))) vmcs12 815 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12) vmcs12 817 arch/x86/kvm/vmx/nested.c if (CC(nested_cpu_has2(vmcs12, SECONDARY_EXEC_UNRESTRICTED_GUEST) && vmcs12 818 arch/x86/kvm/vmx/nested.c !nested_cpu_has_ept(vmcs12))) vmcs12 824 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12) vmcs12 826 arch/x86/kvm/vmx/nested.c if (CC(nested_cpu_has2(vmcs12, SECONDARY_EXEC_MODE_BASED_EPT_EXEC) && vmcs12 827 arch/x86/kvm/vmx/nested.c !nested_cpu_has_ept(vmcs12))) vmcs12 833 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12) vmcs12 835 arch/x86/kvm/vmx/nested.c if (!nested_cpu_has_shadow_vmcs(vmcs12)) vmcs12 838 arch/x86/kvm/vmx/nested.c if (CC(!page_address_valid(vcpu, vmcs12->vmread_bitmap)) || vmcs12 839 arch/x86/kvm/vmx/nested.c CC(!page_address_valid(vcpu, vmcs12->vmwrite_bitmap))) vmcs12 1035 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12 = get_vmcs12(vcpu); vmcs12 1037 arch/x86/kvm/vmx/nested.c return nested_cpu_has_ept(vmcs12) || vmcs12 1038 arch/x86/kvm/vmx/nested.c (nested_cpu_has_vpid(vmcs12) && to_vmx(vcpu)->nested.vpid02); vmcs12 1369 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12 = get_vmcs12(&vmx->vcpu); vmcs12 1384 arch/x86/kvm/vmx/nested.c vmcs12_write_any(vmcs12, field.encoding, field.offset, val); vmcs12 1404 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12 = get_vmcs12(&vmx->vcpu); vmcs12 1417 arch/x86/kvm/vmx/nested.c val = vmcs12_read_any(vmcs12, field.encoding, vmcs12 1429 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12 = vmx->nested.cached_vmcs12; 
vmcs12 1433 arch/x86/kvm/vmx/nested.c vmcs12->tpr_threshold = evmcs->tpr_threshold; vmcs12 1434 arch/x86/kvm/vmx/nested.c vmcs12->guest_rip = evmcs->guest_rip; vmcs12 1438 arch/x86/kvm/vmx/nested.c vmcs12->guest_rsp = evmcs->guest_rsp; vmcs12 1439 arch/x86/kvm/vmx/nested.c vmcs12->guest_rflags = evmcs->guest_rflags; vmcs12 1440 arch/x86/kvm/vmx/nested.c vmcs12->guest_interruptibility_info = vmcs12 1446 arch/x86/kvm/vmx/nested.c vmcs12->cpu_based_vm_exec_control = vmcs12 1452 arch/x86/kvm/vmx/nested.c vmcs12->exception_bitmap = evmcs->exception_bitmap; vmcs12 1457 arch/x86/kvm/vmx/nested.c vmcs12->vm_entry_controls = evmcs->vm_entry_controls; vmcs12 1462 arch/x86/kvm/vmx/nested.c vmcs12->vm_entry_intr_info_field = vmcs12 1464 arch/x86/kvm/vmx/nested.c vmcs12->vm_entry_exception_error_code = vmcs12 1466 arch/x86/kvm/vmx/nested.c vmcs12->vm_entry_instruction_len = vmcs12 1472 arch/x86/kvm/vmx/nested.c vmcs12->host_ia32_pat = evmcs->host_ia32_pat; vmcs12 1473 arch/x86/kvm/vmx/nested.c vmcs12->host_ia32_efer = evmcs->host_ia32_efer; vmcs12 1474 arch/x86/kvm/vmx/nested.c vmcs12->host_cr0 = evmcs->host_cr0; vmcs12 1475 arch/x86/kvm/vmx/nested.c vmcs12->host_cr3 = evmcs->host_cr3; vmcs12 1476 arch/x86/kvm/vmx/nested.c vmcs12->host_cr4 = evmcs->host_cr4; vmcs12 1477 arch/x86/kvm/vmx/nested.c vmcs12->host_ia32_sysenter_esp = evmcs->host_ia32_sysenter_esp; vmcs12 1478 arch/x86/kvm/vmx/nested.c vmcs12->host_ia32_sysenter_eip = evmcs->host_ia32_sysenter_eip; vmcs12 1479 arch/x86/kvm/vmx/nested.c vmcs12->host_rip = evmcs->host_rip; vmcs12 1480 arch/x86/kvm/vmx/nested.c vmcs12->host_ia32_sysenter_cs = evmcs->host_ia32_sysenter_cs; vmcs12 1481 arch/x86/kvm/vmx/nested.c vmcs12->host_es_selector = evmcs->host_es_selector; vmcs12 1482 arch/x86/kvm/vmx/nested.c vmcs12->host_cs_selector = evmcs->host_cs_selector; vmcs12 1483 arch/x86/kvm/vmx/nested.c vmcs12->host_ss_selector = evmcs->host_ss_selector; vmcs12 1484 arch/x86/kvm/vmx/nested.c vmcs12->host_ds_selector = evmcs->host_ds_selector; vmcs12 1485 arch/x86/kvm/vmx/nested.c vmcs12->host_fs_selector = evmcs->host_fs_selector; vmcs12 1486 arch/x86/kvm/vmx/nested.c vmcs12->host_gs_selector = evmcs->host_gs_selector; vmcs12 1487 arch/x86/kvm/vmx/nested.c vmcs12->host_tr_selector = evmcs->host_tr_selector; vmcs12 1492 arch/x86/kvm/vmx/nested.c vmcs12->pin_based_vm_exec_control = vmcs12 1494 arch/x86/kvm/vmx/nested.c vmcs12->vm_exit_controls = evmcs->vm_exit_controls; vmcs12 1495 arch/x86/kvm/vmx/nested.c vmcs12->secondary_vm_exec_control = vmcs12 1501 arch/x86/kvm/vmx/nested.c vmcs12->io_bitmap_a = evmcs->io_bitmap_a; vmcs12 1502 arch/x86/kvm/vmx/nested.c vmcs12->io_bitmap_b = evmcs->io_bitmap_b; vmcs12 1507 arch/x86/kvm/vmx/nested.c vmcs12->msr_bitmap = evmcs->msr_bitmap; vmcs12 1512 arch/x86/kvm/vmx/nested.c vmcs12->guest_es_base = evmcs->guest_es_base; vmcs12 1513 arch/x86/kvm/vmx/nested.c vmcs12->guest_cs_base = evmcs->guest_cs_base; vmcs12 1514 arch/x86/kvm/vmx/nested.c vmcs12->guest_ss_base = evmcs->guest_ss_base; vmcs12 1515 arch/x86/kvm/vmx/nested.c vmcs12->guest_ds_base = evmcs->guest_ds_base; vmcs12 1516 arch/x86/kvm/vmx/nested.c vmcs12->guest_fs_base = evmcs->guest_fs_base; vmcs12 1517 arch/x86/kvm/vmx/nested.c vmcs12->guest_gs_base = evmcs->guest_gs_base; vmcs12 1518 arch/x86/kvm/vmx/nested.c vmcs12->guest_ldtr_base = evmcs->guest_ldtr_base; vmcs12 1519 arch/x86/kvm/vmx/nested.c vmcs12->guest_tr_base = evmcs->guest_tr_base; vmcs12 1520 arch/x86/kvm/vmx/nested.c vmcs12->guest_gdtr_base = evmcs->guest_gdtr_base; vmcs12 1521 arch/x86/kvm/vmx/nested.c 
vmcs12->guest_idtr_base = evmcs->guest_idtr_base; vmcs12 1522 arch/x86/kvm/vmx/nested.c vmcs12->guest_es_limit = evmcs->guest_es_limit; vmcs12 1523 arch/x86/kvm/vmx/nested.c vmcs12->guest_cs_limit = evmcs->guest_cs_limit; vmcs12 1524 arch/x86/kvm/vmx/nested.c vmcs12->guest_ss_limit = evmcs->guest_ss_limit; vmcs12 1525 arch/x86/kvm/vmx/nested.c vmcs12->guest_ds_limit = evmcs->guest_ds_limit; vmcs12 1526 arch/x86/kvm/vmx/nested.c vmcs12->guest_fs_limit = evmcs->guest_fs_limit; vmcs12 1527 arch/x86/kvm/vmx/nested.c vmcs12->guest_gs_limit = evmcs->guest_gs_limit; vmcs12 1528 arch/x86/kvm/vmx/nested.c vmcs12->guest_ldtr_limit = evmcs->guest_ldtr_limit; vmcs12 1529 arch/x86/kvm/vmx/nested.c vmcs12->guest_tr_limit = evmcs->guest_tr_limit; vmcs12 1530 arch/x86/kvm/vmx/nested.c vmcs12->guest_gdtr_limit = evmcs->guest_gdtr_limit; vmcs12 1531 arch/x86/kvm/vmx/nested.c vmcs12->guest_idtr_limit = evmcs->guest_idtr_limit; vmcs12 1532 arch/x86/kvm/vmx/nested.c vmcs12->guest_es_ar_bytes = evmcs->guest_es_ar_bytes; vmcs12 1533 arch/x86/kvm/vmx/nested.c vmcs12->guest_cs_ar_bytes = evmcs->guest_cs_ar_bytes; vmcs12 1534 arch/x86/kvm/vmx/nested.c vmcs12->guest_ss_ar_bytes = evmcs->guest_ss_ar_bytes; vmcs12 1535 arch/x86/kvm/vmx/nested.c vmcs12->guest_ds_ar_bytes = evmcs->guest_ds_ar_bytes; vmcs12 1536 arch/x86/kvm/vmx/nested.c vmcs12->guest_fs_ar_bytes = evmcs->guest_fs_ar_bytes; vmcs12 1537 arch/x86/kvm/vmx/nested.c vmcs12->guest_gs_ar_bytes = evmcs->guest_gs_ar_bytes; vmcs12 1538 arch/x86/kvm/vmx/nested.c vmcs12->guest_ldtr_ar_bytes = evmcs->guest_ldtr_ar_bytes; vmcs12 1539 arch/x86/kvm/vmx/nested.c vmcs12->guest_tr_ar_bytes = evmcs->guest_tr_ar_bytes; vmcs12 1540 arch/x86/kvm/vmx/nested.c vmcs12->guest_es_selector = evmcs->guest_es_selector; vmcs12 1541 arch/x86/kvm/vmx/nested.c vmcs12->guest_cs_selector = evmcs->guest_cs_selector; vmcs12 1542 arch/x86/kvm/vmx/nested.c vmcs12->guest_ss_selector = evmcs->guest_ss_selector; vmcs12 1543 arch/x86/kvm/vmx/nested.c vmcs12->guest_ds_selector = evmcs->guest_ds_selector; vmcs12 1544 arch/x86/kvm/vmx/nested.c vmcs12->guest_fs_selector = evmcs->guest_fs_selector; vmcs12 1545 arch/x86/kvm/vmx/nested.c vmcs12->guest_gs_selector = evmcs->guest_gs_selector; vmcs12 1546 arch/x86/kvm/vmx/nested.c vmcs12->guest_ldtr_selector = evmcs->guest_ldtr_selector; vmcs12 1547 arch/x86/kvm/vmx/nested.c vmcs12->guest_tr_selector = evmcs->guest_tr_selector; vmcs12 1552 arch/x86/kvm/vmx/nested.c vmcs12->tsc_offset = evmcs->tsc_offset; vmcs12 1553 arch/x86/kvm/vmx/nested.c vmcs12->virtual_apic_page_addr = evmcs->virtual_apic_page_addr; vmcs12 1554 arch/x86/kvm/vmx/nested.c vmcs12->xss_exit_bitmap = evmcs->xss_exit_bitmap; vmcs12 1559 arch/x86/kvm/vmx/nested.c vmcs12->cr0_guest_host_mask = evmcs->cr0_guest_host_mask; vmcs12 1560 arch/x86/kvm/vmx/nested.c vmcs12->cr4_guest_host_mask = evmcs->cr4_guest_host_mask; vmcs12 1561 arch/x86/kvm/vmx/nested.c vmcs12->cr0_read_shadow = evmcs->cr0_read_shadow; vmcs12 1562 arch/x86/kvm/vmx/nested.c vmcs12->cr4_read_shadow = evmcs->cr4_read_shadow; vmcs12 1563 arch/x86/kvm/vmx/nested.c vmcs12->guest_cr0 = evmcs->guest_cr0; vmcs12 1564 arch/x86/kvm/vmx/nested.c vmcs12->guest_cr3 = evmcs->guest_cr3; vmcs12 1565 arch/x86/kvm/vmx/nested.c vmcs12->guest_cr4 = evmcs->guest_cr4; vmcs12 1566 arch/x86/kvm/vmx/nested.c vmcs12->guest_dr7 = evmcs->guest_dr7; vmcs12 1571 arch/x86/kvm/vmx/nested.c vmcs12->host_fs_base = evmcs->host_fs_base; vmcs12 1572 arch/x86/kvm/vmx/nested.c vmcs12->host_gs_base = evmcs->host_gs_base; vmcs12 1573 arch/x86/kvm/vmx/nested.c 
vmcs12->host_tr_base = evmcs->host_tr_base; vmcs12 1574 arch/x86/kvm/vmx/nested.c vmcs12->host_gdtr_base = evmcs->host_gdtr_base; vmcs12 1575 arch/x86/kvm/vmx/nested.c vmcs12->host_idtr_base = evmcs->host_idtr_base; vmcs12 1576 arch/x86/kvm/vmx/nested.c vmcs12->host_rsp = evmcs->host_rsp; vmcs12 1581 arch/x86/kvm/vmx/nested.c vmcs12->ept_pointer = evmcs->ept_pointer; vmcs12 1582 arch/x86/kvm/vmx/nested.c vmcs12->virtual_processor_id = evmcs->virtual_processor_id; vmcs12 1587 arch/x86/kvm/vmx/nested.c vmcs12->vmcs_link_pointer = evmcs->vmcs_link_pointer; vmcs12 1588 arch/x86/kvm/vmx/nested.c vmcs12->guest_ia32_debugctl = evmcs->guest_ia32_debugctl; vmcs12 1589 arch/x86/kvm/vmx/nested.c vmcs12->guest_ia32_pat = evmcs->guest_ia32_pat; vmcs12 1590 arch/x86/kvm/vmx/nested.c vmcs12->guest_ia32_efer = evmcs->guest_ia32_efer; vmcs12 1591 arch/x86/kvm/vmx/nested.c vmcs12->guest_pdptr0 = evmcs->guest_pdptr0; vmcs12 1592 arch/x86/kvm/vmx/nested.c vmcs12->guest_pdptr1 = evmcs->guest_pdptr1; vmcs12 1593 arch/x86/kvm/vmx/nested.c vmcs12->guest_pdptr2 = evmcs->guest_pdptr2; vmcs12 1594 arch/x86/kvm/vmx/nested.c vmcs12->guest_pdptr3 = evmcs->guest_pdptr3; vmcs12 1595 arch/x86/kvm/vmx/nested.c vmcs12->guest_pending_dbg_exceptions = vmcs12 1597 arch/x86/kvm/vmx/nested.c vmcs12->guest_sysenter_esp = evmcs->guest_sysenter_esp; vmcs12 1598 arch/x86/kvm/vmx/nested.c vmcs12->guest_sysenter_eip = evmcs->guest_sysenter_eip; vmcs12 1599 arch/x86/kvm/vmx/nested.c vmcs12->guest_bndcfgs = evmcs->guest_bndcfgs; vmcs12 1600 arch/x86/kvm/vmx/nested.c vmcs12->guest_activity_state = evmcs->guest_activity_state; vmcs12 1601 arch/x86/kvm/vmx/nested.c vmcs12->guest_sysenter_cs = evmcs->guest_sysenter_cs; vmcs12 1649 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12 = vmx->nested.cached_vmcs12; vmcs12 1720 arch/x86/kvm/vmx/nested.c evmcs->guest_es_selector = vmcs12->guest_es_selector; vmcs12 1721 arch/x86/kvm/vmx/nested.c evmcs->guest_cs_selector = vmcs12->guest_cs_selector; vmcs12 1722 arch/x86/kvm/vmx/nested.c evmcs->guest_ss_selector = vmcs12->guest_ss_selector; vmcs12 1723 arch/x86/kvm/vmx/nested.c evmcs->guest_ds_selector = vmcs12->guest_ds_selector; vmcs12 1724 arch/x86/kvm/vmx/nested.c evmcs->guest_fs_selector = vmcs12->guest_fs_selector; vmcs12 1725 arch/x86/kvm/vmx/nested.c evmcs->guest_gs_selector = vmcs12->guest_gs_selector; vmcs12 1726 arch/x86/kvm/vmx/nested.c evmcs->guest_ldtr_selector = vmcs12->guest_ldtr_selector; vmcs12 1727 arch/x86/kvm/vmx/nested.c evmcs->guest_tr_selector = vmcs12->guest_tr_selector; vmcs12 1729 arch/x86/kvm/vmx/nested.c evmcs->guest_es_limit = vmcs12->guest_es_limit; vmcs12 1730 arch/x86/kvm/vmx/nested.c evmcs->guest_cs_limit = vmcs12->guest_cs_limit; vmcs12 1731 arch/x86/kvm/vmx/nested.c evmcs->guest_ss_limit = vmcs12->guest_ss_limit; vmcs12 1732 arch/x86/kvm/vmx/nested.c evmcs->guest_ds_limit = vmcs12->guest_ds_limit; vmcs12 1733 arch/x86/kvm/vmx/nested.c evmcs->guest_fs_limit = vmcs12->guest_fs_limit; vmcs12 1734 arch/x86/kvm/vmx/nested.c evmcs->guest_gs_limit = vmcs12->guest_gs_limit; vmcs12 1735 arch/x86/kvm/vmx/nested.c evmcs->guest_ldtr_limit = vmcs12->guest_ldtr_limit; vmcs12 1736 arch/x86/kvm/vmx/nested.c evmcs->guest_tr_limit = vmcs12->guest_tr_limit; vmcs12 1737 arch/x86/kvm/vmx/nested.c evmcs->guest_gdtr_limit = vmcs12->guest_gdtr_limit; vmcs12 1738 arch/x86/kvm/vmx/nested.c evmcs->guest_idtr_limit = vmcs12->guest_idtr_limit; vmcs12 1740 arch/x86/kvm/vmx/nested.c evmcs->guest_es_ar_bytes = vmcs12->guest_es_ar_bytes; vmcs12 1741 arch/x86/kvm/vmx/nested.c evmcs->guest_cs_ar_bytes = 
vmcs12->guest_cs_ar_bytes; vmcs12 1742 arch/x86/kvm/vmx/nested.c evmcs->guest_ss_ar_bytes = vmcs12->guest_ss_ar_bytes; vmcs12 1743 arch/x86/kvm/vmx/nested.c evmcs->guest_ds_ar_bytes = vmcs12->guest_ds_ar_bytes; vmcs12 1744 arch/x86/kvm/vmx/nested.c evmcs->guest_fs_ar_bytes = vmcs12->guest_fs_ar_bytes; vmcs12 1745 arch/x86/kvm/vmx/nested.c evmcs->guest_gs_ar_bytes = vmcs12->guest_gs_ar_bytes; vmcs12 1746 arch/x86/kvm/vmx/nested.c evmcs->guest_ldtr_ar_bytes = vmcs12->guest_ldtr_ar_bytes; vmcs12 1747 arch/x86/kvm/vmx/nested.c evmcs->guest_tr_ar_bytes = vmcs12->guest_tr_ar_bytes; vmcs12 1749 arch/x86/kvm/vmx/nested.c evmcs->guest_es_base = vmcs12->guest_es_base; vmcs12 1750 arch/x86/kvm/vmx/nested.c evmcs->guest_cs_base = vmcs12->guest_cs_base; vmcs12 1751 arch/x86/kvm/vmx/nested.c evmcs->guest_ss_base = vmcs12->guest_ss_base; vmcs12 1752 arch/x86/kvm/vmx/nested.c evmcs->guest_ds_base = vmcs12->guest_ds_base; vmcs12 1753 arch/x86/kvm/vmx/nested.c evmcs->guest_fs_base = vmcs12->guest_fs_base; vmcs12 1754 arch/x86/kvm/vmx/nested.c evmcs->guest_gs_base = vmcs12->guest_gs_base; vmcs12 1755 arch/x86/kvm/vmx/nested.c evmcs->guest_ldtr_base = vmcs12->guest_ldtr_base; vmcs12 1756 arch/x86/kvm/vmx/nested.c evmcs->guest_tr_base = vmcs12->guest_tr_base; vmcs12 1757 arch/x86/kvm/vmx/nested.c evmcs->guest_gdtr_base = vmcs12->guest_gdtr_base; vmcs12 1758 arch/x86/kvm/vmx/nested.c evmcs->guest_idtr_base = vmcs12->guest_idtr_base; vmcs12 1760 arch/x86/kvm/vmx/nested.c evmcs->guest_ia32_pat = vmcs12->guest_ia32_pat; vmcs12 1761 arch/x86/kvm/vmx/nested.c evmcs->guest_ia32_efer = vmcs12->guest_ia32_efer; vmcs12 1763 arch/x86/kvm/vmx/nested.c evmcs->guest_pdptr0 = vmcs12->guest_pdptr0; vmcs12 1764 arch/x86/kvm/vmx/nested.c evmcs->guest_pdptr1 = vmcs12->guest_pdptr1; vmcs12 1765 arch/x86/kvm/vmx/nested.c evmcs->guest_pdptr2 = vmcs12->guest_pdptr2; vmcs12 1766 arch/x86/kvm/vmx/nested.c evmcs->guest_pdptr3 = vmcs12->guest_pdptr3; vmcs12 1769 arch/x86/kvm/vmx/nested.c vmcs12->guest_pending_dbg_exceptions; vmcs12 1770 arch/x86/kvm/vmx/nested.c evmcs->guest_sysenter_esp = vmcs12->guest_sysenter_esp; vmcs12 1771 arch/x86/kvm/vmx/nested.c evmcs->guest_sysenter_eip = vmcs12->guest_sysenter_eip; vmcs12 1773 arch/x86/kvm/vmx/nested.c evmcs->guest_activity_state = vmcs12->guest_activity_state; vmcs12 1774 arch/x86/kvm/vmx/nested.c evmcs->guest_sysenter_cs = vmcs12->guest_sysenter_cs; vmcs12 1776 arch/x86/kvm/vmx/nested.c evmcs->guest_cr0 = vmcs12->guest_cr0; vmcs12 1777 arch/x86/kvm/vmx/nested.c evmcs->guest_cr3 = vmcs12->guest_cr3; vmcs12 1778 arch/x86/kvm/vmx/nested.c evmcs->guest_cr4 = vmcs12->guest_cr4; vmcs12 1779 arch/x86/kvm/vmx/nested.c evmcs->guest_dr7 = vmcs12->guest_dr7; vmcs12 1781 arch/x86/kvm/vmx/nested.c evmcs->guest_physical_address = vmcs12->guest_physical_address; vmcs12 1783 arch/x86/kvm/vmx/nested.c evmcs->vm_instruction_error = vmcs12->vm_instruction_error; vmcs12 1784 arch/x86/kvm/vmx/nested.c evmcs->vm_exit_reason = vmcs12->vm_exit_reason; vmcs12 1785 arch/x86/kvm/vmx/nested.c evmcs->vm_exit_intr_info = vmcs12->vm_exit_intr_info; vmcs12 1786 arch/x86/kvm/vmx/nested.c evmcs->vm_exit_intr_error_code = vmcs12->vm_exit_intr_error_code; vmcs12 1787 arch/x86/kvm/vmx/nested.c evmcs->idt_vectoring_info_field = vmcs12->idt_vectoring_info_field; vmcs12 1788 arch/x86/kvm/vmx/nested.c evmcs->idt_vectoring_error_code = vmcs12->idt_vectoring_error_code; vmcs12 1789 arch/x86/kvm/vmx/nested.c evmcs->vm_exit_instruction_len = vmcs12->vm_exit_instruction_len; vmcs12 1790 arch/x86/kvm/vmx/nested.c 
evmcs->vmx_instruction_info = vmcs12->vmx_instruction_info; vmcs12 1792 arch/x86/kvm/vmx/nested.c evmcs->exit_qualification = vmcs12->exit_qualification; vmcs12 1794 arch/x86/kvm/vmx/nested.c evmcs->guest_linear_address = vmcs12->guest_linear_address; vmcs12 1795 arch/x86/kvm/vmx/nested.c evmcs->guest_rsp = vmcs12->guest_rsp; vmcs12 1796 arch/x86/kvm/vmx/nested.c evmcs->guest_rflags = vmcs12->guest_rflags; vmcs12 1799 arch/x86/kvm/vmx/nested.c vmcs12->guest_interruptibility_info; vmcs12 1800 arch/x86/kvm/vmx/nested.c evmcs->cpu_based_vm_exec_control = vmcs12->cpu_based_vm_exec_control; vmcs12 1801 arch/x86/kvm/vmx/nested.c evmcs->vm_entry_controls = vmcs12->vm_entry_controls; vmcs12 1802 arch/x86/kvm/vmx/nested.c evmcs->vm_entry_intr_info_field = vmcs12->vm_entry_intr_info_field; vmcs12 1804 arch/x86/kvm/vmx/nested.c vmcs12->vm_entry_exception_error_code; vmcs12 1805 arch/x86/kvm/vmx/nested.c evmcs->vm_entry_instruction_len = vmcs12->vm_entry_instruction_len; vmcs12 1807 arch/x86/kvm/vmx/nested.c evmcs->guest_rip = vmcs12->guest_rip; vmcs12 1809 arch/x86/kvm/vmx/nested.c evmcs->guest_bndcfgs = vmcs12->guest_bndcfgs; vmcs12 1883 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12 = get_vmcs12(vcpu); vmcs12 1884 arch/x86/kvm/vmx/nested.c memset(vmcs12, 0, sizeof(*vmcs12)); vmcs12 1885 arch/x86/kvm/vmx/nested.c vmcs12->hdr.revision_id = VMCS12_REVISION; vmcs12 1961 arch/x86/kvm/vmx/nested.c static u64 nested_vmx_calc_efer(struct vcpu_vmx *vmx, struct vmcs12 *vmcs12) vmcs12 1964 arch/x86/kvm/vmx/nested.c (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_IA32_EFER)) vmcs12 1965 arch/x86/kvm/vmx/nested.c return vmcs12->guest_ia32_efer; vmcs12 1966 arch/x86/kvm/vmx/nested.c else if (vmcs12->vm_entry_controls & VM_ENTRY_IA32E_MODE) vmcs12 2030 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12) vmcs12 2037 arch/x86/kvm/vmx/nested.c if (nested_cpu_has_vpid(vmcs12) && vmx->nested.vpid02) vmcs12 2044 arch/x86/kvm/vmx/nested.c static void prepare_vmcs02_early(struct vcpu_vmx *vmx, struct vmcs12 *vmcs12) vmcs12 2047 arch/x86/kvm/vmx/nested.c u64 guest_efer = nested_vmx_calc_efer(vmx, vmcs12); vmcs12 2050 arch/x86/kvm/vmx/nested.c prepare_vmcs02_early_rare(vmx, vmcs12); vmcs12 2056 arch/x86/kvm/vmx/nested.c exec_control |= (vmcs12->pin_based_vm_exec_control & vmcs12 2060 arch/x86/kvm/vmx/nested.c if (nested_cpu_has_posted_intr(vmcs12)) { vmcs12 2061 arch/x86/kvm/vmx/nested.c vmx->nested.posted_intr_nv = vmcs12->posted_intr_nv; vmcs12 2075 arch/x86/kvm/vmx/nested.c exec_control |= vmcs12->cpu_based_vm_exec_control; vmcs12 2078 arch/x86/kvm/vmx/nested.c vmcs_write32(TPR_THRESHOLD, vmcs12->tpr_threshold); vmcs12 2118 arch/x86/kvm/vmx/nested.c if (nested_cpu_has(vmcs12, vmcs12 2120 arch/x86/kvm/vmx/nested.c vmcs12_exec_ctrl = vmcs12->secondary_vm_exec_control & vmcs12 2133 arch/x86/kvm/vmx/nested.c (vmcs12->guest_cr4 & X86_CR4_UMIP)) vmcs12 2138 arch/x86/kvm/vmx/nested.c vmcs12->guest_intr_status); vmcs12 2151 arch/x86/kvm/vmx/nested.c exec_control = (vmcs12->vm_entry_controls | vmx_vmentry_ctrl()) & vmcs12 2178 arch/x86/kvm/vmx/nested.c vmcs12->vm_entry_intr_info_field); vmcs12 2180 arch/x86/kvm/vmx/nested.c vmcs12->vm_entry_exception_error_code); vmcs12 2182 arch/x86/kvm/vmx/nested.c vmcs12->vm_entry_instruction_len); vmcs12 2184 arch/x86/kvm/vmx/nested.c vmcs12->guest_interruptibility_info); vmcs12 2186 arch/x86/kvm/vmx/nested.c !(vmcs12->guest_interruptibility_info & GUEST_INTR_STATE_NMI); vmcs12 2192 arch/x86/kvm/vmx/nested.c static void prepare_vmcs02_rare(struct vcpu_vmx *vmx, struct vmcs12 *vmcs12) vmcs12 2198 
arch/x86/kvm/vmx/nested.c vmcs_write16(GUEST_ES_SELECTOR, vmcs12->guest_es_selector); vmcs12 2199 arch/x86/kvm/vmx/nested.c vmcs_write16(GUEST_CS_SELECTOR, vmcs12->guest_cs_selector); vmcs12 2200 arch/x86/kvm/vmx/nested.c vmcs_write16(GUEST_SS_SELECTOR, vmcs12->guest_ss_selector); vmcs12 2201 arch/x86/kvm/vmx/nested.c vmcs_write16(GUEST_DS_SELECTOR, vmcs12->guest_ds_selector); vmcs12 2202 arch/x86/kvm/vmx/nested.c vmcs_write16(GUEST_FS_SELECTOR, vmcs12->guest_fs_selector); vmcs12 2203 arch/x86/kvm/vmx/nested.c vmcs_write16(GUEST_GS_SELECTOR, vmcs12->guest_gs_selector); vmcs12 2204 arch/x86/kvm/vmx/nested.c vmcs_write16(GUEST_LDTR_SELECTOR, vmcs12->guest_ldtr_selector); vmcs12 2205 arch/x86/kvm/vmx/nested.c vmcs_write16(GUEST_TR_SELECTOR, vmcs12->guest_tr_selector); vmcs12 2206 arch/x86/kvm/vmx/nested.c vmcs_write32(GUEST_ES_LIMIT, vmcs12->guest_es_limit); vmcs12 2207 arch/x86/kvm/vmx/nested.c vmcs_write32(GUEST_CS_LIMIT, vmcs12->guest_cs_limit); vmcs12 2208 arch/x86/kvm/vmx/nested.c vmcs_write32(GUEST_SS_LIMIT, vmcs12->guest_ss_limit); vmcs12 2209 arch/x86/kvm/vmx/nested.c vmcs_write32(GUEST_DS_LIMIT, vmcs12->guest_ds_limit); vmcs12 2210 arch/x86/kvm/vmx/nested.c vmcs_write32(GUEST_FS_LIMIT, vmcs12->guest_fs_limit); vmcs12 2211 arch/x86/kvm/vmx/nested.c vmcs_write32(GUEST_GS_LIMIT, vmcs12->guest_gs_limit); vmcs12 2212 arch/x86/kvm/vmx/nested.c vmcs_write32(GUEST_LDTR_LIMIT, vmcs12->guest_ldtr_limit); vmcs12 2213 arch/x86/kvm/vmx/nested.c vmcs_write32(GUEST_TR_LIMIT, vmcs12->guest_tr_limit); vmcs12 2214 arch/x86/kvm/vmx/nested.c vmcs_write32(GUEST_GDTR_LIMIT, vmcs12->guest_gdtr_limit); vmcs12 2215 arch/x86/kvm/vmx/nested.c vmcs_write32(GUEST_IDTR_LIMIT, vmcs12->guest_idtr_limit); vmcs12 2216 arch/x86/kvm/vmx/nested.c vmcs_write32(GUEST_CS_AR_BYTES, vmcs12->guest_cs_ar_bytes); vmcs12 2217 arch/x86/kvm/vmx/nested.c vmcs_write32(GUEST_SS_AR_BYTES, vmcs12->guest_ss_ar_bytes); vmcs12 2218 arch/x86/kvm/vmx/nested.c vmcs_write32(GUEST_ES_AR_BYTES, vmcs12->guest_es_ar_bytes); vmcs12 2219 arch/x86/kvm/vmx/nested.c vmcs_write32(GUEST_DS_AR_BYTES, vmcs12->guest_ds_ar_bytes); vmcs12 2220 arch/x86/kvm/vmx/nested.c vmcs_write32(GUEST_FS_AR_BYTES, vmcs12->guest_fs_ar_bytes); vmcs12 2221 arch/x86/kvm/vmx/nested.c vmcs_write32(GUEST_GS_AR_BYTES, vmcs12->guest_gs_ar_bytes); vmcs12 2222 arch/x86/kvm/vmx/nested.c vmcs_write32(GUEST_LDTR_AR_BYTES, vmcs12->guest_ldtr_ar_bytes); vmcs12 2223 arch/x86/kvm/vmx/nested.c vmcs_write32(GUEST_TR_AR_BYTES, vmcs12->guest_tr_ar_bytes); vmcs12 2224 arch/x86/kvm/vmx/nested.c vmcs_writel(GUEST_ES_BASE, vmcs12->guest_es_base); vmcs12 2225 arch/x86/kvm/vmx/nested.c vmcs_writel(GUEST_CS_BASE, vmcs12->guest_cs_base); vmcs12 2226 arch/x86/kvm/vmx/nested.c vmcs_writel(GUEST_SS_BASE, vmcs12->guest_ss_base); vmcs12 2227 arch/x86/kvm/vmx/nested.c vmcs_writel(GUEST_DS_BASE, vmcs12->guest_ds_base); vmcs12 2228 arch/x86/kvm/vmx/nested.c vmcs_writel(GUEST_FS_BASE, vmcs12->guest_fs_base); vmcs12 2229 arch/x86/kvm/vmx/nested.c vmcs_writel(GUEST_GS_BASE, vmcs12->guest_gs_base); vmcs12 2230 arch/x86/kvm/vmx/nested.c vmcs_writel(GUEST_LDTR_BASE, vmcs12->guest_ldtr_base); vmcs12 2231 arch/x86/kvm/vmx/nested.c vmcs_writel(GUEST_TR_BASE, vmcs12->guest_tr_base); vmcs12 2232 arch/x86/kvm/vmx/nested.c vmcs_writel(GUEST_GDTR_BASE, vmcs12->guest_gdtr_base); vmcs12 2233 arch/x86/kvm/vmx/nested.c vmcs_writel(GUEST_IDTR_BASE, vmcs12->guest_idtr_base); vmcs12 2238 arch/x86/kvm/vmx/nested.c vmcs_write32(GUEST_SYSENTER_CS, vmcs12->guest_sysenter_cs); vmcs12 2240 arch/x86/kvm/vmx/nested.c 
vmcs12->guest_pending_dbg_exceptions); vmcs12 2241 arch/x86/kvm/vmx/nested.c vmcs_writel(GUEST_SYSENTER_ESP, vmcs12->guest_sysenter_esp); vmcs12 2242 arch/x86/kvm/vmx/nested.c vmcs_writel(GUEST_SYSENTER_EIP, vmcs12->guest_sysenter_eip); vmcs12 2249 arch/x86/kvm/vmx/nested.c vmcs_write64(GUEST_PDPTR0, vmcs12->guest_pdptr0); vmcs12 2250 arch/x86/kvm/vmx/nested.c vmcs_write64(GUEST_PDPTR1, vmcs12->guest_pdptr1); vmcs12 2251 arch/x86/kvm/vmx/nested.c vmcs_write64(GUEST_PDPTR2, vmcs12->guest_pdptr2); vmcs12 2252 arch/x86/kvm/vmx/nested.c vmcs_write64(GUEST_PDPTR3, vmcs12->guest_pdptr3); vmcs12 2256 arch/x86/kvm/vmx/nested.c (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_BNDCFGS)) vmcs12 2257 arch/x86/kvm/vmx/nested.c vmcs_write64(GUEST_BNDCFGS, vmcs12->guest_bndcfgs); vmcs12 2260 arch/x86/kvm/vmx/nested.c if (nested_cpu_has_xsaves(vmcs12)) vmcs12 2261 arch/x86/kvm/vmx/nested.c vmcs_write64(XSS_EXIT_BITMAP, vmcs12->xss_exit_bitmap); vmcs12 2278 arch/x86/kvm/vmx/nested.c enable_ept ? vmcs12->page_fault_error_code_mask : 0); vmcs12 2280 arch/x86/kvm/vmx/nested.c enable_ept ? vmcs12->page_fault_error_code_match : 0); vmcs12 2283 arch/x86/kvm/vmx/nested.c vmcs_write64(EOI_EXIT_BITMAP0, vmcs12->eoi_exit_bitmap0); vmcs12 2284 arch/x86/kvm/vmx/nested.c vmcs_write64(EOI_EXIT_BITMAP1, vmcs12->eoi_exit_bitmap1); vmcs12 2285 arch/x86/kvm/vmx/nested.c vmcs_write64(EOI_EXIT_BITMAP2, vmcs12->eoi_exit_bitmap2); vmcs12 2286 arch/x86/kvm/vmx/nested.c vmcs_write64(EOI_EXIT_BITMAP3, vmcs12->eoi_exit_bitmap3); vmcs12 2306 arch/x86/kvm/vmx/nested.c static int prepare_vmcs02(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12, vmcs12 2314 arch/x86/kvm/vmx/nested.c prepare_vmcs02_rare(vmx, vmcs12); vmcs12 2323 arch/x86/kvm/vmx/nested.c (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_DEBUG_CONTROLS)) { vmcs12 2324 arch/x86/kvm/vmx/nested.c kvm_set_dr(vcpu, 7, vmcs12->guest_dr7); vmcs12 2325 arch/x86/kvm/vmx/nested.c vmcs_write64(GUEST_IA32_DEBUGCTL, vmcs12->guest_ia32_debugctl); vmcs12 2331 arch/x86/kvm/vmx/nested.c !(vmcs12->vm_entry_controls & VM_ENTRY_LOAD_BNDCFGS))) vmcs12 2333 arch/x86/kvm/vmx/nested.c vmx_set_rflags(vcpu, vmcs12->guest_rflags); vmcs12 2340 arch/x86/kvm/vmx/nested.c vcpu->arch.cr0_guest_owned_bits &= ~vmcs12->cr0_guest_host_mask; vmcs12 2344 arch/x86/kvm/vmx/nested.c (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_IA32_PAT)) { vmcs12 2345 arch/x86/kvm/vmx/nested.c vmcs_write64(GUEST_IA32_PAT, vmcs12->guest_ia32_pat); vmcs12 2346 arch/x86/kvm/vmx/nested.c vcpu->arch.pat = vmcs12->guest_ia32_pat; vmcs12 2365 arch/x86/kvm/vmx/nested.c if (nested_cpu_has_vpid(vmcs12) && nested_has_guest_tlb_tag(vcpu)) { vmcs12 2366 arch/x86/kvm/vmx/nested.c if (vmcs12->virtual_processor_id != vmx->nested.last_vpid) { vmcs12 2367 arch/x86/kvm/vmx/nested.c vmx->nested.last_vpid = vmcs12->virtual_processor_id; vmcs12 2383 arch/x86/kvm/vmx/nested.c if (nested_cpu_has_ept(vmcs12)) vmcs12 2385 arch/x86/kvm/vmx/nested.c else if (nested_cpu_has2(vmcs12, vmcs12 2397 arch/x86/kvm/vmx/nested.c vmx_set_cr0(vcpu, vmcs12->guest_cr0); vmcs12 2398 arch/x86/kvm/vmx/nested.c vmcs_writel(CR0_READ_SHADOW, nested_read_cr0(vmcs12)); vmcs12 2400 arch/x86/kvm/vmx/nested.c vmx_set_cr4(vcpu, vmcs12->guest_cr4); vmcs12 2401 arch/x86/kvm/vmx/nested.c vmcs_writel(CR4_READ_SHADOW, nested_read_cr4(vmcs12)); vmcs12 2403 arch/x86/kvm/vmx/nested.c vcpu->arch.efer = nested_vmx_calc_efer(vmx, vmcs12); vmcs12 2418 arch/x86/kvm/vmx/nested.c if (nested_vmx_load_cr3(vcpu, vmcs12->guest_cr3, nested_cpu_has_ept(vmcs12), vmcs12 2430 arch/x86/kvm/vmx/nested.c vmcs_writel(GUEST_CR3, 
vmcs12->guest_cr3); vmcs12 2433 arch/x86/kvm/vmx/nested.c if (load_guest_pdptrs_vmcs12 && nested_cpu_has_ept(vmcs12) && vmcs12 2435 arch/x86/kvm/vmx/nested.c vmcs_write64(GUEST_PDPTR0, vmcs12->guest_pdptr0); vmcs12 2436 arch/x86/kvm/vmx/nested.c vmcs_write64(GUEST_PDPTR1, vmcs12->guest_pdptr1); vmcs12 2437 arch/x86/kvm/vmx/nested.c vmcs_write64(GUEST_PDPTR2, vmcs12->guest_pdptr2); vmcs12 2438 arch/x86/kvm/vmx/nested.c vmcs_write64(GUEST_PDPTR3, vmcs12->guest_pdptr3); vmcs12 2444 arch/x86/kvm/vmx/nested.c kvm_rsp_write(vcpu, vmcs12->guest_rsp); vmcs12 2445 arch/x86/kvm/vmx/nested.c kvm_rip_write(vcpu, vmcs12->guest_rip); vmcs12 2449 arch/x86/kvm/vmx/nested.c static int nested_vmx_check_nmi_controls(struct vmcs12 *vmcs12) vmcs12 2451 arch/x86/kvm/vmx/nested.c if (CC(!nested_cpu_has_nmi_exiting(vmcs12) && vmcs12 2452 arch/x86/kvm/vmx/nested.c nested_cpu_has_virtual_nmis(vmcs12))) vmcs12 2455 arch/x86/kvm/vmx/nested.c if (CC(!nested_cpu_has_virtual_nmis(vmcs12) && vmcs12 2456 arch/x86/kvm/vmx/nested.c nested_cpu_has(vmcs12, CPU_BASED_VIRTUAL_NMI_PENDING))) vmcs12 2502 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12) vmcs12 2506 arch/x86/kvm/vmx/nested.c if (CC(!vmx_control_verify(vmcs12->pin_based_vm_exec_control, vmcs12 2509 arch/x86/kvm/vmx/nested.c CC(!vmx_control_verify(vmcs12->cpu_based_vm_exec_control, vmcs12 2514 arch/x86/kvm/vmx/nested.c if (nested_cpu_has(vmcs12, CPU_BASED_ACTIVATE_SECONDARY_CONTROLS) && vmcs12 2515 arch/x86/kvm/vmx/nested.c CC(!vmx_control_verify(vmcs12->secondary_vm_exec_control, vmcs12 2520 arch/x86/kvm/vmx/nested.c if (CC(vmcs12->cr3_target_count > nested_cpu_vmx_misc_cr3_count(vcpu)) || vmcs12 2521 arch/x86/kvm/vmx/nested.c nested_vmx_check_io_bitmap_controls(vcpu, vmcs12) || vmcs12 2522 arch/x86/kvm/vmx/nested.c nested_vmx_check_msr_bitmap_controls(vcpu, vmcs12) || vmcs12 2523 arch/x86/kvm/vmx/nested.c nested_vmx_check_tpr_shadow_controls(vcpu, vmcs12) || vmcs12 2524 arch/x86/kvm/vmx/nested.c nested_vmx_check_apic_access_controls(vcpu, vmcs12) || vmcs12 2525 arch/x86/kvm/vmx/nested.c nested_vmx_check_apicv_controls(vcpu, vmcs12) || vmcs12 2526 arch/x86/kvm/vmx/nested.c nested_vmx_check_nmi_controls(vmcs12) || vmcs12 2527 arch/x86/kvm/vmx/nested.c nested_vmx_check_pml_controls(vcpu, vmcs12) || vmcs12 2528 arch/x86/kvm/vmx/nested.c nested_vmx_check_unrestricted_guest_controls(vcpu, vmcs12) || vmcs12 2529 arch/x86/kvm/vmx/nested.c nested_vmx_check_mode_based_ept_exec_controls(vcpu, vmcs12) || vmcs12 2530 arch/x86/kvm/vmx/nested.c nested_vmx_check_shadow_vmcs_controls(vcpu, vmcs12) || vmcs12 2531 arch/x86/kvm/vmx/nested.c CC(nested_cpu_has_vpid(vmcs12) && !vmcs12->virtual_processor_id)) vmcs12 2534 arch/x86/kvm/vmx/nested.c if (!nested_cpu_has_preemption_timer(vmcs12) && vmcs12 2535 arch/x86/kvm/vmx/nested.c nested_cpu_has_save_preemption_timer(vmcs12)) vmcs12 2538 arch/x86/kvm/vmx/nested.c if (nested_cpu_has_ept(vmcs12) && vmcs12 2539 arch/x86/kvm/vmx/nested.c CC(!valid_ept_address(vcpu, vmcs12->ept_pointer))) vmcs12 2542 arch/x86/kvm/vmx/nested.c if (nested_cpu_has_vmfunc(vmcs12)) { vmcs12 2543 arch/x86/kvm/vmx/nested.c if (CC(vmcs12->vm_function_control & vmcs12 2547 arch/x86/kvm/vmx/nested.c if (nested_cpu_has_eptp_switching(vmcs12)) { vmcs12 2548 arch/x86/kvm/vmx/nested.c if (CC(!nested_cpu_has_ept(vmcs12)) || vmcs12 2549 arch/x86/kvm/vmx/nested.c CC(!page_address_valid(vcpu, vmcs12->eptp_list_address))) vmcs12 2561 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12) vmcs12 2565 arch/x86/kvm/vmx/nested.c if (CC(!vmx_control_verify(vmcs12->vm_exit_controls, vmcs12 
2568 arch/x86/kvm/vmx/nested.c CC(nested_vmx_check_exit_msr_switch_controls(vcpu, vmcs12))) vmcs12 2578 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12) vmcs12 2582 arch/x86/kvm/vmx/nested.c if (CC(!vmx_control_verify(vmcs12->vm_entry_controls, vmcs12 2593 arch/x86/kvm/vmx/nested.c if (vmcs12->vm_entry_intr_info_field & INTR_INFO_VALID_MASK) { vmcs12 2594 arch/x86/kvm/vmx/nested.c u32 intr_info = vmcs12->vm_entry_intr_info_field; vmcs12 2599 arch/x86/kvm/vmx/nested.c bool urg = nested_cpu_has2(vmcs12, vmcs12 2601 arch/x86/kvm/vmx/nested.c bool prot_mode = !urg || vmcs12->guest_cr0 & X86_CR0_PE; vmcs12 2624 arch/x86/kvm/vmx/nested.c vmcs12->vm_entry_exception_error_code & GENMASK(31, 16))) vmcs12 2636 arch/x86/kvm/vmx/nested.c if (CC(vmcs12->vm_entry_instruction_len > 15) || vmcs12 2637 arch/x86/kvm/vmx/nested.c CC(vmcs12->vm_entry_instruction_len == 0 && vmcs12 2643 arch/x86/kvm/vmx/nested.c if (nested_vmx_check_entry_msr_switch_controls(vcpu, vmcs12)) vmcs12 2650 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12) vmcs12 2652 arch/x86/kvm/vmx/nested.c if (nested_check_vm_execution_controls(vcpu, vmcs12) || vmcs12 2653 arch/x86/kvm/vmx/nested.c nested_check_vm_exit_controls(vcpu, vmcs12) || vmcs12 2654 arch/x86/kvm/vmx/nested.c nested_check_vm_entry_controls(vcpu, vmcs12)) vmcs12 2661 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12) vmcs12 2665 arch/x86/kvm/vmx/nested.c if (CC(!nested_host_cr0_valid(vcpu, vmcs12->host_cr0)) || vmcs12 2666 arch/x86/kvm/vmx/nested.c CC(!nested_host_cr4_valid(vcpu, vmcs12->host_cr4)) || vmcs12 2667 arch/x86/kvm/vmx/nested.c CC(!nested_cr3_valid(vcpu, vmcs12->host_cr3))) vmcs12 2670 arch/x86/kvm/vmx/nested.c if (CC(is_noncanonical_address(vmcs12->host_ia32_sysenter_esp, vcpu)) || vmcs12 2671 arch/x86/kvm/vmx/nested.c CC(is_noncanonical_address(vmcs12->host_ia32_sysenter_eip, vcpu))) vmcs12 2674 arch/x86/kvm/vmx/nested.c if ((vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_PAT) && vmcs12 2675 arch/x86/kvm/vmx/nested.c CC(!kvm_pat_valid(vmcs12->host_ia32_pat))) vmcs12 2685 arch/x86/kvm/vmx/nested.c if (CC(!(vmcs12->vm_exit_controls & VM_EXIT_HOST_ADDR_SPACE_SIZE)) || vmcs12 2686 arch/x86/kvm/vmx/nested.c CC(!(vmcs12->host_cr4 & X86_CR4_PAE))) vmcs12 2689 arch/x86/kvm/vmx/nested.c if (CC(vmcs12->vm_exit_controls & VM_EXIT_HOST_ADDR_SPACE_SIZE) || vmcs12 2690 arch/x86/kvm/vmx/nested.c CC(vmcs12->vm_entry_controls & VM_ENTRY_IA32E_MODE) || vmcs12 2691 arch/x86/kvm/vmx/nested.c CC(vmcs12->host_cr4 & X86_CR4_PCIDE) || vmcs12 2692 arch/x86/kvm/vmx/nested.c CC((vmcs12->host_rip) >> 32)) vmcs12 2696 arch/x86/kvm/vmx/nested.c if (CC(vmcs12->host_cs_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || vmcs12 2697 arch/x86/kvm/vmx/nested.c CC(vmcs12->host_ss_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || vmcs12 2698 arch/x86/kvm/vmx/nested.c CC(vmcs12->host_ds_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || vmcs12 2699 arch/x86/kvm/vmx/nested.c CC(vmcs12->host_es_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || vmcs12 2700 arch/x86/kvm/vmx/nested.c CC(vmcs12->host_fs_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || vmcs12 2701 arch/x86/kvm/vmx/nested.c CC(vmcs12->host_gs_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || vmcs12 2702 arch/x86/kvm/vmx/nested.c CC(vmcs12->host_tr_selector & (SEGMENT_RPL_MASK | SEGMENT_TI_MASK)) || vmcs12 2703 arch/x86/kvm/vmx/nested.c CC(vmcs12->host_cs_selector == 0) || vmcs12 2704 arch/x86/kvm/vmx/nested.c CC(vmcs12->host_tr_selector == 0) || vmcs12 2705 arch/x86/kvm/vmx/nested.c CC(vmcs12->host_ss_selector == 0 && !ia32e)) vmcs12 
2709 arch/x86/kvm/vmx/nested.c if (CC(is_noncanonical_address(vmcs12->host_fs_base, vcpu)) || vmcs12 2710 arch/x86/kvm/vmx/nested.c CC(is_noncanonical_address(vmcs12->host_gs_base, vcpu)) || vmcs12 2711 arch/x86/kvm/vmx/nested.c CC(is_noncanonical_address(vmcs12->host_gdtr_base, vcpu)) || vmcs12 2712 arch/x86/kvm/vmx/nested.c CC(is_noncanonical_address(vmcs12->host_idtr_base, vcpu)) || vmcs12 2713 arch/x86/kvm/vmx/nested.c CC(is_noncanonical_address(vmcs12->host_tr_base, vcpu)) || vmcs12 2714 arch/x86/kvm/vmx/nested.c CC(is_noncanonical_address(vmcs12->host_rip, vcpu))) vmcs12 2724 arch/x86/kvm/vmx/nested.c if (vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_EFER) { vmcs12 2725 arch/x86/kvm/vmx/nested.c if (CC(!kvm_valid_efer(vcpu, vmcs12->host_ia32_efer)) || vmcs12 2726 arch/x86/kvm/vmx/nested.c CC(ia32e != !!(vmcs12->host_ia32_efer & EFER_LMA)) || vmcs12 2727 arch/x86/kvm/vmx/nested.c CC(ia32e != !!(vmcs12->host_ia32_efer & EFER_LME))) vmcs12 2735 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12) vmcs12 2738 arch/x86/kvm/vmx/nested.c struct vmcs12 *shadow; vmcs12 2741 arch/x86/kvm/vmx/nested.c if (vmcs12->vmcs_link_pointer == -1ull) vmcs12 2744 arch/x86/kvm/vmx/nested.c if (CC(!page_address_valid(vcpu, vmcs12->vmcs_link_pointer))) vmcs12 2747 arch/x86/kvm/vmx/nested.c if (CC(kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->vmcs_link_pointer), &map))) vmcs12 2753 arch/x86/kvm/vmx/nested.c CC(shadow->hdr.shadow_vmcs != nested_cpu_has_shadow_vmcs(vmcs12))) vmcs12 2763 arch/x86/kvm/vmx/nested.c static int nested_check_guest_non_reg_state(struct vmcs12 *vmcs12) vmcs12 2765 arch/x86/kvm/vmx/nested.c if (CC(vmcs12->guest_activity_state != GUEST_ACTIVITY_ACTIVE && vmcs12 2766 arch/x86/kvm/vmx/nested.c vmcs12->guest_activity_state != GUEST_ACTIVITY_HLT)) vmcs12 2773 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12, vmcs12 2780 arch/x86/kvm/vmx/nested.c if (CC(!nested_guest_cr0_valid(vcpu, vmcs12->guest_cr0)) || vmcs12 2781 arch/x86/kvm/vmx/nested.c CC(!nested_guest_cr4_valid(vcpu, vmcs12->guest_cr4))) vmcs12 2784 arch/x86/kvm/vmx/nested.c if ((vmcs12->vm_entry_controls & VM_ENTRY_LOAD_IA32_PAT) && vmcs12 2785 arch/x86/kvm/vmx/nested.c CC(!kvm_pat_valid(vmcs12->guest_ia32_pat))) vmcs12 2788 arch/x86/kvm/vmx/nested.c if (nested_vmx_check_vmcs_link_ptr(vcpu, vmcs12)) { vmcs12 2803 arch/x86/kvm/vmx/nested.c (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_IA32_EFER)) { vmcs12 2804 arch/x86/kvm/vmx/nested.c ia32e = (vmcs12->vm_entry_controls & VM_ENTRY_IA32E_MODE) != 0; vmcs12 2805 arch/x86/kvm/vmx/nested.c if (CC(!kvm_valid_efer(vcpu, vmcs12->guest_ia32_efer)) || vmcs12 2806 arch/x86/kvm/vmx/nested.c CC(ia32e != !!(vmcs12->guest_ia32_efer & EFER_LMA)) || vmcs12 2807 arch/x86/kvm/vmx/nested.c CC(((vmcs12->guest_cr0 & X86_CR0_PG) && vmcs12 2808 arch/x86/kvm/vmx/nested.c ia32e != !!(vmcs12->guest_ia32_efer & EFER_LME)))) vmcs12 2812 arch/x86/kvm/vmx/nested.c if ((vmcs12->vm_entry_controls & VM_ENTRY_LOAD_BNDCFGS) && vmcs12 2813 arch/x86/kvm/vmx/nested.c (CC(is_noncanonical_address(vmcs12->guest_bndcfgs & PAGE_MASK, vcpu)) || vmcs12 2814 arch/x86/kvm/vmx/nested.c CC((vmcs12->guest_bndcfgs & MSR_IA32_BNDCFGS_RSVD)))) vmcs12 2817 arch/x86/kvm/vmx/nested.c if (nested_check_guest_non_reg_state(vmcs12)) vmcs12 2929 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12); vmcs12 2933 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12 = get_vmcs12(vcpu); vmcs12 2939 arch/x86/kvm/vmx/nested.c if (nested_cpu_has2(vmcs12, SECONDARY_EXEC_VIRTUALIZE_APIC_ACCESSES)) { vmcs12 2950 arch/x86/kvm/vmx/nested.c page = kvm_vcpu_gpa_to_page(vcpu, 
vmcs12->apic_access_addr); vmcs12 2966 arch/x86/kvm/vmx/nested.c if (nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW)) { vmcs12 2969 arch/x86/kvm/vmx/nested.c if (!kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->virtual_apic_page_addr), map)) { vmcs12 2971 arch/x86/kvm/vmx/nested.c } else if (nested_cpu_has(vmcs12, CPU_BASED_CR8_LOAD_EXITING) && vmcs12 2972 arch/x86/kvm/vmx/nested.c nested_cpu_has(vmcs12, CPU_BASED_CR8_STORE_EXITING) && vmcs12 2973 arch/x86/kvm/vmx/nested.c !nested_cpu_has2(vmcs12, SECONDARY_EXEC_VIRTUALIZE_APIC_ACCESSES)) { vmcs12 2992 arch/x86/kvm/vmx/nested.c if (nested_cpu_has_posted_intr(vmcs12)) { vmcs12 2995 arch/x86/kvm/vmx/nested.c if (!kvm_vcpu_map(vcpu, gpa_to_gfn(vmcs12->posted_intr_desc_addr), map)) { vmcs12 2998 arch/x86/kvm/vmx/nested.c offset_in_page(vmcs12->posted_intr_desc_addr)); vmcs12 3000 arch/x86/kvm/vmx/nested.c pfn_to_hpa(map->pfn) + offset_in_page(vmcs12->posted_intr_desc_addr)); vmcs12 3003 arch/x86/kvm/vmx/nested.c if (nested_vmx_prepare_msr_bitmap(vcpu, vmcs12)) vmcs12 3041 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12); vmcs12 3057 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12 = get_vmcs12(vcpu); vmcs12 3067 arch/x86/kvm/vmx/nested.c if (!(vmcs12->vm_entry_controls & VM_ENTRY_LOAD_DEBUG_CONTROLS)) vmcs12 3070 arch/x86/kvm/vmx/nested.c !(vmcs12->vm_entry_controls & VM_ENTRY_LOAD_BNDCFGS)) vmcs12 3094 arch/x86/kvm/vmx/nested.c prepare_vmcs02_early(vmx, vmcs12); vmcs12 3105 arch/x86/kvm/vmx/nested.c if (nested_vmx_check_guest_state(vcpu, vmcs12, &exit_qual)) vmcs12 3110 arch/x86/kvm/vmx/nested.c if (vmcs12->cpu_based_vm_exec_control & CPU_BASED_USE_TSC_OFFSETING) vmcs12 3111 arch/x86/kvm/vmx/nested.c vcpu->arch.tsc_offset += vmcs12->tsc_offset; vmcs12 3113 arch/x86/kvm/vmx/nested.c if (prepare_vmcs02(vcpu, vmcs12, &exit_qual)) vmcs12 3119 arch/x86/kvm/vmx/nested.c vmcs12->vm_entry_msr_load_addr, vmcs12 3120 arch/x86/kvm/vmx/nested.c vmcs12->vm_entry_msr_load_count); vmcs12 3157 arch/x86/kvm/vmx/nested.c if (nested_cpu_has_preemption_timer(vmcs12)) vmcs12 3174 arch/x86/kvm/vmx/nested.c if (vmcs12->cpu_based_vm_exec_control & CPU_BASED_USE_TSC_OFFSETING) vmcs12 3175 arch/x86/kvm/vmx/nested.c vcpu->arch.tsc_offset -= vmcs12->tsc_offset; vmcs12 3184 arch/x86/kvm/vmx/nested.c load_vmcs12_host_state(vcpu, vmcs12); vmcs12 3185 arch/x86/kvm/vmx/nested.c vmcs12->vm_exit_reason = exit_reason | VMX_EXIT_REASONS_FAILED_VMENTRY; vmcs12 3186 arch/x86/kvm/vmx/nested.c vmcs12->exit_qualification = exit_qual; vmcs12 3198 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12; vmcs12 3212 arch/x86/kvm/vmx/nested.c vmcs12 = get_vmcs12(vcpu); vmcs12 3220 arch/x86/kvm/vmx/nested.c if (vmcs12->hdr.shadow_vmcs) vmcs12 3226 arch/x86/kvm/vmx/nested.c vmcs12->launch_state = !launch; vmcs12 3245 arch/x86/kvm/vmx/nested.c if (vmcs12->launch_state == launch) vmcs12 3250 arch/x86/kvm/vmx/nested.c if (nested_vmx_check_controls(vcpu, vmcs12)) vmcs12 3253 arch/x86/kvm/vmx/nested.c if (nested_vmx_check_host_state(vcpu, vmcs12)) vmcs12 3278 arch/x86/kvm/vmx/nested.c nested_cache_shadow_vmcs12(vcpu, vmcs12); vmcs12 3285 arch/x86/kvm/vmx/nested.c if ((vmcs12->guest_activity_state == GUEST_ACTIVITY_HLT) && vmcs12 3286 arch/x86/kvm/vmx/nested.c !(vmcs12->vm_entry_intr_info_field & INTR_INFO_VALID_MASK) && vmcs12 3287 arch/x86/kvm/vmx/nested.c !(vmcs12->cpu_based_vm_exec_control & CPU_BASED_VIRTUAL_NMI_PENDING) && vmcs12 3288 arch/x86/kvm/vmx/nested.c !((vmcs12->cpu_based_vm_exec_control & CPU_BASED_VIRTUAL_INTR_PENDING) && vmcs12 3289 arch/x86/kvm/vmx/nested.c (vmcs12->guest_rflags & X86_EFLAGS_IF))) 
{ vmcs12 3323 arch/x86/kvm/vmx/nested.c vmcs12_guest_cr0(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12) vmcs12 3327 arch/x86/kvm/vmx/nested.c /*2*/ (vmcs12->guest_cr0 & vmcs12->cr0_guest_host_mask) | vmcs12 3328 arch/x86/kvm/vmx/nested.c /*3*/ (vmcs_readl(CR0_READ_SHADOW) & ~(vmcs12->cr0_guest_host_mask | vmcs12 3333 arch/x86/kvm/vmx/nested.c vmcs12_guest_cr4(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12) vmcs12 3337 arch/x86/kvm/vmx/nested.c /*2*/ (vmcs12->guest_cr4 & vmcs12->cr4_guest_host_mask) | vmcs12 3338 arch/x86/kvm/vmx/nested.c /*3*/ (vmcs_readl(CR4_READ_SHADOW) & ~(vmcs12->cr4_guest_host_mask | vmcs12 3343 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12) vmcs12 3353 arch/x86/kvm/vmx/nested.c vmcs12->vm_exit_instruction_len = vmcs12 3361 arch/x86/kvm/vmx/nested.c vmcs12->idt_vectoring_error_code = vmcs12 3365 arch/x86/kvm/vmx/nested.c vmcs12->idt_vectoring_info_field = idt_vectoring; vmcs12 3367 arch/x86/kvm/vmx/nested.c vmcs12->idt_vectoring_info_field = vmcs12 3375 arch/x86/kvm/vmx/nested.c vmcs12->vm_entry_instruction_len = vmcs12 3380 arch/x86/kvm/vmx/nested.c vmcs12->idt_vectoring_info_field = idt_vectoring; vmcs12 3387 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12 = get_vmcs12(vcpu); vmcs12 3395 arch/x86/kvm/vmx/nested.c if (nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW)) { vmcs12 3396 arch/x86/kvm/vmx/nested.c gfn = vmcs12->virtual_apic_page_addr >> PAGE_SHIFT; vmcs12 3400 arch/x86/kvm/vmx/nested.c if (nested_cpu_has_posted_intr(vmcs12)) { vmcs12 3401 arch/x86/kvm/vmx/nested.c gfn = vmcs12->posted_intr_desc_addr >> PAGE_SHIFT; vmcs12 3442 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12 = get_vmcs12(vcpu); vmcs12 3447 arch/x86/kvm/vmx/nested.c vmcs12->vm_exit_intr_error_code = vcpu->arch.exception.error_code; vmcs12 3456 arch/x86/kvm/vmx/nested.c if (!(vmcs12->idt_vectoring_info_field & VECTORING_INFO_VALID_MASK) && vmcs12 3583 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12) vmcs12 3587 arch/x86/kvm/vmx/nested.c vmcs12->guest_es_selector = vmcs_read16(GUEST_ES_SELECTOR); vmcs12 3588 arch/x86/kvm/vmx/nested.c vmcs12->guest_cs_selector = vmcs_read16(GUEST_CS_SELECTOR); vmcs12 3589 arch/x86/kvm/vmx/nested.c vmcs12->guest_ss_selector = vmcs_read16(GUEST_SS_SELECTOR); vmcs12 3590 arch/x86/kvm/vmx/nested.c vmcs12->guest_ds_selector = vmcs_read16(GUEST_DS_SELECTOR); vmcs12 3591 arch/x86/kvm/vmx/nested.c vmcs12->guest_fs_selector = vmcs_read16(GUEST_FS_SELECTOR); vmcs12 3592 arch/x86/kvm/vmx/nested.c vmcs12->guest_gs_selector = vmcs_read16(GUEST_GS_SELECTOR); vmcs12 3593 arch/x86/kvm/vmx/nested.c vmcs12->guest_ldtr_selector = vmcs_read16(GUEST_LDTR_SELECTOR); vmcs12 3594 arch/x86/kvm/vmx/nested.c vmcs12->guest_tr_selector = vmcs_read16(GUEST_TR_SELECTOR); vmcs12 3595 arch/x86/kvm/vmx/nested.c vmcs12->guest_es_limit = vmcs_read32(GUEST_ES_LIMIT); vmcs12 3596 arch/x86/kvm/vmx/nested.c vmcs12->guest_cs_limit = vmcs_read32(GUEST_CS_LIMIT); vmcs12 3597 arch/x86/kvm/vmx/nested.c vmcs12->guest_ss_limit = vmcs_read32(GUEST_SS_LIMIT); vmcs12 3598 arch/x86/kvm/vmx/nested.c vmcs12->guest_ds_limit = vmcs_read32(GUEST_DS_LIMIT); vmcs12 3599 arch/x86/kvm/vmx/nested.c vmcs12->guest_fs_limit = vmcs_read32(GUEST_FS_LIMIT); vmcs12 3600 arch/x86/kvm/vmx/nested.c vmcs12->guest_gs_limit = vmcs_read32(GUEST_GS_LIMIT); vmcs12 3601 arch/x86/kvm/vmx/nested.c vmcs12->guest_ldtr_limit = vmcs_read32(GUEST_LDTR_LIMIT); vmcs12 3602 arch/x86/kvm/vmx/nested.c vmcs12->guest_tr_limit = vmcs_read32(GUEST_TR_LIMIT); vmcs12 3603 arch/x86/kvm/vmx/nested.c vmcs12->guest_gdtr_limit = vmcs_read32(GUEST_GDTR_LIMIT); vmcs12 
3604 arch/x86/kvm/vmx/nested.c vmcs12->guest_idtr_limit = vmcs_read32(GUEST_IDTR_LIMIT); vmcs12 3605 arch/x86/kvm/vmx/nested.c vmcs12->guest_es_ar_bytes = vmcs_read32(GUEST_ES_AR_BYTES); vmcs12 3606 arch/x86/kvm/vmx/nested.c vmcs12->guest_ds_ar_bytes = vmcs_read32(GUEST_DS_AR_BYTES); vmcs12 3607 arch/x86/kvm/vmx/nested.c vmcs12->guest_fs_ar_bytes = vmcs_read32(GUEST_FS_AR_BYTES); vmcs12 3608 arch/x86/kvm/vmx/nested.c vmcs12->guest_gs_ar_bytes = vmcs_read32(GUEST_GS_AR_BYTES); vmcs12 3609 arch/x86/kvm/vmx/nested.c vmcs12->guest_ldtr_ar_bytes = vmcs_read32(GUEST_LDTR_AR_BYTES); vmcs12 3610 arch/x86/kvm/vmx/nested.c vmcs12->guest_tr_ar_bytes = vmcs_read32(GUEST_TR_AR_BYTES); vmcs12 3611 arch/x86/kvm/vmx/nested.c vmcs12->guest_es_base = vmcs_readl(GUEST_ES_BASE); vmcs12 3612 arch/x86/kvm/vmx/nested.c vmcs12->guest_cs_base = vmcs_readl(GUEST_CS_BASE); vmcs12 3613 arch/x86/kvm/vmx/nested.c vmcs12->guest_ss_base = vmcs_readl(GUEST_SS_BASE); vmcs12 3614 arch/x86/kvm/vmx/nested.c vmcs12->guest_ds_base = vmcs_readl(GUEST_DS_BASE); vmcs12 3615 arch/x86/kvm/vmx/nested.c vmcs12->guest_fs_base = vmcs_readl(GUEST_FS_BASE); vmcs12 3616 arch/x86/kvm/vmx/nested.c vmcs12->guest_gs_base = vmcs_readl(GUEST_GS_BASE); vmcs12 3617 arch/x86/kvm/vmx/nested.c vmcs12->guest_ldtr_base = vmcs_readl(GUEST_LDTR_BASE); vmcs12 3618 arch/x86/kvm/vmx/nested.c vmcs12->guest_tr_base = vmcs_readl(GUEST_TR_BASE); vmcs12 3619 arch/x86/kvm/vmx/nested.c vmcs12->guest_gdtr_base = vmcs_readl(GUEST_GDTR_BASE); vmcs12 3620 arch/x86/kvm/vmx/nested.c vmcs12->guest_idtr_base = vmcs_readl(GUEST_IDTR_BASE); vmcs12 3621 arch/x86/kvm/vmx/nested.c vmcs12->guest_pending_dbg_exceptions = vmcs12 3624 arch/x86/kvm/vmx/nested.c vmcs12->guest_bndcfgs = vmcs_read64(GUEST_BNDCFGS); vmcs12 3630 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12) vmcs12 3645 arch/x86/kvm/vmx/nested.c sync_vmcs02_to_vmcs12_rare(vcpu, vmcs12); vmcs12 3658 arch/x86/kvm/vmx/nested.c static void sync_vmcs02_to_vmcs12(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12) vmcs12 3663 arch/x86/kvm/vmx/nested.c sync_vmcs02_to_vmcs12_rare(vcpu, vmcs12); vmcs12 3667 arch/x86/kvm/vmx/nested.c vmcs12->guest_cr0 = vmcs12_guest_cr0(vcpu, vmcs12); vmcs12 3668 arch/x86/kvm/vmx/nested.c vmcs12->guest_cr4 = vmcs12_guest_cr4(vcpu, vmcs12); vmcs12 3670 arch/x86/kvm/vmx/nested.c vmcs12->guest_rsp = kvm_rsp_read(vcpu); vmcs12 3671 arch/x86/kvm/vmx/nested.c vmcs12->guest_rip = kvm_rip_read(vcpu); vmcs12 3672 arch/x86/kvm/vmx/nested.c vmcs12->guest_rflags = vmcs_readl(GUEST_RFLAGS); vmcs12 3674 arch/x86/kvm/vmx/nested.c vmcs12->guest_cs_ar_bytes = vmcs_read32(GUEST_CS_AR_BYTES); vmcs12 3675 arch/x86/kvm/vmx/nested.c vmcs12->guest_ss_ar_bytes = vmcs_read32(GUEST_SS_AR_BYTES); vmcs12 3677 arch/x86/kvm/vmx/nested.c vmcs12->guest_sysenter_cs = vmcs_read32(GUEST_SYSENTER_CS); vmcs12 3678 arch/x86/kvm/vmx/nested.c vmcs12->guest_sysenter_esp = vmcs_readl(GUEST_SYSENTER_ESP); vmcs12 3679 arch/x86/kvm/vmx/nested.c vmcs12->guest_sysenter_eip = vmcs_readl(GUEST_SYSENTER_EIP); vmcs12 3681 arch/x86/kvm/vmx/nested.c vmcs12->guest_interruptibility_info = vmcs12 3685 arch/x86/kvm/vmx/nested.c vmcs12->guest_activity_state = GUEST_ACTIVITY_HLT; vmcs12 3687 arch/x86/kvm/vmx/nested.c vmcs12->guest_activity_state = GUEST_ACTIVITY_ACTIVE; vmcs12 3689 arch/x86/kvm/vmx/nested.c if (nested_cpu_has_preemption_timer(vmcs12) && vmcs12 3690 arch/x86/kvm/vmx/nested.c vmcs12->vm_exit_controls & VM_EXIT_SAVE_VMX_PREEMPTION_TIMER) vmcs12 3691 arch/x86/kvm/vmx/nested.c vmcs12->vmx_preemption_timer_value = vmcs12 3703 
arch/x86/kvm/vmx/nested.c vmcs12->guest_cr3 = vmcs_readl(GUEST_CR3); vmcs12 3704 arch/x86/kvm/vmx/nested.c if (nested_cpu_has_ept(vmcs12) && is_pae_paging(vcpu)) { vmcs12 3705 arch/x86/kvm/vmx/nested.c vmcs12->guest_pdptr0 = vmcs_read64(GUEST_PDPTR0); vmcs12 3706 arch/x86/kvm/vmx/nested.c vmcs12->guest_pdptr1 = vmcs_read64(GUEST_PDPTR1); vmcs12 3707 arch/x86/kvm/vmx/nested.c vmcs12->guest_pdptr2 = vmcs_read64(GUEST_PDPTR2); vmcs12 3708 arch/x86/kvm/vmx/nested.c vmcs12->guest_pdptr3 = vmcs_read64(GUEST_PDPTR3); vmcs12 3712 arch/x86/kvm/vmx/nested.c vmcs12->guest_linear_address = vmcs_readl(GUEST_LINEAR_ADDRESS); vmcs12 3714 arch/x86/kvm/vmx/nested.c if (nested_cpu_has_vid(vmcs12)) vmcs12 3715 arch/x86/kvm/vmx/nested.c vmcs12->guest_intr_status = vmcs_read16(GUEST_INTR_STATUS); vmcs12 3717 arch/x86/kvm/vmx/nested.c vmcs12->vm_entry_controls = vmcs12 3718 arch/x86/kvm/vmx/nested.c (vmcs12->vm_entry_controls & ~VM_ENTRY_IA32E_MODE) | vmcs12 3721 arch/x86/kvm/vmx/nested.c if (vmcs12->vm_exit_controls & VM_EXIT_SAVE_DEBUG_CONTROLS) vmcs12 3722 arch/x86/kvm/vmx/nested.c kvm_get_dr(vcpu, 7, (unsigned long *)&vmcs12->guest_dr7); vmcs12 3724 arch/x86/kvm/vmx/nested.c if (vmcs12->vm_exit_controls & VM_EXIT_SAVE_IA32_EFER) vmcs12 3725 arch/x86/kvm/vmx/nested.c vmcs12->guest_ia32_efer = vcpu->arch.efer; vmcs12 3739 arch/x86/kvm/vmx/nested.c static void prepare_vmcs12(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12, vmcs12 3744 arch/x86/kvm/vmx/nested.c vmcs12->vm_exit_reason = exit_reason; vmcs12 3745 arch/x86/kvm/vmx/nested.c vmcs12->exit_qualification = exit_qualification; vmcs12 3746 arch/x86/kvm/vmx/nested.c vmcs12->vm_exit_intr_info = exit_intr_info; vmcs12 3748 arch/x86/kvm/vmx/nested.c vmcs12->idt_vectoring_info_field = 0; vmcs12 3749 arch/x86/kvm/vmx/nested.c vmcs12->vm_exit_instruction_len = vmcs_read32(VM_EXIT_INSTRUCTION_LEN); vmcs12 3750 arch/x86/kvm/vmx/nested.c vmcs12->vmx_instruction_info = vmcs_read32(VMX_INSTRUCTION_INFO); vmcs12 3752 arch/x86/kvm/vmx/nested.c if (!(vmcs12->vm_exit_reason & VMX_EXIT_REASONS_FAILED_VMENTRY)) { vmcs12 3753 arch/x86/kvm/vmx/nested.c vmcs12->launch_state = 1; vmcs12 3757 arch/x86/kvm/vmx/nested.c vmcs12->vm_entry_intr_info_field &= ~INTR_INFO_VALID_MASK; vmcs12 3763 arch/x86/kvm/vmx/nested.c vmcs12_save_pending_event(vcpu, vmcs12); vmcs12 3772 arch/x86/kvm/vmx/nested.c vmcs12->vm_exit_msr_store_addr, vmcs12 3773 arch/x86/kvm/vmx/nested.c vmcs12->vm_exit_msr_store_count)) vmcs12 3797 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12) vmcs12 3802 arch/x86/kvm/vmx/nested.c if (vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_EFER) vmcs12 3803 arch/x86/kvm/vmx/nested.c vcpu->arch.efer = vmcs12->host_ia32_efer; vmcs12 3804 arch/x86/kvm/vmx/nested.c else if (vmcs12->vm_exit_controls & VM_EXIT_HOST_ADDR_SPACE_SIZE) vmcs12 3810 arch/x86/kvm/vmx/nested.c kvm_rsp_write(vcpu, vmcs12->host_rsp); vmcs12 3811 arch/x86/kvm/vmx/nested.c kvm_rip_write(vcpu, vmcs12->host_rip); vmcs12 3823 arch/x86/kvm/vmx/nested.c vmx_set_cr0(vcpu, vmcs12->host_cr0); vmcs12 3827 arch/x86/kvm/vmx/nested.c vmx_set_cr4(vcpu, vmcs12->host_cr4); vmcs12 3835 arch/x86/kvm/vmx/nested.c if (nested_vmx_load_cr3(vcpu, vmcs12->host_cr3, false, &entry_failure_code)) vmcs12 3856 arch/x86/kvm/vmx/nested.c (!nested_cpu_has_vpid(vmcs12) || !nested_has_guest_tlb_tag(vcpu))) { vmcs12 3860 arch/x86/kvm/vmx/nested.c vmcs_write32(GUEST_SYSENTER_CS, vmcs12->host_ia32_sysenter_cs); vmcs12 3861 arch/x86/kvm/vmx/nested.c vmcs_writel(GUEST_SYSENTER_ESP, vmcs12->host_ia32_sysenter_esp); vmcs12 3862 arch/x86/kvm/vmx/nested.c 
vmcs_writel(GUEST_SYSENTER_EIP, vmcs12->host_ia32_sysenter_eip); vmcs12 3863 arch/x86/kvm/vmx/nested.c vmcs_writel(GUEST_IDTR_BASE, vmcs12->host_idtr_base); vmcs12 3864 arch/x86/kvm/vmx/nested.c vmcs_writel(GUEST_GDTR_BASE, vmcs12->host_gdtr_base); vmcs12 3869 arch/x86/kvm/vmx/nested.c if (vmcs12->vm_exit_controls & VM_EXIT_CLEAR_BNDCFGS) vmcs12 3872 arch/x86/kvm/vmx/nested.c if (vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_PAT) { vmcs12 3873 arch/x86/kvm/vmx/nested.c vmcs_write64(GUEST_IA32_PAT, vmcs12->host_ia32_pat); vmcs12 3874 arch/x86/kvm/vmx/nested.c vcpu->arch.pat = vmcs12->host_ia32_pat; vmcs12 3876 arch/x86/kvm/vmx/nested.c if (vmcs12->vm_exit_controls & VM_EXIT_LOAD_IA32_PERF_GLOBAL_CTRL) vmcs12 3878 arch/x86/kvm/vmx/nested.c vmcs12->host_ia32_perf_global_ctrl); vmcs12 3885 arch/x86/kvm/vmx/nested.c .selector = vmcs12->host_cs_selector, vmcs12 3891 arch/x86/kvm/vmx/nested.c if (vmcs12->vm_exit_controls & VM_EXIT_HOST_ADDR_SPACE_SIZE) vmcs12 3905 arch/x86/kvm/vmx/nested.c seg.selector = vmcs12->host_ds_selector; vmcs12 3907 arch/x86/kvm/vmx/nested.c seg.selector = vmcs12->host_es_selector; vmcs12 3909 arch/x86/kvm/vmx/nested.c seg.selector = vmcs12->host_ss_selector; vmcs12 3911 arch/x86/kvm/vmx/nested.c seg.selector = vmcs12->host_fs_selector; vmcs12 3912 arch/x86/kvm/vmx/nested.c seg.base = vmcs12->host_fs_base; vmcs12 3914 arch/x86/kvm/vmx/nested.c seg.selector = vmcs12->host_gs_selector; vmcs12 3915 arch/x86/kvm/vmx/nested.c seg.base = vmcs12->host_gs_base; vmcs12 3918 arch/x86/kvm/vmx/nested.c .base = vmcs12->host_tr_base, vmcs12 3920 arch/x86/kvm/vmx/nested.c .selector = vmcs12->host_tr_selector, vmcs12 3932 arch/x86/kvm/vmx/nested.c if (nested_vmx_load_msr(vcpu, vmcs12->vm_exit_msr_load_addr, vmcs12 3933 arch/x86/kvm/vmx/nested.c vmcs12->vm_exit_msr_load_count)) vmcs12 3962 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12 = get_vmcs12(vcpu); vmcs12 3970 arch/x86/kvm/vmx/nested.c if (vmcs12->vm_entry_controls & VM_ENTRY_LOAD_DEBUG_CONTROLS) { vmcs12 4024 arch/x86/kvm/vmx/nested.c for (i = 0; i < vmcs12->vm_entry_msr_load_count; i++) { vmcs12 4025 arch/x86/kvm/vmx/nested.c gpa = vmcs12->vm_entry_msr_load_addr + (i * sizeof(g)); vmcs12 4033 arch/x86/kvm/vmx/nested.c for (j = 0; j < vmcs12->vm_exit_msr_load_count; j++) { vmcs12 4034 arch/x86/kvm/vmx/nested.c gpa = vmcs12->vm_exit_msr_load_addr + (j * sizeof(h)); vmcs12 4077 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12 = get_vmcs12(vcpu); vmcs12 4084 arch/x86/kvm/vmx/nested.c if (nested_cpu_has_preemption_timer(vmcs12)) vmcs12 4087 arch/x86/kvm/vmx/nested.c if (vmcs12->cpu_based_vm_exec_control & CPU_BASED_USE_TSC_OFFSETING) vmcs12 4088 arch/x86/kvm/vmx/nested.c vcpu->arch.tsc_offset -= vmcs12->tsc_offset; vmcs12 4091 arch/x86/kvm/vmx/nested.c sync_vmcs02_to_vmcs12(vcpu, vmcs12); vmcs12 4094 arch/x86/kvm/vmx/nested.c prepare_vmcs12(vcpu, vmcs12, exit_reason, exit_intr_info, vmcs12 4106 arch/x86/kvm/vmx/nested.c nested_flush_cached_shadow_vmcs12(vcpu, vmcs12); vmcs12 4132 arch/x86/kvm/vmx/nested.c } else if (!nested_cpu_has_ept(vmcs12) && vmcs12 4133 arch/x86/kvm/vmx/nested.c nested_cpu_has2(vmcs12, vmcs12 4164 arch/x86/kvm/vmx/nested.c vmcs12->vm_exit_intr_info = irq | vmcs12 4169 arch/x86/kvm/vmx/nested.c trace_kvm_nested_vmexit_inject(vmcs12->vm_exit_reason, vmcs12 4170 arch/x86/kvm/vmx/nested.c vmcs12->exit_qualification, vmcs12 4171 arch/x86/kvm/vmx/nested.c vmcs12->idt_vectoring_info_field, vmcs12 4172 arch/x86/kvm/vmx/nested.c vmcs12->vm_exit_intr_info, vmcs12 4173 arch/x86/kvm/vmx/nested.c 
vmcs12 4173 arch/x86/kvm/vmx/nested.c vmcs12->vm_exit_intr_error_code,
vmcs12 4176 arch/x86/kvm/vmx/nested.c load_vmcs12_host_state(vcpu, vmcs12);
vmcs12 4576 arch/x86/kvm/vmx/nested.c vmptr + offsetof(struct vmcs12,
vmcs12 4608 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12 = is_guest_mode(vcpu) ? get_shadow_vmcs12(vcpu)
vmcs12 4634 arch/x86/kvm/vmx/nested.c copy_vmcs02_to_vmcs12_rare(vcpu, vmcs12);
vmcs12 4637 arch/x86/kvm/vmx/nested.c field_value = vmcs12_read_any(vmcs12, field, offset);
vmcs12 4703 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12 = is_guest_mode(vcpu) ? get_shadow_vmcs12(vcpu)
vmcs12 4755 arch/x86/kvm/vmx/nested.c copy_vmcs02_to_vmcs12_rare(vcpu, vmcs12);
vmcs12 4768 arch/x86/kvm/vmx/nested.c vmcs12_write_any(vmcs12, field, offset, field_value);
vmcs12 4835 arch/x86/kvm/vmx/nested.c struct vmcs12 *new_vmcs12;
vmcs12 5037 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12)
vmcs12 5044 arch/x86/kvm/vmx/nested.c if (!nested_cpu_has_eptp_switching(vmcs12) ||
vmcs12 5045 arch/x86/kvm/vmx/nested.c !nested_cpu_has_ept(vmcs12))
vmcs12 5052 arch/x86/kvm/vmx/nested.c if (kvm_vcpu_read_guest_page(vcpu, vmcs12->eptp_list_address >> PAGE_SHIFT,
vmcs12 5062 arch/x86/kvm/vmx/nested.c if (vmcs12->ept_pointer != address) {
vmcs12 5069 arch/x86/kvm/vmx/nested.c vmcs12->ept_pointer = address;
vmcs12 5084 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12;
vmcs12 5097 arch/x86/kvm/vmx/nested.c vmcs12 = get_vmcs12(vcpu);
vmcs12 5098 arch/x86/kvm/vmx/nested.c if ((vmcs12->vm_function_control & (1 << function)) == 0)
vmcs12 5103 arch/x86/kvm/vmx/nested.c if (nested_vmx_eptp_switching(vcpu, vmcs12))
vmcs12 5125 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12 = get_vmcs12(vcpu);
vmcs12 5134 arch/x86/kvm/vmx/nested.c bitmap = vmcs12->io_bitmap_a;
vmcs12 5136 arch/x86/kvm/vmx/nested.c bitmap = vmcs12->io_bitmap_b;
vmcs12 5156 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12)
vmcs12 5162 arch/x86/kvm/vmx/nested.c if (!nested_cpu_has(vmcs12, CPU_BASED_USE_IO_BITMAPS))
vmcs12 5163 arch/x86/kvm/vmx/nested.c return nested_cpu_has(vmcs12, CPU_BASED_UNCOND_IO_EXITING);
vmcs12 5180 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12, u32 exit_reason)
vmcs12 5185 arch/x86/kvm/vmx/nested.c if (!nested_cpu_has(vmcs12, CPU_BASED_USE_MSR_BITMAPS))
vmcs12 5193 arch/x86/kvm/vmx/nested.c bitmap = vmcs12->msr_bitmap;
vmcs12 5217 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12)
vmcs12 5230 arch/x86/kvm/vmx/nested.c if (vmcs12->cr0_guest_host_mask &
vmcs12 5231 arch/x86/kvm/vmx/nested.c (val ^ vmcs12->cr0_read_shadow))
vmcs12 5235 arch/x86/kvm/vmx/nested.c if ((vmcs12->cr3_target_count >= 1 &&
vmcs12 5236 arch/x86/kvm/vmx/nested.c vmcs12->cr3_target_value0 == val) ||
vmcs12 5237 arch/x86/kvm/vmx/nested.c (vmcs12->cr3_target_count >= 2 &&
vmcs12 5238 arch/x86/kvm/vmx/nested.c vmcs12->cr3_target_value1 == val) ||
vmcs12 5239 arch/x86/kvm/vmx/nested.c (vmcs12->cr3_target_count >= 3 &&
vmcs12 5240 arch/x86/kvm/vmx/nested.c vmcs12->cr3_target_value2 == val) ||
vmcs12 5241 arch/x86/kvm/vmx/nested.c (vmcs12->cr3_target_count >= 4 &&
vmcs12 5242 arch/x86/kvm/vmx/nested.c vmcs12->cr3_target_value3 == val))
vmcs12 5244 arch/x86/kvm/vmx/nested.c if (nested_cpu_has(vmcs12, CPU_BASED_CR3_LOAD_EXITING))
vmcs12 5248 arch/x86/kvm/vmx/nested.c if (vmcs12->cr4_guest_host_mask &
vmcs12 5249 arch/x86/kvm/vmx/nested.c (vmcs12->cr4_read_shadow ^ val))
vmcs12 5253 arch/x86/kvm/vmx/nested.c if (nested_cpu_has(vmcs12, CPU_BASED_CR8_LOAD_EXITING))
vmcs12 5259 arch/x86/kvm/vmx/nested.c if ((vmcs12->cr0_guest_host_mask & X86_CR0_TS) &&
vmcs12 5260 arch/x86/kvm/vmx/nested.c (vmcs12->cr0_read_shadow & X86_CR0_TS))
vmcs12 5266 arch/x86/kvm/vmx/nested.c if (vmcs12->cpu_based_vm_exec_control &
vmcs12 5271 arch/x86/kvm/vmx/nested.c if (vmcs12->cpu_based_vm_exec_control &
vmcs12 5283 arch/x86/kvm/vmx/nested.c if (vmcs12->cr0_guest_host_mask & 0xe &
vmcs12 5284 arch/x86/kvm/vmx/nested.c (val ^ vmcs12->cr0_read_shadow))
vmcs12 5286 arch/x86/kvm/vmx/nested.c if ((vmcs12->cr0_guest_host_mask & 0x1) &&
vmcs12 5287 arch/x86/kvm/vmx/nested.c !(vmcs12->cr0_read_shadow & 0x1) &&
vmcs12 5296 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12, gpa_t bitmap)
vmcs12 5302 arch/x86/kvm/vmx/nested.c if (!nested_cpu_has_shadow_vmcs(vmcs12))
vmcs12 5328 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12 = get_vmcs12(vcpu);
vmcs12 5373 arch/x86/kvm/vmx/nested.c return vmcs12->exception_bitmap &
vmcs12 5380 arch/x86/kvm/vmx/nested.c return nested_cpu_has(vmcs12, CPU_BASED_VIRTUAL_INTR_PENDING);
vmcs12 5382 arch/x86/kvm/vmx/nested.c return nested_cpu_has(vmcs12, CPU_BASED_VIRTUAL_NMI_PENDING);
vmcs12 5388 arch/x86/kvm/vmx/nested.c return nested_cpu_has(vmcs12, CPU_BASED_HLT_EXITING);
vmcs12 5392 arch/x86/kvm/vmx/nested.c return nested_cpu_has(vmcs12, CPU_BASED_INVLPG_EXITING);
vmcs12 5394 arch/x86/kvm/vmx/nested.c return nested_cpu_has(vmcs12, CPU_BASED_RDPMC_EXITING);
vmcs12 5396 arch/x86/kvm/vmx/nested.c return nested_cpu_has2(vmcs12, SECONDARY_EXEC_RDRAND_EXITING);
vmcs12 5398 arch/x86/kvm/vmx/nested.c return nested_cpu_has2(vmcs12, SECONDARY_EXEC_RDSEED_EXITING);
vmcs12 5400 arch/x86/kvm/vmx/nested.c return nested_cpu_has(vmcs12, CPU_BASED_RDTSC_EXITING);
vmcs12 5402 arch/x86/kvm/vmx/nested.c return nested_vmx_exit_handled_vmcs_access(vcpu, vmcs12,
vmcs12 5403 arch/x86/kvm/vmx/nested.c vmcs12->vmread_bitmap);
vmcs12 5405 arch/x86/kvm/vmx/nested.c return nested_vmx_exit_handled_vmcs_access(vcpu, vmcs12,
vmcs12 5406 arch/x86/kvm/vmx/nested.c vmcs12->vmwrite_bitmap);
vmcs12 5418 arch/x86/kvm/vmx/nested.c return nested_vmx_exit_handled_cr(vcpu, vmcs12);
vmcs12 5420 arch/x86/kvm/vmx/nested.c return nested_cpu_has(vmcs12, CPU_BASED_MOV_DR_EXITING);
vmcs12 5422 arch/x86/kvm/vmx/nested.c return nested_vmx_exit_handled_io(vcpu, vmcs12);
vmcs12 5424 arch/x86/kvm/vmx/nested.c return nested_cpu_has2(vmcs12, SECONDARY_EXEC_DESC);
vmcs12 5427 arch/x86/kvm/vmx/nested.c return nested_vmx_exit_handled_msr(vcpu, vmcs12, exit_reason);
vmcs12 5431 arch/x86/kvm/vmx/nested.c return nested_cpu_has(vmcs12, CPU_BASED_MWAIT_EXITING);
vmcs12 5433 arch/x86/kvm/vmx/nested.c return nested_cpu_has(vmcs12, CPU_BASED_MONITOR_TRAP_FLAG);
vmcs12 5435 arch/x86/kvm/vmx/nested.c return nested_cpu_has(vmcs12, CPU_BASED_MONITOR_EXITING);
vmcs12 5437 arch/x86/kvm/vmx/nested.c return nested_cpu_has(vmcs12, CPU_BASED_PAUSE_EXITING) ||
vmcs12 5438 arch/x86/kvm/vmx/nested.c nested_cpu_has2(vmcs12,
vmcs12 5443 arch/x86/kvm/vmx/nested.c return nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW);
vmcs12 5471 arch/x86/kvm/vmx/nested.c nested_cpu_has2(vmcs12, SECONDARY_EXEC_ENABLE_INVPCID) &&
vmcs12 5472 arch/x86/kvm/vmx/nested.c nested_cpu_has(vmcs12, CPU_BASED_INVLPG_EXITING);
vmcs12 5474 arch/x86/kvm/vmx/nested.c return nested_cpu_has2(vmcs12, SECONDARY_EXEC_WBINVD_EXITING);
vmcs12 5484 arch/x86/kvm/vmx/nested.c return nested_cpu_has2(vmcs12, SECONDARY_EXEC_XSAVES);
vmcs12 5498 arch/x86/kvm/vmx/nested.c return nested_cpu_has2(vmcs12,
vmcs12 5511 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12;
vmcs12 5526 arch/x86/kvm/vmx/nested.c vmcs12 = get_vmcs12(vcpu);
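The nested_vmx_exit_handled_cr() references above (arch/x86/kvm/vmx/nested.c:5217-5287) all reduce to one test: a CR write by L2 is reflected to L1 only if it touches bits L1 claimed via the guest/host mask and the new value differs from the read shadow L1 expects L2 to see. The following is a minimal user-space sketch of that test, not kernel code; the struct and function names are invented for illustration.

/*
 * Toy model of the cr0_guest_host_mask / cr0_read_shadow check shown in the
 * nested_vmx_exit_handled_cr() records above.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct vmcs12_cr0_model {              /* hypothetical, trimmed-down struct */
	uint64_t cr0_guest_host_mask;  /* CR0 bits owned by L1 */
	uint64_t cr0_read_shadow;      /* value L1 wants L2 to observe */
};

static bool l1_wants_cr0_exit(const struct vmcs12_cr0_model *v, uint64_t new_val)
{
	/* mirrors: if (vmcs12->cr0_guest_host_mask & (val ^ vmcs12->cr0_read_shadow)) */
	return v->cr0_guest_host_mask & (new_val ^ v->cr0_read_shadow);
}

int main(void)
{
	struct vmcs12_cr0_model v = {
		.cr0_guest_host_mask = 0x1,   /* say L1 owns CR0.PE */
		.cr0_read_shadow     = 0x1,
	};

	printf("%d\n", l1_wants_cr0_exit(&v, 0x0)); /* clears an L1-owned bit -> reflect to L1 */
	printf("%d\n", l1_wants_cr0_exit(&v, 0x1)); /* matches the shadow -> L0 handles it     */
	return 0;
}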
vmcs12 5534 arch/x86/kvm/vmx/nested.c kvm_state.size += sizeof(user_vmx_nested_state->vmcs12);
vmcs12 5540 arch/x86/kvm/vmx/nested.c nested_cpu_has_shadow_vmcs(vmcs12) &&
vmcs12 5541 arch/x86/kvm/vmx/nested.c vmcs12->vmcs_link_pointer != -1ull)
vmcs12 5576 arch/x86/kvm/vmx/nested.c sync_vmcs02_to_vmcs12(vcpu, vmcs12);
vmcs12 5577 arch/x86/kvm/vmx/nested.c sync_vmcs02_to_vmcs12_rare(vcpu, vmcs12);
vmcs12 5585 arch/x86/kvm/vmx/nested.c BUILD_BUG_ON(sizeof(user_vmx_nested_state->vmcs12) < VMCS12_SIZE);
vmcs12 5592 arch/x86/kvm/vmx/nested.c if (copy_to_user(user_vmx_nested_state->vmcs12, vmcs12, VMCS12_SIZE))
vmcs12 5595 arch/x86/kvm/vmx/nested.c if (nested_cpu_has_shadow_vmcs(vmcs12) &&
vmcs12 5596 arch/x86/kvm/vmx/nested.c vmcs12->vmcs_link_pointer != -1ull) {
vmcs12 5623 arch/x86/kvm/vmx/nested.c struct vmcs12 *vmcs12;
vmcs12 5696 arch/x86/kvm/vmx/nested.c if (kvm_state->size < sizeof(*kvm_state) + sizeof(*vmcs12))
vmcs12 5723 arch/x86/kvm/vmx/nested.c vmcs12 = get_vmcs12(vcpu);
vmcs12 5724 arch/x86/kvm/vmx/nested.c if (copy_from_user(vmcs12, user_vmx_nested_state->vmcs12, sizeof(*vmcs12)))
vmcs12 5727 arch/x86/kvm/vmx/nested.c if (vmcs12->hdr.revision_id != VMCS12_REVISION)
vmcs12 5737 arch/x86/kvm/vmx/nested.c if (nested_cpu_has_shadow_vmcs(vmcs12) &&
vmcs12 5738 arch/x86/kvm/vmx/nested.c vmcs12->vmcs_link_pointer != -1ull) {
vmcs12 5739 arch/x86/kvm/vmx/nested.c struct vmcs12 *shadow_vmcs12 = get_shadow_vmcs12(vcpu);
vmcs12 5743 arch/x86/kvm/vmx/nested.c sizeof(user_vmx_nested_state->vmcs12) + sizeof(*shadow_vmcs12))
vmcs12 5758 arch/x86/kvm/vmx/nested.c if (nested_vmx_check_controls(vcpu, vmcs12) ||
vmcs12 5759 arch/x86/kvm/vmx/nested.c nested_vmx_check_host_state(vcpu, vmcs12) ||
vmcs12 5760 arch/x86/kvm/vmx/nested.c nested_vmx_check_guest_state(vcpu, vmcs12, &exit_qual))
vmcs12 38 arch/x86/kvm/vmx/nested.h static inline struct vmcs12 *get_vmcs12(struct kvm_vcpu *vcpu)
vmcs12 43 arch/x86/kvm/vmx/nested.h static inline struct vmcs12 *get_shadow_vmcs12(struct kvm_vcpu *vcpu)
vmcs12 90 arch/x86/kvm/vmx/nested.h struct vmcs12 *vmcs12 = get_vmcs12(vcpu);
vmcs12 92 arch/x86/kvm/vmx/nested.h vmcs12->vm_exit_intr_error_code =
vmcs12 106 arch/x86/kvm/vmx/nested.h static inline unsigned long nested_read_cr0(struct vmcs12 *fields)
vmcs12 111 arch/x86/kvm/vmx/nested.h static inline unsigned long nested_read_cr4(struct vmcs12 *fields)
vmcs12 150 arch/x86/kvm/vmx/nested.h static inline bool nested_cpu_has(struct vmcs12 *vmcs12, u32 bit)
vmcs12 152 arch/x86/kvm/vmx/nested.h return vmcs12->cpu_based_vm_exec_control & bit;
vmcs12 155 arch/x86/kvm/vmx/nested.h static inline bool nested_cpu_has2(struct vmcs12 *vmcs12, u32 bit)
vmcs12 157 arch/x86/kvm/vmx/nested.h return (vmcs12->cpu_based_vm_exec_control &
vmcs12 159 arch/x86/kvm/vmx/nested.h (vmcs12->secondary_vm_exec_control & bit);
vmcs12 162 arch/x86/kvm/vmx/nested.h static inline bool nested_cpu_has_preemption_timer(struct vmcs12 *vmcs12)
vmcs12 164 arch/x86/kvm/vmx/nested.h return vmcs12->pin_based_vm_exec_control &
vmcs12 168 arch/x86/kvm/vmx/nested.h static inline bool nested_cpu_has_nmi_exiting(struct vmcs12 *vmcs12)
vmcs12 170 arch/x86/kvm/vmx/nested.h return vmcs12->pin_based_vm_exec_control & PIN_BASED_NMI_EXITING;
vmcs12 173 arch/x86/kvm/vmx/nested.h static inline bool nested_cpu_has_virtual_nmis(struct vmcs12 *vmcs12)
vmcs12 175 arch/x86/kvm/vmx/nested.h return vmcs12->pin_based_vm_exec_control & PIN_BASED_VIRTUAL_NMIS;
vmcs12 178 arch/x86/kvm/vmx/nested.h static inline int nested_cpu_has_ept(struct vmcs12 *vmcs12)
vmcs12 180 arch/x86/kvm/vmx/nested.h return nested_cpu_has2(vmcs12, SECONDARY_EXEC_ENABLE_EPT);
vmcs12 183 arch/x86/kvm/vmx/nested.h static inline bool nested_cpu_has_xsaves(struct vmcs12 *vmcs12)
vmcs12 185 arch/x86/kvm/vmx/nested.h return nested_cpu_has2(vmcs12, SECONDARY_EXEC_XSAVES);
vmcs12 188 arch/x86/kvm/vmx/nested.h static inline bool nested_cpu_has_pml(struct vmcs12 *vmcs12)
vmcs12 190 arch/x86/kvm/vmx/nested.h return nested_cpu_has2(vmcs12, SECONDARY_EXEC_ENABLE_PML);
vmcs12 193 arch/x86/kvm/vmx/nested.h static inline bool nested_cpu_has_virt_x2apic_mode(struct vmcs12 *vmcs12)
vmcs12 195 arch/x86/kvm/vmx/nested.h return nested_cpu_has2(vmcs12, SECONDARY_EXEC_VIRTUALIZE_X2APIC_MODE);
vmcs12 198 arch/x86/kvm/vmx/nested.h static inline bool nested_cpu_has_vpid(struct vmcs12 *vmcs12)
vmcs12 200 arch/x86/kvm/vmx/nested.h return nested_cpu_has2(vmcs12, SECONDARY_EXEC_ENABLE_VPID);
vmcs12 203 arch/x86/kvm/vmx/nested.h static inline bool nested_cpu_has_apic_reg_virt(struct vmcs12 *vmcs12)
vmcs12 205 arch/x86/kvm/vmx/nested.h return nested_cpu_has2(vmcs12, SECONDARY_EXEC_APIC_REGISTER_VIRT);
vmcs12 208 arch/x86/kvm/vmx/nested.h static inline bool nested_cpu_has_vid(struct vmcs12 *vmcs12)
vmcs12 210 arch/x86/kvm/vmx/nested.h return nested_cpu_has2(vmcs12, SECONDARY_EXEC_VIRTUAL_INTR_DELIVERY);
vmcs12 213 arch/x86/kvm/vmx/nested.h static inline bool nested_cpu_has_posted_intr(struct vmcs12 *vmcs12)
vmcs12 215 arch/x86/kvm/vmx/nested.h return vmcs12->pin_based_vm_exec_control & PIN_BASED_POSTED_INTR;
vmcs12 218 arch/x86/kvm/vmx/nested.h static inline bool nested_cpu_has_vmfunc(struct vmcs12 *vmcs12)
vmcs12 220 arch/x86/kvm/vmx/nested.h return nested_cpu_has2(vmcs12, SECONDARY_EXEC_ENABLE_VMFUNC);
vmcs12 223 arch/x86/kvm/vmx/nested.h static inline bool nested_cpu_has_eptp_switching(struct vmcs12 *vmcs12)
vmcs12 225 arch/x86/kvm/vmx/nested.h return nested_cpu_has_vmfunc(vmcs12) &&
vmcs12 226 arch/x86/kvm/vmx/nested.h (vmcs12->vm_function_control &
vmcs12 230 arch/x86/kvm/vmx/nested.h static inline bool nested_cpu_has_shadow_vmcs(struct vmcs12 *vmcs12)
vmcs12 232 arch/x86/kvm/vmx/nested.h return nested_cpu_has2(vmcs12, SECONDARY_EXEC_SHADOW_VMCS);
vmcs12 235 arch/x86/kvm/vmx/nested.h static inline bool nested_cpu_has_save_preemption_timer(struct vmcs12 *vmcs12)
vmcs12 237 arch/x86/kvm/vmx/nested.h return vmcs12->vm_exit_controls &
vmcs12 264 arch/x86/kvm/vmx/nested.h struct vmcs12 *vmcs12 = get_vmcs12(vcpu);
vmcs12 268 arch/x86/kvm/vmx/nested.h nested_cpu_has2(vmcs12, SECONDARY_EXEC_UNRESTRICTED_GUEST))
vmcs12 6 arch/x86/kvm/vmx/vmcs12.c #define VMCS12_OFFSET(x) offsetof(struct vmcs12, x)
vmcs12 27 arch/x86/kvm/vmx/vmcs12.h struct __packed vmcs12 {
vmcs12 219 arch/x86/kvm/vmx/vmcs12.h BUILD_BUG_ON_MSG(offsetof(struct vmcs12, field) != (loc), \
vmcs12 398 arch/x86/kvm/vmx/vmcs12.h static inline u64 vmcs12_read_any(struct vmcs12 *vmcs12, unsigned long field,
vmcs12 401 arch/x86/kvm/vmx/vmcs12.h char *p = (char *)vmcs12 + offset;
vmcs12 418 arch/x86/kvm/vmx/vmcs12.h static inline void vmcs12_write_any(struct vmcs12 *vmcs12, unsigned long field,
vmcs12 421 arch/x86/kvm/vmx/vmcs12.h char *p = (char *)vmcs12 + offset;
vmcs12 1681 arch/x86/kvm/vmx/vmx.c struct vmcs12 *vmcs12 = get_vmcs12(vcpu);
vmcs12 1684 arch/x86/kvm/vmx/vmx.c (vmcs12->cpu_based_vm_exec_control & CPU_BASED_USE_TSC_OFFSETING))
vmcs12 1685 arch/x86/kvm/vmx/vmx.c return vcpu->arch.tsc_offset - vmcs12->tsc_offset;
vmcs12 1692 arch/x86/kvm/vmx/vmx.c struct vmcs12 *vmcs12 = get_vmcs12(vcpu);
vmcs12 1702 arch/x86/kvm/vmx/vmx.c (vmcs12->cpu_based_vm_exec_control & CPU_BASED_USE_TSC_OFFSETING))
vmcs12 1703 arch/x86/kvm/vmx/vmx.c g_tsc_offset = vmcs12->tsc_offset;
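The vmcs12.h accessors listed above (vmcs12_read_any/vmcs12_write_any, vmcs12.h:398-421) locate a field by byte offset into the packed struct vmcs12 ("char *p = (char *)vmcs12 + offset;") and then access it at the width implied by the field encoding. The sketch below reproduces only that pointer-arithmetic idea in user space; the tiny struct, helper name, and fixed width argument are illustrative stand-ins, not the kernel's definitions.

/*
 * Offset-based field read over a packed struct, modelled on the
 * vmcs12_read_any() records above.
 */
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

struct tiny_vmcs {                     /* hypothetical two-field layout */
	uint16_t guest_intr_status;    /* 16-bit field                  */
	uint64_t guest_cr3;            /* natural-width field           */
} __attribute__((packed));

static uint64_t field_read(const void *base, size_t offset, size_t width_bytes)
{
	const char *p = (const char *)base + offset;  /* same pointer math as vmcs12_read_any() */
	uint64_t val = 0;

	memcpy(&val, p, width_bytes);                 /* memcpy avoids unaligned access on the packed struct */
	return val;
}

int main(void)
{
	struct tiny_vmcs v = { .guest_intr_status = 0x10, .guest_cr3 = 0x1000 };

	printf("guest_cr3 = 0x%llx\n",
	       (unsigned long long)field_read(&v, offsetof(struct tiny_vmcs, guest_cr3), sizeof(v.guest_cr3)));
	return 0;
}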
vmcs12 4748 arch/x86/kvm/vmx/vmx.c struct vmcs12 *vmcs12 = get_vmcs12(vcpu);
vmcs12 4759 arch/x86/kvm/vmx/vmx.c val = (val & ~vmcs12->cr0_guest_host_mask) |
vmcs12 4760 arch/x86/kvm/vmx/vmx.c (vmcs12->guest_cr0 & vmcs12->cr0_guest_host_mask);
vmcs12 4781 arch/x86/kvm/vmx/vmx.c struct vmcs12 *vmcs12 = get_vmcs12(vcpu);
vmcs12 4785 arch/x86/kvm/vmx/vmx.c val = (val & ~vmcs12->cr4_guest_host_mask) |
vmcs12 4786 arch/x86/kvm/vmx/vmx.c (vmcs12->guest_cr4 & vmcs12->cr4_guest_host_mask);
vmcs12 6018 arch/x86/kvm/vmx/vmx.c struct vmcs12 *vmcs12 = get_vmcs12(vcpu);
vmcs12 6021 arch/x86/kvm/vmx/vmx.c nested_cpu_has(vmcs12, CPU_BASED_TPR_SHADOW))
vmcs12 7103 arch/x86/kvm/vmx/vmx.c struct vmcs12 *vmcs12 = get_vmcs12(vcpu);
vmcs12 7124 arch/x86/kvm/vmx/vmx.c if (!nested_cpu_has(vmcs12, CPU_BASED_USE_IO_BITMAPS))
vmcs12 7125 arch/x86/kvm/vmx/vmx.c intercept = nested_cpu_has(vmcs12,
vmcs12 7138 arch/x86/kvm/vmx/vmx.c struct vmcs12 *vmcs12 = get_vmcs12(vcpu);
vmcs12 7147 arch/x86/kvm/vmx/vmx.c if (!nested_cpu_has2(vmcs12, SECONDARY_EXEC_RDTSCP)) {
vmcs12 7168 arch/x86/kvm/vmx/vmx.c if (!nested_cpu_has2(vmcs12, SECONDARY_EXEC_DESC))
vmcs12 7277 arch/x86/kvm/vmx/vmx.c struct vmcs12 *vmcs12;
vmcs12 7289 arch/x86/kvm/vmx/vmx.c vmcs12 = get_vmcs12(vcpu);
vmcs12 7290 arch/x86/kvm/vmx/vmx.c if (!nested_cpu_has_pml(vmcs12))
vmcs12 7293 arch/x86/kvm/vmx/vmx.c if (vmcs12->guest_pml_index >= PML_ENTITY_NUM) {
vmcs12 7299 arch/x86/kvm/vmx/vmx.c dst = vmcs12->pml_address + sizeof(u64) * vmcs12->guest_pml_index;
vmcs12 7305 arch/x86/kvm/vmx/vmx.c vmcs12->guest_pml_index--;
vmcs12 107 arch/x86/kvm/vmx/vmx.h struct vmcs12 *cached_vmcs12;
vmcs12 113 arch/x86/kvm/vmx/vmx.h struct vmcs12 *cached_shadow_vmcs12;
vmcs12 400 tools/arch/x86/include/uapi/asm/kvm.h __u8 vmcs12[KVM_STATE_NESTED_VMX_VMCS_SIZE];
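Most call sites in this listing funnel through the two helpers shown in nested.h:150-159: nested_cpu_has() tests a primary execution-control bit in vmcs12, and nested_cpu_has2() additionally requires that L1 activated the secondary controls. The user-space sketch below models just that gating pattern; the struct, helper names, and control values are local stand-ins defined for the example, not the kernel's headers.

/*
 * Toy model of the nested_cpu_has()/nested_cpu_has2() control-bit checks
 * referenced throughout the listing above.
 */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define CPU_BASED_ACTIVATE_SECONDARY_CONTROLS  (1u << 31)  /* illustrative bit positions */
#define SECONDARY_EXEC_ENABLE_EPT              (1u << 1)

struct vmcs12_ctls_model {             /* hypothetical, trimmed-down struct */
	uint32_t cpu_based_vm_exec_control;
	uint32_t secondary_vm_exec_control;
};

static bool nested_has(const struct vmcs12_ctls_model *v, uint32_t bit)
{
	return v->cpu_based_vm_exec_control & bit;      /* mirrors nested_cpu_has() */
}

static bool nested_has2(const struct vmcs12_ctls_model *v, uint32_t bit)
{
	/* mirrors nested_cpu_has2(): a secondary bit counts only if secondary controls are on */
	return (v->cpu_based_vm_exec_control & CPU_BASED_ACTIVATE_SECONDARY_CONTROLS) &&
	       (v->secondary_vm_exec_control & bit);
}

int main(void)
{
	struct vmcs12_ctls_model v = {
		.cpu_based_vm_exec_control = CPU_BASED_ACTIVATE_SECONDARY_CONTROLS,
		.secondary_vm_exec_control = SECONDARY_EXEC_ENABLE_EPT,
	};

	printf("secondary controls active: %d\n", nested_has(&v, CPU_BASED_ACTIVATE_SECONDARY_CONTROLS));
	printf("EPT enabled for L2:        %d\n", nested_has2(&v, SECONDARY_EXEC_ENABLE_EPT));
	return 0;
}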