Lines matching refs: vmcs_readl (a short sketch of the accessor pattern follows the listing)
1379 static __always_inline unsigned long vmcs_readl(unsigned long field) in vmcs_readl() function
1390 return vmcs_readl(field); in vmcs_read16()
1395 return vmcs_readl(field); in vmcs_read32()
1401 return vmcs_readl(field); in vmcs_read64()
1403 return vmcs_readl(field) | ((u64)vmcs_readl(field+1) << 32); in vmcs_read64()
1445 vmcs_writel(field, vmcs_readl(field) & ~mask); in vmcs_clear_bits()
1450 vmcs_writel(field, vmcs_readl(field) | mask); in vmcs_set_bits()
1543 *p = vmcs_readl(kvm_vmx_segment_fields[seg].base); in vmx_read_guest_seg_base()
1984 cr0 = vmcs_readl(GUEST_CR0); in vmx_fpu_activate()
2047 rflags = vmcs_readl(GUEST_RFLAGS); in vmx_get_rflags()
2123 vmcs_readl(EXIT_QUALIFICATION)); in nested_vmx_check_exception()
2656 data = vmcs_readl(GUEST_FS_BASE); in vmx_get_msr()
2659 data = vmcs_readl(GUEST_GS_BASE); in vmx_get_msr()
2675 data = vmcs_readl(GUEST_SYSENTER_EIP); in vmx_get_msr()
2678 data = vmcs_readl(GUEST_SYSENTER_ESP); in vmx_get_msr()
2839 vcpu->arch.regs[VCPU_REGS_RSP] = vmcs_readl(GUEST_RSP); in vmx_cache_reg()
2842 vcpu->arch.regs[VCPU_REGS_RIP] = vmcs_readl(GUEST_RIP); in vmx_cache_reg()
3318 flags = vmcs_readl(GUEST_RFLAGS); in enter_pmode()
3323 vmcs_writel(GUEST_CR4, (vmcs_readl(GUEST_CR4) & ~X86_CR4_VME) | in enter_pmode()
3324 (vmcs_readl(CR4_READ_SHADOW) & X86_CR4_VME)); in enter_pmode()
3398 flags = vmcs_readl(GUEST_RFLAGS); in enter_rmode()
3404 vmcs_writel(GUEST_CR4, vmcs_readl(GUEST_CR4) | X86_CR4_VME); in enter_rmode()
3484 vcpu->arch.cr0 |= vmcs_readl(GUEST_CR0) & cr0_guest_owned_bits; in vmx_decache_cr0_guest_bits()
3490 vcpu->arch.cr3 = vmcs_readl(GUEST_CR3); in vmx_decache_cr3()
3499 vcpu->arch.cr4 |= vmcs_readl(GUEST_CR4) & cr4_guest_owned_bits; in vmx_decache_cr4_guest_bits()
3824 dt->address = vmcs_readl(GUEST_IDTR_BASE); in vmx_get_idt()
3836 dt->address = vmcs_readl(GUEST_GDTR_BASE); in vmx_get_gdt()
4971 vmcs_readl(GUEST_RFLAGS) & X86_EFLAGS_IF) && in vmx_interrupt_allowed()
5134 cr2 = vmcs_readl(EXIT_QUALIFICATION); in handle_exception()
5152 dr6 = vmcs_readl(EXIT_QUALIFICATION); in handle_exception()
5164 kvm_run->debug.arch.dr7 = vmcs_readl(GUEST_DR7); in handle_exception()
5176 kvm_run->debug.arch.pc = vmcs_readl(GUEST_CS_BASE) + rip; in handle_exception()
5206 exit_qualification = vmcs_readl(EXIT_QUALIFICATION); in handle_io()
5305 vmcs_readl(CR0_READ_SHADOW) & ~X86_CR0_TS); in handle_clts()
5318 exit_qualification = vmcs_readl(EXIT_QUALIFICATION); in handle_cr()
5395 exit_qualification = vmcs_readl(EXIT_QUALIFICATION); in handle_dr()
5405 dr7 = vmcs_readl(GUEST_DR7); in handle_dr()
5476 vcpu->arch.dr7 = vmcs_readl(GUEST_DR7); in vmx_sync_dirty_debug_regs()
5587 unsigned long exit_qualification = vmcs_readl(EXIT_QUALIFICATION); in handle_invlpg()
5637 unsigned long exit_qualification = vmcs_readl(EXIT_QUALIFICATION); in handle_apic_access()
5659 unsigned long exit_qualification = vmcs_readl(EXIT_QUALIFICATION); in handle_apic_eoi_induced()
5669 unsigned long exit_qualification = vmcs_readl(EXIT_QUALIFICATION); in handle_apic_write()
5690 exit_qualification = vmcs_readl(EXIT_QUALIFICATION); in handle_task_switch()
5735 vmcs_writel(GUEST_DR7, vmcs_readl(GUEST_DR7) & ~0x155); in handle_task_switch()
5752 exit_qualification = vmcs_readl(EXIT_QUALIFICATION); in handle_ept_violation()
5759 vmcs_readl(GUEST_LINEAR_ADDRESS)); in handle_ept_violation()
6506 if (get_vmx_mem_address(vcpu, vmcs_readl(EXIT_QUALIFICATION), in nested_vmx_check_vmptr()
6929 field_value = vmcs_readl(field); in copy_shadow_to_vmcs12()
7009 unsigned long exit_qualification = vmcs_readl(EXIT_QUALIFICATION); in handle_vmread()
7052 unsigned long exit_qualification = vmcs_readl(EXIT_QUALIFICATION); in handle_vmwrite()
7155 unsigned long exit_qualification = vmcs_readl(EXIT_QUALIFICATION); in handle_vmptrst()
7220 if (get_vmx_mem_address(vcpu, vmcs_readl(EXIT_QUALIFICATION), in handle_invept()
7257 exit_qualification = vmcs_readl(EXIT_QUALIFICATION); in handle_pml_full()
7342 exit_qualification = vmcs_readl(EXIT_QUALIFICATION); in nested_vmx_exit_handled_io()
7419 unsigned long exit_qualification = vmcs_readl(EXIT_QUALIFICATION); in nested_vmx_exit_handled_cr()
7505 vmcs_readl(EXIT_QUALIFICATION), in nested_vmx_exit_handled()
7631 *info1 = vmcs_readl(EXIT_QUALIFICATION); in vmx_get_exit_info()
7745 vmcs_readl(EXIT_QUALIFICATION)); in vmx_handle_exit()
8654 vmcs_readl(EXIT_QUALIFICATION)); in vmx_inject_page_fault_nested()
9599 /*1*/ (vmcs_readl(GUEST_CR0) & vcpu->arch.cr0_guest_owned_bits) | in vmcs12_guest_cr0()
9601 /*3*/ (vmcs_readl(CR0_READ_SHADOW) & ~(vmcs12->cr0_guest_host_mask | in vmcs12_guest_cr0()
9609 /*1*/ (vmcs_readl(GUEST_CR4) & vcpu->arch.cr4_guest_owned_bits) | in vmcs12_guest_cr4()
9611 /*3*/ (vmcs_readl(CR4_READ_SHADOW) & ~(vmcs12->cr4_guest_host_mask | in vmcs12_guest_cr4()
9731 vmcs12->guest_rflags = vmcs_readl(GUEST_RFLAGS); in prepare_vmcs12()
9759 vmcs12->guest_es_base = vmcs_readl(GUEST_ES_BASE); in prepare_vmcs12()
9760 vmcs12->guest_cs_base = vmcs_readl(GUEST_CS_BASE); in prepare_vmcs12()
9761 vmcs12->guest_ss_base = vmcs_readl(GUEST_SS_BASE); in prepare_vmcs12()
9762 vmcs12->guest_ds_base = vmcs_readl(GUEST_DS_BASE); in prepare_vmcs12()
9763 vmcs12->guest_fs_base = vmcs_readl(GUEST_FS_BASE); in prepare_vmcs12()
9764 vmcs12->guest_gs_base = vmcs_readl(GUEST_GS_BASE); in prepare_vmcs12()
9765 vmcs12->guest_ldtr_base = vmcs_readl(GUEST_LDTR_BASE); in prepare_vmcs12()
9766 vmcs12->guest_tr_base = vmcs_readl(GUEST_TR_BASE); in prepare_vmcs12()
9767 vmcs12->guest_gdtr_base = vmcs_readl(GUEST_GDTR_BASE); in prepare_vmcs12()
9768 vmcs12->guest_idtr_base = vmcs_readl(GUEST_IDTR_BASE); in prepare_vmcs12()
9773 vmcs_readl(GUEST_PENDING_DBG_EXCEPTIONS); in prepare_vmcs12()
9822 vmcs12->guest_sysenter_esp = vmcs_readl(GUEST_SYSENTER_ESP); in prepare_vmcs12()
9823 vmcs12->guest_sysenter_eip = vmcs_readl(GUEST_SYSENTER_EIP); in prepare_vmcs12()
9910 vcpu->arch.cr4_guest_owned_bits = ~vmcs_readl(CR4_GUEST_HOST_MASK); in load_vmcs12_host_state()
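For orientation, here is a minimal sketch, not the verbatim kernel source, of how the accessors at lines 1379-1450 of the listing fit together: vmcs_readl is a thin VMREAD wrapper, the width-specific readers narrow its result, and the bit helpers do a read-modify-write through vmcs_writel. The inline-asm form and the standalone vmcs_writel shown here are assumptions for illustration; the in-tree helpers additionally route the instructions through the kernel's exception-fixup/error-reporting machinery.

/*
 * Hedged sketch (kernel context assumed, not the verbatim vmx.c code) of
 * the VMCS field accessors referenced in the listing above.
 */
#include <linux/types.h>	/* u16/u32/u64 */

static __always_inline unsigned long vmcs_readl(unsigned long field)
{
	unsigned long value;

	/* VMREAD: field encoding in a register, value out to reg/mem. */
	asm volatile("vmread %1, %0" : "=rm"(value) : "r"(field) : "cc");
	return value;
}

static __always_inline u16 vmcs_read16(unsigned long field)
{
	return vmcs_readl(field);	/* line 1390: natural-width read, narrowed */
}

static __always_inline u32 vmcs_read32(unsigned long field)
{
	return vmcs_readl(field);	/* line 1395 */
}

static __always_inline u64 vmcs_read64(unsigned long field)
{
#ifdef CONFIG_X86_64
	return vmcs_readl(field);	/* line 1401: one VMREAD suffices */
#else
	/* line 1403: on a 32-bit host the low and high halves are read
	 * separately; 'field + 1' is the HIGH companion encoding. */
	return vmcs_readl(field) | ((u64)vmcs_readl(field + 1) << 32);
#endif
}

static __always_inline void vmcs_writel(unsigned long field, unsigned long value)
{
	asm volatile("vmwrite %1, %0" :: "r"(field), "rm"(value) : "cc");
}

/* Read-modify-write helpers from lines 1445 and 1450. */
static void vmcs_clear_bits(unsigned long field, u32 mask)
{
	vmcs_writel(field, vmcs_readl(field) & ~mask);
}

static void vmcs_set_bits(unsigned long field, u32 mask)
{
	vmcs_writel(field, vmcs_readl(field) | mask);
}

Most of the remaining hits follow a single pattern: a VM-exit handler (handle_cr, handle_dr, handle_ept_violation, the nested_vmx_* checks) calls vmcs_readl(EXIT_QUALIFICATION) once to fetch the exit qualification for the current exit and then decodes it, while the prepare_vmcs12()/vmcs12_guest_cr0() group copies natural-width guest-state fields back into the vmcs12 structure.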