Lines Matching refs:arch

378 svm->vcpu.arch.hflags |= HF_GIF_MASK; in enable_gif()
383 svm->vcpu.arch.hflags &= ~HF_GIF_MASK; in disable_gif()
388 return !!(svm->vcpu.arch.hflags & HF_GIF_MASK); in gif_set()
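Lines 378, 383 and 388 above are the complete one-line bodies of the GIF (Global Interrupt Flag) helpers. A minimal sketch of how the three functions plausibly read in full; the static-inline signatures are an assumption, only the bodies come from the matches:

	static inline void enable_gif(struct vcpu_svm *svm)
	{
		/* Set GIF: the vcpu may take interrupts again. */
		svm->vcpu.arch.hflags |= HF_GIF_MASK;
	}

	static inline void disable_gif(struct vcpu_svm *svm)
	{
		/* Clear GIF: interrupts, NMIs and #DB are held off. */
		svm->vcpu.arch.hflags &= ~HF_GIF_MASK;
	}

	static inline bool gif_set(struct vcpu_svm *svm)
	{
		/* !! folds the masked bit down to a clean 0/1. */
		return !!(svm->vcpu.arch.hflags & HF_GIF_MASK);
	}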
475 vcpu->arch.efer = efer; in svm_set_efer()
600 vcpu->arch.osvw.length = (osvw_len >= 3) ? (osvw_len) : 3; in svm_init_osvw()
601 vcpu->arch.osvw.status = osvw_status & ~(6ULL); in svm_init_osvw()
612 vcpu->arch.osvw.status |= 1; in svm_init_osvw()
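Lines 600-612 belong to svm_init_osvw(), which seeds the guest's OS Visible Workaround (OSVW) state from the host's. A sketch built around the matched lines; everything elided between 601 and 612, including the exact guard condition, is an assumption:

	static void svm_init_osvw(struct kvm_vcpu *vcpu)
	{
		/* Advertise at least 3 OSVW status bits and hide the
		 * errata covered by bits 1-2 (mask 6ULL) from the guest. */
		vcpu->arch.osvw.length = (osvw_len >= 3) ? (osvw_len) : 3;
		vcpu->arch.osvw.status = osvw_status & ~(6ULL);

		/* ... (lines 602-611 not shown in the listing) ... */

		/* Conservatively report status bit 0 as set when the host
		 * gives us no better information (assumed guard). */
		if (osvw_len == 0 && boot_cpu_data.x86 == 0x10)
			vcpu->arch.osvw.status |= 1;
	}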
1010 vcpu->arch.tsc_catchup = 1; in svm_set_tsc_khz()
1011 vcpu->arch.tsc_always_catchup = 1; in svm_set_tsc_khz()
1093 svm->vcpu.arch.hflags = 0; in init_vmcb()
1164 svm->vcpu.arch.regs[VCPU_REGS_RIP] = save->rip; in init_vmcb()
1170 svm->vcpu.arch.cr0 = 0; in init_vmcb()
1190 svm->vcpu.arch.hflags = 0; in init_vmcb()
1266 svm->vcpu.arch.apic_base = APIC_DEFAULT_PHYS_BASE | in svm_create_vcpu()
1269 svm->vcpu.arch.apic_base |= MSR_IA32_APICBASE_BSP; in svm_create_vcpu()
1368 load_pdptrs(vcpu, vcpu->arch.walk_mmu, kvm_read_cr3(vcpu)); in svm_cache_reg()
1533 ulong gcr0 = svm->vcpu.arch.cr0; in update_cr0_intercept()
1558 if (vcpu->arch.efer & EFER_LME) { in svm_set_cr0()
1560 vcpu->arch.efer |= EFER_LMA; in svm_set_cr0()
1565 vcpu->arch.efer &= ~EFER_LMA; in svm_set_cr0()
1570 vcpu->arch.cr0 = cr0; in svm_set_cr0()
1599 vcpu->arch.cr4 = cr4; in svm_set_cr4()
1686 get_debugreg(vcpu->arch.db[0], 0); in svm_sync_dirty_debug_regs()
1687 get_debugreg(vcpu->arch.db[1], 1); in svm_sync_dirty_debug_regs()
1688 get_debugreg(vcpu->arch.db[2], 2); in svm_sync_dirty_debug_regs()
1689 get_debugreg(vcpu->arch.db[3], 3); in svm_sync_dirty_debug_regs()
1690 vcpu->arch.dr6 = svm_get_dr6(vcpu); in svm_sync_dirty_debug_regs()
1691 vcpu->arch.dr7 = svm->vmcb->save.dr7; in svm_sync_dirty_debug_regs()
1693 vcpu->arch.switch_db_regs &= ~KVM_DEBUGREG_WONT_EXIT; in svm_sync_dirty_debug_regs()
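Lines 1686-1693 are essentially the whole of svm_sync_dirty_debug_regs(), which runs after the guest has executed with the debug-register intercepts disabled and the host must recover the guest's DR state. A sketch; the to_svm() plumbing and the final intercept re-arm call are assumptions:

	static void svm_sync_dirty_debug_regs(struct kvm_vcpu *vcpu)
	{
		struct vcpu_svm *svm = to_svm(vcpu);

		/* DR0-DR3 still hold live guest values; read them back. */
		get_debugreg(vcpu->arch.db[0], 0);
		get_debugreg(vcpu->arch.db[1], 1);
		get_debugreg(vcpu->arch.db[2], 2);
		get_debugreg(vcpu->arch.db[3], 3);
		/* DR6/DR7 come from the VMCB save area, not the CPU. */
		vcpu->arch.dr6 = svm_get_dr6(vcpu);
		vcpu->arch.dr7 = svm->vmcb->save.dr7;

		/* Future DR accesses should trap to the host again. */
		vcpu->arch.switch_db_regs &= ~KVM_DEBUGREG_WONT_EXIT;
		set_dr_intercepts(svm);	/* assumed re-arm step */
	}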
1759 kvm_run->debug.arch.pc = in db_interception()
1761 kvm_run->debug.arch.exception = DB_VECTOR; in db_interception()
1773 kvm_run->debug.arch.pc = svm->vmcb->save.cs.base + svm->vmcb->save.rip; in bp_interception()
1774 kvm_run->debug.arch.exception = BP_VECTOR; in bp_interception()
2008 vcpu->arch.mmu.set_cr3 = nested_svm_set_tdp_cr3; in nested_svm_init_mmu_context()
2009 vcpu->arch.mmu.get_cr3 = nested_svm_get_tdp_cr3; in nested_svm_init_mmu_context()
2010 vcpu->arch.mmu.get_pdptr = nested_svm_get_tdp_pdptr; in nested_svm_init_mmu_context()
2011 vcpu->arch.mmu.inject_page_fault = nested_svm_inject_npf_exit; in nested_svm_init_mmu_context()
2012 vcpu->arch.mmu.shadow_root_level = get_npt_level(); in nested_svm_init_mmu_context()
2013 vcpu->arch.walk_mmu = &vcpu->arch.nested_mmu; in nested_svm_init_mmu_context()
2018 vcpu->arch.walk_mmu = &vcpu->arch.mmu; in nested_svm_uninit_mmu_context()
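Lines 2008-2013 show nested_svm_init_mmu_context() pointing the MMU callbacks at the nested-SVM TDP helpers and switching the page-table walker to the nested MMU; line 2018 is its uninit counterpart undoing the switch. A sketch; any setup call preceding the assignments is an assumption:

	static void nested_svm_init_mmu_context(struct kvm_vcpu *vcpu)
	{
		/* Route CR3/PDPTR handling and nested-page-fault
		 * injection through the nested-SVM helpers. */
		vcpu->arch.mmu.set_cr3           = nested_svm_set_tdp_cr3;
		vcpu->arch.mmu.get_cr3           = nested_svm_get_tdp_cr3;
		vcpu->arch.mmu.get_pdptr         = nested_svm_get_tdp_pdptr;
		vcpu->arch.mmu.inject_page_fault = nested_svm_inject_npf_exit;
		vcpu->arch.mmu.shadow_root_level = get_npt_level();
		/* Guest-virtual walks now go through the nested MMU. */
		vcpu->arch.walk_mmu = &vcpu->arch.nested_mmu;
	}

	static void nested_svm_uninit_mmu_context(struct kvm_vcpu *vcpu)
	{
		/* Restore the plain MMU as the walker. */
		vcpu->arch.walk_mmu = &vcpu->arch.mmu;
	}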
2023 if (!(svm->vcpu.arch.efer & EFER_SVME) in nested_svm_check_permissions()
2048 svm->vmcb->control.exit_info_2 = svm->vcpu.arch.cr2; in nested_svm_check_exception()
2063 if (!(svm->vcpu.arch.hflags & HF_VINTR_MASK)) in nested_svm_intr()
2066 if (!(svm->vcpu.arch.hflags & HF_HIF_MASK)) in nested_svm_intr()
2170 msr = svm->vcpu.arch.regs[VCPU_REGS_RCX]; in nested_svm_exit_handled_msr()
2340 nested_vmcb->save.efer = svm->vcpu.arch.efer; in nested_svm_vmexit()
2344 nested_vmcb->save.cr4 = svm->vcpu.arch.cr4; in nested_svm_vmexit()
2384 if (!(svm->vcpu.arch.hflags & HF_VINTR_MASK)) in nested_svm_vmexit()
2408 svm->vcpu.arch.cr3 = hsave->save.cr3; in nested_svm_vmexit()
2527 hsave->save.efer = svm->vcpu.arch.efer; in nested_svm_vmrun()
2529 hsave->save.cr4 = svm->vcpu.arch.cr4; in nested_svm_vmrun()
2542 svm->vcpu.arch.hflags |= HF_HIF_MASK; in nested_svm_vmrun()
2544 svm->vcpu.arch.hflags &= ~HF_HIF_MASK; in nested_svm_vmrun()
2565 svm->vcpu.arch.cr3 = nested_vmcb->save.cr3; in nested_svm_vmrun()
2572 svm->vmcb->save.cr2 = svm->vcpu.arch.cr2 = nested_vmcb->save.cr2; in nested_svm_vmrun()
2597 svm->vcpu.arch.hflags |= HF_VINTR_MASK; in nested_svm_vmrun()
2599 svm->vcpu.arch.hflags &= ~HF_VINTR_MASK; in nested_svm_vmrun()
2601 if (svm->vcpu.arch.hflags & HF_VINTR_MASK) { in nested_svm_vmrun()
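Lines 2597-2601 are the virtual-interrupt-masking handoff in nested_svm_vmrun(): HF_VINTR_MASK is copied from the nested guest's configuration, and when it is set L1 virtualizes CR8 itself, so the host can drop its own CR8 intercepts. A sketch; the int_ctl test and the intercept-clearing calls are reconstructed assumptions around the matched lines:

	/* Mirror the nested VMCB's V_INTR_MASKING setting (assumed test). */
	if (nested_vmcb->control.int_ctl & V_INTR_MASKING_MASK)
		svm->vcpu.arch.hflags |= HF_VINTR_MASK;
	else
		svm->vcpu.arch.hflags &= ~HF_VINTR_MASK;

	if (svm->vcpu.arch.hflags & HF_VINTR_MASK) {
		/* Keep only the nested guest's own CR8 intercepts
		 * (assumed calls). */
		clr_cr_intercept(svm, INTERCEPT_CR8_READ);
		clr_cr_intercept(svm, INTERCEPT_CR8_WRITE);
	}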
2828 svm->vcpu.arch.nmi_injected = false; in task_switch_interception()
2877 svm->vcpu.arch.hflags |= HF_IRET_MASK; in iret_interception()
2914 unsigned long cr0 = svm->vcpu.arch.cr0; in check_selective_cr0_intercepted()
2987 val = svm->vcpu.arch.cr2; in cr_interception()
3022 svm->vcpu.arch.switch_db_regs |= KVM_DEBUGREG_WONT_EXIT; in dr_interception()
3182 if (svm_dis && (vcpu->arch.efer & EFER_SVME)) in svm_set_vm_cr()
3516 vcpu->arch.cr0 = svm->vmcb->save.cr0; in handle_exit()
3518 vcpu->arch.cr3 = svm->vmcb->save.cr3; in handle_exit()
3601 vcpu->arch.hflags |= HF_NMI_MASK; in svm_inject_nmi()
3624 trace_kvm_inj_virq(vcpu->arch.interrupt.nr); in svm_set_irq()
3627 svm->vmcb->control.event_inj = vcpu->arch.interrupt.nr | in svm_set_irq()
3635 if (is_guest_mode(vcpu) && (vcpu->arch.hflags & HF_VINTR_MASK)) in update_cr8_intercept()
3673 !(svm->vcpu.arch.hflags & HF_NMI_MASK); in svm_nmi_allowed()
3683 return !!(svm->vcpu.arch.hflags & HF_NMI_MASK); in svm_get_nmi_mask()
3691 svm->vcpu.arch.hflags |= HF_NMI_MASK; in svm_set_nmi_mask()
3694 svm->vcpu.arch.hflags &= ~HF_NMI_MASK; in svm_set_nmi_mask()
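Lines 3683-3694 give the NMI-mask accessor pair in full: the getter tests HF_NMI_MASK and the setter flips it. The setter plausibly also toggles the IRET intercept so KVM notices when the guest unmasks NMIs; those intercept calls are an assumption here, as is the to_svm() plumbing:

	static bool svm_get_nmi_mask(struct kvm_vcpu *vcpu)
	{
		return !!(to_svm(vcpu)->vcpu.arch.hflags & HF_NMI_MASK);
	}

	static void svm_set_nmi_mask(struct kvm_vcpu *vcpu, bool masked)
	{
		struct vcpu_svm *svm = to_svm(vcpu);

		if (masked) {
			svm->vcpu.arch.hflags |= HF_NMI_MASK;
			set_intercept(svm, INTERCEPT_IRET);	/* assumed */
		} else {
			svm->vcpu.arch.hflags &= ~HF_NMI_MASK;
			clr_intercept(svm, INTERCEPT_IRET);	/* assumed */
		}
	}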
3712 return ret && !(svm->vcpu.arch.hflags & HF_VINTR_MASK); in svm_interrupt_allowed()
3737 if ((svm->vcpu.arch.hflags & (HF_NMI_MASK | HF_IRET_MASK)) in enable_nmi_window()
3772 if (is_guest_mode(vcpu) && (vcpu->arch.hflags & HF_VINTR_MASK)) in sync_cr8_to_lapic()
3786 if (is_guest_mode(vcpu) && (vcpu->arch.hflags & HF_VINTR_MASK)) in sync_lapic_to_cr8()
3807 if ((svm->vcpu.arch.hflags & HF_IRET_MASK) in svm_complete_interrupts()
3809 svm->vcpu.arch.hflags &= ~(HF_NMI_MASK | HF_IRET_MASK); in svm_complete_interrupts()
3813 svm->vcpu.arch.nmi_injected = false; in svm_complete_interrupts()
3827 svm->vcpu.arch.nmi_injected = true; in svm_complete_interrupts()
3873 svm->vmcb->save.rax = vcpu->arch.regs[VCPU_REGS_RAX]; in svm_vcpu_run()
3874 svm->vmcb->save.rsp = vcpu->arch.regs[VCPU_REGS_RSP]; in svm_vcpu_run()
3875 svm->vmcb->save.rip = vcpu->arch.regs[VCPU_REGS_RIP]; in svm_vcpu_run()
3888 svm->vmcb->save.cr2 = vcpu->arch.cr2; in svm_vcpu_run()
3942 [rbx]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_RBX])), in svm_vcpu_run()
3943 [rcx]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_RCX])), in svm_vcpu_run()
3944 [rdx]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_RDX])), in svm_vcpu_run()
3945 [rsi]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_RSI])), in svm_vcpu_run()
3946 [rdi]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_RDI])), in svm_vcpu_run()
3947 [rbp]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_RBP])) in svm_vcpu_run()
3949 , [r8]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_R8])), in svm_vcpu_run()
3950 [r9]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_R9])), in svm_vcpu_run()
3951 [r10]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_R10])), in svm_vcpu_run()
3952 [r11]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_R11])), in svm_vcpu_run()
3953 [r12]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_R12])), in svm_vcpu_run()
3954 [r13]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_R13])), in svm_vcpu_run()
3955 [r14]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_R14])), in svm_vcpu_run()
3956 [r15]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_R15])) in svm_vcpu_run()
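Lines 3942-3956 are the "i" (compile-time immediate) constraints feeding the VMRUN asm block in svm_vcpu_run(): each guest GPR's offset within struct vcpu_svm is baked into the instruction stream, so the asm can load and store guest registers relative to a single svm pointer. A sketch of the pairing for two registers; the mov lines, the %[svm] operand, and the clobber list are assumptions:

	asm volatile (
		/* %c[reg] expands to the offsetof() immediate, giving a
		 * direct base+displacement access off the svm pointer. */
		"mov %c[rbx](%[svm]), %%" _ASM_BX " \n\t"
		"mov %c[rcx](%[svm]), %%" _ASM_CX " \n\t"
		/* ... VMRUN, then the mirror-image stores back ... */
		:
		: [svm]"a"(svm),
		  [rbx]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_RBX])),
		  [rcx]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_RCX]))
		: "cc", "memory");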
3980 vcpu->arch.cr2 = svm->vmcb->save.cr2; in svm_vcpu_run()
3981 vcpu->arch.regs[VCPU_REGS_RAX] = svm->vmcb->save.rax; in svm_vcpu_run()
3982 vcpu->arch.regs[VCPU_REGS_RSP] = svm->vmcb->save.rsp; in svm_vcpu_run()
3983 vcpu->arch.regs[VCPU_REGS_RIP] = svm->vmcb->save.rip; in svm_vcpu_run()
4008 vcpu->arch.regs_avail &= ~(1 << VCPU_EXREG_PDPTR); in svm_vcpu_run()
4009 vcpu->arch.regs_dirty &= ~(1 << VCPU_EXREG_PDPTR); in svm_vcpu_run()
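Lines 4008-4009 invalidate the cached PDPTR "register" after the guest ran: with nested paging the guest can rewrite its PDPTEs without a VM exit, so the cache is marked neither available nor dirty and is refetched lazily on the next read. A sketch assuming an npt_enabled guard around the two lines:

	if (npt_enabled) {
		/* Force the next kvm_pdptr_read() to refetch from memory. */
		vcpu->arch.regs_avail &= ~(1 << VCPU_EXREG_PDPTR);
		vcpu->arch.regs_dirty &= ~(1 << VCPU_EXREG_PDPTR);
	}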
4253 cr0 = vcpu->arch.cr0 & ~SVM_CR0_SELECTIVE_MASK; in svm_check_intercept()