Lines Matching refs:arch
380 svm->vcpu.arch.hflags |= HF_GIF_MASK; in enable_gif()
385 svm->vcpu.arch.hflags &= ~HF_GIF_MASK; in disable_gif()
390 return !!(svm->vcpu.arch.hflags & HF_GIF_MASK); in gif_set()
477 vcpu->arch.efer = efer; in svm_set_efer()
602 vcpu->arch.osvw.length = (osvw_len >= 3) ? (osvw_len) : 3; in svm_init_osvw()
603 vcpu->arch.osvw.status = osvw_status & ~(6ULL); in svm_init_osvw()
614 vcpu->arch.osvw.status |= 1; in svm_init_osvw()
1007 svm->vcpu.arch.hflags = 0; in init_vmcb()
1078 svm->vcpu.arch.regs[VCPU_REGS_RIP] = save->rip; in init_vmcb()
1097 save->g_pat = svm->vcpu.arch.pat; in init_vmcb()
1104 svm->vcpu.arch.hflags = 0; in init_vmcb()
1123 svm->vcpu.arch.apic_base = APIC_DEFAULT_PHYS_BASE | in svm_vcpu_reset()
1126 svm->vcpu.arch.apic_base |= MSR_IA32_APICBASE_BSP; in svm_vcpu_reset()
1235 u64 tsc_ratio = vcpu->arch.tsc_scaling_ratio; in svm_vcpu_load()
1283 load_pdptrs(vcpu, vcpu->arch.walk_mmu, kvm_read_cr3(vcpu)); in svm_cache_reg()
1448 ulong gcr0 = svm->vcpu.arch.cr0; in update_cr0_intercept()
1473 if (vcpu->arch.efer & EFER_LME) { in svm_set_cr0()
1475 vcpu->arch.efer |= EFER_LMA; in svm_set_cr0()
1480 vcpu->arch.efer &= ~EFER_LMA; in svm_set_cr0()
1485 vcpu->arch.cr0 = cr0; in svm_set_cr0()
1515 vcpu->arch.cr4 = cr4; in svm_set_cr4()
1602 get_debugreg(vcpu->arch.db[0], 0); in svm_sync_dirty_debug_regs()
1603 get_debugreg(vcpu->arch.db[1], 1); in svm_sync_dirty_debug_regs()
1604 get_debugreg(vcpu->arch.db[2], 2); in svm_sync_dirty_debug_regs()
1605 get_debugreg(vcpu->arch.db[3], 3); in svm_sync_dirty_debug_regs()
1606 vcpu->arch.dr6 = svm_get_dr6(vcpu); in svm_sync_dirty_debug_regs()
1607 vcpu->arch.dr7 = svm->vmcb->save.dr7; in svm_sync_dirty_debug_regs()
1609 vcpu->arch.switch_db_regs &= ~KVM_DEBUGREG_WONT_EXIT; in svm_sync_dirty_debug_regs()
1675 kvm_run->debug.arch.pc = in db_interception()
1677 kvm_run->debug.arch.exception = DB_VECTOR; in db_interception()
1689 kvm_run->debug.arch.pc = svm->vmcb->save.cs.base + svm->vmcb->save.rip; in bp_interception()
1690 kvm_run->debug.arch.exception = BP_VECTOR; in bp_interception()
1924 vcpu->arch.mmu.set_cr3 = nested_svm_set_tdp_cr3; in nested_svm_init_mmu_context()
1925 vcpu->arch.mmu.get_cr3 = nested_svm_get_tdp_cr3; in nested_svm_init_mmu_context()
1926 vcpu->arch.mmu.get_pdptr = nested_svm_get_tdp_pdptr; in nested_svm_init_mmu_context()
1927 vcpu->arch.mmu.inject_page_fault = nested_svm_inject_npf_exit; in nested_svm_init_mmu_context()
1928 vcpu->arch.mmu.shadow_root_level = get_npt_level(); in nested_svm_init_mmu_context()
1929 reset_shadow_zero_bits_mask(vcpu, &vcpu->arch.mmu); in nested_svm_init_mmu_context()
1930 vcpu->arch.walk_mmu = &vcpu->arch.nested_mmu; in nested_svm_init_mmu_context()
1935 vcpu->arch.walk_mmu = &vcpu->arch.mmu; in nested_svm_uninit_mmu_context()
1940 if (!(svm->vcpu.arch.efer & EFER_SVME) in nested_svm_check_permissions()
1965 svm->vmcb->control.exit_info_2 = svm->vcpu.arch.cr2; in nested_svm_check_exception()
1980 if (!(svm->vcpu.arch.hflags & HF_VINTR_MASK)) in nested_svm_intr()
1983 if (!(svm->vcpu.arch.hflags & HF_HIF_MASK)) in nested_svm_intr()
2087 msr = svm->vcpu.arch.regs[VCPU_REGS_RCX]; in nested_svm_exit_handled_msr()
2257 nested_vmcb->save.efer = svm->vcpu.arch.efer; in nested_svm_vmexit()
2261 nested_vmcb->save.cr4 = svm->vcpu.arch.cr4; in nested_svm_vmexit()
2303 if (!(svm->vcpu.arch.hflags & HF_VINTR_MASK)) in nested_svm_vmexit()
2327 svm->vcpu.arch.cr3 = hsave->save.cr3; in nested_svm_vmexit()
2446 hsave->save.efer = svm->vcpu.arch.efer; in nested_svm_vmrun()
2448 hsave->save.cr4 = svm->vcpu.arch.cr4; in nested_svm_vmrun()
2461 svm->vcpu.arch.hflags |= HF_HIF_MASK; in nested_svm_vmrun()
2463 svm->vcpu.arch.hflags &= ~HF_HIF_MASK; in nested_svm_vmrun()
2484 svm->vcpu.arch.cr3 = nested_vmcb->save.cr3; in nested_svm_vmrun()
2491 svm->vmcb->save.cr2 = svm->vcpu.arch.cr2 = nested_vmcb->save.cr2; in nested_svm_vmrun()
2516 svm->vcpu.arch.hflags |= HF_VINTR_MASK; in nested_svm_vmrun()
2518 svm->vcpu.arch.hflags &= ~HF_VINTR_MASK; in nested_svm_vmrun()
2520 if (svm->vcpu.arch.hflags & HF_VINTR_MASK) { in nested_svm_vmrun()
2747 svm->vcpu.arch.nmi_injected = false; in task_switch_interception()
2796 svm->vcpu.arch.hflags |= HF_IRET_MASK; in iret_interception()
2833 unsigned long cr0 = svm->vcpu.arch.cr0; in check_selective_cr0_intercepted()
2906 val = svm->vcpu.arch.cr2; in cr_interception()
2941 svm->vcpu.arch.switch_db_regs |= KVM_DEBUGREG_WONT_EXIT; in dr_interception()
3104 if (svm_dis && (vcpu->arch.efer & EFER_SVME)) in svm_set_vm_cr()
3428 vcpu->arch.cr0 = svm->vmcb->save.cr0; in handle_exit()
3430 vcpu->arch.cr3 = svm->vmcb->save.cr3; in handle_exit()
3513 vcpu->arch.hflags |= HF_NMI_MASK; in svm_inject_nmi()
3536 trace_kvm_inj_virq(vcpu->arch.interrupt.nr); in svm_set_irq()
3539 svm->vmcb->control.event_inj = vcpu->arch.interrupt.nr | in svm_set_irq()
3547 if (is_guest_mode(vcpu) && (vcpu->arch.hflags & HF_VINTR_MASK)) in update_cr8_intercept()
3585 !(svm->vcpu.arch.hflags & HF_NMI_MASK); in svm_nmi_allowed()
3595 return !!(svm->vcpu.arch.hflags & HF_NMI_MASK); in svm_get_nmi_mask()
3603 svm->vcpu.arch.hflags |= HF_NMI_MASK; in svm_set_nmi_mask()
3606 svm->vcpu.arch.hflags &= ~HF_NMI_MASK; in svm_set_nmi_mask()
3624 return ret && !(svm->vcpu.arch.hflags & HF_VINTR_MASK); in svm_interrupt_allowed()
3649 if ((svm->vcpu.arch.hflags & (HF_NMI_MASK | HF_IRET_MASK)) in enable_nmi_window()
3684 if (is_guest_mode(vcpu) && (vcpu->arch.hflags & HF_VINTR_MASK)) in sync_cr8_to_lapic()
3698 if (is_guest_mode(vcpu) && (vcpu->arch.hflags & HF_VINTR_MASK)) in sync_lapic_to_cr8()
3719 if ((svm->vcpu.arch.hflags & HF_IRET_MASK) in svm_complete_interrupts()
3721 svm->vcpu.arch.hflags &= ~(HF_NMI_MASK | HF_IRET_MASK); in svm_complete_interrupts()
3725 svm->vcpu.arch.nmi_injected = false; in svm_complete_interrupts()
3739 svm->vcpu.arch.nmi_injected = true; in svm_complete_interrupts()
3785 svm->vmcb->save.rax = vcpu->arch.regs[VCPU_REGS_RAX]; in svm_vcpu_run()
3786 svm->vmcb->save.rsp = vcpu->arch.regs[VCPU_REGS_RSP]; in svm_vcpu_run()
3787 svm->vmcb->save.rip = vcpu->arch.regs[VCPU_REGS_RIP]; in svm_vcpu_run()
3800 svm->vmcb->save.cr2 = vcpu->arch.cr2; in svm_vcpu_run()
3854 [rbx]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_RBX])), in svm_vcpu_run()
3855 [rcx]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_RCX])), in svm_vcpu_run()
3856 [rdx]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_RDX])), in svm_vcpu_run()
3857 [rsi]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_RSI])), in svm_vcpu_run()
3858 [rdi]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_RDI])), in svm_vcpu_run()
3859 [rbp]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_RBP])) in svm_vcpu_run()
3861 , [r8]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_R8])), in svm_vcpu_run()
3862 [r9]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_R9])), in svm_vcpu_run()
3863 [r10]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_R10])), in svm_vcpu_run()
3864 [r11]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_R11])), in svm_vcpu_run()
3865 [r12]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_R12])), in svm_vcpu_run()
3866 [r13]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_R13])), in svm_vcpu_run()
3867 [r14]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_R14])), in svm_vcpu_run()
3868 [r15]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_R15])) in svm_vcpu_run()
3892 vcpu->arch.cr2 = svm->vmcb->save.cr2; in svm_vcpu_run()
3893 vcpu->arch.regs[VCPU_REGS_RAX] = svm->vmcb->save.rax; in svm_vcpu_run()
3894 vcpu->arch.regs[VCPU_REGS_RSP] = svm->vmcb->save.rsp; in svm_vcpu_run()
3895 vcpu->arch.regs[VCPU_REGS_RIP] = svm->vmcb->save.rip; in svm_vcpu_run()
3918 vcpu->arch.regs_avail &= ~(1 << VCPU_EXREG_PDPTR); in svm_vcpu_run()
3919 vcpu->arch.regs_dirty &= ~(1 << VCPU_EXREG_PDPTR); in svm_vcpu_run()
4172 cr0 = vcpu->arch.cr0 & ~SVM_CR0_SELECTIVE_MASK; in svm_check_intercept()
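The most common pattern in these hits is single-bit manipulation of vcpu->arch.hflags: enable_gif()/disable_gif()/gif_set() (lines 380-390 above) and svm_set_nmi_mask()/svm_get_nmi_mask() (lines 3595-3606) each set, clear, or test one HF_* flag. A minimal standalone sketch of that pattern follows; the struct, function names, and bit positions here are illustrative stand-ins, not the kernel's actual definitions (the real HF_* masks live in the KVM headers).

    /*
     * Sketch of the hflags set/clear/test pattern seen in the listing above.
     * Flag values are illustrative only.
     */
    #include <stdbool.h>

    #define HF_GIF_MASK (1UL << 0)   /* illustrative bit position */
    #define HF_NMI_MASK (1UL << 3)   /* illustrative bit position */

    struct vcpu_sketch {
            unsigned long hflags;    /* stands in for vcpu->arch.hflags */
    };

    /* mirror of enable_gif()/disable_gif()/gif_set() in the hits above */
    static void enable_gif(struct vcpu_sketch *v)  { v->hflags |= HF_GIF_MASK; }
    static void disable_gif(struct vcpu_sketch *v) { v->hflags &= ~HF_GIF_MASK; }
    static bool gif_set(struct vcpu_sketch *v)     { return !!(v->hflags & HF_GIF_MASK); }

    /* mirror of svm_set_nmi_mask(): set or clear one flag based on a bool */
    static void set_nmi_mask(struct vcpu_sketch *v, bool masked)
    {
            if (masked)
                    v->hflags |= HF_NMI_MASK;
            else
                    v->hflags &= ~HF_NMI_MASK;
    }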