Lines matching refs: kvm_x86_ops (all hits are in arch/x86/kvm/x86.c; the leading number on each line is that file's own line number)

93 struct kvm_x86_ops *kvm_x86_ops;  variable
94 EXPORT_SYMBOL_GPL(kvm_x86_ops);
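
The two entries above are the anchor for everything that follows: kvm_x86_ops is a single file-scope pointer to a table of function pointers, exported GPL-only, which the vendor module (kvm_intel or kvm_amd) installs at load time; every other hit in this listing is a call or NULL-test through that pointer. A minimal, compilable sketch of the same idiom, with all names invented for illustration (the real struct layout lives in arch/x86/include/asm/kvm_host.h):

#include <stddef.h>

struct vcpu { int id; };

struct hw_ops {
	int  (*get_cpl)(struct vcpu *v);          /* mandatory hook */
	void (*flush_log_dirty)(struct vcpu *v);  /* optional hook, may stay NULL */
};

/* One backend's implementations. */
static int a_get_cpl(struct vcpu *v) { (void)v; return 0; }

static struct hw_ops backend_a = {
	.get_cpl         = a_get_cpl,
	.flush_log_dirty = NULL,          /* this backend lacks the feature */
};

static struct hw_ops *hw_ops;             /* plays the role of kvm_x86_ops */

int main(void)
{
	struct vcpu v = { .id = 0 };

	hw_ops = &backend_a;              /* registration, cf. kvm_arch_init() */

	int cpl = hw_ops->get_cpl(&v);    /* mandatory op: called directly */
	if (hw_ops->flush_log_dirty)      /* optional op: NULL-tested first */
		hw_ops->flush_log_dirty(&v);
	return cpl;
}

Mandatory hooks are called unconditionally; optional ones (flush_log_dirty, check_nested_events, update_cr8_intercept, slot_enable_log_dirty and friends, all visible below) are NULL-tested first, so a backend advertises a feature simply by filling in the slot.
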
412 kvm_x86_ops->skip_emulated_instruction(vcpu); in kvm_complete_insn_gp()
459 if (kvm_x86_ops->get_cpl(vcpu) <= required_cpl) in kvm_require_cpl()
600 kvm_x86_ops->get_cs_db_l_bits(vcpu, &cs_db, &cs_l); in kvm_set_cr0()
613 kvm_x86_ops->set_cr0(vcpu, cr0); in kvm_set_cr0()
692 if (kvm_x86_ops->get_cpl(vcpu) != 0 || in kvm_set_xcr()
740 if (kvm_x86_ops->set_cr4(vcpu, cr4)) in kvm_set_cr4()
815 kvm_x86_ops->set_dr6(vcpu, vcpu->arch.dr6); in kvm_update_dr6()
826 kvm_x86_ops->set_dr7(vcpu, dr7); in kvm_update_dr7()
892 *val = kvm_x86_ops->get_dr6(vcpu); in kvm_get_dr()
994 kvm_x86_ops->set_efer(vcpu, efer); in set_efer()
1041 return kvm_x86_ops->set_msr(vcpu, msr); in kvm_set_msr()
1247 kvm_x86_ops->set_tsc_khz(vcpu, this_tsc_khz, use_scaling); in kvm_set_tsc_khz()
1289 u64 curr_offset = kvm_x86_ops->read_tsc_offset(vcpu); in update_ia32_tsc_adjust_msr()
1304 offset = kvm_x86_ops->compute_tsc_offset(vcpu, data); in kvm_write_tsc()
1361 offset = kvm_x86_ops->compute_tsc_offset(vcpu, data); in kvm_write_tsc()
1402 kvm_x86_ops->write_tsc_offset(vcpu, offset); in kvm_write_tsc()
1628 tsc_timestamp = kvm_x86_ops->read_l1_tsc(v, host_tsc); in kvm_guest_time_update()
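
The TSC cluster above reflects the offsetting model: the guest's TSC is the host's TSC plus a per-vCPU offset that the backend programs into the VMCS/VMCB, so compute_tsc_offset() solves offset = target_guest_tsc - host_tsc and read_l1_tsc() applies it in the other direction. A sketch of that arithmetic, ignoring TSC scaling, which the real hooks also handle:

#include <stdint.h>

/* cf. compute_tsc_offset(): choose the offset that makes the guest
 * observe target_guest_tsc right now. Unsigned wraparound is the
 * intended modular arithmetic. */
static uint64_t compute_tsc_offset(uint64_t host_tsc, uint64_t target_guest_tsc)
{
	return target_guest_tsc - host_tsc;
}

/* cf. read_l1_tsc(): what the (L1) guest would read at this host TSC. */
static uint64_t read_l1_tsc(uint64_t host_tsc, uint64_t offset)
{
	return host_tsc + offset;
}

int main(void)
{
	uint64_t off = compute_tsc_offset(1000, 0);   /* fresh guest starts at 0 */
	return read_l1_tsc(1000, off) == 0 ? 0 : 1;
}
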
2005 kvm_x86_ops->patch_hypercall(vcpu, instructions); in set_msr_hyperv_pw()
2379 return kvm_x86_ops->get_msr(vcpu, msr_index, pdata); in kvm_get_msr()
2806 r = !kvm_x86_ops->cpu_has_accelerated_tpr(); in kvm_vm_ioctl_check_extension()
2926 if (kvm_x86_ops->has_wbinvd_exit()) in kvm_arch_vcpu_load()
2933 kvm_x86_ops->vcpu_load(vcpu, cpu); in kvm_arch_vcpu_load()
2948 u64 offset = kvm_x86_ops->compute_tsc_offset(vcpu, in kvm_arch_vcpu_load()
2950 kvm_x86_ops->write_tsc_offset(vcpu, offset); in kvm_arch_vcpu_load()
2969 kvm_x86_ops->vcpu_put(vcpu); in kvm_arch_vcpu_put()
2977 kvm_x86_ops->sync_pir_to_irr(vcpu); in kvm_vcpu_ioctl_get_lapic()
3109 events->interrupt.shadow = kvm_x86_ops->get_interrupt_shadow(vcpu); in kvm_vcpu_ioctl_x86_get_vcpu_events()
3113 events->nmi.masked = kvm_x86_ops->get_nmi_mask(vcpu); in kvm_vcpu_ioctl_x86_get_vcpu_events()
3141 kvm_x86_ops->set_interrupt_shadow(vcpu, in kvm_vcpu_ioctl_x86_set_vcpu_events()
3147 kvm_x86_ops->set_nmi_mask(vcpu, events->nmi.masked); in kvm_vcpu_ioctl_x86_set_vcpu_events()
3639 ret = kvm_x86_ops->set_tss_addr(kvm, addr); in kvm_vm_ioctl_set_tss_addr()
3820 if (kvm_x86_ops->flush_log_dirty) in kvm_vm_ioctl_get_dirty_log()
3821 kvm_x86_ops->flush_log_dirty(kvm); in kvm_vm_ioctl_get_dirty_log()
4125 if (!kvm_x86_ops->mpx_supported()) in kvm_init_msr_list()
4129 if (!kvm_x86_ops->rdtscp_supported()) in kvm_init_msr_list()
4189 kvm_x86_ops->set_segment(vcpu, var, seg); in kvm_set_segment()
4195 kvm_x86_ops->get_segment(vcpu, var, seg); in kvm_get_segment()
4215 u32 access = (kvm_x86_ops->get_cpl(vcpu) == 3) ? PFERR_USER_MASK : 0; in kvm_mmu_gva_to_gpa_read()
4222 u32 access = (kvm_x86_ops->get_cpl(vcpu) == 3) ? PFERR_USER_MASK : 0; in kvm_mmu_gva_to_gpa_fetch()
4230 u32 access = (kvm_x86_ops->get_cpl(vcpu) == 3) ? PFERR_USER_MASK : 0; in kvm_mmu_gva_to_gpa_write()
4279 u32 access = (kvm_x86_ops->get_cpl(vcpu) == 3) ? PFERR_USER_MASK : 0; in kvm_fetch_guest_virt()
4305 u32 access = (kvm_x86_ops->get_cpl(vcpu) == 3) ? PFERR_USER_MASK : 0; in kvm_read_guest_virt()
4358 u32 access = ((kvm_x86_ops->get_cpl(vcpu) == 3) ? PFERR_USER_MASK : 0) in vcpu_mmio_gva_to_gpa()
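
The six gva-to-gpa and guest-virt helpers above all open with the same line: ask the backend for the current privilege level, and if the guest is at CPL 3 set PFERR_USER_MASK so the software page walk applies user-mode permission checks. A self-contained sketch of that derivation (get_cpl() here is a stand-in for the kvm_x86_ops hook; the mask value is the U/S bit of the x86 #PF error code):

#include <stdint.h>
#include <stdio.h>

#define PFERR_USER_MASK (1U << 2)   /* U/S bit of the x86 #PF error code */

/* Stand-in for the kvm_x86_ops->get_cpl() hook. */
static int get_cpl(void) { return 3; }

/* cf. kvm_mmu_gva_to_gpa_read() and friends: translations performed on
 * behalf of guest user mode carry the user bit, so supervisor-only
 * pages fault during the walk just as they would on hardware. */
static uint32_t access_for_cpl(void)
{
	return (get_cpl() == 3) ? PFERR_USER_MASK : 0;
}

int main(void)
{
	printf("access mask = %#x\n", (unsigned int)access_for_cpl());  /* 0x4 */
	return 0;
}
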
4737 return kvm_x86_ops->get_segment_base(vcpu, seg); in get_segment_base()
4750 if (kvm_x86_ops->has_wbinvd_exit()) { in kvm_emulate_wbinvd_noskip()
4765 kvm_x86_ops->skip_emulated_instruction(vcpu); in kvm_emulate_wbinvd()
4855 return kvm_x86_ops->get_cpl(emul_to_vcpu(ctxt)); in emulator_get_cpl()
4860 kvm_x86_ops->get_gdt(emul_to_vcpu(ctxt), dt); in emulator_get_gdt()
4865 kvm_x86_ops->get_idt(emul_to_vcpu(ctxt), dt); in emulator_get_idt()
4870 kvm_x86_ops->set_gdt(emul_to_vcpu(ctxt), dt); in emulator_set_gdt()
4875 kvm_x86_ops->set_idt(emul_to_vcpu(ctxt), dt); in emulator_set_idt()
5002 return kvm_x86_ops->check_intercept(emul_to_vcpu(ctxt), info, stage); in emulator_intercept()
5023 kvm_x86_ops->set_nmi_mask(emul_to_vcpu(ctxt), masked); in emulator_set_nmi_mask()
5066 u32 int_shadow = kvm_x86_ops->get_interrupt_shadow(vcpu); in toggle_interruptibility()
5077 kvm_x86_ops->set_interrupt_shadow(vcpu, mask); in toggle_interruptibility()
5102 kvm_x86_ops->get_cs_db_l_bits(vcpu, &cs_db, &cs_l); in init_emulate_ctxt()
5151 if (!is_guest_mode(vcpu) && kvm_x86_ops->get_cpl(vcpu) == 0) { in handle_emulation_failure()
5476 unsigned long rflags = kvm_x86_ops->get_rflags(vcpu); in x86_emulate_instruction()
5682 user_mode = kvm_x86_ops->get_cpl(__this_cpu_read(current_vcpu)); in kvm_is_user_mode()
5792 struct kvm_x86_ops *ops = opaque; in kvm_arch_init()
5794 if (kvm_x86_ops) { in kvm_arch_init()
5824 kvm_x86_ops = ops; in kvm_arch_init()
5860 kvm_x86_ops = NULL; in kvm_arch_exit()
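
kvm_arch_init() and kvm_arch_exit() above show how the pointer gets populated: the backend passes its ops table in as the opaque module argument, init bails out if another backend has already registered, and exit resets the pointer to NULL so the other module can be loaded later. A hedged sketch of that handshake (names are illustrative; refusing the second registration with -EEXIST matches the kernel's behavior here):

#include <errno.h>
#include <stddef.h>

struct hw_ops { int (*hardware_setup)(void); };

static struct hw_ops *hw_ops;   /* analogous to the kvm_x86_ops pointer */

/* cf. kvm_arch_init(): install the backend's table exactly once. */
int arch_init(void *opaque)
{
	struct hw_ops *ops = opaque;

	if (hw_ops)                 /* another backend already loaded */
		return -EEXIST;

	hw_ops = ops;
	return 0;
}

/* cf. kvm_arch_exit(): clear the slot so a new backend may register. */
void arch_exit(void)
{
	hw_ops = NULL;
}

int main(void)
{
	struct hw_ops backend = { .hardware_setup = NULL };

	if (arch_init(&backend) != 0)
		return 1;
	if (arch_init(&backend) != -EEXIST)   /* double load is refused */
		return 1;
	arch_exit();                          /* a reload would now succeed */
	return 0;
}
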
5880 kvm_x86_ops->skip_emulated_instruction(vcpu); in kvm_emulate_halt()
5895 if (kvm_x86_ops->get_cpl(vcpu) != 0 || !is_protmode(vcpu)) { in kvm_hv_hypercall()
5967 kvm_x86_ops->skip_emulated_instruction(vcpu); in kvm_emulate_hypercall()
5989 if (kvm_x86_ops->get_cpl(vcpu) != 0) { in kvm_emulate_hypercall()
6021 kvm_x86_ops->patch_hypercall(vcpu, instruction); in emulator_fix_hypercall()
6059 if (!kvm_x86_ops->update_cr8_intercept) in update_cr8_intercept()
6075 kvm_x86_ops->update_cr8_intercept(vcpu, tpr, max_irr); in update_cr8_intercept()
6098 kvm_x86_ops->queue_exception(vcpu, vcpu->arch.exception.nr, in inject_pending_event()
6106 kvm_x86_ops->set_nmi(vcpu); in inject_pending_event()
6111 kvm_x86_ops->set_irq(vcpu); in inject_pending_event()
6115 if (is_guest_mode(vcpu) && kvm_x86_ops->check_nested_events) { in inject_pending_event()
6116 r = kvm_x86_ops->check_nested_events(vcpu, req_int_win); in inject_pending_event()
6122 if (vcpu->arch.nmi_pending && kvm_x86_ops->nmi_allowed(vcpu)) { in inject_pending_event()
6125 kvm_x86_ops->set_nmi(vcpu); in inject_pending_event()
6134 if (is_guest_mode(vcpu) && kvm_x86_ops->check_nested_events) { in inject_pending_event()
6135 r = kvm_x86_ops->check_nested_events(vcpu, req_int_win); in inject_pending_event()
6139 if (kvm_x86_ops->interrupt_allowed(vcpu)) { in inject_pending_event()
6142 kvm_x86_ops->set_irq(vcpu); in inject_pending_event()
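
The inject_pending_event() cluster encodes the delivery priority: a queued exception is injected first, then a pending NMI if ->nmi_allowed() reports the window open, then an external interrupt if ->interrupt_allowed() agrees, with ->check_nested_events() consulted first when the vCPU is in guest mode. A condensed, compilable sketch of that ordering (the real function also tracks re-injection state and return codes omitted here):

#include <stdbool.h>

struct vcpu {
	bool exception_pending;
	bool nmi_pending;
	bool irq_pending;
};

/* Illustrative stand-ins for the kvm_x86_ops hooks named above. */
static bool nmi_allowed(struct vcpu *v)       { (void)v; return true; }
static bool interrupt_allowed(struct vcpu *v) { (void)v; return true; }
static void queue_exception(struct vcpu *v)   { (void)v; }
static void set_nmi(struct vcpu *v)           { (void)v; }
static void set_irq(struct vcpu *v)           { (void)v; }

/* cf. inject_pending_event(), heavily condensed: exceptions outrank
 * NMIs, NMIs outrank external interrupts, and NMI/IRQ delivery is
 * gated on whether the vendor code says the window is open. */
static void inject_pending(struct vcpu *v)
{
	if (v->exception_pending) {
		queue_exception(v);
		return;
	}
	if (v->nmi_pending && nmi_allowed(v)) {
		set_nmi(v);
		return;
	}
	if (v->irq_pending && interrupt_allowed(v))
		set_irq(v);
}

int main(void)
{
	struct vcpu v = { .nmi_pending = true };
	inject_pending(&v);   /* delivers the NMI: nothing outranks it here */
	return 0;
}
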
6157 if (kvm_x86_ops->get_nmi_mask(vcpu) || vcpu->arch.nmi_injected) in process_nmi()
6177 kvm_x86_ops->load_eoi_exitmap(vcpu, eoi_exit_bitmap); in vcpu_scan_ioapic()
6184 kvm_x86_ops->tlb_flush(vcpu); in kvm_vcpu_flush_tlb()
6194 if (!kvm_x86_ops->set_apic_access_page_addr) in kvm_vcpu_reload_apic_access_page()
6200 kvm_x86_ops->set_apic_access_page_addr(vcpu, page_to_phys(page)); in kvm_vcpu_reload_apic_access_page()
6263 kvm_x86_ops->fpu_deactivate(vcpu); in vcpu_enter_guest()
6297 kvm_x86_ops->enable_nmi_window(vcpu); in vcpu_enter_guest()
6299 kvm_x86_ops->enable_irq_window(vcpu); in vcpu_enter_guest()
6307 if (kvm_x86_ops->hwapic_irr_update) in vcpu_enter_guest()
6308 kvm_x86_ops->hwapic_irr_update(vcpu, in vcpu_enter_guest()
6322 kvm_x86_ops->prepare_guest_switch(vcpu); in vcpu_enter_guest()
6366 kvm_x86_ops->run(vcpu); in vcpu_enter_guest()
6376 kvm_x86_ops->sync_dirty_debug_regs(vcpu); in vcpu_enter_guest()
6393 vcpu->arch.last_guest_tsc = kvm_x86_ops->read_l1_tsc(vcpu, in vcpu_enter_guest()
6402 kvm_x86_ops->handle_external_intr(vcpu); in vcpu_enter_guest()
6434 r = kvm_x86_ops->handle_exit(vcpu); in vcpu_enter_guest()
6438 kvm_x86_ops->cancel_injection(vcpu); in vcpu_enter_guest()
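
The vcpu_enter_guest() hits trace the spine of the run loop: decide between injecting events and enabling an NMI/IRQ window, ->prepare_guest_switch(), ->run() the guest, capture the exit-time TSC via ->read_l1_tsc(), ->handle_external_intr() for the host interrupt that forced the exit, then ->handle_exit() to decode it, with ->cancel_injection() un-queuing events if entry is abandoned. A skeletal sketch of that sequence (all stubs illustrative):

#include <stdbool.h>

struct vcpu { bool entry_aborted; };

/* Illustrative stand-ins for the ops invoked by vcpu_enter_guest(). */
static void prepare_guest_switch(struct vcpu *v) { (void)v; }
static void cancel_injection(struct vcpu *v)     { (void)v; }
static void run(struct vcpu *v)                  { (void)v; }
static void handle_external_intr(struct vcpu *v) { (void)v; }
static int  handle_exit(struct vcpu *v)          { (void)v; return 1; } /* >0: keep running */

/* cf. vcpu_enter_guest(), reduced to its spine. If entry is abandoned
 * after events were queued, cancel_injection() un-queues them so they
 * are re-evaluated on the next iteration. */
static int enter_guest(struct vcpu *v)
{
	prepare_guest_switch(v);          /* load guest state on this CPU */

	if (v->entry_aborted) {           /* e.g. a signal arrived before entry */
		cancel_injection(v);
		return 1;                 /* back out to the outer loop */
	}

	run(v);                           /* vendor entry: VMX or SVM */
	handle_external_intr(v);          /* ack the host IRQ behind the exit */
	return handle_exit(v);            /* vendor-specific exit decode */
}

int main(void)
{
	struct vcpu v = { .entry_aborted = false };
	return enter_guest(&v) > 0 ? 0 : 1;
}
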
6745 kvm_x86_ops->get_idt(vcpu, &dt); in kvm_arch_vcpu_ioctl_get_sregs()
6748 kvm_x86_ops->get_gdt(vcpu, &dt); in kvm_arch_vcpu_ioctl_get_sregs()
6832 kvm_x86_ops->set_idt(vcpu, &dt); in kvm_arch_vcpu_ioctl_set_sregs()
6835 kvm_x86_ops->set_gdt(vcpu, &dt); in kvm_arch_vcpu_ioctl_set_sregs()
6845 kvm_x86_ops->set_efer(vcpu, sregs->efer); in kvm_arch_vcpu_ioctl_set_sregs()
6851 kvm_x86_ops->set_cr0(vcpu, sregs->cr0); in kvm_arch_vcpu_ioctl_set_sregs()
6855 kvm_x86_ops->set_cr4(vcpu, sregs->cr4); in kvm_arch_vcpu_ioctl_set_sregs()
6946 kvm_x86_ops->update_db_bp_intercept(vcpu); in kvm_arch_vcpu_ioctl_set_guest_debug()
7076 kvm_x86_ops->vcpu_free(vcpu); in kvm_arch_vcpu_free()
7089 vcpu = kvm_x86_ops->vcpu_create(kvm, id); in kvm_arch_vcpu_create()
7095 kvm_x86_ops->fpu_activate(vcpu); in kvm_arch_vcpu_create()
7142 kvm_x86_ops->vcpu_free(vcpu); in kvm_arch_vcpu_destroy()
7178 kvm_x86_ops->vcpu_reset(vcpu); in kvm_vcpu_reset()
7203 ret = kvm_x86_ops->hardware_enable(); in kvm_arch_hardware_enable()
7285 kvm_x86_ops->hardware_disable(); in kvm_arch_hardware_disable()
7293 r = kvm_x86_ops->hardware_setup(); in kvm_arch_hardware_setup()
7303 kvm_x86_ops->hardware_unsetup(); in kvm_arch_hardware_unsetup()
7308 kvm_x86_ops->check_processor_compatibility(rtn); in kvm_arch_check_processor_compat()
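
The hardware_* entries above mark the backend lifecycle: ->hardware_setup()/->hardware_unsetup() bracket module load and unload, ->hardware_enable()/->hardware_disable() run once per CPU (the kernel invokes them on each CPU, not from a plain loop as in the sketch below), and ->check_processor_compatibility() vets every CPU. An illustrative driver for those four hooks:

struct hw_ops {
	int  (*hardware_setup)(void);    /* once at module load */
	void (*hardware_unsetup)(void);  /* once at module unload */
	int  (*hardware_enable)(void);   /* on each CPU before first use */
	void (*hardware_disable)(void);  /* on each CPU at teardown */
};

/* Generic code only ever sees the table, never VMX or SVM directly. */
static int lifecycle(struct hw_ops *ops, int ncpus)
{
	int cpu, r = ops->hardware_setup();

	if (r)
		return r;
	for (cpu = 0; cpu < ncpus; cpu++)
		ops->hardware_enable();
	for (cpu = 0; cpu < ncpus; cpu++)
		ops->hardware_disable();
	ops->hardware_unsetup();
	return 0;
}

/* Stub backend so the sketch links and runs. */
static int  setup(void)   { return 0; }
static void unsetup(void) { }
static int  enable(void)  { return 0; }
static void disable(void) { }

int main(void)
{
	struct hw_ops ops = { setup, unsetup, enable, disable };
	return lifecycle(&ops, 4);
}
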
7414 kvm_x86_ops->sched_in(vcpu, cpu); in kvm_arch_sched_in()
7670 if (kvm_x86_ops->slot_enable_log_dirty) in kvm_mmu_slot_apply_flags()
7671 kvm_x86_ops->slot_enable_log_dirty(kvm, new); in kvm_mmu_slot_apply_flags()
7675 if (kvm_x86_ops->slot_disable_log_dirty) in kvm_mmu_slot_apply_flags()
7676 kvm_x86_ops->slot_disable_log_dirty(kvm, new); in kvm_mmu_slot_apply_flags()
7750 if (is_guest_mode(vcpu) && kvm_x86_ops->check_nested_events) in kvm_arch_vcpu_runnable()
7751 kvm_x86_ops->check_nested_events(vcpu, false); in kvm_arch_vcpu_runnable()
7770 return kvm_x86_ops->interrupt_allowed(vcpu); in kvm_arch_interrupt_allowed()
7792 rflags = kvm_x86_ops->get_rflags(vcpu); in kvm_get_rflags()
7804 kvm_x86_ops->set_rflags(vcpu, rflags); in __kvm_set_rflags()
7911 kvm_x86_ops->get_cpl(vcpu) == 0)) in kvm_arch_async_page_not_present()
7953 kvm_x86_ops->interrupt_allowed(vcpu); in kvm_arch_can_inject_async_page_present()