
Searched refs:kvm_x86_ops (Results 1 – 15 of 15) sorted by relevance

/linux-4.4.14/arch/x86/kvm/
pmu.c
159 config = kvm_x86_ops->pmu_ops->find_arch_event(pmc_to_pmu(pmc), in reprogram_gp_counter()
189 kvm_x86_ops->pmu_ops->find_fixed_event(idx), in reprogram_fixed_counter()
198 struct kvm_pmc *pmc = kvm_x86_ops->pmu_ops->pmc_idx_to_pmc(pmu, pmc_idx); in reprogram_counter()
223 struct kvm_pmc *pmc = kvm_x86_ops->pmu_ops->pmc_idx_to_pmc(pmu, bit); in kvm_pmu_handle_event()
237 return kvm_x86_ops->pmu_ops->is_valid_msr_idx(vcpu, idx); in kvm_pmu_is_valid_msr_idx()
246 pmc = kvm_x86_ops->pmu_ops->msr_idx_to_pmc(vcpu, idx); in kvm_pmu_rdpmc()
266 return kvm_x86_ops->pmu_ops->is_valid_msr(vcpu, msr); in kvm_pmu_is_valid_msr()
271 return kvm_x86_ops->pmu_ops->get_msr(vcpu, msr, data); in kvm_pmu_get_msr()
276 return kvm_x86_ops->pmu_ops->set_msr(vcpu, msr_info); in kvm_pmu_set_msr()
285 kvm_x86_ops->pmu_ops->refresh(vcpu); in kvm_pmu_refresh()
[all …]
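The pmu.c hits above all dispatch through a second level of indirection, kvm_x86_ops->pmu_ops, so the arch-generic PMU code never touches Intel or AMD specifics directly. A minimal sketch of that shape, with stand-in types rather than the kernel's full definitions:

#include <stdbool.h>
#include <stdint.h>

struct kvm_vcpu;                              /* opaque for this sketch */

struct kvm_pmu_ops {                          /* vendor PMU callbacks */
	bool (*is_valid_msr)(struct kvm_vcpu *vcpu, uint32_t msr);
	int  (*get_msr)(struct kvm_vcpu *vcpu, uint32_t msr, uint64_t *data);
};

struct kvm_x86_ops {
	struct kvm_pmu_ops *pmu_ops;          /* Intel (vmx) or AMD (svm) table */
	/* ...many other callbacks... */
};

extern struct kvm_x86_ops *kvm_x86_ops;

/* Generic code only ever calls through the pointers, as kvm_pmu_is_valid_msr() does above. */
bool pmu_is_valid_msr(struct kvm_vcpu *vcpu, uint32_t msr)
{
	return kvm_x86_ops->pmu_ops->is_valid_msr(vcpu, msr);
}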
kvm_cache_regs.h
13 kvm_x86_ops->cache_reg(vcpu, reg); in kvm_register_read()
43 kvm_x86_ops->cache_reg(vcpu, VCPU_EXREG_PDPTR); in kvm_pdptr_read()
52 kvm_x86_ops->decache_cr0_guest_bits(vcpu); in kvm_read_cr0_bits()
65 kvm_x86_ops->decache_cr4_guest_bits(vcpu); in kvm_read_cr4_bits()
72 kvm_x86_ops->decache_cr3(vcpu); in kvm_read_cr3()
x86.c
96 struct kvm_x86_ops *kvm_x86_ops __read_mostly;
97 EXPORT_SYMBOL_GPL(kvm_x86_ops);
422 kvm_x86_ops->skip_emulated_instruction(vcpu); in kvm_complete_insn_gp()
469 if (kvm_x86_ops->get_cpl(vcpu) <= required_cpl) in kvm_require_cpl()
610 kvm_x86_ops->get_cs_db_l_bits(vcpu, &cs_db, &cs_l); in kvm_set_cr0()
623 kvm_x86_ops->set_cr0(vcpu, cr0); in kvm_set_cr0()
709 if (kvm_x86_ops->get_cpl(vcpu) != 0 || in kvm_set_xcr()
757 if (kvm_x86_ops->set_cr4(vcpu, cr4)) in kvm_set_cr4()
832 kvm_x86_ops->set_dr6(vcpu, vcpu->arch.dr6); in kvm_update_dr6()
843 kvm_x86_ops->set_dr7(vcpu, dr7); in kvm_update_dr7()
[all …]
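x86.c owns the single global pointer (lines 96–97 above); every arch-generic helper then dispatches through it, e.g. the CPL check at line 469. The pointer is installed at module load by whichever vendor module initialises KVM. A self-contained sketch of that registration pattern; names other than kvm_x86_ops are illustrative:

#include <stdio.h>

struct kvm_vcpu { int cpl; };

struct kvm_x86_ops {
	int (*get_cpl)(struct kvm_vcpu *vcpu);
};

/* One global pointer, defined once (x86.c adds __read_mostly and
 * EXPORT_SYMBOL_GPL). */
struct kvm_x86_ops *kvm_x86_ops;

/* Vendor side: a filled-in ops table handed over at init. */
static int demo_get_cpl(struct kvm_vcpu *vcpu) { return vcpu->cpl; }
static struct kvm_x86_ops demo_x86_ops = { .get_cpl = demo_get_cpl };

/* Generic side: roughly what kvm_require_cpl() does with get_cpl. */
static int require_cpl(struct kvm_vcpu *vcpu, int required_cpl)
{
	return kvm_x86_ops->get_cpl(vcpu) <= required_cpl;
}

int main(void)
{
	struct kvm_vcpu vcpu = { .cpl = 3 };

	kvm_x86_ops = &demo_x86_ops;          /* vendor module "registers" itself */
	printf("CPL 0 allowed: %d\n", require_cpl(&vcpu, 0));
	return 0;
}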
cpuid.c
53 if (!kvm_x86_ops->mpx_supported()) in kvm_supported_xcr0()
102 kvm_x86_ops->fpu_activate(vcpu); in kvm_update_cpuid()
196 kvm_x86_ops->cpuid_update(vcpu); in kvm_vcpu_ioctl_set_cpuid()
220 kvm_x86_ops->cpuid_update(vcpu); in kvm_vcpu_ioctl_set_cpuid2()
289 unsigned f_gbpages = (kvm_x86_ops->get_lpage_level() == PT_PDPE_LEVEL) in __do_cpuid_ent()
296 unsigned f_rdtscp = kvm_x86_ops->rdtscp_supported() ? F(RDTSCP) : 0; in __do_cpuid_ent()
297 unsigned f_invpcid = kvm_x86_ops->invpcid_supported() ? F(INVPCID) : 0; in __do_cpuid_ent()
298 unsigned f_mpx = kvm_x86_ops->mpx_supported() ? F(MPX) : 0; in __do_cpuid_ent()
299 unsigned f_xsaves = kvm_x86_ops->xsaves_supported() ? F(XSAVES) : 0; in __do_cpuid_ent()
611 kvm_x86_ops->set_supported_cpuid(function, entry); in __do_cpuid_ent()
[all …]
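The cpuid.c hits show the *_supported() capability callbacks: the vendor module decides which CPUID feature bits the guest may see (lines 296–299 above). A hedged sketch of that gating; the struct, F() macro and feature constants here are illustrative, not the kernel's:

#include <stdbool.h>

struct kvm_x86_ops {
	bool (*rdtscp_supported)(void);
	bool (*invpcid_supported)(void);
	bool (*mpx_supported)(void);
	bool (*xsaves_supported)(void);
};
extern struct kvm_x86_ops *kvm_x86_ops;

enum { FEAT_RDTSCP, FEAT_INVPCID, FEAT_MPX, FEAT_XSAVES };
#define F(name)  (1u << FEAT_##name)

/* Mirrors the gating in __do_cpuid_ent(): a feature bit is advertised
 * to the guest only if the loaded vendor module can virtualise it. */
unsigned int guest_feature_mask(void)
{
	unsigned int f_rdtscp  = kvm_x86_ops->rdtscp_supported()  ? F(RDTSCP)  : 0;
	unsigned int f_invpcid = kvm_x86_ops->invpcid_supported() ? F(INVPCID) : 0;
	unsigned int f_mpx     = kvm_x86_ops->mpx_supported()     ? F(MPX)     : 0;
	unsigned int f_xsaves  = kvm_x86_ops->xsaves_supported()  ? F(XSAVES)  : 0;

	return f_rdtscp | f_invpcid | f_mpx | f_xsaves;
}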
mmu.h
150 int cpl = kvm_x86_ops->get_cpl(vcpu); in permission_fault()
151 unsigned long rflags = kvm_x86_ops->get_rflags(vcpu); in permission_fault()
lapic.c
382 kvm_x86_ops->sync_pir_to_irr(apic->vcpu); in apic_find_highest_irr()
421 if (unlikely(kvm_x86_ops->hwapic_isr_update)) in apic_set_isr()
422 kvm_x86_ops->hwapic_isr_update(vcpu->kvm, vec); in apic_set_isr()
469 if (unlikely(kvm_x86_ops->hwapic_isr_update)) in apic_clear_isr()
470 kvm_x86_ops->hwapic_isr_update(vcpu->kvm, in apic_clear_isr()
855 if (kvm_x86_ops->deliver_posted_interrupt) in __apic_accept_irq()
856 kvm_x86_ops->deliver_posted_interrupt(vcpu, vector); in __apic_accept_irq()
1228 if (kvm_x86_ops->deliver_posted_interrupt) in lapic_timer_int_injected()
1637 kvm_x86_ops->set_virtual_x2apic_mode(vcpu, true); in kvm_lapic_set_base()
1639 kvm_x86_ops->set_virtual_x2apic_mode(vcpu, false); in kvm_lapic_set_base()
[all …]
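Note that lapic.c treats some hooks as optional: hwapic_isr_update and deliver_posted_interrupt are called only if the pointer is non-NULL (lines 421 and 855 above), because vmx.c clears them in hardware_setup() when the CPU lacks the feature. The guarded-call idiom, in a sketch with an illustrative wrapper name:

struct kvm;
struct kvm_vcpu { struct kvm *kvm; };

struct kvm_x86_ops {
	void (*hwapic_isr_update)(struct kvm *kvm, int isr);   /* may be NULL */
};
extern struct kvm_x86_ops *kvm_x86_ops;

static void notify_isr_update(struct kvm_vcpu *vcpu, int vec)
{
	/* Optional callback: skip silently when the host cleared it. */
	if (kvm_x86_ops->hwapic_isr_update)
		kvm_x86_ops->hwapic_isr_update(vcpu->kvm, vec);
}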
hyperv.c
136 kvm_x86_ops->patch_hypercall(vcpu, instructions); in kvm_hv_set_msr_pw()
356 if (kvm_x86_ops->get_cpl(vcpu) != 0 || !is_protmode(vcpu)) { in kvm_hv_hypercall()
pmu.h
73 return kvm_x86_ops->pmu_ops->pmc_is_enabled(pmc); in pmc_is_enabled()
x86.h
58 kvm_x86_ops->get_cs_db_l_bits(vcpu, &cs_db, &cs_l); in is_64_bit_mode()
lapic.h
148 return kvm_x86_ops->cpu_uses_apicv(vcpu); in kvm_vcpu_apic_vid_enabled()
mmu.c
902 max_level = min(kvm_x86_ops->get_lpage_level(), host_level); in mapping_level()
1335 if (kvm_x86_ops->enable_log_dirty_pt_masked) in kvm_arch_mmu_enable_log_dirty_pt_masked()
1336 kvm_x86_ops->enable_log_dirty_pt_masked(kvm, slot, gfn_offset, in kvm_arch_mmu_enable_log_dirty_pt_masked()
2513 spte |= kvm_x86_ops->get_mt_mask(vcpu, gfn, in set_spte()
3442 return kvm_x86_ops->interrupt_allowed(vcpu); in can_do_async_pf()
3956 context->shadow_root_level = kvm_x86_ops->get_tdp_level(); in init_kvm_tdp_mmu()
3959 context->set_cr3 = kvm_x86_ops->set_tdp_cr3; in init_kvm_tdp_mmu()
4025 context->shadow_root_level = kvm_x86_ops->get_tdp_level(); in kvm_init_shadow_ept_mmu()
4048 context->set_cr3 = kvm_x86_ops->set_cr3; in init_kvm_softmmu()
trace.h
237 kvm_x86_ops->get_exit_info(vcpu, &__entry->info1,
740 __entry->csbase = kvm_x86_ops->get_segment_base(vcpu, VCPU_SREG_CS);
vmx.c
6214 kvm_x86_ops->set_apic_access_page_addr = NULL; in hardware_setup()
6217 kvm_x86_ops->update_cr8_intercept = NULL; in hardware_setup()
6235 kvm_x86_ops->update_cr8_intercept = NULL; in hardware_setup()
6237 kvm_x86_ops->hwapic_irr_update = NULL; in hardware_setup()
6238 kvm_x86_ops->hwapic_isr_update = NULL; in hardware_setup()
6239 kvm_x86_ops->deliver_posted_interrupt = NULL; in hardware_setup()
6240 kvm_x86_ops->sync_pir_to_irr = vmx_sync_pir_to_irr_dummy; in hardware_setup()
6296 kvm_x86_ops->slot_enable_log_dirty = NULL; in hardware_setup()
6297 kvm_x86_ops->slot_disable_log_dirty = NULL; in hardware_setup()
6298 kvm_x86_ops->flush_log_dirty = NULL; in hardware_setup()
[all …]
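The vmx.c hardware_setup() hits are the provider side of that: when the host lacks a feature, optional callbacks are either set to NULL (so callers must guard, as lapic.c does) or swapped for a harmless dummy such as vmx_sync_pir_to_irr_dummy at line 6240, so callers can stay unconditional. A sketch of the two idioms, with illustrative names:

#include <stddef.h>

struct kvm_vcpu;

struct kvm_x86_ops {
	void (*sync_pir_to_irr)(struct kvm_vcpu *vcpu);                   /* never NULL  */
	void (*deliver_posted_interrupt)(struct kvm_vcpu *vcpu, int vec); /* may be NULL */
};

static void sync_pir_to_irr_dummy(struct kvm_vcpu *vcpu)
{
	(void)vcpu;    /* no posted interrupts on this host: nothing to merge */
}

static void disable_posted_interrupts(struct kvm_x86_ops *ops)
{
	ops->deliver_posted_interrupt = NULL;              /* callers must check  */
	ops->sync_pir_to_irr = sync_pir_to_irr_dummy;      /* callers stay simple */
}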
svm.c
4263 static struct kvm_x86_ops svm_x86_ops = {
/linux-4.4.14/arch/x86/include/asm/
kvm_host.h
760 struct kvm_x86_ops {
923 extern struct kvm_x86_ops *kvm_x86_ops;
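kvm_host.h declares the full struct kvm_x86_ops and the extern pointer (lines 760 and 923), and each vendor module fills one out with a designated initializer, as svm.c does at line 4263 above. A hedged sketch of how the two sides fit together, limited to callbacks that appear in these results and using demo_ names rather than svm_:

/* Declaration side (the real struct in kvm_host.h has far more members). */
struct kvm_vcpu;

struct kvm_x86_ops {
	int  (*get_cpl)(struct kvm_vcpu *vcpu);
	void (*set_dr7)(struct kvm_vcpu *vcpu, unsigned long value);
	int  (*get_tdp_level)(void);
	/* ... */
};
extern struct kvm_x86_ops *kvm_x86_ops;

/* Vendor side (svm.c style): a designated initializer pointing at the
 * module's own implementations; bodies here are placeholders. */
static int  demo_get_cpl(struct kvm_vcpu *vcpu)                      { (void)vcpu; return 0; }
static void demo_set_dr7(struct kvm_vcpu *vcpu, unsigned long value) { (void)vcpu; (void)value; }
static int  demo_get_tdp_level(void)                                 { return 4; }

static struct kvm_x86_ops demo_x86_ops = {
	.get_cpl       = demo_get_cpl,
	.set_dr7       = demo_set_dr7,
	.get_tdp_level = demo_get_tdp_level,
};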