Searched refs: kvm_vcpu (Results 1 - 163 of 163), sorted by relevance

/linux-4.4.14/arch/x86/kvm/
lapic.h
26 struct kvm_vcpu *vcpu;
45 int kvm_create_lapic(struct kvm_vcpu *vcpu);
46 void kvm_free_lapic(struct kvm_vcpu *vcpu);
48 int kvm_apic_has_interrupt(struct kvm_vcpu *vcpu);
49 int kvm_apic_accept_pic_intr(struct kvm_vcpu *vcpu);
50 int kvm_get_apic_interrupt(struct kvm_vcpu *vcpu);
51 void kvm_apic_accept_events(struct kvm_vcpu *vcpu);
52 void kvm_lapic_reset(struct kvm_vcpu *vcpu, bool init_event);
53 u64 kvm_lapic_get_cr8(struct kvm_vcpu *vcpu);
54 void kvm_lapic_set_tpr(struct kvm_vcpu *vcpu, unsigned long cr8);
55 void kvm_lapic_set_eoi(struct kvm_vcpu *vcpu);
56 void kvm_lapic_set_base(struct kvm_vcpu *vcpu, u64 value);
57 u64 kvm_lapic_get_base(struct kvm_vcpu *vcpu);
58 void kvm_apic_set_version(struct kvm_vcpu *vcpu);
61 void kvm_apic_update_irr(struct kvm_vcpu *vcpu, u32 *pir);
62 int kvm_apic_set_irq(struct kvm_vcpu *vcpu, struct kvm_lapic_irq *irq,
69 u64 kvm_get_apic_base(struct kvm_vcpu *vcpu);
70 int kvm_set_apic_base(struct kvm_vcpu *vcpu, struct msr_data *msr_info);
71 void kvm_apic_post_state_restore(struct kvm_vcpu *vcpu,
73 int kvm_lapic_find_highest_irr(struct kvm_vcpu *vcpu);
75 u64 kvm_get_lapic_tscdeadline_msr(struct kvm_vcpu *vcpu);
76 void kvm_set_lapic_tscdeadline_msr(struct kvm_vcpu *vcpu, u64 data);
78 void kvm_apic_write_nodecode(struct kvm_vcpu *vcpu, u32 offset);
79 void kvm_apic_set_eoi_accelerated(struct kvm_vcpu *vcpu, int vector);
81 int kvm_lapic_set_vapic_addr(struct kvm_vcpu *vcpu, gpa_t vapic_addr);
82 void kvm_lapic_sync_from_vapic(struct kvm_vcpu *vcpu);
83 void kvm_lapic_sync_to_vapic(struct kvm_vcpu *vcpu);
85 int kvm_x2apic_msr_write(struct kvm_vcpu *vcpu, u32 msr, u64 data);
86 int kvm_x2apic_msr_read(struct kvm_vcpu *vcpu, u32 msr, u64 *data);
88 int kvm_hv_vapic_msr_write(struct kvm_vcpu *vcpu, u32 msr, u64 data);
89 int kvm_hv_vapic_msr_read(struct kvm_vcpu *vcpu, u32 msr, u64 *data);
91 static inline bool kvm_hv_vapic_assist_page_enabled(struct kvm_vcpu *vcpu) kvm_hv_vapic_assist_page_enabled()
96 int kvm_lapic_enable_pv_eoi(struct kvm_vcpu *vcpu, u64 data);
106 static inline bool kvm_vcpu_has_lapic(struct kvm_vcpu *vcpu) kvm_vcpu_has_lapic()
131 static inline bool kvm_apic_present(struct kvm_vcpu *vcpu) kvm_apic_present()
136 static inline int kvm_lapic_enabled(struct kvm_vcpu *vcpu) kvm_lapic_enabled()
146 static inline bool kvm_vcpu_apic_vid_enabled(struct kvm_vcpu *vcpu) kvm_vcpu_apic_vid_enabled()
151 static inline bool kvm_apic_has_events(struct kvm_vcpu *vcpu) kvm_apic_has_events()
162 static inline int kvm_lapic_latched_init(struct kvm_vcpu *vcpu) kvm_lapic_latched_init()
167 bool kvm_apic_pending_eoi(struct kvm_vcpu *vcpu, int vector);
169 void wait_lapic_expire(struct kvm_vcpu *vcpu);
172 struct kvm_vcpu **dest_vcpu);
kvm_cache_regs.h
9 static inline unsigned long kvm_register_read(struct kvm_vcpu *vcpu, kvm_register_read()
18 static inline void kvm_register_write(struct kvm_vcpu *vcpu, kvm_register_write()
27 static inline unsigned long kvm_rip_read(struct kvm_vcpu *vcpu) kvm_rip_read()
32 static inline void kvm_rip_write(struct kvm_vcpu *vcpu, unsigned long val) kvm_rip_write()
37 static inline u64 kvm_pdptr_read(struct kvm_vcpu *vcpu, int index) kvm_pdptr_read()
48 static inline ulong kvm_read_cr0_bits(struct kvm_vcpu *vcpu, ulong mask) kvm_read_cr0_bits()
56 static inline ulong kvm_read_cr0(struct kvm_vcpu *vcpu) kvm_read_cr0()
61 static inline ulong kvm_read_cr4_bits(struct kvm_vcpu *vcpu, ulong mask) kvm_read_cr4_bits()
69 static inline ulong kvm_read_cr3(struct kvm_vcpu *vcpu) kvm_read_cr3()
76 static inline ulong kvm_read_cr4(struct kvm_vcpu *vcpu) kvm_read_cr4()
81 static inline u64 kvm_read_edx_eax(struct kvm_vcpu *vcpu) kvm_read_edx_eax()
87 static inline void enter_guest_mode(struct kvm_vcpu *vcpu) enter_guest_mode()
92 static inline void leave_guest_mode(struct kvm_vcpu *vcpu) leave_guest_mode()
97 static inline bool is_guest_mode(struct kvm_vcpu *vcpu) is_guest_mode()
102 static inline bool is_smm(struct kvm_vcpu *vcpu) is_smm()
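A minimal sketch (not part of the kernel tree listed above) of how the kvm_cache_regs.h accessors shown for this file are typically combined; the helper name and the insn_len parameter are illustrative assumptions:

	/* Advance the guest RIP by a known instruction length, using only
	 * kvm_rip_read()/kvm_rip_write() from the prototypes above. */
	static void example_skip_insn(struct kvm_vcpu *vcpu, unsigned int insn_len)
	{
		unsigned long rip = kvm_rip_read(vcpu);

		kvm_rip_write(vcpu, rip + insn_len);
	}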
x86.h
9 static inline void kvm_clear_exception_queue(struct kvm_vcpu *vcpu) kvm_clear_exception_queue()
14 static inline void kvm_queue_interrupt(struct kvm_vcpu *vcpu, u8 vector, kvm_queue_interrupt()
22 static inline void kvm_clear_interrupt_queue(struct kvm_vcpu *vcpu) kvm_clear_interrupt_queue()
27 static inline bool kvm_event_needs_reinjection(struct kvm_vcpu *vcpu) kvm_event_needs_reinjection()
38 static inline bool is_protmode(struct kvm_vcpu *vcpu) is_protmode()
43 static inline int is_long_mode(struct kvm_vcpu *vcpu) is_long_mode()
52 static inline bool is_64_bit_mode(struct kvm_vcpu *vcpu) is_64_bit_mode()
62 static inline bool mmu_is_nested(struct kvm_vcpu *vcpu) mmu_is_nested()
67 static inline int is_pae(struct kvm_vcpu *vcpu) is_pae()
72 static inline int is_pse(struct kvm_vcpu *vcpu) is_pse()
77 static inline int is_paging(struct kvm_vcpu *vcpu) is_paging()
87 static inline void vcpu_cache_mmio_info(struct kvm_vcpu *vcpu, vcpu_cache_mmio_info()
96 static inline bool vcpu_match_mmio_gen(struct kvm_vcpu *vcpu) vcpu_match_mmio_gen()
107 static inline void vcpu_clear_mmio_info(struct kvm_vcpu *vcpu, gva_t gva) vcpu_clear_mmio_info()
115 static inline bool vcpu_match_mmio_gva(struct kvm_vcpu *vcpu, unsigned long gva) vcpu_match_mmio_gva()
124 static inline bool vcpu_match_mmio_gpa(struct kvm_vcpu *vcpu, gpa_t gpa) vcpu_match_mmio_gpa()
133 static inline unsigned long kvm_register_readl(struct kvm_vcpu *vcpu, kvm_register_readl()
141 static inline void kvm_register_writel(struct kvm_vcpu *vcpu, kvm_register_writel()
160 void kvm_before_handle_nmi(struct kvm_vcpu *vcpu);
161 void kvm_after_handle_nmi(struct kvm_vcpu *vcpu);
162 void kvm_set_pending_timer(struct kvm_vcpu *vcpu);
163 int kvm_inject_realmode_interrupt(struct kvm_vcpu *vcpu, int irq, int inc_eip);
165 void kvm_write_tsc(struct kvm_vcpu *vcpu, struct msr_data *msr);
175 void kvm_vcpu_mtrr_init(struct kvm_vcpu *vcpu);
176 u8 kvm_mtrr_get_guest_memory_type(struct kvm_vcpu *vcpu, gfn_t gfn);
177 bool kvm_mtrr_valid(struct kvm_vcpu *vcpu, u32 msr, u64 data);
178 int kvm_mtrr_set_msr(struct kvm_vcpu *vcpu, u32 msr, u64 data);
179 int kvm_mtrr_get_msr(struct kvm_vcpu *vcpu, u32 msr, u64 *pdata);
180 bool kvm_mtrr_check_gfn_range_consistency(struct kvm_vcpu *vcpu, gfn_t gfn,
pmu.h
5 #define pmu_to_vcpu(pmu) (container_of((pmu), struct kvm_vcpu, arch.pmu))
23 struct kvm_pmc *(*msr_idx_to_pmc)(struct kvm_vcpu *vcpu, unsigned idx);
24 int (*is_valid_msr_idx)(struct kvm_vcpu *vcpu, unsigned idx);
25 bool (*is_valid_msr)(struct kvm_vcpu *vcpu, u32 msr);
26 int (*get_msr)(struct kvm_vcpu *vcpu, u32 msr, u64 *data);
27 int (*set_msr)(struct kvm_vcpu *vcpu, struct msr_data *msr_info);
28 void (*refresh)(struct kvm_vcpu *vcpu);
29 void (*init)(struct kvm_vcpu *vcpu);
30 void (*reset)(struct kvm_vcpu *vcpu);
104 void kvm_pmu_deliver_pmi(struct kvm_vcpu *vcpu);
105 void kvm_pmu_handle_event(struct kvm_vcpu *vcpu);
106 int kvm_pmu_rdpmc(struct kvm_vcpu *vcpu, unsigned pmc, u64 *data);
107 int kvm_pmu_is_valid_msr_idx(struct kvm_vcpu *vcpu, unsigned idx);
108 bool kvm_pmu_is_valid_msr(struct kvm_vcpu *vcpu, u32 msr);
109 int kvm_pmu_get_msr(struct kvm_vcpu *vcpu, u32 msr, u64 *data);
110 int kvm_pmu_set_msr(struct kvm_vcpu *vcpu, struct msr_data *msr_info);
111 void kvm_pmu_refresh(struct kvm_vcpu *vcpu);
112 void kvm_pmu_reset(struct kvm_vcpu *vcpu);
113 void kvm_pmu_init(struct kvm_vcpu *vcpu);
114 void kvm_pmu_destroy(struct kvm_vcpu *vcpu);
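The pmu.h entries above describe a table of per-vendor callbacks plus the kvm_pmu_* wrappers that dispatch into it (see the pmu_amd.c and pmu_intel.c hits below). A hedged sketch of that dispatch pattern, using an assumed table name rather than the kernel's actual one:

	/* Illustrative callback table shaped like the function-pointer members
	 * listed above; "example_pmu_ops" is an assumed name for this sketch. */
	struct example_pmu_ops {
		bool (*is_valid_msr)(struct kvm_vcpu *vcpu, u32 msr);
		int  (*get_msr)(struct kvm_vcpu *vcpu, u32 msr, u64 *data);
	};

	static int example_pmu_get_msr(const struct example_pmu_ops *ops,
				       struct kvm_vcpu *vcpu, u32 msr, u64 *data)
	{
		if (!ops->is_valid_msr(vcpu, msr))
			return 1;	/* non-zero is treated as an access error by the caller */
		return ops->get_msr(vcpu, msr, data);
	}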
cpuid.h
6 int kvm_update_cpuid(struct kvm_vcpu *vcpu);
7 struct kvm_cpuid_entry2 *kvm_find_cpuid_entry(struct kvm_vcpu *vcpu,
12 int kvm_vcpu_ioctl_set_cpuid(struct kvm_vcpu *vcpu,
15 int kvm_vcpu_ioctl_set_cpuid2(struct kvm_vcpu *vcpu,
18 int kvm_vcpu_ioctl_get_cpuid2(struct kvm_vcpu *vcpu,
21 void kvm_cpuid(struct kvm_vcpu *vcpu, u32 *eax, u32 *ebx, u32 *ecx, u32 *edx);
23 int cpuid_query_maxphyaddr(struct kvm_vcpu *vcpu);
25 static inline int cpuid_maxphyaddr(struct kvm_vcpu *vcpu) cpuid_maxphyaddr()
30 static inline bool guest_cpuid_has_xsave(struct kvm_vcpu *vcpu) guest_cpuid_has_xsave()
41 static inline bool guest_cpuid_has_mtrr(struct kvm_vcpu *vcpu) guest_cpuid_has_mtrr()
49 static inline bool guest_cpuid_has_tsc_adjust(struct kvm_vcpu *vcpu) guest_cpuid_has_tsc_adjust()
57 static inline bool guest_cpuid_has_smep(struct kvm_vcpu *vcpu) guest_cpuid_has_smep()
65 static inline bool guest_cpuid_has_smap(struct kvm_vcpu *vcpu) guest_cpuid_has_smap()
73 static inline bool guest_cpuid_has_fsgsbase(struct kvm_vcpu *vcpu) guest_cpuid_has_fsgsbase()
81 static inline bool guest_cpuid_has_longmode(struct kvm_vcpu *vcpu) guest_cpuid_has_longmode()
89 static inline bool guest_cpuid_has_osvw(struct kvm_vcpu *vcpu) guest_cpuid_has_osvw()
97 static inline bool guest_cpuid_has_pcid(struct kvm_vcpu *vcpu) guest_cpuid_has_pcid()
105 static inline bool guest_cpuid_has_x2apic(struct kvm_vcpu *vcpu) guest_cpuid_has_x2apic()
113 static inline bool guest_cpuid_is_amd(struct kvm_vcpu *vcpu) guest_cpuid_is_amd()
121 static inline bool guest_cpuid_has_gbpages(struct kvm_vcpu *vcpu) guest_cpuid_has_gbpages()
129 static inline bool guest_cpuid_has_rtm(struct kvm_vcpu *vcpu) guest_cpuid_has_rtm()
137 static inline bool guest_cpuid_has_mpx(struct kvm_vcpu *vcpu) guest_cpuid_has_mpx()
145 static inline bool guest_cpuid_has_pcommit(struct kvm_vcpu *vcpu) guest_cpuid_has_pcommit()
153 static inline bool guest_cpuid_has_rdtscp(struct kvm_vcpu *vcpu) guest_cpuid_has_rdtscp()
166 static inline bool guest_cpuid_has_nrips(struct kvm_vcpu *vcpu) guest_cpuid_has_nrips()
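The guest_cpuid_has_*() helpers above follow one pattern: look up a guest CPUID leaf with kvm_find_cpuid_entry() and test a feature bit. A hedged sketch of that pattern (leaf 7, EBX bit 7 = SMEP is used purely as an example, and the helper name is illustrative):

	static bool example_guest_has_smep(struct kvm_vcpu *vcpu)
	{
		struct kvm_cpuid_entry2 *best = kvm_find_cpuid_entry(vcpu, 7, 0);

		return best && (best->ebx & (1u << 7));
	}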
ioapic.h
9 struct kvm_vcpu;
109 void kvm_rtc_eoi_tracking_restore_one(struct kvm_vcpu *vcpu);
110 bool kvm_apic_match_dest(struct kvm_vcpu *vcpu, struct kvm_lapic *source,
112 int kvm_apic_compare_prio(struct kvm_vcpu *vcpu1, struct kvm_vcpu *vcpu2);
113 void kvm_ioapic_update_eoi(struct kvm_vcpu *vcpu, int vector,
124 void kvm_ioapic_scan_entry(struct kvm_vcpu *vcpu, u64 *eoi_exit_bitmap);
125 void kvm_scan_ioapic_routes(struct kvm_vcpu *vcpu, u64 *eoi_exit_bitmap);
irq.c
34 int kvm_cpu_has_pending_timer(struct kvm_vcpu *vcpu) kvm_cpu_has_pending_timer()
43 static int pending_userspace_extint(struct kvm_vcpu *v) pending_userspace_extint()
52 static int kvm_cpu_has_extint(struct kvm_vcpu *v) kvm_cpu_has_extint()
71 int kvm_cpu_has_injectable_intr(struct kvm_vcpu *v) kvm_cpu_has_injectable_intr()
89 int kvm_cpu_has_interrupt(struct kvm_vcpu *v) kvm_cpu_has_interrupt()
105 static int kvm_cpu_get_extint(struct kvm_vcpu *v) kvm_cpu_get_extint()
122 int kvm_cpu_get_interrupt(struct kvm_vcpu *v) kvm_cpu_get_interrupt()
138 void kvm_inject_pending_timer_irqs(struct kvm_vcpu *vcpu) kvm_inject_pending_timer_irqs()
145 void __kvm_migrate_timers(struct kvm_vcpu *vcpu) __kvm_migrate_timers()
irq.h
39 struct kvm_vcpu;
112 static inline int lapic_in_kernel(struct kvm_vcpu *vcpu) lapic_in_kernel()
122 void kvm_inject_pending_timer_irqs(struct kvm_vcpu *vcpu);
123 void kvm_inject_apic_timer_irqs(struct kvm_vcpu *vcpu);
124 void kvm_apic_nmi_wd_deliver(struct kvm_vcpu *vcpu);
125 void __kvm_migrate_apic_timer(struct kvm_vcpu *vcpu);
126 void __kvm_migrate_pit_timer(struct kvm_vcpu *vcpu);
127 void __kvm_migrate_timers(struct kvm_vcpu *vcpu);
129 int apic_has_pending_timer(struct kvm_vcpu *vcpu);
hyperv.h
27 int kvm_hv_set_msr_common(struct kvm_vcpu *vcpu, u32 msr, u64 data, bool host);
28 int kvm_hv_get_msr_common(struct kvm_vcpu *vcpu, u32 msr, u64 *pdata);
30 int kvm_hv_hypercall(struct kvm_vcpu *vcpu);
pmu_amd.c
71 static int amd_is_valid_msr_idx(struct kvm_vcpu *vcpu, unsigned idx) amd_is_valid_msr_idx()
81 static struct kvm_pmc *amd_msr_idx_to_pmc(struct kvm_vcpu *vcpu, unsigned idx) amd_msr_idx_to_pmc()
94 static bool amd_is_valid_msr(struct kvm_vcpu *vcpu, u32 msr) amd_is_valid_msr()
105 static int amd_pmu_get_msr(struct kvm_vcpu *vcpu, u32 msr, u64 *data) amd_pmu_get_msr()
126 static int amd_pmu_set_msr(struct kvm_vcpu *vcpu, struct msr_data *msr_info) amd_pmu_set_msr()
153 static void amd_pmu_refresh(struct kvm_vcpu *vcpu) amd_pmu_refresh()
167 static void amd_pmu_init(struct kvm_vcpu *vcpu) amd_pmu_init()
179 static void amd_pmu_reset(struct kvm_vcpu *vcpu) amd_pmu_reset()
hyperv.c
52 static int kvm_hv_msr_get_crash_data(struct kvm_vcpu *vcpu, kvm_hv_msr_get_crash_data()
64 static int kvm_hv_msr_get_crash_ctl(struct kvm_vcpu *vcpu, u64 *pdata) kvm_hv_msr_get_crash_ctl()
72 static int kvm_hv_msr_set_crash_ctl(struct kvm_vcpu *vcpu, u64 data, bool host) kvm_hv_msr_set_crash_ctl()
95 static int kvm_hv_msr_set_crash_data(struct kvm_vcpu *vcpu, kvm_hv_msr_set_crash_data()
107 static int kvm_hv_set_msr_pw(struct kvm_vcpu *vcpu, u32 msr, u64 data, kvm_hv_set_msr_pw()
190 static int kvm_hv_set_msr(struct kvm_vcpu *vcpu, u32 msr, u64 data, bool host) kvm_hv_set_msr()
238 static int kvm_hv_get_msr_pw(struct kvm_vcpu *vcpu, u32 msr, u64 *pdata) kvm_hv_get_msr_pw()
277 static int kvm_hv_get_msr(struct kvm_vcpu *vcpu, u32 msr, u64 *pdata) kvm_hv_get_msr()
285 struct kvm_vcpu *v; kvm_hv_get_msr()
315 int kvm_hv_set_msr_common(struct kvm_vcpu *vcpu, u32 msr, u64 data, bool host) kvm_hv_set_msr_common()
328 int kvm_hv_get_msr_common(struct kvm_vcpu *vcpu, u32 msr, u64 *pdata) kvm_hv_get_msr_common()
346 int kvm_hv_hypercall(struct kvm_vcpu *vcpu) kvm_hv_hypercall()
pmu.c
53 struct kvm_vcpu *vcpu = pmu_to_vcpu(pmu); kvm_pmi_trigger_fn()
214 void kvm_pmu_handle_event(struct kvm_vcpu *vcpu) kvm_pmu_handle_event()
235 int kvm_pmu_is_valid_msr_idx(struct kvm_vcpu *vcpu, unsigned idx) kvm_pmu_is_valid_msr_idx()
240 int kvm_pmu_rdpmc(struct kvm_vcpu *vcpu, unsigned idx, u64 *data) kvm_pmu_rdpmc()
258 void kvm_pmu_deliver_pmi(struct kvm_vcpu *vcpu) kvm_pmu_deliver_pmi()
264 bool kvm_pmu_is_valid_msr(struct kvm_vcpu *vcpu, u32 msr) kvm_pmu_is_valid_msr()
269 int kvm_pmu_get_msr(struct kvm_vcpu *vcpu, u32 msr, u64 *data) kvm_pmu_get_msr()
274 int kvm_pmu_set_msr(struct kvm_vcpu *vcpu, struct msr_data *msr_info) kvm_pmu_set_msr()
283 void kvm_pmu_refresh(struct kvm_vcpu *vcpu) kvm_pmu_refresh()
288 void kvm_pmu_reset(struct kvm_vcpu *vcpu) kvm_pmu_reset()
296 void kvm_pmu_init(struct kvm_vcpu *vcpu) kvm_pmu_init()
306 void kvm_pmu_destroy(struct kvm_vcpu *vcpu) kvm_pmu_destroy()
mmu_audit.c
35 typedef void (*inspect_spte_fn) (struct kvm_vcpu *vcpu, u64 *sptep, int level);
37 static void __mmu_spte_walk(struct kvm_vcpu *vcpu, struct kvm_mmu_page *sp, __mmu_spte_walk()
57 static void mmu_spte_walk(struct kvm_vcpu *vcpu, inspect_spte_fn fn) mmu_spte_walk()
96 static void audit_mappings(struct kvm_vcpu *vcpu, u64 *sptep, int level) audit_mappings()
163 static void audit_sptes_have_rmaps(struct kvm_vcpu *vcpu, u64 *sptep, int level) audit_sptes_have_rmaps()
169 static void audit_spte_after_sync(struct kvm_vcpu *vcpu, u64 *sptep, int level) audit_spte_after_sync()
226 static void audit_spte(struct kvm_vcpu *vcpu, u64 *sptep, int level) audit_spte()
233 static void audit_vcpu_spte(struct kvm_vcpu *vcpu) audit_vcpu_spte()
241 static void __kvm_mmu_audit(struct kvm_vcpu *vcpu, int point) __kvm_mmu_audit()
253 static inline void kvm_mmu_audit(struct kvm_vcpu *vcpu, int point) kvm_mmu_audit()
x86.c
76 container_of(ctxt, struct kvm_vcpu, arch.emulate_ctxt)
90 #define VCPU_STAT(x) offsetof(struct kvm_vcpu, stat.x), KVM_STAT_VCPU
92 static void update_cr8_intercept(struct kvm_vcpu *vcpu);
93 static void process_nmi(struct kvm_vcpu *vcpu);
94 static void __kvm_set_rflags(struct kvm_vcpu *vcpu, unsigned long rflags);
189 static inline void kvm_async_pf_hash_reset(struct kvm_vcpu *vcpu) kvm_async_pf_hash_reset()
279 u64 kvm_get_apic_base(struct kvm_vcpu *vcpu) kvm_get_apic_base()
285 int kvm_set_apic_base(struct kvm_vcpu *vcpu, struct msr_data *msr_info) kvm_set_apic_base()
361 static void kvm_multiple_exception(struct kvm_vcpu *vcpu, kvm_multiple_exception()
405 void kvm_queue_exception(struct kvm_vcpu *vcpu, unsigned nr) kvm_queue_exception()
411 void kvm_requeue_exception(struct kvm_vcpu *vcpu, unsigned nr) kvm_requeue_exception()
417 void kvm_complete_insn_gp(struct kvm_vcpu *vcpu, int err) kvm_complete_insn_gp()
426 void kvm_inject_page_fault(struct kvm_vcpu *vcpu, struct x86_exception *fault) kvm_inject_page_fault()
434 static bool kvm_propagate_fault(struct kvm_vcpu *vcpu, struct x86_exception *fault) kvm_propagate_fault()
444 void kvm_inject_nmi(struct kvm_vcpu *vcpu) kvm_inject_nmi()
451 void kvm_queue_exception_e(struct kvm_vcpu *vcpu, unsigned nr, u32 error_code) kvm_queue_exception_e()
457 void kvm_requeue_exception_e(struct kvm_vcpu *vcpu, unsigned nr, u32 error_code) kvm_requeue_exception_e()
467 bool kvm_require_cpl(struct kvm_vcpu *vcpu, int required_cpl) kvm_require_cpl()
476 bool kvm_require_dr(struct kvm_vcpu *vcpu, int dr) kvm_require_dr()
491 int kvm_read_guest_page_mmu(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu, kvm_read_guest_page_mmu()
510 static int kvm_read_nested_guest_page(struct kvm_vcpu *vcpu, gfn_t gfn, kvm_read_nested_guest_page()
520 int load_pdptrs(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu, unsigned long cr3) load_pdptrs()
556 static bool pdptrs_changed(struct kvm_vcpu *vcpu) pdptrs_changed()
583 int kvm_set_cr0(struct kvm_vcpu *vcpu, unsigned long cr0) kvm_set_cr0()
642 void kvm_lmsw(struct kvm_vcpu *vcpu, unsigned long msw) kvm_lmsw()
648 static void kvm_load_guest_xcr0(struct kvm_vcpu *vcpu) kvm_load_guest_xcr0()
658 static void kvm_put_guest_xcr0(struct kvm_vcpu *vcpu) kvm_put_guest_xcr0()
667 static int __kvm_set_xcr(struct kvm_vcpu *vcpu, u32 index, u64 xcr) __kvm_set_xcr()
707 int kvm_set_xcr(struct kvm_vcpu *vcpu, u32 index, u64 xcr) kvm_set_xcr()
718 int kvm_set_cr4(struct kvm_vcpu *vcpu, unsigned long cr4) kvm_set_cr4()
771 int kvm_set_cr3(struct kvm_vcpu *vcpu, unsigned long cr3) kvm_set_cr3()
797 int kvm_set_cr8(struct kvm_vcpu *vcpu, unsigned long cr8) kvm_set_cr8()
809 unsigned long kvm_get_cr8(struct kvm_vcpu *vcpu) kvm_get_cr8()
818 static void kvm_update_dr0123(struct kvm_vcpu *vcpu) kvm_update_dr0123()
829 static void kvm_update_dr6(struct kvm_vcpu *vcpu) kvm_update_dr6()
835 static void kvm_update_dr7(struct kvm_vcpu *vcpu) kvm_update_dr7()
849 static u64 kvm_dr6_fixed(struct kvm_vcpu *vcpu) kvm_dr6_fixed()
858 static int __kvm_set_dr(struct kvm_vcpu *vcpu, int dr, unsigned long val) __kvm_set_dr()
887 int kvm_set_dr(struct kvm_vcpu *vcpu, int dr, unsigned long val) kvm_set_dr()
897 int kvm_get_dr(struct kvm_vcpu *vcpu, int dr, unsigned long *val) kvm_get_dr()
921 bool kvm_rdpmc(struct kvm_vcpu *vcpu) kvm_rdpmc()
981 bool kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer) kvm_valid_efer()
1006 static int set_efer(struct kvm_vcpu *vcpu, u64 efer) set_efer()
1040 int kvm_set_msr(struct kvm_vcpu *vcpu, struct msr_data *msr) kvm_set_msr()
1074 static int do_get_msr(struct kvm_vcpu *vcpu, unsigned index, u64 *data) do_get_msr()
1089 static int do_set_msr(struct kvm_vcpu *vcpu, unsigned index, u64 *data) do_set_msr()
1140 void kvm_set_pending_timer(struct kvm_vcpu *vcpu) kvm_set_pending_timer()
1243 static inline u64 nsec_to_cycles(struct kvm_vcpu *vcpu, u64 nsec) nsec_to_cycles()
1256 static int set_tsc_khz(struct kvm_vcpu *vcpu, u32 user_tsc_khz, bool scale) set_tsc_khz()
1292 static int kvm_set_tsc_khz(struct kvm_vcpu *vcpu, u32 this_tsc_khz) kvm_set_tsc_khz()
1325 static u64 compute_guest_tsc(struct kvm_vcpu *vcpu, s64 kernel_ns) compute_guest_tsc()
1334 static void kvm_track_tsc_matching(struct kvm_vcpu *vcpu) kvm_track_tsc_matching()
1362 static void update_ia32_tsc_adjust_msr(struct kvm_vcpu *vcpu, s64 offset) update_ia32_tsc_adjust_msr()
1383 u64 kvm_scale_tsc(struct kvm_vcpu *vcpu, u64 tsc) kvm_scale_tsc()
1395 static u64 kvm_compute_tsc_offset(struct kvm_vcpu *vcpu, u64 target_tsc) kvm_compute_tsc_offset()
1404 u64 kvm_read_l1_tsc(struct kvm_vcpu *vcpu, u64 host_tsc) kvm_read_l1_tsc()
1410 void kvm_write_tsc(struct kvm_vcpu *vcpu, struct msr_data *msr) kvm_write_tsc()
1535 static inline void adjust_tsc_offset_guest(struct kvm_vcpu *vcpu, adjust_tsc_offset_guest()
1541 static inline void adjust_tsc_offset_host(struct kvm_vcpu *vcpu, s64 adjustment) adjust_tsc_offset_host()
1689 struct kvm_vcpu *vcpu; kvm_gen_update_masterclock()
1708 static int kvm_guest_time_update(struct kvm_vcpu *v) kvm_guest_time_update()
1865 struct kvm_vcpu *vcpu; kvmclock_update_fn()
1873 static void kvm_gen_kvmclock_update(struct kvm_vcpu *v) kvm_gen_kvmclock_update()
1899 static int set_msr_mce(struct kvm_vcpu *vcpu, u32 msr, u64 data) set_msr_mce()
1935 static int xen_hvm_config(struct kvm_vcpu *vcpu, u64 data) xen_hvm_config()
1966 static int kvm_pv_enable_async_pf(struct kvm_vcpu *vcpu, u64 data) kvm_pv_enable_async_pf()
1991 static void kvmclock_reset(struct kvm_vcpu *vcpu) kvmclock_reset()
1996 static void accumulate_steal_time(struct kvm_vcpu *vcpu) accumulate_steal_time()
2008 static void record_steal_time(struct kvm_vcpu *vcpu) record_steal_time()
2027 int kvm_set_msr_common(struct kvm_vcpu *vcpu, struct msr_data *msr_info) kvm_set_msr_common()
2243 int kvm_get_msr(struct kvm_vcpu *vcpu, struct msr_data *msr) kvm_get_msr()
2249 static int get_msr_mce(struct kvm_vcpu *vcpu, u32 msr, u64 *pdata) get_msr_mce()
2284 int kvm_get_msr_common(struct kvm_vcpu *vcpu, struct msr_data *msr_info) kvm_get_msr_common()
2451 static int __msr_io(struct kvm_vcpu *vcpu, struct kvm_msrs *msrs, __msr_io()
2453 int (*do_msr)(struct kvm_vcpu *vcpu, __msr_io()
2472 static int msr_io(struct kvm_vcpu *vcpu, struct kvm_msrs __user *user_msrs, msr_io()
2473 int (*do_msr)(struct kvm_vcpu *vcpu, msr_io()
2690 static bool need_emulate_wbinvd(struct kvm_vcpu *vcpu) need_emulate_wbinvd()
2695 void kvm_arch_vcpu_load(struct kvm_vcpu *vcpu, int cpu) kvm_arch_vcpu_load()
2741 void kvm_arch_vcpu_put(struct kvm_vcpu *vcpu) kvm_arch_vcpu_put()
2748 static int kvm_vcpu_ioctl_get_lapic(struct kvm_vcpu *vcpu, kvm_vcpu_ioctl_get_lapic()
2757 static int kvm_vcpu_ioctl_set_lapic(struct kvm_vcpu *vcpu, kvm_vcpu_ioctl_set_lapic()
2766 static int kvm_cpu_accept_dm_intr(struct kvm_vcpu *vcpu) kvm_cpu_accept_dm_intr()
2778 static int kvm_vcpu_ready_for_interrupt_injection(struct kvm_vcpu *vcpu) kvm_vcpu_ready_for_interrupt_injection()
2786 static int kvm_vcpu_ioctl_interrupt(struct kvm_vcpu *vcpu, kvm_vcpu_ioctl_interrupt()
2813 static int kvm_vcpu_ioctl_nmi(struct kvm_vcpu *vcpu) kvm_vcpu_ioctl_nmi()
2820 static int kvm_vcpu_ioctl_smi(struct kvm_vcpu *vcpu) kvm_vcpu_ioctl_smi()
2827 static int vcpu_ioctl_tpr_access_reporting(struct kvm_vcpu *vcpu, vcpu_ioctl_tpr_access_reporting()
2836 static int kvm_vcpu_ioctl_x86_setup_mce(struct kvm_vcpu *vcpu, kvm_vcpu_ioctl_x86_setup_mce()
2859 static int kvm_vcpu_ioctl_x86_set_mce(struct kvm_vcpu *vcpu, kvm_vcpu_ioctl_x86_set_mce()
2907 static void kvm_vcpu_ioctl_x86_get_vcpu_events(struct kvm_vcpu *vcpu, kvm_vcpu_ioctl_x86_get_vcpu_events()
2944 static int kvm_vcpu_ioctl_x86_set_vcpu_events(struct kvm_vcpu *vcpu, kvm_vcpu_ioctl_x86_set_vcpu_events()
2998 static void kvm_vcpu_ioctl_x86_get_debugregs(struct kvm_vcpu *vcpu, kvm_vcpu_ioctl_x86_get_debugregs()
3011 static int kvm_vcpu_ioctl_x86_set_debugregs(struct kvm_vcpu *vcpu, kvm_vcpu_ioctl_x86_set_debugregs()
3034 static void fill_xsave(u8 *dest, struct kvm_vcpu *vcpu) fill_xsave()
3070 static void load_xsave(struct kvm_vcpu *vcpu, u8 *src) load_xsave()
3108 static void kvm_vcpu_ioctl_x86_get_xsave(struct kvm_vcpu *vcpu, kvm_vcpu_ioctl_x86_get_xsave()
3123 static int kvm_vcpu_ioctl_x86_set_xsave(struct kvm_vcpu *vcpu, kvm_vcpu_ioctl_x86_set_xsave()
3147 static void kvm_vcpu_ioctl_x86_get_xcrs(struct kvm_vcpu *vcpu, kvm_vcpu_ioctl_x86_get_xcrs()
3161 static int kvm_vcpu_ioctl_x86_set_xcrs(struct kvm_vcpu *vcpu, kvm_vcpu_ioctl_x86_set_xcrs()
3190 static int kvm_set_guest_paused(struct kvm_vcpu *vcpu) kvm_set_guest_paused()
3202 struct kvm_vcpu *vcpu = filp->private_data; kvm_arch_vcpu_ioctl()
3471 int kvm_arch_vcpu_fault(struct kvm_vcpu *vcpu, struct vm_fault *vmf) kvm_arch_vcpu_fault()
4052 static int vcpu_mmio_write(struct kvm_vcpu *vcpu, gpa_t addr, int len, vcpu_mmio_write()
4073 static int vcpu_mmio_read(struct kvm_vcpu *vcpu, gpa_t addr, int len, void *v) vcpu_mmio_read()
4095 static void kvm_set_segment(struct kvm_vcpu *vcpu, kvm_set_segment()
4101 void kvm_get_segment(struct kvm_vcpu *vcpu, kvm_get_segment()
4107 gpa_t translate_nested_gpa(struct kvm_vcpu *vcpu, gpa_t gpa, u32 access, translate_nested_gpa()
4121 gpa_t kvm_mmu_gva_to_gpa_read(struct kvm_vcpu *vcpu, gva_t gva, kvm_mmu_gva_to_gpa_read()
4128 gpa_t kvm_mmu_gva_to_gpa_fetch(struct kvm_vcpu *vcpu, gva_t gva, kvm_mmu_gva_to_gpa_fetch()
4136 gpa_t kvm_mmu_gva_to_gpa_write(struct kvm_vcpu *vcpu, gva_t gva, kvm_mmu_gva_to_gpa_write()
4145 gpa_t kvm_mmu_gva_to_gpa_system(struct kvm_vcpu *vcpu, gva_t gva, kvm_mmu_gva_to_gpa_system()
4152 struct kvm_vcpu *vcpu, u32 access, kvm_read_guest_virt_helper()
4187 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); kvm_fetch_guest_virt()
4213 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); kvm_read_guest_virt()
4225 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); kvm_read_guest_virt_system()
4232 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); kvm_read_guest_phys_system()
4243 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); kvm_write_guest_virt_system()
4272 static int vcpu_mmio_gva_to_gpa(struct kvm_vcpu *vcpu, unsigned long gva, vcpu_mmio_gva_to_gpa()
4305 int emulator_write_phys(struct kvm_vcpu *vcpu, gpa_t gpa, emulator_write_phys()
4318 int (*read_write_prepare)(struct kvm_vcpu *vcpu, void *val,
4320 int (*read_write_emulate)(struct kvm_vcpu *vcpu, gpa_t gpa,
4322 int (*read_write_mmio)(struct kvm_vcpu *vcpu, gpa_t gpa,
4324 int (*read_write_exit_mmio)(struct kvm_vcpu *vcpu, gpa_t gpa,
4329 static int read_prepare(struct kvm_vcpu *vcpu, void *val, int bytes) read_prepare()
4341 static int read_emulate(struct kvm_vcpu *vcpu, gpa_t gpa, read_emulate()
4347 static int write_emulate(struct kvm_vcpu *vcpu, gpa_t gpa, write_emulate()
4353 static int write_mmio(struct kvm_vcpu *vcpu, gpa_t gpa, int bytes, void *val) write_mmio()
4359 static int read_exit_mmio(struct kvm_vcpu *vcpu, gpa_t gpa, read_exit_mmio()
4366 static int write_exit_mmio(struct kvm_vcpu *vcpu, gpa_t gpa, write_exit_mmio()
4392 struct kvm_vcpu *vcpu, emulator_read_write_onepage()
4438 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); emulator_read_write()
4523 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); emulator_cmpxchg_emulated()
4581 static int kernel_pio(struct kvm_vcpu *vcpu, void *pd) kernel_pio()
4596 static int emulator_pio_in_out(struct kvm_vcpu *vcpu, int size, emulator_pio_in_out()
4624 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); emulator_pio_in_emulated()
4646 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); emulator_pio_out_emulated()
4653 static unsigned long get_segment_base(struct kvm_vcpu *vcpu, int seg) get_segment_base()
4663 int kvm_emulate_wbinvd_noskip(struct kvm_vcpu *vcpu) kvm_emulate_wbinvd_noskip()
4681 int kvm_emulate_wbinvd(struct kvm_vcpu *vcpu) kvm_emulate_wbinvd()
4715 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); emulator_get_cr()
4744 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); emulator_set_cr()
4840 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); emulator_set_segment()
4895 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); emulator_get_smbase()
4902 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); emulator_set_smbase()
5009 static void toggle_interruptibility(struct kvm_vcpu *vcpu, u32 mask) toggle_interruptibility()
5028 static bool inject_emulated_exception(struct kvm_vcpu *vcpu) inject_emulated_exception()
5042 static void init_emulate_ctxt(struct kvm_vcpu *vcpu) init_emulate_ctxt()
5065 int kvm_inject_realmode_interrupt(struct kvm_vcpu *vcpu, int irq, int inc_eip) kvm_inject_realmode_interrupt()
5093 static int handle_emulation_failure(struct kvm_vcpu *vcpu) handle_emulation_failure()
5110 static bool reexecute_instruction(struct kvm_vcpu *vcpu, gva_t cr2, reexecute_instruction()
5184 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); retry_instruction()
5225 static int complete_emulated_mmio(struct kvm_vcpu *vcpu);
5226 static int complete_emulated_pio(struct kvm_vcpu *vcpu);
5228 static void kvm_smm_changed(struct kvm_vcpu *vcpu) kvm_smm_changed()
5246 static void kvm_set_hflags(struct kvm_vcpu *vcpu, unsigned emul_flags) kvm_set_hflags()
5271 static void kvm_vcpu_check_singlestep(struct kvm_vcpu *vcpu, unsigned long rflags, int *r) kvm_vcpu_check_singlestep()
5305 static bool kvm_vcpu_check_breakpoint(struct kvm_vcpu *vcpu, int *r) kvm_vcpu_check_breakpoint()
5344 int x86_emulate_instruction(struct kvm_vcpu *vcpu, x86_emulate_instruction()
5479 int kvm_fast_pio_out(struct kvm_vcpu *vcpu, int size, unsigned short port) kvm_fast_pio_out()
5514 struct kvm_vcpu *vcpu; kvmclock_cpufreq_notifier()
5648 static DEFINE_PER_CPU(struct kvm_vcpu *, current_vcpu);
5681 void kvm_before_handle_nmi(struct kvm_vcpu *vcpu) kvm_before_handle_nmi()
5687 void kvm_after_handle_nmi(struct kvm_vcpu *vcpu) kvm_after_handle_nmi()
5728 struct kvm_vcpu *vcpu; pvclock_gtod_update_fn()
5843 int kvm_vcpu_halt(struct kvm_vcpu *vcpu) kvm_vcpu_halt()
5856 int kvm_emulate_halt(struct kvm_vcpu *vcpu) kvm_emulate_halt()
5881 int kvm_emulate_hypercall(struct kvm_vcpu *vcpu) kvm_emulate_hypercall()
5936 struct kvm_vcpu *vcpu = emul_to_vcpu(ctxt); emulator_fix_hypercall()
5945 static int dm_request_for_irq_injection(struct kvm_vcpu *vcpu) dm_request_for_irq_injection()
5951 static void post_kvm_run_save(struct kvm_vcpu *vcpu) post_kvm_run_save()
5964 static void update_cr8_intercept(struct kvm_vcpu *vcpu) update_cr8_intercept()
5987 static int inject_pending_event(struct kvm_vcpu *vcpu, bool req_int_win) inject_pending_event()
6057 static void process_nmi(struct kvm_vcpu *vcpu) process_nmi()
6091 static void process_smi_save_seg_32(struct kvm_vcpu *vcpu, char *buf, int n) process_smi_save_seg_32()
6110 static void process_smi_save_seg_64(struct kvm_vcpu *vcpu, char *buf, int n) process_smi_save_seg_64()
6127 static void process_smi_save_state_32(struct kvm_vcpu *vcpu, char *buf) process_smi_save_state_32()
6177 static void process_smi_save_state_64(struct kvm_vcpu *vcpu, char *buf) process_smi_save_state_64()
6234 static void process_smi(struct kvm_vcpu *vcpu) process_smi()
6308 static void vcpu_scan_ioapic(struct kvm_vcpu *vcpu) vcpu_scan_ioapic()
6324 static void kvm_vcpu_flush_tlb(struct kvm_vcpu *vcpu) kvm_vcpu_flush_tlb()
6330 void kvm_vcpu_reload_apic_access_page(struct kvm_vcpu *vcpu) kvm_vcpu_reload_apic_access_page()
6369 static int vcpu_enter_guest(struct kvm_vcpu *vcpu) vcpu_enter_guest()
6621 static inline int vcpu_block(struct kvm *kvm, struct kvm_vcpu *vcpu) vcpu_block()
6654 static inline bool kvm_vcpu_running(struct kvm_vcpu *vcpu) kvm_vcpu_running()
6660 static int vcpu_run(struct kvm_vcpu *vcpu) vcpu_run()
6709 static inline int complete_emulated_io(struct kvm_vcpu *vcpu) complete_emulated_io()
6720 static int complete_emulated_pio(struct kvm_vcpu *vcpu) complete_emulated_pio()
6745 static int complete_emulated_mmio(struct kvm_vcpu *vcpu) complete_emulated_mmio()
6791 int kvm_arch_vcpu_ioctl_run(struct kvm_vcpu *vcpu, struct kvm_run *kvm_run) kvm_arch_vcpu_ioctl_run()
6819 int (*cui)(struct kvm_vcpu *) = vcpu->arch.complete_userspace_io; kvm_arch_vcpu_ioctl_run()
6837 int kvm_arch_vcpu_ioctl_get_regs(struct kvm_vcpu *vcpu, struct kvm_regs *regs) kvm_arch_vcpu_ioctl_get_regs()
6875 int kvm_arch_vcpu_ioctl_set_regs(struct kvm_vcpu *vcpu, struct kvm_regs *regs) kvm_arch_vcpu_ioctl_set_regs()
6909 void kvm_get_cs_db_l_bits(struct kvm_vcpu *vcpu, int *db, int *l) kvm_get_cs_db_l_bits()
6919 int kvm_arch_vcpu_ioctl_get_sregs(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_get_sregs()
6958 int kvm_arch_vcpu_ioctl_get_mpstate(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_get_mpstate()
6971 int kvm_arch_vcpu_ioctl_set_mpstate(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_set_mpstate()
6987 int kvm_task_switch(struct kvm_vcpu *vcpu, u16 tss_selector, int idt_index, kvm_task_switch()
7008 int kvm_arch_vcpu_ioctl_set_sregs(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_set_sregs()
7089 int kvm_arch_vcpu_ioctl_set_guest_debug(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_set_guest_debug()
7147 int kvm_arch_vcpu_ioctl_translate(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_translate()
7165 int kvm_arch_vcpu_ioctl_get_fpu(struct kvm_vcpu *vcpu, struct kvm_fpu *fpu) kvm_arch_vcpu_ioctl_get_fpu()
7182 int kvm_arch_vcpu_ioctl_set_fpu(struct kvm_vcpu *vcpu, struct kvm_fpu *fpu) kvm_arch_vcpu_ioctl_set_fpu()
7199 static void fx_init(struct kvm_vcpu *vcpu) fx_init()
7214 void kvm_load_guest_fpu(struct kvm_vcpu *vcpu) kvm_load_guest_fpu()
7230 void kvm_put_guest_fpu(struct kvm_vcpu *vcpu) kvm_put_guest_fpu()
7254 void kvm_arch_vcpu_free(struct kvm_vcpu *vcpu) kvm_arch_vcpu_free()
7262 struct kvm_vcpu *kvm_arch_vcpu_create(struct kvm *kvm, kvm_arch_vcpu_create()
7265 struct kvm_vcpu *vcpu; kvm_arch_vcpu_create()
7277 int kvm_arch_vcpu_setup(struct kvm_vcpu *vcpu) kvm_arch_vcpu_setup()
7291 void kvm_arch_vcpu_postcreate(struct kvm_vcpu *vcpu) kvm_arch_vcpu_postcreate()
7311 void kvm_arch_vcpu_destroy(struct kvm_vcpu *vcpu) kvm_arch_vcpu_destroy()
7324 void kvm_vcpu_reset(struct kvm_vcpu *vcpu, bool init_event) kvm_vcpu_reset()
7365 void kvm_vcpu_deliver_sipi_vector(struct kvm_vcpu *vcpu, u8 vector) kvm_vcpu_deliver_sipi_vector()
7379 struct kvm_vcpu *vcpu; kvm_arch_hardware_enable()
7509 bool kvm_vcpu_is_reset_bsp(struct kvm_vcpu *vcpu) kvm_vcpu_is_reset_bsp()
7515 bool kvm_vcpu_is_bsp(struct kvm_vcpu *vcpu) kvm_vcpu_is_bsp()
7520 bool kvm_vcpu_compatible(struct kvm_vcpu *vcpu) kvm_vcpu_compatible()
7527 int kvm_arch_vcpu_init(struct kvm_vcpu *vcpu) kvm_arch_vcpu_init()
7607 void kvm_arch_vcpu_uninit(struct kvm_vcpu *vcpu) kvm_arch_vcpu_uninit()
7622 void kvm_arch_sched_in(struct kvm_vcpu *vcpu, int cpu) kvm_arch_sched_in()
7656 static void kvm_unload_vcpu_mmu(struct kvm_vcpu *vcpu) kvm_unload_vcpu_mmu()
7668 struct kvm_vcpu *vcpu; kvm_free_vcpus()
7984 static inline bool kvm_vcpu_has_events(struct kvm_vcpu *vcpu) kvm_vcpu_has_events()
8008 int kvm_arch_vcpu_runnable(struct kvm_vcpu *vcpu) kvm_arch_vcpu_runnable()
8016 int kvm_arch_vcpu_should_kick(struct kvm_vcpu *vcpu) kvm_arch_vcpu_should_kick()
8021 int kvm_arch_interrupt_allowed(struct kvm_vcpu *vcpu) kvm_arch_interrupt_allowed()
8026 unsigned long kvm_get_linear_rip(struct kvm_vcpu *vcpu) kvm_get_linear_rip()
8035 bool kvm_is_linear_rip(struct kvm_vcpu *vcpu, unsigned long linear_rip) kvm_is_linear_rip()
8041 unsigned long kvm_get_rflags(struct kvm_vcpu *vcpu) kvm_get_rflags()
8052 static void __kvm_set_rflags(struct kvm_vcpu *vcpu, unsigned long rflags) __kvm_set_rflags()
8060 void kvm_set_rflags(struct kvm_vcpu *vcpu, unsigned long rflags) kvm_set_rflags()
8067 void kvm_arch_async_page_ready(struct kvm_vcpu *vcpu, struct kvm_async_pf *work) kvm_arch_async_page_ready()
8096 static void kvm_add_async_pf_gfn(struct kvm_vcpu *vcpu, gfn_t gfn) kvm_add_async_pf_gfn()
8106 static u32 kvm_async_pf_gfn_slot(struct kvm_vcpu *vcpu, gfn_t gfn) kvm_async_pf_gfn_slot()
8119 bool kvm_find_async_pf_gfn(struct kvm_vcpu *vcpu, gfn_t gfn) kvm_find_async_pf_gfn()
8124 static void kvm_del_async_pf_gfn(struct kvm_vcpu *vcpu, gfn_t gfn) kvm_del_async_pf_gfn()
8147 static int apf_put_user(struct kvm_vcpu *vcpu, u32 val) apf_put_user()
8154 void kvm_arch_async_page_not_present(struct kvm_vcpu *vcpu, kvm_arch_async_page_not_present()
8176 void kvm_arch_async_page_present(struct kvm_vcpu *vcpu, kvm_arch_async_page_present()
8200 bool kvm_arch_can_inject_async_page_present(struct kvm_vcpu *vcpu) kvm_arch_can_inject_async_page_present()
lapic.c
88 bool kvm_apic_pending_eoi(struct kvm_vcpu *vcpu, int vector) kvm_apic_pending_eoi()
161 struct kvm_vcpu *vcpu; recalculate_apic_map()
282 void kvm_apic_set_version(struct kvm_vcpu *vcpu) kvm_apic_set_version()
346 void kvm_apic_update_irr(struct kvm_vcpu *vcpu, u32 *pir) kvm_apic_update_irr()
391 struct kvm_vcpu *vcpu; apic_clear_irr()
409 struct kvm_vcpu *vcpu; apic_set_isr()
456 struct kvm_vcpu *vcpu; apic_clear_isr()
479 int kvm_lapic_find_highest_irr(struct kvm_vcpu *vcpu) kvm_lapic_find_highest_irr()
499 int kvm_apic_set_irq(struct kvm_vcpu *vcpu, struct kvm_lapic_irq *irq, kvm_apic_set_irq()
508 static int pv_eoi_put_user(struct kvm_vcpu *vcpu, u8 val) pv_eoi_put_user()
515 static int pv_eoi_get_user(struct kvm_vcpu *vcpu, u8 *val) pv_eoi_get_user()
522 static inline bool pv_eoi_enabled(struct kvm_vcpu *vcpu) pv_eoi_enabled()
527 static bool pv_eoi_get_pending(struct kvm_vcpu *vcpu) pv_eoi_get_pending()
536 static void pv_eoi_set_pending(struct kvm_vcpu *vcpu) pv_eoi_set_pending()
546 static void pv_eoi_clr_pending(struct kvm_vcpu *vcpu) pv_eoi_clr_pending()
651 bool kvm_apic_match_dest(struct kvm_vcpu *vcpu, struct kvm_lapic *source, kvm_apic_match_dest()
761 struct kvm_vcpu **dest_vcpu) kvm_intr_is_single_vcpu_fast()
828 struct kvm_vcpu *vcpu = apic->vcpu; __apic_accept_irq()
928 int kvm_apic_compare_prio(struct kvm_vcpu *vcpu1, struct kvm_vcpu *vcpu2) kvm_apic_compare_prio()
986 void kvm_apic_set_eoi_accelerated(struct kvm_vcpu *vcpu, int vector) kvm_apic_set_eoi_accelerated()
1054 struct kvm_vcpu *vcpu = apic->vcpu; __report_tpr_access()
1157 static int apic_mmio_read(struct kvm_vcpu *vcpu, struct kvm_io_device *this, apic_mmio_read()
1197 struct kvm_vcpu *vcpu = apic->vcpu; apic_timer_expired()
1219 static bool lapic_timer_int_injected(struct kvm_vcpu *vcpu) lapic_timer_int_injected()
1237 void wait_lapic_expire(struct kvm_vcpu *vcpu) wait_lapic_expire()
1311 struct kvm_vcpu *vcpu = apic->vcpu; start_apic_timer()
1483 static int apic_mmio_write(struct kvm_vcpu *vcpu, struct kvm_io_device *this, apic_mmio_write()
1516 void kvm_lapic_set_eoi(struct kvm_vcpu *vcpu) kvm_lapic_set_eoi()
1524 void kvm_apic_write_nodecode(struct kvm_vcpu *vcpu, u32 offset) kvm_apic_write_nodecode()
1538 void kvm_free_lapic(struct kvm_vcpu *vcpu) kvm_free_lapic()
1565 u64 kvm_get_lapic_tscdeadline_msr(struct kvm_vcpu *vcpu) kvm_get_lapic_tscdeadline_msr()
1576 void kvm_set_lapic_tscdeadline_msr(struct kvm_vcpu *vcpu, u64 data) kvm_set_lapic_tscdeadline_msr()
1589 void kvm_lapic_set_tpr(struct kvm_vcpu *vcpu, unsigned long cr8) kvm_lapic_set_tpr()
1600 u64 kvm_lapic_get_cr8(struct kvm_vcpu *vcpu) kvm_lapic_get_cr8()
1612 void kvm_lapic_set_base(struct kvm_vcpu *vcpu, u64 value) kvm_lapic_set_base()
1655 void kvm_lapic_reset(struct kvm_vcpu *vcpu, bool init_event) kvm_lapic_reset()
1727 int apic_has_pending_timer(struct kvm_vcpu *vcpu) apic_has_pending_timer()
1753 void kvm_apic_nmi_wd_deliver(struct kvm_vcpu *vcpu) kvm_apic_nmi_wd_deliver()
1780 int kvm_create_lapic(struct kvm_vcpu *vcpu) kvm_create_lapic()
1824 int kvm_apic_has_interrupt(struct kvm_vcpu *vcpu) kvm_apic_has_interrupt()
1840 int kvm_apic_accept_pic_intr(struct kvm_vcpu *vcpu) kvm_apic_accept_pic_intr()
1853 void kvm_inject_apic_timer_irqs(struct kvm_vcpu *vcpu) kvm_inject_apic_timer_irqs()
1868 int kvm_get_apic_interrupt(struct kvm_vcpu *vcpu) kvm_get_apic_interrupt()
1889 void kvm_apic_post_state_restore(struct kvm_vcpu *vcpu, kvm_apic_post_state_restore()
1925 void __kvm_migrate_apic_timer(struct kvm_vcpu *vcpu) __kvm_migrate_apic_timer()
1944 static void apic_sync_pv_eoi_from_guest(struct kvm_vcpu *vcpu, apic_sync_pv_eoi_from_guest()
1974 void kvm_lapic_sync_from_vapic(struct kvm_vcpu *vcpu) kvm_lapic_sync_from_vapic()
1997 static void apic_sync_pv_eoi_to_guest(struct kvm_vcpu *vcpu, apic_sync_pv_eoi_to_guest()
2017 void kvm_lapic_sync_to_vapic(struct kvm_vcpu *vcpu) kvm_lapic_sync_to_vapic()
2041 int kvm_lapic_set_vapic_addr(struct kvm_vcpu *vcpu, gpa_t vapic_addr) kvm_lapic_set_vapic_addr()
2057 int kvm_x2apic_msr_write(struct kvm_vcpu *vcpu, u32 msr, u64 data) kvm_x2apic_msr_write()
2074 int kvm_x2apic_msr_read(struct kvm_vcpu *vcpu, u32 msr, u64 *data) kvm_x2apic_msr_read()
2098 int kvm_hv_vapic_msr_write(struct kvm_vcpu *vcpu, u32 reg, u64 data) kvm_hv_vapic_msr_write()
2111 int kvm_hv_vapic_msr_read(struct kvm_vcpu *vcpu, u32 reg, u64 *data) kvm_hv_vapic_msr_read()
2129 int kvm_lapic_enable_pv_eoi(struct kvm_vcpu *vcpu, u64 data) kvm_lapic_enable_pv_eoi()
2142 void kvm_apic_accept_events(struct kvm_vcpu *vcpu) kvm_apic_accept_events()
pmu_intel.c
117 static int intel_is_valid_msr_idx(struct kvm_vcpu *vcpu, unsigned idx) intel_is_valid_msr_idx()
128 static struct kvm_pmc *intel_msr_idx_to_pmc(struct kvm_vcpu *vcpu, intel_msr_idx_to_pmc()
145 static bool intel_is_valid_msr(struct kvm_vcpu *vcpu, u32 msr) intel_is_valid_msr()
167 static int intel_pmu_get_msr(struct kvm_vcpu *vcpu, u32 msr, u64 *data) intel_pmu_get_msr()
199 static int intel_pmu_set_msr(struct kvm_vcpu *vcpu, struct msr_data *msr_info) intel_pmu_set_msr()
257 static void intel_pmu_refresh(struct kvm_vcpu *vcpu) intel_pmu_refresh()
308 static void intel_pmu_init(struct kvm_vcpu *vcpu) intel_pmu_init()
326 static void intel_pmu_reset(struct kvm_vcpu *vcpu) intel_pmu_reset()
mmu.h
56 reset_shadow_zero_bits_mask(struct kvm_vcpu *vcpu, struct kvm_mmu *context);
74 int handle_mmio_page_fault(struct kvm_vcpu *vcpu, u64 addr, bool direct);
75 void kvm_init_shadow_mmu(struct kvm_vcpu *vcpu);
76 void kvm_init_shadow_ept_mmu(struct kvm_vcpu *vcpu, bool execonly);
87 static inline int kvm_mmu_reload(struct kvm_vcpu *vcpu) kvm_mmu_reload()
138 static inline bool is_write_protection(struct kvm_vcpu *vcpu) is_write_protection()
147 static inline bool permission_fault(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu, permission_fault()
mmu.c
179 static void mmu_free_roots(struct kvm_vcpu *vcpu);
226 static unsigned int kvm_current_mmio_generation(struct kvm_vcpu *vcpu) kvm_current_mmio_generation()
231 static void mark_mmio_spte(struct kvm_vcpu *vcpu, u64 *sptep, u64 gfn, mark_mmio_spte()
261 static bool set_mmio_spte(struct kvm_vcpu *vcpu, u64 *sptep, gfn_t gfn, set_mmio_spte()
272 static bool check_mmio_spte(struct kvm_vcpu *vcpu, u64 spte) check_mmio_spte()
299 static int is_nx(struct kvm_vcpu *vcpu) is_nx()
632 static void walk_shadow_page_lockless_begin(struct kvm_vcpu *vcpu) walk_shadow_page_lockless_begin()
647 static void walk_shadow_page_lockless_end(struct kvm_vcpu *vcpu) walk_shadow_page_lockless_end()
709 static int mmu_topup_memory_caches(struct kvm_vcpu *vcpu) mmu_topup_memory_caches()
726 static void mmu_free_memory_caches(struct kvm_vcpu *vcpu) mmu_free_memory_caches()
744 static struct pte_list_desc *mmu_alloc_pte_list_desc(struct kvm_vcpu *vcpu) mmu_alloc_pte_list_desc()
834 static int has_wrprotected_page(struct kvm_vcpu *vcpu, gfn_t gfn, int level) has_wrprotected_page()
871 gfn_to_memslot_dirty_bitmap(struct kvm_vcpu *vcpu, gfn_t gfn, gfn_to_memslot_dirty_bitmap()
883 static int mapping_level(struct kvm_vcpu *vcpu, gfn_t large_gfn, mapping_level()
923 static int pte_list_add(struct kvm_vcpu *vcpu, u64 *spte, pte_list_add()
1057 static bool rmap_can_add(struct kvm_vcpu *vcpu) rmap_can_add()
1065 static int rmap_add(struct kvm_vcpu *vcpu, u64 *spte, gfn_t gfn) rmap_add()
1174 static void drop_large_spte(struct kvm_vcpu *vcpu, u64 *sptep) drop_large_spte()
1342 static bool rmap_write_protect(struct kvm_vcpu *vcpu, u64 gfn) rmap_write_protect()
1614 static void rmap_recycle(struct kvm_vcpu *vcpu, u64 *spte, gfn_t gfn) rmap_recycle()
1701 static void mmu_page_add_parent_pte(struct kvm_vcpu *vcpu, mmu_page_add_parent_pte()
1723 static struct kvm_mmu_page *kvm_mmu_alloc_page(struct kvm_vcpu *vcpu, kvm_mmu_alloc_page()
1766 static int nonpaging_sync_page(struct kvm_vcpu *vcpu, nonpaging_sync_page()
1772 static void nonpaging_invlpg(struct kvm_vcpu *vcpu, gva_t gva) nonpaging_invlpg()
1776 static void nonpaging_update_pte(struct kvm_vcpu *vcpu, nonpaging_update_pte()
1896 static int __kvm_sync_page(struct kvm_vcpu *vcpu, struct kvm_mmu_page *sp, __kvm_sync_page()
1916 static int kvm_sync_page_transient(struct kvm_vcpu *vcpu, kvm_sync_page_transient()
1932 static void kvm_mmu_audit(struct kvm_vcpu *vcpu, int point) { } mmu_audit_disable()
1936 static int kvm_sync_page(struct kvm_vcpu *vcpu, struct kvm_mmu_page *sp, kvm_sync_page()
1943 static void kvm_sync_pages(struct kvm_vcpu *vcpu, gfn_t gfn) kvm_sync_pages()
2027 static void mmu_sync_children(struct kvm_vcpu *vcpu, mmu_sync_children()
2081 static struct kvm_mmu_page *kvm_mmu_get_page(struct kvm_vcpu *vcpu, kvm_mmu_get_page()
2153 struct kvm_vcpu *vcpu, u64 addr) shadow_walk_init()
2217 static void validate_direct_spte(struct kvm_vcpu *vcpu, u64 *sptep, validate_direct_spte()
2437 static void __kvm_unsync_page(struct kvm_vcpu *vcpu, struct kvm_mmu_page *sp) __kvm_unsync_page()
2446 static void kvm_unsync_pages(struct kvm_vcpu *vcpu, gfn_t gfn) kvm_unsync_pages()
2458 static int mmu_need_write_protect(struct kvm_vcpu *vcpu, gfn_t gfn, mmu_need_write_protect()
2487 static int set_spte(struct kvm_vcpu *vcpu, u64 *sptep, set_spte()
2567 static void mmu_set_spte(struct kvm_vcpu *vcpu, u64 *sptep, mmu_set_spte()
2629 static pfn_t pte_prefetch_gfn_to_pfn(struct kvm_vcpu *vcpu, gfn_t gfn, pte_prefetch_gfn_to_pfn()
2641 static int direct_pte_prefetch_many(struct kvm_vcpu *vcpu, direct_pte_prefetch_many()
2668 static void __direct_pte_prefetch(struct kvm_vcpu *vcpu, __direct_pte_prefetch()
2691 static void direct_pte_prefetch(struct kvm_vcpu *vcpu, u64 *sptep) direct_pte_prefetch()
2711 static int __direct_map(struct kvm_vcpu *vcpu, gpa_t v, int write, __direct_map()
2762 static int kvm_handle_bad_page(struct kvm_vcpu *vcpu, gfn_t gfn, pfn_t pfn) kvm_handle_bad_page()
2781 static void transparent_hugepage_adjust(struct kvm_vcpu *vcpu, transparent_hugepage_adjust()
2822 static bool handle_abnormal_pfn(struct kvm_vcpu *vcpu, gva_t gva, gfn_t gfn, handle_abnormal_pfn()
2863 fast_pf_fix_direct_spte(struct kvm_vcpu *vcpu, struct kvm_mmu_page *sp, fast_pf_fix_direct_spte()
2899 static bool fast_page_fault(struct kvm_vcpu *vcpu, gva_t gva, int level, fast_page_fault()
2976 static bool try_async_pf(struct kvm_vcpu *vcpu, bool prefault, gfn_t gfn,
2978 static void make_mmu_pages_available(struct kvm_vcpu *vcpu);
2980 static int nonpaging_map(struct kvm_vcpu *vcpu, gva_t v, u32 error_code, nonpaging_map()
3035 static void mmu_free_roots(struct kvm_vcpu *vcpu) mmu_free_roots()
3080 static int mmu_check_root(struct kvm_vcpu *vcpu, gfn_t root_gfn) mmu_check_root()
3092 static int mmu_alloc_direct_roots(struct kvm_vcpu *vcpu) mmu_alloc_direct_roots()
3128 static int mmu_alloc_shadow_roots(struct kvm_vcpu *vcpu) mmu_alloc_shadow_roots()
3224 static int mmu_alloc_roots(struct kvm_vcpu *vcpu) mmu_alloc_roots()
3232 static void mmu_sync_roots(struct kvm_vcpu *vcpu) mmu_sync_roots()
3264 void kvm_mmu_sync_roots(struct kvm_vcpu *vcpu) kvm_mmu_sync_roots()
3272 static gpa_t nonpaging_gva_to_gpa(struct kvm_vcpu *vcpu, gva_t vaddr, nonpaging_gva_to_gpa()
3280 static gpa_t nonpaging_gva_to_gpa_nested(struct kvm_vcpu *vcpu, gva_t vaddr, nonpaging_gva_to_gpa_nested()
3308 static bool quickly_check_mmio_pf(struct kvm_vcpu *vcpu, u64 addr, bool direct) quickly_check_mmio_pf()
3318 walk_shadow_page_get_mmio_spte(struct kvm_vcpu *vcpu, u64 addr, u64 *sptep) walk_shadow_page_get_mmio_spte()
3362 int handle_mmio_page_fault(struct kvm_vcpu *vcpu, u64 addr, bool direct) handle_mmio_page_fault()
3397 static int nonpaging_page_fault(struct kvm_vcpu *vcpu, gva_t gva, nonpaging_page_fault()
3424 static int kvm_arch_setup_async_pf(struct kvm_vcpu *vcpu, gva_t gva, gfn_t gfn) kvm_arch_setup_async_pf()
3436 static bool can_do_async_pf(struct kvm_vcpu *vcpu) can_do_async_pf()
3445 static bool try_async_pf(struct kvm_vcpu *vcpu, bool prefault, gfn_t gfn, try_async_pf()
3472 check_hugepage_cache_consistency(struct kvm_vcpu *vcpu, gfn_t gfn, int level) check_hugepage_cache_consistency()
3481 static int tdp_page_fault(struct kvm_vcpu *vcpu, gva_t gpa, u32 error_code, tdp_page_fault()
3546 static void nonpaging_init_context(struct kvm_vcpu *vcpu, nonpaging_init_context()
3561 void kvm_mmu_new_cr3(struct kvm_vcpu *vcpu) kvm_mmu_new_cr3()
3566 static unsigned long get_cr3(struct kvm_vcpu *vcpu) get_cr3()
3571 static void inject_page_fault(struct kvm_vcpu *vcpu, inject_page_fault()
3577 static bool sync_mmio_spte(struct kvm_vcpu *vcpu, u64 *sptep, gfn_t gfn, sync_mmio_spte()
3617 __reset_rsvds_bits_mask(struct kvm_vcpu *vcpu, __reset_rsvds_bits_mask()
3699 static void reset_rsvds_bits_mask(struct kvm_vcpu *vcpu, reset_rsvds_bits_mask()
3742 static void reset_rsvds_bits_mask_ept(struct kvm_vcpu *vcpu, reset_rsvds_bits_mask_ept()
3755 reset_shadow_zero_bits_mask(struct kvm_vcpu *vcpu, struct kvm_mmu *context) reset_shadow_zero_bits_mask()
3782 reset_tdp_shadow_zero_bits_mask(struct kvm_vcpu *vcpu, reset_tdp_shadow_zero_bits_mask()
3802 reset_ept_shadow_zero_bits_mask(struct kvm_vcpu *vcpu, reset_ept_shadow_zero_bits_mask()
3809 static void update_permission_bitmask(struct kvm_vcpu *vcpu, update_permission_bitmask()
3874 static void update_last_pte_bitmap(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu) update_last_pte_bitmap()
3892 static void paging64_init_context_common(struct kvm_vcpu *vcpu, paging64_init_context_common()
3914 static void paging64_init_context(struct kvm_vcpu *vcpu, paging64_init_context()
3920 static void paging32_init_context(struct kvm_vcpu *vcpu, paging32_init_context()
3940 static void paging32E_init_context(struct kvm_vcpu *vcpu, paging32E_init_context()
3946 static void init_kvm_tdp_mmu(struct kvm_vcpu *vcpu) init_kvm_tdp_mmu()
3990 void kvm_init_shadow_mmu(struct kvm_vcpu *vcpu) kvm_init_shadow_mmu()
4019 void kvm_init_shadow_ept_mmu(struct kvm_vcpu *vcpu, bool execonly) kvm_init_shadow_ept_mmu()
4043 static void init_kvm_softmmu(struct kvm_vcpu *vcpu) init_kvm_softmmu()
4054 static void init_kvm_nested_mmu(struct kvm_vcpu *vcpu) init_kvm_nested_mmu()
4093 static void init_kvm_mmu(struct kvm_vcpu *vcpu) init_kvm_mmu()
4103 void kvm_mmu_reset_context(struct kvm_vcpu *vcpu) kvm_mmu_reset_context()
4110 int kvm_mmu_load(struct kvm_vcpu *vcpu) kvm_mmu_load()
4128 void kvm_mmu_unload(struct kvm_vcpu *vcpu) kvm_mmu_unload()
4135 static void mmu_pte_write_new_pte(struct kvm_vcpu *vcpu, mmu_pte_write_new_pte()
4161 static void mmu_pte_write_flush_tlb(struct kvm_vcpu *vcpu, bool zap_page, mmu_pte_write_flush_tlb()
4173 static u64 mmu_pte_write_fetch_gpte(struct kvm_vcpu *vcpu, gpa_t *gpa, mmu_pte_write_fetch_gpte()
4284 void kvm_mmu_pte_write(struct kvm_vcpu *vcpu, gpa_t gpa, kvm_mmu_pte_write()
4358 int kvm_mmu_unprotect_page_virt(struct kvm_vcpu *vcpu, gva_t gva) kvm_mmu_unprotect_page_virt()
4374 static void make_mmu_pages_available(struct kvm_vcpu *vcpu) make_mmu_pages_available()
4390 static bool is_mmio_page_fault(struct kvm_vcpu *vcpu, gva_t addr) is_mmio_page_fault()
4398 int kvm_mmu_page_fault(struct kvm_vcpu *vcpu, gva_t cr2, u32 error_code, kvm_mmu_page_fault()
4434 void kvm_mmu_invlpg(struct kvm_vcpu *vcpu, gva_t gva) kvm_mmu_invlpg()
4454 static void free_mmu_pages(struct kvm_vcpu *vcpu) free_mmu_pages()
4461 static int alloc_mmu_pages(struct kvm_vcpu *vcpu) alloc_mmu_pages()
4482 int kvm_mmu_create(struct kvm_vcpu *vcpu) kvm_mmu_create()
4492 void kvm_mmu_setup(struct kvm_vcpu *vcpu) kvm_mmu_setup()
4961 void kvm_mmu_destroy(struct kvm_vcpu *vcpu) kvm_mmu_destroy()
vmx.c
521 struct kvm_vcpu vcpu;
611 static inline struct vcpu_vmx *to_vmx(struct kvm_vcpu *vcpu) to_vmx()
616 static struct pi_desc *vcpu_to_pi_desc(struct kvm_vcpu *vcpu) vcpu_to_pi_desc()
837 static inline struct vmcs12 *get_vmcs12(struct kvm_vcpu *vcpu) get_vmcs12()
842 static struct page *nested_get_page(struct kvm_vcpu *vcpu, gpa_t addr) nested_get_page()
861 static unsigned long nested_ept_get_cr3(struct kvm_vcpu *vcpu);
867 static int vmx_cpu_uses_apicv(struct kvm_vcpu *vcpu);
869 static void vmx_set_segment(struct kvm_vcpu *vcpu,
871 static void vmx_get_segment(struct kvm_vcpu *vcpu,
873 static bool guest_state_valid(struct kvm_vcpu *vcpu);
875 static void vmx_sync_pir_to_irr_dummy(struct kvm_vcpu *vcpu);
954 static void ept_save_pdptrs(struct kvm_vcpu *vcpu);
1011 static inline bool cpu_need_tpr_shadow(struct kvm_vcpu *vcpu) cpu_need_tpr_shadow()
1128 static inline bool cpu_need_virtualize_apic_accesses(struct kvm_vcpu *vcpu) cpu_need_virtualize_apic_accesses()
1255 static void nested_vmx_vmexit(struct kvm_vcpu *vcpu, u32 exit_reason,
1258 static void nested_vmx_entry_failure(struct kvm_vcpu *vcpu,
1639 static void update_exception_bitmap(struct kvm_vcpu *vcpu) update_exception_bitmap()
1883 static void vmx_save_host_state(struct kvm_vcpu *vcpu) vmx_save_host_state()
1989 static void vmx_vcpu_pi_load(struct kvm_vcpu *vcpu, int cpu) vmx_vcpu_pi_load()
2039 static void vmx_vcpu_load(struct kvm_vcpu *vcpu, int cpu) vmx_vcpu_load()
2097 static void vmx_vcpu_pi_put(struct kvm_vcpu *vcpu) vmx_vcpu_pi_put()
2110 static void vmx_vcpu_put(struct kvm_vcpu *vcpu) vmx_vcpu_put()
2122 static void vmx_fpu_activate(struct kvm_vcpu *vcpu) vmx_fpu_activate()
2141 static void vmx_decache_cr0_guest_bits(struct kvm_vcpu *vcpu);
2159 static void vmx_fpu_deactivate(struct kvm_vcpu *vcpu) vmx_fpu_deactivate()
2186 static unsigned long vmx_get_rflags(struct kvm_vcpu *vcpu) vmx_get_rflags()
2203 static void vmx_set_rflags(struct kvm_vcpu *vcpu, unsigned long rflags) vmx_set_rflags()
2214 static u32 vmx_get_interrupt_shadow(struct kvm_vcpu *vcpu) vmx_get_interrupt_shadow()
2227 static void vmx_set_interrupt_shadow(struct kvm_vcpu *vcpu, int mask) vmx_set_interrupt_shadow()
2243 static void skip_emulated_instruction(struct kvm_vcpu *vcpu) skip_emulated_instruction()
2259 static int nested_vmx_check_exception(struct kvm_vcpu *vcpu, unsigned nr) nested_vmx_check_exception()
2272 static void vmx_queue_exception(struct kvm_vcpu *vcpu, unsigned nr, vmx_queue_exception()
2329 static void vmx_set_msr_bitmap(struct kvm_vcpu *vcpu) vmx_set_msr_bitmap()
2398 static u64 guest_read_tsc(struct kvm_vcpu *vcpu) guest_read_tsc()
2411 static u64 vmx_read_l1_tsc(struct kvm_vcpu *vcpu, u64 host_tsc) vmx_read_l1_tsc()
2421 static u64 vmx_read_tsc_offset(struct kvm_vcpu *vcpu) vmx_read_tsc_offset()
2429 static void vmx_write_tsc_offset(struct kvm_vcpu *vcpu, u64 offset) vmx_write_tsc_offset()
2452 static void vmx_adjust_tsc_offset_guest(struct kvm_vcpu *vcpu, s64 adjustment) vmx_adjust_tsc_offset_guest()
2465 static bool guest_cpuid_has_vmx(struct kvm_vcpu *vcpu) guest_cpuid_has_vmx()
2477 static inline bool nested_vmx_allowed(struct kvm_vcpu *vcpu) nested_vmx_allowed()
2682 static int vmx_get_vmx_msr(struct kvm_vcpu *vcpu, u32 msr_index, u64 *pdata) vmx_get_vmx_msr()
2783 static int vmx_get_msr(struct kvm_vcpu *vcpu, struct msr_data *msr_info) vmx_get_msr()
2849 static void vmx_leave_nested(struct kvm_vcpu *vcpu);
2856 static int vmx_set_msr(struct kvm_vcpu *vcpu, struct msr_data *msr_info) vmx_set_msr()
2967 static void vmx_cache_reg(struct kvm_vcpu *vcpu, enum kvm_reg reg) vmx_cache_reg()
3409 static bool emulation_required(struct kvm_vcpu *vcpu) emulation_required()
3414 static void fix_pmode_seg(struct kvm_vcpu *vcpu, int seg, fix_pmode_seg()
3433 static void enter_pmode(struct kvm_vcpu *vcpu) enter_pmode()
3506 static void enter_rmode(struct kvm_vcpu *vcpu) enter_rmode()
3554 static void vmx_set_efer(struct kvm_vcpu *vcpu, u64 efer) vmx_set_efer()
3581 static void enter_lmode(struct kvm_vcpu *vcpu) enter_lmode()
3598 static void exit_lmode(struct kvm_vcpu *vcpu) exit_lmode()
3606 static inline void __vmx_flush_tlb(struct kvm_vcpu *vcpu, int vpid) __vmx_flush_tlb()
3616 static void vmx_flush_tlb(struct kvm_vcpu *vcpu) vmx_flush_tlb()
3621 static void vmx_decache_cr0_guest_bits(struct kvm_vcpu *vcpu) vmx_decache_cr0_guest_bits()
3629 static void vmx_decache_cr3(struct kvm_vcpu *vcpu) vmx_decache_cr3()
3636 static void vmx_decache_cr4_guest_bits(struct kvm_vcpu *vcpu) vmx_decache_cr4_guest_bits()
3644 static void ept_load_pdptrs(struct kvm_vcpu *vcpu) ept_load_pdptrs()
3660 static void ept_save_pdptrs(struct kvm_vcpu *vcpu) ept_save_pdptrs()
3677 static int vmx_set_cr4(struct kvm_vcpu *vcpu, unsigned long cr4);
3681 struct kvm_vcpu *vcpu) ept_update_paging_mode_cr0()
3707 static void vmx_set_cr0(struct kvm_vcpu *vcpu, unsigned long cr0) vmx_set_cr0()
3762 static void vmx_set_cr3(struct kvm_vcpu *vcpu, unsigned long cr3) vmx_set_cr3()
3782 static int vmx_set_cr4(struct kvm_vcpu *vcpu, unsigned long cr4) vmx_set_cr4()
3834 static void vmx_get_segment(struct kvm_vcpu *vcpu, vmx_get_segment()
3871 static u64 vmx_get_segment_base(struct kvm_vcpu *vcpu, int seg) vmx_get_segment_base()
3882 static int vmx_get_cpl(struct kvm_vcpu *vcpu) vmx_get_cpl()
3914 static void vmx_set_segment(struct kvm_vcpu *vcpu, vmx_set_segment()
3955 static void vmx_get_cs_db_l_bits(struct kvm_vcpu *vcpu, int *db, int *l) vmx_get_cs_db_l_bits()
3963 static void vmx_get_idt(struct kvm_vcpu *vcpu, struct desc_ptr *dt) vmx_get_idt()
3969 static void vmx_set_idt(struct kvm_vcpu *vcpu, struct desc_ptr *dt) vmx_set_idt()
3975 static void vmx_get_gdt(struct kvm_vcpu *vcpu, struct desc_ptr *dt) vmx_get_gdt()
3981 static void vmx_set_gdt(struct kvm_vcpu *vcpu, struct desc_ptr *dt) vmx_set_gdt()
3987 static bool rmode_segment_valid(struct kvm_vcpu *vcpu, int seg) rmode_segment_valid()
4008 static bool code_segment_valid(struct kvm_vcpu *vcpu) code_segment_valid()
4036 static bool stack_segment_valid(struct kvm_vcpu *vcpu) stack_segment_valid()
4058 static bool data_segment_valid(struct kvm_vcpu *vcpu, int seg) data_segment_valid()
4083 static bool tr_valid(struct kvm_vcpu *vcpu) tr_valid()
4101 static bool ldtr_valid(struct kvm_vcpu *vcpu) ldtr_valid()
4119 static bool cs_ss_rpl_check(struct kvm_vcpu *vcpu) cs_ss_rpl_check()
4135 static bool guest_state_valid(struct kvm_vcpu *vcpu) guest_state_valid()
4493 static int vmx_cpu_uses_apicv(struct kvm_vcpu *vcpu) vmx_cpu_uses_apicv()
4498 static int vmx_complete_nested_posted_interrupt(struct kvm_vcpu *vcpu) vmx_complete_nested_posted_interrupt()
4535 static inline bool kvm_vcpu_trigger_posted_interrupt(struct kvm_vcpu *vcpu) kvm_vcpu_trigger_posted_interrupt()
4563 static int vmx_deliver_nested_posted_interrupt(struct kvm_vcpu *vcpu, vmx_deliver_nested_posted_interrupt()
4589 static void vmx_deliver_posted_interrupt(struct kvm_vcpu *vcpu, int vector) vmx_deliver_posted_interrupt()
4607 static void vmx_sync_pir_to_irr(struct kvm_vcpu *vcpu) vmx_sync_pir_to_irr()
4617 static void vmx_sync_pir_to_irr_dummy(struct kvm_vcpu *vcpu) vmx_sync_pir_to_irr_dummy()
4872 static void vmx_vcpu_reset(struct kvm_vcpu *vcpu, bool init_event) vmx_vcpu_reset()
4971 static bool nested_exit_on_intr(struct kvm_vcpu *vcpu) nested_exit_on_intr()
4981 static bool nested_exit_intr_ack_set(struct kvm_vcpu *vcpu) nested_exit_intr_ack_set()
4987 static bool nested_exit_on_nmi(struct kvm_vcpu *vcpu) nested_exit_on_nmi()
4993 static void enable_irq_window(struct kvm_vcpu *vcpu) enable_irq_window()
5002 static void enable_nmi_window(struct kvm_vcpu *vcpu) enable_nmi_window()
5017 static void vmx_inject_irq(struct kvm_vcpu *vcpu) vmx_inject_irq()
5044 static void vmx_inject_nmi(struct kvm_vcpu *vcpu) vmx_inject_nmi()
5075 static bool vmx_get_nmi_mask(struct kvm_vcpu *vcpu) vmx_get_nmi_mask()
5084 static void vmx_set_nmi_mask(struct kvm_vcpu *vcpu, bool masked) vmx_set_nmi_mask()
5104 static int vmx_nmi_allowed(struct kvm_vcpu *vcpu) vmx_nmi_allowed()
5117 static int vmx_interrupt_allowed(struct kvm_vcpu *vcpu) vmx_interrupt_allowed()
5137 static bool rmode_exception(struct kvm_vcpu *vcpu, int vec) rmode_exception()
5169 static int handle_rmode_exception(struct kvm_vcpu *vcpu, handle_rmode_exception()
5215 static int handle_machine_check(struct kvm_vcpu *vcpu) handle_machine_check()
5221 static int handle_exception(struct kvm_vcpu *vcpu) handle_exception()
5332 static int handle_external_interrupt(struct kvm_vcpu *vcpu) handle_external_interrupt()
5338 static int handle_triple_fault(struct kvm_vcpu *vcpu) handle_triple_fault()
5344 static int handle_io(struct kvm_vcpu *vcpu) handle_io()
5367 vmx_patch_hypercall(struct kvm_vcpu *vcpu, unsigned char *hypercall) vmx_patch_hypercall()
5377 static bool nested_cr0_valid(struct kvm_vcpu *vcpu, unsigned long val) nested_cr0_valid()
5390 static int handle_set_cr0(struct kvm_vcpu *vcpu, unsigned long val) handle_set_cr0()
5422 static int handle_set_cr4(struct kvm_vcpu *vcpu, unsigned long val) handle_set_cr4()
5440 static void handle_clts(struct kvm_vcpu *vcpu) handle_clts()
5455 static int handle_cr(struct kvm_vcpu *vcpu) handle_cr()
5534 static int handle_dr(struct kvm_vcpu *vcpu) handle_dr()
5602 static u64 vmx_get_dr6(struct kvm_vcpu *vcpu) vmx_get_dr6()
5607 static void vmx_set_dr6(struct kvm_vcpu *vcpu, unsigned long val) vmx_set_dr6()
5611 static void vmx_sync_dirty_debug_regs(struct kvm_vcpu *vcpu) vmx_sync_dirty_debug_regs()
5629 static void vmx_set_dr7(struct kvm_vcpu *vcpu, unsigned long val) vmx_set_dr7()
5634 static int handle_cpuid(struct kvm_vcpu *vcpu) handle_cpuid()
5640 static int handle_rdmsr(struct kvm_vcpu *vcpu) handle_rdmsr()
5662 static int handle_wrmsr(struct kvm_vcpu *vcpu) handle_wrmsr()
5683 static int handle_tpr_below_threshold(struct kvm_vcpu *vcpu) handle_tpr_below_threshold()
5689 static int handle_interrupt_window(struct kvm_vcpu *vcpu) handle_interrupt_window()
5704 static int handle_halt(struct kvm_vcpu *vcpu) handle_halt()
5709 static int handle_vmcall(struct kvm_vcpu *vcpu) handle_vmcall()
5715 static int handle_invd(struct kvm_vcpu *vcpu) handle_invd()
5720 static int handle_invlpg(struct kvm_vcpu *vcpu) handle_invlpg()
5729 static int handle_rdpmc(struct kvm_vcpu *vcpu) handle_rdpmc()
5739 static int handle_wbinvd(struct kvm_vcpu *vcpu) handle_wbinvd()
5745 static int handle_xsetbv(struct kvm_vcpu *vcpu) handle_xsetbv()
5755 static int handle_xsaves(struct kvm_vcpu *vcpu) handle_xsaves()
5762 static int handle_xrstors(struct kvm_vcpu *vcpu) handle_xrstors()
5769 static int handle_apic_access(struct kvm_vcpu *vcpu) handle_apic_access()
5792 static int handle_apic_eoi_induced(struct kvm_vcpu *vcpu) handle_apic_eoi_induced()
5802 static int handle_apic_write(struct kvm_vcpu *vcpu) handle_apic_write()
5812 static int handle_task_switch(struct kvm_vcpu *vcpu) handle_task_switch()
5877 static int handle_ept_violation(struct kvm_vcpu *vcpu) handle_ept_violation()
5925 static int handle_ept_misconfig(struct kvm_vcpu *vcpu) handle_ept_misconfig()
5957 static int handle_nmi_window(struct kvm_vcpu *vcpu) handle_nmi_window()
5971 static int handle_invalid_guest_state(struct kvm_vcpu *vcpu) handle_invalid_guest_state()
6049 static void grow_ple_window(struct kvm_vcpu *vcpu) grow_ple_window()
6062 static void shrink_ple_window(struct kvm_vcpu *vcpu) shrink_ple_window()
6096 struct kvm_vcpu *vcpu; wakeup_handler()
6349 static int handle_pause(struct kvm_vcpu *vcpu) handle_pause()
6360 static int handle_nop(struct kvm_vcpu *vcpu) handle_nop()
6366 static int handle_mwait(struct kvm_vcpu *vcpu) handle_mwait()
6372 static int handle_monitor_trap(struct kvm_vcpu *vcpu) handle_monitor_trap()
6377 static int handle_monitor(struct kvm_vcpu *vcpu) handle_monitor()
6475 static void nested_vmx_succeed(struct kvm_vcpu *vcpu) nested_vmx_succeed()
6482 static void nested_vmx_failInvalid(struct kvm_vcpu *vcpu) nested_vmx_failInvalid()
6490 static void nested_vmx_failValid(struct kvm_vcpu *vcpu, nested_vmx_failValid()
6512 static void nested_vmx_abort(struct kvm_vcpu *vcpu, u32 indicator) nested_vmx_abort()
6537 static int get_vmx_mem_address(struct kvm_vcpu *vcpu, get_vmx_mem_address()
6636 static int nested_vmx_check_vmptr(struct kvm_vcpu *vcpu, int exit_reason, nested_vmx_check_vmptr()
6732 static int handle_vmon(struct kvm_vcpu *vcpu) handle_vmon()
6808 static int nested_vmx_check_permission(struct kvm_vcpu *vcpu) nested_vmx_check_permission()
6892 static int handle_vmoff(struct kvm_vcpu *vcpu) handle_vmoff()
6903 static int handle_vmclear(struct kvm_vcpu *vcpu) handle_vmclear()
6943 static int nested_vmx_run(struct kvm_vcpu *vcpu, bool launch);
6946 static int handle_vmlaunch(struct kvm_vcpu *vcpu) handle_vmlaunch()
6952 static int handle_vmresume(struct kvm_vcpu *vcpu) handle_vmresume()
6984 static inline int vmcs12_read_any(struct kvm_vcpu *vcpu, vmcs12_read_any()
7015 static inline int vmcs12_write_any(struct kvm_vcpu *vcpu, vmcs12_write_any()
7133 static int nested_vmx_check_vmcs12(struct kvm_vcpu *vcpu) nested_vmx_check_vmcs12()
7144 static int handle_vmread(struct kvm_vcpu *vcpu) handle_vmread()
7187 static int handle_vmwrite(struct kvm_vcpu *vcpu) handle_vmwrite()
7241 static int handle_vmptrld(struct kvm_vcpu *vcpu) handle_vmptrld()
7290 static int handle_vmptrst(struct kvm_vcpu *vcpu) handle_vmptrst()
7316 static int handle_invept(struct kvm_vcpu *vcpu) handle_invept()
7382 static int handle_invvpid(struct kvm_vcpu *vcpu) handle_invvpid()
7445 static int handle_pml_full(struct kvm_vcpu *vcpu) handle_pml_full()
7470 static int handle_pcommit(struct kvm_vcpu *vcpu) handle_pcommit()
7482 static int (*const kvm_vmx_exit_handlers[])(struct kvm_vcpu *vcpu) = {
7533 static bool nested_vmx_exit_handled_io(struct kvm_vcpu *vcpu, nested_vmx_exit_handled_io()
7582 static bool nested_vmx_exit_handled_msr(struct kvm_vcpu *vcpu, nested_vmx_exit_handled_msr()
7619 static bool nested_vmx_exit_handled_cr(struct kvm_vcpu *vcpu, nested_vmx_exit_handled_cr()
7700 static bool nested_vmx_exit_handled(struct kvm_vcpu *vcpu) nested_vmx_exit_handled()
7836 static void vmx_get_exit_info(struct kvm_vcpu *vcpu, u64 *info1, u64 *info2) vmx_get_exit_info()
7866 static void vmx_flush_pml_buffer(struct kvm_vcpu *vcpu) vmx_flush_pml_buffer()
7904 struct kvm_vcpu *vcpu; kvm_flush_pml_buffers()
8074 static int vmx_handle_exit(struct kvm_vcpu *vcpu) vmx_handle_exit()
8167 static void update_cr8_intercept(struct kvm_vcpu *vcpu, int tpr, int irr) update_cr8_intercept()
8183 static void vmx_set_virtual_x2apic_mode(struct kvm_vcpu *vcpu, bool set) vmx_set_virtual_x2apic_mode()
8212 static void vmx_set_apic_access_page_addr(struct kvm_vcpu *vcpu, hpa_t hpa) vmx_set_apic_access_page_addr()
8269 static void vmx_hwapic_irr_update(struct kvm_vcpu *vcpu, int max_irr) vmx_hwapic_irr_update()
8297 static void vmx_load_eoi_exitmap(struct kvm_vcpu *vcpu) vmx_load_eoi_exitmap()
8333 static void vmx_handle_external_intr(struct kvm_vcpu *vcpu) vmx_handle_external_intr()
8438 static void __vmx_complete_interrupts(struct kvm_vcpu *vcpu, __vmx_complete_interrupts()
8499 static void vmx_cancel_injection(struct kvm_vcpu *vcpu) vmx_cancel_injection()
8527 static void __noclone vmx_vcpu_run(struct kvm_vcpu *vcpu) vmx_vcpu_run()
8724 static void vmx_load_vmcs01(struct kvm_vcpu *vcpu) vmx_load_vmcs01()
8740 static void vmx_free_vcpu(struct kvm_vcpu *vcpu) vmx_free_vcpu()
8756 static struct kvm_vcpu *vmx_create_vcpu(struct kvm *kvm, unsigned int id) vmx_create_vcpu()
8868 static u64 vmx_get_mt_mask(struct kvm_vcpu *vcpu, gfn_t gfn, bool is_mmio) vmx_get_mt_mask()
8938 static void vmx_cpuid_update(struct kvm_vcpu *vcpu) vmx_cpuid_update()
8989 static void nested_ept_inject_page_fault(struct kvm_vcpu *vcpu, nested_ept_inject_page_fault()
9005 static unsigned long nested_ept_get_cr3(struct kvm_vcpu *vcpu) nested_ept_get_cr3()
9011 static void nested_ept_init_mmu_context(struct kvm_vcpu *vcpu) nested_ept_init_mmu_context()
9024 static void nested_ept_uninit_mmu_context(struct kvm_vcpu *vcpu) nested_ept_uninit_mmu_context()
9041 static void vmx_inject_page_fault_nested(struct kvm_vcpu *vcpu, vmx_inject_page_fault_nested()
9056 static bool nested_get_vmcs12_pages(struct kvm_vcpu *vcpu, nested_get_vmcs12_pages()
9132 static void vmx_start_preemption_timer(struct kvm_vcpu *vcpu) vmx_start_preemption_timer()
9154 static int nested_vmx_check_msr_bitmap_controls(struct kvm_vcpu *vcpu, nested_vmx_check_msr_bitmap_controls()
9180 static inline bool nested_vmx_merge_msr_bitmap(struct kvm_vcpu *vcpu, nested_vmx_merge_msr_bitmap()
9259 static int nested_vmx_check_apicv_controls(struct kvm_vcpu *vcpu, nested_vmx_check_apicv_controls()
9302 static int nested_vmx_check_msr_switch(struct kvm_vcpu *vcpu, nested_vmx_check_msr_switch()
9327 static int nested_vmx_check_msr_switch_controls(struct kvm_vcpu *vcpu, nested_vmx_check_msr_switch_controls()
9344 static int nested_vmx_msr_check_common(struct kvm_vcpu *vcpu, nested_vmx_msr_check_common()
9358 static int nested_vmx_load_msr_check(struct kvm_vcpu *vcpu, nested_vmx_load_msr_check()
9369 static int nested_vmx_store_msr_check(struct kvm_vcpu *vcpu, nested_vmx_store_msr_check()
9382 static u32 nested_vmx_load_msr(struct kvm_vcpu *vcpu, u64 gpa, u32 count) nested_vmx_load_msr()
9417 static int nested_vmx_store_msr(struct kvm_vcpu *vcpu, u64 gpa, u32 count) nested_vmx_store_msr()
9468 static void prepare_vmcs02(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12) prepare_vmcs02()
9796 static int nested_vmx_run(struct kvm_vcpu *vcpu, bool launch) nested_vmx_run()
10008 vmcs12_guest_cr0(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12) vmcs12_guest_cr0()
10018 vmcs12_guest_cr4(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12) vmcs12_guest_cr4()
10027 static void vmcs12_save_pending_event(struct kvm_vcpu *vcpu, vmcs12_save_pending_event()
10069 static int vmx_check_nested_events(struct kvm_vcpu *vcpu, bool external_intr) vmx_check_nested_events()
10108 static u32 vmx_get_preemption_timer_value(struct kvm_vcpu *vcpu) vmx_get_preemption_timer_value()
10133 static void prepare_vmcs12(struct kvm_vcpu *vcpu, struct vmcs12 *vmcs12, prepare_vmcs12()
10286 static void load_vmcs12_host_state(struct kvm_vcpu *vcpu, load_vmcs12_host_state()
10423 static void nested_vmx_vmexit(struct kvm_vcpu *vcpu, u32 exit_reason, nested_vmx_vmexit()
10516 static void vmx_leave_nested(struct kvm_vcpu *vcpu) vmx_leave_nested()
10530 static void nested_vmx_entry_failure(struct kvm_vcpu *vcpu, nested_vmx_entry_failure()
10542 static int vmx_check_intercept(struct kvm_vcpu *vcpu, vmx_check_intercept()
10549 static void vmx_sched_in(struct kvm_vcpu *vcpu, int cpu) vmx_sched_in()
10593 static int vmx_pre_block(struct kvm_vcpu *vcpu) vmx_pre_block()
10659 static void vmx_post_block(struct kvm_vcpu *vcpu) vmx_post_block()
10715 struct kvm_vcpu *vcpu; vmx_update_pi_irte()
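
The vmx.c hits above end with the kvm_vmx_exit_handlers[] table (line 7482) and vmx_handle_exit() (line 8074), which dispatch each VM exit to a per-reason handler taking a struct kvm_vcpu *. Below is a minimal, self-contained userspace sketch of that dispatch pattern; the stand-in kvm_vcpu type, the exit-reason constants, and the handler bodies are placeholders for illustration, not the kernel's definitions.

#include <stdio.h>

struct kvm_vcpu { int vcpu_id; };                 /* stand-in for the real type */

static int handle_hlt(struct kvm_vcpu *vcpu) { printf("vcpu %d: hlt\n", vcpu->vcpu_id); return 1; }
static int handle_io(struct kvm_vcpu *vcpu)  { printf("vcpu %d: io\n",  vcpu->vcpu_id); return 1; }

/* Placeholder exit-reason numbers for this sketch only. */
enum { EXIT_REASON_HLT = 12, EXIT_REASON_IO_INSTRUCTION = 30, NR_EXIT_REASONS = 64 };

/* Designated-initializer table indexed by exit reason, one handler per reason. */
static int (*const exit_handlers[NR_EXIT_REASONS])(struct kvm_vcpu *) = {
	[EXIT_REASON_HLT]            = handle_hlt,
	[EXIT_REASON_IO_INSTRUCTION] = handle_io,
};

/* Dispatch one exit; unknown or unhandled reasons fall back to an error. */
static int handle_exit(struct kvm_vcpu *vcpu, unsigned int exit_reason)
{
	if (exit_reason < NR_EXIT_REASONS && exit_handlers[exit_reason])
		return exit_handlers[exit_reason](vcpu);
	return -1;
}

int main(void)
{
	struct kvm_vcpu vcpu = { .vcpu_id = 0 };
	return handle_exit(&vcpu, EXIT_REASON_HLT) == 1 ? 0 : 1;
}

The table keeps the exit path a single bounds-checked array lookup, which is why most of the handle_*() hits listed above share the same struct kvm_vcpu * signature.
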
H A Dsvm.c93 struct kvm_vcpu;
131 struct kvm_vcpu vcpu;
206 static void svm_set_cr0(struct kvm_vcpu *vcpu, unsigned long cr0);
207 static void svm_flush_tlb(struct kvm_vcpu *vcpu);
251 static inline struct vcpu_svm *to_svm(struct kvm_vcpu *vcpu) to_svm()
475 static void svm_set_efer(struct kvm_vcpu *vcpu, u64 efer) svm_set_efer()
491 static u32 svm_get_interrupt_shadow(struct kvm_vcpu *vcpu) svm_get_interrupt_shadow()
501 static void svm_set_interrupt_shadow(struct kvm_vcpu *vcpu, int mask) svm_set_interrupt_shadow()
512 static void skip_emulated_instruction(struct kvm_vcpu *vcpu) skip_emulated_instruction()
535 static void svm_queue_exception(struct kvm_vcpu *vcpu, unsigned nr, svm_queue_exception()
596 static void svm_init_osvw(struct kvm_vcpu *vcpu) svm_init_osvw()
960 static u64 svm_read_tsc_offset(struct kvm_vcpu *vcpu) svm_read_tsc_offset()
967 static void svm_write_tsc_offset(struct kvm_vcpu *vcpu, u64 offset) svm_write_tsc_offset()
986 static void svm_adjust_tsc_offset_guest(struct kvm_vcpu *vcpu, s64 adjustment) svm_adjust_tsc_offset_guest()
1116 static void svm_vcpu_reset(struct kvm_vcpu *vcpu, bool init_event) svm_vcpu_reset()
1134 static struct kvm_vcpu *svm_create_vcpu(struct kvm *kvm, unsigned int id) svm_create_vcpu()
1202 static void svm_free_vcpu(struct kvm_vcpu *vcpu) svm_free_vcpu()
1214 static void svm_vcpu_load(struct kvm_vcpu *vcpu, int cpu) svm_vcpu_load()
1243 static void svm_vcpu_put(struct kvm_vcpu *vcpu) svm_vcpu_put()
1263 static unsigned long svm_get_rflags(struct kvm_vcpu *vcpu) svm_get_rflags()
1268 static void svm_set_rflags(struct kvm_vcpu *vcpu, unsigned long rflags) svm_set_rflags()
1278 static void svm_cache_reg(struct kvm_vcpu *vcpu, enum kvm_reg reg) svm_cache_reg()
1300 static struct vmcb_seg *svm_seg(struct kvm_vcpu *vcpu, int seg) svm_seg()
1318 static u64 svm_get_segment_base(struct kvm_vcpu *vcpu, int seg) svm_get_segment_base()
1325 static void svm_get_segment(struct kvm_vcpu *vcpu, svm_get_segment()
1393 static int svm_get_cpl(struct kvm_vcpu *vcpu) svm_get_cpl()
1400 static void svm_get_idt(struct kvm_vcpu *vcpu, struct desc_ptr *dt) svm_get_idt()
1408 static void svm_set_idt(struct kvm_vcpu *vcpu, struct desc_ptr *dt) svm_set_idt()
1417 static void svm_get_gdt(struct kvm_vcpu *vcpu, struct desc_ptr *dt) svm_get_gdt()
1425 static void svm_set_gdt(struct kvm_vcpu *vcpu, struct desc_ptr *dt) svm_set_gdt()
1434 static void svm_decache_cr0_guest_bits(struct kvm_vcpu *vcpu) svm_decache_cr0_guest_bits()
1438 static void svm_decache_cr3(struct kvm_vcpu *vcpu) svm_decache_cr3()
1442 static void svm_decache_cr4_guest_bits(struct kvm_vcpu *vcpu) svm_decache_cr4_guest_bits()
1468 static void svm_set_cr0(struct kvm_vcpu *vcpu, unsigned long cr0) svm_set_cr0()
1504 static int svm_set_cr4(struct kvm_vcpu *vcpu, unsigned long cr4) svm_set_cr4()
1524 static void svm_set_segment(struct kvm_vcpu *vcpu, svm_set_segment()
1558 static void update_bp_intercept(struct kvm_vcpu *vcpu) update_bp_intercept()
1585 static u64 svm_get_dr6(struct kvm_vcpu *vcpu) svm_get_dr6()
1590 static void svm_set_dr6(struct kvm_vcpu *vcpu, unsigned long value) svm_set_dr6()
1598 static void svm_sync_dirty_debug_regs(struct kvm_vcpu *vcpu) svm_sync_dirty_debug_regs()
1613 static void svm_set_dr7(struct kvm_vcpu *vcpu, unsigned long value) svm_set_dr7()
1710 static void svm_fpu_activate(struct kvm_vcpu *vcpu) svm_fpu_activate()
1812 struct kvm_vcpu *vcpu = &svm->vcpu; io_interception()
1860 static unsigned long nested_svm_get_tdp_cr3(struct kvm_vcpu *vcpu) nested_svm_get_tdp_cr3()
1867 static u64 nested_svm_get_tdp_pdptr(struct kvm_vcpu *vcpu, int index) nested_svm_get_tdp_pdptr()
1881 static void nested_svm_set_tdp_cr3(struct kvm_vcpu *vcpu, nested_svm_set_tdp_cr3()
1891 static void nested_svm_inject_npf_exit(struct kvm_vcpu *vcpu, nested_svm_inject_npf_exit()
1920 static void nested_svm_init_mmu_context(struct kvm_vcpu *vcpu) nested_svm_init_mmu_context()
1933 static void nested_svm_uninit_mmu_context(struct kvm_vcpu *vcpu) nested_svm_uninit_mmu_context()
2677 struct kvm_vcpu *vcpu = &svm->vcpu; invlpga_interception()
2984 static u64 svm_read_l1_tsc(struct kvm_vcpu *vcpu, u64 host_tsc) svm_read_l1_tsc()
2990 static int svm_get_msr(struct kvm_vcpu *vcpu, struct msr_data *msr_info) svm_get_msr()
3085 static int svm_set_vm_cr(struct kvm_vcpu *vcpu, u64 data) svm_set_vm_cr()
3110 static int svm_set_msr(struct kvm_vcpu *vcpu, struct msr_data *msr) svm_set_msr()
3306 static void dump_vmcb(struct kvm_vcpu *vcpu) dump_vmcb()
3411 static void svm_get_exit_info(struct kvm_vcpu *vcpu, u64 *info1, u64 *info2) svm_get_exit_info()
3419 static int handle_exit(struct kvm_vcpu *vcpu) handle_exit()
3488 static void reload_tss(struct kvm_vcpu *vcpu) reload_tss()
3508 static void svm_inject_nmi(struct kvm_vcpu *vcpu) svm_inject_nmi()
3530 static void svm_set_irq(struct kvm_vcpu *vcpu) svm_set_irq()
3543 static void update_cr8_intercept(struct kvm_vcpu *vcpu, int tpr, int irr) update_cr8_intercept()
3559 static void svm_set_virtual_x2apic_mode(struct kvm_vcpu *vcpu, bool set) svm_set_virtual_x2apic_mode()
3564 static int svm_cpu_uses_apicv(struct kvm_vcpu *vcpu) svm_cpu_uses_apicv()
3569 static void svm_load_eoi_exitmap(struct kvm_vcpu *vcpu) svm_load_eoi_exitmap()
3574 static void svm_sync_pir_to_irr(struct kvm_vcpu *vcpu) svm_sync_pir_to_irr()
3579 static int svm_nmi_allowed(struct kvm_vcpu *vcpu) svm_nmi_allowed()
3591 static bool svm_get_nmi_mask(struct kvm_vcpu *vcpu) svm_get_nmi_mask()
3598 static void svm_set_nmi_mask(struct kvm_vcpu *vcpu, bool masked) svm_set_nmi_mask()
3611 static int svm_interrupt_allowed(struct kvm_vcpu *vcpu) svm_interrupt_allowed()
3629 static void enable_irq_window(struct kvm_vcpu *vcpu) enable_irq_window()
3645 static void enable_nmi_window(struct kvm_vcpu *vcpu) enable_nmi_window()
3666 static void svm_flush_tlb(struct kvm_vcpu *vcpu) svm_flush_tlb()
3676 static void svm_prepare_guest_switch(struct kvm_vcpu *vcpu) svm_prepare_guest_switch()
3680 static inline void sync_cr8_to_lapic(struct kvm_vcpu *vcpu) sync_cr8_to_lapic()
3693 static inline void sync_lapic_to_cr8(struct kvm_vcpu *vcpu) sync_lapic_to_cr8()
3770 static void svm_cancel_injection(struct kvm_vcpu *vcpu) svm_cancel_injection()
3781 static void svm_vcpu_run(struct kvm_vcpu *vcpu) svm_vcpu_run()
3933 static void svm_set_cr3(struct kvm_vcpu *vcpu, unsigned long root) svm_set_cr3()
3942 static void set_tdp_cr3(struct kvm_vcpu *vcpu, unsigned long root) set_tdp_cr3()
3968 svm_patch_hypercall(struct kvm_vcpu *vcpu, unsigned char *hypercall) svm_patch_hypercall()
3993 static u64 svm_get_mt_mask(struct kvm_vcpu *vcpu, gfn_t gfn, bool is_mmio) svm_get_mt_mask()
3998 static void svm_cpuid_update(struct kvm_vcpu *vcpu) svm_cpuid_update()
4063 static void svm_fpu_deactivate(struct kvm_vcpu *vcpu) svm_fpu_deactivate()
4134 static int svm_check_intercept(struct kvm_vcpu *vcpu, svm_check_intercept()
4254 static void svm_handle_external_intr(struct kvm_vcpu *vcpu) svm_handle_external_intr()
4259 static void svm_sched_in(struct kvm_vcpu *vcpu, int cpu) svm_sched_in()
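
Among the svm.c hits, to_svm() at line 251 recovers the arch-specific vcpu_svm from the embedded struct kvm_vcpu declared as a member at line 131; this is the usual container_of() embedding pattern used throughout KVM. A simplified, self-contained sketch of that pattern is below (field names other than vcpu are placeholders, and the container_of definition is inlined so the example compiles on its own).

#include <stddef.h>
#include <stdio.h>

/* Simplified stand-ins for the kernel types; illustration only. */
struct kvm_vcpu { int vcpu_id; };

struct vcpu_svm {
	struct kvm_vcpu vcpu;   /* generic part embedded inside the arch struct */
	unsigned long vmcb_pa;  /* arch-specific state (placeholder field) */
};

/* container_of(): recover the enclosing structure from a member pointer. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

static struct vcpu_svm *to_svm(struct kvm_vcpu *vcpu)
{
	return container_of(vcpu, struct vcpu_svm, vcpu);
}

int main(void)
{
	struct vcpu_svm svm = { .vcpu = { .vcpu_id = 3 }, .vmcb_pa = 0x1000 };
	struct kvm_vcpu *generic = &svm.vcpu;        /* what common KVM code sees */
	printf("id=%d vmcb=%#lx\n", generic->vcpu_id, to_svm(generic)->vmcb_pa);
	return 0;
}

Common code passes struct kvm_vcpu * everywhere; each backend converts back to its own container only at the point where arch-specific state is needed.
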
H A Dioapic.c108 static void __rtc_irq_eoi_tracking_restore_one(struct kvm_vcpu *vcpu) __rtc_irq_eoi_tracking_restore_one()
135 void kvm_rtc_eoi_tracking_restore_one(struct kvm_vcpu *vcpu) kvm_rtc_eoi_tracking_restore_one()
146 struct kvm_vcpu *vcpu; kvm_rtc_eoi_tracking_restore_all()
157 static void rtc_irq_eoi(struct kvm_ioapic *ioapic, struct kvm_vcpu *vcpu) rtc_irq_eoi()
236 void kvm_ioapic_scan_entry(struct kvm_vcpu *vcpu, u64 *eoi_exit_bitmap) kvm_ioapic_scan_entry()
407 static void __kvm_ioapic_update_eoi(struct kvm_vcpu *vcpu, __kvm_ioapic_update_eoi()
462 void kvm_ioapic_update_eoi(struct kvm_vcpu *vcpu, int vector, int trigger_mode) kvm_ioapic_update_eoi()
482 static int ioapic_mmio_read(struct kvm_vcpu *vcpu, struct kvm_io_device *this, ioapic_mmio_read()
525 static int ioapic_mmio_write(struct kvm_vcpu *vcpu, struct kvm_io_device *this, ioapic_mmio_write()
H A Dmtrr.c61 bool kvm_mtrr_valid(struct kvm_vcpu *vcpu, u32 msr, u64 data) kvm_mtrr_valid()
121 static u8 mtrr_disabled_type(struct kvm_vcpu *vcpu) mtrr_disabled_type()
316 static void update_mtrr(struct kvm_vcpu *vcpu, u32 msr) update_mtrr()
350 static void set_var_mtrr_msr(struct kvm_vcpu *vcpu, u32 msr, u64 data) set_var_mtrr_msr()
382 int kvm_mtrr_set_msr(struct kvm_vcpu *vcpu, u32 msr, u64 data) kvm_mtrr_set_msr()
403 int kvm_mtrr_get_msr(struct kvm_vcpu *vcpu, u32 msr, u64 *pdata) kvm_mtrr_get_msr()
445 void kvm_vcpu_mtrr_init(struct kvm_vcpu *vcpu) kvm_vcpu_mtrr_init()
624 u8 kvm_mtrr_get_guest_memory_type(struct kvm_vcpu *vcpu, gfn_t gfn) kvm_mtrr_get_guest_memory_type()
700 bool kvm_mtrr_check_gfn_range_consistency(struct kvm_vcpu *vcpu, gfn_t gfn, kvm_mtrr_check_gfn_range_consistency()
H A Di8259.c52 struct kvm_vcpu *vcpu, *found = NULL;
279 struct kvm_vcpu *vcpu; kvm_pic_reset()
532 static int picdev_master_write(struct kvm_vcpu *vcpu, struct kvm_io_device *dev, picdev_master_write()
539 static int picdev_master_read(struct kvm_vcpu *vcpu, struct kvm_io_device *dev, picdev_master_read()
546 static int picdev_slave_write(struct kvm_vcpu *vcpu, struct kvm_io_device *dev, picdev_slave_write()
553 static int picdev_slave_read(struct kvm_vcpu *vcpu, struct kvm_io_device *dev, picdev_slave_read()
560 static int picdev_eclr_write(struct kvm_vcpu *vcpu, struct kvm_io_device *dev, picdev_eclr_write()
567 static int picdev_eclr_read(struct kvm_vcpu *vcpu, struct kvm_io_device *dev, picdev_eclr_read()
H A Dpaging_tmpl.h140 static int FNAME(cmpxchg_gpte)(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu, cmpxchg_gpte()
163 static bool FNAME(prefetch_invalid_gpte)(struct kvm_vcpu *vcpu, prefetch_invalid_gpte()
184 static inline unsigned FNAME(gpte_access)(struct kvm_vcpu *vcpu, u64 gpte) gpte_access()
199 static int FNAME(update_accessed_dirty_bits)(struct kvm_vcpu *vcpu, update_accessed_dirty_bits()
261 struct kvm_vcpu *vcpu, struct kvm_mmu *mmu, walk_addr_generic()
437 struct kvm_vcpu *vcpu, gva_t addr, u32 access) walk_addr()
445 struct kvm_vcpu *vcpu, gva_t addr, walk_addr_nested()
454 FNAME(prefetch_gpte)(struct kvm_vcpu *vcpu, struct kvm_mmu_page *sp, prefetch_gpte()
484 static void FNAME(update_pte)(struct kvm_vcpu *vcpu, struct kvm_mmu_page *sp, update_pte()
492 static bool FNAME(gpte_changed)(struct kvm_vcpu *vcpu, gpte_changed()
515 static void FNAME(pte_prefetch)(struct kvm_vcpu *vcpu, struct guest_walker *gw, pte_prefetch()
551 static int FNAME(fetch)(struct kvm_vcpu *vcpu, gva_t addr, fetch()
656 FNAME(is_self_change_mapping)(struct kvm_vcpu *vcpu, is_self_change_mapping()
692 static int FNAME(page_fault)(struct kvm_vcpu *vcpu, gva_t addr, u32 error_code, page_fault()
819 static void FNAME(invlpg)(struct kvm_vcpu *vcpu, gva_t gva) invlpg()
874 static gpa_t FNAME(gva_to_gpa)(struct kvm_vcpu *vcpu, gva_t vaddr, u32 access, gva_to_gpa()
893 static gpa_t FNAME(gva_to_gpa_nested)(struct kvm_vcpu *vcpu, gva_t vaddr, gva_to_gpa_nested()
926 static int FNAME(sync_page)(struct kvm_vcpu *vcpu, struct kvm_mmu_page *sp) sync_page()
H A Dcpuid.c61 int kvm_update_cpuid(struct kvm_vcpu *vcpu) kvm_update_cpuid()
128 static void cpuid_fix_nx_cap(struct kvm_vcpu *vcpu) cpuid_fix_nx_cap()
147 int cpuid_query_maxphyaddr(struct kvm_vcpu *vcpu) cpuid_query_maxphyaddr()
163 int kvm_vcpu_ioctl_set_cpuid(struct kvm_vcpu *vcpu, kvm_vcpu_ioctl_set_cpuid()
205 int kvm_vcpu_ioctl_set_cpuid2(struct kvm_vcpu *vcpu, kvm_vcpu_ioctl_set_cpuid2()
226 int kvm_vcpu_ioctl_get_cpuid2(struct kvm_vcpu *vcpu, kvm_vcpu_ioctl_get_cpuid2()
737 static int move_to_next_stateful_cpuid_entry(struct kvm_vcpu *vcpu, int i) move_to_next_stateful_cpuid_entry()
769 struct kvm_cpuid_entry2 *kvm_find_cpuid_entry(struct kvm_vcpu *vcpu, kvm_find_cpuid_entry()
795 static struct kvm_cpuid_entry2* check_cpuid_limit(struct kvm_vcpu *vcpu, check_cpuid_limit()
811 void kvm_cpuid(struct kvm_vcpu *vcpu, u32 *eax, u32 *ebx, u32 *ecx, u32 *edx) kvm_cpuid()
838 void kvm_emulate_cpuid(struct kvm_vcpu *vcpu) kvm_emulate_cpuid()
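
The cpuid.c hits include kvm_find_cpuid_entry() (line 769) and kvm_cpuid() (line 811), which look up the guest's cached CPUID leaves by function and index before filling EAX..EDX. A rough, self-contained sketch of that lookup over a cached entry array follows; the trimmed-down entry type and sample leaf values are assumptions for illustration, not the kernel's struct kvm_cpuid_entry2 handling.

#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

/* Trimmed-down stand-in for a cached CPUID leaf. */
struct cpuid_entry {
	uint32_t function, index;
	uint32_t eax, ebx, ecx, edx;
};

/* Linear search of the cached leaves, matching on (function, index). */
static const struct cpuid_entry *find_cpuid_entry(const struct cpuid_entry *ent,
						  size_t nent, uint32_t function,
						  uint32_t index)
{
	for (size_t i = 0; i < nent; i++)
		if (ent[i].function == function && ent[i].index == index)
			return &ent[i];
	return NULL;                    /* caller falls back to a default leaf */
}

int main(void)
{
	static const struct cpuid_entry cache[] = {
		{ .function = 0, .index = 0, .eax = 0xd },
		{ .function = 1, .index = 0, .eax = 0x000306c3 },
	};
	const struct cpuid_entry *e = find_cpuid_entry(cache, 2, 1, 0);
	printf("leaf 1: eax=%#x\n", e ? e->eax : 0);
	return 0;
}
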
H A Dirq_comm.c57 struct kvm_vcpu *vcpu, *lowest = NULL; kvm_irq_delivery_to_apic()
270 struct kvm_vcpu **dest_vcpu) kvm_intr_is_single_vcpu()
273 struct kvm_vcpu *vcpu; kvm_intr_is_single_vcpu()
342 void kvm_scan_ioapic_routes(struct kvm_vcpu *vcpu, u64 *eoi_exit_bitmap) kvm_scan_ioapic_routes()
H A Di8254.c257 void __kvm_migrate_pit_timer(struct kvm_vcpu *vcpu) __kvm_migrate_pit_timer()
282 struct kvm_vcpu *vcpu; pit_do_work()
451 static int pit_ioport_write(struct kvm_vcpu *vcpu, pit_ioport_write()
528 static int pit_ioport_read(struct kvm_vcpu *vcpu, pit_ioport_read()
599 static int speaker_ioport_write(struct kvm_vcpu *vcpu, speaker_ioport_write()
617 static int speaker_ioport_read(struct kvm_vcpu *vcpu, speaker_ioport_read()
H A Dmmutrace.h249 TP_PROTO(struct kvm_vcpu *vcpu, gva_t gva, u32 error_code,
H A Dtrace.h222 TP_PROTO(unsigned int exit_reason, struct kvm_vcpu *vcpu, u32 isa),
727 TP_PROTO(struct kvm_vcpu *vcpu, __u8 failed),
/linux-4.4.14/arch/arm/include/asm/
H A Dkvm_coproc.h22 void kvm_reset_coprocs(struct kvm_vcpu *vcpu);
31 int kvm_handle_cp10_id(struct kvm_vcpu *vcpu, struct kvm_run *run);
32 int kvm_handle_cp_0_13_access(struct kvm_vcpu *vcpu, struct kvm_run *run);
33 int kvm_handle_cp14_load_store(struct kvm_vcpu *vcpu, struct kvm_run *run);
34 int kvm_handle_cp14_access(struct kvm_vcpu *vcpu, struct kvm_run *run);
35 int kvm_handle_cp15_32(struct kvm_vcpu *vcpu, struct kvm_run *run);
36 int kvm_handle_cp15_64(struct kvm_vcpu *vcpu, struct kvm_run *run);
38 unsigned long kvm_arm_num_guest_msrs(struct kvm_vcpu *vcpu);
39 int kvm_arm_copy_msrindices(struct kvm_vcpu *vcpu, u64 __user *uindices);
43 int kvm_arm_copy_coproc_indices(struct kvm_vcpu *vcpu, u64 __user *uindices);
44 int kvm_arm_coproc_get_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *);
45 int kvm_arm_coproc_set_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *);
46 unsigned long kvm_arm_num_coproc_regs(struct kvm_vcpu *vcpu);
H A Dkvm_emulate.h28 unsigned long *vcpu_reg(struct kvm_vcpu *vcpu, u8 reg_num);
29 unsigned long *vcpu_spsr(struct kvm_vcpu *vcpu);
31 static inline unsigned long vcpu_get_reg(struct kvm_vcpu *vcpu, vcpu_get_reg()
37 static inline void vcpu_set_reg(struct kvm_vcpu *vcpu, u8 reg_num, vcpu_set_reg()
43 bool kvm_condition_valid(struct kvm_vcpu *vcpu);
44 void kvm_skip_instr(struct kvm_vcpu *vcpu, bool is_wide_instr);
45 void kvm_inject_undefined(struct kvm_vcpu *vcpu);
46 void kvm_inject_dabt(struct kvm_vcpu *vcpu, unsigned long addr);
47 void kvm_inject_pabt(struct kvm_vcpu *vcpu, unsigned long addr);
49 static inline void vcpu_reset_hcr(struct kvm_vcpu *vcpu) vcpu_reset_hcr()
54 static inline unsigned long vcpu_get_hcr(struct kvm_vcpu *vcpu) vcpu_get_hcr()
59 static inline void vcpu_set_hcr(struct kvm_vcpu *vcpu, unsigned long hcr) vcpu_set_hcr()
64 static inline bool vcpu_mode_is_32bit(struct kvm_vcpu *vcpu) vcpu_mode_is_32bit()
69 static inline unsigned long *vcpu_pc(struct kvm_vcpu *vcpu) vcpu_pc()
74 static inline unsigned long *vcpu_cpsr(struct kvm_vcpu *vcpu) vcpu_cpsr()
79 static inline void vcpu_set_thumb(struct kvm_vcpu *vcpu) vcpu_set_thumb()
84 static inline bool mode_has_spsr(struct kvm_vcpu *vcpu) mode_has_spsr()
90 static inline bool vcpu_mode_priv(struct kvm_vcpu *vcpu) vcpu_mode_priv()
96 static inline u32 kvm_vcpu_get_hsr(struct kvm_vcpu *vcpu) kvm_vcpu_get_hsr()
101 static inline unsigned long kvm_vcpu_get_hfar(struct kvm_vcpu *vcpu) kvm_vcpu_get_hfar()
106 static inline phys_addr_t kvm_vcpu_get_fault_ipa(struct kvm_vcpu *vcpu) kvm_vcpu_get_fault_ipa()
111 static inline unsigned long kvm_vcpu_get_hyp_pc(struct kvm_vcpu *vcpu) kvm_vcpu_get_hyp_pc()
116 static inline bool kvm_vcpu_dabt_isvalid(struct kvm_vcpu *vcpu) kvm_vcpu_dabt_isvalid()
121 static inline bool kvm_vcpu_dabt_iswrite(struct kvm_vcpu *vcpu) kvm_vcpu_dabt_iswrite()
126 static inline bool kvm_vcpu_dabt_issext(struct kvm_vcpu *vcpu) kvm_vcpu_dabt_issext()
131 static inline int kvm_vcpu_dabt_get_rd(struct kvm_vcpu *vcpu) kvm_vcpu_dabt_get_rd()
136 static inline bool kvm_vcpu_dabt_isextabt(struct kvm_vcpu *vcpu) kvm_vcpu_dabt_isextabt()
141 static inline bool kvm_vcpu_dabt_iss1tw(struct kvm_vcpu *vcpu) kvm_vcpu_dabt_iss1tw()
147 static inline int kvm_vcpu_dabt_get_as(struct kvm_vcpu *vcpu) kvm_vcpu_dabt_get_as()
163 static inline bool kvm_vcpu_trap_il_is32bit(struct kvm_vcpu *vcpu) kvm_vcpu_trap_il_is32bit()
168 static inline u8 kvm_vcpu_trap_get_class(struct kvm_vcpu *vcpu) kvm_vcpu_trap_get_class()
173 static inline bool kvm_vcpu_trap_is_iabt(struct kvm_vcpu *vcpu) kvm_vcpu_trap_is_iabt()
178 static inline u8 kvm_vcpu_trap_get_fault(struct kvm_vcpu *vcpu) kvm_vcpu_trap_get_fault()
183 static inline u8 kvm_vcpu_trap_get_fault_type(struct kvm_vcpu *vcpu) kvm_vcpu_trap_get_fault_type()
188 static inline u32 kvm_vcpu_hvc_get_imm(struct kvm_vcpu *vcpu) kvm_vcpu_hvc_get_imm()
193 static inline unsigned long kvm_vcpu_get_mpidr_aff(struct kvm_vcpu *vcpu) kvm_vcpu_get_mpidr_aff()
198 static inline void kvm_vcpu_set_be(struct kvm_vcpu *vcpu) kvm_vcpu_set_be()
203 static inline bool kvm_vcpu_is_be(struct kvm_vcpu *vcpu) kvm_vcpu_is_be()
208 static inline unsigned long vcpu_data_guest_to_host(struct kvm_vcpu *vcpu, vcpu_data_guest_to_host()
233 static inline unsigned long vcpu_data_host_to_guest(struct kvm_vcpu *vcpu, vcpu_data_host_to_guest()
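
The arm kvm_emulate.h hits include the vcpu_get_reg()/vcpu_set_reg() accessors (lines 31 and 37), thin wrappers over vcpu_reg() (line 28), which hands back a pointer into the saved guest register file. A self-contained sketch of that accessor shape is below; the flat regs[16] layout is a simplification assumed for the example, not the real vcpu context layout.

#include <stdio.h>

/* Simplified stand-in; the real kvm_vcpu carries a full saved context. */
struct kvm_vcpu { unsigned long regs[16]; };

/* vcpu_reg() returns a pointer into the saved register file, so both the
 * get and set accessors can be thin wrappers around it. */
static unsigned long *vcpu_reg(struct kvm_vcpu *vcpu, unsigned char reg_num)
{
	return &vcpu->regs[reg_num];
}

static unsigned long vcpu_get_reg(struct kvm_vcpu *vcpu, unsigned char reg_num)
{
	return *vcpu_reg(vcpu, reg_num);
}

static void vcpu_set_reg(struct kvm_vcpu *vcpu, unsigned char reg_num,
			 unsigned long val)
{
	*vcpu_reg(vcpu, reg_num) = val;
}

int main(void)
{
	struct kvm_vcpu vcpu = { { 0 } };
	vcpu_set_reg(&vcpu, 0, 0xdeadbeef);              /* MMIO emulation writes back here */
	printf("r0=%#lx\n", vcpu_get_reg(&vcpu, 0));
	return 0;
}
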
H A Dkvm_host.h44 u32 *kvm_vcpu_reg(struct kvm_vcpu *vcpu, u8 reg_num, u32 mode);
46 int kvm_reset_vcpu(struct kvm_vcpu *vcpu);
47 void kvm_reset_coprocs(struct kvm_vcpu *vcpu);
156 unsigned long kvm_arm_num_regs(struct kvm_vcpu *vcpu);
157 int kvm_arm_copy_reg_indices(struct kvm_vcpu *vcpu, u64 __user *indices);
158 int kvm_arm_get_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg);
159 int kvm_arm_set_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg);
169 unsigned long kvm_arm_num_regs(struct kvm_vcpu *vcpu);
170 int kvm_arm_copy_reg_indices(struct kvm_vcpu *vcpu, u64 __user *indices);
180 struct kvm_vcpu *kvm_arm_get_running_vcpu(void);
181 struct kvm_vcpu __percpu **kvm_get_running_vcpus(void);
183 int kvm_arm_copy_coproc_indices(struct kvm_vcpu *vcpu, u64 __user *uindices);
184 unsigned long kvm_arm_num_coproc_regs(struct kvm_vcpu *vcpu);
185 int kvm_arm_coproc_get_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *);
186 int kvm_arm_coproc_set_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *);
188 int handle_exit(struct kvm_vcpu *vcpu, struct kvm_run *run,
227 struct kvm_vcpu *kvm_mpidr_to_vcpu(struct kvm *kvm, unsigned long mpidr);
232 static inline void kvm_arch_vcpu_uninit(struct kvm_vcpu *vcpu) {} kvm_arch_sched_in()
233 static inline void kvm_arch_sched_in(struct kvm_vcpu *vcpu, int cpu) {} kvm_arch_sched_in()
236 static inline void kvm_arm_setup_debug(struct kvm_vcpu *vcpu) {} kvm_arm_clear_debug()
237 static inline void kvm_arm_clear_debug(struct kvm_vcpu *vcpu) {} kvm_arm_reset_debug_ptr()
238 static inline void kvm_arm_reset_debug_ptr(struct kvm_vcpu *vcpu) {}
H A Dkvm_mmio.h31 int kvm_handle_mmio_return(struct kvm_vcpu *vcpu, struct kvm_run *run);
32 int io_mem_abort(struct kvm_vcpu *vcpu, struct kvm_run *run,
H A Dkvm_psci.h24 int kvm_psci_version(struct kvm_vcpu *vcpu);
25 int kvm_psci_call(struct kvm_vcpu *vcpu);
H A Dkvm_mmu.h62 int kvm_handle_guest_abort(struct kvm_vcpu *vcpu, struct kvm_run *run);
64 void kvm_mmu_free_memory_caches(struct kvm_vcpu *vcpu);
180 static inline bool vcpu_has_cache_enabled(struct kvm_vcpu *vcpu) vcpu_has_cache_enabled()
185 static inline void __coherent_cache_guest_page(struct kvm_vcpu *vcpu, pfn_t pfn, __coherent_cache_guest_page()
269 void kvm_set_way_flush(struct kvm_vcpu *vcpu);
270 void kvm_toggle_cache(struct kvm_vcpu *vcpu, bool was_enabled);
H A Dkvm_asm.h84 struct kvm_vcpu;
101 extern int __kvm_vcpu_run(struct kvm_vcpu *vcpu);
/linux-4.4.14/arch/powerpc/kvm/
H A Dbooke.h76 void kvmppc_set_msr(struct kvm_vcpu *vcpu, u32 new_msr);
77 void kvmppc_mmu_msr_notify(struct kvm_vcpu *vcpu, u32 old_msr);
79 void kvmppc_set_epcr(struct kvm_vcpu *vcpu, u32 new_epcr);
80 void kvmppc_set_tcr(struct kvm_vcpu *vcpu, u32 new_tcr);
81 void kvmppc_set_tsr_bits(struct kvm_vcpu *vcpu, u32 tsr_bits);
82 void kvmppc_clr_tsr_bits(struct kvm_vcpu *vcpu, u32 tsr_bits);
84 int kvmppc_booke_emulate_op(struct kvm_run *run, struct kvm_vcpu *vcpu,
86 int kvmppc_booke_emulate_mfspr(struct kvm_vcpu *vcpu, int sprn, ulong *spr_val);
87 int kvmppc_booke_emulate_mtspr(struct kvm_vcpu *vcpu, int sprn, ulong spr_val);
90 void kvmppc_load_guest_spe(struct kvm_vcpu *vcpu);
91 void kvmppc_save_guest_spe(struct kvm_vcpu *vcpu);
94 void kvmppc_vcpu_disable_spe(struct kvm_vcpu *vcpu);
96 void kvmppc_booke_vcpu_load(struct kvm_vcpu *vcpu, int cpu);
97 void kvmppc_booke_vcpu_put(struct kvm_vcpu *vcpu);
106 void kvmppc_set_pending_interrupt(struct kvm_vcpu *vcpu, enum int_class type);
108 extern void kvmppc_mmu_destroy_e500(struct kvm_vcpu *vcpu);
110 struct kvm_vcpu *vcpu,
112 extern int kvmppc_core_emulate_mtspr_e500(struct kvm_vcpu *vcpu, int sprn,
114 extern int kvmppc_core_emulate_mfspr_e500(struct kvm_vcpu *vcpu, int sprn,
116 extern void kvmppc_mmu_destroy_e500(struct kvm_vcpu *vcpu);
118 struct kvm_vcpu *vcpu,
120 extern int kvmppc_core_emulate_mtspr_e500(struct kvm_vcpu *vcpu, int sprn,
122 extern int kvmppc_core_emulate_mfspr_e500(struct kvm_vcpu *vcpu, int sprn,
H A Dtiming.h27 void kvmppc_init_timing_stats(struct kvm_vcpu *vcpu);
28 void kvmppc_update_timing_stats(struct kvm_vcpu *vcpu);
29 void kvmppc_create_vcpu_debugfs(struct kvm_vcpu *vcpu, unsigned int id);
30 void kvmppc_remove_vcpu_debugfs(struct kvm_vcpu *vcpu);
32 static inline void kvmppc_set_exit_type(struct kvm_vcpu *vcpu, int type) kvmppc_set_exit_type()
39 static inline void kvmppc_init_timing_stats(struct kvm_vcpu *vcpu) {} kvmppc_update_timing_stats()
40 static inline void kvmppc_update_timing_stats(struct kvm_vcpu *vcpu) {} kvmppc_create_vcpu_debugfs()
41 static inline void kvmppc_create_vcpu_debugfs(struct kvm_vcpu *vcpu, kvmppc_create_vcpu_debugfs()
43 static inline void kvmppc_remove_vcpu_debugfs(struct kvm_vcpu *vcpu) {} kvmppc_set_exit_type()
44 static inline void kvmppc_set_exit_type(struct kvm_vcpu *vcpu, int type) {} kvmppc_set_exit_type()
48 static inline void kvmppc_account_exit_stat(struct kvm_vcpu *vcpu, int type) kvmppc_account_exit_stat()
103 static inline void kvmppc_account_exit(struct kvm_vcpu *vcpu, int type) kvmppc_account_exit()
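
The timing.h hits show both the real declarations (lines 27-30) and empty static inline versions of the same names (lines 39-44): when exit timing is compiled out, the functions collapse to no-op stubs so callers need no #ifdefs of their own. A minimal sketch of that configuration-stub pattern follows; the CONFIG_EXIT_TIMING macro and function names here are made up for the example and are not the kernel's identifiers.

#include <stdio.h>

struct kvm_vcpu { int vcpu_id; };      /* stand-in type */

/* Toggle the illustrative feature here (stands in for a Kconfig option). */
/* #define CONFIG_EXIT_TIMING 1 */

#ifdef CONFIG_EXIT_TIMING
void init_timing_stats(struct kvm_vcpu *vcpu);      /* real implementations live elsewhere */
void update_timing_stats(struct kvm_vcpu *vcpu);
#else
/* Feature compiled out: the same names become empty inline stubs, so call
 * sites compile unchanged and the calls vanish after optimization. */
static inline void init_timing_stats(struct kvm_vcpu *vcpu) { (void)vcpu; }
static inline void update_timing_stats(struct kvm_vcpu *vcpu) { (void)vcpu; }
#endif

int main(void)
{
	struct kvm_vcpu vcpu = { .vcpu_id = 0 };
	init_timing_stats(&vcpu);           /* compiles whether or not the option is set */
	update_timing_stats(&vcpu);
	puts("ok");
	return 0;
}
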
H A De500.h61 struct kvm_vcpu vcpu;
103 static inline struct kvmppc_vcpu_e500 *to_e500(struct kvm_vcpu *vcpu) to_e500()
130 int kvmppc_e500_emul_tlbwe(struct kvm_vcpu *vcpu);
131 int kvmppc_e500_emul_tlbre(struct kvm_vcpu *vcpu);
132 int kvmppc_e500_emul_tlbivax(struct kvm_vcpu *vcpu, gva_t ea);
133 int kvmppc_e500_emul_tlbilx(struct kvm_vcpu *vcpu, int type, gva_t ea);
134 int kvmppc_e500_emul_tlbsx(struct kvm_vcpu *vcpu, gva_t ea);
138 void kvmppc_get_sregs_e500_tlb(struct kvm_vcpu *vcpu, struct kvm_sregs *sregs);
139 int kvmppc_set_sregs_e500_tlb(struct kvm_vcpu *vcpu, struct kvm_sregs *sregs);
141 int kvmppc_get_one_reg_e500_tlb(struct kvm_vcpu *vcpu, u64 id,
143 int kvmppc_set_one_reg_e500_tlb(struct kvm_vcpu *vcpu, u64 id,
211 static inline unsigned int get_cur_pid(struct kvm_vcpu *vcpu) get_cur_pid()
216 static inline unsigned int get_cur_as(struct kvm_vcpu *vcpu) get_cur_as()
221 static inline unsigned int get_cur_pr(struct kvm_vcpu *vcpu) get_cur_pr()
226 static inline unsigned int get_cur_spid(const struct kvm_vcpu *vcpu) get_cur_spid()
231 static inline unsigned int get_cur_sas(const struct kvm_vcpu *vcpu) get_cur_sas()
236 static inline unsigned int get_tlb_tlbsel(const struct kvm_vcpu *vcpu) get_tlb_tlbsel()
245 static inline unsigned int get_tlb_nv_bit(const struct kvm_vcpu *vcpu) get_tlb_nv_bit()
250 static inline unsigned int get_tlb_esel_bit(const struct kvm_vcpu *vcpu) get_tlb_esel_bit()
255 static inline int tlbe_is_host_safe(const struct kvm_vcpu *vcpu, tlbe_is_host_safe()
308 static inline int get_lpid(struct kvm_vcpu *vcpu) get_lpid()
313 unsigned int kvmppc_e500_get_tlb_stid(struct kvm_vcpu *vcpu,
316 static inline unsigned int get_tlbmiss_tid(struct kvm_vcpu *vcpu) get_tlbmiss_tid()
328 static inline bool has_feature(const struct kvm_vcpu *vcpu, has_feature()
H A Dbook3s.c42 #define VCPU_STAT(x) offsetof(struct kvm_vcpu, stat.x), KVM_STAT_VCPU
69 void kvmppc_unfixup_split_real(struct kvm_vcpu *vcpu) kvmppc_unfixup_split_real()
80 static inline unsigned long kvmppc_interrupt_offset(struct kvm_vcpu *vcpu) kvmppc_interrupt_offset()
87 static inline void kvmppc_update_int_pending(struct kvm_vcpu *vcpu, kvmppc_update_int_pending()
98 static inline bool kvmppc_critical_section(struct kvm_vcpu *vcpu) kvmppc_critical_section()
124 void kvmppc_inject_interrupt(struct kvm_vcpu *vcpu, int vec, u64 flags) kvmppc_inject_interrupt()
161 void kvmppc_book3s_dequeue_irqprio(struct kvm_vcpu *vcpu, kvmppc_book3s_dequeue_irqprio()
173 void kvmppc_book3s_queue_irqprio(struct kvm_vcpu *vcpu, unsigned int vec) kvmppc_book3s_queue_irqprio()
185 void kvmppc_core_queue_program(struct kvm_vcpu *vcpu, ulong flags) kvmppc_core_queue_program()
192 void kvmppc_core_queue_dec(struct kvm_vcpu *vcpu) kvmppc_core_queue_dec()
198 int kvmppc_core_pending_dec(struct kvm_vcpu *vcpu) kvmppc_core_pending_dec()
204 void kvmppc_core_dequeue_dec(struct kvm_vcpu *vcpu) kvmppc_core_dequeue_dec()
210 void kvmppc_core_queue_external(struct kvm_vcpu *vcpu, kvmppc_core_queue_external()
221 void kvmppc_core_dequeue_external(struct kvm_vcpu *vcpu) kvmppc_core_dequeue_external()
227 void kvmppc_core_queue_data_storage(struct kvm_vcpu *vcpu, ulong dar, kvmppc_core_queue_data_storage()
235 void kvmppc_core_queue_inst_storage(struct kvm_vcpu *vcpu, ulong flags) kvmppc_core_queue_inst_storage()
244 static int kvmppc_book3s_irqprio_deliver(struct kvm_vcpu *vcpu, kvmppc_book3s_irqprio_deliver()
325 static bool clear_irqprio(struct kvm_vcpu *vcpu, unsigned int priority) clear_irqprio()
339 int kvmppc_core_prepare_to_enter(struct kvm_vcpu *vcpu) kvmppc_core_prepare_to_enter()
369 pfn_t kvmppc_gpa_to_pfn(struct kvm_vcpu *vcpu, gpa_t gpa, bool writing, kvmppc_gpa_to_pfn()
395 int kvmppc_xlate(struct kvm_vcpu *vcpu, ulong eaddr, enum xlate_instdata xlid, kvmppc_xlate()
425 int kvmppc_load_last_inst(struct kvm_vcpu *vcpu, enum instruction_type type, kvmppc_load_last_inst()
442 int kvm_arch_vcpu_setup(struct kvm_vcpu *vcpu) kvm_arch_vcpu_setup()
447 int kvmppc_subarch_vcpu_init(struct kvm_vcpu *vcpu) kvmppc_subarch_vcpu_init()
452 void kvmppc_subarch_vcpu_uninit(struct kvm_vcpu *vcpu) kvmppc_subarch_vcpu_uninit()
456 int kvm_arch_vcpu_ioctl_get_sregs(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_get_sregs()
462 int kvm_arch_vcpu_ioctl_set_sregs(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_set_sregs()
468 int kvm_arch_vcpu_ioctl_get_regs(struct kvm_vcpu *vcpu, struct kvm_regs *regs) kvm_arch_vcpu_ioctl_get_regs()
496 int kvm_arch_vcpu_ioctl_set_regs(struct kvm_vcpu *vcpu, struct kvm_regs *regs) kvm_arch_vcpu_ioctl_set_regs()
523 int kvm_arch_vcpu_ioctl_get_fpu(struct kvm_vcpu *vcpu, struct kvm_fpu *fpu) kvm_arch_vcpu_ioctl_get_fpu()
528 int kvm_arch_vcpu_ioctl_set_fpu(struct kvm_vcpu *vcpu, struct kvm_fpu *fpu) kvm_arch_vcpu_ioctl_set_fpu()
533 int kvmppc_get_one_reg(struct kvm_vcpu *vcpu, u64 id, kvmppc_get_one_reg()
609 int kvmppc_set_one_reg(struct kvm_vcpu *vcpu, u64 id, kvmppc_set_one_reg()
683 void kvmppc_core_vcpu_load(struct kvm_vcpu *vcpu, int cpu) kvmppc_core_vcpu_load()
688 void kvmppc_core_vcpu_put(struct kvm_vcpu *vcpu) kvmppc_core_vcpu_put()
693 void kvmppc_set_msr(struct kvm_vcpu *vcpu, u64 msr) kvmppc_set_msr()
699 int kvmppc_vcpu_run(struct kvm_run *kvm_run, struct kvm_vcpu *vcpu) kvmppc_vcpu_run()
704 int kvm_arch_vcpu_ioctl_translate(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_translate()
710 int kvm_arch_vcpu_ioctl_set_guest_debug(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_set_guest_debug()
717 void kvmppc_decrementer_func(struct kvm_vcpu *vcpu) kvmppc_decrementer_func()
723 struct kvm_vcpu *kvmppc_core_vcpu_create(struct kvm *kvm, unsigned int id) kvmppc_core_vcpu_create()
728 void kvmppc_core_vcpu_free(struct kvm_vcpu *vcpu) kvmppc_core_vcpu_free()
733 int kvmppc_core_check_requests(struct kvm_vcpu *vcpu) kvmppc_core_check_requests()
801 void kvmppc_mmu_destroy(struct kvm_vcpu *vcpu) kvmppc_mmu_destroy()
827 int kvmppc_h_logical_ci_load(struct kvm_vcpu *vcpu) kvmppc_h_logical_ci_load()
869 int kvmppc_h_logical_ci_store(struct kvm_vcpu *vcpu) kvmppc_h_logical_ci_store()
928 r = kvm_init(NULL, sizeof(struct kvm_vcpu), 0, THIS_MODULE); kvmppc_book3s_init()
H A Dbook3s.h25 extern void kvmppc_mmu_destroy_pr(struct kvm_vcpu *vcpu);
26 extern int kvmppc_core_emulate_op_pr(struct kvm_run *run, struct kvm_vcpu *vcpu,
28 extern int kvmppc_core_emulate_mtspr_pr(struct kvm_vcpu *vcpu,
30 extern int kvmppc_core_emulate_mfspr_pr(struct kvm_vcpu *vcpu,
H A Dbook3s_mmu_hpte.c67 void kvmppc_mmu_hpte_cache_map(struct kvm_vcpu *vcpu, struct hpte_cache *pte) kvmppc_mmu_hpte_cache_map()
112 static void invalidate_pte(struct kvm_vcpu *vcpu, struct hpte_cache *pte) invalidate_pte()
143 static void kvmppc_mmu_pte_flush_all(struct kvm_vcpu *vcpu) kvmppc_mmu_pte_flush_all()
161 static void kvmppc_mmu_pte_flush_page(struct kvm_vcpu *vcpu, ulong guest_ea) kvmppc_mmu_pte_flush_page()
180 static void kvmppc_mmu_pte_flush_long(struct kvm_vcpu *vcpu, ulong guest_ea) kvmppc_mmu_pte_flush_long()
200 void kvmppc_mmu_pte_flush(struct kvm_vcpu *vcpu, ulong guest_ea, ulong ea_mask) kvmppc_mmu_pte_flush()
223 static void kvmppc_mmu_pte_vflush_short(struct kvm_vcpu *vcpu, u64 guest_vp) kvmppc_mmu_pte_vflush_short()
244 static void kvmppc_mmu_pte_vflush_64k(struct kvm_vcpu *vcpu, u64 guest_vp) kvmppc_mmu_pte_vflush_64k()
266 static void kvmppc_mmu_pte_vflush_long(struct kvm_vcpu *vcpu, u64 guest_vp) kvmppc_mmu_pte_vflush_long()
286 void kvmppc_mmu_pte_vflush(struct kvm_vcpu *vcpu, u64 guest_vp, u64 vp_mask) kvmppc_mmu_pte_vflush()
309 void kvmppc_mmu_pte_pflush(struct kvm_vcpu *vcpu, ulong pa_start, ulong pa_end) kvmppc_mmu_pte_pflush()
331 struct hpte_cache *kvmppc_mmu_hpte_cache_next(struct kvm_vcpu *vcpu) kvmppc_mmu_hpte_cache_next()
349 void kvmppc_mmu_hpte_destroy(struct kvm_vcpu *vcpu) kvmppc_mmu_hpte_destroy()
362 int kvmppc_mmu_hpte_init(struct kvm_vcpu *vcpu) kvmppc_mmu_hpte_init()
H A Dbook3s_32_mmu.c52 static inline bool check_debug_ip(struct kvm_vcpu *vcpu) check_debug_ip()
81 static int kvmppc_mmu_book3s_32_xlate_bat(struct kvm_vcpu *vcpu, gva_t eaddr,
84 static int kvmppc_mmu_book3s_32_esid_to_vsid(struct kvm_vcpu *vcpu, ulong esid,
87 static u32 find_sr(struct kvm_vcpu *vcpu, gva_t eaddr) find_sr()
92 static u64 kvmppc_mmu_book3s_32_ea_to_vp(struct kvm_vcpu *vcpu, gva_t eaddr, kvmppc_mmu_book3s_32_ea_to_vp()
105 static void kvmppc_mmu_book3s_32_reset_msr(struct kvm_vcpu *vcpu) kvmppc_mmu_book3s_32_reset_msr()
110 static hva_t kvmppc_mmu_book3s_32_get_pteg(struct kvm_vcpu *vcpu, kvmppc_mmu_book3s_32_get_pteg()
144 static int kvmppc_mmu_book3s_32_xlate_bat(struct kvm_vcpu *vcpu, gva_t eaddr, kvmppc_mmu_book3s_32_xlate_bat()
199 static int kvmppc_mmu_book3s_32_xlate_pte(struct kvm_vcpu *vcpu, gva_t eaddr, kvmppc_mmu_book3s_32_xlate_pte()
305 static int kvmppc_mmu_book3s_32_xlate(struct kvm_vcpu *vcpu, gva_t eaddr, kvmppc_mmu_book3s_32_xlate()
341 static u32 kvmppc_mmu_book3s_32_mfsrin(struct kvm_vcpu *vcpu, u32 srnum) kvmppc_mmu_book3s_32_mfsrin()
346 static void kvmppc_mmu_book3s_32_mtsrin(struct kvm_vcpu *vcpu, u32 srnum, kvmppc_mmu_book3s_32_mtsrin()
353 static void kvmppc_mmu_book3s_32_tlbie(struct kvm_vcpu *vcpu, ulong ea, bool large) kvmppc_mmu_book3s_32_tlbie()
356 struct kvm_vcpu *v; kvmppc_mmu_book3s_32_tlbie()
363 static int kvmppc_mmu_book3s_32_esid_to_vsid(struct kvm_vcpu *vcpu, ulong esid, kvmppc_mmu_book3s_32_esid_to_vsid()
406 static bool kvmppc_mmu_book3s_32_is_dcbz32(struct kvm_vcpu *vcpu) kvmppc_mmu_book3s_32_is_dcbz32()
412 void kvmppc_mmu_book3s_32_init(struct kvm_vcpu *vcpu) kvmppc_mmu_book3s_32_init()
H A Dbooke.c50 #define VCPU_STAT(x) offsetof(struct kvm_vcpu, stat.x), KVM_STAT_VCPU
75 void kvmppc_dump_vcpu(struct kvm_vcpu *vcpu) kvmppc_dump_vcpu()
96 void kvmppc_vcpu_disable_spe(struct kvm_vcpu *vcpu) kvmppc_vcpu_disable_spe()
105 static void kvmppc_vcpu_enable_spe(struct kvm_vcpu *vcpu) kvmppc_vcpu_enable_spe()
114 static void kvmppc_vcpu_sync_spe(struct kvm_vcpu *vcpu) kvmppc_vcpu_sync_spe()
124 static void kvmppc_vcpu_sync_spe(struct kvm_vcpu *vcpu) kvmppc_vcpu_sync_spe()
138 static inline void kvmppc_load_guest_fp(struct kvm_vcpu *vcpu) kvmppc_load_guest_fp()
154 static inline void kvmppc_save_guest_fp(struct kvm_vcpu *vcpu) kvmppc_save_guest_fp()
163 static void kvmppc_vcpu_sync_fpu(struct kvm_vcpu *vcpu) kvmppc_vcpu_sync_fpu()
178 static inline void kvmppc_load_guest_altivec(struct kvm_vcpu *vcpu) kvmppc_load_guest_altivec()
196 static inline void kvmppc_save_guest_altivec(struct kvm_vcpu *vcpu) kvmppc_save_guest_altivec()
207 static void kvmppc_vcpu_sync_debug(struct kvm_vcpu *vcpu) kvmppc_vcpu_sync_debug()
234 void kvmppc_set_msr(struct kvm_vcpu *vcpu, u32 new_msr) kvmppc_set_msr()
250 static void kvmppc_booke_queue_irqprio(struct kvm_vcpu *vcpu, kvmppc_booke_queue_irqprio()
257 void kvmppc_core_queue_dtlb_miss(struct kvm_vcpu *vcpu, kvmppc_core_queue_dtlb_miss()
265 void kvmppc_core_queue_data_storage(struct kvm_vcpu *vcpu, kvmppc_core_queue_data_storage()
273 void kvmppc_core_queue_itlb_miss(struct kvm_vcpu *vcpu) kvmppc_core_queue_itlb_miss()
278 void kvmppc_core_queue_inst_storage(struct kvm_vcpu *vcpu, ulong esr_flags) kvmppc_core_queue_inst_storage()
284 static void kvmppc_core_queue_alignment(struct kvm_vcpu *vcpu, ulong dear_flags, kvmppc_core_queue_alignment()
292 void kvmppc_core_queue_program(struct kvm_vcpu *vcpu, ulong esr_flags) kvmppc_core_queue_program()
298 void kvmppc_core_queue_dec(struct kvm_vcpu *vcpu) kvmppc_core_queue_dec()
303 int kvmppc_core_pending_dec(struct kvm_vcpu *vcpu) kvmppc_core_pending_dec()
308 void kvmppc_core_dequeue_dec(struct kvm_vcpu *vcpu) kvmppc_core_dequeue_dec()
313 void kvmppc_core_queue_external(struct kvm_vcpu *vcpu, kvmppc_core_queue_external()
324 void kvmppc_core_dequeue_external(struct kvm_vcpu *vcpu) kvmppc_core_dequeue_external()
330 static void kvmppc_core_queue_watchdog(struct kvm_vcpu *vcpu) kvmppc_core_queue_watchdog()
335 static void kvmppc_core_dequeue_watchdog(struct kvm_vcpu *vcpu) kvmppc_core_dequeue_watchdog()
340 void kvmppc_core_queue_debug(struct kvm_vcpu *vcpu) kvmppc_core_queue_debug()
345 void kvmppc_core_dequeue_debug(struct kvm_vcpu *vcpu) kvmppc_core_dequeue_debug()
350 static void set_guest_srr(struct kvm_vcpu *vcpu, unsigned long srr0, u32 srr1) set_guest_srr()
356 static void set_guest_csrr(struct kvm_vcpu *vcpu, unsigned long srr0, u32 srr1) set_guest_csrr()
362 static void set_guest_dsrr(struct kvm_vcpu *vcpu, unsigned long srr0, u32 srr1) set_guest_dsrr()
372 static void set_guest_mcsrr(struct kvm_vcpu *vcpu, unsigned long srr0, u32 srr1) set_guest_mcsrr()
379 static int kvmppc_booke_irqprio_deliver(struct kvm_vcpu *vcpu, kvmppc_booke_irqprio_deliver()
541 static unsigned long watchdog_next_timeout(struct kvm_vcpu *vcpu) watchdog_next_timeout()
567 static void arm_next_watchdog(struct kvm_vcpu *vcpu) arm_next_watchdog()
594 struct kvm_vcpu *vcpu = (struct kvm_vcpu *)data; kvmppc_watchdog_func()
640 static void update_timer_ints(struct kvm_vcpu *vcpu) update_timer_ints()
653 static void kvmppc_core_check_exceptions(struct kvm_vcpu *vcpu) kvmppc_core_check_exceptions()
673 int kvmppc_core_prepare_to_enter(struct kvm_vcpu *vcpu) kvmppc_core_prepare_to_enter()
698 int kvmppc_core_check_requests(struct kvm_vcpu *vcpu) kvmppc_core_check_requests()
724 int kvmppc_vcpu_run(struct kvm_run *kvm_run, struct kvm_vcpu *vcpu) kvmppc_vcpu_run()
794 static int emulation_exit(struct kvm_run *run, struct kvm_vcpu *vcpu) emulation_exit()
828 static int kvmppc_handle_debug(struct kvm_run *run, struct kvm_vcpu *vcpu) kvmppc_handle_debug()
901 static void kvmppc_restart_interrupt(struct kvm_vcpu *vcpu, kvmppc_restart_interrupt()
948 static int kvmppc_resume_inst_load(struct kvm_run *run, struct kvm_vcpu *vcpu, kvmppc_resume_inst_load()
975 int kvmppc_handle_exit(struct kvm_run *run, struct kvm_vcpu *vcpu, kvmppc_handle_exit()
1359 static void kvmppc_set_tsr(struct kvm_vcpu *vcpu, u32 new_tsr) kvmppc_set_tsr()
1372 int kvm_arch_vcpu_setup(struct kvm_vcpu *vcpu) kvm_arch_vcpu_setup()
1401 int kvmppc_subarch_vcpu_init(struct kvm_vcpu *vcpu) kvmppc_subarch_vcpu_init()
1416 void kvmppc_subarch_vcpu_uninit(struct kvm_vcpu *vcpu) kvmppc_subarch_vcpu_uninit()
1421 int kvm_arch_vcpu_ioctl_get_regs(struct kvm_vcpu *vcpu, struct kvm_regs *regs) kvm_arch_vcpu_ioctl_get_regs()
1449 int kvm_arch_vcpu_ioctl_set_regs(struct kvm_vcpu *vcpu, struct kvm_regs *regs) kvm_arch_vcpu_ioctl_set_regs()
1477 static void get_sregs_base(struct kvm_vcpu *vcpu, get_sregs_base()
1496 static int set_sregs_base(struct kvm_vcpu *vcpu, set_sregs_base()
1521 static void get_sregs_arch206(struct kvm_vcpu *vcpu, get_sregs_arch206()
1533 static int set_sregs_arch206(struct kvm_vcpu *vcpu, set_sregs_arch206()
1550 int kvmppc_get_sregs_ivor(struct kvm_vcpu *vcpu, struct kvm_sregs *sregs) kvmppc_get_sregs_ivor()
1573 int kvmppc_set_sregs_ivor(struct kvm_vcpu *vcpu, struct kvm_sregs *sregs) kvmppc_set_sregs_ivor()
1598 int kvm_arch_vcpu_ioctl_get_sregs(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_get_sregs()
1608 int kvm_arch_vcpu_ioctl_set_sregs(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_set_sregs()
1627 int kvmppc_get_one_reg(struct kvm_vcpu *vcpu, u64 id, kvmppc_get_one_reg()
1683 int kvmppc_set_one_reg(struct kvm_vcpu *vcpu, u64 id, kvmppc_set_one_reg()
1752 int kvm_arch_vcpu_ioctl_get_fpu(struct kvm_vcpu *vcpu, struct kvm_fpu *fpu) kvm_arch_vcpu_ioctl_get_fpu()
1757 int kvm_arch_vcpu_ioctl_set_fpu(struct kvm_vcpu *vcpu, struct kvm_fpu *fpu) kvm_arch_vcpu_ioctl_set_fpu()
1762 int kvm_arch_vcpu_ioctl_translate(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_translate()
1805 void kvmppc_set_epcr(struct kvm_vcpu *vcpu, u32 new_epcr) kvmppc_set_epcr()
1817 void kvmppc_set_tcr(struct kvm_vcpu *vcpu, u32 new_tcr) kvmppc_set_tcr()
1824 void kvmppc_set_tsr_bits(struct kvm_vcpu *vcpu, u32 tsr_bits) kvmppc_set_tsr_bits()
1832 void kvmppc_clr_tsr_bits(struct kvm_vcpu *vcpu, u32 tsr_bits) kvmppc_clr_tsr_bits()
1846 void kvmppc_decrementer_func(struct kvm_vcpu *vcpu) kvmppc_decrementer_func()
1911 void kvm_guest_protect_msr(struct kvm_vcpu *vcpu, ulong prot_bitmap, bool set) kvm_guest_protect_msr()
1934 int kvmppc_xlate(struct kvm_vcpu *vcpu, ulong eaddr, enum xlate_instdata xlid, kvmppc_xlate()
1985 int kvm_arch_vcpu_ioctl_set_guest_debug(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_set_guest_debug()
2056 void kvmppc_booke_vcpu_load(struct kvm_vcpu *vcpu, int cpu) kvmppc_booke_vcpu_load()
2059 current->thread.kvm_vcpu = vcpu; kvmppc_booke_vcpu_load()
2062 void kvmppc_booke_vcpu_put(struct kvm_vcpu *vcpu) kvmppc_booke_vcpu_put()
2064 current->thread.kvm_vcpu = NULL; kvmppc_booke_vcpu_put()
2071 void kvmppc_mmu_destroy(struct kvm_vcpu *vcpu) kvmppc_mmu_destroy()
2081 struct kvm_vcpu *kvmppc_core_vcpu_create(struct kvm *kvm, unsigned int id) kvmppc_core_vcpu_create()
2086 void kvmppc_core_vcpu_free(struct kvm_vcpu *vcpu) kvmppc_core_vcpu_free()
2096 void kvmppc_core_vcpu_load(struct kvm_vcpu *vcpu, int cpu) kvmppc_core_vcpu_load()
2101 void kvmppc_core_vcpu_put(struct kvm_vcpu *vcpu) kvmppc_core_vcpu_put()
H A De500mc.c31 void kvmppc_set_pending_interrupt(struct kvm_vcpu *vcpu, enum int_class type) kvmppc_set_pending_interrupt()
104 void kvmppc_set_pid(struct kvm_vcpu *vcpu, u32 pid) kvmppc_set_pid()
109 void kvmppc_mmu_msr_notify(struct kvm_vcpu *vcpu, u32 old_msr) kvmppc_mmu_msr_notify()
114 static DEFINE_PER_CPU(struct kvm_vcpu *[KVMPPC_NR_LPIDS], last_vcpu_of_lpid);
116 static void kvmppc_core_vcpu_load_e500mc(struct kvm_vcpu *vcpu, int cpu) kvmppc_core_vcpu_load_e500mc()
153 static void kvmppc_core_vcpu_put_e500mc(struct kvm_vcpu *vcpu) kvmppc_core_vcpu_put_e500mc()
199 int kvmppc_core_vcpu_setup(struct kvm_vcpu *vcpu) kvmppc_core_vcpu_setup()
218 static int kvmppc_core_get_sregs_e500mc(struct kvm_vcpu *vcpu, kvmppc_core_get_sregs_e500mc()
242 static int kvmppc_core_set_sregs_e500mc(struct kvm_vcpu *vcpu, kvmppc_core_set_sregs_e500mc()
276 static int kvmppc_get_one_reg_e500mc(struct kvm_vcpu *vcpu, u64 id, kvmppc_get_one_reg_e500mc()
292 static int kvmppc_set_one_reg_e500mc(struct kvm_vcpu *vcpu, u64 id, kvmppc_set_one_reg_e500mc()
308 static struct kvm_vcpu *kvmppc_core_vcpu_create_e500mc(struct kvm *kvm, kvmppc_core_vcpu_create_e500mc()
312 struct kvm_vcpu *vcpu; kvmppc_core_vcpu_create_e500mc()
350 static void kvmppc_core_vcpu_free_e500mc(struct kvm_vcpu *vcpu) kvmppc_core_vcpu_free_e500mc()
H A Dbook3s_pr_papr.c26 static unsigned long get_pteg_addr(struct kvm_vcpu *vcpu, long pte_index) get_pteg_addr()
39 static int kvmppc_h_pr_enter(struct kvm_vcpu *vcpu) kvmppc_h_pr_enter()
85 static int kvmppc_h_pr_remove(struct kvm_vcpu *vcpu) kvmppc_h_pr_remove()
140 static int kvmppc_h_pr_bulk_remove(struct kvm_vcpu *vcpu) kvmppc_h_pr_bulk_remove()
203 static int kvmppc_h_pr_protect(struct kvm_vcpu *vcpu) kvmppc_h_pr_protect()
247 static int kvmppc_h_pr_put_tce(struct kvm_vcpu *vcpu) kvmppc_h_pr_put_tce()
261 static int kvmppc_h_pr_logical_ci_load(struct kvm_vcpu *vcpu) kvmppc_h_pr_logical_ci_load()
272 static int kvmppc_h_pr_logical_ci_store(struct kvm_vcpu *vcpu) kvmppc_h_pr_logical_ci_store()
283 static int kvmppc_h_pr_xics_hcall(struct kvm_vcpu *vcpu, u32 cmd) kvmppc_h_pr_xics_hcall()
290 int kvmppc_h_pr(struct kvm_vcpu *vcpu, unsigned long cmd) kvmppc_h_pr()
H A De500_mmu.c67 static unsigned int get_tlb_esel(struct kvm_vcpu *vcpu, int tlbsel) get_tlb_esel()
129 static inline void kvmppc_e500_deliver_tlb_miss(struct kvm_vcpu *vcpu, kvmppc_e500_deliver_tlb_miss()
196 static void kvmppc_set_tlb1map_range(struct kvm_vcpu *vcpu, kvmppc_set_tlb1map_range()
248 int kvmppc_e500_emul_tlbivax(struct kvm_vcpu *vcpu, gva_t ea) kvmppc_e500_emul_tlbivax()
310 int kvmppc_e500_emul_tlbilx(struct kvm_vcpu *vcpu, int type, gva_t ea) kvmppc_e500_emul_tlbilx()
325 int kvmppc_e500_emul_tlbre(struct kvm_vcpu *vcpu) kvmppc_e500_emul_tlbre()
344 int kvmppc_e500_emul_tlbsx(struct kvm_vcpu *vcpu, gva_t ea) kvmppc_e500_emul_tlbsx()
393 int kvmppc_e500_emul_tlbwe(struct kvm_vcpu *vcpu) kvmppc_e500_emul_tlbwe()
456 static int kvmppc_e500_tlb_search(struct kvm_vcpu *vcpu, kvmppc_e500_tlb_search()
472 int kvmppc_core_vcpu_translate(struct kvm_vcpu *vcpu, kvmppc_core_vcpu_translate()
498 int kvmppc_mmu_itlb_index(struct kvm_vcpu *vcpu, gva_t eaddr) kvmppc_mmu_itlb_index()
505 int kvmppc_mmu_dtlb_index(struct kvm_vcpu *vcpu, gva_t eaddr) kvmppc_mmu_dtlb_index()
512 void kvmppc_mmu_itlb_miss(struct kvm_vcpu *vcpu) kvmppc_mmu_itlb_miss()
519 void kvmppc_mmu_dtlb_miss(struct kvm_vcpu *vcpu) kvmppc_mmu_dtlb_miss()
526 gpa_t kvmppc_mmu_xlate(struct kvm_vcpu *vcpu, unsigned int index, kvmppc_mmu_xlate()
539 void kvmppc_mmu_destroy_e500(struct kvm_vcpu *vcpu) kvmppc_mmu_destroy_e500()
574 void kvmppc_get_sregs_e500_tlb(struct kvm_vcpu *vcpu, struct kvm_sregs *sregs) kvmppc_get_sregs_e500_tlb()
590 int kvmppc_set_sregs_e500_tlb(struct kvm_vcpu *vcpu, struct kvm_sregs *sregs) kvmppc_set_sregs_e500_tlb()
604 int kvmppc_get_one_reg_e500_tlb(struct kvm_vcpu *vcpu, u64 id, kvmppc_get_one_reg_e500_tlb()
657 int kvmppc_set_one_reg_e500_tlb(struct kvm_vcpu *vcpu, u64 id, kvmppc_set_one_reg_e500_tlb()
724 static int vcpu_mmu_geometry_update(struct kvm_vcpu *vcpu, vcpu_mmu_geometry_update()
738 int kvm_vcpu_ioctl_config_tlb(struct kvm_vcpu *vcpu, kvm_vcpu_ioctl_config_tlb()
862 int kvm_vcpu_ioctl_dirty_tlb(struct kvm_vcpu *vcpu, kvm_vcpu_ioctl_dirty_tlb()
872 static int vcpu_mmu_init(struct kvm_vcpu *vcpu, vcpu_mmu_init()
906 struct kvm_vcpu *vcpu = &vcpu_e500->vcpu; kvmppc_e500_tlb_init()
H A Dbook3s_64_mmu.c39 static void kvmppc_mmu_book3s_64_reset_msr(struct kvm_vcpu *vcpu) kvmppc_mmu_book3s_64_reset_msr()
45 struct kvm_vcpu *vcpu, kvmppc_mmu_book3s_64_find_slbe()
98 static u64 kvmppc_mmu_book3s_64_ea_to_vp(struct kvm_vcpu *vcpu, gva_t eaddr, kvmppc_mmu_book3s_64_ea_to_vp()
133 static hva_t kvmppc_mmu_book3s_64_get_pteg(struct kvm_vcpu *vcpu, kvmppc_mmu_book3s_64_get_pteg()
208 static int kvmppc_mmu_book3s_64_xlate(struct kvm_vcpu *vcpu, gva_t eaddr, kvmppc_mmu_book3s_64_xlate()
378 static void kvmppc_mmu_book3s_64_slbmte(struct kvm_vcpu *vcpu, u64 rs, u64 rb) kvmppc_mmu_book3s_64_slbmte()
430 static u64 kvmppc_mmu_book3s_64_slbmfee(struct kvm_vcpu *vcpu, u64 slb_nr) kvmppc_mmu_book3s_64_slbmfee()
442 static u64 kvmppc_mmu_book3s_64_slbmfev(struct kvm_vcpu *vcpu, u64 slb_nr) kvmppc_mmu_book3s_64_slbmfev()
454 static void kvmppc_mmu_book3s_64_slbie(struct kvm_vcpu *vcpu, u64 ea) kvmppc_mmu_book3s_64_slbie()
476 static void kvmppc_mmu_book3s_64_slbia(struct kvm_vcpu *vcpu) kvmppc_mmu_book3s_64_slbia()
494 static void kvmppc_mmu_book3s_64_mtsrin(struct kvm_vcpu *vcpu, u32 srnum, kvmppc_mmu_book3s_64_mtsrin()
534 static void kvmppc_mmu_book3s_64_tlbie(struct kvm_vcpu *vcpu, ulong va, kvmppc_mmu_book3s_64_tlbie()
539 struct kvm_vcpu *v; kvmppc_mmu_book3s_64_tlbie()
568 static int segment_contains_magic_page(struct kvm_vcpu *vcpu, ulong esid) segment_contains_magic_page()
577 static int kvmppc_mmu_book3s_64_esid_to_vsid(struct kvm_vcpu *vcpu, ulong esid, kvmppc_mmu_book3s_64_esid_to_vsid()
651 static bool kvmppc_mmu_book3s_64_is_dcbz32(struct kvm_vcpu *vcpu) kvmppc_mmu_book3s_64_is_dcbz32()
656 void kvmppc_mmu_book3s_64_init(struct kvm_vcpu *vcpu) kvmppc_mmu_book3s_64_init()
H A Dbook3s_64_mmu_host.c35 void kvmppc_mmu_invalidate_pte(struct kvm_vcpu *vcpu, struct hpte_cache *pte) kvmppc_mmu_invalidate_pte()
44 static u16 kvmppc_sid_hash(struct kvm_vcpu *vcpu, u64 gvsid) kvmppc_sid_hash()
57 static struct kvmppc_sid_map *find_sid_vsid(struct kvm_vcpu *vcpu, u64 gvsid) find_sid_vsid()
82 int kvmppc_mmu_map_page(struct kvm_vcpu *vcpu, struct kvmppc_pte *orig_pte, kvmppc_mmu_map_page()
217 void kvmppc_mmu_unmap_page(struct kvm_vcpu *vcpu, struct kvmppc_pte *pte) kvmppc_mmu_unmap_page()
228 static struct kvmppc_sid_map *create_sid_map(struct kvm_vcpu *vcpu, u64 gvsid) create_sid_map()
268 static int kvmppc_mmu_next_segment(struct kvm_vcpu *vcpu, ulong esid) kvmppc_mmu_next_segment()
309 int kvmppc_mmu_map_segment(struct kvm_vcpu *vcpu, ulong eaddr) kvmppc_mmu_map_segment()
355 void kvmppc_mmu_flush_segment(struct kvm_vcpu *vcpu, ulong ea, ulong seg_size) kvmppc_mmu_flush_segment()
372 void kvmppc_mmu_flush_segments(struct kvm_vcpu *vcpu) kvmppc_mmu_flush_segments()
380 void kvmppc_mmu_destroy_pr(struct kvm_vcpu *vcpu) kvmppc_mmu_destroy_pr()
386 int kvmppc_mmu_init(struct kvm_vcpu *vcpu) kvmppc_mmu_init()
H A Dbook3s_xics.h63 struct kvm_vcpu *vcpu;
76 struct kvm_vcpu *rm_kick_target;
92 struct kvm_vcpu *rm_dbgtgt;
116 struct kvm_vcpu *vcpu = NULL; kvmppc_xics_find_server()
H A Dpowerpc.c49 int kvm_arch_vcpu_runnable(struct kvm_vcpu *v) kvm_arch_vcpu_runnable()
55 int kvm_arch_vcpu_should_kick(struct kvm_vcpu *vcpu) kvm_arch_vcpu_should_kick()
69 int kvmppc_prepare_to_enter(struct kvm_vcpu *vcpu) kvmppc_prepare_to_enter()
129 static void kvmppc_swab_shared(struct kvm_vcpu *vcpu) kvmppc_swab_shared()
149 int kvmppc_kvm_pv(struct kvm_vcpu *vcpu) kvmppc_kvm_pv()
241 int kvmppc_sanity_check(struct kvm_vcpu *vcpu) kvmppc_sanity_check()
270 int kvmppc_emulate_mmio(struct kvm_run *run, struct kvm_vcpu *vcpu) kvmppc_emulate_mmio()
312 int kvmppc_st(struct kvm_vcpu *vcpu, ulong *eaddr, int size, void *ptr, kvmppc_st()
348 int kvmppc_ld(struct kvm_vcpu *vcpu, ulong *eaddr, int size, void *ptr, kvmppc_ld()
438 struct kvm_vcpu *vcpu; kvm_arch_destroy_vm()
622 struct kvm_vcpu *kvm_arch_vcpu_create(struct kvm *kvm, unsigned int id) kvm_arch_vcpu_create()
624 struct kvm_vcpu *vcpu; kvm_arch_vcpu_create()
633 void kvm_arch_vcpu_postcreate(struct kvm_vcpu *vcpu) kvm_arch_vcpu_postcreate()
637 void kvm_arch_vcpu_free(struct kvm_vcpu *vcpu) kvm_arch_vcpu_free()
656 void kvm_arch_vcpu_destroy(struct kvm_vcpu *vcpu) kvm_arch_vcpu_destroy()
661 int kvm_cpu_has_pending_timer(struct kvm_vcpu *vcpu) kvm_cpu_has_pending_timer()
668 struct kvm_vcpu *vcpu; kvmppc_decrementer_wakeup()
670 vcpu = container_of(timer, struct kvm_vcpu, arch.dec_timer); kvmppc_decrementer_wakeup()
676 int kvm_arch_vcpu_init(struct kvm_vcpu *vcpu) kvm_arch_vcpu_init()
691 void kvm_arch_vcpu_uninit(struct kvm_vcpu *vcpu) kvm_arch_vcpu_uninit()
697 void kvm_arch_vcpu_load(struct kvm_vcpu *vcpu, int cpu) kvm_arch_vcpu_load()
712 void kvm_arch_vcpu_put(struct kvm_vcpu *vcpu) kvm_arch_vcpu_put()
720 static void kvmppc_complete_mmio_load(struct kvm_vcpu *vcpu, kvmppc_complete_mmio_load()
785 int kvmppc_handle_load(struct kvm_run *run, struct kvm_vcpu *vcpu, kvmppc_handle_load()
832 int kvmppc_handle_loads(struct kvm_run *run, struct kvm_vcpu *vcpu, kvmppc_handle_loads()
844 int kvmppc_handle_store(struct kvm_run *run, struct kvm_vcpu *vcpu, kvmppc_handle_store()
902 int kvm_vcpu_ioctl_get_one_reg(struct kvm_vcpu *vcpu, struct kvm_one_reg *reg) kvm_vcpu_ioctl_get_one_reg()
950 int kvm_vcpu_ioctl_set_one_reg(struct kvm_vcpu *vcpu, struct kvm_one_reg *reg) kvm_vcpu_ioctl_set_one_reg()
999 int kvm_arch_vcpu_ioctl_run(struct kvm_vcpu *vcpu, struct kvm_run *run) kvm_arch_vcpu_ioctl_run()
1040 int kvm_vcpu_ioctl_interrupt(struct kvm_vcpu *vcpu, struct kvm_interrupt *irq) kvm_vcpu_ioctl_interrupt()
1054 static int kvm_vcpu_ioctl_enable_cap(struct kvm_vcpu *vcpu, kvm_vcpu_ioctl_enable_cap()
1146 int kvm_arch_vcpu_ioctl_get_mpstate(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_get_mpstate()
1152 int kvm_arch_vcpu_ioctl_set_mpstate(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_set_mpstate()
1161 struct kvm_vcpu *vcpu = filp->private_data; kvm_arch_vcpu_ioctl()
1217 int kvm_arch_vcpu_fault(struct kvm_vcpu *vcpu, struct vm_fault *vmf) kvm_arch_vcpu_fault()
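
The powerpc.c hits above are the architecture backends behind the generic vcpu file descriptor: kvm_arch_vcpu_create() is reached from KVM_CREATE_VCPU, kvm_arch_vcpu_ioctl() from vcpu ioctls, and kvm_arch_vcpu_ioctl_run() from KVM_RUN. A minimal user-space sketch of that path is shown below; error handling and all guest setup (memory slots, entry state) are omitted, so on a real host KVM_RUN returns immediately or fails, but the ioctl sequence itself is the standard /dev/kvm API.

    #include <fcntl.h>
    #include <stdio.h>
    #include <sys/ioctl.h>
    #include <sys/mman.h>
    #include <linux/kvm.h>

    int main(void)
    {
        int kvm  = open("/dev/kvm", O_RDWR);        /* top-level KVM device */
        int vm   = ioctl(kvm, KVM_CREATE_VM, 0);    /* new VM file descriptor */
        int vcpu = ioctl(vm, KVM_CREATE_VCPU, 0);   /* ends up in kvm_arch_vcpu_create() */

        long sz = ioctl(kvm, KVM_GET_VCPU_MMAP_SIZE, 0);
        struct kvm_run *run = mmap(NULL, sz, PROT_READ | PROT_WRITE,
                                   MAP_SHARED, vcpu, 0);

        /* With no guest memory or entry state this returns straight away,
         * but it exercises the same path that lands in kvm_arch_vcpu_ioctl_run(). */
        ioctl(vcpu, KVM_RUN, 0);
        printf("exit_reason = %u\n", run->exit_reason);
        return 0;
    }
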
H A Dbook3s_32_mmu_host.c61 void kvmppc_mmu_invalidate_pte(struct kvm_vcpu *vcpu, struct hpte_cache *pte) kvmppc_mmu_invalidate_pte()
78 static u16 kvmppc_sid_hash(struct kvm_vcpu *vcpu, u64 gvsid) kvmppc_sid_hash()
91 static struct kvmppc_sid_map *find_sid_vsid(struct kvm_vcpu *vcpu, u64 gvsid) find_sid_vsid()
118 static u32 *kvmppc_mmu_get_pteg(struct kvm_vcpu *vcpu, u32 vsid, u32 eaddr, kvmppc_mmu_get_pteg()
142 int kvmppc_mmu_map_page(struct kvm_vcpu *vcpu, struct kvmppc_pte *orig_pte, kvmppc_mmu_map_page()
270 void kvmppc_mmu_unmap_page(struct kvm_vcpu *vcpu, struct kvmppc_pte *pte) kvmppc_mmu_unmap_page()
275 static struct kvmppc_sid_map *create_sid_map(struct kvm_vcpu *vcpu, u64 gvsid) create_sid_map()
314 int kvmppc_mmu_map_segment(struct kvm_vcpu *vcpu, ulong eaddr) kvmppc_mmu_map_segment()
345 void kvmppc_mmu_flush_segments(struct kvm_vcpu *vcpu) kvmppc_mmu_flush_segments()
357 void kvmppc_mmu_destroy_pr(struct kvm_vcpu *vcpu) kvmppc_mmu_destroy_pr()
371 int kvmppc_mmu_init(struct kvm_vcpu *vcpu) kvmppc_mmu_init()
H A De500.c218 unsigned int kvmppc_e500_get_tlb_stid(struct kvm_vcpu *vcpu, kvmppc_e500_get_tlb_stid()
225 void kvmppc_set_pid(struct kvm_vcpu *vcpu, u32 pid) kvmppc_set_pid()
297 void kvmppc_mmu_msr_notify(struct kvm_vcpu *vcpu, u32 old_msr) kvmppc_mmu_msr_notify()
303 static void kvmppc_core_vcpu_load_e500(struct kvm_vcpu *vcpu, int cpu) kvmppc_core_vcpu_load_e500()
311 static void kvmppc_core_vcpu_put_e500(struct kvm_vcpu *vcpu) kvmppc_core_vcpu_put_e500()
350 int kvmppc_core_vcpu_setup(struct kvm_vcpu *vcpu) kvmppc_core_vcpu_setup()
365 static int kvmppc_core_get_sregs_e500(struct kvm_vcpu *vcpu, kvmppc_core_get_sregs_e500()
390 static int kvmppc_core_set_sregs_e500(struct kvm_vcpu *vcpu, kvmppc_core_set_sregs_e500()
426 static int kvmppc_get_one_reg_e500(struct kvm_vcpu *vcpu, u64 id, kvmppc_get_one_reg_e500()
433 static int kvmppc_set_one_reg_e500(struct kvm_vcpu *vcpu, u64 id, kvmppc_set_one_reg_e500()
440 static struct kvm_vcpu *kvmppc_core_vcpu_create_e500(struct kvm *kvm, kvmppc_core_vcpu_create_e500()
444 struct kvm_vcpu *vcpu; kvmppc_core_vcpu_create_e500()
483 static void kvmppc_core_vcpu_free_e500(struct kvm_vcpu *vcpu) kvmppc_core_vcpu_free_e500()
H A Dbook3s_pr.c54 static int kvmppc_handle_ext(struct kvm_vcpu *vcpu, unsigned int exit_nr,
56 static void kvmppc_giveup_fac(struct kvm_vcpu *vcpu, ulong fac);
65 static bool kvmppc_is_split_real(struct kvm_vcpu *vcpu) kvmppc_is_split_real()
71 static void kvmppc_fixup_split_real(struct kvm_vcpu *vcpu) kvmppc_fixup_split_real()
92 void kvmppc_unfixup_split_real(struct kvm_vcpu *vcpu);
94 static void kvmppc_core_vcpu_load_pr(struct kvm_vcpu *vcpu, int cpu) kvmppc_core_vcpu_load_pr()
118 static void kvmppc_core_vcpu_put_pr(struct kvm_vcpu *vcpu) kvmppc_core_vcpu_put_pr()
146 struct kvm_vcpu *vcpu) kvmppc_copy_to_svcpu()
182 void kvmppc_copy_from_svcpu(struct kvm_vcpu *vcpu, kvmppc_copy_from_svcpu()
238 static int kvmppc_core_check_requests_pr(struct kvm_vcpu *vcpu) kvmppc_core_check_requests_pr()
255 struct kvm_vcpu *vcpu; do_kvm_unmap_hva()
319 static void kvmppc_recalc_shadow_msr(struct kvm_vcpu *vcpu) kvmppc_recalc_shadow_msr()
337 static void kvmppc_set_msr_pr(struct kvm_vcpu *vcpu, u64 msr) kvmppc_set_msr_pr()
402 void kvmppc_set_pvr_pr(struct kvm_vcpu *vcpu, u32 pvr) kvmppc_set_pvr_pr()
488 static void kvmppc_patch_dcbz(struct kvm_vcpu *vcpu, struct kvmppc_pte *pte) kvmppc_patch_dcbz()
515 static int kvmppc_visible_gpa(struct kvm_vcpu *vcpu, gpa_t gpa) kvmppc_visible_gpa()
530 int kvmppc_handle_pagefault(struct kvm_run *run, struct kvm_vcpu *vcpu, kvmppc_handle_pagefault()
648 void kvmppc_giveup_ext(struct kvm_vcpu *vcpu, ulong msr) kvmppc_giveup_ext()
691 static void kvmppc_giveup_fac(struct kvm_vcpu *vcpu, ulong fac) kvmppc_giveup_fac()
710 static int kvmppc_handle_ext(struct kvm_vcpu *vcpu, unsigned int exit_nr, kvmppc_handle_ext()
779 static void kvmppc_handle_lost_ext(struct kvm_vcpu *vcpu) kvmppc_handle_lost_ext()
806 static void kvmppc_trigger_fac_interrupt(struct kvm_vcpu *vcpu, ulong fac) kvmppc_trigger_fac_interrupt()
814 static void kvmppc_emulate_fac(struct kvm_vcpu *vcpu, ulong fac) kvmppc_emulate_fac()
828 static int kvmppc_handle_fac(struct kvm_vcpu *vcpu, ulong fac) kvmppc_handle_fac()
871 void kvmppc_set_fscr(struct kvm_vcpu *vcpu, u64 fscr) kvmppc_set_fscr()
881 int kvmppc_handle_exit_pr(struct kvm_run *run, struct kvm_vcpu *vcpu, kvmppc_handle_exit_pr()
1249 static int kvm_arch_vcpu_ioctl_get_sregs_pr(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_get_sregs_pr()
1276 static int kvm_arch_vcpu_ioctl_set_sregs_pr(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_set_sregs_pr()
1312 static int kvmppc_get_one_reg_pr(struct kvm_vcpu *vcpu, u64 id, kvmppc_get_one_reg_pr()
1342 static void kvmppc_set_lpcr_pr(struct kvm_vcpu *vcpu, u64 new_lpcr) kvmppc_set_lpcr_pr()
1350 static int kvmppc_set_one_reg_pr(struct kvm_vcpu *vcpu, u64 id, kvmppc_set_one_reg_pr()
1372 static struct kvm_vcpu *kvmppc_core_vcpu_create_pr(struct kvm *kvm, kvmppc_core_vcpu_create_pr()
1376 struct kvm_vcpu *vcpu; kvmppc_core_vcpu_create_pr()
1451 static void kvmppc_core_vcpu_free_pr(struct kvm_vcpu *vcpu) kvmppc_core_vcpu_free_pr()
1464 static int kvmppc_vcpu_run_pr(struct kvm_run *kvm_run, struct kvm_vcpu *vcpu) kvmppc_vcpu_run_pr()
1535 struct kvm_vcpu *vcpu; kvm_vm_ioctl_get_dirty_log_pr()
1607 struct kvm_vcpu *vcpu; kvm_vm_ioctl_get_smmu_info_pr()
H A Dtiming.c33 void kvmppc_init_timing_stats(struct kvm_vcpu *vcpu) kvmppc_init_timing_stats()
55 static void add_exit_timing(struct kvm_vcpu *vcpu, u64 duration, int type) add_exit_timing()
93 void kvmppc_update_timing_stats(struct kvm_vcpu *vcpu) kvmppc_update_timing_stats()
142 struct kvm_vcpu *vcpu = m->private; kvmppc_exit_timing_show()
191 struct kvm_vcpu *vcpu = seqf->private; kvmppc_exit_timing_write()
219 void kvmppc_create_vcpu_debugfs(struct kvm_vcpu *vcpu, unsigned int id) kvmppc_create_vcpu_debugfs()
239 void kvmppc_remove_vcpu_debugfs(struct kvm_vcpu *vcpu) kvmppc_remove_vcpu_debugfs()
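
add_exit_timing() above folds the duration of one guest exit into per-exit-type statistics that kvmppc_exit_timing_show() later prints through debugfs. A standalone sketch of that accumulation, assuming an illustrative min/max/sum/count layout rather than the kernel's actual fields:

    #include <stdio.h>

    /* Illustrative per-exit-type accumulator; the real data lives in the
     * vcpu's arch timing arrays and is indexed by exit type. */
    struct exit_timing { unsigned long long sum, min, max, count; };

    static void add_exit_timing(struct exit_timing *t, unsigned long long duration)
    {
        if (!t->count || duration < t->min)
            t->min = duration;
        if (duration > t->max)
            t->max = duration;
        t->sum += duration;
        t->count++;
    }

    int main(void)
    {
        struct exit_timing dec = { 0, 0, 0, 0 };
        add_exit_timing(&dec, 120);
        add_exit_timing(&dec, 80);
        printf("count=%llu min=%llu max=%llu avg=%llu\n",
               dec.count, dec.min, dec.max, dec.sum / dec.count);
        return 0;
    }
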
H A De500_emulate.c54 static int kvmppc_e500_emul_msgclr(struct kvm_vcpu *vcpu, int rb) kvmppc_e500_emul_msgclr()
66 static int kvmppc_e500_emul_msgsnd(struct kvm_vcpu *vcpu, int rb) kvmppc_e500_emul_msgsnd()
72 struct kvm_vcpu *cvcpu; kvmppc_e500_emul_msgsnd()
89 static int kvmppc_e500_emul_ehpriv(struct kvm_run *run, struct kvm_vcpu *vcpu, kvmppc_e500_emul_ehpriv()
109 static int kvmppc_e500_emul_dcbtls(struct kvm_vcpu *vcpu) kvmppc_e500_emul_dcbtls()
118 static int kvmppc_e500_emul_mftmr(struct kvm_vcpu *vcpu, unsigned int inst, kvmppc_e500_emul_mftmr()
131 int kvmppc_core_emulate_op_e500(struct kvm_run *run, struct kvm_vcpu *vcpu, kvmppc_core_emulate_op_e500()
208 int kvmppc_core_emulate_mtspr_e500(struct kvm_vcpu *vcpu, int sprn, ulong spr_val) kvmppc_core_emulate_mtspr_e500()
318 int kvmppc_core_emulate_mfspr_e500(struct kvm_vcpu *vcpu, int sprn, ulong *spr_val) kvmppc_core_emulate_mfspr_e500()
H A Dbook3s_hv.c84 static void kvmppc_end_cede(struct kvm_vcpu *vcpu);
85 static int kvmppc_hv_setup_htab_rma(struct kvm_vcpu *vcpu);
114 static void kvmppc_fast_vcpu_kick_hv(struct kvm_vcpu *vcpu) kvmppc_fast_vcpu_kick_hv()
188 static void kvmppc_core_vcpu_load_hv(struct kvm_vcpu *vcpu, int cpu) kvmppc_core_vcpu_load_hv()
211 static void kvmppc_core_vcpu_put_hv(struct kvm_vcpu *vcpu) kvmppc_core_vcpu_put_hv()
225 static void kvmppc_set_msr_hv(struct kvm_vcpu *vcpu, u64 msr) kvmppc_set_msr_hv()
237 static void kvmppc_set_pvr_hv(struct kvm_vcpu *vcpu, u32 pvr) kvmppc_set_pvr_hv()
242 static int kvmppc_set_arch_compat(struct kvm_vcpu *vcpu, u32 arch_compat) kvmppc_set_arch_compat()
282 static void kvmppc_dump_regs(struct kvm_vcpu *vcpu) kvmppc_dump_regs()
315 static struct kvm_vcpu *kvmppc_find_vcpu(struct kvm *kvm, int id) kvmppc_find_vcpu()
318 struct kvm_vcpu *v, *ret = NULL; kvmppc_find_vcpu()
331 static void init_vpa(struct kvm_vcpu *vcpu, struct lppaca *vpa) init_vpa()
337 static int set_vpa(struct kvm_vcpu *vcpu, struct kvmppc_vpa *v, set_vpa()
369 static unsigned long do_h_register_vpa(struct kvm_vcpu *vcpu, do_h_register_vpa()
376 struct kvm_vcpu *tvcpu; do_h_register_vpa()
479 static void kvmppc_update_vpa(struct kvm_vcpu *vcpu, struct kvmppc_vpa *vpap) kvmppc_update_vpa()
529 static void kvmppc_update_vpas(struct kvm_vcpu *vcpu) kvmppc_update_vpas()
570 static void kvmppc_create_dtl_entry(struct kvm_vcpu *vcpu, kvmppc_create_dtl_entry()
608 static bool kvmppc_power8_compatible(struct kvm_vcpu *vcpu) kvmppc_power8_compatible()
618 static int kvmppc_h_set_mode(struct kvm_vcpu *vcpu, unsigned long mflags, kvmppc_h_set_mode()
650 static int kvm_arch_vcpu_yield_to(struct kvm_vcpu *target) kvm_arch_vcpu_yield_to()
672 static int kvmppc_get_yield_count(struct kvm_vcpu *vcpu) kvmppc_get_yield_count()
685 int kvmppc_pseries_do_hcall(struct kvm_vcpu *vcpu) kvmppc_pseries_do_hcall()
690 struct kvm_vcpu *tvcpu; kvmppc_pseries_do_hcall()
812 struct kvm_vcpu *vcpu) kvmppc_emulate_debug_inst()
835 static int kvmppc_handle_exit_hv(struct kvm_run *run, struct kvm_vcpu *vcpu, kvmppc_handle_exit_hv()
958 static int kvm_arch_vcpu_ioctl_get_sregs_hv(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_get_sregs_hv()
973 static int kvm_arch_vcpu_ioctl_set_sregs_hv(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_set_sregs_hv()
995 static void kvmppc_set_lpcr(struct kvm_vcpu *vcpu, u64 new_lpcr, kvmppc_set_lpcr()
1009 struct kvm_vcpu *vcpu; kvmppc_set_lpcr()
1039 static int kvmppc_get_one_reg_hv(struct kvm_vcpu *vcpu, u64 id, kvmppc_get_one_reg_hv()
1231 static int kvmppc_set_one_reg_hv(struct kvm_vcpu *vcpu, u64 id, kvmppc_set_one_reg_hv()
1465 {"rm_entry", offsetof(struct kvm_vcpu, arch.rm_entry)},
1466 {"rm_intr", offsetof(struct kvm_vcpu, arch.rm_intr)},
1467 {"rm_exit", offsetof(struct kvm_vcpu, arch.rm_exit)},
1468 {"guest", offsetof(struct kvm_vcpu, arch.guest_time)},
1469 {"cede", offsetof(struct kvm_vcpu, arch.cede_time)},
1475 struct kvm_vcpu *vcpu;
1482 struct kvm_vcpu *vcpu = inode->i_private; debugfs_timings_open()
1509 struct kvm_vcpu *vcpu = p->vcpu; debugfs_timings_read()
1586 static void debugfs_vcpu_init(struct kvm_vcpu *vcpu, unsigned int id) debugfs_vcpu_init()
1603 static void debugfs_vcpu_init(struct kvm_vcpu *vcpu, unsigned int id) debugfs_vcpu_init()
1608 static struct kvm_vcpu *kvmppc_core_vcpu_create_hv(struct kvm *kvm, kvmppc_core_vcpu_create_hv()
1611 struct kvm_vcpu *vcpu; kvmppc_core_vcpu_create_hv()
1695 static void kvmppc_core_vcpu_free_hv(struct kvm_vcpu *vcpu) kvmppc_core_vcpu_free_hv()
1706 static int kvmppc_core_check_requests_hv(struct kvm_vcpu *vcpu) kvmppc_core_check_requests_hv()
1712 static void kvmppc_set_timer(struct kvm_vcpu *vcpu) kvmppc_set_timer()
1730 static void kvmppc_end_cede(struct kvm_vcpu *vcpu) kvmppc_end_cede()
1742 struct kvm_vcpu *vcpu) kvmppc_remove_runnable()
1767 tpaca->kvm_hstate.kvm_vcpu = NULL; kvmppc_grab_hwthread()
1799 tpaca->kvm_hstate.kvm_vcpu = NULL; kvmppc_release_hwthread()
1804 static void kvmppc_start_thread(struct kvm_vcpu *vcpu, struct kvmppc_vcore *vc) kvmppc_start_thread()
1821 tpaca->kvm_hstate.kvm_vcpu = vcpu; kvmppc_start_thread()
1823 /* Order stores to hstate.kvm_vcpu etc. before store to kvm_vcore */ kvmppc_start_thread()
2163 struct kvm_vcpu *vcpu, *vnext; prepare_threads()
2216 struct kvm_vcpu *vcpu, *vnext; post_guest_process()
2262 struct kvm_vcpu, arch.run_list); post_guest_process()
2275 struct kvm_vcpu *vcpu, *vnext; kvmppc_run_core()
2501 struct kvm_vcpu *vcpu, int wait_state) kvmppc_wait_for_exec()
2520 struct kvm_vcpu *vcpu; kvmppc_vcore_blocked()
2553 static int kvmppc_run_vcpu(struct kvm_run *kvm_run, struct kvm_vcpu *vcpu) kvmppc_run_vcpu()
2557 struct kvm_vcpu *v, *vn; kvmppc_run_vcpu()
2671 struct kvm_vcpu, arch.run_list); kvmppc_run_vcpu()
2680 static int kvmppc_vcpu_run_hv(struct kvm_run *run, struct kvm_vcpu *vcpu) kvmppc_vcpu_run_hv()
2893 static void kvmppc_mmu_destroy_hv(struct kvm_vcpu *vcpu) kvmppc_mmu_destroy_hv()
2898 static int kvmppc_hv_setup_htab_rma(struct kvm_vcpu *vcpu) kvmppc_hv_setup_htab_rma()
3052 static int kvmppc_core_emulate_op_hv(struct kvm_run *run, struct kvm_vcpu *vcpu, kvmppc_core_emulate_op_hv()
3058 static int kvmppc_core_emulate_mtspr_hv(struct kvm_vcpu *vcpu, int sprn, kvmppc_core_emulate_mtspr_hv()
3064 static int kvmppc_core_emulate_mfspr_hv(struct kvm_vcpu *vcpu, int sprn, kvmppc_core_emulate_mfspr_hv()
H A Dtrace_hv.h223 TP_PROTO(struct kvm_vcpu *vcpu),
247 TP_PROTO(struct kvm_vcpu *vcpu),
274 TP_PROTO(struct kvm_vcpu *vcpu, unsigned long *hptep,
310 TP_PROTO(struct kvm_vcpu *vcpu, unsigned long *hptep, long ret),
334 TP_PROTO(struct kvm_vcpu *vcpu),
363 TP_PROTO(struct kvm_vcpu *vcpu, int ret),
436 TP_PROTO(struct kvm_vcpu *vcpu),
454 TP_PROTO(struct kvm_vcpu *vcpu, struct kvm_run *run),
H A Dbook3s_rtas.c21 static void kvm_rtas_set_xive(struct kvm_vcpu *vcpu, struct rtas_args *args) kvm_rtas_set_xive()
42 static void kvm_rtas_get_xive(struct kvm_vcpu *vcpu, struct rtas_args *args) kvm_rtas_get_xive()
67 static void kvm_rtas_int_off(struct kvm_vcpu *vcpu, struct rtas_args *args) kvm_rtas_int_off()
86 static void kvm_rtas_int_on(struct kvm_vcpu *vcpu, struct rtas_args *args) kvm_rtas_int_on()
107 void (*handler)(struct kvm_vcpu *vcpu, struct rtas_args *args);
208 int kvmppc_rtas_hcall(struct kvm_vcpu *vcpu) kvmppc_rtas_hcall()
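
kvmppc_rtas_hcall() above resolves a guest RTAS call to one of the kvm_rtas_* handlers through a table pairing a service name with a handler(vcpu, args) function pointer, as listed above. A self-contained sketch of that name-keyed dispatch; the reduced rtas_args and vcpu types are placeholders and the service names are only examples:

    #include <stdio.h>
    #include <string.h>

    struct rtas_args { unsigned int nargs; };    /* reduced placeholder */
    struct vcpu { int id; };                     /* stand-in for struct kvm_vcpu */

    static void rtas_int_on(struct vcpu *v, struct rtas_args *a)  { (void)v; (void)a; }
    static void rtas_int_off(struct vcpu *v, struct rtas_args *a) { (void)v; (void)a; }

    static const struct {
        const char *name;
        void (*handler)(struct vcpu *, struct rtas_args *);
    } rtas_handlers[] = {
        { "ibm,int-on",  rtas_int_on  },
        { "ibm,int-off", rtas_int_off },
    };

    int main(void)
    {
        const char *name = "ibm,int-off";
        struct vcpu v = { 0 };
        struct rtas_args args = { 0 };
        unsigned int i;

        for (i = 0; i < sizeof(rtas_handlers) / sizeof(rtas_handlers[0]); i++)
            if (!strcmp(rtas_handlers[i].name, name)) {
                rtas_handlers[i].handler(&v, &args);
                printf("dispatched %s\n", name);
            }
        return 0;
    }
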
H A Demulate.c37 void kvmppc_emulate_dec(struct kvm_vcpu *vcpu) kvmppc_emulate_dec()
81 u32 kvmppc_get_dec(struct kvm_vcpu *vcpu, u64 tb) kvmppc_get_dec()
93 static int kvmppc_emulate_mtspr(struct kvm_vcpu *vcpu, int sprn, int rs) kvmppc_emulate_mtspr()
146 static int kvmppc_emulate_mfspr(struct kvm_vcpu *vcpu, int sprn, int rt) kvmppc_emulate_mfspr()
212 int kvmppc_emulate_instruction(struct kvm_run *run, struct kvm_vcpu *vcpu) kvmppc_emulate_instruction()
H A Dbook3s_hv_ras.c34 static void reload_slb(struct kvm_vcpu *vcpu) reload_slb()
68 static long kvmppc_realmode_mc_power7(struct kvm_vcpu *vcpu) kvmppc_realmode_mc_power7()
139 long kvmppc_realmode_machine_check(struct kvm_vcpu *vcpu) kvmppc_realmode_machine_check()
H A Dbooke_emulate.c35 static void kvmppc_emul_rfi(struct kvm_vcpu *vcpu) kvmppc_emul_rfi()
41 static void kvmppc_emul_rfdi(struct kvm_vcpu *vcpu) kvmppc_emul_rfdi()
47 static void kvmppc_emul_rfci(struct kvm_vcpu *vcpu) kvmppc_emul_rfci()
53 int kvmppc_booke_emulate_op(struct kvm_run *run, struct kvm_vcpu *vcpu, kvmppc_booke_emulate_op()
131 int kvmppc_booke_emulate_mtspr(struct kvm_vcpu *vcpu, int sprn, ulong spr_val) kvmppc_booke_emulate_mtspr()
390 int kvmppc_booke_emulate_mfspr(struct kvm_vcpu *vcpu, int sprn, ulong *spr_val) kvmppc_booke_emulate_mfspr()
H A Dbook3s_64_vio_hv.c44 long kvmppc_h_put_tce(struct kvm_vcpu *vcpu, unsigned long liobn, kvmppc_h_put_tce()
79 long kvmppc_h_get_tce(struct kvm_vcpu *vcpu, unsigned long liobn, kvmppc_h_get_tce()
H A Dbook3s_emulate.c77 static bool spr_allowed(struct kvm_vcpu *vcpu, enum priv_level level) spr_allowed()
90 int kvmppc_core_emulate_op_pr(struct kvm_run *run, struct kvm_vcpu *vcpu, kvmppc_core_emulate_op_pr()
321 void kvmppc_set_bat(struct kvm_vcpu *vcpu, struct kvmppc_bat *bat, bool upper, kvmppc_set_bat()
341 static struct kvmppc_bat *kvmppc_find_bat(struct kvm_vcpu *vcpu, int sprn) kvmppc_find_bat()
366 int kvmppc_core_emulate_mtspr_pr(struct kvm_vcpu *vcpu, int sprn, ulong spr_val) kvmppc_core_emulate_mtspr_pr()
515 int kvmppc_core_emulate_mfspr_pr(struct kvm_vcpu *vcpu, int sprn, ulong *spr_val) kvmppc_core_emulate_mfspr_pr()
659 u32 kvmppc_alignment_dsisr(struct kvm_vcpu *vcpu, unsigned int inst) kvmppc_alignment_dsisr()
664 ulong kvmppc_alignment_dar(struct kvm_vcpu *vcpu, unsigned int inst) kvmppc_alignment_dar()
H A Dtrace_booke.h40 TP_PROTO(unsigned int exit_nr, struct kvm_vcpu *vcpu),
196 TP_PROTO(struct kvm_vcpu *vcpu, unsigned int priority),
H A Dbook3s_hv_rm_xics.c53 static void icp_rm_set_vcpu_irq(struct kvm_vcpu *vcpu, icp_rm_set_vcpu_irq()
54 struct kvm_vcpu *this_vcpu) icp_rm_set_vcpu_irq()
81 static void icp_rm_clr_vcpu_irq(struct kvm_vcpu *vcpu) icp_rm_clr_vcpu_irq()
93 struct kvm_vcpu *this_vcpu = local_paca->kvm_hstate.kvm_vcpu; icp_rm_try_update()
384 unsigned long kvmppc_rm_h_xirr(struct kvm_vcpu *vcpu) kvmppc_rm_h_xirr()
422 int kvmppc_rm_h_ipi(struct kvm_vcpu *vcpu, unsigned long server, kvmppc_rm_h_ipi()
509 int kvmppc_rm_h_cppr(struct kvm_vcpu *vcpu, unsigned long cppr) kvmppc_rm_h_cppr()
571 int kvmppc_rm_h_eoi(struct kvm_vcpu *vcpu, unsigned long xirr) kvmppc_rm_h_eoi()
H A Dtrace.h102 TP_PROTO(struct kvm_vcpu *vcpu),
H A Dtrace_pr.h14 TP_PROTO(int r, struct kvm_vcpu *vcpu),
123 TP_PROTO(const char *type, struct kvm_vcpu *vcpu, unsigned long long p1,
219 TP_PROTO(unsigned int exit_nr, struct kvm_vcpu *vcpu),
H A Dbook3s_paired_singles.c161 static inline void kvmppc_sync_qpr(struct kvm_vcpu *vcpu, int rt) kvmppc_sync_qpr()
166 static void kvmppc_inject_pf(struct kvm_vcpu *vcpu, ulong eaddr, bool is_store) kvmppc_inject_pf()
183 static int kvmppc_emulate_fpr_load(struct kvm_run *run, struct kvm_vcpu *vcpu, kvmppc_emulate_fpr_load()
227 static int kvmppc_emulate_fpr_store(struct kvm_run *run, struct kvm_vcpu *vcpu, kvmppc_emulate_fpr_store()
273 static int kvmppc_emulate_psq_load(struct kvm_run *run, struct kvm_vcpu *vcpu, kvmppc_emulate_psq_load()
316 static int kvmppc_emulate_psq_store(struct kvm_run *run, struct kvm_vcpu *vcpu, kvmppc_emulate_psq_store()
355 static bool kvmppc_inst_is_paired_single(struct kvm_vcpu *vcpu, u32 inst) kvmppc_inst_is_paired_single()
501 static int kvmppc_ps_three_in(struct kvm_vcpu *vcpu, bool rc, kvmppc_ps_three_in()
549 static int kvmppc_ps_two_in(struct kvm_vcpu *vcpu, bool rc, kvmppc_ps_two_in()
601 static int kvmppc_ps_one_in(struct kvm_vcpu *vcpu, bool rc, kvmppc_ps_one_in()
632 int kvmppc_emulate_paired_single(struct kvm_run *run, struct kvm_vcpu *vcpu) kvmppc_emulate_paired_single()
H A Dbook3s_hv_rm_mmu.c49 * as indicated by local_paca->kvm_hstate.kvm_vcpu being set, global_invalidates()
54 if (kvm->arch.online_vcores == 1 && local_paca->kvm_hstate.kvm_vcpu) global_invalidates()
376 long kvmppc_h_enter(struct kvm_vcpu *vcpu, unsigned long flags, kvmppc_h_enter()
483 long kvmppc_h_remove(struct kvm_vcpu *vcpu, unsigned long flags, kvmppc_h_remove()
490 long kvmppc_h_bulk_remove(struct kvm_vcpu *vcpu) kvmppc_h_bulk_remove()
600 long kvmppc_h_protect(struct kvm_vcpu *vcpu, unsigned long flags, kvmppc_h_protect()
664 long kvmppc_h_read(struct kvm_vcpu *vcpu, unsigned long flags, kvmppc_h_read()
698 long kvmppc_h_clear_ref(struct kvm_vcpu *vcpu, unsigned long flags, kvmppc_h_clear_ref()
744 long kvmppc_h_clear_mod(struct kvm_vcpu *vcpu, unsigned long flags, kvmppc_h_clear_mod()
922 long kvmppc_hpte_hv_fault(struct kvm_vcpu *vcpu, unsigned long addr, kvmppc_hpte_hv_fault()
H A Dbook3s_xics.c570 static noinline unsigned long kvmppc_h_xirr(struct kvm_vcpu *vcpu) kvmppc_h_xirr()
604 static noinline int kvmppc_h_ipi(struct kvm_vcpu *vcpu, unsigned long server, kvmppc_h_ipi()
689 static int kvmppc_h_ipoll(struct kvm_vcpu *vcpu, unsigned long server) kvmppc_h_ipoll()
706 static noinline void kvmppc_h_cppr(struct kvm_vcpu *vcpu, unsigned long cppr) kvmppc_h_cppr()
763 static noinline int kvmppc_h_eoi(struct kvm_vcpu *vcpu, unsigned long xirr) kvmppc_h_eoi()
815 static noinline int kvmppc_xics_rm_complete(struct kvm_vcpu *vcpu, u32 hcall) kvmppc_xics_rm_complete()
845 int kvmppc_xics_hcall(struct kvm_vcpu *vcpu, u32 req) kvmppc_xics_hcall()
899 struct kvm_vcpu *vcpu; xics_debug_show()
1036 int kvmppc_xics_create_icp(struct kvm_vcpu *vcpu, unsigned long server_num) kvmppc_xics_create_icp()
1061 u64 kvmppc_xics_get_icp(struct kvm_vcpu *vcpu) kvmppc_xics_get_icp()
1075 int kvmppc_xics_set_icp(struct kvm_vcpu *vcpu, u64 icpval) kvmppc_xics_set_icp()
1361 int kvmppc_xics_connect_vcpu(struct kvm_device *dev, struct kvm_vcpu *vcpu, kvmppc_xics_connect_vcpu()
1381 void kvmppc_xics_free_icp(struct kvm_vcpu *vcpu) kvmppc_xics_free_icp()
H A De500_mmu_host.c160 void kvmppc_map_magic(struct kvm_vcpu *vcpu) kvmppc_map_magic()
297 void kvmppc_core_flush_tlb(struct kvm_vcpu *vcpu) kvmppc_core_flush_tlb()
307 struct kvm_vcpu *vcpu, kvmppc_e500_setup_stlbe()
588 void kvmppc_mmu_map(struct kvm_vcpu *vcpu, u64 eaddr, gpa_t gpaddr, kvmppc_mmu_map()
627 int kvmppc_load_last_inst(struct kvm_vcpu *vcpu, enum instruction_type type, kvmppc_load_last_inst()
717 int kvmppc_load_last_inst(struct kvm_vcpu *vcpu, enum instruction_type type, kvmppc_load_last_inst()
H A Dbook3s_hv_builtin.c110 long int kvmppc_rm_h_confer(struct kvm_vcpu *vcpu, int target, kvmppc_rm_h_confer()
184 long kvmppc_h_random(struct kvm_vcpu *vcpu) kvmppc_h_random()
H A Dbook3s_64_mmu_hv.c175 void kvmppc_map_vrma(struct kvm_vcpu *vcpu, struct kvm_memory_slot *memslot, kvmppc_map_vrma()
246 static void kvmppc_mmu_book3s_64_hv_reset_msr(struct kvm_vcpu *vcpu) kvmppc_mmu_book3s_64_hv_reset_msr()
278 static struct kvmppc_slb *kvmppc_mmu_book3s_hv_find_slbe(struct kvm_vcpu *vcpu, kvmppc_mmu_book3s_hv_find_slbe()
308 static int kvmppc_mmu_book3s_64_hv_xlate(struct kvm_vcpu *vcpu, gva_t eaddr, kvmppc_mmu_book3s_64_hv_xlate()
391 static int kvmppc_hv_emulate_mmio(struct kvm_run *run, struct kvm_vcpu *vcpu, kvmppc_hv_emulate_mmio()
436 int kvmppc_book3s_hv_page_fault(struct kvm_run *run, struct kvm_vcpu *vcpu, kvmppc_book3s_hv_page_fault()
1033 struct kvm_vcpu *vcpu; kvmppc_hv_get_dirty_log()
1634 void kvmppc_mmu_book3s_hv_init(struct kvm_vcpu *vcpu) kvmppc_mmu_book3s_hv_init()
H A Dmpic.c117 struct kvm_vcpu *vcpu = current->thread.kvm_vcpu; get_current_cpu()
179 struct kvm_vcpu *vcpu;
1177 void kvmppc_mpic_set_epr(struct kvm_vcpu *vcpu) kvmppc_mpic_set_epr()
1372 static int kvm_mpic_read(struct kvm_vcpu *vcpu, kvm_mpic_read()
1414 static int kvm_mpic_write(struct kvm_vcpu *vcpu, kvm_mpic_write()
1733 int kvmppc_mpic_connect_vcpu(struct kvm_device *dev, struct kvm_vcpu *vcpu, kvmppc_mpic_connect_vcpu()
1778 void kvmppc_mpic_disconnect_vcpu(struct openpic *opp, struct kvm_vcpu *vcpu) kvmppc_mpic_disconnect_vcpu()
H A Demulate_loadstore.c50 int kvmppc_emulate_loadstore(struct kvm_vcpu *vcpu) kvmppc_emulate_loadstore()
H A Dbookehv_interrupts.S181 * Get vcpu from Paca: paca->__current.thread->kvm_vcpu
/linux-4.4.14/arch/powerpc/include/asm/
H A Dkvm_ppc.h23 /* This file exists just so we can dereference kvm_vcpu, avoiding nested header
70 extern int kvmppc_vcpu_run(struct kvm_run *kvm_run, struct kvm_vcpu *vcpu);
71 extern int __kvmppc_vcpu_run(struct kvm_run *kvm_run, struct kvm_vcpu *vcpu);
74 extern void kvmppc_dump_vcpu(struct kvm_vcpu *vcpu);
75 extern int kvmppc_handle_load(struct kvm_run *run, struct kvm_vcpu *vcpu,
78 extern int kvmppc_handle_loads(struct kvm_run *run, struct kvm_vcpu *vcpu,
81 extern int kvmppc_handle_store(struct kvm_run *run, struct kvm_vcpu *vcpu,
85 extern int kvmppc_load_last_inst(struct kvm_vcpu *vcpu,
88 extern int kvmppc_ld(struct kvm_vcpu *vcpu, ulong *eaddr, int size, void *ptr,
90 extern int kvmppc_st(struct kvm_vcpu *vcpu, ulong *eaddr, int size, void *ptr,
93 struct kvm_vcpu *vcpu);
94 extern int kvmppc_emulate_loadstore(struct kvm_vcpu *vcpu);
95 extern int kvmppc_emulate_mmio(struct kvm_run *run, struct kvm_vcpu *vcpu);
96 extern void kvmppc_emulate_dec(struct kvm_vcpu *vcpu);
97 extern u32 kvmppc_get_dec(struct kvm_vcpu *vcpu, u64 tb);
98 extern void kvmppc_decrementer_func(struct kvm_vcpu *vcpu);
99 extern int kvmppc_sanity_check(struct kvm_vcpu *vcpu);
100 extern int kvmppc_subarch_vcpu_init(struct kvm_vcpu *vcpu);
101 extern void kvmppc_subarch_vcpu_uninit(struct kvm_vcpu *vcpu);
105 extern void kvmppc_mmu_map(struct kvm_vcpu *vcpu, u64 gvaddr, gpa_t gpaddr,
107 extern void kvmppc_mmu_priv_switch(struct kvm_vcpu *vcpu, int usermode);
108 extern void kvmppc_mmu_switch_pid(struct kvm_vcpu *vcpu, u32 pid);
109 extern void kvmppc_mmu_destroy(struct kvm_vcpu *vcpu);
110 extern int kvmppc_mmu_init(struct kvm_vcpu *vcpu);
111 extern int kvmppc_mmu_dtlb_index(struct kvm_vcpu *vcpu, gva_t eaddr);
112 extern int kvmppc_mmu_itlb_index(struct kvm_vcpu *vcpu, gva_t eaddr);
113 extern gpa_t kvmppc_mmu_xlate(struct kvm_vcpu *vcpu, unsigned int gtlb_index,
115 extern void kvmppc_mmu_dtlb_miss(struct kvm_vcpu *vcpu);
116 extern void kvmppc_mmu_itlb_miss(struct kvm_vcpu *vcpu);
117 extern int kvmppc_xlate(struct kvm_vcpu *vcpu, ulong eaddr,
121 extern struct kvm_vcpu *kvmppc_core_vcpu_create(struct kvm *kvm,
123 extern void kvmppc_core_vcpu_free(struct kvm_vcpu *vcpu);
124 extern int kvmppc_core_vcpu_setup(struct kvm_vcpu *vcpu);
126 extern int kvmppc_core_vcpu_translate(struct kvm_vcpu *vcpu,
129 extern void kvmppc_core_vcpu_load(struct kvm_vcpu *vcpu, int cpu);
130 extern void kvmppc_core_vcpu_put(struct kvm_vcpu *vcpu);
132 extern int kvmppc_core_prepare_to_enter(struct kvm_vcpu *vcpu);
133 extern int kvmppc_core_pending_dec(struct kvm_vcpu *vcpu);
134 extern void kvmppc_core_queue_program(struct kvm_vcpu *vcpu, ulong flags);
135 extern void kvmppc_core_queue_dec(struct kvm_vcpu *vcpu);
136 extern void kvmppc_core_dequeue_dec(struct kvm_vcpu *vcpu);
137 extern void kvmppc_core_queue_external(struct kvm_vcpu *vcpu,
139 extern void kvmppc_core_dequeue_external(struct kvm_vcpu *vcpu);
140 extern void kvmppc_core_queue_dtlb_miss(struct kvm_vcpu *vcpu, ulong dear_flags,
142 extern void kvmppc_core_queue_data_storage(struct kvm_vcpu *vcpu,
145 extern void kvmppc_core_queue_itlb_miss(struct kvm_vcpu *vcpu);
146 extern void kvmppc_core_queue_inst_storage(struct kvm_vcpu *vcpu,
148 extern void kvmppc_core_flush_tlb(struct kvm_vcpu *vcpu);
149 extern int kvmppc_core_check_requests(struct kvm_vcpu *vcpu);
154 extern void kvmppc_core_destroy_mmu(struct kvm_vcpu *vcpu);
155 extern int kvmppc_kvm_pv(struct kvm_vcpu *vcpu);
156 extern void kvmppc_map_magic(struct kvm_vcpu *vcpu);
163 extern void kvmppc_map_vrma(struct kvm_vcpu *vcpu,
165 extern int kvmppc_pseries_do_hcall(struct kvm_vcpu *vcpu);
169 extern long kvmppc_h_put_tce(struct kvm_vcpu *vcpu, unsigned long liobn,
171 extern long kvmppc_h_get_tce(struct kvm_vcpu *vcpu, unsigned long liobn,
198 extern int kvmppc_prepare_to_enter(struct kvm_vcpu *vcpu);
202 int kvm_vcpu_ioctl_interrupt(struct kvm_vcpu *vcpu, struct kvm_interrupt *irq);
205 extern int kvmppc_rtas_hcall(struct kvm_vcpu *vcpu);
214 void kvmppc_core_dequeue_debug(struct kvm_vcpu *vcpu);
215 void kvmppc_core_queue_debug(struct kvm_vcpu *vcpu);
230 int (*get_sregs)(struct kvm_vcpu *vcpu, struct kvm_sregs *sregs);
231 int (*set_sregs)(struct kvm_vcpu *vcpu, struct kvm_sregs *sregs);
232 int (*get_one_reg)(struct kvm_vcpu *vcpu, u64 id,
234 int (*set_one_reg)(struct kvm_vcpu *vcpu, u64 id,
236 void (*vcpu_load)(struct kvm_vcpu *vcpu, int cpu);
237 void (*vcpu_put)(struct kvm_vcpu *vcpu);
238 void (*set_msr)(struct kvm_vcpu *vcpu, u64 msr);
239 int (*vcpu_run)(struct kvm_run *run, struct kvm_vcpu *vcpu);
240 struct kvm_vcpu *(*vcpu_create)(struct kvm *kvm, unsigned int id);
241 void (*vcpu_free)(struct kvm_vcpu *vcpu);
242 int (*check_requests)(struct kvm_vcpu *vcpu);
258 void (*mmu_destroy)(struct kvm_vcpu *vcpu);
266 int (*emulate_op)(struct kvm_run *run, struct kvm_vcpu *vcpu,
268 int (*emulate_mtspr)(struct kvm_vcpu *vcpu, int sprn, ulong spr_val);
269 int (*emulate_mfspr)(struct kvm_vcpu *vcpu, int sprn, ulong *spr_val);
270 void (*fast_vcpu_kick)(struct kvm_vcpu *vcpu);
279 static inline int kvmppc_get_last_inst(struct kvm_vcpu *vcpu, kvmppc_get_last_inst()
366 int kvmppc_core_get_sregs(struct kvm_vcpu *vcpu, struct kvm_sregs *sregs);
367 int kvmppc_core_set_sregs(struct kvm_vcpu *vcpu, struct kvm_sregs *sregs);
369 int kvmppc_get_sregs_ivor(struct kvm_vcpu *vcpu, struct kvm_sregs *sregs);
370 int kvmppc_set_sregs_ivor(struct kvm_vcpu *vcpu, struct kvm_sregs *sregs);
372 int kvm_vcpu_ioctl_get_one_reg(struct kvm_vcpu *vcpu, struct kvm_one_reg *reg);
373 int kvm_vcpu_ioctl_set_one_reg(struct kvm_vcpu *vcpu, struct kvm_one_reg *reg);
374 int kvmppc_get_one_reg(struct kvm_vcpu *vcpu, u64 id, union kvmppc_one_reg *);
375 int kvmppc_set_one_reg(struct kvm_vcpu *vcpu, u64 id, union kvmppc_one_reg *);
377 void kvmppc_set_pid(struct kvm_vcpu *vcpu, u32 pid);
402 static inline void kvmppc_fast_vcpu_kick(struct kvm_vcpu *vcpu) kvmppc_fast_vcpu_kick()
426 static inline void kvmppc_fast_vcpu_kick(struct kvm_vcpu *vcpu) kvmppc_fast_vcpu_kick()
436 static inline int kvmppc_xics_enabled(struct kvm_vcpu *vcpu) kvmppc_xics_enabled()
440 extern void kvmppc_xics_free_icp(struct kvm_vcpu *vcpu);
441 extern int kvmppc_xics_create_icp(struct kvm_vcpu *vcpu, unsigned long server);
443 extern int kvmppc_xics_hcall(struct kvm_vcpu *vcpu, u32 cmd);
444 extern u64 kvmppc_xics_get_icp(struct kvm_vcpu *vcpu);
445 extern int kvmppc_xics_set_icp(struct kvm_vcpu *vcpu, u64 icpval);
447 struct kvm_vcpu *vcpu, u32 cpu);
449 static inline int kvmppc_xics_enabled(struct kvm_vcpu *vcpu) kvmppc_xics_enabled()
451 static inline void kvmppc_xics_free_icp(struct kvm_vcpu *vcpu) { } kvmppc_xics_create_icp()
452 static inline int kvmppc_xics_create_icp(struct kvm_vcpu *vcpu, kvmppc_xics_create_icp()
458 static inline int kvmppc_xics_hcall(struct kvm_vcpu *vcpu, u32 cmd) kvmppc_xics_hcall()
462 static inline unsigned long kvmppc_get_epr(struct kvm_vcpu *vcpu) kvmppc_get_epr()
473 static inline void kvmppc_set_epr(struct kvm_vcpu *vcpu, u32 epr) kvmppc_set_epr()
484 void kvmppc_mpic_set_epr(struct kvm_vcpu *vcpu);
485 int kvmppc_mpic_connect_vcpu(struct kvm_device *dev, struct kvm_vcpu *vcpu,
487 void kvmppc_mpic_disconnect_vcpu(struct openpic *opp, struct kvm_vcpu *vcpu);
491 static inline void kvmppc_mpic_set_epr(struct kvm_vcpu *vcpu) kvmppc_mpic_set_epr()
496 struct kvm_vcpu *vcpu, u32 cpu) kvmppc_mpic_connect_vcpu()
502 struct kvm_vcpu *vcpu) kvmppc_mpic_disconnect_vcpu()
508 int kvm_vcpu_ioctl_config_tlb(struct kvm_vcpu *vcpu,
510 int kvm_vcpu_ioctl_dirty_tlb(struct kvm_vcpu *vcpu,
540 static inline bool kvmppc_shared_big_endian(struct kvm_vcpu *vcpu) kvmppc_shared_big_endian()
554 static inline ulong kvmppc_get_##reg(struct kvm_vcpu *vcpu) \
560 static inline void kvmppc_set_##reg(struct kvm_vcpu *vcpu, ulong val) \
566 static inline u##size kvmppc_get_##reg(struct kvm_vcpu *vcpu) \
575 static inline void kvmppc_set_##reg(struct kvm_vcpu *vcpu, u##size val) \
613 static inline void kvmppc_set_msr_fast(struct kvm_vcpu *vcpu, u64 val) kvmppc_set_msr_fast()
627 static inline u32 kvmppc_get_sr(struct kvm_vcpu *vcpu, int nr) kvmppc_get_sr()
635 static inline void kvmppc_set_sr(struct kvm_vcpu *vcpu, int nr, u32 val) kvmppc_set_sr()
665 static inline ulong kvmppc_get_ea_indexed(struct kvm_vcpu *vcpu, int ra, int rb) kvmppc_get_ea_indexed()
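
The kvm_ppc.h hits above include the kvmppc_ops callback table (get_sregs, set_one_reg, vcpu_create, vcpu_run, emulate_mtspr, ...) through which the generic PowerPC KVM code reaches whichever backend is loaded: the HV, PR and e500 implementations listed earlier each fill in one such table. A reduced, self-contained sketch of that indirection; the two callbacks and the vcpu type below are illustrative stand-ins only.

    #include <stdio.h>

    struct vcpu { unsigned long msr; };          /* stand-in for struct kvm_vcpu */

    /* Reduced stand-in for struct kvmppc_ops: each backend provides one table
     * and the generic layer only ever calls through the pointers. */
    struct ppc_ops {
        void (*set_msr)(struct vcpu *vcpu, unsigned long msr);
        int  (*vcpu_run)(struct vcpu *vcpu);
    };

    static void pr_set_msr(struct vcpu *v, unsigned long msr) { v->msr = msr; }
    static int  pr_vcpu_run(struct vcpu *v) { return (int)(v->msr & 1); }

    static const struct ppc_ops pr_ops = {
        .set_msr  = pr_set_msr,
        .vcpu_run = pr_vcpu_run,
    };

    int main(void)
    {
        struct vcpu v = { 0 };
        const struct ppc_ops *ops = &pr_ops;     /* selected once at init time */

        ops->set_msr(&v, 1);
        printf("vcpu_run -> %d\n", ops->vcpu_run(&v));
        return 0;
    }
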
H A Dkvm_book3s.h117 extern void kvmppc_mmu_pte_flush(struct kvm_vcpu *vcpu, ulong ea, ulong ea_mask);
118 extern void kvmppc_mmu_pte_vflush(struct kvm_vcpu *vcpu, u64 vp, u64 vp_mask);
119 extern void kvmppc_mmu_pte_pflush(struct kvm_vcpu *vcpu, ulong pa_start, ulong pa_end);
120 extern void kvmppc_set_msr(struct kvm_vcpu *vcpu, u64 new_msr);
121 extern void kvmppc_mmu_book3s_64_init(struct kvm_vcpu *vcpu);
122 extern void kvmppc_mmu_book3s_32_init(struct kvm_vcpu *vcpu);
123 extern void kvmppc_mmu_book3s_hv_init(struct kvm_vcpu *vcpu);
124 extern int kvmppc_mmu_map_page(struct kvm_vcpu *vcpu, struct kvmppc_pte *pte,
126 extern void kvmppc_mmu_unmap_page(struct kvm_vcpu *vcpu, struct kvmppc_pte *pte);
127 extern int kvmppc_mmu_map_segment(struct kvm_vcpu *vcpu, ulong eaddr);
128 extern void kvmppc_mmu_flush_segment(struct kvm_vcpu *vcpu, ulong eaddr, ulong seg_size);
129 extern void kvmppc_mmu_flush_segments(struct kvm_vcpu *vcpu);
131 struct kvm_vcpu *vcpu, unsigned long addr,
136 extern void kvmppc_mmu_hpte_cache_map(struct kvm_vcpu *vcpu, struct hpte_cache *pte);
137 extern struct hpte_cache *kvmppc_mmu_hpte_cache_next(struct kvm_vcpu *vcpu);
139 extern void kvmppc_mmu_hpte_destroy(struct kvm_vcpu *vcpu);
140 extern int kvmppc_mmu_hpte_init(struct kvm_vcpu *vcpu);
141 extern void kvmppc_mmu_invalidate_pte(struct kvm_vcpu *vcpu, struct hpte_cache *pte);
148 extern int kvmppc_ld(struct kvm_vcpu *vcpu, ulong *eaddr, int size, void *ptr, bool data);
149 extern void kvmppc_book3s_queue_irqprio(struct kvm_vcpu *vcpu, unsigned int vec);
150 extern void kvmppc_book3s_dequeue_irqprio(struct kvm_vcpu *vcpu,
152 extern void kvmppc_inject_interrupt(struct kvm_vcpu *vcpu, int vec, u64 flags);
153 extern void kvmppc_set_bat(struct kvm_vcpu *vcpu, struct kvmppc_bat *bat,
155 extern void kvmppc_giveup_ext(struct kvm_vcpu *vcpu, ulong msr);
156 extern int kvmppc_emulate_paired_single(struct kvm_run *run, struct kvm_vcpu *vcpu);
157 extern pfn_t kvmppc_gpa_to_pfn(struct kvm_vcpu *vcpu, gpa_t gpa, bool writing,
180 extern void kvmppc_set_fscr(struct kvm_vcpu *vcpu, u64 fscr);
184 extern u32 kvmppc_alignment_dsisr(struct kvm_vcpu *vcpu, unsigned int inst);
185 extern ulong kvmppc_alignment_dar(struct kvm_vcpu *vcpu, unsigned int inst);
186 extern int kvmppc_h_pr(struct kvm_vcpu *vcpu, unsigned long cmd);
191 struct kvm_vcpu *vcpu);
192 extern void kvmppc_copy_from_svcpu(struct kvm_vcpu *vcpu,
195 static inline struct kvmppc_vcpu_book3s *to_book3s(struct kvm_vcpu *vcpu) to_book3s()
209 static inline void kvmppc_set_gpr(struct kvm_vcpu *vcpu, int num, ulong val) kvmppc_set_gpr()
214 static inline ulong kvmppc_get_gpr(struct kvm_vcpu *vcpu, int num) kvmppc_get_gpr()
219 static inline void kvmppc_set_cr(struct kvm_vcpu *vcpu, u32 val) kvmppc_set_cr()
224 static inline u32 kvmppc_get_cr(struct kvm_vcpu *vcpu) kvmppc_get_cr()
229 static inline void kvmppc_set_xer(struct kvm_vcpu *vcpu, ulong val) kvmppc_set_xer()
234 static inline ulong kvmppc_get_xer(struct kvm_vcpu *vcpu) kvmppc_get_xer()
239 static inline void kvmppc_set_ctr(struct kvm_vcpu *vcpu, ulong val) kvmppc_set_ctr()
244 static inline ulong kvmppc_get_ctr(struct kvm_vcpu *vcpu) kvmppc_get_ctr()
249 static inline void kvmppc_set_lr(struct kvm_vcpu *vcpu, ulong val) kvmppc_set_lr()
254 static inline ulong kvmppc_get_lr(struct kvm_vcpu *vcpu) kvmppc_get_lr()
259 static inline void kvmppc_set_pc(struct kvm_vcpu *vcpu, ulong val) kvmppc_set_pc()
264 static inline ulong kvmppc_get_pc(struct kvm_vcpu *vcpu) kvmppc_get_pc()
269 static inline u64 kvmppc_get_msr(struct kvm_vcpu *vcpu); kvmppc_need_byteswap()
270 static inline bool kvmppc_need_byteswap(struct kvm_vcpu *vcpu) kvmppc_need_byteswap()
275 static inline ulong kvmppc_get_fault_dar(struct kvm_vcpu *vcpu) kvmppc_get_fault_dar()
286 static inline bool kvmppc_supports_magic_page(struct kvm_vcpu *vcpu) kvmppc_supports_magic_page()
292 extern int kvmppc_h_logical_ci_load(struct kvm_vcpu *vcpu);
293 extern int kvmppc_h_logical_ci_store(struct kvm_vcpu *vcpu);
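
kvm_book3s.h (and kvm_booke.h just below) provide the kvmppc_get_gpr/kvmppc_set_gpr, CR, XER, LR and PC accessors, so emulation code never touches the vcpu layout directly; the same code works whether the registers live in the shadow vcpu or in vcpu->arch. A small self-contained sketch of emulating one instruction purely through such accessors, with a simplified register file standing in for the real structures:

    #include <stdio.h>

    /* Reduced register file; the real accessors hide where the GPRs
     * actually live (shadow vcpu vs. vcpu->arch). */
    struct vcpu { unsigned long gpr[32]; unsigned long pc; };

    static void set_gpr(struct vcpu *v, int num, unsigned long val) { v->gpr[num] = val; }
    static unsigned long get_gpr(struct vcpu *v, int num) { return v->gpr[num]; }
    static void set_pc(struct vcpu *v, unsigned long val) { v->pc = val; }

    /* Emulate "addi rD, rA, simm" using only the accessors. */
    static void emulate_addi(struct vcpu *v, int rd, int ra, long simm)
    {
        set_gpr(v, rd, get_gpr(v, ra) + simm);
        set_pc(v, v->pc + 4);        /* step past the emulated instruction */
    }

    int main(void)
    {
        struct vcpu v = { { 0 }, 0 };
        set_gpr(&v, 1, 100);
        emulate_addi(&v, 3, 1, 16);
        printf("r3 = %lu, pc = %lu\n", get_gpr(&v, 3), v.pc);
        return 0;
    }
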
H A Dkvm_booke.h37 static inline void kvmppc_set_gpr(struct kvm_vcpu *vcpu, int num, ulong val) kvmppc_set_gpr()
42 static inline ulong kvmppc_get_gpr(struct kvm_vcpu *vcpu, int num) kvmppc_get_gpr()
47 static inline void kvmppc_set_cr(struct kvm_vcpu *vcpu, u32 val) kvmppc_set_cr()
52 static inline u32 kvmppc_get_cr(struct kvm_vcpu *vcpu) kvmppc_get_cr()
57 static inline void kvmppc_set_xer(struct kvm_vcpu *vcpu, ulong val) kvmppc_set_xer()
62 static inline ulong kvmppc_get_xer(struct kvm_vcpu *vcpu) kvmppc_get_xer()
67 static inline bool kvmppc_need_byteswap(struct kvm_vcpu *vcpu) kvmppc_need_byteswap()
73 static inline void kvmppc_set_ctr(struct kvm_vcpu *vcpu, ulong val) kvmppc_set_ctr()
78 static inline ulong kvmppc_get_ctr(struct kvm_vcpu *vcpu) kvmppc_get_ctr()
83 static inline void kvmppc_set_lr(struct kvm_vcpu *vcpu, ulong val) kvmppc_set_lr()
88 static inline ulong kvmppc_get_lr(struct kvm_vcpu *vcpu) kvmppc_get_lr()
93 static inline void kvmppc_set_pc(struct kvm_vcpu *vcpu, ulong val) kvmppc_set_pc()
98 static inline ulong kvmppc_get_pc(struct kvm_vcpu *vcpu) kvmppc_get_pc()
103 static inline ulong kvmppc_get_fault_dar(struct kvm_vcpu *vcpu) kvmppc_get_fault_dar()
108 static inline bool kvmppc_supports_magic_page(struct kvm_vcpu *vcpu) kvmppc_supports_magic_page()
H A Dkvm_host.h293 struct kvm_vcpu *runner;
350 void (*slbmte)(struct kvm_vcpu *vcpu, u64 rb, u64 rs);
351 u64 (*slbmfee)(struct kvm_vcpu *vcpu, u64 slb_nr);
352 u64 (*slbmfev)(struct kvm_vcpu *vcpu, u64 slb_nr);
353 void (*slbie)(struct kvm_vcpu *vcpu, u64 slb_nr);
354 void (*slbia)(struct kvm_vcpu *vcpu);
356 void (*mtsrin)(struct kvm_vcpu *vcpu, u32 srnum, ulong value);
357 u32 (*mfsrin)(struct kvm_vcpu *vcpu, u32 srnum);
358 int (*xlate)(struct kvm_vcpu *vcpu, gva_t eaddr,
360 void (*reset_msr)(struct kvm_vcpu *vcpu);
361 void (*tlbie)(struct kvm_vcpu *vcpu, ulong addr, bool large);
362 int (*esid_to_vsid)(struct kvm_vcpu *vcpu, ulong esid, u64 *vsid);
363 u64 (*ea_to_vp)(struct kvm_vcpu *vcpu, gva_t eaddr, bool data);
364 bool (*is_dcbz32)(struct kvm_vcpu *vcpu);
717 static inline void kvm_arch_sched_in(struct kvm_vcpu *vcpu, int cpu) {} kvm_arch_exit()
719 static inline void kvm_arch_vcpu_blocking(struct kvm_vcpu *vcpu) {} kvm_arch_vcpu_unblocking()
720 static inline void kvm_arch_vcpu_unblocking(struct kvm_vcpu *vcpu) {}
H A Dkvm_book3s_asm.h111 struct kvm_vcpu *kvm_vcpu;
H A Dkvm_book3s_32.h23 static inline struct kvmppc_book3s_shadow_vcpu *svcpu_get(struct kvm_vcpu *vcpu) svcpu_get()
H A Dprocessor.h293 struct kvm_vcpu *kvm_vcpu; member in struct:thread_struct
H A Dkvm_book3s_64.h24 static inline struct kvmppc_book3s_shadow_vcpu *svcpu_get(struct kvm_vcpu *vcpu) svcpu_get()
/linux-4.4.14/arch/s390/kvm/
H A Dkvm-s390.h23 typedef int (*intercept_handler_t)(struct kvm_vcpu *vcpu);
51 static inline int is_vcpu_stopped(struct kvm_vcpu *vcpu) is_vcpu_stopped()
68 static inline u32 kvm_s390_get_prefix(struct kvm_vcpu *vcpu) kvm_s390_get_prefix()
73 static inline void kvm_s390_set_prefix(struct kvm_vcpu *vcpu, u32 prefix) kvm_s390_set_prefix()
84 static inline u64 kvm_s390_get_base_disp_s(struct kvm_vcpu *vcpu, ar_t *ar) kvm_s390_get_base_disp_s()
95 static inline void kvm_s390_get_base_disp_sse(struct kvm_vcpu *vcpu, kvm_s390_get_base_disp_sse()
113 static inline void kvm_s390_get_regs_rre(struct kvm_vcpu *vcpu, int *r1, int *r2) kvm_s390_get_regs_rre()
121 static inline u64 kvm_s390_get_base_disp_rsy(struct kvm_vcpu *vcpu, ar_t *ar) kvm_s390_get_base_disp_rsy()
136 static inline u64 kvm_s390_get_base_disp_rs(struct kvm_vcpu *vcpu, ar_t *ar) kvm_s390_get_base_disp_rs()
148 static inline void kvm_s390_set_psw_cc(struct kvm_vcpu *vcpu, unsigned long cc) kvm_s390_set_psw_cc()
179 int kvm_s390_handle_wait(struct kvm_vcpu *vcpu);
180 void kvm_s390_vcpu_wakeup(struct kvm_vcpu *vcpu);
182 int __must_check kvm_s390_deliver_pending_interrupts(struct kvm_vcpu *vcpu);
183 void kvm_s390_clear_local_irqs(struct kvm_vcpu *vcpu);
187 int __must_check kvm_s390_inject_vcpu(struct kvm_vcpu *vcpu,
189 static inline int kvm_s390_inject_prog_irq(struct kvm_vcpu *vcpu, kvm_s390_inject_prog_irq()
199 static inline int kvm_s390_inject_program_int(struct kvm_vcpu *vcpu, u16 code) kvm_s390_inject_program_int()
215 void kvm_s390_rewind_psw(struct kvm_vcpu *vcpu, int ilc);
216 int kvm_handle_sie_intercept(struct kvm_vcpu *vcpu);
220 int kvm_s390_handle_b2(struct kvm_vcpu *vcpu);
221 int kvm_s390_handle_e5(struct kvm_vcpu *vcpu);
222 int kvm_s390_handle_01(struct kvm_vcpu *vcpu);
223 int kvm_s390_handle_b9(struct kvm_vcpu *vcpu);
224 int kvm_s390_handle_lpsw(struct kvm_vcpu *vcpu);
225 int kvm_s390_handle_stctl(struct kvm_vcpu *vcpu);
226 int kvm_s390_handle_lctl(struct kvm_vcpu *vcpu);
227 int kvm_s390_handle_eb(struct kvm_vcpu *vcpu);
230 int kvm_s390_handle_sigp(struct kvm_vcpu *vcpu);
231 int kvm_s390_handle_sigp_pei(struct kvm_vcpu *vcpu);
235 long kvm_arch_fault_in_page(struct kvm_vcpu *vcpu, gpa_t gpa, int writable);
236 int kvm_s390_store_status_unloaded(struct kvm_vcpu *vcpu, unsigned long addr);
237 int kvm_s390_store_adtl_status_unloaded(struct kvm_vcpu *vcpu,
239 int kvm_s390_vcpu_store_status(struct kvm_vcpu *vcpu, unsigned long addr);
240 int kvm_s390_vcpu_store_adtl_status(struct kvm_vcpu *vcpu, unsigned long addr);
241 void kvm_s390_vcpu_start(struct kvm_vcpu *vcpu);
242 void kvm_s390_vcpu_stop(struct kvm_vcpu *vcpu);
243 void kvm_s390_vcpu_block(struct kvm_vcpu *vcpu);
244 void kvm_s390_vcpu_unblock(struct kvm_vcpu *vcpu);
245 void exit_sie(struct kvm_vcpu *vcpu);
246 void kvm_s390_sync_request(int req, struct kvm_vcpu *vcpu);
247 int kvm_s390_vcpu_setup_cmma(struct kvm_vcpu *vcpu);
248 void kvm_s390_vcpu_unsetup_cmma(struct kvm_vcpu *vcpu);
253 int kvm_s390_handle_diag(struct kvm_vcpu *vcpu);
258 struct kvm_vcpu *vcpu; kvm_s390_vcpu_block_all()
268 struct kvm_vcpu *vcpu; kvm_s390_vcpu_unblock_all()
310 static inline int kvm_s390_inject_prog_cond(struct kvm_vcpu *vcpu, int rc) kvm_s390_inject_prog_cond()
321 int kvm_s390_vcpu_has_irq(struct kvm_vcpu *vcpu, int exclude_stop);
322 int psw_extint_disabled(struct kvm_vcpu *vcpu);
324 int kvm_s390_ext_call_pending(struct kvm_vcpu *vcpu);
326 int kvm_s390_is_stop_irq_pending(struct kvm_vcpu *vcpu);
327 void kvm_s390_clear_stop_irq(struct kvm_vcpu *vcpu);
328 int kvm_s390_set_irq_state(struct kvm_vcpu *vcpu,
330 int kvm_s390_get_irq_state(struct kvm_vcpu *vcpu,
334 void kvm_s390_backup_guest_per_regs(struct kvm_vcpu *vcpu);
335 void kvm_s390_restore_guest_per_regs(struct kvm_vcpu *vcpu);
336 void kvm_s390_patch_guest_per_regs(struct kvm_vcpu *vcpu);
337 int kvm_s390_import_bp_data(struct kvm_vcpu *vcpu,
339 void kvm_s390_clear_bp_data(struct kvm_vcpu *vcpu);
340 void kvm_s390_prepare_debug_exit(struct kvm_vcpu *vcpu);
341 void kvm_s390_handle_per_event(struct kvm_vcpu *vcpu);
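
kvm-s390.h above defines intercept_handler_t and declares per-instruction-class entry points (kvm_s390_handle_b2, _b9, _eb, ...); the intercept information from the SIE control block decides which one runs. A hedged sketch of the underlying jump-table idea, with illustrative codes and handlers rather than real s390 opcodes:

    #include <stdio.h>

    struct vcpu { int id; };                               /* stand-in for struct kvm_vcpu */
    typedef int (*intercept_handler_t)(struct vcpu *vcpu);

    static int handle_op_a(struct vcpu *v) { (void)v; return 0; }
    static int handle_op_b(struct vcpu *v) { (void)v; return 0; }

    /* 256-entry table indexed by a one-byte function code; empty slots
     * mean "not handled in the kernel, bounce to user space". */
    static const intercept_handler_t handlers[256] = {
        [0x10] = handle_op_a,        /* illustrative codes only */
        [0x21] = handle_op_b,
    };

    static int dispatch(struct vcpu *v, unsigned char code)
    {
        intercept_handler_t h = handlers[code];
        return h ? h(v) : -1;        /* "operation not supported" fallback */
    }

    int main(void)
    {
        struct vcpu v = { 0 };
        printf("dispatch(0x10) -> %d\n", dispatch(&v, 0x10));
        printf("dispatch(0x77) -> %d\n", dispatch(&v, 0x77));
        return 0;
    }
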
H A Dsigp.c23 static int __sigp_sense(struct kvm_vcpu *vcpu, struct kvm_vcpu *dst_vcpu, __sigp_sense()
51 static int __inject_sigp_emergency(struct kvm_vcpu *vcpu, __inject_sigp_emergency()
52 struct kvm_vcpu *dst_vcpu) __inject_sigp_emergency()
68 static int __sigp_emergency(struct kvm_vcpu *vcpu, struct kvm_vcpu *dst_vcpu) __sigp_emergency()
73 static int __sigp_conditional_emergency(struct kvm_vcpu *vcpu, __sigp_conditional_emergency()
74 struct kvm_vcpu *dst_vcpu, __sigp_conditional_emergency()
100 static int __sigp_external_call(struct kvm_vcpu *vcpu, __sigp_external_call()
101 struct kvm_vcpu *dst_vcpu, u64 *reg) __sigp_external_call()
122 static int __sigp_stop(struct kvm_vcpu *vcpu, struct kvm_vcpu *dst_vcpu) __sigp_stop()
139 static int __sigp_stop_and_store_status(struct kvm_vcpu *vcpu, __sigp_stop_and_store_status()
140 struct kvm_vcpu *dst_vcpu, u64 *reg) __sigp_stop_and_store_status()
158 static int __sigp_set_arch(struct kvm_vcpu *vcpu, u32 parameter) __sigp_set_arch()
162 struct kvm_vcpu *v; __sigp_set_arch()
183 static int __sigp_set_prefix(struct kvm_vcpu *vcpu, struct kvm_vcpu *dst_vcpu, __sigp_set_prefix()
213 static int __sigp_store_status_at_addr(struct kvm_vcpu *vcpu, __sigp_store_status_at_addr()
214 struct kvm_vcpu *dst_vcpu, __sigp_store_status_at_addr()
237 static int __sigp_sense_running(struct kvm_vcpu *vcpu, __sigp_sense_running()
238 struct kvm_vcpu *dst_vcpu, u64 *reg) __sigp_sense_running()
260 static int __prepare_sigp_re_start(struct kvm_vcpu *vcpu, __prepare_sigp_re_start()
261 struct kvm_vcpu *dst_vcpu, u8 order_code) __prepare_sigp_re_start()
276 static int __prepare_sigp_cpu_reset(struct kvm_vcpu *vcpu, __prepare_sigp_cpu_reset()
277 struct kvm_vcpu *dst_vcpu, u8 order_code) __prepare_sigp_cpu_reset()
283 static int __prepare_sigp_unknown(struct kvm_vcpu *vcpu, __prepare_sigp_unknown()
284 struct kvm_vcpu *dst_vcpu) __prepare_sigp_unknown()
290 static int handle_sigp_dst(struct kvm_vcpu *vcpu, u8 order_code, handle_sigp_dst()
294 struct kvm_vcpu *dst_vcpu = kvm_get_vcpu_by_id(vcpu->kvm, cpu_addr); handle_sigp_dst()
367 static int handle_sigp_order_in_user_space(struct kvm_vcpu *vcpu, u8 order_code, handle_sigp_order_in_user_space()
417 int kvm_s390_handle_sigp(struct kvm_vcpu *vcpu) kvm_s390_handle_sigp()
467 int kvm_s390_handle_sigp_pei(struct kvm_vcpu *vcpu) kvm_s390_handle_sigp_pei()
471 struct kvm_vcpu *dest_vcpu; kvm_s390_handle_sigp_pei()
H A Dintercept.c41 void kvm_s390_rewind_psw(struct kvm_vcpu *vcpu, int ilc) kvm_s390_rewind_psw()
54 static int handle_noop(struct kvm_vcpu *vcpu) handle_noop()
69 static int handle_stop(struct kvm_vcpu *vcpu) handle_stop()
103 static int handle_validity(struct kvm_vcpu *vcpu) handle_validity()
113 static int handle_instruction(struct kvm_vcpu *vcpu) handle_instruction()
127 static void __extract_prog_irq(struct kvm_vcpu *vcpu, __extract_prog_irq()
192 static int handle_itdb(struct kvm_vcpu *vcpu) handle_itdb()
212 static int handle_prog(struct kvm_vcpu *vcpu) handle_prog()
252 static int handle_external_interrupt(struct kvm_vcpu *vcpu) handle_external_interrupt()
299 static int handle_mvpg_pei(struct kvm_vcpu *vcpu) handle_mvpg_pei()
329 static int handle_partial_execution(struct kvm_vcpu *vcpu) handle_partial_execution()
339 int kvm_handle_sie_intercept(struct kvm_vcpu *vcpu) kvm_handle_sie_intercept()
H A Dgaccess.h30 static inline unsigned long kvm_s390_real_to_abs(struct kvm_vcpu *vcpu, kvm_s390_real_to_abs()
55 static inline unsigned long kvm_s390_logical_to_effective(struct kvm_vcpu *vcpu, kvm_s390_logical_to_effective()
97 struct kvm_vcpu *__vcpu = (vcpu); \
124 int write_guest_lc(struct kvm_vcpu *vcpu, unsigned long gra, void *data, write_guest_lc()
150 int read_guest_lc(struct kvm_vcpu *vcpu, unsigned long gra, void *data, read_guest_lc()
158 int guest_translate_address(struct kvm_vcpu *vcpu, unsigned long gva,
160 int check_gva_range(struct kvm_vcpu *vcpu, unsigned long gva, ar_t ar,
163 int access_guest(struct kvm_vcpu *vcpu, unsigned long ga, ar_t ar, void *data,
166 int access_guest_real(struct kvm_vcpu *vcpu, unsigned long gra,
215 int write_guest(struct kvm_vcpu *vcpu, unsigned long ga, ar_t ar, void *data, write_guest()
235 int read_guest(struct kvm_vcpu *vcpu, unsigned long ga, ar_t ar, void *data, read_guest()
258 int write_guest_abs(struct kvm_vcpu *vcpu, unsigned long gpa, void *data, write_guest_abs()
281 int read_guest_abs(struct kvm_vcpu *vcpu, unsigned long gpa, void *data, read_guest_abs()
304 int write_guest_real(struct kvm_vcpu *vcpu, unsigned long gra, void *data, write_guest_real()
327 int read_guest_real(struct kvm_vcpu *vcpu, unsigned long gra, void *data, read_guest_real()
333 void ipte_lock(struct kvm_vcpu *vcpu);
334 void ipte_unlock(struct kvm_vcpu *vcpu);
335 int ipte_lock_held(struct kvm_vcpu *vcpu);
336 int kvm_s390_check_low_addr_prot_real(struct kvm_vcpu *vcpu, unsigned long gra);
H A Dkvm-s390.c53 #define VCPU_STAT(x) offsetof(struct kvm_vcpu, stat.x), KVM_STAT_VCPU
153 struct kvm_vcpu *vcpu; kvm_clock_sync()
460 static void kvm_s390_vcpu_crypto_setup(struct kvm_vcpu *vcpu);
464 struct kvm_vcpu *vcpu; kvm_s390_vm_set_crypto()
1185 void kvm_arch_vcpu_destroy(struct kvm_vcpu *vcpu) kvm_arch_vcpu_destroy()
1214 struct kvm_vcpu *vcpu; kvm_free_vcpus()
1242 static int __kvm_ucontrol_vcpu_init(struct kvm_vcpu *vcpu) __kvm_ucontrol_vcpu_init()
1252 int kvm_arch_vcpu_init(struct kvm_vcpu *vcpu) kvm_arch_vcpu_init()
1271 void kvm_arch_vcpu_load(struct kvm_vcpu *vcpu, int cpu) kvm_arch_vcpu_load()
1293 void kvm_arch_vcpu_put(struct kvm_vcpu *vcpu) kvm_arch_vcpu_put()
1310 static void kvm_s390_vcpu_initial_reset(struct kvm_vcpu *vcpu) kvm_s390_vcpu_initial_reset()
1334 void kvm_arch_vcpu_postcreate(struct kvm_vcpu *vcpu) kvm_arch_vcpu_postcreate()
1345 static void kvm_s390_vcpu_crypto_setup(struct kvm_vcpu *vcpu) kvm_s390_vcpu_crypto_setup()
1360 void kvm_s390_vcpu_unsetup_cmma(struct kvm_vcpu *vcpu) kvm_s390_vcpu_unsetup_cmma()
1366 int kvm_s390_vcpu_setup_cmma(struct kvm_vcpu *vcpu) kvm_s390_vcpu_setup_cmma()
1377 static void kvm_s390_vcpu_setup_model(struct kvm_vcpu *vcpu) kvm_s390_vcpu_setup_model()
1386 int kvm_arch_vcpu_setup(struct kvm_vcpu *vcpu) kvm_arch_vcpu_setup()
1430 struct kvm_vcpu *kvm_arch_vcpu_create(struct kvm *kvm, kvm_arch_vcpu_create()
1433 struct kvm_vcpu *vcpu; kvm_arch_vcpu_create()
1489 int kvm_arch_vcpu_runnable(struct kvm_vcpu *vcpu) kvm_arch_vcpu_runnable()
1494 void kvm_s390_vcpu_block(struct kvm_vcpu *vcpu) kvm_s390_vcpu_block()
1500 void kvm_s390_vcpu_unblock(struct kvm_vcpu *vcpu) kvm_s390_vcpu_unblock()
1505 static void kvm_s390_vcpu_request(struct kvm_vcpu *vcpu) kvm_s390_vcpu_request()
1511 static void kvm_s390_vcpu_request_handled(struct kvm_vcpu *vcpu) kvm_s390_vcpu_request_handled()
1520 void exit_sie(struct kvm_vcpu *vcpu) exit_sie()
1528 void kvm_s390_sync_request(int req, struct kvm_vcpu *vcpu) kvm_s390_sync_request()
1538 struct kvm_vcpu *vcpu; kvm_gmap_notifier()
1549 int kvm_arch_vcpu_should_kick(struct kvm_vcpu *vcpu) kvm_arch_vcpu_should_kick()
1556 static int kvm_arch_vcpu_ioctl_get_one_reg(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_get_one_reg()
1605 static int kvm_arch_vcpu_ioctl_set_one_reg(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_set_one_reg()
1656 static int kvm_arch_vcpu_ioctl_initial_reset(struct kvm_vcpu *vcpu) kvm_arch_vcpu_ioctl_initial_reset()
1662 int kvm_arch_vcpu_ioctl_set_regs(struct kvm_vcpu *vcpu, struct kvm_regs *regs) kvm_arch_vcpu_ioctl_set_regs()
1668 int kvm_arch_vcpu_ioctl_get_regs(struct kvm_vcpu *vcpu, struct kvm_regs *regs) kvm_arch_vcpu_ioctl_get_regs()
1674 int kvm_arch_vcpu_ioctl_set_sregs(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_set_sregs()
1683 int kvm_arch_vcpu_ioctl_get_sregs(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_get_sregs()
1691 int kvm_arch_vcpu_ioctl_set_fpu(struct kvm_vcpu *vcpu, struct kvm_fpu *fpu) kvm_arch_vcpu_ioctl_set_fpu()
1705 int kvm_arch_vcpu_ioctl_get_fpu(struct kvm_vcpu *vcpu, struct kvm_fpu *fpu) kvm_arch_vcpu_ioctl_get_fpu()
1717 static int kvm_arch_vcpu_ioctl_set_initial_psw(struct kvm_vcpu *vcpu, psw_t psw) kvm_arch_vcpu_ioctl_set_initial_psw()
1730 int kvm_arch_vcpu_ioctl_translate(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_translate()
1740 int kvm_arch_vcpu_ioctl_set_guest_debug(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_set_guest_debug()
1772 int kvm_arch_vcpu_ioctl_get_mpstate(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_get_mpstate()
1780 int kvm_arch_vcpu_ioctl_set_mpstate(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_set_mpstate()
1805 static bool ibs_enabled(struct kvm_vcpu *vcpu) ibs_enabled()
1810 static int kvm_s390_handle_requests(struct kvm_vcpu *vcpu) kvm_s390_handle_requests()
1864 struct kvm_vcpu *vcpu; kvm_s390_set_tod_clock()
1888 long kvm_arch_fault_in_page(struct kvm_vcpu *vcpu, gpa_t gpa, int writable) kvm_arch_fault_in_page()
1894 static void __kvm_inject_pfault_token(struct kvm_vcpu *vcpu, bool start_token, __kvm_inject_pfault_token()
1911 void kvm_arch_async_page_not_present(struct kvm_vcpu *vcpu, kvm_arch_async_page_not_present()
1918 void kvm_arch_async_page_present(struct kvm_vcpu *vcpu, kvm_arch_async_page_present()
1925 void kvm_arch_async_page_ready(struct kvm_vcpu *vcpu, kvm_arch_async_page_ready()
1931 bool kvm_arch_can_inject_async_page_present(struct kvm_vcpu *vcpu) kvm_arch_can_inject_async_page_present()
1940 static int kvm_arch_setup_async_pf(struct kvm_vcpu *vcpu) kvm_arch_setup_async_pf()
1969 static int vcpu_pre_run(struct kvm_vcpu *vcpu) vcpu_pre_run()
2011 static int vcpu_post_run_fault_in_sie(struct kvm_vcpu *vcpu) vcpu_post_run_fault_in_sie()
2036 static int vcpu_post_run(struct kvm_vcpu *vcpu, int exit_reason) vcpu_post_run()
2083 static int __vcpu_run(struct kvm_vcpu *vcpu) __vcpu_run()
2120 static void sync_regs(struct kvm_vcpu *vcpu, struct kvm_run *kvm_run) sync_regs()
2148 static void store_regs(struct kvm_vcpu *vcpu, struct kvm_run *kvm_run) store_regs()
2164 int kvm_arch_vcpu_ioctl_run(struct kvm_vcpu *vcpu, struct kvm_run *kvm_run) kvm_arch_vcpu_ioctl_run()
2230 int kvm_s390_store_status_unloaded(struct kvm_vcpu *vcpu, unsigned long gpa) kvm_s390_store_status_unloaded()
2281 int kvm_s390_vcpu_store_status(struct kvm_vcpu *vcpu, unsigned long addr) kvm_s390_vcpu_store_status()
2298 int kvm_s390_store_adtl_status_unloaded(struct kvm_vcpu *vcpu, kvm_s390_store_adtl_status_unloaded()
2309 int kvm_s390_vcpu_store_adtl_status(struct kvm_vcpu *vcpu, unsigned long addr) kvm_s390_vcpu_store_adtl_status()
2327 static void __disable_ibs_on_vcpu(struct kvm_vcpu *vcpu) __disable_ibs_on_vcpu()
2336 struct kvm_vcpu *vcpu; __disable_ibs_on_all_vcpus()
2343 static void __enable_ibs_on_vcpu(struct kvm_vcpu *vcpu) __enable_ibs_on_vcpu()
2349 void kvm_s390_vcpu_start(struct kvm_vcpu *vcpu) kvm_s390_vcpu_start()
2388 void kvm_s390_vcpu_stop(struct kvm_vcpu *vcpu) kvm_s390_vcpu_stop()
2391 struct kvm_vcpu *started_vcpu = NULL; kvm_s390_vcpu_stop()
2426 static int kvm_vcpu_ioctl_enable_cap(struct kvm_vcpu *vcpu, kvm_vcpu_ioctl_enable_cap()
2450 static long kvm_s390_guest_mem_op(struct kvm_vcpu *vcpu, kvm_s390_guest_mem_op()
2512 struct kvm_vcpu *vcpu = filp->private_data; kvm_arch_vcpu_ioctl()
2664 int kvm_arch_vcpu_fault(struct kvm_vcpu *vcpu, struct vm_fault *vmf) kvm_arch_vcpu_fault()
2731 return kvm_init(NULL, sizeof(struct kvm_vcpu), 0, THIS_MODULE); kvm_s390_init()
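The kvm_s390_init() hit directly above shows the pattern every architecture in this tree uses to register with the generic KVM core: the arch module init passes the size (and optional alignment) of its struct kvm_vcpu to kvm_init(). A minimal sketch of that pattern, assuming only the kvm_init()/kvm_exit() entry points visible in these results; the module name and empty opaque argument are illustrative:

        #include <linux/module.h>
        #include <linux/kvm_host.h>

        /* Hand the generic KVM core the vcpu size/alignment for this
         * architecture; the NULL opaque mirrors the s390 call above. */
        static int __init demo_arch_kvm_init(void)
        {
                return kvm_init(NULL, sizeof(struct kvm_vcpu), 0, THIS_MODULE);
        }

        static void __exit demo_arch_kvm_exit(void)
        {
                kvm_exit();
        }

        module_init(demo_arch_kvm_init);
        module_exit(demo_arch_kvm_exit);
        MODULE_LICENSE("GPL");
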
H A Dinterrupt.c37 int psw_extint_disabled(struct kvm_vcpu *vcpu) psw_extint_disabled()
42 static int psw_ioint_disabled(struct kvm_vcpu *vcpu) psw_ioint_disabled()
47 static int psw_mchk_disabled(struct kvm_vcpu *vcpu) psw_mchk_disabled()
52 static int psw_interrupts_disabled(struct kvm_vcpu *vcpu) psw_interrupts_disabled()
59 static int ckc_interrupts_enabled(struct kvm_vcpu *vcpu) ckc_interrupts_enabled()
70 static int ckc_irq_pending(struct kvm_vcpu *vcpu) ckc_irq_pending()
77 static int cpu_timer_interrupts_enabled(struct kvm_vcpu *vcpu) cpu_timer_interrupts_enabled()
83 static int cpu_timer_irq_pending(struct kvm_vcpu *vcpu) cpu_timer_irq_pending()
105 static inline unsigned long pending_irqs(struct kvm_vcpu *vcpu) pending_irqs()
111 static unsigned long disable_iscs(struct kvm_vcpu *vcpu, disable_iscs()
123 static unsigned long deliverable_irqs(struct kvm_vcpu *vcpu) deliverable_irqs()
162 static void __set_cpu_idle(struct kvm_vcpu *vcpu) __set_cpu_idle()
168 static void __unset_cpu_idle(struct kvm_vcpu *vcpu) __unset_cpu_idle()
174 static void __reset_intercept_indicators(struct kvm_vcpu *vcpu) __reset_intercept_indicators()
188 static void __set_cpuflag(struct kvm_vcpu *vcpu, u32 flag) __set_cpuflag()
193 static void set_intercept_indicators_io(struct kvm_vcpu *vcpu) set_intercept_indicators_io()
203 static void set_intercept_indicators_ext(struct kvm_vcpu *vcpu) set_intercept_indicators_ext()
213 static void set_intercept_indicators_mchk(struct kvm_vcpu *vcpu) set_intercept_indicators_mchk()
223 static void set_intercept_indicators_stop(struct kvm_vcpu *vcpu) set_intercept_indicators_stop()
230 static void set_intercept_indicators(struct kvm_vcpu *vcpu) set_intercept_indicators()
238 static u16 get_ilc(struct kvm_vcpu *vcpu) get_ilc()
255 static int __must_check __deliver_cpu_timer(struct kvm_vcpu *vcpu) __deliver_cpu_timer()
274 static int __must_check __deliver_ckc(struct kvm_vcpu *vcpu) __deliver_ckc()
293 static int __must_check __deliver_pfault_init(struct kvm_vcpu *vcpu) __deliver_pfault_init()
321 static int __must_check __deliver_machine_check(struct kvm_vcpu *vcpu) __deliver_machine_check()
392 static int __must_check __deliver_restart(struct kvm_vcpu *vcpu) __deliver_restart()
410 static int __must_check __deliver_set_prefix(struct kvm_vcpu *vcpu) __deliver_set_prefix()
430 static int __must_check __deliver_emergency_signal(struct kvm_vcpu *vcpu) __deliver_emergency_signal()
458 static int __must_check __deliver_external_call(struct kvm_vcpu *vcpu) __deliver_external_call()
486 static int __must_check __deliver_prog(struct kvm_vcpu *vcpu) __deliver_prog()
599 static int __must_check __deliver_service(struct kvm_vcpu *vcpu) __deliver_service()
633 static int __must_check __deliver_pfault_done(struct kvm_vcpu *vcpu) __deliver_pfault_done()
675 static int __must_check __deliver_virtio(struct kvm_vcpu *vcpu) __deliver_virtio()
721 static int __must_check __deliver_io(struct kvm_vcpu *vcpu, __deliver_io()
773 typedef int (*deliver_irq_t)(struct kvm_vcpu *vcpu);
792 int kvm_s390_ext_call_pending(struct kvm_vcpu *vcpu) kvm_s390_ext_call_pending()
804 int kvm_s390_vcpu_has_irq(struct kvm_vcpu *vcpu, int exclude_stop) kvm_s390_vcpu_has_irq()
823 int kvm_cpu_has_pending_timer(struct kvm_vcpu *vcpu) kvm_cpu_has_pending_timer()
828 int kvm_s390_handle_wait(struct kvm_vcpu *vcpu) kvm_s390_handle_wait()
869 void kvm_s390_vcpu_wakeup(struct kvm_vcpu *vcpu) kvm_s390_vcpu_wakeup()
884 struct kvm_vcpu *vcpu; kvm_s390_idle_wakeup()
887 vcpu = container_of(timer, struct kvm_vcpu, arch.ckc_timer); kvm_s390_idle_wakeup()
902 void kvm_s390_clear_local_irqs(struct kvm_vcpu *vcpu) kvm_s390_clear_local_irqs()
917 int __must_check kvm_s390_deliver_pending_interrupts(struct kvm_vcpu *vcpu) kvm_s390_deliver_pending_interrupts()
958 static int __inject_prog(struct kvm_vcpu *vcpu, struct kvm_s390_irq *irq) __inject_prog()
990 static int __inject_pfault_init(struct kvm_vcpu *vcpu, struct kvm_s390_irq *irq) __inject_pfault_init()
1006 static int __inject_extcall_sigpif(struct kvm_vcpu *vcpu, uint16_t src_id) __inject_extcall_sigpif()
1021 static int __inject_extcall(struct kvm_vcpu *vcpu, struct kvm_s390_irq *irq) __inject_extcall()
1046 static int __inject_set_prefix(struct kvm_vcpu *vcpu, struct kvm_s390_irq *irq) __inject_set_prefix()
1065 static int __inject_sigp_stop(struct kvm_vcpu *vcpu, struct kvm_s390_irq *irq) __inject_sigp_stop()
1090 static int __inject_sigp_restart(struct kvm_vcpu *vcpu, __inject_sigp_restart()
1102 static int __inject_sigp_emergency(struct kvm_vcpu *vcpu, __inject_sigp_emergency()
1122 static int __inject_mchk(struct kvm_vcpu *vcpu, struct kvm_s390_irq *irq) __inject_mchk()
1153 static int __inject_ckc(struct kvm_vcpu *vcpu) __inject_ckc()
1166 static int __inject_cpu_timer(struct kvm_vcpu *vcpu) __inject_cpu_timer()
1329 struct kvm_vcpu *dst_vcpu; __floating_irq_kick()
1494 int kvm_s390_is_stop_irq_pending(struct kvm_vcpu *vcpu) kvm_s390_is_stop_irq_pending()
1501 void kvm_s390_clear_stop_irq(struct kvm_vcpu *vcpu) kvm_s390_clear_stop_irq()
1511 static int do_inject_vcpu(struct kvm_vcpu *vcpu, struct kvm_s390_irq *irq) do_inject_vcpu()
1556 int kvm_s390_inject_vcpu(struct kvm_vcpu *vcpu, struct kvm_s390_irq *irq) kvm_s390_inject_vcpu()
1942 struct kvm_vcpu *vcpu; flic_set_attr()
2122 int kvm_s390_set_irq_state(struct kvm_vcpu *vcpu, void __user *irqstate, int len) kvm_s390_set_irq_state()
2204 int kvm_s390_get_irq_state(struct kvm_vcpu *vcpu, __u8 __user *buf, int len) kvm_s390_get_irq_state()
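The deliver_irq_t typedef at interrupt.c:773 ties the __deliver_*() handlers listed above to one common signature, which suggests a table-driven dispatch when kvm_s390_deliver_pending_interrupts() runs. A hedged sketch of that shape follows; only the function-pointer signature is taken from the hit above, while the enum values, handler bodies, and table name are hypothetical:

        struct kvm_vcpu;

        typedef int (*deliver_irq_t)(struct kvm_vcpu *vcpu);

        /* Hypothetical pending-IRQ indices, for illustration only. */
        enum { DEMO_IRQ_CKC, DEMO_IRQ_CPU_TIMER, DEMO_IRQ_MAX };

        static int demo_deliver_ckc(struct kvm_vcpu *vcpu)       { return 0; }
        static int demo_deliver_cpu_timer(struct kvm_vcpu *vcpu) { return 0; }

        static const deliver_irq_t demo_deliver_funcs[DEMO_IRQ_MAX] = {
                [DEMO_IRQ_CKC]       = demo_deliver_ckc,
                [DEMO_IRQ_CPU_TIMER] = demo_deliver_cpu_timer,
        };

        /* Walk a pending bitmask and call the matching delivery handler. */
        static int demo_deliver_pending(struct kvm_vcpu *vcpu, unsigned long pending)
        {
                int irq, rc = 0;

                for (irq = 0; irq < DEMO_IRQ_MAX && !rc; irq++)
                        if ((pending & (1UL << irq)) && demo_deliver_funcs[irq])
                                rc = demo_deliver_funcs[irq](vcpu);
                return rc;
        }
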
H A Ddiag.c23 static int diag_release_pages(struct kvm_vcpu *vcpu) diag_release_pages()
61 static int __diag_page_ref_service(struct kvm_vcpu *vcpu) __diag_page_ref_service()
148 static int __diag_time_slice_end(struct kvm_vcpu *vcpu) __diag_time_slice_end()
156 static int __diag_time_slice_end_directed(struct kvm_vcpu *vcpu) __diag_time_slice_end_directed()
159 struct kvm_vcpu *tcpu; __diag_time_slice_end_directed()
179 static int __diag_ipl_functions(struct kvm_vcpu *vcpu) __diag_ipl_functions()
209 static int __diag_virtio_hypercall(struct kvm_vcpu *vcpu) __diag_virtio_hypercall()
240 int kvm_s390_handle_diag(struct kvm_vcpu *vcpu) kvm_s390_handle_diag()
H A Dpriv.c34 static int handle_set_clock(struct kvm_vcpu *vcpu) handle_set_clock()
57 static int handle_set_prefix(struct kvm_vcpu *vcpu) handle_set_prefix()
95 static int handle_store_prefix(struct kvm_vcpu *vcpu) handle_store_prefix()
125 static int handle_store_cpu_address(struct kvm_vcpu *vcpu) handle_store_cpu_address()
151 static int __skey_check_enable(struct kvm_vcpu *vcpu) __skey_check_enable()
165 static int handle_skey(struct kvm_vcpu *vcpu) handle_skey()
181 static int handle_ipte_interlock(struct kvm_vcpu *vcpu) handle_ipte_interlock()
192 static int handle_test_block(struct kvm_vcpu *vcpu) handle_test_block()
220 static int handle_tpi(struct kvm_vcpu *vcpu) handle_tpi()
284 static int handle_tsch(struct kvm_vcpu *vcpu) handle_tsch()
315 static int handle_io_inst(struct kvm_vcpu *vcpu) handle_io_inst()
343 static int handle_stfl(struct kvm_vcpu *vcpu) handle_stfl()
389 int kvm_s390_handle_lpsw(struct kvm_vcpu *vcpu) kvm_s390_handle_lpsw()
417 static int handle_lpswe(struct kvm_vcpu *vcpu) handle_lpswe()
439 static int handle_stidp(struct kvm_vcpu *vcpu) handle_stidp()
464 static void handle_stsi_3_2_2(struct kvm_vcpu *vcpu, struct sysinfo_3_2_2 *mem) handle_stsi_3_2_2()
491 static void insert_stsi_usr_data(struct kvm_vcpu *vcpu, u64 addr, ar_t ar, insert_stsi_usr_data()
502 static int handle_stsi(struct kvm_vcpu *vcpu) handle_stsi()
611 int kvm_s390_handle_b2(struct kvm_vcpu *vcpu) kvm_s390_handle_b2()
627 static int handle_epsw(struct kvm_vcpu *vcpu) handle_epsw()
654 static int handle_pfmf(struct kvm_vcpu *vcpu) handle_pfmf()
742 static int handle_essa(struct kvm_vcpu *vcpu) handle_essa()
790 int kvm_s390_handle_b9(struct kvm_vcpu *vcpu) kvm_s390_handle_b9()
802 int kvm_s390_handle_lctl(struct kvm_vcpu *vcpu) kvm_s390_handle_lctl()
841 int kvm_s390_handle_stctl(struct kvm_vcpu *vcpu) kvm_s390_handle_stctl()
875 static int handle_lctlg(struct kvm_vcpu *vcpu) handle_lctlg()
913 static int handle_stctg(struct kvm_vcpu *vcpu) handle_stctg()
952 int kvm_s390_handle_eb(struct kvm_vcpu *vcpu) kvm_s390_handle_eb()
962 static int handle_tprot(struct kvm_vcpu *vcpu) handle_tprot()
1016 int kvm_s390_handle_e5(struct kvm_vcpu *vcpu) kvm_s390_handle_e5()
1024 static int handle_sckpf(struct kvm_vcpu *vcpu) handle_sckpf()
1045 int kvm_s390_handle_01(struct kvm_vcpu *vcpu) kvm_s390_handle_01()
H A Dguestdbg.c62 static void enable_all_hw_bp(struct kvm_vcpu *vcpu) enable_all_hw_bp()
102 static void enable_all_hw_wp(struct kvm_vcpu *vcpu) enable_all_hw_wp()
133 void kvm_s390_backup_guest_per_regs(struct kvm_vcpu *vcpu) kvm_s390_backup_guest_per_regs()
141 void kvm_s390_restore_guest_per_regs(struct kvm_vcpu *vcpu) kvm_s390_restore_guest_per_regs()
149 void kvm_s390_patch_guest_per_regs(struct kvm_vcpu *vcpu) kvm_s390_patch_guest_per_regs()
177 static int __import_wp_info(struct kvm_vcpu *vcpu, __import_wp_info()
206 int kvm_s390_import_bp_data(struct kvm_vcpu *vcpu, kvm_s390_import_bp_data()
290 void kvm_s390_clear_bp_data(struct kvm_vcpu *vcpu) kvm_s390_clear_bp_data()
321 static struct kvm_hw_bp_info_arch *find_hw_bp(struct kvm_vcpu *vcpu, find_hw_bp()
346 static struct kvm_hw_wp_info_arch *any_wp_changed(struct kvm_vcpu *vcpu) any_wp_changed()
379 void kvm_s390_prepare_debug_exit(struct kvm_vcpu *vcpu) kvm_s390_prepare_debug_exit()
390 static int debug_exit_required(struct kvm_vcpu *vcpu) debug_exit_required()
442 static void filter_guest_per_event(struct kvm_vcpu *vcpu) filter_guest_per_event()
481 void kvm_s390_handle_per_event(struct kvm_vcpu *vcpu) kvm_s390_handle_per_event()
H A Dgaccess.c260 int ipte_lock_held(struct kvm_vcpu *vcpu) ipte_lock_held()
269 static void ipte_lock_simple(struct kvm_vcpu *vcpu) ipte_lock_simple()
291 static void ipte_unlock_simple(struct kvm_vcpu *vcpu) ipte_unlock_simple()
310 static void ipte_lock_siif(struct kvm_vcpu *vcpu) ipte_lock_siif()
327 static void ipte_unlock_siif(struct kvm_vcpu *vcpu) ipte_unlock_siif()
343 void ipte_lock(struct kvm_vcpu *vcpu) ipte_lock()
351 void ipte_unlock(struct kvm_vcpu *vcpu) ipte_unlock()
359 static int ar_translation(struct kvm_vcpu *vcpu, union asce *asce, ar_t ar, ar_translation()
463 static int get_vcpu_asce(struct kvm_vcpu *vcpu, union asce *asce, get_vcpu_asce()
539 static unsigned long guest_translate(struct kvm_vcpu *vcpu, unsigned long gva, guest_translate()
700 static int low_address_protection_enabled(struct kvm_vcpu *vcpu, low_address_protection_enabled()
713 static int guest_page_range(struct kvm_vcpu *vcpu, unsigned long ga, guest_page_range()
754 int access_guest(struct kvm_vcpu *vcpu, unsigned long ga, ar_t ar, void *data, access_guest()
798 int access_guest_real(struct kvm_vcpu *vcpu, unsigned long gra, access_guest_real()
827 int guest_translate_address(struct kvm_vcpu *vcpu, unsigned long gva, ar_t ar, guest_translate_address()
869 int check_gva_range(struct kvm_vcpu *vcpu, unsigned long gva, ar_t ar, check_gva_range()
897 int kvm_s390_check_low_addr_prot_real(struct kvm_vcpu *vcpu, unsigned long gra) kvm_s390_check_low_addr_prot_real()
H A Dtrace-s390.h42 TP_PROTO(unsigned int id, struct kvm_vcpu *vcpu,
48 __field(struct kvm_vcpu *, vcpu)
H A Dtrace.h19 #define VCPU_PROTO_COMMON struct kvm_vcpu *vcpu
/linux-4.4.14/arch/arm64/include/asm/
H A Dkvm_coproc.h26 void kvm_reset_sys_regs(struct kvm_vcpu *vcpu);
41 int kvm_handle_cp14_load_store(struct kvm_vcpu *vcpu, struct kvm_run *run);
42 int kvm_handle_cp14_32(struct kvm_vcpu *vcpu, struct kvm_run *run);
43 int kvm_handle_cp14_64(struct kvm_vcpu *vcpu, struct kvm_run *run);
44 int kvm_handle_cp15_32(struct kvm_vcpu *vcpu, struct kvm_run *run);
45 int kvm_handle_cp15_64(struct kvm_vcpu *vcpu, struct kvm_run *run);
46 int kvm_handle_sys_reg(struct kvm_vcpu *vcpu, struct kvm_run *run);
52 int kvm_arm_copy_sys_reg_indices(struct kvm_vcpu *vcpu, u64 __user *uindices);
53 int kvm_arm_sys_reg_get_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *);
54 int kvm_arm_sys_reg_set_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *);
55 unsigned long kvm_arm_num_sys_reg_descs(struct kvm_vcpu *vcpu);
H A Dkvm_emulate.h34 unsigned long *vcpu_reg32(const struct kvm_vcpu *vcpu, u8 reg_num);
35 unsigned long *vcpu_spsr32(const struct kvm_vcpu *vcpu);
37 bool kvm_condition_valid32(const struct kvm_vcpu *vcpu);
38 void kvm_skip_instr32(struct kvm_vcpu *vcpu, bool is_wide_instr);
40 void kvm_inject_undefined(struct kvm_vcpu *vcpu);
41 void kvm_inject_dabt(struct kvm_vcpu *vcpu, unsigned long addr);
42 void kvm_inject_pabt(struct kvm_vcpu *vcpu, unsigned long addr);
44 static inline void vcpu_reset_hcr(struct kvm_vcpu *vcpu) vcpu_reset_hcr()
51 static inline unsigned long vcpu_get_hcr(struct kvm_vcpu *vcpu) vcpu_get_hcr()
56 static inline void vcpu_set_hcr(struct kvm_vcpu *vcpu, unsigned long hcr) vcpu_set_hcr()
61 static inline unsigned long *vcpu_pc(const struct kvm_vcpu *vcpu) vcpu_pc()
66 static inline unsigned long *vcpu_elr_el1(const struct kvm_vcpu *vcpu) vcpu_elr_el1()
71 static inline unsigned long *vcpu_cpsr(const struct kvm_vcpu *vcpu) vcpu_cpsr()
76 static inline bool vcpu_mode_is_32bit(const struct kvm_vcpu *vcpu) vcpu_mode_is_32bit()
81 static inline bool kvm_condition_valid(const struct kvm_vcpu *vcpu) kvm_condition_valid()
89 static inline void kvm_skip_instr(struct kvm_vcpu *vcpu, bool is_wide_instr) kvm_skip_instr()
97 static inline void vcpu_set_thumb(struct kvm_vcpu *vcpu) vcpu_set_thumb()
107 static inline unsigned long vcpu_get_reg(const struct kvm_vcpu *vcpu, vcpu_get_reg()
113 static inline void vcpu_set_reg(struct kvm_vcpu *vcpu, u8 reg_num, vcpu_set_reg()
121 static inline unsigned long *vcpu_spsr(const struct kvm_vcpu *vcpu) vcpu_spsr()
129 static inline bool vcpu_mode_priv(const struct kvm_vcpu *vcpu) vcpu_mode_priv()
139 static inline u32 kvm_vcpu_get_hsr(const struct kvm_vcpu *vcpu) kvm_vcpu_get_hsr()
144 static inline unsigned long kvm_vcpu_get_hfar(const struct kvm_vcpu *vcpu) kvm_vcpu_get_hfar()
149 static inline phys_addr_t kvm_vcpu_get_fault_ipa(const struct kvm_vcpu *vcpu) kvm_vcpu_get_fault_ipa()
154 static inline u32 kvm_vcpu_hvc_get_imm(const struct kvm_vcpu *vcpu) kvm_vcpu_hvc_get_imm()
159 static inline bool kvm_vcpu_dabt_isvalid(const struct kvm_vcpu *vcpu) kvm_vcpu_dabt_isvalid()
164 static inline bool kvm_vcpu_dabt_iswrite(const struct kvm_vcpu *vcpu) kvm_vcpu_dabt_iswrite()
169 static inline bool kvm_vcpu_dabt_issext(const struct kvm_vcpu *vcpu) kvm_vcpu_dabt_issext()
174 static inline int kvm_vcpu_dabt_get_rd(const struct kvm_vcpu *vcpu) kvm_vcpu_dabt_get_rd()
179 static inline bool kvm_vcpu_dabt_isextabt(const struct kvm_vcpu *vcpu) kvm_vcpu_dabt_isextabt()
184 static inline bool kvm_vcpu_dabt_iss1tw(const struct kvm_vcpu *vcpu) kvm_vcpu_dabt_iss1tw()
189 static inline int kvm_vcpu_dabt_get_as(const struct kvm_vcpu *vcpu) kvm_vcpu_dabt_get_as()
195 static inline bool kvm_vcpu_trap_il_is32bit(const struct kvm_vcpu *vcpu) kvm_vcpu_trap_il_is32bit()
200 static inline u8 kvm_vcpu_trap_get_class(const struct kvm_vcpu *vcpu) kvm_vcpu_trap_get_class()
205 static inline bool kvm_vcpu_trap_is_iabt(const struct kvm_vcpu *vcpu) kvm_vcpu_trap_is_iabt()
210 static inline u8 kvm_vcpu_trap_get_fault(const struct kvm_vcpu *vcpu) kvm_vcpu_trap_get_fault()
215 static inline u8 kvm_vcpu_trap_get_fault_type(const struct kvm_vcpu *vcpu) kvm_vcpu_trap_get_fault_type()
220 static inline unsigned long kvm_vcpu_get_mpidr_aff(struct kvm_vcpu *vcpu) kvm_vcpu_get_mpidr_aff()
225 static inline void kvm_vcpu_set_be(struct kvm_vcpu *vcpu) kvm_vcpu_set_be()
233 static inline bool kvm_vcpu_is_be(struct kvm_vcpu *vcpu) kvm_vcpu_is_be()
241 static inline unsigned long vcpu_data_guest_to_host(struct kvm_vcpu *vcpu, vcpu_data_guest_to_host()
272 static inline unsigned long vcpu_data_host_to_guest(struct kvm_vcpu *vcpu, vcpu_data_host_to_guest()
H A Dkvm_host.h46 int kvm_reset_vcpu(struct kvm_vcpu *vcpu);
203 unsigned long kvm_arm_num_regs(struct kvm_vcpu *vcpu);
204 int kvm_arm_copy_reg_indices(struct kvm_vcpu *vcpu, u64 __user *indices);
205 int kvm_arm_get_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg);
206 int kvm_arm_set_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg);
222 struct kvm_vcpu *kvm_arm_get_running_vcpu(void);
223 struct kvm_vcpu * __percpu *kvm_get_running_vcpus(void);
229 int handle_exit(struct kvm_vcpu *vcpu, struct kvm_run *run,
235 struct kvm_vcpu *kvm_mpidr_to_vcpu(struct kvm *kvm, unsigned long mpidr);
253 static inline void kvm_arch_vcpu_uninit(struct kvm_vcpu *vcpu) {} kvm_arch_sched_in()
254 static inline void kvm_arch_sched_in(struct kvm_vcpu *vcpu, int cpu) {}
257 void kvm_arm_setup_debug(struct kvm_vcpu *vcpu);
258 void kvm_arm_clear_debug(struct kvm_vcpu *vcpu);
259 void kvm_arm_reset_debug_ptr(struct kvm_vcpu *vcpu);
H A Dkvm_mmio.h34 int kvm_handle_mmio_return(struct kvm_vcpu *vcpu, struct kvm_run *run);
35 int io_mem_abort(struct kvm_vcpu *vcpu, struct kvm_run *run,
H A Dkvm_psci.h24 int kvm_psci_version(struct kvm_vcpu *vcpu);
25 int kvm_psci_call(struct kvm_vcpu *vcpu);
H A Dkvm_mmu.h94 int kvm_handle_guest_abort(struct kvm_vcpu *vcpu, struct kvm_run *run);
96 void kvm_mmu_free_memory_caches(struct kvm_vcpu *vcpu);
228 static inline bool vcpu_has_cache_enabled(struct kvm_vcpu *vcpu)
233 static inline void __coherent_cache_guest_page(struct kvm_vcpu *vcpu, pfn_t pfn,
271 void kvm_set_way_flush(struct kvm_vcpu *vcpu);
272 void kvm_toggle_cache(struct kvm_vcpu *vcpu, bool was_enabled);
H A Dkvm_asm.h107 struct kvm_vcpu;
121 extern int __kvm_vcpu_run(struct kvm_vcpu *vcpu);
/linux-4.4.14/arch/x86/include/asm/
H A Dkvm_host.h270 void (*set_cr3)(struct kvm_vcpu *vcpu, unsigned long root);
271 unsigned long (*get_cr3)(struct kvm_vcpu *vcpu);
272 u64 (*get_pdptr)(struct kvm_vcpu *vcpu, int index);
273 int (*page_fault)(struct kvm_vcpu *vcpu, gva_t gva, u32 err,
275 void (*inject_page_fault)(struct kvm_vcpu *vcpu,
277 gpa_t (*gva_to_gpa)(struct kvm_vcpu *vcpu, gva_t gva, u32 access,
279 gpa_t (*translate_gpa)(struct kvm_vcpu *vcpu, gpa_t gpa, u32 access,
281 int (*sync_page)(struct kvm_vcpu *vcpu,
283 void (*invlpg)(struct kvm_vcpu *vcpu, gva_t gva);
284 void (*update_pte)(struct kvm_vcpu *vcpu, struct kvm_mmu_page *sp,
334 struct kvm_vcpu *vcpu;
478 int (*complete_userspace_io)(struct kvm_vcpu *vcpu);
770 void (*cpuid_update)(struct kvm_vcpu *vcpu);
773 struct kvm_vcpu *(*vcpu_create)(struct kvm *kvm, unsigned id);
774 void (*vcpu_free)(struct kvm_vcpu *vcpu);
775 void (*vcpu_reset)(struct kvm_vcpu *vcpu, bool init_event);
777 void (*prepare_guest_switch)(struct kvm_vcpu *vcpu);
778 void (*vcpu_load)(struct kvm_vcpu *vcpu, int cpu);
779 void (*vcpu_put)(struct kvm_vcpu *vcpu);
781 void (*update_bp_intercept)(struct kvm_vcpu *vcpu);
782 int (*get_msr)(struct kvm_vcpu *vcpu, struct msr_data *msr);
783 int (*set_msr)(struct kvm_vcpu *vcpu, struct msr_data *msr);
784 u64 (*get_segment_base)(struct kvm_vcpu *vcpu, int seg);
785 void (*get_segment)(struct kvm_vcpu *vcpu,
787 int (*get_cpl)(struct kvm_vcpu *vcpu);
788 void (*set_segment)(struct kvm_vcpu *vcpu,
790 void (*get_cs_db_l_bits)(struct kvm_vcpu *vcpu, int *db, int *l);
791 void (*decache_cr0_guest_bits)(struct kvm_vcpu *vcpu);
792 void (*decache_cr3)(struct kvm_vcpu *vcpu);
793 void (*decache_cr4_guest_bits)(struct kvm_vcpu *vcpu);
794 void (*set_cr0)(struct kvm_vcpu *vcpu, unsigned long cr0);
795 void (*set_cr3)(struct kvm_vcpu *vcpu, unsigned long cr3);
796 int (*set_cr4)(struct kvm_vcpu *vcpu, unsigned long cr4);
797 void (*set_efer)(struct kvm_vcpu *vcpu, u64 efer);
798 void (*get_idt)(struct kvm_vcpu *vcpu, struct desc_ptr *dt);
799 void (*set_idt)(struct kvm_vcpu *vcpu, struct desc_ptr *dt);
800 void (*get_gdt)(struct kvm_vcpu *vcpu, struct desc_ptr *dt);
801 void (*set_gdt)(struct kvm_vcpu *vcpu, struct desc_ptr *dt);
802 u64 (*get_dr6)(struct kvm_vcpu *vcpu);
803 void (*set_dr6)(struct kvm_vcpu *vcpu, unsigned long value);
804 void (*sync_dirty_debug_regs)(struct kvm_vcpu *vcpu);
805 void (*set_dr7)(struct kvm_vcpu *vcpu, unsigned long value);
806 void (*cache_reg)(struct kvm_vcpu *vcpu, enum kvm_reg reg);
807 unsigned long (*get_rflags)(struct kvm_vcpu *vcpu);
808 void (*set_rflags)(struct kvm_vcpu *vcpu, unsigned long rflags);
809 void (*fpu_activate)(struct kvm_vcpu *vcpu);
810 void (*fpu_deactivate)(struct kvm_vcpu *vcpu);
812 void (*tlb_flush)(struct kvm_vcpu *vcpu);
814 void (*run)(struct kvm_vcpu *vcpu);
815 int (*handle_exit)(struct kvm_vcpu *vcpu);
816 void (*skip_emulated_instruction)(struct kvm_vcpu *vcpu);
817 void (*set_interrupt_shadow)(struct kvm_vcpu *vcpu, int mask);
818 u32 (*get_interrupt_shadow)(struct kvm_vcpu *vcpu);
819 void (*patch_hypercall)(struct kvm_vcpu *vcpu,
821 void (*set_irq)(struct kvm_vcpu *vcpu);
822 void (*set_nmi)(struct kvm_vcpu *vcpu);
823 void (*queue_exception)(struct kvm_vcpu *vcpu, unsigned nr,
826 void (*cancel_injection)(struct kvm_vcpu *vcpu);
827 int (*interrupt_allowed)(struct kvm_vcpu *vcpu);
828 int (*nmi_allowed)(struct kvm_vcpu *vcpu);
829 bool (*get_nmi_mask)(struct kvm_vcpu *vcpu);
830 void (*set_nmi_mask)(struct kvm_vcpu *vcpu, bool masked);
831 void (*enable_nmi_window)(struct kvm_vcpu *vcpu);
832 void (*enable_irq_window)(struct kvm_vcpu *vcpu);
833 void (*update_cr8_intercept)(struct kvm_vcpu *vcpu, int tpr, int irr);
834 int (*cpu_uses_apicv)(struct kvm_vcpu *vcpu);
835 void (*hwapic_irr_update)(struct kvm_vcpu *vcpu, int max_irr);
837 void (*load_eoi_exitmap)(struct kvm_vcpu *vcpu);
838 void (*set_virtual_x2apic_mode)(struct kvm_vcpu *vcpu, bool set);
839 void (*set_apic_access_page_addr)(struct kvm_vcpu *vcpu, hpa_t hpa);
840 void (*deliver_posted_interrupt)(struct kvm_vcpu *vcpu, int vector);
841 void (*sync_pir_to_irr)(struct kvm_vcpu *vcpu);
844 u64 (*get_mt_mask)(struct kvm_vcpu *vcpu, gfn_t gfn, bool is_mmio);
848 void (*adjust_tsc_offset_guest)(struct kvm_vcpu *vcpu, s64 adjustment);
850 void (*set_tdp_cr3)(struct kvm_vcpu *vcpu, unsigned long cr3);
856 u64 (*read_tsc_offset)(struct kvm_vcpu *vcpu);
857 void (*write_tsc_offset)(struct kvm_vcpu *vcpu, u64 offset);
859 u64 (*read_l1_tsc)(struct kvm_vcpu *vcpu, u64 host_tsc);
861 void (*get_exit_info)(struct kvm_vcpu *vcpu, u64 *info1, u64 *info2);
863 int (*check_intercept)(struct kvm_vcpu *vcpu,
866 void (*handle_external_intr)(struct kvm_vcpu *vcpu);
870 int (*check_nested_events)(struct kvm_vcpu *vcpu, bool external_intr);
872 void (*sched_in)(struct kvm_vcpu *kvm, int cpu);
910 int (*pre_block)(struct kvm_vcpu *vcpu);
911 void (*post_block)(struct kvm_vcpu *vcpu);
928 void kvm_mmu_destroy(struct kvm_vcpu *vcpu);
929 int kvm_mmu_create(struct kvm_vcpu *vcpu);
930 void kvm_mmu_setup(struct kvm_vcpu *vcpu);
934 void kvm_mmu_reset_context(struct kvm_vcpu *vcpu);
953 int load_pdptrs(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu, unsigned long cr3);
955 int emulator_write_phys(struct kvm_vcpu *vcpu, gpa_t gpa,
973 u64 vcpu_tsc_khz(struct kvm_vcpu *vcpu);
995 int x86_emulate_instruction(struct kvm_vcpu *vcpu, unsigned long cr2,
998 static inline int emulate_instruction(struct kvm_vcpu *vcpu, emulate_instruction()
1005 bool kvm_valid_efer(struct kvm_vcpu *vcpu, u64 efer);
1006 int kvm_get_msr(struct kvm_vcpu *vcpu, struct msr_data *msr);
1007 int kvm_set_msr(struct kvm_vcpu *vcpu, struct msr_data *msr);
1011 int kvm_fast_pio_out(struct kvm_vcpu *vcpu, int size, unsigned short port);
1012 void kvm_emulate_cpuid(struct kvm_vcpu *vcpu);
1013 int kvm_emulate_halt(struct kvm_vcpu *vcpu);
1014 int kvm_vcpu_halt(struct kvm_vcpu *vcpu);
1015 int kvm_emulate_wbinvd(struct kvm_vcpu *vcpu);
1017 void kvm_get_segment(struct kvm_vcpu *vcpu, struct kvm_segment *var, int seg);
1018 int kvm_load_segment_descriptor(struct kvm_vcpu *vcpu, u16 selector, int seg);
1019 void kvm_vcpu_deliver_sipi_vector(struct kvm_vcpu *vcpu, u8 vector);
1021 int kvm_task_switch(struct kvm_vcpu *vcpu, u16 tss_selector, int idt_index,
1024 int kvm_set_cr0(struct kvm_vcpu *vcpu, unsigned long cr0);
1025 int kvm_set_cr3(struct kvm_vcpu *vcpu, unsigned long cr3);
1026 int kvm_set_cr4(struct kvm_vcpu *vcpu, unsigned long cr4);
1027 int kvm_set_cr8(struct kvm_vcpu *vcpu, unsigned long cr8);
1028 int kvm_set_dr(struct kvm_vcpu *vcpu, int dr, unsigned long val);
1029 int kvm_get_dr(struct kvm_vcpu *vcpu, int dr, unsigned long *val);
1030 unsigned long kvm_get_cr8(struct kvm_vcpu *vcpu);
1031 void kvm_lmsw(struct kvm_vcpu *vcpu, unsigned long msw);
1032 void kvm_get_cs_db_l_bits(struct kvm_vcpu *vcpu, int *db, int *l);
1033 int kvm_set_xcr(struct kvm_vcpu *vcpu, u32 index, u64 xcr);
1035 int kvm_get_msr_common(struct kvm_vcpu *vcpu, struct msr_data *msr);
1036 int kvm_set_msr_common(struct kvm_vcpu *vcpu, struct msr_data *msr);
1038 unsigned long kvm_get_rflags(struct kvm_vcpu *vcpu);
1039 void kvm_set_rflags(struct kvm_vcpu *vcpu, unsigned long rflags);
1040 bool kvm_rdpmc(struct kvm_vcpu *vcpu);
1042 void kvm_queue_exception(struct kvm_vcpu *vcpu, unsigned nr);
1043 void kvm_queue_exception_e(struct kvm_vcpu *vcpu, unsigned nr, u32 error_code);
1044 void kvm_requeue_exception(struct kvm_vcpu *vcpu, unsigned nr);
1045 void kvm_requeue_exception_e(struct kvm_vcpu *vcpu, unsigned nr, u32 error_code);
1046 void kvm_inject_page_fault(struct kvm_vcpu *vcpu, struct x86_exception *fault);
1047 int kvm_read_guest_page_mmu(struct kvm_vcpu *vcpu, struct kvm_mmu *mmu,
1050 bool kvm_require_cpl(struct kvm_vcpu *vcpu, int required_cpl);
1051 bool kvm_require_dr(struct kvm_vcpu *vcpu, int dr);
1068 void kvm_inject_nmi(struct kvm_vcpu *vcpu);
1070 void kvm_mmu_pte_write(struct kvm_vcpu *vcpu, gpa_t gpa,
1073 int kvm_mmu_unprotect_page_virt(struct kvm_vcpu *vcpu, gva_t gva);
1074 void __kvm_mmu_free_some_pages(struct kvm_vcpu *vcpu);
1075 int kvm_mmu_load(struct kvm_vcpu *vcpu);
1076 void kvm_mmu_unload(struct kvm_vcpu *vcpu);
1077 void kvm_mmu_sync_roots(struct kvm_vcpu *vcpu);
1078 gpa_t translate_nested_gpa(struct kvm_vcpu *vcpu, gpa_t gpa, u32 access,
1080 gpa_t kvm_mmu_gva_to_gpa_read(struct kvm_vcpu *vcpu, gva_t gva,
1082 gpa_t kvm_mmu_gva_to_gpa_fetch(struct kvm_vcpu *vcpu, gva_t gva,
1084 gpa_t kvm_mmu_gva_to_gpa_write(struct kvm_vcpu *vcpu, gva_t gva,
1086 gpa_t kvm_mmu_gva_to_gpa_system(struct kvm_vcpu *vcpu, gva_t gva,
1089 int kvm_emulate_hypercall(struct kvm_vcpu *vcpu);
1091 int kvm_mmu_page_fault(struct kvm_vcpu *vcpu, gva_t gva, u32 error_code,
1093 void kvm_mmu_invlpg(struct kvm_vcpu *vcpu, gva_t gva);
1094 void kvm_mmu_new_cr3(struct kvm_vcpu *vcpu);
1099 static inline gpa_t translate_gpa(struct kvm_vcpu *vcpu, gpa_t gpa, u32 access, translate_gpa()
1139 static inline void kvm_inject_gp(struct kvm_vcpu *vcpu, u32 error_code) kvm_inject_gp()
1216 int kvm_cpu_has_injectable_intr(struct kvm_vcpu *v);
1217 int kvm_cpu_has_interrupt(struct kvm_vcpu *vcpu);
1218 int kvm_arch_interrupt_allowed(struct kvm_vcpu *vcpu);
1219 int kvm_cpu_get_interrupt(struct kvm_vcpu *v);
1220 void kvm_vcpu_reset(struct kvm_vcpu *vcpu, bool init_event);
1221 void kvm_vcpu_reload_apic_access_page(struct kvm_vcpu *vcpu);
1228 u64 kvm_scale_tsc(struct kvm_vcpu *vcpu, u64 tsc);
1229 u64 kvm_read_l1_tsc(struct kvm_vcpu *vcpu, u64 host_tsc);
1231 unsigned long kvm_get_linear_rip(struct kvm_vcpu *vcpu);
1232 bool kvm_is_linear_rip(struct kvm_vcpu *vcpu, unsigned long linear_rip);
1234 void kvm_arch_async_page_not_present(struct kvm_vcpu *vcpu,
1236 void kvm_arch_async_page_present(struct kvm_vcpu *vcpu,
1238 void kvm_arch_async_page_ready(struct kvm_vcpu *vcpu,
1240 bool kvm_arch_can_inject_async_page_present(struct kvm_vcpu *vcpu);
1241 extern bool kvm_find_async_pf_gfn(struct kvm_vcpu *vcpu, gfn_t gfn);
1243 void kvm_complete_insn_gp(struct kvm_vcpu *vcpu, int err);
1249 bool kvm_vcpu_is_reset_bsp(struct kvm_vcpu *vcpu);
1250 bool kvm_vcpu_is_bsp(struct kvm_vcpu *vcpu);
1253 struct kvm_vcpu **dest_vcpu);
1258 static inline void kvm_arch_vcpu_blocking(struct kvm_vcpu *vcpu) {} kvm_arch_vcpu_unblocking()
1259 static inline void kvm_arch_vcpu_unblocking(struct kvm_vcpu *vcpu) {}
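The long run of function pointers from kvm_host.h:770 onward belongs to the per-vendor x86 ops table; in this kernel generation the generic x86 code reaches the VMX or SVM implementation through a single global pointer. A minimal sketch of how such a hook is installed and invoked, assuming that kvm_x86_ops-style indirection; the trimmed structure and demo vendor functions below are stand-ins, not the real definitions:

        struct kvm_vcpu;

        /* Trimmed-down stand-in for the vendor ops table listed above;
         * only two of the hooks are reproduced for the sketch. */
        struct demo_x86_ops {
                void (*vcpu_load)(struct kvm_vcpu *vcpu, int cpu);
                void (*run)(struct kvm_vcpu *vcpu);
        };

        static void demo_vmx_vcpu_load(struct kvm_vcpu *vcpu, int cpu) { }
        static void demo_vmx_run(struct kvm_vcpu *vcpu) { }

        /* The vendor module fills in the table ... */
        static struct demo_x86_ops demo_vmx_ops = {
                .vcpu_load = demo_vmx_vcpu_load,
                .run       = demo_vmx_run,
        };

        /* ... and generic code calls through one global pointer, much like
         * kvm_x86_ops->run(vcpu) does in the x86 arch code of this era. */
        static struct demo_x86_ops *demo_x86_ops = &demo_vmx_ops;

        static void demo_vcpu_enter_guest(struct kvm_vcpu *vcpu, int cpu)
        {
                demo_x86_ops->vcpu_load(vcpu, cpu);
                demo_x86_ops->run(vcpu);
        }
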
/linux-4.4.14/arch/mips/kvm/
H A Dinterrupt.h39 void kvm_mips_queue_irq(struct kvm_vcpu *vcpu, uint32_t priority);
40 void kvm_mips_dequeue_irq(struct kvm_vcpu *vcpu, uint32_t priority);
41 int kvm_mips_pending_timer(struct kvm_vcpu *vcpu);
43 void kvm_mips_queue_timer_int_cb(struct kvm_vcpu *vcpu);
44 void kvm_mips_dequeue_timer_int_cb(struct kvm_vcpu *vcpu);
45 void kvm_mips_queue_io_int_cb(struct kvm_vcpu *vcpu,
47 void kvm_mips_dequeue_io_int_cb(struct kvm_vcpu *vcpu,
49 int kvm_mips_irq_deliver_cb(struct kvm_vcpu *vcpu, unsigned int priority,
51 int kvm_mips_irq_clear_cb(struct kvm_vcpu *vcpu, unsigned int priority,
53 void kvm_mips_deliver_interrupts(struct kvm_vcpu *vcpu, uint32_t cause);
H A Dinterrupt.c25 void kvm_mips_queue_irq(struct kvm_vcpu *vcpu, uint32_t priority) kvm_mips_queue_irq()
30 void kvm_mips_dequeue_irq(struct kvm_vcpu *vcpu, uint32_t priority) kvm_mips_dequeue_irq()
35 void kvm_mips_queue_timer_int_cb(struct kvm_vcpu *vcpu) kvm_mips_queue_timer_int_cb()
49 void kvm_mips_dequeue_timer_int_cb(struct kvm_vcpu *vcpu) kvm_mips_dequeue_timer_int_cb()
55 void kvm_mips_queue_io_int_cb(struct kvm_vcpu *vcpu, kvm_mips_queue_io_int_cb()
88 void kvm_mips_dequeue_io_int_cb(struct kvm_vcpu *vcpu, kvm_mips_dequeue_io_int_cb()
116 int kvm_mips_irq_deliver_cb(struct kvm_vcpu *vcpu, unsigned int priority, kvm_mips_irq_deliver_cb()
198 int kvm_mips_irq_clear_cb(struct kvm_vcpu *vcpu, unsigned int priority, kvm_mips_irq_clear_cb()
204 void kvm_mips_deliver_interrupts(struct kvm_vcpu *vcpu, uint32_t cause) kvm_mips_deliver_interrupts()
239 int kvm_mips_pending_timer(struct kvm_vcpu *vcpu) kvm_mips_pending_timer()
H A Dcommpage.h22 extern void kvm_mips_commpage_init(struct kvm_vcpu *vcpu);
H A Dmips.c37 #define VCPU_STAT(x) offsetof(struct kvm_vcpu, stat.x)
63 static int kvm_mips_reset_vcpu(struct kvm_vcpu *vcpu) kvm_mips_reset_vcpu()
79 int kvm_arch_vcpu_runnable(struct kvm_vcpu *vcpu) kvm_arch_vcpu_runnable()
84 int kvm_arch_vcpu_should_kick(struct kvm_vcpu *vcpu) kvm_arch_vcpu_should_kick()
144 struct kvm_vcpu *vcpu; kvm_mips_free_vcpus()
246 struct kvm_vcpu *kvm_arch_vcpu_create(struct kvm *kvm, unsigned int id) kvm_arch_vcpu_create()
252 struct kvm_vcpu *vcpu = kzalloc(sizeof(struct kvm_vcpu), GFP_KERNEL); kvm_arch_vcpu_create()
356 void kvm_arch_vcpu_free(struct kvm_vcpu *vcpu) kvm_arch_vcpu_free()
369 void kvm_arch_vcpu_destroy(struct kvm_vcpu *vcpu) kvm_arch_vcpu_destroy()
374 int kvm_arch_vcpu_ioctl_set_guest_debug(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_set_guest_debug()
380 int kvm_arch_vcpu_ioctl_run(struct kvm_vcpu *vcpu, struct kvm_run *run) kvm_arch_vcpu_ioctl_run()
420 int kvm_vcpu_ioctl_interrupt(struct kvm_vcpu *vcpu, kvm_vcpu_ioctl_interrupt()
424 struct kvm_vcpu *dvcpu = NULL; kvm_vcpu_ioctl_interrupt()
454 int kvm_arch_vcpu_ioctl_get_mpstate(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_get_mpstate()
460 int kvm_arch_vcpu_ioctl_set_mpstate(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_set_mpstate()
532 static int kvm_mips_get_reg(struct kvm_vcpu *vcpu, kvm_mips_get_reg()
711 static int kvm_mips_set_reg(struct kvm_vcpu *vcpu, kvm_mips_set_reg()
872 static int kvm_vcpu_ioctl_enable_cap(struct kvm_vcpu *vcpu, kvm_vcpu_ioctl_enable_cap()
902 struct kvm_vcpu *vcpu = filp->private_data; kvm_arch_vcpu_ioctl()
1038 int kvm_arch_vcpu_ioctl_get_sregs(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_get_sregs()
1044 int kvm_arch_vcpu_ioctl_set_sregs(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_set_sregs()
1050 void kvm_arch_vcpu_postcreate(struct kvm_vcpu *vcpu) kvm_arch_vcpu_postcreate()
1054 int kvm_arch_vcpu_ioctl_get_fpu(struct kvm_vcpu *vcpu, struct kvm_fpu *fpu) kvm_arch_vcpu_ioctl_get_fpu()
1059 int kvm_arch_vcpu_ioctl_set_fpu(struct kvm_vcpu *vcpu, struct kvm_fpu *fpu) kvm_arch_vcpu_ioctl_set_fpu()
1064 int kvm_arch_vcpu_fault(struct kvm_vcpu *vcpu, struct vm_fault *vmf) kvm_arch_vcpu_fault()
1106 int kvm_cpu_has_pending_timer(struct kvm_vcpu *vcpu) kvm_cpu_has_pending_timer()
1111 int kvm_arch_vcpu_dump_regs(struct kvm_vcpu *vcpu) kvm_arch_vcpu_dump_regs()
1142 int kvm_arch_vcpu_ioctl_set_regs(struct kvm_vcpu *vcpu, struct kvm_regs *regs) kvm_arch_vcpu_ioctl_set_regs()
1156 int kvm_arch_vcpu_ioctl_get_regs(struct kvm_vcpu *vcpu, struct kvm_regs *regs) kvm_arch_vcpu_ioctl_get_regs()
1172 struct kvm_vcpu *vcpu = (struct kvm_vcpu *)data; kvm_mips_comparecount_func()
1184 struct kvm_vcpu *vcpu; kvm_mips_comparecount_wakeup()
1186 vcpu = container_of(timer, struct kvm_vcpu, arch.comparecount_timer); kvm_mips_comparecount_wakeup()
1191 int kvm_arch_vcpu_init(struct kvm_vcpu *vcpu) kvm_arch_vcpu_init()
1200 int kvm_arch_vcpu_ioctl_translate(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_translate()
1207 int kvm_arch_vcpu_setup(struct kvm_vcpu *vcpu) kvm_arch_vcpu_setup()
1226 int kvm_mips_handle_exit(struct kvm_run *run, struct kvm_vcpu *vcpu) kvm_mips_handle_exit()
1250 kvm_debug("kvm_mips_handle_exit: cause: %#x, PC: %p, kvm_run: %p, kvm_vcpu: %p\n", kvm_mips_handle_exit()
1422 void kvm_own_fpu(struct kvm_vcpu *vcpu) kvm_own_fpu()
1467 void kvm_own_msa(struct kvm_vcpu *vcpu) kvm_own_msa()
1525 void kvm_drop_fpu(struct kvm_vcpu *vcpu) kvm_drop_fpu()
1540 void kvm_lose_fpu(struct kvm_vcpu *vcpu) kvm_lose_fpu()
1627 ret = kvm_init(NULL, sizeof(struct kvm_vcpu), 0, THIS_MODULE); kvm_mips_init()
H A Dcommpage.c27 void kvm_mips_commpage_init(struct kvm_vcpu *vcpu) kvm_mips_commpage_init()
H A Ddyntrans.c32 struct kvm_vcpu *vcpu) kvm_mips_trans_cache_index()
53 struct kvm_vcpu *vcpu) kvm_mips_trans_cache_va()
73 int kvm_mips_trans_mfc0(uint32_t inst, uint32_t *opc, struct kvm_vcpu *vcpu) kvm_mips_trans_mfc0()
115 int kvm_mips_trans_mtc0(uint32_t inst, uint32_t *opc, struct kvm_vcpu *vcpu) kvm_mips_trans_mtc0()
H A Dtrap_emul.c40 static int kvm_trap_emul_handle_cop_unusable(struct kvm_vcpu *vcpu) kvm_trap_emul_handle_cop_unusable()
88 static int kvm_trap_emul_handle_tlb_mod(struct kvm_vcpu *vcpu) kvm_trap_emul_handle_tlb_mod()
132 static int kvm_trap_emul_handle_tlb_st_miss(struct kvm_vcpu *vcpu) kvm_trap_emul_handle_tlb_st_miss()
179 static int kvm_trap_emul_handle_tlb_ld_miss(struct kvm_vcpu *vcpu) kvm_trap_emul_handle_tlb_ld_miss()
233 static int kvm_trap_emul_handle_addr_err_st(struct kvm_vcpu *vcpu) kvm_trap_emul_handle_addr_err_st()
263 static int kvm_trap_emul_handle_addr_err_ld(struct kvm_vcpu *vcpu) kvm_trap_emul_handle_addr_err_ld()
293 static int kvm_trap_emul_handle_syscall(struct kvm_vcpu *vcpu) kvm_trap_emul_handle_syscall()
311 static int kvm_trap_emul_handle_res_inst(struct kvm_vcpu *vcpu) kvm_trap_emul_handle_res_inst()
329 static int kvm_trap_emul_handle_break(struct kvm_vcpu *vcpu) kvm_trap_emul_handle_break()
347 static int kvm_trap_emul_handle_trap(struct kvm_vcpu *vcpu) kvm_trap_emul_handle_trap()
365 static int kvm_trap_emul_handle_msa_fpe(struct kvm_vcpu *vcpu) kvm_trap_emul_handle_msa_fpe()
383 static int kvm_trap_emul_handle_fpe(struct kvm_vcpu *vcpu) kvm_trap_emul_handle_fpe()
407 static int kvm_trap_emul_handle_msa_disabled(struct kvm_vcpu *vcpu) kvm_trap_emul_handle_msa_disabled()
453 static int kvm_trap_emul_vcpu_init(struct kvm_vcpu *vcpu) kvm_trap_emul_vcpu_init()
458 static int kvm_trap_emul_vcpu_setup(struct kvm_vcpu *vcpu) kvm_trap_emul_vcpu_setup()
514 static int kvm_trap_emul_get_one_reg(struct kvm_vcpu *vcpu, kvm_trap_emul_get_one_reg()
537 static int kvm_trap_emul_set_one_reg(struct kvm_vcpu *vcpu, kvm_trap_emul_set_one_reg()
625 static int kvm_trap_emul_vcpu_get_regs(struct kvm_vcpu *vcpu) kvm_trap_emul_vcpu_get_regs()
632 static int kvm_trap_emul_vcpu_set_regs(struct kvm_vcpu *vcpu) kvm_trap_emul_vcpu_set_regs()
H A Dtlb.c50 uint32_t kvm_mips_get_kernel_asid(struct kvm_vcpu *vcpu) kvm_mips_get_kernel_asid()
55 uint32_t kvm_mips_get_user_asid(struct kvm_vcpu *vcpu) kvm_mips_get_user_asid()
60 inline uint32_t kvm_mips_get_commpage_asid(struct kvm_vcpu *vcpu) kvm_mips_get_commpage_asid()
116 void kvm_mips_dump_guest_tlbs(struct kvm_vcpu *vcpu) kvm_mips_dump_guest_tlbs()
168 unsigned long kvm_mips_translate_guest_kseg0_to_hpa(struct kvm_vcpu *vcpu, kvm_mips_translate_guest_kseg0_to_hpa()
198 int kvm_mips_host_tlb_write(struct kvm_vcpu *vcpu, unsigned long entryhi, kvm_mips_host_tlb_write()
262 struct kvm_vcpu *vcpu) kvm_mips_handle_kseg0_tlb_fault()
314 struct kvm_vcpu *vcpu) kvm_mips_handle_commpage_tlb_fault()
356 int kvm_mips_handle_mapped_seg_tlb_fault(struct kvm_vcpu *vcpu, kvm_mips_handle_mapped_seg_tlb_fault()
406 int kvm_mips_guest_tlb_lookup(struct kvm_vcpu *vcpu, unsigned long entryhi) kvm_mips_guest_tlb_lookup()
427 int kvm_mips_host_tlb_lookup(struct kvm_vcpu *vcpu, unsigned long vaddr) kvm_mips_host_tlb_lookup()
463 int kvm_mips_host_tlb_inv(struct kvm_vcpu *vcpu, unsigned long va) kvm_mips_host_tlb_inv()
511 int kvm_mips_host_tlb_inv_index(struct kvm_vcpu *vcpu, int index) kvm_mips_host_tlb_inv_index()
600 struct kvm_vcpu *vcpu) kvm_get_new_mmu_context()
658 static void kvm_mips_migrate_count(struct kvm_vcpu *vcpu) kvm_mips_migrate_count()
665 void kvm_arch_vcpu_load(struct kvm_vcpu *vcpu, int cpu) kvm_arch_vcpu_load()
745 void kvm_arch_vcpu_put(struct kvm_vcpu *vcpu) kvm_arch_vcpu_put()
773 uint32_t kvm_get_inst(uint32_t *opc, struct kvm_vcpu *vcpu) kvm_get_inst()
H A Dstats.c70 void kvm_mips_dump_stats(struct kvm_vcpu *vcpu) kvm_mips_dump_stats()
H A Dtrace.h24 TP_PROTO(struct kvm_vcpu *vcpu, unsigned int reason),
H A Demulate.c42 unsigned long kvm_compute_return_epc(struct kvm_vcpu *vcpu, kvm_compute_return_epc()
201 enum emulation_result update_pc(struct kvm_vcpu *vcpu, uint32_t cause) update_pc()
231 static inline int kvm_mips_count_disabled(struct kvm_vcpu *vcpu) kvm_mips_count_disabled()
246 static uint32_t kvm_mips_ktime_to_count(struct kvm_vcpu *vcpu, ktime_t now) kvm_mips_ktime_to_count()
285 static inline ktime_t kvm_mips_count_time(struct kvm_vcpu *vcpu) kvm_mips_count_time()
303 static uint32_t kvm_mips_read_count_running(struct kvm_vcpu *vcpu, ktime_t now) kvm_mips_read_count_running()
363 uint32_t kvm_mips_read_count(struct kvm_vcpu *vcpu) kvm_mips_read_count()
390 static ktime_t kvm_mips_freeze_hrtimer(struct kvm_vcpu *vcpu, kvm_mips_freeze_hrtimer()
421 static void kvm_mips_resume_hrtimer(struct kvm_vcpu *vcpu, kvm_mips_resume_hrtimer()
447 void kvm_mips_write_count(struct kvm_vcpu *vcpu, uint32_t count) kvm_mips_write_count()
471 void kvm_mips_init_count(struct kvm_vcpu *vcpu) kvm_mips_init_count()
494 int kvm_mips_set_count_hz(struct kvm_vcpu *vcpu, s64 count_hz) kvm_mips_set_count_hz()
541 void kvm_mips_write_compare(struct kvm_vcpu *vcpu, uint32_t compare, bool ack) kvm_mips_write_compare()
585 static ktime_t kvm_mips_count_disable(struct kvm_vcpu *vcpu) kvm_mips_count_disable()
612 void kvm_mips_count_disable_cause(struct kvm_vcpu *vcpu) kvm_mips_count_disable_cause()
632 void kvm_mips_count_enable_cause(struct kvm_vcpu *vcpu) kvm_mips_count_enable_cause()
658 int kvm_mips_set_count_ctl(struct kvm_vcpu *vcpu, s64 count_ctl) kvm_mips_set_count_ctl()
720 int kvm_mips_set_count_resume(struct kvm_vcpu *vcpu, s64 count_resume) kvm_mips_set_count_resume()
742 enum hrtimer_restart kvm_mips_count_timeout(struct kvm_vcpu *vcpu) kvm_mips_count_timeout()
750 enum emulation_result kvm_mips_emul_eret(struct kvm_vcpu *vcpu) kvm_mips_emul_eret()
773 enum emulation_result kvm_mips_emul_wait(struct kvm_vcpu *vcpu) kvm_mips_emul_wait()
801 enum emulation_result kvm_mips_emul_tlbr(struct kvm_vcpu *vcpu) kvm_mips_emul_tlbr()
811 enum emulation_result kvm_mips_emul_tlbwi(struct kvm_vcpu *vcpu) kvm_mips_emul_tlbwi()
850 enum emulation_result kvm_mips_emul_tlbwr(struct kvm_vcpu *vcpu) kvm_mips_emul_tlbwr()
881 enum emulation_result kvm_mips_emul_tlbp(struct kvm_vcpu *vcpu) kvm_mips_emul_tlbp()
905 unsigned int kvm_mips_config1_wrmask(struct kvm_vcpu *vcpu) kvm_mips_config1_wrmask()
923 unsigned int kvm_mips_config3_wrmask(struct kvm_vcpu *vcpu) kvm_mips_config3_wrmask()
942 unsigned int kvm_mips_config4_wrmask(struct kvm_vcpu *vcpu) kvm_mips_config4_wrmask()
955 unsigned int kvm_mips_config5_wrmask(struct kvm_vcpu *vcpu) kvm_mips_config5_wrmask()
978 struct kvm_vcpu *vcpu) kvm_mips_emulate_CP0()
1318 struct kvm_vcpu *vcpu) kvm_mips_emulate_store()
1430 struct kvm_vcpu *vcpu) kvm_mips_emulate_load()
1533 int kvm_mips_sync_icache(unsigned long va, struct kvm_vcpu *vcpu) kvm_mips_sync_icache()
1575 struct kvm_vcpu *vcpu) kvm_mips_emulate_cache()
1728 struct kvm_vcpu *vcpu) kvm_mips_emulate_inst()
1776 struct kvm_vcpu *vcpu) kvm_mips_emulate_syscall()
1811 struct kvm_vcpu *vcpu) kvm_mips_emulate_tlbmiss_ld()
1857 struct kvm_vcpu *vcpu) kvm_mips_emulate_tlbinv_ld()
1903 struct kvm_vcpu *vcpu) kvm_mips_emulate_tlbmiss_st()
1947 struct kvm_vcpu *vcpu) kvm_mips_emulate_tlbinv_st()
1991 struct kvm_vcpu *vcpu) kvm_mips_handle_tlbmod()
2020 struct kvm_vcpu *vcpu) kvm_mips_emulate_tlbmod()
2062 struct kvm_vcpu *vcpu) kvm_mips_emulate_fpu_exc()
2091 struct kvm_vcpu *vcpu) kvm_mips_emulate_ri_exc()
2126 struct kvm_vcpu *vcpu) kvm_mips_emulate_bp_exc()
2161 struct kvm_vcpu *vcpu) kvm_mips_emulate_trap_exc()
2196 struct kvm_vcpu *vcpu) kvm_mips_emulate_msafpe_exc()
2231 struct kvm_vcpu *vcpu) kvm_mips_emulate_fpe_exc()
2266 struct kvm_vcpu *vcpu) kvm_mips_emulate_msadis_exc()
2315 struct kvm_vcpu *vcpu) kvm_mips_handle_ri()
2398 enum emulation_result kvm_mips_complete_mmio_load(struct kvm_vcpu *vcpu, kvm_mips_complete_mmio_load()
2446 struct kvm_vcpu *vcpu) kvm_mips_emulate_exc()
2484 struct kvm_vcpu *vcpu) kvm_mips_check_privilege()
2580 struct kvm_vcpu *vcpu) kvm_mips_handle_tlbmiss()
/linux-4.4.14/arch/powerpc/kernel/
H A Dasm-offsets.c126 DEFINE(THREAD_KVM_VCPU, offsetof(struct thread_struct, kvm_vcpu)); main()
436 DEFINE(VCPU_HOST_STACK, offsetof(struct kvm_vcpu, arch.host_stack)); main()
437 DEFINE(VCPU_HOST_PID, offsetof(struct kvm_vcpu, arch.host_pid)); main()
438 DEFINE(VCPU_GUEST_PID, offsetof(struct kvm_vcpu, arch.pid)); main()
439 DEFINE(VCPU_GPRS, offsetof(struct kvm_vcpu, arch.gpr)); main()
440 DEFINE(VCPU_VRSAVE, offsetof(struct kvm_vcpu, arch.vrsave)); main()
441 DEFINE(VCPU_FPRS, offsetof(struct kvm_vcpu, arch.fp.fpr)); main()
443 DEFINE(VCPU_VRS, offsetof(struct kvm_vcpu, arch.vr.vr)); main()
445 DEFINE(VCPU_XER, offsetof(struct kvm_vcpu, arch.xer)); main()
446 DEFINE(VCPU_CTR, offsetof(struct kvm_vcpu, arch.ctr)); main()
447 DEFINE(VCPU_LR, offsetof(struct kvm_vcpu, arch.lr)); main()
449 DEFINE(VCPU_TAR, offsetof(struct kvm_vcpu, arch.tar)); main()
451 DEFINE(VCPU_CR, offsetof(struct kvm_vcpu, arch.cr)); main()
452 DEFINE(VCPU_PC, offsetof(struct kvm_vcpu, arch.pc)); main()
454 DEFINE(VCPU_MSR, offsetof(struct kvm_vcpu, arch.shregs.msr)); main()
455 DEFINE(VCPU_SRR0, offsetof(struct kvm_vcpu, arch.shregs.srr0)); main()
456 DEFINE(VCPU_SRR1, offsetof(struct kvm_vcpu, arch.shregs.srr1)); main()
457 DEFINE(VCPU_SPRG0, offsetof(struct kvm_vcpu, arch.shregs.sprg0)); main()
458 DEFINE(VCPU_SPRG1, offsetof(struct kvm_vcpu, arch.shregs.sprg1)); main()
459 DEFINE(VCPU_SPRG2, offsetof(struct kvm_vcpu, arch.shregs.sprg2)); main()
460 DEFINE(VCPU_SPRG3, offsetof(struct kvm_vcpu, arch.shregs.sprg3)); main()
463 DEFINE(VCPU_TB_RMENTRY, offsetof(struct kvm_vcpu, arch.rm_entry)); main()
464 DEFINE(VCPU_TB_RMINTR, offsetof(struct kvm_vcpu, arch.rm_intr)); main()
465 DEFINE(VCPU_TB_RMEXIT, offsetof(struct kvm_vcpu, arch.rm_exit)); main()
466 DEFINE(VCPU_TB_GUEST, offsetof(struct kvm_vcpu, arch.guest_time)); main()
467 DEFINE(VCPU_TB_CEDE, offsetof(struct kvm_vcpu, arch.cede_time)); main()
468 DEFINE(VCPU_CUR_ACTIVITY, offsetof(struct kvm_vcpu, arch.cur_activity)); main()
469 DEFINE(VCPU_ACTIVITY_START, offsetof(struct kvm_vcpu, arch.cur_tb_start)); main()
480 DEFINE(VCPU_SHADOW_PID, offsetof(struct kvm_vcpu, arch.shadow_pid)); main()
481 DEFINE(VCPU_SHADOW_PID1, offsetof(struct kvm_vcpu, arch.shadow_pid1)); main()
482 DEFINE(VCPU_SHARED, offsetof(struct kvm_vcpu, arch.shared)); main()
484 DEFINE(VCPU_SHADOW_MSR, offsetof(struct kvm_vcpu, arch.shadow_msr)); main()
486 DEFINE(VCPU_SHAREDBE, offsetof(struct kvm_vcpu, arch.shared_big_endian)); main()
496 DEFINE(VCPU_KVM, offsetof(struct kvm_vcpu, kvm)); main()
509 DEFINE(VCPU_DSISR, offsetof(struct kvm_vcpu, arch.shregs.dsisr)); main()
510 DEFINE(VCPU_DAR, offsetof(struct kvm_vcpu, arch.shregs.dar)); main()
511 DEFINE(VCPU_VPA, offsetof(struct kvm_vcpu, arch.vpa.pinned_addr)); main()
512 DEFINE(VCPU_VPA_DIRTY, offsetof(struct kvm_vcpu, arch.vpa.dirty)); main()
513 DEFINE(VCPU_HEIR, offsetof(struct kvm_vcpu, arch.emul_inst)); main()
514 DEFINE(VCPU_CPU, offsetof(struct kvm_vcpu, cpu)); main()
515 DEFINE(VCPU_THREAD_CPU, offsetof(struct kvm_vcpu, arch.thread_cpu)); main()
518 DEFINE(VCPU_VCPUID, offsetof(struct kvm_vcpu, vcpu_id)); main()
519 DEFINE(VCPU_PURR, offsetof(struct kvm_vcpu, arch.purr)); main()
520 DEFINE(VCPU_SPURR, offsetof(struct kvm_vcpu, arch.spurr)); main()
521 DEFINE(VCPU_IC, offsetof(struct kvm_vcpu, arch.ic)); main()
522 DEFINE(VCPU_VTB, offsetof(struct kvm_vcpu, arch.vtb)); main()
523 DEFINE(VCPU_DSCR, offsetof(struct kvm_vcpu, arch.dscr)); main()
524 DEFINE(VCPU_AMR, offsetof(struct kvm_vcpu, arch.amr)); main()
525 DEFINE(VCPU_UAMOR, offsetof(struct kvm_vcpu, arch.uamor)); main()
526 DEFINE(VCPU_IAMR, offsetof(struct kvm_vcpu, arch.iamr)); main()
527 DEFINE(VCPU_CTRL, offsetof(struct kvm_vcpu, arch.ctrl)); main()
528 DEFINE(VCPU_DABR, offsetof(struct kvm_vcpu, arch.dabr)); main()
529 DEFINE(VCPU_DABRX, offsetof(struct kvm_vcpu, arch.dabrx)); main()
530 DEFINE(VCPU_DAWR, offsetof(struct kvm_vcpu, arch.dawr)); main()
531 DEFINE(VCPU_DAWRX, offsetof(struct kvm_vcpu, arch.dawrx)); main()
532 DEFINE(VCPU_CIABR, offsetof(struct kvm_vcpu, arch.ciabr)); main()
533 DEFINE(VCPU_HFLAGS, offsetof(struct kvm_vcpu, arch.hflags)); main()
534 DEFINE(VCPU_DEC, offsetof(struct kvm_vcpu, arch.dec)); main()
535 DEFINE(VCPU_DEC_EXPIRES, offsetof(struct kvm_vcpu, arch.dec_expires)); main()
536 DEFINE(VCPU_PENDING_EXC, offsetof(struct kvm_vcpu, arch.pending_exceptions)); main()
537 DEFINE(VCPU_CEDED, offsetof(struct kvm_vcpu, arch.ceded)); main()
538 DEFINE(VCPU_PRODDED, offsetof(struct kvm_vcpu, arch.prodded)); main()
539 DEFINE(VCPU_MMCR, offsetof(struct kvm_vcpu, arch.mmcr)); main()
540 DEFINE(VCPU_PMC, offsetof(struct kvm_vcpu, arch.pmc)); main()
541 DEFINE(VCPU_SPMC, offsetof(struct kvm_vcpu, arch.spmc)); main()
542 DEFINE(VCPU_SIAR, offsetof(struct kvm_vcpu, arch.siar)); main()
543 DEFINE(VCPU_SDAR, offsetof(struct kvm_vcpu, arch.sdar)); main()
544 DEFINE(VCPU_SIER, offsetof(struct kvm_vcpu, arch.sier)); main()
545 DEFINE(VCPU_SLB, offsetof(struct kvm_vcpu, arch.slb)); main()
546 DEFINE(VCPU_SLB_MAX, offsetof(struct kvm_vcpu, arch.slb_max)); main()
547 DEFINE(VCPU_SLB_NR, offsetof(struct kvm_vcpu, arch.slb_nr)); main()
548 DEFINE(VCPU_FAULT_DSISR, offsetof(struct kvm_vcpu, arch.fault_dsisr)); main()
549 DEFINE(VCPU_FAULT_DAR, offsetof(struct kvm_vcpu, arch.fault_dar)); main()
550 DEFINE(VCPU_INTR_MSR, offsetof(struct kvm_vcpu, arch.intr_msr)); main()
551 DEFINE(VCPU_LAST_INST, offsetof(struct kvm_vcpu, arch.last_inst)); main()
552 DEFINE(VCPU_TRAP, offsetof(struct kvm_vcpu, arch.trap)); main()
553 DEFINE(VCPU_CFAR, offsetof(struct kvm_vcpu, arch.cfar)); main()
554 DEFINE(VCPU_PPR, offsetof(struct kvm_vcpu, arch.ppr)); main()
555 DEFINE(VCPU_FSCR, offsetof(struct kvm_vcpu, arch.fscr)); main()
556 DEFINE(VCPU_SHADOW_FSCR, offsetof(struct kvm_vcpu, arch.shadow_fscr)); main()
557 DEFINE(VCPU_PSPB, offsetof(struct kvm_vcpu, arch.pspb)); main()
558 DEFINE(VCPU_EBBHR, offsetof(struct kvm_vcpu, arch.ebbhr)); main()
559 DEFINE(VCPU_EBBRR, offsetof(struct kvm_vcpu, arch.ebbrr)); main()
560 DEFINE(VCPU_BESCR, offsetof(struct kvm_vcpu, arch.bescr)); main()
561 DEFINE(VCPU_CSIGR, offsetof(struct kvm_vcpu, arch.csigr)); main()
562 DEFINE(VCPU_TACR, offsetof(struct kvm_vcpu, arch.tacr)); main()
563 DEFINE(VCPU_TCSCR, offsetof(struct kvm_vcpu, arch.tcscr)); main()
564 DEFINE(VCPU_ACOP, offsetof(struct kvm_vcpu, arch.acop)); main()
565 DEFINE(VCPU_WORT, offsetof(struct kvm_vcpu, arch.wort)); main()
566 DEFINE(VCPU_SHADOW_SRR1, offsetof(struct kvm_vcpu, arch.shadow_srr1)); main()
579 DEFINE(VCPU_TFHAR, offsetof(struct kvm_vcpu, arch.tfhar)); main()
580 DEFINE(VCPU_TFIAR, offsetof(struct kvm_vcpu, arch.tfiar)); main()
581 DEFINE(VCPU_TEXASR, offsetof(struct kvm_vcpu, arch.texasr)); main()
582 DEFINE(VCPU_GPR_TM, offsetof(struct kvm_vcpu, arch.gpr_tm)); main()
583 DEFINE(VCPU_FPRS_TM, offsetof(struct kvm_vcpu, arch.fp_tm.fpr)); main()
584 DEFINE(VCPU_VRS_TM, offsetof(struct kvm_vcpu, arch.vr_tm.vr)); main()
585 DEFINE(VCPU_VRSAVE_TM, offsetof(struct kvm_vcpu, arch.vrsave_tm)); main()
586 DEFINE(VCPU_CR_TM, offsetof(struct kvm_vcpu, arch.cr_tm)); main()
587 DEFINE(VCPU_LR_TM, offsetof(struct kvm_vcpu, arch.lr_tm)); main()
588 DEFINE(VCPU_CTR_TM, offsetof(struct kvm_vcpu, arch.ctr_tm)); main()
589 DEFINE(VCPU_AMR_TM, offsetof(struct kvm_vcpu, arch.amr_tm)); main()
590 DEFINE(VCPU_PPR_TM, offsetof(struct kvm_vcpu, arch.ppr_tm)); main()
591 DEFINE(VCPU_DSCR_TM, offsetof(struct kvm_vcpu, arch.dscr_tm)); main()
592 DEFINE(VCPU_TAR_TM, offsetof(struct kvm_vcpu, arch.tar_tm)); main()
654 HSTATE_FIELD(HSTATE_KVM_VCPU, kvm_vcpu); main()
695 DEFINE(VCPU_CR, offsetof(struct kvm_vcpu, arch.cr)); main()
696 DEFINE(VCPU_XER, offsetof(struct kvm_vcpu, arch.xer)); main()
697 DEFINE(VCPU_LR, offsetof(struct kvm_vcpu, arch.lr)); main()
698 DEFINE(VCPU_CTR, offsetof(struct kvm_vcpu, arch.ctr)); main()
699 DEFINE(VCPU_PC, offsetof(struct kvm_vcpu, arch.pc)); main()
700 DEFINE(VCPU_SPRG9, offsetof(struct kvm_vcpu, arch.sprg9)); main()
701 DEFINE(VCPU_LAST_INST, offsetof(struct kvm_vcpu, arch.last_inst)); main()
702 DEFINE(VCPU_FAULT_DEAR, offsetof(struct kvm_vcpu, arch.fault_dear)); main()
703 DEFINE(VCPU_FAULT_ESR, offsetof(struct kvm_vcpu, arch.fault_esr)); main()
704 DEFINE(VCPU_CRIT_SAVE, offsetof(struct kvm_vcpu, arch.crit_save)); main()
737 DEFINE(VCPU_EVR, offsetof(struct kvm_vcpu, arch.evr[0])); main()
738 DEFINE(VCPU_ACC, offsetof(struct kvm_vcpu, arch.acc)); main()
739 DEFINE(VCPU_SPEFSCR, offsetof(struct kvm_vcpu, arch.spefscr)); main()
740 DEFINE(VCPU_HOST_SPEFSCR, offsetof(struct kvm_vcpu, arch.host_spefscr)); main()
744 DEFINE(VCPU_HOST_MAS4, offsetof(struct kvm_vcpu, arch.host_mas4)); main()
745 DEFINE(VCPU_HOST_MAS6, offsetof(struct kvm_vcpu, arch.host_mas6)); main()
746 DEFINE(VCPU_EPLC, offsetof(struct kvm_vcpu, arch.eplc)); main()
750 DEFINE(VCPU_TIMING_EXIT_TBU, offsetof(struct kvm_vcpu, main()
752 DEFINE(VCPU_TIMING_EXIT_TBL, offsetof(struct kvm_vcpu, main()
754 DEFINE(VCPU_TIMING_LAST_ENTER_TBU, offsetof(struct kvm_vcpu, main()
756 DEFINE(VCPU_TIMING_LAST_ENTER_TBL, offsetof(struct kvm_vcpu, main()
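All of the asm-offsets.c hits above serve one purpose: exporting struct kvm_vcpu field offsets as assembler-visible constants so the guest entry/exit paths written in assembly can load and store vcpu state directly. A hedged illustration of what each DEFINE() line computes; the DEFINE machinery itself lives in include/linux/kbuild.h and the Kbuild scripts, and the structures plus the ppc64 consumer shown in the comment are illustrative only:

        #include <stddef.h>
        #include <stdio.h>

        /* Stand-ins for the real structures; the layout is illustrative. */
        struct demo_vcpu_arch { unsigned long pc; unsigned long msr; };
        struct demo_kvm_vcpu  { int vcpu_id; struct demo_vcpu_arch arch; };

        int main(void)
        {
                /* DEFINE(VCPU_PC, offsetof(struct kvm_vcpu, arch.pc)) boils
                 * down to a byte offset like the one printed here; Kbuild
                 * captures it into asm-offsets.h so assembly can use it as
                 * an immediate, e.g. (hypothetically, on ppc64):
                 *     ld  r4, VCPU_PC(r3)   # r3 holds a struct kvm_vcpu *
                 */
                printf("VCPU_PC = %zu\n", offsetof(struct demo_kvm_vcpu, arch.pc));
                return 0;
        }
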
/linux-4.4.14/include/kvm/
H A Darm_arch_timer.h63 int kvm_timer_vcpu_reset(struct kvm_vcpu *vcpu,
65 void kvm_timer_vcpu_init(struct kvm_vcpu *vcpu);
66 void kvm_timer_flush_hwstate(struct kvm_vcpu *vcpu);
67 void kvm_timer_sync_hwstate(struct kvm_vcpu *vcpu);
68 void kvm_timer_vcpu_terminate(struct kvm_vcpu *vcpu);
70 u64 kvm_arm_timer_get_reg(struct kvm_vcpu *, u64 regid);
71 int kvm_arm_timer_set_reg(struct kvm_vcpu *, u64 regid, u64 value);
73 bool kvm_timer_should_fire(struct kvm_vcpu *vcpu);
74 void kvm_timer_schedule(struct kvm_vcpu *vcpu);
75 void kvm_timer_unschedule(struct kvm_vcpu *vcpu);
H A Darm_vgic.h83 struct kvm_vcpu;
113 struct vgic_lr (*get_lr)(const struct kvm_vcpu *, int);
114 void (*set_lr)(struct kvm_vcpu *, int, struct vgic_lr);
115 u64 (*get_elrsr)(const struct kvm_vcpu *vcpu);
116 u64 (*get_eisr)(const struct kvm_vcpu *vcpu);
117 void (*clear_eisr)(struct kvm_vcpu *vcpu);
118 u32 (*get_interrupt_status)(const struct kvm_vcpu *vcpu);
119 void (*enable_underflow)(struct kvm_vcpu *vcpu);
120 void (*disable_underflow)(struct kvm_vcpu *vcpu);
121 void (*get_vmcr)(struct kvm_vcpu *vcpu, struct vgic_vmcr *vmcr);
122 void (*set_vmcr)(struct kvm_vcpu *vcpu, struct vgic_vmcr *vmcr);
123 void (*enable)(struct kvm_vcpu *vcpu);
143 bool (*queue_sgi)(struct kvm_vcpu *, int irq);
144 void (*add_sgi_source)(struct kvm_vcpu *, int irq, int source);
153 struct kvm_vcpu *redist_vcpu;
326 struct kvm_vcpu;
335 void kvm_vgic_vcpu_early_init(struct kvm_vcpu *vcpu);
336 void kvm_vgic_vcpu_destroy(struct kvm_vcpu *vcpu);
337 void kvm_vgic_flush_hwstate(struct kvm_vcpu *vcpu);
338 void kvm_vgic_sync_hwstate(struct kvm_vcpu *vcpu);
343 void vgic_v3_dispatch_sgi(struct kvm_vcpu *vcpu, u64 reg);
344 int kvm_vgic_vcpu_pending_irq(struct kvm_vcpu *vcpu);
345 struct irq_phys_map *kvm_vgic_map_phys_irq(struct kvm_vcpu *vcpu,
347 int kvm_vgic_unmap_phys_irq(struct kvm_vcpu *vcpu, struct irq_phys_map *map);
348 bool kvm_vgic_map_is_active(struct kvm_vcpu *vcpu, struct irq_phys_map *map);
H A Diodev.h22 struct kvm_vcpu;
30 int (*read)(struct kvm_vcpu *vcpu,
35 int (*write)(struct kvm_vcpu *vcpu,
54 static inline int kvm_iodevice_read(struct kvm_vcpu *vcpu, kvm_iodevice_read()
62 static inline int kvm_iodevice_write(struct kvm_vcpu *vcpu, kvm_iodevice_write()
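The iodev.h hits show that the MMIO/PIO device callbacks in this kernel take the issuing struct kvm_vcpu as their first argument, with kvm_iodevice_read()/kvm_iodevice_write() as thin wrappers. A hedged sketch of a device implemented against that shape follows; the read/write signatures echo the hits above, but the trimmed ops structure, the device, and its register file are hypothetical:

        #include <string.h>

        struct kvm_vcpu;
        typedef unsigned long long gpa_t;   /* stand-in for the kernel type */

        struct demo_io_device;

        /* Trimmed stand-in for kvm_io_device_ops: read/write both receive
         * the vcpu that triggered the access, as in the hits above. */
        struct demo_io_device_ops {
                int (*read)(struct kvm_vcpu *vcpu, struct demo_io_device *dev,
                            gpa_t addr, int len, void *val);
                int (*write)(struct kvm_vcpu *vcpu, struct demo_io_device *dev,
                             gpa_t addr, int len, const void *val);
        };

        struct demo_io_device {
                const struct demo_io_device_ops *ops;
                unsigned char regs[16];         /* hypothetical register file */
        };

        /* For simplicity the sketch treats addr as an offset into regs[]. */
        static int demo_read(struct kvm_vcpu *vcpu, struct demo_io_device *dev,
                             gpa_t addr, int len, void *val)
        {
                if (addr + len > sizeof(dev->regs))
                        return -1;              /* not handled by this device */
                memcpy(val, dev->regs + addr, len);
                return 0;
        }

        static int demo_write(struct kvm_vcpu *vcpu, struct demo_io_device *dev,
                              gpa_t addr, int len, const void *val)
        {
                if (addr + len > sizeof(dev->regs))
                        return -1;
                memcpy(dev->regs + addr, val, len);
                return 0;
        }

        static const struct demo_io_device_ops demo_ops = {
                .read  = demo_read,
                .write = demo_write,
        };
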
/linux-4.4.14/arch/mips/include/asm/
H A Dkvm_host.h601 int (*handle_cop_unusable)(struct kvm_vcpu *vcpu);
602 int (*handle_tlb_mod)(struct kvm_vcpu *vcpu);
603 int (*handle_tlb_ld_miss)(struct kvm_vcpu *vcpu);
604 int (*handle_tlb_st_miss)(struct kvm_vcpu *vcpu);
605 int (*handle_addr_err_st)(struct kvm_vcpu *vcpu);
606 int (*handle_addr_err_ld)(struct kvm_vcpu *vcpu);
607 int (*handle_syscall)(struct kvm_vcpu *vcpu);
608 int (*handle_res_inst)(struct kvm_vcpu *vcpu);
609 int (*handle_break)(struct kvm_vcpu *vcpu);
610 int (*handle_trap)(struct kvm_vcpu *vcpu);
611 int (*handle_msa_fpe)(struct kvm_vcpu *vcpu);
612 int (*handle_fpe)(struct kvm_vcpu *vcpu);
613 int (*handle_msa_disabled)(struct kvm_vcpu *vcpu);
615 int (*vcpu_init)(struct kvm_vcpu *vcpu);
616 int (*vcpu_setup)(struct kvm_vcpu *vcpu);
618 void (*queue_timer_int)(struct kvm_vcpu *vcpu);
619 void (*dequeue_timer_int)(struct kvm_vcpu *vcpu);
620 void (*queue_io_int)(struct kvm_vcpu *vcpu,
622 void (*dequeue_io_int)(struct kvm_vcpu *vcpu,
624 int (*irq_deliver)(struct kvm_vcpu *vcpu, unsigned int priority,
626 int (*irq_clear)(struct kvm_vcpu *vcpu, unsigned int priority,
628 int (*get_one_reg)(struct kvm_vcpu *vcpu,
630 int (*set_one_reg)(struct kvm_vcpu *vcpu,
632 int (*vcpu_get_regs)(struct kvm_vcpu *vcpu);
633 int (*vcpu_set_regs)(struct kvm_vcpu *vcpu);
639 int kvm_arch_vcpu_dump_regs(struct kvm_vcpu *vcpu);
642 extern int __kvm_mips_vcpu_run(struct kvm_run *run, struct kvm_vcpu *vcpu);
652 void kvm_own_fpu(struct kvm_vcpu *vcpu);
653 void kvm_own_msa(struct kvm_vcpu *vcpu);
654 void kvm_drop_fpu(struct kvm_vcpu *vcpu);
655 void kvm_lose_fpu(struct kvm_vcpu *vcpu);
658 uint32_t kvm_get_kernel_asid(struct kvm_vcpu *vcpu);
660 uint32_t kvm_get_user_asid(struct kvm_vcpu *vcpu);
662 uint32_t kvm_get_commpage_asid (struct kvm_vcpu *vcpu);
665 struct kvm_vcpu *vcpu);
668 struct kvm_vcpu *vcpu);
670 extern int kvm_mips_handle_mapped_seg_tlb_fault(struct kvm_vcpu *vcpu,
678 struct kvm_vcpu *vcpu);
683 struct kvm_vcpu *vcpu);
686 extern void kvm_mips_dump_guest_tlbs(struct kvm_vcpu *vcpu);
688 extern int kvm_mips_host_tlb_inv(struct kvm_vcpu *vcpu, unsigned long entryhi);
689 extern int kvm_mips_host_tlb_inv_index(struct kvm_vcpu *vcpu, int index);
691 extern int kvm_mips_guest_tlb_lookup(struct kvm_vcpu *vcpu,
693 extern int kvm_mips_host_tlb_lookup(struct kvm_vcpu *vcpu, unsigned long vaddr);
694 extern unsigned long kvm_mips_translate_guest_kseg0_to_hpa(struct kvm_vcpu *vcpu,
697 struct kvm_vcpu *vcpu);
699 extern void kvm_mips_alloc_new_mmu_context(struct kvm_vcpu *vcpu);
700 extern void kvm_mips_vcpu_load(struct kvm_vcpu *vcpu, int cpu);
701 extern void kvm_mips_vcpu_put(struct kvm_vcpu *vcpu);
704 uint32_t kvm_get_inst(uint32_t *opc, struct kvm_vcpu *vcpu);
705 enum emulation_result update_pc(struct kvm_vcpu *vcpu, uint32_t cause);
710 struct kvm_vcpu *vcpu);
715 struct kvm_vcpu *vcpu);
720 struct kvm_vcpu *vcpu);
725 struct kvm_vcpu *vcpu);
730 struct kvm_vcpu *vcpu);
735 struct kvm_vcpu *vcpu);
740 struct kvm_vcpu *vcpu);
745 struct kvm_vcpu *vcpu);
750 struct kvm_vcpu *vcpu);
755 struct kvm_vcpu *vcpu);
760 struct kvm_vcpu *vcpu);
765 struct kvm_vcpu *vcpu);
770 struct kvm_vcpu *vcpu);
775 struct kvm_vcpu *vcpu);
780 struct kvm_vcpu *vcpu);
782 extern enum emulation_result kvm_mips_complete_mmio_load(struct kvm_vcpu *vcpu,
785 uint32_t kvm_mips_read_count(struct kvm_vcpu *vcpu);
786 void kvm_mips_write_count(struct kvm_vcpu *vcpu, uint32_t count);
787 void kvm_mips_write_compare(struct kvm_vcpu *vcpu, uint32_t compare, bool ack);
788 void kvm_mips_init_count(struct kvm_vcpu *vcpu);
789 int kvm_mips_set_count_ctl(struct kvm_vcpu *vcpu, s64 count_ctl);
790 int kvm_mips_set_count_resume(struct kvm_vcpu *vcpu, s64 count_resume);
791 int kvm_mips_set_count_hz(struct kvm_vcpu *vcpu, s64 count_hz);
792 void kvm_mips_count_enable_cause(struct kvm_vcpu *vcpu);
793 void kvm_mips_count_disable_cause(struct kvm_vcpu *vcpu);
794 enum hrtimer_restart kvm_mips_count_timeout(struct kvm_vcpu *vcpu);
799 struct kvm_vcpu *vcpu);
805 struct kvm_vcpu *vcpu);
810 struct kvm_vcpu *vcpu);
814 struct kvm_vcpu *vcpu);
818 struct kvm_vcpu *vcpu);
820 unsigned int kvm_mips_config1_wrmask(struct kvm_vcpu *vcpu);
821 unsigned int kvm_mips_config3_wrmask(struct kvm_vcpu *vcpu);
822 unsigned int kvm_mips_config4_wrmask(struct kvm_vcpu *vcpu);
823 unsigned int kvm_mips_config5_wrmask(struct kvm_vcpu *vcpu);
827 struct kvm_vcpu *vcpu);
829 struct kvm_vcpu *vcpu);
831 struct kvm_vcpu *vcpu);
833 struct kvm_vcpu *vcpu);
836 extern void kvm_mips_dump_stats(struct kvm_vcpu *vcpu);
848 static inline void kvm_arch_vcpu_uninit(struct kvm_vcpu *vcpu) {} kvm_arch_sched_in()
849 static inline void kvm_arch_sched_in(struct kvm_vcpu *vcpu, int cpu) {} kvm_arch_vcpu_blocking()
850 static inline void kvm_arch_vcpu_blocking(struct kvm_vcpu *vcpu) {} kvm_arch_vcpu_unblocking()
851 static inline void kvm_arch_vcpu_unblocking(struct kvm_vcpu *vcpu) {}
/linux-4.4.14/arch/arm/kernel/
H A Dasm-offsets.c173 DEFINE(VCPU_KVM, offsetof(struct kvm_vcpu, kvm)); main()
174 DEFINE(VCPU_MIDR, offsetof(struct kvm_vcpu, arch.midr)); main()
175 DEFINE(VCPU_CP15, offsetof(struct kvm_vcpu, arch.cp15)); main()
176 DEFINE(VCPU_VFP_GUEST, offsetof(struct kvm_vcpu, arch.vfp_guest)); main()
177 DEFINE(VCPU_VFP_HOST, offsetof(struct kvm_vcpu, arch.host_cpu_context)); main()
178 DEFINE(VCPU_REGS, offsetof(struct kvm_vcpu, arch.regs)); main()
179 DEFINE(VCPU_USR_REGS, offsetof(struct kvm_vcpu, arch.regs.usr_regs)); main()
180 DEFINE(VCPU_SVC_REGS, offsetof(struct kvm_vcpu, arch.regs.svc_regs)); main()
181 DEFINE(VCPU_ABT_REGS, offsetof(struct kvm_vcpu, arch.regs.abt_regs)); main()
182 DEFINE(VCPU_UND_REGS, offsetof(struct kvm_vcpu, arch.regs.und_regs)); main()
183 DEFINE(VCPU_IRQ_REGS, offsetof(struct kvm_vcpu, arch.regs.irq_regs)); main()
184 DEFINE(VCPU_FIQ_REGS, offsetof(struct kvm_vcpu, arch.regs.fiq_regs)); main()
185 DEFINE(VCPU_PC, offsetof(struct kvm_vcpu, arch.regs.usr_regs.ARM_pc)); main()
186 DEFINE(VCPU_CPSR, offsetof(struct kvm_vcpu, arch.regs.usr_regs.ARM_cpsr)); main()
187 DEFINE(VCPU_HCR, offsetof(struct kvm_vcpu, arch.hcr)); main()
188 DEFINE(VCPU_IRQ_LINES, offsetof(struct kvm_vcpu, arch.irq_lines)); main()
189 DEFINE(VCPU_HSR, offsetof(struct kvm_vcpu, arch.fault.hsr)); main()
190 DEFINE(VCPU_HxFAR, offsetof(struct kvm_vcpu, arch.fault.hxfar)); main()
191 DEFINE(VCPU_HPFAR, offsetof(struct kvm_vcpu, arch.fault.hpfar)); main()
192 DEFINE(VCPU_HYP_PC, offsetof(struct kvm_vcpu, arch.fault.hyp_pc)); main()
193 DEFINE(VCPU_VGIC_CPU, offsetof(struct kvm_vcpu, arch.vgic_cpu)); main()
202 DEFINE(VCPU_TIMER_CNTV_CTL, offsetof(struct kvm_vcpu, arch.timer_cpu.cntv_ctl)); main()
203 DEFINE(VCPU_TIMER_CNTV_CVAL, offsetof(struct kvm_vcpu, arch.timer_cpu.cntv_cval)); main()
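
These VCPU_* constants exist so hand-written assembly (such as arch/arm/kvm/interrupts.S) can index into struct kvm_vcpu without knowing its C layout: asm-offsets.c is compiled and post-processed into a generated header of plain macros at build time. A hedged sketch of the end result, with placeholder offset values rather than the real ones:

/* Roughly what the generated asm-offsets header provides (values are placeholders): */
#define VCPU_KVM	0x000	/* offsetof(struct kvm_vcpu, kvm) */
#define VCPU_PC		0x140	/* offsetof(struct kvm_vcpu, arch.regs.usr_regs.ARM_pc) */

/*
 * which lets the hyp-mode assembly address vCPU state symbolically,
 * e.g. with r0 holding the struct kvm_vcpu pointer:
 *
 *	ldr	r2, [r0, #VCPU_PC]	@ fetch guest PC
 */
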
/linux-4.4.14/include/linux/
H A Dkvm_host.h177 int kvm_io_bus_write(struct kvm_vcpu *vcpu, enum kvm_bus bus_idx, gpa_t addr,
179 int kvm_io_bus_write_cookie(struct kvm_vcpu *vcpu, enum kvm_bus bus_idx,
181 int kvm_io_bus_read(struct kvm_vcpu *vcpu, enum kvm_bus bus_idx, gpa_t addr,
193 struct kvm_vcpu *vcpu;
201 void kvm_clear_async_pf_completion_queue(struct kvm_vcpu *vcpu);
202 void kvm_check_async_pf_completion(struct kvm_vcpu *vcpu);
203 int kvm_setup_async_pf(struct kvm_vcpu *vcpu, gva_t gva, unsigned long hva,
205 int kvm_async_pf_wakeup_all(struct kvm_vcpu *vcpu);
225 struct kvm_vcpu { struct
287 static inline int kvm_vcpu_exiting_guest_mode(struct kvm_vcpu *vcpu) kvm_vcpu_exiting_guest_mode()
359 static inline int kvm_arch_vcpu_memslots_id(struct kvm_vcpu *vcpu) kvm_arch_vcpu_memslots_id()
386 struct kvm_vcpu *vcpus[KVM_MAX_VCPUS];
447 static inline struct kvm_vcpu *kvm_get_vcpu(struct kvm *kvm, int i) kvm_get_vcpu()
463 static inline struct kvm_vcpu *kvm_get_vcpu_by_id(struct kvm *kvm, int id) kvm_get_vcpu_by_id()
465 struct kvm_vcpu *vcpu; kvm_get_vcpu_by_id()
479 int kvm_vcpu_init(struct kvm_vcpu *vcpu, struct kvm *kvm, unsigned id);
480 void kvm_vcpu_uninit(struct kvm_vcpu *vcpu);
482 int __must_check vcpu_load(struct kvm_vcpu *vcpu);
483 void vcpu_put(struct kvm_vcpu *vcpu);
529 static inline struct kvm_memslots *kvm_vcpu_memslots(struct kvm_vcpu *vcpu) kvm_vcpu_memslots()
641 struct kvm_memslots *kvm_vcpu_memslots(struct kvm_vcpu *vcpu);
642 struct kvm_memory_slot *kvm_vcpu_gfn_to_memslot(struct kvm_vcpu *vcpu, gfn_t gfn);
643 pfn_t kvm_vcpu_gfn_to_pfn_atomic(struct kvm_vcpu *vcpu, gfn_t gfn);
644 pfn_t kvm_vcpu_gfn_to_pfn(struct kvm_vcpu *vcpu, gfn_t gfn);
645 struct page *kvm_vcpu_gfn_to_page(struct kvm_vcpu *vcpu, gfn_t gfn);
646 unsigned long kvm_vcpu_gfn_to_hva(struct kvm_vcpu *vcpu, gfn_t gfn);
647 unsigned long kvm_vcpu_gfn_to_hva_prot(struct kvm_vcpu *vcpu, gfn_t gfn, bool *writable);
648 int kvm_vcpu_read_guest_page(struct kvm_vcpu *vcpu, gfn_t gfn, void *data, int offset,
650 int kvm_vcpu_read_guest_atomic(struct kvm_vcpu *vcpu, gpa_t gpa, void *data,
652 int kvm_vcpu_read_guest(struct kvm_vcpu *vcpu, gpa_t gpa, void *data,
654 int kvm_vcpu_write_guest_page(struct kvm_vcpu *vcpu, gfn_t gfn, const void *data,
656 int kvm_vcpu_write_guest(struct kvm_vcpu *vcpu, gpa_t gpa, const void *data,
658 void kvm_vcpu_mark_page_dirty(struct kvm_vcpu *vcpu, gfn_t gfn);
660 void kvm_vcpu_block(struct kvm_vcpu *vcpu);
661 void kvm_arch_vcpu_blocking(struct kvm_vcpu *vcpu);
662 void kvm_arch_vcpu_unblocking(struct kvm_vcpu *vcpu);
663 void kvm_vcpu_kick(struct kvm_vcpu *vcpu);
664 int kvm_vcpu_yield_to(struct kvm_vcpu *target);
665 void kvm_vcpu_on_spin(struct kvm_vcpu *vcpu);
666 void kvm_load_guest_fpu(struct kvm_vcpu *vcpu);
667 void kvm_put_guest_fpu(struct kvm_vcpu *vcpu);
679 int kvm_arch_vcpu_fault(struct kvm_vcpu *vcpu, struct vm_fault *vmf);
702 int kvm_arch_vcpu_ioctl_get_fpu(struct kvm_vcpu *vcpu, struct kvm_fpu *fpu);
703 int kvm_arch_vcpu_ioctl_set_fpu(struct kvm_vcpu *vcpu, struct kvm_fpu *fpu);
705 int kvm_arch_vcpu_ioctl_translate(struct kvm_vcpu *vcpu,
708 int kvm_arch_vcpu_ioctl_get_regs(struct kvm_vcpu *vcpu, struct kvm_regs *regs);
709 int kvm_arch_vcpu_ioctl_set_regs(struct kvm_vcpu *vcpu, struct kvm_regs *regs);
710 int kvm_arch_vcpu_ioctl_get_sregs(struct kvm_vcpu *vcpu,
712 int kvm_arch_vcpu_ioctl_set_sregs(struct kvm_vcpu *vcpu,
714 int kvm_arch_vcpu_ioctl_get_mpstate(struct kvm_vcpu *vcpu,
716 int kvm_arch_vcpu_ioctl_set_mpstate(struct kvm_vcpu *vcpu,
718 int kvm_arch_vcpu_ioctl_set_guest_debug(struct kvm_vcpu *vcpu,
720 int kvm_arch_vcpu_ioctl_run(struct kvm_vcpu *vcpu, struct kvm_run *kvm_run);
725 int kvm_arch_vcpu_init(struct kvm_vcpu *vcpu);
726 void kvm_arch_vcpu_uninit(struct kvm_vcpu *vcpu);
728 void kvm_arch_sched_in(struct kvm_vcpu *vcpu, int cpu);
730 void kvm_arch_vcpu_free(struct kvm_vcpu *vcpu);
731 void kvm_arch_vcpu_load(struct kvm_vcpu *vcpu, int cpu);
732 void kvm_arch_vcpu_put(struct kvm_vcpu *vcpu);
733 struct kvm_vcpu *kvm_arch_vcpu_create(struct kvm *kvm, unsigned int id);
734 int kvm_arch_vcpu_setup(struct kvm_vcpu *vcpu);
735 void kvm_arch_vcpu_postcreate(struct kvm_vcpu *vcpu);
736 void kvm_arch_vcpu_destroy(struct kvm_vcpu *vcpu);
743 int kvm_arch_vcpu_runnable(struct kvm_vcpu *vcpu);
744 int kvm_arch_vcpu_should_kick(struct kvm_vcpu *vcpu);
797 static inline wait_queue_head_t *kvm_arch_vcpu_wq(struct kvm_vcpu *vcpu) kvm_arch_vcpu_wq()
824 int kvm_cpu_has_pending_timer(struct kvm_vcpu *vcpu);
825 void kvm_vcpu_kick(struct kvm_vcpu *vcpu);
993 static inline void kvm_migrate_timers(struct kvm_vcpu *vcpu) kvm_migrate_timers()
1103 bool kvm_vcpu_compatible(struct kvm_vcpu *vcpu);
1105 static inline bool kvm_vcpu_compatible(struct kvm_vcpu *vcpu) { return true; } kvm_vcpu_compatible()
1108 static inline void kvm_make_request(int req, struct kvm_vcpu *vcpu) kvm_make_request()
1113 static inline bool kvm_check_request(int req, struct kvm_vcpu *vcpu) kvm_check_request()
1167 static inline void kvm_vcpu_set_in_spin_loop(struct kvm_vcpu *vcpu, bool val) kvm_vcpu_set_in_spin_loop()
1171 static inline void kvm_vcpu_set_dy_eligible(struct kvm_vcpu *vcpu, bool val) kvm_vcpu_set_dy_eligible()
1178 static inline void kvm_vcpu_set_in_spin_loop(struct kvm_vcpu *vcpu, bool val) kvm_vcpu_set_in_spin_loop()
1182 static inline void kvm_vcpu_set_dy_eligible(struct kvm_vcpu *vcpu, bool val) kvm_vcpu_set_dy_eligible()
H A Dkvm_types.h29 struct kvm_vcpu;
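
kvm_make_request()/kvm_check_request() together with kvm_vcpu_kick(), all listed above, form the generic cross-vCPU signalling pattern: the requester posts a request bit and kicks the target out of guest mode, and the target's run loop consumes the bit. A minimal sketch, assuming a made-up KVM_REQ_DEMO request number (real callers use the arch-defined KVM_REQ_* constants):

#include <linux/kvm_host.h>

#define KVM_REQ_DEMO	8	/* placeholder request bit for the sketch */

static void demo_signal_all_vcpus(struct kvm *kvm)
{
	struct kvm_vcpu *vcpu;
	int i;

	for (i = 0; i < atomic_read(&kvm->online_vcpus); i++) {
		vcpu = kvm_get_vcpu(kvm, i);
		kvm_make_request(KVM_REQ_DEMO, vcpu);
		kvm_vcpu_kick(vcpu);	/* force an exit so the bit is noticed */
	}
}

/* ...and the consuming side, typically in the arch vcpu run loop: */
static void demo_handle_requests(struct kvm_vcpu *vcpu)
{
	if (kvm_check_request(KVM_REQ_DEMO, vcpu)) {
		/* perform the work the requester asked for */
	}
}

In-tree code usually iterates with the kvm_for_each_vcpu() helper rather than an open-coded loop over online_vcpus.
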
/linux-4.4.14/arch/arm/kvm/
H A Dguest.c33 #define VCPU_STAT(x) { #x, offsetof(struct kvm_vcpu, stat.x), KVM_STAT_VCPU }
39 int kvm_arch_vcpu_setup(struct kvm_vcpu *vcpu) kvm_arch_vcpu_setup()
49 static int get_core_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg) get_core_reg()
66 static int set_core_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg) set_core_reg()
102 int kvm_arch_vcpu_ioctl_get_regs(struct kvm_vcpu *vcpu, struct kvm_regs *regs) kvm_arch_vcpu_ioctl_get_regs()
107 int kvm_arch_vcpu_ioctl_set_regs(struct kvm_vcpu *vcpu, struct kvm_regs *regs) kvm_arch_vcpu_ioctl_set_regs()
125 static int copy_timer_indices(struct kvm_vcpu *vcpu, u64 __user *uindices) copy_timer_indices()
139 static int set_timer_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg) set_timer_reg()
152 static int get_timer_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg) get_timer_reg()
171 unsigned long kvm_arm_num_regs(struct kvm_vcpu *vcpu) kvm_arm_num_regs()
182 int kvm_arm_copy_reg_indices(struct kvm_vcpu *vcpu, u64 __user *uindices) kvm_arm_copy_reg_indices()
202 int kvm_arm_get_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg) kvm_arm_get_reg()
218 int kvm_arm_set_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg) kvm_arm_set_reg()
234 int kvm_arch_vcpu_ioctl_get_sregs(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_get_sregs()
240 int kvm_arch_vcpu_ioctl_set_sregs(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_set_sregs()
278 int kvm_arch_vcpu_ioctl_get_fpu(struct kvm_vcpu *vcpu, struct kvm_fpu *fpu) kvm_arch_vcpu_ioctl_get_fpu()
283 int kvm_arch_vcpu_ioctl_set_fpu(struct kvm_vcpu *vcpu, struct kvm_fpu *fpu) kvm_arch_vcpu_ioctl_set_fpu()
288 int kvm_arch_vcpu_ioctl_translate(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_translate()
294 int kvm_arch_vcpu_ioctl_set_guest_debug(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_set_guest_debug()
H A Dcoproc.h43 bool (*access)(struct kvm_vcpu *,
48 void (*reset)(struct kvm_vcpu *, const struct coproc_reg *);
71 static inline bool ignore_write(struct kvm_vcpu *vcpu, ignore_write()
77 static inline bool read_zero(struct kvm_vcpu *vcpu, read_zero()
84 static inline bool write_to_read_only(struct kvm_vcpu *vcpu, write_to_read_only()
93 static inline bool read_from_write_only(struct kvm_vcpu *vcpu, read_from_write_only()
103 static inline void reset_unknown(struct kvm_vcpu *vcpu, reset_unknown()
111 static inline void reset_val(struct kvm_vcpu *vcpu, const struct coproc_reg *r) reset_val()
118 static inline void reset_unknown64(struct kvm_vcpu *vcpu, reset_unknown64()
156 bool access_vm_reg(struct kvm_vcpu *vcpu,
H A Dhandle_exit.c29 typedef int (*exit_handle_fn)(struct kvm_vcpu *, struct kvm_run *);
31 static int handle_svc_hyp(struct kvm_vcpu *vcpu, struct kvm_run *run) handle_svc_hyp()
39 static int handle_hvc(struct kvm_vcpu *vcpu, struct kvm_run *run) handle_hvc()
55 static int handle_smc(struct kvm_vcpu *vcpu, struct kvm_run *run) handle_smc()
61 static int handle_pabt_hyp(struct kvm_vcpu *vcpu, struct kvm_run *run) handle_pabt_hyp()
69 static int handle_dabt_hyp(struct kvm_vcpu *vcpu, struct kvm_run *run) handle_dabt_hyp()
88 static int kvm_handle_wfx(struct kvm_vcpu *vcpu, struct kvm_run *run) kvm_handle_wfx()
121 static exit_handle_fn kvm_get_exit_handler(struct kvm_vcpu *vcpu) kvm_get_exit_handler()
139 int handle_exit(struct kvm_vcpu *vcpu, struct kvm_run *run, handle_exit()
H A Dpsci.c44 static unsigned long kvm_psci_vcpu_suspend(struct kvm_vcpu *vcpu) kvm_psci_vcpu_suspend()
64 static void kvm_psci_vcpu_off(struct kvm_vcpu *vcpu) kvm_psci_vcpu_off()
69 static unsigned long kvm_psci_vcpu_on(struct kvm_vcpu *source_vcpu) kvm_psci_vcpu_on()
72 struct kvm_vcpu *vcpu = NULL; kvm_psci_vcpu_on()
127 static unsigned long kvm_psci_vcpu_affinity_info(struct kvm_vcpu *vcpu) kvm_psci_vcpu_affinity_info()
135 struct kvm_vcpu *tmp; kvm_psci_vcpu_affinity_info()
167 static void kvm_prepare_system_event(struct kvm_vcpu *vcpu, u32 type) kvm_prepare_system_event()
170 struct kvm_vcpu *tmp; kvm_prepare_system_event()
191 static void kvm_psci_system_off(struct kvm_vcpu *vcpu) kvm_psci_system_off()
196 static void kvm_psci_system_reset(struct kvm_vcpu *vcpu) kvm_psci_system_reset()
201 int kvm_psci_version(struct kvm_vcpu *vcpu) kvm_psci_version()
209 static int kvm_psci_0_2_call(struct kvm_vcpu *vcpu) kvm_psci_0_2_call()
280 static int kvm_psci_0_1_call(struct kvm_vcpu *vcpu) kvm_psci_0_1_call()
316 int kvm_psci_call(struct kvm_vcpu *vcpu) kvm_psci_call()
H A Darm.c57 static DEFINE_PER_CPU(struct kvm_vcpu *, kvm_arm_running_vcpu);
64 static void kvm_arm_set_running_vcpu(struct kvm_vcpu *vcpu) kvm_arm_set_running_vcpu()
74 struct kvm_vcpu *kvm_arm_get_running_vcpu(void) kvm_arm_get_running_vcpu()
83 struct kvm_vcpu * __percpu *kvm_get_running_vcpus(void) kvm_get_running_vcpus()
93 int kvm_arch_vcpu_should_kick(struct kvm_vcpu *vcpu) kvm_arch_vcpu_should_kick()
144 int kvm_arch_vcpu_fault(struct kvm_vcpu *vcpu, struct vm_fault *vmf) kvm_arch_vcpu_fault()
213 struct kvm_vcpu *kvm_arch_vcpu_create(struct kvm *kvm, unsigned int id) kvm_arch_vcpu_create()
216 struct kvm_vcpu *vcpu; kvm_arch_vcpu_create()
251 void kvm_arch_vcpu_postcreate(struct kvm_vcpu *vcpu) kvm_arch_vcpu_postcreate()
256 void kvm_arch_vcpu_free(struct kvm_vcpu *vcpu) kvm_arch_vcpu_free()
264 void kvm_arch_vcpu_destroy(struct kvm_vcpu *vcpu) kvm_arch_vcpu_destroy()
269 int kvm_cpu_has_pending_timer(struct kvm_vcpu *vcpu) kvm_cpu_has_pending_timer()
274 void kvm_arch_vcpu_blocking(struct kvm_vcpu *vcpu) kvm_arch_vcpu_blocking()
279 void kvm_arch_vcpu_unblocking(struct kvm_vcpu *vcpu) kvm_arch_vcpu_unblocking()
284 int kvm_arch_vcpu_init(struct kvm_vcpu *vcpu) kvm_arch_vcpu_init()
298 void kvm_arch_vcpu_load(struct kvm_vcpu *vcpu, int cpu) kvm_arch_vcpu_load()
306 void kvm_arch_vcpu_put(struct kvm_vcpu *vcpu) kvm_arch_vcpu_put()
318 int kvm_arch_vcpu_ioctl_get_mpstate(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_get_mpstate()
329 int kvm_arch_vcpu_ioctl_set_mpstate(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_set_mpstate()
353 int kvm_arch_vcpu_runnable(struct kvm_vcpu *v) kvm_arch_vcpu_runnable()
446 static int kvm_vcpu_first_run_init(struct kvm_vcpu *vcpu) kvm_vcpu_first_run_init()
488 struct kvm_vcpu *vcpu; kvm_arm_halt_guest()
498 struct kvm_vcpu *vcpu; kvm_arm_resume_guest()
508 static void vcpu_sleep(struct kvm_vcpu *vcpu) vcpu_sleep()
516 static int kvm_vcpu_initialized(struct kvm_vcpu *vcpu) kvm_vcpu_initialized()
532 int kvm_arch_vcpu_ioctl_run(struct kvm_vcpu *vcpu, struct kvm_run *run) kvm_arch_vcpu_ioctl_run()
654 static int vcpu_interrupt_line(struct kvm_vcpu *vcpu, int number, bool level) vcpu_interrupt_line()
693 struct kvm_vcpu *vcpu = NULL; kvm_vm_ioctl_irq_line()
746 static int kvm_vcpu_set_target(struct kvm_vcpu *vcpu, kvm_vcpu_set_target()
788 static int kvm_arch_vcpu_ioctl_vcpu_init(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_vcpu_init()
820 struct kvm_vcpu *vcpu = filp->private_data; kvm_arch_vcpu_ioctl()
1150 struct kvm_vcpu *kvm_mpidr_to_vcpu(struct kvm *kvm, unsigned long mpidr) kvm_mpidr_to_vcpu()
1152 struct kvm_vcpu *vcpu; kvm_mpidr_to_vcpu()
1215 int rc = kvm_init(NULL, sizeof(struct kvm_vcpu), 0, THIS_MODULE); arm_init()
H A Dcoproc.c53 static inline void vcpu_cp15_reg64_set(struct kvm_vcpu *vcpu, vcpu_cp15_reg64_set()
61 static inline u64 vcpu_cp15_reg64_get(struct kvm_vcpu *vcpu, vcpu_cp15_reg64_get()
72 int kvm_handle_cp10_id(struct kvm_vcpu *vcpu, struct kvm_run *run) kvm_handle_cp10_id()
78 int kvm_handle_cp_0_13_access(struct kvm_vcpu *vcpu, struct kvm_run *run) kvm_handle_cp_0_13_access()
88 int kvm_handle_cp14_load_store(struct kvm_vcpu *vcpu, struct kvm_run *run) kvm_handle_cp14_load_store()
94 int kvm_handle_cp14_access(struct kvm_vcpu *vcpu, struct kvm_run *run) kvm_handle_cp14_access()
100 static void reset_mpidr(struct kvm_vcpu *vcpu, const struct coproc_reg *r) reset_mpidr()
113 static bool access_actlr(struct kvm_vcpu *vcpu, access_actlr()
125 static bool access_cbar(struct kvm_vcpu *vcpu, access_cbar()
135 static bool access_l2ctlr(struct kvm_vcpu *vcpu, access_l2ctlr()
146 static void reset_l2ctlr(struct kvm_vcpu *vcpu, const struct coproc_reg *r) reset_l2ctlr()
162 static void reset_actlr(struct kvm_vcpu *vcpu, const struct coproc_reg *r) reset_actlr()
181 static bool access_l2ectlr(struct kvm_vcpu *vcpu, access_l2ectlr()
195 static bool access_dcsw(struct kvm_vcpu *vcpu, access_dcsw()
213 bool access_vm_reg(struct kvm_vcpu *vcpu, access_vm_reg()
238 static bool pm_fake(struct kvm_vcpu *vcpu, pm_fake()
433 static int emulate_cp15(struct kvm_vcpu *vcpu, emulate_cp15()
473 int kvm_handle_cp15_64(struct kvm_vcpu *vcpu, struct kvm_run *run) kvm_handle_cp15_64()
490 static void reset_coproc_regs(struct kvm_vcpu *vcpu, reset_coproc_regs()
505 int kvm_handle_cp15_32(struct kvm_vcpu *vcpu, struct kvm_run *run) kvm_handle_cp15_32()
571 static const struct coproc_reg *index_to_coproc_reg(struct kvm_vcpu *vcpu, index_to_coproc_reg()
606 static void get_##name(struct kvm_vcpu *v, \
891 static int vfp_get_reg(const struct kvm_vcpu *vcpu, u64 id, void __user *uaddr) vfp_get_reg()
935 static int vfp_set_reg(struct kvm_vcpu *vcpu, u64 id, const void __user *uaddr) vfp_set_reg()
999 static int vfp_get_reg(const struct kvm_vcpu *vcpu, u64 id, void __user *uaddr) vfp_get_reg()
1004 static int vfp_set_reg(struct kvm_vcpu *vcpu, u64 id, const void __user *uaddr) vfp_set_reg()
1010 int kvm_arm_coproc_get_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg) kvm_arm_coproc_get_reg()
1039 int kvm_arm_coproc_set_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg) kvm_arm_coproc_set_reg()
1133 static int walk_cp15(struct kvm_vcpu *vcpu, u64 __user *uind) walk_cp15()
1175 unsigned long kvm_arm_num_coproc_regs(struct kvm_vcpu *vcpu) kvm_arm_num_coproc_regs()
1183 int kvm_arm_copy_coproc_indices(struct kvm_vcpu *vcpu, u64 __user *uindices) kvm_arm_copy_coproc_indices()
1245 void kvm_reset_coprocs(struct kvm_vcpu *vcpu) kvm_reset_coprocs()
H A Dperf.c32 struct kvm_vcpu *vcpu; kvm_is_user_mode()
44 struct kvm_vcpu *vcpu; kvm_get_guest_ip()
H A Demulate.c113 unsigned long *vcpu_reg(struct kvm_vcpu *vcpu, u8 reg_num) vcpu_reg()
145 unsigned long *vcpu_spsr(struct kvm_vcpu *vcpu) vcpu_spsr()
169 bool kvm_condition_valid(struct kvm_vcpu *vcpu) kvm_condition_valid()
218 static void kvm_adjust_itstate(struct kvm_vcpu *vcpu) kvm_adjust_itstate()
250 void kvm_skip_instr(struct kvm_vcpu *vcpu, bool is_wide_instr) kvm_skip_instr()
267 static u32 exc_vector_base(struct kvm_vcpu *vcpu) exc_vector_base()
287 void kvm_inject_undefined(struct kvm_vcpu *vcpu) kvm_inject_undefined()
321 static void inject_abt(struct kvm_vcpu *vcpu, bool is_pabt, unsigned long addr) inject_abt()
386 void kvm_inject_dabt(struct kvm_vcpu *vcpu, unsigned long addr) kvm_inject_dabt()
399 void kvm_inject_pabt(struct kvm_vcpu *vcpu, unsigned long addr) kvm_inject_pabt()
H A Dmmio.c96 int kvm_handle_mmio_return(struct kvm_vcpu *vcpu, struct kvm_run *run) kvm_handle_mmio_return()
124 static int decode_hsr(struct kvm_vcpu *vcpu, bool *is_write, int *len) decode_hsr()
162 int io_mem_abort(struct kvm_vcpu *vcpu, struct kvm_run *run, io_mem_abort()
H A Dreset.c57 int kvm_reset_vcpu(struct kvm_vcpu *vcpu) kvm_reset_vcpu()
H A Dmmu.c1041 static bool kvm_is_write_fault(struct kvm_vcpu *vcpu) kvm_is_write_fault()
1209 static void coherent_cache_guest_page(struct kvm_vcpu *vcpu, pfn_t pfn, coherent_cache_guest_page()
1215 static int user_mem_abort(struct kvm_vcpu *vcpu, phys_addr_t fault_ipa, user_mem_abort()
1350 static void handle_access_fault(struct kvm_vcpu *vcpu, phys_addr_t fault_ipa) handle_access_fault()
1397 int kvm_handle_guest_abort(struct kvm_vcpu *vcpu, struct kvm_run *run) kvm_handle_guest_abort()
1625 void kvm_mmu_free_memory_caches(struct kvm_vcpu *vcpu) kvm_mmu_free_memory_caches()
1896 void kvm_set_way_flush(struct kvm_vcpu *vcpu) kvm_set_way_flush()
1917 void kvm_toggle_cache(struct kvm_vcpu *vcpu, bool was_enabled) kvm_toggle_cache()
H A Dinterrupts.S104 * int __kvm_vcpu_run(struct kvm_vcpu *vcpu)
/linux-4.4.14/arch/arm64/kvm/
H A Dguest.c41 int kvm_arch_vcpu_setup(struct kvm_vcpu *vcpu) kvm_arch_vcpu_setup()
51 static int get_core_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg) get_core_reg()
76 static int set_core_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg) set_core_reg()
124 int kvm_arch_vcpu_ioctl_get_regs(struct kvm_vcpu *vcpu, struct kvm_regs *regs) kvm_arch_vcpu_ioctl_get_regs()
129 int kvm_arch_vcpu_ioctl_set_regs(struct kvm_vcpu *vcpu, struct kvm_regs *regs) kvm_arch_vcpu_ioctl_set_regs()
156 static int copy_timer_indices(struct kvm_vcpu *vcpu, u64 __user *uindices) copy_timer_indices()
170 static int set_timer_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg) set_timer_reg()
183 static int get_timer_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg) get_timer_reg()
197 unsigned long kvm_arm_num_regs(struct kvm_vcpu *vcpu) kvm_arm_num_regs()
208 int kvm_arm_copy_reg_indices(struct kvm_vcpu *vcpu, u64 __user *uindices) kvm_arm_copy_reg_indices()
228 int kvm_arm_get_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg) kvm_arm_get_reg()
244 int kvm_arm_set_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg) kvm_arm_set_reg()
260 int kvm_arch_vcpu_ioctl_get_sregs(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_get_sregs()
266 int kvm_arch_vcpu_ioctl_set_sregs(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_set_sregs()
322 int kvm_arch_vcpu_ioctl_get_fpu(struct kvm_vcpu *vcpu, struct kvm_fpu *fpu) kvm_arch_vcpu_ioctl_get_fpu()
327 int kvm_arch_vcpu_ioctl_set_fpu(struct kvm_vcpu *vcpu, struct kvm_fpu *fpu) kvm_arch_vcpu_ioctl_set_fpu()
332 int kvm_arch_vcpu_ioctl_translate(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_translate()
353 int kvm_arch_vcpu_ioctl_set_guest_debug(struct kvm_vcpu *vcpu, kvm_arch_vcpu_ioctl_set_guest_debug()
H A Dsys_regs.h46 bool (*access)(struct kvm_vcpu *,
51 void (*reset)(struct kvm_vcpu *, const struct sys_reg_desc *);
60 int (*get_user)(struct kvm_vcpu *vcpu, const struct sys_reg_desc *rd,
62 int (*set_user)(struct kvm_vcpu *vcpu, const struct sys_reg_desc *rd,
73 static inline bool ignore_write(struct kvm_vcpu *vcpu, ignore_write()
79 static inline bool read_zero(struct kvm_vcpu *vcpu, read_zero()
86 static inline bool write_to_read_only(struct kvm_vcpu *vcpu, write_to_read_only()
95 static inline bool read_from_write_only(struct kvm_vcpu *vcpu, read_from_write_only()
105 static inline void reset_unknown(struct kvm_vcpu *vcpu, reset_unknown()
113 static inline void reset_val(struct kvm_vcpu *vcpu, const struct sys_reg_desc *r) reset_val()
H A Dinject_fault.c32 static void prepare_fault32(struct kvm_vcpu *vcpu, u32 mode, u32 vect_offset) prepare_fault32()
62 static void inject_undef32(struct kvm_vcpu *vcpu) inject_undef32()
71 static void inject_abt32(struct kvm_vcpu *vcpu, bool is_pabt, inject_abt32()
100 static void inject_abt64(struct kvm_vcpu *vcpu, bool is_iabt, unsigned long addr) inject_abt64()
138 static void inject_undef64(struct kvm_vcpu *vcpu) inject_undef64()
167 void kvm_inject_dabt(struct kvm_vcpu *vcpu, unsigned long addr) kvm_inject_dabt()
183 void kvm_inject_pabt(struct kvm_vcpu *vcpu, unsigned long addr) kvm_inject_pabt()
197 void kvm_inject_undefined(struct kvm_vcpu *vcpu) kvm_inject_undefined()
H A Dhandle_exit.c34 typedef int (*exit_handle_fn)(struct kvm_vcpu *, struct kvm_run *);
36 static int handle_hvc(struct kvm_vcpu *vcpu, struct kvm_run *run) handle_hvc()
52 static int handle_smc(struct kvm_vcpu *vcpu, struct kvm_run *run) handle_smc()
70 static int kvm_handle_wfx(struct kvm_vcpu *vcpu, struct kvm_run *run) kvm_handle_wfx()
97 static int kvm_handle_guest_debug(struct kvm_vcpu *vcpu, struct kvm_run *run) kvm_handle_guest_debug()
145 static exit_handle_fn kvm_get_exit_handler(struct kvm_vcpu *vcpu) kvm_get_exit_handler()
164 int handle_exit(struct kvm_vcpu *vcpu, struct kvm_run *run, handle_exit()
H A Dtrace.h48 TP_PROTO(struct kvm_vcpu *vcpu, __u32 guest_debug),
52 __field(struct kvm_vcpu *, vcpu)
153 TP_PROTO(struct kvm_vcpu *vcpu, __u32 guest_debug),
157 __field(struct kvm_vcpu *, vcpu)
H A Dsys_regs.c80 static bool access_dcsw(struct kvm_vcpu *vcpu, access_dcsw()
96 static bool access_vm_reg(struct kvm_vcpu *vcpu, access_vm_reg()
122 static bool access_gic_sgi(struct kvm_vcpu *vcpu, access_gic_sgi()
134 static bool trap_raz_wi(struct kvm_vcpu *vcpu, trap_raz_wi()
144 static bool trap_oslsr_el1(struct kvm_vcpu *vcpu, trap_oslsr_el1()
156 static bool trap_dbgauthstatus_el1(struct kvm_vcpu *vcpu, trap_dbgauthstatus_el1()
197 static bool trap_debug_regs(struct kvm_vcpu *vcpu, trap_debug_regs()
222 static inline void reg_to_dbg(struct kvm_vcpu *vcpu, reg_to_dbg()
237 static inline void dbg_to_reg(struct kvm_vcpu *vcpu, dbg_to_reg()
246 static inline bool trap_bvr(struct kvm_vcpu *vcpu, trap_bvr()
262 static int set_bvr(struct kvm_vcpu *vcpu, const struct sys_reg_desc *rd, set_bvr()
272 static int get_bvr(struct kvm_vcpu *vcpu, const struct sys_reg_desc *rd, get_bvr()
282 static inline void reset_bvr(struct kvm_vcpu *vcpu, reset_bvr()
288 static inline bool trap_bcr(struct kvm_vcpu *vcpu, trap_bcr()
304 static int set_bcr(struct kvm_vcpu *vcpu, const struct sys_reg_desc *rd, set_bcr()
315 static int get_bcr(struct kvm_vcpu *vcpu, const struct sys_reg_desc *rd, get_bcr()
325 static inline void reset_bcr(struct kvm_vcpu *vcpu, reset_bcr()
331 static inline bool trap_wvr(struct kvm_vcpu *vcpu, trap_wvr()
348 static int set_wvr(struct kvm_vcpu *vcpu, const struct sys_reg_desc *rd, set_wvr()
358 static int get_wvr(struct kvm_vcpu *vcpu, const struct sys_reg_desc *rd, get_wvr()
368 static inline void reset_wvr(struct kvm_vcpu *vcpu, reset_wvr()
374 static inline bool trap_wcr(struct kvm_vcpu *vcpu, trap_wcr()
390 static int set_wcr(struct kvm_vcpu *vcpu, const struct sys_reg_desc *rd, set_wcr()
400 static int get_wcr(struct kvm_vcpu *vcpu, const struct sys_reg_desc *rd, get_wcr()
410 static inline void reset_wcr(struct kvm_vcpu *vcpu, reset_wcr()
416 static void reset_amair_el1(struct kvm_vcpu *vcpu, const struct sys_reg_desc *r) reset_amair_el1()
424 static void reset_mpidr(struct kvm_vcpu *vcpu, const struct sys_reg_desc *r) reset_mpidr()
681 static bool trap_dbgidr(struct kvm_vcpu *vcpu, trap_dbgidr()
700 static bool trap_debug32(struct kvm_vcpu *vcpu, trap_debug32()
725 static inline bool trap_xvr(struct kvm_vcpu *vcpu, trap_xvr()
969 int kvm_handle_cp14_load_store(struct kvm_vcpu *vcpu, struct kvm_run *run) kvm_handle_cp14_load_store()
985 static int emulate_cp(struct kvm_vcpu *vcpu, emulate_cp()
1019 static void unhandled_cp_access(struct kvm_vcpu *vcpu, unhandled_cp_access()
1049 static int kvm_handle_cp_64(struct kvm_vcpu *vcpu, kvm_handle_cp_64()
1101 static int kvm_handle_cp_32(struct kvm_vcpu *vcpu, kvm_handle_cp_32()
1132 int kvm_handle_cp15_64(struct kvm_vcpu *vcpu, struct kvm_run *run) kvm_handle_cp15_64()
1143 int kvm_handle_cp15_32(struct kvm_vcpu *vcpu, struct kvm_run *run) kvm_handle_cp15_32()
1154 int kvm_handle_cp14_64(struct kvm_vcpu *vcpu, struct kvm_run *run) kvm_handle_cp14_64()
1161 int kvm_handle_cp14_32(struct kvm_vcpu *vcpu, struct kvm_run *run) kvm_handle_cp14_32()
1168 static int emulate_sys_reg(struct kvm_vcpu *vcpu, emulate_sys_reg()
1205 static void reset_sys_reg_descs(struct kvm_vcpu *vcpu, reset_sys_reg_descs()
1220 int kvm_handle_sys_reg(struct kvm_vcpu *vcpu, struct kvm_run *run) kvm_handle_sys_reg()
1280 static const struct sys_reg_desc *index_to_sys_reg_desc(struct kvm_vcpu *vcpu, index_to_sys_reg_desc()
1315 static void get_##reg(struct kvm_vcpu *v, \
1522 int kvm_arm_sys_reg_get_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg) kvm_arm_sys_reg_get_reg()
1543 int kvm_arm_sys_reg_set_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg) kvm_arm_sys_reg_set_reg()
1615 static int walk_sys_regs(struct kvm_vcpu *vcpu, u64 __user *uind) walk_sys_regs()
1657 unsigned long kvm_arm_num_sys_reg_descs(struct kvm_vcpu *vcpu) kvm_arm_num_sys_reg_descs()
1664 int kvm_arm_copy_sys_reg_indices(struct kvm_vcpu *vcpu, u64 __user *uindices) kvm_arm_copy_sys_reg_indices()
1741 void kvm_reset_sys_regs(struct kvm_vcpu *vcpu) kvm_reset_sys_regs()
H A Demulate.c55 static int kvm_vcpu_get_condition(const struct kvm_vcpu *vcpu) kvm_vcpu_get_condition()
68 bool kvm_condition_valid32(const struct kvm_vcpu *vcpu) kvm_condition_valid32()
117 static void kvm_adjust_itstate(struct kvm_vcpu *vcpu) kvm_adjust_itstate()
149 void kvm_skip_instr32(struct kvm_vcpu *vcpu, bool is_wide_instr) kvm_skip_instr32()
H A Dsys_regs_generic_v8.c33 static bool access_actlr(struct kvm_vcpu *vcpu, access_actlr()
44 static void reset_actlr(struct kvm_vcpu *vcpu, const struct sys_reg_desc *r) reset_actlr()
H A Ddebug.c47 static void save_guest_debug_regs(struct kvm_vcpu *vcpu) save_guest_debug_regs()
55 static void restore_guest_debug_regs(struct kvm_vcpu *vcpu) restore_guest_debug_regs()
82 void kvm_arm_reset_debug_ptr(struct kvm_vcpu *vcpu) kvm_arm_reset_debug_ptr()
107 void kvm_arm_setup_debug(struct kvm_vcpu *vcpu) kvm_arm_setup_debug()
194 void kvm_arm_clear_debug(struct kvm_vcpu *vcpu) kvm_arm_clear_debug()
H A Dregmap.c112 unsigned long *vcpu_reg32(const struct kvm_vcpu *vcpu, u8 reg_num) vcpu_reg32()
144 unsigned long *vcpu_spsr32(const struct kvm_vcpu *vcpu) vcpu_spsr32()
H A Dreset.c98 int kvm_reset_vcpu(struct kvm_vcpu *vcpu) kvm_reset_vcpu()
H A Dhyp.S709 * u64 __kvm_vcpu_run(struct kvm_vcpu *vcpu);
/linux-4.4.14/arch/arm64/kernel/
H A Dasm-offsets.c107 DEFINE(VCPU_CONTEXT, offsetof(struct kvm_vcpu, arch.ctxt)); main()
115 DEFINE(VCPU_ESR_EL2, offsetof(struct kvm_vcpu, arch.fault.esr_el2)); main()
116 DEFINE(VCPU_FAR_EL2, offsetof(struct kvm_vcpu, arch.fault.far_el2)); main()
117 DEFINE(VCPU_HPFAR_EL2, offsetof(struct kvm_vcpu, arch.fault.hpfar_el2)); main()
118 DEFINE(VCPU_DEBUG_FLAGS, offsetof(struct kvm_vcpu, arch.debug_flags)); main()
119 DEFINE(VCPU_DEBUG_PTR, offsetof(struct kvm_vcpu, arch.debug_ptr)); main()
124 DEFINE(VCPU_HCR_EL2, offsetof(struct kvm_vcpu, arch.hcr_el2)); main()
125 DEFINE(VCPU_MDCR_EL2, offsetof(struct kvm_vcpu, arch.mdcr_el2)); main()
126 DEFINE(VCPU_IRQ_LINES, offsetof(struct kvm_vcpu, arch.irq_lines)); main()
127 DEFINE(VCPU_HOST_CONTEXT, offsetof(struct kvm_vcpu, arch.host_cpu_context)); main()
128 DEFINE(VCPU_HOST_DEBUG_STATE, offsetof(struct kvm_vcpu, arch.host_debug_state)); main()
129 DEFINE(VCPU_TIMER_CNTV_CTL, offsetof(struct kvm_vcpu, arch.timer_cpu.cntv_ctl)); main()
130 DEFINE(VCPU_TIMER_CNTV_CVAL, offsetof(struct kvm_vcpu, arch.timer_cpu.cntv_cval)); main()
133 DEFINE(VCPU_KVM, offsetof(struct kvm_vcpu, kvm)); main()
134 DEFINE(VCPU_VGIC_CPU, offsetof(struct kvm_vcpu, arch.vgic_cpu)); main()
/linux-4.4.14/virt/kvm/arm/
H A Dvgic.h50 void vgic_dist_irq_set_pending(struct kvm_vcpu *vcpu, int irq);
51 void vgic_dist_irq_clear_pending(struct kvm_vcpu *vcpu, int irq);
52 void vgic_cpu_irq_clear(struct kvm_vcpu *vcpu, int irq);
56 void vgic_get_vmcr(struct kvm_vcpu *vcpu, struct vgic_vmcr *vmcr);
57 void vgic_set_vmcr(struct kvm_vcpu *vcpu, struct vgic_vmcr *vmcr);
59 bool vgic_queue_irq(struct kvm_vcpu *vcpu, u8 sgi_source_id, int irq);
60 void vgic_unqueue_irqs(struct kvm_vcpu *vcpu);
72 bool handle_mmio_raz_wi(struct kvm_vcpu *vcpu, struct kvm_exit_mmio *mmio,
91 bool (*handle_mmio)(struct kvm_vcpu *vcpu, struct kvm_exit_mmio *mmio,
H A Darch_timer.c66 struct kvm_vcpu *vcpu = *(struct kvm_vcpu **)dev_id; kvm_arch_timer_handler()
84 struct kvm_vcpu *vcpu; kvm_timer_inject_irq_work()
86 vcpu = container_of(work, struct kvm_vcpu, arch.timer_cpu.expired); kvm_timer_inject_irq_work()
98 static u64 kvm_timer_compute_delta(struct kvm_vcpu *vcpu) kvm_timer_compute_delta()
121 struct kvm_vcpu *vcpu; kvm_timer_expire()
125 vcpu = container_of(timer, struct kvm_vcpu, arch.timer_cpu); kvm_timer_expire()
142 static bool kvm_timer_irq_can_fire(struct kvm_vcpu *vcpu) kvm_timer_irq_can_fire()
150 bool kvm_timer_should_fire(struct kvm_vcpu *vcpu) kvm_timer_should_fire()
164 static void kvm_timer_update_irq(struct kvm_vcpu *vcpu, bool new_level) kvm_timer_update_irq()
184 static int kvm_timer_update_state(struct kvm_vcpu *vcpu) kvm_timer_update_state()
208 void kvm_timer_schedule(struct kvm_vcpu *vcpu) kvm_timer_schedule()
233 void kvm_timer_unschedule(struct kvm_vcpu *vcpu) kvm_timer_unschedule()
246 void kvm_timer_flush_hwstate(struct kvm_vcpu *vcpu) kvm_timer_flush_hwstate()
290 void kvm_timer_sync_hwstate(struct kvm_vcpu *vcpu) kvm_timer_sync_hwstate()
303 int kvm_timer_vcpu_reset(struct kvm_vcpu *vcpu, kvm_timer_vcpu_reset()
338 void kvm_timer_vcpu_init(struct kvm_vcpu *vcpu) kvm_timer_vcpu_init()
352 int kvm_arm_timer_set_reg(struct kvm_vcpu *vcpu, u64 regid, u64 value) kvm_arm_timer_set_reg()
374 u64 kvm_arm_timer_get_reg(struct kvm_vcpu *vcpu, u64 regid) kvm_arm_timer_get_reg()
472 void kvm_timer_vcpu_terminate(struct kvm_vcpu *vcpu) kvm_timer_vcpu_terminate()
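
As the arch_timer.c snippets above show, the timer code keeps no back pointer from the timer state to its vCPU; because arch.timer_cpu is embedded inside struct kvm_vcpu, container_of() recomputes the owning vCPU from the member address. The same idiom, pulled out into illustrative helpers (the helper names are invented):

static struct kvm_vcpu *vcpu_of_timer(struct arch_timer_cpu *timer)
{
	return container_of(timer, struct kvm_vcpu, arch.timer_cpu);
}

static struct kvm_vcpu *vcpu_of_expired_work(struct work_struct *work)
{
	return container_of(work, struct kvm_vcpu, arch.timer_cpu.expired);
}
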
H A Dvgic-v2.c33 static struct vgic_lr vgic_v2_get_lr(const struct kvm_vcpu *vcpu, int lr) vgic_v2_get_lr()
59 static void vgic_v2_set_lr(struct kvm_vcpu *vcpu, int lr, vgic_v2_set_lr()
89 static u64 vgic_v2_get_elrsr(const struct kvm_vcpu *vcpu) vgic_v2_get_elrsr()
94 static u64 vgic_v2_get_eisr(const struct kvm_vcpu *vcpu) vgic_v2_get_eisr()
99 static void vgic_v2_clear_eisr(struct kvm_vcpu *vcpu) vgic_v2_clear_eisr()
104 static u32 vgic_v2_get_interrupt_status(const struct kvm_vcpu *vcpu) vgic_v2_get_interrupt_status()
117 static void vgic_v2_enable_underflow(struct kvm_vcpu *vcpu) vgic_v2_enable_underflow()
122 static void vgic_v2_disable_underflow(struct kvm_vcpu *vcpu) vgic_v2_disable_underflow()
127 static void vgic_v2_get_vmcr(struct kvm_vcpu *vcpu, struct vgic_vmcr *vmcrp) vgic_v2_get_vmcr()
137 static void vgic_v2_set_vmcr(struct kvm_vcpu *vcpu, struct vgic_vmcr *vmcrp) vgic_v2_set_vmcr()
149 static void vgic_v2_enable(struct kvm_vcpu *vcpu) vgic_v2_enable()
H A Dvgic-v3.c47 static struct vgic_lr vgic_v3_get_lr(const struct kvm_vcpu *vcpu, int lr) vgic_v3_get_lr()
78 static void vgic_v3_set_lr(struct kvm_vcpu *vcpu, int lr, vgic_v3_set_lr()
122 static u64 vgic_v3_get_elrsr(const struct kvm_vcpu *vcpu) vgic_v3_get_elrsr()
127 static u64 vgic_v3_get_eisr(const struct kvm_vcpu *vcpu) vgic_v3_get_eisr()
132 static void vgic_v3_clear_eisr(struct kvm_vcpu *vcpu) vgic_v3_clear_eisr()
137 static u32 vgic_v3_get_interrupt_status(const struct kvm_vcpu *vcpu) vgic_v3_get_interrupt_status()
150 static void vgic_v3_get_vmcr(struct kvm_vcpu *vcpu, struct vgic_vmcr *vmcrp) vgic_v3_get_vmcr()
160 static void vgic_v3_enable_underflow(struct kvm_vcpu *vcpu) vgic_v3_enable_underflow()
165 static void vgic_v3_disable_underflow(struct kvm_vcpu *vcpu) vgic_v3_disable_underflow()
170 static void vgic_v3_set_vmcr(struct kvm_vcpu *vcpu, struct vgic_vmcr *vmcrp) vgic_v3_set_vmcr()
182 static void vgic_v3_enable(struct kvm_vcpu *vcpu) vgic_v3_enable()
H A Dvgic.c107 static void vgic_retire_disabled_irqs(struct kvm_vcpu *vcpu);
108 static void vgic_retire_lr(int lr_nr, struct kvm_vcpu *vcpu);
109 static struct vgic_lr vgic_get_lr(const struct kvm_vcpu *vcpu, int lr);
110 static void vgic_set_lr(struct kvm_vcpu *vcpu, int lr, struct vgic_lr lr_desc);
111 static u64 vgic_get_elrsr(struct kvm_vcpu *vcpu);
112 static struct irq_phys_map *vgic_irq_map_search(struct kvm_vcpu *vcpu,
114 static int compute_pending_for_cpu(struct kvm_vcpu *vcpu);
119 static void add_sgi_source(struct kvm_vcpu *vcpu, int irq, int source) add_sgi_source()
124 static bool queue_sgi(struct kvm_vcpu *vcpu, int irq) queue_sgi()
274 static bool vgic_irq_is_edge(struct kvm_vcpu *vcpu, int irq) vgic_irq_is_edge()
283 static int vgic_irq_is_enabled(struct kvm_vcpu *vcpu, int irq) vgic_irq_is_enabled()
290 static int vgic_irq_is_queued(struct kvm_vcpu *vcpu, int irq) vgic_irq_is_queued()
297 static int vgic_irq_is_active(struct kvm_vcpu *vcpu, int irq) vgic_irq_is_active()
304 static void vgic_irq_set_queued(struct kvm_vcpu *vcpu, int irq) vgic_irq_set_queued()
311 static void vgic_irq_clear_queued(struct kvm_vcpu *vcpu, int irq) vgic_irq_clear_queued()
318 static void vgic_irq_set_active(struct kvm_vcpu *vcpu, int irq) vgic_irq_set_active()
325 static void vgic_irq_clear_active(struct kvm_vcpu *vcpu, int irq) vgic_irq_clear_active()
332 static int vgic_dist_irq_get_level(struct kvm_vcpu *vcpu, int irq) vgic_dist_irq_get_level()
339 static void vgic_dist_irq_set_level(struct kvm_vcpu *vcpu, int irq) vgic_dist_irq_set_level()
346 static void vgic_dist_irq_clear_level(struct kvm_vcpu *vcpu, int irq) vgic_dist_irq_clear_level()
353 static int vgic_dist_irq_soft_pend(struct kvm_vcpu *vcpu, int irq) vgic_dist_irq_soft_pend()
360 static void vgic_dist_irq_clear_soft_pend(struct kvm_vcpu *vcpu, int irq) vgic_dist_irq_clear_soft_pend()
372 static int vgic_dist_irq_is_pending(struct kvm_vcpu *vcpu, int irq) vgic_dist_irq_is_pending()
379 void vgic_dist_irq_set_pending(struct kvm_vcpu *vcpu, int irq) vgic_dist_irq_set_pending()
386 void vgic_dist_irq_clear_pending(struct kvm_vcpu *vcpu, int irq) vgic_dist_irq_clear_pending()
393 static void vgic_cpu_irq_set(struct kvm_vcpu *vcpu, int irq) vgic_cpu_irq_set()
402 void vgic_cpu_irq_clear(struct kvm_vcpu *vcpu, int irq) vgic_cpu_irq_clear()
411 static bool vgic_can_sample_irq(struct kvm_vcpu *vcpu, int irq) vgic_can_sample_irq()
477 bool handle_mmio_raz_wi(struct kvm_vcpu *vcpu, struct kvm_exit_mmio *mmio, handle_mmio_raz_wi()
490 struct kvm_vcpu *target_vcpu = kvm_get_vcpu(kvm, vcpu_id); vgic_handle_enable_reg()
692 void vgic_unqueue_irqs(struct kvm_vcpu *vcpu) vgic_unqueue_irqs()
774 static bool call_range_handler(struct kvm_vcpu *vcpu, call_range_handler()
817 static int vgic_handle_mmio_access(struct kvm_vcpu *vcpu, vgic_handle_mmio_access()
866 static int vgic_handle_mmio_read(struct kvm_vcpu *vcpu, vgic_handle_mmio_read()
873 static int vgic_handle_mmio_write(struct kvm_vcpu *vcpu, vgic_handle_mmio_write()
907 struct kvm_vcpu *vcpu = NULL; vgic_register_kvm_io_dev()
938 static int compute_active_for_cpu(struct kvm_vcpu *vcpu) compute_active_for_cpu()
968 static int compute_pending_for_cpu(struct kvm_vcpu *vcpu) compute_pending_for_cpu()
1010 struct kvm_vcpu *vcpu; vgic_update_state()
1024 static struct vgic_lr vgic_get_lr(const struct kvm_vcpu *vcpu, int lr) vgic_get_lr()
1029 static void vgic_set_lr(struct kvm_vcpu *vcpu, int lr, vgic_set_lr()
1035 static inline u64 vgic_get_elrsr(struct kvm_vcpu *vcpu) vgic_get_elrsr()
1040 static inline u64 vgic_get_eisr(struct kvm_vcpu *vcpu) vgic_get_eisr()
1045 static inline void vgic_clear_eisr(struct kvm_vcpu *vcpu) vgic_clear_eisr()
1050 static inline u32 vgic_get_interrupt_status(struct kvm_vcpu *vcpu) vgic_get_interrupt_status()
1055 static inline void vgic_enable_underflow(struct kvm_vcpu *vcpu) vgic_enable_underflow()
1060 static inline void vgic_disable_underflow(struct kvm_vcpu *vcpu) vgic_disable_underflow()
1065 void vgic_get_vmcr(struct kvm_vcpu *vcpu, struct vgic_vmcr *vmcr) vgic_get_vmcr()
1070 void vgic_set_vmcr(struct kvm_vcpu *vcpu, struct vgic_vmcr *vmcr) vgic_set_vmcr()
1075 static inline void vgic_enable(struct kvm_vcpu *vcpu) vgic_enable()
1080 static void vgic_retire_lr(int lr_nr, struct kvm_vcpu *vcpu) vgic_retire_lr()
1099 static bool dist_active_irq(struct kvm_vcpu *vcpu) dist_active_irq()
1106 bool kvm_vgic_map_is_active(struct kvm_vcpu *vcpu, struct irq_phys_map *map) kvm_vgic_map_is_active()
1129 static void vgic_retire_disabled_irqs(struct kvm_vcpu *vcpu) vgic_retire_disabled_irqs()
1143 static void vgic_queue_irq_to_lr(struct kvm_vcpu *vcpu, int irq, vgic_queue_irq_to_lr()
1186 bool vgic_queue_irq(struct kvm_vcpu *vcpu, u8 sgi_source_id, int irq) vgic_queue_irq()
1226 static bool vgic_queue_hwirq(struct kvm_vcpu *vcpu, int irq) vgic_queue_hwirq()
1249 static void __kvm_vgic_flush_hwstate(struct kvm_vcpu *vcpu) __kvm_vgic_flush_hwstate()
1311 static int process_queued_irq(struct kvm_vcpu *vcpu, process_queued_irq()
1359 static bool vgic_process_maintenance(struct kvm_vcpu *vcpu) vgic_process_maintenance()
1417 static bool vgic_sync_hwirq(struct kvm_vcpu *vcpu, int lr, struct vgic_lr vlr) vgic_sync_hwirq()
1435 static void __kvm_vgic_sync_hwstate(struct kvm_vcpu *vcpu) __kvm_vgic_sync_hwstate()
1461 void kvm_vgic_flush_hwstate(struct kvm_vcpu *vcpu) kvm_vgic_flush_hwstate()
1473 void kvm_vgic_sync_hwstate(struct kvm_vcpu *vcpu) kvm_vgic_sync_hwstate()
1481 int kvm_vgic_vcpu_pending_irq(struct kvm_vcpu *vcpu) kvm_vgic_vcpu_pending_irq()
1493 struct kvm_vcpu *vcpu; vgic_kick_vcpus()
1506 static int vgic_validate_injection(struct kvm_vcpu *vcpu, int irq, int level) vgic_validate_injection()
1529 struct kvm_vcpu *vcpu; vgic_update_irq_pending()
1704 static struct list_head *vgic_get_irq_phys_map_list(struct kvm_vcpu *vcpu, vgic_get_irq_phys_map_list()
1726 struct irq_phys_map *kvm_vgic_map_phys_irq(struct kvm_vcpu *vcpu, kvm_vgic_map_phys_irq()
1784 static struct irq_phys_map *vgic_irq_map_search(struct kvm_vcpu *vcpu, vgic_irq_map_search()
1821 int kvm_vgic_unmap_phys_irq(struct kvm_vcpu *vcpu, struct irq_phys_map *map) kvm_vgic_unmap_phys_irq()
1862 void kvm_vgic_vcpu_destroy(struct kvm_vcpu *vcpu) kvm_vgic_vcpu_destroy()
1875 static int vgic_vcpu_init_maps(struct kvm_vcpu *vcpu, int nr_irqs) vgic_vcpu_init_maps()
1906 void kvm_vgic_vcpu_early_init(struct kvm_vcpu *vcpu) kvm_vgic_vcpu_early_init()
1926 struct kvm_vcpu *vcpu; kvm_vgic_destroy()
1965 struct kvm_vcpu *vcpu; vgic_init()
2098 struct kvm_vcpu *vcpu; kvm_vgic_create()
H A Dvgic-v3-emul.c52 static bool handle_mmio_rao_wi(struct kvm_vcpu *vcpu, handle_mmio_rao_wi()
63 static bool handle_mmio_ctlr(struct kvm_vcpu *vcpu, handle_mmio_ctlr()
93 static bool handle_mmio_typer(struct kvm_vcpu *vcpu, handle_mmio_typer()
108 static bool handle_mmio_iidr(struct kvm_vcpu *vcpu, handle_mmio_iidr()
120 static bool handle_mmio_set_enable_reg_dist(struct kvm_vcpu *vcpu, handle_mmio_set_enable_reg_dist()
134 static bool handle_mmio_clear_enable_reg_dist(struct kvm_vcpu *vcpu, handle_mmio_clear_enable_reg_dist()
148 static bool handle_mmio_set_pending_reg_dist(struct kvm_vcpu *vcpu, handle_mmio_set_pending_reg_dist()
161 static bool handle_mmio_clear_pending_reg_dist(struct kvm_vcpu *vcpu, handle_mmio_clear_pending_reg_dist()
174 static bool handle_mmio_set_active_reg_dist(struct kvm_vcpu *vcpu, handle_mmio_set_active_reg_dist()
187 static bool handle_mmio_clear_active_reg_dist(struct kvm_vcpu *vcpu, handle_mmio_clear_active_reg_dist()
200 static bool handle_mmio_priority_reg_dist(struct kvm_vcpu *vcpu, handle_mmio_priority_reg_dist()
219 static bool handle_mmio_cfg_reg_dist(struct kvm_vcpu *vcpu, handle_mmio_cfg_reg_dist()
269 * type (or storing kvm_vcpu pointers) should lift the limit.
274 static bool handle_mmio_route_reg(struct kvm_vcpu *vcpu, handle_mmio_route_reg()
349 static bool handle_mmio_idregs(struct kvm_vcpu *vcpu, handle_mmio_idregs()
529 static bool handle_mmio_ctlr_redist(struct kvm_vcpu *vcpu, handle_mmio_ctlr_redist()
539 static bool handle_mmio_typer_redist(struct kvm_vcpu *vcpu, handle_mmio_typer_redist()
545 struct kvm_vcpu *redist_vcpu = mmio->private; handle_mmio_typer_redist()
566 static bool handle_mmio_set_enable_reg_redist(struct kvm_vcpu *vcpu, handle_mmio_set_enable_reg_redist()
570 struct kvm_vcpu *redist_vcpu = mmio->private; handle_mmio_set_enable_reg_redist()
577 static bool handle_mmio_clear_enable_reg_redist(struct kvm_vcpu *vcpu, handle_mmio_clear_enable_reg_redist()
581 struct kvm_vcpu *redist_vcpu = mmio->private; handle_mmio_clear_enable_reg_redist()
588 static bool handle_mmio_set_active_reg_redist(struct kvm_vcpu *vcpu, handle_mmio_set_active_reg_redist()
592 struct kvm_vcpu *redist_vcpu = mmio->private; handle_mmio_set_active_reg_redist()
598 static bool handle_mmio_clear_active_reg_redist(struct kvm_vcpu *vcpu, handle_mmio_clear_active_reg_redist()
602 struct kvm_vcpu *redist_vcpu = mmio->private; handle_mmio_clear_active_reg_redist()
608 static bool handle_mmio_set_pending_reg_redist(struct kvm_vcpu *vcpu, handle_mmio_set_pending_reg_redist()
612 struct kvm_vcpu *redist_vcpu = mmio->private; handle_mmio_set_pending_reg_redist()
618 static bool handle_mmio_clear_pending_reg_redist(struct kvm_vcpu *vcpu, handle_mmio_clear_pending_reg_redist()
622 struct kvm_vcpu *redist_vcpu = mmio->private; handle_mmio_clear_pending_reg_redist()
628 static bool handle_mmio_priority_reg_redist(struct kvm_vcpu *vcpu, handle_mmio_priority_reg_redist()
632 struct kvm_vcpu *redist_vcpu = mmio->private; handle_mmio_priority_reg_redist()
642 static bool handle_mmio_cfg_reg_redist(struct kvm_vcpu *vcpu, handle_mmio_cfg_reg_redist()
646 struct kvm_vcpu *redist_vcpu = mmio->private; handle_mmio_cfg_reg_redist()
755 static bool vgic_v3_queue_sgi(struct kvm_vcpu *vcpu, int irq) vgic_v3_queue_sgi()
866 static void vgic_v3_add_sgi_source(struct kvm_vcpu *vcpu, int irq, int source) vgic_v3_add_sgi_source()
888 static int match_mpidr(u64 sgi_aff, u16 sgi_cpu_mask, struct kvm_vcpu *vcpu) match_mpidr()
929 void vgic_v3_dispatch_sgi(struct kvm_vcpu *vcpu, u64 reg) vgic_v3_dispatch_sgi()
932 struct kvm_vcpu *c_vcpu; vgic_v3_dispatch_sgi()
H A Dvgic-v2-emul.c37 static void vgic_dispatch_sgi(struct kvm_vcpu *vcpu, u32 reg); vgic_get_sgi_sources()
43 static bool handle_mmio_misc(struct kvm_vcpu *vcpu, handle_mmio_misc()
78 static bool handle_mmio_set_enable_reg(struct kvm_vcpu *vcpu, handle_mmio_set_enable_reg()
86 static bool handle_mmio_clear_enable_reg(struct kvm_vcpu *vcpu, handle_mmio_clear_enable_reg()
94 static bool handle_mmio_set_pending_reg(struct kvm_vcpu *vcpu, handle_mmio_set_pending_reg()
102 static bool handle_mmio_clear_pending_reg(struct kvm_vcpu *vcpu, handle_mmio_clear_pending_reg()
110 static bool handle_mmio_set_active_reg(struct kvm_vcpu *vcpu, handle_mmio_set_active_reg()
118 static bool handle_mmio_clear_active_reg(struct kvm_vcpu *vcpu, handle_mmio_clear_active_reg()
126 static bool handle_mmio_priority_reg(struct kvm_vcpu *vcpu, handle_mmio_priority_reg()
157 struct kvm_vcpu *vcpu; vgic_set_target_reg()
185 static bool handle_mmio_target_reg(struct kvm_vcpu *vcpu, handle_mmio_target_reg()
216 static bool handle_mmio_cfg_reg(struct kvm_vcpu *vcpu, handle_mmio_cfg_reg()
227 static bool handle_mmio_sgi_reg(struct kvm_vcpu *vcpu, handle_mmio_sgi_reg()
244 static bool read_set_clear_sgi_pend_reg(struct kvm_vcpu *vcpu, read_set_clear_sgi_pend_reg()
266 static bool write_set_clear_sgi_pend_reg(struct kvm_vcpu *vcpu, write_set_clear_sgi_pend_reg()
302 static bool handle_mmio_sgi_set(struct kvm_vcpu *vcpu, handle_mmio_sgi_set()
312 static bool handle_mmio_sgi_clear(struct kvm_vcpu *vcpu, handle_mmio_sgi_clear()
407 static void vgic_dispatch_sgi(struct kvm_vcpu *vcpu, u32 reg) vgic_dispatch_sgi()
449 static bool vgic_v2_queue_sgi(struct kvm_vcpu *vcpu, int irq) vgic_v2_queue_sgi()
544 static void vgic_v2_add_sgi_source(struct kvm_vcpu *vcpu, int irq, int source) vgic_v2_add_sgi_source()
573 static bool handle_cpu_mmio_misc(struct kvm_vcpu *vcpu, handle_cpu_mmio_misc()
614 static bool handle_mmio_abpr(struct kvm_vcpu *vcpu, handle_mmio_abpr()
620 static bool handle_cpu_mmio_ident(struct kvm_vcpu *vcpu, handle_cpu_mmio_ident()
671 struct kvm_vcpu *vcpu, *tmp_vcpu; vgic_attr_regs_access()
/linux-4.4.14/virt/kvm/
H A Dasync_pf.c31 static inline void kvm_async_page_present_sync(struct kvm_vcpu *vcpu, kvm_async_page_present_sync()
38 static inline void kvm_async_page_present_async(struct kvm_vcpu *vcpu, kvm_async_page_present_async()
65 void kvm_async_pf_vcpu_init(struct kvm_vcpu *vcpu) kvm_async_pf_vcpu_init()
77 struct kvm_vcpu *vcpu = apf->vcpu; async_pf_execute()
108 void kvm_clear_async_pf_completion_queue(struct kvm_vcpu *vcpu) kvm_clear_async_pf_completion_queue()
141 void kvm_check_async_pf_completion(struct kvm_vcpu *vcpu) kvm_check_async_pf_completion()
162 int kvm_setup_async_pf(struct kvm_vcpu *vcpu, gva_t gva, unsigned long hva, kvm_setup_async_pf()
209 int kvm_async_pf_wakeup_all(struct kvm_vcpu *vcpu) kvm_async_pf_wakeup_all()
H A Dasync_pf.h29 void kvm_async_pf_vcpu_init(struct kvm_vcpu *vcpu);
H A Dkvm_main.c133 int vcpu_load(struct kvm_vcpu *vcpu) vcpu_load()
146 void vcpu_put(struct kvm_vcpu *vcpu) vcpu_put()
164 struct kvm_vcpu *vcpu; kvm_make_all_cpus_request()
219 int kvm_vcpu_init(struct kvm_vcpu *vcpu, struct kvm *kvm, unsigned id) kvm_vcpu_init()
259 void kvm_vcpu_uninit(struct kvm_vcpu *vcpu) kvm_vcpu_uninit()
1163 struct kvm_memory_slot *kvm_vcpu_gfn_to_memslot(struct kvm_vcpu *vcpu, gfn_t gfn) kvm_vcpu_gfn_to_memslot()
1243 unsigned long kvm_vcpu_gfn_to_hva(struct kvm_vcpu *vcpu, gfn_t gfn) kvm_vcpu_gfn_to_hva()
1271 unsigned long kvm_vcpu_gfn_to_hva_prot(struct kvm_vcpu *vcpu, gfn_t gfn, bool *writable) kvm_vcpu_gfn_to_hva_prot()
1493 pfn_t kvm_vcpu_gfn_to_pfn_atomic(struct kvm_vcpu *vcpu, gfn_t gfn) kvm_vcpu_gfn_to_pfn_atomic()
1505 pfn_t kvm_vcpu_gfn_to_pfn(struct kvm_vcpu *vcpu, gfn_t gfn) kvm_vcpu_gfn_to_pfn()
1551 struct page *kvm_vcpu_gfn_to_page(struct kvm_vcpu *vcpu, gfn_t gfn) kvm_vcpu_gfn_to_page()
1647 int kvm_vcpu_read_guest_page(struct kvm_vcpu *vcpu, gfn_t gfn, void *data, kvm_vcpu_read_guest_page()
1676 int kvm_vcpu_read_guest(struct kvm_vcpu *vcpu, gpa_t gpa, void *data, unsigned long len) kvm_vcpu_read_guest()
1724 int kvm_vcpu_read_guest_atomic(struct kvm_vcpu *vcpu, gpa_t gpa, kvm_vcpu_read_guest_atomic()
1760 int kvm_vcpu_write_guest_page(struct kvm_vcpu *vcpu, gfn_t gfn, kvm_vcpu_write_guest_page()
1790 int kvm_vcpu_write_guest(struct kvm_vcpu *vcpu, gpa_t gpa, const void *data, kvm_vcpu_write_guest()
1945 void kvm_vcpu_mark_page_dirty(struct kvm_vcpu *vcpu, gfn_t gfn) kvm_vcpu_mark_page_dirty()
1954 static void grow_halt_poll_ns(struct kvm_vcpu *vcpu) grow_halt_poll_ns()
1972 static void shrink_halt_poll_ns(struct kvm_vcpu *vcpu) shrink_halt_poll_ns()
1986 static int kvm_vcpu_check_block(struct kvm_vcpu *vcpu) kvm_vcpu_check_block()
2003 void kvm_vcpu_block(struct kvm_vcpu *vcpu) kvm_vcpu_block()
2068 void kvm_vcpu_kick(struct kvm_vcpu *vcpu) kvm_vcpu_kick()
2089 int kvm_vcpu_yield_to(struct kvm_vcpu *target) kvm_vcpu_yield_to()
2131 static bool kvm_vcpu_eligible_for_directed_yield(struct kvm_vcpu *vcpu) kvm_vcpu_eligible_for_directed_yield()
2148 void kvm_vcpu_on_spin(struct kvm_vcpu *me) kvm_vcpu_on_spin()
2151 struct kvm_vcpu *vcpu; kvm_vcpu_on_spin()
2202 struct kvm_vcpu *vcpu = vma->vm_file->private_data; kvm_vcpu_fault()
2234 struct kvm_vcpu *vcpu = filp->private_data; kvm_vcpu_release()
2253 static int create_vcpu_fd(struct kvm_vcpu *vcpu) create_vcpu_fd()
2264 struct kvm_vcpu *vcpu, *v; kvm_vm_ioctl_create_vcpu()
2325 static int kvm_vcpu_ioctl_set_sigmask(struct kvm_vcpu *vcpu, sigset_t *sigset) kvm_vcpu_ioctl_set_sigmask()
2339 struct kvm_vcpu *vcpu = filp->private_data; kvm_vcpu_ioctl()
2543 struct kvm_vcpu *vcpu = filp->private_data; kvm_vcpu_compat_ioctl()
3238 static int __kvm_io_bus_write(struct kvm_vcpu *vcpu, struct kvm_io_bus *bus, __kvm_io_bus_write()
3259 int kvm_io_bus_write(struct kvm_vcpu *vcpu, enum kvm_bus bus_idx, gpa_t addr, kvm_io_bus_write()
3277 int kvm_io_bus_write_cookie(struct kvm_vcpu *vcpu, enum kvm_bus bus_idx, kvm_io_bus_write_cookie()
3304 static int __kvm_io_bus_read(struct kvm_vcpu *vcpu, struct kvm_io_bus *bus, __kvm_io_bus_read()
3326 int kvm_io_bus_read(struct kvm_vcpu *vcpu, enum kvm_bus bus_idx, gpa_t addr, kvm_io_bus_read()
3426 struct kvm_vcpu *vcpu; vcpu_stat_get()
3501 struct kvm_vcpu *preempt_notifier_to_vcpu(struct preempt_notifier *pn) preempt_notifier_to_vcpu()
3503 return container_of(pn, struct kvm_vcpu, preempt_notifier); preempt_notifier_to_vcpu()
3508 struct kvm_vcpu *vcpu = preempt_notifier_to_vcpu(pn); kvm_sched_in()
3521 struct kvm_vcpu *vcpu = preempt_notifier_to_vcpu(pn); kvm_sched_out()
3573 vcpu_align = __alignof__(struct kvm_vcpu);
3574 kvm_vcpu_cache = kmem_cache_create("kvm_vcpu", vcpu_size, vcpu_align,
H A Dcoalesced_mmio.c63 static int coalesced_mmio_write(struct kvm_vcpu *vcpu, coalesced_mmio_write()
H A Deventfd.c724 ioeventfd_write(struct kvm_vcpu *vcpu, struct kvm_io_device *this, gpa_t addr, ioeventfd_write()
/linux-4.4.14/arch/s390/include/asm/
H A Dkvm_host.h621 bool kvm_arch_can_inject_async_page_present(struct kvm_vcpu *vcpu);
623 void kvm_arch_async_page_ready(struct kvm_vcpu *vcpu,
626 void kvm_arch_async_page_not_present(struct kvm_vcpu *vcpu,
629 void kvm_arch_async_page_present(struct kvm_vcpu *vcpu,
638 static inline void kvm_arch_vcpu_uninit(struct kvm_vcpu *vcpu) {} kvm_arch_sched_in()
639 static inline void kvm_arch_sched_in(struct kvm_vcpu *vcpu, int cpu) {} kvm_arch_free_memslot()
646 static inline void kvm_arch_vcpu_blocking(struct kvm_vcpu *vcpu) {} kvm_arch_vcpu_unblocking()
647 static inline void kvm_arch_vcpu_unblocking(struct kvm_vcpu *vcpu) {}
/linux-4.4.14/arch/mips/kernel/
H A Dasm-offsets.c345 OFFSET(VCPU_RUN, kvm_vcpu, run); output_kvm_defines()
346 OFFSET(VCPU_HOST_ARCH, kvm_vcpu, arch); output_kvm_defines()
