kvm_run            19 arch/arm/include/asm/kvm_coproc.h int kvm_handle_cp10_id(struct kvm_vcpu *vcpu, struct kvm_run *run);
kvm_run            20 arch/arm/include/asm/kvm_coproc.h int kvm_handle_cp_0_13_access(struct kvm_vcpu *vcpu, struct kvm_run *run);
kvm_run            21 arch/arm/include/asm/kvm_coproc.h int kvm_handle_cp14_load_store(struct kvm_vcpu *vcpu, struct kvm_run *run);
kvm_run            22 arch/arm/include/asm/kvm_coproc.h int kvm_handle_cp14_32(struct kvm_vcpu *vcpu, struct kvm_run *run);
kvm_run            23 arch/arm/include/asm/kvm_coproc.h int kvm_handle_cp14_64(struct kvm_vcpu *vcpu, struct kvm_run *run);
kvm_run            24 arch/arm/include/asm/kvm_coproc.h int kvm_handle_cp15_32(struct kvm_vcpu *vcpu, struct kvm_run *run);
kvm_run            25 arch/arm/include/asm/kvm_coproc.h int kvm_handle_cp15_64(struct kvm_vcpu *vcpu, struct kvm_run *run);
kvm_run           287 arch/arm/include/asm/kvm_host.h int handle_exit(struct kvm_vcpu *vcpu, struct kvm_run *run,
kvm_run           290 arch/arm/include/asm/kvm_host.h static inline void handle_exit_early(struct kvm_vcpu *vcpu, struct kvm_run *run,
kvm_run            24 arch/arm/include/asm/kvm_mmio.h int kvm_handle_mmio_return(struct kvm_vcpu *vcpu, struct kvm_run *run);
kvm_run            25 arch/arm/include/asm/kvm_mmio.h int io_mem_abort(struct kvm_vcpu *vcpu, struct kvm_run *run,
kvm_run            60 arch/arm/include/asm/kvm_mmu.h int kvm_handle_guest_abort(struct kvm_vcpu *vcpu, struct kvm_run *run);
kvm_run            81 arch/arm/kvm/coproc.c int kvm_handle_cp10_id(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run            87 arch/arm/kvm/coproc.c int kvm_handle_cp_0_13_access(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run            97 arch/arm/kvm/coproc.c int kvm_handle_cp14_load_store(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run           629 arch/arm/kvm/coproc.c int kvm_handle_cp15_64(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run           641 arch/arm/kvm/coproc.c int kvm_handle_cp14_64(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run           694 arch/arm/kvm/coproc.c int kvm_handle_cp15_32(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run           705 arch/arm/kvm/coproc.c int kvm_handle_cp14_32(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run            17 arch/arm/kvm/handle_exit.c typedef int (*exit_handle_fn)(struct kvm_vcpu *, struct kvm_run *);
kvm_run            19 arch/arm/kvm/handle_exit.c static int handle_hvc(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run            36 arch/arm/kvm/handle_exit.c static int handle_smc(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run            62 arch/arm/kvm/handle_exit.c static int kvm_handle_wfx(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run            80 arch/arm/kvm/handle_exit.c static int kvm_handle_unknown_ec(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run           118 arch/arm/kvm/handle_exit.c int handle_exit(struct kvm_vcpu *vcpu, struct kvm_run *run,
kvm_run            30 arch/arm64/include/asm/kvm_coproc.h int kvm_handle_cp14_load_store(struct kvm_vcpu *vcpu, struct kvm_run *run);
kvm_run            31 arch/arm64/include/asm/kvm_coproc.h int kvm_handle_cp14_32(struct kvm_vcpu *vcpu, struct kvm_run *run);
kvm_run            32 arch/arm64/include/asm/kvm_coproc.h int kvm_handle_cp14_64(struct kvm_vcpu *vcpu, struct kvm_run *run);
kvm_run            33 arch/arm64/include/asm/kvm_coproc.h int kvm_handle_cp15_32(struct kvm_vcpu *vcpu, struct kvm_run *run);
kvm_run            34 arch/arm64/include/asm/kvm_coproc.h int kvm_handle_cp15_64(struct kvm_vcpu *vcpu, struct kvm_run *run);
kvm_run            35 arch/arm64/include/asm/kvm_coproc.h int kvm_handle_sys_reg(struct kvm_vcpu *vcpu, struct kvm_run *run);
kvm_run           475 arch/arm64/include/asm/kvm_host.h int handle_exit(struct kvm_vcpu *vcpu, struct kvm_run *run,
kvm_run           477 arch/arm64/include/asm/kvm_host.h void handle_exit_early(struct kvm_vcpu *vcpu, struct kvm_run *run,
kvm_run            23 arch/arm64/include/asm/kvm_mmio.h int kvm_handle_mmio_return(struct kvm_vcpu *vcpu, struct kvm_run *run);
kvm_run            24 arch/arm64/include/asm/kvm_mmio.h int io_mem_abort(struct kvm_vcpu *vcpu, struct kvm_run *run,
kvm_run           161 arch/arm64/include/asm/kvm_mmu.h int kvm_handle_guest_abort(struct kvm_vcpu *vcpu, struct kvm_run *run);
kvm_run            28 arch/arm64/kvm/handle_exit.c typedef int (*exit_handle_fn)(struct kvm_vcpu *, struct kvm_run *);
kvm_run            36 arch/arm64/kvm/handle_exit.c static int handle_hvc(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run            53 arch/arm64/kvm/handle_exit.c static int handle_smc(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run            72 arch/arm64/kvm/handle_exit.c static int handle_no_fpsimd(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run            90 arch/arm64/kvm/handle_exit.c static int kvm_handle_wfx(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run           120 arch/arm64/kvm/handle_exit.c static int kvm_handle_guest_debug(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run           147 arch/arm64/kvm/handle_exit.c static int kvm_handle_unknown_ec(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run           158 arch/arm64/kvm/handle_exit.c static int handle_sve(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run           181 arch/arm64/kvm/handle_exit.c static int kvm_handle_ptrauth(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run           226 arch/arm64/kvm/handle_exit.c static int handle_trap_exceptions(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run           251 arch/arm64/kvm/handle_exit.c int handle_exit(struct kvm_vcpu *vcpu, struct kvm_run *run,
kvm_run           303 arch/arm64/kvm/handle_exit.c void handle_exit_early(struct kvm_vcpu *vcpu, struct kvm_run *run,
kvm_run          2027 arch/arm64/kvm/sys_regs.c int kvm_handle_cp14_load_store(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run          2206 arch/arm64/kvm/sys_regs.c int kvm_handle_cp15_64(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run          2217 arch/arm64/kvm/sys_regs.c int kvm_handle_cp15_32(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run          2228 arch/arm64/kvm/sys_regs.c int kvm_handle_cp14_64(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run          2235 arch/arm64/kvm/sys_regs.c int kvm_handle_cp14_32(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run          2287 arch/arm64/kvm/sys_regs.c int kvm_handle_sys_reg(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run           317 arch/mips/include/asm/kvm_host.h 	int (*vcpu_run)(struct kvm_run *run, struct kvm_vcpu *vcpu);
kvm_run           819 arch/mips/include/asm/kvm_host.h 	int (*vcpu_run)(struct kvm_run *run, struct kvm_vcpu *vcpu);
kvm_run           820 arch/mips/include/asm/kvm_host.h 	void (*vcpu_reenter)(struct kvm_run *run, struct kvm_vcpu *vcpu);
kvm_run           828 arch/mips/include/asm/kvm_host.h extern int kvm_mips_handle_exit(struct kvm_run *run, struct kvm_vcpu *vcpu);
kvm_run           875 arch/mips/include/asm/kvm_host.h 						     struct kvm_run *run,
kvm_run           982 arch/mips/include/asm/kvm_host.h 						   struct kvm_run *run,
kvm_run           989 arch/mips/include/asm/kvm_host.h 						      struct kvm_run *run,
kvm_run           994 arch/mips/include/asm/kvm_host.h 							 struct kvm_run *run,
kvm_run           999 arch/mips/include/asm/kvm_host.h 							struct kvm_run *run,
kvm_run          1004 arch/mips/include/asm/kvm_host.h 							 struct kvm_run *run,
kvm_run          1009 arch/mips/include/asm/kvm_host.h 							struct kvm_run *run,
kvm_run          1014 arch/mips/include/asm/kvm_host.h 						     struct kvm_run *run,
kvm_run          1019 arch/mips/include/asm/kvm_host.h 						      struct kvm_run *run,
kvm_run          1024 arch/mips/include/asm/kvm_host.h 						struct kvm_run *run,
kvm_run          1029 arch/mips/include/asm/kvm_host.h 						     struct kvm_run *run,
kvm_run          1034 arch/mips/include/asm/kvm_host.h 						     struct kvm_run *run,
kvm_run          1039 arch/mips/include/asm/kvm_host.h 						       struct kvm_run *run,
kvm_run          1044 arch/mips/include/asm/kvm_host.h 							 struct kvm_run *run,
kvm_run          1049 arch/mips/include/asm/kvm_host.h 						      struct kvm_run *run,
kvm_run          1054 arch/mips/include/asm/kvm_host.h 							 struct kvm_run *run,
kvm_run          1058 arch/mips/include/asm/kvm_host.h 							 struct kvm_run *run);
kvm_run          1087 arch/mips/include/asm/kvm_host.h 					       struct kvm_run *run,
kvm_run          1093 arch/mips/include/asm/kvm_host.h 					     struct kvm_run *run,
kvm_run          1098 arch/mips/include/asm/kvm_host.h 					   struct kvm_run *run,
kvm_run          1102 arch/mips/include/asm/kvm_host.h 					     struct kvm_run *run,
kvm_run          1106 arch/mips/include/asm/kvm_host.h 					    struct kvm_run *run,
kvm_run          1265 arch/mips/kvm/emulate.c 					   struct kvm_run *run,
kvm_run          1600 arch/mips/kvm/emulate.c 					     struct kvm_run *run,
kvm_run          1681 arch/mips/kvm/emulate.c 					    u32 cause, struct kvm_run *run,
kvm_run          1755 arch/mips/kvm/emulate.c 						     struct kvm_run *run,
kvm_run          1799 arch/mips/kvm/emulate.c 					     struct kvm_run *run,
kvm_run          1932 arch/mips/kvm/emulate.c 					    struct kvm_run *run,
kvm_run          2003 arch/mips/kvm/emulate.c 					       struct kvm_run *run,
kvm_run          2038 arch/mips/kvm/emulate.c 						  struct kvm_run *run,
kvm_run          2082 arch/mips/kvm/emulate.c 						 struct kvm_run *run,
kvm_run          2124 arch/mips/kvm/emulate.c 						  struct kvm_run *run,
kvm_run          2166 arch/mips/kvm/emulate.c 						 struct kvm_run *run,
kvm_run          2207 arch/mips/kvm/emulate.c 					      struct kvm_run *run,
kvm_run          2247 arch/mips/kvm/emulate.c 					       struct kvm_run *run,
kvm_run          2276 arch/mips/kvm/emulate.c 					      struct kvm_run *run,
kvm_run          2311 arch/mips/kvm/emulate.c 					      struct kvm_run *run,
kvm_run          2346 arch/mips/kvm/emulate.c 						struct kvm_run *run,
kvm_run          2381 arch/mips/kvm/emulate.c 						  struct kvm_run *run,
kvm_run          2416 arch/mips/kvm/emulate.c 					       struct kvm_run *run,
kvm_run          2451 arch/mips/kvm/emulate.c 						  struct kvm_run *run,
kvm_run          2485 arch/mips/kvm/emulate.c 					 struct kvm_run *run,
kvm_run          2578 arch/mips/kvm/emulate.c 						  struct kvm_run *run)
kvm_run          2625 arch/mips/kvm/emulate.c 						  struct kvm_run *run,
kvm_run          2663 arch/mips/kvm/emulate.c 					       struct kvm_run *run,
kvm_run          2759 arch/mips/kvm/emulate.c 					      struct kvm_run *run,
kvm_run           435 arch/mips/kvm/mips.c int kvm_arch_vcpu_ioctl_run(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run          1281 arch/mips/kvm/mips.c int kvm_mips_handle_exit(struct kvm_run *run, struct kvm_vcpu *vcpu)
kvm_run            70 arch/mips/kvm/trap_emul.c 	struct kvm_run *run = vcpu->run;
kvm_run           119 arch/mips/kvm/trap_emul.c static int kvm_mips_bad_load(u32 cause, u32 *opc, struct kvm_run *run,
kvm_run           152 arch/mips/kvm/trap_emul.c static int kvm_mips_bad_store(u32 cause, u32 *opc, struct kvm_run *run,
kvm_run           179 arch/mips/kvm/trap_emul.c static int kvm_mips_bad_access(u32 cause, u32 *opc, struct kvm_run *run,
kvm_run           191 arch/mips/kvm/trap_emul.c 	struct kvm_run *run = vcpu->run;
kvm_run           251 arch/mips/kvm/trap_emul.c 	struct kvm_run *run = vcpu->run;
kvm_run           323 arch/mips/kvm/trap_emul.c 	struct kvm_run *run = vcpu->run;
kvm_run           343 arch/mips/kvm/trap_emul.c 	struct kvm_run *run = vcpu->run;
kvm_run           362 arch/mips/kvm/trap_emul.c 	struct kvm_run *run = vcpu->run;
kvm_run           380 arch/mips/kvm/trap_emul.c 	struct kvm_run *run = vcpu->run;
kvm_run           398 arch/mips/kvm/trap_emul.c 	struct kvm_run *run = vcpu->run;
kvm_run           416 arch/mips/kvm/trap_emul.c 	struct kvm_run *run = vcpu->run;
kvm_run           434 arch/mips/kvm/trap_emul.c 	struct kvm_run *run = vcpu->run;
kvm_run           452 arch/mips/kvm/trap_emul.c 	struct kvm_run *run = vcpu->run;
kvm_run           477 arch/mips/kvm/trap_emul.c 	struct kvm_run *run = vcpu->run;
kvm_run          1182 arch/mips/kvm/trap_emul.c static void kvm_trap_emul_vcpu_reenter(struct kvm_run *run,
kvm_run          1226 arch/mips/kvm/trap_emul.c static int kvm_trap_emul_vcpu_run(struct kvm_run *run, struct kvm_vcpu *vcpu)
kvm_run           902 arch/mips/kvm/vz.c 					      struct kvm_run *run,
kvm_run          1065 arch/mips/kvm/vz.c 					       struct kvm_run *run,
kvm_run          1137 arch/mips/kvm/vz.c 	struct kvm_run *run = vcpu->run;
kvm_run          1468 arch/mips/kvm/vz.c 	struct kvm_run *run = vcpu->run;
kvm_run          1515 arch/mips/kvm/vz.c 	struct kvm_run *run = vcpu->run;
kvm_run          1538 arch/mips/kvm/vz.c 	struct kvm_run *run = vcpu->run;
kvm_run          1585 arch/mips/kvm/vz.c 	struct kvm_run *run = vcpu->run;
kvm_run          3132 arch/mips/kvm/vz.c static void kvm_vz_vcpu_reenter(struct kvm_run *run, struct kvm_vcpu *vcpu)
kvm_run          3148 arch/mips/kvm/vz.c static int kvm_vz_vcpu_run(struct kvm_run *run, struct kvm_vcpu *vcpu)
kvm_run           158 arch/powerpc/include/asm/kvm_book3s.h extern int kvmppc_book3s_hv_page_fault(struct kvm_run *run,
kvm_run           163 arch/powerpc/include/asm/kvm_book3s.h extern int kvmppc_hv_emulate_mmio(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run           177 arch/powerpc/include/asm/kvm_book3s.h extern int kvmppc_book3s_radix_page_fault(struct kvm_run *run,
kvm_run           237 arch/powerpc/include/asm/kvm_book3s.h extern int kvmppc_emulate_paired_single(struct kvm_run *run, struct kvm_vcpu *vcpu);
kvm_run           303 arch/powerpc/include/asm/kvm_book3s.h int kvmhv_run_single_vcpu(struct kvm_run *kvm_run, struct kvm_vcpu *vcpu,
kvm_run           308 arch/powerpc/include/asm/kvm_book3s.h long int kvmhv_nested_page_fault(struct kvm_run *run, struct kvm_vcpu *vcpu);
kvm_run           793 arch/powerpc/include/asm/kvm_host.h 	struct kvm_run *kvm_run;
kvm_run            61 arch/powerpc/include/asm/kvm_ppc.h extern int kvmppc_vcpu_run(struct kvm_run *kvm_run, struct kvm_vcpu *vcpu);
kvm_run            62 arch/powerpc/include/asm/kvm_ppc.h extern int __kvmppc_vcpu_run(struct kvm_run *kvm_run, struct kvm_vcpu *vcpu);
kvm_run            66 arch/powerpc/include/asm/kvm_ppc.h extern int kvmppc_handle_load(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run            69 arch/powerpc/include/asm/kvm_ppc.h extern int kvmppc_handle_loads(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run            72 arch/powerpc/include/asm/kvm_ppc.h extern int kvmppc_handle_vsx_load(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run            75 arch/powerpc/include/asm/kvm_ppc.h extern int kvmppc_handle_vmx_load(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run            77 arch/powerpc/include/asm/kvm_ppc.h extern int kvmppc_handle_vmx_store(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run            79 arch/powerpc/include/asm/kvm_ppc.h extern int kvmppc_handle_store(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run            82 arch/powerpc/include/asm/kvm_ppc.h extern int kvmppc_handle_vsx_store(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run            93 arch/powerpc/include/asm/kvm_ppc.h extern int kvmppc_emulate_instruction(struct kvm_run *run,
kvm_run            96 arch/powerpc/include/asm/kvm_ppc.h extern int kvmppc_emulate_mmio(struct kvm_run *run, struct kvm_vcpu *vcpu);
kvm_run           275 arch/powerpc/include/asm/kvm_ppc.h 	int (*vcpu_run)(struct kvm_run *run, struct kvm_vcpu *vcpu);
kvm_run           302 arch/powerpc/include/asm/kvm_ppc.h 	int (*emulate_op)(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run           791 arch/powerpc/kvm/book3s.c int kvmppc_vcpu_run(struct kvm_run *kvm_run, struct kvm_vcpu *vcpu)
kvm_run           793 arch/powerpc/kvm/book3s.c 	return vcpu->kvm->arch.kvm_ops->vcpu_run(kvm_run, vcpu);
kvm_run            20 arch/powerpc/kvm/book3s.h extern int kvmppc_core_emulate_op_pr(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run           428 arch/powerpc/kvm/book3s_64_mmu_hv.c int kvmppc_hv_emulate_mmio(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run           491 arch/powerpc/kvm/book3s_64_mmu_hv.c int kvmppc_book3s_hv_page_fault(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run           889 arch/powerpc/kvm/book3s_64_mmu_radix.c int kvmppc_book3s_radix_page_fault(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run           238 arch/powerpc/kvm/book3s_emulate.c int kvmppc_core_emulate_op_pr(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run          1133 arch/powerpc/kvm/book3s_hv.c static int kvmppc_emulate_debug_inst(struct kvm_run *run,
kvm_run          1250 arch/powerpc/kvm/book3s_hv.c static int kvmppc_handle_exit_hv(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run          1439 arch/powerpc/kvm/book3s_hv.c static int kvmppc_handle_nested_exit(struct kvm_run *run, struct kvm_vcpu *vcpu)
kvm_run          2940 arch/powerpc/kvm/book3s_hv.c 			ret = kvmppc_handle_exit_hv(vcpu->arch.kvm_run, vcpu,
kvm_run          3906 arch/powerpc/kvm/book3s_hv.c static int kvmppc_run_vcpu(struct kvm_run *kvm_run, struct kvm_vcpu *vcpu)
kvm_run          3914 arch/powerpc/kvm/book3s_hv.c 	kvm_run->exit_reason = 0;
kvm_run          3926 arch/powerpc/kvm/book3s_hv.c 	vcpu->arch.kvm_run = kvm_run;
kvm_run          3959 arch/powerpc/kvm/book3s_hv.c 				kvm_run->exit_reason = KVM_EXIT_FAIL_ENTRY;
kvm_run          3960 arch/powerpc/kvm/book3s_hv.c 				kvm_run->fail_entry.
kvm_run          3979 arch/powerpc/kvm/book3s_hv.c 				v->arch.kvm_run->exit_reason = KVM_EXIT_INTR;
kvm_run          4020 arch/powerpc/kvm/book3s_hv.c 		kvm_run->exit_reason = KVM_EXIT_INTR;
kvm_run          4031 arch/powerpc/kvm/book3s_hv.c 	trace_kvmppc_run_vcpu_exit(vcpu, kvm_run);
kvm_run          4036 arch/powerpc/kvm/book3s_hv.c int kvmhv_run_single_vcpu(struct kvm_run *kvm_run,
kvm_run          4048 arch/powerpc/kvm/book3s_hv.c 	kvm_run->exit_reason = 0;
kvm_run          4055 arch/powerpc/kvm/book3s_hv.c 	vcpu->arch.kvm_run = kvm_run;
kvm_run          4173 arch/powerpc/kvm/book3s_hv.c 			r = kvmppc_handle_exit_hv(kvm_run, vcpu, current);
kvm_run          4175 arch/powerpc/kvm/book3s_hv.c 			r = kvmppc_handle_nested_exit(kvm_run, vcpu);
kvm_run          4185 arch/powerpc/kvm/book3s_hv.c 				kvm_run->exit_reason = KVM_EXIT_INTR;
kvm_run          4201 arch/powerpc/kvm/book3s_hv.c 	trace_kvmppc_run_vcpu_exit(vcpu, kvm_run);
kvm_run          4207 arch/powerpc/kvm/book3s_hv.c 	kvm_run->exit_reason = KVM_EXIT_INTR;
kvm_run          4215 arch/powerpc/kvm/book3s_hv.c static int kvmppc_vcpu_run_hv(struct kvm_run *run, struct kvm_vcpu *vcpu)
kvm_run          4967 arch/powerpc/kvm/book3s_hv.c static int kvmppc_core_emulate_op_hv(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run           293 arch/powerpc/kvm/book3s_hv_nested.c 		r = kvmhv_run_single_vcpu(vcpu->arch.kvm_run, vcpu, hdec_exp,
kvm_run          1260 arch/powerpc/kvm/book3s_hv_nested.c static long int __kvmhv_nested_page_fault(struct kvm_run *run,
kvm_run          1431 arch/powerpc/kvm/book3s_hv_nested.c long int kvmhv_nested_page_fault(struct kvm_run *run, struct kvm_vcpu *vcpu)
kvm_run           172 arch/powerpc/kvm/book3s_paired_singles.c static int kvmppc_emulate_fpr_load(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run           216 arch/powerpc/kvm/book3s_paired_singles.c static int kvmppc_emulate_fpr_store(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run           262 arch/powerpc/kvm/book3s_paired_singles.c static int kvmppc_emulate_psq_load(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run           305 arch/powerpc/kvm/book3s_paired_singles.c static int kvmppc_emulate_psq_store(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run           621 arch/powerpc/kvm/book3s_paired_singles.c int kvmppc_emulate_paired_single(struct kvm_run *run, struct kvm_vcpu *vcpu)
kvm_run           667 arch/powerpc/kvm/book3s_pr.c int kvmppc_handle_pagefault(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run          1056 arch/powerpc/kvm/book3s_pr.c static int kvmppc_exit_pr_progint(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run          1119 arch/powerpc/kvm/book3s_pr.c int kvmppc_handle_exit_pr(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run          1805 arch/powerpc/kvm/book3s_pr.c static int kvmppc_vcpu_run_pr(struct kvm_run *kvm_run, struct kvm_vcpu *vcpu)
kvm_run          1814 arch/powerpc/kvm/book3s_pr.c 		kvm_run->exit_reason = KVM_EXIT_INTERNAL_ERROR;
kvm_run          1841 arch/powerpc/kvm/book3s_pr.c 	ret = __kvmppc_vcpu_run(kvm_run, vcpu);
kvm_run           733 arch/powerpc/kvm/booke.c int kvmppc_vcpu_run(struct kvm_run *kvm_run, struct kvm_vcpu *vcpu)
kvm_run           739 arch/powerpc/kvm/booke.c 		kvm_run->exit_reason = KVM_EXIT_INTERNAL_ERROR;
kvm_run           781 arch/powerpc/kvm/booke.c 	ret = __kvmppc_vcpu_run(kvm_run, vcpu);
kvm_run           803 arch/powerpc/kvm/booke.c static int emulation_exit(struct kvm_run *run, struct kvm_vcpu *vcpu)
kvm_run           837 arch/powerpc/kvm/booke.c static int kvmppc_handle_debug(struct kvm_run *run, struct kvm_vcpu *vcpu)
kvm_run           957 arch/powerpc/kvm/booke.c static int kvmppc_resume_inst_load(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run           984 arch/powerpc/kvm/booke.c int kvmppc_handle_exit(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run            73 arch/powerpc/kvm/booke.h int kvmppc_booke_emulate_op(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run            98 arch/powerpc/kvm/booke.h extern int kvmppc_core_emulate_op_e500(struct kvm_run *run,
kvm_run           106 arch/powerpc/kvm/booke.h extern int kvmppc_core_emulate_op_e500(struct kvm_run *run,
kvm_run            42 arch/powerpc/kvm/booke_emulate.c int kvmppc_booke_emulate_op(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run            86 arch/powerpc/kvm/e500_emulate.c static int kvmppc_e500_emul_ehpriv(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run           128 arch/powerpc/kvm/e500_emulate.c int kvmppc_core_emulate_op_e500(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run           194 arch/powerpc/kvm/emulate.c int kvmppc_emulate_instruction(struct kvm_run *run, struct kvm_vcpu *vcpu)
kvm_run            74 arch/powerpc/kvm/emulate_loadstore.c 	struct kvm_run *run = vcpu->run;
kvm_run           281 arch/powerpc/kvm/powerpc.c int kvmppc_emulate_mmio(struct kvm_run *run, struct kvm_vcpu *vcpu)
kvm_run          1111 arch/powerpc/kvm/powerpc.c                                       struct kvm_run *run)
kvm_run          1222 arch/powerpc/kvm/powerpc.c static int __kvmppc_handle_load(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run          1267 arch/powerpc/kvm/powerpc.c int kvmppc_handle_load(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run          1276 arch/powerpc/kvm/powerpc.c int kvmppc_handle_loads(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run          1284 arch/powerpc/kvm/powerpc.c int kvmppc_handle_vsx_load(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run          1310 arch/powerpc/kvm/powerpc.c int kvmppc_handle_store(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run          1426 arch/powerpc/kvm/powerpc.c int kvmppc_handle_vsx_store(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run          1458 arch/powerpc/kvm/powerpc.c 			struct kvm_run *run)
kvm_run          1493 arch/powerpc/kvm/powerpc.c int kvmppc_handle_vmx_load(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run          1588 arch/powerpc/kvm/powerpc.c int kvmppc_handle_vmx_store(struct kvm_run *run, struct kvm_vcpu *vcpu,
kvm_run          1637 arch/powerpc/kvm/powerpc.c 		struct kvm_run *run)
kvm_run          1768 arch/powerpc/kvm/powerpc.c int kvm_arch_vcpu_ioctl_run(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run           475 arch/powerpc/kvm/trace_hv.h 	TP_PROTO(struct kvm_vcpu *vcpu, struct kvm_run *run),
kvm_run          3867 arch/s390/kvm/kvm-s390.c static void sync_regs(struct kvm_vcpu *vcpu, struct kvm_run *kvm_run)
kvm_run          3872 arch/s390/kvm/kvm-s390.c 	riccb = (struct runtime_instr_cb *) &kvm_run->s.regs.riccb;
kvm_run          3873 arch/s390/kvm/kvm-s390.c 	gscb = (struct gs_cb *) &kvm_run->s.regs.gscb;
kvm_run          3874 arch/s390/kvm/kvm-s390.c 	vcpu->arch.sie_block->gpsw.mask = kvm_run->psw_mask;
kvm_run          3875 arch/s390/kvm/kvm-s390.c 	vcpu->arch.sie_block->gpsw.addr = kvm_run->psw_addr;
kvm_run          3876 arch/s390/kvm/kvm-s390.c 	if (kvm_run->kvm_dirty_regs & KVM_SYNC_PREFIX)
kvm_run          3877 arch/s390/kvm/kvm-s390.c 		kvm_s390_set_prefix(vcpu, kvm_run->s.regs.prefix);
kvm_run          3878 arch/s390/kvm/kvm-s390.c 	if (kvm_run->kvm_dirty_regs & KVM_SYNC_CRS) {
kvm_run          3879 arch/s390/kvm/kvm-s390.c 		memcpy(&vcpu->arch.sie_block->gcr, &kvm_run->s.regs.crs, 128);
kvm_run          3883 arch/s390/kvm/kvm-s390.c 	if (kvm_run->kvm_dirty_regs & KVM_SYNC_ARCH0) {
kvm_run          3884 arch/s390/kvm/kvm-s390.c 		kvm_s390_set_cpu_timer(vcpu, kvm_run->s.regs.cputm);
kvm_run          3885 arch/s390/kvm/kvm-s390.c 		vcpu->arch.sie_block->ckc = kvm_run->s.regs.ckc;
kvm_run          3886 arch/s390/kvm/kvm-s390.c 		vcpu->arch.sie_block->todpr = kvm_run->s.regs.todpr;
kvm_run          3887 arch/s390/kvm/kvm-s390.c 		vcpu->arch.sie_block->pp = kvm_run->s.regs.pp;
kvm_run          3888 arch/s390/kvm/kvm-s390.c 		vcpu->arch.sie_block->gbea = kvm_run->s.regs.gbea;
kvm_run          3890 arch/s390/kvm/kvm-s390.c 	if (kvm_run->kvm_dirty_regs & KVM_SYNC_PFAULT) {
kvm_run          3891 arch/s390/kvm/kvm-s390.c 		vcpu->arch.pfault_token = kvm_run->s.regs.pft;
kvm_run          3892 arch/s390/kvm/kvm-s390.c 		vcpu->arch.pfault_select = kvm_run->s.regs.pfs;
kvm_run          3893 arch/s390/kvm/kvm-s390.c 		vcpu->arch.pfault_compare = kvm_run->s.regs.pfc;
kvm_run          3901 arch/s390/kvm/kvm-s390.c 	if ((kvm_run->kvm_dirty_regs & KVM_SYNC_RICCB) &&
kvm_run          3912 arch/s390/kvm/kvm-s390.c 	if ((kvm_run->kvm_dirty_regs & KVM_SYNC_GSCB) &&
kvm_run          3921 arch/s390/kvm/kvm-s390.c 	if ((kvm_run->kvm_dirty_regs & KVM_SYNC_BPBC) &&
kvm_run          3924 arch/s390/kvm/kvm-s390.c 		vcpu->arch.sie_block->fpf |= kvm_run->s.regs.bpbc ? FPF_BPBC : 0;
kvm_run          3956 arch/s390/kvm/kvm-s390.c 	kvm_run->kvm_dirty_regs = 0;
kvm_run          3959 arch/s390/kvm/kvm-s390.c static void store_regs(struct kvm_vcpu *vcpu, struct kvm_run *kvm_run)
kvm_run          3961 arch/s390/kvm/kvm-s390.c 	kvm_run->psw_mask = vcpu->arch.sie_block->gpsw.mask;
kvm_run          3962 arch/s390/kvm/kvm-s390.c 	kvm_run->psw_addr = vcpu->arch.sie_block->gpsw.addr;
kvm_run          3963 arch/s390/kvm/kvm-s390.c 	kvm_run->s.regs.prefix = kvm_s390_get_prefix(vcpu);
kvm_run          3964 arch/s390/kvm/kvm-s390.c 	memcpy(&kvm_run->s.regs.crs, &vcpu->arch.sie_block->gcr, 128);
kvm_run          3965 arch/s390/kvm/kvm-s390.c 	kvm_run->s.regs.cputm = kvm_s390_get_cpu_timer(vcpu);
kvm_run          3966 arch/s390/kvm/kvm-s390.c 	kvm_run->s.regs.ckc = vcpu->arch.sie_block->ckc;
kvm_run          3967 arch/s390/kvm/kvm-s390.c 	kvm_run->s.regs.todpr = vcpu->arch.sie_block->todpr;
kvm_run          3968 arch/s390/kvm/kvm-s390.c 	kvm_run->s.regs.pp = vcpu->arch.sie_block->pp;
kvm_run          3969 arch/s390/kvm/kvm-s390.c 	kvm_run->s.regs.gbea = vcpu->arch.sie_block->gbea;
kvm_run          3970 arch/s390/kvm/kvm-s390.c 	kvm_run->s.regs.pft = vcpu->arch.pfault_token;
kvm_run          3971 arch/s390/kvm/kvm-s390.c 	kvm_run->s.regs.pfs = vcpu->arch.pfault_select;
kvm_run          3972 arch/s390/kvm/kvm-s390.c 	kvm_run->s.regs.pfc = vcpu->arch.pfault_compare;
kvm_run          3973 arch/s390/kvm/kvm-s390.c 	kvm_run->s.regs.bpbc = (vcpu->arch.sie_block->fpf & FPF_BPBC) == FPF_BPBC;
kvm_run          3997 arch/s390/kvm/kvm-s390.c int kvm_arch_vcpu_ioctl_run(struct kvm_vcpu *vcpu, struct kvm_run *kvm_run)
kvm_run          4001 arch/s390/kvm/kvm-s390.c 	if (kvm_run->immediate_exit)
kvm_run          4004 arch/s390/kvm/kvm-s390.c 	if (kvm_run->kvm_valid_regs & ~KVM_SYNC_S390_VALID_FIELDS ||
kvm_run          4005 arch/s390/kvm/kvm-s390.c 	    kvm_run->kvm_dirty_regs & ~KVM_SYNC_S390_VALID_FIELDS)
kvm_run          4027 arch/s390/kvm/kvm-s390.c 	sync_regs(vcpu, kvm_run);
kvm_run          4034 arch/s390/kvm/kvm-s390.c 		kvm_run->exit_reason = KVM_EXIT_INTR;
kvm_run          4049 arch/s390/kvm/kvm-s390.c 	store_regs(vcpu, kvm_run);
kvm_run          1247 arch/x86/kvm/lapic.c 	struct kvm_run *run = vcpu->run;
kvm_run          2772 arch/x86/kvm/svm.c 	struct kvm_run *kvm_run = svm->vcpu.run;
kvm_run          2790 arch/x86/kvm/svm.c 		kvm_run->exit_reason = KVM_EXIT_DEBUG;
kvm_run          2791 arch/x86/kvm/svm.c 		kvm_run->debug.arch.pc =
kvm_run          2793 arch/x86/kvm/svm.c 		kvm_run->debug.arch.exception = DB_VECTOR;
kvm_run          2802 arch/x86/kvm/svm.c 	struct kvm_run *kvm_run = svm->vcpu.run;
kvm_run          2804 arch/x86/kvm/svm.c 	kvm_run->exit_reason = KVM_EXIT_DEBUG;
kvm_run          2805 arch/x86/kvm/svm.c 	kvm_run->debug.arch.pc = svm->vmcb->save.cs.base + svm->vmcb->save.rip;
kvm_run          2806 arch/x86/kvm/svm.c 	kvm_run->debug.arch.exception = BP_VECTOR;
kvm_run          2910 arch/x86/kvm/svm.c 	struct kvm_run *kvm_run = svm->vcpu.run;
kvm_run          2919 arch/x86/kvm/svm.c 	kvm_run->exit_reason = KVM_EXIT_SHUTDOWN;
kvm_run          4139 arch/x86/kvm/svm.c 	struct kvm_run *kvm_run = svm->vcpu.run;
kvm_run          4149 arch/x86/kvm/svm.c 	kvm_run->exit_reason = KVM_EXIT_SET_TPR;
kvm_run          4979 arch/x86/kvm/svm.c 	struct kvm_run *kvm_run = vcpu->run;
kvm_run          5018 arch/x86/kvm/svm.c 		kvm_run->exit_reason = KVM_EXIT_FAIL_ENTRY;
kvm_run          5019 arch/x86/kvm/svm.c 		kvm_run->fail_entry.hardware_entry_failure_reason
kvm_run          4597 arch/x86/kvm/vmx/vmx.c 	struct kvm_run *kvm_run = vcpu->run;
kvm_run          4674 arch/x86/kvm/vmx/vmx.c 		kvm_run->debug.arch.dr6 = dr6 | DR6_FIXED_1;
kvm_run          4675 arch/x86/kvm/vmx/vmx.c 		kvm_run->debug.arch.dr7 = vmcs_readl(GUEST_DR7);
kvm_run          4685 arch/x86/kvm/vmx/vmx.c 		kvm_run->exit_reason = KVM_EXIT_DEBUG;
kvm_run          4687 arch/x86/kvm/vmx/vmx.c 		kvm_run->debug.arch.pc = vmcs_readl(GUEST_CS_BASE) + rip;
kvm_run          4688 arch/x86/kvm/vmx/vmx.c 		kvm_run->debug.arch.exception = ex_no;
kvm_run          4691 arch/x86/kvm/vmx/vmx.c 		kvm_run->exit_reason = KVM_EXIT_EXCEPTION;
kvm_run          4692 arch/x86/kvm/vmx/vmx.c 		kvm_run->ex.exception = ex_no;
kvm_run          4693 arch/x86/kvm/vmx/vmx.c 		kvm_run->ex.error_code = error_code;
kvm_run          6588 arch/x86/kvm/x86.c 	struct kvm_run *kvm_run = vcpu->run;
kvm_run          6591 arch/x86/kvm/x86.c 		kvm_run->debug.arch.dr6 = DR6_BS | DR6_FIXED_1 | DR6_RTM;
kvm_run          6592 arch/x86/kvm/x86.c 		kvm_run->debug.arch.pc = vcpu->arch.singlestep_rip;
kvm_run          6593 arch/x86/kvm/x86.c 		kvm_run->debug.arch.exception = DB_VECTOR;
kvm_run          6594 arch/x86/kvm/x86.c 		kvm_run->exit_reason = KVM_EXIT_DEBUG;
kvm_run          6628 arch/x86/kvm/x86.c 		struct kvm_run *kvm_run = vcpu->run;
kvm_run          6635 arch/x86/kvm/x86.c 			kvm_run->debug.arch.dr6 = dr6 | DR6_FIXED_1 | DR6_RTM;
kvm_run          6636 arch/x86/kvm/x86.c 			kvm_run->debug.arch.pc = eip;
kvm_run          6637 arch/x86/kvm/x86.c 			kvm_run->debug.arch.exception = DB_VECTOR;
kvm_run          6638 arch/x86/kvm/x86.c 			kvm_run->exit_reason = KVM_EXIT_DEBUG;
kvm_run          7535 arch/x86/kvm/x86.c 	struct kvm_run *kvm_run = vcpu->run;
kvm_run          7537 arch/x86/kvm/x86.c 	kvm_run->if_flag = (kvm_get_rflags(vcpu) & X86_EFLAGS_IF) != 0;
kvm_run          7538 arch/x86/kvm/x86.c 	kvm_run->flags = is_smm(vcpu) ? KVM_RUN_X86_SMM : 0;
kvm_run          7539 arch/x86/kvm/x86.c 	kvm_run->cr8 = kvm_get_cr8(vcpu);
kvm_run          7540 arch/x86/kvm/x86.c 	kvm_run->apic_base = kvm_get_apic_base(vcpu);
kvm_run          7541 arch/x86/kvm/x86.c 	kvm_run->ready_for_interrupt_injection =
kvm_run          8470 arch/x86/kvm/x86.c 	struct kvm_run *run = vcpu->run;
kvm_run          8559 arch/x86/kvm/x86.c int kvm_arch_vcpu_ioctl_run(struct kvm_vcpu *vcpu, struct kvm_run *kvm_run)
kvm_run          8568 arch/x86/kvm/x86.c 		if (kvm_run->immediate_exit) {
kvm_run          8597 arch/x86/kvm/x86.c 		if (kvm_set_cr8(vcpu, kvm_run->cr8) != 0) {
kvm_run          8612 arch/x86/kvm/x86.c 	if (kvm_run->immediate_exit)
kvm_run           279 include/linux/kvm_host.h 	struct kvm_run *run;
kvm_run           851 include/linux/kvm_host.h int kvm_arch_vcpu_ioctl_run(struct kvm_vcpu *vcpu, struct kvm_run *kvm_run);
kvm_run            13 include/linux/kvm_types.h struct kvm_run;
kvm_run           154 tools/testing/selftests/kvm/dirty_log_test.c 	struct kvm_run *run;
kvm_run           113 tools/testing/selftests/kvm/include/kvm_util.h struct kvm_run *vcpu_state(struct kvm_vm *vm, uint32_t vcpuid);
kvm_run            94 tools/testing/selftests/kvm/lib/aarch64/ucall.c 	struct kvm_run *run = vcpu_state(vm, vcpu_id);
kvm_run           781 tools/testing/selftests/kvm/lib/kvm_util.c 	TEST_ASSERT(ret >= sizeof(struct kvm_run),
kvm_run           828 tools/testing/selftests/kvm/lib/kvm_util.c 	vcpu->state = (struct kvm_run *) mmap(NULL, sizeof(*vcpu->state),
kvm_run          1117 tools/testing/selftests/kvm/lib/kvm_util.c struct kvm_run *vcpu_state(struct kvm_vm *vm, uint32_t vcpuid)
kvm_run            41 tools/testing/selftests/kvm/lib/kvm_util_internal.h 	struct kvm_run *state;
kvm_run           247 tools/testing/selftests/kvm/lib/s390x/processor.c 	struct kvm_run *run;
kvm_run            38 tools/testing/selftests/kvm/lib/s390x/ucall.c 	struct kvm_run *run = vcpu_state(vm, vcpu_id);
kvm_run            40 tools/testing/selftests/kvm/lib/x86_64/ucall.c 	struct kvm_run *run = vcpu_state(vm, vcpu_id);
kvm_run            35 tools/testing/selftests/kvm/s390x/memop.c 	struct kvm_run *run;
kvm_run            69 tools/testing/selftests/kvm/s390x/sync_regs_test.c 	struct kvm_run *run;
kvm_run            66 tools/testing/selftests/kvm/x86_64/cr4_cpuid_sync_test.c 	struct kvm_run *run;
kvm_run            78 tools/testing/selftests/kvm/x86_64/evmcs_test.c 	struct kvm_run *run;
kvm_run            36 tools/testing/selftests/kvm/x86_64/mmio_warning_test.c 	struct kvm_run *run;
kvm_run            44 tools/testing/selftests/kvm/x86_64/mmio_warning_test.c 	struct kvm_run *run = tc->run;
kvm_run            57 tools/testing/selftests/kvm/x86_64/mmio_warning_test.c 	struct kvm_run *run;
kvm_run            66 tools/testing/selftests/kvm/x86_64/mmio_warning_test.c 	run = (struct kvm_run *)mmap(0, 4096, PROT_READ|PROT_WRITE, MAP_SHARED,
kvm_run            50 tools/testing/selftests/kvm/x86_64/platform_info_test.c 	struct kvm_run *run = vcpu_state(vm, VCPU_ID);
kvm_run            71 tools/testing/selftests/kvm/x86_64/platform_info_test.c 	struct kvm_run *run = vcpu_state(vm, VCPU_ID);
kvm_run            94 tools/testing/selftests/kvm/x86_64/smm_test.c 	struct kvm_run *run;
kvm_run           126 tools/testing/selftests/kvm/x86_64/state_test.c 	struct kvm_run *run;
kvm_run            83 tools/testing/selftests/kvm/x86_64/sync_regs_test.c 	struct kvm_run *run;
kvm_run            67 tools/testing/selftests/kvm/x86_64/vmx_close_while_nested_test.c 		volatile struct kvm_run *run = vcpu_state(vm, VCPU_ID);
kvm_run            77 tools/testing/selftests/kvm/x86_64/vmx_dirty_log_test.c 	struct kvm_run *run;
kvm_run           142 tools/testing/selftests/kvm/x86_64/vmx_tsc_adjust_test.c 		volatile struct kvm_run *run = vcpu_state(vm, VCPU_ID);
kvm_run           682 virt/kvm/arm/arm.c int kvm_arch_vcpu_ioctl_run(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run            83 virt/kvm/arm/mmio.c int kvm_handle_mmio_return(struct kvm_vcpu *vcpu, struct kvm_run *run)
kvm_run           156 virt/kvm/arm/mmio.c int io_mem_abort(struct kvm_vcpu *vcpu, struct kvm_run *run,
kvm_run          1908 virt/kvm/arm/mmu.c int kvm_handle_guest_abort(struct kvm_vcpu *vcpu, struct kvm_run *run)
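For orientation, every call site indexed above ultimately operates on the same structure that userspace maps from the vCPU file descriptor (compare the mmap and the sizeof(struct kvm_run) check in tools/testing/selftests/kvm/lib/kvm_util.c listed above, and the in-kernel `vcpu->run` pointer from include/linux/kvm_host.h). Below is a minimal userspace sketch, not taken from the tree, of how that shared kvm_run area is obtained and consumed; guest memory and register setup as well as most error handling are omitted, so it is illustrative only.

/*
 * Minimal sketch: mmap() the kvm_run area from the vCPU fd and loop on
 * KVM_RUN, reading run->exit_reason after each return. Guest setup is
 * deliberately left out; a real VMM must configure memory and registers
 * before the first KVM_RUN.
 */
#include <fcntl.h>
#include <linux/kvm.h>
#include <stdio.h>
#include <stdlib.h>
#include <sys/ioctl.h>
#include <sys/mman.h>

int main(void)
{
	int kvm = open("/dev/kvm", O_RDWR | O_CLOEXEC);
	int vm = ioctl(kvm, KVM_CREATE_VM, 0);
	int vcpu = ioctl(vm, KVM_CREATE_VCPU, 0);

	/* The shared area is at least sizeof(struct kvm_run) bytes. */
	long mmap_size = ioctl(kvm, KVM_GET_VCPU_MMAP_SIZE, 0);
	struct kvm_run *run = mmap(NULL, mmap_size, PROT_READ | PROT_WRITE,
				   MAP_SHARED, vcpu, 0);

	/* ... guest memory and initial register state would be set up here ... */

	for (;;) {
		if (ioctl(vcpu, KVM_RUN, 0) < 0) {
			perror("KVM_RUN");
			exit(1);
		}
		switch (run->exit_reason) {
		case KVM_EXIT_IO:
		case KVM_EXIT_MMIO:
			/* Emulate the access described in run->io / run->mmio. */
			break;
		case KVM_EXIT_FAIL_ENTRY:
		case KVM_EXIT_INTERNAL_ERROR:
		case KVM_EXIT_SHUTDOWN:
			return 1;
		default:
			fprintf(stderr, "unhandled exit %u\n", run->exit_reason);
			return 1;
		}
	}
}

The kernel-side handlers in the listing (handle_exit, kvm_handle_mmio_return, kvm_arch_vcpu_ioctl_run, and the rest) fill in or read the same mmap'd structure, which is why several of them simply take it from vcpu->run rather than as an explicit parameter.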