kvm_cpu_context 144 arch/arm/include/asm/kvm_host.h struct kvm_cpu_context host_ctxt;
kvm_cpu_context 149 arch/arm/include/asm/kvm_host.h static inline void kvm_init_host_cpu_context(struct kvm_cpu_context *cpu_ctxt)
kvm_cpu_context 163 arch/arm/include/asm/kvm_host.h struct kvm_cpu_context ctxt;
kvm_cpu_context 178 arch/arm/include/asm/kvm_host.h struct kvm_cpu_context *host_cpu_context;
kvm_cpu_context 102 arch/arm/include/asm/kvm_hyp.h void __sysreg_save_state(struct kvm_cpu_context *ctxt);
kvm_cpu_context 103 arch/arm/include/asm/kvm_hyp.h void __sysreg_restore_state(struct kvm_cpu_context *ctxt);
kvm_cpu_context 119 arch/arm/include/asm/kvm_hyp.h void __hyp_text __banked_save_state(struct kvm_cpu_context *ctxt);
kvm_cpu_context 120 arch/arm/include/asm/kvm_hyp.h void __hyp_text __banked_restore_state(struct kvm_cpu_context *ctxt);
kvm_cpu_context 123 arch/arm/include/asm/kvm_hyp.h struct kvm_cpu_context *host);
kvm_cpu_context 173 arch/arm/kernel/asm-offsets.c DEFINE(CPU_CTXT_VFP, offsetof(struct kvm_cpu_context, vfp));
kvm_cpu_context 174 arch/arm/kernel/asm-offsets.c DEFINE(CPU_CTXT_GP_REGS, offsetof(struct kvm_cpu_context, gp_regs));
kvm_cpu_context 18 arch/arm/kvm/hyp/banked-sr.c void __hyp_text __banked_save_state(struct kvm_cpu_context *ctxt)
kvm_cpu_context 45 arch/arm/kvm/hyp/banked-sr.c void __hyp_text __banked_restore_state(struct kvm_cpu_context *ctxt)
kvm_cpu_context 12 arch/arm/kvm/hyp/cp15-sr.c static u64 *cp15_64(struct kvm_cpu_context *ctxt, int idx)
kvm_cpu_context 17 arch/arm/kvm/hyp/cp15-sr.c void __hyp_text __sysreg_save_state(struct kvm_cpu_context *ctxt)
kvm_cpu_context 45 arch/arm/kvm/hyp/cp15-sr.c void __hyp_text __sysreg_restore_state(struct kvm_cpu_context *ctxt)
kvm_cpu_context 148 arch/arm/kvm/hyp/switch.c struct kvm_cpu_context *host_ctxt;
kvm_cpu_context 149 arch/arm/kvm/hyp/switch.c struct kvm_cpu_context *guest_ctxt;
kvm_cpu_context 227 arch/arm/kvm/hyp/switch.c struct kvm_cpu_context *host_ctxt;
kvm_cpu_context 231 arch/arm64/include/asm/kvm_host.h struct kvm_cpu_context host_ctxt;
kvm_cpu_context 245 arch/arm64/include/asm/kvm_host.h struct kvm_cpu_context ctxt;
kvm_cpu_context 280 arch/arm64/include/asm/kvm_host.h struct kvm_cpu_context *host_cpu_context;
kvm_cpu_context 489 arch/arm64/include/asm/kvm_host.h static inline void kvm_init_host_cpu_context(struct kvm_cpu_context *cpu_ctxt)
kvm_cpu_context 63 arch/arm64/include/asm/kvm_hyp.h void __sysreg_save_state_nvhe(struct kvm_cpu_context *ctxt);
kvm_cpu_context 64 arch/arm64/include/asm/kvm_hyp.h void __sysreg_restore_state_nvhe(struct kvm_cpu_context *ctxt);
kvm_cpu_context 65 arch/arm64/include/asm/kvm_hyp.h void sysreg_save_host_state_vhe(struct kvm_cpu_context *ctxt);
kvm_cpu_context 66 arch/arm64/include/asm/kvm_hyp.h void sysreg_restore_host_state_vhe(struct kvm_cpu_context *ctxt);
kvm_cpu_context 67 arch/arm64/include/asm/kvm_hyp.h void sysreg_save_guest_state_vhe(struct kvm_cpu_context *ctxt);
kvm_cpu_context 68 arch/arm64/include/asm/kvm_hyp.h void sysreg_restore_guest_state_vhe(struct kvm_cpu_context *ctxt);
kvm_cpu_context 81 arch/arm64/include/asm/kvm_hyp.h u64 __guest_enter(struct kvm_vcpu *vcpu, struct kvm_cpu_context *host_ctxt);
kvm_cpu_context 96 arch/arm64/kernel/asm-offsets.c DEFINE(CPU_GP_REGS, offsetof(struct kvm_cpu_context, gp_regs));
kvm_cpu_context 97 arch/arm64/kernel/asm-offsets.c DEFINE(CPU_APIAKEYLO_EL1, offsetof(struct kvm_cpu_context, sys_regs[APIAKEYLO_EL1]));
kvm_cpu_context 98 arch/arm64/kernel/asm-offsets.c DEFINE(CPU_APIBKEYLO_EL1, offsetof(struct kvm_cpu_context, sys_regs[APIBKEYLO_EL1]));
kvm_cpu_context 99 arch/arm64/kernel/asm-offsets.c DEFINE(CPU_APDAKEYLO_EL1, offsetof(struct kvm_cpu_context, sys_regs[APDAKEYLO_EL1]));
kvm_cpu_context 100 arch/arm64/kernel/asm-offsets.c DEFINE(CPU_APDBKEYLO_EL1, offsetof(struct kvm_cpu_context, sys_regs[APDBKEYLO_EL1]));
kvm_cpu_context 101 arch/arm64/kernel/asm-offsets.c DEFINE(CPU_APGAKEYLO_EL1, offsetof(struct kvm_cpu_context, sys_regs[APGAKEYLO_EL1]));
kvm_cpu_context 103 arch/arm64/kernel/asm-offsets.c DEFINE(HOST_CONTEXT_VCPU, offsetof(struct kvm_cpu_context, __hyp_running_vcpu));
kvm_cpu_context 134 arch/arm64/kvm/hyp/debug-sr.c struct kvm_cpu_context *ctxt)
kvm_cpu_context 153 arch/arm64/kvm/hyp/debug-sr.c struct kvm_cpu_context *ctxt)
kvm_cpu_context 173 arch/arm64/kvm/hyp/debug-sr.c struct kvm_cpu_context *host_ctxt;
kvm_cpu_context 174 arch/arm64/kvm/hyp/debug-sr.c struct kvm_cpu_context *guest_ctxt;
kvm_cpu_context 199 arch/arm64/kvm/hyp/debug-sr.c struct kvm_cpu_context *host_ctxt;
kvm_cpu_context 200 arch/arm64/kvm/hyp/debug-sr.c struct kvm_cpu_context *guest_ctxt;
kvm_cpu_context 561 arch/arm64/kvm/hyp/switch.c static bool __hyp_text __pmu_switch_to_guest(struct kvm_cpu_context *host_ctxt)
kvm_cpu_context 581 arch/arm64/kvm/hyp/switch.c static void __hyp_text __pmu_switch_to_host(struct kvm_cpu_context *host_ctxt)
kvm_cpu_context 599 arch/arm64/kvm/hyp/switch.c struct kvm_cpu_context *host_ctxt;
kvm_cpu_context 600 arch/arm64/kvm/hyp/switch.c struct kvm_cpu_context *guest_ctxt;
kvm_cpu_context 655 arch/arm64/kvm/hyp/switch.c struct kvm_cpu_context *host_ctxt;
kvm_cpu_context 656 arch/arm64/kvm/hyp/switch.c struct kvm_cpu_context *guest_ctxt;
kvm_cpu_context 738 arch/arm64/kvm/hyp/switch.c struct kvm_cpu_context *__host_ctxt)
kvm_cpu_context 766 arch/arm64/kvm/hyp/switch.c struct kvm_cpu_context *host_ctxt)
kvm_cpu_context 781 arch/arm64/kvm/hyp/switch.c void __hyp_text __noreturn hyp_panic(struct kvm_cpu_context *host_ctxt)
kvm_cpu_context 26 arch/arm64/kvm/hyp/sysreg-sr.c static void __hyp_text __sysreg_save_common_state(struct kvm_cpu_context *ctxt)
kvm_cpu_context 37 arch/arm64/kvm/hyp/sysreg-sr.c static void __hyp_text __sysreg_save_user_state(struct kvm_cpu_context *ctxt)
kvm_cpu_context 43 arch/arm64/kvm/hyp/sysreg-sr.c static void __hyp_text __sysreg_save_el1_state(struct kvm_cpu_context *ctxt)
kvm_cpu_context 69 arch/arm64/kvm/hyp/sysreg-sr.c static void __hyp_text __sysreg_save_el2_return_state(struct kvm_cpu_context *ctxt)
kvm_cpu_context 78 arch/arm64/kvm/hyp/sysreg-sr.c void __hyp_text __sysreg_save_state_nvhe(struct kvm_cpu_context *ctxt)
kvm_cpu_context 86 arch/arm64/kvm/hyp/sysreg-sr.c void sysreg_save_host_state_vhe(struct kvm_cpu_context *ctxt)
kvm_cpu_context 92 arch/arm64/kvm/hyp/sysreg-sr.c void sysreg_save_guest_state_vhe(struct kvm_cpu_context *ctxt)
kvm_cpu_context 99 arch/arm64/kvm/hyp/sysreg-sr.c static void __hyp_text __sysreg_restore_common_state(struct kvm_cpu_context *ctxt)
kvm_cpu_context 110 arch/arm64/kvm/hyp/sysreg-sr.c static void __hyp_text __sysreg_restore_user_state(struct kvm_cpu_context *ctxt)
kvm_cpu_context 116 arch/arm64/kvm/hyp/sysreg-sr.c static void __hyp_text __sysreg_restore_el1_state(struct kvm_cpu_context *ctxt)
kvm_cpu_context 144 arch/arm64/kvm/hyp/sysreg-sr.c __sysreg_restore_el2_return_state(struct kvm_cpu_context *ctxt)
kvm_cpu_context 170 arch/arm64/kvm/hyp/sysreg-sr.c void __hyp_text __sysreg_restore_state_nvhe(struct kvm_cpu_context *ctxt)
kvm_cpu_context 178 arch/arm64/kvm/hyp/sysreg-sr.c void sysreg_restore_host_state_vhe(struct kvm_cpu_context *ctxt)
kvm_cpu_context 184 arch/arm64/kvm/hyp/sysreg-sr.c void sysreg_restore_guest_state_vhe(struct kvm_cpu_context *ctxt)
kvm_cpu_context 248 arch/arm64/kvm/hyp/sysreg-sr.c struct kvm_cpu_context *host_ctxt = vcpu->arch.host_cpu_context;
kvm_cpu_context 249 arch/arm64/kvm/hyp/sysreg-sr.c struct kvm_cpu_context *guest_ctxt = &vcpu->arch.ctxt;
kvm_cpu_context 284 arch/arm64/kvm/hyp/sysreg-sr.c struct kvm_cpu_context *host_ctxt = vcpu->arch.host_cpu_context;
kvm_cpu_context 285 arch/arm64/kvm/hyp/sysreg-sr.c struct kvm_cpu_context *guest_ctxt = &vcpu->arch.ctxt;
kvm_cpu_context 166 arch/arm64/kvm/pmu.c struct kvm_cpu_context *host_ctxt;
kvm_cpu_context 187 arch/arm64/kvm/pmu.c struct kvm_cpu_context *host_ctxt;
kvm_cpu_context 400 virt/kvm/arm/arm.c struct kvm_cpu_context __maybe_unused *ctxt = vcpu->arch.host_cpu_context;
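
Taken together, the asm-offsets.c entries and the field accesses above indicate the rough shape of struct kvm_cpu_context on arm64: a gp_regs block, a sys_regs[] array (including the pointer-auth key slots APxxKEYLO_EL1), and a __hyp_running_vcpu back-pointer used by the hyp panic path, while 32-bit arm additionally exposes a vfp member. The block below is a compile-standalone sketch reconstructed only from those references; the stand-in types, NR_SYS_REGS value, and member ordering are assumptions, not the kernel's actual definitions.

/*
 * Illustrative sketch of the layout implied by the cross-references above.
 * All types here are stand-ins so the snippet compiles on its own; they are
 * NOT the kernel's definitions.
 */
#include <stdint.h>

typedef uint64_t u64;

#define NR_SYS_REGS 128                 /* stand-in; the kernel defines the real count */

struct kvm_regs { u64 regs[31]; u64 sp; u64 pc; u64 pstate; };  /* stand-in */
struct kvm_vcpu;                                                /* opaque here */

/* arm64 flavour: asm-offsets.c exposes CPU_GP_REGS, the APxxKEYLO_EL1 slots
 * inside sys_regs[], and HOST_CONTEXT_VCPU for __hyp_running_vcpu. */
struct kvm_cpu_context {
	struct kvm_regs gp_regs;              /* general-purpose regs, SP, PC, PSTATE */
	u64 sys_regs[NR_SYS_REGS];            /* EL1 sysregs, incl. pointer-auth keys */
	struct kvm_vcpu *__hyp_running_vcpu;  /* set while this host CPU runs a guest */
};

Each physical CPU keeps one such context for the host (host_ctxt / host_cpu_context in the listing) and each vcpu embeds one for the guest (vcpu->arch.ctxt), which is why the save/restore helpers all take a plain struct kvm_cpu_context * rather than a vcpu.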
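The switch.c and sysreg-sr.c references pair these helpers around __guest_enter(): save the host context, install the guest context, run the guest, then reverse the two on exit, with __pmu_switch_to_guest()/__pmu_switch_to_host() bracketing the whole thing. The sketch below (continuing the stand-in types from the previous block) shows that ordering for the non-VHE path only; it is a simplified assumption based on the signatures in the listing, and it omits kern_hyp_va() translation, FPSIMD, debug, VGIC/timer handling, and the exit-code fixup loop the real code performs.

/* Entry points taken from the listing (kvm_hyp.h and hyp/switch.c); their
 * bodies live in the kernel, not here. */
void __sysreg_save_state_nvhe(struct kvm_cpu_context *ctxt);
void __sysreg_restore_state_nvhe(struct kvm_cpu_context *ctxt);
u64  __guest_enter(struct kvm_vcpu *vcpu, struct kvm_cpu_context *host_ctxt);
int  __pmu_switch_to_guest(struct kvm_cpu_context *host_ctxt);
void __pmu_switch_to_host(struct kvm_cpu_context *host_ctxt);

/* Minimal stand-in for the vcpu fields the listing shows being used
 * (vcpu->arch.ctxt and vcpu->arch.host_cpu_context). */
struct kvm_vcpu_arch {
	struct kvm_cpu_context ctxt;               /* guest context */
	struct kvm_cpu_context *host_cpu_context;  /* this CPU's host context */
};
struct kvm_vcpu { struct kvm_vcpu_arch arch; };

/* Hedged sketch of the nVHE world-switch ordering; not the kernel code. */
u64 sketch_vcpu_run_nvhe(struct kvm_vcpu *vcpu)
{
	struct kvm_cpu_context *host_ctxt;
	struct kvm_cpu_context *guest_ctxt;
	u64 exit_code;
	int pmu_switched;

	host_ctxt = vcpu->arch.host_cpu_context;     /* real code maps this with kern_hyp_va() */
	host_ctxt->__hyp_running_vcpu = vcpu;        /* lets hyp_panic(host_ctxt) find the vcpu */
	guest_ctxt = &vcpu->arch.ctxt;

	pmu_switched = __pmu_switch_to_guest(host_ctxt);

	__sysreg_save_state_nvhe(host_ctxt);         /* stash host EL1 sysregs */
	__sysreg_restore_state_nvhe(guest_ctxt);     /* install guest EL1 sysregs */

	exit_code = __guest_enter(vcpu, host_ctxt);  /* run the guest until it traps */

	__sysreg_save_state_nvhe(guest_ctxt);        /* capture guest state at exit */
	__sysreg_restore_state_nvhe(host_ctxt);      /* put the host back */

	if (pmu_switched)
		__pmu_switch_to_host(host_ctxt);

	return exit_code;
}

On the VHE side the listing splits the same job into separate host and guest helpers (sysreg_save_host_state_vhe(), sysreg_restore_guest_state_vhe(), and their counterparts), since a VHE host keeps running with its own EL1 state live and only part of the context needs to move at each switch.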