events_guest      227 arch/arm64/include/asm/kvm_host.h 	u32 events_guest;
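The kvm_host.h hit is the field declaration itself. A minimal sketch of its surrounding context, assuming events_guest sits next to events_host in a small per-CPU event-mask structure exposed as the pmu_events member (the member path ctx->pmu_events.events_guest / host->pmu_events.events_guest in the pmu.c hits below is what suggests this layout; the struct and container names are assumptions, only the two u32 fields are confirmed by the listing):

	/* Hypothetical reconstruction of the declaration context. */
	struct kvm_pmu_events {
		u32 events_host;	/* counters that should count while the host runs  */
		u32 events_guest;	/* counters that should count while the guest runs */
	};

	struct kvm_host_data {
		/* ... other per-CPU host state ... */
		struct kvm_pmu_events pmu_events;
	};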
events_guest      572 arch/arm64/kvm/hyp/switch.c 	if (pmu->events_guest)
events_guest      573 arch/arm64/kvm/hyp/switch.c 		write_sysreg(pmu->events_guest, pmcntenset_el0);
events_guest      575 arch/arm64/kvm/hyp/switch.c 	return (pmu->events_host || pmu->events_guest);
events_guest      589 arch/arm64/kvm/hyp/switch.c 	if (pmu->events_guest)
events_guest      590 arch/arm64/kvm/hyp/switch.c 		write_sysreg(pmu->events_guest, pmcntenclr_el0);
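The switch.c hits are the hyp world-switch path: on the way into the guest the guest-only counters are enabled via PMCNTENSET_EL0, and on the way back they are masked again via PMCNTENCLR_EL0, with events_host handled symmetrically. A sketch of how the write_sysreg() calls pair up, assuming two helpers on either side of the guest run loop (the helper names, their parameter, and the events_host writes are illustrative; only the events_guest lines and the return statement appear in the listing):

	/* Illustrative helpers; the real signatures in switch.c may differ. */
	static bool __pmu_switch_to_guest(struct kvm_pmu_events *pmu)
	{
		if (pmu->events_host)
			write_sysreg(pmu->events_host, pmcntenclr_el0);	/* stop host-only counters   */

		if (pmu->events_guest)
			write_sysreg(pmu->events_guest, pmcntenset_el0);	/* start guest-only counters */

		/* Tell the caller whether anything was touched and needs undoing. */
		return (pmu->events_host || pmu->events_guest);
	}

	static void __pmu_switch_to_host(struct kvm_pmu_events *pmu)
	{
		if (pmu->events_guest)
			write_sysreg(pmu->events_guest, pmcntenclr_el0);	/* stop guest-only counters  */

		if (pmu->events_host)
			write_sysreg(pmu->events_host, pmcntenset_el0);	/* restart host-only counters */
	}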
events_guest       42 arch/arm64/kvm/pmu.c 		ctx->pmu_events.events_guest |= set;
events_guest       53 arch/arm64/kvm/pmu.c 	ctx->pmu_events.events_guest &= ~clr;
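The pmu.c:42/53 hits accumulate and clear counter bits in the per-CPU masks. A sketch of the update pair, assuming it is driven by the host perf subsystem when events carrying guest/host exclusion attributes are scheduled (the function names, the this_cpu_ptr() lookup, and the exclude_host/exclude_guest checks are assumptions; only the |= set and &= ~clr lines are from the listing):

	void kvm_set_pmu_events(u32 set, struct perf_event_attr *attr)
	{
		struct kvm_host_data *ctx = this_cpu_ptr(&kvm_host_data);

		if (!attr->exclude_host)
			ctx->pmu_events.events_host |= set;	/* count while the host runs  */
		if (!attr->exclude_guest)
			ctx->pmu_events.events_guest |= set;	/* count while the guest runs */
	}

	void kvm_clr_pmu_events(u32 clr)
	{
		struct kvm_host_data *ctx = this_cpu_ptr(&kvm_host_data);

		ctx->pmu_events.events_host &= ~clr;
		ctx->pmu_events.events_guest &= ~clr;
	}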
events_guest      168 arch/arm64/kvm/pmu.c 	u32 events_guest, events_host;
events_guest      175 arch/arm64/kvm/pmu.c 	events_guest = host->pmu_events.events_guest;
events_guest      178 arch/arm64/kvm/pmu.c 	kvm_vcpu_pmu_enable_el0(events_guest);
events_guest      189 arch/arm64/kvm/pmu.c 	u32 events_guest, events_host;
events_guest      196 arch/arm64/kvm/pmu.c 	events_guest = host->pmu_events.events_guest;
events_guest      200 arch/arm64/kvm/pmu.c 	kvm_vcpu_pmu_disable_el0(events_guest);
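The remaining pmu.c hits (lines 168-200) belong to two symmetric functions that adjust which counters may count at EL0 around guest entry and exit: the first enables the guest mask and disables the host mask, the second reverses that. A sketch under those assumptions (the outer function names and the per-CPU lookup are not in the listing; the local variables, the loads from host->pmu_events, and the enable/disable calls on events_guest are):

	void kvm_vcpu_pmu_restore_guest(struct kvm_vcpu *vcpu)
	{
		struct kvm_host_data *host = this_cpu_ptr(&kvm_host_data);
		u32 events_guest, events_host;

		events_guest = host->pmu_events.events_guest;
		events_host = host->pmu_events.events_host;

		kvm_vcpu_pmu_enable_el0(events_guest);	/* guest-only counters count at EL0 */
		kvm_vcpu_pmu_disable_el0(events_host);	/* host-only counters do not        */
	}

	void kvm_vcpu_pmu_restore_host(struct kvm_vcpu *vcpu)
	{
		struct kvm_host_data *host = this_cpu_ptr(&kvm_host_data);
		u32 events_guest, events_host;

		events_guest = host->pmu_events.events_guest;
		events_host = host->pmu_events.events_host;

		kvm_vcpu_pmu_enable_el0(events_host);	/* host-only counters count again   */
		kvm_vcpu_pmu_disable_el0(events_guest);	/* guest-only counters are masked   */
	}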