hv_vcpu           166 arch/x86/kvm/hyperv.c 	struct kvm_vcpu_hv *hv_vcpu = vcpu_to_hv_vcpu(vcpu);
hv_vcpu           173 arch/x86/kvm/hyperv.c 	for (idx = 0; idx < ARRAY_SIZE(hv_vcpu->stimer); idx++) {
hv_vcpu           174 arch/x86/kvm/hyperv.c 		stimer = &hv_vcpu->stimer[idx];
hv_vcpu           191 arch/x86/kvm/hyperv.c 	struct kvm_vcpu_hv *hv_vcpu = &vcpu->arch.hyperv;
hv_vcpu           193 arch/x86/kvm/hyperv.c 	hv_vcpu->exit.type = KVM_EXIT_HYPERV_SYNIC;
hv_vcpu           194 arch/x86/kvm/hyperv.c 	hv_vcpu->exit.u.synic.msr = msr;
hv_vcpu           195 arch/x86/kvm/hyperv.c 	hv_vcpu->exit.u.synic.control = synic->control;
hv_vcpu           196 arch/x86/kvm/hyperv.c 	hv_vcpu->exit.u.synic.evt_page = synic->evt_page;
hv_vcpu           197 arch/x86/kvm/hyperv.c 	hv_vcpu->exit.u.synic.msg_page = synic->msg_page;
hv_vcpu           674 arch/x86/kvm/hyperv.c 	struct kvm_vcpu_hv *hv_vcpu = vcpu_to_hv_vcpu(vcpu);
hv_vcpu           679 arch/x86/kvm/hyperv.c 	for (i = 0; i < ARRAY_SIZE(hv_vcpu->stimer); i++)
hv_vcpu           680 arch/x86/kvm/hyperv.c 		if (test_and_clear_bit(i, hv_vcpu->stimer_pending_bitmap)) {
hv_vcpu           681 arch/x86/kvm/hyperv.c 			stimer = &hv_vcpu->stimer[i];
hv_vcpu           704 arch/x86/kvm/hyperv.c 	struct kvm_vcpu_hv *hv_vcpu = vcpu_to_hv_vcpu(vcpu);
hv_vcpu           707 arch/x86/kvm/hyperv.c 	for (i = 0; i < ARRAY_SIZE(hv_vcpu->stimer); i++)
hv_vcpu           708 arch/x86/kvm/hyperv.c 		stimer_cleanup(&hv_vcpu->stimer[i]);
hv_vcpu           755 arch/x86/kvm/hyperv.c 	struct kvm_vcpu_hv *hv_vcpu = vcpu_to_hv_vcpu(vcpu);
hv_vcpu           758 arch/x86/kvm/hyperv.c 	synic_init(&hv_vcpu->synic);
hv_vcpu           760 arch/x86/kvm/hyperv.c 	bitmap_zero(hv_vcpu->stimer_pending_bitmap, HV_SYNIC_STIMER_COUNT);
hv_vcpu           761 arch/x86/kvm/hyperv.c 	for (i = 0; i < ARRAY_SIZE(hv_vcpu->stimer); i++)
hv_vcpu           762 arch/x86/kvm/hyperv.c 		stimer_init(&hv_vcpu->stimer[i], i);
hv_vcpu           767 arch/x86/kvm/hyperv.c 	struct kvm_vcpu_hv *hv_vcpu = vcpu_to_hv_vcpu(vcpu);
hv_vcpu           769 arch/x86/kvm/hyperv.c 	hv_vcpu->vp_index = kvm_vcpu_get_idx(vcpu);
hv_vcpu          1082 arch/x86/kvm/hyperv.c 	struct kvm_vcpu_hv *hv_vcpu = &vcpu->arch.hyperv;
hv_vcpu          1093 arch/x86/kvm/hyperv.c 		if (new_vp_index == hv_vcpu->vp_index)
hv_vcpu          1102 arch/x86/kvm/hyperv.c 		if (hv_vcpu->vp_index == vcpu_idx)
hv_vcpu          1107 arch/x86/kvm/hyperv.c 		hv_vcpu->vp_index = new_vp_index;
hv_vcpu          1115 arch/x86/kvm/hyperv.c 			hv_vcpu->hv_vapic = data;
hv_vcpu          1132 arch/x86/kvm/hyperv.c 		hv_vcpu->hv_vapic = data;
hv_vcpu          1149 arch/x86/kvm/hyperv.c 		hv_vcpu->runtime_offset = data - current_task_runtime_100ns();
hv_vcpu          1241 arch/x86/kvm/hyperv.c 	struct kvm_vcpu_hv *hv_vcpu = &vcpu->arch.hyperv;
hv_vcpu          1245 arch/x86/kvm/hyperv.c 		data = hv_vcpu->vp_index;
hv_vcpu          1254 arch/x86/kvm/hyperv.c 		data = hv_vcpu->hv_vapic;
hv_vcpu          1257 arch/x86/kvm/hyperv.c 		data = current_task_runtime_100ns() + hv_vcpu->runtime_offset;
hv_vcpu          1356 arch/x86/kvm/hyperv.c 	struct kvm_vcpu_hv *hv_vcpu = &current_vcpu->arch.hyperv;
hv_vcpu          1416 arch/x86/kvm/hyperv.c 	cpumask_clear(&hv_vcpu->tlb_flush);
hv_vcpu          1428 arch/x86/kvm/hyperv.c 				    vcpu_mask, &hv_vcpu->tlb_flush);
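
Taken together, the hyperv.c hits above touch every field of the per-vCPU Hyper-V context: vp_index, hv_vapic, runtime_offset, the SynIC and its synthetic timers, the stimer pending bitmap, the userspace exit record, and the TLB-flush scratch cpumask. The sketch below is reconstructed only from those accesses; field types are inferred, and the authoritative definition is struct kvm_vcpu_hv in arch/x86/include/asm/kvm_host.h, reached either as vcpu->arch.hyperv or via vcpu_to_hv_vcpu().

/* Sketch of the per-vCPU Hyper-V context implied by the hits above.
 * Field names come from the accesses shown; types and ordering are
 * inferred, not copied from the header.
 */
struct kvm_vcpu_hv {
	u32 vp_index;                       /* HV_X64_MSR_VP_INDEX */
	u64 hv_vapic;                       /* APIC assist page MSR value */
	s64 runtime_offset;                 /* VP_RUNTIME minus task runtime */
	struct kvm_vcpu_hv_synic synic;     /* synthetic interrupt controller */
	struct kvm_hyperv_exit exit;        /* filled before exiting to userspace */
	struct kvm_vcpu_hv_stimer stimer[HV_SYNIC_STIMER_COUNT];
	DECLARE_BITMAP(stimer_pending_bitmap, HV_SYNIC_STIMER_COUNT);
	cpumask_t tlb_flush;                /* scratch mask for the TLB-flush hypercalls */
};
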
hv_vcpu            31 arch/x86/kvm/hyperv.h static inline struct kvm_vcpu *hv_vcpu_to_vcpu(struct kvm_vcpu_hv *hv_vcpu)
hv_vcpu            35 arch/x86/kvm/hyperv.h 	arch = container_of(hv_vcpu, struct kvm_vcpu_arch, hyperv);
hv_vcpu            76 arch/x86/kvm/hyperv.h 	struct kvm_vcpu_hv *hv_vcpu;
hv_vcpu            78 arch/x86/kvm/hyperv.h 	hv_vcpu = container_of(stimer - stimer->index, struct kvm_vcpu_hv,
hv_vcpu            80 arch/x86/kvm/hyperv.h 	return hv_vcpu_to_vcpu(hv_vcpu);
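
The hyperv.h hits are fragments of the pointer-conversion helpers that the .c code relies on. Filled out from the lines shown (a sketch assuming the embedded layout confirmed by the &vcpu->arch.hyperv accesses above; the stimer[] array sits inside kvm_vcpu_hv), the round trip between a timer, its Hyper-V context, and the owning vCPU looks like this:

static inline struct kvm_vcpu_hv *vcpu_to_hv_vcpu(struct kvm_vcpu *vcpu)
{
	return &vcpu->arch.hyperv;
}

static inline struct kvm_vcpu *hv_vcpu_to_vcpu(struct kvm_vcpu_hv *hv_vcpu)
{
	struct kvm_vcpu_arch *arch;

	/* Walk outward: kvm_vcpu_hv is the 'hyperv' member of kvm_vcpu_arch,
	 * which in turn is the 'arch' member of kvm_vcpu.
	 */
	arch = container_of(hv_vcpu, struct kvm_vcpu_arch, hyperv);
	return container_of(arch, struct kvm_vcpu, arch);
}

static inline struct kvm_vcpu *stimer_to_vcpu(struct kvm_vcpu_hv_stimer *stimer)
{
	struct kvm_vcpu_hv *hv_vcpu;

	/* stimer->index is the timer's slot in the array, so
	 * stimer - stimer->index points at stimer[0]; container_of() then
	 * recovers the enclosing kvm_vcpu_hv, and hv_vcpu_to_vcpu() walks
	 * out to the kvm_vcpu.
	 */
	hv_vcpu = container_of(stimer - stimer->index, struct kvm_vcpu_hv,
			       stimer[0]);
	return hv_vcpu_to_vcpu(hv_vcpu);
}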