giveup_ext        318 arch/powerpc/include/asm/kvm_ppc.h 	void (*giveup_ext)(struct kvm_vcpu *vcpu, ulong msr);
giveup_ext       2091 arch/powerpc/kvm/book3s_pr.c 	.giveup_ext = kvmppc_giveup_ext,
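The first two hits define the interface and its only registration in this listing: giveup_ext is an optional callback in struct kvmppc_ops (arch/powerpc/include/asm/kvm_ppc.h:318), and only the Book3S PR backend points it at kvmppc_giveup_ext (book3s_pr.c:2091). A minimal sketch of that shape, with surrounding members elided; the ops-table name kvm_ops_pr is assumed from the usual book3s_pr.c layout and does not appear in the hits above:

        /* arch/powerpc/include/asm/kvm_ppc.h: one slot in the per-backend ops table. */
        struct kvmppc_ops {
                /* ... other callbacks elided ... */
                void (*giveup_ext)(struct kvm_vcpu *vcpu, ulong msr);
                /* ... */
        };

        /* arch/powerpc/kvm/book3s_pr.c: the PR backend is the only ops table in
         * this listing that provides the hook (table name assumed). */
        static struct kvmppc_ops kvm_ops_pr = {
                /* ... */
                .giveup_ext = kvmppc_giveup_ext,
                /* ... */
        };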
giveup_ext        246 arch/powerpc/kvm/emulate_loadstore.c 			if (vcpu->kvm->arch.kvm_ops->giveup_ext)
giveup_ext        247 arch/powerpc/kvm/emulate_loadstore.c 				vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu,
giveup_ext        270 arch/powerpc/kvm/emulate_loadstore.c 			if (vcpu->kvm->arch.kvm_ops->giveup_ext)
giveup_ext        271 arch/powerpc/kvm/emulate_loadstore.c 				vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu,
giveup_ext        315 arch/powerpc/kvm/emulate_loadstore.c 			if (vcpu->kvm->arch.kvm_ops->giveup_ext)
giveup_ext        316 arch/powerpc/kvm/emulate_loadstore.c 				vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu,
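Each emulate_loadstore.c hit uses the same guard-then-call idiom: check that the current backend actually supplies giveup_ext before invoking it (the MSR argument of each call is truncated at the line break in this listing). A hypothetical wrapper capturing that idiom; the helper name is invented for illustration and does not exist in these sources:

        #include <linux/kvm_host.h>
        #include <asm/kvm_ppc.h>

        /* Hypothetical helper: NULL-check the optional per-backend callback,
         * then ask the backend to give up the facility named by the MSR bit(s),
         * so the guest register image in vcpu->arch is current before the
         * emulation code reads or writes it. */
        static inline void kvmppc_maybe_giveup_ext(struct kvm_vcpu *vcpu, ulong msr)
        {
                if (vcpu->kvm->arch.kvm_ops->giveup_ext)
                        vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu, msr);
        }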
giveup_ext       1161 arch/powerpc/kvm/powerpc.c 		if (vcpu->kvm->arch.kvm_ops->giveup_ext)
giveup_ext       1162 arch/powerpc/kvm/powerpc.c 			vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu, MSR_FP);
giveup_ext       1177 arch/powerpc/kvm/powerpc.c 		if (vcpu->kvm->arch.kvm_ops->giveup_ext)
giveup_ext       1178 arch/powerpc/kvm/powerpc.c 			vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu, MSR_VSX);
giveup_ext       1194 arch/powerpc/kvm/powerpc.c 		if (vcpu->kvm->arch.kvm_ops->giveup_ext)
giveup_ext       1195 arch/powerpc/kvm/powerpc.c 			vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu, MSR_VEC);
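In powerpc.c the same guard appears with the facility spelled out: the caller flushes MSR_FP, MSR_VSX, or MSR_VEC before it updates the corresponding guest registers in software, which for the PR backend lets kvmppc_giveup_ext save the live register contents back into the vcpu struct first. Using the hypothetical wrapper sketched above, those three sites would read:

        /* Flush the named facility before touching its guest register image
         * (MSR_FP, MSR_VSX, MSR_VEC are the bits seen at powerpc.c:1161-1195). */
        kvmppc_maybe_giveup_ext(vcpu, MSR_FP);
        kvmppc_maybe_giveup_ext(vcpu, MSR_VSX);
        kvmppc_maybe_giveup_ext(vcpu, MSR_VEC);

The NULL check matters because only the PR ops table in this listing sets .giveup_ext; a backend that leaves the slot NULL simply skips the flush.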