MSR_VSX            61 arch/powerpc/include/asm/switch_to.h 	msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);
MSR_VSX           130 arch/powerpc/kernel/process.c 		newmsr |= MSR_VSX;
MSR_VSX           150 arch/powerpc/kernel/process.c 		newmsr &= ~MSR_VSX;
MSR_VSX           168 arch/powerpc/kernel/process.c 		msr &= ~MSR_VSX;
MSR_VSX           265 arch/powerpc/kernel/process.c 		msr &= ~MSR_VSX;
MSR_VSX           347 arch/powerpc/kernel/process.c 	WARN_ON((msr & MSR_VSX) && !((msr & MSR_FP) && (msr & MSR_VEC)));
MSR_VSX           360 arch/powerpc/kernel/process.c 	msr_check_and_set(MSR_FP|MSR_VEC|MSR_VSX);
MSR_VSX           362 arch/powerpc/kernel/process.c 	msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);
MSR_VSX           371 arch/powerpc/kernel/process.c 	cpumsr = msr_check_and_set(MSR_FP|MSR_VEC|MSR_VSX);
MSR_VSX           374 arch/powerpc/kernel/process.c 	    (current->thread.regs->msr & (MSR_VSX|MSR_VEC|MSR_FP))) {
MSR_VSX           395 arch/powerpc/kernel/process.c 		if (tsk->thread.regs->msr & (MSR_VSX|MSR_VEC|MSR_FP)) {
MSR_VSX           468 arch/powerpc/kernel/process.c 		msr_all_available |= MSR_VSX;
MSR_VSX           495 arch/powerpc/kernel/process.c 	WARN_ON((usermsr & MSR_VSX) && !((usermsr & MSR_FP) && (usermsr & MSR_VEC)));
MSR_VSX           547 arch/powerpc/kernel/process.c 		msr |= MSR_VSX;
MSR_VSX           569 arch/powerpc/kernel/process.c 	WARN_ON((usermsr & MSR_VSX) && !((usermsr & MSR_FP) && (usermsr & MSR_VEC)));
MSR_VSX           978 arch/powerpc/kernel/process.c 	new->thread.regs->msr &= ~(MSR_FP | MSR_VEC | MSR_VSX);
MSR_VSX          1032 arch/powerpc/kernel/process.c 	msr_diff &= MSR_FP | MSR_VEC | MSR_VSX;
MSR_VSX          1308 arch/powerpc/kernel/process.c 	{MSR_VSX,	"VSX"},
MSR_VSX          1641 arch/powerpc/kernel/process.c 	childregs->msr &= ~(MSR_FP|MSR_VEC|MSR_VSX);
MSR_VSX           427 arch/powerpc/kernel/signal_32.c 	msr &= ~MSR_VSX;
MSR_VSX           439 arch/powerpc/kernel/signal_32.c 		msr |= MSR_VSX;
MSR_VSX           576 arch/powerpc/kernel/signal_32.c 		if (msr & MSR_VSX) {
MSR_VSX           585 arch/powerpc/kernel/signal_32.c 		msr |= MSR_VSX;
MSR_VSX           684 arch/powerpc/kernel/signal_32.c 	regs->msr &= ~MSR_VSX;
MSR_VSX           685 arch/powerpc/kernel/signal_32.c 	if (msr & MSR_VSX) {
MSR_VSX           796 arch/powerpc/kernel/signal_32.c 	regs->msr &= ~MSR_VSX;
MSR_VSX           797 arch/powerpc/kernel/signal_32.c 	if (msr & MSR_VSX) {
MSR_VSX          1091 arch/powerpc/kernel/signal_32.c 	    (new_msr & MSR_VSX))
MSR_VSX           150 arch/powerpc/kernel/signal_64.c 	msr &= ~MSR_VSX;
MSR_VSX           164 arch/powerpc/kernel/signal_64.c 		msr |= MSR_VSX;
MSR_VSX           223 arch/powerpc/kernel/signal_64.c 	msr |= tsk->thread.ckpt_regs.msr & (MSR_FP | MSR_VEC | MSR_VSX);
MSR_VSX           288 arch/powerpc/kernel/signal_64.c 		if (msr & MSR_VSX)
MSR_VSX           296 arch/powerpc/kernel/signal_64.c 		msr |= MSR_VSX;
MSR_VSX           369 arch/powerpc/kernel/signal_64.c 	regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX);
MSR_VSX           402 arch/powerpc/kernel/signal_64.c 	if ((msr & MSR_VSX) != 0) {
MSR_VSX           489 arch/powerpc/kernel/signal_64.c 	regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX);
MSR_VSX           535 arch/powerpc/kernel/signal_64.c 	if (v_regs && ((msr & MSR_VSX) != 0)) {
MSR_VSX           657 arch/powerpc/kernel/signal_64.c 	    (new_msr & MSR_VSX))
MSR_VSX           966 arch/powerpc/kernel/traps.c 	msr_mask = MSR_VSX;
MSR_VSX           181 arch/powerpc/kvm/book3s_emulate.c 	kvmppc_giveup_ext(vcpu, MSR_VSX);
MSR_VSX          3561 arch/powerpc/kvm/book3s_hv.c 	msr_check_and_set(MSR_FP | MSR_VEC | MSR_VSX);
MSR_VSX          3667 arch/powerpc/kvm/book3s_hv.c 	msr_check_and_set(MSR_FP | MSR_VEC | MSR_VSX);
MSR_VSX           136 arch/powerpc/kvm/book3s_pr.c 	kvmppc_giveup_ext(vcpu, MSR_FP | MSR_VEC | MSR_VSX);
MSR_VSX           325 arch/powerpc/kvm/book3s_pr.c 		(MSR_FP | MSR_VEC | MSR_VSX);
MSR_VSX           348 arch/powerpc/kvm/book3s_pr.c 	kvmppc_giveup_ext(vcpu, MSR_VSX);
MSR_VSX           792 arch/powerpc/kvm/book3s_pr.c 	if (msr & MSR_VSX)
MSR_VSX           822 arch/powerpc/kvm/book3s_pr.c 	vcpu->arch.guest_owned_ext &= ~(msr | MSR_VSX);
MSR_VSX           860 arch/powerpc/kvm/book3s_pr.c 	if (msr == MSR_VSX) {
MSR_VSX           874 arch/powerpc/kvm/book3s_pr.c 		msr = MSR_FP | MSR_VEC | MSR_VSX;
MSR_VSX          1355 arch/powerpc/kvm/book3s_pr.c 			ext_msr = MSR_VSX;
MSR_VSX          1849 arch/powerpc/kvm/book3s_pr.c 	kvmppc_giveup_ext(vcpu, MSR_FP | MSR_VEC | MSR_VSX);
MSR_VSX            42 arch/powerpc/kvm/emulate_loadstore.c 	if (!(kvmppc_get_msr(vcpu) & MSR_VSX)) {
MSR_VSX           317 arch/powerpc/kvm/emulate_loadstore.c 						MSR_VSX);
MSR_VSX          1178 arch/powerpc/kvm/powerpc.c 			vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu, MSR_VSX);
MSR_VSX          2977 arch/powerpc/lib/sstep.c 		unsigned long msrbit = MSR_VSX;
MSR_VSX          3048 arch/powerpc/lib/sstep.c 		unsigned long msrbit = MSR_VSX;
MSR_VSX            79 arch/powerpc/lib/test_emulate_step.c 	regs->msr |= MSR_VSX;
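
The recurring pattern in these hits is the dependency the WARN_ON() lines at process.c 347, 495, and 569 assert: MSR_VSX may only be set when MSR_FP and MSR_VEC are both set, since the VSX register file overlays the FP and VMX register files. Code that enables VSX sets all three bits together (msr_check_and_set(MSR_FP|MSR_VEC|MSR_VSX)), and code that drops FP or VEC drops VSX with it. Below is a minimal user-space sketch of that invariant, not kernel code; the bit positions are taken from arch/powerpc/include/asm/reg.h (MSR_FP_LG = 13, MSR_VSX_LG = 23, MSR_VEC_LG = 25), and the helper names are hypothetical.

```c
#include <assert.h>
#include <stdio.h>

/* MSR facility bits, positions as in arch/powerpc/include/asm/reg.h. */
#define MSR_FP  (1UL << 13)   /* floating point available */
#define MSR_VSX (1UL << 23)   /* VSX available */
#define MSR_VEC (1UL << 25)   /* AltiVec/VMX available */

/* Mirror of the sanity check repeated in arch/powerpc/kernel/process.c:
 * VSX set without both FP and VEC set is a bug. */
static void check_vsx_invariant(unsigned long msr)
{
	assert(!((msr & MSR_VSX) && !((msr & MSR_FP) && (msr & MSR_VEC))));
}

/* Enabling VSX drags FP and VEC along, as msr_check_and_set()
 * is called with all three bits in the listing above. */
static unsigned long enable_vsx(unsigned long msr)
{
	return msr | MSR_FP | MSR_VEC | MSR_VSX;
}

/* Dropping FP must also drop VSX, matching the signal and
 * copy_thread paths that clear MSR_FP|MSR_VEC|MSR_VSX together. */
static unsigned long disable_fp(unsigned long msr)
{
	return msr & ~(MSR_FP | MSR_VSX);
}

int main(void)
{
	unsigned long msr = 0;

	msr = enable_vsx(msr);
	check_vsx_invariant(msr);

	msr = disable_fp(msr);
	check_vsx_invariant(msr);

	printf("final msr = 0x%lx\n", msr);
	return 0;
}
```

The same ordering shows up in the KVM hits: kvmppc_giveup_ext() at book3s_pr.c 822 clears MSR_VSX whenever any of the overlapping facilities is given up, and the load path at book3s_pr.c 874 loads FP, VEC, and VSX as a unit when the guest asks for VSX.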