MSR_FP             35 arch/powerpc/include/asm/switch_to.h 	msr_check_and_clear(MSR_FP);
MSR_FP             61 arch/powerpc/include/asm/switch_to.h 	msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);
MSR_FP            129 arch/powerpc/kernel/process.c 	if (cpu_has_feature(CPU_FTR_VSX) && (bits & MSR_FP))
MSR_FP            149 arch/powerpc/kernel/process.c 	if (cpu_has_feature(CPU_FTR_VSX) && (bits & MSR_FP))
MSR_FP            165 arch/powerpc/kernel/process.c 	msr &= ~(MSR_FP|MSR_FE0|MSR_FE1);
MSR_FP            177 arch/powerpc/kernel/process.c 	msr_check_and_set(MSR_FP);
MSR_FP            179 arch/powerpc/kernel/process.c 	msr_check_and_clear(MSR_FP);
MSR_FP            199 arch/powerpc/kernel/process.c 		if (tsk->thread.regs->msr & MSR_FP) {
MSR_FP            221 arch/powerpc/kernel/process.c 	cpumsr = msr_check_and_set(MSR_FP);
MSR_FP            223 arch/powerpc/kernel/process.c 	if (current->thread.regs && (current->thread.regs->msr & MSR_FP)) {
MSR_FP            347 arch/powerpc/kernel/process.c 	WARN_ON((msr & MSR_VSX) && !((msr & MSR_FP) && (msr & MSR_VEC)));
MSR_FP            350 arch/powerpc/kernel/process.c 	if (msr & MSR_FP)
MSR_FP            360 arch/powerpc/kernel/process.c 	msr_check_and_set(MSR_FP|MSR_VEC|MSR_VSX);
MSR_FP            362 arch/powerpc/kernel/process.c 	msr_check_and_clear(MSR_FP|MSR_VEC|MSR_VSX);
MSR_FP            371 arch/powerpc/kernel/process.c 	cpumsr = msr_check_and_set(MSR_FP|MSR_VEC|MSR_VSX);
MSR_FP            374 arch/powerpc/kernel/process.c 	    (current->thread.regs->msr & (MSR_VSX|MSR_VEC|MSR_FP))) {
MSR_FP            395 arch/powerpc/kernel/process.c 		if (tsk->thread.regs->msr & (MSR_VSX|MSR_VEC|MSR_FP)) {
MSR_FP            460 arch/powerpc/kernel/process.c 	msr_all_available |= MSR_FP;
MSR_FP            495 arch/powerpc/kernel/process.c 	WARN_ON((usermsr & MSR_VSX) && !((usermsr & MSR_FP) && (usermsr & MSR_VEC)));
MSR_FP            498 arch/powerpc/kernel/process.c 	if (usermsr & MSR_FP)
MSR_FP            539 arch/powerpc/kernel/process.c 	if ((!(msr & MSR_FP)) && restore_fp(current))
MSR_FP            540 arch/powerpc/kernel/process.c 		msr |= MSR_FP | current->thread.fpexc_mode;
MSR_FP            545 arch/powerpc/kernel/process.c 	if ((msr & (MSR_FP | MSR_VEC)) == (MSR_FP | MSR_VEC) &&
MSR_FP            569 arch/powerpc/kernel/process.c 	WARN_ON((usermsr & MSR_VSX) && !((usermsr & MSR_FP) && (usermsr & MSR_VEC)));
MSR_FP            571 arch/powerpc/kernel/process.c 	if (usermsr & MSR_FP)
MSR_FP            865 arch/powerpc/kernel/process.c 	if ((thr->ckpt_regs.msr & MSR_FP) == 0)
MSR_FP            978 arch/powerpc/kernel/process.c 	new->thread.regs->msr &= ~(MSR_FP | MSR_VEC | MSR_VSX);
MSR_FP           1032 arch/powerpc/kernel/process.c 	msr_diff &= MSR_FP | MSR_VEC | MSR_VSX;
MSR_FP           1035 arch/powerpc/kernel/process.c 	if (msr_diff & MSR_FP)
MSR_FP           1314 arch/powerpc/kernel/process.c 	{MSR_FP,	"FP"},
MSR_FP           1641 arch/powerpc/kernel/process.c 	childregs->msr &= ~(MSR_FP|MSR_VEC|MSR_VSX);
MSR_FP           1864 arch/powerpc/kernel/process.c 	if (regs != NULL && (regs->msr & MSR_FP) != 0)
MSR_FP            558 arch/powerpc/kernel/signal_32.c 	if (msr & MSR_FP) {
MSR_FP            701 arch/powerpc/kernel/signal_32.c 	regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1);
MSR_FP            789 arch/powerpc/kernel/signal_32.c 	regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1);
MSR_FP            868 arch/powerpc/kernel/signal_32.c 	msr_check_and_set(msr & (MSR_FP | MSR_VEC));
MSR_FP            869 arch/powerpc/kernel/signal_32.c 	if (msr & MSR_FP) {
MSR_FP            871 arch/powerpc/kernel/signal_32.c 		regs->msr |= (MSR_FP | current->thread.fpexc_mode);
MSR_FP            223 arch/powerpc/kernel/signal_64.c 	msr |= tsk->thread.ckpt_regs.msr & (MSR_FP | MSR_VEC | MSR_VSX);
MSR_FP            271 arch/powerpc/kernel/signal_64.c 	if (msr & MSR_FP)
MSR_FP            369 arch/powerpc/kernel/signal_64.c 	regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX);
MSR_FP            489 arch/powerpc/kernel/signal_64.c 	regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1 | MSR_VEC | MSR_VSX);
MSR_FP            582 arch/powerpc/kernel/signal_64.c 	msr_check_and_set(msr & (MSR_FP | MSR_VEC));
MSR_FP            583 arch/powerpc/kernel/signal_64.c 	if (msr & MSR_FP) {
MSR_FP            585 arch/powerpc/kernel/signal_64.c 		regs->msr |= (MSR_FP | tsk->thread.fpexc_mode);
MSR_FP            741 arch/powerpc/kvm/book3s_emulate.c 				kvmppc_giveup_ext(vcpu, MSR_FP);
MSR_FP           3561 arch/powerpc/kvm/book3s_hv.c 	msr_check_and_set(MSR_FP | MSR_VEC | MSR_VSX);
MSR_FP           3667 arch/powerpc/kvm/book3s_hv.c 	msr_check_and_set(MSR_FP | MSR_VEC | MSR_VSX);
MSR_FP            656 arch/powerpc/kvm/book3s_paired_singles.c 	if (!(kvmppc_get_msr(vcpu) & MSR_FP)) {
MSR_FP            661 arch/powerpc/kvm/book3s_paired_singles.c 	kvmppc_giveup_ext(vcpu, MSR_FP);
MSR_FP            136 arch/powerpc/kvm/book3s_pr.c 	kvmppc_giveup_ext(vcpu, MSR_FP | MSR_VEC | MSR_VSX);
MSR_FP            325 arch/powerpc/kvm/book3s_pr.c 		(MSR_FP | MSR_VEC | MSR_VSX);
MSR_FP            330 arch/powerpc/kvm/book3s_pr.c 	if (ext_diff == MSR_FP)
MSR_FP            527 arch/powerpc/kvm/book3s_pr.c 	if (kvmppc_get_msr(vcpu) & MSR_FP)
MSR_FP            528 arch/powerpc/kvm/book3s_pr.c 		kvmppc_handle_ext(vcpu, BOOK3S_INTERRUPT_FP_UNAVAIL, MSR_FP);
MSR_FP            793 arch/powerpc/kvm/book3s_pr.c 		msr |= MSR_FP | MSR_VEC;
MSR_FP            803 arch/powerpc/kvm/book3s_pr.c 	if (msr & MSR_FP) {
MSR_FP            809 arch/powerpc/kvm/book3s_pr.c 		if (t->regs->msr & MSR_FP)
MSR_FP            874 arch/powerpc/kvm/book3s_pr.c 		msr = MSR_FP | MSR_VEC | MSR_VSX;
MSR_FP            886 arch/powerpc/kvm/book3s_pr.c 	if (msr & MSR_FP) {
MSR_FP            925 arch/powerpc/kvm/book3s_pr.c 	if (lost_ext & MSR_FP) {
MSR_FP           1347 arch/powerpc/kvm/book3s_pr.c 			ext_msr = MSR_FP;
MSR_FP           1836 arch/powerpc/kvm/book3s_pr.c 	if (kvmppc_get_msr(vcpu) & MSR_FP)
MSR_FP           1837 arch/powerpc/kvm/book3s_pr.c 		kvmppc_handle_ext(vcpu, BOOK3S_INTERRUPT_FP_UNAVAIL, MSR_FP);
MSR_FP           1849 arch/powerpc/kvm/book3s_pr.c 	kvmppc_giveup_ext(vcpu, MSR_FP | MSR_VEC | MSR_VSX);
MSR_FP            135 arch/powerpc/kvm/booke.c 	if (!(current->thread.regs->msr & MSR_FP)) {
MSR_FP            140 arch/powerpc/kvm/booke.c 		current->thread.regs->msr |= MSR_FP;
MSR_FP            152 arch/powerpc/kvm/booke.c 	if (current->thread.regs->msr & MSR_FP)
MSR_FP            163 arch/powerpc/kvm/booke.c 	vcpu->arch.shadow_msr &= ~MSR_FP;
MSR_FP            164 arch/powerpc/kvm/booke.c 	vcpu->arch.shadow_msr |= vcpu->arch.shared->msr & MSR_FP;
MSR_FP             30 arch/powerpc/kvm/emulate_loadstore.c 	if (!(kvmppc_get_msr(vcpu) & MSR_FP)) {
MSR_FP            248 arch/powerpc/kvm/emulate_loadstore.c 						MSR_FP);
MSR_FP           1162 arch/powerpc/kvm/powerpc.c 			vcpu->kvm->arch.kvm_ops->giveup_ext(vcpu, MSR_FP);
MSR_FP            492 arch/powerpc/lib/sstep.c 	if (regs->msr & MSR_FP)
MSR_FP            499 arch/powerpc/lib/sstep.c 		if (regs->msr & MSR_FP)
MSR_FP            526 arch/powerpc/lib/sstep.c 	if (regs->msr & MSR_FP)
MSR_FP            538 arch/powerpc/lib/sstep.c 		if (regs->msr & MSR_FP)
MSR_FP            832 arch/powerpc/lib/sstep.c 		if (regs->msr & MSR_FP) {
MSR_FP            863 arch/powerpc/lib/sstep.c 		if (regs->msr & MSR_FP) {
MSR_FP           2963 arch/powerpc/lib/sstep.c 		if (!(regs->msr & MSR_PR) && !(regs->msr & MSR_FP))
MSR_FP           3034 arch/powerpc/lib/sstep.c 		if (!(regs->msr & MSR_PR) && !(regs->msr & MSR_FP))
MSR_FP             77 arch/powerpc/lib/test_emulate_step.c 	regs->msr |= MSR_FP;
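
The recurring pattern in these hits (for example arch/powerpc/kernel/process.c lines 177/179 and the switch_to.h helpers at lines 35/61) is to set MSR_FP in the MSR around code that touches the FP register state and clear it again afterwards. Below is a minimal sketch of that pattern, assuming only the msr_check_and_set()/msr_check_and_clear() helpers that appear in the listing; save_fp_state_sketch() is a hypothetical stand-in for the kernel's real FP save path, not an existing function.

	/*
	 * Sketch only: temporarily grant the kernel FP access, flush the
	 * thread's FP registers, then drop FP access again.  This mirrors
	 * the set/clear bracketing visible in the listing; it is not the
	 * kernel's actual giveup_fpu() implementation.
	 */
	static void giveup_fpu_sketch(struct task_struct *tsk)
	{
		msr_check_and_set(MSR_FP);	/* enable FP in the current MSR */
		save_fp_state_sketch(tsk);	/* hypothetical: save FP regs into tsk->thread */
		msr_check_and_clear(MSR_FP);	/* disable FP in the current MSR again */
	}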