Lines matching references to msr, grouped by containing function (numbers are source line numbers):
In giveup_fpu_maybe_transactional():
    87:  MSR_TM_ACTIVE(tsk->thread.regs->msr) &&
    89:  tsk->thread.ckpt_regs.msr = tsk->thread.regs->msr;

In giveup_altivec_maybe_transactional():
    105: MSR_TM_ACTIVE(tsk->thread.regs->msr) &&
    107: tsk->thread.ckpt_regs.msr = tsk->thread.regs->msr;

In flush_fp_to_thread():
    136: if (tsk->thread.regs->msr & MSR_FP) {

In enable_kernel_fp():
    160: if (current->thread.regs && (current->thread.regs->msr & MSR_FP))

In enable_kernel_altivec():
    176: if (current->thread.regs && (current->thread.regs->msr & MSR_VEC))

In flush_altivec_to_thread():
    194: if (tsk->thread.regs->msr & MSR_VEC) {

In enable_kernel_vsx():
    212: if (current->thread.regs && (current->thread.regs->msr & MSR_VSX))

In flush_vsx_to_thread():
    234: if (tsk->thread.regs->msr & MSR_VSX) {

In enable_kernel_spe():
    253: if (current->thread.regs && (current->thread.regs->msr & MSR_SPE))

In flush_spe_to_thread():
    267: if (tsk->thread.regs->msr & MSR_SPE) {

In tm_reclaim_thread():
    543: msr_diff = thr->ckpt_regs.msr & ~thr->regs->msr;
    572: tm_reclaim(thr, thr->regs->msr, cause);
    580: thr->regs->msr |= msr_diff;

In tm_reclaim_task():
    606: if (!MSR_TM_ACTIVE(thr->regs->msr))
    615: thr->ckpt_regs.msr = thr->regs->msr;
    620: thr->regs->ccr, thr->regs->msr,

In tm_recheckpoint_new_task():
    664: unsigned long msr;  (local variable)
    680: if (!MSR_TM_ACTIVE(new->thread.regs->msr)){
    684: msr = new->thread.ckpt_regs.msr;
    688: new->pid, new->thread.regs->msr, msr);
    691: tm_recheckpoint(&new->thread, msr);
    694: if (msr & MSR_FP) {
    696: new->thread.regs->msr |=
    700: if (msr & MSR_VEC) {
    702: new->thread.regs->msr |= MSR_VEC;
    706: if (msr & MSR_VSX)
    707: new->thread.regs->msr |= MSR_VSX;

In restore_tm_state():
    741: if (!MSR_TM_ACTIVE(regs->msr))
    744: msr_diff = current->thread.ckpt_regs.msr & ~regs->msr;
    749: regs->msr |= current->thread.fpexc_mode;
    755: regs->msr |= msr_diff;

In __switch_to():
    796: if (prev->thread.regs && (prev->thread.regs->msr & MSR_FP))
    810: if (prev->thread.regs && (prev->thread.regs->msr & MSR_VEC))
    814: if (prev->thread.regs && (prev->thread.regs->msr & MSR_VSX))
    826: if ((prev->thread.regs && (prev->thread.regs->msr & MSR_SPE)))
    836: new->thread.regs->msr |= MSR_VEC;
    840: new->thread.regs->msr |= MSR_VSX;
    847: new->thread.regs->msr |= MSR_SPE;

In show_instructions():
    935: if (!(regs->msr & MSR_IR))

In show_regs():
    1021: printk("MSR: "REG" ", regs->msr);
    1022: printbits(regs->msr, msr_bits);
    1037: if (MSR_TM_ACTIVE(regs->msr))

In start_thread():
    1259: regs->msr = MSR_USER;
    1301: regs->msr = MSR_USER64;
    1305: regs->msr = MSR_USER32;
    1329: regs->msr |= MSR_TM;

In set_fpexc_mode():
    1383: if (regs != NULL && (regs->msr & MSR_FP) != 0)
    1384: regs->msr = (regs->msr & ~(MSR_FE0|MSR_FE1))

In set_endian():
    1432: regs->msr &= ~MSR_LE;
    1434: regs->msr |= MSR_LE;

In get_endian():
    1453: if (regs->msr & MSR_LE) {
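Most of the matches above share one shape: test a facility bit (MSR_FP, MSR_VEC, MSR_VSX, MSR_SPE) in thread.regs->msr before flushing or re-enabling that register set (e.g. flush_fp_to_thread() at line 136, __switch_to() at line 796, tm_recheckpoint_new_task() at lines 694-707). Below is a minimal, self-contained user-space sketch of just that bit-test/bit-set pattern; the MSR_*_DEMO masks, the demo_regs type, and flush_fp_demo() are illustrative placeholders invented for this sketch, not the kernel's actual MSR definitions or helpers.

/*
 * Sketch only: mimics the "test a facility bit, then flush and clear it"
 * pattern visible in the listing. Mask values are placeholders, NOT the
 * real PowerPC MSR bit assignments.
 */
#include <stdio.h>

#define MSR_FP_DEMO   (1UL << 13)   /* placeholder standing in for MSR_FP  */
#define MSR_VEC_DEMO  (1UL << 25)   /* placeholder standing in for MSR_VEC */

struct demo_regs {
	unsigned long msr;
};

/* Mirrors the shape of "if (regs->msr & MSR_FP) { ...save state... }" */
static void flush_fp_demo(struct demo_regs *regs)
{
	if (regs->msr & MSR_FP_DEMO) {
		printf("FP bit set: would save FP state, then clear the bit\n");
		regs->msr &= ~MSR_FP_DEMO;
	}
}

int main(void)
{
	struct demo_regs regs = { .msr = MSR_FP_DEMO | MSR_VEC_DEMO };

	flush_fp_demo(&regs);   /* first call clears the FP bit */
	flush_fp_demo(&regs);   /* second call is a no-op */

	/* The recheckpoint/switch paths do the inverse: re-enable a bit */
	regs.msr |= MSR_FP_DEMO;
	printf("msr = %#lx\n", regs.msr);
	return 0;
}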