Lines Matching refs:thread (arch/powerpc/kernel/process.c)

86 	if (tsk == current && tsk->thread.regs &&  in giveup_fpu_maybe_transactional()
87 MSR_TM_ACTIVE(tsk->thread.regs->msr) && in giveup_fpu_maybe_transactional()
89 tsk->thread.ckpt_regs.msr = tsk->thread.regs->msr; in giveup_fpu_maybe_transactional()
104 if (tsk == current && tsk->thread.regs && in giveup_altivec_maybe_transactional()
105 MSR_TM_ACTIVE(tsk->thread.regs->msr) && in giveup_altivec_maybe_transactional()
107 tsk->thread.ckpt_regs.msr = tsk->thread.regs->msr; in giveup_altivec_maybe_transactional()
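
The matches at lines 86-107 are the transaction-aware giveup helpers: before the FPU or AltiVec state is handed back to the thread_struct, the MSR that was live at transaction checkpoint time is recorded so the state can be put back before returning to userspace. Below is a hedged sketch of the FP variant, reconstructed around the matched lines; TIF_RESTORE_TM, test_thread_flag()/set_thread_flag() and giveup_fpu() are assumed to be the usual mainline helpers of this era, and the AltiVec variant at lines 104-107 has the same shape with giveup_altivec().

static void giveup_fpu_maybe_transactional(struct task_struct *tsk)
{
    /*
     * If we are flushing the current task's FP state while it is in
     * the middle of a hardware transaction, record the MSR that was
     * live at checkpoint time and flag the thread so the state gets
     * restored before returning to userspace (see restore_tm_state()).
     */
    if (tsk == current && tsk->thread.regs &&
        MSR_TM_ACTIVE(tsk->thread.regs->msr) &&
        !test_thread_flag(TIF_RESTORE_TM)) {
        tsk->thread.ckpt_regs.msr = tsk->thread.regs->msr;
        set_thread_flag(TIF_RESTORE_TM);
    }

    /* Hand the FP registers back to tsk->thread as usual. */
    giveup_fpu(tsk);
}
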
126 if (tsk->thread.regs) { in flush_fp_to_thread()
136 if (tsk->thread.regs->msr & MSR_FP) { in flush_fp_to_thread()
160 if (current->thread.regs && (current->thread.regs->msr & MSR_FP)) in enable_kernel_fp()
176 if (current->thread.regs && (current->thread.regs->msr & MSR_VEC)) in enable_kernel_altivec()
192 if (tsk->thread.regs) { in flush_altivec_to_thread()
194 if (tsk->thread.regs->msr & MSR_VEC) { in flush_altivec_to_thread()
212 if (current->thread.regs && (current->thread.regs->msr & MSR_VSX)) in enable_kernel_vsx()
232 if (tsk->thread.regs) { in flush_vsx_to_thread()
234 if (tsk->thread.regs->msr & MSR_VSX) { in flush_vsx_to_thread()
253 if (current->thread.regs && (current->thread.regs->msr & MSR_SPE)) in enable_kernel_spe()
265 if (tsk->thread.regs) { in flush_spe_to_thread()
267 if (tsk->thread.regs->msr & MSR_SPE) { in flush_spe_to_thread()
271 tsk->thread.spefscr = mfspr(SPRN_SPEFSCR); in flush_spe_to_thread()
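
Lines 126-271 are the flush_*_to_thread() family and their enable_kernel_*() counterparts: each checks that the task has user register state, tests the relevant MSR facility bit, and if the state is live in the CPU pushes it back into the thread_struct. A hedged sketch of the FP pair follows; the matched conditions are taken verbatim, while the preempt_disable()/preempt_enable() bracket, the SMP BUG_ON and the UP fallback via last_task_used_math are assumptions about the surrounding code. The AltiVec, VSX and SPE variants (lines 192-271) differ only in the MSR bit tested and the giveup routine called (flush_spe_to_thread() additionally snapshots SPEFSCR, as line 271 shows).

void flush_fp_to_thread(struct task_struct *tsk)
{
    if (tsk->thread.regs) {
        /* Keep the state from migrating between the MSR test and the giveup. */
        preempt_disable();
        if (tsk->thread.regs->msr & MSR_FP) {
#ifdef CONFIG_SMP
            /* On SMP the live state can only be on this CPU if tsk is current. */
            BUG_ON(tsk != current);
#endif
            giveup_fpu_maybe_transactional(tsk);
        }
        preempt_enable();
    }
}

void enable_kernel_fp(void)
{
    WARN_ON(preemptible());

#ifdef CONFIG_SMP
    /* Push the current user context's FP state out before the kernel uses FP. */
    if (current->thread.regs && (current->thread.regs->msr & MSR_FP))
        giveup_fpu_maybe_transactional(current);
    else
        giveup_fpu(NULL);   /* just enables MSR_FP for the kernel */
#else
    giveup_fpu_maybe_transactional(last_task_used_math);
#endif
}
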
311 current->thread.trap_nr = signal_code; in do_send_trap()
329 current->thread.trap_nr = TRAP_HWBKPT; in do_break()
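
Lines 311 and 329 record which trap fired in thread.trap_nr just before a debug signal is raised, so ptrace/siginfo can report it. A hedged sketch of how do_break() typically continues after the matched line; the notify_die()/in-kernel debugger hooks are omitted, and the siginfo delivery via force_sig_info() is an assumption about the surrounding code rather than a quote of it.

void do_break(struct pt_regs *regs, unsigned long address,
              unsigned long error_code)
{
    siginfo_t info;

    /* Matched line 329: remember which trap this was for ptrace/siginfo. */
    current->thread.trap_nr = TRAP_HWBKPT;

    /* (notify_die()/debugger hooks omitted in this sketch) */

    /* Disarm the breakpoint so we do not trap again immediately. */
    hw_breakpoint_disable();

    /* Deliver SIGTRAP with the faulting address to userspace. */
    info.si_signo = SIGTRAP;
    info.si_errno = 0;
    info.si_code  = TRAP_HWBKPT;
    info.si_addr  = (void __user *)address;
    force_sig_info(SIGTRAP, &info, current);
}
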
355 static void set_debug_reg_defaults(struct thread_struct *thread) in set_debug_reg_defaults() argument
357 thread->debug.iac1 = thread->debug.iac2 = 0; in set_debug_reg_defaults()
359 thread->debug.iac3 = thread->debug.iac4 = 0; in set_debug_reg_defaults()
361 thread->debug.dac1 = thread->debug.dac2 = 0; in set_debug_reg_defaults()
363 thread->debug.dvc1 = thread->debug.dvc2 = 0; in set_debug_reg_defaults()
365 thread->debug.dbcr0 = 0; in set_debug_reg_defaults()
370 thread->debug.dbcr1 = DBCR1_IAC1US | DBCR1_IAC2US | in set_debug_reg_defaults()
376 thread->debug.dbcr2 = DBCR2_DAC1US | DBCR2_DAC2US; in set_debug_reg_defaults()
378 thread->debug.dbcr1 = 0; in set_debug_reg_defaults()
416 if ((current->thread.debug.dbcr0 & DBCR0_IDM) in switch_booke_debug_regs()
423 static void set_debug_reg_defaults(struct thread_struct *thread) in set_debug_reg_defaults() argument
425 thread->hw_brk.address = 0; in set_debug_reg_defaults()
426 thread->hw_brk.type = 0; in set_debug_reg_defaults()
427 set_breakpoint(&thread->hw_brk); in set_debug_reg_defaults()
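
Lines 355-427 show the two builds of set_debug_reg_defaults(): the advanced-debug (BookE) variant clears the IAC/DAC/DVC registers and selects user-mode DBCR defaults, while the classic variant simply forgets the single hardware breakpoint. A sketch of the classic variant assembled from the matched lines; the CONFIG_PPC_ADV_DEBUG_REGS guard is an assumption about where the #ifdef split falls.

#ifndef CONFIG_PPC_ADV_DEBUG_REGS      /* assumed guard for the classic build */
static void set_debug_reg_defaults(struct thread_struct *thread)
{
    /* Forget any hardware breakpoint and push the cleared value out
     * to the debug registers via set_breakpoint(). */
    thread->hw_brk.address = 0;
    thread->hw_brk.type = 0;
    set_breakpoint(&thread->hw_brk);
}
#endif
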
586 tm_reclaim_thread(&current->thread, current_thread_info(), cause); in tm_reclaim_current()
601 struct thread_struct *thr = &tsk->thread; in tm_reclaim_task()
637 extern void __tm_recheckpoint(struct thread_struct *thread,
640 void tm_recheckpoint(struct thread_struct *thread, in tm_recheckpoint() argument
655 tm_restore_sprs(thread); in tm_recheckpoint()
657 __tm_recheckpoint(thread, orig_msr); in tm_recheckpoint()
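
Lines 637-657 are the recheckpoint path: the TM SPRs are restored first so TEXASR is sane, then the assembly helper __tm_recheckpoint() reloads the checkpointed state. A hedged sketch of the whole function; the local_irq_save()/hard_irq_disable() bracket is an assumption about the surrounding lines, the rest follows the matches.

extern void __tm_recheckpoint(struct thread_struct *thread,
                              unsigned long orig_msr);

void tm_recheckpoint(struct thread_struct *thread, unsigned long orig_msr)
{
    unsigned long flags;

    /* The low-level trecheckpoint code runs with a userspace r1, so it
     * must not be interrupted: hard-disable interrupts across it. */
    local_irq_save(flags);
    hard_irq_disable();

    /* Restore TFHAR/TEXASR/TFIAR before the actual trecheckpoint. */
    tm_restore_sprs(thread);

    __tm_recheckpoint(thread, orig_msr);

    local_irq_restore(flags);
}
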
677 if (!new->thread.regs) in tm_recheckpoint_new_task()
680 if (!MSR_TM_ACTIVE(new->thread.regs->msr)){ in tm_recheckpoint_new_task()
681 tm_restore_sprs(&new->thread); in tm_recheckpoint_new_task()
684 msr = new->thread.ckpt_regs.msr; in tm_recheckpoint_new_task()
688 new->pid, new->thread.regs->msr, msr); in tm_recheckpoint_new_task()
691 tm_recheckpoint(&new->thread, msr); in tm_recheckpoint_new_task()
695 do_load_up_transact_fpu(&new->thread); in tm_recheckpoint_new_task()
696 new->thread.regs->msr |= in tm_recheckpoint_new_task()
697 (MSR_FP | new->thread.fpexc_mode); in tm_recheckpoint_new_task()
701 do_load_up_transact_altivec(&new->thread); in tm_recheckpoint_new_task()
702 new->thread.regs->msr |= MSR_VEC; in tm_recheckpoint_new_task()
707 new->thread.regs->msr |= MSR_VSX; in tm_recheckpoint_new_task()
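
Lines 677-707 fill in tm_recheckpoint_new_task(): a task that was switched out in the middle of a transaction is re-checkpointed, and whichever of FP/VMX/VSX the checkpointed MSR had enabled is reloaded before it runs again. A sketch stitched together from the matched lines; the CPU_FTR_TM feature check, the #ifdef placement and the omission of the TM_DEBUG trace at line 688 are assumptions or simplifications.

static inline void tm_recheckpoint_new_task(struct task_struct *new)
{
    unsigned long msr;

    if (!cpu_has_feature(CPU_FTR_TM))
        return;

    /* Kernel threads have no user register state to recheckpoint. */
    if (!new->thread.regs)
        return;

    /* Not mid-transaction: just put the TM SPRs back and return. */
    if (!MSR_TM_ACTIVE(new->thread.regs->msr)) {
        tm_restore_sprs(&new->thread);
        return;
    }
    msr = new->thread.ckpt_regs.msr;
    /* (TM_DEBUG trace at line 688 omitted) */

    /* Re-checkpoint, then reload whatever the transaction had enabled. */
    tm_recheckpoint(&new->thread, msr);

    if (msr & MSR_FP) {
        do_load_up_transact_fpu(&new->thread);
        new->thread.regs->msr |= (MSR_FP | new->thread.fpexc_mode);
    }
#ifdef CONFIG_ALTIVEC
    if (msr & MSR_VEC) {
        do_load_up_transact_altivec(&new->thread);
        new->thread.regs->msr |= MSR_VEC;
    }
#endif
#ifdef CONFIG_VSX
    /* FP and VMX are restored, so VSX may as well be turned on too. */
    if (msr & MSR_VSX)
        new->thread.regs->msr |= MSR_VSX;
#endif
}
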
744 msr_diff = current->thread.ckpt_regs.msr & ~regs->msr; in restore_tm_state()
748 load_fp_state(&current->thread.fp_state); in restore_tm_state()
749 regs->msr |= current->thread.fpexc_mode; in restore_tm_state()
753 load_vr_state(&current->thread.vr_state); in restore_tm_state()
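
Lines 744-753 are restore_tm_state(), the exception-return counterpart of TIF_RESTORE_TM: it compares the checkpointed MSR with the live one and reloads only the facilities that were flushed while the transaction was suspended. A hedged sketch; fp_enable() and vec_enable() are assumed kernel-internal helpers of this era, the rest mirrors the matched lines.

void restore_tm_state(struct pt_regs *regs)
{
    unsigned long msr_diff;

    clear_thread_flag(TIF_RESTORE_TM);
    if (!MSR_TM_ACTIVE(regs->msr))
        return;

    /* Facilities that were on at checkpoint time but are off now. */
    msr_diff = current->thread.ckpt_regs.msr & ~regs->msr;
    msr_diff &= MSR_FP | MSR_VEC | MSR_VSX;

    if (msr_diff & MSR_FP) {
        fp_enable();
        load_fp_state(&current->thread.fp_state);
        regs->msr |= current->thread.fpexc_mode;
    }
    if (msr_diff & MSR_VEC) {
        vec_enable();
        load_vr_state(&current->thread.vr_state);
    }

    /* Turn the reloaded facilities back on for userspace. */
    regs->msr |= msr_diff;
}
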
782 save_early_sprs(&prev->thread); in __switch_to()
796 if (prev->thread.regs && (prev->thread.regs->msr & MSR_FP)) in __switch_to()
810 if (prev->thread.regs && (prev->thread.regs->msr & MSR_VEC)) in __switch_to()
814 if (prev->thread.regs && (prev->thread.regs->msr & MSR_VSX)) in __switch_to()
826 if ((prev->thread.regs && (prev->thread.regs->msr & MSR_SPE))) in __switch_to()
835 if (new->thread.regs && last_task_used_altivec == new) in __switch_to()
836 new->thread.regs->msr |= MSR_VEC; in __switch_to()
839 if (new->thread.regs && last_task_used_vsx == new) in __switch_to()
840 new->thread.regs->msr |= MSR_VSX; in __switch_to()
846 if (new->thread.regs && last_task_used_spe == new) in __switch_to()
847 new->thread.regs->msr |= MSR_SPE; in __switch_to()
853 switch_booke_debug_regs(&new->thread.debug); in __switch_to()
860 if (unlikely(!hw_brk_match(this_cpu_ptr(&current_brk), &new->thread.hw_brk))) in __switch_to()
861 __set_breakpoint(&new->thread.hw_brk); in __switch_to()
866 new_thread = &new->thread; in __switch_to()
867 old_thread = &current->thread; in __switch_to()
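
The __switch_to() matches (lines 782-867) show the lazy register-switching scheme: on the way out (SMP builds), any facility the outgoing task has live in MSR is flushed back to its thread_struct; on the way in (UP builds), a task that was the last user of a facility just gets its MSR bit turned back on instead of a reload; and the debug/breakpoint state follows the incoming task. A heavily condensed, hedged sketch of that shape; the #ifdef structure, _switch() and the omitted PPC64 bookkeeping are assumptions, while the individual statements come from the matched lines.

struct task_struct *__switch_to(struct task_struct *prev,
                                struct task_struct *new)
{
    struct thread_struct *new_thread, *old_thread;
    struct task_struct *last;

    /* Early SPRs (TAR etc.) are saved before anything else. */
    save_early_sprs(&prev->thread);

#ifdef CONFIG_SMP
    /* Switch out: flush any facility the outgoing task has live in MSR. */
    if (prev->thread.regs && (prev->thread.regs->msr & MSR_FP))
        giveup_fpu(prev);
#ifdef CONFIG_ALTIVEC
    if (prev->thread.regs && (prev->thread.regs->msr & MSR_VEC))
        giveup_altivec(prev);
#endif
    /* ... VSX and SPE are handled the same way (lines 814-826) ... */
#else
    /* Switch in (UP only): if the incoming task was the last user of a
     * facility its state is still in the CPU registers, so just turn the
     * MSR bit back on instead of reloading. */
#ifdef CONFIG_ALTIVEC
    if (new->thread.regs && last_task_used_altivec == new)
        new->thread.regs->msr |= MSR_VEC;
#endif
    /* ... likewise for VSX and SPE (lines 839-847) ... */
#endif /* CONFIG_SMP */

    /* Debug state and hardware breakpoints follow the incoming task. */
#ifdef CONFIG_PPC_ADV_DEBUG_REGS
    switch_booke_debug_regs(&new->thread.debug);
#else
    if (unlikely(!hw_brk_match(this_cpu_ptr(&current_brk), &new->thread.hw_brk)))
        __set_breakpoint(&new->thread.hw_brk);
#endif

    new_thread = &new->thread;
    old_thread = &current->thread;

    /* ... PPC64 SLB/VSID bookkeeping elided ... */

    last = _switch(old_thread, new_thread);
    return last;
}
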
1074 set_debug_reg_defaults(&current->thread); in flush_thread()
1123 p->thread.ksp_vsid = sp_vsid; in setup_ksp_vsid()
1159 p->thread.regs = NULL; /* no user register state */ in copy_thread()
1169 p->thread.regs = childregs; in copy_thread()
1196 p->thread.ksp = sp; in copy_thread()
1198 p->thread.ksp_limit = (unsigned long)task_stack_page(p) + in copy_thread()
1202 p->thread.ptrace_bps[0] = NULL; in copy_thread()
1205 p->thread.fp_save_area = NULL; in copy_thread()
1207 p->thread.vr_save_area = NULL; in copy_thread()
1214 p->thread.dscr_inherit = current->thread.dscr_inherit; in copy_thread()
1215 p->thread.dscr = current->thread.dscr; in copy_thread()
1218 p->thread.ppr = INIT_PPR; in copy_thread()
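
The copy_thread() matches (lines 1159-1218) cover its two cases: a kernel thread gets thread.regs = NULL, while a user fork points thread.regs at the child's pt_regs frame on its new kernel stack; the tail then seeds the per-thread bookkeeping (ksp, breakpoints, save areas, DSCR/PPR). A condensed, hedged sketch of those branches; the childregs arithmetic is the conventional layout, the switch-frame setup and CLONE_SETTLS handling are elided, and the #ifdef placement is an assumption.

int copy_thread(unsigned long clone_flags, unsigned long usp,
                unsigned long arg, struct task_struct *p)
{
    struct pt_regs *childregs;
    unsigned long sp = (unsigned long)task_stack_page(p) + THREAD_SIZE;

    /* The child's pt_regs frame sits at the top of its kernel stack. */
    sp -= sizeof(struct pt_regs);
    childregs = (struct pt_regs *)sp;

    if (unlikely(p->flags & PF_KTHREAD)) {
        /* Kernel thread: no user register state. */
        memset(childregs, 0, sizeof(struct pt_regs));
        p->thread.regs = NULL;
        /* ... childregs is set up for ret_from_kernel_thread ... */
    } else {
        /* User fork: copy the parent's regs and point at the copy. */
        *childregs = *current_pt_regs();
        if (usp)
            childregs->gpr[1] = usp;
        p->thread.regs = childregs;
    }

    /* ... switch-frame setup (sp is pushed further down) elided ... */
    p->thread.ksp = sp;
#ifdef CONFIG_PPC32
    p->thread.ksp_limit = (unsigned long)task_stack_page(p) +
                          _ALIGN_UP(sizeof(struct thread_info), 16);
#endif
#ifdef CONFIG_HAVE_HW_BREAKPOINT
    p->thread.ptrace_bps[0] = NULL;
#endif

    p->thread.fp_save_area = NULL;
#ifdef CONFIG_ALTIVEC
    p->thread.vr_save_area = NULL;
#endif

#ifdef CONFIG_PPC64
    /* Inherit the parent's DSCR policy and start at the default priority. */
    if (cpu_has_feature(CPU_FTR_DSCR)) {
        p->thread.dscr_inherit = current->thread.dscr_inherit;
        p->thread.dscr = current->thread.dscr;
    }
    p->thread.ppr = INIT_PPR;
#endif
    return 0;
}
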
1237 if (!current->thread.regs) { in start_thread()
1239 current->thread.regs = regs - 1; in start_thread()
1310 current->thread.used_vsr = 0; in start_thread()
1312 memset(&current->thread.fp_state, 0, sizeof(current->thread.fp_state)); in start_thread()
1313 current->thread.fp_save_area = NULL; in start_thread()
1315 memset(&current->thread.vr_state, 0, sizeof(current->thread.vr_state)); in start_thread()
1316 current->thread.vr_state.vscr.u[3] = 0x00010000; /* Java mode disabled */ in start_thread()
1317 current->thread.vr_save_area = NULL; in start_thread()
1318 current->thread.vrsave = 0; in start_thread()
1319 current->thread.used_vr = 0; in start_thread()
1322 memset(current->thread.evr, 0, sizeof(current->thread.evr)); in start_thread()
1323 current->thread.acc = 0; in start_thread()
1324 current->thread.spefscr = 0; in start_thread()
1325 current->thread.used_spe = 0; in start_thread()
1330 current->thread.tm_tfhar = 0; in start_thread()
1331 current->thread.tm_texasr = 0; in start_thread()
1332 current->thread.tm_tfiar = 0; in start_thread()
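
Lines 1237-1332 are the exec-time reset in start_thread(): a task with no user register area yet (a kernel thread calling exec) gets thread.regs pointed just below the top of its kernel stack, and every optional register facility is then wiped so the new image starts clean. The fragment below regroups those resets under the config options they conventionally live behind; it is the tail of start_thread(), not a complete function, and the #ifdef guards are assumptions.

    /* Tail of start_thread() (sketch): give the task user regs if needed,
     * then reset all optional register state. */
    if (!current->thread.regs) {
        struct pt_regs *regs = task_stack_page(current) + THREAD_SIZE;
        current->thread.regs = regs - 1;
    }

    /* ... NIP/MSR/stack of the new image are set up here ... */

#ifdef CONFIG_VSX
    current->thread.used_vsr = 0;
#endif
    memset(&current->thread.fp_state, 0, sizeof(current->thread.fp_state));
    current->thread.fp_save_area = NULL;
#ifdef CONFIG_ALTIVEC
    memset(&current->thread.vr_state, 0, sizeof(current->thread.vr_state));
    current->thread.vr_state.vscr.u[3] = 0x00010000; /* Java mode disabled */
    current->thread.vr_save_area = NULL;
    current->thread.vrsave = 0;
    current->thread.used_vr = 0;
#endif
#ifdef CONFIG_SPE
    memset(current->thread.evr, 0, sizeof(current->thread.evr));
    current->thread.acc = 0;
    current->thread.spefscr = 0;
    current->thread.used_spe = 0;
#endif
#ifdef CONFIG_PPC_TRANSACTIONAL_MEM
    current->thread.tm_tfhar = 0;
    current->thread.tm_texasr = 0;
    current->thread.tm_tfiar = 0;
#endif
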
1342 struct pt_regs *regs = tsk->thread.regs; in set_fpexc_mode()
1363 tsk->thread.spefscr_last = mfspr(SPRN_SPEFSCR); in set_fpexc_mode()
1364 tsk->thread.fpexc_mode = val & in set_fpexc_mode()
1382 tsk->thread.fpexc_mode = __pack_fe01(val); in set_fpexc_mode()
1385 | tsk->thread.fpexc_mode; in set_fpexc_mode()
1393 if (tsk->thread.fpexc_mode & PR_FP_EXC_SW_ENABLE) in get_fpexc_mode()
1408 tsk->thread.spefscr_last = mfspr(SPRN_SPEFSCR); in get_fpexc_mode()
1409 val = tsk->thread.fpexc_mode; in get_fpexc_mode()
1416 val = __unpack_fe01(tsk->thread.fpexc_mode); in get_fpexc_mode()
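
set_fpexc_mode() and get_fpexc_mode() (lines 1342-1416) back the PR_SET_FPEXC/PR_GET_FPEXC prctl() pair, translating between the PR_FP_EXC_* flags and the MSR FE0/FE1 bits (or SPEFSCR on SPE parts). Below is a small user-space probe of that interface; note that the "get" side writes its result through the pointer passed in arg2, matching the put_user() style used in this file. Only the standard constants from <sys/prctl.h> are assumed.

#include <stdio.h>
#include <sys/prctl.h>

int main(void)
{
    unsigned int mode;

    /* PR_GET_FPEXC writes the current mode through the pointer in arg2. */
    if (prctl(PR_GET_FPEXC, (unsigned long)&mode, 0, 0, 0) < 0) {
        perror("PR_GET_FPEXC");
        return 1;
    }
    printf("current FP exception mode: %#x\n", mode);

    /* Request precise FP exceptions, then read the mode back. */
    if (prctl(PR_SET_FPEXC, PR_FP_EXC_PRECISE, 0, 0, 0) < 0)
        perror("PR_SET_FPEXC");
    else if (prctl(PR_GET_FPEXC, (unsigned long)&mode, 0, 0, 0) == 0)
        printf("new FP exception mode:     %#x\n", mode);

    return 0;
}
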
1422 struct pt_regs *regs = tsk->thread.regs; in set_endian()
1443 struct pt_regs *regs = tsk->thread.regs; in get_endian()
1466 tsk->thread.align_ctl = val; in set_unalign_ctl()
1472 return put_user(tsk->thread.align_ctl, (unsigned int __user *)adr); in get_unalign_ctl()
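
set_endian()/get_endian() and set_unalign_ctl()/get_unalign_ctl() (lines 1422-1472) likewise sit behind PR_SET_ENDIAN/PR_GET_ENDIAN and PR_SET_UNALIGN/PR_GET_UNALIGN; line 1472 shows the same put_user()-through-arg2 convention. A short user-space probe in the same style as the FP-exception example above.

#include <stdio.h>
#include <sys/prctl.h>

int main(void)
{
    unsigned int endian, unalign;

    if (prctl(PR_GET_ENDIAN, (unsigned long)&endian, 0, 0, 0) == 0)
        printf("endian mode:  %u (big=%u, little=%u)\n",
               endian, PR_ENDIAN_BIG, PR_ENDIAN_LITTLE);
    else
        perror("PR_GET_ENDIAN");

    if (prctl(PR_GET_UNALIGN, (unsigned long)&unalign, 0, 0, 0) == 0)
        printf("unalign mode: %u (%s)\n", unalign,
               (unalign & PR_UNALIGN_SIGBUS) ? "SIGBUS on unaligned access"
                                             : "fix up unaligned accesses");
    else
        perror("PR_GET_UNALIGN");

    return 0;
}
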
1521 sp = p->thread.ksp; in get_wchan()
1558 sp = tsk->thread.ksp; in show_stack()
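
Both get_wchan() (line 1521) and show_stack() (line 1558) start their walk from thread.ksp, the kernel stack pointer saved at the last context switch, and then follow the powerpc ABI back chain. A hedged sketch of get_wchan(); validate_sp(), STACK_FRAME_OVERHEAD and STACK_FRAME_LR_SAVE are the usual helpers and constants in this file, and the 16-frame bound is the customary limit rather than something visible in the matches.

unsigned long get_wchan(struct task_struct *p)
{
    unsigned long ip, sp;
    int count = 0;

    if (!p || p == current || p->state == TASK_RUNNING)
        return 0;

    /* Start from the stack pointer saved at the last context switch. */
    sp = p->thread.ksp;
    if (!validate_sp(sp, p, STACK_FRAME_OVERHEAD))
        return 0;

    do {
        /* The first word of every frame is the back-chain pointer. */
        sp = *(unsigned long *)sp;
        if (!validate_sp(sp, p, STACK_FRAME_OVERHEAD))
            return 0;
        if (count > 0) {
            /* The caller's saved LR lives at a fixed offset in the frame. */
            ip = ((unsigned long *)sp)[STACK_FRAME_LR_SAVE];
            if (!in_sched_functions(ip))
                return ip;
        }
    } while (count++ < 16);

    return 0;
}
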