Lines Matching refs:thread

272 buf[i] = task->thread.TS_FPR(i); in copy_fpr_to_user()
273 buf[i] = task->thread.fp_state.fpscr; in copy_fpr_to_user()
286 task->thread.TS_FPR(i) = buf[i]; in copy_fpr_from_user()
287 task->thread.fp_state.fpscr = buf[i]; in copy_fpr_from_user()
300 buf[i] = task->thread.fp_state.fpr[i][TS_VSRLOWOFFSET]; in copy_vsx_to_user()
313 task->thread.fp_state.fpr[i][TS_VSRLOWOFFSET] = buf[i]; in copy_vsx_from_user()
326 buf[i] = task->thread.TS_TRANS_FPR(i); in copy_transact_fpr_to_user()
327 buf[i] = task->thread.transact_fp.fpscr; in copy_transact_fpr_to_user()
340 task->thread.TS_TRANS_FPR(i) = buf[i]; in copy_transact_fpr_from_user()
341 task->thread.transact_fp.fpscr = buf[i]; in copy_transact_fpr_from_user()
354 buf[i] = task->thread.transact_fp.fpr[i][TS_VSRLOWOFFSET]; in copy_transact_vsx_to_user()
367 task->thread.transact_fp.fpr[i][TS_VSRLOWOFFSET] = buf[i]; in copy_transact_vsx_from_user()
375 return __copy_to_user(to, task->thread.fp_state.fpr, in copy_fpr_to_user()
382 return __copy_from_user(task->thread.fp_state.fpr, from, in copy_fpr_from_user()
390 return __copy_to_user(to, task->thread.transact_fp.fpr, in copy_transact_fpr_to_user()
397 return __copy_from_user(task->thread.transact_fp.fpr, from, in copy_transact_fpr_from_user()
423 if (current->thread.used_vr) { in save_user_regs()
425 if (__copy_to_user(&frame->mc_vregs, &current->thread.vr_state, in save_user_regs()
441 current->thread.vrsave = mfspr(SPRN_VRSAVE); in save_user_regs()
442 if (__put_user(current->thread.vrsave, (u32 __user *)&frame->mc_vregs[32])) in save_user_regs()
460 if (current->thread.used_vsr && ctx_has_vsx_region) { in save_user_regs()
469 if (current->thread.used_spe) { in save_user_regs()
471 if (__copy_to_user(&frame->mc_vregs, current->thread.evr, in save_user_regs()
481 if (__put_user(current->thread.spefscr, (u32 __user *)&frame->mc_vregs + ELF_NEVRREG)) in save_user_regs()
532 if (save_general_regs(&current->thread.ckpt_regs, frame) in save_tm_user_regs()
547 if (current->thread.used_vr) { in save_tm_user_regs()
549 if (__copy_to_user(&frame->mc_vregs, &current->thread.vr_state, in save_tm_user_regs()
554 &current->thread.transact_vr, in save_tm_user_regs()
559 &current->thread.vr_state, in save_tm_user_regs()
576 current->thread.vrsave = mfspr(SPRN_VRSAVE); in save_tm_user_regs()
577 if (__put_user(current->thread.vrsave, in save_tm_user_regs()
581 if (__put_user(current->thread.transact_vrsave, in save_tm_user_regs()
585 if (__put_user(current->thread.vrsave, in save_tm_user_regs()
608 if (current->thread.used_vsr) { in save_tm_user_regs()
628 if (current->thread.used_spe) { in save_tm_user_regs()
630 if (__copy_to_user(&frame->mc_vregs, current->thread.evr, in save_tm_user_regs()
639 if (__put_user(current->thread.spefscr, (u32 __user *)&frame->mc_vregs + ELF_NEVRREG)) in save_tm_user_regs()
707 if (__copy_from_user(&current->thread.vr_state, &sr->mc_vregs, in restore_user_regs()
710 } else if (current->thread.used_vr) in restore_user_regs()
711 memset(&current->thread.vr_state, 0, in restore_user_regs()
715 if (__get_user(current->thread.vrsave, (u32 __user *)&sr->mc_vregs[32])) in restore_user_regs()
718 mtspr(SPRN_VRSAVE, current->thread.vrsave); in restore_user_regs()
736 } else if (current->thread.used_vsr) in restore_user_regs()
738 current->thread.fp_state.fpr[i][TS_VSRLOWOFFSET] = 0; in restore_user_regs()
752 if (__copy_from_user(current->thread.evr, &sr->mc_vregs, in restore_user_regs()
755 } else if (current->thread.used_spe) in restore_user_regs()
756 memset(current->thread.evr, 0, ELF_NEVRREG * sizeof(u32)); in restore_user_regs()
759 if (__get_user(current->thread.spefscr, (u32 __user *)&sr->mc_vregs + ELF_NEVRREG)) in restore_user_regs()
790 err |= restore_general_regs(&current->thread.ckpt_regs, sr); in restore_tm_user_regs()
792 err |= __get_user(current->thread.tm_tfhar, &sr->mc_gregs[PT_NIP]); in restore_tm_user_regs()
814 if (__copy_from_user(&current->thread.vr_state, &sr->mc_vregs, in restore_tm_user_regs()
816 __copy_from_user(&current->thread.transact_vr, in restore_tm_user_regs()
820 } else if (current->thread.used_vr) { in restore_tm_user_regs()
821 memset(&current->thread.vr_state, 0, in restore_tm_user_regs()
823 memset(&current->thread.transact_vr, 0, in restore_tm_user_regs()
828 if (__get_user(current->thread.vrsave, in restore_tm_user_regs()
830 __get_user(current->thread.transact_vrsave, in restore_tm_user_regs()
834 mtspr(SPRN_VRSAVE, current->thread.vrsave); in restore_tm_user_regs()
853 } else if (current->thread.used_vsr) in restore_tm_user_regs()
855 current->thread.fp_state.fpr[i][TS_VSRLOWOFFSET] = 0; in restore_tm_user_regs()
856 current->thread.transact_fp.fpr[i][TS_VSRLOWOFFSET] = 0; in restore_tm_user_regs()
866 if (__copy_from_user(current->thread.evr, &sr->mc_vregs, in restore_tm_user_regs()
869 } else if (current->thread.used_spe) in restore_tm_user_regs()
870 memset(current->thread.evr, 0, ELF_NEVRREG * sizeof(u32)); in restore_tm_user_regs()
873 if (__get_user(current->thread.spefscr, (u32 __user *)&sr->mc_vregs in restore_tm_user_regs()
893 current->thread.tm_texasr |= TEXASR_FS; in restore_tm_user_regs()
895 tm_recheckpoint(&current->thread, msr); in restore_tm_user_regs()
899 do_load_up_transact_fpu(&current->thread); in restore_tm_user_regs()
900 regs->msr |= (MSR_FP | current->thread.fpexc_mode); in restore_tm_user_regs()
904 do_load_up_transact_altivec(&current->thread); in restore_tm_user_regs()
1050 current->thread.fp_state.fpscr = 0; /* turn off all fp exceptions */ in handle_rt_signal32()
1318 unsigned long new_dbcr0 = current->thread.debug.dbcr0; in sys_debug_setcontext()
1333 current->thread.debug.dbcr1)) { in sys_debug_setcontext()
1368 current->thread.debug.dbcr0 = new_dbcr0; in sys_debug_setcontext()
1470 current->thread.fp_state.fpscr = 0; /* turn off all fp exceptions */ in handle_signal32()
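
The matches in copy_fpr_to_user() at source lines 272, 273 and 375 suggest two variants of the same helper: a buffered path that packs each FPR plus the FPSCR into a local array before the user copy, and a direct __copy_to_user() of thread.fp_state.fpr. A minimal sketch of the buffered path, reconstructed only from the matched lines above (the u64 buffer type and the ELF_NFPREG loop bound are assumptions, not shown in this listing):

	/* Sketch only: pack the FPRs plus FPSCR, then copy the whole buffer to user space. */
	unsigned long copy_fpr_to_user(void __user *to, struct task_struct *task)
	{
		u64 buf[ELF_NFPREG];	/* assumed buffer type and size */
		int i;

		for (i = 0; i < ELF_NFPREG - 1; i++)
			buf[i] = task->thread.TS_FPR(i);	/* matched source line 272 */
		buf[i] = task->thread.fp_state.fpscr;		/* matched source line 273 */
		return __copy_to_user(to, buf, ELF_NFPREG * sizeof(double));
	}

The *_from_user, *_vsx_* and *_transact_* matches follow the same pattern in the opposite direction or against the transactional (transact_fp/transact_vr) register state.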