Lines Matching refs:thread

44 struct thread_struct *thread = &task->thread; in update_cr_regs() local
56 if (task->thread.per_flags & PER_FLAG_NO_TE) in update_cr_regs()
62 if (task->thread.vxrs) in update_cr_regs()
71 if (task->thread.per_flags & PER_FLAG_TE_ABORT_RAND) { in update_cr_regs()
72 if (task->thread.per_flags & in update_cr_regs()
83 new.control = thread->per_user.control; in update_cr_regs()
84 new.start = thread->per_user.start; in update_cr_regs()
85 new.end = thread->per_user.end; in update_cr_regs()
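
These references appear to be a cross-reference listing over the s390 ptrace code; the function and field names match arch/s390/kernel/ptrace.c from the 3.18/3.19 era, before the FPU rework that folded thread.fp_regs and thread.vxrs into thread.fpu. In the update_cr_regs() hits above, per_flags gates the transactional-execution controls, the presence of a vxrs save area gates the vector-extension enablement, and per_user supplies the PER control, start and end values that end up in control registers 9-11. A minimal sketch of that last copy, with the structs reduced to the fields involved (names mirror the kernel's, but these are not the full kernel definitions):

struct per_regs {
	unsigned long control;		/* PER event mask (CR9 image) */
	unsigned long start;		/* PER starting address (CR10 image) */
	unsigned long end;		/* PER ending address (CR11 image) */
};

struct thread_sketch {
	unsigned long per_flags;	/* PER_FLAG_NO_TE, PER_FLAG_TE_ABORT_RAND, ... */
	struct per_regs per_user;	/* PER set requested by the tracer */
};

/* The tracer-supplied PER set is taken over verbatim; kernel-internal
 * adjustments (e.g. for single-stepping) follow later in the real code. */
static void build_per_cregs(const struct thread_sketch *thread,
			    struct per_regs *new_regs)
{
	new_regs->control = thread->per_user.control;
	new_regs->start   = thread->per_user.start;
	new_regs->end     = thread->per_user.end;
}
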
138 memset(&task->thread.per_user, 0, sizeof(task->thread.per_user)); in ptrace_disable()
139 memset(&task->thread.per_event, 0, sizeof(task->thread.per_event)); in ptrace_disable()
142 task->thread.per_flags = 0; in ptrace_disable()
155 PER_EVENT_IFETCH : child->thread.per_user.control; in __peek_user_per()
159 0 : child->thread.per_user.start; in __peek_user_per()
163 PSW_ADDR_INSN : child->thread.per_user.end; in __peek_user_per()
170 return child->thread.per_user.start; in __peek_user_per()
173 return child->thread.per_user.end; in __peek_user_per()
177 child->thread.per_event.cause << (BITS_PER_LONG - 16); in __peek_user_per()
180 return child->thread.per_event.address; in __peek_user_per()
184 child->thread.per_event.paid << (BITS_PER_LONG - 8); in __peek_user_per()
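
__peek_user_per() translates a tracer read of the user-visible PER area into either the per_user values the tracer itself programmed (control/start/end, with a fixed instruction-fetch mask substituted, apparently when single-stepping is in effect) or the per_event data the kernel recorded at the last PER interrupt. The cause and access-id fields are 16- and 8-bit values presented left-aligned in a full word, which is what the two shifts above do. A reduced model (struct per_event mirrors the kernel's field names; the rest is illustrative):

#define BITS_PER_LONG 64

struct per_event {
	unsigned short cause;		/* PER code, ATMID and AI */
	unsigned long address;		/* PER address */
	unsigned char paid;		/* PER access identification */
};

/* Left-align the 16-bit cause and the 8-bit access id in a doubleword so
 * they land at the byte offsets the legacy per_struct layout expects. */
static unsigned long peek_cause(const struct per_event *ev)
{
	return (unsigned long) ev->cause << (BITS_PER_LONG - 16);
}

static unsigned long peek_paid(const struct per_event *ev)
{
	return (unsigned long) ev->paid << (BITS_PER_LONG - 8);
}
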
224 tmp = ((unsigned long) child->thread.acrs[15]) << 32; in __peek_user()
226 tmp = *(addr_t *)((addr_t) &child->thread.acrs + offset); in __peek_user()
245 tmp = child->thread.fp_regs.fpc; in __peek_user()
254 if (child->thread.vxrs) in __peek_user()
256 ((addr_t) child->thread.vxrs + 2*offset); in __peek_user()
259 ((addr_t) &child->thread.fp_regs.fprs + offset); in __peek_user()
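
In __peek_user() (and the compat variant further down), access registers come straight from thread.acrs, while floating-point data comes either from the vector save area or from the classic fp_regs, depending on whether thread.vxrs is set. The "+ 2*offset" arithmetic works because a vector register is twice the size of an FP register: doubling the byte offset within the fprs array lands on the start of the corresponding 16-byte vector register, whose leftmost doubleword is architecturally the FP register. A standalone model of that choice, with types reduced to what the read needs:

#include <stdint.h>

typedef struct {
	uint64_t high;			/* leftmost 64 bits = FP register i */
	uint64_t low;			/* rightmost 64 bits */
} vreg_t;				/* 128-bit vector register image */

static uint64_t peek_fpr(const vreg_t *vxrs, const uint64_t *fprs, int i)
{
	if (vxrs)			/* task has a vector save area */
		return vxrs[i].high;
	return fprs[i];			/* classic FP register array */
}
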
313 child->thread.per_user.control = in __poke_user_per()
317 child->thread.per_user.start = data; in __poke_user_per()
320 child->thread.per_user.end = data; in __poke_user_per()
366 child->thread.acrs[15] = (unsigned int) (data >> 32); in __poke_user()
368 *(addr_t *)((addr_t) &child->thread.acrs + offset) = data; in __poke_user()
390 child->thread.fp_regs.fpc = data >> (BITS_PER_LONG - 32); in __poke_user()
398 if (child->thread.vxrs) in __poke_user()
400 child->thread.vxrs + 2*offset) = data; in __poke_user()
403 &child->thread.fp_regs.fprs + offset) = data; in __poke_user()
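
The __poke_user_per()/__poke_user() lines are the write-side mirror: the tracer's words land in per_user.control/start/end and in acrs, fpc, fprs or vxrs. One quirk visible in the acrs[15] lines above is that the 32-bit access register is exposed to a 64-bit tracer in the upper half of a doubleword, so the peek shifts it up by 32 and the poke takes it back from the same place. Sketched:

#include <stdint.h>

/* peek: present the 32-bit access register 15 in the upper half of the
 * doubleword the tracer reads. */
static uint64_t peek_acr15(const uint32_t acrs[16])
{
	return (uint64_t) acrs[15] << 32;
}

/* poke: recover the register value from that same upper half. */
static void poke_acr15(uint32_t acrs[16], uint64_t data)
{
	acrs[15] = (uint32_t) (data >> 32);
}
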
482 child->thread.per_flags &= ~PER_FLAG_NO_TE; in arch_ptrace()
487 child->thread.per_flags |= PER_FLAG_NO_TE; in arch_ptrace()
488 child->thread.per_flags &= ~PER_FLAG_TE_ABORT_RAND; in arch_ptrace()
491 if (!MACHINE_HAS_TE || (child->thread.per_flags & PER_FLAG_NO_TE)) in arch_ptrace()
495 child->thread.per_flags &= ~PER_FLAG_TE_ABORT_RAND; in arch_ptrace()
498 child->thread.per_flags |= PER_FLAG_TE_ABORT_RAND; in arch_ptrace()
499 child->thread.per_flags |= PER_FLAG_TE_ABORT_RAND_TEND; in arch_ptrace()
502 child->thread.per_flags |= PER_FLAG_TE_ABORT_RAND; in arch_ptrace()
503 child->thread.per_flags &= ~PER_FLAG_TE_ABORT_RAND_TEND; in arch_ptrace()
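
The arch_ptrace() lines touch only per_flags: one request clears PER_FLAG_NO_TE (re-enabling transactional execution), one sets it and drops any abort-randomisation setting, and a third, rejected when the machine lacks TE or TE is disabled for the task, selects one of three abort-randomisation modes. A condensed model of that last request; the flag values, the boolean error handling, and the assumption that the data values 0/1/2 map to the three listed branches in order are all illustrative (the real code returns -EINVAL and uses the s390 ptrace request constants):

#include <stdbool.h>

#define PER_FLAG_NO_TE			(1UL << 0)	/* values illustrative */
#define PER_FLAG_TE_ABORT_RAND		(1UL << 1)
#define PER_FLAG_TE_ABORT_RAND_TEND	(1UL << 2)

static bool te_abort_rand(unsigned long *per_flags, unsigned long data,
			  bool machine_has_te)
{
	if (!machine_has_te || (*per_flags & PER_FLAG_NO_TE))
		return false;			/* request rejected */
	switch (data) {
	case 0:					/* randomisation off */
		*per_flags &= ~PER_FLAG_TE_ABORT_RAND;
		break;
	case 1:					/* set both RAND flags */
		*per_flags |= PER_FLAG_TE_ABORT_RAND;
		*per_flags |= PER_FLAG_TE_ABORT_RAND_TEND;
		break;
	case 2:					/* RAND without the TEND flag */
		*per_flags |= PER_FLAG_TE_ABORT_RAND;
		*per_flags &= ~PER_FLAG_TE_ABORT_RAND_TEND;
		break;
	default:
		return false;
	}
	return true;
}
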
542 PER_EVENT_IFETCH : child->thread.per_user.control; in __peek_user_per_compat()
546 0 : child->thread.per_user.start; in __peek_user_per_compat()
550 PSW32_ADDR_INSN : child->thread.per_user.end; in __peek_user_per_compat()
557 return (__u32) child->thread.per_user.start; in __peek_user_per_compat()
560 return (__u32) child->thread.per_user.end; in __peek_user_per_compat()
563 return (__u32) child->thread.per_event.cause << 16; in __peek_user_per_compat()
566 return (__u32) child->thread.per_event.address; in __peek_user_per_compat()
569 return (__u32) child->thread.per_event.paid << 24; in __peek_user_per_compat()
605 tmp = *(__u32*)((addr_t) &child->thread.acrs + offset); in __peek_user_compat()
624 tmp = child->thread.fp_regs.fpc; in __peek_user_compat()
632 if (child->thread.vxrs) in __peek_user_compat()
634 ((addr_t) child->thread.vxrs + 2*offset); in __peek_user_compat()
637 ((addr_t) &child->thread.fp_regs.fprs + offset); in __peek_user_compat()
674 child->thread.per_user.control = in __poke_user_per_compat()
678 child->thread.per_user.start = data; in __poke_user_per_compat()
681 child->thread.per_user.end = data; in __poke_user_per_compat()
728 *(__u32*)((addr_t) &child->thread.acrs + offset) = tmp; in __poke_user_compat()
749 child->thread.fp_regs.fpc = data; in __poke_user_compat()
757 if (child->thread.vxrs) in __poke_user_compat()
759 child->thread.vxrs + 2*offset) = tmp; in __poke_user_compat()
762 &child->thread.fp_regs.fprs + offset) = tmp; in __poke_user_compat()
894 save_access_regs(target->thread.acrs); in s390_regs_get()
923 save_access_regs(target->thread.acrs); in s390_regs_set()
946 restore_access_regs(target->thread.acrs); in s390_regs_set()
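
s390_regs_get()/s390_regs_set() (and the compat pair at the end of the listing) bracket the access-register part of the general-register regset with save_access_regs()/restore_access_regs() when the target is the current task: the live access registers are spilled into thread.acrs before reading or merging, and reloaded after a write so the tracer's values take effect. A reduced, standalone model of that ordering; the two helpers are stubs here (in the kernel they are stam/lam wrappers):

#include <string.h>

#define NUM_ACRS 16

struct task_sketch {
	unsigned int acrs[NUM_ACRS];	/* stand-in for thread.acrs */
};

static struct task_sketch *current_task;	/* stand-in for "current" */

static void save_access_regs(unsigned int *acrs)    { (void) acrs; /* stam 0,15 */ }
static void restore_access_regs(unsigned int *acrs) { (void) acrs; /* lam 0,15 */ }

static void acrs_regset_get(struct task_sketch *target, unsigned int *out)
{
	if (target == current_task)
		save_access_regs(target->acrs);	/* spill live ARs first */
	memcpy(out, target->acrs, sizeof(target->acrs));
}

static void acrs_regset_set(struct task_sketch *target, const unsigned int *in)
{
	if (target == current_task)
		save_access_regs(target->acrs);	/* start from a consistent base */
	memcpy(target->acrs, in, sizeof(target->acrs));
	if (target == current_task)
		restore_access_regs(target->acrs); /* make the new values live */
}
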
956 save_fp_ctl(&target->thread.fp_regs.fpc); in s390_fpregs_get()
957 save_fp_regs(target->thread.fp_regs.fprs); in s390_fpregs_get()
958 } else if (target->thread.vxrs) { in s390_fpregs_get()
962 target->thread.fp_regs.fprs[i] = in s390_fpregs_get()
963 *(freg_t *)(target->thread.vxrs + i); in s390_fpregs_get()
966 &target->thread.fp_regs, 0, -1); in s390_fpregs_get()
977 save_fp_ctl(&target->thread.fp_regs.fpc); in s390_fpregs_set()
978 save_fp_regs(target->thread.fp_regs.fprs); in s390_fpregs_set()
983 u32 ufpc[2] = { target->thread.fp_regs.fpc, 0 }; in s390_fpregs_set()
990 target->thread.fp_regs.fpc = ufpc[0]; in s390_fpregs_set()
995 target->thread.fp_regs.fprs, in s390_fpregs_set()
1000 restore_fp_ctl(&target->thread.fp_regs.fpc); in s390_fpregs_set()
1001 restore_fp_regs(target->thread.fp_regs.fprs); in s390_fpregs_set()
1002 } else if (target->thread.vxrs) { in s390_fpregs_set()
1006 *(freg_t *)(target->thread.vxrs + i) = in s390_fpregs_set()
1007 target->thread.fp_regs.fprs[i]; in s390_fpregs_set()
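
s390_fpregs_get()/s390_fpregs_set() keep exporting the classic fp_regs view even for tasks that have a vector save area: for the current task the live FP state is spilled with save_fp_ctl()/save_fp_regs() (and reloaded on the write path), while for a vxrs task the fprs are synthesised from, or folded back into, the vector registers, since FP register i is architecturally the leftmost 64 bits of vector register i. A standalone model of the two conversion loops (the double-underscore names mirror the kernel constants):

#include <stdint.h>

#define __NUM_FPRS	16

typedef uint64_t freg_t;				/* 64-bit FP register image */
typedef struct { uint64_t high; uint64_t low; } vreg_t;	/* 128-bit vector register */

/* get path: build the classic fprs view from the vector save area */
static void fprs_from_vxrs(freg_t fprs[__NUM_FPRS], const vreg_t *vxrs)
{
	for (int i = 0; i < __NUM_FPRS; i++)
		fprs[i] = vxrs[i].high;		/* leftmost doubleword of V(i) */
}

/* set path: fold tracer-written fprs back into V0-V15 */
static void vxrs_from_fprs(vreg_t *vxrs, const freg_t fprs[__NUM_FPRS])
{
	for (int i = 0; i < __NUM_FPRS; i++)
		vxrs[i].high = fprs[i];		/* low halves of V0-V15 untouched */
}
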
1050 data = target->thread.trap_tdb; in s390_tdb_get()
1072 if (target->thread.vxrs) { in s390_vxrs_low_get()
1074 save_vx_regs(target->thread.vxrs); in s390_vxrs_low_get()
1076 vxrs[i] = *((__u64 *)(target->thread.vxrs + i) + 1); in s390_vxrs_low_get()
1092 if (!target->thread.vxrs) { in s390_vxrs_low_set()
1097 save_vx_regs(target->thread.vxrs); in s390_vxrs_low_set()
1102 *((__u64 *)(target->thread.vxrs + i) + 1) = vxrs[i]; in s390_vxrs_low_set()
1104 restore_vx_regs(target->thread.vxrs); in s390_vxrs_low_set()
1119 if (target->thread.vxrs) { in s390_vxrs_high_get()
1121 save_vx_regs(target->thread.vxrs); in s390_vxrs_high_get()
1122 memcpy(vxrs, target->thread.vxrs + __NUM_VXRS_LOW, in s390_vxrs_high_get()
1138 if (!target->thread.vxrs) { in s390_vxrs_high_set()
1143 save_vx_regs(target->thread.vxrs); in s390_vxrs_high_set()
1146 target->thread.vxrs + __NUM_VXRS_LOW, 0, -1); in s390_vxrs_high_set()
1148 restore_vx_regs(target->thread.vxrs); in s390_vxrs_high_set()
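
The two vector regsets split the state the same way: because the leftmost halves of V0-V15 are already exported through the FP regset, s390_vxrs_low_get()/set() carry only the rightmost 64 bits of V0-V15 (hence the "+ 1" doubleword in the lines above), while s390_vxrs_high_get()/set() carry V16-V31 in full, starting at vxrs + __NUM_VXRS_LOW. A reduced model of the read side, using the same illustrative vreg_t as above:

#include <string.h>
#include <stdint.h>

#define __NUM_VXRS	32
#define __NUM_VXRS_LOW	16

typedef struct { uint64_t high; uint64_t low; } vreg_t;	/* 128-bit vector register */

/* VXRS_LOW: only the second doubleword of V0-V15 */
static void vxrs_low_get(uint64_t out[__NUM_VXRS_LOW], const vreg_t *vxrs)
{
	for (int i = 0; i < __NUM_VXRS_LOW; i++)
		out[i] = vxrs[i].low;
}

/* VXRS_HIGH: V16-V31 copied whole */
static void vxrs_high_get(vreg_t out[__NUM_VXRS - __NUM_VXRS_LOW],
			  const vreg_t *vxrs)
{
	memcpy(out, vxrs + __NUM_VXRS_LOW,
	       (__NUM_VXRS - __NUM_VXRS_LOW) * sizeof(vreg_t));
}
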
1246 save_access_regs(target->thread.acrs); in s390_compat_regs_get()
1275 save_access_regs(target->thread.acrs); in s390_compat_regs_set()
1298 restore_access_regs(target->thread.acrs); in s390_compat_regs_set()