Lines matching references to thread

44 struct thread_struct *thread = &task->thread; in update_cr_regs() (local)
54 if (task->thread.per_flags & PER_FLAG_NO_TE) in update_cr_regs()
61 if (task->thread.per_flags & PER_FLAG_TE_ABORT_RAND) { in update_cr_regs()
62 if (task->thread.per_flags & PER_FLAG_TE_ABORT_RAND_TEND) in update_cr_regs()
71 new.control = thread->per_user.control; in update_cr_regs()
72 new.start = thread->per_user.start; in update_cr_regs()
73 new.end = thread->per_user.end; in update_cr_regs()
126 memset(&task->thread.per_user, 0, sizeof(task->thread.per_user)); in ptrace_disable()
127 memset(&task->thread.per_event, 0, sizeof(task->thread.per_event)); in ptrace_disable()
130 task->thread.per_flags = 0; in ptrace_disable()
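
The per_user, per_event and per_flags state cleared in ptrace_disable() above is all PER (program-event-recording) bookkeeping kept in struct thread_struct. Below is a minimal C sketch of the shapes implied by this listing; the field names are taken from the references above, while the struct names and exact types here are assumptions for illustration only, not the kernel's definitions.

/* Sketch of the PER state referenced above. Field names follow the
 * listing; the *_sketch names and types are assumptions. */
struct per_regs_sketch {
	unsigned long control;	/* PER control bits, consumed by update_cr_regs() */
	unsigned long start;	/* start of the monitored address range */
	unsigned long end;	/* end of the monitored address range */
};

struct per_event_sketch {
	unsigned short cause;	/* cause bits of the last PER event */
	unsigned long address;	/* address that triggered the event */
	unsigned char paid;	/* PER access id */
};

/* ptrace_disable() wipes both structures and resets per_flags, so a
 * detached task keeps no debugging state behind. */
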
143 PER_EVENT_IFETCH : child->thread.per_user.control; in __peek_user_per()
147 0 : child->thread.per_user.start; in __peek_user_per()
151 PSW_ADDR_INSN : child->thread.per_user.end; in __peek_user_per()
158 return child->thread.per_user.start; in __peek_user_per()
161 return child->thread.per_user.end; in __peek_user_per()
165 child->thread.per_event.cause << (BITS_PER_LONG - 16); in __peek_user_per()
168 return child->thread.per_event.address; in __peek_user_per()
172 child->thread.per_event.paid << (BITS_PER_LONG - 8); in __peek_user_per()
212 tmp = ((unsigned long) child->thread.acrs[15]) << 32; in __peek_user()
214 tmp = *(addr_t *)((addr_t) &child->thread.acrs + offset); in __peek_user()
233 tmp = child->thread.fpu.fpc; in __peek_user()
244 ((addr_t) child->thread.fpu.vxrs + 2*offset); in __peek_user()
247 ((addr_t) child->thread.fpu.fprs + offset); in __peek_user()
301 child->thread.per_user.control = in __poke_user_per()
305 child->thread.per_user.start = data; in __poke_user_per()
308 child->thread.per_user.end = data; in __poke_user_per()
354 child->thread.acrs[15] = (unsigned int) (data >> 32); in __poke_user()
356 *(addr_t *)((addr_t) &child->thread.acrs + offset) = data; in __poke_user()
378 child->thread.fpu.fpc = data >> (BITS_PER_LONG - 32); in __poke_user()
388 child->thread.fpu.vxrs + 2*offset) = data; in __poke_user()
391 child->thread.fpu.fprs + offset) = data; in __poke_user()
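
__peek_user() and __poke_user() above serve the word-sized PTRACE_PEEKUSR/PTRACE_POKEUSR accesses into the user area, reaching thread.acrs and thread.fpu among other fields. The following user-space sketch peeks two of those words; it assumes the PT_FPC and PT_ACR0 offset macros come from the s390 uapi asm/ptrace.h (header interplay with the libc headers can vary), and error handling is trimmed.

/* Sketch: read the FPC word and access register 0 of a stopped s390
 * tracee through the user-area interface backed by __peek_user().
 * PT_FPC/PT_ACR0 are assumed to come from the s390 asm/ptrace.h. */
#include <errno.h>
#include <signal.h>
#include <stdio.h>
#include <sys/ptrace.h>
#include <sys/types.h>
#include <sys/wait.h>
#include <unistd.h>
#include <asm/ptrace.h>		/* PT_FPC, PT_ACR0 */

int main(void)
{
	pid_t pid = fork();

	if (pid == 0) {				/* tracee */
		ptrace(PTRACE_TRACEME, 0, NULL, NULL);
		raise(SIGSTOP);			/* hand control to the tracer */
		_exit(0);
	}
	waitpid(pid, NULL, 0);			/* tracee is now stopped */

	errno = 0;
	long fpc  = ptrace(PTRACE_PEEKUSER, pid, (void *)(unsigned long)PT_FPC, NULL);
	long acr0 = ptrace(PTRACE_PEEKUSER, pid, (void *)(unsigned long)PT_ACR0, NULL);
	if (errno)
		perror("ptrace(PTRACE_PEEKUSER)");
	else
		printf("fpc word = %#lx, acr0 word = %#lx\n", fpc, acr0);

	ptrace(PTRACE_CONT, pid, NULL, NULL);
	waitpid(pid, NULL, 0);
	return 0;
}
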
470 child->thread.per_flags &= ~PER_FLAG_NO_TE; in arch_ptrace()
475 child->thread.per_flags |= PER_FLAG_NO_TE; in arch_ptrace()
476 child->thread.per_flags &= ~PER_FLAG_TE_ABORT_RAND; in arch_ptrace()
479 if (!MACHINE_HAS_TE || (child->thread.per_flags & PER_FLAG_NO_TE)) in arch_ptrace()
483 child->thread.per_flags &= ~PER_FLAG_TE_ABORT_RAND; in arch_ptrace()
486 child->thread.per_flags |= PER_FLAG_TE_ABORT_RAND; in arch_ptrace()
487 child->thread.per_flags |= PER_FLAG_TE_ABORT_RAND_TEND; in arch_ptrace()
490 child->thread.per_flags |= PER_FLAG_TE_ABORT_RAND; in arch_ptrace()
491 child->thread.per_flags &= ~PER_FLAG_TE_ABORT_RAND_TEND; in arch_ptrace()
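
The per_flags updates in arch_ptrace() above back the s390-specific transactional-execution requests. Here is a hedged sketch of driving them from a tracer; it assumes the PTRACE_ENABLE_TE, PTRACE_DISABLE_TE and PTRACE_TE_ABORT_RAND request macros from the s390 uapi asm/ptrace.h and an already-attached, stopped tracee in pid.

/* Sketch: toggle transactional-execution handling for a stopped
 * tracee. The request macros are assumed to come from the s390
 * asm/ptrace.h; the casts only satisfy glibc's enum-typed prototype.
 * Error handling is omitted. */
#include <sys/ptrace.h>
#include <sys/types.h>
#include <asm/ptrace.h>	/* PTRACE_ENABLE_TE, PTRACE_DISABLE_TE, PTRACE_TE_ABORT_RAND */

void disable_te(pid_t pid)
{
	/* sets PER_FLAG_NO_TE and clears PER_FLAG_TE_ABORT_RAND (lines 475-476) */
	ptrace((enum __ptrace_request)PTRACE_DISABLE_TE, pid, NULL, NULL);
}

void enable_te(pid_t pid)
{
	/* clears PER_FLAG_NO_TE again (line 470) */
	ptrace((enum __ptrace_request)PTRACE_ENABLE_TE, pid, NULL, NULL);
}

void set_te_abort_rand(pid_t pid, unsigned long mode)
{
	/* mode selects the PER_FLAG_TE_ABORT_RAND / _TEND combination
	 * (lines 479-491); rejected when the machine has no TE or TE
	 * was disabled for this task */
	ptrace((enum __ptrace_request)PTRACE_TE_ABORT_RAND, pid, NULL, (void *)mode);
}
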
530 PER_EVENT_IFETCH : child->thread.per_user.control; in __peek_user_per_compat()
534 0 : child->thread.per_user.start; in __peek_user_per_compat()
538 PSW32_ADDR_INSN : child->thread.per_user.end; in __peek_user_per_compat()
545 return (__u32) child->thread.per_user.start; in __peek_user_per_compat()
548 return (__u32) child->thread.per_user.end; in __peek_user_per_compat()
551 return (__u32) child->thread.per_event.cause << 16; in __peek_user_per_compat()
554 return (__u32) child->thread.per_event.address; in __peek_user_per_compat()
557 return (__u32) child->thread.per_event.paid << 24; in __peek_user_per_compat()
593 tmp = *(__u32*)((addr_t) &child->thread.acrs + offset); in __peek_user_compat()
612 tmp = child->thread.fpu.fpc; in __peek_user_compat()
622 ((addr_t) child->thread.fpu.vxrs + 2*offset); in __peek_user_compat()
625 ((addr_t) child->thread.fpu.fprs + offset); in __peek_user_compat()
662 child->thread.per_user.control = in __poke_user_per_compat()
666 child->thread.per_user.start = data; in __poke_user_per_compat()
669 child->thread.per_user.end = data; in __poke_user_per_compat()
716 *(__u32*)((addr_t) &child->thread.acrs + offset) = tmp; in __poke_user_compat()
737 child->thread.fpu.fpc = data; in __poke_user_compat()
747 child->thread.fpu.vxrs + 2*offset) = tmp; in __poke_user_compat()
750 child->thread.fpu.fprs + offset) = tmp; in __poke_user_compat()
882 save_access_regs(target->thread.acrs); in s390_regs_get()
911 save_access_regs(target->thread.acrs); in s390_regs_set()
934 restore_access_regs(target->thread.acrs); in s390_regs_set()
948 fp_regs.fpc = target->thread.fpu.fpc; in s390_fpregs_get()
949 fpregs_store(&fp_regs, &target->thread.fpu); in s390_fpregs_get()
968 u32 ufpc[2] = { target->thread.fpu.fpc, 0 }; in s390_fpregs_set()
975 target->thread.fpu.fpc = ufpc[0]; in s390_fpregs_set()
985 convert_fp_to_vx(target->thread.fpu.vxrs, fprs); in s390_fpregs_set()
987 memcpy(target->thread.fpu.fprs, &fprs, sizeof(fprs)); in s390_fpregs_set()
1028 data = target->thread.trap_tdb; in s390_tdb_get()
1053 vxrs[i] = *((__u64 *)(target->thread.fpu.vxrs + i) + 1); in s390_vxrs_low_get()
1073 *((__u64 *)(target->thread.fpu.vxrs + i) + 1) = vxrs[i]; in s390_vxrs_low_set()
1089 memcpy(vxrs, target->thread.fpu.vxrs + __NUM_VXRS_LOW, sizeof(vxrs)); in s390_vxrs_high_get()
1107 target->thread.fpu.vxrs + __NUM_VXRS_LOW, 0, -1); in s390_vxrs_high_set()
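
The s390_vxrs_low_* and s390_vxrs_high_* handlers above implement regsets, so user space reaches them through PTRACE_GETREGSET/PTRACE_SETREGSET rather than the user area. A minimal sketch of reading the VXRS_LOW regset follows, assuming the NT_S390_VXRS_LOW note type from elf.h and a stopped tracee in pid.

/* Sketch: fetch the regset filled by s390_vxrs_low_get(), i.e. the
 * low 64-bit halves of vector registers 0-15. NT_S390_VXRS_LOW is
 * assumed to be available from elf.h; error handling kept minimal. */
#include <elf.h>		/* NT_S390_VXRS_LOW */
#include <stdint.h>
#include <stdio.h>
#include <sys/ptrace.h>
#include <sys/types.h>
#include <sys/uio.h>		/* struct iovec */

int dump_vxrs_low(pid_t pid)
{
	uint64_t vxrs_low[16];
	struct iovec iov = {
		.iov_base = vxrs_low,
		.iov_len  = sizeof(vxrs_low),
	};

	if (ptrace(PTRACE_GETREGSET, pid, NT_S390_VXRS_LOW, &iov) == -1)
		return -1;

	for (int i = 0; i < 16; i++)
		printf("v%d low half = %#llx\n", i,
		       (unsigned long long)vxrs_low[i]);
	return 0;
}
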
1204 save_access_regs(target->thread.acrs); in s390_compat_regs_get()
1233 save_access_regs(target->thread.acrs); in s390_compat_regs_set()
1256 restore_access_regs(target->thread.acrs); in s390_compat_regs_set()