thread 61 arch/arc/include/asm/processor.h #define release_thread(thread) do { } while (0) thread 85 arch/arc/include/asm/processor.h #define TSK_K_ESP(tsk) (tsk->thread.ksp) thread 64 arch/arc/include/asm/unwind.h #define STACK_BOTTOM_UNW(tsk) STACK_LIMIT((tsk)->thread.ksp) thread 65 arch/arc/include/asm/unwind.h #define STACK_TOP_UNW(tsk) ((tsk)->thread.ksp) thread 17 arch/arc/kernel/asm-offsets.c DEFINE(TASK_THREAD, offsetof(struct task_struct, thread)); thread 32 arch/arc/kernel/fpu.c unsigned int *saveto = &prev->thread.fpu.aux_dpfp[0].l; thread 33 arch/arc/kernel/fpu.c unsigned int *readfrom = &next->thread.fpu.aux_dpfp[0].l; thread 60 arch/arc/kernel/kgdb.c current->thread.callee_reg); thread 66 arch/arc/kernel/kgdb.c current->thread.callee_reg); thread 74 arch/arc/kernel/kgdb.c (struct callee_regs *) task->thread.callee_reg); thread 114 arch/arc/kernel/kgdb.c current->thread.callee_reg, thread 171 arch/arc/kernel/kprobes.c (struct callee_regs *) current->thread.callee_reg, thread 197 arch/arc/kernel/process.c p->thread.ksp = (unsigned long)c_callee; /* THREAD_KSP */ thread 15 arch/arc/kernel/ptrace.c struct callee_regs *tmp = (struct callee_regs *)tsk->thread.callee_reg; thread 88 arch/arc/kernel/ptrace.c REG_O_ONE(efa, &target->thread.fault_address); thread 92 arch/arc/kernel/ptrace.c stop_pc_val = target->thread.fault_address; thread 48 arch/arc/kernel/traps.c tsk->thread.fault_address = (__force unsigned int)addr; thread 126 arch/arc/kernel/troubleshoot.c address = current->thread.fault_address; thread 195 arch/arc/kernel/troubleshoot.c current->thread.fault_address, thread 228 arch/arc/kernel/troubleshoot.c cregs = (struct callee_regs *)current->thread.callee_reg; thread 238 arch/arc/kernel/troubleshoot.c current->thread.fault_address = address; thread 195 arch/arc/mm/fault.c tsk->thread.fault_address = address; thread 12 arch/arc/plat-eznps/ctop.c struct eznps_dp *prev_task_dp = &prev->thread.dp; thread 13 arch/arc/plat-eznps/ctop.c struct eznps_dp *next_task_dp = &next->thread.dp; thread 25 arch/arc/plat-eznps/include/plat/mtm.h ({ struct global_id gid; gid.value = cpu; gid.thread; }) thread 32 arch/arc/plat-eznps/include/plat/mtm.h #define get_thread(map) map.thread thread 104 arch/arc/plat-eznps/smp.c u32 num:8, cluster:8, core:8, thread:8; thread 111 arch/arc/plat-eznps/smp.c ipi.thread = get_thread(gid); thread 29 arch/arm/include/asm/thread_notify.h static inline void thread_notify(unsigned long rc, struct thread_info *thread) thread 32 arch/arm/include/asm/thread_notify.h atomic_notifier_call_chain(&thread_notify_head, rc, thread); thread 60 arch/arm/include/asm/tls.h struct thread_info *thread; thread 62 arch/arm/include/asm/tls.h thread = current_thread_info(); thread 64 arch/arm/include/asm/tls.h thread->tp_value[0] = val; thread 21 arch/arm/kernel/pj4-cp0.c struct thread_info *thread = t; thread 34 arch/arm/kernel/pj4-cp0.c iwmmxt_task_release(thread); thread 38 arch/arm/kernel/pj4-cp0.c iwmmxt_task_switch(thread); thread 206 arch/arm/kernel/process.c struct thread_info *thread = current_thread_info(); thread 211 arch/arm/kernel/process.c memset(thread->used_cp, 0, sizeof(thread->used_cp)); thread 212 arch/arm/kernel/process.c memset(&tsk->thread.debug, 0, sizeof(struct debug_info)); thread 213 arch/arm/kernel/process.c memset(&thread->fpstate, 0, sizeof(union fp_state)); thread 217 arch/arm/kernel/process.c thread_notify(THREAD_NOTIFY_FLUSH, thread); thread 230 arch/arm/kernel/process.c struct thread_info *thread = task_thread_info(p); thread 233 
arch/arm/kernel/process.c memset(&thread->cpu_context, 0, sizeof(struct cpu_context_save)); thread 242 arch/arm/kernel/process.c thread->cpu_domain = get_domain(); thread 252 arch/arm/kernel/process.c thread->cpu_context.r4 = stk_sz; thread 253 arch/arm/kernel/process.c thread->cpu_context.r5 = stack_start; thread 256 arch/arm/kernel/process.c thread->cpu_context.pc = (unsigned long)ret_from_fork; thread 257 arch/arm/kernel/process.c thread->cpu_context.sp = (unsigned long)childregs; thread 262 arch/arm/kernel/process.c thread->tp_value[0] = tls; thread 263 arch/arm/kernel/process.c thread->tp_value[1] = get_tpuser(); thread 265 arch/arm/kernel/process.c thread_notify(THREAD_NOTIFY_COPY, thread); thread 268 arch/arm/kernel/process.c thread->stack_canary = p->stack_canary; thread 288 arch/arm/kernel/process.c struct thread_info *thread = current_thread_info(); thread 289 arch/arm/kernel/process.c int used_math = thread->used_cp[1] | thread->used_cp[2]; thread 292 arch/arm/kernel/process.c memcpy(fp, &thread->fpstate.soft, sizeof (*fp)); thread 297 arch/arm/kernel/ptrace.c struct thread_info *thread = task_thread_info(tsk); thread 299 arch/arm/kernel/ptrace.c if (!test_ti_thread_flag(thread, TIF_USING_IWMMXT)) thread 301 arch/arm/kernel/ptrace.c iwmmxt_task_disable(thread); /* force it to ram */ thread 302 arch/arm/kernel/ptrace.c return copy_to_user(ufp, &thread->fpstate.iwmmxt, IWMMXT_SIZE) thread 311 arch/arm/kernel/ptrace.c struct thread_info *thread = task_thread_info(tsk); thread 313 arch/arm/kernel/ptrace.c if (!test_ti_thread_flag(thread, TIF_USING_IWMMXT)) thread 315 arch/arm/kernel/ptrace.c iwmmxt_task_release(thread); /* force a reload */ thread 316 arch/arm/kernel/ptrace.c return copy_from_user(&thread->fpstate.iwmmxt, ufp, IWMMXT_SIZE) thread 328 arch/arm/kernel/ptrace.c struct thread_info *thread = task_thread_info(tsk); thread 330 arch/arm/kernel/ptrace.c crunch_task_disable(thread); /* force it to ram */ thread 331 arch/arm/kernel/ptrace.c return copy_to_user(ufp, &thread->crunchstate, CRUNCH_SIZE) thread 340 arch/arm/kernel/ptrace.c struct thread_info *thread = task_thread_info(tsk); thread 342 arch/arm/kernel/ptrace.c crunch_task_release(thread); /* force a reload */ thread 343 arch/arm/kernel/ptrace.c return copy_from_user(&thread->crunchstate, ufp, CRUNCH_SIZE) thread 386 arch/arm/kernel/ptrace.c if (current->thread.debug.hbp[i] == bp) thread 401 arch/arm/kernel/ptrace.c memset(tsk->thread.debug.hbp, 0, sizeof(tsk->thread.debug.hbp)); thread 411 arch/arm/kernel/ptrace.c struct thread_struct *t = &tsk->thread; thread 475 arch/arm/kernel/ptrace.c bp = tsk->thread.debug.hbp[idx]; thread 531 arch/arm/kernel/ptrace.c bp = tsk->thread.debug.hbp[idx]; thread 538 arch/arm/kernel/ptrace.c tsk->thread.debug.hbp[idx] = bp; thread 619 arch/arm/kernel/ptrace.c struct thread_info *thread = task_thread_info(target); thread 621 arch/arm/kernel/ptrace.c thread->used_cp[1] = thread->used_cp[2] = 1; thread 624 arch/arm/kernel/ptrace.c &thread->fpstate, thread 657 arch/arm/kernel/ptrace.c struct thread_info *thread = task_thread_info(target); thread 658 arch/arm/kernel/ptrace.c struct vfp_hard_struct const *vfp = &thread->vfpstate.hard; thread 662 arch/arm/kernel/ptrace.c vfp_sync_hwstate(thread); thread 694 arch/arm/kernel/ptrace.c struct thread_info *thread = task_thread_info(target); thread 699 arch/arm/kernel/ptrace.c vfp_sync_hwstate(thread); thread 700 arch/arm/kernel/ptrace.c new_vfp = thread->vfpstate.hard; thread 722 arch/arm/kernel/ptrace.c thread->vfpstate.hard = new_vfp; thread 
723 arch/arm/kernel/ptrace.c vfp_flush_hwstate(thread); thread 313 arch/arm/kernel/signal.c .trap_no = current->thread.trap_no, thread 314 arch/arm/kernel/signal.c .error_code = current->thread.error_code, thread 315 arch/arm/kernel/signal.c .fault_address = current->thread.address, thread 32 arch/arm/kernel/thumbee.c struct thread_info *thread = t; thread 40 arch/arm/kernel/thumbee.c teehbr_write(thread->thumbee_state); thread 275 arch/arm/kernel/traps.c ret = notify_die(DIE_OOPS, str, regs, err, tsk->thread.trap_no, SIGSEGV); thread 369 arch/arm/kernel/traps.c current->thread.error_code = err; thread 370 arch/arm/kernel/traps.c current->thread.trap_no = trap; thread 35 arch/arm/kernel/xscale-cp0.c struct thread_info *thread = t; thread 39 arch/arm/kernel/xscale-cp0.c thread->cpu_context.extra[0] = 0; thread 40 arch/arm/kernel/xscale-cp0.c thread->cpu_context.extra[1] = 0; thread 45 arch/arm/kernel/xscale-cp0.c dsp_load_state(thread->cpu_context.extra); thread 60 arch/arm/kernel/xscale-cp0.c struct thread_info *thread = t; thread 73 arch/arm/kernel/xscale-cp0.c iwmmxt_task_release(thread); thread 77 arch/arm/kernel/xscale-cp0.c iwmmxt_task_switch(thread); thread 23 arch/arm/mach-ep93xx/crunch.c void crunch_task_release(struct thread_info *thread) thread 26 arch/arm/mach-ep93xx/crunch.c if (crunch_owner == &thread->crunchstate) thread 38 arch/arm/mach-ep93xx/crunch.c struct thread_info *thread = (struct thread_info *)t; thread 42 arch/arm/mach-ep93xx/crunch.c crunch_state = &thread->crunchstate; thread 55 arch/arm/mach-ep93xx/crunch.c crunch_task_release(thread); thread 107 arch/arm/mm/context.c struct thread_info *thread = t; thread 112 arch/arm/mm/context.c pid = task_pid_nr(thread->task) << ASID_BITS; thread 160 arch/arm/mm/fault.c tsk->thread.address = addr; thread 161 arch/arm/mm/fault.c tsk->thread.error_code = fsr; thread 162 arch/arm/mm/fault.c tsk->thread.trap_no = 14; thread 50 arch/arm/nwfpe/fpmodule.c struct thread_info *thread = v; thread 53 arch/arm/nwfpe/fpmodule.c nwfpe_init_fpa(&thread->fpstate); thread 140 arch/arm/probes/uprobes/core.c utask->autask.saved_trap_no = current->thread.trap_no; thread 141 arch/arm/probes/uprobes/core.c current->thread.trap_no = UPROBE_TRAP_NR; thread 151 arch/arm/probes/uprobes/core.c WARN_ON_ONCE(current->thread.trap_no != UPROBE_TRAP_NR); thread 153 arch/arm/probes/uprobes/core.c current->thread.trap_no = utask->autask.saved_trap_no; thread 164 arch/arm/probes/uprobes/core.c if (t->thread.trap_no != UPROBE_TRAP_NR) thread 174 arch/arm/probes/uprobes/core.c current->thread.trap_no = utask->autask.saved_trap_no; thread 62 arch/arm/vfp/vfpmodule.c static bool vfp_state_in_hw(unsigned int cpu, struct thread_info *thread) thread 65 arch/arm/vfp/vfpmodule.c if (thread->vfpstate.hard.cpu != cpu) thread 68 arch/arm/vfp/vfpmodule.c return vfp_current_hw_state[cpu] == &thread->vfpstate; thread 76 arch/arm/vfp/vfpmodule.c static void vfp_force_reload(unsigned int cpu, struct thread_info *thread) thread 78 arch/arm/vfp/vfpmodule.c if (vfp_state_in_hw(cpu, thread)) { thread 83 arch/arm/vfp/vfpmodule.c thread->vfpstate.hard.cpu = NR_CPUS; thread 90 arch/arm/vfp/vfpmodule.c static void vfp_thread_flush(struct thread_info *thread) thread 92 arch/arm/vfp/vfpmodule.c union vfp_state *vfp = &thread->vfpstate; thread 118 arch/arm/vfp/vfpmodule.c static void vfp_thread_exit(struct thread_info *thread) thread 121 arch/arm/vfp/vfpmodule.c union vfp_state *vfp = &thread->vfpstate; thread 129 arch/arm/vfp/vfpmodule.c static void vfp_thread_copy(struct thread_info 
*thread) thread 134 arch/arm/vfp/vfpmodule.c thread->vfpstate = parent->vfpstate; thread 136 arch/arm/vfp/vfpmodule.c thread->vfpstate.hard.cpu = NR_CPUS; thread 161 arch/arm/vfp/vfpmodule.c struct thread_info *thread = v; thread 172 arch/arm/vfp/vfpmodule.c cpu = thread->cpu; thread 191 arch/arm/vfp/vfpmodule.c vfp_thread_flush(thread); thread 195 arch/arm/vfp/vfpmodule.c vfp_thread_exit(thread); thread 199 arch/arm/vfp/vfpmodule.c vfp_thread_copy(thread); thread 220 arch/arm/vfp/vfpmodule.c current->thread.error_code = 0; thread 221 arch/arm/vfp/vfpmodule.c current->thread.trap_no = 6; thread 516 arch/arm/vfp/vfpmodule.c void vfp_sync_hwstate(struct thread_info *thread) thread 520 arch/arm/vfp/vfpmodule.c if (vfp_state_in_hw(cpu, thread)) { thread 527 arch/arm/vfp/vfpmodule.c vfp_save_state(&thread->vfpstate, fpexc | FPEXC_EN); thread 535 arch/arm/vfp/vfpmodule.c void vfp_flush_hwstate(struct thread_info *thread) thread 539 arch/arm/vfp/vfpmodule.c vfp_force_reload(cpu, thread); thread 551 arch/arm/vfp/vfpmodule.c struct thread_info *thread = current_thread_info(); thread 552 arch/arm/vfp/vfpmodule.c struct vfp_hard_struct *hwstate = &thread->vfpstate.hard; thread 555 arch/arm/vfp/vfpmodule.c vfp_sync_hwstate(thread); thread 576 arch/arm/vfp/vfpmodule.c vfp_flush_hwstate(thread); thread 589 arch/arm/vfp/vfpmodule.c struct thread_info *thread = current_thread_info(); thread 590 arch/arm/vfp/vfpmodule.c struct vfp_hard_struct *hwstate = &thread->vfpstate.hard; thread 594 arch/arm/vfp/vfpmodule.c vfp_flush_hwstate(thread); thread 672 arch/arm/vfp/vfpmodule.c struct thread_info *thread = current_thread_info(); thread 691 arch/arm/vfp/vfpmodule.c if (vfp_state_in_hw(cpu, thread)) thread 692 arch/arm/vfp/vfpmodule.c vfp_save_state(&thread->vfpstate, fpexc); thread 205 arch/arm64/include/asm/compat.h static inline int is_compat_thread(struct thread_info *thread) thread 207 arch/arm64/include/asm/compat.h return test_ti_thread_flag(thread, TIF_32BIT); thread 212 arch/arm64/include/asm/compat.h static inline int is_compat_thread(struct thread_info *thread) thread 64 arch/arm64/include/asm/fpsimd.h static inline void *sve_pffr(struct thread_struct *thread) thread 66 arch/arm64/include/asm/fpsimd.h return (char *)thread->sve_state + sve_ffr_offset(thread->sve_vl); thread 83 arch/arm64/include/asm/pointer_auth.h ptrauth_keys_init(&__ptiu_tsk->thread.keys_user); \ thread 84 arch/arm64/include/asm/pointer_auth.h ptrauth_keys_switch(&__ptiu_tsk->thread.keys_user); \ thread 88 arch/arm64/include/asm/pointer_auth.h ptrauth_keys_switch(&(tsk)->thread.keys_user) thread 169 arch/arm64/include/asm/processor.h __tls = &(t)->thread.uw.tp2_value; \ thread 171 arch/arm64/include/asm/processor.h __tls = &(t)->thread.uw.tp_value; \ thread 175 arch/arm64/include/asm/processor.h #define task_user_tls(t) (&(t)->thread.uw.tp_value) thread 47 arch/arm64/include/asm/thread_info.h ((unsigned long)(tsk->thread.cpu_context.pc)) thread 49 arch/arm64/include/asm/thread_info.h ((unsigned long)(tsk->thread.cpu_context.sp)) thread 51 arch/arm64/include/asm/thread_info.h ((unsigned long)(tsk->thread.cpu_context.fp)) thread 42 arch/arm64/kernel/asm-offsets.c DEFINE(THREAD_CPU_CONTEXT, offsetof(struct task_struct, thread.cpu_context)); thread 205 arch/arm64/kernel/fpsimd.c kfree(task->thread.sve_state); thread 206 arch/arm64/kernel/fpsimd.c task->thread.sve_state = NULL; thread 276 arch/arm64/kernel/fpsimd.c sve_load_state(sve_pffr(&current->thread), thread 277 arch/arm64/kernel/fpsimd.c &current->thread.uw.fpsimd_state.fpsr, thread 278
arch/arm64/kernel/fpsimd.c sve_vq_from_vl(current->thread.sve_vl) - 1); thread 280 arch/arm64/kernel/fpsimd.c fpsimd_load_state(&current->thread.uw.fpsimd_state); thread 438 arch/arm64/kernel/fpsimd.c void *sst = task->thread.sve_state; thread 439 arch/arm64/kernel/fpsimd.c struct user_fpsimd_state const *fst = &task->thread.uw.fpsimd_state; thread 444 arch/arm64/kernel/fpsimd.c vq = sve_vq_from_vl(task->thread.sve_vl); thread 462 arch/arm64/kernel/fpsimd.c void const *sst = task->thread.sve_state; thread 463 arch/arm64/kernel/fpsimd.c struct user_fpsimd_state *fst = &task->thread.uw.fpsimd_state; thread 470 arch/arm64/kernel/fpsimd.c vq = sve_vq_from_vl(task->thread.sve_vl); thread 485 arch/arm64/kernel/fpsimd.c return SVE_SIG_REGS_SIZE(sve_vq_from_vl(task->thread.sve_vl)); thread 500 arch/arm64/kernel/fpsimd.c if (task->thread.sve_state) { thread 501 arch/arm64/kernel/fpsimd.c memset(task->thread.sve_state, 0, sve_state_size(current)); thread 506 arch/arm64/kernel/fpsimd.c task->thread.sve_state = thread 513 arch/arm64/kernel/fpsimd.c BUG_ON(!task->thread.sve_state); thread 560 arch/arm64/kernel/fpsimd.c void *sst = task->thread.sve_state; thread 561 arch/arm64/kernel/fpsimd.c struct user_fpsimd_state const *fst = &task->thread.uw.fpsimd_state; thread 566 arch/arm64/kernel/fpsimd.c vq = sve_vq_from_vl(task->thread.sve_vl); thread 594 arch/arm64/kernel/fpsimd.c task->thread.sve_vl_onexec = vl; thread 597 arch/arm64/kernel/fpsimd.c task->thread.sve_vl_onexec = 0; thread 603 arch/arm64/kernel/fpsimd.c if (vl == task->thread.sve_vl) thread 630 arch/arm64/kernel/fpsimd.c task->thread.sve_vl = vl; thread 650 arch/arm64/kernel/fpsimd.c ret = current->thread.sve_vl_onexec; thread 652 arch/arm64/kernel/fpsimd.c ret = current->thread.sve_vl; thread 1001 arch/arm64/kernel/fpsimd.c &next->thread.uw.fpsimd_state; thread 1002 arch/arm64/kernel/fpsimd.c wrong_cpu = next->thread.fpsimd_cpu != smp_processor_id(); thread 1020 arch/arm64/kernel/fpsimd.c memset(&current->thread.uw.fpsimd_state, 0, thread 1021 arch/arm64/kernel/fpsimd.c sizeof(current->thread.uw.fpsimd_state)); thread 1038 arch/arm64/kernel/fpsimd.c vl = current->thread.sve_vl_onexec ?
thread 1039 arch/arm64/kernel/fpsimd.c current->thread.sve_vl_onexec : sve_default_vl; thread 1048 arch/arm64/kernel/fpsimd.c current->thread.sve_vl = vl; thread 1055 arch/arm64/kernel/fpsimd.c current->thread.sve_vl_onexec = 0; thread 1098 arch/arm64/kernel/fpsimd.c last->st = &current->thread.uw.fpsimd_state; thread 1099 arch/arm64/kernel/fpsimd.c last->sve_state = current->thread.sve_state; thread 1100 arch/arm64/kernel/fpsimd.c last->sve_vl = current->thread.sve_vl; thread 1101 arch/arm64/kernel/fpsimd.c current->thread.fpsimd_cpu = smp_processor_id(); thread 1171 arch/arm64/kernel/fpsimd.c current->thread.uw.fpsimd_state = *state; thread 1196 arch/arm64/kernel/fpsimd.c t->thread.fpsimd_cpu = NR_CPUS; thread 227 arch/arm64/kernel/hw_breakpoint.c struct debug_info *debug_info = &current->thread.debug; thread 632 arch/arm64/kernel/hw_breakpoint.c debug_info = &current->thread.debug; thread 746 arch/arm64/kernel/hw_breakpoint.c debug_info = &current->thread.debug; thread 845 arch/arm64/kernel/hw_breakpoint.c struct debug_info *debug_info = &current->thread.debug; thread 914 arch/arm64/kernel/hw_breakpoint.c current_debug_info = &current->thread.debug; thread 915 arch/arm64/kernel/hw_breakpoint.c next_debug_info = &next->thread.debug; thread 130 arch/arm64/kernel/kgdb.c struct cpu_context *cpu_context = &task->thread.cpu_context; thread 12 arch/arm64/kernel/pointer_auth.c struct ptrauth_keys *keys = &tsk->thread.keys_user; thread 67 arch/arm64/kernel/probes/uprobes.c current->thread.fault_code = UPROBE_INV_FAULT_CODE; thread 81 arch/arm64/kernel/probes/uprobes.c WARN_ON_ONCE(current->thread.fault_code != UPROBE_INV_FAULT_CODE); thread 97 arch/arm64/kernel/probes/uprobes.c if (t->thread.fault_code != UPROBE_INV_FAULT_CODE) thread 302 arch/arm64/kernel/process.c current->thread.uw.tp_value = 0; thread 355 arch/arm64/kernel/process.c dst->thread.sve_state = NULL; thread 368 arch/arm64/kernel/process.c memset(&p->thread.cpu_context, 0, sizeof(struct cpu_context)); thread 401 arch/arm64/kernel/process.c p->thread.uw.tp_value = tls; thread 415 arch/arm64/kernel/process.c p->thread.cpu_context.x19 = stack_start; thread 416 arch/arm64/kernel/process.c p->thread.cpu_context.x20 = stk_sz; thread 418 arch/arm64/kernel/process.c p->thread.cpu_context.pc = (unsigned long)ret_from_fork; thread 419 arch/arm64/kernel/process.c p->thread.cpu_context.sp = (unsigned long)childregs; thread 436 arch/arm64/kernel/process.c write_sysreg(next->thread.uw.tp_value, tpidrro_el0); thread 183 arch/arm64/kernel/ptrace.c if (current->thread.debug.hbp_break[i] == bp) { thread 190 arch/arm64/kernel/ptrace.c if (current->thread.debug.hbp_watch[i] == bp) { thread 212 arch/arm64/kernel/ptrace.c struct thread_struct *t = &tsk->thread; thread 231 arch/arm64/kernel/ptrace.c memset(&tsk->thread.debug, 0, sizeof(struct debug_info)); thread 245 arch/arm64/kernel/ptrace.c bp = tsk->thread.debug.hbp_break[idx]; thread 251 arch/arm64/kernel/ptrace.c bp = tsk->thread.debug.hbp_watch[idx]; thread 271 arch/arm64/kernel/ptrace.c tsk->thread.debug.hbp_break[idx] = bp; thread 278 arch/arm64/kernel/ptrace.c tsk->thread.debug.hbp_watch[idx] = bp; thread 637 arch/arm64/kernel/ptrace.c uregs = &target->thread.uw.fpsimd_state; thread 671 arch/arm64/kernel/ptrace.c newstate = target->thread.uw.fpsimd_state; thread 678 arch/arm64/kernel/ptrace.c target->thread.uw.fpsimd_state = newstate; thread 706 arch/arm64/kernel/ptrace.c unsigned long *tls = &target->thread.uw.tp_value; thread 719 arch/arm64/kernel/ptrace.c unsigned long tls = target->thread.uw.tp_value; thread 725
arch/arm64/kernel/ptrace.c target->thread.uw.tp_value = tls; thread 770 arch/arm64/kernel/ptrace.c header->vl = target->thread.sve_vl; thread 834 arch/arm64/kernel/ptrace.c target->thread.sve_state, thread 853 arch/arm64/kernel/ptrace.c &target->thread.uw.fpsimd_state.fpsr, thread 895 arch/arm64/kernel/ptrace.c vq = sve_vq_from_vl(target->thread.sve_vl); thread 933 arch/arm64/kernel/ptrace.c target->thread.sve_state, thread 952 arch/arm64/kernel/ptrace.c &target->thread.uw.fpsimd_state.fpsr, thread 1024 arch/arm64/kernel/ptrace.c struct ptrauth_keys *keys = &target->thread.keys_user; thread 1041 arch/arm64/kernel/ptrace.c struct ptrauth_keys *keys = &target->thread.keys_user; thread 1075 arch/arm64/kernel/ptrace.c struct ptrauth_keys *keys = &target->thread.keys_user; thread 1092 arch/arm64/kernel/ptrace.c struct ptrauth_keys *keys = &target->thread.keys_user; thread 1368 arch/arm64/kernel/ptrace.c uregs = &target->thread.uw.fpsimd_state; thread 1404 arch/arm64/kernel/ptrace.c uregs = &target->thread.uw.fpsimd_state; thread 1427 arch/arm64/kernel/ptrace.c compat_ulong_t tls = (compat_ulong_t)target->thread.uw.tp_value; thread 1437 arch/arm64/kernel/ptrace.c compat_ulong_t tls = target->thread.uw.tp_value; thread 1443 arch/arm64/kernel/ptrace.c target->thread.uw.tp_value = tls; thread 1735 arch/arm64/kernel/ptrace.c ret = put_user((compat_ulong_t)child->thread.uw.tp_value, thread 173 arch/arm64/kernel/signal.c &current->thread.uw.fpsimd_state; thread 229 arch/arm64/kernel/signal.c unsigned int vl = current->thread.sve_vl; thread 250 arch/arm64/kernel/signal.c current->thread.sve_state, thread 267 arch/arm64/kernel/signal.c if (sve.vl != current->thread.sve_vl) thread 291 arch/arm64/kernel/signal.c err = __copy_from_user(current->thread.sve_state, thread 577 arch/arm64/kernel/signal.c if (add_all || current->thread.fault_code) { thread 591 arch/arm64/kernel/signal.c vl = current->thread.sve_vl; thread 622 arch/arm64/kernel/signal.c __put_user_error(current->thread.fault_address, &sf->uc.uc_mcontext.fault_address, err); thread 639 arch/arm64/kernel/signal.c __put_user_error(current->thread.fault_code, &esr_ctx->esr, err); thread 97 arch/arm64/kernel/signal32.c &current->thread.uw.fpsimd_state; thread 413 arch/arm64/kernel/signal32.c __put_user_error(!!(current->thread.fault_code & ESR_ELx_WNR) << thread 415 arch/arm64/kernel/signal32.c __put_user_error(current->thread.fault_address, &sf->uc.uc_mcontext.fault_address, err); thread 92 arch/arm64/kernel/sys_compat.c current->thread.uw.tp_value = regs->regs[0]; thread 213 arch/arm64/kernel/traps.c unsigned int esr = tsk->thread.fault_code; thread 262 arch/arm64/kernel/traps.c current->thread.fault_address = 0; thread 263 arch/arm64/kernel/traps.c current->thread.fault_code = err; thread 809 arch/arm64/kernel/traps.c current->thread.fault_address = 0; thread 810 arch/arm64/kernel/traps.c current->thread.fault_code = esr; thread 32 arch/arm64/kvm/fpsimd.c struct user_fpsimd_state *fpsimd = &current->thread.uw.fpsimd_state; thread 364 arch/arm64/kvm/hyp/switch.c struct thread_struct *thread = container_of( thread 368 arch/arm64/kvm/hyp/switch.c sve_save_state(sve_pffr(thread), thread 334 arch/arm64/mm/fault.c current->thread.fault_address = address; thread 348 arch/arm64/mm/fault.c if (!is_ttbr0_addr(current->thread.fault_address)) { thread 386 arch/arm64/mm/fault.c current->thread.fault_code = esr; thread 86 arch/c6x/include/asm/processor.h (*(unsigned long *)&(tsk)->thread.b15_14) thread 88 arch/c6x/include/asm/processor.h (*(((unsigned long
*)&(tsk)->thread.b15_14) + 1)) thread 91 arch/c6x/include/asm/processor.h (*(((unsigned long *)&(tsk)->thread.b15_14) + 1)) thread 93 arch/c6x/include/asm/processor.h (*(unsigned long *)&(tsk)->thread.b15_14) thread 23 arch/c6x/include/asm/switch_to.h current->thread.wchan = (u_long) __builtin_return_address(0); \ thread 24 arch/c6x/include/asm/switch_to.h (last) = __switch_to(&(prev)->thread, \ thread 25 arch/c6x/include/asm/switch_to.h &(next)->thread, (prev)); \ thread 27 arch/c6x/include/asm/switch_to.h current->thread.wchan = 0; \ thread 101 arch/c6x/kernel/process.c current->thread.usp = usp; thread 119 arch/c6x/kernel/process.c p->thread.pc = (unsigned long) ret_from_kernel_thread; thread 127 arch/c6x/kernel/process.c p->thread.pc = (unsigned long) ret_from_fork; thread 131 arch/c6x/kernel/process.c p->thread.usp = childregs->sp; thread 133 arch/c6x/kernel/process.c p->thread.wchan = p->thread.pc; thread 150 arch/c6x/kernel/process.c return p->thread.wchan; thread 13 arch/csky/include/asm/switch_to.h save_to_user_fp(&prev->thread.user_fp); thread 14 arch/csky/include/asm/switch_to.h restore_from_user_fp(&next->thread.user_fp); thread 41 arch/csky/include/asm/thread_info.h ((unsigned long)(((struct switch_stack *)(tsk->thread.ksp))->r8)) thread 16 arch/csky/kernel/asm-offsets.c DEFINE(TASK_THREAD, offsetof(struct task_struct, thread)); thread 32 arch/csky/kernel/process.c struct switch_stack *sw = (struct switch_stack *)tsk->thread.ksp; thread 47 arch/csky/kernel/process.c save_to_user_fp(&p->thread.user_fp); thread 54 arch/csky/kernel/process.c p->thread.ksp = (unsigned long)childstack; thread 80 arch/csky/kernel/process.c memcpy(fpu, &current->thread.user_fp, sizeof(*fpu)); thread 115 arch/csky/kernel/ptrace.c struct user_fp *regs = (struct user_fp *)&target->thread.user_fp; thread 141 arch/csky/kernel/ptrace.c struct user_fp *regs = (struct user_fp *)&target->thread.user_fp; thread 133 arch/csky/kernel/traps.c tsk->thread.trap_no = vector; thread 151 arch/csky/kernel/traps.c tsk->thread.trap_no = vector; thread 155 arch/csky/kernel/traps.c tsk->thread.trap_no = vector; thread 160 arch/csky/kernel/traps.c tsk->thread.trap_no = vector; thread 164 arch/csky/kernel/traps.c tsk->thread.trap_no = vector; thread 174 arch/csky/kernel/traps.c tsk->thread.trap_no = vector; thread 182 arch/csky/mm/fault.c tsk->thread.trap_no = (regs->sr >> 16) & 0xff; thread 188 arch/csky/mm/fault.c tsk->thread.trap_no = (regs->sr >> 16) & 0xff; thread 204 arch/csky/mm/fault.c tsk->thread.trap_no = (regs->sr >> 16) & 0xff; thread 214 arch/csky/mm/fault.c tsk->thread.trap_no = (regs->sr >> 16) & 0xff; thread 113 arch/h8300/include/asm/processor.h if ((tsk)->thread.esp0 > PAGE_SIZE && \ thread 114 arch/h8300/include/asm/processor.h MAP_NR((tsk)->thread.esp0) < max_mapnr) \ thread 115 arch/h8300/include/asm/processor.h eip = ((struct pt_regs *) (tsk)->thread.esp0)->pc; \ thread 118 arch/h8300/include/asm/processor.h #define KSTK_ESP(tsk) ((tsk) == current ?
rdusp() : (tsk)->thread.usp) thread 29 arch/h8300/include/asm/ptrace.h #define signal_pt_regs() ((struct pt_regs *)current->thread.esp0) thread 45 arch/h8300/include/asm/switch_to.h : "r" (&(prev->thread)), \ thread 46 arch/h8300/include/asm/switch_to.h "r" (&(next->thread)), \ thread 28 arch/h8300/kernel/asm-offsets.c OFFSET(TASK_THREAD, task_struct, thread); thread 92 arch/h8300/kernel/kgdb.c gdb_regs[GDB_SP] = p->thread.ksp; thread 126 arch/h8300/kernel/process.c p->thread.usp = usp ?: rdusp(); thread 128 arch/h8300/kernel/process.c p->thread.ksp = (unsigned long)childregs; thread 143 arch/h8300/kernel/process.c fp = ((struct pt_regs *)p->thread.ksp)->er6; thread 43 arch/h8300/kernel/ptrace.c return task->thread.usp + sizeof(long)*2; thread 46 arch/h8300/kernel/ptrace.c return *(unsigned short *)(task->thread.esp0 + thread 49 arch/h8300/kernel/ptrace.c return *(unsigned long *)(task->thread.esp0 + thread 61 arch/h8300/kernel/ptrace.c task->thread.usp = data - sizeof(long)*2; thread 63 arch/h8300/kernel/ptrace.c oldccr = *(unsigned short *)(task->thread.esp0 + thread 68 arch/h8300/kernel/ptrace.c *(unsigned short *)(task->thread.esp0 + thread 72 arch/h8300/kernel/ptrace.c oldexr = *(unsigned short *)(task->thread.esp0 + thread 77 arch/h8300/kernel/ptrace.c *(unsigned short *)(task->thread.esp0 + thread 81 arch/h8300/kernel/ptrace.c *(unsigned long *)(task->thread.esp0 + thread 20 arch/h8300/kernel/ptrace_h.c if ((long)child->thread.breakinfo.addr != -1L) { thread 21 arch/h8300/kernel/ptrace_h.c *(child->thread.breakinfo.addr) = child->thread.breakinfo.inst; thread 22 arch/h8300/kernel/ptrace_h.c child->thread.breakinfo.addr = (unsigned short *)-1L; thread 244 arch/h8300/kernel/ptrace_h.c child->thread.breakinfo.addr = next; thread 245 arch/h8300/kernel/ptrace_h.c child->thread.breakinfo.inst = *next; thread 251 arch/h8300/kernel/ptrace_h.c if ((unsigned long)current->thread.breakinfo.addr == bp) { thread 266 arch/h8300/kernel/signal.c current->thread.esp0 = (unsigned long) regs; thread 48 arch/h8300/kernel/traps.c current->thread.esp0 = ssp; thread 85 arch/hexagon/kernel/asm-offsets.c OFFSET(_TASK_STRUCT_THREAD, task_struct, thread); thread 75 arch/hexagon/kernel/process.c p->thread.switch_sp = ss; thread 142 arch/hexagon/kernel/process.c fp = ((struct hexagon_switch_stack *)p->thread.switch_sp)->fp; thread 183 arch/hexagon/kernel/smp.c struct thread_info *thread = (struct thread_info *)idle->stack; thread 186 arch/hexagon/kernel/smp.c thread->cpu = cpu; thread 189 arch/hexagon/kernel/smp.c stack_start = ((void *) thread) + THREAD_SIZE; thread 106 arch/hexagon/kernel/traps.c task->thread.switch_sp)->fp; thread 41 arch/ia64/include/asm/processor.h #define TASK_UNMAPPED_BASE (current->thread.map_base) thread 252 arch/ia64/include/asm/processor.h (task)->thread.flags = (((task)->thread.flags & ~IA64_THREAD_UAC_MASK) \ thread 258 arch/ia64/include/asm/processor.h put_user(((task)->thread.flags & IA64_THREAD_UAC_MASK) >> IA64_THREAD_UAC_SHIFT, \ thread 264 arch/ia64/include/asm/processor.h (task)->thread.flags = (((task)->thread.flags & ~IA64_THREAD_FPEMU_MASK) \ thread 270 arch/ia64/include/asm/processor.h put_user(((task)->thread.flags & IA64_THREAD_FPEMU_MASK) >> IA64_THREAD_FPEMU_SHIFT, \ thread 316 arch/ia64/include/asm/processor.h regs->ar_bspstore = current->thread.rbs_bot; \ thread 353 arch/ia64/include/asm/processor.h #define KSTK_ESP(tsk) ((tsk)->thread.ksp) thread 403 arch/ia64/include/asm/processor.h (__ia64_islfo_task->thread.last_fph_cpu == smp_processor_id() \ thread 413 
arch/ia64/include/asm/processor.h __ia64_slfo_task->thread.last_fph_cpu = smp_processor_id(); \ thread 418 arch/ia64/include/asm/processor.h #define ia64_drop_fpu(t) ((t)->thread.last_fph_cpu = -1) thread 42 arch/ia64/include/asm/switch_to.h ((t)->thread.flags & (IA64_THREAD_DBG_VALID|IA64_THREAD_PM_VALID) \ thread 64 arch/ia64/include/asm/switch_to.h (prev)->thread.flags |= IA64_THREAD_FPH_VALID; \ thread 65 arch/ia64/include/asm/switch_to.h __ia64_save_fpu((prev)->thread.fph); \ thread 69 arch/ia64/include/asm/switch_to.h if (unlikely((current->thread.flags & IA64_THREAD_MIGRATION) && \ thread 61 arch/ia64/kernel/asm-offsets.c DEFINE(IA64_TASK_THREAD_KSP_OFFSET, offsetof (struct task_struct, thread.ksp)); thread 62 arch/ia64/kernel/asm-offsets.c DEFINE(IA64_TASK_THREAD_ON_USTACK_OFFSET, offsetof (struct task_struct, thread.on_ustack)); thread 144 arch/ia64/kernel/crash.c current->thread.ksp = (__u64)info->sw - 16; thread 95 arch/ia64/kernel/machine_kexec.c current->thread.ksp = (__u64)info->sw - 16; thread 1121 arch/ia64/kernel/mca.c previous_current->thread.ksp = (u64)p - 16; thread 349 arch/ia64/kernel/perfmon.c #define PFM_GET_CTX(t) ((pfm_context_t *)(t)->thread.pfm_context) thread 370 arch/ia64/kernel/perfmon.c #define PFM_SET_WORK_PENDING(t, v) do { (t)->thread.pfm_needs_checking = v; } while(0); thread 371 arch/ia64/kernel/perfmon.c #define PFM_GET_WORK_PENDING(t) (t)->thread.pfm_needs_checking thread 3381 arch/ia64/kernel/perfmon.c pfm_context_t *ctx = task->thread.pfm_context; thread 3392 arch/ia64/kernel/perfmon.c if (task->thread.flags & IA64_THREAD_DBG_VALID) return 0; thread 3629 arch/ia64/kernel/perfmon.c struct thread_struct *thread = NULL; thread 3654 arch/ia64/kernel/perfmon.c thread = &task->thread; thread 3679 arch/ia64/kernel/perfmon.c if (is_loaded && (thread->flags & IA64_THREAD_DBG_VALID) != 0) { thread 4091 arch/ia64/kernel/perfmon.c if (t->thread.pfm_context == ctx) { thread 4108 arch/ia64/kernel/perfmon.c struct thread_struct *thread; thread 4156 arch/ia64/kernel/perfmon.c thread = &task->thread; thread 4164 arch/ia64/kernel/perfmon.c if (thread->flags & IA64_THREAD_DBG_VALID) { thread 4222 arch/ia64/kernel/perfmon.c thread->pfm_context, ctx)); thread 4225 arch/ia64/kernel/perfmon.c old = ia64_cmpxchg(acq, &thread->pfm_context, NULL, ctx, sizeof(pfm_context_t *)); thread 4249 arch/ia64/kernel/perfmon.c thread->flags |= IA64_THREAD_PM_VALID; thread 4421 arch/ia64/kernel/perfmon.c task->thread.pfm_context = NULL; thread 4470 arch/ia64/kernel/perfmon.c task->thread.flags &= ~IA64_THREAD_PM_VALID; thread 4475 arch/ia64/kernel/perfmon.c task->thread.pfm_context = NULL; thread 5433 arch/ia64/kernel/perfmon.c if (ctx->ctx_fl_system == 0 && (task->thread.flags & IA64_THREAD_PM_VALID) == 0) thread 5735 arch/ia64/kernel/perfmon.c task->thread.pfm_context = NULL; thread 5736 arch/ia64/kernel/perfmon.c task->thread.flags &= ~IA64_THREAD_PM_VALID; thread 5944 arch/ia64/kernel/perfmon.c if (unlikely((task->thread.flags & IA64_THREAD_PM_VALID) == 0)) return; thread 6682 arch/ia64/kernel/perfmon.c struct thread_struct *thread; thread 6686 arch/ia64/kernel/perfmon.c thread = &task->thread; thread 6691 arch/ia64/kernel/perfmon.c thread->pfm_context = NULL; thread 71 arch/ia64/kernel/perfmon_itanium.h if (task && (task->thread.flags & IA64_THREAD_DBG_VALID) != 0) return -EINVAL; thread 90 arch/ia64/kernel/perfmon_itanium.h if (task && (task->thread.flags & IA64_THREAD_DBG_VALID) != 0) return -EINVAL; thread 110 arch/ia64/kernel/perfmon_mckinley.h if (task && (task->thread.flags & 
IA64_THREAD_DBG_VALID) != 0) return -EINVAL; thread 128 arch/ia64/kernel/perfmon_mckinley.h if (task && (task->thread.flags & IA64_THREAD_DBG_VALID) != 0) return -EINVAL; thread 194 arch/ia64/kernel/perfmon_montecito.h if (task && (task->thread.flags & IA64_THREAD_DBG_VALID) != 0) return -EINVAL; thread 214 arch/ia64/kernel/perfmon_montecito.h if (task && (task->thread.flags & IA64_THREAD_DBG_VALID) != 0) return -EINVAL; thread 177 arch/ia64/kernel/process.c if (current->thread.pfm_needs_checking) thread 270 arch/ia64/kernel/process.c if ((task->thread.flags & IA64_THREAD_DBG_VALID) != 0) thread 271 arch/ia64/kernel/process.c ia64_save_debug_regs(&task->thread.dbr[0]); thread 274 arch/ia64/kernel/process.c if ((task->thread.flags & IA64_THREAD_PM_VALID) != 0) thread 290 arch/ia64/kernel/process.c if ((task->thread.flags & IA64_THREAD_DBG_VALID) != 0) thread 291 arch/ia64/kernel/process.c ia64_load_debug_regs(&task->thread.dbr[0]); thread 294 arch/ia64/kernel/process.c if ((task->thread.flags & IA64_THREAD_PM_VALID) != 0) thread 353 arch/ia64/kernel/process.c p->thread.ksp = (unsigned long) child_stack - 16; thread 374 arch/ia64/kernel/process.c p->thread.flags = ((current->thread.flags & ~THREAD_FLAGS_TO_CLEAR) thread 437 arch/ia64/kernel/process.c if (current->thread.pfm_context) thread 533 arch/ia64/kernel/process.c if ((task->thread.flags & IA64_THREAD_FPH_VALID) != 0) thread 534 arch/ia64/kernel/process.c memcpy(dst + 32, task->thread.fph, 96*16); thread 569 arch/ia64/kernel/process.c current->thread.flags &= ~(IA64_THREAD_FPH_VALID | IA64_THREAD_DBG_VALID); thread 584 arch/ia64/kernel/process.c if (tsk->thread.pfm_context) thread 588 arch/ia64/kernel/process.c if (tsk->thread.flags & IA64_THREAD_DBG_VALID) thread 696 arch/ia64/kernel/ptrace.c task->thread.flags |= IA64_THREAD_FPH_VALID; thread 697 arch/ia64/kernel/ptrace.c ia64_save_fpu(&task->thread.fph[0]); thread 716 arch/ia64/kernel/ptrace.c if (!(task->thread.flags & IA64_THREAD_FPH_VALID)) { thread 717 arch/ia64/kernel/ptrace.c task->thread.flags |= IA64_THREAD_FPH_VALID; thread 718 arch/ia64/kernel/ptrace.c memset(&task->thread.fph, 0, sizeof(task->thread.fph)); thread 843 arch/ia64/kernel/ptrace.c sw = (struct switch_stack *) (child->thread.ksp + 16); thread 956 arch/ia64/kernel/ptrace.c retval |= __copy_to_user(&ppr->fr[32], &child->thread.fph, thread 988 arch/ia64/kernel/ptrace.c sw = (struct switch_stack *) (child->thread.ksp + 16); thread 1093 arch/ia64/kernel/ptrace.c retval |= __copy_from_user(&child->thread.fph, &ppr->fr[32], thread 1735 arch/ia64/kernel/ptrace.c if (task->thread.flags & IA64_THREAD_FPH_VALID) thread 1739 arch/ia64/kernel/ptrace.c &dst->target->thread.fph, thread 1818 arch/ia64/kernel/ptrace.c &dst->target->thread.fph, thread 1891 arch/ia64/kernel/ptrace.c return (target->thread.flags & IA64_THREAD_FPH_VALID) ? 
128 : 32; thread 2056 arch/ia64/kernel/ptrace.c ptr = &child->thread.ibr[0]; thread 2059 arch/ia64/kernel/ptrace.c ptr = &child->thread.dbr[0]; thread 2089 arch/ia64/kernel/ptrace.c if (!(child->thread.flags & IA64_THREAD_DBG_VALID)) { thread 2090 arch/ia64/kernel/ptrace.c child->thread.flags |= IA64_THREAD_DBG_VALID; thread 2091 arch/ia64/kernel/ptrace.c memset(child->thread.dbr, 0, thread 2092 arch/ia64/kernel/ptrace.c sizeof(child->thread.dbr)); thread 2093 arch/ia64/kernel/ptrace.c memset(child->thread.ibr, 0, thread 2094 arch/ia64/kernel/ptrace.c sizeof(child->thread.ibr)); thread 93 arch/ia64/kernel/signal.c err |= __copy_from_user(current->thread.fph, &sc->sc_fr[32], 96*16); thread 100 arch/ia64/kernel/signal.c __ia64_load_fpu(current->thread.fph); thread 180 arch/ia64/kernel/signal.c if ((current->thread.flags & IA64_THREAD_FPH_VALID)) { thread 182 arch/ia64/kernel/signal.c err = __copy_to_user(&sc->sc_fr[32], current->thread.fph, 96*16); thread 218 arch/ia64/kernel/traps.c if ((current->thread.flags & IA64_THREAD_FPH_VALID) != 0) { thread 219 arch/ia64/kernel/traps.c __ia64_load_fpu(current->thread.fph); thread 295 arch/ia64/kernel/traps.c if (!(current->thread.flags & IA64_THREAD_FPEMU_NOPRINT)) { thread 551 arch/ia64/kernel/traps.c if ((result < 0) || (current->thread.flags & IA64_THREAD_FPEMU_SIGFPE)) { thread 538 arch/ia64/kernel/unaligned.c current->thread.fph[fph_index(regs, regnum)] = *fpval; thread 597 arch/ia64/kernel/unaligned.c *fpval = current->thread.fph[fph_index(regs, regnum)]; thread 1323 arch/ia64/kernel/unaligned.c if ((current->thread.flags & IA64_THREAD_UAC_SIGBUS) != 0) thread 1327 arch/ia64/kernel/unaligned.c !(current->thread.flags & IA64_THREAD_UAC_NOPRINT) && thread 479 arch/ia64/kernel/unwind.c addr = t->thread.fph + (regnum - 32); thread 2074 arch/ia64/kernel/unwind.c struct switch_stack *sw = (struct switch_stack *) (t->thread.ksp + 16); thread 93 arch/ia64/mm/init.c current->thread.rbs_bot = PAGE_ALIGN(current->mm->start_stack - stack_size); thread 117 arch/ia64/mm/init.c vma->vm_start = current->thread.rbs_bot & PAGE_MASK; thread 128 arch/m68k/include/asm/math-emu.h #define FPDATA ((struct fp_data *)current->thread.fp) thread 108 arch/m68k/include/asm/mmu_context.h mmuar = task->thread.ksp; thread 133 arch/m68k/include/asm/processor.h if ((tsk)->thread.esp0 > PAGE_SIZE && \ thread 134 arch/m68k/include/asm/processor.h (virt_addr_valid((tsk)->thread.esp0))) \ thread 135 arch/m68k/include/asm/processor.h eip = ((struct pt_regs *) (tsk)->thread.esp0)->pc; \ thread 137 arch/m68k/include/asm/processor.h #define KSTK_ESP(tsk) ((tsk) == current ? rdusp() : (tsk)->thread.usp) thread 139 arch/m68k/include/asm/processor.h #define task_pt_regs(tsk) ((struct pt_regs *) ((tsk)->thread.esp0)) thread 26 arch/m68k/kernel/asm-offsets.c DEFINE(TASK_THREAD, offsetof(struct task_struct, thread)); thread 95 arch/m68k/kernel/process.c current->thread.fs = __USER_DS; thread 132 arch/m68k/kernel/process.c p->thread.ksp = (unsigned long)frame; thread 133 arch/m68k/kernel/process.c p->thread.esp0 = (unsigned long)&frame->regs; thread 139 arch/m68k/kernel/process.c p->thread.fs = get_fs().seg; thread 148 arch/m68k/kernel/process.c p->thread.usp = 0; thread 155 arch/m68k/kernel/process.c p->thread.usp = usp ?: rdusp(); thread 163 arch/m68k/kernel/process.c asm volatile ("fsave %0" : : "m" (p->thread.fpstate[0]) : "memory"); thread 165 arch/m68k/kernel/process.c if (!CPU_IS_060 ? 
p->thread.fpstate[0] : p->thread.fpstate[2]) { thread 172 arch/m68k/kernel/process.c : "m" (p->thread.fp[0]), thread 173 arch/m68k/kernel/process.c "m" (p->thread.fpcntl[0]), thread 174 arch/m68k/kernel/process.c "m" (p->thread.fpcntl[1]), thread 175 arch/m68k/kernel/process.c "m" (p->thread.fpcntl[2]) thread 181 arch/m68k/kernel/process.c : "m" (p->thread.fp[0]), thread 182 arch/m68k/kernel/process.c "m" (p->thread.fpcntl[0]) thread 188 arch/m68k/kernel/process.c asm volatile ("frestore %0" : : "m" (p->thread.fpstate[0])); thread 201 arch/m68k/kernel/process.c memcpy(fpu->fpcntl, current->thread.fpcntl, 12); thread 202 arch/m68k/kernel/process.c memcpy(fpu->fpregs, current->thread.fp, 96); thread 256 arch/m68k/kernel/process.c fp = ((struct switch_stack *)p->thread.ksp)->a6; thread 80 arch/m68k/kernel/ptrace.c addr = &task->thread.usp; thread 82 arch/m68k/kernel/ptrace.c addr = (unsigned long *)(task->thread.esp0 + regoff[regno]); thread 87 arch/m68k/kernel/ptrace.c long stkadj = *(long *)(task->thread.esp0 + PT_REG(stkadj)); thread 105 arch/m68k/kernel/ptrace.c addr = &task->thread.usp; thread 107 arch/m68k/kernel/ptrace.c addr = (unsigned long *)(task->thread.esp0 + regoff[regno]); thread 112 arch/m68k/kernel/ptrace.c long stkadj = *(long *)(task->thread.esp0 + PT_REG(stkadj)); thread 179 arch/m68k/kernel/ptrace.c tmp = child->thread.fp[regno - 21]; thread 220 arch/m68k/kernel/ptrace.c child->thread.fp[regno - 21] = data; thread 250 arch/m68k/kernel/ptrace.c if (copy_to_user(datap, &child->thread.fp, thread 256 arch/m68k/kernel/ptrace.c if (copy_from_user(&child->thread.fp, datap, thread 262 arch/m68k/kernel/signal.c memcpy(current->thread.fpcntl, sc->sc_fpcntl, 12); thread 263 arch/m68k/kernel/signal.c memcpy(current->thread.fp, sc->sc_fpregs, 24); thread 341 arch/m68k/kernel/signal.c if (__copy_from_user(current->thread.fpcntl, thread 345 arch/m68k/kernel/signal.c if (__copy_from_user(current->thread.fp, thread 434 arch/m68k/kernel/signal.c memcpy(sc->sc_fpcntl, current->thread.fpcntl, 12); thread 435 arch/m68k/kernel/signal.c memcpy(sc->sc_fpregs, current->thread.fp, 24); thread 492 arch/m68k/kernel/signal.c current->thread.fpcntl, 12); thread 495 arch/m68k/kernel/signal.c current->thread.fp, 96); thread 1116 arch/m68k/kernel/signal.c current->thread.esp0 = (unsigned long) regs; thread 239 arch/m68k/kernel/traps.c if (wba != current->thread.faddr) thread 357 arch/m68k/kernel/traps.c current->thread.signo = SIGBUS; thread 358 arch/m68k/kernel/traps.c current->thread.faddr = fp->un.fmt7.faddr; thread 761 arch/m68k/kernel/traps.c current->thread.esp0 = (unsigned long) fp; thread 946 arch/m68k/kernel/traps.c stack = (unsigned long *)task->thread.esp0; thread 1147 arch/m68k/kernel/traps.c current->thread.esp0 = ssp; thread 27 arch/m68k/mm/fault.c signo = current->thread.signo; thread 28 arch/m68k/mm/fault.c si_code = current->thread.code; thread 29 arch/m68k/mm/fault.c addr = (void __user *)current->thread.faddr; thread 195 arch/m68k/mm/fault.c current->thread.signo = SIGBUS; thread 196 arch/m68k/mm/fault.c current->thread.faddr = address; thread 200 arch/m68k/mm/fault.c current->thread.signo = SIGBUS; thread 201 arch/m68k/mm/fault.c current->thread.code = BUS_ADRERR; thread 202 arch/m68k/mm/fault.c current->thread.faddr = address; thread 206 arch/m68k/mm/fault.c current->thread.signo = SIGSEGV; thread 207 arch/m68k/mm/fault.c current->thread.code = SEGV_MAPERR; thread 208 arch/m68k/mm/fault.c current->thread.faddr = address; thread 212 arch/m68k/mm/fault.c current->thread.signo = 
SIGSEGV; thread 213 arch/m68k/mm/fault.c current->thread.code = SEGV_ACCERR; thread 214 arch/m68k/mm/fault.c current->thread.faddr = address; thread 123 arch/microblaze/include/asm/mmu_context_mm.h tsk->thread.pgdir = next->pgd; thread 135 arch/microblaze/include/asm/mmu_context_mm.h current->thread.pgdir = mm->pgd; thread 81 arch/microblaze/kernel/asm-offsets.c DEFINE(TASK_THREAD, offsetof(struct task_struct, thread)); thread 97 arch/microblaze/kernel/kgdb.c unsigned long *pt_regb = (unsigned long *)(p->thread.regs); thread 173 arch/microblaze/mm/fault.c struct pt_regs *uregs = current->thread.regs; thread 31 arch/mips/cavium-octeon/cpu.c prefetch(&current->thread.cp2); thread 36 arch/mips/cavium-octeon/cpu.c octeon_cop2_restore(&(current->thread.cp2)); thread 37 arch/mips/cavium-octeon/crypto/octeon-crypto.c octeon_cop2_save(&(current->thread.cp2)); thread 16 arch/mips/include/asm/asmmacro-32.h .macro fpu_save_single thread tmp=t0 thread 40 arch/mips/include/asm/asmmacro-32.h .macro fpu_restore_single thread tmp=t0 thread 64 arch/mips/include/asm/asmmacro-32.h .macro cpu_save_nonscratch thread thread 77 arch/mips/include/asm/asmmacro-32.h .macro cpu_restore_nonscratch thread thread 17 arch/mips/include/asm/asmmacro-64.h .macro cpu_save_nonscratch thread thread 30 arch/mips/include/asm/asmmacro-64.h .macro cpu_restore_nonscratch thread thread 84 arch/mips/include/asm/asmmacro.h .macro fpu_save_16even thread tmp=t0 thread 108 arch/mips/include/asm/asmmacro.h .macro fpu_save_16odd thread thread 132 arch/mips/include/asm/asmmacro.h .macro fpu_save_double thread status tmp thread 143 arch/mips/include/asm/asmmacro.h .macro fpu_restore_16even thread tmp=t0 thread 167 arch/mips/include/asm/asmmacro.h .macro fpu_restore_16odd thread thread 191 arch/mips/include/asm/asmmacro.h .macro fpu_restore_double thread status tmp thread 517 arch/mips/include/asm/asmmacro.h .macro msa_save_all thread thread 561 arch/mips/include/asm/asmmacro.h .macro msa_restore_all thread thread 19 arch/mips/include/asm/cop2.h #define cop2_save(r) octeon_cop2_save(&(r)->thread.cp2) thread 20 arch/mips/include/asm/cop2.h #define cop2_restore(r) octeon_cop2_restore(&(r)->thread.cp2) thread 30 arch/mips/include/asm/cop2.h #define cop2_save(r) nlm_cop2_save(&(r)->thread.cp2) thread 31 arch/mips/include/asm/cop2.h #define cop2_restore(r) nlm_cop2_restore(&(r)->thread.cp2) thread 41 arch/mips/include/asm/dsp.h tsk->thread.dsp.dspr[0] = mfhi1(); \ thread 42 arch/mips/include/asm/dsp.h tsk->thread.dsp.dspr[1] = mflo1(); \ thread 43 arch/mips/include/asm/dsp.h tsk->thread.dsp.dspr[2] = mfhi2(); \ thread 44 arch/mips/include/asm/dsp.h tsk->thread.dsp.dspr[3] = mflo2(); \ thread 45 arch/mips/include/asm/dsp.h tsk->thread.dsp.dspr[4] = mfhi3(); \ thread 46 arch/mips/include/asm/dsp.h tsk->thread.dsp.dspr[5] = mflo3(); \ thread 47 arch/mips/include/asm/dsp.h tsk->thread.dsp.dspcontrol = rddsp(DSP_MASK); \ thread 58 arch/mips/include/asm/dsp.h mthi1(tsk->thread.dsp.dspr[0]); \ thread 59 arch/mips/include/asm/dsp.h mtlo1(tsk->thread.dsp.dspr[1]); \ thread 60 arch/mips/include/asm/dsp.h mthi2(tsk->thread.dsp.dspr[2]); \ thread 61 arch/mips/include/asm/dsp.h mtlo2(tsk->thread.dsp.dspr[3]); \ thread 62 arch/mips/include/asm/dsp.h mthi3(tsk->thread.dsp.dspr[4]); \ thread 63 arch/mips/include/asm/dsp.h mtlo3(tsk->thread.dsp.dspr[5]); \ thread 64 arch/mips/include/asm/dsp.h wrdsp(tsk->thread.dsp.dspcontrol, DSP_MASK); \ thread 78 arch/mips/include/asm/dsp.h tsk->thread.dsp.dspr; \ thread 350 arch/mips/include/asm/elf.h current->thread.abi =
&mips_abi; \ thread 368 arch/mips/include/asm/elf.h current->thread.abi = &mips_abi_n32; \ thread 383 arch/mips/include/asm/elf.h current->thread.abi = &mips_abi_32; \ thread 417 arch/mips/include/asm/elf.h current->thread.abi = &mips_abi; \ thread 169 arch/mips/include/asm/fpu.h tsk->thread.fpu.fcr31 = thread 213 arch/mips/include/asm/fpu.h memset(&target->thread.fpu.fpr, ~0, sizeof(target->thread.fpu.fpr)); thread 248 arch/mips/include/asm/fpu.h return tsk->thread.fpu.fpr; thread 368 arch/mips/include/asm/processor.h #define release_thread(thread) do { } while(0) thread 60 arch/mips/include/asm/switch_to.h prev->cpus_mask = prev->thread.user_cpus_allowed; \ thread 62 arch/mips/include/asm/switch_to.h next->thread.emulated_fp = 0; \ thread 90 arch/mips/include/asm/switch_to.h unsigned long fcr31 = mask_fcr31_x(next->thread.fpu.fcr31); \ thread 95 arch/mips/include/asm/switch_to.h next->thread.fpu.fcr31 &= ~fcr31; \ thread 113 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_REG16, task_struct, thread.reg16); thread 114 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_REG17, task_struct, thread.reg17); thread 115 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_REG18, task_struct, thread.reg18); thread 116 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_REG19, task_struct, thread.reg19); thread 117 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_REG20, task_struct, thread.reg20); thread 118 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_REG21, task_struct, thread.reg21); thread 119 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_REG22, task_struct, thread.reg22); thread 120 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_REG23, task_struct, thread.reg23); thread 121 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_REG29, task_struct, thread.reg29); thread 122 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_REG30, task_struct, thread.reg30); thread 123 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_REG31, task_struct, thread.reg31); thread 125 arch/mips/kernel/asm-offsets.c thread.cp0_status); thread 128 arch/mips/kernel/asm-offsets.c thread.cp0_badvaddr); thread 130 arch/mips/kernel/asm-offsets.c thread.cp0_baduaddr); thread 132 arch/mips/kernel/asm-offsets.c thread.error_code); thread 133 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_TRAPNO, task_struct, thread.trap_nr); thread 140 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPU, task_struct, thread.fpu); thread 142 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR0, task_struct, thread.fpu.fpr[0]); thread 143 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR1, task_struct, thread.fpu.fpr[1]); thread 144 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR2, task_struct, thread.fpu.fpr[2]); thread 145 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR3, task_struct, thread.fpu.fpr[3]); thread 146 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR4, task_struct, thread.fpu.fpr[4]); thread 147 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR5, task_struct, thread.fpu.fpr[5]); thread 148 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR6, task_struct, thread.fpu.fpr[6]); thread 149 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR7, task_struct, thread.fpu.fpr[7]); thread 150 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR8, task_struct, thread.fpu.fpr[8]); thread 151 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR9, task_struct, thread.fpu.fpr[9]); thread 152 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR10, task_struct, thread.fpu.fpr[10]); thread 153 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR11, task_struct, thread.fpu.fpr[11]); thread 154 arch/mips/kernel/asm-offsets.c 
OFFSET(THREAD_FPR12, task_struct, thread.fpu.fpr[12]); thread 155 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR13, task_struct, thread.fpu.fpr[13]); thread 156 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR14, task_struct, thread.fpu.fpr[14]); thread 157 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR15, task_struct, thread.fpu.fpr[15]); thread 158 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR16, task_struct, thread.fpu.fpr[16]); thread 159 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR17, task_struct, thread.fpu.fpr[17]); thread 160 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR18, task_struct, thread.fpu.fpr[18]); thread 161 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR19, task_struct, thread.fpu.fpr[19]); thread 162 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR20, task_struct, thread.fpu.fpr[20]); thread 163 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR21, task_struct, thread.fpu.fpr[21]); thread 164 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR22, task_struct, thread.fpu.fpr[22]); thread 165 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR23, task_struct, thread.fpu.fpr[23]); thread 166 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR24, task_struct, thread.fpu.fpr[24]); thread 167 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR25, task_struct, thread.fpu.fpr[25]); thread 168 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR26, task_struct, thread.fpu.fpr[26]); thread 169 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR27, task_struct, thread.fpu.fpr[27]); thread 170 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR28, task_struct, thread.fpu.fpr[28]); thread 171 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR29, task_struct, thread.fpu.fpr[29]); thread 172 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR30, task_struct, thread.fpu.fpr[30]); thread 173 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FPR31, task_struct, thread.fpu.fpr[31]); thread 175 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_FCR31, task_struct, thread.fpu.fcr31); thread 176 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_MSA_CSR, task_struct, thread.fpu.msacsr); thread 315 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_CP2, task_struct, thread.cp2); thread 316 arch/mips/kernel/asm-offsets.c OFFSET(THREAD_CVMSEG, task_struct, thread.cvmseg.cvmseg); thread 154 arch/mips/kernel/branch.c fcr31 = current->thread.fpu.fcr31; thread 694 arch/mips/kernel/branch.c bit = get_fpr32(&current->thread.fpu.fpr[reg], 0) & 0x1; thread 712 arch/mips/kernel/branch.c fcr31 = current->thread.fpu.fcr31; thread 312 arch/mips/kernel/elf.c t->thread.fpu.fcr31 = c->fpu_csr31; thread 318 arch/mips/kernel/elf.c t->thread.fpu.fcr31 |= FPU_CSR_NAN2008; thread 320 arch/mips/kernel/elf.c t->thread.fpu.fcr31 |= FPU_CSR_ABS2008; thread 147 arch/mips/kernel/kgdb.c memcpy((void *)&current->thread.fpu.fcr31, mem, thread 155 arch/mips/kernel/kgdb.c memcpy((void *)&current->thread.fpu.fpr[fp_reg], mem, thread 182 arch/mips/kernel/kgdb.c memcpy(mem, (void *)&current->thread.fpu.fcr31, thread 191 arch/mips/kernel/kgdb.c memcpy(mem, (void *)&current->thread.fpu.fpr[fp_reg], thread 251 arch/mips/kernel/kgdb.c *(ptr++) = p->thread.reg16; thread 252 arch/mips/kernel/kgdb.c *(ptr++) = p->thread.reg17; thread 253 arch/mips/kernel/kgdb.c *(ptr++) = p->thread.reg18; thread 254 arch/mips/kernel/kgdb.c *(ptr++) = p->thread.reg19; thread 255 arch/mips/kernel/kgdb.c *(ptr++) = p->thread.reg20; thread 256 arch/mips/kernel/kgdb.c *(ptr++) = p->thread.reg21; thread 257 arch/mips/kernel/kgdb.c *(ptr++) = p->thread.reg22; thread 258 arch/mips/kernel/kgdb.c *(ptr++) = p->thread.reg23;
thread 265 arch/mips/kernel/kgdb.c *(ptr++) = p->thread.reg29; thread 266 arch/mips/kernel/kgdb.c *(ptr++) = p->thread.reg30; thread 267 arch/mips/kernel/kgdb.c *(ptr++) = p->thread.reg31; thread 269 arch/mips/kernel/kgdb.c *(ptr++) = p->thread.cp0_status; thread 287 arch/mips/kernel/kgdb.c *(ptr++) = p->thread.reg31; thread 115 arch/mips/kernel/mips-mt-fpaff.c cpumask_copy(&p->thread.user_cpus_allowed, new_mask); thread 180 arch/mips/kernel/mips-mt-fpaff.c cpumask_or(&allowed, &p->thread.user_cpus_allowed, p->cpus_ptr); thread 204 arch/mips/kernel/mips-r2-to-r6-emul.c csr = current->thread.fpu.fcr31; thread 227 arch/mips/kernel/mips-r2-to-r6-emul.c csr = current->thread.fpu.fcr31; thread 1178 arch/mips/kernel/mips-r2-to-r6-emul.c err = fpu_emulator_cop1Handler(regs, &current->thread.fpu, 0, thread 1185 arch/mips/kernel/mips-r2-to-r6-emul.c *fcr31 = res = mask_fcr31_x(current->thread.fpu.fcr31); thread 1186 arch/mips/kernel/mips-r2-to-r6-emul.c current->thread.fpu.fcr31 &= ~res; thread 1199 arch/mips/kernel/mips-r2-to-r6-emul.c current->thread.cp0_baduaddr = (unsigned long)fault_addr; thread 1209 arch/mips/kernel/mips-r2-to-r6-emul.c current->thread.cp0_baduaddr = vaddr; thread 1282 arch/mips/kernel/mips-r2-to-r6-emul.c current->thread.cp0_baduaddr = vaddr; thread 1356 arch/mips/kernel/mips-r2-to-r6-emul.c current->thread.cp0_baduaddr = vaddr; thread 1426 arch/mips/kernel/mips-r2-to-r6-emul.c current->thread.cp0_baduaddr = vaddr; thread 1501 arch/mips/kernel/mips-r2-to-r6-emul.c current->thread.cp0_baduaddr = vaddr; thread 1620 arch/mips/kernel/mips-r2-to-r6-emul.c current->thread.cp0_baduaddr = vaddr; thread 1739 arch/mips/kernel/mips-r2-to-r6-emul.c current->thread.cp0_baduaddr = vaddr; thread 1857 arch/mips/kernel/mips-r2-to-r6-emul.c current->thread.cp0_baduaddr = vaddr; thread 1969 arch/mips/kernel/mips-r2-to-r6-emul.c current->thread.cp0_baduaddr = vaddr; thread 1974 arch/mips/kernel/mips-r2-to-r6-emul.c current->thread.cp0_baduaddr = vaddr; thread 2025 arch/mips/kernel/mips-r2-to-r6-emul.c current->thread.cp0_baduaddr = vaddr; thread 2030 arch/mips/kernel/mips-r2-to-r6-emul.c current->thread.cp0_baduaddr = vaddr; thread 2088 arch/mips/kernel/mips-r2-to-r6-emul.c current->thread.cp0_baduaddr = vaddr; thread 2093 arch/mips/kernel/mips-r2-to-r6-emul.c current->thread.cp0_baduaddr = vaddr; thread 2149 arch/mips/kernel/mips-r2-to-r6-emul.c current->thread.cp0_baduaddr = vaddr; thread 2154 arch/mips/kernel/mips-r2-to-r6-emul.c current->thread.cp0_baduaddr = vaddr; thread 78 arch/mips/kernel/process.c atomic_set(&current->thread.bd_emu_frame, BD_EMUFRAME_NONE); thread 134 arch/mips/kernel/process.c p->thread.cp0_status = read_c0_status() & ~(ST0_CU2|ST0_CU1); thread 137 arch/mips/kernel/process.c unsigned long status = p->thread.cp0_status; thread 140 arch/mips/kernel/process.c p->thread.reg16 = usp; /* fn */ thread 141 arch/mips/kernel/process.c p->thread.reg17 = kthread_arg; thread 142 arch/mips/kernel/process.c p->thread.reg29 = childksp; thread 143 arch/mips/kernel/process.c p->thread.reg31 = (unsigned long) ret_from_kernel_thread; thread 162 arch/mips/kernel/process.c p->thread.reg29 = (unsigned long) childregs; thread 163 arch/mips/kernel/process.c p->thread.reg31 = (unsigned long) ret_from_fork; thread 179 arch/mips/kernel/process.c atomic_set(&p->thread.bd_emu_frame, BD_EMUFRAME_NONE); thread 495 arch/mips/kernel/process.c struct thread_struct *t = &tsk->thread; thread 639 arch/mips/kernel/process.c sp = task->thread.reg29 + schedule_mfi.frame_size; thread 657 arch/mips/kernel/process.c top -=
PAGE_ALIGN(current->thread.abi->vdso->size); thread 143 arch/mips/kernel/ptrace.c __put_user(child->thread.watch.mips3264.watchlo[i], thread 145 arch/mips/kernel/ptrace.c __put_user(child->thread.watch.mips3264.watchhi[i] & thread 195 arch/mips/kernel/ptrace.c child->thread.watch.mips3264.watchlo[i] = lt[i]; thread 197 arch/mips/kernel/ptrace.c child->thread.watch.mips3264.watchhi[i] = ht[i]; thread 353 arch/mips/kernel/ptrace.c fcr31 = child->thread.fpu.fcr31; thread 355 arch/mips/kernel/ptrace.c child->thread.fpu.fcr31 = (value & ~mask) | (fcr31 & mask); thread 375 arch/mips/kernel/ptrace.c __put_user(child->thread.fpu.fcr31, data + 64); thread 417 arch/mips/kernel/ptrace.c &target->thread.fpu, thread 437 arch/mips/kernel/ptrace.c fpr_val = get_fpr64(&target->thread.fpu.fpr[i], 0); thread 462 arch/mips/kernel/ptrace.c if (sizeof(target->thread.fpu.fpr[0]) == sizeof(elf_fpreg_t)) thread 470 arch/mips/kernel/ptrace.c &target->thread.fpu.fcr31, thread 492 arch/mips/kernel/ptrace.c &target->thread.fpu, thread 517 arch/mips/kernel/ptrace.c set_fpr64(&target->thread.fpu.fpr[i], 0, fpr_val); thread 552 arch/mips/kernel/ptrace.c if (sizeof(target->thread.fpu.fpr[0]) == sizeof(elf_fpreg_t)) thread 651 arch/mips/kernel/ptrace.c &target->thread.fpu.fpr[i], thread 674 arch/mips/kernel/ptrace.c .fcsr = target->thread.fpu.fcr31, thread 676 arch/mips/kernel/ptrace.c .msacsr = target->thread.fpu.msacsr, thread 688 arch/mips/kernel/ptrace.c } else if (sizeof(target->thread.fpu.fpr[0]) == regset->size) { thread 691 arch/mips/kernel/ptrace.c &target->thread.fpu.fpr, thread 697 arch/mips/kernel/ptrace.c sizeof(target->thread.fpu.fpr[0])); thread 718 arch/mips/kernel/ptrace.c if (sizeof(target->thread.fpu.fpr[0]) == regset->size) { thread 721 arch/mips/kernel/ptrace.c &target->thread.fpu.fpr, thread 726 arch/mips/kernel/ptrace.c sizeof(target->thread.fpu.fpr[0])); thread 731 arch/mips/kernel/ptrace.c &target->thread.fpu.fpr[i], thread 740 arch/mips/kernel/ptrace.c target->thread.fpu.fcr31 = ctrl_regs.fcsr & ~FPU_CSR_ALL_X; thread 741 arch/mips/kernel/ptrace.c target->thread.fpu.msacsr = ctrl_regs.msacsr & ~MSA_CSR_CAUSEF; thread 776 arch/mips/kernel/ptrace.c dspregs[i] = target->thread.dsp.dspr[i]; thread 779 arch/mips/kernel/ptrace.c dspregs[i] = target->thread.dsp.dspcontrol; thread 817 arch/mips/kernel/ptrace.c target->thread.dsp.dspr[i] = (s32)dspregs[i]; thread 820 arch/mips/kernel/ptrace.c target->thread.dsp.dspcontrol = (s32)dspregs[i]; thread 856 arch/mips/kernel/ptrace.c dspregs[i] = target->thread.dsp.dspr[i]; thread 859 arch/mips/kernel/ptrace.c dspregs[i] = target->thread.dsp.dspcontrol; thread 897 arch/mips/kernel/ptrace.c target->thread.dsp.dspr[i] = dspregs[i]; thread 900 arch/mips/kernel/ptrace.c target->thread.dsp.dspcontrol = dspregs[i]; thread 1218 arch/mips/kernel/ptrace.c tmp = child->thread.fpu.fcr31; thread 1263 arch/mips/kernel/ptrace.c tmp = child->thread.dsp.dspcontrol; thread 1351 arch/mips/kernel/ptrace.c child->thread.dsp.dspcontrol = data; thread 118 arch/mips/kernel/ptrace32.c tmp = child->thread.fpu.fcr31; thread 158 arch/mips/kernel/ptrace32.c tmp = child->thread.dsp.dspcontrol; thread 216 arch/mips/kernel/ptrace32.c memset(&child->thread.fpu, ~0, thread 217 arch/mips/kernel/ptrace32.c sizeof(child->thread.fpu)); thread 218 arch/mips/kernel/ptrace32.c child->thread.fpu.fcr31 = 0; thread 234 arch/mips/kernel/ptrace32.c child->thread.fpu.fcr31 = data; thread 263 arch/mips/kernel/ptrace32.c child->thread.dsp.dspcontrol = data; thread 73 arch/mips/kernel/signal.c struct mips_abi 
*abi = current->thread.abi; thread 82 arch/mips/kernel/signal.c __put_user(get_fpr64(&current->thread.fpu.fpr[i], 0), thread 85 arch/mips/kernel/signal.c err |= __put_user(current->thread.fpu.fcr31, csr); thread 92 arch/mips/kernel/signal.c struct mips_abi *abi = current->thread.abi; thread 102 arch/mips/kernel/signal.c set_fpr64(&current->thread.fpu.fpr[i], 0, fpr_val); thread 104 arch/mips/kernel/signal.c err |= __get_user(current->thread.fpu.fcr31, csr); thread 128 arch/mips/kernel/signal.c struct mips_abi *abi = current->thread.abi; thread 137 arch/mips/kernel/signal.c struct mips_abi *abi = current->thread.abi; thread 195 arch/mips/kernel/signal.c err = __put_user(current->thread.fpu.msacsr, &msa->csr); thread 198 arch/mips/kernel/signal.c val = get_fpr64(&current->thread.fpu.fpr[i], 1); thread 241 arch/mips/kernel/signal.c current->thread.fpu.msacsr = csr; thread 245 arch/mips/kernel/signal.c set_fpr64(&current->thread.fpu.fpr[i], 1, val); thread 327 arch/mips/kernel/signal.c struct mips_abi *abi = current->thread.abi; thread 380 arch/mips/kernel/signal.c struct mips_abi *abi = current->thread.abi; thread 805 arch/mips/kernel/signal.c struct mips_abi *abi = current->thread.abi; thread 870 arch/mips/kernel/signal.c regs->regs[2] = current->thread.abi->restart; thread 86 arch/mips/kernel/stacktrace.c regs->regs[29] = tsk->thread.reg29; thread 88 arch/mips/kernel/stacktrace.c regs->cp0_epc = tsk->thread.reg31; thread 210 arch/mips/kernel/traps.c regs.regs[29] = task->thread.reg29; thread 212 arch/mips/kernel/traps.c regs.cp0_epc = task->thread.reg31; thread 394 arch/mips/kernel/traps.c if (notify_die(DIE_OOPS, str, regs, 0, current->thread.trap_nr, thread 480 arch/mips/kernel/traps.c if (notify_die(DIE_OOPS, "bus error", regs, 0, current->thread.trap_nr, thread 803 arch/mips/kernel/traps.c sig = fpu_emulator_cop1Handler(regs, &current->thread.fpu, 1, thread 810 arch/mips/kernel/traps.c fcr31 = mask_fcr31_x(current->thread.fpu.fcr31); thread 811 arch/mips/kernel/traps.c current->thread.fpu.fcr31 &= ~fcr31; thread 832 arch/mips/kernel/traps.c if (notify_die(DIE_FP, "FP exception", regs, 0, current->thread.trap_nr, thread 855 arch/mips/kernel/traps.c sig = fpu_emulator_cop1Handler(regs, &current->thread.fpu, 1, thread 862 arch/mips/kernel/traps.c fcr31 = mask_fcr31_x(current->thread.fpu.fcr31); thread 863 arch/mips/kernel/traps.c current->thread.fpu.fcr31 &= ~fcr31; thread 888 arch/mips/kernel/traps.c ((current->thread.emulated_fp++ > mt_fpemul_threshold))) { thread 897 arch/mips/kernel/traps.c current->thread.user_cpus_allowed thread 924 arch/mips/kernel/traps.c if (kgdb_ll_trap(DIE_TRAP, str, regs, code, current->thread.trap_nr, thread 929 arch/mips/kernel/traps.c if (notify_die(DIE_TRAP, str, regs, code, current->thread.trap_nr, thread 990 arch/mips/kernel/traps.c current->thread.trap_nr = (regs->cp0_cause >> 2) & 0x1f; thread 1032 arch/mips/kernel/traps.c current->thread.trap_nr, SIGTRAP) == NOTIFY_STOP) thread 1038 arch/mips/kernel/traps.c current->thread.trap_nr, SIGTRAP) == NOTIFY_STOP) thread 1044 arch/mips/kernel/traps.c current->thread.trap_nr, SIGTRAP) == NOTIFY_STOP) thread 1050 arch/mips/kernel/traps.c current->thread.trap_nr, SIGTRAP) == NOTIFY_STOP) thread 1083 arch/mips/kernel/traps.c current->thread.trap_nr = (regs->cp0_cause >> 2) & 0x1f; thread 1139 arch/mips/kernel/traps.c &current->thread.cp0_baduaddr, thread 1148 arch/mips/kernel/traps.c current->thread.trap_nr = (regs->cp0_cause >> 2) & 0x1f; thread 1150 arch/mips/kernel/traps.c if (notify_die(DIE_RI, "RI Fault", regs, 0, current->thread.trap_nr, thread 1291 
arch/mips/kernel/traps.c write_msa_csr(current->thread.fpu.msacsr); thread 1328 arch/mips/kernel/traps.c current->thread.fpu.fcr31); thread 1421 arch/mips/kernel/traps.c sig = fpu_emulator_cop1Handler(regs, &current->thread.fpu, 0, thread 1428 arch/mips/kernel/traps.c fcr31 = mask_fcr31_x(current->thread.fpu.fcr31); thread 1429 arch/mips/kernel/traps.c current->thread.fpu.fcr31 &= ~fcr31; thread 1457 arch/mips/kernel/traps.c current->thread.trap_nr = (regs->cp0_cause >> 2) & 0x1f; thread 1459 arch/mips/kernel/traps.c current->thread.trap_nr, SIGFPE) == NOTIFY_STOP) thread 1222 arch/mips/kernel/unaligned.c res = fpu_emulator_cop1Handler(regs, &current->thread.fpu, 1, thread 1254 arch/mips/kernel/unaligned.c fpr = &current->thread.fpu.fpr[wd]; thread 1742 arch/mips/kernel/unaligned.c res = fpu_emulator_cop1Handler(regs, &current->thread.fpu, 1, thread 119 arch/mips/kernel/uprobes.c utask->autask.saved_trap_nr = current->thread.trap_nr; thread 120 arch/mips/kernel/uprobes.c current->thread.trap_nr = UPROBE_TRAP_NR; thread 130 arch/mips/kernel/uprobes.c current->thread.trap_nr = utask->autask.saved_trap_nr; thread 148 arch/mips/kernel/uprobes.c if (tsk->thread.trap_nr != UPROBE_TRAP_NR) thread 89 arch/mips/kernel/vdso.c struct mips_vdso_image *image = current->thread.abi->vdso; thread 20 arch/mips/kernel/watch.c struct mips3264_watch_reg_state *watches = &t->thread.watch.mips3264; thread 53 arch/mips/kernel/watch.c &current->thread.watch.mips3264; thread 706 arch/mips/math-emu/cp1emu.c fpr = &current->thread.fpu.fpr[insn.i_format.rt]; thread 736 arch/mips/math-emu/cp1emu.c fcr31 = current->thread.fpu.fcr31; thread 1191 arch/mips/math-emu/cp1emu.c fpr = &current->thread.fpu.fpr[MIPSInst_RT(ir)]; thread 157 arch/mips/math-emu/dsemul.c fr_idx = atomic_xchg(&tsk->thread.bd_emu_frame, BD_EMUFRAME_NONE); thread 183 arch/mips/math-emu/dsemul.c fr_idx = atomic_read(&current->thread.bd_emu_frame); thread 196 arch/mips/math-emu/dsemul.c regs->cp0_epc = current->thread.bd_emu_branch_pc; thread 198 arch/mips/math-emu/dsemul.c regs->cp0_epc = current->thread.bd_emu_cont_pc; thread 200 arch/mips/math-emu/dsemul.c atomic_set(&current->thread.bd_emu_frame, BD_EMUFRAME_NONE); thread 249 arch/mips/math-emu/dsemul.c fr_idx = atomic_read(&current->thread.bd_emu_frame); thread 285 arch/mips/math-emu/dsemul.c current->thread.bd_emu_branch_pc = branch_pc; thread 286 arch/mips/math-emu/dsemul.c current->thread.bd_emu_cont_pc = cont_pc; thread 287 arch/mips/math-emu/dsemul.c atomic_set(&current->thread.bd_emu_frame, fr_idx); thread 304 arch/mips/math-emu/dsemul.c xcp->cp0_epc = current->thread.bd_emu_cont_pc; thread 154 arch/mips/math-emu/ieee754.h #define ieee754_csr (*(struct _ieee754_csr *)(&current->thread.fpu.fcr31)) thread 62 arch/mips/mm/fault.c current->thread.trap_nr, SIGSEGV) == NOTIFY_STOP) thread 207 arch/mips/mm/fault.c tsk->thread.cp0_badvaddr = address; thread 208 arch/mips/mm/fault.c tsk->thread.error_code = write; thread 225 arch/mips/mm/fault.c current->thread.trap_nr = (regs->cp0_cause >> 2) & 0x1f; thread 233 arch/mips/mm/fault.c current->thread.cp0_baduaddr = address; thread 280 arch/mips/mm/fault.c current->thread.trap_nr = (regs->cp0_cause >> 2) & 0x1f; thread 281 arch/mips/mm/fault.c tsk->thread.cp0_badvaddr = address; thread 106 arch/mips/netlogic/xlp/cop2-ex.c nlm_cop2_restore(&(current->thread.cp2)); thread 96 arch/nds32/include/asm/fpu.h load_fpu(&current->thread.fpu); thread 101 arch/nds32/include/asm/fpu.h load_fpu(&current->thread.fpu); thread 76 arch/nds32/include/asm/processor.h #define release_thread(thread) do { } while(0) thread 53 
arch/nds32/include/asm/sfp-machine.h #define __FPU_FPCSR (current->thread.fpu.fpcsr) thread 35 arch/nds32/include/asm/thread_info.h #define thread_saved_pc(tsk) ((unsigned long)(tsk->thread.cpu_context.pc)) thread 36 arch/nds32/include/asm/thread_info.h #define thread_saved_fp(tsk) ((unsigned long)(tsk->thread.cpu_context.fp)) thread 16 arch/nds32/kernel/asm-offsets.c offsetof(struct task_struct, thread.cpu_context)); thread 46 arch/nds32/kernel/fpu.c : "r" (&tsk->thread.fpu) thread 59 arch/nds32/kernel/fpu.c : "r" (&tsk->thread.fpu) thread 68 arch/nds32/kernel/fpu.c : "r" (&tsk->thread.fpu) thread 79 arch/nds32/kernel/fpu.c : "r"(&tsk->thread.fpu) thread 176 arch/nds32/kernel/fpu.c load_fpu(&current->thread.fpu); thread 181 arch/nds32/kernel/fpu.c current->thread.fpu.UDF_IEX_trap = init_fpuregs.UDF_IEX_trap; thread 215 arch/nds32/kernel/fpu.c fpcsr = current->thread.fpu.fpcsr; thread 218 arch/nds32/kernel/fpu.c si_signo = do_fpuemu(regs, &current->thread.fpu); thread 219 arch/nds32/kernel/fpu.c fpcsr = current->thread.fpu.fpcsr; thread 221 arch/nds32/kernel/fpu.c current->thread.fpu.fpcsr &= ~(redo_except); thread 157 arch/nds32/kernel/process.c memset(&p->thread.cpu_context, 0, sizeof(struct cpu_context)); thread 162 arch/nds32/kernel/process.c p->thread.cpu_context.r6 = stack_start; thread 164 arch/nds32/kernel/process.c p->thread.cpu_context.r7 = stk_sz; thread 176 arch/nds32/kernel/process.c p->thread.cpu_context.pc = (unsigned long)ret_from_fork; thread 177 arch/nds32/kernel/process.c p->thread.cpu_context.sp = (unsigned long)childregs; thread 189 arch/nds32/kernel/process.c p->thread.fpu = current->thread.fpu; thread 228 arch/nds32/kernel/process.c memcpy(fpu, &tsk->thread.fpu, sizeof(*fpu)); thread 50 arch/nds32/kernel/signal.c return __copy_from_user(&tsk->thread.fpu, &sc->fpu, thread 72 arch/nds32/kernel/signal.c ret = __copy_to_user(&sc->fpu, &tsk->thread.fpu, thread 217 arch/nds32/kernel/signal.c __put_user_error(current->thread.trap_no, &sf->uc.uc_mcontext.trap_no, thread 219 arch/nds32/kernel/signal.c __put_user_error(current->thread.error_code, thread 221 arch/nds32/kernel/signal.c __put_user_error(current->thread.address, thread 61 arch/nds32/kernel/sys_nds32.c current->thread.fpu.UDF_IEX_trap = init_fpuregs.UDF_IEX_trap; thread 65 arch/nds32/kernel/sys_nds32.c old_udf_iex = current->thread.fpu.UDF_IEX_trap; thread 70 arch/nds32/kernel/sys_nds32.c current->thread.fpu.UDF_IEX_trap &= ~act; thread 73 arch/nds32/kernel/sys_nds32.c current->thread.fpu.UDF_IEX_trap |= act; thread 145 arch/nds32/kernel/traps.c base_reg = (unsigned long *)(tsk->thread.cpu_context.sp); thread 150 arch/nds32/kernel/traps.c base_reg = (unsigned long *)(tsk->thread.cpu_context.fp); thread 262 arch/nds32/kernel/traps.c tsk->thread.trap_no = ENTRY_DEBUG_RELATED; thread 263 arch/nds32/kernel/traps.c tsk->thread.error_code = error_code; thread 271 arch/nds32/mm/fault.c tsk->thread.address = addr; thread 272 arch/nds32/mm/fault.c tsk->thread.error_code = error_code; thread 273 arch/nds32/mm/fault.c tsk->thread.trap_no = entry; thread 340 arch/nds32/mm/fault.c tsk->thread.address = addr; thread 341 arch/nds32/mm/fault.c tsk->thread.error_code = error_code; thread 342 arch/nds32/mm/fault.c tsk->thread.trap_no = entry; thread 78 arch/nios2/include/asm/processor.h #define KSTK_EIP(tsk) ((tsk)->thread.kregs->ea) thread 79 arch/nios2/include/asm/processor.h #define KSTK_ESP(tsk) ((tsk)->thread.kregs->sp) thread 17 arch/nios2/kernel/asm-offsets.c OFFSET(TASK_THREAD, task_struct, thread); thread 100 arch/nios2/kernel/kgdb.c 
gdb_regs[GDB_SP] = p->thread.kregs->sp; thread 101 arch/nios2/kernel/kgdb.c gdb_regs[GDB_PC] = p->thread.kregs->ea; thread 122 arch/nios2/kernel/process.c p->thread.ksp = (unsigned long) childstack; thread 123 arch/nios2/kernel/process.c p->thread.kregs = childregs; thread 135 arch/nios2/kernel/process.c p->thread.kregs = childregs; thread 136 arch/nios2/kernel/process.c p->thread.ksp = (unsigned long) childstack; thread 230 arch/nios2/kernel/process.c fp = ((struct switch_stack *)p->thread.ksp)->fp; /* ;dgt2 */ thread 159 arch/nios2/kernel/setup.c init_task.thread.kregs = &fake_regs; thread 238 arch/nios2/kernel/signal.c current->thread.kregs = regs; thread 67 arch/nios2/kernel/traps.c stack = (unsigned long *)task->thread.ksp; thread 44 arch/openrisc/kernel/asm-offsets.c DEFINE(TASK_THREAD, offsetof(struct task_struct, thread)); thread 195 arch/parisc/include/asm/compat.h struct pt_regs *regs = &current->thread.regs; thread 239 arch/parisc/include/asm/elf.h current->thread.map_base = DEFAULT_MAP_BASE; \ thread 240 arch/parisc/include/asm/elf.h current->thread.task_size = DEFAULT_TASK_SIZE; \ thread 249 arch/parisc/include/asm/elf.h current->thread.map_base = DEFAULT_MAP_BASE32; \ thread 250 arch/parisc/include/asm/elf.h current->thread.task_size = DEFAULT_TASK_SIZE32; \ thread 25 arch/parisc/include/asm/processor.h #define TASK_SIZE_OF(tsk) ((tsk)->thread.task_size) thread 27 arch/parisc/include/asm/processor.h #define TASK_UNMAPPED_BASE (current->thread.map_base) thread 121 arch/parisc/include/asm/processor.h #define task_pt_regs(tsk) ((struct pt_regs *)&((tsk)->thread.regs)) thread 133 arch/parisc/include/asm/processor.h (task)->thread.flags = (((task)->thread.flags & ~PARISC_UAC_MASK) \ thread 141 arch/parisc/include/asm/processor.h put_user(((task)->thread.flags & PARISC_UAC_MASK) \ thread 287 arch/parisc/include/asm/processor.h #define KSTK_EIP(tsk) ((tsk)->thread.regs.iaoq[0]) thread 288 arch/parisc/include/asm/processor.h #define KSTK_ESP(tsk) ((tsk)->thread.regs.gr[30]) thread 53 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_REGS, offsetof(struct task_struct, thread.regs)); thread 54 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_PSW, offsetof(struct task_struct, thread.regs.gr[ 0])); thread 55 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR1, offsetof(struct task_struct, thread.regs.gr[ 1])); thread 56 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR2, offsetof(struct task_struct, thread.regs.gr[ 2])); thread 57 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR3, offsetof(struct task_struct, thread.regs.gr[ 3])); thread 58 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR4, offsetof(struct task_struct, thread.regs.gr[ 4])); thread 59 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR5, offsetof(struct task_struct, thread.regs.gr[ 5])); thread 60 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR6, offsetof(struct task_struct, thread.regs.gr[ 6])); thread 61 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR7, offsetof(struct task_struct, thread.regs.gr[ 7])); thread 62 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR8, offsetof(struct task_struct, thread.regs.gr[ 8])); thread 63 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR9, offsetof(struct task_struct, thread.regs.gr[ 9])); thread 64 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR10, offsetof(struct task_struct, thread.regs.gr[10])); thread 65 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR11, offsetof(struct task_struct, thread.regs.gr[11])); thread 66 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR12, 
offsetof(struct task_struct, thread.regs.gr[12])); thread 67 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR13, offsetof(struct task_struct, thread.regs.gr[13])); thread 68 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR14, offsetof(struct task_struct, thread.regs.gr[14])); thread 69 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR15, offsetof(struct task_struct, thread.regs.gr[15])); thread 70 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR16, offsetof(struct task_struct, thread.regs.gr[16])); thread 71 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR17, offsetof(struct task_struct, thread.regs.gr[17])); thread 72 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR18, offsetof(struct task_struct, thread.regs.gr[18])); thread 73 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR19, offsetof(struct task_struct, thread.regs.gr[19])); thread 74 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR20, offsetof(struct task_struct, thread.regs.gr[20])); thread 75 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR21, offsetof(struct task_struct, thread.regs.gr[21])); thread 76 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR22, offsetof(struct task_struct, thread.regs.gr[22])); thread 77 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR23, offsetof(struct task_struct, thread.regs.gr[23])); thread 78 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR24, offsetof(struct task_struct, thread.regs.gr[24])); thread 79 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR25, offsetof(struct task_struct, thread.regs.gr[25])); thread 80 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR26, offsetof(struct task_struct, thread.regs.gr[26])); thread 81 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR27, offsetof(struct task_struct, thread.regs.gr[27])); thread 82 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR28, offsetof(struct task_struct, thread.regs.gr[28])); thread 83 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR29, offsetof(struct task_struct, thread.regs.gr[29])); thread 84 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR30, offsetof(struct task_struct, thread.regs.gr[30])); thread 85 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_GR31, offsetof(struct task_struct, thread.regs.gr[31])); thread 86 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR0, offsetof(struct task_struct, thread.regs.fr[ 0])); thread 87 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR1, offsetof(struct task_struct, thread.regs.fr[ 1])); thread 88 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR2, offsetof(struct task_struct, thread.regs.fr[ 2])); thread 89 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR3, offsetof(struct task_struct, thread.regs.fr[ 3])); thread 90 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR4, offsetof(struct task_struct, thread.regs.fr[ 4])); thread 91 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR5, offsetof(struct task_struct, thread.regs.fr[ 5])); thread 92 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR6, offsetof(struct task_struct, thread.regs.fr[ 6])); thread 93 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR7, offsetof(struct task_struct, thread.regs.fr[ 7])); thread 94 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR8, offsetof(struct task_struct, thread.regs.fr[ 8])); thread 95 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR9, offsetof(struct task_struct, thread.regs.fr[ 9])); thread 96 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR10, offsetof(struct task_struct, thread.regs.fr[10])); thread 97 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR11, 
offsetof(struct task_struct, thread.regs.fr[11])); thread 98 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR12, offsetof(struct task_struct, thread.regs.fr[12])); thread 99 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR13, offsetof(struct task_struct, thread.regs.fr[13])); thread 100 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR14, offsetof(struct task_struct, thread.regs.fr[14])); thread 101 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR15, offsetof(struct task_struct, thread.regs.fr[15])); thread 102 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR16, offsetof(struct task_struct, thread.regs.fr[16])); thread 103 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR17, offsetof(struct task_struct, thread.regs.fr[17])); thread 104 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR18, offsetof(struct task_struct, thread.regs.fr[18])); thread 105 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR19, offsetof(struct task_struct, thread.regs.fr[19])); thread 106 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR20, offsetof(struct task_struct, thread.regs.fr[20])); thread 107 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR21, offsetof(struct task_struct, thread.regs.fr[21])); thread 108 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR22, offsetof(struct task_struct, thread.regs.fr[22])); thread 109 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR23, offsetof(struct task_struct, thread.regs.fr[23])); thread 110 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR24, offsetof(struct task_struct, thread.regs.fr[24])); thread 111 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR25, offsetof(struct task_struct, thread.regs.fr[25])); thread 112 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR26, offsetof(struct task_struct, thread.regs.fr[26])); thread 113 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR27, offsetof(struct task_struct, thread.regs.fr[27])); thread 114 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR28, offsetof(struct task_struct, thread.regs.fr[28])); thread 115 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR29, offsetof(struct task_struct, thread.regs.fr[29])); thread 116 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR30, offsetof(struct task_struct, thread.regs.fr[30])); thread 117 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_FR31, offsetof(struct task_struct, thread.regs.fr[31])); thread 118 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_SR0, offsetof(struct task_struct, thread.regs.sr[ 0])); thread 119 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_SR1, offsetof(struct task_struct, thread.regs.sr[ 1])); thread 120 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_SR2, offsetof(struct task_struct, thread.regs.sr[ 2])); thread 121 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_SR3, offsetof(struct task_struct, thread.regs.sr[ 3])); thread 122 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_SR4, offsetof(struct task_struct, thread.regs.sr[ 4])); thread 123 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_SR5, offsetof(struct task_struct, thread.regs.sr[ 5])); thread 124 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_SR6, offsetof(struct task_struct, thread.regs.sr[ 6])); thread 125 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_SR7, offsetof(struct task_struct, thread.regs.sr[ 7])); thread 126 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_IASQ0, offsetof(struct task_struct, thread.regs.iasq[0])); thread 127 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_IASQ1, offsetof(struct task_struct, thread.regs.iasq[1])); thread 128 
arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_IAOQ0, offsetof(struct task_struct, thread.regs.iaoq[0])); thread 129 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_IAOQ1, offsetof(struct task_struct, thread.regs.iaoq[1])); thread 130 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_CR27, offsetof(struct task_struct, thread.regs.cr27)); thread 131 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_ORIG_R28, offsetof(struct task_struct, thread.regs.orig_r28)); thread 132 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_KSP, offsetof(struct task_struct, thread.regs.ksp)); thread 133 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_KPC, offsetof(struct task_struct, thread.regs.kpc)); thread 134 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_SAR, offsetof(struct task_struct, thread.regs.sar)); thread 135 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_IIR, offsetof(struct task_struct, thread.regs.iir)); thread 136 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_ISR, offsetof(struct task_struct, thread.regs.isr)); thread 137 arch/parisc/kernel/asm-offsets.c DEFINE(TASK_PT_IOR, offsetof(struct task_struct, thread.regs.ior)); thread 170 arch/parisc/kernel/process.c memcpy(r, tsk->thread.regs.fr, sizeof(*r)); thread 214 arch/parisc/kernel/process.c struct pt_regs *cregs = &(p->thread.regs); thread 253 arch/parisc/kernel/traps.c if (current->thread.flags & PARISC_KERNEL_DEATH) { thread 258 arch/parisc/kernel/traps.c current->thread.flags |= PARISC_KERNEL_DEATH; thread 447 arch/parisc/kernel/unaligned.c if (current->thread.flags & PARISC_UAC_SIGBUS) { thread 451 arch/parisc/kernel/unaligned.c if (!(current->thread.flags & PARISC_UAC_NOPRINT) && thread 395 arch/parisc/kernel/unwind.c struct pt_regs *r = &t->thread.regs; thread 52 arch/powerpc/include/asm/book3s/32/kup.h .macro kuap_save_and_lock sp, thread, gpr1, gpr2, gpr3 thread 78 arch/powerpc/include/asm/book3s/32/kup.h lwz \gpr, KUAP(thread) thread 120 arch/powerpc/include/asm/book3s/32/kup.h current->thread.kuap = (addr & 0xf0000000) | ((((end - 1) >> 28) + 1) & 0xf); thread 137 arch/powerpc/include/asm/book3s/32/kup.h current->thread.kuap = 0; thread 84 arch/powerpc/include/asm/cell-pmu.h extern void cbe_enable_pm_interrupts(u32 cpu, u32 thread, u32 mask); thread 190 arch/powerpc/include/asm/cell-regs.h struct cbe_iic_thread_regs thread[2]; /* 0x0400 */ thread 118 arch/powerpc/include/asm/compat.h struct pt_regs *regs = current->thread.regs; thread 111 arch/powerpc/include/asm/cputhreads.h void book3e_start_thread(int thread, unsigned long addr); thread 112 arch/powerpc/include/asm/cputhreads.h void book3e_stop_thread(int thread); thread 21 arch/powerpc/include/asm/kup.h .macro kuap_save_and_lock sp, thread, gpr1, gpr2, gpr3 thread 259 arch/powerpc/include/asm/mmu_context.h #define thread_pkey_regs_save(thread) thread 261 arch/powerpc/include/asm/mmu_context.h #define thread_pkey_regs_init(thread) thread 11 arch/powerpc/include/asm/nohash/32/kup-8xx.h .macro kuap_save_and_lock sp, thread, gpr1, gpr2, gpr3 thread 205 arch/powerpc/include/asm/pkeys.h extern void thread_pkey_regs_save(struct thread_struct *thread); thread 208 arch/powerpc/include/asm/pkeys.h extern void thread_pkey_regs_init(struct thread_struct *thread); thread 301 arch/powerpc/include/asm/processor.h #define task_pt_regs(tsk) ((struct pt_regs *)(tsk)->thread.regs) thread 305 arch/powerpc/include/asm/processor.h #define KSTK_EIP(tsk) ((tsk)->thread.regs? (tsk)->thread.regs->nip: 0) thread 306 arch/powerpc/include/asm/processor.h #define KSTK_ESP(tsk) ((tsk)->thread.regs? 
(tsk)->thread.regs->gpr[1]: 0) thread 509 arch/powerpc/include/asm/ps3.h void ps3_enable_pm_interrupts(u32 cpu, u32 thread, u32 mask); thread 402 arch/powerpc/include/asm/reg_booke.h #define dbcr_iac_range(task) ((task)->thread.debug.dbcr0) thread 416 arch/powerpc/include/asm/reg_booke.h #define dbcr_dac(task) ((task)->thread.debug.dbcr1) thread 462 arch/powerpc/include/asm/reg_booke.h #define dbcr_dac(task) ((task)->thread.debug.dbcr0) thread 496 arch/powerpc/include/asm/reg_booke.h #define dbcr_iac_range(task) ((task)->thread.debug.dbcr1) thread 108 arch/powerpc/include/asm/sfp-machine.h #define __FPU_FPSCR (current->thread.spefscr) thread 128 arch/powerpc/include/asm/sfp-machine.h #define __FPU_FPSCR (current->thread.fp_state.fpscr) thread 82 arch/powerpc/include/asm/switch_to.h t->thread.ebbrr = 0; thread 83 arch/powerpc/include/asm/switch_to.h t->thread.ebbhr = 0; thread 84 arch/powerpc/include/asm/switch_to.h t->thread.bescr = 0; thread 85 arch/powerpc/include/asm/switch_to.h t->thread.mmcr2 = 0; thread 86 arch/powerpc/include/asm/switch_to.h t->thread.mmcr0 = 0; thread 87 arch/powerpc/include/asm/switch_to.h t->thread.siar = 0; thread 88 arch/powerpc/include/asm/switch_to.h t->thread.sdar = 0; thread 89 arch/powerpc/include/asm/switch_to.h t->thread.sier = 0; thread 90 arch/powerpc/include/asm/switch_to.h t->thread.used_ebb = 0; thread 13 arch/powerpc/include/asm/tm.h extern void tm_reclaim(struct thread_struct *thread, thread 16 arch/powerpc/include/asm/tm.h extern void tm_recheckpoint(struct thread_struct *thread); thread 17 arch/powerpc/include/asm/tm.h extern void tm_save_sprs(struct thread_struct *thread); thread 18 arch/powerpc/include/asm/tm.h extern void tm_restore_sprs(struct thread_struct *thread); thread 32 arch/powerpc/include/asm/uaccess.h #define get_fs() (current->thread.addr_limit) thread 36 arch/powerpc/include/asm/uaccess.h current->thread.addr_limit = fs; thread 117 arch/powerpc/kernel/align.c unsigned long *evr = &current->thread.evr[reg]; thread 78 arch/powerpc/kernel/asm-offsets.c OFFSET(THREAD, task_struct, thread); thread 65 arch/powerpc/kernel/hw_breakpoint.c if (current->thread.last_hit_ubp != bp) thread 106 arch/powerpc/kernel/hw_breakpoint.c bp->ctx->task->thread.last_hit_ubp = NULL; thread 189 arch/powerpc/kernel/hw_breakpoint.c if (likely(!tsk->thread.last_hit_ubp)) thread 192 arch/powerpc/kernel/hw_breakpoint.c info = counter_arch_bp(tsk->thread.last_hit_ubp); thread 195 arch/powerpc/kernel/hw_breakpoint.c tsk->thread.last_hit_ubp = NULL; thread 227 arch/powerpc/kernel/hw_breakpoint.c current->thread.last_hit_ubp = bp; thread 325 arch/powerpc/kernel/hw_breakpoint.c bp = current->thread.last_hit_ubp; thread 343 arch/powerpc/kernel/hw_breakpoint.c current->thread.last_hit_ubp = NULL; thread 382 arch/powerpc/kernel/hw_breakpoint.c struct thread_struct *t = &tsk->thread; thread 195 arch/powerpc/kernel/kgdb.c struct pt_regs *regs = (struct pt_regs *)(p->thread.ksp + thread 216 arch/powerpc/kernel/kgdb.c PACK64(ptr, p->thread.evr[reg]); thread 336 arch/powerpc/kernel/kgdb.c memcpy(mem, &current->thread.evr[regno-32], thread 361 arch/powerpc/kernel/kgdb.c memcpy(&current->thread.evr[regno-32], mem, thread 96 arch/powerpc/kernel/process.c if (tsk == current && tsk->thread.regs && thread 97 arch/powerpc/kernel/process.c MSR_TM_ACTIVE(tsk->thread.regs->msr) && thread 99 arch/powerpc/kernel/process.c tsk->thread.ckpt_regs.msr = tsk->thread.regs->msr; thread 164 arch/powerpc/kernel/process.c msr = tsk->thread.regs->msr; thread 170 arch/powerpc/kernel/process.c tsk->thread.regs->msr 
= msr; thread 189 arch/powerpc/kernel/process.c if (tsk->thread.regs) { thread 199 arch/powerpc/kernel/process.c if (tsk->thread.regs->msr & MSR_FP) { thread 223 arch/powerpc/kernel/process.c if (current->thread.regs && (current->thread.regs->msr & MSR_FP)) { thread 233 arch/powerpc/kernel/process.c MSR_TM_ACTIVE(current->thread.regs->msr)) thread 242 arch/powerpc/kernel/process.c if (tsk->thread.load_fp) { thread 243 arch/powerpc/kernel/process.c load_fp_state(&current->thread.fp_state); thread 244 arch/powerpc/kernel/process.c current->thread.load_fp++; thread 261 arch/powerpc/kernel/process.c msr = tsk->thread.regs->msr; thread 267 arch/powerpc/kernel/process.c tsk->thread.regs->msr = msr; thread 288 arch/powerpc/kernel/process.c if (current->thread.regs && (current->thread.regs->msr & MSR_VEC)) { thread 298 arch/powerpc/kernel/process.c MSR_TM_ACTIVE(current->thread.regs->msr)) thread 311 arch/powerpc/kernel/process.c if (tsk->thread.regs) { thread 313 arch/powerpc/kernel/process.c if (tsk->thread.regs->msr & MSR_VEC) { thread 324 arch/powerpc/kernel/process.c if (cpu_has_feature(CPU_FTR_ALTIVEC) && (tsk->thread.load_vec)) { thread 325 arch/powerpc/kernel/process.c load_vr_state(&tsk->thread.vr_state); thread 326 arch/powerpc/kernel/process.c tsk->thread.used_vr = 1; thread 327 arch/powerpc/kernel/process.c tsk->thread.load_vec++; thread 341 arch/powerpc/kernel/process.c unsigned long msr = tsk->thread.regs->msr; thread 373 arch/powerpc/kernel/process.c if (current->thread.regs && thread 374 arch/powerpc/kernel/process.c (current->thread.regs->msr & (MSR_VSX|MSR_VEC|MSR_FP))) { thread 384 arch/powerpc/kernel/process.c MSR_TM_ACTIVE(current->thread.regs->msr)) thread 393 arch/powerpc/kernel/process.c if (tsk->thread.regs) { thread 395 arch/powerpc/kernel/process.c if (tsk->thread.regs->msr & (MSR_VSX|MSR_VEC|MSR_FP)) { thread 407 arch/powerpc/kernel/process.c tsk->thread.used_vsr = 1; thread 434 arch/powerpc/kernel/process.c if (current->thread.regs && (current->thread.regs->msr & MSR_SPE)) { thread 443 arch/powerpc/kernel/process.c if (tsk->thread.regs) { thread 445 arch/powerpc/kernel/process.c if (tsk->thread.regs->msr & MSR_SPE) { thread 447 arch/powerpc/kernel/process.c tsk->thread.spefscr = mfspr(SPRN_SPEFSCR); thread 483 arch/powerpc/kernel/process.c if (!tsk->thread.regs) thread 488 arch/powerpc/kernel/process.c usermsr = tsk->thread.regs->msr; thread 529 arch/powerpc/kernel/process.c !current->thread.load_fp && !loadvec(current->thread)) thread 540 arch/powerpc/kernel/process.c msr |= MSR_FP | current->thread.fpexc_mode; thread 559 arch/powerpc/kernel/process.c if (!tsk->thread.regs) thread 562 arch/powerpc/kernel/process.c usermsr = tsk->thread.regs->msr; thread 581 arch/powerpc/kernel/process.c thread_pkey_regs_save(&tsk->thread); thread 586 arch/powerpc/kernel/process.c if (tsk->thread.regs) { thread 590 arch/powerpc/kernel/process.c if (tsk->thread.regs->msr & MSR_SPE) thread 591 arch/powerpc/kernel/process.c tsk->thread.spefscr = mfspr(SPRN_SPEFSCR); thread 604 arch/powerpc/kernel/process.c current->thread.trap_nr = TRAP_HWBKPT; thread 617 arch/powerpc/kernel/process.c current->thread.trap_nr = TRAP_HWBKPT; thread 639 arch/powerpc/kernel/process.c static void set_debug_reg_defaults(struct thread_struct *thread) thread 641 arch/powerpc/kernel/process.c thread->debug.iac1 = thread->debug.iac2 = 0; thread 643 arch/powerpc/kernel/process.c thread->debug.iac3 = thread->debug.iac4 = 0; thread 645 arch/powerpc/kernel/process.c thread->debug.dac1 = thread->debug.dac2 = 0; thread 647 
arch/powerpc/kernel/process.c thread->debug.dvc1 = thread->debug.dvc2 = 0; thread 649 arch/powerpc/kernel/process.c thread->debug.dbcr0 = 0; thread 654 arch/powerpc/kernel/process.c thread->debug.dbcr1 = DBCR1_IAC1US | DBCR1_IAC2US | thread 660 arch/powerpc/kernel/process.c thread->debug.dbcr2 = DBCR2_DAC1US | DBCR2_DAC2US; thread 662 arch/powerpc/kernel/process.c thread->debug.dbcr1 = 0; thread 700 arch/powerpc/kernel/process.c if ((current->thread.debug.dbcr0 & DBCR0_IDM) thread 714 arch/powerpc/kernel/process.c static void set_debug_reg_defaults(struct thread_struct *thread) thread 716 arch/powerpc/kernel/process.c thread->hw_brk.address = 0; thread 717 arch/powerpc/kernel/process.c thread->hw_brk.type = 0; thread 719 arch/powerpc/kernel/process.c set_breakpoint(&thread->hw_brk); thread 826 arch/powerpc/kernel/process.c return tsk && tsk->thread.regs && (tsk->thread.regs->msr & MSR_TM); thread 849 arch/powerpc/kernel/process.c giveup_all(container_of(thr, struct task_struct, thread)); thread 876 arch/powerpc/kernel/process.c tm_reclaim_thread(&current->thread, cause); thread 891 arch/powerpc/kernel/process.c struct thread_struct *thr = &tsk->thread; thread 921 arch/powerpc/kernel/process.c extern void __tm_recheckpoint(struct thread_struct *thread); thread 923 arch/powerpc/kernel/process.c void tm_recheckpoint(struct thread_struct *thread) thread 927 arch/powerpc/kernel/process.c if (!(thread->regs->msr & MSR_TM)) thread 940 arch/powerpc/kernel/process.c tm_restore_sprs(thread); thread 942 arch/powerpc/kernel/process.c __tm_recheckpoint(thread); thread 963 arch/powerpc/kernel/process.c if (!MSR_TM_ACTIVE(new->thread.regs->msr)){ thread 964 arch/powerpc/kernel/process.c tm_restore_sprs(&new->thread); thread 969 arch/powerpc/kernel/process.c new->pid, new->thread.regs->msr); thread 971 arch/powerpc/kernel/process.c tm_recheckpoint(&new->thread); thread 978 arch/powerpc/kernel/process.c new->thread.regs->msr &= ~(MSR_FP | MSR_VEC | MSR_VSX); thread 993 arch/powerpc/kernel/process.c prev->thread.load_tm++; thread 995 arch/powerpc/kernel/process.c if (!MSR_TM_ACTIVE(prev->thread.regs->msr) && prev->thread.load_tm == 0) thread 996 arch/powerpc/kernel/process.c prev->thread.regs->msr &= ~MSR_TM; thread 1031 arch/powerpc/kernel/process.c msr_diff = current->thread.ckpt_regs.msr & ~regs->msr; thread 1036 arch/powerpc/kernel/process.c current->thread.load_fp = 1; thread 1039 arch/powerpc/kernel/process.c current->thread.load_vec = 1; thread 1131 arch/powerpc/kernel/process.c new_thread = &new->thread; thread 1132 arch/powerpc/kernel/process.c old_thread = &current->thread; thread 1147 arch/powerpc/kernel/process.c switch_booke_debug_regs(&new->thread.debug); thread 1154 arch/powerpc/kernel/process.c if (unlikely(!hw_brk_match(this_cpu_ptr(&current_brk), &new->thread.hw_brk))) thread 1155 arch/powerpc/kernel/process.c __set_breakpoint(&new->thread.hw_brk); thread 1163 arch/powerpc/kernel/process.c save_sprs(&prev->thread); thread 1197 arch/powerpc/kernel/process.c if (current->thread.regs) { thread 1198 arch/powerpc/kernel/process.c restore_math(current->thread.regs); thread 1208 arch/powerpc/kernel/process.c if (current->thread.used_vas) thread 1444 arch/powerpc/kernel/process.c set_debug_reg_defaults(&current->thread); thread 1463 arch/powerpc/kernel/process.c current->thread.used_vas = 1; thread 1520 arch/powerpc/kernel/process.c if (t->thread.tidr) thread 1523 arch/powerpc/kernel/process.c t->thread.tidr = (u16)task_pid_nr(t); thread 1524 arch/powerpc/kernel/process.c mtspr(SPRN_TIDR, t->thread.tidr); thread 1579 
arch/powerpc/kernel/process.c p->thread.ksp_vsid = sp_vsid; thread 1618 arch/powerpc/kernel/process.c p->thread.regs = NULL; /* no user register state */ thread 1628 arch/powerpc/kernel/process.c p->thread.regs = childregs; thread 1656 arch/powerpc/kernel/process.c p->thread.ksp = sp; thread 1658 arch/powerpc/kernel/process.c p->thread.ksp_limit = (unsigned long)end_of_stack(p); thread 1661 arch/powerpc/kernel/process.c p->thread.ptrace_bps[0] = NULL; thread 1664 arch/powerpc/kernel/process.c p->thread.fp_save_area = NULL; thread 1666 arch/powerpc/kernel/process.c p->thread.vr_save_area = NULL; thread 1673 arch/powerpc/kernel/process.c p->thread.dscr_inherit = current->thread.dscr_inherit; thread 1674 arch/powerpc/kernel/process.c p->thread.dscr = mfspr(SPRN_DSCR); thread 1679 arch/powerpc/kernel/process.c p->thread.tidr = 0; thread 1705 arch/powerpc/kernel/process.c if (!current->thread.regs) { thread 1707 arch/powerpc/kernel/process.c current->thread.regs = regs - 1; thread 1787 arch/powerpc/kernel/process.c current->thread.used_vsr = 0; thread 1789 arch/powerpc/kernel/process.c current->thread.load_slb = 0; thread 1790 arch/powerpc/kernel/process.c current->thread.load_fp = 0; thread 1791 arch/powerpc/kernel/process.c memset(&current->thread.fp_state, 0, sizeof(current->thread.fp_state)); thread 1792 arch/powerpc/kernel/process.c current->thread.fp_save_area = NULL; thread 1794 arch/powerpc/kernel/process.c memset(&current->thread.vr_state, 0, sizeof(current->thread.vr_state)); thread 1795 arch/powerpc/kernel/process.c current->thread.vr_state.vscr.u[3] = 0x00010000; /* Java mode disabled */ thread 1796 arch/powerpc/kernel/process.c current->thread.vr_save_area = NULL; thread 1797 arch/powerpc/kernel/process.c current->thread.vrsave = 0; thread 1798 arch/powerpc/kernel/process.c current->thread.used_vr = 0; thread 1799 arch/powerpc/kernel/process.c current->thread.load_vec = 0; thread 1802 arch/powerpc/kernel/process.c memset(current->thread.evr, 0, sizeof(current->thread.evr)); thread 1803 arch/powerpc/kernel/process.c current->thread.acc = 0; thread 1804 arch/powerpc/kernel/process.c current->thread.spefscr = 0; thread 1805 arch/powerpc/kernel/process.c current->thread.used_spe = 0; thread 1808 arch/powerpc/kernel/process.c current->thread.tm_tfhar = 0; thread 1809 arch/powerpc/kernel/process.c current->thread.tm_texasr = 0; thread 1810 arch/powerpc/kernel/process.c current->thread.tm_tfiar = 0; thread 1811 arch/powerpc/kernel/process.c current->thread.load_tm = 0; thread 1814 arch/powerpc/kernel/process.c thread_pkey_regs_init(&current->thread); thread 1823 arch/powerpc/kernel/process.c struct pt_regs *regs = tsk->thread.regs; thread 1844 arch/powerpc/kernel/process.c tsk->thread.spefscr_last = mfspr(SPRN_SPEFSCR); thread 1845 arch/powerpc/kernel/process.c tsk->thread.fpexc_mode = val & thread 1863 arch/powerpc/kernel/process.c tsk->thread.fpexc_mode = __pack_fe01(val); thread 1866 arch/powerpc/kernel/process.c | tsk->thread.fpexc_mode; thread 1874 arch/powerpc/kernel/process.c if (tsk->thread.fpexc_mode & PR_FP_EXC_SW_ENABLE) thread 1889 arch/powerpc/kernel/process.c tsk->thread.spefscr_last = mfspr(SPRN_SPEFSCR); thread 1890 arch/powerpc/kernel/process.c val = tsk->thread.fpexc_mode; thread 1897 arch/powerpc/kernel/process.c val = __unpack_fe01(tsk->thread.fpexc_mode); thread 1903 arch/powerpc/kernel/process.c struct pt_regs *regs = tsk->thread.regs; thread 1924 arch/powerpc/kernel/process.c struct pt_regs *regs = tsk->thread.regs; thread 1947 arch/powerpc/kernel/process.c tsk->thread.align_ctl = 
val; thread 1953 arch/powerpc/kernel/process.c return put_user(tsk->thread.align_ctl, (unsigned int __user *)adr); thread 1997 arch/powerpc/kernel/process.c sp = p->thread.ksp; thread 2052 arch/powerpc/kernel/process.c sp = tsk->thread.ksp; thread 145 arch/powerpc/kernel/ptrace.c tm_save_sprs(&(tsk->thread)); thread 209 arch/powerpc/kernel/ptrace.c return task->thread.regs->msr | task->thread.fpexc_mode; thread 214 arch/powerpc/kernel/ptrace.c task->thread.regs->msr &= ~MSR_DEBUGCHANGE; thread 215 arch/powerpc/kernel/ptrace.c task->thread.regs->msr |= msr & MSR_DEBUGCHANGE; thread 222 arch/powerpc/kernel/ptrace.c return task->thread.ckpt_regs.msr | task->thread.fpexc_mode; thread 227 arch/powerpc/kernel/ptrace.c task->thread.ckpt_regs.msr &= ~MSR_DEBUGCHANGE; thread 228 arch/powerpc/kernel/ptrace.c task->thread.ckpt_regs.msr |= msr & MSR_DEBUGCHANGE; thread 234 arch/powerpc/kernel/ptrace.c task->thread.ckpt_regs.trap = trap & 0xfff0; thread 242 arch/powerpc/kernel/ptrace.c *data = task->thread.dscr; thread 248 arch/powerpc/kernel/ptrace.c task->thread.dscr = dscr; thread 249 arch/powerpc/kernel/ptrace.c task->thread.dscr_inherit = 1; thread 270 arch/powerpc/kernel/ptrace.c task->thread.regs->trap = trap & 0xfff0; thread 281 arch/powerpc/kernel/ptrace.c if ((task->thread.regs == NULL) || !data) thread 307 arch/powerpc/kernel/ptrace.c *data = ((unsigned long *)task->thread.regs)[regno]; thread 319 arch/powerpc/kernel/ptrace.c if (task->thread.regs == NULL) thread 331 arch/powerpc/kernel/ptrace.c ((unsigned long *)task->thread.regs)[regno] = data; thread 343 arch/powerpc/kernel/ptrace.c if (target->thread.regs == NULL) thread 346 arch/powerpc/kernel/ptrace.c if (!FULL_REGS(target->thread.regs)) { thread 349 arch/powerpc/kernel/ptrace.c target->thread.regs->gpr[i] = NV_REG_POISON; thread 353 arch/powerpc/kernel/ptrace.c target->thread.regs, thread 368 arch/powerpc/kernel/ptrace.c &target->thread.regs->orig_gpr3, thread 385 arch/powerpc/kernel/ptrace.c if (target->thread.regs == NULL) thread 388 arch/powerpc/kernel/ptrace.c CHECK_FULL_REGS(target->thread.regs); thread 391 arch/powerpc/kernel/ptrace.c target->thread.regs, thread 407 arch/powerpc/kernel/ptrace.c &target->thread.regs->orig_gpr3, thread 457 arch/powerpc/kernel/ptrace.c buf[i] = target->thread.TS_FPR(i); thread 458 arch/powerpc/kernel/ptrace.c buf[32] = target->thread.fp_state.fpscr; thread 467 arch/powerpc/kernel/ptrace.c &target->thread.fp_state, 0, -1); thread 495 arch/powerpc/kernel/ptrace.c buf[i] = target->thread.TS_FPR(i); thread 496 arch/powerpc/kernel/ptrace.c buf[32] = target->thread.fp_state.fpscr; thread 504 arch/powerpc/kernel/ptrace.c target->thread.TS_FPR(i) = buf[i]; thread 505 arch/powerpc/kernel/ptrace.c target->thread.fp_state.fpscr = buf[32]; thread 514 arch/powerpc/kernel/ptrace.c &target->thread.fp_state, 0, -1); thread 536 arch/powerpc/kernel/ptrace.c return target->thread.used_vr ? regset->n : 0; thread 565 arch/powerpc/kernel/ptrace.c &target->thread.vr_state, 0, thread 578 arch/powerpc/kernel/ptrace.c vrsave.word = target->thread.vrsave; thread 615 arch/powerpc/kernel/ptrace.c &target->thread.vr_state, 0, thread 628 arch/powerpc/kernel/ptrace.c vrsave.word = target->thread.vrsave; thread 635 arch/powerpc/kernel/ptrace.c target->thread.vrsave = vrsave.word; thread 653 arch/powerpc/kernel/ptrace.c return target->thread.used_vsr ? 
regset->n : 0; thread 681 arch/powerpc/kernel/ptrace.c buf[i] = target->thread.fp_state.fpr[i][TS_VSRLOWOFFSET]; thread 714 arch/powerpc/kernel/ptrace.c buf[i] = target->thread.fp_state.fpr[i][TS_VSRLOWOFFSET]; thread 720 arch/powerpc/kernel/ptrace.c target->thread.fp_state.fpr[i][TS_VSRLOWOFFSET] = buf[i]; thread 742 arch/powerpc/kernel/ptrace.c return target->thread.used_spe ? regset->n : 0; thread 754 arch/powerpc/kernel/ptrace.c &target->thread.evr, thread 755 arch/powerpc/kernel/ptrace.c 0, sizeof(target->thread.evr)); thread 762 arch/powerpc/kernel/ptrace.c &target->thread.acc, thread 763 arch/powerpc/kernel/ptrace.c sizeof(target->thread.evr), -1); thread 777 arch/powerpc/kernel/ptrace.c &target->thread.evr, thread 778 arch/powerpc/kernel/ptrace.c 0, sizeof(target->thread.evr)); thread 785 arch/powerpc/kernel/ptrace.c &target->thread.acc, thread 786 arch/powerpc/kernel/ptrace.c sizeof(target->thread.evr), -1); thread 807 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) thread 843 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) thread 851 arch/powerpc/kernel/ptrace.c &target->thread.ckpt_regs, thread 867 arch/powerpc/kernel/ptrace.c &target->thread.ckpt_regs.orig_gpr3, thread 908 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) thread 916 arch/powerpc/kernel/ptrace.c &target->thread.ckpt_regs, thread 932 arch/powerpc/kernel/ptrace.c &target->thread.ckpt_regs.orig_gpr3, thread 972 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) thread 1010 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) thread 1019 arch/powerpc/kernel/ptrace.c buf[i] = target->thread.TS_CKFPR(i); thread 1020 arch/powerpc/kernel/ptrace.c buf[32] = target->thread.ckfp_state.fpscr; thread 1056 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) thread 1064 arch/powerpc/kernel/ptrace.c buf[i] = target->thread.TS_CKFPR(i); thread 1065 arch/powerpc/kernel/ptrace.c buf[32] = target->thread.ckfp_state.fpscr; thread 1072 arch/powerpc/kernel/ptrace.c target->thread.TS_CKFPR(i) = buf[i]; thread 1073 arch/powerpc/kernel/ptrace.c target->thread.ckfp_state.fpscr = buf[32]; thread 1091 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) thread 1131 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) thread 1140 arch/powerpc/kernel/ptrace.c &target->thread.ckvr_state, 0, thread 1151 arch/powerpc/kernel/ptrace.c vrsave.word = target->thread.ckvrsave; thread 1193 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) thread 1201 arch/powerpc/kernel/ptrace.c &target->thread.ckvr_state, 0, thread 1212 arch/powerpc/kernel/ptrace.c vrsave.word = target->thread.ckvrsave; thread 1216 arch/powerpc/kernel/ptrace.c target->thread.ckvrsave = vrsave.word; thread 1236 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) thread 1240 arch/powerpc/kernel/ptrace.c return target->thread.used_vsr ? 
regset->n : 0; thread 1274 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) thread 1284 arch/powerpc/kernel/ptrace.c buf[i] = target->thread.ckfp_state.fpr[i][TS_VSRLOWOFFSET]; thread 1322 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) thread 1332 arch/powerpc/kernel/ptrace.c buf[i] = target->thread.ckfp_state.fpr[i][TS_VSRLOWOFFSET]; thread 1338 arch/powerpc/kernel/ptrace.c target->thread.ckfp_state.fpr[i][TS_VSRLOWOFFSET] = buf[i]; thread 1400 arch/powerpc/kernel/ptrace.c &target->thread.tm_tfhar, 0, sizeof(u64)); thread 1405 arch/powerpc/kernel/ptrace.c &target->thread.tm_texasr, sizeof(u64), thread 1411 arch/powerpc/kernel/ptrace.c &target->thread.tm_tfiar, thread 1456 arch/powerpc/kernel/ptrace.c &target->thread.tm_tfhar, 0, sizeof(u64)); thread 1461 arch/powerpc/kernel/ptrace.c &target->thread.tm_texasr, sizeof(u64), thread 1467 arch/powerpc/kernel/ptrace.c &target->thread.tm_tfiar, thread 1478 arch/powerpc/kernel/ptrace.c if (MSR_TM_ACTIVE(target->thread.regs->msr)) thread 1494 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) thread 1498 arch/powerpc/kernel/ptrace.c &target->thread.tm_tar, 0, sizeof(u64)); thread 1512 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) thread 1516 arch/powerpc/kernel/ptrace.c &target->thread.tm_tar, 0, sizeof(u64)); thread 1526 arch/powerpc/kernel/ptrace.c if (MSR_TM_ACTIVE(target->thread.regs->msr)) thread 1543 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) thread 1547 arch/powerpc/kernel/ptrace.c &target->thread.tm_ppr, 0, sizeof(u64)); thread 1561 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) thread 1565 arch/powerpc/kernel/ptrace.c &target->thread.tm_ppr, 0, sizeof(u64)); thread 1575 arch/powerpc/kernel/ptrace.c if (MSR_TM_ACTIVE(target->thread.regs->msr)) thread 1591 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) thread 1595 arch/powerpc/kernel/ptrace.c &target->thread.tm_dscr, 0, sizeof(u64)); thread 1609 arch/powerpc/kernel/ptrace.c if (!MSR_TM_ACTIVE(target->thread.regs->msr)) thread 1613 arch/powerpc/kernel/ptrace.c &target->thread.tm_dscr, 0, sizeof(u64)); thread 1625 arch/powerpc/kernel/ptrace.c &target->thread.regs->ppr, 0, sizeof(u64)); thread 1634 arch/powerpc/kernel/ptrace.c &target->thread.regs->ppr, 0, sizeof(u64)); thread 1643 arch/powerpc/kernel/ptrace.c &target->thread.dscr, 0, sizeof(u64)); thread 1651 arch/powerpc/kernel/ptrace.c &target->thread.dscr, 0, sizeof(u64)); thread 1661 arch/powerpc/kernel/ptrace.c &target->thread.tar, 0, sizeof(u64)); thread 1669 arch/powerpc/kernel/ptrace.c &target->thread.tar, 0, sizeof(u64)); thread 1678 arch/powerpc/kernel/ptrace.c if (target->thread.used_ebb) thread 1696 arch/powerpc/kernel/ptrace.c if (!target->thread.used_ebb) thread 1700 arch/powerpc/kernel/ptrace.c &target->thread.ebbrr, 0, 3 * sizeof(unsigned long)); thread 1717 arch/powerpc/kernel/ptrace.c if (target->thread.used_ebb) thread 1721 arch/powerpc/kernel/ptrace.c &target->thread.ebbrr, 0, sizeof(unsigned long)); thread 1725 arch/powerpc/kernel/ptrace.c &target->thread.ebbhr, sizeof(unsigned long), thread 1730 arch/powerpc/kernel/ptrace.c &target->thread.bescr, thread 1759 arch/powerpc/kernel/ptrace.c &target->thread.siar, 0, thread 1780 arch/powerpc/kernel/ptrace.c &target->thread.siar, 0, thread 1785 arch/powerpc/kernel/ptrace.c &target->thread.sdar, sizeof(unsigned long), thread 1790 arch/powerpc/kernel/ptrace.c &target->thread.sier, 2 * 
sizeof(unsigned long), thread 1795 arch/powerpc/kernel/ptrace.c &target->thread.mmcr2, 3 * sizeof(unsigned long), thread 1800 arch/powerpc/kernel/ptrace.c &target->thread.mmcr0, 4 * sizeof(unsigned long), thread 1828 arch/powerpc/kernel/ptrace.c &target->thread.amr, 0, thread 1853 arch/powerpc/kernel/ptrace.c target->thread.amr = (new_amr & target->thread.uamor) | thread 1854 arch/powerpc/kernel/ptrace.c (target->thread.amr & ~target->thread.uamor); thread 2141 arch/powerpc/kernel/ptrace.c &target->thread.ckpt_regs.gpr[0]); thread 2150 arch/powerpc/kernel/ptrace.c &target->thread.ckpt_regs.gpr[0]); thread 2161 arch/powerpc/kernel/ptrace.c if (target->thread.regs == NULL) thread 2164 arch/powerpc/kernel/ptrace.c if (!FULL_REGS(target->thread.regs)) { thread 2170 arch/powerpc/kernel/ptrace.c target->thread.regs->gpr[i] = NV_REG_POISON; thread 2173 arch/powerpc/kernel/ptrace.c &target->thread.regs->gpr[0]); thread 2181 arch/powerpc/kernel/ptrace.c if (target->thread.regs == NULL) thread 2184 arch/powerpc/kernel/ptrace.c CHECK_FULL_REGS(target->thread.regs); thread 2186 arch/powerpc/kernel/ptrace.c &target->thread.regs->gpr[0]); thread 2304 arch/powerpc/kernel/ptrace.c struct pt_regs *regs = task->thread.regs; thread 2308 arch/powerpc/kernel/ptrace.c task->thread.debug.dbcr0 &= ~DBCR0_BT; thread 2309 arch/powerpc/kernel/ptrace.c task->thread.debug.dbcr0 |= DBCR0_IDM | DBCR0_IC; thread 2321 arch/powerpc/kernel/ptrace.c struct pt_regs *regs = task->thread.regs; thread 2325 arch/powerpc/kernel/ptrace.c task->thread.debug.dbcr0 &= ~DBCR0_IC; thread 2326 arch/powerpc/kernel/ptrace.c task->thread.debug.dbcr0 = DBCR0_IDM | DBCR0_BT; thread 2338 arch/powerpc/kernel/ptrace.c struct pt_regs *regs = task->thread.regs; thread 2348 arch/powerpc/kernel/ptrace.c task->thread.debug.dbcr0 &= ~(DBCR0_IC|DBCR0_BT); thread 2352 arch/powerpc/kernel/ptrace.c if (!DBCR_ACTIVE_EVENTS(task->thread.debug.dbcr0, thread 2353 arch/powerpc/kernel/ptrace.c task->thread.debug.dbcr1)) { thread 2357 arch/powerpc/kernel/ptrace.c task->thread.debug.dbcr0 &= ~DBCR0_IDM; thread 2390 arch/powerpc/kernel/ptrace.c struct thread_struct *thread = &(task->thread); thread 2431 arch/powerpc/kernel/ptrace.c bp = thread->ptrace_bps[0]; thread 2435 arch/powerpc/kernel/ptrace.c thread->ptrace_bps[0] = NULL; thread 2451 arch/powerpc/kernel/ptrace.c thread->ptrace_bps[0] = bp; thread 2452 arch/powerpc/kernel/ptrace.c thread->hw_brk = hw_brk; thread 2463 arch/powerpc/kernel/ptrace.c thread->ptrace_bps[0] = bp = register_user_hw_breakpoint(&attr, thread 2466 arch/powerpc/kernel/ptrace.c thread->ptrace_bps[0] = NULL; thread 2474 arch/powerpc/kernel/ptrace.c task->thread.hw_brk = hw_brk; thread 2482 arch/powerpc/kernel/ptrace.c task->thread.debug.dac1 = data & ~0x3UL; thread 2484 arch/powerpc/kernel/ptrace.c if (task->thread.debug.dac1 == 0) { thread 2486 arch/powerpc/kernel/ptrace.c if (!DBCR_ACTIVE_EVENTS(task->thread.debug.dbcr0, thread 2487 arch/powerpc/kernel/ptrace.c task->thread.debug.dbcr1)) { thread 2488 arch/powerpc/kernel/ptrace.c task->thread.regs->msr &= ~MSR_DE; thread 2489 arch/powerpc/kernel/ptrace.c task->thread.debug.dbcr0 &= ~DBCR0_IDM; thread 2501 arch/powerpc/kernel/ptrace.c task->thread.debug.dbcr0 |= DBCR0_IDM; thread 2510 arch/powerpc/kernel/ptrace.c task->thread.regs->msr |= MSR_DE; thread 2531 arch/powerpc/kernel/ptrace.c int slot1_in_use = ((child->thread.debug.dbcr0 & DBCR0_IAC1) != 0); thread 2532 arch/powerpc/kernel/ptrace.c int slot2_in_use = ((child->thread.debug.dbcr0 & DBCR0_IAC2) != 0); thread 2533 
arch/powerpc/kernel/ptrace.c int slot3_in_use = ((child->thread.debug.dbcr0 & DBCR0_IAC3) != 0); thread 2534 arch/powerpc/kernel/ptrace.c int slot4_in_use = ((child->thread.debug.dbcr0 & DBCR0_IAC4) != 0); thread 2553 arch/powerpc/kernel/ptrace.c child->thread.debug.iac1 = bp_info->addr; thread 2554 arch/powerpc/kernel/ptrace.c child->thread.debug.iac2 = bp_info->addr2; thread 2555 arch/powerpc/kernel/ptrace.c child->thread.debug.dbcr0 |= DBCR0_IAC1; thread 2564 arch/powerpc/kernel/ptrace.c child->thread.debug.iac3 = bp_info->addr; thread 2565 arch/powerpc/kernel/ptrace.c child->thread.debug.iac4 = bp_info->addr2; thread 2566 arch/powerpc/kernel/ptrace.c child->thread.debug.dbcr0 |= DBCR0_IAC3; thread 2586 arch/powerpc/kernel/ptrace.c child->thread.debug.iac1 = bp_info->addr; thread 2587 arch/powerpc/kernel/ptrace.c child->thread.debug.dbcr0 |= DBCR0_IAC1; thread 2593 arch/powerpc/kernel/ptrace.c child->thread.debug.iac2 = bp_info->addr; thread 2594 arch/powerpc/kernel/ptrace.c child->thread.debug.dbcr0 |= DBCR0_IAC2; thread 2598 arch/powerpc/kernel/ptrace.c child->thread.debug.iac3 = bp_info->addr; thread 2599 arch/powerpc/kernel/ptrace.c child->thread.debug.dbcr0 |= DBCR0_IAC3; thread 2602 arch/powerpc/kernel/ptrace.c child->thread.debug.iac4 = bp_info->addr; thread 2603 arch/powerpc/kernel/ptrace.c child->thread.debug.dbcr0 |= DBCR0_IAC4; thread 2609 arch/powerpc/kernel/ptrace.c child->thread.debug.dbcr0 |= DBCR0_IDM; thread 2610 arch/powerpc/kernel/ptrace.c child->thread.regs->msr |= MSR_DE; thread 2619 arch/powerpc/kernel/ptrace.c if ((child->thread.debug.dbcr0 & DBCR0_IAC1) == 0) thread 2624 arch/powerpc/kernel/ptrace.c child->thread.debug.iac2 = 0; thread 2627 arch/powerpc/kernel/ptrace.c child->thread.debug.iac1 = 0; thread 2628 arch/powerpc/kernel/ptrace.c child->thread.debug.dbcr0 &= ~DBCR0_IAC1; thread 2631 arch/powerpc/kernel/ptrace.c if ((child->thread.debug.dbcr0 & DBCR0_IAC2) == 0) thread 2637 arch/powerpc/kernel/ptrace.c child->thread.debug.iac2 = 0; thread 2638 arch/powerpc/kernel/ptrace.c child->thread.debug.dbcr0 &= ~DBCR0_IAC2; thread 2642 arch/powerpc/kernel/ptrace.c if ((child->thread.debug.dbcr0 & DBCR0_IAC3) == 0) thread 2647 arch/powerpc/kernel/ptrace.c child->thread.debug.iac4 = 0; thread 2650 arch/powerpc/kernel/ptrace.c child->thread.debug.iac3 = 0; thread 2651 arch/powerpc/kernel/ptrace.c child->thread.debug.dbcr0 &= ~DBCR0_IAC3; thread 2654 arch/powerpc/kernel/ptrace.c if ((child->thread.debug.dbcr0 & DBCR0_IAC4) == 0) thread 2660 arch/powerpc/kernel/ptrace.c child->thread.debug.iac4 = 0; thread 2661 arch/powerpc/kernel/ptrace.c child->thread.debug.dbcr0 &= ~DBCR0_IAC4; thread 2691 arch/powerpc/kernel/ptrace.c child->thread.debug.dac1 = (unsigned long)bp_info->addr; thread 2694 arch/powerpc/kernel/ptrace.c child->thread.debug.dvc1 = thread 2696 arch/powerpc/kernel/ptrace.c child->thread.debug.dbcr2 |= thread 2702 arch/powerpc/kernel/ptrace.c } else if (child->thread.debug.dbcr2 & DBCR2_DAC12MODE) { thread 2712 arch/powerpc/kernel/ptrace.c child->thread.debug.dac2 = (unsigned long)bp_info->addr; thread 2715 arch/powerpc/kernel/ptrace.c child->thread.debug.dvc2 = thread 2717 arch/powerpc/kernel/ptrace.c child->thread.debug.dbcr2 |= thread 2724 arch/powerpc/kernel/ptrace.c child->thread.debug.dbcr0 |= DBCR0_IDM; thread 2725 arch/powerpc/kernel/ptrace.c child->thread.regs->msr |= MSR_DE; thread 2736 arch/powerpc/kernel/ptrace.c child->thread.debug.dac1 = 0; thread 2739 arch/powerpc/kernel/ptrace.c if (child->thread.debug.dbcr2 & DBCR2_DAC12MODE) { thread 2740 
arch/powerpc/kernel/ptrace.c child->thread.debug.dac2 = 0; thread 2741 arch/powerpc/kernel/ptrace.c child->thread.debug.dbcr2 &= ~DBCR2_DAC12MODE; thread 2743 arch/powerpc/kernel/ptrace.c child->thread.debug.dbcr2 &= ~(DBCR2_DVC1M | DBCR2_DVC1BE); thread 2746 arch/powerpc/kernel/ptrace.c child->thread.debug.dvc1 = 0; thread 2753 arch/powerpc/kernel/ptrace.c if (child->thread.debug.dbcr2 & DBCR2_DAC12MODE) thread 2756 arch/powerpc/kernel/ptrace.c child->thread.debug.dbcr2 &= ~(DBCR2_DVC2M | DBCR2_DVC2BE); thread 2759 arch/powerpc/kernel/ptrace.c child->thread.debug.dvc2 = 0; thread 2761 arch/powerpc/kernel/ptrace.c child->thread.debug.dac2 = 0; thread 2803 arch/powerpc/kernel/ptrace.c if (child->thread.debug.dbcr0 & thread 2808 arch/powerpc/kernel/ptrace.c child->thread.debug.dbcr0 |= (DBCR0_DAC1R | DBCR0_IDM); thread 2810 arch/powerpc/kernel/ptrace.c child->thread.debug.dbcr0 |= (DBCR0_DAC1W | DBCR0_IDM); thread 2811 arch/powerpc/kernel/ptrace.c child->thread.debug.dac1 = bp_info->addr; thread 2812 arch/powerpc/kernel/ptrace.c child->thread.debug.dac2 = bp_info->addr2; thread 2814 arch/powerpc/kernel/ptrace.c child->thread.debug.dbcr2 |= DBCR2_DAC12M; thread 2816 arch/powerpc/kernel/ptrace.c child->thread.debug.dbcr2 |= DBCR2_DAC12MX; thread 2818 arch/powerpc/kernel/ptrace.c child->thread.debug.dbcr2 |= DBCR2_DAC12MM; thread 2819 arch/powerpc/kernel/ptrace.c child->thread.regs->msr |= MSR_DE; thread 2830 arch/powerpc/kernel/ptrace.c struct thread_struct *thread = &(child->thread); thread 2901 arch/powerpc/kernel/ptrace.c bp = thread->ptrace_bps[0]; thread 2911 arch/powerpc/kernel/ptrace.c thread->ptrace_bps[0] = bp = register_user_hw_breakpoint(&attr, thread 2914 arch/powerpc/kernel/ptrace.c thread->ptrace_bps[0] = NULL; thread 2924 arch/powerpc/kernel/ptrace.c if (child->thread.hw_brk.address) thread 2930 arch/powerpc/kernel/ptrace.c child->thread.hw_brk = brk; thread 2940 arch/powerpc/kernel/ptrace.c struct thread_struct *thread = &(child->thread); thread 2952 arch/powerpc/kernel/ptrace.c if (!DBCR_ACTIVE_EVENTS(child->thread.debug.dbcr0, thread 2953 arch/powerpc/kernel/ptrace.c child->thread.debug.dbcr1)) { thread 2954 arch/powerpc/kernel/ptrace.c child->thread.debug.dbcr0 &= ~DBCR0_IDM; thread 2955 arch/powerpc/kernel/ptrace.c child->thread.regs->msr &= ~MSR_DE; thread 2964 arch/powerpc/kernel/ptrace.c bp = thread->ptrace_bps[0]; thread 2967 arch/powerpc/kernel/ptrace.c thread->ptrace_bps[0] = NULL; thread 2972 arch/powerpc/kernel/ptrace.c if (child->thread.hw_brk.address == 0) thread 2975 arch/powerpc/kernel/ptrace.c child->thread.hw_brk.address = 0; thread 2976 arch/powerpc/kernel/ptrace.c child->thread.hw_brk.type = 0; thread 3000 arch/powerpc/kernel/ptrace.c || (child->thread.regs == NULL)) thread 3007 arch/powerpc/kernel/ptrace.c CHECK_FULL_REGS(child->thread.regs); thread 3017 arch/powerpc/kernel/ptrace.c memcpy(&tmp, &child->thread.TS_FPR(fpidx), thread 3020 arch/powerpc/kernel/ptrace.c tmp = child->thread.fp_state.fpscr; thread 3035 arch/powerpc/kernel/ptrace.c || (child->thread.regs == NULL)) thread 3042 arch/powerpc/kernel/ptrace.c CHECK_FULL_REGS(child->thread.regs); thread 3050 arch/powerpc/kernel/ptrace.c memcpy(&child->thread.TS_FPR(fpidx), &data, thread 3053 arch/powerpc/kernel/ptrace.c child->thread.fp_state.fpscr = data; thread 3127 arch/powerpc/kernel/ptrace.c ret = put_user(child->thread.debug.dac1, datalp); thread 3129 arch/powerpc/kernel/ptrace.c dabr_fake = ((child->thread.hw_brk.address & (~HW_BRK_TYPE_DABR)) | thread 3130 arch/powerpc/kernel/ptrace.c 
(child->thread.hw_brk.type & HW_BRK_TYPE_DABR)); thread 95 arch/powerpc/kernel/ptrace32.c CHECK_FULL_REGS(child->thread.regs); thread 107 arch/powerpc/kernel/ptrace32.c tmp = ((unsigned int *)child->thread.fp_state.fpr) thread 145 arch/powerpc/kernel/ptrace32.c CHECK_FULL_REGS(child->thread.regs); thread 149 arch/powerpc/kernel/ptrace32.c tmp = child->thread.fp_state.fpr[numReg - PT_FPR0][0]; thread 199 arch/powerpc/kernel/ptrace32.c CHECK_FULL_REGS(child->thread.regs); thread 209 arch/powerpc/kernel/ptrace32.c ((unsigned int *)child->thread.fp_state.fpr) thread 238 arch/powerpc/kernel/ptrace32.c CHECK_FULL_REGS(child->thread.regs); thread 253 arch/powerpc/kernel/ptrace32.c tmp = &child->thread.fp_state.fpr[numReg - PT_FPR0][0]; thread 270 arch/powerpc/kernel/ptrace32.c ret = put_user(child->thread.debug.dac1, (u32 __user *)data); thread 273 arch/powerpc/kernel/ptrace32.c (child->thread.hw_brk.address & (~HW_BRK_TYPE_DABR)) | thread 274 arch/powerpc/kernel/ptrace32.c (child->thread.hw_brk.type & HW_BRK_TYPE_DABR)); thread 116 arch/powerpc/kernel/signal.c check_syscall_restart(tsk->thread.regs, &ksig.ka, ksig.sig > 0); thread 121 arch/powerpc/kernel/signal.c tsk->thread.regs->trap = 0; thread 131 arch/powerpc/kernel/signal.c if (tsk->thread.hw_brk.address && tsk->thread.hw_brk.type) thread 132 arch/powerpc/kernel/signal.c __set_breakpoint(&tsk->thread.hw_brk); thread 135 arch/powerpc/kernel/signal.c thread_change_pc(tsk, tsk->thread.regs); thread 137 arch/powerpc/kernel/signal.c rseq_signal_deliver(&ksig, tsk->thread.regs); thread 148 arch/powerpc/kernel/signal.c tsk->thread.regs->trap = 0; thread 166 arch/powerpc/kernel/signal.c BUG_ON(regs != current->thread.regs); thread 203 arch/powerpc/kernel/signal.c unsigned long ret = tsk->thread.regs->gpr[1]; thread 208 arch/powerpc/kernel/signal.c if (MSR_TM_ACTIVE(tsk->thread.regs->msr)) { thread 211 arch/powerpc/kernel/signal.c if (MSR_TM_TRANSACTIONAL(tsk->thread.regs->msr)) thread 212 arch/powerpc/kernel/signal.c ret = tsk->thread.ckpt_regs.gpr[1]; thread 221 arch/powerpc/kernel/signal.c tsk->thread.regs->msr &= ~MSR_TS_MASK; thread 247 arch/powerpc/kernel/signal_32.c buf[i] = task->thread.TS_FPR(i); thread 248 arch/powerpc/kernel/signal_32.c buf[i] = task->thread.fp_state.fpscr; thread 261 arch/powerpc/kernel/signal_32.c task->thread.TS_FPR(i) = buf[i]; thread 262 arch/powerpc/kernel/signal_32.c task->thread.fp_state.fpscr = buf[i]; thread 275 arch/powerpc/kernel/signal_32.c buf[i] = task->thread.fp_state.fpr[i][TS_VSRLOWOFFSET]; thread 288 arch/powerpc/kernel/signal_32.c task->thread.fp_state.fpr[i][TS_VSRLOWOFFSET] = buf[i]; thread 301 arch/powerpc/kernel/signal_32.c buf[i] = task->thread.TS_CKFPR(i); thread 302 arch/powerpc/kernel/signal_32.c buf[i] = task->thread.ckfp_state.fpscr; thread 315 arch/powerpc/kernel/signal_32.c task->thread.TS_CKFPR(i) = buf[i]; thread 316 arch/powerpc/kernel/signal_32.c task->thread.ckfp_state.fpscr = buf[i]; thread 329 arch/powerpc/kernel/signal_32.c buf[i] = task->thread.ckfp_state.fpr[i][TS_VSRLOWOFFSET]; thread 342 arch/powerpc/kernel/signal_32.c task->thread.ckfp_state.fpr[i][TS_VSRLOWOFFSET] = buf[i]; thread 350 arch/powerpc/kernel/signal_32.c return __copy_to_user(to, task->thread.fp_state.fpr, thread 357 arch/powerpc/kernel/signal_32.c return __copy_from_user(task->thread.fp_state.fpr, from, thread 365 arch/powerpc/kernel/signal_32.c return __copy_to_user(to, task->thread.ckfp_state.fpr, thread 372 arch/powerpc/kernel/signal_32.c return __copy_from_user(task->thread.ckfp_state.fpr, from, thread 398 
arch/powerpc/kernel/signal_32.c if (current->thread.used_vr) { thread 400 arch/powerpc/kernel/signal_32.c if (__copy_to_user(&frame->mc_vregs, &current->thread.vr_state, thread 416 arch/powerpc/kernel/signal_32.c current->thread.vrsave = mfspr(SPRN_VRSAVE); thread 417 arch/powerpc/kernel/signal_32.c if (__put_user(current->thread.vrsave, (u32 __user *)&frame->mc_vregs[32])) thread 435 arch/powerpc/kernel/signal_32.c if (current->thread.used_vsr && ctx_has_vsx_region) { thread 444 arch/powerpc/kernel/signal_32.c if (current->thread.used_spe) { thread 446 arch/powerpc/kernel/signal_32.c if (__copy_to_user(&frame->mc_vregs, current->thread.evr, thread 456 arch/powerpc/kernel/signal_32.c if (__put_user(current->thread.spefscr, (u32 __user *)&frame->mc_vregs + ELF_NEVRREG)) thread 498 arch/powerpc/kernel/signal_32.c if (save_general_regs(&current->thread.ckpt_regs, frame) thread 513 arch/powerpc/kernel/signal_32.c if (current->thread.used_vr) { thread 514 arch/powerpc/kernel/signal_32.c if (__copy_to_user(&frame->mc_vregs, &current->thread.ckvr_state, thread 519 arch/powerpc/kernel/signal_32.c &current->thread.vr_state, thread 524 arch/powerpc/kernel/signal_32.c &current->thread.ckvr_state, thread 541 arch/powerpc/kernel/signal_32.c current->thread.ckvrsave = mfspr(SPRN_VRSAVE); thread 542 arch/powerpc/kernel/signal_32.c if (__put_user(current->thread.ckvrsave, thread 546 arch/powerpc/kernel/signal_32.c if (__put_user(current->thread.vrsave, thread 550 arch/powerpc/kernel/signal_32.c if (__put_user(current->thread.ckvrsave, thread 573 arch/powerpc/kernel/signal_32.c if (current->thread.used_vsr) { thread 592 arch/powerpc/kernel/signal_32.c if (current->thread.used_spe) { thread 594 arch/powerpc/kernel/signal_32.c if (__copy_to_user(&frame->mc_vregs, current->thread.evr, thread 603 arch/powerpc/kernel/signal_32.c if (__put_user(current->thread.spefscr, (u32 __user *)&frame->mc_vregs + ELF_NEVRREG)) thread 662 arch/powerpc/kernel/signal_32.c if (__copy_from_user(&current->thread.vr_state, &sr->mc_vregs, thread 665 arch/powerpc/kernel/signal_32.c current->thread.used_vr = true; thread 666 arch/powerpc/kernel/signal_32.c } else if (current->thread.used_vr) thread 667 arch/powerpc/kernel/signal_32.c memset(&current->thread.vr_state, 0, thread 671 arch/powerpc/kernel/signal_32.c if (__get_user(current->thread.vrsave, (u32 __user *)&sr->mc_vregs[32])) thread 674 arch/powerpc/kernel/signal_32.c mtspr(SPRN_VRSAVE, current->thread.vrsave); thread 692 arch/powerpc/kernel/signal_32.c current->thread.used_vsr = true; thread 693 arch/powerpc/kernel/signal_32.c } else if (current->thread.used_vsr) thread 695 arch/powerpc/kernel/signal_32.c current->thread.fp_state.fpr[i][TS_VSRLOWOFFSET] = 0; thread 709 arch/powerpc/kernel/signal_32.c if (__copy_from_user(current->thread.evr, &sr->mc_vregs, thread 712 arch/powerpc/kernel/signal_32.c current->thread.used_spe = true; thread 713 arch/powerpc/kernel/signal_32.c } else if (current->thread.used_spe) thread 714 arch/powerpc/kernel/signal_32.c memset(current->thread.evr, 0, ELF_NEVRREG * sizeof(u32)); thread 717 arch/powerpc/kernel/signal_32.c if (__get_user(current->thread.spefscr, (u32 __user *)&sr->mc_vregs + ELF_NEVRREG)) thread 750 arch/powerpc/kernel/signal_32.c err |= restore_general_regs(&current->thread.ckpt_regs, sr); thread 752 arch/powerpc/kernel/signal_32.c err |= __get_user(current->thread.tm_tfhar, &sr->mc_gregs[PT_NIP]); thread 765 arch/powerpc/kernel/signal_32.c if (__copy_from_user(&current->thread.ckvr_state, &sr->mc_vregs, thread 767 arch/powerpc/kernel/signal_32.c
__copy_from_user(&current->thread.vr_state, thread 771 arch/powerpc/kernel/signal_32.c current->thread.used_vr = true; thread 772 arch/powerpc/kernel/signal_32.c } else if (current->thread.used_vr) { thread 773 arch/powerpc/kernel/signal_32.c memset(&current->thread.vr_state, 0, thread 775 arch/powerpc/kernel/signal_32.c memset(&current->thread.ckvr_state, 0, thread 780 arch/powerpc/kernel/signal_32.c if (__get_user(current->thread.ckvrsave, thread 782 arch/powerpc/kernel/signal_32.c __get_user(current->thread.vrsave, thread 786 arch/powerpc/kernel/signal_32.c mtspr(SPRN_VRSAVE, current->thread.ckvrsave); thread 805 arch/powerpc/kernel/signal_32.c current->thread.used_vsr = true; thread 806 arch/powerpc/kernel/signal_32.c } else if (current->thread.used_vsr) thread 808 arch/powerpc/kernel/signal_32.c current->thread.fp_state.fpr[i][TS_VSRLOWOFFSET] = 0; thread 809 arch/powerpc/kernel/signal_32.c current->thread.ckfp_state.fpr[i][TS_VSRLOWOFFSET] = 0; thread 819 arch/powerpc/kernel/signal_32.c if (__copy_from_user(current->thread.evr, &sr->mc_vregs, thread 822 arch/powerpc/kernel/signal_32.c current->thread.used_spe = true; thread 823 arch/powerpc/kernel/signal_32.c } else if (current->thread.used_spe) thread 824 arch/powerpc/kernel/signal_32.c memset(current->thread.evr, 0, ELF_NEVRREG * sizeof(u32)); thread 827 arch/powerpc/kernel/signal_32.c if (__get_user(current->thread.spefscr, (u32 __user *)&sr->mc_vregs thread 863 arch/powerpc/kernel/signal_32.c current->thread.tm_texasr |= TEXASR_FS; thread 865 arch/powerpc/kernel/signal_32.c tm_recheckpoint(&current->thread); thread 870 arch/powerpc/kernel/signal_32.c load_fp_state(&current->thread.fp_state); thread 871 arch/powerpc/kernel/signal_32.c regs->msr |= (MSR_FP | current->thread.fpexc_mode); thread 875 arch/powerpc/kernel/signal_32.c load_vr_state(&current->thread.vr_state); thread 906 arch/powerpc/kernel/signal_32.c struct pt_regs *regs = tsk->thread.regs; thread 962 arch/powerpc/kernel/signal_32.c tsk->thread.fp_state.fpscr = 0; /* turn off all fp exceptions */ thread 1260 arch/powerpc/kernel/signal_32.c unsigned long new_dbcr0 = current->thread.debug.dbcr0; thread 1275 arch/powerpc/kernel/signal_32.c current->thread.debug.dbcr1)) { thread 1310 arch/powerpc/kernel/signal_32.c current->thread.debug.dbcr0 = new_dbcr0; thread 1367 arch/powerpc/kernel/signal_32.c struct pt_regs *regs = tsk->thread.regs; thread 1419 arch/powerpc/kernel/signal_32.c tsk->thread.fp_state.fpscr = 0; /* turn off all fp exceptions */ thread 107 arch/powerpc/kernel/signal_64.c struct pt_regs *regs = tsk->thread.regs; thread 119 arch/powerpc/kernel/signal_64.c if (tsk->thread.used_vr) { thread 122 arch/powerpc/kernel/signal_64.c err |= __copy_to_user(v_regs, &tsk->thread.vr_state, thread 135 arch/powerpc/kernel/signal_64.c tsk->thread.vrsave = vrsave; thread 157 arch/powerpc/kernel/signal_64.c if (tsk->thread.used_vsr && ctx_has_vsx_region) { thread 210 arch/powerpc/kernel/signal_64.c struct pt_regs *regs = tsk->thread.regs; thread 223 arch/powerpc/kernel/signal_64.c msr |= tsk->thread.ckpt_regs.msr & (MSR_FP | MSR_VEC | MSR_VSX); thread 230 arch/powerpc/kernel/signal_64.c if (tsk->thread.used_vr) { thread 232 arch/powerpc/kernel/signal_64.c err |= __copy_to_user(v_regs, &tsk->thread.ckvr_state, thread 239 arch/powerpc/kernel/signal_64.c &tsk->thread.vr_state, thread 243 arch/powerpc/kernel/signal_64.c &tsk->thread.ckvr_state, thread 255 arch/powerpc/kernel/signal_64.c tsk->thread.ckvrsave = mfspr(SPRN_VRSAVE); thread 256 arch/powerpc/kernel/signal_64.c err |= __put_user(tsk->thread.ckvrsave, (u32
__user *)&v_regs[33]); thread 258 arch/powerpc/kernel/signal_64.c err |= __put_user(tsk->thread.vrsave, thread 261 arch/powerpc/kernel/signal_64.c err |= __put_user(tsk->thread.ckvrsave, thread 282 arch/powerpc/kernel/signal_64.c if (tsk->thread.used_vsr) { thread 305 arch/powerpc/kernel/signal_64.c &tsk->thread.ckpt_regs, GP_REGS_SIZE); thread 330 arch/powerpc/kernel/signal_64.c struct pt_regs *regs = tsk->thread.regs; thread 379 arch/powerpc/kernel/signal_64.c err |= __copy_from_user(&tsk->thread.vr_state, v_regs, thread 381 arch/powerpc/kernel/signal_64.c tsk->thread.used_vr = true; thread 382 arch/powerpc/kernel/signal_64.c } else if (tsk->thread.used_vr) { thread 383 arch/powerpc/kernel/signal_64.c memset(&tsk->thread.vr_state, 0, 33 * sizeof(vector128)); thread 387 arch/powerpc/kernel/signal_64.c err |= __get_user(tsk->thread.vrsave, (u32 __user *)&v_regs[33]); thread 389 arch/powerpc/kernel/signal_64.c tsk->thread.vrsave = 0; thread 391 arch/powerpc/kernel/signal_64.c mtspr(SPRN_VRSAVE, tsk->thread.vrsave); thread 404 arch/powerpc/kernel/signal_64.c tsk->thread.used_vsr = true; thread 407 arch/powerpc/kernel/signal_64.c tsk->thread.fp_state.fpr[i][TS_VSRLOWOFFSET] = 0; thread 427 arch/powerpc/kernel/signal_64.c struct pt_regs *regs = tsk->thread.regs; thread 439 arch/powerpc/kernel/signal_64.c err |= __copy_from_user(&tsk->thread.ckpt_regs, sc->gp_regs, thread 451 arch/powerpc/kernel/signal_64.c err |= __get_user(tsk->thread.tm_tfhar, &sc->gp_regs[PT_NIP]); thread 467 arch/powerpc/kernel/signal_64.c err |= __get_user(tsk->thread.ckpt_regs.ctr, thread 469 arch/powerpc/kernel/signal_64.c err |= __get_user(tsk->thread.ckpt_regs.link, thread 471 arch/powerpc/kernel/signal_64.c err |= __get_user(tsk->thread.ckpt_regs.xer, thread 473 arch/powerpc/kernel/signal_64.c err |= __get_user(tsk->thread.ckpt_regs.ccr, thread 502 arch/powerpc/kernel/signal_64.c err |= __copy_from_user(&tsk->thread.ckvr_state, v_regs, thread 504 arch/powerpc/kernel/signal_64.c err |= __copy_from_user(&tsk->thread.vr_state, tm_v_regs, thread 506 arch/powerpc/kernel/signal_64.c current->thread.used_vr = true; thread 508 arch/powerpc/kernel/signal_64.c else if (tsk->thread.used_vr) { thread 509 arch/powerpc/kernel/signal_64.c memset(&tsk->thread.vr_state, 0, 33 * sizeof(vector128)); thread 510 arch/powerpc/kernel/signal_64.c memset(&tsk->thread.ckvr_state, 0, 33 * sizeof(vector128)); thread 514 arch/powerpc/kernel/signal_64.c err |= __get_user(tsk->thread.ckvrsave, thread 516 arch/powerpc/kernel/signal_64.c err |= __get_user(tsk->thread.vrsave, thread 520 arch/powerpc/kernel/signal_64.c tsk->thread.vrsave = 0; thread 521 arch/powerpc/kernel/signal_64.c tsk->thread.ckvrsave = 0; thread 524 arch/powerpc/kernel/signal_64.c mtspr(SPRN_VRSAVE, tsk->thread.vrsave); thread 540 arch/powerpc/kernel/signal_64.c tsk->thread.used_vsr = true; thread 543 arch/powerpc/kernel/signal_64.c tsk->thread.fp_state.fpr[i][TS_VSRLOWOFFSET] = 0; thread 544 arch/powerpc/kernel/signal_64.c tsk->thread.ckfp_state.fpr[i][TS_VSRLOWOFFSET] = 0; thread 550 arch/powerpc/kernel/signal_64.c tsk->thread.tm_texasr |= TEXASR_FS; thread 580 arch/powerpc/kernel/signal_64.c tm_recheckpoint(&tsk->thread); thread 584 arch/powerpc/kernel/signal_64.c load_fp_state(&tsk->thread.fp_state); thread 585 arch/powerpc/kernel/signal_64.c regs->msr |= (MSR_FP | tsk->thread.fpexc_mode); thread 588 arch/powerpc/kernel/signal_64.c load_vr_state(&tsk->thread.vr_state); thread 794 arch/powerpc/kernel/signal_64.c current->thread.regs->msr &= ~MSR_TS_MASK; thread 821 
arch/powerpc/kernel/signal_64.c struct pt_regs *regs = tsk->thread.regs; thread 866 arch/powerpc/kernel/signal_64.c tsk->thread.fp_state.fpscr = 0; thread 76 arch/powerpc/kernel/stacktrace.c sp = tsk->thread.ksp; thread 136 arch/powerpc/kernel/stacktrace.c sp = tsk->thread.ksp; thread 125 arch/powerpc/kernel/syscalls.c current->thread.regs->msr ^= MSR_LE; thread 535 arch/powerpc/kernel/sysfs.c if (!current->thread.dscr_inherit) { thread 536 arch/powerpc/kernel/sysfs.c current->thread.dscr = *(unsigned long *)val; thread 349 arch/powerpc/kernel/traps.c current->thread.trap_nr = code; thread 355 arch/powerpc/kernel/traps.c thread_pkey_regs_save(&current->thread); thread 588 arch/powerpc/kernel/traps.c #define single_stepping(regs) (current->thread.debug.dbcr0 & DBCR0_IC) thread 589 arch/powerpc/kernel/traps.c #define clear_single_step(regs) (current->thread.debug.dbcr0 &= ~DBCR0_IC) thread 917 arch/powerpc/kernel/traps.c vdst = (u8 *)&current->thread.vr_state.vr[t]; thread 919 arch/powerpc/kernel/traps.c vdst = (u8 *)&current->thread.fp_state.fpr[t][0]; thread 1185 arch/powerpc/kernel/traps.c code = __parse_fpscr(current->thread.fp_state.fpscr); thread 1414 arch/powerpc/kernel/traps.c current->thread.dscr = regs->gpr[rd]; thread 1415 arch/powerpc/kernel/traps.c current->thread.dscr_inherit = 1; thread 1416 arch/powerpc/kernel/traps.c mtspr(SPRN_DSCR, current->thread.dscr); thread 1445 arch/powerpc/kernel/traps.c code = __parse_fpscr(current->thread.fp_state.fpscr); thread 1605 arch/powerpc/kernel/traps.c if (!(current->thread.align_ctl & PR_UNALIGN_SIGBUS)) thread 1689 arch/powerpc/kernel/traps.c current->thread.load_tm++; thread 1692 arch/powerpc/kernel/traps.c tm_restore_sprs(&current->thread); thread 1770 arch/powerpc/kernel/traps.c current->thread.dscr = regs->gpr[rd]; thread 1771 arch/powerpc/kernel/traps.c current->thread.dscr_inherit = 1; thread 1772 arch/powerpc/kernel/traps.c current->thread.fscr |= FSCR_DSCR; thread 1773 arch/powerpc/kernel/traps.c mtspr(SPRN_FSCR, current->thread.fscr); thread 1845 arch/powerpc/kernel/traps.c current->thread.load_fp = 1; thread 1850 arch/powerpc/kernel/traps.c tm_recheckpoint(&current->thread); thread 1863 arch/powerpc/kernel/traps.c current->thread.load_vec = 1; thread 1864 arch/powerpc/kernel/traps.c tm_recheckpoint(&current->thread); thread 1865 arch/powerpc/kernel/traps.c current->thread.used_vr = 1; thread 1881 arch/powerpc/kernel/traps.c current->thread.used_vsr = 1; thread 1886 arch/powerpc/kernel/traps.c current->thread.load_vec = 1; thread 1887 arch/powerpc/kernel/traps.c current->thread.load_fp = 1; thread 1889 arch/powerpc/kernel/traps.c tm_recheckpoint(&current->thread); thread 1911 arch/powerpc/kernel/traps.c current->thread.debug.dbcr2 &= ~DBCR2_DAC12MODE; thread 1922 arch/powerpc/kernel/traps.c current->thread.debug.dbcr0 &= ~DBCR0_IAC1; thread 1928 arch/powerpc/kernel/traps.c current->thread.debug.dbcr0 &= ~DBCR0_IAC2; thread 1933 arch/powerpc/kernel/traps.c current->thread.debug.dbcr0 &= ~DBCR0_IAC3; thread 1939 arch/powerpc/kernel/traps.c current->thread.debug.dbcr0 &= ~DBCR0_IAC4; thread 1949 arch/powerpc/kernel/traps.c if (DBCR_ACTIVE_EVENTS(current->thread.debug.dbcr0, thread 1950 arch/powerpc/kernel/traps.c current->thread.debug.dbcr1)) thread 1954 arch/powerpc/kernel/traps.c current->thread.debug.dbcr0 &= ~DBCR0_IDM; thread 1957 arch/powerpc/kernel/traps.c mtspr(SPRN_DBCR0, current->thread.debug.dbcr0); thread 1962 arch/powerpc/kernel/traps.c current->thread.debug.dbsr = debug_status; thread 1979 arch/powerpc/kernel/traps.c current->thread.debug.dbcr0 &= ~DBCR0_BT;
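The arch/powerpc/kernel/ptrace.c and arch/powerpc/kernel/traps.c hits around this point all manipulate the same per-task BookE debug state: DBCR0_IC (instruction complete) and DBCR0_BT (branch taken) select the single-step flavour, DBCR0_IDM turns on internal debug mode, and MSR_DE in the task's saved pt_regs lets the events actually fire. Below is a minimal sketch of that enable/disable pattern; it is not the kernel's literal code, the sketch_* names are invented for illustration, and only the thread.debug fields and the DBCR0_*/MSR_DE/DBCR_ACTIVE_EVENTS definitions visible in the listing are assumed.

/*
 * Sketch only: assumes a powerpc kernel context where <linux/sched.h> and the
 * powerpc register headers provide task_struct, pt_regs, DBCR0_*, MSR_DE and
 * DBCR_ACTIVE_EVENTS(), matching the ptrace.c/traps.c lines quoted above.
 */
static void sketch_enable_single_step(struct task_struct *task)
{
        struct pt_regs *regs = task->thread.regs;

        if (!regs)
                return;

        /* Instruction-complete event + internal debug mode, then MSR_DE. */
        task->thread.debug.dbcr0 &= ~DBCR0_BT;
        task->thread.debug.dbcr0 |= DBCR0_IDM | DBCR0_IC;
        regs->msr |= MSR_DE;
}

static void sketch_disable_single_step(struct task_struct *task)
{
        struct pt_regs *regs = task->thread.regs;

        if (!regs)
                return;

        task->thread.debug.dbcr0 &= ~(DBCR0_IC | DBCR0_BT);

        /*
         * Drop internal debug mode and MSR_DE only when no other debug
         * events (IAC/DAC breakpoints) remain armed in dbcr0/dbcr1.
         */
        if (!DBCR_ACTIVE_EVENTS(task->thread.debug.dbcr0,
                                task->thread.debug.dbcr1)) {
                task->thread.debug.dbcr0 &= ~DBCR0_IDM;
                regs->msr &= ~MSR_DE;
        }
}

The same DBCR_ACTIVE_EVENTS() guard appears in the ptrace breakpoint-removal paths listed earlier (e.g. arch/powerpc/kernel/ptrace.c lines 2952-2955) before DBCR0_IDM and MSR_DE are cleared, so single-stepping and hardware breakpoints can be torn down independently.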
thread 1980 arch/powerpc/kernel/traps.c current->thread.debug.dbcr0 |= DBCR0_IDM | DBCR0_IC; thread 2014 arch/powerpc/kernel/traps.c current->thread.debug.dbcr0 &= ~DBCR0_IC; thread 2015 arch/powerpc/kernel/traps.c if (DBCR_ACTIVE_EVENTS(current->thread.debug.dbcr0, thread 2016 arch/powerpc/kernel/traps.c current->thread.debug.dbcr1)) thread 2020 arch/powerpc/kernel/traps.c current->thread.debug.dbcr0 &= ~DBCR0_IDM; thread 2067 arch/powerpc/kernel/traps.c current->thread.vr_state.vscr.u[3] |= 0x10000; thread 2101 arch/powerpc/kernel/traps.c spefscr = current->thread.spefscr; thread 2102 arch/powerpc/kernel/traps.c fpexc_mode = current->thread.fpexc_mode; thread 55 arch/powerpc/kernel/uprobes.c autask->saved_trap_nr = current->thread.trap_nr; thread 56 arch/powerpc/kernel/uprobes.c current->thread.trap_nr = UPROBE_TRAP_NR; thread 86 arch/powerpc/kernel/uprobes.c if (t->thread.trap_nr != UPROBE_TRAP_NR) thread 103 arch/powerpc/kernel/uprobes.c WARN_ON_ONCE(current->thread.trap_nr != UPROBE_TRAP_NR); thread 105 arch/powerpc/kernel/uprobes.c current->thread.trap_nr = utask->autask.saved_trap_nr; thread 158 arch/powerpc/kernel/uprobes.c current->thread.trap_nr = utask->autask.saved_trap_nr; thread 276 arch/powerpc/kernel/vecemu.c vrs = current->thread.vr_state.vr; thread 325 arch/powerpc/kernel/vecemu.c &current->thread.vr_state.vscr.u[3]); thread 330 arch/powerpc/kernel/vecemu.c &current->thread.vr_state.vscr.u[3]); thread 4236 arch/powerpc/kvm/book3s_hv.c if (cpu_has_feature(CPU_FTR_TM) && current->thread.regs && thread 4237 arch/powerpc/kvm/book3s_hv.c (current->thread.regs->msr & MSR_TM)) { thread 4238 arch/powerpc/kvm/book3s_hv.c if (MSR_TM_ACTIVE(current->thread.regs->msr)) { thread 4245 arch/powerpc/kvm/book3s_hv.c current->thread.tm_tfhar = mfspr(SPRN_TFHAR); thread 4246 arch/powerpc/kvm/book3s_hv.c current->thread.tm_tfiar = mfspr(SPRN_TFIAR); thread 4247 arch/powerpc/kvm/book3s_hv.c current->thread.tm_texasr = mfspr(SPRN_TEXASR); thread 4248 arch/powerpc/kvm/book3s_hv.c current->thread.regs->msr &= ~MSR_TM; thread 4330 arch/powerpc/kvm/book3s_hv.c mtspr(SPRN_FSCR, current->thread.fscr); thread 112 arch/powerpc/kvm/book3s_pr.c current->thread.kvm_shadow_vcpu = vcpu->arch.shadow_vcpu; thread 786 arch/powerpc/kvm/book3s_pr.c struct thread_struct *t = &current->thread; thread 816 arch/powerpc/kvm/book3s_pr.c if (current->thread.regs->msr & MSR_VEC) thread 838 arch/powerpc/kvm/book3s_pr.c mtspr(SPRN_TAR, current->thread.tar); thread 849 arch/powerpc/kvm/book3s_pr.c struct thread_struct *t = &current->thread; thread 921 arch/powerpc/kvm/book3s_pr.c lost_ext = vcpu->arch.guest_owned_ext & ~current->thread.regs->msr; thread 941 arch/powerpc/kvm/book3s_pr.c current->thread.regs->msr |= lost_ext; thread 999 arch/powerpc/kvm/book3s_pr.c current->thread.tar = mfspr(SPRN_TAR); thread 135 arch/powerpc/kvm/booke.c if (!(current->thread.regs->msr & MSR_FP)) { thread 139 arch/powerpc/kvm/booke.c current->thread.fp_save_area = &vcpu->arch.fp; thread 140 arch/powerpc/kvm/booke.c current->thread.regs->msr |= MSR_FP; thread 152 arch/powerpc/kvm/booke.c if (current->thread.regs->msr & MSR_FP) thread 154 arch/powerpc/kvm/booke.c current->thread.fp_save_area = NULL; thread 177 arch/powerpc/kvm/booke.c if (!(current->thread.regs->msr & MSR_VEC)) { thread 181 arch/powerpc/kvm/booke.c current->thread.vr_save_area = &vcpu->arch.vr; thread 182 arch/powerpc/kvm/booke.c current->thread.regs->msr |= MSR_VEC; thread 196 arch/powerpc/kvm/booke.c if (current->thread.regs->msr & MSR_VEC) thread 198 arch/powerpc/kvm/booke.c
current->thread.vr_save_area = NULL; thread 775 arch/powerpc/kvm/booke.c debug = current->thread.debug; thread 776 arch/powerpc/kvm/booke.c current->thread.debug = vcpu->arch.dbg_reg; thread 788 arch/powerpc/kvm/booke.c current->thread.debug = debug; thread 2095 arch/powerpc/kvm/booke.c current->thread.kvm_vcpu = vcpu; thread 2100 arch/powerpc/kvm/booke.c current->thread.kvm_vcpu = NULL; thread 373 arch/powerpc/kvm/booke_emulate.c current->thread.debug = vcpu->arch.dbg_reg; thread 117 arch/powerpc/kvm/mpic.c struct kvm_vcpu *vcpu = current->thread.kvm_vcpu; thread 495 arch/powerpc/lib/sstep.c current->thread.TS_FPR(rn) = u.l[0]; thread 502 arch/powerpc/lib/sstep.c current->thread.TS_FPR(rn) = u.l[1]; thread 529 arch/powerpc/lib/sstep.c u.l[0] = current->thread.TS_FPR(rn); thread 541 arch/powerpc/lib/sstep.c u.l[1] = current->thread.TS_FPR(rn); thread 579 arch/powerpc/lib/sstep.c current->thread.vr_state.vr[rn] = u.v; thread 602 arch/powerpc/lib/sstep.c u.v = current->thread.vr_state.vr[rn]; thread 835 arch/powerpc/lib/sstep.c current->thread.fp_state.fpr[reg][0] = buf.d[0]; thread 836 arch/powerpc/lib/sstep.c current->thread.fp_state.fpr[reg][1] = buf.d[1]; thread 842 arch/powerpc/lib/sstep.c current->thread.vr_state.vr[reg - 32] = buf.v; thread 866 arch/powerpc/lib/sstep.c buf.d[0] = current->thread.fp_state.fpr[reg][0]; thread 867 arch/powerpc/lib/sstep.c buf.d[1] = current->thread.fp_state.fpr[reg][1]; thread 873 arch/powerpc/lib/sstep.c buf.v = current->thread.vr_state.vr[reg - 32]; thread 2708 arch/powerpc/lib/sstep.c if (ea - STACK_INT_FRAME_SIZE <= current->thread.ksp_limit) { thread 332 arch/powerpc/math-emu/math.c op0 = (void *)&current->thread.TS_FPR((insn >> 21) & 0x1f); thread 333 arch/powerpc/math-emu/math.c op1 = (void *)&current->thread.TS_FPR((insn >> 16) & 0x1f); thread 334 arch/powerpc/math-emu/math.c op2 = (void *)&current->thread.TS_FPR((insn >> 11) & 0x1f); thread 338 arch/powerpc/math-emu/math.c op0 = (void *)&current->thread.TS_FPR((insn >> 21) & 0x1f); thread 339 arch/powerpc/math-emu/math.c op1 = (void *)&current->thread.TS_FPR((insn >> 16) & 0x1f); thread 340 arch/powerpc/math-emu/math.c op2 = (void *)&current->thread.TS_FPR((insn >> 6) & 0x1f); thread 344 arch/powerpc/math-emu/math.c op0 = (void *)&current->thread.TS_FPR((insn >> 21) & 0x1f); thread 345 arch/powerpc/math-emu/math.c op1 = (void *)&current->thread.TS_FPR((insn >> 16) & 0x1f); thread 346 arch/powerpc/math-emu/math.c op2 = (void *)&current->thread.TS_FPR((insn >> 11) & 0x1f); thread 347 arch/powerpc/math-emu/math.c op3 = (void *)&current->thread.TS_FPR((insn >> 6) & 0x1f); thread 353 arch/powerpc/math-emu/math.c op0 = (void *)&current->thread.TS_FPR((insn >> 21) & 0x1f); thread 363 arch/powerpc/math-emu/math.c op0 = (void *)&current->thread.TS_FPR((insn >> 21) & 0x1f); thread 368 arch/powerpc/math-emu/math.c op0 = (void *)&current->thread.TS_FPR((insn >> 21) & 0x1f); thread 372 arch/powerpc/math-emu/math.c op0 = (void *)&current->thread.TS_FPR((insn >> 21) & 0x1f); thread 373 arch/powerpc/math-emu/math.c op1 = (void *)&current->thread.TS_FPR((insn >> 16) & 0x1f); thread 377 arch/powerpc/math-emu/math.c op0 = (void *)&current->thread.TS_FPR((insn >> 21) & 0x1f); thread 378 arch/powerpc/math-emu/math.c op1 = (void *)&current->thread.TS_FPR((insn >> 11) & 0x1f); thread 383 arch/powerpc/math-emu/math.c op0 = (void *)&current->thread.TS_FPR((insn >> 21) & 0x1f); thread 392 arch/powerpc/math-emu/math.c op0 = (void *)&current->thread.TS_FPR((insn >> 21) & 0x1f); thread 400 arch/powerpc/math-emu/math.c op2 = (void *)&current->thread.TS_FPR((insn >> 16) & 0x1f); thread 401 arch/powerpc/math-emu/math.c op3 = (void
*)&current->thread.TS_FPR((insn >> 11) & 0x1f); thread 421 arch/powerpc/math-emu/math.c op1 = (void *)&current->thread.TS_FPR((insn >> 11) & 0x1f); thread 199 arch/powerpc/math-emu/math_efp.c vc.wp[0] = current->thread.evr[fc]; thread 201 arch/powerpc/math-emu/math_efp.c va.wp[0] = current->thread.evr[fa]; thread 203 arch/powerpc/math-emu/math_efp.c vb.wp[0] = current->thread.evr[fb]; thread 676 arch/powerpc/math-emu/math_efp.c &= ~(FP_EX_INVALID | FP_EX_UNDERFLOW) | current->thread.spefscr_last; thread 679 arch/powerpc/math-emu/math_efp.c current->thread.spefscr_last = __FPU_FPSCR; thread 681 arch/powerpc/math-emu/math_efp.c current->thread.evr[fc] = vc.wp[0]; thread 691 arch/powerpc/math-emu/math_efp.c if (current->thread.fpexc_mode & PR_FP_EXC_SW_ENABLE) { thread 693 arch/powerpc/math-emu/math_efp.c && (current->thread.fpexc_mode & PR_FP_EXC_DIV)) thread 696 arch/powerpc/math-emu/math_efp.c && (current->thread.fpexc_mode & PR_FP_EXC_OVF)) thread 699 arch/powerpc/math-emu/math_efp.c && (current->thread.fpexc_mode & PR_FP_EXC_UND)) thread 702 arch/powerpc/math-emu/math_efp.c && (current->thread.fpexc_mode & PR_FP_EXC_RES)) thread 705 arch/powerpc/math-emu/math_efp.c && (current->thread.fpexc_mode & PR_FP_EXC_INV)) thread 752 arch/powerpc/math-emu/math_efp.c s_hi = current->thread.evr[fc] & SIGN_BIT_S; thread 753 arch/powerpc/math-emu/math_efp.c fgpr.wp[0] = current->thread.evr[fc]; thread 798 arch/powerpc/math-emu/math_efp.c s_hi = current->thread.evr[fb] & SIGN_BIT_S; thread 807 arch/powerpc/math-emu/math_efp.c s_hi = current->thread.evr[fb] & SIGN_BIT_S; thread 879 arch/powerpc/math-emu/math_efp.c current->thread.evr[fc] = fgpr.wp[0]; thread 884 arch/powerpc/math-emu/math_efp.c if (current->thread.fpexc_mode & PR_FP_EXC_SW_ENABLE) thread 885 arch/powerpc/math-emu/math_efp.c return (current->thread.fpexc_mode & PR_FP_EXC_RES) ? 1 : 0; thread 334 arch/powerpc/mm/book3s32/mmu.c if (!current->thread.regs) thread 338 arch/powerpc/mm/book3s32/mmu.c if (TRAP(current->thread.regs) != 0x300 && TRAP(current->thread.regs) != 0x400) thread 1638 arch/powerpc/mm/book3s64/hash_utils.c trap = current->thread.regs ?
TRAP(current->thread.regs) : 0UL; thread 1688 arch/powerpc/mm/book3s64/hash_utils.c if (local && cpu_has_feature(CPU_FTR_TM) && current->thread.regs && thread 1689 arch/powerpc/mm/book3s64/hash_utils.c MSR_TM_ACTIVE(current->thread.regs->msr)) { thread 269 arch/powerpc/mm/book3s64/pkeys.c void thread_pkey_regs_save(struct thread_struct *thread) thread 277 arch/powerpc/mm/book3s64/pkeys.c thread->amr = read_amr(); thread 278 arch/powerpc/mm/book3s64/pkeys.c thread->iamr = read_iamr(); thread 279 arch/powerpc/mm/book3s64/pkeys.c thread->uamor = read_uamor(); thread 296 arch/powerpc/mm/book3s64/pkeys.c void thread_pkey_regs_init(struct thread_struct *thread) thread 301 arch/powerpc/mm/book3s64/pkeys.c thread->amr = pkey_amr_mask; thread 302 arch/powerpc/mm/book3s64/pkeys.c thread->iamr = pkey_iamr_mask; thread 303 arch/powerpc/mm/book3s64/pkeys.c thread->uamor = pkey_uamor_mask; thread 483 arch/powerpc/mm/book3s64/slb.c tsk->thread.load_slb++; thread 484 arch/powerpc/mm/book3s64/slb.c if (!tsk->thread.load_slb) { thread 148 arch/powerpc/mm/fault.c current->thread.trap_nr = BUS_ADRERR; thread 260 arch/powerpc/mm/fault.c struct pt_regs *uregs = current->thread.regs; thread 20 arch/powerpc/mm/mmu_context.c tsk->thread.pgdir = mm->pgd; thread 33 arch/powerpc/mm/pgtable.c return current->thread.regs && TRAP(current->thread.regs) == 0x400; thread 571 arch/powerpc/perf/core-book3s.c if (!is_ebb_event(event) || current->thread.used_ebb) thread 580 arch/powerpc/perf/core-book3s.c current->thread.used_ebb = 1; thread 581 arch/powerpc/perf/core-book3s.c current->thread.mmcr0 |= MMCR0_PMXE; thread 589 arch/powerpc/perf/core-book3s.c current->thread.siar = mfspr(SPRN_SIAR); thread 590 arch/powerpc/perf/core-book3s.c current->thread.sier = mfspr(SPRN_SIER); thread 591 arch/powerpc/perf/core-book3s.c current->thread.sdar = mfspr(SPRN_SDAR); thread 592 arch/powerpc/perf/core-book3s.c current->thread.mmcr0 = mmcr0 & MMCR0_USER_MASK; thread 593 arch/powerpc/perf/core-book3s.c current->thread.mmcr2 = mfspr(SPRN_MMCR2) & MMCR2_USER_MASK; thread 611 arch/powerpc/perf/core-book3s.c mmcr0 |= current->thread.mmcr0; thread 618 arch/powerpc/perf/core-book3s.c if (!(current->thread.mmcr0 & MMCR0_PMXE)) thread 621 arch/powerpc/perf/core-book3s.c mtspr(SPRN_SIAR, current->thread.siar); thread 622 arch/powerpc/perf/core-book3s.c mtspr(SPRN_SIER, current->thread.sier); thread 623 arch/powerpc/perf/core-book3s.c mtspr(SPRN_SDAR, current->thread.sdar); thread 632 arch/powerpc/perf/core-book3s.c mtspr(SPRN_MMCR2, cpuhw->mmcr[3] | current->thread.mmcr2); thread 677 arch/powerpc/perf/core-book3s.c if ((current->thread.mmcr0 & (MMCR0_PMAO | MMCR0_PMAO_SYNC)) != MMCR0_PMAO) thread 681 arch/powerpc/perf/core-book3s.c if (ebb && !(current->thread.bescr & BESCR_GE)) thread 267 arch/powerpc/platforms/cell/cbe_regs.c struct cbe_thread_map *thread = &cbe_thread_map[i]; thread 269 arch/powerpc/platforms/cell/cbe_regs.c if (thread->cpu_node == cpu) { thread 270 arch/powerpc/platforms/cell/cbe_regs.c thread->regs = map; thread 271 arch/powerpc/platforms/cell/cbe_regs.c thread->cbe_id = cbe_id; thread 272 arch/powerpc/platforms/cell/cbe_regs.c map->be_node = thread->be_node; thread 274 arch/powerpc/platforms/cell/cbe_regs.c if(thread->thread_id == 0) thread 379 arch/powerpc/platforms/cell/interrupt.c void iic_set_interrupt_routing(int cpu, int thread, int priority) thread 388 arch/powerpc/platforms/cell/interrupt.c if (thread == 0) thread 87 arch/powerpc/platforms/cell/interrupt.h extern void iic_set_interrupt_routing(int cpu, int thread, 
int priority); thread 343 arch/powerpc/platforms/cell/pmu.c void cbe_enable_pm_interrupts(u32 cpu, u32 thread, u32 mask) thread 346 arch/powerpc/platforms/cell/pmu.c iic_set_interrupt_routing(cpu, thread, 0); thread 259 arch/powerpc/platforms/powernv/idle.c unsigned long thread = 1UL << cpu_thread_in_core(cpu); thread 265 arch/powerpc/platforms/powernv/idle.c BUG_ON(s & thread); thread 268 arch/powerpc/platforms/powernv/idle.c new = (s | thread) & ~PNV_CORE_IDLE_LOCK_BIT; thread 317 arch/powerpc/platforms/powernv/idle.c unsigned long thread = 1UL << cpu_thread_in_core(cpu); thread 328 arch/powerpc/platforms/powernv/idle.c BUG_ON(!(*state & thread)); thread 329 arch/powerpc/platforms/powernv/idle.c *state &= ~thread; thread 413 arch/powerpc/platforms/powernv/idle.c *state &= ~(thread << PNV_CORE_IDLE_THREAD_WINKLE_BITS_SHIFT); thread 429 arch/powerpc/platforms/powernv/idle.c if (*state & (thread << PNV_CORE_IDLE_THREAD_WINKLE_BITS_SHIFT)) { thread 430 arch/powerpc/platforms/powernv/idle.c *state &= ~(thread << PNV_CORE_IDLE_THREAD_WINKLE_BITS_SHIFT); thread 436 arch/powerpc/platforms/powernv/idle.c WARN_ON(*state & thread); thread 314 arch/powerpc/platforms/pseries/hotplug-cpu.c u32 thread; thread 324 arch/powerpc/platforms/pseries/hotplug-cpu.c thread = be32_to_cpu(intserv[i]); thread 326 arch/powerpc/platforms/pseries/hotplug-cpu.c if (get_hard_smp_processor_id(cpu) != thread) thread 336 arch/powerpc/platforms/pseries/hotplug-cpu.c "with physical id 0x%x\n", thread); thread 347 arch/powerpc/platforms/pseries/hotplug-cpu.c u32 thread; thread 357 arch/powerpc/platforms/pseries/hotplug-cpu.c thread = be32_to_cpu(intserv[i]); thread 359 arch/powerpc/platforms/pseries/hotplug-cpu.c if (get_hard_smp_processor_id(cpu) != thread) thread 375 arch/powerpc/platforms/pseries/hotplug-cpu.c "with physical id 0x%x\n", thread); thread 514 arch/powerpc/platforms/pseries/hotplug-cpu.c u32 thread; thread 524 arch/powerpc/platforms/pseries/hotplug-cpu.c thread = be32_to_cpu(intserv[i]); thread 526 arch/powerpc/platforms/pseries/hotplug-cpu.c if (get_hard_smp_processor_id(cpu) != thread) thread 550 arch/powerpc/platforms/pseries/hotplug-cpu.c BUG_ON(plpar_hcall_norets(H_PROD, thread) thread 556 arch/powerpc/platforms/pseries/hotplug-cpu.c printk(KERN_WARNING "Could not find cpu to offline with physical id 0x%x\n", thread); thread 137 arch/powerpc/sysdev/fsl_rcpm.c int thread = cpu_thread_in_core(cpu); thread 139 arch/powerpc/sysdev/fsl_rcpm.c book3e_stop_thread(thread); thread 3096 arch/powerpc/xmon/xmon.c tsk->thread.ksp, thread 16 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_RA, task_struct, thread.ra); thread 17 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_SP, task_struct, thread.sp); thread 18 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_S0, task_struct, thread.s[0]); thread 19 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_S1, task_struct, thread.s[1]); thread 20 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_S2, task_struct, thread.s[2]); thread 21 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_S3, task_struct, thread.s[3]); thread 22 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_S4, task_struct, thread.s[4]); thread 23 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_S5, task_struct, thread.s[5]); thread 24 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_S6, task_struct, thread.s[6]); thread 25 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_S7, task_struct, thread.s[7]); thread 26 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_S8, task_struct, 
thread.s[8]); thread 27 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_S9, task_struct, thread.s[9]); thread 28 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_S10, task_struct, thread.s[10]); thread 29 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_S11, task_struct, thread.s[11]); thread 30 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_SP, task_struct, thread.sp); thread 39 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F0, task_struct, thread.fstate.f[0]); thread 40 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F1, task_struct, thread.fstate.f[1]); thread 41 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F2, task_struct, thread.fstate.f[2]); thread 42 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F3, task_struct, thread.fstate.f[3]); thread 43 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F4, task_struct, thread.fstate.f[4]); thread 44 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F5, task_struct, thread.fstate.f[5]); thread 45 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F6, task_struct, thread.fstate.f[6]); thread 46 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F7, task_struct, thread.fstate.f[7]); thread 47 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F8, task_struct, thread.fstate.f[8]); thread 48 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F9, task_struct, thread.fstate.f[9]); thread 49 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F10, task_struct, thread.fstate.f[10]); thread 50 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F11, task_struct, thread.fstate.f[11]); thread 51 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F12, task_struct, thread.fstate.f[12]); thread 52 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F13, task_struct, thread.fstate.f[13]); thread 53 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F14, task_struct, thread.fstate.f[14]); thread 54 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F15, task_struct, thread.fstate.f[15]); thread 55 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F16, task_struct, thread.fstate.f[16]); thread 56 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F17, task_struct, thread.fstate.f[17]); thread 57 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F18, task_struct, thread.fstate.f[18]); thread 58 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F19, task_struct, thread.fstate.f[19]); thread 59 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F20, task_struct, thread.fstate.f[20]); thread 60 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F21, task_struct, thread.fstate.f[21]); thread 61 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F22, task_struct, thread.fstate.f[22]); thread 62 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F23, task_struct, thread.fstate.f[23]); thread 63 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F24, task_struct, thread.fstate.f[24]); thread 64 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F25, task_struct, thread.fstate.f[25]); thread 65 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F26, task_struct, thread.fstate.f[26]); thread 66 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F27, task_struct, thread.fstate.f[27]); thread 67 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F28, task_struct, thread.fstate.f[28]); thread 68 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F29, task_struct, thread.fstate.f[29]); thread 69 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_F30, task_struct, thread.fstate.f[30]); thread 70 arch/riscv/kernel/asm-offsets.c 
OFFSET(TASK_THREAD_F31, task_struct, thread.fstate.f[31]); thread 71 arch/riscv/kernel/asm-offsets.c OFFSET(TASK_THREAD_FCSR, task_struct, thread.fstate.fcsr); thread 118 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.ra) thread 119 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.ra) thread 122 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.sp) thread 123 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.ra) thread 126 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.s[0]) thread 127 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.ra) thread 130 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.s[1]) thread 131 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.ra) thread 134 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.s[2]) thread 135 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.ra) thread 138 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.s[3]) thread 139 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.ra) thread 142 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.s[4]) thread 143 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.ra) thread 146 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.s[5]) thread 147 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.ra) thread 150 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.s[6]) thread 151 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.ra) thread 154 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.s[7]) thread 155 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.ra) thread 158 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.s[8]) thread 159 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.ra) thread 162 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.s[9]) thread 163 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.ra) thread 166 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.s[10]) thread 167 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.ra) thread 170 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.s[11]) thread 171 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.ra) thread 175 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[0]) thread 176 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 179 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[1]) thread 180 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 183 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[2]) thread 184 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 187 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[3]) thread 188 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 191 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[4]) thread 192 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 195 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, 
thread.fstate.f[5]) thread 196 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 199 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[6]) thread 200 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 203 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[7]) thread 204 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 207 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[8]) thread 208 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 211 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[9]) thread 212 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 215 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[10]) thread 216 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 219 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[11]) thread 220 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 223 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[12]) thread 224 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 227 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[13]) thread 228 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 231 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[14]) thread 232 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 235 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[15]) thread 236 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 239 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[16]) thread 240 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 243 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[17]) thread 244 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 247 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[18]) thread 248 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 251 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[19]) thread 252 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 255 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[20]) thread 256 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 259 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[21]) thread 260 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 263 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[22]) thread 264 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 267 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[23]) thread 268 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 271 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, 
thread.fstate.f[24]) thread 272 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 275 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[25]) thread 276 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 279 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[26]) thread 280 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 283 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[27]) thread 284 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 287 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[28]) thread 288 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 291 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[29]) thread 292 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 295 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[30]) thread 296 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 299 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.f[31]) thread 300 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 303 arch/riscv/kernel/asm-offsets.c offsetof(struct task_struct, thread.fstate.fcsr) thread 304 arch/riscv/kernel/asm-offsets.c - offsetof(struct task_struct, thread.fstate.f[0]) thread 91 arch/riscv/kernel/process.c memset(&current->thread.fstate, 0, sizeof(current->thread.fstate)); thread 115 arch/riscv/kernel/process.c p->thread.ra = (unsigned long)ret_from_kernel_thread; thread 116 arch/riscv/kernel/process.c p->thread.s[0] = usp; /* fn */ thread 117 arch/riscv/kernel/process.c p->thread.s[1] = arg; thread 125 arch/riscv/kernel/process.c p->thread.ra = (unsigned long)ret_from_fork; thread 127 arch/riscv/kernel/process.c p->thread.sp = (unsigned long)childregs; /* kernel sp */ thread 62 arch/riscv/kernel/ptrace.c struct __riscv_d_ext_state *fstate = &target->thread.fstate; thread 81 arch/riscv/kernel/ptrace.c struct __riscv_d_ext_state *fstate = &target->thread.fstate; thread 35 arch/riscv/kernel/signal.c err = __copy_from_user(&current->thread.fstate, state, sizeof(*state)); thread 63 arch/riscv/kernel/signal.c err = __copy_to_user(state, &current->thread.fstate, sizeof(*state)); thread 38 arch/riscv/kernel/stacktrace.c fp = task->thread.s[0]; thread 39 arch/riscv/kernel/stacktrace.c sp = task->thread.sp; thread 40 arch/riscv/kernel/stacktrace.c pc = task->thread.ra; thread 81 arch/riscv/kernel/stacktrace.c sp = task->thread.sp; thread 82 arch/riscv/kernel/stacktrace.c pc = task->thread.ra; thread 236 arch/s390/include/asm/elf.h current->thread.sys_call_table = \ thread 247 arch/s390/include/asm/elf.h current->thread.sys_call_table = \ thread 251 arch/s390/include/asm/elf.h current->thread.sys_call_table = \ thread 122 arch/s390/include/asm/mmu_context.h set_fs(current->thread.mm_segment); thread 164 arch/s390/include/asm/processor.h .fpu.regs = (void *) init_task.thread.fpu.fprs, \ thread 208 arch/s390/include/asm/processor.h #define is_ri_task(tsk) (!!(tsk)->thread.ri_cb) thread 43 arch/s390/include/asm/stacktrace.h return (unsigned long) task->thread.ksp; thread 39 arch/s390/include/asm/switch_to.h save_access_regs(&prev->thread.acrs[0]); \ thread 40 arch/s390/include/asm/switch_to.h
save_ri_cb(prev->thread.ri_cb); \ thread 41 arch/s390/include/asm/switch_to.h save_gs_cb(prev->thread.gs_cb); \ thread 43 arch/s390/include/asm/switch_to.h restore_access_regs(&next->thread.acrs[0]); \ thread 44 arch/s390/include/asm/switch_to.h restore_ri_cb(next->thread.ri_cb, prev->thread.ri_cb); \ thread 45 arch/s390/include/asm/switch_to.h restore_gs_cb(next->thread.gs_cb); \ thread 34 arch/s390/include/asm/uaccess.h #define get_fs() (current->thread.mm_segment) thread 25 arch/s390/kernel/asm-offsets.c OFFSET(__TASK_thread, task_struct, thread); thread 56 arch/s390/kernel/compat_signal.c save_access_regs(current->thread.acrs); thread 63 arch/s390/kernel/compat_signal.c restore_access_regs(current->thread.acrs); thread 78 arch/s390/kernel/compat_signal.c memcpy(&user_sregs.regs.acrs, current->thread.acrs, thread 80 arch/s390/kernel/compat_signal.c fpregs_store((_s390_fp_regs *) &user_sregs.fpregs, &current->thread.fpu); thread 116 arch/s390/kernel/compat_signal.c memcpy(&current->thread.acrs, &user_sregs.regs.acrs, thread 117 arch/s390/kernel/compat_signal.c sizeof(current->thread.acrs)); thread 118 arch/s390/kernel/compat_signal.c fpregs_load((_s390_fp_regs *) &user_sregs.fpregs, &current->thread.fpu); thread 141 arch/s390/kernel/compat_signal.c vxrs[i] = *((__u64 *)(current->thread.fpu.vxrs + i) + 1); thread 145 arch/s390/kernel/compat_signal.c current->thread.fpu.vxrs + __NUM_VXRS_LOW, thread 170 arch/s390/kernel/compat_signal.c __copy_from_user(current->thread.fpu.vxrs + __NUM_VXRS_LOW, thread 175 arch/s390/kernel/compat_signal.c *((__u64 *)(current->thread.fpu.vxrs + i) + 1) = vxrs[i]; thread 332 arch/s390/kernel/compat_signal.c regs->gprs[6] = current->thread.last_break; thread 407 arch/s390/kernel/compat_signal.c regs->gprs[5] = current->thread.last_break; thread 17 arch/s390/kernel/guarded_storage.c kfree(tsk->thread.gs_cb); thread 18 arch/s390/kernel/guarded_storage.c kfree(tsk->thread.gs_bc_cb); thread 25 arch/s390/kernel/guarded_storage.c if (!current->thread.gs_cb) { thread 33 arch/s390/kernel/guarded_storage.c current->thread.gs_cb = gs_cb; thread 41 arch/s390/kernel/guarded_storage.c if (current->thread.gs_cb) { thread 43 arch/s390/kernel/guarded_storage.c kfree(current->thread.gs_cb); thread 44 arch/s390/kernel/guarded_storage.c current->thread.gs_cb = NULL; thread 55 arch/s390/kernel/guarded_storage.c gs_cb = current->thread.gs_bc_cb; thread 60 arch/s390/kernel/guarded_storage.c current->thread.gs_bc_cb = gs_cb; thread 71 arch/s390/kernel/guarded_storage.c gs_cb = current->thread.gs_bc_cb; thread 72 arch/s390/kernel/guarded_storage.c current->thread.gs_bc_cb = NULL; thread 83 arch/s390/kernel/guarded_storage.c gs_cb = current->thread.gs_bc_cb; thread 85 arch/s390/kernel/guarded_storage.c kfree(current->thread.gs_cb); thread 86 arch/s390/kernel/guarded_storage.c current->thread.gs_bc_cb = NULL; thread 89 arch/s390/kernel/guarded_storage.c current->thread.gs_cb = gs_cb; thread 100 arch/s390/kernel/guarded_storage.c if (!sibling->thread.gs_bc_cb) thread 23 arch/s390/kernel/perf_regs.c fp = MACHINE_HAS_VX ?
*(freg_t *)(current->thread.fpu.vxrs + idx) thread 24 arch/s390/kernel/perf_regs.c : current->thread.fpu.fprs[idx]; thread 78 arch/s390/kernel/process.c dst->thread.fpu.regs = dst->thread.fpu.fprs; thread 92 arch/s390/kernel/process.c p->thread.ksp = (unsigned long) frame; thread 94 arch/s390/kernel/process.c save_access_regs(&p->thread.acrs[0]); thread 96 arch/s390/kernel/process.c p->thread.mm_segment = get_fs(); thread 98 arch/s390/kernel/process.c memset(&p->thread.per_user, 0, sizeof(p->thread.per_user)); thread 99 arch/s390/kernel/process.c memset(&p->thread.per_event, 0, sizeof(p->thread.per_event)); thread 101 arch/s390/kernel/process.c p->thread.per_flags = 0; thread 103 arch/s390/kernel/process.c p->thread.user_timer = 0; thread 104 arch/s390/kernel/process.c p->thread.guest_timer = 0; thread 105 arch/s390/kernel/process.c p->thread.system_timer = 0; thread 106 arch/s390/kernel/process.c p->thread.hardirq_timer = 0; thread 107 arch/s390/kernel/process.c p->thread.softirq_timer = 0; thread 108 arch/s390/kernel/process.c p->thread.last_break = 1; thread 138 arch/s390/kernel/process.c p->thread.ri_cb = NULL; thread 141 arch/s390/kernel/process.c p->thread.gs_cb = NULL; thread 142 arch/s390/kernel/process.c p->thread.gs_bc_cb = NULL; thread 147 arch/s390/kernel/process.c p->thread.acrs[0] = (unsigned int)tls; thread 149 arch/s390/kernel/process.c p->thread.acrs[0] = (unsigned int)(tls >> 32); thread 150 arch/s390/kernel/process.c p->thread.acrs[1] = (unsigned int)tls; thread 158 arch/s390/kernel/process.c current->thread.fpu.fpc = 0; thread 168 arch/s390/kernel/process.c fpregs->fpc = current->thread.fpu.fpc; thread 172 arch/s390/kernel/process.c current->thread.fpu.vxrs); thread 174 arch/s390/kernel/process.c memcpy(&fpregs->fprs, current->thread.fpu.fprs, thread 194 arch/s390/kernel/process.c sf = (struct stack_frame *) p->thread.ksp; thread 48 arch/s390/kernel/ptrace.c struct thread_struct *thread = &task->thread; thread 62 arch/s390/kernel/ptrace.c if (task->thread.per_flags & PER_FLAG_NO_TE) thread 66 arch/s390/kernel/ptrace.c if (task->thread.per_flags & PER_FLAG_TE_ABORT_RAND) { thread 67 arch/s390/kernel/ptrace.c if (task->thread.per_flags & PER_FLAG_TE_ABORT_RAND_TEND) thread 76 arch/s390/kernel/ptrace.c if (task->thread.gs_cb) thread 87 arch/s390/kernel/ptrace.c new.control = thread->per_user.control; thread 88 arch/s390/kernel/ptrace.c new.start = thread->per_user.start; thread 89 arch/s390/kernel/ptrace.c new.end = thread->per_user.end; thread 142 arch/s390/kernel/ptrace.c memset(&task->thread.per_user, 0, sizeof(task->thread.per_user)); thread 143 arch/s390/kernel/ptrace.c memset(&task->thread.per_event, 0, sizeof(task->thread.per_event)); thread 146 arch/s390/kernel/ptrace.c task->thread.per_flags = 0; thread 159 arch/s390/kernel/ptrace.c PER_EVENT_IFETCH : child->thread.per_user.control; thread 163 arch/s390/kernel/ptrace.c 0 : child->thread.per_user.start; thread 167 arch/s390/kernel/ptrace.c -1UL : child->thread.per_user.end; thread 174 arch/s390/kernel/ptrace.c return child->thread.per_user.start; thread 177 arch/s390/kernel/ptrace.c return child->thread.per_user.end; thread 181 arch/s390/kernel/ptrace.c child->thread.per_event.cause << (BITS_PER_LONG - 16); thread 184 arch/s390/kernel/ptrace.c return child->thread.per_event.address; thread 188 arch/s390/kernel/ptrace.c child->thread.per_event.paid << (BITS_PER_LONG - 8); thread 228 arch/s390/kernel/ptrace.c tmp = ((unsigned long) child->thread.acrs[15]) << 32; thread 230 arch/s390/kernel/ptrace.c tmp = *(addr_t 
*)((addr_t) &child->thread.acrs + offset); thread 249 arch/s390/kernel/ptrace.c tmp = child->thread.fpu.fpc; thread 260 arch/s390/kernel/ptrace.c ((addr_t) child->thread.fpu.vxrs + 2*offset); thread 263 arch/s390/kernel/ptrace.c ((addr_t) child->thread.fpu.fprs + offset); thread 317 arch/s390/kernel/ptrace.c child->thread.per_user.control = thread 321 arch/s390/kernel/ptrace.c child->thread.per_user.start = data; thread 324 arch/s390/kernel/ptrace.c child->thread.per_user.end = data; thread 370 arch/s390/kernel/ptrace.c child->thread.acrs[15] = (unsigned int) (data >> 32); thread 372 arch/s390/kernel/ptrace.c *(addr_t *)((addr_t) &child->thread.acrs + offset) = data; thread 394 arch/s390/kernel/ptrace.c child->thread.fpu.fpc = data >> (BITS_PER_LONG - 32); thread 404 arch/s390/kernel/ptrace.c child->thread.fpu.vxrs + 2*offset) = data; thread 407 arch/s390/kernel/ptrace.c child->thread.fpu.fprs + offset) = data; thread 480 arch/s390/kernel/ptrace.c put_user(child->thread.last_break, thread 486 arch/s390/kernel/ptrace.c child->thread.per_flags &= ~PER_FLAG_NO_TE; thread 491 arch/s390/kernel/ptrace.c child->thread.per_flags |= PER_FLAG_NO_TE; thread 492 arch/s390/kernel/ptrace.c child->thread.per_flags &= ~PER_FLAG_TE_ABORT_RAND; thread 495 arch/s390/kernel/ptrace.c if (!MACHINE_HAS_TE || (child->thread.per_flags & PER_FLAG_NO_TE)) thread 499 arch/s390/kernel/ptrace.c child->thread.per_flags &= ~PER_FLAG_TE_ABORT_RAND; thread 502 arch/s390/kernel/ptrace.c child->thread.per_flags |= PER_FLAG_TE_ABORT_RAND; thread 503 arch/s390/kernel/ptrace.c child->thread.per_flags |= PER_FLAG_TE_ABORT_RAND_TEND; thread 506 arch/s390/kernel/ptrace.c child->thread.per_flags |= PER_FLAG_TE_ABORT_RAND; thread 507 arch/s390/kernel/ptrace.c child->thread.per_flags &= ~PER_FLAG_TE_ABORT_RAND_TEND; thread 544 arch/s390/kernel/ptrace.c PER_EVENT_IFETCH : child->thread.per_user.control; thread 548 arch/s390/kernel/ptrace.c 0 : child->thread.per_user.start; thread 552 arch/s390/kernel/ptrace.c PSW32_ADDR_INSN : child->thread.per_user.end; thread 559 arch/s390/kernel/ptrace.c return (__u32) child->thread.per_user.start; thread 562 arch/s390/kernel/ptrace.c return (__u32) child->thread.per_user.end; thread 565 arch/s390/kernel/ptrace.c return (__u32) child->thread.per_event.cause << 16; thread 568 arch/s390/kernel/ptrace.c return (__u32) child->thread.per_event.address; thread 571 arch/s390/kernel/ptrace.c return (__u32) child->thread.per_event.paid << 24; thread 607 arch/s390/kernel/ptrace.c tmp = *(__u32*)((addr_t) &child->thread.acrs + offset); thread 626 arch/s390/kernel/ptrace.c tmp = child->thread.fpu.fpc; thread 636 arch/s390/kernel/ptrace.c ((addr_t) child->thread.fpu.vxrs + 2*offset); thread 639 arch/s390/kernel/ptrace.c ((addr_t) child->thread.fpu.fprs + offset); thread 676 arch/s390/kernel/ptrace.c child->thread.per_user.control = thread 680 arch/s390/kernel/ptrace.c child->thread.per_user.start = data; thread 683 arch/s390/kernel/ptrace.c child->thread.per_user.end = data; thread 730 arch/s390/kernel/ptrace.c *(__u32*)((addr_t) &child->thread.acrs + offset) = tmp; thread 751 arch/s390/kernel/ptrace.c child->thread.fpu.fpc = data; thread 761 arch/s390/kernel/ptrace.c child->thread.fpu.vxrs + 2*offset) = tmp; thread 764 arch/s390/kernel/ptrace.c child->thread.fpu.fprs + offset) = tmp; thread 830 arch/s390/kernel/ptrace.c put_user(child->thread.last_break, thread 898 arch/s390/kernel/ptrace.c save_access_regs(target->thread.acrs); thread 927 arch/s390/kernel/ptrace.c save_access_regs(target->thread.acrs); thread 
950 arch/s390/kernel/ptrace.c restore_access_regs(target->thread.acrs); thread 964 arch/s390/kernel/ptrace.c fp_regs.fpc = target->thread.fpu.fpc; thread 965 arch/s390/kernel/ptrace.c fpregs_store(&fp_regs, &target->thread.fpu); thread 983 arch/s390/kernel/ptrace.c convert_vx_to_fp(fprs, target->thread.fpu.vxrs); thread 985 arch/s390/kernel/ptrace.c memcpy(&fprs, target->thread.fpu.fprs, sizeof(fprs)); thread 989 arch/s390/kernel/ptrace.c u32 ufpc[2] = { target->thread.fpu.fpc, 0 }; thread 996 arch/s390/kernel/ptrace.c target->thread.fpu.fpc = ufpc[0]; thread 1006 arch/s390/kernel/ptrace.c convert_fp_to_vx(target->thread.fpu.vxrs, fprs); thread 1008 arch/s390/kernel/ptrace.c memcpy(target->thread.fpu.fprs, &fprs, sizeof(fprs)); thread 1021 arch/s390/kernel/ptrace.c *k = target->thread.last_break; thread 1024 arch/s390/kernel/ptrace.c if (__put_user(target->thread.last_break, u)) thread 1049 arch/s390/kernel/ptrace.c data = target->thread.trap_tdb; thread 1074 arch/s390/kernel/ptrace.c vxrs[i] = *((__u64 *)(target->thread.fpu.vxrs + i) + 1); thread 1092 arch/s390/kernel/ptrace.c vxrs[i] = *((__u64 *)(target->thread.fpu.vxrs + i) + 1); thread 1097 arch/s390/kernel/ptrace.c *((__u64 *)(target->thread.fpu.vxrs + i) + 1) = vxrs[i]; thread 1113 arch/s390/kernel/ptrace.c memcpy(vxrs, target->thread.fpu.vxrs + __NUM_VXRS_LOW, sizeof(vxrs)); thread 1131 arch/s390/kernel/ptrace.c target->thread.fpu.vxrs + __NUM_VXRS_LOW, 0, -1); thread 1140 arch/s390/kernel/ptrace.c unsigned int *data = &target->thread.system_call; thread 1150 arch/s390/kernel/ptrace.c unsigned int *data = &target->thread.system_call; thread 1160 arch/s390/kernel/ptrace.c struct gs_cb *data = target->thread.gs_cb; thread 1182 arch/s390/kernel/ptrace.c if (!target->thread.gs_cb) { thread 1187 arch/s390/kernel/ptrace.c if (!target->thread.gs_cb) thread 1192 arch/s390/kernel/ptrace.c gs_cb = *target->thread.gs_cb; thread 1200 arch/s390/kernel/ptrace.c if (!target->thread.gs_cb) thread 1201 arch/s390/kernel/ptrace.c target->thread.gs_cb = data; thread 1202 arch/s390/kernel/ptrace.c *target->thread.gs_cb = gs_cb; thread 1205 arch/s390/kernel/ptrace.c restore_gs_cb(target->thread.gs_cb); thread 1216 arch/s390/kernel/ptrace.c struct gs_cb *data = target->thread.gs_bc_cb; thread 1231 arch/s390/kernel/ptrace.c struct gs_cb *data = target->thread.gs_bc_cb; thread 1239 arch/s390/kernel/ptrace.c target->thread.gs_bc_cb = data; thread 1277 arch/s390/kernel/ptrace.c struct runtime_instr_cb *data = target->thread.ri_cb; thread 1299 arch/s390/kernel/ptrace.c if (!target->thread.ri_cb) { thread 1305 arch/s390/kernel/ptrace.c if (target->thread.ri_cb) { thread 1309 arch/s390/kernel/ptrace.c ri_cb = *target->thread.ri_cb; thread 1325 arch/s390/kernel/ptrace.c if (!target->thread.ri_cb) thread 1326 arch/s390/kernel/ptrace.c target->thread.ri_cb = data; thread 1327 arch/s390/kernel/ptrace.c *target->thread.ri_cb = ri_cb; thread 1329 arch/s390/kernel/ptrace.c load_runtime_instr_cb(target->thread.ri_cb); thread 1432 arch/s390/kernel/ptrace.c save_access_regs(target->thread.acrs); thread 1461 arch/s390/kernel/ptrace.c save_access_regs(target->thread.acrs); thread 1484 arch/s390/kernel/ptrace.c restore_access_regs(target->thread.acrs); thread 1558 arch/s390/kernel/ptrace.c last_break = target->thread.last_break; thread 28 arch/s390/kernel/runtime_instr.c kfree(tsk->thread.ri_cb); thread 36 arch/s390/kernel/runtime_instr.c if (!task->thread.ri_cb) thread 41 arch/s390/kernel/runtime_instr.c kfree(task->thread.ri_cb); thread 42 arch/s390/kernel/runtime_instr.c 
task->thread.ri_cb = NULL; thread 85 arch/s390/kernel/runtime_instr.c if (!current->thread.ri_cb) { thread 90 arch/s390/kernel/runtime_instr.c cb = current->thread.ri_cb; thread 98 arch/s390/kernel/runtime_instr.c current->thread.ri_cb = cb; thread 109 arch/s390/kernel/signal.c save_access_regs(current->thread.acrs); thread 116 arch/s390/kernel/signal.c restore_access_regs(current->thread.acrs); thread 130 arch/s390/kernel/signal.c memcpy(&user_sregs.regs.acrs, current->thread.acrs, thread 132 arch/s390/kernel/signal.c fpregs_store(&user_sregs.fpregs, &current->thread.fpu); thread 167 arch/s390/kernel/signal.c memcpy(&current->thread.acrs, &user_sregs.regs.acrs, thread 168 arch/s390/kernel/signal.c sizeof(current->thread.acrs)); thread 170 arch/s390/kernel/signal.c fpregs_load(&user_sregs.fpregs, &current->thread.fpu); thread 186 arch/s390/kernel/signal.c vxrs[i] = *((__u64 *)(current->thread.fpu.vxrs + i) + 1); thread 190 arch/s390/kernel/signal.c current->thread.fpu.vxrs + __NUM_VXRS_LOW, thread 207 arch/s390/kernel/signal.c __copy_from_user(current->thread.fpu.vxrs + __NUM_VXRS_LOW, thread 212 arch/s390/kernel/signal.c *((__u64 *)(current->thread.fpu.vxrs + i) + 1) = vxrs[i]; thread 364 arch/s390/kernel/signal.c regs->gprs[6] = current->thread.last_break; thread 435 arch/s390/kernel/signal.c regs->gprs[5] = current->thread.last_break; thread 472 arch/s390/kernel/signal.c current->thread.system_call = thread 477 arch/s390/kernel/signal.c if (current->thread.system_call) { thread 478 arch/s390/kernel/signal.c regs->int_code = current->thread.system_call; thread 511 arch/s390/kernel/signal.c if (current->thread.system_call) { thread 512 arch/s390/kernel/signal.c regs->int_code = current->thread.system_call; thread 291 arch/s390/kernel/smp.c lc->user_timer = tsk->thread.user_timer; thread 292 arch/s390/kernel/smp.c lc->guest_timer = tsk->thread.guest_timer; thread 293 arch/s390/kernel/smp.c lc->system_timer = tsk->thread.system_timer; thread 294 arch/s390/kernel/smp.c lc->hardirq_timer = tsk->thread.hardirq_timer; thread 295 arch/s390/kernel/smp.c lc->softirq_timer = tsk->thread.softirq_timer; thread 34 arch/s390/kernel/traps.c address = *(unsigned long *)(current->thread.trap_tdb + 24); thread 82 arch/s390/kernel/traps.c (void __force __user *) current->thread.per_event.address); thread 207 arch/s390/kernel/traps.c vic = (current->thread.fpu.fpc & 0xf00) >> 8; thread 233 arch/s390/kernel/traps.c if (current->thread.fpu.fpc & FPC_DXC_MASK) thread 234 arch/s390/kernel/traps.c do_fp_trap(regs, current->thread.fpu.fpc); thread 71 arch/s390/kernel/uprobes.c regs->psw.addr >= current->thread.per_user.start && thread 72 arch/s390/kernel/uprobes.c regs->psw.addr <= current->thread.per_user.end) thread 101 arch/s390/kernel/uprobes.c if (check_per_event(current->thread.per_event.cause, thread 102 arch/s390/kernel/uprobes.c current->thread.per_user.control, regs)) { thread 104 arch/s390/kernel/uprobes.c current->thread.per_event.address = utask->vaddr; thread 140 arch/s390/kernel/uprobes.c current->thread.per_event.address = current->utask->vaddr; thread 254 arch/s390/kernel/uprobes.c if (!(current->thread.per_user.control & PER_EVENT_STORE)) thread 256 arch/s390/kernel/uprobes.c if ((void *)current->thread.per_user.start > (addr + len)) thread 258 arch/s390/kernel/uprobes.c if ((void *)current->thread.per_user.end < addr) thread 260 arch/s390/kernel/uprobes.c current->thread.per_event.address = regs->psw.addr; thread 261 arch/s390/kernel/uprobes.c current->thread.per_event.cause = PER_EVENT_STORE >> 16; thread 154
arch/s390/kernel/vtime.c user = update_tsk_timer(&tsk->thread.user_timer, thread 156 arch/s390/kernel/vtime.c guest = update_tsk_timer(&tsk->thread.guest_timer, thread 158 arch/s390/kernel/vtime.c system = update_tsk_timer(&tsk->thread.system_timer, thread 160 arch/s390/kernel/vtime.c hardirq = update_tsk_timer(&tsk->thread.hardirq_timer, thread 162 arch/s390/kernel/vtime.c softirq = update_tsk_timer(&tsk->thread.softirq_timer, thread 191 arch/s390/kernel/vtime.c prev->thread.user_timer = S390_lowcore.user_timer; thread 192 arch/s390/kernel/vtime.c prev->thread.guest_timer = S390_lowcore.guest_timer; thread 193 arch/s390/kernel/vtime.c prev->thread.system_timer = S390_lowcore.system_timer; thread 194 arch/s390/kernel/vtime.c prev->thread.hardirq_timer = S390_lowcore.hardirq_timer; thread 195 arch/s390/kernel/vtime.c prev->thread.softirq_timer = S390_lowcore.softirq_timer; thread 196 arch/s390/kernel/vtime.c S390_lowcore.user_timer = current->thread.user_timer; thread 197 arch/s390/kernel/vtime.c S390_lowcore.guest_timer = current->thread.guest_timer; thread 198 arch/s390/kernel/vtime.c S390_lowcore.system_timer = current->thread.system_timer; thread 199 arch/s390/kernel/vtime.c S390_lowcore.hardirq_timer = current->thread.hardirq_timer; thread 200 arch/s390/kernel/vtime.c S390_lowcore.softirq_timer = current->thread.softirq_timer; thread 214 arch/s390/kvm/intercept.c if (current->thread.per_flags & PER_FLAG_NO_TE) thread 561 arch/s390/kvm/interrupt.c save_gs_cb(current->thread.gs_cb); thread 622 arch/s390/kvm/interrupt.c rc |= put_guest_lc(vcpu, current->thread.fpu.fpc, thread 3685 arch/s390/kvm/kvm-s390.c hva = gfn_to_hva(vcpu->kvm, gpa_to_gfn(current->thread.gmap_addr)); thread 3686 arch/s390/kvm/kvm-s390.c hva += current->thread.gmap_addr & ~PAGE_MASK; thread 3690 arch/s390/kvm/kvm-s390.c rc = kvm_setup_async_pf(vcpu, current->thread.gmap_addr, hva, &arch); thread 3815 arch/s390/kvm/kvm-s390.c current->thread.gmap_addr; thread 3818 arch/s390/kvm/kvm-s390.c } else if (current->thread.gmap_pfault) { thread 3820 arch/s390/kvm/kvm-s390.c current->thread.gmap_pfault = 0; thread 3823 arch/s390/kvm/kvm-s390.c return kvm_arch_fault_in_page(vcpu, current->thread.gmap_addr, 1); thread 3930 arch/s390/kvm/kvm-s390.c vcpu->arch.host_fpregs.fpc = current->thread.fpu.fpc; thread 3931 arch/s390/kvm/kvm-s390.c vcpu->arch.host_fpregs.regs = current->thread.fpu.regs; thread 3933 arch/s390/kvm/kvm-s390.c current->thread.fpu.regs = vcpu->run->s.regs.vrs; thread 3935 arch/s390/kvm/kvm-s390.c current->thread.fpu.regs = vcpu->run->s.regs.fprs; thread 3936 arch/s390/kvm/kvm-s390.c current->thread.fpu.fpc = vcpu->run->s.regs.fpc; thread 3937 arch/s390/kvm/kvm-s390.c if (test_fp_ctl(current->thread.fpu.fpc)) thread 3939 arch/s390/kvm/kvm-s390.c current->thread.fpu.fpc = 0; thread 3943 arch/s390/kvm/kvm-s390.c if (current->thread.gs_cb) { thread 3944 arch/s390/kvm/kvm-s390.c vcpu->arch.host_gscb = current->thread.gs_cb; thread 3948 arch/s390/kvm/kvm-s390.c current->thread.gs_cb = (struct gs_cb *) thread 3950 arch/s390/kvm/kvm-s390.c restore_gs_cb(current->thread.gs_cb); thread 3978 arch/s390/kvm/kvm-s390.c vcpu->run->s.regs.fpc = current->thread.fpu.fpc; thread 3980 arch/s390/kvm/kvm-s390.c current->thread.fpu.fpc = vcpu->arch.host_fpregs.fpc; thread 3981 arch/s390/kvm/kvm-s390.c current->thread.fpu.regs = vcpu->arch.host_fpregs.regs; thread 3985 arch/s390/kvm/kvm-s390.c save_gs_cb(current->thread.gs_cb); thread 3987 arch/s390/kvm/kvm-s390.c current->thread.gs_cb = vcpu->arch.host_gscb; thread 4125 
arch/s390/kvm/kvm-s390.c vcpu->run->s.regs.fpc = current->thread.fpu.fpc; thread 64 arch/s390/kvm/priv.c current->thread.gs_cb = (struct gs_cb *)&vcpu->run->s.regs.gscb; thread 65 arch/s390/kvm/priv.c restore_gs_cb(current->thread.gs_cb); thread 909 arch/s390/kvm/vsie.c if (current->thread.gmap_int_code == PGM_PROTECTION) thread 912 arch/s390/kvm/vsie.c current->thread.gmap_addr, 1); thread 915 arch/s390/kvm/vsie.c current->thread.gmap_addr); thread 918 arch/s390/kvm/vsie.c current->thread.gmap_addr, thread 919 arch/s390/kvm/vsie.c current->thread.gmap_write_flag); thread 921 arch/s390/kvm/vsie.c vsie_page->fault_addr = current->thread.gmap_addr; thread 45 arch/s390/lib/uaccess.c current->thread.mm_segment = fs; thread 69 arch/s390/lib/uaccess.c old_fs = current->thread.mm_segment; thread 74 arch/s390/lib/uaccess.c current->thread.mm_segment |= 1; thread 96 arch/s390/lib/uaccess.c current->thread.mm_segment = old_fs; thread 83 arch/s390/mm/fault.c if (current->thread.mm_segment == USER_DS) thread 89 arch/s390/mm/fault.c if (current->thread.mm_segment & 1) { thread 90 arch/s390/mm/fault.c if (current->thread.mm_segment == USER_DS_SACF) thread 442 arch/s390/mm/fault.c current->thread.gmap_addr = address; thread 443 arch/s390/mm/fault.c current->thread.gmap_write_flag = !!(flags & FAULT_FLAG_WRITE); thread 444 arch/s390/mm/fault.c current->thread.gmap_int_code = regs->int_code & 0xffff; thread 513 arch/s390/mm/fault.c current->thread.gmap_pfault = 1; thread 527 arch/s390/mm/fault.c address = __gmap_link(gmap, current->thread.gmap_addr, thread 722 arch/s390/mm/fault.c if (tsk->thread.pfault_wait == 1) { thread 728 arch/s390/mm/fault.c tsk->thread.pfault_wait = 0; thread 729 arch/s390/mm/fault.c list_del(&tsk->thread.list); thread 741 arch/s390/mm/fault.c tsk->thread.pfault_wait = -1; thread 747 arch/s390/mm/fault.c if (tsk->thread.pfault_wait == 1) { thread 750 arch/s390/mm/fault.c } else if (tsk->thread.pfault_wait == -1) { thread 754 arch/s390/mm/fault.c tsk->thread.pfault_wait = 0; thread 762 arch/s390/mm/fault.c tsk->thread.pfault_wait = 1; thread 763 arch/s390/mm/fault.c list_add(&tsk->thread.list, &pfault_list); thread 780 arch/s390/mm/fault.c struct thread_struct *thread, *next; thread 784 arch/s390/mm/fault.c list_for_each_entry_safe(thread, next, &pfault_list, list) { thread 785 arch/s390/mm/fault.c thread->pfault_wait = 0; thread 786 arch/s390/mm/fault.c list_del(&thread->list); thread 787 arch/s390/mm/fault.c tsk = container_of(thread, struct task_struct, thread); thread 76 arch/s390/mm/pgalloc.c if (current->thread.mm_segment == USER_DS) { thread 81 arch/s390/mm/pgalloc.c if (current->thread.mm_segment == USER_DS_SACF) { thread 52 arch/sh/include/asm/fpu.h tsk->thread.fpu_counter = 0; thread 171 arch/sh/include/asm/processor_32.h #define thread_saved_pc(tsk) (tsk->thread.pc) thread 204 arch/sh/include/asm/processor_64.h #define thread_saved_pc(tsk) (tsk->thread.pc) thread 208 arch/sh/include/asm/processor_64.h #define KSTK_EIP(tsk) ((tsk)->thread.pc) thread 209 arch/sh/include/asm/processor_64.h #define KSTK_ESP(tsk) ((tsk)->thread.sp) thread 8 arch/sh/include/asm/switch_to_32.h (!!(tsk->thread.dsp_status.status & SR_DSP)) thread 13 arch/sh/include/asm/switch_to_32.h (u32 *)&tsk->thread.dsp_status; \ thread 36 arch/sh/include/asm/switch_to_32.h (u32 *)&tsk->thread.dsp_status + 14; \ thread 85 arch/sh/include/asm/switch_to_32.h __ts1 = (u32 *)&prev->thread.sp; \ thread 86 arch/sh/include/asm/switch_to_32.h __ts2 = (u32 *)&prev->thread.pc; \ thread 89 
arch/sh/include/asm/switch_to_32.h __ts6 = (u32 *)&next->thread.sp; \ thread 90 arch/sh/include/asm/switch_to_32.h __ts7 = next->thread.pc; \ thread 24 arch/sh/include/asm/switch_to_64.h struct pt_regs *regs = next->thread.uregs; \ thread 27 arch/sh/include/asm/switch_to_64.h last = sh64_switch_to(prev, &prev->thread, next, \ thread 28 arch/sh/include/asm/switch_to_64.h &next->thread); \ thread 22 arch/sh/kernel/cpu/fpu.c if (!tsk->thread.xstate) { thread 23 arch/sh/kernel/cpu/fpu.c tsk->thread.xstate = kmem_cache_alloc(task_xstate_cachep, thread 25 arch/sh/kernel/cpu/fpu.c if (!tsk->thread.xstate) thread 30 arch/sh/kernel/cpu/fpu.c struct sh_fpu_hard_struct *fp = &tsk->thread.xstate->hardfpu; thread 34 arch/sh/kernel/cpu/fpu.c struct sh_fpu_soft_struct *fp = &tsk->thread.xstate->softfpu; thread 51 arch/sh/kernel/cpu/fpu.c tsk->thread.fpu_counter++; thread 52 arch/sh/kernel/cpu/sh2a/fpu.c : "0" ((char *)(&tsk->thread.xstate->hardfpu.status)), thread 84 arch/sh/kernel/cpu/sh2a/fpu.c : "0" (tsk->thread.xstate), "r" (FPSCR_RCHG) thread 456 arch/sh/kernel/cpu/sh2a/fpu.c if ((tsk->thread.xstate->hardfpu.fpscr & FPSCR_FPU_ERROR)) { thread 458 arch/sh/kernel/cpu/sh2a/fpu.c denormal_to_double (&tsk->thread.xstate->hardfpu, thread 473 arch/sh/kernel/cpu/sh2a/fpu.c hx = tsk->thread.xstate->hardfpu.fp_regs[n]; thread 474 arch/sh/kernel/cpu/sh2a/fpu.c hy = tsk->thread.xstate->hardfpu.fp_regs[m]; thread 475 arch/sh/kernel/cpu/sh2a/fpu.c fpscr = tsk->thread.xstate->hardfpu.fpscr; thread 485 arch/sh/kernel/cpu/sh2a/fpu.c | tsk->thread.xstate->hardfpu.fp_regs[n+1]; thread 487 arch/sh/kernel/cpu/sh2a/fpu.c | tsk->thread.xstate->hardfpu.fp_regs[m+1]; thread 492 arch/sh/kernel/cpu/sh2a/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n] = llx >> 32; thread 493 arch/sh/kernel/cpu/sh2a/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n+1] = llx & 0xffffffff; thread 502 arch/sh/kernel/cpu/sh2a/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n] = hx; thread 516 arch/sh/kernel/cpu/sh2a/fpu.c hx = tsk->thread.xstate->hardfpu.fp_regs[n]; thread 517 arch/sh/kernel/cpu/sh2a/fpu.c hy = tsk->thread.xstate->hardfpu.fp_regs[m]; thread 518 arch/sh/kernel/cpu/sh2a/fpu.c fpscr = tsk->thread.xstate->hardfpu.fpscr; thread 528 arch/sh/kernel/cpu/sh2a/fpu.c | tsk->thread.xstate->hardfpu.fp_regs[n+1]; thread 530 arch/sh/kernel/cpu/sh2a/fpu.c | tsk->thread.xstate->hardfpu.fp_regs[m+1]; thread 535 arch/sh/kernel/cpu/sh2a/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n] = llx >> 32; thread 536 arch/sh/kernel/cpu/sh2a/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n+1] = llx & 0xffffffff; thread 545 arch/sh/kernel/cpu/sh2a/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n] = hx; thread 563 arch/sh/kernel/cpu/sh2a/fpu.c tsk->thread.xstate->hardfpu.fpscr &= thread 84 arch/sh/kernel/cpu/sh4/fpu.c :"0"((char *)(&tsk->thread.xstate->hardfpu.status)), thread 134 arch/sh/kernel/cpu/sh4/fpu.c :"0" (tsk->thread.xstate), "r" (FPSCR_RCHG) thread 230 arch/sh/kernel/cpu/sh4/fpu.c if ((tsk->thread.xstate->hardfpu.fpscr & FPSCR_CAUSE_ERROR)) thread 232 arch/sh/kernel/cpu/sh4/fpu.c denormal_to_double(&tsk->thread.xstate->hardfpu, thread 248 arch/sh/kernel/cpu/sh4/fpu.c hx = tsk->thread.xstate->hardfpu.fp_regs[n]; thread 249 arch/sh/kernel/cpu/sh4/fpu.c hy = tsk->thread.xstate->hardfpu.fp_regs[m]; thread 250 arch/sh/kernel/cpu/sh4/fpu.c fpscr = tsk->thread.xstate->hardfpu.fpscr; thread 260 arch/sh/kernel/cpu/sh4/fpu.c | tsk->thread.xstate->hardfpu.fp_regs[n + 1]; thread 262 arch/sh/kernel/cpu/sh4/fpu.c | tsk->thread.xstate->hardfpu.fp_regs[m + 1]; thread 264 
arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n] = llx >> 32; thread 265 arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n + 1] = llx & 0xffffffff; thread 271 arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n] = hx; thread 286 arch/sh/kernel/cpu/sh4/fpu.c hx = tsk->thread.xstate->hardfpu.fp_regs[n]; thread 287 arch/sh/kernel/cpu/sh4/fpu.c hy = tsk->thread.xstate->hardfpu.fp_regs[m]; thread 288 arch/sh/kernel/cpu/sh4/fpu.c fpscr = tsk->thread.xstate->hardfpu.fpscr; thread 298 arch/sh/kernel/cpu/sh4/fpu.c | tsk->thread.xstate->hardfpu.fp_regs[n + 1]; thread 300 arch/sh/kernel/cpu/sh4/fpu.c | tsk->thread.xstate->hardfpu.fp_regs[m + 1]; thread 305 arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n] = llx >> 32; thread 306 arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n + 1] = llx & 0xffffffff; thread 315 arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n] = hx; thread 330 arch/sh/kernel/cpu/sh4/fpu.c hx = tsk->thread.xstate->hardfpu.fp_regs[n]; thread 331 arch/sh/kernel/cpu/sh4/fpu.c hy = tsk->thread.xstate->hardfpu.fp_regs[m]; thread 332 arch/sh/kernel/cpu/sh4/fpu.c fpscr = tsk->thread.xstate->hardfpu.fpscr; thread 342 arch/sh/kernel/cpu/sh4/fpu.c | tsk->thread.xstate->hardfpu.fp_regs[n + 1]; thread 344 arch/sh/kernel/cpu/sh4/fpu.c | tsk->thread.xstate->hardfpu.fp_regs[m + 1]; thread 348 arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n] = llx >> 32; thread 349 arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n + 1] = llx & 0xffffffff; thread 355 arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fp_regs[n] = hx; thread 368 arch/sh/kernel/cpu/sh4/fpu.c hx = tsk->thread.xstate->hardfpu.fp_regs[m]; thread 370 arch/sh/kernel/cpu/sh4/fpu.c if ((tsk->thread.xstate->hardfpu.fpscr & FPSCR_CAUSE_ERROR) thread 375 arch/sh/kernel/cpu/sh4/fpu.c llx = ((long long)tsk->thread.xstate->hardfpu.fp_regs[m] << 32) thread 376 arch/sh/kernel/cpu/sh4/fpu.c | tsk->thread.xstate->hardfpu.fp_regs[m + 1]; thread 378 arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fpul = float64_to_float32(llx); thread 397 arch/sh/kernel/cpu/sh4/fpu.c int roundingMode = FPSCR_ROUNDING_MODE(tsk->thread.xstate->hardfpu.fpscr); thread 409 arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fpscr &= thread 411 arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fpscr |= fpu_exception_flags; thread 414 arch/sh/kernel/cpu/sh4/fpu.c tsk->thread.xstate->hardfpu.fpscr |= (fpu_exception_flags >> 10); thread 418 arch/sh/kernel/cpu/sh4/fpu.c if ((((tsk->thread.xstate->hardfpu.fpscr & FPSCR_ENABLE_MASK) >> 7) & thread 54 arch/sh/kernel/cpu/sh5/fpu.c : "r" (&tsk->thread.xstate->hardfpu) thread 97 arch/sh/kernel/cpu/sh5/fpu.c : "r" (&tsk->thread.xstate->hardfpu) thread 156 arch/sh/kernel/dumpstack.c sp = (unsigned long *)tsk->thread.sp; thread 265 arch/sh/kernel/hw_breakpoint.c struct thread_struct *t = &tsk->thread; thread 242 arch/sh/kernel/kgdb.c gdb_regs[GDB_R15] = p->thread.sp; thread 243 arch/sh/kernel/kgdb.c gdb_regs[GDB_PC] = p->thread.pc; thread 31 arch/sh/kernel/process.c if (src->thread.xstate) { thread 32 arch/sh/kernel/process.c dst->thread.xstate = kmem_cache_alloc(task_xstate_cachep, thread 34 arch/sh/kernel/process.c if (!dst->thread.xstate) thread 36 arch/sh/kernel/process.c memcpy(dst->thread.xstate, src->thread.xstate, xstate_size); thread 44 arch/sh/kernel/process.c if (tsk->thread.xstate) { thread 45 arch/sh/kernel/process.c kmem_cache_free(task_xstate_cachep, 
tsk->thread.xstate); thread 46 arch/sh/kernel/process.c tsk->thread.xstate = NULL; thread 132 arch/sh/kernel/process_32.c p->thread.dsp_status = tsk->thread.dsp_status; thread 136 arch/sh/kernel/process_32.c memset(p->thread.ptrace_bps, 0, sizeof(p->thread.ptrace_bps)); thread 139 arch/sh/kernel/process_32.c p->thread.sp = (unsigned long) childregs; thread 142 arch/sh/kernel/process_32.c p->thread.pc = (unsigned long) ret_from_kernel_thread; thread 151 arch/sh/kernel/process_32.c p->thread.fpu_counter = 0; thread 164 arch/sh/kernel/process_32.c p->thread.pc = (unsigned long) ret_from_fork; thread 175 arch/sh/kernel/process_32.c struct thread_struct *next_t = &next->thread; thread 184 arch/sh/kernel/process_32.c if (next->thread.fpu_counter > 5) thread 202 arch/sh/kernel/process_32.c if (next->thread.fpu_counter > 5) thread 222 arch/sh/kernel/process_32.c unsigned long schedule_frame = (unsigned long)p->thread.sp; thread 282 arch/sh/kernel/process_64.c tsk->thread.kregs = regs; thread 331 arch/sh/kernel/process_64.c if(current->thread.kregs==&fake_swapper_regs) { thread 332 arch/sh/kernel/process_64.c current->thread.kregs = thread 334 arch/sh/kernel/process_64.c current->thread.uregs = current->thread.kregs; thread 360 arch/sh/kernel/process_64.c memcpy(fpu, &tsk->thread.xstate->hardfpu, sizeof(*fpu)); thread 390 arch/sh/kernel/process_64.c p->thread.sp = (unsigned long) childregs; thread 398 arch/sh/kernel/process_64.c p->thread.pc = (unsigned long) ret_from_kernel_thread; thread 411 arch/sh/kernel/process_64.c p->thread.uregs = childregs; thread 416 arch/sh/kernel/process_64.c p->thread.pc = (unsigned long) ret_from_fork; thread 450 arch/sh/kernel/process_64.c sh64_switch_to_fp = (long) p->thread.sp; thread 79 arch/sh/kernel/ptrace_32.c struct thread_struct *thread = &tsk->thread; thread 83 arch/sh/kernel/ptrace_32.c bp = thread->ptrace_bps[0]; thread 96 arch/sh/kernel/ptrace_32.c thread->ptrace_bps[0] = bp; thread 197 arch/sh/kernel/ptrace_32.c &target->thread.xstate->hardfpu, 0, -1); thread 200 arch/sh/kernel/ptrace_32.c &target->thread.xstate->softfpu, 0, -1); thread 218 arch/sh/kernel/ptrace_32.c &target->thread.xstate->hardfpu, 0, -1); thread 221 arch/sh/kernel/ptrace_32.c &target->thread.xstate->softfpu, 0, -1); thread 238 arch/sh/kernel/ptrace_32.c (struct pt_dspregs *)&target->thread.dsp_status.dsp_regs; thread 256 arch/sh/kernel/ptrace_32.c (struct pt_dspregs *)&target->thread.dsp_status.dsp_regs; thread 399 arch/sh/kernel/ptrace_32.c tmp = ((unsigned long *)child->thread.xstate) thread 434 arch/sh/kernel/ptrace_32.c ((unsigned long *)child->thread.xstate) thread 60 arch/sh/kernel/ptrace_64.c stack = (unsigned char *)(task->thread.uregs); thread 89 arch/sh/kernel/ptrace_64.c tmp = ((long *)task->thread.xstate)[addr / sizeof(unsigned long)]; thread 101 arch/sh/kernel/ptrace_64.c stack = (unsigned char *)(task->thread.uregs); thread 124 arch/sh/kernel/ptrace_64.c ((long *)task->thread.xstate)[addr / sizeof(unsigned long)] = data; thread 130 arch/sh/kernel/ptrace_64.c struct pt_regs *regs = child->thread.uregs; thread 139 arch/sh/kernel/ptrace_64.c struct pt_regs *regs = child->thread.uregs; thread 226 arch/sh/kernel/ptrace_64.c &target->thread.xstate->hardfpu, 0, -1); thread 243 arch/sh/kernel/ptrace_64.c &target->thread.xstate->hardfpu, 0, -1); thread 86 arch/sh/kernel/signal_32.c return __copy_from_user(&tsk->thread.xstate->hardfpu, &sc->sc_fpregs[0], thread 110 arch/sh/kernel/signal_32.c return __copy_to_user(&sc->sc_fpregs[0], &tsk->thread.xstate->hardfpu, thread 157 
arch/sh/kernel/signal_64.c err |= __copy_from_user(&current->thread.xstate->hardfpu, &sc->sc_fpregs[0], thread 182 arch/sh/kernel/signal_64.c err |= __copy_to_user(&sc->sc_fpregs[0], &current->thread.xstate->hardfpu, thread 221 arch/sh/kernel/smp.c stack_start.sp = tsk->thread.sp; thread 82 arch/sh/kernel/stacktrace.c unsigned long *sp = (unsigned long *)tsk->thread.sp; thread 635 arch/sh/kernel/traps_32.c current->thread.dsp_status.status |= SR_DSP; thread 353 arch/sh/kernel/traps_64.c current->thread.xstate->hardfpu.fp_regs[destreg] = buflo; thread 357 arch/sh/kernel/traps_64.c current->thread.xstate->hardfpu.fp_regs[destreg] = buflo; thread 358 arch/sh/kernel/traps_64.c current->thread.xstate->hardfpu.fp_regs[destreg+1] = bufhi; thread 361 arch/sh/kernel/traps_64.c current->thread.xstate->hardfpu.fp_regs[destreg] = bufhi; thread 362 arch/sh/kernel/traps_64.c current->thread.xstate->hardfpu.fp_regs[destreg+1] = buflo; thread 364 arch/sh/kernel/traps_64.c current->thread.xstate->hardfpu.fp_regs[destreg] = buflo; thread 365 arch/sh/kernel/traps_64.c current->thread.xstate->hardfpu.fp_regs[destreg+1] = bufhi; thread 420 arch/sh/kernel/traps_64.c buflo = current->thread.xstate->hardfpu.fp_regs[srcreg]; thread 424 arch/sh/kernel/traps_64.c buflo = current->thread.xstate->hardfpu.fp_regs[srcreg]; thread 425 arch/sh/kernel/traps_64.c bufhi = current->thread.xstate->hardfpu.fp_regs[srcreg+1]; thread 428 arch/sh/kernel/traps_64.c bufhi = current->thread.xstate->hardfpu.fp_regs[srcreg]; thread 429 arch/sh/kernel/traps_64.c buflo = current->thread.xstate->hardfpu.fp_regs[srcreg+1]; thread 431 arch/sh/kernel/traps_64.c buflo = current->thread.xstate->hardfpu.fp_regs[srcreg]; thread 432 arch/sh/kernel/traps_64.c bufhi = current->thread.xstate->hardfpu.fp_regs[srcreg+1]; thread 554 arch/sh/math-emu/math.c if ((tsk->thread.xstate->softfpu.fpscr & (1 << 17))) { thread 556 arch/sh/math-emu/math.c denormal_to_double (&tsk->thread.xstate->softfpu, thread 558 arch/sh/math-emu/math.c tsk->thread.xstate->softfpu.fpscr &= thread 598 arch/sh/math-emu/math.c struct sh_fpu_soft_struct *fpu = &(tsk->thread.xstate->softfpu); thread 72 arch/sh/mm/alignment.c if (current->thread.flags & SH_THREAD_UAC_SIGBUS) { thread 77 arch/sh/mm/alignment.c if (current->thread.flags & SH_THREAD_UAC_NOPRINT) thread 85 arch/sh/mm/alignment.c return put_user(tsk->thread.flags & SH_THREAD_UAC_MASK, thread 91 arch/sh/mm/alignment.c tsk->thread.flags = (tsk->thread.flags & ~SH_THREAD_UAC_MASK) | thread 98 arch/sparc/include/asm/processor_32.h #define task_pt_regs(tsk) ((tsk)->thread.kregs) thread 99 arch/sparc/include/asm/processor_32.h #define KSTK_EIP(tsk) ((tsk)->thread.kregs->pc) thread 100 arch/sparc/include/asm/processor_32.h #define KSTK_ESP(tsk) ((tsk)->thread.kregs->u_regs[UREG_FP]) thread 47 arch/sparc/include/asm/ptrace.h struct thread_info *thread; thread 185 arch/sparc/include/asm/sfp-machine_32.h #define FP_ROUNDMODE ((current->thread.fsr >> 30) & 0x3) thread 187 arch/sparc/include/asm/sfp-machine_32.h #define FP_ROUNDMODE ((last_task_used_math->thread.fsr >> 30) & 0x3) thread 201 arch/sparc/include/asm/sfp-machine_32.h #define FP_INHIBIT_RESULTS ((current->thread.fsr >> 23) & _fex) thread 203 arch/sparc/include/asm/sfp-machine_32.h #define FP_INHIBIT_RESULTS ((last_task_used_math->thread.fsr >> 23) & _fex) thread 207 arch/sparc/include/asm/sfp-machine_32.h #define FP_TRAPPING_EXCEPTIONS ((current->thread.fsr >> 23) & 0x1f) thread 209 arch/sparc/include/asm/sfp-machine_32.h #define FP_TRAPPING_EXCEPTIONS
((last_task_used_math->thread.fsr >> 23) & 0x1f) thread 21 arch/sparc/include/asm/switch_to_32.h fpsave(&(prv)->thread.float_regs[0], &(prv)->thread.fsr, \ thread 22 arch/sparc/include/asm/switch_to_32.h &(prv)->thread.fpqueue[0], &(prv)->thread.fpqdepth); \ thread 24 arch/sparc/include/asm/switch_to_32.h (prv)->thread.kregs->psr &= ~PSR_EF; \ thread 34 arch/sparc/include/asm/switch_to_32.h (nxt)->thread.kregs->psr&=~PSR_EF; \ thread 27 arch/sparc/include/asm/switch_to_64.h trap_block[current_thread_info()->cpu].thread = \ thread 26 arch/sparc/include/asm/trap_block.h struct thread_info *thread; thread 28 arch/sparc/include/asm/uaccess_32.h #define get_fs() (current->thread.current_ds) thread 29 arch/sparc/include/asm/uaccess_32.h #define set_fs(val) ((current->thread.current_ds) = (val)) thread 51 arch/sparc/kernel/asm-offsets.c DEFINE(AOFF_task_thread, offsetof(struct task_struct, thread)); thread 192 arch/sparc/kernel/process_32.c fpsave(&tsk->thread.float_regs[0], &tsk->thread.fsr, thread 193 arch/sparc/kernel/process_32.c &tsk->thread.fpqueue[0], &tsk->thread.fpqdepth); thread 213 arch/sparc/kernel/process_32.c fpsave(&current->thread.float_regs[0], &current->thread.fsr, thread 214 arch/sparc/kernel/process_32.c &current->thread.fpqueue[0], &current->thread.fpqdepth); thread 223 arch/sparc/kernel/process_32.c if (current->thread.flags & SPARC_FLAG_KTHREAD) { thread 224 arch/sparc/kernel/process_32.c current->thread.flags &= ~SPARC_FLAG_KTHREAD; thread 228 arch/sparc/kernel/process_32.c current->thread.kregs = (struct pt_regs *) thread 317 arch/sparc/kernel/process_32.c fpsave(&p->thread.float_regs[0], &p->thread.fsr, thread 318 arch/sparc/kernel/process_32.c &p->thread.fpqueue[0], &p->thread.fpqdepth); thread 339 arch/sparc/kernel/process_32.c p->thread.kregs = childregs; thread 345 arch/sparc/kernel/process_32.c p->thread.flags |= SPARC_FLAG_KTHREAD; thread 346 arch/sparc/kernel/process_32.c p->thread.current_ds = KERNEL_DS; thread 357 arch/sparc/kernel/process_32.c p->thread.flags &= ~SPARC_FLAG_KTHREAD; thread 358 arch/sparc/kernel/process_32.c p->thread.current_ds = USER_DS; thread 360 arch/sparc/kernel/process_32.c ti->kpsr = current->thread.fork_kpsr | PSR_PIL; thread 361 arch/sparc/kernel/process_32.c ti->kwim = current->thread.fork_kwim; thread 425 arch/sparc/kernel/process_32.c fpsave(&current->thread.float_regs[0], &current->thread.fsr, thread 426 arch/sparc/kernel/process_32.c &current->thread.fpqueue[0], &current->thread.fpqdepth); thread 435 arch/sparc/kernel/process_32.c fpsave(&current->thread.float_regs[0], &current->thread.fsr, thread 436 arch/sparc/kernel/process_32.c &current->thread.fpqueue[0], &current->thread.fpqdepth); thread 444 arch/sparc/kernel/process_32.c &current->thread.float_regs[0], thread 446 arch/sparc/kernel/process_32.c fpregs->pr_fsr = current->thread.fsr; thread 447 arch/sparc/kernel/process_32.c fpregs->pr_qcnt = current->thread.fpqdepth; thread 452 arch/sparc/kernel/process_32.c &current->thread.fpqueue[0], thread 234 arch/sparc/kernel/process_64.c rp->thread = tp; thread 246 arch/sparc/kernel/process_64.c while (!gp->thread && ++limit < 100) { thread 283 arch/sparc/kernel/process_64.c tp = gp->thread; thread 382 arch/sparc/kernel/prom_64.c int cpu, unsigned int *thread) thread 415 arch/sparc/kernel/prom_64.c if (thread) { thread 427 arch/sparc/kernel/prom_64.c *thread = proc_id; thread 54 arch/sparc/kernel/ptrace_32.c const struct pt_regs *regs = target->thread.kregs; thread 132 arch/sparc/kernel/ptrace_32.c struct pt_regs *regs = target->thread.kregs; thread 216 arch/sparc/kernel/ptrace_32.c const unsigned long *fpregs =
target->thread.float_regs; thread 234 arch/sparc/kernel/ptrace_32.c &target->thread.fsr, thread 260 arch/sparc/kernel/ptrace_32.c unsigned long *fpregs = target->thread.float_regs; thread 276 arch/sparc/kernel/ptrace_32.c &target->thread.fsr, thread 344 arch/sparc/kernel/ptrace_32.c unsigned long addr2 = current->thread.kregs->u_regs[UREG_I4]; thread 373 arch/sparc/kernel/setup_32.c init_task.thread.kregs = &fake_swapper_regs; thread 21 arch/sparc/kernel/sigutil_32.c fpsave(&current->thread.float_regs[0], &current->thread.fsr, thread 22 arch/sparc/kernel/sigutil_32.c &current->thread.fpqueue[0], &current->thread.fpqdepth); thread 29 arch/sparc/kernel/sigutil_32.c fpsave(&current->thread.float_regs[0], &current->thread.fsr, thread 30 arch/sparc/kernel/sigutil_32.c &current->thread.fpqueue[0], &current->thread.fpqdepth); thread 36 arch/sparc/kernel/sigutil_32.c &current->thread.float_regs[0], thread 38 arch/sparc/kernel/sigutil_32.c err |= __put_user(current->thread.fsr, &fpu->si_fsr); thread 39 arch/sparc/kernel/sigutil_32.c err |= __put_user(current->thread.fpqdepth, &fpu->si_fpqdepth); thread 40 arch/sparc/kernel/sigutil_32.c if (current->thread.fpqdepth != 0) thread 42 arch/sparc/kernel/sigutil_32.c &current->thread.fpqueue[0], thread 71 arch/sparc/kernel/sigutil_32.c err = __copy_from_user(&current->thread.float_regs[0], &fpu->si_float_regs[0], thread 73 arch/sparc/kernel/sigutil_32.c err |= __get_user(current->thread.fsr, &fpu->si_fsr); thread 74 arch/sparc/kernel/sigutil_32.c err |= __get_user(current->thread.fpqdepth, &fpu->si_fpqdepth); thread 75 arch/sparc/kernel/sigutil_32.c if (current->thread.fpqdepth != 0) thread 76 arch/sparc/kernel/sigutil_32.c err |= __copy_from_user(&current->thread.fpqueue[0], thread 173 arch/sparc/kernel/traps_32.c fpsave(&fptask->thread.float_regs[0], &fptask->thread.fsr, thread 174 arch/sparc/kernel/traps_32.c &fptask->thread.fpqueue[0], &fptask->thread.fpqdepth); thread 178 arch/sparc/kernel/traps_32.c fpload(&current->thread.float_regs[0], &current->thread.fsr); thread 189 arch/sparc/kernel/traps_32.c fpload(&current->thread.float_regs[0], &current->thread.fsr); thread 226 arch/sparc/kernel/traps_32.c fpsave(&fpt->thread.float_regs[0], &fpt->thread.fsr, thread 227 arch/sparc/kernel/traps_32.c &fpt->thread.fpqueue[0], &fpt->thread.fpqdepth); thread 229 arch/sparc/kernel/traps_32.c printk("Hmm, FP exception, fsr was %016lx\n", fpt->thread.fsr); thread 232 arch/sparc/kernel/traps_32.c switch ((fpt->thread.fsr & 0x1c000)) { thread 257 arch/sparc/kernel/traps_32.c fpload(&current->thread.float_regs[0], &current->thread.fsr); thread 280 arch/sparc/kernel/traps_32.c fsr = fpt->thread.fsr; thread 2839 arch/sparc/kernel/traps_64.c p->thread = t; thread 2879 arch/sparc/kernel/traps_64.c thread) || thread 60 arch/sparc/kernel/unaligned_32.c die_if_kernel("Byte sized unaligned access?!?!", current->thread.kregs); thread 322 arch/sparc/kernel/unaligned_32.c if(!(current->thread.flags & SPARC_FLAG_UNALIGNED) || thread 172 arch/sparc/math-emu/math_32.c printk("fpqdepth is %ld\n", fpt->thread.fpqdepth); thread 173 arch/sparc/math-emu/math_32.c for (i = 0; i < fpt->thread.fpqdepth; i++) thread 174 arch/sparc/math-emu/math_32.c printk("%d: %08lx at %08lx\n", i, fpt->thread.fpqueue[i].insn, thread 175 arch/sparc/math-emu/math_32.c (unsigned long)fpt->thread.fpqueue[i].insn_addr); thread 178 arch/sparc/math-emu/math_32.c if (fpt->thread.fpqdepth == 0) { /* no queue, guilty insn is at regs->pc */ thread 183 arch/sparc/math-emu/math_32.c retcode = do_one_mathemu(insn, &fpt->thread.fsr, fpt->thread.float_regs); thread 194 arch/sparc/math-emu/math_32.c for (i = 0; i <
fpt->thread.fpqdepth; i++) { thread 195 arch/sparc/math-emu/math_32.c retcode = do_one_mathemu(fpt->thread.fpqueue[i].insn, &(fpt->thread.fsr), fpt->thread.float_regs); thread 201 arch/sparc/math-emu/math_32.c fpt->thread.fsr &= ~(0x3000 | FSR_CEXC_MASK); thread 203 arch/sparc/math-emu/math_32.c fpt->thread.fsr &= ~0x3000; thread 204 arch/sparc/math-emu/math_32.c fpt->thread.fpqdepth = 0; thread 417 arch/sparc/mm/fault_32.c __do_fault_siginfo(code, SIGSEGV, tsk->thread.kregs, address); thread 422 arch/sparc/mm/fault_32.c __do_fault_siginfo(BUS_ADRERR, SIGBUS, tsk->thread.kregs, address); thread 277 arch/um/drivers/chan_user.c int pid, thread, count, thread_fd = -1; thread 290 arch/um/drivers/chan_user.c thread = winch_tramp(fd, port, &thread_fd, &stack); thread 291 arch/um/drivers/chan_user.c if (thread < 0) thread 294 arch/um/drivers/chan_user.c register_winch_irq(thread_fd, fd, thread, port, stack); thread 39 arch/um/include/asm/processor-generic.h } thread; thread 100 arch/um/include/asm/processor-generic.h #define KSTK_REG(tsk, reg) get_thread_reg(reg, &tsk->thread.switch_buf) thread 27 arch/um/kernel/exec.c arch_flush_thread(&current->thread.arch); thread 88 arch/um/kernel/process.c to->thread.prev_sched = from; thread 91 arch/um/kernel/process.c switch_threads(&from->thread.switch_buf, &to->thread.switch_buf); thread 94 arch/um/kernel/process.c return current->thread.prev_sched; thread 99 arch/um/kernel/process.c struct pt_regs *regs = &current->thread.regs; thread 123 arch/um/kernel/process.c if (current->thread.prev_sched != NULL) thread 124 arch/um/kernel/process.c schedule_tail(current->thread.prev_sched); thread 125 arch/um/kernel/process.c current->thread.prev_sched = NULL; thread 127 arch/um/kernel/process.c fn = current->thread.request.u.thread.proc; thread 128 arch/um/kernel/process.c arg = current->thread.request.u.thread.arg; thread 134 arch/um/kernel/process.c userspace(&current->thread.regs.regs, current_thread_info()->aux_fp_regs); thread 142 arch/um/kernel/process.c schedule_tail(current->thread.prev_sched); thread 151 arch/um/kernel/process.c current->thread.prev_sched = NULL; thread 153 arch/um/kernel/process.c userspace(&current->thread.regs.regs, current_thread_info()->aux_fp_regs); thread 163 arch/um/kernel/process.c p->thread = (struct thread_struct) INIT_THREAD; thread 166 arch/um/kernel/process.c memcpy(&p->thread.regs.regs, current_pt_regs(), thread 167 arch/um/kernel/process.c sizeof(p->thread.regs.regs)); thread 168 arch/um/kernel/process.c PT_REGS_SET_SYSCALL_RETURN(&p->thread.regs, 0); thread 170 arch/um/kernel/process.c REGS_SP(p->thread.regs.regs.gp) = sp; thread 174 arch/um/kernel/process.c arch_copy_thread(&current->thread.arch, &p->thread.arch); thread 176 arch/um/kernel/process.c get_safe_registers(p->thread.regs.regs.gp, p->thread.regs.regs.fp); thread 177 arch/um/kernel/process.c p->thread.request.u.thread.proc = (int (*)(void *))sp; thread 178 arch/um/kernel/process.c p->thread.request.u.thread.arg = (void *)arg; thread 182 arch/um/kernel/process.c new_thread(task_stack_page(p), &p->thread.switch_buf, handler); thread 386 arch/um/kernel/process.c if (task->thread.singlestep_syscall) thread 421 arch/um/kernel/process.c sp = p->thread.switch_buf->JB_SP; thread 16 arch/um/kernel/ptrace.c child->thread.singlestep_syscall = 0; thread 26 arch/um/kernel/ptrace.c child->thread.singlestep_syscall = 0; thread 132 arch/um/kernel/signal.c current->thread.singlestep_syscall = thread 133 arch/um/kernel/signal.c is_syscall(PT_REGS_IP(&current->thread.regs)); thread 43 arch/um/kernel/skas/process.c
init_task.thread.request.u.thread.proc = start_kernel_proc; thread 44 arch/um/kernel/skas/process.c init_task.thread.request.u.thread.arg = NULL; thread 46 arch/um/kernel/skas/process.c &init_task.thread.switch_buf); thread 22 arch/um/kernel/stacktrace.c struct pt_regs *segv_regs = tsk->thread.segv_regs; thread 31 arch/um/kernel/sysrq.c struct pt_regs *segv_regs = current->thread.segv_regs; thread 333 arch/um/kernel/tlb.c do_signal(&current->thread.regs); thread 165 arch/um/kernel/trap.c current->thread.arch.faultinfo = fi; thread 172 arch/um/kernel/trap.c do_signal(&current->thread.regs); thread 219 arch/um/kernel/trap.c current->thread.segv_regs = container_of(regs, struct pt_regs, regs); thread 248 arch/um/kernel/trap.c catcher = current->thread.fault_catcher; thread 252 arch/um/kernel/trap.c current->thread.fault_addr = (void *) address; thread 255 arch/um/kernel/trap.c else if (current->thread.fault_addr != NULL) thread 269 arch/um/kernel/trap.c current->thread.arch.faultinfo = fi; thread 273 arch/um/kernel/trap.c current->thread.arch.faultinfo = fi; thread 279 arch/um/kernel/trap.c current->thread.segv_regs = NULL; thread 303 arch/um/kernel/trap.c current->thread.arch.faultinfo = *fi; thread 314 arch/um/kernel/trap.c if (current->thread.fault_catcher != NULL) thread 315 arch/um/kernel/trap.c UML_LONGJMP(current->thread.fault_catcher, 1); thread 58 arch/unicore32/kernel/fpu-ucf64.c current->thread.error_code = 0; thread 59 arch/unicore32/kernel/fpu-ucf64.c current->thread.trap_no = 6; thread 205 arch/unicore32/kernel/process.c struct thread_info *thread = current_thread_info(); thread 208 arch/unicore32/kernel/process.c memset(thread->used_cp, 0, sizeof(thread->used_cp)); thread 209 arch/unicore32/kernel/process.c memset(&tsk->thread.debug, 0, sizeof(struct debug_info)); thread 211 arch/unicore32/kernel/process.c memset(&thread->fpstate, 0, sizeof(struct fp_state)); thread 226 arch/unicore32/kernel/process.c struct thread_info *thread = task_thread_info(p); thread 229 arch/unicore32/kernel/process.c memset(&thread->cpu_context, 0, sizeof(struct cpu_context_save)); thread 230 arch/unicore32/kernel/process.c thread->cpu_context.sp = (unsigned long)childregs; thread 232 arch/unicore32/kernel/process.c thread->cpu_context.pc = (unsigned long)ret_from_kernel_thread; thread 233 arch/unicore32/kernel/process.c thread->cpu_context.r4 = stack_start; thread 234 arch/unicore32/kernel/process.c thread->cpu_context.r5 = stk_sz; thread 237 arch/unicore32/kernel/process.c thread->cpu_context.pc = (unsigned long)ret_from_fork; thread 263 arch/unicore32/kernel/process.c struct thread_info *thread = current_thread_info(); thread 264 arch/unicore32/kernel/process.c int used_math = thread->used_cp[1] | thread->used_cp[2]; thread 268 arch/unicore32/kernel/process.c memcpy(fp, &thread->fpstate, sizeof(*fp)); thread 172 arch/unicore32/kernel/signal.c err |= __put_user(current->thread.trap_no, thread 174 arch/unicore32/kernel/signal.c err |= __put_user(current->thread.error_code, thread 176 arch/unicore32/kernel/signal.c err |= __put_user(current->thread.address, thread 304 arch/unicore32/kernel/signal.c struct thread_info *thread = current_thread_info(); thread 179 arch/unicore32/kernel/traps.c static int __die(const char *str, int err, struct thread_info *thread, thread 182 arch/unicore32/kernel/traps.c struct task_struct *tsk = thread->task; thread 190 arch/unicore32/kernel/traps.c ret = notify_die(DIE_OOPS, str, regs, err, tsk->thread.trap_no, \ thread 198 arch/unicore32/kernel/traps.c TASK_COMM_LEN, tsk->comm,
task_pid_nr(tsk), thread + 1); thread 217 arch/unicore32/kernel/traps.c struct thread_info *thread = current_thread_info(); thread 225 arch/unicore32/kernel/traps.c ret = __die(str, err, thread, regs); thread 245 arch/unicore32/kernel/traps.c current->thread.error_code = err; thread 246 arch/unicore32/kernel/traps.c current->thread.trap_no = trap; thread 121 arch/unicore32/mm/fault.c tsk->thread.address = addr; thread 122 arch/unicore32/mm/fault.c tsk->thread.error_code = fsr; thread 123 arch/unicore32/mm/fault.c tsk->thread.trap_no = 14; thread 107 arch/x86/entry/vsyscall/vsyscall_64.c struct thread_struct *thread = &current->thread; thread 109 arch/x86/entry/vsyscall/vsyscall_64.c thread->error_code = X86_PF_USER | X86_PF_WRITE; thread 110 arch/x86/entry/vsyscall/vsyscall_64.c thread->cr2 = ptr; thread 111 arch/x86/entry/vsyscall/vsyscall_64.c thread->trap_nr = X86_TRAP_PF; thread 239 arch/x86/entry/vsyscall/vsyscall_64.c prev_sig_on_uaccess_err = current->thread.sig_on_uaccess_err; thread 240 arch/x86/entry/vsyscall/vsyscall_64.c current->thread.sig_on_uaccess_err = 1; thread 264 arch/x86/entry/vsyscall/vsyscall_64.c current->thread.sig_on_uaccess_err = prev_sig_on_uaccess_err; thread 210 arch/x86/events/amd/uncore.c int thread = 2 * (cpu_data(event->cpu).cpu_core_id % 4); thread 213 arch/x86/events/amd/uncore.c thread += cpu_data(event->cpu).apicid & 1; thread 215 arch/x86/events/amd/uncore.c hwc->config |= (1ULL << (AMD64_L3_THREAD_SHIFT + thread) & thread 952 arch/x86/events/intel/p4.c int thread = p4_ht_config_thread(hwc->config); thread 959 arch/x86/events/intel/p4.c escr_addr = bind->escr_msr[thread]; thread 1192 arch/x86/events/intel/p4.c static int p4_next_cntr(int thread, unsigned long *used_mask, thread 1198 arch/x86/events/intel/p4.c j = bind->cntr[thread][i]; thread 1213 arch/x86/events/intel/p4.c unsigned int i, thread, num; thread 1224 arch/x86/events/intel/p4.c thread = p4_ht_thread(cpu); thread 1237 arch/x86/events/intel/p4.c escr_idx = p4_get_escr_idx(bind->escr_msr[thread]); thread 1248 arch/x86/events/intel/p4.c cntr_idx = p4_next_cntr(thread, used_mask, bind); thread 200 arch/x86/ia32/ia32_signal.c put_user_ex(current->thread.trap_nr, &sc->trapno); thread 201 arch/x86/ia32/ia32_signal.c put_user_ex(current->thread.error_code, &sc->err); thread 212 arch/x86/ia32/ia32_signal.c put_user_ex(current->thread.cr2, &sc->cr2); thread 186 arch/x86/include/asm/elf.h elf_common_init(&current->thread, _r, 0) thread 189 arch/x86/include/asm/elf.h elf_common_init(&current->thread, regs, __USER_DS) thread 536 arch/x86/include/asm/fpu/internal.h struct fpu *fpu = &current->thread.fpu; thread 143 arch/x86/include/asm/pgtable.h pk = get_xsave_addr(&current->thread.fpu.state.xsave, XFEATURE_PKRU); thread 377 arch/x86/include/asm/segment.h # define task_user_gs(tsk) ((tsk)->thread.gs) thread 67 arch/x86/include/asm/stacktrace.h return &((struct inactive_task_frame *)task->thread.sp)->bp; thread 86 arch/x86/include/asm/stacktrace.h return (unsigned long *)task->thread.sp; thread 53 arch/x86/include/asm/suspend_64.h #define loaddebug(thread,register) \ thread 54 arch/x86/include/asm/suspend_64.h set_debugreg((thread)->debugreg##register, register) thread 32 arch/x86/include/asm/switch_to.h READ_ONCE(*(unsigned char *)next->thread.sp); thread 76 arch/x86/include/asm/switch_to.h static inline void refresh_sysenter_cs(struct thread_struct *thread) thread 79 arch/x86/include/asm/switch_to.h if (unlikely(this_cpu_read(cpu_tss_rw.x86_tss.ss1) == thread->sysenter_cs)) thread 82 arch/x86/include/asm/switch_to.h
this_cpu_write(cpu_tss_rw.x86_tss.ss1, thread->sysenter_cs); thread 83 arch/x86/include/asm/switch_to.h wrmsr(MSR_IA32_SYSENTER_CS, thread->sysenter_cs, 0); thread 93 arch/x86/include/asm/switch_to.h load_sp0(task->thread.sp0); thread 95 arch/x86/include/asm/switch_to.h this_cpu_write(cpu_tss_rw.x86_tss.sp1, task->thread.sp0); thread 28 arch/x86/include/asm/uaccess.h #define get_fs() (current->thread.addr_limit) thread 31 arch/x86/include/asm/uaccess.h current->thread.addr_limit = fs; thread 37 arch/x86/include/asm/uaccess.h #define user_addr_max() (current->thread.addr_limit.seg) thread 493 arch/x86/include/asm/uaccess.h current->thread.uaccess_err = 0; \ thread 498 arch/x86/include/asm/uaccess.h current->thread.uaccess_err = 0; \ thread 503 arch/x86/include/asm/uaccess.h (err) |= (current->thread.uaccess_err ? -EFAULT : 0); \ thread 35 arch/x86/kernel/asm-offsets.c OFFSET(TASK_threadsp, task_struct, thread.sp); thread 41 arch/x86/kernel/asm-offsets.c OFFSET(TASK_addr_limit, task_struct, thread.addr_limit); thread 1891 arch/x86/kernel/cpu/common.c memset(me->thread.tls_array, 0, GDT_ENTRY_TLS_ENTRIES * 8); thread 1154 arch/x86/kernel/cpu/resctrl/pseudo_lock.c struct task_struct *thread; thread 1181 arch/x86/kernel/cpu/resctrl/pseudo_lock.c thread = kthread_create_on_node(measure_cycles_lat_fn, plr, thread 1186 arch/x86/kernel/cpu/resctrl/pseudo_lock.c thread = kthread_create_on_node(measure_l2_residency, plr, thread 1191 arch/x86/kernel/cpu/resctrl/pseudo_lock.c thread = kthread_create_on_node(measure_l3_residency, plr, thread 1198 arch/x86/kernel/cpu/resctrl/pseudo_lock.c if (IS_ERR(thread)) { thread 1199 arch/x86/kernel/cpu/resctrl/pseudo_lock.c ret = PTR_ERR(thread); thread 1202 arch/x86/kernel/cpu/resctrl/pseudo_lock.c kthread_bind(thread, cpu); thread 1203 arch/x86/kernel/cpu/resctrl/pseudo_lock.c wake_up_process(thread); thread 1274 arch/x86/kernel/cpu/resctrl/pseudo_lock.c struct task_struct *thread; thread 1291 arch/x86/kernel/cpu/resctrl/pseudo_lock.c thread = kthread_create_on_node(pseudo_lock_fn, rdtgrp, thread 1294 arch/x86/kernel/cpu/resctrl/pseudo_lock.c if (IS_ERR(thread)) { thread 1295 arch/x86/kernel/cpu/resctrl/pseudo_lock.c ret = PTR_ERR(thread); thread 1300 arch/x86/kernel/cpu/resctrl/pseudo_lock.c kthread_bind(thread, plr->cpu); thread 1301 arch/x86/kernel/cpu/resctrl/pseudo_lock.c wake_up_process(thread); thread 392 arch/x86/kernel/dumpstack.c current->thread.trap_nr, SIGSEGV) == NOTIFY_STOP) thread 101 arch/x86/kernel/fpu/core.c copy_fpregs_to_fpstate(&current->thread.fpu); thread 123 arch/x86/kernel/fpu/core.c WARN_ON_FPU(fpu != &current->thread.fpu); thread 169 arch/x86/kernel/fpu/core.c struct fpu *dst_fpu = &dst->thread.fpu; thread 170 arch/x86/kernel/fpu/core.c struct fpu *src_fpu = &src->thread.fpu; thread 177 arch/x86/kernel/fpu/core.c WARN_ON_FPU(src_fpu != &current->thread.fpu); thread 216 arch/x86/kernel/fpu/core.c WARN_ON_FPU(fpu != &current->thread.fpu); thread 239 arch/x86/kernel/fpu/core.c if (fpu == &current->thread.fpu) thread 261 arch/x86/kernel/fpu/core.c WARN_ON_FPU(fpu == &current->thread.fpu); thread 280 arch/x86/kernel/fpu/core.c if (fpu == &current->thread.fpu) { thread 323 arch/x86/kernel/fpu/core.c WARN_ON_FPU(fpu != &current->thread.fpu); /* Almost certainly an anomaly */ thread 355 arch/x86/kernel/fpu/core.c struct fpu *fpu = &current->thread.fpu; thread 367 arch/x86/kernel/fpu/core.c struct fpu *fpu = &current->thread.fpu; thread 38 arch/x86/kernel/fpu/init.c fpstate_init_soft(&current->thread.fpu.state.soft); thread 166 arch/x86/kernel/fpu/init.c task_size -= sizeof(((struct task_struct
*)0)->thread.fpu.state); thread 183 arch/x86/kernel/fpu/init.c CHECK_MEMBER_AT_END_OF(struct task_struct, thread); thread 33 arch/x86/kernel/fpu/regset.c struct fpu *fpu = &target->thread.fpu; thread 49 arch/x86/kernel/fpu/regset.c struct fpu *fpu = &target->thread.fpu; thread 80 arch/x86/kernel/fpu/regset.c struct fpu *fpu = &target->thread.fpu; thread 117 arch/x86/kernel/fpu/regset.c struct fpu *fpu = &target->thread.fpu; thread 234 arch/x86/kernel/fpu/regset.c struct fxregs_state *fxsave = &tsk->thread.fpu.state.fxsave; thread 254 arch/x86/kernel/fpu/regset.c env->fos = tsk->thread.ds; thread 299 arch/x86/kernel/fpu/regset.c struct fpu *fpu = &target->thread.fpu; thread 328 arch/x86/kernel/fpu/regset.c struct fpu *fpu = &target->thread.fpu; thread 348 arch/x86/kernel/fpu/regset.c convert_to_fxsr(&target->thread.fpu.state.fxsave, &env); thread 61 arch/x86/kernel/fpu/signal.c struct xregs_state *xsave = &tsk->thread.fpu.state.xsave; thread 67 arch/x86/kernel/fpu/signal.c copy_fxregs_to_kernel(&tsk->thread.fpu); thread 278 arch/x86/kernel/fpu/signal.c struct fpu *fpu = &tsk->thread.fpu; thread 885 arch/x86/kernel/fpu/xstate.c struct fpu *fpu = &current->thread.fpu; thread 1254 arch/x86/kernel/fpu/xstate.c unsigned long timestamp = READ_ONCE(task->thread.fpu.avx512_timestamp); thread 372 arch/x86/kernel/hw_breakpoint.c struct thread_struct *thread = &current->thread; thread 375 arch/x86/kernel/hw_breakpoint.c bp = thread->ptrace_bps[i]; thread 388 arch/x86/kernel/hw_breakpoint.c dump->u_debugreg[6] = current->thread.debugreg6; thread 400 arch/x86/kernel/hw_breakpoint.c struct thread_struct *t = &tsk->thread; thread 417 arch/x86/kernel/hw_breakpoint.c set_debugreg(current->thread.debugreg6, 6); thread 465 arch/x86/kernel/hw_breakpoint.c current->thread.debugreg6 &= ~DR_TRAP_BITS; thread 512 arch/x86/kernel/hw_breakpoint.c if ((current->thread.debugreg6 & DR_TRAP_BITS) || thread 29 arch/x86/kernel/ioport.c struct thread_struct *t = &current->thread; thread 119 arch/x86/kernel/ioport.c struct thread_struct *t = &current->thread; thread 153 arch/x86/kernel/kgdb.c gdb_regs[GDB_BP] = ((struct inactive_task_frame *)p->thread.sp)->bp; thread 176 arch/x86/kernel/kgdb.c gdb_regs[GDB_SP] = p->thread.sp; thread 634 arch/x86/kernel/kgdb.c tsk->thread.debugreg6 |= (DR_TRAP0 << i); thread 101 arch/x86/kernel/process.c dst->thread.vm86 = NULL; thread 112 arch/x86/kernel/process.c struct thread_struct *t = &tsk->thread; thread 140 arch/x86/kernel/process.c memset(tsk->thread.tls_array, 0, sizeof(tsk->thread.tls_array)); thread 142 arch/x86/kernel/process.c fpu__clear(&tsk->thread.fpu); thread 495 arch/x86/kernel/process.c prev = &prev_p->thread; thread 496 arch/x86/kernel/process.c next = &next_p->thread; thread 837 arch/x86/kernel/process.c sp = READ_ONCE(p->thread.sp); thread 133 arch/x86/kernel/process_32.c p->thread.sp = (unsigned long) fork_frame; thread 134 arch/x86/kernel/process_32.c p->thread.sp0 = (unsigned long) (childregs+1); thread 135 arch/x86/kernel/process_32.c memset(p->thread.ptrace_bps, 0, sizeof(p->thread.ptrace_bps)); thread 142 arch/x86/kernel/process_32.c p->thread.io_bitmap_ptr = NULL; thread 153 arch/x86/kernel/process_32.c p->thread.io_bitmap_ptr = NULL; thread 158 arch/x86/kernel/process_32.c p->thread.io_bitmap_ptr = kmemdup(tsk->thread.io_bitmap_ptr, thread 160 arch/x86/kernel/process_32.c if (!p->thread.io_bitmap_ptr) { thread 161 arch/x86/kernel/process_32.c p->thread.io_bitmap_max = 0; thread 176 arch/x86/kernel/process_32.c if (err && p->thread.io_bitmap_ptr) { thread 177
arch/x86/kernel/process_32.c kfree(p->thread.io_bitmap_ptr); thread 178 arch/x86/kernel/process_32.c p->thread.io_bitmap_max = 0; thread 230 arch/x86/kernel/process_32.c struct thread_struct *prev = &prev_p->thread, thread 231 arch/x86/kernel/process_32.c *next = &next_p->thread; thread 193 arch/x86/kernel/process_64.c prev_p->thread.fsbase = 0; thread 195 arch/x86/kernel/process_64.c prev_p->thread.gsbase = 0; thread 201 arch/x86/kernel/process_64.c savesegment(fs, task->thread.fsindex); thread 202 arch/x86/kernel/process_64.c savesegment(gs, task->thread.gsindex); thread 203 arch/x86/kernel/process_64.c save_base_legacy(task, task->thread.fsindex, FS); thread 204 arch/x86/kernel/process_64.c save_base_legacy(task, task->thread.gsindex, GS); thread 307 arch/x86/kernel/process_64.c base = get_desc_base(&task->thread.tls_array[idx]); thread 338 arch/x86/kernel/process_64.c else if (task->thread.fsindex == 0) thread 339 arch/x86/kernel/process_64.c fsbase = task->thread.fsbase; thread 341 arch/x86/kernel/process_64.c fsbase = x86_fsgsbase_read_task(task, task->thread.fsindex); thread 352 arch/x86/kernel/process_64.c else if (task->thread.gsindex == 0) thread 353 arch/x86/kernel/process_64.c gsbase = task->thread.gsbase; thread 355 arch/x86/kernel/process_64.c gsbase = x86_fsgsbase_read_task(task, task->thread.gsindex); thread 364 arch/x86/kernel/process_64.c task->thread.fsbase = fsbase; thread 371 arch/x86/kernel/process_64.c task->thread.gsbase = gsbase; thread 389 arch/x86/kernel/process_64.c p->thread.sp = (unsigned long) fork_frame; thread 390 arch/x86/kernel/process_64.c p->thread.io_bitmap_ptr = NULL; thread 392 arch/x86/kernel/process_64.c savesegment(gs, p->thread.gsindex); thread 393 arch/x86/kernel/process_64.c p->thread.gsbase = p->thread.gsindex ? 0 : me->thread.gsbase; thread 394 arch/x86/kernel/process_64.c savesegment(fs, p->thread.fsindex); thread 395 arch/x86/kernel/process_64.c p->thread.fsbase = p->thread.fsindex ? 
0 : me->thread.fsbase; thread 396 arch/x86/kernel/process_64.c savesegment(es, p->thread.es); thread 397 arch/x86/kernel/process_64.c savesegment(ds, p->thread.ds); thread 398 arch/x86/kernel/process_64.c memset(p->thread.ptrace_bps, 0, sizeof(p->thread.ptrace_bps)); thread 416 arch/x86/kernel/process_64.c p->thread.io_bitmap_ptr = kmemdup(me->thread.io_bitmap_ptr, thread 418 arch/x86/kernel/process_64.c if (!p->thread.io_bitmap_ptr) { thread 419 arch/x86/kernel/process_64.c p->thread.io_bitmap_max = 0; thread 441 arch/x86/kernel/process_64.c if (err && p->thread.io_bitmap_ptr) { thread 442 arch/x86/kernel/process_64.c kfree(p->thread.io_bitmap_ptr); thread 443 arch/x86/kernel/process_64.c p->thread.io_bitmap_max = 0; thread 506 arch/x86/kernel/process_64.c struct thread_struct *prev = &prev_p->thread; thread 507 arch/x86/kernel/process_64.c struct thread_struct *next = &next_p->thread; thread 727 arch/x86/kernel/process_64.c task->thread.gsbase = arg2; thread 730 arch/x86/kernel/process_64.c task->thread.gsindex = 0; thread 757 arch/x86/kernel/process_64.c task->thread.fsbase = arg2; thread 759 arch/x86/kernel/process_64.c task->thread.fsindex = 0; thread 244 arch/x86/kernel/ptrace.c return task->thread.fsindex; thread 250 arch/x86/kernel/ptrace.c return task->thread.gsindex; thread 256 arch/x86/kernel/ptrace.c return task->thread.ds; thread 262 arch/x86/kernel/ptrace.c return task->thread.es; thread 282 arch/x86/kernel/ptrace.c task->thread.fsindex = value; thread 284 arch/x86/kernel/ptrace.c loadsegment(fs, task->thread.fsindex); thread 287 arch/x86/kernel/ptrace.c task->thread.gsindex = value; thread 289 arch/x86/kernel/ptrace.c load_gs_index(task->thread.gsindex); thread 292 arch/x86/kernel/ptrace.c task->thread.ds = value; thread 294 arch/x86/kernel/ptrace.c loadsegment(ds, task->thread.ds); thread 297 arch/x86/kernel/ptrace.c task->thread.es = value; thread 299 arch/x86/kernel/ptrace.c loadsegment(es, task->thread.es); thread 378 arch/x86/kernel/ptrace.c if (child->thread.fsbase != value) thread 387 arch/x86/kernel/ptrace.c if (child->thread.gsbase != value) thread 480 arch/x86/kernel/ptrace.c struct thread_struct *thread = &(current->thread); thread 487 arch/x86/kernel/ptrace.c if (thread->ptrace_bps[i] == bp) thread 491 arch/x86/kernel/ptrace.c thread->debugreg6 |= (DR_TRAP0 << i); thread 566 arch/x86/kernel/ptrace.c struct thread_struct *thread = &tsk->thread; thread 572 arch/x86/kernel/ptrace.c old_dr7 = ptrace_get_dr7(thread->ptrace_bps); thread 579 arch/x86/kernel/ptrace.c struct perf_event *bp = thread->ptrace_bps[i]; thread 592 arch/x86/kernel/ptrace.c thread->ptrace_bps[i] = bp; thread 617 arch/x86/kernel/ptrace.c struct thread_struct *thread = &tsk->thread; thread 622 arch/x86/kernel/ptrace.c struct perf_event *bp = thread->ptrace_bps[index]; thread 627 arch/x86/kernel/ptrace.c val = thread->debugreg6; thread 629 arch/x86/kernel/ptrace.c val = thread->ptrace_dr7; thread 637 arch/x86/kernel/ptrace.c struct thread_struct *t = &tsk->thread; thread 676 arch/x86/kernel/ptrace.c struct thread_struct *thread = &tsk->thread; thread 683 arch/x86/kernel/ptrace.c thread->debugreg6 = val; thread 688 arch/x86/kernel/ptrace.c thread->ptrace_dr7 = val; thread 700 arch/x86/kernel/ptrace.c return target->thread.io_bitmap_max / regset->size; thread 708 arch/x86/kernel/ptrace.c if (!target->thread.io_bitmap_ptr) thread 712 arch/x86/kernel/ptrace.c target->thread.io_bitmap_ptr, thread 1339 arch/x86/kernel/ptrace.c tsk->thread.trap_nr = X86_TRAP_DB; thread 1340 arch/x86/kernel/ptrace.c 
tsk->thread.error_code = error_code; thread 192 arch/x86/kernel/signal.c put_user_ex(current->thread.trap_nr, &sc->trapno); thread 193 arch/x86/kernel/signal.c put_user_ex(current->thread.error_code, &sc->err); thread 212 arch/x86/kernel/signal.c put_user_ex(current->thread.cr2, &sc->cr2); thread 714 arch/x86/kernel/signal.c struct fpu *fpu = &current->thread.fpu; thread 1040 arch/x86/kernel/smpboot.c idle->thread.sp = (unsigned long)task_pt_regs(idle); thread 1043 arch/x86/kernel/smpboot.c initial_stack = idle->thread.sp; thread 23 arch/x86/kernel/tls.c struct thread_struct *t = &current->thread; thread 87 arch/x86/kernel/tls.c struct thread_struct *t = &p->thread; thread 105 arch/x86/kernel/tls.c if (t == &current->thread) thread 180 arch/x86/kernel/tls.c if (p->thread.fsindex == modified_sel) thread 181 arch/x86/kernel/tls.c p->thread.fsbase = info.base_addr; thread 183 arch/x86/kernel/tls.c if (p->thread.gsindex == modified_sel) thread 184 arch/x86/kernel/tls.c p->thread.gsbase = info.base_addr; thread 236 arch/x86/kernel/tls.c fill_user_desc(&info, idx, &p->thread.tls_array[index]); thread 251 arch/x86/kernel/tls.c struct thread_struct *t = &target->thread; thread 272 arch/x86/kernel/tls.c tls = &target->thread.tls_array[pos]; thread 210 arch/x86/kernel/traps.c tsk->thread.error_code = error_code; thread 211 arch/x86/kernel/traps.c tsk->thread.trap_nr = trapnr; thread 224 arch/x86/kernel/traps.c tsk->thread.error_code = error_code; thread 225 arch/x86/kernel/traps.c tsk->thread.trap_nr = trapnr; thread 374 arch/x86/kernel/traps.c tsk->thread.error_code = error_code; thread 375 arch/x86/kernel/traps.c tsk->thread.trap_nr = X86_TRAP_DF; thread 542 arch/x86/kernel/traps.c tsk->thread.error_code = error_code; thread 543 arch/x86/kernel/traps.c tsk->thread.trap_nr = X86_TRAP_GP; thread 560 arch/x86/kernel/traps.c tsk->thread.error_code = error_code; thread 561 arch/x86/kernel/traps.c tsk->thread.trap_nr = X86_TRAP_GP; thread 763 arch/x86/kernel/traps.c tsk->thread.debugreg6 = dr6; thread 798 arch/x86/kernel/traps.c tsk->thread.debugreg6 &= ~DR_STEP; thread 802 arch/x86/kernel/traps.c si_code = get_si_code(tsk->thread.debugreg6); thread 803 arch/x86/kernel/traps.c if (tsk->thread.debugreg6 & (DR_STEP | DR_TRAP_BITS) || user_icebp) thread 821 arch/x86/kernel/traps.c struct fpu *fpu = &task->thread.fpu; thread 832 arch/x86/kernel/traps.c task->thread.error_code = error_code; thread 833 arch/x86/kernel/traps.c task->thread.trap_nr = trapnr; thread 846 arch/x86/kernel/traps.c task->thread.trap_nr = trapnr; thread 847 arch/x86/kernel/traps.c task->thread.error_code = error_code; thread 287 arch/x86/kernel/umip.c tsk->thread.cr2 = (unsigned long)addr; thread 288 arch/x86/kernel/umip.c tsk->thread.error_code = X86_PF_USER | X86_PF_WRITE; thread 289 arch/x86/kernel/umip.c tsk->thread.trap_nr = X86_TRAP_PF; thread 322 arch/x86/kernel/unwind_orc.c if (task != current && state->sp == task->thread.sp) { thread 323 arch/x86/kernel/unwind_orc.c struct inactive_task_frame *frame = (void *)task->thread.sp; thread 651 arch/x86/kernel/unwind_orc.c struct inactive_task_frame *frame = (void *)task->thread.sp; thread 653 arch/x86/kernel/unwind_orc.c state->sp = task->thread.sp; thread 923 arch/x86/kernel/uprobes.c utask->autask.saved_trap_nr = current->thread.trap_nr; thread 924 arch/x86/kernel/uprobes.c current->thread.trap_nr = UPROBE_TRAP_NR; thread 946 arch/x86/kernel/uprobes.c if (t->thread.trap_nr != UPROBE_TRAP_NR) thread 965 arch/x86/kernel/uprobes.c WARN_ON_ONCE(current->thread.trap_nr != UPROBE_TRAP_NR); thread 966 arch/x86/kernel/uprobes.c current->thread.trap_nr = utask->autask.saved_trap_nr;
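The arch/x86/kernel/traps.c and umip.c entries above (and the arch/x86/mm/fault.c entries further down) all follow the same bookkeeping pattern: before a signal is raised, the fault details are parked in the faulting task's thread_struct (trap_nr, error_code, cr2) so the signal-frame and core-dump paths can read them back later. As a rough, self-contained illustration of that pattern only, here is a userspace mock; the types and the record_page_fault() helper are invented for this sketch and are not the kernel's definitions.

#include <stdio.h>

/* Mock of the per-task fault bookkeeping pattern; the field names mirror
 * the listing above, but the types are invented for this sketch. */
struct thread_struct {
	unsigned long trap_nr;
	unsigned long error_code;
	unsigned long cr2;
};

struct task_struct {
	struct thread_struct thread;
};

static struct task_struct current_task;	/* stand-in for "current" */

#define X86_TRAP_PF 14			/* page-fault vector number */

/* Record the fault in the task before delivering a signal, as the
 * traps.c/umip.c/fault.c lines in the listing do. */
static void record_page_fault(struct task_struct *tsk,
			      unsigned long address, unsigned long error_code)
{
	tsk->thread.trap_nr = X86_TRAP_PF;
	tsk->thread.error_code = error_code;
	tsk->thread.cr2 = address;
}

int main(void)
{
	record_page_fault(&current_task, 0xdeadbeefUL, 0x6UL);
	printf("trap %lu err %#lx cr2 %#lx\n",
	       current_task.thread.trap_nr,
	       current_task.thread.error_code,
	       current_task.thread.cr2);
	return 0;
}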
thread 1037 arch/x86/kernel/uprobes.c current->thread.trap_nr = utask->autask.saved_trap_nr; thread 87 arch/x86/kernel/vm86_32.c #define VFLAGS (*(unsigned short *)&(current->thread.vm86->veflags)) thread 88 arch/x86/kernel/vm86_32.c #define VEFLAGS (current->thread.vm86->veflags) thread 100 arch/x86/kernel/vm86_32.c struct vm86 *vm86 = current->thread.vm86; thread 150 arch/x86/kernel/vm86_32.c tsk->thread.sp0 = vm86->saved_sp0; thread 151 arch/x86/kernel/vm86_32.c tsk->thread.sysenter_cs = __KERNEL_CS; thread 153 arch/x86/kernel/vm86_32.c refresh_sysenter_cs(&tsk->thread); thread 242 arch/x86/kernel/vm86_32.c struct vm86 *vm86 = tsk->thread.vm86; thread 276 arch/x86/kernel/vm86_32.c tsk->thread.vm86 = vm86; thread 365 arch/x86/kernel/vm86_32.c vm86->saved_sp0 = tsk->thread.sp0; thread 370 arch/x86/kernel/vm86_32.c tsk->thread.sp0 += 16; thread 373 arch/x86/kernel/vm86_32.c tsk->thread.sysenter_cs = 0; thread 374 arch/x86/kernel/vm86_32.c refresh_sysenter_cs(&tsk->thread); thread 422 arch/x86/kernel/vm86_32.c set_flags(VEFLAGS, flags, current->thread.vm86->veflags_mask); thread 432 arch/x86/kernel/vm86_32.c set_flags(VFLAGS, flags, current->thread.vm86->veflags_mask); thread 447 arch/x86/kernel/vm86_32.c return flags | (VEFLAGS & current->thread.vm86->veflags_mask); thread 542 arch/x86/kernel/vm86_32.c struct vm86 *vm86 = current->thread.vm86; thread 572 arch/x86/kernel/vm86_32.c struct vm86 *vm86 = current->thread.vm86; thread 584 arch/x86/kernel/vm86_32.c current->thread.trap_nr = trapno; thread 585 arch/x86/kernel/vm86_32.c current->thread.error_code = error_code; thread 597 arch/x86/kernel/vm86_32.c struct vm86plus_info_struct *vmpi = &current->thread.vm86->vm86plus; thread 2362 arch/x86/kvm/svm.c wrmsrl(MSR_KERNEL_GS_BASE, current->thread.gsbase); thread 1151 arch/x86/kvm/vmx/vmx.c fs_sel = current->thread.fsindex; thread 1152 arch/x86/kvm/vmx/vmx.c gs_sel = current->thread.gsindex; thread 1153 arch/x86/kvm/vmx/vmx.c fs_base = current->thread.fsbase; thread 1154 arch/x86/kvm/vmx/vmx.c vmx->msr_host_kernel_gs_base = current->thread.gsbase; thread 8520 arch/x86/kvm/x86.c memcpy(&fpu->state, &current->thread.fpu.state, thread 56 arch/x86/math-emu/fpu_aux.c fpstate_init_soft(&current->thread.fpu.state.soft); thread 231 arch/x86/math-emu/fpu_entry.c current->thread.trap_nr = X86_TRAP_MF; thread 232 arch/x86/math-emu/fpu_entry.c current->thread.error_code = 0; thread 624 arch/x86/math-emu/fpu_entry.c current->thread.trap_nr = X86_TRAP_MF; thread 625 arch/x86/math-emu/fpu_entry.c current->thread.error_code = 0; thread 643 arch/x86/math-emu/fpu_entry.c struct swregs_state *s387 = &target->thread.fpu.state.soft; thread 695 arch/x86/math-emu/fpu_entry.c struct swregs_state *s387 = &target->thread.fpu.state.soft; thread 76 arch/x86/math-emu/fpu_system.h #define I387 (&current->thread.fpu.state) thread 138 arch/x86/mm/extable.c current->thread.uaccess_err = 1; thread 277 arch/x86/mm/fault.c if (!v8086_mode(regs) || !tsk->thread.vm86) thread 282 arch/x86/mm/fault.c tsk->thread.vm86->screen_bitmap |= 1 << bit; thread 723 arch/x86/mm/fault.c tsk->thread.trap_nr = X86_TRAP_PF; thread 724 arch/x86/mm/fault.c tsk->thread.error_code = error_code | X86_PF_USER; thread 725 arch/x86/mm/fault.c tsk->thread.cr2 = address; thread 761 arch/x86/mm/fault.c if (current->thread.sig_on_uaccess_err && signal) { thread 171 arch/x86/um/asm/elf.h (pr_reg)[21] = current->thread.arch.fs; \ thread 19 arch/x86/um/asm/processor.h (address + 65536 + 32 * sizeof(unsigned long) >= UPT_SP(&current->thread.regs.regs))
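The arch/x86/um ptrace entries that follow read and write the traced child's saved general-purpose registers through an offset table (child->thread.regs.regs.gp[reg_offsets[regno]]). Purely as an illustration of that table-driven lookup, here is a minimal stand-alone sketch; the register names, numbering and helpers are invented and do not match UML's real code, which also range-checks regno first.

#include <stdio.h>

/* Sketch of a table-driven register-file lookup; names and numbering are
 * invented for this example. */
enum { MOCK_BX, MOCK_CX, MOCK_DX, MOCK_NR_REGS };

struct mock_regs {
	unsigned long gp[MOCK_NR_REGS];	/* saved general-purpose registers */
};

/* Maps a (hypothetical) ptrace register number to a gp[] slot. */
static const int reg_offsets[] = {
	[0] = MOCK_BX,
	[1] = MOCK_CX,
	[2] = MOCK_DX,
};

static void putreg(struct mock_regs *regs, int regno, unsigned long value)
{
	regs->gp[reg_offsets[regno]] = value;
}

static unsigned long getreg(const struct mock_regs *regs, int regno)
{
	return regs->gp[reg_offsets[regno]];
}

int main(void)
{
	struct mock_regs child = { { 0 } };

	putreg(&child, 1, 0x1234);
	printf("reg 1 = %#lx\n", getreg(&child, 1));
	return 0;
}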
thread 31 arch/x86/um/asm/processor.h #define task_pt_regs(t) (&(t)->thread.regs) thread 38 arch/x86/um/asm/processor_32.h static inline void arch_flush_thread(struct arch_thread *thread) thread 41 arch/x86/um/asm/processor_32.h memset(&thread->tls_array, 0, sizeof(thread->tls_array)); thread 24 arch/x86/um/asm/processor_64.h static inline void arch_flush_thread(struct arch_thread *thread) thread 91 arch/x86/um/ptrace_32.c UPT_SYSCALL_NR(&child->thread.regs.regs) = value; thread 115 arch/x86/um/ptrace_32.c child->thread.regs.regs.gp[HOST_EFLAGS] |= value; thread 120 arch/x86/um/ptrace_32.c child->thread.regs.regs.gp[reg_offsets[regno]] = value; thread 137 arch/x86/um/ptrace_32.c child->thread.arch.debugregs[addr] = data; thread 172 arch/x86/um/ptrace_32.c return mask & child->thread.regs.regs.gp[reg_offsets[regno]]; thread 191 arch/x86/um/ptrace_32.c tmp = child->thread.arch.debugregs[addr]; thread 85 arch/x86/um/ptrace_64.c UPT_SYSCALL_NR(&child->thread.regs.regs) = value; thread 107 arch/x86/um/ptrace_64.c child->thread.regs.regs.gp[HOST_EFLAGS] |= value; thread 114 arch/x86/um/ptrace_64.c child->thread.regs.regs.gp[reg_offsets[regno >> 3]] = value; thread 131 arch/x86/um/ptrace_64.c child->thread.arch.debugregs[addr] = data; thread 178 arch/x86/um/ptrace_64.c return mask & child->thread.regs.regs.gp[reg_offsets[regno >> 3]]; thread 196 arch/x86/um/ptrace_64.c tmp = child->thread.arch.debugregs[addr]; thread 241 arch/x86/um/signal.c struct faultinfo * fi = &current->thread.arch.faultinfo; thread 333 arch/x86/um/signal.c err |= copy_sc_to_user(&uc->uc_mcontext, fp, &current->thread.regs, 0); thread 454 arch/x86/um/signal.c unsigned long sp = PT_REGS_SP(&current->thread.regs); thread 466 arch/x86/um/signal.c if (copy_sc_from_user(&current->thread.regs, sc)) thread 470 arch/x86/um/signal.c PT_REGS_SYSCALL_NR(&current->thread.regs) = -1; thread 471 arch/x86/um/signal.c return PT_REGS_SYSCALL_RET(&current->thread.regs); thread 561 arch/x86/um/signal.c unsigned long sp = PT_REGS_SP(&current->thread.regs); thread 572 arch/x86/um/signal.c if (copy_sc_from_user(&current->thread.regs, &uc->uc_mcontext)) thread 576 arch/x86/um/signal.c PT_REGS_SYSCALL_NR(&current->thread.regs) = -1; thread 577 arch/x86/um/signal.c return PT_REGS_SYSCALL_RET(&current->thread.regs); thread 38 arch/x86/um/syscalls_64.c ret = restore_registers(pid, &current->thread.regs.regs); thread 61 arch/x86/um/syscalls_64.c current->thread.arch.fs = (unsigned long) ptr; thread 62 arch/x86/um/syscalls_64.c ret = save_registers(pid, &current->thread.regs.regs); thread 65 arch/x86/um/syscalls_64.c ret = save_registers(pid, &current->thread.regs.regs); thread 85 arch/x86/um/syscalls_64.c if ((to->thread.arch.fs == 0) || (to->mm == NULL)) thread 88 arch/x86/um/syscalls_64.c arch_prctl(to, ARCH_SET_FS, (void __user *) to->thread.arch.fs); thread 65 arch/x86/um/tls_32.c struct thread_struct *t = &task->thread; thread 99 arch/x86/um/tls_32.c &to->thread.arch.tls_array[idx - GDT_ENTRY_TLS_MIN]; thread 139 arch/x86/um/tls_32.c &task->thread.arch.tls_array[i - GDT_ENTRY_TLS_MIN]; thread 163 arch/x86/um/tls_32.c &task->thread.arch.tls_array[i - GDT_ENTRY_TLS_MIN]; thread 206 arch/x86/um/tls_32.c struct thread_struct *t = &task->thread; thread 241 arch/x86/um/tls_32.c struct thread_struct *t = &task->thread; thread 15 arch/x86/um/tls_64.c t->thread.arch.fs = tls; thread 215 arch/xtensa/include/asm/processor.h #define release_thread(thread) do { } while(0) thread 27 arch/xtensa/include/asm/stacktrace.h sp = (unsigned long *)task->thread.sp; thread 35
arch/xtensa/include/asm/uaccess.h #define get_fs() (current->thread.current_ds) thread 36 arch/xtensa/include/asm/uaccess.h #define set_fs(val) (current->thread.current_ds = (val)) thread 77 arch/xtensa/kernel/asm-offsets.c DEFINE(TASK_THREAD, offsetof (struct task_struct, thread)); thread 93 arch/xtensa/kernel/asm-offsets.c DEFINE(THREAD_RA, offsetof (struct task_struct, thread.ra)); thread 94 arch/xtensa/kernel/asm-offsets.c DEFINE(THREAD_SP, offsetof (struct task_struct, thread.sp)); thread 109 arch/xtensa/kernel/asm-offsets.c thread.current_ds)); thread 236 arch/xtensa/kernel/hw_breakpoint.c struct thread_struct *t = &tsk->thread; thread 259 arch/xtensa/kernel/hw_breakpoint.c memset(tsk->thread.ptrace_bp, 0, sizeof(tsk->thread.ptrace_bp)); thread 260 arch/xtensa/kernel/hw_breakpoint.c memset(tsk->thread.ptrace_wp, 0, sizeof(tsk->thread.ptrace_wp)); thread 219 arch/xtensa/kernel/process.c p->thread.sp = (unsigned long)childregs; thread 226 arch/xtensa/kernel/process.c p->thread.ra = MAKE_RA_FOR_CALL( thread 271 arch/xtensa/kernel/process.c p->thread.ra = MAKE_RA_FOR_CALL( thread 309 arch/xtensa/kernel/process.c sp = p->thread.sp; thread 310 arch/xtensa/kernel/process.c pc = MAKE_PC_FROM_RA(p->thread.ra, p->thread.sp); thread 374 arch/xtensa/kernel/ptrace.c if (current->thread.ptrace_bp[i] == bp) thread 379 arch/xtensa/kernel/ptrace.c if (current->thread.ptrace_wp[i] == bp) thread 428 arch/xtensa/kernel/ptrace.c bp = child->thread.ptrace_wp[idx]; thread 430 arch/xtensa/kernel/ptrace.c bp = child->thread.ptrace_bp[idx]; thread 467 arch/xtensa/kernel/ptrace.c bp = child->thread.ptrace_wp[idx]; thread 473 arch/xtensa/kernel/ptrace.c bp = child->thread.ptrace_bp[idx]; thread 483 arch/xtensa/kernel/ptrace.c child->thread.ptrace_wp[idx] = bp; thread 485 arch/xtensa/kernel/ptrace.c child->thread.ptrace_bp[idx] = bp; thread 331 arch/xtensa/kernel/traps.c current->thread.bad_vaddr = regs->excvaddr; thread 332 arch/xtensa/kernel/traps.c current->thread.error_code = -3; thread 158 arch/xtensa/mm/fault.c current->thread.bad_vaddr = address; thread 159 arch/xtensa/mm/fault.c current->thread.error_code = is_write; thread 184 arch/xtensa/mm/fault.c current->thread.bad_vaddr = address; thread 242 arch/xtensa/mm/fault.c current->thread.bad_uaddr = address; thread 92 crypto/algboss.c struct task_struct *thread; thread 187 crypto/algboss.c thread = kthread_run(cryptomgr_probe, param, "cryptomgr_probe"); thread 188 crypto/algboss.c if (IS_ERR(thread)) thread 229 crypto/algboss.c struct task_struct *thread; thread 250 crypto/algboss.c thread = kthread_run(cryptomgr_test, param, "cryptomgr_test"); thread 251 crypto/algboss.c if (IS_ERR(thread)) thread 52 drivers/acpi/acpi_dbg.c struct task_struct *thread; thread 69 drivers/acpi/acpi_dbg.c return acpi_aml_io.thread ? 
true : false; thread 86 drivers/acpi/acpi_dbg.c current != acpi_aml_io.thread) thread 409 drivers/acpi/acpi_dbg.c acpi_aml_io.thread = NULL; thread 441 drivers/acpi/acpi_dbg.c acpi_aml_io.thread = t; thread 175 drivers/acpi/acpica/acdispat.h acpi_ds_call_control_method(struct acpi_thread_state *thread, thread 300 drivers/acpi/acpica/acdispat.h *thread); thread 317 drivers/acpi/acpica/acdispat.h *thread); thread 321 drivers/acpi/acpica/acdispat.h struct acpi_thread_state *thread); thread 326 drivers/acpi/acpica/acdispat.h *thread); thread 258 drivers/acpi/acpica/acinterp.h void acpi_ex_release_all_mutexes(struct acpi_thread_state *thread); thread 663 drivers/acpi/acpica/aclocal.h struct acpi_thread_state thread; thread 90 drivers/acpi/acpica/acstruct.h struct acpi_thread_state *thread; thread 37 drivers/acpi/acpica/dbobject.c struct acpi_thread_state *thread; thread 66 drivers/acpi/acpica/dbobject.c thread = walk_state->thread; thread 67 drivers/acpi/acpica/dbobject.c if (!thread) { thread 94 drivers/acpi/acpica/dsdebug.c struct acpi_thread_state *thread; thread 120 drivers/acpi/acpica/dsdebug.c thread = walk_state->thread; thread 121 drivers/acpi/acpica/dsdebug.c if (!thread) { thread 137 drivers/acpi/acpica/dsdebug.c next_walk_state = thread->walk_state_list; thread 344 drivers/acpi/acpica/dsmethod.c && (walk_state->thread->current_sync_level > thread 350 drivers/acpi/acpica/dsmethod.c walk_state->thread->current_sync_level)); thread 361 drivers/acpi/acpica/dsmethod.c (walk_state->thread->thread_id != thread 380 drivers/acpi/acpica/dsmethod.c walk_state->thread->current_sync_level; thread 383 drivers/acpi/acpica/dsmethod.c walk_state->thread->thread_id; thread 394 drivers/acpi/acpica/dsmethod.c walk_state->thread->current_sync_level = thread 456 drivers/acpi/acpica/dsmethod.c acpi_ds_call_control_method(struct acpi_thread_state *thread, thread 499 drivers/acpi/acpica/dsmethod.c thread); thread 758 drivers/acpi/acpica/dsmethod.c walk_state->thread->current_sync_level = thread 413 drivers/acpi/acpica/dswstate.c *thread) thread 417 drivers/acpi/acpica/dswstate.c if (!thread) { thread 422 drivers/acpi/acpica/dswstate.c thread->walk_state_list)); thread 424 drivers/acpi/acpica/dswstate.c return (thread->walk_state_list); thread 442 drivers/acpi/acpica/dswstate.c struct acpi_thread_state *thread) thread 446 drivers/acpi/acpica/dswstate.c walk_state->next = thread->walk_state_list; thread 447 drivers/acpi/acpica/dswstate.c thread->walk_state_list = walk_state; thread 466 drivers/acpi/acpica/dswstate.c struct acpi_walk_state *acpi_ds_pop_walk_state(struct acpi_thread_state *thread) thread 472 drivers/acpi/acpica/dswstate.c walk_state = thread->walk_state_list; thread 478 drivers/acpi/acpica/dswstate.c thread->walk_state_list = walk_state->next; thread 512 drivers/acpi/acpica/dswstate.c *thread) thread 527 drivers/acpi/acpica/dswstate.c walk_state->thread = thread; thread 539 drivers/acpi/acpica/dswstate.c if (thread) { thread 540 drivers/acpi/acpica/dswstate.c acpi_ds_push_walk_state(walk_state, thread); thread 21 drivers/acpi/acpica/exmutex.c struct acpi_thread_state *thread); thread 37 drivers/acpi/acpica/exmutex.c struct acpi_thread_state *thread = obj_desc->mutex.owner_thread; thread 39 drivers/acpi/acpica/exmutex.c if (!thread) { thread 61 drivers/acpi/acpica/exmutex.c thread->acquired_mutex_list = obj_desc->mutex.next; thread 80 drivers/acpi/acpica/exmutex.c struct acpi_thread_state *thread) thread 84 drivers/acpi/acpica/exmutex.c list_head = thread->acquired_mutex_list; thread 99 
drivers/acpi/acpica/exmutex.c thread->acquired_mutex_list = obj_desc; thread 205 drivers/acpi/acpica/exmutex.c if (!walk_state->thread) { thread 216 drivers/acpi/acpica/exmutex.c if (walk_state->thread->current_sync_level > obj_desc->mutex.sync_level) { thread 221 drivers/acpi/acpica/exmutex.c walk_state->thread->current_sync_level)); thread 229 drivers/acpi/acpica/exmutex.c walk_state->thread->current_sync_level, thread 231 drivers/acpi/acpica/exmutex.c walk_state->thread)); thread 235 drivers/acpi/acpica/exmutex.c walk_state->thread->thread_id); thread 241 drivers/acpi/acpica/exmutex.c obj_desc->mutex.owner_thread = walk_state->thread; thread 243 drivers/acpi/acpica/exmutex.c walk_state->thread->current_sync_level; thread 244 drivers/acpi/acpica/exmutex.c walk_state->thread->current_sync_level = thread 249 drivers/acpi/acpica/exmutex.c acpi_ex_link_mutex(obj_desc, walk_state->thread); thread 255 drivers/acpi/acpica/exmutex.c walk_state->thread->current_sync_level, thread 365 drivers/acpi/acpica/exmutex.c if (!walk_state->thread) { thread 376 drivers/acpi/acpica/exmutex.c if ((owner_thread->thread_id != walk_state->thread->thread_id) && thread 380 drivers/acpi/acpica/exmutex.c (u32)walk_state->thread->thread_id, thread 399 drivers/acpi/acpica/exmutex.c walk_state->thread->current_sync_level)); thread 415 drivers/acpi/acpica/exmutex.c walk_state->thread->current_sync_level, thread 418 drivers/acpi/acpica/exmutex.c walk_state->thread)); thread 436 drivers/acpi/acpica/exmutex.c walk_state->thread->current_sync_level, thread 461 drivers/acpi/acpica/exmutex.c void acpi_ex_release_all_mutexes(struct acpi_thread_state *thread) thread 463 drivers/acpi/acpica/exmutex.c union acpi_operand_object *next = thread->acquired_mutex_list; thread 491 drivers/acpi/acpica/exmutex.c thread->current_sync_level = thread 411 drivers/acpi/acpica/psparse.c struct acpi_thread_state *thread; thread 428 drivers/acpi/acpica/psparse.c thread = acpi_ut_create_thread_state(); thread 429 drivers/acpi/acpica/psparse.c if (!thread) { thread 443 drivers/acpi/acpica/psparse.c walk_state->thread = thread; thread 450 drivers/acpi/acpica/psparse.c walk_state->thread->current_sync_level = thread 454 drivers/acpi/acpica/psparse.c acpi_ds_push_walk_state(walk_state, thread); thread 460 drivers/acpi/acpica/psparse.c acpi_gbl_current_walk_list = thread; thread 503 drivers/acpi/acpica/psparse.c acpi_ds_call_control_method(thread, walk_state, thread 514 drivers/acpi/acpica/psparse.c walk_state = acpi_ds_get_current_walk_state(thread); thread 553 drivers/acpi/acpica/psparse.c walk_state = acpi_ds_pop_walk_state(thread); thread 585 drivers/acpi/acpica/psparse.c walk_state = acpi_ds_get_current_walk_state(thread); thread 682 drivers/acpi/acpica/psparse.c acpi_ex_release_all_mutexes(thread); thread 684 drivers/acpi/acpica/psparse.c (union acpi_generic_state, thread)); thread 129 drivers/acpi/acpica/utstate.c state->thread.thread_id = acpi_os_get_thread_id(); thread 133 drivers/acpi/acpica/utstate.c if (!state->thread.thread_id) { thread 135 drivers/acpi/acpica/utstate.c state->thread.thread_id = (acpi_thread_id) 1; thread 816 drivers/android/binder.c binder_enqueue_deferred_thread_work_ilocked(struct binder_thread *thread, thread 819 drivers/android/binder.c WARN_ON(!list_empty(&thread->waiting_thread_node)); thread 820 drivers/android/binder.c binder_enqueue_work_ilocked(work, &thread->todo); thread 834 drivers/android/binder.c binder_enqueue_thread_work_ilocked(struct binder_thread *thread, thread 837 drivers/android/binder.c 
WARN_ON(!list_empty(&thread->waiting_thread_node)); thread 838 drivers/android/binder.c binder_enqueue_work_ilocked(work, &thread->todo); thread 839 drivers/android/binder.c thread->process_todo = true; thread 851 drivers/android/binder.c binder_enqueue_thread_work(struct binder_thread *thread, thread 854 drivers/android/binder.c binder_inner_proc_lock(thread->proc); thread 855 drivers/android/binder.c binder_enqueue_thread_work_ilocked(thread, work); thread 856 drivers/android/binder.c binder_inner_proc_unlock(thread->proc); thread 915 drivers/android/binder.c static void binder_free_thread(struct binder_thread *thread); thread 919 drivers/android/binder.c static bool binder_has_work_ilocked(struct binder_thread *thread, thread 922 drivers/android/binder.c return thread->process_todo || thread 923 drivers/android/binder.c thread->looper_need_return || thread 925 drivers/android/binder.c !binder_worklist_empty_ilocked(&thread->proc->todo)); thread 928 drivers/android/binder.c static bool binder_has_work(struct binder_thread *thread, bool do_proc_work) thread 932 drivers/android/binder.c binder_inner_proc_lock(thread->proc); thread 933 drivers/android/binder.c has_work = binder_has_work_ilocked(thread, do_proc_work); thread 934 drivers/android/binder.c binder_inner_proc_unlock(thread->proc); thread 939 drivers/android/binder.c static bool binder_available_for_proc_work_ilocked(struct binder_thread *thread) thread 941 drivers/android/binder.c return !thread->transaction_stack && thread 942 drivers/android/binder.c binder_worklist_empty_ilocked(&thread->todo) && thread 943 drivers/android/binder.c (thread->looper & (BINDER_LOOPER_STATE_ENTERED | thread 951 drivers/android/binder.c struct binder_thread *thread; thread 954 drivers/android/binder.c thread = rb_entry(n, struct binder_thread, rb_node); thread 955 drivers/android/binder.c if (thread->looper & BINDER_LOOPER_STATE_POLL && thread 956 drivers/android/binder.c binder_available_for_proc_work_ilocked(thread)) { thread 958 drivers/android/binder.c wake_up_interruptible_sync(&thread->wait); thread 960 drivers/android/binder.c wake_up_interruptible(&thread->wait); thread 980 drivers/android/binder.c struct binder_thread *thread; thread 983 drivers/android/binder.c thread = list_first_entry_or_null(&proc->waiting_threads, thread 987 drivers/android/binder.c if (thread) thread 988 drivers/android/binder.c list_del_init(&thread->waiting_thread_node); thread 990 drivers/android/binder.c return thread; thread 1010 drivers/android/binder.c struct binder_thread *thread, thread 1015 drivers/android/binder.c if (thread) { thread 1017 drivers/android/binder.c wake_up_interruptible_sync(&thread->wait); thread 1019 drivers/android/binder.c wake_up_interruptible(&thread->wait); thread 1041 drivers/android/binder.c struct binder_thread *thread = binder_select_thread_ilocked(proc); thread 1043 drivers/android/binder.c binder_wakeup_thread_ilocked(proc, thread, /* sync = */false); thread 1210 drivers/android/binder.c struct binder_thread *thread = container_of(target_list, thread 1213 drivers/android/binder.c BUG_ON(&thread->todo != target_list); thread 1214 drivers/android/binder.c binder_enqueue_deferred_thread_work_ilocked(thread, thread 1802 drivers/android/binder.c static void binder_thread_dec_tmpref(struct binder_thread *thread) thread 1808 drivers/android/binder.c binder_inner_proc_lock(thread->proc); thread 1809 drivers/android/binder.c atomic_dec(&thread->tmp_ref); thread 1810 drivers/android/binder.c if (thread->is_dead && 
!atomic_read(&thread->tmp_ref)) { thread 1811 drivers/android/binder.c binder_inner_proc_unlock(thread->proc); thread 1812 drivers/android/binder.c binder_free_thread(thread); thread 1815 drivers/android/binder.c binder_inner_proc_unlock(thread->proc); thread 2438 drivers/android/binder.c struct binder_thread *thread) thread 2441 drivers/android/binder.c struct binder_proc *proc = thread->proc; thread 2454 drivers/android/binder.c proc->pid, thread->pid, (u64)fp->binder, thread 2467 drivers/android/binder.c &thread->todo, &rdata); thread 2491 drivers/android/binder.c struct binder_thread *thread) thread 2493 drivers/android/binder.c struct binder_proc *proc = thread->proc; thread 2503 drivers/android/binder.c proc->pid, thread->pid, fp->handle); thread 2564 drivers/android/binder.c struct binder_thread *thread, thread 2567 drivers/android/binder.c struct binder_proc *proc = thread->proc; thread 2580 drivers/android/binder.c proc->pid, thread->pid, thread 2590 drivers/android/binder.c proc->pid, thread->pid, fd); thread 2628 drivers/android/binder.c struct binder_thread *thread, thread 2633 drivers/android/binder.c struct binder_proc *proc = thread->proc; thread 2639 drivers/android/binder.c proc->pid, thread->pid, (u64)fda->num_fds); thread 2646 drivers/android/binder.c proc->pid, thread->pid, (u64)fda->num_fds); thread 2660 drivers/android/binder.c proc->pid, thread->pid); thread 2672 drivers/android/binder.c ret = binder_translate_fd(fd, offset, t, thread, thread 2681 drivers/android/binder.c struct binder_thread *thread, thread 2690 drivers/android/binder.c struct binder_proc *proc = thread->proc; thread 2704 drivers/android/binder.c proc->pid, thread->pid); thread 2713 drivers/android/binder.c proc->pid, thread->pid); thread 2721 drivers/android/binder.c proc->pid, thread->pid); thread 2729 drivers/android/binder.c proc->pid, thread->pid); thread 2755 drivers/android/binder.c struct binder_thread *thread) thread 2764 drivers/android/binder.c BUG_ON(thread); thread 2774 drivers/android/binder.c if (proc->is_dead || (thread && thread->is_dead)) { thread 2780 drivers/android/binder.c if (!thread && !pending_async) thread 2781 drivers/android/binder.c thread = binder_select_thread_ilocked(proc); thread 2783 drivers/android/binder.c if (thread) thread 2784 drivers/android/binder.c binder_enqueue_thread_work_ilocked(thread, &t->work); thread 2791 drivers/android/binder.c binder_wakeup_thread_ilocked(proc, thread, !oneway /* sync */); thread 2842 drivers/android/binder.c struct binder_thread *thread, thread 2873 drivers/android/binder.c e->from_thread = thread->pid; thread 2881 drivers/android/binder.c in_reply_to = thread->transaction_stack; thread 2885 drivers/android/binder.c proc->pid, thread->pid); thread 2891 drivers/android/binder.c if (in_reply_to->to_thread != thread) { thread 2894 drivers/android/binder.c proc->pid, thread->pid, in_reply_to->debug_id, thread 2907 drivers/android/binder.c thread->transaction_stack = in_reply_to->to_parent; thread 2920 drivers/android/binder.c proc->pid, thread->pid, thread 2955 drivers/android/binder.c proc->pid, thread->pid); thread 2971 drivers/android/binder.c proc->pid, thread->pid); thread 2996 drivers/android/binder.c w = list_first_entry_or_null(&thread->todo, thread 3010 drivers/android/binder.c proc->pid, thread->pid); thread 3018 drivers/android/binder.c if (!(tr->flags & TF_ONE_WAY) && thread->transaction_stack) { thread 3021 drivers/android/binder.c tmp = thread->transaction_stack; thread 3022 drivers/android/binder.c if (tmp->to_thread 
!= thread) { thread 3025 drivers/android/binder.c proc->pid, thread->pid, tmp->debug_id, thread 3083 drivers/android/binder.c proc->pid, thread->pid, t->debug_id, thread 3092 drivers/android/binder.c proc->pid, thread->pid, t->debug_id, thread 3100 drivers/android/binder.c t->from = thread; thread 3179 drivers/android/binder.c proc->pid, thread->pid); thread 3193 drivers/android/binder.c proc->pid, thread->pid); thread 3201 drivers/android/binder.c proc->pid, thread->pid, (u64)tr->offsets_size); thread 3209 drivers/android/binder.c proc->pid, thread->pid, thread 3244 drivers/android/binder.c proc->pid, thread->pid, thread 3262 drivers/android/binder.c ret = binder_translate_binder(fp, t, thread); thread 3280 drivers/android/binder.c ret = binder_translate_handle(fp, t, thread); thread 3298 drivers/android/binder.c thread, in_reply_to); thread 3327 drivers/android/binder.c proc->pid, thread->pid); thread 3340 drivers/android/binder.c proc->pid, thread->pid); thread 3346 drivers/android/binder.c ret = binder_translate_fd_array(fda, parent, t, thread, thread 3366 drivers/android/binder.c proc->pid, thread->pid); thread 3380 drivers/android/binder.c proc->pid, thread->pid); thread 3393 drivers/android/binder.c ret = binder_fixup_parent(t, thread, bp, thread 3413 drivers/android/binder.c proc->pid, thread->pid, hdr->type); thread 3424 drivers/android/binder.c binder_enqueue_thread_work(thread, tcomplete); thread 3446 drivers/android/binder.c binder_enqueue_deferred_thread_work_ilocked(thread, tcomplete); thread 3448 drivers/android/binder.c t->from_parent = thread->transaction_stack; thread 3449 drivers/android/binder.c thread->transaction_stack = t; thread 3453 drivers/android/binder.c binder_pop_transaction_ilocked(thread, t); thread 3460 drivers/android/binder.c binder_enqueue_thread_work(thread, tcomplete); thread 3522 drivers/android/binder.c proc->pid, thread->pid, return_error, return_error_param, thread 3543 drivers/android/binder.c BUG_ON(thread->return_error.cmd != BR_OK); thread 3545 drivers/android/binder.c thread->return_error.cmd = BR_TRANSACTION_COMPLETE; thread 3546 drivers/android/binder.c binder_enqueue_thread_work(thread, &thread->return_error.work); thread 3549 drivers/android/binder.c thread->return_error.cmd = return_error; thread 3550 drivers/android/binder.c binder_enqueue_thread_work(thread, &thread->return_error.work); thread 3598 drivers/android/binder.c struct binder_thread *thread, thread 3608 drivers/android/binder.c while (ptr < end && thread->return_error.cmd == BR_OK) { thread 3618 drivers/android/binder.c atomic_inc(&thread->stats.bc[_IOC_NR(cmd)]); thread 3652 drivers/android/binder.c proc->pid, thread->pid, thread 3672 drivers/android/binder.c proc->pid, thread->pid, debug_string, thread 3678 drivers/android/binder.c proc->pid, thread->pid, debug_string, thread 3699 drivers/android/binder.c proc->pid, thread->pid, thread 3708 drivers/android/binder.c proc->pid, thread->pid, thread 3720 drivers/android/binder.c proc->pid, thread->pid, thread 3730 drivers/android/binder.c proc->pid, thread->pid, thread 3743 drivers/android/binder.c proc->pid, thread->pid, thread 3772 drivers/android/binder.c proc->pid, thread->pid, thread 3777 drivers/android/binder.c proc->pid, thread->pid, thread 3784 drivers/android/binder.c proc->pid, thread->pid, (u64)data_ptr, thread 3798 drivers/android/binder.c binder_transaction(proc, thread, &tr.transaction_data, thread 3809 drivers/android/binder.c binder_transaction(proc, thread, &tr, thread 3817 drivers/android/binder.c proc->pid, 
thread->pid); thread 3819 drivers/android/binder.c if (thread->looper & BINDER_LOOPER_STATE_ENTERED) { thread 3820 drivers/android/binder.c thread->looper |= BINDER_LOOPER_STATE_INVALID; thread 3822 drivers/android/binder.c proc->pid, thread->pid); thread 3824 drivers/android/binder.c thread->looper |= BINDER_LOOPER_STATE_INVALID; thread 3826 drivers/android/binder.c proc->pid, thread->pid); thread 3831 drivers/android/binder.c thread->looper |= BINDER_LOOPER_STATE_REGISTERED; thread 3837 drivers/android/binder.c proc->pid, thread->pid); thread 3838 drivers/android/binder.c if (thread->looper & BINDER_LOOPER_STATE_REGISTERED) { thread 3839 drivers/android/binder.c thread->looper |= BINDER_LOOPER_STATE_INVALID; thread 3841 drivers/android/binder.c proc->pid, thread->pid); thread 3843 drivers/android/binder.c thread->looper |= BINDER_LOOPER_STATE_ENTERED; thread 3848 drivers/android/binder.c proc->pid, thread->pid); thread 3849 drivers/android/binder.c thread->looper |= BINDER_LOOPER_STATE_EXITED; thread 3872 drivers/android/binder.c WARN_ON(thread->return_error.cmd != thread 3874 drivers/android/binder.c thread->return_error.cmd = BR_ERROR; thread 3876 drivers/android/binder.c thread, thread 3877 drivers/android/binder.c &thread->return_error.work); thread 3881 drivers/android/binder.c proc->pid, thread->pid); thread 3889 drivers/android/binder.c proc->pid, thread->pid, thread 3901 drivers/android/binder.c proc->pid, thread->pid, thread 3913 drivers/android/binder.c proc->pid, thread->pid); thread 3935 drivers/android/binder.c proc->pid, thread->pid); thread 3943 drivers/android/binder.c proc->pid, thread->pid, thread 3954 drivers/android/binder.c if (thread->looper & thread 3958 drivers/android/binder.c thread, thread 4000 drivers/android/binder.c proc->pid, thread->pid, (u64)cookie, thread 4004 drivers/android/binder.c proc->pid, thread->pid, (u64)cookie); thread 4011 drivers/android/binder.c if (thread->looper & thread 4015 drivers/android/binder.c thread, &death->work); thread 4028 drivers/android/binder.c proc->pid, thread->pid, cmd); thread 4037 drivers/android/binder.c struct binder_thread *thread, uint32_t cmd) thread 4043 drivers/android/binder.c atomic_inc(&thread->stats.br[_IOC_NR(cmd)]); thread 4048 drivers/android/binder.c struct binder_thread *thread, thread 4069 drivers/android/binder.c binder_stat_br(proc, thread, cmd); thread 4071 drivers/android/binder.c proc->pid, thread->pid, cmd_name, node_debug_id, thread 4078 drivers/android/binder.c static int binder_wait_for_work(struct binder_thread *thread, thread 4082 drivers/android/binder.c struct binder_proc *proc = thread->proc; thread 4088 drivers/android/binder.c prepare_to_wait(&thread->wait, &wait, TASK_INTERRUPTIBLE); thread 4089 drivers/android/binder.c if (binder_has_work_ilocked(thread, do_proc_work)) thread 4092 drivers/android/binder.c list_add(&thread->waiting_thread_node, thread 4097 drivers/android/binder.c list_del_init(&thread->waiting_thread_node); thread 4103 drivers/android/binder.c finish_wait(&thread->wait, &wait); thread 4176 drivers/android/binder.c struct binder_thread *thread, thread 4195 drivers/android/binder.c wait_for_proc_work = binder_available_for_proc_work_ilocked(thread); thread 4198 drivers/android/binder.c thread->looper |= BINDER_LOOPER_STATE_WAITING; thread 4201 drivers/android/binder.c !!thread->transaction_stack, thread 4202 drivers/android/binder.c !binder_worklist_empty(proc, &thread->todo)); thread 4204 drivers/android/binder.c if (!(thread->looper & (BINDER_LOOPER_STATE_REGISTERED | 
thread 4207 drivers/android/binder.c proc->pid, thread->pid, thread->looper); thread 4215 drivers/android/binder.c if (!binder_has_work(thread, wait_for_proc_work)) thread 4218 drivers/android/binder.c ret = binder_wait_for_work(thread, wait_for_proc_work); thread 4221 drivers/android/binder.c thread->looper &= ~BINDER_LOOPER_STATE_WAITING; thread 4237 drivers/android/binder.c if (!binder_worklist_empty_ilocked(&thread->todo)) thread 4238 drivers/android/binder.c list = &thread->todo; thread 4246 drivers/android/binder.c if (ptr - buffer == 4 && !thread->looper_need_return) thread 4256 drivers/android/binder.c if (binder_worklist_empty_ilocked(&thread->todo)) thread 4257 drivers/android/binder.c thread->process_todo = false; thread 4276 drivers/android/binder.c binder_stat_br(proc, thread, cmd); thread 4287 drivers/android/binder.c binder_stat_br(proc, thread, cmd); thread 4290 drivers/android/binder.c proc->pid, thread->pid); thread 4328 drivers/android/binder.c proc->pid, thread->pid, thread 4351 drivers/android/binder.c proc, thread, &ptr, node_ptr, thread 4356 drivers/android/binder.c proc, thread, &ptr, node_ptr, thread 4361 drivers/android/binder.c proc, thread, &ptr, node_ptr, thread 4366 drivers/android/binder.c proc, thread, &ptr, node_ptr, thread 4372 drivers/android/binder.c proc->pid, thread->pid, thread 4395 drivers/android/binder.c proc->pid, thread->pid, thread 4416 drivers/android/binder.c binder_stat_br(proc, thread, cmd); thread 4423 drivers/android/binder.c proc->pid, thread->pid, w->type); thread 4478 drivers/android/binder.c proc->pid, thread->pid, thread 4487 drivers/android/binder.c binder_stat_br(proc, thread, cmd); thread 4526 drivers/android/binder.c binder_stat_br(proc, thread, cmd); thread 4529 drivers/android/binder.c proc->pid, thread->pid, thread 4543 drivers/android/binder.c binder_inner_proc_lock(thread->proc); thread 4544 drivers/android/binder.c t->to_parent = thread->transaction_stack; thread 4545 drivers/android/binder.c t->to_thread = thread; thread 4546 drivers/android/binder.c thread->transaction_stack = t; thread 4547 drivers/android/binder.c binder_inner_proc_unlock(thread->proc); thread 4559 drivers/android/binder.c list_empty(&thread->proc->waiting_threads) && thread 4561 drivers/android/binder.c (thread->looper & (BINDER_LOOPER_STATE_REGISTERED | thread 4568 drivers/android/binder.c proc->pid, thread->pid); thread 4571 drivers/android/binder.c binder_stat_br(proc, thread, BR_SPAWN_LOOPER); thread 4633 drivers/android/binder.c struct binder_thread *thread = NULL; thread 4639 drivers/android/binder.c thread = rb_entry(parent, struct binder_thread, rb_node); thread 4641 drivers/android/binder.c if (current->pid < thread->pid) thread 4643 drivers/android/binder.c else if (current->pid > thread->pid) thread 4646 drivers/android/binder.c return thread; thread 4650 drivers/android/binder.c thread = new_thread; thread 4652 drivers/android/binder.c thread->proc = proc; thread 4653 drivers/android/binder.c thread->pid = current->pid; thread 4654 drivers/android/binder.c atomic_set(&thread->tmp_ref, 0); thread 4655 drivers/android/binder.c init_waitqueue_head(&thread->wait); thread 4656 drivers/android/binder.c INIT_LIST_HEAD(&thread->todo); thread 4657 drivers/android/binder.c rb_link_node(&thread->rb_node, parent, p); thread 4658 drivers/android/binder.c rb_insert_color(&thread->rb_node, &proc->threads); thread 4659 drivers/android/binder.c thread->looper_need_return = true; thread 4660 drivers/android/binder.c thread->return_error.work.type = 
BINDER_WORK_RETURN_ERROR; thread 4661 drivers/android/binder.c thread->return_error.cmd = BR_OK; thread 4662 drivers/android/binder.c thread->reply_error.work.type = BINDER_WORK_RETURN_ERROR; thread 4663 drivers/android/binder.c thread->reply_error.cmd = BR_OK; thread 4665 drivers/android/binder.c return thread; thread 4670 drivers/android/binder.c struct binder_thread *thread; thread 4674 drivers/android/binder.c thread = binder_get_thread_ilocked(proc, NULL); thread 4676 drivers/android/binder.c if (!thread) { thread 4677 drivers/android/binder.c new_thread = kzalloc(sizeof(*thread), GFP_KERNEL); thread 4681 drivers/android/binder.c thread = binder_get_thread_ilocked(proc, new_thread); thread 4683 drivers/android/binder.c if (thread != new_thread) thread 4686 drivers/android/binder.c return thread; thread 4699 drivers/android/binder.c static void binder_free_thread(struct binder_thread *thread) thread 4701 drivers/android/binder.c BUG_ON(!list_empty(&thread->todo)); thread 4703 drivers/android/binder.c binder_proc_dec_tmpref(thread->proc); thread 4704 drivers/android/binder.c kfree(thread); thread 4708 drivers/android/binder.c struct binder_thread *thread) thread 4715 drivers/android/binder.c binder_inner_proc_lock(thread->proc); thread 4727 drivers/android/binder.c atomic_inc(&thread->tmp_ref); thread 4728 drivers/android/binder.c rb_erase(&thread->rb_node, &proc->threads); thread 4729 drivers/android/binder.c t = thread->transaction_stack; thread 4732 drivers/android/binder.c if (t->to_thread == thread) thread 4737 drivers/android/binder.c thread->is_dead = true; thread 4744 drivers/android/binder.c proc->pid, thread->pid, thread 4746 drivers/android/binder.c (t->to_thread == thread) ? "in" : "out"); thread 4748 drivers/android/binder.c if (t->to_thread == thread) { thread 4756 drivers/android/binder.c } else if (t->from == thread) { thread 4776 drivers/android/binder.c if ((thread->looper & BINDER_LOOPER_STATE_POLL) && thread 4777 drivers/android/binder.c waitqueue_active(&thread->wait)) { thread 4778 drivers/android/binder.c wake_up_poll(&thread->wait, EPOLLHUP | POLLFREE); thread 4781 drivers/android/binder.c binder_inner_proc_unlock(thread->proc); thread 4789 drivers/android/binder.c if (thread->looper & BINDER_LOOPER_STATE_POLL) thread 4794 drivers/android/binder.c binder_release_work(proc, &thread->todo); thread 4795 drivers/android/binder.c binder_thread_dec_tmpref(thread); thread 4803 drivers/android/binder.c struct binder_thread *thread = NULL; thread 4806 drivers/android/binder.c thread = binder_get_thread(proc); thread 4807 drivers/android/binder.c if (!thread) thread 4810 drivers/android/binder.c binder_inner_proc_lock(thread->proc); thread 4811 drivers/android/binder.c thread->looper |= BINDER_LOOPER_STATE_POLL; thread 4812 drivers/android/binder.c wait_for_proc_work = binder_available_for_proc_work_ilocked(thread); thread 4814 drivers/android/binder.c binder_inner_proc_unlock(thread->proc); thread 4816 drivers/android/binder.c poll_wait(filp, &thread->wait, wait); thread 4818 drivers/android/binder.c if (binder_has_work(thread, wait_for_proc_work)) thread 4826 drivers/android/binder.c struct binder_thread *thread) thread 4844 drivers/android/binder.c proc->pid, thread->pid, thread 4849 drivers/android/binder.c ret = binder_thread_write(proc, thread, thread 4862 drivers/android/binder.c ret = binder_thread_read(proc, thread, bwr.read_buffer, thread 4879 drivers/android/binder.c proc->pid, thread->pid, thread 5003 drivers/android/binder.c struct binder_thread *thread; 
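The binder_get_thread entries just above show a common lock-friendly get-or-create pattern: look the per-task entry up under the lock, and only if it is missing allocate outside the lock, then repeat the lookup-and-insert so a racing creator wins cleanly and the loser frees its allocation. A simplified userspace sketch of that pattern follows; a pthread mutex and a linked list stand in for binder's inner proc lock and rb-tree, and all names are invented for the sketch.

#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>

/* Simplified get-or-create: list + mutex instead of binder's rb-tree and
 * inner proc lock; the flow mirrors binder_get_thread() in the listing. */
struct mock_thread {
	int pid;
	struct mock_thread *next;
};

static struct mock_thread *threads;
static pthread_mutex_t threads_lock = PTHREAD_MUTEX_INITIALIZER;

/* Lookup under the lock; if new_thread is given and nothing matches,
 * link it in (mirrors the _ilocked() helper). */
static struct mock_thread *get_thread_locked(int pid, struct mock_thread *new_thread)
{
	struct mock_thread *t;

	for (t = threads; t; t = t->next)
		if (t->pid == pid)
			return t;

	if (!new_thread)
		return NULL;

	new_thread->pid = pid;
	new_thread->next = threads;
	threads = new_thread;
	return new_thread;
}

static struct mock_thread *get_thread(int pid)
{
	struct mock_thread *t, *new_thread;

	pthread_mutex_lock(&threads_lock);
	t = get_thread_locked(pid, NULL);
	pthread_mutex_unlock(&threads_lock);
	if (t)
		return t;

	/* Allocate outside the lock, then retry the lookup-and-insert. */
	new_thread = calloc(1, sizeof(*new_thread));
	if (!new_thread)
		return NULL;

	pthread_mutex_lock(&threads_lock);
	t = get_thread_locked(pid, new_thread);
	pthread_mutex_unlock(&threads_lock);
	if (t != new_thread)
		free(new_thread);	/* lost the race; use the winner's entry */
	return t;
}

int main(void)
{
	printf("thread for pid 42: %p\n", (void *)get_thread(42));
	return 0;
}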
thread 5018 drivers/android/binder.c thread = binder_get_thread(proc); thread 5019 drivers/android/binder.c if (thread == NULL) { thread 5026 drivers/android/binder.c ret = binder_ioctl_write_read(filp, cmd, arg, thread); thread 5062 drivers/android/binder.c proc->pid, thread->pid); thread 5063 drivers/android/binder.c binder_thread_release(proc, thread); thread 5064 drivers/android/binder.c thread = NULL; thread 5123 drivers/android/binder.c if (thread) thread 5124 drivers/android/binder.c thread->looper_need_return = false; thread 5312 drivers/android/binder.c struct binder_thread *thread = rb_entry(n, struct binder_thread, rb_node); thread 5314 drivers/android/binder.c thread->looper_need_return = true; thread 5315 drivers/android/binder.c if (thread->looper & BINDER_LOOPER_STATE_WAITING) { thread 5316 drivers/android/binder.c wake_up_interruptible(&thread->wait); thread 5445 drivers/android/binder.c struct binder_thread *thread; thread 5447 drivers/android/binder.c thread = rb_entry(n, struct binder_thread, rb_node); thread 5450 drivers/android/binder.c active_transactions += binder_thread_release(proc, thread); thread 5628 drivers/android/binder.c struct binder_thread *thread, thread 5637 drivers/android/binder.c thread->pid, thread->looper, thread 5638 drivers/android/binder.c thread->looper_need_return, thread 5639 drivers/android/binder.c atomic_read(&thread->tmp_ref)); thread 5641 drivers/android/binder.c t = thread->transaction_stack; thread 5643 drivers/android/binder.c if (t->from == thread) { thread 5644 drivers/android/binder.c print_binder_transaction_ilocked(m, thread->proc, thread 5647 drivers/android/binder.c } else if (t->to_thread == thread) { thread 5648 drivers/android/binder.c print_binder_transaction_ilocked(m, thread->proc, thread 5652 drivers/android/binder.c print_binder_transaction_ilocked(m, thread->proc, thread 5657 drivers/android/binder.c list_for_each_entry(w, &thread->todo, entry) { thread 5658 drivers/android/binder.c print_binder_work_ilocked(m, thread->proc, " ", thread 5873 drivers/android/binder.c struct binder_thread *thread; thread 5887 drivers/android/binder.c list_for_each_entry(thread, &proc->waiting_threads, waiting_thread_node) thread 31 drivers/base/devtmpfs.c static struct task_struct *thread; thread 101 drivers/base/devtmpfs.c if (!thread) thread 127 drivers/base/devtmpfs.c wake_up_process(thread); thread 140 drivers/base/devtmpfs.c if (!thread) thread 157 drivers/base/devtmpfs.c wake_up_process(thread); thread 177 drivers/base/devtmpfs.c d_inode(dentry)->i_private = &thread; thread 237 drivers/base/devtmpfs.c d_inode(dentry)->i_private = &thread; thread 253 drivers/base/devtmpfs.c if (d_inode(dentry)->i_private == &thread) thread 294 drivers/base/devtmpfs.c if (inode->i_private != &thread) thread 369 drivers/base/devtmpfs.c if (!thread) thread 450 drivers/base/devtmpfs.c thread = kthread_run(devtmpfsd, &err, "kdevtmpfs"); thread 451 drivers/base/devtmpfs.c if (!IS_ERR(thread)) { thread 454 drivers/base/devtmpfs.c err = PTR_ERR(thread); thread 455 drivers/base/devtmpfs.c thread = NULL; thread 2610 drivers/block/pktcdvd.c pd->cdrw.thread = kthread_run(kcdrwd, pd, "%s", pd->name); thread 2611 drivers/block/pktcdvd.c if (IS_ERR(pd->cdrw.thread)) { thread 2813 drivers/block/pktcdvd.c if (!IS_ERR(pd->cdrw.thread)) thread 2814 drivers/block/pktcdvd.c kthread_stop(pd->cdrw.thread); thread 596 drivers/bluetooth/btmrvl_main.c struct btmrvl_thread *thread = data; thread 597 drivers/bluetooth/btmrvl_main.c struct btmrvl_private *priv = thread->priv; 
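The devtmpfs and pktcdvd entries above, and the ipmi, mxs-dcp, dmatest and GPU-scheduler entries that follow, all manage their worker with the same lifecycle: kthread_run() at setup, an IS_ERR() check for a failed start, wake_up_process() to kick the worker, and kthread_stop() at teardown. Below is a userspace analogue of that start/stop discipline using pthreads; it is only an analogy with invented names, not the kernel kthread API.

#include <pthread.h>
#include <stdatomic.h>
#include <stdio.h>
#include <unistd.h>

/* Userspace stand-in for the kthread_run()/kthread_stop() discipline seen
 * in the driver entries: a worker loops until it is asked to stop. */
static pthread_t worker;
static atomic_bool should_stop;		/* plays the role of kthread_should_stop() */

static void *worker_fn(void *arg)
{
	(void)arg;
	while (!atomic_load(&should_stop)) {
		/* ... service pending work, then sleep or wait ... */
		usleep(1000);
	}
	return NULL;
}

static int start_worker(void)
{
	/* kthread_run() analogue: create and immediately run the worker. */
	return pthread_create(&worker, NULL, worker_fn, NULL);
}

static void stop_worker(void)
{
	/* kthread_stop() analogue: flag the worker and wait for it to exit. */
	atomic_store(&should_stop, true);
	pthread_join(worker, NULL);
}

int main(void)
{
	if (start_worker()) {		/* like checking IS_ERR(thread) */
		fprintf(stderr, "start_worker failed\n");
		return 1;
	}
	usleep(10000);
	stop_worker();
	return 0;
}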
thread 606 drivers/bluetooth/btmrvl_main.c add_wait_queue(&thread->wait_q, &wait); thread 624 drivers/bluetooth/btmrvl_main.c remove_wait_queue(&thread->wait_q, &wait); thread 242 drivers/char/ipmi/ipmi_si_intf.c struct task_struct *thread; thread 357 drivers/char/ipmi/ipmi_si_intf.c if (smi_info->thread) thread 358 drivers/char/ipmi/ipmi_si_intf.c wake_up_process(smi_info->thread); thread 873 drivers/char/ipmi/ipmi_si_intf.c if (smi_info->thread) thread 874 drivers/char/ipmi/ipmi_si_intf.c wake_up_process(smi_info->thread); thread 1183 drivers/char/ipmi/ipmi_si_intf.c new_smi->thread = kthread_run(ipmi_thread, new_smi, thread 1185 drivers/char/ipmi/ipmi_si_intf.c if (IS_ERR(new_smi->thread)) { thread 1189 drivers/char/ipmi/ipmi_si_intf.c PTR_ERR(new_smi->thread)); thread 1190 drivers/char/ipmi/ipmi_si_intf.c new_smi->thread = NULL; thread 1840 drivers/char/ipmi/ipmi_si_intf.c if (smi_info->thread != NULL) { thread 1841 drivers/char/ipmi/ipmi_si_intf.c kthread_stop(smi_info->thread); thread 1842 drivers/char/ipmi/ipmi_si_intf.c smi_info->thread = NULL; thread 268 drivers/char/ipmi/ipmi_ssif.c struct task_struct *thread; thread 1301 drivers/char/ipmi/ipmi_ssif.c if (ssif_info->thread) { thread 1303 drivers/char/ipmi/ipmi_ssif.c kthread_stop(ssif_info->thread); thread 1891 drivers/char/ipmi/ipmi_ssif.c ssif_info->thread = kthread_run(ipmi_ssif_thread, ssif_info, thread 1893 drivers/char/ipmi/ipmi_ssif.c if (IS_ERR(ssif_info->thread)) { thread 1894 drivers/char/ipmi/ipmi_ssif.c rv = PTR_ERR(ssif_info->thread); thread 139 drivers/clocksource/timer-nps.c int thread; thread 147 drivers/clocksource/timer-nps.c thread = read_aux_reg(CTOP_AUX_THREAD_ID); thread 148 drivers/clocksource/timer-nps.c enabled_threads &= ~(1 << thread); thread 163 drivers/clocksource/timer-nps.c int thread; thread 169 drivers/clocksource/timer-nps.c thread = read_aux_reg(CTOP_AUX_THREAD_ID); thread 171 drivers/clocksource/timer-nps.c enabled_threads |= (1 << thread); thread 79 drivers/crypto/mxs-dcp.c struct task_struct *thread[DCP_MAX_CHANS]; thread 467 drivers/crypto/mxs-dcp.c wake_up_process(sdcp->thread[actx->chan]); thread 781 drivers/crypto/mxs-dcp.c wake_up_process(sdcp->thread[actx->chan]); thread 1085 drivers/crypto/mxs-dcp.c sdcp->thread[DCP_CHAN_HASH_SHA] = kthread_run(dcp_chan_thread_sha, thread 1087 drivers/crypto/mxs-dcp.c if (IS_ERR(sdcp->thread[DCP_CHAN_HASH_SHA])) { thread 1089 drivers/crypto/mxs-dcp.c ret = PTR_ERR(sdcp->thread[DCP_CHAN_HASH_SHA]); thread 1093 drivers/crypto/mxs-dcp.c sdcp->thread[DCP_CHAN_CRYPTO] = kthread_run(dcp_chan_thread_aes, thread 1095 drivers/crypto/mxs-dcp.c if (IS_ERR(sdcp->thread[DCP_CHAN_CRYPTO])) { thread 1097 drivers/crypto/mxs-dcp.c ret = PTR_ERR(sdcp->thread[DCP_CHAN_CRYPTO]); thread 1143 drivers/crypto/mxs-dcp.c kthread_stop(sdcp->thread[DCP_CHAN_CRYPTO]); thread 1146 drivers/crypto/mxs-dcp.c kthread_stop(sdcp->thread[DCP_CHAN_HASH_SHA]); thread 1167 drivers/crypto/mxs-dcp.c kthread_stop(sdcp->thread[DCP_CHAN_HASH_SHA]); thread 1168 drivers/crypto/mxs-dcp.c kthread_stop(sdcp->thread[DCP_CHAN_CRYPTO]); thread 240 drivers/dma/dmatest.c struct dmatest_thread *thread; thread 242 drivers/dma/dmatest.c list_for_each_entry(thread, &dtc->threads, node) { thread 243 drivers/dma/dmatest.c if (!thread->done && !thread->pending) thread 256 drivers/dma/dmatest.c struct dmatest_thread *thread; thread 258 drivers/dma/dmatest.c list_for_each_entry(thread, &dtc->threads, node) { thread 259 drivers/dma/dmatest.c if (thread->pending) thread 420 drivers/dma/dmatest.c struct dmatest_thread 
*thread = thread 422 drivers/dma/dmatest.c if (!thread->done) { thread 554 drivers/dma/dmatest.c struct dmatest_thread *thread = data; thread 555 drivers/dma/dmatest.c struct dmatest_done *done = &thread->test_done; thread 588 drivers/dma/dmatest.c thread->pending = false; thread 589 drivers/dma/dmatest.c info = thread->info; thread 591 drivers/dma/dmatest.c chan = thread->chan; thread 593 drivers/dma/dmatest.c src = &thread->src; thread 594 drivers/dma/dmatest.c dst = &thread->dst; thread 595 drivers/dma/dmatest.c if (thread->type == DMA_MEMCPY) { thread 599 drivers/dma/dmatest.c } else if (thread->type == DMA_MEMSET) { thread 604 drivers/dma/dmatest.c } else if (thread->type == DMA_XOR) { thread 610 drivers/dma/dmatest.c } else if (thread->type == DMA_PQ) { thread 761 drivers/dma/dmatest.c if (thread->type == DMA_MEMCPY) thread 765 drivers/dma/dmatest.c else if (thread->type == DMA_MEMSET) thread 770 drivers/dma/dmatest.c else if (thread->type == DMA_XOR) thread 775 drivers/dma/dmatest.c else if (thread->type == DMA_PQ) { thread 812 drivers/dma/dmatest.c wait_event_freezable_timeout(thread->done_wait, thread 907 drivers/dma/dmatest.c thread->done = true; thread 915 drivers/dma/dmatest.c struct dmatest_thread *thread; thread 919 drivers/dma/dmatest.c list_for_each_entry_safe(thread, _thread, &dtc->threads, node) { thread 920 drivers/dma/dmatest.c ret = kthread_stop(thread->task); thread 922 drivers/dma/dmatest.c thread->task->comm, ret); thread 923 drivers/dma/dmatest.c list_del(&thread->node); thread 924 drivers/dma/dmatest.c put_task_struct(thread->task); thread 925 drivers/dma/dmatest.c kfree(thread); thread 938 drivers/dma/dmatest.c struct dmatest_thread *thread; thread 955 drivers/dma/dmatest.c thread = kzalloc(sizeof(struct dmatest_thread), GFP_KERNEL); thread 956 drivers/dma/dmatest.c if (!thread) { thread 961 drivers/dma/dmatest.c thread->info = info; thread 962 drivers/dma/dmatest.c thread->chan = dtc->chan; thread 963 drivers/dma/dmatest.c thread->type = type; thread 964 drivers/dma/dmatest.c thread->test_done.wait = &thread->done_wait; thread 965 drivers/dma/dmatest.c init_waitqueue_head(&thread->done_wait); thread 967 drivers/dma/dmatest.c thread->task = kthread_create(dmatest_func, thread, "%s-%s%u", thread 969 drivers/dma/dmatest.c if (IS_ERR(thread->task)) { thread 972 drivers/dma/dmatest.c kfree(thread); thread 977 drivers/dma/dmatest.c get_task_struct(thread->task); thread 978 drivers/dma/dmatest.c list_add_tail(&thread->node, &dtc->threads); thread 979 drivers/dma/dmatest.c thread->pending = true; thread 1102 drivers/dma/dmatest.c struct dmatest_thread *thread; thread 1105 drivers/dma/dmatest.c list_for_each_entry(thread, &dtc->threads, node) { thread 1106 drivers/dma/dmatest.c wake_up_process(thread->task); thread 1275 drivers/dma/dmatest.c struct dmatest_thread *thread; thread 1278 drivers/dma/dmatest.c list_for_each_entry(thread, &dtc->threads, node) { thread 124 drivers/dma/img-mdc-dma.c unsigned int thread; thread 217 drivers/dma/img-mdc-dma.c (mchan->thread << MDC_READ_PORT_CONFIG_STHREAD_SHIFT) | thread 218 drivers/dma/img-mdc-dma.c (mchan->thread << MDC_READ_PORT_CONFIG_RTHREAD_SHIFT) | thread 219 drivers/dma/img-mdc-dma.c (mchan->thread << MDC_READ_PORT_CONFIG_WTHREAD_SHIFT); thread 550 drivers/dma/img-mdc-dma.c val = (mchan->thread << MDC_READ_PORT_CONFIG_STHREAD_SHIFT) | thread 551 drivers/dma/img-mdc-dma.c (mchan->thread << MDC_READ_PORT_CONFIG_RTHREAD_SHIFT) | thread 552 drivers/dma/img-mdc-dma.c (mchan->thread << MDC_READ_PORT_CONFIG_WTHREAD_SHIFT); thread 
824 drivers/dma/img-mdc-dma.c mchan->thread = dma_spec->args[2]; thread 440 drivers/dma/pl330.c struct pl330_thread *thread; thread 2041 drivers/dma/pl330.c ret = pl330_submit_req(pch->thread, desc); thread 2078 drivers/dma/pl330.c spin_lock(&pch->thread->dmac->lock); thread 2079 drivers/dma/pl330.c _stop(pch->thread); thread 2080 drivers/dma/pl330.c spin_unlock(&pch->thread->dmac->lock); thread 2085 drivers/dma/pl330.c spin_lock(&pch->thread->dmac->lock); thread 2086 drivers/dma/pl330.c _start(pch->thread); thread 2087 drivers/dma/pl330.c spin_unlock(&pch->thread->dmac->lock); thread 2103 drivers/dma/pl330.c spin_lock(&pch->thread->dmac->lock); thread 2104 drivers/dma/pl330.c _start(pch->thread); thread 2105 drivers/dma/pl330.c spin_unlock(&pch->thread->dmac->lock); thread 2161 drivers/dma/pl330.c pch->thread = pl330_request_channel(pl330); thread 2162 drivers/dma/pl330.c if (!pch->thread) { thread 2282 drivers/dma/pl330.c _stop(pch->thread); thread 2283 drivers/dma/pl330.c pch->thread->req[0].desc = NULL; thread 2284 drivers/dma/pl330.c pch->thread->req[1].desc = NULL; thread 2285 drivers/dma/pl330.c pch->thread->req_running = -1; thread 2331 drivers/dma/pl330.c _stop(pch->thread); thread 2352 drivers/dma/pl330.c pl330_release_channel(pch->thread); thread 2353 drivers/dma/pl330.c pch->thread = NULL; thread 2367 drivers/dma/pl330.c struct pl330_thread *thrd = pch->thread; thread 2410 drivers/dma/pl330.c spin_lock(&pch->thread->dmac->lock); thread 2412 drivers/dma/pl330.c if (pch->thread->req_running != -1) thread 2413 drivers/dma/pl330.c running = pch->thread->req[pch->thread->req_running].desc; thread 2415 drivers/dma/pl330.c last_enq = pch->thread->req[pch->thread->lstenq].desc; thread 2453 drivers/dma/pl330.c spin_unlock(&pch->thread->dmac->lock); thread 2924 drivers/dma/pl330.c if (!pch->thread || thrd->id != pch->thread->id) thread 3107 drivers/dma/pl330.c pch->thread = NULL; thread 3190 drivers/dma/pl330.c if (pch->thread) { thread 3233 drivers/dma/pl330.c if (pch->thread) { thread 394 drivers/firmware/psci/psci_checker.c struct task_struct *thread; thread 405 drivers/firmware/psci/psci_checker.c thread = kthread_create_on_cpu(suspend_test_thread, thread 408 drivers/firmware/psci/psci_checker.c if (IS_ERR(thread)) thread 411 drivers/firmware/psci/psci_checker.c threads[nb_threads++] = thread; thread 213 drivers/firmware/tegra/bpmp-tegra186.c unsigned int index = bpmp->soc->channels.thread.offset + i; thread 196 drivers/firmware/tegra/bpmp-tegra210.c unsigned int index = bpmp->soc->channels.thread.offset + i; thread 84 drivers/firmware/tegra/bpmp.c count = bpmp->soc->channels.thread.count; thread 256 drivers/firmware/tegra/bpmp.c unsigned long timeout = bpmp->soc->channels.thread.timeout; thread 257 drivers/firmware/tegra/bpmp.c unsigned int count = bpmp->soc->channels.thread.count; thread 376 drivers/firmware/tegra/bpmp.c timeout = usecs_to_jiffies(bpmp->soc->channels.thread.timeout); thread 665 drivers/firmware/tegra/bpmp.c count = bpmp->soc->channels.thread.count; thread 704 drivers/firmware/tegra/bpmp.c bpmp->threaded.count = bpmp->soc->channels.thread.count; thread 818 drivers/firmware/tegra/bpmp.c .thread = { thread 841 drivers/firmware/tegra/bpmp.c .thread = { thread 695 drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c uint32_t offset, se, sh, cu, wave, simd, thread, bank, *data; thread 707 drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c thread = (*pos & GENMASK_ULL(59, 52)) >> 52; thread 720 drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c adev->gfx.funcs->read_wave_vgprs(adev, simd, wave, 
thread, offset, size>>2, data); thread 869 drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c if (!ring || !ring->sched.thread) thread 871 drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c kthread_park(ring->sched.thread); thread 885 drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c if (!ring || !ring->sched.thread) thread 887 drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c kthread_unpark(ring->sched.thread); thread 1032 drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c if (!ring || !ring->funcs->preempt_ib || !ring->sched.thread) thread 1048 drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c kthread_park(ring->sched.thread); thread 1084 drivers/gpu/drm/amd/amdgpu/amdgpu_debugfs.c kthread_unpark(ring->sched.thread); thread 3562 drivers/gpu/drm/amd/amdgpu/amdgpu_device.c if (!ring || !ring->sched.thread) thread 3847 drivers/gpu/drm/amd/amdgpu/amdgpu_device.c if (!ring || !ring->sched.thread) thread 3920 drivers/gpu/drm/amd/amdgpu/amdgpu_device.c if (!ring || !ring->sched.thread) thread 193 drivers/gpu/drm/amd/amdgpu/amdgpu_gfx.h uint32_t wave, uint32_t thread, uint32_t start, thread 1119 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c uint32_t thread, uint32_t regno, thread 1125 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c (thread << SQ_IND_INDEX__WORKITEM_ID__SHIFT) | thread 1169 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c uint32_t wave, uint32_t thread, thread 1174 drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c adev, wave, thread, thread 2998 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c uint32_t wave, uint32_t thread, thread 3005 drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c (thread << SQ_IND_INDEX__THREAD_ID__SHIFT) | thread 4152 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c uint32_t wave, uint32_t thread, thread 4159 drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c (thread << SQ_IND_INDEX__THREAD_ID__SHIFT) | thread 5252 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c uint32_t wave, uint32_t thread, thread 5259 drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c (thread << SQ_IND_INDEX__THREAD_ID__SHIFT) | thread 1787 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c uint32_t wave, uint32_t thread, thread 1794 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c (thread << SQ_IND_INDEX__THREAD_ID__SHIFT) | thread 1831 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c uint32_t wave, uint32_t thread, thread 1836 drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c adev, simd, wave, thread, thread 63 drivers/gpu/drm/amd/amdkfd/kfd_process.c static struct kfd_process *find_process(const struct task_struct *thread); thread 65 drivers/gpu/drm/amd/amdkfd/kfd_process.c static struct kfd_process *create_process(const struct task_struct *thread); thread 270 drivers/gpu/drm/amd/amdkfd/kfd_process.c struct task_struct *thread = current; thread 273 drivers/gpu/drm/amd/amdkfd/kfd_process.c if (!thread->mm) thread 277 drivers/gpu/drm/amd/amdkfd/kfd_process.c if (thread->group_leader->mm != thread->mm) thread 288 drivers/gpu/drm/amd/amdkfd/kfd_process.c process = find_process(thread); thread 292 drivers/gpu/drm/amd/amdkfd/kfd_process.c process = create_process(thread); thread 332 drivers/gpu/drm/amd/amdkfd/kfd_process.c struct kfd_process *kfd_get_process(const struct task_struct *thread) thread 336 drivers/gpu/drm/amd/amdkfd/kfd_process.c if (!thread->mm) thread 340 drivers/gpu/drm/amd/amdkfd/kfd_process.c if (thread->group_leader->mm != thread->mm) thread 343 drivers/gpu/drm/amd/amdkfd/kfd_process.c process = find_process(thread); thread 362 drivers/gpu/drm/amd/amdkfd/kfd_process.c static struct kfd_process *find_process(const struct task_struct *thread) thread 368 drivers/gpu/drm/amd/amdkfd/kfd_process.c p = find_process_by_mm(thread->mm); thread 622 
drivers/gpu/drm/amd/amdkfd/kfd_process.c static struct kfd_process *create_process(const struct task_struct *thread) thread 633 drivers/gpu/drm/amd/amdkfd/kfd_process.c process->mm = thread->mm; thread 634 drivers/gpu/drm/amd/amdkfd/kfd_process.c process->lead_thread = thread->group_leader; thread 1096 drivers/gpu/drm/i915/gvt/scheduler.c kthread_stop(scheduler->thread[i]); thread 1124 drivers/gpu/drm/i915/gvt/scheduler.c scheduler->thread[i] = kthread_run(workload_thread, param, thread 1126 drivers/gpu/drm/i915/gvt/scheduler.c if (IS_ERR(scheduler->thread[i])) { thread 1128 drivers/gpu/drm/i915/gvt/scheduler.c ret = PTR_ERR(scheduler->thread[i]); thread 50 drivers/gpu/drm/i915/gvt/scheduler.h struct task_struct *thread[I915_NUM_ENGINES]; thread 240 drivers/gpu/drm/msm/msm_drv.c if (priv->event_thread[i].thread) { thread 242 drivers/gpu/drm/msm/msm_drv.c priv->event_thread[i].thread = NULL; thread 506 drivers/gpu/drm/msm/msm_drv.c priv->event_thread[i].thread = thread 510 drivers/gpu/drm/msm/msm_drv.c if (IS_ERR(priv->event_thread[i].thread)) { thread 512 drivers/gpu/drm/msm/msm_drv.c priv->event_thread[i].thread = NULL; thread 516 drivers/gpu/drm/msm/msm_drv.c ret = sched_setscheduler(priv->event_thread[i].thread, thread 131 drivers/gpu/drm/msm/msm_drv.h struct task_struct *thread; thread 292 drivers/gpu/drm/scheduler/sched_entity.c kthread_park(sched->thread); thread 293 drivers/gpu/drm/scheduler/sched_entity.c kthread_unpark(sched->thread); thread 370 drivers/gpu/drm/scheduler/sched_main.c kthread_park(sched->thread); thread 467 drivers/gpu/drm/scheduler/sched_main.c kthread_unpark(sched->thread); thread 799 drivers/gpu/drm/scheduler/sched_main.c sched->thread = kthread_run(drm_sched_main, sched, sched->name); thread 800 drivers/gpu/drm/scheduler/sched_main.c if (IS_ERR(sched->thread)) { thread 801 drivers/gpu/drm/scheduler/sched_main.c ret = PTR_ERR(sched->thread); thread 802 drivers/gpu/drm/scheduler/sched_main.c sched->thread = NULL; thread 821 drivers/gpu/drm/scheduler/sched_main.c if (sched->thread) thread 822 drivers/gpu/drm/scheduler/sched_main.c kthread_stop(sched->thread); thread 369 drivers/gpu/drm/vc4/vc4_gem.c submit_cl(struct drm_device *dev, uint32_t thread, uint32_t start, uint32_t end) thread 376 drivers/gpu/drm/vc4/vc4_gem.c V3D_WRITE(V3D_CTNCA(thread), start); thread 377 drivers/gpu/drm/vc4/vc4_gem.c V3D_WRITE(V3D_CTNEA(thread), end); thread 541 drivers/hv/hv_balloon.c struct task_struct *thread; thread 1697 drivers/hv/hv_balloon.c dm_device.thread = thread 1699 drivers/hv/hv_balloon.c if (IS_ERR(dm_device.thread)) { thread 1700 drivers/hv/hv_balloon.c ret = PTR_ERR(dm_device.thread); thread 1728 drivers/hv/hv_balloon.c kthread_stop(dm->thread); thread 43 drivers/iio/buffer/industrialio-triggered-buffer.c irqreturn_t (*thread)(int irq, void *p), thread 58 drivers/iio/buffer/industrialio-triggered-buffer.c thread, thread 106 drivers/iio/buffer/industrialio-triggered-buffer.c irqreturn_t (*thread)(int irq, void *p), thread 119 drivers/iio/buffer/industrialio-triggered-buffer.c ret = iio_triggered_buffer_setup(indio_dev, h, thread, ops); thread 261 drivers/iio/industrialio-trigger.c ret = request_threaded_irq(pf->irq, pf->h, pf->thread, thread 325 drivers/iio/industrialio-trigger.c irqreturn_t (*thread)(int irq, void *p), thread 345 drivers/iio/industrialio-trigger.c pf->thread = thread; thread 32 drivers/iio/industrialio-triggered-event.c irqreturn_t (*thread)(int irq, void *p)) thread 35 drivers/iio/industrialio-triggered-event.c thread, thread 710 
drivers/infiniband/hw/hfi1/driver.c static noinline int max_packet_exceeded(struct hfi1_packet *packet, int thread) thread 712 drivers/infiniband/hw/hfi1/driver.c if (thread) { thread 724 drivers/infiniband/hw/hfi1/driver.c static inline int check_max_packet(struct hfi1_packet *packet, int thread) thread 729 drivers/infiniband/hw/hfi1/driver.c ret = max_packet_exceeded(packet, thread); thread 733 drivers/infiniband/hw/hfi1/driver.c static noinline int skip_rcv_packet(struct hfi1_packet *packet, int thread) thread 744 drivers/infiniband/hw/hfi1/driver.c ret = check_max_packet(packet, thread); thread 753 drivers/infiniband/hw/hfi1/driver.c static inline int process_rcv_packet(struct hfi1_packet *packet, int thread) thread 794 drivers/infiniband/hw/hfi1/driver.c ret = check_max_packet(packet, thread); thread 834 drivers/infiniband/hw/hfi1/driver.c int handle_receive_interrupt_nodma_rtail(struct hfi1_ctxtdata *rcd, int thread) thread 850 drivers/infiniband/hw/hfi1/driver.c last = process_rcv_packet(&packet, thread); thread 865 drivers/infiniband/hw/hfi1/driver.c int handle_receive_interrupt_dma_rtail(struct hfi1_ctxtdata *rcd, int thread) thread 882 drivers/infiniband/hw/hfi1/driver.c last = process_rcv_packet(&packet, thread); thread 1010 drivers/infiniband/hw/hfi1/driver.c int handle_receive_interrupt(struct hfi1_ctxtdata *rcd, int thread) thread 1067 drivers/infiniband/hw/hfi1/driver.c last = skip_rcv_packet(&packet, thread); thread 1075 drivers/infiniband/hw/hfi1/driver.c last = process_rcv_packet(&packet, thread); thread 1496 drivers/infiniband/hw/hfi1/hfi.h int handle_receive_interrupt(struct hfi1_ctxtdata *rcd, int thread); thread 1497 drivers/infiniband/hw/hfi1/hfi.h int handle_receive_interrupt_nodma_rtail(struct hfi1_ctxtdata *rcd, int thread); thread 1498 drivers/infiniband/hw/hfi1/hfi.h int handle_receive_interrupt_dma_rtail(struct hfi1_ctxtdata *rcd, int thread); thread 117 drivers/infiniband/hw/hfi1/msix.c irq_handler_t handler, irq_handler_t thread, thread 170 drivers/infiniband/hw/hfi1/msix.c ret = pci_request_irq(dd->pcidev, nr, handler, thread, arg, name); thread 301 drivers/isdn/mISDN/stack.c task_cputime(st->thread, &utime, &stime); thread 307 drivers/isdn/mISDN/stack.c dev_name(&st->dev->dev), st->thread->nvcsw, st->thread->nivcsw); thread 316 drivers/isdn/mISDN/stack.c st->thread = NULL; thread 402 drivers/isdn/mISDN/stack.c newst->thread = kthread_run(mISDNStackd, (void *)newst, "mISDN_%s", thread 404 drivers/isdn/mISDN/stack.c if (IS_ERR(newst->thread)) { thread 405 drivers/isdn/mISDN/stack.c err = PTR_ERR(newst->thread); thread 629 drivers/isdn/mISDN/stack.c if (st->thread) { thread 83 drivers/macintosh/therm_adt746x.c struct task_struct *thread; thread 555 drivers/macintosh/therm_adt746x.c th->thread = kthread_run(monitor_task, th, "kfand"); thread 556 drivers/macintosh/therm_adt746x.c if (th->thread == ERR_PTR(-ENOMEM)) { thread 558 drivers/macintosh/therm_adt746x.c th->thread = NULL; thread 574 drivers/macintosh/therm_adt746x.c if (th->thread != NULL) thread 575 drivers/macintosh/therm_adt746x.c kthread_stop(th->thread); thread 66 drivers/mailbox/mtk-cmdq-mailbox.c struct cmdq_thread *thread; thread 76 drivers/mailbox/mtk-cmdq-mailbox.c struct cmdq_thread *thread; thread 81 drivers/mailbox/mtk-cmdq-mailbox.c static int cmdq_thread_suspend(struct cmdq *cmdq, struct cmdq_thread *thread) thread 85 drivers/mailbox/mtk-cmdq-mailbox.c writel(CMDQ_THR_SUSPEND, thread->base + CMDQ_THR_SUSPEND_TASK); thread 88 drivers/mailbox/mtk-cmdq-mailbox.c if (!(readl(thread->base + 
CMDQ_THR_ENABLE_TASK) & CMDQ_THR_ENABLED)) thread 91 drivers/mailbox/mtk-cmdq-mailbox.c if (readl_poll_timeout_atomic(thread->base + CMDQ_THR_CURR_STATUS, thread 94 drivers/mailbox/mtk-cmdq-mailbox.c (u32)(thread->base - cmdq->base)); thread 101 drivers/mailbox/mtk-cmdq-mailbox.c static void cmdq_thread_resume(struct cmdq_thread *thread) thread 103 drivers/mailbox/mtk-cmdq-mailbox.c writel(CMDQ_THR_RESUME, thread->base + CMDQ_THR_SUSPEND_TASK); thread 117 drivers/mailbox/mtk-cmdq-mailbox.c static int cmdq_thread_reset(struct cmdq *cmdq, struct cmdq_thread *thread) thread 121 drivers/mailbox/mtk-cmdq-mailbox.c writel(CMDQ_THR_DO_WARM_RESET, thread->base + CMDQ_THR_WARM_RESET); thread 122 drivers/mailbox/mtk-cmdq-mailbox.c if (readl_poll_timeout_atomic(thread->base + CMDQ_THR_WARM_RESET, thread 126 drivers/mailbox/mtk-cmdq-mailbox.c (u32)(thread->base - cmdq->base)); thread 133 drivers/mailbox/mtk-cmdq-mailbox.c static void cmdq_thread_disable(struct cmdq *cmdq, struct cmdq_thread *thread) thread 135 drivers/mailbox/mtk-cmdq-mailbox.c cmdq_thread_reset(cmdq, thread); thread 136 drivers/mailbox/mtk-cmdq-mailbox.c writel(CMDQ_THR_DISABLED, thread->base + CMDQ_THR_ENABLE_TASK); thread 140 drivers/mailbox/mtk-cmdq-mailbox.c static void cmdq_thread_invalidate_fetched_data(struct cmdq_thread *thread) thread 142 drivers/mailbox/mtk-cmdq-mailbox.c writel(readl(thread->base + CMDQ_THR_CURR_ADDR), thread 143 drivers/mailbox/mtk-cmdq-mailbox.c thread->base + CMDQ_THR_CURR_ADDR); thread 149 drivers/mailbox/mtk-cmdq-mailbox.c struct cmdq_thread *thread = task->thread; thread 151 drivers/mailbox/mtk-cmdq-mailbox.c &thread->task_busy_list, typeof(*task), list_entry); thread 162 drivers/mailbox/mtk-cmdq-mailbox.c cmdq_thread_invalidate_fetched_data(thread); thread 191 drivers/mailbox/mtk-cmdq-mailbox.c static bool cmdq_thread_is_in_wfe(struct cmdq_thread *thread) thread 193 drivers/mailbox/mtk-cmdq-mailbox.c return readl(thread->base + CMDQ_THR_WAIT_TOKEN) & CMDQ_THR_IS_WAITING; thread 196 drivers/mailbox/mtk-cmdq-mailbox.c static void cmdq_thread_wait_end(struct cmdq_thread *thread, thread 199 drivers/mailbox/mtk-cmdq-mailbox.c struct device *dev = thread->chan->mbox->dev; thread 202 drivers/mailbox/mtk-cmdq-mailbox.c if (readl_poll_timeout_atomic(thread->base + CMDQ_THR_CURR_ADDR, thread 222 drivers/mailbox/mtk-cmdq-mailbox.c struct cmdq_thread *thread = task->thread; thread 226 drivers/mailbox/mtk-cmdq-mailbox.c WARN_ON(cmdq_thread_suspend(task->cmdq, thread) < 0); thread 227 drivers/mailbox/mtk-cmdq-mailbox.c next_task = list_first_entry_or_null(&thread->task_busy_list, thread 230 drivers/mailbox/mtk-cmdq-mailbox.c writel(next_task->pa_base, thread->base + CMDQ_THR_CURR_ADDR); thread 231 drivers/mailbox/mtk-cmdq-mailbox.c cmdq_thread_resume(thread); thread 235 drivers/mailbox/mtk-cmdq-mailbox.c struct cmdq_thread *thread) thread 241 drivers/mailbox/mtk-cmdq-mailbox.c irq_flag = readl(thread->base + CMDQ_THR_IRQ_STATUS); thread 242 drivers/mailbox/mtk-cmdq-mailbox.c writel(~irq_flag, thread->base + CMDQ_THR_IRQ_STATUS); thread 250 drivers/mailbox/mtk-cmdq-mailbox.c if (!(readl(thread->base + CMDQ_THR_ENABLE_TASK) & CMDQ_THR_ENABLED)) thread 260 drivers/mailbox/mtk-cmdq-mailbox.c curr_pa = readl(thread->base + CMDQ_THR_CURR_ADDR); thread 262 drivers/mailbox/mtk-cmdq-mailbox.c list_for_each_entry_safe(task, tmp, &thread->task_busy_list, thread 281 drivers/mailbox/mtk-cmdq-mailbox.c if (list_empty(&thread->task_busy_list)) { thread 282 drivers/mailbox/mtk-cmdq-mailbox.c cmdq_thread_disable(cmdq, thread); 
thread 298 drivers/mailbox/mtk-cmdq-mailbox.c struct cmdq_thread *thread = &cmdq->thread[bit]; thread 300 drivers/mailbox/mtk-cmdq-mailbox.c spin_lock_irqsave(&thread->chan->lock, flags); thread 301 drivers/mailbox/mtk-cmdq-mailbox.c cmdq_thread_irq_handler(cmdq, thread); thread 302 drivers/mailbox/mtk-cmdq-mailbox.c spin_unlock_irqrestore(&thread->chan->lock, flags); thread 311 drivers/mailbox/mtk-cmdq-mailbox.c struct cmdq_thread *thread; thread 318 drivers/mailbox/mtk-cmdq-mailbox.c thread = &cmdq->thread[i]; thread 319 drivers/mailbox/mtk-cmdq-mailbox.c if (!list_empty(&thread->task_busy_list)) { thread 354 drivers/mailbox/mtk-cmdq-mailbox.c struct cmdq_thread *thread = (struct cmdq_thread *)chan->con_priv; thread 369 drivers/mailbox/mtk-cmdq-mailbox.c task->thread = thread; thread 372 drivers/mailbox/mtk-cmdq-mailbox.c if (list_empty(&thread->task_busy_list)) { thread 374 drivers/mailbox/mtk-cmdq-mailbox.c WARN_ON(cmdq_thread_reset(cmdq, thread) < 0); thread 376 drivers/mailbox/mtk-cmdq-mailbox.c writel(task->pa_base, thread->base + CMDQ_THR_CURR_ADDR); thread 378 drivers/mailbox/mtk-cmdq-mailbox.c thread->base + CMDQ_THR_END_ADDR); thread 379 drivers/mailbox/mtk-cmdq-mailbox.c writel(thread->priority, thread->base + CMDQ_THR_PRIORITY); thread 380 drivers/mailbox/mtk-cmdq-mailbox.c writel(CMDQ_THR_IRQ_EN, thread->base + CMDQ_THR_IRQ_ENABLE); thread 381 drivers/mailbox/mtk-cmdq-mailbox.c writel(CMDQ_THR_ENABLED, thread->base + CMDQ_THR_ENABLE_TASK); thread 383 drivers/mailbox/mtk-cmdq-mailbox.c WARN_ON(cmdq_thread_suspend(cmdq, thread) < 0); thread 384 drivers/mailbox/mtk-cmdq-mailbox.c curr_pa = readl(thread->base + CMDQ_THR_CURR_ADDR); thread 385 drivers/mailbox/mtk-cmdq-mailbox.c end_pa = readl(thread->base + CMDQ_THR_END_ADDR); thread 391 drivers/mailbox/mtk-cmdq-mailbox.c if (thread->atomic_exec) { thread 393 drivers/mailbox/mtk-cmdq-mailbox.c if (!cmdq_thread_is_in_wfe(thread)) { thread 394 drivers/mailbox/mtk-cmdq-mailbox.c cmdq_thread_resume(thread); thread 395 drivers/mailbox/mtk-cmdq-mailbox.c cmdq_thread_wait_end(thread, end_pa); thread 396 drivers/mailbox/mtk-cmdq-mailbox.c WARN_ON(cmdq_thread_suspend(cmdq, thread) < 0); thread 399 drivers/mailbox/mtk-cmdq-mailbox.c thread->base + CMDQ_THR_CURR_ADDR); thread 411 drivers/mailbox/mtk-cmdq-mailbox.c thread->base + CMDQ_THR_CURR_ADDR); thread 418 drivers/mailbox/mtk-cmdq-mailbox.c thread->base + CMDQ_THR_END_ADDR); thread 419 drivers/mailbox/mtk-cmdq-mailbox.c cmdq_thread_resume(thread); thread 421 drivers/mailbox/mtk-cmdq-mailbox.c list_move_tail(&task->list_entry, &thread->task_busy_list); thread 445 drivers/mailbox/mtk-cmdq-mailbox.c struct cmdq_thread *thread; thread 450 drivers/mailbox/mtk-cmdq-mailbox.c thread = (struct cmdq_thread *)mbox->chans[ind].con_priv; thread 451 drivers/mailbox/mtk-cmdq-mailbox.c thread->priority = sp->args[1]; thread 452 drivers/mailbox/mtk-cmdq-mailbox.c thread->atomic_exec = (sp->args[2] != 0); thread 453 drivers/mailbox/mtk-cmdq-mailbox.c thread->chan = &mbox->chans[ind]; thread 514 drivers/mailbox/mtk-cmdq-mailbox.c cmdq->thread = devm_kcalloc(dev, cmdq->thread_nr, thread 515 drivers/mailbox/mtk-cmdq-mailbox.c sizeof(*cmdq->thread), GFP_KERNEL); thread 516 drivers/mailbox/mtk-cmdq-mailbox.c if (!cmdq->thread) thread 520 drivers/mailbox/mtk-cmdq-mailbox.c cmdq->thread[i].base = cmdq->base + CMDQ_THR_BASE + thread 522 drivers/mailbox/mtk-cmdq-mailbox.c INIT_LIST_HEAD(&cmdq->thread[i].task_busy_list); thread 523 drivers/mailbox/mtk-cmdq-mailbox.c cmdq->mbox.chans[i].con_priv = (void 
*)&cmdq->thread[i]; thread 3714 drivers/md/dm-raid.c if (!mddev->suspended && mddev->thread) thread 3715 drivers/md/dm-raid.c md_wakeup_thread(mddev->thread); thread 1253 drivers/md/md-bitmap.c mddev->thread->timeout = MAX_SCHEDULE_TIMEOUT; thread 1349 drivers/md/md-bitmap.c mddev->thread->timeout = thread 1801 drivers/md/md-bitmap.c if (mddev->thread) thread 1802 drivers/md/md-bitmap.c mddev->thread->timeout = MAX_SCHEDULE_TIMEOUT; thread 1945 drivers/md/md-bitmap.c mddev->thread->timeout = mddev->bitmap_info.daemon_sleep; thread 1946 drivers/md/md-bitmap.c md_wakeup_thread(mddev->thread); thread 2361 drivers/md/md-bitmap.c md_wakeup_thread(mddev->thread); thread 2445 drivers/md/md-bitmap.c if (mddev->thread) { thread 2450 drivers/md/md-bitmap.c if (mddev->thread->timeout < MAX_SCHEDULE_TIMEOUT) { thread 2451 drivers/md/md-bitmap.c mddev->thread->timeout = timeout; thread 2452 drivers/md/md-bitmap.c md_wakeup_thread(mddev->thread); thread 286 drivers/md/md-cluster.c static void recover_bitmaps(struct md_thread *thread) thread 288 drivers/md/md-cluster.c struct mddev *mddev = thread->mddev; thread 343 drivers/md/md-cluster.c md_wakeup_thread(mddev->thread); thread 458 drivers/md/md-cluster.c md_wakeup_thread(mddev->thread); thread 533 drivers/md/md-cluster.c wait_event(mddev->thread->wqueue, thread 550 drivers/md/md-cluster.c md_wakeup_thread(mddev->thread); thread 621 drivers/md/md-cluster.c static void recv_daemon(struct md_thread *thread) thread 623 drivers/md/md-cluster.c struct md_cluster_info *cinfo = thread->mddev->cluster_info; thread 639 drivers/md/md-cluster.c ret = process_recvd_msg(thread->mddev, &msg); thread 683 drivers/md/md-cluster.c md_wakeup_thread(mddev->thread); thread 1052 drivers/md/md-cluster.c md_wakeup_thread(mddev->thread); thread 60 drivers/md/md-multipath.c md_wakeup_thread(mddev->thread); thread 313 drivers/md/md-multipath.c static void multipathd(struct md_thread *thread) thread 315 drivers/md/md-multipath.c struct mddev *mddev = thread->mddev; thread 432 drivers/md/md-multipath.c mddev->thread = md_register_thread(multipathd, mddev, thread 434 drivers/md/md-multipath.c if (!mddev->thread) thread 423 drivers/md/md.c WARN_ON_ONCE(mddev->thread && current == mddev->thread->tsk); thread 449 drivers/md/md.c md_wakeup_thread(mddev->thread); thread 762 drivers/md/md.c md_wakeup_thread(mddev->thread); thread 2771 drivers/md/md.c md_wakeup_thread(mddev->thread); thread 2884 drivers/md/md.c md_wakeup_thread(mddev->thread); thread 2913 drivers/md/md.c md_wakeup_thread(rdev->mddev->thread); thread 2949 drivers/md/md.c md_wakeup_thread(rdev->mddev->thread); thread 3074 drivers/md/md.c md_wakeup_thread(rdev->mddev->thread); thread 3979 drivers/md/md.c if (!mddev->thread) thread 4295 drivers/md/md.c md_wakeup_thread(mddev->thread); thread 4738 drivers/md/md.c md_wakeup_thread(mddev->thread); thread 5586 drivers/md/md.c md_wakeup_thread(mddev->thread); thread 5881 drivers/md/md.c md_wakeup_thread(mddev->thread); thread 5903 drivers/md/md.c md_wakeup_thread(mddev->thread); thread 5948 drivers/md/md.c md_wakeup_thread(mddev->thread); thread 6047 drivers/md/md.c md_unregister_thread(&mddev->thread); thread 6090 drivers/md/md.c md_wakeup_thread(mddev->thread); thread 6116 drivers/md/md.c md_wakeup_thread(mddev->thread); thread 6131 drivers/md/md.c md_wakeup_thread(mddev->thread); thread 6154 drivers/md/md.c md_wakeup_thread(mddev->thread); thread 6179 drivers/md/md.c md_wakeup_thread(mddev->thread); thread 6728 drivers/md/md.c if (mddev->thread) thread 6729 drivers/md/md.c 
md_wakeup_thread(mddev->thread); thread 6797 drivers/md/md.c if (!mddev->thread) thread 6804 drivers/md/md.c md_wakeup_thread(mddev->thread); thread 6818 drivers/md/md.c if (!mddev->pers->quiesce || !mddev->thread) thread 7146 drivers/md/md.c if (mddev->pers->quiesce == NULL || mddev->thread == NULL) { thread 7675 drivers/md/md.c struct md_thread *thread = arg; thread 7701 drivers/md/md.c (thread->wqueue, thread 7702 drivers/md/md.c test_bit(THREAD_WAKEUP, &thread->flags) thread 7704 drivers/md/md.c thread->timeout); thread 7706 drivers/md/md.c clear_bit(THREAD_WAKEUP, &thread->flags); thread 7710 drivers/md/md.c thread->run(thread); thread 7716 drivers/md/md.c void md_wakeup_thread(struct md_thread *thread) thread 7718 drivers/md/md.c if (thread) { thread 7719 drivers/md/md.c pr_debug("md: waking up MD thread %s.\n", thread->tsk->comm); thread 7720 drivers/md/md.c set_bit(THREAD_WAKEUP, &thread->flags); thread 7721 drivers/md/md.c wake_up(&thread->wqueue); thread 7729 drivers/md/md.c struct md_thread *thread; thread 7731 drivers/md/md.c thread = kzalloc(sizeof(struct md_thread), GFP_KERNEL); thread 7732 drivers/md/md.c if (!thread) thread 7735 drivers/md/md.c init_waitqueue_head(&thread->wqueue); thread 7737 drivers/md/md.c thread->run = run; thread 7738 drivers/md/md.c thread->mddev = mddev; thread 7739 drivers/md/md.c thread->timeout = MAX_SCHEDULE_TIMEOUT; thread 7740 drivers/md/md.c thread->tsk = kthread_run(md_thread, thread, thread 7742 drivers/md/md.c mdname(thread->mddev), thread 7744 drivers/md/md.c if (IS_ERR(thread->tsk)) { thread 7745 drivers/md/md.c kfree(thread); thread 7748 drivers/md/md.c return thread; thread 7754 drivers/md/md.c struct md_thread *thread = *threadp; thread 7755 drivers/md/md.c if (!thread) thread 7757 drivers/md/md.c pr_debug("interrupting MD-thread pid %d\n", task_pid_nr(thread->tsk)); thread 7765 drivers/md/md.c kthread_stop(thread->tsk); thread 7766 drivers/md/md.c kfree(thread); thread 7783 drivers/md/md.c md_wakeup_thread(mddev->thread); thread 8268 drivers/md/md.c md_wakeup_thread(mddev->thread); thread 8293 drivers/md/md.c md_wakeup_thread(mddev->thread); thread 8309 drivers/md/md.c md_wakeup_thread(mddev->thread); thread 8352 drivers/md/md.c md_wakeup_thread(mddev->thread); thread 8401 drivers/md/md.c void md_do_sync(struct md_thread *thread) thread 8403 drivers/md/md.c struct mddev *mddev = thread->mddev; thread 8816 drivers/md/md.c md_wakeup_thread(mddev->thread); thread 9252 drivers/md/md.c md_wakeup_thread(rdev->mddev->thread); thread 9407 drivers/md/md.c md_wakeup_thread(mddev->thread); thread 336 drivers/md/md.h struct md_thread *thread; /* management thread */ thread 666 drivers/md/md.h void (*run) (struct md_thread *thread); thread 690 drivers/md/md.h void (*run)(struct md_thread *thread), thread 694 drivers/md/md.h extern void md_wakeup_thread(struct md_thread *thread); thread 713 drivers/md/md.h extern void md_do_sync(struct md_thread *thread); thread 763 drivers/md/md.h md_wakeup_thread(mddev->thread); thread 271 drivers/md/raid1.c md_wakeup_thread(mddev->thread); thread 1179 drivers/md/raid1.c md_wakeup_thread(mddev->thread); thread 1368 drivers/md/raid1.c md_wakeup_thread(mddev->thread); thread 1556 drivers/md/raid1.c md_wakeup_thread(mddev->thread); thread 2471 drivers/md/raid1.c md_wakeup_thread(conf->mddev->thread); thread 2521 drivers/md/raid1.c static void raid1d(struct md_thread *thread) thread 2523 drivers/md/raid1.c struct mddev *mddev = thread->mddev; thread 3055 drivers/md/raid1.c conf->thread = md_register_thread(raid1d, 
mddev, "raid1"); thread 3056 drivers/md/raid1.c if (!conf->thread) thread 3152 drivers/md/raid1.c mddev->thread = conf->thread; thread 3153 drivers/md/raid1.c conf->thread = NULL; thread 3170 drivers/md/raid1.c md_unregister_thread(&mddev->thread); thread 3329 drivers/md/raid1.c md_wakeup_thread(mddev->thread); thread 134 drivers/md/raid1.h struct md_thread *thread; thread 284 drivers/md/raid10.c md_wakeup_thread(mddev->thread); thread 1083 drivers/md/raid10.c md_wakeup_thread(mddev->thread); thread 1292 drivers/md/raid10.c md_wakeup_thread(mddev->thread); thread 1333 drivers/md/raid10.c md_wakeup_thread(mddev->thread); thread 1342 drivers/md/raid10.c md_wakeup_thread(mddev->thread); thread 2701 drivers/md/raid10.c md_wakeup_thread(conf->mddev->thread); thread 2711 drivers/md/raid10.c static void raid10d(struct md_thread *thread) thread 2713 drivers/md/raid10.c struct mddev *mddev = thread->mddev; thread 3705 drivers/md/raid10.c conf->thread = md_register_thread(raid10d, mddev, "raid10"); thread 3706 drivers/md/raid10.c if (!conf->thread) thread 3759 drivers/md/raid10.c mddev->thread = conf->thread; thread 3760 drivers/md/raid10.c conf->thread = NULL; thread 3932 drivers/md/raid10.c md_unregister_thread(&mddev->thread); thread 4525 drivers/md/raid10.c md_wakeup_thread(mddev->thread); thread 104 drivers/md/raid10.h struct md_thread *thread; thread 605 drivers/md/raid5-cache.c md_wakeup_thread(log->rdev->mddev->thread); thread 1496 drivers/md/raid5-cache.c md_wakeup_thread(conf->mddev->thread); thread 1524 drivers/md/raid5-cache.c md_wakeup_thread(log->rdev->mddev->thread); thread 1551 drivers/md/raid5-cache.c static void r5l_reclaim_thread(struct md_thread *thread) thread 1553 drivers/md/raid5-cache.c struct mddev *mddev = thread->mddev; thread 2780 drivers/md/raid5-cache.c md_wakeup_thread(conf->mddev->thread); thread 2836 drivers/md/raid5-cache.c md_wakeup_thread(conf->mddev->thread); thread 611 drivers/md/raid5-ppl.c md_wakeup_thread(conf->mddev->thread); thread 190 drivers/md/raid5.c md_wakeup_thread(conf->mddev->thread); thread 262 drivers/md/raid5.c md_wakeup_thread(conf->mddev->thread); thread 268 drivers/md/raid5.c md_wakeup_thread(conf->mddev->thread); thread 349 drivers/md/raid5.c md_wakeup_thread(conf->mddev->thread); thread 395 drivers/md/raid5.c if (unlikely(!conf->mddev->thread) || thread 400 drivers/md/raid5.c md_wakeup_thread(conf->mddev->thread); thread 832 drivers/md/raid5.c md_wakeup_thread(conf->mddev->thread); thread 3458 drivers/md/raid5.c md_wakeup_thread(conf->mddev->thread); thread 3844 drivers/md/raid5.c md_wakeup_thread(conf->mddev->thread); thread 5055 drivers/md/raid5.c md_wakeup_thread(conf->mddev->thread); thread 5145 drivers/md/raid5.c md_wakeup_thread(conf->mddev->thread); thread 5717 drivers/md/raid5.c md_wakeup_thread(mddev->thread); thread 5900 drivers/md/raid5.c md_wakeup_thread(mddev->thread); thread 6006 drivers/md/raid5.c md_wakeup_thread(mddev->thread); thread 6280 drivers/md/raid5.c static void raid5d(struct md_thread *thread) thread 6282 drivers/md/raid5.c struct mddev *mddev = thread->mddev; thread 7106 drivers/md/raid5.c conf->thread = md_register_thread(raid5d, mddev, pers_name); thread 7107 drivers/md/raid5.c if (!conf->thread) { thread 7310 drivers/md/raid5.c mddev->thread = conf->thread; thread 7311 drivers/md/raid5.c conf->thread = NULL; thread 7495 drivers/md/raid5.c md_unregister_thread(&mddev->thread); thread 8227 drivers/md/raid5.c md_wakeup_thread(mddev->thread); thread 676 drivers/md/raid5.h struct md_thread *thread; thread 2771 
drivers/media/common/videobuf2/videobuf2-core.c struct task_struct *thread; thread 2865 drivers/media/common/videobuf2/videobuf2-core.c threadio->thread = kthread_run(vb2_thread, q, "vb2-%s", thread_name); thread 2866 drivers/media/common/videobuf2/videobuf2-core.c if (IS_ERR(threadio->thread)) { thread 2867 drivers/media/common/videobuf2/videobuf2-core.c ret = PTR_ERR(threadio->thread); thread 2868 drivers/media/common/videobuf2/videobuf2-core.c threadio->thread = NULL; thread 2891 drivers/media/common/videobuf2/videobuf2-core.c err = kthread_stop(threadio->thread); thread 2893 drivers/media/common/videobuf2/videobuf2-core.c threadio->thread = NULL; thread 135 drivers/media/dvb-core/dvb_ca_en50221.c struct task_struct *thread; thread 1019 drivers/media/dvb-core/dvb_ca_en50221.c wake_up_process(ca->thread); thread 1899 drivers/media/dvb-core/dvb_ca_en50221.c ca->thread = kthread_run(dvb_ca_en50221_thread, ca, "kdvb-ca-%i:%i", thread 1901 drivers/media/dvb-core/dvb_ca_en50221.c if (IS_ERR(ca->thread)) { thread 1902 drivers/media/dvb-core/dvb_ca_en50221.c ret = PTR_ERR(ca->thread); thread 1934 drivers/media/dvb-core/dvb_ca_en50221.c kthread_stop(ca->thread); thread 100 drivers/media/dvb-core/dvb_frontend.c struct task_struct *thread; thread 788 drivers/media/dvb-core/dvb_frontend.c fepriv->thread = NULL; thread 811 drivers/media/dvb-core/dvb_frontend.c if (!fepriv->thread) thread 814 drivers/media/dvb-core/dvb_frontend.c kthread_stop(fepriv->thread); thread 820 drivers/media/dvb-core/dvb_frontend.c if (fepriv->thread) thread 823 drivers/media/dvb-core/dvb_frontend.c fepriv->thread); thread 856 drivers/media/dvb-core/dvb_frontend.c if (fepriv->thread) { thread 870 drivers/media/dvb-core/dvb_frontend.c fepriv->thread = NULL; thread 883 drivers/media/dvb-core/dvb_frontend.c fepriv->thread = fe_thread; thread 2646 drivers/media/dvb-core/dvb_frontend.c if (fepriv->thread) thread 2655 drivers/media/dvb-core/dvb_frontend.c if (fepriv->thread) thread 2664 drivers/media/dvb-core/dvb_frontend.c if (fepriv->thread) thread 2673 drivers/media/dvb-core/dvb_frontend.c if (fepriv->thread) thread 2753 drivers/media/dvb-core/dvb_frontend.c mfepriv->thread)) { thread 2766 drivers/media/dvb-core/dvb_frontend.c mfepriv->thread) { thread 135 drivers/media/i2c/tvaudio.c struct task_struct *thread; thread 322 drivers/media/i2c/tvaudio.c wake_up_process(chip->thread); thread 1835 drivers/media/i2c/tvaudio.c if (chip->thread) thread 1836 drivers/media/i2c/tvaudio.c wake_up_process(chip->thread); thread 1882 drivers/media/i2c/tvaudio.c if (chip->thread) { thread 2047 drivers/media/i2c/tvaudio.c chip->thread = NULL; thread 2058 drivers/media/i2c/tvaudio.c chip->thread = kthread_run(chip_thread, chip, "%s", thread 2060 drivers/media/i2c/tvaudio.c if (IS_ERR(chip->thread)) { thread 2062 drivers/media/i2c/tvaudio.c chip->thread = NULL; thread 2074 drivers/media/i2c/tvaudio.c if (chip->thread) { thread 2076 drivers/media/i2c/tvaudio.c kthread_stop(chip->thread); thread 2077 drivers/media/i2c/tvaudio.c chip->thread = NULL; thread 439 drivers/media/pci/pt3/pt3.c adap->thread->comm); thread 454 drivers/media/pci/pt3/pt3.c adap->thread->comm); thread 460 drivers/media/pci/pt3/pt3.c struct task_struct *thread; thread 463 drivers/media/pci/pt3/pt3.c thread = kthread_run(pt3_fetch_thread, adap, "pt3-ad%i-dmx%i", thread 465 drivers/media/pci/pt3/pt3.c if (IS_ERR(thread)) { thread 466 drivers/media/pci/pt3/pt3.c int ret = PTR_ERR(thread); thread 468 drivers/media/pci/pt3/pt3.c adap->thread = NULL; thread 474 
drivers/media/pci/pt3/pt3.c adap->thread = thread; thread 490 drivers/media/pci/pt3/pt3.c ret = kthread_stop(adap->thread); thread 491 drivers/media/pci/pt3/pt3.c adap->thread = NULL; thread 518 drivers/media/pci/pt3/pt3.c if (adap->num_feeds > 0 || !adap->thread) thread 604 drivers/media/pci/pt3/pt3.c if (adap->thread) thread 121 drivers/media/pci/pt3/pt3.h struct task_struct *thread; thread 304 drivers/media/pci/saa7134/saa7134-tvaudio.c if (dev->thread.scan1 == dev->thread.scan2 && thread 314 drivers/media/pci/saa7134/saa7134-tvaudio.c return dev->thread.scan1 != dev->thread.scan2; thread 477 drivers/media/pci/saa7134/saa7134-tvaudio.c dev->thread.scan1 = dev->thread.scan2; thread 479 drivers/media/pci/saa7134/saa7134-tvaudio.c dev->thread.scan1); thread 518 drivers/media/pci/saa7134/saa7134-tvaudio.c if (dev->thread.scan1 != dev->thread.scan2) thread 592 drivers/media/pci/saa7134/saa7134-tvaudio.c if (UNSET == dev->thread.mode) { thread 596 drivers/media/pci/saa7134/saa7134-tvaudio.c mode = dev->thread.mode; thread 606 drivers/media/pci/saa7134/saa7134-tvaudio.c dev->thread.stopped = 1; thread 772 drivers/media/pci/saa7134/saa7134-tvaudio.c dev->thread.scan1 = dev->thread.scan2; thread 774 drivers/media/pci/saa7134/saa7134-tvaudio.c dev->thread.scan1); thread 850 drivers/media/pci/saa7134/saa7134-tvaudio.c dev->thread.stopped = 1; thread 1012 drivers/media/pci/saa7134/saa7134-tvaudio.c dev->thread.thread = NULL; thread 1013 drivers/media/pci/saa7134/saa7134-tvaudio.c dev->thread.scan1 = dev->thread.scan2 = 0; thread 1017 drivers/media/pci/saa7134/saa7134-tvaudio.c dev->thread.thread = kthread_run(my_thread, dev, "%s", dev->name); thread 1018 drivers/media/pci/saa7134/saa7134-tvaudio.c if (IS_ERR(dev->thread.thread)) { thread 1039 drivers/media/pci/saa7134/saa7134-tvaudio.c if (dev->thread.thread && !dev->thread.stopped) thread 1040 drivers/media/pci/saa7134/saa7134-tvaudio.c kthread_stop(dev->thread.thread); thread 1052 drivers/media/pci/saa7134/saa7134-tvaudio.c } else if (dev->thread.thread) { thread 1053 drivers/media/pci/saa7134/saa7134-tvaudio.c dev->thread.mode = UNSET; thread 1054 drivers/media/pci/saa7134/saa7134-tvaudio.c dev->thread.scan2++; thread 1056 drivers/media/pci/saa7134/saa7134-tvaudio.c if (!dev->insuspend && !dev->thread.stopped) thread 1057 drivers/media/pci/saa7134/saa7134-tvaudio.c wake_up_process(dev->thread.thread); thread 1722 drivers/media/pci/saa7134/saa7134-video.c mode = dev->thread.mode; thread 1728 drivers/media/pci/saa7134/saa7134-video.c dev->thread.mode = t->audmode; thread 455 drivers/media/pci/saa7134/saa7134.h struct task_struct *thread; thread 652 drivers/media/pci/saa7134/saa7134.h struct saa7134_thread thread; thread 2351 drivers/media/pci/ttpci/av7110.c struct task_struct *thread; thread 2674 drivers/media/pci/ttpci/av7110.c thread = kthread_run(arm_thread, (void *) av7110, "arm_mon"); thread 2675 drivers/media/pci/ttpci/av7110.c if (IS_ERR(thread)) { thread 2676 drivers/media/pci/ttpci/av7110.c ret = PTR_ERR(thread); thread 2679 drivers/media/pci/ttpci/av7110.c av7110->arm_thread = thread; thread 50 drivers/media/rc/rc-core-priv.h struct task_struct *thread; thread 231 drivers/media/rc/rc-ir-raw.c if (!dev->raw || !dev->raw->thread) thread 234 drivers/media/rc/rc-ir-raw.c wake_up_process(dev->raw->thread); thread 635 drivers/media/rc/rc-ir-raw.c struct task_struct *thread; thread 637 drivers/media/rc/rc-ir-raw.c thread = kthread_run(ir_raw_event_thread, dev->raw, "rc%u", dev->minor); thread 638 drivers/media/rc/rc-ir-raw.c if (IS_ERR(thread)) 
thread 639 drivers/media/rc/rc-ir-raw.c return PTR_ERR(thread); thread 641 drivers/media/rc/rc-ir-raw.c dev->raw->thread = thread; thread 666 drivers/media/rc/rc-ir-raw.c kthread_stop(dev->raw->thread); thread 101 drivers/media/usb/pvrusb2/pvrusb2-dvb.c if (adap->thread) { thread 102 drivers/media/usb/pvrusb2/pvrusb2-dvb.c kthread_stop(adap->thread); thread 103 drivers/media/usb/pvrusb2/pvrusb2-dvb.c adap->thread = NULL; thread 172 drivers/media/usb/pvrusb2/pvrusb2-dvb.c adap->thread = kthread_run(pvr2_dvb_feed_thread, adap, "pvrusb2-dvb"); thread 174 drivers/media/usb/pvrusb2/pvrusb2-dvb.c if (IS_ERR(adap->thread)) { thread 175 drivers/media/usb/pvrusb2/pvrusb2-dvb.c ret = PTR_ERR(adap->thread); thread 176 drivers/media/usb/pvrusb2/pvrusb2-dvb.c adap->thread = NULL; thread 29 drivers/media/usb/pvrusb2/pvrusb2-dvb.h struct task_struct *thread; thread 194 drivers/misc/cxl/cxllib.c attr->tid = task->thread.tidr; thread 689 drivers/misc/cxl/native.c ctx->tidr = current->thread.tidr; thread 133 drivers/misc/ocxl/file.c ctx->tidr = current->thread.tidr; thread 1076 drivers/net/ethernet/marvell/mvpp2/mvpp2.h unsigned int thread; thread 131 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c static void mvpp2_thread_write(struct mvpp2 *priv, unsigned int thread, thread 134 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c writel(data, priv->swth_base[thread] + offset); thread 137 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c static u32 mvpp2_thread_read(struct mvpp2 *priv, unsigned int thread, thread 140 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c return readl(priv->swth_base[thread] + offset); thread 143 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c static void mvpp2_thread_write_relaxed(struct mvpp2 *priv, unsigned int thread, thread 146 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c writel_relaxed(data, priv->swth_base[thread] + offset); thread 149 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c static u32 mvpp2_thread_read_relaxed(struct mvpp2 *priv, unsigned int thread, thread 152 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c return readl_relaxed(priv->swth_base[thread] + offset); thread 413 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c unsigned int thread = mvpp2_cpu_to_thread(priv, get_cpu()); thread 415 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c *dma_addr = mvpp2_thread_read(priv, thread, thread 417 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c *phys_addr = mvpp2_thread_read(priv, thread, MVPP2_BM_VIRT_ALLOC_REG); thread 423 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c val = mvpp2_thread_read(priv, thread, MVPP22_BM_ADDR_HIGH_ALLOC); thread 661 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c unsigned int thread = mvpp2_cpu_to_thread(port->priv, get_cpu()); thread 664 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c if (test_bit(thread, &port->priv->lock_map)) thread 665 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c spin_lock_irqsave(&port->bm_lock[thread], flags); thread 679 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write_relaxed(port->priv, thread, thread 688 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write_relaxed(port->priv, thread, thread 690 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write_relaxed(port->priv, thread, thread 693 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c if (test_bit(thread, &port->priv->lock_map)) thread 694 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c spin_unlock_irqrestore(&port->bm_lock[thread], flags); thread 1929 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c unsigned int thread = 
thread 1932 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c MVPP2_AGGR_TXQ_STATUS_REG(thread)); thread 1951 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c unsigned int thread = mvpp2_cpu_to_thread(port->priv, smp_processor_id()); thread 1956 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write_relaxed(priv, thread, MVPP2_TXQ_RSVD_REQ_REG, val); thread 1958 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c val = mvpp2_thread_read_relaxed(priv, thread, MVPP2_TXQ_RSVD_RSLT_REG); thread 1972 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c unsigned int thread; thread 1983 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c for (thread = 0; thread < port->priv->nthreads; thread++) { thread 1986 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c txq_pcpu_aux = per_cpu_ptr(txq->pcpu, thread); thread 2148 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c unsigned int thread = mvpp2_cpu_to_thread(port->priv, get_cpu()); thread 2153 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write(port->priv, thread, MVPP2_RXQ_NUM_REG, rxq->id); thread 2154 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write(port->priv, thread, MVPP2_RXQ_THRESH_REG, thread 2164 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c unsigned int thread = mvpp2_cpu_to_thread(port->priv, get_cpu()); thread 2171 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write(port->priv, thread, MVPP2_TXQ_NUM_REG, txq->id); thread 2172 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write(port->priv, thread, MVPP2_TXQ_THRESH_REG, val); thread 2273 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c if (txq_pcpu->thread != mvpp2_cpu_to_thread(port->priv, smp_processor_id())) thread 2289 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c unsigned int thread) thread 2300 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c txq_pcpu = per_cpu_ptr(txq->pcpu, thread); thread 2317 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c unsigned int thread, struct mvpp2 *priv) thread 2332 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c MVPP2_AGGR_TXQ_INDEX_REG(thread)); thread 2343 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_write(priv, MVPP2_AGGR_TXQ_DESC_ADDR_REG(thread), txq_dma); thread 2344 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_write(priv, MVPP2_AGGR_TXQ_DESC_SIZE_REG(thread), thread 2355 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c unsigned int thread; thread 2373 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c thread = mvpp2_cpu_to_thread(port->priv, get_cpu()); thread 2374 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write(port->priv, thread, MVPP2_RXQ_NUM_REG, rxq->id); thread 2379 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write(port->priv, thread, MVPP2_RXQ_DESC_ADDR_REG, rxq_dma); thread 2380 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write(port->priv, thread, MVPP2_RXQ_DESC_SIZE_REG, rxq->size); thread 2381 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write(port->priv, thread, MVPP2_RXQ_INDEX_REG, 0); thread 2426 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c unsigned int thread; thread 2445 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c thread = mvpp2_cpu_to_thread(port->priv, get_cpu()); thread 2446 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write(port->priv, thread, MVPP2_RXQ_NUM_REG, rxq->id); thread 2447 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write(port->priv, thread, MVPP2_RXQ_DESC_ADDR_REG, 0); thread 2448 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 
mvpp2_thread_write(port->priv, thread, MVPP2_RXQ_DESC_SIZE_REG, 0); thread 2457 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c unsigned int thread; thread 2473 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c thread = mvpp2_cpu_to_thread(port->priv, get_cpu()); thread 2474 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write(port->priv, thread, MVPP2_TXQ_NUM_REG, txq->id); thread 2475 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write(port->priv, thread, MVPP2_TXQ_DESC_ADDR_REG, thread 2477 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write(port->priv, thread, MVPP2_TXQ_DESC_SIZE_REG, thread 2479 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write(port->priv, thread, MVPP2_TXQ_INDEX_REG, 0); thread 2480 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write(port->priv, thread, MVPP2_TXQ_RSVD_CLR_REG, thread 2482 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c val = mvpp2_thread_read(port->priv, thread, MVPP2_TXQ_PENDING_REG); thread 2484 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write(port->priv, thread, MVPP2_TXQ_PENDING_REG, val); thread 2495 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write(port->priv, thread, MVPP2_TXQ_PREF_BUF_REG, thread 2514 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c for (thread = 0; thread < port->priv->nthreads; thread++) { thread 2515 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c txq_pcpu = per_cpu_ptr(txq->pcpu, thread); thread 2549 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c unsigned int thread; thread 2551 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c for (thread = 0; thread < port->priv->nthreads; thread++) { thread 2552 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c txq_pcpu = per_cpu_ptr(txq->pcpu, thread); thread 2578 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c thread = mvpp2_cpu_to_thread(port->priv, get_cpu()); thread 2579 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write(port->priv, thread, MVPP2_TXQ_NUM_REG, txq->id); thread 2580 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write(port->priv, thread, MVPP2_TXQ_DESC_ADDR_REG, 0); thread 2581 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write(port->priv, thread, MVPP2_TXQ_DESC_SIZE_REG, 0); thread 2590 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c unsigned int thread = mvpp2_cpu_to_thread(port->priv, get_cpu()); thread 2593 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write(port->priv, thread, MVPP2_TXQ_NUM_REG, txq->id); thread 2594 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c val = mvpp2_thread_read(port->priv, thread, MVPP2_TXQ_PREF_BUF_REG); thread 2596 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write(port->priv, thread, MVPP2_TXQ_PREF_BUF_REG, val); thread 2612 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c pending = mvpp2_thread_read(port->priv, thread, thread 2618 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write(port->priv, thread, MVPP2_TXQ_PREF_BUF_REG, val); thread 2621 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c for (thread = 0; thread < port->priv->nthreads; thread++) { thread 2622 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c txq_pcpu = per_cpu_ptr(txq->pcpu, thread); thread 3020 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c unsigned int thread = mvpp2_cpu_to_thread(port->priv, smp_processor_id()); thread 3021 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c struct mvpp2_txq_pcpu *txq_pcpu = per_cpu_ptr(txq->pcpu, thread); thread 3038 
drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c unsigned int thread = mvpp2_cpu_to_thread(port->priv, smp_processor_id()); thread 3039 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c struct mvpp2_txq_pcpu *txq_pcpu = per_cpu_ptr(txq->pcpu, thread); thread 3208 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c unsigned int thread; thread 3213 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c thread = mvpp2_cpu_to_thread(port->priv, smp_processor_id()); thread 3217 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c txq_pcpu = per_cpu_ptr(txq->pcpu, thread); thread 3218 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c aggr_txq = &port->priv->aggr_txqs[thread]; thread 3220 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c if (test_bit(thread, &port->priv->lock_map)) thread 3221 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c spin_lock_irqsave(&port->tx_lock[thread], flags); thread 3273 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c struct mvpp2_pcpu_stats *stats = per_cpu_ptr(port->stats, thread); thread 3303 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c struct mvpp2_port_pcpu *port_pcpu = per_cpu_ptr(port->pcpu, thread); thread 3313 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c if (test_bit(thread, &port->priv->lock_map)) thread 3314 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c spin_unlock_irqrestore(&port->tx_lock[thread], flags); thread 3335 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c unsigned int thread = mvpp2_cpu_to_thread(port->priv, smp_processor_id()); thread 3358 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c mvpp2_thread_write(port->priv, thread, thread 3727 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c unsigned int thread; thread 3742 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c for (thread = 0; thread < port->priv->nthreads; thread++) { thread 3743 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c port_pcpu = per_cpu_ptr(port->pcpu, thread); thread 4545 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c unsigned int thread; thread 4589 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c for (thread = 0; thread < priv->nthreads; thread++) { thread 4590 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c txq_pcpu = per_cpu_ptr(txq->pcpu, thread); thread 4591 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c txq_pcpu->thread = thread; thread 5205 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c unsigned int ntxqs, nrxqs, thread; thread 5360 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c for (thread = 0; thread < priv->nthreads; thread++) { thread 5361 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c port_pcpu = per_cpu_ptr(port->pcpu, thread); thread 616 drivers/net/ethernet/realtek/8139too.c struct delayed_work thread; thread 1030 drivers/net/ethernet/realtek/8139too.c INIT_DELAYED_WORK(&tp->thread, rtl8139_thread); thread 1121 drivers/net/ethernet/realtek/8139too.c cancel_delayed_work_sync(&tp->thread); thread 1611 drivers/net/ethernet/realtek/8139too.c container_of(work, struct rtl8139_private, thread.work); thread 1627 drivers/net/ethernet/realtek/8139too.c schedule_delayed_work(&tp->thread, thr_delay); thread 1643 drivers/net/ethernet/realtek/8139too.c schedule_delayed_work(&tp->thread, next_tick); thread 1657 drivers/net/ethernet/realtek/8139too.c container_of(work, struct rtl8139_private, thread.work); thread 1709 drivers/net/ethernet/realtek/8139too.c INIT_DELAYED_WORK(&tp->thread, rtl8139_thread); thread 1710 drivers/net/ethernet/realtek/8139too.c schedule_delayed_work(&tp->thread, next_tick); thread 60 drivers/net/wireless/rsi/rsi_common.h struct rsi_thread *thread, thread 64 
drivers/net/wireless/rsi/rsi_common.h init_completion(&thread->completion); thread 65 drivers/net/wireless/rsi/rsi_common.h atomic_set(&thread->thread_done, 0); thread 66 drivers/net/wireless/rsi/rsi_common.h thread->task = kthread_run(func_ptr, common, "%s", name); thread 67 drivers/net/wireless/rsi/rsi_common.h if (IS_ERR(thread->task)) thread 68 drivers/net/wireless/rsi/rsi_common.h return (int)PTR_ERR(thread->task); thread 376 drivers/of/base.c const char *prop_name, int cpu, unsigned int *thread) thread 392 drivers/of/base.c if (thread) thread 393 drivers/of/base.c *thread = tid; thread 408 drivers/of/base.c int cpu, unsigned int *thread) thread 417 drivers/of/base.c cpu, thread)) thread 420 drivers/of/base.c return __of_find_n_match_cpu_property(cpun, "reg", cpu, thread); thread 442 drivers/of/base.c struct device_node *of_get_cpu_node(int cpu, unsigned int *thread) thread 447 drivers/of/base.c if (arch_find_n_match_cpu_physical_id(cpun, cpu, thread)) thread 176 drivers/pcmcia/cs.c if (!socket->thread) { thread 213 drivers/pcmcia/cs.c if (socket->thread) thread 214 drivers/pcmcia/cs.c kthread_stop(socket->thread); thread 597 drivers/pcmcia/cs.c skt->thread = current; thread 606 drivers/pcmcia/cs.c skt->thread = NULL; thread 704 drivers/pcmcia/cs.c if (s->thread) { thread 709 drivers/pcmcia/cs.c wake_up_process(s->thread); thread 729 drivers/pcmcia/cs.c if (s->thread) { thread 734 drivers/pcmcia/cs.c wake_up_process(s->thread); thread 1043 drivers/ps3/ps3-lpm.c void ps3_enable_pm_interrupts(u32 cpu, u32 thread, u32 mask) thread 335 drivers/s390/net/lcs.c static int lcs_set_thread_start_bit(struct lcs_card *card, unsigned long thread) thread 340 drivers/s390/net/lcs.c if ( !(card->thread_allowed_mask & thread) || thread 341 drivers/s390/net/lcs.c (card->thread_start_mask & thread) ) { thread 345 drivers/s390/net/lcs.c card->thread_start_mask |= thread; thread 351 drivers/s390/net/lcs.c lcs_clear_thread_running_bit(struct lcs_card *card, unsigned long thread) thread 356 drivers/s390/net/lcs.c card->thread_running_mask &= ~thread; thread 361 drivers/s390/net/lcs.c static int __lcs_do_run_thread(struct lcs_card *card, unsigned long thread) thread 367 drivers/s390/net/lcs.c if (card->thread_start_mask & thread){ thread 368 drivers/s390/net/lcs.c if ((card->thread_allowed_mask & thread) && thread 369 drivers/s390/net/lcs.c !(card->thread_running_mask & thread)){ thread 371 drivers/s390/net/lcs.c card->thread_start_mask &= ~thread; thread 372 drivers/s390/net/lcs.c card->thread_running_mask |= thread; thread 381 drivers/s390/net/lcs.c lcs_do_run_thread(struct lcs_card *card, unsigned long thread) thread 385 drivers/s390/net/lcs.c (rc = __lcs_do_run_thread(card, thread)) >= 0); thread 390 drivers/s390/net/lcs.c lcs_do_start_thread(struct lcs_card *card, unsigned long thread) thread 400 drivers/s390/net/lcs.c rc = (card->thread_start_mask & thread); thread 812 drivers/s390/net/qeth_core_main.c unsigned long thread) thread 817 drivers/s390/net/qeth_core_main.c if (!(card->thread_allowed_mask & thread) || thread 818 drivers/s390/net/qeth_core_main.c (card->thread_start_mask & thread)) { thread 822 drivers/s390/net/qeth_core_main.c card->thread_start_mask |= thread; thread 827 drivers/s390/net/qeth_core_main.c void qeth_clear_thread_start_bit(struct qeth_card *card, unsigned long thread) thread 832 drivers/s390/net/qeth_core_main.c card->thread_start_mask &= ~thread; thread 838 drivers/s390/net/qeth_core_main.c void qeth_clear_thread_running_bit(struct qeth_card *card, unsigned long thread) 
thread 843 drivers/s390/net/qeth_core_main.c card->thread_running_mask &= ~thread; thread 849 drivers/s390/net/qeth_core_main.c static int __qeth_do_run_thread(struct qeth_card *card, unsigned long thread) thread 855 drivers/s390/net/qeth_core_main.c if (card->thread_start_mask & thread) { thread 856 drivers/s390/net/qeth_core_main.c if ((card->thread_allowed_mask & thread) && thread 857 drivers/s390/net/qeth_core_main.c !(card->thread_running_mask & thread)) { thread 859 drivers/s390/net/qeth_core_main.c card->thread_start_mask &= ~thread; thread 860 drivers/s390/net/qeth_core_main.c card->thread_running_mask |= thread; thread 868 drivers/s390/net/qeth_core_main.c int qeth_do_run_thread(struct qeth_card *card, unsigned long thread) thread 873 drivers/s390/net/qeth_core_main.c (rc = __qeth_do_run_thread(card, thread)) >= 0); thread 1305 drivers/s390/net/qeth_core_main.c static int qeth_do_start_thread(struct qeth_card *card, unsigned long thread) thread 1315 drivers/s390/net/qeth_core_main.c rc = (card->thread_start_mask & thread); thread 1573 drivers/s390/scsi/zfcp_erp.c struct task_struct *thread; thread 1575 drivers/s390/scsi/zfcp_erp.c thread = kthread_run(zfcp_erp_thread, adapter, "zfcperp%s", thread 1577 drivers/s390/scsi/zfcp_erp.c if (IS_ERR(thread)) { thread 1580 drivers/s390/scsi/zfcp_erp.c return PTR_ERR(thread); thread 1583 drivers/s390/scsi/zfcp_erp.c adapter->erp_thread = thread; thread 1602 drivers/scsi/aacraid/aacraid.h struct task_struct *thread; thread 315 drivers/scsi/aacraid/commctrl.c kthread_stop(dev->thread); thread 318 drivers/scsi/aacraid/commctrl.c dev->thread = kthread_run(aac_command_thread, dev, thread 1492 drivers/scsi/aacraid/commsup.c if (aac->thread && aac->thread->pid != current->pid) { thread 1494 drivers/scsi/aacraid/commsup.c kthread_stop(aac->thread); thread 1495 drivers/scsi/aacraid/commsup.c aac->thread = NULL; thread 1578 drivers/scsi/aacraid/commsup.c aac->thread = kthread_run(aac_command_thread, aac, "%s", thread 1580 drivers/scsi/aacraid/commsup.c if (IS_ERR(aac->thread)) { thread 1581 drivers/scsi/aacraid/commsup.c retval = PTR_ERR(aac->thread); thread 1582 drivers/scsi/aacraid/commsup.c aac->thread = NULL; thread 1554 drivers/scsi/aacraid/linit.c kthread_stop(aac->thread); thread 1555 drivers/scsi/aacraid/linit.c aac->thread = NULL; thread 1712 drivers/scsi/aacraid/linit.c aac->thread = kthread_run(aac_command_thread, aac, AAC_DRIVERNAME); thread 1713 drivers/scsi/aacraid/linit.c if (IS_ERR(aac->thread)) { thread 1715 drivers/scsi/aacraid/linit.c error = PTR_ERR(aac->thread); thread 1716 drivers/scsi/aacraid/linit.c aac->thread = NULL; thread 2624 drivers/scsi/bnx2fc/bnx2fc_fcoe.c struct task_struct *thread; thread 2628 drivers/scsi/bnx2fc/bnx2fc_fcoe.c thread = kthread_create_on_node(bnx2fc_percpu_io_thread, thread 2631 drivers/scsi/bnx2fc/bnx2fc_fcoe.c if (IS_ERR(thread)) thread 2632 drivers/scsi/bnx2fc/bnx2fc_fcoe.c return PTR_ERR(thread); thread 2635 drivers/scsi/bnx2fc/bnx2fc_fcoe.c kthread_bind(thread, cpu); thread 2636 drivers/scsi/bnx2fc/bnx2fc_fcoe.c p->iothread = thread; thread 2637 drivers/scsi/bnx2fc/bnx2fc_fcoe.c wake_up_process(thread); thread 2644 drivers/scsi/bnx2fc/bnx2fc_fcoe.c struct task_struct *thread; thread 2652 drivers/scsi/bnx2fc/bnx2fc_fcoe.c thread = p->iothread; thread 2664 drivers/scsi/bnx2fc/bnx2fc_fcoe.c if (thread) thread 2665 drivers/scsi/bnx2fc/bnx2fc_fcoe.c kthread_stop(thread); thread 414 drivers/scsi/bnx2i/bnx2i_init.c struct task_struct *thread; thread 418 drivers/scsi/bnx2i/bnx2i_init.c thread = 
kthread_create_on_node(bnx2i_percpu_io_thread, (void *)p, thread 421 drivers/scsi/bnx2i/bnx2i_init.c if (IS_ERR(thread)) thread 422 drivers/scsi/bnx2i/bnx2i_init.c return PTR_ERR(thread); thread 425 drivers/scsi/bnx2i/bnx2i_init.c kthread_bind(thread, cpu); thread 426 drivers/scsi/bnx2i/bnx2i_init.c p->iothread = thread; thread 427 drivers/scsi/bnx2i/bnx2i_init.c wake_up_process(thread); thread 434 drivers/scsi/bnx2i/bnx2i_init.c struct task_struct *thread; thread 440 drivers/scsi/bnx2i/bnx2i_init.c thread = p->iothread; thread 452 drivers/scsi/bnx2i/bnx2i_init.c if (thread) thread 453 drivers/scsi/bnx2i/bnx2i_init.c kthread_stop(thread); thread 1893 drivers/scsi/qedi/qedi_main.c struct task_struct *thread; thread 1895 drivers/scsi/qedi/qedi_main.c thread = kthread_create_on_node(qedi_percpu_io_thread, (void *)p, thread 1898 drivers/scsi/qedi/qedi_main.c if (IS_ERR(thread)) thread 1899 drivers/scsi/qedi/qedi_main.c return PTR_ERR(thread); thread 1901 drivers/scsi/qedi/qedi_main.c kthread_bind(thread, cpu); thread 1902 drivers/scsi/qedi/qedi_main.c p->iothread = thread; thread 1903 drivers/scsi/qedi/qedi_main.c wake_up_process(thread); thread 1911 drivers/scsi/qedi/qedi_main.c struct task_struct *thread; thread 1914 drivers/scsi/qedi/qedi_main.c thread = p->iothread; thread 1925 drivers/scsi/qedi/qedi_main.c if (thread) thread 1926 drivers/scsi/qedi/qedi_main.c kthread_stop(thread); thread 277 drivers/staging/media/ipu3/ipu3-abi.h #define IMGU_ABI_EVENT_BUFFER_ENQUEUED(thread, queue) \ thread 278 drivers/staging/media/ipu3/ipu3-abi.h (0 << 24 | (thread) << 16 | (queue) << 8) thread 1102 drivers/staging/media/ipu3/ipu3-css.c static u8 imgu_css_queue_pos(struct imgu_css *css, int queue, int thread) thread 1110 drivers/staging/media/ipu3/ipu3-css.c return queue >= 0 ? 
readb(&q->host2sp_bufq_info[thread][queue].end) : thread 1116 drivers/staging/media/ipu3/ipu3-css.c int queue, int thread, u32 data) thread 1126 drivers/staging/media/ipu3/ipu3-css.c size = readb(&q->host2sp_bufq_info[thread][queue].size); thread 1127 drivers/staging/media/ipu3/ipu3-css.c start = readb(&q->host2sp_bufq_info[thread][queue].start); thread 1128 drivers/staging/media/ipu3/ipu3-css.c end = readb(&q->host2sp_bufq_info[thread][queue].end); thread 1143 drivers/staging/media/ipu3/ipu3-css.c writel(data, &q->host2sp_bufq[thread][queue][end]); thread 1144 drivers/staging/media/ipu3/ipu3-css.c writeb(end2, &q->host2sp_bufq_info[thread][queue].end); thread 82 drivers/staging/unisys/visorhba/visorhba_main.c struct task_struct *thread; thread 733 drivers/staging/unisys/visorhba/visorhba_main.c visor_thread_stop(devdata->thread); thread 1031 drivers/staging/unisys/visorhba/visorhba_main.c devdata->thread = visor_thread_start(process_incoming_rsps, devdata, thread 1117 drivers/staging/unisys/visorhba/visorhba_main.c devdata->thread = visor_thread_start(process_incoming_rsps, devdata, thread 1153 drivers/staging/unisys/visorhba/visorhba_main.c visor_thread_stop(devdata->thread); thread 150 drivers/tty/mips_ejtag_fdc.c struct task_struct *thread; thread 957 drivers/tty/mips_ejtag_fdc.c priv->thread = kthread_create(mips_ejtag_fdc_put, priv, priv->fdc_name); thread 958 drivers/tty/mips_ejtag_fdc.c if (IS_ERR(priv->thread)) { thread 959 drivers/tty/mips_ejtag_fdc.c ret = PTR_ERR(priv->thread); thread 968 drivers/tty/mips_ejtag_fdc.c kthread_bind(priv->thread, dev->cpu); thread 969 drivers/tty/mips_ejtag_fdc.c wake_up_process(priv->thread); thread 1036 drivers/tty/mips_ejtag_fdc.c kthread_stop(priv->thread); thread 1066 drivers/tty/mips_ejtag_fdc.c kthread_stop(priv->thread); thread 1097 drivers/tty/mips_ejtag_fdc.c priv->thread = kthread_create(mips_ejtag_fdc_put, priv, priv->fdc_name); thread 1098 drivers/tty/mips_ejtag_fdc.c if (IS_ERR(priv->thread)) { thread 1099 drivers/tty/mips_ejtag_fdc.c ret = PTR_ERR(priv->thread); thread 1104 drivers/tty/mips_ejtag_fdc.c kthread_bind(priv->thread, dev->cpu); thread 1105 drivers/tty/mips_ejtag_fdc.c wake_up_process(priv->thread); thread 967 drivers/usb/atm/usbatm.c instance->thread = NULL; thread 985 drivers/usb/atm/usbatm.c instance->thread = t; thread 1063 drivers/usb/atm/usbatm.c instance->thread = NULL; thread 1218 drivers/usb/atm/usbatm.c if (instance->thread != NULL) thread 1219 drivers/usb/atm/usbatm.c send_sig(SIGTERM, instance->thread, 1); thread 147 drivers/usb/atm/usbatm.h struct task_struct *thread; thread 52 drivers/vfio/virqfd.c virqfd->thread) thread 100 drivers/vfio/virqfd.c if (virqfd->thread) thread 101 drivers/vfio/virqfd.c virqfd->thread(virqfd->opaque, virqfd->data); thread 106 drivers/vfio/virqfd.c void (*thread)(void *, void *), thread 122 drivers/vfio/virqfd.c virqfd->thread = thread; thread 173 drivers/vfio/virqfd.c if ((!handler || handler(opaque, data)) && thread) thread 237 drivers/w1/w1.c wake_up_process(md->thread); thread 140 drivers/w1/w1_int.c dev->thread = kthread_run(&w1_process, dev, "%s", dev->name); thread 141 drivers/w1/w1_int.c if (IS_ERR(dev->thread)) { thread 142 drivers/w1/w1_int.c retval = PTR_ERR(dev->thread); thread 163 drivers/w1/w1_int.c kthread_stop(dev->thread); thread 184 drivers/w1/w1_int.c kthread_stop(dev->thread); thread 688 drivers/w1/w1_netlink.c wake_up_process(dev->thread); thread 1669 fs/binfmt_elf.c struct elf_thread_core_info *thread; thread 1772 fs/binfmt_elf.c info->thread = NULL; thread 
1817 fs/binfmt_elf.c if (ct->task == dump_task || !info->thread) { thread 1818 fs/binfmt_elf.c t->next = info->thread; thread 1819 fs/binfmt_elf.c info->thread = t; thread 1825 fs/binfmt_elf.c t->next = info->thread->next; thread 1826 fs/binfmt_elf.c info->thread->next = t; thread 1833 fs/binfmt_elf.c for (t = info->thread; t != NULL; t = t->next) thread 1868 fs/binfmt_elf.c struct elf_thread_core_info *t = info->thread; thread 1900 fs/binfmt_elf.c struct elf_thread_core_info *threads = info->thread; thread 1922 fs/binfmt_elf.c struct task_struct *thread; thread 1938 fs/binfmt_elf.c struct task_struct *p = t->thread; thread 2025 fs/binfmt_elf.c ets->thread = ct->task; thread 1427 fs/binfmt_elf_fdpic.c struct task_struct *thread; thread 1442 fs/binfmt_elf_fdpic.c struct task_struct *p = t->thread; thread 1618 fs/binfmt_elf_fdpic.c tmp->thread = ct->task; thread 741 fs/ceph/super.h struct task_struct *thread; thread 747 fs/ceph/super.h .thread = current, \ thread 773 fs/ceph/super.h if (ctx->thread == current) { thread 70 fs/hfs/catalog.c memset(rec->thread.reserved, 0, sizeof(rec->thread.reserved)); thread 71 fs/hfs/catalog.c rec->thread.ParID = cpu_to_be32(parentid); thread 72 fs/hfs/catalog.c hfs_asc2mac(sb, &rec->thread.CName, name); thread 204 fs/hfs/catalog.c fd->search_key->cat.ParID = rec.thread.ParID; thread 205 fs/hfs/catalog.c len = fd->search_key->cat.CName.len = rec.thread.CName.len; thread 210 fs/hfs/catalog.c memcpy(fd->search_key->cat.CName.name, rec.thread.CName.name, len); thread 94 fs/hfs/dir.c be32_to_cpu(entry.thread.ParID), DT_DIR)) thread 237 fs/hfs/hfs.h struct hfs_cat_thread thread; thread 183 fs/hfsplus/catalog.c entry->thread.reserved = 0; thread 184 fs/hfsplus/catalog.c entry->thread.parentID = cpu_to_be32(parentid); thread 185 fs/hfsplus/catalog.c err = hfsplus_asc2uni(sb, &entry->thread.nodeName, HFSPLUS_MAX_STRLEN, thread 190 fs/hfsplus/catalog.c return 10 + be16_to_cpu(entry->thread.nodeName.length) * 2; thread 212 fs/hfsplus/catalog.c if (be16_to_cpu(tmp.thread.nodeName.length) > 255) { thread 218 fs/hfsplus/catalog.c be32_to_cpu(tmp.thread.parentID), thread 219 fs/hfsplus/catalog.c &tmp.thread.nodeName); thread 182 fs/hfsplus/dir.c be32_to_cpu(entry.thread.parentID), DT_DIR)) thread 329 fs/hfsplus/hfsplus_raw.h struct hfsplus_cat_thread thread; thread 277 include/drm/gpu_scheduler.h struct task_struct *thread; thread 42 include/linux/cpu.h int cpu, unsigned int *thread); thread 33 include/linux/iio/trigger_consumer.h irqreturn_t (*thread)(int irq, void *p); thread 43 include/linux/iio/trigger_consumer.h irqreturn_t (*thread)(int irq, void *p), thread 12 include/linux/iio/triggered_buffer.h irqreturn_t (*thread)(int irq, void *p), thread 19 include/linux/iio/triggered_buffer.h irqreturn_t (*thread)(int irq, void *p), thread 9 include/linux/iio/triggered_event.h irqreturn_t (*thread)(int irq, void *p)); thread 116 include/linux/interrupt.h struct task_struct *thread; thread 509 include/linux/mISDNif.h struct task_struct *thread; thread 352 include/linux/of.h extern struct device_node *of_get_cpu_node(int cpu, unsigned int *thread); thread 758 include/linux/of.h unsigned int *thread) thread 56 include/linux/pktcdvd.h struct task_struct *thread; thread 1278 include/linux/sched.h struct thread_struct thread; thread 140 include/linux/sched/task.h *size = arch_task_struct_size - offsetof(struct task_struct, thread); thread 183 include/linux/vfio.h void (*thread)(void *, void *); thread 194 include/linux/vfio.h void (*thread)(void *, void *), thread 231 
include/linux/w1.h struct task_struct *thread; thread 184 include/pcmcia/ss.h struct task_struct *thread; thread 71 include/soc/nps/common.h u32 __reserved:20, cluster:4, core:4, thread:4; thread 26 include/soc/tegra/bpmp.h } cpu_tx, thread, cpu_rx; thread 105 init/init_task.c .thread = INIT_THREAD, thread 60 kernel/cpu.c struct task_struct *thread; thread 500 kernel/cpu.c wake_up_process(st->thread); thread 529 kernel/cpu.c kthread_unpark(st->thread); thread 736 kernel/cpu.c if (!st->thread) thread 787 kernel/cpu.c .store = &cpuhp_state.thread, thread 798 kernel/cpu.c kthread_unpark(this_cpu_read(cpuhp_state.thread)); thread 886 kernel/cpu.c kthread_park(per_cpu_ptr(&cpuhp_state, cpu)->thread); thread 902 kernel/cpu.c kthread_unpark(per_cpu_ptr(&cpuhp_state, cpu)->thread); thread 496 kernel/debug/gdbstub.c struct task_struct *thread; thread 500 kernel/debug/gdbstub.c thread = kgdb_usethread; thread 501 kernel/debug/gdbstub.c if (!thread) { thread 502 kernel/debug/gdbstub.c thread = kgdb_info[ks->cpu].task; thread 513 kernel/debug/gdbstub.c if (thread == kgdb_info[i].task) thread 533 kernel/debug/gdbstub.c sleeping_thread_to_gdb_regs(gdb_regs, thread); thread 801 kernel/debug/gdbstub.c struct task_struct *thread; thread 808 kernel/debug/gdbstub.c thread = getthread(ks->linux_regs, ks->threadid); thread 809 kernel/debug/gdbstub.c if (!thread && ks->threadid > 0) { thread 813 kernel/debug/gdbstub.c kgdb_usethread = thread; thread 823 kernel/debug/gdbstub.c thread = getthread(ks->linux_regs, ks->threadid); thread 824 kernel/debug/gdbstub.c if (!thread && ks->threadid > 0) { thread 828 kernel/debug/gdbstub.c kgdb_contthread = thread; thread 839 kernel/debug/gdbstub.c struct task_struct *thread; thread 842 kernel/debug/gdbstub.c thread = getthread(ks->linux_regs, ks->threadid); thread 843 kernel/debug/gdbstub.c if (thread) thread 2333 kernel/debug/kdb/kdb_main.c (void *)(&p->thread), thread 542 kernel/exit.c struct task_struct *thread, *reaper; thread 544 kernel/exit.c thread = find_alive_thread(father); thread 545 kernel/exit.c if (thread) thread 546 kernel/exit.c return thread; thread 565 kernel/exit.c thread = find_alive_thread(reaper); thread 566 kernel/exit.c if (thread) thread 567 kernel/exit.c return thread; thread 796 kernel/fork.c *offset += offsetof(struct task_struct, thread); thread 66 kernel/irq/handle.c if (action->thread->flags & PF_EXITING) thread 134 kernel/irq/handle.c wake_up_process(action->thread); thread 193 kernel/irq/manage.c if (action->thread) thread 1126 kernel/irq/manage.c if (action->thread) thread 1251 kernel/irq/manage.c new->thread = get_task_struct(t); thread 1598 kernel/irq/manage.c if (new->thread) thread 1599 kernel/irq/manage.c wake_up_process(new->thread); thread 1601 kernel/irq/manage.c wake_up_process(new->secondary->thread); thread 1628 kernel/irq/manage.c if (new->thread) { thread 1629 kernel/irq/manage.c struct task_struct *t = new->thread; thread 1631 kernel/irq/manage.c new->thread = NULL; thread 1635 kernel/irq/manage.c if (new->secondary && new->secondary->thread) { thread 1636 kernel/irq/manage.c struct task_struct *t = new->secondary->thread; thread 1638 kernel/irq/manage.c new->secondary->thread = NULL; thread 1778 kernel/irq/manage.c if (action->thread) { thread 1779 kernel/irq/manage.c kthread_stop(action->thread); thread 1780 kernel/irq/manage.c put_task_struct(action->thread); thread 1781 kernel/irq/manage.c if (action->secondary && action->secondary->thread) { thread 1782 kernel/irq/manage.c kthread_stop(action->secondary->thread); thread 
1783 kernel/irq/manage.c put_task_struct(action->secondary->thread); thread 40 kernel/sched/cpufreq_schedutil.c struct task_struct *thread; thread 656 kernel/sched/cpufreq_schedutil.c struct task_struct *thread; thread 680 kernel/sched/cpufreq_schedutil.c thread = kthread_create(kthread_worker_fn, &sg_policy->worker, thread 683 kernel/sched/cpufreq_schedutil.c if (IS_ERR(thread)) { thread 684 kernel/sched/cpufreq_schedutil.c pr_err("failed to create sugov thread: %ld\n", PTR_ERR(thread)); thread 685 kernel/sched/cpufreq_schedutil.c return PTR_ERR(thread); thread 688 kernel/sched/cpufreq_schedutil.c ret = sched_setattr_nocheck(thread, &attr); thread 690 kernel/sched/cpufreq_schedutil.c kthread_stop(thread); thread 695 kernel/sched/cpufreq_schedutil.c sg_policy->thread = thread; thread 696 kernel/sched/cpufreq_schedutil.c kthread_bind_mask(thread, policy->related_cpus); thread 700 kernel/sched/cpufreq_schedutil.c wake_up_process(thread); thread 712 kernel/sched/cpufreq_schedutil.c kthread_stop(sg_policy->thread); thread 339 kernel/seccomp.c struct task_struct *thread, *caller; thread 346 kernel/seccomp.c for_each_thread(caller, thread) { thread 350 kernel/seccomp.c if (thread == caller) thread 353 kernel/seccomp.c if (thread->seccomp.mode == SECCOMP_MODE_DISABLED || thread 354 kernel/seccomp.c (thread->seccomp.mode == SECCOMP_MODE_FILTER && thread 355 kernel/seccomp.c is_ancestor(thread->seccomp.filter, thread 360 kernel/seccomp.c failed = task_pid_vnr(thread); thread 380 kernel/seccomp.c struct task_struct *thread, *caller; thread 387 kernel/seccomp.c for_each_thread(caller, thread) { thread 389 kernel/seccomp.c if (thread == caller) thread 399 kernel/seccomp.c put_seccomp_filter(thread); thread 400 kernel/seccomp.c smp_store_release(&thread->seccomp.filter, thread 410 kernel/seccomp.c task_set_no_new_privs(thread); thread 418 kernel/seccomp.c if (thread->seccomp.mode == SECCOMP_MODE_DISABLED) thread 419 kernel/seccomp.c seccomp_assign_mode(thread, SECCOMP_MODE_FILTER, thread 38 kernel/stop_machine.c struct task_struct *thread; thread 73 kernel/stop_machine.c wake_q_add(wakeq, stopper->thread); thread 542 kernel/stop_machine.c kthread_park(stopper->thread); thread 549 kernel/stop_machine.c sched_set_stop_task(cpu, per_cpu(cpu_stopper.thread, cpu)); thread 564 kernel/stop_machine.c kthread_unpark(stopper->thread); thread 568 kernel/stop_machine.c .store = &cpu_stopper.thread, thread 50 kernel/time/posix-cpu-timers.c static struct task_struct *lookup_task(const pid_t pid, bool thread, thread 60 kernel/time/posix-cpu-timers.c return thread ? 
current : current->group_leader; thread 66 kernel/time/posix-cpu-timers.c if (thread) thread 91 kernel/time/posix-cpu-timers.c const bool thread = !!CPUCLOCK_PERTHREAD(clock); thread 99 kernel/time/posix-cpu-timers.c p = lookup_task(pid, thread, gettime); thread 560 sound/soc/intel/baytrail/sst-baytrail-ipc.c .thread = sst_byt_irq_thread, thread 178 sound/soc/intel/common/sst-dsp.h irqreturn_t (*thread)(int irq, void *context); thread 1240 sound/soc/intel/common/sst-firmware.c sst_dev->thread, IRQF_SHARED, "AudioDSP", sst); thread 2049 sound/soc/intel/haswell/sst-haswell-ipc.c .thread = hsw_irq_thread, thread 545 sound/soc/intel/skylake/bxt-sst.c .thread = skl_dsp_irq_thread_handler, thread 363 sound/soc/intel/skylake/cnl-sst.c .thread = cnl_dsp_irq_thread_handler, thread 440 sound/soc/intel/skylake/skl-sst-dsp.c sst_dev->thread, IRQF_SHARED, "AudioDSP", sst); thread 515 sound/soc/intel/skylake/skl-sst.c .thread = skl_dsp_irq_thread_handler, thread 73 tools/io_uring/io_uring-bench.c pthread_t thread; thread 560 tools/io_uring/io_uring-bench.c pthread_create(&s->thread, NULL, submitter_fn, s); thread 588 tools/io_uring/io_uring-bench.c pthread_join(s->thread, &ret); thread 6 tools/perf/arch/arm/include/arch-tests.h struct thread; thread 14 tools/perf/arch/arm/tests/dwarf-unwind.c struct thread *thread, u64 *regs) thread 29 tools/perf/arch/arm/tests/dwarf-unwind.c map = map_groups__find(thread->mg, (u64)sp); thread 46 tools/perf/arch/arm/tests/dwarf-unwind.c struct thread *thread) thread 62 tools/perf/arch/arm/tests/dwarf-unwind.c return sample_ustack(sample, thread, buf); thread 7 tools/perf/arch/arm/util/unwind-libdw.c bool libdw__arch_set_initial_registers(Dwfl_Thread *thread, void *arg) thread 36 tools/perf/arch/arm/util/unwind-libdw.c return dwfl_thread_state_registers(thread, 0, PERF_REG_ARM_MAX, thread 6 tools/perf/arch/arm64/include/arch-tests.h struct thread; thread 14 tools/perf/arch/arm64/tests/dwarf-unwind.c struct thread *thread, u64 *regs) thread 29 tools/perf/arch/arm64/tests/dwarf-unwind.c map = map_groups__find(thread->mg, (u64)sp); thread 46 tools/perf/arch/arm64/tests/dwarf-unwind.c struct thread *thread) thread 62 tools/perf/arch/arm64/tests/dwarf-unwind.c return sample_ustack(sample, thread, buf); thread 7 tools/perf/arch/arm64/util/unwind-libdw.c bool libdw__arch_set_initial_registers(Dwfl_Thread *thread, void *arg) thread 52 tools/perf/arch/arm64/util/unwind-libdw.c if (!dwfl_thread_state_registers(thread, 0, PERF_REG_ARM64_MAX, thread 57 tools/perf/arch/arm64/util/unwind-libdw.c dwfl_thread_state_register_pc(thread, dwarf_pc); thread 9 tools/perf/arch/csky/util/unwind-libdw.c bool libdw__arch_set_initial_registers(Dwfl_Thread *thread, void *arg) thread 73 tools/perf/arch/csky/util/unwind-libdw.c dwfl_thread_state_register_pc(thread, REG(PC)); thread 75 tools/perf/arch/csky/util/unwind-libdw.c return dwfl_thread_state_registers(thread, 0, PERF_REG_CSKY_MAX, thread 6 tools/perf/arch/powerpc/include/arch-tests.h struct thread; thread 9 tools/perf/arch/powerpc/include/arch-tests.h struct thread *thread); thread 15 tools/perf/arch/powerpc/tests/dwarf-unwind.c struct thread *thread, u64 *regs) thread 30 tools/perf/arch/powerpc/tests/dwarf-unwind.c map = map_groups__find(thread->mg, (u64)sp); thread 47 tools/perf/arch/powerpc/tests/dwarf-unwind.c struct thread *thread) thread 63 tools/perf/arch/powerpc/tests/dwarf-unwind.c return sample_ustack(sample, thread, buf); thread 242 tools/perf/arch/powerpc/util/skip-callchain-idx.c int arch_skip_callchain_idx(struct thread *thread, 
struct ip_callchain *chain) thread 255 tools/perf/arch/powerpc/util/skip-callchain-idx.c thread__find_symbol(thread, PERF_RECORD_MISC_USER, ip, &al); thread 15 tools/perf/arch/powerpc/util/unwind-libdw.c bool libdw__arch_set_initial_registers(Dwfl_Thread *thread, void *arg) thread 60 tools/perf/arch/powerpc/util/unwind-libdw.c if (!dwfl_thread_state_registers(thread, 0, 32, dwarf_regs)) thread 64 tools/perf/arch/powerpc/util/unwind-libdw.c dwfl_thread_state_register_pc(thread, dwarf_nip); thread 68 tools/perf/arch/powerpc/util/unwind-libdw.c if (!dwfl_thread_state_registers(thread, thread 9 tools/perf/arch/riscv/util/unwind-libdw.c bool libdw__arch_set_initial_registers(Dwfl_Thread *thread, void *arg) thread 53 tools/perf/arch/riscv/util/unwind-libdw.c dwfl_thread_state_register_pc(thread, REG(PC)); thread 55 tools/perf/arch/riscv/util/unwind-libdw.c return dwfl_thread_state_registers(thread, 0, PERF_REG_RISCV_MAX, thread 9 tools/perf/arch/s390/util/unwind-libdw.c bool libdw__arch_set_initial_registers(Dwfl_Thread *thread, void *arg) thread 61 tools/perf/arch/s390/util/unwind-libdw.c dwfl_thread_state_register_pc(thread, dwarf_regs[65]); thread 62 tools/perf/arch/s390/util/unwind-libdw.c return dwfl_thread_state_registers(thread, 0, 32, dwarf_regs); thread 16 tools/perf/arch/x86/include/arch-tests.h struct thread; thread 19 tools/perf/arch/x86/include/arch-tests.h struct thread *thread); thread 15 tools/perf/arch/x86/tests/dwarf-unwind.c struct thread *thread, u64 *regs) thread 30 tools/perf/arch/x86/tests/dwarf-unwind.c map = map_groups__find(thread->mg, (u64)sp); thread 47 tools/perf/arch/x86/tests/dwarf-unwind.c struct thread *thread) thread 63 tools/perf/arch/x86/tests/dwarf-unwind.c return sample_ustack(sample, thread, buf); thread 10 tools/perf/arch/x86/util/archinsn.c struct thread *thread, thread 19 tools/perf/arch/x86/util/archinsn.c len = thread__memcpy(thread, machine, sample->insn, sample->ip, sizeof(sample->insn), &is64bit); thread 7 tools/perf/arch/x86/util/unwind-libdw.c bool libdw__arch_set_initial_registers(Dwfl_Thread *thread, void *arg) thread 52 tools/perf/arch/x86/util/unwind-libdw.c return dwfl_thread_state_registers(thread, 0, nregs, dwarf_regs); thread 70 tools/perf/bench/epoll-ctl.c pthread_t thread; thread 267 tools/perf/bench/epoll-ctl.c ret = pthread_create(&w->thread, attrp, workerfn, thread 379 tools/perf/bench/epoll-ctl.c ret = pthread_join(worker[i].thread, NULL); thread 122 tools/perf/bench/epoll-wait.c pthread_t thread; thread 356 tools/perf/bench/epoll-wait.c ret = pthread_create(&w->thread, attrp, workerfn, thread 49 tools/perf/bench/futex-hash.c pthread_t thread; thread 178 tools/perf/bench/futex-hash.c ret = pthread_create(&worker[i].thread, &thread_attr, workerfn, thread 196 tools/perf/bench/futex-hash.c ret = pthread_join(worker[i].thread, NULL); thread 29 tools/perf/bench/futex-lock-pi.c pthread_t thread; thread 143 tools/perf/bench/futex-lock-pi.c if (pthread_create(&w[i].thread, &thread_attr, workerfn, &worker[i])) thread 203 tools/perf/bench/futex-lock-pi.c ret = pthread_join(worker[i].thread, NULL); thread 1064 tools/perf/builtin-c2c.c return scnprintf(hpp->buf, hpp->size, "%*d", width, he->thread->pid_); thread 1071 tools/perf/builtin-c2c.c return left->thread->pid_ - right->thread->pid_; thread 433 tools/perf/builtin-inject.c struct thread *thread; thread 435 tools/perf/builtin-inject.c thread = machine__findnew_thread(machine, sample->pid, sample->tid); thread 436 tools/perf/builtin-inject.c if (thread == NULL) { thread 442 
tools/perf/builtin-inject.c if (thread__find_map(thread, sample->cpumode, sample->ip, &al)) { thread 461 tools/perf/builtin-inject.c thread__put(thread); thread 398 tools/perf/builtin-kmem.c al.thread = machine__findnew_thread(machine, sample->pid, sample->tid); thread 948 tools/perf/builtin-kmem.c struct thread *thread = machine__findnew_thread(machine, sample->pid, thread 951 tools/perf/builtin-kmem.c if (thread == NULL) { thread 960 tools/perf/builtin-kmem.c dump_printf(" ... thread: %s:%d\n", thread__comm_str(thread), thread->tid); thread 967 tools/perf/builtin-kmem.c thread__put(thread); thread 404 tools/perf/builtin-kvm.c struct vcpu_event_record *per_vcpu_record(struct thread *thread, thread 409 tools/perf/builtin-kvm.c if (!thread__priv(thread) && kvm_entry_event(evsel)) { thread 420 tools/perf/builtin-kvm.c thread__set_priv(thread, vcpu_record); thread 423 tools/perf/builtin-kvm.c return thread__priv(thread); thread 427 tools/perf/builtin-kvm.c struct thread *thread, thread 435 tools/perf/builtin-kvm.c vcpu_record = per_vcpu_record(thread, evsel, sample); thread 685 tools/perf/builtin-kvm.c struct thread *thread; thread 692 tools/perf/builtin-kvm.c thread = machine__findnew_thread(machine, sample->pid, sample->tid); thread 693 tools/perf/builtin-kvm.c if (thread == NULL) { thread 699 tools/perf/builtin-kvm.c if (!handle_kvm_event(kvm, thread, evsel, sample)) thread 702 tools/perf/builtin-kvm.c thread__put(thread); thread 767 tools/perf/builtin-lock.c struct thread *t; thread 820 tools/perf/builtin-lock.c struct thread *thread = machine__findnew_thread(machine, sample->pid, thread 823 tools/perf/builtin-lock.c if (thread == NULL) { thread 834 tools/perf/builtin-lock.c thread__put(thread); thread 699 tools/perf/builtin-report.c struct thread *thread; thread 706 tools/perf/builtin-report.c struct thread *parent_thread, *thread = task->thread; thread 714 tools/perf/builtin-report.c if (thread->ppid == -1) thread 717 tools/perf/builtin-report.c parent_thread = machine__find_thread(machine, -1, thread->ppid); thread 754 tools/perf/builtin-report.c struct thread *thread = task->thread; thread 757 tools/perf/builtin-report.c thread->pid_, thread->tid, thread->ppid, thread 760 tools/perf/builtin-report.c fprintf(fp, "%s\n", thread__comm_str(thread)); thread 762 tools/perf/builtin-report.c map_groups__fprintf_task(thread->mg, comm_indent, fp); thread 799 tools/perf/builtin-report.c task->thread = rb_entry(nd, struct thread, rb_node); thread 802 tools/perf/builtin-report.c thread__set_priv(task->thread, task); thread 65 tools/perf/builtin-sched.c pthread_t thread; thread 127 tools/perf/builtin-sched.c struct thread *thread; thread 192 tools/perf/builtin-sched.c struct thread *curr_thread[MAX_CPUS]; thread 280 tools/perf/builtin-sched.c struct thread *last_thread; thread 287 tools/perf/builtin-sched.c static struct thread **idle_threads; thread 685 tools/perf/builtin-sched.c err = pthread_create(&task->thread, &attr, thread_func, parms); thread 878 tools/perf/builtin-sched.c struct thread *child, *parent; thread 914 tools/perf/builtin-sched.c static struct thread_runtime *thread__init_runtime(struct thread *thread) thread 923 tools/perf/builtin-sched.c thread__set_priv(thread, r); thread 928 tools/perf/builtin-sched.c static struct thread_runtime *thread__get_runtime(struct thread *thread) thread 932 tools/perf/builtin-sched.c tr = thread__priv(thread); thread 934 tools/perf/builtin-sched.c tr = thread__init_runtime(thread); thread 960 tools/perf/builtin-sched.c thread_atoms_search(struct 
rb_root_cached *root, struct thread *thread, thread 964 tools/perf/builtin-sched.c struct work_atoms key = { .thread = thread }; thread 978 tools/perf/builtin-sched.c BUG_ON(thread != atoms->thread); thread 1013 tools/perf/builtin-sched.c static int thread_atoms_insert(struct perf_sched *sched, struct thread *thread) thread 1021 tools/perf/builtin-sched.c atoms->thread = thread__get(thread); thread 1110 tools/perf/builtin-sched.c struct thread *sched_out, *sched_in; thread 1178 tools/perf/builtin-sched.c struct thread *thread = machine__findnew_thread(machine, -1, pid); thread 1179 tools/perf/builtin-sched.c struct work_atoms *atoms = thread_atoms_search(&sched->atom_root, thread, &sched->cmp_pid); thread 1183 tools/perf/builtin-sched.c if (thread == NULL) thread 1188 tools/perf/builtin-sched.c if (thread_atoms_insert(sched, thread)) thread 1190 tools/perf/builtin-sched.c atoms = thread_atoms_search(&sched->atom_root, thread, &sched->cmp_pid); thread 1202 tools/perf/builtin-sched.c thread__put(thread); thread 1214 tools/perf/builtin-sched.c struct thread *wakee; thread 1276 tools/perf/builtin-sched.c struct thread *migrant; thread 1329 tools/perf/builtin-sched.c if (!strcmp(thread__comm_str(work_list->thread), "swapper")) thread 1336 tools/perf/builtin-sched.c ret = printf(" %s:(%d) ", thread__comm_str(work_list->thread), work_list->num_merged); thread 1338 tools/perf/builtin-sched.c ret = printf(" %s:%d ", thread__comm_str(work_list->thread), work_list->thread->tid); thread 1355 tools/perf/builtin-sched.c if (l->thread == r->thread) thread 1357 tools/perf/builtin-sched.c if (l->thread->tid < r->thread->tid) thread 1359 tools/perf/builtin-sched.c if (l->thread->tid > r->thread->tid) thread 1361 tools/perf/builtin-sched.c return (int)(l->thread - r->thread); thread 1496 tools/perf/builtin-sched.c static bool thread__has_color(struct thread *thread) thread 1499 tools/perf/builtin-sched.c .ptr = thread__priv(thread), thread 1505 tools/perf/builtin-sched.c static struct thread* thread 1508 tools/perf/builtin-sched.c struct thread *thread = machine__findnew_thread(machine, pid, tid); thread 1513 tools/perf/builtin-sched.c if (!sched->map.color_pids || !thread || thread__priv(thread)) thread 1514 tools/perf/builtin-sched.c return thread; thread 1519 tools/perf/builtin-sched.c thread__set_priv(thread, priv.ptr); thread 1520 tools/perf/builtin-sched.c return thread; thread 1527 tools/perf/builtin-sched.c struct thread *sched_in; thread 1606 tools/perf/builtin-sched.c struct thread *curr_thread = sched->curr_thread[cpu]; thread 1758 tools/perf/builtin-sched.c struct thread *thread; thread 1766 tools/perf/builtin-sched.c thread = machine__find_thread(machine, sample->pid, sample->tid); thread 1767 tools/perf/builtin-sched.c if (!thread) { thread 1772 tools/perf/builtin-sched.c tr = thread__get_runtime(thread); thread 1774 tools/perf/builtin-sched.c thread__put(thread); thread 1779 tools/perf/builtin-sched.c thread__put(thread); thread 1902 tools/perf/builtin-sched.c static char *timehist_get_commstr(struct thread *thread) thread 1905 tools/perf/builtin-sched.c const char *comm = thread__comm_str(thread); thread 1906 tools/perf/builtin-sched.c pid_t tid = thread->tid; thread 1907 tools/perf/builtin-sched.c pid_t pid = thread->pid_; thread 1984 tools/perf/builtin-sched.c static char task_state_char(struct thread *thread, int state) thread 1990 tools/perf/builtin-sched.c if (thread->tid == 0) thread 2000 tools/perf/builtin-sched.c struct thread *thread, thread 2003 tools/perf/builtin-sched.c struct 
thread_runtime *tr = thread__priv(thread); thread 2022 tools/perf/builtin-sched.c c = (thread->tid == 0) ? 'i' : 's'; thread 2030 tools/perf/builtin-sched.c printf(" %-*s ", comm_width, timehist_get_commstr(thread)); thread 2039 tools/perf/builtin-sched.c printf(" %5c ", task_state_char(thread, state)); thread 2049 tools/perf/builtin-sched.c if (thread->tid == 0) thread 2145 tools/perf/builtin-sched.c struct thread *thread; thread 2148 tools/perf/builtin-sched.c thread = machine__findnew_thread(machine, sample->pid, sample->pid); thread 2149 tools/perf/builtin-sched.c if (thread == NULL) { thread 2157 tools/perf/builtin-sched.c if (thread__resolve_callchain(thread, cursor, evsel, sample, thread 2187 tools/perf/builtin-sched.c static int init_idle_thread(struct thread *thread) thread 2191 tools/perf/builtin-sched.c thread__set_comm(thread, idle_comm, 0); thread 2200 tools/perf/builtin-sched.c thread__set_priv(thread, itr); thread 2213 tools/perf/builtin-sched.c idle_threads = zalloc(ncpu * sizeof(struct thread *)); thread 2248 tools/perf/builtin-sched.c static struct thread *get_idle_thread(int cpu) thread 2258 tools/perf/builtin-sched.c p = realloc(idle_threads, j * sizeof(struct thread *)); thread 2262 tools/perf/builtin-sched.c idle_threads = (struct thread **) p; thread 2292 tools/perf/builtin-sched.c static struct thread *timehist_get_thread(struct perf_sched *sched, thread 2297 tools/perf/builtin-sched.c struct thread *thread; thread 2300 tools/perf/builtin-sched.c thread = get_idle_thread(sample->cpu); thread 2301 tools/perf/builtin-sched.c if (thread == NULL) thread 2306 tools/perf/builtin-sched.c thread = machine__findnew_thread(machine, sample->pid, thread 2308 tools/perf/builtin-sched.c if (thread == NULL) { thread 2315 tools/perf/builtin-sched.c struct thread *idle; thread 2328 tools/perf/builtin-sched.c itr->last_thread = thread; thread 2336 tools/perf/builtin-sched.c return thread; thread 2340 tools/perf/builtin-sched.c struct thread *thread, thread 2346 tools/perf/builtin-sched.c if (thread__is_filtered(thread)) { thread 2366 tools/perf/builtin-sched.c struct thread *awakened) thread 2368 tools/perf/builtin-sched.c struct thread *thread; thread 2371 tools/perf/builtin-sched.c thread = machine__findnew_thread(machine, sample->pid, sample->tid); thread 2372 tools/perf/builtin-sched.c if (thread == NULL) thread 2376 tools/perf/builtin-sched.c if (timehist_skip_sample(sched, thread, evsel, sample) && thread 2386 tools/perf/builtin-sched.c printf(" %-*s ", comm_width, timehist_get_commstr(thread)); thread 2403 tools/perf/builtin-sched.c struct thread *thread; thread 2408 tools/perf/builtin-sched.c thread = machine__findnew_thread(machine, 0, pid); thread 2409 tools/perf/builtin-sched.c if (thread == NULL) thread 2412 tools/perf/builtin-sched.c tr = thread__get_runtime(thread); thread 2422 tools/perf/builtin-sched.c timehist_print_wakeup_event(sched, evsel, sample, machine, thread); thread 2431 tools/perf/builtin-sched.c struct thread *migrated) thread 2433 tools/perf/builtin-sched.c struct thread *thread; thread 2445 tools/perf/builtin-sched.c thread = machine__findnew_thread(machine, sample->pid, sample->tid); thread 2446 tools/perf/builtin-sched.c if (thread == NULL) thread 2449 tools/perf/builtin-sched.c if (timehist_skip_sample(sched, thread, evsel, sample) && thread 2469 tools/perf/builtin-sched.c printf(" %-*s ", comm_width, timehist_get_commstr(thread)); thread 2487 tools/perf/builtin-sched.c struct thread *thread; thread 2492 tools/perf/builtin-sched.c thread = 
machine__findnew_thread(machine, 0, pid); thread 2493 tools/perf/builtin-sched.c if (thread == NULL) thread 2496 tools/perf/builtin-sched.c tr = thread__get_runtime(thread); thread 2503 tools/perf/builtin-sched.c timehist_print_migration_event(sched, evsel, sample, machine, thread); thread 2517 tools/perf/builtin-sched.c struct thread *thread; thread 2531 tools/perf/builtin-sched.c thread = timehist_get_thread(sched, sample, machine, evsel); thread 2532 tools/perf/builtin-sched.c if (thread == NULL) { thread 2537 tools/perf/builtin-sched.c if (timehist_skip_sample(sched, thread, evsel, sample)) thread 2540 tools/perf/builtin-sched.c tr = thread__get_runtime(thread); thread 2573 tools/perf/builtin-sched.c if (!sched->idle_hist || thread->tid == 0) { thread 2580 tools/perf/builtin-sched.c BUG_ON(thread->tid != 0); thread 2610 tools/perf/builtin-sched.c timehist_print_sample(sched, evsel, sample, &al, thread, t, state); thread 2658 tools/perf/builtin-sched.c static void print_thread_runtime(struct thread *t, thread 2681 tools/perf/builtin-sched.c static void print_thread_waittime(struct thread *t, thread 2706 tools/perf/builtin-sched.c static int __show_thread_runtime(struct thread *t, void *priv) thread 2729 tools/perf/builtin-sched.c static int show_thread_runtime(struct thread *t, void *priv) thread 2737 tools/perf/builtin-sched.c static int show_deadthread_runtime(struct thread *t, void *priv) thread 2805 tools/perf/builtin-sched.c struct thread *t; thread 3087 tools/perf/builtin-sched.c const char *comm = thread__comm_str(data->thread), *this_comm; thread 3096 tools/perf/builtin-sched.c this_comm = thread__comm_str(this->thread); thread 3162 tools/perf/builtin-sched.c thread__zput(work_list->thread); thread 625 tools/perf/builtin-script.c struct thread *thread, thread 636 tools/perf/builtin-script.c printed += fprintf(fp, "%8.8s ", thread__comm_str(thread)); thread 638 tools/perf/builtin-script.c printed += fprintf(fp, "%s ", thread__comm_str(thread)); thread 640 tools/perf/builtin-script.c printed += fprintf(fp, "%16s ", thread__comm_str(thread)); thread 734 tools/perf/builtin-script.c struct thread *thread, thread 752 tools/perf/builtin-script.c thread__find_map_fb(thread, sample->cpumode, from, &alf); thread 753 tools/perf/builtin-script.c thread__find_map_fb(thread, sample->cpumode, to, &alt); thread 781 tools/perf/builtin-script.c struct thread *thread, thread 799 tools/perf/builtin-script.c thread__find_symbol_fb(thread, sample->cpumode, from, &alf); thread 800 tools/perf/builtin-script.c thread__find_symbol_fb(thread, sample->cpumode, to, &alt); thread 826 tools/perf/builtin-script.c struct thread *thread, thread 844 tools/perf/builtin-script.c if (thread__find_map_fb(thread, sample->cpumode, from, &alf) && thread 848 tools/perf/builtin-script.c if (thread__find_map_fb(thread, sample->cpumode, to, &alt) && thread 876 tools/perf/builtin-script.c struct machine *machine, struct thread *thread, thread 912 tools/perf/builtin-script.c if (!thread__find_map(thread, *cpumode, start, &al) || !al.map->dso) { thread 935 tools/perf/builtin-script.c static int print_srccode(struct thread *thread, u8 cpumode, uint64_t addr) thread 941 tools/perf/builtin-script.c thread__find_map(thread, cpumode, addr, &al); thread 945 tools/perf/builtin-script.c &thread->srccode_state); thread 970 tools/perf/builtin-script.c static int ip__fprintf_sym(uint64_t addr, struct thread *thread, thread 979 tools/perf/builtin-script.c thread__find_map(thread, cpumode, addr, &al); thread 1009 
tools/perf/builtin-script.c struct thread *thread, thread 1028 tools/perf/builtin-script.c x.thread = thread; thread 1036 tools/perf/builtin-script.c machine, thread, &x.is64bit, &x.cpumode, false); thread 1038 tools/perf/builtin-script.c printed += ip__fprintf_sym(br->entries[nr - 1].from, thread, thread 1043 tools/perf/builtin-script.c printed += print_srccode(thread, x.cpumode, br->entries[nr - 1].from); thread 1055 tools/perf/builtin-script.c len = grab_bb(buffer, start, end, machine, thread, &x.is64bit, &x.cpumode, false); thread 1060 tools/perf/builtin-script.c len = grab_bb(buffer, start, end, machine, thread, &x.is64bit, &x.cpumode, false); thread 1069 tools/perf/builtin-script.c printed += ip__fprintf_sym(ip, thread, x.cpumode, x.cpu, &lastsym, attr, fp); thread 1074 tools/perf/builtin-script.c printed += print_srccode(thread, x.cpumode, ip); thread 1083 tools/perf/builtin-script.c print_srccode(thread, x.cpumode, ip); thread 1114 tools/perf/builtin-script.c len = grab_bb(buffer, start, end, machine, thread, &x.is64bit, &x.cpumode, true); thread 1115 tools/perf/builtin-script.c printed += ip__fprintf_sym(start, thread, x.cpumode, x.cpu, &lastsym, attr, fp); thread 1119 tools/perf/builtin-script.c machine, thread, &x.is64bit, &x.cpumode, false); thread 1125 tools/perf/builtin-script.c print_srccode(thread, x.cpumode, sample->ip); thread 1142 tools/perf/builtin-script.c print_srccode(thread, x.cpumode, start + off); thread 1149 tools/perf/builtin-script.c struct thread *thread, thread 1158 tools/perf/builtin-script.c thread__resolve(thread, &al, sample); thread 1179 tools/perf/builtin-script.c struct thread *thread, thread 1189 tools/perf/builtin-script.c thread__resolve(thread, &addr_al, sample); thread 1208 tools/perf/builtin-script.c struct thread *thread, thread 1212 tools/perf/builtin-script.c size_t depth = thread_stack__depth(thread, sample->cpu); thread 1223 tools/perf/builtin-script.c if (thread->ts && sample->flags & PERF_IP_FLAG_RETURN) thread 1226 tools/perf/builtin-script.c name = resolve_branch_sym(sample, evsel, thread, al, &ip); thread 1256 tools/perf/builtin-script.c struct thread *thread __maybe_unused, thread 1263 tools/perf/builtin-script.c struct thread *thread, thread 1269 tools/perf/builtin-script.c arch_fetch_insn(sample, thread, machine); thread 1281 tools/perf/builtin-script.c printed += perf_sample__fprintf_brstackinsn(sample, thread, attr, machine, fp); thread 1302 tools/perf/builtin-script.c struct thread *thread, thread 1312 tools/perf/builtin-script.c printed += perf_sample__fprintf_callindent(sample, evsel, thread, al, fp); thread 1320 tools/perf/builtin-script.c thread__resolve_callchain(al->thread, &callchain_cursor, evsel, thread 1342 tools/perf/builtin-script.c printed += perf_sample__fprintf_addr(sample, thread, attr, fp); thread 1350 tools/perf/builtin-script.c printed += perf_sample__fprintf_insn(sample, attr, thread, machine, fp); thread 1354 tools/perf/builtin-script.c &thread->srccode_state); thread 1687 tools/perf/builtin-script.c struct thread *thread; thread 1701 tools/perf/builtin-script.c perf_sample__fprintf_start(mctx->sample, mctx->thread, mctx->evsel, thread 1716 tools/perf/builtin-script.c perf_sample__fprintf_start(mctx->sample, mctx->thread, mctx->evsel, thread 1722 tools/perf/builtin-script.c struct thread *thread, thread 1732 tools/perf/builtin-script.c .thread = thread, thread 1766 tools/perf/builtin-script.c struct thread *thread, thread 1769 tools/perf/builtin-script.c int depth = thread_stack__depth(thread, sample->cpu); 
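The tools/perf hits around here (builtin-inject.c, builtin-kmem.c, builtin-sched.c, builtin-script.c) keep repeating one idiom: resolve a struct thread for a sample with machine__findnew_thread(), check for NULL, use it, then drop the reference with thread__put(). The sketch below only illustrates that reference-counting pattern; the include paths and the helper name dump_sample_thread() are assumptions, and it builds only against perf's in-tree headers rather than as a standalone program.

/*
 * Minimal sketch of the lookup-and-release idiom seen in the perf hits
 * above.  dump_sample_thread() is a made-up name; the include paths are
 * assumed perf-internal headers.
 */
#include "util/debug.h"		/* dump_printf() */
#include "util/event.h"		/* struct perf_sample */
#include "util/machine.h"	/* machine__findnew_thread() */
#include "util/thread.h"	/* thread__comm_str(), thread__put() */

static int dump_sample_thread(struct machine *machine,
			      struct perf_sample *sample)
{
	struct thread *thread;

	/* Find (or create) the thread that produced this sample. */
	thread = machine__findnew_thread(machine, sample->pid, sample->tid);
	if (thread == NULL)
		return -1;

	dump_printf(" ... thread: %s:%d\n",
		    thread__comm_str(thread), thread->tid);

	/* Pair every successful *__findnew_thread() with a put. */
	thread__put(thread);
	return 0;
}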
thread 1774 tools/perf/builtin-script.c if (thread->filter) { thread 1775 tools/perf/builtin-script.c if (depth <= thread->filter_entry_depth) { thread 1776 tools/perf/builtin-script.c thread->filter = false; thread 1783 tools/perf/builtin-script.c const char *name = resolve_branch_sym(sample, evsel, thread, al, thread 1793 tools/perf/builtin-script.c thread->filter = true; thread 1794 tools/perf/builtin-script.c thread->filter_entry_depth = depth; thread 1810 tools/perf/builtin-script.c struct thread *thread = al->thread; thread 1819 tools/perf/builtin-script.c if (!show_event(sample, evsel, thread, al)) thread 1827 tools/perf/builtin-script.c perf_sample__fprintf_start(sample, thread, evsel, thread 1846 tools/perf/builtin-script.c perf_sample__fprintf_bts(sample, evsel, thread, al, machine, fp); thread 1859 tools/perf/builtin-script.c perf_sample__fprintf_addr(sample, thread, attr, fp); thread 1871 tools/perf/builtin-script.c thread__resolve_callchain(al->thread, &callchain_cursor, evsel, thread 1887 tools/perf/builtin-script.c perf_sample__fprintf_brstack(sample, thread, attr, fp); thread 1889 tools/perf/builtin-script.c perf_sample__fprintf_brstacksym(sample, thread, attr, fp); thread 1891 tools/perf/builtin-script.c perf_sample__fprintf_brstackoff(sample, thread, attr, fp); thread 1895 tools/perf/builtin-script.c perf_sample__fprintf_insn(sample, attr, thread, machine, fp); thread 1906 tools/perf/builtin-script.c &thread->srccode_state)) thread 1911 tools/perf/builtin-script.c perf_sample__fprint_metric(script, thread, evsel, sample, fp); thread 1923 tools/perf/builtin-script.c int cpu, thread; thread 1935 tools/perf/builtin-script.c for (thread = 0; thread < nthreads; thread++) { thread 1939 tools/perf/builtin-script.c counts = perf_counts(counter->counts, cpu, thread); thread 1943 tools/perf/builtin-script.c perf_thread_map__pid(counter->core.threads, thread), thread 2090 tools/perf/builtin-script.c struct thread *thread; thread 2096 tools/perf/builtin-script.c thread = machine__findnew_thread(machine, event->comm.pid, event->comm.tid); thread 2097 tools/perf/builtin-script.c if (thread == NULL) { thread 2112 tools/perf/builtin-script.c perf_sample__fprintf_start(sample, thread, evsel, thread 2118 tools/perf/builtin-script.c thread__put(thread); thread 2127 tools/perf/builtin-script.c struct thread *thread; thread 2133 tools/perf/builtin-script.c thread = machine__findnew_thread(machine, event->namespaces.pid, thread 2135 tools/perf/builtin-script.c if (thread == NULL) { thread 2150 tools/perf/builtin-script.c perf_sample__fprintf_start(sample, thread, evsel, thread 2156 tools/perf/builtin-script.c thread__put(thread); thread 2165 tools/perf/builtin-script.c struct thread *thread; thread 2173 tools/perf/builtin-script.c thread = machine__findnew_thread(machine, event->fork.pid, event->fork.tid); thread 2174 tools/perf/builtin-script.c if (thread == NULL) { thread 2186 tools/perf/builtin-script.c perf_sample__fprintf_start(sample, thread, evsel, thread 2190 tools/perf/builtin-script.c thread__put(thread); thread 2200 tools/perf/builtin-script.c struct thread *thread; thread 2205 tools/perf/builtin-script.c thread = machine__findnew_thread(machine, event->fork.pid, event->fork.tid); thread 2206 tools/perf/builtin-script.c if (thread == NULL) { thread 2218 tools/perf/builtin-script.c perf_sample__fprintf_start(sample, thread, evsel, thread 2226 tools/perf/builtin-script.c thread__put(thread); thread 2235 tools/perf/builtin-script.c struct thread *thread; thread 2243 
tools/perf/builtin-script.c thread = machine__findnew_thread(machine, event->mmap.pid, event->mmap.tid); thread 2244 tools/perf/builtin-script.c if (thread == NULL) { thread 2256 tools/perf/builtin-script.c perf_sample__fprintf_start(sample, thread, evsel, thread 2260 tools/perf/builtin-script.c thread__put(thread); thread 2269 tools/perf/builtin-script.c struct thread *thread; thread 2277 tools/perf/builtin-script.c thread = machine__findnew_thread(machine, event->mmap2.pid, event->mmap2.tid); thread 2278 tools/perf/builtin-script.c if (thread == NULL) { thread 2290 tools/perf/builtin-script.c perf_sample__fprintf_start(sample, thread, evsel, thread 2294 tools/perf/builtin-script.c thread__put(thread); thread 2303 tools/perf/builtin-script.c struct thread *thread; thread 2317 tools/perf/builtin-script.c thread = machine__findnew_thread(machine, sample->pid, thread 2319 tools/perf/builtin-script.c if (thread == NULL) { thread 2325 tools/perf/builtin-script.c perf_sample__fprintf_start(sample, thread, evsel, thread 2329 tools/perf/builtin-script.c thread__put(thread); thread 2342 tools/perf/builtin-script.c struct thread *thread; thread 2344 tools/perf/builtin-script.c thread = machine__findnew_thread(machine, sample->pid, thread 2346 tools/perf/builtin-script.c if (thread == NULL) thread 2350 tools/perf/builtin-script.c perf_sample__fprintf_start(sample, thread, evsel, thread 2354 tools/perf/builtin-script.c thread__put(thread); thread 2374 tools/perf/builtin-script.c struct thread *thread; thread 2387 tools/perf/builtin-script.c thread = machine__findnew_thread(machine, sample->pid, sample->tid); thread 2388 tools/perf/builtin-script.c if (thread == NULL) { thread 2394 tools/perf/builtin-script.c perf_sample__fprintf_start(sample, thread, evsel, thread 2399 tools/perf/builtin-script.c thread__put(thread); thread 241 tools/perf/builtin-stat.c perf_evsel__write_stat_event(struct evsel *counter, u32 cpu, u32 thread, thread 244 tools/perf/builtin-stat.c struct perf_sample_id *sid = SID(counter, cpu, thread); thread 246 tools/perf/builtin-stat.c return perf_event__synthesize_stat(NULL, cpu, thread, sid->id, count, thread 251 tools/perf/builtin-stat.c int thread, struct timespec *rs) thread 256 tools/perf/builtin-stat.c perf_counts(counter->counts, cpu, thread); thread 261 tools/perf/builtin-stat.c return perf_evsel__read_counter(counter, cpu, thread); thread 271 tools/perf/builtin-stat.c int ncpus, cpu, thread; thread 284 tools/perf/builtin-stat.c for (thread = 0; thread < nthreads; thread++) { thread 288 tools/perf/builtin-stat.c count = perf_counts(counter->counts, cpu, thread); thread 294 tools/perf/builtin-stat.c if (!perf_counts__is_loaded(counter->counts, cpu, thread) && thread 295 tools/perf/builtin-stat.c read_single_counter(counter, cpu, thread, rs)) { thread 297 tools/perf/builtin-stat.c perf_counts(counter->counts, cpu, thread)->ena = 0; thread 298 tools/perf/builtin-stat.c perf_counts(counter->counts, cpu, thread)->run = 0; thread 302 tools/perf/builtin-stat.c perf_counts__set_loaded(counter->counts, cpu, thread, false); thread 305 tools/perf/builtin-stat.c if (perf_evsel__write_stat_event(counter, cpu, thread, count)) { thread 534 tools/perf/builtin-timechart.c if (thread__find_symbol(al.thread, cpumode, ip, &tal)) thread 1221 tools/perf/builtin-top.c pthread_t thread, thread_process; thread 1294 tools/perf/builtin-top.c if (pthread_create(&thread, NULL, (use_browser > 0 ? 
display_thread_tui : thread 1331 tools/perf/builtin-top.c pthread_join(thread, NULL); thread 112 tools/perf/builtin-trace.c struct thread *current; thread 1083 tools/perf/builtin-trace.c static struct thread_trace *thread__trace(struct thread *thread, FILE *fp) thread 1087 tools/perf/builtin-trace.c if (thread == NULL) thread 1090 tools/perf/builtin-trace.c if (thread__priv(thread) == NULL) thread 1091 tools/perf/builtin-trace.c thread__set_priv(thread, thread_trace__new()); thread 1093 tools/perf/builtin-trace.c if (thread__priv(thread) == NULL) thread 1096 tools/perf/builtin-trace.c ttrace = thread__priv(thread); thread 1110 tools/perf/builtin-trace.c struct thread_trace *ttrace = thread__priv(arg->thread); thread 1145 tools/perf/builtin-trace.c struct file *thread__files_entry(struct thread *thread, int fd) thread 1147 tools/perf/builtin-trace.c return thread_trace__files_entry(thread__priv(thread), fd); thread 1150 tools/perf/builtin-trace.c static int trace__set_fd_pathname(struct thread *thread, int fd, const char *pathname) thread 1152 tools/perf/builtin-trace.c struct thread_trace *ttrace = thread__priv(thread); thread 1167 tools/perf/builtin-trace.c static int thread__read_fd_path(struct thread *thread, int fd) thread 1173 tools/perf/builtin-trace.c if (thread->pid_ == thread->tid) { thread 1175 tools/perf/builtin-trace.c "/proc/%d/fd/%d", thread->pid_, fd); thread 1178 tools/perf/builtin-trace.c "/proc/%d/task/%d/fd/%d", thread->pid_, thread->tid, fd); thread 1190 tools/perf/builtin-trace.c return trace__set_fd_pathname(thread, fd, pathname); thread 1193 tools/perf/builtin-trace.c static const char *thread__fd_path(struct thread *thread, int fd, thread 1196 tools/perf/builtin-trace.c struct thread_trace *ttrace = thread__priv(thread); thread 1208 tools/perf/builtin-trace.c if (thread__read_fd_path(thread, fd)) thread 1219 tools/perf/builtin-trace.c const char *path = thread__fd_path(arg->thread, fd, arg->trace); thread 1230 tools/perf/builtin-trace.c struct thread *thread = machine__find_thread(trace->host, pid, pid); thread 1232 tools/perf/builtin-trace.c if (thread) { thread 1233 tools/perf/builtin-trace.c const char *path = thread__fd_path(thread, fd, trace); thread 1238 tools/perf/builtin-trace.c thread__put(thread); thread 1249 tools/perf/builtin-trace.c struct thread_trace *ttrace = thread__priv(arg->thread); thread 1257 tools/perf/builtin-trace.c static void thread__set_filename_pos(struct thread *thread, const char *bf, thread 1260 tools/perf/builtin-trace.c struct thread_trace *ttrace = thread__priv(thread); thread 1293 tools/perf/builtin-trace.c thread__set_filename_pos(arg->thread, bf, ptr); thread 1332 tools/perf/builtin-trace.c static size_t trace__fprintf_comm_tid(struct trace *trace, struct thread *thread, FILE *fp) thread 1338 tools/perf/builtin-trace.c printed += fprintf(fp, "%.14s/", thread__comm_str(thread)); thread 1339 tools/perf/builtin-trace.c printed += fprintf(fp, "%d ", thread->tid); thread 1345 tools/perf/builtin-trace.c static size_t trace__fprintf_entry_head(struct trace *trace, struct thread *thread, thread 1354 tools/perf/builtin-trace.c return printed + trace__fprintf_comm_tid(trace, thread, fp); thread 1705 tools/perf/builtin-trace.c struct trace *trace, struct thread *thread) thread 1719 tools/perf/builtin-trace.c .thread = thread, thread 1722 tools/perf/builtin-trace.c struct thread_trace *ttrace = thread__priv(thread); thread 1902 tools/perf/builtin-trace.c struct perf_sample *sample, struct thread *thread) thread 1911 
tools/perf/builtin-trace.c thread__comm_str(thread), thread 1951 tools/perf/builtin-trace.c struct thread *thread; thread 1961 tools/perf/builtin-trace.c thread = machine__findnew_thread(trace->host, sample->pid, sample->tid); thread 1962 tools/perf/builtin-trace.c ttrace = thread__trace(thread, trace->output); thread 1966 tools/perf/builtin-trace.c trace__fprintf_sample(trace, evsel, sample, thread); thread 1995 tools/perf/builtin-trace.c args, augmented_args, augmented_args_size, trace, thread); thread 2001 tools/perf/builtin-trace.c trace__fprintf_entry_head(trace, thread, 0, false, ttrace->entry_time, trace->output); thread 2013 tools/perf/builtin-trace.c if (trace->current != thread) { thread 2015 tools/perf/builtin-trace.c trace->current = thread__get(thread); thread 2019 tools/perf/builtin-trace.c thread__put(thread); thread 2027 tools/perf/builtin-trace.c struct thread *thread; thread 2037 tools/perf/builtin-trace.c thread = machine__findnew_thread(trace->host, sample->pid, sample->tid); thread 2038 tools/perf/builtin-trace.c ttrace = thread__trace(thread, trace->output); thread 2048 tools/perf/builtin-trace.c syscall__scnprintf_args(sc, msg, sizeof(msg), args, augmented_args, augmented_args_size, trace, thread); thread 2052 tools/perf/builtin-trace.c thread__put(thread); thread 2069 tools/perf/builtin-trace.c err = thread__resolve_callchain(al.thread, cursor, evsel, sample, NULL, NULL, max_stack); thread 2099 tools/perf/builtin-trace.c struct thread *thread; thread 2108 tools/perf/builtin-trace.c thread = machine__findnew_thread(trace->host, sample->pid, sample->tid); thread 2109 tools/perf/builtin-trace.c ttrace = thread__trace(thread, trace->output); thread 2113 tools/perf/builtin-trace.c trace__fprintf_sample(trace, evsel, sample, thread); thread 2121 tools/perf/builtin-trace.c trace__set_fd_pathname(thread, ret, ttrace->filename.name); thread 2146 tools/perf/builtin-trace.c trace__fprintf_entry_head(trace, thread, duration, duration_calculated, ttrace->entry_time, trace->output); thread 2185 tools/perf/builtin-trace.c .thread = thread, thread 2194 tools/perf/builtin-trace.c struct thread *child = machine__find_thread(trace->host, ret, ret); thread 2222 tools/perf/builtin-trace.c thread__put(thread); thread 2230 tools/perf/builtin-trace.c struct thread *thread = machine__findnew_thread(trace->host, sample->pid, sample->tid); thread 2237 tools/perf/builtin-trace.c if (!thread) thread 2240 tools/perf/builtin-trace.c ttrace = thread__priv(thread); thread 2282 tools/perf/builtin-trace.c thread__put(thread); thread 2293 tools/perf/builtin-trace.c struct thread *thread = machine__findnew_thread(trace->host, thread 2296 tools/perf/builtin-trace.c struct thread_trace *ttrace = thread__trace(thread, trace->output); thread 2304 tools/perf/builtin-trace.c thread__put(thread); thread 2353 tools/perf/builtin-trace.c struct thread *thread; thread 2364 tools/perf/builtin-trace.c thread = machine__findnew_thread(trace->host, sample->pid, sample->tid); thread 2381 tools/perf/builtin-trace.c if (thread) thread 2382 tools/perf/builtin-trace.c trace__fprintf_comm_tid(trace, thread, trace->output); thread 2429 tools/perf/builtin-trace.c thread__put(thread); thread 2455 tools/perf/builtin-trace.c struct thread *thread; thread 2462 tools/perf/builtin-trace.c thread = machine__findnew_thread(trace->host, sample->pid, sample->tid); thread 2473 tools/perf/builtin-trace.c ttrace = thread__trace(thread, trace->output); thread 2485 tools/perf/builtin-trace.c thread__find_symbol(thread, sample->cpumode, 
sample->ip, &al); thread 2487 tools/perf/builtin-trace.c trace__fprintf_entry_head(trace, thread, 0, true, sample->time, trace->output); thread 2497 tools/perf/builtin-trace.c thread__find_symbol(thread, sample->cpumode, sample->addr, &al); thread 2500 tools/perf/builtin-trace.c thread__find_symbol(thread, sample->cpumode, sample->addr, &al); thread 2521 tools/perf/builtin-trace.c thread__put(thread); thread 2549 tools/perf/builtin-trace.c struct thread *thread; thread 2554 tools/perf/builtin-trace.c thread = machine__findnew_thread(trace->host, sample->pid, sample->tid); thread 2555 tools/perf/builtin-trace.c if (thread && thread__is_filtered(thread)) thread 2565 tools/perf/builtin-trace.c thread__put(thread); thread 3162 tools/perf/builtin-trace.c struct thread *thread = machine__find_thread(trace->host, pids[0], pids[0]); thread 3164 tools/perf/builtin-trace.c while (thread && nr < ARRAY_SIZE(pids)) { thread 3165 tools/perf/builtin-trace.c struct thread *parent = machine__find_thread(trace->host, thread->ppid, thread->ppid); thread 3175 tools/perf/builtin-trace.c thread = parent; thread 3724 tools/perf/builtin-trace.c static size_t trace__fprintf_thread(FILE *fp, struct thread *thread, struct trace *trace) thread 3727 tools/perf/builtin-trace.c struct thread_trace *ttrace = thread__priv(thread); thread 3735 tools/perf/builtin-trace.c printed += fprintf(fp, " %s (%d), ", thread__comm_str(thread), thread->tid); thread 3757 tools/perf/builtin-trace.c DEFINE_RESORT_RB(threads, (thread__nr_events(a->thread->priv) < thread__nr_events(b->thread->priv)), thread 3758 tools/perf/builtin-trace.c struct thread *thread; thread 3761 tools/perf/builtin-trace.c entry->thread = rb_entry(nd, struct thread, rb_node); thread 3779 tools/perf/builtin-trace.c printed += trace__fprintf_thread(fp, threads_entry->thread, trace); thread 186 tools/perf/lib/evlist.c int cpu, int thread, u64 id) thread 189 tools/perf/lib/evlist.c struct perf_sample_id *sid = SID(evsel, cpu, thread); thread 199 tools/perf/lib/evlist.c int cpu, int thread, u64 id) thread 201 tools/perf/lib/evlist.c perf_evlist__id_hash(evlist, evsel, cpu, thread, id); thread 207 tools/perf/lib/evlist.c int cpu, int thread, int fd) thread 242 tools/perf/lib/evlist.c perf_evlist__id_add(evlist, evsel, cpu, thread, id); thread 47 tools/perf/lib/evsel.c int cpu, thread; thread 49 tools/perf/lib/evsel.c for (thread = 0; thread < nthreads; thread++) { thread 50 tools/perf/lib/evsel.c FD(evsel, cpu, thread) = -1; thread 69 tools/perf/lib/evsel.c int cpu, thread, err = 0; thread 100 tools/perf/lib/evsel.c for (thread = 0; thread < threads->nr; thread++) { thread 104 tools/perf/lib/evsel.c threads->map[thread].pid, thread 110 tools/perf/lib/evsel.c FD(evsel, cpu, thread) = fd; thread 119 tools/perf/lib/evsel.c int cpu, thread; thread 122 tools/perf/lib/evsel.c for (thread = 0; thread < xyarray__max_y(evsel->fd); ++thread) { thread 123 tools/perf/lib/evsel.c close(FD(evsel, cpu, thread)); thread 124 tools/perf/lib/evsel.c FD(evsel, cpu, thread) = -1; thread 168 tools/perf/lib/evsel.c int perf_evsel__read(struct perf_evsel *evsel, int cpu, int thread, thread 175 tools/perf/lib/evsel.c if (FD(evsel, cpu, thread) < 0) thread 178 tools/perf/lib/evsel.c if (readn(FD(evsel, cpu, thread), count->values, size) <= 0) thread 187 tools/perf/lib/evsel.c int cpu, thread; thread 190 tools/perf/lib/evsel.c for (thread = 0; thread < xyarray__max_y(evsel->fd); thread++) { thread 191 tools/perf/lib/evsel.c int fd = FD(evsel, cpu, thread), thread 77 
tools/perf/lib/include/internal/evlist.h int cpu, int thread, u64 id); thread 81 tools/perf/lib/include/internal/evlist.h int cpu, int thread, int fd); thread 290 tools/perf/lib/include/perf/event.h __u32 thread; thread 31 tools/perf/lib/include/perf/evsel.h LIBPERF_API int perf_evsel__read(struct perf_evsel *evsel, int cpu, int thread, thread 12 tools/perf/lib/include/perf/threadmap.h LIBPERF_API void perf_thread_map__set_pid(struct perf_thread_map *map, int thread, pid_t pid); thread 13 tools/perf/lib/include/perf/threadmap.h LIBPERF_API char *perf_thread_map__comm(struct perf_thread_map *map, int thread); thread 15 tools/perf/lib/include/perf/threadmap.h LIBPERF_API pid_t perf_thread_map__pid(struct perf_thread_map *map, int thread); thread 35 tools/perf/lib/threadmap.c void perf_thread_map__set_pid(struct perf_thread_map *map, int thread, pid_t pid) thread 37 tools/perf/lib/threadmap.c map->map[thread].pid = pid; thread 40 tools/perf/lib/threadmap.c char *perf_thread_map__comm(struct perf_thread_map *map, int thread) thread 42 tools/perf/lib/threadmap.c return map->map[thread].comm; thread 88 tools/perf/lib/threadmap.c pid_t perf_thread_map__pid(struct perf_thread_map *map, int thread) thread 90 tools/perf/lib/threadmap.c return map->map[thread].pid; thread 236 tools/perf/tests/code-reading.c struct thread *thread, struct state *state) thread 250 tools/perf/tests/code-reading.c if (!thread__find_map(thread, cpumode, addr, &al) || !al.map->dso) { thread 278 tools/perf/tests/code-reading.c ret_len = dso__data_read_offset(al.map->dso, thread->mg->machine, thread 377 tools/perf/tests/code-reading.c struct thread *thread; thread 385 tools/perf/tests/code-reading.c thread = machine__findnew_thread(machine, sample.pid, sample.tid); thread 386 tools/perf/tests/code-reading.c if (!thread) { thread 391 tools/perf/tests/code-reading.c ret = read_object_code(sample.ip, READLEN, sample.cpumode, thread, state); thread 392 tools/perf/tests/code-reading.c thread__put(thread); thread 554 tools/perf/tests/code-reading.c struct thread *thread; thread 622 tools/perf/tests/code-reading.c thread = machine__findnew_thread(machine, pid, pid); thread 623 tools/perf/tests/code-reading.c if (!thread) { thread 714 tools/perf/tests/code-reading.c thread__put(thread); thread 51 tools/perf/tests/dwarf-unwind.c int test_dwarf_unwind__thread(struct thread *thread); thread 53 tools/perf/tests/dwarf-unwind.c int test_dwarf_unwind__krava_3(struct thread *thread); thread 54 tools/perf/tests/dwarf-unwind.c int test_dwarf_unwind__krava_2(struct thread *thread); thread 55 tools/perf/tests/dwarf-unwind.c int test_dwarf_unwind__krava_1(struct thread *thread); thread 97 tools/perf/tests/dwarf-unwind.c noinline int test_dwarf_unwind__thread(struct thread *thread) thread 105 tools/perf/tests/dwarf-unwind.c if (test__arch_unwind_sample(&sample, thread)) { thread 110 tools/perf/tests/dwarf-unwind.c err = unwind__get_entries(unwind_entry, &cnt, thread, thread 131 tools/perf/tests/dwarf-unwind.c struct thread *thread = *(struct thread **)p1; thread 137 tools/perf/tests/dwarf-unwind.c global_unwind_retval = test_dwarf_unwind__thread(thread); thread 140 tools/perf/tests/dwarf-unwind.c global_unwind_retval = test_dwarf_unwind__thread(thread); thread 147 tools/perf/tests/dwarf-unwind.c noinline int test_dwarf_unwind__krava_3(struct thread *thread) thread 149 tools/perf/tests/dwarf-unwind.c struct thread *array[2] = {thread, thread}; thread 161 tools/perf/tests/dwarf-unwind.c _bsearch(array, &thread, 2, sizeof(struct thread **), thread 
166 tools/perf/tests/dwarf-unwind.c noinline int test_dwarf_unwind__krava_2(struct thread *thread) thread 168 tools/perf/tests/dwarf-unwind.c return test_dwarf_unwind__krava_3(thread); thread 171 tools/perf/tests/dwarf-unwind.c noinline int test_dwarf_unwind__krava_1(struct thread *thread) thread 173 tools/perf/tests/dwarf-unwind.c return test_dwarf_unwind__krava_2(thread); thread 179 tools/perf/tests/dwarf-unwind.c struct thread *thread; thread 204 tools/perf/tests/dwarf-unwind.c thread = machine__find_thread(machine, getpid(), getpid()); thread 205 tools/perf/tests/dwarf-unwind.c if (!thread) { thread 210 tools/perf/tests/dwarf-unwind.c err = test_dwarf_unwind__krava_1(thread); thread 211 tools/perf/tests/dwarf-unwind.c thread__put(thread); thread 96 tools/perf/tests/hists_common.c struct thread *thread; thread 98 tools/perf/tests/hists_common.c thread = machine__findnew_thread(machine, fake_threads[i].pid, thread 100 tools/perf/tests/hists_common.c if (thread == NULL) thread 103 tools/perf/tests/hists_common.c thread__set_comm(thread, fake_threads[i].comm, 0); thread 104 tools/perf/tests/hists_common.c thread__put(thread); thread 183 tools/perf/tests/hists_common.c i, thread__comm_str(he->thread), thread 210 tools/perf/tests/hists_common.c i, thread__comm_str(he->thread), he->thread->tid, thread 20 tools/perf/tests/hists_cumulate.c struct thread *thread; thread 114 tools/perf/tests/hists_cumulate.c fake_samples[i].thread = al.thread; thread 152 tools/perf/tests/hists_cumulate.c #define COMM(he) (thread__comm_str(he->thread)) thread 156 tools/perf/tests/hists_cumulate.c #define PID(he) (he->thread->tid) thread 18 tools/perf/tests/hists_filter.c struct thread *thread; thread 91 tools/perf/tests/hists_filter.c fake_samples[i].thread = al.thread; thread 168 tools/perf/tests/hists_filter.c hists->thread_filter = fake_samples[9].thread; thread 290 tools/perf/tests/hists_filter.c hists->thread_filter = fake_samples[1].thread; thread 18 tools/perf/tests/hists_link.c struct thread *thread; thread 96 tools/perf/tests/hists_link.c fake_common_samples[k].thread = al.thread; thread 115 tools/perf/tests/hists_link.c fake_samples[i][k].thread = al.thread; thread 130 tools/perf/tests/hists_link.c struct thread *t, struct map *m, struct symbol *s) thread 133 tools/perf/tests/hists_link.c if (samples->thread == t && samples->map == m && thread 164 tools/perf/tests/hists_link.c he->thread, he->ms.map, he->ms.sym)) { thread 216 tools/perf/tests/hists_link.c he->thread, he->ms.map, he->ms.sym) && thread 219 tools/perf/tests/hists_link.c he->thread, he->ms.map, he->ms.sym)) { thread 21 tools/perf/tests/hists_output.c struct thread *thread; thread 80 tools/perf/tests/hists_output.c fake_samples[i].thread = al.thread; thread 118 tools/perf/tests/hists_output.c #define COMM(he) (thread__comm_str(he->thread)) thread 122 tools/perf/tests/hists_output.c #define PID(he) (he->thread->tid) thread 188 tools/perf/tests/mmap-thread-lookup.c struct thread *thread; thread 190 tools/perf/tests/mmap-thread-lookup.c thread = machine__findnew_thread(machine, getpid(), td->tid); thread 194 tools/perf/tests/mmap-thread-lookup.c thread__find_map(thread, PERF_RECORD_MISC_USER, thread 197 tools/perf/tests/mmap-thread-lookup.c thread__put(thread); thread 72 tools/perf/tests/stat.c TEST_ASSERT_VAL("wrong thread", st->thread == 2); thread 118 tools/perf/tests/tests.h struct thread; thread 121 tools/perf/tests/tests.h struct thread *thread); thread 13 tools/perf/tests/thread-mg-share.c struct thread *leader; thread 14 
tools/perf/tests/thread-mg-share.c struct thread *t1, *t2, *t3; thread 18 tools/perf/tests/thread-mg-share.c struct thread *other, *other_leader; thread 33 tools/perf/trace/beauty/beauty.h struct thread; thread 40 tools/perf/trace/beauty/beauty.h struct file *thread__files_entry(struct thread *thread, int fd); thread 101 tools/perf/trace/beauty/beauty.h struct thread *thread; thread 179 tools/perf/trace/beauty/ioctl.c struct file *file = thread__files_entry(arg->thread, fd); thread 8 tools/perf/trace/beauty/pid.c struct thread *thread = machine__findnew_thread(trace->host, pid, pid); thread 10 tools/perf/trace/beauty/pid.c if (thread != NULL) { thread 11 tools/perf/trace/beauty/pid.c if (!thread->comm_set) thread 12 tools/perf/trace/beauty/pid.c thread__set_comm_from_proc(thread); thread 14 tools/perf/trace/beauty/pid.c if (thread->comm_set) thread 16 tools/perf/trace/beauty/pid.c " (%s)", thread__comm_str(thread)); thread 17 tools/perf/trace/beauty/pid.c thread__put(thread); thread 2222 tools/perf/ui/browsers/hists.c static struct thread *hist_browser__selected_thread(struct hist_browser *browser) thread 2224 tools/perf/ui/browsers/hists.c return browser->he_selection->thread; thread 2358 tools/perf/ui/browsers/hists.c struct thread *thread; thread 2420 tools/perf/ui/browsers/hists.c struct thread *thread = act->thread; thread 2422 tools/perf/ui/browsers/hists.c if ((!hists__has(browser->hists, thread) && thread 2423 tools/perf/ui/browsers/hists.c !hists__has(browser->hists, comm)) || thread == NULL) thread 2432 tools/perf/ui/browsers/hists.c if (hists__has(browser->hists, thread)) { thread 2434 tools/perf/ui/browsers/hists.c thread->comm_set ? thread__comm_str(thread) : "", thread 2435 tools/perf/ui/browsers/hists.c thread->tid); thread 2438 tools/perf/ui/browsers/hists.c thread->comm_set ? thread__comm_str(thread) : ""); thread 2441 tools/perf/ui/browsers/hists.c browser->hists->thread_filter = thread__get(thread); thread 2453 tools/perf/ui/browsers/hists.c char **optstr, struct thread *thread) thread 2457 tools/perf/ui/browsers/hists.c if ((!hists__has(browser->hists, thread) && thread 2458 tools/perf/ui/browsers/hists.c !hists__has(browser->hists, comm)) || thread == NULL) thread 2461 tools/perf/ui/browsers/hists.c if (hists__has(browser->hists, thread)) { thread 2464 tools/perf/ui/browsers/hists.c thread->comm_set ? thread__comm_str(thread) : "", thread 2465 tools/perf/ui/browsers/hists.c thread->tid); thread 2469 tools/perf/ui/browsers/hists.c thread->comm_set ? 
thread__comm_str(thread) : ""); thread 2474 tools/perf/ui/browsers/hists.c act->thread = thread; thread 2554 tools/perf/ui/browsers/hists.c if (act->thread) thread 2555 tools/perf/ui/browsers/hists.c len += strlen(thread__comm_str(act->thread)); thread 2563 tools/perf/ui/browsers/hists.c if (act->thread) { thread 2565 tools/perf/ui/browsers/hists.c thread__comm_str(act->thread)); thread 2604 tools/perf/ui/browsers/hists.c struct thread *thread, struct symbol *sym, thread 2608 tools/perf/ui/browsers/hists.c if (thread) { thread 2610 tools/perf/ui/browsers/hists.c thread__comm_str(thread), tstr) < 0) thread 2621 tools/perf/ui/browsers/hists.c act->thread = thread; thread 2631 tools/perf/ui/browsers/hists.c struct thread *thread, struct symbol *sym, thread 2637 tools/perf/ui/browsers/hists.c n = add_script_opt_2(browser, act, optstr, thread, sym, evsel, ""); thread 2651 tools/perf/ui/browsers/hists.c n += add_script_opt_2(browser, act, optstr, thread, sym, thread 2907 tools/perf/ui/browsers/hists.c struct thread *thread = NULL; thread 2918 tools/perf/ui/browsers/hists.c thread = hist_browser__selected_thread(browser); thread 2963 tools/perf/ui/browsers/hists.c actions->thread = thread; thread 2983 tools/perf/ui/browsers/hists.c actions->thread = NULL; thread 3131 tools/perf/ui/browsers/hists.c &options[nr_options], thread); thread 3146 tools/perf/ui/browsers/hists.c if (hists__has(hists, thread) && thread) { thread 3150 tools/perf/ui/browsers/hists.c thread, NULL, evsel); thread 214 tools/perf/ui/hist.c if (a->thread != b->thread || !hist_entry__has_callchains(a) || !symbol_conf.use_callchain) thread 861 tools/perf/ui/stdio/hist.c map_groups__fprintf(h->thread->mg, fp); thread 6 tools/perf/util/archinsn.h struct thread; thread 9 tools/perf/util/archinsn.h struct thread *thread, thread 46 tools/perf/util/build-id.c struct thread *thread = machine__findnew_thread(machine, sample->pid, thread 49 tools/perf/util/build-id.c if (thread == NULL) { thread 55 tools/perf/util/build-id.c if (thread__find_map(thread, sample->cpumode, sample->ip, &al)) thread 58 tools/perf/util/build-id.c thread__put(thread); thread 68 tools/perf/util/build-id.c struct thread *thread = machine__findnew_thread(machine, thread 75 tools/perf/util/build-id.c if (thread) { thread 76 tools/perf/util/build-id.c machine__remove_thread(machine, thread); thread 77 tools/perf/util/build-id.c thread__put(thread); thread 1093 tools/perf/util/callchain.c return thread__resolve_callchain(al->thread, cursor, evsel, sample, thread 15 tools/perf/util/callchain.h struct thread; thread 266 tools/perf/util/callchain.h int arch_skip_callchain_idx(struct thread *thread, struct ip_callchain *chain); thread 268 tools/perf/util/callchain.h static inline int arch_skip_callchain_idx(struct thread *thread __maybe_unused, thread 21 tools/perf/util/counts.h perf_counts(struct perf_counts *counts, int cpu, int thread) thread 23 tools/perf/util/counts.h return xyarray__entry(counts->values, cpu, thread); thread 27 tools/perf/util/counts.h perf_counts__is_loaded(struct perf_counts *counts, int cpu, int thread) thread 29 tools/perf/util/counts.h return *((bool *) xyarray__entry(counts->loaded, cpu, thread)); thread 33 tools/perf/util/counts.h perf_counts__set_loaded(struct perf_counts *counts, int cpu, int thread, bool loaded) thread 35 tools/perf/util/counts.h *((bool *) xyarray__entry(counts->loaded, cpu, thread)) = loaded; thread 49 tools/perf/util/cs-etm.c struct thread *unknown_thread; thread 75 tools/perf/util/cs-etm.c struct thread *thread; thread 
542 tools/perf/util/cs-etm.c thread__zput(tidq->thread); thread 644 tools/perf/util/cs-etm.c struct thread *thread; thread 658 tools/perf/util/cs-etm.c thread = tidq->thread; thread 659 tools/perf/util/cs-etm.c if (!thread) { thread 662 tools/perf/util/cs-etm.c thread = etmq->etm->unknown_thread; thread 665 tools/perf/util/cs-etm.c if (!thread__find_map(thread, cpumode, address, &al) || !al.map->dso) thread 1045 tools/perf/util/cs-etm.c if ((!tidq->thread) && (tidq->tid != -1)) thread 1046 tools/perf/util/cs-etm.c tidq->thread = machine__find_thread(etm->machine, -1, thread 1049 tools/perf/util/cs-etm.c if (tidq->thread) thread 1050 tools/perf/util/cs-etm.c tidq->pid = tidq->thread->pid_; thread 1072 tools/perf/util/cs-etm.c thread__zput(tidq->thread); thread 2192 tools/perf/util/cs-etm.c struct thread *th; thread 2215 tools/perf/util/cs-etm.c struct thread *th; thread 62 tools/perf/util/db-export.c int db_export__thread(struct db_export *dbe, struct thread *thread, thread 63 tools/perf/util/db-export.c struct machine *machine, struct thread *main_thread) thread 67 tools/perf/util/db-export.c if (thread->db_id) thread 70 tools/perf/util/db-export.c thread->db_id = ++dbe->thread_last_db_id; thread 76 tools/perf/util/db-export.c return dbe->export_thread(dbe, thread, main_thread_db_id, thread 83 tools/perf/util/db-export.c struct thread *thread) thread 88 tools/perf/util/db-export.c return dbe->export_comm(dbe, comm, thread); thread 94 tools/perf/util/db-export.c struct thread *thread) thread 99 tools/perf/util/db-export.c return __db_export__comm(dbe, comm, thread); thread 109 tools/perf/util/db-export.c struct thread *main_thread) thread 134 tools/perf/util/db-export.c struct thread *thread) thread 141 tools/perf/util/db-export.c return dbe->export_comm_thread(dbe, db_id, comm, thread); thread 211 tools/perf/util/db-export.c struct thread *thread, thread 229 tools/perf/util/db-export.c err = thread__resolve_callchain(thread, &callchain_cursor, evsel, thread 290 tools/perf/util/db-export.c static int db_export__threads(struct db_export *dbe, struct thread *thread, thread 291 tools/perf/util/db-export.c struct thread *main_thread, thread 319 tools/perf/util/db-export.c if (thread != main_thread) { thread 324 tools/perf/util/db-export.c bool export_comm_thread = comm && !thread->db_id; thread 326 tools/perf/util/db-export.c err = db_export__thread(dbe, thread, machine, main_thread); thread 331 tools/perf/util/db-export.c err = db_export__comm_thread(dbe, comm, thread); thread 337 tools/perf/util/db-export.c curr_comm = thread__comm(thread); thread 339 tools/perf/util/db-export.c return db_export__comm(dbe, curr_comm, thread); thread 348 tools/perf/util/db-export.c struct thread *thread = al->thread; thread 355 tools/perf/util/db-export.c struct thread *main_thread; thread 367 tools/perf/util/db-export.c main_thread = thread__main_thread(al->machine, thread); thread 369 tools/perf/util/db-export.c err = db_export__threads(dbe, thread, main_thread, al->machine, &comm); thread 384 tools/perf/util/db-export.c thread, sample, thread 396 tools/perf/util/db-export.c thread__resolve(thread, &addr_al, sample); thread 402 tools/perf/util/db-export.c err = thread_stack__process(thread, comm, sample, al, thread 528 tools/perf/util/db-export.c struct thread *thread = machine__find_thread(machine, pid, tid); thread 529 tools/perf/util/db-export.c struct thread *main_thread; thread 532 tools/perf/util/db-export.c if (!thread || !thread->comm_set) thread 535 tools/perf/util/db-export.c *is_idle = 
!thread->pid_ && !thread->tid; thread 537 tools/perf/util/db-export.c main_thread = thread__main_thread(machine, thread); thread 539 tools/perf/util/db-export.c err = db_export__threads(dbe, thread, main_thread, machine, comm_ptr); thread 541 tools/perf/util/db-export.c *db_id = thread->db_id; thread 545 tools/perf/util/db-export.c thread__put(thread); thread 15 tools/perf/util/db-export.h struct thread; thread 44 tools/perf/util/db-export.h int (*export_thread)(struct db_export *dbe, struct thread *thread, thread 47 tools/perf/util/db-export.h struct thread *thread); thread 49 tools/perf/util/db-export.h struct comm *comm, struct thread *thread); thread 84 tools/perf/util/db-export.h int db_export__thread(struct db_export *dbe, struct thread *thread, thread 85 tools/perf/util/db-export.h struct machine *machine, struct thread *main_thread); thread 87 tools/perf/util/db-export.h struct thread *thread); thread 89 tools/perf/util/db-export.h struct thread *main_thread); thread 91 tools/perf/util/db-export.h struct thread *thread); thread 9 tools/perf/util/dump-insn.h struct thread; thread 13 tools/perf/util/dump-insn.h struct thread *thread; thread 457 tools/perf/util/event.c struct map *thread__find_map(struct thread *thread, u8 cpumode, u64 addr, thread 460 tools/perf/util/event.c struct map_groups *mg = thread->mg; thread 465 tools/perf/util/event.c al->thread = thread; thread 522 tools/perf/util/event.c struct map *thread__find_map_fb(struct thread *thread, u8 cpumode, u64 addr, thread 525 tools/perf/util/event.c struct map *map = thread__find_map(thread, cpumode, addr, al); thread 526 tools/perf/util/event.c struct machine *machine = thread->mg->machine; thread 532 tools/perf/util/event.c return thread__find_map(thread, addr_cpumode, addr, al); thread 535 tools/perf/util/event.c struct symbol *thread__find_symbol(struct thread *thread, u8 cpumode, thread 539 tools/perf/util/event.c if (thread__find_map(thread, cpumode, addr, al)) thread 544 tools/perf/util/event.c struct symbol *thread__find_symbol_fb(struct thread *thread, u8 cpumode, thread 548 tools/perf/util/event.c if (thread__find_map_fb(thread, cpumode, addr, al)) thread 560 tools/perf/util/event.c struct thread *thread = machine__findnew_thread(machine, sample->pid, thread 563 tools/perf/util/event.c if (thread == NULL) thread 566 tools/perf/util/event.c dump_printf(" ... 
thread: %s:%d\n", thread__comm_str(thread), thread->tid); thread 567 tools/perf/util/event.c thread__find_map(thread, sample->cpumode, sample->ip, al); thread 572 tools/perf/util/event.c if (thread__is_filtered(thread)) thread 619 tools/perf/util/event.c thread__zput(al->thread); thread 643 tools/perf/util/event.c void thread__resolve(struct thread *thread, struct addr_location *al, thread 646 tools/perf/util/event.c thread__find_map_fb(thread, sample->cpumode, sample->addr, al); thread 354 tools/perf/util/event.h struct thread; thread 358 tools/perf/util/event.h void thread__resolve(struct thread *thread, struct addr_location *al, thread 358 tools/perf/util/evlist.c int thread; thread 364 tools/perf/util/evlist.c for (thread = 0; thread < nr_threads; thread++) { thread 365 tools/perf/util/evlist.c int err = ioctl(FD(evsel, cpu, thread), PERF_EVENT_IOC_ENABLE, 0); thread 374 tools/perf/util/evlist.c int thread) thread 383 tools/perf/util/evlist.c int err = ioctl(FD(evsel, cpu, thread), PERF_EVENT_IOC_ENABLE, 0); thread 428 tools/perf/util/evlist.c int thread) thread 430 tools/perf/util/evlist.c struct perf_sample_id *sid = SID(evsel, cpu, thread); thread 436 tools/perf/util/evlist.c if (!evsel->core.system_wide && evlist->core.threads && thread >= 0) thread 437 tools/perf/util/evlist.c sid->tid = perf_thread_map__pid(evlist->core.threads, thread); thread 635 tools/perf/util/evlist.c int thread, int *_output, int *_output_overwrite) thread 663 tools/perf/util/evlist.c if (evsel->core.system_wide && thread) thread 670 tools/perf/util/evlist.c fd = FD(evsel, cpu, thread); thread 700 tools/perf/util/evlist.c if (perf_evlist__id_add_fd(&evlist->core, &evsel->core, cpu, thread, thread 704 tools/perf/util/evlist.c thread); thread 714 tools/perf/util/evlist.c int cpu, thread; thread 726 tools/perf/util/evlist.c for (thread = 0; thread < nr_threads; thread++) { thread 728 tools/perf/util/evlist.c thread, &output, &output_overwrite)) thread 743 tools/perf/util/evlist.c int thread; thread 747 tools/perf/util/evlist.c for (thread = 0; thread < nr_threads; thread++) { thread 751 tools/perf/util/evlist.c auxtrace_mmap_params__set_idx(&mp->auxtrace_mp, evlist, thread, thread 754 tools/perf/util/evlist.c if (evlist__mmap_per_evsel(evlist, thread, mp, 0, thread, thread 1722 tools/perf/util/evlist.c if (evlist->thread.done) thread 1779 tools/perf/util/evlist.c evlist->thread.done = 0; thread 1780 tools/perf/util/evlist.c if (pthread_create(&evlist->thread.th, NULL, perf_evlist__poll_thread, evlist)) thread 1795 tools/perf/util/evlist.c evlist->thread.done = 1; thread 1796 tools/perf/util/evlist.c pthread_join(evlist->thread.th, NULL); thread 76 tools/perf/util/evlist.h } thread; thread 1266 tools/perf/util/evsel.c void perf_evsel__compute_deltas(struct evsel *evsel, int cpu, int thread, thread 1278 tools/perf/util/evsel.c tmp = *perf_counts(evsel->prev_raw_counts, cpu, thread); thread 1279 tools/perf/util/evsel.c *perf_counts(evsel->prev_raw_counts, cpu, thread) = *count; thread 1307 tools/perf/util/evsel.c perf_evsel__read_one(struct evsel *evsel, int cpu, int thread) thread 1309 tools/perf/util/evsel.c struct perf_counts_values *count = perf_counts(evsel->counts, cpu, thread); thread 1311 tools/perf/util/evsel.c return perf_evsel__read(&evsel->core, cpu, thread, count); thread 1315 tools/perf/util/evsel.c perf_evsel__set_count(struct evsel *counter, int cpu, int thread, thread 1320 tools/perf/util/evsel.c count = perf_counts(counter->counts, cpu, thread); thread 1326 tools/perf/util/evsel.c 
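The util/event.c definitions above (thread__find_map(), thread__find_symbol(), thread__resolve()) are the address-resolution entry points used by the builtins and tests listed earlier. A sketch of the usual lookup-and-release pattern they follow, assuming the in-tree util headers and a machine/sample pair already set up by the caller:

#include "util/machine.h"
#include "util/thread.h"
#include "util/symbol.h"
#include "util/event.h"
#include "util/debug.h"

static void resolve_sample_ip(struct machine *machine, struct perf_sample *sample)
{
	struct addr_location al;
	struct thread *thread;

	/* find or create the thread; this takes a reference */
	thread = machine__findnew_thread(machine, sample->pid, sample->tid);
	if (thread == NULL)
		return;

	/* fills al.map/al.sym (and al.thread) when the ip can be resolved */
	if (thread__find_symbol(thread, sample->cpumode, sample->ip, &al))
		pr_debug("%s:%d -> %s\n", thread__comm_str(thread),
			 thread->tid, al.sym->name);

	/* drop the reference taken by machine__findnew_thread() */
	thread__put(thread);
}

This mirrors the builtin-trace and build-id call sites in the listing: every machine__findnew_thread() is paired with a thread__put() once the addr_location has been consumed.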
perf_counts__set_loaded(counter->counts, cpu, thread, true); thread 1331 tools/perf/util/evsel.c int cpu, int thread, u64 *data) thread 1350 tools/perf/util/evsel.c perf_evsel__set_count(leader, cpu, thread, thread 1360 tools/perf/util/evsel.c perf_evsel__set_count(counter, cpu, thread, thread 1368 tools/perf/util/evsel.c perf_evsel__read_group(struct evsel *leader, int cpu, int thread) thread 1389 tools/perf/util/evsel.c if (FD(leader, cpu, thread) < 0) thread 1392 tools/perf/util/evsel.c if (readn(FD(leader, cpu, thread), data, size) <= 0) thread 1395 tools/perf/util/evsel.c return perf_evsel__process_group_data(leader, cpu, thread, data); thread 1398 tools/perf/util/evsel.c int perf_evsel__read_counter(struct evsel *evsel, int cpu, int thread) thread 1403 tools/perf/util/evsel.c return perf_evsel__read_group(evsel, cpu, thread); thread 1405 tools/perf/util/evsel.c return perf_evsel__read_one(evsel, cpu, thread); thread 1409 tools/perf/util/evsel.c int cpu, int thread, bool scale) thread 1414 tools/perf/util/evsel.c if (FD(evsel, cpu, thread) < 0) thread 1417 tools/perf/util/evsel.c if (evsel->counts == NULL && perf_evsel__alloc_counts(evsel, cpu + 1, thread + 1) < 0) thread 1420 tools/perf/util/evsel.c if (readn(FD(evsel, cpu, thread), &count, nv * sizeof(u64)) <= 0) thread 1423 tools/perf/util/evsel.c perf_evsel__compute_deltas(evsel, cpu, thread, &count); thread 1425 tools/perf/util/evsel.c *perf_counts(evsel->counts, cpu, thread) = count; thread 1429 tools/perf/util/evsel.c static int get_group_fd(struct evsel *evsel, int cpu, int thread) thread 1443 tools/perf/util/evsel.c fd = FD(leader, cpu, thread); thread 1454 tools/perf/util/evsel.c for (int thread = thread_idx; thread < nr_threads - 1; thread++) thread 1455 tools/perf/util/evsel.c FD(pos, cpu, thread) = FD(pos, cpu, thread + 1); thread 1485 tools/perf/util/evsel.c int thread, int err) thread 1487 tools/perf/util/evsel.c pid_t ignore_pid = perf_thread_map__pid(threads, thread); thread 1508 tools/perf/util/evsel.c if (update_fds(evsel, nr_cpus, cpu, threads->nr, thread)) thread 1511 tools/perf/util/evsel.c if (thread_map__remove(threads, thread)) thread 1575 tools/perf/util/evsel.c int cpu, thread, nthreads; thread 1652 tools/perf/util/evsel.c for (thread = 0; thread < nthreads; thread++) { thread 1656 tools/perf/util/evsel.c pid = perf_thread_map__pid(threads, thread); thread 1658 tools/perf/util/evsel.c group_fd = get_group_fd(evsel, cpu, thread); thread 1665 tools/perf/util/evsel.c FD(evsel, cpu, thread) = fd; thread 1670 tools/perf/util/evsel.c if (ignore_missing_thread(evsel, cpus->nr, cpu, threads, thread, err)) { thread 1677 tools/perf/util/evsel.c thread--; thread 1748 tools/perf/util/evsel.c if (err != -EINVAL || cpu > 0 || thread > 0) thread 1813 tools/perf/util/evsel.c threads->err_thread = thread; thread 1816 tools/perf/util/evsel.c while (--thread >= 0) { thread 1817 tools/perf/util/evsel.c close(FD(evsel, cpu, thread)); thread 1818 tools/perf/util/evsel.c FD(evsel, cpu, thread) = -1; thread 1820 tools/perf/util/evsel.c thread = nthreads; thread 2521 tools/perf/util/evsel.c int cpu, thread; thread 2524 tools/perf/util/evsel.c for (thread = 0; thread < xyarray__max_y(evsel->core.fd); thread 2525 tools/perf/util/evsel.c thread++) { thread 2526 tools/perf/util/evsel.c int fd = FD(evsel, cpu, thread); thread 2529 tools/perf/util/evsel.c cpu, thread, fd) < 0) thread 140 tools/perf/util/evsel.h void perf_evsel__compute_deltas(struct evsel *evsel, int cpu, int thread, thread 269 tools/perf/util/evsel.h int 
perf_evsel__read_counter(struct evsel *evsel, int cpu, int thread); thread 272 tools/perf/util/evsel.h int cpu, int thread, bool scale); thread 282 tools/perf/util/evsel.h int cpu, int thread) thread 284 tools/perf/util/evsel.h return __perf_evsel__read_on_cpu(evsel, cpu, thread, false); thread 295 tools/perf/util/evsel.h int cpu, int thread) thread 297 tools/perf/util/evsel.h return __perf_evsel__read_on_cpu(evsel, cpu, thread, true); thread 99 tools/perf/util/hist.c len = thread__comm_len(h->thread); thread 475 tools/perf/util/hist.c thread__get(he->thread); thread 683 tools/perf/util/hist.c struct namespaces *ns = thread__namespaces(al->thread); thread 685 tools/perf/util/hist.c .thread = al->thread, thread 686 tools/perf/util/hist.c .comm = thread__comm(al->thread), thread 1061 tools/perf/util/hist.c .thread = al->thread, thread 1062 tools/perf/util/hist.c .comm = thread__comm(al->thread), thread 1243 tools/perf/util/hist.c thread__zput(he->thread); thread 2056 tools/perf/util/hist.c he->thread != hists->thread_filter) { thread 2630 tools/perf/util/hist.c struct thread *thread = hists->thread_filter; thread 2682 tools/perf/util/hist.c if (thread) { thread 2683 tools/perf/util/hist.c if (hists__has(hists, thread)) { thread 2686 tools/perf/util/hist.c (thread->comm_set ? thread__comm_str(thread) : ""), thread 2687 tools/perf/util/hist.c thread->tid); thread 2691 tools/perf/util/hist.c (thread->comm_set ? thread__comm_str(thread) : "")); thread 75 tools/perf/util/hist.h struct thread; thread 87 tools/perf/util/hist.h struct thread *thread_filter; thread 294 tools/perf/util/hist.h int thread; thread 321 tools/perf/util/intel-bts.c struct thread *thread; thread 327 tools/perf/util/intel-bts.c thread = machine__find_thread(machine, -1, btsq->tid); thread 328 tools/perf/util/intel-bts.c if (!thread) thread 331 tools/perf/util/intel-bts.c len = thread__memcpy(thread, machine, buf, ip, INTEL_PT_INSN_BUF_SZ, &x86_64); thread 340 tools/perf/util/intel-bts.c thread__put(thread); thread 408 tools/perf/util/intel-bts.c struct thread *thread) thread 431 tools/perf/util/intel-bts.c thread_stack__event(thread, btsq->cpu, btsq->sample_flags, thread 449 tools/perf/util/intel-bts.c struct thread *thread; thread 456 tools/perf/util/intel-bts.c thread = machine__find_thread(btsq->bts->machine, -1, thread 458 tools/perf/util/intel-bts.c if (thread) thread 459 tools/perf/util/intel-bts.c btsq->pid = thread->pid_; thread 461 tools/perf/util/intel-bts.c thread = machine__findnew_thread(btsq->bts->machine, btsq->pid, thread 500 tools/perf/util/intel-bts.c !btsq->bts->synth_opts.thread_stack && thread && thread 503 tools/perf/util/intel-bts.c thread_stack__set_trace_nr(thread, btsq->cpu, buffer->buffer_nr + 1); thread 505 tools/perf/util/intel-bts.c err = intel_bts_process_buffer(btsq, buffer, thread); thread 518 tools/perf/util/intel-bts.c thread__put(thread); thread 61 tools/perf/util/intel-pt.c struct thread *unknown_thread; thread 158 tools/perf/util/intel-pt.c struct thread *thread; thread 521 tools/perf/util/intel-pt.c struct thread *thread; thread 538 tools/perf/util/intel-pt.c thread = ptq->thread; thread 539 tools/perf/util/intel-pt.c if (!thread) { thread 542 tools/perf/util/intel-pt.c thread = ptq->pt->unknown_thread; thread 546 tools/perf/util/intel-pt.c if (!thread__find_map(thread, cpumode, *ip, &al) || !al.map->dso) thread 685 tools/perf/util/intel-pt.c struct thread *thread; thread 695 tools/perf/util/intel-pt.c thread = ptq->thread; thread 696 tools/perf/util/intel-pt.c if (!thread) thread 699 
tools/perf/util/intel-pt.c if (!thread__find_map(thread, cpumode, ip, &al) || !al.map->dso) thread 961 tools/perf/util/intel-pt.c thread__zput(ptq->thread); thread 977 tools/perf/util/intel-pt.c thread__zput(ptq->thread); thread 980 tools/perf/util/intel-pt.c if (!ptq->thread && ptq->tid != -1) thread 981 tools/perf/util/intel-pt.c ptq->thread = machine__find_thread(pt->machine, -1, ptq->tid); thread 983 tools/perf/util/intel-pt.c if (ptq->thread) { thread 984 tools/perf/util/intel-pt.c ptq->pid = ptq->thread->pid_; thread 986 tools/perf/util/intel-pt.c ptq->cpu = ptq->thread->cpu; thread 1324 tools/perf/util/intel-pt.c thread_stack__sample(ptq->thread, ptq->cpu, ptq->chain, thread 1752 tools/perf/util/intel-pt.c thread_stack__sample(ptq->thread, ptq->cpu, ptq->chain, thread 1972 tools/perf/util/intel-pt.c thread_stack__event(ptq->thread, ptq->cpu, ptq->flags, state->from_ip, thread 1976 tools/perf/util/intel-pt.c thread_stack__set_trace_nr(ptq->thread, ptq->cpu, state->trace_nr); thread 43 tools/perf/util/machine.c static void __machine__remove_thread(struct machine *machine, struct thread *th, bool lock); thread 109 tools/perf/util/machine.c struct thread *thread = machine__findnew_thread(machine, -1, thread 113 tools/perf/util/machine.c if (thread == NULL) thread 117 tools/perf/util/machine.c thread__set_comm(thread, comm, 0); thread 118 tools/perf/util/machine.c thread__put(thread); thread 198 tools/perf/util/machine.c struct thread *t = rb_entry(nd, struct thread, rb_node); thread 224 tools/perf/util/machine.c struct thread *thread, *n; thread 233 tools/perf/util/machine.c list_for_each_entry_safe(thread, n, &threads->dead, node) thread 234 tools/perf/util/machine.c list_del_init(&thread->node); thread 394 tools/perf/util/machine.c struct thread *th, pid_t pid) thread 396 tools/perf/util/machine.c struct thread *leader; thread 445 tools/perf/util/machine.c static struct thread* thread 449 tools/perf/util/machine.c struct thread *th; thread 464 tools/perf/util/machine.c static struct thread* thread 468 tools/perf/util/machine.c struct thread *th = NULL; thread 477 tools/perf/util/machine.c __threads__set_last_match(struct threads *threads, struct thread *th) thread 483 tools/perf/util/machine.c threads__set_last_match(struct threads *threads, struct thread *th) thread 493 tools/perf/util/machine.c static struct thread *____machine__findnew_thread(struct machine *machine, thread 500 tools/perf/util/machine.c struct thread *th; thread 509 tools/perf/util/machine.c th = rb_entry(parent, struct thread, rb_node); thread 558 tools/perf/util/machine.c struct thread *__machine__findnew_thread(struct machine *machine, pid_t pid, pid_t tid) thread 563 tools/perf/util/machine.c struct thread *machine__findnew_thread(struct machine *machine, pid_t pid, thread 567 tools/perf/util/machine.c struct thread *th; thread 575 tools/perf/util/machine.c struct thread *machine__find_thread(struct machine *machine, pid_t pid, thread 579 tools/perf/util/machine.c struct thread *th; thread 588 tools/perf/util/machine.c struct thread *thread) thread 591 tools/perf/util/machine.c return thread__exec_comm(thread); thread 593 tools/perf/util/machine.c return thread__comm(thread); thread 599 tools/perf/util/machine.c struct thread *thread = machine__findnew_thread(machine, thread 611 tools/perf/util/machine.c if (thread == NULL || thread 612 tools/perf/util/machine.c __thread__set_comm(thread, event->comm.comm, sample->time, exec)) { thread 617 tools/perf/util/machine.c thread__put(thread); thread 626 
tools/perf/util/machine.c struct thread *thread = machine__findnew_thread(machine, thread 642 tools/perf/util/machine.c if (thread == NULL || thread 643 tools/perf/util/machine.c thread__set_namespaces(thread, sample->time, &event->namespaces)) { thread 648 tools/perf/util/machine.c thread__put(thread); thread 870 tools/perf/util/machine.c struct thread *pos = rb_entry(nd, struct thread, rb_node); thread 1652 tools/perf/util/machine.c struct thread *thread; thread 1667 tools/perf/util/machine.c thread = machine__findnew_thread(machine, event->mmap2.pid, thread 1669 tools/perf/util/machine.c if (thread == NULL) thread 1679 tools/perf/util/machine.c event->mmap2.filename, thread); thread 1684 tools/perf/util/machine.c ret = thread__insert_map(thread, map); thread 1688 tools/perf/util/machine.c thread__put(thread); thread 1695 tools/perf/util/machine.c thread__put(thread); thread 1704 tools/perf/util/machine.c struct thread *thread; thread 1720 tools/perf/util/machine.c thread = machine__findnew_thread(machine, event->mmap.pid, thread 1722 tools/perf/util/machine.c if (thread == NULL) thread 1732 tools/perf/util/machine.c thread); thread 1737 tools/perf/util/machine.c ret = thread__insert_map(thread, map); thread 1741 tools/perf/util/machine.c thread__put(thread); thread 1748 tools/perf/util/machine.c thread__put(thread); thread 1754 tools/perf/util/machine.c static void __machine__remove_thread(struct machine *machine, struct thread *th, bool lock) thread 1787 tools/perf/util/machine.c void machine__remove_thread(struct machine *machine, struct thread *th) thread 1795 tools/perf/util/machine.c struct thread *thread = machine__find_thread(machine, thread 1798 tools/perf/util/machine.c struct thread *parent = machine__findnew_thread(machine, thread 1823 tools/perf/util/machine.c if (thread != NULL) { thread 1824 tools/perf/util/machine.c machine__remove_thread(machine, thread); thread 1825 tools/perf/util/machine.c thread__put(thread); thread 1828 tools/perf/util/machine.c thread = machine__findnew_thread(machine, event->fork.pid, thread 1847 tools/perf/util/machine.c if (thread == NULL || parent == NULL || thread 1848 tools/perf/util/machine.c thread__fork(thread, parent, sample->time, do_maps_clone) < 0) { thread 1852 tools/perf/util/machine.c thread__put(thread); thread 1861 tools/perf/util/machine.c struct thread *thread = machine__find_thread(machine, thread 1868 tools/perf/util/machine.c if (thread != NULL) { thread 1869 tools/perf/util/machine.c thread__exited(thread); thread 1870 tools/perf/util/machine.c thread__put(thread); thread 1924 tools/perf/util/machine.c static void ip__resolve_ams(struct thread *thread, thread 1938 tools/perf/util/machine.c thread__find_cpumode_addr_location(thread, ip, &al); thread 1947 tools/perf/util/machine.c static void ip__resolve_data(struct thread *thread, thread 1955 tools/perf/util/machine.c thread__find_symbol(thread, m, addr, &al); thread 1972 tools/perf/util/machine.c ip__resolve_ams(al->thread, &mi->iaddr, sample->ip); thread 1973 tools/perf/util/machine.c ip__resolve_data(al->thread, al->cpumode, &mi->daddr, thread 2005 tools/perf/util/machine.c static int add_callchain_ip(struct thread *thread, thread 2024 tools/perf/util/machine.c thread__find_cpumode_addr_location(thread, ip, &al); thread 2049 tools/perf/util/machine.c thread__find_symbol(thread, *cpumode, ip, &al); thread 2090 tools/perf/util/machine.c ip__resolve_ams(al->thread, &bi[i].to, bs->entries[i].to); thread 2091 tools/perf/util/machine.c ip__resolve_ams(al->thread, &bi[i].from, 
bs->entries[i].from); thread 2168 tools/perf/util/machine.c static int resolve_lbr_callchain_sample(struct thread *thread, thread 2241 tools/perf/util/machine.c err = add_callchain_ip(thread, cursor, parent, thread 2254 tools/perf/util/machine.c static int find_prev_cpumode(struct ip_callchain *chain, struct thread *thread, thread 2266 tools/perf/util/machine.c err = add_callchain_ip(thread, cursor, parent, thread 2275 tools/perf/util/machine.c static int thread__resolve_callchain_sample(struct thread *thread, thread 2295 tools/perf/util/machine.c err = resolve_lbr_callchain_sample(thread, cursor, sample, parent, thread 2305 tools/perf/util/machine.c skip_idx = arch_skip_callchain_idx(thread, chain); thread 2357 tools/perf/util/machine.c err = add_callchain_ip(thread, cursor, parent, thread 2364 tools/perf/util/machine.c err = add_callchain_ip(thread, cursor, parent, root_al, thread 2382 tools/perf/util/machine.c err = find_prev_cpumode(chain, thread, cursor, parent, root_al, thread 2404 tools/perf/util/machine.c err = find_prev_cpumode(chain, thread, cursor, parent, thread 2411 tools/perf/util/machine.c err = add_callchain_ip(thread, cursor, parent, thread 2481 tools/perf/util/machine.c static int thread__resolve_callchain_unwind(struct thread *thread, thread 2498 tools/perf/util/machine.c thread, sample, max_stack); thread 2501 tools/perf/util/machine.c int thread__resolve_callchain(struct thread *thread, thread 2514 tools/perf/util/machine.c ret = thread__resolve_callchain_sample(thread, cursor, thread 2520 tools/perf/util/machine.c ret = thread__resolve_callchain_unwind(thread, cursor, thread 2524 tools/perf/util/machine.c ret = thread__resolve_callchain_unwind(thread, cursor, thread 2529 tools/perf/util/machine.c ret = thread__resolve_callchain_sample(thread, cursor, thread 2539 tools/perf/util/machine.c int (*fn)(struct thread *thread, void *p), thread 2544 tools/perf/util/machine.c struct thread *thread; thread 2552 tools/perf/util/machine.c thread = rb_entry(nd, struct thread, rb_node); thread 2553 tools/perf/util/machine.c rc = fn(thread, priv); thread 2558 tools/perf/util/machine.c list_for_each_entry(thread, &threads->dead, node) { thread 2559 tools/perf/util/machine.c rc = fn(thread, priv); thread 2568 tools/perf/util/machine.c int (*fn)(struct thread *thread, void *p), thread 2601 tools/perf/util/machine.c struct thread *thread; thread 2625 tools/perf/util/machine.c thread = machine__findnew_thread(machine, pid, tid); thread 2626 tools/perf/util/machine.c if (!thread) thread 2629 tools/perf/util/machine.c thread->cpu = cpu; thread 2630 tools/perf/util/machine.c thread__put(thread); thread 18 tools/perf/util/machine.h struct thread; thread 37 tools/perf/util/machine.h struct thread *last_match; thread 106 tools/perf/util/machine.h struct thread *machine__find_thread(struct machine *machine, pid_t pid, thread 109 tools/perf/util/machine.h struct thread *thread); thread 168 tools/perf/util/machine.h void machine__remove_thread(struct machine *machine, struct thread *th); thread 177 tools/perf/util/machine.h int thread__resolve_callchain(struct thread *thread, thread 202 tools/perf/util/machine.h struct thread *__machine__findnew_thread(struct machine *machine, pid_t pid, pid_t tid); thread 203 tools/perf/util/machine.h struct thread *machine__findnew_thread(struct machine *machine, pid_t pid, pid_t tid); thread 248 tools/perf/util/machine.h int (*fn)(struct thread *thread, void *p), thread 251 tools/perf/util/machine.h int (*fn)(struct thread *thread, void *p), thread 151 
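machine.c also carries the thread iterators whose callback type shows up at lines 2539/2568 above and in the machine.h prototypes at 248/251. A small sketch of that callback pattern, assuming the util headers; dump_one_thread()/dump_all_threads() are illustrative names:

#include <stdio.h>
#include "util/machine.h"
#include "util/thread.h"

/* callback: returning non-zero stops the walk early */
static int dump_one_thread(struct thread *thread, void *priv)
{
	FILE *fp = priv;

	thread__fprintf(thread, fp);
	return 0;
}

static int dump_all_threads(struct machine *machine, FILE *fp)
{
	/* visits every tracked thread, including exited ones kept on the dead list */
	return machine__for_each_thread(machine, dump_one_thread, fp);
}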
tools/perf/util/map.c struct thread *thread) thread 173 tools/perf/util/map.c nsi = nsinfo__get(thread->nsinfo); thread 198 tools/perf/util/map.c dso = machine__findnew_vdso(machine, thread); thread 883 tools/perf/util/map.c int map_groups__clone(struct thread *thread, struct map_groups *parent) thread 885 tools/perf/util/map.c struct map_groups *mg = thread->mg; thread 82 tools/perf/util/map.h struct thread; thread 116 tools/perf/util/map.h char *filename, struct thread *thread); thread 15 tools/perf/util/map_groups.h struct thread; thread 62 tools/perf/util/map_groups.h int map_groups__clone(struct thread *thread, struct map_groups *parent); thread 273 tools/perf/util/scripting-engines/trace-event-perl.c if (thread__resolve_callchain(al->thread, &callchain_cursor, evsel, thread 342 tools/perf/util/scripting-engines/trace-event-perl.c struct thread *thread = al->thread; thread 352 tools/perf/util/scripting-engines/trace-event-perl.c const char *comm = thread__comm_str(thread); thread 407 tools/perf/util/scripting-engines/trace-event-python.c if (thread__resolve_callchain(al->thread, &callchain_cursor, evsel, thread 464 tools/perf/util/scripting-engines/trace-event-python.c struct thread *thread) thread 501 tools/perf/util/scripting-engines/trace-event-python.c thread__find_map_fb(thread, sample->cpumode, thread 507 tools/perf/util/scripting-engines/trace-event-python.c thread__find_map_fb(thread, sample->cpumode, thread 561 tools/perf/util/scripting-engines/trace-event-python.c struct thread *thread) thread 583 tools/perf/util/scripting-engines/trace-event-python.c thread__find_symbol_fb(thread, sample->cpumode, thread 589 tools/perf/util/scripting-engines/trace-event-python.c thread__find_symbol_fb(thread, sample->cpumode, thread 769 tools/perf/util/scripting-engines/trace-event-python.c _PyUnicode_FromString(thread__comm_str(al->thread))); thread 781 tools/perf/util/scripting-engines/trace-event-python.c brstack = python_process_brstack(sample, al->thread); thread 784 tools/perf/util/scripting-engines/trace-event-python.c brstacksym = python_process_brstacksym(sample, al->thread); thread 807 tools/perf/util/scripting-engines/trace-event-python.c const char *comm = thread__comm_str(al->thread); thread 994 tools/perf/util/scripting-engines/trace-event-python.c static int python_export_thread(struct db_export *dbe, struct thread *thread, thread 1002 tools/perf/util/scripting-engines/trace-event-python.c tuple_set_u64(t, 0, thread->db_id); thread 1005 tools/perf/util/scripting-engines/trace-event-python.c tuple_set_s32(t, 3, thread->pid_); thread 1006 tools/perf/util/scripting-engines/trace-event-python.c tuple_set_s32(t, 4, thread->tid); thread 1016 tools/perf/util/scripting-engines/trace-event-python.c struct thread *thread) thread 1025 tools/perf/util/scripting-engines/trace-event-python.c tuple_set_u64(t, 2, thread->db_id); thread 1037 tools/perf/util/scripting-engines/trace-event-python.c struct comm *comm, struct thread *thread) thread 1046 tools/perf/util/scripting-engines/trace-event-python.c tuple_set_u64(t, 2, thread->db_id); thread 1131 tools/perf/util/scripting-engines/trace-event-python.c tuple_set_u64(t, 3, es->al->thread->db_id); thread 1220 tools/perf/util/scripting-engines/trace-event-python.c tuple_set_u64(t, 1, cr->thread->db_id); thread 1356 tools/perf/util/scripting-engines/trace-event-python.c process_stat(struct evsel *counter, int cpu, int thread, u64 tstamp, thread 1377 tools/perf/util/scripting-engines/trace-event-python.c PyTuple_SetItem(t, n++, 
_PyLong_FromLong(thread)); thread 1397 tools/perf/util/scripting-engines/trace-event-python.c int cpu, thread; thread 1405 tools/perf/util/scripting-engines/trace-event-python.c for (thread = 0; thread < threads->nr; thread++) { thread 1408 tools/perf/util/scripting-engines/trace-event-python.c perf_thread_map__pid(threads, thread), tstamp, thread 1409 tools/perf/util/scripting-engines/trace-event-python.c perf_counts(counter->counts, cpu, thread)); thread 1548 tools/perf/util/scripting-engines/trace-event-python.c SET_TABLE_HANDLER(thread); thread 895 tools/perf/util/session.c event->stat.thread = bswap_32(event->stat.thread); thread 1692 tools/perf/util/session.c struct thread *perf_session__findnew(struct perf_session *session, pid_t pid) thread 1706 tools/perf/util/session.c struct thread *thread; thread 1709 tools/perf/util/session.c thread = machine__findnew_thread(&session->machines.host, 0, 0); thread 1710 tools/perf/util/session.c if (thread == NULL || thread__set_comm(thread, "swapper", 0)) { thread 1715 tools/perf/util/session.c if (thread == NULL || thread__set_namespaces(thread, 0, NULL)) { thread 1721 tools/perf/util/session.c thread__put(thread); thread 1835 tools/perf/util/session.c static int perf_session__flush_thread_stack(struct thread *thread, thread 1838 tools/perf/util/session.c return thread_stack__flush(thread); thread 18 tools/perf/util/session.h struct thread; thread 77 tools/perf/util/session.h struct thread *thread, thread 101 tools/perf/util/session.h struct thread *perf_session__findnew(struct perf_session *session, pid_t pid); thread 94 tools/perf/util/sort.c return right->thread->tid - left->thread->tid; thread 100 tools/perf/util/sort.c const char *comm = thread__comm_str(he->thread); thread 103 tools/perf/util/sort.c return repsep_snprintf(bf, size, "%7d:%-*.*s", he->thread->tid, thread 109 tools/perf/util/sort.c const struct thread *th = arg; thread 114 tools/perf/util/sort.c return th && he->thread != th; thread 1247 tools/perf/util/sort.c if (left->thread->pid_ > right->thread->pid_) return -1; thread 1248 tools/perf/util/sort.c if (left->thread->pid_ < right->thread->pid_) return 1; thread 1850 tools/perf/util/sort.c MK_SORT_ENTRY_CHK(thread) thread 2610 tools/perf/util/sort.c list->thread = 1; thread 15 tools/perf/util/sort.h struct thread; thread 97 tools/perf/util/sort.h struct thread *thread; thread 700 tools/perf/util/stat-display.c int cpu, thread, i = 0; thread 708 tools/perf/util/stat-display.c for (thread = 0; thread < nthreads; thread++) { thread 712 tools/perf/util/stat-display.c val += perf_counts(counter->counts, cpu, thread)->val; thread 713 tools/perf/util/stat-display.c ena += perf_counts(counter->counts, cpu, thread)->ena; thread 714 tools/perf/util/stat-display.c run += perf_counts(counter->counts, cpu, thread)->run; thread 727 tools/perf/util/stat-display.c buf[i].id = thread; thread 750 tools/perf/util/stat-display.c int thread, sorted_threads, id; thread 759 tools/perf/util/stat-display.c for (thread = 0; thread < sorted_threads; thread++) { thread 763 tools/perf/util/stat-display.c id = buf[thread].id; thread 765 tools/perf/util/stat-display.c printout(config, id, 0, buf[thread].counter, buf[thread].uval, thread 766 tools/perf/util/stat-display.c prefix, buf[thread].run, buf[thread].ena, 1.0, thread 769 tools/perf/util/stat-display.c printout(config, id, 0, buf[thread].counter, buf[thread].uval, thread 770 tools/perf/util/stat-display.c prefix, buf[thread].run, buf[thread].ena, 1.0, thread 282 tools/perf/util/stat.c int cpu, int 
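The stat-display.c hits at lines 700-770 above aggregate a counter's values per thread by summing over CPUs before printing. The core of that loop, reduced to a sketch (ncpus/nthreads would come from the evsel's cpu and thread maps):

#include <inttypes.h>
#include <linux/types.h>
#include "util/evsel.h"
#include "util/counts.h"
#include "util/debug.h"

static void sum_counts_per_thread(struct evsel *counter, int ncpus, int nthreads)
{
	int cpu, thread;

	for (thread = 0; thread < nthreads; thread++) {
		u64 val = 0, ena = 0, run = 0;

		/* fold all CPUs of this thread index into one value/enabled/running triple */
		for (cpu = 0; cpu < ncpus; cpu++) {
			val += perf_counts(counter->counts, cpu, thread)->val;
			ena += perf_counts(counter->counts, cpu, thread)->ena;
			run += perf_counts(counter->counts, cpu, thread)->run;
		}
		pr_debug("thread index %d: val %" PRIu64 ", enabled %" PRIu64 ", running %" PRIu64 "\n",
			 thread, val, ena, run);
	}
}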
thread, thread 304 tools/perf/util/stat.c perf_evsel__compute_deltas(evsel, cpu, thread, count); thread 314 tools/perf/util/stat.c count->val, 0, &config->stats[thread]); thread 337 tools/perf/util/stat.c int cpu, thread; thread 342 tools/perf/util/stat.c for (thread = 0; thread < nthreads; thread++) { thread 344 tools/perf/util/stat.c if (process_counter_values(config, counter, cpu, thread, thread 345 tools/perf/util/stat.c perf_counts(counter->counts, cpu, thread))) thread 420 tools/perf/util/stat.c *perf_counts(counter->counts, st->cpu, st->thread) = count; thread 431 tools/perf/util/stat.c st->id, st->cpu, st->thread); thread 121 tools/perf/util/symbol.h struct thread *thread; thread 567 tools/perf/util/synthetic-events.c int err = -1, thread, j; thread 588 tools/perf/util/synthetic-events.c for (thread = 0; thread < threads->nr; ++thread) { thread 591 tools/perf/util/synthetic-events.c perf_thread_map__pid(threads, thread), 0, thread 602 tools/perf/util/synthetic-events.c if ((int) comm_event->comm.pid != perf_thread_map__pid(threads, thread)) { thread 1089 tools/perf/util/synthetic-events.c u32 cpu, u32 thread, u64 id, thread 1102 tools/perf/util/synthetic-events.c event.thread = thread; thread 52 tools/perf/util/synthetic-events.h int perf_event__synthesize_stat(struct perf_tool *tool, u32 cpu, u32 thread, u64 id, struct perf_counts_values *count, perf_event__handler_t process, struct machine *machine); thread 105 tools/perf/util/thread-stack.c static inline bool thread_stack__per_cpu(struct thread *thread) thread 107 tools/perf/util/thread-stack.c return !(thread->tid || thread->pid_); thread 128 tools/perf/util/thread-stack.c static int thread_stack__init(struct thread_stack *ts, struct thread *thread, thread 137 tools/perf/util/thread-stack.c if (thread->mg && thread->mg->machine) { thread 138 tools/perf/util/thread-stack.c struct machine *machine = thread->mg->machine; thread 152 tools/perf/util/thread-stack.c static struct thread_stack *thread_stack__new(struct thread *thread, int cpu, thread 155 tools/perf/util/thread-stack.c struct thread_stack *ts = thread->ts, *new_ts; thread 159 tools/perf/util/thread-stack.c if (thread_stack__per_cpu(thread) && cpu > 0) thread 169 tools/perf/util/thread-stack.c zfree(&thread->ts); thread 170 tools/perf/util/thread-stack.c thread->ts = new_ts; thread 174 tools/perf/util/thread-stack.c if (thread_stack__per_cpu(thread) && cpu > 0 && thread 179 tools/perf/util/thread-stack.c thread_stack__init(ts, thread, crp)) thread 185 tools/perf/util/thread-stack.c static struct thread_stack *thread__cpu_stack(struct thread *thread, int cpu) thread 187 tools/perf/util/thread-stack.c struct thread_stack *ts = thread->ts; thread 203 tools/perf/util/thread-stack.c static inline struct thread_stack *thread__stack(struct thread *thread, thread 206 tools/perf/util/thread-stack.c if (!thread) thread 209 tools/perf/util/thread-stack.c if (thread_stack__per_cpu(thread)) thread 210 tools/perf/util/thread-stack.c return thread__cpu_stack(thread, cpu); thread 212 tools/perf/util/thread-stack.c return thread->ts; thread 275 tools/perf/util/thread-stack.c static int thread_stack__call_return(struct thread *thread, thread 282 tools/perf/util/thread-stack.c .thread = thread, thread 315 tools/perf/util/thread-stack.c static int __thread_stack__flush(struct thread *thread, struct thread_stack *ts) thread 326 tools/perf/util/thread-stack.c err = thread_stack__call_return(thread, ts, --ts->cnt, thread 338 tools/perf/util/thread-stack.c int thread_stack__flush(struct thread 
*thread) thread 340 tools/perf/util/thread-stack.c struct thread_stack *ts = thread->ts; thread 346 tools/perf/util/thread-stack.c int ret = __thread_stack__flush(thread, ts + pos); thread 356 tools/perf/util/thread-stack.c int thread_stack__event(struct thread *thread, int cpu, u32 flags, u64 from_ip, thread 359 tools/perf/util/thread-stack.c struct thread_stack *ts = thread__stack(thread, cpu); thread 361 tools/perf/util/thread-stack.c if (!thread) thread 365 tools/perf/util/thread-stack.c ts = thread_stack__new(thread, cpu, NULL); thread 380 tools/perf/util/thread-stack.c __thread_stack__flush(thread, ts); thread 415 tools/perf/util/thread-stack.c void thread_stack__set_trace_nr(struct thread *thread, int cpu, u64 trace_nr) thread 417 tools/perf/util/thread-stack.c struct thread_stack *ts = thread__stack(thread, cpu); thread 424 tools/perf/util/thread-stack.c __thread_stack__flush(thread, ts); thread 429 tools/perf/util/thread-stack.c static void __thread_stack__free(struct thread *thread, struct thread_stack *ts) thread 431 tools/perf/util/thread-stack.c __thread_stack__flush(thread, ts); thread 435 tools/perf/util/thread-stack.c static void thread_stack__reset(struct thread *thread, struct thread_stack *ts) thread 439 tools/perf/util/thread-stack.c __thread_stack__free(thread, ts); thread 444 tools/perf/util/thread-stack.c void thread_stack__free(struct thread *thread) thread 446 tools/perf/util/thread-stack.c struct thread_stack *ts = thread->ts; thread 451 tools/perf/util/thread-stack.c __thread_stack__free(thread, ts + pos); thread 452 tools/perf/util/thread-stack.c zfree(&thread->ts); thread 461 tools/perf/util/thread-stack.c void thread_stack__sample(struct thread *thread, int cpu, thread 465 tools/perf/util/thread-stack.c struct thread_stack *ts = thread__stack(thread, cpu); thread 561 tools/perf/util/thread-stack.c static int thread_stack__pop_cp(struct thread *thread, struct thread_stack *ts, thread 574 tools/perf/util/thread-stack.c return thread_stack__call_return(thread, ts, --ts->cnt, thread 580 tools/perf/util/thread-stack.c return thread_stack__call_return(thread, ts, --ts->cnt, thread 591 tools/perf/util/thread-stack.c err = thread_stack__call_return(thread, ts, thread 598 tools/perf/util/thread-stack.c return thread_stack__call_return(thread, ts, --ts->cnt, thread 633 tools/perf/util/thread-stack.c static int thread_stack__pop_ks(struct thread *thread, struct thread_stack *ts, thread 641 tools/perf/util/thread-stack.c err = thread_stack__call_return(thread, ts, --ts->cnt, thread 650 tools/perf/util/thread-stack.c static int thread_stack__no_call_return(struct thread *thread, thread 669 tools/perf/util/thread-stack.c err = thread_stack__pop_ks(thread, ts, sample, ref); thread 681 tools/perf/util/thread-stack.c err = thread_stack__pop_ks(thread, ts, sample, ref); thread 698 tools/perf/util/thread-stack.c err = thread_stack__call_return(thread, ts, --ts->cnt, thread 741 tools/perf/util/thread-stack.c return thread_stack__call_return(thread, ts, --ts->cnt, tm, ref, false); thread 744 tools/perf/util/thread-stack.c static int thread_stack__trace_begin(struct thread *thread, thread 757 tools/perf/util/thread-stack.c err = thread_stack__call_return(thread, ts, --ts->cnt, thread 850 tools/perf/util/thread-stack.c int thread_stack__process(struct thread *thread, struct comm *comm, thread 856 tools/perf/util/thread-stack.c struct thread_stack *ts = thread__stack(thread, sample->cpu); thread 862 tools/perf/util/thread-stack.c thread_stack__reset(thread, ts); thread 867 
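The thread-stack.c hits above show how the hardware-trace decoders (intel-pt, intel-bts, cs-etm) drain and release per-thread call stacks. A teardown sketch using only the entry points whose prototypes appear in full in the thread-stack.h lines that follow; release_thread_stack() is an illustrative name:

#include "util/thread.h"
#include "util/thread-stack.h"
#include "util/debug.h"

static void release_thread_stack(struct thread *thread, int cpu)
{
	size_t depth = thread_stack__depth(thread, cpu);

	if (depth)
		pr_debug("%s:%d still has %zu entries on its call stack\n",
			 thread__comm_str(thread), thread->tid, depth);

	/* emit call/return pairs for whatever is still pushed ... */
	thread_stack__flush(thread);
	/* ... then free every per-cpu stack hanging off the thread */
	thread_stack__free(thread);
}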
tools/perf/util/thread-stack.c ts = thread_stack__new(thread, sample->cpu, crp); thread 878 tools/perf/util/thread-stack.c if (ts->comm != comm && thread->pid_ == thread->tid) { thread 879 tools/perf/util/thread-stack.c err = __thread_stack__flush(thread, ts); thread 934 tools/perf/util/thread-stack.c return thread_stack__pop_ks(thread, ts, sample, ref); thread 945 tools/perf/util/thread-stack.c err = thread_stack__pop_cp(thread, ts, sample->addr, thread 950 tools/perf/util/thread-stack.c err = thread_stack__no_call_return(thread, ts, sample, thread 954 tools/perf/util/thread-stack.c err = thread_stack__trace_begin(thread, ts, sample->time, ref); thread 981 tools/perf/util/thread-stack.c size_t thread_stack__depth(struct thread *thread, int cpu) thread 983 tools/perf/util/thread-stack.c struct thread_stack *ts = thread__stack(thread, cpu); thread 14 tools/perf/util/thread-stack.h struct thread; thread 55 tools/perf/util/thread-stack.h struct thread *thread; thread 83 tools/perf/util/thread-stack.h int thread_stack__event(struct thread *thread, int cpu, u32 flags, u64 from_ip, thread 85 tools/perf/util/thread-stack.h void thread_stack__set_trace_nr(struct thread *thread, int cpu, u64 trace_nr); thread 86 tools/perf/util/thread-stack.h void thread_stack__sample(struct thread *thread, int cpu, struct ip_callchain *chain, thread 88 tools/perf/util/thread-stack.h int thread_stack__flush(struct thread *thread); thread 89 tools/perf/util/thread-stack.h void thread_stack__free(struct thread *thread); thread 90 tools/perf/util/thread-stack.h size_t thread_stack__depth(struct thread *thread, int cpu); thread 96 tools/perf/util/thread-stack.h int thread_stack__process(struct thread *thread, struct comm *comm, thread 22 tools/perf/util/thread.c int thread__init_map_groups(struct thread *thread, struct machine *machine) thread 24 tools/perf/util/thread.c pid_t pid = thread->pid_; thread 26 tools/perf/util/thread.c if (pid == thread->tid || pid == -1) { thread 27 tools/perf/util/thread.c thread->mg = map_groups__new(machine); thread 29 tools/perf/util/thread.c struct thread *leader = __machine__findnew_thread(machine, pid, pid); thread 31 tools/perf/util/thread.c thread->mg = map_groups__get(leader->mg); thread 36 tools/perf/util/thread.c return thread->mg ? 
0 : -1; thread 39 tools/perf/util/thread.c struct thread *thread__new(pid_t pid, pid_t tid) thread 43 tools/perf/util/thread.c struct thread *thread = zalloc(sizeof(*thread)); thread 45 tools/perf/util/thread.c if (thread != NULL) { thread 46 tools/perf/util/thread.c thread->pid_ = pid; thread 47 tools/perf/util/thread.c thread->tid = tid; thread 48 tools/perf/util/thread.c thread->ppid = -1; thread 49 tools/perf/util/thread.c thread->cpu = -1; thread 50 tools/perf/util/thread.c INIT_LIST_HEAD(&thread->namespaces_list); thread 51 tools/perf/util/thread.c INIT_LIST_HEAD(&thread->comm_list); thread 52 tools/perf/util/thread.c init_rwsem(&thread->namespaces_lock); thread 53 tools/perf/util/thread.c init_rwsem(&thread->comm_lock); thread 65 tools/perf/util/thread.c list_add(&comm->list, &thread->comm_list); thread 66 tools/perf/util/thread.c refcount_set(&thread->refcnt, 1); thread 67 tools/perf/util/thread.c RB_CLEAR_NODE(&thread->rb_node); thread 69 tools/perf/util/thread.c thread->nsinfo = nsinfo__new(pid); thread 70 tools/perf/util/thread.c srccode_state_init(&thread->srccode_state); thread 73 tools/perf/util/thread.c return thread; thread 76 tools/perf/util/thread.c free(thread); thread 80 tools/perf/util/thread.c void thread__delete(struct thread *thread) thread 85 tools/perf/util/thread.c BUG_ON(!RB_EMPTY_NODE(&thread->rb_node)); thread 87 tools/perf/util/thread.c thread_stack__free(thread); thread 89 tools/perf/util/thread.c if (thread->mg) { thread 90 tools/perf/util/thread.c map_groups__put(thread->mg); thread 91 tools/perf/util/thread.c thread->mg = NULL; thread 93 tools/perf/util/thread.c down_write(&thread->namespaces_lock); thread 95 tools/perf/util/thread.c &thread->namespaces_list, list) { thread 99 tools/perf/util/thread.c up_write(&thread->namespaces_lock); thread 101 tools/perf/util/thread.c down_write(&thread->comm_lock); thread 102 tools/perf/util/thread.c list_for_each_entry_safe(comm, tmp_comm, &thread->comm_list, list) { thread 106 tools/perf/util/thread.c up_write(&thread->comm_lock); thread 108 tools/perf/util/thread.c nsinfo__zput(thread->nsinfo); thread 109 tools/perf/util/thread.c srccode_state_free(&thread->srccode_state); thread 111 tools/perf/util/thread.c exit_rwsem(&thread->namespaces_lock); thread 112 tools/perf/util/thread.c exit_rwsem(&thread->comm_lock); thread 113 tools/perf/util/thread.c free(thread); thread 116 tools/perf/util/thread.c struct thread *thread__get(struct thread *thread) thread 118 tools/perf/util/thread.c if (thread) thread 119 tools/perf/util/thread.c refcount_inc(&thread->refcnt); thread 120 tools/perf/util/thread.c return thread; thread 123 tools/perf/util/thread.c void thread__put(struct thread *thread) thread 125 tools/perf/util/thread.c if (thread && refcount_dec_and_test(&thread->refcnt)) { thread 146 tools/perf/util/thread.c if (!list_empty(&thread->node)) thread 147 tools/perf/util/thread.c list_del_init(&thread->node); thread 148 tools/perf/util/thread.c thread__delete(thread); thread 152 tools/perf/util/thread.c static struct namespaces *__thread__namespaces(const struct thread *thread) thread 154 tools/perf/util/thread.c if (list_empty(&thread->namespaces_list)) thread 157 tools/perf/util/thread.c return list_first_entry(&thread->namespaces_list, struct namespaces, list); thread 160 tools/perf/util/thread.c struct namespaces *thread__namespaces(struct thread *thread) thread 164 tools/perf/util/thread.c down_read(&thread->namespaces_lock); thread 165 tools/perf/util/thread.c ns = __thread__namespaces(thread); thread 166 
tools/perf/util/thread.c up_read(&thread->namespaces_lock); thread 171 tools/perf/util/thread.c static int __thread__set_namespaces(struct thread *thread, u64 timestamp, thread 174 tools/perf/util/thread.c struct namespaces *new, *curr = __thread__namespaces(thread); thread 180 tools/perf/util/thread.c list_add(&new->list, &thread->namespaces_list); thread 195 tools/perf/util/thread.c int thread__set_namespaces(struct thread *thread, u64 timestamp, thread 200 tools/perf/util/thread.c down_write(&thread->namespaces_lock); thread 201 tools/perf/util/thread.c ret = __thread__set_namespaces(thread, timestamp, event); thread 202 tools/perf/util/thread.c up_write(&thread->namespaces_lock); thread 206 tools/perf/util/thread.c struct comm *thread__comm(const struct thread *thread) thread 208 tools/perf/util/thread.c if (list_empty(&thread->comm_list)) thread 211 tools/perf/util/thread.c return list_first_entry(&thread->comm_list, struct comm, list); thread 214 tools/perf/util/thread.c struct comm *thread__exec_comm(const struct thread *thread) thread 218 tools/perf/util/thread.c list_for_each_entry(comm, &thread->comm_list, list) { thread 231 tools/perf/util/thread.c if (second_last && !last->start && thread->pid_ == thread->tid) thread 237 tools/perf/util/thread.c static int ____thread__set_comm(struct thread *thread, const char *str, thread 240 tools/perf/util/thread.c struct comm *new, *curr = thread__comm(thread); thread 243 tools/perf/util/thread.c if (!thread->comm_set) { thread 251 tools/perf/util/thread.c list_add(&new->list, &thread->comm_list); thread 254 tools/perf/util/thread.c unwind__flush_access(thread->mg); thread 257 tools/perf/util/thread.c thread->comm_set = true; thread 262 tools/perf/util/thread.c int __thread__set_comm(struct thread *thread, const char *str, u64 timestamp, thread 267 tools/perf/util/thread.c down_write(&thread->comm_lock); thread 268 tools/perf/util/thread.c ret = ____thread__set_comm(thread, str, timestamp, exec); thread 269 tools/perf/util/thread.c up_write(&thread->comm_lock); thread 273 tools/perf/util/thread.c int thread__set_comm_from_proc(struct thread *thread) thread 281 tools/perf/util/thread.c thread->pid_, thread->tid) >= (int)sizeof(path)) && thread 284 tools/perf/util/thread.c err = thread__set_comm(thread, comm, 0); thread 290 tools/perf/util/thread.c static const char *__thread__comm_str(const struct thread *thread) thread 292 tools/perf/util/thread.c const struct comm *comm = thread__comm(thread); thread 300 tools/perf/util/thread.c const char *thread__comm_str(struct thread *thread) thread 304 tools/perf/util/thread.c down_read(&thread->comm_lock); thread 305 tools/perf/util/thread.c str = __thread__comm_str(thread); thread 306 tools/perf/util/thread.c up_read(&thread->comm_lock); thread 312 tools/perf/util/thread.c int thread__comm_len(struct thread *thread) thread 314 tools/perf/util/thread.c if (!thread->comm_len) { thread 315 tools/perf/util/thread.c const char *comm = thread__comm_str(thread); thread 318 tools/perf/util/thread.c thread->comm_len = strlen(comm); thread 321 tools/perf/util/thread.c return thread->comm_len; thread 324 tools/perf/util/thread.c size_t thread__fprintf(struct thread *thread, FILE *fp) thread 326 tools/perf/util/thread.c return fprintf(fp, "Thread %d %s\n", thread->tid, thread__comm_str(thread)) + thread 327 tools/perf/util/thread.c map_groups__fprintf(thread->mg, fp); thread 330 tools/perf/util/thread.c int thread__insert_map(struct thread *thread, struct map *map) thread 334 tools/perf/util/thread.c ret = 
thread 338 tools/perf/util/thread.c map_groups__fixup_overlappings(thread->mg, map, stderr);
thread 339 tools/perf/util/thread.c map_groups__insert(thread->mg, map);
thread 344 tools/perf/util/thread.c static int __thread__prepare_access(struct thread *thread)
thread 348 tools/perf/util/thread.c struct maps *maps = &thread->mg->maps;
thread 354 tools/perf/util/thread.c err = unwind__prepare_access(thread->mg, map, &initialized);
thread 364 tools/perf/util/thread.c static int thread__prepare_access(struct thread *thread)
thread 369 tools/perf/util/thread.c err = __thread__prepare_access(thread);
thread 374 tools/perf/util/thread.c static int thread__clone_map_groups(struct thread *thread,
thread 375 tools/perf/util/thread.c struct thread *parent,
thread 379 tools/perf/util/thread.c if (thread->pid_ == parent->pid_)
thread 380 tools/perf/util/thread.c return thread__prepare_access(thread);
thread 382 tools/perf/util/thread.c if (thread->mg == parent->mg) {
thread 384 tools/perf/util/thread.c thread->pid_, thread->tid, parent->pid_, parent->tid);
thread 388 tools/perf/util/thread.c return do_maps_clone ? map_groups__clone(thread, parent->mg) : 0;
thread 391 tools/perf/util/thread.c int thread__fork(struct thread *thread, struct thread *parent, u64 timestamp, bool do_maps_clone)
thread 398 tools/perf/util/thread.c err = thread__set_comm(thread, comm, timestamp);
thread 403 tools/perf/util/thread.c thread->ppid = parent->tid;
thread 404 tools/perf/util/thread.c return thread__clone_map_groups(thread, parent, do_maps_clone);
thread 407 tools/perf/util/thread.c void thread__find_cpumode_addr_location(struct thread *thread, u64 addr,
thread 419 tools/perf/util/thread.c thread__find_symbol(thread, cpumodes[i], addr, al);
thread 425 tools/perf/util/thread.c struct thread *thread__main_thread(struct machine *machine, struct thread *thread)
thread 427 tools/perf/util/thread.c if (thread->pid_ == thread->tid)
thread 428 tools/perf/util/thread.c return thread__get(thread);
thread 430 tools/perf/util/thread.c if (thread->pid_ == -1)
thread 433 tools/perf/util/thread.c return machine__find_thread(machine, thread->pid_, thread->pid_);
thread 436 tools/perf/util/thread.c int thread__memcpy(struct thread *thread, struct machine *machine,
thread 446 tools/perf/util/thread.c if (!thread__find_map(thread, cpumode, ip, &al) || !al.map->dso ||
thread 55 tools/perf/util/thread.h struct thread *thread__new(pid_t pid, pid_t tid);
thread 56 tools/perf/util/thread.h int thread__init_map_groups(struct thread *thread, struct machine *machine);
thread 57 tools/perf/util/thread.h void thread__delete(struct thread *thread);
thread 59 tools/perf/util/thread.h struct thread *thread__get(struct thread *thread);
thread 60 tools/perf/util/thread.h void thread__put(struct thread *thread);
thread 62 tools/perf/util/thread.h static inline void __thread__zput(struct thread **thread)
thread 64 tools/perf/util/thread.h thread__put(*thread);
thread 65 tools/perf/util/thread.h *thread = NULL;
thread 68 tools/perf/util/thread.h #define thread__zput(thread) __thread__zput(&thread)
thread 70 tools/perf/util/thread.h static inline void thread__exited(struct thread *thread)
thread 72 tools/perf/util/thread.h thread->dead = true;
thread 75 tools/perf/util/thread.h struct namespaces *thread__namespaces(struct thread *thread);
thread 76 tools/perf/util/thread.h int thread__set_namespaces(struct thread *thread, u64 timestamp,
thread 79 tools/perf/util/thread.h int __thread__set_comm(struct thread *thread, const char *comm, u64 timestamp,
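The __thread__zput() helper listed just above combines a put with clearing the caller's pointer, so a stale reference cannot be reused after it is dropped. Here is a tiny self-contained sketch of that put-and-clear pattern, with a non-atomic refcount and made-up obj names; it illustrates the idea rather than perf's code.

    #include <stdlib.h>

    struct obj { int refcnt; };     /* non-atomic: single-threaded sketch only */

    static void obj__put(struct obj *o)
    {
            if (o && --o->refcnt == 0)
                    free(o);
    }

    /* put-and-clear: release the reference and poison the caller's pointer */
    static void obj__zput(struct obj **op)
    {
            obj__put(*op);
            *op = NULL;
    }

    int main(void)
    {
            struct obj *o = calloc(1, sizeof(*o));

            if (!o)
                    return 1;
            o->refcnt = 1;
            obj__zput(&o);          /* o is NULL afterwards */
            obj__zput(&o);          /* harmless: put of NULL is a no-op */
            return 0;
    }
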
thread 81 tools/perf/util/thread.h static inline int thread__set_comm(struct thread *thread, const char *comm,
thread 84 tools/perf/util/thread.h return __thread__set_comm(thread, comm, timestamp, false);
thread 87 tools/perf/util/thread.h int thread__set_comm_from_proc(struct thread *thread);
thread 89 tools/perf/util/thread.h int thread__comm_len(struct thread *thread);
thread 90 tools/perf/util/thread.h struct comm *thread__comm(const struct thread *thread);
thread 91 tools/perf/util/thread.h struct comm *thread__exec_comm(const struct thread *thread);
thread 92 tools/perf/util/thread.h const char *thread__comm_str(struct thread *thread);
thread 93 tools/perf/util/thread.h int thread__insert_map(struct thread *thread, struct map *map);
thread 94 tools/perf/util/thread.h int thread__fork(struct thread *thread, struct thread *parent, u64 timestamp, bool do_maps_clone);
thread 95 tools/perf/util/thread.h size_t thread__fprintf(struct thread *thread, FILE *fp);
thread 97 tools/perf/util/thread.h struct thread *thread__main_thread(struct machine *machine, struct thread *thread);
thread 99 tools/perf/util/thread.h struct map *thread__find_map(struct thread *thread, u8 cpumode, u64 addr,
thread 101 tools/perf/util/thread.h struct map *thread__find_map_fb(struct thread *thread, u8 cpumode, u64 addr,
thread 104 tools/perf/util/thread.h struct symbol *thread__find_symbol(struct thread *thread, u8 cpumode,
thread 106 tools/perf/util/thread.h struct symbol *thread__find_symbol_fb(struct thread *thread, u8 cpumode,
thread 109 tools/perf/util/thread.h void thread__find_cpumode_addr_location(struct thread *thread, u64 addr,
thread 112 tools/perf/util/thread.h int thread__memcpy(struct thread *thread, struct machine *machine,
thread 115 tools/perf/util/thread.h static inline void *thread__priv(struct thread *thread)
thread 117 tools/perf/util/thread.h return thread->priv;
thread 120 tools/perf/util/thread.h static inline void thread__set_priv(struct thread *thread, void *p)
thread 122 tools/perf/util/thread.h thread->priv = p;
thread 125 tools/perf/util/thread.h static inline bool thread__is_filtered(struct thread *thread)
thread 128 tools/perf/util/thread.h !strlist__has_entry(symbol_conf.comm_list, thread__comm_str(thread))) {
thread 133 tools/perf/util/thread.h !intlist__has_entry(symbol_conf.pid_list, thread->pid_)) {
thread 138 tools/perf/util/thread.h !intlist__has_entry(symbol_conf.tid_list, thread->tid)) {
thread 12 tools/perf/util/trace-event.h struct thread;
thread 38 tools/perf/util/unwind-libdw.c thread__find_symbol(ui->thread, PERF_RECORD_MISC_USER, ip, al);
thread 110 tools/perf/util/unwind-libdw.c if (!thread__find_map(ui->thread, PERF_RECORD_MISC_USER, addr, &al)) {
thread 195 tools/perf/util/unwind-libdw.c struct thread *thread,
thread 201 tools/perf/util/unwind-libdw.c .thread = thread,
thread 202 tools/perf/util/unwind-libdw.c .machine = thread->mg->machine,
thread 231 tools/perf/util/unwind-libdw.c err = !dwfl_attach_state(ui->dwfl, EM_NONE, thread->tid, &callbacks, ui);
thread 235 tools/perf/util/unwind-libdw.c err = dwfl_getthread_frames(ui->dwfl, thread->tid, frame_callback, ui);
thread 10 tools/perf/util/unwind-libdw.h struct thread;
thread 12 tools/perf/util/unwind-libdw.h bool libdw__arch_set_initial_registers(Dwfl_Thread *thread, void *arg);
thread 18 tools/perf/util/unwind-libdw.h struct thread *thread;
thread 98 tools/perf/util/unwind-libunwind-local.c struct thread *thread;
thread 370 tools/perf/util/unwind-libunwind-local.c return thread__find_map(ui->thread, PERF_RECORD_MISC_USER, ip, &al);
thread 572 tools/perf/util/unwind-libunwind-local.c static int entry(u64 ip, struct thread *thread,
thread 578 tools/perf/util/unwind-libunwind-local.c e.sym = thread__find_symbol(thread, PERF_RECORD_MISC_USER, ip, &al);
thread 661 tools/perf/util/unwind-libunwind-local.c WARN_ONCE(!ui->thread, "WARNING: ui->thread is NULL");
thread 662 tools/perf/util/unwind-libunwind-local.c addr_space = ui->thread->mg->addr_space;
thread 698 tools/perf/util/unwind-libunwind-local.c ret = ips[j] ? entry(ips[j], ui->thread, cb, arg) : 0;
thread 705 tools/perf/util/unwind-libunwind-local.c struct thread *thread,
thread 710 tools/perf/util/unwind-libunwind-local.c .thread = thread,
thread 711 tools/perf/util/unwind-libunwind-local.c .machine = thread->mg->machine,
thread 84 tools/perf/util/unwind-libunwind.c struct thread *thread,
thread 87 tools/perf/util/unwind-libunwind.c if (thread->mg->unwind_libunwind_ops)
thread 88 tools/perf/util/unwind-libunwind.c return thread->mg->unwind_libunwind_ops->get_entries(cb, arg, thread, data, max_stack);
thread 12 tools/perf/util/unwind.h struct thread;
thread 27 tools/perf/util/unwind.h struct thread *thread,
thread 33 tools/perf/util/unwind.h struct thread *thread,
thread 69 tools/perf/util/unwind.h struct thread *thread __maybe_unused,
thread 142 tools/perf/util/vdso.c struct thread *thread)
thread 145 tools/perf/util/vdso.c struct map *map = map_groups__first(thread->mg);
thread 255 tools/perf/util/vdso.c struct thread *thread,
thread 261 tools/perf/util/vdso.c dso_type = machine__thread_dso_type(machine, thread);
thread 289 tools/perf/util/vdso.c struct thread *thread)
thread 294 tools/perf/util/vdso.c dso_type = machine__thread_dso_type(machine, thread);
thread 319 tools/perf/util/vdso.c struct thread *thread)
thread 332 tools/perf/util/vdso.c dso = machine__find_vdso(machine, thread);
thread 337 tools/perf/util/vdso.c if (__machine__findnew_vdso_compat(machine, thread, vdso_info, &dso))
thread 25 tools/perf/util/vdso.h struct thread;
thread 27 tools/perf/util/vdso.h struct dso *machine__findnew_vdso(struct machine *machine, struct thread *thread);
thread 1248 tools/power/acpi/os_specific/service_layers/osunixxf.c pthread_t thread;
thread 1250 tools/power/acpi/os_specific/service_layers/osunixxf.c thread = pthread_self();
thread 1251 tools/power/acpi/os_specific/service_layers/osunixxf.c return (ACPI_CAST_PTHREAD_T(thread));
thread 1272 tools/power/acpi/os_specific/service_layers/osunixxf.c pthread_t thread;
thread 1276 tools/power/acpi/os_specific/service_layers/osunixxf.c pthread_create(&thread, NULL, (PTHREAD_CALLBACK) function, context);
thread 20 tools/testing/selftests/powerpc/dscr/dscr_default_test.c unsigned long thread = (unsigned long)in;
thread 44 tools/testing/selftests/powerpc/dscr/dscr_default_test.c "but is %ld\n", thread, d, cur_dscr);
thread 45 tools/testing/selftests/powerpc/dscr/dscr_default_test.c result[thread] = 1;
thread 46 tools/testing/selftests/powerpc/dscr/dscr_default_test.c pthread_exit(&result[thread]);
thread 51 tools/testing/selftests/powerpc/dscr/dscr_default_test.c "but is %ld\n", thread, d, cur_dscr_usr);
thread 52 tools/testing/selftests/powerpc/dscr/dscr_default_test.c result[thread] = 1;
thread 53 tools/testing/selftests/powerpc/dscr/dscr_default_test.c pthread_exit(&result[thread]);
thread 56 tools/testing/selftests/powerpc/dscr/dscr_default_test.c result[thread] = 0;
thread 57 tools/testing/selftests/powerpc/dscr/dscr_default_test.c pthread_exit(&result[thread]);
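The dscr_default_test.c entries above show a common selftest idiom: each worker receives its index smuggled through the void * argument, records a pass/fail code in a shared array, and returns a pointer to its slot via pthread_exit(). A self-contained sketch of that idiom follows; the worker body and the NTHREADS value are placeholders rather than the actual DSCR check.

    #include <pthread.h>
    #include <stdio.h>

    #define NTHREADS 4

    static int result[NTHREADS];

    static void *worker(void *in)
    {
            unsigned long thread = (unsigned long)in;   /* an index, not a pointer */

            result[thread] = (thread % 2) ? 1 : 0;      /* stand-in for a real check */
            pthread_exit(&result[thread]);              /* slot stays valid: static array */
    }

    int main(void)
    {
            pthread_t pth[NTHREADS];
            unsigned long i;

            for (i = 0; i < NTHREADS; i++)
                    pthread_create(&pth[i], NULL, worker, (void *)i);

            for (i = 0; i < NTHREADS; i++) {
                    void *status;

                    pthread_join(pth[i], &status);
                    printf("thread %lu -> %d\n", i, *(int *)status);
            }
            return 0;
    }
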
thread 100 tools/testing/selftests/powerpc/tm/tm-tmspr.c pthread_t *thread;
thread 109 tools/testing/selftests/powerpc/tm/tm-tmspr.c thread = malloc(thread_num * sizeof(pthread_t));
thread 110 tools/testing/selftests/powerpc/tm/tm-tmspr.c if (thread == NULL)
thread 115 tools/testing/selftests/powerpc/tm/tm-tmspr.c if (pthread_create(&thread[i], NULL, (void *)tfiar_tfhar,
thread 121 tools/testing/selftests/powerpc/tm/tm-tmspr.c if (pthread_create(&thread[i], NULL, (void *)texasr, (void *)i))
thread 126 tools/testing/selftests/powerpc/tm/tm-tmspr.c if (pthread_join(thread[i], NULL) != 0)
thread 130 tools/testing/selftests/powerpc/tm/tm-tmspr.c free(thread);
thread 92 tools/testing/selftests/powerpc/tm/tm-vmx-unavail.c pthread_t *thread;
thread 99 tools/testing/selftests/powerpc/tm/tm-vmx-unavail.c thread = malloc(sizeof(pthread_t)*threads);
thread 100 tools/testing/selftests/powerpc/tm/tm-vmx-unavail.c if (!thread)
thread 104 tools/testing/selftests/powerpc/tm/tm-vmx-unavail.c pthread_create(&thread[i], NULL, &worker, NULL);
thread 107 tools/testing/selftests/powerpc/tm/tm-vmx-unavail.c pthread_join(thread[i], NULL);
thread 109 tools/testing/selftests/powerpc/tm/tm-vmx-unavail.c free(thread);
thread 687 tools/testing/selftests/seccomp/seccomp_bpf.c pthread_t thread;
thread 727 tools/testing/selftests/seccomp/seccomp_bpf.c ASSERT_EQ(0, pthread_create(&thread, NULL, kill_thread, (void *)false));
thread 728 tools/testing/selftests/seccomp/seccomp_bpf.c ASSERT_EQ(0, pthread_join(thread, &status));
thread 732 tools/testing/selftests/seccomp/seccomp_bpf.c ASSERT_EQ(0, pthread_create(&thread, NULL, kill_thread, (void *)true));
thread 733 tools/testing/selftests/seccomp/seccomp_bpf.c ASSERT_EQ(0, pthread_join(thread, &status));
thread 129 tools/testing/selftests/timers/threadtest.c void *(*thread)(void *) = shared_thread;
thread 144 tools/testing/selftests/timers/threadtest.c thread = independent_thread;
thread 170 tools/testing/selftests/timers/threadtest.c pthread_create(&pth[i], 0, thread, 0);
thread 506 tools/testing/selftests/x86/fsgsbase.c pthread_t thread;
thread 545 tools/testing/selftests/x86/fsgsbase.c if (pthread_create(&thread, 0, threadproc, 0) != 0)
thread 586 tools/testing/selftests/x86/fsgsbase.c if (pthread_join(thread, NULL) != 0)
thread 533 tools/testing/selftests/x86/ldt_gdt.c pthread_t thread;
thread 559 tools/testing/selftests/x86/ldt_gdt.c if (pthread_create(&thread, 0, threadproc, 0) != 0)
thread 615 tools/testing/selftests/x86/ldt_gdt.c if (pthread_join(thread, NULL) != 0)
thread 61 tools/testing/selftests/x86/sysret_ss_attrs.c pthread_t thread;
thread 62 tools/testing/selftests/x86/sysret_ss_attrs.c if (pthread_create(&thread, 0, threadproc, 0) != 0)
thread 334 tools/usb/ffs-test.c struct thread;
thread 336 tools/usb/ffs-test.c static ssize_t read_wrap(struct thread *t, void *buf, size_t nbytes);
thread 337 tools/usb/ffs-test.c static ssize_t write_wrap(struct thread *t, const void *buf, size_t nbytes);
thread 338 tools/usb/ffs-test.c static ssize_t ep0_consume(struct thread *t, const void *buf, size_t nbytes);
thread 339 tools/usb/ffs-test.c static ssize_t fill_in_buf(struct thread *t, void *buf, size_t nbytes);
thread 340 tools/usb/ffs-test.c static ssize_t empty_out_buf(struct thread *t, const void *buf, size_t nbytes);
thread 347 tools/usb/ffs-test.c ssize_t (*in)(struct thread *, void *, size_t);
thread 350 tools/usb/ffs-test.c ssize_t (*out)(struct thread *, const void *, size_t);
thread 379 tools/usb/ffs-test.c static void init_thread(struct thread *t)
thread 390 tools/usb/ffs-test.c struct thread *t = arg;
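The tm-tmspr.c and tm-vmx-unavail.c entries above share another recurring shape: a heap-allocated pthread_t array sized at run time, a create loop, a join loop, and a final free(). The sketch below reproduces that skeleton with every create and join checked; the worker is an empty placeholder rather than a real TM/VMX test body, and the thread count is arbitrary.

    #include <pthread.h>
    #include <stdio.h>
    #include <stdlib.h>

    static void *worker(void *arg)
    {
            (void)arg;              /* a real test would exercise TM/VMX state here */
            return NULL;
    }

    int main(void)
    {
            int threads = 8, i;
            pthread_t *thread = malloc(threads * sizeof(pthread_t));

            if (!thread)
                    return 1;

            for (i = 0; i < threads; i++)
                    if (pthread_create(&thread[i], NULL, worker, NULL)) {
                            fprintf(stderr, "pthread_create failed\n");
                            return 1;
                    }

            for (i = 0; i < threads; i++)
                    if (pthread_join(thread[i], NULL) != 0) {
                            fprintf(stderr, "pthread_join failed\n");
                            return 1;
                    }

            free(thread);
            return 0;
    }
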
thread 422 tools/usb/ffs-test.c struct thread *t = arg;
thread 464 tools/usb/ffs-test.c static void start_thread(struct thread *t)
thread 472 tools/usb/ffs-test.c static void join_thread(struct thread *t)
thread 483 tools/usb/ffs-test.c static ssize_t read_wrap(struct thread *t, void *buf, size_t nbytes)
thread 488 tools/usb/ffs-test.c static ssize_t write_wrap(struct thread *t, const void *buf, size_t nbytes)
thread 501 tools/usb/ffs-test.c fill_in_buf(struct thread *ignore, void *buf, size_t nbytes)
thread 526 tools/usb/ffs-test.c empty_out_buf(struct thread *ignore, const void *buf, size_t nbytes)
thread 588 tools/usb/ffs-test.c ep0_consume(struct thread *ignore, const void *buf, size_t nbytes)
thread 626 tools/usb/ffs-test.c static void ep0_init(struct thread *t, bool legacy_descriptors)
thread 118 tools/usb/testusb.c pthread_t thread;
thread 497 tools/usb/testusb.c status = pthread_create (&entry->thread, 0, handle_testdev, entry);
thread 520 tools/usb/testusb.c if (pthread_join (entry->thread, &retval))
thread 4580 virt/kvm/kvm_main.c struct task_struct *thread;
thread 4589 virt/kvm/kvm_main.c thread = kthread_run(kvm_vm_worker_thread, &init_context,
thread 4591 virt/kvm/kvm_main.c if (IS_ERR(thread))
thread 4592 virt/kvm/kvm_main.c return PTR_ERR(thread);
thread 4595 virt/kvm/kvm_main.c WARN_ON(thread == NULL);
thread 4600 virt/kvm/kvm_main.c *thread_ptr = thread;