nr_threads 550 arch/s390/appldata/appldata_base.c EXPORT_SYMBOL_GPL(nr_threads);
nr_threads 69 arch/s390/appldata/appldata_os.c u32 nr_threads; /* number of threads */
nr_threads 105 arch/s390/appldata/appldata_os.c os_data->nr_threads = nr_threads;
nr_threads 140 drivers/dma/img-mdc-dma.c unsigned int nr_threads;
nr_threads 924 drivers/dma/img-mdc-dma.c mdma->nr_threads =
nr_threads 1010 drivers/dma/img-mdc-dma.c mdma->nr_channels, mdma->nr_threads);
nr_threads 434 fs/coredump.c atomic_set(&core_state->nr_threads, nr);
nr_threads 23 fs/proc/loadavg.c nr_running(), nr_threads,
nr_threads 601 fs/xfs/xfs_iwalk.c unsigned int nr_threads;
nr_threads 607 fs/xfs/xfs_iwalk.c nr_threads = xfs_pwork_guess_datadev_parallelism(mp);
nr_threads 609 fs/xfs/xfs_iwalk.c nr_threads);
nr_threads 65 fs/xfs/xfs_pwork.c unsigned int nr_threads)
nr_threads 69 fs/xfs/xfs_pwork.c nr_threads = xfs_globals.pwork_threads;
nr_threads 71 fs/xfs/xfs_pwork.c trace_xfs_pwork_init(mp, nr_threads, current->pid);
nr_threads 73 fs/xfs/xfs_pwork.c pctl->wq = alloc_workqueue("%s-%d", WQ_FREEZABLE, nr_threads, tag,
nr_threads 55 fs/xfs/xfs_pwork.h unsigned int nr_threads);
nr_threads 3566 fs/xfs/xfs_trace.h TP_PROTO(struct xfs_mount *mp, unsigned int nr_threads, pid_t pid),
nr_threads 3567 fs/xfs/xfs_trace.h TP_ARGS(mp, nr_threads, pid),
nr_threads 3570 fs/xfs/xfs_trace.h __field(unsigned int, nr_threads)
nr_threads 3575 fs/xfs/xfs_trace.h __entry->nr_threads = nr_threads;
nr_threads 3580 fs/xfs/xfs_trace.h __entry->nr_threads, __entry->pid)
nr_threads 364 include/linux/mm_types.h atomic_t nr_threads;
nr_threads 83 include/linux/sched/signal.h int nr_threads;
nr_threads 626 include/linux/sched/signal.h return task->signal->nr_threads;
nr_threads 16 include/linux/sched/stat.h extern int nr_threads;
nr_threads 19 init/init_task.c .nr_threads = 1,
nr_threads 2501 kernel/debug/kdb/kdb_main.c val->procs = nr_threads-1;
nr_threads 74 kernel/exit.c nr_threads--;
nr_threads 156 kernel/exit.c sig->nr_threads--;
nr_threads 464 kernel/exit.c if (atomic_dec_and_test(&core_state->nr_threads))
nr_threads 124 kernel/fork.c int nr_threads; /* The idle threads do not count.. */
nr_threads 1555 kernel/fork.c sig->nr_threads = 1;
nr_threads 1890 kernel/fork.c if (nr_threads >= max_threads)
nr_threads 2198 kernel/fork.c current->signal->nr_threads++;
nr_threads 2208 kernel/fork.c nr_threads++;
nr_threads 678 kernel/power/swap.c unsigned thr, run_threads, nr_threads;
nr_threads 689 kernel/power/swap.c nr_threads = num_online_cpus() - 1;
nr_threads 690 kernel/power/swap.c nr_threads = clamp_val(nr_threads, 1, LZO_THREADS);
nr_threads 699 kernel/power/swap.c data = vmalloc(array_size(nr_threads, sizeof(*data)));
nr_threads 705 kernel/power/swap.c for (thr = 0; thr < nr_threads; thr++)
nr_threads 719 kernel/power/swap.c for (thr = 0; thr < nr_threads; thr++) {
nr_threads 742 kernel/power/swap.c for (thr = 0; thr < nr_threads; thr++) {
nr_threads 761 kernel/power/swap.c pr_info("Using %u thread(s) for compression\n", nr_threads);
nr_threads 770 kernel/power/swap.c for (thr = 0; thr < nr_threads; thr++) {
nr_threads 863 kernel/power/swap.c for (thr = 0; thr < nr_threads; thr++)
nr_threads 1166 kernel/power/swap.c unsigned i, thr, run_threads, nr_threads;
nr_threads 1180 kernel/power/swap.c nr_threads = num_online_cpus() - 1;
nr_threads 1181 kernel/power/swap.c nr_threads = clamp_val(nr_threads, 1, LZO_THREADS);
nr_threads 1190 kernel/power/swap.c data = vmalloc(array_size(nr_threads, sizeof(*data)));
nr_threads 1196 kernel/power/swap.c for (thr = 0; thr < nr_threads; thr++)
nr_threads 1212 kernel/power/swap.c for (thr = 0; thr < nr_threads; thr++) {
nr_threads 1235 kernel/power/swap.c for (thr = 0; thr < nr_threads; thr++) {
nr_threads 1278 kernel/power/swap.c pr_info("Using %u thread(s) for decompression\n", nr_threads);
nr_threads 1335 kernel/power/swap.c for (thr = 0; have && thr < nr_threads; thr++) {
nr_threads 1458 kernel/power/swap.c for (thr = 0; thr < nr_threads; thr++)
nr_threads 2526 kernel/sys.c info->procs = nr_threads;
nr_threads 80 tools/perf/bench/numa.c int nr_threads;
nr_threads 170 tools/perf/bench/numa.c OPT_INTEGER('t', "nr_threads" , &p0.nr_threads, "number of threads per process"),
nr_threads 881 tools/perf/bench/numa.c for (t = 0; t < g->p.nr_threads; t++) {
nr_threads 886 tools/perf/bench/numa.c task_nr = process_nr*g->p.nr_threads + t;
nr_threads 917 tools/perf/bench/numa.c for (t = 0; t < g->p.nr_threads; t++) {
nr_threads 922 tools/perf/bench/numa.c task_nr = p*g->p.nr_threads + t;
nr_threads 1123 tools/perf/bench/numa.c if (process_nr == g->p.nr_proc-1 && thread_nr == g->p.nr_threads-1)
nr_threads 1163 tools/perf/bench/numa.c val += do_work(process_data, g->p.bytes_process, thread_nr, g->p.nr_threads, l, val);
nr_threads 1176 tools/perf/bench/numa.c val += do_work(process_data, g->p.bytes_process_locked, thread_nr, g->p.nr_threads, l, val);
nr_threads 1300 tools/perf/bench/numa.c task_nr = process_nr*g->p.nr_threads;
nr_threads 1306 tools/perf/bench/numa.c pthreads = zalloc(g->p.nr_threads * sizeof(pthread_t));
nr_threads 1314 tools/perf/bench/numa.c for (t = 0; t < g->p.nr_threads; t++) {
nr_threads 1315 tools/perf/bench/numa.c task_nr = process_nr*g->p.nr_threads + t;
nr_threads 1330 tools/perf/bench/numa.c for (t = 0; t < g->p.nr_threads; t++) {
nr_threads 1429 tools/perf/bench/numa.c BUG_ON(g->p.nr_threads <= 0);
nr_threads 1432 tools/perf/bench/numa.c g->p.nr_tasks = g->p.nr_proc*g->p.nr_threads;
nr_threads 1636 tools/perf/bench/numa.c for (t = 0; t < g->p.nr_threads; t++) {
nr_threads 1638 tools/perf/bench/numa.c td = g->threads + p*g->p.nr_threads + t;
nr_threads 1697 tools/perf/bench/numa.c p->nr_threads = 1;
nr_threads 83 tools/perf/bench/sched-pipe.c int nr_threads = 2;
nr_threads 101 tools/perf/bench/sched-pipe.c for (t = 0; t < nr_threads; t++) {
nr_threads 118 tools/perf/bench/sched-pipe.c for (t = 0; t < nr_threads; t++) {
nr_threads 125 tools/perf/bench/sched-pipe.c for (t = 0; t < nr_threads; t++) {
nr_threads 249 tools/perf/lib/evlist.c int nr_threads = perf_thread_map__nr(evlist->threads);
nr_threads 257 tools/perf/lib/evlist.c nfds += nr_cpus * nr_threads;
nr_threads 359 tools/perf/util/evlist.c int nr_threads = perf_evlist__nr_threads(evlist, evsel);
nr_threads 364 tools/perf/util/evlist.c for (thread = 0; thread < nr_threads; thread++) {
nr_threads 716 tools/perf/util/evlist.c int nr_threads = perf_thread_map__nr(evlist->core.threads);
nr_threads 726 tools/perf/util/evlist.c for (thread = 0; thread < nr_threads; thread++) {
nr_threads 744 tools/perf/util/evlist.c int nr_threads = perf_thread_map__nr(evlist->core.threads);
nr_threads 747 tools/perf/util/evlist.c for (thread = 0; thread < nr_threads; thread++) {
nr_threads 1450 tools/perf/util/evsel.c int nr_cpus, int nr_threads,
nr_threads 1454 tools/perf/util/evsel.c for (int thread = thread_idx; thread < nr_threads - 1; thread++)
nr_threads 1460 tools/perf/util/evsel.c int nr_threads, int thread_idx)
nr_threads 1464 tools/perf/util/evsel.c if (cpu_idx >= nr_cpus || thread_idx >= nr_threads)
nr_threads 1470 tools/perf/util/evsel.c perf_evsel__remove_fd(pos, nr_cpus, nr_threads, thread_idx);
nr_threads 171 tools/testing/radix-tree/regression1.c int nr_threads;
nr_threads 177 tools/testing/radix-tree/regression1.c nr_threads = 2;
nr_threads 178 tools/testing/radix-tree/regression1.c pthread_barrier_init(&worker_barrier, NULL, nr_threads);
nr_threads 180 tools/testing/radix-tree/regression1.c threads = malloc(nr_threads * sizeof(pthread_t *));
nr_threads 182 tools/testing/radix-tree/regression1.c for (i = 0; i < nr_threads; i++) {
nr_threads 190 tools/testing/radix-tree/regression1.c for (i = 0; i < nr_threads; i++) {