threads            47 arch/powerpc/include/asm/cputhreads.h static inline cpumask_t cpu_thread_mask_to_cores(const struct cpumask *threads)
threads            55 arch/powerpc/include/asm/cputhreads.h 		if (cpumask_intersects(threads, &tmp)) {
threads           579 arch/x86/kernel/smpboot.c 	int i, threads;
threads           641 arch/x86/kernel/smpboot.c 	threads = cpumask_weight(topology_sibling_cpumask(cpu));
threads           642 arch/x86/kernel/smpboot.c 	if (threads > __max_smt_threads)
threads           643 arch/x86/kernel/smpboot.c 		__max_smt_threads = threads;
threads          1532 arch/x86/kernel/smpboot.c 		int threads = cpumask_weight(topology_sibling_cpumask(cpu));
threads          1534 arch/x86/kernel/smpboot.c 		if (threads > max_threads)
threads          1535 arch/x86/kernel/smpboot.c 			max_threads = threads;
threads          1174 drivers/acpi/acpica/aclocal.h 	acpi_thread_id *threads;
threads           512 drivers/acpi/acpica/dbexec.c 	if (info->threads && (info->num_created < info->num_threads)) {
threads           513 drivers/acpi/acpica/dbexec.c 		info->threads[info->num_created++] = acpi_os_get_thread_id();
threads           766 drivers/acpi/acpica/dbexec.c 	acpi_gbl_db_method_info.threads = acpi_os_allocate(size);
threads           767 drivers/acpi/acpica/dbexec.c 	if (acpi_gbl_db_method_info.threads == NULL) {
threads           774 drivers/acpi/acpica/dbexec.c 	memset(acpi_gbl_db_method_info.threads, 0, size);
threads           852 drivers/acpi/acpica/dbexec.c 	acpi_os_free(acpi_gbl_db_method_info.threads);
threads           853 drivers/acpi/acpica/dbexec.c 	acpi_gbl_db_method_info.threads = NULL;
threads           465 drivers/android/binder.c 	struct rb_root threads;
threads           953 drivers/android/binder.c 	for (n = rb_first(&proc->threads); n != NULL; n = rb_next(n)) {
threads          1834 drivers/android/binder.c 	if (proc->is_dead && RB_EMPTY_ROOT(&proc->threads) &&
threads          4635 drivers/android/binder.c 	struct rb_node **p = &proc->threads.rb_node;
threads          4658 drivers/android/binder.c 	rb_insert_color(&thread->rb_node, &proc->threads);
threads          4728 drivers/android/binder.c 	rb_erase(&thread->rb_node, &proc->threads);
threads          5311 drivers/android/binder.c 	for (n = rb_first(&proc->threads); n != NULL; n = rb_next(n)) {
threads          5413 drivers/android/binder.c 	int threads, nodes, incoming_refs, outgoing_refs, active_transactions;
threads          5442 drivers/android/binder.c 	threads = 0;
threads          5444 drivers/android/binder.c 	while ((n = rb_first(&proc->threads))) {
threads          5449 drivers/android/binder.c 		threads++;
threads          5493 drivers/android/binder.c 		     __func__, proc->pid, threads, nodes, incoming_refs,
threads          5720 drivers/android/binder.c 	for (n = rb_first(&proc->threads); n != NULL; n = rb_next(n))
threads          5884 drivers/android/binder.c 	for (n = rb_first(&proc->threads); n != NULL; n = rb_next(n))
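
The binder.c hits above show a per-process rb_root of threads that is populated with rb_link_node()/rb_insert_color() (lines 4635-4658) and walked with rb_first()/rb_next(). A minimal sketch of that rbtree idiom, using hypothetical demo_* names rather than binder's own structures:

#include <linux/printk.h>
#include <linux/rbtree.h>
#include <linux/types.h>

struct demo_thread {
        struct rb_node rb_node;         /* linked into the per-process tree */
        pid_t tid;
};

/* Insert keyed by tid, mirroring the descent at binder.c:4635-4658. */
static void demo_insert_thread(struct rb_root *threads, struct demo_thread *new)
{
        struct rb_node **p = &threads->rb_node, *parent = NULL;

        while (*p) {
                struct demo_thread *t = rb_entry(*p, struct demo_thread, rb_node);

                parent = *p;
                p = new->tid < t->tid ? &(*p)->rb_left : &(*p)->rb_right;
        }
        rb_link_node(&new->rb_node, parent, p);
        rb_insert_color(&new->rb_node, threads);
}

/* Walk every thread, as the rb_first()/rb_next() loops above do. */
static void demo_print_threads(struct rb_root *threads)
{
        struct rb_node *n;

        for (n = rb_first(threads); n; n = rb_next(n)) {
                struct demo_thread *t = rb_entry(n, struct demo_thread, rb_node);

                pr_info("thread tid %d\n", t->tid);
        }
}
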
threads           229 drivers/dma/dmatest.c 	struct list_head	threads;
threads           242 drivers/dma/dmatest.c 		list_for_each_entry(thread, &dtc->threads, node) {
threads           258 drivers/dma/dmatest.c 		list_for_each_entry(thread, &dtc->threads, node) {
threads           919 drivers/dma/dmatest.c 	list_for_each_entry_safe(thread, _thread, &dtc->threads, node) {
threads           978 drivers/dma/dmatest.c 		list_add_tail(&thread->node, &dtc->threads);
threads          1000 drivers/dma/dmatest.c 	INIT_LIST_HEAD(&dtc->threads);
threads          1105 drivers/dma/dmatest.c 		list_for_each_entry(thread, &dtc->threads, node) {
threads          1278 drivers/dma/dmatest.c 		list_for_each_entry(thread, &dtc->threads, node) {
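
dmatest.c keeps each channel's test threads on a plain struct list_head (INIT_LIST_HEAD at line 1000, list_add_tail at line 978, list_for_each_entry and the _safe variant elsewhere). A short sketch of that list idiom under hypothetical demo_* names; dc->threads is assumed to have been set up with INIT_LIST_HEAD():

#include <linux/errno.h>
#include <linux/list.h>
#include <linux/slab.h>

struct demo_thread {
        struct list_head node;
        int id;
};

struct demo_chan {
        struct list_head threads;       /* list of struct demo_thread */
};

static int demo_add_thread(struct demo_chan *dc, int id)
{
        struct demo_thread *t = kzalloc(sizeof(*t), GFP_KERNEL);

        if (!t)
                return -ENOMEM;
        t->id = id;
        list_add_tail(&t->node, &dc->threads);
        return 0;
}

static void demo_free_threads(struct demo_chan *dc)
{
        struct demo_thread *t, *tmp;

        /* _safe variant because entries are removed while iterating,
         * as in dmatest.c:919. */
        list_for_each_entry_safe(t, tmp, &dc->threads, node) {
                list_del(&t->node);
                kfree(t);
        }
}
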
threads           376 drivers/firmware/psci/psci_checker.c 	struct task_struct **threads;
threads           379 drivers/firmware/psci/psci_checker.c 	threads = kmalloc_array(nb_available_cpus, sizeof(*threads),
threads           381 drivers/firmware/psci/psci_checker.c 	if (!threads)
threads           411 drivers/firmware/psci/psci_checker.c 			threads[nb_threads++] = thread;
threads           427 drivers/firmware/psci/psci_checker.c 		wake_up_process(threads[i]);
threads           435 drivers/firmware/psci/psci_checker.c 		err += kthread_park(threads[i]);
threads           436 drivers/firmware/psci/psci_checker.c 		err += kthread_stop(threads[i]);
threads           440 drivers/firmware/psci/psci_checker.c 	kfree(threads);
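
psci_checker.c allocates an array of task_struct pointers, creates one kthread per available CPU, wakes them, then parks and stops each one before freeing the array. A hedged sketch of that lifecycle (the demo_* names and the idle worker body are illustrative, not the checker's):

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kthread.h>
#include <linux/sched.h>
#include <linux/slab.h>

static int demo_fn(void *data)
{
        /* Idle until parked or asked to stop. */
        while (!kthread_should_stop()) {
                if (kthread_should_park())
                        kthread_parkme();
                cond_resched();
        }
        return 0;
}

static int demo_run(unsigned int nr)
{
        struct task_struct **threads;
        unsigned int i, started = 0;
        int err = 0;

        threads = kmalloc_array(nr, sizeof(*threads), GFP_KERNEL);
        if (!threads)
                return -ENOMEM;

        for (i = 0; i < nr; i++) {
                struct task_struct *t = kthread_create(demo_fn, NULL, "demo/%u", i);

                if (IS_ERR(t))
                        break;
                threads[started++] = t;
        }

        for (i = 0; i < started; i++)
                wake_up_process(threads[i]);

        /* Park then stop, accumulating errors as psci_checker.c:435-436 does. */
        for (i = 0; i < started; i++) {
                err += kthread_park(threads[i]);
                err += kthread_stop(threads[i]);
        }

        kfree(threads);
        return err;
}
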
threads           809 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		struct active_engine threads[I915_NUM_ENGINES] = {};
threads           825 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		memset(threads, 0, sizeof(threads));
threads           829 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 			threads[tmp].resets =
threads           838 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 			threads[tmp].engine = other;
threads           839 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 			threads[tmp].flags = flags;
threads           841 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 			tsk = kthread_run(active_engine, &threads[tmp],
threads           848 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 			threads[tmp].task = tsk;
threads           934 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		reported -= threads[engine->id].resets;
threads           946 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 			if (!threads[tmp].task)
threads           949 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 			ret = kthread_stop(threads[tmp].task);
threads           956 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 			put_task_struct(threads[tmp].task);
threads           959 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 			    threads[tmp].resets !=
threads           964 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 				       threads[tmp].resets);
threads           439 drivers/gpu/drm/i915/selftests/i915_request.c 	struct task_struct **threads;
threads           449 drivers/gpu/drm/i915/selftests/i915_request.c 	threads = kmalloc_array(ncpus, sizeof(*threads), GFP_KERNEL);
threads           450 drivers/gpu/drm/i915/selftests/i915_request.c 	if (!threads)
threads           471 drivers/gpu/drm/i915/selftests/i915_request.c 		threads[n] = kthread_run(__igt_breadcrumbs_smoketest,
threads           473 drivers/gpu/drm/i915/selftests/i915_request.c 		if (IS_ERR(threads[n])) {
threads           474 drivers/gpu/drm/i915/selftests/i915_request.c 			ret = PTR_ERR(threads[n]);
threads           479 drivers/gpu/drm/i915/selftests/i915_request.c 		get_task_struct(threads[n]);
threads           487 drivers/gpu/drm/i915/selftests/i915_request.c 		err = kthread_stop(threads[n]);
threads           491 drivers/gpu/drm/i915/selftests/i915_request.c 		put_task_struct(threads[n]);
threads           508 drivers/gpu/drm/i915/selftests/i915_request.c 	kfree(threads);
threads          1109 drivers/gpu/drm/i915/selftests/i915_request.c 	struct task_struct **threads;
threads          1133 drivers/gpu/drm/i915/selftests/i915_request.c 	threads = kcalloc(ncpus * I915_NUM_ENGINES,
threads          1134 drivers/gpu/drm/i915/selftests/i915_request.c 			  sizeof(*threads),
threads          1136 drivers/gpu/drm/i915/selftests/i915_request.c 	if (!threads) {
threads          1191 drivers/gpu/drm/i915/selftests/i915_request.c 			threads[id * ncpus + n] = tsk;
threads          1203 drivers/gpu/drm/i915/selftests/i915_request.c 			struct task_struct *tsk = threads[id * ncpus + n];
threads          1228 drivers/gpu/drm/i915/selftests/i915_request.c 	kfree(threads);
threads           198 drivers/ntb/test/ntb_perf.c 	struct perf_thread threads[MAX_THREADS_CNT];
threads          1043 drivers/ntb/test/ntb_perf.c 		wake_up(&perf->threads[tidx].dma_wait);
threads          1044 drivers/ntb/test/ntb_perf.c 		cancel_work_sync(&perf->threads[tidx].work);
threads          1064 drivers/ntb/test/ntb_perf.c 		pthr = &perf->threads[tidx];
threads          1098 drivers/ntb/test/ntb_perf.c 		pthr = &perf->threads[tidx];
threads          1130 drivers/ntb/test/ntb_perf.c 		pthr = &perf->threads[tidx];
threads           308 drivers/s390/net/lcs.c lcs_set_allowed_threads(struct lcs_card *card, unsigned long threads)
threads           313 drivers/s390/net/lcs.c 	card->thread_allowed_mask = threads;
threads           317 drivers/s390/net/lcs.c static int lcs_threads_running(struct lcs_card *card, unsigned long threads)
threads           323 drivers/s390/net/lcs.c         rc = (card->thread_running_mask & threads);
threads           329 drivers/s390/net/lcs.c lcs_wait_for_threads(struct lcs_card *card, unsigned long threads)
threads           332 drivers/s390/net/lcs.c                         lcs_threads_running(card, threads) == 0);
threads           177 drivers/s390/net/qeth_core_main.c void qeth_set_allowed_threads(struct qeth_card *card, unsigned long threads,
threads           183 drivers/s390/net/qeth_core_main.c 	card->thread_allowed_mask = threads;
threads           185 drivers/s390/net/qeth_core_main.c 		card->thread_start_mask &= threads;
threads           191 drivers/s390/net/qeth_core_main.c int qeth_threads_running(struct qeth_card *card, unsigned long threads)
threads           197 drivers/s390/net/qeth_core_main.c 	rc = (card->thread_running_mask & threads);
threads          1900 fs/binfmt_elf.c 	struct elf_thread_core_info *threads = info->thread;
threads          1901 fs/binfmt_elf.c 	while (threads) {
threads          1903 fs/binfmt_elf.c 		struct elf_thread_core_info *t = threads;
threads          1904 fs/binfmt_elf.c 		threads = t->next;
threads           759 kernel/fork.c  	u64 threads;
threads           767 kernel/fork.c  		threads = MAX_THREADS;
threads           769 kernel/fork.c  		threads = div64_u64((u64) nr_pages * (u64) PAGE_SIZE,
threads           772 kernel/fork.c  	if (threads > max_threads_suggested)
threads           773 kernel/fork.c  		threads = max_threads_suggested;
threads           775 kernel/fork.c  	max_threads = clamp_t(u64, threads, MIN_THREADS, MAX_THREADS);
threads          2959 kernel/fork.c  	int threads = max_threads;
threads          2964 kernel/fork.c  	t.data = &threads;
threads          2972 kernel/fork.c  	max_threads = threads;
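
The kernel/fork.c lines belong to set_max_threads(): max_threads is derived from how much memory thread structures may consume, capped by the caller's suggestion, and clamped to [MIN_THREADS, MAX_THREADS]. A standalone restatement of that arithmetic follows; the 8 * THREAD_SIZE divisor, the fls64() overflow guard, and the bound values are recalled from the upstream function and are assumptions, not visible in the lines above:

#include <linux/bitops.h>
#include <linux/kernel.h>
#include <linux/math64.h>
#include <linux/types.h>

#define DEMO_MIN_THREADS 20             /* illustrative lower bound */
#define DEMO_MAX_THREADS 0x3fffffffu    /* illustrative upper bound */

static u64 demo_max_threads(u64 nr_pages, u64 page_size,
                            u64 thread_size, u64 suggested)
{
        u64 threads;

        /* Avoid 64-bit overflow of nr_pages * page_size. */
        if (fls64(nr_pages) + fls64(page_size) > 64)
                threads = DEMO_MAX_THREADS;
        else
                threads = div64_u64(nr_pages * page_size, thread_size * 8);

        if (threads > suggested)
                threads = suggested;

        return clamp_t(u64, threads, DEMO_MIN_THREADS, DEMO_MAX_THREADS);
}
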
threads            58 tools/perf/arch/x86/tests/perf-time-to-tsc.c 	struct perf_thread_map *threads = NULL;
threads            71 tools/perf/arch/x86/tests/perf-time-to-tsc.c 	threads = thread_map__new(-1, getpid(), UINT_MAX);
threads            72 tools/perf/arch/x86/tests/perf-time-to-tsc.c 	CHECK_NOT_NULL__(threads);
threads            80 tools/perf/arch/x86/tests/perf-time-to-tsc.c 	perf_evlist__set_maps(&evlist->core, cpus, threads);
threads           150 tools/perf/bench/numa.c 	struct thread_data	*threads;
threads           580 tools/perf/bench/numa.c 				td = g->threads + t;
threads           702 tools/perf/bench/numa.c 				td = g->threads + t;
threads           861 tools/perf/bench/numa.c 	g->threads[task_nr].curr_cpu = cpu;
threads           887 tools/perf/bench/numa.c 		td = g->threads + task_nr;
threads           923 tools/perf/bench/numa.c 			td = g->threads + task_nr;
threads           990 tools/perf/bench/numa.c 		struct thread_data *td = g->threads + t;
threads          1222 tools/perf/bench/numa.c 			this_cpu = g->threads[task_nr].curr_cpu;
threads          1301 tools/perf/bench/numa.c 	td = g->threads + task_nr;
threads          1316 tools/perf/bench/numa.c 		td = g->threads + task_nr;
threads          1361 tools/perf/bench/numa.c 	ssize_t size = sizeof(*g->threads)*g->p.nr_tasks;
threads          1364 tools/perf/bench/numa.c 	g->threads = zalloc_shared_data(size);
threads          1367 tools/perf/bench/numa.c 		struct thread_data *td = g->threads + t;
threads          1382 tools/perf/bench/numa.c 	ssize_t size = sizeof(*g->threads)*g->p.nr_tasks;
threads          1384 tools/perf/bench/numa.c 	free_data(g->threads, size);
threads          1576 tools/perf/bench/numa.c 		u64 thread_runtime_ns = g->threads[t].runtime_ns;
threads          1638 tools/perf/bench/numa.c 				td = g->threads + p*g->p.nr_threads + t;
threads            79 tools/perf/bench/sched-pipe.c 	struct thread_data threads[2], *td;
threads           102 tools/perf/bench/sched-pipe.c 		td = threads + t;
threads           119 tools/perf/bench/sched-pipe.c 			td = threads + t;
threads           126 tools/perf/bench/sched-pipe.c 			td = threads + t;
threads           137 tools/perf/bench/sched-pipe.c 			worker_thread(threads + 0);
threads           140 tools/perf/bench/sched-pipe.c 			worker_thread(threads + 1);
threads           162 tools/perf/builtin-ftrace.c 	for (i = 0; i < perf_thread_map__nr(ftrace->evlist->core.threads); i++) {
threads           164 tools/perf/builtin-ftrace.c 			  ftrace->evlist->core.threads->map[i]);
threads          1460 tools/perf/builtin-kvm.c 				    kvm->evlist->core.threads, false, 1);
threads           770 tools/perf/builtin-record.c 		if (evsel__open(pos, pos->core.cpus, pos->core.threads) < 0) {
threads          1308 tools/perf/builtin-record.c 	err = perf_event__synthesize_thread_map2(&rec->tool, rec->evlist->core.threads,
threads          1328 tools/perf/builtin-record.c 	err = __machine__synthesize_threads(machine, tool, &opts->target, rec->evlist->core.threads,
threads           786 tools/perf/builtin-report.c 		nr += machine->threads[i].nr;
threads           793 tools/perf/builtin-report.c 		struct threads *threads = &machine->threads[i];
threads           795 tools/perf/builtin-report.c 		for (nd = rb_first_cached(&threads->entries); nd;
threads          1646 tools/perf/builtin-script.c 	struct perf_thread_map *threads;
threads          1921 tools/perf/builtin-script.c 	int nthreads = perf_thread_map__nr(counter->core.threads);
threads          1943 tools/perf/builtin-script.c 				perf_thread_map__pid(counter->core.threads, thread),
threads          3280 tools/perf/builtin-script.c 	if (!script->cpus || !script->threads)
threads          3286 tools/perf/builtin-script.c 	perf_evlist__set_maps(&evlist->core, script->cpus, script->threads);
threads          3302 tools/perf/builtin-script.c 	if (script->threads) {
threads          3307 tools/perf/builtin-script.c 	script->threads = thread_map__new_event(&event->thread_map);
threads          3308 tools/perf/builtin-script.c 	if (!script->threads)
threads           172 tools/perf/builtin-stat.c 	struct perf_thread_map *threads;
threads           270 tools/perf/builtin-stat.c 	int nthreads = perf_thread_map__nr(evsel_list->core.threads);
threads           402 tools/perf/builtin-stat.c 			    struct perf_thread_map *threads)
threads           410 tools/perf/builtin-stat.c 	for (i = 0; i < threads->nr; i++) {
threads           414 tools/perf/builtin-stat.c 			  threads->map[i].pid);
threads           492 tools/perf/builtin-stat.c 				   evsel_list->core.threads &&
threads           493 tools/perf/builtin-stat.c 				   evsel_list->core.threads->err_thread != -1) {
threads           498 tools/perf/builtin-stat.c 				if (!thread_map__remove(evsel_list->core.threads,
threads           499 tools/perf/builtin-stat.c 							evsel_list->core.threads->err_thread)) {
threads           500 tools/perf/builtin-stat.c 					evsel_list->core.threads->err_thread = -1;
threads           586 tools/perf/builtin-stat.c 			if (!is_target_alive(&target, evsel_list->core.threads))
threads          1506 tools/perf/builtin-stat.c 	if (!st->cpus || !st->threads)
threads          1512 tools/perf/builtin-stat.c 	perf_evlist__set_maps(&evsel_list->core, st->cpus, st->threads);
threads          1528 tools/perf/builtin-stat.c 	if (st->threads) {
threads          1533 tools/perf/builtin-stat.c 	st->threads = thread_map__new_event(&event->thread_map);
threads          1534 tools/perf/builtin-stat.c 	if (!st->threads)
threads          1884 tools/perf/builtin-stat.c 		thread_map__read_comms(evsel_list->core.threads);
threads          1887 tools/perf/builtin-stat.c 				perf_thread_map__nr(evsel_list->core.threads))) {
threads          1018 tools/perf/builtin-top.c 				     top->evlist->core.threads) < 0) {
threads          1253 tools/perf/builtin-top.c 				    top->evlist->core.threads, false,
threads          1419 tools/perf/builtin-trace.c 					    evlist->core.threads, trace__tool_process, false,
threads          3201 tools/perf/builtin-trace.c 	} else if (perf_thread_map__pid(trace->evlist->core.threads, 0) == -1) {
threads          3430 tools/perf/builtin-trace.c 	trace->multiple_threads = perf_thread_map__pid(evlist->core.threads, 0) == -1 ||
threads          3431 tools/perf/builtin-trace.c 				  evlist->core.threads->nr > 1 ||
threads          3757 tools/perf/builtin-trace.c DEFINE_RESORT_RB(threads, (thread__nr_events(a->thread->priv) < thread__nr_events(b->thread->priv)),
threads          3771 tools/perf/builtin-trace.c 		DECLARE_RESORT_RB_MACHINE_THREADS(threads, trace->host, i);
threads          3773 tools/perf/builtin-trace.c 		if (threads == NULL) {
threads          3778 tools/perf/builtin-trace.c 		resort_rb__for_each_entry(nd, threads)
threads          3781 tools/perf/builtin-trace.c 		resort_rb__delete(threads);
threads            47 tools/perf/lib/evlist.c 	perf_thread_map__put(evsel->threads);
threads            48 tools/perf/lib/evlist.c 	evsel->threads = perf_thread_map__get(evlist->threads);
threads           111 tools/perf/lib/evlist.c 			   struct perf_thread_map *threads)
threads           125 tools/perf/lib/evlist.c 	if (threads != evlist->threads) {
threads           126 tools/perf/lib/evlist.c 		perf_thread_map__put(evlist->threads);
threads           127 tools/perf/lib/evlist.c 		evlist->threads = perf_thread_map__get(threads);
threads           139 tools/perf/lib/evlist.c 		err = perf_evsel__open(evsel, evsel->cpus, evsel->threads);
threads           249 tools/perf/lib/evlist.c 	int nr_threads = perf_thread_map__nr(evlist->threads);
threads            67 tools/perf/lib/evsel.c 		     struct perf_thread_map *threads)
threads            83 tools/perf/lib/evsel.c 	if (threads == NULL) {
threads            92 tools/perf/lib/evsel.c 		threads = empty_thread_map;
threads            96 tools/perf/lib/evsel.c 	    perf_evsel__alloc_fd(evsel, cpus->nr, threads->nr) < 0)
threads           100 tools/perf/lib/evsel.c 		for (thread = 0; thread < threads->nr; thread++) {
threads           104 tools/perf/lib/evsel.c 						 threads->map[thread].pid,
threads           226 tools/perf/lib/evsel.c 	return evsel->threads;
threads            20 tools/perf/lib/include/internal/evlist.h 	struct perf_thread_map	*threads;
threads            42 tools/perf/lib/include/internal/evsel.h 	struct perf_thread_map	*threads;
threads            33 tools/perf/lib/include/perf/evlist.h 				       struct perf_thread_map *threads);
threads            29 tools/perf/lib/include/perf/evsel.h 				 struct perf_thread_map *threads);
threads            14 tools/perf/lib/include/perf/threadmap.h LIBPERF_API int perf_thread_map__nr(struct perf_thread_map *threads);
threads            74 tools/perf/lib/tests/test-evlist.c 	struct perf_thread_map *threads;
threads            87 tools/perf/lib/tests/test-evlist.c 	threads = perf_thread_map__new_dummy();
threads            88 tools/perf/lib/tests/test-evlist.c 	__T("failed to create threads", threads);
threads            90 tools/perf/lib/tests/test-evlist.c 	perf_thread_map__set_pid(threads, 0, 0);
threads           105 tools/perf/lib/tests/test-evlist.c 	perf_evlist__set_maps(evlist, NULL, threads);
threads           118 tools/perf/lib/tests/test-evlist.c 	perf_thread_map__put(threads);
threads           125 tools/perf/lib/tests/test-evlist.c 	struct perf_thread_map *threads;
threads           140 tools/perf/lib/tests/test-evlist.c 	threads = perf_thread_map__new_dummy();
threads           141 tools/perf/lib/tests/test-evlist.c 	__T("failed to create threads", threads);
threads           143 tools/perf/lib/tests/test-evlist.c 	perf_thread_map__set_pid(threads, 0, 0);
threads           158 tools/perf/lib/tests/test-evlist.c 	perf_evlist__set_maps(evlist, NULL, threads);
threads           180 tools/perf/lib/tests/test-evlist.c 	perf_thread_map__put(threads);
threads            52 tools/perf/lib/tests/test-evsel.c 	struct perf_thread_map *threads;
threads            60 tools/perf/lib/tests/test-evsel.c 	threads = perf_thread_map__new_dummy();
threads            61 tools/perf/lib/tests/test-evsel.c 	__T("failed to create threads", threads);
threads            63 tools/perf/lib/tests/test-evsel.c 	perf_thread_map__set_pid(threads, 0, 0);
threads            68 tools/perf/lib/tests/test-evsel.c 	err = perf_evsel__open(evsel, NULL, threads);
threads            77 tools/perf/lib/tests/test-evsel.c 	perf_thread_map__put(threads);
threads            84 tools/perf/lib/tests/test-evsel.c 	struct perf_thread_map *threads;
threads            93 tools/perf/lib/tests/test-evsel.c 	threads = perf_thread_map__new_dummy();
threads            94 tools/perf/lib/tests/test-evsel.c 	__T("failed to create threads", threads);
threads            96 tools/perf/lib/tests/test-evsel.c 	perf_thread_map__set_pid(threads, 0, 0);
threads           101 tools/perf/lib/tests/test-evsel.c 	err = perf_evsel__open(evsel, NULL, threads);
threads           119 tools/perf/lib/tests/test-evsel.c 	perf_thread_map__put(threads);
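
test-evsel.c above opens a software counter per thread: a dummy one-entry thread map pointed at the current process, perf_evsel__open() with a NULL CPU map, then cleanup. A minimal userspace sketch against libperf's <perf/evsel.h> and <perf/threadmap.h>; it assumes the libperf headers and library are installed, and the perf_evsel__new()/__read()/__close()/__delete() calls come from that API rather than from the lines quoted here:

#include <linux/perf_event.h>
#include <perf/evsel.h>
#include <perf/threadmap.h>
#include <stdio.h>

int main(void)
{
        struct perf_event_attr attr = {
                .type   = PERF_TYPE_SOFTWARE,
                .config = PERF_COUNT_SW_CPU_CLOCK,
        };
        struct perf_counts_values counts = {};
        struct perf_thread_map *threads;
        struct perf_evsel *evsel;
        int err = 1;

        threads = perf_thread_map__new_dummy();
        if (!threads)
                return 1;
        perf_thread_map__set_pid(threads, 0, 0);        /* pid 0: this process */

        evsel = perf_evsel__new(&attr);
        if (!evsel)
                goto out_threads;

        /* NULL cpus follows the tests above: count on any CPU. */
        if (perf_evsel__open(evsel, NULL, threads))
                goto out_evsel;

        if (!perf_evsel__read(evsel, 0, 0, &counts))
                printf("cpu-clock: %llu\n", (unsigned long long)counts.val);

        err = 0;
        perf_evsel__close(evsel);
out_evsel:
        perf_evsel__delete(evsel);
out_threads:
        perf_thread_map__put(threads);
        return err;
}
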
threads            15 tools/perf/lib/tests/test-threadmap.c 	struct perf_thread_map *threads;
threads            21 tools/perf/lib/tests/test-threadmap.c 	threads = perf_thread_map__new_dummy();
threads            22 tools/perf/lib/tests/test-threadmap.c 	if (!threads)
threads            25 tools/perf/lib/tests/test-threadmap.c 	perf_thread_map__get(threads);
threads            26 tools/perf/lib/tests/test-threadmap.c 	perf_thread_map__put(threads);
threads            27 tools/perf/lib/tests/test-threadmap.c 	perf_thread_map__put(threads);
threads            47 tools/perf/lib/threadmap.c 	struct perf_thread_map *threads = thread_map__alloc(1);
threads            49 tools/perf/lib/threadmap.c 	if (threads != NULL) {
threads            50 tools/perf/lib/threadmap.c 		perf_thread_map__set_pid(threads, 0, -1);
threads            51 tools/perf/lib/threadmap.c 		threads->nr = 1;
threads            52 tools/perf/lib/threadmap.c 		refcount_set(&threads->refcnt, 1);
threads            54 tools/perf/lib/threadmap.c 	return threads;
threads            57 tools/perf/lib/threadmap.c static void perf_thread_map__delete(struct perf_thread_map *threads)
threads            59 tools/perf/lib/threadmap.c 	if (threads) {
threads            62 tools/perf/lib/threadmap.c 		WARN_ONCE(refcount_read(&threads->refcnt) != 0,
threads            64 tools/perf/lib/threadmap.c 		for (i = 0; i < threads->nr; i++)
threads            65 tools/perf/lib/threadmap.c 			free(perf_thread_map__comm(threads, i));
threads            66 tools/perf/lib/threadmap.c 		free(threads);
threads            83 tools/perf/lib/threadmap.c int perf_thread_map__nr(struct perf_thread_map *threads)
threads            85 tools/perf/lib/threadmap.c 	return threads ? threads->nr : 1;
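
threadmap.c above also documents the map's reference counting: perf_thread_map__new_dummy() returns a one-entry map (pid -1) with refcnt 1, __get() takes an extra reference, and the map is only freed once every __put() has dropped it (test-threadmap.c exercises exactly that get/put/put sequence). A tiny sketch of that lifecycle:

#include <stdio.h>
#include <perf/threadmap.h>

int main(void)
{
        struct perf_thread_map *threads = perf_thread_map__new_dummy();

        if (!threads)
                return 1;

        printf("%d thread(s), pid %d\n",
               perf_thread_map__nr(threads),
               perf_thread_map__pid(threads, 0));

        perf_thread_map__get(threads);  /* extra reference, e.g. handed to an evsel */
        perf_thread_map__put(threads);  /* balances the get */
        perf_thread_map__put(threads);  /* drops the initial reference; map is freed */
        return 0;
}
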
threads           567 tools/perf/tests/code-reading.c 	struct perf_thread_map *threads = NULL;
threads           609 tools/perf/tests/code-reading.c 	threads = thread_map__new_by_tid(pid);
threads           610 tools/perf/tests/code-reading.c 	if (!threads) {
threads           615 tools/perf/tests/code-reading.c 	ret = perf_event__synthesize_thread_map(NULL, threads,
threads           643 tools/perf/tests/code-reading.c 		perf_evlist__set_maps(&evlist->core, cpus, threads);
threads           671 tools/perf/tests/code-reading.c 				perf_thread_map__get(threads);
threads           721 tools/perf/tests/code-reading.c 		perf_thread_map__put(threads);
threads            62 tools/perf/tests/event-times.c 	struct perf_thread_map *threads;
threads            67 tools/perf/tests/event-times.c 	threads = thread_map__new(-1, getpid(), UINT_MAX);
threads            68 tools/perf/tests/event-times.c 	if (threads == NULL) {
threads            75 tools/perf/tests/event-times.c 	err = perf_evsel__open_per_thread(evsel, threads);
threads            81 tools/perf/tests/event-times.c 	perf_thread_map__put(threads);
threads            88 tools/perf/tests/event-times.c 	struct perf_thread_map *threads;
threads            93 tools/perf/tests/event-times.c 	threads = thread_map__new(-1, getpid(), UINT_MAX);
threads            94 tools/perf/tests/event-times.c 	if (threads == NULL) {
threads            99 tools/perf/tests/event-times.c 	err = perf_evsel__open_per_thread(evsel, threads);
threads           101 tools/perf/tests/event-times.c 	perf_thread_map__put(threads);
threads            73 tools/perf/tests/keep-tracking.c 	struct perf_thread_map *threads = NULL;
threads            80 tools/perf/tests/keep-tracking.c 	threads = thread_map__new(-1, getpid(), UINT_MAX);
threads            81 tools/perf/tests/keep-tracking.c 	CHECK_NOT_NULL__(threads);
threads            89 tools/perf/tests/keep-tracking.c 	perf_evlist__set_maps(&evlist->core, cpus, threads);
threads           158 tools/perf/tests/keep-tracking.c 		perf_thread_map__put(threads);
threads            35 tools/perf/tests/mmap-basic.c 	struct perf_thread_map *threads;
threads            48 tools/perf/tests/mmap-basic.c 	threads = thread_map__new(-1, getpid(), UINT_MAX);
threads            49 tools/perf/tests/mmap-basic.c 	if (threads == NULL) {
threads            75 tools/perf/tests/mmap-basic.c 	perf_evlist__set_maps(&evlist->core, cpus, threads);
threads            92 tools/perf/tests/mmap-basic.c 		if (evsel__open(evsels[i], cpus, threads) < 0) {
threads           161 tools/perf/tests/mmap-basic.c 	threads = NULL;
threads           165 tools/perf/tests/mmap-basic.c 	perf_thread_map__put(threads);
threads            32 tools/perf/tests/mmap-thread-lookup.c static struct thread_data threads[THREADS];
threads            81 tools/perf/tests/mmap-thread-lookup.c 	struct thread_data *td = &threads[i];
threads           101 tools/perf/tests/mmap-thread-lookup.c 	struct thread_data *td0 = &threads[0];
threads           118 tools/perf/tests/mmap-thread-lookup.c 	struct thread_data *td0 = &threads[0];
threads           127 tools/perf/tests/mmap-thread-lookup.c 		err = pthread_join(threads[i].pt, NULL);
threads           186 tools/perf/tests/mmap-thread-lookup.c 		struct thread_data *td = &threads[i];
threads            30 tools/perf/tests/openat-syscall-all-cpus.c 	struct perf_thread_map *threads = thread_map__new(-1, getpid(), UINT_MAX);
threads            34 tools/perf/tests/openat-syscall-all-cpus.c 	if (threads == NULL) {
threads            54 tools/perf/tests/openat-syscall-all-cpus.c 	if (evsel__open(evsel, cpus, threads) < 0) {
threads           128 tools/perf/tests/openat-syscall-all-cpus.c 	perf_thread_map__put(threads);
threads            64 tools/perf/tests/openat-syscall-tp-fields.c 	perf_thread_map__set_pid(evlist->core.threads, 0, getpid());
threads            21 tools/perf/tests/openat-syscall.c 	struct perf_thread_map *threads = thread_map__new(-1, getpid(), UINT_MAX);
threads            25 tools/perf/tests/openat-syscall.c 	if (threads == NULL) {
threads            37 tools/perf/tests/openat-syscall.c 	if (perf_evsel__open_per_thread(evsel, threads) < 0) {
threads            66 tools/perf/tests/openat-syscall.c 	perf_thread_map__put(threads);
threads            45 tools/perf/tests/sw-clock.c 	struct perf_thread_map *threads;
threads            64 tools/perf/tests/sw-clock.c 	threads = thread_map__new_by_tid(getpid());
threads            65 tools/perf/tests/sw-clock.c 	if (!cpus || !threads) {
threads            71 tools/perf/tests/sw-clock.c 	perf_evlist__set_maps(&evlist->core, cpus, threads);
threads            74 tools/perf/tests/sw-clock.c 	threads = NULL;
threads           133 tools/perf/tests/sw-clock.c 	perf_thread_map__put(threads);
threads           335 tools/perf/tests/switch-tracking.c 	struct perf_thread_map *threads = NULL;
threads           343 tools/perf/tests/switch-tracking.c 	threads = thread_map__new(-1, getpid(), UINT_MAX);
threads           344 tools/perf/tests/switch-tracking.c 	if (!threads) {
threads           361 tools/perf/tests/switch-tracking.c 	perf_evlist__set_maps(&evlist->core, cpus, threads);
threads           578 tools/perf/tests/switch-tracking.c 		perf_thread_map__put(threads);
threads            54 tools/perf/tests/task-exit.c 	struct perf_thread_map *threads;
threads            73 tools/perf/tests/task-exit.c 	threads = thread_map__new_by_tid(-1);
threads            74 tools/perf/tests/task-exit.c 	if (!cpus || !threads) {
threads            80 tools/perf/tests/task-exit.c 	perf_evlist__set_maps(&evlist->core, cpus, threads);
threads            83 tools/perf/tests/task-exit.c 	threads = NULL;
threads           153 tools/perf/tests/task-exit.c 	perf_thread_map__put(threads);
threads            68 tools/perf/tests/thread-map.c 	struct perf_thread_map *threads;
threads            74 tools/perf/tests/thread-map.c 	threads = thread_map__new_event(&event->thread_map);
threads            75 tools/perf/tests/thread-map.c 	TEST_ASSERT_VAL("failed to alloc map", threads);
threads            77 tools/perf/tests/thread-map.c 	TEST_ASSERT_VAL("wrong nr", threads->nr == 1);
threads            79 tools/perf/tests/thread-map.c 			perf_thread_map__pid(threads, 0) == getpid());
threads            81 tools/perf/tests/thread-map.c 			perf_thread_map__comm(threads, 0) &&
threads            82 tools/perf/tests/thread-map.c 			!strcmp(perf_thread_map__comm(threads, 0), NAME));
threads            84 tools/perf/tests/thread-map.c 			refcount_read(&threads->refcnt) == 1);
threads            85 tools/perf/tests/thread-map.c 	perf_thread_map__put(threads);
threads            91 tools/perf/tests/thread-map.c 	struct perf_thread_map *threads;
threads            97 tools/perf/tests/thread-map.c 	threads = thread_map__new_by_pid(getpid());
threads            98 tools/perf/tests/thread-map.c 	TEST_ASSERT_VAL("failed to alloc map", threads);
threads           100 tools/perf/tests/thread-map.c 	thread_map__read_comms(threads);
threads           103 tools/perf/tests/thread-map.c 		!perf_event__synthesize_thread_map2(NULL, threads, process_event, NULL));
threads           110 tools/perf/tests/thread-map.c 	struct perf_thread_map *threads;
threads           117 tools/perf/tests/thread-map.c 	threads = thread_map__new_str(str, NULL, 0, false);
threads           120 tools/perf/tests/thread-map.c 			threads);
threads           123 tools/perf/tests/thread-map.c 		thread_map__fprintf(threads, stderr);
threads           126 tools/perf/tests/thread-map.c 			!thread_map__remove(threads, 0));
threads           128 tools/perf/tests/thread-map.c 	TEST_ASSERT_VAL("thread_map count != 1", threads->nr == 1);
threads           131 tools/perf/tests/thread-map.c 		thread_map__fprintf(threads, stderr);
threads           134 tools/perf/tests/thread-map.c 			!thread_map__remove(threads, 0));
threads           136 tools/perf/tests/thread-map.c 	TEST_ASSERT_VAL("thread_map count != 0", threads->nr == 0);
threads           139 tools/perf/tests/thread-map.c 		thread_map__fprintf(threads, stderr);
threads           142 tools/perf/tests/thread-map.c 			thread_map__remove(threads, 0));
threads           144 tools/perf/tests/thread-map.c 	for (i = 0; i < threads->nr; i++)
threads           145 tools/perf/tests/thread-map.c 		zfree(&threads->map[i].comm);
threads           147 tools/perf/tests/thread-map.c 	free(threads);
threads           137 tools/perf/util/auxtrace.c 		if (evlist->core.threads)
threads           138 tools/perf/util/auxtrace.c 			mp->tid = perf_thread_map__pid(evlist->core.threads, 0);
threads           143 tools/perf/util/auxtrace.c 		mp->tid = perf_thread_map__pid(evlist->core.threads, idx);
threads           281 tools/perf/util/event.c 	struct perf_thread_map *threads = thread_map__new_event(&event->thread_map);
threads           286 tools/perf/util/event.c 	if (threads)
threads           287 tools/perf/util/event.c 		ret += thread_map__fprintf(threads, fp);
threads           291 tools/perf/util/event.c 	perf_thread_map__put(threads);
threads            56 tools/perf/util/evlist.c 		  struct perf_thread_map *threads)
threads            59 tools/perf/util/evlist.c 	perf_evlist__set_maps(&evlist->core, cpus, threads);
threads           152 tools/perf/util/evlist.c 	perf_thread_map__put(evlist->core.threads);
threads           154 tools/perf/util/evlist.c 	evlist->core.threads = NULL;
threads           321 tools/perf/util/evlist.c 		return perf_thread_map__nr(evlist->core.threads);
threads           436 tools/perf/util/evlist.c 	if (!evsel->core.system_wide && evlist->core.threads && thread >= 0)
threads           437 tools/perf/util/evlist.c 		sid->tid = perf_thread_map__pid(evlist->core.threads, thread);
threads           602 tools/perf/util/evlist.c 		evlist->core.nr_mmaps = perf_thread_map__nr(evlist->core.threads);
threads           716 tools/perf/util/evlist.c 	int nr_threads = perf_thread_map__nr(evlist->core.threads);
threads           744 tools/perf/util/evlist.c 	int nr_threads = perf_thread_map__nr(evlist->core.threads);
threads           895 tools/perf/util/evlist.c 	const struct perf_thread_map *threads = evlist->core.threads;
threads           922 tools/perf/util/evlist.c 		    perf_evsel__alloc_id(&evsel->core, perf_cpu_map__nr(cpus), threads->nr) < 0)
threads           941 tools/perf/util/evlist.c 	struct perf_thread_map *threads;
threads           961 tools/perf/util/evlist.c 	threads = thread_map__new_str(target->pid, target->tid, target->uid,
threads           964 tools/perf/util/evlist.c 	if (!threads)
threads           977 tools/perf/util/evlist.c 	perf_evlist__set_maps(&evlist->core, cpus, threads);
threads           982 tools/perf/util/evlist.c 	perf_thread_map__put(threads);
threads          1212 tools/perf/util/evlist.c 	struct perf_thread_map *threads;
threads          1228 tools/perf/util/evlist.c 	threads = perf_thread_map__new_dummy();
threads          1229 tools/perf/util/evlist.c 	if (!threads)
threads          1232 tools/perf/util/evlist.c 	perf_evlist__set_maps(&evlist->core, cpus, threads);
threads          1249 tools/perf/util/evlist.c 	if (evlist->core.threads == NULL && evlist->core.cpus == NULL) {
threads          1258 tools/perf/util/evlist.c 		err = evsel__open(evsel, evsel->core.cpus, evsel->core.threads);
threads          1352 tools/perf/util/evlist.c 		if (evlist->core.threads == NULL) {
threads          1357 tools/perf/util/evlist.c 		perf_thread_map__set_pid(evlist->core.threads, 0, evlist->workload.pid);
threads          1767 tools/perf/util/evlist.c 				     evlist->core.threads) < 0)
threads            88 tools/perf/util/evlist.h 		  struct perf_thread_map *threads);
threads          1254 tools/perf/util/evsel.c 	perf_thread_map__put(evsel->core.threads);
threads          1484 tools/perf/util/evsel.c 				  struct perf_thread_map *threads,
threads          1487 tools/perf/util/evsel.c 	pid_t ignore_pid = perf_thread_map__pid(threads, thread);
threads          1501 tools/perf/util/evsel.c 	if (threads->nr == 1)
threads          1508 tools/perf/util/evsel.c 	if (update_fds(evsel, nr_cpus, cpu, threads->nr, thread))
threads          1511 tools/perf/util/evsel.c 	if (thread_map__remove(threads, thread))
threads          1573 tools/perf/util/evsel.c 		struct perf_thread_map *threads)
threads          1596 tools/perf/util/evsel.c 	if (threads == NULL) {
threads          1605 tools/perf/util/evsel.c 		threads = empty_thread_map;
threads          1611 tools/perf/util/evsel.c 		nthreads = threads->nr;
threads          1656 tools/perf/util/evsel.c 				pid = perf_thread_map__pid(threads, thread);
threads          1670 tools/perf/util/evsel.c 				if (ignore_missing_thread(evsel, cpus->nr, cpu, threads, thread, err)) {
threads          1813 tools/perf/util/evsel.c 		threads->err_thread = thread;
threads          1838 tools/perf/util/evsel.c 				struct perf_thread_map *threads)
threads          1840 tools/perf/util/evsel.c 	return evsel__open(evsel, NULL, threads);
threads          2540 tools/perf/util/evsel.c 	struct perf_thread_map *threads = evsel->core.threads;
threads          2542 tools/perf/util/evsel.c 	if (perf_evsel__alloc_id(&evsel->core, cpus->nr, threads->nr))
threads           227 tools/perf/util/evsel.h 				struct perf_thread_map *threads);
threads           229 tools/perf/util/evsel.h 		struct perf_thread_map *threads);
threads            57 tools/perf/util/machine.c 		struct threads *threads = &machine->threads[i];
threads            58 tools/perf/util/machine.c 		threads->entries = RB_ROOT_CACHED;
threads            59 tools/perf/util/machine.c 		init_rwsem(&threads->lock);
threads            60 tools/perf/util/machine.c 		threads->nr = 0;
threads            61 tools/perf/util/machine.c 		INIT_LIST_HEAD(&threads->dead);
threads            62 tools/perf/util/machine.c 		threads->last_match = NULL;
threads           194 tools/perf/util/machine.c 		struct threads *threads = &machine->threads[i];
threads           195 tools/perf/util/machine.c 		down_write(&threads->lock);
threads           196 tools/perf/util/machine.c 		nd = rb_first_cached(&threads->entries);
threads           203 tools/perf/util/machine.c 		up_write(&threads->lock);
threads           223 tools/perf/util/machine.c 		struct threads *threads = &machine->threads[i];
threads           233 tools/perf/util/machine.c 		list_for_each_entry_safe(thread, n, &threads->dead, node)
threads           236 tools/perf/util/machine.c 		exit_rwsem(&threads->lock);
threads           446 tools/perf/util/machine.c __threads__get_last_match(struct threads *threads, struct machine *machine,
threads           451 tools/perf/util/machine.c 	th = threads->last_match;
threads           458 tools/perf/util/machine.c 		threads->last_match = NULL;
threads           465 tools/perf/util/machine.c threads__get_last_match(struct threads *threads, struct machine *machine,
threads           471 tools/perf/util/machine.c 		th = __threads__get_last_match(threads, machine, pid, tid);
threads           477 tools/perf/util/machine.c __threads__set_last_match(struct threads *threads, struct thread *th)
threads           479 tools/perf/util/machine.c 	threads->last_match = th;
threads           483 tools/perf/util/machine.c threads__set_last_match(struct threads *threads, struct thread *th)
threads           486 tools/perf/util/machine.c 		__threads__set_last_match(threads, th);
threads           494 tools/perf/util/machine.c 						  struct threads *threads,
threads           498 tools/perf/util/machine.c 	struct rb_node **p = &threads->entries.rb_root.rb_node;
threads           503 tools/perf/util/machine.c 	th = threads__get_last_match(threads, machine, pid, tid);
threads           512 tools/perf/util/machine.c 			threads__set_last_match(threads, th);
threads           531 tools/perf/util/machine.c 		rb_insert_color_cached(&th->rb_node, &threads->entries, leftmost);
threads           542 tools/perf/util/machine.c 			rb_erase_cached(&th->rb_node, &threads->entries);
threads           551 tools/perf/util/machine.c 		threads__set_last_match(threads, th);
threads           552 tools/perf/util/machine.c 		++threads->nr;
threads           566 tools/perf/util/machine.c 	struct threads *threads = machine__threads(machine, tid);
threads           569 tools/perf/util/machine.c 	down_write(&threads->lock);
threads           571 tools/perf/util/machine.c 	up_write(&threads->lock);
threads           578 tools/perf/util/machine.c 	struct threads *threads = machine__threads(machine, tid);
threads           581 tools/perf/util/machine.c 	down_read(&threads->lock);
threads           582 tools/perf/util/machine.c 	th =  ____machine__findnew_thread(machine, threads, pid, tid, false);
threads           583 tools/perf/util/machine.c 	up_read(&threads->lock);
threads           862 tools/perf/util/machine.c 		struct threads *threads = &machine->threads[i];
threads           864 tools/perf/util/machine.c 		down_read(&threads->lock);
threads           866 tools/perf/util/machine.c 		ret = fprintf(fp, "Threads: %u\n", threads->nr);
threads           868 tools/perf/util/machine.c 		for (nd = rb_first_cached(&threads->entries); nd;
threads           875 tools/perf/util/machine.c 		up_read(&threads->lock);
threads          1756 tools/perf/util/machine.c 	struct threads *threads = machine__threads(machine, th->tid);
threads          1758 tools/perf/util/machine.c 	if (threads->last_match == th)
threads          1759 tools/perf/util/machine.c 		threads__set_last_match(threads, NULL);
threads          1762 tools/perf/util/machine.c 		down_write(&threads->lock);
threads          1766 tools/perf/util/machine.c 	rb_erase_cached(&th->rb_node, &threads->entries);
threads          1768 tools/perf/util/machine.c 	--threads->nr;
threads          1774 tools/perf/util/machine.c 	list_add_tail(&th->node, &threads->dead);
threads          1784 tools/perf/util/machine.c 		up_write(&threads->lock);
threads          2542 tools/perf/util/machine.c 	struct threads *threads;
threads          2549 tools/perf/util/machine.c 		threads = &machine->threads[i];
threads          2550 tools/perf/util/machine.c 		for (nd = rb_first_cached(&threads->entries); nd;
threads          2558 tools/perf/util/machine.c 		list_for_each_entry(thread, &threads->dead, node) {
threads            49 tools/perf/util/machine.h 	struct threads    threads[THREADS__TABLE_SIZE];
threads            64 tools/perf/util/machine.h static inline struct threads *machine__threads(struct machine *machine, pid_t tid)
threads            67 tools/perf/util/machine.h 	return &machine->threads[(unsigned int)tid % THREADS__TABLE_SIZE];
threads           619 tools/perf/util/python.c 	struct perf_thread_map *threads;
threads           632 tools/perf/util/python.c 	pthreads->threads = thread_map__new(pid, tid, uid);
threads           633 tools/perf/util/python.c 	if (pthreads->threads == NULL)
threads           640 tools/perf/util/python.c 	perf_thread_map__put(pthreads->threads);
threads           648 tools/perf/util/python.c 	return pthreads->threads->nr;
threads           655 tools/perf/util/python.c 	if (i >= pthreads->threads->nr)
threads           658 tools/perf/util/python.c 	return Py_BuildValue("i", pthreads->threads->map[i]);
threads           811 tools/perf/util/python.c 	struct perf_thread_map *threads = NULL;
threads           821 tools/perf/util/python.c 		threads = ((struct pyrf_thread_map *)pthreads)->threads;
threads           831 tools/perf/util/python.c 	if (evsel__open(evsel, cpus, threads) < 0) {
threads           880 tools/perf/util/python.c 	struct perf_thread_map *threads;
threads           885 tools/perf/util/python.c 	threads = ((struct pyrf_thread_map *)pthreads)->threads;
threads           887 tools/perf/util/python.c 	evlist__init(&pevlist->evlist, cpus, threads);
threads           148 tools/perf/util/rb_resort.h  DECLARE_RESORT_RB(__name)(&__machine->threads[hash_bucket].entries.rb_root, \
threads           149 tools/perf/util/rb_resort.h 			   __machine->threads[hash_bucket].nr)
threads          1395 tools/perf/util/scripting-engines/trace-event-python.c 	struct perf_thread_map *threads = counter->core.threads;
threads          1405 tools/perf/util/scripting-engines/trace-event-python.c 	for (thread = 0; thread < threads->nr; thread++) {
threads          1408 tools/perf/util/scripting-engines/trace-event-python.c 				     perf_thread_map__pid(threads, thread), tstamp,
threads           120 tools/perf/util/stat-display.c 			perf_thread_map__comm(evsel->core.threads, id),
threads           122 tools/perf/util/stat-display.c 			perf_thread_map__pid(evsel->core.threads, id),
threads           748 tools/perf/util/stat-display.c 	int nthreads = perf_thread_map__nr(counter->core.threads);
threads           177 tools/perf/util/stat.c 	int nthreads = perf_thread_map__nr(evsel->core.threads);
threads           335 tools/perf/util/stat.c 	int nthreads = perf_thread_map__nr(counter->core.threads);
threads           512 tools/perf/util/stat.c 	return perf_evsel__open_per_thread(evsel, evsel->core.threads);
threads           560 tools/perf/util/synthetic-events.c 				      struct perf_thread_map *threads,
threads           588 tools/perf/util/synthetic-events.c 	for (thread = 0; thread < threads->nr; ++thread) {
threads           591 tools/perf/util/synthetic-events.c 					       perf_thread_map__pid(threads, thread), 0,
threads           602 tools/perf/util/synthetic-events.c 		if ((int) comm_event->comm.pid != perf_thread_map__pid(threads, thread)) {
threads           606 tools/perf/util/synthetic-events.c 			for (j = 0; j < threads->nr; ++j) {
threads           607 tools/perf/util/synthetic-events.c 				if ((int) comm_event->comm.pid == perf_thread_map__pid(threads, j)) {
threads           883 tools/perf/util/synthetic-events.c 				      struct perf_thread_map *threads,
threads           891 tools/perf/util/synthetic-events.c 	size +=	threads->nr * sizeof(event->thread_map.entries[0]);
threads           899 tools/perf/util/synthetic-events.c 	event->thread_map.nr = threads->nr;
threads           901 tools/perf/util/synthetic-events.c 	for (i = 0; i < threads->nr; i++) {
threads           903 tools/perf/util/synthetic-events.c 		char *comm = perf_thread_map__comm(threads, i);
threads           908 tools/perf/util/synthetic-events.c 		entry->pid = perf_thread_map__pid(threads, i);
threads          1471 tools/perf/util/synthetic-events.c 				  struct target *target, struct perf_thread_map *threads,
threads          1476 tools/perf/util/synthetic-events.c 		return perf_event__synthesize_thread_map(tool, threads, process, machine, data_mmap);
threads          1486 tools/perf/util/synthetic-events.c 				struct perf_thread_map *threads, bool data_mmap,
threads          1489 tools/perf/util/synthetic-events.c 	return __machine__synthesize_threads(machine, NULL, target, threads,
threads          1792 tools/perf/util/synthetic-events.c 	err = perf_event__synthesize_thread_map2(tool, evlist->core.threads, process, NULL);
threads            53 tools/perf/util/synthetic-events.h int perf_event__synthesize_thread_map2(struct perf_tool *tool, struct perf_thread_map *threads, perf_event__handler_t process, struct machine *machine);
threads            54 tools/perf/util/synthetic-events.h int perf_event__synthesize_thread_map(struct perf_tool *tool, struct perf_thread_map *threads, perf_event__handler_t process, struct machine *machine, bool mmap_data);
threads            65 tools/perf/util/synthetic-events.h 				  struct target *target, struct perf_thread_map *threads,
threads            69 tools/perf/util/synthetic-events.h 				struct perf_thread_map *threads, bool data_mmap,
threads            35 tools/perf/util/thread_map.c 	struct perf_thread_map *threads;
threads            46 tools/perf/util/thread_map.c 	threads = thread_map__alloc(items);
threads            47 tools/perf/util/thread_map.c 	if (threads != NULL) {
threads            49 tools/perf/util/thread_map.c 			perf_thread_map__set_pid(threads, i, atoi(namelist[i]->d_name));
threads            50 tools/perf/util/thread_map.c 		threads->nr = items;
threads            51 tools/perf/util/thread_map.c 		refcount_set(&threads->refcnt, 1);
threads            58 tools/perf/util/thread_map.c 	return threads;
threads            63 tools/perf/util/thread_map.c 	struct perf_thread_map *threads = thread_map__alloc(1);
threads            65 tools/perf/util/thread_map.c 	if (threads != NULL) {
threads            66 tools/perf/util/thread_map.c 		perf_thread_map__set_pid(threads, 0, tid);
threads            67 tools/perf/util/thread_map.c 		threads->nr = 1;
threads            68 tools/perf/util/thread_map.c 		refcount_set(&threads->refcnt, 1);
threads            71 tools/perf/util/thread_map.c 	return threads;
threads            80 tools/perf/util/thread_map.c 	struct perf_thread_map *threads = thread_map__alloc(max_threads);
threads            82 tools/perf/util/thread_map.c 	if (threads == NULL)
threads            89 tools/perf/util/thread_map.c 	threads->nr = 0;
threads            90 tools/perf/util/thread_map.c 	refcount_set(&threads->refcnt, 1);
threads           114 tools/perf/util/thread_map.c 		while (threads->nr + items >= max_threads) {
threads           122 tools/perf/util/thread_map.c 			tmp = perf_thread_map__realloc(threads, max_threads);
threads           126 tools/perf/util/thread_map.c 			threads = tmp;
threads           130 tools/perf/util/thread_map.c 			perf_thread_map__set_pid(threads, threads->nr + i,
threads           138 tools/perf/util/thread_map.c 		threads->nr += items;
threads           144 tools/perf/util/thread_map.c 	return threads;
threads           147 tools/perf/util/thread_map.c 	free(threads);
threads           156 tools/perf/util/thread_map.c 	zfree(&threads);
threads           183 tools/perf/util/thread_map.c 	struct perf_thread_map *threads = NULL, *nt;
threads           213 tools/perf/util/thread_map.c 		nt = perf_thread_map__realloc(threads, total_tasks);
threads           217 tools/perf/util/thread_map.c 		threads = nt;
threads           220 tools/perf/util/thread_map.c 			perf_thread_map__set_pid(threads, j++, atoi(namelist[i]->d_name));
threads           223 tools/perf/util/thread_map.c 		threads->nr = total_tasks;
threads           229 tools/perf/util/thread_map.c 	if (threads)
threads           230 tools/perf/util/thread_map.c 		refcount_set(&threads->refcnt, 1);
threads           231 tools/perf/util/thread_map.c 	return threads;
threads           239 tools/perf/util/thread_map.c 	zfree(&threads);
threads           245 tools/perf/util/thread_map.c 	struct perf_thread_map *threads = NULL, *nt;
threads           272 tools/perf/util/thread_map.c 		nt = perf_thread_map__realloc(threads, ntasks);
threads           277 tools/perf/util/thread_map.c 		threads = nt;
threads           278 tools/perf/util/thread_map.c 		perf_thread_map__set_pid(threads, ntasks - 1, tid);
threads           279 tools/perf/util/thread_map.c 		threads->nr = ntasks;
threads           282 tools/perf/util/thread_map.c 	if (threads)
threads           283 tools/perf/util/thread_map.c 		refcount_set(&threads->refcnt, 1);
threads           284 tools/perf/util/thread_map.c 	return threads;
threads           287 tools/perf/util/thread_map.c 	zfree(&threads);
threads           307 tools/perf/util/thread_map.c size_t thread_map__fprintf(struct perf_thread_map *threads, FILE *fp)
threads           311 tools/perf/util/thread_map.c 				 threads->nr, threads->nr > 1 ? "s" : "");
threads           312 tools/perf/util/thread_map.c 	for (i = 0; i < threads->nr; ++i)
threads           313 tools/perf/util/thread_map.c 		printed += fprintf(fp, "%s%d", i ? ", " : "", perf_thread_map__pid(threads, i));
threads           363 tools/perf/util/thread_map.c void thread_map__read_comms(struct perf_thread_map *threads)
threads           367 tools/perf/util/thread_map.c 	for (i = 0; i < threads->nr; ++i)
threads           368 tools/perf/util/thread_map.c 		comm_init(threads, i);
threads           371 tools/perf/util/thread_map.c static void thread_map__copy_event(struct perf_thread_map *threads,
threads           376 tools/perf/util/thread_map.c 	threads->nr = (int) event->nr;
threads           379 tools/perf/util/thread_map.c 		perf_thread_map__set_pid(threads, i, (pid_t) event->entries[i].pid);
threads           380 tools/perf/util/thread_map.c 		threads->map[i].comm = strndup(event->entries[i].comm, 16);
threads           383 tools/perf/util/thread_map.c 	refcount_set(&threads->refcnt, 1);
threads           388 tools/perf/util/thread_map.c 	struct perf_thread_map *threads;
threads           390 tools/perf/util/thread_map.c 	threads = thread_map__alloc(event->nr);
threads           391 tools/perf/util/thread_map.c 	if (threads)
threads           392 tools/perf/util/thread_map.c 		thread_map__copy_event(threads, event);
threads           394 tools/perf/util/thread_map.c 	return threads;
threads           397 tools/perf/util/thread_map.c bool thread_map__has(struct perf_thread_map *threads, pid_t pid)
threads           401 tools/perf/util/thread_map.c 	for (i = 0; i < threads->nr; ++i) {
threads           402 tools/perf/util/thread_map.c 		if (threads->map[i].pid == pid)
threads           409 tools/perf/util/thread_map.c int thread_map__remove(struct perf_thread_map *threads, int idx)
threads           413 tools/perf/util/thread_map.c 	if (threads->nr < 1)
threads           416 tools/perf/util/thread_map.c 	if (idx >= threads->nr)
threads           422 tools/perf/util/thread_map.c 	zfree(&threads->map[idx].comm);
threads           424 tools/perf/util/thread_map.c 	for (i = idx; i < threads->nr - 1; i++)
threads           425 tools/perf/util/thread_map.c 		threads->map[i] = threads->map[i + 1];
threads           427 tools/perf/util/thread_map.c 	threads->nr--;
threads            26 tools/perf/util/thread_map.h size_t thread_map__fprintf(struct perf_thread_map *threads, FILE *fp);
threads            28 tools/perf/util/thread_map.h void thread_map__read_comms(struct perf_thread_map *threads);
threads            29 tools/perf/util/thread_map.h bool thread_map__has(struct perf_thread_map *threads, pid_t pid);
threads            30 tools/perf/util/thread_map.h int thread_map__remove(struct perf_thread_map *threads, int idx);
threads            22 tools/perf/util/values.c 	values->threads = 0;
threads            54 tools/perf/util/values.c 	for (i = 0; i < values->threads; i++)
threads            93 tools/perf/util/values.c 	for (i = 0; i < values->threads; i++)
threads            97 tools/perf/util/values.c 	if (values->threads == values->threads_max) {
threads           103 tools/perf/util/values.c 	i = values->threads;
threads           112 tools/perf/util/values.c 	values->threads = i + 1;
threads           134 tools/perf/util/values.c 	for (i = 0; i < values->threads; i++) {
threads           217 tools/perf/util/values.c 	for (i = 0; i < values->threads; i++) {
threads           238 tools/perf/util/values.c 	for (i = 0; i < values->threads; i++) {
threads           261 tools/perf/util/values.c 	for (i = 0; i < values->threads; i++) {
threads           277 tools/perf/util/values.c 	for (i = 0; i < values->threads; i++) {
threads           289 tools/perf/util/values.c 	for (i = 0; i < values->threads; i++)
threads             8 tools/perf/util/values.h 	int threads;
threads             9 tools/power/cpupower/lib/cpupower.h 	unsigned int threads; /* per core */
threads           272 tools/power/x86/turbostat/turbostat.c 	struct thread_data threads;
threads           904 tools/power/x86/turbostat/turbostat.c 	if ((t != &average.threads) &&
threads           925 tools/power/x86/turbostat/turbostat.c 	if (t == &average.threads) {
threads          1202 tools/power/x86/turbostat/turbostat.c 	format_counters(&average.threads, &average.cores, &average.packages);
threads          1489 tools/power/x86/turbostat/turbostat.c 		average.threads.apic_id = t->apic_id;
threads          1491 tools/power/x86/turbostat/turbostat.c 		average.threads.x2apic_id = t->x2apic_id;
threads          1494 tools/power/x86/turbostat/turbostat.c 	if (average.threads.tv_begin.tv_sec == 0)
threads          1495 tools/power/x86/turbostat/turbostat.c 		average.threads.tv_begin = t->tv_begin;
threads          1498 tools/power/x86/turbostat/turbostat.c 	average.threads.tv_end = t->tv_end;
threads          1500 tools/power/x86/turbostat/turbostat.c 	average.threads.tsc += t->tsc;
threads          1501 tools/power/x86/turbostat/turbostat.c 	average.threads.aperf += t->aperf;
threads          1502 tools/power/x86/turbostat/turbostat.c 	average.threads.mperf += t->mperf;
threads          1503 tools/power/x86/turbostat/turbostat.c 	average.threads.c1 += t->c1;
threads          1505 tools/power/x86/turbostat/turbostat.c 	average.threads.irq_count += t->irq_count;
threads          1506 tools/power/x86/turbostat/turbostat.c 	average.threads.smi_count += t->smi_count;
threads          1511 tools/power/x86/turbostat/turbostat.c 		average.threads.counter[i] += t->counter[i];
threads          1590 tools/power/x86/turbostat/turbostat.c 	clear_counters(&average.threads, &average.cores, &average.packages);
threads          1595 tools/power/x86/turbostat/turbostat.c 	average.threads.tv_delta = tv_delta;
threads          1597 tools/power/x86/turbostat/turbostat.c 	average.threads.tsc /= topo.num_cpus;
threads          1598 tools/power/x86/turbostat/turbostat.c 	average.threads.aperf /= topo.num_cpus;
threads          1599 tools/power/x86/turbostat/turbostat.c 	average.threads.mperf /= topo.num_cpus;
threads          1600 tools/power/x86/turbostat/turbostat.c 	average.threads.c1 /= topo.num_cpus;
threads          1602 tools/power/x86/turbostat/turbostat.c 	if (average.threads.irq_count > 9999999)
threads          1635 tools/power/x86/turbostat/turbostat.c 			if (average.threads.counter[i] > 9999999)
threads          1639 tools/power/x86/turbostat/turbostat.c 		average.threads.counter[i] /= topo.num_cpus;
threads           528 tools/testing/radix-tree/idr-test.c 	pthread_t threads[20];
threads           531 tools/testing/radix-tree/idr-test.c 	for (i = 0; i < ARRAY_SIZE(threads); i++)
threads           532 tools/testing/radix-tree/idr-test.c 		if (pthread_create(&threads[i], NULL, ida_random_fn, NULL)) {
threads           538 tools/testing/radix-tree/idr-test.c 		pthread_join(threads[i], NULL);
threads            15 tools/testing/radix-tree/iteration_check.c static pthread_t threads[NUM_THREADS];
threads           178 tools/testing/radix-tree/iteration_check.c 	if (pthread_create(&threads[0], NULL, tagged_iteration_fn, NULL)) {
threads           182 tools/testing/radix-tree/iteration_check.c 	if (pthread_create(&threads[1], NULL, untagged_iteration_fn, NULL)) {
threads           186 tools/testing/radix-tree/iteration_check.c 	if (pthread_create(&threads[2], NULL, add_entries_fn, NULL)) {
threads           190 tools/testing/radix-tree/iteration_check.c 	if (pthread_create(&threads[3], NULL, remove_entries_fn, NULL)) {
threads           194 tools/testing/radix-tree/iteration_check.c 	if (pthread_create(&threads[4], NULL, tag_entries_fn, NULL)) {
threads           203 tools/testing/radix-tree/iteration_check.c 		if (pthread_join(threads[i], NULL)) {
threads            56 tools/testing/radix-tree/iteration_check_2.c 	pthread_t threads[2];
threads            67 tools/testing/radix-tree/iteration_check_2.c 	if (pthread_create(&threads[0], NULL, iterator, &array)) {
threads            71 tools/testing/radix-tree/iteration_check_2.c 	if (pthread_create(&threads[1], NULL, throbber, &array)) {
threads            80 tools/testing/radix-tree/iteration_check_2.c 		if (pthread_join(threads[i], NULL)) {
threads           168 tools/testing/radix-tree/regression1.c static pthread_t *threads;
threads           180 tools/testing/radix-tree/regression1.c 	threads = malloc(nr_threads * sizeof(pthread_t *));
threads           184 tools/testing/radix-tree/regression1.c 		if (pthread_create(&threads[i], NULL, regression1_fn, (void *)arg)) {
threads           191 tools/testing/radix-tree/regression1.c 		if (pthread_join(threads[i], NULL)) {
threads           197 tools/testing/radix-tree/regression1.c 	free(threads);
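
regression1.c above heap-allocates its pthread_t array per run and then creates and joins nr_threads workers. One detail worth noting in the quoted line 180: the buffer is sized with sizeof(pthread_t *) rather than sizeof(pthread_t), which is only safe where a pointer is at least as wide as pthread_t (true on common glibc targets). A hedged sketch of the conventional sizeof(*threads) idiom, with a placeholder worker and thread count:

/*
 * Allocate/create/join sketch in the shape of the regression1.c entries
 * above.  Worker body and thread count are placeholders; build with
 * -pthread.
 */
#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>

static void *worker(void *arg)
{
	(void)arg;
	return NULL;
}

int main(void)
{
	int i, nr_threads = 4;		/* placeholder count */
	pthread_t *threads;

	/* sizeof(*threads) sizes the element, not a pointer to it */
	threads = malloc(nr_threads * sizeof(*threads));
	if (!threads)
		return 1;

	for (i = 0; i < nr_threads; i++)
		if (pthread_create(&threads[i], NULL, worker, NULL)) {
			perror("pthread_create");
			exit(1);
		}

	for (i = 0; i < nr_threads; i++)
		if (pthread_join(threads[i], NULL)) {
			perror("pthread_join");
			exit(1);
		}

	free(threads);
	return 0;
}
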
threads            62 tools/testing/selftests/powerpc/dscr/dscr_default_test.c 	pthread_t threads[THREADS];
threads            74 tools/testing/selftests/powerpc/dscr/dscr_default_test.c 		if (pthread_create(&threads[i], NULL, do_test, (void *)i)) {
threads           103 tools/testing/selftests/powerpc/dscr/dscr_default_test.c 		if (pthread_join(threads[i], (void **)&(status[i]))) {
threads            57 tools/testing/selftests/powerpc/math/fpu_preempt.c 	int i, rc, threads;
threads            60 tools/testing/selftests/powerpc/math/fpu_preempt.c 	threads = sysconf(_SC_NPROCESSORS_ONLN) * THREAD_FACTOR;
threads            61 tools/testing/selftests/powerpc/math/fpu_preempt.c 	tids = malloc((threads) * sizeof(pthread_t));
threads            65 tools/testing/selftests/powerpc/math/fpu_preempt.c 	threads_starting = threads;
threads            66 tools/testing/selftests/powerpc/math/fpu_preempt.c 	for (i = 0; i < threads; i++) {
threads            88 tools/testing/selftests/powerpc/math/fpu_preempt.c 	for (i = 0; i < threads; i++) {
threads            77 tools/testing/selftests/powerpc/math/fpu_signal.c 	int i, j, rc, threads;
threads            81 tools/testing/selftests/powerpc/math/fpu_signal.c 	threads = sysconf(_SC_NPROCESSORS_ONLN) * THREAD_FACTOR;
threads            82 tools/testing/selftests/powerpc/math/fpu_signal.c 	tids = malloc(threads * sizeof(pthread_t));
threads            86 tools/testing/selftests/powerpc/math/fpu_signal.c 	threads_starting = threads;
threads            87 tools/testing/selftests/powerpc/math/fpu_signal.c 	for (i = 0; i < threads; i++) {
threads           100 tools/testing/selftests/powerpc/math/fpu_signal.c 		for (j = 0; j < threads; j++) {
threads           109 tools/testing/selftests/powerpc/math/fpu_signal.c 	for (i = 0; i < threads; i++) {
threads            57 tools/testing/selftests/powerpc/math/vmx_preempt.c 	int i, rc, threads;
threads            60 tools/testing/selftests/powerpc/math/vmx_preempt.c 	threads = sysconf(_SC_NPROCESSORS_ONLN) * THREAD_FACTOR;
threads            61 tools/testing/selftests/powerpc/math/vmx_preempt.c 	tids = malloc(threads * sizeof(pthread_t));
threads            65 tools/testing/selftests/powerpc/math/vmx_preempt.c 	threads_starting = threads;
threads            66 tools/testing/selftests/powerpc/math/vmx_preempt.c 	for (i = 0; i < threads; i++) {
threads            88 tools/testing/selftests/powerpc/math/vmx_preempt.c 	for (i = 0; i < threads; i++) {
threads            95 tools/testing/selftests/powerpc/math/vmx_signal.c 	int i, j, rc, threads;
threads            99 tools/testing/selftests/powerpc/math/vmx_signal.c 	threads = sysconf(_SC_NPROCESSORS_ONLN) * THREAD_FACTOR;
threads           100 tools/testing/selftests/powerpc/math/vmx_signal.c 	tids = malloc(threads * sizeof(pthread_t));
threads           104 tools/testing/selftests/powerpc/math/vmx_signal.c 	threads_starting = threads;
threads           105 tools/testing/selftests/powerpc/math/vmx_signal.c 	for (i = 0; i < threads; i++) {
threads           111 tools/testing/selftests/powerpc/math/vmx_signal.c 	printf("\tWaiting for %d workers to start... %d", threads, threads_starting);
threads           121 tools/testing/selftests/powerpc/math/vmx_signal.c 		for (j = 0; j < threads; j++) {
threads           130 tools/testing/selftests/powerpc/math/vmx_signal.c 	for (i = 0; i < threads; i++) {
threads            92 tools/testing/selftests/powerpc/math/vsx_preempt.c 	int i, rc, threads;
threads            95 tools/testing/selftests/powerpc/math/vsx_preempt.c 	threads = sysconf(_SC_NPROCESSORS_ONLN) * THREAD_FACTOR;
threads            96 tools/testing/selftests/powerpc/math/vsx_preempt.c 	tids = malloc(threads * sizeof(pthread_t));
threads           100 tools/testing/selftests/powerpc/math/vsx_preempt.c 	threads_starting = threads;
threads           101 tools/testing/selftests/powerpc/math/vsx_preempt.c 	for (i = 0; i < threads; i++) {
threads           123 tools/testing/selftests/powerpc/math/vsx_preempt.c 	for (i = 0; i < threads; i++) {
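
The fpu/vmx/vsx preempt and signal tests above all size their worker pool as sysconf(_SC_NPROCESSORS_ONLN) * THREAD_FACTOR and track start-up through a shared threads_starting counter. The condensed sketch below shows that scaling-plus-check-in pattern; the THREAD_FACTOR value, the worker body, and the busy-wait synchronization are simplified placeholders rather than the selftests' exact code.

/*
 * Condensed sketch of the pattern shared by the powerpc math selftest
 * entries above: scale the pool by the online CPU count, then wait for
 * every worker to check in.  Build with -pthread.
 */
#include <pthread.h>
#include <sched.h>
#include <stdlib.h>
#include <unistd.h>

#define THREAD_FACTOR 2			/* placeholder oversubscription */

static volatile int threads_starting;

static void *worker(void *arg)
{
	(void)arg;
	__sync_fetch_and_sub(&threads_starting, 1);	/* "I have started" */
	return NULL;
}

int main(void)
{
	int i, threads = sysconf(_SC_NPROCESSORS_ONLN) * THREAD_FACTOR;
	pthread_t *tids = malloc(threads * sizeof(pthread_t));

	if (!tids)
		return 1;

	threads_starting = threads;
	for (i = 0; i < threads; i++)
		if (pthread_create(&tids[i], NULL, worker, NULL))
			return 1;

	while (threads_starting)	/* wait until every worker checked in */
		sched_yield();

	for (i = 0; i < threads; i++)
		pthread_join(tids[i], NULL);

	free(tids);
	return 0;
}
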
threads           252 tools/testing/selftests/powerpc/signal/sigfuz.c 	pthread_t *threads;
threads           254 tools/testing/selftests/powerpc/signal/sigfuz.c 	threads = malloc(nthread * sizeof(pthread_t));
threads           257 tools/testing/selftests/powerpc/signal/sigfuz.c 		rc = pthread_create(&threads[t], NULL, sigfuz_test,
threads           264 tools/testing/selftests/powerpc/signal/sigfuz.c 		rc = pthread_join(threads[t], NULL);
threads           269 tools/testing/selftests/powerpc/signal/sigfuz.c 	free(threads);
threads            91 tools/testing/selftests/powerpc/tm/tm-vmx-unavail.c 	int threads;
threads            98 tools/testing/selftests/powerpc/tm/tm-vmx-unavail.c 	threads = sysconf(_SC_NPROCESSORS_ONLN) * 4;
threads            99 tools/testing/selftests/powerpc/tm/tm-vmx-unavail.c 	thread = malloc(sizeof(pthread_t)*threads);
threads           103 tools/testing/selftests/powerpc/tm/tm-vmx-unavail.c 	for (uint64_t i = 0; i < threads; i++)
threads           106 tools/testing/selftests/powerpc/tm/tm-vmx-unavail.c 	for (uint64_t i = 0; i < threads; i++)
threads            56 tools/testing/selftests/sync/sync_stress_consumer.c 	int threads;
threads           112 tools/testing/selftests/sync/sync_stress_consumer.c 	int n = test_data_mpsc.threads;
threads           160 tools/testing/selftests/sync/sync_stress_consumer.c 	pthread_t threads[n];
threads           169 tools/testing/selftests/sync/sync_stress_consumer.c 	test_data_mpsc.threads = n;
threads           174 tools/testing/selftests/sync/sync_stress_consumer.c 		pthread_create(&threads[i], NULL, (void * (*)(void *))
threads           182 tools/testing/selftests/sync/sync_stress_consumer.c 		pthread_join(threads[i], NULL);
threads           357 tools/usb/ffs-test.c } threads[] = {
threads           399 tools/usb/ffs-test.c 	if (t != threads) {
threads           666 tools/usb/ffs-test.c 	init_thread(threads);
threads           667 tools/usb/ffs-test.c 	ep0_init(threads, legacy_descriptors);
threads           669 tools/usb/ffs-test.c 	for (i = 1; i < sizeof threads / sizeof *threads; ++i)
threads           670 tools/usb/ffs-test.c 		init_thread(threads + i);
threads           672 tools/usb/ffs-test.c 	for (i = 1; i < sizeof threads / sizeof *threads; ++i)
threads           673 tools/usb/ffs-test.c 		start_thread(threads + i);
threads           675 tools/usb/ffs-test.c 	start_thread_helper(threads);
threads           677 tools/usb/ffs-test.c 	for (i = 1; i < sizeof threads / sizeof *threads; ++i)
threads           678 tools/usb/ffs-test.c 		join_thread(threads + i);
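
ffs-test.c above keeps its per-endpoint state in a static threads[] array, treats entry 0 (ep0) specially, and walks the remaining entries with the sizeof threads / sizeof *threads idiom. A small sketch of that loop shape follows; the struct contents and the start/join helpers are invented stubs for illustration.

/*
 * Loop-shape sketch modelled on the ffs-test.c entries above: entry 0
 * stays on the main thread, entries 1..N-1 are started and joined.
 * The descriptor struct and helpers are stand-ins, not the real tool.
 */
#include <stddef.h>
#include <stdio.h>

struct ep_thread {			/* hypothetical descriptor */
	const char *name;
};

static struct ep_thread threads[] = {
	{ "ep0" }, { "ep1in" }, { "ep2out" },
};

/* Stubs: the real program would spawn and reap a worker thread here. */
static void start_thread(struct ep_thread *t) { printf("start %s\n", t->name); }
static void join_thread(struct ep_thread *t)  { printf("join %s\n", t->name); }

int main(void)
{
	size_t i;

	/* entry 0 (ep0) is handled by the main thread itself */
	for (i = 1; i < sizeof threads / sizeof *threads; ++i)
		start_thread(threads + i);

	for (i = 1; i < sizeof threads / sizeof *threads; ++i)
		join_thread(threads + i);

	return 0;
}
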