threads_per_core 214 arch/ia64/include/asm/processor.h unsigned char threads_per_core; /* Threads per core */
threads_per_core 729 arch/ia64/kernel/setup.c if (c->threads_per_core > 1 || c->cores_per_socket > 1)
threads_per_core 841 arch/ia64/kernel/setup.c c->threads_per_core = c->cores_per_socket = c->num_log = 1;
threads_per_core 846 arch/ia64/kernel/setup.c if (c->threads_per_core > smp_num_siblings)
threads_per_core 847 arch/ia64/kernel/setup.c smp_num_siblings = c->threads_per_core;
threads_per_core 591 arch/ia64/kernel/smpboot.c if (cpu_data(cpu)->threads_per_core == 1 &&
threads_per_core 749 arch/ia64/kernel/smpboot.c if (cpu_data(cpu)->threads_per_core == 1 &&
threads_per_core 824 arch/ia64/kernel/smpboot.c c->threads_per_core = info.overview_tpc;
threads_per_core 145 arch/ia64/kernel/topology.c if (cpu_data(cpu)->threads_per_core <= 1 &&
threads_per_core 23 arch/powerpc/include/asm/cputhreads.h extern int threads_per_core;
threads_per_core 53 arch/powerpc/include/asm/cputhreads.h for (i = 0; i < NR_CPUS; i += threads_per_core) {
threads_per_core 84 arch/powerpc/include/asm/cputhreads.h return cpu & (threads_per_core - 1);
threads_per_core 94 arch/powerpc/include/asm/cputhreads.h return cpu & ~(threads_per_core - 1);
threads_per_core 99 arch/powerpc/include/asm/cputhreads.h return cpu | (threads_per_core - 1);
threads_per_core 396 arch/powerpc/kernel/setup-common.c int threads_per_core, threads_per_subcore, threads_shift __read_mostly;
threads_per_core 398 arch/powerpc/kernel/setup-common.c EXPORT_SYMBOL_GPL(threads_per_core);
threads_per_core 407 arch/powerpc/kernel/setup-common.c threads_per_core = tpc;
threads_per_core 137 arch/powerpc/kernel/setup_64.c smt_enabled_at_boot = threads_per_core;
threads_per_core 142 arch/powerpc/kernel/setup_64.c smt_enabled_at_boot = threads_per_core;
threads_per_core 152 arch/powerpc/kernel/setup_64.c min(threads_per_core, smt);
threads_per_core 162 arch/powerpc/kernel/setup_64.c smt_enabled_at_boot = threads_per_core;
threads_per_core 803 arch/powerpc/kernel/smp.c for (i = first_thread; i < first_thread + threads_per_core; i++) {
threads_per_core 996 arch/powerpc/kernel/smp.c if (threads_per_core > 1 && secondaries_inhibited() &&
threads_per_core 1182 arch/powerpc/kernel/smp.c for (i = first_thread; i < first_thread + threads_per_core; i++) {
threads_per_core 1200 arch/powerpc/kernel/smp.c for (i = first_thread; i < first_thread + threads_per_core; i++)
threads_per_core 2553 arch/powerpc/kvm/book3s_hv.c for (i = 0; i < threads_per_core; ++i)
threads_per_core 4740 arch/powerpc/kvm/book3s_hv.c for (cpu = 0; cpu < nr_cpu_ids; cpu += threads_per_core) {
threads_per_core 5451 arch/powerpc/kvm/book3s_hv.c int first_cpu = i * threads_per_core;
threads_per_core 5465 arch/powerpc/kvm/book3s_hv.c for (j = 0; j < threads_per_core; j++) {
threads_per_core 341 arch/powerpc/kvm/book3s_hv_builtin.c for (t = 1; t < threads_per_core; ++t) {
threads_per_core 299 arch/powerpc/kvm/e500.h if (threads_per_core == 2)
threads_per_core 371 arch/powerpc/kvm/e500mc.c if (threads_per_core == 2)
threads_per_core 382 arch/powerpc/kvm/e500mc.c if (threads_per_core == 2)
threads_per_core 419 arch/powerpc/kvm/e500mc.c kvmppc_init_lpid(KVMPPC_NR_LPIDS/threads_per_core);
threads_per_core 512 arch/powerpc/mm/numa.c for (i = 0; i < threads_per_core; i++) {
threads_per_core 1302 arch/powerpc/mm/numa.c for (j = 0; j < threads_per_core; j++) {
threads_per_core 1589 arch/powerpc/perf/hv-24x7.c if (threads_per_core == 8)
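The arch/powerpc/include/asm/cputhreads.h hits above (lines 84, 94, 99) rely on threads_per_core being a power of two, so sibling-thread arithmetic reduces to bit masking; the kernel wraps these expressions in small helpers such as cpu_thread_in_core(). A minimal userspace sketch of the same three idioms, assuming SMT8 and the kernel's contiguous numbering of sibling threads:

    #include <assert.h>
    #include <stdio.h>

    /* Assumed power-of-two SMT width, as on recent POWER cores (SMT4/SMT8). */
    static int threads_per_core = 8;

    /* cpu & (threads_per_core - 1): thread index within its core (line 84). */
    static int thread_in_core(int cpu) { return cpu & (threads_per_core - 1); }

    /* cpu & ~(threads_per_core - 1): first sibling of the core (line 94). */
    static int first_sibling(int cpu)  { return cpu & ~(threads_per_core - 1); }

    /* cpu | (threads_per_core - 1): last sibling of the core (line 99). */
    static int last_sibling(int cpu)   { return cpu | (threads_per_core - 1); }

    int main(void)
    {
        /* With SMT8, logical CPU 21 is thread 5 of the core spanning 16..23. */
        assert(thread_in_core(21) == 5);
        assert(first_sibling(21) == 16);
        assert(last_sibling(21) == 23);
        printf("cpu 21 -> thread %d, siblings [%d..%d]\n",
               thread_in_core(21), first_sibling(21), last_sibling(21));
        return 0;
    }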
threads_per_core 578 arch/powerpc/perf/imc-pmu.c int nid, rc = 0, core_id = (cpu / threads_per_core);
threads_per_core 616 arch/powerpc/perf/imc-pmu.c int core_id = (cpu / threads_per_core);
threads_per_core 695 arch/powerpc/perf/imc-pmu.c core_id = cpu / threads_per_core;
threads_per_core 726 arch/powerpc/perf/imc-pmu.c core_id = event->cpu / threads_per_core;
threads_per_core 792 arch/powerpc/perf/imc-pmu.c core_id = event->cpu / threads_per_core;
threads_per_core 1031 arch/powerpc/perf/imc-pmu.c core_id = smp_processor_id() / threads_per_core;
threads_per_core 1068 arch/powerpc/perf/imc-pmu.c core_id = smp_processor_id() / threads_per_core;
threads_per_core 1099 arch/powerpc/perf/imc-pmu.c int core_id = (cpu_id / threads_per_core);
threads_per_core 1221 arch/powerpc/perf/imc-pmu.c int core_id = smp_processor_id() / threads_per_core;
threads_per_core 1274 arch/powerpc/perf/imc-pmu.c int core_id = smp_processor_id() / threads_per_core;
threads_per_core 1415 arch/powerpc/perf/imc-pmu.c int i, nr_cores = DIV_ROUND_UP(num_possible_cpus(), threads_per_core);
threads_per_core 1560 arch/powerpc/perf/imc-pmu.c nr_cores = DIV_ROUND_UP(num_possible_cpus(), threads_per_core);
threads_per_core 1600 arch/powerpc/perf/imc-pmu.c nr_cores = DIV_ROUND_UP(num_possible_cpus(), threads_per_core);
threads_per_core 279 arch/powerpc/platforms/85xx/smp.c if (threads_per_core == 2) {
threads_per_core 310 arch/powerpc/platforms/85xx/smp.c } else if (threads_per_core == 1) {
threads_per_core 317 arch/powerpc/platforms/85xx/smp.c } else if (threads_per_core > 2) {
threads_per_core 318 arch/powerpc/platforms/powernv/idle.c unsigned long core_thread_mask = (1UL << threads_per_core) - 1;
threads_per_core 369 arch/powerpc/platforms/powernv/idle.c == threads_per_core)
threads_per_core 610 arch/powerpc/platforms/powernv/idle.c unsigned long core_thread_mask = (1UL << threads_per_core) - 1;
threads_per_core 890 arch/powerpc/platforms/powernv/idle.c int need_awake = threads_per_core;
threads_per_core 893 arch/powerpc/platforms/powernv/idle.c cpu0 = cpu & ~(threads_per_core - 1);
threads_per_core 894 arch/powerpc/platforms/powernv/idle.c for (thr = 0; thr < threads_per_core; ++thr) {
threads_per_core 900 arch/powerpc/platforms/powernv/idle.c for (thr = 0; thr < threads_per_core; ++thr) {
threads_per_core 910 arch/powerpc/platforms/powernv/idle.c for (thr = 0; thr < threads_per_core; ++thr) {
threads_per_core 919 arch/powerpc/platforms/powernv/idle.c for (thr = 0; thr < threads_per_core; ++thr) {
threads_per_core 936 arch/powerpc/platforms/powernv/idle.c cpu0 = cpu & ~(threads_per_core - 1);
threads_per_core 939 arch/powerpc/platforms/powernv/idle.c for (thr = 0; thr < threads_per_core; ++thr) {
threads_per_core 1338 arch/powerpc/platforms/powernv/idle.c p->idle_state = (1 << threads_per_core) - 1;
threads_per_core 152 arch/powerpc/platforms/powernv/subcore.c for (i = cpu + 1; i < cpu + threads_per_core; i++)
threads_per_core 197 arch/powerpc/platforms/powernv/subcore.c for (i = cpu + 1; i < cpu + threads_per_core; i++)
threads_per_core 316 arch/powerpc/platforms/powernv/subcore.c threads_per_subcore = threads_per_core / subcores_per_core;
threads_per_core 421 arch/powerpc/platforms/powernv/subcore.c if (setup_max_cpus % threads_per_core)
threads_per_core 205 arch/powerpc/platforms/pseries/lpar.c vcpu_associativity = kcalloc(num_possible_cpus() / threads_per_core,
threads_per_core 207 arch/powerpc/platforms/pseries/lpar.c pcpu_associativity = kcalloc(NR_CPUS_H / threads_per_core,
threads_per_core 230 arch/powerpc/platforms/pseries/lpar.c assoc = &cpu_assoc[(int)(cpu / threads_per_core) * VPHN_ASSOC_BUFSIZE];
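The imc-pmu.c and powernv/idle.c hits above show two complementary encodings of a core: an integer core_id obtained by dividing the logical CPU number, and a bitmask with one bit per sibling thread that is "full" when the whole core is idle. A self-contained sketch of both patterns, assuming SMT8 and a 96-CPU system; DIV_ROUND_UP is redefined here because the kernel macro is not available in userspace, and the idle_state handling is a simplified reading of the powernv code, not a copy of it:

    #include <stdio.h>

    /* Userspace stand-in for the kernel's DIV_ROUND_UP() macro. */
    #define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

    int main(void)
    {
        int threads_per_core = 8;    /* assumed SMT8 */
        int num_possible_cpus = 96;  /* assumed 12-core system */
        int cpu = 21;

        /* imc-pmu.c idiom: sibling threads map to one per-core slot. */
        int core_id  = cpu / threads_per_core;
        int nr_cores = DIV_ROUND_UP(num_possible_cpus, threads_per_core);

        /* powernv/idle.c idiom: one bit per thread of the core; the core
         * is fully idle when all bits under core_thread_mask are set. */
        unsigned long core_thread_mask = (1UL << threads_per_core) - 1;
        unsigned long idle_state = 0;
        int thr;
        for (thr = 0; thr < threads_per_core; ++thr)
            idle_state |= 1UL << thr;  /* each thread sets its bit on entry */

        printf("cpu %d -> core %d of %d\n", cpu, core_id, nr_cores);
        printf("core idle: %s\n",
               (idle_state & core_thread_mask) == core_thread_mask
                   ? "yes" : "no");
        return 0;
    }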
threads_per_core 148 arch/powerpc/sysdev/fsl_rcpm.c if (threads_per_core == 2) {
threads_per_core 160 arch/powerpc/sysdev/fsl_rcpm.c if (threads_per_core == 1)
threads_per_core 834 drivers/cpufreq/powernv-cpufreq.c for (i = 0; i < threads_per_core; i++)
threads_per_core 230 tools/power/x86/turbostat/turbostat.c topo.nodes_per_pkg * topo.cores_per_node * topo.threads_per_core) + \
threads_per_core 231 tools/power/x86/turbostat/turbostat.c ((node_no) * topo.cores_per_node * topo.threads_per_core) + \
threads_per_core 232 tools/power/x86/turbostat/turbostat.c ((core_no) * topo.threads_per_core) + \
threads_per_core 297 tools/power/x86/turbostat/turbostat.c int threads_per_core;
threads_per_core 329 tools/power/x86/turbostat/turbostat.c topo.threads_per_core; ++thread_no) {
threads_per_core 2696 tools/power/x86/turbostat/turbostat.c topo.threads_per_core; ++thread_no) {
threads_per_core 5079 tools/power/x86/turbostat/turbostat.c topo.threads_per_core = max_siblings;
threads_per_core 5107 tools/power/x86/turbostat/turbostat.c int num_threads = topo.threads_per_core * num_cores;
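The turbostat.c hits at lines 230-232 are the body of a lookup macro that flattens a (package, node, core, thread) tuple into a single offset into a per-thread array, with thread_no varying fastest. A sketch of that row-major arithmetic; the struct name and topology numbers here are assumptions for illustration, not turbostat's actual values:

    #include <stdio.h>

    /* Assumed topology, mirroring the topo.* fields used above. */
    struct topology {
        int nodes_per_pkg;
        int cores_per_node;
        int threads_per_core;
    } topo = { 2, 4, 2 };

    /* Row-major flattening, thread_no varying fastest, as in the
     * turbostat macro body at lines 230-232. */
    static int thread_index(int pkg_no, int node_no, int core_no, int thread_no)
    {
        return (pkg_no * topo.nodes_per_pkg * topo.cores_per_node *
                topo.threads_per_core) +
               (node_no * topo.cores_per_node * topo.threads_per_core) +
               (core_no * topo.threads_per_core) +
               thread_no;
    }

    int main(void)
    {
        /* pkg 1, node 0, core 2, thread 1 -> 1*16 + 0*8 + 2*2 + 1 = 21 */
        printf("index = %d\n", thread_index(1, 0, 2, 1));
        return 0;
    }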