curr_cpu 136 drivers/infiniband/hw/hfi1/affinity.c int possible, curr_cpu, i, ht;
curr_cpu 154 drivers/infiniband/hw/hfi1/affinity.c curr_cpu = cpumask_first(&node_affinity.real_cpu_mask);
curr_cpu 156 drivers/infiniband/hw/hfi1/affinity.c curr_cpu = cpumask_next(curr_cpu, &node_affinity.real_cpu_mask);
curr_cpu 162 drivers/infiniband/hw/hfi1/affinity.c cpumask_clear_cpu(curr_cpu, &node_affinity.real_cpu_mask);
curr_cpu 163 drivers/infiniband/hw/hfi1/affinity.c curr_cpu = cpumask_next(curr_cpu, &node_affinity.real_cpu_mask);
curr_cpu 287 drivers/infiniband/hw/hfi1/affinity.c int curr_cpu;
curr_cpu 309 drivers/infiniband/hw/hfi1/affinity.c for_each_cpu(curr_cpu, possible_cpumask) {
curr_cpu 310 drivers/infiniband/hw/hfi1/affinity.c cntr = *per_cpu_ptr(comp_vect_affinity, curr_cpu);
curr_cpu 313 drivers/infiniband/hw/hfi1/affinity.c ret_cpu = curr_cpu;
curr_cpu 327 drivers/infiniband/hw/hfi1/affinity.c int curr_cpu;
curr_cpu 343 drivers/infiniband/hw/hfi1/affinity.c for_each_cpu(curr_cpu, possible_cpumask) {
curr_cpu 344 drivers/infiniband/hw/hfi1/affinity.c cntr = *per_cpu_ptr(comp_vect_affinity, curr_cpu);
curr_cpu 347 drivers/infiniband/hw/hfi1/affinity.c max_cpu = curr_cpu;
curr_cpu 538 drivers/infiniband/hw/hfi1/affinity.c int i, j, curr_cpu;
curr_cpu 574 drivers/infiniband/hw/hfi1/affinity.c curr_cpu = per_cpu_affinity_get(&entry->comp_vect_mask,
curr_cpu 576 drivers/infiniband/hw/hfi1/affinity.c if (curr_cpu < 0)
curr_cpu 579 drivers/infiniband/hw/hfi1/affinity.c cpumask_set_cpu(curr_cpu, dev_comp_vect_mask);
curr_cpu 594 drivers/infiniband/hw/hfi1/affinity.c return curr_cpu;
curr_cpu 637 drivers/infiniband/hw/hfi1/affinity.c int curr_cpu, possible, i, ret;
curr_cpu 681 drivers/infiniband/hw/hfi1/affinity.c curr_cpu = cpumask_first(&entry->def_intr.mask);
curr_cpu 685 drivers/infiniband/hw/hfi1/affinity.c cpumask_set_cpu(curr_cpu, &entry->rcv_intr.mask);
curr_cpu 686 drivers/infiniband/hw/hfi1/affinity.c cpumask_set_cpu(curr_cpu, &entry->general_intr_mask);
curr_cpu 693 drivers/infiniband/hw/hfi1/affinity.c cpumask_clear_cpu(curr_cpu, &entry->def_intr.mask);
curr_cpu 694 drivers/infiniband/hw/hfi1/affinity.c cpumask_set_cpu(curr_cpu, &entry->general_intr_mask);
curr_cpu 695 drivers/infiniband/hw/hfi1/affinity.c curr_cpu = cpumask_next(curr_cpu,
curr_cpu 706 drivers/infiniband/hw/hfi1/affinity.c cpumask_clear_cpu(curr_cpu,
curr_cpu 708 drivers/infiniband/hw/hfi1/affinity.c cpumask_set_cpu(curr_cpu,
curr_cpu 710 drivers/infiniband/hw/hfi1/affinity.c curr_cpu = cpumask_next(curr_cpu,
curr_cpu 712 drivers/infiniband/hw/hfi1/affinity.c if (curr_cpu >= nr_cpu_ids)
curr_cpu 1009 drivers/infiniband/hw/hfi1/affinity.c int possible, curr_cpu, i;
curr_cpu 1018 drivers/infiniband/hw/hfi1/affinity.c curr_cpu = cpumask_first(hw_thread_mask);
curr_cpu 1022 drivers/infiniband/hw/hfi1/affinity.c curr_cpu = cpumask_next(curr_cpu, hw_thread_mask);
curr_cpu 1025 drivers/infiniband/hw/hfi1/affinity.c cpumask_clear_cpu(curr_cpu, hw_thread_mask);
curr_cpu 1026 drivers/infiniband/hw/hfi1/affinity.c curr_cpu = cpumask_next(curr_cpu, hw_thread_mask);
curr_cpu 176 drivers/pci/controller/pci-xgene-msi.c int curr_cpu;
curr_cpu 178 drivers/pci/controller/pci-xgene-msi.c curr_cpu = hwirq_to_cpu(irqdata->hwirq);
curr_cpu 179 drivers/pci/controller/pci-xgene-msi.c if (curr_cpu == target_cpu)
curr_cpu 211 drivers/pci/controller/pcie-iproc-msi.c int curr_cpu;
curr_cpu 213 drivers/pci/controller/pcie-iproc-msi.c curr_cpu = hwirq_to_cpu(msi, data->hwirq);
curr_cpu 214 drivers/pci/controller/pcie-iproc-msi.c if (curr_cpu == target_cpu)
curr_cpu 659 kernel/relay.c unsigned int i, curr_cpu;
curr_cpu 693 kernel/relay.c curr_cpu = get_cpu();
curr_cpu 713 kernel/relay.c if (curr_cpu == i) {
curr_cpu 1835 kernel/sched/core.c int target_cpu, int curr_cpu)
curr_cpu 1842 kernel/sched/core.c .src_cpu = curr_cpu,
curr_cpu 6127 kernel/sched/core.c int curr_cpu = task_cpu(p);
curr_cpu 6129 kernel/sched/core.c if (curr_cpu == target_cpu)
curr_cpu 6137 kernel/sched/core.c trace_sched_move_numa(p, curr_cpu, target_cpu);
curr_cpu 6138 kernel/sched/core.c return stop_one_cpu(curr_cpu, migration_cpu_stop, &arg);
curr_cpu 56 tools/perf/bench/numa.c int curr_cpu;
curr_cpu 861 tools/perf/bench/numa.c g->threads[task_nr].curr_cpu = cpu;
curr_cpu 889 tools/perf/bench/numa.c node = numa_node_of_cpu(td->curr_cpu);
curr_cpu 925 tools/perf/bench/numa.c n = numa_node_of_cpu(td->curr_cpu);
curr_cpu 993 tools/perf/bench/numa.c cpu = td->curr_cpu;
curr_cpu 1222 tools/perf/bench/numa.c this_cpu = g->threads[task_nr].curr_cpu;
curr_cpu 1323 tools/perf/bench/numa.c td->curr_cpu = -1;
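
Most of the drivers/infiniband/hw/hfi1/affinity.c entries above follow one idiom: walk a shared cpumask with cpumask_first()/cpumask_next(), claim the current CPU by moving it out of the source mask, and stop once the iterator reaches nr_cpu_ids. The following is a minimal kernel-context sketch of that idiom, not the driver's actual code; move_cpus(), src, and dst are hypothetical names chosen for illustration.

#include <linux/cpumask.h>
#include <linux/errno.h>

/*
 * Illustrative sketch only: move "count" CPUs from @src to @dst, starting
 * at the first set bit and advancing with cpumask_next().  Clearing each
 * claimed CPU from @src is what spreads later callers across the
 * remaining CPUs.
 */
static int move_cpus(struct cpumask *src, struct cpumask *dst, int count)
{
	int curr_cpu = cpumask_first(src);
	int i;

	for (i = 0; i < count; i++) {
		/* cpumask_first()/cpumask_next() return >= nr_cpu_ids once @src is exhausted */
		if (curr_cpu >= nr_cpu_ids)
			return -ENOSPC;
		cpumask_clear_cpu(curr_cpu, src);
		cpumask_set_cpu(curr_cpu, dst);
		curr_cpu = cpumask_next(curr_cpu, src);
	}

	return 0;
}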
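
The per_cpu_ptr() entries in the same file show the complementary pattern: iterate a cpumask with for_each_cpu() and compare a per-CPU usage counter to select the least-used (or, on release, most-used) CPU. Below is a hedged sketch under the assumption of a counter allocated with alloc_percpu(u16); pick_least_used_cpu() and use_cntr are hypothetical names, not the driver's API.

#include <linux/cpumask.h>
#include <linux/percpu.h>
#include <linux/types.h>
#include <linux/limits.h>
#include <linux/errno.h>

/*
 * Illustrative sketch only: scan @possible_cpumask and return the CPU whose
 * per-CPU counter is lowest, bumping that counter so repeated calls
 * balance across CPUs.
 */
static int pick_least_used_cpu(const struct cpumask *possible_cpumask,
			       u16 __percpu *use_cntr)
{
	u16 cntr, min_cntr = U16_MAX;
	int curr_cpu, ret_cpu = -EINVAL;

	for_each_cpu(curr_cpu, possible_cpumask) {
		cntr = *per_cpu_ptr(use_cntr, curr_cpu);
		if (cntr < min_cntr) {
			min_cntr = cntr;
			ret_cpu = curr_cpu;
		}
	}

	/* Record the claim so the next caller tends to land elsewhere */
	if (ret_cpu >= 0)
		*per_cpu_ptr(use_cntr, ret_cpu) += 1;

	return ret_cpu;
}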