cpu_idx 64 arch/arc/mm/highmem.c int idx, cpu_idx;
cpu_idx 72 arch/arc/mm/highmem.c cpu_idx = kmap_atomic_idx_push();
cpu_idx 73 arch/arc/mm/highmem.c idx = cpu_idx + KM_TYPE_NR * smp_processor_id();
cpu_idx 95 arch/arc/mm/highmem.c int cpu_idx = kmap_atomic_idx();
cpu_idx 96 arch/arc/mm/highmem.c int idx = cpu_idx + KM_TYPE_NR * smp_processor_id();
cpu_idx 520 drivers/edac/xgene_edac.c int cpu_idx)
cpu_idx 526 drivers/edac/xgene_edac.c pg_f = ctx->pmd_csr + cpu_idx * CPU_CSR_STRIDE + CPU_MEMERR_CPU_PAGE;
cpu_idx 533 drivers/edac/xgene_edac.c ctx->pmd * MAX_CPU_PER_PMD + cpu_idx, val,
cpu_idx 573 drivers/edac/xgene_edac.c ctx->pmd * MAX_CPU_PER_PMD + cpu_idx, val,
cpu_idx 617 drivers/edac/xgene_edac.c ctx->pmd * MAX_CPU_PER_PMD + cpu_idx, val,
cpu_idx 196 drivers/infiniband/hw/cxgb3/iwch_cm.c req->cpu_idx = 0;
cpu_idx 217 drivers/infiniband/hw/cxgb3/iwch_cm.c req->cpu_idx = 0;
cpu_idx 1249 drivers/infiniband/hw/cxgb3/iwch_cm.c req->cpu_idx = 0;
cpu_idx 3921 drivers/infiniband/ulp/srp/ib_srp.c int cpu_idx = 0;
cpu_idx 3926 drivers/infiniband/ulp/srp/ib_srp.c if (ch_start + cpu_idx >= ch_end)
cpu_idx 3928 drivers/infiniband/ulp/srp/ib_srp.c ch = &target->ch[ch_start + cpu_idx];
cpu_idx 3931 drivers/infiniband/ulp/srp/ib_srp.c cv_start + cpu_idx % (cv_end - cv_start);
cpu_idx 3958 drivers/infiniband/ulp/srp/ib_srp.c ch_start + cpu_idx,
cpu_idx 3960 drivers/infiniband/ulp/srp/ib_srp.c if (node_idx == 0 && cpu_idx == 0) {
cpu_idx 3971 drivers/infiniband/ulp/srp/ib_srp.c cpu_idx++;
cpu_idx 157 drivers/irqchip/irq-bcm6345-l1.c unsigned int cpu_idx = cpu_for_irq(intc, d);
cpu_idx 159 drivers/irqchip/irq-bcm6345-l1.c intc->cpus[cpu_idx]->enable_cache[word] |= mask;
cpu_idx 160 drivers/irqchip/irq-bcm6345-l1.c __raw_writel(intc->cpus[cpu_idx]->enable_cache[word],
cpu_idx 161 drivers/irqchip/irq-bcm6345-l1.c intc->cpus[cpu_idx]->map_base + reg_enable(intc, word));
cpu_idx 169 drivers/irqchip/irq-bcm6345-l1.c unsigned int cpu_idx = cpu_for_irq(intc, d);
cpu_idx 171 drivers/irqchip/irq-bcm6345-l1.c intc->cpus[cpu_idx]->enable_cache[word] &= ~mask;
cpu_idx 172 drivers/irqchip/irq-bcm6345-l1.c __raw_writel(intc->cpus[cpu_idx]->enable_cache[word],
cpu_idx 173 drivers/irqchip/irq-bcm6345-l1.c intc->cpus[cpu_idx]->map_base + reg_enable(intc, word));
cpu_idx 148 drivers/irqchip/irq-bcm7038-l1.c static void __bcm7038_l1_unmask(struct irq_data *d, unsigned int cpu_idx)
cpu_idx 154 drivers/irqchip/irq-bcm7038-l1.c intc->cpus[cpu_idx]->mask_cache[word] &= ~mask;
cpu_idx 155 drivers/irqchip/irq-bcm7038-l1.c l1_writel(mask, intc->cpus[cpu_idx]->map_base +
cpu_idx 159 drivers/irqchip/irq-bcm7038-l1.c static void __bcm7038_l1_mask(struct irq_data *d, unsigned int cpu_idx)
cpu_idx 165 drivers/irqchip/irq-bcm7038-l1.c intc->cpus[cpu_idx]->mask_cache[word] |= mask;
cpu_idx 166 drivers/irqchip/irq-bcm7038-l1.c l1_writel(mask, intc->cpus[cpu_idx]->map_base +
cpu_idx 1103 drivers/net/ethernet/chelsio/cxgb3/cxgb3_offload.c req->cpu_idx = 0;
cpu_idx 228 drivers/net/ethernet/chelsio/cxgb3/t3_cpl.h __u8 cpu_idx:6;
cpu_idx 232 drivers/net/ethernet/chelsio/cxgb3/t3_cpl.h __u8 cpu_idx:6;
cpu_idx 659 drivers/net/ethernet/chelsio/cxgb3/t3_cpl.h __u8 cpu_idx;
cpu_idx 672 drivers/net/ethernet/chelsio/cxgb3/t3_cpl.h __u8 cpu_idx;
cpu_idx 727 drivers/net/ethernet/chelsio/cxgb3/t3_cpl.h __u8 cpu_idx;
cpu_idx 2537 drivers/net/ethernet/chelsio/cxgb3/t3_hw.c int i, j, cpu_idx = 0, q_idx = 0;
cpu_idx 2544 drivers/net/ethernet/chelsio/cxgb3/t3_hw.c val |= (cpus[cpu_idx++] & 0x3f) << (8 * j);
cpu_idx 2545 drivers/net/ethernet/chelsio/cxgb3/t3_hw.c if (cpus[cpu_idx] == 0xff)
cpu_idx 2546 drivers/net/ethernet/chelsio/cxgb3/t3_hw.c cpu_idx = 0;
cpu_idx 1398 drivers/net/ethernet/mediatek/mtk_eth_soc.c cpu = ring->cpu_idx;
cpu_idx 1422 drivers/net/ethernet/mediatek/mtk_eth_soc.c ring->cpu_idx = cpu;
cpu_idx 633 drivers/net/ethernet/mediatek/mtk_eth_soc.h int cpu_idx;
cpu_idx 40 drivers/net/wireless/mediatek/mt76/dma.c writel(0, &q->regs->cpu_idx);
cpu_idx 135 drivers/net/wireless/mediatek/mt76/dma.c writel(q->head, &q->regs->cpu_idx);
cpu_idx 253 drivers/net/wireless/mediatek/mt76/dma.c writel(q->head, &q->regs->cpu_idx);
cpu_idx 104 drivers/net/wireless/mediatek/mt76/mt76.h u32 cpu_idx;
cpu_idx 1408 drivers/net/wireless/mediatek/mt76/mt7603/mac.c dma_idx != readl(&q->regs->cpu_idx))
cpu_idx 1162 drivers/scsi/cxgbi/cxgb3i/cxgb3i.c req->cpu_idx = 0;
cpu_idx 1198 drivers/scsi/cxgbi/cxgb3i/cxgb3i.c req->cpu_idx = 0;
cpu_idx 667 drivers/soc/fsl/qbman/qman_ccsr.c void qman_set_sdest(u16 channel, unsigned int cpu_idx)
cpu_idx 675 drivers/soc/fsl/qbman/qman_ccsr.c cpu_idx /= 2;
cpu_idx 676 drivers/soc/fsl/qbman/qman_ccsr.c after = (before & (~IO_CFG_SDEST_MASK)) | (cpu_idx << 16);
cpu_idx 680 drivers/soc/fsl/qbman/qman_ccsr.c after = (before & (~IO_CFG_SDEST_MASK)) | (cpu_idx << 16);
cpu_idx 204 drivers/soc/fsl/qbman/qman_priv.h void qman_set_sdest(u16 channel, unsigned int cpu_idx);
cpu_idx 230 drivers/thermal/cpu_cooling.c int cpu_idx)
cpu_idx 234 drivers/thermal/cpu_cooling.c struct time_in_idle *idle_time = &cpufreq_cdev->idle_time[cpu_idx];
cpu_idx 300 kernel/sched/debug.c static struct ctl_table **cpu_idx;
cpu_idx 314 kernel/sched/debug.c if (!cpu_idx) {
cpu_idx 317 kernel/sched/debug.c cpu_idx = kcalloc(nr_cpu_ids, sizeof(struct ctl_table*), GFP_KERNEL);
cpu_idx 318 kernel/sched/debug.c if (!cpu_idx)
cpu_idx 323 kernel/sched/debug.c cpu_idx[i] = e;
cpu_idx 340 kernel/sched/debug.c struct ctl_table *e = cpu_idx[i];
cpu_idx 2158 kernel/sched/fair.c int mem_idx, membuf_idx, cpu_idx, cpubuf_idx;
cpu_idx 2167 kernel/sched/fair.c cpu_idx = task_faults_idx(NUMA_CPU, nid, priv);
cpu_idx 2185 kernel/sched/fair.c f_diff = f_weight - p->numa_faults[cpu_idx] / 2;
cpu_idx 2189 kernel/sched/fair.c p->numa_faults[cpu_idx] += f_diff;
cpu_idx 287 samples/bpf/xdp_redirect_cpu_kern.c u32 cpu_idx;
cpu_idx 296 samples/bpf/xdp_redirect_cpu_kern.c cpu_idx = *cpu_iterator;
cpu_idx 302 samples/bpf/xdp_redirect_cpu_kern.c cpu_selected = bpf_map_lookup_elem(&cpus_available, &cpu_idx);
cpu_idx 332 samples/bpf/xdp_redirect_cpu_kern.c u32 cpu_idx = 0;
cpu_idx 354 samples/bpf/xdp_redirect_cpu_kern.c cpu_idx = 0; /* ARP packet handled on separate CPU */
cpu_idx 357 samples/bpf/xdp_redirect_cpu_kern.c cpu_idx = 0;
cpu_idx 364 samples/bpf/xdp_redirect_cpu_kern.c cpu_idx = 2;
cpu_idx 367 samples/bpf/xdp_redirect_cpu_kern.c cpu_idx = 0;
cpu_idx 370 samples/bpf/xdp_redirect_cpu_kern.c cpu_idx = 1;
cpu_idx 373 samples/bpf/xdp_redirect_cpu_kern.c cpu_idx = 0;
cpu_idx 376 samples/bpf/xdp_redirect_cpu_kern.c cpu_lookup = bpf_map_lookup_elem(&cpus_available, &cpu_idx);
cpu_idx 400 samples/bpf/xdp_redirect_cpu_kern.c u32 cpu_idx = 0;
cpu_idx 423 samples/bpf/xdp_redirect_cpu_kern.c cpu_idx = 0; /* ARP packet handled on separate CPU */
cpu_idx 426 samples/bpf/xdp_redirect_cpu_kern.c cpu_idx = 0;
cpu_idx 433 samples/bpf/xdp_redirect_cpu_kern.c cpu_idx = 2;
cpu_idx 436 samples/bpf/xdp_redirect_cpu_kern.c cpu_idx = 0;
cpu_idx 439 samples/bpf/xdp_redirect_cpu_kern.c cpu_idx = 1;
cpu_idx 449 samples/bpf/xdp_redirect_cpu_kern.c cpu_idx = 0;
cpu_idx 452 samples/bpf/xdp_redirect_cpu_kern.c cpu_lookup = bpf_map_lookup_elem(&cpus_available, &cpu_idx);
cpu_idx 520 samples/bpf/xdp_redirect_cpu_kern.c u32 cpu_idx = 0;
cpu_idx 553 samples/bpf/xdp_redirect_cpu_kern.c cpu_idx = cpu_hash % *cpu_max;
cpu_idx 555 samples/bpf/xdp_redirect_cpu_kern.c cpu_lookup = bpf_map_lookup_elem(&cpus_available, &cpu_idx);
cpu_idx 634 tools/perf/util/evlist.c struct mmap_params *mp, int cpu_idx,
cpu_idx 639 tools/perf/util/evlist.c int evlist_cpu = cpu_map__cpu(evlist->core.cpus, cpu_idx);
cpu_idx 1459 tools/perf/util/evsel.c int nr_cpus, int cpu_idx,
cpu_idx 1464 tools/perf/util/evsel.c if (cpu_idx >= nr_cpus || thread_idx >= nr_threads)
cpu_idx 1468 tools/perf/util/evsel.c nr_cpus = pos != evsel ? nr_cpus : cpu_idx;