per_cpu_ptr 267 arch/arc/kernel/smp.c unsigned long __percpu *ipi_data_ptr = per_cpu_ptr(&ipi_data, cpu); per_cpu_ptr 406 arch/arc/kernel/smp.c int *dev = per_cpu_ptr(&ipi_dev, cpu); per_cpu_ptr 151 arch/arm/xen/enlighten.c vcpup = per_cpu_ptr(xen_vcpu_info, cpu); per_cpu_ptr 555 arch/arm64/include/asm/kvm_mmu.h ptr = per_cpu_ptr(&arm64_ssbd_callback_required, cpu); per_cpu_ptr 31 arch/csky/include/asm/asid.h #define active_asid(info, cpu) *per_cpu_ptr((info)->active, cpu) per_cpu_ptr 80 arch/csky/kernel/smp.c set_bit(operation, &per_cpu_ptr(&ipi_data, i)->bits); per_cpu_ptr 16 arch/csky/mm/asid.c #define reserved_asid(info, cpu) *per_cpu_ptr((info)->reserved, cpu) per_cpu_ptr 55 arch/ia64/kernel/crash.c buf = (u64 *) per_cpu_ptr(crash_notes, cpu); per_cpu_ptr 819 arch/powerpc/kvm/book3s_hv_rm_xics.c raddr = per_cpu_ptr(addr, cpu); per_cpu_ptr 529 arch/powerpc/platforms/pseries/lpar.c disp = per_cpu_ptr(&vcpu_disp_data, cpu); per_cpu_ptr 564 arch/powerpc/platforms/pseries/lpar.c disp = per_cpu_ptr(&vcpu_disp_data, cpu); per_cpu_ptr 3512 arch/powerpc/xmon/xmon.c addr = (unsigned long)per_cpu_ptr(ptr, cpu); per_cpu_ptr 156 arch/s390/kernel/processor.c struct cpu_info *c = per_cpu_ptr(&cpu_info, n); per_cpu_ptr 65 arch/s390/lib/spinlock.c node = per_cpu_ptr(&spin_wait[0], cpu); per_cpu_ptr 103 arch/s390/lib/spinlock.c return per_cpu_ptr(&spin_wait[ix], cpu - 1); per_cpu_ptr 75 arch/x86/events/amd/uncore.c return *per_cpu_ptr(amd_uncore_nb, event->cpu); per_cpu_ptr 77 arch/x86/events/amd/uncore.c return *per_cpu_ptr(amd_uncore_llc, event->cpu); per_cpu_ptr 341 arch/x86/events/amd/uncore.c *per_cpu_ptr(amd_uncore_nb, cpu) = uncore_nb; per_cpu_ptr 355 arch/x86/events/amd/uncore.c *per_cpu_ptr(amd_uncore_llc, cpu) = uncore_llc; per_cpu_ptr 362 arch/x86/events/amd/uncore.c *per_cpu_ptr(amd_uncore_nb, cpu) = NULL; per_cpu_ptr 375 arch/x86/events/amd/uncore.c that = *per_cpu_ptr(uncores, cpu); per_cpu_ptr 400 arch/x86/events/amd/uncore.c uncore = *per_cpu_ptr(amd_uncore_nb, cpu); per_cpu_ptr 405 arch/x86/events/amd/uncore.c *per_cpu_ptr(amd_uncore_nb, cpu) = uncore; per_cpu_ptr 409 arch/x86/events/amd/uncore.c uncore = *per_cpu_ptr(amd_uncore_llc, cpu); per_cpu_ptr 413 arch/x86/events/amd/uncore.c *per_cpu_ptr(amd_uncore_llc, cpu) = uncore; per_cpu_ptr 433 arch/x86/events/amd/uncore.c struct amd_uncore *uncore = *per_cpu_ptr(uncores, cpu); per_cpu_ptr 456 arch/x86/events/amd/uncore.c struct amd_uncore *this = *per_cpu_ptr(uncores, cpu); per_cpu_ptr 463 arch/x86/events/amd/uncore.c struct amd_uncore *that = *per_cpu_ptr(uncores, i); per_cpu_ptr 491 arch/x86/events/amd/uncore.c struct amd_uncore *uncore = *per_cpu_ptr(uncores, cpu); per_cpu_ptr 498 arch/x86/events/amd/uncore.c *per_cpu_ptr(uncores, cpu) = NULL; per_cpu_ptr 336 arch/x86/events/intel/ds.c struct cpu_hw_events *hwev = per_cpu_ptr(&cpu_hw_events, cpu); per_cpu_ptr 374 arch/x86/events/intel/ds.c struct cpu_hw_events *hwev = per_cpu_ptr(&cpu_hw_events, cpu); per_cpu_ptr 392 arch/x86/events/intel/ds.c struct cpu_hw_events *hwev = per_cpu_ptr(&cpu_hw_events, cpu); per_cpu_ptr 421 arch/x86/events/intel/ds.c struct cpu_hw_events *hwev = per_cpu_ptr(&cpu_hw_events, cpu); per_cpu_ptr 158 arch/x86/kernel/acpi/cstate.c percpu_entry = per_cpu_ptr(cpu_cstate_entry, cpu); per_cpu_ptr 189 arch/x86/kernel/acpi/cstate.c percpu_entry = per_cpu_ptr(cpu_cstate_entry, cpu); per_cpu_ptr 898 arch/x86/kernel/apic/vector.c hlist_add_head(&apicd->clist, per_cpu_ptr(&cleanup_list, cpu)); per_cpu_ptr 35 arch/x86/kernel/irq_64.c char *stack = (char 
*)per_cpu_ptr(&irq_stack_backing_store, cpu); per_cpu_ptr 60 arch/x86/kernel/irq_64.c void *va = per_cpu_ptr(&irq_stack_backing_store, cpu); per_cpu_ptr 208 arch/x86/kernel/kgdb.c bp = *per_cpu_ptr(breakinfo[breakno].pev, cpu); per_cpu_ptr 237 arch/x86/kernel/kgdb.c pevent = per_cpu_ptr(breakinfo[breakno].pev, cpu); per_cpu_ptr 249 arch/x86/kernel/kgdb.c pevent = per_cpu_ptr(breakinfo[breakno].pev, cpu); per_cpu_ptr 264 arch/x86/kernel/kgdb.c pevent = per_cpu_ptr(breakinfo[breakno].pev, cpu); per_cpu_ptr 304 arch/x86/kernel/kgdb.c bp = *per_cpu_ptr(breakinfo[i].pev, cpu); per_cpu_ptr 397 arch/x86/kernel/kgdb.c bp = *per_cpu_ptr(breakinfo[i].pev, cpu); per_cpu_ptr 665 arch/x86/kernel/kgdb.c pevent = per_cpu_ptr(breakinfo[i].pev, cpu); per_cpu_ptr 746 arch/x86/kernel/kvm.c zalloc_cpumask_var_node(per_cpu_ptr(&__pv_tlb_mask, cpu), per_cpu_ptr 153 arch/x86/kernel/tsc.c c2n = per_cpu_ptr(&cyc2ns, cpu); per_cpu_ptr 200 arch/x86/kernel/tsc.c c2n = per_cpu_ptr(&cyc2ns, cpu); per_cpu_ptr 176 arch/x86/kernel/tsc_sync.c ref = per_cpu_ptr(&tsc_adjust, refcpu); per_cpu_ptr 269 arch/x86/kvm/x86.c struct kvm_shared_msrs *smsr = per_cpu_ptr(shared_msrs, cpu); per_cpu_ptr 302 arch/x86/kvm/x86.c struct kvm_shared_msrs *smsr = per_cpu_ptr(shared_msrs, cpu); per_cpu_ptr 325 arch/x86/kvm/x86.c struct kvm_shared_msrs *smsr = per_cpu_ptr(shared_msrs, cpu); per_cpu_ptr 15 arch/x86/lib/msr-smp.c reg = per_cpu_ptr(rv->msrs, this_cpu); per_cpu_ptr 29 arch/x86/lib/msr-smp.c reg = per_cpu_ptr(rv->msrs, this_cpu); per_cpu_ptr 91 arch/x86/mm/cpu_entry_area.c struct exception_stacks *estacks = per_cpu_ptr(&exception_stacks, cpu); per_cpu_ptr 141 arch/x86/mm/cpu_entry_area.c per_cpu_ptr(&entry_stack_storage, cpu), 1, per_cpu_ptr 1285 block/blk-iocost.c struct ioc_pcpu_stat *stat = per_cpu_ptr(ioc->pcpu_stat, cpu); per_cpu_ptr 533 block/blk-iolatency.c s = per_cpu_ptr(iolat->stats, cpu); per_cpu_ptr 898 block/blk-iolatency.c s = per_cpu_ptr(iolat->stats, cpu); per_cpu_ptr 971 block/blk-iolatency.c stat = per_cpu_ptr(iolat->stats, cpu); per_cpu_ptr 301 block/blk-mq-sysfs.c ctx = per_cpu_ptr(q->queue_ctx, cpu); per_cpu_ptr 315 block/blk-mq-sysfs.c ctx = per_cpu_ptr(q->queue_ctx, cpu); per_cpu_ptr 2425 block/blk-mq.c struct blk_mq_ctx *__ctx = per_cpu_ptr(q->queue_ctx, i); per_cpu_ptr 2509 block/blk-mq.c ctx = per_cpu_ptr(q->queue_ctx, i); per_cpu_ptr 2662 block/blk-mq.c struct blk_mq_ctx *ctx = per_cpu_ptr(ctxs->queue_ctx, cpu); per_cpu_ptr 144 block/blk-mq.h return per_cpu_ptr(q->queue_ctx, cpu); per_cpu_ptr 91 block/blk-stat.c cpu_stat = per_cpu_ptr(cb->cpu_stat, cpu); per_cpu_ptr 144 block/blk-stat.c cpu_stat = per_cpu_ptr(cb->cpu_stat, cpu); per_cpu_ptr 2057 block/blk-throttle.c bucket = per_cpu_ptr(td->latency_buckets[rw], per_cpu_ptr 287 block/kyber-iosched.c cpu_latency = per_cpu_ptr(kqd->cpu_latency, cpu); per_cpu_ptr 105 crypto/cryptd.c cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu); per_cpu_ptr 119 crypto/cryptd.c cpu_queue = per_cpu_ptr(queue->cpu_queue, cpu); per_cpu_ptr 74 crypto/scompress.c scratch = per_cpu_ptr(&scomp_scratch, i); per_cpu_ptr 91 crypto/scompress.c scratch = per_cpu_ptr(&scomp_scratch, i); per_cpu_ptr 606 drivers/acpi/processor_perflib.c if (!performance || !per_cpu_ptr(performance, i)) { per_cpu_ptr 618 drivers/acpi/processor_perflib.c pr->performance = per_cpu_ptr(performance, i); per_cpu_ptr 158 drivers/base/cpu.c addr = per_cpu_ptr_to_phys(per_cpu_ptr(crash_notes, cpunum)); per_cpu_ptr 163 drivers/block/zram/zcomp.c if (WARN_ON(*per_cpu_ptr(comp->stream, cpu))) per_cpu_ptr 171 
drivers/block/zram/zcomp.c *per_cpu_ptr(comp->stream, cpu) = zstrm; per_cpu_ptr 180 drivers/block/zram/zcomp.c zstrm = *per_cpu_ptr(comp->stream, cpu); per_cpu_ptr 183 drivers/block/zram/zcomp.c *per_cpu_ptr(comp->stream, cpu) = NULL; per_cpu_ptr 302 drivers/bus/mips_cdmm.c bus_p = per_cpu_ptr(&mips_cdmm_buses, cpu); per_cpu_ptr 2428 drivers/char/random.c batched_entropy = per_cpu_ptr(&batched_entropy_u32, cpu); per_cpu_ptr 2433 drivers/char/random.c batched_entropy = per_cpu_ptr(&batched_entropy_u64, cpu); per_cpu_ptr 18 drivers/clocksource/dummy_timer.c struct clock_event_device *evt = per_cpu_ptr(&dummy_timer_evt, cpu); per_cpu_ptr 449 drivers/clocksource/exynos_mct.c per_cpu_ptr(&percpu_mct_tick, cpu); per_cpu_ptr 487 drivers/clocksource/exynos_mct.c per_cpu_ptr(&percpu_mct_tick, cpu); per_cpu_ptr 531 drivers/clocksource/exynos_mct.c per_cpu_ptr(&percpu_mct_tick, cpu); per_cpu_ptr 565 drivers/clocksource/exynos_mct.c per_cpu_ptr(&percpu_mct_tick, cpu); per_cpu_ptr 117 drivers/clocksource/hyperv_timer.c ce = per_cpu_ptr(hv_clock_event, cpu); per_cpu_ptr 142 drivers/clocksource/hyperv_timer.c ce = per_cpu_ptr(hv_clock_event, cpu); per_cpu_ptr 197 drivers/clocksource/hyperv_timer.c ce = per_cpu_ptr(hv_clock_event, cpu); per_cpu_ptr 219 drivers/clocksource/jcore-pit.c struct jcore_pit *pit = per_cpu_ptr(jcore_pit_percpu, cpu); per_cpu_ptr 175 drivers/clocksource/timer-armada-370-xp.c struct clock_event_device *evt = per_cpu_ptr(armada_370_xp_evt, cpu); per_cpu_ptr 205 drivers/clocksource/timer-armada-370-xp.c struct clock_event_device *evt = per_cpu_ptr(armada_370_xp_evt, cpu); per_cpu_ptr 176 drivers/clocksource/timer-atlas7.c struct clock_event_device *ce = per_cpu_ptr(sirfsoc_clockevent, cpu); per_cpu_ptr 78 drivers/clocksource/timer-mp-csky.c struct timer_of *to = per_cpu_ptr(&csky_to, cpu); per_cpu_ptr 145 drivers/clocksource/timer-mp-csky.c to = per_cpu_ptr(&csky_to, cpu); per_cpu_ptr 168 drivers/clocksource/timer-mp-csky.c to = per_cpu_ptr(&csky_to, cpu_rollback); per_cpu_ptr 101 drivers/clocksource/timer-qcom.c struct clock_event_device *evt = per_cpu_ptr(msm_evt, cpu); per_cpu_ptr 131 drivers/clocksource/timer-qcom.c struct clock_event_device *evt = per_cpu_ptr(msm_evt, cpu); per_cpu_ptr 59 drivers/clocksource/timer-riscv.c struct clock_event_device *ce = per_cpu_ptr(&riscv_clock_event, cpu); per_cpu_ptr 134 drivers/clocksource/timer-tegra.c struct timer_of *to = per_cpu_ptr(&tegra_to, cpu); per_cpu_ptr 159 drivers/clocksource/timer-tegra.c struct timer_of *to = per_cpu_ptr(&tegra_to, cpu); per_cpu_ptr 302 drivers/clocksource/timer-tegra.c struct timer_of *cpu_to = per_cpu_ptr(&tegra_to, cpu); per_cpu_ptr 359 drivers/clocksource/timer-tegra.c cpu_to = per_cpu_ptr(&tegra_to, cpu); per_cpu_ptr 65 drivers/cpufreq/acpi-cpufreq.c return per_cpu_ptr(acpi_perf_data, data->acpi_perf_cpu); per_cpu_ptr 524 drivers/cpufreq/acpi-cpufreq.c free_cpumask_var(per_cpu_ptr(acpi_perf_data, i) per_cpu_ptr 567 drivers/cpufreq/acpi-cpufreq.c &per_cpu_ptr(acpi_perf_data, i)->shared_cpu_map, per_cpu_ptr 661 drivers/cpufreq/acpi-cpufreq.c perf = per_cpu_ptr(acpi_perf_data, cpu); per_cpu_ptr 855 drivers/cpufreq/acpi-cpufreq.c struct acpi_processor_performance *perf = per_cpu_ptr(acpi_perf_data, per_cpu_ptr 152 drivers/cpufreq/pcc-cpufreq.c pcc_cpu_data = per_cpu_ptr(pcc_cpu_info, cpu); per_cpu_ptr 208 drivers/cpufreq/pcc-cpufreq.c pcc_cpu_data = per_cpu_ptr(pcc_cpu_info, cpu); per_cpu_ptr 258 drivers/cpufreq/pcc-cpufreq.c pcc_cpu_data = per_cpu_ptr(pcc_cpu_info, cpu); per_cpu_ptr 57 
drivers/cpuidle/cpuidle-haltpoll.c dev = per_cpu_ptr(haltpoll_cpuidle_devices, cpu); per_cpu_ptr 74 drivers/cpuidle/cpuidle-haltpoll.c dev = per_cpu_ptr(haltpoll_cpuidle_devices, cpu); per_cpu_ptr 119 drivers/cpuidle/governors/teo.c struct teo_cpu *cpu_data = per_cpu_ptr(&teo_cpus, dev->cpu); per_cpu_ptr 234 drivers/cpuidle/governors/teo.c struct teo_cpu *cpu_data = per_cpu_ptr(&teo_cpus, dev->cpu); per_cpu_ptr 440 drivers/cpuidle/governors/teo.c struct teo_cpu *cpu_data = per_cpu_ptr(&teo_cpus, dev->cpu); per_cpu_ptr 465 drivers/cpuidle/governors/teo.c struct teo_cpu *cpu_data = per_cpu_ptr(&teo_cpus, dev->cpu); per_cpu_ptr 4642 drivers/crypto/caam/caamalg_qi2.c ppriv = per_cpu_ptr(priv->ppriv, cpu); per_cpu_ptr 4682 drivers/crypto/caam/caamalg_qi2.c ppriv = per_cpu_ptr(priv->ppriv, cpu); per_cpu_ptr 4689 drivers/crypto/caam/caamalg_qi2.c ppriv = per_cpu_ptr(priv->ppriv, cpu); per_cpu_ptr 4704 drivers/crypto/caam/caamalg_qi2.c ppriv = per_cpu_ptr(priv->ppriv, cpu); per_cpu_ptr 4724 drivers/crypto/caam/caamalg_qi2.c ppriv = per_cpu_ptr(priv->ppriv, cpu); per_cpu_ptr 5017 drivers/crypto/caam/caamalg_qi2.c ppriv = per_cpu_ptr(priv->ppriv, cpu); per_cpu_ptr 5058 drivers/crypto/caam/caamalg_qi2.c ppriv = per_cpu_ptr(priv->ppriv, i); per_cpu_ptr 5087 drivers/crypto/caam/caamalg_qi2.c ppriv = per_cpu_ptr(priv->ppriv, i); per_cpu_ptr 512 drivers/crypto/caam/qi.c irqtask = &per_cpu_ptr(&pcpu_qipriv.caam_napi, i)->irqtask; per_cpu_ptr 737 drivers/crypto/caam/qi.c struct caam_qi_pcpu_priv *priv = per_cpu_ptr(&pcpu_qipriv, i); per_cpu_ptr 89 drivers/dma/dmaengine.c count += per_cpu_ptr(chan->local, i)->memcpy_count; per_cpu_ptr 111 drivers/dma/dmaengine.c count += per_cpu_ptr(chan->local, i)->bytes_transferred; per_cpu_ptr 442 drivers/dma/dmaengine.c per_cpu_ptr(channel_table[cap], cpu)->chan = NULL; per_cpu_ptr 459 drivers/dma/dmaengine.c per_cpu_ptr(channel_table[cap], cpu)->chan = chan; per_cpu_ptr 3050 drivers/edac/amd64_edac.c struct msr *reg = per_cpu_ptr(msrs, cpu); per_cpu_ptr 3083 drivers/edac/amd64_edac.c struct msr *reg = per_cpu_ptr(msrs, cpu); per_cpu_ptr 259 drivers/firmware/arm_sdei.c reg = per_cpu_ptr(regs, cpu); per_cpu_ptr 575 drivers/firmware/arm_sdei.c reg = per_cpu_ptr(arg->event->private_registered, smp_processor_id()); per_cpu_ptr 294 drivers/gpu/drm/i915/i915_pmu.c sum += *per_cpu_ptr(desc->kstat_irqs, cpu); per_cpu_ptr 789 drivers/hv/channel_mgmt.c = per_cpu_ptr(hv_context.cpu_context, cpu); per_cpu_ptr 818 drivers/hv/channel_mgmt.c = per_cpu_ptr(hv_context.cpu_context, cpu); per_cpu_ptr 85 drivers/hv/hv.c hv_cpu = per_cpu_ptr(hv_context.cpu_context, cpu); per_cpu_ptr 97 drivers/hv/hv.c hv_cpu = per_cpu_ptr(hv_context.cpu_context, cpu); per_cpu_ptr 140 drivers/hv/hv.c = per_cpu_ptr(hv_context.cpu_context, cpu); per_cpu_ptr 160 drivers/hv/hv.c = per_cpu_ptr(hv_context.cpu_context, cpu); per_cpu_ptr 2461 drivers/hv/vmbus_drv.c = per_cpu_ptr(hv_context.cpu_context, cpu); per_cpu_ptr 70 drivers/hwtracing/coresight/coresight-etm-perf.c return per_cpu_ptr(data->path, cpu); per_cpu_ptr 1156 drivers/idle/intel_idle.c dev = per_cpu_ptr(intel_idle_cpuidle_devices, i); per_cpu_ptr 1392 drivers/idle/intel_idle.c dev = per_cpu_ptr(intel_idle_cpuidle_devices, cpu); per_cpu_ptr 1421 drivers/idle/intel_idle.c dev = per_cpu_ptr(intel_idle_cpuidle_devices, cpu); per_cpu_ptr 308 drivers/infiniband/hw/hfi1/affinity.c prev_cntr = *per_cpu_ptr(comp_vect_affinity, ret_cpu); per_cpu_ptr 310 drivers/infiniband/hw/hfi1/affinity.c cntr = *per_cpu_ptr(comp_vect_affinity, curr_cpu); per_cpu_ptr 318 
drivers/infiniband/hw/hfi1/affinity.c *per_cpu_ptr(comp_vect_affinity, ret_cpu) += 1; per_cpu_ptr 342 drivers/infiniband/hw/hfi1/affinity.c prev_cntr = *per_cpu_ptr(comp_vect_affinity, max_cpu); per_cpu_ptr 344 drivers/infiniband/hw/hfi1/affinity.c cntr = *per_cpu_ptr(comp_vect_affinity, curr_cpu); per_cpu_ptr 352 drivers/infiniband/hw/hfi1/affinity.c *per_cpu_ptr(comp_vect_affinity, max_cpu) -= 1; per_cpu_ptr 1625 drivers/infiniband/hw/hfi1/chip.c counter += *per_cpu_ptr(cntr, cpu); per_cpu_ptr 181 drivers/infiniband/hw/hfi1/debugfs.c per_cpu_ptr(dd->tx_opstats, j); per_cpu_ptr 104 drivers/infiniband/hw/hfi1/fault.c per_cpu_ptr(dd->tx_opstats, j); per_cpu_ptr 680 drivers/infiniband/hw/hfi1/pio.c ret += *per_cpu_ptr(sc->buffers_allocated, cpu); per_cpu_ptr 689 drivers/infiniband/hw/hfi1/pio.c (*per_cpu_ptr(sc->buffers_allocated, cpu)) = 0; per_cpu_ptr 1053 drivers/infiniband/hw/qib/qib_init.c int_counter += *per_cpu_ptr(dd->int_counter, cpu); per_cpu_ptr 1650 drivers/infiniband/hw/qib/qib_mad.c p = per_cpu_ptr(ibp->pmastats, cpu); per_cpu_ptr 473 drivers/infiniband/hw/qib/qib_sysfs.c counter += *per_cpu_ptr(cntr, cpu); per_cpu_ptr 98 drivers/iommu/iova.c fq = per_cpu_ptr(queue, cpu); per_cpu_ptr 522 drivers/iommu/iova.c struct iova_fq *fq = per_cpu_ptr(iovad->fq, cpu); per_cpu_ptr 542 drivers/iommu/iova.c fq = per_cpu_ptr(iovad->fq, cpu); per_cpu_ptr 875 drivers/iommu/iova.c cpu_rcache = per_cpu_ptr(rcache->cpu_rcaches, cpu); per_cpu_ptr 1017 drivers/iommu/iova.c cpu_rcache = per_cpu_ptr(rcache->cpu_rcaches, cpu); per_cpu_ptr 1039 drivers/iommu/iova.c cpu_rcache = per_cpu_ptr(rcache->cpu_rcaches, cpu); per_cpu_ptr 174 drivers/irqchip/irq-gic-v3-its.c #define gic_data_rdist_cpu(cpu) (per_cpu_ptr(gic_rdists->rdist, cpu)) per_cpu_ptr 2773 drivers/irqchip/irq-gic-v3-its.c rdbase = per_cpu_ptr(gic_rdists->rdist, from)->rd_base; per_cpu_ptr 2930 drivers/irqchip/irq-gic-v3-its.c rdbase = per_cpu_ptr(gic_rdists->rdist, vpe->col_idx)->rd_base; per_cpu_ptr 2970 drivers/irqchip/irq-gic-v3-its.c rdbase = per_cpu_ptr(gic_rdists->rdist, vpe->col_idx)->rd_base; per_cpu_ptr 1118 drivers/irqchip/irq-gic.c *per_cpu_ptr(gic->dist_base.percpu_base, cpu) = per_cpu_ptr 1120 drivers/irqchip/irq-gic.c *per_cpu_ptr(gic->cpu_base.percpu_base, cpu) = per_cpu_ptr 72 drivers/irqchip/irq-mips-gic.c clear_bit(intr, per_cpu_ptr(pcpu_masks, i)); per_cpu_ptr 193 drivers/irqchip/irq-mips-gic.c set_bit(intr, per_cpu_ptr(pcpu_masks, cpu)); per_cpu_ptr 276 drivers/irqchip/irq-mips-gic.c set_bit(irq, per_cpu_ptr(pcpu_masks, cpu)); per_cpu_ptr 93 drivers/irqchip/irq-sifive-plic.c struct plic_handler *handler = per_cpu_ptr(&plic_handlers, cpu); per_cpu_ptr 276 drivers/irqchip/irq-sifive-plic.c handler = per_cpu_ptr(&plic_handlers, cpu); per_cpu_ptr 200 drivers/md/dm-stats.c last = per_cpu_ptr(stats->last, cpu); per_cpu_ptr 2062 drivers/md/raid5.c percpu = per_cpu_ptr(conf->percpu, cpu); per_cpu_ptr 2268 drivers/md/raid5.c percpu = per_cpu_ptr(conf->percpu, cpu); per_cpu_ptr 6781 drivers/md/raid5.c free_scratch_buffer(conf, per_cpu_ptr(conf->percpu, cpu)); per_cpu_ptr 6817 drivers/md/raid5.c struct raid5_percpu *percpu = per_cpu_ptr(conf->percpu, cpu); per_cpu_ptr 70 drivers/net/dummy.c dstats = per_cpu_ptr(dev->dstats, i); per_cpu_ptr 340 drivers/net/ethernet/cavium/thunder/nicvf_ethtool.c tmp_stats += ((u64 *)per_cpu_ptr(nic->drv_stats, cpu)) per_cpu_ptr 1528 drivers/net/ethernet/cavium/thunder/nicvf_main.c memset(per_cpu_ptr(nic->drv_stats, cpu), 0, per_cpu_ptr 1704 drivers/net/ethernet/cavium/thunder/nicvf_main.c drv_stats = 
per_cpu_ptr(nic->drv_stats, cpu); per_cpu_ptr 983 drivers/net/ethernet/chelsio/cxgb/sge.c struct sge_port_stats *st = per_cpu_ptr(sge->port_stats[port], cpu); per_cpu_ptr 130 drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c pool = per_cpu_ptr(ppm->pool, cpu); per_cpu_ptr 203 drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c pool = per_cpu_ptr(ppm->pool, cpu); per_cpu_ptr 375 drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.c struct cxgbi_ppm_pool *ppool = per_cpu_ptr(pools, cpu); per_cpu_ptr 228 drivers/net/ethernet/freescale/dpaa/dpaa_eth.c percpu_priv = per_cpu_ptr(priv->percpu_priv, i); per_cpu_ptr 339 drivers/net/ethernet/freescale/dpaa/dpaa_eth.c percpu_priv = per_cpu_ptr(priv->percpu_priv, i); per_cpu_ptr 1539 drivers/net/ethernet/freescale/dpaa/dpaa_eth.c int *count_ptr = per_cpu_ptr(dpaa_bp->percpu_count, i); per_cpu_ptr 2456 drivers/net/ethernet/freescale/dpaa/dpaa_eth.c percpu_priv = per_cpu_ptr(priv->percpu_priv, i); per_cpu_ptr 2469 drivers/net/ethernet/freescale/dpaa/dpaa_eth.c percpu_priv = per_cpu_ptr(priv->percpu_priv, i); per_cpu_ptr 2652 drivers/net/ethernet/freescale/dpaa/dpaa_eth.c percpu_priv = per_cpu_ptr(priv->percpu_priv, cpu); per_cpu_ptr 2668 drivers/net/ethernet/freescale/dpaa/dpaa_eth.c percpu_priv = per_cpu_ptr(priv->percpu_priv, cpu); per_cpu_ptr 294 drivers/net/ethernet/freescale/dpaa/dpaa_ethtool.c percpu_priv = per_cpu_ptr(priv->percpu_priv, i); per_cpu_ptr 299 drivers/net/ethernet/freescale/dpaa/dpaa_ethtool.c bp_count[j] = *(per_cpu_ptr(dpaa_bp->percpu_count, i)); per_cpu_ptr 27 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth-debugfs.c stats = per_cpu_ptr(priv->percpu_stats, i); per_cpu_ptr 28 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth-debugfs.c extras = per_cpu_ptr(priv->percpu_extras, i); per_cpu_ptr 1471 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c percpu_stats = per_cpu_ptr(priv->percpu_stats, i); per_cpu_ptr 229 drivers/net/ethernet/freescale/dpaa2/dpaa2-ethtool.c extras = per_cpu_ptr(priv->percpu_extras, k); per_cpu_ptr 181 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c ddp_pool = per_cpu_ptr(fcoe->ddp_pool, get_cpu()); per_cpu_ptr 601 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c ddp_pool = per_cpu_ptr(fcoe->ddp_pool, cpu); per_cpu_ptr 621 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c ddp_pool = per_cpu_ptr(fcoe->ddp_pool, cpu); per_cpu_ptr 7205 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c ddp_pool = per_cpu_ptr(fcoe->ddp_pool, cpu); per_cpu_ptr 716 drivers/net/ethernet/marvell/mvneta.c cpu_stats = per_cpu_ptr(pp->stats, cpu); per_cpu_ptr 3198 drivers/net/ethernet/marvell/mvneta.c per_cpu_ptr(pp->ports, cpu); per_cpu_ptr 3226 drivers/net/ethernet/marvell/mvneta.c per_cpu_ptr(pp->ports, cpu); per_cpu_ptr 3733 drivers/net/ethernet/marvell/mvneta.c struct mvneta_pcpu_port *port = per_cpu_ptr(pp->ports, cpu); per_cpu_ptr 3754 drivers/net/ethernet/marvell/mvneta.c per_cpu_ptr(pp->ports, other_cpu); per_cpu_ptr 3790 drivers/net/ethernet/marvell/mvneta.c struct mvneta_pcpu_port *port = per_cpu_ptr(pp->ports, cpu); per_cpu_ptr 4189 drivers/net/ethernet/marvell/mvneta.c per_cpu_ptr(pp->ports, cpu); per_cpu_ptr 4217 drivers/net/ethernet/marvell/mvneta.c per_cpu_ptr(pp->ports, cpu); per_cpu_ptr 4678 drivers/net/ethernet/marvell/mvneta.c per_cpu_ptr(pp->ports, cpu); per_cpu_ptr 1986 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c txq_pcpu_aux = per_cpu_ptr(txq->pcpu, thread); per_cpu_ptr 2300 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c txq_pcpu = per_cpu_ptr(txq->pcpu, thread); per_cpu_ptr 2515 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c 
txq_pcpu = per_cpu_ptr(txq->pcpu, thread); per_cpu_ptr 2552 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c txq_pcpu = per_cpu_ptr(txq->pcpu, thread); per_cpu_ptr 2622 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c txq_pcpu = per_cpu_ptr(txq->pcpu, thread); per_cpu_ptr 3021 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c struct mvpp2_txq_pcpu *txq_pcpu = per_cpu_ptr(txq->pcpu, thread); per_cpu_ptr 3039 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c struct mvpp2_txq_pcpu *txq_pcpu = per_cpu_ptr(txq->pcpu, thread); per_cpu_ptr 3217 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c txq_pcpu = per_cpu_ptr(txq->pcpu, thread); per_cpu_ptr 3273 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c struct mvpp2_pcpu_stats *stats = per_cpu_ptr(port->stats, thread); per_cpu_ptr 3303 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c struct mvpp2_port_pcpu *port_pcpu = per_cpu_ptr(port->pcpu, thread); per_cpu_ptr 3743 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c port_pcpu = per_cpu_ptr(port->pcpu, thread); per_cpu_ptr 3947 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c cpu_stats = per_cpu_ptr(port->stats, cpu); per_cpu_ptr 4590 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c txq_pcpu = per_cpu_ptr(txq->pcpu, thread); per_cpu_ptr 5361 drivers/net/ethernet/marvell/mvpp2/mvpp2_main.c port_pcpu = per_cpu_ptr(port->pcpu, thread); per_cpu_ptr 1025 drivers/net/ethernet/mellanox/mlxsw/spectrum.c p = per_cpu_ptr(mlxsw_sp_port->pcpu_stats, i); per_cpu_ptr 360 drivers/net/ethernet/mellanox/mlxsw/switchx2.c p = per_cpu_ptr(mlxsw_sx_port->pcpu_stats, i); per_cpu_ptr 134 drivers/net/ethernet/netronome/nfp/nfp_net_repr.c repr_stats = per_cpu_ptr(repr->stats, i); per_cpu_ptr 1736 drivers/net/ethernet/nvidia/forcedeth.c struct nv_txrx_stats *src = per_cpu_ptr(np->txrx_stats, cpu); per_cpu_ptr 114 drivers/net/ethernet/qualcomm/rmnet/rmnet_vnd.c pcpu_ptr = per_cpu_ptr(priv->pcpu_stats, cpu); per_cpu_ptr 1167 drivers/net/hyperv/netvsc_drv.c = per_cpu_ptr(ndev_ctx->vf_stats, i); per_cpu_ptr 1197 drivers/net/hyperv/netvsc_drv.c per_cpu_ptr(ndev_ctx->vf_stats, i); per_cpu_ptr 280 drivers/net/ipvlan/ipvlan_main.c pcptr = per_cpu_ptr(ipvlan->pcpu_stats, idx); per_cpu_ptr 114 drivers/net/loopback.c lb_stats = per_cpu_ptr(dev->lstats, i); per_cpu_ptr 2159 drivers/net/macsec.c const struct macsec_tx_sa_stats *stats = per_cpu_ptr(pstats, cpu); per_cpu_ptr 2180 drivers/net/macsec.c const struct macsec_rx_sa_stats *stats = per_cpu_ptr(pstats, cpu); per_cpu_ptr 2210 drivers/net/macsec.c stats = per_cpu_ptr(pstats, cpu); per_cpu_ptr 2274 drivers/net/macsec.c stats = per_cpu_ptr(pstats, cpu); per_cpu_ptr 2314 drivers/net/macsec.c stats = per_cpu_ptr(pstats, cpu); per_cpu_ptr 2943 drivers/net/macsec.c stats = per_cpu_ptr(dev->tstats, cpu); per_cpu_ptr 927 drivers/net/macvlan.c p = per_cpu_ptr(vlan->pcpu_stats, i); per_cpu_ptr 67 drivers/net/nlmon.c nl_stats = per_cpu_ptr(dev->lstats, i); per_cpu_ptr 1034 drivers/net/ppp/ppp_generic.c (*per_cpu_ptr(ppp->xmit_recursion, cpu)) = 0; per_cpu_ptr 1848 drivers/net/team/team.c p = per_cpu_ptr(team->pcpu_stats, i); per_cpu_ptr 503 drivers/net/team/team_mode_loadbalance.c pcpu_stats = per_cpu_ptr(lb_priv->pcpu_stats, i); per_cpu_ptr 517 drivers/net/team/team_mode_loadbalance.c pcpu_stats = per_cpu_ptr(lb_priv->pcpu_stats, i); per_cpu_ptr 518 drivers/net/team/team_mode_loadbalance.c stats = per_cpu_ptr(lb_port_priv->pcpu_stats, i); per_cpu_ptr 627 drivers/net/team/team_mode_loadbalance.c team_lb_stats = per_cpu_ptr(lb_priv->pcpu_stats, i); per_cpu_ptr 1167 drivers/net/tun.c p = 
per_cpu_ptr(tun->pcpu_stats, i); per_cpu_ptr 139 drivers/net/usb/qmi_wwan.c stats64 = per_cpu_ptr(priv->stats64, cpu); per_cpu_ptr 999 drivers/net/usb/usbnet.c stats64 = per_cpu_ptr(dev->stats64, cpu); per_cpu_ptr 292 drivers/net/veth.c struct pcpu_lstats *stats = per_cpu_ptr(dev->lstats, cpu); per_cpu_ptr 89 drivers/net/vrf.c dstats = per_cpu_ptr(dev->dstats, i); per_cpu_ptr 74 drivers/net/vsockmon.c vstats = per_cpu_ptr(dev->lstats, i); per_cpu_ptr 456 drivers/net/wireless/ath/ath9k/common-spectral.c if ((buf = *per_cpu_ptr(rc->buf, i))) { per_cpu_ptr 2003 drivers/net/wireless/intel/iwlwifi/pcie/trans.c per_cpu_ptr(trans_pcie->tso_hdr_page, i); per_cpu_ptr 132 drivers/net/wireless/quantenna/qtnfmac/core.c stats64 = per_cpu_ptr(vif->stats64, cpu); per_cpu_ptr 1102 drivers/net/xen-netfront.c struct netfront_stats *rx_stats = per_cpu_ptr(np->rx_stats, cpu); per_cpu_ptr 1103 drivers/net/xen-netfront.c struct netfront_stats *tx_stats = per_cpu_ptr(np->tx_stats, cpu); per_cpu_ptr 906 drivers/nvdimm/region_devs.c ndl_count = per_cpu_ptr(nd_region->lane, cpu); per_cpu_ptr 907 drivers/nvdimm/region_devs.c ndl_lock = per_cpu_ptr(nd_region->lane, lane); per_cpu_ptr 923 drivers/nvdimm/region_devs.c ndl_count = per_cpu_ptr(nd_region->lane, cpu); per_cpu_ptr 924 drivers/nvdimm/region_devs.c ndl_lock = per_cpu_ptr(nd_region->lane, lane); per_cpu_ptr 999 drivers/nvdimm/region_devs.c ndl = per_cpu_ptr(nd_region->lane, i); per_cpu_ptr 555 drivers/perf/arm_pmu.c free_irq(irq, per_cpu_ptr(&cpu_armpmu, cpu)); per_cpu_ptr 586 drivers/perf/arm_pmu.c per_cpu_ptr(&cpu_armpmu, cpu)); per_cpu_ptr 825 drivers/perf/arm_pmu.c events = per_cpu_ptr(pmu->hw_events, cpu); per_cpu_ptr 208 drivers/perf/qcom_l2_pmu.c return *per_cpu_ptr(l2cache_pmu->pmu_cluster, cpu); per_cpu_ptr 805 drivers/perf/qcom_l2_pmu.c *per_cpu_ptr(l2cache_pmu->pmu_cluster, cpu) = cluster; per_cpu_ptr 89 drivers/powercap/idle_inject.c iit = per_cpu_ptr(&idle_inject_thread, cpu); per_cpu_ptr 134 drivers/powercap/idle_inject.c iit = per_cpu_ptr(&idle_inject_thread, cpu); per_cpu_ptr 240 drivers/powercap/idle_inject.c iit = per_cpu_ptr(&idle_inject_thread, cpu); per_cpu_ptr 272 drivers/powercap/idle_inject.c per_cpu_ptr(&idle_inject_thread, cpu); per_cpu_ptr 400 drivers/scsi/bnx2fc/bnx2fc_fcoe.c stats = per_cpu_ptr(lport->stats, get_cpu()); per_cpu_ptr 625 drivers/scsi/bnx2fc/bnx2fc_fcoe.c stats = per_cpu_ptr(lport->stats, smp_processor_id()); per_cpu_ptr 960 drivers/scsi/bnx2fc/bnx2fc_fcoe.c per_cpu_ptr(lport->stats, per_cpu_ptr 2055 drivers/scsi/bnx2fc/bnx2fc_io.c stats = per_cpu_ptr(lport->stats, get_cpu()); per_cpu_ptr 1291 drivers/scsi/fcoe/fcoe.c p = per_cpu_ptr(&fcoe_percpu, cpu); per_cpu_ptr 1438 drivers/scsi/fcoe/fcoe.c per_cpu_ptr(lport->stats, get_cpu())->ErrorFrames++; per_cpu_ptr 1589 drivers/scsi/fcoe/fcoe.c stats = per_cpu_ptr(lport->stats, get_cpu()); per_cpu_ptr 1644 drivers/scsi/fcoe/fcoe.c stats = per_cpu_ptr(lport->stats, get_cpu()); per_cpu_ptr 1689 drivers/scsi/fcoe/fcoe.c stats = per_cpu_ptr(lport->stats, get_cpu()); per_cpu_ptr 1926 drivers/scsi/fcoe/fcoe.c stats = per_cpu_ptr(lport->stats, get_cpu()); per_cpu_ptr 2492 drivers/scsi/fcoe/fcoe.c p = per_cpu_ptr(&fcoe_percpu, cpu); per_cpu_ptr 829 drivers/scsi/fcoe/fcoe_ctlr.c stats = per_cpu_ptr(fip->lp->stats, get_cpu()); per_cpu_ptr 1287 drivers/scsi/fcoe/fcoe_ctlr.c stats = per_cpu_ptr(lport->stats, get_cpu()); per_cpu_ptr 1428 drivers/scsi/fcoe/fcoe_ctlr.c per_cpu_ptr(lport->stats, per_cpu_ptr 1458 drivers/scsi/fcoe/fcoe_ctlr.c per_cpu_ptr(lport->stats, 
get_cpu())->VLinkFailureCount++; per_cpu_ptr 184 drivers/scsi/fcoe/fcoe_transport.c stats = per_cpu_ptr(lport->stats, cpu); per_cpu_ptr 2865 drivers/scsi/hpsa.c lockup_detected = per_cpu_ptr(h->lockup_detected, cpu); per_cpu_ptr 8214 drivers/scsi/hpsa.c lockup_detected = per_cpu_ptr(h->lockup_detected, cpu); per_cpu_ptr 825 drivers/scsi/libfc/fc_exch.c pool = per_cpu_ptr(mp->pool, cpu); per_cpu_ptr 940 drivers/scsi/libfc/fc_exch.c pool = per_cpu_ptr(mp->pool, cpu); per_cpu_ptr 1971 drivers/scsi/libfc/fc_exch.c per_cpu_ptr(ema->mp->pool, cpu), per_cpu_ptr 2507 drivers/scsi/libfc/fc_exch.c pool = per_cpu_ptr(mp->pool, cpu); per_cpu_ptr 150 drivers/scsi/libfc/fc_fcp.c per_cpu_ptr(lport->stats, get_cpu())->FcpPktAllocFails++; per_cpu_ptr 273 drivers/scsi/libfc/fc_fcp.c per_cpu_ptr(fsp->lp->stats, get_cpu())->FcpPktAborts++; per_cpu_ptr 442 drivers/scsi/libfc/fc_fcp.c per_cpu_ptr(lport->stats, get_cpu())->FcpFrameAllocFails++; per_cpu_ptr 539 drivers/scsi/libfc/fc_fcp.c stats = per_cpu_ptr(lport->stats, get_cpu()); per_cpu_ptr 1917 drivers/scsi/libfc/fc_fcp.c stats = per_cpu_ptr(lport->stats, get_cpu()); per_cpu_ptr 306 drivers/scsi/libfc/fc_lport.c stats = per_cpu_ptr(lport->stats, cpu); per_cpu_ptr 5072 drivers/scsi/lpfc/lpfc_attr.c eqi = per_cpu_ptr(phba->sli4_hba.eq_info, i); per_cpu_ptr 1278 drivers/scsi/lpfc/lpfc_init.c eqi = per_cpu_ptr(phba->sli4_hba.eq_info, i); per_cpu_ptr 1293 drivers/scsi/lpfc/lpfc_init.c eqi_new = per_cpu_ptr(phba->sli4_hba.eq_info, per_cpu_ptr 8843 drivers/scsi/lpfc/lpfc_init.c eqi = per_cpu_ptr(phba->sli4_hba.eq_info, qdesc->last_cpu); per_cpu_ptr 10703 drivers/scsi/lpfc/lpfc_init.c per_cpu_ptr(phba->sli4_hba.eq_info, i); per_cpu_ptr 1150 drivers/scsi/qedf/qedf_main.c stats = per_cpu_ptr(lport->stats, get_cpu()); per_cpu_ptr 444 drivers/soc/ti/knav_qmss_queue.c pushes += per_cpu_ptr(qh->stats, cpu)->pushes; per_cpu_ptr 445 drivers/soc/ti/knav_qmss_queue.c pops += per_cpu_ptr(qh->stats, cpu)->pops; per_cpu_ptr 446 drivers/soc/ti/knav_qmss_queue.c push_errors += per_cpu_ptr(qh->stats, cpu)->push_errors; per_cpu_ptr 447 drivers/soc/ti/knav_qmss_queue.c pop_errors += per_cpu_ptr(qh->stats, cpu)->pop_errors; per_cpu_ptr 448 drivers/soc/ti/knav_qmss_queue.c notifies += per_cpu_ptr(qh->stats, cpu)->notifies; per_cpu_ptr 479 drivers/thermal/intel/intel_powerclamp.c struct powerclamp_worker_data *w_data = per_cpu_ptr(worker_data, cpu); per_cpu_ptr 500 drivers/thermal/intel/intel_powerclamp.c struct powerclamp_worker_data *w_data = per_cpu_ptr(worker_data, cpu); per_cpu_ptr 62 drivers/xen/time.c state = per_cpu_ptr(&xen_runstate, cpu); per_cpu_ptr 456 drivers/xen/xen-acpi-processor.c free_cpumask_var(per_cpu_ptr(acpi_perf_data, i) per_cpu_ptr 539 drivers/xen/xen-acpi-processor.c &per_cpu_ptr(acpi_perf_data, i)->shared_cpu_map, per_cpu_ptr 554 drivers/xen/xen-acpi-processor.c perf = per_cpu_ptr(acpi_perf_data, i); per_cpu_ptr 1394 fs/buffer.c struct bh_lru *b = per_cpu_ptr(&bh_lrus, cpu); per_cpu_ptr 2672 fs/ext4/mballoc.c lg = per_cpu_ptr(sbi->s_locality_groups, i); per_cpu_ptr 1976 fs/gfs2/glock.c const struct gfs2_pcpu_lkstats *lkstats = per_cpu_ptr(sdp->sd_lkstats, i); per_cpu_ptr 1931 fs/gfs2/rgrp.c st = &per_cpu_ptr(sdp->sd_lkstats, cpu)->lkstats[LM_TYPE_RGRP]; per_cpu_ptr 693 fs/locks.c fll = per_cpu_ptr(&file_lock_list, fl->fl_link_cpu); per_cpu_ptr 3010 fs/locks.c struct file_lock_list_struct *fll = per_cpu_ptr(&file_lock_list, i); per_cpu_ptr 166 fs/namespace.c count += per_cpu_ptr(mnt->mnt_pcp, cpu)->mnt_count; per_cpu_ptr 277 fs/namespace.c count += 
per_cpu_ptr(mnt->mnt_pcp, cpu)->mnt_writers; per_cpu_ptr 879 fs/nfs/super.c stats = per_cpu_ptr(nfss->io_stats, cpu); per_cpu_ptr 1068 fs/seq_file.c hlist_for_each(node, per_cpu_ptr(head, *cpu)) { per_cpu_ptr 1099 fs/seq_file.c struct hlist_head *bucket = per_cpu_ptr(head, *cpu); per_cpu_ptr 38 fs/squashfs/decompressor_multi_percpu.c stream = per_cpu_ptr(percpu, cpu); per_cpu_ptr 51 fs/squashfs/decompressor_multi_percpu.c stream = per_cpu_ptr(percpu, cpu); per_cpu_ptr 68 fs/squashfs/decompressor_multi_percpu.c stream = per_cpu_ptr(percpu, cpu); per_cpu_ptr 15 fs/xfs/xfs_stats.c val += *(((__u32 *)per_cpu_ptr(stats, cpu) + idx)); per_cpu_ptr 70 fs/xfs/xfs_stats.c xs_xstrat_bytes += per_cpu_ptr(stats, i)->s.xs_xstrat_bytes; per_cpu_ptr 71 fs/xfs/xfs_stats.c xs_write_bytes += per_cpu_ptr(stats, i)->s.xs_write_bytes; per_cpu_ptr 72 fs/xfs/xfs_stats.c xs_read_bytes += per_cpu_ptr(stats, i)->s.xs_read_bytes; per_cpu_ptr 96 fs/xfs/xfs_stats.c vn_active = per_cpu_ptr(stats, c)->s.vn_active; per_cpu_ptr 97 fs/xfs/xfs_stats.c memset(per_cpu_ptr(stats, c), 0, sizeof(*stats)); per_cpu_ptr 98 fs/xfs/xfs_stats.c per_cpu_ptr(stats, c)->s.vn_active = vn_active; per_cpu_ptr 164 fs/xfs/xfs_stats.h per_cpu_ptr(xfsstats.xs_stats, current_cpu())->s.v++; \ per_cpu_ptr 165 fs/xfs/xfs_stats.h per_cpu_ptr(mp->m_stats.xs_stats, current_cpu())->s.v++; \ per_cpu_ptr 170 fs/xfs/xfs_stats.h per_cpu_ptr(xfsstats.xs_stats, current_cpu())->s.v--; \ per_cpu_ptr 171 fs/xfs/xfs_stats.h per_cpu_ptr(mp->m_stats.xs_stats, current_cpu())->s.v--; \ per_cpu_ptr 176 fs/xfs/xfs_stats.h per_cpu_ptr(xfsstats.xs_stats, current_cpu())->s.v += (inc); \ per_cpu_ptr 177 fs/xfs/xfs_stats.h per_cpu_ptr(mp->m_stats.xs_stats, current_cpu())->s.v += (inc); \ per_cpu_ptr 182 fs/xfs/xfs_stats.h per_cpu_ptr(xfsstats.xs_stats, current_cpu())->a[off]++; \ per_cpu_ptr 183 fs/xfs/xfs_stats.h per_cpu_ptr(mp->m_stats.xs_stats, current_cpu())->a[off]++; \ per_cpu_ptr 188 fs/xfs/xfs_stats.h per_cpu_ptr(xfsstats.xs_stats, current_cpu())->a[off]; \ per_cpu_ptr 189 fs/xfs/xfs_stats.h per_cpu_ptr(mp->m_stats.xs_stats, current_cpu())->a[off]; \ per_cpu_ptr 194 fs/xfs/xfs_stats.h per_cpu_ptr(xfsstats.xs_stats, current_cpu())->a[off] += (inc); \ per_cpu_ptr 195 fs/xfs/xfs_stats.h per_cpu_ptr(mp->m_stats.xs_stats, current_cpu())->a[off] += (inc); \ per_cpu_ptr 307 include/linux/genhd.h (per_cpu_ptr((part)->dkstats, (cpu))->field) per_cpu_ptr 317 include/linux/genhd.h res += per_cpu_ptr((part)->dkstats, _cpu)->field; \ per_cpu_ptr 326 include/linux/genhd.h memset(per_cpu_ptr(part->dkstats, i), value, per_cpu_ptr 2409 include/linux/netdevice.h stat = per_cpu_ptr(pcpu_stats, __cpu); \ per_cpu_ptr 444 include/linux/netfilter/x_tables.h return per_cpu_ptr((void __percpu *) (unsigned long) cnt->pcnt, cpu); per_cpu_ptr 264 include/linux/percpu-defs.h #define raw_cpu_ptr(ptr) per_cpu_ptr(ptr, 0) per_cpu_ptr 269 include/linux/percpu-defs.h #define per_cpu(var, cpu) (*per_cpu_ptr(&(var), cpu)) per_cpu_ptr 157 include/linux/vmstat.h x += per_cpu_ptr(zone->pageset, cpu)->vm_numa_stat_diff[item]; per_cpu_ptr 222 include/linux/vmstat.h x += per_cpu_ptr(zone->pageset, cpu)->vm_stat_diff[item]; per_cpu_ptr 483 include/net/sch_generic.h qlen += per_cpu_ptr(q->cpu_qstats, i)->qlen; per_cpu_ptr 245 kernel/bpf/arraymap.c bpf_long_memcpy(value + off, per_cpu_ptr(pptr, cpu), size); per_cpu_ptr 340 kernel/bpf/arraymap.c bpf_long_memcpy(per_cpu_ptr(pptr, cpu), value + off, size); per_cpu_ptr 407 kernel/bpf/arraymap.c per_cpu_ptr(pptr, cpu), m); per_cpu_ptr 408 kernel/bpf/bpf_lru_list.c 
l = per_cpu_ptr(lru->percpu_lru, cpu); per_cpu_ptr 441 kernel/bpf/bpf_lru_list.c loc_l = per_cpu_ptr(clru->local_list, cpu); per_cpu_ptr 470 kernel/bpf/bpf_lru_list.c steal_loc_l = per_cpu_ptr(clru->local_list, steal); per_cpu_ptr 514 kernel/bpf/bpf_lru_list.c loc_l = per_cpu_ptr(lru->common_lru.local_list, node->cpu); per_cpu_ptr 541 kernel/bpf/bpf_lru_list.c l = per_cpu_ptr(lru->percpu_lru, node->cpu); per_cpu_ptr 591 kernel/bpf/bpf_lru_list.c l = per_cpu_ptr(lru->percpu_lru, cpu); per_cpu_ptr 658 kernel/bpf/bpf_lru_list.c l = per_cpu_ptr(lru->percpu_lru, cpu); per_cpu_ptr 672 kernel/bpf/bpf_lru_list.c loc_l = per_cpu_ptr(clru->local_list, cpu); per_cpu_ptr 124 kernel/bpf/core.c pstats = per_cpu_ptr(prog->aux->stats, cpu); per_cpu_ptr 123 kernel/bpf/cpumap.c INIT_LIST_HEAD(per_cpu_ptr(cmap->flush_list, cpu)); per_cpu_ptr 356 kernel/bpf/cpumap.c bq = per_cpu_ptr(rcpu->bulkq, i); per_cpu_ptr 413 kernel/bpf/cpumap.c struct xdp_bulk_queue *bq = per_cpu_ptr(rcpu->bulkq, cpu); per_cpu_ptr 531 kernel/bpf/cpumap.c struct list_head *flush_list = per_cpu_ptr(cmap->flush_list, cpu); per_cpu_ptr 151 kernel/bpf/devmap.c INIT_LIST_HEAD(per_cpu_ptr(dtab->flush_list, cpu)); per_cpu_ptr 230 kernel/bpf/devmap.c struct list_head *flush_list = per_cpu_ptr(dtab->flush_list, cpu); per_cpu_ptr 521 kernel/bpf/devmap.c bq = per_cpu_ptr(dev->bulkq, cpu); per_cpu_ptr 606 kernel/bpf/devmap.c bq = per_cpu_ptr(dev->bulkq, cpu); per_cpu_ptr 219 kernel/bpf/hashtab.c *per_cpu_ptr(pptr, cpu) = l_new; per_cpu_ptr 708 kernel/bpf/hashtab.c bpf_long_memcpy(per_cpu_ptr(pptr, cpu), per_cpu_ptr 1306 kernel/bpf/hashtab.c per_cpu_ptr(pptr, cpu), size); per_cpu_ptr 1354 kernel/bpf/hashtab.c per_cpu_ptr(pptr, cpu), m); per_cpu_ptr 193 kernel/bpf/local_storage.c per_cpu_ptr(storage->percpu_buf, cpu), size); per_cpu_ptr 227 kernel/bpf/local_storage.c bpf_long_memcpy(per_cpu_ptr(storage->percpu_buf, cpu), per_cpu_ptr 403 kernel/bpf/local_storage.c per_cpu_ptr(storage->percpu_buf, cpu), per_cpu_ptr 15 kernel/bpf/percpu_freelist.c struct pcpu_freelist_head *head = per_cpu_ptr(s->freelist, cpu); per_cpu_ptr 72 kernel/bpf/percpu_freelist.c head = per_cpu_ptr(s->freelist, cpu); per_cpu_ptr 92 kernel/bpf/percpu_freelist.c head = per_cpu_ptr(s->freelist, cpu); per_cpu_ptr 627 kernel/bpf/stackmap.c work = per_cpu_ptr(&up_read_work, cpu); per_cpu_ptr 1388 kernel/bpf/syscall.c st = per_cpu_ptr(prog->aux->stats, cpu); per_cpu_ptr 117 kernel/bpf/xskmap.c INIT_LIST_HEAD(per_cpu_ptr(m->flush_list, cpu)); per_cpu_ptr 13 kernel/cgroup/rstat.c return per_cpu_ptr(cgrp->rstat_cpu, cpu); per_cpu_ptr 27 kernel/cgroup/rstat.c raw_spinlock_t *cpu_lock = per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu); per_cpu_ptr 150 kernel/cgroup/rstat.c raw_spinlock_t *cpu_lock = per_cpu_ptr(&cgroup_rstat_cpu_lock, per_cpu_ptr 288 kernel/cgroup/rstat.c raw_spin_lock_init(per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu)); per_cpu_ptr 151 kernel/cpu.c struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); per_cpu_ptr 521 kernel/cpu.c struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); per_cpu_ptr 616 kernel/cpu.c struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); per_cpu_ptr 720 kernel/cpu.c struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); per_cpu_ptr 769 kernel/cpu.c struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); per_cpu_ptr 882 kernel/cpu.c struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); per_cpu_ptr 886 kernel/cpu.c kthread_park(per_cpu_ptr(&cpuhp_state, cpu)->thread); per_cpu_ptr 902 kernel/cpu.c 
kthread_unpark(per_cpu_ptr(&cpuhp_state, cpu)->thread); per_cpu_ptr 979 kernel/cpu.c struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); per_cpu_ptr 1073 kernel/cpu.c struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); per_cpu_ptr 1115 kernel/cpu.c struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); per_cpu_ptr 1662 kernel/cpu.c struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); per_cpu_ptr 1698 kernel/cpu.c struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); per_cpu_ptr 1781 kernel/cpu.c struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); per_cpu_ptr 1845 kernel/cpu.c struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); per_cpu_ptr 1897 kernel/cpu.c struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, cpu); per_cpu_ptr 1993 kernel/cpu.c struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, dev->id); per_cpu_ptr 2003 kernel/cpu.c struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, dev->id); per_cpu_ptr 2042 kernel/cpu.c struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, dev->id); per_cpu_ptr 2053 kernel/cpu.c struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, dev->id); per_cpu_ptr 2089 kernel/cpu.c struct cpuhp_cpu_state *st = per_cpu_ptr(&cpuhp_state, dev->id); per_cpu_ptr 714 kernel/events/core.c t = per_cpu_ptr(event->cgrp->info, event->cpu); per_cpu_ptr 939 kernel/events/core.c t = per_cpu_ptr(event->cgrp->info, event->cpu); per_cpu_ptr 4240 kernel/events/core.c cpuctx = per_cpu_ptr(pmu->pmu_cpu_context, cpu); per_cpu_ptr 4339 kernel/events/core.c struct pmu_event_list *pel = per_cpu_ptr(&pmu_sb_events, event->cpu); per_cpu_ptr 9977 kernel/events/core.c cpuctx = per_cpu_ptr(pmu->pmu_cpu_context, cpu); per_cpu_ptr 10114 kernel/events/core.c cpuctx = per_cpu_ptr(pmu->pmu_cpu_context, cpu); per_cpu_ptr 10301 kernel/events/core.c struct pmu_event_list *pel = per_cpu_ptr(&pmu_sb_events, event->cpu); per_cpu_ptr 11434 kernel/events/core.c src_ctx = &per_cpu_ptr(pmu->pmu_cpu_context, src_cpu)->ctx; per_cpu_ptr 11435 kernel/events/core.c dst_ctx = &per_cpu_ptr(pmu->pmu_cpu_context, dst_cpu)->ctx; per_cpu_ptr 12166 kernel/events/core.c cpuctx = per_cpu_ptr(pmu->pmu_cpu_context, cpu); per_cpu_ptr 12194 kernel/events/core.c cpuctx = per_cpu_ptr(pmu->pmu_cpu_context, cpu); per_cpu_ptr 54 kernel/events/hw_breakpoint.c return per_cpu_ptr(bp_cpuinfo + type, cpu); per_cpu_ptr 196 kernel/fork.c struct vm_struct **cached_vm_stacks = per_cpu_ptr(cached_stacks, cpu); per_cpu_ptr 126 kernel/irq/irqdesc.c *per_cpu_ptr(desc->kstat_irqs, cpu) = 0; per_cpu_ptr 963 kernel/irq/irqdesc.c *per_cpu_ptr(desc->kstat_irqs, cpu) : 0; per_cpu_ptr 993 kernel/irq/irqdesc.c sum += *per_cpu_ptr(desc->kstat_irqs, cpu); per_cpu_ptr 138 kernel/irq/matrix.c cm = per_cpu_ptr(m->maps, cpu); per_cpu_ptr 159 kernel/irq/matrix.c cm = per_cpu_ptr(m->maps, cpu); per_cpu_ptr 215 kernel/irq/matrix.c struct cpumap *cm = per_cpu_ptr(m->maps, cpu); per_cpu_ptr 256 kernel/irq/matrix.c struct cpumap *cm = per_cpu_ptr(m->maps, cpu); per_cpu_ptr 298 kernel/irq/matrix.c cm = per_cpu_ptr(m->maps, cpu); per_cpu_ptr 387 kernel/irq/matrix.c cm = per_cpu_ptr(m->maps, cpu); per_cpu_ptr 414 kernel/irq/matrix.c struct cpumap *cm = per_cpu_ptr(m->maps, cpu); per_cpu_ptr 495 kernel/irq/matrix.c struct cpumap *cm = per_cpu_ptr(m->maps, cpu); per_cpu_ptr 493 kernel/irq/proc.c any_count |= *per_cpu_ptr(desc->kstat_irqs, j); per_cpu_ptr 501 kernel/irq/proc.c *per_cpu_ptr(desc->kstat_irqs, j) : 0); per_cpu_ptr 1070 kernel/kexec_core.c buf = (u32 *)per_cpu_ptr(crash_notes, cpu); 
per_cpu_ptr 1289 kernel/kexec_file.c notes_addr = per_cpu_ptr_to_phys(per_cpu_ptr(crash_notes, cpu)); per_cpu_ptr 101 kernel/locking/lock_events.c unsigned long *ptr = per_cpu_ptr(lockevents, cpu); per_cpu_ptr 34 kernel/locking/osq_lock.c return per_cpu_ptr(&osq_node, cpu_nr); per_cpu_ptr 129 kernel/locking/qspinlock.c return per_cpu_ptr(&qnodes[idx].mcs, cpu); per_cpu_ptr 685 kernel/module.c memcpy(per_cpu_ptr(mod->percpu, cpu), from, size); per_cpu_ptr 701 kernel/module.c void *start = per_cpu_ptr(mod->percpu, cpu); per_cpu_ptr 708 kernel/module.c per_cpu_ptr(mod->percpu, per_cpu_ptr 148 kernel/padata.c queue = per_cpu_ptr(pd->pqueue, target_cpu); per_cpu_ptr 183 kernel/padata.c next_queue = per_cpu_ptr(pd->pqueue, cpu); per_cpu_ptr 247 kernel/padata.c squeue = per_cpu_ptr(pd->squeue, cb_cpu); per_cpu_ptr 268 kernel/padata.c next_queue = per_cpu_ptr(pd->pqueue, pd->cpu); per_cpu_ptr 329 kernel/padata.c struct padata_parallel_queue *pqueue = per_cpu_ptr(pd->pqueue, per_cpu_ptr 405 kernel/padata.c squeue = per_cpu_ptr(pd->squeue, cpu); per_cpu_ptr 419 kernel/padata.c pqueue = per_cpu_ptr(pd->pqueue, cpu); per_cpu_ptr 249 kernel/power/energy_model.c smp_store_release(per_cpu_ptr(&em_data, cpu), pd); per_cpu_ptr 134 kernel/rcu/srcutree.c sdp = per_cpu_ptr(ssp->sda, cpu); per_cpu_ptr 253 kernel/rcu/srcutree.c struct srcu_data *cpuc = per_cpu_ptr(ssp->sda, cpu); per_cpu_ptr 270 kernel/rcu/srcutree.c struct srcu_data *cpuc = per_cpu_ptr(ssp->sda, cpu); per_cpu_ptr 339 kernel/rcu/srcutree.c struct srcu_data *cpuc = per_cpu_ptr(ssp->sda, cpu); per_cpu_ptr 380 kernel/rcu/srcutree.c struct srcu_data *sdp = per_cpu_ptr(ssp->sda, cpu); per_cpu_ptr 499 kernel/rcu/srcutree.c srcu_schedule_cbs_sdp(per_cpu_ptr(ssp->sda, cpu), delay); per_cpu_ptr 563 kernel/rcu/srcutree.c sdp = per_cpu_ptr(ssp->sda, cpu); per_cpu_ptr 1050 kernel/rcu/srcutree.c sdp = per_cpu_ptr(ssp->sda, cpu); per_cpu_ptr 1268 kernel/rcu/srcutree.c sdp = per_cpu_ptr(ssp->sda, cpu); per_cpu_ptr 212 kernel/rcu/tree.c struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); per_cpu_ptr 923 kernel/rcu/tree.c smp_store_release(per_cpu_ptr(&rcu_data.rcu_urgent_qs, cpu), true); per_cpu_ptr 1054 kernel/rcu/tree.c ruqp = per_cpu_ptr(&rcu_data.rcu_urgent_qs, rdp->cpu); per_cpu_ptr 2095 kernel/rcu/tree.c struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); per_cpu_ptr 2104 kernel/rcu/tree.c do_nocb_deferred_wakeup(per_cpu_ptr(&rcu_data, cpu)); per_cpu_ptr 2287 kernel/rcu/tree.c if (f(per_cpu_ptr(&rcu_data, cpu))) per_cpu_ptr 2926 kernel/rcu/tree.c rdp = per_cpu_ptr(&rcu_data, cpu); per_cpu_ptr 2994 kernel/rcu/tree.c struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); per_cpu_ptr 3021 kernel/rcu/tree.c struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); per_cpu_ptr 3063 kernel/rcu/tree.c struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); per_cpu_ptr 3078 kernel/rcu/tree.c rdp = per_cpu_ptr(&rcu_data, cpu); per_cpu_ptr 3100 kernel/rcu/tree.c rdp = per_cpu_ptr(&rcu_data, cpu); per_cpu_ptr 3137 kernel/rcu/tree.c rdp = per_cpu_ptr(&rcu_data, cpu); per_cpu_ptr 3173 kernel/rcu/tree.c struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); per_cpu_ptr 3210 kernel/rcu/tree.c struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); per_cpu_ptr 3412 kernel/rcu/tree.c per_cpu_ptr(&rcu_data, i)->mynode = rnp; per_cpu_ptr 277 kernel/rcu/tree_exp.h struct rcu_data *rdp = per_cpu_ptr(&rcu_data, raw_smp_processor_id()); per_cpu_ptr 349 kernel/rcu/tree_exp.h struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); per_cpu_ptr 377 kernel/rcu/tree_exp.h struct rcu_data *rdp = 
per_cpu_ptr(&rcu_data, cpu); per_cpu_ptr 495 kernel/rcu/tree_exp.h rdp = per_cpu_ptr(&rcu_data, cpu); per_cpu_ptr 736 kernel/rcu/tree_exp.h rdp = per_cpu_ptr(&rcu_data, cpu); per_cpu_ptr 771 kernel/rcu/tree_plugin.h rdp = per_cpu_ptr(&rcu_data, cpu); per_cpu_ptr 1188 kernel/rcu/tree_plugin.h struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); per_cpu_ptr 2232 kernel/rcu/tree_plugin.h rdp = per_cpu_ptr(&rcu_data, cpu); per_cpu_ptr 2260 kernel/rcu/tree_plugin.h struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); per_cpu_ptr 2346 kernel/rcu/tree_plugin.h rdp = per_cpu_ptr(&rcu_data, cpu); per_cpu_ptr 297 kernel/rcu/tree_stall.h struct rcu_data *rdp = per_cpu_ptr(&rcu_data, cpu); per_cpu_ptr 583 kernel/rcu/tree_stall.h rdp = per_cpu_ptr(&rcu_data, cpu); per_cpu_ptr 593 kernel/rcu/tree_stall.h rdp = per_cpu_ptr(&rcu_data, cpu); per_cpu_ptr 218 kernel/relay.c *per_cpu_ptr(chan->buf, buf->cpu) = NULL; per_cpu_ptr 395 kernel/relay.c if (chan->is_global && (buf = *per_cpu_ptr(chan->buf, 0))) { per_cpu_ptr 402 kernel/relay.c if ((buf = *per_cpu_ptr(chan->buf, i))) per_cpu_ptr 450 kernel/relay.c return *per_cpu_ptr(chan->buf, 0); per_cpu_ptr 474 kernel/relay.c *per_cpu_ptr(chan->buf, 0) = buf; per_cpu_ptr 529 kernel/relay.c if ((buf = *per_cpu_ptr(chan->buf, cpu))) per_cpu_ptr 537 kernel/relay.c *per_cpu_ptr(chan->buf, cpu) = buf; per_cpu_ptr 607 kernel/relay.c *per_cpu_ptr(chan->buf, i) = buf; per_cpu_ptr 616 kernel/relay.c if ((buf = *per_cpu_ptr(chan->buf, i))) per_cpu_ptr 681 kernel/relay.c buf = *per_cpu_ptr(chan->buf, 0); per_cpu_ptr 700 kernel/relay.c buf = *per_cpu_ptr(chan->buf, i); per_cpu_ptr 822 kernel/relay.c buf = *per_cpu_ptr(chan->buf, cpu); per_cpu_ptr 848 kernel/relay.c if (chan->is_global && (buf = *per_cpu_ptr(chan->buf, 0))) per_cpu_ptr 852 kernel/relay.c if ((buf = *per_cpu_ptr(chan->buf, i))) per_cpu_ptr 880 kernel/relay.c if (chan->is_global && (buf = *per_cpu_ptr(chan->buf, 0))) { per_cpu_ptr 887 kernel/relay.c if ((buf = *per_cpu_ptr(chan->buf, i))) per_cpu_ptr 3713 kernel/sched/core.c twork = per_cpu_ptr(tick_work_cpu, cpu); per_cpu_ptr 3734 kernel/sched/core.c twork = per_cpu_ptr(tick_work_cpu, cpu); per_cpu_ptr 101 kernel/sched/cpuacct.c struct cpuacct_usage *cpuusage = per_cpu_ptr(ca->cpuusage, cpu); per_cpu_ptr 136 kernel/sched/cpuacct.c struct cpuacct_usage *cpuusage = per_cpu_ptr(ca->cpuusage, cpu); per_cpu_ptr 245 kernel/sched/cpuacct.c struct cpuacct_usage *cpuusage = per_cpu_ptr(ca->cpuusage, cpu); per_cpu_ptr 278 kernel/sched/cpuacct.c u64 *cpustat = per_cpu_ptr(ca->cpustat, cpu)->cpustat; per_cpu_ptr 187 kernel/sched/psi.c seqcount_init(&per_cpu_ptr(group->pcpu, cpu)->seq); per_cpu_ptr 241 kernel/sched/psi.c struct psi_group_cpu *groupc = per_cpu_ptr(group->pcpu, cpu); per_cpu_ptr 680 kernel/sched/psi.c groupc = per_cpu_ptr(group->pcpu, cpu); per_cpu_ptr 799 kernel/sched/psi.c groupc = per_cpu_ptr(group->pcpu, cpu); per_cpu_ptr 2308 kernel/sched/sched.h data = rcu_dereference_sched(*per_cpu_ptr(&cpufreq_update_util_data, per_cpu_ptr 850 kernel/sched/topology.c sibling = *per_cpu_ptr(sdd->sd, i); per_cpu_ptr 909 kernel/sched/topology.c sg->sgc = *per_cpu_ptr(sdd->sgc, cpu); per_cpu_ptr 944 kernel/sched/topology.c sibling = *per_cpu_ptr(sdd->sd, i); per_cpu_ptr 1059 kernel/sched/topology.c struct sched_domain *sd = *per_cpu_ptr(sdd->sd, cpu); per_cpu_ptr 1067 kernel/sched/topology.c sg = *per_cpu_ptr(sdd->sg, cpu); per_cpu_ptr 1068 kernel/sched/topology.c sg->sgc = *per_cpu_ptr(sdd->sgc, cpu); per_cpu_ptr 1265 kernel/sched/topology.c 
WARN_ON_ONCE(*per_cpu_ptr(sdd->sd, cpu) != sd); per_cpu_ptr 1266 kernel/sched/topology.c *per_cpu_ptr(sdd->sd, cpu) = NULL; per_cpu_ptr 1268 kernel/sched/topology.c if (atomic_read(&(*per_cpu_ptr(sdd->sds, cpu))->ref)) per_cpu_ptr 1269 kernel/sched/topology.c *per_cpu_ptr(sdd->sds, cpu) = NULL; per_cpu_ptr 1271 kernel/sched/topology.c if (atomic_read(&(*per_cpu_ptr(sdd->sg, cpu))->ref)) per_cpu_ptr 1272 kernel/sched/topology.c *per_cpu_ptr(sdd->sg, cpu) = NULL; per_cpu_ptr 1274 kernel/sched/topology.c if (atomic_read(&(*per_cpu_ptr(sdd->sgc, cpu))->ref)) per_cpu_ptr 1275 kernel/sched/topology.c *per_cpu_ptr(sdd->sgc, cpu) = NULL; per_cpu_ptr 1320 kernel/sched/topology.c struct sched_domain *sd = *per_cpu_ptr(sdd->sd, cpu); per_cpu_ptr 1422 kernel/sched/topology.c sd->shared = *per_cpu_ptr(sdd->sds, sd_id); per_cpu_ptr 1785 kernel/sched/topology.c *per_cpu_ptr(sdd->sd, j) = sd; per_cpu_ptr 1792 kernel/sched/topology.c *per_cpu_ptr(sdd->sds, j) = sds; per_cpu_ptr 1801 kernel/sched/topology.c *per_cpu_ptr(sdd->sg, j) = sg; per_cpu_ptr 1812 kernel/sched/topology.c *per_cpu_ptr(sdd->sgc, j) = sgc; per_cpu_ptr 1831 kernel/sched/topology.c sd = *per_cpu_ptr(sdd->sd, j); per_cpu_ptr 1834 kernel/sched/topology.c kfree(*per_cpu_ptr(sdd->sd, j)); per_cpu_ptr 1838 kernel/sched/topology.c kfree(*per_cpu_ptr(sdd->sds, j)); per_cpu_ptr 1840 kernel/sched/topology.c kfree(*per_cpu_ptr(sdd->sg, j)); per_cpu_ptr 1842 kernel/sched/topology.c kfree(*per_cpu_ptr(sdd->sgc, j)); per_cpu_ptr 2023 kernel/sched/topology.c *per_cpu_ptr(d.sd, i) = sd; per_cpu_ptr 2033 kernel/sched/topology.c for (sd = *per_cpu_ptr(d.sd, i); sd; sd = sd->parent) { per_cpu_ptr 2050 kernel/sched/topology.c for (sd = *per_cpu_ptr(d.sd, i); sd; sd = sd->parent) { per_cpu_ptr 2060 kernel/sched/topology.c sd = *per_cpu_ptr(d.sd, i); per_cpu_ptr 466 kernel/smp.c call_single_data_t *csd = per_cpu_ptr(cfd->csd, cpu); per_cpu_ptr 484 kernel/smp.c csd = per_cpu_ptr(cfd->csd, cpu); per_cpu_ptr 173 kernel/smpboot.c struct task_struct *tsk = *per_cpu_ptr(ht->store, cpu); per_cpu_ptr 197 kernel/smpboot.c *per_cpu_ptr(ht->store, cpu) = tsk; per_cpu_ptr 230 kernel/smpboot.c struct task_struct *tsk = *per_cpu_ptr(ht->store, cpu); per_cpu_ptr 249 kernel/smpboot.c struct task_struct *tsk = *per_cpu_ptr(ht->store, cpu); per_cpu_ptr 272 kernel/smpboot.c struct task_struct *tsk = *per_cpu_ptr(ht->store, cpu); per_cpu_ptr 277 kernel/smpboot.c *per_cpu_ptr(ht->store, cpu) = NULL; per_cpu_ptr 247 kernel/stop_machine.c struct cpu_stopper *stopper1 = per_cpu_ptr(&cpu_stopper, cpu1); per_cpu_ptr 248 kernel/stop_machine.c struct cpu_stopper *stopper2 = per_cpu_ptr(&cpu_stopper, cpu2); per_cpu_ptr 315 kernel/time/tick-sched.c ts = per_cpu_ptr(&tick_cpu_sched, cpu); per_cpu_ptr 334 kernel/time/tick-sched.c struct tick_sched *ts = per_cpu_ptr(&tick_cpu_sched, cpu); per_cpu_ptr 487 kernel/time/tick-sched.c struct tick_sched *ts = per_cpu_ptr(&tick_cpu_sched, cpu); per_cpu_ptr 838 kernel/time/timer.c struct timer_base *base = per_cpu_ptr(&timer_bases[BASE_STD], cpu); per_cpu_ptr 845 kernel/time/timer.c base = per_cpu_ptr(&timer_bases[BASE_DEF], cpu); per_cpu_ptr 1964 kernel/time/timer.c base = per_cpu_ptr(&timer_bases[b], cpu); per_cpu_ptr 1982 kernel/time/timer.c old_base = per_cpu_ptr(&timer_bases[b], cpu); per_cpu_ptr 2017 kernel/time/timer.c base = per_cpu_ptr(&timer_bases[i], cpu); per_cpu_ptr 274 kernel/trace/blktrace.c sequence = per_cpu_ptr(bt->sequence, cpu); per_cpu_ptr 1464 kernel/trace/bpf_trace.c work = per_cpu_ptr(&send_signal_work, cpu); per_cpu_ptr 6485 
kernel/trace/ftrace.c per_cpu_ptr(tr->trace_buffer.data, cpu)->ftrace_ignore_pid = false; per_cpu_ptr 1512 kernel/trace/trace.c struct trace_array_cpu *data = per_cpu_ptr(trace_buf->data, cpu); per_cpu_ptr 1513 kernel/trace/trace.c struct trace_array_cpu *max_data = per_cpu_ptr(max_buf->data, cpu); per_cpu_ptr 3415 kernel/trace/trace.c per_cpu_ptr(iter->trace_buffer->data, cpu)->skipped_entries = 0; per_cpu_ptr 3435 kernel/trace/trace.c per_cpu_ptr(iter->trace_buffer->data, cpu)->skipped_entries = entries; per_cpu_ptr 3531 kernel/trace/trace.c if (per_cpu_ptr(buf->data, cpu)->skipped_entries) { per_cpu_ptr 3532 kernel/trace/trace.c count -= per_cpu_ptr(buf->data, cpu)->skipped_entries; per_cpu_ptr 3639 kernel/trace/trace.c struct trace_array_cpu *data = per_cpu_ptr(buf->data, buf->cpu); per_cpu_ptr 3711 kernel/trace/trace.c if (per_cpu_ptr(iter->trace_buffer->data, iter->cpu)->skipped_entries) per_cpu_ptr 4494 kernel/trace/trace.c atomic_inc(&per_cpu_ptr(tr->trace_buffer.data, cpu)->disabled); per_cpu_ptr 4499 kernel/trace/trace.c atomic_dec(&per_cpu_ptr(tr->trace_buffer.data, cpu)->disabled); per_cpu_ptr 5459 kernel/trace/trace.c per_cpu_ptr(buf->data, cpu)->entries = val; per_cpu_ptr 5472 kernel/trace/trace.c per_cpu_ptr(size_buf->data, cpu)->entries, cpu); per_cpu_ptr 5475 kernel/trace/trace.c per_cpu_ptr(trace_buf->data, cpu)->entries = per_cpu_ptr 5476 kernel/trace/trace.c per_cpu_ptr(size_buf->data, cpu)->entries; per_cpu_ptr 5480 kernel/trace/trace.c per_cpu_ptr(size_buf->data, cpu_id)->entries, cpu_id); per_cpu_ptr 5482 kernel/trace/trace.c per_cpu_ptr(trace_buf->data, cpu_id)->entries = per_cpu_ptr 5483 kernel/trace/trace.c per_cpu_ptr(size_buf->data, cpu_id)->entries; per_cpu_ptr 5543 kernel/trace/trace.c per_cpu_ptr(tr->max_buffer.data, cpu)->entries = size; per_cpu_ptr 5551 kernel/trace/trace.c per_cpu_ptr(tr->trace_buffer.data, cpu)->entries = size; per_cpu_ptr 6272 kernel/trace/trace.c size = per_cpu_ptr(tr->trace_buffer.data, cpu)->entries; per_cpu_ptr 6273 kernel/trace/trace.c if (size != per_cpu_ptr(tr->trace_buffer.data, cpu)->entries) { per_cpu_ptr 6289 kernel/trace/trace.c r = sprintf(buf, "%lu\n", per_cpu_ptr(tr->trace_buffer.data, cpu)->entries >> 10); per_cpu_ptr 6336 kernel/trace/trace.c size += per_cpu_ptr(tr->trace_buffer.data, cpu)->entries >> 10; per_cpu_ptr 8951 kernel/trace/trace.c atomic_inc(&per_cpu_ptr(iter.trace_buffer->data, cpu)->disabled); per_cpu_ptr 9019 kernel/trace/trace.c atomic_dec(&per_cpu_ptr(iter.trace_buffer->data, cpu)->disabled); per_cpu_ptr 107 kernel/trace/trace_event_perf.c INIT_HLIST_HEAD(per_cpu_ptr(list, cpu)); per_cpu_ptr 628 kernel/trace/trace_events.c per_cpu_ptr(tr->trace_buffer.data, cpu)->ignore_pid = false; per_cpu_ptr 146 kernel/trace/trace_functions.c data = per_cpu_ptr(tr->trace_buffer.data, cpu); per_cpu_ptr 195 kernel/trace/trace_functions.c data = per_cpu_ptr(tr->trace_buffer.data, cpu); per_cpu_ptr 174 kernel/trace/trace_functions_graph.c data = per_cpu_ptr(tr->trace_buffer.data, cpu); per_cpu_ptr 255 kernel/trace/trace_functions_graph.c data = per_cpu_ptr(tr->trace_buffer.data, cpu); per_cpu_ptr 393 kernel/trace/trace_functions_graph.c last_pid = &(per_cpu_ptr(data->cpu_data, cpu)->last_pid); per_cpu_ptr 646 kernel/trace/trace_functions_graph.c cpu_data = per_cpu_ptr(data->cpu_data, cpu); per_cpu_ptr 690 kernel/trace/trace_functions_graph.c cpu_data = per_cpu_ptr(data->cpu_data, cpu); per_cpu_ptr 790 kernel/trace/trace_functions_graph.c depth_irq = &(per_cpu_ptr(data->cpu_data, cpu)->depth_irq); per_cpu_ptr 836 
kernel/trace/trace_functions_graph.c depth_irq = &(per_cpu_ptr(data->cpu_data, cpu)->depth_irq); per_cpu_ptr 920 kernel/trace/trace_functions_graph.c cpu_data = per_cpu_ptr(data->cpu_data, cpu); per_cpu_ptr 982 kernel/trace/trace_functions_graph.c depth = per_cpu_ptr(data->cpu_data, iter->cpu)->depth; per_cpu_ptr 1048 kernel/trace/trace_functions_graph.c if (data && per_cpu_ptr(data->cpu_data, cpu)->ignore) { per_cpu_ptr 1049 kernel/trace/trace_functions_graph.c per_cpu_ptr(data->cpu_data, cpu)->ignore = 0; per_cpu_ptr 1062 kernel/trace/trace_functions_graph.c per_cpu_ptr(data->cpu_data, iter->cpu)->ignore = 1; per_cpu_ptr 1221 kernel/trace/trace_functions_graph.c pid_t *pid = &(per_cpu_ptr(data->cpu_data, cpu)->last_pid); per_cpu_ptr 1222 kernel/trace/trace_functions_graph.c int *depth = &(per_cpu_ptr(data->cpu_data, cpu)->depth); per_cpu_ptr 1223 kernel/trace/trace_functions_graph.c int *ignore = &(per_cpu_ptr(data->cpu_data, cpu)->ignore); per_cpu_ptr 1224 kernel/trace/trace_functions_graph.c int *depth_irq = &(per_cpu_ptr(data->cpu_data, cpu)->depth_irq); per_cpu_ptr 125 kernel/trace/trace_irqsoff.c *data = per_cpu_ptr(tr->trace_buffer.data, cpu); per_cpu_ptr 385 kernel/trace/trace_irqsoff.c data = per_cpu_ptr(tr->trace_buffer.data, cpu); per_cpu_ptr 423 kernel/trace/trace_irqsoff.c data = per_cpu_ptr(tr->trace_buffer.data, cpu); per_cpu_ptr 127 kernel/trace/trace_kdb.c atomic_inc(&per_cpu_ptr(iter.trace_buffer->data, cpu)->disabled); per_cpu_ptr 142 kernel/trace/trace_kdb.c atomic_dec(&per_cpu_ptr(iter.trace_buffer->data, cpu)->disabled); per_cpu_ptr 179 kernel/trace/trace_kprobe.c nhit += *per_cpu_ptr(tk->nhit, cpu); per_cpu_ptr 321 kernel/trace/trace_mmiotrace.c struct trace_array_cpu *data = per_cpu_ptr(tr->trace_buffer.data, smp_processor_id()); per_cpu_ptr 354 kernel/trace/trace_mmiotrace.c data = per_cpu_ptr(tr->trace_buffer.data, smp_processor_id()); per_cpu_ptr 85 kernel/trace/trace_sched_wakeup.c *data = per_cpu_ptr(tr->trace_buffer.data, cpu); per_cpu_ptr 462 kernel/trace/trace_sched_wakeup.c disabled = atomic_inc_return(&per_cpu_ptr(wakeup_trace->trace_buffer.data, cpu)->disabled); per_cpu_ptr 474 kernel/trace/trace_sched_wakeup.c data = per_cpu_ptr(wakeup_trace->trace_buffer.data, wakeup_cpu); per_cpu_ptr 497 kernel/trace/trace_sched_wakeup.c atomic_dec(&per_cpu_ptr(wakeup_trace->trace_buffer.data, cpu)->disabled); per_cpu_ptr 554 kernel/trace/trace_sched_wakeup.c disabled = atomic_inc_return(&per_cpu_ptr(wakeup_trace->trace_buffer.data, cpu)->disabled); per_cpu_ptr 586 kernel/trace/trace_sched_wakeup.c data = per_cpu_ptr(wakeup_trace->trace_buffer.data, wakeup_cpu); per_cpu_ptr 601 kernel/trace/trace_sched_wakeup.c atomic_dec(&per_cpu_ptr(wakeup_trace->trace_buffer.data, cpu)->disabled); per_cpu_ptr 858 kernel/trace/trace_uprobe.c per_cpu_ptr(uprobe_cpu_buffer, cpu)->buf = page_address(p); per_cpu_ptr 859 kernel/trace/trace_uprobe.c mutex_init(&per_cpu_ptr(uprobe_cpu_buffer, cpu)->mutex); per_cpu_ptr 868 kernel/trace/trace_uprobe.c free_page((unsigned long)per_cpu_ptr(uprobe_cpu_buffer, cpu)->buf); per_cpu_ptr 898 kernel/trace/trace_uprobe.c free_page((unsigned long)per_cpu_ptr(uprobe_cpu_buffer, per_cpu_ptr 912 kernel/trace/trace_uprobe.c ucb = per_cpu_ptr(uprobe_cpu_buffer, cpu); per_cpu_ptr 1428 kernel/workqueue.c pwq = per_cpu_ptr(wq->cpu_pwqs, cpu); per_cpu_ptr 3295 kernel/workqueue.c struct work_struct *work = per_cpu_ptr(works, cpu); per_cpu_ptr 3302 kernel/workqueue.c flush_work(per_cpu_ptr(works, cpu)); per_cpu_ptr 4157 kernel/workqueue.c 
per_cpu_ptr 4514 kernel/workqueue.c pwq = per_cpu_ptr(wq->cpu_pwqs, cpu);
per_cpu_ptr 141 lib/percpu-refcount.c count += *per_cpu_ptr(percpu_count, cpu);
per_cpu_ptr 216 lib/percpu-refcount.c *per_cpu_ptr(percpu_count, cpu) = 0;
per_cpu_ptr 67 lib/percpu_counter.c s32 *pcount = per_cpu_ptr(fbc->counters, cpu);
per_cpu_ptr 114 lib/percpu_counter.c s32 *pcount = per_cpu_ptr(fbc->counters, cpu);
per_cpu_ptr 188 lib/percpu_counter.c pcount = per_cpu_ptr(fbc->counters, cpu);
per_cpu_ptr 246 lib/random32.c struct rnd_state *state = per_cpu_ptr(pcpu_state, i);
per_cpu_ptr 387 lib/sbitmap.c *per_cpu_ptr(sbq->alloc_hint, i) = prandom_u32() % depth;
per_cpu_ptr 602 lib/sbitmap.c *per_cpu_ptr(sbq->alloc_hint, cpu) = nr;
per_cpu_ptr 640 lib/sbitmap.c seq_printf(m, "%u", *per_cpu_ptr(sbq->alloc_hint, i));
per_cpu_ptr 919 mm/kmemleak.c create_object((unsigned long)per_cpu_ptr(ptr, cpu),
per_cpu_ptr 998 mm/kmemleak.c delete_object_full((unsigned long)per_cpu_ptr(ptr,
per_cpu_ptr 4414 mm/memcontrol.c x += per_cpu_ptr(memcg->vmstats_percpu, cpu)->stat[idx];
per_cpu_ptr 6987 mm/memcontrol.c INIT_WORK(&per_cpu_ptr(&memcg_stock, cpu)->work,
per_cpu_ptr 919 mm/memory_hotplug.c p = per_cpu_ptr(pgdat->per_cpu_nodestats, cpu);
per_cpu_ptr 2824 mm/page_alloc.c pset = per_cpu_ptr(zone->pageset, cpu);
per_cpu_ptr 2929 mm/page_alloc.c pcp = per_cpu_ptr(zone->pageset, cpu);
per_cpu_ptr 2934 mm/page_alloc.c pcp = per_cpu_ptr(z->pageset, cpu);
per_cpu_ptr 2949 mm/page_alloc.c struct pcpu_drain *drain = per_cpu_ptr(&pcpu_drain, cpu);
per_cpu_ptr 2956 mm/page_alloc.c flush_work(&per_cpu_ptr(&pcpu_drain, cpu)->work);
per_cpu_ptr 5260 mm/page_alloc.c free_pcp += per_cpu_ptr(zone->pageset, cpu)->pcp.count;
per_cpu_ptr 5346 mm/page_alloc.c free_pcp += per_cpu_ptr(zone->pageset, cpu)->pcp.count;
per_cpu_ptr 6161 mm/page_alloc.c struct per_cpu_pageset *pcp = per_cpu_ptr(zone->pageset, cpu);
per_cpu_ptr 8035 mm/page_alloc.c per_cpu_ptr(zone->pageset, cpu));
per_cpu_ptr 8538 mm/page_alloc.c per_cpu_ptr(zone->pageset, cpu));
per_cpu_ptr 8552 mm/page_alloc.c pset = per_cpu_ptr(zone->pageset, cpu);
per_cpu_ptr 2005 mm/percpu.c void *start = per_cpu_ptr(base, cpu);
per_cpu_ptr 2012 mm/percpu.c per_cpu_ptr(base, get_boot_cpu_id());
per_cpu_ptr 2085 mm/percpu.c void *start = per_cpu_ptr(base, cpu);
per_cpu_ptr 963 mm/slab.c nc = per_cpu_ptr(cachep->cpu_cache, cpu);
per_cpu_ptr 1735 mm/slab.c init_arraycache(per_cpu_ptr(cpu_cache, cpu),
per_cpu_ptr 3829 mm/slab.c struct array_cache *ac = per_cpu_ptr(prev, cpu);
per_cpu_ptr 2030 mm/slub.c per_cpu_ptr(s->cpu_slab, cpu)->tid = init_tid(cpu);
per_cpu_ptr 2316 mm/slub.c struct kmem_cache_cpu *c = per_cpu_ptr(s->cpu_slab, cpu);
per_cpu_ptr 2334 mm/slub.c struct kmem_cache_cpu *c = per_cpu_ptr(s->cpu_slab, cpu);
per_cpu_ptr 4822 mm/slub.c struct kmem_cache_cpu *c = per_cpu_ptr(s->cpu_slab,
per_cpu_ptr 5081 mm/slub.c page = slub_percpu_partial(per_cpu_ptr(s->cpu_slab, cpu));
per_cpu_ptr 5095 mm/slub.c page = slub_percpu_partial(per_cpu_ptr(s->cpu_slab, cpu));
per_cpu_ptr 5372 mm/slub.c unsigned x = per_cpu_ptr(s->cpu_slab, cpu)->stat[si];
per_cpu_ptr 5395 mm/slub.c per_cpu_ptr(s->cpu_slab, cpu)->stat[si] = 0;
per_cpu_ptr 3221 mm/swapfile.c cluster = per_cpu_ptr(p->percpu_cluster, cpu);
per_cpu_ptr 47 mm/vmstat.c per_cpu_ptr(zone->pageset, cpu)->vm_numa_stat_diff[item]
per_cpu_ptr 256 mm/vmstat.c per_cpu_ptr(pgdat->per_cpu_nodestats, cpu)->stat_threshold = 0;
per_cpu_ptr 269 mm/vmstat.c per_cpu_ptr(zone->pageset, cpu)->stat_threshold
per_cpu_ptr 273 mm/vmstat.c pgdat_threshold = per_cpu_ptr(pgdat->per_cpu_nodestats, cpu)->stat_threshold;
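The lib/percpu-refcount.c and lib/percpu_counter.c lines above show the read side of a dynamically allocated per-CPU counter: every CPU's slot is folded into one total with per_cpu_ptr(). A small sketch under the assumption of a hypothetical sketch_counter type, not the real fbc or percpu_count objects:

/*
 * Hedged sketch: an alloc_percpu() counter whose approximate value is read
 * by summing each CPU's slot, as the lib/ call sites above do.
 */
#include <linux/percpu.h>
#include <linux/cpumask.h>
#include <linux/types.h>
#include <linux/errno.h>

struct sketch_counter {
    s64 __percpu *slots;
};

static int sketch_counter_init(struct sketch_counter *c)
{
    c->slots = alloc_percpu(s64);
    return c->slots ? 0 : -ENOMEM;
}

static s64 sketch_counter_sum(struct sketch_counter *c)
{
    s64 sum = 0;
    int cpu;

    /* per_cpu_ptr() on an alloc_percpu() pointer addresses one CPU's slot */
    for_each_possible_cpu(cpu)
        sum += *per_cpu_ptr(c->slots, cpu);
    return sum;
}

static void sketch_counter_destroy(struct sketch_counter *c)
{
    free_percpu(c->slots);
}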
per_cpu_ptr 274 mm/vmstat.c per_cpu_ptr(pgdat->per_cpu_nodestats, cpu)->stat_threshold
per_cpu_ptr 306 mm/vmstat.c per_cpu_ptr(zone->pageset, cpu)->stat_threshold
per_cpu_ptr 861 mm/vmstat.c p = per_cpu_ptr(zone->pageset, cpu);
per_cpu_ptr 889 mm/vmstat.c p = per_cpu_ptr(pgdat->per_cpu_nodestats, cpu);
per_cpu_ptr 1617 mm/vmstat.c pageset = per_cpu_ptr(zone->pageset, i);
per_cpu_ptr 1836 mm/vmstat.c struct per_cpu_pageset *p = per_cpu_ptr(zone->pageset, cpu);
per_cpu_ptr 1916 mm/vmstat.c INIT_DEFERRABLE_WORK(per_cpu_ptr(&vmstat_work, cpu),
per_cpu_ptr 720 mm/z3fold.c unbuddied = per_cpu_ptr(pool->unbuddied, cpu);
per_cpu_ptr 786 mm/z3fold.c per_cpu_ptr(pool->unbuddied, cpu);
per_cpu_ptr 403 mm/zswap.c if (WARN_ON(*per_cpu_ptr(pool->tfm, cpu)))
per_cpu_ptr 412 mm/zswap.c *per_cpu_ptr(pool->tfm, cpu) = tfm;
per_cpu_ptr 421 mm/zswap.c tfm = *per_cpu_ptr(pool->tfm, cpu);
per_cpu_ptr 424 mm/zswap.c *per_cpu_ptr(pool->tfm, cpu) = NULL;
per_cpu_ptr 674 net/8021q/vlan_dev.c p = per_cpu_ptr(vlan_dev_priv(dev)->vlan_pcpu_stats, i);
per_cpu_ptr 108 net/batman-adv/soft-interface.c counters = per_cpu_ptr(bat_priv->bat_counters, cpu);
per_cpu_ptr 206 net/bridge/br_device.c = per_cpu_ptr(br->stats, cpu);
per_cpu_ptr 2440 net/bridge/br_multicast.c struct bridge_mcast_stats *cpu_stats = per_cpu_ptr(stats, i);
per_cpu_ptr 1216 net/bridge/br_vlan.c cpu_stats = per_cpu_ptr(v->stats, i);
per_cpu_ptr 87 net/caif/caif_dev.c refcnt += *per_cpu_ptr(e->pcpu_refcnt, i);
per_cpu_ptr 195 net/caif/cffrml.c refcnt += *per_cpu_ptr(this->pcpu_refcnt, i);
per_cpu_ptr 5259 net/core/dev.c per_cpu_ptr(&flush_works, cpu));
per_cpu_ptr 5262 net/core/dev.c flush_work(per_cpu_ptr(&flush_works, cpu));
per_cpu_ptr 9216 net/core/dev.c refcnt += *per_cpu_ptr(dev->pcpu_refcnt, i);
per_cpu_ptr 10191 net/core/dev.c struct work_struct *flush = per_cpu_ptr(&flush_works, i);
per_cpu_ptr 5344 net/core/devlink.c cpu_stats = per_cpu_ptr(trap_stats, i);
per_cpu_ptr 312 net/core/dst.c __metadata_dst_init(per_cpu_ptr(md_dst, cpu), type, optslen);
per_cpu_ptr 324 net/core/dst.c struct metadata_dst *one_md_dst = per_cpu_ptr(md_dst, cpu);
per_cpu_ptr 160 net/core/dst_cache.c dst_release(per_cpu_ptr(dst_cache->cache, i)->dst);
per_cpu_ptr 3598 net/core/filter.c ri = per_cpu_ptr(&bpf_redirect_info, cpu);
per_cpu_ptr 124 net/core/gen_stats.c struct gnet_stats_basic_cpu *bcpu = per_cpu_ptr(cpu, i);
per_cpu_ptr 288 net/core/gen_stats.c const struct gnet_stats_queue *qcpu = per_cpu_ptr(q, i);
per_cpu_ptr 78 net/core/gro_cells.c struct gro_cell *cell = per_cpu_ptr(gcells->cells, i);
per_cpu_ptr 99 net/core/gro_cells.c struct gro_cell *cell = per_cpu_ptr(gcells->cells, i);
per_cpu_ptr 2083 net/core/neighbour.c st = per_cpu_ptr(tbl->stats, cpu);
per_cpu_ptr 3277 net/core/neighbour.c return per_cpu_ptr(tbl->stats, cpu);
per_cpu_ptr 3291 net/core/neighbour.c return per_cpu_ptr(tbl->stats, cpu);
per_cpu_ptr 3244 net/core/sock.c res += per_cpu_ptr(net->core.prot_inuse, cpu)->val[idx];
per_cpu_ptr 3260 net/core/sock.c res += *per_cpu_ptr(net->core.sock_inuse, cpu);
per_cpu_ptr 672 net/dsa/slave.c s = per_cpu_ptr(p->stats64, i);
per_cpu_ptr 1058 net/dsa/slave.c s = per_cpu_ptr(p->stats64, i);
per_cpu_ptr 1646 net/ipv4/af_inet.c return *(((unsigned long *)per_cpu_ptr(mib, cpu)) + offt);
per_cpu_ptr 1671 net/ipv4/af_inet.c bhptr = per_cpu_ptr(mib, cpu);
per_cpu_ptr 1747 net/ipv4/af_inet.c af_inet_stats = per_cpu_ptr(net->mib.ip_statistics, i);
per_cpu_ptr 200 net/ipv4/fib_semantics.c rt = rcu_dereference_protected(*per_cpu_ptr(rtp, cpu), 1);
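Several of the networking entries above (vlan, bridge, dsa stats64) aggregate per-CPU stats blocks guarded by u64_stats_sync. A sketch of that aggregation loop with an invented stats struct; the real structures differ per subsystem:

/*
 * Hedged sketch: fold a consistent snapshot of every CPU's stats block into
 * the totals, the way the listed stats64/get_stats call sites do.
 */
#include <linux/percpu.h>
#include <linux/cpumask.h>
#include <linux/types.h>
#include <linux/u64_stats_sync.h>

struct sketch_pcpu_stats {
    u64 rx_packets;
    u64 rx_bytes;
    struct u64_stats_sync syncp;
};

static void sketch_fold_stats(struct sketch_pcpu_stats __percpu *pstats,
                              u64 *rx_packets, u64 *rx_bytes)
{
    int cpu;

    *rx_packets = 0;
    *rx_bytes = 0;
    for_each_possible_cpu(cpu) {
        const struct sketch_pcpu_stats *s = per_cpu_ptr(pstats, cpu);
        unsigned int start;
        u64 packets, bytes;

        /* retry until the writer on that CPU was not mid-update */
        do {
            start = u64_stats_fetch_begin_irq(&s->syncp);
            packets = s->rx_packets;
            bytes = s->rx_bytes;
        } while (u64_stats_fetch_retry_irq(&s->syncp, start));

        *rx_packets += packets;
        *rx_bytes += bytes;
    }
}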
per_cpu_ptr 2416 net/ipv4/fib_trie.c const struct trie_use_stats *pcpu = per_cpu_ptr(stats, cpu);
per_cpu_ptr 1190 net/ipv4/icmp.c inet_ctl_sock_destroy(*per_cpu_ptr(net->ipv4.icmp_sk, i));
per_cpu_ptr 1211 net/ipv4/icmp.c *per_cpu_ptr(net->ipv4.icmp_sk, i) = sk;
per_cpu_ptr 193 net/ipv4/ip_tunnel_core.c per_cpu_ptr(dev->tstats, i);
per_cpu_ptr 348 net/ipv4/route.c src = (struct ip_rt_acct *)per_cpu_ptr(ip_rt_acct, i);
per_cpu_ptr 722 net/ipv4/route.c prt = per_cpu_ptr(nhc->nhc_pcpu_rth_output, i);
per_cpu_ptr 2628 net/ipv4/tcp_ipv4.c inet_ctl_sock_destroy(*per_cpu_ptr(net->ipv4.tcp_sk, cpu));
per_cpu_ptr 2654 net/ipv4/tcp_ipv4.c *per_cpu_ptr(net->ipv4.tcp_sk, cpu) = sk;
per_cpu_ptr 339 net/ipv6/addrconf.c addrconf_stats = per_cpu_ptr(idev->stats.ipv6, i);
per_cpu_ptr 6409 net/ipv6/addrconf.c rtp = per_cpu_ptr(nh->rt6i_pcpu, cpu);
per_cpu_ptr 835 net/ipv6/af_inet6.c af_inet6_stats = per_cpu_ptr(net->mib.ipv6_statistics, i);
per_cpu_ptr 978 net/ipv6/icmp.c inet_ctl_sock_destroy(*per_cpu_ptr(net->ipv6.icmp_sk, i));
per_cpu_ptr 1000 net/ipv6/icmp.c *per_cpu_ptr(net->ipv6.icmp_sk, i) = sk;
per_cpu_ptr 916 net/ipv6/ip6_fib.c ppcpu_rt = per_cpu_ptr(fib6_nh->rt6i_pcpu, cpu);
per_cpu_ptr 100 net/ipv6/ip6_tunnel.c per_cpu_ptr(dev->tstats, i);
per_cpu_ptr 166 net/ipv6/route.c struct uncached_list *ul = per_cpu_ptr(&rt6_uncached_list, cpu);
per_cpu_ptr 3559 net/ipv6/route.c ppcpu_rt = per_cpu_ptr(fib6_nh->rt6i_pcpu, cpu);
per_cpu_ptr 6422 net/ipv6/route.c struct uncached_list *ul = per_cpu_ptr(&rt6_uncached_list, cpu);
per_cpu_ptr 375 net/ipv6/seg6_hmac.c p_tfm = per_cpu_ptr(algo->tfms, cpu);
per_cpu_ptr 393 net/ipv6/seg6_hmac.c *per_cpu_ptr(algo->shashs, cpu) = shash;
per_cpu_ptr 428 net/ipv6/seg6_hmac.c shash = *per_cpu_ptr(algo->shashs, cpu);
per_cpu_ptr 430 net/ipv6/seg6_hmac.c tfm = *per_cpu_ptr(algo->tfms, cpu);
per_cpu_ptr 1148 net/mac80211/iface.c tstats = per_cpu_ptr(dev->tstats, i);
per_cpu_ptr 2050 net/mac80211/sta_info.c cpustats = per_cpu_ptr(sta->pcpu_rx_stats, cpu);
per_cpu_ptr 2240 net/mac80211/sta_info.c cpurxs = per_cpu_ptr(sta->pcpu_rx_stats, cpu);
per_cpu_ptr 2254 net/mac80211/sta_info.c cpurxs = per_cpu_ptr(sta->pcpu_rx_stats, cpu);
per_cpu_ptr 2293 net/mac80211/sta_info.c cpurxs = per_cpu_ptr(sta->pcpu_rx_stats, cpu);
per_cpu_ptr 1079 net/mpls/af_mpls.c p = per_cpu_ptr(mdev->stats, i);
per_cpu_ptr 1466 net/mpls/af_mpls.c mpls_stats = per_cpu_ptr(mdev->stats, i);
per_cpu_ptr 989 net/netfilter/ipvs/ip_vs_ctl.c ip_vs_dest_stats = per_cpu_ptr(dest->stats.cpustats, i);
per_cpu_ptr 1330 net/netfilter/ipvs/ip_vs_ctl.c ip_vs_stats = per_cpu_ptr(svc->stats.cpustats, i);
per_cpu_ptr 2249 net/netfilter/ipvs/ip_vs_ctl.c struct ip_vs_cpu_stats *u = per_cpu_ptr(cpustats, i);
per_cpu_ptr 4114 net/netfilter/ipvs/ip_vs_ctl.c ipvs_tot_stats = per_cpu_ptr(ipvs->tot_stats.cpustats, i);
per_cpu_ptr 63 net/netfilter/ipvs/ip_vs_est.c struct ip_vs_cpu_stats *s = per_cpu_ptr(stats, i);
per_cpu_ptr 501 net/netfilter/nf_conntrack_core.c pcpu = per_cpu_ptr(nf_ct_net(ct)->ct.pcpu_lists, ct->cpu);
per_cpu_ptr 516 net/netfilter/nf_conntrack_core.c pcpu = per_cpu_ptr(nf_ct_net(ct)->ct.pcpu_lists, ct->cpu);
per_cpu_ptr 530 net/netfilter/nf_conntrack_core.c pcpu = per_cpu_ptr(nf_ct_net(ct)->ct.pcpu_lists, ct->cpu);
per_cpu_ptr 2142 net/netfilter/nf_conntrack_core.c pcpu = per_cpu_ptr(net->ct.pcpu_lists, cpu);
per_cpu_ptr 2585 net/netfilter/nf_conntrack_core.c struct ct_pcpu *pcpu = per_cpu_ptr(net->ct.pcpu_lists, cpu);
per_cpu_ptr 96 net/netfilter/nf_conntrack_ecache.c pcpu = per_cpu_ptr(ctnet->pcpu_lists, cpu);
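The icmp_sk/tcp_sk lines and the zswap ->tfm lines earlier use a per-CPU slot that itself holds a pointer, so call sites dereference per_cpu_ptr() once more to load or store the object. A sketch with a stand-in context type; sketch_ctx and the helpers are hypothetical:

/*
 * Hedged sketch: alloc_percpu() of a pointer type, with one object stored
 * per CPU, mirroring the per-CPU control-socket and tfm slots listed above.
 */
#include <linux/percpu.h>
#include <linux/cpumask.h>
#include <linux/slab.h>
#include <linux/errno.h>

struct sketch_ctx {
    int id;
};

static struct sketch_ctx * __percpu *sketch_ctxs;

static int sketch_ctxs_init(void)
{
    int cpu;

    sketch_ctxs = alloc_percpu(struct sketch_ctx *);
    if (!sketch_ctxs)
        return -ENOMEM;

    for_each_possible_cpu(cpu) {
        struct sketch_ctx *ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);

        if (!ctx)
            return -ENOMEM; /* simplified: no unwind in this sketch */
        ctx->id = cpu;
        /* store this CPU's object into its per-CPU slot */
        *per_cpu_ptr(sketch_ctxs, cpu) = ctx;
    }
    return 0;
}

static struct sketch_ctx *sketch_ctx_of(int cpu)
{
    return *per_cpu_ptr(sketch_ctxs, cpu);
}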
per_cpu_ptr 1419 net/netfilter/nf_conntrack_netlink.c pcpu = per_cpu_ptr(net->ct.pcpu_lists, cpu);
per_cpu_ptr 2254 net/netfilter/nf_conntrack_netlink.c st = per_cpu_ptr(net->ct.stat, cpu);
per_cpu_ptr 3477 net/netfilter/nf_conntrack_netlink.c st = per_cpu_ptr(net->ct.stat, cpu);
per_cpu_ptr 397 net/netfilter/nf_conntrack_standalone.c return per_cpu_ptr(net->ct.stat, cpu);
per_cpu_ptr 412 net/netfilter/nf_conntrack_standalone.c return per_cpu_ptr(net->ct.stat, cpu);
per_cpu_ptr 253 net/netfilter/nf_synproxy_core.c return per_cpu_ptr(snet->stats, cpu);
per_cpu_ptr 268 net/netfilter/nf_synproxy_core.c return per_cpu_ptr(snet->stats, cpu);
per_cpu_ptr 1231 net/netfilter/nf_tables_api.c cpu_stats = per_cpu_ptr(stats, cpu);
per_cpu_ptr 127 net/netfilter/nft_counter.c myseq = per_cpu_ptr(&nft_counter_seq, cpu);
per_cpu_ptr 128 net/netfilter/nft_counter.c this_cpu = per_cpu_ptr(priv->counter, cpu);
per_cpu_ptr 277 net/netfilter/nft_counter.c seqcount_init(per_cpu_ptr(&nft_counter_seq, cpu));
per_cpu_ptr 671 net/openvswitch/datapath.c percpu_stats = per_cpu_ptr(dp->stats_percpu, i);
per_cpu_ptr 102 net/openvswitch/vport-internal_dev.c percpu_stats = per_cpu_ptr(dev->tstats, i);
per_cpu_ptr 1176 net/packet/af_packet.c refcnt += *per_cpu_ptr(rb->pending_refcnt, cpu);
per_cpu_ptr 111 net/rds/ib_recv.c head = per_cpu_ptr(cache->percpu, cpu);
per_cpu_ptr 142 net/rds/ib_recv.c head = per_cpu_ptr(cache->percpu, cpu);
per_cpu_ptr 301 net/sched/cls_basic.c struct tc_basic_pcnt *pf = per_cpu_ptr(f->pf, cpu);
per_cpu_ptr 372 net/sched/cls_matchall.c struct tc_matchall_pcnt *pf = per_cpu_ptr(head->pf, cpu);
per_cpu_ptr 1330 net/sched/cls_u32.c __u32 cnt = *per_cpu_ptr(n->pcpu_success, cpum);
per_cpu_ptr 1358 net/sched/cls_u32.c struct tc_u32_pcnt *pf = per_cpu_ptr(n->pf, cpu);
per_cpu_ptr 704 net/sched/sch_generic.c q = per_cpu_ptr(qdisc->cpu_qstats, i);
per_cpu_ptr 548 net/xfrm/xfrm_interface.c stats = per_cpu_ptr(dev->tstats, cpu);
per_cpu_ptr 45 net/xfrm/xfrm_ipcomp.c u8 *scratch = *per_cpu_ptr(ipcomp_scratches, cpu);
per_cpu_ptr 46 net/xfrm/xfrm_ipcomp.c struct crypto_comp *tfm = *per_cpu_ptr(ipcd->tfms, cpu);
per_cpu_ptr 212 net/xfrm/xfrm_ipcomp.c vfree(*per_cpu_ptr(scratches, i));
per_cpu_ptr 237 net/xfrm/xfrm_ipcomp.c *per_cpu_ptr(scratches, i) = scratch;
per_cpu_ptr 265 net/xfrm/xfrm_ipcomp.c struct crypto_comp *tfm = *per_cpu_ptr(tfms, cpu);
per_cpu_ptr 307 net/xfrm/xfrm_ipcomp.c *per_cpu_ptr(tfms, cpu) = tfm;
per_cpu_ptr 11 tools/testing/radix-tree/linux/percpu.h #define per_cpu(var, cpu) (*per_cpu_ptr(&(var), cpu))
per_cpu_ptr 69 tools/testing/selftests/rcutorture/formal/srcu-cbmc/src/percpu.h THIS_CPU_ADD_HELPER(per_cpu_ptr(&(pcp), thread_cpu_id), \
per_cpu_ptr 76 tools/testing/selftests/rcutorture/formal/srcu-cbmc/src/percpu.h THIS_CPU_ADD_HELPER(per_cpu_ptr(&(pcp), this_cpu_add_impl_cpu), \
per_cpu_ptr 119 virt/kvm/arm/arm.c *per_cpu_ptr(kvm->arch.last_vcpu_ran, cpu) = -1;
per_cpu_ptr 1603 virt/kvm/arm/arm.c cpu_data = per_cpu_ptr(&kvm_host_data, cpu);
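The nft_counter.c lines above pair a per-CPU counter with a per-CPU seqcount so a remote reader can take a consistent snapshot without stopping writers. A hedged sketch of that read loop; the struct and helper names are invented, not the nft_counter code itself:

/*
 * Hedged sketch: snapshot per-CPU packet/byte counters protected by a
 * per-CPU seqcount, in the style of the nft_counter call sites listed above.
 */
#include <linux/percpu.h>
#include <linux/percpu-defs.h>
#include <linux/cpumask.h>
#include <linux/seqlock.h>
#include <linux/string.h>
#include <linux/types.h>

struct sketch_pkt_counter {
    u64 packets;
    u64 bytes;
};

static DEFINE_PER_CPU(seqcount_t, sketch_pkt_counter_seq);

static void sketch_pkt_counter_seq_init(void)
{
    int cpu;

    for_each_possible_cpu(cpu)
        seqcount_init(per_cpu_ptr(&sketch_pkt_counter_seq, cpu));
}

static void sketch_pkt_counter_fetch(struct sketch_pkt_counter __percpu *counter,
                                     struct sketch_pkt_counter *total)
{
    int cpu;

    memset(total, 0, sizeof(*total));
    for_each_possible_cpu(cpu) {
        const seqcount_t *seq = per_cpu_ptr(&sketch_pkt_counter_seq, cpu);
        const struct sketch_pkt_counter *c = per_cpu_ptr(counter, cpu);
        u64 packets, bytes;
        unsigned int start;

        /* retry if the owning CPU updated its counters while we read them */
        do {
            start = read_seqcount_begin(seq);
            packets = c->packets;
            bytes = c->bytes;
        } while (read_seqcount_retry(seq, start));

        total->packets += packets;
        total->bytes += bytes;
    }
}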