__per_cpu_offset 141 arch/arm64/mm/numa.c unsigned long __per_cpu_offset[NR_CPUS] __read_mostly;
__per_cpu_offset 142 arch/arm64/mm/numa.c EXPORT_SYMBOL(__per_cpu_offset);
__per_cpu_offset 187 arch/arm64/mm/numa.c __per_cpu_offset[cpu] = delta + pcpu_unit_offsets[cpu];
__per_cpu_offset 76 arch/ia64/kernel/setup.c unsigned long __per_cpu_offset[NR_CPUS];
__per_cpu_offset 77 arch/ia64/kernel/setup.c EXPORT_SYMBOL(__per_cpu_offset);
__per_cpu_offset 62 arch/ia64/mm/contig.c __per_cpu_offset[cpu] = (char *)cpu_data - __per_cpu_start;
__per_cpu_offset 63 arch/ia64/mm/contig.c per_cpu(local_per_cpu_offset, cpu) = __per_cpu_offset[cpu];
__per_cpu_offset 81 arch/ia64/mm/contig.c return __per_cpu_start + __per_cpu_offset[smp_processor_id()];
__per_cpu_offset 136 arch/ia64/mm/contig.c pcpu_setup_first_chunk(ai, __per_cpu_start + __per_cpu_offset[0]);
__per_cpu_offset 148 arch/ia64/mm/discontig.c __per_cpu_offset[cpu] = (char *)__va(cpu_data) -
__per_cpu_offset 200 arch/ia64/mm/discontig.c (void *)(__per_cpu_offset[cpu] + __per_cpu_start));
__per_cpu_offset 244 arch/ia64/mm/discontig.c gi->base_offset = __per_cpu_offset[cpu] + base_offset;
__per_cpu_offset 535 arch/ia64/mm/discontig.c per_cpu(local_per_cpu_offset, cpu) = __per_cpu_offset[cpu];
__per_cpu_offset 538 arch/ia64/mm/discontig.c return __per_cpu_start + __per_cpu_offset[smp_processor_id()];
__per_cpu_offset 781 arch/powerpc/kernel/setup_64.c unsigned long __per_cpu_offset[NR_CPUS] __read_mostly;
__per_cpu_offset 782 arch/powerpc/kernel/setup_64.c EXPORT_SYMBOL(__per_cpu_offset);
__per_cpu_offset 809 arch/powerpc/kernel/setup_64.c __per_cpu_offset[cpu] = delta + pcpu_unit_offsets[cpu];
__per_cpu_offset 810 arch/powerpc/kernel/setup_64.c paca_ptrs[cpu]->data_offset = __per_cpu_offset[cpu];
__per_cpu_offset 265 arch/s390/kernel/smp.c lc->percpu_offset = __per_cpu_offset[cpu];
__per_cpu_offset 999 arch/s390/kernel/smp.c S390_lowcore.percpu_offset = __per_cpu_offset[0];
__per_cpu_offset 15 arch/sparc/include/asm/percpu_64.h #define per_cpu_offset(x) (__per_cpu_offset(x))
__per_cpu_offset 108 arch/sparc/kernel/smp_64.c __local_per_cpu_offset = __per_cpu_offset(cpuid);
__per_cpu_offset 1690 arch/sparc/kernel/smp_64.c __per_cpu_offset(cpu) = delta + pcpu_unit_offsets[cpu];
__per_cpu_offset 1693 arch/sparc/kernel/smp_64.c __local_per_cpu_offset = __per_cpu_offset(smp_processor_id());
__per_cpu_offset 38 arch/x86/kernel/setup_percpu.c unsigned long __per_cpu_offset[NR_CPUS] __ro_after_init = {
__per_cpu_offset 41 arch/x86/kernel/setup_percpu.c EXPORT_SYMBOL(__per_cpu_offset);
__per_cpu_offset 18 include/asm-generic/percpu.h #ifndef __per_cpu_offset
__per_cpu_offset 19 include/asm-generic/percpu.h extern unsigned long __per_cpu_offset[NR_CPUS];
__per_cpu_offset 21 include/asm-generic/percpu.h #define per_cpu_offset(x) (__per_cpu_offset[x])
__per_cpu_offset 2595 kernel/debug/kdb/kdb_main.c #ifdef __per_cpu_offset
__per_cpu_offset 2596 kernel/debug/kdb/kdb_main.c #define KDB_PCU(cpu) __per_cpu_offset(cpu)
__per_cpu_offset 2599 kernel/debug/kdb/kdb_main.c #define KDB_PCU(cpu) __per_cpu_offset[cpu]
__per_cpu_offset 2946 mm/percpu.c unsigned long __per_cpu_offset[NR_CPUS] __read_mostly;
__per_cpu_offset 2947 mm/percpu.c EXPORT_SYMBOL(__per_cpu_offset);
__per_cpu_offset 2978 mm/percpu.c __per_cpu_offset[cpu] = delta + pcpu_unit_offsets[cpu];
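
Read together, these hits describe one mechanism: each possible CPU gets its own copy of the kernel's per-CPU data, and __per_cpu_offset[cpu] is the byte delta added to a per-CPU symbol's link-time address (relative to __per_cpu_start) to reach CPU cpu's copy. The generic pieces live in mm/percpu.c and include/asm-generic/percpu.h, where per_cpu_offset(x) simply indexes the array and setup code fills it as delta + pcpu_unit_offsets[cpu]; architectures then cache the value wherever it is cheap to reach at runtime (paca_ptrs[cpu]->data_offset on powerpc, the lowcore percpu_offset field on s390, __local_per_cpu_offset on sparc64, the local_per_cpu_offset per-CPU variable on ia64). The stand-alone C sketch below is only a user-space model of that pointer arithmetic: struct pcpu_data, the template_copy/cpu_copy arrays, and the simplified per_cpu_ptr() macro are illustrative stand-ins, not the kernel's actual implementation (which goes through the first-chunk allocator and pointer-shifting helpers such as SHIFT_PERCPU_PTR()).

/* Stand-alone model of the __per_cpu_offset arithmetic (not kernel code). */
#include <stdio.h>
#include <string.h>

#define NR_CPUS 4

struct pcpu_data {                               /* hypothetical per-CPU payload */
	long counter;
};

static struct pcpu_data template_copy;           /* stands in for .data..percpu */
static struct pcpu_data cpu_copy[NR_CPUS];       /* stands in for the per-CPU chunks */
static unsigned long __per_cpu_offset[NR_CPUS];  /* delta from template to each copy */

/* Mirrors asm-generic/percpu.h: per_cpu_offset(x) just indexes the array. */
#define per_cpu_offset(cpu)   (__per_cpu_offset[cpu])

/* Simplified stand-in for per_cpu_ptr(): shift a pointer by the CPU's offset. */
#define per_cpu_ptr(ptr, cpu) \
	((typeof(ptr))((unsigned long)(ptr) + per_cpu_offset(cpu)))

static void setup_per_cpu_areas(void)
{
	for (int cpu = 0; cpu < NR_CPUS; cpu++) {
		/* Same shape as __per_cpu_offset[cpu] = delta + pcpu_unit_offsets[cpu]. */
		__per_cpu_offset[cpu] = (unsigned long)&cpu_copy[cpu] -
					(unsigned long)&template_copy;
		memcpy(&cpu_copy[cpu], &template_copy, sizeof(template_copy));
	}
}

int main(void)
{
	template_copy.counter = 100;            /* initial value every copy inherits */
	setup_per_cpu_areas();

	per_cpu_ptr(&template_copy, 2)->counter += 5;   /* touches CPU 2's copy only */

	for (int cpu = 0; cpu < NR_CPUS; cpu++)
		printf("cpu%d counter = %ld\n", cpu,
		       per_cpu_ptr(&template_copy, cpu)->counter);
	return 0;
}

Compiled with gcc (GNU C extensions, since typeof() is used), this prints 100 for every CPU except cpu2, which shows 105: the same isolation per_cpu()/per_cpu_ptr() give in the kernel once __per_cpu_offset[] has been populated by setup_per_cpu_areas().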