this_cpu_read      37 arch/arm64/include/asm/simd.h 		!this_cpu_read(fpsimd_context_busy);
this_cpu_read     824 arch/arm64/kernel/traps.c 	unsigned long irq_stk = (unsigned long)this_cpu_read(irq_stack_ptr);
this_cpu_read      61 arch/powerpc/lib/code-patching.c 	free_vm_area(this_cpu_read(text_poke_area));
this_cpu_read     152 arch/powerpc/lib/code-patching.c 	if (!this_cpu_read(text_poke_area))
this_cpu_read      81 arch/powerpc/mm/nohash/book3e_hugetlbpage.c 	index = this_cpu_read(next_tlbcam_idx);
this_cpu_read     150 arch/s390/kernel/vtime.c 	    time_after64(jiffies_64, this_cpu_read(mt_scaling_jiffies)))
this_cpu_read     691 arch/x86/events/amd/core.c 	if (time_after(jiffies, this_cpu_read(perf_nmi_tstamp)))
this_cpu_read    2122 arch/x86/events/core.c 	load_mm_cr4_irqsoff(this_cpu_read(cpu_tlbstate.loaded_mm));
this_cpu_read    1245 arch/x86/events/intel/ds.c 		u8 *buf = this_cpu_read(insn_buffer);
this_cpu_read    1679 arch/x86/events/intel/ds.c 	WARN_ON(this_cpu_read(cpu_hw_events.enabled));
this_cpu_read     314 arch/x86/include/asm/desc.h 	if (unlikely(this_cpu_read(__tss_limit_invalid)))
this_cpu_read     398 arch/x86/include/asm/desc.h 	if (this_cpu_read(debug_idt_ctr))
this_cpu_read     512 arch/x86/include/asm/fpu/internal.h 	return fpu == this_cpu_read(fpu_fpregs_owner_ctx) && cpu == fpu->last_cpu;
this_cpu_read     350 arch/x86/include/asm/mmu_context.h 	unsigned long cr3 = build_cr3(this_cpu_read(cpu_tlbstate.loaded_mm)->pgd,
this_cpu_read     351 arch/x86/include/asm/mmu_context.h 		this_cpu_read(cpu_tlbstate.loaded_mm_asid));
this_cpu_read     382 arch/x86/include/asm/mmu_context.h 	temp_state.mm = this_cpu_read(cpu_tlbstate.loaded_mm);
this_cpu_read      50 arch/x86/include/asm/percpu.h #define __my_cpu_offset		this_cpu_read(this_cpu_off)
this_cpu_read     167 arch/x86/include/asm/smp.h #define raw_smp_processor_id()  this_cpu_read(cpu_number)
this_cpu_read      79 arch/x86/include/asm/switch_to.h 	if (unlikely(this_cpu_read(cpu_tss_rw.x86_tss.ss1) == thread->sysenter_cs))
this_cpu_read     254 arch/x86/include/asm/tlbflush.h 	struct mm_struct *loaded_mm = this_cpu_read(cpu_tlbstate.loaded_mm);
this_cpu_read     297 arch/x86/include/asm/tlbflush.h 	cr4 = this_cpu_read(cpu_tlbstate.cr4);
this_cpu_read     307 arch/x86/include/asm/tlbflush.h 	cr4 = this_cpu_read(cpu_tlbstate.cr4);
this_cpu_read     336 arch/x86/include/asm/tlbflush.h 	cr4 = this_cpu_read(cpu_tlbstate.cr4);
this_cpu_read     343 arch/x86/include/asm/tlbflush.h 	return this_cpu_read(cpu_tlbstate.cr4);
this_cpu_read     411 arch/x86/include/asm/tlbflush.h 	invalidate_user_asid(this_cpu_read(cpu_tlbstate.loaded_mm_asid));
this_cpu_read     442 arch/x86/include/asm/tlbflush.h 	cr4 = this_cpu_read(cpu_tlbstate.cr4);
this_cpu_read     456 arch/x86/include/asm/tlbflush.h 	u32 loaded_mm_asid = this_cpu_read(cpu_tlbstate.loaded_mm_asid);
this_cpu_read     846 arch/x86/include/asm/uv/uv_hub.h #define uv_hub_nmi			this_cpu_read(uv_cpu_nmi.hub)
this_cpu_read     135 arch/x86/kernel/apic/msi.c 	if (IS_ERR_OR_NULL(this_cpu_read(vector_irq[cfg->vector])))
this_cpu_read     100 arch/x86/kernel/apic/x2apic_cluster.c 	struct cluster_mask *cmsk = this_cpu_read(cluster_masks);
this_cpu_read     436 arch/x86/kernel/cpu/common.c 	cr4_pinned_bits = this_cpu_read(cpu_tlbstate.cr4) & mask;
this_cpu_read    1751 arch/x86/kernel/cpu/common.c 	if (WARN_ON(!this_cpu_read(debug_idt_ctr)))
this_cpu_read    1869 arch/x86/kernel/cpu/common.c 	if (this_cpu_read(numa_node) == 0 &&
this_cpu_read     651 arch/x86/kernel/cpu/intel.c 	msr = this_cpu_read(msr_misc_features_shadow);
this_cpu_read      77 arch/x86/kernel/cpu/intel_epb.c 	u64 val = this_cpu_read(saved_epb);
this_cpu_read     633 arch/x86/kernel/cpu/mce/amd.c 	for (bank = 0; bank < this_cpu_read(mce_num_banks); ++bank) {
this_cpu_read     978 arch/x86/kernel/cpu/mce/amd.c 	for (bank = 0; bank < this_cpu_read(mce_num_banks); ++bank)
this_cpu_read    1019 arch/x86/kernel/cpu/mce/amd.c 	for (bank = 0; bank < this_cpu_read(mce_num_banks); ++bank) {
this_cpu_read     713 arch/x86/kernel/cpu/mce/core.c 	for (i = 0; i < this_cpu_read(mce_num_banks); i++) {
this_cpu_read     815 arch/x86/kernel/cpu/mce/core.c 	for (i = 0; i < this_cpu_read(mce_num_banks); i++) {
this_cpu_read    1095 arch/x86/kernel/cpu/mce/core.c 	for (i = 0; i < this_cpu_read(mce_num_banks); i++) {
this_cpu_read    1153 arch/x86/kernel/cpu/mce/core.c 	for (i = 0; i < this_cpu_read(mce_num_banks); i++) {
this_cpu_read    1494 arch/x86/kernel/cpu/mce/core.c 	u8 n_banks = this_cpu_read(mce_num_banks);
this_cpu_read    1567 arch/x86/kernel/cpu/mce/core.c 	for (i = 0; i < this_cpu_read(mce_num_banks); i++) {
this_cpu_read    1593 arch/x86/kernel/cpu/mce/core.c 	for (i = 0; i < this_cpu_read(mce_num_banks); i++) {
this_cpu_read    1645 arch/x86/kernel/cpu/mce/core.c 		if (c->x86 == 15 && this_cpu_read(mce_num_banks) > 4) {
this_cpu_read    1664 arch/x86/kernel/cpu/mce/core.c 		if (c->x86 == 6 && this_cpu_read(mce_num_banks) > 0)
this_cpu_read    1686 arch/x86/kernel/cpu/mce/core.c 		if (c->x86 == 6 && c->x86_model < 0x1A && this_cpu_read(mce_num_banks) > 0)
this_cpu_read    1923 arch/x86/kernel/cpu/mce/core.c 	if (bank >= this_cpu_read(mce_num_banks)) {
this_cpu_read    2012 arch/x86/kernel/cpu/mce/core.c 	for (i = 0; i < this_cpu_read(mce_num_banks); i++) {
this_cpu_read    2361 arch/x86/kernel/cpu/mce/core.c 	for (i = 0; i < this_cpu_read(mce_num_banks); i++) {
this_cpu_read     175 arch/x86/kernel/cpu/mce/intel.c 	if ((this_cpu_read(cmci_backoff_cnt) > 0) &&
this_cpu_read     463 arch/x86/kernel/cpu/resctrl/pseudo_lock.c 	closid_p = this_cpu_read(pqr_state.cur_closid);
this_cpu_read     464 arch/x86/kernel/cpu/resctrl/pseudo_lock.c 	rmid_p = this_cpu_read(pqr_state.cur_rmid);
this_cpu_read      37 arch/x86/kernel/dumpstack_32.c 	unsigned long *begin = (unsigned long *)this_cpu_read(hardirq_stack_ptr);
this_cpu_read      62 arch/x86/kernel/dumpstack_32.c 	unsigned long *begin = (unsigned long *)this_cpu_read(softirq_stack_ptr);
this_cpu_read     130 arch/x86/kernel/dumpstack_64.c 	unsigned long *end   = (unsigned long *)this_cpu_read(hardirq_stack_ptr);
this_cpu_read      48 arch/x86/kernel/fpu/core.c 	return this_cpu_read(in_kernel_fpu);
this_cpu_read      90 arch/x86/kernel/fpu/core.c 	WARN_ON_FPU(this_cpu_read(in_kernel_fpu));
this_cpu_read     109 arch/x86/kernel/fpu/core.c 	WARN_ON_FPU(!this_cpu_read(in_kernel_fpu));
this_cpu_read      59 arch/x86/kernel/kvmclock.c 	return &this_cpu_read(hv_clock_per_cpu)->pvti;
this_cpu_read      64 arch/x86/kernel/kvmclock.c 	return this_cpu_read(hv_clock_per_cpu);
this_cpu_read      56 arch/x86/kernel/ldt.c 	if (this_cpu_read(cpu_tlbstate.loaded_mm) != mm)
this_cpu_read     516 arch/x86/kernel/nmi.c 	if (this_cpu_read(nmi_state) != NMI_NOT_RUNNING) {
this_cpu_read     547 arch/x86/kernel/nmi.c 	if (unlikely(this_cpu_read(update_debug_stack))) {
this_cpu_read     553 arch/x86/kernel/nmi.c 	if (unlikely(this_cpu_read(nmi_cr2) != read_cr2()))
this_cpu_read     554 arch/x86/kernel/nmi.c 		write_cr2(this_cpu_read(nmi_cr2));
this_cpu_read     216 arch/x86/kernel/paravirt.c 	BUG_ON(this_cpu_read(paravirt_lazy_mode) != PARAVIRT_LAZY_NONE);
this_cpu_read     223 arch/x86/kernel/paravirt.c 	BUG_ON(this_cpu_read(paravirt_lazy_mode) != mode);
this_cpu_read     255 arch/x86/kernel/paravirt.c 	if (this_cpu_read(paravirt_lazy_mode) == PARAVIRT_LAZY_MMU) {
this_cpu_read     278 arch/x86/kernel/paravirt.c 	return this_cpu_read(paravirt_lazy_mode);
this_cpu_read     199 arch/x86/kernel/process.c 	msrval = this_cpu_read(msr_misc_features_shadow);
this_cpu_read     513 arch/x86/kernel/process_64.c 		     this_cpu_read(irq_count) != -1);
this_cpu_read     342 arch/x86/kernel/traps.c 		struct pt_regs *gpregs = (struct pt_regs *)this_cpu_read(cpu_tss_rw.x86_tss.sp0) - 1;
this_cpu_read     624 arch/x86/kernel/traps.c 	struct pt_regs *regs = (struct pt_regs *)this_cpu_read(cpu_current_top_of_stack) - 1;
this_cpu_read     648 arch/x86/kernel/traps.c 		(struct bad_iret_stack *)this_cpu_read(cpu_tss_rw.x86_tss.sp0) - 1;
this_cpu_read      69 arch/x86/kernel/tsc.c 		seq = this_cpu_read(cyc2ns.seq.sequence);
this_cpu_read      72 arch/x86/kernel/tsc.c 		data->cyc2ns_offset = this_cpu_read(cyc2ns.data[idx].cyc2ns_offset);
this_cpu_read      73 arch/x86/kernel/tsc.c 		data->cyc2ns_mul    = this_cpu_read(cyc2ns.data[idx].cyc2ns_mul);
this_cpu_read      74 arch/x86/kernel/tsc.c 		data->cyc2ns_shift  = this_cpu_read(cyc2ns.data[idx].cyc2ns_shift);
this_cpu_read      76 arch/x86/kernel/tsc.c 	} while (unlikely(seq != this_cpu_read(cyc2ns.seq.sequence)));
this_cpu_read      18 arch/x86/kvm/vmx/evmcs.h #define current_evmcs ((struct hv_enlightened_vmcs *)this_cpu_read(current_vmcs))
this_cpu_read     167 arch/x86/lib/delay.c 	unsigned long lpj = this_cpu_read(cpu_info.loops_per_jiffy) ? : loops_per_jiffy;
this_cpu_read      62 arch/x86/mm/tlb.c 		if (asid == this_cpu_read(cpu_tlbstate.loaded_mm_asid))
this_cpu_read      87 arch/x86/mm/tlb.c 	if (this_cpu_read(cpu_tlbstate.invalidate_other))
this_cpu_read      91 arch/x86/mm/tlb.c 		if (this_cpu_read(cpu_tlbstate.ctxs[asid].ctx_id) !=
this_cpu_read      96 arch/x86/mm/tlb.c 		*need_flush = (this_cpu_read(cpu_tlbstate.ctxs[asid].tlb_gen) <
this_cpu_read     134 arch/x86/mm/tlb.c 	struct mm_struct *loaded_mm = this_cpu_read(cpu_tlbstate.loaded_mm);
this_cpu_read     148 arch/x86/mm/tlb.c 	WARN_ON(!this_cpu_read(cpu_tlbstate.is_lazy));
this_cpu_read     249 arch/x86/mm/tlb.c 		prev_mm = this_cpu_read(cpu_tlbstate.last_user_mm_ibpb);
this_cpu_read     268 arch/x86/mm/tlb.c 		if (this_cpu_read(cpu_tlbstate.last_user_mm) != next->mm) {
this_cpu_read     278 arch/x86/mm/tlb.c 	struct mm_struct *real_prev = this_cpu_read(cpu_tlbstate.loaded_mm);
this_cpu_read     279 arch/x86/mm/tlb.c 	u16 prev_asid = this_cpu_read(cpu_tlbstate.loaded_mm_asid);
this_cpu_read     280 arch/x86/mm/tlb.c 	bool was_lazy = this_cpu_read(cpu_tlbstate.is_lazy);
this_cpu_read     334 arch/x86/mm/tlb.c 		VM_WARN_ON(this_cpu_read(cpu_tlbstate.ctxs[prev_asid].ctx_id) !=
this_cpu_read     362 arch/x86/mm/tlb.c 		if (this_cpu_read(cpu_tlbstate.ctxs[prev_asid].tlb_gen) ==
this_cpu_read     463 arch/x86/mm/tlb.c 	if (this_cpu_read(cpu_tlbstate.loaded_mm) == &init_mm)
this_cpu_read     485 arch/x86/mm/tlb.c 	struct mm_struct *mm = this_cpu_read(cpu_tlbstate.loaded_mm);
this_cpu_read     533 arch/x86/mm/tlb.c 	struct mm_struct *loaded_mm = this_cpu_read(cpu_tlbstate.loaded_mm);
this_cpu_read     534 arch/x86/mm/tlb.c 	u32 loaded_mm_asid = this_cpu_read(cpu_tlbstate.loaded_mm_asid);
this_cpu_read     536 arch/x86/mm/tlb.c 	u64 local_tlb_gen = this_cpu_read(cpu_tlbstate.ctxs[loaded_mm_asid].tlb_gen);
this_cpu_read     544 arch/x86/mm/tlb.c 	VM_WARN_ON(this_cpu_read(cpu_tlbstate.ctxs[loaded_mm_asid].ctx_id) !=
this_cpu_read     547 arch/x86/mm/tlb.c 	if (this_cpu_read(cpu_tlbstate.is_lazy)) {
this_cpu_read     651 arch/x86/mm/tlb.c 	if (f->mm && f->mm != this_cpu_read(cpu_tlbstate.loaded_mm))
this_cpu_read     790 arch/x86/mm/tlb.c 	if (mm == this_cpu_read(cpu_tlbstate.loaded_mm)) {
this_cpu_read     780 arch/x86/platform/uv/uv_nmi.c 		while (this_cpu_read(uv_cpu_nmi.state) != UV_NMI_STATE_DUMP)
this_cpu_read     905 arch/x86/platform/uv/uv_nmi.c 	if (!this_cpu_read(uv_cpu_nmi.pinging) && !uv_check_nmi(hub_nmi)) {
this_cpu_read     969 arch/x86/platform/uv/uv_nmi.c 	if (!this_cpu_read(uv_cpu_nmi.pinging)) {
this_cpu_read     857 arch/x86/xen/enlighten_pv.c 	unsigned long cr0 = this_cpu_read(xen_cr0_value);
this_cpu_read      32 arch/x86/xen/irq.c 	vcpu = this_cpu_read(xen_vcpu);
this_cpu_read      54 arch/x86/xen/irq.c 	vcpu = this_cpu_read(xen_vcpu);
this_cpu_read      73 arch/x86/xen/irq.c 	this_cpu_read(xen_vcpu)->evtchn_upcall_mask = 1;
this_cpu_read      89 arch/x86/xen/irq.c 	vcpu = this_cpu_read(xen_vcpu);
this_cpu_read     995 arch/x86/xen/mmu_pv.c 	if (this_cpu_read(cpu_tlbstate.loaded_mm) == mm)
this_cpu_read    1002 arch/x86/xen/mmu_pv.c 	if (this_cpu_read(xen_current_cr3) == __pa(mm->pgd))
this_cpu_read    1307 arch/x86/xen/mmu_pv.c 	this_cpu_read(xen_vcpu)->arch.cr2 = cr2;
this_cpu_read    1386 arch/x86/xen/mmu_pv.c 	return this_cpu_read(xen_cr3);
this_cpu_read      51 arch/x86/xen/multicalls.h 	local_irq_restore(this_cpu_read(xen_mc_irq_flags));
this_cpu_read      44 arch/x86/xen/suspend.c 		wrmsrl(MSR_IA32_SPEC_CTRL, this_cpu_read(spec_ctrl));
this_cpu_read     716 drivers/acpi/processor_throttling.c 	if ((this_cpu_read(cpu_info.x86_vendor) != X86_VENDOR_INTEL) ||
this_cpu_read     737 drivers/acpi/processor_throttling.c 	if ((this_cpu_read(cpu_info.x86_vendor) != X86_VENDOR_INTEL) ||
this_cpu_read     562 drivers/cpufreq/powernv-cpufreq.c 	chip = this_cpu_read(chip_info);
this_cpu_read     582 drivers/crypto/nx/nx-842-powernv.c 		txwin = this_cpu_read(cpu_txwin);
this_cpu_read     348 drivers/dma/dmaengine.c 	return this_cpu_read(channel_table[tx_type]->chan);
this_cpu_read     289 drivers/firmware/psci/psci_checker.c 	dev = this_cpu_read(cpuidle_devices);
this_cpu_read     157 drivers/input/gameport/gameport.c 	return (this_cpu_read(cpu_info.loops_per_jiffy) *
this_cpu_read      56 drivers/irqchip/irq-csky-mpintc.c 	(this_cpu_read(intcl_reg) + INTCL_CFGR) : (INTCG_base + INTCG_CICFGR)))
this_cpu_read      75 drivers/irqchip/irq-csky-mpintc.c 	void __iomem *reg_base = this_cpu_read(intcl_reg);
this_cpu_read      83 drivers/irqchip/irq-csky-mpintc.c 	void __iomem *reg_base = this_cpu_read(intcl_reg);
this_cpu_read      92 drivers/irqchip/irq-csky-mpintc.c 	void __iomem *reg_base = this_cpu_read(intcl_reg);
this_cpu_read      99 drivers/irqchip/irq-csky-mpintc.c 	void __iomem *reg_base = this_cpu_read(intcl_reg);
this_cpu_read     215 drivers/irqchip/irq-csky-mpintc.c 	void __iomem *reg_base = this_cpu_read(intcl_reg);
this_cpu_read    1108 drivers/nvdimm/region_devs.c 	idx = this_cpu_read(flush_idx);
this_cpu_read      86 drivers/staging/speakup/fakekey.c 	return this_cpu_read(reporting_keystroke);
this_cpu_read     439 drivers/xen/evtchn.c 	selected_cpu = this_cpu_read(bind_last_selected_cpu);
this_cpu_read      73 fs/eventfd.c   	if (WARN_ON_ONCE(this_cpu_read(eventfd_wake_count)))
this_cpu_read      64 include/crypto/internal/simd.h 	(may_use_simd() && !this_cpu_read(crypto_simd_disabled_for_test))
this_cpu_read      55 include/linux/context_tracking.h 	prev_ctx = this_cpu_read(context_tracking.state);
this_cpu_read      81 include/linux/context_tracking.h 		this_cpu_read(context_tracking.state) : CONTEXT_DISABLED;
this_cpu_read     734 include/linux/cpumask.h #define this_cpu_cpumask_var_ptr(x)	this_cpu_read(x)
this_cpu_read      49 include/linux/eventfd.h 	return this_cpu_read(eventfd_wake_count);
this_cpu_read     569 include/linux/interrupt.h 	return this_cpu_read(ksoftirqd);
this_cpu_read    3043 include/linux/netdevice.h 	return this_cpu_read(softnet_data.xmit.recursion);
this_cpu_read     343 kernel/bpf/helpers.c 	storage = this_cpu_read(bpf_cgroup_storage[stype]);
this_cpu_read     798 kernel/cpu.c   	kthread_unpark(this_cpu_read(cpuhp_state.thread));
this_cpu_read     318 kernel/printk/printk_safe.c 	if (this_cpu_read(printk_context) & PRINTK_NMI_CONTEXT_MASK)
this_cpu_read     366 kernel/printk/printk_safe.c 	if ((this_cpu_read(printk_context) & PRINTK_NMI_DIRECT_CONTEXT_MASK) &&
this_cpu_read     377 kernel/printk/printk_safe.c 	if (this_cpu_read(printk_context) & PRINTK_NMI_CONTEXT_MASK)
this_cpu_read     381 kernel/printk/printk_safe.c 	if (this_cpu_read(printk_context) & PRINTK_SAFE_CONTEXT_MASK)
this_cpu_read    5458 kernel/sched/fair.c 	int factor = this_cpu_read(sd_llc_size);
this_cpu_read     149 kernel/trace/ftrace.c 	if (tr && this_cpu_read(tr->trace_buffer.data->ftrace_ignore_pid))
this_cpu_read     818 kernel/trace/trace.c 	if (this_cpu_read(trace_buffered_event) == event) {
this_cpu_read    2413 kernel/trace/trace.c 		    this_cpu_read(trace_buffered_event) !=
this_cpu_read    2497 kernel/trace/trace.c 	    (entry = this_cpu_read(trace_buffered_event))) {
this_cpu_read    1061 kernel/trace/trace.h 	return !this_cpu_read(tr->trace_buffer.data->ftrace_ignore_pid);
this_cpu_read    1397 kernel/trace/trace.h 	if (this_cpu_read(trace_buffered_event) == event) {
this_cpu_read     574 kernel/trace/trace_events.c 	if (!this_cpu_read(tr->trace_buffer.data->ignore_pid))
this_cpu_read     590 kernel/trace/trace_events.c 	if (this_cpu_read(tr->trace_buffer.data->ignore_pid))
this_cpu_read      24 kernel/trace/trace_preemptirq.c 	if (this_cpu_read(tracing_irq_cpu)) {
this_cpu_read      38 kernel/trace/trace_preemptirq.c 	if (!this_cpu_read(tracing_irq_cpu)) {
this_cpu_read      52 kernel/trace/trace_preemptirq.c 	if (this_cpu_read(tracing_irq_cpu)) {
this_cpu_read      66 kernel/trace/trace_preemptirq.c 	if (!this_cpu_read(tracing_irq_cpu)) {
this_cpu_read     199 kernel/watchdog_hld.c 	perf_event_enable(this_cpu_read(watchdog_ev));
this_cpu_read     207 kernel/watchdog_hld.c 	struct perf_event *event = this_cpu_read(watchdog_ev);
this_cpu_read     292 kernel/watchdog_hld.c 		perf_event_release_kernel(this_cpu_read(watchdog_ev));
this_cpu_read     443 lib/sbitmap.c  	hint = this_cpu_read(*sbq->alloc_hint);
this_cpu_read     474 lib/sbitmap.c  	hint = this_cpu_read(*sbq->alloc_hint);
this_cpu_read      65 mm/kasan/tags.c 	u32 state = this_cpu_read(prng_state);
this_cpu_read    5389 mm/page_alloc.c 			K(this_cpu_read(zone->pageset->pcp.count)),
this_cpu_read    2260 mm/slub.c      		oldpage = this_cpu_read(s->cpu_slab->partial);
this_cpu_read    2705 mm/slub.c      		tid = this_cpu_read(s->cpu_slab->tid);
this_cpu_read    2979 mm/slub.c      		tid = this_cpu_read(s->cpu_slab->tid);
this_cpu_read     500 mm/vmstat.c    		t = this_cpu_read(pcp->stat_threshold);
this_cpu_read     502 mm/vmstat.c    		o = this_cpu_read(*p);
this_cpu_read     557 mm/vmstat.c    		t = this_cpu_read(pcp->stat_threshold);
this_cpu_read     559 mm/vmstat.c    		o = this_cpu_read(*p);
this_cpu_read     204 net/ipv4/icmp.c 	return this_cpu_read(*net->ipv4.icmp_sk);
this_cpu_read      55 net/ipv4/netfilter/nf_dup_ipv4.c 	if (this_cpu_read(nf_skb_duplicated))
this_cpu_read     773 net/ipv4/tcp_ipv4.c 	ctl_sk = this_cpu_read(*net->ipv4.tcp_sk);
this_cpu_read     871 net/ipv4/tcp_ipv4.c 	ctl_sk = this_cpu_read(*net->ipv4.tcp_sk);
this_cpu_read      80 net/ipv6/icmp.c 	return this_cpu_read(*net->ipv6.icmp_sk);
this_cpu_read      50 net/ipv6/netfilter/nf_dup_ipv6.c 	if (this_cpu_read(nf_skb_duplicated))
this_cpu_read    1408 net/ipv6/route.c 	pcpu_rt = this_cpu_read(*res->nh->rt6i_pcpu);
this_cpu_read     261 net/netfilter/nft_ct.c 	ct = this_cpu_read(nft_ct_pcpu_template);
this_cpu_read      84 net/openvswitch/actions.c 	int level = this_cpu_read(exec_actions_level);
this_cpu_read     282 net/xfrm/xfrm_ipcomp.c 		tfm = this_cpu_read(*pos->tfms);
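
The call sites above all follow one pattern: a per-CPU variable declared with DEFINE_PER_CPU (or allocated with alloc_percpu) is read for the executing CPU without going through per_cpu_ptr(). A minimal sketch of that pattern is shown below; the per-CPU counters example_hits/example_misses and the helpers example_record_hit()/example_sum() are hypothetical and are not among the symbols in the listing.

	#include <linux/percpu.h>
	#include <linux/preempt.h>

	/*
	 * Hypothetical per-CPU counters, declared the same way as the
	 * per-CPU variables referenced in the call sites above.
	 */
	static DEFINE_PER_CPU(unsigned long, example_hits);
	static DEFINE_PER_CPU(unsigned long, example_misses);

	static void example_record_hit(void)
	{
		/* A single this_cpu_*() access is safe against preemption on its own. */
		this_cpu_inc(example_hits);
	}

	static unsigned long example_sum(void)
	{
		unsigned long hits, misses;

		/*
		 * Preemption is disabled here only because the two reads must
		 * observe the same CPU's copies; an isolated this_cpu_read(),
		 * as in most of the call sites listed above, needs no explicit
		 * protection.
		 */
		preempt_disable();
		hits   = this_cpu_read(example_hits);
		misses = this_cpu_read(example_misses);
		preempt_enable();

		return hits + misses;
	}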