__this_cpu_read    61 arch/alpha/kernel/time.c #define test_irq_work_pending()      __this_cpu_read(irq_work_pending)
__this_cpu_read    27 arch/arm64/include/asm/arch_timer.h 		__wa = __this_cpu_read(timer_unstable_counter_workaround); \
__this_cpu_read    34 arch/arm64/include/asm/arch_timer.h 		__wa = __this_cpu_read(timer_unstable_counter_workaround); \
__this_cpu_read   196 arch/arm64/kernel/fpsimd.c 	return !preemptible() && __this_cpu_read(fpsimd_context_busy);
__this_cpu_read  1000 arch/arm64/kernel/fpsimd.c 	wrong_task = __this_cpu_read(fpsimd_last_state.st) !=
__this_cpu_read  1355 arch/arm64/kernel/fpsimd.c 		    likely(__this_cpu_read(efi_sve_state_used))) {
__this_cpu_read   112 arch/arm64/kvm/debug.c 	vcpu->arch.mdcr_el2 = __this_cpu_read(mdcr_el2) & MDCR_EL2_HPMN_MASK;
__this_cpu_read   881 arch/arm64/mm/fault.c 	if (!__this_cpu_read(__in_cortex_a76_erratum_1463225_wa))
__this_cpu_read   167 arch/ia64/include/asm/hw_irq.h 	return __this_cpu_read(vector_irq[vec]);
__this_cpu_read    36 arch/ia64/include/asm/switch_to.h # define PERFMON_IS_SYSWIDE() (__this_cpu_read(pfm_syst_info) & 0x1)
__this_cpu_read   330 arch/ia64/kernel/irq_ia64.c 		irq = __this_cpu_read(vector_irq[vector]);
__this_cpu_read  1344 arch/ia64/kernel/mca.c 	if (__this_cpu_read(ia64_mca_tr_reload)) {
__this_cpu_read   277 arch/ia64/kernel/process.c 	info = __this_cpu_read(pfm_syst_info);
__this_cpu_read   297 arch/ia64/kernel/process.c 	info = __this_cpu_read(pfm_syst_info);
__this_cpu_read  1294 arch/mips/cavium-octeon/octeon-irq.c 	ciu_sum &= __this_cpu_read(octeon_irq_ciu0_en_mirror);
__this_cpu_read  1311 arch/mips/cavium-octeon/octeon-irq.c 	ciu_sum &= __this_cpu_read(octeon_irq_ciu1_en_mirror);
__this_cpu_read  2578 arch/mips/cavium-octeon/octeon-irq.c 	ciu3_info = __this_cpu_read(octeon_ciu3_info);
__this_cpu_read  2642 arch/mips/cavium-octeon/octeon-irq.c 	ciu3_info = __this_cpu_read(octeon_ciu3_info);
__this_cpu_read  2753 arch/mips/cavium-octeon/octeon-irq.c 	ciu3_info = __this_cpu_read(octeon_ciu3_info);
__this_cpu_read  2243 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2emustats.movs),
__this_cpu_read  2244 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2bdemustats.movs));
__this_cpu_read  2246 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2emustats.hilo),
__this_cpu_read  2247 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2bdemustats.hilo));
__this_cpu_read  2249 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2emustats.muls),
__this_cpu_read  2250 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2bdemustats.muls));
__this_cpu_read  2252 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2emustats.divs),
__this_cpu_read  2253 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2bdemustats.divs));
__this_cpu_read  2255 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2emustats.dsps),
__this_cpu_read  2256 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2bdemustats.dsps));
__this_cpu_read  2258 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2emustats.bops),
__this_cpu_read  2259 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2bdemustats.bops));
__this_cpu_read  2261 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2emustats.traps),
__this_cpu_read  2262 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2bdemustats.traps));
__this_cpu_read  2264 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2emustats.fpus),
__this_cpu_read  2265 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2bdemustats.fpus));
__this_cpu_read  2267 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2emustats.loads),
__this_cpu_read  2268 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2bdemustats.loads));
__this_cpu_read  2270 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2emustats.stores),
__this_cpu_read  2271 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2bdemustats.stores));
__this_cpu_read  2273 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2emustats.llsc),
__this_cpu_read  2274 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2bdemustats.llsc));
__this_cpu_read  2276 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2emustats.dsemul),
__this_cpu_read  2277 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2bdemustats.dsemul));
__this_cpu_read  2279 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2bremustats.jrs));
__this_cpu_read  2281 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2bremustats.bltzl));
__this_cpu_read  2283 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2bremustats.bgezl));
__this_cpu_read  2285 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2bremustats.bltzll));
__this_cpu_read  2287 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2bremustats.bgezll));
__this_cpu_read  2289 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2bremustats.bltzal));
__this_cpu_read  2291 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2bremustats.bgezal));
__this_cpu_read  2293 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2bremustats.beql));
__this_cpu_read  2295 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2bremustats.bnel));
__this_cpu_read  2297 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2bremustats.blezl));
__this_cpu_read  2299 arch/mips/kernel/mips-r2-to-r6-emul.c 		   (unsigned long)__this_cpu_read(mipsr2bremustats.bgtzl));
__this_cpu_read   337 arch/mips/kernel/smp-bmips.c 	action = __this_cpu_read(ipi_action_mask);
__this_cpu_read   269 arch/powerpc/kernel/hw_breakpoint.c 	bp = __this_cpu_read(bp_per_reg);
__this_cpu_read   178 arch/powerpc/kernel/mce.c 	int index = __this_cpu_read(mce_nest_count) - 1;
__this_cpu_read   263 arch/powerpc/kernel/mce.c 	while (__this_cpu_read(mce_ue_count) > 0) {
__this_cpu_read   264 arch/powerpc/kernel/mce.c 		index = __this_cpu_read(mce_ue_count) - 1;
__this_cpu_read   311 arch/powerpc/kernel/mce.c 	while (__this_cpu_read(mce_queue_count) > 0) {
__this_cpu_read   312 arch/powerpc/kernel/mce.c 		index = __this_cpu_read(mce_queue_count) - 1;
__this_cpu_read   400 arch/powerpc/kernel/sysfs.c 	if (__this_cpu_read(pmcs_enabled))
__this_cpu_read   530 arch/powerpc/kernel/time.c #define test_irq_work_pending()		__this_cpu_read(irq_work_pending)
__this_cpu_read   107 arch/powerpc/kvm/e500.c 	    __this_cpu_read(pcpu_sids.entry[entry->val]) == entry &&
__this_cpu_read   143 arch/powerpc/kvm/e500mc.c 	    __this_cpu_read(last_vcpu_of_lpid[get_lpid(vcpu)]) != vcpu) {
__this_cpu_read   157 arch/powerpc/lib/code-patching.c 	text_poke_addr = (unsigned long)__this_cpu_read(text_poke_area)->addr;
__this_cpu_read  1372 arch/powerpc/perf/hv-24x7.c 	txn_flags = __this_cpu_read(hv_24x7_txn_flags);
__this_cpu_read  1385 arch/powerpc/perf/hv-24x7.c 		if (__this_cpu_read(hv_24x7_txn_err))
__this_cpu_read  1453 arch/powerpc/perf/hv-24x7.c 	WARN_ON_ONCE(__this_cpu_read(hv_24x7_txn_flags));
__this_cpu_read  1498 arch/powerpc/perf/hv-24x7.c 	txn_flags = __this_cpu_read(hv_24x7_txn_flags);
__this_cpu_read  1505 arch/powerpc/perf/hv-24x7.c 	ret = __this_cpu_read(hv_24x7_txn_err);
__this_cpu_read  1549 arch/powerpc/perf/hv-24x7.c 	WARN_ON_ONCE(!__this_cpu_read(hv_24x7_txn_flags));
__this_cpu_read   203 arch/powerpc/platforms/pseries/iommu.c 	tcep = __this_cpu_read(tce_page);
__this_cpu_read   419 arch/powerpc/platforms/pseries/iommu.c 	tcep = __this_cpu_read(tce_page);
__this_cpu_read   359 arch/powerpc/platforms/pseries/lpar.c 	u64 i = __this_cpu_read(dtl_entry_ridx);
__this_cpu_read   297 arch/powerpc/sysdev/xive/common.c 	struct xive_cpu *xc = __this_cpu_read(xive_cpu);
__this_cpu_read   402 arch/powerpc/sysdev/xive/common.c 	struct xive_cpu *xc = __this_cpu_read(xive_cpu);
__this_cpu_read  1097 arch/powerpc/sysdev/xive/common.c 	struct xive_cpu *xc = __this_cpu_read(xive_cpu);
__this_cpu_read  1359 arch/powerpc/sysdev/xive/common.c 	struct xive_cpu *xc = __this_cpu_read(xive_cpu);
__this_cpu_read  1452 arch/powerpc/sysdev/xive/common.c 	struct xive_cpu *xc = __this_cpu_read(xive_cpu);
__this_cpu_read  1472 arch/powerpc/sysdev/xive/common.c 	struct xive_cpu *xc = __this_cpu_read(xive_cpu);
__this_cpu_read  1485 arch/powerpc/sysdev/xive/common.c 	struct xive_cpu *xc = __this_cpu_read(xive_cpu);
__this_cpu_read   243 arch/s390/kernel/kprobes.c 	kcb->prev_kprobe.kp = __this_cpu_read(current_kprobe);
__this_cpu_read   106 arch/s390/kernel/vtime.c 	u64 mult = __this_cpu_read(mt_scaling_mult);
__this_cpu_read   107 arch/s390/kernel/vtime.c 	u64 div = __this_cpu_read(mt_scaling_div);
__this_cpu_read   384 arch/sh/kernel/kprobes.c 		addr = __this_cpu_read(saved_current_opcode.addr);
__this_cpu_read   104 arch/sparc/kernel/iommu-common.c 	unsigned int pool_hash = __this_cpu_read(iommu_hash_common);
__this_cpu_read   102 arch/sparc/kernel/nmi.c 	if (__this_cpu_read(nmi_touch)) {
__this_cpu_read   106 arch/sparc/kernel/nmi.c 	if (!touched && __this_cpu_read(last_irq_sum) == sum) {
__this_cpu_read   108 arch/sparc/kernel/nmi.c 		if (__this_cpu_read(alert_counter) == 30 * nmi_hz)
__this_cpu_read   115 arch/sparc/kernel/nmi.c 	if (__this_cpu_read(wd_enabled)) {
__this_cpu_read   155 arch/sparc/kernel/nmi.c 	if (!__this_cpu_read(wd_enabled))
__this_cpu_read   212 arch/sparc/kernel/nmi.c 	if (__this_cpu_read(wd_enabled))
__this_cpu_read   226 arch/sparc/kernel/nmi.c 	if (!__this_cpu_read(wd_enabled))
__this_cpu_read  1457 arch/sparc/kernel/smp_64.c 	if (!__this_cpu_read(poke))
__this_cpu_read  1256 arch/x86/events/core.c 	if (__this_cpu_read(cpu_hw_events.enabled))
__this_cpu_read  1930 arch/x86/events/core.c 	__this_cpu_sub(cpu_hw_events.n_added, __this_cpu_read(cpu_hw_events.n_txn));
__this_cpu_read  1931 arch/x86/events/core.c 	__this_cpu_sub(cpu_hw_events.n_events, __this_cpu_read(cpu_hw_events.n_txn));
__this_cpu_read  2237 arch/x86/events/intel/core.c 		if (!__this_cpu_read(cpu_hw_events.enabled))
__this_cpu_read  2294 arch/x86/events/intel/core.c 	struct debug_store *ds = __this_cpu_read(cpu_hw_events.ds);
__this_cpu_read  2096 arch/x86/events/intel/ds.c 	struct debug_store *ds = __this_cpu_read(cpu_hw_events.ds);
__this_cpu_read   838 arch/x86/events/perf_event.h 	u64 disable_mask = __this_cpu_read(cpu_hw_events.perf_ctr_virt_mask);
__this_cpu_read   142 arch/x86/include/asm/cpu_entry_area.h 	CEA_ESTACK_TOP(__this_cpu_read(cea_exception_stacks), name)
__this_cpu_read    90 arch/x86/include/asm/debugreg.h 	return __this_cpu_read(cpu_dr7) & DR_GLOBAL_ENABLE_MASK;
__this_cpu_read    77 arch/x86/include/asm/hardirq.h 	return __this_cpu_read(irq_stat.kvm_cpu_l1tf_flush_l1d);
__this_cpu_read    19 arch/x86/include/asm/irq_regs.h 	return __this_cpu_read(irq_regs);
__this_cpu_read   168 arch/x86/include/asm/smp.h #define __smp_processor_id() __this_cpu_read(cpu_number)
__this_cpu_read    81 arch/x86/kernel/acpi/sleep.c 	if (__this_cpu_read(cpu_info.cpuid_level) >= 0) {
__this_cpu_read   100 arch/x86/kernel/apic/apic_numachip.c 	local_apicid = __this_cpu_read(x86_cpu_to_apicid);
__this_cpu_read   611 arch/x86/kernel/apic/x2apic_uv_x.c 	id = x | __this_cpu_read(x2apic_extra_bits);
__this_cpu_read   267 arch/x86/kernel/cpu/cacheinfo.c 		size_in_kb = __this_cpu_read(cpu_info.x86_cache_size);
__this_cpu_read   289 arch/x86/kernel/cpu/cacheinfo.c 	eax->split.num_cores_on_die = __this_cpu_read(cpu_info.x86_max_cores) - 1;
__this_cpu_read   376 arch/x86/kernel/cpu/mce/core.c 	unsigned bank = __this_cpu_read(injectm.bank);
__this_cpu_read   396 arch/x86/kernel/cpu/mce/core.c 	if (__this_cpu_read(injectm.finished)) {
__this_cpu_read   419 arch/x86/kernel/cpu/mce/core.c 	if (__this_cpu_read(injectm.finished)) {
__this_cpu_read  1421 arch/x86/kernel/cpu/mce/core.c 	iv = __this_cpu_read(mce_next_interval);
__this_cpu_read  1452 arch/x86/kernel/cpu/mce/core.c 	unsigned long iv = __this_cpu_read(mce_next_interval);
__this_cpu_read   129 arch/x86/kernel/cpu/mce/intel.c 	if (__this_cpu_read(cmci_storm_state) == CMCI_STORM_NONE)
__this_cpu_read   176 arch/x86/kernel/cpu/mce/intel.c 	    (__this_cpu_read(cmci_storm_state) == CMCI_STORM_ACTIVE)) {
__this_cpu_read   181 arch/x86/kernel/cpu/mce/intel.c 	switch (__this_cpu_read(cmci_storm_state)) {
__this_cpu_read   215 arch/x86/kernel/cpu/mce/intel.c 	unsigned int cnt = __this_cpu_read(cmci_storm_cnt);
__this_cpu_read   216 arch/x86/kernel/cpu/mce/intel.c 	unsigned long ts = __this_cpu_read(cmci_time_stamp);
__this_cpu_read   220 arch/x86/kernel/cpu/mce/intel.c 	if (__this_cpu_read(cmci_storm_state) != CMCI_STORM_NONE)
__this_cpu_read    96 arch/x86/kernel/dumpstack_64.c 	begin = (unsigned long)__this_cpu_read(cea_exception_stacks);
__this_cpu_read   413 arch/x86/kernel/hw_breakpoint.c 	set_debugreg(__this_cpu_read(cpu_debugreg[0]), 0);
__this_cpu_read   414 arch/x86/kernel/hw_breakpoint.c 	set_debugreg(__this_cpu_read(cpu_debugreg[1]), 1);
__this_cpu_read   415 arch/x86/kernel/hw_breakpoint.c 	set_debugreg(__this_cpu_read(cpu_debugreg[2]), 2);
__this_cpu_read   416 arch/x86/kernel/hw_breakpoint.c 	set_debugreg(__this_cpu_read(cpu_debugreg[3]), 3);
__this_cpu_read   418 arch/x86/kernel/hw_breakpoint.c 	set_debugreg(__this_cpu_read(cpu_dr7), 7);
__this_cpu_read   245 arch/x86/kernel/irq.c 	desc = __this_cpu_read(vector_irq[vector]);
__this_cpu_read   373 arch/x86/kernel/irq.c 		if (IS_ERR_OR_NULL(__this_cpu_read(vector_irq[vector])))
__this_cpu_read   378 arch/x86/kernel/irq.c 			desc = __this_cpu_read(vector_irq[vector]);
__this_cpu_read   389 arch/x86/kernel/irq.c 		if (__this_cpu_read(vector_irq[vector]) != VECTOR_RETRIGGERED)
__this_cpu_read    79 arch/x86/kernel/irq_32.c 	irqstk = __this_cpu_read(hardirq_stack_ptr);
__this_cpu_read   139 arch/x86/kernel/irq_32.c 	irqstk = __this_cpu_read(softirq_stack_ptr);
__this_cpu_read   234 arch/x86/kernel/kvm.c 	if (__this_cpu_read(apf_reason.enabled)) {
__this_cpu_read   235 arch/x86/kernel/kvm.c 		reason = __this_cpu_read(apf_reason.reason);
__this_cpu_read   344 arch/x86/kernel/kvm.c 	if (!__this_cpu_read(apf_reason.enabled))
__this_cpu_read   329 arch/x86/kernel/nmi.c 	if (regs->ip == __this_cpu_read(last_nmi_rip))
__this_cpu_read   415 arch/x86/kernel/nmi.c 	if (b2b && __this_cpu_read(swallow_nmi))
__this_cpu_read   494 arch/x86/kernel/nmi.c 	struct cea_exception_stacks *cs = __this_cpu_read(cea_exception_stacks);
__this_cpu_read   498 arch/x86/kernel/nmi.c 	if (__this_cpu_read(debug_stack_usage))
__this_cpu_read  1676 arch/x86/kernel/smpboot.c 	if (__this_cpu_read(cpu_info.cpuid_level) < CPUID_MWAIT_LEAF)
__this_cpu_read  1734 arch/x86/kernel/smpboot.c 	if (__this_cpu_read(cpu_info.x86) >= 4)
__this_cpu_read  2335 arch/x86/kvm/svm.c 		if (tsc_ratio != __this_cpu_read(current_tsc_ratio)) {
__this_cpu_read  2912 arch/x86/kvm/vmx/nested.c 		set_debugreg(__this_cpu_read(cpu_dr7), 7);
__this_cpu_read  2299 arch/x86/kvm/x86.c 	if (__this_cpu_read(cpu_tsc_khz)) {
__this_cpu_read  2300 arch/x86/kvm/x86.c 		kvm_get_time_scale(NSEC_PER_SEC, __this_cpu_read(cpu_tsc_khz) * 1000LL,
__this_cpu_read  2396 arch/x86/kvm/x86.c 	tgt_tsc_khz = __this_cpu_read(cpu_tsc_khz);
__this_cpu_read  7158 arch/x86/kvm/x86.c 	return __this_cpu_read(current_vcpu) != NULL;
__this_cpu_read  7165 arch/x86/kvm/x86.c 	if (__this_cpu_read(current_vcpu))
__this_cpu_read  7166 arch/x86/kvm/x86.c 		user_mode = kvm_x86_ops->get_cpl(__this_cpu_read(current_vcpu));
__this_cpu_read  7175 arch/x86/kvm/x86.c 	if (__this_cpu_read(current_vcpu))
__this_cpu_read  7176 arch/x86/kvm/x86.c 		ip = kvm_rip_read(__this_cpu_read(current_vcpu));
__this_cpu_read  7183 arch/x86/kvm/x86.c 	struct kvm_vcpu *vcpu = __this_cpu_read(current_vcpu);
__this_cpu_read   140 arch/x86/oprofile/nmi_int.c 	return __this_cpu_read(switch_index) + phys;
__this_cpu_read    87 arch/x86/oprofile/op_model_ppro.c 			__this_cpu_read(cpu_info.x86) == 6 &&
__this_cpu_read    88 arch/x86/oprofile/op_model_ppro.c 				__this_cpu_read(cpu_info.x86_model) == 15)) {
__this_cpu_read   710 arch/x86/xen/enlighten_pv.c 	start = __this_cpu_read(idt_desc.address);
__this_cpu_read   711 arch/x86/xen/enlighten_pv.c 	end = start + __this_cpu_read(idt_desc.size) + 1;
__this_cpu_read    39 arch/x86/xen/spinlock.c 	int irq = __this_cpu_read(lock_kicker_irq);
__this_cpu_read    51 arch/x86/xen/time.c 	src = &__this_cpu_read(xen_vcpu)->time;
__this_cpu_read   503 arch/x86/xen/time.c 	pvti = &__this_cpu_read(xen_vcpu)->time;
__this_cpu_read   760 drivers/acpi/processor_idle.c 	pr = __this_cpu_read(processors);
__this_cpu_read   800 drivers/acpi/processor_idle.c 		struct acpi_processor *pr = __this_cpu_read(processors);
__this_cpu_read  1247 drivers/acpi/processor_idle.c 	pr = __this_cpu_read(processors);
__this_cpu_read   595 drivers/clocksource/arm_arch_timer.c 	__wa = __this_cpu_read(timer_unstable_counter_workaround);
__this_cpu_read  1031 drivers/clocksource/arm_arch_timer.c 		arch_timer_set_cntkctl(__this_cpu_read(saved_cntkctl));
__this_cpu_read    59 drivers/clocksource/numachip.c 	unsigned local_apicid = __this_cpu_read(x86_cpu_to_apicid) & 0xff;
__this_cpu_read  1116 drivers/cpufreq/powernow-k8.c 	struct powernow_k8_data *data = __this_cpu_read(powernow_data);
__this_cpu_read    30 drivers/cpuidle/cpuidle-psci.c 	u32 *state = __this_cpu_read(psci_power_state);
__this_cpu_read    61 drivers/cpuidle/cpuidle.c 	struct cpuidle_device *dev = __this_cpu_read(cpuidle_devices);
__this_cpu_read   638 drivers/dma/fsl-qdma.c 		   __this_cpu_read(pre.queue) &&
__this_cpu_read   640 drivers/dma/fsl-qdma.c 			__this_cpu_read(pre.addr))
__this_cpu_read   658 drivers/dma/fsl-qdma.c 				__this_cpu_read(pre.addr)) {
__this_cpu_read    82 drivers/irqchip/irq-xtensa-mx.c 	mask = __this_cpu_read(cached_irq_mask) & ~mask;
__this_cpu_read   100 drivers/irqchip/irq-xtensa-mx.c 	mask |= __this_cpu_read(cached_irq_mask);
__this_cpu_read   178 drivers/soc/qcom/spm.c 	struct spm_driver_data *drv = __this_cpu_read(cpu_spm_drv);
__this_cpu_read   195 drivers/soc/qcom/spm.c 	return __this_cpu_read(qcom_idle_ops)[index]();
__this_cpu_read   121 drivers/xen/events/events_2l.c 		struct vcpu_info *vcpu_info = __this_cpu_read(xen_vcpu);
__this_cpu_read   171 drivers/xen/events/events_2l.c 	struct vcpu_info *vcpu_info = __this_cpu_read(xen_vcpu);
__this_cpu_read   190 drivers/xen/events/events_2l.c 	start_word_idx = __this_cpu_read(current_word_idx);
__this_cpu_read   191 drivers/xen/events/events_2l.c 	start_bit_idx = __this_cpu_read(current_bit_idx);
__this_cpu_read  1220 drivers/xen/events/events_base.c 	struct vcpu_info *vcpu_info = __this_cpu_read(xen_vcpu);
__this_cpu_read  1234 drivers/xen/events/events_base.c 		count = __this_cpu_read(xed_nesting_count);
__this_cpu_read    29 drivers/xen/preempt.c 	if (unlikely(__this_cpu_read(xen_in_preemptible_hcall)
__this_cpu_read  1263 fs/buffer.c    		struct buffer_head *bh = __this_cpu_read(bh_lrus.bhs[i]);
__this_cpu_read  1270 fs/buffer.c    						__this_cpu_read(bh_lrus.bhs[i - 1]));
__this_cpu_read    21 include/asm-generic/irq_regs.h 	return __this_cpu_read(__irq_regs);
__this_cpu_read    28 include/asm-generic/irq_regs.h 	old_regs = __this_cpu_read(__irq_regs);
__this_cpu_read    36 include/linux/context_tracking_state.h 	return __this_cpu_read(context_tracking.active);
__this_cpu_read    41 include/linux/context_tracking_state.h 	return __this_cpu_read(context_tracking.state) == CONTEXT_USER;
__this_cpu_read   163 include/linux/cpuidle.h {return __this_cpu_read(cpuidle_devices); }
__this_cpu_read   129 include/linux/highmem.h 	return __this_cpu_read(__kmap_atomic_idx) - 1;
__this_cpu_read   491 include/linux/interrupt.h #define local_softirq_pending()	(__this_cpu_read(local_softirq_pending_ref))
__this_cpu_read   340 include/linux/kprobes.h 	return (__this_cpu_read(current_kprobe));
__this_cpu_read  3049 include/linux/netdevice.h 	return unlikely(__this_cpu_read(softnet_data.xmit.recursion) >
__this_cpu_read  4424 include/linux/netdevice.h 	return __this_cpu_read(softnet_data.xmit.more);
__this_cpu_read   371 include/linux/netfilter/x_tables.h 	addend = (__this_cpu_read(xt_recseq.sequence) + 1) & 1;
__this_cpu_read   286 kernel/bpf/helpers.c 	flags = __this_cpu_read(irqsave_flags);
__this_cpu_read    70 kernel/context_tracking.c 	if ( __this_cpu_read(context_tracking.state) != state) {
__this_cpu_read    71 kernel/context_tracking.c 		if (__this_cpu_read(context_tracking.active)) {
__this_cpu_read   150 kernel/context_tracking.c 	if (__this_cpu_read(context_tracking.state) == state) {
__this_cpu_read   151 kernel/context_tracking.c 		if (__this_cpu_read(context_tracking.active)) {
__this_cpu_read   521 kernel/events/core.c 	running_len = __this_cpu_read(running_sample_length);
__this_cpu_read  3323 kernel/events/core.c 	if (__this_cpu_read(perf_sched_cb_usages))
__this_cpu_read  3590 kernel/events/core.c 	if (__this_cpu_read(perf_sched_cb_usages))
__this_cpu_read  8144 kernel/events/core.c 	seq = __this_cpu_read(perf_throttled_seq);
__this_cpu_read  9855 kernel/events/core.c 	unsigned int flags = __this_cpu_read(nop_txn_flags);
__this_cpu_read  9868 kernel/events/core.c 	unsigned int flags =  __this_cpu_read(nop_txn_flags);
__this_cpu_read  1150 kernel/kprobes.c 	struct kprobe *cur = __this_cpu_read(kprobe_instance);
__this_cpu_read   392 kernel/rcu/tree.c 	RCU_LOCKDEP_WARN(__this_cpu_read(rcu_data.dynticks_nesting) < 0,
__this_cpu_read   394 kernel/rcu/tree.c 	RCU_LOCKDEP_WARN(__this_cpu_read(rcu_data.dynticks_nmi_nesting) <= 0,
__this_cpu_read   398 kernel/rcu/tree.c 	if (__this_cpu_read(rcu_data.dynticks_nmi_nesting) != 1)
__this_cpu_read   402 kernel/rcu/tree.c 	return __this_cpu_read(rcu_data.dynticks_nesting) == 0;
__this_cpu_read  2313 kernel/rcu/tree.c 	rnp = __this_cpu_read(rcu_data.mynode);
__this_cpu_read  2407 kernel/rcu/tree.c 	t = __this_cpu_read(rcu_data.rcu_cpu_kthread_task);
__this_cpu_read  2409 kernel/rcu/tree.c 		rcu_wake_cond(t, __this_cpu_read(rcu_data.rcu_cpu_kthread_status));
__this_cpu_read  2433 kernel/rcu/tree.c 	return __this_cpu_read(rcu_data.rcu_cpu_has_work);
__this_cpu_read   718 kernel/rcu/tree_exp.h 	    __this_cpu_read(rcu_data.cpu_no_qs.b.exp))
__this_cpu_read   741 kernel/rcu/tree_exp.h 	    __this_cpu_read(rcu_data.cpu_no_qs.b.exp)) {
__this_cpu_read   262 kernel/rcu/tree_plugin.h 	if (__this_cpu_read(rcu_data.cpu_no_qs.s)) {
__this_cpu_read   264 kernel/rcu/tree_plugin.h 				       __this_cpu_read(rcu_data.gp_seq),
__this_cpu_read   552 kernel/rcu/tree_plugin.h 	return (__this_cpu_read(rcu_data.exp_deferred_qs) ||
__this_cpu_read   706 kernel/rcu/tree_plugin.h 	    __this_cpu_read(rcu_data.core_needs_qs) &&
__this_cpu_read   707 kernel/rcu/tree_plugin.h 	    __this_cpu_read(rcu_data.cpu_no_qs.b.norm) &&
__this_cpu_read   800 kernel/rcu/tree_plugin.h 	if (!__this_cpu_read(rcu_data.cpu_no_qs.s))
__this_cpu_read   803 kernel/rcu/tree_plugin.h 			       __this_cpu_read(rcu_data.gp_seq), TPS("cpuqs"));
__this_cpu_read   805 kernel/rcu/tree_plugin.h 	if (!__this_cpu_read(rcu_data.cpu_no_qs.b.exp))
__this_cpu_read  1096 kernel/rcu/tree_plugin.h 	return __this_cpu_read(rcu_data.rcu_cpu_kthread_task) == current;
__this_cpu_read    73 kernel/softirq.c 	struct task_struct *tsk = __this_cpu_read(ksoftirqd);
__this_cpu_read    87 kernel/softirq.c 	struct task_struct *tsk = __this_cpu_read(ksoftirqd);
__this_cpu_read   304 kernel/softirq.c 	if (__this_cpu_read(ksoftirqd) == current)
__this_cpu_read   651 kernel/softirq.c 		*__this_cpu_read(tasklet_vec.tail) = per_cpu(tasklet_vec, cpu).head;
__this_cpu_read   659 kernel/softirq.c 		*__this_cpu_read(tasklet_hi_vec.tail) = per_cpu(tasklet_hi_vec, cpu).head;
__this_cpu_read    71 kernel/time/tick-common.c 	struct clock_event_device *dev = __this_cpu_read(tick_cpu_device.evtdev);
__this_cpu_read    25 kernel/time/tick-oneshot.c 	struct clock_event_device *dev = __this_cpu_read(tick_cpu_device.evtdev);
__this_cpu_read    52 kernel/time/tick-oneshot.c 	struct clock_event_device *dev = __this_cpu_read(tick_cpu_device.evtdev);
__this_cpu_read   112 kernel/time/tick-oneshot.c 	ret = __this_cpu_read(tick_cpu_device.mode) == TICKDEV_MODE_ONESHOT;
__this_cpu_read   747 kernel/time/tick-sched.c 	struct clock_event_device *dev = __this_cpu_read(tick_cpu_device.evtdev);
__this_cpu_read  1054 kernel/time/tick-sched.c 	return __this_cpu_read(tick_cpu_device.evtdev)->next_event;
__this_cpu_read  1065 kernel/time/tick-sched.c 	struct clock_event_device *dev = __this_cpu_read(tick_cpu_device.evtdev);
__this_cpu_read  2251 kernel/trace/trace.c 	if (!__this_cpu_read(trace_taskinfo_save))
__this_cpu_read  2960 kernel/trace/trace.c 	if (__this_cpu_read(user_stack_count))
__this_cpu_read   301 kernel/trace/trace_stack.c 	if (__this_cpu_read(disable_stack_tracer) != 1)
__this_cpu_read   328 kernel/watchdog.c 	unsigned long hrint = __this_cpu_read(hrtimer_interrupts);
__this_cpu_read   330 kernel/watchdog.c 	if (__this_cpu_read(hrtimer_interrupts_saved) == hrint)
__this_cpu_read   356 kernel/watchdog.c 			 __this_cpu_read(hrtimer_interrupts));
__this_cpu_read   366 kernel/watchdog.c 	unsigned long touch_ts = __this_cpu_read(watchdog_touch_ts);
__this_cpu_read   389 kernel/watchdog.c 		if (unlikely(__this_cpu_read(softlockup_touch_sync))) {
__this_cpu_read   421 kernel/watchdog.c 		if (__this_cpu_read(soft_watchdog_warn) == true) {
__this_cpu_read   430 kernel/watchdog.c 			if (__this_cpu_read(softlockup_task_ptr_saved) !=
__this_cpu_read    80 kernel/watchdog_hld.c 	delta = now - __this_cpu_read(last_timestamp);
__this_cpu_read   117 kernel/watchdog_hld.c 	if (__this_cpu_read(watchdog_nmi_touch) == true) {
__this_cpu_read   135 kernel/watchdog_hld.c 		if (__this_cpu_read(hard_watchdog_warn) == true)
__this_cpu_read  1383 kernel/workqueue.c 	new_cpu = __this_cpu_read(wq_rr_cpu_last);
__this_cpu_read    87 lib/percpu_counter.c 	count = __this_cpu_read(*fbc->counters) + amount;
__this_cpu_read    11 lib/percpu_test.c 		WARN(__this_cpu_read(pcp) != (expected),                \
__this_cpu_read    13 lib/percpu_test.c 		     __this_cpu_read(pcp), __this_cpu_read(pcp),	\
__this_cpu_read   698 mm/memcontrol.c 	x = val + __this_cpu_read(memcg->vmstats_percpu->stat[idx]);
__this_cpu_read   758 mm/memcontrol.c 	x = val + __this_cpu_read(pn->lruvec_stat_cpu->count[idx]);
__this_cpu_read   814 mm/memcontrol.c 	x = count + __this_cpu_read(memcg->vmstats_percpu->events[idx]);
__this_cpu_read   882 mm/memcontrol.c 	val = __this_cpu_read(memcg->vmstats_percpu->nr_page_events);
__this_cpu_read   883 mm/memcontrol.c 	next = __this_cpu_read(memcg->vmstats_percpu->targets[target]);
__this_cpu_read   492 mm/slab.c      	int node = __this_cpu_read(slab_reap_node);
__this_cpu_read   711 mm/slab.c      	int node = __this_cpu_read(slab_reap_node);
__this_cpu_read  2005 mm/slub.c      	unsigned long actual_tid = __this_cpu_read(s->cpu_slab->tid);
__this_cpu_read  1093 mm/vmalloc.c   	if (!__this_cpu_read(ne_fit_preload_node)) {
__this_cpu_read   324 mm/vmstat.c    	x = delta + __this_cpu_read(*p);
__this_cpu_read   326 mm/vmstat.c    	t = __this_cpu_read(pcp->stat_threshold);
__this_cpu_read   344 mm/vmstat.c    	x = delta + __this_cpu_read(*p);
__this_cpu_read   346 mm/vmstat.c    	t = __this_cpu_read(pcp->stat_threshold);
__this_cpu_read   386 mm/vmstat.c    	t = __this_cpu_read(pcp->stat_threshold);
__this_cpu_read   402 mm/vmstat.c    	t = __this_cpu_read(pcp->stat_threshold);
__this_cpu_read   430 mm/vmstat.c    	t = __this_cpu_read(pcp->stat_threshold);
__this_cpu_read   446 mm/vmstat.c    	t = __this_cpu_read(pcp->stat_threshold);
__this_cpu_read   796 mm/vmstat.c    			if (!__this_cpu_read(p->expire) ||
__this_cpu_read   797 mm/vmstat.c    			       !__this_cpu_read(p->pcp.count))
__this_cpu_read   811 mm/vmstat.c    			if (__this_cpu_read(p->pcp.count)) {
__this_cpu_read  2729 net/core/dev.c 	skb->next = __this_cpu_read(softnet_data.completion_queue);
__this_cpu_read   274 net/ipv4/netfilter/ip_tables.c 		jumpstack += private->stacksize * __this_cpu_read(nf_skb_duplicated);
__this_cpu_read   296 net/ipv6/netfilter/ip6_tables.c 		jumpstack += private->stacksize * __this_cpu_read(nf_skb_duplicated);
__this_cpu_read   485 net/rds/ib_recv.c 	chpfirst = __this_cpu_read(cache->percpu->first);
__this_cpu_read   494 net/rds/ib_recv.c 	if (__this_cpu_read(cache->percpu->count) < RDS_IB_RECYCLE_BATCH_COUNT)
__this_cpu_read    75 virt/kvm/arm/arm.c 	return __this_cpu_read(kvm_arm_running_vcpu);
__this_cpu_read  1349 virt/kvm/arm/arm.c 	stack_page = __this_cpu_read(kvm_arm_hyp_stack_page);
__this_cpu_read  1382 virt/kvm/arm/arm.c 	if (!__this_cpu_read(kvm_arm_hardware_enabled)) {
__this_cpu_read  1396 virt/kvm/arm/arm.c 	if (__this_cpu_read(kvm_arm_hardware_enabled)) {
__this_cpu_read  1419 virt/kvm/arm/arm.c 		if (__this_cpu_read(kvm_arm_hardware_enabled))
__this_cpu_read  1430 virt/kvm/arm/arm.c 		if (__this_cpu_read(kvm_arm_hardware_enabled))
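
For context on the accessor indexed above: __this_cpu_read() is the raw (preemption-unchecked) variant of the per-CPU read operations. It returns the current CPU's instance of a per-CPU variable and expects the caller to already be running with preemption disabled or otherwise pinned to that CPU; this_cpu_read() is the variant that is safe without such guarantees. The following is a minimal illustrative sketch of typical usage, not code taken from any of the files listed above; the example_hits variable and helper name are hypothetical.

	/*
	 * Minimal sketch: define a per-CPU counter and read the local
	 * CPU's copy with __this_cpu_read(). Hypothetical names; the
	 * real call sites are the ones listed in this index.
	 */
	#include <linux/percpu.h>
	#include <linux/preempt.h>

	static DEFINE_PER_CPU(unsigned long, example_hits);

	static unsigned long example_read_local_hits(void)
	{
		unsigned long hits;

		preempt_disable();			/* pin to the current CPU */
		hits = __this_cpu_read(example_hits);	/* raw read of this CPU's copy */
		preempt_enable();

		return hits;
	}

Many of the call sites above (IRQ handlers, NMI paths, scheduler and timer code) omit the explicit preempt_disable()/preempt_enable() pair because they already run in a context that cannot migrate between CPUs.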