Searched refs: __this_cpu_read (Results 1 - 112 of 112) sorted by relevance

/linux-4.1.27/include/asm-generic/
irq_regs.h 25 return __this_cpu_read(__irq_regs); get_irq_regs()
32 old_regs = __this_cpu_read(__irq_regs); set_irq_regs()
/linux-4.1.27/include/linux/
context_tracking_state.h 33 return __this_cpu_read(context_tracking.active); context_tracking_cpu_is_enabled()
38 return __this_cpu_read(context_tracking.state) == CONTEXT_USER; context_tracking_in_user()
highmem.h 103 return __this_cpu_read(__kmap_atomic_idx) - 1; kmap_atomic_idx()
kprobes.h 351 return (__this_cpu_read(current_kprobe)); kprobe_running()
percpu-defs.h 430 #define __this_cpu_read(pcp) \ macro
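Note: percpu-defs.h line 430 above is where the __this_cpu_read() macro itself is defined. It is the "raw" flavour of this_cpu_read(): it reads the calling CPU's instance of a per-cpu variable without the preemption/IRQ sanity checks, so callers are expected to already be in a non-migratable context (interrupt handler, preempt_disable() section, and so on), which is the pattern in most of the hits below. A minimal usage sketch follows, assuming the standard <linux/percpu.h> API; the variable and helper names (demo_event_count, demo_read_local_count) are hypothetical and not taken from the results above.

#include <linux/percpu.h>
#include <linux/preempt.h>

/* Hypothetical per-cpu counter: one instance per possible CPU. */
static DEFINE_PER_CPU(unsigned long, demo_event_count);

static unsigned long demo_read_local_count(void)
{
	unsigned long val;

	preempt_disable();			/* stay on this CPU while reading */
	val = __this_cpu_read(demo_event_count);	/* unchecked read of this CPU's copy */
	preempt_enable();

	return val;
}

Outside such a section, this_cpu_read() would be the safer choice, since it handles the preemption concern itself.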
/linux-4.1.27/kernel/time/
tick-oneshot.c 29 struct clock_event_device *dev = __this_cpu_read(tick_cpu_device.evtdev); tick_program_event()
39 struct clock_event_device *dev = __this_cpu_read(tick_cpu_device.evtdev); tick_resume_oneshot()
100 ret = __this_cpu_read(tick_cpu_device.mode) == TICKDEV_MODE_ONESHOT; tick_oneshot_mode_active()
tick-common.c 66 struct clock_event_device *dev = __this_cpu_read(tick_cpu_device.evtdev); tick_is_oneshot_available()
tick-sched.c 421 return __this_cpu_read(tick_cpu_sched.tick_stopped); tick_nohz_tick_stopped()
574 struct clock_event_device *dev = __this_cpu_read(tick_cpu_device.evtdev); tick_nohz_stop_sched_tick()
timer.c 1360 struct tvec_base *base = __this_cpu_read(tvec_bases); get_next_timer_interrupt()
1410 struct tvec_base *base = __this_cpu_read(tvec_bases); run_timer_softirq()
hrtimer.c 513 return __this_cpu_read(hrtimer_bases.hres_active); hrtimer_hres_active()
/linux-4.1.27/lib/
percpu_test.c 10 WARN(__this_cpu_read(pcp) != (expected), \
12 __this_cpu_read(pcp), __this_cpu_read(pcp), \
percpu_counter.c 80 count = __this_cpu_read(*fbc->counters) + amount; __percpu_counter_add()
iommu-common.c 108 unsigned int pool_hash = __this_cpu_read(iommu_hash_common); iommu_tbl_range_alloc()
idr.c 119 new = __this_cpu_read(idr_preload_head); idr_layer_alloc()
413 while (__this_cpu_read(idr_preload_cnt) < MAX_IDR_FREE) { idr_preload()
423 new->ary[0] = __this_cpu_read(idr_preload_head); idr_preload()
/linux-4.1.27/arch/powerpc/include/asm/
hardirq.h 24 #define local_softirq_pending() __this_cpu_read(irq_stat.__softirq_pending)
cputime.h 61 __this_cpu_read(cputime_last_delta)) cputime_to_scaled()
63 __this_cpu_read(cputime_scaled_last_delta) / cputime_to_scaled()
64 __this_cpu_read(cputime_last_delta); cputime_to_scaled()
/linux-4.1.27/arch/sparc/kernel/
nmi.c 103 if (__this_cpu_read(nmi_touch)) { perfctr_irq()
107 if (!touched && __this_cpu_read(last_irq_sum) == sum) { perfctr_irq()
109 if (__this_cpu_read(alert_counter) == 30 * nmi_hz) perfctr_irq()
116 if (__this_cpu_read(wd_enabled)) { perfctr_irq()
221 if (!__this_cpu_read(wd_enabled)) nmi_adjust_hz_one()
kprobes.c 158 p = __this_cpu_read(current_kprobe); kprobe_handler()
/linux-4.1.27/arch/mips/kernel/
mips-r2-to-r6-emul.c 2236 (unsigned long)__this_cpu_read(mipsr2emustats.movs), mipsr2_stats_show()
2237 (unsigned long)__this_cpu_read(mipsr2bdemustats.movs)); mipsr2_stats_show()
2239 (unsigned long)__this_cpu_read(mipsr2emustats.hilo), mipsr2_stats_show()
2240 (unsigned long)__this_cpu_read(mipsr2bdemustats.hilo)); mipsr2_stats_show()
2242 (unsigned long)__this_cpu_read(mipsr2emustats.muls), mipsr2_stats_show()
2243 (unsigned long)__this_cpu_read(mipsr2bdemustats.muls)); mipsr2_stats_show()
2245 (unsigned long)__this_cpu_read(mipsr2emustats.divs), mipsr2_stats_show()
2246 (unsigned long)__this_cpu_read(mipsr2bdemustats.divs)); mipsr2_stats_show()
2248 (unsigned long)__this_cpu_read(mipsr2emustats.dsps), mipsr2_stats_show()
2249 (unsigned long)__this_cpu_read(mipsr2bdemustats.dsps)); mipsr2_stats_show()
2251 (unsigned long)__this_cpu_read(mipsr2emustats.bops), mipsr2_stats_show()
2252 (unsigned long)__this_cpu_read(mipsr2bdemustats.bops)); mipsr2_stats_show()
2254 (unsigned long)__this_cpu_read(mipsr2emustats.traps), mipsr2_stats_show()
2255 (unsigned long)__this_cpu_read(mipsr2bdemustats.traps)); mipsr2_stats_show()
2257 (unsigned long)__this_cpu_read(mipsr2emustats.fpus), mipsr2_stats_show()
2258 (unsigned long)__this_cpu_read(mipsr2bdemustats.fpus)); mipsr2_stats_show()
2260 (unsigned long)__this_cpu_read(mipsr2emustats.loads), mipsr2_stats_show()
2261 (unsigned long)__this_cpu_read(mipsr2bdemustats.loads)); mipsr2_stats_show()
2263 (unsigned long)__this_cpu_read(mipsr2emustats.stores), mipsr2_stats_show()
2264 (unsigned long)__this_cpu_read(mipsr2bdemustats.stores)); mipsr2_stats_show()
2266 (unsigned long)__this_cpu_read(mipsr2emustats.llsc), mipsr2_stats_show()
2267 (unsigned long)__this_cpu_read(mipsr2bdemustats.llsc)); mipsr2_stats_show()
2269 (unsigned long)__this_cpu_read(mipsr2emustats.dsemul), mipsr2_stats_show()
2270 (unsigned long)__this_cpu_read(mipsr2bdemustats.dsemul)); mipsr2_stats_show()
2272 (unsigned long)__this_cpu_read(mipsr2bremustats.jrs)); mipsr2_stats_show()
2274 (unsigned long)__this_cpu_read(mipsr2bremustats.bltzl)); mipsr2_stats_show()
2276 (unsigned long)__this_cpu_read(mipsr2bremustats.bgezl)); mipsr2_stats_show()
2278 (unsigned long)__this_cpu_read(mipsr2bremustats.bltzll)); mipsr2_stats_show()
2280 (unsigned long)__this_cpu_read(mipsr2bremustats.bgezll)); mipsr2_stats_show()
2282 (unsigned long)__this_cpu_read(mipsr2bremustats.bltzal)); mipsr2_stats_show()
2284 (unsigned long)__this_cpu_read(mipsr2bremustats.bgezal)); mipsr2_stats_show()
2286 (unsigned long)__this_cpu_read(mipsr2bremustats.beql)); mipsr2_stats_show()
2288 (unsigned long)__this_cpu_read(mipsr2bremustats.bnel)); mipsr2_stats_show()
2290 (unsigned long)__this_cpu_read(mipsr2bremustats.blezl)); mipsr2_stats_show()
2292 (unsigned long)__this_cpu_read(mipsr2bremustats.bgtzl)); mipsr2_stats_show()
smp-bmips.c 331 action = __this_cpu_read(ipi_action_mask); bmips43xx_ipi_interrupt()
kprobes.c 388 p = __this_cpu_read(current_kprobe); kprobe_handler()
/linux-4.1.27/arch/x86/oprofile/
op_model_ppro.c 87 __this_cpu_read(cpu_info.x86) == 6 && ppro_setup_ctrs()
88 __this_cpu_read(cpu_info.x86_model) == 15)) { ppro_setup_ctrs()
215 if (eax.split.version_id == 0 && __this_cpu_read(cpu_info.x86) == 6 && arch_perfmon_setup_counters()
216 __this_cpu_read(cpu_info.x86_model) == 15) { arch_perfmon_setup_counters()
nmi_int.c 140 return __this_cpu_read(switch_index) + phys; op_x86_phys_to_virt()
/linux-4.1.27/drivers/xen/
preempt.c 33 if (unlikely(__this_cpu_read(xen_in_preemptible_hcall) xen_maybe_preempt_hcall()
/linux-4.1.27/kernel/
watchdog.c 241 unsigned long hrint = __this_cpu_read(hrtimer_interrupts); is_hardlockup()
243 if (__this_cpu_read(hrtimer_interrupts_saved) == hrint) is_hardlockup()
281 if (__this_cpu_read(watchdog_nmi_touch) == true) { watchdog_overflow_callback()
296 if (__this_cpu_read(hard_watchdog_warn) == true) watchdog_overflow_callback()
326 unsigned long touch_ts = __this_cpu_read(watchdog_touch_ts); watchdog_timer_fn()
335 wake_up_process(__this_cpu_read(softlockup_watchdog)); watchdog_timer_fn()
341 if (unlikely(__this_cpu_read(softlockup_touch_sync))) { watchdog_timer_fn()
373 if (__this_cpu_read(soft_watchdog_warn) == true) { watchdog_timer_fn()
382 if (__this_cpu_read(softlockup_task_ptr_saved) != watchdog_timer_fn()
477 return __this_cpu_read(hrtimer_interrupts) != watchdog_should_run()
478 __this_cpu_read(soft_lockup_hrtimer_cnt); watchdog_should_run()
492 __this_cpu_read(hrtimer_interrupts)); watchdog()
context_tracking.c 78 if ( __this_cpu_read(context_tracking.state) != state) { context_tracking_enter()
79 if (__this_cpu_read(context_tracking.active)) { context_tracking_enter()
142 if (__this_cpu_read(context_tracking.state) == state) { context_tracking_exit()
143 if (__this_cpu_read(context_tracking.active)) { context_tracking_exit()
softirq.c 74 struct task_struct *tsk = __this_cpu_read(ksoftirqd); wakeup_softirqd()
455 *__this_cpu_read(tasklet_vec.tail) = t; __tasklet_schedule()
468 *__this_cpu_read(tasklet_hi_vec.tail) = t; __tasklet_hi_schedule()
479 t->next = __this_cpu_read(tasklet_hi_vec.head); __tasklet_hi_schedule_first()
490 list = __this_cpu_read(tasklet_vec.head); tasklet_action()
514 *__this_cpu_read(tasklet_vec.tail) = t; tasklet_action()
526 list = __this_cpu_read(tasklet_hi_vec.head); tasklet_hi_action()
550 *__this_cpu_read(tasklet_hi_vec.tail) = t; tasklet_hi_action()
710 *__this_cpu_read(tasklet_vec.tail) = per_cpu(tasklet_vec, cpu).head; takeover_tasklets()
718 *__this_cpu_read(tasklet_hi_vec.tail) = per_cpu(tasklet_hi_vec, cpu).head; takeover_tasklets()
kprobes.c 1035 struct kprobe *cur = __this_cpu_read(kprobe_instance); aggr_fault_handler()
1051 struct kprobe *cur = __this_cpu_read(kprobe_instance); aggr_break_handler()
padata.c 206 if (__this_cpu_read(pd->pqueue->cpu_index) == next_queue->cpu_index) { padata_get_next()
/linux-4.1.27/arch/x86/kernel/cpu/mcheck/
mce_intel.c 96 if (__this_cpu_read(cmci_storm_state) == CMCI_STORM_NONE) mce_intel_cmci_poll()
143 (__this_cpu_read(cmci_storm_state) == CMCI_STORM_ACTIVE)) { cmci_intel_adjust_timer()
148 switch (__this_cpu_read(cmci_storm_state)) { cmci_intel_adjust_timer()
182 unsigned int cnt = __this_cpu_read(cmci_storm_cnt); cmci_storm_detect()
183 unsigned long ts = __this_cpu_read(cmci_time_stamp); cmci_storm_detect()
187 if (__this_cpu_read(cmci_storm_state) != CMCI_STORM_NONE) cmci_storm_detect()
mce.c 375 unsigned bank = __this_cpu_read(injectm.bank); msr_to_offset()
395 if (__this_cpu_read(injectm.finished)) { mce_rdmsrl()
418 if (__this_cpu_read(injectm.finished)) { mce_wrmsrl()
1305 iv = __this_cpu_read(mce_next_interval); mce_timer_fn()
1336 unsigned long iv = __this_cpu_read(mce_next_interval); mce_timer_kick()
/linux-4.1.27/drivers/irqchip/
irq-xtensa-mx.c 78 mask = __this_cpu_read(cached_irq_mask) & ~mask; xtensa_mx_irq_mask()
93 mask |= __this_cpu_read(cached_irq_mask); xtensa_mx_irq_unmask()
/linux-4.1.27/arch/s390/kernel/
vtime.c 122 u64 mult = __this_cpu_read(mt_scaling_mult); do_account_vtime()
123 u64 div = __this_cpu_read(mt_scaling_div); do_account_vtime()
183 u64 mult = __this_cpu_read(mt_scaling_mult); vtime_account_irq_enter()
184 u64 div = __this_cpu_read(mt_scaling_div); vtime_account_irq_enter()
kprobes.c 252 kcb->prev_kprobe.kp = __this_cpu_read(current_kprobe); push_kprobe()
347 p = __this_cpu_read(current_kprobe); kprobe_handler()
/linux-4.1.27/drivers/xen/events/
events_2l.c 120 struct vcpu_info *vcpu_info = __this_cpu_read(xen_vcpu); evtchn_2l_unmask()
170 struct vcpu_info *vcpu_info = __this_cpu_read(xen_vcpu); evtchn_2l_handle_events()
189 start_word_idx = __this_cpu_read(current_word_idx); evtchn_2l_handle_events()
190 start_bit_idx = __this_cpu_read(current_bit_idx); evtchn_2l_handle_events()
events_base.c 1228 struct vcpu_info *vcpu_info = __this_cpu_read(xen_vcpu); __xen_evtchn_do_upcall()
1242 count = __this_cpu_read(xed_nesting_count); __xen_evtchn_do_upcall()
/linux-4.1.27/arch/x86/kernel/
irq.c 198 irq = __this_cpu_read(vector_irq[vector]); do_IRQ()
302 irq = __this_cpu_read(vector_irq[vector]); check_irq_vectors_for_cpu_disable()
450 if (__this_cpu_read(vector_irq[vector]) <= VECTOR_UNDEFINED)
455 irq = __this_cpu_read(vector_irq[vector]);
467 if (__this_cpu_read(vector_irq[vector]) != VECTOR_RETRIGGERED)
irq_32.c 84 irqstk = __this_cpu_read(hardirq_stack); execute_on_irq_stack()
145 irqstk = __this_cpu_read(softirq_stack); do_softirq_own_stack()
irq_64.c 57 irq_stack_bottom = (u64)__this_cpu_read(irq_stack_ptr); stack_overflow_check()
hw_breakpoint.c 390 set_debugreg(__this_cpu_read(cpu_debugreg[0]), 0); hw_breakpoint_restore()
391 set_debugreg(__this_cpu_read(cpu_debugreg[1]), 1); hw_breakpoint_restore()
392 set_debugreg(__this_cpu_read(cpu_debugreg[2]), 2); hw_breakpoint_restore()
393 set_debugreg(__this_cpu_read(cpu_debugreg[3]), 3); hw_breakpoint_restore()
395 set_debugreg(__this_cpu_read(cpu_dr7), 7); hw_breakpoint_restore()
kvm.c 247 if (__this_cpu_read(apf_reason.enabled)) { kvm_read_and_reset_pf_reason()
248 reason = __this_cpu_read(apf_reason.reason); kvm_read_and_reset_pf_reason()
367 if (!__this_cpu_read(apf_reason.enabled)) kvm_pv_disable_apf()
nmi.c 328 if (regs->ip == __this_cpu_read(last_nmi_rip)) default_do_nmi()
403 if (b2b && __this_cpu_read(swallow_nmi)) default_do_nmi()
smpboot.c 1416 if (__this_cpu_read(cpu_info.cpuid_level) < CPUID_MWAIT_LEAF) mwait_play_dead()
1474 if (__this_cpu_read(cpu_info.x86) >= 4) hlt_play_dead()
/linux-4.1.27/arch/tile/include/asm/
mmu_context.h 87 install_page_table(mm->pgd, __this_cpu_read(current_asid)); enter_lazy_tlb()
99 int asid = __this_cpu_read(current_asid) + 1; switch_mm()
/linux-4.1.27/arch/x86/include/asm/
debugreg.h 89 return __this_cpu_read(cpu_dr7) & DR_GLOBAL_ENABLE_MASK; hw_breakpoint_active()
/linux-4.1.27/arch/ia64/include/asm/sn/
nodepda.h 73 #define sn_nodepda __this_cpu_read(__sn_nodepda)
/linux-4.1.27/arch/ia64/include/asm/
switch_to.h 35 # define PERFMON_IS_SYSWIDE() (__this_cpu_read(pfm_syst_info) & 0x1)
hw_irq.h 162 return __this_cpu_read(vector_irq[vec]); __ia64_local_vector_to_irq()
/linux-4.1.27/arch/tile/kernel/
irq.c 133 unmask_irqs(~__this_cpu_read(irq_disable_mask)); tile_dev_intr()
153 if (__this_cpu_read(irq_depth) == 0) tile_irq_chip_enable()
199 if (!(__this_cpu_read(irq_disable_mask) & (1UL << d->irq))) tile_irq_chip_eoi()
kprobes.c 230 p = __this_cpu_read(current_kprobe); kprobe_handler()
/linux-4.1.27/arch/powerpc/kernel/
mce.c 146 int index = __this_cpu_read(mce_nest_count) - 1; get_mce_event()
211 while (__this_cpu_read(mce_queue_count) > 0) { machine_check_process_queued_event()
212 index = __this_cpu_read(mce_queue_count) - 1; machine_check_process_queued_event()
hw_breakpoint.c 229 bp = __this_cpu_read(bp_per_reg); hw_breakpoint_handler()
kprobes.c 195 p = __this_cpu_read(current_kprobe); kprobe_handler()
sysfs.c 397 if (__this_cpu_read(pmcs_enabled)) ppc_enable_pmcs()
iommu.c 211 pool_nr = __this_cpu_read(iommu_pool_hash) & (tbl->nr_pools - 1); iommu_range_alloc()
time.c 463 #define test_irq_work_pending() __this_cpu_read(irq_work_pending)
/linux-4.1.27/arch/x86/kernel/acpi/
sleep.c 81 if (__this_cpu_read(cpu_info.cpuid_level) >= 0) { x86_acpi_suspend_lowlevel()
/linux-4.1.27/net/netfilter/
xt_TEE.c 91 if (__this_cpu_read(tee_active)) tee_tg4()
172 if (__this_cpu_read(tee_active)) tee_tg6()
/linux-4.1.27/arch/x86/kernel/cpu/
perf_event_intel_rapl.c 219 struct rapl_pmu *pmu = __this_cpu_read(rapl_pmu); rapl_hrtimer_handle()
266 struct rapl_pmu *pmu = __this_cpu_read(rapl_pmu); rapl_pmu_event_start()
276 struct rapl_pmu *pmu = __this_cpu_read(rapl_pmu); rapl_pmu_event_stop()
310 struct rapl_pmu *pmu = __this_cpu_read(rapl_pmu); rapl_pmu_event_add()
764 pmu = __this_cpu_read(rapl_pmu);
common.c 1247 return __this_cpu_read(debug_stack_usage) || is_debug_stack()
1248 (addr <= __this_cpu_read(debug_stack_addr) && is_debug_stack()
1249 addr > (__this_cpu_read(debug_stack_addr) - DEBUG_STKSZ)); is_debug_stack()
intel_cacheinfo.c 260 size_in_kb = __this_cpu_read(cpu_info.x86_cache_size); amd_cpuid4()
282 eax->split.num_cores_on_die = __this_cpu_read(cpu_info.x86_max_cores) - 1; amd_cpuid4()
perf_event.c 1139 if (__this_cpu_read(cpu_hw_events.enabled)) x86_pmu_enable_event()
1762 __this_cpu_sub(cpu_hw_events.n_added, __this_cpu_read(cpu_hw_events.n_txn)); x86_pmu_cancel_txn()
1763 __this_cpu_sub(cpu_hw_events.n_events, __this_cpu_read(cpu_hw_events.n_txn)); x86_pmu_cancel_txn()
perf_event.h 721 u64 disable_mask = __this_cpu_read(cpu_hw_events.perf_ctr_virt_mask); __x86_pmu_enable_event()
perf_event_intel_ds.c 1110 struct debug_store *ds = __this_cpu_read(cpu_hw_events.ds); perf_restore_debug_store()
perf_event_intel.c 1477 if (!__this_cpu_read(cpu_hw_events.enabled)) intel_pmu_enable_event()
1532 struct debug_store *ds = __this_cpu_read(cpu_hw_events.ds); intel_pmu_reset()
/linux-4.1.27/mm/
vmstat.c 229 x = delta + __this_cpu_read(*p); __mod_zone_page_state()
231 t = __this_cpu_read(pcp->stat_threshold); __mod_zone_page_state()
271 t = __this_cpu_read(pcp->stat_threshold); __inc_zone_state()
293 t = __this_cpu_read(pcp->stat_threshold); __dec_zone_state()
496 if (!__this_cpu_read(p->expire) || for_each_populated_zone()
497 !__this_cpu_read(p->pcp.count)) for_each_populated_zone()
511 if (__this_cpu_read(p->pcp.count)) { for_each_populated_zone()
slab.c 623 int node = __this_cpu_read(slab_reap_node); next_reap_node()
946 int node = __this_cpu_read(slab_reap_node); reap_alien()
memcontrol.c 905 val = __this_cpu_read(memcg->stat->nr_page_events); mem_cgroup_event_ratelimit()
906 next = __this_cpu_read(memcg->stat->targets[target]); mem_cgroup_event_ratelimit()
slub.c 1786 unsigned long actual_tid = __this_cpu_read(s->cpu_slab->tid); note_cmpxchg_failure()
/linux-4.1.27/arch/sh/kernel/
kprobes.c 252 p = __this_cpu_read(current_kprobe); kprobe_handler()
392 addr = __this_cpu_read(saved_current_opcode.addr); post_kprobe_handler()
514 p = __this_cpu_read(current_kprobe); kprobe_exceptions_notify()
/linux-4.1.27/arch/ia64/kernel/
irq.c 45 return __this_cpu_read(vector_irq[vec]); __ia64_local_vector_to_irq()
process.c 276 info = __this_cpu_read(pfm_syst_info); ia64_save_extra()
296 info = __this_cpu_read(pfm_syst_info); ia64_load_extra()
irq_ia64.c 331 irq = __this_cpu_read(vector_irq[vector]); smp_irq_move_cleanup_interrupt()
kprobes.c 826 p = __this_cpu_read(current_kprobe); pre_kprobes_handler()
mca.c 1344 if (__this_cpu_read(ia64_mca_tr_reload)) { ia64_mca_handler()
/linux-4.1.27/arch/arm64/kernel/
psci.c 529 struct psci_power_state *state = __this_cpu_read(psci_power_state); psci_suspend_finisher()
538 struct psci_power_state *state = __this_cpu_read(psci_power_state); cpu_psci_cpu_suspend()
fpsimd.c 147 if (__this_cpu_read(fpsimd_last_state) == st fpsimd_thread_switch()
/linux-4.1.27/arch/x86/xen/
time.c 136 stolen = runnable + offline + __this_cpu_read(xen_residual_stolen); do_stolen_accounting()
161 src = &__this_cpu_read(xen_vcpu)->time; xen_clocksource_read()
spinlock.c 111 int irq = __this_cpu_read(lock_kicker_irq); xen_lock_spinning()
enlighten.c 831 start = __this_cpu_read(idt_desc.address); xen_write_idt_entry()
832 end = start + __this_cpu_read(idt_desc.size) + 1; xen_write_idt_entry()
/linux-4.1.27/arch/mips/loongson/loongson-3/
smp.c 307 while (!__this_cpu_read(core0_c0count)) { loongson3_init_secondary()
314 initcount = __this_cpu_read(core0_c0count) + i; loongson3_init_secondary()
/linux-4.1.27/kernel/sched/
idle.c 81 struct cpuidle_device *dev = __this_cpu_read(cpuidle_devices); cpuidle_idle_call()
cputime.c 59 delta = sched_clock_cpu(cpu) - __this_cpu_read(irq_start_time); irqtime_account_irq()
/linux-4.1.27/net/xfrm/
xfrm_ipcomp.c 286 tfm = __this_cpu_read(*pos->tfms); ipcomp_alloc_tfms()
/linux-4.1.27/kernel/rcu/
tree_plugin.h 119 if (!__this_cpu_read(rcu_preempt_data.passed_quiesce)) { rcu_preempt_qs()
121 __this_cpu_read(rcu_preempt_data.gpnum), rcu_preempt_qs()
494 __this_cpu_read(rcu_preempt_data.qs_pending) && rcu_preempt_check_callbacks()
495 !__this_cpu_read(rcu_preempt_data.passed_quiesce)) rcu_preempt_check_callbacks()
1136 if (__this_cpu_read(rcu_cpu_kthread_task) != NULL && invoke_rcu_callbacks_kthread()
1137 current != __this_cpu_read(rcu_cpu_kthread_task)) { invoke_rcu_callbacks_kthread()
1138 rcu_wake_cond(__this_cpu_read(rcu_cpu_kthread_task), invoke_rcu_callbacks_kthread()
1139 __this_cpu_read(rcu_cpu_kthread_status)); invoke_rcu_callbacks_kthread()
1150 return __this_cpu_read(rcu_cpu_kthread_task) == current; rcu_is_callbacks_kthread()
1221 return __this_cpu_read(rcu_cpu_has_work); rcu_cpu_kthread_should_run()
tree.c 215 if (!__this_cpu_read(rcu_sched_data.passed_quiesce)) { rcu_sched_qs()
217 __this_cpu_read(rcu_sched_data.gpnum), rcu_sched_qs()
225 if (!__this_cpu_read(rcu_bh_data.passed_quiesce)) { rcu_bh_qs()
227 __this_cpu_read(rcu_bh_data.gpnum), rcu_bh_qs()
997 return __this_cpu_read(rcu_dynticks.dynticks_nesting) <= 1; rcu_is_cpu_rrupt_from_idle()
1704 rdp->rcu_qs_ctr_snap = __this_cpu_read(rcu_qs_ctr); __note_gp_changes()
2269 rdp->rcu_qs_ctr_snap == __this_cpu_read(rcu_qs_ctr)) || rcu_report_qs_rdp()
2280 rdp->rcu_qs_ctr_snap = __this_cpu_read(rcu_qs_ctr); rcu_report_qs_rdp()
2327 rdp->rcu_qs_ctr_snap == __this_cpu_read(rcu_qs_ctr)) rcu_check_quiescent_state()
2794 rnp = __this_cpu_read(rsp->rda->mynode); force_quiescent_state()
3431 rdp->rcu_qs_ctr_snap == __this_cpu_read(rcu_qs_ctr)) { __rcu_pending()
3435 rdp->rcu_qs_ctr_snap != __this_cpu_read(rcu_qs_ctr))) { __rcu_pending()
3783 rdp->rcu_qs_ctr_snap = __this_cpu_read(rcu_qs_ctr); rcu_init_percpu_data()
/linux-4.1.27/arch/powerpc/kvm/
e500mc.c 147 __this_cpu_read(last_vcpu_of_lpid[get_lpid(vcpu)]) != vcpu) { kvmppc_core_vcpu_load_e500mc()
e500.c 111 __this_cpu_read(pcpu_sids.entry[entry->val]) == entry && local_sid_lookup()
/linux-4.1.27/arch/alpha/kernel/
time.c 60 #define test_irq_work_pending() __this_cpu_read(irq_work_pending)
/linux-4.1.27/drivers/cpuidle/
cpuidle.c 60 struct cpuidle_device *dev = __this_cpu_read(cpuidle_devices); cpuidle_play_dead()
/linux-4.1.27/arch/x86/kernel/apic/
vector.c 411 irq = __this_cpu_read(vector_irq[vector]); smp_irq_move_cleanup_interrupt()
x2apic_uv_x.c 343 id = x | __this_cpu_read(x2apic_extra_bits); x2apic_get_apic_id()
/linux-4.1.27/include/linux/netfilter/
x_tables.h 303 addend = (__this_cpu_read(xt_recseq.sequence) + 1) & 1; xt_write_recseq_begin()
/linux-4.1.27/arch/ia64/sn/kernel/sn2/
sn2_smp.c 303 if (itc2 > __this_cpu_read(ptcstats.shub_itc_clocks_max))
/linux-4.1.27/arch/arc/kernel/
kprobes.c 240 p = __this_cpu_read(current_kprobe); arc_kprobe_handler()
/linux-4.1.27/drivers/staging/media/lirc/
lirc_serial.c 341 loops_per_sec = __this_cpu_read(cpu_info.loops_per_jiffy); init_timing_params()
362 freq, duty_cycle, __this_cpu_read(cpu_info.loops_per_jiffy), init_timing_params()
/linux-4.1.27/arch/tile/mm/
init.c 592 __this_cpu_read(current_asid), kernel_physical_mapping_init()
599 __install_page_table(pgd_base, __this_cpu_read(current_asid), kernel_physical_mapping_init()
/linux-4.1.27/drivers/acpi/
processor_idle.c 796 pr = __this_cpu_read(processors); acpi_idle_enter()
836 struct acpi_processor *pr = __this_cpu_read(processors); acpi_idle_enter_freeze()
/linux-4.1.27/arch/powerpc/platforms/pseries/
iommu.c 203 tcep = __this_cpu_read(tce_page); tce_buildmulti_pSeriesLP()
402 tcep = __this_cpu_read(tce_page); tce_setrange_multi_pSeriesLP()
/linux-4.1.27/arch/arm/kvm/
arm.c 77 return __this_cpu_read(kvm_arm_running_vcpu); kvm_arm_get_running_vcpu()
907 stack_page = __this_cpu_read(kvm_arm_hyp_stack_page); cpu_init_hyp_mode()
/linux-4.1.27/kernel/trace/
trace_functions_graph.c 286 if (unlikely(__this_cpu_read(ftrace_cpu_disabled))) __trace_graph_entry()
401 if (unlikely(__this_cpu_read(ftrace_cpu_disabled))) __trace_graph_return()
trace.c 1619 if (!__this_cpu_read(trace_cmdline_save)) tracing_record_cmdline()
1769 if (unlikely(__this_cpu_read(ftrace_cpu_disabled))) trace_function()
1946 if (__this_cpu_read(user_stack_count)) ftrace_trace_userstack()
/linux-4.1.27/net/rds/
ib_recv.c 428 chpfirst = __this_cpu_read(cache->percpu->first); rds_ib_recv_cache_put()
437 if (__this_cpu_read(cache->percpu->count) < RDS_IB_RECYCLE_BATCH_COUNT) rds_ib_recv_cache_put()
/linux-4.1.27/arch/mips/cavium-octeon/
octeon-irq.c 1250 ciu_sum &= __this_cpu_read(octeon_irq_ciu0_en_mirror); octeon_irq_ip2_ciu()
1267 ciu_sum &= __this_cpu_read(octeon_irq_ciu1_en_mirror); octeon_irq_ip3_ciu()
/linux-4.1.27/arch/x86/kvm/
x86.c 1617 this_tsc_khz = __this_cpu_read(cpu_tsc_khz); kvm_guest_time_update()
5674 return __this_cpu_read(current_vcpu) != NULL; kvm_is_in_guest()
5681 if (__this_cpu_read(current_vcpu)) kvm_is_user_mode()
5682 user_mode = kvm_x86_ops->get_cpl(__this_cpu_read(current_vcpu)); kvm_is_user_mode()
5691 if (__this_cpu_read(current_vcpu)) kvm_get_guest_ip()
5692 ip = kvm_rip_read(__this_cpu_read(current_vcpu)); kvm_get_guest_ip()
svm.c 1322 svm->tsc_ratio != __this_cpu_read(current_tsc_ratio)) { svm_vcpu_load()
/linux-4.1.27/drivers/lguest/x86/
core.c 93 if (__this_cpu_read(lg_last_cpu) != cpu || cpu->last_pages != pages) { copy_in_guest_info()
/linux-4.1.27/drivers/cpufreq/
powernow-k8.c 1143 struct powernow_k8_data *data = __this_cpu_read(powernow_data); query_values_on_cpu()
/linux-4.1.27/arch/x86/kernel/kprobes/
core.c 657 p = __this_cpu_read(current_kprobe); kprobe_int3_handler()
/linux-4.1.27/fs/
buffer.c 1270 if (__this_cpu_read(bh_lrus.bhs[0]) != bh) { bh_lru_install()
1279 __this_cpu_read(bh_lrus.bhs[in]); bh_lru_install()
1314 struct buffer_head *bh = __this_cpu_read(bh_lrus.bhs[i]); lookup_bh_lru()
1321 __this_cpu_read(bh_lrus.bhs[i - 1])); lookup_bh_lru()
/linux-4.1.27/kernel/events/
core.c 254 local_samples_len = __this_cpu_read(running_sample_length); perf_duration_warn()
276 local_samples_len = __this_cpu_read(running_sample_length); perf_sample_event_took()
2659 if (__this_cpu_read(perf_sched_cb_usages)) __perf_event_task_sched_out()
2852 if (__this_cpu_read(perf_sched_cb_usages))
3089 __this_cpu_read(perf_throttled_count)) perf_event_can_stop_tick()
6111 seq = __this_cpu_read(perf_throttled_seq); __perf_event_overflow()
/linux-4.1.27/net/core/
dev.c 2294 skb->next = __this_cpu_read(softnet_data.completion_queue); __dev_kfree_skb_irq()
2987 if (__this_cpu_read(xmit_recursion) > RECURSION_LIMIT) __dev_queue_xmit()

Completed in 7815 milliseconds