Searched refs:this_cpu (Results 1 - 52 of 52) sorted by relevance

/linux-4.1.27/arch/x86/kernel/apic/
x2apic_cluster.c
31 unsigned int cpu, this_cpu; __x2apic_send_IPI_mask() local
39 this_cpu = smp_processor_id(); __x2apic_send_IPI_mask()
59 if (apic_dest == APIC_DEST_ALLINC || i != this_cpu) for_each_cpu_and()
133 unsigned int this_cpu = smp_processor_id(); init_x2apic_ldr() local
136 per_cpu(x86_cpu_to_logical_apicid, this_cpu) = apic_read(APIC_LDR); init_x2apic_ldr()
138 cpumask_set_cpu(this_cpu, per_cpu(cpus_in_cluster, this_cpu)); for_each_online_cpu()
140 if (x2apic_cluster(this_cpu) != x2apic_cluster(cpu)) for_each_online_cpu()
142 cpumask_set_cpu(this_cpu, per_cpu(cpus_in_cluster, cpu)); for_each_online_cpu()
143 cpumask_set_cpu(cpu, per_cpu(cpus_in_cluster, this_cpu)); for_each_online_cpu()
153 unsigned int this_cpu = (unsigned long)hcpu; update_clusterinfo() local
159 if (!zalloc_cpumask_var(&per_cpu(cpus_in_cluster, this_cpu), update_clusterinfo()
162 } else if (!zalloc_cpumask_var(&per_cpu(ipi_mask, this_cpu), update_clusterinfo()
164 free_cpumask_var(per_cpu(cpus_in_cluster, this_cpu)); update_clusterinfo()
172 if (x2apic_cluster(this_cpu) != x2apic_cluster(cpu)) for_each_online_cpu()
174 cpumask_clear_cpu(this_cpu, per_cpu(cpus_in_cluster, cpu)); for_each_online_cpu()
175 cpumask_clear_cpu(cpu, per_cpu(cpus_in_cluster, this_cpu)); for_each_online_cpu()
177 free_cpumask_var(per_cpu(cpus_in_cluster, this_cpu));
178 free_cpumask_var(per_cpu(ipi_mask, this_cpu));
x2apic_phys.c
41 unsigned long this_cpu; __x2apic_send_IPI_mask() local
48 this_cpu = smp_processor_id(); for_each_cpu()
50 if (apic_dest == APIC_DEST_ALLBUT && this_cpu == query_cpu) for_each_cpu()
ipi.c
42 unsigned int this_cpu = smp_processor_id(); default_send_IPI_mask_allbutself_phys() local
50 if (query_cpu == this_cpu) for_each_cpu()
85 unsigned int this_cpu = smp_processor_id(); default_send_IPI_mask_allbutself_logical() local
91 if (query_cpu == this_cpu) for_each_cpu()
apic_numachip.c
123 unsigned int this_cpu = smp_processor_id(); numachip_send_IPI_mask_allbutself() local
127 if (cpu != this_cpu) for_each_cpu()
134 unsigned int this_cpu = smp_processor_id(); numachip_send_IPI_allbutself() local
138 if (cpu != this_cpu) for_each_online_cpu()
hw_nmi.c
61 int this_cpu = get_cpu(); arch_trigger_all_cpu_backtrace() local
74 cpumask_clear_cpu(this_cpu, to_cpumask(backtrace_mask)); arch_trigger_all_cpu_backtrace()
x2apic_uv_x.c
275 unsigned int this_cpu = smp_processor_id(); uv_send_IPI_mask_allbutself() local
279 if (cpu != this_cpu) for_each_cpu()
286 unsigned int this_cpu = smp_processor_id(); uv_send_IPI_allbutself() local
290 if (cpu != this_cpu) for_each_online_cpu()
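
A recurring idiom across the APIC senders above: read smp_processor_id() once while preemption is disabled, then walk the destination mask and skip the local CPU. A minimal sketch of that loop, assuming only the stock <linux/smp.h>/<linux/cpumask.h> APIs; __send_one_ipi() is a hypothetical stand-in for the per-driver delivery routine:

    #include <linux/smp.h>
    #include <linux/cpumask.h>

    static void send_ipi_allbutself_sketch(const struct cpumask *mask)
    {
            unsigned int cpu;
            unsigned int this_cpu = smp_processor_id();     /* caller holds preemption off */

            for_each_cpu(cpu, mask) {
                    if (cpu == this_cpu)
                            continue;               /* never interrupt the sender */
                    __send_one_ipi(cpu);            /* hypothetical delivery hook */
            }
    }
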
/linux-4.1.27/lib/
smp_processor_id.c
13 int this_cpu = raw_smp_processor_id(); check_preemption_disabled() local
25 if (cpumask_equal(tsk_cpus_allowed(current), cpumask_of(this_cpu))) check_preemption_disabled()
51 return this_cpu; check_preemption_disabled()
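
check_preemption_disabled() above must sample the CPU with raw_smp_processor_id(): with CONFIG_DEBUG_PREEMPT, the checked smp_processor_id() wrapper funnels into this very function, and the cpumask_equal() test at line 25 waives the warning for tasks pinned to one CPU. The callers' side of that contract, as a minimal sketch using only <linux/smp.h>:

    #include <linux/smp.h>

    static int read_cpu_id_safely(void)
    {
            int cpu;

            cpu = get_cpu();        /* disables preemption, returns the CPU id */
            /* per-CPU state may be used safely in this window */
            put_cpu();              /* re-enables preemption */

            return cpu;             /* after put_cpu() this is only a hint */
    }
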
/linux-4.1.27/arch/x86/lib/
msr-smp.c
10 int this_cpu = raw_smp_processor_id(); __rdmsr_on_cpu() local
13 reg = per_cpu_ptr(rv->msrs, this_cpu); __rdmsr_on_cpu()
24 int this_cpu = raw_smp_processor_id(); __wrmsr_on_cpu() local
27 reg = per_cpu_ptr(rv->msrs, this_cpu); __wrmsr_on_cpu()
102 int this_cpu; __rwmsr_on_cpus() local
109 this_cpu = get_cpu(); __rwmsr_on_cpus()
111 if (cpumask_test_cpu(this_cpu, mask)) __rwmsr_on_cpus()
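
__rwmsr_on_cpus() above pins itself with get_cpu(), services the local CPU directly when it is in the mask, and reaches per-CPU slots through per_cpu_ptr(). The same shape reduced to a sketch; demo_counter is a hypothetical per-CPU variable:

    #include <linux/percpu.h>
    #include <linux/smp.h>
    #include <linux/cpumask.h>

    static DEFINE_PER_CPU(unsigned long, demo_counter);    /* hypothetical */

    static void run_on_mask_sketch(const struct cpumask *mask)
    {
            int this_cpu = get_cpu();       /* pin so the id stays valid */

            if (cpumask_test_cpu(this_cpu, mask))
                    (*per_cpu_ptr(&demo_counter, this_cpu))++;      /* local CPU: no IPI */

            put_cpu();
    }
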
/linux-4.1.27/arch/parisc/kernel/
smp.c
123 int this_cpu = smp_processor_id(); ipi_interrupt() local
124 struct cpuinfo_parisc *p = &per_cpu(cpu_data, this_cpu); ipi_interrupt()
129 spinlock_t *lock = &per_cpu(ipi_lock, this_cpu); ipi_interrupt()
147 smp_debug(100, KERN_DEBUG "CPU%d IPI_NOP\n", this_cpu); ipi_interrupt()
151 smp_debug(100, KERN_DEBUG "CPU%d IPI_RESCHEDULE\n", this_cpu); ipi_interrupt()
157 smp_debug(100, KERN_DEBUG "CPU%d IPI_CALL_FUNC\n", this_cpu); ipi_interrupt()
162 smp_debug(100, KERN_DEBUG "CPU%d IPI_CPU_START\n", this_cpu); ipi_interrupt()
166 smp_debug(100, KERN_DEBUG "CPU%d IPI_CPU_STOP\n", this_cpu); ipi_interrupt()
171 smp_debug(100, KERN_DEBUG "CPU%d is alive!\n", this_cpu); ipi_interrupt()
176 this_cpu, which); ipi_interrupt()
/linux-4.1.27/arch/arm/kernel/
smp_tlb.c
126 int this_cpu; broadcast_tlb_mm_a15_erratum() local
132 this_cpu = get_cpu(); broadcast_tlb_mm_a15_erratum()
133 a15_erratum_get_cpumask(this_cpu, mm, &mask); broadcast_tlb_mm_a15_erratum()
/linux-4.1.27/kernel/trace/
trace_clock.c
93 int this_cpu; trace_clock_global() local
98 this_cpu = raw_smp_processor_id(); trace_clock_global()
99 now = sched_clock_cpu(this_cpu); trace_clock_global()
/linux-4.1.27/arch/s390/kernel/
machine_kexec.c
58 int cpu, this_cpu; setup_regs() local
62 this_cpu = smp_find_processor_id(stap()); setup_regs()
63 add_elf_notes(this_cpu); for_each_online_cpu()
65 if (cpu == this_cpu) for_each_online_cpu()
/linux-4.1.27/arch/blackfin/kernel/
nmi.c
146 unsigned int this_cpu = smp_processor_id(); check_nmi_wdt_touched() local
151 if (!atomic_read(&nmi_touched[this_cpu])) check_nmi_wdt_touched()
154 atomic_set(&nmi_touched[this_cpu], 0); check_nmi_wdt_touched()
156 cpumask_clear_cpu(this_cpu, &mask); check_nmi_wdt_touched()
/linux-4.1.27/arch/alpha/kernel/
smp.c
525 int this_cpu = smp_processor_id(); handle_ipi() local
526 unsigned long *pending_ipis = &ipi_data[this_cpu].bits; handle_ipi()
531 this_cpu, *pending_ipis, regs->pc)); handle_ipi()
558 this_cpu, which); handle_ipi()
566 cpu_data[this_cpu].ipi_count++; handle_ipi()
657 int cpu, this_cpu = smp_processor_id(); flush_tlb_mm() local
659 if (!cpu_online(cpu) || cpu == this_cpu) flush_tlb_mm()
706 int cpu, this_cpu = smp_processor_id(); flush_tlb_page() local
708 if (!cpu_online(cpu) || cpu == this_cpu) flush_tlb_page()
762 int cpu, this_cpu = smp_processor_id(); flush_icache_user_range() local
764 if (!cpu_online(cpu) || cpu == this_cpu) flush_icache_user_range()
/linux-4.1.27/arch/blackfin/mach-common/
dpmc.c
87 unsigned int this_cpu = smp_processor_id(); bfin_wakeup_cpu() local
91 cpumask_clear_cpu(this_cpu, &mask); bfin_wakeup_cpu()
/linux-4.1.27/arch/sparc/kernel/
process_64.c
194 __global_reg_self(struct thread_info *tp, struct pt_regs *regs, int this_cpu) __global_reg_self() argument
201 rp = &global_cpu_snapshot[this_cpu].reg; __global_reg_self()
247 int this_cpu, cpu; arch_trigger_all_cpu_backtrace() local
254 this_cpu = raw_smp_processor_id(); arch_trigger_all_cpu_backtrace()
259 __global_reg_self(tp, regs, this_cpu); arch_trigger_all_cpu_backtrace()
266 if (!include_self && cpu == this_cpu) for_each_online_cpu()
275 (cpu == this_cpu ? '*' : ' '), cpu, for_each_online_cpu()
312 static void __global_pmu_self(int this_cpu) __global_pmu_self() argument
320 pp = &global_cpu_snapshot[this_cpu].pmu; __global_pmu_self()
346 int this_cpu, cpu; pmu_snapshot_all_cpus() local
352 this_cpu = raw_smp_processor_id(); pmu_snapshot_all_cpus()
354 __global_pmu_self(this_cpu); pmu_snapshot_all_cpus()
364 (cpu == this_cpu ? '*' : ' '), cpu, for_each_online_cpu()
nmi.c
71 int this_cpu = smp_processor_id(); die_nmi() local
78 panic("Watchdog detected hard LOCKUP on cpu %d", this_cpu); die_nmi()
80 WARN(1, "Watchdog detected hard LOCKUP on cpu %d", this_cpu); die_nmi()
smp_64.c
623 int retries, this_cpu, prev_sent, i, saw_cpu_error; hypervisor_xcall_deliver() local
627 this_cpu = smp_processor_id(); hypervisor_xcall_deliver()
710 this_cpu, saw_cpu_error - 1); hypervisor_xcall_deliver()
716 this_cpu, retries); hypervisor_xcall_deliver()
721 this_cpu, status); hypervisor_xcall_deliver()
724 this_cpu, cnt, tb->cpu_list_pa, tb->cpu_mondo_block_pa); hypervisor_xcall_deliver()
727 printk(KERN_CRIT "CPU[%d]: CPU list [ ", this_cpu); hypervisor_xcall_deliver()
738 int this_cpu, i, cnt; xcall_deliver() local
755 this_cpu = smp_processor_id(); xcall_deliver()
756 tb = &trap_block[this_cpu]; xcall_deliver()
769 if (i == this_cpu || !cpu_online(i)) for_each_cpu()
888 int this_cpu; smp_flush_dcache_page_impl() local
897 this_cpu = get_cpu(); smp_flush_dcache_page_impl()
899 if (cpu == this_cpu) { smp_flush_dcache_page_impl()
chmc.c
591 unsigned long ret, this_cpu; chmc_read_mcreg() local
595 this_cpu = real_hard_smp_processor_id(); chmc_read_mcreg()
597 if (p->portid == this_cpu) { chmc_read_mcreg()
entry.h
246 void sun4v_register_mondo_queues(int this_cpu);
irq_64.c
995 void notrace sun4v_register_mondo_queues(int this_cpu) sun4v_register_mondo_queues() argument
997 struct trap_per_cpu *tb = &trap_block[this_cpu]; sun4v_register_mondo_queues()
/linux-4.1.27/arch/x86/kernel/cpu/
common.c
92 static const struct cpu_dev *this_cpu = &default_cpu; variable in typeref:struct:cpu_dev
372 if (!this_cpu) table_lookup_model()
375 info = this_cpu->legacy_models; table_lookup_model()
472 if (this_cpu->legacy_cache_size) cpu_detect_cache_sizes()
473 l2size = this_cpu->legacy_cache_size(c, l2size); cpu_detect_cache_sizes()
496 if (this_cpu->c_detect_tlb) cpu_detect_tlb()
497 this_cpu->c_detect_tlb(c); cpu_detect_tlb()
572 this_cpu = cpu_devs[i]; get_cpu_vendor()
573 c->x86_vendor = this_cpu->c_x86_vendor; get_cpu_vendor()
583 this_cpu = &default_cpu; get_cpu_vendor()
763 if (this_cpu->c_early_init) early_identify_cpu()
764 this_cpu->c_early_init(c); early_identify_cpu()
769 if (this_cpu->c_bsp_init) early_identify_cpu()
770 this_cpu->c_bsp_init(c); early_identify_cpu()
904 if (this_cpu->c_identify) identify_cpu()
905 this_cpu->c_identify(c); identify_cpu()
927 if (this_cpu->c_init) identify_cpu()
928 this_cpu->c_init(c); identify_cpu()
1114 vendor = this_cpu->c_vendor; print_cpu_info()
/linux-4.1.27/arch/x86/kernel/
irq.c
292 unsigned int this_cpu, vector, this_count, count; check_irq_vectors_for_cpu_disable() local
296 this_cpu = smp_processor_id(); check_irq_vectors_for_cpu_disable()
298 cpumask_clear_cpu(this_cpu, &online_new); check_irq_vectors_for_cpu_disable()
310 cpumask_clear_cpu(this_cpu, &affinity_new); check_irq_vectors_for_cpu_disable()
339 if (cpu == this_cpu) for_each_online_cpu()
357 this_cpu, this_count, count);
/linux-4.1.27/drivers/cpuidle/
cpuidle-ux500.c
29 int this_cpu = smp_processor_id(); ux500_enter_idle() local
52 if (!prcmu_is_cpu_in_wfi(this_cpu ? 0 : 1)) ux500_enter_idle()
coupled.c
334 static void cpuidle_coupled_poke_others(int this_cpu, cpuidle_coupled_poke_others() argument
340 if (cpu != this_cpu && cpu_online(cpu)) cpuidle_coupled_poke_others()
/linux-4.1.27/arch/arm/include/asm/
mmu_context.h
32 void a15_erratum_get_cpumask(int this_cpu, struct mm_struct *mm,
35 static inline void a15_erratum_get_cpumask(int this_cpu, struct mm_struct *mm, a15_erratum_get_cpumask() argument
mcpm.h
280 bool __mcpm_outbound_enter_critical(unsigned int this_cpu, unsigned int cluster);
/linux-4.1.27/init/
calibrate.c
278 int this_cpu = smp_processor_id(); calibrate_delay() local
280 if (per_cpu(cpu_loops_per_jiffy, this_cpu)) { calibrate_delay()
281 lpj = per_cpu(cpu_loops_per_jiffy, this_cpu); calibrate_delay()
305 per_cpu(cpu_loops_per_jiffy, this_cpu) = lpj; calibrate_delay()
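
calibrate_delay() above treats per_cpu() as a plain lvalue: read the cached loops-per-jiffy value for this CPU at line 280, write a freshly calibrated one back at line 305. The accessor pattern in isolation, with cpu_lpj_cache as a hypothetical per-CPU variable:

    #include <linux/percpu.h>
    #include <linux/smp.h>

    static DEFINE_PER_CPU(unsigned long, cpu_lpj_cache);   /* hypothetical */

    static unsigned long get_or_set_lpj(unsigned long fresh)
    {
            int this_cpu = smp_processor_id();      /* preemption assumed off */

            if (per_cpu(cpu_lpj_cache, this_cpu))   /* per_cpu() reads as an lvalue */
                    return per_cpu(cpu_lpj_cache, this_cpu);

            per_cpu(cpu_lpj_cache, this_cpu) = fresh;       /* ...and writes the same way */
            return fresh;
    }
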
/linux-4.1.27/arch/arm/common/
bL_switcher.c
151 unsigned int mpidr, this_cpu, that_cpu; bL_switch_to() local
157 this_cpu = smp_processor_id(); bL_switch_to()
161 BUG_ON(cpu_logical_map(this_cpu) != ob_mpidr); bL_switch_to()
166 that_cpu = bL_switcher_cpu_pairing[this_cpu]; bL_switch_to()
172 this_cpu, ob_mpidr, ib_mpidr); bL_switch_to()
174 this_cpu = smp_processor_id(); bL_switch_to()
182 ipi_nr = register_ipi_completion(&inbound_alive, this_cpu); bL_switch_to()
229 cpu_logical_map(this_cpu) = ib_mpidr; bL_switch_to()
239 pr_debug("after switch: CPU %d MPIDR %#x\n", this_cpu, mpidr); bL_switch_to()
/linux-4.1.27/arch/x86/kernel/cpu/mcheck/
therm_throt.c
154 unsigned int this_cpu = smp_processor_id(); therm_throt_process() local
157 struct thermal_state *pstate = &per_cpu(thermal_state, this_cpu); therm_throt_process()
194 this_cpu, therm_throt_process()
202 this_cpu, therm_throt_process()
213 unsigned int this_cpu = smp_processor_id(); thresh_event_valid() local
214 struct thermal_state *pstate = &per_cpu(thermal_state, this_cpu); thresh_event_valid()
/linux-4.1.27/kernel/
smp.c
276 int this_cpu; smp_call_function_single() local
283 this_cpu = get_cpu(); smp_call_function_single()
291 WARN_ON_ONCE(cpu_online(this_cpu) && irqs_disabled() smp_call_function_single()
408 int cpu, next_cpu, this_cpu = smp_processor_id(); smp_call_function_many() local
416 WARN_ON_ONCE(cpu_online(this_cpu) && irqs_disabled() smp_call_function_many()
421 if (cpu == this_cpu) smp_call_function_many()
430 if (next_cpu == this_cpu) smp_call_function_many()
442 cpumask_clear_cpu(this_cpu, cfd->cpumask); smp_call_function_many()
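
smp_call_function_many() above clears the calling CPU out of its working mask (line 442) so IPIs only go to other CPUs; any local invocation is made directly. The mask-preparation step on its own, assuming only the stock cpumask API:

    #include <linux/cpumask.h>
    #include <linux/smp.h>

    /* Build the set of CPUs that actually need an IPI: online members
     * of the request, minus the caller. Preemption must already be off. */
    static void build_remote_mask(struct cpumask *dst, const struct cpumask *req)
    {
            cpumask_and(dst, req, cpu_online_mask);
            cpumask_clear_cpu(smp_processor_id(), dst);
    }
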
watchdog.c
293 int this_cpu = smp_processor_id(); watchdog_overflow_callback() local
301 this_cpu); watchdog_overflow_callback()
304 this_cpu); watchdog_overflow_callback()
/linux-4.1.27/kernel/sched/
rt.c
1552 int this_cpu = smp_processor_id(); find_lowest_rq() local
1580 if (!cpumask_test_cpu(this_cpu, lowest_mask)) find_lowest_rq()
1581 this_cpu = -1; /* Skip this_cpu opt if not among lowest */ find_lowest_rq()
1589 * "this_cpu" is cheaper to preempt than a for_each_domain()
1592 if (this_cpu != -1 && for_each_domain()
1593 cpumask_test_cpu(this_cpu, sched_domain_span(sd))) { for_each_domain()
1595 return this_cpu; for_each_domain()
1613 if (this_cpu != -1)
1614 return this_cpu;
1893 int this_cpu; try_to_push_tasks() local
1896 this_cpu = rt_rq->push_cpu; try_to_push_tasks()
1899 BUG_ON(this_cpu != smp_processor_id()); try_to_push_tasks()
1901 rq = cpu_rq(this_cpu); try_to_push_tasks()
1953 int this_cpu = this_rq->cpu, ret = 0, cpu; pull_rt_task() local
1974 if (this_cpu == cpu) pull_rt_task()
2001 p = pick_highest_pushable_task(src_rq, this_cpu); pull_rt_task()
2025 set_task_cpu(p, this_cpu); pull_rt_task()
deadline.c
1239 int this_cpu = smp_processor_id(); find_later_rq() local
1274 * Check if this_cpu is to be skipped (i.e., it is find_later_rq()
1277 if (!cpumask_test_cpu(this_cpu, later_mask)) find_later_rq()
1278 this_cpu = -1; find_later_rq()
1285 * If possible, preempting this_cpu is for_each_domain()
1288 if (this_cpu != -1 && for_each_domain()
1289 cpumask_test_cpu(this_cpu, sched_domain_span(sd))) { for_each_domain()
1291 return this_cpu; for_each_domain()
1311 if (this_cpu != -1)
1312 return this_cpu;
1478 int this_cpu = this_rq->cpu, ret = 0, cpu; pull_dl_task() local
1493 if (this_cpu == cpu) pull_dl_task()
1517 p = pick_next_earliest_dl_task(src_rq, this_cpu); pull_dl_task()
1542 set_task_cpu(p, this_cpu); pull_dl_task()
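
rt.c and deadline.c share one control-flow shape in find_lowest_rq()/find_later_rq(): this_cpu is demoted to the sentinel -1 when the local CPU is not among the candidates, and every later "prefer this_cpu" branch checks the sentinel first. A compressed sketch; the cpumask_first() fallback stands in for the real sched-domain scan:

    #include <linux/cpumask.h>
    #include <linux/smp.h>

    static int pick_preferred_cpu(const struct cpumask *candidates)
    {
            int this_cpu = smp_processor_id();

            if (!cpumask_test_cpu(this_cpu, candidates))
                    this_cpu = -1;          /* sentinel: local CPU not a candidate */

            if (this_cpu != -1)
                    return this_cpu;        /* cheapest: no cross-CPU migration */

            return cpumask_first(candidates);       /* stand-in for the real scan */
    }
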
debug.c
657 unsigned int this_cpu = raw_smp_processor_id(); proc_sched_show_task() local
660 t0 = cpu_clock(this_cpu); proc_sched_show_task()
661 t1 = cpu_clock(this_cpu); proc_sched_show_task()
fair.c
4568 int idx, this_cpu, prev_cpu; wake_affine() local
4581 this_cpu = smp_processor_id(); wake_affine()
4584 this_load = target_load(this_cpu, idx); wake_affine()
4595 this_load += effective_load(tg, this_cpu, -weight, -weight); wake_affine()
4603 * In low-load situations, where prev_cpu is idle and this_cpu is idle wake_affine()
4609 * task to be woken on this_cpu. wake_affine()
4615 prev_eff_load *= capacity_of(this_cpu); wake_affine()
4619 effective_load(tg, this_cpu, weight, weight); wake_affine()
4642 find_idlest_group(struct sched_domain *sd, struct task_struct *p, int this_cpu, int sd_flag) find_idlest_group() argument
4663 local_group = cpumask_test_cpu(this_cpu, find_idlest_group()
4699 find_idlest_cpu(struct sched_group *group, struct task_struct *p, int this_cpu) find_idlest_cpu() argument
4704 int least_loaded_cpu = this_cpu; find_idlest_cpu()
4734 if (load < min_load || (load == min_load && i == this_cpu)) { for_each_cpu_and()
5561 * can_migrate_task - may task p from runqueue rq be migrated to this_cpu?
6256 * @load_idx: Load index of sched_domain of this_cpu for load calc.
6257 * @local_group: Does group contain this_cpu.
6908 * Check this_cpu to ensure it is balanced within domain. Attempt to move
6911 static int load_balance(int this_cpu, struct rq *this_rq, load_balance() argument
6924 .dst_cpu = this_cpu, load_balance()
7087 * moved to this_cpu load_balance()
7089 if (!cpumask_test_cpu(this_cpu, load_balance()
7104 busiest->push_cpu = this_cpu; load_balance()
7202 * idle_balance is called by schedule() if this_cpu is about to become
7208 int this_cpu = this_rq->cpu; idle_balance() local
7237 update_blocked_averages(this_cpu); idle_balance()
7239 for_each_domain(this_cpu, sd) { for_each_domain()
7252 t0 = sched_clock_cpu(this_cpu); for_each_domain()
7254 pulled_task = load_balance(this_cpu, this_rq, for_each_domain()
7258 domain_cost = sched_clock_cpu(this_cpu) - t0; for_each_domain()
7634 int this_cpu = this_rq->cpu; nohz_idle_balance() local
7639 !test_bit(NOHZ_BALANCE_KICK, nohz_flags(this_cpu))) nohz_idle_balance()
7643 if (balance_cpu == this_cpu || !idle_cpu(balance_cpu)) for_each_cpu()
7673 clear_bit(NOHZ_BALANCE_KICK, nohz_flags(this_cpu));
7839 int this_cpu = smp_processor_id(); task_fork_fair() local
7857 __set_task_cpu(p, this_cpu); task_fork_fair()
core.c
1415 int this_cpu = smp_processor_id(); ttwu_stat() local
1417 if (cpu == this_cpu) { ttwu_stat()
1425 for_each_domain(this_cpu, sd) { for_each_domain()
1617 bool cpus_share_cache(int this_cpu, int that_cpu) cpus_share_cache() argument
1619 return per_cpu(sd_llc_id, this_cpu) == per_cpu(sd_llc_id, that_cpu); cpus_share_cache()
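
cpus_share_cache() at line 1617 reduces to a single comparison: two CPUs share a last-level cache exactly when their per-CPU sd_llc_id values match. A standalone restatement; sd_llc_id is the scheduler's own per-CPU variable, re-declared here only for the sketch:

    #include <linux/types.h>
    #include <linux/percpu.h>

    DECLARE_PER_CPU(int, sd_llc_id);        /* defined by kernel/sched/ */

    static bool shares_llc(int this_cpu, int that_cpu)
    {
            /* equal LLC-domain ids => same last-level cache */
            return per_cpu(sd_llc_id, this_cpu) == per_cpu(sd_llc_id, that_cpu);
    }
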
/linux-4.1.27/tools/perf/
builtin-sched.c
1313 int cpu, this_cpu = sample->cpu; map_switch_event() local
1315 BUG_ON(this_cpu >= MAX_CPUS || this_cpu < 0); map_switch_event()
1317 if (this_cpu > sched->max_cpu) map_switch_event()
1318 sched->max_cpu = this_cpu; map_switch_event()
1320 timestamp0 = sched->cpu_last_switched[this_cpu]; map_switch_event()
1321 sched->cpu_last_switched[this_cpu] = timestamp; map_switch_event()
1334 sched->curr_thread[this_cpu] = sched_in; map_switch_event()
1365 if (cpu != this_cpu) map_switch_event()
1393 int this_cpu = sample->cpu, err = 0; process_sched_switch_event() local
1397 if (sched->curr_pid[this_cpu] != (u32)-1) { process_sched_switch_event()
1402 if (sched->curr_pid[this_cpu] != prev_pid) process_sched_switch_event()
1409 sched->curr_pid[this_cpu] = next_pid; process_sched_switch_event()
/linux-4.1.27/arch/ia64/kernel/
smp.c
99 int this_cpu = get_cpu(); handle_IPI() local
129 this_cpu, which); handle_IPI()
perfmon.c
5263 int this_cpu = smp_processor_id(); pfm_overflow_handler() local
5298 pfm_stats[this_cpu].pfm_smpl_handler_calls++; pfm_overflow_handler()
5321 pfm_stats[this_cpu].pfm_smpl_handler_cycles += end_cycles - start_cycles; pfm_overflow_handler()
5463 int this_cpu = smp_processor_id(); pfm_do_interrupt_handler() local
5466 pfm_stats[this_cpu].pfm_ovfl_intr_count++; pfm_do_interrupt_handler()
5498 pfm_stats[this_cpu].pfm_spurious_ovfl_intr_count++; pfm_do_interrupt_handler()
5510 this_cpu, task_pid_nr(task)); pfm_do_interrupt_handler()
5515 this_cpu, pfm_do_interrupt_handler()
5526 int this_cpu; pfm_interrupt_handler() local
5530 this_cpu = get_cpu(); pfm_interrupt_handler()
5532 min = pfm_stats[this_cpu].pfm_ovfl_intr_cycles_min; pfm_interrupt_handler()
5533 max = pfm_stats[this_cpu].pfm_ovfl_intr_cycles_max; pfm_interrupt_handler()
5547 if (total_cycles < min) pfm_stats[this_cpu].pfm_ovfl_intr_cycles_min = total_cycles; pfm_interrupt_handler()
5548 if (total_cycles > max) pfm_stats[this_cpu].pfm_ovfl_intr_cycles_max = total_cycles; pfm_interrupt_handler()
5550 pfm_stats[this_cpu].pfm_ovfl_intr_cycles += total_cycles; pfm_interrupt_handler()
6689 int i, this_cpu; dump_pmu_state() local
6693 this_cpu = smp_processor_id(); dump_pmu_state()
6704 this_cpu, dump_pmu_state()
6713 printk("->CPU%d owner [%d] ctx=%p\n", this_cpu, task ? task_pid_nr(task) : -1, ctx); dump_pmu_state()
6718 this_cpu, dump_pmu_state()
6732 printk("->CPU%d pmc[%d]=0x%lx thread_pmc[%d]=0x%lx\n", this_cpu, i, ia64_get_pmc(i), i, ctx->th_pmcs[i]); dump_pmu_state()
6737 printk("->CPU%d pmd[%d]=0x%lx thread_pmd[%d]=0x%lx\n", this_cpu, i, ia64_get_pmd(i), i, ctx->th_pmds[i]); dump_pmu_state()
6742 this_cpu, dump_pmu_state()
process.c
215 unsigned int this_cpu = smp_processor_id(); play_dead() local
223 ia64_jump_to_sal(&sal_boot_rendez_state[this_cpu]); play_dead()
/linux-4.1.27/arch/arm/mm/
context.c
54 void a15_erratum_get_cpumask(int this_cpu, struct mm_struct *mm, a15_erratum_get_cpumask() argument
64 if (cpu == this_cpu) for_each_online_cpu()
/linux-4.1.27/drivers/cpufreq/
acpi-cpufreq.c
321 int this_cpu; drv_write() local
323 this_cpu = get_cpu(); drv_write()
324 if (cpumask_test_cpu(this_cpu, cmd->mask)) drv_write()
/linux-4.1.27/arch/sparc/mm/
init_64.c
222 static inline void set_dcache_dirty(struct page *page, int this_cpu) set_dcache_dirty() argument
224 unsigned long mask = this_cpu; set_dcache_dirty()
291 int this_cpu = get_cpu(); flush_dcache() local
296 if (cpu == this_cpu) flush_dcache()
373 int this_cpu; flush_dcache_page() local
385 this_cpu = get_cpu(); flush_dcache_page()
393 if (dirty_cpu == this_cpu) flush_dcache_page()
397 set_dcache_dirty(page, this_cpu); flush_dcache_page()
/linux-4.1.27/kernel/time/
hrtimer.c
202 int this_cpu = smp_processor_id(); switch_hrtimer_base() local
228 if (cpu != this_cpu && hrtimer_check_target(timer, new_base)) { switch_hrtimer_base()
229 cpu = this_cpu; switch_hrtimer_base()
237 if (cpu != this_cpu && hrtimer_check_target(timer, new_base)) { switch_hrtimer_base()
238 cpu = this_cpu; switch_hrtimer_base()
/linux-4.1.27/arch/x86/xen/
smp.c
655 unsigned int this_cpu = smp_processor_id(); xen_send_IPI_mask_allbutself() local
662 if (this_cpu == cpu) for_each_cpu_and()
/linux-4.1.27/tools/perf/bench/
numa.c
1148 int this_cpu; worker_thread() local
1157 this_cpu = g->threads[task_nr].curr_cpu; worker_thread()
1158 if (this_cpu < g->p.nr_cpus/2) worker_thread()
/linux-4.1.27/include/linux/
percpu-defs.h
374 * this_cpu operations (C) 2008-2013 Christoph Lameter <cl@linux.com>
sched.h
1047 bool cpus_share_cache(int this_cpu, int that_cpu);
1092 static inline bool cpus_share_cache(int this_cpu, int that_cpu) cpus_share_cache() argument
/linux-4.1.27/kernel/printk/
printk.c
1620 int this_cpu; vprintk_emit() local
1636 this_cpu = smp_processor_id(); vprintk_emit()
1641 if (unlikely(logbuf_cpu == this_cpu)) { vprintk_emit()
1659 logbuf_cpu = this_cpu; vprintk_emit()
/linux-4.1.27/tools/power/x86/turbostat/
turbostat.c
1458 int this_cpu; get_cpu_position_in_core() local
1472 fscanf(filep, "%d", &this_cpu); get_cpu_position_in_core()
1473 if (this_cpu == cpu) { get_cpu_position_in_core()
/linux-4.1.27/fs/
eventpoll.c
508 int this_cpu = get_cpu(); ep_poll_safewake() local
511 ep_poll_wakeup_proc, NULL, wq, (void *) (long) this_cpu); ep_poll_safewake()

Completed in 1084 milliseconds