Searched refs:prev_cpu (Results 1 - 3 of 3) sorted by relevance

/linux-4.4.14/arch/sparc/kernel/
cpumap.c 192 int n, id, cpu, prev_cpu, last_cpu, level; build_cpuinfo_tree() local
204 prev_cpu = cpu = cpumask_first(cpu_online_mask); build_cpuinfo_tree()
268 (cpu == last_cpu) ? cpu : prev_cpu; build_cpuinfo_tree()
290 prev_cpu = cpu; build_cpuinfo_tree()
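
The three cpumap.c hits above all belong to one loop in build_cpuinfo_tree(): the walk starts at the first online CPU and remembers the previously visited CPU in prev_cpu as it advances through the online mask. Below is a minimal user-space sketch of that walk pattern only; the bitmask and the next_online_cpu() helper are hypothetical stand-ins for cpu_online_mask and the kernel cpumask API, not the kernel code itself.

```c
/*
 * User-space sketch (not the kernel cpumask API) of the pattern above:
 * walk the online-CPU mask while remembering the previously visited CPU
 * in prev_cpu, as build_cpuinfo_tree() does.
 */
#include <stdio.h>

#define NR_CPUS 8

/* Hypothetical stand-in for cpu_online_mask: bit i set => CPU i online. */
static unsigned int online_mask = 0xAF; /* CPUs 0,1,2,3,5,7 online */

/* First online CPU at or after 'start', or NR_CPUS if there is none. */
static int next_online_cpu(int start)
{
	for (int cpu = start; cpu < NR_CPUS; cpu++)
		if (online_mask & (1u << cpu))
			return cpu;
	return NR_CPUS;
}

int main(void)
{
	int prev_cpu, cpu;

	/* Mirrors: prev_cpu = cpu = cpumask_first(cpu_online_mask); */
	prev_cpu = cpu = next_online_cpu(0);

	while (cpu < NR_CPUS) {
		printf("visiting CPU %d (previous was %d)\n", cpu, prev_cpu);
		prev_cpu = cpu;                 /* mirrors: prev_cpu = cpu; */
		cpu = next_online_cpu(cpu + 1); /* advance to the next online CPU */
	}
	return 0;
}
```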
/linux-4.4.14/kernel/sched/
fair.c 4665 int idx, this_cpu, prev_cpu; wake_affine() local
4672 prev_cpu = task_cpu(p); wake_affine()
4673 load = source_load(prev_cpu, idx); wake_affine()
4686 load += effective_load(tg, prev_cpu, 0, -weight); wake_affine()
4693 * In low-load situations, where prev_cpu is idle and this_cpu is idle wake_affine()
4702 this_eff_load *= capacity_of(prev_cpu); wake_affine()
4711 prev_eff_load *= load + effective_load(tg, prev_cpu, 0, weight); wake_affine()
4926 select_task_rq_fair(struct task_struct *p, int prev_cpu, int sd_flag, int wake_flags) select_task_rq_fair() argument
4930 int new_cpu = prev_cpu; select_task_rq_fair()
4943 * If both cpu and prev_cpu are part of this domain, for_each_domain()
4947 cpumask_test_cpu(prev_cpu, sched_domain_span(tmp))) { for_each_domain()
4960 if (cpu != prev_cpu && wake_affine(affine_sd, p, sync))
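
In fair.c, prev_cpu is the CPU the waking task last ran on: select_task_rq_fair() starts with new_cpu = prev_cpu as the fallback target, and wake_affine() decides whether to pull the task toward the waking CPU by, roughly, scaling each side's load by the other CPU's capacity (see the capacity_of(prev_cpu) line above) so the comparison stays in integer arithmetic. The sketch below shows only that cross-multiplied comparison; the function name, arguments, and numbers are illustrative and not the kernel's wake_affine().

```c
/*
 * Hedged sketch of a capacity-scaled load comparison in the spirit of the
 * fair.c lines above: instead of dividing each CPU's load by its capacity,
 * cross-multiply so the test uses only integer arithmetic.
 */
#include <stdbool.h>
#include <stdio.h>

/*
 * Balanced if this_load / this_capacity <= prev_load / prev_capacity,
 * rewritten as this_load * prev_capacity <= prev_load * this_capacity.
 */
static bool affine_is_balanced(unsigned long this_load, unsigned long this_capacity,
			       unsigned long prev_load, unsigned long prev_capacity)
{
	unsigned long this_eff_load = this_load * prev_capacity;
	unsigned long prev_eff_load = prev_load * this_capacity;

	return this_eff_load <= prev_eff_load;
}

int main(void)
{
	/* Example: prev_cpu carries more load, so pulling the task is fine. */
	if (affine_is_balanced(200, 1024, 600, 1024))
		printf("wake on this_cpu (affine wakeup)\n");
	else
		printf("keep the task near prev_cpu\n");
	return 0;
}
```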
rt.c 1816 int prev_cpu = rq->rt.push_cpu; rto_next_cpu() local
1819 cpu = cpumask_next(prev_cpu, rq->rd->rto_mask); rto_next_cpu()
1826 if (prev_cpu < rq->cpu) { rto_next_cpu()
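
The rt.c hits show rto_next_cpu() using prev_cpu (the last push_cpu) as the starting point for a round-robin scan of the RT-overload mask, wrapping back to the first set CPU when the scan runs past the end. A simplified, self-contained version of that wrap-around scan is sketched below; the mask value and helper names are made up for the example, and it omits the termination check the kernel performs after a full pass (the prev_cpu < rq->cpu test at line 1826).

```c
/*
 * Simplified sketch (not the kernel's rto_next_cpu()) of the round-robin
 * pattern above: starting after prev_cpu, pick the next set bit in an
 * overloaded-CPU mask, wrapping to the first set bit at the end.
 */
#include <stdio.h>

#define NR_CPUS 8

/* Hypothetical stand-in for rq->rd->rto_mask. */
static unsigned int rto_mask = 0x52; /* CPUs 1, 4, 6 have pushable RT tasks */

/* First CPU in the mask strictly after 'prev', or NR_CPUS if none. */
static int mask_next(int prev)
{
	for (int cpu = prev + 1; cpu < NR_CPUS; cpu++)
		if (rto_mask & (1u << cpu))
			return cpu;
	return NR_CPUS;
}

/* Round-robin: continue after prev_cpu, wrapping to the start of the mask. */
static int rto_next_cpu_sketch(int prev_cpu)
{
	int cpu = mask_next(prev_cpu);

	if (cpu >= NR_CPUS)          /* ran off the end of the mask: wrap */
		cpu = mask_next(-1); /* i.e. the first CPU in the mask */
	return cpu;
}

int main(void)
{
	int cpu = -1;

	for (int i = 0; i < 6; i++) {
		cpu = rto_next_cpu_sketch(cpu);
		printf("push IPI target: CPU %d\n", cpu);
	}
	return 0;
}
```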
