Lines Matching refs:span
2002 for_each_cpu_and(i, rd->span, cpu_active_mask) in dl_bw_cpus()
3566 cpumask_t *span = rq->rd->span; in __sched_setscheduler() local
3573 if (!cpumask_subset(span, &p->cpus_allowed) || in __sched_setscheduler()
4074 if (!cpumask_subset(task_rq(p)->rd->span, new_mask)) { in sched_setaffinity()
4694 if (dl_task(p) && !cpumask_intersects(task_rq(p)->rd->span, in task_can_attach()
5282 BUG_ON(!cpumask_test_cpu(cpu, rq->rd->span)); in migration_call()
5295 BUG_ON(!cpumask_test_cpu(cpu, rq->rd->span)); in migration_call()
5574 free_cpumask_var(rd->span); in free_rootdomain()
5591 cpumask_clear_cpu(rq->cpu, old_rd->span); in rq_attach_root()
5605 cpumask_set_cpu(rq->cpu, rd->span); in rq_attach_root()
5619 if (!alloc_cpumask_var(&rd->span, GFP_KERNEL)) in init_rootdomain()
5643 free_cpumask_var(rd->span); in init_rootdomain()
5853 const struct cpumask *span = sched_domain_span(sd); in build_group_mask() local
5858 for_each_cpu(i, span) { in build_group_mask()
5880 const struct cpumask *span = sched_domain_span(sd); in build_overlap_sched_groups() local
5888 for_each_cpu(i, span) { in build_overlap_sched_groups()
5980 const struct cpumask *span = sched_domain_span(sd); in build_sched_groups() local
5987 if (cpu != cpumask_first(span)) in build_sched_groups()
5995 for_each_cpu(i, span) { in build_sched_groups()
6005 for_each_cpu(j, span) { in build_sched_groups()
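
The matches above cover four uses of the span masks: SCHED_DEADLINE bandwidth and affinity checks against rd->span (dl_bw_cpus(), __sched_setscheduler(), sched_setaffinity(), task_can_attach()), hotplug sanity checks in migration_call(), the root-domain lifecycle (init_rootdomain(), rq_attach_root(), free_rootdomain()), and sched-group construction over sched_domain_span() (build_group_mask(), build_overlap_sched_groups(), build_sched_groups()).

The sketch below condenses only the lifecycle group from the referenced lines. It assumes the struct root_domain and struct rq definitions from kernel/sched/sched.h, and it drops locking, refcounting and the other root-domain masks (online, rto_mask, ...), so it is illustrative rather than the exact kernel bodies; the numbers in the comments point back at the listing above.

	static int init_rootdomain(struct root_domain *rd)
	{
		if (!alloc_cpumask_var(&rd->span, GFP_KERNEL))	/* 5619 */
			return -ENOMEM;
		/*
		 * The real function goes on to allocate the remaining masks;
		 * if one of those allocations fails it unwinds with
		 * free_cpumask_var(rd->span) -- the reference at 5643.
		 */
		return 0;
	}

	static void free_rootdomain(struct root_domain *rd)
	{
		free_cpumask_var(rd->span);			/* 5574 */
		kfree(rd);
	}

	static void rq_attach_root(struct rq *rq, struct root_domain *rd)
	{
		struct root_domain *old_rd = rq->rd;

		if (old_rd)
			cpumask_clear_cpu(rq->cpu, old_rd->span);	/* 5591 */

		rq->rd = rd;
		cpumask_set_cpu(rq->cpu, rd->span);		/* 5605 */
	}

Because rq_attach_root() is what populates rd->span, the checks in the first group can treat the span as "the set of CPUs this root domain currently covers": the cpumask_subset() test at 3573, for example, refuses SCHED_DEADLINE admission unless the task is allowed to run on every CPU in that span.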