sched_domain_span  562 kernel/sched/core.c 		for_each_cpu(i, sched_domain_span(sd)) {
sched_domain_span 2190 kernel/sched/core.c 			if (cpumask_test_cpu(cpu, sched_domain_span(sd))) {
sched_domain_span 1928 kernel/sched/deadline.c 			    cpumask_test_cpu(this_cpu, sched_domain_span(sd))) {
sched_domain_span 1934 kernel/sched/deadline.c 							sched_domain_span(sd));
sched_domain_span 5777 kernel/sched/fair.c 	if (!cpumask_intersects(sched_domain_span(sd), p->cpus_ptr))
sched_domain_span 5894 kernel/sched/fair.c 	cpumask_and(cpus, sched_domain_span(sd), p->cpus_ptr);
sched_domain_span 5992 kernel/sched/fair.c 	cpumask_and(cpus, sched_domain_span(sd), p->cpus_ptr);
sched_domain_span 6379 kernel/sched/fair.c 	while (sd && !cpumask_test_cpu(prev_cpu, sched_domain_span(sd)))
sched_domain_span 6397 kernel/sched/fair.c 		for_each_cpu_and(cpu, perf_domain_span(pd), sched_domain_span(sd)) {
sched_domain_span 6501 kernel/sched/fair.c 		    cpumask_test_cpu(prev_cpu, sched_domain_span(tmp))) {
sched_domain_span 8296 kernel/sched/fair.c 	    cpumask_subset(nohz.idle_cpus_mask, sched_domain_span(env->sd))) {
sched_domain_span 8852 kernel/sched/fair.c 	cpumask_and(cpus, sched_domain_span(sd), cpu_active_mask);
sched_domain_span 9176 kernel/sched/fair.c 		    cpumask_test_cpu(busiest_cpu, sched_domain_span(sd)))
sched_domain_span 9458 kernel/sched/fair.c 		for_each_cpu_and(i, sched_domain_span(sd), nohz.idle_cpus_mask) {
sched_domain_span 1680 kernel/sched/rt.c 			    cpumask_test_cpu(this_cpu, sched_domain_span(sd))) {
sched_domain_span 1686 kernel/sched/rt.c 						     sched_domain_span(sd));
sched_domain_span   49 kernel/sched/stats.c 				   cpumask_pr_args(sched_domain_span(sd)));
sched_domain_span   45 kernel/sched/topology.c 	       cpumask_pr_args(sched_domain_span(sd)), sd->name);
sched_domain_span   47 kernel/sched/topology.c 	if (!cpumask_test_cpu(cpu, sched_domain_span(sd))) {
sched_domain_span   91 kernel/sched/topology.c 		    !cpumask_equal(sched_domain_span(sd->child),
sched_domain_span  106 kernel/sched/topology.c 	if (!cpumask_equal(sched_domain_span(sd), groupmask))
sched_domain_span  110 kernel/sched/topology.c 	    !cpumask_subset(groupmask, sched_domain_span(sd->parent)))
sched_domain_span  150 kernel/sched/topology.c 	if (cpumask_weight(sched_domain_span(sd)) == 1)
sched_domain_span  181 kernel/sched/topology.c 	if (!cpumask_equal(sched_domain_span(sd), sched_domain_span(parent)))
sched_domain_span  636 kernel/sched/topology.c 		id = cpumask_first(sched_domain_span(sd));
sched_domain_span  637 kernel/sched/topology.c 		size = cpumask_weight(sched_domain_span(sd));
sched_domain_span  861 kernel/sched/topology.c 		if (!cpumask_equal(sg_span, sched_domain_span(sibling->child)))
sched_domain_span  890 kernel/sched/topology.c 		cpumask_copy(sg_span, sched_domain_span(sd->child));
sched_domain_span  892 kernel/sched/topology.c 		cpumask_copy(sg_span, sched_domain_span(sd));
sched_domain_span  930 kernel/sched/topology.c 	const struct cpumask *span = sched_domain_span(sd);
sched_domain_span  956 kernel/sched/topology.c 		if (!cpumask_test_cpu(i, sched_domain_span(sibling)))
sched_domain_span 1065 kernel/sched/topology.c 		cpu = cpumask_first(sched_domain_span(child));
sched_domain_span 1080 kernel/sched/topology.c 		cpumask_copy(sched_group_span(sg), sched_domain_span(child));
sched_domain_span 1106 kernel/sched/topology.c 	const struct cpumask *span = sched_domain_span(sd);
sched_domain_span 1373 kernel/sched/topology.c 	cpumask_and(sched_domain_span(sd), cpu_map, tl->mask(cpu));
sched_domain_span 1374 kernel/sched/topology.c 	sd_id = cpumask_first(sched_domain_span(sd));
sched_domain_span 1866 kernel/sched/topology.c 		if (!cpumask_subset(sched_domain_span(child),
sched_domain_span 1867 kernel/sched/topology.c 				    sched_domain_span(sd))) {
sched_domain_span 1874 kernel/sched/topology.c 			cpumask_or(sched_domain_span(sd),
sched_domain_span 1875 kernel/sched/topology.c 				   sched_domain_span(sd),
sched_domain_span 1876 kernel/sched/topology.c 				   sched_domain_span(child));
sched_domain_span 2026 kernel/sched/topology.c 			if (cpumask_equal(cpu_map, sched_domain_span(sd)))
sched_domain_span 2034 kernel/sched/topology.c 			sd->span_weight = cpumask_weight(sched_domain_span(sd));
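
Every call site listed above treats `sched_domain_span(sd)` as a read-only `struct cpumask *` describing which CPUs the domain covers; in the kernel it is a small accessor in the scheduler headers that returns `to_cpumask(sd->span)`. The recurring pattern in the fair.c, rt.c and deadline.c hits is "intersect the domain span with the CPUs the task is allowed on, then walk the result". The sketch below is not kernel code: it is a minimal userspace illustration of that pattern, with a fixed-width bitmask standing in for `struct cpumask` and hypothetical names (`domain_span`, `task_allowed`, `mask_and`) chosen only for this example.

/*
 * Userspace illustration of the pattern seen at the call sites above:
 * intersect a scheduling domain's CPU span with a task's allowed CPUs
 * and iterate over the result. Types and names are stand-ins, not
 * kernel API.
 */
#include <stdint.h>
#include <stdio.h>

typedef uint64_t cpu_mask_t;          /* stand-in for struct cpumask (<= 64 CPUs) */

#define CPU_MASK(cpu)   ((cpu_mask_t)1 << (cpu))

/* Analogue of cpumask_and(): dst = a & b, returns nonzero if non-empty. */
static int mask_and(cpu_mask_t *dst, cpu_mask_t a, cpu_mask_t b)
{
	*dst = a & b;
	return *dst != 0;
}

int main(void)
{
	cpu_mask_t domain_span  = CPU_MASK(0) | CPU_MASK(1) | CPU_MASK(2) | CPU_MASK(3);
	cpu_mask_t task_allowed = CPU_MASK(2) | CPU_MASK(3) | CPU_MASK(5);
	cpu_mask_t candidates;

	/* Mirrors: cpumask_and(cpus, sched_domain_span(sd), p->cpus_ptr); */
	if (!mask_and(&candidates, domain_span, task_allowed)) {
		puts("domain span and allowed CPUs do not intersect");
		return 0;
	}

	/* Mirrors: for_each_cpu(i, cpus) { ... } over the intersection. */
	for (int cpu = 0; cpu < 64; cpu++) {
		if (candidates & CPU_MASK(cpu))
			printf("candidate CPU %d\n", cpu);
	}
	return 0;
}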