cpu_active_mask   181 arch/mips/kernel/mips-mt-fpaff.c 	cpumask_and(&mask, &allowed, cpu_active_mask);
cpu_active_mask    59 arch/mips/loongson64/loongson-3/irq.c 		cpumask_and(&affinity, irqd->common->affinity, cpu_active_mask);
cpu_active_mask   116 include/linux/cpumask.h #define num_active_cpus()	cpumask_weight(cpu_active_mask)
cpu_active_mask   120 include/linux/cpumask.h #define cpu_active(cpu)		cpumask_test_cpu((cpu), cpu_active_mask)
cpu_active_mask   997 kernel/cgroup/cpuset.c 	    !cpumask_equal(top_cpuset.effective_cpus, cpu_active_mask))
cpu_active_mask  1001 kernel/cgroup/cpuset.c 	   !cpumask_subset(top_cpuset.effective_cpus, cpu_active_mask))
cpu_active_mask  1062 kernel/cgroup/cpuset.c 		cpumask_and(new_cpus, new_cpus, cpu_active_mask);
cpu_active_mask  1195 kernel/cgroup/cpuset.c 					 cpu_active_mask))
cpu_active_mask  1271 kernel/cgroup/cpuset.c 		cpumask_and(tmp->delmask, tmp->delmask, cpu_active_mask);
cpu_active_mask  3135 kernel/cgroup/cpuset.c 	cpumask_copy(&new_cpus, cpu_active_mask);
cpu_active_mask  3251 kernel/cgroup/cpuset.c 	cpumask_copy(top_cpuset.cpus_allowed, cpu_active_mask);
cpu_active_mask  3255 kernel/cgroup/cpuset.c 	cpumask_copy(top_cpuset.effective_cpus, cpu_active_mask);
cpu_active_mask  1627 kernel/sched/core.c 	const struct cpumask *cpu_valid_mask = cpu_active_mask;
cpu_active_mask  1669 kernel/sched/core.c 			!cpumask_intersects(new_mask, cpu_active_mask) &&
cpu_active_mask  5526 kernel/sched/core.c 	cpumask_and(mask, &p->cpus_mask, cpu_active_mask);
cpu_active_mask  6506 kernel/sched/core.c 	sched_init_domains(cpu_active_mask);
cpu_active_mask    61 kernel/sched/deadline.c 	for_each_cpu_and(i, rd->span, cpu_active_mask)
cpu_active_mask   542 kernel/sched/deadline.c 		cpu = cpumask_any_and(cpu_active_mask, p->cpus_ptr);
cpu_active_mask   555 kernel/sched/deadline.c 			cpu = cpumask_any(cpu_active_mask);
cpu_active_mask  2724 kernel/sched/deadline.c 	dest_cpu = cpumask_any_and(cpu_active_mask, cs_cpus_allowed);
cpu_active_mask  8852 kernel/sched/fair.c 	cpumask_and(cpus, sched_domain_span(sd), cpu_active_mask);
cpu_active_mask  2232 kernel/sched/sched.h 	for_each_cpu_and(i, rd->span, cpu_active_mask) {
cpu_active_mask   468 kernel/sched/topology.c 	if (cpumask_test_cpu(rq->cpu, cpu_active_mask))
cpu_active_mask  2242 kernel/sched/topology.c 			cpumask_and(doms_new[0], cpu_active_mask,
cpu_active_mask  2277 kernel/sched/topology.c 		cpumask_and(doms_new[0], cpu_active_mask,
cpu_active_mask   684 kernel/stop_machine.c 	queue_stop_cpus_work(cpu_active_mask, multi_cpu_stop, &msdata,
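
The references above share one recurring pattern: whenever scheduler, cpuset, or affinity code derives a working set of CPUs, it intersects the candidate mask with cpu_active_mask (cpumask_and / cpumask_any_and) and, if the intersection is empty, falls back to an arbitrary active CPU (cpumask_any). A minimal sketch of that pattern follows, modelled on the kernel/sched/deadline.c references above; the helper name pick_active_cpu is hypothetical and not part of the listing, and the snippet is an illustration rather than kernel code.

/*
 * Hypothetical helper sketching the recurring pattern seen above:
 * restrict a task's allowed CPUs to the active set, then fall back
 * to any active CPU when the intersection is empty.
 */
#include <linux/cpumask.h>
#include <linux/sched.h>

static int pick_active_cpu(struct task_struct *p)
{
	int cpu;

	/* Prefer a CPU that is both allowed for @p and currently active. */
	cpu = cpumask_any_and(cpu_active_mask, p->cpus_ptr);
	if (cpu < nr_cpu_ids)
		return cpu;

	/* No allowed CPU is active: settle for any active CPU. */
	return cpumask_any(cpu_active_mask);
}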