group_load       7717 kernel/sched/fair.c 	unsigned long group_load; /* Total load over the CPUs of the group */
group_load       8067 kernel/sched/fair.c 		sgs->group_load += cpu_runnable_load(rq);
group_load       8097 kernel/sched/fair.c 	sgs->avg_load = (sgs->group_load*SCHED_CAPACITY_SCALE) / sgs->group_capacity;
group_load       8100 kernel/sched/fair.c 		sgs->load_per_task = sgs->group_load / sgs->sum_nr_running;
group_load       8288 kernel/sched/fair.c 		sds->total_load += sgs->group_load;
group_load       8363 kernel/sched/fair.c 	env->imbalance = sds->busiest_stat.group_load;
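Taken together, these hits trace group_load through the load-balancing statistics path: it is accumulated from cpu_runnable_load() for each runqueue in the group (update_sg_lb_stats() in this kernel version), normalized into avg_load by scaling with SCHED_CAPACITY_SCALE and dividing by group_capacity, divided by sum_nr_running to get load_per_task, summed into the domain-wide total_load (update_sd_lb_stats()), and, in the asym-packing case, copied directly into env->imbalance. The snippet below is a minimal userspace sketch of that arithmetic only, assuming SCHED_CAPACITY_SCALE = 1024 (its kernel value); the struct and the per-CPU inputs are simplified stand-ins for the kernel's sg_lb_stats and cpu_runnable_load(), not real scheduler code.

/*
 * Userspace sketch of the group_load arithmetic shown in the hits above.
 * group_stats is a simplified stand-in for the kernel's sg_lb_stats; the
 * per-CPU loads are hypothetical values, not real cpu_runnable_load() output.
 */
#include <stdio.h>

#define SCHED_CAPACITY_SCALE 1024UL

struct group_stats {
	unsigned long group_load;     /* total runnable load over the group's CPUs */
	unsigned long group_capacity; /* total capacity of the group */
	unsigned int  sum_nr_running; /* runnable tasks in the group */
	unsigned long avg_load;       /* group load normalized by capacity */
	unsigned long load_per_task;
};

int main(void)
{
	/* Hypothetical per-CPU runnable loads and task counts for a 4-CPU group. */
	unsigned long cpu_load[] = { 512, 768, 256, 1024 };
	unsigned int  nr_running[] = { 1, 2, 1, 3 };

	struct group_stats sgs = { .group_capacity = 4 * SCHED_CAPACITY_SCALE };

	/* Mirrors: sgs->group_load += cpu_runnable_load(rq); */
	for (int i = 0; i < 4; i++) {
		sgs.group_load += cpu_load[i];
		sgs.sum_nr_running += nr_running[i];
	}

	/* Mirrors: sgs->avg_load = (sgs->group_load*SCHED_CAPACITY_SCALE) / sgs->group_capacity; */
	sgs.avg_load = (sgs.group_load * SCHED_CAPACITY_SCALE) / sgs.group_capacity;

	/* Mirrors: sgs->load_per_task = sgs->group_load / sgs->sum_nr_running; */
	if (sgs.sum_nr_running)
		sgs.load_per_task = sgs.group_load / sgs.sum_nr_running;

	printf("group_load=%lu avg_load=%lu load_per_task=%lu\n",
	       sgs.group_load, sgs.avg_load, sgs.load_per_task);
	return 0;
}

Note the scaling choice: because group_load is divided by group_capacity and multiplied by SCHED_CAPACITY_SCALE, avg_load is comparable across groups of different sizes and CPU capacities, which is what lets find_busiest_group() compare them directly; total_load and the asym-packing imbalance assignment, by contrast, use the raw unscaled sum.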