scale_load_down  7358 kernel/sched/core.c 	if (shareval > scale_load_down(ULONG_MAX))
scale_load_down  7368 kernel/sched/core.c 	return (u64) scale_load_down(tg->shares);
scale_load_down  7743 kernel/sched/core.c 	u64 weight = scale_load_down(tg->shares);
scale_load_down  7769 kernel/sched/core.c 	unsigned long weight = scale_load_down(css_tg(css)->shares);
scale_load_down   196 kernel/sched/fair.c 	w = scale_load_down(lw->weight);
scale_load_down   220 kernel/sched/fair.c 	u64 fact = scale_load_down(weight);
scale_load_down   745 kernel/sched/fair.c 		sa->runnable_load_avg = sa->load_avg = scale_load_down(se->load.weight);
scale_load_down  3001 kernel/sched/fair.c 	load = max(scale_load_down(cfs_rq->load.weight), cfs_rq->avg.load_avg);
scale_load_down  3060 kernel/sched/fair.c 		       scale_load_down(cfs_rq->load.weight));
scale_load_down  3063 kernel/sched/fair.c 		       scale_load_down(cfs_rq->runnable_weight));
scale_load_down  3334 kernel/sched/fair.c 		if (scale_load_down(gcfs_rq->load.weight)) {
scale_load_down  3336 kernel/sched/fair.c 				scale_load_down(gcfs_rq->load.weight));
scale_load_down  5590 kernel/sched/fair.c 	unsigned long imbalance = scale_load_down(NICE_0_LOAD) *
scale_load_down  8482 kernel/sched/fair.c 			load_above_capacity *= scale_load_down(NICE_0_LOAD);
scale_load_down   294 kernel/sched/pelt.c 				scale_load_down(cfs_rq->load.weight),
scale_load_down   295 kernel/sched/pelt.c 				scale_load_down(cfs_rq->runnable_weight),
scale_load_down   707 kernel/sched/sched.h 	return scale_load_down(se->load.weight);
scale_load_down   712 kernel/sched/sched.h 	return scale_load_down(se->runnable_weight);
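
Every call site above converts between the scheduler's internal, high-resolution load weights and the plain weight values used elsewhere (cgroup cpu.shares / cpu.weight, the PELT sums, NICE_0_LOAD). On 64-bit kernels of this era the weights held in se->load.weight and tg->shares carry an extra SCHED_FIXEDPOINT_SHIFT (10) bits of resolution, and scale_load_down() strips those bits again. The user-space sketch below mirrors how the scale_load()/scale_load_down() pair is assumed to be defined in kernel/sched/sched.h for this kernel generation; it is an illustration, not the authoritative kernel source, and the exact definition differs between versions (newer kernels additionally clamp a nonzero result to a small minimum).

	/*
	 * Minimal user-space sketch of the scale_load()/scale_load_down()
	 * macro pair referenced in the listing above. Assumed to follow the
	 * 64-bit definitions in kernel/sched/sched.h of this era; the real
	 * kernel macros may differ by version.
	 */
	#include <stdio.h>

	#define SCHED_FIXEDPOINT_SHIFT	10	/* fixed-point math shift */

	/* 64-bit kernels keep weights with extra internal resolution. */
	#define NICE_0_LOAD_SHIFT	(SCHED_FIXEDPOINT_SHIFT + SCHED_FIXEDPOINT_SHIFT)
	#define scale_load(w)		((w) << SCHED_FIXEDPOINT_SHIFT)
	#define scale_load_down(w)	((w) >> SCHED_FIXEDPOINT_SHIFT)

	#define NICE_0_LOAD		(1L << NICE_0_LOAD_SHIFT)

	int main(void)
	{
		long user_weight = 1024;		/* nice-0 weight in user-visible units */
		long internal = scale_load(user_weight);	/* form stored in tg->shares / se->load.weight */

		printf("internal weight        : %ld\n", internal);			/* 1048576 */
		printf("scaled back down       : %ld\n", scale_load_down(internal));	/* 1024 */
		printf("NICE_0_LOAD            : %ld\n", NICE_0_LOAD);			/* 1 << 20 */
		printf("scale_load_down(N0L)   : %ld\n", scale_load_down(NICE_0_LOAD));	/* 1024 */
		return 0;
	}

Read with that conversion in mind, the sites above fall into place: core.c bounds user-supplied shares against scale_load_down(ULONG_MAX) before scaling them up and reports tg->shares back in user units; fair.c and pelt.c feed the down-scaled weights into the PELT accounting so they stay in the same units as cfs_rq->avg.load_avg; and the se_weight()/se_runnable() helpers in sched.h centralize the down-conversion for a sched_entity.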