Lines matching refs: SCHED_CAPACITY_SCALE
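
(SCHED_CAPACITY_SCALE is the scheduler's fixed-point unit of CPU capacity, defined as 1 << SCHED_CAPACITY_SHIFT, i.e. 1024; a value of 1024 means one full CPU's worth of compute. All of the functions hit below live in kernel/sched/fair.c. The annotations between hits are editorial sketches with made-up numbers, not kernel code.)
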
1161 smt = DIV_ROUND_UP(SCHED_CAPACITY_SCALE * cpus, ns->compute_capacity); in update_numa_stats()
1165 DIV_ROUND_CLOSEST(ns->compute_capacity, SCHED_CAPACITY_SCALE)); in update_numa_stats()
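
The two update_numa_stats() hits estimate a NUMA node's usable task capacity: the first derives the number of SMT siblings per core, the second caps the result by the node's total capacity in whole-CPU units. A standalone sketch of the same arithmetic with hypothetical numbers (the DIV_* helpers are re-declared here; the kernel's live in <linux/kernel.h>):

	#include <stdio.h>

	#define SCHED_CAPACITY_SCALE	1024UL
	#define DIV_ROUND_UP(n, d)	(((n) + (d) - 1) / (d))
	#define DIV_ROUND_CLOSEST(n, d)	(((n) + (d) / 2) / (d))

	int main(void)
	{
		unsigned long cpus = 8, compute_capacity = 4096; /* hypothetical node */
		/* ~2 hardware threads per core: 8 CPUs backed by 4096/1024 = 4 cores */
		unsigned long smt = DIV_ROUND_UP(SCHED_CAPACITY_SCALE * cpus, compute_capacity);
		unsigned long capacity = cpus / smt; /* cores */
		unsigned long cap_cpus = DIV_ROUND_CLOSEST(compute_capacity, SCHED_CAPACITY_SCALE);

		if (capacity > cap_cpus)
			capacity = cap_cpus;
		printf("smt=%lu capacity=%lu\n", smt, capacity); /* smt=2, capacity=4 */
		return 0;
	}
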
4680 avg_load = (avg_load * SCHED_CAPACITY_SCALE) / group->sgc->capacity; in find_idlest_group()
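
find_idlest_group() compares groups by capacity-normalized load: multiplying group load by SCHED_CAPACITY_SCALE before dividing by group capacity puts big slow groups and small fast groups on one scale. A worked sketch with made-up numbers:

	#include <stdio.h>
	#define SCHED_CAPACITY_SCALE	1024UL

	int main(void)
	{
		/* Two busy groups: a large one and a single-CPU one. */
		unsigned long load_a = 3072, capacity_a = 4096; /* 4 CPUs, 3/4 busy */
		unsigned long load_b = 1024, capacity_b = 1024; /* 1 CPU, fully busy */

		printf("avg_load A=%lu B=%lu\n",
		       load_a * SCHED_CAPACITY_SCALE / capacity_a,  /* 768  */
		       load_b * SCHED_CAPACITY_SCALE / capacity_b); /* 1024 */
		return 0;
	}

Group B comes out busier (1024 > 768) even though its raw load is lower.
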
6025 return SCHED_CAPACITY_SCALE; in default_scale_cpu_capacity()
6054 if (likely(used < SCHED_CAPACITY_SCALE)) in scale_rt_capacity()
6055 return SCHED_CAPACITY_SCALE - used; in scale_rt_capacity()
6062 unsigned long capacity = SCHED_CAPACITY_SCALE; in update_cpu_capacity()
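
Taken together, the hits at 6025-6062 build a CPU's capacity: the default arch scaling simply reports full scale, update_cpu_capacity() starts from that value, and scale_rt_capacity() removes the share consumed by non-CFS work. A condensed, standalone sketch of that pipeline (the kernel derives `used` from rq accounting; here it is a hypothetical input):

	#include <stdio.h>
	#define SCHED_CAPACITY_SCALE	1024UL

	/* Default when the architecture provides no capacity scaling:
	 * one CPU == one full unit of capacity. */
	static unsigned long default_scale_cpu_capacity(void)
	{
		return SCHED_CAPACITY_SCALE;
	}

	/* Capacity left for CFS after RT pressure; `used` is the average
	 * capacity consumed by non-CFS classes (hypothetical input). */
	static unsigned long scale_rt_capacity(unsigned long used)
	{
		if (used < SCHED_CAPACITY_SCALE)
			return SCHED_CAPACITY_SCALE - used;
		return 1; /* the kernel clamps to a small non-zero minimum */
	}

	int main(void)
	{
		unsigned long capacity = default_scale_cpu_capacity();

		/* as in update_cpu_capacity(): scale by the leftover fraction */
		capacity *= scale_rt_capacity(256); /* RT eats 25% of the CPU */
		capacity /= SCHED_CAPACITY_SCALE;
		printf("cfs capacity = %lu\n", capacity); /* 768 */
		return 0;
	}
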
6298 sgs->avg_load = (sgs->group_load*SCHED_CAPACITY_SCALE) / sgs->group_capacity; in update_sg_lb_stats()
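
This is the same normalization as the find_idlest_group() hit at line 4680, applied here while gathering per-group load-balancing statistics: avg_load is group_load expressed in units of SCHED_CAPACITY_SCALE per unit of group capacity.
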
6506 SCHED_CAPACITY_SCALE); in check_asym_packing()
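
In check_asym_packing() the normalization runs in reverse: the busiest group's avg_load is multiplied back by its group capacity and divided by SCHED_CAPACITY_SCALE to yield a raw load figure for env->imbalance. With hypothetical stats avg_load = 1536 and group_capacity = 2048, that is DIV_ROUND_CLOSEST(1536 * 2048, 1024) = 3072.
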
6535 (busiest->load_per_task * SCHED_CAPACITY_SCALE) / in fix_small_imbalance()
6554 capa_now /= SCHED_CAPACITY_SCALE; in fix_small_imbalance()
6565 busiest->load_per_task * SCHED_CAPACITY_SCALE) { in fix_small_imbalance()
6569 tmp = (busiest->load_per_task * SCHED_CAPACITY_SCALE) / in fix_small_imbalance()
6574 capa_move /= SCHED_CAPACITY_SCALE; in fix_small_imbalance()
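
All five fix_small_imbalance() hits are the same fixed-point discipline: capacities carry an implicit factor of SCHED_CAPACITY_SCALE, so a per-task load is multiplied by it before dividing by a capacity, and any load x capacity product (capa_now, capa_move) is divided by it once to land back in plain load units. A minimal round-trip sketch with hypothetical numbers:

	#include <stdio.h>
	#define SCHED_CAPACITY_SCALE	1024UL

	int main(void)
	{
		unsigned long load_per_task = 800, group_capacity = 2048;

		/* load in "per SCHED_CAPACITY_SCALE of capacity" units */
		unsigned long scaled = load_per_task * SCHED_CAPACITY_SCALE / group_capacity;
		/* undo: capacity * scaled-load / SCHED_CAPACITY_SCALE == raw load */
		unsigned long raw = group_capacity * scaled / SCHED_CAPACITY_SCALE;

		printf("scaled=%lu raw=%lu\n", scaled, raw); /* 400, 800 */
		return 0;
	}
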
6642 ) / SCHED_CAPACITY_SCALE; in calculate_imbalance()
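
calculate_imbalance() closes with the same de-scaling: both candidate pulls are load x capacity products, so the smaller of the two is divided by SCHED_CAPACITY_SCALE once to produce the imbalance in raw load units. A condensed sketch of that final expression (max_pull and the group stats are hypothetical stand-ins):

	#include <stdio.h>
	#define SCHED_CAPACITY_SCALE	1024UL

	static unsigned long min_ul(unsigned long a, unsigned long b)
	{
		return a < b ? a : b;
	}

	int main(void)
	{
		/* hypothetical stats, all in normalized (per-1024) load units */
		unsigned long max_pull = 512;  /* load the busiest group can shed */
		unsigned long busiest_capacity = 2048;
		unsigned long sds_avg_load = 900, local_avg_load = 600;
		unsigned long local_capacity = 1024;

		/* never pull more than the local group has room to absorb */
		unsigned long imbalance = min_ul(
			max_pull * busiest_capacity,
			(sds_avg_load - local_avg_load) * local_capacity
		) / SCHED_CAPACITY_SCALE;

		printf("imbalance = %lu\n", imbalance); /* min(1048576, 307200)/1024 = 300 */
		return 0;
	}
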
6697 sds.avg_load = (SCHED_CAPACITY_SCALE * sds.total_load) in find_busiest_group()
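
find_busiest_group() applies the normalization once more at the widest scope: the domain-wide average is SCHED_CAPACITY_SCALE * total_load / total_capacity, the baseline each group's avg_load is compared against. For example, total_load = 5120 over total_capacity = 8192 gives an average of 5120 * 1024 / 8192 = 640.
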