src_load 1548 kernel/sched/fair.c static bool load_too_imbalanced(long src_load, long dst_load,
src_load 1565 kernel/sched/fair.c imb = abs(dst_load * src_capacity - src_load * dst_capacity);
src_load 1596 kernel/sched/fair.c long src_load, dst_load;
src_load 1684 kernel/sched/fair.c src_load = env->src_stats.load - load;
src_load 1686 kernel/sched/fair.c if (load_too_imbalanced(src_load, dst_load, env))
src_load 1713 kernel/sched/fair.c long src_load, dst_load, load;
src_load 1719 kernel/sched/fair.c src_load = env->src_stats.load - load;
src_load 1725 kernel/sched/fair.c maymove = !load_too_imbalanced(src_load, dst_load, env);
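
The hits above show src_load feeding a capacity-weighted imbalance check: load_too_imbalanced() (line 1548) cross-multiplies node loads with node capacities (line 1565), and the callers at lines 1686 and 1725 use its result to decide whether a task may migrate. The following is a minimal, self-contained sketch of that comparison, not the kernel source itself: the struct layout and the field names src_stats/dst_stats, load, and compute_capacity are assumptions modeled on the task_numa_env usage visible in the listing, and the userspace main() exists only to exercise the check.

#include <stdio.h>
#include <stdlib.h>

/* Simplified stand-ins for the per-node statistics (assumed layout). */
struct numa_stats {
	long load;             /* summed runnable load on the node */
	long compute_capacity; /* summed CPU capacity on the node  */
};

struct task_numa_env {
	struct numa_stats src_stats;
	struct numa_stats dst_stats;
};

/*
 * Capacity-weighted imbalance check: compare
 *   src_load / src_capacity  vs  dst_load / dst_capacity
 * in cross-multiplied form (no division), and flag the proposed move
 * only if it would leave the nodes more imbalanced than they are now.
 */
static int load_too_imbalanced(long src_load, long dst_load,
			       struct task_numa_env *env)
{
	long src_capacity = env->src_stats.compute_capacity;
	long dst_capacity = env->dst_stats.compute_capacity;
	long imb, old_imb;

	/* Imbalance after the proposed move. */
	imb = labs(dst_load * src_capacity - src_load * dst_capacity);

	/* Imbalance as it stands, from the unmodified node loads. */
	old_imb = labs(env->dst_stats.load * src_capacity -
		       env->src_stats.load * dst_capacity);

	/* Too imbalanced only if the move makes things worse. */
	return imb > old_imb;
}

int main(void)
{
	struct task_numa_env env = {
		.src_stats = { .load = 900, .compute_capacity = 1024 },
		.dst_stats = { .load = 300, .compute_capacity = 1024 },
	};
	long load = 200; /* load of the task considered for migration */

	/* Mirror the callers: subtract from source, add to destination. */
	long src_load = env.src_stats.load - load;
	long dst_load = env.dst_stats.load + load;
	int maymove = !load_too_imbalanced(src_load, dst_load, &env);

	printf("maymove = %d\n", maymove); /* prints 1: move reduces imbalance */
	return 0;
}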