se_weight 2856 kernel/sched/fair.c cfs_rq->avg.load_sum += se_weight(se) * se->avg.load_sum;
se_weight 2863 kernel/sched/fair.c sub_positive(&cfs_rq->avg.load_sum, se_weight(se) * se->avg.load_sum);
se_weight 2895 kernel/sched/fair.c se->avg.load_avg = div_u64(se_weight(se) * se->avg.load_sum, divider);
se_weight 3352 kernel/sched/fair.c load_sum = (s64)se_weight(se) * runnable_sum;
se_weight 3355 kernel/sched/fair.c delta_sum = load_sum - (s64)se_weight(se) * se->avg.load_sum;
se_weight 3365 kernel/sched/fair.c delta_sum = runnable_load_sum - se_weight(se) * se->avg.runnable_load_sum;
se_weight 3542 kernel/sched/fair.c if (se_weight(se)) {
se_weight 3544 kernel/sched/fair.c div_u64(se->avg.load_avg * se->avg.load_sum, se_weight(se));
se_weight  269 kernel/sched/pelt.c ___update_load_avg(&se->avg, se_weight(se), se_runnable(se));
se_weight  282 kernel/sched/pelt.c ___update_load_avg(&se->avg, se_weight(se), se_runnable(se));
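
Every site above applies the same PELT relation: fair.c keeps a sched_entity's load_sum weight-free and multiplies by se_weight(se) (the entity's scale_load_down()'d load.weight) only when folding it into the cfs_rq sums or converting it into load_avg via a divide by the PELT divider, while pelt.c passes the weight into ___update_load_avg() to do that same conversion during decay. A minimal standalone sketch of the scaling relation follows; it is not kernel code. It assumes a simplified divider of LOAD_AVG_MAX alone (the kernel's divider also accounts for the partial current period), and demo_avg, demo_reweight, and the sample weights are illustrative names, not kernel API.

    #include <stdint.h>
    #include <stdio.h>

    /*
     * Sketch of: se->avg.load_avg = div_u64(se_weight(se) * se->avg.load_sum, divider);
     * LOAD_AVG_MAX is the kernel's maximum possible PELT sum; the real divider
     * additionally subtracts the unfinished-period contribution, omitted here.
     */
    #define LOAD_AVG_MAX 47742

    struct demo_avg {                 /* illustrative, not struct sched_avg */
            uint64_t load_sum;        /* weight-free running sum, as fair.c keeps it */
            unsigned long load_avg;   /* weighted average derived from load_sum */
    };

    /* mirrors the div_u64(se_weight(se) * se->avg.load_sum, divider) pattern */
    static void demo_reweight(struct demo_avg *avg, unsigned long weight)
    {
            avg->load_avg = (unsigned long)((weight * avg->load_sum) / LOAD_AVG_MAX);
    }

    int main(void)
    {
            struct demo_avg avg = { .load_sum = LOAD_AVG_MAX / 2 }; /* ~50% runnable */

            demo_reweight(&avg, 1024);  /* nice-0 weight */
            printf("weight=1024 -> load_avg=%lu\n", avg.load_avg);  /* 512 */

            demo_reweight(&avg, 2048);  /* doubled weight, same load_sum */
            printf("weight=2048 -> load_avg=%lu\n", avg.load_avg);  /* 1024 */
            return 0;
    }

Because load_sum stays untouched, doubling the weight simply doubles load_avg, which is presumably why the reweight site above (fair.c:2895) can recompute load_avg directly from the stored sum after a weight change, and why the site guarded by if (se_weight(se)) (fair.c:3542-3544) can run the inverse division without dividing by zero.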