se_runnable 2839 kernel/sched/fair.c cfs_rq->avg.runnable_load_sum += se_runnable(se) * se->avg.runnable_load_sum;
se_runnable 2849 kernel/sched/fair.c se_runnable(se) * se->avg.runnable_load_sum);
se_runnable 2897 kernel/sched/fair.c div_u64(se_runnable(se) * se->avg.runnable_load_sum, divider);
se_runnable 3363 kernel/sched/fair.c runnable_load_sum = (s64)se_runnable(se) * runnable_sum;
se_runnable 269 kernel/sched/pelt.c ___update_load_avg(&se->avg, se_weight(se), se_runnable(se));
se_runnable 282 kernel/sched/pelt.c ___update_load_avg(&se->avg, se_weight(se), se_runnable(se));
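
For context, every call site above feeds the entity's runnable weight into the PELT runnable-load accounting. A minimal sketch of the helper pair these lines rely on, assuming the runnable_load_avg-era definitions from kernel/sched/fair.c (the CONFIG_FAIR_GROUP_SCHED variant; exact wording varies by kernel version):

/* Sketch only; actual definitions depend on the kernel version in use. */
static inline long se_weight(struct sched_entity *se)
{
	return scale_load_down(se->load.weight);	/* contribution to load_avg */
}

static inline long se_runnable(struct sched_entity *se)
{
	return scale_load_down(se->runnable_weight);	/* contribution to runnable_load_avg */
}

In the pelt.c hits, both helpers are passed to ___update_load_avg() so the load and runnable-load sums are scaled by the entity's current weights; in the fair.c hits, se_runnable(se) scales the entity's runnable_load_sum when it is attached to, detached from, or propagated through a cfs_rq.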