Cross-reference hits for "runnable" (identifier, line, file, matched source line):

runnable  2877  kernel/sched/fair.c  unsigned long weight, unsigned long runnable)
runnable  2888  kernel/sched/fair.c  se->runnable_weight = runnable;
runnable  3057  kernel/sched/fair.c  long runnable, load_avg;
runnable  3062  kernel/sched/fair.c  runnable = max(cfs_rq->avg.runnable_load_avg,
runnable  3065  kernel/sched/fair.c  runnable *= shares;
runnable  3067  kernel/sched/fair.c  runnable /= load_avg;
runnable  3069  kernel/sched/fair.c  return clamp_t(long, runnable, MIN_SHARES, shares);
runnable  3082  kernel/sched/fair.c  long shares, runnable;
runnable  3091  kernel/sched/fair.c  runnable = shares = READ_ONCE(gcfs_rq->tg->shares);
runnable  3097  kernel/sched/fair.c  runnable = calc_group_runnable(gcfs_rq, shares);
runnable  3100  kernel/sched/fair.c  reweight_entity(cfs_rq_of(se), se, shares, runnable);
runnable   111  kernel/sched/pelt.c  unsigned long load, unsigned long runnable, int running)
runnable   139  kernel/sched/pelt.c  if (runnable)
runnable   140  kernel/sched/pelt.c  sa->runnable_load_sum += runnable * contrib;
runnable   177  kernel/sched/pelt.c  unsigned long load, unsigned long runnable, int running)
runnable   211  kernel/sched/pelt.c  runnable = running = 0;
runnable   220  kernel/sched/pelt.c  if (!accumulate_sum(delta, sa, load, runnable, running))
runnable   227  kernel/sched/pelt.c  ___update_load_avg(struct sched_avg *sa, unsigned long load, unsigned long runnable)
runnable   235  kernel/sched/pelt.c  sa->runnable_load_avg = div_u64(runnable * sa->runnable_load_sum, divider);
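Taken together, the fair.c hits at 3057-3100 trace how a group entity's runnable weight is derived: calc_group_runnable() scales the group's runnable load by its shares, normalises by the group's total load, and clamps the result into [MIN_SHARES, shares]; update_cfs_group() then hands the result to reweight_entity(), which stores it in se->runnable_weight (line 2888). Below is a minimal userspace sketch of that arithmetic only; the MIN_SHARES value and the sample inputs are assumptions, and the kernel's max() against the instantaneous runnable weight (line 3062) is elided.

/*
 * Minimal userspace sketch of the calc_group_runnable() arithmetic from
 * the fair.c hits above. MIN_SHARES and the inputs in main() are
 * assumptions, not the kernel's scaled values.
 */
#include <stdio.h>

#define MIN_SHARES	2	/* assumed floor; the kernel scales this */

static long clamp_long(long v, long lo, long hi)
{
	return v < lo ? lo : (v > hi ? hi : v);
}

/*
 * fair.c:3057-3069 -- scale the group's runnable load by its shares,
 * normalise by the group's total load, and clamp the result into
 * [MIN_SHARES, shares]. The kernel first max()es the runnable load
 * against the instantaneous runnable weight; that step is elided here.
 */
static long calc_group_runnable(long runnable_load_avg, long load_avg,
				long shares)
{
	long runnable = runnable_load_avg;

	runnable *= shares;
	if (load_avg)
		runnable /= load_avg;

	return clamp_long(runnable, MIN_SHARES, shares);
}

int main(void)
{
	/*
	 * Half of the group's load is runnable, so the group entity is
	 * enqueued with roughly half of its configured shares; the
	 * caller (update_cfs_group(), fair.c:3100) would pass this to
	 * reweight_entity(), which stores it in se->runnable_weight.
	 */
	printf("group runnable weight = %ld\n",
	       calc_group_runnable(512, 1024, 1024));
	return 0;
}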
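The pelt.c hits show the two halves of the runnable-load average: accumulate_sum() folds the elapsed contribution into runnable_load_sum while the entity is runnable (lines 139-140), and ___update_load_avg() turns that sum into an average by dividing by the PELT divider (line 235). The following is a compressed userspace sketch under stated assumptions: PELT_DIVIDER is a stand-in for the kernel's LOAD_AVG_MAX, and the geometric decay of past 1024 us periods is elided, so only the accumulate/average split is shown.

/*
 * Userspace sketch of the PELT runnable-load bookkeeping from the
 * pelt.c hits above. PELT_DIVIDER is a stand-in for LOAD_AVG_MAX and
 * the decay of old periods is elided.
 */
#include <stdio.h>
#include <stdint.h>

#define PELT_DIVIDER	47742	/* stand-in for LOAD_AVG_MAX */

struct sched_avg {
	uint64_t	runnable_load_sum;
	unsigned long	runnable_load_avg;
};

/*
 * pelt.c:139-140 -- fold the elapsed contribution into the sum only
 * while the entity is runnable. For a task 'runnable' is a 0/1 flag;
 * for a cfs_rq it is the queue's runnable weight.
 */
static void accumulate_sum(uint32_t contrib, struct sched_avg *sa,
			   unsigned long runnable)
{
	if (runnable)
		sa->runnable_load_sum += runnable * contrib;
}

/*
 * pelt.c:227-235 -- convert the sum into an average. 'runnable' here is
 * the entity's weight when the sum is unweighted (the task case) and 1
 * when the weight is already folded into the sum (the cfs_rq case).
 */
static void update_load_avg(struct sched_avg *sa, unsigned long runnable)
{
	sa->runnable_load_avg =
		(runnable * sa->runnable_load_sum) / PELT_DIVIDER;
}

int main(void)
{
	struct sched_avg sa = { 0, 0 };

	/*
	 * A task runnable for its whole history: the sum saturates near
	 * the divider, so the average converges to the task's weight
	 * (1024 assumed as the nice-0 weight).
	 */
	accumulate_sum(PELT_DIVIDER, &sa, 1);
	update_load_avg(&sa, 1024);
	printf("runnable_load_avg = %lu\n", sa.runnable_load_avg);
	return 0;
}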