util_sum   400   include/linux/sched.h   u32 util_sum;
util_sum   947   kernel/sched/debug.c    P(se.avg.util_sum);
util_sum  3302   kernel/sched/fair.c     se->avg.util_sum = se->avg.util_avg * LOAD_AVG_MAX;
util_sum  3306   kernel/sched/fair.c     cfs_rq->avg.util_sum = cfs_rq->avg.util_avg * LOAD_AVG_MAX;
util_sum  3349   kernel/sched/fair.c     running_sum = se->avg.util_sum >> SCHED_CAPACITY_SHIFT;
util_sum  3493   kernel/sched/fair.c     sub_positive(&sa->util_sum, r * divider);
util_sum  3539   kernel/sched/fair.c     se->avg.util_sum = se->avg.util_avg * divider;
util_sum  3551   kernel/sched/fair.c     cfs_rq->avg.util_sum += se->avg.util_sum;
util_sum  3572   kernel/sched/fair.c     sub_positive(&cfs_rq->avg.util_sum, se->avg.util_sum);
util_sum  7578   kernel/sched/fair.c     if (cfs_rq->avg.util_sum)
util_sum   126   kernel/sched/pelt.c     sa->util_sum = decay_load((u64)(sa->util_sum), periods);
util_sum   142   kernel/sched/pelt.c     sa->util_sum += contrib << SCHED_CAPACITY_SHIFT;
util_sum   236   kernel/sched/pelt.c     WRITE_ONCE(sa->util_avg, sa->util_sum / divider);
util_sum   100   kernel/sched/pelt.h     u32 util_sum = rq->cfs.avg.util_sum;
util_sum   101   kernel/sched/pelt.h     util_sum += rq->avg_rt.util_sum;
util_sum   102   kernel/sched/pelt.h     util_sum += rq->avg_dl.util_sum;
util_sum   113   kernel/sched/pelt.h     if (util_sum >= divider)
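
The references above trace the PELT (per-entity load tracking) lifecycle of util_sum: the running sum is geometrically decayed each 1024us period (pelt.c:126), incremented by the running contribution scaled by SCHED_CAPACITY_SHIFT (pelt.c:142), and divided down into util_avg (pelt.c:236); fair.c re-derives util_sum as util_avg * LOAD_AVG_MAX or util_avg * divider when (re)attaching entities, and pelt.h aggregates the cfs, rt and dl sums. The sketch below is a simplified standalone approximation of that behaviour, not the kernel's implementation: it replaces decay_load()'s fixed-point lookup table with a floating-point pow(), treats every period as fully running, and approximates the divider with LOAD_AVG_MAX; the file name and struct name are hypothetical.

/* pelt_sketch.c (hypothetical): toy model of the util_sum/util_avg pair. */
#include <math.h>
#include <stdint.h>
#include <stdio.h>

#define SCHED_CAPACITY_SHIFT 10          /* one CPU of capacity == 1024 */
#define LOAD_AVG_MAX         47742       /* limit of the 1024*y^n series, y^32 == 0.5 */

struct sched_avg_sketch {
	uint64_t util_sum;                   /* the kernel stores this in a u32 */
	unsigned long util_avg;
};

/* Decay util_sum over 'periods' 1024us windows; stand-in for decay_load(). */
static void decay(struct sched_avg_sketch *sa, unsigned int periods)
{
	sa->util_sum = (uint64_t)((double)sa->util_sum * pow(0.5, periods / 32.0));
}

/* One fully-running 1024us period: decay, accumulate, recompute the average. */
static void accumulate_running_period(struct sched_avg_sketch *sa)
{
	decay(sa, 1);
	sa->util_sum += 1024ULL << SCHED_CAPACITY_SHIFT; /* contrib << SCHED_CAPACITY_SHIFT */
	sa->util_avg = sa->util_sum / LOAD_AVG_MAX;      /* divider approximated */
}

int main(void)
{
	struct sched_avg_sketch sa = { 0, 0 };

	/* A task that never sleeps converges toward util_avg of roughly 1024. */
	for (int i = 0; i < 500; i++)
		accumulate_running_period(&sa);

	printf("util_sum=%llu util_avg=%lu\n",
	       (unsigned long long)sa.util_sum, sa.util_avg);
	return 0;
}

Built with cc pelt_sketch.c -lm, the printed util_avg approaches the full-capacity value of about 1024, which is why fair.c:3302 can reconstruct a saturated util_sum as util_avg * LOAD_AVG_MAX.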