util_avg   404  include/linux/sched.h  unsigned long util_avg;
util_avg   408  kernel/sched/debug.c   P(se->avg.util_avg);
util_avg   533  kernel/sched/debug.c   cfs_rq->avg.util_avg);
util_avg   539  kernel/sched/debug.c   cfs_rq->removed.util_avg);
util_avg   950  kernel/sched/debug.c   P(se.avg.util_avg);
util_avg   786  kernel/sched/fair.c    long cap = (long)(cpu_scale - cfs_rq->avg.util_avg) / 2;
util_avg   789  kernel/sched/fair.c    if (cfs_rq->avg.util_avg != 0) {
util_avg   790  kernel/sched/fair.c    sa->util_avg = cfs_rq->avg.util_avg * se->load.weight;
util_avg   791  kernel/sched/fair.c    sa->util_avg /= (cfs_rq->avg.load_avg + 1);
util_avg   793  kernel/sched/fair.c    if (sa->util_avg > cap)
util_avg   794  kernel/sched/fair.c    sa->util_avg = cap;
util_avg   796  kernel/sched/fair.c    sa->util_avg = cap;
util_avg  3286  kernel/sched/fair.c    long delta = gcfs_rq->avg.util_avg - se->avg.util_avg;
util_avg  3301  kernel/sched/fair.c    se->avg.util_avg = gcfs_rq->avg.util_avg;
util_avg  3302  kernel/sched/fair.c    se->avg.util_sum = se->avg.util_avg * LOAD_AVG_MAX;
util_avg  3305  kernel/sched/fair.c    add_positive(&cfs_rq->avg.util_avg, delta);
util_avg  3306  kernel/sched/fair.c    cfs_rq->avg.util_sum = cfs_rq->avg.util_avg * LOAD_AVG_MAX;
util_avg  3422  kernel/sched/fair.c    if (se->avg.load_avg || se->avg.util_avg)
util_avg  3481  kernel/sched/fair.c    swap(cfs_rq->removed.util_avg, removed_util);
util_avg  3492  kernel/sched/fair.c    sub_positive(&sa->util_avg, r);
util_avg  3539  kernel/sched/fair.c    se->avg.util_sum = se->avg.util_avg * divider;
util_avg  3550  kernel/sched/fair.c    cfs_rq->avg.util_avg += se->avg.util_avg;
util_avg  3571  kernel/sched/fair.c    sub_positive(&cfs_rq->avg.util_avg, se->avg.util_avg);
util_avg  3677  kernel/sched/fair.c    cfs_rq->removed.util_avg += se->avg.util_avg;
util_avg  3695  kernel/sched/fair.c    return READ_ONCE(p->se.avg.util_avg);
util_avg  6107  kernel/sched/fair.c    util = READ_ONCE(cfs_rq->avg.util_avg);
util_avg  6138  kernel/sched/fair.c    util = READ_ONCE(cfs_rq->avg.util_avg);
util_avg  6238  kernel/sched/fair.c    unsigned long util_est, util = READ_ONCE(cfs_rq->avg.util_avg);
util_avg  7511  kernel/sched/fair.c    if (cfs_rq->avg.util_avg)
util_avg  7519  kernel/sched/fair.c    if (READ_ONCE(rq->avg_rt.util_avg))
util_avg  7522  kernel/sched/fair.c    if (READ_ONCE(rq->avg_dl.util_avg))
util_avg  7526  kernel/sched/fair.c    if (READ_ONCE(rq->avg_irq.util_avg))
util_avg  7783  kernel/sched/fair.c    used = READ_ONCE(rq->avg_rt.util_avg);
util_avg  7784  kernel/sched/fair.c    used += READ_ONCE(rq->avg_dl.util_avg);
util_avg   236  kernel/sched/pelt.c    WRITE_ONCE(sa->util_avg, sa->util_sum / divider);
util_avg   536  kernel/sched/sched.h   unsigned long util_avg;
util_avg  2402  kernel/sched/sched.h   return READ_ONCE(rq->avg_dl.util_avg);
util_avg  2407  kernel/sched/sched.h   unsigned long util = READ_ONCE(rq->cfs.avg.util_avg);
util_avg  2419  kernel/sched/sched.h   return READ_ONCE(rq->avg_rt.util_avg);
util_avg  2433  kernel/sched/sched.h   return rq->avg_irq.util_avg;
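
The kernel/sched/fair.c hits at lines 786-796 show how a freshly forked task's util_avg is seeded: the task inherits the runqueue's average utilization scaled by its load weight, capped at half of the CPU capacity that the runqueue leaves unused (the pelt.c hit at line 236 shows the steady-state rule, util_avg = util_sum / divider). Below is a minimal standalone sketch of that seeding arithmetic; init_util_avg and its parameter names are invented here for illustration and stand in for the kernel's cfs_rq/sched_entity fields, and the value 1024 assumes the usual SCHED_CAPACITY_SCALE.

#include <stdio.h>

/*
 * Illustrative sketch of the initial util_avg seeding visible in the
 * fair.c snippets above (lines 786-796). Not kernel code: the function
 * and parameter names are hypothetical, and 1024 is assumed to be
 * SCHED_CAPACITY_SCALE.
 */
static unsigned long init_util_avg(unsigned long cpu_scale,
                                   unsigned long cfs_util_avg,
                                   unsigned long cfs_load_avg,
                                   unsigned long se_weight)
{
        /* Cap at half of the capacity the runqueue leaves unused. */
        long cap = (long)(cpu_scale - cfs_util_avg) / 2;
        unsigned long util;

        if (cap <= 0)
                return 0;

        if (cfs_util_avg != 0) {
                /* Seed from the runqueue average, scaled by entity weight. */
                util = cfs_util_avg * se_weight / (cfs_load_avg + 1);
                if (util > (unsigned long)cap)
                        util = cap;
        } else {
                /* Empty runqueue: grant the full cap. */
                util = cap;
        }
        return util;
}

int main(void)
{
        /* Example: a runqueue already at util 256 on a 1024-capacity CPU.
         * cap = (1024 - 256) / 2 = 384; the weight-scaled seed of 511 is
         * clamped to it, so this prints 384. */
        printf("seeded util_avg = %lu\n",
               init_util_avg(1024, 256, 512, 1024));
        return 0;
}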