Lines Matching refs:avg
674 struct sched_avg *sa = &se->avg; in init_entity_runnable_average()
1712 delta = p->se.avg.load_sum / p->se.load.weight; in numa_get_avg_runtime()
2671 long delta = cfs_rq->avg.load_avg - cfs_rq->tg_load_avg_contrib; in update_tg_load_avg()
2675 cfs_rq->tg_load_avg_contrib = cfs_rq->avg.load_avg; in update_tg_load_avg()
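
The update_tg_load_avg() hits above (2671, 2675) show the per-queue contribution cache: each cfs_rq remembers what it last published as tg_load_avg_contrib and computes a delta against its current load_avg, so the shared group total only needs an atomic update when the change is worth the cross-CPU traffic. Below is a minimal userspace model of that filtering pattern; the 1/64 threshold, the names, and the types are simplifications for illustration, not the kernel's code.

    /* Simplified model of the update_tg_load_avg() pattern above: each queue
     * remembers what it last published (tg_load_avg_contrib) and only updates
     * the shared group total when the delta is worth the atomic traffic.
     * The 1/64 threshold and the type layout are assumptions for illustration. */
    #include <stdatomic.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct task_group_model { atomic_long load_avg; };

    struct cfs_rq_model {
        struct task_group_model *tg;
        long load_avg;            /* current per-queue average        */
        long tg_load_avg_contrib; /* value last folded into tg totals */
    };

    static void tg_load_avg_update(struct cfs_rq_model *cfs_rq, int force)
    {
        long delta = cfs_rq->load_avg - cfs_rq->tg_load_avg_contrib;

        if (force || labs(delta) > cfs_rq->tg_load_avg_contrib / 64) {
            atomic_fetch_add(&cfs_rq->tg->load_avg, delta);
            cfs_rq->tg_load_avg_contrib = cfs_rq->load_avg;
        }
    }

    int main(void)
    {
        struct task_group_model tg = { 0 };
        struct cfs_rq_model cfs = { .tg = &tg, .load_avg = 1024 };

        tg_load_avg_update(&cfs, 0);   /* publishes 1024                  */
        cfs.load_avg = 1030;
        tg_load_avg_update(&cfs, 0);   /* delta of 6 is filtered out      */
        printf("tg=%ld contrib=%ld\n",
               atomic_load(&tg.load_avg), cfs.tg_load_avg_contrib);
        return 0;
    }
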
2688 struct sched_avg *sa = &cfs_rq->avg; in update_cfs_rq_load_avg()
2726 __update_load_avg(now, cpu, &se->avg, in update_load_avg()
2743 if (se->avg.last_update_time) { in attach_entity_load_avg()
2744 __update_load_avg(cfs_rq->avg.last_update_time, cpu_of(rq_of(cfs_rq)), in attach_entity_load_avg()
2745 &se->avg, 0, 0, NULL); in attach_entity_load_avg()
2754 se->avg.last_update_time = cfs_rq->avg.last_update_time; in attach_entity_load_avg()
2755 cfs_rq->avg.load_avg += se->avg.load_avg; in attach_entity_load_avg()
2756 cfs_rq->avg.load_sum += se->avg.load_sum; in attach_entity_load_avg()
2757 cfs_rq->avg.util_avg += se->avg.util_avg; in attach_entity_load_avg()
2758 cfs_rq->avg.util_sum += se->avg.util_sum; in attach_entity_load_avg()
2763 __update_load_avg(cfs_rq->avg.last_update_time, cpu_of(rq_of(cfs_rq)), in detach_entity_load_avg()
2764 &se->avg, se->on_rq * scale_load_down(se->load.weight), in detach_entity_load_avg()
2767 cfs_rq->avg.load_avg = max_t(long, cfs_rq->avg.load_avg - se->avg.load_avg, 0); in detach_entity_load_avg()
2768 cfs_rq->avg.load_sum = max_t(s64, cfs_rq->avg.load_sum - se->avg.load_sum, 0); in detach_entity_load_avg()
2769 cfs_rq->avg.util_avg = max_t(long, cfs_rq->avg.util_avg - se->avg.util_avg, 0); in detach_entity_load_avg()
2770 cfs_rq->avg.util_sum = max_t(s32, cfs_rq->avg.util_sum - se->avg.util_sum, 0); in detach_entity_load_avg()
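
attach_entity_load_avg() and detach_entity_load_avg() above are symmetric: on attach the entity's load/util sums are added to the queue's totals (2755-2758), on detach they are subtracted but clamped at zero by the max_t() calls (2767-2770), because the two sets of averages decay independently and a plain subtraction could go negative. Below is a compact userspace sketch of just that bookkeeping, with simplified fields and without the __update_load_avg() sync the real code performs first.

    /* Toy model of the attach/detach bookkeeping above. Only the add and the
     * clamped subtract are modelled; the decay/update step is omitted and all
     * fields are plain integers rather than the kernel's sched_avg layout. */
    #include <stdio.h>

    struct avg_model {
        long load_avg, util_avg;
        long long load_sum;
        int util_sum;
    };

    static long      sub_clamp_l(long a, long b)            { return a > b ? a - b : 0; }
    static long long sub_clamp_ll(long long a, long long b) { return a > b ? a - b : 0; }
    static int       sub_clamp_i(int a, int b)              { return a > b ? a - b : 0; }

    static void attach_model(struct avg_model *rq, const struct avg_model *se)
    {
        rq->load_avg += se->load_avg;
        rq->load_sum += se->load_sum;
        rq->util_avg += se->util_avg;
        rq->util_sum += se->util_sum;
    }

    static void detach_model(struct avg_model *rq, const struct avg_model *se)
    {
        /* Clamp at zero: queue and entity averages decay independently, so
         * the entity's current contribution may exceed what the queue holds. */
        rq->load_avg = sub_clamp_l(rq->load_avg, se->load_avg);
        rq->load_sum = sub_clamp_ll(rq->load_sum, se->load_sum);
        rq->util_avg = sub_clamp_l(rq->util_avg, se->util_avg);
        rq->util_sum = sub_clamp_i(rq->util_sum, se->util_sum);
    }

    int main(void)
    {
        struct avg_model rq = { 0 };
        struct avg_model se = { .load_avg = 512, .util_avg = 300,
                                .load_sum = 1 << 20, .util_sum = 9000 };
        attach_model(&rq, &se);
        se.load_avg = 600;             /* entity grew after it was attached */
        detach_model(&rq, &se);
        /* load_avg clamps to 0 instead of going to -88 */
        printf("load_avg=%ld util_avg=%ld\n", rq.load_avg, rq.util_avg);
        return 0;
    }
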
2777 struct sched_avg *sa = &se->avg; in enqueue_entity_load_avg()
2807 max_t(long, cfs_rq->runnable_load_avg - se->avg.load_avg, 0); in dequeue_entity_load_avg()
2809 max_t(s64, cfs_rq->runnable_load_sum - se->avg.load_sum, 0); in dequeue_entity_load_avg()
2827 last_update_time = cfs_rq->avg.last_update_time; in remove_entity_load_avg()
2830 last_update_time = cfs_rq->avg.last_update_time; in remove_entity_load_avg()
2833 __update_load_avg(last_update_time, cpu_of(rq_of(cfs_rq)), &se->avg, 0, 0, NULL); in remove_entity_load_avg()
2834 atomic_long_add(se->avg.load_avg, &cfs_rq->removed_load_avg); in remove_entity_load_avg()
2835 atomic_long_add(se->avg.util_avg, &cfs_rq->removed_util_avg); in remove_entity_load_avg()
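
remove_entity_load_avg() above cannot take the destination queue's lock, so instead of subtracting from cfs_rq->avg directly it parks the departing entity's contribution in removed_load_avg / removed_util_avg with atomic adds (2834-2835); update_cfs_rq_load_avg() later drains those counters under the rq lock. Below is a minimal sketch of that producer/consumer split; names and types are simplified, and the real drain uses atomic_long_xchg and also fixes up the raw sums, which is omitted here.

    /* Sketch of the "removed" accumulator pattern: a CPU that cannot take the
     * queue lock adds the departing entity's averages to shared atomic counters;
     * the queue owner later drains them with an exchange-to-zero and subtracts
     * under its own lock. Simplified model, not the kernel implementation. */
    #include <stdatomic.h>
    #include <stdio.h>

    struct cfs_rq_model {
        long load_avg, util_avg;        /* owned by the queue's CPU */
        atomic_long removed_load_avg;   /* written by remote CPUs   */
        atomic_long removed_util_avg;
    };

    /* Remote side: publish the contribution of a task that left this queue. */
    static void remove_load_model(struct cfs_rq_model *cfs, long load, long util)
    {
        atomic_fetch_add(&cfs->removed_load_avg, load);
        atomic_fetch_add(&cfs->removed_util_avg, util);
    }

    /* Owner side: drain the accumulators and apply them, clamped at zero. */
    static void drain_removed_model(struct cfs_rq_model *cfs)
    {
        long r = atomic_exchange(&cfs->removed_load_avg, 0);
        long u = atomic_exchange(&cfs->removed_util_avg, 0);

        cfs->load_avg = cfs->load_avg > r ? cfs->load_avg - r : 0;
        cfs->util_avg = cfs->util_avg > u ? cfs->util_avg - u : 0;
    }

    int main(void)
    {
        struct cfs_rq_model cfs = { .load_avg = 2048, .util_avg = 700 };

        remove_load_model(&cfs, 512, 300);   /* e.g. a task migrated away */
        drain_removed_model(&cfs);
        printf("load=%ld util=%ld\n", cfs.load_avg, cfs.util_avg);  /* 1536 400 */
        return 0;
    }
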
2863 return cfs_rq->avg.load_avg; in cfs_rq_load_avg()
4613 wl -= se->avg.load_avg; in effective_load()
4683 weight = current->se.avg.load_avg; in wake_affine()
4690 weight = p->se.avg.load_avg; in wake_affine()
4907 unsigned long util = cpu_rq(cpu)->cfs.avg.util_avg; in cpu_util()
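
The cpu_util() hit above (4907) reads the root cfs_rq's util_avg as the CPU's utilization estimate; the function then caps it at the CPU's capacity, since a recently busy CPU can report a still-decaying value above what it can actually deliver. A tiny illustration of that clamp follows; the input values are made up, and 1024 stands in for full capacity at the default SCHED_CAPACITY_SCALE.

    /* Illustration of the cpu_util() idea: utilization is the root cfs_rq's
     * util_avg, capped at the CPU's capacity. Example values are invented. */
    #include <stdio.h>

    static unsigned long cpu_util_model(unsigned long util_avg, unsigned long capacity)
    {
        return util_avg >= capacity ? capacity : util_avg;
    }

    int main(void)
    {
        printf("%lu\n", cpu_util_model(380, 1024));   /* lightly loaded: 380 */
        printf("%lu\n", cpu_util_model(1300, 1024));  /* clamped to capacity */
        return 0;
    }
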
5025 p->se.avg.last_update_time = 0; in migrate_task_rq_fair()
5950 load = div64_ul(load * se->avg.load_avg, in update_cfs_rq_h_load()
5963 return div64_ul(p->se.avg.load_avg * cfs_rq->h_load, in task_h_load()
5981 return p->se.avg.load_avg; in task_h_load()
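
task_h_load() above scales the task's own load_avg by its queue's hierarchical load: update_cfs_rq_h_load() walks down from the root, multiplying the parent's h_load by each group entity's share of the parent queue (the div64_ul() on 5950), and the task then takes its proportional slice of its queue's h_load (5963), with the "+ 1" in the divisor guarding against a zero load. Below is a sketch of the same proportional walk over an invented two-level hierarchy; the numbers are purely illustrative.

    /* Sketch of the h_load idea: a group's hierarchical load is the parent's
     * h_load scaled by the group entity's share of the parent queue, and a
     * task's h_load is its share of its queue's h_load. The "+ 1" mirrors the
     * divisor guard visible on lines 5950/5963. */
    #include <stdio.h>

    /* share of `total` that `part` represents, applied to `scale` */
    static unsigned long long prop(unsigned long long scale,
                                   unsigned long long part,
                                   unsigned long long total)
    {
        return scale * part / (total + 1);
    }

    int main(void)
    {
        /* Root queue: total load 2000, the group entity contributes 500. */
        unsigned long long root_h_load  = 2000;
        unsigned long long group_h_load = prop(root_h_load, 500, 2000);  /* ~499 */

        /* Inside the group: queue load 600, our task contributes 300. */
        unsigned long long task_h_load  = prop(group_h_load, 300, 600);  /* ~249 */

        printf("group h_load=%llu task h_load=%llu\n", group_h_load, task_h_load);
        return 0;
    }
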
6081 u64 total, used, age_stamp, avg; in scale_rt_capacity() local
6089 avg = READ_ONCE(rq->rt_avg); in scale_rt_capacity()
6097 used = div_u64(avg, total); in scale_rt_capacity()
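
scale_rt_capacity() above (6081-6097) estimates how much of the CPU the real-time side has consumed recently: the accumulated rt_avg is divided by the length of the averaging window, and whatever remains of the capacity scale is left for CFS. A back-of-the-envelope model of that division follows; the window bookkeeping and the units of rt_avg are simplified assumptions here.

    /* Rough model of the scale_rt_capacity() arithmetic: rt_avg accumulates
     * capacity-scaled time spent in RT, dividing by the averaging window gives
     * the capacity "used", and what is left (at least 1) is available to CFS. */
    #include <stdio.h>
    #include <stdint.h>

    #define CAPACITY_SCALE 1024ULL

    static uint64_t cfs_capacity_left(uint64_t rt_avg, uint64_t window_ns)
    {
        uint64_t used = rt_avg / window_ns;   /* mirrors div_u64(avg, total)  */

        if (used < CAPACITY_SCALE)
            return CAPACITY_SCALE - used;
        return 1;                             /* never report zero capacity   */
    }

    int main(void)
    {
        /* RT work consumed roughly a quarter of the window at full capacity. */
        uint64_t window = 1000000000ULL;                 /* 1s, for example    */
        uint64_t rt_avg = (window / 4) * CAPACITY_SCALE; /* capacity-scaled ns */

        printf("capacity left for CFS: %llu\n",
               (unsigned long long)cfs_capacity_left(rt_avg, window));  /* 768 */
        return 0;
    }
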
8087 p->se.avg.last_update_time = 0; in task_move_group_fair()
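
The two hits at 5025 and 8087 clear se.avg.last_update_time when a task changes runqueue or group; the check at 2743 then skips the catch-up aging on the next attach, because the entity's average was taken against the old queue's clock (and has already been detached or queued for removal there), so decaying it against an unrelated clock would be wrong. Below is a stripped-down illustration of that sentinel check; the decay is faked as a halving per millisecond, only to make the effect visible, and is not the real PELT series.

    /* Sketch of the last_update_time == 0 sentinel: migrate_task_rq_fair()
     * (and the group-move path) zero the timestamp, and the next attach skips
     * the catch-up aging instead of decaying against a foreign clock. */
    #include <stdio.h>
    #include <stdint.h>

    struct se_avg_model {
        uint64_t last_update_time;   /* 0 means "do not age on attach" */
        unsigned long load_avg;
    };

    static void attach_model(struct se_avg_model *se, uint64_t rq_clock)
    {
        if (se->last_update_time) {
            /* Stand-in for the __update_load_avg() catch-up on line 2744. */
            uint64_t ms = (rq_clock - se->last_update_time) / 1000000;
            while (ms-- && se->load_avg)
                se->load_avg /= 2;
        }
        se->last_update_time = rq_clock;
    }

    int main(void)
    {
        struct se_avg_model a = { .last_update_time = 1000, .load_avg = 800 };
        struct se_avg_model b = { .last_update_time = 0,    .load_avg = 800 };

        attach_model(&a, 5000000 + 1000);   /* same clock domain: aged (5ms) */
        attach_model(&b, 5000000 + 1000);   /* migrated: kept as-is          */
        printf("aged=%lu migrated=%lu\n", a.load_avg, b.load_avg);  /* 25 800 */
        return 0;
    }
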