Lines matching refs: this_rq
79 long calc_load_fold_active(struct rq *this_rq) in calc_load_fold_active() argument
83 nr_active = this_rq->nr_running; in calc_load_fold_active()
84 nr_active += (long) this_rq->nr_uninterruptible; in calc_load_fold_active()
86 if (nr_active != this_rq->calc_load_active) { in calc_load_fold_active()
87 delta = nr_active - this_rq->calc_load_active; in calc_load_fold_active()
88 this_rq->calc_load_active = nr_active; in calc_load_fold_active()
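The matches above (lines 79-88) show the per-runqueue fold: the current active count (nr_running plus nr_uninterruptible) is compared against the cached calc_load_active, the cache is refreshed, and only the difference is returned so a caller can add it to a global counter. Below is a minimal userspace model of that step; the mock struct rq carries only the fields seen above (the real kernel structure has many more members), and the local calc_load_tasks stands in for the kernel's global atomic counter.

#include <stdio.h>

/* Mock runqueue: only the fields that appear in the matches above. */
struct rq {
        unsigned long nr_running;               /* runnable tasks */
        unsigned long nr_uninterruptible;       /* tasks in D state */
        long calc_load_active;                  /* last folded active count */
};

/* Return how much this runqueue's active count changed since the last fold. */
static long calc_load_fold_active(struct rq *this_rq)
{
        long nr_active, delta = 0;

        nr_active = this_rq->nr_running;
        nr_active += (long)this_rq->nr_uninterruptible;

        if (nr_active != this_rq->calc_load_active) {
                delta = nr_active - this_rq->calc_load_active;
                this_rq->calc_load_active = nr_active;
        }
        return delta;
}

int main(void)
{
        struct rq rq = { .nr_running = 3, .nr_uninterruptible = 1 };
        long calc_load_tasks = 0;       /* stands in for the global atomic counter */

        calc_load_tasks += calc_load_fold_active(&rq);  /* +4 */
        rq.nr_running = 1;
        calc_load_tasks += calc_load_fold_active(&rq);  /* -2 */
        printf("calc_load_tasks = %ld\n", calc_load_tasks);    /* prints 2 */
        return 0;
}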
182 struct rq *this_rq = this_rq(); in calc_load_enter_idle() local
189 delta = calc_load_fold_active(this_rq); in calc_load_enter_idle()
198 struct rq *this_rq = this_rq(); in calc_load_exit_idle() local
203 if (time_before(jiffies, this_rq->calc_load_update)) in calc_load_exit_idle()
211 this_rq->calc_load_update = calc_load_update; in calc_load_exit_idle()
212 if (time_before(jiffies, this_rq->calc_load_update + 10)) in calc_load_exit_idle()
213 this_rq->calc_load_update += LOAD_FREQ; in calc_load_exit_idle()
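Lines 182-189 show the entry into NO_HZ idle: the pending delta is folded out with calc_load_fold_active() so the CPU can sleep through sample windows; where that delta is accumulated (a NO_HZ idle counter in the kernel) is not part of the matched lines. Lines 198-213 show the wake-up side: if the window passed while the CPU slept, the per-CPU calc_load_update is re-synced to the global window so the CPU is not accounted twice. The sketch below models only that resync. HZ, the global calc_load_update variable and the concrete jiffies values are assumptions added for the example; time_before() reproduces the kernel's wrap-safe jiffies comparison.

#include <stdio.h>

#define HZ              100
#define LOAD_FREQ       (5 * HZ + 1)            /* sample window: 5 sec + 1 tick */
#define time_before(a, b)       ((long)((a) - (b)) < 0)       /* wrap-safe */

static unsigned long jiffies;                   /* ticks since boot (model) */
static unsigned long calc_load_update;          /* global sample window */

struct rq {
        unsigned long calc_load_update;         /* this CPU's sample window */
};

static void calc_load_exit_idle(struct rq *this_rq)
{
        /* Still before this CPU's window: nothing was missed. */
        if (time_before(jiffies, this_rq->calc_load_update))
                return;

        /*
         * We woke inside or after the window; the NO_HZ accounting already
         * covered this CPU, so skip it and sync up for the next window.
         * The 10-tick slack mirrors the grace period the global fold
         * allows itself.
         */
        this_rq->calc_load_update = calc_load_update;
        if (time_before(jiffies, this_rq->calc_load_update + 10))
                this_rq->calc_load_update += LOAD_FREQ;
}

int main(void)
{
        struct rq rq = { .calc_load_update = 1000 };

        calc_load_update = 1000;        /* global fold for this window not done yet */
        jiffies = 1004;                 /* woke shortly after the window opened */
        calc_load_exit_idle(&rq);
        printf("next sample window at %lu\n", rq.calc_load_update);     /* 1501 */
        return 0;
}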
379 static void calc_load_account_active(struct rq *this_rq) in calc_load_account_active() argument
383 if (time_before(jiffies, this_rq->calc_load_update)) in calc_load_account_active()
386 delta = calc_load_fold_active(this_rq); in calc_load_account_active()
390 this_rq->calc_load_update += LOAD_FREQ; in calc_load_account_active()
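Lines 379-390 are the regular, non-idle variant of the same fold, driven from the tick: nothing happens until jiffies reaches the CPU's calc_load_update, then the delta is pushed into the global task count (an atomic_long in the kernel) and the window is advanced by LOAD_FREQ. A compact model follows; the active-count bookkeeping from the previous sketch is collapsed into a single nr_active parameter, and a plain long stands in for the atomic global counter.

#include <stdio.h>

#define HZ              100
#define LOAD_FREQ       (5 * HZ + 1)                    /* 5 sec + 1 tick */
#define time_before(a, b)       ((long)((a) - (b)) < 0)

static unsigned long jiffies;
static long calc_load_tasks;                            /* global active count */

struct rq {
        long calc_load_active;                          /* last folded value */
        unsigned long calc_load_update;                 /* next sample window */
};

static void calc_load_account_active(struct rq *this_rq, long nr_active)
{
        long delta;

        if (time_before(jiffies, this_rq->calc_load_update))
                return;                                 /* window not reached yet */

        delta = nr_active - this_rq->calc_load_active;
        this_rq->calc_load_active = nr_active;
        if (delta)
                calc_load_tasks += delta;               /* atomic_long_add() in the kernel */

        this_rq->calc_load_update += LOAD_FREQ;
}

int main(void)
{
        struct rq rq = { .calc_load_update = LOAD_FREQ };

        jiffies = LOAD_FREQ - 1;
        calc_load_account_active(&rq, 3);               /* too early: ignored */
        jiffies = LOAD_FREQ;
        calc_load_account_active(&rq, 3);               /* folded */
        printf("calc_load_tasks = %ld, next window = %lu\n",
               calc_load_tasks, rq.calc_load_update);
        return 0;
}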
469 static void __update_cpu_load(struct rq *this_rq, unsigned long this_load, in __update_cpu_load() argument
474 this_rq->nr_load_updates++; in __update_cpu_load()
477 this_rq->cpu_load[0] = this_load; /* Fasttrack for idx 0 */ in __update_cpu_load()
483 old_load = this_rq->cpu_load[i]; in __update_cpu_load()
494 this_rq->cpu_load[i] = (old_load * (scale - 1) + new_load) >> i; in __update_cpu_load()
497 sched_avg_update(this_rq); in __update_cpu_load()
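Lines 469-497 are the per-CPU load history: cpu_load[0] tracks the instantaneous load, and each higher index is an exponentially weighted average with scale = 2^i, updated as cpu_load[i] = (old * (2^i - 1) + new) >> i. The matched lines do not show the round-up applied when the load rises or the missed-tick decay helper; the sketch below includes the former (it is visible in the surrounding kernel code) and leaves the latter out. CPU_LOAD_IDX_MAX is assumed to be 5, the size of the kernel's cpu_load[] array, and sched_avg_update() is omitted.

#include <stdio.h>

#define CPU_LOAD_IDX_MAX        5

struct rq {
        unsigned long cpu_load[CPU_LOAD_IDX_MAX];
        unsigned long nr_load_updates;
};

static void __update_cpu_load(struct rq *this_rq, unsigned long this_load)
{
        int i, scale;

        this_rq->nr_load_updates++;
        this_rq->cpu_load[0] = this_load;       /* fasttrack for idx 0 */

        for (i = 1, scale = 2; i < CPU_LOAD_IDX_MAX; i++, scale += scale) {
                unsigned long old_load = this_rq->cpu_load[i];
                unsigned long new_load = this_load;

                /* Round up when load rises so the average can reach it. */
                if (new_load > old_load)
                        new_load += scale - 1;

                this_rq->cpu_load[i] = (old_load * (scale - 1) + new_load) >> i;
        }
}

int main(void)
{
        struct rq rq = { { 0 } };
        int tick;

        /* Feed a constant load and watch the higher indexes converge slowly. */
        for (tick = 0; tick < 5; tick++) {
                __update_cpu_load(&rq, 1024);
                printf("tick %d: %lu %lu %lu %lu %lu\n", tick,
                       rq.cpu_load[0], rq.cpu_load[1], rq.cpu_load[2],
                       rq.cpu_load[3], rq.cpu_load[4]);
        }
        return 0;
}

Shifts and integer adds keep this cheap enough to run on every tick, and the differently weighted indexes give the load balancer both reactive and conservative views of the same CPU.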
530 void update_idle_cpu_load(struct rq *this_rq) in update_idle_cpu_load() argument
533 unsigned long load = get_rq_runnable_load(this_rq); in update_idle_cpu_load()
539 if (load || curr_jiffies == this_rq->last_load_update_tick) in update_idle_cpu_load()
542 pending_updates = curr_jiffies - this_rq->last_load_update_tick; in update_idle_cpu_load()
543 this_rq->last_load_update_tick = curr_jiffies; in update_idle_cpu_load()
545 __update_cpu_load(this_rq, load, pending_updates); in update_idle_cpu_load()
553 struct rq *this_rq = this_rq(); in update_cpu_load_nohz() local
557 if (curr_jiffies == this_rq->last_load_update_tick) in update_cpu_load_nohz()
560 raw_spin_lock(&this_rq->lock); in update_cpu_load_nohz()
561 pending_updates = curr_jiffies - this_rq->last_load_update_tick; in update_cpu_load_nohz()
563 this_rq->last_load_update_tick = curr_jiffies; in update_cpu_load_nohz()
568 __update_cpu_load(this_rq, 0, pending_updates); in update_cpu_load_nohz()
570 raw_spin_unlock(&this_rq->lock); in update_cpu_load_nohz()
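Lines 530-570 handle CPUs that skipped ticks: pending_updates is the number of jiffies since last_load_update_tick, and the history has to decay as if a zero-load sample had been taken in each missed tick. update_idle_cpu_load() (lines 530-545) bails out if there is runnable load or nothing is pending; update_cpu_load_nohz() (lines 553-570) takes this_rq->lock itself and feeds an explicit zero load. The sketch below models the catch-up with a plain decay loop; the kernel uses a precomputed degrade table for this instead (not part of the matched lines), and the locking is left out.

#include <stdio.h>

#define CPU_LOAD_IDX_MAX        5

struct rq {
        unsigned long cpu_load[CPU_LOAD_IDX_MAX];
        unsigned long last_load_update_tick;
};

static unsigned long jiffies;

static void __update_cpu_load(struct rq *this_rq, unsigned long this_load,
                              unsigned long pending_updates)
{
        int i, scale;

        this_rq->cpu_load[0] = this_load;
        for (i = 1, scale = 2; i < CPU_LOAD_IDX_MAX; i++, scale += scale) {
                unsigned long old_load = this_rq->cpu_load[i];
                unsigned long n;

                /* Decay over the ticks we slept through (zero input). */
                for (n = 1; n < pending_updates; n++)
                        old_load = (old_load * (scale - 1)) >> i;

                /* The round-up on rising load is irrelevant here: input is zero. */
                this_rq->cpu_load[i] = (old_load * (scale - 1) + this_load) >> i;
        }
}

/* NO_HZ path: the CPU was idle, so the missed samples are all zero. */
static void update_cpu_load_nohz(struct rq *this_rq)
{
        unsigned long curr_jiffies = jiffies;
        unsigned long pending_updates;

        if (curr_jiffies == this_rq->last_load_update_tick)
                return;

        pending_updates = curr_jiffies - this_rq->last_load_update_tick;
        this_rq->last_load_update_tick = curr_jiffies;
        __update_cpu_load(this_rq, 0, pending_updates);
}

int main(void)
{
        struct rq rq = { .cpu_load = { 1024, 1024, 1024, 1024, 1024 } };

        jiffies = 8;                    /* 8 ticks passed while idle */
        update_cpu_load_nohz(&rq);
        printf("%lu %lu %lu %lu %lu\n", rq.cpu_load[0], rq.cpu_load[1],
               rq.cpu_load[2], rq.cpu_load[3], rq.cpu_load[4]);
        return 0;
}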
577 void update_cpu_load_active(struct rq *this_rq) in update_cpu_load_active() argument
579 unsigned long load = get_rq_runnable_load(this_rq); in update_cpu_load_active()
583 this_rq->last_load_update_tick = jiffies; in update_cpu_load_active()
584 __update_cpu_load(this_rq, load, 1); in update_cpu_load_active()
586 calc_load_account_active(this_rq); in update_cpu_load_active()
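Lines 577-586 are the ordinary busy-tick path: stamp last_load_update_tick, feed exactly one sample (pending_updates == 1) into the decayed history, then run calc_load_account_active() for the load-average side. The stamping is what lets the idle-side paths above compute a correct pending_updates later. Below is a deliberately small tie-together sketch under that assumption: only one history index is tracked, the load value passed in stands in for get_rq_runnable_load(), and the load-average fold is only referenced in a comment.

#include <stdio.h>

struct rq {
        unsigned long cpu_load1;                /* stand-in for cpu_load[1] */
        unsigned long last_load_update_tick;
};

static unsigned long jiffies;

static void __update_cpu_load(struct rq *rq, unsigned long load,
                              unsigned long pending_updates)
{
        unsigned long n;

        /* Missed samples decay the history as if zero load had been seen. */
        for (n = 1; n < pending_updates; n++)
                rq->cpu_load1 >>= 1;
        rq->cpu_load1 = (rq->cpu_load1 + load) >> 1;
}

static void update_cpu_load_active(struct rq *rq, unsigned long load)
{
        rq->last_load_update_tick = jiffies;    /* keep pending_updates honest */
        __update_cpu_load(rq, load, 1);
        /* calc_load_account_active() would run here; see the earlier sketch. */
}

static void update_idle_cpu_load(struct rq *rq)
{
        unsigned long pending = jiffies - rq->last_load_update_tick;

        if (!pending)
                return;
        rq->last_load_update_tick = jiffies;
        __update_cpu_load(rq, 0, pending);
}

int main(void)
{
        struct rq rq = { 0, 0 };

        for (jiffies = 1; jiffies <= 4; jiffies++)      /* busy: one sample per tick */
                update_cpu_load_active(&rq, 1024);
        jiffies = 12;                                   /* 8 tickless idle jiffies */
        update_idle_cpu_load(&rq);
        printf("cpu_load1 after idle gap: %lu\n", rq.cpu_load1);
        return 0;
}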