Lines Matching refs:cpu_rq

1146 struct rq *rq = cpu_rq(cpu); in update_numa_stats()
1257 struct rq *src_rq = cpu_rq(env->src_cpu); in task_numa_compare()
1258 struct rq *dst_rq = cpu_rq(env->dst_cpu); in task_numa_compare()
1968 tsk = READ_ONCE(cpu_rq(cpu)->curr); in task_numa_group()
4348 return cfs_rq_runnable_load_avg(&cpu_rq(cpu)->cfs); in weighted_cpuload()
4435 struct rq *rq = cpu_rq(cpu); in source_load()
4450 struct rq *rq = cpu_rq(cpu); in target_load()
4461 return cpu_rq(cpu)->cpu_capacity; in capacity_of()
4466 return cpu_rq(cpu)->cpu_capacity_orig; in capacity_orig_of()
4471 struct rq *rq = cpu_rq(cpu); in cpu_avg_load_per_task()
4801 struct rq *rq = cpu_rq(i); in find_idlest_cpu()
4907 unsigned long util = cpu_rq(cpu)->cfs.avg.util_avg; in cpu_util()
5898 struct rq *rq = cpu_rq(cpu); in update_blocked_averages()
5969 struct rq *rq = cpu_rq(cpu); in update_blocked_averages()
6080 struct rq *rq = cpu_rq(cpu); in scale_rt_capacity()
6110 cpu_rq(cpu)->cpu_capacity_orig = capacity; in update_cpu_capacity()
6118 cpu_rq(cpu)->cpu_capacity = capacity; in update_cpu_capacity()
6148 struct rq *rq = cpu_rq(cpu); in update_group_capacity()
6310 struct rq *rq = cpu_rq(i); in update_sg_lb_stats()
6810 rq = cpu_rq(i); in find_busiest_queue()
7074 env.dst_rq = cpu_rq(env.new_dst_cpu); in load_balance()
7352 struct rq *target_rq = cpu_rq(target_cpu); in active_load_balance_cpu_stop()
7535 if (on_null_domain(cpu_rq(cpu))) in nohz_balance_enter_idle()
7706 rq = cpu_rq(balance_cpu); in nohz_idle_balance()
8155 struct rq *rq = cpu_rq(cpu); in unregister_fair_sched_group()
8174 struct rq *rq = cpu_rq(cpu); in init_tg_cfs_entry()
8222 struct rq *rq = cpu_rq(i); in sched_group_set_shares()
8319 for_each_leaf_cfs_rq(cpu_rq(cpu), cfs_rq) in print_cfs_stats()
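
All of the matches above go through the same accessor: cpu_rq(cpu) evaluates to a pointer to the given CPU's struct rq (its per-CPU runqueue), and fair.c then reads per-runqueue state from it, such as load averages, cpu_capacity, cpu_capacity_orig, or the current task. Below is a minimal sketch of that pattern, assuming it sits inside kernel/sched/ where struct rq and the per-CPU helpers are visible; the macro definitions mirror kernel/sched/sched.h, while the helper read_cpu_capacity() is purely illustrative and not a kernel function.

	/* From kernel/sched/sched.h: one struct rq per CPU, and cpu_rq()
	 * simply takes the address of a given CPU's copy. */
	DECLARE_PER_CPU_SHARED_ALIGNED(struct rq, runqueues);

	#define cpu_rq(cpu)	(&per_cpu(runqueues, (cpu)))
	#define this_rq()	this_cpu_ptr(&runqueues)
	#define task_rq(p)	cpu_rq(task_cpu(p))

	/*
	 * Illustrative helper (not in the kernel): the read-side pattern seen
	 * in capacity_of() and capacity_orig_of() above - look up the CPU's
	 * runqueue, then read a field that load balancing maintains.
	 */
	static inline unsigned long read_cpu_capacity(int cpu)
	{
		struct rq *rq = cpu_rq(cpu);

		return rq->cpu_capacity;	/* written by update_cpu_capacity() */
	}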