
Searched refs: task_rq (Results 1 – 5 of 5), sorted by relevance

/linux-4.4.14/kernel/sched/
sched.h 706 #define task_rq(p) cpu_rq(task_cpu(p)) macro
1439 rq = task_rq(p); in __task_rq_lock()
1441 if (likely(rq == task_rq(p) && !task_on_rq_migrating(p))) { in __task_rq_lock()
1463 rq = task_rq(p); in task_rq_lock()
1481 if (likely(rq == task_rq(p) && !task_on_rq_migrating(p))) { in task_rq_lock()
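task_rq(p) resolves to cpu_rq(task_cpu(p)): the runqueue of whichever CPU the task is currently assigned to. Because a task can migrate at almost any moment, that pointer is only stable while the runqueue's lock is held, which is why __task_rq_lock() and task_rq_lock() (lines 1439-1481) read the pointer, take the lock, and then recheck the pointer under the lock. A minimal user-space sketch of that idiom using pthread mutexes; the types, task_rq_of(), and task_rq_lock_model() are illustrative stand-ins, not kernel API, and the task_on_rq_migrating() check is omitted for brevity:

    #include <pthread.h>
    #include <stdatomic.h>

    struct rq   { pthread_mutex_t lock; };        /* models struct rq */
    struct task { _Atomic(struct rq *) rq; };     /* models the task->CPU link */

    #define task_rq_of(p) atomic_load(&(p)->rq)   /* models task_rq(p) */

    /* Read the runqueue pointer, lock it, then recheck it under the lock:
     * if the task migrated in the window, drop the lock and retry. */
    static struct rq *task_rq_lock_model(struct task *p)
    {
        for (;;) {
            struct rq *rq = task_rq_of(p);
            pthread_mutex_lock(&rq->lock);
            if (rq == task_rq_of(p))
                return rq;                        /* returns with rq->lock held */
            pthread_mutex_unlock(&rq->lock);      /* task moved: try again */
        }
    }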
deadline.c 36 struct rq *rq = task_rq(p); in dl_rq_of_se()
527 struct rq *rq = task_rq(p); in start_dl_timer()
1345 best_cpu = cpudl_find(&task_rq(task)->rd->cpudl, in find_later_rq()
1442 if (unlikely(task_rq(task) != rq || in find_lock_later_rq()
1685 rq = task_rq(p); in set_cpus_allowed_dl()
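find_lock_later_rq() (line 1442) shows a second variant of the recheck: a candidate runqueue is chosen (via cpudl_find(), line 1345), both runqueues are locked, and the task is then re-verified to still be on the source runqueue, since it may have migrated while the locks were being taken. A sketch continuing the model above; double_lock_model(), double_unlock_model(), the pick callback, and the bounded retry count are simplifications of the kernel's double_lock_balance() and DL_MAX_TRIES:

    /* Lock two runqueues in address order to avoid ABBA deadlock. */
    static void double_lock_model(struct rq *a, struct rq *b)
    {
        if (a == b) {
            pthread_mutex_lock(&a->lock);
            return;
        }
        if (a < b) {
            pthread_mutex_lock(&a->lock);
            pthread_mutex_lock(&b->lock);
        } else {
            pthread_mutex_lock(&b->lock);
            pthread_mutex_lock(&a->lock);
        }
    }

    static void double_unlock_model(struct rq *a, struct rq *b)
    {
        if (a != b)
            pthread_mutex_unlock(&b->lock);
        pthread_mutex_unlock(&a->lock);
    }

    /* Pick a target rq, lock both, re-verify the task is still on the
     * source rq; on success the caller holds both locks. */
    static struct rq *find_lock_model(struct task *p, struct rq *rq,
                                      struct rq *(*pick)(struct task *))
    {
        for (int tries = 0; tries < 3; tries++) {  /* cf. DL_MAX_TRIES */
            struct rq *later = pick(p);            /* cf. find_later_rq() */
            if (!later)
                break;
            double_lock_model(rq, later);
            if (task_rq_of(p) != rq) {             /* task moved meanwhile */
                double_unlock_model(rq, later);
                continue;                          /* pick again and retry */
            }
            return later;
        }
        return NULL;
    }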
core.c 1148 if (task_rq(p) == rq && task_on_rq_queued(p)) in migration_cpu_stop()
1169 struct rq *rq = task_rq(p); in do_set_cpus_allowed()
1289 lockdep_is_held(&task_rq(p)->lock))); in set_task_cpu()
1310 src_rq = task_rq(p); in __migrate_swap_task()
1441 rq = task_rq(p); in wait_task_inactive()
2017 struct rq *rq = task_rq(p); in try_to_wake_up_local()
4457 if (!cpumask_subset(task_rq(p)->rd->span, new_mask)) { in sched_setaffinity()
4723 p_rq = task_rq(p); in yield_to()
4734 if (task_rq(p) != p_rq) { in yield_to()
5087 if (dl_task(p) && !cpumask_intersects(task_rq(p)->rd->span, in task_can_attach()
[all …]
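The core.c hits cover the rest of the lifecycle: migration (migration_cpu_stop(), __migrate_swap_task()), affinity changes (do_set_cpus_allowed(), sched_setaffinity()), a lockdep assertion in set_task_cpu() that task_rq(p)->lock is held, and the double-runqueue retry loop in yield_to() (lines 4723-4734). A sketch of that last pattern, reusing the model helpers above; yield_to_model() is illustrative, and cur models current, which cannot migrate while it runs, so only p needs the recheck:

    /* Lock both runqueues, then confirm p has not migrated; if it has,
     * drop both locks and restart the whole sequence. */
    static void yield_to_model(struct task *cur, struct task *p)
    {
        struct rq *rq, *p_rq;
    again:
        rq   = task_rq_of(cur);       /* cur models current: assumed pinned */
        p_rq = task_rq_of(p);
        double_lock_model(rq, p_rq);
        if (task_rq_of(p) != p_rq) {  /* p moved before we got the locks */
            double_unlock_model(rq, p_rq);
            goto again;
        }
        /* ... both tasks pinned; the actual yield work happens here ... */
        double_unlock_model(rq, p_rq);
    }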
rt.c 243 return task_rq(p); in rq_of_rt_se()
1573 if (!cpupri_find(&task_rq(task)->rd->cpupri, task, lowest_mask)) in find_lowest_rq()
1666 if (unlikely(task_rq(task) != rq || in find_lock_lowest_rq()
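The rt.c hits mirror the deadline-class pattern above: find_lock_lowest_rq() (line 1666) performs the same double-lock-and-recheck as find_lock_later_rq(), with cpupri_find() (line 1573) standing in for cpudl_find() as the target-CPU heuristic, so the find_lock_model() sketch covers both classes.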
fair.c 386 return &task_rq(p)->cfs; in task_cfs_rq()
392 struct rq *rq = task_rq(p); in cfs_rq_of()
4087 WARN_ON(task_rq(p) != rq); in hrtick_start_fair()
5857 BUG_ON(task_rq(p) != rq); in attach_task()
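fair.c uses task_rq() both as a plain accessor (task_cfs_rq() at line 386 returns &task_rq(p)->cfs) and in sanity checks (the WARN_ON and BUG_ON hits at lines 4087 and 5857) that assert the caller's locking contract before the runqueue is touched. A final standalone sketch; it redeclares the model types with an embedded CFS sub-runqueue, and all names remain illustrative:

    #include <assert.h>
    #include <pthread.h>
    #include <stdatomic.h>

    struct cfs_rq { int nr_running; };        /* CFS sub-runqueue stand-in */
    struct rq     { pthread_mutex_t lock; struct cfs_rq cfs; };
    struct task   { _Atomic(struct rq *) rq; };

    /* Mirrors task_cfs_rq() (line 386): a pure pointer chase through the
     * task's current rq, only meaningful while that rq's lock pins it. */
    static struct cfs_rq *task_cfs_rq_model(struct task *p)
    {
        return &atomic_load(&p->rq)->cfs;
    }

    /* Mirrors the BUG_ON(task_rq(p) != rq) in attach_task() (line 5857):
     * a caller holding rq->lock asserts the task is still on that rq. */
    static void attach_task_model(struct rq *rq, struct task *p)
    {
        assert(atomic_load(&p->rq) == rq);
        rq->cfs.nr_running++;                 /* illustrative bookkeeping */
    }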