__sched            23 arch/arc/kernel/ctx_sw.c struct task_struct *__sched
__sched           644 arch/arm64/kernel/process.c asmlinkage void __sched arm64_preempt_schedule_irq(void)
__sched           118 drivers/md/bcache/closure.c void __sched __closure_sync(struct closure *cl)
__sched           155 drivers/tty/tty_ldsem.c static struct ld_semaphore __sched *
__sched           229 drivers/tty/tty_ldsem.c static struct ld_semaphore __sched *
__sched           337 drivers/tty/tty_ldsem.c int __sched ldsem_down_read(struct ld_semaphore *sem, long timeout)
__sched           363 drivers/tty/tty_ldsem.c int __sched ldsem_down_write(struct ld_semaphore *sem, long timeout)
__sched            85 kernel/latencytop.c static void __sched
__sched           150 kernel/latencytop.c void __sched
__sched           207 kernel/locking/mutex.c static void __sched
__sched           256 kernel/locking/mutex.c static void __sched __mutex_lock_slowpath(struct mutex *lock);
__sched           279 kernel/locking/mutex.c void __sched mutex_lock(struct mutex *lock)
__sched           351 kernel/locking/mutex.c static inline bool __sched
__sched           366 kernel/locking/mutex.c static bool __sched
__sched           443 kernel/locking/mutex.c static void __sched
__sched           721 kernel/locking/mutex.c static noinline void __sched __mutex_unlock_slowpath(struct mutex *lock, unsigned long ip);
__sched           734 kernel/locking/mutex.c void __sched mutex_unlock(struct mutex *lock)
__sched           755 kernel/locking/mutex.c void __sched ww_mutex_unlock(struct ww_mutex *lock)
__sched           775 kernel/locking/mutex.c static __always_inline int __sched
__sched           804 kernel/locking/mutex.c static inline int __sched
__sched           851 kernel/locking/mutex.c static inline int __sched
__sched           925 kernel/locking/mutex.c static __always_inline int __sched
__sched          1099 kernel/locking/mutex.c static int __sched
__sched          1106 kernel/locking/mutex.c static int __sched
__sched          1115 kernel/locking/mutex.c void __sched
__sched          1123 kernel/locking/mutex.c void __sched
__sched          1130 kernel/locking/mutex.c int __sched
__sched          1137 kernel/locking/mutex.c int __sched
__sched          1144 kernel/locking/mutex.c void __sched
__sched          1184 kernel/locking/mutex.c int __sched
__sched          1200 kernel/locking/mutex.c int __sched
__sched          1222 kernel/locking/mutex.c static noinline void __sched __mutex_unlock_slowpath(struct mutex *lock, unsigned long ip)
__sched          1288 kernel/locking/mutex.c static noinline int __sched
__sched          1291 kernel/locking/mutex.c static noinline int __sched
__sched          1306 kernel/locking/mutex.c int __sched mutex_lock_interruptible(struct mutex *lock)
__sched          1330 kernel/locking/mutex.c int __sched mutex_lock_killable(struct mutex *lock)
__sched          1351 kernel/locking/mutex.c void __sched mutex_lock_io(struct mutex *lock)
__sched          1361 kernel/locking/mutex.c static noinline void __sched
__sched          1367 kernel/locking/mutex.c static noinline int __sched
__sched          1373 kernel/locking/mutex.c static noinline int __sched
__sched          1379 kernel/locking/mutex.c static noinline int __sched
__sched          1386 kernel/locking/mutex.c static noinline int __sched
__sched          1410 kernel/locking/mutex.c int __sched mutex_trylock(struct mutex *lock)
__sched          1427 kernel/locking/mutex.c int __sched
__sched          1442 kernel/locking/mutex.c int __sched
__sched          1166 kernel/locking/rtmutex.c static int __sched
__sched          1229 kernel/locking/rtmutex.c static int __sched
__sched          1338 kernel/locking/rtmutex.c static bool __sched rt_mutex_slowunlock(struct rt_mutex *lock,
__sched          1482 kernel/locking/rtmutex.c void __sched rt_mutex_lock_nested(struct rt_mutex *lock, unsigned int subclass)
__sched          1495 kernel/locking/rtmutex.c void __sched rt_mutex_lock(struct rt_mutex *lock)
__sched          1511 kernel/locking/rtmutex.c int __sched rt_mutex_lock_interruptible(struct rt_mutex *lock)
__sched          1529 kernel/locking/rtmutex.c int __sched rt_mutex_futex_trylock(struct rt_mutex *lock)
__sched          1534 kernel/locking/rtmutex.c int __sched __rt_mutex_futex_trylock(struct rt_mutex *lock)
__sched          1581 kernel/locking/rtmutex.c int __sched rt_mutex_trylock(struct rt_mutex *lock)
__sched          1601 kernel/locking/rtmutex.c void __sched rt_mutex_unlock(struct rt_mutex *lock)
__sched          1612 kernel/locking/rtmutex.c bool __sched __rt_mutex_futex_unlock(struct rt_mutex *lock,
__sched          1635 kernel/locking/rtmutex.c void __sched rt_mutex_futex_unlock(struct rt_mutex *lock)
__sched           994 kernel/locking/rwsem.c static struct rw_semaphore __sched *
__sched          1492 kernel/locking/rwsem.c void __sched down_read(struct rw_semaphore *sem)
__sched          1501 kernel/locking/rwsem.c int __sched down_read_killable(struct rw_semaphore *sem)
__sched          1531 kernel/locking/rwsem.c void __sched down_write(struct rw_semaphore *sem)
__sched          1542 kernel/locking/rwsem.c int __sched down_write_killable(struct rw_semaphore *sem)
__sched          1635 kernel/locking/rwsem.c int __sched down_write_killable_nested(struct rw_semaphore *sem, int subclass)
__sched           204 kernel/locking/semaphore.c static inline int __sched __down_common(struct semaphore *sem, long state,
__sched           235 kernel/locking/semaphore.c static noinline void __sched __down(struct semaphore *sem)
__sched           240 kernel/locking/semaphore.c static noinline int __sched __down_interruptible(struct semaphore *sem)
__sched           245 kernel/locking/semaphore.c static noinline int __sched __down_killable(struct semaphore *sem)
__sched           250 kernel/locking/semaphore.c static noinline int __sched __down_timeout(struct semaphore *sem, long timeout)
__sched           255 kernel/locking/semaphore.c static noinline void __sched __up(struct semaphore *sem)
__sched          2546 kernel/printk/printk.c void __sched console_conditional_schedule(void)
__sched            68 kernel/sched/completion.c static inline long __sched
__sched            95 kernel/sched/completion.c static inline long __sched
__sched           112 kernel/sched/completion.c static long __sched
__sched           118 kernel/sched/completion.c static long __sched
__sched           134 kernel/sched/completion.c void __sched wait_for_completion(struct completion *x)
__sched           152 kernel/sched/completion.c unsigned long __sched
__sched           167 kernel/sched/completion.c void __sched wait_for_completion_io(struct completion *x)
__sched           186 kernel/sched/completion.c unsigned long __sched
__sched           202 kernel/sched/completion.c int __sched wait_for_completion_interruptible(struct completion *x)
__sched           222 kernel/sched/completion.c long __sched
__sched           239 kernel/sched/completion.c int __sched wait_for_completion_killable(struct completion *x)
__sched           260 kernel/sched/completion.c long __sched
__sched          3997 kernel/sched/core.c static void __sched notrace __schedule(bool preempt)
__sched          4137 kernel/sched/core.c asmlinkage __visible void __sched schedule(void)
__sched          4161 kernel/sched/core.c void __sched schedule_idle(void)
__sched          4177 kernel/sched/core.c asmlinkage __visible void __sched schedule_user(void)
__sched          4200 kernel/sched/core.c void __sched schedule_preempt_disabled(void)
__sched          4207 kernel/sched/core.c static void __sched notrace preempt_schedule_common(void)
__sched          4241 kernel/sched/core.c asmlinkage __visible void __sched notrace preempt_schedule(void)
__sched          4269 kernel/sched/core.c asmlinkage __visible void __sched notrace preempt_schedule_notrace(void)
__sched          4315 kernel/sched/core.c asmlinkage __visible void __sched preempt_schedule_irq(void)
__sched          5608 kernel/sched/core.c int __sched _cond_resched(void)
__sched          5670 kernel/sched/core.c void __sched yield(void)
__sched          5692 kernel/sched/core.c int __sched yield_to(struct task_struct *p, bool preempt)
__sched          5770 kernel/sched/core.c long __sched io_schedule_timeout(long timeout)
__sched          5783 kernel/sched/core.c void __sched io_schedule(void)
__sched            40 kernel/sched/wait_bit.c int __sched
__sched            58 kernel/sched/wait_bit.c int __sched out_of_line_wait_on_bit(void *word, int bit,
__sched            68 kernel/sched/wait_bit.c int __sched out_of_line_wait_on_bit_timeout(
__sched            81 kernel/sched/wait_bit.c int __sched
__sched           111 kernel/sched/wait_bit.c int __sched out_of_line_wait_on_bit_lock(void *word, int bit,
__sched           196 kernel/sched/wait_bit.c __sched int bit_wait(struct wait_bit_key *word, int mode)
__sched           206 kernel/sched/wait_bit.c __sched int bit_wait_io(struct wait_bit_key *word, int mode)
__sched           216 kernel/sched/wait_bit.c __sched int bit_wait_timeout(struct wait_bit_key *word, int mode)
__sched           230 kernel/sched/wait_bit.c __sched int bit_wait_io_timeout(struct wait_bit_key *word, int mode)
__sched           785 kernel/time/alarmtimer.c static long __sched alarm_timer_nsleep_restart(struct restart_block *restart)
__sched          1865 kernel/time/hrtimer.c static int __sched do_nanosleep(struct hrtimer_sleeper *t, enum hrtimer_mode mode)
__sched          1900 kernel/time/hrtimer.c static long __sched hrtimer_nanosleep_restart(struct restart_block *restart)
__sched          2102 kernel/time/hrtimer.c int __sched
__sched          2172 kernel/time/hrtimer.c int __sched schedule_hrtimeout_range(ktime_t *expires, u64 delta,
__sched          2206 kernel/time/hrtimer.c int __sched schedule_hrtimeout(ktime_t *expires,
__sched          1856 kernel/time/timer.c signed long __sched schedule_timeout(signed long timeout)
__sched          1912 kernel/time/timer.c signed long __sched schedule_timeout_interruptible(signed long timeout)
__sched          1919 kernel/time/timer.c signed long __sched schedule_timeout_killable(signed long timeout)
__sched          1926 kernel/time/timer.c signed long __sched schedule_timeout_uninterruptible(signed long timeout)
__sched          1937 kernel/time/timer.c signed long __sched schedule_timeout_idle(signed long timeout)
__sched          2079 kernel/time/timer.c void __sched usleep_range(unsigned long min, unsigned long max)
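For context on what the annotation in every entry above means: a minimal sketch, paraphrasing the kernel's own definitions (include/linux/sched/debug.h and kernel/sched/core.c; exact spelling varies slightly between kernel versions). `__sched` places a function's code in the `.sched.text` section so that scheduler and locking internals can be skipped when reporting a task's wait channel (wchan) or walking its stack.

/*
 * Sketch of the __sched machinery, assuming a kernel of roughly the
 * vintage listed above; not a verbatim copy of any one version.
 */

/* Attach to any function that should be ignored in wchan output. */
#define __sched		__attribute__((__section__(".sched.text")))

/* Typical use, as in the kernel/locking/mutex.c entries above: */
void __sched mutex_lock(struct mutex *lock);

/*
 * kernel/sched/core.c checks these section bounds (plus the lock
 * sections) when deciding whether an address belongs to scheduling
 * code, e.g. from get_wchan():
 */
int in_sched_functions(unsigned long addr)
{
	return in_lock_functions(addr) ||
		(addr >= (unsigned long)__sched_text_start &&
		 addr <  (unsigned long)__sched_text_end);
}

Because the effect is purely a link-time section placement, the annotation costs nothing at runtime; it only keeps the functions listed above out of wchan/backtrace reporting.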