enqueued         1458 drivers/block/skd_main.c 					int limit, int *enqueued)
enqueued          201 drivers/firewire/net.c 	u8 enqueued;
enqueued          843 drivers/firewire/net.c 	free = (ptask->outstanding_pkts == 0 && ptask->enqueued);
enqueued          920 drivers/firewire/net.c 	free = ptask->enqueued;
enqueued         1021 drivers/firewire/net.c 		free = (ptask->outstanding_pkts == 0 && !ptask->enqueued);
enqueued         1023 drivers/firewire/net.c 			ptask->enqueued = true;
enqueued         1040 drivers/firewire/net.c 	free = (ptask->outstanding_pkts == 0 && !ptask->enqueued);
enqueued         1042 drivers/firewire/net.c 		ptask->enqueued = true;
enqueued         1346 drivers/firewire/net.c 	ptask->enqueued    = 0;
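
The drivers/firewire/net.c hits above use enqueued as a per-packet-task flag that, together with outstanding_pkts, decides whether the submit path or the completion path frees the task. Below is a minimal user-space sketch of that general pattern, not the driver's actual code; every name in it (my_task, my_lock, my_submit_done, my_complete) is made up for illustration.

	/* Sketch: submit and completion paths race; whoever observes both
	 * conditions last under the lock is responsible for the free. */
	#include <stdbool.h>
	#include <stdlib.h>
	#include <pthread.h>

	struct my_task {
		int outstanding_pkts;	/* fragments still in flight */
		bool enqueued;		/* submit path has finished queueing */
	};

	static pthread_mutex_t my_lock = PTHREAD_MUTEX_INITIALIZER;

	/* Completion side: free only if the submit side has already let go. */
	static void my_complete(struct my_task *t)
	{
		bool free_it;

		pthread_mutex_lock(&my_lock);
		t->outstanding_pkts--;
		free_it = (t->outstanding_pkts == 0 && t->enqueued);
		pthread_mutex_unlock(&my_lock);

		if (free_it)
			free(t);
	}

	/* Submit side: mark the task enqueued, or free it if completion
	 * already finished everything while we were still submitting. */
	static void my_submit_done(struct my_task *t)
	{
		bool free_it;

		pthread_mutex_lock(&my_lock);
		free_it = (t->outstanding_pkts == 0 && !t->enqueued);
		if (!free_it)
			t->enqueued = true;
		pthread_mutex_unlock(&my_lock);

		if (free_it)
			free(t);
	}

	int main(void)
	{
		struct my_task *t = malloc(sizeof(*t));

		t->outstanding_pkts = 1;
		t->enqueued = false;

		my_submit_done(t);	/* marks the task enqueued */
		my_complete(t);		/* last fragment done: frees it */
		return 0;
	}

The design point visible in the listed lines is that neither path frees unconditionally: each re-checks both the counter and the flag under the lock, so the free happens exactly once regardless of which path runs last.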
enqueued          774 fs/btrfs/reada.c 	u64 enqueued;
enqueued          780 fs/btrfs/reada.c 		enqueued = 0;
enqueued          785 fs/btrfs/reada.c 				enqueued += reada_start_machine_dev(device);
enqueued          788 fs/btrfs/reada.c 		total += enqueued;
enqueued          789 fs/btrfs/reada.c 	} while (enqueued && total < 10000);
enqueued          795 fs/btrfs/reada.c 	if (enqueued == 0)
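
The fs/btrfs/reada.c hits above show enqueued as a per-pass counter: each pass asks every device to start more readahead, and the loop repeats until a pass enqueues nothing or an overall budget is reached. A minimal sketch of that loop shape follows, under assumed names (my_device, my_start_one, BUDGET); it is an illustration, not the btrfs code.

	#include <stdint.h>
	#include <stdio.h>

	#define NUM_DEVICES	4
	#define BUDGET		10000	/* cap on work started per invocation */

	struct my_device {
		int pending;	/* requests this device could still start */
	};

	/* Start at most one request; return how many were enqueued. */
	static uint64_t my_start_one(struct my_device *dev)
	{
		if (dev->pending == 0)
			return 0;
		dev->pending--;
		return 1;
	}

	static void my_start_machine(struct my_device *devs, int ndev)
	{
		uint64_t enqueued;
		uint64_t total = 0;

		do {
			enqueued = 0;
			for (int i = 0; i < ndev; i++)
				enqueued += my_start_one(&devs[i]);
			total += enqueued;
		} while (enqueued && total < BUDGET);

		if (enqueued == 0) {
			/* Last pass found nothing: every device is idle. */
			printf("started %llu requests, all devices idle\n",
			       (unsigned long long)total);
		} else {
			/* Budget hit with work left over; a real implementation
			 * would reschedule itself rather than keep looping. */
			printf("budget hit after %llu requests\n",
			       (unsigned long long)total);
		}
	}

	int main(void)
	{
		struct my_device devs[NUM_DEVICES] = {
			{ .pending = 3 }, { .pending = 1 },
			{ .pending = 0 }, { .pending = 5 },
		};

		my_start_machine(devs, NUM_DEVICES);
		return 0;
	}

The budget check mirrors the "total < 10000" condition in the listing: it bounds how long one call can keep dispatching before handing the remaining work back.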
enqueued          348 include/linux/sched.h 	unsigned int			enqueued;
enqueued          535 kernel/sched/debug.c 			cfs_rq->avg.util_est.enqueued);
enqueued          953 kernel/sched/debug.c 	P(se.avg.util_est.enqueued);
enqueued         3702 kernel/sched/fair.c 	return (max(ue.ewma, ue.enqueued) | UTIL_AVG_UNCHANGED);
enqueued         3713 kernel/sched/fair.c 	unsigned int enqueued;
enqueued         3719 kernel/sched/fair.c 	enqueued  = cfs_rq->avg.util_est.enqueued;
enqueued         3720 kernel/sched/fair.c 	enqueued += _task_util_est(p);
enqueued         3721 kernel/sched/fair.c 	WRITE_ONCE(cfs_rq->avg.util_est.enqueued, enqueued);
enqueued         3748 kernel/sched/fair.c 	ue.enqueued  = cfs_rq->avg.util_est.enqueued;
enqueued         3749 kernel/sched/fair.c 	ue.enqueued -= min_t(unsigned int, ue.enqueued, _task_util_est(p));
enqueued         3750 kernel/sched/fair.c 	WRITE_ONCE(cfs_rq->avg.util_est.enqueued, ue.enqueued);
enqueued         3764 kernel/sched/fair.c 	if (ue.enqueued & UTIL_AVG_UNCHANGED)
enqueued         3771 kernel/sched/fair.c 	ue.enqueued = (task_util(p) | UTIL_AVG_UNCHANGED);
enqueued         3772 kernel/sched/fair.c 	last_ewma_diff = ue.enqueued - ue.ewma;
enqueued         6110 kernel/sched/fair.c 		util = max(util, READ_ONCE(cfs_rq->avg.util_est.enqueued));
enqueued         6171 kernel/sched/fair.c 			READ_ONCE(cfs_rq->avg.util_est.enqueued);
enqueued         6252 kernel/sched/fair.c 		util_est = READ_ONCE(cfs_rq->avg.util_est.enqueued);
enqueued           31 kernel/sched/pelt.h 	unsigned int enqueued;
enqueued           37 kernel/sched/pelt.h 	enqueued = avg->util_est.enqueued;
enqueued           38 kernel/sched/pelt.h 	if (!(enqueued & UTIL_AVG_UNCHANGED))
enqueued           42 kernel/sched/pelt.h 	enqueued &= ~UTIL_AVG_UNCHANGED;
enqueued           43 kernel/sched/pelt.h 	WRITE_ONCE(avg->util_est.enqueued, enqueued);
enqueued         2411 kernel/sched/sched.h 			     READ_ONCE(rq->cfs.avg.util_est.enqueued));
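
The scheduler hits (include/linux/sched.h, kernel/sched/fair.c, pelt.h, debug.c, sched.h) all concern util_est.enqueued: a runqueue keeps a running sum of the estimated utilization of its enqueued tasks, each task pairs a last sample ("enqueued") with an EWMA, and the least significant bit of the sample doubles as an "unchanged, skip the update" flag. The sketch below is a simplified user-space model of that accounting, not kernel code; all names are illustrative, and it masks the flag where the kernel keeps it set.

	#include <stdio.h>

	#define MY_UTIL_UNCHANGED	0x1	/* LSB of the sample reused as a flag */

	struct my_util_est {
		unsigned int enqueued;	/* last sample, LSB = unchanged flag */
		unsigned int ewma;	/* exponentially weighted moving average */
	};

	struct my_task {
		struct my_util_est ue;
	};

	struct my_rq {
		unsigned int util_est_enqueued;	/* sum over enqueued tasks */
	};

	/* Task contribution: the larger of its EWMA and its last sample. */
	static unsigned int my_task_util_est(const struct my_task *p)
	{
		unsigned int enq = p->ue.enqueued & ~MY_UTIL_UNCHANGED;

		return enq > p->ue.ewma ? enq : p->ue.ewma;
	}

	static void my_util_est_enqueue(struct my_rq *rq, const struct my_task *p)
	{
		rq->util_est_enqueued += my_task_util_est(p);
	}

	static void my_util_est_dequeue(struct my_rq *rq, const struct my_task *p)
	{
		unsigned int dec = my_task_util_est(p);

		/* Subtract min(sum, estimate) so the sum never underflows. */
		rq->util_est_enqueued -= dec < rq->util_est_enqueued ?
					 dec : rq->util_est_enqueued;
	}

	int main(void)
	{
		struct my_rq rq = { 0 };
		struct my_task p = { .ue = { .enqueued = 200 | MY_UTIL_UNCHANGED,
					     .ewma = 150 } };

		my_util_est_enqueue(&rq, &p);
		printf("after enqueue: %u\n", rq.util_est_enqueued);	/* 200 */
		my_util_est_dequeue(&rq, &p);
		printf("after dequeue: %u\n", rq.util_est_enqueued);	/* 0 */
		return 0;
	}

This mirrors the structure of the listed lines: enqueue adds the task's estimate to the runqueue sum, dequeue removes it with a clamp at zero, and the pelt.h lines show the unchanged flag being tested and cleared before an estimate is refreshed.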