tqueue           2795 drivers/atm/idt77252.c 		schedule_work(&card->tqueue);
tqueue           2807 drivers/atm/idt77252.c 		container_of(work, struct idt77252_dev, tqueue);
tqueue           3623 drivers/atm/idt77252.c 	INIT_WORK(&card->tqueue, idt77252_softint);
tqueue            365 drivers/atm/idt77252.h 	struct work_struct	tqueue;
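
The idt77252 hits above show the usual deferred-interrupt pattern: a work_struct embedded in the device structure, initialised with INIT_WORK(), queued from the interrupt path with schedule_work(), and recovered in the handler with container_of(). A minimal sketch of that pattern, using hypothetical my_dev/my_dev_softint names rather than the driver's real ones:

#include <linux/interrupt.h>
#include <linux/workqueue.h>

/* hypothetical device structure; the real driver embeds many more fields */
struct my_dev {
        struct work_struct tqueue;      /* deferred (bottom-half) work item */
};

/* runs later in process context on the system workqueue */
static void my_dev_softint(struct work_struct *work)
{
        struct my_dev *card = container_of(work, struct my_dev, tqueue);

        /* ... drain hardware status queues for 'card' here ... */
        (void)card;
}

static void my_dev_init(struct my_dev *card)
{
        INIT_WORK(&card->tqueue, my_dev_softint);
}

/* hard-IRQ path: hand the heavy lifting to the workqueue */
static irqreturn_t my_dev_interrupt(int irq, void *dev_id)
{
        struct my_dev *card = dev_id;

        schedule_work(&card->tqueue);
        return IRQ_HANDLED;
}
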
tqueue            312 drivers/lightnvm/core.c 	struct request_queue *tqueue;
tqueue            383 drivers/lightnvm/core.c 	tqueue = blk_alloc_queue_node(GFP_KERNEL, dev->q->node);
tqueue            384 drivers/lightnvm/core.c 	if (!tqueue) {
tqueue            388 drivers/lightnvm/core.c 	blk_queue_make_request(tqueue, tt->make_rq);
tqueue            395 drivers/lightnvm/core.c 	tdisk->queue = tqueue;
tqueue            404 drivers/lightnvm/core.c 	tqueue->queuedata = targetdata;
tqueue            411 drivers/lightnvm/core.c 	blk_queue_max_hw_sectors(tqueue, mdts);
tqueue            436 drivers/lightnvm/core.c 	blk_cleanup_queue(tqueue);
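
The lightnvm core hits outline creation of a bio-based request queue for a target disk: allocate the queue, install a make_request handler, cap the hardware sector count at the device MDTS, point queuedata at the target, and attach the queue to the gendisk, with blk_cleanup_queue() as the teardown counterpart on the error path. A minimal sketch against the block-layer API of the kernel era this listing reflects (blk_alloc_queue_node() and blk_queue_make_request() were removed in later kernels); the my_tgt_* names and the node/mdts parameters are placeholders, not lightnvm's actual interface:

#include <linux/blkdev.h>
#include <linux/genhd.h>

/* hypothetical bio-based make_request handler for the target device */
static blk_qc_t my_tgt_make_rq(struct request_queue *q, struct bio *bio)
{
        /* ... translate and submit the bio against the underlying device ... */
        bio_endio(bio);
        return BLK_QC_T_NONE;
}

/* attach a freshly allocated queue to the target's gendisk */
static int my_tgt_attach_queue(struct gendisk *tdisk, void *targetdata,
                               int node, unsigned int mdts)
{
        struct request_queue *tqueue;

        tqueue = blk_alloc_queue_node(GFP_KERNEL, node);
        if (!tqueue)
                return -ENOMEM;

        blk_queue_make_request(tqueue, my_tgt_make_rq);
        blk_queue_max_hw_sectors(tqueue, mdts); /* cap transfers at the device limit */
        tqueue->queuedata = targetdata;
        tdisk->queue = tqueue;

        return 0;
}
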
tqueue           1149 drivers/lightnvm/pblk-init.c 	struct request_queue *tqueue = tdisk->queue;
tqueue           1242 drivers/lightnvm/pblk-init.c 	blk_queue_logical_block_size(tqueue, queue_physical_block_size(bqueue));
tqueue           1243 drivers/lightnvm/pblk-init.c 	blk_queue_max_hw_sectors(tqueue, queue_max_hw_sectors(bqueue));
tqueue           1245 drivers/lightnvm/pblk-init.c 	blk_queue_write_cache(tqueue, true, false);
tqueue           1247 drivers/lightnvm/pblk-init.c 	tqueue->limits.discard_granularity = geo->clba * geo->csecs;
tqueue           1248 drivers/lightnvm/pblk-init.c 	tqueue->limits.discard_alignment = 0;
tqueue           1249 drivers/lightnvm/pblk-init.c 	blk_queue_max_discard_sectors(tqueue, UINT_MAX >> 9);
tqueue           1250 drivers/lightnvm/pblk-init.c 	blk_queue_flag_set(QUEUE_FLAG_DISCARD, tqueue);
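
The pblk-init hits configure that target queue's limits by mirroring the underlying device's queue (bqueue) and enabling a volatile write cache plus discard support. A condensed sketch using only the calls visible in the hits, assuming the caller supplies bqueue and the discard granularity (geo->clba * geo->csecs in the real driver):

#include <linux/blkdev.h>

static void my_pblk_set_limits(struct request_queue *tqueue,
                               struct request_queue *bqueue,
                               unsigned int discard_gran)
{
        /* inherit block size and transfer limits from the backing queue */
        blk_queue_logical_block_size(tqueue, queue_physical_block_size(bqueue));
        blk_queue_max_hw_sectors(tqueue, queue_max_hw_sectors(bqueue));

        /* volatile write cache, no FUA */
        blk_queue_write_cache(tqueue, true, false);

        /* advertise discard, aligned to the device's erase unit */
        tqueue->limits.discard_granularity = discard_gran;
        tqueue->limits.discard_alignment = 0;
        blk_queue_max_discard_sectors(tqueue, UINT_MAX >> 9);
        blk_queue_flag_set(QUEUE_FLAG_DISCARD, tqueue);
}
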
tqueue            574 drivers/net/ethernet/freescale/gianfar.c 		priv->tqueue |= (TQUEUE_EN0 >> i);
tqueue           1074 drivers/net/ethernet/freescale/gianfar.c 	gfar_write(&regs->tqueue, 0);
tqueue           1206 drivers/net/ethernet/freescale/gianfar.c 	gfar_write(&regs->tqueue, priv->tqueue);
tqueue            669 drivers/net/ethernet/freescale/gianfar.h 	u32	tqueue;		/* 0x.114 - Transmit queue control register */
tqueue           1148 drivers/net/ethernet/freescale/gianfar.h 	u32 tqueue;
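
In gianfar, tqueue names both a register in the MMIO block (offset 0x.114, transmit queue control) and a software copy kept in the private data: setup accumulates one enable bit per TX queue, reset writes 0 to the register, and bring-up writes the accumulated value back. A rough sketch of that pattern using generic iowrite32() in place of the driver's gfar_write() accessor; MY_TQUEUE_EN0 and the structure layout are hypothetical, not the driver's real definitions:

#include <linux/io.h>
#include <linux/types.h>

#define MY_TQUEUE_EN0   0x00008000      /* hypothetical enable bit for queue 0 */

struct my_priv {
        void __iomem *tqueue_reg;       /* MMIO address of the TQUEUE register */
        u32 tqueue;                     /* software copy of the enable bits */
};

static void my_enable_tx_queues(struct my_priv *priv, int num_queues)
{
        int i;

        /* accumulate one enable bit per active TX queue */
        for (i = 0; i < num_queues; i++)
                priv->tqueue |= MY_TQUEUE_EN0 >> i;

        iowrite32(0, priv->tqueue_reg);            /* quiesce during (re)configuration */
        iowrite32(priv->tqueue, priv->tqueue_reg); /* then commit the soft copy */
}
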
tqueue            635 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 		struct sxgbe_tx_queue *tqueue = priv->txq[queue_num];
tqueue            636 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 		tx_free_ring_skbufs(tqueue);
tqueue            730 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c static void sxgbe_tx_queue_clean(struct sxgbe_tx_queue *tqueue)
tqueue            732 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	struct sxgbe_priv_data *priv = tqueue->priv_ptr;
tqueue            735 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	u8 queue_no = tqueue->queue_no;
tqueue            742 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	while (tqueue->dirty_tx != tqueue->cur_tx) {
tqueue            743 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 		unsigned int entry = tqueue->dirty_tx % tx_rsize;
tqueue            744 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 		struct sk_buff *skb = tqueue->tx_skbuff[entry];
tqueue            747 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 		p = tqueue->dma_tx + entry;
tqueue            755 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 				 __func__, tqueue->cur_tx, tqueue->dirty_tx);
tqueue            757 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 		if (likely(tqueue->tx_skbuff_dma[entry])) {
tqueue            759 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 					 tqueue->tx_skbuff_dma[entry],
tqueue            762 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 			tqueue->tx_skbuff_dma[entry] = 0;
tqueue            767 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 			tqueue->tx_skbuff[entry] = NULL;
tqueue            772 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 		tqueue->dirty_tx++;
tqueue            777 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	    sxgbe_tx_avail(tqueue, tx_rsize) > SXGBE_TX_THRESH(priv))) {
tqueue            796 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 		struct sxgbe_tx_queue *tqueue = priv->txq[queue_num];
tqueue            798 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 		sxgbe_tx_queue_clean(tqueue);
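
The first sxgbe cluster is the TX-completion path: sxgbe_tx_queue_clean() walks descriptors from dirty_tx (oldest unreclaimed) up to cur_tx (next to be used), unmaps the DMA buffer and frees the skb for each completed entry, then restarts the netdev queue once enough descriptors are free. A simplified sketch of that reclaim loop, with a hypothetical my_txq structure and without the descriptor-ownership checks and queue-restart logic the real driver performs; the buffer length is passed in here, whereas the driver reads it back from the descriptor:

#include <linux/dma-mapping.h>
#include <linux/skbuff.h>

struct my_txq {
        unsigned int cur_tx;            /* next descriptor the driver will use */
        unsigned int dirty_tx;          /* oldest descriptor not yet reclaimed */
        struct sk_buff **tx_skbuff;     /* skb owned by each ring entry */
        dma_addr_t *tx_skbuff_dma;      /* DMA mapping of each ring entry */
        unsigned int len;               /* ring size */
};

/* reclaim completed descriptors between dirty_tx and cur_tx */
static void my_tx_clean(struct device *dev, struct my_txq *txq,
                        size_t buf_len)
{
        while (txq->dirty_tx != txq->cur_tx) {
                unsigned int entry = txq->dirty_tx % txq->len;
                struct sk_buff *skb = txq->tx_skbuff[entry];

                if (txq->tx_skbuff_dma[entry]) {
                        dma_unmap_single(dev, txq->tx_skbuff_dma[entry],
                                         buf_len, DMA_TO_DEVICE);
                        txq->tx_skbuff_dma[entry] = 0;
                }

                if (skb) {
                        dev_kfree_skb_any(skb);
                        txq->tx_skbuff[entry] = NULL;
                }

                txq->dirty_tx++;
        }
}
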
tqueue           1274 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	struct sxgbe_tx_queue *tqueue = priv->txq[txq_index];
tqueue           1286 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	if (unlikely(skb_is_gso(skb) && tqueue->prev_mss != cur_mss))
tqueue           1291 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 		      tqueue->hwts_tx_en)))
tqueue           1297 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	if (unlikely(sxgbe_tx_avail(tqueue, tx_rsize) < nr_frags + 1)) {
tqueue           1306 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	entry = tqueue->cur_tx % tx_rsize;
tqueue           1307 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	tx_desc = tqueue->dma_tx + entry;
tqueue           1314 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	tqueue->tx_skbuff[entry] = skb;
tqueue           1319 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 			if (unlikely(tqueue->prev_mss != cur_mss)) {
tqueue           1331 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 				entry = (++tqueue->cur_tx) % tx_rsize;
tqueue           1332 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 				first_desc = tqueue->dma_tx + entry;
tqueue           1334 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 				tqueue->prev_mss = cur_mss;
tqueue           1353 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 		entry = (++tqueue->cur_tx) % tx_rsize;
tqueue           1354 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 		tx_desc = tqueue->dma_tx + entry;
tqueue           1358 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 		tqueue->tx_skbuff_dma[entry] = tx_desc->tdes01;
tqueue           1359 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 		tqueue->tx_skbuff[entry] = NULL;
tqueue           1377 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	tqueue->tx_count_frames += nr_frags + 1;
tqueue           1378 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	if (tqueue->tx_count_frames > tqueue->tx_coal_frames) {
tqueue           1381 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 		mod_timer(&tqueue->txtimer,
tqueue           1382 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 			  SXGBE_COAL_TIMER(tqueue->tx_coal_timer));
tqueue           1384 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 		tqueue->tx_count_frames = 0;
tqueue           1393 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	tqueue->cur_tx++;
tqueue           1397 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 		  __func__, tqueue->cur_tx % tx_rsize,
tqueue           1398 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 		  tqueue->dirty_tx % tx_rsize, entry,
tqueue           1401 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 	if (unlikely(sxgbe_tx_avail(tqueue, tx_rsize) <= (MAX_SKB_FRAGS + 1))) {
tqueue           1410 drivers/net/ethernet/samsung/sxgbe/sxgbe_main.c 		     tqueue->hwts_tx_en)) {
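
The second sxgbe cluster is the xmit path: the driver reserves descriptors at cur_tx % tx_rsize, records each entry's skb and DMA address, advances cur_tx per fragment, and applies interrupt coalescing by counting frames against tx_coal_frames. The sketch below mirrors only the coalescing branch the hits show; my_txq_coal and its fields are placeholders, and SXGBE_COAL_TIMER() is replaced by an explicit jiffies calculation:

#include <linux/jiffies.h>
#include <linux/timer.h>
#include <linux/types.h>

struct my_txq_coal {
        struct timer_list txtimer;      /* deferred TX-completion timer */
        u32 tx_count_frames;            /* frames queued since the last flush */
        u32 tx_coal_frames;             /* batching threshold */
        u32 tx_coal_timer;              /* coalescing delay, in ms */
};

/* account one transmitted frame (plus its fragments) for coalescing */
static void my_tx_coalesce(struct my_txq_coal *txq, unsigned int nr_frags)
{
        txq->tx_count_frames += nr_frags + 1;
        if (txq->tx_count_frames > txq->tx_coal_frames) {
                /* defer the completion interrupt and (re)arm the timer */
                mod_timer(&txq->txtimer,
                          jiffies + msecs_to_jiffies(txq->tx_coal_timer));
        } else {
                /* let this completion interrupt fire; restart the batch count */
                txq->tx_count_frames = 0;
        }
}
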
tqueue             58 sound/core/timer.c 	struct snd_timer_tread *tqueue;
tqueue           1330 sound/core/timer.c 		memcpy(&tu->tqueue[tu->qtail++], tread, sizeof(*tread));
tqueue           1407 sound/core/timer.c 		r = &tu->tqueue[prev];
tqueue           1431 sound/core/timer.c 	struct snd_timer_tread *tqueue = NULL;
tqueue           1434 sound/core/timer.c 		tqueue = kcalloc(size, sizeof(*tqueue), GFP_KERNEL);
tqueue           1435 sound/core/timer.c 		if (!tqueue)
tqueue           1445 sound/core/timer.c 	kfree(tu->tqueue);
tqueue           1448 sound/core/timer.c 	tu->tqueue = tqueue;
tqueue           1491 sound/core/timer.c 		kfree(tu->tqueue);
tqueue           2105 sound/core/timer.c 			if (copy_to_user(buffer, &tu->tqueue[qhead],
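
The sound/core/timer hits use tqueue as a kcalloc'd ring of struct snd_timer_tread events per user client: the ring is (re)allocated by freeing the old array and installing a new one, appended to at qtail when a timer event arrives, and drained with copy_to_user() on read. A self-contained sketch of that ring, with a hypothetical my_event/my_user layout standing in for the ALSA structures and all locking omitted:

#include <linux/slab.h>
#include <linux/string.h>
#include <linux/uaccess.h>

struct my_event {                       /* stand-in for struct snd_timer_tread */
        int event;
        unsigned long val;
};

struct my_user {
        struct my_event *tqueue;        /* kcalloc'd ring of pending events */
        unsigned int queue_size;
        unsigned int qhead, qtail, qused;
};

static int my_user_alloc_queue(struct my_user *tu, unsigned int size)
{
        struct my_event *tqueue;

        tqueue = kcalloc(size, sizeof(*tqueue), GFP_KERNEL);
        if (!tqueue)
                return -ENOMEM;

        kfree(tu->tqueue);              /* drop any previous ring */
        tu->tqueue = tqueue;
        tu->queue_size = size;
        tu->qhead = tu->qtail = tu->qused = 0;
        return 0;
}

/* producer side: append one event at the tail */
static void my_user_append(struct my_user *tu, const struct my_event *ev)
{
        memcpy(&tu->tqueue[tu->qtail++], ev, sizeof(*ev));
        tu->qtail %= tu->queue_size;
        tu->qused++;
}

/* consumer side: copy the head entry out to user space */
static long my_user_read_one(struct my_user *tu, void __user *buffer)
{
        if (copy_to_user(buffer, &tu->tqueue[tu->qhead], sizeof(struct my_event)))
                return -EFAULT;
        tu->qhead = (tu->qhead + 1) % tu->queue_size;
        tu->qused--;
        return sizeof(struct my_event);
}
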