tx_queue         1062 arch/um/drivers/vector_kern.c 	queue_depth = vector_enqueue(vp->tx_queue, skb);
tx_queue         1068 arch/um/drivers/vector_kern.c 	if (queue_depth >= vp->tx_queue->max_depth - 1) {
tx_queue         1071 arch/um/drivers/vector_kern.c 		vector_send(vp->tx_queue);
tx_queue         1080 arch/um/drivers/vector_kern.c 		vector_send(vp->tx_queue);
tx_queue         1156 arch/um/drivers/vector_kern.c 	if (vp->tx_queue != NULL)
tx_queue         1157 arch/um/drivers/vector_kern.c 		destroy_queue(vp->tx_queue);
tx_queue         1174 arch/um/drivers/vector_kern.c 	vector_send(vp->tx_queue);
tx_queue         1224 arch/um/drivers/vector_kern.c 		vp->tx_queue = create_queue(
tx_queue         1356 arch/um/drivers/vector_kern.c 	ring->tx_max_pending = vp->tx_queue->max_depth;
tx_queue         1358 arch/um/drivers/vector_kern.c 	ring->tx_pending = vp->tx_queue->max_depth;
tx_queue         1451 arch/um/drivers/vector_kern.c 	vector_send(vp->tx_queue);
tx_queue           84 arch/um/drivers/vector_kern.h 	struct vector_queue *tx_queue;
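
The arch/um vector driver lines above batch frames: each skb is pushed onto a driver-private vector_queue with vector_enqueue(), and vector_send() flushes the queue once it comes within one slot of max_depth (and again from the later poll/timer paths). A minimal sketch of that batch-and-flush shape, using illustrative my_* names rather than the driver's real structures:

	#include <linux/skbuff.h>

	/* Illustrative only: a bounded batch queue flushed when it is one slot
	 * short of full, mirroring vector_kern.c:1062-1071. max_depth is
	 * assumed to be at most ARRAY_SIZE(skbs). */
	struct my_batch_queue {
		struct sk_buff *skbs[64];
		int depth;
		int max_depth;
	};

	static int my_enqueue(struct my_batch_queue *q, struct sk_buff *skb)
	{
		if (q->depth < q->max_depth)
			q->skbs[q->depth++] = skb;
		return q->depth;
	}

	static void my_queue_and_maybe_flush(struct my_batch_queue *q,
					     struct sk_buff *skb,
					     void (*flush)(struct my_batch_queue *))
	{
		int depth = my_enqueue(q, skb);

		if (depth >= q->max_depth - 1)	/* near-full: push the batch out */
			flush(q);
	}
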
tx_queue         1178 drivers/atm/eni.c 	skb_queue_tail(&eni_dev->tx_queue,skb);
tx_queue         1218 drivers/atm/eni.c 	while ((skb = skb_dequeue(&eni_dev->tx_queue))) {
tx_queue         1227 drivers/atm/eni.c 			skb_queue_head(&eni_dev->tx_queue,skb);
tx_queue         1446 drivers/atm/eni.c 	skb_queue_head_init(&eni_dev->tx_queue);
tx_queue         1983 drivers/atm/eni.c 	skb_queue_walk(&eni_dev->tx_queue, skb) {
tx_queue           87 drivers/atm/eni.h 	struct sk_buff_head tx_queue;	/* PDUs currently being TX DMAed*/
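
In eni.c/eni.h, tx_queue is a plain sk_buff_head holding PDUs whose DMA is still in flight: skbs are appended on submit, popped in order by the completion path, and pushed back to the head if the hardware has not finished with them. A hedged sketch of that pattern using the real skbuff helpers (the surrounding functions are illustrative, and skb_queue_head_init() is assumed to have run at device setup, as at eni.c:1446):

	#include <linux/skbuff.h>

	static void tx_submit(struct sk_buff_head *q, struct sk_buff *skb)
	{
		skb_queue_tail(q, skb);			/* as at eni.c:1178 */
	}

	static void tx_complete(struct sk_buff_head *q,
				bool (*dma_done)(struct sk_buff *))
	{
		struct sk_buff *skb;

		while ((skb = skb_dequeue(q))) {	/* as at eni.c:1218 */
			if (!dma_done(skb)) {
				/* not finished yet: put it back and stop */
				skb_queue_head(q, skb);
				break;
			}
			dev_kfree_skb_irq(skb);
		}
	}
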
tx_queue         1065 drivers/atm/horizon.c   wait_event_interruptible(dev->tx_queue, (!test_and_set_bit(tx_busy, &dev->flags)));
tx_queue         1078 drivers/atm/horizon.c   wake_up_interruptible (&dev->tx_queue);
tx_queue         2778 drivers/atm/horizon.c 	init_waitqueue_head(&dev->tx_queue);
tx_queue          408 drivers/atm/horizon.h   wait_queue_head_t   tx_queue;
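
Despite its name, the horizon driver's tx_queue is a wait_queue_head_t used as a sleep/wake gate around the single transmit path: callers sleep until they win the test_and_set_bit() race on a tx_busy flag, and the owner wakes the next waiter when it clears the bit. A sketch with illustrative structure and bit names (init_waitqueue_head() is assumed at probe time, as at horizon.c:2778):

	#include <linux/wait.h>
	#include <linux/bitops.h>

	#define MY_TX_BUSY	0

	struct my_dev {
		wait_queue_head_t tx_queue;
		unsigned long flags;
	};

	static int my_tx_acquire(struct my_dev *dev)
	{
		/* returns 0 once the bit was grabbed, -ERESTARTSYS on signal */
		return wait_event_interruptible(dev->tx_queue,
				!test_and_set_bit(MY_TX_BUSY, &dev->flags));
	}

	static void my_tx_release(struct my_dev *dev)
	{
		clear_bit(MY_TX_BUSY, &dev->flags);
		wake_up_interruptible(&dev->tx_queue);
	}
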
tx_queue          118 drivers/atm/solos-pci.c 	struct sk_buff_head tx_queue[4];
tx_queue          960 drivers/atm/solos-pci.c 	skb_queue_walk_safe(&card->tx_queue[port], skb, tmpskb) {
tx_queue          962 drivers/atm/solos-pci.c 			skb_unlink(skb, &card->tx_queue[port]);
tx_queue         1037 drivers/atm/solos-pci.c 	old_len = skb_queue_len(&card->tx_queue[port]);
tx_queue         1038 drivers/atm/solos-pci.c 	skb_queue_tail(&card->tx_queue[port], skb);
tx_queue         1079 drivers/atm/solos-pci.c 			skb = skb_dequeue(&card->tx_queue[port]);
tx_queue         1362 drivers/atm/solos-pci.c 		skb_queue_head_init(&card->tx_queue[i]);
tx_queue         1427 drivers/atm/solos-pci.c 			while ((skb = skb_dequeue(&card->tx_queue[i])))
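
solos-pci keeps one sk_buff_head per port (tx_queue[4]); besides the usual tail-enqueue/dequeue, lines 960-962 show selective removal with skb_queue_walk_safe() plus skb_unlink() when a VCC closes. A sketch of that pruning step (illustrative helper; the caller is assumed to already serialize against the enqueue/dequeue paths, e.g. with the driver's own lock):

	#include <linux/skbuff.h>

	static void prune_tx_queue(struct sk_buff_head *q,
				   bool (*belongs_to_closing_vcc)(struct sk_buff *))
	{
		struct sk_buff *skb, *tmpskb;

		skb_queue_walk_safe(q, skb, tmpskb) {
			if (belongs_to_closing_vcc(skb)) {
				skb_unlink(skb, q);
				dev_kfree_skb(skb);
			}
		}
	}
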
tx_queue          700 drivers/atm/zatm.c 	skb_queue_tail(&zatm_vcc->tx_queue,skb);
tx_queue          719 drivers/atm/zatm.c 	skb = skb_dequeue(&zatm_vcc->tx_queue);
tx_queue          888 drivers/atm/zatm.c 	if (skb_peek(&zatm_vcc->tx_queue)) {
tx_queue          891 drivers/atm/zatm.c 		wait_event(zatm_vcc->tx_wait, !skb_peek(&zatm_vcc->tx_queue));
tx_queue          956 drivers/atm/zatm.c 	skb_queue_head_init(&zatm_vcc->tx_queue);
tx_queue           50 drivers/atm/zatm.h 	struct sk_buff_head tx_queue;	/* list of buffers in transit */
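
zatm.c adds a drain-on-close step: if tx_queue still holds buffers in transit, the closer sleeps on a companion tx_wait wait queue until the completion side (which does skb_dequeue and a wake_up) has emptied the list. A sketch of that wait, assuming the same pairing of queue and wait queue:

	#include <linux/skbuff.h>
	#include <linux/wait.h>

	static void tx_drain(struct sk_buff_head *q, wait_queue_head_t *tx_wait)
	{
		if (skb_peek(q))				/* cf. zatm.c:888 */
			wait_event(*tx_wait, !skb_peek(q));	/* cf. zatm.c:891 */
	}
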
tx_queue           91 drivers/bluetooth/btmrvl_drv.h 	struct sk_buff_head tx_queue;
tx_queue          201 drivers/bluetooth/btmrvl_main.c 	skb_queue_head(&priv->adapter->tx_queue, skb);
tx_queue          396 drivers/bluetooth/btmrvl_main.c 	skb_queue_head_init(&priv->adapter->tx_queue);
tx_queue          419 drivers/bluetooth/btmrvl_main.c 	skb_queue_purge(&priv->adapter->tx_queue);
tx_queue          452 drivers/bluetooth/btmrvl_main.c 	skb_queue_tail(&priv->adapter->tx_queue, skb);
tx_queue          464 drivers/bluetooth/btmrvl_main.c 	skb_queue_purge(&priv->adapter->tx_queue);
tx_queue          473 drivers/bluetooth/btmrvl_main.c 	skb_queue_purge(&priv->adapter->tx_queue);
tx_queue          617 drivers/bluetooth/btmrvl_main.c 				skb_queue_empty(&adapter->tx_queue)))) {
tx_queue          639 drivers/bluetooth/btmrvl_main.c 					!skb_queue_empty(&adapter->tx_queue)) {
tx_queue          655 drivers/bluetooth/btmrvl_main.c 		skb = skb_dequeue(&adapter->tx_queue);
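
The btmrvl lines show the usual Bluetooth driver split: the HCI send path only does skb_queue_tail(), a service thread sleeps while the queue is empty, then dequeues and hands frames to the hardware, re-queuing at the head on failure (line 201) and purging on reset. A sketch of the thread's inner loop (hw_send is an illustrative stand-in for the bus send routine):

	#include <linux/skbuff.h>
	#include <linux/errno.h>

	static int tx_drain_once(struct sk_buff_head *q,
				 int (*hw_send)(struct sk_buff *))
	{
		struct sk_buff *skb;

		while ((skb = skb_dequeue(q))) {
			if (hw_send(skb)) {
				skb_queue_head(q, skb);	/* retry later, keep order */
				return -EBUSY;
			}
		}
		return 0;
	}
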
tx_queue          310 drivers/media/rc/ite-cir.c 		wake_up_interruptible(&dev->tx_queue);
tx_queue          465 drivers/media/rc/ite-cir.c 				wait_event_interruptible(dev->tx_queue, (fifo_avail = ITE_TX_FIFO_LEN - dev->params.get_tx_used_slots(dev)) >= 8);
tx_queue         1506 drivers/media/rc/ite-cir.c 	init_waitqueue_head(&itdev->tx_queue);
tx_queue          122 drivers/media/rc/ite-cir.h 	wait_queue_head_t tx_queue, tx_ended;
tx_queue          270 drivers/net/can/mscan/mscan.c 	list_add_tail(&priv->tx_queue[buf_id].list, &priv->tx_head);
tx_queue          696 drivers/net/can/mscan/mscan.c 		priv->tx_queue[i].id = i;
tx_queue          697 drivers/net/can/mscan/mscan.c 		priv->tx_queue[i].mask = 1 << i;
tx_queue          284 drivers/net/can/mscan/mscan.h 	struct tx_queue_entry tx_queue[TX_QUEUE_SIZE];
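
In mscan, tx_queue is a fixed array with one entry per hardware TX buffer; each entry records its buffer id and the matching interrupt-flag mask, and entries are linked onto tx_head in submission order so completions can be handled in the same order. A sketch of that bookkeeping with illustrative names:

	#include <linux/list.h>
	#include <linux/types.h>

	#define MY_TX_QUEUE_SIZE	3

	struct my_tx_entry {
		struct list_head list;
		u8 id;
		u8 mask;
	};

	struct my_priv {
		struct my_tx_entry tx_queue[MY_TX_QUEUE_SIZE];
		struct list_head tx_head;
	};

	static void my_tx_init(struct my_priv *priv)
	{
		int i;

		INIT_LIST_HEAD(&priv->tx_head);
		for (i = 0; i < MY_TX_QUEUE_SIZE; i++) {
			priv->tx_queue[i].id = i;
			priv->tx_queue[i].mask = 1 << i;	/* cf. mscan.c:697 */
		}
	}

	static void my_tx_submit(struct my_priv *priv, int buf_id)
	{
		list_add_tail(&priv->tx_queue[buf_id].list, &priv->tx_head);
	}
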
tx_queue          168 drivers/net/ethernet/atheros/alx/main.c 	struct netdev_queue *tx_queue;
tx_queue          174 drivers/net/ethernet/atheros/alx/main.c 	tx_queue = alx_get_tx_queue(txq);
tx_queue          197 drivers/net/ethernet/atheros/alx/main.c 		netdev_tx_completed_queue(tx_queue, total_packets, total_bytes);
tx_queue          200 drivers/net/ethernet/atheros/alx/main.c 	if (netif_tx_queue_stopped(tx_queue) && netif_carrier_ok(alx->dev) &&
tx_queue          202 drivers/net/ethernet/atheros/alx/main.c 		netif_tx_wake_queue(tx_queue);
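
Here tx_queue is a struct netdev_queue pointer used for byte queue limits: the completion handler reports finished packets/bytes and wakes the queue when it was stopped, the carrier is up, and enough descriptors are free again. A sketch of that completion-side handling (free_descs/wake_thresh stand in for the driver's ring accounting):

	#include <linux/netdevice.h>

	static void tx_complete_bql(struct net_device *ndev, int txq_idx,
				    unsigned int pkts, unsigned int bytes,
				    unsigned int free_descs, unsigned int wake_thresh)
	{
		struct netdev_queue *txq = netdev_get_tx_queue(ndev, txq_idx);

		netdev_tx_completed_queue(txq, pkts, bytes);

		if (netif_tx_queue_stopped(txq) && netif_carrier_ok(ndev) &&
		    free_descs >= wake_thresh)
			netif_tx_wake_queue(txq);
	}
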
tx_queue          354 drivers/net/ethernet/aurora/nb8800.c 	txb = &priv->tx_bufs[priv->tx_queue];
tx_queue          366 drivers/net/ethernet/aurora/nb8800.c 	priv->tx_queue = (priv->tx_queue + txb->chain_len) % TX_DESC_COUNT;
tx_queue          816 drivers/net/ethernet/aurora/nb8800.c 	priv->tx_queue = 0;
tx_queue          275 drivers/net/ethernet/aurora/nb8800.h 	u32				tx_queue;
tx_queue          523 drivers/net/ethernet/freescale/fec.h 	struct fec_enet_priv_tx_q *tx_queue[FEC_ENET_MAX_TX_QS];
tx_queue          344 drivers/net/ethernet/freescale/fec_main.c 	txq = fep->tx_queue[0];
tx_queue          808 drivers/net/ethernet/freescale/fec_main.c 	txq = fep->tx_queue[queue];
tx_queue          860 drivers/net/ethernet/freescale/fec_main.c 		txq = fep->tx_queue[q];
tx_queue          916 drivers/net/ethernet/freescale/fec_main.c 		txq = fep->tx_queue[i];
tx_queue          933 drivers/net/ethernet/freescale/fec_main.c 		txq = fep->tx_queue[i];
tx_queue         1257 drivers/net/ethernet/freescale/fec_main.c 	txq = fep->tx_queue[queue_id];
tx_queue         2770 drivers/net/ethernet/freescale/fec_main.c 		txq = fep->tx_queue[q];
tx_queue         2789 drivers/net/ethernet/freescale/fec_main.c 		if (fep->tx_queue[i] && fep->tx_queue[i]->tso_hdrs) {
tx_queue         2790 drivers/net/ethernet/freescale/fec_main.c 			txq = fep->tx_queue[i];
tx_queue         2800 drivers/net/ethernet/freescale/fec_main.c 		kfree(fep->tx_queue[i]);
tx_queue         2817 drivers/net/ethernet/freescale/fec_main.c 		fep->tx_queue[i] = txq;
tx_queue         2819 drivers/net/ethernet/freescale/fec_main.c 		fep->total_tx_ring_size += fep->tx_queue[i]->bd.ring_size;
tx_queue         2903 drivers/net/ethernet/freescale/fec_main.c 	txq = fep->tx_queue[queue];
tx_queue         3290 drivers/net/ethernet/freescale/fec_main.c 		struct fec_enet_priv_tx_q *txq = fep->tx_queue[i];
tx_queue          136 drivers/net/ethernet/freescale/gianfar.c 		gfar_write(baddr, priv->tx_queue[i]->tx_bd_dma_base);
tx_queue          254 drivers/net/ethernet/freescale/gianfar.c 			if (likely(priv->tx_queue[i]->txcoalescing))
tx_queue          255 drivers/net/ethernet/freescale/gianfar.c 				gfar_write(baddr + i, priv->tx_queue[i]->txic);
tx_queue          269 drivers/net/ethernet/freescale/gianfar.c 		if (likely(priv->tx_queue[0]->txcoalescing))
tx_queue          270 drivers/net/ethernet/freescale/gianfar.c 			gfar_write(&regs->txic, priv->tx_queue[0]->txic);
tx_queue          301 drivers/net/ethernet/freescale/gianfar.c 		tx_bytes += priv->tx_queue[i]->stats.tx_bytes;
tx_queue          302 drivers/net/ethernet/freescale/gianfar.c 		tx_packets += priv->tx_queue[i]->stats.tx_packets;
tx_queue          404 drivers/net/ethernet/freescale/gianfar.c 		priv->tx_queue[i] = kzalloc(sizeof(struct gfar_priv_tx_q),
tx_queue          406 drivers/net/ethernet/freescale/gianfar.c 		if (!priv->tx_queue[i])
tx_queue          409 drivers/net/ethernet/freescale/gianfar.c 		priv->tx_queue[i]->tx_skbuff = NULL;
tx_queue          410 drivers/net/ethernet/freescale/gianfar.c 		priv->tx_queue[i]->qindex = i;
tx_queue          411 drivers/net/ethernet/freescale/gianfar.c 		priv->tx_queue[i]->dev = priv->ndev;
tx_queue          412 drivers/net/ethernet/freescale/gianfar.c 		spin_lock_init(&(priv->tx_queue[i]->txlock));
tx_queue          438 drivers/net/ethernet/freescale/gianfar.c 		kfree(priv->tx_queue[i]);
tx_queue          570 drivers/net/ethernet/freescale/gianfar.c 		if (!grp->tx_queue)
tx_queue          571 drivers/net/ethernet/freescale/gianfar.c 			grp->tx_queue = priv->tx_queue[i];
tx_queue          575 drivers/net/ethernet/freescale/gianfar.c 		priv->tx_queue[i]->grp = grp;
tx_queue         1086 drivers/net/ethernet/freescale/gianfar.c static void free_skb_tx_queue(struct gfar_priv_tx_q *tx_queue)
tx_queue         1089 drivers/net/ethernet/freescale/gianfar.c 	struct gfar_private *priv = netdev_priv(tx_queue->dev);
tx_queue         1092 drivers/net/ethernet/freescale/gianfar.c 	txbdp = tx_queue->tx_bd_base;
tx_queue         1094 drivers/net/ethernet/freescale/gianfar.c 	for (i = 0; i < tx_queue->tx_ring_size; i++) {
tx_queue         1095 drivers/net/ethernet/freescale/gianfar.c 		if (!tx_queue->tx_skbuff[i])
tx_queue         1101 drivers/net/ethernet/freescale/gianfar.c 		for (j = 0; j < skb_shinfo(tx_queue->tx_skbuff[i])->nr_frags;
tx_queue         1109 drivers/net/ethernet/freescale/gianfar.c 		dev_kfree_skb_any(tx_queue->tx_skbuff[i]);
tx_queue         1110 drivers/net/ethernet/freescale/gianfar.c 		tx_queue->tx_skbuff[i] = NULL;
tx_queue         1112 drivers/net/ethernet/freescale/gianfar.c 	kfree(tx_queue->tx_skbuff);
tx_queue         1113 drivers/net/ethernet/freescale/gianfar.c 	tx_queue->tx_skbuff = NULL;
tx_queue         1150 drivers/net/ethernet/freescale/gianfar.c 	struct gfar_priv_tx_q *tx_queue = NULL;
tx_queue         1158 drivers/net/ethernet/freescale/gianfar.c 		tx_queue = priv->tx_queue[i];
tx_queue         1159 drivers/net/ethernet/freescale/gianfar.c 		txq = netdev_get_tx_queue(tx_queue->dev, tx_queue->qindex);
tx_queue         1160 drivers/net/ethernet/freescale/gianfar.c 		if (tx_queue->tx_skbuff)
tx_queue         1161 drivers/net/ethernet/freescale/gianfar.c 			free_skb_tx_queue(tx_queue);
tx_queue         1174 drivers/net/ethernet/freescale/gianfar.c 			  priv->tx_queue[0]->tx_bd_base,
tx_queue         1175 drivers/net/ethernet/freescale/gianfar.c 			  priv->tx_queue[0]->tx_bd_dma_base);
tx_queue         1310 drivers/net/ethernet/freescale/gianfar.c 	struct gfar_priv_tx_q *tx_queue = NULL;
tx_queue         1317 drivers/net/ethernet/freescale/gianfar.c 		tx_queue = priv->tx_queue[i];
tx_queue         1319 drivers/net/ethernet/freescale/gianfar.c 		tx_queue->num_txbdfree = tx_queue->tx_ring_size;
tx_queue         1320 drivers/net/ethernet/freescale/gianfar.c 		tx_queue->dirty_tx = tx_queue->tx_bd_base;
tx_queue         1321 drivers/net/ethernet/freescale/gianfar.c 		tx_queue->cur_tx = tx_queue->tx_bd_base;
tx_queue         1322 drivers/net/ethernet/freescale/gianfar.c 		tx_queue->skb_curtx = 0;
tx_queue         1323 drivers/net/ethernet/freescale/gianfar.c 		tx_queue->skb_dirtytx = 0;
tx_queue         1326 drivers/net/ethernet/freescale/gianfar.c 		txbdp = tx_queue->tx_bd_base;
tx_queue         1327 drivers/net/ethernet/freescale/gianfar.c 		for (j = 0; j < tx_queue->tx_ring_size; j++) {
tx_queue         1364 drivers/net/ethernet/freescale/gianfar.c 	struct gfar_priv_tx_q *tx_queue = NULL;
tx_queue         1369 drivers/net/ethernet/freescale/gianfar.c 		priv->total_tx_ring_size += priv->tx_queue[i]->tx_ring_size;
tx_queue         1386 drivers/net/ethernet/freescale/gianfar.c 		tx_queue = priv->tx_queue[i];
tx_queue         1387 drivers/net/ethernet/freescale/gianfar.c 		tx_queue->tx_bd_base = vaddr;
tx_queue         1388 drivers/net/ethernet/freescale/gianfar.c 		tx_queue->tx_bd_dma_base = addr;
tx_queue         1389 drivers/net/ethernet/freescale/gianfar.c 		tx_queue->dev = ndev;
tx_queue         1391 drivers/net/ethernet/freescale/gianfar.c 		addr  += sizeof(struct txbd8) * tx_queue->tx_ring_size;
tx_queue         1392 drivers/net/ethernet/freescale/gianfar.c 		vaddr += sizeof(struct txbd8) * tx_queue->tx_ring_size;
tx_queue         1408 drivers/net/ethernet/freescale/gianfar.c 		tx_queue = priv->tx_queue[i];
tx_queue         1409 drivers/net/ethernet/freescale/gianfar.c 		tx_queue->tx_skbuff =
tx_queue         1410 drivers/net/ethernet/freescale/gianfar.c 			kmalloc_array(tx_queue->tx_ring_size,
tx_queue         1411 drivers/net/ethernet/freescale/gianfar.c 				      sizeof(*tx_queue->tx_skbuff),
tx_queue         1413 drivers/net/ethernet/freescale/gianfar.c 		if (!tx_queue->tx_skbuff)
tx_queue         1416 drivers/net/ethernet/freescale/gianfar.c 		for (j = 0; j < tx_queue->tx_ring_size; j++)
tx_queue         1417 drivers/net/ethernet/freescale/gianfar.c 			tx_queue->tx_skbuff[j] = NULL;
tx_queue         1796 drivers/net/ethernet/freescale/gianfar.c 	struct gfar_priv_tx_q *tx_queue = NULL;
tx_queue         1809 drivers/net/ethernet/freescale/gianfar.c 	tx_queue = priv->tx_queue[rq];
tx_queue         1811 drivers/net/ethernet/freescale/gianfar.c 	base = tx_queue->tx_bd_base;
tx_queue         1812 drivers/net/ethernet/freescale/gianfar.c 	regs = tx_queue->grp->regs;
tx_queue         1853 drivers/net/ethernet/freescale/gianfar.c 	if (nr_txbds > tx_queue->num_txbdfree) {
tx_queue         1862 drivers/net/ethernet/freescale/gianfar.c 	tx_queue->stats.tx_bytes += bytes_sent;
tx_queue         1865 drivers/net/ethernet/freescale/gianfar.c 	tx_queue->stats.tx_packets++;
tx_queue         1867 drivers/net/ethernet/freescale/gianfar.c 	txbdp = txbdp_start = tx_queue->cur_tx;
tx_queue         1914 drivers/net/ethernet/freescale/gianfar.c 						 tx_queue->tx_ring_size);
tx_queue         1928 drivers/net/ethernet/freescale/gianfar.c 			txbdp = next_txbd(txbdp, base, tx_queue->tx_ring_size);
tx_queue         1987 drivers/net/ethernet/freescale/gianfar.c 	tx_queue->tx_skbuff[tx_queue->skb_curtx] = skb;
tx_queue         1992 drivers/net/ethernet/freescale/gianfar.c 	tx_queue->skb_curtx = (tx_queue->skb_curtx + 1) &
tx_queue         1993 drivers/net/ethernet/freescale/gianfar.c 			      TX_RING_MOD_MASK(tx_queue->tx_ring_size);
tx_queue         1995 drivers/net/ethernet/freescale/gianfar.c 	tx_queue->cur_tx = next_txbd(txbdp, base, tx_queue->tx_ring_size);
tx_queue         2002 drivers/net/ethernet/freescale/gianfar.c 	spin_lock_bh(&tx_queue->txlock);
tx_queue         2004 drivers/net/ethernet/freescale/gianfar.c 	tx_queue->num_txbdfree -= (nr_txbds);
tx_queue         2005 drivers/net/ethernet/freescale/gianfar.c 	spin_unlock_bh(&tx_queue->txlock);
tx_queue         2010 drivers/net/ethernet/freescale/gianfar.c 	if (!tx_queue->num_txbdfree) {
tx_queue         2017 drivers/net/ethernet/freescale/gianfar.c 	gfar_write(&regs->tstat, TSTAT_CLEAR_THALT >> tx_queue->qindex);
tx_queue         2022 drivers/net/ethernet/freescale/gianfar.c 	txbdp = next_txbd(txbdp_start, base, tx_queue->tx_ring_size);
tx_queue         2024 drivers/net/ethernet/freescale/gianfar.c 		txbdp = next_txbd(txbdp, base, tx_queue->tx_ring_size);
tx_queue         2035 drivers/net/ethernet/freescale/gianfar.c 		txbdp = next_txbd(txbdp, base, tx_queue->tx_ring_size);
tx_queue         2183 drivers/net/ethernet/freescale/gianfar.c static void gfar_clean_tx_ring(struct gfar_priv_tx_q *tx_queue)
tx_queue         2185 drivers/net/ethernet/freescale/gianfar.c 	struct net_device *dev = tx_queue->dev;
tx_queue         2190 drivers/net/ethernet/freescale/gianfar.c 	struct txbd8 *base = tx_queue->tx_bd_base;
tx_queue         2193 drivers/net/ethernet/freescale/gianfar.c 	int tx_ring_size = tx_queue->tx_ring_size;
tx_queue         2197 drivers/net/ethernet/freescale/gianfar.c 	int tqi = tx_queue->qindex;
tx_queue         2203 drivers/net/ethernet/freescale/gianfar.c 	bdp = tx_queue->dirty_tx;
tx_queue         2204 drivers/net/ethernet/freescale/gianfar.c 	skb_dirtytx = tx_queue->skb_dirtytx;
tx_queue         2206 drivers/net/ethernet/freescale/gianfar.c 	while ((skb = tx_queue->tx_skbuff[skb_dirtytx])) {
tx_queue         2269 drivers/net/ethernet/freescale/gianfar.c 		tx_queue->tx_skbuff[skb_dirtytx] = NULL;
tx_queue         2275 drivers/net/ethernet/freescale/gianfar.c 		spin_lock(&tx_queue->txlock);
tx_queue         2276 drivers/net/ethernet/freescale/gianfar.c 		tx_queue->num_txbdfree += nr_txbds;
tx_queue         2277 drivers/net/ethernet/freescale/gianfar.c 		spin_unlock(&tx_queue->txlock);
tx_queue         2281 drivers/net/ethernet/freescale/gianfar.c 	if (tx_queue->num_txbdfree &&
tx_queue         2287 drivers/net/ethernet/freescale/gianfar.c 	tx_queue->skb_dirtytx = skb_dirtytx;
tx_queue         2288 drivers/net/ethernet/freescale/gianfar.c 	tx_queue->dirty_tx = bdp;
tx_queue         2660 drivers/net/ethernet/freescale/gianfar.c 	struct gfar_priv_tx_q *tx_queue = gfargrp->tx_queue;
tx_queue         2669 drivers/net/ethernet/freescale/gianfar.c 	if (tx_queue->tx_skbuff[tx_queue->skb_dirtytx])
tx_queue         2670 drivers/net/ethernet/freescale/gianfar.c 		gfar_clean_tx_ring(tx_queue);
tx_queue         2751 drivers/net/ethernet/freescale/gianfar.c 	struct gfar_priv_tx_q *tx_queue = NULL;
tx_queue         2761 drivers/net/ethernet/freescale/gianfar.c 		tx_queue = priv->tx_queue[i];
tx_queue         2763 drivers/net/ethernet/freescale/gianfar.c 		if (tx_queue->tx_skbuff[tx_queue->skb_dirtytx]) {
tx_queue         2764 drivers/net/ethernet/freescale/gianfar.c 			gfar_clean_tx_ring(tx_queue);
tx_queue         3382 drivers/net/ethernet/freescale/gianfar.c 		priv->tx_queue[i]->tx_ring_size = DEFAULT_TX_RING_SIZE;
tx_queue         3383 drivers/net/ethernet/freescale/gianfar.c 		priv->tx_queue[i]->num_txbdfree = DEFAULT_TX_RING_SIZE;
tx_queue         3384 drivers/net/ethernet/freescale/gianfar.c 		priv->tx_queue[i]->txcoalescing = DEFAULT_TX_COALESCE;
tx_queue         3385 drivers/net/ethernet/freescale/gianfar.c 		priv->tx_queue[i]->txic = DEFAULT_TXIC;
tx_queue         3455 drivers/net/ethernet/freescale/gianfar.c 			    i, priv->tx_queue[i]->tx_ring_size);
tx_queue         1051 drivers/net/ethernet/freescale/gianfar.h 	struct gfar_priv_tx_q *tx_queue;
tx_queue         1102 drivers/net/ethernet/freescale/gianfar.h 	struct gfar_priv_tx_q *tx_queue[MAX_TX_QS];
tx_queue          254 drivers/net/ethernet/freescale/gianfar_ethtool.c 	struct gfar_priv_tx_q *tx_queue = NULL;
tx_queue          267 drivers/net/ethernet/freescale/gianfar_ethtool.c 	tx_queue = priv->tx_queue[0];
tx_queue          271 drivers/net/ethernet/freescale/gianfar_ethtool.c 	txtime  = get_ictt_value(tx_queue->txic);
tx_queue          272 drivers/net/ethernet/freescale/gianfar_ethtool.c 	txcount = get_icft_value(tx_queue->txic);
tx_queue          376 drivers/net/ethernet/freescale/gianfar_ethtool.c 			priv->tx_queue[i]->txcoalescing = 0;
tx_queue          379 drivers/net/ethernet/freescale/gianfar_ethtool.c 			priv->tx_queue[i]->txcoalescing = 1;
tx_queue          383 drivers/net/ethernet/freescale/gianfar_ethtool.c 		priv->tx_queue[i]->txic = mk_ic_value(
tx_queue          407 drivers/net/ethernet/freescale/gianfar_ethtool.c 	struct gfar_priv_tx_q *tx_queue = NULL;
tx_queue          410 drivers/net/ethernet/freescale/gianfar_ethtool.c 	tx_queue = priv->tx_queue[0];
tx_queue          424 drivers/net/ethernet/freescale/gianfar_ethtool.c 	rvals->tx_pending = tx_queue->tx_ring_size;
tx_queue          463 drivers/net/ethernet/freescale/gianfar_ethtool.c 		priv->tx_queue[i]->tx_ring_size = rvals->tx_pending;
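
The gianfar lines show how the xmit and clean paths coordinate through a single per-queue counter: xmit checks nr_txbds against num_txbdfree, reserves descriptors under a short txlock (lines 2002-2005) and stops the netdev queue when none remain, while gfar_clean_tx_ring() returns them under the same lock (lines 2275-2277) and wakes the queue. A sketch of just that accounting, with illustrative types (spin_lock_init() assumed at setup, as at line 412):

	#include <linux/spinlock.h>

	struct my_tx_q {
		spinlock_t txlock;
		unsigned int num_txbdfree;
	};

	/* xmit side: reserve nr_txbds and tell the caller whether the ring is full */
	static bool my_tx_reserve(struct my_tx_q *q, unsigned int nr_txbds)
	{
		bool full;

		spin_lock_bh(&q->txlock);
		q->num_txbdfree -= nr_txbds;
		full = !q->num_txbdfree;
		spin_unlock_bh(&q->txlock);
		return full;		/* caller stops the netdev queue when true */
	}

	/* completion side: return descriptors; the caller may then wake the queue */
	static void my_tx_release(struct my_tx_q *q, unsigned int nr_txbds)
	{
		spin_lock(&q->txlock);
		q->num_txbdfree += nr_txbds;
		spin_unlock(&q->txlock);
	}
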
tx_queue          113 drivers/net/ethernet/fujitsu/fmvj18x_cs.c     uint tx_queue;
tx_queue          748 drivers/net/ethernet/fujitsu/fmvj18x_cs.c 	if (lp->tx_queue) {
tx_queue          749 drivers/net/ethernet/fujitsu/fmvj18x_cs.c 	    outb(DO_TX | lp->tx_queue, ioaddr + TX_START);
tx_queue          750 drivers/net/ethernet/fujitsu/fmvj18x_cs.c 	    lp->sent = lp->tx_queue ;
tx_queue          751 drivers/net/ethernet/fujitsu/fmvj18x_cs.c 	    lp->tx_queue = 0;
tx_queue          798 drivers/net/ethernet/fujitsu/fmvj18x_cs.c     lp->tx_queue = 0;
tx_queue          844 drivers/net/ethernet/fujitsu/fmvj18x_cs.c 	lp->tx_queue++;
tx_queue          849 drivers/net/ethernet/fujitsu/fmvj18x_cs.c 	    outb(DO_TX | lp->tx_queue, ioaddr + TX_START);
tx_queue          850 drivers/net/ethernet/fujitsu/fmvj18x_cs.c 	    lp->sent = lp->tx_queue ;
tx_queue          851 drivers/net/ethernet/fujitsu/fmvj18x_cs.c 	    lp->tx_queue = 0;
tx_queue          862 drivers/net/ethernet/fujitsu/fmvj18x_cs.c 						lp->tx_queue < 127 )
tx_queue         1078 drivers/net/ethernet/fujitsu/fmvj18x_cs.c     lp->tx_queue = 0;
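
In fmvj18x_cs, tx_queue is just a counter of frames already loaded into the card's FIFO but not yet started: when the transmitter is idle, the count is written to TX_START together with DO_TX, remembered in ->sent, and reset (lines 748-751 and 849-851). A sketch of that kick with an illustrative private struct (DO_TX and TX_START are the driver's constants, passed in here as parameters):

	#include <linux/io.h>

	struct fjn_like_priv {
		unsigned int tx_queue;	/* frames in the FIFO, not yet started */
		unsigned int sent;
	};

	static void fjn_like_kick(struct fjn_like_priv *lp, unsigned long ioaddr,
				  unsigned int do_tx_cmd, unsigned int tx_start_reg)
	{
		if (lp->tx_queue) {
			outb(do_tx_cmd | lp->tx_queue, ioaddr + tx_start_reg);
			lp->sent = lp->tx_queue;
			lp->tx_queue = 0;
		}
	}
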
tx_queue          795 drivers/net/ethernet/intel/igb/igb_main.c 	int tx_queue = IGB_N0_QUEUE;
tx_queue          801 drivers/net/ethernet/intel/igb/igb_main.c 		tx_queue = q_vector->tx.ring->reg_idx;
tx_queue          812 drivers/net/ethernet/intel/igb/igb_main.c 		if (tx_queue > IGB_N0_QUEUE)
tx_queue          813 drivers/net/ethernet/intel/igb/igb_main.c 			msixbm |= E1000_EICR_TX_QUEUE0 << tx_queue;
tx_queue          829 drivers/net/ethernet/intel/igb/igb_main.c 		if (tx_queue > IGB_N0_QUEUE)
tx_queue          831 drivers/net/ethernet/intel/igb/igb_main.c 				       tx_queue & 0x7,
tx_queue          832 drivers/net/ethernet/intel/igb/igb_main.c 				       ((tx_queue & 0x8) << 1) + 8);
tx_queue          850 drivers/net/ethernet/intel/igb/igb_main.c 		if (tx_queue > IGB_N0_QUEUE)
tx_queue          852 drivers/net/ethernet/intel/igb/igb_main.c 				       tx_queue >> 1,
tx_queue          853 drivers/net/ethernet/intel/igb/igb_main.c 				       ((tx_queue & 0x1) << 4) + 8);
tx_queue          922 drivers/net/ethernet/intel/igbvf/netdev.c 				int tx_queue, int msix_vector)
tx_queue          947 drivers/net/ethernet/intel/igbvf/netdev.c 	if (tx_queue > IGBVF_NO_QUEUE) {
tx_queue          948 drivers/net/ethernet/intel/igbvf/netdev.c 		index = (tx_queue >> 1);
tx_queue          950 drivers/net/ethernet/intel/igbvf/netdev.c 		if (tx_queue & 0x1) {
tx_queue          959 drivers/net/ethernet/intel/igbvf/netdev.c 		adapter->tx_ring[tx_queue].eims_value = BIT(msix_vector);
tx_queue         2596 drivers/net/ethernet/intel/igc/igc_main.c 	int tx_queue = IGC_N0_QUEUE;
tx_queue         2601 drivers/net/ethernet/intel/igc/igc_main.c 		tx_queue = q_vector->tx.ring->reg_idx;
tx_queue         2609 drivers/net/ethernet/intel/igc/igc_main.c 		if (tx_queue > IGC_N0_QUEUE)
tx_queue         2611 drivers/net/ethernet/intel/igc/igc_main.c 				       tx_queue >> 1,
tx_queue         2612 drivers/net/ethernet/intel/igc/igc_main.c 				       ((tx_queue & 0x1) << 4) + 8);
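
For the Intel drivers, tx_queue is only the ring's register index used while wiring MSI-X vectors: on the oldest family it selects a bit in a bitmap (E1000_EICR_TX_QUEUE0 << tx_queue, line 813), and on newer families it selects which IVAR register and which byte lane inside it receives the vector. A sketch that reproduces just the index/offset arithmetic from the listing (register writes omitted; the family labels are approximate):

	/* one family variant, cf. igb_main.c:831-832 */
	static void ivar_slot_a(int tx_queue, int *index, int *offset)
	{
		*index  = tx_queue & 0x7;
		*offset = ((tx_queue & 0x8) << 1) + 8;
	}

	/* later-family variant, cf. igb_main.c:852-853 and igc_main.c:2611-2612 */
	static void ivar_slot_b(int tx_queue, int *index, int *offset)
	{
		*index  = tx_queue >> 1;
		*offset = ((tx_queue & 0x1) << 4) + 8;
	}
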
tx_queue          407 drivers/net/ethernet/marvell/mv643xx_eth.c 	struct tx_queue txq[8];
tx_queue          445 drivers/net/ethernet/marvell/mv643xx_eth.c static struct mv643xx_eth_private *txq_to_mp(struct tx_queue *txq)
tx_queue          466 drivers/net/ethernet/marvell/mv643xx_eth.c static void txq_reset_hw_ptr(struct tx_queue *txq)
tx_queue          476 drivers/net/ethernet/marvell/mv643xx_eth.c static void txq_enable(struct tx_queue *txq)
tx_queue          482 drivers/net/ethernet/marvell/mv643xx_eth.c static void txq_disable(struct tx_queue *txq)
tx_queue          492 drivers/net/ethernet/marvell/mv643xx_eth.c static void txq_maybe_wake(struct tx_queue *txq)
tx_queue          730 drivers/net/ethernet/marvell/mv643xx_eth.c txq_put_data_tso(struct net_device *dev, struct tx_queue *txq,
tx_queue          778 drivers/net/ethernet/marvell/mv643xx_eth.c txq_put_hdr_tso(struct sk_buff *skb, struct tx_queue *txq, int length,
tx_queue          820 drivers/net/ethernet/marvell/mv643xx_eth.c static int txq_submit_tso(struct tx_queue *txq, struct sk_buff *skb,
tx_queue          894 drivers/net/ethernet/marvell/mv643xx_eth.c static void txq_submit_frag_skb(struct tx_queue *txq, struct sk_buff *skb)
tx_queue          932 drivers/net/ethernet/marvell/mv643xx_eth.c static int txq_submit_skb(struct tx_queue *txq, struct sk_buff *skb,
tx_queue         1000 drivers/net/ethernet/marvell/mv643xx_eth.c 	struct tx_queue *txq;
tx_queue         1035 drivers/net/ethernet/marvell/mv643xx_eth.c static void txq_kick(struct tx_queue *txq)
tx_queue         1060 drivers/net/ethernet/marvell/mv643xx_eth.c static int txq_reclaim(struct tx_queue *txq, int budget, int force)
tx_queue         1168 drivers/net/ethernet/marvell/mv643xx_eth.c static void txq_set_rate(struct tx_queue *txq, int rate, int burst)
tx_queue         1186 drivers/net/ethernet/marvell/mv643xx_eth.c static void txq_set_fixed_prio_mode(struct tx_queue *txq)
tx_queue         1265 drivers/net/ethernet/marvell/mv643xx_eth.c 		struct tx_queue *txq = mp->txq + i;
tx_queue         2030 drivers/net/ethernet/marvell/mv643xx_eth.c 	struct tx_queue *txq = mp->txq + index;
tx_queue         2116 drivers/net/ethernet/marvell/mv643xx_eth.c static void txq_deinit(struct tx_queue *txq)
tx_queue         2206 drivers/net/ethernet/marvell/mv643xx_eth.c 				struct tx_queue *txq = mp->txq + i;
tx_queue         2349 drivers/net/ethernet/marvell/mv643xx_eth.c 		struct tx_queue *txq = mp->txq + i;
tx_queue         1742 drivers/net/ethernet/mellanox/mlx4/en_netdev.c 				tx_ring->tx_queue = netdev_get_tx_queue(dev, i);
tx_queue          386 drivers/net/ethernet/mellanox/mlx4/en_tx.c 	if (ring->tx_queue)
tx_queue          387 drivers/net/ethernet/mellanox/mlx4/en_tx.c 		netdev_tx_reset_queue(ring->tx_queue);
tx_queue          420 drivers/net/ethernet/mellanox/mlx4/en_tx.c 	netdev_txq_bql_complete_prefetchw(ring->tx_queue);
tx_queue          497 drivers/net/ethernet/mellanox/mlx4/en_tx.c 	netdev_tx_completed_queue(ring->tx_queue, packets, bytes);
tx_queue          501 drivers/net/ethernet/mellanox/mlx4/en_tx.c 	if (netif_tx_queue_stopped(ring->tx_queue) &&
tx_queue          503 drivers/net/ethernet/mellanox/mlx4/en_tx.c 		netif_tx_wake_queue(ring->tx_queue);
tx_queue          899 drivers/net/ethernet/mellanox/mlx4/en_tx.c 	netdev_txq_bql_enqueue_prefetchw(ring->tx_queue);
tx_queue         1049 drivers/net/ethernet/mellanox/mlx4/en_tx.c 		netif_tx_stop_queue(ring->tx_queue);
tx_queue         1053 drivers/net/ethernet/mellanox/mlx4/en_tx.c 	send_doorbell = __netdev_tx_sent_queue(ring->tx_queue,
tx_queue         1080 drivers/net/ethernet/mellanox/mlx4/en_tx.c 			netif_tx_wake_queue(ring->tx_queue);
tx_queue          283 drivers/net/ethernet/mellanox/mlx4/mlx4_en.h 	struct netdev_queue	*tx_queue;
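
mlx4_en caches the netdev_queue pointer in the ring and drives byte queue limits from both directions: the xmit path stops the queue when the ring is nearly full and lets __netdev_tx_sent_queue() fold BQL accounting together with the xmit_more hint to decide whether to ring the doorbell now (lines 1049-1053); the completion path mirrors the alx-style completed/wake handling shown earlier. A sketch of the xmit side (ring_full and nr_bytes are illustrative stand-ins for the driver's ring state):

	#include <linux/netdevice.h>

	static bool tx_enqueue_bql(struct netdev_queue *txq, bool ring_full,
				   unsigned int nr_bytes, bool xmit_more)
	{
		if (ring_full)
			netif_tx_stop_queue(txq);	/* cf. en_tx.c:1049 */

		/* true means: ring the doorbell now rather than deferring */
		return __netdev_tx_sent_queue(txq, nr_bytes,
					      xmit_more && !ring_full);
	}
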
tx_queue           67 drivers/net/ethernet/qlogic/qed/qed_ll2.c #define QED_LL2_TX_REGISTERED(ll2)	((ll2)->tx_queue.b_cb_registered)
tx_queue          309 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	p_tx = &p_ll2_conn->tx_queue;
tx_queue          349 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	struct qed_ll2_tx_queue *p_tx = &p_ll2_conn->tx_queue;
tx_queue          866 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	struct qed_ll2_tx_queue *p_tx = &p_ll2_conn->tx_queue;
tx_queue         1006 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	struct qed_ll2_tx_queue *p_tx = &p_ll2_conn->tx_queue;
tx_queue         1127 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	struct qed_ll2_tx_queue *p_tx = &p_ll2_conn->tx_queue;
tx_queue         1218 drivers/net/ethernet/qlogic/qed/qed_ll2.c 			     &p_ll2_info->tx_queue.txq_chain, NULL);
tx_queue         1222 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	capacity = qed_chain_get_capacity(&p_ll2_info->tx_queue.txq_chain);
tx_queue         1233 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	p_ll2_info->tx_queue.descq_mem = p_descq;
tx_queue         1432 drivers/net/ethernet/qlogic/qed/qed_ll2.c 				    &p_ll2_info->tx_queue.tx_sb_index,
tx_queue         1433 drivers/net/ethernet/qlogic/qed/qed_ll2.c 				    &p_ll2_info->tx_queue.p_fw_cons);
tx_queue         1434 drivers/net/ethernet/qlogic/qed/qed_ll2.c 		p_ll2_info->tx_queue.b_cb_registered = true;
tx_queue         1502 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	p_tx = &p_ll2_conn->tx_queue;
tx_queue         1712 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	struct qed_chain *p_tx_chain = &p_ll2->tx_queue.txq_chain;
tx_queue         1778 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	if (p_ll2->tx_queue.cur_send_frag_num == pkt->num_of_bds)
tx_queue         1782 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	for (frag_idx = p_ll2->tx_queue.cur_send_frag_num;
tx_queue         1798 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	bool b_notify = p_ll2_conn->tx_queue.cur_send_packet->notify_fw;
tx_queue         1799 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	struct qed_ll2_tx_queue *p_tx = &p_ll2_conn->tx_queue;
tx_queue         1804 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	if (p_ll2_conn->tx_queue.cur_send_frag_num !=
tx_queue         1805 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	    p_ll2_conn->tx_queue.cur_send_packet->bd_used)
tx_queue         1809 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	list_add_tail(&p_ll2_conn->tx_queue.cur_send_packet->list_entry,
tx_queue         1810 drivers/net/ethernet/qlogic/qed/qed_ll2.c 		      &p_ll2_conn->tx_queue.sending_descq);
tx_queue         1811 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	p_ll2_conn->tx_queue.cur_send_packet = NULL;
tx_queue         1812 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	p_ll2_conn->tx_queue.cur_send_frag_num = 0;
tx_queue         1818 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	bd_prod = qed_chain_get_prod_idx(&p_ll2_conn->tx_queue.txq_chain);
tx_queue         1860 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	p_tx = &p_ll2_conn->tx_queue;
tx_queue         1911 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	if (!p_ll2_conn->tx_queue.cur_send_packet)
tx_queue         1914 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	p_cur_send_packet = p_ll2_conn->tx_queue.cur_send_packet;
tx_queue         1915 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	cur_send_frag_num = p_ll2_conn->tx_queue.cur_send_frag_num;
tx_queue         1927 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	p_ll2_conn->tx_queue.cur_send_frag_num++;
tx_queue         1929 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	spin_lock_irqsave(&p_ll2_conn->tx_queue.lock, flags);
tx_queue         1931 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	spin_unlock_irqrestore(&p_ll2_conn->tx_queue.lock, flags);
tx_queue         1955 drivers/net/ethernet/qlogic/qed/qed_ll2.c 		p_ll2_conn->tx_queue.b_cb_registered = false;
tx_queue         1962 drivers/net/ethernet/qlogic/qed/qed_ll2.c 		qed_int_unregister_cb(p_hwfn, p_ll2_conn->tx_queue.tx_sb_index);
tx_queue         2022 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	kfree(p_ll2_conn->tx_queue.descq_mem);
tx_queue         2023 drivers/net/ethernet/qlogic/qed/qed_ll2.c 	qed_chain_free(p_hwfn->cdev, &p_ll2_conn->tx_queue.txq_chain);
tx_queue          127 drivers/net/ethernet/qlogic/qed/qed_ll2.h 	struct qed_ll2_tx_queue tx_queue;
tx_queue          916 drivers/net/ethernet/sfc/ef10.c 	struct efx_tx_queue *tx_queue;
tx_queue          951 drivers/net/ethernet/sfc/ef10.c 		efx_for_each_channel_tx_queue(tx_queue, channel) {
tx_queue          957 drivers/net/ethernet/sfc/ef10.c 				   tx_queue->channel->channel - 1) *
tx_queue          967 drivers/net/ethernet/sfc/ef10.c 			if (tx_queue->queue == nic_data->pio_write_vi_base) {
tx_queue          976 drivers/net/ethernet/sfc/ef10.c 					       tx_queue->queue);
tx_queue          988 drivers/net/ethernet/sfc/ef10.c 					  tx_queue->queue, index, rc);
tx_queue          989 drivers/net/ethernet/sfc/ef10.c 				tx_queue->piobuf = NULL;
tx_queue          991 drivers/net/ethernet/sfc/ef10.c 				tx_queue->piobuf =
tx_queue          994 drivers/net/ethernet/sfc/ef10.c 				tx_queue->piobuf_offset = offset;
tx_queue          997 drivers/net/ethernet/sfc/ef10.c 					  tx_queue->queue, index,
tx_queue          998 drivers/net/ethernet/sfc/ef10.c 					  tx_queue->piobuf_offset,
tx_queue          999 drivers/net/ethernet/sfc/ef10.c 					  tx_queue->piobuf);
tx_queue         1024 drivers/net/ethernet/sfc/ef10.c 	struct efx_tx_queue *tx_queue;
tx_queue         1028 drivers/net/ethernet/sfc/ef10.c 		efx_for_each_channel_tx_queue(tx_queue, channel)
tx_queue         1029 drivers/net/ethernet/sfc/ef10.c 			tx_queue->piobuf = NULL;
tx_queue         2298 drivers/net/ethernet/sfc/ef10.c static int efx_ef10_tx_probe(struct efx_tx_queue *tx_queue)
tx_queue         2300 drivers/net/ethernet/sfc/ef10.c 	return efx_nic_alloc_buffer(tx_queue->efx, &tx_queue->txd.buf,
tx_queue         2301 drivers/net/ethernet/sfc/ef10.c 				    (tx_queue->ptr_mask + 1) *
tx_queue         2307 drivers/net/ethernet/sfc/ef10.c static inline void efx_ef10_push_tx_desc(struct efx_tx_queue *tx_queue,
tx_queue         2313 drivers/net/ethernet/sfc/ef10.c 	write_ptr = tx_queue->write_count & tx_queue->ptr_mask;
tx_queue         2316 drivers/net/ethernet/sfc/ef10.c 	efx_writeo_page(tx_queue->efx, &reg,
tx_queue         2317 drivers/net/ethernet/sfc/ef10.c 			ER_DZ_TX_DESC_UPD, tx_queue->queue);
tx_queue         2322 drivers/net/ethernet/sfc/ef10.c static int efx_ef10_tx_tso_desc(struct efx_tx_queue *tx_queue,
tx_queue         2334 drivers/net/ethernet/sfc/ef10.c 	EFX_WARN_ON_ONCE_PARANOID(tx_queue->tso_version != 2);
tx_queue         2360 drivers/net/ethernet/sfc/ef10.c 	buffer = efx_tx_queue_get_insert_buffer(tx_queue);
tx_queue         2373 drivers/net/ethernet/sfc/ef10.c 	++tx_queue->insert_count;
tx_queue         2375 drivers/net/ethernet/sfc/ef10.c 	buffer = efx_tx_queue_get_insert_buffer(tx_queue);
tx_queue         2387 drivers/net/ethernet/sfc/ef10.c 	++tx_queue->insert_count;
tx_queue         2406 drivers/net/ethernet/sfc/ef10.c static void efx_ef10_tx_init(struct efx_tx_queue *tx_queue)
tx_queue         2410 drivers/net/ethernet/sfc/ef10.c 	bool csum_offload = tx_queue->queue & EFX_TXQ_TYPE_OFFLOAD;
tx_queue         2411 drivers/net/ethernet/sfc/ef10.c 	size_t entries = tx_queue->txd.buf.len / EFX_BUF_SIZE;
tx_queue         2412 drivers/net/ethernet/sfc/ef10.c 	struct efx_channel *channel = tx_queue->channel;
tx_queue         2413 drivers/net/ethernet/sfc/ef10.c 	struct efx_nic *efx = tx_queue->efx;
tx_queue         2428 drivers/net/ethernet/sfc/ef10.c 		tx_queue->timestamping = false;
tx_queue         2441 drivers/net/ethernet/sfc/ef10.c 	    !tx_queue->timestamping) {
tx_queue         2447 drivers/net/ethernet/sfc/ef10.c 	MCDI_SET_DWORD(inbuf, INIT_TXQ_IN_SIZE, tx_queue->ptr_mask + 1);
tx_queue         2449 drivers/net/ethernet/sfc/ef10.c 	MCDI_SET_DWORD(inbuf, INIT_TXQ_IN_LABEL, tx_queue->queue);
tx_queue         2450 drivers/net/ethernet/sfc/ef10.c 	MCDI_SET_DWORD(inbuf, INIT_TXQ_IN_INSTANCE, tx_queue->queue);
tx_queue         2454 drivers/net/ethernet/sfc/ef10.c 	dma_addr = tx_queue->txd.buf.dma_addr;
tx_queue         2457 drivers/net/ethernet/sfc/ef10.c 		  tx_queue->queue, entries, (u64)dma_addr);
tx_queue         2476 drivers/net/ethernet/sfc/ef10.c 						tx_queue->timestamping);
tx_queue         2499 drivers/net/ethernet/sfc/ef10.c 	tx_queue->buffer[0].flags = EFX_TX_BUF_OPTION;
tx_queue         2500 drivers/net/ethernet/sfc/ef10.c 	tx_queue->insert_count = 1;
tx_queue         2501 drivers/net/ethernet/sfc/ef10.c 	txd = efx_tx_desc(tx_queue, 0);
tx_queue         2508 drivers/net/ethernet/sfc/ef10.c 			     ESF_DZ_TX_TIMESTAMP, tx_queue->timestamping);
tx_queue         2509 drivers/net/ethernet/sfc/ef10.c 	tx_queue->write_count = 1;
tx_queue         2512 drivers/net/ethernet/sfc/ef10.c 		tx_queue->handle_tso = efx_ef10_tx_tso_desc;
tx_queue         2513 drivers/net/ethernet/sfc/ef10.c 		tx_queue->tso_version = 2;
tx_queue         2516 drivers/net/ethernet/sfc/ef10.c 		tx_queue->tso_version = 1;
tx_queue         2520 drivers/net/ethernet/sfc/ef10.c 	efx_ef10_push_tx_desc(tx_queue, txd);
tx_queue         2526 drivers/net/ethernet/sfc/ef10.c 		    tx_queue->queue);
tx_queue         2529 drivers/net/ethernet/sfc/ef10.c static void efx_ef10_tx_fini(struct efx_tx_queue *tx_queue)
tx_queue         2533 drivers/net/ethernet/sfc/ef10.c 	struct efx_nic *efx = tx_queue->efx;
tx_queue         2538 drivers/net/ethernet/sfc/ef10.c 		       tx_queue->queue);
tx_queue         2553 drivers/net/ethernet/sfc/ef10.c static void efx_ef10_tx_remove(struct efx_tx_queue *tx_queue)
tx_queue         2555 drivers/net/ethernet/sfc/ef10.c 	efx_nic_free_buffer(tx_queue->efx, &tx_queue->txd.buf);
tx_queue         2559 drivers/net/ethernet/sfc/ef10.c static inline void efx_ef10_notify_tx_desc(struct efx_tx_queue *tx_queue)
tx_queue         2564 drivers/net/ethernet/sfc/ef10.c 	write_ptr = tx_queue->write_count & tx_queue->ptr_mask;
tx_queue         2566 drivers/net/ethernet/sfc/ef10.c 	efx_writed_page(tx_queue->efx, &reg,
tx_queue         2567 drivers/net/ethernet/sfc/ef10.c 			ER_DZ_TX_DESC_UPD_DWORD, tx_queue->queue);
tx_queue         2572 drivers/net/ethernet/sfc/ef10.c static unsigned int efx_ef10_tx_limit_len(struct efx_tx_queue *tx_queue,
tx_queue         2589 drivers/net/ethernet/sfc/ef10.c static void efx_ef10_tx_write(struct efx_tx_queue *tx_queue)
tx_queue         2591 drivers/net/ethernet/sfc/ef10.c 	unsigned int old_write_count = tx_queue->write_count;
tx_queue         2596 drivers/net/ethernet/sfc/ef10.c 	tx_queue->xmit_more_available = false;
tx_queue         2597 drivers/net/ethernet/sfc/ef10.c 	if (unlikely(tx_queue->write_count == tx_queue->insert_count))
tx_queue         2601 drivers/net/ethernet/sfc/ef10.c 		write_ptr = tx_queue->write_count & tx_queue->ptr_mask;
tx_queue         2602 drivers/net/ethernet/sfc/ef10.c 		buffer = &tx_queue->buffer[write_ptr];
tx_queue         2603 drivers/net/ethernet/sfc/ef10.c 		txd = efx_tx_desc(tx_queue, write_ptr);
tx_queue         2604 drivers/net/ethernet/sfc/ef10.c 		++tx_queue->write_count;
tx_queue         2611 drivers/net/ethernet/sfc/ef10.c 				tx_queue->packet_write_count = tx_queue->write_count;
tx_queue         2613 drivers/net/ethernet/sfc/ef10.c 			tx_queue->packet_write_count = tx_queue->write_count;
tx_queue         2622 drivers/net/ethernet/sfc/ef10.c 	} while (tx_queue->write_count != tx_queue->insert_count);
tx_queue         2626 drivers/net/ethernet/sfc/ef10.c 	if (efx_nic_may_push_tx_desc(tx_queue, old_write_count)) {
tx_queue         2627 drivers/net/ethernet/sfc/ef10.c 		txd = efx_tx_desc(tx_queue,
tx_queue         2628 drivers/net/ethernet/sfc/ef10.c 				  old_write_count & tx_queue->ptr_mask);
tx_queue         2629 drivers/net/ethernet/sfc/ef10.c 		efx_ef10_push_tx_desc(tx_queue, txd);
tx_queue         2630 drivers/net/ethernet/sfc/ef10.c 		++tx_queue->pushes;
tx_queue         2632 drivers/net/ethernet/sfc/ef10.c 		efx_ef10_notify_tx_desc(tx_queue);
tx_queue         3687 drivers/net/ethernet/sfc/ef10.c 	struct efx_tx_queue *tx_queue;
tx_queue         3701 drivers/net/ethernet/sfc/ef10.c 	tx_queue = efx_channel_get_tx_queue(channel,
tx_queue         3704 drivers/net/ethernet/sfc/ef10.c 	if (!tx_queue->timestamping) {
tx_queue         3707 drivers/net/ethernet/sfc/ef10.c 		efx_xmit_done(tx_queue, tx_ev_desc_ptr & tx_queue->ptr_mask);
tx_queue         3728 drivers/net/ethernet/sfc/ef10.c 		if (tx_queue->completed_desc_ptr != tx_queue->ptr_mask)
tx_queue         3729 drivers/net/ethernet/sfc/ef10.c 			efx_xmit_done(tx_queue, tx_queue->completed_desc_ptr);
tx_queue         3733 drivers/net/ethernet/sfc/ef10.c 		tx_queue->completed_desc_ptr =
tx_queue         3734 drivers/net/ethernet/sfc/ef10.c 					tx_ev_desc_ptr & tx_queue->ptr_mask;
tx_queue         3739 drivers/net/ethernet/sfc/ef10.c 		tx_queue->completed_timestamp_minor = ts_part;
tx_queue         3744 drivers/net/ethernet/sfc/ef10.c 		tx_queue->completed_timestamp_major = ts_part;
tx_queue         3746 drivers/net/ethernet/sfc/ef10.c 		efx_xmit_done(tx_queue, tx_queue->completed_desc_ptr);
tx_queue         3747 drivers/net/ethernet/sfc/ef10.c 		tx_queue->completed_desc_ptr = tx_queue->ptr_mask;
tx_queue         3959 drivers/net/ethernet/sfc/ef10.c 	struct efx_tx_queue *tx_queue;
tx_queue         3976 drivers/net/ethernet/sfc/ef10.c 			efx_for_each_channel_tx_queue(tx_queue, channel)
tx_queue         3977 drivers/net/ethernet/sfc/ef10.c 				efx_ef10_tx_fini(tx_queue);
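
The sfc/ef10 lines (and the falcon farch lines further down) share one descriptor write-out shape: efx_ef10_tx_write() walks from write_count to insert_count, emits one descriptor per buffer, and then either pushes the first descriptor with a doorbell write or just notifies the NIC of the new write pointer. A sketch of that loop with illustrative types and callbacks, not the sfc API:

	struct my_efx_txq {
		unsigned int write_count;
		unsigned int insert_count;
		unsigned int ptr_mask;		/* ring size - 1 */
	};

	static void my_tx_write(struct my_efx_txq *txq,
				void (*write_desc)(struct my_efx_txq *, unsigned int),
				void (*notify_hw)(struct my_efx_txq *))
	{
		if (txq->write_count == txq->insert_count)
			return;			/* nothing new to publish */

		do {
			write_desc(txq, txq->write_count & txq->ptr_mask);
			++txq->write_count;
		} while (txq->write_count != txq->insert_count);

		notify_hw(txq);			/* doorbell / write-pointer update */
	}
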
tx_queue          263 drivers/net/ethernet/sfc/efx.c 	struct efx_tx_queue *tx_queue;
tx_queue          275 drivers/net/ethernet/sfc/efx.c 	efx_for_each_channel_tx_queue(tx_queue, channel) {
tx_queue          276 drivers/net/ethernet/sfc/efx.c 		tx_queue->pkts_compl = 0;
tx_queue          277 drivers/net/ethernet/sfc/efx.c 		tx_queue->bytes_compl = 0;
tx_queue          290 drivers/net/ethernet/sfc/efx.c 	efx_for_each_channel_tx_queue(tx_queue, channel) {
tx_queue          291 drivers/net/ethernet/sfc/efx.c 		if (tx_queue->bytes_compl) {
tx_queue          292 drivers/net/ethernet/sfc/efx.c 			netdev_tx_completed_queue(tx_queue->core_txq,
tx_queue          293 drivers/net/ethernet/sfc/efx.c 				tx_queue->pkts_compl, tx_queue->bytes_compl);
tx_queue          465 drivers/net/ethernet/sfc/efx.c 	struct efx_tx_queue *tx_queue;
tx_queue          477 drivers/net/ethernet/sfc/efx.c 		tx_queue = &channel->tx_queue[j];
tx_queue          478 drivers/net/ethernet/sfc/efx.c 		tx_queue->efx = efx;
tx_queue          479 drivers/net/ethernet/sfc/efx.c 		tx_queue->queue = i * EFX_TXQ_TYPES + j;
tx_queue          480 drivers/net/ethernet/sfc/efx.c 		tx_queue->channel = channel;
tx_queue          502 drivers/net/ethernet/sfc/efx.c 	struct efx_tx_queue *tx_queue;
tx_queue          518 drivers/net/ethernet/sfc/efx.c 		tx_queue = &channel->tx_queue[j];
tx_queue          519 drivers/net/ethernet/sfc/efx.c 		if (tx_queue->channel)
tx_queue          520 drivers/net/ethernet/sfc/efx.c 			tx_queue->channel = channel;
tx_queue          521 drivers/net/ethernet/sfc/efx.c 		tx_queue->buffer = NULL;
tx_queue          522 drivers/net/ethernet/sfc/efx.c 		tx_queue->cb_page = NULL;
tx_queue          523 drivers/net/ethernet/sfc/efx.c 		memset(&tx_queue->txd, 0, sizeof(tx_queue->txd));
tx_queue          539 drivers/net/ethernet/sfc/efx.c 	struct efx_tx_queue *tx_queue;
tx_queue          554 drivers/net/ethernet/sfc/efx.c 	efx_for_each_channel_tx_queue(tx_queue, channel) {
tx_queue          555 drivers/net/ethernet/sfc/efx.c 		rc = efx_probe_tx_queue(tx_queue);
tx_queue          643 drivers/net/ethernet/sfc/efx.c 	struct efx_tx_queue *tx_queue;
tx_queue          711 drivers/net/ethernet/sfc/efx.c 		efx_for_each_channel_tx_queue(tx_queue, channel) {
tx_queue          712 drivers/net/ethernet/sfc/efx.c 			efx_init_tx_queue(tx_queue);
tx_queue          736 drivers/net/ethernet/sfc/efx.c 	struct efx_tx_queue *tx_queue;
tx_queue          775 drivers/net/ethernet/sfc/efx.c 		efx_for_each_possible_channel_tx_queue(tx_queue, channel)
tx_queue          776 drivers/net/ethernet/sfc/efx.c 			efx_fini_tx_queue(tx_queue);
tx_queue          782 drivers/net/ethernet/sfc/efx.c 	struct efx_tx_queue *tx_queue;
tx_queue          790 drivers/net/ethernet/sfc/efx.c 	efx_for_each_possible_channel_tx_queue(tx_queue, channel)
tx_queue          791 drivers/net/ethernet/sfc/efx.c 		efx_remove_tx_queue(tx_queue);
tx_queue          821 drivers/net/ethernet/sfc/efx.c 		struct efx_tx_queue *tx_queue;
tx_queue          832 drivers/net/ethernet/sfc/efx.c 		efx_for_each_channel_tx_queue(tx_queue, channel)
tx_queue          834 drivers/net/ethernet/sfc/efx.c 						tx_queue->txd.index +
tx_queue          835 drivers/net/ethernet/sfc/efx.c 						tx_queue->txd.entries);
tx_queue         1731 drivers/net/ethernet/sfc/efx.c 	struct efx_tx_queue *tx_queue;
tx_queue         1747 drivers/net/ethernet/sfc/efx.c 		efx_for_each_channel_tx_queue(tx_queue, channel)
tx_queue         1748 drivers/net/ethernet/sfc/efx.c 			tx_queue->queue -= (efx->tx_channel_offset *
tx_queue         2592 drivers/net/ethernet/sfc/efx.c 		struct efx_tx_queue *tx_queue;
tx_queue         2593 drivers/net/ethernet/sfc/efx.c 		efx_for_each_channel_tx_queue(tx_queue, channel)
tx_queue         2594 drivers/net/ethernet/sfc/efx.c 			efx_init_tx_queue_core_txq(tx_queue);
tx_queue           18 drivers/net/ethernet/sfc/efx.h int efx_probe_tx_queue(struct efx_tx_queue *tx_queue);
tx_queue           19 drivers/net/ethernet/sfc/efx.h void efx_remove_tx_queue(struct efx_tx_queue *tx_queue);
tx_queue           20 drivers/net/ethernet/sfc/efx.h void efx_init_tx_queue(struct efx_tx_queue *tx_queue);
tx_queue           21 drivers/net/ethernet/sfc/efx.h void efx_init_tx_queue_core_txq(struct efx_tx_queue *tx_queue);
tx_queue           22 drivers/net/ethernet/sfc/efx.h void efx_fini_tx_queue(struct efx_tx_queue *tx_queue);
tx_queue           25 drivers/net/ethernet/sfc/efx.h netdev_tx_t efx_enqueue_skb(struct efx_tx_queue *tx_queue, struct sk_buff *skb);
tx_queue           26 drivers/net/ethernet/sfc/efx.h void efx_xmit_done(struct efx_tx_queue *tx_queue, unsigned int index);
tx_queue           61 drivers/net/ethernet/sfc/ethtool.c 	EFX_ETHTOOL_STAT(tx_##field, tx_queue, field,		\
tx_queue          277 drivers/net/ethernet/sfc/ethtool.c 	struct efx_tx_queue *tx_queue;
tx_queue          279 drivers/net/ethernet/sfc/ethtool.c 	efx_for_each_channel_tx_queue(tx_queue, channel) {
tx_queue          281 drivers/net/ethernet/sfc/ethtool.c 			      &lb_tests->tx_sent[tx_queue->queue],
tx_queue          282 drivers/net/ethernet/sfc/ethtool.c 			      EFX_TX_QUEUE_NAME(tx_queue),
tx_queue          285 drivers/net/ethernet/sfc/ethtool.c 			      &lb_tests->tx_done[tx_queue->queue],
tx_queue          286 drivers/net/ethernet/sfc/ethtool.c 			      EFX_TX_QUEUE_NAME(tx_queue),
tx_queue          385 drivers/net/ethernet/sfc/ethtool.c 					 channel->tx_queue[0].queue /
tx_queue          457 drivers/net/ethernet/sfc/ethtool.c 	struct efx_tx_queue *tx_queue;
tx_queue          482 drivers/net/ethernet/sfc/ethtool.c 				efx_for_each_channel_tx_queue(tx_queue, channel)
tx_queue          484 drivers/net/ethernet/sfc/ethtool.c 						stat->get_stat((void *)tx_queue
tx_queue          497 drivers/net/ethernet/sfc/ethtool.c 			efx_for_each_channel_tx_queue(tx_queue, channel) {
tx_queue          498 drivers/net/ethernet/sfc/ethtool.c 				*data += tx_queue->tx_packets;
tx_queue          240 drivers/net/ethernet/sfc/falcon/efx.c 	struct ef4_tx_queue *tx_queue;
tx_queue          246 drivers/net/ethernet/sfc/falcon/efx.c 	ef4_for_each_channel_tx_queue(tx_queue, channel) {
tx_queue          247 drivers/net/ethernet/sfc/falcon/efx.c 		tx_queue->pkts_compl = 0;
tx_queue          248 drivers/net/ethernet/sfc/falcon/efx.c 		tx_queue->bytes_compl = 0;
tx_queue          261 drivers/net/ethernet/sfc/falcon/efx.c 	ef4_for_each_channel_tx_queue(tx_queue, channel) {
tx_queue          262 drivers/net/ethernet/sfc/falcon/efx.c 		if (tx_queue->bytes_compl) {
tx_queue          263 drivers/net/ethernet/sfc/falcon/efx.c 			netdev_tx_completed_queue(tx_queue->core_txq,
tx_queue          264 drivers/net/ethernet/sfc/falcon/efx.c 				tx_queue->pkts_compl, tx_queue->bytes_compl);
tx_queue          429 drivers/net/ethernet/sfc/falcon/efx.c 	struct ef4_tx_queue *tx_queue;
tx_queue          441 drivers/net/ethernet/sfc/falcon/efx.c 		tx_queue = &channel->tx_queue[j];
tx_queue          442 drivers/net/ethernet/sfc/falcon/efx.c 		tx_queue->efx = efx;
tx_queue          443 drivers/net/ethernet/sfc/falcon/efx.c 		tx_queue->queue = i * EF4_TXQ_TYPES + j;
tx_queue          444 drivers/net/ethernet/sfc/falcon/efx.c 		tx_queue->channel = channel;
tx_queue          462 drivers/net/ethernet/sfc/falcon/efx.c 	struct ef4_tx_queue *tx_queue;
tx_queue          478 drivers/net/ethernet/sfc/falcon/efx.c 		tx_queue = &channel->tx_queue[j];
tx_queue          479 drivers/net/ethernet/sfc/falcon/efx.c 		if (tx_queue->channel)
tx_queue          480 drivers/net/ethernet/sfc/falcon/efx.c 			tx_queue->channel = channel;
tx_queue          481 drivers/net/ethernet/sfc/falcon/efx.c 		tx_queue->buffer = NULL;
tx_queue          482 drivers/net/ethernet/sfc/falcon/efx.c 		memset(&tx_queue->txd, 0, sizeof(tx_queue->txd));
tx_queue          495 drivers/net/ethernet/sfc/falcon/efx.c 	struct ef4_tx_queue *tx_queue;
tx_queue          510 drivers/net/ethernet/sfc/falcon/efx.c 	ef4_for_each_channel_tx_queue(tx_queue, channel) {
tx_queue          511 drivers/net/ethernet/sfc/falcon/efx.c 		rc = ef4_probe_tx_queue(tx_queue);
tx_queue          597 drivers/net/ethernet/sfc/falcon/efx.c 	struct ef4_tx_queue *tx_queue;
tx_queue          665 drivers/net/ethernet/sfc/falcon/efx.c 		ef4_for_each_channel_tx_queue(tx_queue, channel) {
tx_queue          666 drivers/net/ethernet/sfc/falcon/efx.c 			ef4_init_tx_queue(tx_queue);
tx_queue          688 drivers/net/ethernet/sfc/falcon/efx.c 	struct ef4_tx_queue *tx_queue;
tx_queue          734 drivers/net/ethernet/sfc/falcon/efx.c 		ef4_for_each_possible_channel_tx_queue(tx_queue, channel)
tx_queue          735 drivers/net/ethernet/sfc/falcon/efx.c 			ef4_fini_tx_queue(tx_queue);
tx_queue          741 drivers/net/ethernet/sfc/falcon/efx.c 	struct ef4_tx_queue *tx_queue;
tx_queue          749 drivers/net/ethernet/sfc/falcon/efx.c 	ef4_for_each_possible_channel_tx_queue(tx_queue, channel)
tx_queue          750 drivers/net/ethernet/sfc/falcon/efx.c 		ef4_remove_tx_queue(tx_queue);
tx_queue          780 drivers/net/ethernet/sfc/falcon/efx.c 		struct ef4_tx_queue *tx_queue;
tx_queue          791 drivers/net/ethernet/sfc/falcon/efx.c 		ef4_for_each_channel_tx_queue(tx_queue, channel)
tx_queue          793 drivers/net/ethernet/sfc/falcon/efx.c 						tx_queue->txd.index +
tx_queue          794 drivers/net/ethernet/sfc/falcon/efx.c 						tx_queue->txd.entries);
tx_queue         1596 drivers/net/ethernet/sfc/falcon/efx.c 	struct ef4_tx_queue *tx_queue;
tx_queue         1612 drivers/net/ethernet/sfc/falcon/efx.c 		ef4_for_each_channel_tx_queue(tx_queue, channel)
tx_queue         1613 drivers/net/ethernet/sfc/falcon/efx.c 			tx_queue->queue -= (efx->tx_channel_offset *
tx_queue         2306 drivers/net/ethernet/sfc/falcon/efx.c 		struct ef4_tx_queue *tx_queue;
tx_queue         2307 drivers/net/ethernet/sfc/falcon/efx.c 		ef4_for_each_channel_tx_queue(tx_queue, channel)
tx_queue         2308 drivers/net/ethernet/sfc/falcon/efx.c 			ef4_init_tx_queue_core_txq(tx_queue);
tx_queue           23 drivers/net/ethernet/sfc/falcon/efx.h int ef4_probe_tx_queue(struct ef4_tx_queue *tx_queue);
tx_queue           24 drivers/net/ethernet/sfc/falcon/efx.h void ef4_remove_tx_queue(struct ef4_tx_queue *tx_queue);
tx_queue           25 drivers/net/ethernet/sfc/falcon/efx.h void ef4_init_tx_queue(struct ef4_tx_queue *tx_queue);
tx_queue           26 drivers/net/ethernet/sfc/falcon/efx.h void ef4_init_tx_queue_core_txq(struct ef4_tx_queue *tx_queue);
tx_queue           27 drivers/net/ethernet/sfc/falcon/efx.h void ef4_fini_tx_queue(struct ef4_tx_queue *tx_queue);
tx_queue           30 drivers/net/ethernet/sfc/falcon/efx.h netdev_tx_t ef4_enqueue_skb(struct ef4_tx_queue *tx_queue, struct sk_buff *skb);
tx_queue           31 drivers/net/ethernet/sfc/falcon/efx.h void ef4_xmit_done(struct ef4_tx_queue *tx_queue, unsigned int index);
tx_queue           61 drivers/net/ethernet/sfc/falcon/ethtool.c 	EF4_ETHTOOL_STAT(tx_##field, tx_queue, field,		\
tx_queue          259 drivers/net/ethernet/sfc/falcon/ethtool.c 	struct ef4_tx_queue *tx_queue;
tx_queue          261 drivers/net/ethernet/sfc/falcon/ethtool.c 	ef4_for_each_channel_tx_queue(tx_queue, channel) {
tx_queue          263 drivers/net/ethernet/sfc/falcon/ethtool.c 			      &lb_tests->tx_sent[tx_queue->queue],
tx_queue          264 drivers/net/ethernet/sfc/falcon/ethtool.c 			      EF4_TX_QUEUE_NAME(tx_queue),
tx_queue          267 drivers/net/ethernet/sfc/falcon/ethtool.c 			      &lb_tests->tx_done[tx_queue->queue],
tx_queue          268 drivers/net/ethernet/sfc/falcon/ethtool.c 			      EF4_TX_QUEUE_NAME(tx_queue),
tx_queue          367 drivers/net/ethernet/sfc/falcon/ethtool.c 					 channel->tx_queue[0].queue /
tx_queue          437 drivers/net/ethernet/sfc/falcon/ethtool.c 	struct ef4_tx_queue *tx_queue;
tx_queue          462 drivers/net/ethernet/sfc/falcon/ethtool.c 				ef4_for_each_channel_tx_queue(tx_queue, channel)
tx_queue          464 drivers/net/ethernet/sfc/falcon/ethtool.c 						stat->get_stat((void *)tx_queue
tx_queue          477 drivers/net/ethernet/sfc/falcon/ethtool.c 			ef4_for_each_channel_tx_queue(tx_queue, channel) {
tx_queue          478 drivers/net/ethernet/sfc/falcon/ethtool.c 				*data += tx_queue->tx_packets;
tx_queue          272 drivers/net/ethernet/sfc/falcon/farch.c static inline void ef4_farch_notify_tx_desc(struct ef4_tx_queue *tx_queue)
tx_queue          277 drivers/net/ethernet/sfc/falcon/farch.c 	write_ptr = tx_queue->write_count & tx_queue->ptr_mask;
tx_queue          279 drivers/net/ethernet/sfc/falcon/farch.c 	ef4_writed_page(tx_queue->efx, &reg,
tx_queue          280 drivers/net/ethernet/sfc/falcon/farch.c 			FR_AZ_TX_DESC_UPD_DWORD_P0, tx_queue->queue);
tx_queue          284 drivers/net/ethernet/sfc/falcon/farch.c static inline void ef4_farch_push_tx_desc(struct ef4_tx_queue *tx_queue,
tx_queue          293 drivers/net/ethernet/sfc/falcon/farch.c 	write_ptr = tx_queue->write_count & tx_queue->ptr_mask;
tx_queue          297 drivers/net/ethernet/sfc/falcon/farch.c 	ef4_writeo_page(tx_queue->efx, &reg,
tx_queue          298 drivers/net/ethernet/sfc/falcon/farch.c 			FR_BZ_TX_DESC_UPD_P0, tx_queue->queue);
tx_queue          306 drivers/net/ethernet/sfc/falcon/farch.c void ef4_farch_tx_write(struct ef4_tx_queue *tx_queue)
tx_queue          311 drivers/net/ethernet/sfc/falcon/farch.c 	unsigned old_write_count = tx_queue->write_count;
tx_queue          313 drivers/net/ethernet/sfc/falcon/farch.c 	tx_queue->xmit_more_available = false;
tx_queue          314 drivers/net/ethernet/sfc/falcon/farch.c 	if (unlikely(tx_queue->write_count == tx_queue->insert_count))
tx_queue          318 drivers/net/ethernet/sfc/falcon/farch.c 		write_ptr = tx_queue->write_count & tx_queue->ptr_mask;
tx_queue          319 drivers/net/ethernet/sfc/falcon/farch.c 		buffer = &tx_queue->buffer[write_ptr];
tx_queue          320 drivers/net/ethernet/sfc/falcon/farch.c 		txd = ef4_tx_desc(tx_queue, write_ptr);
tx_queue          321 drivers/net/ethernet/sfc/falcon/farch.c 		++tx_queue->write_count;
tx_queue          333 drivers/net/ethernet/sfc/falcon/farch.c 	} while (tx_queue->write_count != tx_queue->insert_count);
tx_queue          337 drivers/net/ethernet/sfc/falcon/farch.c 	if (ef4_nic_may_push_tx_desc(tx_queue, old_write_count)) {
tx_queue          338 drivers/net/ethernet/sfc/falcon/farch.c 		txd = ef4_tx_desc(tx_queue,
tx_queue          339 drivers/net/ethernet/sfc/falcon/farch.c 				  old_write_count & tx_queue->ptr_mask);
tx_queue          340 drivers/net/ethernet/sfc/falcon/farch.c 		ef4_farch_push_tx_desc(tx_queue, txd);
tx_queue          341 drivers/net/ethernet/sfc/falcon/farch.c 		++tx_queue->pushes;
tx_queue          343 drivers/net/ethernet/sfc/falcon/farch.c 		ef4_farch_notify_tx_desc(tx_queue);
tx_queue          347 drivers/net/ethernet/sfc/falcon/farch.c unsigned int ef4_farch_tx_limit_len(struct ef4_tx_queue *tx_queue,
tx_queue          355 drivers/net/ethernet/sfc/falcon/farch.c 	if (EF4_WORKAROUND_5391(tx_queue->efx) && (dma_addr & 0xf))
tx_queue          363 drivers/net/ethernet/sfc/falcon/farch.c int ef4_farch_tx_probe(struct ef4_tx_queue *tx_queue)
tx_queue          365 drivers/net/ethernet/sfc/falcon/farch.c 	struct ef4_nic *efx = tx_queue->efx;
tx_queue          368 drivers/net/ethernet/sfc/falcon/farch.c 	entries = tx_queue->ptr_mask + 1;
tx_queue          369 drivers/net/ethernet/sfc/falcon/farch.c 	return ef4_alloc_special_buffer(efx, &tx_queue->txd,
tx_queue          373 drivers/net/ethernet/sfc/falcon/farch.c void ef4_farch_tx_init(struct ef4_tx_queue *tx_queue)
tx_queue          375 drivers/net/ethernet/sfc/falcon/farch.c 	struct ef4_nic *efx = tx_queue->efx;
tx_queue          379 drivers/net/ethernet/sfc/falcon/farch.c 	ef4_init_special_buffer(efx, &tx_queue->txd);
tx_queue          386 drivers/net/ethernet/sfc/falcon/farch.c 			      FRF_AZ_TX_DESCQ_BUF_BASE_ID, tx_queue->txd.index,
tx_queue          388 drivers/net/ethernet/sfc/falcon/farch.c 			      tx_queue->channel->channel,
tx_queue          390 drivers/net/ethernet/sfc/falcon/farch.c 			      FRF_AZ_TX_DESCQ_LABEL, tx_queue->queue,
tx_queue          392 drivers/net/ethernet/sfc/falcon/farch.c 			      __ffs(tx_queue->txd.entries),
tx_queue          397 drivers/net/ethernet/sfc/falcon/farch.c 		int csum = tx_queue->queue & EF4_TXQ_TYPE_OFFLOAD;
tx_queue          404 drivers/net/ethernet/sfc/falcon/farch.c 			 tx_queue->queue);
tx_queue          411 drivers/net/ethernet/sfc/falcon/farch.c 		if (tx_queue->queue & EF4_TXQ_TYPE_OFFLOAD)
tx_queue          412 drivers/net/ethernet/sfc/falcon/farch.c 			__clear_bit_le(tx_queue->queue, &reg);
tx_queue          414 drivers/net/ethernet/sfc/falcon/farch.c 			__set_bit_le(tx_queue->queue, &reg);
tx_queue          421 drivers/net/ethernet/sfc/falcon/farch.c 				     (tx_queue->queue & EF4_TXQ_TYPE_HIGHPRI) ?
tx_queue          425 drivers/net/ethernet/sfc/falcon/farch.c 				 tx_queue->queue);
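The descriptor-queue size field programmed during tx_init is encoded as __ffs(entries); for a power-of-two ring that is simply log2 of the entry count. A tiny standalone check of that identity, using the compiler builtin in place of the kernel's __ffs() helper (function name here is invented), could be:

#include <assert.h>
#include <stdio.h>

/* For a power-of-two ring size, the index of the lowest set bit equals
 * log2(size); __builtin_ctz() models the kernel's __ffs() here. */
static unsigned int ring_size_field(unsigned int entries)
{
    assert(entries && (entries & (entries - 1)) == 0);
    return (unsigned int)__builtin_ctz(entries);
}

int main(void)
{
    for (unsigned int entries = 256; entries <= 4096; entries <<= 1)
        printf("entries=%4u -> size field=%u\n", entries,
               ring_size_field(entries));
    return 0;
}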
tx_queue          429 drivers/net/ethernet/sfc/falcon/farch.c static void ef4_farch_flush_tx_queue(struct ef4_tx_queue *tx_queue)
tx_queue          431 drivers/net/ethernet/sfc/falcon/farch.c 	struct ef4_nic *efx = tx_queue->efx;
tx_queue          434 drivers/net/ethernet/sfc/falcon/farch.c 	WARN_ON(atomic_read(&tx_queue->flush_outstanding));
tx_queue          435 drivers/net/ethernet/sfc/falcon/farch.c 	atomic_set(&tx_queue->flush_outstanding, 1);
tx_queue          439 drivers/net/ethernet/sfc/falcon/farch.c 			     FRF_AZ_TX_FLUSH_DESCQ, tx_queue->queue);
tx_queue          443 drivers/net/ethernet/sfc/falcon/farch.c void ef4_farch_tx_fini(struct ef4_tx_queue *tx_queue)
tx_queue          445 drivers/net/ethernet/sfc/falcon/farch.c 	struct ef4_nic *efx = tx_queue->efx;
tx_queue          451 drivers/net/ethernet/sfc/falcon/farch.c 			 tx_queue->queue);
tx_queue          454 drivers/net/ethernet/sfc/falcon/farch.c 	ef4_fini_special_buffer(efx, &tx_queue->txd);
tx_queue          458 drivers/net/ethernet/sfc/falcon/farch.c void ef4_farch_tx_remove(struct ef4_tx_queue *tx_queue)
tx_queue          460 drivers/net/ethernet/sfc/falcon/farch.c 	ef4_free_special_buffer(tx_queue->efx, &tx_queue->txd);
tx_queue          619 drivers/net/ethernet/sfc/falcon/farch.c 	struct ef4_tx_queue *tx_queue;
tx_queue          622 drivers/net/ethernet/sfc/falcon/farch.c 		ef4_for_each_channel_tx_queue(tx_queue, channel) {
tx_queue          624 drivers/net/ethernet/sfc/falcon/farch.c 					FR_BZ_TX_DESC_PTR_TBL, tx_queue->queue);
tx_queue          631 drivers/net/ethernet/sfc/falcon/farch.c 					  tx_queue->queue);
tx_queue          633 drivers/net/ethernet/sfc/falcon/farch.c 			} else if (atomic_cmpxchg(&tx_queue->flush_outstanding,
tx_queue          640 drivers/net/ethernet/sfc/falcon/farch.c 					  "the queue\n", tx_queue->queue);
tx_queue          647 drivers/net/ethernet/sfc/falcon/farch.c 							      tx_queue));
tx_queue          663 drivers/net/ethernet/sfc/falcon/farch.c 	struct ef4_tx_queue *tx_queue;
tx_queue          667 drivers/net/ethernet/sfc/falcon/farch.c 		ef4_for_each_channel_tx_queue(tx_queue, channel) {
tx_queue          668 drivers/net/ethernet/sfc/falcon/farch.c 			ef4_farch_flush_tx_queue(tx_queue);
tx_queue          720 drivers/net/ethernet/sfc/falcon/farch.c 	struct ef4_tx_queue *tx_queue;
tx_queue          736 drivers/net/ethernet/sfc/falcon/farch.c 			ef4_for_each_channel_tx_queue(tx_queue, channel)
tx_queue          737 drivers/net/ethernet/sfc/falcon/farch.c 				ef4_farch_tx_fini(tx_queue);
tx_queue          830 drivers/net/ethernet/sfc/falcon/farch.c 	struct ef4_tx_queue *tx_queue;
tx_queue          841 drivers/net/ethernet/sfc/falcon/farch.c 		tx_queue = ef4_channel_get_tx_queue(
tx_queue          843 drivers/net/ethernet/sfc/falcon/farch.c 		tx_packets = ((tx_ev_desc_ptr - tx_queue->read_count) &
tx_queue          844 drivers/net/ethernet/sfc/falcon/farch.c 			      tx_queue->ptr_mask);
tx_queue          845 drivers/net/ethernet/sfc/falcon/farch.c 		ef4_xmit_done(tx_queue, tx_ev_desc_ptr);
tx_queue          849 drivers/net/ethernet/sfc/falcon/farch.c 		tx_queue = ef4_channel_get_tx_queue(
tx_queue          853 drivers/net/ethernet/sfc/falcon/farch.c 		ef4_farch_notify_tx_desc(tx_queue);
tx_queue         1093 drivers/net/ethernet/sfc/falcon/farch.c 	struct ef4_tx_queue *tx_queue;
tx_queue         1098 drivers/net/ethernet/sfc/falcon/farch.c 		tx_queue = ef4_get_tx_queue(efx, qid / EF4_TXQ_TYPES,
tx_queue         1100 drivers/net/ethernet/sfc/falcon/farch.c 		if (atomic_cmpxchg(&tx_queue->flush_outstanding, 1, 0)) {
tx_queue         1101 drivers/net/ethernet/sfc/falcon/farch.c 			ef4_farch_magic_event(tx_queue->channel,
tx_queue         1102 drivers/net/ethernet/sfc/falcon/farch.c 					      EF4_CHANNEL_MAGIC_TX_DRAIN(tx_queue));
tx_queue          445 drivers/net/ethernet/sfc/falcon/net_driver.h 	struct ef4_tx_queue tx_queue[EF4_TXQ_TYPES];
tx_queue         1082 drivers/net/ethernet/sfc/falcon/net_driver.h 	int (*tx_probe)(struct ef4_tx_queue *tx_queue);
tx_queue         1083 drivers/net/ethernet/sfc/falcon/net_driver.h 	void (*tx_init)(struct ef4_tx_queue *tx_queue);
tx_queue         1084 drivers/net/ethernet/sfc/falcon/net_driver.h 	void (*tx_remove)(struct ef4_tx_queue *tx_queue);
tx_queue         1085 drivers/net/ethernet/sfc/falcon/net_driver.h 	void (*tx_write)(struct ef4_tx_queue *tx_queue);
tx_queue         1086 drivers/net/ethernet/sfc/falcon/net_driver.h 	unsigned int (*tx_limit_len)(struct ef4_tx_queue *tx_queue,
tx_queue         1192 drivers/net/ethernet/sfc/falcon/net_driver.h 	return &efx->channel[efx->tx_channel_offset + index]->tx_queue[type];
tx_queue         1206 drivers/net/ethernet/sfc/falcon/net_driver.h 	return &channel->tx_queue[type];
tx_queue         1209 drivers/net/ethernet/sfc/falcon/net_driver.h static inline bool ef4_tx_queue_used(struct ef4_tx_queue *tx_queue)
tx_queue         1211 drivers/net/ethernet/sfc/falcon/net_driver.h 	return !(tx_queue->efx->net_dev->num_tc < 2 &&
tx_queue         1212 drivers/net/ethernet/sfc/falcon/net_driver.h 		 tx_queue->queue & EF4_TXQ_TYPE_HIGHPRI);
tx_queue         1220 drivers/net/ethernet/sfc/falcon/net_driver.h 		for (_tx_queue = (_channel)->tx_queue;			\
tx_queue         1221 drivers/net/ethernet/sfc/falcon/net_driver.h 		     _tx_queue < (_channel)->tx_queue + EF4_TXQ_TYPES && \
tx_queue         1230 drivers/net/ethernet/sfc/falcon/net_driver.h 		for (_tx_queue = (_channel)->tx_queue;			\
tx_queue         1231 drivers/net/ethernet/sfc/falcon/net_driver.h 		     _tx_queue < (_channel)->tx_queue + EF4_TXQ_TYPES;	\
tx_queue         1310 drivers/net/ethernet/sfc/falcon/net_driver.h ef4_tx_queue_get_insert_index(const struct ef4_tx_queue *tx_queue)
tx_queue         1312 drivers/net/ethernet/sfc/falcon/net_driver.h 	return tx_queue->insert_count & tx_queue->ptr_mask;
tx_queue         1317 drivers/net/ethernet/sfc/falcon/net_driver.h __ef4_tx_queue_get_insert_buffer(const struct ef4_tx_queue *tx_queue)
tx_queue         1319 drivers/net/ethernet/sfc/falcon/net_driver.h 	return &tx_queue->buffer[ef4_tx_queue_get_insert_index(tx_queue)];
tx_queue         1324 drivers/net/ethernet/sfc/falcon/net_driver.h ef4_tx_queue_get_insert_buffer(const struct ef4_tx_queue *tx_queue)
tx_queue         1327 drivers/net/ethernet/sfc/falcon/net_driver.h 		__ef4_tx_queue_get_insert_buffer(tx_queue);
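The net_driver.h helpers above map a (core queue index, queue type) pair to one element of a per-channel tx_queue[EF4_TXQ_TYPES] array, offset by tx_channel_offset. A stripped-down model of that addressing, with the struct layout, channel count, and offset invented purely for illustration, is:

#include <stdio.h>

#define TXQ_TYPES         4   /* illustrative: offload and high-priority variants */
#define NUM_CHANNELS      8
#define TX_CHANNEL_OFFSET 2   /* in this model the first channels carry only RX */

struct tx_queue { int channel; int type; };

struct channel {
    struct tx_queue txq[TXQ_TYPES];
};

static struct channel channels[NUM_CHANNELS];

/* Mirror of the get_tx_queue()-style lookup: the core queue index picks the
 * channel (after the TX channel offset), the type picks the entry within it. */
static struct tx_queue *get_tx_queue(unsigned int index, unsigned int type)
{
    return &channels[TX_CHANNEL_OFFSET + index].txq[type];
}

int main(void)
{
    for (int c = 0; c < NUM_CHANNELS; c++)
        for (int t = 0; t < TXQ_TYPES; t++)
            channels[c].txq[t] = (struct tx_queue){ .channel = c, .type = t };

    struct tx_queue *q = get_tx_queue(1, 2);
    printf("core index 1, type 2 -> channel %d, type %d\n", q->channel, q->type);
    return 0;
}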
tx_queue           63 drivers/net/ethernet/sfc/falcon/nic.h ef4_tx_desc(struct ef4_tx_queue *tx_queue, unsigned int index)
tx_queue           65 drivers/net/ethernet/sfc/falcon/nic.h 	return ((ef4_qword_t *) (tx_queue->txd.buf.addr)) + index;
tx_queue           69 drivers/net/ethernet/sfc/falcon/nic.h static inline struct ef4_tx_queue *ef4_tx_queue_partner(struct ef4_tx_queue *tx_queue)
tx_queue           71 drivers/net/ethernet/sfc/falcon/nic.h 	if (tx_queue->queue & EF4_TXQ_TYPE_OFFLOAD)
tx_queue           72 drivers/net/ethernet/sfc/falcon/nic.h 		return tx_queue - EF4_TXQ_TYPE_OFFLOAD;
tx_queue           74 drivers/net/ethernet/sfc/falcon/nic.h 		return tx_queue + EF4_TXQ_TYPE_OFFLOAD;
tx_queue           80 drivers/net/ethernet/sfc/falcon/nic.h static inline bool __ef4_nic_tx_is_empty(struct ef4_tx_queue *tx_queue,
tx_queue           83 drivers/net/ethernet/sfc/falcon/nic.h 	unsigned int empty_read_count = READ_ONCE(tx_queue->empty_read_count);
tx_queue           99 drivers/net/ethernet/sfc/falcon/nic.h static inline bool ef4_nic_may_push_tx_desc(struct ef4_tx_queue *tx_queue,
tx_queue          102 drivers/net/ethernet/sfc/falcon/nic.h 	bool was_empty = __ef4_nic_tx_is_empty(tx_queue, write_count);
tx_queue          104 drivers/net/ethernet/sfc/falcon/nic.h 	tx_queue->empty_read_count = 0;
tx_queue          105 drivers/net/ethernet/sfc/falcon/nic.h 	return was_empty && tx_queue->write_count - write_count == 1;
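__ef4_nic_tx_is_empty() and ef4_nic_may_push_tx_desc() implement a small handshake with the completion path: when the queue drains, the completion side records read_count with a validity flag, and the transmit side pushes a descriptor inline only if the queue looked empty at the old write count and exactly one descriptor is being added. A userspace model of that check (flag bit placed in the top position as is conventional, but treat the value as illustrative) is:

#include <stdbool.h>
#include <stdio.h>

/* Top bit marks the stored count as valid; the low bits hold the read count
 * at the moment the queue drained. */
#define EMPTY_COUNT_VALID 0x80000000u

struct txq {
    unsigned int read_count;
    unsigned int write_count;
    unsigned int empty_read_count;
};

/* Completion path: remember "the queue was empty when read_count was X". */
static void note_queue_empty(struct txq *q)
{
    q->empty_read_count = q->read_count | EMPTY_COUNT_VALID;
}

/* Was the queue empty when the descriptor at 'old_write' was queued? */
static bool tx_was_empty(const struct txq *q, unsigned int old_write)
{
    unsigned int erc = q->empty_read_count;

    if (erc == 0)
        return false;
    return ((erc ^ old_write) & ~EMPTY_COUNT_VALID) == 0;
}

/* Push a descriptor inline only for a single write onto an empty queue. */
static bool may_push_tx_desc(struct txq *q, unsigned int old_write)
{
    bool was_empty = tx_was_empty(q, old_write);

    q->empty_read_count = 0;
    return was_empty && q->write_count - old_write == 1;
}

int main(void)
{
    struct txq q = { .read_count = 10, .write_count = 10 };
    unsigned int old;

    note_queue_empty(&q);          /* completions caught up with writes */
    old = q.write_count;
    q.write_count += 1;            /* one new descriptor */
    printf("single write on empty queue -> push? %d\n",
           may_push_tx_desc(&q, old));

    old = q.write_count;
    q.write_count += 4;            /* burst, and no fresh "empty" record */
    printf("burst -> push? %d\n", may_push_tx_desc(&q, old));
    return 0;
}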
tx_queue          315 drivers/net/ethernet/sfc/falcon/nic.h static inline int ef4_nic_probe_tx(struct ef4_tx_queue *tx_queue)
tx_queue          317 drivers/net/ethernet/sfc/falcon/nic.h 	return tx_queue->efx->type->tx_probe(tx_queue);
tx_queue          319 drivers/net/ethernet/sfc/falcon/nic.h static inline void ef4_nic_init_tx(struct ef4_tx_queue *tx_queue)
tx_queue          321 drivers/net/ethernet/sfc/falcon/nic.h 	tx_queue->efx->type->tx_init(tx_queue);
tx_queue          323 drivers/net/ethernet/sfc/falcon/nic.h static inline void ef4_nic_remove_tx(struct ef4_tx_queue *tx_queue)
tx_queue          325 drivers/net/ethernet/sfc/falcon/nic.h 	tx_queue->efx->type->tx_remove(tx_queue);
tx_queue          327 drivers/net/ethernet/sfc/falcon/nic.h static inline void ef4_nic_push_buffers(struct ef4_tx_queue *tx_queue)
tx_queue          329 drivers/net/ethernet/sfc/falcon/nic.h 	tx_queue->efx->type->tx_write(tx_queue);
tx_queue          383 drivers/net/ethernet/sfc/falcon/nic.h int ef4_farch_tx_probe(struct ef4_tx_queue *tx_queue);
tx_queue          384 drivers/net/ethernet/sfc/falcon/nic.h void ef4_farch_tx_init(struct ef4_tx_queue *tx_queue);
tx_queue          385 drivers/net/ethernet/sfc/falcon/nic.h void ef4_farch_tx_fini(struct ef4_tx_queue *tx_queue);
tx_queue          386 drivers/net/ethernet/sfc/falcon/nic.h void ef4_farch_tx_remove(struct ef4_tx_queue *tx_queue);
tx_queue          387 drivers/net/ethernet/sfc/falcon/nic.h void ef4_farch_tx_write(struct ef4_tx_queue *tx_queue);
tx_queue          388 drivers/net/ethernet/sfc/falcon/nic.h unsigned int ef4_farch_tx_limit_len(struct ef4_tx_queue *tx_queue,
tx_queue          410 drivers/net/ethernet/sfc/falcon/selftest.c static int ef4_begin_loopback(struct ef4_tx_queue *tx_queue)
tx_queue          412 drivers/net/ethernet/sfc/falcon/selftest.c 	struct ef4_nic *efx = tx_queue->efx;
tx_queue          440 drivers/net/ethernet/sfc/falcon/selftest.c 		rc = ef4_enqueue_skb(tx_queue, skb);
tx_queue          446 drivers/net/ethernet/sfc/falcon/selftest.c 				  "%d in %s loopback test\n", tx_queue->queue,
tx_queue          466 drivers/net/ethernet/sfc/falcon/selftest.c static int ef4_end_loopback(struct ef4_tx_queue *tx_queue,
tx_queue          469 drivers/net/ethernet/sfc/falcon/selftest.c 	struct ef4_nic *efx = tx_queue->efx;
tx_queue          498 drivers/net/ethernet/sfc/falcon/selftest.c 			  tx_queue->queue, tx_done, state->packet_count,
tx_queue          509 drivers/net/ethernet/sfc/falcon/selftest.c 			  tx_queue->queue, rx_good, state->packet_count,
tx_queue          516 drivers/net/ethernet/sfc/falcon/selftest.c 	lb_tests->tx_sent[tx_queue->queue] += state->packet_count;
tx_queue          517 drivers/net/ethernet/sfc/falcon/selftest.c 	lb_tests->tx_done[tx_queue->queue] += tx_done;
tx_queue          525 drivers/net/ethernet/sfc/falcon/selftest.c ef4_test_loopback(struct ef4_tx_queue *tx_queue,
tx_queue          528 drivers/net/ethernet/sfc/falcon/selftest.c 	struct ef4_nic *efx = tx_queue->efx;
tx_queue          544 drivers/net/ethernet/sfc/falcon/selftest.c 			  tx_queue->queue, LOOPBACK_MODE(efx),
tx_queue          548 drivers/net/ethernet/sfc/falcon/selftest.c 		begin_rc = ef4_begin_loopback(tx_queue);
tx_queue          558 drivers/net/ethernet/sfc/falcon/selftest.c 		end_rc = ef4_end_loopback(tx_queue, lb_tests);
tx_queue          571 drivers/net/ethernet/sfc/falcon/selftest.c 		  "of %d packets\n", tx_queue->queue, LOOPBACK_MODE(efx),
tx_queue          620 drivers/net/ethernet/sfc/falcon/selftest.c 	struct ef4_tx_queue *tx_queue;
tx_queue          660 drivers/net/ethernet/sfc/falcon/selftest.c 		ef4_for_each_channel_tx_queue(tx_queue, channel) {
tx_queue          661 drivers/net/ethernet/sfc/falcon/selftest.c 			state->offload_csum = (tx_queue->queue &
tx_queue          663 drivers/net/ethernet/sfc/falcon/selftest.c 			rc = ef4_test_loopback(tx_queue,
tx_queue           25 drivers/net/ethernet/sfc/falcon/tx.c static inline u8 *ef4_tx_get_copy_buffer(struct ef4_tx_queue *tx_queue,
tx_queue           28 drivers/net/ethernet/sfc/falcon/tx.c 	unsigned int index = ef4_tx_queue_get_insert_index(tx_queue);
tx_queue           30 drivers/net/ethernet/sfc/falcon/tx.c 		&tx_queue->cb_page[index >> (PAGE_SHIFT - EF4_TX_CB_ORDER)];
tx_queue           35 drivers/net/ethernet/sfc/falcon/tx.c 	    ef4_nic_alloc_buffer(tx_queue->efx, page_buf, PAGE_SIZE,
tx_queue           43 drivers/net/ethernet/sfc/falcon/tx.c u8 *ef4_tx_get_copy_buffer_limited(struct ef4_tx_queue *tx_queue,
tx_queue           48 drivers/net/ethernet/sfc/falcon/tx.c 	return ef4_tx_get_copy_buffer(tx_queue, buffer);
tx_queue           51 drivers/net/ethernet/sfc/falcon/tx.c static void ef4_dequeue_buffer(struct ef4_tx_queue *tx_queue,
tx_queue           57 drivers/net/ethernet/sfc/falcon/tx.c 		struct device *dma_dev = &tx_queue->efx->pci_dev->dev;
tx_queue           72 drivers/net/ethernet/sfc/falcon/tx.c 		netif_vdbg(tx_queue->efx, tx_done, tx_queue->efx->net_dev,
tx_queue           74 drivers/net/ethernet/sfc/falcon/tx.c 			   tx_queue->queue, tx_queue->read_count);
tx_queue          147 drivers/net/ethernet/sfc/falcon/tx.c static int ef4_enqueue_skb_copy(struct ef4_tx_queue *tx_queue,
tx_queue          150 drivers/net/ethernet/sfc/falcon/tx.c 	unsigned int min_len = tx_queue->tx_min_size;
tx_queue          158 drivers/net/ethernet/sfc/falcon/tx.c 	buffer = ef4_tx_queue_get_insert_buffer(tx_queue);
tx_queue          160 drivers/net/ethernet/sfc/falcon/tx.c 	copy_buffer = ef4_tx_get_copy_buffer(tx_queue, buffer);
tx_queue          176 drivers/net/ethernet/sfc/falcon/tx.c 	++tx_queue->insert_count;
tx_queue          180 drivers/net/ethernet/sfc/falcon/tx.c static struct ef4_tx_buffer *ef4_tx_map_chunk(struct ef4_tx_queue *tx_queue,
tx_queue          184 drivers/net/ethernet/sfc/falcon/tx.c 	const struct ef4_nic_type *nic_type = tx_queue->efx->type;
tx_queue          190 drivers/net/ethernet/sfc/falcon/tx.c 		buffer = ef4_tx_queue_get_insert_buffer(tx_queue);
tx_queue          191 drivers/net/ethernet/sfc/falcon/tx.c 		dma_len = nic_type->tx_limit_len(tx_queue, dma_addr, len);
tx_queue          198 drivers/net/ethernet/sfc/falcon/tx.c 		++tx_queue->insert_count;
tx_queue          206 drivers/net/ethernet/sfc/falcon/tx.c static int ef4_tx_map_data(struct ef4_tx_queue *tx_queue, struct sk_buff *skb)
tx_queue          208 drivers/net/ethernet/sfc/falcon/tx.c 	struct ef4_nic *efx = tx_queue->efx;
tx_queue          233 drivers/net/ethernet/sfc/falcon/tx.c 		buffer = ef4_tx_map_chunk(tx_queue, dma_addr, len);
tx_queue          268 drivers/net/ethernet/sfc/falcon/tx.c static void ef4_enqueue_unwind(struct ef4_tx_queue *tx_queue)
tx_queue          273 drivers/net/ethernet/sfc/falcon/tx.c 	while (tx_queue->insert_count != tx_queue->write_count) {
tx_queue          274 drivers/net/ethernet/sfc/falcon/tx.c 		--tx_queue->insert_count;
tx_queue          275 drivers/net/ethernet/sfc/falcon/tx.c 		buffer = __ef4_tx_queue_get_insert_buffer(tx_queue);
tx_queue          276 drivers/net/ethernet/sfc/falcon/tx.c 		ef4_dequeue_buffer(tx_queue, buffer, NULL, NULL);
tx_queue          296 drivers/net/ethernet/sfc/falcon/tx.c netdev_tx_t ef4_enqueue_skb(struct ef4_tx_queue *tx_queue, struct sk_buff *skb)
tx_queue          304 drivers/net/ethernet/sfc/falcon/tx.c 	if (skb_len < tx_queue->tx_min_size ||
tx_queue          307 drivers/net/ethernet/sfc/falcon/tx.c 		if (ef4_enqueue_skb_copy(tx_queue, skb))
tx_queue          309 drivers/net/ethernet/sfc/falcon/tx.c 		tx_queue->cb_packets++;
tx_queue          314 drivers/net/ethernet/sfc/falcon/tx.c 	if (!data_mapped && (ef4_tx_map_data(tx_queue, skb)))
tx_queue          318 drivers/net/ethernet/sfc/falcon/tx.c 	netdev_tx_sent_queue(tx_queue->core_txq, skb_len);
tx_queue          321 drivers/net/ethernet/sfc/falcon/tx.c 	if (!netdev_xmit_more() || netif_xmit_stopped(tx_queue->core_txq)) {
tx_queue          322 drivers/net/ethernet/sfc/falcon/tx.c 		struct ef4_tx_queue *txq2 = ef4_tx_queue_partner(tx_queue);
tx_queue          331 drivers/net/ethernet/sfc/falcon/tx.c 		ef4_nic_push_buffers(tx_queue);
tx_queue          333 drivers/net/ethernet/sfc/falcon/tx.c 		tx_queue->xmit_more_available = netdev_xmit_more();
tx_queue          336 drivers/net/ethernet/sfc/falcon/tx.c 	tx_queue->tx_packets++;
tx_queue          338 drivers/net/ethernet/sfc/falcon/tx.c 	ef4_tx_maybe_stop_queue(tx_queue);
tx_queue          344 drivers/net/ethernet/sfc/falcon/tx.c 	ef4_enqueue_unwind(tx_queue);
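ef4_enqueue_skb() as excerpted above branches on the packet: frames shorter than tx_min_size go through a per-queue copy buffer, everything else is DMA-mapped, and the descriptors are only pushed to the NIC when xmit_more is not set or the core queue has stopped. A compact model of just that control flow, with the driver details stubbed out behind invented helpers (the 33-byte minimum matches the workaround value set in ef4_init_tx_queue), is:

#include <stdbool.h>
#include <stdio.h>

#define TX_MIN_SIZE 33u   /* workaround value used when the workaround applies */

enum submit { SUBMIT_COPY, SUBMIT_MAP };

/* Decide how the payload reaches the descriptor ring. */
static enum submit choose_path(unsigned int skb_len)
{
    return skb_len < TX_MIN_SIZE ? SUBMIT_COPY : SUBMIT_MAP;
}

/* Decide whether this call also writes the doorbell. */
static bool should_push(bool xmit_more, bool core_queue_stopped)
{
    return !xmit_more || core_queue_stopped;
}

int main(void)
{
    struct { unsigned int len; bool more; bool stopped; } pkts[] = {
        { 20,   true,  false },   /* tiny frame in a batch: copy, defer push */
        { 1500, true,  false },   /* full frame in a batch: map, defer push  */
        { 1500, false, false },   /* last frame of the batch: map and push   */
    };

    for (unsigned int i = 0; i < sizeof(pkts) / sizeof(pkts[0]); i++)
        printf("len=%4u -> %s, %s\n", pkts[i].len,
               choose_path(pkts[i].len) == SUBMIT_COPY ? "copy buffer" : "DMA map",
               should_push(pkts[i].more, pkts[i].stopped) ? "push now"
                                                          : "leave for later");
    return 0;
}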
tx_queue          354 drivers/net/ethernet/sfc/falcon/tx.c static void ef4_dequeue_buffers(struct ef4_tx_queue *tx_queue,
tx_queue          359 drivers/net/ethernet/sfc/falcon/tx.c 	struct ef4_nic *efx = tx_queue->efx;
tx_queue          362 drivers/net/ethernet/sfc/falcon/tx.c 	stop_index = (index + 1) & tx_queue->ptr_mask;
tx_queue          363 drivers/net/ethernet/sfc/falcon/tx.c 	read_ptr = tx_queue->read_count & tx_queue->ptr_mask;
tx_queue          366 drivers/net/ethernet/sfc/falcon/tx.c 		struct ef4_tx_buffer *buffer = &tx_queue->buffer[read_ptr];
tx_queue          372 drivers/net/ethernet/sfc/falcon/tx.c 				  tx_queue->queue, read_ptr);
tx_queue          377 drivers/net/ethernet/sfc/falcon/tx.c 		ef4_dequeue_buffer(tx_queue, buffer, pkts_compl, bytes_compl);
tx_queue          379 drivers/net/ethernet/sfc/falcon/tx.c 		++tx_queue->read_count;
tx_queue          380 drivers/net/ethernet/sfc/falcon/tx.c 		read_ptr = tx_queue->read_count & tx_queue->ptr_mask;
tx_queue          397 drivers/net/ethernet/sfc/falcon/tx.c 	struct ef4_tx_queue *tx_queue;
tx_queue          408 drivers/net/ethernet/sfc/falcon/tx.c 	tx_queue = ef4_get_tx_queue(efx, index, type);
tx_queue          410 drivers/net/ethernet/sfc/falcon/tx.c 	return ef4_enqueue_skb(tx_queue, skb);
tx_queue          413 drivers/net/ethernet/sfc/falcon/tx.c void ef4_init_tx_queue_core_txq(struct ef4_tx_queue *tx_queue)
tx_queue          415 drivers/net/ethernet/sfc/falcon/tx.c 	struct ef4_nic *efx = tx_queue->efx;
tx_queue          418 drivers/net/ethernet/sfc/falcon/tx.c 	tx_queue->core_txq =
tx_queue          420 drivers/net/ethernet/sfc/falcon/tx.c 				    tx_queue->queue / EF4_TXQ_TYPES +
tx_queue          421 drivers/net/ethernet/sfc/falcon/tx.c 				    ((tx_queue->queue & EF4_TXQ_TYPE_HIGHPRI) ?
tx_queue          431 drivers/net/ethernet/sfc/falcon/tx.c 	struct ef4_tx_queue *tx_queue;
tx_queue          456 drivers/net/ethernet/sfc/falcon/tx.c 			ef4_for_each_possible_channel_tx_queue(tx_queue,
tx_queue          458 drivers/net/ethernet/sfc/falcon/tx.c 				if (!(tx_queue->queue & EF4_TXQ_TYPE_HIGHPRI))
tx_queue          460 drivers/net/ethernet/sfc/falcon/tx.c 				if (!tx_queue->buffer) {
tx_queue          461 drivers/net/ethernet/sfc/falcon/tx.c 					rc = ef4_probe_tx_queue(tx_queue);
tx_queue          465 drivers/net/ethernet/sfc/falcon/tx.c 				if (!tx_queue->initialised)
tx_queue          466 drivers/net/ethernet/sfc/falcon/tx.c 					ef4_init_tx_queue(tx_queue);
tx_queue          467 drivers/net/ethernet/sfc/falcon/tx.c 				ef4_init_tx_queue_core_txq(tx_queue);
tx_queue          491 drivers/net/ethernet/sfc/falcon/tx.c void ef4_xmit_done(struct ef4_tx_queue *tx_queue, unsigned int index)
tx_queue          494 drivers/net/ethernet/sfc/falcon/tx.c 	struct ef4_nic *efx = tx_queue->efx;
tx_queue          498 drivers/net/ethernet/sfc/falcon/tx.c 	EF4_BUG_ON_PARANOID(index > tx_queue->ptr_mask);
tx_queue          500 drivers/net/ethernet/sfc/falcon/tx.c 	ef4_dequeue_buffers(tx_queue, index, &pkts_compl, &bytes_compl);
tx_queue          501 drivers/net/ethernet/sfc/falcon/tx.c 	tx_queue->pkts_compl += pkts_compl;
tx_queue          502 drivers/net/ethernet/sfc/falcon/tx.c 	tx_queue->bytes_compl += bytes_compl;
tx_queue          505 drivers/net/ethernet/sfc/falcon/tx.c 		++tx_queue->merge_events;
tx_queue          512 drivers/net/ethernet/sfc/falcon/tx.c 	if (unlikely(netif_tx_queue_stopped(tx_queue->core_txq)) &&
tx_queue          515 drivers/net/ethernet/sfc/falcon/tx.c 		txq2 = ef4_tx_queue_partner(tx_queue);
tx_queue          516 drivers/net/ethernet/sfc/falcon/tx.c 		fill_level = max(tx_queue->insert_count - tx_queue->read_count,
tx_queue          519 drivers/net/ethernet/sfc/falcon/tx.c 			netif_tx_wake_queue(tx_queue->core_txq);
tx_queue          523 drivers/net/ethernet/sfc/falcon/tx.c 	if ((int)(tx_queue->read_count - tx_queue->old_write_count) >= 0) {
tx_queue          524 drivers/net/ethernet/sfc/falcon/tx.c 		tx_queue->old_write_count = READ_ONCE(tx_queue->write_count);
tx_queue          525 drivers/net/ethernet/sfc/falcon/tx.c 		if (tx_queue->read_count == tx_queue->old_write_count) {
tx_queue          527 drivers/net/ethernet/sfc/falcon/tx.c 			tx_queue->empty_read_count =
tx_queue          528 drivers/net/ethernet/sfc/falcon/tx.c 				tx_queue->read_count | EF4_EMPTY_COUNT_VALID;
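The completion path above only wakes the core queue once the fill level of both queues of the pair has dropped far enough; fill level is the free-running insert count minus the read count, and the max() mirrors the check in ef4_xmit_done(). A sketch of that test, with the wake threshold picked arbitrarily, is:

#include <stdbool.h>
#include <stdio.h>

struct txq { unsigned int insert_count, read_count; };

static unsigned int fill_level(const struct txq *q)
{
    return q->insert_count - q->read_count;   /* free-running counters */
}

/* Wake only when both queues of the offload/no-offload pair have drained
 * below the threshold. */
static bool may_wake(const struct txq *a, const struct txq *b,
                     unsigned int wake_thresh)
{
    unsigned int level = fill_level(a);

    if (fill_level(b) > level)
        level = fill_level(b);
    return level <= wake_thresh;
}

int main(void)
{
    struct txq csum   = { .insert_count = 600, .read_count = 560 };
    struct txq nocsum = { .insert_count = 100, .read_count = 100 };

    printf("wake at thresh 32? %d\n", may_wake(&csum, &nocsum, 32));
    csum.read_count = 590;                   /* more completions arrive */
    printf("wake at thresh 32? %d\n", may_wake(&csum, &nocsum, 32));
    return 0;
}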
tx_queue          533 drivers/net/ethernet/sfc/falcon/tx.c static unsigned int ef4_tx_cb_page_count(struct ef4_tx_queue *tx_queue)
tx_queue          535 drivers/net/ethernet/sfc/falcon/tx.c 	return DIV_ROUND_UP(tx_queue->ptr_mask + 1, PAGE_SIZE >> EF4_TX_CB_ORDER);
tx_queue          538 drivers/net/ethernet/sfc/falcon/tx.c int ef4_probe_tx_queue(struct ef4_tx_queue *tx_queue)
tx_queue          540 drivers/net/ethernet/sfc/falcon/tx.c 	struct ef4_nic *efx = tx_queue->efx;
tx_queue          547 drivers/net/ethernet/sfc/falcon/tx.c 	tx_queue->ptr_mask = entries - 1;
tx_queue          551 drivers/net/ethernet/sfc/falcon/tx.c 		  tx_queue->queue, efx->txq_entries, tx_queue->ptr_mask);
tx_queue          554 drivers/net/ethernet/sfc/falcon/tx.c 	tx_queue->buffer = kcalloc(entries, sizeof(*tx_queue->buffer),
tx_queue          556 drivers/net/ethernet/sfc/falcon/tx.c 	if (!tx_queue->buffer)
tx_queue          559 drivers/net/ethernet/sfc/falcon/tx.c 	tx_queue->cb_page = kcalloc(ef4_tx_cb_page_count(tx_queue),
tx_queue          560 drivers/net/ethernet/sfc/falcon/tx.c 				    sizeof(tx_queue->cb_page[0]), GFP_KERNEL);
tx_queue          561 drivers/net/ethernet/sfc/falcon/tx.c 	if (!tx_queue->cb_page) {
tx_queue          567 drivers/net/ethernet/sfc/falcon/tx.c 	rc = ef4_nic_probe_tx(tx_queue);
tx_queue          574 drivers/net/ethernet/sfc/falcon/tx.c 	kfree(tx_queue->cb_page);
tx_queue          575 drivers/net/ethernet/sfc/falcon/tx.c 	tx_queue->cb_page = NULL;
tx_queue          577 drivers/net/ethernet/sfc/falcon/tx.c 	kfree(tx_queue->buffer);
tx_queue          578 drivers/net/ethernet/sfc/falcon/tx.c 	tx_queue->buffer = NULL;
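ef4_probe_tx_queue() sizes two arrays from the ring: one ef4_tx_buffer per entry, plus enough pages of small copy buffers that every entry could use one. With 2^CB_ORDER-byte copy buffers a page holds PAGE_SIZE >> CB_ORDER of them, and the page for a given insert index is index >> (PAGE_SHIFT - CB_ORDER), as in the tx.c excerpts. A standalone illustration of that arithmetic (the CB_ORDER value and the in-page offset layout are assumptions for the example) is:

#include <stdio.h>

#define PAGE_SHIFT  12u
#define PAGE_SIZE   (1u << PAGE_SHIFT)
#define CB_ORDER    7u            /* 128-byte copy buffers; assumed for illustration */

#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

int main(void)
{
    unsigned int entries  = 1024;                          /* ring entries */
    unsigned int per_page = PAGE_SIZE >> CB_ORDER;         /* copy buffers per page */
    unsigned int pages    = DIV_ROUND_UP(entries, per_page); /* cb_page[] length */

    printf("%u copy buffers per page, %u pages for %u entries\n",
           per_page, pages, entries);

    for (unsigned int index = 0; index < entries; index += 300)
        printf("insert index %4u -> cb_page[%u], offset %u bytes\n",
               index,
               index >> (PAGE_SHIFT - CB_ORDER),
               (index & (per_page - 1)) << CB_ORDER);
    return 0;
}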
tx_queue          582 drivers/net/ethernet/sfc/falcon/tx.c void ef4_init_tx_queue(struct ef4_tx_queue *tx_queue)
tx_queue          584 drivers/net/ethernet/sfc/falcon/tx.c 	struct ef4_nic *efx = tx_queue->efx;
tx_queue          587 drivers/net/ethernet/sfc/falcon/tx.c 		  "initialising TX queue %d\n", tx_queue->queue);
tx_queue          589 drivers/net/ethernet/sfc/falcon/tx.c 	tx_queue->insert_count = 0;
tx_queue          590 drivers/net/ethernet/sfc/falcon/tx.c 	tx_queue->write_count = 0;
tx_queue          591 drivers/net/ethernet/sfc/falcon/tx.c 	tx_queue->old_write_count = 0;
tx_queue          592 drivers/net/ethernet/sfc/falcon/tx.c 	tx_queue->read_count = 0;
tx_queue          593 drivers/net/ethernet/sfc/falcon/tx.c 	tx_queue->old_read_count = 0;
tx_queue          594 drivers/net/ethernet/sfc/falcon/tx.c 	tx_queue->empty_read_count = 0 | EF4_EMPTY_COUNT_VALID;
tx_queue          595 drivers/net/ethernet/sfc/falcon/tx.c 	tx_queue->xmit_more_available = false;
tx_queue          598 drivers/net/ethernet/sfc/falcon/tx.c 	tx_queue->tx_min_size = EF4_WORKAROUND_15592(efx) ? 33 : 0;
tx_queue          601 drivers/net/ethernet/sfc/falcon/tx.c 	ef4_nic_init_tx(tx_queue);
tx_queue          603 drivers/net/ethernet/sfc/falcon/tx.c 	tx_queue->initialised = true;
tx_queue          606 drivers/net/ethernet/sfc/falcon/tx.c void ef4_fini_tx_queue(struct ef4_tx_queue *tx_queue)
tx_queue          610 drivers/net/ethernet/sfc/falcon/tx.c 	netif_dbg(tx_queue->efx, drv, tx_queue->efx->net_dev,
tx_queue          611 drivers/net/ethernet/sfc/falcon/tx.c 		  "shutting down TX queue %d\n", tx_queue->queue);
tx_queue          613 drivers/net/ethernet/sfc/falcon/tx.c 	if (!tx_queue->buffer)
tx_queue          617 drivers/net/ethernet/sfc/falcon/tx.c 	while (tx_queue->read_count != tx_queue->write_count) {
tx_queue          619 drivers/net/ethernet/sfc/falcon/tx.c 		buffer = &tx_queue->buffer[tx_queue->read_count & tx_queue->ptr_mask];
tx_queue          620 drivers/net/ethernet/sfc/falcon/tx.c 		ef4_dequeue_buffer(tx_queue, buffer, &pkts_compl, &bytes_compl);
tx_queue          622 drivers/net/ethernet/sfc/falcon/tx.c 		++tx_queue->read_count;
tx_queue          624 drivers/net/ethernet/sfc/falcon/tx.c 	tx_queue->xmit_more_available = false;
tx_queue          625 drivers/net/ethernet/sfc/falcon/tx.c 	netdev_tx_reset_queue(tx_queue->core_txq);
tx_queue          628 drivers/net/ethernet/sfc/falcon/tx.c void ef4_remove_tx_queue(struct ef4_tx_queue *tx_queue)
tx_queue          632 drivers/net/ethernet/sfc/falcon/tx.c 	if (!tx_queue->buffer)
tx_queue          635 drivers/net/ethernet/sfc/falcon/tx.c 	netif_dbg(tx_queue->efx, drv, tx_queue->efx->net_dev,
tx_queue          636 drivers/net/ethernet/sfc/falcon/tx.c 		  "destroying TX queue %d\n", tx_queue->queue);
tx_queue          637 drivers/net/ethernet/sfc/falcon/tx.c 	ef4_nic_remove_tx(tx_queue);
tx_queue          639 drivers/net/ethernet/sfc/falcon/tx.c 	if (tx_queue->cb_page) {
tx_queue          640 drivers/net/ethernet/sfc/falcon/tx.c 		for (i = 0; i < ef4_tx_cb_page_count(tx_queue); i++)
tx_queue          641 drivers/net/ethernet/sfc/falcon/tx.c 			ef4_nic_free_buffer(tx_queue->efx,
tx_queue          642 drivers/net/ethernet/sfc/falcon/tx.c 					    &tx_queue->cb_page[i]);
tx_queue          643 drivers/net/ethernet/sfc/falcon/tx.c 		kfree(tx_queue->cb_page);
tx_queue          644 drivers/net/ethernet/sfc/falcon/tx.c 		tx_queue->cb_page = NULL;
tx_queue          647 drivers/net/ethernet/sfc/falcon/tx.c 	kfree(tx_queue->buffer);
tx_queue          648 drivers/net/ethernet/sfc/falcon/tx.c 	tx_queue->buffer = NULL;
tx_queue           15 drivers/net/ethernet/sfc/falcon/tx.h unsigned int ef4_tx_limit_len(struct ef4_tx_queue *tx_queue,
tx_queue           18 drivers/net/ethernet/sfc/falcon/tx.h u8 *ef4_tx_get_copy_buffer_limited(struct ef4_tx_queue *tx_queue,
tx_queue           21 drivers/net/ethernet/sfc/falcon/tx.h int ef4_enqueue_skb_tso(struct ef4_tx_queue *tx_queue, struct sk_buff *skb,
tx_queue          281 drivers/net/ethernet/sfc/farch.c static inline void efx_farch_notify_tx_desc(struct efx_tx_queue *tx_queue)
tx_queue          286 drivers/net/ethernet/sfc/farch.c 	write_ptr = tx_queue->write_count & tx_queue->ptr_mask;
tx_queue          288 drivers/net/ethernet/sfc/farch.c 	efx_writed_page(tx_queue->efx, &reg,
tx_queue          289 drivers/net/ethernet/sfc/farch.c 			FR_AZ_TX_DESC_UPD_DWORD_P0, tx_queue->queue);
tx_queue          293 drivers/net/ethernet/sfc/farch.c static inline void efx_farch_push_tx_desc(struct efx_tx_queue *tx_queue,
tx_queue          302 drivers/net/ethernet/sfc/farch.c 	write_ptr = tx_queue->write_count & tx_queue->ptr_mask;
tx_queue          306 drivers/net/ethernet/sfc/farch.c 	efx_writeo_page(tx_queue->efx, &reg,
tx_queue          307 drivers/net/ethernet/sfc/farch.c 			FR_BZ_TX_DESC_UPD_P0, tx_queue->queue);
tx_queue          315 drivers/net/ethernet/sfc/farch.c void efx_farch_tx_write(struct efx_tx_queue *tx_queue)
tx_queue          320 drivers/net/ethernet/sfc/farch.c 	unsigned old_write_count = tx_queue->write_count;
tx_queue          322 drivers/net/ethernet/sfc/farch.c 	tx_queue->xmit_more_available = false;
tx_queue          323 drivers/net/ethernet/sfc/farch.c 	if (unlikely(tx_queue->write_count == tx_queue->insert_count))
tx_queue          327 drivers/net/ethernet/sfc/farch.c 		write_ptr = tx_queue->write_count & tx_queue->ptr_mask;
tx_queue          328 drivers/net/ethernet/sfc/farch.c 		buffer = &tx_queue->buffer[write_ptr];
tx_queue          329 drivers/net/ethernet/sfc/farch.c 		txd = efx_tx_desc(tx_queue, write_ptr);
tx_queue          330 drivers/net/ethernet/sfc/farch.c 		++tx_queue->write_count;
tx_queue          342 drivers/net/ethernet/sfc/farch.c 	} while (tx_queue->write_count != tx_queue->insert_count);
tx_queue          346 drivers/net/ethernet/sfc/farch.c 	if (efx_nic_may_push_tx_desc(tx_queue, old_write_count)) {
tx_queue          347 drivers/net/ethernet/sfc/farch.c 		txd = efx_tx_desc(tx_queue,
tx_queue          348 drivers/net/ethernet/sfc/farch.c 				  old_write_count & tx_queue->ptr_mask);
tx_queue          349 drivers/net/ethernet/sfc/farch.c 		efx_farch_push_tx_desc(tx_queue, txd);
tx_queue          350 drivers/net/ethernet/sfc/farch.c 		++tx_queue->pushes;
tx_queue          352 drivers/net/ethernet/sfc/farch.c 		efx_farch_notify_tx_desc(tx_queue);
tx_queue          356 drivers/net/ethernet/sfc/farch.c unsigned int efx_farch_tx_limit_len(struct efx_tx_queue *tx_queue,
tx_queue          369 drivers/net/ethernet/sfc/farch.c int efx_farch_tx_probe(struct efx_tx_queue *tx_queue)
tx_queue          371 drivers/net/ethernet/sfc/farch.c 	struct efx_nic *efx = tx_queue->efx;
tx_queue          374 drivers/net/ethernet/sfc/farch.c 	entries = tx_queue->ptr_mask + 1;
tx_queue          375 drivers/net/ethernet/sfc/farch.c 	return efx_alloc_special_buffer(efx, &tx_queue->txd,
tx_queue          379 drivers/net/ethernet/sfc/farch.c void efx_farch_tx_init(struct efx_tx_queue *tx_queue)
tx_queue          381 drivers/net/ethernet/sfc/farch.c 	int csum = tx_queue->queue & EFX_TXQ_TYPE_OFFLOAD;
tx_queue          382 drivers/net/ethernet/sfc/farch.c 	struct efx_nic *efx = tx_queue->efx;
tx_queue          386 drivers/net/ethernet/sfc/farch.c 	efx_init_special_buffer(efx, &tx_queue->txd);
tx_queue          393 drivers/net/ethernet/sfc/farch.c 			      FRF_AZ_TX_DESCQ_BUF_BASE_ID, tx_queue->txd.index,
tx_queue          395 drivers/net/ethernet/sfc/farch.c 			      tx_queue->channel->channel,
tx_queue          397 drivers/net/ethernet/sfc/farch.c 			      FRF_AZ_TX_DESCQ_LABEL, tx_queue->queue,
tx_queue          399 drivers/net/ethernet/sfc/farch.c 			      __ffs(tx_queue->txd.entries),
tx_queue          407 drivers/net/ethernet/sfc/farch.c 			 tx_queue->queue);
tx_queue          411 drivers/net/ethernet/sfc/farch.c 			     (tx_queue->queue & EFX_TXQ_TYPE_HIGHPRI) ?
tx_queue          414 drivers/net/ethernet/sfc/farch.c 	efx_writeo_table(efx, &reg, FR_BZ_TX_PACE_TBL, tx_queue->queue);
tx_queue          417 drivers/net/ethernet/sfc/farch.c static void efx_farch_flush_tx_queue(struct efx_tx_queue *tx_queue)
tx_queue          419 drivers/net/ethernet/sfc/farch.c 	struct efx_nic *efx = tx_queue->efx;
tx_queue          422 drivers/net/ethernet/sfc/farch.c 	WARN_ON(atomic_read(&tx_queue->flush_outstanding));
tx_queue          423 drivers/net/ethernet/sfc/farch.c 	atomic_set(&tx_queue->flush_outstanding, 1);
tx_queue          427 drivers/net/ethernet/sfc/farch.c 			     FRF_AZ_TX_FLUSH_DESCQ, tx_queue->queue);
tx_queue          431 drivers/net/ethernet/sfc/farch.c void efx_farch_tx_fini(struct efx_tx_queue *tx_queue)
tx_queue          433 drivers/net/ethernet/sfc/farch.c 	struct efx_nic *efx = tx_queue->efx;
tx_queue          439 drivers/net/ethernet/sfc/farch.c 			 tx_queue->queue);
tx_queue          442 drivers/net/ethernet/sfc/farch.c 	efx_fini_special_buffer(efx, &tx_queue->txd);
tx_queue          446 drivers/net/ethernet/sfc/farch.c void efx_farch_tx_remove(struct efx_tx_queue *tx_queue)
tx_queue          448 drivers/net/ethernet/sfc/farch.c 	efx_free_special_buffer(tx_queue->efx, &tx_queue->txd);
tx_queue          601 drivers/net/ethernet/sfc/farch.c 	struct efx_tx_queue *tx_queue;
tx_queue          604 drivers/net/ethernet/sfc/farch.c 		efx_for_each_channel_tx_queue(tx_queue, channel) {
tx_queue          606 drivers/net/ethernet/sfc/farch.c 					FR_BZ_TX_DESC_PTR_TBL, tx_queue->queue);
tx_queue          613 drivers/net/ethernet/sfc/farch.c 					  tx_queue->queue);
tx_queue          615 drivers/net/ethernet/sfc/farch.c 			} else if (atomic_cmpxchg(&tx_queue->flush_outstanding,
tx_queue          622 drivers/net/ethernet/sfc/farch.c 					  "the queue\n", tx_queue->queue);
tx_queue          629 drivers/net/ethernet/sfc/farch.c 							      tx_queue));
tx_queue          645 drivers/net/ethernet/sfc/farch.c 	struct efx_tx_queue *tx_queue;
tx_queue          649 drivers/net/ethernet/sfc/farch.c 		efx_for_each_channel_tx_queue(tx_queue, channel) {
tx_queue          650 drivers/net/ethernet/sfc/farch.c 			efx_farch_flush_tx_queue(tx_queue);
tx_queue          713 drivers/net/ethernet/sfc/farch.c 	struct efx_tx_queue *tx_queue;
tx_queue          729 drivers/net/ethernet/sfc/farch.c 			efx_for_each_channel_tx_queue(tx_queue, channel)
tx_queue          730 drivers/net/ethernet/sfc/farch.c 				efx_farch_tx_fini(tx_queue);
tx_queue          823 drivers/net/ethernet/sfc/farch.c 	struct efx_tx_queue *tx_queue;
tx_queue          833 drivers/net/ethernet/sfc/farch.c 		tx_queue = efx_channel_get_tx_queue(
tx_queue          835 drivers/net/ethernet/sfc/farch.c 		efx_xmit_done(tx_queue, tx_ev_desc_ptr);
tx_queue          839 drivers/net/ethernet/sfc/farch.c 		tx_queue = efx_channel_get_tx_queue(
tx_queue          843 drivers/net/ethernet/sfc/farch.c 		efx_farch_notify_tx_desc(tx_queue);
tx_queue         1081 drivers/net/ethernet/sfc/farch.c 	struct efx_tx_queue *tx_queue;
tx_queue         1086 drivers/net/ethernet/sfc/farch.c 		tx_queue = efx_get_tx_queue(efx, qid / EFX_TXQ_TYPES,
tx_queue         1088 drivers/net/ethernet/sfc/farch.c 		if (atomic_cmpxchg(&tx_queue->flush_outstanding, 1, 0)) {
tx_queue         1089 drivers/net/ethernet/sfc/farch.c 			efx_farch_magic_event(tx_queue->channel,
tx_queue         1090 drivers/net/ethernet/sfc/farch.c 					      EFX_CHANNEL_MAGIC_TX_DRAIN(tx_queue));
tx_queue          504 drivers/net/ethernet/sfc/net_driver.h 	struct efx_tx_queue tx_queue[EFX_TXQ_TYPES];
tx_queue         1321 drivers/net/ethernet/sfc/net_driver.h 	int (*tx_probe)(struct efx_tx_queue *tx_queue);
tx_queue         1322 drivers/net/ethernet/sfc/net_driver.h 	void (*tx_init)(struct efx_tx_queue *tx_queue);
tx_queue         1323 drivers/net/ethernet/sfc/net_driver.h 	void (*tx_remove)(struct efx_tx_queue *tx_queue);
tx_queue         1324 drivers/net/ethernet/sfc/net_driver.h 	void (*tx_write)(struct efx_tx_queue *tx_queue);
tx_queue         1325 drivers/net/ethernet/sfc/net_driver.h 	unsigned int (*tx_limit_len)(struct efx_tx_queue *tx_queue,
tx_queue         1473 drivers/net/ethernet/sfc/net_driver.h 	return &efx->channel[efx->tx_channel_offset + index]->tx_queue[type];
tx_queue         1487 drivers/net/ethernet/sfc/net_driver.h 	return &channel->tx_queue[type];
tx_queue         1490 drivers/net/ethernet/sfc/net_driver.h static inline bool efx_tx_queue_used(struct efx_tx_queue *tx_queue)
tx_queue         1492 drivers/net/ethernet/sfc/net_driver.h 	return !(tx_queue->efx->net_dev->num_tc < 2 &&
tx_queue         1493 drivers/net/ethernet/sfc/net_driver.h 		 tx_queue->queue & EFX_TXQ_TYPE_HIGHPRI);
tx_queue         1501 drivers/net/ethernet/sfc/net_driver.h 		for (_tx_queue = (_channel)->tx_queue;			\
tx_queue         1502 drivers/net/ethernet/sfc/net_driver.h 		     _tx_queue < (_channel)->tx_queue + EFX_TXQ_TYPES && \
tx_queue         1511 drivers/net/ethernet/sfc/net_driver.h 		for (_tx_queue = (_channel)->tx_queue;			\
tx_queue         1512 drivers/net/ethernet/sfc/net_driver.h 		     _tx_queue < (_channel)->tx_queue + EFX_TXQ_TYPES;	\
tx_queue         1600 drivers/net/ethernet/sfc/net_driver.h efx_tx_queue_get_insert_index(const struct efx_tx_queue *tx_queue)
tx_queue         1602 drivers/net/ethernet/sfc/net_driver.h 	return tx_queue->insert_count & tx_queue->ptr_mask;
tx_queue         1607 drivers/net/ethernet/sfc/net_driver.h __efx_tx_queue_get_insert_buffer(const struct efx_tx_queue *tx_queue)
tx_queue         1609 drivers/net/ethernet/sfc/net_driver.h 	return &tx_queue->buffer[efx_tx_queue_get_insert_index(tx_queue)];
tx_queue         1614 drivers/net/ethernet/sfc/net_driver.h efx_tx_queue_get_insert_buffer(const struct efx_tx_queue *tx_queue)
tx_queue         1617 drivers/net/ethernet/sfc/net_driver.h 		__efx_tx_queue_get_insert_buffer(tx_queue);
tx_queue           61 drivers/net/ethernet/sfc/nic.h efx_tx_desc(struct efx_tx_queue *tx_queue, unsigned int index)
tx_queue           63 drivers/net/ethernet/sfc/nic.h 	return ((efx_qword_t *) (tx_queue->txd.buf.addr)) + index;
tx_queue           67 drivers/net/ethernet/sfc/nic.h static struct efx_tx_queue *efx_tx_queue_partner(struct efx_tx_queue *tx_queue)
tx_queue           69 drivers/net/ethernet/sfc/nic.h 	if (tx_queue->queue & EFX_TXQ_TYPE_OFFLOAD)
tx_queue           70 drivers/net/ethernet/sfc/nic.h 		return tx_queue - EFX_TXQ_TYPE_OFFLOAD;
tx_queue           72 drivers/net/ethernet/sfc/nic.h 		return tx_queue + EFX_TXQ_TYPE_OFFLOAD;
tx_queue           78 drivers/net/ethernet/sfc/nic.h static inline bool __efx_nic_tx_is_empty(struct efx_tx_queue *tx_queue,
tx_queue           81 drivers/net/ethernet/sfc/nic.h 	unsigned int empty_read_count = READ_ONCE(tx_queue->empty_read_count);
tx_queue           94 drivers/net/ethernet/sfc/nic.h static inline bool efx_nic_tx_is_empty(struct efx_tx_queue *tx_queue)
tx_queue           96 drivers/net/ethernet/sfc/nic.h 	EFX_WARN_ON_ONCE_PARANOID(!tx_queue->efx->type->option_descriptors);
tx_queue           97 drivers/net/ethernet/sfc/nic.h 	return __efx_nic_tx_is_empty(tx_queue, tx_queue->packet_write_count);
tx_queue          106 drivers/net/ethernet/sfc/nic.h static inline bool efx_nic_may_tx_pio(struct efx_tx_queue *tx_queue)
tx_queue          108 drivers/net/ethernet/sfc/nic.h 	struct efx_tx_queue *partner = efx_tx_queue_partner(tx_queue);
tx_queue          110 drivers/net/ethernet/sfc/nic.h 	return tx_queue->piobuf && efx_nic_tx_is_empty(tx_queue) &&
tx_queue          122 drivers/net/ethernet/sfc/nic.h static inline bool efx_nic_may_push_tx_desc(struct efx_tx_queue *tx_queue,
tx_queue          125 drivers/net/ethernet/sfc/nic.h 	bool was_empty = __efx_nic_tx_is_empty(tx_queue, write_count);
tx_queue          127 drivers/net/ethernet/sfc/nic.h 	tx_queue->empty_read_count = 0;
tx_queue          128 drivers/net/ethernet/sfc/nic.h 	return was_empty && tx_queue->write_count - write_count == 1;
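efx_nic_may_tx_pio() gates the PIO path on the newer sfc driver: the queue must have a PIO buffer assigned and both queues of the pair must be empty, and the enqueue path only attempts PIO for short packets. A small model of that gate, where the length cutoff and the xmit_more condition are assumptions rather than driver constants, could be:

#include <stdbool.h>
#include <stdio.h>

#define PIO_LEN_MAX 256u   /* illustrative cutoff; the real limit is the PIO buffer size */

struct txq {
    bool has_piobuf;       /* a PIO region was carved out for this queue */
    bool empty;            /* no descriptors outstanding */
};

/* PIO is only worth it for a short packet written straight into NIC memory
 * while both queues of the pair are idle. */
static bool may_tx_pio(const struct txq *q, const struct txq *partner,
                       unsigned int skb_len, bool xmit_more)
{
    return q->has_piobuf && q->empty && partner->empty &&
           skb_len <= PIO_LEN_MAX && !xmit_more;
}

int main(void)
{
    struct txq q       = { .has_piobuf = true, .empty = true };
    struct txq partner = { .has_piobuf = true, .empty = true };

    printf("short idle packet -> PIO? %d\n", may_tx_pio(&q, &partner, 128, false));
    partner.empty = false;
    printf("partner busy       -> PIO? %d\n", may_tx_pio(&q, &partner, 128, false));
    return 0;
}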
tx_queue          474 drivers/net/ethernet/sfc/nic.h ktime_t efx_ptp_nic_to_kernel_time(struct efx_tx_queue *tx_queue);
tx_queue          492 drivers/net/ethernet/sfc/nic.h static inline int efx_nic_probe_tx(struct efx_tx_queue *tx_queue)
tx_queue          494 drivers/net/ethernet/sfc/nic.h 	return tx_queue->efx->type->tx_probe(tx_queue);
tx_queue          496 drivers/net/ethernet/sfc/nic.h static inline void efx_nic_init_tx(struct efx_tx_queue *tx_queue)
tx_queue          498 drivers/net/ethernet/sfc/nic.h 	tx_queue->efx->type->tx_init(tx_queue);
tx_queue          500 drivers/net/ethernet/sfc/nic.h static inline void efx_nic_remove_tx(struct efx_tx_queue *tx_queue)
tx_queue          502 drivers/net/ethernet/sfc/nic.h 	tx_queue->efx->type->tx_remove(tx_queue);
tx_queue          504 drivers/net/ethernet/sfc/nic.h static inline void efx_nic_push_buffers(struct efx_tx_queue *tx_queue)
tx_queue          506 drivers/net/ethernet/sfc/nic.h 	tx_queue->efx->type->tx_write(tx_queue);
tx_queue          560 drivers/net/ethernet/sfc/nic.h int efx_farch_tx_probe(struct efx_tx_queue *tx_queue);
tx_queue          561 drivers/net/ethernet/sfc/nic.h void efx_farch_tx_init(struct efx_tx_queue *tx_queue);
tx_queue          562 drivers/net/ethernet/sfc/nic.h void efx_farch_tx_fini(struct efx_tx_queue *tx_queue);
tx_queue          563 drivers/net/ethernet/sfc/nic.h void efx_farch_tx_remove(struct efx_tx_queue *tx_queue);
tx_queue          564 drivers/net/ethernet/sfc/nic.h void efx_farch_tx_write(struct efx_tx_queue *tx_queue);
tx_queue          565 drivers/net/ethernet/sfc/nic.h unsigned int efx_farch_tx_limit_len(struct efx_tx_queue *tx_queue,
tx_queue          609 drivers/net/ethernet/sfc/ptp.c ktime_t efx_ptp_nic_to_kernel_time(struct efx_tx_queue *tx_queue)
tx_queue          611 drivers/net/ethernet/sfc/ptp.c 	struct efx_nic *efx = tx_queue->efx;
tx_queue          617 drivers/net/ethernet/sfc/ptp.c 				tx_queue->completed_timestamp_major,
tx_queue          618 drivers/net/ethernet/sfc/ptp.c 				tx_queue->completed_timestamp_minor,
tx_queue          622 drivers/net/ethernet/sfc/ptp.c 				tx_queue->completed_timestamp_major,
tx_queue          623 drivers/net/ethernet/sfc/ptp.c 				tx_queue->completed_timestamp_minor,
tx_queue         1091 drivers/net/ethernet/sfc/ptp.c 	struct efx_tx_queue *tx_queue;
tx_queue         1094 drivers/net/ethernet/sfc/ptp.c 	tx_queue = &ptp_data->channel->tx_queue[type];
tx_queue         1095 drivers/net/ethernet/sfc/ptp.c 	if (tx_queue && tx_queue->timestamping) {
tx_queue         1096 drivers/net/ethernet/sfc/ptp.c 		efx_enqueue_skb(tx_queue, skb);
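efx_ptp_nic_to_kernel_time() converts the major/minor timestamp pair that completion events leave in the TX queue into a kernel ktime. The exact conversion depends on the NIC's time format, so the sketch below only assumes that 'major' counts seconds and 'minor' counts sub-second ticks at an assumed resolution; the helper name and constants are invented for the example:

#include <stdint.h>
#include <stdio.h>

#define NSEC_PER_SEC        1000000000ull
#define MINOR_TICKS_PER_SEC (1u << 27)   /* assumed sub-second resolution */

/* Convert a (major, minor) NIC timestamp to nanoseconds on the NIC clock.
 * Real hardware may need an additional correction term. */
static uint64_t nic_time_to_ns(uint32_t major, uint32_t minor)
{
    return (uint64_t)major * NSEC_PER_SEC +
           ((uint64_t)minor * NSEC_PER_SEC) / MINOR_TICKS_PER_SEC;
}

int main(void)
{
    uint32_t major = 1700000000u;               /* whole seconds */
    uint32_t minor = MINOR_TICKS_PER_SEC / 2;   /* half a second of ticks */

    printf("timestamp = %llu ns\n",
           (unsigned long long)nic_time_to_ns(major, minor));
    return 0;
}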
tx_queue          410 drivers/net/ethernet/sfc/selftest.c static int efx_begin_loopback(struct efx_tx_queue *tx_queue)
tx_queue          412 drivers/net/ethernet/sfc/selftest.c 	struct efx_nic *efx = tx_queue->efx;
tx_queue          440 drivers/net/ethernet/sfc/selftest.c 		rc = efx_enqueue_skb(tx_queue, skb);
tx_queue          446 drivers/net/ethernet/sfc/selftest.c 				  "%d in %s loopback test\n", tx_queue->queue,
tx_queue          466 drivers/net/ethernet/sfc/selftest.c static int efx_end_loopback(struct efx_tx_queue *tx_queue,
tx_queue          469 drivers/net/ethernet/sfc/selftest.c 	struct efx_nic *efx = tx_queue->efx;
tx_queue          498 drivers/net/ethernet/sfc/selftest.c 			  tx_queue->queue, tx_done, state->packet_count,
tx_queue          509 drivers/net/ethernet/sfc/selftest.c 			  tx_queue->queue, rx_good, state->packet_count,
tx_queue          516 drivers/net/ethernet/sfc/selftest.c 	lb_tests->tx_sent[tx_queue->queue] += state->packet_count;
tx_queue          517 drivers/net/ethernet/sfc/selftest.c 	lb_tests->tx_done[tx_queue->queue] += tx_done;
tx_queue          525 drivers/net/ethernet/sfc/selftest.c efx_test_loopback(struct efx_tx_queue *tx_queue,
tx_queue          528 drivers/net/ethernet/sfc/selftest.c 	struct efx_nic *efx = tx_queue->efx;
tx_queue          544 drivers/net/ethernet/sfc/selftest.c 			  tx_queue->queue, LOOPBACK_MODE(efx),
tx_queue          548 drivers/net/ethernet/sfc/selftest.c 		begin_rc = efx_begin_loopback(tx_queue);
tx_queue          558 drivers/net/ethernet/sfc/selftest.c 		end_rc = efx_end_loopback(tx_queue, lb_tests);
tx_queue          571 drivers/net/ethernet/sfc/selftest.c 		  "of %d packets\n", tx_queue->queue, LOOPBACK_MODE(efx),
tx_queue          620 drivers/net/ethernet/sfc/selftest.c 	struct efx_tx_queue *tx_queue;
tx_queue          660 drivers/net/ethernet/sfc/selftest.c 		efx_for_each_channel_tx_queue(tx_queue, channel) {
tx_queue          661 drivers/net/ethernet/sfc/selftest.c 			state->offload_csum = (tx_queue->queue &
tx_queue          663 drivers/net/ethernet/sfc/selftest.c 			rc = efx_test_loopback(tx_queue,
tx_queue           33 drivers/net/ethernet/sfc/tx.c static inline u8 *efx_tx_get_copy_buffer(struct efx_tx_queue *tx_queue,
tx_queue           36 drivers/net/ethernet/sfc/tx.c 	unsigned int index = efx_tx_queue_get_insert_index(tx_queue);
tx_queue           38 drivers/net/ethernet/sfc/tx.c 		&tx_queue->cb_page[index >> (PAGE_SHIFT - EFX_TX_CB_ORDER)];
tx_queue           43 drivers/net/ethernet/sfc/tx.c 	    efx_nic_alloc_buffer(tx_queue->efx, page_buf, PAGE_SIZE,
tx_queue           51 drivers/net/ethernet/sfc/tx.c u8 *efx_tx_get_copy_buffer_limited(struct efx_tx_queue *tx_queue,
tx_queue           56 drivers/net/ethernet/sfc/tx.c 	return efx_tx_get_copy_buffer(tx_queue, buffer);
tx_queue           59 drivers/net/ethernet/sfc/tx.c static void efx_dequeue_buffer(struct efx_tx_queue *tx_queue,
tx_queue           65 drivers/net/ethernet/sfc/tx.c 		struct device *dma_dev = &tx_queue->efx->pci_dev->dev;
tx_queue           82 drivers/net/ethernet/sfc/tx.c 		if (tx_queue->timestamping &&
tx_queue           83 drivers/net/ethernet/sfc/tx.c 		    (tx_queue->completed_timestamp_major ||
tx_queue           84 drivers/net/ethernet/sfc/tx.c 		     tx_queue->completed_timestamp_minor)) {
tx_queue           88 drivers/net/ethernet/sfc/tx.c 				efx_ptp_nic_to_kernel_time(tx_queue);
tx_queue           91 drivers/net/ethernet/sfc/tx.c 			tx_queue->completed_timestamp_major = 0;
tx_queue           92 drivers/net/ethernet/sfc/tx.c 			tx_queue->completed_timestamp_minor = 0;
tx_queue           95 drivers/net/ethernet/sfc/tx.c 		netif_vdbg(tx_queue->efx, tx_done, tx_queue->efx->net_dev,
tx_queue           97 drivers/net/ethernet/sfc/tx.c 			   tx_queue->queue, tx_queue->read_count);
tx_queue          164 drivers/net/ethernet/sfc/tx.c static int efx_enqueue_skb_copy(struct efx_tx_queue *tx_queue,
tx_queue          174 drivers/net/ethernet/sfc/tx.c 	buffer = efx_tx_queue_get_insert_buffer(tx_queue);
tx_queue          176 drivers/net/ethernet/sfc/tx.c 	copy_buffer = efx_tx_get_copy_buffer(tx_queue, buffer);
tx_queue          187 drivers/net/ethernet/sfc/tx.c 	++tx_queue->insert_count;
tx_queue          285 drivers/net/ethernet/sfc/tx.c static int efx_enqueue_skb_pio(struct efx_tx_queue *tx_queue,
tx_queue          289 drivers/net/ethernet/sfc/tx.c 		efx_tx_queue_get_insert_buffer(tx_queue);
tx_queue          290 drivers/net/ethernet/sfc/tx.c 	u8 __iomem *piobuf = tx_queue->piobuf;
tx_queue          305 drivers/net/ethernet/sfc/tx.c 		efx_skb_copy_bits_to_pio(tx_queue->efx, skb,
tx_queue          307 drivers/net/ethernet/sfc/tx.c 		efx_flush_copy_buffer(tx_queue->efx, piobuf, &copy_buf);
tx_queue          315 drivers/net/ethernet/sfc/tx.c 		__iowrite64_copy(tx_queue->piobuf, skb->data,
tx_queue          328 drivers/net/ethernet/sfc/tx.c 			     tx_queue->piobuf_offset);
tx_queue          329 drivers/net/ethernet/sfc/tx.c 	++tx_queue->insert_count;
tx_queue          334 drivers/net/ethernet/sfc/tx.c static struct efx_tx_buffer *efx_tx_map_chunk(struct efx_tx_queue *tx_queue,
tx_queue          338 drivers/net/ethernet/sfc/tx.c 	const struct efx_nic_type *nic_type = tx_queue->efx->type;
tx_queue          344 drivers/net/ethernet/sfc/tx.c 		buffer = efx_tx_queue_get_insert_buffer(tx_queue);
tx_queue          345 drivers/net/ethernet/sfc/tx.c 		dma_len = nic_type->tx_limit_len(tx_queue, dma_addr, len);
tx_queue          352 drivers/net/ethernet/sfc/tx.c 		++tx_queue->insert_count;
tx_queue          360 drivers/net/ethernet/sfc/tx.c static int efx_tx_map_data(struct efx_tx_queue *tx_queue, struct sk_buff *skb,
tx_queue          363 drivers/net/ethernet/sfc/tx.c 	struct efx_nic *efx = tx_queue->efx;
tx_queue          391 drivers/net/ethernet/sfc/tx.c 			tx_queue->tso_long_headers++;
tx_queue          392 drivers/net/ethernet/sfc/tx.c 			efx_tx_map_chunk(tx_queue, dma_addr, header_len);
tx_queue          403 drivers/net/ethernet/sfc/tx.c 		buffer = efx_tx_map_chunk(tx_queue, dma_addr, len);
tx_queue          438 drivers/net/ethernet/sfc/tx.c static void efx_enqueue_unwind(struct efx_tx_queue *tx_queue,
tx_queue          446 drivers/net/ethernet/sfc/tx.c 	while (tx_queue->insert_count != insert_count) {
tx_queue          447 drivers/net/ethernet/sfc/tx.c 		--tx_queue->insert_count;
tx_queue          448 drivers/net/ethernet/sfc/tx.c 		buffer = __efx_tx_queue_get_insert_buffer(tx_queue);
tx_queue          449 drivers/net/ethernet/sfc/tx.c 		efx_dequeue_buffer(tx_queue, buffer, &pkts_compl, &bytes_compl);
tx_queue          462 drivers/net/ethernet/sfc/tx.c static int efx_tx_tso_fallback(struct efx_tx_queue *tx_queue,
tx_queue          478 drivers/net/ethernet/sfc/tx.c 		efx_enqueue_skb(tx_queue, skb);
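efx_tx_tso_fallback() is taken when the queue's TSO handler declines a packet: the super-frame is segmented in software and each resulting segment is fed back through efx_enqueue_skb(). A rough standalone model of that loop, segmenting a byte count into MSS-sized chunks instead of real skbs (all names here are stand-ins), is:

#include <stdio.h>

/* Stand-in for handing one already-sized frame to the normal enqueue path. */
static void enqueue_segment(unsigned int seg_len)
{
    printf("  enqueue segment of %u payload bytes\n", seg_len);
}

/* Software fallback: split a GSO super-frame into MSS-sized segments and
 * enqueue each one individually. */
static unsigned int tso_fallback(unsigned int payload_len, unsigned int mss)
{
    unsigned int segments = 0;

    while (payload_len) {
        unsigned int seg = payload_len < mss ? payload_len : mss;

        enqueue_segment(seg);
        payload_len -= seg;
        segments++;
    }
    return segments;
}

int main(void)
{
    printf("%u segments emitted\n", tso_fallback(64000, 1448));
    return 0;
}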
tx_queue          501 drivers/net/ethernet/sfc/tx.c netdev_tx_t efx_enqueue_skb(struct efx_tx_queue *tx_queue, struct sk_buff *skb)
tx_queue          503 drivers/net/ethernet/sfc/tx.c 	unsigned int old_insert_count = tx_queue->insert_count;
tx_queue          520 drivers/net/ethernet/sfc/tx.c 		EFX_WARN_ON_ONCE_PARANOID(!tx_queue->handle_tso);
tx_queue          521 drivers/net/ethernet/sfc/tx.c 		rc = tx_queue->handle_tso(tx_queue, skb, &data_mapped);
tx_queue          523 drivers/net/ethernet/sfc/tx.c 			rc = efx_tx_tso_fallback(tx_queue, skb);
tx_queue          524 drivers/net/ethernet/sfc/tx.c 			tx_queue->tso_fallbacks++;
tx_queue          532 drivers/net/ethernet/sfc/tx.c 		   efx_nic_may_tx_pio(tx_queue)) {
tx_queue          534 drivers/net/ethernet/sfc/tx.c 		if (efx_enqueue_skb_pio(tx_queue, skb))
tx_queue          536 drivers/net/ethernet/sfc/tx.c 		tx_queue->pio_packets++;
tx_queue          541 drivers/net/ethernet/sfc/tx.c 		if (efx_enqueue_skb_copy(tx_queue, skb))
tx_queue          543 drivers/net/ethernet/sfc/tx.c 		tx_queue->cb_packets++;
tx_queue          548 drivers/net/ethernet/sfc/tx.c 	if (!data_mapped && (efx_tx_map_data(tx_queue, skb, segments)))
tx_queue          551 drivers/net/ethernet/sfc/tx.c 	efx_tx_maybe_stop_queue(tx_queue);
tx_queue          554 drivers/net/ethernet/sfc/tx.c 	if (__netdev_tx_sent_queue(tx_queue->core_txq, skb_len, xmit_more)) {
tx_queue          555 drivers/net/ethernet/sfc/tx.c 		struct efx_tx_queue *txq2 = efx_tx_queue_partner(tx_queue);
tx_queue          564 drivers/net/ethernet/sfc/tx.c 		efx_nic_push_buffers(tx_queue);
tx_queue          566 drivers/net/ethernet/sfc/tx.c 		tx_queue->xmit_more_available = xmit_more;
tx_queue          570 drivers/net/ethernet/sfc/tx.c 		tx_queue->tso_bursts++;
tx_queue          571 drivers/net/ethernet/sfc/tx.c 		tx_queue->tso_packets += segments;
tx_queue          572 drivers/net/ethernet/sfc/tx.c 		tx_queue->tx_packets  += segments;
tx_queue          574 drivers/net/ethernet/sfc/tx.c 		tx_queue->tx_packets++;
tx_queue          581 drivers/net/ethernet/sfc/tx.c 	efx_enqueue_unwind(tx_queue, old_insert_count);
tx_queue          589 drivers/net/ethernet/sfc/tx.c 		struct efx_tx_queue *txq2 = efx_tx_queue_partner(tx_queue);
tx_queue          594 drivers/net/ethernet/sfc/tx.c 		efx_nic_push_buffers(tx_queue);
tx_queue          605 drivers/net/ethernet/sfc/tx.c static void efx_dequeue_buffers(struct efx_tx_queue *tx_queue,
tx_queue          610 drivers/net/ethernet/sfc/tx.c 	struct efx_nic *efx = tx_queue->efx;
tx_queue          613 drivers/net/ethernet/sfc/tx.c 	stop_index = (index + 1) & tx_queue->ptr_mask;
tx_queue          614 drivers/net/ethernet/sfc/tx.c 	read_ptr = tx_queue->read_count & tx_queue->ptr_mask;
tx_queue          617 drivers/net/ethernet/sfc/tx.c 		struct efx_tx_buffer *buffer = &tx_queue->buffer[read_ptr];
tx_queue          623 drivers/net/ethernet/sfc/tx.c 				  tx_queue->queue, read_ptr);
tx_queue          628 drivers/net/ethernet/sfc/tx.c 		efx_dequeue_buffer(tx_queue, buffer, pkts_compl, bytes_compl);
tx_queue          630 drivers/net/ethernet/sfc/tx.c 		++tx_queue->read_count;
tx_queue          631 drivers/net/ethernet/sfc/tx.c 		read_ptr = tx_queue->read_count & tx_queue->ptr_mask;
tx_queue          648 drivers/net/ethernet/sfc/tx.c 	struct efx_tx_queue *tx_queue;
tx_queue          665 drivers/net/ethernet/sfc/tx.c 	tx_queue = efx_get_tx_queue(efx, index, type);
tx_queue          667 drivers/net/ethernet/sfc/tx.c 	return efx_enqueue_skb(tx_queue, skb);
tx_queue          670 drivers/net/ethernet/sfc/tx.c void efx_init_tx_queue_core_txq(struct efx_tx_queue *tx_queue)
tx_queue          672 drivers/net/ethernet/sfc/tx.c 	struct efx_nic *efx = tx_queue->efx;
tx_queue          675 drivers/net/ethernet/sfc/tx.c 	tx_queue->core_txq =
tx_queue          677 drivers/net/ethernet/sfc/tx.c 				    tx_queue->queue / EFX_TXQ_TYPES +
tx_queue          678 drivers/net/ethernet/sfc/tx.c 				    ((tx_queue->queue & EFX_TXQ_TYPE_HIGHPRI) ?
tx_queue          688 drivers/net/ethernet/sfc/tx.c 	struct efx_tx_queue *tx_queue;
tx_queue          713 drivers/net/ethernet/sfc/tx.c 			efx_for_each_possible_channel_tx_queue(tx_queue,
tx_queue          715 drivers/net/ethernet/sfc/tx.c 				if (!(tx_queue->queue & EFX_TXQ_TYPE_HIGHPRI))
tx_queue          717 drivers/net/ethernet/sfc/tx.c 				if (!tx_queue->buffer) {
tx_queue          718 drivers/net/ethernet/sfc/tx.c 					rc = efx_probe_tx_queue(tx_queue);
tx_queue          722 drivers/net/ethernet/sfc/tx.c 				if (!tx_queue->initialised)
tx_queue          723 drivers/net/ethernet/sfc/tx.c 					efx_init_tx_queue(tx_queue);
tx_queue          724 drivers/net/ethernet/sfc/tx.c 				efx_init_tx_queue_core_txq(tx_queue);
tx_queue          748 drivers/net/ethernet/sfc/tx.c void efx_xmit_done(struct efx_tx_queue *tx_queue, unsigned int index)
tx_queue          751 drivers/net/ethernet/sfc/tx.c 	struct efx_nic *efx = tx_queue->efx;
tx_queue          755 drivers/net/ethernet/sfc/tx.c 	EFX_WARN_ON_ONCE_PARANOID(index > tx_queue->ptr_mask);
tx_queue          757 drivers/net/ethernet/sfc/tx.c 	efx_dequeue_buffers(tx_queue, index, &pkts_compl, &bytes_compl);
tx_queue          758 drivers/net/ethernet/sfc/tx.c 	tx_queue->pkts_compl += pkts_compl;
tx_queue          759 drivers/net/ethernet/sfc/tx.c 	tx_queue->bytes_compl += bytes_compl;
tx_queue          762 drivers/net/ethernet/sfc/tx.c 		++tx_queue->merge_events;
tx_queue          769 drivers/net/ethernet/sfc/tx.c 	if (unlikely(netif_tx_queue_stopped(tx_queue->core_txq)) &&
tx_queue          772 drivers/net/ethernet/sfc/tx.c 		txq2 = efx_tx_queue_partner(tx_queue);
tx_queue          773 drivers/net/ethernet/sfc/tx.c 		fill_level = max(tx_queue->insert_count - tx_queue->read_count,
tx_queue          776 drivers/net/ethernet/sfc/tx.c 			netif_tx_wake_queue(tx_queue->core_txq);
tx_queue          780 drivers/net/ethernet/sfc/tx.c 	if ((int)(tx_queue->read_count - tx_queue->old_write_count) >= 0) {
tx_queue          781 drivers/net/ethernet/sfc/tx.c 		tx_queue->old_write_count = READ_ONCE(tx_queue->write_count);
tx_queue          782 drivers/net/ethernet/sfc/tx.c 		if (tx_queue->read_count == tx_queue->old_write_count) {
tx_queue          784 drivers/net/ethernet/sfc/tx.c 			tx_queue->empty_read_count =
tx_queue          785 drivers/net/ethernet/sfc/tx.c 				tx_queue->read_count | EFX_EMPTY_COUNT_VALID;
tx_queue          790 drivers/net/ethernet/sfc/tx.c static unsigned int efx_tx_cb_page_count(struct efx_tx_queue *tx_queue)
tx_queue          792 drivers/net/ethernet/sfc/tx.c 	return DIV_ROUND_UP(tx_queue->ptr_mask + 1, PAGE_SIZE >> EFX_TX_CB_ORDER);
tx_queue          795 drivers/net/ethernet/sfc/tx.c int efx_probe_tx_queue(struct efx_tx_queue *tx_queue)
tx_queue          797 drivers/net/ethernet/sfc/tx.c 	struct efx_nic *efx = tx_queue->efx;
tx_queue          804 drivers/net/ethernet/sfc/tx.c 	tx_queue->ptr_mask = entries - 1;
tx_queue          808 drivers/net/ethernet/sfc/tx.c 		  tx_queue->queue, efx->txq_entries, tx_queue->ptr_mask);
tx_queue          811 drivers/net/ethernet/sfc/tx.c 	tx_queue->buffer = kcalloc(entries, sizeof(*tx_queue->buffer),
tx_queue          813 drivers/net/ethernet/sfc/tx.c 	if (!tx_queue->buffer)
tx_queue          816 drivers/net/ethernet/sfc/tx.c 	tx_queue->cb_page = kcalloc(efx_tx_cb_page_count(tx_queue),
tx_queue          817 drivers/net/ethernet/sfc/tx.c 				    sizeof(tx_queue->cb_page[0]), GFP_KERNEL);
tx_queue          818 drivers/net/ethernet/sfc/tx.c 	if (!tx_queue->cb_page) {
tx_queue          824 drivers/net/ethernet/sfc/tx.c 	rc = efx_nic_probe_tx(tx_queue);
tx_queue          831 drivers/net/ethernet/sfc/tx.c 	kfree(tx_queue->cb_page);
tx_queue          832 drivers/net/ethernet/sfc/tx.c 	tx_queue->cb_page = NULL;
tx_queue          834 drivers/net/ethernet/sfc/tx.c 	kfree(tx_queue->buffer);
tx_queue          835 drivers/net/ethernet/sfc/tx.c 	tx_queue->buffer = NULL;
tx_queue          839 drivers/net/ethernet/sfc/tx.c void efx_init_tx_queue(struct efx_tx_queue *tx_queue)
tx_queue          841 drivers/net/ethernet/sfc/tx.c 	struct efx_nic *efx = tx_queue->efx;
tx_queue          844 drivers/net/ethernet/sfc/tx.c 		  "initialising TX queue %d\n", tx_queue->queue);
tx_queue          846 drivers/net/ethernet/sfc/tx.c 	tx_queue->insert_count = 0;
tx_queue          847 drivers/net/ethernet/sfc/tx.c 	tx_queue->write_count = 0;
tx_queue          848 drivers/net/ethernet/sfc/tx.c 	tx_queue->packet_write_count = 0;
tx_queue          849 drivers/net/ethernet/sfc/tx.c 	tx_queue->old_write_count = 0;
tx_queue          850 drivers/net/ethernet/sfc/tx.c 	tx_queue->read_count = 0;
tx_queue          851 drivers/net/ethernet/sfc/tx.c 	tx_queue->old_read_count = 0;
tx_queue          852 drivers/net/ethernet/sfc/tx.c 	tx_queue->empty_read_count = 0 | EFX_EMPTY_COUNT_VALID;
tx_queue          853 drivers/net/ethernet/sfc/tx.c 	tx_queue->xmit_more_available = false;
tx_queue          854 drivers/net/ethernet/sfc/tx.c 	tx_queue->timestamping = (efx_ptp_use_mac_tx_timestamps(efx) &&
tx_queue          855 drivers/net/ethernet/sfc/tx.c 				  tx_queue->channel == efx_ptp_channel(efx));
tx_queue          856 drivers/net/ethernet/sfc/tx.c 	tx_queue->completed_desc_ptr = tx_queue->ptr_mask;
tx_queue          857 drivers/net/ethernet/sfc/tx.c 	tx_queue->completed_timestamp_major = 0;
tx_queue          858 drivers/net/ethernet/sfc/tx.c 	tx_queue->completed_timestamp_minor = 0;
tx_queue          863 drivers/net/ethernet/sfc/tx.c 	tx_queue->handle_tso = efx_enqueue_skb_tso;
tx_queue          866 drivers/net/ethernet/sfc/tx.c 	efx_nic_init_tx(tx_queue);
tx_queue          868 drivers/net/ethernet/sfc/tx.c 	tx_queue->initialised = true;
tx_queue          871 drivers/net/ethernet/sfc/tx.c void efx_fini_tx_queue(struct efx_tx_queue *tx_queue)
tx_queue          875 drivers/net/ethernet/sfc/tx.c 	netif_dbg(tx_queue->efx, drv, tx_queue->efx->net_dev,
tx_queue          876 drivers/net/ethernet/sfc/tx.c 		  "shutting down TX queue %d\n", tx_queue->queue);
tx_queue          878 drivers/net/ethernet/sfc/tx.c 	if (!tx_queue->buffer)
tx_queue          882 drivers/net/ethernet/sfc/tx.c 	while (tx_queue->read_count != tx_queue->write_count) {
tx_queue          884 drivers/net/ethernet/sfc/tx.c 		buffer = &tx_queue->buffer[tx_queue->read_count & tx_queue->ptr_mask];
tx_queue          885 drivers/net/ethernet/sfc/tx.c 		efx_dequeue_buffer(tx_queue, buffer, &pkts_compl, &bytes_compl);
tx_queue          887 drivers/net/ethernet/sfc/tx.c 		++tx_queue->read_count;
tx_queue          889 drivers/net/ethernet/sfc/tx.c 	tx_queue->xmit_more_available = false;
tx_queue          890 drivers/net/ethernet/sfc/tx.c 	netdev_tx_reset_queue(tx_queue->core_txq);
tx_queue          893 drivers/net/ethernet/sfc/tx.c void efx_remove_tx_queue(struct efx_tx_queue *tx_queue)
tx_queue          897 drivers/net/ethernet/sfc/tx.c 	if (!tx_queue->buffer)
tx_queue          900 drivers/net/ethernet/sfc/tx.c 	netif_dbg(tx_queue->efx, drv, tx_queue->efx->net_dev,
tx_queue          901 drivers/net/ethernet/sfc/tx.c 		  "destroying TX queue %d\n", tx_queue->queue);
tx_queue          902 drivers/net/ethernet/sfc/tx.c 	efx_nic_remove_tx(tx_queue);
tx_queue          904 drivers/net/ethernet/sfc/tx.c 	if (tx_queue->cb_page) {
tx_queue          905 drivers/net/ethernet/sfc/tx.c 		for (i = 0; i < efx_tx_cb_page_count(tx_queue); i++)
tx_queue          906 drivers/net/ethernet/sfc/tx.c 			efx_nic_free_buffer(tx_queue->efx,
tx_queue          907 drivers/net/ethernet/sfc/tx.c 					    &tx_queue->cb_page[i]);
tx_queue          908 drivers/net/ethernet/sfc/tx.c 		kfree(tx_queue->cb_page);
tx_queue          909 drivers/net/ethernet/sfc/tx.c 		tx_queue->cb_page = NULL;
tx_queue          912 drivers/net/ethernet/sfc/tx.c 	kfree(tx_queue->buffer);
tx_queue          913 drivers/net/ethernet/sfc/tx.c 	tx_queue->buffer = NULL;
tx_queue           15 drivers/net/ethernet/sfc/tx.h unsigned int efx_tx_limit_len(struct efx_tx_queue *tx_queue,
tx_queue           18 drivers/net/ethernet/sfc/tx.h u8 *efx_tx_get_copy_buffer_limited(struct efx_tx_queue *tx_queue,
tx_queue           21 drivers/net/ethernet/sfc/tx.h int efx_enqueue_skb_tso(struct efx_tx_queue *tx_queue, struct sk_buff *skb,
tx_queue           79 drivers/net/ethernet/sfc/tx_tso.c static inline void prefetch_ptr(struct efx_tx_queue *tx_queue)
tx_queue           81 drivers/net/ethernet/sfc/tx_tso.c 	unsigned int insert_ptr = efx_tx_queue_get_insert_index(tx_queue);
tx_queue           84 drivers/net/ethernet/sfc/tx_tso.c 	ptr = (char *) (tx_queue->buffer + insert_ptr);
tx_queue           88 drivers/net/ethernet/sfc/tx_tso.c 	ptr = (char *) (((efx_qword_t *)tx_queue->txd.buf.addr) + insert_ptr);
tx_queue          102 drivers/net/ethernet/sfc/tx_tso.c static void efx_tx_queue_insert(struct efx_tx_queue *tx_queue,
tx_queue          112 drivers/net/ethernet/sfc/tx_tso.c 		buffer = efx_tx_queue_get_insert_buffer(tx_queue);
tx_queue          113 drivers/net/ethernet/sfc/tx_tso.c 		++tx_queue->insert_count;
tx_queue          115 drivers/net/ethernet/sfc/tx_tso.c 		EFX_WARN_ON_ONCE_PARANOID(tx_queue->insert_count -
tx_queue          116 drivers/net/ethernet/sfc/tx_tso.c 					  tx_queue->read_count >=
tx_queue          117 drivers/net/ethernet/sfc/tx_tso.c 					  tx_queue->efx->txq_entries);
tx_queue          121 drivers/net/ethernet/sfc/tx_tso.c 		dma_len = tx_queue->efx->type->tx_limit_len(tx_queue,
tx_queue          170 drivers/net/ethernet/sfc/tx_tso.c 		     struct efx_tx_queue *tx_queue,
tx_queue          232 drivers/net/ethernet/sfc/tx_tso.c static void tso_fill_packet_with_fragment(struct efx_tx_queue *tx_queue,
tx_queue          253 drivers/net/ethernet/sfc/tx_tso.c 	efx_tx_queue_insert(tx_queue, st->dma_addr, n, &buffer);
tx_queue          285 drivers/net/ethernet/sfc/tx_tso.c static int tso_start_new_packet(struct efx_tx_queue *tx_queue,
tx_queue          290 drivers/net/ethernet/sfc/tx_tso.c 		efx_tx_queue_get_insert_buffer(tx_queue);
tx_queue          319 drivers/net/ethernet/sfc/tx_tso.c 	++tx_queue->insert_count;
tx_queue          324 drivers/net/ethernet/sfc/tx_tso.c 	buffer = efx_tx_queue_get_insert_buffer(tx_queue);
tx_queue          339 drivers/net/ethernet/sfc/tx_tso.c 	++tx_queue->insert_count;
tx_queue          362 drivers/net/ethernet/sfc/tx_tso.c int efx_enqueue_skb_tso(struct efx_tx_queue *tx_queue,
tx_queue          366 drivers/net/ethernet/sfc/tx_tso.c 	struct efx_nic *efx = tx_queue->efx;
tx_queue          370 drivers/net/ethernet/sfc/tx_tso.c 	if (tx_queue->tso_version != 1)
tx_queue          378 drivers/net/ethernet/sfc/tx_tso.c 	EFX_WARN_ON_ONCE_PARANOID(tx_queue->write_count != tx_queue->insert_count);
tx_queue          380 drivers/net/ethernet/sfc/tx_tso.c 	rc = tso_start(&state, efx, tx_queue, skb);
tx_queue          397 drivers/net/ethernet/sfc/tx_tso.c 	rc = tso_start_new_packet(tx_queue, skb, &state);
tx_queue          401 drivers/net/ethernet/sfc/tx_tso.c 	prefetch_ptr(tx_queue);
tx_queue          404 drivers/net/ethernet/sfc/tx_tso.c 		tso_fill_packet_with_fragment(tx_queue, skb, &state);
tx_queue          419 drivers/net/ethernet/sfc/tx_tso.c 			rc = tso_start_new_packet(tx_queue, skb, &state);
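The sfc tx.c entries above compute fill_level as the unsigned difference of free-running insert_count/read_count counters and size the ring as a power of two with ptr_mask = entries - 1. Below is a minimal stand-alone sketch of that counting scheme only; struct ring, ring_push and ring_pop are hypothetical names for illustration, not driver code.

/*
 * Sketch of a power-of-two ring indexed by free-running unsigned counters,
 * as the sfc entries above use: fill level is insert_count - read_count and
 * slots are addressed with (counter & ptr_mask).
 */
#include <assert.h>
#include <stdio.h>

struct ring {
	unsigned int insert_count;	/* only the producer increments this */
	unsigned int read_count;	/* only the consumer increments this */
	unsigned int ptr_mask;		/* entries - 1, entries a power of two */
	int slot[8];
};

static unsigned int ring_fill_level(const struct ring *r)
{
	/* Wrap-safe: both counters are unsigned and only ever increase. */
	return r->insert_count - r->read_count;
}

static void ring_push(struct ring *r, int v)
{
	assert(ring_fill_level(r) <= r->ptr_mask);	/* at least one free slot */
	r->slot[r->insert_count & r->ptr_mask] = v;
	r->insert_count++;
}

static int ring_pop(struct ring *r)
{
	assert(ring_fill_level(r) > 0);
	return r->slot[r->read_count++ & r->ptr_mask];
}

int main(void)
{
	struct ring r = { .ptr_mask = 8 - 1 };

	ring_push(&r, 42);
	printf("fill after push: %u\n", ring_fill_level(&r));
	printf("popped: %d\n", ring_pop(&r));
	printf("fill after pop:  %u\n", ring_fill_level(&r));
	return 0;
}

Because the counters are never masked until they are used as array indices, the subtraction stays correct across 32-bit wrap-around, which is why the driver can compare raw counter values directly.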
tx_queue          170 drivers/net/ethernet/stmicro/stmmac/stmmac.h 	struct stmmac_tx_queue tx_queue[MTL_MAX_TX_QUEUES];
tx_queue          293 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	struct stmmac_tx_queue *tx_q = &priv->tx_queue[queue];
tx_queue          335 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		struct stmmac_tx_queue *tx_q = &priv->tx_queue[queue];
tx_queue         1087 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		struct stmmac_tx_queue *tx_q = &priv->tx_queue[queue];
tx_queue         1162 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	struct stmmac_tx_queue *tx_q = &priv->tx_queue[queue];
tx_queue         1263 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	struct stmmac_tx_queue *tx_q = &priv->tx_queue[queue];
tx_queue         1375 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		struct stmmac_tx_queue *tx_q = &priv->tx_queue[queue];
tx_queue         1513 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		struct stmmac_tx_queue *tx_q = &priv->tx_queue[queue];
tx_queue         1618 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		struct stmmac_tx_queue *tx_q = &priv->tx_queue[queue];
tx_queue         1872 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	struct stmmac_tx_queue *tx_q = &priv->tx_queue[queue];
tx_queue         1985 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	struct stmmac_tx_queue *tx_q = &priv->tx_queue[chan];
tx_queue         2232 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		tx_q = &priv->tx_queue[chan];
tx_queue         2247 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	struct stmmac_tx_queue *tx_q = &priv->tx_queue[queue];
tx_queue         2294 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		struct stmmac_tx_queue *tx_q = &priv->tx_queue[chan];
tx_queue         2753 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		del_timer_sync(&priv->tx_queue[chan].txtimer);
tx_queue         2786 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		del_timer_sync(&priv->tx_queue[chan].txtimer);
tx_queue         2852 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	struct stmmac_tx_queue *tx_q = &priv->tx_queue[queue];
tx_queue         2926 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	tx_q = &priv->tx_queue[queue];
tx_queue         3142 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	tx_q = &priv->tx_queue[queue];
tx_queue         3699 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 	tx_q = &priv->tx_queue[chan];
tx_queue         4091 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		struct stmmac_tx_queue *tx_q = &priv->tx_queue[queue];
tx_queue         4778 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		del_timer_sync(&priv->tx_queue[chan].txtimer);
tx_queue         4827 drivers/net/ethernet/stmicro/stmmac/stmmac_main.c 		struct stmmac_tx_queue *tx_q = &priv->tx_queue[queue];
tx_queue          225 drivers/net/fddi/skfp/h/supern_2.h 	struct tx_queue *tq_next ;
tx_queue          307 drivers/net/hamradio/scc.c 	while (!skb_queue_empty(&scc->tx_queue))
tx_queue          308 drivers/net/hamradio/scc.c 		dev_kfree_skb(skb_dequeue(&scc->tx_queue));
tx_queue          378 drivers/net/hamradio/scc.c 		skb = skb_dequeue(&scc->tx_queue);
tx_queue         1132 drivers/net/hamradio/scc.c 		if (skb_queue_empty(&scc->tx_queue)) {	/* nothing to send */
tx_queue         1587 drivers/net/hamradio/scc.c 	skb_queue_head_init(&scc->tx_queue);
tx_queue         1667 drivers/net/hamradio/scc.c 	if (skb_queue_len(&scc->tx_queue) > scc->dev->tx_queue_len) {
tx_queue         1669 drivers/net/hamradio/scc.c 		skb_del = skb_dequeue(&scc->tx_queue);
tx_queue         1672 drivers/net/hamradio/scc.c 	skb_queue_tail(&scc->tx_queue, skb);
tx_queue         1890 drivers/net/hamradio/scc.c 			skb_queue_head_init(&scc->tx_queue);
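The hamradio scc.c entries above enqueue with skb_queue_tail but first drop the oldest buffer once the soft queue exceeds dev->tx_queue_len. A hedged kernel-context sketch of that drop-oldest policy follows; demo_chan and demo_enqueue_bounded are hypothetical names, and the sketch only illustrates the pattern, not the driver itself.

/*
 * Bounded soft queue: if it is already at the device's tx_queue_len limit,
 * free the oldest skb before queueing the new one.
 */
#include <linux/netdevice.h>
#include <linux/skbuff.h>

struct demo_chan {			/* hypothetical per-channel state */
	struct sk_buff_head tx_queue;	/* assumed initialised with skb_queue_head_init() */
	struct net_device *dev;
};

static void demo_enqueue_bounded(struct demo_chan *ch, struct sk_buff *skb)
{
	if (skb_queue_len(&ch->tx_queue) > ch->dev->tx_queue_len) {
		struct sk_buff *oldest = skb_dequeue(&ch->tx_queue);

		dev_kfree_skb(oldest);		/* drop the stalest packet */
		ch->dev->stats.tx_dropped++;	/* account for the drop */
	}
	skb_queue_tail(&ch->tx_queue, skb);
}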
tx_queue          118 drivers/net/phy/dp83640.c 	struct sk_buff_head tx_queue;
tx_queue          911 drivers/net/phy/dp83640.c 	skb = skb_dequeue(&dp83640->tx_queue);
tx_queue          922 drivers/net/phy/dp83640.c 			skb = skb_dequeue(&dp83640->tx_queue);
tx_queue         1163 drivers/net/phy/dp83640.c 	skb_queue_head_init(&dp83640->tx_queue);
tx_queue         1203 drivers/net/phy/dp83640.c 	skb_queue_purge(&dp83640->tx_queue);
tx_queue         1497 drivers/net/phy/dp83640.c 		skb_queue_tail(&dp83640->tx_queue, skb);
tx_queue           34 drivers/net/usb/cdc-phonet.c 	unsigned		tx_queue;
tx_queue           71 drivers/net/usb/cdc-phonet.c 	pnd->tx_queue++;
tx_queue           72 drivers/net/usb/cdc-phonet.c 	if (pnd->tx_queue >= dev->tx_queue_len)
tx_queue          108 drivers/net/usb/cdc-phonet.c 	pnd->tx_queue--;
tx_queue          742 drivers/net/usb/r8152.c 	struct sk_buff_head tx_queue, rx_queue;
tx_queue         1448 drivers/net/usb/r8152.c 	if (!skb_queue_empty(&tp->tx_queue))
tx_queue         1623 drivers/net/usb/r8152.c 	skb_queue_head_init(&tp->tx_queue);
tx_queue         1875 drivers/net/usb/r8152.c 	struct sk_buff_head skb_head, *tx_queue = &tp->tx_queue;
tx_queue         1880 drivers/net/usb/r8152.c 	spin_lock(&tx_queue->lock);
tx_queue         1881 drivers/net/usb/r8152.c 	skb_queue_splice_init(tx_queue, &skb_head);
tx_queue         1882 drivers/net/usb/r8152.c 	spin_unlock(&tx_queue->lock);
tx_queue         1943 drivers/net/usb/r8152.c 		spin_lock(&tx_queue->lock);
tx_queue         1944 drivers/net/usb/r8152.c 		skb_queue_splice(&skb_head, tx_queue);
tx_queue         1945 drivers/net/usb/r8152.c 		spin_unlock(&tx_queue->lock);
tx_queue         1951 drivers/net/usb/r8152.c 	    skb_queue_len(&tp->tx_queue) < tp->tx_qlen)
tx_queue         2197 drivers/net/usb/r8152.c 		if (skb_queue_empty(&tp->tx_queue))
tx_queue         2306 drivers/net/usb/r8152.c 	struct sk_buff_head skb_head, *tx_queue = &tp->tx_queue;
tx_queue         2309 drivers/net/usb/r8152.c 	if (skb_queue_empty(tx_queue))
tx_queue         2313 drivers/net/usb/r8152.c 	spin_lock_bh(&tx_queue->lock);
tx_queue         2314 drivers/net/usb/r8152.c 	skb_queue_splice_init(tx_queue, &skb_head);
tx_queue         2315 drivers/net/usb/r8152.c 	spin_unlock_bh(&tx_queue->lock);
tx_queue         2410 drivers/net/usb/r8152.c 	skb_queue_tail(&tp->tx_queue, skb);
tx_queue         2420 drivers/net/usb/r8152.c 	} else if (skb_queue_len(&tp->tx_queue) > tp->tx_qlen) {
tx_queue         4170 drivers/net/usb/r8152.c 			   skb_queue_len(&tp->tx_queue) < tp->tx_qlen) {
tx_queue         4698 drivers/net/usb/r8152.c 	else if (!skb_queue_empty(&tp->tx_queue))
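The r8152 entries above drain tp->tx_queue by splicing it onto a private on-stack list while holding the queue's own lock, then handing out the packets with the lock released. A kernel-context sketch of that splice-and-drain idiom is shown below; demo_priv and demo_drain_tx are hypothetical names, and dev_kfree_skb stands in for the real transmit path.

/*
 * Splice the shared tx_queue onto a private list under the queue lock,
 * then walk the private list with the lock dropped.
 */
#include <linux/skbuff.h>
#include <linux/spinlock.h>

struct demo_priv {			/* hypothetical driver private struct */
	struct sk_buff_head tx_queue;	/* assumed initialised with skb_queue_head_init() */
};

static void demo_drain_tx(struct demo_priv *priv)
{
	struct sk_buff_head local;
	struct sk_buff *skb;

	__skb_queue_head_init(&local);	/* on-stack list, no lock needed */

	spin_lock_bh(&priv->tx_queue.lock);
	skb_queue_splice_init(&priv->tx_queue, &local);
	spin_unlock_bh(&priv->tx_queue.lock);

	/* The shared lock is no longer held while each packet is handled. */
	while ((skb = __skb_dequeue(&local)) != NULL)
		dev_kfree_skb(skb);	/* a real driver would hand skb to the hardware here */
}

Splicing is O(1), so the lock hold time stays constant no matter how many packets are queued; the per-packet work then runs without contending with the enqueue path.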
tx_queue          113 drivers/net/vmxnet3/vmxnet3_drv.c 	netif_start_subqueue(adapter->netdev, tq - adapter->tx_queue);
tx_queue          121 drivers/net/vmxnet3/vmxnet3_drv.c 	netif_wake_subqueue(adapter->netdev, (tq - adapter->tx_queue));
tx_queue          130 drivers/net/vmxnet3/vmxnet3_drv.c 	netif_stop_subqueue(adapter->netdev, (tq - adapter->tx_queue));
tx_queue          157 drivers/net/vmxnet3/vmxnet3_drv.c 				vmxnet3_tq_start(&adapter->tx_queue[i],
tx_queue          166 drivers/net/vmxnet3/vmxnet3_drv.c 				vmxnet3_tq_stop(&adapter->tx_queue[i], adapter);
tx_queue          470 drivers/net/vmxnet3/vmxnet3_drv.c 		vmxnet3_tq_destroy(&adapter->tx_queue[i], adapter);
tx_queue          556 drivers/net/vmxnet3/vmxnet3_drv.c 		vmxnet3_tq_cleanup(&adapter->tx_queue[i], adapter);
tx_queue          816 drivers/net/vmxnet3/vmxnet3_drv.c 		vmxnet3_tq_init(&adapter->tx_queue[i], adapter);
tx_queue         1159 drivers/net/vmxnet3/vmxnet3_drv.c 			       &adapter->tx_queue[skb->queue_mapping],
tx_queue         1869 drivers/net/vmxnet3/vmxnet3_drv.c 		vmxnet3_tq_tx_complete(&adapter->tx_queue[i], adapter);
tx_queue         1912 drivers/net/vmxnet3/vmxnet3_drv.c 				&adapter->tx_queue[rq - adapter->rx_queue];
tx_queue         1946 drivers/net/vmxnet3/vmxnet3_drv.c 			struct vmxnet3_tx_queue *txq = &adapter->tx_queue[i];
tx_queue         2072 drivers/net/vmxnet3/vmxnet3_drv.c 				sprintf(adapter->tx_queue[i].name, "%s-tx-%d",
tx_queue         2077 drivers/net/vmxnet3/vmxnet3_drv.c 					      adapter->tx_queue[i].name,
tx_queue         2078 drivers/net/vmxnet3/vmxnet3_drv.c 					      &adapter->tx_queue[i]);
tx_queue         2080 drivers/net/vmxnet3/vmxnet3_drv.c 				sprintf(adapter->tx_queue[i].name, "%s-rxtx-%d",
tx_queue         2087 drivers/net/vmxnet3/vmxnet3_drv.c 					adapter->tx_queue[i].name, err);
tx_queue         2095 drivers/net/vmxnet3/vmxnet3_drv.c 					adapter->tx_queue[i].comp_ring.intr_idx
tx_queue         2100 drivers/net/vmxnet3/vmxnet3_drv.c 				adapter->tx_queue[i].comp_ring.intr_idx
tx_queue         2169 drivers/net/vmxnet3/vmxnet3_drv.c 				adapter->tx_queue[i].comp_ring.intr_idx = 0;
tx_queue         2197 drivers/net/vmxnet3/vmxnet3_drv.c 					 &(adapter->tx_queue[i]));
tx_queue         2444 drivers/net/vmxnet3/vmxnet3_drv.c 		struct vmxnet3_tx_queue	*tq = &adapter->tx_queue[i];
tx_queue         2445 drivers/net/vmxnet3/vmxnet3_drv.c 		BUG_ON(adapter->tx_queue[i].tx_ring.base == NULL);
tx_queue         2570 drivers/net/vmxnet3/vmxnet3_drv.c 		adapter->tx_queue[0].tx_ring.size,
tx_queue         2827 drivers/net/vmxnet3/vmxnet3_drv.c 		struct vmxnet3_tx_queue	*tq = &adapter->tx_queue[i];
tx_queue         2893 drivers/net/vmxnet3/vmxnet3_drv.c 		spin_lock_init(&adapter->tx_queue[i].tx_lock);
tx_queue          137 drivers/net/vmxnet3/vmxnet3_ethtool.c 		drvTxStats = &adapter->tx_queue[i].stats;
tx_queue          330 drivers/net/vmxnet3/vmxnet3_ethtool.c 		base = (u8 *)&adapter->tx_queue[j].stats;
tx_queue          392 drivers/net/vmxnet3/vmxnet3_ethtool.c 		struct vmxnet3_tx_queue *tq = &adapter->tx_queue[i];
tx_queue          331 drivers/net/vmxnet3/vmxnet3_int.h 	struct vmxnet3_tx_queue		tx_queue[VMXNET3_DEVICE_MAX_TX_QUEUES];
tx_queue          100 drivers/net/wan/hdlc_ppp.c static struct sk_buff_head tx_queue; /* used when holding the spin lock */
tx_queue          200 drivers/net/wan/hdlc_ppp.c 	while ((skb = skb_dequeue(&tx_queue)) != NULL)
tx_queue          255 drivers/net/wan/hdlc_ppp.c 	skb_queue_tail(&tx_queue, skb);
tx_queue          699 drivers/net/wan/hdlc_ppp.c 	skb_queue_head_init(&tx_queue);
tx_queue          132 drivers/net/wireless/ath/ath5k/dma.c 	u32 tx_queue;
tx_queue          141 drivers/net/wireless/ath/ath5k/dma.c 		tx_queue = ath5k_hw_reg_read(ah, AR5K_CR);
tx_queue          148 drivers/net/wireless/ath/ath5k/dma.c 			tx_queue |= AR5K_CR_TXE0 & ~AR5K_CR_TXD0;
tx_queue          151 drivers/net/wireless/ath/ath5k/dma.c 			tx_queue |= AR5K_CR_TXE1 & ~AR5K_CR_TXD1;
tx_queue          156 drivers/net/wireless/ath/ath5k/dma.c 			tx_queue |= AR5K_CR_TXE1 & ~AR5K_CR_TXD1;
tx_queue          164 drivers/net/wireless/ath/ath5k/dma.c 		ath5k_hw_reg_write(ah, tx_queue, AR5K_CR);
tx_queue          191 drivers/net/wireless/ath/ath5k/dma.c 	u32 tx_queue, pending;
tx_queue          200 drivers/net/wireless/ath/ath5k/dma.c 		tx_queue = ath5k_hw_reg_read(ah, AR5K_CR);
tx_queue          207 drivers/net/wireless/ath/ath5k/dma.c 			tx_queue |= AR5K_CR_TXD0 & ~AR5K_CR_TXE0;
tx_queue          212 drivers/net/wireless/ath/ath5k/dma.c 			tx_queue |= AR5K_CR_TXD1 & ~AR5K_CR_TXD1;
tx_queue          220 drivers/net/wireless/ath/ath5k/dma.c 		ath5k_hw_reg_write(ah, tx_queue, AR5K_CR);
tx_queue          961 drivers/net/wireless/broadcom/b43/b43.h 	struct sk_buff_head tx_queue[B43_QOS_QUEUE_NUM];
tx_queue         3596 drivers/net/wireless/broadcom/b43/main.c 		while (skb_queue_len(&wl->tx_queue[queue_num])) {
tx_queue         3597 drivers/net/wireless/broadcom/b43/main.c 			skb = skb_dequeue(&wl->tx_queue[queue_num]);
tx_queue         3605 drivers/net/wireless/broadcom/b43/main.c 				skb_queue_head(&wl->tx_queue[queue_num], skb);
tx_queue         3636 drivers/net/wireless/broadcom/b43/main.c 	skb_queue_tail(&wl->tx_queue[skb->queue_mapping], skb);
tx_queue         4378 drivers/net/wireless/broadcom/b43/main.c 		while (skb_queue_len(&wl->tx_queue[queue_num])) {
tx_queue         4381 drivers/net/wireless/broadcom/b43/main.c 			skb = skb_dequeue(&wl->tx_queue[queue_num]);
tx_queue         5605 drivers/net/wireless/broadcom/b43/main.c 		skb_queue_head_init(&wl->tx_queue[queue_num]);
tx_queue          634 drivers/net/wireless/broadcom/b43legacy/b43legacy.h 	struct sk_buff_head tx_queue[B43legacy_QOS_QUEUE_NUM];
tx_queue         2462 drivers/net/wireless/broadcom/b43legacy/main.c 		while (skb_queue_len(&wl->tx_queue[queue_num])) {
tx_queue         2463 drivers/net/wireless/broadcom/b43legacy/main.c 			skb = skb_dequeue(&wl->tx_queue[queue_num]);
tx_queue         2471 drivers/net/wireless/broadcom/b43legacy/main.c 				skb_queue_head(&wl->tx_queue[queue_num], skb);
tx_queue         2499 drivers/net/wireless/broadcom/b43legacy/main.c 	skb_queue_tail(&wl->tx_queue[skb->queue_mapping], skb);
tx_queue         2941 drivers/net/wireless/broadcom/b43legacy/main.c 		while (skb_queue_len(&wl->tx_queue[queue_num]))
tx_queue         2942 drivers/net/wireless/broadcom/b43legacy/main.c 			dev_kfree_skb(skb_dequeue(&wl->tx_queue[queue_num]));
tx_queue         3835 drivers/net/wireless/broadcom/b43legacy/main.c 		skb_queue_head_init(&wl->tx_queue[queue_num]);
tx_queue         2808 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	struct ipw2100_bd_queue *txq = &priv->tx_queue;
tx_queue         3000 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	struct ipw2100_bd_queue *txq = &priv->tx_queue;
tx_queue         3069 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	struct ipw2100_bd_queue *txq = &priv->tx_queue;
tx_queue         4423 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	err = bd_queue_allocate(priv, &priv->tx_queue, TX_QUEUE_LENGTH);
tx_queue         4434 drivers/net/wireless/intel/ipw2x00/ipw2100.c 		bd_queue_free(priv, &priv->tx_queue);
tx_queue         4508 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	priv->tx_queue.oldest = 0;
tx_queue         4509 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	priv->tx_queue.available = priv->tx_queue.entries;
tx_queue         4510 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	priv->tx_queue.next = 0;
tx_queue         4512 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	SET_STAT(&priv->txq_stat, priv->tx_queue.available);
tx_queue         4514 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	bd_queue_initialize(priv, &priv->tx_queue,
tx_queue         4530 drivers/net/wireless/intel/ipw2x00/ipw2100.c 	bd_queue_free(priv, &priv->tx_queue);
tx_queue          530 drivers/net/wireless/intel/ipw2x00/ipw2100.h 	struct ipw2100_bd_queue tx_queue;
tx_queue         1295 drivers/net/wireless/intel/iwlegacy/debug.c DEBUGFS_READ_FILE_OPS(tx_queue);
tx_queue         1339 drivers/net/wireless/intel/iwlegacy/debug.c 	DEBUGFS_ADD_FILE(tx_queue, dir_debug, 0400);
tx_queue          644 drivers/net/wireless/intel/iwlwifi/fw/api/tx.h 	__le16 tx_queue;
tx_queue         1464 drivers/net/wireless/intel/iwlwifi/mvm/tx.c 		txq_id = le16_to_cpu(tx_resp->tx_queue);
tx_queue         2915 drivers/net/wireless/intel/iwlwifi/pcie/trans.c DEBUGFS_READ_FILE_OPS(tx_queue);
tx_queue         2931 drivers/net/wireless/intel/iwlwifi/pcie/trans.c 	DEBUGFS_ADD_FILE(tx_queue, dir, 0400);
tx_queue          212 drivers/net/wireless/intersil/p54/main.c 	skb_queue_purge(&priv->tx_queue);
tx_queue          741 drivers/net/wireless/intersil/p54/main.c 	skb_queue_head_init(&priv->tx_queue);
tx_queue          171 drivers/net/wireless/intersil/p54/p54.h 	struct sk_buff_head tx_queue;
tx_queue           38 drivers/net/wireless/intersil/p54/txrx.c 	spin_lock_irqsave(&priv->tx_queue.lock, flags);
tx_queue           40 drivers/net/wireless/intersil/p54/txrx.c 		    skb_queue_len(&priv->tx_queue));
tx_queue           43 drivers/net/wireless/intersil/p54/txrx.c 	skb_queue_walk(&priv->tx_queue, skb) {
tx_queue           66 drivers/net/wireless/intersil/p54/txrx.c 	spin_unlock_irqrestore(&priv->tx_queue.lock, flags);
tx_queue           94 drivers/net/wireless/intersil/p54/txrx.c 	spin_lock_irqsave(&priv->tx_queue.lock, flags);
tx_queue           95 drivers/net/wireless/intersil/p54/txrx.c 	if (unlikely(skb_queue_len(&priv->tx_queue) == 32)) {
tx_queue          101 drivers/net/wireless/intersil/p54/txrx.c 		spin_unlock_irqrestore(&priv->tx_queue.lock, flags);
tx_queue          105 drivers/net/wireless/intersil/p54/txrx.c 	skb_queue_walk(&priv->tx_queue, entry) {
tx_queue          121 drivers/net/wireless/intersil/p54/txrx.c 			target_skb = skb_peek_tail(&priv->tx_queue);
tx_queue          128 drivers/net/wireless/intersil/p54/txrx.c 			spin_unlock_irqrestore(&priv->tx_queue.lock, flags);
tx_queue          143 drivers/net/wireless/intersil/p54/txrx.c 		__skb_queue_after(&priv->tx_queue, target_skb, skb);
tx_queue          145 drivers/net/wireless/intersil/p54/txrx.c 		__skb_queue_head(&priv->tx_queue, skb);
tx_queue          146 drivers/net/wireless/intersil/p54/txrx.c 	spin_unlock_irqrestore(&priv->tx_queue.lock, flags);
tx_queue          242 drivers/net/wireless/intersil/p54/txrx.c 	skb_unlink(skb, &priv->tx_queue);
tx_queue          254 drivers/net/wireless/intersil/p54/txrx.c 	spin_lock_irqsave(&priv->tx_queue.lock, flags);
tx_queue          255 drivers/net/wireless/intersil/p54/txrx.c 	skb_queue_walk(&priv->tx_queue, entry) {
tx_queue          259 drivers/net/wireless/intersil/p54/txrx.c 			__skb_unlink(entry, &priv->tx_queue);
tx_queue          260 drivers/net/wireless/intersil/p54/txrx.c 			spin_unlock_irqrestore(&priv->tx_queue.lock, flags);
tx_queue          265 drivers/net/wireless/intersil/p54/txrx.c 	spin_unlock_irqrestore(&priv->tx_queue.lock, flags);
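The p54 txrx.c entries above search tx_queue while holding its lock and unlink the matching entry with __skb_unlink. A sketch of that locked-walk idiom under assumed conventions follows: the match key stashed in skb->cb and the demo_find_and_unlink helper are hypothetical, used only to show the locking shape.

/*
 * Walk an sk_buff_head under its own lock and unlink a matching entry
 * with the __ variant, since the lock is already held.
 */
#include <linux/skbuff.h>
#include <linux/spinlock.h>

static struct sk_buff *demo_find_and_unlink(struct sk_buff_head *q, u32 wanted)
{
	struct sk_buff *entry, *found = NULL;
	unsigned long flags;

	spin_lock_irqsave(&q->lock, flags);
	skb_queue_walk(q, entry) {
		if (*(u32 *)entry->cb == wanted) {	/* hypothetical key in cb[] */
			__skb_unlink(entry, q);		/* queue lock already held */
			found = entry;
			break;				/* stop walking after the unlink */
		}
	}
	spin_unlock_irqrestore(&q->lock, flags);

	return found;
}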
tx_queue          482 drivers/net/wireless/ralink/rt2x00/rt2800mmio.c 	bool tx_queue = false;
tx_queue          490 drivers/net/wireless/ralink/rt2x00/rt2800mmio.c 		tx_queue = true;
tx_queue          511 drivers/net/wireless/ralink/rt2x00/rt2800mmio.c 		if (tx_queue)
tx_queue          989 drivers/net/wireless/ralink/rt2x00/rt2x00queue.c 	bool tx_queue =
tx_queue         1004 drivers/net/wireless/ralink/rt2x00/rt2x00queue.c 	if (!drop && tx_queue)
tx_queue           36 drivers/net/wireless/rsi/rsi_91x_core.c 		q_len = skb_queue_len(&common->tx_queue[ii]);
tx_queue           60 drivers/net/wireless/rsi/rsi_91x_core.c 		q_len = skb_queue_len(&common->tx_queue[ii]);
tx_queue          106 drivers/net/wireless/rsi/rsi_91x_core.c 	if (skb_queue_len(&common->tx_queue[q_num]))
tx_queue          107 drivers/net/wireless/rsi/rsi_91x_core.c 		skb = skb_peek(&common->tx_queue[q_num]);
tx_queue          121 drivers/net/wireless/rsi/rsi_91x_core.c 		if (skb_queue_len(&common->tx_queue[q_num]) - pkt_cnt)
tx_queue          145 drivers/net/wireless/rsi/rsi_91x_core.c 	if (skb_queue_len(&common->tx_queue[MGMT_BEACON_Q])) {
tx_queue          149 drivers/net/wireless/rsi/rsi_91x_core.c 	if (skb_queue_len(&common->tx_queue[MGMT_SOFT_Q])) {
tx_queue          172 drivers/net/wireless/rsi/rsi_91x_core.c 		q_len = skb_queue_len(&common->tx_queue[ii]);
tx_queue          187 drivers/net/wireless/rsi/rsi_91x_core.c 	q_len = skb_queue_len(&common->tx_queue[q_num]);
tx_queue          201 drivers/net/wireless/rsi/rsi_91x_core.c 	q_len = skb_queue_len(&common->tx_queue[q_num]);
tx_queue          230 drivers/net/wireless/rsi/rsi_91x_core.c 	skb_queue_tail(&common->tx_queue[q_num], skb);
tx_queue          250 drivers/net/wireless/rsi/rsi_91x_core.c 	return skb_dequeue(&common->tx_queue[q_num]);
tx_queue          292 drivers/net/wireless/rsi/rsi_91x_core.c 		    ((skb_queue_len(&common->tx_queue[q_num])) <=
tx_queue          478 drivers/net/wireless/rsi/rsi_91x_core.c 	    ((skb_queue_len(&common->tx_queue[q_num]) + 1) >=
tx_queue          146 drivers/net/wireless/rsi/rsi_91x_debugfs.c 		   skb_queue_len(&common->tx_queue[MGMT_SOFT_Q]));
tx_queue          154 drivers/net/wireless/rsi/rsi_91x_debugfs.c 		   skb_queue_len(&common->tx_queue[VO_Q]));
tx_queue          160 drivers/net/wireless/rsi/rsi_91x_debugfs.c 		   skb_queue_len(&common->tx_queue[VI_Q]));
tx_queue          166 drivers/net/wireless/rsi/rsi_91x_debugfs.c 		   skb_queue_len(&common->tx_queue[BE_Q]));
tx_queue          172 drivers/net/wireless/rsi/rsi_91x_debugfs.c 		   skb_queue_len(&common->tx_queue[BK_Q]));
tx_queue          308 drivers/net/wireless/rsi/rsi_91x_main.c 		skb_queue_head_init(&common->tx_queue[ii]);
tx_queue          392 drivers/net/wireless/rsi/rsi_91x_main.c 		skb_queue_purge(&common->tx_queue[ii]);
tx_queue          358 drivers/net/wireless/rsi/rsi_91x_mgmt.c 	skb_queue_tail(&common->tx_queue[MGMT_SOFT_Q], skb);
tx_queue         1762 drivers/net/wireless/rsi/rsi_91x_mgmt.c 	skb_queue_tail(&common->tx_queue[MGMT_BEACON_Q], skb);
tx_queue          904 drivers/net/wireless/rsi/rsi_91x_sdio.c 		skb_queue_purge(&adapter->priv->tx_queue[ii]);
tx_queue          242 drivers/net/wireless/rsi/rsi_main.h 	struct sk_buff_head tx_queue[NUM_EDCA_QUEUES + 2];
tx_queue          489 drivers/net/wireless/st/cw1200/bh.c 						&priv->tx_queue[i],
tx_queue          130 drivers/net/wireless/st/cw1200/cw1200.h 	struct cw1200_queue		tx_queue[4];
tx_queue          207 drivers/net/wireless/st/cw1200/debug.c 		cw1200_queue_status_show(seq, &priv->tx_queue[i]);
tx_queue          389 drivers/net/wireless/st/cw1200/main.c 		if (cw1200_queue_init(&priv->tx_queue[i],
tx_queue          393 drivers/net/wireless/st/cw1200/main.c 				cw1200_queue_deinit(&priv->tx_queue[i - 1]);
tx_queue          472 drivers/net/wireless/st/cw1200/main.c 		cw1200_queue_deinit(&priv->tx_queue[i]);
tx_queue          361 drivers/net/wireless/st/cw1200/scan.c 	struct cw1200_queue *queue = &priv->tx_queue[queue_id];
tx_queue          131 drivers/net/wireless/st/cw1200/sta.c 		cw1200_queue_clear(&priv->tx_queue[i]);
tx_queue          846 drivers/net/wireless/st/cw1200/sta.c 	struct cw1200_queue *queue = &priv->tx_queue[queue_id];
tx_queue          901 drivers/net/wireless/st/cw1200/sta.c 				cw1200_queue_clear(&priv->tx_queue[i]);
tx_queue           34 drivers/net/wireless/st/cw1200/txrx.c 		cw1200_queue_lock(&priv->tx_queue[i]);
tx_queue           41 drivers/net/wireless/st/cw1200/txrx.c 		cw1200_queue_unlock(&priv->tx_queue[i]);
tx_queue          771 drivers/net/wireless/st/cw1200/txrx.c 		BUG_ON(cw1200_queue_put(&priv->tx_queue[t.queue],
tx_queue          836 drivers/net/wireless/st/cw1200/txrx.c 		if (cw1200_queue_get_num_queued(&priv->tx_queue[i],
tx_queue          855 drivers/net/wireless/st/cw1200/txrx.c 	struct cw1200_queue *queue = &priv->tx_queue[queue_id];
tx_queue         1203 drivers/net/wireless/st/cw1200/wsm.c 					&priv->tx_queue[i],
tx_queue         1591 drivers/net/wireless/st/cw1200/wsm.c 		queued = cw1200_queue_get_num_queued(&priv->tx_queue[i],
tx_queue         1610 drivers/net/wireless/st/cw1200/wsm.c 		    &priv->tx_queue[winner],
tx_queue         1613 drivers/net/wireless/st/cw1200/wsm.c 		    &priv->tx_queue[priv->tx_burst_idx],
tx_queue         1655 drivers/net/wireless/st/cw1200/wsm.c 	*queue_p = &priv->tx_queue[idx];
tx_queue         1693 drivers/net/wireless/st/cw1200/wsm.c 			queue_num = queue - priv->tx_queue;
tx_queue          205 drivers/net/wireless/ti/wl1251/debugfs.c 	queue_len = skb_queue_len(&wl->tx_queue);
tx_queue          345 drivers/net/wireless/ti/wl1251/main.c 	skb_queue_tail(&wl->tx_queue, skb);
tx_queue          358 drivers/net/wireless/ti/wl1251/main.c 	if (skb_queue_len(&wl->tx_queue) >= WL1251_TX_QUEUE_HIGH_WATERMARK) {
tx_queue         1588 drivers/net/wireless/ti/wl1251/main.c 	skb_queue_head_init(&wl->tx_queue);
tx_queue          342 drivers/net/wireless/ti/wl1251/tx.c 	while ((skb = skb_dequeue(&wl->tx_queue))) {
tx_queue          352 drivers/net/wireless/ti/wl1251/tx.c 			skb_queue_head(&wl->tx_queue, skb);
tx_queue          476 drivers/net/wireless/ti/wl1251/tx.c 	queue_len = skb_queue_len(&wl->tx_queue);
tx_queue          554 drivers/net/wireless/ti/wl1251/tx.c 	while ((skb = skb_dequeue(&wl->tx_queue))) {
tx_queue          305 drivers/net/wireless/ti/wl1251/wl1251.h 	struct sk_buff_head tx_queue;
tx_queue         1245 drivers/net/wireless/ti/wlcore/main.c 	skb_queue_tail(&wl->links[hlid].tx_queue[q], skb);
tx_queue         6386 drivers/net/wireless/ti/wlcore/main.c 			skb_queue_head_init(&wl->links[j].tx_queue[i]);
tx_queue           89 drivers/net/wireless/ti/wlcore/ps.c 		while ((skb = skb_dequeue(&lnk->tx_queue[i]))) {
tx_queue          513 drivers/net/wireless/ti/wlcore/tx.c 	skb = skb_dequeue(&lnk->tx_queue[q]);
tx_queue          536 drivers/net/wireless/ti/wlcore/tx.c 		    !skb_queue_empty(&lnk->tx_queue[ac]) &&
tx_queue          681 drivers/net/wireless/ti/wlcore/tx.c 		skb_queue_head(&wl->links[hlid].tx_queue[q], skb);
tx_queue         1029 drivers/net/wireless/ti/wlcore/tx.c 		while ((skb = skb_dequeue(&lnk->tx_queue[i]))) {
tx_queue          256 drivers/net/wireless/ti/wlcore/wlcore_i.h 	struct sk_buff_head tx_queue[NUM_TX_QUEUES];
tx_queue          150 drivers/net/xen-netback/common.h 	struct sk_buff_head tx_queue;
tx_queue          538 drivers/net/xen-netback/interface.c 	skb_queue_head_init(&queue->tx_queue);
tx_queue          798 drivers/net/xen-netback/netback.c 	while (skb_queue_len(&queue->tx_queue) < budget) {
tx_queue         1016 drivers/net/xen-netback/netback.c 		__skb_queue_tail(&queue->tx_queue, skb);
tx_queue         1101 drivers/net/xen-netback/netback.c 	while ((skb = __skb_dequeue(&queue->tx_queue)) != NULL) {
tx_queue           84 drivers/net/xen-netback/xenbus.c 		   skb_queue_len(&queue->tx_queue),
tx_queue          285 drivers/scsi/fnic/fnic.h 	struct sk_buff_head tx_queue;
tx_queue         1185 drivers/scsi/fnic/fnic_fcs.c 		skb_queue_tail(&fnic->tx_queue, fp_skb(fp));
tx_queue         1209 drivers/scsi/fnic/fnic_fcs.c 	while ((skb = skb_dequeue(&fnic->tx_queue))) {
tx_queue          880 drivers/scsi/fnic/fnic_main.c 	skb_queue_head_init(&fnic->tx_queue);
tx_queue          968 drivers/scsi/fnic/fnic_main.c 	skb_queue_purge(&fnic->tx_queue);
tx_queue         1000 drivers/scsi/fnic/fnic_main.c 	BUG_ON(!skb_queue_empty(&fnic->tx_queue));
tx_queue          215 drivers/scsi/fnic/fnic_scsi.c 	skb_queue_purge(&fnic->tx_queue);
tx_queue          691 drivers/scsi/fnic/fnic_scsi.c 		skb_queue_purge(&fnic->tx_queue);
tx_queue          243 drivers/tty/ipwireless/hardware.c 	struct list_head tx_queue[NL_NUM_OF_PRIORITIES];
tx_queue          510 drivers/tty/ipwireless/hardware.c 		list_add(&packet->queue, &hw->tx_queue[0]);
tx_queue          967 drivers/tty/ipwireless/hardware.c 			if (!list_empty(&hw->tx_queue[priority])) {
tx_queue          969 drivers/tty/ipwireless/hardware.c 						&hw->tx_queue[priority],
tx_queue          993 drivers/tty/ipwireless/hardware.c 			if (!list_empty(&hw->tx_queue[priority])) {
tx_queue         1243 drivers/tty/ipwireless/hardware.c 	list_add_tail(&packet->queue, &hw->tx_queue[priority]);
tx_queue         1633 drivers/tty/ipwireless/hardware.c 		INIT_LIST_HEAD(&hw->tx_queue[i]);
tx_queue         1745 drivers/tty/ipwireless/hardware.c 		list_for_each_entry_safe(tp, tq, &hw->tx_queue[i], queue) {
tx_queue          100 drivers/usb/usbip/vudc.h 	struct list_head tx_queue;
tx_queue          571 drivers/usb/usbip/vudc_dev.c 	INIT_LIST_HEAD(&udc->tx_queue);
tx_queue          192 drivers/usb/usbip/vudc_tx.c 	while (!list_empty(&udc->tx_queue)) {
tx_queue          193 drivers/usb/usbip/vudc_tx.c 		txi = list_first_entry(&udc->tx_queue, struct tx_item,
tx_queue          236 drivers/usb/usbip/vudc_tx.c 					 (!list_empty(&udc->tx_queue) ||
tx_queue          266 drivers/usb/usbip/vudc_tx.c 	list_add_tail(&txi->tx_entry, &udc->tx_queue);
tx_queue          283 drivers/usb/usbip/vudc_tx.c 	list_add_tail(&txi->tx_entry, &udc->tx_queue);
tx_queue           74 include/linux/scc.h         struct sk_buff_head tx_queue;	/* next tx buffer */
tx_queue          173 include/net/bluetooth/rfcomm.h 	struct sk_buff_head   tx_queue;
tx_queue         1750 include/net/sock.h static inline void sk_tx_queue_set(struct sock *sk, int tx_queue)
tx_queue         1753 include/net/sock.h 	if (WARN_ON_ONCE((unsigned short)tx_queue >= USHRT_MAX))
tx_queue         1755 include/net/sock.h 	sk->sk_tx_queue_mapping = tx_queue;
tx_queue          312 net/bluetooth/rfcomm/core.c 	skb_queue_head_init(&d->tx_queue);
tx_queue          327 net/bluetooth/rfcomm/core.c 	skb_queue_purge(&d->tx_queue);
tx_queue          442 net/bluetooth/rfcomm/core.c 	if (skb_queue_empty(&d->tx_queue)) {
tx_queue          496 net/bluetooth/rfcomm/core.c 		skb_queue_purge(&d->tx_queue);
tx_queue          569 net/bluetooth/rfcomm/core.c 	skb_queue_tail(&d->tx_queue, skb);
tx_queue          583 net/bluetooth/rfcomm/core.c 	skb_queue_tail(&d->tx_queue, skb);
tx_queue          873 net/bluetooth/rfcomm/core.c 	skb_queue_tail(&d->tx_queue, skb);
tx_queue         1818 net/bluetooth/rfcomm/core.c 		return skb_queue_len(&d->tx_queue);
tx_queue         1820 net/bluetooth/rfcomm/core.c 	while (d->tx_credits && (skb = skb_dequeue(&d->tx_queue))) {
tx_queue         1823 net/bluetooth/rfcomm/core.c 			skb_queue_head(&d->tx_queue, skb);
tx_queue         1836 net/bluetooth/rfcomm/core.c 	return skb_queue_len(&d->tx_queue);
tx_queue          698 net/bluetooth/rfcomm/tty.c 	skb_queue_purge(&dev->dlc->tx_queue);
tx_queue         1022 net/bluetooth/rfcomm/tty.c 	if (!skb_queue_empty(&dev->dlc->tx_queue))
tx_queue         1037 net/bluetooth/rfcomm/tty.c 	skb_queue_purge(&dev->dlc->tx_queue);
tx_queue           60 net/nfc/llcp.h 	struct sk_buff_head tx_queue;
tx_queue          135 net/nfc/llcp.h 	struct sk_buff_head tx_queue;
tx_queue          352 net/nfc/llcp_commands.c 	skb_queue_tail(&local->tx_queue, skb);
tx_queue          449 net/nfc/llcp_commands.c 	skb_queue_tail(&local->tx_queue, skb);
tx_queue          509 net/nfc/llcp_commands.c 	skb_queue_tail(&local->tx_queue, skb);
tx_queue          571 net/nfc/llcp_commands.c 	skb_queue_tail(&local->tx_queue, skb);
tx_queue          605 net/nfc/llcp_commands.c 	skb_queue_tail(&local->tx_queue, skb);
tx_queue          638 net/nfc/llcp_commands.c 	skb_queue_head(&local->tx_queue, skb);
tx_queue          662 net/nfc/llcp_commands.c 	    skb_queue_len(&sock->tx_queue) >= 2 * sock->remote_rw)) {
tx_queue          670 net/nfc/llcp_commands.c 	     skb_queue_len(&sock->tx_queue) >= 2 * sock->remote_rw)) {
tx_queue          672 net/nfc/llcp_commands.c 		       skb_queue_len(&sock->tx_queue));
tx_queue          709 net/nfc/llcp_commands.c 		skb_queue_tail(&sock->tx_queue, pdu);
tx_queue          779 net/nfc/llcp_commands.c 		skb_queue_tail(&local->tx_queue, pdu);
tx_queue          809 net/nfc/llcp_commands.c 	skb_queue_head(&local->tx_queue, skb);
tx_queue           50 net/nfc/llcp_core.c 	skb_queue_purge(&sock->tx_queue);
tx_queue           57 net/nfc/llcp_core.c 	skb_queue_walk_safe(&local->tx_queue, s, tmp) {
tx_queue           61 net/nfc/llcp_core.c 		skb_unlink(s, &local->tx_queue);
tx_queue           73 net/nfc/llcp_core.c 	skb_queue_purge(&local->tx_queue);
tx_queue          157 net/nfc/llcp_core.c 	skb_queue_purge(&local->tx_queue);
tx_queue          722 net/nfc/llcp_core.c 	skb = skb_dequeue(&local->tx_queue);
tx_queue          731 net/nfc/llcp_core.c 			skb_queue_head(&local->tx_queue, skb);
tx_queue         1013 net/nfc/llcp_core.c 		pdu = skb_dequeue(&sock->tx_queue);
tx_queue         1020 net/nfc/llcp_core.c 		skb_queue_tail(&local->tx_queue, pdu);
tx_queue         1098 net/nfc/llcp_core.c 			skb_queue_head(&local->tx_queue, s);
tx_queue         1582 net/nfc/llcp_core.c 	skb_queue_head_init(&local->tx_queue);
tx_queue          976 net/nfc/llcp_sock.c 	skb_queue_head_init(&llcp_sock->tx_queue);
tx_queue          990 net/nfc/llcp_sock.c 	skb_queue_purge(&sock->tx_queue);
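Nearly every subsystem in this listing follows the same lifecycle for its tx_queue: skb_queue_head_init at setup time and skb_queue_purge at teardown, as in the llcp_sock.c entries directly above. A minimal kernel-context sketch of that lifecycle, with hypothetical names (demo_sock, demo_sock_init, demo_sock_destroy):

/*
 * Initialise the queue head (lock plus empty list) once at setup, and
 * purge it at teardown so no queued skbs are leaked.
 */
#include <linux/skbuff.h>

struct demo_sock {			/* hypothetical per-connection state */
	struct sk_buff_head tx_queue;
};

static void demo_sock_init(struct demo_sock *s)
{
	skb_queue_head_init(&s->tx_queue);	/* spinlock + empty list */
}

static void demo_sock_destroy(struct demo_sock *s)
{
	skb_queue_purge(&s->tx_queue);		/* frees every skb still queued */
}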