Lines matching refs:tqueue
642 struct sxgbe_tx_queue *tqueue = priv->txq[queue_num]; in dma_free_tx_skbufs() local
643 tx_free_ring_skbufs(tqueue); in dma_free_tx_skbufs()
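tx_free_ring_skbufs() is only called here, so its body does not appear in the match set. As a rough orientation only, a per-ring free helper of this kind typically walks every descriptor slot, unmaps whatever DMA address was recorded for it and frees the attached sk_buff. The sketch below is an assumption: the ring-size field (priv->dma_tx_size), the priv->device pointer and the unmap length are guesses modelled on the fields visible in the matches, not the driver's confirmed code.

static void tx_free_ring_skbufs(struct sxgbe_tx_queue *txqueue)
{
	struct sxgbe_priv_data *priv = txqueue->priv_ptr;
	int tx_rsize = priv->dma_tx_size;	/* assumed ring-size field */
	int i;

	for (i = 0; i < tx_rsize; i++) {
		struct sk_buff *skb = txqueue->tx_skbuff[i];

		/* undo the streaming mapping recorded at transmit time */
		if (txqueue->tx_skbuff_dma[i]) {
			dma_unmap_single(priv->device,
					 txqueue->tx_skbuff_dma[i],
					 skb ? skb->len : 0,	/* illustrative length only */
					 DMA_TO_DEVICE);
			txqueue->tx_skbuff_dma[i] = 0;
		}

		if (skb) {
			dev_kfree_skb_any(skb);
			txqueue->tx_skbuff[i] = NULL;
		}
	}
}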
737 static void sxgbe_tx_queue_clean(struct sxgbe_tx_queue *tqueue) in sxgbe_tx_queue_clean() argument
739 struct sxgbe_priv_data *priv = tqueue->priv_ptr; in sxgbe_tx_queue_clean()
742 u8 queue_no = tqueue->queue_no; in sxgbe_tx_queue_clean()
746 spin_lock(&tqueue->tx_lock); in sxgbe_tx_queue_clean()
749 while (tqueue->dirty_tx != tqueue->cur_tx) { in sxgbe_tx_queue_clean()
750 unsigned int entry = tqueue->dirty_tx % tx_rsize; in sxgbe_tx_queue_clean()
751 struct sk_buff *skb = tqueue->tx_skbuff[entry]; in sxgbe_tx_queue_clean()
754 p = tqueue->dma_tx + entry; in sxgbe_tx_queue_clean()
762 __func__, tqueue->cur_tx, tqueue->dirty_tx); in sxgbe_tx_queue_clean()
764 if (likely(tqueue->tx_skbuff_dma[entry])) { in sxgbe_tx_queue_clean()
766 tqueue->tx_skbuff_dma[entry], in sxgbe_tx_queue_clean()
769 tqueue->tx_skbuff_dma[entry] = 0; in sxgbe_tx_queue_clean()
774 tqueue->tx_skbuff[entry] = NULL; in sxgbe_tx_queue_clean()
779 tqueue->dirty_tx++; in sxgbe_tx_queue_clean()
784 sxgbe_tx_avail(tqueue, tx_rsize) > SXGBE_TX_THRESH(priv))) { in sxgbe_tx_queue_clean()
787 sxgbe_tx_avail(tqueue, tx_rsize) > SXGBE_TX_THRESH(priv)) { in sxgbe_tx_queue_clean()
795 spin_unlock(&tqueue->tx_lock); in sxgbe_tx_queue_clean()
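sxgbe_tx_avail() shows up above only at its call sites (lines 784 and 787), never as a definition. For a ring whose cur_tx/dirty_tx counters grow monotonically and are wrapped with "% tx_rsize" only when a slot is addressed, the free-slot count is usually computed as in this minimal sketch of the arithmetic; it is not a quote from the driver.

/* Free descriptors between the consumer (dirty_tx) and producer (cur_tx);
 * one slot stays unused so that cur_tx == dirty_tx unambiguously means "empty". */
static inline int sxgbe_tx_avail(struct sxgbe_tx_queue *queue, int tx_rsize)
{
	return queue->dirty_tx + tx_rsize - queue->cur_tx - 1;
}

When this value climbs back above SXGBE_TX_THRESH(priv), as checked at lines 784 and 787, the clean path presumably wakes the corresponding netdev TX queue so transmission can resume.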
808 struct sxgbe_tx_queue *tqueue = priv->txq[queue_num]; in sxgbe_tx_all_clean() local
810 sxgbe_tx_queue_clean(tqueue); in sxgbe_tx_all_clean()
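The loop driving sxgbe_tx_all_clean() falls outside the match set; it presumably just repeats the per-queue lookup shown at line 808 for every hardware TX queue. A minimal sketch, assuming a SXGBE_TX_QUEUES bound (the real driver may use an iteration macro or a different limit):

static void sxgbe_tx_all_clean(struct sxgbe_priv_data *priv)
{
	u8 queue_num;

	/* run the per-queue completion path on every TX ring */
	for (queue_num = 0; queue_num < SXGBE_TX_QUEUES; queue_num++) {
		struct sxgbe_tx_queue *tqueue = priv->txq[queue_num];

		sxgbe_tx_queue_clean(tqueue);
	}
}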
1288 struct sxgbe_tx_queue *tqueue = priv->txq[txq_index]; in sxgbe_xmit() local
1300 if (unlikely(skb_is_gso(skb) && tqueue->prev_mss != cur_mss)) in sxgbe_xmit()
1305 tqueue->hwts_tx_en))) in sxgbe_xmit()
1309 spin_lock(&tqueue->tx_lock); in sxgbe_xmit()
1314 if (unlikely(sxgbe_tx_avail(tqueue, tx_rsize) < nr_frags + 1)) { in sxgbe_xmit()
1321 spin_unlock(&tqueue->tx_lock); in sxgbe_xmit()
1325 entry = tqueue->cur_tx % tx_rsize; in sxgbe_xmit()
1326 tx_desc = tqueue->dma_tx + entry; in sxgbe_xmit()
1333 tqueue->tx_skbuff[entry] = skb; in sxgbe_xmit()
1338 if (unlikely(tqueue->prev_mss != cur_mss)) { in sxgbe_xmit()
1350 entry = (++tqueue->cur_tx) % tx_rsize; in sxgbe_xmit()
1351 first_desc = tqueue->dma_tx + entry; in sxgbe_xmit()
1353 tqueue->prev_mss = cur_mss; in sxgbe_xmit()
1372 entry = (++tqueue->cur_tx) % tx_rsize; in sxgbe_xmit()
1373 tx_desc = tqueue->dma_tx + entry; in sxgbe_xmit()
1377 tqueue->tx_skbuff_dma[entry] = tx_desc->tdes01; in sxgbe_xmit()
1378 tqueue->tx_skbuff[entry] = NULL; in sxgbe_xmit()
1396 tqueue->tx_count_frames += nr_frags + 1; in sxgbe_xmit()
1397 if (tqueue->tx_count_frames > tqueue->tx_coal_frames) { in sxgbe_xmit()
1400 mod_timer(&tqueue->txtimer, in sxgbe_xmit()
1401 SXGBE_COAL_TIMER(tqueue->tx_coal_timer)); in sxgbe_xmit()
1403 tqueue->tx_count_frames = 0; in sxgbe_xmit()
1412 tqueue->cur_tx++; in sxgbe_xmit()
1416 __func__, tqueue->cur_tx % tx_rsize, in sxgbe_xmit()
1417 tqueue->dirty_tx % tx_rsize, entry, in sxgbe_xmit()
1420 if (unlikely(sxgbe_tx_avail(tqueue, tx_rsize) <= (MAX_SKB_FRAGS + 1))) { in sxgbe_xmit()
1429 tqueue->hwts_tx_en)) { in sxgbe_xmit()
1435 if (!tqueue->hwts_tx_en) in sxgbe_xmit()
1440 spin_unlock(&tqueue->tx_lock); in sxgbe_xmit()
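The check at line 1420 is the producer-side half of the flow control whose reclaim side appears in sxgbe_tx_queue_clean() (lines 784-787): once the ring can no longer hold another maximally fragmented skb, this TX queue has to be stopped until completions free descriptors. The statement guarded by that check is not in the match set; the sketch below uses the standard net core helpers, with the dev/txq_index names taken from the surrounding matches, and the exact call in the driver may differ.

/* Sketch of the flow-control step around line 1420: stop this netdev TX queue
 * when fewer than MAX_SKB_FRAGS + 1 descriptors remain; the clean path wakes
 * it again once sxgbe_tx_avail() rises above SXGBE_TX_THRESH(priv). */
if (unlikely(sxgbe_tx_avail(tqueue, tx_rsize) <= (MAX_SKB_FRAGS + 1)))
	netif_tx_stop_queue(netdev_get_tx_queue(dev, txq_index));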