Lines Matching refs:txq

1087 	netif_tx_stop_queue(q->txq);  in eth_txq_stop()
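
These hits appear to come from the Chelsio cxgb4 driver's sge.c. The first one, at 1087, sits inside the small helper that parks a netdev Tx queue when its hardware ring runs short of descriptors. A minimal sketch of that helper, assuming the stall counter is the q->q.stops field as in mainline cxgb4:

	static inline void eth_txq_stop(struct sge_eth_txq *q)
	{
		/* Stop the stack from queuing more skbs on this netdev queue;
		 * it is woken again from the Tx reclaim path once descriptors
		 * have been freed.
		 */
		netif_tx_stop_queue(q->txq);
		q->q.stops++;		/* assumed stall counter, per mainline */
	}
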
2384 struct sge_ofld_txq *txq = s->egr_map[id]; in sge_tx_timer_cb() local
2387 tasklet_schedule(&txq->qresume_tsk); in sge_tx_timer_cb()
2396 time_after_eq(jiffies, q->txq->trans_start + HZ / 100) && in sge_tx_timer_cb()
2397 __netif_tx_trylock(q->txq)) { in sge_tx_timer_cb()
2408 __netif_tx_unlock(q->txq); in sge_tx_timer_cb()
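
The hits at 2384-2408 are the two halves of the Tx timer callback: 2384/2387 walk the egress map and reschedule the resume tasklet of offload queues that previously hit a DMA mapping error, while 2396-2408 opportunistically reclaim completed Ethernet descriptors, but only from queues that have been idle for roughly 10 ms (HZ / 100) and whose Tx lock can be taken without contention. A sketch of the reclaim half, assuming the driver-internal reclaimable() and free_tx_desc() helpers:

	/* Reclaim completed descriptors from an idle Eth Tx queue without ever
	 * spinning on its lock; 'budget' bounds the work done per timer tick
	 * (sketch of the mainline pattern, helper names assumed).
	 */
	if (q->q.in_use &&
	    time_after_eq(jiffies, q->txq->trans_start + HZ / 100) &&
	    __netif_tx_trylock(q->txq)) {
		int avail = reclaimable(&q->q);

		if (avail) {
			if (avail > budget)
				avail = budget;
			free_tx_desc(adap, &q->q, avail, true);
			q->q.in_use -= avail;
			budget -= avail;
		}
		__netif_tx_unlock(q->txq);
	}
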
2637 int t4_sge_alloc_eth_txq(struct adapter *adap, struct sge_eth_txq *txq, in t4_sge_alloc_eth_txq() argument
2647 nentries = txq->q.size + s->stat_len / sizeof(struct tx_desc); in t4_sge_alloc_eth_txq()
2649 txq->q.desc = alloc_ring(adap->pdev_dev, txq->q.size, in t4_sge_alloc_eth_txq()
2651 &txq->q.phys_addr, &txq->q.sdesc, s->stat_len, in t4_sge_alloc_eth_txq()
2653 if (!txq->q.desc) in t4_sge_alloc_eth_txq()
2674 c.eqaddr = cpu_to_be64(txq->q.phys_addr); in t4_sge_alloc_eth_txq()
2678 kfree(txq->q.sdesc); in t4_sge_alloc_eth_txq()
2679 txq->q.sdesc = NULL; in t4_sge_alloc_eth_txq()
2682 txq->q.desc, txq->q.phys_addr); in t4_sge_alloc_eth_txq()
2683 txq->q.desc = NULL; in t4_sge_alloc_eth_txq()
2687 init_txq(adap, &txq->q, FW_EQ_ETH_CMD_EQID_G(ntohl(c.eqid_pkd))); in t4_sge_alloc_eth_txq()
2688 txq->txq = netdevq; in t4_sge_alloc_eth_txq()
2689 txq->tso = txq->tx_cso = txq->vlan_ins = 0; in t4_sge_alloc_eth_txq()
2690 txq->mapping_err = 0; in t4_sge_alloc_eth_txq()
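
Taken together, the t4_sge_alloc_eth_txq() hits show the allocation pattern shared by all three queue constructors: size the ring for txq->q.size descriptors plus one trailing status page (s->stat_len bytes expressed in tx_desc units), allocate the descriptor ring and software-descriptor state in one call, post the firmware EQ command with the ring's bus address, and only then initialize the software side. A condensed sketch; the argument between the hits at 2649 and 2651 (sizeof(struct tx_sw_desc)) and the t4_wr_mbox() call are not in the hits and are filled in from mainline:

	/* Ring carries the requested descriptors plus the trailing status page. */
	nentries = txq->q.size + s->stat_len / sizeof(struct tx_desc);

	txq->q.desc = alloc_ring(adap->pdev_dev, txq->q.size,
				 sizeof(struct tx_desc), sizeof(struct tx_sw_desc),
				 &txq->q.phys_addr, &txq->q.sdesc, s->stat_len,
				 netdev_queue_numa_node_read(netdevq));
	if (!txq->q.desc)
		return -ENOMEM;

	/* ... build the FW_EQ_ETH_CMD and point it at the ring ... */
	c.eqaddr = cpu_to_be64(txq->q.phys_addr);

	ret = t4_wr_mbox(adap, adap->mbox, &c, sizeof(c), &c);
	if (ret) {
		/* Mailbox failure: undo both allocations before returning. */
		kfree(txq->q.sdesc);
		txq->q.sdesc = NULL;
		dma_free_coherent(adap->pdev_dev,
				  nentries * sizeof(struct tx_desc),
				  txq->q.desc, txq->q.phys_addr);
		txq->q.desc = NULL;
		return ret;
	}

	/* Success: record the EQ id the firmware handed back, then reset stats. */
	init_txq(adap, &txq->q, FW_EQ_ETH_CMD_EQID_G(ntohl(c.eqid_pkd)));
	txq->txq = netdevq;
	txq->tso = txq->tx_cso = txq->vlan_ins = 0;
	txq->mapping_err = 0;
	return 0;
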
2694 int t4_sge_alloc_ctrl_txq(struct adapter *adap, struct sge_ctrl_txq *txq, in t4_sge_alloc_ctrl_txq() argument
2704 nentries = txq->q.size + s->stat_len / sizeof(struct tx_desc); in t4_sge_alloc_ctrl_txq()
2706 txq->q.desc = alloc_ring(adap->pdev_dev, nentries, in t4_sge_alloc_ctrl_txq()
2707 sizeof(struct tx_desc), 0, &txq->q.phys_addr, in t4_sge_alloc_ctrl_txq()
2709 if (!txq->q.desc) in t4_sge_alloc_ctrl_txq()
2729 c.eqaddr = cpu_to_be64(txq->q.phys_addr); in t4_sge_alloc_ctrl_txq()
2735 txq->q.desc, txq->q.phys_addr); in t4_sge_alloc_ctrl_txq()
2736 txq->q.desc = NULL; in t4_sge_alloc_ctrl_txq()
2740 init_txq(adap, &txq->q, FW_EQ_CTRL_CMD_EQID_G(ntohl(c.cmpliqid_eqid))); in t4_sge_alloc_ctrl_txq()
2741 txq->adap = adap; in t4_sge_alloc_ctrl_txq()
2742 skb_queue_head_init(&txq->sendq); in t4_sge_alloc_ctrl_txq()
2743 tasklet_init(&txq->qresume_tsk, restart_ctrlq, (unsigned long)txq); in t4_sge_alloc_ctrl_txq()
2744 txq->full = 0; in t4_sge_alloc_ctrl_txq()
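
The control-queue variant follows the same shape, but note what the hits do not contain: no &txq->q.sdesc argument at 2706-2707 and no kfree() on the error path at 2735, because control queues carry only firmware work requests and need no per-packet software descriptors. The tail at 2740-2744 instead sets up the software queueing used when the ring fills. A sketch of both points; the trailing alloc_ring() arguments (NULL sdesc pointer and a NUMA node hint) are assumptions filled in from mainline:

	/* Control queues have no sw-desc array: zero sw-desc size, no sdesc. */
	txq->q.desc = alloc_ring(adap->pdev_dev, nentries,
				 sizeof(struct tx_desc), 0, &txq->q.phys_addr,
				 NULL, 0, dev_to_node(adap->pdev_dev));
	if (!txq->q.desc)
		return -ENOMEM;

	/* ... FW_EQ_CTRL_CMD posted as in the Eth case ... */

	/* skbs arriving while the ring is full wait on txq->sendq and are
	 * drained by the restart_ctrlq() tasklet once descriptors free up.
	 */
	skb_queue_head_init(&txq->sendq);
	tasklet_init(&txq->qresume_tsk, restart_ctrlq, (unsigned long)txq);
	txq->full = 0;
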
2748 int t4_sge_alloc_ofld_txq(struct adapter *adap, struct sge_ofld_txq *txq, in t4_sge_alloc_ofld_txq() argument
2757 nentries = txq->q.size + s->stat_len / sizeof(struct tx_desc); in t4_sge_alloc_ofld_txq()
2759 txq->q.desc = alloc_ring(adap->pdev_dev, txq->q.size, in t4_sge_alloc_ofld_txq()
2761 &txq->q.phys_addr, &txq->q.sdesc, s->stat_len, in t4_sge_alloc_ofld_txq()
2763 if (!txq->q.desc) in t4_sge_alloc_ofld_txq()
2782 c.eqaddr = cpu_to_be64(txq->q.phys_addr); in t4_sge_alloc_ofld_txq()
2786 kfree(txq->q.sdesc); in t4_sge_alloc_ofld_txq()
2787 txq->q.sdesc = NULL; in t4_sge_alloc_ofld_txq()
2790 txq->q.desc, txq->q.phys_addr); in t4_sge_alloc_ofld_txq()
2791 txq->q.desc = NULL; in t4_sge_alloc_ofld_txq()
2795 init_txq(adap, &txq->q, FW_EQ_OFLD_CMD_EQID_G(ntohl(c.eqid_pkd))); in t4_sge_alloc_ofld_txq()
2796 txq->adap = adap; in t4_sge_alloc_ofld_txq()
2797 skb_queue_head_init(&txq->sendq); in t4_sge_alloc_ofld_txq()
2798 tasklet_init(&txq->qresume_tsk, restart_ofldq, (unsigned long)txq); in t4_sge_alloc_ofld_txq()
2799 txq->full = 0; in t4_sge_alloc_ofld_txq()
2800 txq->mapping_err = 0; in t4_sge_alloc_ofld_txq()
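
The offload-queue constructor is a hybrid of the two above: it allocates descriptors plus software state like the Eth path (2759-2791) but finishes like the control path, with a sendq and a resume tasklet (2797-2799). That tasklet is exactly what the timer callback at 2387 schedules for a queue that recorded a mapping error; a sketch of the link, with the txq_maperr bit bookkeeping omitted:

	/* In sge_tx_timer_cb(): look up the offload queue behind the egress-queue
	 * id and kick the qresume tasklet installed at 2798, which runs
	 * restart_ofldq() to drain txq->sendq.
	 */
	struct sge_ofld_txq *txq = s->egr_map[id];

	if (txq)
		tasklet_schedule(&txq->qresume_tsk);
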