Lines Matching refs:tcb

163 bnad_txq_cleanup(struct bnad *bnad, struct bna_tcb *tcb)  in bnad_txq_cleanup()  argument
165 struct bnad_tx_unmap *unmap_q = tcb->unmap_q; in bnad_txq_cleanup()
169 for (i = 0; i < tcb->q_depth; i++) { in bnad_txq_cleanup()
173 bnad_tx_buff_unmap(bnad, unmap_q, tcb->q_depth, i); in bnad_txq_cleanup()
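
The three hits above are the teardown path: bnad_txq_cleanup() walks every slot of the TCB's unmap queue and releases whatever is still pending. A minimal user-space model of that sweep, with simplified stand-in types (not the driver's real bnad_tx_unmap layout):

    /* User-space model of the teardown sweep in bnad_txq_cleanup(): every
     * slot of the unmap queue that still holds a buffer is released.
     * The types are simplified stand-ins for the driver's structures. */
    #include <stdlib.h>

    struct tx_unmap {              /* stand-in for struct bnad_tx_unmap */
        void *buf;                 /* stand-in for the pending skb */
    };

    static void txq_cleanup(struct tx_unmap *unmap_q, unsigned int q_depth)
    {
        for (unsigned int i = 0; i < q_depth; i++) {
            if (!unmap_q[i].buf)   /* empty slot, nothing mapped here */
                continue;
            free(unmap_q[i].buf);  /* driver: unmap DMA + free the skb */
            unmap_q[i].buf = NULL;
        }
    }

    int main(void)
    {
        struct tx_unmap q[4] = { { malloc(1) }, { NULL }, { malloc(1) }, { NULL } };
        txq_cleanup(q, 4);
        return 0;
    }
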
185 bnad_txcmpl_process(struct bnad *bnad, struct bna_tcb *tcb) in bnad_txcmpl_process() argument
189 struct bnad_tx_unmap *unmap_q = tcb->unmap_q; in bnad_txcmpl_process()
194 if (!test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags)) in bnad_txcmpl_process()
197 hw_cons = *(tcb->hw_consumer_index); in bnad_txcmpl_process()
198 cons = tcb->consumer_index; in bnad_txcmpl_process()
199 q_depth = tcb->q_depth; in bnad_txcmpl_process()
202 BUG_ON(!(wis <= BNA_QE_IN_USE_CNT(tcb, tcb->q_depth))); in bnad_txcmpl_process()
220 tcb->consumer_index = hw_cons; in bnad_txcmpl_process()
222 tcb->txq->tx_packets += sent_packets; in bnad_txcmpl_process()
223 tcb->txq->tx_bytes += sent_bytes; in bnad_txcmpl_process()
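
These hits sketch the completion reaper: bnad_txcmpl_process() snapshots the hardware consumer index, frees everything between the software consumer index and that snapshot, then publishes the new consumer index and accumulates the per-TXQ packet and byte counters. A small runnable sketch of the underlying ring arithmetic, assuming a power-of-two q_depth and macros shaped like the driver's BNA_QE_* helpers (simplified here):

    /* Sketch of the producer/consumer accounting behind bnad_txcmpl_process().
     * Assumes a power-of-two ring depth; the macros mirror the shape of the
     * driver's BNA_QE_IN_USE_CNT()/BNA_QE_FREE_CNT(), simplified for clarity. */
    #include <assert.h>
    #include <stdio.h>

    #define QE_IN_USE(prod, cons, depth)  (((prod) - (cons)) & ((depth) - 1))
    #define QE_FREE(prod, cons, depth)    (((cons) - (prod) - 1) & ((depth) - 1))

    int main(void)
    {
        unsigned int q_depth = 8, prod = 5, cons = 2;
        unsigned int hw_cons = 4;                    /* written back by hardware */
        unsigned int wis = QE_IN_USE(hw_cons, cons, q_depth); /* completed WIs */

        /* Mirrors the BUG_ON() in the listing: completions can never exceed
         * what software actually posted. */
        assert(wis <= QE_IN_USE(prod, cons, q_depth));

        cons = hw_cons;                              /* tcb->consumer_index = hw_cons */
        printf("reaped %u WIs, %u still in flight\n",
               wis, QE_IN_USE(prod, cons, q_depth));
        return 0;
    }
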
229 bnad_tx_complete(struct bnad *bnad, struct bna_tcb *tcb) in bnad_tx_complete() argument
234 if (test_and_set_bit(BNAD_TXQ_FREE_SENT, &tcb->flags)) in bnad_tx_complete()
237 sent = bnad_txcmpl_process(bnad, tcb); in bnad_tx_complete()
241 BNA_QE_FREE_CNT(tcb, tcb->q_depth) >= in bnad_tx_complete()
243 if (test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags)) { in bnad_tx_complete()
250 if (likely(test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags))) in bnad_tx_complete()
251 bna_ib_ack(tcb->i_dbell, sent); in bnad_tx_complete()
254 clear_bit(BNAD_TXQ_FREE_SENT, &tcb->flags); in bnad_tx_complete()
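
bnad_tx_complete() uses BNAD_TXQ_FREE_SENT as a single-cleaner lock: only the context that wins test_and_set_bit() reaps completions, acks the interrupt block, and clears the bit; everyone else backs off. A user-space model of that pattern with C11 atomics (the names below are placeholders, not driver symbols):

    /* Model of the BNAD_TXQ_FREE_SENT "single cleaner" pattern in
     * bnad_tx_complete(): the thread that wins the test-and-set walks the
     * completion ring; any other context simply returns. */
    #include <stdatomic.h>
    #include <stdbool.h>

    static atomic_flag free_sent = ATOMIC_FLAG_INIT;

    static unsigned int reap_completions(void)
    {
        return 0;                         /* stand-in for bnad_txcmpl_process() */
    }

    static bool try_tx_complete(void)
    {
        if (atomic_flag_test_and_set(&free_sent))
            return false;                 /* another context is already cleaning */

        unsigned int sent = reap_completions();
        (void)sent;                       /* driver: wake queue, bna_ib_ack(sent) */

        atomic_flag_clear(&free_sent);    /* clear_bit(BNAD_TXQ_FREE_SENT, ...) */
        return true;
    }

    int main(void)
    {
        return try_tx_complete() ? 0 : 1;
    }

The same bit also serializes against bnad_tx_cleanup() further down this listing, which is why that cleanup path re-queues itself when it loses the race.
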
263 struct bna_tcb *tcb = (struct bna_tcb *)data; in bnad_msix_tx() local
264 struct bnad *bnad = tcb->bnad; in bnad_msix_tx()
266 bnad_tx_complete(bnad, tcb); in bnad_msix_tx()
802 struct bna_tcb *tcb = NULL; in bnad_isr() local
829 tcb = bnad->tx_info[i].tcb[j]; in bnad_isr()
830 if (tcb && test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags)) in bnad_isr()
831 bnad_tx_complete(bnad, bnad->tx_info[i].tcb[j]); in bnad_isr()
956 struct bna_tcb *tcb = in bnad_cb_ethport_link_status() local
957 bnad->tx_info[tx_id].tcb[tcb_id]; in bnad_cb_ethport_link_status()
959 if (!tcb) in bnad_cb_ethport_link_status()
962 txq_id = tcb->id; in bnad_cb_ethport_link_status()
965 &tcb->flags)) { in bnad_cb_ethport_link_status()
1007 bnad_cb_tcb_setup(struct bnad *bnad, struct bna_tcb *tcb) in bnad_cb_tcb_setup() argument
1010 (struct bnad_tx_info *)tcb->txq->tx->priv; in bnad_cb_tcb_setup()
1012 tcb->priv = tcb; in bnad_cb_tcb_setup()
1013 tx_info->tcb[tcb->id] = tcb; in bnad_cb_tcb_setup()
1017 bnad_cb_tcb_destroy(struct bnad *bnad, struct bna_tcb *tcb) in bnad_cb_tcb_destroy() argument
1020 (struct bnad_tx_info *)tcb->txq->tx->priv; in bnad_cb_tcb_destroy()
1022 tx_info->tcb[tcb->id] = NULL; in bnad_cb_tcb_destroy()
1023 tcb->priv = NULL; in bnad_cb_tcb_destroy()
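
The setup and destroy callbacks only publish and retract the TCB pointer in the per-Tx tcb[] table, keyed by tcb->id; every other path in this listing checks that slot for NULL before touching the queue. A small sketch of that lifecycle with simplified stand-in types (the array bound is a placeholder for the driver's per-Tx queue limit):

    /* Sketch of the tcb[] publish/retract lifecycle behind bnad_cb_tcb_setup()
     * and bnad_cb_tcb_destroy(). Consumers must tolerate a NULL slot at any time. */
    #define MAX_TXQ_PER_TX 8               /* placeholder for the driver's limit */

    struct tcb { unsigned int id; };

    struct tx_info { struct tcb *tcb[MAX_TXQ_PER_TX]; };

    static void cb_tcb_setup(struct tx_info *tx_info, struct tcb *tcb)
    {
        tx_info->tcb[tcb->id] = tcb;       /* publish: queue is now discoverable */
    }

    static void cb_tcb_destroy(struct tx_info *tx_info, struct tcb *tcb)
    {
        tx_info->tcb[tcb->id] = NULL;      /* retract: later walkers skip this slot */
    }

    int main(void)
    {
        struct tcb t = { .id = 0 };
        struct tx_info info = { { NULL } };
        cb_tcb_setup(&info, &t);
        cb_tcb_destroy(&info, &t);
        return 0;
    }
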
1050 struct bna_tcb *tcb; in bnad_cb_tx_stall() local
1055 tcb = tx_info->tcb[i]; in bnad_cb_tx_stall()
1056 if (!tcb) in bnad_cb_tx_stall()
1058 txq_id = tcb->id; in bnad_cb_tx_stall()
1059 clear_bit(BNAD_TXQ_TX_STARTED, &tcb->flags); in bnad_cb_tx_stall()
1070 struct bna_tcb *tcb; in bnad_cb_tx_resume() local
1075 tcb = tx_info->tcb[i]; in bnad_cb_tx_resume()
1076 if (!tcb) in bnad_cb_tx_resume()
1078 txq_id = tcb->id; in bnad_cb_tx_resume()
1080 BUG_ON(test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags)); in bnad_cb_tx_resume()
1081 set_bit(BNAD_TXQ_TX_STARTED, &tcb->flags); in bnad_cb_tx_resume()
1082 BUG_ON(*(tcb->hw_consumer_index) != 0); in bnad_cb_tx_resume()
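
bnad_cb_tx_stall() and bnad_cb_tx_resume() toggle BNAD_TXQ_TX_STARTED, the gate the transmit and completion paths test before doing any work; resume additionally asserts the queue is idle (hw_consumer_index back at 0) before reopening it. A tiny user-space model of that gating, with placeholder names and C11 atomics standing in for the kernel's set_bit()/clear_bit():

    /* Model of the BNAD_TXQ_TX_STARTED gate toggled by bnad_cb_tx_stall() and
     * bnad_cb_tx_resume() and tested on every transmit and completion. */
    #include <stdatomic.h>
    #include <stdbool.h>

    static atomic_bool tx_started = false;

    static void tx_stall(void)  { atomic_store(&tx_started, false); } /* clear_bit() */
    static void tx_resume(void) { atomic_store(&tx_started, true);  } /* set_bit()   */

    static bool can_post(void)
    {
        /* Hot path: refuse to post work items while the queue is stalled. */
        return atomic_load(&tx_started);
    }

    int main(void)
    {
        tx_resume();
        if (!can_post())
            return 1;
        tx_stall();
        return can_post() ? 1 : 0;
    }
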
1112 struct bna_tcb *tcb; in bnad_tx_cleanup() local
1117 tcb = tx_info->tcb[i]; in bnad_tx_cleanup()
1118 if (!tcb) in bnad_tx_cleanup()
1121 bnad = tcb->bnad; in bnad_tx_cleanup()
1123 if (test_and_set_bit(BNAD_TXQ_FREE_SENT, &tcb->flags)) { in bnad_tx_cleanup()
1128 bnad_txq_cleanup(bnad, tcb); in bnad_tx_cleanup()
1131 clear_bit(BNAD_TXQ_FREE_SENT, &tcb->flags); in bnad_tx_cleanup()
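
bnad_tx_cleanup() is the deferred-work side of the same FREE_SENT bit: if a completion path currently owns it, the work is re-queued and retried later; otherwise the queue is drained via bnad_txq_cleanup() and the bit is cleared. A short user-space model of that back-off-and-retry shape (the real driver re-arms a delayed work item rather than sleeping; the names are placeholders):

    /* Model of the retry logic in bnad_tx_cleanup(): if the FREE_SENT owner is
     * active, report "busy" so the caller can reschedule; otherwise drain. */
    #include <stdatomic.h>
    #include <unistd.h>

    static atomic_flag cleanup_flag = ATOMIC_FLAG_INIT;

    static void drain_queue(void) { /* stand-in for bnad_txq_cleanup() */ }

    static int tx_cleanup_once(void)
    {
        if (atomic_flag_test_and_set(&cleanup_flag))
            return -1;                 /* owner active: caller should re-queue */
        drain_queue();
        atomic_flag_clear(&cleanup_flag);
        return 0;
    }

    int main(void)
    {
        while (tx_cleanup_once() != 0)
            usleep(1000);              /* stand-in for re-arming the delayed work */
        return 0;
    }
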
1149 struct bna_tcb *tcb; in bnad_cb_tx_cleanup() local
1153 tcb = tx_info->tcb[i]; in bnad_cb_tx_cleanup()
1154 if (!tcb) in bnad_cb_tx_cleanup()
1529 if (tx_info->tcb[i] == NULL) in bnad_tx_msix_unregister()
1532 vector_num = tx_info->tcb[i]->intr_vector; in bnad_tx_msix_unregister()
1533 free_irq(bnad->msix_table[vector_num].vector, tx_info->tcb[i]); in bnad_tx_msix_unregister()
1549 vector_num = tx_info->tcb[i]->intr_vector; in bnad_tx_msix_register()
1550 sprintf(tx_info->tcb[i]->name, "%s TXQ %d", bnad->netdev->name, in bnad_tx_msix_register()
1551 tx_id + tx_info->tcb[i]->id); in bnad_tx_msix_register()
1554 tx_info->tcb[i]->name, in bnad_tx_msix_register()
1555 tx_info->tcb[i]); in bnad_tx_msix_register()
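
Each TXQ gets its own MSI-X vector, and the TCB pointer is handed to request_irq() as the dev_id cookie so bnad_msix_tx() (lines 263-266 above) gets its queue back directly from the interrupt. A kernel-style sketch of that pairing, not a drop-in: my_tcb and the handler are hypothetical stand-ins, and the driver's msix_table indirection and error unwinding are omitted.

    /* Kernel-style sketch: one MSI-X vector per TXQ, with the TCB as dev_id. */
    #include <linux/interrupt.h>
    #include <linux/kernel.h>

    struct my_tcb {                     /* hypothetical stand-in for struct bna_tcb */
        char name[32];
        unsigned int vector;            /* Linux IRQ number for this TXQ's vector */
    };

    static irqreturn_t my_msix_tx(int irq, void *data)
    {
        struct my_tcb *tcb = data;      /* same cookie passed at request time */
        (void)tcb;                      /* driver: bnad_tx_complete(bnad, tcb) */
        return IRQ_HANDLED;
    }

    static int my_tx_msix_register(struct my_tcb *tcb, const char *netdev_name,
                                   int txq_id)
    {
        snprintf(tcb->name, sizeof(tcb->name), "%s TXQ %d", netdev_name, txq_id);
        return request_irq(tcb->vector, my_msix_tx, 0, tcb->name, tcb);
    }

    static void my_tx_msix_unregister(struct my_tcb *tcb)
    {
        free_irq(tcb->vector, tcb);     /* dev_id must match the request_irq() cookie */
    }
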
1941 if (tx_info->tcb[0]->intr_type == BNA_INTR_T_MSIX) in bnad_destroy_tx()
2427 if (bnad->tx_info[i].tcb[j]) { in bnad_netdev_qstats_fill()
2429 bnad->tx_info[i].tcb[j]->txq->tx_packets; in bnad_netdev_qstats_fill()
2431 bnad->tx_info[i].tcb[j]->txq->tx_bytes; in bnad_netdev_qstats_fill()
2821 bnad_txq_wi_prepare(struct bnad *bnad, struct bna_tcb *tcb, in bnad_txq_wi_prepare() argument
2833 vlan_tag = ((tcb->priority & 0x7) << VLAN_PRIO_SHIFT) in bnad_txq_wi_prepare()
2936 struct bna_tcb *tcb = NULL; in bnad_start_xmit() local
2964 tcb = bnad->tx_info[0].tcb[txq_id]; in bnad_start_xmit()
2970 if (unlikely(!tcb || !test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags))) { in bnad_start_xmit()
2976 q_depth = tcb->q_depth; in bnad_start_xmit()
2977 prod = tcb->producer_index; in bnad_start_xmit()
2978 unmap_q = tcb->unmap_q; in bnad_start_xmit()
2990 if (unlikely(wis > BNA_QE_FREE_CNT(tcb, q_depth))) { in bnad_start_xmit()
2991 if ((*tcb->hw_consumer_index != tcb->consumer_index) && in bnad_start_xmit()
2992 !test_and_set_bit(BNAD_TXQ_FREE_SENT, &tcb->flags)) { in bnad_start_xmit()
2994 sent = bnad_txcmpl_process(bnad, tcb); in bnad_start_xmit()
2995 if (likely(test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags))) in bnad_start_xmit()
2996 bna_ib_ack(tcb->i_dbell, sent); in bnad_start_xmit()
2998 clear_bit(BNAD_TXQ_FREE_SENT, &tcb->flags); in bnad_start_xmit()
3010 if (likely(wis > BNA_QE_FREE_CNT(tcb, q_depth))) { in bnad_start_xmit()
3019 txqent = &((struct bna_txq_entry *)tcb->sw_q)[prod]; in bnad_start_xmit()
3023 if (bnad_txq_wi_prepare(bnad, tcb, skb, txqent)) { in bnad_start_xmit()
3049 tcb->producer_index); in bnad_start_xmit()
3061 txqent = &((struct bna_txq_entry *)tcb->sw_q)[prod]; in bnad_start_xmit()
3078 bnad_tx_buff_unmap(bnad, unmap_q, q_depth, tcb->producer_index); in bnad_start_xmit()
3085 tcb->producer_index = prod; in bnad_start_xmit()
3089 if (unlikely(!test_bit(BNAD_TXQ_TX_STARTED, &tcb->flags))) in bnad_start_xmit()
3094 bna_txq_prod_indx_doorbell(tcb); in bnad_start_xmit()
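
Read together, the bnad_start_xmit() hits trace the fast path: bail out unless BNAD_TXQ_TX_STARTED is set, work out how many work items the skb needs, opportunistically reap completions if the ring looks full, build the work item(s) at producer_index, publish the new producer index, and ring the doorbell. A condensed, hedged outline of that control flow in plain C; every helper below is a placeholder for the driver routine of similar name, and DMA mapping and error paths are omitted.

    /* Condensed outline of the bnad_start_xmit() fast path reflected above. */
    #include <stdbool.h>

    #define QE_FREE(prod, cons, depth)  (((cons) - (prod) - 1) & ((depth) - 1))

    struct txq {
        unsigned int prod, cons, hw_cons, depth;
        bool started;                           /* BNAD_TXQ_TX_STARTED */
    };

    static unsigned int reap(struct txq *q)     /* stand-in for bnad_txcmpl_process() */
    {
        unsigned int done = (q->hw_cons - q->cons) & (q->depth - 1);
        q->cons = q->hw_cons;
        return done;
    }

    static int start_xmit(struct txq *q, unsigned int wis)
    {
        if (!q->started)                        /* queue stalled: driver drops the skb */
            return -1;

        if (wis > QE_FREE(q->prod, q->cons, q->depth)) {
            if (q->hw_cons != q->cons)          /* hardware moved on: reap in line */
                reap(q);
            if (wis > QE_FREE(q->prod, q->cons, q->depth))
                return -1;                      /* still no room: driver stops the queue */
        }

        /* Build the work item(s) at prod and map fragments into unmap_q here. */
        q->prod = (q->prod + wis) & (q->depth - 1);

        /* Publish the new producer index and ring the doorbell:
         * bna_txq_prod_indx_doorbell(tcb) in the driver. */
        return 0;
    }

    int main(void)
    {
        struct txq q = { .prod = 0, .cons = 0, .hw_cons = 0,
                         .depth = 8, .started = true };
        return start_xmit(&q, 2);
    }
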