Lines Matching refs:rcb
Cross-reference hits for the receive control block (struct bna_rcb) in the Brocade BNA Ethernet driver, drivers/net/ethernet/brocade/bna/bnad.c. Each entry gives the source line number, the matched code, and the enclosing function; "argument" and "local" flag the lines where rcb is declared.
272 bnad_rxq_alloc_uninit(struct bnad *bnad, struct bna_rcb *rcb) in bnad_rxq_alloc_uninit() argument
274 struct bnad_rx_unmap_q *unmap_q = rcb->unmap_q; in bnad_rxq_alloc_uninit()
284 bnad_rxq_alloc_init(struct bnad *bnad, struct bna_rcb *rcb) in bnad_rxq_alloc_init() argument
286 struct bnad_rx_unmap_q *unmap_q = rcb->unmap_q; in bnad_rxq_alloc_init()
289 bnad_rxq_alloc_uninit(bnad, rcb); in bnad_rxq_alloc_init()
291 order = get_order(rcb->rxq->buffer_size); in bnad_rxq_alloc_init()
295 if (bna_is_small_rxq(rcb->id)) { in bnad_rxq_alloc_init()
297 unmap_q->map_size = rcb->rxq->buffer_size; in bnad_rxq_alloc_init()
299 if (rcb->rxq->multi_buffer) { in bnad_rxq_alloc_init()
301 unmap_q->map_size = rcb->rxq->buffer_size; in bnad_rxq_alloc_init()
306 (rcb->rxq->buffer_size > 2048) ? in bnad_rxq_alloc_init()
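Note on the bnad_rxq_alloc_init() hits (source lines 284-306): this is where the driver decides how receive buffers for the rcb are carved out of pages. Small rxqs and multi-buffer rxqs map one buffer per order-0 allocation; otherwise the driver allocates a higher-order page and slices it into map_size chunks, 2048 bytes unless buffer_size exceeds 2048, in which case the whole allocation is one mapping. The sketch below is a standalone userspace model of that sizing decision, not driver code: get_order() is re-implemented, PAGE_SIZE is pinned to 4096, and the queue fields become plain parameters.

    /* Userspace model of the buffer-sizing choice in bnad_rxq_alloc_init().
     * Not driver code: get_order() is re-implemented and the queue fields
     * are reduced to plain parameters. */
    #include <stdio.h>

    #define PAGE_SIZE 4096u

    /* smallest 'order' such that (PAGE_SIZE << order) >= size */
    static unsigned int get_order_model(unsigned int size)
    {
        unsigned int order = 0;

        while ((PAGE_SIZE << order) < size)
            order++;
        return order;
    }

    static void pick_sizing(unsigned int buffer_size, int small_rxq, int multi_buffer)
    {
        unsigned int order = get_order_model(buffer_size);
        unsigned int alloc_order, map_size;

        if (small_rxq || multi_buffer) {
            /* one buffer per order-0 allocation */
            alloc_order = 0;
            map_size = buffer_size;
        } else {
            /* one higher-order page, sliced into map_size chunks;
             * the driver asserts (PAGE_SIZE << order) % map_size == 0 */
            alloc_order = order;
            map_size = (buffer_size > 2048) ? (PAGE_SIZE << order) : 2048;
        }
        printf("buf %5u small=%d multi=%d -> alloc_order=%u map_size=%u\n",
               buffer_size, small_rxq, multi_buffer, alloc_order, map_size);
    }

    int main(void)
    {
        pick_sizing(2048, 1, 0);   /* small rxq: order-0, map whole buffer */
        pick_sizing(2048, 0, 0);   /* large rxq, 2K buffers: 2K slices of one page */
        pick_sizing(9000, 0, 0);   /* jumbo: order-2 allocation, one mapping */
        return 0;
    }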
347 bnad_rxq_cleanup(struct bnad *bnad, struct bna_rcb *rcb) in bnad_rxq_cleanup() argument
349 struct bnad_rx_unmap_q *unmap_q = rcb->unmap_q; in bnad_rxq_cleanup()
352 for (i = 0; i < rcb->q_depth; i++) { in bnad_rxq_cleanup()
360 bnad_rxq_alloc_uninit(bnad, rcb); in bnad_rxq_cleanup()
364 bnad_rxq_refill_page(struct bnad *bnad, struct bna_rcb *rcb, u32 nalloc) in bnad_rxq_refill_page() argument
367 struct bnad_rx_unmap_q *unmap_q = rcb->unmap_q; in bnad_rxq_refill_page()
374 prod = rcb->producer_index; in bnad_rxq_refill_page()
375 q_depth = rcb->q_depth; in bnad_rxq_refill_page()
396 rcb->rxq->rxbuf_alloc_failed++; in bnad_rxq_refill_page()
414 rxent = &((struct bna_rxq_entry *)rcb->sw_q)[prod]; in bnad_rxq_refill_page()
422 rcb->producer_index = prod; in bnad_rxq_refill_page()
424 if (likely(test_bit(BNAD_RXQ_POST_OK, &rcb->flags))) in bnad_rxq_refill_page()
425 bna_rxq_prod_indx_doorbell(rcb); in bnad_rxq_refill_page()
432 bnad_rxq_refill_skb(struct bnad *bnad, struct bna_rcb *rcb, u32 nalloc) in bnad_rxq_refill_skb() argument
435 struct bnad_rx_unmap_q *unmap_q = rcb->unmap_q; in bnad_rxq_refill_skb()
441 buff_sz = rcb->rxq->buffer_size; in bnad_rxq_refill_skb()
442 prod = rcb->producer_index; in bnad_rxq_refill_skb()
443 q_depth = rcb->q_depth; in bnad_rxq_refill_skb()
453 rcb->rxq->rxbuf_alloc_failed++; in bnad_rxq_refill_skb()
463 rxent = &((struct bna_rxq_entry *)rcb->sw_q)[prod]; in bnad_rxq_refill_skb()
471 rcb->producer_index = prod; in bnad_rxq_refill_skb()
473 if (likely(test_bit(BNAD_RXQ_POST_OK, &rcb->flags))) in bnad_rxq_refill_skb()
474 bna_rxq_prod_indx_doorbell(rcb); in bnad_rxq_refill_skb()
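Note on the two refill functions (bnad_rxq_refill_page(), source lines 364-425, and bnad_rxq_refill_skb(), 432-474): both follow the same ring-producer shape. Start at rcb->producer_index, write one bna_rxq_entry into rcb->sw_q per buffer allocated, advance the index modulo q_depth, bump rxq->rxbuf_alloc_failed on allocation failure, and ring the doorbell only while BNAD_RXQ_POST_OK is set (cleared by bnad_cb_rx_stall(), see the hits further down). A compact userspace model of that loop, with allocation and the doorbell stubbed:

    /* Userspace model of the refill loop shared by bnad_rxq_refill_skb()
     * and bnad_rxq_refill_page(). Allocation and the doorbell write are
     * stubbed; the index arithmetic mirrors the driver's macros. */
    #include <stdio.h>
    #include <stdint.h>
    #include <stdbool.h>

    #define Q_DEPTH 8u /* must be a power of two */

    struct rcb_model {
        uint32_t producer_index;
        uint32_t alloc_failed;
        uint64_t sw_q[Q_DEPTH]; /* stands in for bna_rxq_entry DMA addresses */
        bool post_ok;           /* BNAD_RXQ_POST_OK */
    };

    static bool alloc_buffer(uint64_t *dma) /* stub: pretend allocation */
    {
        static uint64_t next = 0x1000;
        *dma = next;
        next += 0x1000;
        return true;
    }

    static void refill(struct rcb_model *rcb, uint32_t nalloc)
    {
        uint32_t prod = rcb->producer_index;
        uint32_t alloced = 0;

        while (nalloc--) {
            uint64_t dma;
            if (!alloc_buffer(&dma)) {
                rcb->alloc_failed++;           /* rxq->rxbuf_alloc_failed++ */
                break;
            }
            rcb->sw_q[prod] = dma;             /* fill bna_rxq_entry at 'prod' */
            prod = (prod + 1) & (Q_DEPTH - 1); /* BNA_QE_INDX_INC */
            alloced++;
        }

        if (alloced) {
            /* the driver puts a memory barrier here before publishing,
             * then rings the doorbell only while posting is allowed */
            rcb->producer_index = prod;
            if (rcb->post_ok)
                printf("doorbell: producer_index=%u\n", prod);
        }
    }

    int main(void)
    {
        struct rcb_model rcb = { .post_ok = true };
        refill(&rcb, 5);
        return 0;
    }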
481 bnad_rxq_post(struct bnad *bnad, struct bna_rcb *rcb) in bnad_rxq_post() argument
483 struct bnad_rx_unmap_q *unmap_q = rcb->unmap_q; in bnad_rxq_post()
486 to_alloc = BNA_QE_FREE_CNT(rcb, rcb->q_depth); in bnad_rxq_post()
491 bnad_rxq_refill_skb(bnad, rcb, to_alloc); in bnad_rxq_post()
493 bnad_rxq_refill_page(bnad, rcb, to_alloc); in bnad_rxq_post()
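Note on bnad_rxq_post() (source lines 481-493): it asks BNA_QE_FREE_CNT() how many slots are free and refills only when the count clears a threshold shift, so a trickle of completions does not cost a doorbell per buffer. The macro is the standard power-of-two ring trick: one slot always stays unused, so producer == consumer can only mean "empty". A sketch of the arithmetic; the free-count formula follows the driver's bna.h macro, while the threshold value here is illustrative:

    /* Userspace model of BNA_QE_FREE_CNT() and the refill gate in
     * bnad_rxq_post(). Assumes q_depth is a power of two; the threshold
     * shift value is illustrative. */
    #include <stdio.h>
    #include <stdint.h>

    #define REFILL_THRESHOLD_SHIFT 3 /* illustrative value */

    /* One slot stays unused, so ci == pi means "empty", never "full". */
    static uint32_t qe_free_cnt(uint32_t ci, uint32_t pi, uint32_t q_depth)
    {
        return (ci - pi - 1) & (q_depth - 1);
    }

    int main(void)
    {
        uint32_t q_depth = 64;
        uint32_t ci = 10, pi = 60; /* 50 buffers posted, 13 slots free */
        uint32_t to_alloc = qe_free_cnt(ci, pi, q_depth);

        printf("free slots: %u\n", to_alloc); /* (10 - 60 - 1) & 63 = 13 */
        if (!(to_alloc >> REFILL_THRESHOLD_SHIFT))
            printf("below threshold: skip refill\n");
        else
            printf("refill %u buffers\n", to_alloc);
        return 0;
    }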
511 bnad_cq_drop_packet(struct bnad *bnad, struct bna_rcb *rcb, in bnad_cq_drop_packet() argument
518 unmap_q = rcb->unmap_q; in bnad_cq_drop_packet()
521 BNA_QE_INDX_INC(ci, rcb->q_depth); in bnad_cq_drop_packet()
531 bnad_cq_setup_skb_frags(struct bna_rcb *rcb, struct sk_buff *skb, in bnad_cq_setup_skb_frags() argument
539 unmap_q = rcb->unmap_q; in bnad_cq_setup_skb_frags()
540 bnad = rcb->bnad; in bnad_cq_setup_skb_frags()
548 BNA_QE_INDX_INC(ci, rcb->q_depth); in bnad_cq_setup_skb_frags()
591 struct bna_rcb *rcb = NULL; in bnad_cq_process() local
621 rcb = ccb->rcb[1]; in bnad_cq_process()
623 rcb = ccb->rcb[0]; in bnad_cq_process()
625 unmap_q = rcb->unmap_q; in bnad_cq_process()
628 sop_ci = rcb->consumer_index; in bnad_cq_process()
684 bnad_cq_drop_packet(bnad, rcb, sop_ci, nvecs); in bnad_cq_process()
685 rcb->rxq->rx_packets_with_error++; in bnad_cq_process()
693 bnad_cq_setup_skb_frags(rcb, skb, sop_ci, nvecs, len); in bnad_cq_process()
695 rcb->rxq->rx_packets++; in bnad_cq_process()
696 rcb->rxq->rx_bytes += totlen; in bnad_cq_process()
721 BNA_QE_INDX_ADD(rcb->consumer_index, nvecs, rcb->q_depth); in bnad_cq_process()
731 if (likely(test_bit(BNAD_RXQ_STARTED, &ccb->rcb[0]->flags))) in bnad_cq_process()
734 bnad_rxq_post(bnad, ccb->rcb[0]); in bnad_cq_process()
735 if (ccb->rcb[1]) in bnad_cq_process()
736 bnad_rxq_post(bnad, ccb->rcb[1]); in bnad_cq_process()
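Note on the bnad_cq_process() hits (source lines 591-736): the completion path selects ccb->rcb[1] for the small rxq and ccb->rcb[0] otherwise, remembers the start-of-packet consumer index (sop_ci), walks the packet's nvecs vectors one BNA_QE_INDX_INC() at a time (as bnad_cq_setup_skb_frags() and bnad_cq_drop_packet() do), then retires the whole packet with a single BNA_QE_INDX_ADD() before reposting both queues. A userspace model of that index macro family:

    /* Userspace model of the BNA ring-index macro family used above:
     * BNA_QE_INDX_ADD() and BNA_QE_INDX_INC(). q_depth is a power of two. */
    #include <stdio.h>
    #include <stdint.h>

    static void qe_indx_add(uint32_t *idx, uint32_t num, uint32_t q_depth)
    {
        *idx = (*idx + num) & (q_depth - 1); /* BNA_QE_INDX_ADD */
    }

    int main(void)
    {
        uint32_t q_depth = 32;
        uint32_t sop_ci = 30;   /* start-of-packet consumer index */
        uint32_t nvecs = 3;     /* multi-fragment completion: 3 vectors */
        uint32_t ci = sop_ci;

        /* per-vector walk, as in bnad_cq_setup_skb_frags()/_drop_packet() */
        for (uint32_t v = 0; v < nvecs; v++) {
            printf("unmap vector at ci=%u\n", ci);
            qe_indx_add(&ci, 1, q_depth); /* BNA_QE_INDX_INC */
        }

        /* retire the whole packet at once, as at the end of bnad_cq_process() */
        qe_indx_add(&sop_ci, nvecs, q_depth);
        printf("consumer_index now %u (wrapped)\n", sop_ci); /* 30 -> 1 */
        return 0;
    }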
1175 clear_bit(BNAD_RXQ_POST_OK, &ccb->rcb[0]->flags); in bnad_cb_rx_stall()
1177 if (ccb->rcb[1]) in bnad_cb_rx_stall()
1178 clear_bit(BNAD_RXQ_POST_OK, &ccb->rcb[1]->flags); in bnad_cb_rx_stall()
1210 bnad_rxq_cleanup(bnad, rx_ctrl->ccb->rcb[0]); in bnad_rx_cleanup()
1211 if (rx_ctrl->ccb->rcb[1]) in bnad_rx_cleanup()
1212 bnad_rxq_cleanup(bnad, rx_ctrl->ccb->rcb[1]); in bnad_rx_cleanup()
1234 clear_bit(BNAD_RXQ_STARTED, &ccb->rcb[0]->flags); in bnad_cb_rx_cleanup()
1236 if (ccb->rcb[1]) in bnad_cb_rx_cleanup()
1237 clear_bit(BNAD_RXQ_STARTED, &ccb->rcb[1]->flags); in bnad_cb_rx_cleanup()
1248 struct bna_rcb *rcb; in bnad_cb_rx_post() local
1261 rcb = ccb->rcb[j]; in bnad_cb_rx_post()
1262 if (!rcb) in bnad_cb_rx_post()
1265 bnad_rxq_alloc_init(bnad, rcb); in bnad_cb_rx_post()
1266 set_bit(BNAD_RXQ_STARTED, &rcb->flags); in bnad_cb_rx_post()
1267 set_bit(BNAD_RXQ_POST_OK, &rcb->flags); in bnad_cb_rx_post()
1268 bnad_rxq_post(bnad, rcb); in bnad_cb_rx_post()
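Note on the flag hits (source lines 1175-1268): three callbacks manage the per-rcb flag pair. bnad_cb_rx_post() (re)initializes the allocator, sets BNAD_RXQ_STARTED and BNAD_RXQ_POST_OK, and primes the ring; bnad_cb_rx_stall() clears POST_OK so refills stop ringing the doorbell; bnad_cb_rx_cleanup() clears STARTED before bnad_rx_cleanup() drains the queues. A bare-bones model of that lifecycle, with the kernel's set_bit()/clear_bit()/test_bit() on rcb->flags reduced to plain bools:

    /* Userspace model of the BNAD_RXQ_STARTED / BNAD_RXQ_POST_OK lifecycle
     * driven by bnad_cb_rx_post(), bnad_cb_rx_stall() and
     * bnad_cb_rx_cleanup(). Atomic bitops become plain bools here. */
    #include <stdio.h>
    #include <stdbool.h>

    struct rcb_model {
        bool started; /* BNAD_RXQ_STARTED: completions may be processed */
        bool post_ok; /* BNAD_RXQ_POST_OK: doorbell writes are allowed  */
    };

    static void rx_post(struct rcb_model *rcb)
    {
        /* bnad_rxq_alloc_init() + the initial bnad_rxq_post() go here */
        rcb->started = true;
        rcb->post_ok = true;
    }

    static void rx_stall(struct rcb_model *rcb)
    {
        rcb->post_ok = false; /* stop doorbells; posted buffers still drain */
    }

    static void rx_cleanup(struct rcb_model *rcb)
    {
        rcb->started = false; /* stop completion processing before teardown */
    }

    int main(void)
    {
        struct rcb_model rcb = { 0 };

        rx_post(&rcb);
        printf("running: started=%d post_ok=%d\n", rcb.started, rcb.post_ok);
        rx_stall(&rcb);
        rx_cleanup(&rcb);
        printf("stopped: started=%d post_ok=%d\n", rcb.started, rcb.post_ok);
        return 0;
    }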
2409 rx_ctrl[j].ccb->rcb[0]->rxq->rx_packets; in bnad_netdev_qstats_fill()
2411 rx_ctrl[j].ccb->rcb[0]->rxq->rx_bytes; in bnad_netdev_qstats_fill()
2412 if (bnad->rx_info[i].rx_ctrl[j].ccb->rcb[1] && in bnad_netdev_qstats_fill()
2414 rcb[1]->rxq) { in bnad_netdev_qstats_fill()
2417 ccb->rcb[1]->rxq->rx_packets; in bnad_netdev_qstats_fill()
2420 ccb->rcb[1]->rxq->rx_bytes; in bnad_netdev_qstats_fill()
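Note on the bnad_netdev_qstats_fill() hits (source lines 2409-2420): these are fragments of one accumulation statement that folds each completion queue's large ring (rcb[0]) and, when present, its small ring (rcb[1]) into the netdev totals; both the rcb[1] pointer and its rxq are checked because the small rxq may not exist. A standalone model of that fold, with the structs reduced to the fields the matched lines touch:

    /* Userspace model of the per-queue stats fold in bnad_netdev_qstats_fill():
     * always add rcb[0]'s counters, and rcb[1]'s only when that ring exists. */
    #include <stdio.h>
    #include <stdint.h>
    #include <stddef.h>

    struct rxq_model { uint64_t rx_packets, rx_bytes; };
    struct rcb_model { struct rxq_model *rxq; };
    struct ccb_model { struct rcb_model *rcb[2]; };

    static void fold(struct ccb_model *ccb, uint64_t *pkts, uint64_t *bytes)
    {
        *pkts  += ccb->rcb[0]->rxq->rx_packets;
        *bytes += ccb->rcb[0]->rxq->rx_bytes;
        if (ccb->rcb[1] && ccb->rcb[1]->rxq) { /* small rxq may be absent */
            *pkts  += ccb->rcb[1]->rxq->rx_packets;
            *bytes += ccb->rcb[1]->rxq->rx_bytes;
        }
    }

    int main(void)
    {
        struct rxq_model large = { 100, 150000 }, small = { 40, 2000 };
        struct rcb_model rcb0 = { &large }, rcb1 = { &small };
        struct ccb_model ccb = { { &rcb0, &rcb1 } };
        uint64_t pkts = 0, bytes = 0;

        fold(&ccb, &pkts, &bytes);
        printf("rx_packets=%llu rx_bytes=%llu\n",
               (unsigned long long)pkts, (unsigned long long)bytes);
        return 0;
    }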