Lines matching refs: rcb
Every match below comes from bnad.c in the Brocade BNA Ethernet driver. The number opening each line is that line's number inside bnad.c, the trailing "in function()" names the enclosing function, and a final "argument"/"local" marks lines where rcb is declared rather than merely used.
273 bnad_rxq_alloc_uninit(struct bnad *bnad, struct bna_rcb *rcb) in bnad_rxq_alloc_uninit() argument
275 struct bnad_rx_unmap_q *unmap_q = rcb->unmap_q; in bnad_rxq_alloc_uninit()
285 bnad_rxq_alloc_init(struct bnad *bnad, struct bna_rcb *rcb) in bnad_rxq_alloc_init() argument
287 struct bnad_rx_unmap_q *unmap_q = rcb->unmap_q; in bnad_rxq_alloc_init()
290 bnad_rxq_alloc_uninit(bnad, rcb); in bnad_rxq_alloc_init()
292 order = get_order(rcb->rxq->buffer_size); in bnad_rxq_alloc_init()
296 if (bna_is_small_rxq(rcb->id)) { in bnad_rxq_alloc_init()
298 unmap_q->map_size = rcb->rxq->buffer_size; in bnad_rxq_alloc_init()
300 if (rcb->rxq->multi_buffer) { in bnad_rxq_alloc_init()
302 unmap_q->map_size = rcb->rxq->buffer_size; in bnad_rxq_alloc_init()
307 (rcb->rxq->buffer_size > 2048) ? in bnad_rxq_alloc_init()
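
The bnad_rxq_alloc_init() matches above show three inputs steering the receive-buffer strategy: whether this is the "small" RXQ (bna_is_small_rxq()), whether multi-buffer mode is enabled, and the configured buffer size, with get_order() sizing a page block and 2048 apparently acting as a floor. The sketch below is a minimal userspace model of that decision, not the driver's code: choose_rx_alloc, alloc_strategy and order_for are hypothetical names, and the assumption that small queues take per-skb buffers while large single-buffer queues carve chunks out of a higher-order page block is inferred from the matches, so the real type assignments may differ.

#include <stdio.h>

#define PAGE_SIZE	4096UL

enum rxbuf_type { RXBUF_SKB, RXBUF_PAGE, RXBUF_MULTI_BUFF };

struct alloc_strategy {
	enum rxbuf_type type;
	int alloc_order;	/* page-allocation order: 2^order pages */
	unsigned long map_size;	/* bytes DMA-mapped per ring entry */
};

/* Smallest order with (PAGE_SIZE << order) >= size; a userspace
 * stand-in for the kernel's get_order(). */
static int order_for(unsigned long size)
{
	int order = 0;

	while ((PAGE_SIZE << order) < size)
		order++;
	return order;
}

static struct alloc_strategy
choose_rx_alloc(unsigned long buffer_size, int is_small_rxq, int multi_buffer)
{
	struct alloc_strategy s = {
		.type = RXBUF_PAGE, .alloc_order = 0, .map_size = buffer_size,
	};

	if (is_small_rxq) {
		/* small queue: one skb-sized buffer per ring entry */
		s.type = RXBUF_SKB;
	} else if (multi_buffer) {
		/* jumbo frames spread over several buffer_size chunks */
		s.type = RXBUF_MULTI_BUFF;
	} else {
		/* one large buffer per packet: allocate a page block and
		 * carve it into map_size chunks, with 2048 as the floor
		 * suggested by the "(buffer_size > 2048) ?" match above */
		s.alloc_order = order_for(buffer_size);
		s.map_size = buffer_size > 2048 ?
			PAGE_SIZE << s.alloc_order : 2048;
	}
	return s;
}

int main(void)
{
	struct alloc_strategy s = choose_rx_alloc(9000, 0, 0);

	printf("type=%d order=%d map_size=%lu\n",
	       s.type, s.alloc_order, s.map_size);
	return 0;
}
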
348 bnad_rxq_cleanup(struct bnad *bnad, struct bna_rcb *rcb) in bnad_rxq_cleanup() argument
350 struct bnad_rx_unmap_q *unmap_q = rcb->unmap_q; in bnad_rxq_cleanup()
353 for (i = 0; i < rcb->q_depth; i++) { in bnad_rxq_cleanup()
361 bnad_rxq_alloc_uninit(bnad, rcb); in bnad_rxq_cleanup()
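
Note that bnad_rxq_cleanup() iterates all q_depth slots rather than a producer/consumer window: a stopped queue can hold mapped buffers anywhere in the ring, so teardown must visit every entry before bnad_rxq_alloc_uninit() resets the strategy. A small sketch of that walk, with a hypothetical struct unmap record and free() standing in for DMA unmap plus page/skb release:

#include <stdint.h>
#include <stdlib.h>

#define Q_DEPTH 8

/* Hypothetical per-slot unmap record: the buffer allocated and mapped
 * for that ring entry, or NULL when the slot is empty. */
struct unmap { void *buf; };

static void rxq_cleanup(struct unmap *unmap_q, uint32_t q_depth)
{
	for (uint32_t i = 0; i < q_depth; i++) {
		if (!unmap_q[i].buf)
			continue;
		free(unmap_q[i].buf);	/* stands in for DMA unmap + free */
		unmap_q[i].buf = NULL;
	}
	/* the driver then calls bnad_rxq_alloc_uninit() to reset state */
}

int main(void)
{
	struct unmap unmap_q[Q_DEPTH] = { { NULL } };

	unmap_q[3].buf = malloc(2048);	/* pretend one slot is populated */
	rxq_cleanup(unmap_q, Q_DEPTH);
	return 0;
}
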
365 bnad_rxq_refill_page(struct bnad *bnad, struct bna_rcb *rcb, u32 nalloc) in bnad_rxq_refill_page() argument
368 struct bnad_rx_unmap_q *unmap_q = rcb->unmap_q; in bnad_rxq_refill_page()
375 prod = rcb->producer_index; in bnad_rxq_refill_page()
376 q_depth = rcb->q_depth; in bnad_rxq_refill_page()
397 rcb->rxq->rxbuf_alloc_failed++; in bnad_rxq_refill_page()
406 rcb->rxq->rxbuf_map_failed++; in bnad_rxq_refill_page()
421 rxent = &((struct bna_rxq_entry *)rcb->sw_q)[prod]; in bnad_rxq_refill_page()
429 rcb->producer_index = prod; in bnad_rxq_refill_page()
431 if (likely(test_bit(BNAD_RXQ_POST_OK, &rcb->flags))) in bnad_rxq_refill_page()
432 bna_rxq_prod_indx_doorbell(rcb); in bnad_rxq_refill_page()
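
bnad_rxq_refill_page() above and bnad_rxq_refill_skb() below share one producer-side pattern: stage up to nalloc buffers starting at producer_index, wrap modulo q_depth, bail out early on an allocation or DMA-mapping failure while bumping rxbuf_alloc_failed or rxbuf_map_failed, then publish the new producer index and ring the doorbell only while BNAD_RXQ_POST_OK is set. A minimal userspace model of that shape, with hypothetical names and malloc() standing in for page/skb allocation and mapping:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define Q_DEPTH 8	/* power of two, as ring depths typically are */

struct ring {
	void *sw_q[Q_DEPTH];	/* stand-in for the descriptor ring */
	uint32_t producer_index;
	int post_ok;		/* models the BNAD_RXQ_POST_OK flag */
	unsigned int alloc_failed;
};

static void doorbell(const struct ring *r)
{
	printf("doorbell: producer=%u\n", r->producer_index);
}

static uint32_t refill(struct ring *r, uint32_t nalloc)
{
	uint32_t prod = r->producer_index;
	uint32_t alloced = 0;

	while (nalloc--) {
		void *buf = malloc(2048);	/* page/skb alloc + map */

		if (!buf) {
			/* stop early and count the failure, as the
			 * rxbuf_alloc_failed counter above does */
			r->alloc_failed++;
			break;
		}
		r->sw_q[prod] = buf;
		prod = (prod + 1) & (Q_DEPTH - 1);	/* wrap at q_depth */
		alloced++;
	}

	if (alloced) {
		r->producer_index = prod;
		/* only tell hardware about new buffers while posting is
		 * allowed; a stalled queue keeps filling sw state only */
		if (r->post_ok)
			doorbell(r);
	}
	return alloced;
}

int main(void)
{
	struct ring r = { .post_ok = 1 };
	uint32_t n = refill(&r, 4);

	for (uint32_t i = 0; i < n; i++)
		free(r.sw_q[i]);
	return 0;
}
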
439 bnad_rxq_refill_skb(struct bnad *bnad, struct bna_rcb *rcb, u32 nalloc) in bnad_rxq_refill_skb() argument
442 struct bnad_rx_unmap_q *unmap_q = rcb->unmap_q; in bnad_rxq_refill_skb()
448 buff_sz = rcb->rxq->buffer_size; in bnad_rxq_refill_skb()
449 prod = rcb->producer_index; in bnad_rxq_refill_skb()
450 q_depth = rcb->q_depth; in bnad_rxq_refill_skb()
460 rcb->rxq->rxbuf_alloc_failed++; in bnad_rxq_refill_skb()
469 rcb->rxq->rxbuf_map_failed++; in bnad_rxq_refill_skb()
477 rxent = &((struct bna_rxq_entry *)rcb->sw_q)[prod]; in bnad_rxq_refill_skb()
485 rcb->producer_index = prod; in bnad_rxq_refill_skb()
487 if (likely(test_bit(BNAD_RXQ_POST_OK, &rcb->flags))) in bnad_rxq_refill_skb()
488 bna_rxq_prod_indx_doorbell(rcb); in bnad_rxq_refill_skb()
495 bnad_rxq_post(struct bnad *bnad, struct bna_rcb *rcb) in bnad_rxq_post() argument
497 struct bnad_rx_unmap_q *unmap_q = rcb->unmap_q; in bnad_rxq_post()
500 to_alloc = BNA_QE_FREE_CNT(rcb, rcb->q_depth); in bnad_rxq_post()
505 bnad_rxq_refill_skb(bnad, rcb, to_alloc); in bnad_rxq_post()
507 bnad_rxq_refill_page(bnad, rcb, to_alloc); in bnad_rxq_post()
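
bnad_rxq_post() first asks BNA_QE_FREE_CNT how many ring slots can be filled and then dispatches to the skb or the page refill depending on the unmap queue's type. For a power-of-two ring, the free count is conventionally "(consumer - producer - 1) & (depth - 1)", keeping one slot empty so that producer == consumer always means an empty ring; the exact macro body and BNAD's refill threshold are not visible in this listing, so the sketch below is an assumption:

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Assumed form of the classic power-of-two ring computation: one slot
 * stays reserved so producer == consumer unambiguously means "empty". */
static uint32_t qe_free_cnt(uint32_t producer, uint32_t consumer,
			    uint32_t q_depth)
{
	return (consumer - producer - 1) & (q_depth - 1);
}

int main(void)
{
	/* depth 8, entries 2..4 in flight: slots 5, 6, 7, 0 are free */
	assert(qe_free_cnt(5, 2, 8) == 4);
	/* empty ring: everything but the reserved slot is fillable */
	assert(qe_free_cnt(0, 0, 8) == 7);
	printf("free-count checks passed\n");
	return 0;
}
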
525 bnad_cq_drop_packet(struct bnad *bnad, struct bna_rcb *rcb, in bnad_cq_drop_packet() argument
532 unmap_q = rcb->unmap_q; in bnad_cq_drop_packet()
535 BNA_QE_INDX_INC(ci, rcb->q_depth); in bnad_cq_drop_packet()
545 bnad_cq_setup_skb_frags(struct bna_rcb *rcb, struct sk_buff *skb, in bnad_cq_setup_skb_frags() argument
553 unmap_q = rcb->unmap_q; in bnad_cq_setup_skb_frags()
554 bnad = rcb->bnad; in bnad_cq_setup_skb_frags()
562 BNA_QE_INDX_INC(ci, rcb->q_depth); in bnad_cq_setup_skb_frags()
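
bnad_cq_setup_skb_frags() consumes a packet that spans nvecs ring entries beginning at completion index ci: each buffer is unmapped, attached to the skb as a fragment, and ci advances with BNA_QE_INDX_INC. Below is a simplified, self-contained model of just the index and length bookkeeping; fake_skb and gather_frags are hypothetical names, and real fragment attachment involves kernel page and skb accounting this sketch omits:

#include <stdint.h>
#include <stdio.h>

#define Q_DEPTH 8

struct frag {
	void *buf;
	uint32_t len;
};

struct fake_skb {
	struct frag frags[4];
	unsigned int nr_frags;
	uint32_t data_len;
};

/* Walk nvecs ring entries starting at ci, attach each buffer to the
 * skb as a fragment, and return the updated consumer index. */
static uint32_t gather_frags(struct fake_skb *skb, void **sw_bufs,
			     uint32_t ci, uint32_t nvecs, uint32_t totlen,
			     uint32_t map_size)
{
	while (nvecs--) {
		uint32_t flen = totlen < map_size ? totlen : map_size;

		skb->frags[skb->nr_frags].buf = sw_bufs[ci];
		skb->frags[skb->nr_frags].len = flen;
		skb->nr_frags++;
		skb->data_len += flen;
		totlen -= flen;
		ci = (ci + 1) & (Q_DEPTH - 1);	/* BNA_QE_INDX_INC analogue */
	}
	return ci;
}

int main(void)
{
	char a[2048], b[2048];
	void *sw_bufs[Q_DEPTH] = { a, b };
	struct fake_skb skb = { 0 };
	uint32_t ci = gather_frags(&skb, sw_bufs, 0, 2, 3000, 2048);

	printf("ci=%u nr_frags=%u data_len=%u\n",
	       ci, skb.nr_frags, skb.data_len);
	return 0;
}
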
605 struct bna_rcb *rcb = NULL; in bnad_cq_process() local
635 rcb = ccb->rcb[1]; in bnad_cq_process()
637 rcb = ccb->rcb[0]; in bnad_cq_process()
639 unmap_q = rcb->unmap_q; in bnad_cq_process()
642 sop_ci = rcb->consumer_index; in bnad_cq_process()
698 bnad_cq_drop_packet(bnad, rcb, sop_ci, nvecs); in bnad_cq_process()
699 rcb->rxq->rx_packets_with_error++; in bnad_cq_process()
707 bnad_cq_setup_skb_frags(rcb, skb, sop_ci, nvecs, len); in bnad_cq_process()
709 rcb->rxq->rx_packets++; in bnad_cq_process()
710 rcb->rxq->rx_bytes += totlen; in bnad_cq_process()
735 BNA_QE_INDX_ADD(rcb->consumer_index, nvecs, rcb->q_depth); in bnad_cq_process()
744 if (likely(test_bit(BNAD_RXQ_STARTED, &ccb->rcb[0]->flags))) in bnad_cq_process()
747 bnad_rxq_post(bnad, ccb->rcb[0]); in bnad_cq_process()
748 if (ccb->rcb[1]) in bnad_cq_process()
749 bnad_rxq_post(bnad, ccb->rcb[1]); in bnad_cq_process()
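
The bnad_cq_process() matches outline the consumer side as a whole: each completion selects rcb[1] or rcb[0] (the CCB's small or large receive queue), the packet's vectors start at that queue's consumer_index (sop_ci), errored packets go through bnad_cq_drop_packet() and count as rx_packets_with_error, good ones become skbs and bump rx_packets and rx_bytes, and either way the consumer index advances by nvecs before both queues are replenished with bnad_rxq_post(). A compressed model of that control flow, with hypothetical struct names:

#include <stdint.h>
#include <stdio.h>

#define Q_DEPTH 8

struct cmpl {
	int small_q;	/* which of the CCB's two RCBs produced it */
	uint32_t nvecs;	/* ring entries this packet spans */
	uint32_t totlen;
	int error;
};

struct rxq_model {
	uint32_t consumer_index;
	unsigned long rx_packets, rx_bytes, rx_packets_with_error;
};

static void process_one(struct rxq_model *rcb[2], const struct cmpl *c)
{
	struct rxq_model *q = c->small_q ? rcb[1] : rcb[0];
	uint32_t sop_ci = q->consumer_index;	/* start-of-packet index */

	if (c->error) {
		/* drop path: the nvecs buffers at sop_ci are freed */
		q->rx_packets_with_error++;
	} else {
		/* success path: an skb is built from those buffers */
		q->rx_packets++;
		q->rx_bytes += c->totlen;
	}
	/* consumed either way: BNA_QE_INDX_ADD(consumer, nvecs, depth) */
	q->consumer_index = (sop_ci + c->nvecs) & (Q_DEPTH - 1);
}

int main(void)
{
	struct rxq_model large = { 0 }, small = { 0 };
	struct rxq_model *rcb[2] = { &large, &small };
	struct cmpl ok = { .nvecs = 2, .totlen = 3000 };
	struct cmpl bad = { .error = 1, .nvecs = 1 };

	process_one(rcb, &ok);
	process_one(rcb, &bad);
	printf("pkts=%lu bytes=%lu errs=%lu ci=%u\n",
	       large.rx_packets, large.rx_bytes,
	       large.rx_packets_with_error, large.consumer_index);
	return 0;
}
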
1178 clear_bit(BNAD_RXQ_POST_OK, &ccb->rcb[0]->flags); in bnad_cb_rx_stall()
1180 if (ccb->rcb[1]) in bnad_cb_rx_stall()
1181 clear_bit(BNAD_RXQ_POST_OK, &ccb->rcb[1]->flags); in bnad_cb_rx_stall()
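
bnad_cb_rx_stall() only clears BNAD_RXQ_POST_OK on each RCB. Combined with the doorbell test in the refill paths above, the apparent effect is that refill may still stage buffers in the software ring but stops notifying hardware until posting is re-enabled. A toy illustration of that gating, with hypothetical flag names:

#include <stdio.h>

enum { RXQ_STARTED = 1 << 0, RXQ_POST_OK = 1 << 1 };

/* analogue of clear_bit(BNAD_RXQ_POST_OK, &rcb->flags) */
static void rx_stall(unsigned long *flags)
{
	*flags &= ~(unsigned long)RXQ_POST_OK;
}

static void maybe_doorbell(unsigned long flags)
{
	if (flags & RXQ_POST_OK)
		printf("doorbell rung\n");
	else
		printf("stalled: refill staged, doorbell suppressed\n");
}

int main(void)
{
	unsigned long flags = RXQ_STARTED | RXQ_POST_OK;

	maybe_doorbell(flags);	/* normal operation */
	rx_stall(&flags);
	maybe_doorbell(flags);	/* after the stall callback */
	return 0;
}
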
1213 bnad_rxq_cleanup(bnad, rx_ctrl->ccb->rcb[0]); in bnad_rx_cleanup()
1214 if (rx_ctrl->ccb->rcb[1]) in bnad_rx_cleanup()
1215 bnad_rxq_cleanup(bnad, rx_ctrl->ccb->rcb[1]); in bnad_rx_cleanup()
1237 clear_bit(BNAD_RXQ_STARTED, &ccb->rcb[0]->flags); in bnad_cb_rx_cleanup()
1239 if (ccb->rcb[1]) in bnad_cb_rx_cleanup()
1240 clear_bit(BNAD_RXQ_STARTED, &ccb->rcb[1]->flags); in bnad_cb_rx_cleanup()
1251 struct bna_rcb *rcb; in bnad_cb_rx_post() local
1264 rcb = ccb->rcb[j]; in bnad_cb_rx_post()
1265 if (!rcb) in bnad_cb_rx_post()
1268 bnad_rxq_alloc_init(bnad, rcb); in bnad_cb_rx_post()
1269 set_bit(BNAD_RXQ_STARTED, &rcb->flags); in bnad_cb_rx_post()
1270 set_bit(BNAD_RXQ_POST_OK, &rcb->flags); in bnad_cb_rx_post()
1271 bnad_rxq_post(bnad, rcb); in bnad_cb_rx_post()
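
bnad_cb_rx_post() spells out the bring-up order per RCB: skip a missing rcb[1], rerun bnad_rxq_alloc_init() to pick the buffer strategy, set BNAD_RXQ_STARTED and BNAD_RXQ_POST_OK, and only then fill the ring with bnad_rxq_post(). A minimal sketch of that sequence using stand-in types:

#include <stddef.h>
#include <stdio.h>

enum { RXQ_STARTED = 1 << 0, RXQ_POST_OK = 1 << 1 };

struct rcb { unsigned long flags; };
/* rcb[1] stays NULL when no small receive queue is configured */
struct ccb { struct rcb *rcb[2]; };

static void rxq_alloc_init(struct rcb *rcb) { rcb->flags = 0; }
static void rxq_post(struct rcb *rcb)
{
	printf("posted, flags=%#lx\n", rcb->flags);
}

static void rx_post(struct ccb *ccb)
{
	for (int j = 0; j < 2; j++) {
		struct rcb *rcb = ccb->rcb[j];

		if (!rcb)
			continue;
		rxq_alloc_init(rcb);
		rcb->flags |= RXQ_STARTED | RXQ_POST_OK;
		rxq_post(rcb);
	}
}

int main(void)
{
	struct rcb large = { 0 };
	struct ccb ccb = { .rcb = { &large, NULL } };

	rx_post(&ccb);
	return 0;
}
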
2411 rx_ctrl[j].ccb->rcb[0]->rxq->rx_packets; in bnad_netdev_qstats_fill()
2413 rx_ctrl[j].ccb->rcb[0]->rxq->rx_bytes; in bnad_netdev_qstats_fill()
2414 if (bnad->rx_info[i].rx_ctrl[j].ccb->rcb[1] && in bnad_netdev_qstats_fill()
2416 rcb[1]->rxq) { in bnad_netdev_qstats_fill()
2419 ccb->rcb[1]->rxq->rx_packets; in bnad_netdev_qstats_fill()
2422 ccb->rcb[1]->rxq->rx_bytes; in bnad_netdev_qstats_fill()
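
Finally, bnad_netdev_qstats_fill() folds per-queue counters into netdev-wide totals: rcb[0]'s rxq always contributes, while rcb[1]'s is added only when the small queue and its rxq exist, hence the NULL checks in the matches above. A sketch of that aggregation with hypothetical names:

#include <stdio.h>

struct rxq_stats { unsigned long rx_packets, rx_bytes; };

struct rx_queue_pair {
	struct rxq_stats *q[2];	/* q[1] may be NULL, like rcb[1] */
};

static void sum_stats(const struct rx_queue_pair *pairs, int n,
		      unsigned long *packets, unsigned long *bytes)
{
	for (int i = 0; i < n; i++) {
		for (int j = 0; j < 2; j++) {
			const struct rxq_stats *s = pairs[i].q[j];

			if (!s)
				continue;	/* no small rxq configured */
			*packets += s->rx_packets;
			*bytes += s->rx_bytes;
		}
	}
}

int main(void)
{
	struct rxq_stats a = { 10, 15000 }, b = { 3, 4500 };
	struct rx_queue_pair pairs[] = { { { &a, &b } }, { { &a, NULL } } };
	unsigned long packets = 0, bytes = 0;

	sum_stats(pairs, 2, &packets, &bytes);
	printf("packets=%lu bytes=%lu\n", packets, bytes);
	return 0;
}
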