Lines matching refs: rxr
720 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_free_rx_mem() local
724 if (rxr->rx_desc_ring[j]) in bnx2_free_rx_mem()
726 rxr->rx_desc_ring[j], in bnx2_free_rx_mem()
727 rxr->rx_desc_mapping[j]); in bnx2_free_rx_mem()
728 rxr->rx_desc_ring[j] = NULL; in bnx2_free_rx_mem()
730 vfree(rxr->rx_buf_ring); in bnx2_free_rx_mem()
731 rxr->rx_buf_ring = NULL; in bnx2_free_rx_mem()
734 if (rxr->rx_pg_desc_ring[j]) in bnx2_free_rx_mem()
736 rxr->rx_pg_desc_ring[j], in bnx2_free_rx_mem()
737 rxr->rx_pg_desc_mapping[j]); in bnx2_free_rx_mem()
738 rxr->rx_pg_desc_ring[j] = NULL; in bnx2_free_rx_mem()
740 vfree(rxr->rx_pg_ring); in bnx2_free_rx_mem()
741 rxr->rx_pg_ring = NULL; in bnx2_free_rx_mem()
774 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_alloc_rx_mem() local
777 rxr->rx_buf_ring = in bnx2_alloc_rx_mem()
779 if (rxr->rx_buf_ring == NULL) in bnx2_alloc_rx_mem()
783 rxr->rx_desc_ring[j] = in bnx2_alloc_rx_mem()
786 &rxr->rx_desc_mapping[j], in bnx2_alloc_rx_mem()
788 if (rxr->rx_desc_ring[j] == NULL) in bnx2_alloc_rx_mem()
794 rxr->rx_pg_ring = vzalloc(SW_RXPG_RING_SIZE * in bnx2_alloc_rx_mem()
796 if (rxr->rx_pg_ring == NULL) in bnx2_alloc_rx_mem()
802 rxr->rx_pg_desc_ring[j] = in bnx2_alloc_rx_mem()
805 &rxr->rx_pg_desc_mapping[j], in bnx2_alloc_rx_mem()
807 if (rxr->rx_pg_desc_ring[j] == NULL) in bnx2_alloc_rx_mem()
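The free/alloc pairs above manage a two-level receive ring: a flat software ring (vzalloc'd) holding per-buffer bookkeeping, plus a hardware buffer-descriptor ring split across several DMA-coherent pages. Below is a minimal userspace sketch of that layout under assumed, illustrative sizes; calloc()/free() stand in for vzalloc()/dma_alloc_coherent(), and the struct fields are simplified stand-ins rather than the driver's real types.

```c
/* Simplified model of the rx ring layout referenced above:
 * a flat software ring plus a hardware descriptor ring split
 * across several pages.  Sizes and types are illustrative.
 */
#include <stdio.h>
#include <stdlib.h>

#define RX_PAGES       4            /* number of descriptor-ring pages */
#define DESC_PER_PAGE  256          /* descriptors per page            */
#define RX_RING_SIZE   (RX_PAGES * DESC_PER_PAGE)

struct sw_bd { void *data; unsigned long long dma; };          /* software side */
struct rx_bd { unsigned long long haddr; unsigned int len; };  /* hardware side */

struct rx_ring_info {
	struct sw_bd *rx_buf_ring;               /* flat, one entry per bd */
	struct rx_bd *rx_desc_ring[RX_PAGES];    /* one DMA page per entry */
};

static int alloc_rx_mem(struct rx_ring_info *rxr)
{
	int j;

	rxr->rx_buf_ring = calloc(RX_RING_SIZE, sizeof(struct sw_bd));
	if (!rxr->rx_buf_ring)
		return -1;

	for (j = 0; j < RX_PAGES; j++) {
		/* dma_alloc_coherent() in the driver */
		rxr->rx_desc_ring[j] = calloc(DESC_PER_PAGE, sizeof(struct rx_bd));
		if (!rxr->rx_desc_ring[j])
			return -1;
	}
	return 0;
}

static void free_rx_mem(struct rx_ring_info *rxr)
{
	int j;

	for (j = 0; j < RX_PAGES; j++) {
		free(rxr->rx_desc_ring[j]);     /* dma_free_coherent() in the driver */
		rxr->rx_desc_ring[j] = NULL;
	}
	free(rxr->rx_buf_ring);                 /* vfree() in the driver */
	rxr->rx_buf_ring = NULL;
}

int main(void)
{
	struct rx_ring_info rxr = { 0 };

	if (alloc_rx_mem(&rxr) == 0)
		printf("allocated %d descriptors across %d pages\n",
		       RX_RING_SIZE, RX_PAGES);
	free_rx_mem(&rxr);
	return 0;
}
```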
2706 bnx2_alloc_rx_page(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u16 index, gfp_t gfp) in bnx2_alloc_rx_page() argument
2709 struct bnx2_sw_pg *rx_pg = &rxr->rx_pg_ring[index]; in bnx2_alloc_rx_page()
2711 &rxr->rx_pg_desc_ring[BNX2_RX_RING(index)][BNX2_RX_IDX(index)]; in bnx2_alloc_rx_page()
2731 bnx2_free_rx_page(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u16 index) in bnx2_free_rx_page() argument
2733 struct bnx2_sw_pg *rx_pg = &rxr->rx_pg_ring[index]; in bnx2_free_rx_page()
2747 bnx2_alloc_rx_data(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u16 index, gfp_t gfp) in bnx2_alloc_rx_data() argument
2750 struct bnx2_sw_bd *rx_buf = &rxr->rx_buf_ring[index]; in bnx2_alloc_rx_data()
2753 &rxr->rx_desc_ring[BNX2_RX_RING(index)][BNX2_RX_IDX(index)]; in bnx2_alloc_rx_data()
2774 rxr->rx_prod_bseq += bp->rx_buf_use_size; in bnx2_alloc_rx_data()
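The rx_desc_ring[BNX2_RX_RING(index)][BNX2_RX_IDX(index)] pattern in the allocation helpers splits one flat producer/consumer index into a ring-page number and a slot within that page. The sketch below models that split with assumed constants and simplified macros; the driver's real macros also account for the chain descriptor at the end of each page, which is omitted here.

```c
/* Sketch of the two-level indexing behind
 * rxr->rx_desc_ring[BNX2_RX_RING(index)][BNX2_RX_IDX(index)].
 * Constants and macros are illustrative, not the driver's.
 */
#include <stdio.h>

#define DESC_PER_PAGE  256
#define RX_PAGES       4

#define RX_RING(x)  (((x) / DESC_PER_PAGE) % RX_PAGES)  /* which page   */
#define RX_IDX(x)   ((x) % DESC_PER_PAGE)                /* slot in page */

int main(void)
{
	unsigned int idx;

	/* show the wrap from the end of one page into the next */
	for (idx = 252; idx < 260; idx++)
		printf("flat %4u -> page %u, slot %3u\n",
		       idx, RX_RING(idx), RX_IDX(idx));
	return 0;
}
```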
2923 bnx2_reuse_rx_skb_pages(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, in bnx2_reuse_rx_skb_pages() argument
2930 u16 cons = rxr->rx_pg_cons; in bnx2_reuse_rx_skb_pages()
2932 cons_rx_pg = &rxr->rx_pg_ring[cons]; in bnx2_reuse_rx_skb_pages()
2951 hw_prod = rxr->rx_pg_prod; in bnx2_reuse_rx_skb_pages()
2956 prod_rx_pg = &rxr->rx_pg_ring[prod]; in bnx2_reuse_rx_skb_pages()
2957 cons_rx_pg = &rxr->rx_pg_ring[cons]; in bnx2_reuse_rx_skb_pages()
2958 cons_bd = &rxr->rx_pg_desc_ring[BNX2_RX_RING(cons)] in bnx2_reuse_rx_skb_pages()
2960 prod_bd = &rxr->rx_pg_desc_ring[BNX2_RX_RING(prod)] in bnx2_reuse_rx_skb_pages()
2976 rxr->rx_pg_prod = hw_prod; in bnx2_reuse_rx_skb_pages()
2977 rxr->rx_pg_cons = cons; in bnx2_reuse_rx_skb_pages()
2981 bnx2_reuse_rx_data(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, in bnx2_reuse_rx_data() argument
2987 cons_rx_buf = &rxr->rx_buf_ring[cons]; in bnx2_reuse_rx_data()
2988 prod_rx_buf = &rxr->rx_buf_ring[prod]; in bnx2_reuse_rx_data()
2994 rxr->rx_prod_bseq += bp->rx_buf_use_size; in bnx2_reuse_rx_data()
3004 cons_bd = &rxr->rx_desc_ring[BNX2_RX_RING(cons)][BNX2_RX_IDX(cons)]; in bnx2_reuse_rx_data()
3005 prod_bd = &rxr->rx_desc_ring[BNX2_RX_RING(prod)][BNX2_RX_IDX(prod)]; in bnx2_reuse_rx_data()
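bnx2_reuse_rx_data() and bnx2_reuse_rx_skb_pages() recycle a buffer from the consumer slot back into a producer slot when no fresh allocation is made: the byte-sequence counter advances by the buffer size and the DMA address is carried over into the producer descriptor. A simplified userspace model of that recycle step follows, with the page split flattened and the DMA sync omitted; names and sizes are stand-ins.

```c
/* Simplified model of the buffer-recycle step seen in bnx2_reuse_rx_data(). */
#include <stdio.h>

struct sw_bd { void *data; unsigned long long dma; };
struct rx_bd { unsigned int haddr_hi, haddr_lo; };

#define RING_SIZE     1024
#define BUF_USE_SIZE  1536        /* stand-in for bp->rx_buf_use_size */

static struct sw_bd rx_buf_ring[RING_SIZE];
static struct rx_bd rx_desc_ring[RING_SIZE];   /* flattened: no page split */
static unsigned int rx_prod_bseq;

/* Hand the buffer from the consumer slot back to the producer slot,
 * so the hardware can refill it without a new allocation. */
static void reuse_rx_data(void *data, unsigned int cons, unsigned int prod)
{
	struct sw_bd *cons_rx_buf = &rx_buf_ring[cons];
	struct sw_bd *prod_rx_buf = &rx_buf_ring[prod];

	rx_prod_bseq += BUF_USE_SIZE;
	prod_rx_buf->data = data;

	if (cons == prod)
		return;

	/* the producer descriptor keeps pointing at the same DMA buffer */
	prod_rx_buf->dma = cons_rx_buf->dma;
	rx_desc_ring[prod].haddr_hi = rx_desc_ring[cons].haddr_hi;
	rx_desc_ring[prod].haddr_lo = rx_desc_ring[cons].haddr_lo;
}

int main(void)
{
	static char buf[BUF_USE_SIZE];

	rx_buf_ring[3].data = buf;
	rx_buf_ring[3].dma = 0xdeadb000ull;
	reuse_rx_data(rx_buf_ring[3].data, 3, 7);
	printf("slot 7 owns buffer: %s, dma=%#llx, bseq=%u\n",
	       rx_buf_ring[7].data == buf ? "yes" : "no",
	       rx_buf_ring[7].dma, rx_prod_bseq);
	return 0;
}
```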
3011 bnx2_rx_skb(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u8 *data, in bnx2_rx_skb() argument
3019 err = bnx2_alloc_rx_data(bp, rxr, prod, GFP_ATOMIC); in bnx2_rx_skb()
3021 bnx2_reuse_rx_data(bp, rxr, data, (u16) (ring_idx >> 16), prod); in bnx2_rx_skb()
3027 bnx2_reuse_rx_skb_pages(bp, rxr, NULL, pages); in bnx2_rx_skb()
3046 u16 pg_cons = rxr->rx_pg_cons; in bnx2_rx_skb()
3047 u16 pg_prod = rxr->rx_pg_prod; in bnx2_rx_skb()
3060 rxr->rx_pg_cons = pg_cons; in bnx2_rx_skb()
3061 rxr->rx_pg_prod = pg_prod; in bnx2_rx_skb()
3062 bnx2_reuse_rx_skb_pages(bp, rxr, NULL, in bnx2_rx_skb()
3075 rx_pg = &rxr->rx_pg_ring[pg_cons]; in bnx2_rx_skb()
3087 err = bnx2_alloc_rx_page(bp, rxr, in bnx2_rx_skb()
3091 rxr->rx_pg_cons = pg_cons; in bnx2_rx_skb()
3092 rxr->rx_pg_prod = pg_prod; in bnx2_rx_skb()
3093 bnx2_reuse_rx_skb_pages(bp, rxr, skb, in bnx2_rx_skb()
3109 rxr->rx_pg_prod = pg_prod; in bnx2_rx_skb()
3110 rxr->rx_pg_cons = pg_cons; in bnx2_rx_skb()
3132 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_rx_int() local
3141 sw_cons = rxr->rx_cons; in bnx2_rx_int()
3142 sw_prod = rxr->rx_prod; in bnx2_rx_int()
3160 rx_buf = &rxr->rx_buf_ring[sw_ring_cons]; in bnx2_rx_int()
3174 next_rx_buf = &rxr->rx_buf_ring[next_ring_idx]; in bnx2_rx_int()
3195 bnx2_reuse_rx_data(bp, rxr, data, sw_ring_cons, in bnx2_rx_int()
3202 bnx2_reuse_rx_skb_pages(bp, rxr, NULL, pages); in bnx2_rx_int()
3212 bnx2_reuse_rx_data(bp, rxr, data, sw_ring_cons, in bnx2_rx_int()
3224 bnx2_reuse_rx_data(bp, rxr, data, in bnx2_rx_int()
3228 skb = bnx2_rx_skb(bp, rxr, data, len, hdr_len, dma_addr, in bnx2_rx_int()
3280 rxr->rx_cons = sw_cons; in bnx2_rx_int()
3281 rxr->rx_prod = sw_prod; in bnx2_rx_int()
3284 BNX2_WR16(bp, rxr->rx_pg_bidx_addr, rxr->rx_pg_prod); in bnx2_rx_int()
3286 BNX2_WR16(bp, rxr->rx_bidx_addr, sw_prod); in bnx2_rx_int()
3288 BNX2_WR(bp, rxr->rx_bseq_addr, rxr->rx_prod_bseq); in bnx2_rx_int()
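The tail of bnx2_rx_int() publishes the new software state to the chip: the page producer and buffer producer indices are written to the per-ring BDIDX mailbox addresses, and the byte-sequence counter to the BSEQ address. The sketch below only illustrates the shape of that update; the register accessors are stubbed with printf and the mailbox offsets are placeholders, not the driver's.

```c
/* Sketch of the producer publication at the end of the rx handler. */
#include <stdio.h>
#include <stdint.h>

struct rx_ring_info {
	uint16_t rx_prod, rx_pg_prod;
	uint32_t rx_prod_bseq;
	uint32_t rx_bidx_addr, rx_bseq_addr, rx_pg_bidx_addr;
};

/* stand-ins for the MMIO write helpers */
static void wr16(uint32_t addr, uint16_t val) { printf("WR16 %#x <- %u\n", addr, (unsigned)val); }
static void wr32(uint32_t addr, uint32_t val) { printf("WR32 %#x <- %u\n", addr, (unsigned)val); }

static void publish_rx_producers(struct rx_ring_info *rxr, int has_pg_ring)
{
	if (has_pg_ring)
		wr16(rxr->rx_pg_bidx_addr, rxr->rx_pg_prod);
	wr16(rxr->rx_bidx_addr, rxr->rx_prod);
	wr32(rxr->rx_bseq_addr, rxr->rx_prod_bseq);
}

int main(void)
{
	struct rx_ring_info rxr = {
		.rx_prod = 42, .rx_pg_prod = 17, .rx_prod_bseq = 64512,
		.rx_bidx_addr = 0x100, .rx_bseq_addr = 0x104, .rx_pg_bidx_addr = 0x108,
	};

	publish_rx_producers(&rxr, 1);
	return 0;
}
```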
3379 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_has_fast_work() local
3381 if ((bnx2_get_hw_rx_cons(bnapi) != rxr->rx_cons) || in bnx2_has_fast_work()
3473 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_poll_work() local
3478 if (bnx2_get_hw_rx_cons(bnapi) != rxr->rx_cons) in bnx2_poll_work()
5103 struct bnx2_rx_ring_info *rxr; in bnx2_clear_ring_states() local
5109 rxr = &bnapi->rx_ring; in bnx2_clear_ring_states()
5113 rxr->rx_prod_bseq = 0; in bnx2_clear_ring_states()
5114 rxr->rx_prod = 0; in bnx2_clear_ring_states()
5115 rxr->rx_cons = 0; in bnx2_clear_ring_states()
5116 rxr->rx_pg_prod = 0; in bnx2_clear_ring_states()
5117 rxr->rx_pg_cons = 0; in bnx2_clear_ring_states()
5214 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_init_rx_ring() local
5223 bnx2_init_rxbd_rings(rxr->rx_desc_ring, rxr->rx_desc_mapping, in bnx2_init_rx_ring()
5235 bnx2_init_rxbd_rings(rxr->rx_pg_desc_ring, in bnx2_init_rx_ring()
5236 rxr->rx_pg_desc_mapping, in bnx2_init_rx_ring()
5243 val = (u64) rxr->rx_pg_desc_mapping[0] >> 32; in bnx2_init_rx_ring()
5246 val = (u64) rxr->rx_pg_desc_mapping[0] & 0xffffffff; in bnx2_init_rx_ring()
5253 val = (u64) rxr->rx_desc_mapping[0] >> 32; in bnx2_init_rx_ring()
5256 val = (u64) rxr->rx_desc_mapping[0] & 0xffffffff; in bnx2_init_rx_ring()
5259 ring_prod = prod = rxr->rx_pg_prod; in bnx2_init_rx_ring()
5261 if (bnx2_alloc_rx_page(bp, rxr, ring_prod, GFP_KERNEL) < 0) { in bnx2_init_rx_ring()
5269 rxr->rx_pg_prod = prod; in bnx2_init_rx_ring()
5271 ring_prod = prod = rxr->rx_prod; in bnx2_init_rx_ring()
5273 if (bnx2_alloc_rx_data(bp, rxr, ring_prod, GFP_KERNEL) < 0) { in bnx2_init_rx_ring()
5281 rxr->rx_prod = prod; in bnx2_init_rx_ring()
5283 rxr->rx_bidx_addr = MB_GET_CID_ADDR(cid) + BNX2_L2CTX_HOST_BDIDX; in bnx2_init_rx_ring()
5284 rxr->rx_bseq_addr = MB_GET_CID_ADDR(cid) + BNX2_L2CTX_HOST_BSEQ; in bnx2_init_rx_ring()
5285 rxr->rx_pg_bidx_addr = MB_GET_CID_ADDR(cid) + BNX2_L2CTX_HOST_PG_BDIDX; in bnx2_init_rx_ring()
5287 BNX2_WR16(bp, rxr->rx_pg_bidx_addr, rxr->rx_pg_prod); in bnx2_init_rx_ring()
5288 BNX2_WR16(bp, rxr->rx_bidx_addr, prod); in bnx2_init_rx_ring()
5290 BNX2_WR(bp, rxr->rx_bseq_addr, rxr->rx_prod_bseq); in bnx2_init_rx_ring()
5452 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_free_rx_skbs() local
5455 if (rxr->rx_buf_ring == NULL) in bnx2_free_rx_skbs()
5459 struct bnx2_sw_bd *rx_buf = &rxr->rx_buf_ring[j]; in bnx2_free_rx_skbs()
5475 bnx2_free_rx_page(bp, rxr, j); in bnx2_free_rx_skbs()
5789 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_run_loopback() local
5794 rxr = &bnapi->rx_ring; in bnx2_run_loopback()
5870 rx_buf = &rxr->rx_buf_ring[rx_start_idx]; in bnx2_run_loopback()