Lines matching refs: rxr — references to struct bnx2_rx_ring_info in the bnx2 driver (drivers/net/ethernet/broadcom/bnx2.c), listed by source line and containing function:
720 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_free_rx_mem() local
724 if (rxr->rx_desc_ring[j]) in bnx2_free_rx_mem()
726 rxr->rx_desc_ring[j], in bnx2_free_rx_mem()
727 rxr->rx_desc_mapping[j]); in bnx2_free_rx_mem()
728 rxr->rx_desc_ring[j] = NULL; in bnx2_free_rx_mem()
730 vfree(rxr->rx_buf_ring); in bnx2_free_rx_mem()
731 rxr->rx_buf_ring = NULL; in bnx2_free_rx_mem()
734 if (rxr->rx_pg_desc_ring[j]) in bnx2_free_rx_mem()
736 rxr->rx_pg_desc_ring[j], in bnx2_free_rx_mem()
737 rxr->rx_pg_desc_mapping[j]); in bnx2_free_rx_mem()
738 rxr->rx_pg_desc_ring[j] = NULL; in bnx2_free_rx_mem()
740 vfree(rxr->rx_pg_ring); in bnx2_free_rx_mem()
741 rxr->rx_pg_ring = NULL; in bnx2_free_rx_mem()
774 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_alloc_rx_mem() local
777 rxr->rx_buf_ring = in bnx2_alloc_rx_mem()
779 if (rxr->rx_buf_ring == NULL) in bnx2_alloc_rx_mem()
783 rxr->rx_desc_ring[j] = in bnx2_alloc_rx_mem()
786 &rxr->rx_desc_mapping[j], in bnx2_alloc_rx_mem()
788 if (rxr->rx_desc_ring[j] == NULL) in bnx2_alloc_rx_mem()
794 rxr->rx_pg_ring = vzalloc(SW_RXPG_RING_SIZE * in bnx2_alloc_rx_mem()
796 if (rxr->rx_pg_ring == NULL) in bnx2_alloc_rx_mem()
802 rxr->rx_pg_desc_ring[j] = in bnx2_alloc_rx_mem()
805 &rxr->rx_pg_desc_mapping[j], in bnx2_alloc_rx_mem()
807 if (rxr->rx_pg_desc_ring[j] == NULL) in bnx2_alloc_rx_mem()
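
The bnx2_alloc_rx_mem() lines mirror that in reverse: the software ring comes from vzalloc() and each hardware descriptor page from dma_alloc_coherent(), with the bus address stored next to the virtual pointer for the later free. A hedged sketch of the allocation half; SW_RXBD_RING_SIZE, RXBD_RING_SIZE, bp->rx_max_ring and the example_* name are assumptions (only SW_RXPG_RING_SIZE appears verbatim in the listing).

static int example_alloc_rx_ring(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr)
{
        int j;

        /* Software buffer descriptors, one array covering all ring pages. */
        rxr->rx_buf_ring = vzalloc(SW_RXBD_RING_SIZE * bp->rx_max_ring);
        if (rxr->rx_buf_ring == NULL)
                return -ENOMEM;

        /* One coherent DMA page per hardware descriptor ring. */
        for (j = 0; j < bp->rx_max_ring; j++) {
                rxr->rx_desc_ring[j] =
                        dma_alloc_coherent(&bp->pdev->dev, RXBD_RING_SIZE,
                                           &rxr->rx_desc_mapping[j], GFP_KERNEL);
                if (rxr->rx_desc_ring[j] == NULL)
                        return -ENOMEM;                 /* caller unwinds via the free path */
        }
        return 0;
}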
2724 bnx2_alloc_rx_page(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u16 index, gfp_t gfp) in bnx2_alloc_rx_page() argument
2727 struct bnx2_sw_pg *rx_pg = &rxr->rx_pg_ring[index]; in bnx2_alloc_rx_page()
2729 &rxr->rx_pg_desc_ring[BNX2_RX_RING(index)][BNX2_RX_IDX(index)]; in bnx2_alloc_rx_page()
2749 bnx2_free_rx_page(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u16 index) in bnx2_free_rx_page() argument
2751 struct bnx2_sw_pg *rx_pg = &rxr->rx_pg_ring[index]; in bnx2_free_rx_page()
2765 bnx2_alloc_rx_data(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u16 index, gfp_t gfp) in bnx2_alloc_rx_data() argument
2768 struct bnx2_sw_bd *rx_buf = &rxr->rx_buf_ring[index]; in bnx2_alloc_rx_data()
2771 &rxr->rx_desc_ring[BNX2_RX_RING(index)][BNX2_RX_IDX(index)]; in bnx2_alloc_rx_data()
2792 rxr->rx_prod_bseq += bp->rx_buf_use_size; in bnx2_alloc_rx_data()
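
bnx2_alloc_rx_page() and bnx2_alloc_rx_data() share one attach sequence: allocate host memory, DMA-map it, record it in the software entry at index, and publish the 64-bit bus address through the hardware descriptor selected by BNX2_RX_RING()/BNX2_RX_IDX(); the data variant also advances rxr->rx_prod_bseq by bp->rx_buf_use_size (line 2792). A sketch of the data-buffer variant; the rx_bd_haddr_hi/lo field names, bp->rx_buf_size, the bnx2_rx_bd/bnx2_sw_bd member names and the example_* wrapper are assumptions inferred from the hi/lo address writes elsewhere in the listing.

static int example_attach_rx_data(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr,
                                  u16 index, gfp_t gfp)
{
        struct bnx2_sw_bd *rx_buf = &rxr->rx_buf_ring[index];
        struct bnx2_rx_bd *rxbd =
                &rxr->rx_desc_ring[BNX2_RX_RING(index)][BNX2_RX_IDX(index)];
        dma_addr_t mapping;
        u8 *data;

        data = kmalloc(bp->rx_buf_size, gfp);           /* assumed size field */
        if (!data)
                return -ENOMEM;

        /* Map for device-to-host DMA and remember the mapping for teardown. */
        mapping = dma_map_single(&bp->pdev->dev, data, bp->rx_buf_use_size,
                                 DMA_FROM_DEVICE);
        if (dma_mapping_error(&bp->pdev->dev, mapping)) {
                kfree(data);
                return -EIO;
        }

        rx_buf->data = data;                            /* assumed sw_bd field */
        dma_unmap_addr_set(rx_buf, mapping, mapping);

        /* Hand the bus address to the hardware descriptor as a hi/lo pair. */
        rxbd->rx_bd_haddr_hi = (u64) mapping >> 32;     /* assumed bd fields */
        rxbd->rx_bd_haddr_lo = (u64) mapping & 0xffffffff;

        rxr->rx_prod_bseq += bp->rx_buf_use_size;       /* line 2792 above */
        return 0;
}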
2941 bnx2_reuse_rx_skb_pages(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, in bnx2_reuse_rx_skb_pages() argument
2948 u16 cons = rxr->rx_pg_cons; in bnx2_reuse_rx_skb_pages()
2950 cons_rx_pg = &rxr->rx_pg_ring[cons]; in bnx2_reuse_rx_skb_pages()
2969 hw_prod = rxr->rx_pg_prod; in bnx2_reuse_rx_skb_pages()
2974 prod_rx_pg = &rxr->rx_pg_ring[prod]; in bnx2_reuse_rx_skb_pages()
2975 cons_rx_pg = &rxr->rx_pg_ring[cons]; in bnx2_reuse_rx_skb_pages()
2976 cons_bd = &rxr->rx_pg_desc_ring[BNX2_RX_RING(cons)] in bnx2_reuse_rx_skb_pages()
2978 prod_bd = &rxr->rx_pg_desc_ring[BNX2_RX_RING(prod)] in bnx2_reuse_rx_skb_pages()
2994 rxr->rx_pg_prod = hw_prod; in bnx2_reuse_rx_skb_pages()
2995 rxr->rx_pg_cons = cons; in bnx2_reuse_rx_skb_pages()
2999 bnx2_reuse_rx_data(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, in bnx2_reuse_rx_data() argument
3005 cons_rx_buf = &rxr->rx_buf_ring[cons]; in bnx2_reuse_rx_data()
3006 prod_rx_buf = &rxr->rx_buf_ring[prod]; in bnx2_reuse_rx_data()
3012 rxr->rx_prod_bseq += bp->rx_buf_use_size; in bnx2_reuse_rx_data()
3022 cons_bd = &rxr->rx_desc_ring[BNX2_RX_RING(cons)][BNX2_RX_IDX(cons)]; in bnx2_reuse_rx_data()
3023 prod_bd = &rxr->rx_desc_ring[BNX2_RX_RING(prod)][BNX2_RX_IDX(prod)]; in bnx2_reuse_rx_data()
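
The two reuse helpers recycle a buffer the stack did not keep: the software entry moves from the consumer slot to the producer slot, the already-mapped bus address is copied from the consumer BD to the producer BD (lines 2976–2978 and 3022–3023), and only the producer-side bookkeeping advances, so no new allocation or DMA mapping is needed. A condensed sketch of the single-buffer case, with the dma_sync step of the real helper omitted and the descriptor field names assumed:

static void example_reuse_rx_data(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr,
                                  u8 *data, u16 cons, u16 prod)
{
        struct bnx2_sw_bd *cons_rx_buf = &rxr->rx_buf_ring[cons];
        struct bnx2_sw_bd *prod_rx_buf = &rxr->rx_buf_ring[prod];
        struct bnx2_rx_bd *cons_bd, *prod_bd;

        /* The buffer stays posted; only the bookkeeping moves cons -> prod. */
        prod_rx_buf->data = data;                       /* assumed sw_bd field */
        rxr->rx_prod_bseq += bp->rx_buf_use_size;

        if (cons == prod)
                return;                                 /* same slot, nothing to copy */

        dma_unmap_addr_set(prod_rx_buf, mapping,
                           dma_unmap_addr(cons_rx_buf, mapping));

        /* Copy the already-mapped bus address between the hardware BDs. */
        cons_bd = &rxr->rx_desc_ring[BNX2_RX_RING(cons)][BNX2_RX_IDX(cons)];
        prod_bd = &rxr->rx_desc_ring[BNX2_RX_RING(prod)][BNX2_RX_IDX(prod)];
        prod_bd->rx_bd_haddr_hi = cons_bd->rx_bd_haddr_hi;      /* assumed fields */
        prod_bd->rx_bd_haddr_lo = cons_bd->rx_bd_haddr_lo;
}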
3029 bnx2_rx_skb(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u8 *data, in bnx2_rx_skb() argument
3037 err = bnx2_alloc_rx_data(bp, rxr, prod, GFP_ATOMIC); in bnx2_rx_skb()
3039 bnx2_reuse_rx_data(bp, rxr, data, (u16) (ring_idx >> 16), prod); in bnx2_rx_skb()
3045 bnx2_reuse_rx_skb_pages(bp, rxr, NULL, pages); in bnx2_rx_skb()
3064 u16 pg_cons = rxr->rx_pg_cons; in bnx2_rx_skb()
3065 u16 pg_prod = rxr->rx_pg_prod; in bnx2_rx_skb()
3078 rxr->rx_pg_cons = pg_cons; in bnx2_rx_skb()
3079 rxr->rx_pg_prod = pg_prod; in bnx2_rx_skb()
3080 bnx2_reuse_rx_skb_pages(bp, rxr, NULL, in bnx2_rx_skb()
3093 rx_pg = &rxr->rx_pg_ring[pg_cons]; in bnx2_rx_skb()
3105 err = bnx2_alloc_rx_page(bp, rxr, in bnx2_rx_skb()
3109 rxr->rx_pg_cons = pg_cons; in bnx2_rx_skb()
3110 rxr->rx_pg_prod = pg_prod; in bnx2_rx_skb()
3111 bnx2_reuse_rx_skb_pages(bp, rxr, skb, in bnx2_rx_skb()
3127 rxr->rx_pg_prod = pg_prod; in bnx2_rx_skb()
3128 rxr->rx_pg_cons = pg_cons; in bnx2_rx_skb()
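
For jumbo frames, bnx2_rx_skb() walks the page ring: each consumed page becomes an skb fragment, a replacement page is allocated into the producer slot right away, and if that allocation fails the saved indices are restored and the pages are handed back through bnx2_reuse_rx_skb_pages() (lines 3109–3111). A reduced sketch of that loop; the BNX2_NEXT_RX_BD/BNX2_RX_PG_RING_IDX macros and the per-fragment length are assumptions, and the DMA unmap plus skb length accounting of the real function are omitted.

static int example_fill_skb_pages(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr,
                                  struct sk_buff *skb, int pages)
{
        u16 pg_cons = rxr->rx_pg_cons;
        u16 pg_prod = rxr->rx_pg_prod;
        int i, err;

        for (i = 0; i < pages; i++) {
                struct bnx2_sw_pg *rx_pg = &rxr->rx_pg_ring[pg_cons];

                /* Detach the page into the skb (length simplified to a full page). */
                skb_fill_page_desc(skb, i, rx_pg->page, 0, PAGE_SIZE);
                rx_pg->page = NULL;                     /* assumed sw_pg field */

                /* Refill the producer slot immediately. */
                err = bnx2_alloc_rx_page(bp, rxr, BNX2_RX_PG_RING_IDX(pg_prod),
                                         GFP_ATOMIC);
                if (err) {
                        /* Give the pages back so the ring stays consistent. */
                        rxr->rx_pg_cons = pg_cons;
                        rxr->rx_pg_prod = pg_prod;
                        bnx2_reuse_rx_skb_pages(bp, rxr, skb, pages - i);
                        return err;
                }

                pg_cons = BNX2_RX_PG_RING_IDX(BNX2_NEXT_RX_BD(pg_cons));  /* assumed macros */
                pg_prod = BNX2_NEXT_RX_BD(pg_prod);
        }
        rxr->rx_pg_prod = pg_prod;
        rxr->rx_pg_cons = pg_cons;
        return 0;
}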
3150 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_rx_int() local
3159 sw_cons = rxr->rx_cons; in bnx2_rx_int()
3160 sw_prod = rxr->rx_prod; in bnx2_rx_int()
3178 rx_buf = &rxr->rx_buf_ring[sw_ring_cons]; in bnx2_rx_int()
3192 next_rx_buf = &rxr->rx_buf_ring[next_ring_idx]; in bnx2_rx_int()
3213 bnx2_reuse_rx_data(bp, rxr, data, sw_ring_cons, in bnx2_rx_int()
3220 bnx2_reuse_rx_skb_pages(bp, rxr, NULL, pages); in bnx2_rx_int()
3230 bnx2_reuse_rx_data(bp, rxr, data, sw_ring_cons, in bnx2_rx_int()
3242 bnx2_reuse_rx_data(bp, rxr, data, in bnx2_rx_int()
3246 skb = bnx2_rx_skb(bp, rxr, data, len, hdr_len, dma_addr, in bnx2_rx_int()
3298 rxr->rx_cons = sw_cons; in bnx2_rx_int()
3299 rxr->rx_prod = sw_prod; in bnx2_rx_int()
3302 BNX2_WR16(bp, rxr->rx_pg_bidx_addr, rxr->rx_pg_prod); in bnx2_rx_int()
3304 BNX2_WR16(bp, rxr->rx_bidx_addr, sw_prod); in bnx2_rx_int()
3306 BNX2_WR(bp, rxr->rx_bseq_addr, rxr->rx_prod_bseq); in bnx2_rx_int()
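
bnx2_rx_int() is the NAPI completion path for this ring: it snapshots the software consumer and producer (lines 3159–3160), drains completions up to the hardware consumer index, recycling each buffer with bnx2_reuse_rx_data() or turning it into an skb with bnx2_rx_skb(), and finally writes the new producer indices and byte sequence to the ring's mailbox addresses (lines 3298–3306). A skeleton of that flow, with the l2_fhdr parsing and error handling left out and the BNX2_NEXT_RX_BD/BNX2_RX_RING_IDX macros assumed:

static int example_rx_poll(struct bnx2 *bp, struct bnx2_napi *bnapi, int budget)
{
        struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring;
        u16 hw_cons = bnx2_get_hw_rx_cons(bnapi);
        u16 sw_cons = rxr->rx_cons;
        u16 sw_prod = rxr->rx_prod;
        int rx_pkt = 0;

        while (sw_cons != hw_cons && rx_pkt < budget) {
                /* ... look up rxr->rx_buf_ring[BNX2_RX_RING_IDX(sw_cons)], then
                 * bnx2_reuse_rx_data() or bnx2_rx_skb() as in the lines above ...
                 */
                sw_cons = BNX2_NEXT_RX_BD(sw_cons);     /* assumed macro */
                sw_prod = BNX2_NEXT_RX_BD(sw_prod);
                rx_pkt++;
        }
        rxr->rx_cons = sw_cons;
        rxr->rx_prod = sw_prod;

        /* Publish the new producer positions to the chip (lines 3302-3306). */
        if (rxr->rx_pg_ring)                            /* page doorbell only if in use */
                BNX2_WR16(bp, rxr->rx_pg_bidx_addr, rxr->rx_pg_prod);
        BNX2_WR16(bp, rxr->rx_bidx_addr, sw_prod);
        BNX2_WR(bp, rxr->rx_bseq_addr, rxr->rx_prod_bseq);

        return rx_pkt;
}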
3397 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_has_fast_work() local
3399 if ((bnx2_get_hw_rx_cons(bnapi) != rxr->rx_cons) || in bnx2_has_fast_work()
3491 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_poll_work() local
3496 if (bnx2_get_hw_rx_cons(bnapi) != rxr->rx_cons) in bnx2_poll_work()
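
Both bnx2_has_fast_work() and bnx2_poll_work() use the same cheap test: RX work is pending exactly when the hardware consumer index returned by bnx2_get_hw_rx_cons() differs from the software rxr->rx_cons. Isolated as a helper (the example_* name is illustrative):

static bool example_rx_pending(struct bnx2_napi *bnapi)
{
        struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring;

        /* Work is pending when the chip's consumer has moved past ours. */
        return bnx2_get_hw_rx_cons(bnapi) != rxr->rx_cons;
}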
5121 struct bnx2_rx_ring_info *rxr; in bnx2_clear_ring_states() local
5127 rxr = &bnapi->rx_ring; in bnx2_clear_ring_states()
5131 rxr->rx_prod_bseq = 0; in bnx2_clear_ring_states()
5132 rxr->rx_prod = 0; in bnx2_clear_ring_states()
5133 rxr->rx_cons = 0; in bnx2_clear_ring_states()
5134 rxr->rx_pg_prod = 0; in bnx2_clear_ring_states()
5135 rxr->rx_pg_cons = 0; in bnx2_clear_ring_states()
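
bnx2_clear_ring_states() simply zeroes the per-ring software state for each NAPI instance before the rings are (re)initialized, so the producer, consumer and byte-sequence counters start from a known baseline. The RX portion, isolated (illustrative wrapper name):

static void example_clear_rx_state(struct bnx2_rx_ring_info *rxr)
{
        rxr->rx_prod_bseq = 0;
        rxr->rx_prod = 0;
        rxr->rx_cons = 0;
        rxr->rx_pg_prod = 0;
        rxr->rx_pg_cons = 0;
}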
5232 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_init_rx_ring() local
5241 bnx2_init_rxbd_rings(rxr->rx_desc_ring, rxr->rx_desc_mapping, in bnx2_init_rx_ring()
5253 bnx2_init_rxbd_rings(rxr->rx_pg_desc_ring, in bnx2_init_rx_ring()
5254 rxr->rx_pg_desc_mapping, in bnx2_init_rx_ring()
5261 val = (u64) rxr->rx_pg_desc_mapping[0] >> 32; in bnx2_init_rx_ring()
5264 val = (u64) rxr->rx_pg_desc_mapping[0] & 0xffffffff; in bnx2_init_rx_ring()
5271 val = (u64) rxr->rx_desc_mapping[0] >> 32; in bnx2_init_rx_ring()
5274 val = (u64) rxr->rx_desc_mapping[0] & 0xffffffff; in bnx2_init_rx_ring()
5277 ring_prod = prod = rxr->rx_pg_prod; in bnx2_init_rx_ring()
5279 if (bnx2_alloc_rx_page(bp, rxr, ring_prod, GFP_KERNEL) < 0) { in bnx2_init_rx_ring()
5287 rxr->rx_pg_prod = prod; in bnx2_init_rx_ring()
5289 ring_prod = prod = rxr->rx_prod; in bnx2_init_rx_ring()
5291 if (bnx2_alloc_rx_data(bp, rxr, ring_prod, GFP_KERNEL) < 0) { in bnx2_init_rx_ring()
5299 rxr->rx_prod = prod; in bnx2_init_rx_ring()
5301 rxr->rx_bidx_addr = MB_GET_CID_ADDR(cid) + BNX2_L2CTX_HOST_BDIDX; in bnx2_init_rx_ring()
5302 rxr->rx_bseq_addr = MB_GET_CID_ADDR(cid) + BNX2_L2CTX_HOST_BSEQ; in bnx2_init_rx_ring()
5303 rxr->rx_pg_bidx_addr = MB_GET_CID_ADDR(cid) + BNX2_L2CTX_HOST_PG_BDIDX; in bnx2_init_rx_ring()
5305 BNX2_WR16(bp, rxr->rx_pg_bidx_addr, rxr->rx_pg_prod); in bnx2_init_rx_ring()
5306 BNX2_WR16(bp, rxr->rx_bidx_addr, prod); in bnx2_init_rx_ring()
5308 BNX2_WR(bp, rxr->rx_bseq_addr, rxr->rx_prod_bseq); in bnx2_init_rx_ring()
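
bnx2_init_rx_ring() chains the descriptor pages with bnx2_init_rxbd_rings(), hands the chip the 64-bit DMA address of the first page as separate high and low context words (lines 5261–5274), pre-fills both rings, derives the per-ring mailbox offsets from MB_GET_CID_ADDR(cid), and rings the doorbells once with the initial producer state. A sketch of the context and doorbell portion; bnx2_ctx_wr(), GET_CID_ADDR() and the BNX2_L2CTX_NX_BDHADDR_* offsets are assumed names not shown in the listing.

static void example_program_rx_context(struct bnx2 *bp,
                                       struct bnx2_rx_ring_info *rxr, int cid)
{
        u32 val;

        /* Give the chip the first descriptor page as a hi/lo address pair. */
        val = (u64) rxr->rx_desc_mapping[0] >> 32;
        bnx2_ctx_wr(bp, GET_CID_ADDR(cid), BNX2_L2CTX_NX_BDHADDR_HI, val);  /* assumed names */
        val = (u64) rxr->rx_desc_mapping[0] & 0xffffffff;
        bnx2_ctx_wr(bp, GET_CID_ADDR(cid), BNX2_L2CTX_NX_BDHADDR_LO, val);

        /* Remember the mailbox offsets used by the fast path (lines 5301-5303). */
        rxr->rx_bidx_addr = MB_GET_CID_ADDR(cid) + BNX2_L2CTX_HOST_BDIDX;
        rxr->rx_bseq_addr = MB_GET_CID_ADDR(cid) + BNX2_L2CTX_HOST_BSEQ;
        rxr->rx_pg_bidx_addr = MB_GET_CID_ADDR(cid) + BNX2_L2CTX_HOST_PG_BDIDX;

        /* Initial doorbell writes with the pre-filled producer state. */
        BNX2_WR16(bp, rxr->rx_pg_bidx_addr, rxr->rx_pg_prod);
        BNX2_WR16(bp, rxr->rx_bidx_addr, rxr->rx_prod);
        BNX2_WR(bp, rxr->rx_bseq_addr, rxr->rx_prod_bseq);
}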
5470 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_free_rx_skbs() local
5473 if (rxr->rx_buf_ring == NULL) in bnx2_free_rx_skbs()
5477 struct bnx2_sw_bd *rx_buf = &rxr->rx_buf_ring[j]; in bnx2_free_rx_skbs()
5493 bnx2_free_rx_page(bp, rxr, j); in bnx2_free_rx_skbs()
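
bnx2_free_rx_skbs() is the runtime counterpart of the teardown above: every populated rx_buf_ring slot is unmapped and its data buffer freed, and every page-ring slot goes through bnx2_free_rx_page() (line 5493). A sketch, assuming the buffers were attached with kmalloc()/dma_map_single() as in the earlier attach sketch and that the slot counts are passed in by the caller:

static void example_free_rx_buffers(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr,
                                    int buf_slots, int pg_slots)
{
        int j;

        if (rxr->rx_buf_ring == NULL)
                return;                                 /* nothing was allocated */

        for (j = 0; j < buf_slots; j++) {
                struct bnx2_sw_bd *rx_buf = &rxr->rx_buf_ring[j];

                if (rx_buf->data == NULL)               /* assumed sw_bd field */
                        continue;
                dma_unmap_single(&bp->pdev->dev,
                                 dma_unmap_addr(rx_buf, mapping),
                                 bp->rx_buf_use_size, DMA_FROM_DEVICE);
                kfree(rx_buf->data);
                rx_buf->data = NULL;
        }
        for (j = 0; j < pg_slots; j++)
                bnx2_free_rx_page(bp, rxr, j);
}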
5807 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; in bnx2_run_loopback() local
5812 rxr = &bnapi->rx_ring; in bnx2_run_loopback()
5888 rx_buf = &rxr->rx_buf_ring[rx_start_idx]; in bnx2_run_loopback()
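
Finally, bnx2_run_loopback() reuses the same ring for the self-test: after transmitting a frame to itself it looks up the software entry at rx_start_idx (line 5888) and inspects what the chip wrote there. A much-reduced sketch of that verification step; the real test also checks the l2_fhdr status word and packet length, and BNX2_RX_OFFSET plus the sw_bd field names are assumptions.

static int example_check_loopback_rx(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr,
                                     u16 rx_start_idx, const u8 *pattern, int len)
{
        struct bnx2_sw_bd *rx_buf = &rxr->rx_buf_ring[rx_start_idx];
        u8 *data = rx_buf->data;                        /* assumed sw_bd field */

        /* Make the device's writes visible to the CPU before inspecting them. */
        dma_sync_single_for_cpu(&bp->pdev->dev,
                                dma_unmap_addr(rx_buf, mapping),
                                bp->rx_buf_use_size, DMA_FROM_DEVICE);

        return memcmp(data + BNX2_RX_OFFSET, pattern, len) ? -EIO : 0;  /* assumed offset */
}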