Searched refs: rxr (Results 1 - 7 of 7), sorted by relevance

/linux-4.4.14/drivers/net/ethernet/broadcom/bnxt/bnxt.c
506 struct bnxt_rx_ring_info *rxr, bnxt_alloc_rx_data()
509 struct rx_bd *rxbd = &rxr->rx_desc_ring[RX_RING(prod)][RX_IDX(prod)]; bnxt_alloc_rx_data()
510 struct bnxt_sw_rx_bd *rx_buf = &rxr->rx_buf_ring[prod]; bnxt_alloc_rx_data()
526 static void bnxt_reuse_rx_data(struct bnxt_rx_ring_info *rxr, u16 cons, bnxt_reuse_rx_data() argument
529 u16 prod = rxr->rx_prod; bnxt_reuse_rx_data()
533 prod_rx_buf = &rxr->rx_buf_ring[prod]; bnxt_reuse_rx_data()
534 cons_rx_buf = &rxr->rx_buf_ring[cons]; bnxt_reuse_rx_data()
541 prod_bd = &rxr->rx_desc_ring[RX_RING(prod)][RX_IDX(prod)]; bnxt_reuse_rx_data()
542 cons_bd = &rxr->rx_desc_ring[RX_RING(cons)][RX_IDX(cons)]; bnxt_reuse_rx_data()
547 static inline u16 bnxt_find_next_agg_idx(struct bnxt_rx_ring_info *rxr, u16 idx) bnxt_find_next_agg_idx() argument
549 u16 next, max = rxr->rx_agg_bmap_size; bnxt_find_next_agg_idx()
551 next = find_next_zero_bit(rxr->rx_agg_bmap, max, idx); bnxt_find_next_agg_idx()
553 next = find_first_zero_bit(rxr->rx_agg_bmap, max); bnxt_find_next_agg_idx()
558 struct bnxt_rx_ring_info *rxr, bnxt_alloc_rx_page()
562 &rxr->rx_agg_desc_ring[RX_RING(prod)][RX_IDX(prod)]; bnxt_alloc_rx_page()
567 u16 sw_prod = rxr->rx_sw_agg_prod; bnxt_alloc_rx_page()
580 if (unlikely(test_bit(sw_prod, rxr->rx_agg_bmap))) bnxt_alloc_rx_page()
581 sw_prod = bnxt_find_next_agg_idx(rxr, sw_prod); bnxt_alloc_rx_page()
583 __set_bit(sw_prod, rxr->rx_agg_bmap); bnxt_alloc_rx_page()
584 rx_agg_buf = &rxr->rx_agg_ring[sw_prod]; bnxt_alloc_rx_page()
585 rxr->rx_sw_agg_prod = NEXT_RX_AGG(sw_prod); bnxt_alloc_rx_page()
599 struct bnxt_rx_ring_info *rxr = &bnapi->rx_ring; bnxt_reuse_rx_agg_bufs() local
600 u16 prod = rxr->rx_agg_prod; bnxt_reuse_rx_agg_bufs()
601 u16 sw_prod = rxr->rx_sw_agg_prod; bnxt_reuse_rx_agg_bufs()
614 __clear_bit(cons, rxr->rx_agg_bmap); bnxt_reuse_rx_agg_bufs()
616 if (unlikely(test_bit(sw_prod, rxr->rx_agg_bmap))) bnxt_reuse_rx_agg_bufs()
617 sw_prod = bnxt_find_next_agg_idx(rxr, sw_prod); bnxt_reuse_rx_agg_bufs()
619 __set_bit(sw_prod, rxr->rx_agg_bmap); bnxt_reuse_rx_agg_bufs()
620 prod_rx_buf = &rxr->rx_agg_ring[sw_prod]; bnxt_reuse_rx_agg_bufs()
621 cons_rx_buf = &rxr->rx_agg_ring[cons]; bnxt_reuse_rx_agg_bufs()
632 prod_bd = &rxr->rx_agg_desc_ring[RX_RING(prod)][RX_IDX(prod)]; bnxt_reuse_rx_agg_bufs()
641 rxr->rx_agg_prod = prod; bnxt_reuse_rx_agg_bufs()
642 rxr->rx_sw_agg_prod = sw_prod; bnxt_reuse_rx_agg_bufs()
646 struct bnxt_rx_ring_info *rxr, u16 cons, bnxt_rx_skb()
653 err = bnxt_alloc_rx_data(bp, rxr, prod, GFP_ATOMIC); bnxt_rx_skb()
655 bnxt_reuse_rx_data(rxr, cons, data); bnxt_rx_skb()
678 struct bnxt_rx_ring_info *rxr = &bnapi->rx_ring; bnxt_rx_pages() local
679 u16 prod = rxr->rx_agg_prod; bnxt_rx_pages()
695 cons_rx_buf = &rxr->rx_agg_ring[cons]; bnxt_rx_pages()
697 __clear_bit(cons, rxr->rx_agg_bmap); bnxt_rx_pages()
707 if (bnxt_alloc_rx_page(bp, rxr, prod, GFP_ATOMIC) != 0) { bnxt_rx_pages()
722 rxr->rx_agg_prod = prod; bnxt_rx_pages()
737 rxr->rx_agg_prod = prod; bnxt_rx_pages()
779 static void bnxt_tpa_start(struct bnxt *bp, struct bnxt_rx_ring_info *rxr, bnxt_tpa_start() argument
791 prod = rxr->rx_prod; bnxt_tpa_start()
792 cons_rx_buf = &rxr->rx_buf_ring[cons]; bnxt_tpa_start()
793 prod_rx_buf = &rxr->rx_buf_ring[prod]; bnxt_tpa_start()
794 tpa_info = &rxr->rx_tpa[agg_id]; bnxt_tpa_start()
801 prod_bd = &rxr->rx_desc_ring[RX_RING(prod)][RX_IDX(prod)]; bnxt_tpa_start()
831 rxr->rx_prod = NEXT_RX(prod); bnxt_tpa_start()
833 cons_rx_buf = &rxr->rx_buf_ring[cons]; bnxt_tpa_start()
835 bnxt_reuse_rx_data(rxr, cons, cons_rx_buf->data); bnxt_tpa_start()
836 rxr->rx_prod = NEXT_RX(rxr->rx_prod); bnxt_tpa_start()
932 struct bnxt_rx_ring_info *rxr = &bnapi->rx_ring; bnxt_tpa_end() local
941 tpa_info = &rxr->rx_tpa[agg_id]; bnxt_tpa_end()
1048 struct bnxt_rx_ring_info *rxr = &bnapi->rx_ring; bnxt_rx_pkt() local
1074 prod = rxr->rx_prod; bnxt_rx_pkt()
1077 bnxt_tpa_start(bp, rxr, (struct rx_tpa_start_cmp *)rxcmp, bnxt_rx_pkt()
1105 rx_buf = &rxr->rx_buf_ring[cons]; bnxt_rx_pkt()
1122 bnxt_reuse_rx_data(rxr, cons, data); bnxt_rx_pkt()
1135 bnxt_reuse_rx_data(rxr, cons, data); bnxt_rx_pkt()
1141 skb = bnxt_rx_skb(bp, rxr, cons, prod, data, dma_addr, len); bnxt_rx_pkt()
1203 rxr->rx_prod = NEXT_RX(prod); bnxt_rx_pkt()
1380 struct bnxt_rx_ring_info *rxr = &bnapi->rx_ring; bnxt_poll_work() local
1382 writel(DB_KEY_RX | rxr->rx_prod, rxr->rx_doorbell); bnxt_poll_work()
1383 writel(DB_KEY_RX | rxr->rx_prod, rxr->rx_doorbell); bnxt_poll_work()
1385 writel(DB_KEY_RX | rxr->rx_agg_prod, bnxt_poll_work()
1386 rxr->rx_agg_doorbell); bnxt_poll_work()
1387 writel(DB_KEY_RX | rxr->rx_agg_prod, bnxt_poll_work()
1388 rxr->rx_agg_doorbell); bnxt_poll_work()
1514 struct bnxt_rx_ring_info *rxr; bnxt_free_rx_skbs() local
1520 rxr = &bnapi->rx_ring; bnxt_free_rx_skbs()
1522 if (rxr->rx_tpa) { bnxt_free_rx_skbs()
1525 &rxr->rx_tpa[j]; bnxt_free_rx_skbs()
1544 struct bnxt_sw_rx_bd *rx_buf = &rxr->rx_buf_ring[j]; bnxt_free_rx_skbs()
1562 &rxr->rx_agg_ring[j]; bnxt_free_rx_skbs()
1573 __clear_bit(j, rxr->rx_agg_bmap); bnxt_free_rx_skbs()
1654 struct bnxt_rx_ring_info *rxr; bnxt_free_rx_rings() local
1660 rxr = &bnapi->rx_ring; bnxt_free_rx_rings()
1662 kfree(rxr->rx_tpa); bnxt_free_rx_rings()
1663 rxr->rx_tpa = NULL; bnxt_free_rx_rings()
1665 kfree(rxr->rx_agg_bmap); bnxt_free_rx_rings()
1666 rxr->rx_agg_bmap = NULL; bnxt_free_rx_rings()
1668 ring = &rxr->rx_ring_struct; bnxt_free_rx_rings()
1671 ring = &rxr->rx_agg_ring_struct; bnxt_free_rx_rings()
1688 struct bnxt_rx_ring_info *rxr; bnxt_alloc_rx_rings() local
1694 rxr = &bnapi->rx_ring; bnxt_alloc_rx_rings()
1695 ring = &rxr->rx_ring_struct; bnxt_alloc_rx_rings()
1704 ring = &rxr->rx_agg_ring_struct; bnxt_alloc_rx_rings()
1709 rxr->rx_agg_bmap_size = bp->rx_agg_ring_mask + 1; bnxt_alloc_rx_rings()
1710 mem_size = rxr->rx_agg_bmap_size / 8; bnxt_alloc_rx_rings()
1711 rxr->rx_agg_bmap = kzalloc(mem_size, GFP_KERNEL); bnxt_alloc_rx_rings()
1712 if (!rxr->rx_agg_bmap) bnxt_alloc_rx_rings()
1716 rxr->rx_tpa = kcalloc(MAX_TPA, bnxt_alloc_rx_rings()
1719 if (!rxr->rx_tpa) bnxt_alloc_rx_rings()
1873 struct bnxt_rx_ring_info *rxr; bnxt_init_ring_struct() local
1888 rxr = &bnapi->rx_ring; bnxt_init_ring_struct()
1889 ring = &rxr->rx_ring_struct; bnxt_init_ring_struct()
1892 ring->pg_arr = (void **)rxr->rx_desc_ring; bnxt_init_ring_struct()
1893 ring->dma_arr = rxr->rx_desc_mapping; bnxt_init_ring_struct()
1895 ring->vmem = (void **)&rxr->rx_buf_ring; bnxt_init_ring_struct()
1897 ring = &rxr->rx_agg_ring_struct; bnxt_init_ring_struct()
1900 ring->pg_arr = (void **)rxr->rx_agg_desc_ring; bnxt_init_ring_struct()
1901 ring->dma_arr = rxr->rx_agg_desc_mapping; bnxt_init_ring_struct()
1903 ring->vmem = (void **)&rxr->rx_agg_ring; bnxt_init_ring_struct()
1942 struct bnxt_rx_ring_info *rxr; bnxt_init_one_rx_ring() local
1956 rxr = &bnapi->rx_ring; bnxt_init_one_rx_ring()
1957 ring = &rxr->rx_ring_struct; bnxt_init_one_rx_ring()
1960 prod = rxr->rx_prod; bnxt_init_one_rx_ring()
1962 if (bnxt_alloc_rx_data(bp, rxr, prod, GFP_KERNEL) != 0) { bnxt_init_one_rx_ring()
1969 rxr->rx_prod = prod; bnxt_init_one_rx_ring()
1975 ring = &rxr->rx_agg_ring_struct; bnxt_init_one_rx_ring()
1982 prod = rxr->rx_agg_prod; bnxt_init_one_rx_ring()
1984 if (bnxt_alloc_rx_page(bp, rxr, prod, GFP_KERNEL) != 0) { bnxt_init_one_rx_ring()
1991 rxr->rx_agg_prod = prod; bnxt_init_one_rx_ring()
1995 if (rxr->rx_tpa) { bnxt_init_one_rx_ring()
2005 rxr->rx_tpa[i].data = data; bnxt_init_one_rx_ring()
2006 rxr->rx_tpa[i].mapping = mapping; bnxt_init_one_rx_ring()
2417 struct bnxt_rx_ring_info *rxr; bnxt_clear_ring_indices() local
2430 rxr = &bnapi->rx_ring; bnxt_clear_ring_indices()
2431 rxr->rx_prod = 0; bnxt_clear_ring_indices()
2432 rxr->rx_agg_prod = 0; bnxt_clear_ring_indices()
2433 rxr->rx_sw_agg_prod = 0; bnxt_clear_ring_indices()
3332 struct bnxt_rx_ring_info *rxr = &bnapi->rx_ring; bnxt_hwrm_ring_alloc() local
3333 struct bnxt_ring_struct *ring = &rxr->rx_ring_struct; bnxt_hwrm_ring_alloc()
3340 rxr->rx_doorbell = bp->bar1 + i * 0x80; bnxt_hwrm_ring_alloc()
3341 writel(DB_KEY_RX | rxr->rx_prod, rxr->rx_doorbell); bnxt_hwrm_ring_alloc()
3349 struct bnxt_rx_ring_info *rxr = &bnapi->rx_ring; bnxt_hwrm_ring_alloc() local
3351 &rxr->rx_agg_ring_struct; bnxt_hwrm_ring_alloc()
3360 rxr->rx_agg_doorbell = bnxt_hwrm_ring_alloc()
3362 writel(DB_KEY_RX | rxr->rx_agg_prod, bnxt_hwrm_ring_alloc()
3363 rxr->rx_agg_doorbell); bnxt_hwrm_ring_alloc()
3439 struct bnxt_rx_ring_info *rxr = &bnapi->rx_ring; bnxt_hwrm_ring_free() local
3440 struct bnxt_ring_struct *ring = &rxr->rx_ring_struct; bnxt_hwrm_ring_free()
3459 struct bnxt_rx_ring_info *rxr = &bnapi->rx_ring; bnxt_hwrm_ring_free() local
3461 &rxr->rx_agg_ring_struct; bnxt_hwrm_ring_free()
5007 struct bnxt_rx_ring_info *rxr; bnxt_dbg_dump_states() local
5013 rxr = &bnapi->rx_ring; bnxt_dbg_dump_states()
5020 i, rxr->rx_ring_struct.fw_ring_id, bnxt_dbg_dump_states()
5021 rxr->rx_prod, bnxt_dbg_dump_states()
5022 rxr->rx_agg_ring_struct.fw_ring_id, bnxt_dbg_dump_states()
5023 rxr->rx_agg_prod, rxr->rx_sw_agg_prod); bnxt_dbg_dump_states()
505 bnxt_alloc_rx_data(struct bnxt *bp, struct bnxt_rx_ring_info *rxr, u16 prod, gfp_t gfp) bnxt_alloc_rx_data() argument
557 bnxt_alloc_rx_page(struct bnxt *bp, struct bnxt_rx_ring_info *rxr, u16 prod, gfp_t gfp) bnxt_alloc_rx_page() argument
645 bnxt_rx_skb(struct bnxt *bp, struct bnxt_rx_ring_info *rxr, u16 cons, u16 prod, u8 *data, dma_addr_t dma_addr, unsigned int len) bnxt_rx_skb() argument
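
The bnxt hits above center on one data structure, struct bnxt_rx_ring_info, and on the refill/recycle step around its producer index: when a fresh buffer cannot be allocated, bnxt_reuse_rx_data() hands the still-mapped consumer buffer back to the producer slot. The following standalone sketch models that recycle step with simplified stand-in types (rx_ring, sw_rx_bd and the field names are illustrative, not the driver's definitions):

#include <stdint.h>
#include <stdio.h>

#define RING_SIZE 8
#define RING_MASK (RING_SIZE - 1)

struct sw_rx_bd {               /* software view of one RX slot */
	void *data;             /* buffer backing the slot */
	uint64_t mapping;       /* DMA address programmed into the descriptor */
};

struct rx_bd {                  /* hardware descriptor, simplified */
	uint64_t addr;
};

struct rx_ring {
	uint16_t prod;                       /* next slot handed to hardware */
	struct sw_rx_bd buf_ring[RING_SIZE];
	struct rx_bd desc_ring[RING_SIZE];
};

/* Recycle the buffer still sitting at 'cons' into the current producer slot,
 * mirroring the cons -> prod copy that bnxt_reuse_rx_data() performs when a
 * fresh allocation fails. */
static void reuse_rx_data(struct rx_ring *rxr, uint16_t cons, void *data)
{
	uint16_t prod = rxr->prod;
	struct sw_rx_bd *prod_buf = &rxr->buf_ring[prod & RING_MASK];
	struct sw_rx_bd *cons_buf = &rxr->buf_ring[cons & RING_MASK];

	prod_buf->data = data;
	prod_buf->mapping = cons_buf->mapping;   /* keep the existing DMA map */
	rxr->desc_ring[prod & RING_MASK].addr = cons_buf->mapping;
}

int main(void)
{
	struct rx_ring rxr = { .prod = 3 };
	static char pkt[2048];                   /* pretend receive buffer */

	rxr.buf_ring[1].data = pkt;
	rxr.buf_ring[1].mapping = 0x1000;

	reuse_rx_data(&rxr, 1, pkt);             /* e.g. after a failed refill */
	printf("prod slot %u now maps 0x%llx\n", rxr.prod,
	       (unsigned long long)rxr.desc_ring[rxr.prod & RING_MASK].addr);
	return 0;
}
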
/linux-4.4.14/drivers/net/ethernet/broadcom/bnx2.c
720 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; bnx2_free_rx_mem() local
724 if (rxr->rx_desc_ring[j]) bnx2_free_rx_mem()
726 rxr->rx_desc_ring[j], bnx2_free_rx_mem()
727 rxr->rx_desc_mapping[j]); bnx2_free_rx_mem()
728 rxr->rx_desc_ring[j] = NULL; bnx2_free_rx_mem()
730 vfree(rxr->rx_buf_ring); bnx2_free_rx_mem()
731 rxr->rx_buf_ring = NULL; bnx2_free_rx_mem()
734 if (rxr->rx_pg_desc_ring[j]) bnx2_free_rx_mem()
736 rxr->rx_pg_desc_ring[j], bnx2_free_rx_mem()
737 rxr->rx_pg_desc_mapping[j]); bnx2_free_rx_mem()
738 rxr->rx_pg_desc_ring[j] = NULL; bnx2_free_rx_mem()
740 vfree(rxr->rx_pg_ring); bnx2_free_rx_mem()
741 rxr->rx_pg_ring = NULL; bnx2_free_rx_mem()
774 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; bnx2_alloc_rx_mem() local
777 rxr->rx_buf_ring = bnx2_alloc_rx_mem()
779 if (rxr->rx_buf_ring == NULL) bnx2_alloc_rx_mem()
783 rxr->rx_desc_ring[j] = bnx2_alloc_rx_mem()
786 &rxr->rx_desc_mapping[j], bnx2_alloc_rx_mem()
788 if (rxr->rx_desc_ring[j] == NULL) bnx2_alloc_rx_mem()
794 rxr->rx_pg_ring = vzalloc(SW_RXPG_RING_SIZE * bnx2_alloc_rx_mem()
796 if (rxr->rx_pg_ring == NULL) bnx2_alloc_rx_mem()
802 rxr->rx_pg_desc_ring[j] = bnx2_alloc_rx_mem()
805 &rxr->rx_pg_desc_mapping[j], bnx2_alloc_rx_mem()
807 if (rxr->rx_pg_desc_ring[j] == NULL) bnx2_alloc_rx_mem()
2724 bnx2_alloc_rx_page(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u16 index, gfp_t gfp) bnx2_alloc_rx_page() argument
2727 struct bnx2_sw_pg *rx_pg = &rxr->rx_pg_ring[index]; bnx2_alloc_rx_page()
2729 &rxr->rx_pg_desc_ring[BNX2_RX_RING(index)][BNX2_RX_IDX(index)]; bnx2_alloc_rx_page()
2749 bnx2_free_rx_page(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u16 index) bnx2_free_rx_page() argument
2751 struct bnx2_sw_pg *rx_pg = &rxr->rx_pg_ring[index]; bnx2_free_rx_page()
2765 bnx2_alloc_rx_data(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u16 index, gfp_t gfp) bnx2_alloc_rx_data() argument
2768 struct bnx2_sw_bd *rx_buf = &rxr->rx_buf_ring[index]; bnx2_alloc_rx_data()
2771 &rxr->rx_desc_ring[BNX2_RX_RING(index)][BNX2_RX_IDX(index)]; bnx2_alloc_rx_data()
2792 rxr->rx_prod_bseq += bp->rx_buf_use_size; bnx2_alloc_rx_data()
2941 bnx2_reuse_rx_skb_pages(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, bnx2_reuse_rx_skb_pages() argument
2948 u16 cons = rxr->rx_pg_cons; bnx2_reuse_rx_skb_pages()
2950 cons_rx_pg = &rxr->rx_pg_ring[cons]; bnx2_reuse_rx_skb_pages()
2969 hw_prod = rxr->rx_pg_prod; bnx2_reuse_rx_skb_pages()
2974 prod_rx_pg = &rxr->rx_pg_ring[prod]; bnx2_reuse_rx_skb_pages()
2975 cons_rx_pg = &rxr->rx_pg_ring[cons]; bnx2_reuse_rx_skb_pages()
2976 cons_bd = &rxr->rx_pg_desc_ring[BNX2_RX_RING(cons)] bnx2_reuse_rx_skb_pages()
2978 prod_bd = &rxr->rx_pg_desc_ring[BNX2_RX_RING(prod)] bnx2_reuse_rx_skb_pages()
2994 rxr->rx_pg_prod = hw_prod; bnx2_reuse_rx_skb_pages()
2995 rxr->rx_pg_cons = cons; bnx2_reuse_rx_skb_pages()
2999 bnx2_reuse_rx_data(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, bnx2_reuse_rx_data() argument
3005 cons_rx_buf = &rxr->rx_buf_ring[cons]; bnx2_reuse_rx_data()
3006 prod_rx_buf = &rxr->rx_buf_ring[prod]; bnx2_reuse_rx_data()
3012 rxr->rx_prod_bseq += bp->rx_buf_use_size; bnx2_reuse_rx_data()
3022 cons_bd = &rxr->rx_desc_ring[BNX2_RX_RING(cons)][BNX2_RX_IDX(cons)]; bnx2_reuse_rx_data()
3023 prod_bd = &rxr->rx_desc_ring[BNX2_RX_RING(prod)][BNX2_RX_IDX(prod)]; bnx2_reuse_rx_data()
3029 bnx2_rx_skb(struct bnx2 *bp, struct bnx2_rx_ring_info *rxr, u8 *data, bnx2_rx_skb() argument
3037 err = bnx2_alloc_rx_data(bp, rxr, prod, GFP_ATOMIC); bnx2_rx_skb()
3039 bnx2_reuse_rx_data(bp, rxr, data, (u16) (ring_idx >> 16), prod); bnx2_rx_skb()
3045 bnx2_reuse_rx_skb_pages(bp, rxr, NULL, pages); bnx2_rx_skb()
3064 u16 pg_cons = rxr->rx_pg_cons; bnx2_rx_skb()
3065 u16 pg_prod = rxr->rx_pg_prod; bnx2_rx_skb()
3078 rxr->rx_pg_cons = pg_cons; bnx2_rx_skb()
3079 rxr->rx_pg_prod = pg_prod; bnx2_rx_skb()
3080 bnx2_reuse_rx_skb_pages(bp, rxr, NULL, bnx2_rx_skb()
3093 rx_pg = &rxr->rx_pg_ring[pg_cons]; bnx2_rx_skb()
3105 err = bnx2_alloc_rx_page(bp, rxr, bnx2_rx_skb()
3109 rxr->rx_pg_cons = pg_cons; bnx2_rx_skb()
3110 rxr->rx_pg_prod = pg_prod; bnx2_rx_skb()
3111 bnx2_reuse_rx_skb_pages(bp, rxr, skb, bnx2_rx_skb()
3127 rxr->rx_pg_prod = pg_prod; bnx2_rx_skb()
3128 rxr->rx_pg_cons = pg_cons; bnx2_rx_skb()
3150 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; bnx2_rx_int() local
3159 sw_cons = rxr->rx_cons; bnx2_rx_int()
3160 sw_prod = rxr->rx_prod; bnx2_rx_int()
3178 rx_buf = &rxr->rx_buf_ring[sw_ring_cons]; bnx2_rx_int()
3192 next_rx_buf = &rxr->rx_buf_ring[next_ring_idx]; bnx2_rx_int()
3213 bnx2_reuse_rx_data(bp, rxr, data, sw_ring_cons, bnx2_rx_int()
3220 bnx2_reuse_rx_skb_pages(bp, rxr, NULL, pages); bnx2_rx_int()
3230 bnx2_reuse_rx_data(bp, rxr, data, sw_ring_cons, bnx2_rx_int()
3242 bnx2_reuse_rx_data(bp, rxr, data, bnx2_rx_int()
3246 skb = bnx2_rx_skb(bp, rxr, data, len, hdr_len, dma_addr, bnx2_rx_int()
3298 rxr->rx_cons = sw_cons; bnx2_rx_int()
3299 rxr->rx_prod = sw_prod; bnx2_rx_int()
3302 BNX2_WR16(bp, rxr->rx_pg_bidx_addr, rxr->rx_pg_prod); bnx2_rx_int()
3304 BNX2_WR16(bp, rxr->rx_bidx_addr, sw_prod); bnx2_rx_int()
3306 BNX2_WR(bp, rxr->rx_bseq_addr, rxr->rx_prod_bseq); bnx2_rx_int()
3397 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; bnx2_has_fast_work() local
3399 if ((bnx2_get_hw_rx_cons(bnapi) != rxr->rx_cons) || bnx2_has_fast_work()
3491 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; bnx2_poll_work() local
3496 if (bnx2_get_hw_rx_cons(bnapi) != rxr->rx_cons) bnx2_poll_work()
5121 struct bnx2_rx_ring_info *rxr; bnx2_clear_ring_states() local
5127 rxr = &bnapi->rx_ring; bnx2_clear_ring_states()
5131 rxr->rx_prod_bseq = 0; bnx2_clear_ring_states()
5132 rxr->rx_prod = 0; bnx2_clear_ring_states()
5133 rxr->rx_cons = 0; bnx2_clear_ring_states()
5134 rxr->rx_pg_prod = 0; bnx2_clear_ring_states()
5135 rxr->rx_pg_cons = 0; bnx2_clear_ring_states()
5232 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; bnx2_init_rx_ring() local
5241 bnx2_init_rxbd_rings(rxr->rx_desc_ring, rxr->rx_desc_mapping, bnx2_init_rx_ring()
5253 bnx2_init_rxbd_rings(rxr->rx_pg_desc_ring, bnx2_init_rx_ring()
5254 rxr->rx_pg_desc_mapping, bnx2_init_rx_ring()
5261 val = (u64) rxr->rx_pg_desc_mapping[0] >> 32; bnx2_init_rx_ring()
5264 val = (u64) rxr->rx_pg_desc_mapping[0] & 0xffffffff; bnx2_init_rx_ring()
5271 val = (u64) rxr->rx_desc_mapping[0] >> 32; bnx2_init_rx_ring()
5274 val = (u64) rxr->rx_desc_mapping[0] & 0xffffffff; bnx2_init_rx_ring()
5277 ring_prod = prod = rxr->rx_pg_prod; bnx2_init_rx_ring()
5279 if (bnx2_alloc_rx_page(bp, rxr, ring_prod, GFP_KERNEL) < 0) { bnx2_init_rx_ring()
5287 rxr->rx_pg_prod = prod; bnx2_init_rx_ring()
5289 ring_prod = prod = rxr->rx_prod; bnx2_init_rx_ring()
5291 if (bnx2_alloc_rx_data(bp, rxr, ring_prod, GFP_KERNEL) < 0) { bnx2_init_rx_ring()
5299 rxr->rx_prod = prod; bnx2_init_rx_ring()
5301 rxr->rx_bidx_addr = MB_GET_CID_ADDR(cid) + BNX2_L2CTX_HOST_BDIDX; bnx2_init_rx_ring()
5302 rxr->rx_bseq_addr = MB_GET_CID_ADDR(cid) + BNX2_L2CTX_HOST_BSEQ; bnx2_init_rx_ring()
5303 rxr->rx_pg_bidx_addr = MB_GET_CID_ADDR(cid) + BNX2_L2CTX_HOST_PG_BDIDX; bnx2_init_rx_ring()
5305 BNX2_WR16(bp, rxr->rx_pg_bidx_addr, rxr->rx_pg_prod); bnx2_init_rx_ring()
5306 BNX2_WR16(bp, rxr->rx_bidx_addr, prod); bnx2_init_rx_ring()
5308 BNX2_WR(bp, rxr->rx_bseq_addr, rxr->rx_prod_bseq); bnx2_init_rx_ring()
5470 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; bnx2_free_rx_skbs() local
5473 if (rxr->rx_buf_ring == NULL) bnx2_free_rx_skbs()
5477 struct bnx2_sw_bd *rx_buf = &rxr->rx_buf_ring[j]; bnx2_free_rx_skbs()
5493 bnx2_free_rx_page(bp, rxr, j); bnx2_free_rx_skbs()
5807 struct bnx2_rx_ring_info *rxr = &bnapi->rx_ring; bnx2_run_loopback() local
5812 rxr = &bnapi->rx_ring; bnx2_run_loopback()
5888 rx_buf = &rxr->rx_buf_ring[rx_start_idx]; bnx2_run_loopback()
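
In the bnx2 hits, each posted buffer advances rx_prod_bseq by bp->rx_buf_use_size, and the producer index plus byte sequence are then written to the ring's mailbox addresses (rx_bidx_addr, rx_bseq_addr). Below is a minimal standalone sketch of that bookkeeping, with register writes faked by printf and simplified field names assumed for illustration:

#include <stdint.h>
#include <stdio.h>

#define RX_RING_SIZE    8
#define RX_BUF_USE_SIZE 2048u      /* stand-in for bp->rx_buf_use_size */

struct rx_ring_info {
	uint16_t rx_prod;          /* software producer index */
	uint32_t rx_prod_bseq;     /* running byte count posted to the NIC */
	uint32_t rx_bidx_addr;     /* mailbox address for the producer index */
	uint32_t rx_bseq_addr;     /* mailbox address for the byte sequence */
};

/* Fake register write standing in for the driver's mailbox accessors. */
static void reg_wr(uint32_t addr, uint32_t val)
{
	printf("write 0x%08x -> mailbox 0x%08x\n", val, addr);
}

/* Pretend to map and post one empty buffer at the producer index. */
static int alloc_rx_data(struct rx_ring_info *rxr, uint16_t prod)
{
	(void)prod;                /* a real driver allocates and DMA-maps here */
	rxr->rx_prod_bseq += RX_BUF_USE_SIZE;
	return 0;
}

int main(void)
{
	struct rx_ring_info rxr = {
		.rx_bidx_addr = 0x1000, .rx_bseq_addr = 0x1004,
	};

	for (int i = 0; i < 4; i++) {            /* post a few buffers */
		if (alloc_rx_data(&rxr, rxr.rx_prod) < 0)
			break;
		rxr.rx_prod = (rxr.rx_prod + 1) % RX_RING_SIZE;
	}

	/* Tell the NIC how far the ring has been refilled. */
	reg_wr(rxr.rx_bidx_addr, rxr.rx_prod);
	reg_wr(rxr.rx_bseq_addr, rxr.rx_prod_bseq);
	return 0;
}
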
/linux-4.4.14/drivers/net/ethernet/sgi/ioc3-eth.c
80 unsigned long *rxr; /* pointer to receiver ring */ member in struct:ioc3_private
582 unsigned long *rxr; ioc3_rx() local
585 rxr = ip->rxr; /* Ring base */ ioc3_rx()
637 rxr[n_entry] = cpu_to_be64(ioc3_map(rxb, 1)); ioc3_rx()
838 ip->rxr[ip->rx_pi++] = ip->rxr[ip->rx_ci++]; ioc3_clean_rx_ring()
879 if (ip->rxr) { ioc3_free_rings()
890 free_page((unsigned long)ip->rxr); ioc3_free_rings()
891 ip->rxr = NULL; ioc3_free_rings()
899 unsigned long *rxr; ioc3_alloc_rings() local
902 if (ip->rxr == NULL) { ioc3_alloc_rings()
904 ip->rxr = (unsigned long *) get_zeroed_page(GFP_ATOMIC); ioc3_alloc_rings()
905 rxr = ip->rxr; ioc3_alloc_rings()
906 if (!rxr) ioc3_alloc_rings()
926 rxr[i] = cpu_to_be64(ioc3_map(rxb, 1)); ioc3_alloc_rings()
956 ring = ioc3_map(ip->rxr, 0); ioc3_init_rings()
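
ioc3-eth keeps its receive ring as a single zeroed page holding big-endian 64-bit buffer addresses, which is why rxr is just an unsigned long pointer in these hits. A small standalone sketch of filling one slot follows, assuming a little-endian host; the helper names are made up for illustration:

#include <stdint.h>
#include <stdio.h>

#define RX_RING_ENTRIES 512        /* one 4 KiB page of 8-byte entries */

/* Byte swap; on a little-endian host this yields big-endian storage,
 * standing in for the kernel's cpu_to_be64(). */
static uint64_t bswap64_sketch(uint64_t v)
{
	uint64_t r = 0;
	for (int i = 0; i < 8; i++)
		r = (r << 8) | ((v >> (8 * i)) & 0xff);
	return r;
}

/* Point ring slot 'idx' at the (fake) DMA address of a receive buffer. */
static void rx_ring_fill(uint64_t *rxr, unsigned int idx, uint64_t dma_addr)
{
	rxr[idx % RX_RING_ENTRIES] = bswap64_sketch(dma_addr);
}

int main(void)
{
	static uint64_t rxr[RX_RING_ENTRIES];   /* stands in for the zeroed page */

	rx_ring_fill(rxr, 0, 0x000012345678ULL);
	printf("slot 0 = 0x%016llx\n", (unsigned long long)rxr[0]);
	return 0;
}
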
/linux-4.4.14/drivers/net/ethernet/intel/i40evf/i40e_txrx.h
313 void i40evf_alloc_rx_buffers_ps(struct i40e_ring *rxr, u16 cleaned_count);
314 void i40evf_alloc_rx_buffers_1buf(struct i40e_ring *rxr, u16 cleaned_count);
315 void i40evf_alloc_rx_headers(struct i40e_ring *rxr);
/linux-4.4.14/drivers/net/ethernet/intel/i40e/i40e_txrx.h
318 void i40e_alloc_rx_buffers_ps(struct i40e_ring *rxr, u16 cleaned_count);
319 void i40e_alloc_rx_buffers_1buf(struct i40e_ring *rxr, u16 cleaned_count);
320 void i40e_alloc_rx_headers(struct i40e_ring *rxr);
/linux-4.4.14/drivers/tty/serial/mpsc.c
168 dma_addr_t rxr; /* Rx descriptor ring */ member in struct:mpsc_port_info
169 dma_addr_t rxr_p; /* Phys addr of rxr */
808 pi->rxr = dp; mpsc_init_rings()
832 dp = pi->rxr; mpsc_init_rings()
892 pi->rxr = 0; mpsc_uninit_rings()
947 rxre = (struct mpsc_rx_desc *)(pi->rxr + (pi->rxr_posn*MPSC_RXRE_SIZE)); mpsc_rx_intr()
1070 (pi->rxr + (pi->rxr_posn * MPSC_RXRE_SIZE)); mpsc_rx_intr()
1580 rxre = (struct mpsc_rx_desc *)(pi->rxr + mpsc_get_poll_char()
1635 rxre = (struct mpsc_rx_desc *)(pi->rxr + mpsc_get_poll_char()
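
mpsc addresses its RX descriptor ring by byte offset, rxr + rxr_posn * MPSC_RXRE_SIZE, rather than by array index. Here is a standalone sketch of that addressing; the 32-byte entry size and the descriptor layout are assumptions for illustration, not the driver's:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define RXRE_SIZE  32u             /* stand-in for MPSC_RXRE_SIZE */
#define RX_ENTRIES 8u

struct rx_desc {                   /* simplified descriptor, padded to RXRE_SIZE */
	uint16_t bytecnt;
	uint16_t bufsize;
	uint32_t cmdstat;
	uint8_t  pad[RXRE_SIZE - 8];
};

/* Return the descriptor at ring position 'posn', indexing by byte offset
 * just as the rxr + posn * entry-size expressions above do. */
static struct rx_desc *rx_desc_at(uint8_t *ring_base, unsigned int posn)
{
	return (struct rx_desc *)(ring_base + (posn % RX_ENTRIES) * RXRE_SIZE);
}

int main(void)
{
	uint8_t *ring = calloc(RX_ENTRIES, RXRE_SIZE);   /* the descriptor ring */
	if (!ring)
		return 1;

	rx_desc_at(ring, 3)->bytecnt = 64;
	printf("entry 3 bytecnt = %u\n", rx_desc_at(ring, 3)->bytecnt);
	free(ring);
	return 0;
}
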
/linux-4.4.14/drivers/net/ethernet/sun/sunhme.c
1551 HMD(("ring ptrs rxr[%08x] txr[%08x]\n", happy_meal_init()

Completed in 427 milliseconds