Lines Matching refs:rx
553 vptr->rx.dirty = vptr->rx.filled = vptr->rx.curr = 0; in velocity_init_rx_ring_indexes()
575 vptr->rx.ring[i].rdesc0.len |= OWNED_BY_NIC; in velocity_rx_reset()
578 writel(vptr->rx.pool_dma, &regs->RDBaseLo); in velocity_rx_reset()
1402 writel(vptr->rx.pool_dma, &regs->RDBaseLo); in velocity_init_registers()
1447 if (vptr->rx.filled < 4) in velocity_give_many_rx_descs()
1452 unusable = vptr->rx.filled & 0x0003; in velocity_give_many_rx_descs()
1453 dirty = vptr->rx.dirty - unusable; in velocity_give_many_rx_descs()
1454 for (avail = vptr->rx.filled & 0xfffc; avail; avail--) { in velocity_give_many_rx_descs()
1456 vptr->rx.ring[dirty].rdesc0.len |= OWNED_BY_NIC; in velocity_give_many_rx_descs()
1459 writew(vptr->rx.filled & 0xfffc, &regs->RBRDU); in velocity_give_many_rx_descs()
1460 vptr->rx.filled = unusable; in velocity_give_many_rx_descs()
1493 vptr->rx.ring = pool; in velocity_init_dma_rings()
1494 vptr->rx.pool_dma = pool_dma; in velocity_init_dma_rings()
1511 vptr->rx.buf_sz = (mtu <= ETH_DATA_LEN) ? PKT_BUF_SZ : mtu + 32; in velocity_set_rxbufsize()
1526 struct rx_desc *rd = &(vptr->rx.ring[idx]); in velocity_alloc_rx_buf()
1527 struct velocity_rd_info *rd_info = &(vptr->rx.info[idx]); in velocity_alloc_rx_buf()
1529 rd_info->skb = netdev_alloc_skb(vptr->netdev, vptr->rx.buf_sz + 64); in velocity_alloc_rx_buf()
1540 vptr->rx.buf_sz, DMA_FROM_DEVICE); in velocity_alloc_rx_buf()
1547 rd->size = cpu_to_le16(vptr->rx.buf_sz) | RX_INTEN; in velocity_alloc_rx_buf()
1556 int dirty = vptr->rx.dirty, done = 0; in velocity_rx_refill()
1559 struct rx_desc *rd = vptr->rx.ring + dirty; in velocity_rx_refill()
1565 if (!vptr->rx.info[dirty].skb) { in velocity_rx_refill()
1571 } while (dirty != vptr->rx.curr); in velocity_rx_refill()
1574 vptr->rx.dirty = dirty; in velocity_rx_refill()
1575 vptr->rx.filled += done; in velocity_rx_refill()
1592 if (vptr->rx.info == NULL) in velocity_free_rd_ring()
1596 struct velocity_rd_info *rd_info = &(vptr->rx.info[i]); in velocity_free_rd_ring()
1597 struct rx_desc *rd = vptr->rx.ring + i; in velocity_free_rd_ring()
1603 dma_unmap_single(vptr->dev, rd_info->skb_dma, vptr->rx.buf_sz, in velocity_free_rd_ring()
1611 kfree(vptr->rx.info); in velocity_free_rd_ring()
1612 vptr->rx.info = NULL; in velocity_free_rd_ring()
1626 vptr->rx.info = kcalloc(vptr->options.numrx, in velocity_init_rd_ring()
1628 if (!vptr->rx.info) in velocity_init_rd_ring()
1685 dma_free_coherent(vptr->dev, size, vptr->rx.ring, vptr->rx.pool_dma); in velocity_free_dma_rings()
2045 struct velocity_rd_info *rd_info = &(vptr->rx.info[idx]); in velocity_receive_frame()
2046 struct rx_desc *rd = &(vptr->rx.ring[idx]); in velocity_receive_frame()
2063 vptr->rx.buf_sz, DMA_FROM_DEVICE); in velocity_receive_frame()
2070 dma_unmap_single(vptr->dev, rd_info->skb_dma, vptr->rx.buf_sz, in velocity_receive_frame()
2074 vptr->rx.buf_sz, DMA_FROM_DEVICE); in velocity_receive_frame()
2104 int rd_curr = vptr->rx.curr; in velocity_rx_srv()
2108 struct rx_desc *rd = vptr->rx.ring + rd_curr; in velocity_rx_srv()
2110 if (!vptr->rx.info[rd_curr].skb) in velocity_rx_srv()
2141 vptr->rx.curr = rd_curr; in velocity_rx_srv()
2305 struct rx_info rx; in velocity_change_mtu() local
2331 rx = vptr->rx; in velocity_change_mtu()
2334 vptr->rx = tmp_vptr->rx; in velocity_change_mtu()
2337 tmp_vptr->rx = rx; in velocity_change_mtu()