Lines matching refs:ring. Each entry gives the source line number, the matching line of code, the enclosing function, and whether ring is that function's argument or a local variable.
123 struct xgbe_ring *ring) in xgbe_free_ring() argument
128 if (!ring) in xgbe_free_ring()
131 if (ring->rdata) { in xgbe_free_ring()
132 for (i = 0; i < ring->rdesc_count; i++) { in xgbe_free_ring()
133 rdata = XGBE_GET_DESC_DATA(ring, i); in xgbe_free_ring()
137 kfree(ring->rdata); in xgbe_free_ring()
138 ring->rdata = NULL; in xgbe_free_ring()
141 if (ring->rx_hdr_pa.pages) { in xgbe_free_ring()
142 dma_unmap_page(pdata->dev, ring->rx_hdr_pa.pages_dma, in xgbe_free_ring()
143 ring->rx_hdr_pa.pages_len, DMA_FROM_DEVICE); in xgbe_free_ring()
144 put_page(ring->rx_hdr_pa.pages); in xgbe_free_ring()
146 ring->rx_hdr_pa.pages = NULL; in xgbe_free_ring()
147 ring->rx_hdr_pa.pages_len = 0; in xgbe_free_ring()
148 ring->rx_hdr_pa.pages_offset = 0; in xgbe_free_ring()
149 ring->rx_hdr_pa.pages_dma = 0; in xgbe_free_ring()
152 if (ring->rx_buf_pa.pages) { in xgbe_free_ring()
153 dma_unmap_page(pdata->dev, ring->rx_buf_pa.pages_dma, in xgbe_free_ring()
154 ring->rx_buf_pa.pages_len, DMA_FROM_DEVICE); in xgbe_free_ring()
155 put_page(ring->rx_buf_pa.pages); in xgbe_free_ring()
157 ring->rx_buf_pa.pages = NULL; in xgbe_free_ring()
158 ring->rx_buf_pa.pages_len = 0; in xgbe_free_ring()
159 ring->rx_buf_pa.pages_offset = 0; in xgbe_free_ring()
160 ring->rx_buf_pa.pages_dma = 0; in xgbe_free_ring()
163 if (ring->rdesc) { in xgbe_free_ring()
166 ring->rdesc_count), in xgbe_free_ring()
167 ring->rdesc, ring->rdesc_dma); in xgbe_free_ring()
168 ring->rdesc = NULL; in xgbe_free_ring()
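The xgbe_free_ring() references above trace a fixed teardown order: per-descriptor data is unmapped and freed first, then the shared RX header and data pages are unmapped and released, and only then is the coherent descriptor array returned. A condensed sketch of that shape follows; the xgbe_unmap_rdata() helper and the sizeof(struct xgbe_ring_desc) sizing are assumptions filling in lines the listing truncates, and the driver zeroes the page-pool fields individually rather than with memset().

/* Condensed sketch of xgbe_free_ring(); helper name and descriptor sizing
 * are assumptions, not the driver's exact code.
 */
static void xgbe_free_ring(struct xgbe_prv_data *pdata,
                           struct xgbe_ring *ring)
{
        struct xgbe_ring_data *rdata;
        unsigned int i;

        if (!ring)
                return;

        /* 1. Per-descriptor state before the descriptors themselves. */
        if (ring->rdata) {
                for (i = 0; i < ring->rdesc_count; i++) {
                        rdata = XGBE_GET_DESC_DATA(ring, i);
                        xgbe_unmap_rdata(pdata, rdata);  /* assumed helper */
                }
                kfree(ring->rdata);
                ring->rdata = NULL;
        }

        /* 2. Shared RX header and data page pools. */
        if (ring->rx_hdr_pa.pages) {
                dma_unmap_page(pdata->dev, ring->rx_hdr_pa.pages_dma,
                               ring->rx_hdr_pa.pages_len, DMA_FROM_DEVICE);
                put_page(ring->rx_hdr_pa.pages);
                memset(&ring->rx_hdr_pa, 0, sizeof(ring->rx_hdr_pa));
        }
        if (ring->rx_buf_pa.pages) {
                dma_unmap_page(pdata->dev, ring->rx_buf_pa.pages_dma,
                               ring->rx_buf_pa.pages_len, DMA_FROM_DEVICE);
                put_page(ring->rx_buf_pa.pages);
                memset(&ring->rx_buf_pa, 0, sizeof(ring->rx_buf_pa));
        }

        /* 3. Finally, the coherent descriptor array itself. */
        if (ring->rdesc) {
                dma_free_coherent(pdata->dev,
                                  sizeof(struct xgbe_ring_desc) * ring->rdesc_count,
                                  ring->rdesc, ring->rdesc_dma);
                ring->rdesc = NULL;
        }
}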
189 struct xgbe_ring *ring, unsigned int rdesc_count) in xgbe_init_ring() argument
193 if (!ring) in xgbe_init_ring()
197 ring->rdesc_count = rdesc_count; in xgbe_init_ring()
198 ring->rdesc = dma_alloc_coherent(pdata->dev, in xgbe_init_ring()
200 rdesc_count), &ring->rdesc_dma, in xgbe_init_ring()
202 if (!ring->rdesc) in xgbe_init_ring()
206 ring->rdata = kcalloc(rdesc_count, sizeof(struct xgbe_ring_data), in xgbe_init_ring()
208 if (!ring->rdata) in xgbe_init_ring()
213 ring->rdesc, &ring->rdesc_dma, ring->rdata); in xgbe_init_ring()
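xgbe_init_ring() is the allocation counterpart: rdesc_count is recorded on the ring, the hardware descriptors come from dma_alloc_coherent(), and the host-side xgbe_ring_data array from kcalloc(). A minimal sketch, assuming GFP_KERNEL allocations, -ENOMEM on failure, and the same sizeof(struct xgbe_ring_desc) sizing; cleanup of a partially initialized ring is presumably left to xgbe_free_ring().

/* Minimal sketch of xgbe_init_ring(); GFP flags, error values and the
 * descriptor sizing are assumptions filling in truncated lines.
 */
static int xgbe_init_ring(struct xgbe_prv_data *pdata,
                          struct xgbe_ring *ring, unsigned int rdesc_count)
{
        if (!ring)
                return 0;

        /* Coherent DMA memory shared with the hardware. */
        ring->rdesc_count = rdesc_count;
        ring->rdesc = dma_alloc_coherent(pdata->dev,
                                         sizeof(struct xgbe_ring_desc) * rdesc_count,
                                         &ring->rdesc_dma, GFP_KERNEL);
        if (!ring->rdesc)
                return -ENOMEM;

        /* Host-side bookkeeping: one xgbe_ring_data entry per descriptor. */
        ring->rdata = kcalloc(rdesc_count, sizeof(struct xgbe_ring_data),
                              GFP_KERNEL);
        if (!ring->rdata)
                return -ENOMEM;

        return 0;
}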
324 struct xgbe_ring *ring, in xgbe_map_rx_buffer() argument
329 if (!ring->rx_hdr_pa.pages) { in xgbe_map_rx_buffer()
330 ret = xgbe_alloc_pages(pdata, &ring->rx_hdr_pa, GFP_ATOMIC, 0); in xgbe_map_rx_buffer()
335 if (!ring->rx_buf_pa.pages) { in xgbe_map_rx_buffer()
337 ret = xgbe_alloc_pages(pdata, &ring->rx_buf_pa, GFP_ATOMIC, in xgbe_map_rx_buffer()
344 xgbe_set_buffer_data(&rdata->rx.hdr, &ring->rx_hdr_pa, in xgbe_map_rx_buffer()
348 xgbe_set_buffer_data(&rdata->rx.buf, &ring->rx_buf_pa, in xgbe_map_rx_buffer()
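xgbe_map_rx_buffer() shows a lazy page-pool pattern: the shared header and data page sets are reallocated only when empty (GFP_ATOMIC, since this runs from the RX path), and each descriptor's rx.hdr and rx.buf views are then carved out of those pools by xgbe_set_buffer_data(). A sketch; the order and length arguments are placeholders because the listing truncates them.

/* Sketch of xgbe_map_rx_buffer(); buf_order, hdr_len and buf_len are
 * placeholders for arguments the listing truncates.
 */
static int xgbe_map_rx_buffer(struct xgbe_prv_data *pdata,
                              struct xgbe_ring *ring,
                              struct xgbe_ring_data *rdata)
{
        unsigned int buf_order = 0;                 /* placeholder */
        unsigned int hdr_len = 0, buf_len = 0;      /* placeholders */
        int ret;

        /* Top up the shared header page pool only when it is exhausted. */
        if (!ring->rx_hdr_pa.pages) {
                ret = xgbe_alloc_pages(pdata, &ring->rx_hdr_pa, GFP_ATOMIC, 0);
                if (ret)
                        return ret;
        }

        /* Likewise for the (higher-order) data page pool. */
        if (!ring->rx_buf_pa.pages) {
                ret = xgbe_alloc_pages(pdata, &ring->rx_buf_pa, GFP_ATOMIC,
                                       buf_order);
                if (ret)
                        return ret;
        }

        /* Point this descriptor's header and data buffers into the pools. */
        xgbe_set_buffer_data(&rdata->rx.hdr, &ring->rx_hdr_pa, hdr_len);
        xgbe_set_buffer_data(&rdata->rx.buf, &ring->rx_buf_pa, buf_len);

        return 0;
}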
358 struct xgbe_ring *ring; in xgbe_wrapper_tx_descriptor_init() local
368 ring = channel->tx_ring; in xgbe_wrapper_tx_descriptor_init()
369 if (!ring) in xgbe_wrapper_tx_descriptor_init()
372 rdesc = ring->rdesc; in xgbe_wrapper_tx_descriptor_init()
373 rdesc_dma = ring->rdesc_dma; in xgbe_wrapper_tx_descriptor_init()
375 for (j = 0; j < ring->rdesc_count; j++) { in xgbe_wrapper_tx_descriptor_init()
376 rdata = XGBE_GET_DESC_DATA(ring, j); in xgbe_wrapper_tx_descriptor_init()
385 ring->cur = 0; in xgbe_wrapper_tx_descriptor_init()
386 ring->dirty = 0; in xgbe_wrapper_tx_descriptor_init()
387 memset(&ring->tx, 0, sizeof(ring->tx)); in xgbe_wrapper_tx_descriptor_init()
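In xgbe_wrapper_tx_descriptor_init() each channel's tx_ring is walked once, handing every xgbe_ring_data its slice of the coherent descriptor block (the CPU pointer and DMA address advance together, one descriptor at a time) before the ring's cur/dirty indices and cached TX state are reset. The channel loop itself is not in the listing, so the sketch below covers only the per-ring part; the rdata->rdesc and rdata->rdesc_dma field names are assumed.

/* Per-ring portion of the TX wrapper init, as suggested by the fragments
 * above; rdata->rdesc / rdata->rdesc_dma are assumed field names.
 */
struct xgbe_ring_desc *rdesc = ring->rdesc;
dma_addr_t rdesc_dma = ring->rdesc_dma;
struct xgbe_ring_data *rdata;
unsigned int j;

for (j = 0; j < ring->rdesc_count; j++) {
        rdata = XGBE_GET_DESC_DATA(ring, j);

        rdata->rdesc = rdesc;              /* CPU view of this descriptor */
        rdata->rdesc_dma = rdesc_dma;      /* device view */

        rdesc++;
        rdesc_dma += sizeof(struct xgbe_ring_desc);
}

ring->cur = 0;
ring->dirty = 0;
memset(&ring->tx, 0, sizeof(ring->tx));    /* reset cached TX state */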
399 struct xgbe_ring *ring; in xgbe_wrapper_rx_descriptor_init() local
409 ring = channel->rx_ring; in xgbe_wrapper_rx_descriptor_init()
410 if (!ring) in xgbe_wrapper_rx_descriptor_init()
413 rdesc = ring->rdesc; in xgbe_wrapper_rx_descriptor_init()
414 rdesc_dma = ring->rdesc_dma; in xgbe_wrapper_rx_descriptor_init()
416 for (j = 0; j < ring->rdesc_count; j++) { in xgbe_wrapper_rx_descriptor_init()
417 rdata = XGBE_GET_DESC_DATA(ring, j); in xgbe_wrapper_rx_descriptor_init()
422 if (xgbe_map_rx_buffer(pdata, ring, rdata)) in xgbe_wrapper_rx_descriptor_init()
429 ring->cur = 0; in xgbe_wrapper_rx_descriptor_init()
430 ring->dirty = 0; in xgbe_wrapper_rx_descriptor_init()
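xgbe_wrapper_rx_descriptor_init() runs the same per-ring slicing loop but also attaches receive buffers to every slot via xgbe_map_rx_buffer(), bailing out of the loop if that fails. Sketch of the per-ring part, with the same assumed field names as the TX sketch above.

/* Per-ring portion of the RX wrapper init; field names as in the TX sketch. */
struct xgbe_ring_desc *rdesc = ring->rdesc;
dma_addr_t rdesc_dma = ring->rdesc_dma;
struct xgbe_ring_data *rdata;
unsigned int j;

for (j = 0; j < ring->rdesc_count; j++) {
        rdata = XGBE_GET_DESC_DATA(ring, j);

        rdata->rdesc = rdesc;
        rdata->rdesc_dma = rdesc_dma;

        /* RX slots additionally need header/data buffers mapped up front. */
        if (xgbe_map_rx_buffer(pdata, ring, rdata))
                break;

        rdesc++;
        rdesc_dma += sizeof(struct xgbe_ring_desc);
}

ring->cur = 0;
ring->dirty = 0;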
494 struct xgbe_ring *ring = channel->tx_ring; in xgbe_map_tx_skb() local
503 DBGPR("-->xgbe_map_tx_skb: cur = %d\n", ring->cur); in xgbe_map_tx_skb()
506 start_index = ring->cur; in xgbe_map_tx_skb()
507 cur_index = ring->cur; in xgbe_map_tx_skb()
509 packet = &ring->packet_data; in xgbe_map_tx_skb()
519 if ((tso && (packet->mss != ring->tx.cur_mss)) || in xgbe_map_tx_skb()
520 (vlan && (packet->vlan_ctag != ring->tx.cur_vlan_ctag))) in xgbe_map_tx_skb()
522 rdata = XGBE_GET_DESC_DATA(ring, cur_index); in xgbe_map_tx_skb()
543 rdata = XGBE_GET_DESC_DATA(ring, cur_index); in xgbe_map_tx_skb()
568 rdata = XGBE_GET_DESC_DATA(ring, cur_index); in xgbe_map_tx_skb()
602 rdata = XGBE_GET_DESC_DATA(ring, cur_index); in xgbe_map_tx_skb()
610 rdata = XGBE_GET_DESC_DATA(ring, cur_index - 1); in xgbe_map_tx_skb()
622 rdata = XGBE_GET_DESC_DATA(ring, start_index++); in xgbe_map_tx_skb()
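xgbe_map_tx_skb() consumes descriptors starting at ring->cur: start_index remembers where the packet began so a mapping failure can unwind the whole run, an extra context descriptor is taken only when the packet's MSS or VLAN tag differs from the values cached in ring->tx, and one descriptor is then used per mapped chunk of the skb and per page fragment. A heavily condensed sketch of that indexing pattern, with the DMA mapping, TSO/VLAN programming and error unwinding elided.

/* Condensed indexing sketch for xgbe_map_tx_skb(); the per-chunk mapping
 * loop and error path are only indicated by comments.
 */
unsigned int start_index, cur_index;
struct xgbe_packet_data *packet;
struct xgbe_ring_data *rdata;
int tso, vlan;    /* assumed: flags derived from the packet's attributes */

start_index = ring->cur;
cur_index = ring->cur;
packet = &ring->packet_data;

/* A context descriptor is needed only when TSO/VLAN state changes. */
if ((tso && packet->mss != ring->tx.cur_mss) ||
    (vlan && packet->vlan_ctag != ring->tx.cur_vlan_ctag)) {
        rdata = XGBE_GET_DESC_DATA(ring, cur_index);
        cur_index++;                     /* context slot consumed */
}

/* One descriptor per mapped piece of the skb: header/payload chunks first,
 * then each page fragment; cur_index advances with every mapping. */
rdata = XGBE_GET_DESC_DATA(ring, cur_index);
/* ... DMA mapping per chunk, cur_index++ ... */

/* The last descriptor written sits at cur_index - 1; on a mapping failure
 * everything from start_index up to it is unmapped again. */
rdata = XGBE_GET_DESC_DATA(ring, cur_index - 1);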