Lines Matching refs:orig
367 struct ring_desc *orig; member
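
The line 367 hit is the declaration itself: orig is one arm of the driver's descriptor-pointer union and selects the legacy descriptor layout, while the other arm (ex) covers the extended layout. Paraphrased from the surrounding definitions, not verbatim:

/* Legacy descriptor: one 32-bit buffer address plus a flags/length word. */
struct ring_desc {
	__le32 buf;
	__le32 flaglen;
};

/* Every ".orig" reference below is the legacy-format code path; the
 * extended-format path goes through ".ex" instead. */
union ring_type {
	struct ring_desc *orig;
	struct ring_desc_ex *ex;
};
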
1026 if (np->rx_ring.orig) in free_rings()
1028 np->rx_ring.orig, np->ring_addr); in free_rings()
1810 less_rx = np->get_rx.orig; in nv_alloc_rx()
1811 if (less_rx-- == np->first_rx.orig) in nv_alloc_rx()
1812 less_rx = np->last_rx.orig; in nv_alloc_rx()
1814 while (np->put_rx.orig != less_rx) { in nv_alloc_rx()
1828 np->put_rx.orig->buf = cpu_to_le32(np->put_rx_ctx->dma); in nv_alloc_rx()
1830 np->put_rx.orig->flaglen = cpu_to_le32(np->rx_buf_sz | NV_RX_AVAIL); in nv_alloc_rx()
1831 if (unlikely(np->put_rx.orig++ == np->last_rx.orig)) in nv_alloc_rx()
1832 np->put_rx.orig = np->first_rx.orig; in nv_alloc_rx()
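
The nv_alloc_rx() hits show the driver's wrap-around idiom twice in four lines: the stop marker is backed up one slot, wrapping from first to last, and the put cursor is advanced by post-increment, wrapping from last to first, so the producer always stops one slot short of the consumer. A stand-alone sketch of the same idiom; all names and values here are illustrative, not driver code:

#include <stdio.h>

#define RING_SIZE 8
#define FLAG_AVAIL 0x80000000u          /* stands in for NV_RX_AVAIL */

struct desc { unsigned int buf, flaglen; };

static struct desc ring[RING_SIZE];
static struct desc *first = ring, *last = &ring[RING_SIZE - 1];

/* Refill free slots, stopping one short of the consumer (get) so a
 * full ring is never confused with an empty one. */
static void refill(struct desc **put, struct desc *get)
{
	struct desc *stop = get;        /* mirrors less_rx, line 1810 */

	if (stop-- == first)            /* back up one slot ... */
		stop = last;            /* ... wrapping first -> last */
	while (*put != stop) {
		(*put)->buf = 0x1000;   /* stands in for the DMA address */
		(*put)->flaglen = FLAG_AVAIL;
		if ((*put)++ == last)   /* advance, wrapping last -> first */
			*put = first;
	}
}

int main(void)
{
	struct desc *put = ring, *get = ring;

	refill(&put, get);
	/* With get at slot 0, put stops at slot 7: at most RING_SIZE - 1
	 * buffers are ever posted. */
	printf("put stopped at slot %ld\n", (long)(put - ring));
	return 0;
}
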
1906 np->last_rx.orig = &np->rx_ring.orig[np->rx_ring_size-1]; in nv_init_rx()
1914 np->rx_ring.orig[i].flaglen = 0; in nv_init_rx()
1915 np->rx_ring.orig[i].buf = 0; in nv_init_rx()
1935 np->last_tx.orig = &np->tx_ring.orig[np->tx_ring_size-1]; in nv_init_tx()
1948 np->tx_ring.orig[i].flaglen = 0; in nv_init_tx()
1949 np->tx_ring.orig[i].buf = 0; in nv_init_tx()
2011 np->tx_ring.orig[i].flaglen = 0; in nv_drain_tx()
2012 np->tx_ring.orig[i].buf = 0; in nv_drain_tx()
2042 np->rx_ring.orig[i].flaglen = 0; in nv_drain_rx()
2043 np->rx_ring.orig[i].buf = 0; in nv_drain_rx()
2222 start_tx = put_tx = np->put_tx.orig; in nv_start_xmit()
2248 if (unlikely(put_tx++ == np->last_tx.orig)) in nv_start_xmit()
2249 put_tx = np->first_tx.orig; in nv_start_xmit()
2295 if (unlikely(put_tx++ == np->last_tx.orig)) in nv_start_xmit()
2296 put_tx = np->first_tx.orig; in nv_start_xmit()
2323 np->put_tx.orig = put_tx; in nv_start_xmit()
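
nv_start_xmit() runs the same wrap idiom on a local cursor: line 2222 snapshots the shared put pointer, the fragment loop advances the copy (lines 2248-2249 and 2295-2296), and only line 2323 publishes it back, so a concurrent completion pass never sees a half-built packet. A sketch of that commit pattern, reusing the desc, first, last, and FLAG_AVAIL definitions from the sketch above:

static struct desc *put_cursor = ring; /* shared producer cursor */

static void start_xmit(const unsigned int *frag_addr, int nfrags)
{
	struct desc *start_tx, *put_tx;
	int i;

	start_tx = put_tx = put_cursor; /* local copy, as at line 2222 */
	for (i = 0; i < nfrags; i++) {
		put_tx->buf = frag_addr[i];
		put_tx->flaglen = FLAG_AVAIL;   /* per-fragment flags, simplified */
		if (put_tx++ == last)
			put_tx = first;
	}
	/* The driver also keeps start_tx so it can finalize the first
	 * descriptor's flags after the loop; elided in this sketch. */
	(void)start_tx;
	put_cursor = put_tx;            /* publish, as at line 2323 */
}
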
2536 struct ring_desc *orig_get_tx = np->get_tx.orig; in nv_tx_done()
2539 while ((np->get_tx.orig != np->put_tx.orig) && in nv_tx_done()
2540 !((flags = le32_to_cpu(np->get_tx.orig->flaglen)) & NV_TX_VALID) && in nv_tx_done()
2580 if (unlikely(np->get_tx.orig++ == np->last_tx.orig)) in nv_tx_done()
2581 np->get_tx.orig = np->first_tx.orig; in nv_tx_done()
2588 if (unlikely((np->tx_stop == 1) && (np->get_tx.orig != orig_get_tx))) { in nv_tx_done()
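
nv_tx_done() is the consumer side of the TX ring: it walks get_tx toward put_tx, stops at the first descriptor the hardware still owns (NV_TX_VALID set), and line 2588 compares against the saved starting point to decide whether a stopped queue made progress and can be woken. A sketch, again reusing the definitions above, with FLAG_AVAIL standing in for NV_TX_VALID:

/* Reclaim completed TX slots; returns nonzero if any progress was
 * made, the cue for restarting a stopped queue. */
static int tx_done(struct desc **get, struct desc *put)
{
	struct desc *orig_get = *get;   /* mirrors orig_get_tx, line 2536 */

	while (*get != put && !((*get)->flaglen & FLAG_AVAIL)) {
		/* unmap the buffer and free the skb for this slot here */
		if ((*get)++ == last)   /* mirrors lines 2580-2581 */
			*get = first;
	}
	return *get != orig_get;        /* mirrors the test at line 2588 */
}
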
2690 le32_to_cpu(np->tx_ring.orig[i].buf), in nv_tx_timeout()
2691 le32_to_cpu(np->tx_ring.orig[i].flaglen), in nv_tx_timeout()
2692 le32_to_cpu(np->tx_ring.orig[i+1].buf), in nv_tx_timeout()
2693 le32_to_cpu(np->tx_ring.orig[i+1].flaglen), in nv_tx_timeout()
2694 le32_to_cpu(np->tx_ring.orig[i+2].buf), in nv_tx_timeout()
2695 le32_to_cpu(np->tx_ring.orig[i+2].flaglen), in nv_tx_timeout()
2696 le32_to_cpu(np->tx_ring.orig[i+3].buf), in nv_tx_timeout()
2697 le32_to_cpu(np->tx_ring.orig[i+3].flaglen)); in nv_tx_timeout()
2806 while ((np->get_rx.orig != np->put_rx.orig) && in nv_rx_process()
2807 !((flags = le32_to_cpu(np->get_rx.orig->flaglen)) & NV_RX_AVAIL) && in nv_rx_process()
2892 if (unlikely(np->get_rx.orig++ == np->last_rx.orig)) in nv_rx_process()
2893 np->get_rx.orig = np->first_rx.orig; in nv_rx_process()
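
nv_rx_process() consumes with the identical shape; only the ownership flag differs. NV_RX_AVAIL is set when a buffer is posted (line 1830) and means the NIC has not filled it yet, so the loop runs while the flag is clear. The body reduces to the tx_done() sketch above with packet delivery in place of reclaim:

static void rx_process(struct desc **get, struct desc *put)
{
	while (*get != put && !((*get)->flaglen & FLAG_AVAIL)) {
		/* hand the filled buffer up the stack here */
		if ((*get)++ == last)   /* mirrors lines 2892-2893 */
			*get = first;
	}
}
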
4645 np->rx_ring.orig = (struct ring_desc *)rxtx_ring; in nv_set_ringparam()
4646 np->tx_ring.orig = &np->rx_ring.orig[np->rx_ring_size]; in nv_set_ringparam()
5090 np->tx_ring.orig[0].buf = cpu_to_le32(test_dma_addr); in nv_loopback_test()
5091 np->tx_ring.orig[0].flaglen = cpu_to_le32((pkt_len-1) | np->tx_flags | tx_flags_extra); in nv_loopback_test()
5104 flags = le32_to_cpu(np->rx_ring.orig[0].flaglen); in nv_loopback_test()
5105 len = nv_descr_getlength(&np->rx_ring.orig[0], np->desc_ver); in nv_loopback_test()
5744 np->rx_ring.orig = pci_alloc_consistent(pci_dev, in nv_probe()
5747 if (!np->rx_ring.orig) in nv_probe()
5749 np->tx_ring.orig = &np->rx_ring.orig[np->rx_ring_size]; in nv_probe()
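
The nv_probe() and nv_set_ringparam() hits show where all of these pointers come from: both rings live in a single coherent DMA allocation, RX descriptors first, with tx_ring.orig aimed just past them, and the free_rings() hits at the top release the same block through ring_addr. Paraphrased from the lines above, with the error path reduced to a sketch:

/* One consistent allocation carries rx_ring_size + tx_ring_size legacy
 * descriptors; the TX ring is simply the tail of the RX ring. */
np->rx_ring.orig = pci_alloc_consistent(pci_dev,
					sizeof(struct ring_desc) *
					(np->rx_ring_size + np->tx_ring_size),
					&np->ring_addr);
if (!np->rx_ring.orig)
	goto out_free;                  /* label illustrative */
np->tx_ring.orig = &np->rx_ring.orig[np->rx_ring_size];
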