Lines matching references to the union member 'ex' (struct ring_desc_ex *)

368 	struct ring_desc_ex *ex;	(member declaration)
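
The four descriptor words touched throughout the matches below (bufhigh, buflow, txvlan, flaglen) suggest the following layout. This is a sketch reconstructed from the field accesses in this listing; the sibling 'orig' member for the legacy descriptor format is assumed rather than shown by these matches:

	/* Sketch: all four fields are little-endian 32-bit words, read with
	 * le32_to_cpu() and written with cpu_to_le32() in the matches. */
	struct ring_desc_ex {
		__le32 bufhigh;	/* high 32 bits of the buffer DMA address */
		__le32 buflow;	/* low 32 bits; reused for VLAN flags on rx (2959) */
		__le32 txvlan;	/* VLAN tag to insert on tx */
		__le32 flaglen;	/* ownership/status flags plus length */
	};

	union ring_type {
		struct ring_desc *orig;		/* assumed: legacy format */
		struct ring_desc_ex *ex;	/* extended format matched here */
	};
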
1030 if (np->rx_ring.ex) in free_rings()
1032 np->rx_ring.ex, np->ring_addr); in free_rings()
1851 less_rx = np->get_rx.ex; in nv_alloc_rx_optimized()
1852 if (less_rx-- == np->first_rx.ex) in nv_alloc_rx_optimized()
1853 less_rx = np->last_rx.ex; in nv_alloc_rx_optimized()
1855 while (np->put_rx.ex != less_rx) { in nv_alloc_rx_optimized()
1869 np->put_rx.ex->bufhigh = cpu_to_le32(dma_high(np->put_rx_ctx->dma)); in nv_alloc_rx_optimized()
1870 np->put_rx.ex->buflow = cpu_to_le32(dma_low(np->put_rx_ctx->dma)); in nv_alloc_rx_optimized()
1872 np->put_rx.ex->flaglen = cpu_to_le32(np->rx_buf_sz | NV_RX2_AVAIL); in nv_alloc_rx_optimized()
1873 if (unlikely(np->put_rx.ex++ == np->last_rx.ex)) in nv_alloc_rx_optimized()
1874 np->put_rx.ex = np->first_rx.ex; in nv_alloc_rx_optimized()
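
The refill loop above shows two idioms that recur in the tx paths as well (2398, 2445, 2634, 2981): descriptor pointers wrap from the last slot back to the first, and a descriptor is handed to the NIC only after its address words are written, by setting the AVAIL bit in flaglen last. A minimal sketch of the wrap-around advance as a helper (hypothetical; the driver open-codes it with a post-increment each time):

	/* Hypothetical helper expressing the advance at 1873-1874: the
	 * post-increment in the driver compares the old value, so the wrap
	 * happens exactly when the last descriptor has just been used. */
	static inline struct ring_desc_ex *
	nv_next_desc(struct ring_desc_ex *p, struct ring_desc_ex *first,
		     struct ring_desc_ex *last)
	{
		return (p == last) ? first : p + 1;
	}
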
1908 np->last_rx.ex = &np->rx_ring.ex[np->rx_ring_size-1]; in nv_init_rx()
1917 np->rx_ring.ex[i].flaglen = 0; in nv_init_rx()
1918 np->rx_ring.ex[i].txvlan = 0; in nv_init_rx()
1919 np->rx_ring.ex[i].bufhigh = 0; in nv_init_rx()
1920 np->rx_ring.ex[i].buflow = 0; in nv_init_rx()
1937 np->last_tx.ex = &np->tx_ring.ex[np->tx_ring_size-1]; in nv_init_tx()
1951 np->tx_ring.ex[i].flaglen = 0; in nv_init_tx()
1952 np->tx_ring.ex[i].txvlan = 0; in nv_init_tx()
1953 np->tx_ring.ex[i].bufhigh = 0; in nv_init_tx()
1954 np->tx_ring.ex[i].buflow = 0; in nv_init_tx()
2014 np->tx_ring.ex[i].flaglen = 0; in nv_drain_tx()
2015 np->tx_ring.ex[i].txvlan = 0; in nv_drain_tx()
2016 np->tx_ring.ex[i].bufhigh = 0; in nv_drain_tx()
2017 np->tx_ring.ex[i].buflow = 0; in nv_drain_tx()
2045 np->rx_ring.ex[i].flaglen = 0; in nv_drain_rx()
2046 np->rx_ring.ex[i].txvlan = 0; in nv_drain_rx()
2047 np->rx_ring.ex[i].bufhigh = 0; in nv_drain_rx()
2048 np->rx_ring.ex[i].buflow = 0; in nv_drain_rx()
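
nv_init_rx/nv_init_tx and nv_drain_tx/nv_drain_rx all clear the same four words; with flaglen zeroed no descriptor carries NV_RX2_AVAIL or NV_TX2_VALID, so the NIC owns nothing after a reset or teardown. A sketch of that shared pattern as a helper (hypothetical; the driver open-codes the four stores in each function):

	/* Hypothetical helper: zeroing flaglen first returns ownership of
	 * the slot to the driver before the address words are cleared. */
	static void nv_clear_desc_ex(struct ring_desc_ex *d)
	{
		d->flaglen = 0;
		d->txvlan = 0;
		d->bufhigh = 0;
		d->buflow = 0;
	}
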
2370 start_tx = put_tx = np->put_tx.ex; in nv_start_xmit_optimized()
2398 if (unlikely(put_tx++ == np->last_tx.ex)) in nv_start_xmit_optimized()
2399 put_tx = np->first_tx.ex; in nv_start_xmit_optimized()
2445 if (unlikely(put_tx++ == np->last_tx.ex)) in nv_start_xmit_optimized()
2446 put_tx = np->first_tx.ex; in nv_start_xmit_optimized()
2500 np->put_tx.ex = put_tx; in nv_start_xmit_optimized()
2600 struct ring_desc_ex *orig_get_tx = np->get_tx.ex; in nv_tx_done_optimized()
2603 while ((np->get_tx.ex != np->put_tx.ex) && in nv_tx_done_optimized()
2604 !((flags = le32_to_cpu(np->get_tx.ex->flaglen)) & NV_TX2_VALID) && in nv_tx_done_optimized()
2634 if (unlikely(np->get_tx.ex++ == np->last_tx.ex)) in nv_tx_done_optimized()
2635 np->get_tx.ex = np->first_tx.ex; in nv_tx_done_optimized()
2642 if (unlikely((np->tx_stop == 1) && (np->get_tx.ex != orig_get_tx))) { in nv_tx_done_optimized()
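
The completion loop above stops at the first descriptor whose NV_TX2_VALID bit is still set, i.e. one the hardware has not finished with, and never walks past put_tx.ex into slots that were never submitted. A sketch of how the scattered matches at 2603-2604 and 2634-2635 fit together (the reclaim body is elided here, as it is in the listing):

	while (np->get_tx.ex != np->put_tx.ex &&
	       !((flags = le32_to_cpu(np->get_tx.ex->flaglen)) & NV_TX2_VALID)) {
		/* ... unmap dma and free the completed skb (not matched) ... */
		if (unlikely(np->get_tx.ex++ == np->last_tx.ex))
			np->get_tx.ex = np->first_tx.ex;	/* wrap around */
	}
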
2705 le32_to_cpu(np->tx_ring.ex[i].bufhigh), in nv_tx_timeout()
2706 le32_to_cpu(np->tx_ring.ex[i].buflow), in nv_tx_timeout()
2707 le32_to_cpu(np->tx_ring.ex[i].flaglen), in nv_tx_timeout()
2708 le32_to_cpu(np->tx_ring.ex[i+1].bufhigh), in nv_tx_timeout()
2709 le32_to_cpu(np->tx_ring.ex[i+1].buflow), in nv_tx_timeout()
2710 le32_to_cpu(np->tx_ring.ex[i+1].flaglen), in nv_tx_timeout()
2711 le32_to_cpu(np->tx_ring.ex[i+2].bufhigh), in nv_tx_timeout()
2712 le32_to_cpu(np->tx_ring.ex[i+2].buflow), in nv_tx_timeout()
2713 le32_to_cpu(np->tx_ring.ex[i+2].flaglen), in nv_tx_timeout()
2714 le32_to_cpu(np->tx_ring.ex[i+3].bufhigh), in nv_tx_timeout()
2715 le32_to_cpu(np->tx_ring.ex[i+3].buflow), in nv_tx_timeout()
2716 le32_to_cpu(np->tx_ring.ex[i+3].flaglen)); in nv_tx_timeout()
2737 put_tx.ex = np->tx_change_owner->first_tx_desc; in nv_tx_timeout()
2912 while ((np->get_rx.ex != np->put_rx.ex) && in nv_rx_process_optimized()
2913 !((flags = le32_to_cpu(np->get_rx.ex->flaglen)) & NV_RX2_AVAIL) && in nv_rx_process_optimized()
2959 vlanflags = le32_to_cpu(np->get_rx.ex->buflow); in nv_rx_process_optimized()
2981 if (unlikely(np->get_rx.ex++ == np->last_rx.ex)) in nv_rx_process_optimized()
2982 np->get_rx.ex = np->first_rx.ex; in nv_rx_process_optimized()
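
On the rx side the same handshake uses NV_RX2_AVAIL, and the match at 2959 shows buflow being re-read after completion: once the NIC has consumed the buffer address, it reuses that word to report VLAN information. A sketch of the usual extraction, assuming NV_RX3_VLAN_TAG_* constant names not shown in these matches:

	u32 vlanflags = le32_to_cpu(np->get_rx.ex->buflow);
	if (vlanflags & NV_RX3_VLAN_TAG_PRESENT) {	/* assumed constants */
		u16 vid = vlanflags & NV_RX3_VLAN_TAG_MASK;
		/* ... hand vid up the stack with the skb ... */
	}
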
4646 np->rx_ring.ex = (struct ring_desc_ex *)rxtx_ring; in nv_set_ringparam()
4647 np->tx_ring.ex = &np->rx_ring.ex[np->rx_ring_size]; in nv_set_ringparam()
5091 np->tx_ring.ex[0].bufhigh = cpu_to_le32(dma_high(test_dma_addr)); in nv_loopback_test()
5092 np->tx_ring.ex[0].buflow = cpu_to_le32(dma_low(test_dma_addr)); in nv_loopback_test()
5093 np->tx_ring.ex[0].flaglen = cpu_to_le32((pkt_len-1) | np->tx_flags | tx_flags_extra); in nv_loopback_test()
5106 flags = le32_to_cpu(np->rx_ring.ex[0].flaglen); in nv_loopback_test()
5107 len = nv_descr_getlength_ex(&np->rx_ring.ex[0], np->desc_ver); in nv_loopback_test()
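
The loopback test fills a descriptor by hand, splitting the DMA address with dma_high()/dma_low() exactly as the refill path does at 1869-1870. One plausible implementation of that split, written so the shift stays defined when dma_addr_t is only 32 bits wide (a plain '>> 32' on a 32-bit type would be undefined behaviour):

	static inline u32 dma_low(dma_addr_t addr)
	{
		return addr;		/* truncates to the low 32 bits */
	}

	static inline u32 dma_high(dma_addr_t addr)
	{
		return addr >> 31 >> 1;	/* 0 when dma_addr_t is 32-bit */
	}
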
5749 np->rx_ring.ex = pci_alloc_consistent(pci_dev, in nv_probe()
5752 if (!np->rx_ring.ex) in nv_probe()
5754 np->tx_ring.ex = &np->rx_ring.ex[np->rx_ring_size]; in nv_probe()
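
The probe path makes a single coherent allocation large enough for both rings and points tx_ring.ex at the slot just past the rx ring, which is why free_rings() at 1030-1032 frees only rx_ring.ex. A sketch of the size arithmetic implied by these matches:

	/* One DMA-coherent block: rx descriptors first, tx immediately
	 * after; ring_addr receives the bus address of the whole block. */
	np->rx_ring.ex = pci_alloc_consistent(pci_dev,
				sizeof(struct ring_desc_ex) *
				(np->rx_ring_size + np->tx_ring_size),
				&np->ring_addr);
	if (!np->rx_ring.ex)
		goto out_unmap;	/* assumed error label */
	np->tx_ring.ex = &np->rx_ring.ex[np->rx_ring_size];
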