put_tx 825 drivers/net/ethernet/nvidia/forcedeth.c union ring_type get_tx, put_tx, last_tx; put_tx 1956 drivers/net/ethernet/nvidia/forcedeth.c np->put_tx = np->tx_ring; put_tx 2222 drivers/net/ethernet/nvidia/forcedeth.c struct ring_desc *put_tx; put_tx 2247 drivers/net/ethernet/nvidia/forcedeth.c start_tx = put_tx = np->put_tx.orig; put_tx 2266 drivers/net/ethernet/nvidia/forcedeth.c put_tx->buf = cpu_to_le32(np->put_tx_ctx->dma); put_tx 2267 drivers/net/ethernet/nvidia/forcedeth.c put_tx->flaglen = cpu_to_le32((bcnt-1) | tx_flags); put_tx 2272 drivers/net/ethernet/nvidia/forcedeth.c if (unlikely(put_tx++ == np->last_tx.orig)) put_tx 2273 drivers/net/ethernet/nvidia/forcedeth.c put_tx = np->tx_ring.orig; put_tx 2313 drivers/net/ethernet/nvidia/forcedeth.c put_tx->buf = cpu_to_le32(np->put_tx_ctx->dma); put_tx 2314 drivers/net/ethernet/nvidia/forcedeth.c put_tx->flaglen = cpu_to_le32((bcnt-1) | tx_flags); put_tx 2318 drivers/net/ethernet/nvidia/forcedeth.c if (unlikely(put_tx++ == np->last_tx.orig)) put_tx 2319 drivers/net/ethernet/nvidia/forcedeth.c put_tx = np->tx_ring.orig; put_tx 2325 drivers/net/ethernet/nvidia/forcedeth.c if (unlikely(put_tx == np->tx_ring.orig)) put_tx 2328 drivers/net/ethernet/nvidia/forcedeth.c prev_tx = put_tx - 1; put_tx 2356 drivers/net/ethernet/nvidia/forcedeth.c np->put_tx.orig = put_tx; put_tx 2377 drivers/net/ethernet/nvidia/forcedeth.c struct ring_desc_ex *put_tx; put_tx 2403 drivers/net/ethernet/nvidia/forcedeth.c start_tx = put_tx = np->put_tx.ex; put_tx 2423 drivers/net/ethernet/nvidia/forcedeth.c put_tx->bufhigh = cpu_to_le32(dma_high(np->put_tx_ctx->dma)); put_tx 2424 drivers/net/ethernet/nvidia/forcedeth.c put_tx->buflow = cpu_to_le32(dma_low(np->put_tx_ctx->dma)); put_tx 2425 drivers/net/ethernet/nvidia/forcedeth.c put_tx->flaglen = cpu_to_le32((bcnt-1) | tx_flags); put_tx 2430 drivers/net/ethernet/nvidia/forcedeth.c if (unlikely(put_tx++ == np->last_tx.ex)) put_tx 2431 drivers/net/ethernet/nvidia/forcedeth.c put_tx = np->tx_ring.ex; put_tx 2470 drivers/net/ethernet/nvidia/forcedeth.c put_tx->bufhigh = cpu_to_le32(dma_high(np->put_tx_ctx->dma)); put_tx 2471 drivers/net/ethernet/nvidia/forcedeth.c put_tx->buflow = cpu_to_le32(dma_low(np->put_tx_ctx->dma)); put_tx 2472 drivers/net/ethernet/nvidia/forcedeth.c put_tx->flaglen = cpu_to_le32((bcnt-1) | tx_flags); put_tx 2476 drivers/net/ethernet/nvidia/forcedeth.c if (unlikely(put_tx++ == np->last_tx.ex)) put_tx 2477 drivers/net/ethernet/nvidia/forcedeth.c put_tx = np->tx_ring.ex; put_tx 2483 drivers/net/ethernet/nvidia/forcedeth.c if (unlikely(put_tx == np->tx_ring.ex)) put_tx 2486 drivers/net/ethernet/nvidia/forcedeth.c prev_tx = put_tx - 1; put_tx 2541 drivers/net/ethernet/nvidia/forcedeth.c np->put_tx.ex = put_tx; put_tx 2580 drivers/net/ethernet/nvidia/forcedeth.c while ((np->get_tx.orig != np->put_tx.orig) && put_tx 2650 drivers/net/ethernet/nvidia/forcedeth.c while ((np->get_tx.ex != np->put_tx.ex) && put_tx 2708 drivers/net/ethernet/nvidia/forcedeth.c union ring_type put_tx; put_tx 2787 drivers/net/ethernet/nvidia/forcedeth.c put_tx.ex = np->tx_change_owner->first_tx_desc; put_tx 2789 drivers/net/ethernet/nvidia/forcedeth.c put_tx = np->put_tx; put_tx 2796 drivers/net/ethernet/nvidia/forcedeth.c np->get_tx = np->put_tx = put_tx;