Lines Matching refs:put_tx_ctx
826 struct nv_skb_map *get_tx_ctx, *put_tx_ctx; member
1938 np->get_tx_ctx = np->put_tx_ctx = np->first_tx_ctx = np->tx_skb; in nv_init_tx()
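
The member declaration at line 826 and the reset at line 1938 show the shape of this bookkeeping: put_tx_ctx is the producer pointer and get_tx_ctx the consumer pointer into the per-descriptor nv_skb_map array (these references match the forcedeth TX path). A minimal userspace model of that state, with names borrowed from the listing but the structures reduced to what these lines actually touch (a sketch, not the driver's definitions):

    /* Simplified stand-ins for the structures referenced above.
     * One nv_skb_map entry exists per TX descriptor.
     */
    struct nv_skb_map_model {
            unsigned long long dma;   /* DMA address recorded for the chunk */
            unsigned int dma_len;     /* length of the mapping              */
            int dma_single;           /* 1: single mapping, 0: page/frag    */
    };

    struct tx_ring_model {
            struct nv_skb_map_model *tx_skb;        /* backing array            */
            struct nv_skb_map_model *first_tx_ctx;  /* == tx_skb                */
            struct nv_skb_map_model *last_tx_ctx;   /* == tx_skb + size - 1     */
            struct nv_skb_map_model *get_tx_ctx;    /* consumer (TX completion) */
            struct nv_skb_map_model *put_tx_ctx;    /* producer (xmit paths)    */
            unsigned int tx_ring_size;
    };

    /* Mirrors the pointer reset at line 1938; last_tx_ctx pointing at the
     * final entry is an assumption implied by the wraparound checks below.
     */
    static void init_tx_model(struct tx_ring_model *r,
                              struct nv_skb_map_model *skbs, unsigned int size)
    {
            r->tx_skb = skbs;
            r->tx_ring_size = size;
            r->first_tx_ctx = skbs;
            r->last_tx_ctx = skbs + size - 1;
            r->get_tx_ctx = r->put_tx_ctx = r->first_tx_ctx;
    }
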
2070 return (u32)(np->tx_ring_size - ((np->tx_ring_size + (np->put_tx_ctx - np->get_tx_ctx)) % np->tx_ring_size)); in nv_get_empty_tx_slots()
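
Line 2070 is the free-slot calculation. Because put_tx_ctx and get_tx_ctx are raw pointers into the same array, their difference can be negative once the producer has wrapped while the consumer has not; adding tx_ring_size before the modulo keeps the in-flight count non-negative. A sketch of the same arithmetic on the model above:

    /* Same arithmetic as nv_get_empty_tx_slots() (line 2070):
     * (put - get) is the number of in-flight entries, possibly negative
     * after a producer wrap, so tx_ring_size is added before the modulo.
     */
    static unsigned int empty_tx_slots_model(const struct tx_ring_model *r)
    {
            return (unsigned int)(r->tx_ring_size -
                    ((r->tx_ring_size + (r->put_tx_ctx - r->get_tx_ctx)) %
                     r->tx_ring_size));
    }

For example, with a 16-entry ring, put_tx_ctx at index 3 and get_tx_ctx at index 10, the difference is -7, (16 - 7) % 16 = 9 entries are in flight, and 7 slots are reported free.
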
2227 prev_tx_ctx = np->put_tx_ctx; in nv_start_xmit()
2229 np->put_tx_ctx->dma = pci_map_single(np->pci_dev, skb->data + offset, bcnt, in nv_start_xmit()
2232 np->put_tx_ctx->dma)) { in nv_start_xmit()
2240 np->put_tx_ctx->dma_len = bcnt; in nv_start_xmit()
2241 np->put_tx_ctx->dma_single = 1; in nv_start_xmit()
2242 put_tx->buf = cpu_to_le32(np->put_tx_ctx->dma); in nv_start_xmit()
2250 if (unlikely(np->put_tx_ctx++ == np->last_tx_ctx)) in nv_start_xmit()
2251 np->put_tx_ctx = np->first_tx_ctx; in nv_start_xmit()
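
Lines 2227-2251 are the per-chunk pattern for the linear part of the skb in nv_start_xmit(): map a chunk with pci_map_single(), record the address, length and mapping kind in *put_tx_ctx, point the descriptor's buf field at it, then advance the producer. The advance post-increments and compares against last_tx_ctx, so the entry at last_tx_ctx is still consumed before the pointer wraps back to first_tx_ctx. A sketch of just that advance, on the model above:

    /* Producer advance with wraparound, the pattern behind lines 2250-2251
     * (and repeated at 2297-2298, 2400-2401, 2447-2448).  The comparison
     * uses the value before the increment, so the context that equals
     * last_tx_ctx is still used for the current chunk and the next chunk
     * starts again at first_tx_ctx.
     */
    static void advance_put_tx_ctx(struct tx_ring_model *r)
    {
            if (r->put_tx_ctx++ == r->last_tx_ctx)
                    r->put_tx_ctx = r->first_tx_ctx;
    }
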
2262 prev_tx_ctx = np->put_tx_ctx; in nv_start_xmit()
2264 start_tx_ctx = tmp_tx_ctx = np->put_tx_ctx; in nv_start_xmit()
2267 np->put_tx_ctx->dma = skb_frag_dma_map( in nv_start_xmit()
2272 if (dma_mapping_error(&np->pci_dev->dev, np->put_tx_ctx->dma)) { in nv_start_xmit()
2279 } while (tmp_tx_ctx != np->put_tx_ctx); in nv_start_xmit()
2281 np->put_tx_ctx = start_tx_ctx; in nv_start_xmit()
2288 np->put_tx_ctx->dma_len = bcnt; in nv_start_xmit()
2289 np->put_tx_ctx->dma_single = 0; in nv_start_xmit()
2290 put_tx->buf = cpu_to_le32(np->put_tx_ctx->dma); in nv_start_xmit()
2297 if (unlikely(np->put_tx_ctx++ == np->last_tx_ctx)) in nv_start_xmit()
2298 np->put_tx_ctx = np->first_tx_ctx; in nv_start_xmit()
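
Lines 2262-2298 are the fragment path of nv_start_xmit(). Before fragment mapping, a snapshot of put_tx_ctx is kept in start_tx_ctx (and tmp_tx_ctx, line 2264); if skb_frag_dma_map() later reports a mapping error, the contexts consumed since the snapshot are walked forward with the same wraparound rule and unmapped, then put_tx_ctx is rewound to the snapshot so the ring is left untouched when the packet is dropped (lines 2279-2281). A sketch of that unwind shape; unmap_one() is a hypothetical stand-in for the driver's unmap helper:

    /* Unwind shape behind lines 2264, 2272-2281: walk from the snapshot up
     * to (but not including) put_tx_ctx, undoing each mapping, then rewind
     * the producer.  Assumes at least one context was consumed since the
     * snapshot was taken.
     */
    static void rollback_tx_mappings(struct tx_ring_model *r,
                                     struct nv_skb_map_model *start_tx_ctx,
                                     void (*unmap_one)(struct nv_skb_map_model *))
    {
            struct nv_skb_map_model *tmp_tx_ctx = start_tx_ctx;

            do {
                    unmap_one(tmp_tx_ctx);
                    if (tmp_tx_ctx++ == r->last_tx_ctx)
                            tmp_tx_ctx = r->first_tx_ctx;
            } while (tmp_tx_ctx != r->put_tx_ctx);

            r->put_tx_ctx = start_tx_ctx;
    }
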
2371 start_tx_ctx = np->put_tx_ctx; in nv_start_xmit_optimized()
2376 prev_tx_ctx = np->put_tx_ctx; in nv_start_xmit_optimized()
2378 np->put_tx_ctx->dma = pci_map_single(np->pci_dev, skb->data + offset, bcnt, in nv_start_xmit_optimized()
2381 np->put_tx_ctx->dma)) { in nv_start_xmit_optimized()
2389 np->put_tx_ctx->dma_len = bcnt; in nv_start_xmit_optimized()
2390 np->put_tx_ctx->dma_single = 1; in nv_start_xmit_optimized()
2391 put_tx->bufhigh = cpu_to_le32(dma_high(np->put_tx_ctx->dma)); in nv_start_xmit_optimized()
2392 put_tx->buflow = cpu_to_le32(dma_low(np->put_tx_ctx->dma)); in nv_start_xmit_optimized()
2400 if (unlikely(np->put_tx_ctx++ == np->last_tx_ctx)) in nv_start_xmit_optimized()
2401 np->put_tx_ctx = np->first_tx_ctx; in nv_start_xmit_optimized()
2412 prev_tx_ctx = np->put_tx_ctx; in nv_start_xmit_optimized()
2415 start_tx_ctx = tmp_tx_ctx = np->put_tx_ctx; in nv_start_xmit_optimized()
2416 np->put_tx_ctx->dma = skb_frag_dma_map( in nv_start_xmit_optimized()
2422 if (dma_mapping_error(&np->pci_dev->dev, np->put_tx_ctx->dma)) { in nv_start_xmit_optimized()
2429 } while (tmp_tx_ctx != np->put_tx_ctx); in nv_start_xmit_optimized()
2431 np->put_tx_ctx = start_tx_ctx; in nv_start_xmit_optimized()
2437 np->put_tx_ctx->dma_len = bcnt; in nv_start_xmit_optimized()
2438 np->put_tx_ctx->dma_single = 0; in nv_start_xmit_optimized()
2439 put_tx->bufhigh = cpu_to_le32(dma_high(np->put_tx_ctx->dma)); in nv_start_xmit_optimized()
2440 put_tx->buflow = cpu_to_le32(dma_low(np->put_tx_ctx->dma)); in nv_start_xmit_optimized()
2447 if (unlikely(np->put_tx_ctx++ == np->last_tx_ctx)) in nv_start_xmit_optimized()
2448 np->put_tx_ctx = np->first_tx_ctx; in nv_start_xmit_optimized()
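
nv_start_xmit_optimized() (lines 2371-2448) repeats the same map/record/advance and unwind patterns, but fills the extended descriptor format, where the 64-bit DMA address is split across bufhigh and buflow with dma_high()/dma_low() (lines 2391-2392 and 2439-2440). A minimal model of that split, assuming a plain 64-bit value in place of dma_addr_t:

    #include <stdint.h>

    /* Models the bufhigh/buflow split.  In the driver dma_high()/dma_low()
     * operate on dma_addr_t, and dma_high() is effectively 0 when
     * dma_addr_t is only 32 bits wide; a uint64_t stands in here.
     */
    static uint32_t dma_high_model(uint64_t addr)
    {
            return (uint32_t)(addr >> 32);
    }

    static uint32_t dma_low_model(uint64_t addr)
    {
            return (uint32_t)addr;
    }
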
2486 start_tx_ctx->next_tx_ctx = np->put_tx_ctx; in nv_start_xmit_optimized()
2487 np->tx_end_flip = np->put_tx_ctx; in nv_start_xmit_optimized()
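
Lines 2486-2487 are from the path that limits the number of outstanding TX packets: the first descriptor is queued with its valid bit deferred, and put_tx_ctx, which now points one past the packet's last context (i.e. where the next packet will start), is recorded both per-packet as start_tx_ctx->next_tx_ctx and ring-wide as tx_end_flip, so the later flag flip knows where this packet ends. A small sketch of that bookkeeping step; the out-parameters are stand-ins for the driver's fields:

    /* Records the deferral point (lines 2486-2487) on the model above:
     * put_tx_ctx is saved as both the per-packet "next packet starts here"
     * pointer and the ring-wide end marker for the deferred flag flip.
     */
    static void record_deferral_point(struct tx_ring_model *r,
                                      struct nv_skb_map_model **next_tx_ctx,
                                      struct nv_skb_map_model **tx_end_flip)
    {
            *next_tx_ctx = r->put_tx_ctx;
            *tx_end_flip = r->put_tx_ctx;
    }
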