xdp_ring         4174 drivers/net/ethernet/intel/i40e/i40e_main.c 		struct i40e_ring *xdp_ring = vsi->xdp_rings[qp_idx];
xdp_ring         4176 drivers/net/ethernet/intel/i40e/i40e_main.c 		xdp_ring->q_vector = q_vector;
xdp_ring         4177 drivers/net/ethernet/intel/i40e/i40e_main.c 		xdp_ring->next = q_vector->tx.ring;
xdp_ring         4178 drivers/net/ethernet/intel/i40e/i40e_main.c 		q_vector->tx.ring = xdp_ring;
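
The i40e_main.c hits above are the queue-pair mapping step: each XDP TX ring is pushed onto its queue vector's singly linked TX ring list. A minimal sketch of that head insertion, with hypothetical stand-ins for the driver's i40e_ring/i40e_q_vector types:

/* Hedged sketch of the head insertion behind the i40e_main.c hits above;
 * both structs are hypothetical stand-ins, not the driver's definitions. */
struct tx_ring_sketch {
	struct tx_ring_sketch *next;        /* next ring sharing this vector */
	struct q_vector_sketch *q_vector;   /* back-pointer to the owning vector */
};

struct q_vector_sketch {
	struct tx_ring_sketch *tx_ring;     /* head of the TX ring list */
};

/* Attach an XDP TX ring to its queue vector, as the hits do with
 * q_vector->tx.ring. */
static void map_xdp_ring(struct q_vector_sketch *qv,
			 struct tx_ring_sketch *xdp_ring)
{
	xdp_ring->q_vector = qv;
	xdp_ring->next = qv->tx_ring;       /* push onto the singly linked list */
	qv->tx_ring = xdp_ring;
}
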
xdp_ring         2179 drivers/net/ethernet/intel/i40e/i40e_txrx.c 			      struct i40e_ring *xdp_ring);
xdp_ring         2181 drivers/net/ethernet/intel/i40e/i40e_txrx.c int i40e_xmit_xdp_tx_ring(struct xdp_buff *xdp, struct i40e_ring *xdp_ring)
xdp_ring         2188 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	return i40e_xmit_xdp_ring(xdpf, xdp_ring);
xdp_ring         2200 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	struct i40e_ring *xdp_ring;
xdp_ring         2217 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		xdp_ring = rx_ring->vsi->xdp_rings[rx_ring->queue_index];
xdp_ring         2218 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		result = i40e_xmit_xdp_tx_ring(xdp, xdp_ring);
xdp_ring         2266 drivers/net/ethernet/intel/i40e/i40e_txrx.c void i40e_xdp_ring_update_tail(struct i40e_ring *xdp_ring)
xdp_ring         2272 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	writel_relaxed(xdp_ring->next_to_use, xdp_ring->tail);
xdp_ring         2310 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		struct i40e_ring *xdp_ring =
xdp_ring         2313 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		i40e_xdp_ring_update_tail(xdp_ring);
xdp_ring         3504 drivers/net/ethernet/intel/i40e/i40e_txrx.c 			      struct i40e_ring *xdp_ring)
xdp_ring         3506 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	u16 i = xdp_ring->next_to_use;
xdp_ring         3513 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	if (!unlikely(I40E_DESC_UNUSED(xdp_ring))) {
xdp_ring         3514 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		xdp_ring->tx_stats.tx_busy++;
xdp_ring         3517 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	dma = dma_map_single(xdp_ring->dev, data, size, DMA_TO_DEVICE);
xdp_ring         3518 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	if (dma_mapping_error(xdp_ring->dev, dma))
xdp_ring         3521 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	tx_bi = &xdp_ring->tx_bi[i];
xdp_ring         3530 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	tx_desc = I40E_TX_DESC(xdp_ring, i);
xdp_ring         3542 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	if (i == xdp_ring->count)
xdp_ring         3546 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	xdp_ring->next_to_use = i;
xdp_ring         3709 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	struct i40e_ring *xdp_ring;
xdp_ring         3723 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	xdp_ring = vsi->xdp_rings[queue_index];
xdp_ring         3729 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		err = i40e_xmit_xdp_ring(xdpf, xdp_ring);
xdp_ring         3737 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		i40e_xdp_ring_update_tail(xdp_ring);
xdp_ring            9 drivers/net/ethernet/intel/i40e/i40e_txrx_common.h int i40e_xmit_xdp_tx_ring(struct xdp_buff *xdp, struct i40e_ring *xdp_ring);
xdp_ring           16 drivers/net/ethernet/intel/i40e/i40e_txrx_common.h void i40e_xdp_ring_update_tail(struct i40e_ring *xdp_ring);
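
The i40e_txrx.c and i40e_txrx_common.h hits trace the XDP_TX path: i40e_xmit_xdp_tx_ring() hands the frame to i40e_xmit_xdp_ring(), which checks for a free descriptor, DMA-maps the data, fills the descriptor at next_to_use and wraps the index at ring->count; the tail register is only written later through i40e_xdp_ring_update_tail() (writel_relaxed of next_to_use). A compilable, driver-agnostic sketch of that index bookkeeping, with a hypothetical reduced ring type:

#include <stdint.h>
#include <stdbool.h>

/* Hedged sketch: minimal TX ring with the next_to_use/next_to_clean
 * bookkeeping the i40e hits above rely on. */
struct xdp_tx_ring_sketch {
	uint16_t count;          /* number of descriptors */
	uint16_t next_to_use;    /* producer index */
	uint16_t next_to_clean;  /* consumer index */
};

/* Free descriptor count, mirroring what I40E_DESC_UNUSED() computes
 * (one slot is always kept empty to distinguish full from empty). */
static uint16_t desc_unused(const struct xdp_tx_ring_sketch *r)
{
	return (uint16_t)((r->next_to_clean > r->next_to_use ? 0 : r->count) +
			  r->next_to_clean - r->next_to_use - 1);
}

/* Claim one descriptor slot for an XDP frame; returns false when busy. */
static bool xmit_xdp_ring_sketch(struct xdp_tx_ring_sketch *r)
{
	if (!desc_unused(r))
		return false;            /* caller bumps tx_stats.tx_busy */

	/* ... dma_map_single() the frame and fill the descriptor at
	 *     next_to_use, as the hits at i40e_txrx.c:3517-3530 do ... */

	if (++r->next_to_use == r->count)
		r->next_to_use = 0;      /* wrap, as at i40e_txrx.c:3542-3546 */
	return true;
}
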
xdp_ring          195 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	struct i40e_ring *xdp_ring;
xdp_ring          214 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		xdp_ring = rx_ring->vsi->xdp_rings[rx_ring->queue_index];
xdp_ring          215 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		result = i40e_xmit_xdp_tx_ring(xdp, xdp_ring);
xdp_ring          647 drivers/net/ethernet/intel/i40e/i40e_xsk.c static bool i40e_xmit_zc(struct i40e_ring *xdp_ring, unsigned int budget)
xdp_ring          656 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		if (!unlikely(I40E_DESC_UNUSED(xdp_ring))) {
xdp_ring          657 drivers/net/ethernet/intel/i40e/i40e_xsk.c 			xdp_ring->tx_stats.tx_busy++;
xdp_ring          662 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		if (!xsk_umem_consume_tx(xdp_ring->xsk_umem, &desc))
xdp_ring          665 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		dma = xdp_umem_get_dma(xdp_ring->xsk_umem, desc.addr);
xdp_ring          667 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		dma_sync_single_for_device(xdp_ring->dev, dma, desc.len,
xdp_ring          670 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		tx_bi = &xdp_ring->tx_bi[xdp_ring->next_to_use];
xdp_ring          673 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		tx_desc = I40E_TX_DESC(xdp_ring, xdp_ring->next_to_use);
xdp_ring          680 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		xdp_ring->next_to_use++;
xdp_ring          681 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		if (xdp_ring->next_to_use == xdp_ring->count)
xdp_ring          682 drivers/net/ethernet/intel/i40e/i40e_xsk.c 			xdp_ring->next_to_use = 0;
xdp_ring          689 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		i40e_xdp_ring_update_tail(xdp_ring);
xdp_ring          691 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		xsk_umem_consume_tx_done(xdp_ring->xsk_umem);
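
The i40e_xsk.c hits show i40e_xmit_zc(): within a budget it pulls descriptors from the UMEM TX queue with xsk_umem_consume_tx(), syncs the already-mapped buffer for the device, fills a TX descriptor, and only after the loop kicks the tail and signals completion to user space. A hedged skeleton of that loop shape; only calls and fields that appear in the hits are used, the descriptor fill itself is elided, and this is not the driver's actual function:

static bool xmit_zc_sketch(struct i40e_ring *xdp_ring, unsigned int budget)
{
	unsigned int sent = 0;
	struct xdp_desc desc;
	bool work_done = true;
	dma_addr_t dma;

	while (budget-- > 0) {
		if (!I40E_DESC_UNUSED(xdp_ring)) {
			xdp_ring->tx_stats.tx_busy++;   /* ring full */
			work_done = false;
			break;
		}
		if (!xsk_umem_consume_tx(xdp_ring->xsk_umem, &desc))
			break;                          /* nothing more queued by user space */

		dma = xdp_umem_get_dma(xdp_ring->xsk_umem, desc.addr);
		dma_sync_single_for_device(xdp_ring->dev, dma, desc.len,
					   DMA_TO_DEVICE);

		/* ... set tx_bi and fill I40E_TX_DESC(xdp_ring, next_to_use) ... */

		if (++xdp_ring->next_to_use == xdp_ring->count)
			xdp_ring->next_to_use = 0;
		sent++;
	}

	if (sent) {
		/* One tail write and one completion notification per batch. */
		i40e_xdp_ring_update_tail(xdp_ring);
		xsk_umem_consume_tx_done(xdp_ring->xsk_umem);
	}
	return work_done;
}
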
xdp_ring          635 drivers/net/ethernet/intel/ixgbe/ixgbe.h 	struct ixgbe_ring *xdp_ring[MAX_XDP_QUEUES];
xdp_ring         1064 drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c 			adapter->xdp_ring[i]->count = new_tx_count;
xdp_ring         1108 drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c 			memcpy(&temp_ring[i], adapter->xdp_ring[j],
xdp_ring         1129 drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c 			ixgbe_free_tx_resources(adapter->xdp_ring[j]);
xdp_ring         1131 drivers/net/ethernet/intel/ixgbe/ixgbe_ethtool.c 			memcpy(adapter->xdp_ring[j], &temp_ring[i],
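
The ixgbe_ethtool.c hits are the set_ringparam path: when the interface is down the new descriptor count is written straight into each xdp_ring (the hit at line 1064); otherwise each ring struct is copied into a temporary array with the new count, the new resources are allocated, and only then are the old resources freed and the rings copied back. A hedged sketch of that swap pattern; the ring type and setup/free helpers are hypothetical stand-ins:

#include <string.h>

struct ring_sketch {
	int count;     /* descriptor count */
	void *desc;    /* descriptor memory owned by this ring */
};

extern int setup_ring(struct ring_sketch *r);   /* allocate desc for r->count (assumed) */
extern void free_ring(struct ring_sketch *r);   /* release desc (assumed) */

/* Build replacements in temp[] first so a failed allocation leaves the
 * live rings untouched, then free the old resources and copy back. */
static int resize_rings(struct ring_sketch **live, int n,
			struct ring_sketch *temp, int new_count)
{
	int i, err;

	for (i = 0; i < n; i++) {
		memcpy(&temp[i], live[i], sizeof(struct ring_sketch));
		temp[i].count = new_count;
		err = setup_ring(&temp[i]);
		if (err) {
			while (i--)        /* unwind only what was allocated */
				free_ring(&temp[i]);
			return err;
		}
	}

	for (i = 0; i < n; i++) {
		free_ring(live[i]);
		memcpy(live[i], &temp[i], sizeof(struct ring_sketch));
	}
	return 0;
}
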
xdp_ring          263 drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c 		adapter->xdp_ring[i]->reg_idx = reg_idx;
xdp_ring          953 drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c 		adapter->xdp_ring[xdp_idx] = ring;
xdp_ring         1025 drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c 			adapter->xdp_ring[ring->queue_index] = NULL;
xdp_ring         1111 drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c 		if (adapter->xdp_ring[i])
xdp_ring         1112 drivers/net/ethernet/intel/ixgbe/ixgbe_lib.c 			adapter->xdp_ring[i]->ring_idx = i;
xdp_ring          617 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		ring = adapter->xdp_ring[n];
xdp_ring          956 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 			  &adapter->xdp_ring[i]->state);
xdp_ring         1003 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		struct ixgbe_ring *xdp_ring = adapter->xdp_ring[i];
xdp_ring         1005 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		tc = xdp_ring->dcb_tc;
xdp_ring         1007 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 			clear_bit(__IXGBE_HANG_CHECK_ARMED, &xdp_ring->state);
xdp_ring         2410 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		struct ixgbe_ring *ring = adapter->xdp_ring[smp_processor_id()];
xdp_ring         3647 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		ixgbe_configure_tx_ring(adapter, adapter->xdp_ring[i]);
xdp_ring         5832 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		struct ixgbe_ring *ring = adapter->xdp_ring[i];
xdp_ring         5879 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 			struct ixgbe_ring *ring = adapter->xdp_ring[i];
xdp_ring         6066 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		ixgbe_clean_tx_ring(adapter->xdp_ring[i]);
xdp_ring         6107 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	if (adapter->xdp_ring[0])
xdp_ring         6511 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		err = ixgbe_setup_tx_resources(adapter->xdp_ring[j]);
xdp_ring         6523 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		ixgbe_free_tx_resources(adapter->xdp_ring[j]);
xdp_ring         6661 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		if (adapter->xdp_ring[i]->desc)
xdp_ring         6662 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 			ixgbe_free_tx_resources(adapter->xdp_ring[i]);
xdp_ring         7094 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		struct ixgbe_ring *xdp_ring = adapter->xdp_ring[i];
xdp_ring         7096 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		restart_queue += xdp_ring->tx_stats.restart_queue;
xdp_ring         7097 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		tx_busy += xdp_ring->tx_stats.tx_busy;
xdp_ring         7098 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		bytes += xdp_ring->stats.bytes;
xdp_ring         7099 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		packets += xdp_ring->stats.packets;
xdp_ring         7297 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 				&adapter->xdp_ring[i]->state);
xdp_ring         7332 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 			set_check_for_tx_hang(adapter->xdp_ring[i]);
xdp_ring         7548 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		struct ixgbe_ring *ring = adapter->xdp_ring[i];
xdp_ring         8551 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	struct ixgbe_ring *ring = adapter->xdp_ring[smp_processor_id()];
xdp_ring         8962 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		struct ixgbe_ring *ring = READ_ONCE(adapter->xdp_ring[i]);
xdp_ring         10293 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 			if (adapter->xdp_ring[i]->xsk_umem)
xdp_ring         10346 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	ring = adapter->xdp_prog ? adapter->xdp_ring[smp_processor_id()] : NULL;
xdp_ring         10514 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	struct ixgbe_ring *rx_ring, *tx_ring, *xdp_ring;
xdp_ring         10518 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	xdp_ring = adapter->xdp_ring[ring];
xdp_ring         10521 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	if (xdp_ring)
xdp_ring         10522 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		ixgbe_disable_txr(adapter, xdp_ring);
xdp_ring         10525 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	if (xdp_ring)
xdp_ring         10532 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	if (xdp_ring)
xdp_ring         10533 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		ixgbe_clean_tx_ring(xdp_ring);
xdp_ring         10537 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	if (xdp_ring)
xdp_ring         10538 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		ixgbe_reset_txr_stats(xdp_ring);
xdp_ring         10552 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	struct ixgbe_ring *rx_ring, *tx_ring, *xdp_ring;
xdp_ring         10556 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	xdp_ring = adapter->xdp_ring[ring];
xdp_ring         10562 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	if (xdp_ring)
xdp_ring         10563 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		ixgbe_configure_tx_ring(adapter, xdp_ring);
xdp_ring         10567 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	if (xdp_ring)
xdp_ring         10568 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		clear_bit(__IXGBE_TX_DISABLED, &xdp_ring->state);
xdp_ring         11055 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		u64_stats_init(&adapter->xdp_ring[i]->syncp);
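
Most of the ixgbe_main.c hits are lifecycle plumbing (configure, clean, free, hang check), plus two recurring patterns: the XDP transmit paths pick a ring with adapter->xdp_ring[smp_processor_id()] so each CPU owns one TX queue without locking, and the per-ring byte/packet counters have their syncp initialized with u64_stats_init() and are read after fetching the ring pointer with READ_ONCE(), under a u64_stats retry loop so 64-bit counters stay consistent on 32-bit hosts. A hedged sketch of that snapshot pattern, with the ring reduced to just its counters:

#include <linux/types.h>
#include <linux/u64_stats_sync.h>

struct xdp_stats_sketch {
	struct u64_stats_sync syncp;   /* initialized with u64_stats_init() */
	u64 packets;
	u64 bytes;
};

/* Take a consistent snapshot of one ring's counters, retrying if a writer
 * updated them mid-read. */
static void read_xdp_stats(const struct xdp_stats_sketch *ring,
			   u64 *packets, u64 *bytes)
{
	unsigned int start;

	do {
		start = u64_stats_fetch_begin(&ring->syncp);
		*packets = ring->packets;
		*bytes = ring->bytes;
	} while (u64_stats_fetch_retry(&ring->syncp, start));
}
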
xdp_ring          533 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		struct ixgbe_ring *ring = adapter->xdp_ring[smp_processor_id()];
xdp_ring          576 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c static bool ixgbe_xmit_zc(struct ixgbe_ring *xdp_ring, unsigned int budget)
xdp_ring          586 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		if (unlikely(!ixgbe_desc_unused(xdp_ring)) ||
xdp_ring          587 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		    !netif_carrier_ok(xdp_ring->netdev)) {
xdp_ring          592 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		if (!xsk_umem_consume_tx(xdp_ring->xsk_umem, &desc))
xdp_ring          595 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		dma = xdp_umem_get_dma(xdp_ring->xsk_umem, desc.addr);
xdp_ring          597 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		dma_sync_single_for_device(xdp_ring->dev, dma, desc.len,
xdp_ring          600 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		tx_bi = &xdp_ring->tx_buffer_info[xdp_ring->next_to_use];
xdp_ring          605 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		tx_desc = IXGBE_TX_DESC(xdp_ring, xdp_ring->next_to_use);
xdp_ring          617 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		xdp_ring->next_to_use++;
xdp_ring          618 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		if (xdp_ring->next_to_use == xdp_ring->count)
xdp_ring          619 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 			xdp_ring->next_to_use = 0;
xdp_ring          623 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		ixgbe_xdp_ring_update_tail(xdp_ring);
xdp_ring          624 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		xsk_umem_consume_tx_done(xdp_ring->xsk_umem);
xdp_ring          712 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	ring = adapter->xdp_ring[qid];
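
The ixgbe_xsk.c hits mirror the i40e zero-copy loop above (busy check, xsk_umem_consume_tx(), batched tail write and consume_tx_done), with the addition of a netif_carrier_ok() test, and the hit at line 712 is the AF_XDP wakeup path looking the ring up by queue id. A hedged sketch of that wakeup shape; the types and the plain napi_schedule() here are simplifications for illustration, not the driver's actual mechanism:

#include <linux/errno.h>
#include <linux/netdevice.h>

struct zc_tx_queue_sketch {
	void *xsk_umem;                /* non-NULL once an AF_XDP socket is bound */
	struct napi_struct *napi;      /* vector servicing this ring */
};

static int xsk_wakeup_sketch(struct zc_tx_queue_sketch **xdp_ring,
			     unsigned int num_queues, unsigned int qid)
{
	struct zc_tx_queue_sketch *ring;

	if (qid >= num_queues)
		return -EINVAL;

	ring = xdp_ring[qid];          /* same lookup as adapter->xdp_ring[qid] */
	if (!ring->xsk_umem)
		return -ENXIO;         /* no socket bound to this queue */

	napi_schedule(ring->napi);     /* let the zero-copy send loop drain the UMEM */
	return 0;
}
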
xdp_ring          271 drivers/net/ethernet/intel/ixgbevf/ethtool.c 			adapter->xdp_ring[i]->count = new_tx_count;
xdp_ring          309 drivers/net/ethernet/intel/ixgbevf/ethtool.c 			tx_ring[i] = *adapter->xdp_ring[j];
xdp_ring          370 drivers/net/ethernet/intel/ixgbevf/ethtool.c 			ixgbevf_free_tx_resources(adapter->xdp_ring[j]);
xdp_ring          371 drivers/net/ethernet/intel/ixgbevf/ethtool.c 			*adapter->xdp_ring[j] = tx_ring[i];
xdp_ring          472 drivers/net/ethernet/intel/ixgbevf/ethtool.c 		ring = adapter->xdp_ring[j];
xdp_ring          336 drivers/net/ethernet/intel/ixgbevf/ixgbevf.h 	struct ixgbevf_ring *xdp_ring[MAX_XDP_QUEUES];
xdp_ring         1065 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 	struct ixgbevf_ring *xdp_ring;
xdp_ring         1080 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		xdp_ring = adapter->xdp_ring[rx_ring->queue_index];
xdp_ring         1081 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		result = ixgbevf_xmit_xdp_ring(xdp_ring, xdp);
xdp_ring         1234 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		struct ixgbevf_ring *xdp_ring =
xdp_ring         1235 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 			adapter->xdp_ring[rx_ring->queue_index];
xdp_ring         1241 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		ixgbevf_write_tail(xdp_ring, xdp_ring->next_to_use);
xdp_ring         1754 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		ixgbevf_configure_tx_ring(adapter, adapter->xdp_ring[i]);
xdp_ring         2457 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		ixgbevf_clean_tx_ring(adapter->xdp_ring[i]);
xdp_ring         2497 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		u8 reg_idx = adapter->xdp_ring[i]->reg_idx;
xdp_ring         2769 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		adapter->xdp_ring[xdp_idx] = ring;
xdp_ring         2826 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 			adapter->xdp_ring[ring->queue_index] = NULL;
xdp_ring         3200 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 			set_check_for_tx_hang(adapter->xdp_ring[i]);
xdp_ring         3373 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		if (adapter->xdp_ring[i]->desc)
xdp_ring         3374 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 			ixgbevf_free_tx_resources(adapter->xdp_ring[i]);
xdp_ring         3436 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		err = ixgbevf_setup_tx_resources(adapter->xdp_ring[j]);
xdp_ring         3447 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		ixgbevf_free_tx_resources(adapter->xdp_ring[j]);
xdp_ring         4405 drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c 		ring = adapter->xdp_ring[i];
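
The ixgbevf hits follow the same layout as ixgbe, and the hits at ixgbevf_main.c:1234-1241 (like the i40e ones at i40e_txrx.c:2310-2313) show the deferred doorbell: XDP_TX frames queued while polling RX are made visible to the NIC with a single tail write at the end of the poll, behind a flag. A hedged, self-contained sketch of that pattern; every type and helper below is a hypothetical stand-in:

#include <stdbool.h>

struct rx_queue;
struct tx_queue;
enum xdp_verdict { XDP_VERDICT_PASS, XDP_VERDICT_TX };

extern bool rx_has_work(struct rx_queue *rxq);
extern enum xdp_verdict run_xdp_prog(struct rx_queue *rxq, struct tx_queue *txq);
extern void write_barrier(void);     /* stands in for wmb() */
extern void write_tail(struct tx_queue *txq);

/* One tail write per poll, and only if an XDP_TX frame was queued. */
static int rx_poll_sketch(struct rx_queue *rxq, struct tx_queue *xdp_ring,
			  int budget)
{
	bool xdp_xmit = false;
	int done = 0;

	while (done < budget && rx_has_work(rxq)) {
		if (run_xdp_prog(rxq, xdp_ring) == XDP_VERDICT_TX)
			xdp_xmit = true;  /* descriptor filled, doorbell deferred */
		done++;
	}

	if (xdp_xmit) {
		write_barrier();          /* descriptors visible before the tail */
		write_tail(xdp_ring);     /* single MMIO write for the whole batch */
	}
	return done;
}
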
xdp_ring          425 drivers/net/ethernet/netronome/nfp/nfp_net.h 	struct nfp_net_tx_ring *xdp_ring;
xdp_ring         1819 drivers/net/ethernet/netronome/nfp/nfp_net_common.c 	tx_ring = r_vec->xdp_ring;
xdp_ring         2610 drivers/net/ethernet/netronome/nfp/nfp_net_common.c 	r_vec->xdp_ring = idx < dp->num_tx_rings - dp->num_stack_tx_rings ?
xdp_ring           87 drivers/net/ethernet/netronome/nfp/nfp_net_debugfs.c 		tx_ring = r_vec->xdp_ring;
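
In the nfp driver, xdp_ring is a per-vector pointer into the shared TX ring array: vectors whose index falls in the XDP region (the TX rings beyond the ones the stack uses) get a dedicated XDP TX ring, the rest get NULL, and the completion path (hit at line 1819) simply treats r_vec->xdp_ring as its tx_ring. A hedged sketch of that split; the indexing is an assumption reconstructed for illustration, not a quote of the driver:

struct nfp_tx_ring_sketch;

struct nfp_r_vector_sketch {
	struct nfp_tx_ring_sketch *xdp_ring;   /* NULL when the vector has no XDP TX ring */
};

static void assign_xdp_rings(struct nfp_r_vector_sketch *vecs,
			     unsigned int num_vecs,
			     struct nfp_tx_ring_sketch *tx_rings,
			     unsigned int num_tx, unsigned int num_stack_tx)
{
	unsigned int idx;

	/* The first num_stack_tx rings belong to the stack; the remainder
	 * are handed out to vectors as XDP TX rings (assumed layout). */
	for (idx = 0; idx < num_vecs; idx++)
		vecs[idx].xdp_ring = idx < num_tx - num_stack_tx ?
				     &tx_rings[num_stack_tx + idx] : NULL;
}
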
xdp_ring           57 drivers/net/veth.c 	struct ptr_ring		xdp_ring;
xdp_ring          220 drivers/net/veth.c 	if (unlikely(ptr_ring_produce(&rq->xdp_ring, skb))) {
xdp_ring          426 drivers/net/veth.c 	spin_lock(&rq->xdp_ring.producer_lock);
xdp_ring          432 drivers/net/veth.c 			     __ptr_ring_produce(&rq->xdp_ring, ptr))) {
xdp_ring          437 drivers/net/veth.c 	spin_unlock(&rq->xdp_ring.producer_lock);
xdp_ring          732 drivers/net/veth.c 		void *ptr = __ptr_ring_consume(&rq->xdp_ring);
xdp_ring          784 drivers/net/veth.c 		if (unlikely(!__ptr_ring_empty(&rq->xdp_ring))) {
xdp_ring          807 drivers/net/veth.c 		err = ptr_ring_init(&rq->xdp_ring, VETH_RING_SIZE, GFP_KERNEL);
xdp_ring          822 drivers/net/veth.c 		ptr_ring_cleanup(&priv->rq[i].xdp_ring, veth_ptr_free);
xdp_ring          845 drivers/net/veth.c 		ptr_ring_cleanup(&rq->xdp_ring, veth_ptr_free);
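
In veth, xdp_ring is not a hardware descriptor ring at all but a ptr_ring per receive queue: the peer's transmit path produces skb/frame pointers into it (taking the producer lock explicitly for bulk xmit), the NAPI poll consumes them with __ptr_ring_consume(), and the ring is created and destroyed with ptr_ring_init()/ptr_ring_cleanup(). A hedged kernel-style sketch of that producer/consumer use of the ptr_ring API, with the queue struct simplified:

#include <linux/ptr_ring.h>
#include <linux/skbuff.h>

#define RING_SIZE_SKETCH 256	/* veth uses its own VETH_RING_SIZE; arbitrary here */

struct rx_queue_sketch {
	struct ptr_ring xdp_ring;
};

static int rxq_init(struct rx_queue_sketch *rq)
{
	/* GFP_KERNEL allocation, as in the hit at veth.c:807. */
	return ptr_ring_init(&rq->xdp_ring, RING_SIZE_SKETCH, GFP_KERNEL);
}

static int rxq_enqueue(struct rx_queue_sketch *rq, struct sk_buff *skb)
{
	/* Non-zero when the ring is full; the caller drops the packet. */
	return ptr_ring_produce(&rq->xdp_ring, skb);
}

static void *rxq_dequeue(struct rx_queue_sketch *rq)
{
	/* Lockless variant; safe only from the single NAPI consumer. */
	return __ptr_ring_consume(&rq->xdp_ring);
}

static void rxq_free(struct rx_queue_sketch *rq, void (*destroy)(void *ptr))
{
	ptr_ring_cleanup(&rq->xdp_ring, destroy);
}
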
xdp_ring           28 net/xdp/xsk_queue.c 	return sizeof(struct xdp_ring) + q->nentries * sizeof(struct xdp_desc);
xdp_ring           49 net/xdp/xsk_queue.c 	q->ring = (struct xdp_ring *)__get_free_pages(gfp_flags,
xdp_ring           24 net/xdp/xsk_queue.h 	struct xdp_ring ptrs;
xdp_ring           30 net/xdp/xsk_queue.h 	struct xdp_ring ptrs;
xdp_ring           43 net/xdp/xsk_queue.h 	struct xdp_ring *ring;
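
The net/xdp hits are a different xdp_ring entirely: the header struct shared between kernel and user space at the start of every AF_XDP queue. xsk_queue.c sizes the mapping as sizeof(struct xdp_ring) plus nentries descriptors and allocates it with __get_free_pages(), while xsk_queue.h embeds it as the ptrs member of the RX/TX and fill/completion ring layouts. A simplified userspace-style sketch of that layout; the cacheline alignment annotations and exact uapi field set are approximated here:

#include <stdint.h>
#include <stddef.h>

struct xdp_desc_sketch {
	uint64_t addr;     /* offset into the UMEM */
	uint32_t len;
	uint32_t options;
};

struct xdp_ring_sketch {
	uint32_t producer;
	uint32_t consumer;
};

/* RX/TX queue: header followed by nentries descriptors. */
struct xdp_rxtx_ring_sketch {
	struct xdp_ring_sketch ptrs;          /* the "ptrs" member in the hits */
	struct xdp_desc_sketch desc[];        /* flexible array of descriptors */
};

/* Mirrors the size computation in the xsk_queue.c hit at line 28. */
static size_t ring_bytes(uint32_t nentries)
{
	return sizeof(struct xdp_ring_sketch) +
	       nentries * sizeof(struct xdp_desc_sketch);
}
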