xsk_umem         3141 drivers/net/ethernet/intel/i40e/i40e_main.c 		ring->xsk_umem = i40e_xsk_umem(ring);
xsk_umem         3263 drivers/net/ethernet/intel/i40e/i40e_main.c 	ring->xsk_umem = i40e_xsk_umem(ring);
xsk_umem         3264 drivers/net/ethernet/intel/i40e/i40e_main.c 	if (ring->xsk_umem) {
xsk_umem         3265 drivers/net/ethernet/intel/i40e/i40e_main.c 		ring->rx_buf_len = ring->xsk_umem->chunk_size_nohr -
xsk_umem         3347 drivers/net/ethernet/intel/i40e/i40e_main.c 	ok = ring->xsk_umem ?
xsk_umem         3356 drivers/net/ethernet/intel/i40e/i40e_main.c 			 ring->xsk_umem ? "UMEM enabled " : "",
xsk_umem         12548 drivers/net/ethernet/intel/i40e/i40e_main.c 			if (vsi->xdp_rings[i]->xsk_umem)
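
The i40e_main.c hits above are the configuration-time half of the pattern: when a ring is brought up, the driver resolves the UMEM bound to that queue (i40e_xsk_umem() returns NULL when the queue is not in AF_XDP zero-copy mode) and, if one exists, sizes the RX buffers from the UMEM chunk geometry. A condensed, hedged sketch of the lines 3263-3265 pattern:

    /* Condensed from the i40e_main.c hits above; not the literal
     * driver code. A UMEM chunk must hold the packet plus the
     * headroom XDP programs expect in front of the data.
     */
    ring->xsk_umem = i40e_xsk_umem(ring);
    if (ring->xsk_umem)
            ring->rx_buf_len = ring->xsk_umem->chunk_size_nohr -
                               XDP_PACKET_HEADROOM;
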
xsk_umem          638 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	if (ring_is_xdp(tx_ring) && tx_ring->xsk_umem) {
xsk_umem         1357 drivers/net/ethernet/intel/i40e/i40e_txrx.c 	if (rx_ring->xsk_umem) {
xsk_umem         2588 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		bool wd = ring->xsk_umem ?
xsk_umem         2610 drivers/net/ethernet/intel/i40e/i40e_txrx.c 		int cleaned = ring->xsk_umem ?
xsk_umem          421 drivers/net/ethernet/intel/i40e/i40e_txrx.h 	struct xdp_umem *xsk_umem;
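
The i40e_txrx.c and i40e_txrx.h hits show the hot-path half: the xsk_umem pointer lives directly on the ring structure (i40e_txrx.h line 421), so the same NAPI poll loop serves both copy-mode and zero-copy rings by branching on it. Lines 2588 and 2610 are that dispatch; a hedged sketch of its shape, with i40e's real clean routines named but the argument shapes treated as approximate:

    /* Condensed from the i40e_napi_poll() hits above (i40e_txrx.c
     * lines 2588/2610): one poll loop, two clean paths per ring.
     */
    bool wd = ring->xsk_umem ?
              i40e_clean_xdp_tx_irq(vsi, ring, budget) :
              i40e_clean_tx_irq(vsi, ring, budget);
    /* ... */
    int cleaned = ring->xsk_umem ?
                  i40e_clean_rx_irq_zc(ring, budget_per_ring) :
                  i40e_clean_rx_irq(ring, budget_per_ring);
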
xsk_umem          193 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	struct xdp_umem *umem = rx_ring->xsk_umem;
xsk_umem          248 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	struct xdp_umem *umem = rx_ring->xsk_umem;
xsk_umem          289 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	struct xdp_umem *umem = rx_ring->xsk_umem;
xsk_umem          297 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	handle &= rx_ring->xsk_umem->chunk_mask;
xsk_umem          455 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	hr = rx_ring->xsk_umem->headroom + XDP_PACKET_HEADROOM;
xsk_umem          456 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	mask = rx_ring->xsk_umem->chunk_mask;
xsk_umem          466 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	bi->dma = xdp_umem_get_dma(rx_ring->xsk_umem, handle);
xsk_umem          469 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	bi->addr = xdp_umem_get_data(rx_ring->xsk_umem, handle);
xsk_umem          472 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	bi->handle = xsk_umem_adjust_offset(rx_ring->xsk_umem, (u64)handle,
xsk_umem          473 drivers/net/ethernet/intel/i40e/i40e_xsk.c 					    rx_ring->xsk_umem->headroom);
xsk_umem          629 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	if (xsk_umem_uses_need_wakeup(rx_ring->xsk_umem)) {
xsk_umem          631 drivers/net/ethernet/intel/i40e/i40e_xsk.c 			xsk_set_rx_need_wakeup(rx_ring->xsk_umem);
xsk_umem          633 drivers/net/ethernet/intel/i40e/i40e_xsk.c 			xsk_clear_rx_need_wakeup(rx_ring->xsk_umem);
xsk_umem          662 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		if (!xsk_umem_consume_tx(xdp_ring->xsk_umem, &desc))
xsk_umem          665 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		dma = xdp_umem_get_dma(xdp_ring->xsk_umem, desc.addr);
xsk_umem          691 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		xsk_umem_consume_tx_done(xdp_ring->xsk_umem);
xsk_umem          724 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	struct xdp_umem *umem = tx_ring->xsk_umem;
xsk_umem          770 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	if (xsk_umem_uses_need_wakeup(tx_ring->xsk_umem))
xsk_umem          771 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		xsk_set_tx_need_wakeup(tx_ring->xsk_umem);
xsk_umem          805 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	if (!vsi->xdp_rings[queue_id]->xsk_umem)
xsk_umem          832 drivers/net/ethernet/intel/i40e/i40e_xsk.c 		xsk_umem_fq_reuse(rx_ring->xsk_umem, rx_bi->handle);
xsk_umem          844 drivers/net/ethernet/intel/i40e/i40e_xsk.c 	struct xdp_umem *umem = tx_ring->xsk_umem;
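
i40e_xsk.c holds the zero-copy implementation itself, and three motifs recur in the hits: buffer allocation translates a UMEM handle into premapped DMA and kernel addresses (lines 455-473), the need_wakeup flags are set or cleared as RX falls behind or catches up (lines 629-633), and TX pulls descriptors straight off the socket's TX ring (lines 662-691). A hedged reconstruction of that TX loop, with the hardware-descriptor fill elided:

    /* Hedged reconstruction of the i40e_xmit_zc() loop implied by
     * i40e_xsk.c lines 662-691 above; HW descriptor setup omitted.
     */
    static bool i40e_xmit_zc_sketch(struct i40e_ring *xdp_ring,
                                    unsigned int budget)
    {
            struct xdp_desc desc;
            dma_addr_t dma;

            while (budget-- > 0) {
                    /* Next frame the application posted for TX. */
                    if (!xsk_umem_consume_tx(xdp_ring->xsk_umem, &desc))
                            break;

                    /* UMEM pages are premapped at bind time, so this
                     * is an address translation, not a dma_map call.
                     */
                    dma = xdp_umem_get_dma(xdp_ring->xsk_umem, desc.addr);

                    /* ... write one HW TX descriptor for (dma, desc.len) ... */
            }

            /* Publish the consumer index back to the socket in one go. */
            xsk_umem_consume_tx_done(xdp_ring->xsk_umem);
            return true;
    }
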
xsk_umem          353 drivers/net/ethernet/intel/ixgbe/ixgbe.h 	struct xdp_umem *xsk_umem;
xsk_umem         3171 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		bool wd = ring->xsk_umem ?
xsk_umem         3191 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		int cleaned = ring->xsk_umem ?
xsk_umem         3486 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	ring->xsk_umem = NULL;
xsk_umem         3488 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		ring->xsk_umem = ixgbe_xsk_umem(adapter, ring);
xsk_umem         3728 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	if (rx_ring->xsk_umem) {
xsk_umem         3729 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		u32 xsk_buf_len = rx_ring->xsk_umem->chunk_size_nohr -
xsk_umem         4075 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	ring->xsk_umem = ixgbe_xsk_umem(adapter, ring);
xsk_umem         4076 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	if (ring->xsk_umem) {
xsk_umem         4136 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	if (ring->xsk_umem && hw->mac.type != ixgbe_mac_82599EB) {
xsk_umem         4137 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 		u32 xsk_buf_len = ring->xsk_umem->chunk_size_nohr -
xsk_umem         4160 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	if (ring->xsk_umem)
xsk_umem         5293 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	if (rx_ring->xsk_umem) {
xsk_umem         5981 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 	if (tx_ring->xsk_umem) {
xsk_umem         10293 drivers/net/ethernet/intel/ixgbe/ixgbe_main.c 			if (adapter->xdp_ring[i]->xsk_umem)
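
The ixgbe hits mirror the i40e structure almost one for one: the same xsk_umem pointer on the ring (ixgbe.h line 353), the same poll-loop dispatch (ixgbe_main.c lines 3171/3191), and the same resolve-on-ring-setup step (lines 3486/3488 and 4075), with line 4136 additionally gating the zero-copy buffer sizing on MAC type. The lookup helper itself is not in the hit list, but its job is clear from the call sites; a hedged sketch, assuming the qid-based resolution used by the AF_XDP core (xdp_get_umem_from_qid() is the core helper; the ixgbe field names here are illustrative):

    /* Hedged sketch of ixgbe_xsk_umem(): return the UMEM bound to
     * this queue only when an XDP program is attached and the queue
     * is flagged for zero copy; otherwise NULL, which steers every
     * ring->xsk_umem test above onto the copy path.
     */
    struct xdp_umem *ixgbe_xsk_umem(struct ixgbe_adapter *adapter,
                                    struct ixgbe_ring *ring)
    {
            bool xdp_on = READ_ONCE(adapter->xdp_prog);
            int qid = ring->ring_idx;

            if (!xdp_on || !test_bit(qid, adapter->af_xdp_zc_qps))
                    return NULL;

            return xdp_get_umem_from_qid(adapter->netdev, qid);
    }
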
xsk_umem          146 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	struct xdp_umem *umem = rx_ring->xsk_umem;
xsk_umem          234 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	hr = rx_ring->xsk_umem->headroom + XDP_PACKET_HEADROOM;
xsk_umem          235 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	mask = rx_ring->xsk_umem->chunk_mask;
xsk_umem          245 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	bi->dma = xdp_umem_get_dma(rx_ring->xsk_umem, handle);
xsk_umem          248 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	bi->addr = xdp_umem_get_data(rx_ring->xsk_umem, handle);
xsk_umem          251 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	bi->handle = xsk_umem_adjust_offset(rx_ring->xsk_umem, (u64)handle,
xsk_umem          252 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 					    rx_ring->xsk_umem->headroom);
xsk_umem          258 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	struct xdp_umem *umem = rx_ring->xsk_umem;
xsk_umem          287 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	struct xdp_umem *umem = rx_ring->xsk_umem;
xsk_umem          295 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	handle &= rx_ring->xsk_umem->chunk_mask;
xsk_umem          549 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	if (xsk_umem_uses_need_wakeup(rx_ring->xsk_umem)) {
xsk_umem          551 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 			xsk_set_rx_need_wakeup(rx_ring->xsk_umem);
xsk_umem          553 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 			xsk_clear_rx_need_wakeup(rx_ring->xsk_umem);
xsk_umem          566 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		xsk_umem_fq_reuse(rx_ring->xsk_umem, bi->handle);
xsk_umem          592 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		if (!xsk_umem_consume_tx(xdp_ring->xsk_umem, &desc))
xsk_umem          595 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		dma = xdp_umem_get_dma(xdp_ring->xsk_umem, desc.addr);
xsk_umem          624 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		xsk_umem_consume_tx_done(xdp_ring->xsk_umem);
xsk_umem          645 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	struct xdp_umem *umem = tx_ring->xsk_umem;
xsk_umem          692 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	if (xsk_umem_uses_need_wakeup(tx_ring->xsk_umem))
xsk_umem          693 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 		xsk_set_tx_need_wakeup(tx_ring->xsk_umem);
xsk_umem          717 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	if (!ring->xsk_umem)
xsk_umem          732 drivers/net/ethernet/intel/ixgbe/ixgbe_xsk.c 	struct xdp_umem *umem = tx_ring->xsk_umem;
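
ixgbe_xsk.c also shows both halves of the need_wakeup handshake (lines 549-553 for RX, 692-693 for TX): when the driver stalls for buffers or descriptors it sets the flag, telling userspace it must kick the kernel with a syscall; once it catches up it clears the flag so the application can stay in pure userspace. A hedged sketch of the RX half around line 549, with the failure/budget bookkeeping condensed:

    /* Hedged sketch of the RX need_wakeup handshake from the
     * ixgbe_xsk.c hits above; 'failure' means the fill queue ran dry.
     */
    if (xsk_umem_uses_need_wakeup(rx_ring->xsk_umem)) {
            if (failure || rx_ring->next_to_clean == rx_ring->next_to_use)
                    xsk_set_rx_need_wakeup(rx_ring->xsk_umem);   /* please kick us */
            else
                    xsk_clear_rx_need_wakeup(rx_ring->xsk_umem); /* no kick needed */
    }
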
xsk_umem           83 samples/bpf/xdpsock_user.c 	struct xsk_umem *umem;
xsk_umem          198 samples/bpf/xdpsock_user.c 	struct xsk_umem *umem = xsks[0]->umem->umem;
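
In the sample, note the double indirection at line 198 (xsks[0]->umem->umem): xdpsock_user.c wraps libbpf's opaque handle in its own struct, keeping the fill/completion rings and the mmapped buffer next to it. A hedged sketch of that wrapper shape, with field order and the socket-side struct treated as illustrative:

    /* Wrapper shape implied by the xdpsock_user.c hits above. */
    struct xsk_umem_info {
            struct xsk_ring_prod fq;    /* fill queue: app hands buffers in  */
            struct xsk_ring_cons cq;    /* completion queue: TX buffers back */
            struct xsk_umem *umem;      /* opaque libbpf handle (line 83)    */
            void *buffer;               /* the packet memory itself          */
    };

    struct xsk_socket_info {
            struct xsk_ring_cons rx;
            struct xsk_ring_prod tx;
            struct xsk_umem_info *umem; /* hence xsks[0]->umem->umem */
            struct xsk_socket *xsk;
    };
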
xsk_umem           60 tools/lib/bpf/xsk.c 	struct xsk_umem *umem;
xsk_umem           91 tools/lib/bpf/xsk.c int xsk_umem__fd(const struct xsk_umem *umem)
xsk_umem          203 tools/lib/bpf/xsk.c int xsk_umem__create_v0_0_4(struct xsk_umem **umem_ptr, void *umem_area,
xsk_umem          210 tools/lib/bpf/xsk.c 	struct xsk_umem *umem;
xsk_umem          317 tools/lib/bpf/xsk.c int xsk_umem__create_v0_0_2(struct xsk_umem **umem_ptr, void *umem_area,
xsk_umem          559 tools/lib/bpf/xsk.c 		       __u32 queue_id, struct xsk_umem *umem,
xsk_umem          708 tools/lib/bpf/xsk.c int xsk_umem__delete(struct xsk_umem *umem)
xsk_umem           45 tools/lib/bpf/xsk.h struct xsk_umem;
xsk_umem          186 tools/lib/bpf/xsk.h LIBBPF_API int xsk_umem__fd(const struct xsk_umem *umem);
xsk_umem          216 tools/lib/bpf/xsk.h LIBBPF_API int xsk_umem__create(struct xsk_umem **umem,
xsk_umem          221 tools/lib/bpf/xsk.h LIBBPF_API int xsk_umem__create_v0_0_2(struct xsk_umem **umem,
xsk_umem          226 tools/lib/bpf/xsk.h LIBBPF_API int xsk_umem__create_v0_0_4(struct xsk_umem **umem,
xsk_umem          233 tools/lib/bpf/xsk.h 				  struct xsk_umem *umem,
xsk_umem          239 tools/lib/bpf/xsk.h LIBBPF_API int xsk_umem__delete(struct xsk_umem *umem);
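
Taken together, the xsk.h hits are the whole public UMEM lifecycle: xsk_umem__create() registers an application buffer with the kernel and hands back the fill and completion rings, xsk_umem__fd() exposes the underlying AF_XDP socket fd, the umem parameter at line 233 (part of xsk_socket__create's prototype) binds a socket to the UMEM on one queue, and xsk_umem__delete() unwinds it. The _v0_0_2/_v0_0_4 variants are versioned symbols kept for ABI compatibility; plain xsk_umem__create resolves to the newest. A minimal, hedged usage sketch against this API, with error handling condensed and NUM_FRAMES an illustrative constant:

    #include <stdlib.h>
    #include <unistd.h>
    #include <bpf/xsk.h>

    #define NUM_FRAMES 4096	/* illustrative */

    int main(void)
    {
            __u64 size = (__u64)NUM_FRAMES * XSK_UMEM__DEFAULT_FRAME_SIZE;
            struct xsk_ring_prod fq;
            struct xsk_ring_cons cq;
            struct xsk_umem *umem;
            void *buf;

            /* UMEM memory must be page-aligned. */
            if (posix_memalign(&buf, getpagesize(), size))
                    return 1;

            /* NULL config selects libbpf's defaults for frame and
             * ring sizes; fq/cq come back ready to use.
             */
            if (xsk_umem__create(&umem, buf, size, &fq, &cq, NULL))
                    return 1;

            /* ... xsk_socket__create(..., umem, ...), then the datapath ... */

            xsk_umem__delete(umem);
            free(buf);
            return 0;
    }
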