tun_tx_ix         516 drivers/infiniband/hw/mlx4/mad.c 	unsigned tun_tx_ix = 0;
tun_tx_ix         580 drivers/infiniband/hw/mlx4/mad.c 		tun_tx_ix = (++tun_qp->tx_ix_head) & (MLX4_NUM_TUNNEL_BUFS - 1);
tun_tx_ix         585 drivers/infiniband/hw/mlx4/mad.c 	tun_mad = (struct mlx4_rcv_tunnel_mad *) (tun_qp->tx_ring[tun_tx_ix].buf.addr);
tun_tx_ix         586 drivers/infiniband/hw/mlx4/mad.c 	if (tun_qp->tx_ring[tun_tx_ix].ah)
tun_tx_ix         587 drivers/infiniband/hw/mlx4/mad.c 		rdma_destroy_ah(tun_qp->tx_ring[tun_tx_ix].ah, 0);
tun_tx_ix         588 drivers/infiniband/hw/mlx4/mad.c 	tun_qp->tx_ring[tun_tx_ix].ah = ah;
tun_tx_ix         590 drivers/infiniband/hw/mlx4/mad.c 				   tun_qp->tx_ring[tun_tx_ix].buf.map,
tun_tx_ix         632 drivers/infiniband/hw/mlx4/mad.c 				      tun_qp->tx_ring[tun_tx_ix].buf.map,
tun_tx_ix         636 drivers/infiniband/hw/mlx4/mad.c 	list.addr = tun_qp->tx_ring[tun_tx_ix].buf.map;
tun_tx_ix         645 drivers/infiniband/hw/mlx4/mad.c 	wr.wr.wr_id = ((u64) tun_tx_ix) | MLX4_TUN_SET_WRID_QPN(dest_qpt);
tun_tx_ix         658 drivers/infiniband/hw/mlx4/mad.c 	tun_qp->tx_ring[tun_tx_ix].ah = NULL;
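
Taken together, the hits trace one lifecycle of the index in the MAD tunneling send path of that file: line 580 claims the next slot in the tunnel TX ring (free-running tx_ix_head/tx_ix_tail counters masked by MLX4_NUM_TUNNEL_BUFS - 1), lines 585-590 use it to pick the bounce buffer and address-handle slot, lines 632-636 use the same slot's DMA mapping for the scatter entry, line 645 folds the index into the 64-bit work-request id together with the destination QP type, and line 658 clears the slot's ah on the failure path. The sketch below reproduces just that index bookkeeping as standalone userspace C. It is a minimal illustration, not driver code: it assumes MLX4_NUM_TUNNEL_BUFS is a power of two (256 is used here, and the masking on line 580 requires a power of two), it assumes the QP-type bits of the wr_id sit above bit 31 in the spirit of MLX4_TUN_SET_WRID_QPN, and every demo_* name plus the main() driver is invented for this example.

/*
 * Standalone sketch of the ring-index scheme the listing above revolves
 * around.  Names taken from the listed lines (tx_ix_head, tx_ix_tail,
 * the wr_id packing) follow the driver; the DEMO_/demo_ identifiers, the
 * buffer count, and the shift used for the QP-type bits are assumptions.
 */
#include <errno.h>
#include <stdint.h>
#include <stdio.h>

#define DEMO_NUM_TUNNEL_BUFS 256   /* assumed power of two, standing in for MLX4_NUM_TUNNEL_BUFS */
#define DEMO_SET_WRID_QPN(qpt) (((uint64_t)((qpt) & 0x3)) << 32) /* illustrative stand-in for MLX4_TUN_SET_WRID_QPN */

struct demo_tun_qp {
	unsigned tx_ix_head;   /* advanced when a TX slot is claimed            */
	unsigned tx_ix_tail;   /* advanced when a TX completes or is rolled back */
};

/*
 * Claim the next TX ring slot, as on line 580.  The ring is treated as
 * full once head has run (DEMO_NUM_TUNNEL_BUFS - 1) slots ahead of tail;
 * otherwise the new index is the incremented head masked into the ring.
 * The driver does this under tun_qp->tx_lock; locking is omitted here.
 */
static int demo_claim_tx_ix(struct demo_tun_qp *qp, unsigned *tx_ix)
{
	if (qp->tx_ix_head - qp->tx_ix_tail >= (DEMO_NUM_TUNNEL_BUFS - 1))
		return -EAGAIN;
	*tx_ix = (++qp->tx_ix_head) & (DEMO_NUM_TUNNEL_BUFS - 1);
	return 0;
}

/*
 * Pack the slot index and destination QP type into one 64-bit work-request
 * id, mirroring the line
 *   wr.wr.wr_id = ((u64) tun_tx_ix) | MLX4_TUN_SET_WRID_QPN(dest_qpt);
 * so the completion handler can recover both from wc->wr_id.
 */
static uint64_t demo_make_wr_id(unsigned tx_ix, int dest_qpt)
{
	return (uint64_t)tx_ix | DEMO_SET_WRID_QPN(dest_qpt);
}

/*
 * On a failed post the driver gives the slot back by bumping tx_ix_tail
 * and clearing tx_ring[tun_tx_ix].ah (line 658); only the tail bump is
 * modeled here.
 */
static void demo_release_tx_ix(struct demo_tun_qp *qp)
{
	qp->tx_ix_tail++;
}

int main(void)
{
	struct demo_tun_qp qp = { 0 };
	unsigned tx_ix;

	if (demo_claim_tx_ix(&qp, &tx_ix) == 0) {
		uint64_t wr_id = demo_make_wr_id(tx_ix, 1 /* arbitrary QP-type value for the demo */);
		printf("claimed slot %u, wr_id 0x%llx\n",
		       tx_ix, (unsigned long long)wr_id);
		/* pretend the post failed and return the slot */
		demo_release_tx_ix(&qp);
	}
	return 0;
}

The design works without a separate "full" flag because the head and tail counters are free-running unsigned values: their difference is the number of outstanding slots even across wraparound, and only the masked value ever indexes tx_ring[].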