Lines Matching refs: reg_idx
49 u16 reg_idx; in ixgbe_cache_ring_dcb_sriov() local
61 reg_idx = vmdq->offset * __ALIGN_MASK(1, ~vmdq->mask); in ixgbe_cache_ring_dcb_sriov()
62 for (i = 0; i < adapter->num_rx_queues; i++, reg_idx++) { in ixgbe_cache_ring_dcb_sriov()
64 if ((reg_idx & ~vmdq->mask) >= tcs) in ixgbe_cache_ring_dcb_sriov()
65 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask); in ixgbe_cache_ring_dcb_sriov()
66 adapter->rx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_dcb_sriov()
69 reg_idx = vmdq->offset * __ALIGN_MASK(1, ~vmdq->mask); in ixgbe_cache_ring_dcb_sriov()
70 for (i = 0; i < adapter->num_tx_queues; i++, reg_idx++) { in ixgbe_cache_ring_dcb_sriov()
72 if ((reg_idx & ~vmdq->mask) >= tcs) in ixgbe_cache_ring_dcb_sriov()
73 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask); in ixgbe_cache_ring_dcb_sriov()
74 adapter->tx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_dcb_sriov()
91 reg_idx = (vmdq->offset + vmdq->indices) * queues_per_pool; in ixgbe_cache_ring_dcb_sriov()
93 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask) + fcoe_tc; in ixgbe_cache_ring_dcb_sriov()
94 adapter->rx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_dcb_sriov()
95 reg_idx++; in ixgbe_cache_ring_dcb_sriov()
98 reg_idx = (vmdq->offset + vmdq->indices) * queues_per_pool; in ixgbe_cache_ring_dcb_sriov()
100 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask) + fcoe_tc; in ixgbe_cache_ring_dcb_sriov()
101 adapter->tx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_dcb_sriov()
102 reg_idx++; in ixgbe_cache_ring_dcb_sriov()
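
The DCB + SR-IOV matches above all lean on one arithmetic trick: treating ~vmdq->mask as an alignment mask so that __ALIGN_MASK(1, ~vmdq->mask) yields the hardware queue stride per VMDq pool, and __ALIGN_MASK(reg_idx, ~vmdq->mask) rounds reg_idx up to the next pool boundary once a pool has handed out one queue per traffic class. A standalone sketch of that arithmetic follows; it assumes the standard kernel definition __ALIGN_MASK(x, mask) = ((x) + (mask)) & ~(mask), and the mask, offset and TC values are examples, not the driver's constants.

    #include <stdio.h>

    #define __ALIGN_MASK(x, mask) (((x) + (mask)) & ~(mask))

    int main(void)
    {
            unsigned int vmdq_mask = 0x78;  /* example: pool-index bits, 8 queues per pool */
            unsigned int queues_per_pool = __ALIGN_MASK(1, ~vmdq_mask);   /* -> 8 */
            unsigned int tcs = 4;           /* example traffic-class count */
            unsigned int vmdq_offset = 2;   /* example: PF pools start at pool 2 */
            unsigned int reg_idx = vmdq_offset * queues_per_pool;         /* as on line 61 */

            for (unsigned int i = 0; i < 12; i++, reg_idx++) {
                    /* Once the offset inside the pool reaches the TC count,
                     * round reg_idx up to the next pool boundary (lines 64-65). */
                    if ((reg_idx & ~vmdq_mask) >= tcs)
                            reg_idx = __ALIGN_MASK(reg_idx, ~vmdq_mask);
                    printf("ring %2u -> reg_idx %u\n", i, reg_idx);
            }
            return 0;
    }

With these example values each pool yields four consecutive register indices (16-19, then 24-27, then 32-35), i.e. one queue per traffic class before jumping to the next 8-queue pool, which is the pattern the Rx and Tx loops above produce.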
184 adapter->tx_ring[offset + i]->reg_idx = tx_idx; in ixgbe_cache_ring_dcb()
185 adapter->rx_ring[offset + i]->reg_idx = rx_idx; in ixgbe_cache_ring_dcb()
211 u16 reg_idx; in ixgbe_cache_ring_sriov() local
218 reg_idx = vmdq->offset * __ALIGN_MASK(1, ~vmdq->mask); in ixgbe_cache_ring_sriov()
219 for (i = 0; i < adapter->num_rx_queues; i++, reg_idx++) { in ixgbe_cache_ring_sriov()
226 if ((reg_idx & ~vmdq->mask) >= rss->indices) in ixgbe_cache_ring_sriov()
227 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask); in ixgbe_cache_ring_sriov()
228 adapter->rx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_sriov()
233 for (; i < adapter->num_rx_queues; i++, reg_idx++) in ixgbe_cache_ring_sriov()
234 adapter->rx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_sriov()
237 reg_idx = vmdq->offset * __ALIGN_MASK(1, ~vmdq->mask); in ixgbe_cache_ring_sriov()
238 for (i = 0; i < adapter->num_tx_queues; i++, reg_idx++) { in ixgbe_cache_ring_sriov()
245 if ((reg_idx & rss->mask) >= rss->indices) in ixgbe_cache_ring_sriov()
246 reg_idx = __ALIGN_MASK(reg_idx, ~vmdq->mask); in ixgbe_cache_ring_sriov()
247 adapter->tx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_sriov()
252 for (; i < adapter->num_tx_queues; i++, reg_idx++) in ixgbe_cache_ring_sriov()
253 adapter->tx_ring[i]->reg_idx = reg_idx; in ixgbe_cache_ring_sriov()
272 adapter->rx_ring[i]->reg_idx = i; in ixgbe_cache_ring_rss()
274 adapter->tx_ring[i]->reg_idx = i; in ixgbe_cache_ring_rss()
293 adapter->rx_ring[0]->reg_idx = 0; in ixgbe_cache_ring_register()
294 adapter->tx_ring[0]->reg_idx = 0; in ixgbe_cache_ring_register()