Lines matching refs: ring
801 if (q_vector->rx.ring) in igb_assign_vector()
802 rx_queue = q_vector->rx.ring->reg_idx; in igb_assign_vector()
803 if (q_vector->tx.ring) in igb_assign_vector()
804 tx_queue = q_vector->tx.ring->reg_idx; in igb_assign_vector()
962 if (q_vector->rx.ring && q_vector->tx.ring) in igb_request_msix()
964 q_vector->rx.ring->queue_index); in igb_request_msix()
965 else if (q_vector->tx.ring) in igb_request_msix()
967 q_vector->tx.ring->queue_index); in igb_request_msix()
968 else if (q_vector->rx.ring) in igb_request_msix()
970 q_vector->rx.ring->queue_index); in igb_request_msix()
1035 if (q_vector->tx.ring) in igb_reset_q_vector()
1036 adapter->tx_ring[q_vector->tx.ring->queue_index] = NULL; in igb_reset_q_vector()
1038 if (q_vector->rx.ring) in igb_reset_q_vector()
1039 adapter->rx_ring[q_vector->rx.ring->queue_index] = NULL; in igb_reset_q_vector()
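The matches in igb_assign_vector(), igb_request_msix() and igb_reset_q_vector() all show the same guard: a q_vector may own a Tx ring, an Rx ring, or both, so the container's ->ring pointer is tested before its reg_idx or queue_index is used. A minimal sketch of that pattern, assuming the driver's igb.h types (IGB_N0_QUEUE is the driver's "no queue" sentinel; the function name is hypothetical):

static void sketch_assign_vector(struct igb_q_vector *q_vector)
{
    int rx_queue = IGB_N0_QUEUE;
    int tx_queue = IGB_N0_QUEUE;

    /* a vector only carries a ring for the directions it services */
    if (q_vector->rx.ring)
        rx_queue = q_vector->rx.ring->reg_idx;
    if (q_vector->tx.ring)
        tx_queue = q_vector->tx.ring->reg_idx;

    /* ... program the vector's MSI-X IVAR entry from rx_queue/tx_queue ... */
}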
1170 static void igb_add_ring(struct igb_ring *ring, in igb_add_ring() argument
1173 head->ring = ring; in igb_add_ring()
1195 struct igb_ring *ring; in igb_alloc_q_vector() local
1235 ring = q_vector->ring; in igb_alloc_q_vector()
1250 ring->dev = &adapter->pdev->dev; in igb_alloc_q_vector()
1251 ring->netdev = adapter->netdev; in igb_alloc_q_vector()
1254 ring->q_vector = q_vector; in igb_alloc_q_vector()
1257 igb_add_ring(ring, &q_vector->tx); in igb_alloc_q_vector()
1261 set_bit(IGB_RING_FLAG_TX_CTX_IDX, &ring->flags); in igb_alloc_q_vector()
1264 ring->count = adapter->tx_ring_count; in igb_alloc_q_vector()
1265 ring->queue_index = txr_idx; in igb_alloc_q_vector()
1267 u64_stats_init(&ring->tx_syncp); in igb_alloc_q_vector()
1268 u64_stats_init(&ring->tx_syncp2); in igb_alloc_q_vector()
1271 adapter->tx_ring[txr_idx] = ring; in igb_alloc_q_vector()
1274 ring++; in igb_alloc_q_vector()
1279 ring->dev = &adapter->pdev->dev; in igb_alloc_q_vector()
1280 ring->netdev = adapter->netdev; in igb_alloc_q_vector()
1283 ring->q_vector = q_vector; in igb_alloc_q_vector()
1286 igb_add_ring(ring, &q_vector->rx); in igb_alloc_q_vector()
1290 set_bit(IGB_RING_FLAG_RX_SCTP_CSUM, &ring->flags); in igb_alloc_q_vector()
1296 set_bit(IGB_RING_FLAG_RX_LB_VLAN_BSWAP, &ring->flags); in igb_alloc_q_vector()
1299 ring->count = adapter->rx_ring_count; in igb_alloc_q_vector()
1300 ring->queue_index = rxr_idx; in igb_alloc_q_vector()
1302 u64_stats_init(&ring->rx_syncp); in igb_alloc_q_vector()
1305 adapter->rx_ring[rxr_idx] = ring; in igb_alloc_q_vector()
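The igb_alloc_q_vector() matches fit together as one sequence: the ring structures trail the q_vector allocation, ring = q_vector->ring picks the first of them, igb_add_ring() links it into the Tx or Rx container (line 1173 shows it simply stores head->ring = ring), and ring++ moves on to the storage used for the Rx ring. A condensed sketch assuming the driver's igb.h types, one Tx plus one Rx ring per vector, and omitting the NAPI, ITR and flag setup (the function name is hypothetical):

static struct igb_q_vector *sketch_alloc_q_vector(struct igb_adapter *adapter,
                                                  int txr_idx, int rxr_idx)
{
    struct igb_q_vector *q_vector;
    struct igb_ring *ring;

    /* one Tx and one Rx ring live in memory trailing the q_vector */
    q_vector = kzalloc(struct_size(q_vector, ring, 2), GFP_KERNEL);
    if (!q_vector)
        return NULL;

    ring = q_vector->ring;                  /* first trailing ring: Tx */
    ring->dev = &adapter->pdev->dev;
    ring->netdev = adapter->netdev;
    ring->q_vector = q_vector;
    igb_add_ring(ring, &q_vector->tx);      /* head->ring = ring */
    ring->count = adapter->tx_ring_count;
    ring->queue_index = txr_idx;
    u64_stats_init(&ring->tx_syncp);
    u64_stats_init(&ring->tx_syncp2);
    adapter->tx_ring[txr_idx] = ring;

    ring++;                                 /* second trailing ring: Rx */
    ring->dev = &adapter->pdev->dev;
    ring->netdev = adapter->netdev;
    ring->q_vector = q_vector;
    igb_add_ring(ring, &q_vector->rx);
    ring->count = adapter->rx_ring_count;
    ring->queue_index = rxr_idx;
    u64_stats_init(&ring->rx_syncp);
    adapter->rx_ring[rxr_idx] = ring;

    return q_vector;
}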
1621 struct igb_ring *ring = adapter->rx_ring[i]; in igb_configure() local
1622 igb_alloc_rx_buffers(ring, igb_desc_unused(ring)); in igb_configure()
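Line 1622 pre-fills each Rx ring with as many buffers as there are unused descriptors. igb_desc_unused() itself does not appear in the matches, so the arithmetic below is a hedged reconstruction of the usual circular-ring computation over next_to_use/next_to_clean, with one slot always held back so a full ring can be told apart from an empty one:

static inline unsigned int sketch_desc_unused(unsigned int count,
                                              unsigned int next_to_use,
                                              unsigned int next_to_clean)
{
    /* slots from next_to_use up to (but not including) next_to_clean
     * are available; subtract one so the ring is never filled completely
     */
    if (next_to_clean > next_to_use)
        return next_to_clean - next_to_use - 1;

    return count + next_to_clean - next_to_use - 1;
}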
3271 struct igb_ring *ring) in igb_configure_tx_ring() argument
3275 u64 tdba = ring->dma; in igb_configure_tx_ring()
3276 int reg_idx = ring->reg_idx; in igb_configure_tx_ring()
3284 ring->count * sizeof(union e1000_adv_tx_desc)); in igb_configure_tx_ring()
3289 ring->tail = hw->hw_addr + E1000_TDT(reg_idx); in igb_configure_tx_ring()
3291 writel(0, ring->tail); in igb_configure_tx_ring()
3627 struct igb_ring *ring) in igb_configure_rx_ring() argument
3630 u64 rdba = ring->dma; in igb_configure_rx_ring()
3631 int reg_idx = ring->reg_idx; in igb_configure_rx_ring()
3642 ring->count * sizeof(union e1000_adv_rx_desc)); in igb_configure_rx_ring()
3645 ring->tail = hw->hw_addr + E1000_RDT(reg_idx); in igb_configure_rx_ring()
3647 writel(0, ring->tail); in igb_configure_rx_ring()
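igb_configure_tx_ring() and igb_configure_rx_ring() follow the same shape: program the ring length and the 64-bit DMA base of the descriptor area, cache the mapped tail register in ring->tail so the hot path can bump it with a plain writel(), then zero head and tail. A sketch of the Tx side; the E1000_TDLEN/TDBAL/TDBAH/TDH macros and the use of raw writel() instead of the driver's own register wrapper are assumptions here, only E1000_TDT appears in the matches:

static void sketch_configure_tx_ring(struct e1000_hw *hw, struct igb_ring *ring)
{
    u64 tdba = ring->dma;           /* bus address of the descriptor area */
    int reg_idx = ring->reg_idx;

    writel(ring->count * sizeof(union e1000_adv_tx_desc),
           hw->hw_addr + E1000_TDLEN(reg_idx));
    writel(tdba & 0x00000000ffffffffULL,
           hw->hw_addr + E1000_TDBAL(reg_idx));
    writel(tdba >> 32, hw->hw_addr + E1000_TDBAH(reg_idx));

    /* remember the tail register for the transmit path, then reset
     * head and tail to descriptor zero
     */
    ring->tail = hw->hw_addr + E1000_TDT(reg_idx);
    writel(0, hw->hw_addr + E1000_TDH(reg_idx));
    writel(0, ring->tail);
}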
3735 void igb_unmap_and_free_tx_resource(struct igb_ring *ring, in igb_unmap_and_free_tx_resource() argument
3741 dma_unmap_single(ring->dev, in igb_unmap_and_free_tx_resource()
3746 dma_unmap_page(ring->dev, in igb_unmap_and_free_tx_resource()
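The two unmap calls at lines 3741 and 3746 correspond to the two ways a Tx buffer may have been mapped: the skb head with dma_map_single(), paged fragments with dma_map_page()/skb_frag_dma_map(). A sketch of that branch using the kernel's dma_unmap_addr()/dma_unmap_len() accessors; the struct igb_tx_buffer field names mirror the driver but are illustrative here:

static void sketch_unmap_tx_buffer(struct device *dev,
                                   struct igb_tx_buffer *tx_buffer)
{
    if (tx_buffer->skb) {
        dev_kfree_skb_any(tx_buffer->skb);
        if (dma_unmap_len(tx_buffer, len))
            dma_unmap_single(dev,
                             dma_unmap_addr(tx_buffer, dma),
                             dma_unmap_len(tx_buffer, len),
                             DMA_TO_DEVICE);
    } else if (dma_unmap_len(tx_buffer, len)) {
        dma_unmap_page(dev,
                       dma_unmap_addr(tx_buffer, dma),
                       dma_unmap_len(tx_buffer, len),
                       DMA_TO_DEVICE);
    }

    tx_buffer->skb = NULL;
    dma_unmap_len_set(tx_buffer, len, 0);
}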
4479 ((q_vector->rx.ring && adapter->rx_itr_setting == 3) || in igb_update_ring_itr()
4480 (!q_vector->rx.ring && adapter->tx_itr_setting == 3))) in igb_update_ring_itr()
4583 ((q_vector->rx.ring && adapter->rx_itr_setting == 3) || in igb_set_itr()
4584 (!q_vector->rx.ring && adapter->tx_itr_setting == 3))) in igb_set_itr()
5238 struct igb_ring *ring = adapter->rx_ring[i]; in igb_update_stats() local
5244 ring->rx_stats.drops += rqdpc; in igb_update_stats()
5249 start = u64_stats_fetch_begin_irq(&ring->rx_syncp); in igb_update_stats()
5250 _bytes = ring->rx_stats.bytes; in igb_update_stats()
5251 _packets = ring->rx_stats.packets; in igb_update_stats()
5252 } while (u64_stats_fetch_retry_irq(&ring->rx_syncp, start)); in igb_update_stats()
5263 struct igb_ring *ring = adapter->tx_ring[i]; in igb_update_stats() local
5265 start = u64_stats_fetch_begin_irq(&ring->tx_syncp); in igb_update_stats()
5266 _bytes = ring->tx_stats.bytes; in igb_update_stats()
5267 _packets = ring->tx_stats.packets; in igb_update_stats()
5268 } while (u64_stats_fetch_retry_irq(&ring->tx_syncp, start)); in igb_update_stats()
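Both loops in igb_update_stats() read the per-ring byte and packet counters under the ring's u64_stats seqcount: the fetch/retry pair repeats the read whenever a writer updated the counters in between, which is what keeps 64-bit statistics consistent on 32-bit hosts. A sketch of the Rx-side reader (the _irq variants follow the kernel version this listing comes from):

static void sketch_read_rx_stats(struct igb_ring *ring, u64 *bytes, u64 *packets)
{
    unsigned int start;

    do {
        start = u64_stats_fetch_begin_irq(&ring->rx_syncp);
        *bytes = ring->rx_stats.bytes;
        *packets = ring->rx_stats.packets;
    } while (u64_stats_fetch_retry_irq(&ring->rx_syncp, start));
}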
5595 if (q_vector->tx.ring) in igb_update_dca()
5596 igb_update_tx_dca(adapter, q_vector->tx.ring, cpu); in igb_update_dca()
5598 if (q_vector->rx.ring) in igb_update_dca()
5599 igb_update_rx_dca(adapter, q_vector->rx.ring, cpu); in igb_update_dca()
6348 if ((q_vector->rx.ring && (adapter->rx_itr_setting & 3)) || in igb_ring_irq_enable()
6349 (!q_vector->rx.ring && (adapter->tx_itr_setting & 3))) { in igb_ring_irq_enable()
6380 if (q_vector->tx.ring) in igb_poll()
6383 if (q_vector->rx.ring) in igb_poll()
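The igb_poll() matches (lines 6380/6383), together with igb_ring_irq_enable() above, outline the per-vector NAPI handler: the vector cleans whichever rings it owns and re-arms its interrupt only once both directions report completion. A compressed sketch of that flow; sketch_clean_tx()/sketch_clean_rx() are hypothetical stand-ins for igb_clean_tx_irq()/igb_clean_rx_irq(), and napi_complete_done() is used here in place of whatever completion call the listed kernel version uses:

/* hypothetical stand-ins for igb_clean_tx_irq() / igb_clean_rx_irq() */
static bool sketch_clean_tx(struct igb_q_vector *q_vector);
static int sketch_clean_rx(struct igb_q_vector *q_vector, int budget);

static int sketch_poll(struct napi_struct *napi, int budget)
{
    struct igb_q_vector *q_vector =
        container_of(napi, struct igb_q_vector, napi);
    bool clean_complete = true;
    int work_done = 0;

    if (q_vector->tx.ring)
        clean_complete = sketch_clean_tx(q_vector);

    if (q_vector->rx.ring) {
        int cleaned = sketch_clean_rx(q_vector, budget);

        work_done += cleaned;
        if (cleaned >= budget)
            clean_complete = false;
    }

    if (!clean_complete)
        return budget;                  /* more work: stay in polled mode */

    if (napi_complete_done(napi, work_done))
        igb_ring_irq_enable(q_vector);  /* re-arm this vector's interrupt */

    return work_done;
}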
6406 struct igb_ring *tx_ring = q_vector->tx.ring; in igb_clean_tx_irq()
6745 static inline void igb_rx_checksum(struct igb_ring *ring, in igb_rx_checksum() argument
6756 if (!(ring->netdev->features & NETIF_F_RXCSUM)) in igb_rx_checksum()
6768 test_bit(IGB_RING_FLAG_RX_SCTP_CSUM, &ring->flags))) { in igb_rx_checksum()
6769 u64_stats_update_begin(&ring->rx_syncp); in igb_rx_checksum()
6770 ring->rx_stats.csum_err++; in igb_rx_checksum()
6771 u64_stats_update_end(&ring->rx_syncp); in igb_rx_checksum()
6781 dev_dbg(ring->dev, "cksum success: bits %08X\n", in igb_rx_checksum()
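igb_rx_checksum() shows the writer side of the same u64_stats scheme: the per-ring csum_err counter is bumped inside u64_stats_update_begin()/_end() on rx_syncp so concurrent readers retry rather than observe a torn value, and the whole check is skipped when the netdev does not advertise NETIF_F_RXCSUM. A small sketch of just that error-accounting path:

static void sketch_count_csum_error(struct igb_ring *ring)
{
    /* nothing to count when receive checksum offload is disabled */
    if (!(ring->netdev->features & NETIF_F_RXCSUM))
        return;

    u64_stats_update_begin(&ring->rx_syncp);
    ring->rx_stats.csum_err++;
    u64_stats_update_end(&ring->rx_syncp);
}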
6785 static inline void igb_rx_hash(struct igb_ring *ring, in igb_rx_hash() argument
6789 if (ring->netdev->features & NETIF_F_RXHASH) in igb_rx_hash()
6961 struct igb_ring *rx_ring = q_vector->rx.ring; in igb_clean_rx_irq()