Lines matching refs:ring (cross-reference hits for the identifier "ring")

904 void ixgbe_unmap_and_free_tx_resource(struct ixgbe_ring *ring,  in ixgbe_unmap_and_free_tx_resource()  argument
910 dma_unmap_single(ring->dev, in ixgbe_unmap_and_free_tx_resource()
915 dma_unmap_page(ring->dev, in ixgbe_unmap_and_free_tx_resource()
999 static u64 ixgbe_get_tx_completed(struct ixgbe_ring *ring) in ixgbe_get_tx_completed() argument
1001 return ring->stats.packets; in ixgbe_get_tx_completed()
1004 static u64 ixgbe_get_tx_pending(struct ixgbe_ring *ring) in ixgbe_get_tx_pending() argument
1010 if (ring->l2_accel_priv) in ixgbe_get_tx_pending()
1011 adapter = ring->l2_accel_priv->real_adapter; in ixgbe_get_tx_pending()
1013 adapter = netdev_priv(ring->netdev); in ixgbe_get_tx_pending()
1016 head = IXGBE_READ_REG(hw, IXGBE_TDH(ring->reg_idx)); in ixgbe_get_tx_pending()
1017 tail = IXGBE_READ_REG(hw, IXGBE_TDT(ring->reg_idx)); in ixgbe_get_tx_pending()
1021 tail - head : (tail + ring->count - head); in ixgbe_get_tx_pending()
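
The ixgbe_get_tx_pending() lines above read the hardware head (TDH) and tail (TDT) pointers and derive how many descriptors are still outstanding, wrapping at ring->count. A minimal standalone sketch of that arithmetic (function and parameter names are illustrative, not the driver's):

    static unsigned int pending_descriptors(unsigned int head, unsigned int tail,
                                            unsigned int count)
    {
            /* head == tail means the hardware has caught up with software */
            if (head == tail)
                    return 0;

            /* otherwise the pending count is the head-to-tail distance,
             * wrapping once past the end of the descriptor ring */
            return (head < tail) ? tail - head : tail + count - head;
    }
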
1295 struct ixgbe_ring *ring; in ixgbe_update_dca() local
1301 ixgbe_for_each_ring(ring, q_vector->tx) in ixgbe_update_dca()
1302 ixgbe_update_tx_dca(adapter, ring, cpu); in ixgbe_update_dca()
1304 ixgbe_for_each_ring(ring, q_vector->rx) in ixgbe_update_dca()
1305 ixgbe_update_rx_dca(adapter, ring, cpu); in ixgbe_update_dca()
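
ixgbe_update_dca() shows the ixgbe_for_each_ring() iteration pattern that recurs throughout the file: a q_vector keeps separate RX and TX ring containers, and the macro walks the singly linked list of rings hanging off each container. A sketch of how such a macro can be shaped (the real definition lives in ixgbe.h; this is an approximation inferred from the usage above):

    #define ixgbe_for_each_ring(pos, head) \
            for (pos = (head).ring; pos != NULL; pos = pos->next)
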
1360 static inline void ixgbe_rx_hash(struct ixgbe_ring *ring, in ixgbe_rx_hash() argument
1364 if (ring->netdev->features & NETIF_F_RXHASH) in ixgbe_rx_hash()
1378 static inline bool ixgbe_rx_is_fcoe(struct ixgbe_ring *ring, in ixgbe_rx_is_fcoe() argument
1383 return test_bit(__IXGBE_RX_FCOE, &ring->state) && in ixgbe_rx_is_fcoe()
1396 static inline void ixgbe_rx_checksum(struct ixgbe_ring *ring, in ixgbe_rx_checksum() argument
1407 if (!(ring->netdev->features & NETIF_F_RXCSUM)) in ixgbe_rx_checksum()
1420 ring->rx_stats.csum_err++; in ixgbe_rx_checksum()
1433 test_bit(__IXGBE_RX_CSUM_UDP_ZERO_ERR, &ring->state)) in ixgbe_rx_checksum()
1436 ring->rx_stats.csum_err++; in ixgbe_rx_checksum()
1447 ring->rx_stats.csum_err++; in ixgbe_rx_checksum()
1556 static void ixgbe_set_rsc_gso_size(struct ixgbe_ring *ring, in ixgbe_set_rsc_gso_size() argument
2110 struct ixgbe_ring *ring; in ixgbe_low_latency_recv() local
2119 ixgbe_for_each_ring(ring, q_vector->rx) { in ixgbe_low_latency_recv()
2120 found = ixgbe_clean_rx_irq(q_vector, ring, 4); in ixgbe_low_latency_recv()
2123 ring->stats.cleaned += found; in ixgbe_low_latency_recv()
2125 ring->stats.misses++; in ixgbe_low_latency_recv()
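
In the busy-poll path above, each low-latency pass over an RX ring either harvests packets or comes up empty, and the per-ring stats.cleaned / stats.misses counters record which. A sketch of that accounting, reusing the helpers from the listing (in the upstream driver these counters are typically compiled in only when extended busy-poll stats are enabled):

    ixgbe_for_each_ring(ring, q_vector->rx) {
            found = ixgbe_clean_rx_irq(q_vector, ring, 4);
            if (found)
                    ring->stats.cleaned += found;   /* packets harvested this poll */
            else
                    ring->stats.misses++;           /* polled an empty ring */
    }
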
2161 struct ixgbe_ring *ring; in ixgbe_configure_msix() local
2164 ixgbe_for_each_ring(ring, q_vector->rx) in ixgbe_configure_msix()
2165 ixgbe_set_ivar(adapter, 0, ring->reg_idx, v_idx); in ixgbe_configure_msix()
2167 ixgbe_for_each_ring(ring, q_vector->tx) in ixgbe_configure_msix()
2168 ixgbe_set_ivar(adapter, 1, ring->reg_idx, v_idx); in ixgbe_configure_msix()
2640 struct ixgbe_ring *ring = adapter->tx_ring[i]; in ixgbe_msix_other() local
2642 &ring->state)) in ixgbe_msix_other()
2677 if (q_vector->rx.ring || q_vector->tx.ring) in ixgbe_msix_clean_rings()
2695 struct ixgbe_ring *ring; in ixgbe_poll() local
2704 ixgbe_for_each_ring(ring, q_vector->tx) in ixgbe_poll()
2705 clean_complete &= !!ixgbe_clean_tx_irq(q_vector, ring); in ixgbe_poll()
2717 ixgbe_for_each_ring(ring, q_vector->rx) in ixgbe_poll()
2718 clean_complete &= (ixgbe_clean_rx_irq(q_vector, ring, in ixgbe_poll()
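
ixgbe_poll() folds the result of every ring's cleanup into a single clean_complete flag; only when all TX and RX rings of the vector report "done within budget" may NAPI complete and re-enable the interrupt. A condensed sketch of that aggregation built from the calls in the listing (per_ring_budget stands in for however the NAPI budget is split across the vector's RX rings):

    clean_complete = true;

    ixgbe_for_each_ring(ring, q_vector->tx)
            clean_complete &= !!ixgbe_clean_tx_irq(q_vector, ring);

    /* per_ring_budget: the share of the budget given to each RX ring (assumed) */
    ixgbe_for_each_ring(ring, q_vector->rx)
            clean_complete &= (ixgbe_clean_rx_irq(q_vector, ring,
                                                  per_ring_budget) < per_ring_budget);

    if (!clean_complete)
            return budget;          /* keep polling, do not re-enable the IRQ */
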
2753 if (q_vector->tx.ring && q_vector->rx.ring) { in ixgbe_request_msix_irqs()
2757 } else if (q_vector->rx.ring) { in ixgbe_request_msix_irqs()
2760 } else if (q_vector->tx.ring) { in ixgbe_request_msix_irqs()
2920 if (!q_vector->rx.ring && !q_vector->tx.ring) in ixgbe_free_irq()
2990 struct ixgbe_ring *ring) in ixgbe_configure_tx_ring() argument
2993 u64 tdba = ring->dma; in ixgbe_configure_tx_ring()
2996 u8 reg_idx = ring->reg_idx; in ixgbe_configure_tx_ring()
3006 ring->count * sizeof(union ixgbe_adv_tx_desc)); in ixgbe_configure_tx_ring()
3009 ring->tail = adapter->io_addr + IXGBE_TDT(reg_idx); in ixgbe_configure_tx_ring()
3021 if (!ring->q_vector || (ring->q_vector->itr < IXGBE_100K_ITR)) in ixgbe_configure_tx_ring()
3035 ring->atr_sample_rate = adapter->atr_sample_rate; in ixgbe_configure_tx_ring()
3036 ring->atr_count = 0; in ixgbe_configure_tx_ring()
3037 set_bit(__IXGBE_TX_FDIR_INIT_DONE, &ring->state); in ixgbe_configure_tx_ring()
3039 ring->atr_sample_rate = 0; in ixgbe_configure_tx_ring()
3043 if (!test_and_set_bit(__IXGBE_TX_XPS_INIT_DONE, &ring->state)) { in ixgbe_configure_tx_ring()
3044 struct ixgbe_q_vector *q_vector = ring->q_vector; in ixgbe_configure_tx_ring()
3047 netif_set_xps_queue(ring->netdev, in ixgbe_configure_tx_ring()
3049 ring->queue_index); in ixgbe_configure_tx_ring()
3052 clear_bit(__IXGBE_HANG_CHECK_ARMED, &ring->state); in ixgbe_configure_tx_ring()
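
ixgbe_configure_tx_ring() programs the queue's descriptor base address and length and records where the tail doorbell lives in MMIO space so the transmit hot path can ring it cheaply. A sketch of that part of the setup; IXGBE_TDBAL/IXGBE_TDBAH/IXGBE_TDLEN are register names recalled from the driver and should be treated as assumptions, only IXGBE_TDT appears in the listing:

    u64 tdba = ring->dma;                   /* DMA address of the descriptor ring */
    u8  reg_idx = ring->reg_idx;

    IXGBE_WRITE_REG(hw, IXGBE_TDBAL(reg_idx), tdba & DMA_BIT_MASK(32));
    IXGBE_WRITE_REG(hw, IXGBE_TDBAH(reg_idx), tdba >> 32);
    IXGBE_WRITE_REG(hw, IXGBE_TDLEN(reg_idx),
                    ring->count * sizeof(union ixgbe_adv_tx_desc));

    /* keep a software pointer to the tail register so the transmit path
     * can ring the doorbell without recomputing the offset */
    ring->tail = adapter->io_addr + IXGBE_TDT(reg_idx);
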
3146 struct ixgbe_ring *ring) in ixgbe_enable_rx_drop() argument
3149 u8 reg_idx = ring->reg_idx; in ixgbe_enable_rx_drop()
3158 struct ixgbe_ring *ring) in ixgbe_disable_rx_drop() argument
3161 u8 reg_idx = ring->reg_idx; in ixgbe_disable_rx_drop()
3443 struct ixgbe_ring *ring) in ixgbe_configure_rscctl() argument
3447 u8 reg_idx = ring->reg_idx; in ixgbe_configure_rscctl()
3449 if (!ring_is_rsc_enabled(ring)) in ixgbe_configure_rscctl()
3465 struct ixgbe_ring *ring) in ixgbe_rx_desc_queue_enable() argument
3470 u8 reg_idx = ring->reg_idx; in ixgbe_rx_desc_queue_enable()
3491 struct ixgbe_ring *ring) in ixgbe_disable_rx_queue() argument
3496 u8 reg_idx = ring->reg_idx; in ixgbe_disable_rx_queue()
3523 struct ixgbe_ring *ring) in ixgbe_configure_rx_ring() argument
3526 u64 rdba = ring->dma; in ixgbe_configure_rx_ring()
3528 u8 reg_idx = ring->reg_idx; in ixgbe_configure_rx_ring()
3532 ixgbe_disable_rx_queue(adapter, ring); in ixgbe_configure_rx_ring()
3537 ring->count * sizeof(union ixgbe_adv_rx_desc)); in ixgbe_configure_rx_ring()
3540 ring->tail = adapter->io_addr + IXGBE_RDT(reg_idx); in ixgbe_configure_rx_ring()
3542 ixgbe_configure_srrctl(adapter, ring); in ixgbe_configure_rx_ring()
3543 ixgbe_configure_rscctl(adapter, ring); in ixgbe_configure_rx_ring()
3561 ixgbe_rx_desc_queue_enable(adapter, ring); in ixgbe_configure_rx_ring()
3562 ixgbe_alloc_rx_buffers(ring, ixgbe_desc_unused(ring)); in ixgbe_configure_rx_ring()
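
The RX ring bring-up above follows a fixed order: quiesce the queue, program the descriptor base and length, latch the tail register address, apply SRRCTL/RSCCTL, re-enable the queue, and only then hand buffers to hardware. A compressed sketch of that sequence assembled from the calls in the listing:

    ixgbe_disable_rx_queue(adapter, ring);          /* stop DMA before reprogramming */

    /* program descriptor base (rdba) and length, then remember the tail doorbell */
    ring->tail = adapter->io_addr + IXGBE_RDT(reg_idx);

    ixgbe_configure_srrctl(adapter, ring);          /* buffer sizes / descriptor type */
    ixgbe_configure_rscctl(adapter, ring);          /* receive-side coalescing */

    ixgbe_rx_desc_queue_enable(adapter, ring);      /* wait for the queue to come up */
    ixgbe_alloc_rx_buffers(ring, ixgbe_desc_unused(ring));  /* fill the ring */
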
3849 struct ixgbe_ring *ring = adapter->rx_ring[i]; in ixgbe_vlan_strip_disable() local
3851 if (ring->l2_accel_priv) in ixgbe_vlan_strip_disable()
3853 j = ring->reg_idx; in ixgbe_vlan_strip_disable()
3885 struct ixgbe_ring *ring = adapter->rx_ring[i]; in ixgbe_vlan_strip_enable() local
3887 if (ring->l2_accel_priv) in ixgbe_vlan_strip_enable()
3889 j = ring->reg_idx; in ixgbe_vlan_strip_enable()
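
Both VLAN-strip helpers skip rings owned by an L2 acceleration context (ring->l2_accel_priv) and then toggle stripping per queue via the ring's hardware register index. A sketch of the enable side; the register and bit names are recalled from the driver rather than taken from the listing, so treat them as assumptions:

    j = ring->reg_idx;
    vlnctrl = IXGBE_READ_REG(hw, IXGBE_RXDCTL(j));
    vlnctrl |= IXGBE_RXDCTL_VME;            /* strip the VLAN tag on receive */
    IXGBE_WRITE_REG(hw, IXGBE_RXDCTL(j), vlnctrl);
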
6227 if (qv->rx.ring || qv->tx.ring) in ixgbe_check_hang_subtask()
7149 static void ixgbe_atr(struct ixgbe_ring *ring, in ixgbe_atr() argument
7152 struct ixgbe_q_vector *q_vector = ring->q_vector; in ixgbe_atr()
7168 if (!ring->atr_sample_rate) in ixgbe_atr()
7171 ring->atr_count++; in ixgbe_atr()
7190 if (!th->syn && (ring->atr_count < ring->atr_sample_rate)) in ixgbe_atr()
7194 ring->atr_count = 0; in ixgbe_atr()
7234 input, common, ring->queue_index); in ixgbe_atr()
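
ixgbe_atr() only pushes a Flow Director filter for a sample of the traffic: nothing happens when the ring's atr_sample_rate is zero, and otherwise a flow is sampled when it starts (TCP SYN) or once every atr_sample_rate packets, after which the per-ring counter resets. A standalone helper capturing just that decision (names are illustrative, not the driver's):

    static bool atr_should_sample(u16 *atr_count, u16 sample_rate, bool syn)
    {
            if (!sample_rate)
                    return false;           /* ATR disabled on this ring */

            (*atr_count)++;
            if (!syn && *atr_count < sample_rate)
                    return false;           /* not time to sample yet */

            *atr_count = 0;                 /* sampled: restart the window */
            return true;
    }
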
7417 struct ixgbe_ring *ring) in __ixgbe_xmit_frame() argument
7429 tx_ring = ring ? ring : adapter->tx_ring[skb->queue_mapping]; in __ixgbe_xmit_frame()
7580 struct ixgbe_ring *ring = ACCESS_ONCE(adapter->rx_ring[i]); in ixgbe_get_stats64() local
7584 if (ring) { in ixgbe_get_stats64()
7586 start = u64_stats_fetch_begin_irq(&ring->syncp); in ixgbe_get_stats64()
7587 packets = ring->stats.packets; in ixgbe_get_stats64()
7588 bytes = ring->stats.bytes; in ixgbe_get_stats64()
7589 } while (u64_stats_fetch_retry_irq(&ring->syncp, start)); in ixgbe_get_stats64()
7596 struct ixgbe_ring *ring = ACCESS_ONCE(adapter->tx_ring[i]); in ixgbe_get_stats64() local
7600 if (ring) { in ixgbe_get_stats64()
7602 start = u64_stats_fetch_begin_irq(&ring->syncp); in ixgbe_get_stats64()
7603 packets = ring->stats.packets; in ixgbe_get_stats64()
7604 bytes = ring->stats.bytes; in ixgbe_get_stats64()
7605 } while (u64_stats_fetch_retry_irq(&ring->syncp, start)); in ixgbe_get_stats64()
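
The stats fragments show the standard u64_stats_sync consumer pattern: re-read the per-ring counters until the sequence count confirms no writer updated them mid-fetch, which matters on 32-bit hosts where a 64-bit load is not atomic. A sketch of one RX iteration feeding struct rtnl_link_stats64 (the rx_packets/rx_bytes destination fields are the usual ones, assumed here rather than shown in the listing):

    struct ixgbe_ring *ring = ACCESS_ONCE(adapter->rx_ring[i]);
    unsigned int start;
    u64 packets, bytes;

    if (ring) {
            do {
                    start   = u64_stats_fetch_begin_irq(&ring->syncp);
                    packets = ring->stats.packets;
                    bytes   = ring->stats.bytes;
            } while (u64_stats_fetch_retry_irq(&ring->syncp, start));

            stats->rx_packets += packets;
            stats->rx_bytes   += bytes;
    }
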