Lines Matching refs: rx_ring

531 static void ixgbevf_process_skb_fields(struct ixgbevf_ring *rx_ring,  in ixgbevf_process_skb_fields()  argument
535 ixgbevf_rx_hash(rx_ring, rx_desc, skb); in ixgbevf_process_skb_fields()
536 ixgbevf_rx_checksum(rx_ring, rx_desc, skb); in ixgbevf_process_skb_fields()
540 unsigned long *active_vlans = netdev_priv(rx_ring->netdev); in ixgbevf_process_skb_fields()
546 skb->protocol = eth_type_trans(skb, rx_ring->netdev); in ixgbevf_process_skb_fields()
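These five matches form the final skb fix-up step before the frame is handed to the stack. A minimal sketch of how they plausibly fit together in ixgbevf_process_skb_fields(); the IXGBE_RXD_STAT_VP test, the VLAN-bitmap lookup, and the assumption that active_vlans is the first member of the adapter private structure (which is what lets netdev_priv() be assigned to an unsigned long * on line 540) are inferred from the usual ixgbe/ixgbevf pattern, not shown in the matches:

static void ixgbevf_process_skb_fields(struct ixgbevf_ring *rx_ring,
				       union ixgbe_adv_rx_desc *rx_desc,
				       struct sk_buff *skb)
{
	ixgbevf_rx_hash(rx_ring, rx_desc, skb);
	ixgbevf_rx_checksum(rx_ring, rx_desc, skb);

	/* strip the VLAN tag only if that VID is currently active (sketch) */
	if (ixgbevf_test_staterr(rx_desc, IXGBE_RXD_STAT_VP)) {
		u16 vid = le16_to_cpu(rx_desc->wb.upper.vlan);
		unsigned long *active_vlans = netdev_priv(rx_ring->netdev);

		if (test_bit(vid & VLAN_VID_MASK, active_vlans))
			__vlan_hwaccel_put_tag(skb, htons(ETH_P_8021Q), vid);
	}

	skb->protocol = eth_type_trans(skb, rx_ring->netdev);
}
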
560 static bool ixgbevf_is_non_eop(struct ixgbevf_ring *rx_ring, in ixgbevf_is_non_eop() argument
563 u32 ntc = rx_ring->next_to_clean + 1; in ixgbevf_is_non_eop()
566 ntc = (ntc < rx_ring->count) ? ntc : 0; in ixgbevf_is_non_eop()
567 rx_ring->next_to_clean = ntc; in ixgbevf_is_non_eop()
569 prefetch(IXGBEVF_RX_DESC(rx_ring, ntc)); in ixgbevf_is_non_eop()
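A sketch of the descriptor-advance helper these three matches belong to: bump next_to_clean with a wrap at rx_ring->count, prefetch the next descriptor, and report whether the current frame continues into another descriptor. The end-of-packet test via IXGBE_RXD_STAT_EOP is an assumption based on the standard advanced-descriptor status bits:

static bool ixgbevf_is_non_eop(struct ixgbevf_ring *rx_ring,
			       union ixgbe_adv_rx_desc *rx_desc)
{
	u32 ntc = rx_ring->next_to_clean + 1;

	/* advance next_to_clean, wrapping back to 0 at the end of the ring */
	ntc = (ntc < rx_ring->count) ? ntc : 0;
	rx_ring->next_to_clean = ntc;

	/* warm up the cache line of the descriptor we will look at next */
	prefetch(IXGBEVF_RX_DESC(rx_ring, ntc));

	/* the frame is complete once the EOP bit is set on this descriptor */
	if (likely(ixgbevf_test_staterr(rx_desc, IXGBE_RXD_STAT_EOP)))
		return false;

	return true;
}
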
577 static bool ixgbevf_alloc_mapped_page(struct ixgbevf_ring *rx_ring, in ixgbevf_alloc_mapped_page() argument
590 rx_ring->rx_stats.alloc_rx_page_failed++; in ixgbevf_alloc_mapped_page()
595 dma = dma_map_page(rx_ring->dev, page, 0, in ixgbevf_alloc_mapped_page()
601 if (dma_mapping_error(rx_ring->dev, dma)) { in ixgbevf_alloc_mapped_page()
604 rx_ring->rx_stats.alloc_rx_buff_failed++; in ixgbevf_alloc_mapped_page()
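The two failure counters above sit on the allocate-then-map error paths of ixgbevf_alloc_mapped_page(). A sketch of the whole helper; dev_alloc_page(), the page_offset reset, and the PAGE_SIZE mapping length are assumptions consistent with the dma_map_page() call on line 595:

static bool ixgbevf_alloc_mapped_page(struct ixgbevf_ring *rx_ring,
				      struct ixgbevf_rx_buffer *bi)
{
	struct page *page = bi->page;
	dma_addr_t dma;

	/* nothing to do if the buffer already holds a usable page */
	if (likely(page))
		return true;

	page = dev_alloc_page();
	if (unlikely(!page)) {
		rx_ring->rx_stats.alloc_rx_page_failed++;
		return false;
	}

	/* map the whole page so the device can DMA received frames into it */
	dma = dma_map_page(rx_ring->dev, page, 0, PAGE_SIZE, DMA_FROM_DEVICE);

	/* if mapping failed, free the page and count the error */
	if (dma_mapping_error(rx_ring->dev, dma)) {
		__free_page(page);
		rx_ring->rx_stats.alloc_rx_buff_failed++;
		return false;
	}

	bi->dma = dma;
	bi->page = page;
	bi->page_offset = 0;

	return true;
}
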
620 static void ixgbevf_alloc_rx_buffers(struct ixgbevf_ring *rx_ring, in ixgbevf_alloc_rx_buffers() argument
625 unsigned int i = rx_ring->next_to_use; in ixgbevf_alloc_rx_buffers()
628 if (!cleaned_count || !rx_ring->netdev) in ixgbevf_alloc_rx_buffers()
631 rx_desc = IXGBEVF_RX_DESC(rx_ring, i); in ixgbevf_alloc_rx_buffers()
632 bi = &rx_ring->rx_buffer_info[i]; in ixgbevf_alloc_rx_buffers()
633 i -= rx_ring->count; in ixgbevf_alloc_rx_buffers()
636 if (!ixgbevf_alloc_mapped_page(rx_ring, bi)) in ixgbevf_alloc_rx_buffers()
648 rx_desc = IXGBEVF_RX_DESC(rx_ring, 0); in ixgbevf_alloc_rx_buffers()
649 bi = rx_ring->rx_buffer_info; in ixgbevf_alloc_rx_buffers()
650 i -= rx_ring->count; in ixgbevf_alloc_rx_buffers()
659 i += rx_ring->count; in ixgbevf_alloc_rx_buffers()
661 if (rx_ring->next_to_use != i) { in ixgbevf_alloc_rx_buffers()
663 rx_ring->next_to_use = i; in ixgbevf_alloc_rx_buffers()
666 rx_ring->next_to_alloc = i; in ixgbevf_alloc_rx_buffers()
674 ixgbevf_write_tail(rx_ring, i); in ixgbevf_alloc_rx_buffers()
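The matches from ixgbevf_alloc_rx_buffers() show the negative-index trick: i is biased by -rx_ring->count so the wrap test inside the loop is simply !i, and i += rx_ring->count restores the real ring index before the tail write. A sketch of the full refill loop under that reading; the pkt_addr/hdr_addr descriptor writes and the wmb() before ixgbevf_write_tail() are assumptions taken from the usual Intel Ethernet refill pattern:

static void ixgbevf_alloc_rx_buffers(struct ixgbevf_ring *rx_ring,
				     u16 cleaned_count)
{
	union ixgbe_adv_rx_desc *rx_desc;
	struct ixgbevf_rx_buffer *bi;
	unsigned int i = rx_ring->next_to_use;

	/* nothing to do, or the ring has no netdev attached yet */
	if (!cleaned_count || !rx_ring->netdev)
		return;

	rx_desc = IXGBEVF_RX_DESC(rx_ring, i);
	bi = &rx_ring->rx_buffer_info[i];
	i -= rx_ring->count;		/* bias i negative so the wrap test is !i */

	do {
		if (!ixgbevf_alloc_mapped_page(rx_ring, bi))
			break;

		/* point the descriptor at the freshly mapped page */
		rx_desc->read.pkt_addr = cpu_to_le64(bi->dma + bi->page_offset);

		rx_desc++;
		bi++;
		i++;
		if (unlikely(!i)) {
			/* wrapped: restart at descriptor 0 and re-bias i */
			rx_desc = IXGBEVF_RX_DESC(rx_ring, 0);
			bi = rx_ring->rx_buffer_info;
			i -= rx_ring->count;
		}

		/* clear hdr_addr so stale write-back data is not misread */
		rx_desc->read.hdr_addr = 0;

		cleaned_count--;
	} while (cleaned_count);

	i += rx_ring->count;		/* back to a real ring index */

	if (rx_ring->next_to_use != i) {
		rx_ring->next_to_use = i;
		rx_ring->next_to_alloc = i;

		/* make descriptor writes visible before the tail bump */
		wmb();
		ixgbevf_write_tail(rx_ring, i);
	}
}
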
696 static bool ixgbevf_cleanup_headers(struct ixgbevf_ring *rx_ring, in ixgbevf_cleanup_headers() argument
703 struct net_device *netdev = rx_ring->netdev; in ixgbevf_cleanup_headers()
725 static void ixgbevf_reuse_rx_page(struct ixgbevf_ring *rx_ring, in ixgbevf_reuse_rx_page() argument
729 u16 nta = rx_ring->next_to_alloc; in ixgbevf_reuse_rx_page()
731 new_buff = &rx_ring->rx_buffer_info[nta]; in ixgbevf_reuse_rx_page()
735 rx_ring->next_to_alloc = (nta < rx_ring->count) ? nta : 0; in ixgbevf_reuse_rx_page()
743 dma_sync_single_range_for_device(rx_ring->dev, new_buff->dma, in ixgbevf_reuse_rx_page()
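A sketch of the page-recycling helper these matches come from; the nta++ between lines 729 and 735, the fields copied from the old buffer, and the IXGBEVF_RX_BUFSZ sync length are assumed, the rest follows directly from the matches:

static void ixgbevf_reuse_rx_page(struct ixgbevf_ring *rx_ring,
				  struct ixgbevf_rx_buffer *old_buff)
{
	struct ixgbevf_rx_buffer *new_buff;
	u16 nta = rx_ring->next_to_alloc;

	new_buff = &rx_ring->rx_buffer_info[nta];

	/* update and store next_to_alloc, wrapping at the end of the ring */
	nta++;
	rx_ring->next_to_alloc = (nta < rx_ring->count) ? nta : 0;

	/* transfer the page from the old buffer to the new buffer */
	new_buff->page = old_buff->page;
	new_buff->dma = old_buff->dma;
	new_buff->page_offset = old_buff->page_offset;

	/* sync the region the hardware will write next back to the device */
	dma_sync_single_range_for_device(rx_ring->dev, new_buff->dma,
					 new_buff->page_offset,
					 IXGBEVF_RX_BUFSZ,
					 DMA_FROM_DEVICE);
}

Recycling the page this way avoids a dma_unmap/dma_map round trip on every received frame; only the region the hardware will write into next is synced back to the device.
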
769 static bool ixgbevf_add_rx_frag(struct ixgbevf_ring *rx_ring, in ixgbevf_add_rx_frag() argument
843 static struct sk_buff *ixgbevf_fetch_rx_buffer(struct ixgbevf_ring *rx_ring, in ixgbevf_fetch_rx_buffer() argument
850 rx_buffer = &rx_ring->rx_buffer_info[rx_ring->next_to_clean]; in ixgbevf_fetch_rx_buffer()
865 skb = netdev_alloc_skb_ip_align(rx_ring->netdev, in ixgbevf_fetch_rx_buffer()
868 rx_ring->rx_stats.alloc_rx_buff_failed++; in ixgbevf_fetch_rx_buffer()
880 dma_sync_single_range_for_cpu(rx_ring->dev, in ixgbevf_fetch_rx_buffer()
887 if (ixgbevf_add_rx_frag(rx_ring, rx_buffer, rx_desc, skb)) { in ixgbevf_fetch_rx_buffer()
889 ixgbevf_reuse_rx_page(rx_ring, rx_buffer); in ixgbevf_fetch_rx_buffer()
892 dma_unmap_page(rx_ring->dev, rx_buffer->dma, in ixgbevf_fetch_rx_buffer()
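Sketch of the receive-buffer fetch path: allocate a header skb only at the start of a frame, sync the page for CPU access, then either recycle the page via ixgbevf_reuse_rx_page() or unmap it if the stack still owns it. IXGBEVF_RX_HDR_SIZE, IXGBEVF_RX_BUFSZ, the prefetchw(), and the final buffer_info reset are assumptions around the matched lines:

static struct sk_buff *ixgbevf_fetch_rx_buffer(struct ixgbevf_ring *rx_ring,
					       union ixgbe_adv_rx_desc *rx_desc,
					       struct sk_buff *skb)
{
	struct ixgbevf_rx_buffer *rx_buffer;
	struct page *page;

	rx_buffer = &rx_ring->rx_buffer_info[rx_ring->next_to_clean];
	page = rx_buffer->page;
	prefetchw(page);

	if (likely(!skb)) {
		/* start of a new frame: allocate a small skb for the headers */
		skb = netdev_alloc_skb_ip_align(rx_ring->netdev,
						IXGBEVF_RX_HDR_SIZE);
		if (unlikely(!skb)) {
			rx_ring->rx_stats.alloc_rx_buff_failed++;
			return NULL;
		}
	}

	/* make the received bytes visible to the CPU before touching them */
	dma_sync_single_range_for_cpu(rx_ring->dev, rx_buffer->dma,
				      rx_buffer->page_offset,
				      IXGBEVF_RX_BUFSZ,
				      DMA_FROM_DEVICE);

	/* pull the data into the skb; on success the page can be recycled */
	if (ixgbevf_add_rx_frag(rx_ring, rx_buffer, rx_desc, skb)) {
		ixgbevf_reuse_rx_page(rx_ring, rx_buffer);
	} else {
		/* the page stays with the stack, so unmap it instead */
		dma_unmap_page(rx_ring->dev, rx_buffer->dma,
			       PAGE_SIZE, DMA_FROM_DEVICE);
	}

	/* clear the buffer_info entry so the page is not reused by mistake */
	rx_buffer->dma = 0;
	rx_buffer->page = NULL;

	return skb;
}
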
912 struct ixgbevf_ring *rx_ring, in ixgbevf_clean_rx_irq() argument
916 u16 cleaned_count = ixgbevf_desc_unused(rx_ring); in ixgbevf_clean_rx_irq()
917 struct sk_buff *skb = rx_ring->skb; in ixgbevf_clean_rx_irq()
924 ixgbevf_alloc_rx_buffers(rx_ring, cleaned_count); in ixgbevf_clean_rx_irq()
928 rx_desc = IXGBEVF_RX_DESC(rx_ring, rx_ring->next_to_clean); in ixgbevf_clean_rx_irq()
940 skb = ixgbevf_fetch_rx_buffer(rx_ring, rx_desc, skb); in ixgbevf_clean_rx_irq()
949 if (ixgbevf_is_non_eop(rx_ring, rx_desc)) in ixgbevf_clean_rx_irq()
953 if (ixgbevf_cleanup_headers(rx_ring, rx_desc, skb)) { in ixgbevf_clean_rx_irq()
966 ether_addr_equal(rx_ring->netdev->dev_addr, in ixgbevf_clean_rx_irq()
973 ixgbevf_process_skb_fields(rx_ring, rx_desc, skb); in ixgbevf_clean_rx_irq()
985 rx_ring->skb = skb; in ixgbevf_clean_rx_irq()
987 u64_stats_update_begin(&rx_ring->syncp); in ixgbevf_clean_rx_irq()
988 rx_ring->stats.packets += total_rx_packets; in ixgbevf_clean_rx_irq()
989 rx_ring->stats.bytes += total_rx_bytes; in ixgbevf_clean_rx_irq()
990 u64_stats_update_end(&rx_ring->syncp); in ixgbevf_clean_rx_irq()
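The matches from ixgbevf_clean_rx_irq() outline the NAPI receive loop. A condensed sketch under the usual structure; the IXGBEVF_RX_BUFFER_WRITE refill threshold, the DD-bit check, the rmb(), the return type, and handing frames to napi_gro_receive() are assumptions, and the own-source-MAC drop on line 966 is only noted in a comment:

static int ixgbevf_clean_rx_irq(struct ixgbevf_q_vector *q_vector,
				struct ixgbevf_ring *rx_ring,
				int budget)
{
	unsigned int total_rx_bytes = 0, total_rx_packets = 0;
	u16 cleaned_count = ixgbevf_desc_unused(rx_ring);
	struct sk_buff *skb = rx_ring->skb;	/* frame left unfinished by the last poll */

	while (likely(total_rx_packets < budget)) {
		union ixgbe_adv_rx_desc *rx_desc;

		/* refill the ring before it runs too low on buffers */
		if (cleaned_count >= IXGBEVF_RX_BUFFER_WRITE) {
			ixgbevf_alloc_rx_buffers(rx_ring, cleaned_count);
			cleaned_count = 0;
		}

		rx_desc = IXGBEVF_RX_DESC(rx_ring, rx_ring->next_to_clean);

		/* stop at the first descriptor the hardware has not written back */
		if (!ixgbevf_test_staterr(rx_desc, IXGBE_RXD_STAT_DD))
			break;

		/* do not read other descriptor fields until DD has been seen */
		rmb();

		skb = ixgbevf_fetch_rx_buffer(rx_ring, rx_desc, skb);
		if (!skb)
			break;

		cleaned_count++;

		/* frame spans several descriptors: keep collecting fragments */
		if (ixgbevf_is_non_eop(rx_ring, rx_desc))
			continue;

		/* bad frame, skb already consumed by the helper */
		if (ixgbevf_cleanup_headers(rx_ring, rx_desc, skb)) {
			skb = NULL;
			continue;
		}

		/* (not shown) line 966: broadcast/multicast frames whose source
		 * MAC equals rx_ring->netdev->dev_addr are dropped here
		 */

		total_rx_bytes += skb->len;

		ixgbevf_process_skb_fields(rx_ring, rx_desc, skb);

		/* hand the completed frame to the stack */
		napi_gro_receive(&q_vector->napi, skb);

		skb = NULL;
		total_rx_packets++;
	}

	/* stash any partially assembled frame for the next poll */
	rx_ring->skb = skb;

	u64_stats_update_begin(&rx_ring->syncp);
	rx_ring->stats.packets += total_rx_packets;
	rx_ring->stats.bytes += total_rx_bytes;
	u64_stats_update_end(&rx_ring->syncp);

	return total_rx_packets;
}
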
1301 a->rx_ring[r_idx]->next = q_vector->rx.ring; in map_vector_to_rxq()
1302 q_vector->rx.ring = a->rx_ring[r_idx]; in map_vector_to_rxq()
1799 ixgbevf_configure_rx_ring(adapter, adapter->rx_ring[i]); in ixgbevf_configure_rx()
2108 static void ixgbevf_clean_rx_ring(struct ixgbevf_ring *rx_ring) in ixgbevf_clean_rx_ring() argument
2110 struct device *dev = rx_ring->dev; in ixgbevf_clean_rx_ring()
2115 if (rx_ring->skb) { in ixgbevf_clean_rx_ring()
2116 dev_kfree_skb(rx_ring->skb); in ixgbevf_clean_rx_ring()
2117 rx_ring->skb = NULL; in ixgbevf_clean_rx_ring()
2121 if (!rx_ring->rx_buffer_info) in ixgbevf_clean_rx_ring()
2125 for (i = 0; i < rx_ring->count; i++) { in ixgbevf_clean_rx_ring()
2128 rx_buffer = &rx_ring->rx_buffer_info[i]; in ixgbevf_clean_rx_ring()
2138 size = sizeof(struct ixgbevf_rx_buffer) * rx_ring->count; in ixgbevf_clean_rx_ring()
2139 memset(rx_ring->rx_buffer_info, 0, size); in ixgbevf_clean_rx_ring()
2142 memset(rx_ring->desc, 0, rx_ring->size); in ixgbevf_clean_rx_ring()
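Sketch of the ring cleanup these matches belong to; the per-buffer dma_unmap_page()/__free_page() body of the loop is assumed, since those lines reference only dev and rx_buffer and therefore do not appear in the matches:

static void ixgbevf_clean_rx_ring(struct ixgbevf_ring *rx_ring)
{
	struct device *dev = rx_ring->dev;
	unsigned long size;
	unsigned int i;

	/* free any skb still being assembled from a previous NAPI poll */
	if (rx_ring->skb) {
		dev_kfree_skb(rx_ring->skb);
		rx_ring->skb = NULL;
	}

	/* ring never got its buffer_info array: nothing else to clean */
	if (!rx_ring->rx_buffer_info)
		return;

	/* unmap and free every page still owned by the ring */
	for (i = 0; i < rx_ring->count; i++) {
		struct ixgbevf_rx_buffer *rx_buffer;

		rx_buffer = &rx_ring->rx_buffer_info[i];
		if (rx_buffer->dma)
			dma_unmap_page(dev, rx_buffer->dma,
				       PAGE_SIZE, DMA_FROM_DEVICE);
		rx_buffer->dma = 0;
		if (rx_buffer->page)
			__free_page(rx_buffer->page);
		rx_buffer->page = NULL;
	}

	size = sizeof(struct ixgbevf_rx_buffer) * rx_ring->count;
	memset(rx_ring->rx_buffer_info, 0, size);

	/* zero the descriptor ring as well */
	memset(rx_ring->desc, 0, rx_ring->size);
}
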
2179 ixgbevf_clean_rx_ring(adapter->rx_ring[i]); in ixgbevf_clean_all_rx_rings()
2206 ixgbevf_disable_rx_queue(adapter, adapter->rx_ring[i]); in ixgbevf_down()
2396 adapter->rx_ring[rx] = ring; in ixgbevf_alloc_queues()
2408 kfree(adapter->rx_ring[--rx]); in ixgbevf_alloc_queues()
2409 adapter->rx_ring[rx] = NULL; in ixgbevf_alloc_queues()
2606 kfree(adapter->rx_ring[i]); in ixgbevf_clear_interrupt_scheme()
2607 adapter->rx_ring[i] = NULL; in ixgbevf_clear_interrupt_scheme()
2739 adapter->rx_ring[i]->hw_csum_rx_error; in ixgbevf_update_stats()
2740 adapter->rx_ring[i]->hw_csum_rx_error = 0; in ixgbevf_update_stats()
3036 int ixgbevf_setup_rx_resources(struct ixgbevf_ring *rx_ring) in ixgbevf_setup_rx_resources() argument
3040 size = sizeof(struct ixgbevf_rx_buffer) * rx_ring->count; in ixgbevf_setup_rx_resources()
3041 rx_ring->rx_buffer_info = vzalloc(size); in ixgbevf_setup_rx_resources()
3042 if (!rx_ring->rx_buffer_info) in ixgbevf_setup_rx_resources()
3046 rx_ring->size = rx_ring->count * sizeof(union ixgbe_adv_rx_desc); in ixgbevf_setup_rx_resources()
3047 rx_ring->size = ALIGN(rx_ring->size, 4096); in ixgbevf_setup_rx_resources()
3049 rx_ring->desc = dma_alloc_coherent(rx_ring->dev, rx_ring->size, in ixgbevf_setup_rx_resources()
3050 &rx_ring->dma, GFP_KERNEL); in ixgbevf_setup_rx_resources()
3052 if (!rx_ring->desc) in ixgbevf_setup_rx_resources()
3057 vfree(rx_ring->rx_buffer_info); in ixgbevf_setup_rx_resources()
3058 rx_ring->rx_buffer_info = NULL; in ixgbevf_setup_rx_resources()
3059 dev_err(rx_ring->dev, "Unable to allocate memory for the Rx descriptor ring\n"); in ixgbevf_setup_rx_resources()
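The setup path is almost fully visible in the matches; assembled into one sketch, with only the goto/err structure and the -ENOMEM return value assumed:

int ixgbevf_setup_rx_resources(struct ixgbevf_ring *rx_ring)
{
	int size;

	/* software state: one ixgbevf_rx_buffer per descriptor */
	size = sizeof(struct ixgbevf_rx_buffer) * rx_ring->count;
	rx_ring->rx_buffer_info = vzalloc(size);
	if (!rx_ring->rx_buffer_info)
		goto err;

	/* hardware descriptor ring, rounded up to a 4K boundary */
	rx_ring->size = rx_ring->count * sizeof(union ixgbe_adv_rx_desc);
	rx_ring->size = ALIGN(rx_ring->size, 4096);

	rx_ring->desc = dma_alloc_coherent(rx_ring->dev, rx_ring->size,
					   &rx_ring->dma, GFP_KERNEL);
	if (!rx_ring->desc)
		goto err;

	return 0;
err:
	vfree(rx_ring->rx_buffer_info);	/* vfree(NULL) is safe */
	rx_ring->rx_buffer_info = NULL;
	dev_err(rx_ring->dev, "Unable to allocate memory for the Rx descriptor ring\n");
	return -ENOMEM;
}

Rounding rx_ring->size up to 4096 keeps the descriptor ring page-aligned for the coherent DMA allocation.
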
3078 err = ixgbevf_setup_rx_resources(adapter->rx_ring[i]); in ixgbevf_setup_all_rx_resources()
3093 void ixgbevf_free_rx_resources(struct ixgbevf_ring *rx_ring) in ixgbevf_free_rx_resources() argument
3095 ixgbevf_clean_rx_ring(rx_ring); in ixgbevf_free_rx_resources()
3097 vfree(rx_ring->rx_buffer_info); in ixgbevf_free_rx_resources()
3098 rx_ring->rx_buffer_info = NULL; in ixgbevf_free_rx_resources()
3100 dma_free_coherent(rx_ring->dev, rx_ring->size, rx_ring->desc, in ixgbevf_free_rx_resources()
3101 rx_ring->dma); in ixgbevf_free_rx_resources()
3103 rx_ring->desc = NULL; in ixgbevf_free_rx_resources()
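For symmetry with ixgbevf_setup_rx_resources(), the teardown as one block; this is essentially the matched lines with braces restored, nothing assumed beyond the ordering and return type already shown:

void ixgbevf_free_rx_resources(struct ixgbevf_ring *rx_ring)
{
	/* return all pages and skbs first, then release the backing memory */
	ixgbevf_clean_rx_ring(rx_ring);

	vfree(rx_ring->rx_buffer_info);
	rx_ring->rx_buffer_info = NULL;

	dma_free_coherent(rx_ring->dev, rx_ring->size, rx_ring->desc,
			  rx_ring->dma);

	rx_ring->desc = NULL;
}
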
3117 if (adapter->rx_ring[i]->desc) in ixgbevf_free_all_rx_resources()
3118 ixgbevf_free_rx_resources(adapter->rx_ring[i]); in ixgbevf_free_all_rx_resources()
3867 ring = adapter->rx_ring[i]; in ixgbevf_get_stats()