Lines matching refs: rx_bi
528 struct i40e_rx_buffer *rx_bi; in i40evf_clean_rx_ring() local
533 if (!rx_ring->rx_bi) in i40evf_clean_rx_ring()
539 rx_bi = &rx_ring->rx_bi[0]; in i40evf_clean_rx_ring()
540 if (rx_bi->hdr_buf) { in i40evf_clean_rx_ring()
543 rx_bi->hdr_buf, in i40evf_clean_rx_ring()
544 rx_bi->dma); in i40evf_clean_rx_ring()
546 rx_bi = &rx_ring->rx_bi[i]; in i40evf_clean_rx_ring()
547 rx_bi->dma = 0; in i40evf_clean_rx_ring()
548 rx_bi->hdr_buf = NULL; in i40evf_clean_rx_ring()
554 rx_bi = &rx_ring->rx_bi[i]; in i40evf_clean_rx_ring()
555 if (rx_bi->dma) { in i40evf_clean_rx_ring()
557 rx_bi->dma, in i40evf_clean_rx_ring()
560 rx_bi->dma = 0; in i40evf_clean_rx_ring()
562 if (rx_bi->skb) { in i40evf_clean_rx_ring()
563 dev_kfree_skb(rx_bi->skb); in i40evf_clean_rx_ring()
564 rx_bi->skb = NULL; in i40evf_clean_rx_ring()
566 if (rx_bi->page) { in i40evf_clean_rx_ring()
567 if (rx_bi->page_dma) { in i40evf_clean_rx_ring()
569 rx_bi->page_dma, in i40evf_clean_rx_ring()
572 rx_bi->page_dma = 0; in i40evf_clean_rx_ring()
574 __free_page(rx_bi->page); in i40evf_clean_rx_ring()
575 rx_bi->page = NULL; in i40evf_clean_rx_ring()
576 rx_bi->page_offset = 0; in i40evf_clean_rx_ring()
581 memset(rx_ring->rx_bi, 0, bi_size); in i40evf_clean_rx_ring()
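The i40evf_clean_rx_ring() references above (lines 528–581) trace a per-buffer teardown: free the shared header-buffer allocation once, then for every descriptor unmap the data mapping, free any half-assembled skb, and release the packet-split page before zeroing the whole rx_bi array. A minimal sketch of that shape follows, not a verbatim copy of the driver: field names come from the listing, while the unmap sizes, the half-page payload mapping, and hdr_total_size are assumptions.

```c
/* Sketch in the spirit of i40evf_clean_rx_ring(); runs with the
 * surrounding function's rx_ring and loop counter i in scope. */

/* the header slices came from one coherent allocation (lines 539-548) */
if (rx_ring->rx_bi[0].hdr_buf) {
	dma_free_coherent(rx_ring->dev,
			  hdr_total_size,	/* placeholder: size of the coherent block */
			  rx_ring->rx_bi[0].hdr_buf,
			  rx_ring->rx_bi[0].dma);
	for (i = 0; i < rx_ring->count; i++) {
		rx_ring->rx_bi[i].dma = 0;
		rx_ring->rx_bi[i].hdr_buf = NULL;
	}
}

/* per-descriptor teardown (lines 554-576) */
for (i = 0; i < rx_ring->count; i++) {
	struct i40e_rx_buffer *rx_bi = &rx_ring->rx_bi[i];

	if (rx_bi->dma) {
		dma_unmap_single(rx_ring->dev, rx_bi->dma,
				 rx_ring->rx_buf_len, DMA_FROM_DEVICE);
		rx_bi->dma = 0;
	}
	if (rx_bi->skb) {
		dev_kfree_skb(rx_bi->skb);
		rx_bi->skb = NULL;
	}
	if (rx_bi->page) {
		if (rx_bi->page_dma) {
			dma_unmap_page(rx_ring->dev, rx_bi->page_dma,
				       PAGE_SIZE / 2, DMA_FROM_DEVICE);
			rx_bi->page_dma = 0;
		}
		__free_page(rx_bi->page);
		rx_bi->page = NULL;
		rx_bi->page_offset = 0;
	}
}

/* finally wipe the bookkeeping array (line 581) */
memset(rx_ring->rx_bi, 0, sizeof(struct i40e_rx_buffer) * rx_ring->count);
```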
599 kfree(rx_ring->rx_bi); in i40evf_free_rx_resources()
600 rx_ring->rx_bi = NULL; in i40evf_free_rx_resources()
619 struct i40e_rx_buffer *rx_bi; in i40evf_alloc_rx_headers() local
625 if (rx_ring->rx_bi[0].hdr_buf) in i40evf_alloc_rx_headers()
634 rx_bi = &rx_ring->rx_bi[i]; in i40evf_alloc_rx_headers()
635 rx_bi->dma = dma + (i * buf_size); in i40evf_alloc_rx_headers()
636 rx_bi->hdr_buf = buffer + (i * buf_size); in i40evf_alloc_rx_headers()
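i40evf_alloc_rx_headers() (lines 619–636) carves one coherent allocation into fixed-size per-descriptor header slices, which is why the cleanup above only has to free rx_bi[0].hdr_buf. A self-contained sketch of that carve-up, assuming the driver's own headers are included; the slice size and its 256-byte alignment are assumptions, the pointer arithmetic mirrors lines 634–636.

```c
/* Sketch, not the driver verbatim: one dma_alloc_coherent() block,
 * sliced into per-descriptor header buffers. */
static void sketch_alloc_rx_headers(struct i40e_ring *rx_ring)
{
	struct i40e_rx_buffer *rx_bi;
	dma_addr_t dma;
	void *buffer;
	int buf_size, i;

	if (rx_ring->rx_bi[0].hdr_buf)		/* already allocated (line 625) */
		return;

	buf_size = ALIGN(rx_ring->rx_hdr_len, 256);	/* slice size: assumed */
	buffer = dma_alloc_coherent(rx_ring->dev, buf_size * rx_ring->count,
				    &dma, GFP_KERNEL);
	if (!buffer)
		return;

	for (i = 0; i < rx_ring->count; i++) {
		rx_bi = &rx_ring->rx_bi[i];
		rx_bi->dma = dma + (i * buf_size);	  /* device view of slice i */
		rx_bi->hdr_buf = buffer + (i * buf_size); /* CPU view of slice i */
	}
}
```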
652 rx_ring->rx_bi = kzalloc(bi_size, GFP_KERNEL); in i40evf_setup_rx_descriptors()
653 if (!rx_ring->rx_bi) in i40evf_setup_rx_descriptors()
677 kfree(rx_ring->rx_bi); in i40evf_setup_rx_descriptors()
678 rx_ring->rx_bi = NULL; in i40evf_setup_rx_descriptors()
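i40evf_setup_rx_descriptors() (lines 652–678) and i40evf_free_rx_resources() (lines 599–600) bracket the lifetime of the rx_bi bookkeeping array itself: one zeroed kzalloc() per ring, with the same kfree()/NULL pair on both the error path and the teardown path. Roughly, with descriptor_ring_alloc_failed as a placeholder for the real error condition:

```c
/* Lifecycle of the rx_bi array (lines 599-600, 652-653, 677-678);
 * the hardware descriptor-ring allocation in between is elided. */
int bi_size = sizeof(struct i40e_rx_buffer) * rx_ring->count;

rx_ring->rx_bi = kzalloc(bi_size, GFP_KERNEL);
if (!rx_ring->rx_bi)
	return -ENOMEM;

if (descriptor_ring_alloc_failed) {	/* placeholder for the real error path */
	kfree(rx_ring->rx_bi);		/* same pair as i40evf_free_rx_resources() */
	rx_ring->rx_bi = NULL;
	return -ENOMEM;
}
```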
716 bi = &rx_ring->rx_bi[i]; in i40evf_alloc_rx_buffers_ps()
782 bi = &rx_ring->rx_bi[i]; in i40evf_alloc_rx_buffers_1buf()
1015 struct i40e_rx_buffer *rx_bi; in i40e_clean_rx_irq_ps() local
1038 rx_bi = &rx_ring->rx_bi[i]; in i40e_clean_rx_irq_ps()
1039 skb = rx_bi->skb; in i40e_clean_rx_irq_ps()
1052 rx_bi->dma, in i40e_clean_rx_irq_ps()
1071 prefetch(rx_bi->page); in i40e_clean_rx_irq_ps()
1072 rx_bi->skb = NULL; in i40e_clean_rx_irq_ps()
1080 memcpy(__skb_put(skb, len), rx_bi->hdr_buf, len); in i40e_clean_rx_irq_ps()
1087 rx_bi->page + rx_bi->page_offset, in i40e_clean_rx_irq_ps()
1089 rx_bi->page_offset += len; in i40e_clean_rx_irq_ps()
1096 rx_bi->page, in i40e_clean_rx_irq_ps()
1097 rx_bi->page_offset, in i40e_clean_rx_irq_ps()
1104 if ((page_count(rx_bi->page) == 1) && in i40e_clean_rx_irq_ps()
1105 (page_to_nid(rx_bi->page) == current_node)) in i40e_clean_rx_irq_ps()
1106 get_page(rx_bi->page); in i40e_clean_rx_irq_ps()
1108 rx_bi->page = NULL; in i40e_clean_rx_irq_ps()
1111 rx_bi->page_dma, in i40e_clean_rx_irq_ps()
1114 rx_bi->page_dma = 0; in i40e_clean_rx_irq_ps()
1122 next_buffer = &rx_ring->rx_bi[i]; in i40e_clean_rx_irq_ps()
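The i40e_clean_rx_irq_ps() references (lines 1015–1122) describe the packet-split completion path: the header lands in the per-descriptor hdr_buf slice and is copied into the skb's linear area, the payload page is attached as a fragment, and the page is kept for reuse only while the driver still owns it exclusively on the local NUMA node. A condensed sketch follows; hdr_len, data_len, end_of_frame, and i_next stand in for values read from the Rx descriptor, the truesize and half-page unmap size are assumptions, and the control flow is simplified relative to the real function.

```c
rx_bi = &rx_ring->rx_bi[i];
skb = rx_bi->skb;
prefetch(rx_bi->page);
rx_bi->skb = NULL;

/* header split: the device wrote hdr_len bytes into the hdr_buf slice */
memcpy(__skb_put(skb, hdr_len), rx_bi->hdr_buf, hdr_len);

if (data_len) {
	/* attach the payload page as a fragment */
	skb_add_rx_frag(skb, skb_shinfo(skb)->nr_frags,
			rx_bi->page, rx_bi->page_offset,
			data_len, PAGE_SIZE / 2);	/* truesize assumed */

	/* recycle the page only if we are its sole owner and it is node-local */
	if ((page_count(rx_bi->page) == 1) &&
	    (page_to_nid(rx_bi->page) == current_node))
		get_page(rx_bi->page);
	else
		rx_bi->page = NULL;

	dma_unmap_page(rx_ring->dev, rx_bi->page_dma,
		       PAGE_SIZE / 2, DMA_FROM_DEVICE);
	rx_bi->page_dma = 0;
}

/* multi-descriptor frame: park the skb on the next buffer (line 1122) */
if (!end_of_frame) {
	next_buffer = &rx_ring->rx_bi[i_next];
	next_buffer->skb = skb;
}
```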
1194 struct i40e_rx_buffer *rx_bi; in i40e_clean_rx_irq_1buf() local
1218 rx_bi = &rx_ring->rx_bi[i]; in i40e_clean_rx_irq_1buf()
1219 skb = rx_bi->skb; in i40e_clean_rx_irq_1buf()
1231 rx_bi->skb = NULL; in i40e_clean_rx_irq_1buf()
1238 dma_unmap_single(rx_ring->dev, rx_bi->dma, rx_ring->rx_buf_len, in i40e_clean_rx_irq_1buf()
1240 rx_bi->dma = 0; in i40e_clean_rx_irq_1buf()
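i40e_clean_rx_irq_1buf() (lines 1194–1240) is the single-buffer counterpart: each descriptor owns one pre-built skb whose data area sits behind a single streaming DMA mapping, so completion amounts to detaching the skb, unmapping it, and handing it up the stack. A short sketch, with frame_len as a placeholder for the length taken from the descriptor:

```c
rx_bi = &rx_ring->rx_bi[i];
skb = rx_bi->skb;
rx_bi->skb = NULL;		/* the ring no longer owns this skb */

dma_unmap_single(rx_ring->dev, rx_bi->dma,
		 rx_ring->rx_buf_len, DMA_FROM_DEVICE);
rx_bi->dma = 0;

skb_put(skb, frame_len);	/* frame_len: from the descriptor (placeholder) */
/* ...checksum/hash/VLAN handling, then the skb is passed to the stack
 * (napi_gro_receive() via the ring's q_vector)... */
```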