Lines Matching refs: pg_chunk
120 struct fl_pg_chunk pg_chunk; member
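
The member at source line 120 embeds the free list's page-chunk state in each software Rx descriptor. A sketch of struct fl_pg_chunk as implied by the references below; the authoritative declaration lives in the cxgb3 adapter header, so treat exact types and field order here as assumptions:

struct fl_pg_chunk {
	struct page *page;	/* backing (possibly compound) page of the chunk */
	void *va;		/* kernel virtual address of this chunk's buffer */
	unsigned int offset;	/* byte offset of the buffer within the page */
	unsigned long *p_cnt;	/* driver-private use count stored inside the page */
	dma_addr_t mapping;	/* PCI DMA address of the mapped page */
};
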
355 if (q->use_pages && d->pg_chunk.page) { in clear_rx_desc()
356 (*d->pg_chunk.p_cnt)--; in clear_rx_desc()
357 if (!*d->pg_chunk.p_cnt) in clear_rx_desc()
359 d->pg_chunk.mapping, in clear_rx_desc()
362 put_page(d->pg_chunk.page); in clear_rx_desc()
363 d->pg_chunk.page = NULL; in clear_rx_desc()
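
Source lines 355-363 release one descriptor's chunk. A condensed sketch of that path; q->alloc_size and the non-page (skb) branch do not appear in the hits and are assumptions here:

static void clear_rx_desc(struct pci_dev *pdev, const struct sge_fl *q,
			  struct rx_sw_desc *d)
{
	if (q->use_pages && d->pg_chunk.page) {
		(*d->pg_chunk.p_cnt)--;			/* one less chunk in flight */
		if (!*d->pg_chunk.p_cnt)		/* last chunk of the page */
			pci_unmap_page(pdev, d->pg_chunk.mapping,
				       q->alloc_size, PCI_DMA_FROMDEVICE);
		put_page(d->pg_chunk.page);		/* drop the per-chunk page ref */
		d->pg_chunk.page = NULL;
	}
	/* skb-mode descriptors take the other branch (omitted here) */
}
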
393 if (q->pg_chunk.page) { in free_rx_bufs()
394 __free_pages(q->pg_chunk.page, q->order); in free_rx_bufs()
395 q->pg_chunk.page = NULL; in free_rx_bufs()
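
Source lines 393-395 are the tail of free_rx_bufs(): after every descriptor has been cleared, the free list may still hold a partially carved page of its own, which is freed outright. A sketch, assuming q->order is the page allocation order:

	if (q->pg_chunk.page) {		/* partially used page still parked on the FL */
		__free_pages(q->pg_chunk.page, q->order);
		q->pg_chunk.page = NULL;
	}
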
446 if (!q->pg_chunk.page) { in alloc_pg_chunk()
449 q->pg_chunk.page = alloc_pages(gfp, order); in alloc_pg_chunk()
450 if (unlikely(!q->pg_chunk.page)) in alloc_pg_chunk()
452 q->pg_chunk.va = page_address(q->pg_chunk.page); in alloc_pg_chunk()
453 q->pg_chunk.p_cnt = q->pg_chunk.va + (PAGE_SIZE << order) - in alloc_pg_chunk()
455 q->pg_chunk.offset = 0; in alloc_pg_chunk()
456 mapping = pci_map_page(adapter->pdev, q->pg_chunk.page, in alloc_pg_chunk()
458 q->pg_chunk.mapping = mapping; in alloc_pg_chunk()
460 sd->pg_chunk = q->pg_chunk; in alloc_pg_chunk()
462 prefetch(sd->pg_chunk.p_cnt); in alloc_pg_chunk()
464 q->pg_chunk.offset += q->buf_size; in alloc_pg_chunk()
465 if (q->pg_chunk.offset == (PAGE_SIZE << order)) in alloc_pg_chunk()
466 q->pg_chunk.page = NULL; in alloc_pg_chunk()
468 q->pg_chunk.va += q->buf_size; in alloc_pg_chunk()
469 get_page(q->pg_chunk.page); in alloc_pg_chunk()
472 if (sd->pg_chunk.offset == 0) in alloc_pg_chunk()
473 *sd->pg_chunk.p_cnt = 1; in alloc_pg_chunk()
475 *sd->pg_chunk.p_cnt += 1; in alloc_pg_chunk()
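
Source lines 446-475 carve fixed-size Rx buffers out of one (possibly compound) page: the page is allocated and DMA-mapped once, a shared use count is kept in reserved space at the end of the page, and each descriptor snapshots the chunk state. A condensed sketch; SGE_PG_RSVD, q->alloc_size and the error return are assumptions not visible in the hits:

static int alloc_pg_chunk(struct adapter *adapter, struct sge_fl *q,
			  struct rx_sw_desc *sd, gfp_t gfp, unsigned int order)
{
	if (!q->pg_chunk.page) {			/* start a fresh page */
		dma_addr_t mapping;

		q->pg_chunk.page = alloc_pages(gfp, order);
		if (unlikely(!q->pg_chunk.page))
			return -ENOMEM;
		q->pg_chunk.va = page_address(q->pg_chunk.page);
		/* the shared use count lives in the reserved tail of the page */
		q->pg_chunk.p_cnt = q->pg_chunk.va + (PAGE_SIZE << order) -
				    SGE_PG_RSVD;
		q->pg_chunk.offset = 0;
		mapping = pci_map_page(adapter->pdev, q->pg_chunk.page,
				       0, q->alloc_size, PCI_DMA_FROMDEVICE);
		q->pg_chunk.mapping = mapping;
	}
	sd->pg_chunk = q->pg_chunk;			/* descriptor takes a snapshot */
	prefetch(sd->pg_chunk.p_cnt);

	q->pg_chunk.offset += q->buf_size;		/* advance to the next chunk */
	if (q->pg_chunk.offset == (PAGE_SIZE << order))
		q->pg_chunk.page = NULL;		/* page fully carved up */
	else {
		q->pg_chunk.va += q->buf_size;
		get_page(q->pg_chunk.page);		/* one page ref per extra chunk */
	}

	if (sd->pg_chunk.offset == 0)			/* first chunk seeds the count */
		*sd->pg_chunk.p_cnt = 1;
	else
		*sd->pg_chunk.p_cnt += 1;
	return 0;
}
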
516 mapping = sd->pg_chunk.mapping + sd->pg_chunk.offset; in refill_fl()
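
Source line 516 shows how refill_fl() derives the bus address programmed into the hardware descriptor: the page's single DMA mapping plus the chunk's offset within it. A sketch of the page-mode branch of the refill loop; add_one_rx_chunk() and q->gen are assumptions here:

		if (q->use_pages) {
			if (unlikely(alloc_pg_chunk(adap, q, sd, gfp, q->order)))
				break;			/* out of pages, stop refilling */
			/* buffer address = page mapping + chunk offset */
			mapping = sd->pg_chunk.mapping + sd->pg_chunk.offset;
			dma_unmap_addr_set(sd, dma_addr, mapping);
			add_one_rx_chunk(mapping, d, q->gen);	/* fill the HW descriptor */
		}
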
854 memcpy(newskb->data, sd->pg_chunk.va, len); in get_packet_pg()
870 prefetch(sd->pg_chunk.p_cnt); in get_packet_pg()
883 (*sd->pg_chunk.p_cnt)--; in get_packet_pg()
884 if (!*sd->pg_chunk.p_cnt && sd->pg_chunk.page != fl->pg_chunk.page) in get_packet_pg()
886 sd->pg_chunk.mapping, in get_packet_pg()
891 memcpy(newskb->data, sd->pg_chunk.va, SGE_RX_PULL_LEN); in get_packet_pg()
892 skb_fill_page_desc(newskb, 0, sd->pg_chunk.page, in get_packet_pg()
893 sd->pg_chunk.offset + SGE_RX_PULL_LEN, in get_packet_pg()
900 sd->pg_chunk.page, in get_packet_pg()
901 sd->pg_chunk.offset, len); in get_packet_pg()
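
Source lines 854-901 are the two delivery strategies in get_packet_pg(): frames under the copy threshold are memcpy'd out of the chunk into a small skb (the chunk stays with the free list), while larger frames get SGE_RX_PULL_LEN bytes of header pulled into the skb and the remainder attached as a page fragment, dropping the chunk's use count and unmapping the page once its last chunk completes. A condensed, hypothetical sketch (SGE_RX_COPY_THRES, fl->alloc_size and the recycling/error paths are assumptions), named get_packet_pg_sketch to avoid confusion with the real function:

static struct sk_buff *get_packet_pg_sketch(struct adapter *adap,
					    struct sge_fl *fl, unsigned int len)
{
	struct rx_sw_desc *sd = &fl->sdesc[fl->cidx];
	struct sk_buff *newskb;

	if (len <= SGE_RX_COPY_THRES) {		/* small frame: copy it out */
		newskb = alloc_skb(len, GFP_ATOMIC);
		if (!newskb)
			return NULL;
		memcpy(__skb_put(newskb, len), sd->pg_chunk.va, len);
		return newskb;			/* chunk is recycled by the caller */
	}

	newskb = alloc_skb(SGE_RX_PULL_LEN, GFP_ATOMIC);
	if (!newskb)
		return NULL;

	prefetch(sd->pg_chunk.p_cnt);

	(*sd->pg_chunk.p_cnt)--;
	/* last outstanding chunk of a page the free list has finished carving */
	if (!*sd->pg_chunk.p_cnt && sd->pg_chunk.page != fl->pg_chunk.page)
		pci_unmap_page(adap->pdev, sd->pg_chunk.mapping,
			       fl->alloc_size, PCI_DMA_FROMDEVICE);

	/* pull the headers, hand over the rest of the chunk as a page fragment */
	memcpy(__skb_put(newskb, SGE_RX_PULL_LEN), sd->pg_chunk.va,
	       SGE_RX_PULL_LEN);
	skb_fill_page_desc(newskb, 0, sd->pg_chunk.page,
			   sd->pg_chunk.offset + SGE_RX_PULL_LEN,
			   len - SGE_RX_PULL_LEN);
	newskb->len = len;
	newskb->data_len = len - SGE_RX_PULL_LEN;
	newskb->truesize += newskb->data_len;
	return newskb;
}
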
2086 (*sd->pg_chunk.p_cnt)--; in lro_add_page()
2087 if (!*sd->pg_chunk.p_cnt && sd->pg_chunk.page != fl->pg_chunk.page) in lro_add_page()
2089 sd->pg_chunk.mapping, in lro_add_page()
2094 put_page(sd->pg_chunk.page); in lro_add_page()
2105 cpl = qs->lro_va = sd->pg_chunk.va + 2; in lro_add_page()
2119 __skb_frag_set_page(rx_frag, sd->pg_chunk.page); in lro_add_page()
2120 rx_frag->page_offset = sd->pg_chunk.offset + offset; in lro_add_page()
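
Source lines 2086-2120 apply the same release pattern on the LRO path: drop the chunk's use count, unmap the page once its last chunk has arrived and the free list has moved on, put the page if no aggregation skb is available, and otherwise append the chunk as a page fragment (the first fragment also exposes the CPL header through pg_chunk.va). A fragment-style sketch with the locals taken from lro_add_page(); checksum handling and skb length bookkeeping are abbreviated:

	/* skb, sd, fl, qs and len come from the surrounding lro_add_page() */
	skb_frag_t *rx_frag;
	struct cpl_rx_pkt *cpl;
	int offset = 0;

	(*sd->pg_chunk.p_cnt)--;
	if (!*sd->pg_chunk.p_cnt && sd->pg_chunk.page != fl->pg_chunk.page)
		pci_unmap_page(adap->pdev, sd->pg_chunk.mapping,
			       fl->alloc_size, PCI_DMA_FROMDEVICE);

	if (!skb) {			/* no skb to aggregate into: drop the chunk */
		put_page(sd->pg_chunk.page);
		return;
	}

	if (!skb_shinfo(skb)->nr_frags) {
		/* first fragment: the CPL header sits at the start of the chunk */
		offset = 2 + sizeof(struct cpl_rx_pkt);
		cpl = qs->lro_va = sd->pg_chunk.va + 2;
	}

	/* append the chunk's payload as the next page fragment */
	rx_frag = &skb_shinfo(skb)->frags[skb_shinfo(skb)->nr_frags];
	__skb_frag_set_page(rx_frag, sd->pg_chunk.page);
	rx_frag->page_offset = sd->pg_chunk.offset + offset;
	skb_frag_size_set(rx_frag, len - offset);
	skb_shinfo(skb)->nr_frags++;
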
2308 void *addr = fl->sdesc[fl->cidx].pg_chunk.va; in process_responses()
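
Source line 2308 is where process_responses() peeks at a page-mode free-list buffer: the chunk's virtual address is prefetched so the CPL and packet headers are warm before the frame is handed to lro_add_page() or get_packet_pg(). A minimal sketch of that branch; __refill_fl() is an assumption not shown in the hits:

		if (fl->use_pages) {
			void *addr = fl->sdesc[fl->cidx].pg_chunk.va;

			prefetch(addr);		/* warm the headers before parsing */
			__refill_fl(adap, fl);	/* keep the free list topped up */
			/* ...then deliver via lro_add_page() or get_packet_pg() */
		}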