Cross-reference hits for the identifier "qpl" in the gve driver, grouped by
file; the number on each entry is the line within that file.

drivers/net/ethernet/google/gve/gve.h
   59  struct gve_queue_page_list *qpl; /* qpl assigned to this queue */
  108  struct gve_queue_page_list *qpl; /* QPL mapped into this FIFO */

drivers/net/ethernet/google/gve/gve_adminq.c
  205  .queue_page_list_id = cpu_to_be32(tx->tx_fifo.qpl->id),
  227  .queue_page_list_id = cpu_to_be32(rx->data.qpl->id),
  331  struct gve_queue_page_list *qpl)
  334  u32 num_entries = qpl->num_entries;
  335  u32 size = num_entries * sizeof(qpl->page_buses[0]);
  348  page_list[i] = cpu_to_be64(qpl->page_buses[i]);
  352  .page_list_id = cpu_to_be32(qpl->id),

drivers/net/ethernet/google/gve/gve_adminq.h
  214  struct gve_queue_page_list *qpl);

drivers/net/ethernet/google/gve/gve_main.c
  534  struct gve_queue_page_list *qpl = &priv->qpls[id];
  546  qpl->id = id;
  547  qpl->num_entries = 0;
  548  qpl->pages = kvzalloc(pages * sizeof(*qpl->pages), GFP_KERNEL);
  550  if (!qpl->pages)
  552  qpl->page_buses = kvzalloc(pages * sizeof(*qpl->page_buses),
  555  if (!qpl->page_buses)
  559  err = gve_alloc_page(&priv->pdev->dev, &qpl->pages[i],
  560  &qpl->page_buses[i],
  565  qpl->num_entries++;
  584  struct gve_queue_page_list *qpl = &priv->qpls[id];
  587  if (!qpl->pages)
  589  if (!qpl->page_buses)
  592  for (i = 0; i < qpl->num_entries; i++)
  593  gve_free_page(&priv->pdev->dev, qpl->pages[i],
  594  qpl->page_buses[i], gve_qpl_dma_dir(priv, id));
  596  kvfree(qpl->page_buses);
  598  kvfree(qpl->pages);
  599  priv->num_registered_pages -= qpl->num_entries;
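Read together, the gve_main.c hits at 534-599 trace the lifecycle of a queue
page list: the pages[] and page_buses[] arrays are kvzalloc'd, each page is
allocated and DMA-mapped one entry at a time, and the free path unwinds in
reverse. Below is a minimal sketch of the allocation routine reassembled from
those fragments; the function name, the pages parameter, and the error-unwind
behavior are assumptions filled in for illustration and may differ from the
actual driver.

    /* Sketch only: reassembled from the gve_main.c index fragments above. */
    static int gve_alloc_queue_page_list(struct gve_priv *priv, u32 id,
                                         int pages)
    {
            struct gve_queue_page_list *qpl = &priv->qpls[id];
            int err;
            int i;

            qpl->id = id;
            qpl->num_entries = 0;
            qpl->pages = kvzalloc(pages * sizeof(*qpl->pages), GFP_KERNEL);
            if (!qpl->pages)
                    return -ENOMEM;
            qpl->page_buses = kvzalloc(pages * sizeof(*qpl->page_buses),
                                       GFP_KERNEL);
            if (!qpl->page_buses)
                    return -ENOMEM;     /* assumes the caller frees qpl->pages */

            for (i = 0; i < pages; i++) {
                    /* One page allocated and DMA-mapped per QPL entry; the
                     * direction argument mirrors the free path at line 594.
                     */
                    err = gve_alloc_page(&priv->pdev->dev, &qpl->pages[i],
                                         &qpl->page_buses[i],
                                         gve_qpl_dma_dir(priv, id));
                    if (err)
                            return err; /* assumes the caller unwinds pages */
                    qpl->num_entries++;
            }

            return 0;
    }

Once allocated, the QPL is handed to the device through the admin queue: the
gve_adminq.c fragments at 331-352 show each DMA address being copied into a
big-endian page_list array and tagged with the QPL id before the register
command is issued (the registering function's name is not visible in the
prototype fragment at gve_adminq.h:214).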
drivers/net/ethernet/google/gve/gve_rx.c
   36  gve_unassign_qpl(priv, rx->data.qpl->id);
   37  rx->data.qpl = NULL;
   74  rx->data.qpl = gve_assign_rx_qpl(priv);
   77  struct page *page = rx->data.qpl->pages[i];
  292  dma_sync_single_for_cpu(&priv->pdev->dev, rx->data.qpl->page_buses[idx],

drivers/net/ethernet/google/gve/gve_tx.c
   31  fifo->base = vmap(fifo->qpl->pages, fifo->qpl->num_entries, VM_MAP,
   35  fifo->qpl->id);
   39  fifo->size = fifo->qpl->num_entries * PAGE_SIZE;
  162  gve_unassign_qpl(priv, tx->tx_fifo.qpl->id);
  163  tx->tx_fifo.qpl = NULL;
  209  tx->tx_fifo.qpl = gve_assign_tx_qpl(priv);
  450  gve_dma_sync_for_device(dev, tx->tx_fifo.qpl->page_buses,
  466  gve_dma_sync_for_device(dev, tx->tx_fifo.qpl->page_buses,
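On the TX side, the gve_tx.c fragments at 31-39 show the QPL's pages being
stitched into one virtually contiguous kernel mapping with vmap(), so the
transmit FIFO can be addressed linearly across page boundaries. A minimal
sketch of that setup follows; the function name, return convention, and log
wording are assumptions, since the index shows only the three lines above.

    /* Sketch only: FIFO setup suggested by gve_tx.c lines 31-39. */
    static int gve_tx_fifo_init(struct gve_priv *priv, struct gve_tx_fifo *fifo)
    {
            /* Map the QPL's discontiguous pages into one contiguous
             * virtual region so FIFO offsets can index it directly.
             */
            fifo->base = vmap(fifo->qpl->pages, fifo->qpl->num_entries, VM_MAP,
                              PAGE_KERNEL);
            if (!fifo->base) {
                    /* assumed wording; line 35 shows only the qpl id argument */
                    netif_err(priv, drv, priv->dev,
                              "Failed to vmap fifo, qpl_id = %d\n",
                              fifo->qpl->id);
                    return -ENOMEM;
            }

            fifo->size = fifo->qpl->num_entries * PAGE_SIZE;
            return 0;
    }

The matching teardown at lines 162-163 returns the QPL via gve_unassign_qpl()
and clears the pointer, while the gve_dma_sync_for_device() calls at 450 and
466 keep the device's view of the FIFO pages coherent after packet bytes are
copied in.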