rx_fdq    96 drivers/net/ethernet/ti/netcp.h         void *rx_fdq[KNAV_DMA_FDQ_PER_CHAN];
rx_fdq   797 drivers/net/ethernet/ti/netcp_core.c    while ((dma = knav_queue_pop(netcp->rx_fdq[fdq], &dma_sz))) {
rx_fdq   841 drivers/net/ethernet/ti/netcp_core.c    !IS_ERR_OR_NULL(netcp->rx_fdq[i]); i++)
rx_fdq   923 drivers/net/ethernet/ti/netcp_core.c    knav_queue_push(netcp->rx_fdq[fdq], dma, sizeof(*hwdesc), 0);
rx_fdq   938 drivers/net/ethernet/ti/netcp_core.c    for (i = 0; i < KNAV_DMA_FDQ_PER_CHAN && netcp->rx_fdq[i]; i++) {
rx_fdq   940 drivers/net/ethernet/ti/netcp_core.c    knav_queue_get_count(netcp->rx_fdq[i]);
rx_fdq  1567 drivers/net/ethernet/ti/netcp_core.c    !IS_ERR_OR_NULL(netcp->rx_fdq[i]) ; ++i) {
rx_fdq  1568 drivers/net/ethernet/ti/netcp_core.c    knav_queue_close(netcp->rx_fdq[i]);
rx_fdq  1569 drivers/net/ethernet/ti/netcp_core.c    netcp->rx_fdq[i] = NULL;
rx_fdq  1656 drivers/net/ethernet/ti/netcp_core.c    netcp->rx_fdq[i] = knav_queue_open(name, KNAV_QUEUE_GP, 0);
rx_fdq  1657 drivers/net/ethernet/ti/netcp_core.c    if (IS_ERR(netcp->rx_fdq[i])) {
rx_fdq  1658 drivers/net/ethernet/ti/netcp_core.c    ret = PTR_ERR(netcp->rx_fdq[i]);
rx_fdq  1675 drivers/net/ethernet/ti/netcp_core.c    if (netcp->rx_fdq[i])
rx_fdq  1676 drivers/net/ethernet/ti/netcp_core.c    last_fdq = knav_queue_get_id(netcp->rx_fdq[i]);
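
The references above show the lifecycle of the rx free-descriptor-queue handles: the array is declared in netcp.h, each slot is opened with knav_queue_open() and checked with IS_ERR()/IS_ERR_OR_NULL(), descriptors are popped and pushed during rx refill, and the queues are closed and NULLed on teardown. The sketch below condenses that open/close pattern; it is not the driver's exact code. The struct netcp_rx_ctx type and the helpers netcp_rxfdq_open()/netcp_rxfdq_close() are hypothetical stand-ins, per-queue name formatting is omitted, and the knav header paths assume the TI Keystone headers under include/linux/soc/ti/.

/*
 * Minimal sketch of the rx_fdq[] open/teardown pattern seen in the
 * listing above. netcp_rx_ctx, netcp_rxfdq_open() and netcp_rxfdq_close()
 * are hypothetical names used only for this example.
 */
#include <linux/err.h>
#include <linux/soc/ti/knav_dma.h>	/* KNAV_DMA_FDQ_PER_CHAN */
#include <linux/soc/ti/knav_qmss.h>	/* knav_queue_open() and friends */

struct netcp_rx_ctx {				/* stand-in for struct netcp_intf */
	void *rx_fdq[KNAV_DMA_FDQ_PER_CHAN];	/* free descriptor queue handles */
};

/* Open one general-purpose queue handle per FDQ slot (name formatting omitted). */
static int netcp_rxfdq_open(struct netcp_rx_ctx *ctx, const char *name)
{
	int i;

	for (i = 0; i < KNAV_DMA_FDQ_PER_CHAN; i++) {
		ctx->rx_fdq[i] = knav_queue_open(name, KNAV_QUEUE_GP, 0);
		if (IS_ERR(ctx->rx_fdq[i])) {
			int ret = PTR_ERR(ctx->rx_fdq[i]);

			ctx->rx_fdq[i] = NULL;	/* leave array in a known state */
			return ret;
		}
	}
	return 0;
}

/* Close every FDQ handle that was successfully opened, then clear the slot. */
static void netcp_rxfdq_close(struct netcp_rx_ctx *ctx)
{
	int i;

	for (i = 0; i < KNAV_DMA_FDQ_PER_CHAN &&
		    !IS_ERR_OR_NULL(ctx->rx_fdq[i]); ++i) {
		knav_queue_close(ctx->rx_fdq[i]);
		ctx->rx_fdq[i] = NULL;
	}
}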