Lines Matching refs:queue_num
565 int queue_num, ret; in init_dma_desc_rings() local
571 SXGBE_FOR_EACH_QUEUE(SXGBE_TX_QUEUES, queue_num) { in init_dma_desc_rings()
572 ret = init_tx_ring(priv->device, queue_num, in init_dma_desc_rings()
573 priv->txq[queue_num], tx_rsize); in init_dma_desc_rings()
582 priv->txq[queue_num]->priv_ptr = priv; in init_dma_desc_rings()
586 SXGBE_FOR_EACH_QUEUE(SXGBE_RX_QUEUES, queue_num) { in init_dma_desc_rings()
587 ret = init_rx_ring(netd, queue_num, in init_dma_desc_rings()
588 priv->rxq[queue_num], rx_rsize); in init_dma_desc_rings()
597 priv->rxq[queue_num]->priv_ptr = priv; in init_dma_desc_rings()
605 while (queue_num--) in init_dma_desc_rings()
606 free_tx_ring(priv->device, priv->txq[queue_num], tx_rsize); in init_dma_desc_rings()
610 while (queue_num--) in init_dma_desc_rings()
611 free_rx_ring(priv->device, priv->rxq[queue_num], rx_rsize); in init_dma_desc_rings()
639 int queue_num; in dma_free_tx_skbufs() local
641 SXGBE_FOR_EACH_QUEUE(SXGBE_TX_QUEUES, queue_num) { in dma_free_tx_skbufs()
642 struct sxgbe_tx_queue *tqueue = priv->txq[queue_num]; in dma_free_tx_skbufs()
649 int queue_num; in free_dma_desc_resources() local
657 SXGBE_FOR_EACH_QUEUE(SXGBE_TX_QUEUES, queue_num) { in free_dma_desc_resources()
658 free_tx_ring(priv->device, priv->txq[queue_num], tx_rsize); in free_dma_desc_resources()
662 SXGBE_FOR_EACH_QUEUE(SXGBE_RX_QUEUES, queue_num) { in free_dma_desc_resources()
663 free_rx_ring(priv->device, priv->rxq[queue_num], rx_rsize); in free_dma_desc_resources()
669 int queue_num; in txring_mem_alloc() local
671 SXGBE_FOR_EACH_QUEUE(SXGBE_TX_QUEUES, queue_num) { in txring_mem_alloc()
672 priv->txq[queue_num] = devm_kmalloc(priv->device, in txring_mem_alloc()
674 if (!priv->txq[queue_num]) in txring_mem_alloc()
683 int queue_num; in rxring_mem_alloc() local
685 SXGBE_FOR_EACH_QUEUE(SXGBE_RX_QUEUES, queue_num) { in rxring_mem_alloc()
686 priv->rxq[queue_num] = devm_kmalloc(priv->device, in rxring_mem_alloc()
688 if (!priv->rxq[queue_num]) in rxring_mem_alloc()
703 int queue_num; in sxgbe_mtl_operation_mode() local
708 SXGBE_FOR_EACH_QUEUE(priv->hw_cap.tx_mtl_queues, queue_num) in sxgbe_mtl_operation_mode()
709 priv->hw->mtl->set_tx_mtl_mode(priv->ioaddr, queue_num, in sxgbe_mtl_operation_mode()
714 SXGBE_FOR_EACH_QUEUE(priv->hw_cap.rx_mtl_queues, queue_num) in sxgbe_mtl_operation_mode()
715 priv->hw->mtl->set_rx_mtl_mode(priv->ioaddr, queue_num, in sxgbe_mtl_operation_mode()
720 SXGBE_FOR_EACH_QUEUE(priv->hw_cap.tx_mtl_queues, queue_num) in sxgbe_mtl_operation_mode()
721 priv->hw->mtl->set_tx_mtl_mode(priv->ioaddr, queue_num, in sxgbe_mtl_operation_mode()
724 SXGBE_FOR_EACH_QUEUE(priv->hw_cap.rx_mtl_queues, queue_num) in sxgbe_mtl_operation_mode()
725 priv->hw->mtl->set_rx_mtl_mode(priv->ioaddr, queue_num, in sxgbe_mtl_operation_mode()
805 u8 queue_num; in sxgbe_tx_all_clean() local
807 SXGBE_FOR_EACH_QUEUE(SXGBE_TX_QUEUES, queue_num) { in sxgbe_tx_all_clean()
808 struct sxgbe_tx_queue *tqueue = priv->txq[queue_num]; in sxgbe_tx_all_clean()
825 static void sxgbe_restart_tx_queue(struct sxgbe_priv_data *priv, int queue_num) in sxgbe_restart_tx_queue() argument
827 struct sxgbe_tx_queue *tx_ring = priv->txq[queue_num]; in sxgbe_restart_tx_queue()
829 queue_num); in sxgbe_restart_tx_queue()
835 priv->hw->dma->stop_tx_queue(priv->ioaddr, queue_num); in sxgbe_restart_tx_queue()
845 priv->hw->dma->start_tx_queue(priv->ioaddr, queue_num); in sxgbe_restart_tx_queue()
861 int queue_num; in sxgbe_reset_all_tx_queues() local
866 SXGBE_FOR_EACH_QUEUE(SXGBE_TX_QUEUES, queue_num) in sxgbe_reset_all_tx_queues()
867 sxgbe_restart_tx_queue(priv, queue_num); in sxgbe_reset_all_tx_queues()
961 int queue_num; in sxgbe_init_dma_engine() local
969 SXGBE_FOR_EACH_QUEUE(SXGBE_TX_QUEUES, queue_num) in sxgbe_init_dma_engine()
970 priv->hw->dma->cha_init(priv->ioaddr, queue_num, in sxgbe_init_dma_engine()
972 (priv->txq[queue_num])->dma_tx_phy, in sxgbe_init_dma_engine()
973 (priv->rxq[queue_num])->dma_rx_phy, in sxgbe_init_dma_engine()
987 int queue_num; in sxgbe_init_mtl_engine() local
989 SXGBE_FOR_EACH_QUEUE(SXGBE_TX_QUEUES, queue_num) { in sxgbe_init_mtl_engine()
990 priv->hw->mtl->mtl_set_txfifosize(priv->ioaddr, queue_num, in sxgbe_init_mtl_engine()
992 priv->hw->mtl->mtl_enable_txqueue(priv->ioaddr, queue_num); in sxgbe_init_mtl_engine()
1004 int queue_num; in sxgbe_disable_mtl_engine() local
1006 SXGBE_FOR_EACH_QUEUE(SXGBE_TX_QUEUES, queue_num) in sxgbe_disable_mtl_engine()
1007 priv->hw->mtl->mtl_disable_txqueue(priv->ioaddr, queue_num); in sxgbe_disable_mtl_engine()
1033 u8 queue_num; in sxgbe_tx_init_coalesce() local
1035 SXGBE_FOR_EACH_QUEUE(SXGBE_TX_QUEUES, queue_num) { in sxgbe_tx_init_coalesce()
1036 struct sxgbe_tx_queue *p = priv->txq[queue_num]; in sxgbe_tx_init_coalesce()
1040 (unsigned long)&priv->txq[queue_num]); in sxgbe_tx_init_coalesce()
1048 u8 queue_num; in sxgbe_tx_del_timer() local
1050 SXGBE_FOR_EACH_QUEUE(SXGBE_TX_QUEUES, queue_num) { in sxgbe_tx_del_timer()
1051 struct sxgbe_tx_queue *p = priv->txq[queue_num]; in sxgbe_tx_del_timer()
1068 int ret, queue_num; in sxgbe_open() local
1105 SXGBE_FOR_EACH_QUEUE(SXGBE_RX_QUEUES, queue_num) { in sxgbe_open()
1106 priv->hw->mac->enable_rxqueue(priv->ioaddr, queue_num); in sxgbe_open()
1133 SXGBE_FOR_EACH_QUEUE(SXGBE_TX_QUEUES, queue_num) { in sxgbe_open()
1135 (priv->txq[queue_num])->irq_no, in sxgbe_open()
1137 dev->name, priv->txq[queue_num]); in sxgbe_open()
1146 SXGBE_FOR_EACH_QUEUE(SXGBE_RX_QUEUES, queue_num) { in sxgbe_open()
1148 (priv->rxq[queue_num])->irq_no, in sxgbe_open()
1150 dev->name, priv->rxq[queue_num]); in sxgbe_open()
2100 u8 queue_num; in sxgbe_drv_probe() local
2153 SXGBE_FOR_EACH_QUEUE(SXGBE_TX_QUEUES, queue_num) { in sxgbe_drv_probe()
2154 priv->hw->dma->enable_tso(priv->ioaddr, queue_num); in sxgbe_drv_probe()
2237 u8 queue_num; in sxgbe_drv_remove() local
2241 SXGBE_FOR_EACH_QUEUE(SXGBE_RX_QUEUES, queue_num) { in sxgbe_drv_remove()
2242 priv->hw->mac->disable_rxqueue(priv->ioaddr, queue_num); in sxgbe_drv_remove()
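
All of the references above share one idiom: the driver walks its per-queue arrays (priv->txq[], priv->rxq[]) with SXGBE_FOR_EACH_QUEUE, using queue_num both as the loop index and, on the error paths in init_dma_desc_rings(), as the unwind counter in "while (queue_num--)". The sketch below shows that pattern in isolation. It assumes SXGBE_FOR_EACH_QUEUE is the plain for-loop macro from sxgbe_common.h and uses an illustrative queue count; struct tx_ring and the *_stub helpers are hypothetical stand-ins for the driver's sxgbe_tx_queue, init_tx_ring() and free_tx_ring(), not the real implementations.

/* Hedged sketch of the SXGBE_FOR_EACH_QUEUE iteration and the
 * "while (queue_num--)" unwind seen in the hits above.  The macro body
 * mirrors the simple for loop in sxgbe_common.h; everything else here
 * is an illustrative stub, not driver code.
 */
#include <stdio.h>
#include <stdlib.h>

#define SXGBE_TX_QUEUES 8	/* illustrative queue count */
#define SXGBE_FOR_EACH_QUEUE(max_queues, queue_num) \
	for (queue_num = 0; queue_num < max_queues; queue_num++)

struct tx_ring { void *desc; };			/* stand-in for sxgbe_tx_queue */
static struct tx_ring *txq[SXGBE_TX_QUEUES];	/* stand-in for priv->txq[]    */

static int init_tx_ring_stub(int queue_num, int size)
{
	txq[queue_num] = malloc(sizeof(*txq[queue_num]));
	if (!txq[queue_num])
		return -1;
	txq[queue_num]->desc = calloc(size, sizeof(long));
	if (!txq[queue_num]->desc) {
		free(txq[queue_num]);
		txq[queue_num] = NULL;
		return -1;
	}
	return 0;
}

static void free_tx_ring_stub(int queue_num)
{
	if (!txq[queue_num])
		return;
	free(txq[queue_num]->desc);
	free(txq[queue_num]);
	txq[queue_num] = NULL;
}

int main(void)
{
	int queue_num, ret;

	/* Forward walk: set up every TX ring, as in init_dma_desc_rings(). */
	SXGBE_FOR_EACH_QUEUE(SXGBE_TX_QUEUES, queue_num) {
		ret = init_tx_ring_stub(queue_num, 512);
		if (ret)
			goto err_unwind;
	}
	printf("all %d tx rings initialised\n", SXGBE_TX_QUEUES);

	/* Teardown walk, as in free_dma_desc_resources(). */
	SXGBE_FOR_EACH_QUEUE(SXGBE_TX_QUEUES, queue_num)
		free_tx_ring_stub(queue_num);
	return 0;

err_unwind:
	/* queue_num is left at the index that failed, so this frees
	 * exactly the rings 0 .. queue_num - 1 that were already set up. */
	while (queue_num--)
		free_tx_ring_stub(queue_num);
	return 1;
}

The same shape recurs in the other functions listed: the MTL, DMA and IRQ setup paths simply substitute a different per-queue operation inside the SXGBE_FOR_EACH_QUEUE body.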