Lines Matching refs:channel
135 struct xgbe_channel *channel_mem, *channel; in xgbe_alloc_channels() local
156 for (i = 0, channel = channel_mem; i < count; i++, channel++) { in xgbe_alloc_channels()
157 snprintf(channel->name, sizeof(channel->name), "channel-%d", i); in xgbe_alloc_channels()
158 channel->pdata = pdata; in xgbe_alloc_channels()
159 channel->queue_index = i; in xgbe_alloc_channels()
160 channel->dma_regs = pdata->xgmac_regs + DMA_CH_BASE + in xgbe_alloc_channels()
173 channel->dma_irq = ret; in xgbe_alloc_channels()
178 channel->tx_ring = tx_ring++; in xgbe_alloc_channels()
183 channel->rx_ring = rx_ring++; in xgbe_alloc_channels()
188 channel->name, channel->dma_regs, channel->dma_irq, in xgbe_alloc_channels()
189 channel->tx_ring, channel->rx_ring); in xgbe_alloc_channels()
192 pdata->channel = channel_mem; in xgbe_alloc_channels()
212 if (!pdata->channel) in xgbe_free_channels()
215 kfree(pdata->channel->rx_ring); in xgbe_free_channels()
216 kfree(pdata->channel->tx_ring); in xgbe_free_channels()
217 kfree(pdata->channel); in xgbe_free_channels()
219 pdata->channel = NULL; in xgbe_free_channels()
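
The hits above come from the channel setup/teardown pair: one contiguous block holds every channel, the Tx/Rx rings are array-allocated alongside it, and the first channel's ring pointers double as the base addresses that xgbe_free_channels() later kfree()s. A minimal sketch of that pattern; the ring-count fields (tx_ring_count/rx_ring_count), DMA_CH_INC, and the error handling are assumptions, not taken from the hits:

static int xgbe_alloc_channels(struct xgbe_prv_data *pdata)
{
	struct xgbe_channel *channel_mem, *channel;
	struct xgbe_ring *tx_ring, *rx_ring;
	unsigned int count = pdata->channel_count;
	unsigned int i;

	/* One allocation for all channels; pdata->channel keeps the base */
	channel_mem = kcalloc(count, sizeof(*channel_mem), GFP_KERNEL);
	if (!channel_mem)
		return -ENOMEM;

	/* Rings are allocated as arrays and handed out channel by channel */
	tx_ring = kcalloc(pdata->tx_ring_count, sizeof(*tx_ring), GFP_KERNEL);
	rx_ring = kcalloc(pdata->rx_ring_count, sizeof(*rx_ring), GFP_KERNEL);
	if (!tx_ring || !rx_ring)
		goto err;

	for (i = 0, channel = channel_mem; i < count; i++, channel++) {
		snprintf(channel->name, sizeof(channel->name), "channel-%d", i);
		channel->pdata = pdata;
		channel->queue_index = i;
		channel->dma_regs = pdata->xgmac_regs + DMA_CH_BASE +
				    (DMA_CH_INC * i);
		/* per-channel IRQ lookup (channel->dma_irq) omitted here */

		if (i < pdata->tx_ring_count)
			channel->tx_ring = tx_ring++;
		if (i < pdata->rx_ring_count)
			channel->rx_ring = rx_ring++;
	}

	pdata->channel = channel_mem;
	return 0;

err:
	kfree(rx_ring);
	kfree(tx_ring);
	kfree(channel_mem);
	return -ENOMEM;
}

static void xgbe_free_channels(struct xgbe_prv_data *pdata)
{
	if (!pdata->channel)
		return;

	/* The first channel holds the base pointers of the ring arrays */
	kfree(pdata->channel->rx_ring);
	kfree(pdata->channel->tx_ring);
	kfree(pdata->channel);
	pdata->channel = NULL;
}
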
233 static int xgbe_maybe_stop_tx_queue(struct xgbe_channel *channel, in xgbe_maybe_stop_tx_queue() argument
236 struct xgbe_prv_data *pdata = channel->pdata; in xgbe_maybe_stop_tx_queue()
241 netif_stop_subqueue(pdata->netdev, channel->queue_index); in xgbe_maybe_stop_tx_queue()
248 pdata->hw_if.tx_start_xmit(channel, ring); in xgbe_maybe_stop_tx_queue()
277 struct xgbe_channel *channel; in xgbe_enable_rx_tx_ints() local
281 channel = pdata->channel; in xgbe_enable_rx_tx_ints()
282 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_enable_rx_tx_ints()
283 if (channel->tx_ring && channel->rx_ring) in xgbe_enable_rx_tx_ints()
285 else if (channel->tx_ring) in xgbe_enable_rx_tx_ints()
287 else if (channel->rx_ring) in xgbe_enable_rx_tx_ints()
292 hw_if->enable_int(channel, int_id); in xgbe_enable_rx_tx_ints()
299 struct xgbe_channel *channel; in xgbe_disable_rx_tx_ints() local
303 channel = pdata->channel; in xgbe_disable_rx_tx_ints()
304 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_disable_rx_tx_ints()
305 if (channel->tx_ring && channel->rx_ring) in xgbe_disable_rx_tx_ints()
307 else if (channel->tx_ring) in xgbe_disable_rx_tx_ints()
309 else if (channel->rx_ring) in xgbe_disable_rx_tx_ints()
314 hw_if->disable_int(channel, int_id); in xgbe_disable_rx_tx_ints()
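
Both loops above map a channel's ring ownership onto a single DMA interrupt ID before calling into the hw_if ops. A sketch of the enable side; the enum type and the XGMAC_INT_* names are assumed for illustration, only the enable_int()/disable_int() calls appear in the hits:

static void xgbe_enable_rx_tx_ints(struct xgbe_prv_data *pdata)
{
	struct xgbe_hw_if *hw_if = &pdata->hw_if;
	struct xgbe_channel *channel = pdata->channel;
	enum xgbe_int int_id;				/* assumed type name */
	unsigned int i;

	for (i = 0; i < pdata->channel_count; i++, channel++) {
		if (channel->tx_ring && channel->rx_ring)
			int_id = XGMAC_INT_DMA_CH_SR_TI_RI;	/* assumed */
		else if (channel->tx_ring)
			int_id = XGMAC_INT_DMA_CH_SR_TI;	/* assumed */
		else if (channel->rx_ring)
			int_id = XGMAC_INT_DMA_CH_SR_RI;	/* assumed */
		else
			continue;

		hw_if->enable_int(channel, int_id);
	}
}

The disable side is identical except that it ends in hw_if->disable_int(channel, int_id).
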
322 struct xgbe_channel *channel; in xgbe_isr() local
341 channel = pdata->channel + i; in xgbe_isr()
343 dma_ch_isr = XGMAC_DMA_IOREAD(channel, DMA_CH_SR); in xgbe_isr()
371 XGMAC_DMA_IOWRITE(channel, DMA_CH_SR, dma_ch_isr); in xgbe_isr()
402 struct xgbe_channel *channel = data; in xgbe_dma_isr() local
407 if (napi_schedule_prep(&channel->napi)) { in xgbe_dma_isr()
409 disable_irq_nosync(channel->dma_irq); in xgbe_dma_isr()
412 __napi_schedule(&channel->napi); in xgbe_dma_isr()
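
The per-channel interrupt handler above is the standard NAPI hand-off: claim the NAPI instance, mask the channel's own IRQ line, and defer the ring processing to the poll routine. A sketch under that reading; the IRQ_HANDLED return is assumed, the rest follows the hits directly:

static irqreturn_t xgbe_dma_isr(int irq, void *data)
{
	struct xgbe_channel *channel = data;

	/* Hand the work to NAPI; the line stays masked until the poll
	 * routine re-enables it (see xgbe_one_poll below) */
	if (napi_schedule_prep(&channel->napi)) {
		disable_irq_nosync(channel->dma_irq);
		__napi_schedule(&channel->napi);
	}

	return IRQ_HANDLED;
}
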
420 struct xgbe_channel *channel = (struct xgbe_channel *)data; in xgbe_tx_timer() local
421 struct xgbe_prv_data *pdata = channel->pdata; in xgbe_tx_timer()
426 napi = (pdata->per_channel_irq) ? &channel->napi : &pdata->napi; in xgbe_tx_timer()
431 disable_irq_nosync(channel->dma_irq); in xgbe_tx_timer()
439 channel->tx_timer_active = 0; in xgbe_tx_timer()
464 struct xgbe_channel *channel; in xgbe_init_timers() local
470 channel = pdata->channel; in xgbe_init_timers()
471 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_init_timers()
472 if (!channel->tx_ring) in xgbe_init_timers()
475 setup_timer(&channel->tx_timer, xgbe_tx_timer, in xgbe_init_timers()
476 (unsigned long)channel); in xgbe_init_timers()
487 struct xgbe_channel *channel; in xgbe_stop_timers() local
492 channel = pdata->channel; in xgbe_stop_timers()
493 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_stop_timers()
494 if (!channel->tx_ring) in xgbe_stop_timers()
497 del_timer_sync(&channel->tx_timer); in xgbe_stop_timers()
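
The timer hits show a per-channel Tx coalescing timer that exists only for channels with a Tx ring; xgbe_tx_timer() then schedules the channel's NAPI instance (or the shared one) and masks the channel IRQ, mirroring xgbe_dma_isr(). A sketch of the init/stop pair, reconstructed from the hits with only the loop shells added (setup_timer() is the old-style timer API the hits themselves use):

static void xgbe_init_timers(struct xgbe_prv_data *pdata)
{
	struct xgbe_channel *channel = pdata->channel;
	unsigned int i;

	for (i = 0; i < pdata->channel_count; i++, channel++) {
		if (!channel->tx_ring)
			continue;

		/* The channel pointer rides along as the timer's data word */
		setup_timer(&channel->tx_timer, xgbe_tx_timer,
			    (unsigned long)channel);
	}
}

static void xgbe_stop_timers(struct xgbe_prv_data *pdata)
{
	struct xgbe_channel *channel = pdata->channel;
	unsigned int i;

	for (i = 0; i < pdata->channel_count; i++, channel++) {
		if (!channel->tx_ring)
			continue;

		del_timer_sync(&channel->tx_timer);
	}
}
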
603 struct xgbe_channel *channel; in xgbe_napi_enable() local
607 channel = pdata->channel; in xgbe_napi_enable()
608 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_napi_enable()
610 netif_napi_add(pdata->netdev, &channel->napi, in xgbe_napi_enable()
613 napi_enable(&channel->napi); in xgbe_napi_enable()
626 struct xgbe_channel *channel; in xgbe_napi_disable() local
630 channel = pdata->channel; in xgbe_napi_disable()
631 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_napi_disable()
632 napi_disable(&channel->napi); in xgbe_napi_disable()
635 netif_napi_del(&channel->napi); in xgbe_napi_disable()
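
With per-channel IRQs each channel gets its own NAPI instance; otherwise a single device-wide instance is used (the tx-timer hit above makes the same per_channel_irq choice). A sketch of the per-channel branch; the add/del flags and NAPI_POLL_WEIGHT are assumptions, while xgbe_one_poll and the napi calls come from the hits:

static void xgbe_napi_enable(struct xgbe_prv_data *pdata, unsigned int add)
{
	struct xgbe_channel *channel = pdata->channel;
	unsigned int i;

	for (i = 0; i < pdata->channel_count; i++, channel++) {
		if (add)
			netif_napi_add(pdata->netdev, &channel->napi,
				       xgbe_one_poll, NAPI_POLL_WEIGHT);

		napi_enable(&channel->napi);
	}
}

static void xgbe_napi_disable(struct xgbe_prv_data *pdata, unsigned int del)
{
	struct xgbe_channel *channel = pdata->channel;
	unsigned int i;

	for (i = 0; i < pdata->channel_count; i++, channel++) {
		napi_disable(&channel->napi);

		if (del)
			netif_napi_del(&channel->napi);
	}
}
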
647 struct xgbe_channel *channel; in xgbe_request_irqs() local
663 channel = pdata->channel; in xgbe_request_irqs()
664 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_request_irqs()
665 snprintf(channel->dma_irq_name, in xgbe_request_irqs()
666 sizeof(channel->dma_irq_name) - 1, in xgbe_request_irqs()
668 channel->queue_index); in xgbe_request_irqs()
670 ret = devm_request_irq(pdata->dev, channel->dma_irq, in xgbe_request_irqs()
672 channel->dma_irq_name, channel); in xgbe_request_irqs()
675 channel->dma_irq); in xgbe_request_irqs()
684 for (i--, channel--; i < pdata->channel_count; i--, channel--) in xgbe_request_irqs()
685 devm_free_irq(pdata->dev, channel->dma_irq, channel); in xgbe_request_irqs()
694 struct xgbe_channel *channel; in xgbe_free_irqs() local
702 channel = pdata->channel; in xgbe_free_irqs()
703 for (i = 0; i < pdata->channel_count; i++, channel++) in xgbe_free_irqs()
704 devm_free_irq(pdata->dev, channel->dma_irq, channel); in xgbe_free_irqs()
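
The IRQ hits show devm_request_irq() with a per-channel name buffer and a manual unwind on failure; because i is unsigned, the backwards cleanup loop terminates once the decrement wraps past channel_count. A sketch; the "%s-TxRx-%u" name format and the alert message are assumptions:

static int xgbe_request_irqs(struct xgbe_prv_data *pdata)
{
	struct net_device *netdev = pdata->netdev;
	struct xgbe_channel *channel;
	unsigned int i;
	int ret;

	if (!pdata->per_channel_irq)
		return 0;

	channel = pdata->channel;
	for (i = 0; i < pdata->channel_count; i++, channel++) {
		snprintf(channel->dma_irq_name,
			 sizeof(channel->dma_irq_name) - 1,
			 "%s-TxRx-%u", netdev_name(netdev),
			 channel->queue_index);

		ret = devm_request_irq(pdata->dev, channel->dma_irq,
				       xgbe_dma_isr, 0,
				       channel->dma_irq_name, channel);
		if (ret) {
			netdev_alert(netdev, "error requesting irq %d\n",
				     channel->dma_irq);
			goto err_irq;
		}
	}

	return 0;

err_irq:
	/* Unsigned wrap-around ends this loop once every IRQ already
	 * requested has been released */
	for (i--, channel--; i < pdata->channel_count; i--, channel--)
		devm_free_irq(pdata->dev, channel->dma_irq, channel);

	return ret;
}
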
739 struct xgbe_channel *channel; in xgbe_free_tx_data() local
746 channel = pdata->channel; in xgbe_free_tx_data()
747 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_free_tx_data()
748 ring = channel->tx_ring; in xgbe_free_tx_data()
764 struct xgbe_channel *channel; in xgbe_free_rx_data() local
771 channel = pdata->channel; in xgbe_free_rx_data()
772 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_free_rx_data()
773 ring = channel->rx_ring; in xgbe_free_rx_data()
919 struct xgbe_channel *channel; in xgbe_stop() local
942 channel = pdata->channel; in xgbe_stop()
943 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_stop()
944 if (!channel->tx_ring) in xgbe_stop()
947 txq = netdev_get_tx_queue(netdev, channel->queue_index); in xgbe_stop()
1396 struct xgbe_channel *channel; in xgbe_xmit() local
1404 channel = pdata->channel + skb->queue_mapping; in xgbe_xmit()
1405 txq = netdev_get_tx_queue(netdev, channel->queue_index); in xgbe_xmit()
1406 ring = channel->tx_ring; in xgbe_xmit()
1423 ret = xgbe_maybe_stop_tx_queue(channel, ring, packet->rdesc_count); in xgbe_xmit()
1436 if (!desc_if->map_tx_skb(channel, skb)) { in xgbe_xmit()
1447 hw_if->dev_xmit(channel); in xgbe_xmit()
1453 xgbe_maybe_stop_tx_queue(channel, ring, XGBE_TX_MAX_DESCS); in xgbe_xmit()
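
The xmit hits show how a transmit is steered: skb->queue_mapping selects the channel, the subqueue is stopped early when the ring may not have room, and the skb is then mapped and handed to the hardware layer. A condensed sketch; the packet_data field, the dev_kfree_skb_any() failure path and the NETDEV_TX_* returns are assumptions, and the VLAN/TSO preparation steps the listing omits are left out:

static int xgbe_xmit(struct sk_buff *skb, struct net_device *netdev)
{
	struct xgbe_prv_data *pdata = netdev_priv(netdev);
	struct xgbe_hw_if *hw_if = &pdata->hw_if;
	struct xgbe_desc_if *desc_if = &pdata->desc_if;
	struct xgbe_packet_data *packet;	/* assumed type and field */
	struct xgbe_channel *channel;
	struct xgbe_ring *ring;
	int ret;

	channel = pdata->channel + skb->queue_mapping;
	ring = channel->tx_ring;
	packet = &ring->packet_data;

	/* Stop the subqueue now if this packet's descriptors might not fit */
	ret = xgbe_maybe_stop_tx_queue(channel, ring, packet->rdesc_count);
	if (ret)
		return ret;

	if (!desc_if->map_tx_skb(channel, skb)) {
		dev_kfree_skb_any(skb);
		return NETDEV_TX_OK;
	}

	/* Build the descriptors and kick the DMA engine */
	hw_if->dev_xmit(channel);

	/* Stop early for the next packet if less than a worst-case chain
	 * of descriptors remains */
	xgbe_maybe_stop_tx_queue(channel, ring, XGBE_TX_MAX_DESCS);

	return NETDEV_TX_OK;
}
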
1610 struct xgbe_channel *channel; in xgbe_poll_controller() local
1616 channel = pdata->channel; in xgbe_poll_controller()
1617 for (i = 0; i < pdata->channel_count; i++, channel++) in xgbe_poll_controller()
1618 xgbe_dma_isr(channel->dma_irq, channel); in xgbe_poll_controller()
1724 static void xgbe_rx_refresh(struct xgbe_channel *channel) in xgbe_rx_refresh() argument
1726 struct xgbe_prv_data *pdata = channel->pdata; in xgbe_rx_refresh()
1729 struct xgbe_ring *ring = channel->rx_ring; in xgbe_rx_refresh()
1752 XGMAC_DMA_IOWRITE(channel, DMA_CH_RDTR_LO, in xgbe_rx_refresh()
1802 static int xgbe_tx_poll(struct xgbe_channel *channel) in xgbe_tx_poll() argument
1804 struct xgbe_prv_data *pdata = channel->pdata; in xgbe_tx_poll()
1807 struct xgbe_ring *ring = channel->tx_ring; in xgbe_tx_poll()
1827 txq = netdev_get_tx_queue(netdev, channel->queue_index); in xgbe_tx_poll()
1873 static int xgbe_rx_poll(struct xgbe_channel *channel, int budget) in xgbe_rx_poll() argument
1875 struct xgbe_prv_data *pdata = channel->pdata; in xgbe_rx_poll()
1877 struct xgbe_ring *ring = channel->rx_ring; in xgbe_rx_poll()
1898 napi = (pdata->per_channel_irq) ? &channel->napi : &pdata->napi; in xgbe_rx_poll()
1921 xgbe_rx_refresh(channel); in xgbe_rx_poll()
1923 if (hw_if->dev_read(channel)) in xgbe_rx_poll()
2026 skb_record_rx_queue(skb, channel->queue_index); in xgbe_rx_poll()
2051 struct xgbe_channel *channel = container_of(napi, struct xgbe_channel, in xgbe_one_poll() local
2058 xgbe_tx_poll(channel); in xgbe_one_poll()
2061 processed = xgbe_rx_poll(channel, budget); in xgbe_one_poll()
2069 enable_irq(channel->dma_irq); in xgbe_one_poll()
2081 struct xgbe_channel *channel; in xgbe_all_poll() local
2093 channel = pdata->channel; in xgbe_all_poll()
2094 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_all_poll()
2096 xgbe_tx_poll(channel); in xgbe_all_poll()
2101 processed += xgbe_rx_poll(channel, ring_budget); in xgbe_all_poll()
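
The poll routines close the loop: Tx completions are reaped first, Rx is processed up to the budget, and once the channel is quiet NAPI completes and the per-channel IRQ masked in xgbe_dma_isr() is re-enabled. A sketch of the per-channel variant; napi_complete() is assumed from the usual NAPI contract, only tx_poll/rx_poll/enable_irq appear in the hits:

static int xgbe_one_poll(struct napi_struct *napi, int budget)
{
	struct xgbe_channel *channel = container_of(napi, struct xgbe_channel,
						    napi);
	int processed;

	/* Tx completion cleanup is not budgeted */
	xgbe_tx_poll(channel);

	/* Receive at most 'budget' packets */
	processed = xgbe_rx_poll(channel, budget);

	/* Budget not exhausted: the channel is idle, so finish NAPI and
	 * unmask the DMA IRQ that xgbe_dma_isr() disabled */
	if (processed < budget) {
		napi_complete(napi);
		enable_irq(channel->dma_irq);
	}

	return processed;
}

xgbe_all_poll() follows the same shape for the shared-IRQ case, splitting the budget across channels (the ring_budget accumulation in the last hits) instead of re-enabling a per-channel line.
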