Lines Matching refs:channel

174 struct xgbe_channel *channel; in xgbe_config_pblx8() local
177 channel = pdata->channel; in xgbe_config_pblx8()
178 for (i = 0; i < pdata->channel_count; i++, channel++) in xgbe_config_pblx8()
179 XGMAC_DMA_IOWRITE_BITS(channel, DMA_CH_CR, PBLX8, in xgbe_config_pblx8()
187 return XGMAC_DMA_IOREAD_BITS(pdata->channel, DMA_CH_TCR, PBL); in xgbe_get_tx_pbl_val()
192 struct xgbe_channel *channel; in xgbe_config_tx_pbl_val() local
195 channel = pdata->channel; in xgbe_config_tx_pbl_val()
196 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_config_tx_pbl_val()
197 if (!channel->tx_ring) in xgbe_config_tx_pbl_val()
200 XGMAC_DMA_IOWRITE_BITS(channel, DMA_CH_TCR, PBL, in xgbe_config_tx_pbl_val()
209 return XGMAC_DMA_IOREAD_BITS(pdata->channel, DMA_CH_RCR, PBL); in xgbe_get_rx_pbl_val()
214 struct xgbe_channel *channel; in xgbe_config_rx_pbl_val() local
217 channel = pdata->channel; in xgbe_config_rx_pbl_val()
218 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_config_rx_pbl_val()
219 if (!channel->rx_ring) in xgbe_config_rx_pbl_val()
222 XGMAC_DMA_IOWRITE_BITS(channel, DMA_CH_RCR, PBL, in xgbe_config_rx_pbl_val()
231 struct xgbe_channel *channel; in xgbe_config_osp_mode() local
234 channel = pdata->channel; in xgbe_config_osp_mode()
235 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_config_osp_mode()
236 if (!channel->tx_ring) in xgbe_config_osp_mode()
239 XGMAC_DMA_IOWRITE_BITS(channel, DMA_CH_TCR, OSP, in xgbe_config_osp_mode()
290 struct xgbe_channel *channel; in xgbe_config_rx_coalesce() local
293 channel = pdata->channel; in xgbe_config_rx_coalesce()
294 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_config_rx_coalesce()
295 if (!channel->rx_ring) in xgbe_config_rx_coalesce()
298 XGMAC_DMA_IOWRITE_BITS(channel, DMA_CH_RIWT, RWT, in xgbe_config_rx_coalesce()
312 struct xgbe_channel *channel; in xgbe_config_rx_buffer_size() local
315 channel = pdata->channel; in xgbe_config_rx_buffer_size()
316 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_config_rx_buffer_size()
317 if (!channel->rx_ring) in xgbe_config_rx_buffer_size()
320 XGMAC_DMA_IOWRITE_BITS(channel, DMA_CH_RCR, RBSZ, in xgbe_config_rx_buffer_size()
327 struct xgbe_channel *channel; in xgbe_config_tso_mode() local
330 channel = pdata->channel; in xgbe_config_tso_mode()
331 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_config_tso_mode()
332 if (!channel->tx_ring) in xgbe_config_tso_mode()
335 XGMAC_DMA_IOWRITE_BITS(channel, DMA_CH_TCR, TSE, 1); in xgbe_config_tso_mode()
341 struct xgbe_channel *channel; in xgbe_config_sph_mode() local
344 channel = pdata->channel; in xgbe_config_sph_mode()
345 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_config_sph_mode()
346 if (!channel->rx_ring) in xgbe_config_sph_mode()
349 XGMAC_DMA_IOWRITE_BITS(channel, DMA_CH_CR, SPH, 1); in xgbe_config_sph_mode()
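
The configuration helpers above all share the same per-channel iteration idiom: walk pdata->channel, skip channels without the relevant ring, and program the DMA channel register. A reconstruction of xgbe_config_tx_pbl_val() from the hits follows; this is a sketch, not verbatim driver code — the signature, the break on a missing Tx ring, and the value written (pdata->tx_pbl) are assumptions not visible in the truncated lines.

static int xgbe_config_tx_pbl_val(struct xgbe_prv_data *pdata)
{
	struct xgbe_channel *channel;
	unsigned int i;

	channel = pdata->channel;
	for (i = 0; i < pdata->channel_count; i++, channel++) {
		if (!channel->tx_ring)
			break;

		/* Program the Tx programmable burst length for this channel */
		XGMAC_DMA_IOWRITE_BITS(channel, DMA_CH_TCR, PBL,
				       pdata->tx_pbl);
	}

	return 0;
}
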
600 struct xgbe_channel *channel; in xgbe_enable_dma_interrupts() local
604 channel = pdata->channel; in xgbe_enable_dma_interrupts()
605 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_enable_dma_interrupts()
607 dma_ch_isr = XGMAC_DMA_IOREAD(channel, DMA_CH_SR); in xgbe_enable_dma_interrupts()
608 XGMAC_DMA_IOWRITE(channel, DMA_CH_SR, dma_ch_isr); in xgbe_enable_dma_interrupts()
622 if (channel->tx_ring) { in xgbe_enable_dma_interrupts()
630 if (channel->rx_ring) { in xgbe_enable_dma_interrupts()
641 XGMAC_DMA_IOWRITE(channel, DMA_CH_IER, dma_ch_ier); in xgbe_enable_dma_interrupts()
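
For the xgbe_enable_dma_interrupts() hits, the visible flow per channel is: clear the channel status register by writing back what was just read, build an interrupt enable mask based on which rings exist, then write the mask to DMA_CH_IER. A minimal sketch of the loop body; the TIE/RIE field names and the XGMAC_SET_BITS usage are assumptions, since only the register reads and writes appear in the listing.

		/* Clear any pending channel interrupts by writing the
		 * status register contents back to itself */
		dma_ch_isr = XGMAC_DMA_IOREAD(channel, DMA_CH_SR);
		XGMAC_DMA_IOWRITE(channel, DMA_CH_SR, dma_ch_isr);

		/* Build the interrupt enable mask for the rings in use */
		dma_ch_ier = 0;
		if (channel->tx_ring)
			XGMAC_SET_BITS(dma_ch_ier, DMA_CH_IER, TIE, 1);
		if (channel->rx_ring)
			XGMAC_SET_BITS(dma_ch_ier, DMA_CH_IER, RIE, 1);

		XGMAC_DMA_IOWRITE(channel, DMA_CH_IER, dma_ch_ier);
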
1090 static void xgbe_tx_desc_init(struct xgbe_channel *channel) in xgbe_tx_desc_init() argument
1092 struct xgbe_ring *ring = channel->tx_ring; in xgbe_tx_desc_init()
1108 XGMAC_DMA_IOWRITE(channel, DMA_CH_TDRLR, ring->rdesc_count - 1); in xgbe_tx_desc_init()
1112 XGMAC_DMA_IOWRITE(channel, DMA_CH_TDLR_HI, in xgbe_tx_desc_init()
1114 XGMAC_DMA_IOWRITE(channel, DMA_CH_TDLR_LO, in xgbe_tx_desc_init()
1165 static void xgbe_rx_desc_init(struct xgbe_channel *channel) in xgbe_rx_desc_init() argument
1167 struct xgbe_prv_data *pdata = channel->pdata; in xgbe_rx_desc_init()
1168 struct xgbe_ring *ring = channel->rx_ring; in xgbe_rx_desc_init()
1184 XGMAC_DMA_IOWRITE(channel, DMA_CH_RDRLR, ring->rdesc_count - 1); in xgbe_rx_desc_init()
1188 XGMAC_DMA_IOWRITE(channel, DMA_CH_RDLR_HI, in xgbe_rx_desc_init()
1190 XGMAC_DMA_IOWRITE(channel, DMA_CH_RDLR_LO, in xgbe_rx_desc_init()
1195 XGMAC_DMA_IOWRITE(channel, DMA_CH_RDTR_LO, in xgbe_rx_desc_init()
1375 static void xgbe_tx_start_xmit(struct xgbe_channel *channel, in xgbe_tx_start_xmit() argument
1378 struct xgbe_prv_data *pdata = channel->pdata; in xgbe_tx_start_xmit()
1387 XGMAC_DMA_IOWRITE(channel, DMA_CH_TDTR_LO, in xgbe_tx_start_xmit()
1391 if (pdata->tx_usecs && !channel->tx_timer_active) { in xgbe_tx_start_xmit()
1392 channel->tx_timer_active = 1; in xgbe_tx_start_xmit()
1393 mod_timer(&channel->tx_timer, in xgbe_tx_start_xmit()
1400 static void xgbe_dev_xmit(struct xgbe_channel *channel) in xgbe_dev_xmit() argument
1402 struct xgbe_prv_data *pdata = channel->pdata; in xgbe_dev_xmit()
1403 struct xgbe_ring *ring = channel->tx_ring; in xgbe_dev_xmit()
1607 channel->queue_index))) in xgbe_dev_xmit()
1608 xgbe_tx_start_xmit(channel, ring); in xgbe_dev_xmit()
1613 channel->name, start_index & (ring->rdesc_count - 1), in xgbe_dev_xmit()
1619 static int xgbe_dev_read(struct xgbe_channel *channel) in xgbe_dev_read() argument
1621 struct xgbe_ring *ring = channel->rx_ring; in xgbe_dev_read()
1625 struct net_device *netdev = channel->pdata->netdev; in xgbe_dev_read()
1703 if (channel->pdata->netdev->features & NETIF_F_RXCSUM) in xgbe_dev_read()
1732 DBGPR("<--xgbe_dev_read: %s - descriptor=%u (cur=%d)\n", channel->name, in xgbe_dev_read()
1750 static int xgbe_enable_int(struct xgbe_channel *channel, in xgbe_enable_int() argument
1755 dma_ch_ier = XGMAC_DMA_IOREAD(channel, DMA_CH_IER); in xgbe_enable_int()
1784 dma_ch_ier |= channel->saved_ier; in xgbe_enable_int()
1790 XGMAC_DMA_IOWRITE(channel, DMA_CH_IER, dma_ch_ier); in xgbe_enable_int()
1795 static int xgbe_disable_int(struct xgbe_channel *channel, in xgbe_disable_int() argument
1800 dma_ch_ier = XGMAC_DMA_IOREAD(channel, DMA_CH_IER); in xgbe_disable_int()
1829 channel->saved_ier = dma_ch_ier & XGBE_DMA_INTERRUPT_MASK; in xgbe_disable_int()
1836 XGMAC_DMA_IOWRITE(channel, DMA_CH_IER, dma_ch_ier); in xgbe_disable_int()
2553 struct xgbe_channel *channel) in xgbe_prepare_tx_stop() argument
2560 if (channel->queue_index < DMA_DSRX_FIRST_QUEUE) { in xgbe_prepare_tx_stop()
2562 tx_pos = (channel->queue_index * DMA_DSR_Q_WIDTH) + in xgbe_prepare_tx_stop()
2565 tx_qidx = channel->queue_index - DMA_DSRX_FIRST_QUEUE; in xgbe_prepare_tx_stop()
2590 channel->queue_index); in xgbe_prepare_tx_stop()
2595 struct xgbe_channel *channel; in xgbe_enable_tx() local
2599 channel = pdata->channel; in xgbe_enable_tx()
2600 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_enable_tx()
2601 if (!channel->tx_ring) in xgbe_enable_tx()
2604 XGMAC_DMA_IOWRITE_BITS(channel, DMA_CH_TCR, ST, 1); in xgbe_enable_tx()
2618 struct xgbe_channel *channel; in xgbe_disable_tx() local
2622 channel = pdata->channel; in xgbe_disable_tx()
2623 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_disable_tx()
2624 if (!channel->tx_ring) in xgbe_disable_tx()
2627 xgbe_prepare_tx_stop(pdata, channel); in xgbe_disable_tx()
2638 channel = pdata->channel; in xgbe_disable_tx()
2639 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_disable_tx()
2640 if (!channel->tx_ring) in xgbe_disable_tx()
2643 XGMAC_DMA_IOWRITE_BITS(channel, DMA_CH_TCR, ST, 0); in xgbe_disable_tx()
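
The xgbe_disable_tx() hits show a two-pass shutdown: every Tx channel is drained first, and only afterwards is the DMA start (ST) bit cleared, so no channel is stopped with work still in flight. A sketch of that sequence; the break on a missing Tx ring is an assumption, and whatever MAC-level disable happens between the two loops is not visible in the truncated lines and is omitted here.

	/* First pass: wait for each Tx DMA channel to drain */
	channel = pdata->channel;
	for (i = 0; i < pdata->channel_count; i++, channel++) {
		if (!channel->tx_ring)
			break;

		xgbe_prepare_tx_stop(pdata, channel);
	}

	/* Second pass: clear the start (ST) bit on each Tx DMA channel */
	channel = pdata->channel;
	for (i = 0; i < pdata->channel_count; i++, channel++) {
		if (!channel->tx_ring)
			break;

		XGMAC_DMA_IOWRITE_BITS(channel, DMA_CH_TCR, ST, 0);
	}
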
2649 struct xgbe_channel *channel; in xgbe_enable_rx() local
2653 channel = pdata->channel; in xgbe_enable_rx()
2654 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_enable_rx()
2655 if (!channel->rx_ring) in xgbe_enable_rx()
2658 XGMAC_DMA_IOWRITE_BITS(channel, DMA_CH_RCR, SR, 1); in xgbe_enable_rx()
2676 struct xgbe_channel *channel; in xgbe_disable_rx() local
2689 channel = pdata->channel; in xgbe_disable_rx()
2690 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_disable_rx()
2691 if (!channel->rx_ring) in xgbe_disable_rx()
2694 XGMAC_DMA_IOWRITE_BITS(channel, DMA_CH_RCR, SR, 0); in xgbe_disable_rx()
2700 struct xgbe_channel *channel; in xgbe_powerup_tx() local
2704 channel = pdata->channel; in xgbe_powerup_tx()
2705 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_powerup_tx()
2706 if (!channel->tx_ring) in xgbe_powerup_tx()
2709 XGMAC_DMA_IOWRITE_BITS(channel, DMA_CH_TCR, ST, 1); in xgbe_powerup_tx()
2718 struct xgbe_channel *channel; in xgbe_powerdown_tx() local
2722 channel = pdata->channel; in xgbe_powerdown_tx()
2723 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_powerdown_tx()
2724 if (!channel->tx_ring) in xgbe_powerdown_tx()
2727 xgbe_prepare_tx_stop(pdata, channel); in xgbe_powerdown_tx()
2734 channel = pdata->channel; in xgbe_powerdown_tx()
2735 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_powerdown_tx()
2736 if (!channel->tx_ring) in xgbe_powerdown_tx()
2739 XGMAC_DMA_IOWRITE_BITS(channel, DMA_CH_TCR, ST, 0); in xgbe_powerdown_tx()
2745 struct xgbe_channel *channel; in xgbe_powerup_rx() local
2749 channel = pdata->channel; in xgbe_powerup_rx()
2750 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_powerup_rx()
2751 if (!channel->rx_ring) in xgbe_powerup_rx()
2754 XGMAC_DMA_IOWRITE_BITS(channel, DMA_CH_RCR, SR, 1); in xgbe_powerup_rx()
2760 struct xgbe_channel *channel; in xgbe_powerdown_rx() local
2764 channel = pdata->channel; in xgbe_powerdown_rx()
2765 for (i = 0; i < pdata->channel_count; i++, channel++) { in xgbe_powerdown_rx()
2766 if (!channel->rx_ring) in xgbe_powerdown_rx()
2769 XGMAC_DMA_IOWRITE_BITS(channel, DMA_CH_RCR, SR, 0); in xgbe_powerdown_rx()