Lines matching refs:fifo (identifier references in the vxge driver's vxge-main.c)

97 static inline void VXGE_COMPLETE_VPATH_TX(struct vxge_fifo *fifo)  in VXGE_COMPLETE_VPATH_TX()  argument
109 if (__netif_tx_trylock(fifo->txq)) { in VXGE_COMPLETE_VPATH_TX()
110 vxge_hw_vpath_poll_tx(fifo->handle, &skb_ptr, in VXGE_COMPLETE_VPATH_TX()
112 __netif_tx_unlock(fifo->txq); in VXGE_COMPLETE_VPATH_TX()
127 VXGE_COMPLETE_VPATH_TX(&vdev->vpaths[i].fifo); in VXGE_COMPLETE_ALL_TX()
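VXGE_COMPLETE_VPATH_TX() only polls the hardware for finished TxDs if it can take the per-queue TX lock without spinning. A minimal sketch of that trylock pattern, assuming a cut-down fifo struct and a hypothetical poll helper in place of vxge_hw_vpath_poll_tx():

/* Minimal sketch of the trylock-based TX completion in
 * VXGE_COMPLETE_VPATH_TX(); demo_poll_hw() stands in for
 * vxge_hw_vpath_poll_tx(). */
#include <linux/netdevice.h>

struct demo_fifo {
	struct netdev_queue *txq;	/* TX queue backing this fifo */
};

static void demo_poll_hw(struct demo_fifo *fifo)
{
	/* placeholder: reap completed TxDs from the hardware fifo */
}

static inline void demo_complete_vpath_tx(struct demo_fifo *fifo)
{
	/* If another CPU already holds the queue lock it will reap the
	 * completions itself, so do not spin here. */
	if (__netif_tx_trylock(fifo->txq)) {
		demo_poll_hw(fifo);
		__netif_tx_unlock(fifo->txq);
	}
}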
550 struct vxge_fifo *fifo = (struct vxge_fifo *)userdata; in vxge_xmit_compl() local
570 "tcode = 0x%x", fifo->ndev->name, __func__, in vxge_xmit_compl()
576 fifo->ndev->name, __func__, __LINE__, in vxge_xmit_compl()
579 fifo->stats.tx_errors++; in vxge_xmit_compl()
582 "error t_code %01x", fifo->ndev->name, in vxge_xmit_compl()
588 pci_unmap_single(fifo->pdev, txd_priv->dma_buffers[i++], in vxge_xmit_compl()
592 pci_unmap_page(fifo->pdev, in vxge_xmit_compl()
601 u64_stats_update_begin(&fifo->stats.syncp); in vxge_xmit_compl()
602 fifo->stats.tx_frms++; in vxge_xmit_compl()
603 fifo->stats.tx_bytes += skb->len; in vxge_xmit_compl()
604 u64_stats_update_end(&fifo->stats.syncp); in vxge_xmit_compl()
614 if (pkt_cnt > fifo->indicate_max_pkts) in vxge_xmit_compl()
621 if (netif_tx_queue_stopped(fifo->txq)) in vxge_xmit_compl()
622 netif_tx_wake_queue(fifo->txq); in vxge_xmit_compl()
626 fifo->ndev->name, __func__, __LINE__); in vxge_xmit_compl()
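vxge_xmit_compl() walks the completed descriptors, unmaps their DMA buffers, accounts the frames under the fifo's u64_stats sync, and wakes the TX queue once room is available again. A sketch of the accounting and wake-up step, with the stats struct reduced to the two counters seen above:

/* Sketch of the accounting and queue wake-up done per completed frame
 * in vxge_xmit_compl(); the structures are reduced to what is used here. */
#include <linux/netdevice.h>
#include <linux/skbuff.h>
#include <linux/u64_stats_sync.h>

struct demo_fifo_stats {
	struct u64_stats_sync syncp;
	u64 tx_frms;
	u64 tx_bytes;
};

struct demo_fifo {
	struct netdev_queue *txq;
	struct demo_fifo_stats stats;
};

static void demo_tx_complete_one(struct demo_fifo *fifo, struct sk_buff *skb)
{
	/* Update the 64-bit counters inside the write section so 32-bit
	 * readers always see a consistent pair. */
	u64_stats_update_begin(&fifo->stats.syncp);
	fifo->stats.tx_frms++;
	fifo->stats.tx_bytes += skb->len;
	u64_stats_update_end(&fifo->stats.syncp);

	dev_consume_skb_any(skb);

	/* Descriptors were just freed: restart the queue if xmit stopped it. */
	if (netif_tx_queue_stopped(fifo->txq))
		netif_tx_wake_queue(fifo->txq);
}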
805 struct vxge_fifo *fifo = NULL; in vxge_xmit() local
860 fifo = &vdev->vpaths[vpath_no].fifo; in vxge_xmit()
861 fifo_hw = fifo->handle; in vxge_xmit()
863 if (netif_tx_queue_stopped(fifo->txq)) in vxge_xmit()
870 fifo->stats.txd_not_free++; in vxge_xmit()
878 netif_tx_stop_queue(fifo->txq); in vxge_xmit()
884 fifo->stats.txd_out_of_desc++; in vxge_xmit()
900 dma_pointer = pci_map_single(fifo->pdev, skb->data, first_frg_len, in vxge_xmit()
903 if (unlikely(pci_dma_mapping_error(fifo->pdev, dma_pointer))) { in vxge_xmit()
905 fifo->stats.pci_map_fail++; in vxge_xmit()
929 dma_pointer = (u64)skb_frag_dma_map(&fifo->pdev->dev, frag, in vxge_xmit()
933 if (unlikely(dma_mapping_error(&fifo->pdev->dev, dma_pointer))) in vxge_xmit()
980 pci_unmap_single(fifo->pdev, txdl_priv->dma_buffers[j++], in vxge_xmit()
984 pci_unmap_page(fifo->pdev, txdl_priv->dma_buffers[j], in vxge_xmit()
991 netif_tx_stop_queue(fifo->txq); in vxge_xmit()
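In vxge_xmit() the linear part of the skb is mapped with pci_map_single() and each page fragment with skb_frag_dma_map(); on a mapping failure the already-mapped buffers are unwound and the queue is stopped. A sketch of the fragment-mapping loop with that unwinding, using the generic DMA API and a caller-supplied buffer array (descriptor posting is left out):

/* Sketch of the fragment-mapping loop in the transmit path with error
 * unwinding (cf. vxge_xmit()); descriptor posting is left out and the
 * buffer array is supplied by the caller. */
#include <linux/skbuff.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>

static int demo_map_frags(struct device *dev, struct sk_buff *skb,
			  dma_addr_t *buffers)
{
	int i;

	for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) {
		const skb_frag_t *frag = &skb_shinfo(skb)->frags[i];

		buffers[i] = skb_frag_dma_map(dev, frag, 0,
					      skb_frag_size(frag),
					      DMA_TO_DEVICE);
		if (unlikely(dma_mapping_error(dev, buffers[i])))
			goto unwind;
	}
	return 0;

unwind:
	/* Undo the mappings made so far before reporting the failure;
	 * the caller then stops the queue and drops the skb. */
	while (--i >= 0)
		dma_unmap_page(dev, buffers[i],
			       skb_frag_size(&skb_shinfo(skb)->frags[i]),
			       DMA_TO_DEVICE);
	return -ENOMEM;
}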
1034 struct vxge_fifo *fifo = (struct vxge_fifo *)userdata; in vxge_tx_term() local
1051 pci_unmap_single(fifo->pdev, txd_priv->dma_buffers[i++], in vxge_tx_term()
1055 pci_unmap_page(fifo->pdev, txd_priv->dma_buffers[i++], in vxge_tx_term()
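vxge_tx_term() runs for descriptors still owned by the driver at teardown and releases their mappings the same way the completion path does: one pci_unmap_single() for the linear part, then pci_unmap_page() per fragment. A sketch under an assumed per-TxD private layout, keeping the legacy pci_* DMA wrappers the listing shows:

/* Sketch of releasing the mappings of a descriptor still owned by the
 * driver at fifo teardown (cf. vxge_tx_term()); the per-TxD private
 * layout is an assumption, the legacy pci_* wrappers follow the listing. */
#include <linux/pci.h>
#include <linux/skbuff.h>

#define DEMO_MAX_BUFFERS	18	/* linear part + worst-case fragments */

struct demo_txd_priv {
	struct sk_buff *skb;
	dma_addr_t dma_buffers[DEMO_MAX_BUFFERS];
};

static void demo_tx_term_one(struct pci_dev *pdev, struct demo_txd_priv *txd_priv)
{
	struct sk_buff *skb = txd_priv->skb;
	int i = 0, j;

	/* The first mapping covers the linear part of the skb ... */
	pci_unmap_single(pdev, txd_priv->dma_buffers[i++],
			 skb_headlen(skb), PCI_DMA_TODEVICE);

	/* ... the remaining ones cover the paged fragments. */
	for (j = 0; j < skb_shinfo(skb)->nr_frags; j++)
		pci_unmap_page(pdev, txd_priv->dma_buffers[i++],
			       skb_frag_size(&skb_shinfo(skb)->frags[j]),
			       PCI_DMA_TODEVICE);

	dev_kfree_skb_any(skb);
}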
1573 if (netif_tx_queue_stopped(vpath->fifo.txq)) in vxge_reset_vpath()
1574 netif_tx_wake_queue(vpath->fifo.txq); in vxge_reset_vpath()
1596 struct __vxge_hw_fifo *hw_fifo = vdev->vpaths[i].fifo.handle; in vxge_config_ci_for_tti_rti()
2056 attr.fifo_attr.userdata = &vpath->fifo; in vxge_open_vpaths()
2069 vpath->fifo.handle = in vxge_open_vpaths()
2073 vpath->fifo.tx_steering_type = in vxge_open_vpaths()
2075 vpath->fifo.ndev = vdev->ndev; in vxge_open_vpaths()
2076 vpath->fifo.pdev = vdev->pdev; in vxge_open_vpaths()
2078 u64_stats_init(&vpath->fifo.stats.syncp); in vxge_open_vpaths()
2082 vpath->fifo.txq = in vxge_open_vpaths()
2085 vpath->fifo.txq = in vxge_open_vpaths()
2087 vpath->fifo.indicate_max_pkts = in vxge_open_vpaths()
2089 vpath->fifo.tx_vector_no = 0; in vxge_open_vpaths()
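vxge_open_vpaths() ties each fifo to its net_device, PCI device and TX queue (a dedicated queue when multiqueue TX steering is used, queue 0 otherwise), initializes the stats sync and leaves the MSI-X vector number to be filled in later. A sketch of that setup with simplified structures; the steering flag and parameter names are assumptions:

/* Sketch of per-fifo setup while opening a vpath (cf. vxge_open_vpaths());
 * structures, the steering flag and parameter names are simplified stand-ins. */
#include <linux/netdevice.h>
#include <linux/pci.h>
#include <linux/u64_stats_sync.h>

struct demo_fifo {
	struct net_device *ndev;
	struct pci_dev *pdev;
	struct netdev_queue *txq;
	struct u64_stats_sync syncp;	/* stats.syncp in the driver */
	int indicate_max_pkts;
	int tx_vector_no;
};

static void demo_fifo_init(struct demo_fifo *fifo, struct net_device *ndev,
			   struct pci_dev *pdev, unsigned int queue_index,
			   bool multiqueue_steering, int indicate_max_pkts)
{
	fifo->ndev = ndev;
	fifo->pdev = pdev;
	u64_stats_init(&fifo->syncp);

	/* With multiqueue TX steering each vpath gets its own queue;
	 * otherwise every fifo shares queue 0. */
	fifo->txq = netdev_get_tx_queue(ndev,
					multiqueue_steering ? queue_index : 0);

	fifo->indicate_max_pkts = indicate_max_pkts;
	fifo->tx_vector_no = 0;		/* assigned once MSI-X is enabled */
}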
2121 static void adaptive_coalesce_tx_interrupts(struct vxge_fifo *fifo) in adaptive_coalesce_tx_interrupts() argument
2123 fifo->interrupt_count++; in adaptive_coalesce_tx_interrupts()
2124 if (time_before(fifo->jiffies + HZ / 100, jiffies)) { in adaptive_coalesce_tx_interrupts()
2125 struct __vxge_hw_fifo *hw_fifo = fifo->handle; in adaptive_coalesce_tx_interrupts()
2127 fifo->jiffies = jiffies; in adaptive_coalesce_tx_interrupts()
2128 if (fifo->interrupt_count > VXGE_T1A_MAX_TX_INTERRUPT_COUNT && in adaptive_coalesce_tx_interrupts()
2136 fifo->interrupt_count = 0; in adaptive_coalesce_tx_interrupts()
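adaptive_coalesce_tx_interrupts() counts TX interrupts and, roughly every HZ/100, compares the count against VXGE_T1A_MAX_TX_INTERRUPT_COUNT to decide whether the fifo should fall back to coalesced completions. A sketch of that rate check; the threshold value and the hardware reprogramming step are placeholders:

/* Sketch of the adaptive TX interrupt coalescing check
 * (cf. adaptive_coalesce_tx_interrupts()); the threshold and the hardware
 * reprogramming step are placeholders. */
#include <linux/jiffies.h>

#define DEMO_MAX_TX_INTERRUPT_COUNT	100	/* stand-in for VXGE_T1A_MAX_TX_INTERRUPT_COUNT */

struct demo_fifo {
	unsigned long jiffies;		/* start of the current window */
	unsigned int interrupt_count;
};

static void demo_adaptive_coalesce_tx(struct demo_fifo *fifo)
{
	fifo->interrupt_count++;

	/* Re-evaluate roughly every 10 ms (HZ / 100). */
	if (time_before(fifo->jiffies + HZ / 100, jiffies)) {
		fifo->jiffies = jiffies;

		if (fifo->interrupt_count > DEMO_MAX_TX_INTERRUPT_COUNT) {
			/* Too many TX interrupts in this window: switch the
			 * fifo over to timer-driven (coalesced) completions. */
		}

		fifo->interrupt_count = 0;
	}
}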
2230 struct vxge_fifo *fifo = (struct vxge_fifo *)dev_id; in vxge_tx_msix_handle() local
2232 adaptive_coalesce_tx_interrupts(fifo); in vxge_tx_msix_handle()
2234 vxge_hw_channel_msix_mask((struct __vxge_hw_channel *)fifo->handle, in vxge_tx_msix_handle()
2235 fifo->tx_vector_no); in vxge_tx_msix_handle()
2237 vxge_hw_channel_msix_clear((struct __vxge_hw_channel *)fifo->handle, in vxge_tx_msix_handle()
2238 fifo->tx_vector_no); in vxge_tx_msix_handle()
2240 VXGE_COMPLETE_VPATH_TX(fifo); in vxge_tx_msix_handle()
2242 vxge_hw_channel_msix_unmask((struct __vxge_hw_channel *)fifo->handle, in vxge_tx_msix_handle()
2243 fifo->tx_vector_no); in vxge_tx_msix_handle()
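vxge_tx_msix_handle() first runs the coalescing check, then masks and clears its vector, reaps completions via VXGE_COMPLETE_VPATH_TX(), and finally unmasks the vector. A sketch of that handler shape, with the vxge HW channel mask/clear/unmask calls replaced by stub helpers rather than their real signatures:

/* Sketch of the per-fifo TX MSI-X handler flow (cf. vxge_tx_msix_handle());
 * the mask/clear/unmask helpers are stubs standing in for the vxge HW
 * channel calls, not their real signatures. */
#include <linux/interrupt.h>

struct demo_fifo;	/* per-fifo state as sketched earlier */

static void demo_msix_mask(struct demo_fifo *fifo, int vec)   { /* stub */ }
static void demo_msix_clear(struct demo_fifo *fifo, int vec)  { /* stub */ }
static void demo_msix_unmask(struct demo_fifo *fifo, int vec) { /* stub */ }
static void demo_complete_vpath_tx(struct demo_fifo *fifo)    { /* see earlier sketch */ }

static irqreturn_t demo_tx_msix_handle(int irq, void *dev_id)
{
	struct demo_fifo *fifo = dev_id;
	int vec = 0;	/* fifo->tx_vector_no in the driver */

	/* The driver also runs its adaptive coalescing check here first. */

	/* Quiesce the vector while completions are reaped, then re-arm it. */
	demo_msix_mask(fifo, vec);
	demo_msix_clear(fifo, vec);
	demo_complete_vpath_tx(fifo);
	demo_msix_unmask(fifo, vec);

	return IRQ_HANDLED;
}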
2412 vpath->fifo.tx_vector_no = (vpath->device_id * in vxge_enable_msix()
2497 &vdev->vpaths[vp_idx].fifo); in vxge_add_isr()
2499 &vdev->vpaths[vp_idx].fifo; in vxge_add_isr()
2588 vxge_hw_vpath_tti_ci_set(vdev->vpaths[0].fifo.handle); in vxge_add_isr()
2657 netif_tx_stop_queue(vpath->fifo.txq); in vxge_poll_vp_lockup()
3135 struct vxge_fifo_stats *txstats = &vdev->vpaths[k].fifo.stats; in vxge_get_stats64()
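vxge_get_stats64() sums the per-fifo counters on the reader side of the same u64_stats sync used in vxge_xmit_compl(). A sketch using the plain fetch_begin/fetch_retry helpers (the driver itself may use the _irq variants depending on kernel version):

/* Sketch of the reader side of the per-fifo u64 stats (cf. vxge_get_stats64());
 * the plain fetch helpers are used here, the driver may use the _irq variants
 * depending on kernel version. */
#include <linux/netdevice.h>
#include <linux/u64_stats_sync.h>

struct demo_fifo_stats {
	struct u64_stats_sync syncp;
	u64 tx_frms;
	u64 tx_bytes;
};

static void demo_read_fifo_stats(struct demo_fifo_stats *txstats,
				 struct rtnl_link_stats64 *net_stats)
{
	unsigned int start;
	u64 packets, bytes;

	/* Retry the snapshot if a writer updated the counters meanwhile. */
	do {
		start = u64_stats_fetch_begin(&txstats->syncp);
		packets = txstats->tx_frms;
		bytes = txstats->tx_bytes;
	} while (u64_stats_fetch_retry(&txstats->syncp, start));

	net_stats->tx_packets += packets;
	net_stats->tx_bytes += bytes;
}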
3622 netif_tx_stop_queue(vpath->fifo.txq); in vxge_callback_crit_err()
3765 device_config->vp_config[i].fifo.enable = in vxge_config_vpaths()
3767 device_config->vp_config[i].fifo.max_frags = in vxge_config_vpaths()
3769 device_config->vp_config[i].fifo.memblock_size = in vxge_config_vpaths()
3772 txdl_size = device_config->vp_config[i].fifo.max_frags * in vxge_config_vpaths()
3776 device_config->vp_config[i].fifo.fifo_blocks = in vxge_config_vpaths()
3779 device_config->vp_config[i].fifo.intr = in vxge_config_vpaths()
4019 config.vp_config[i].fifo.max_frags); in vxge_print_parm()
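vxge_config_vpaths() fills in the per-vpath fifo configuration (enable, max_frags, memblock_size, fifo_blocks, intr), sizing the number of memory blocks from the TxD list size. An illustrative sketch of that kind of sizing; the rounding rule and values here are assumptions, not the driver's exact formula:

/* Illustrative sizing for the fifo configuration filled in at probe time
 * (cf. vxge_config_vpaths()); the rounding rule and values are assumptions,
 * not the driver's exact formula. */
#include <linux/kernel.h>

struct demo_fifo_config {
	int enable;
	int max_frags;
	int memblock_size;
	int fifo_blocks;
	int intr;
};

static void demo_config_fifo(struct demo_fifo_config *cfg, int fifo_len,
			     int max_frags, int memblock_size, int txd_size)
{
	int txdl_size, txdl_per_memblock;

	cfg->enable = 1;
	cfg->max_frags = max_frags;
	cfg->memblock_size = memblock_size;

	/* One TxD list holds max_frags descriptors; work out how many lists
	 * fit in a memory block and how many blocks the fifo needs. */
	txdl_size = max_frags * txd_size;
	txdl_per_memblock = memblock_size / txdl_size;
	cfg->fifo_blocks = DIV_ROUND_UP(fifo_len, txdl_per_memblock);

	cfg->intr = 0;	/* completions are driven from the MSI-X path */
}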