Search results for refs:rx_buf_sz in linux-4.4.14 (results 1 – 29 of 29), sorted by relevance

/linux-4.4.14/drivers/net/ethernet/sis/
sis190.c 270 u32 rx_buf_sz; member
461 static inline void sis190_give_to_asic(struct RxDesc *desc, u32 rx_buf_sz) in sis190_give_to_asic() argument
466 desc->size = cpu_to_le32((rx_buf_sz & RX_BUF_MASK) | eor); in sis190_give_to_asic()
472 u32 rx_buf_sz) in sis190_map_to_asic() argument
475 sis190_give_to_asic(desc, rx_buf_sz); in sis190_map_to_asic()
490 u32 rx_buf_sz = tp->rx_buf_sz; in sis190_alloc_rx_skb() local
494 skb = netdev_alloc_skb(tp->dev, rx_buf_sz); in sis190_alloc_rx_skb()
497 mapping = pci_map_single(tp->pci_dev, skb->data, tp->rx_buf_sz, in sis190_alloc_rx_skb()
501 sis190_map_to_asic(desc, mapping, rx_buf_sz); in sis190_alloc_rx_skb()
545 pci_dma_sync_single_for_cpu(tp->pci_dev, addr, tp->rx_buf_sz, in sis190_try_rx_copy()
[all …]
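Note: the sis190.c hits above show the usual two-step hand-off of a receive buffer to the NIC: the skb is DMA-mapped, and the buffer size is masked and combined with an end-of-ring flag before being written into the descriptor. The standalone C model below sketches only that size-field encoding; it is not kernel code, and the RX_BUF_MASK and EOR values are made up for illustration.

    #include <stdint.h>
    #include <stdio.h>

    #define RX_BUF_MASK 0x0000fff8u   /* illustrative mask, not sis190's real value */
    #define EOR         0x80000000u   /* illustrative end-of-ring flag */

    struct rx_desc_model {
        uint32_t size;    /* size/flags word; little-endian on the real device */
        uint64_t addr;    /* DMA address of the receive buffer */
    };

    /* Models the sis190_give_to_asic() line above: mask the buffer size and
     * OR in the end-of-ring flag for the last descriptor of the ring. */
    static void give_to_asic_model(struct rx_desc_model *desc,
                                   uint32_t rx_buf_sz, int last_in_ring)
    {
        desc->size = (rx_buf_sz & RX_BUF_MASK) | (last_in_ring ? EOR : 0);
    }

    int main(void)
    {
        struct rx_desc_model d = { 0, 0 };

        give_to_asic_model(&d, 1536, 1);
        printf("descriptor size field: 0x%08x\n", (unsigned int)d.size);
        return 0;
    }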
/linux-4.4.14/drivers/net/ethernet/packetengines/
hamachi.c 496 unsigned int rx_buf_sz; /* Based on MTU+slack. */ member
1123 hmp->rx_buf_sz, PCI_DMA_FROMDEVICE); in hamachi_tx_timeout()
1132 skb = netdev_alloc_skb_ip_align(dev, hmp->rx_buf_sz); in hamachi_tx_timeout()
1138 skb->data, hmp->rx_buf_sz, PCI_DMA_FROMDEVICE)); in hamachi_tx_timeout()
1140 DescEndPacket | DescIntr | (hmp->rx_buf_sz - 2)); in hamachi_tx_timeout()
1174 hmp->rx_buf_sz = (dev->mtu <= 1492 ? PKT_BUF_SZ : in hamachi_init_ring()
1184 struct sk_buff *skb = netdev_alloc_skb(dev, hmp->rx_buf_sz + 2); in hamachi_init_ring()
1190 skb->data, hmp->rx_buf_sz, PCI_DMA_FROMDEVICE)); in hamachi_init_ring()
1193 DescEndPacket | DescIntr | (hmp->rx_buf_sz -2)); in hamachi_init_ring()
1421 hmp->rx_buf_sz, in hamachi_rx()
[all …]
yellowfin.c 324 unsigned int rx_buf_sz; /* Based on MTU+slack. */ member
731 yp->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32); in yellowfin_init_ring()
735 cpu_to_le32(CMD_RX_BUF | INTR_ALWAYS | yp->rx_buf_sz); in yellowfin_init_ring()
741 struct sk_buff *skb = netdev_alloc_skb(dev, yp->rx_buf_sz + 2); in yellowfin_init_ring()
747 skb->data, yp->rx_buf_sz, PCI_DMA_FROMDEVICE)); in yellowfin_init_ring()
1062 yp->rx_buf_sz, PCI_DMA_FROMDEVICE); in yellowfin_rx()
1129 yp->rx_buf_sz, in yellowfin_rx()
1141 yp->rx_buf_sz, in yellowfin_rx()
1156 struct sk_buff *skb = netdev_alloc_skb(dev, yp->rx_buf_sz + 2); in yellowfin_rx()
1162 skb->data, yp->rx_buf_sz, PCI_DMA_FROMDEVICE)); in yellowfin_rx()
[all …]
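Note: hamachi.c and yellowfin.c both derive rx_buf_sz from the MTU plus slack (with slightly different cutoffs, 1492 vs 1500) and allocate each receive skb two bytes larger so the IP header can be 4-byte aligned. The standalone sketch below models only that sizing rule; PKT_BUF_SZ is an assumed value, since each driver defines its own.

    #include <stdio.h>

    #define PKT_BUF_SZ 1536   /* assumed default buffer size */

    /* Sizing rule as in yellowfin_init_ring(); hamachi_init_ring() uses 1492
     * as the cutoff instead of 1500. */
    static unsigned int rx_buf_sz_for_mtu(unsigned int mtu)
    {
        return mtu <= 1500 ? PKT_BUF_SZ : mtu + 32;   /* MTU + slack */
    }

    int main(void)
    {
        unsigned int mtu = 4000;   /* oversized-MTU example */
        unsigned int rx_buf_sz = rx_buf_sz_for_mtu(mtu);

        /* Both drivers then allocate rx_buf_sz + 2 bytes per receive skb,
         * e.g. netdev_alloc_skb(dev, rx_buf_sz + 2), and DMA-map rx_buf_sz
         * of it for the device. */
        printf("mtu %u -> rx_buf_sz %u, skb allocation %u\n",
               mtu, rx_buf_sz, rx_buf_sz + 2);
        return 0;
    }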
/linux-4.4.14/drivers/net/ethernet/smsc/
epic100.c 272 unsigned int rx_buf_sz; /* Based on MTU+slack. */ member
907 ep->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32); in epic_init_ring()
912 ep->rx_ring[i].buflength = ep->rx_buf_sz; in epic_init_ring()
922 struct sk_buff *skb = netdev_alloc_skb(dev, ep->rx_buf_sz + 2); in epic_init_ring()
928 skb->data, ep->rx_buf_sz, PCI_DMA_FROMDEVICE); in epic_init_ring()
1190 ep->rx_buf_sz, in epic_rx()
1196 ep->rx_buf_sz, in epic_rx()
1201 ep->rx_buf_sz, PCI_DMA_FROMDEVICE); in epic_rx()
1219 skb = ep->rx_skbuff[entry] = netdev_alloc_skb(dev, ep->rx_buf_sz + 2); in epic_rx()
1224 skb->data, ep->rx_buf_sz, PCI_DMA_FROMDEVICE); in epic_rx()
[all …]
/linux-4.4.14/drivers/net/ethernet/dlink/
dl2k.c 552 np->rx_buf_sz); in rio_timer()
563 (np->pdev, skb->data, np->rx_buf_sz, in rio_timer()
567 cpu_to_le64((u64)np->rx_buf_sz << 48); in rio_timer()
599 np->rx_buf_sz = (dev->mtu <= 1500 ? PACKET_SIZE : dev->mtu + 32); in alloc_list()
625 skb = netdev_alloc_skb_ip_align(dev, np->rx_buf_sz); in alloc_list()
633 np->pdev, skb->data, np->rx_buf_sz, in alloc_list()
635 np->rx_ring[i].fraginfo |= cpu_to_le64((u64)np->rx_buf_sz << 48); in alloc_list()
905 np->rx_buf_sz, in receive_packet()
912 np->rx_buf_sz, in receive_packet()
920 np->rx_buf_sz, in receive_packet()
[all …]
sundance.c 396 unsigned int rx_buf_sz; /* Based on MTU+slack. */ member
1034 np->rx_buf_sz = (dev->mtu <= 1520 ? PKT_BUF_SZ : dev->mtu + 16); in init_ring()
1048 netdev_alloc_skb(dev, np->rx_buf_sz + 2); in init_ring()
1055 np->rx_buf_sz, DMA_FROM_DEVICE)); in init_ring()
1062 np->rx_ring[i].frag[0].length = cpu_to_le32(np->rx_buf_sz | LastFrag); in init_ring()
1389 np->rx_buf_sz, DMA_FROM_DEVICE); in rx_poll()
1393 np->rx_buf_sz, DMA_FROM_DEVICE); in rx_poll()
1398 np->rx_buf_sz, DMA_FROM_DEVICE); in rx_poll()
1438 skb = netdev_alloc_skb(dev, np->rx_buf_sz + 2); in refill_rx()
1445 np->rx_buf_sz, DMA_FROM_DEVICE)); in refill_rx()
[all …]
dl2k.h 381 unsigned int rx_buf_sz; /* Based on MTU+slack. */ member
/linux-4.4.14/drivers/net/ethernet/
fealnx.c 407 unsigned int rx_buf_sz; /* Based on MTU+slack. */ member
1070 skb = netdev_alloc_skb(dev, np->rx_buf_sz); in allocate_rx_buffers()
1079 np->rx_buf_sz, PCI_DMA_FROMDEVICE); in allocate_rx_buffers()
1243 np->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32); in init_ring()
1251 np->rx_ring[i].control = np->rx_buf_sz << RBSShift; in init_ring()
1264 struct sk_buff *skb = netdev_alloc_skb(dev, np->rx_buf_sz); in init_ring()
1274 np->rx_buf_sz, PCI_DMA_FROMDEVICE); in init_ring()
1706 np->rx_buf_sz, in netdev_rx()
1720 np->rx_buf_sz, in netdev_rx()
1725 np->rx_buf_sz, in netdev_rx()
[all …]
/linux-4.4.14/drivers/staging/vt6655/
card.c 576 pDesc->rd0.res_count = cpu_to_le16(priv->rx_buf_sz); in CARDvSafeResetRx()
578 pDesc->rd1.req_count = cpu_to_le16(priv->rx_buf_sz); in CARDvSafeResetRx()
584 pDesc->rd0.res_count = cpu_to_le16(priv->rx_buf_sz); in CARDvSafeResetRx()
586 pDesc->rd1.req_count = cpu_to_le16(priv->rx_buf_sz); in CARDvSafeResetRx()
dpc.c 144 priv->rx_buf_sz, DMA_FROM_DEVICE); in vnt_receive_frame()
device.h 166 u32 rx_buf_sz; member
device_main.c 601 priv->rx_buf_sz, DMA_FROM_DEVICE); in device_free_rd0_ring()
618 priv->rx_buf_sz, DMA_FROM_DEVICE); in device_free_rd1_ring()
737 rd_info->skb = dev_alloc_skb((int)priv->rx_buf_sz); in device_alloc_rx_buf()
744 priv->rx_buf_sz, DMA_FROM_DEVICE); in device_alloc_rx_buf()
748 rd->rd0.res_count = cpu_to_le16(priv->rx_buf_sz); in device_alloc_rx_buf()
750 rd->rd1.req_count = cpu_to_le16(priv->rx_buf_sz); in device_alloc_rx_buf()
1165 priv->rx_buf_sz = PKT_BUF_SZ; in vnt_start()
/linux-4.4.14/drivers/net/ethernet/realtek/
8139cp.c 347 unsigned rx_buf_sz; member
413 cp->rx_buf_sz = mtu + ETH_HLEN + 8; in cp_set_rxbufsize()
415 cp->rx_buf_sz = PKT_BUF_SZ; in cp_set_rxbufsize()
479 const unsigned buflen = cp->rx_buf_sz; in cp_rx_poll()
548 cp->rx_buf_sz); in cp_rx_poll()
550 desc->opts1 = cpu_to_le32(DescOwn | cp->rx_buf_sz); in cp_rx_poll()
1069 skb = netdev_alloc_skb_ip_align(dev, cp->rx_buf_sz); in cp_refill_rx()
1074 cp->rx_buf_sz, PCI_DMA_FROMDEVICE); in cp_refill_rx()
1085 cpu_to_le32(DescOwn | RingEnd | cp->rx_buf_sz); in cp_refill_rx()
1088 cpu_to_le32(DescOwn | cp->rx_buf_sz); in cp_refill_rx()
[all …]
r8169.c 347 static int rx_buf_sz = 16383; variable
5288 static void rtl_set_rx_max_size(void __iomem *ioaddr, unsigned int rx_buf_sz) in rtl_set_rx_max_size() argument
5291 RTL_W16(RxMaxSize, rx_buf_sz + 1); in rtl_set_rx_max_size()
5394 rtl_set_rx_max_size(ioaddr, rx_buf_sz); in rtl_hw_start_8169()
6302 rtl_set_rx_max_size(ioaddr, rx_buf_sz); in rtl_hw_start_8168()
6596 rtl_set_rx_max_size(ioaddr, rx_buf_sz); in rtl_hw_start_8101()
6682 dma_unmap_single(&tp->pci_dev->dev, le64_to_cpu(desc->addr), rx_buf_sz, in rtl8169_free_rx_databuff()
6690 static inline void rtl8169_mark_to_asic(struct RxDesc *desc, u32 rx_buf_sz) in rtl8169_mark_to_asic() argument
6697 desc->opts1 = cpu_to_le32(DescOwn | eor | rx_buf_sz); in rtl8169_mark_to_asic()
6701 u32 rx_buf_sz) in rtl8169_map_to_asic() argument
[all …]
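Note: unlike the other drivers listed here, r8169.c keeps a single file-scope rx_buf_sz (16383 in this tree), programs the RxMaxSize register with rx_buf_sz + 1, and folds the size into each descriptor's opts1 word together with ownership and end-of-ring flags. The standalone model below illustrates that encoding; the DESC_OWN and RING_END bit values are assumptions, not taken from r8169.c.

    #include <stdint.h>
    #include <stdio.h>

    #define DESC_OWN 0x80000000u   /* assumed DescOwn bit, for illustration */
    #define RING_END 0x40000000u   /* assumed end-of-ring bit */

    static const int rx_buf_sz = 16383;   /* value of the r8169 file-scope variable */

    /* Models the rtl8169_mark_to_asic() line above: ownership, an optional
     * end-of-ring flag and the buffer size share one 32-bit descriptor word. */
    static uint32_t opts1_model(int last_in_ring)
    {
        uint32_t eor = last_in_ring ? RING_END : 0;
        return DESC_OWN | eor | (uint32_t)rx_buf_sz;
    }

    int main(void)
    {
        printf("RxMaxSize register value: %d\n", rx_buf_sz + 1);
        printf("opts1 for the last descriptor: 0x%08x\n",
               (unsigned int)opts1_model(1));
        return 0;
    }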
/linux-4.4.14/drivers/net/ethernet/dec/tulip/
de2104x.c 310 unsigned rx_buf_sz; member
441 buflen = copying_skb ? (len + RX_OFFSET) : de->rx_buf_sz; in de_rx()
482 cpu_to_le32(RingEnd | de->rx_buf_sz); in de_rx()
484 de->rx_ring[rx_tail].opts2 = cpu_to_le32(de->rx_buf_sz); in de_rx()
1283 skb = netdev_alloc_skb(de->dev, de->rx_buf_sz); in de_refill_rx()
1288 skb->data, de->rx_buf_sz, PCI_DMA_FROMDEVICE); in de_refill_rx()
1294 cpu_to_le32(RingEnd | de->rx_buf_sz); in de_refill_rx()
1296 de->rx_ring[i].opts2 = cpu_to_le32(de->rx_buf_sz); in de_refill_rx()
1342 de->rx_buf_sz, PCI_DMA_FROMDEVICE); in de_clean_rings()
1385 de->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32); in de_open()
winbond-840.c 313 unsigned int rx_buf_sz; /* Based on MTU+slack. */ member
805 np->rx_ring[i].length = np->rx_buf_sz; in init_rxtx_rings()
814 struct sk_buff *skb = netdev_alloc_skb(dev, np->rx_buf_sz); in init_rxtx_rings()
819 np->rx_buf_sz,PCI_DMA_FROMDEVICE); in init_rxtx_rings()
978 np->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32); in alloc_ringdesc()
1270 skb = netdev_alloc_skb(dev, np->rx_buf_sz); in netdev_rx()
1276 np->rx_buf_sz, PCI_DMA_FROMDEVICE); in netdev_rx()
/linux-4.4.14/drivers/net/ethernet/adaptec/
starfire.c 561 unsigned int rx_buf_sz; /* Based on MTU+slack. */ member
925 writel((np->rx_buf_sz << RxBufferLenShift) | in netdev_open()
1147 np->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32); in init_ring()
1151 struct sk_buff *skb = netdev_alloc_skb(dev, np->rx_buf_sz); in init_ring()
1155 …np->rx_info[i].mapping = pci_map_single(np->pci_dev, skb->data, np->rx_buf_sz, PCI_DMA_FROMDEVICE); in init_ring()
1454 pci_unmap_single(np->pci_dev, np->rx_info[entry].mapping, np->rx_buf_sz, PCI_DMA_FROMDEVICE); in __netdev_rx()
1567 skb = netdev_alloc_skb(dev, np->rx_buf_sz); in refill_rx_ring()
1572 pci_map_single(np->pci_dev, skb->data, np->rx_buf_sz, PCI_DMA_FROMDEVICE); in refill_rx_ring()
1940 pci_unmap_single(np->pci_dev, np->rx_info[i].mapping, np->rx_buf_sz, PCI_DMA_FROMDEVICE); in netdev_close()
/linux-4.4.14/drivers/net/ethernet/sun/
sungem.h 938 #define RX_BUF_ALLOC_SIZE(gp) ((gp)->rx_buf_sz + 28 + RX_OFFSET + 64)
993 int rx_buf_sz; member
sungem.c 830 skb_put(new_skb, (gp->rx_buf_sz + RX_OFFSET)); in gem_rx()
1628 gp->rx_buf_sz = max(dev->mtu + ETH_HLEN + VLAN_HLEN, in gem_init_rings()
1643 skb_put(skb, (gp->rx_buf_sz + RX_OFFSET)); in gem_init_rings()
1825 writel(0x20000000 | (gp->rx_buf_sz + 4), gp->regs + MAC_MAXFSZ); in gem_init_mac()
1906 int max_frame = (gp->rx_buf_sz + 4 + 64) & ~63; in gem_init_pause_thresholds()
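Note: sungem sizes rx_buf_sz to cover at least the MTU plus Ethernet and VLAN headers, adds fixed slack through the RX_BUF_ALLOC_SIZE() macro, and derives its pause threshold from a 64-byte-rounded maximum frame size. The standalone sketch below reproduces that arithmetic; RX_OFFSET is an assumed value here.

    #include <stdio.h>

    #define ETH_HLEN  14   /* Ethernet header */
    #define VLAN_HLEN  4   /* 802.1Q tag */
    #define RX_OFFSET  2   /* assumed alignment offset */

    int main(void)
    {
        int mtu = 1500;

        /* gem_init_rings(): at least MTU plus Ethernet and VLAN headers. */
        int rx_buf_sz = mtu + ETH_HLEN + VLAN_HLEN;

        /* sungem.h RX_BUF_ALLOC_SIZE(): buffer size plus fixed slack. */
        int alloc_size = rx_buf_sz + 28 + RX_OFFSET + 64;

        /* gem_init_pause_thresholds(): frame size rounded to a 64-byte
         * multiple; gem_init_mac() programs MAC_MAXFSZ with rx_buf_sz + 4
         * (presumably room for the frame checksum). */
        int max_frame = (rx_buf_sz + 4 + 64) & ~63;

        printf("rx_buf_sz=%d alloc=%d max_frame=%d MAC_MAXFSZ=%d\n",
               rx_buf_sz, alloc_size, max_frame, rx_buf_sz + 4);
        return 0;
    }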
/linux-4.4.14/drivers/net/ethernet/via/
via-rhine.c 476 unsigned int rx_buf_sz; /* Based on MTU+slack. */ member
1224 const int size = rp->rx_buf_sz; in rhine_skb_dma_init()
1268 rp->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32); in alloc_rbufs()
1274 rp->rx_ring[i].desc_length = cpu_to_le32(rp->rx_buf_sz); in alloc_rbufs()
1313 rp->rx_buf_sz, DMA_FROM_DEVICE); in free_rbufs()
2110 rp->rx_buf_sz, in rhine_rx()
2119 rp->rx_buf_sz, in rhine_rx()
2131 rp->rx_buf_sz, in rhine_rx()
/linux-4.4.14/drivers/staging/vt6656/
device.h 279 u32 rx_buf_sz; member
usbpipe.c 187 rcb->skb = dev_alloc_skb(priv->rx_buf_sz); in vnt_submit_rx_urb_complete()
main_usb.c 471 rcb->skb = dev_alloc_skb(priv->rx_buf_sz); in vnt_alloc_bufs()
518 priv->rx_buf_sz = MAX_TOTAL_SIZE_WITH_ALL_HEADERS; in vnt_start()
/linux-4.4.14/drivers/net/ethernet/natsemi/
natsemi.c 561 unsigned int rx_buf_sz; member
1744 if (np->rx_buf_sz > NATSEMI_LONGPKT) in init_registers()
1933 unsigned int buflen = np->rx_buf_sz+NATSEMI_PADDING; in refill_rx()
1948 np->rx_ring[entry].cmd_status = cpu_to_le32(np->rx_buf_sz); in refill_rx()
1961 np->rx_buf_sz = ETH_DATA_LEN + NATSEMI_HEADERS; in set_bufsize()
1963 np->rx_buf_sz = dev->mtu + NATSEMI_HEADERS; in set_bufsize()
2025 unsigned int buflen = np->rx_buf_sz; in drain_rx()
2284 unsigned int buflen = np->rx_buf_sz; in netdev_rx()
2344 } else if (pkt_len > np->rx_buf_sz) { in netdev_rx()
/linux-4.4.14/drivers/atm/
iphase.c 767 iadev->rx_buf_sz = IA_RX_BUF_SZ;
778 iadev->rx_buf_sz = IA_RX_BUF_SZ;
790 iadev->rx_buf_sz = IA_RX_BUF_SZ;
795 iadev->rx_buf_sz, iadev->rx_pkt_ram);)
1170 if (len > iadev->rx_buf_sz) {
1171 printk("Over %d bytes sdu received, dropped!!!\n", iadev->rx_buf_sz);
1331 if ((length > iadev->rx_buf_sz) || (length >
1478 writew(iadev->rx_buf_sz, iadev->reass_reg+BUF_SIZE);
1492 rx_pkt_start += iadev->rx_buf_sz;
3152 iadev->num_rx_desc, iadev->rx_buf_sz,
iphase.h 1017 u16 num_rx_desc, rx_buf_sz, rxing; member
/linux-4.4.14/drivers/net/ethernet/renesas/
sh_eth.h 515 u32 rx_buf_sz; /* Based on MTU+slack. */ member
sh_eth.c 1168 int skbuff_size = mdp->rx_buf_sz + SH_ETH_RX_ALIGN + 32 - 1; in sh_eth_ring_format()
1189 buf_len = ALIGN(mdp->rx_buf_sz, 32); in sh_eth_ring_format()
1250 mdp->rx_buf_sz = (ndev->mtu <= 1492 ? PKT_BUF_SZ : in sh_eth_ring_init()
1253 mdp->rx_buf_sz += NET_IP_ALIGN; in sh_eth_ring_init()
1463 int skbuff_size = mdp->rx_buf_sz + SH_ETH_RX_ALIGN + 32 - 1; in sh_eth_rx()
1521 ALIGN(mdp->rx_buf_sz, 32), in sh_eth_rx()
1540 buf_len = ALIGN(mdp->rx_buf_sz, 32); in sh_eth_rx()
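Note: sh_eth rounds the buffer length handed to each RX descriptor up to a 32-byte multiple and allocates the skb with extra headroom so the data pointer can be realigned. The standalone sketch below models that arithmetic; SH_ETH_RX_ALIGN is assumed to be 32 here and may not match the driver's actual definition.

    #include <stdio.h>

    #define ALIGN_UP(x, a)  (((x) + (a) - 1) & ~((a) - 1))

    #define NET_IP_ALIGN     2
    #define SH_ETH_RX_ALIGN 32   /* assumed value for illustration */

    int main(void)
    {
        unsigned int mtu = 1500;

        /* sh_eth_ring_init(): an MTU above 1492 takes the MTU + slack path,
         * and NET_IP_ALIGN is folded into rx_buf_sz itself. */
        unsigned int rx_buf_sz = (mtu + 32) + NET_IP_ALIGN;

        /* sh_eth_ring_format(): descriptor length rounded up to 32 bytes,
         * skb allocated with extra room for realignment. */
        unsigned int buf_len     = ALIGN_UP(rx_buf_sz, 32);
        unsigned int skbuff_size = rx_buf_sz + SH_ETH_RX_ALIGN + 32 - 1;

        printf("rx_buf_sz=%u buf_len=%u skbuff_size=%u\n",
               rx_buf_sz, buf_len, skbuff_size);
        return 0;
    }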
/linux-4.4.14/drivers/net/ethernet/nvidia/
forcedeth.c 802 unsigned int rx_buf_sz; member
1815 struct sk_buff *skb = netdev_alloc_skb(dev, np->rx_buf_sz + NV_RX_ALLOC_PAD); in nv_alloc_rx()
1830 np->put_rx.orig->flaglen = cpu_to_le32(np->rx_buf_sz | NV_RX_AVAIL); in nv_alloc_rx()
1856 struct sk_buff *skb = netdev_alloc_skb(dev, np->rx_buf_sz + NV_RX_ALLOC_PAD); in nv_alloc_rx_optimized()
1872 np->put_rx.ex->flaglen = cpu_to_le32(np->rx_buf_sz | NV_RX2_AVAIL); in nv_alloc_rx_optimized()
2997 np->rx_buf_sz = ETH_DATA_LEN + NV_RX_HEADERS; in set_bufsize()
2999 np->rx_buf_sz = dev->mtu + NV_RX_HEADERS; in set_bufsize()
3049 writel(np->rx_buf_sz, base + NvRegOffloadConfig); in nv_change_mtu()
4133 writel(np->rx_buf_sz, base + NvRegOffloadConfig); in nv_do_nic_poll()
4665 writel(np->rx_buf_sz, base + NvRegOffloadConfig); in nv_set_ringparam()
[all …]