/linux-4.1.27/drivers/net/ethernet/sis/

sis190.c
    270  u32 rx_buf_sz;  member
    461  static inline void sis190_give_to_asic(struct RxDesc *desc, u32 rx_buf_sz)  in sis190_give_to_asic() argument
    466  desc->size = cpu_to_le32((rx_buf_sz & RX_BUF_MASK) | eor);  in sis190_give_to_asic()
    472  u32 rx_buf_sz)  in sis190_map_to_asic() argument
    475  sis190_give_to_asic(desc, rx_buf_sz);  in sis190_map_to_asic()
    490  u32 rx_buf_sz = tp->rx_buf_sz;  in sis190_alloc_rx_skb() local
    494  skb = netdev_alloc_skb(tp->dev, rx_buf_sz);  in sis190_alloc_rx_skb()
    497  mapping = pci_map_single(tp->pci_dev, skb->data, tp->rx_buf_sz,  in sis190_alloc_rx_skb()
    501  sis190_map_to_asic(desc, mapping, rx_buf_sz);  in sis190_alloc_rx_skb()
    545  pci_dma_sync_single_for_cpu(tp->pci_dev, addr, tp->rx_buf_sz,  in sis190_try_rx_copy()
    [all …]

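The sis190 hits above show the usual descriptor hand-off: the buffer size is masked, an end-of-ring flag is OR'd in for the last descriptor, and the result is stored little-endian for the NIC to read. A minimal sketch of that pattern follows; the struct layout and flag values are illustrative, and only the field and macro names visible in the excerpt are taken from the driver.

    #include <linux/types.h>
    #include <asm/byteorder.h>

    #define RX_BUF_MASK  0x0000fff8  /* illustrative mask for the size field */
    #define RING_END     0x80000000  /* illustrative end-of-ring flag */

    struct RxDesc {                  /* simplified; not the driver's real layout */
        __le32 size;
        __le32 status;
        __le64 addr;
    };

    /* Hand a refilled buffer back to the NIC: size plus end-of-ring marker,
     * converted to the little-endian format the hardware reads. */
    static inline void give_to_asic(struct RxDesc *desc, u32 rx_buf_sz, bool last)
    {
        u32 eor = last ? RING_END : 0;

        desc->size = cpu_to_le32((rx_buf_sz & RX_BUF_MASK) | eor);
    }
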
/linux-4.1.27/drivers/net/ethernet/packetengines/

hamachi.c
    496  unsigned int rx_buf_sz;  /* Based on MTU+slack. */  member
    1123  hmp->rx_buf_sz, PCI_DMA_FROMDEVICE);  in hamachi_tx_timeout()
    1132  skb = netdev_alloc_skb_ip_align(dev, hmp->rx_buf_sz);  in hamachi_tx_timeout()
    1138  skb->data, hmp->rx_buf_sz, PCI_DMA_FROMDEVICE));  in hamachi_tx_timeout()
    1140  DescEndPacket | DescIntr | (hmp->rx_buf_sz - 2));  in hamachi_tx_timeout()
    1174  hmp->rx_buf_sz = (dev->mtu <= 1492 ? PKT_BUF_SZ :  in hamachi_init_ring()
    1184  struct sk_buff *skb = netdev_alloc_skb(dev, hmp->rx_buf_sz + 2);  in hamachi_init_ring()
    1190  skb->data, hmp->rx_buf_sz, PCI_DMA_FROMDEVICE));  in hamachi_init_ring()
    1193  DescEndPacket | DescIntr | (hmp->rx_buf_sz -2));  in hamachi_init_ring()
    1421  hmp->rx_buf_sz,  in hamachi_rx()
    [all …]

yellowfin.c
    324  unsigned int rx_buf_sz;  /* Based on MTU+slack. */  member
    731  yp->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32);  in yellowfin_init_ring()
    735  cpu_to_le32(CMD_RX_BUF | INTR_ALWAYS | yp->rx_buf_sz);  in yellowfin_init_ring()
    741  struct sk_buff *skb = netdev_alloc_skb(dev, yp->rx_buf_sz + 2);  in yellowfin_init_ring()
    747  skb->data, yp->rx_buf_sz, PCI_DMA_FROMDEVICE));  in yellowfin_init_ring()
    1062  yp->rx_buf_sz, PCI_DMA_FROMDEVICE);  in yellowfin_rx()
    1129  yp->rx_buf_sz,  in yellowfin_rx()
    1141  yp->rx_buf_sz,  in yellowfin_rx()
    1156  struct sk_buff *skb = netdev_alloc_skb(dev, yp->rx_buf_sz + 2);  in yellowfin_rx()
    1162  skb->data, yp->rx_buf_sz, PCI_DMA_FROMDEVICE));  in yellowfin_rx()
    [all …]

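hamachi and yellowfin both follow the recipe the "Based on MTU+slack" comment hints at: pick rx_buf_sz from the MTU, allocate each skb two bytes larger so the IP header lands word-aligned, then DMA-map the data for the device. A condensed sketch of that recipe under those assumptions; the ring structure and constants are illustrative, not either driver's real types.

    #include <linux/errno.h>
    #include <linux/netdevice.h>
    #include <linux/pci.h>
    #include <linux/skbuff.h>

    #define PKT_BUF_SZ   1536   /* default buffer for a standard 1500-byte MTU */
    #define RX_RING_SIZE 16     /* illustrative ring size */

    struct rx_slot {
        struct sk_buff *skb;
        dma_addr_t mapping;
    };

    static int fill_rx_ring(struct net_device *dev, struct pci_dev *pdev,
                            struct rx_slot *ring, unsigned int *rx_buf_sz)
    {
        int i;

        /* MTU + slack: a standard MTU fits PKT_BUF_SZ, anything larger gets
         * an exact-fit buffer with 32 bytes of headroom for headers. */
        *rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32);

        for (i = 0; i < RX_RING_SIZE; i++) {
            /* +2 so skb_reserve() can word-align the IP header behind the
             * 14-byte Ethernet header. */
            struct sk_buff *skb = netdev_alloc_skb(dev, *rx_buf_sz + 2);

            if (!skb)
                return -ENOMEM;
            skb_reserve(skb, 2);
            ring[i].skb = skb;
            ring[i].mapping = pci_map_single(pdev, skb->data, *rx_buf_sz,
                                             PCI_DMA_FROMDEVICE);
        }
        return 0;
    }
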
/linux-4.1.27/drivers/net/ethernet/dlink/

dl2k.c
    508  np->rx_buf_sz);  in rio_timer()
    519  (np->pdev, skb->data, np->rx_buf_sz,  in rio_timer()
    523  cpu_to_le64((u64)np->rx_buf_sz << 48);  in rio_timer()
    555  np->rx_buf_sz = (dev->mtu <= 1500 ? PACKET_SIZE : dev->mtu + 32);  in alloc_list()
    581  skb = netdev_alloc_skb_ip_align(dev, np->rx_buf_sz);  in alloc_list()
    589  np->pdev, skb->data, np->rx_buf_sz,  in alloc_list()
    591  np->rx_ring[i].fraginfo |= cpu_to_le64((u64)np->rx_buf_sz << 48);  in alloc_list()
    859  np->rx_buf_sz,  in receive_packet()
    866  np->rx_buf_sz,  in receive_packet()
    874  np->rx_buf_sz,  in receive_packet()
    [all …]

sundance.c
    396  unsigned int rx_buf_sz;  /* Based on MTU+slack. */  member
    1034  np->rx_buf_sz = (dev->mtu <= 1520 ? PKT_BUF_SZ : dev->mtu + 16);  in init_ring()
    1048  netdev_alloc_skb(dev, np->rx_buf_sz + 2);  in init_ring()
    1055  np->rx_buf_sz, DMA_FROM_DEVICE));  in init_ring()
    1062  np->rx_ring[i].frag[0].length = cpu_to_le32(np->rx_buf_sz | LastFrag);  in init_ring()
    1389  np->rx_buf_sz, DMA_FROM_DEVICE);  in rx_poll()
    1393  np->rx_buf_sz, DMA_FROM_DEVICE);  in rx_poll()
    1398  np->rx_buf_sz, DMA_FROM_DEVICE);  in rx_poll()
    1438  skb = netdev_alloc_skb(dev, np->rx_buf_sz + 2);  in refill_rx()
    1445  np->rx_buf_sz, DMA_FROM_DEVICE));  in refill_rx()
    [all …]

dl2k.h
    374  unsigned int rx_buf_sz;  /* Based on MTU+slack. */  member

/linux-4.1.27/drivers/net/ethernet/smsc/

epic100.c
    272  unsigned int rx_buf_sz;  /* Based on MTU+slack. */  member
    907  ep->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32);  in epic_init_ring()
    912  ep->rx_ring[i].buflength = ep->rx_buf_sz;  in epic_init_ring()
    922  struct sk_buff *skb = netdev_alloc_skb(dev, ep->rx_buf_sz + 2);  in epic_init_ring()
    928  skb->data, ep->rx_buf_sz, PCI_DMA_FROMDEVICE);  in epic_init_ring()
    1190  ep->rx_buf_sz,  in epic_rx()
    1196  ep->rx_buf_sz,  in epic_rx()
    1201  ep->rx_buf_sz, PCI_DMA_FROMDEVICE);  in epic_rx()
    1219  skb = ep->rx_skbuff[entry] = netdev_alloc_skb(dev, ep->rx_buf_sz + 2);  in epic_rx()
    1224  skb->data, ep->rx_buf_sz, PCI_DMA_FROMDEVICE);  in epic_rx()
    [all …]

/linux-4.1.27/drivers/net/ethernet/

fealnx.c
    407  unsigned int rx_buf_sz;  /* Based on MTU+slack. */  member
    1070  skb = netdev_alloc_skb(dev, np->rx_buf_sz);  in allocate_rx_buffers()
    1079  np->rx_buf_sz, PCI_DMA_FROMDEVICE);  in allocate_rx_buffers()
    1243  np->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32);  in init_ring()
    1251  np->rx_ring[i].control = np->rx_buf_sz << RBSShift;  in init_ring()
    1264  struct sk_buff *skb = netdev_alloc_skb(dev, np->rx_buf_sz);  in init_ring()
    1274  np->rx_buf_sz, PCI_DMA_FROMDEVICE);  in init_ring()
    1706  np->rx_buf_sz,  in netdev_rx()
    1720  np->rx_buf_sz,  in netdev_rx()
    1725  np->rx_buf_sz,  in netdev_rx()
    [all …]

/linux-4.1.27/drivers/staging/vt6655/

card.c
    576  pDesc->m_rd0RD0.wResCount = (unsigned short)(pDevice->rx_buf_sz);  in CARDvSafeResetRx()
    578  pDesc->m_rd1RD1.wReqCount = (unsigned short)(pDevice->rx_buf_sz);  in CARDvSafeResetRx()
    584  pDesc->m_rd0RD0.wResCount = (unsigned short)(pDevice->rx_buf_sz);  in CARDvSafeResetRx()
    586  pDesc->m_rd1RD1.wReqCount = (unsigned short)(pDevice->rx_buf_sz);  in CARDvSafeResetRx()

dpc.c
    144  priv->rx_buf_sz, DMA_FROM_DEVICE);  in vnt_receive_frame()

device_main.c
    684  pDevice->rx_buf_sz, DMA_FROM_DEVICE);  in device_free_rd0_ring()
    701  pDevice->rx_buf_sz, DMA_FROM_DEVICE);  in device_free_rd1_ring()
    833  pRDInfo->skb = dev_alloc_skb((int)pDevice->rx_buf_sz);  in device_alloc_rx_buf()
    841  pDevice->rx_buf_sz, DMA_FROM_DEVICE);  in device_alloc_rx_buf()
    845  pRD->m_rd0RD0.wResCount = cpu_to_le16(pDevice->rx_buf_sz);  in device_alloc_rx_buf()
    847  pRD->m_rd1RD1.wReqCount = cpu_to_le16(pDevice->rx_buf_sz);  in device_alloc_rx_buf()
    1270  priv->rx_buf_sz = PKT_BUF_SZ;  in vnt_start()

device.h
    271  u32 rx_buf_sz;  member

/linux-4.1.27/drivers/net/ethernet/realtek/

8139cp.c
    345  unsigned rx_buf_sz;  member
    411  cp->rx_buf_sz = mtu + ETH_HLEN + 8;  in cp_set_rxbufsize()
    413  cp->rx_buf_sz = PKT_BUF_SZ;  in cp_set_rxbufsize()
    477  const unsigned buflen = cp->rx_buf_sz;  in cp_rx_poll()
    546  cp->rx_buf_sz);  in cp_rx_poll()
    548  desc->opts1 = cpu_to_le32(DescOwn | cp->rx_buf_sz);  in cp_rx_poll()
    1079  skb = netdev_alloc_skb_ip_align(dev, cp->rx_buf_sz);  in cp_refill_rx()
    1084  cp->rx_buf_sz, PCI_DMA_FROMDEVICE);  in cp_refill_rx()
    1095  cpu_to_le32(DescOwn | RingEnd | cp->rx_buf_sz);  in cp_refill_rx()
    1098  cpu_to_le32(DescOwn | cp->rx_buf_sz);  in cp_refill_rx()
    [all …]

r8169.c
    347  static int rx_buf_sz = 16383;  variable
    5218  static void rtl_set_rx_max_size(void __iomem *ioaddr, unsigned int rx_buf_sz)  in rtl_set_rx_max_size() argument
    5221  RTL_W16(RxMaxSize, rx_buf_sz + 1);  in rtl_set_rx_max_size()
    5324  rtl_set_rx_max_size(ioaddr, rx_buf_sz);  in rtl_hw_start_8169()
    6232  rtl_set_rx_max_size(ioaddr, rx_buf_sz);  in rtl_hw_start_8168()
    6526  rtl_set_rx_max_size(ioaddr, rx_buf_sz);  in rtl_hw_start_8101()
    6612  dma_unmap_single(&tp->pci_dev->dev, le64_to_cpu(desc->addr), rx_buf_sz,  in rtl8169_free_rx_databuff()
    6620  static inline void rtl8169_mark_to_asic(struct RxDesc *desc, u32 rx_buf_sz)  in rtl8169_mark_to_asic() argument
    6627  desc->opts1 = cpu_to_le32(DescOwn | eor | rx_buf_sz);  in rtl8169_mark_to_asic()
    6631  u32 rx_buf_sz)  in rtl8169_map_to_asic() argument
    [all …]

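The two Realtek drivers size their receive buffers differently: 8139cp derives rx_buf_sz from the MTU (an exact fit plus Ethernet header and slack for large MTUs, a fixed PKT_BUF_SZ otherwise), while r8169 keeps a file-scope rx_buf_sz of 16383 and programs rx_buf_sz + 1 into the RxMaxSize register from every rtl_hw_start_* path. A small sketch of the 8139cp-style sizing; the condition is an assumption, only the two assignments come from the excerpt.

    #include <linux/if_ether.h>

    #define PKT_BUF_SZ 1536   /* default buffer for a standard MTU */

    static unsigned int cp_rx_bufsize(unsigned int mtu)
    {
        /* Assumed threshold: jumbo MTUs get an exact-fit buffer with room
         * for the Ethernet header plus a little slack, as in the excerpt. */
        if (mtu > ETH_DATA_LEN)
            return mtu + ETH_HLEN + 8;
        return PKT_BUF_SZ;
    }
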
/linux-4.1.27/drivers/net/ethernet/dec/tulip/

de2104x.c
    310  unsigned rx_buf_sz;  member
    441  buflen = copying_skb ? (len + RX_OFFSET) : de->rx_buf_sz;  in de_rx()
    482  cpu_to_le32(RingEnd | de->rx_buf_sz);  in de_rx()
    484  de->rx_ring[rx_tail].opts2 = cpu_to_le32(de->rx_buf_sz);  in de_rx()
    1283  skb = netdev_alloc_skb(de->dev, de->rx_buf_sz);  in de_refill_rx()
    1288  skb->data, de->rx_buf_sz, PCI_DMA_FROMDEVICE);  in de_refill_rx()
    1294  cpu_to_le32(RingEnd | de->rx_buf_sz);  in de_refill_rx()
    1296  de->rx_ring[i].opts2 = cpu_to_le32(de->rx_buf_sz);  in de_refill_rx()
    1342  de->rx_buf_sz, PCI_DMA_FROMDEVICE);  in de_clean_rings()
    1385  de->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32);  in de_open()

winbond-840.c
    313  unsigned int rx_buf_sz;  /* Based on MTU+slack. */  member
    805  np->rx_ring[i].length = np->rx_buf_sz;  in init_rxtx_rings()
    814  struct sk_buff *skb = netdev_alloc_skb(dev, np->rx_buf_sz);  in init_rxtx_rings()
    819  np->rx_buf_sz,PCI_DMA_FROMDEVICE);  in init_rxtx_rings()
    978  np->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32);  in alloc_ringdesc()
    1270  skb = netdev_alloc_skb(dev, np->rx_buf_sz);  in netdev_rx()
    1276  np->rx_buf_sz, PCI_DMA_FROMDEVICE);  in netdev_rx()

/linux-4.1.27/drivers/net/ethernet/icplus/

ipg.c
    751  sp->rx_buf_sz, PCI_DMA_FROMDEVICE));  in ipg_get_rxbuff()
    774  sp->rx_buf_sz, PCI_DMA_FROMDEVICE);  in init_rfdlist()
    1103  sp->rx_buf_sz, PCI_DMA_FROMDEVICE);  in ipg_nic_rx_free_skb()
    1170  sp->rx_buf_sz, PCI_DMA_FROMDEVICE);  in ipg_nic_rx_check_error()
    1236  sp->rx_buf_sz, PCI_DMA_FROMDEVICE);  in ipg_nic_rx_with_start()
    1453  sp->rx_buf_sz, PCI_DMA_FROMDEVICE);  in ipg_nic_rx()
    1520  sp->rx_buf_sz, PCI_DMA_FROMDEVICE);  in ipg_nic_rx()
    1696  sp->rx_buf_sz, PCI_DMA_FROMDEVICE);  in ipg_rx_clear()
    1729  sp->rx_buf_sz = sp->rxsupport_size;  in ipg_nic_open()

ipg.h
    727  unsigned int rx_buf_sz;  member

/linux-4.1.27/drivers/net/ethernet/via/

via-rhine.c
    478  unsigned int rx_buf_sz;  /* Based on MTU+slack. */  member
    1225  rp->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32);  in alloc_rbufs()
    1232  rp->rx_ring[i].desc_length = cpu_to_le32(rp->rx_buf_sz);  in alloc_rbufs()
    1242  struct sk_buff *skb = netdev_alloc_skb(dev, rp->rx_buf_sz);  in alloc_rbufs()
    1248  dma_map_single(hwdev, skb->data, rp->rx_buf_sz,  in alloc_rbufs()
    1274  rp->rx_buf_sz, DMA_FROM_DEVICE);  in free_rbufs()
    2015  rp->rx_buf_sz,  in rhine_rx()
    2024  rp->rx_buf_sz,  in rhine_rx()
    2036  rp->rx_buf_sz,  in rhine_rx()
    2063  skb = netdev_alloc_skb(dev, rp->rx_buf_sz);  in rhine_rx()
    [all …]

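free_rbufs() above is the teardown mirror of alloc_rbufs(): every still-populated slot is unmapped with the same rx_buf_sz and direction used when it was mapped, then the skb is freed. A simplified sketch of that step, with an illustrative ring structure rather than the driver's own:

    #include <linux/dma-mapping.h>
    #include <linux/skbuff.h>

    struct rx_slot {                 /* illustrative, not via-rhine's layout */
        struct sk_buff *skb;
        dma_addr_t dma;
    };

    static void free_rx_ring(struct device *hwdev, struct rx_slot *ring,
                             int entries, unsigned int rx_buf_sz)
    {
        int i;

        for (i = 0; i < entries; i++) {
            if (!ring[i].skb)
                continue;
            /* Size and direction must match the original dma_map_single(). */
            dma_unmap_single(hwdev, ring[i].dma, rx_buf_sz, DMA_FROM_DEVICE);
            dev_kfree_skb(ring[i].skb);
            ring[i].skb = NULL;
        }
    }
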
/linux-4.1.27/drivers/net/ethernet/adaptec/

starfire.c
    561  unsigned int rx_buf_sz;  /* Based on MTU+slack. */  member
    925  writel((np->rx_buf_sz << RxBufferLenShift) |  in netdev_open()
    1147  np->rx_buf_sz = (dev->mtu <= 1500 ? PKT_BUF_SZ : dev->mtu + 32);  in init_ring()
    1151  struct sk_buff *skb = netdev_alloc_skb(dev, np->rx_buf_sz);  in init_ring()
    1155  …np->rx_info[i].mapping = pci_map_single(np->pci_dev, skb->data, np->rx_buf_sz, PCI_DMA_FROMDEVICE);  in init_ring()
    1454  pci_unmap_single(np->pci_dev, np->rx_info[entry].mapping, np->rx_buf_sz, PCI_DMA_FROMDEVICE);  in __netdev_rx()
    1567  skb = netdev_alloc_skb(dev, np->rx_buf_sz);  in refill_rx_ring()
    1572  pci_map_single(np->pci_dev, skb->data, np->rx_buf_sz, PCI_DMA_FROMDEVICE);  in refill_rx_ring()
    1940  pci_unmap_single(np->pci_dev, np->rx_info[i].mapping, np->rx_buf_sz, PCI_DMA_FROMDEVICE);  in netdev_close()

/linux-4.1.27/drivers/net/ethernet/sun/

sungem.h
    938  #define RX_BUF_ALLOC_SIZE(gp) ((gp)->rx_buf_sz + 28 + RX_OFFSET + 64)
    993  int rx_buf_sz;  member

sungem.c
    830  skb_put(new_skb, (gp->rx_buf_sz + RX_OFFSET));  in gem_rx()
    1628  gp->rx_buf_sz = max(dev->mtu + ETH_HLEN + VLAN_HLEN,  in gem_init_rings()
    1643  skb_put(skb, (gp->rx_buf_sz + RX_OFFSET));  in gem_init_rings()
    1825  writel(0x20000000 | (gp->rx_buf_sz + 4), gp->regs + MAC_MAXFSZ);  in gem_init_mac()
    1906  int max_frame = (gp->rx_buf_sz + 4 + 64) & ~63;  in gem_init_pause_thresholds()

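sungem allocates noticeably more than rx_buf_sz (RX_BUF_ALLOC_SIZE adds 28 + RX_OFFSET + 64 bytes of slack) and, when computing pause thresholds, pads the frame size by 4 bytes (presumably the FCS) before masking it down to a 64-byte boundary. A quick standalone check of that masking arithmetic with an example value:

    #include <stdio.h>

    int main(void)
    {
        unsigned int rx_buf_sz = 1514;                        /* example value */
        unsigned int max_frame = (rx_buf_sz + 4 + 64) & ~63U;

        /* (1514 + 4 + 64) = 1582, masked to a 64-byte multiple -> 1536 */
        printf("rx_buf_sz %u -> max_frame %u\n", rx_buf_sz, max_frame);
        return 0;
    }
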
/linux-4.1.27/drivers/staging/vt6656/

device.h
    279  u32 rx_buf_sz;  member

usbpipe.c
    190  rcb->skb = dev_alloc_skb(priv->rx_buf_sz);  in vnt_submit_rx_urb_complete()

main_usb.c
    471  rcb->skb = dev_alloc_skb(priv->rx_buf_sz);  in vnt_alloc_bufs()
    523  priv->rx_buf_sz = MAX_TOTAL_SIZE_WITH_ALL_HEADERS;  in vnt_start()

/linux-4.1.27/drivers/net/ethernet/natsemi/

natsemi.c
    561  unsigned int rx_buf_sz;  member
    1744  if (np->rx_buf_sz > NATSEMI_LONGPKT)  in init_registers()
    1933  unsigned int buflen = np->rx_buf_sz+NATSEMI_PADDING;  in refill_rx()
    1942  np->rx_ring[entry].cmd_status = cpu_to_le32(np->rx_buf_sz);  in refill_rx()
    1955  np->rx_buf_sz = ETH_DATA_LEN + NATSEMI_HEADERS;  in set_bufsize()
    1957  np->rx_buf_sz = dev->mtu + NATSEMI_HEADERS;  in set_bufsize()
    2019  unsigned int buflen = np->rx_buf_sz;  in drain_rx()
    2272  unsigned int buflen = np->rx_buf_sz;  in netdev_rx()
    2332  } else if (pkt_len > np->rx_buf_sz) {  in netdev_rx()

/linux-4.1.27/drivers/atm/

iphase.c
    766  iadev->rx_buf_sz = IA_RX_BUF_SZ;
    777  iadev->rx_buf_sz = IA_RX_BUF_SZ;
    789  iadev->rx_buf_sz = IA_RX_BUF_SZ;
    794  iadev->rx_buf_sz, iadev->rx_pkt_ram);)
    1169  if (len > iadev->rx_buf_sz) {
    1170  printk("Over %d bytes sdu received, dropped!!!\n", iadev->rx_buf_sz);
    1330  if ((length > iadev->rx_buf_sz) || (length >
    1477  writew(iadev->rx_buf_sz, iadev->reass_reg+BUF_SIZE);
    1491  rx_pkt_start += iadev->rx_buf_sz;
    3151  iadev->num_rx_desc, iadev->rx_buf_sz,

iphase.h
    1017  u16 num_rx_desc, rx_buf_sz, rxing;  member

/linux-4.1.27/drivers/net/ethernet/renesas/

sh_eth.c
    1130  int skbuff_size = mdp->rx_buf_sz + SH_ETH_RX_ALIGN - 1;  in sh_eth_ring_format()
    1152  rxdesc->buffer_length = ALIGN(mdp->rx_buf_sz, 16);  in sh_eth_ring_format()
    1209  mdp->rx_buf_sz = (ndev->mtu <= 1492 ? PKT_BUF_SZ :  in sh_eth_ring_init()
    1212  mdp->rx_buf_sz += NET_IP_ALIGN;  in sh_eth_ring_init()
    1453  int skbuff_size = mdp->rx_buf_sz + SH_ETH_RX_ALIGN - 1;  in sh_eth_rx()
    1509  ALIGN(mdp->rx_buf_sz, 16),  in sh_eth_rx()
    1528  rxdesc->buffer_length = ALIGN(mdp->rx_buf_sz, 16);  in sh_eth_rx()

sh_eth.h
    516  u32 rx_buf_sz;  /* Based on MTU+slack. */  member

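sh_eth adds a few alignment twists to the usual sizing: NET_IP_ALIGN is folded into rx_buf_sz itself, each skb is allocated SH_ETH_RX_ALIGN - 1 bytes larger so the data pointer can be realigned, and the descriptor length is rx_buf_sz rounded up to a multiple of 16. A sketch of just that arithmetic, assuming rx_buf_sz has already been computed; SH_ETH_RX_ALIGN's value here is an assumption.

    #include <linux/kernel.h>   /* ALIGN() */
    #include <linux/skbuff.h>   /* NET_IP_ALIGN */

    #define SH_ETH_RX_ALIGN 32  /* assumed value, for illustration only */

    static void sh_eth_rx_sizes(u32 rx_buf_sz, u32 *skbuff_size, u32 *desc_len)
    {
        /* rx_buf_sz already includes NET_IP_ALIGN, as in sh_eth_ring_init(). */
        *skbuff_size = rx_buf_sz + SH_ETH_RX_ALIGN - 1; /* room to realign data */
        *desc_len = ALIGN(rx_buf_sz, 16);               /* hardware wants 16-byte units */
    }
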
/linux-4.1.27/drivers/net/ethernet/nvidia/

forcedeth.c
    802  unsigned int rx_buf_sz;  member
    1815  struct sk_buff *skb = netdev_alloc_skb(dev, np->rx_buf_sz + NV_RX_ALLOC_PAD);  in nv_alloc_rx()
    1830  np->put_rx.orig->flaglen = cpu_to_le32(np->rx_buf_sz | NV_RX_AVAIL);  in nv_alloc_rx()
    1856  struct sk_buff *skb = netdev_alloc_skb(dev, np->rx_buf_sz + NV_RX_ALLOC_PAD);  in nv_alloc_rx_optimized()
    1872  np->put_rx.ex->flaglen = cpu_to_le32(np->rx_buf_sz | NV_RX2_AVAIL);  in nv_alloc_rx_optimized()
    2997  np->rx_buf_sz = ETH_DATA_LEN + NV_RX_HEADERS;  in set_bufsize()
    2999  np->rx_buf_sz = dev->mtu + NV_RX_HEADERS;  in set_bufsize()
    3049  writel(np->rx_buf_sz, base + NvRegOffloadConfig);  in nv_change_mtu()
    4129  writel(np->rx_buf_sz, base + NvRegOffloadConfig);  in nv_do_nic_poll()
    4667  writel(np->rx_buf_sz, base + NvRegOffloadConfig);  in nv_set_ringparam()
    [all …]

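forcedeth computes rx_buf_sz once in set_bufsize() (link-layer overhead on top of ETH_DATA_LEN or the MTU) and then rewrites the same value into NvRegOffloadConfig at every point the rings are rebuilt: MTU changes, the recovery poll, and ring-size changes. A minimal sketch of those two steps; the MTU condition, the NV_RX_HEADERS value, and the register-offset parameter are assumptions, only the two assignments and the writel() pattern come from the excerpts.

    #include <linux/if_ether.h>
    #include <linux/io.h>

    #define NV_RX_HEADERS 64   /* assumed link-layer overhead, for illustration */

    static unsigned int nv_rx_bufsize(unsigned int mtu)
    {
        /* Assumed condition: anything up to a standard MTU uses the fixed
         * size, larger MTUs size the buffer from the MTU itself. */
        if (mtu <= ETH_DATA_LEN)
            return ETH_DATA_LEN + NV_RX_HEADERS;
        return mtu + NV_RX_HEADERS;
    }

    /* Whatever path rebuilds the rings must reprogram the size the NIC uses. */
    static void nv_program_rx_bufsize(void __iomem *base, unsigned long offload_reg,
                                      unsigned int rx_buf_sz)
    {
        writel(rx_buf_sz, base + offload_reg);
    }
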