Lines Matching refs:desc
(each entry gives the source line number, the matching code, and, where known, the enclosing function and the kind of reference: argument, local, struct)

43 #define POOL_ALLOC_SIZE		(sizeof(struct desc) * (RX_DESCS + TX_DESCS))
263 struct desc *desc_tab; /* coherent */
292 struct desc { struct
315 (n) * sizeof(struct desc)) argument
319 ((n) + RX_DESCS) * sizeof(struct desc))
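
The entries above (source lines 43, 263, 292, 315, 319) point at one coherent DMA pool holding RX_DESCS receive descriptors followed by TX_DESCS transmit descriptors; the hss_hdlc_* functions referenced further down suggest this is the IXP4xx HSS/HDLC WAN driver, whose descriptors are handed to the NPE coprocessor through hardware queues. Below is a minimal sketch of the layout these lines imply, not the driver's exact definition: only the fields used elsewhere in this listing appear, the field order and widths are inferred from how the fields are used, and desc_tab_phys plus the *_ptr() helpers are assumed companions of desc_tab (line 263).

    /* Sketch only: descriptor fields referenced in this listing, plus the
     * shared RX+TX pool addressing implied by lines 43, 315 and 319.
     * desc_tab_phys is an assumed physical-address twin of desc_tab. */
    struct desc {
        u32 next;           /* link to the next buffer */
        u16 buf_len;        /* size of the attached data buffer */
        u16 pkt_len;        /* length of the frame in that buffer */
        u32 data;           /* bus/physical address of the buffer */
        u8  status;
        u8  error_count;
    };

    /* RX descriptors occupy slots [0, RX_DESCS); TX descriptors follow. */
    #define rx_desc_phys(port, n)  ((port)->desc_tab_phys + \
                                    (n) * sizeof(struct desc))
    #define tx_desc_phys(port, n)  ((port)->desc_tab_phys + \
                                    ((n) + RX_DESCS) * sizeof(struct desc))
    #define rx_desc_ptr(port, n)   (&(port)->desc_tab[n])
    #define tx_desc_ptr(port, n)   (&(port)->desc_tab[(n) + RX_DESCS])

Keeping RX and TX descriptors in one pool lets a single base address and sizeof(struct desc) arithmetic (line 594 below) recover a descriptor index from any physical address the hardware hands back.
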
573 static inline void debug_desc(u32 phys, struct desc *desc) in debug_desc() argument
577 phys, desc->next, desc->buf_len, desc->pkt_len, in debug_desc()
578 desc->data, desc->status, desc->error_count); in debug_desc()
586 struct desc *tab; in queue_get_desc()
594 n_desc = (phys - tab_phys) / sizeof(struct desc); in queue_get_desc()
602 struct desc *desc) in queue_put_desc() argument
604 debug_desc(phys, desc); in queue_put_desc()
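
Lines 586-604 are the two helpers that translate between descriptor indices and the physical addresses circulated through the hardware queues. A hedged reconstruction follows, assuming the IXP4xx queue-manager calls qmgr_get_entry()/qmgr_put_entry() and an is_tx flag to pick the RX or TX half of the pool; only the arithmetic on line 594 and the debug_desc() call on line 604 are taken directly from the listing.

    /* Sketch: pop a physical descriptor address from a hardware queue and
     * convert it back to an index into the coherent descriptor table. */
    static int queue_get_desc(unsigned int queue, struct port *port, int is_tx)
    {
        u32 phys, tab_phys, n_desc;
        struct desc *tab;

        phys = qmgr_get_entry(queue);       /* assumed queue-manager API */
        if (!phys)
            return -1;                      /* queue empty */

        tab_phys = is_tx ? tx_desc_phys(port, 0) : rx_desc_phys(port, 0);
        tab = is_tx ? tx_desc_ptr(port, 0) : rx_desc_ptr(port, 0);
        n_desc = (phys - tab_phys) / sizeof(struct desc);   /* line 594 */
        debug_desc(phys, &tab[n_desc]);
        return n_desc;
    }

    static inline void queue_put_desc(unsigned int queue, u32 phys,
                                      struct desc *desc)
    {
        debug_desc(phys, desc);             /* line 604 */
        qmgr_put_entry(queue, phys);        /* hand ownership to the hardware */
    }
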
612 static inline void dma_unmap_tx(struct port *port, struct desc *desc) in dma_unmap_tx() argument
615 dma_unmap_single(&port->netdev->dev, desc->data, in dma_unmap_tx()
616 desc->buf_len, DMA_TO_DEVICE); in dma_unmap_tx()
618 dma_unmap_single(&port->netdev->dev, desc->data & ~3, in dma_unmap_tx()
619 ALIGN((desc->data & 3) + desc->buf_len, 4), in dma_unmap_tx()
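
The two dma_unmap_single() calls on lines 615-619 are the aligned and unaligned halves of dma_unmap_tx(). When a frame was mapped starting at a non-word-aligned address, the mapping began at the rounded-down address with a length rounded up to whole 32-bit words, so the unmap must reproduce exactly that address/length pair. A sketch with the branch made explicit; the preprocessor condition is an assumption, since the listing does not show which configuration selects which call.

    static inline void dma_unmap_tx(struct port *port, struct desc *desc)
    {
    #ifdef ALIGNED_TX_BUFFERS               /* assumed config switch */
        dma_unmap_single(&port->netdev->dev, desc->data,
                         desc->buf_len, DMA_TO_DEVICE);
    #else
        /* e.g. data ending in 0x02 with buf_len = 5: unmap from the
         * address ending in 0x00 for ALIGN(2 + 5, 4) = 8 bytes,
         * matching how the buffer was originally mapped */
        dma_unmap_single(&port->netdev->dev, desc->data & ~3,
                         ALIGN((desc->data & 3) + desc->buf_len, 4),
                         DMA_TO_DEVICE);
    #endif
    }
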
668 struct desc *desc; in hss_hdlc_poll() local
699 desc = rx_desc_ptr(port, n); in hss_hdlc_poll()
701 if (desc->error_count) in hss_hdlc_poll()
703 " errors %u\n", dev->name, desc->status, in hss_hdlc_poll()
704 desc->error_count); in hss_hdlc_poll()
707 switch (desc->status) { in hss_hdlc_poll()
720 skb = netdev_alloc_skb(dev, desc->pkt_len); in hss_hdlc_poll()
740 desc->status, desc->error_count); in hss_hdlc_poll()
746 desc->buf_len = RX_SIZE; in hss_hdlc_poll()
747 desc->pkt_len = desc->status = 0; in hss_hdlc_poll()
748 queue_put_desc(rxfreeq, rx_desc_phys(port, n), desc); in hss_hdlc_poll()
756 dma_unmap_single(&dev->dev, desc->data, in hss_hdlc_poll()
759 dma_sync_single_for_cpu(&dev->dev, desc->data, in hss_hdlc_poll()
762 ALIGN(desc->pkt_len, 4) / 4); in hss_hdlc_poll()
764 skb_put(skb, desc->pkt_len); in hss_hdlc_poll()
776 desc->data = phys; in hss_hdlc_poll()
778 desc->buf_len = RX_SIZE; in hss_hdlc_poll()
779 desc->pkt_len = 0; in hss_hdlc_poll()
780 queue_put_desc(rxfreeq, rx_desc_phys(port, n), desc); in hss_hdlc_poll()
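
Lines 668-780 trace the receive side of hss_hdlc_poll(): pop a descriptor, report desc->status/error_count problems, allocate an skb of pkt_len bytes, make the DMA buffer visible to the CPU (line 759) or unmap it outright (line 756), hand the frame up, then recycle the descriptor with buf_len reset to RX_SIZE. The ALIGN(desc->pkt_len, 4) / 4 on line 762 is a 32-bit word count, i.e. the copy runs word-by-word (with byte swapping in the real driver). A condensed sketch of one completion; rx_one() and rx_buf are illustrative names, and error statistics, NAPI budget handling and the zero-copy buffer-swap variant are omitted.

    /* One RX completion, condensed (sketch). */
    static void rx_one(struct net_device *dev, struct port *port,
                       unsigned int rxfreeq, int n, const void *rx_buf)
    {
        struct desc *desc = rx_desc_ptr(port, n);
        struct sk_buff *skb = netdev_alloc_skb(dev, desc->pkt_len);

        if (!skb) {
            dev->stats.rx_dropped++;        /* keep the old buffer */
        } else {
            /* make the DMA'd frame visible to the CPU, then copy it out;
             * the driver copies whole 32-bit words with byte swapping,
             * hence the ALIGN(pkt_len, 4) / 4 word count on line 762 */
            dma_sync_single_for_cpu(&dev->dev, desc->data, RX_SIZE,
                                    DMA_FROM_DEVICE);
            memcpy(skb->data, rx_buf, desc->pkt_len);
            skb_put(skb, desc->pkt_len);
            skb->protocol = hdlc_type_trans(skb, dev);
            dev->stats.rx_packets++;
            dev->stats.rx_bytes += desc->pkt_len;
            netif_receive_skb(skb);
        }

        /* either way, recycle the descriptor with a full, empty buffer */
        desc->buf_len = RX_SIZE;
        desc->pkt_len = desc->status = 0;
        queue_put_desc(rxfreeq, rx_desc_phys(port, n), desc);
    }
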
801 struct desc *desc; in hss_hdlc_txdone_irq() local
804 desc = tx_desc_ptr(port, n_desc); in hss_hdlc_txdone_irq()
807 dev->stats.tx_bytes += desc->pkt_len; in hss_hdlc_txdone_irq()
809 dma_unmap_tx(port, desc); in hss_hdlc_txdone_irq()
819 tx_desc_phys(port, n_desc), desc); in hss_hdlc_txdone_irq()
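
Lines 801-819 are the TX-completion interrupt: every descriptor popped from the tx-done queue has its byte count added to the statistics, its buffer unmapped with dma_unmap_tx(), and is then parked on a free queue for the next transmit. Sketch; txdoneq, txreadyq and the tx_buff_tab[] array of in-flight buffers are illustrative names not present in the listing.

    static void hss_hdlc_txdone(struct net_device *dev, struct port *port,
                                unsigned int txdoneq, unsigned int txreadyq)
    {
        int n_desc;

        while ((n_desc = queue_get_desc(txdoneq, port, 1)) >= 0) {
            struct desc *desc = tx_desc_ptr(port, n_desc);      /* line 804 */

            dev->stats.tx_packets++;
            dev->stats.tx_bytes += desc->pkt_len;               /* line 807 */

            dma_unmap_tx(port, desc);                           /* line 809 */
            dev_kfree_skb_irq(port->tx_buff_tab[n_desc]);       /* assumed */
            port->tx_buff_tab[n_desc] = NULL;

            /* descriptor is free again: park it on the tx-ready queue */
            queue_put_desc(txreadyq, tx_desc_phys(port, n_desc), desc);
        }
    }
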
837 struct desc *desc; in hss_hdlc_xmit() local
881 desc = tx_desc_ptr(port, n); in hss_hdlc_xmit()
888 desc->data = phys + offset; in hss_hdlc_xmit()
889 desc->buf_len = desc->pkt_len = len; in hss_hdlc_xmit()
892 queue_put_desc(queue_ids[port->id].tx, tx_desc_phys(port, n), desc); in hss_hdlc_xmit()
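
Lines 837-892 are the transmit path: hss_hdlc_xmit() maps the frame for DMA, writes the bus address and length into a free TX descriptor, and pushes that descriptor onto the hardware TX queue (queue_ids[port->id].tx, line 892). Sketch assuming the frame is sent straight from skb->data; the real driver also has a copy-and-byteswap variant, which is where the offset added on line 888 comes from. dev_to_port(), port->txreadyq, tx_buff_tab[] and the missing flow control are assumptions.

    static netdev_tx_t hss_hdlc_xmit_sketch(struct sk_buff *skb,
                                            struct net_device *dev)
    {
        struct port *port = dev_to_port(dev);   /* assumed helper */
        unsigned int len = skb->len;
        struct desc *desc;
        dma_addr_t phys;
        int n;

        phys = dma_map_single(&dev->dev, skb->data, len, DMA_TO_DEVICE);
        if (dma_mapping_error(&dev->dev, phys)) {
            dev_kfree_skb(skb);
            dev->stats.tx_dropped++;
            return NETDEV_TX_OK;
        }

        n = queue_get_desc(port->txreadyq, port, 1);    /* assumed field */
        if (n < 0) {                    /* no free slot; flow control omitted */
            dma_unmap_single(&dev->dev, phys, len, DMA_TO_DEVICE);
            return NETDEV_TX_BUSY;
        }

        desc = tx_desc_ptr(port, n);            /* line 881 */
        port->tx_buff_tab[n] = skb;             /* assumed bookkeeping */

        desc->data = phys;                      /* copy path adds offset, line 888 */
        desc->buf_len = desc->pkt_len = len;    /* line 889 */

        /* hand the filled descriptor to the hardware (line 892) */
        queue_put_desc(queue_ids[port->id].tx, tx_desc_phys(port, n), desc);
        return NETDEV_TX_OK;
    }
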
988 struct desc *desc = rx_desc_ptr(port, i); in init_hdlc_queues() local
1000 desc->buf_len = RX_SIZE; in init_hdlc_queues()
1001 desc->data = dma_map_single(&port->netdev->dev, data, in init_hdlc_queues()
1003 if (dma_mapping_error(&port->netdev->dev, desc->data)) { in init_hdlc_queues()
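
Lines 988-1003 are the per-descriptor RX setup in init_hdlc_queues(): each of the RX_DESCS descriptors gets a freshly allocated RX_SIZE buffer, DMA-mapped for device writes, and the mapping is checked before its address is stored in desc->data. Sketch of that loop body; the kmalloc()ed buffer and the rx_buff_tab[] bookkeeping array are assumptions (depending on configuration the driver may attach skbs instead).

    for (i = 0; i < RX_DESCS; i++) {
        struct desc *desc = rx_desc_ptr(port, i);       /* line 988 */
        void *data = kmalloc(RX_SIZE, GFP_KERNEL);      /* assumed buffer type */

        if (!data)
            return -ENOMEM;

        desc->buf_len = RX_SIZE;
        desc->data = dma_map_single(&port->netdev->dev, data,
                                    RX_SIZE, DMA_FROM_DEVICE);
        if (dma_mapping_error(&port->netdev->dev, desc->data)) {
            kfree(data);
            return -EIO;
        }
        port->rx_buff_tab[i] = data;    /* assumed bookkeeping array */
    }
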
1019 struct desc *desc = rx_desc_ptr(port, i); in destroy_hdlc_queues() local
1023 desc->data, RX_SIZE, in destroy_hdlc_queues()
1029 struct desc *desc = tx_desc_ptr(port, i); in destroy_hdlc_queues() local
1032 dma_unmap_tx(port, desc); in destroy_hdlc_queues()
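
Lines 1019-1032 are the matching teardown in destroy_hdlc_queues(): RX buffers are unmapped with plain dma_unmap_single() (address and RX_SIZE taken straight from the descriptor), while TX descriptors go through dma_unmap_tx() so the aligned/unaligned mapping shown earlier is undone the same way it was made. Sketch; the bookkeeping arrays and the final release of the coherent descriptor pool are assumptions.

    for (i = 0; i < RX_DESCS; i++) {
        struct desc *desc = rx_desc_ptr(port, i);       /* line 1019 */
        void *buf = port->rx_buff_tab[i];               /* assumed */

        if (buf) {
            dma_unmap_single(&port->netdev->dev, desc->data,
                             RX_SIZE, DMA_FROM_DEVICE);
            kfree(buf);
        }
    }
    for (i = 0; i < TX_DESCS; i++) {
        struct desc *desc = tx_desc_ptr(port, i);       /* line 1029 */

        if (port->tx_buff_tab[i]) {                     /* assumed */
            dma_unmap_tx(port, desc);                   /* line 1032 */
            dev_kfree_skb(port->tx_buff_tab[i]);
            port->tx_buff_tab[i] = NULL;
        }
    }
    /* finally release the coherent descriptor pool itself (assumed step) */
    dma_pool_free(dma_pool, port->desc_tab, port->desc_tab_phys);
    port->desc_tab = NULL;
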