Lines matching refs: rd (references to the ring-descriptor pointer rd in the VLSI IrDA controller driver, vlsi_ir.c)

139 	struct ring_descr *rd;  in vlsi_ring_debug()  local
143 __func__, r, r->size, r->mask, r->len, r->dir, r->rd[0].hw); in vlsi_ring_debug()
147 rd = &r->rd[i]; in vlsi_ring_debug()
149 printk("skb=%p data=%p hw=%p\n", rd->skb, rd->buf, rd->hw); in vlsi_ring_debug()
151 __func__, (unsigned) rd_get_status(rd), in vlsi_ring_debug()
152 (unsigned) rd_get_count(rd), (unsigned) rd_get_addr(rd)); in vlsi_ring_debug()
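
The dump routine above (and vlsi_proc_ring() below) reads each descriptor only through the accessors rd_get_status(), rd_get_count() and rd_get_addr(), which decode the hardware descriptor behind rd->hw. The following is a minimal, compilable model of that split between the CPU-side struct ring_descr (skb, buf, hw) and the hardware-visible entry; the modeled hardware-descriptor field layout is invented for illustration and is not the controller's real descriptor format.

/* Simplified model of a ring descriptor and the accessors used by the dump
 * code above.  The hardware-descriptor field layout here is invented for
 * illustration; it is not the controller's real descriptor format. */
#include <stdint.h>
#include <stdio.h>

struct hw_descr_model {                  /* stands in for *rd->hw            */
	uint32_t addr;                   /* DMA bus address of the buffer    */
	uint16_t count;                  /* byte count                       */
	uint8_t  status;                 /* ownership/status bits            */
};

struct ring_descr_model {
	struct hw_descr_model *hw;       /* hardware-visible part            */
	void *buf;                       /* CPU-side data buffer (rd->buf)   */
	void *skb;                       /* pending sk_buff, NULL if none    */
};

static unsigned rd_get_status_m(const struct ring_descr_model *rd) { return rd->hw->status; }
static unsigned rd_get_count_m(const struct ring_descr_model *rd)  { return rd->hw->count; }
static unsigned rd_get_addr_m(const struct ring_descr_model *rd)   { return rd->hw->addr; }

int main(void)
{
	struct hw_descr_model hw = { .addr = 0x1000, .count = 64, .status = 0x80 };
	struct ring_descr_model rd = { .hw = &hw, .buf = NULL, .skb = NULL };

	/* same shape as the printout in vlsi_ring_debug()/vlsi_proc_ring() */
	printf("skb=%p data=%p hw=%p\n", rd.skb, rd.buf, (void *)rd.hw);
	printf("stat=%02x count=%u busaddr=0x%x\n",
	       rd_get_status_m(&rd), rd_get_count_m(&rd), rd_get_addr_m(&rd));
	return 0;
}
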
298 struct ring_descr *rd; in vlsi_proc_ring() local
303 r->size, r->mask, r->len, r->dir, r->rd[0].hw); in vlsi_proc_ring()
314 rd = &r->rd[h]; in vlsi_proc_ring()
315 j = (unsigned) rd_get_count(rd); in vlsi_proc_ring()
317 h, (unsigned)rd_get_status(rd), j); in vlsi_proc_ring()
320 min_t(unsigned, j, 20), rd->buf); in vlsi_proc_ring()
324 rd = &r->rd[i]; in vlsi_proc_ring()
326 seq_printf(seq, "skb=%p data=%p hw=%p\n", rd->skb, rd->buf, rd->hw); in vlsi_proc_ring()
328 (unsigned) rd_get_status(rd), in vlsi_proc_ring()
329 (unsigned) rd_get_count(rd), (unsigned) rd_get_addr(rd)); in vlsi_proc_ring()
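
vlsi_proc_ring() also walks the whole ring (r->rd[i]) and peeks at the descriptor at the current head, r->rd[h]. The iterators used elsewhere in this listing (ring_first()/ring_get() on the consumer side, ring_last()/ring_put() on the producer side) suggest the usual power-of-two head/tail scheme; the sketch below models that scheme in plain C. The helper bodies are an assumption about the apparent semantics, not code copied from vlsi_ir.c, and the atomics/locking of the real driver are omitted.

/* Minimal model of the power-of-two head/tail indexing apparently behind
 * ring_first()/ring_get()/ring_last()/ring_put(); bodies are a sketch. */
#include <stdio.h>

#define RING_SIZE 8                      /* must be a power of two */

struct ring_m {
	unsigned mask;                   /* RING_SIZE - 1          */
	unsigned head;                   /* next slot to complete  */
	unsigned tail;                   /* next slot to fill      */
	int slot[RING_SIZE];
};

/* slot at head, or NULL when there is nothing to complete */
static int *ring_first_m(struct ring_m *r)
{
	unsigned h = r->head & r->mask;
	return (h == (r->tail & r->mask)) ? NULL : &r->slot[h];
}

static int *ring_get_m(struct ring_m *r)   /* consume head, return next */
{
	r->head++;
	return ring_first_m(r);
}

/* slot at tail, or NULL when the ring is full */
static int *ring_last_m(struct ring_m *r)
{
	unsigned t = r->tail & r->mask;
	return (((t + 1) & r->mask) == (r->head & r->mask)) ? NULL : &r->slot[t];
}

static int *ring_put_m(struct ring_m *r)   /* commit tail, return next free */
{
	r->tail++;
	return ring_last_m(r);
}

int main(void)
{
	struct ring_m r = { .mask = RING_SIZE - 1, .head = 0, .tail = 0 };
	int *s;
	int produced = 0, consumed = 0;

	/* producer loop, as in "for (rd = ring_last(r); rd; rd = ring_put(r))" */
	for (s = ring_last_m(&r); s != NULL; s = ring_put_m(&r))
		*s = produced++;

	/* consumer loop, as in "for (rd = ring_first(r); rd; rd = ring_get(r))" */
	for (s = ring_first_m(&r); s != NULL; s = ring_get_m(&r))
		consumed++;

	/* prints 7 and 7: one slot stays free so "full" differs from "empty" */
	printf("produced=%d consumed=%d\n", produced, consumed);
	return 0;
}
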
395 struct ring_descr *rd; in vlsi_alloc_ring() local
410 r->rd = (struct ring_descr *)(r+1); in vlsi_alloc_ring()
417 rd = r->rd + i; in vlsi_alloc_ring()
418 memset(rd, 0, sizeof(*rd)); in vlsi_alloc_ring()
419 rd->hw = hwmap + i; in vlsi_alloc_ring()
420 rd->buf = kmalloc(len, GFP_KERNEL|GFP_DMA); in vlsi_alloc_ring()
421 if (rd->buf == NULL || in vlsi_alloc_ring()
422 !(busaddr = pci_map_single(pdev, rd->buf, len, dir))) { in vlsi_alloc_ring()
423 if (rd->buf) { in vlsi_alloc_ring()
425 __func__, rd->buf); in vlsi_alloc_ring()
426 kfree(rd->buf); in vlsi_alloc_ring()
427 rd->buf = NULL; in vlsi_alloc_ring()
430 rd = r->rd + j; in vlsi_alloc_ring()
431 busaddr = rd_get_addr(rd); in vlsi_alloc_ring()
432 rd_set_addr_status(rd, 0, 0); in vlsi_alloc_ring()
435 kfree(rd->buf); in vlsi_alloc_ring()
436 rd->buf = NULL; in vlsi_alloc_ring()
441 rd_set_addr_status(rd, busaddr, 0); in vlsi_alloc_ring()
443 rd->skb = NULL; in vlsi_alloc_ring()
450 struct ring_descr *rd; in vlsi_free_ring() local
455 rd = r->rd + i; in vlsi_free_ring()
456 if (rd->skb) in vlsi_free_ring()
457 dev_kfree_skb_any(rd->skb); in vlsi_free_ring()
458 busaddr = rd_get_addr(rd); in vlsi_free_ring()
459 rd_set_addr_status(rd, 0, 0); in vlsi_free_ring()
462 kfree(rd->buf); in vlsi_free_ring()
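
vlsi_alloc_ring() gives every descriptor a DMA-able buffer (kmalloc with GFP_DMA, then pci_map_single) and, if any step fails, walks back over the descriptors already set up before returning; vlsi_free_ring() performs the symmetric teardown, dropping any pending skb, unmapping and freeing each buffer. The sketch below shows just that allocate-then-unwind/teardown pattern, with plain malloc()/free() standing in for the kernel and PCI DMA calls and the rd_set_addr_status() resets reduced to clearing the buffer pointer.

/* Sketch of the allocate-then-unwind pattern in vlsi_alloc_ring() and the
 * symmetric teardown in vlsi_free_ring(); malloc()/free() stand in for
 * kmalloc(GFP_KERNEL|GFP_DMA), pci_map_single() and pci_unmap_single(). */
#include <stdio.h>
#include <stdlib.h>

struct slot_m { void *buf; };

static int alloc_ring_m(struct slot_m *rd, unsigned n, size_t len)
{
	unsigned i, j;

	for (i = 0; i < n; i++) {
		rd[i].buf = malloc(len);             /* kmalloc() + DMA map   */
		if (rd[i].buf == NULL) {
			/* failure: roll back every slot set up so far */
			for (j = 0; j < i; j++) {
				free(rd[j].buf);
				rd[j].buf = NULL;
			}
			return -1;                   /* -ENOMEM in the driver */
		}
	}
	return 0;
}

static void free_ring_m(struct slot_m *rd, unsigned n)
{
	for (unsigned i = 0; i < n; i++) {           /* vlsi_free_ring() walk */
		free(rd[i].buf);                     /* unmap + kfree()       */
		rd[i].buf = NULL;
	}
}

int main(void)
{
	struct slot_m ring[8] = { { NULL } };

	if (alloc_ring_m(ring, 8, 2048) != 0) {
		puts("allocation failed, nothing leaked");
		return 1;
	}
	free_ring_m(ring, 8);
	return 0;
}
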
523 static int vlsi_process_rx(struct vlsi_ring *r, struct ring_descr *rd) in vlsi_process_rx() argument
532 pci_dma_sync_single_for_cpu(r->pdev, rd_get_addr(rd), r->len, r->dir); in vlsi_process_rx()
534 status = rd_get_status(rd); in vlsi_process_rx()
547 len = rd_get_count(rd); in vlsi_process_rx()
564 le16_to_cpus(rd->buf+len); in vlsi_process_rx()
565 if (irda_calc_crc16(INIT_FCS,rd->buf,len+crclen) != GOOD_FCS) { in vlsi_process_rx()
572 if (!rd->skb) { in vlsi_process_rx()
578 skb = rd->skb; in vlsi_process_rx()
579 rd->skb = NULL; in vlsi_process_rx()
581 memcpy(skb_put(skb,len), rd->buf, len); in vlsi_process_rx()
589 rd_set_status(rd, 0); in vlsi_process_rx()
590 rd_set_count(rd, 0); in vlsi_process_rx()
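
vlsi_process_rx() syncs the buffer for the CPU, reads status and byte count, and in SIR mode (the hardware checks the CRC only in MIR/FIR) verifies the trailing FCS in software before copying the payload into the descriptor's skb and clearing status/count for reuse. The check at line 565, irda_calc_crc16(INIT_FCS, rd->buf, len+crclen) != GOOD_FCS, relies on the usual HDLC-style property that running the CRC over payload plus appended FCS yields a fixed "good" residue. The standalone sketch below reproduces that check with a bitwise CRC-16/X.25; the constants mirror the IrDA stack's INIT_FCS/GOOD_FCS, but the function is a stand-in, not the kernel's table-driven irda_calc_crc16().

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define INIT_FCS_M 0xffff   /* mirrors the IrDA stack's INIT_FCS */
#define GOOD_FCS_M 0xf0b8   /* mirrors the IrDA stack's GOOD_FCS */

/* Bitwise reflected CRC-16 (X.25 style, polynomial 0x8408), no final XOR;
 * a stand-in for the kernel's table-driven irda_calc_crc16(). */
static uint16_t fcs16(uint16_t fcs, const uint8_t *buf, size_t len)
{
	while (len--) {
		fcs ^= *buf++;
		for (int k = 0; k < 8; k++)
			fcs = (fcs & 1) ? (fcs >> 1) ^ 0x8408 : fcs >> 1;
	}
	return fcs;
}

int main(void)
{
	uint8_t frame[64] = "payload bytes";
	size_t len = strlen((char *)frame);

	/* sender side: append the complemented FCS, LSB first */
	uint16_t fcs = ~fcs16(INIT_FCS_M, frame, len);
	frame[len]     = fcs & 0xff;
	frame[len + 1] = fcs >> 8;

	/* receiver side, as in vlsi_process_rx() for SIR mode:
	 * run the CRC over payload + trailing FCS and expect GOOD_FCS */
	if (fcs16(INIT_FCS_M, frame, len + 2) != GOOD_FCS_M) {
		puts("CRC error -> frame would be dropped");
		return 1;
	}
	puts("good FCS -> frame would be copied into rd->skb");
	return 0;
}
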
598 struct ring_descr *rd; in vlsi_fill_rx() local
600 for (rd = ring_last(r); rd != NULL; rd = ring_put(r)) { in vlsi_fill_rx()
601 if (rd_is_active(rd)) { in vlsi_fill_rx()
607 if (!rd->skb) { in vlsi_fill_rx()
608 rd->skb = dev_alloc_skb(IRLAP_SKB_ALLOCSIZE); in vlsi_fill_rx()
609 if (rd->skb) { in vlsi_fill_rx()
610 skb_reserve(rd->skb,1); in vlsi_fill_rx()
611 rd->skb->protocol = htons(ETH_P_IRDA); in vlsi_fill_rx()
617 pci_dma_sync_single_for_device(r->pdev, rd_get_addr(rd), r->len, r->dir); in vlsi_fill_rx()
618 rd_activate(rd); in vlsi_fill_rx()
626 struct ring_descr *rd; in vlsi_rx_interrupt() local
629 for (rd = ring_first(r); rd != NULL; rd = ring_get(r)) { in vlsi_rx_interrupt()
631 if (rd_is_active(rd)) in vlsi_rx_interrupt()
634 ret = vlsi_process_rx(r, rd); in vlsi_rx_interrupt()
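
vlsi_fill_rx() and vlsi_rx_interrupt() form a producer/consumer pair around descriptor ownership: the fill path arms every free slot (allocating a fresh skb if the previous one was handed up the stack, syncing the buffer for the device, then rd_activate()), while the interrupt path processes completed descriptors from the head and stops at the first one that rd_is_active() still reports as owned by the hardware. The sketch below models that handshake with a single invented "active" status bit; the real bit definitions live in vlsi_ir.h and are not reproduced here.

/* Sketch of the descriptor-ownership handshake suggested by rd_activate()
 * and rd_is_active().  The bit value is invented for illustration. */
#include <stdio.h>

#define RD_ACTIVE_M 0x80      /* hypothetical "owned by hardware" status bit */

struct descr_model { unsigned char status; };

static int  rd_is_active_m(const struct descr_model *d) { return d->status & RD_ACTIVE_M; }
static void rd_activate_m(struct descr_model *d)        { d->status |= RD_ACTIVE_M; }

int main(void)
{
	struct descr_model ring[4] = { {0}, {0}, {0}, {0} };

	/* vlsi_fill_rx()-style producer: arm every free slot */
	for (int i = 0; i < 4; i++)
		if (!rd_is_active_m(&ring[i]))
			rd_activate_m(&ring[i]);

	/* pretend the hardware completed the first two frames */
	ring[0].status = 0x01;    /* done, some status bits set */
	ring[1].status = 0x00;    /* done, clean                */

	/* vlsi_rx_interrupt()-style consumer: stop at the first descriptor
	 * the hardware still owns */
	for (int i = 0; i < 4; i++) {
		if (rd_is_active_m(&ring[i]))
			break;
		printf("process completed descriptor %d\n", i);
	}
	return 0;
}
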
675 struct ring_descr *rd; in vlsi_unarm_rx() local
678 for (rd = ring_first(r); rd != NULL; rd = ring_get(r)) { in vlsi_unarm_rx()
681 if (rd_is_active(rd)) { in vlsi_unarm_rx()
682 rd_set_status(rd, 0); in vlsi_unarm_rx()
683 if (rd_get_count(rd)) { in vlsi_unarm_rx()
687 rd_set_count(rd, 0); in vlsi_unarm_rx()
688 pci_dma_sync_single_for_cpu(r->pdev, rd_get_addr(rd), r->len, r->dir); in vlsi_unarm_rx()
689 if (rd->skb) { in vlsi_unarm_rx()
690 dev_kfree_skb_any(rd->skb); in vlsi_unarm_rx()
691 rd->skb = NULL; in vlsi_unarm_rx()
695 ret = vlsi_process_rx(r, rd); in vlsi_unarm_rx()
720 static int vlsi_process_tx(struct vlsi_ring *r, struct ring_descr *rd) in vlsi_process_tx() argument
726 pci_dma_sync_single_for_cpu(r->pdev, rd_get_addr(rd), r->len, r->dir); in vlsi_process_tx()
728 status = rd_get_status(rd); in vlsi_process_tx()
733 rd_set_status(rd, 0); in vlsi_process_tx()
735 if (rd->skb) { in vlsi_process_tx()
736 len = rd->skb->len; in vlsi_process_tx()
737 dev_kfree_skb_any(rd->skb); in vlsi_process_tx()
738 rd->skb = NULL; in vlsi_process_tx()
741 len = rd_get_count(rd); /* incorrect for SIR! (due to wrapping) */ in vlsi_process_tx()
743 rd_set_count(rd, 0); in vlsi_process_tx()
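
On the transmit side, vlsi_process_tx() prefers the remembered skb's length for byte accounting and falls back to rd_get_count() only when no skb is attached, which the driver itself flags as inaccurate for SIR because the descriptor count reflects the wrapped frame. The sketch below models only that bookkeeping step (clear status, free the skb, account the bytes, clear the count); the skb, slot and stats types are simplified stand-ins.

/* Sketch of the tx-completion bookkeeping in vlsi_process_tx(). */
#include <stdio.h>
#include <stdlib.h>

struct skb_model   { unsigned len; };
struct slot_model  { struct skb_model *skb; unsigned count; unsigned status; };
struct stats_model { unsigned long tx_packets, tx_bytes; };

static void process_tx_m(struct slot_model *rd, struct stats_model *st)
{
	unsigned len;

	rd->status = 0;                   /* rd_set_status(rd, 0)           */
	if (rd->skb) {
		len = rd->skb->len;       /* original payload length        */
		free(rd->skb);            /* dev_kfree_skb_any() stand-in   */
		rd->skb = NULL;
	} else {
		len = rd->count;          /* wrapped length: SIR-inaccurate */
	}
	rd->count = 0;                    /* rd_set_count(rd, 0)            */

	st->tx_packets++;
	st->tx_bytes += len;
}

int main(void)
{
	struct stats_model st = { 0, 0 };
	struct slot_model rd = { NULL, 21, 0x01 };   /* 21 = wrapped length */

	rd.skb = malloc(sizeof(*rd.skb));
	if (!rd.skb)
		return 1;
	rd.skb->len = 17;                            /* original payload length */

	process_tx_m(&rd, &st);
	printf("tx_packets=%lu tx_bytes=%lu\n", st.tx_packets, st.tx_bytes);
	return 0;
}
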
848 struct ring_descr *rd; in vlsi_hard_start_xmit() local
909 rd = ring_last(r); in vlsi_hard_start_xmit()
910 if (!rd) { in vlsi_hard_start_xmit()
915 if (rd_is_active(rd)) { in vlsi_hard_start_xmit()
920 if (!rd->buf) { in vlsi_hard_start_xmit()
925 if (rd->skb) { in vlsi_hard_start_xmit()
946 len = async_wrap_skb(skb, rd->buf, r->len); in vlsi_hard_start_xmit()
971 skb_copy_from_linear_data(skb, rd->buf, len); in vlsi_hard_start_xmit()
974 rd->skb = skb; /* remember skb for tx-complete stats */ in vlsi_hard_start_xmit()
976 rd_set_count(rd, len); in vlsi_hard_start_xmit()
977 rd_set_status(rd, status); /* not yet active! */ in vlsi_hard_start_xmit()
983 pci_dma_sync_single_for_device(r->pdev, rd_get_addr(rd), r->len, r->dir); in vlsi_hard_start_xmit()
998 rd_activate(rd); in vlsi_hard_start_xmit()
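
vlsi_hard_start_xmit() takes the free slot at the ring tail and, in SIR mode, wraps the frame into rd->buf with async_wrap_skb() rather than copying it verbatim, so the byte count it sets is the wrapped length, not skb->len. The sketch below is a simplified model of that SIR wrapping: BOF/EOF delimiters, byte stuffing of the reserved values, and a complemented CRC-16/X.25 FCS appended LSB first. It follows the standard IrDA SIR framing but omits details such as the leading XBOF padding and is not the kernel's async_wrap_skb() implementation.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define SIR_BOF 0xc0
#define SIR_EOF 0xc1
#define SIR_CE  0x7d   /* control escape */

/* Reflected CRC-16 (X.25 style, polynomial 0x8408), used here for the FCS. */
static uint16_t fcs16(uint16_t fcs, const uint8_t *buf, size_t len)
{
	while (len--) {
		fcs ^= *buf++;
		for (int k = 0; k < 8; k++)
			fcs = (fcs & 1) ? (fcs >> 1) ^ 0x8408 : fcs >> 1;
	}
	return fcs;
}

/* Escape bytes that collide with the framing characters. */
static size_t stuff_byte(uint8_t b, uint8_t *out)
{
	if (b == SIR_BOF || b == SIR_EOF || b == SIR_CE) {
		out[0] = SIR_CE;
		out[1] = b ^ 0x20;
		return 2;
	}
	out[0] = b;
	return 1;
}

/* Simplified stand-in for async_wrap_skb(): wrap `len` payload bytes into
 * `buf` (assumed large enough) and return the wrapped length. */
static size_t sir_wrap(const uint8_t *data, size_t len, uint8_t *buf)
{
	uint16_t fcs = ~fcs16(0xffff, data, len);
	size_t n = 0;

	buf[n++] = SIR_BOF;
	for (size_t i = 0; i < len; i++)
		n += stuff_byte(data[i], buf + n);
	n += stuff_byte(fcs & 0xff, buf + n);   /* FCS, LSB first */
	n += stuff_byte(fcs >> 8, buf + n);
	buf[n++] = SIR_EOF;
	return n;
}

int main(void)
{
	const uint8_t payload[] = { 0x01, 0xc0, 0x7d, 0x42 };
	uint8_t buf[64];
	size_t n = sir_wrap(payload, sizeof(payload), buf);

	/* In SIR mode the wrapped length, not skb->len, becomes the
	 * descriptor byte count set via rd_set_count(). */
	printf("payload %zu bytes -> wrapped frame %zu bytes\n",
	       sizeof(payload), n);
	for (size_t i = 0; i < n; i++)
		printf("%02x ", (unsigned)buf[i]);
	printf("\n");
	return 0;
}
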
1043 struct ring_descr *rd; in vlsi_tx_interrupt() local
1048 for (rd = ring_first(r); rd != NULL; rd = ring_get(r)) { in vlsi_tx_interrupt()
1050 if (rd_is_active(rd)) in vlsi_tx_interrupt()
1053 ret = vlsi_process_tx(r, rd); in vlsi_tx_interrupt()
1071 if (idev->new_baud && rd == NULL) /* tx ring empty and speed change pending */ in vlsi_tx_interrupt()
1075 if (rd == NULL) /* tx ring empty: re-enable rx */ in vlsi_tx_interrupt()
1103 struct ring_descr *rd; in vlsi_unarm_tx() local
1106 for (rd = ring_first(r); rd != NULL; rd = ring_get(r)) { in vlsi_unarm_tx()
1109 if (rd_is_active(rd)) { in vlsi_unarm_tx()
1110 rd_set_status(rd, 0); in vlsi_unarm_tx()
1111 rd_set_count(rd, 0); in vlsi_unarm_tx()
1112 pci_dma_sync_single_for_cpu(r->pdev, rd_get_addr(rd), r->len, r->dir); in vlsi_unarm_tx()
1113 if (rd->skb) { in vlsi_unarm_tx()
1114 dev_kfree_skb_any(rd->skb); in vlsi_unarm_tx()
1115 rd->skb = NULL; in vlsi_unarm_tx()
1121 ret = vlsi_process_tx(r, rd); in vlsi_unarm_tx()
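
vlsi_unarm_rx() and vlsi_unarm_tx() drain a ring without delivering new frames: descriptors the hardware still owns are force-released (status and count cleared, the buffer synced back to the CPU, any pending skb dropped), while already-completed descriptors go through the normal vlsi_process_rx()/vlsi_process_tx() path. The sketch below models the force-release branch only; the "active" bit value and the slot type are invented for illustration.

/* Sketch of the "unarm" pattern shared by vlsi_unarm_rx()/vlsi_unarm_tx(). */
#include <stdio.h>
#include <stdlib.h>

#define ACTIVE_M 0x80                      /* hypothetical hw-ownership bit */

struct slot_m { unsigned char status; unsigned count; void *skb; };

static void unarm_slot(struct slot_m *rd)
{
	if (rd->status & ACTIVE_M) {       /* rd_is_active(rd)              */
		rd->status = 0;            /* rd_set_status(rd, 0)          */
		rd->count  = 0;            /* rd_set_count(rd, 0)           */
		free(rd->skb);             /* dev_kfree_skb_any() stand-in  */
		rd->skb = NULL;
	}
	/* inactive slots would instead go through vlsi_process_rx()/_tx() */
}

int main(void)
{
	struct slot_m ring[2] = {
		{ ACTIVE_M, 64, malloc(32) },  /* still armed for the hardware */
		{ 0x00,      0, NULL },        /* already completed            */
	};

	for (int i = 0; i < 2; i++)
		unarm_slot(&ring[i]);
	printf("slot0 status=%02x skb=%p\n", (unsigned)ring[0].status, ring[0].skb);
	return 0;
}
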