Lines Matching refs:ring
532 struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[hw_queue]; in _rtl_pci_tx_chk_waitq() local
540 (ring->entries - skb_queue_len(&ring->queue) > in _rtl_pci_tx_chk_waitq()
567 struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[prio]; in _rtl_pci_tx_isr() local
569 while (skb_queue_len(&ring->queue)) { in _rtl_pci_tx_isr()
577 entry = (u8 *)(&ring->buffer_desc[ring->idx]); in _rtl_pci_tx_isr()
579 entry = (u8 *)(&ring->desc[ring->idx]); in _rtl_pci_tx_isr()
588 if (!rtlpriv->cfg->ops->is_tx_desc_closed(hw, prio, ring->idx)) in _rtl_pci_tx_isr()
590 ring->idx = (ring->idx + 1) % ring->entries; in _rtl_pci_tx_isr()
592 skb = __skb_dequeue(&ring->queue); in _rtl_pci_tx_isr()
605 ring->idx, in _rtl_pci_tx_isr()
606 skb_queue_len(&ring->queue), in _rtl_pci_tx_isr()
651 if ((ring->entries - skb_queue_len(&ring->queue)) <= 4) { in _rtl_pci_tx_isr()
655 prio, ring->idx, in _rtl_pci_tx_isr()
656 skb_queue_len(&ring->queue)); in _rtl_pci_tx_isr()
1113 struct rtl8192_tx_ring *ring = NULL; in _rtl_pci_prepare_bcn_tasklet() local
1125 ring = &rtlpci->tx_ring[BEACON_QUEUE]; in _rtl_pci_prepare_bcn_tasklet()
1126 pskb = __skb_dequeue(&ring->queue); in _rtl_pci_prepare_bcn_tasklet()
1128 entry = (u8 *)(&ring->buffer_desc[ring->idx]); in _rtl_pci_prepare_bcn_tasklet()
1130 entry = (u8 *)(&ring->desc[ring->idx]); in _rtl_pci_prepare_bcn_tasklet()
1145 pdesc = &ring->desc[0]; in _rtl_pci_prepare_bcn_tasklet()
1147 pbuffer_desc = &ring->buffer_desc[0]; in _rtl_pci_prepare_bcn_tasklet()
1153 __skb_queue_tail(&ring->queue, pskb); in _rtl_pci_prepare_bcn_tasklet()
1375 struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[prio]; in _rtl_pci_free_tx_ring() local
1378 while (skb_queue_len(&ring->queue)) { in _rtl_pci_free_tx_ring()
1380 struct sk_buff *skb = __skb_dequeue(&ring->queue); in _rtl_pci_free_tx_ring()
1383 entry = (u8 *)(&ring->buffer_desc[ring->idx]); in _rtl_pci_free_tx_ring()
1385 entry = (u8 *)(&ring->desc[ring->idx]); in _rtl_pci_free_tx_ring()
1393 ring->idx = (ring->idx + 1) % ring->entries; in _rtl_pci_free_tx_ring()
1398 sizeof(*ring->desc) * ring->entries, in _rtl_pci_free_tx_ring()
1399 ring->desc, ring->dma); in _rtl_pci_free_tx_ring()
1400 ring->desc = NULL; in _rtl_pci_free_tx_ring()
1403 sizeof(*ring->buffer_desc) * ring->entries, in _rtl_pci_free_tx_ring()
1404 ring->buffer_desc, ring->buffer_desc_dma); in _rtl_pci_free_tx_ring()
1405 ring->buffer_desc = NULL; in _rtl_pci_free_tx_ring()
1557 struct rtl8192_tx_ring *ring = &rtlpci->tx_ring[i]; in rtl_pci_reset_trx_ring() local
1559 while (skb_queue_len(&ring->queue)) { in rtl_pci_reset_trx_ring()
1562 __skb_dequeue(&ring->queue); in rtl_pci_reset_trx_ring()
1564 entry = (u8 *)(&ring->buffer_desc in rtl_pci_reset_trx_ring()
1565 [ring->idx]); in rtl_pci_reset_trx_ring()
1567 entry = (u8 *)(&ring->desc[ring->idx]); in rtl_pci_reset_trx_ring()
1577 ring->idx = (ring->idx + 1) % ring->entries; in rtl_pci_reset_trx_ring()
1579 ring->idx = 0; in rtl_pci_reset_trx_ring()
1634 struct rtl8192_tx_ring *ring; in rtl_pci_tx() local
1669 ring = &rtlpci->tx_ring[hw_queue]; in rtl_pci_tx()
1672 idx = ring->cur_tx_wp; in rtl_pci_tx()
1674 idx = (ring->idx + skb_queue_len(&ring->queue)) % in rtl_pci_tx()
1675 ring->entries; in rtl_pci_tx()
1680 pdesc = &ring->desc[idx]; in rtl_pci_tx()
1682 ptx_bd_desc = &ring->buffer_desc[idx]; in rtl_pci_tx()
1690 hw_queue, ring->idx, idx, in rtl_pci_tx()
1691 skb_queue_len(&ring->queue)); in rtl_pci_tx()
1727 __skb_queue_tail(&ring->queue, skb); in rtl_pci_tx()
1737 if ((ring->entries - skb_queue_len(&ring->queue)) < 2 && in rtl_pci_tx()
1741 hw_queue, ring->idx, idx, in rtl_pci_tx()
1742 skb_queue_len(&ring->queue)); in rtl_pci_tx()
1762 struct rtl8192_tx_ring *ring; in rtl_pci_flush() local
1774 ring = &pcipriv->dev.tx_ring[queue_id]; in rtl_pci_flush()
1775 queue_len = skb_queue_len(&ring->queue); in rtl_pci_flush()
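The matches above all revolve around the same ring bookkeeping: ring->idx appears to track the oldest descriptor still owned by the hardware, skb_queue_len(&ring->queue) counts frames in flight, free space is ring->entries minus that count (lines 540, 651, 1737), the producer slot in rtl_pci_tx() is (ring->idx + skb_queue_len(&ring->queue)) % ring->entries (lines 1674-1675), and the consumer advances with ring->idx = (ring->idx + 1) % ring->entries (lines 590, 1393, 1577). The following is a minimal userspace sketch of that arithmetic only; the toy_* struct and helpers are illustrative stand-ins I introduce here, not the driver's rtl8192_tx_ring or the kernel's skb queue API, and the semantics are inferred solely from the lines quoted above.

/*
 * Toy model of the TX-ring index arithmetic seen in the matches above.
 * Not the driver's data structures: "queued" stands in for
 * skb_queue_len(&ring->queue), "idx" for ring->idx, "entries" for
 * ring->entries.
 */
#include <assert.h>
#include <stdio.h>

struct toy_tx_ring {
	unsigned int idx;	/* oldest descriptor not yet reclaimed        */
	unsigned int queued;	/* frames currently sitting on the ring       */
	unsigned int entries;	/* total descriptors in the ring              */
};

/* Free descriptors left: mirrors "ring->entries - skb_queue_len(&ring->queue)". */
static unsigned int toy_free_slots(const struct toy_tx_ring *ring)
{
	return ring->entries - ring->queued;
}

/* Producer side: slot a new frame would take, mirroring rtl_pci_tx()'s
 * "(ring->idx + skb_queue_len(&ring->queue)) % ring->entries". */
static unsigned int toy_next_write_idx(const struct toy_tx_ring *ring)
{
	return (ring->idx + ring->queued) % ring->entries;
}

/* Consumer side: advance past one completed descriptor, mirroring
 * _rtl_pci_tx_isr()'s "ring->idx = (ring->idx + 1) % ring->entries". */
static void toy_reclaim_one(struct toy_tx_ring *ring)
{
	assert(ring->queued > 0);
	ring->idx = (ring->idx + 1) % ring->entries;
	ring->queued--;
}

int main(void)
{
	struct toy_tx_ring ring = { .idx = 6, .queued = 3, .entries = 8 };

	printf("free slots   : %u\n", toy_free_slots(&ring));     /* 8 - 3 = 5      */
	printf("next write at: %u\n", toy_next_write_idx(&ring)); /* (6 + 3) %% 8 = 1 */

	toy_reclaim_one(&ring);
	printf("after reclaim: idx=%u queued=%u\n", ring.idx, ring.queued); /* 7, 2 */
	return 0;
}

Built with any C compiler, the sketch prints 5 free slots, a next write index of 1 (wrapping past the end of the 8-entry ring), and idx 7 / 2 queued after one reclaim, which is the same modulo wrap the driver performs at lines 590, 1393 and 1577.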