Lines matching refs:ring in the b43 driver's dma.c

85 struct b43_dmadesc_generic *op32_idx2desc(struct b43_dmaring *ring,  in op32_idx2desc()  argument
91 *meta = &(ring->meta[slot]); in op32_idx2desc()
92 desc = ring->descbase; in op32_idx2desc()
98 static void op32_fill_descriptor(struct b43_dmaring *ring, in op32_fill_descriptor() argument
103 struct b43_dmadesc32 *descbase = ring->descbase; in op32_fill_descriptor()
110 B43_WARN_ON(!(slot >= 0 && slot < ring->nr_slots)); in op32_fill_descriptor()
112 addr = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_LOW); in op32_fill_descriptor()
113 addrext = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_EXT); in op32_fill_descriptor()
116 if (slot == ring->nr_slots - 1) in op32_fill_descriptor()
131 static void op32_poke_tx(struct b43_dmaring *ring, int slot) in op32_poke_tx() argument
133 b43_dma_write(ring, B43_DMA32_TXINDEX, in op32_poke_tx()
137 static void op32_tx_suspend(struct b43_dmaring *ring) in op32_tx_suspend() argument
139 b43_dma_write(ring, B43_DMA32_TXCTL, b43_dma_read(ring, B43_DMA32_TXCTL) in op32_tx_suspend()
143 static void op32_tx_resume(struct b43_dmaring *ring) in op32_tx_resume() argument
145 b43_dma_write(ring, B43_DMA32_TXCTL, b43_dma_read(ring, B43_DMA32_TXCTL) in op32_tx_resume()
149 static int op32_get_current_rxslot(struct b43_dmaring *ring) in op32_get_current_rxslot() argument
153 val = b43_dma_read(ring, B43_DMA32_RXSTATUS); in op32_get_current_rxslot()
159 static void op32_set_current_rxslot(struct b43_dmaring *ring, int slot) in op32_set_current_rxslot() argument
161 b43_dma_write(ring, B43_DMA32_RXINDEX, in op32_set_current_rxslot()
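Taken together, the op32 RX helpers above convert between slot indices and the byte offsets the hardware speaks: B43_DMA32_RXSTATUS reports a byte pointer into the descriptor table, so dividing by the descriptor size recovers the slot, and op32_set_current_rxslot() multiplies it back. A minimal user-space model of that conversion, assuming the 8-byte b43_dmadesc32 layout and a 12-bit descriptor-pointer mask:

/* Model of the op32 slot <-> byte-offset conversion (not the driver code).
 * Assumes an 8-byte b43_dmadesc32 (one control word, one address word)
 * and a 0x0FFF descriptor-pointer field in RXSTATUS. */
#include <assert.h>
#include <stdint.h>

#define DESC32_SIZE  8u           /* sizeof(struct b43_dmadesc32), assumed */
#define DMA32_RXDPTR 0x00000FFFu  /* descriptor-pointer mask, assumed */

static int rxstatus_to_slot(uint32_t rxstatus)
{
        return (int)((rxstatus & DMA32_RXDPTR) / DESC32_SIZE);
}

static uint32_t slot_to_rxindex(int slot)
{
        return (uint32_t)slot * DESC32_SIZE;
}

int main(void)
{
        /* Slot 5 lives at byte offset 40 in the descriptor table. */
        assert(slot_to_rxindex(5) == 40);
        assert(rxstatus_to_slot(40) == 5);
        return 0;
}

The op64 helpers at lines 244-256 play the same game with the 16-byte b43_dmadesc64.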
177 struct b43_dmadesc_generic *op64_idx2desc(struct b43_dmaring *ring, in op64_idx2desc() argument
183 *meta = &(ring->meta[slot]); in op64_idx2desc()
184 desc = ring->descbase; in op64_idx2desc()
190 static void op64_fill_descriptor(struct b43_dmaring *ring, in op64_fill_descriptor() argument
195 struct b43_dmadesc64 *descbase = ring->descbase; in op64_fill_descriptor()
202 B43_WARN_ON(!(slot >= 0 && slot < ring->nr_slots)); in op64_fill_descriptor()
204 addrlo = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_LOW); in op64_fill_descriptor()
205 addrhi = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_HIGH); in op64_fill_descriptor()
206 addrext = b43_dma_address(&ring->dev->dma, dmaaddr, B43_DMA_ADDR_EXT); in op64_fill_descriptor()
208 if (slot == ring->nr_slots - 1) in op64_fill_descriptor()
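The fill_descriptor and controller-setup paths all funnel DMA addresses through b43_dma_address(), which splits a bus address into a low word, a high word, and a 2-bit "address extension" taken from the SSB translation bits. A sketch of the 64-bit split, assuming the usual 0xC0000000 mask and shift of 30 from the SSB headers, and ignoring the translation value that the real helper ORs back in:

/* Sketch of the LOW/HIGH/EXT split done by b43_dma_address() for the
 * 64-bit case (simplified: no routing/translation OR-in). */
#include <assert.h>
#include <stdint.h>

#define SSB_DMA_TRANSLATION_MASK  0xC0000000u /* assumed, from ssb_regs.h */
#define SSB_DMA_TRANSLATION_SHIFT 30

static uint32_t addr_low(uint64_t dmaaddr)
{
        return (uint32_t)dmaaddr;
}

static uint32_t addr_high(uint64_t dmaaddr)
{
        return (uint32_t)(dmaaddr >> 32);
}

static uint32_t addr_ext(uint64_t dmaaddr)
{
        /* The two translation bits of the high word, as a 2-bit field. */
        return (addr_high(dmaaddr) & SSB_DMA_TRANSLATION_MASK)
               >> SSB_DMA_TRANSLATION_SHIFT;
}

int main(void)
{
        uint64_t a = ((uint64_t)0x40000001u << 32) | 0x80001000u;

        assert(addr_low(a)  == 0x80001000u);
        assert(addr_high(a) == 0x40000001u);
        assert(addr_ext(a)  == 1);
        return 0;
}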
226 static void op64_poke_tx(struct b43_dmaring *ring, int slot) in op64_poke_tx() argument
228 b43_dma_write(ring, B43_DMA64_TXINDEX, in op64_poke_tx()
232 static void op64_tx_suspend(struct b43_dmaring *ring) in op64_tx_suspend() argument
234 b43_dma_write(ring, B43_DMA64_TXCTL, b43_dma_read(ring, B43_DMA64_TXCTL) in op64_tx_suspend()
238 static void op64_tx_resume(struct b43_dmaring *ring) in op64_tx_resume() argument
240 b43_dma_write(ring, B43_DMA64_TXCTL, b43_dma_read(ring, B43_DMA64_TXCTL) in op64_tx_resume()
244 static int op64_get_current_rxslot(struct b43_dmaring *ring) in op64_get_current_rxslot() argument
248 val = b43_dma_read(ring, B43_DMA64_RXSTATUS); in op64_get_current_rxslot()
254 static void op64_set_current_rxslot(struct b43_dmaring *ring, int slot) in op64_set_current_rxslot() argument
256 b43_dma_write(ring, B43_DMA64_RXINDEX, in op64_set_current_rxslot()
270 static inline int free_slots(struct b43_dmaring *ring) in free_slots() argument
272 return (ring->nr_slots - ring->used_slots); in free_slots()
275 static inline int next_slot(struct b43_dmaring *ring, int slot) in next_slot() argument
277 B43_WARN_ON(!(slot >= -1 && slot <= ring->nr_slots - 1)); in next_slot()
278 if (slot == ring->nr_slots - 1) in next_slot()
283 static inline int prev_slot(struct b43_dmaring *ring, int slot) in prev_slot() argument
285 B43_WARN_ON(!(slot >= 0 && slot <= ring->nr_slots - 1)); in prev_slot()
287 return ring->nr_slots - 1; in prev_slot()
292 static void update_max_used_slots(struct b43_dmaring *ring, in update_max_used_slots() argument
295 if (current_used_slots <= ring->max_used_slots) in update_max_used_slots()
297 ring->max_used_slots = current_used_slots; in update_max_used_slots()
298 if (b43_debug(ring->dev, B43_DBG_DMAVERBOSE)) { in update_max_used_slots()
299 b43dbg(ring->dev->wl, in update_max_used_slots()
301 ring->max_used_slots, in update_max_used_slots()
302 ring->tx ? "TX" : "RX", ring->index); in update_max_used_slots()
307 void update_max_used_slots(struct b43_dmaring *ring, int current_used_slots) in update_max_used_slots() argument
313 static inline int request_slot(struct b43_dmaring *ring) in request_slot() argument
317 B43_WARN_ON(!ring->tx); in request_slot()
318 B43_WARN_ON(ring->stopped); in request_slot()
319 B43_WARN_ON(free_slots(ring) == 0); in request_slot()
321 slot = next_slot(ring, ring->current_slot); in request_slot()
322 ring->current_slot = slot; in request_slot()
323 ring->used_slots++; in request_slot()
325 update_max_used_slots(ring, ring->used_slots); in request_slot()
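free_slots(), next_slot(), prev_slot() and request_slot() are the whole ring-buffer discipline: indices wrap at nr_slots, current_slot starts at -1 on an empty TX ring, and claiming a slot advances current_slot and bumps used_slots. A self-contained model of the arithmetic:

/* Model of the ring-slot arithmetic (plain ints, no driver structs). */
#include <assert.h>

struct ring { int nr_slots, used_slots, current_slot; };

static int free_slots(const struct ring *r)
{
        return r->nr_slots - r->used_slots;
}

static int next_slot(const struct ring *r, int slot)
{
        return (slot == r->nr_slots - 1) ? 0 : slot + 1; /* wrap at the end */
}

static int prev_slot(const struct ring *r, int slot)
{
        return (slot == 0) ? r->nr_slots - 1 : slot - 1;
}

static int request_slot(struct ring *r)
{
        /* Caller must have checked free_slots() > 0 (B43_WARN_ON above). */
        r->current_slot = next_slot(r, r->current_slot);
        r->used_slots++;
        return r->current_slot;
}

int main(void)
{
        struct ring r = { .nr_slots = 4, .used_slots = 0, .current_slot = -1 };

        assert(request_slot(&r) == 0);   /* current_slot starts at -1 */
        assert(request_slot(&r) == 1);
        assert(free_slots(&r) == 2);
        assert(next_slot(&r, 3) == 0);   /* wraparound forward */
        assert(prev_slot(&r, 0) == 3);   /* wraparound backward */
        return 0;
}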
360 dma_addr_t map_descbuffer(struct b43_dmaring *ring, in map_descbuffer() argument
366 dmaaddr = dma_map_single(ring->dev->dev->dma_dev, in map_descbuffer()
369 dmaaddr = dma_map_single(ring->dev->dev->dma_dev, in map_descbuffer()
377 void unmap_descbuffer(struct b43_dmaring *ring, in unmap_descbuffer() argument
381 dma_unmap_single(ring->dev->dev->dma_dev, in unmap_descbuffer()
384 dma_unmap_single(ring->dev->dev->dma_dev, in unmap_descbuffer()
390 void sync_descbuffer_for_cpu(struct b43_dmaring *ring, in sync_descbuffer_for_cpu() argument
393 B43_WARN_ON(ring->tx); in sync_descbuffer_for_cpu()
394 dma_sync_single_for_cpu(ring->dev->dev->dma_dev, in sync_descbuffer_for_cpu()
399 void sync_descbuffer_for_device(struct b43_dmaring *ring, in sync_descbuffer_for_device() argument
402 B43_WARN_ON(ring->tx); in sync_descbuffer_for_device()
403 dma_sync_single_for_device(ring->dev->dev->dma_dev, in sync_descbuffer_for_device()
408 void free_descriptor_buffer(struct b43_dmaring *ring, in free_descriptor_buffer() argument
412 if (ring->tx) in free_descriptor_buffer()
413 ieee80211_free_txskb(ring->dev->wl->hw, meta->skb); in free_descriptor_buffer()
420 static int alloc_ringmemory(struct b43_dmaring *ring) in alloc_ringmemory() argument
431 u16 ring_mem_size = (ring->type == B43_DMA_64BIT) ? in alloc_ringmemory()
434 ring->descbase = dma_zalloc_coherent(ring->dev->dev->dma_dev, in alloc_ringmemory()
435 ring_mem_size, &(ring->dmabase), in alloc_ringmemory()
437 if (!ring->descbase) in alloc_ringmemory()
443 static void free_ringmemory(struct b43_dmaring *ring) in free_ringmemory() argument
445 u16 ring_mem_size = (ring->type == B43_DMA_64BIT) ? in free_ringmemory()
447 dma_free_coherent(ring->dev->dev->dma_dev, ring_mem_size, in free_ringmemory()
448 ring->descbase, ring->dmabase); in free_ringmemory()
551 static bool b43_dma_mapping_error(struct b43_dmaring *ring, in b43_dma_mapping_error() argument
555 if (unlikely(dma_mapping_error(ring->dev->dev->dma_dev, addr))) in b43_dma_mapping_error()
558 switch (ring->type) { in b43_dma_mapping_error()
578 unmap_descbuffer(ring, addr, buffersize, dma_to_device); in b43_dma_mapping_error()
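b43_dma_mapping_error() rejects more than dma_mapping_error() failures: the switch on ring->type also checks whether addr + buffersize fits the 30- or 32-bit engine's reach (everything fits the 64-bit engine), and unmaps the buffer again on failure, as line 578 shows. A standalone model of the range check:

/* Model of the address-range check in b43_dma_mapping_error(). */
#include <assert.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

enum dma_type { DMA_30BIT = 30, DMA_32BIT = 32, DMA_64BIT = 64 };

static bool addr_out_of_range(enum dma_type type, uint64_t addr, size_t len)
{
        switch (type) {
        case DMA_30BIT:
                return addr + len > (1ULL << 30);
        case DMA_32BIT:
                return addr + len > (1ULL << 32);
        case DMA_64BIT:
        default:
                return false; /* kernel addresses never exceed 64 bits */
        }
}

int main(void)
{
        /* A buffer straddling the 1GiB boundary fails on a 30-bit engine. */
        assert(addr_out_of_range(DMA_30BIT, (1ULL << 30) - 16, 32));
        /* A buffer ending exactly at 4GiB still fits a 32-bit engine. */
        assert(!addr_out_of_range(DMA_32BIT, 0xFFFFF000u, 0x1000));
        assert(!addr_out_of_range(DMA_64BIT, ~0ULL, 1));
        return 0;
}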
583 static bool b43_rx_buffer_is_poisoned(struct b43_dmaring *ring, struct sk_buff *skb) in b43_rx_buffer_is_poisoned() argument
585 unsigned char *f = skb->data + ring->frameoffset; in b43_rx_buffer_is_poisoned()
590 static void b43_poison_rx_buffer(struct b43_dmaring *ring, struct sk_buff *skb) in b43_poison_rx_buffer() argument
600 B43_WARN_ON(ring->rx_buffersize < ring->frameoffset + sizeof(struct b43_plcp_hdr6) + 2); in b43_poison_rx_buffer()
601 frame = skb->data + ring->frameoffset; in b43_poison_rx_buffer()
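The poison pair exists to catch RX buffers the hardware never filled: b43_poison_rx_buffer() paints 0xFF over the PLCP area at frameoffset (sizeof(struct b43_plcp_hdr6) plus 2 padding bytes, i.e. 8 bytes), and b43_rx_buffer_is_poisoned() ANDs those first eight payload bytes to see whether the pattern survived. A byte-array model of the scheme; the real frameoffset comes from the ring, 30 below is an arbitrary stand-in:

/* Model of the RX-buffer poison/check pair (raw bytes, no skb). */
#include <assert.h>
#include <stdbool.h>
#include <string.h>

#define FRAMEOFFSET 30  /* assumed stand-in for ring->frameoffset */
#define POISON_LEN  8   /* sizeof(struct b43_plcp_hdr6) + 2 padding bytes */

static void poison_rx_buffer(unsigned char *buf)
{
        memset(buf + FRAMEOFFSET, 0xFF, POISON_LEN);
}

static bool rx_buffer_is_poisoned(const unsigned char *buf)
{
        const unsigned char *f = buf + FRAMEOFFSET;

        /* The AND of the first 8 bytes is 0xFF only if all are 0xFF. */
        return (f[0] & f[1] & f[2] & f[3] & f[4] & f[5] & f[6] & f[7]) == 0xFF;
}

int main(void)
{
        unsigned char buf[128] = { 0 };

        poison_rx_buffer(buf);
        assert(rx_buffer_is_poisoned(buf));
        buf[FRAMEOFFSET] = 0x00;            /* the hardware wrote something */
        assert(!rx_buffer_is_poisoned(buf));
        return 0;
}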
605 static int setup_rx_descbuffer(struct b43_dmaring *ring, in setup_rx_descbuffer() argument
612 B43_WARN_ON(ring->tx); in setup_rx_descbuffer()
614 skb = __dev_alloc_skb(ring->rx_buffersize, gfp_flags); in setup_rx_descbuffer()
617 b43_poison_rx_buffer(ring, skb); in setup_rx_descbuffer()
618 dmaaddr = map_descbuffer(ring, skb->data, ring->rx_buffersize, 0); in setup_rx_descbuffer()
619 if (b43_dma_mapping_error(ring, dmaaddr, ring->rx_buffersize, 0)) { in setup_rx_descbuffer()
625 skb = __dev_alloc_skb(ring->rx_buffersize, gfp_flags); in setup_rx_descbuffer()
628 b43_poison_rx_buffer(ring, skb); in setup_rx_descbuffer()
629 dmaaddr = map_descbuffer(ring, skb->data, in setup_rx_descbuffer()
630 ring->rx_buffersize, 0); in setup_rx_descbuffer()
631 if (b43_dma_mapping_error(ring, dmaaddr, ring->rx_buffersize, 0)) { in setup_rx_descbuffer()
632 b43err(ring->dev->wl, "RX DMA buffer allocation failed\n"); in setup_rx_descbuffer()
640 ring->ops->fill_descriptor(ring, desc, dmaaddr, in setup_rx_descbuffer()
641 ring->rx_buffersize, 0, 0, 0); in setup_rx_descbuffer()
649 static int alloc_initial_descbuffers(struct b43_dmaring *ring) in alloc_initial_descbuffers() argument
655 for (i = 0; i < ring->nr_slots; i++) { in alloc_initial_descbuffers()
656 desc = ring->ops->idx2desc(ring, i, &meta); in alloc_initial_descbuffers()
658 err = setup_rx_descbuffer(ring, desc, meta, GFP_KERNEL); in alloc_initial_descbuffers()
660 b43err(ring->dev->wl, in alloc_initial_descbuffers()
666 ring->used_slots = ring->nr_slots; in alloc_initial_descbuffers()
673 desc = ring->ops->idx2desc(ring, i, &meta); in alloc_initial_descbuffers()
675 unmap_descbuffer(ring, meta->dmaaddr, ring->rx_buffersize, 0); in alloc_initial_descbuffers()
685 static int dmacontroller_setup(struct b43_dmaring *ring) in dmacontroller_setup() argument
690 bool parity = ring->dev->dma.parity; in dmacontroller_setup()
694 if (ring->tx) { in dmacontroller_setup()
695 if (ring->type == B43_DMA_64BIT) { in dmacontroller_setup()
696 u64 ringbase = (u64) (ring->dmabase); in dmacontroller_setup()
697 addrext = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_EXT); in dmacontroller_setup()
698 addrlo = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_LOW); in dmacontroller_setup()
699 addrhi = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_HIGH); in dmacontroller_setup()
706 b43_dma_write(ring, B43_DMA64_TXCTL, value); in dmacontroller_setup()
707 b43_dma_write(ring, B43_DMA64_TXRINGLO, addrlo); in dmacontroller_setup()
708 b43_dma_write(ring, B43_DMA64_TXRINGHI, addrhi); in dmacontroller_setup()
710 u32 ringbase = (u32) (ring->dmabase); in dmacontroller_setup()
711 addrext = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_EXT); in dmacontroller_setup()
712 addrlo = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_LOW); in dmacontroller_setup()
719 b43_dma_write(ring, B43_DMA32_TXCTL, value); in dmacontroller_setup()
720 b43_dma_write(ring, B43_DMA32_TXRING, addrlo); in dmacontroller_setup()
723 err = alloc_initial_descbuffers(ring); in dmacontroller_setup()
726 if (ring->type == B43_DMA_64BIT) { in dmacontroller_setup()
727 u64 ringbase = (u64) (ring->dmabase); in dmacontroller_setup()
728 addrext = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_EXT); in dmacontroller_setup()
729 addrlo = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_LOW); in dmacontroller_setup()
730 addrhi = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_HIGH); in dmacontroller_setup()
732 value = (ring->frameoffset << B43_DMA64_RXFROFF_SHIFT); in dmacontroller_setup()
738 b43_dma_write(ring, B43_DMA64_RXCTL, value); in dmacontroller_setup()
739 b43_dma_write(ring, B43_DMA64_RXRINGLO, addrlo); in dmacontroller_setup()
740 b43_dma_write(ring, B43_DMA64_RXRINGHI, addrhi); in dmacontroller_setup()
741 b43_dma_write(ring, B43_DMA64_RXINDEX, ring->nr_slots * in dmacontroller_setup()
744 u32 ringbase = (u32) (ring->dmabase); in dmacontroller_setup()
745 addrext = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_EXT); in dmacontroller_setup()
746 addrlo = b43_dma_address(&ring->dev->dma, ringbase, B43_DMA_ADDR_LOW); in dmacontroller_setup()
748 value = (ring->frameoffset << B43_DMA32_RXFROFF_SHIFT); in dmacontroller_setup()
754 b43_dma_write(ring, B43_DMA32_RXCTL, value); in dmacontroller_setup()
755 b43_dma_write(ring, B43_DMA32_RXRING, addrlo); in dmacontroller_setup()
756 b43_dma_write(ring, B43_DMA32_RXINDEX, ring->nr_slots * in dmacontroller_setup()
766 static void dmacontroller_cleanup(struct b43_dmaring *ring) in dmacontroller_cleanup() argument
768 if (ring->tx) { in dmacontroller_cleanup()
769 b43_dmacontroller_tx_reset(ring->dev, ring->mmio_base, in dmacontroller_cleanup()
770 ring->type); in dmacontroller_cleanup()
771 if (ring->type == B43_DMA_64BIT) { in dmacontroller_cleanup()
772 b43_dma_write(ring, B43_DMA64_TXRINGLO, 0); in dmacontroller_cleanup()
773 b43_dma_write(ring, B43_DMA64_TXRINGHI, 0); in dmacontroller_cleanup()
775 b43_dma_write(ring, B43_DMA32_TXRING, 0); in dmacontroller_cleanup()
777 b43_dmacontroller_rx_reset(ring->dev, ring->mmio_base, in dmacontroller_cleanup()
778 ring->type); in dmacontroller_cleanup()
779 if (ring->type == B43_DMA_64BIT) { in dmacontroller_cleanup()
780 b43_dma_write(ring, B43_DMA64_RXRINGLO, 0); in dmacontroller_cleanup()
781 b43_dma_write(ring, B43_DMA64_RXRINGHI, 0); in dmacontroller_cleanup()
783 b43_dma_write(ring, B43_DMA32_RXRING, 0); in dmacontroller_cleanup()
787 static void free_all_descbuffers(struct b43_dmaring *ring) in free_all_descbuffers() argument
792 if (!ring->used_slots) in free_all_descbuffers()
794 for (i = 0; i < ring->nr_slots; i++) { in free_all_descbuffers()
796 ring->ops->idx2desc(ring, i, &meta); in free_all_descbuffers()
799 B43_WARN_ON(!ring->tx); in free_all_descbuffers()
802 if (ring->tx) { in free_all_descbuffers()
803 unmap_descbuffer(ring, meta->dmaaddr, in free_all_descbuffers()
806 unmap_descbuffer(ring, meta->dmaaddr, in free_all_descbuffers()
807 ring->rx_buffersize, 0); in free_all_descbuffers()
809 free_descriptor_buffer(ring, meta); in free_all_descbuffers()
863 struct b43_dmaring *ring; in b43_setup_dmaring() local
867 ring = kzalloc(sizeof(*ring), GFP_KERNEL); in b43_setup_dmaring()
868 if (!ring) in b43_setup_dmaring()
871 ring->nr_slots = B43_RXRING_SLOTS; in b43_setup_dmaring()
873 ring->nr_slots = B43_TXRING_SLOTS; in b43_setup_dmaring()
875 ring->meta = kcalloc(ring->nr_slots, sizeof(struct b43_dmadesc_meta), in b43_setup_dmaring()
877 if (!ring->meta) in b43_setup_dmaring()
879 for (i = 0; i < ring->nr_slots; i++) in b43_setup_dmaring()
880 ring->meta[i].skb = B43_DMA_PTR_POISON; in b43_setup_dmaring()
882 ring->type = type; in b43_setup_dmaring()
883 ring->dev = dev; in b43_setup_dmaring()
884 ring->mmio_base = b43_dmacontroller_base(type, controller_index); in b43_setup_dmaring()
885 ring->index = controller_index; in b43_setup_dmaring()
887 ring->ops = &dma64_ops; in b43_setup_dmaring()
889 ring->ops = &dma32_ops; in b43_setup_dmaring()
891 ring->tx = true; in b43_setup_dmaring()
892 ring->current_slot = -1; in b43_setup_dmaring()
894 if (ring->index == 0) { in b43_setup_dmaring()
897 ring->rx_buffersize = B43_DMA0_RX_FW598_BUFSIZE; in b43_setup_dmaring()
898 ring->frameoffset = B43_DMA0_RX_FW598_FO; in b43_setup_dmaring()
902 ring->rx_buffersize = B43_DMA0_RX_FW351_BUFSIZE; in b43_setup_dmaring()
903 ring->frameoffset = B43_DMA0_RX_FW351_FO; in b43_setup_dmaring()
910 ring->last_injected_overflow = jiffies; in b43_setup_dmaring()
917 ring->txhdr_cache = kcalloc(ring->nr_slots / TX_SLOTS_PER_FRAME, in b43_setup_dmaring()
920 if (!ring->txhdr_cache) in b43_setup_dmaring()
925 ring->txhdr_cache, in b43_setup_dmaring()
929 if (b43_dma_mapping_error(ring, dma_test, in b43_setup_dmaring()
932 kfree(ring->txhdr_cache); in b43_setup_dmaring()
933 ring->txhdr_cache = kcalloc(ring->nr_slots / TX_SLOTS_PER_FRAME, in b43_setup_dmaring()
936 if (!ring->txhdr_cache) in b43_setup_dmaring()
940 ring->txhdr_cache, in b43_setup_dmaring()
944 if (b43_dma_mapping_error(ring, dma_test, in b43_setup_dmaring()
958 err = alloc_ringmemory(ring); in b43_setup_dmaring()
961 err = dmacontroller_setup(ring); in b43_setup_dmaring()
966 return ring; in b43_setup_dmaring()
969 free_ringmemory(ring); in b43_setup_dmaring()
971 kfree(ring->txhdr_cache); in b43_setup_dmaring()
973 kfree(ring->meta); in b43_setup_dmaring()
975 kfree(ring); in b43_setup_dmaring()
976 ring = NULL; in b43_setup_dmaring()
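The tail of b43_setup_dmaring() (lines 969-976) is the classic allocate-forward, free-backward unwind: each failure point jumps past its own allocation and releases only what was acquired before it. A compilable model of that shape, with assumed label names and stand-in sizes:

/* Model of the allocate-forward / free-backward unwind above. */
#include <stdlib.h>

struct ring_model { void *meta, *txhdr_cache, *descbase; };

static struct ring_model *setup_ring(void)
{
        struct ring_model *ring = calloc(1, sizeof(*ring));

        if (!ring)
                goto out;
        ring->meta = calloc(256, 16);           /* stands in for the meta array */
        if (!ring->meta)
                goto err_kfree_ring;
        ring->txhdr_cache = malloc(128 * 104);  /* stands in for txhdr_cache */
        if (!ring->txhdr_cache)
                goto err_kfree_meta;
        ring->descbase = malloc(4096);          /* stands in for the ring memory */
        if (!ring->descbase)
                goto err_kfree_txhdr_cache;
out:
        return ring;

err_kfree_txhdr_cache:
        free(ring->txhdr_cache);
err_kfree_meta:
        free(ring->meta);
err_kfree_ring:
        free(ring);
        ring = NULL;
        goto out;
}

int main(void)
{
        struct ring_model *ring = setup_ring();

        if (!ring)
                return 1;
        free(ring->descbase);
        free(ring->txhdr_cache);
        free(ring->meta);
        free(ring);
        return 0;
}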
992 static void b43_destroy_dmaring(struct b43_dmaring *ring, in b43_destroy_dmaring() argument
995 if (!ring) in b43_destroy_dmaring()
1001 u64 failed_packets = ring->nr_failed_tx_packets; in b43_destroy_dmaring()
1002 u64 succeed_packets = ring->nr_succeed_tx_packets; in b43_destroy_dmaring()
1009 average_tries = divide(ring->nr_total_packet_tries * 100, nr_packets); in b43_destroy_dmaring()
1011 b43dbg(ring->dev->wl, "DMA-%u %s: " in b43_destroy_dmaring()
1014 (unsigned int)(ring->type), ringname, in b43_destroy_dmaring()
1015 ring->max_used_slots, in b43_destroy_dmaring()
1016 ring->nr_slots, in b43_destroy_dmaring()
1029 dmacontroller_cleanup(ring); in b43_destroy_dmaring()
1030 free_all_descbuffers(ring); in b43_destroy_dmaring()
1031 free_ringmemory(ring); in b43_destroy_dmaring()
1033 kfree(ring->txhdr_cache); in b43_destroy_dmaring()
1034 kfree(ring->meta); in b43_destroy_dmaring()
1035 kfree(ring); in b43_destroy_dmaring()
1038 #define destroy_ring(dma, ring) do { \ argument
1039 b43_destroy_dmaring((dma)->ring, __stringify(ring)); \
1040 (dma)->ring = NULL; \
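destroy_ring() is a macro rather than a function so that __stringify(ring) can turn the struct-field name into the label printed with the ring's debug statistics, and so the caller's pointer can be cleared afterwards. A small standalone model of the same trick:

/* Model of the destroy_ring() macro: __stringify turns the field name
 * into the debug label, and the pointer is cleared after teardown. */
#include <stdio.h>
#include <stdlib.h>

#define __stringify_1(x) #x
#define __stringify(x)   __stringify_1(x)

struct dmaring { int index; };
struct dma { struct dmaring *tx_ring_AC_BE; };

static void destroy_dmaring(struct dmaring *ring, const char *name)
{
        if (!ring)
                return;
        printf("destroying %s (index %d)\n", name, ring->index);
        free(ring);
}

#define destroy_ring(dma, ring) do {                            \
        destroy_dmaring((dma)->ring, __stringify(ring));        \
        (dma)->ring = NULL;                                     \
} while (0)

int main(void)
{
        struct dma dma = { .tx_ring_AC_BE = calloc(1, sizeof(struct dmaring)) };

        destroy_ring(&dma, tx_ring_AC_BE);
        return dma.tx_ring_AC_BE != NULL;  /* 0: the pointer was cleared */
}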
1198 static u16 generate_cookie(struct b43_dmaring *ring, int slot) in generate_cookie() argument
1210 cookie = (((u16)ring->index + 1) << 12); in generate_cookie()
1222 struct b43_dmaring *ring = NULL; in parse_cookie() local
1226 ring = dma->tx_ring_AC_BK; in parse_cookie()
1229 ring = dma->tx_ring_AC_BE; in parse_cookie()
1232 ring = dma->tx_ring_AC_VI; in parse_cookie()
1235 ring = dma->tx_ring_AC_VO; in parse_cookie()
1238 ring = dma->tx_ring_mcast; in parse_cookie()
1242 if (unlikely(!ring || *slot < 0 || *slot >= ring->nr_slots)) { in parse_cookie()
1248 return ring; in parse_cookie()
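generate_cookie() and parse_cookie() are inverses: the 16-bit cookie that the firmware echoes back in TX status carries ring index + 1 in its top four bits (so an all-zero cookie stays invalid) and the slot in the low twelve, which matches the five ring assignments above. A standalone model of the packing:

/* Model of the TX-status cookie layout: 4 bits of ring ID, 12 bits of slot. */
#include <assert.h>
#include <stdint.h>

static uint16_t generate_cookie(int ring_index, int slot)
{
        assert((slot & ~0x0FFF) == 0);      /* B43_WARN_ON in the driver */
        return (uint16_t)(((ring_index + 1) << 12) | slot);
}

static void parse_cookie(uint16_t cookie, int *ring_index, int *slot)
{
        *ring_index = (cookie >> 12) - 1;   /* 0x1000..0x5000 -> ring 0..4 */
        *slot = cookie & 0x0FFF;
}

int main(void)
{
        int ring, slot;

        parse_cookie(generate_cookie(3, 257), &ring, &slot);
        assert(ring == 3 && slot == 257);
        return 0;
}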
1251 static int dma_tx_fragment(struct b43_dmaring *ring, in dma_tx_fragment() argument
1254 const struct b43_dma_ops *ops = ring->ops; in dma_tx_fragment()
1264 size_t hdrsize = b43_txhdr_size(ring->dev); in dma_tx_fragment()
1271 old_top_slot = ring->current_slot; in dma_tx_fragment()
1272 old_used_slots = ring->used_slots; in dma_tx_fragment()
1275 slot = request_slot(ring); in dma_tx_fragment()
1276 desc = ops->idx2desc(ring, slot, &meta_hdr); in dma_tx_fragment()
1279 header = &(ring->txhdr_cache[(slot / TX_SLOTS_PER_FRAME) * hdrsize]); in dma_tx_fragment()
1280 cookie = generate_cookie(ring, slot); in dma_tx_fragment()
1281 err = b43_generate_txhdr(ring->dev, header, in dma_tx_fragment()
1284 ring->current_slot = old_top_slot; in dma_tx_fragment()
1285 ring->used_slots = old_used_slots; in dma_tx_fragment()
1289 meta_hdr->dmaaddr = map_descbuffer(ring, (unsigned char *)header, in dma_tx_fragment()
1291 if (b43_dma_mapping_error(ring, meta_hdr->dmaaddr, hdrsize, 1)) { in dma_tx_fragment()
1292 ring->current_slot = old_top_slot; in dma_tx_fragment()
1293 ring->used_slots = old_used_slots; in dma_tx_fragment()
1296 ops->fill_descriptor(ring, desc, meta_hdr->dmaaddr, in dma_tx_fragment()
1300 slot = request_slot(ring); in dma_tx_fragment()
1301 desc = ops->idx2desc(ring, slot, &meta); in dma_tx_fragment()
1308 meta->dmaaddr = map_descbuffer(ring, skb->data, skb->len, 1); in dma_tx_fragment()
1310 if (b43_dma_mapping_error(ring, meta->dmaaddr, skb->len, 1)) { in dma_tx_fragment()
1314 ring->current_slot = old_top_slot; in dma_tx_fragment()
1315 ring->used_slots = old_used_slots; in dma_tx_fragment()
1320 meta->dmaaddr = map_descbuffer(ring, priv_info->bouncebuffer, skb->len, 1); in dma_tx_fragment()
1321 if (b43_dma_mapping_error(ring, meta->dmaaddr, skb->len, 1)) { in dma_tx_fragment()
1324 ring->current_slot = old_top_slot; in dma_tx_fragment()
1325 ring->used_slots = old_used_slots; in dma_tx_fragment()
1331 ops->fill_descriptor(ring, desc, meta->dmaaddr, skb->len, 0, 1, 1); in dma_tx_fragment()
1336 b43_shm_write16(ring->dev, B43_SHM_SHARED, in dma_tx_fragment()
1341 ops->poke_tx(ring, next_slot(ring, slot)); in dma_tx_fragment()
1345 unmap_descbuffer(ring, meta_hdr->dmaaddr, in dma_tx_fragment()
1350 static inline int should_inject_overflow(struct b43_dmaring *ring) in should_inject_overflow() argument
1353 if (unlikely(b43_debug(ring->dev, B43_DBG_DMAOVERFLOW))) { in should_inject_overflow()
1358 next_overflow = ring->last_injected_overflow + HZ; in should_inject_overflow()
1360 ring->last_injected_overflow = jiffies; in should_inject_overflow()
1361 b43dbg(ring->dev->wl, in should_inject_overflow()
1363 "DMA controller %d\n", ring->index); in should_inject_overflow()
1375 struct b43_dmaring *ring; in select_ring_by_priority() local
1384 ring = dev->dma.tx_ring_AC_VO; in select_ring_by_priority()
1387 ring = dev->dma.tx_ring_AC_VI; in select_ring_by_priority()
1390 ring = dev->dma.tx_ring_AC_BE; in select_ring_by_priority()
1393 ring = dev->dma.tx_ring_AC_BK; in select_ring_by_priority()
1397 ring = dev->dma.tx_ring_AC_BE; in select_ring_by_priority()
1399 return ring; in select_ring_by_priority()
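select_ring_by_priority() maps the mac80211 queue index onto an access-category ring in strict priority order, voice first, falling back to best effort for out-of-range values. A table-style model using the ring names from the listing:

/* Model of the queue-priority -> TX ring selection. */
#include <assert.h>
#include <string.h>

static const char *select_ring_by_priority(unsigned int queue_prio)
{
        switch (queue_prio) {
        case 0:  return "tx_ring_AC_VO";  /* voice */
        case 1:  return "tx_ring_AC_VI";  /* video */
        case 2:  return "tx_ring_AC_BE";  /* best effort */
        case 3:  return "tx_ring_AC_BK";  /* background */
        default: return "tx_ring_AC_BE";  /* B43_WARN_ON + best-effort fallback */
        }
}

int main(void)
{
        assert(strcmp(select_ring_by_priority(0), "tx_ring_AC_VO") == 0);
        assert(strcmp(select_ring_by_priority(7), "tx_ring_AC_BE") == 0);
        return 0;
}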
1404 struct b43_dmaring *ring; in b43_dma_tx() local
1412 ring = dev->dma.tx_ring_mcast; in b43_dma_tx()
1418 ring = select_ring_by_priority( in b43_dma_tx()
1422 B43_WARN_ON(!ring->tx); in b43_dma_tx()
1424 if (unlikely(ring->stopped)) { in b43_dma_tx()
1435 if (unlikely(WARN_ON(free_slots(ring) < TX_SLOTS_PER_FRAME))) { in b43_dma_tx()
1446 ring->queue_prio = skb_get_queue_mapping(skb); in b43_dma_tx()
1448 err = dma_tx_fragment(ring, skb); in b43_dma_tx()
1460 if ((free_slots(ring) < TX_SLOTS_PER_FRAME) || in b43_dma_tx()
1461 should_inject_overflow(ring)) { in b43_dma_tx()
1466 ring->stopped = true; in b43_dma_tx()
1468 b43dbg(dev->wl, "Stopped TX ring %d\n", ring->index); in b43_dma_tx()
1480 struct b43_dmaring *ring; in b43_dma_handle_txstatus() local
1489 ring = parse_cookie(dev, status->cookie, &slot); in b43_dma_handle_txstatus()
1490 if (unlikely(!ring)) in b43_dma_handle_txstatus()
1492 B43_WARN_ON(!ring->tx); in b43_dma_handle_txstatus()
1497 firstused = ring->current_slot - ring->used_slots + 1; in b43_dma_handle_txstatus()
1499 firstused = ring->nr_slots + firstused; in b43_dma_handle_txstatus()
1506 if (slot == next_slot(ring, next_slot(ring, firstused))) { in b43_dma_handle_txstatus()
1516 ring->index, slot); in b43_dma_handle_txstatus()
1526 ring->index, firstused, slot); in b43_dma_handle_txstatus()
1532 ops = ring->ops; in b43_dma_handle_txstatus()
1534 B43_WARN_ON(slot < 0 || slot >= ring->nr_slots); in b43_dma_handle_txstatus()
1536 ops->idx2desc(ring, slot, &meta); in b43_dma_handle_txstatus()
1541 slot, firstused, ring->index); in b43_dma_handle_txstatus()
1549 unmap_descbuffer(ring, meta->dmaaddr, in b43_dma_handle_txstatus()
1554 unmap_descbuffer(ring, meta->dmaaddr, in b43_dma_handle_txstatus()
1567 slot, firstused, ring->index); in b43_dma_handle_txstatus()
1589 ring->nr_succeed_tx_packets++; in b43_dma_handle_txstatus()
1591 ring->nr_failed_tx_packets++; in b43_dma_handle_txstatus()
1592 ring->nr_total_packet_tries += status->frame_count; in b43_dma_handle_txstatus()
1606 slot, firstused, ring->index); in b43_dma_handle_txstatus()
1612 ring->used_slots--; in b43_dma_handle_txstatus()
1619 slot = next_slot(ring, slot); in b43_dma_handle_txstatus()
1623 if (ring->stopped) { in b43_dma_handle_txstatus()
1624 B43_WARN_ON(free_slots(ring) < TX_SLOTS_PER_FRAME); in b43_dma_handle_txstatus()
1625 ring->stopped = false; in b43_dma_handle_txstatus()
1628 if (dev->wl->tx_queue_stopped[ring->queue_prio]) { in b43_dma_handle_txstatus()
1629 dev->wl->tx_queue_stopped[ring->queue_prio] = 0; in b43_dma_handle_txstatus()
1633 ieee80211_wake_queue(dev->wl->hw, ring->queue_prio); in b43_dma_handle_txstatus()
1635 b43dbg(dev->wl, "Woke up TX ring %d\n", ring->index); in b43_dma_handle_txstatus()
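The TX-status path has to locate the oldest in-flight slot ("firstused") from current_slot and used_slots, wrapping negative results back around the ring (lines 1497-1499); the double next_slot() at line 1506 then tolerates firmware that reports completion two slots ahead of that. A model of the wraparound math:

/* Model of the firstused computation in b43_dma_handle_txstatus(). */
#include <assert.h>

static int first_used_slot(int current_slot, int used_slots, int nr_slots)
{
        int firstused = current_slot - used_slots + 1;

        if (firstused < 0)
                firstused += nr_slots;  /* wrap backwards around the ring */
        return firstused;
}

int main(void)
{
        /* 3 frames in flight ending at slot 10: the oldest is slot 8. */
        assert(first_used_slot(10, 3, 256) == 8);
        /* 4 in flight ending at slot 1: the oldest wraps to slot 254. */
        assert(first_used_slot(1, 4, 256) == 254);
        return 0;
}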
1642 static void dma_rx(struct b43_dmaring *ring, int *slot) in dma_rx() argument
1644 const struct b43_dma_ops *ops = ring->ops; in dma_rx()
1653 desc = ops->idx2desc(ring, *slot, &meta); in dma_rx()
1655 sync_descbuffer_for_cpu(ring, meta->dmaaddr, ring->rx_buffersize); in dma_rx()
1673 if (unlikely(b43_rx_buffer_is_poisoned(ring, skb))) { in dma_rx()
1676 b43dbg(ring->dev->wl, "DMA RX: Dropping poisoned buffer.\n"); in dma_rx()
1680 if (unlikely(len + ring->frameoffset > ring->rx_buffersize)) { in dma_rx()
1690 desc = ops->idx2desc(ring, *slot, &meta); in dma_rx()
1692 b43_poison_rx_buffer(ring, meta->skb); in dma_rx()
1693 sync_descbuffer_for_device(ring, meta->dmaaddr, in dma_rx()
1694 ring->rx_buffersize); in dma_rx()
1695 *slot = next_slot(ring, *slot); in dma_rx()
1697 tmp -= ring->rx_buffersize; in dma_rx()
1701 b43err(ring->dev->wl, "DMA RX buffer too small " in dma_rx()
1703 len, ring->rx_buffersize, cnt); in dma_rx()
1708 err = setup_rx_descbuffer(ring, desc, meta, GFP_ATOMIC); in dma_rx()
1710 b43dbg(ring->dev->wl, "DMA RX: setup_rx_descbuffer() failed\n"); in dma_rx()
1714 unmap_descbuffer(ring, dmaaddr, ring->rx_buffersize, 0); in dma_rx()
1715 skb_put(skb, len + ring->frameoffset); in dma_rx()
1716 skb_pull(skb, ring->frameoffset); in dma_rx()
1718 b43_rx(ring->dev, skb, rxhdr); in dma_rx()
1724 b43_poison_rx_buffer(ring, skb); in dma_rx()
1725 sync_descbuffer_for_device(ring, dmaaddr, ring->rx_buffersize); in dma_rx()
1728 void b43_dma_handle_rx_overflow(struct b43_dmaring *ring) in b43_dma_handle_rx_overflow() argument
1732 B43_WARN_ON(ring->tx); in b43_dma_handle_rx_overflow()
1742 current_slot = ring->ops->get_current_rxslot(ring); in b43_dma_handle_rx_overflow()
1743 previous_slot = prev_slot(ring, current_slot); in b43_dma_handle_rx_overflow()
1744 ring->ops->set_current_rxslot(ring, previous_slot); in b43_dma_handle_rx_overflow()
1747 void b43_dma_rx(struct b43_dmaring *ring) in b43_dma_rx() argument
1749 const struct b43_dma_ops *ops = ring->ops; in b43_dma_rx()
1753 B43_WARN_ON(ring->tx); in b43_dma_rx()
1754 current_slot = ops->get_current_rxslot(ring); in b43_dma_rx()
1755 B43_WARN_ON(!(current_slot >= 0 && current_slot < ring->nr_slots)); in b43_dma_rx()
1757 slot = ring->current_slot; in b43_dma_rx()
1758 for (; slot != current_slot; slot = next_slot(ring, slot)) { in b43_dma_rx()
1759 dma_rx(ring, &slot); in b43_dma_rx()
1760 update_max_used_slots(ring, ++used_slots); in b43_dma_rx()
1763 ops->set_current_rxslot(ring, slot); in b43_dma_rx()
1764 ring->current_slot = slot; in b43_dma_rx()
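b43_dma_rx() drains every buffer between the driver's notion of the current slot and the one the hardware reports, then writes the final slot back to acknowledge; dma_rx() can advance *slot further on its own when an oversized frame spans several buffers. A model of the drain loop:

/* Model of the RX drain loop: walk from the driver's slot up to the
 * hardware's, then acknowledge the final position. */
#include <assert.h>

#define NR_SLOTS 64

static int hw_slot = 5;      /* stands in for get_current_rxslot() */
static int ring_current;     /* stands in for ring->current_slot */
static int processed;

static int next_slot(int slot)
{
        return (slot + 1) % NR_SLOTS;
}

static void dma_rx(int *slot)
{
        processed++;         /* a real frame would be handed to b43_rx() */
        (void)slot;          /* multi-buffer frames would advance *slot here */
}

static void b43_dma_rx_model(void)
{
        int slot = ring_current;

        for (; slot != hw_slot; slot = next_slot(slot))
                dma_rx(&slot);
        ring_current = slot; /* and set_current_rxslot(slot) in hardware */
}

int main(void)
{
        ring_current = 60;   /* wraps 60..63 then 0..4: nine buffers */
        b43_dma_rx_model();
        assert(processed == 9 && ring_current == 5);
        return 0;
}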
1767 static void b43_dma_tx_suspend_ring(struct b43_dmaring *ring) in b43_dma_tx_suspend_ring() argument
1769 B43_WARN_ON(!ring->tx); in b43_dma_tx_suspend_ring()
1770 ring->ops->tx_suspend(ring); in b43_dma_tx_suspend_ring()
1773 static void b43_dma_tx_resume_ring(struct b43_dmaring *ring) in b43_dma_tx_resume_ring() argument
1775 B43_WARN_ON(!ring->tx); in b43_dma_tx_resume_ring()
1776 ring->ops->tx_resume(ring); in b43_dma_tx_resume_ring()