Lines matching refs: htt (ath10k HTT RX path, htt_rx.c). Each entry gives the original source line number, the matching source line, and the enclosing function; the trailing 'argument' and 'local' tags mark whether htt enters that function as a parameter or as a local variable.

42 hash_for_each_possible(ar->htt.rx_ring.skb_table, rxcb, hlist, paddr)  in ath10k_htt_rx_find_skb_paddr()
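
ath10k_htt_rx_find_skb_paddr() maps a DMA address reported by the firmware back to the buffer that owns it by probing the rx_ring.skb_table hashtable keyed on paddr; hash_for_each_possible() walks only the one bucket the key hashes to. A minimal user-space sketch of that bucket-probe idea follows; every name in it is a hypothetical stand-in, not the driver's types:

    #include <stdint.h>
    #include <stdio.h>

    #define TABLE_BITS 5
    #define TABLE_SIZE (1u << TABLE_BITS)

    /* Stand-in for the per-skb control block that records its DMA address. */
    struct rxcb {
        uint32_t paddr;
        struct rxcb *next;              /* chain within one bucket */
    };

    static struct rxcb *skb_table[TABLE_SIZE];

    static unsigned int bucket_of(uint32_t paddr)
    {
        return (paddr * 2654435761u) >> (32 - TABLE_BITS);
    }

    static void table_add(struct rxcb *cb)
    {
        unsigned int b = bucket_of(cb->paddr);
        cb->next = skb_table[b];
        skb_table[b] = cb;
    }

    /* Like hash_for_each_possible(): scan only the candidate bucket. */
    static struct rxcb *find_by_paddr(uint32_t paddr)
    {
        struct rxcb *cb;

        for (cb = skb_table[bucket_of(paddr)]; cb; cb = cb->next)
            if (cb->paddr == paddr)
                return cb;
        return NULL;
    }

    int main(void)
    {
        struct rxcb a = { .paddr = 0x1000 }, b = { .paddr = 0x2000 };

        table_add(&a);
        table_add(&b);
        printf("0x2000 -> %s\n", find_by_paddr(0x2000) ? "found" : "missing");
        return 0;
    }
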
50 static void ath10k_htt_rx_ring_free(struct ath10k_htt *htt) in ath10k_htt_rx_ring_free() argument
57 if (htt->rx_ring.in_ord_rx) { in ath10k_htt_rx_ring_free()
58 hash_for_each_safe(htt->rx_ring.skb_table, i, n, rxcb, hlist) { in ath10k_htt_rx_ring_free()
60 dma_unmap_single(htt->ar->dev, rxcb->paddr, in ath10k_htt_rx_ring_free()
67 for (i = 0; i < htt->rx_ring.size; i++) { in ath10k_htt_rx_ring_free()
68 skb = htt->rx_ring.netbufs_ring[i]; in ath10k_htt_rx_ring_free()
73 dma_unmap_single(htt->ar->dev, rxcb->paddr, in ath10k_htt_rx_ring_free()
80 htt->rx_ring.fill_cnt = 0; in ath10k_htt_rx_ring_free()
81 hash_init(htt->rx_ring.skb_table); in ath10k_htt_rx_ring_free()
82 memset(htt->rx_ring.netbufs_ring, 0, in ath10k_htt_rx_ring_free()
83 htt->rx_ring.size * sizeof(htt->rx_ring.netbufs_ring[0])); in ath10k_htt_rx_ring_free()
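
ath10k_htt_rx_ring_free() drains the ring along one of two paths: with in-order RX enabled the buffers live in skb_table and are walked with hash_for_each_safe() (safe because entries are removed mid-iteration); otherwise every slot of netbufs_ring is scanned linearly. Either way each buffer is dma_unmap_single()d and freed, then fill_cnt is reset and the slot array wiped. A sketch of the linear path only, with free() standing in for unmap-plus-dev_kfree_skb_any() and hypothetical types throughout:

    #include <stdlib.h>
    #include <string.h>

    struct rx_ring {
        void **netbufs_ring;   /* one buffer pointer per hardware slot */
        int size;              /* slot count, a power of two           */
        int fill_cnt;          /* buffers currently posted to hardware */
    };

    static void ring_free(struct rx_ring *rr)
    {
        for (int i = 0; i < rr->size; i++) {
            if (!rr->netbufs_ring[i])
                continue;                  /* empty slot, nothing posted */
            free(rr->netbufs_ring[i]);     /* driver: dma_unmap_single + free skb */
        }

        rr->fill_cnt = 0;                  /* hardware owns nothing now */
        memset(rr->netbufs_ring, 0,
               rr->size * sizeof(rr->netbufs_ring[0]));
    }

    int main(void)
    {
        void *slots[4] = { malloc(8), NULL, malloc(8), NULL };
        struct rx_ring rr = { .netbufs_ring = slots, .size = 4, .fill_cnt = 2 };

        ring_free(&rr);
        return 0;
    }
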
86 static int __ath10k_htt_rx_ring_fill_n(struct ath10k_htt *htt, int num) in __ath10k_htt_rx_ring_fill_n() argument
101 idx = __le32_to_cpu(*htt->rx_ring.alloc_idx.vaddr); in __ath10k_htt_rx_ring_fill_n()
118 paddr = dma_map_single(htt->ar->dev, skb->data, in __ath10k_htt_rx_ring_fill_n()
122 if (unlikely(dma_mapping_error(htt->ar->dev, paddr))) { in __ath10k_htt_rx_ring_fill_n()
130 htt->rx_ring.netbufs_ring[idx] = skb; in __ath10k_htt_rx_ring_fill_n()
131 htt->rx_ring.paddrs_ring[idx] = __cpu_to_le32(paddr); in __ath10k_htt_rx_ring_fill_n()
132 htt->rx_ring.fill_cnt++; in __ath10k_htt_rx_ring_fill_n()
134 if (htt->rx_ring.in_ord_rx) { in __ath10k_htt_rx_ring_fill_n()
135 hash_add(htt->rx_ring.skb_table, in __ath10k_htt_rx_ring_fill_n()
142 idx &= htt->rx_ring.size_mask; in __ath10k_htt_rx_ring_fill_n()
151 *htt->rx_ring.alloc_idx.vaddr = __cpu_to_le32(idx); in __ath10k_htt_rx_ring_fill_n()
155 static int ath10k_htt_rx_ring_fill_n(struct ath10k_htt *htt, int num) in ath10k_htt_rx_ring_fill_n() argument
157 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_ring_fill_n()
158 return __ath10k_htt_rx_ring_fill_n(htt, num); in ath10k_htt_rx_ring_fill_n()
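
__ath10k_htt_rx_ring_fill_n() allocates and DMA-maps num buffers, writes each buffer pointer and its little-endian DMA address into the paired netbufs_ring/paddrs_ring slots, and advances the producer index with idx &= size_mask, which only works because the ring size is a power of two. The new index is written back to the shared alloc_idx word only after the slots are populated, so the device never sees an index covering unfilled slots; the locked ath10k_htt_rx_ring_fill_n() wrapper just adds lockdep_assert_held() before delegating. A self-contained sketch of the masked fill loop (simplified types, no real DMA):

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    #define RING_SIZE 8                 /* must be a power of two */
    #define RING_MASK (RING_SIZE - 1)

    struct ring {
        void *netbufs[RING_SIZE];       /* CPU-side buffer pointers        */
        uint32_t paddrs[RING_SIZE];     /* device-visible addresses (fake) */
        uint32_t alloc_idx;             /* producer index shared with hw   */
        int fill_cnt;
    };

    static int ring_fill_n(struct ring *r, int num)
    {
        uint32_t idx = r->alloc_idx;    /* driver: __le32_to_cpu(*alloc_idx.vaddr) */

        while (num-- > 0) {
            void *buf = malloc(64);     /* stands in for the skb allocation */
            if (!buf)
                return -1;              /* driver returns -ENOMEM here */

            r->netbufs[idx] = buf;
            r->paddrs[idx] = (uint32_t)(uintptr_t)buf;  /* fake "DMA address" */
            r->fill_cnt++;

            idx++;
            idx &= RING_MASK;           /* cheap wraparound: size is 2^n */
        }

        r->alloc_idx = idx;             /* publish only after slots are valid */
        return 0;
    }

    int main(void)
    {
        struct ring r = { .fill_cnt = 0 };

        ring_fill_n(&r, 5);
        printf("fill_cnt=%d alloc_idx=%u\n", r.fill_cnt, r.alloc_idx);
        return 0;
    }
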
161 static void ath10k_htt_rx_msdu_buff_replenish(struct ath10k_htt *htt) in ath10k_htt_rx_msdu_buff_replenish() argument
180 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_msdu_buff_replenish()
181 num_deficit = htt->rx_ring.fill_level - htt->rx_ring.fill_cnt; in ath10k_htt_rx_msdu_buff_replenish()
184 ret = ath10k_htt_rx_ring_fill_n(htt, num_to_fill); in ath10k_htt_rx_msdu_buff_replenish()
192 mod_timer(&htt->rx_ring.refill_retry_timer, jiffies + in ath10k_htt_rx_msdu_buff_replenish()
195 tasklet_schedule(&htt->rx_replenish_task); in ath10k_htt_rx_msdu_buff_replenish()
197 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_msdu_buff_replenish()
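
ath10k_htt_rx_msdu_buff_replenish() computes the deficit (fill_level minus fill_cnt) under rx_ring.lock and tops the ring up in bounded batches. If the fill fails for lack of memory it arms rx_ring.refill_retry_timer so the attempt repeats later instead of looping; if a deficit remains after a successful batch it reschedules rx_replenish_task. A sketch of that decision logic, reusing the ring_fill_n() sketch above; schedule_retry(), schedule_more(), and the two constants are hypothetical stand-ins for mod_timer(), tasklet_schedule(), and the driver's fill-level/batch limits:

    #define FILL_LEVEL 6     /* target ring occupancy                 */
    #define MAX_REFILL 2     /* cap per pass so one call stays short  */

    static void schedule_retry(void) { /* mod_timer(&refill_retry_timer, ...) */ }
    static void schedule_more(void)  { /* tasklet_schedule(&rx_replenish_task) */ }

    static void replenish(struct ring *r)
    {
        int num_deficit = FILL_LEVEL - r->fill_cnt;
        int num_to_fill = num_deficit < MAX_REFILL ? num_deficit : MAX_REFILL;

        num_deficit -= num_to_fill;
        if (ring_fill_n(r, num_to_fill) != 0)
            schedule_retry();    /* allocation failed: retry from a timer */
        else if (num_deficit > 0)
            schedule_more();     /* still short: run another pass soon */
    }
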
202 struct ath10k_htt *htt = (struct ath10k_htt *)arg; in ath10k_htt_rx_ring_refill_retry() local
204 ath10k_htt_rx_msdu_buff_replenish(htt); in ath10k_htt_rx_ring_refill_retry()
209 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_ring_refill() local
212 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_ring_refill()
213 ret = ath10k_htt_rx_ring_fill_n(htt, (htt->rx_ring.fill_level - in ath10k_htt_rx_ring_refill()
214 htt->rx_ring.fill_cnt)); in ath10k_htt_rx_ring_refill()
215 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_ring_refill()
218 ath10k_htt_rx_ring_free(htt); in ath10k_htt_rx_ring_refill()
223 void ath10k_htt_rx_free(struct ath10k_htt *htt) in ath10k_htt_rx_free() argument
225 del_timer_sync(&htt->rx_ring.refill_retry_timer); in ath10k_htt_rx_free()
226 tasklet_kill(&htt->rx_replenish_task); in ath10k_htt_rx_free()
227 tasklet_kill(&htt->txrx_compl_task); in ath10k_htt_rx_free()
229 skb_queue_purge(&htt->tx_compl_q); in ath10k_htt_rx_free()
230 skb_queue_purge(&htt->rx_compl_q); in ath10k_htt_rx_free()
231 skb_queue_purge(&htt->rx_in_ord_compl_q); in ath10k_htt_rx_free()
233 ath10k_htt_rx_ring_free(htt); in ath10k_htt_rx_free()
235 dma_free_coherent(htt->ar->dev, in ath10k_htt_rx_free()
236 (htt->rx_ring.size * in ath10k_htt_rx_free()
237 sizeof(htt->rx_ring.paddrs_ring)), in ath10k_htt_rx_free()
238 htt->rx_ring.paddrs_ring, in ath10k_htt_rx_free()
239 htt->rx_ring.base_paddr); in ath10k_htt_rx_free()
241 dma_free_coherent(htt->ar->dev, in ath10k_htt_rx_free()
242 sizeof(*htt->rx_ring.alloc_idx.vaddr), in ath10k_htt_rx_free()
243 htt->rx_ring.alloc_idx.vaddr, in ath10k_htt_rx_free()
244 htt->rx_ring.alloc_idx.paddr); in ath10k_htt_rx_free()
246 kfree(htt->rx_ring.netbufs_ring); in ath10k_htt_rx_free()
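
The teardown in ath10k_htt_rx_free() is strictly ordered: first quiesce everything that could still touch the ring (del_timer_sync() on the refill retry timer, tasklet_kill() on both tasklets), then purge the deferred-completion skb queues, then free the buffers still posted in the ring, and only then release the coherent DMA areas and the netbufs_ring pointer array. Reversing the order would let a late-firing timer or tasklet dereference freed memory. Condensed from the lines above (kernel context, not a standalone program):

    del_timer_sync(&htt->rx_ring.refill_retry_timer);  /* stop timed refills     */
    tasklet_kill(&htt->rx_replenish_task);             /* stop async refills     */
    tasklet_kill(&htt->txrx_compl_task);               /* stop deferred handlers */

    skb_queue_purge(&htt->tx_compl_q);                 /* drop queued events     */
    skb_queue_purge(&htt->rx_compl_q);
    skb_queue_purge(&htt->rx_in_ord_compl_q);

    ath10k_htt_rx_ring_free(htt);                      /* unmap + free rx bufs   */

    /* Only now is it safe to release the backing memory: the coherent
     * paddrs ring, then the coherent alloc_idx word, then the kfree()d
     * netbufs_ring pointer array, exactly as lines 235-246 above do. */
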
249 static inline struct sk_buff *ath10k_htt_rx_netbuf_pop(struct ath10k_htt *htt) in ath10k_htt_rx_netbuf_pop() argument
251 struct ath10k *ar = htt->ar; in ath10k_htt_rx_netbuf_pop()
255 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_netbuf_pop()
257 if (htt->rx_ring.fill_cnt == 0) { in ath10k_htt_rx_netbuf_pop()
262 idx = htt->rx_ring.sw_rd_idx.msdu_payld; in ath10k_htt_rx_netbuf_pop()
263 msdu = htt->rx_ring.netbufs_ring[idx]; in ath10k_htt_rx_netbuf_pop()
264 htt->rx_ring.netbufs_ring[idx] = NULL; in ath10k_htt_rx_netbuf_pop()
265 htt->rx_ring.paddrs_ring[idx] = 0; in ath10k_htt_rx_netbuf_pop()
268 idx &= htt->rx_ring.size_mask; in ath10k_htt_rx_netbuf_pop()
269 htt->rx_ring.sw_rd_idx.msdu_payld = idx; in ath10k_htt_rx_netbuf_pop()
270 htt->rx_ring.fill_cnt--; in ath10k_htt_rx_netbuf_pop()
272 dma_unmap_single(htt->ar->dev, in ath10k_htt_rx_netbuf_pop()
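
ath10k_htt_rx_netbuf_pop() is the consumer mirror of the fill loop: take the buffer at rx_ring.sw_rd_idx.msdu_payld, clear both the pointer slot and its paddr, advance the read index with the same size_mask wrap, decrement fill_cnt, and dma_unmap_single() the buffer before returning it. Continuing the user-space ring sketch above, with the read index kept in a caller-owned variable:

    /* Builds on the struct ring sketch above. */
    static void *ring_pop(struct ring *r, uint32_t *sw_rd_idx)
    {
        uint32_t idx = *sw_rd_idx;
        void *buf;

        if (r->fill_cnt == 0)
            return NULL;            /* nothing posted; the driver warns and bails */

        buf = r->netbufs[idx];
        r->netbufs[idx] = NULL;     /* slot is no longer owned by hardware */
        r->paddrs[idx] = 0;

        idx++;
        idx &= RING_MASK;           /* same power-of-two wrap as the fill side */
        *sw_rd_idx = idx;
        r->fill_cnt--;

        return buf;                 /* driver dma_unmap_single()s it here */
    }
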
283 static int ath10k_htt_rx_amsdu_pop(struct ath10k_htt *htt, in ath10k_htt_rx_amsdu_pop() argument
287 struct ath10k *ar = htt->ar; in ath10k_htt_rx_amsdu_pop()
292 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_amsdu_pop()
297 msdu = ath10k_htt_rx_netbuf_pop(htt); in ath10k_htt_rx_amsdu_pop()
384 msdu = ath10k_htt_rx_netbuf_pop(htt); in ath10k_htt_rx_amsdu_pop()
428 struct ath10k_htt *htt = (struct ath10k_htt *)ptr; in ath10k_htt_rx_replenish_task() local
430 ath10k_htt_rx_msdu_buff_replenish(htt); in ath10k_htt_rx_replenish_task()
433 static struct sk_buff *ath10k_htt_rx_pop_paddr(struct ath10k_htt *htt, in ath10k_htt_rx_pop_paddr() argument
436 struct ath10k *ar = htt->ar; in ath10k_htt_rx_pop_paddr()
440 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_pop_paddr()
448 htt->rx_ring.fill_cnt--; in ath10k_htt_rx_pop_paddr()
450 dma_unmap_single(htt->ar->dev, rxcb->paddr, in ath10k_htt_rx_pop_paddr()
459 static int ath10k_htt_rx_pop_paddr_list(struct ath10k_htt *htt, in ath10k_htt_rx_pop_paddr_list() argument
463 struct ath10k *ar = htt->ar; in ath10k_htt_rx_pop_paddr_list()
471 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_pop_paddr_list()
479 msdu = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_pop_paddr_list()
498 ath10k_warn(htt->ar, "tried to pop an incomplete frame, oops!\n"); in ath10k_htt_rx_pop_paddr_list()
509 int ath10k_htt_rx_alloc(struct ath10k_htt *htt) in ath10k_htt_rx_alloc() argument
511 struct ath10k *ar = htt->ar; in ath10k_htt_rx_alloc()
515 struct timer_list *timer = &htt->rx_ring.refill_retry_timer; in ath10k_htt_rx_alloc()
517 htt->rx_confused = false; in ath10k_htt_rx_alloc()
522 htt->rx_ring.size = HTT_RX_RING_SIZE; in ath10k_htt_rx_alloc()
523 htt->rx_ring.size_mask = htt->rx_ring.size - 1; in ath10k_htt_rx_alloc()
524 htt->rx_ring.fill_level = HTT_RX_RING_FILL_LEVEL; in ath10k_htt_rx_alloc()
526 if (!is_power_of_2(htt->rx_ring.size)) { in ath10k_htt_rx_alloc()
531 htt->rx_ring.netbufs_ring = in ath10k_htt_rx_alloc()
532 kzalloc(htt->rx_ring.size * sizeof(struct sk_buff *), in ath10k_htt_rx_alloc()
534 if (!htt->rx_ring.netbufs_ring) in ath10k_htt_rx_alloc()
537 size = htt->rx_ring.size * sizeof(htt->rx_ring.paddrs_ring); in ath10k_htt_rx_alloc()
539 vaddr = dma_alloc_coherent(htt->ar->dev, size, &paddr, GFP_DMA); in ath10k_htt_rx_alloc()
543 htt->rx_ring.paddrs_ring = vaddr; in ath10k_htt_rx_alloc()
544 htt->rx_ring.base_paddr = paddr; in ath10k_htt_rx_alloc()
546 vaddr = dma_alloc_coherent(htt->ar->dev, in ath10k_htt_rx_alloc()
547 sizeof(*htt->rx_ring.alloc_idx.vaddr), in ath10k_htt_rx_alloc()
552 htt->rx_ring.alloc_idx.vaddr = vaddr; in ath10k_htt_rx_alloc()
553 htt->rx_ring.alloc_idx.paddr = paddr; in ath10k_htt_rx_alloc()
554 htt->rx_ring.sw_rd_idx.msdu_payld = htt->rx_ring.size_mask; in ath10k_htt_rx_alloc()
555 *htt->rx_ring.alloc_idx.vaddr = 0; in ath10k_htt_rx_alloc()
558 setup_timer(timer, ath10k_htt_rx_ring_refill_retry, (unsigned long)htt); in ath10k_htt_rx_alloc()
560 spin_lock_init(&htt->rx_ring.lock); in ath10k_htt_rx_alloc()
562 htt->rx_ring.fill_cnt = 0; in ath10k_htt_rx_alloc()
563 htt->rx_ring.sw_rd_idx.msdu_payld = 0; in ath10k_htt_rx_alloc()
564 hash_init(htt->rx_ring.skb_table); in ath10k_htt_rx_alloc()
566 tasklet_init(&htt->rx_replenish_task, ath10k_htt_rx_replenish_task, in ath10k_htt_rx_alloc()
567 (unsigned long)htt); in ath10k_htt_rx_alloc()
569 skb_queue_head_init(&htt->tx_compl_q); in ath10k_htt_rx_alloc()
570 skb_queue_head_init(&htt->rx_compl_q); in ath10k_htt_rx_alloc()
571 skb_queue_head_init(&htt->rx_in_ord_compl_q); in ath10k_htt_rx_alloc()
573 tasklet_init(&htt->txrx_compl_task, ath10k_htt_txrx_compl_task, in ath10k_htt_rx_alloc()
574 (unsigned long)htt); in ath10k_htt_rx_alloc()
577 htt->rx_ring.size, htt->rx_ring.fill_level); in ath10k_htt_rx_alloc()
581 dma_free_coherent(htt->ar->dev, in ath10k_htt_rx_alloc()
582 (htt->rx_ring.size * in ath10k_htt_rx_alloc()
583 sizeof(htt->rx_ring.paddrs_ring)), in ath10k_htt_rx_alloc()
584 htt->rx_ring.paddrs_ring, in ath10k_htt_rx_alloc()
585 htt->rx_ring.base_paddr); in ath10k_htt_rx_alloc()
587 kfree(htt->rx_ring.netbufs_ring); in ath10k_htt_rx_alloc()
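
ath10k_htt_rx_alloc() refuses a ring size that is not a power of two (the size_mask trick depends on it), then allocates the netbufs pointer array, a coherent DMA area for the paddrs ring, and a second coherent word for alloc_idx, before initializing the retry timer, spinlock, hashtable, skb queues, and tasklets. The cleanup entries at 581-587 above are a goto-unwind ladder: each failure point jumps to a label that releases exactly what was already allocated, in reverse order. A standalone sketch of that ladder, with calloc()/malloc() standing in for kzalloc() and dma_alloc_coherent():

    #include <stdint.h>
    #include <stdlib.h>

    struct rx_state {
        void **netbufs;     /* pointer array (kzalloc'd in the driver)   */
        void *paddrs_ring;  /* stands in for the coherent paddrs ring    */
        void *alloc_idx;    /* stands in for the coherent alloc_idx word */
    };

    static int rx_alloc(struct rx_state *s, int size)
    {
        if (size & (size - 1))
            return -1;                        /* must be a power of two */

        s->netbufs = calloc(size, sizeof(*s->netbufs));
        if (!s->netbufs)
            goto err_netbuf;

        s->paddrs_ring = malloc(size * sizeof(uint32_t));
        if (!s->paddrs_ring)
            goto err_dma_ring;

        s->alloc_idx = malloc(sizeof(uint32_t));
        if (!s->alloc_idx)
            goto err_dma_idx;

        return 0;

        /* Unwind in reverse allocation order, mirroring the driver's labels. */
    err_dma_idx:
        free(s->paddrs_ring);
    err_dma_ring:
        free(s->netbufs);
    err_netbuf:
        return -1;
    }

    int main(void)
    {
        struct rx_state s;

        return rx_alloc(&s, 512) ? 1 : 0;
    }
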
1479 static void ath10k_htt_rx_handler(struct ath10k_htt *htt, in ath10k_htt_rx_handler() argument
1482 struct ath10k *ar = htt->ar; in ath10k_htt_rx_handler()
1483 struct ieee80211_rx_status *rx_status = &htt->rx_status; in ath10k_htt_rx_handler()
1491 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_handler()
1493 if (htt->rx_confused) in ath10k_htt_rx_handler()
1513 ret = ath10k_htt_rx_amsdu_pop(htt, &fw_desc, in ath10k_htt_rx_handler()
1521 htt->rx_confused = true; in ath10k_htt_rx_handler()
1532 tasklet_schedule(&htt->rx_replenish_task); in ath10k_htt_rx_handler()
1535 static void ath10k_htt_rx_frag_handler(struct ath10k_htt *htt, in ath10k_htt_rx_frag_handler() argument
1538 struct ath10k *ar = htt->ar; in ath10k_htt_rx_frag_handler()
1539 struct ieee80211_rx_status *rx_status = &htt->rx_status; in ath10k_htt_rx_frag_handler()
1550 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_frag_handler()
1551 ret = ath10k_htt_rx_amsdu_pop(htt, &fw_desc, &fw_desc_len, in ath10k_htt_rx_frag_handler()
1553 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_frag_handler()
1555 tasklet_schedule(&htt->rx_replenish_task); in ath10k_htt_rx_frag_handler()
1587 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_frm_tx_compl() local
1594 lockdep_assert_held(&htt->tx_lock); in ath10k_htt_rx_frm_tx_compl()
1619 ath10k_txrx_tx_unref(htt, &tx_done); in ath10k_htt_rx_frm_tx_compl()
1757 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_h_rx_offload() local
1758 struct ieee80211_rx_status *status = &htt->rx_status; in ath10k_htt_rx_h_rx_offload()
1806 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_in_ord_ind() local
1808 struct ieee80211_rx_status *status = &htt->rx_status; in ath10k_htt_rx_in_ord_ind()
1819 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_in_ord_ind()
1821 if (htt->rx_confused) in ath10k_htt_rx_in_ord_ind()
1848 ret = ath10k_htt_rx_pop_paddr_list(htt, &resp->rx_in_ord_ind, &list); in ath10k_htt_rx_in_ord_ind()
1851 htt->rx_confused = true; in ath10k_htt_rx_in_ord_ind()
1882 htt->rx_confused = true; in ath10k_htt_rx_in_ord_ind()
1888 tasklet_schedule(&htt->rx_replenish_task); in ath10k_htt_rx_in_ord_ind()
1893 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_t2h_msg_handler() local
1904 htt->target_version_major = resp->ver_resp.major; in ath10k_htt_t2h_msg_handler()
1905 htt->target_version_minor = resp->ver_resp.minor; in ath10k_htt_t2h_msg_handler()
1906 complete(&htt->target_version_received); in ath10k_htt_t2h_msg_handler()
1910 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_t2h_msg_handler()
1911 __skb_queue_tail(&htt->rx_compl_q, skb); in ath10k_htt_t2h_msg_handler()
1912 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_t2h_msg_handler()
1913 tasklet_schedule(&htt->txrx_compl_task); in ath10k_htt_t2h_msg_handler()
1921 ath10k_peer_map_event(htt, &ev); in ath10k_htt_t2h_msg_handler()
1928 ath10k_peer_unmap_event(htt, &ev); in ath10k_htt_t2h_msg_handler()
1949 spin_lock_bh(&htt->tx_lock); in ath10k_htt_t2h_msg_handler()
1950 ath10k_txrx_tx_unref(htt, &tx_done); in ath10k_htt_t2h_msg_handler()
1951 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_t2h_msg_handler()
1955 spin_lock_bh(&htt->tx_lock); in ath10k_htt_t2h_msg_handler()
1956 __skb_queue_tail(&htt->tx_compl_q, skb); in ath10k_htt_t2h_msg_handler()
1957 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_t2h_msg_handler()
1958 tasklet_schedule(&htt->txrx_compl_task); in ath10k_htt_t2h_msg_handler()
1961 struct ath10k *ar = htt->ar; in ath10k_htt_t2h_msg_handler()
1975 ath10k_htt_rx_frag_handler(htt, &resp->rx_frag_ind); in ath10k_htt_t2h_msg_handler()
2014 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_t2h_msg_handler()
2015 __skb_queue_tail(&htt->rx_in_ord_compl_q, skb); in ath10k_htt_t2h_msg_handler()
2016 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_t2h_msg_handler()
2017 tasklet_schedule(&htt->txrx_compl_task); in ath10k_htt_t2h_msg_handler()
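
ath10k_htt_t2h_msg_handler() runs in completion context, so the heavyweight indications are not processed inline: RX_IND and RX_IN_ORD_PADDR_IND messages are appended to rx_compl_q / rx_in_ord_compl_q under rx_ring.lock (tx completions go to tx_compl_q under tx_lock), and txrx_compl_task is scheduled to do the real work in a bottom half. The producer pattern, condensed from the lines above (kernel context, not a standalone program):

    /* Producer: defer the message, then kick the bottom half. */
    spin_lock_bh(&htt->rx_ring.lock);
    __skb_queue_tail(&htt->rx_compl_q, skb);  /* unlocked variant: lock already held */
    spin_unlock_bh(&htt->rx_ring.lock);
    tasklet_schedule(&htt->txrx_compl_task);
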
2041 struct ath10k_htt *htt = (struct ath10k_htt *)ptr; in ath10k_htt_txrx_compl_task() local
2042 struct ath10k *ar = htt->ar; in ath10k_htt_txrx_compl_task()
2046 spin_lock_bh(&htt->tx_lock); in ath10k_htt_txrx_compl_task()
2047 while ((skb = __skb_dequeue(&htt->tx_compl_q))) { in ath10k_htt_txrx_compl_task()
2048 ath10k_htt_rx_frm_tx_compl(htt->ar, skb); in ath10k_htt_txrx_compl_task()
2051 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_txrx_compl_task()
2053 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_txrx_compl_task()
2054 while ((skb = __skb_dequeue(&htt->rx_compl_q))) { in ath10k_htt_txrx_compl_task()
2056 ath10k_htt_rx_handler(htt, &resp->rx_ind); in ath10k_htt_txrx_compl_task()
2060 while ((skb = __skb_dequeue(&htt->rx_in_ord_compl_q))) { in ath10k_htt_txrx_compl_task()
2064 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_txrx_compl_task()
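
ath10k_htt_txrx_compl_task() is the matching consumer: it drains tx_compl_q under tx_lock, then holds rx_ring.lock across both RX queues, because the handlers it calls assert that lock (see the lockdep_assert_held() entries at 1491 and 1819 above). The drain-loop shape, condensed from the lines above (kernel context; the skb-freeing step is an assumption and may differ by kernel version):

    /* Consumer: pop everything the T2H handler queued, under the ring lock. */
    spin_lock_bh(&htt->rx_ring.lock);
    while ((skb = __skb_dequeue(&htt->rx_compl_q))) {
        resp = (struct htt_resp *)skb->data;
        ath10k_htt_rx_handler(htt, &resp->rx_ind);  /* asserts rx_ring.lock held */
        dev_kfree_skb_any(skb);                     /* done with the indication  */
    }
    spin_unlock_bh(&htt->rx_ring.lock);
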