Lines Matching refs:htt
42 hash_for_each_possible(ar->htt.rx_ring.skb_table, rxcb, hlist, paddr) in ath10k_htt_rx_find_skb_paddr()
50 static void ath10k_htt_rx_ring_free(struct ath10k_htt *htt) in ath10k_htt_rx_ring_free() argument
57 if (htt->rx_ring.in_ord_rx) { in ath10k_htt_rx_ring_free()
58 hash_for_each_safe(htt->rx_ring.skb_table, i, n, rxcb, hlist) { in ath10k_htt_rx_ring_free()
60 dma_unmap_single(htt->ar->dev, rxcb->paddr, in ath10k_htt_rx_ring_free()
67 for (i = 0; i < htt->rx_ring.size; i++) { in ath10k_htt_rx_ring_free()
68 skb = htt->rx_ring.netbufs_ring[i]; in ath10k_htt_rx_ring_free()
73 dma_unmap_single(htt->ar->dev, rxcb->paddr, in ath10k_htt_rx_ring_free()
80 htt->rx_ring.fill_cnt = 0; in ath10k_htt_rx_ring_free()
81 hash_init(htt->rx_ring.skb_table); in ath10k_htt_rx_ring_free()
82 memset(htt->rx_ring.netbufs_ring, 0, in ath10k_htt_rx_ring_free()
83 htt->rx_ring.size * sizeof(htt->rx_ring.netbufs_ring[0])); in ath10k_htt_rx_ring_free()
86 static int __ath10k_htt_rx_ring_fill_n(struct ath10k_htt *htt, int num) in __ath10k_htt_rx_ring_fill_n() argument
101 idx = __le32_to_cpu(*htt->rx_ring.alloc_idx.vaddr); in __ath10k_htt_rx_ring_fill_n()
118 paddr = dma_map_single(htt->ar->dev, skb->data, in __ath10k_htt_rx_ring_fill_n()
122 if (unlikely(dma_mapping_error(htt->ar->dev, paddr))) { in __ath10k_htt_rx_ring_fill_n()
130 htt->rx_ring.netbufs_ring[idx] = skb; in __ath10k_htt_rx_ring_fill_n()
131 htt->rx_ring.paddrs_ring[idx] = __cpu_to_le32(paddr); in __ath10k_htt_rx_ring_fill_n()
132 htt->rx_ring.fill_cnt++; in __ath10k_htt_rx_ring_fill_n()
134 if (htt->rx_ring.in_ord_rx) { in __ath10k_htt_rx_ring_fill_n()
135 hash_add(htt->rx_ring.skb_table, in __ath10k_htt_rx_ring_fill_n()
142 idx &= htt->rx_ring.size_mask; in __ath10k_htt_rx_ring_fill_n()
151 *htt->rx_ring.alloc_idx.vaddr = __cpu_to_le32(idx); in __ath10k_htt_rx_ring_fill_n()
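The matches in __ath10k_htt_rx_ring_fill_n() and in the pop path further down outline a power-of-two ring: buffers are written at the allocation index, the index wraps with size_mask, fill_cnt tracks how many slots currently hold a buffer, and the new index is published only after the slots are populated. Below is a minimal, self-contained userspace model of that pattern, not the driver code itself; the field names mirror the rx_ring fields referenced above, and malloc() stands in for skb allocation plus DMA mapping.

#include <stdio.h>
#include <stdlib.h>

#define RING_SIZE 8                        /* must be a power of two */

struct rx_ring_model {
	void *netbufs[RING_SIZE];          /* one buffer per slot */
	unsigned int size_mask;            /* RING_SIZE - 1 */
	unsigned int alloc_idx;            /* next slot the producer fills */
	unsigned int sw_rd_idx;            /* next slot the consumer reads */
	unsigned int fill_cnt;             /* slots currently holding a buffer */
};

/* Producer: fill up to num slots, stop early on allocation failure. */
static int ring_fill_n(struct rx_ring_model *r, int num)
{
	unsigned int idx = r->alloc_idx;
	int filled = 0;

	while (filled < num && r->fill_cnt < RING_SIZE) {
		void *buf = malloc(64);    /* stands in for skb alloc + dma_map_single() */
		if (!buf)
			break;             /* caller decides how to retry */
		r->netbufs[idx] = buf;
		r->fill_cnt++;
		idx = (idx + 1) & r->size_mask;  /* wrap with the power-of-two mask */
		filled++;
	}
	r->alloc_idx = idx;                /* publish the new index last */
	return filled;
}

/* Consumer: pop one buffer at the software read index, as in the netbuf_pop path. */
static void *ring_pop(struct rx_ring_model *r)
{
	unsigned int idx = r->sw_rd_idx;
	void *buf;

	if (r->fill_cnt == 0)
		return NULL;               /* nothing to pop */
	buf = r->netbufs[idx];
	r->netbufs[idx] = NULL;
	r->sw_rd_idx = (idx + 1) & r->size_mask;
	r->fill_cnt--;
	return buf;
}

int main(void)
{
	struct rx_ring_model r = { .size_mask = RING_SIZE - 1 };

	printf("filled %d slots\n", ring_fill_n(&r, 5));
	free(ring_pop(&r));
	printf("fill_cnt is now %u\n", r.fill_cnt);
	return 0;
}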
155 static int ath10k_htt_rx_ring_fill_n(struct ath10k_htt *htt, int num) in ath10k_htt_rx_ring_fill_n() argument
157 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_ring_fill_n()
158 return __ath10k_htt_rx_ring_fill_n(htt, num); in ath10k_htt_rx_ring_fill_n()
161 static void ath10k_htt_rx_msdu_buff_replenish(struct ath10k_htt *htt) in ath10k_htt_rx_msdu_buff_replenish() argument
180 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_msdu_buff_replenish()
181 num_deficit = htt->rx_ring.fill_level - htt->rx_ring.fill_cnt; in ath10k_htt_rx_msdu_buff_replenish()
184 ret = ath10k_htt_rx_ring_fill_n(htt, num_to_fill); in ath10k_htt_rx_msdu_buff_replenish()
192 mod_timer(&htt->rx_ring.refill_retry_timer, jiffies + in ath10k_htt_rx_msdu_buff_replenish()
195 tasklet_schedule(&htt->rx_replenish_task); in ath10k_htt_rx_msdu_buff_replenish()
197 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_msdu_buff_replenish()
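ath10k_htt_rx_msdu_buff_replenish() computes the deficit between the configured fill level and the current fill count, fills as much as it can, and defers the rest: on allocation failure it arms the refill retry timer instead of spinning, and if a deficit remains it reschedules itself via the replenish tasklet. A rough single-threaded sketch of that decision logic follows; arm_retry_timer() and reschedule_replenish() are hypothetical helpers standing in for mod_timer() and tasklet_schedule(), and try_fill() models a fill that can stop short.

#include <stdio.h>

/* Hypothetical stand-ins for mod_timer() and tasklet_schedule(). */
static void arm_retry_timer(void)      { puts("retry later via timer"); }
static void reschedule_replenish(void) { puts("reschedule replenish"); }

/* Models the fill call: returns how many buffers were actually added. */
static int try_fill(int num)
{
	return num > 3 ? 3 : num;          /* pretend only 3 buffers were available */
}

static void replenish(int fill_level, int *fill_cnt)
{
	int deficit = fill_level - *fill_cnt;
	int filled = try_fill(deficit);

	*fill_cnt += filled;
	if (filled < deficit) {
		if (filled == 0)
			arm_retry_timer();         /* approximates the out-of-memory case */
		else
			reschedule_replenish();    /* partial fill: try again soon */
	}
}

int main(void)
{
	int fill_cnt = 0;

	replenish(8, &fill_cnt);
	printf("fill_cnt=%d\n", fill_cnt);
	return 0;
}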
202 struct ath10k_htt *htt = (struct ath10k_htt *)arg; in ath10k_htt_rx_ring_refill_retry() local
204 ath10k_htt_rx_msdu_buff_replenish(htt); in ath10k_htt_rx_ring_refill_retry()
209 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_ring_refill() local
212 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_ring_refill()
213 ret = ath10k_htt_rx_ring_fill_n(htt, (htt->rx_ring.fill_level - in ath10k_htt_rx_ring_refill()
214 htt->rx_ring.fill_cnt)); in ath10k_htt_rx_ring_refill()
215 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_ring_refill()
218 ath10k_htt_rx_ring_free(htt); in ath10k_htt_rx_ring_refill()
223 void ath10k_htt_rx_free(struct ath10k_htt *htt) in ath10k_htt_rx_free() argument
225 del_timer_sync(&htt->rx_ring.refill_retry_timer); in ath10k_htt_rx_free()
226 tasklet_kill(&htt->rx_replenish_task); in ath10k_htt_rx_free()
227 tasklet_kill(&htt->txrx_compl_task); in ath10k_htt_rx_free()
229 skb_queue_purge(&htt->tx_compl_q); in ath10k_htt_rx_free()
230 skb_queue_purge(&htt->rx_compl_q); in ath10k_htt_rx_free()
231 skb_queue_purge(&htt->rx_in_ord_compl_q); in ath10k_htt_rx_free()
233 ath10k_htt_rx_ring_free(htt); in ath10k_htt_rx_free()
235 dma_free_coherent(htt->ar->dev, in ath10k_htt_rx_free()
236 (htt->rx_ring.size * in ath10k_htt_rx_free()
237 sizeof(htt->rx_ring.paddrs_ring)), in ath10k_htt_rx_free()
238 htt->rx_ring.paddrs_ring, in ath10k_htt_rx_free()
239 htt->rx_ring.base_paddr); in ath10k_htt_rx_free()
241 dma_free_coherent(htt->ar->dev, in ath10k_htt_rx_free()
242 sizeof(*htt->rx_ring.alloc_idx.vaddr), in ath10k_htt_rx_free()
243 htt->rx_ring.alloc_idx.vaddr, in ath10k_htt_rx_free()
244 htt->rx_ring.alloc_idx.paddr); in ath10k_htt_rx_free()
246 kfree(htt->rx_ring.netbufs_ring); in ath10k_htt_rx_free()
249 static inline struct sk_buff *ath10k_htt_rx_netbuf_pop(struct ath10k_htt *htt) in ath10k_htt_rx_netbuf_pop() argument
251 struct ath10k *ar = htt->ar; in ath10k_htt_rx_netbuf_pop()
255 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_netbuf_pop()
257 if (htt->rx_ring.fill_cnt == 0) { in ath10k_htt_rx_netbuf_pop()
262 idx = htt->rx_ring.sw_rd_idx.msdu_payld; in ath10k_htt_rx_netbuf_pop()
263 msdu = htt->rx_ring.netbufs_ring[idx]; in ath10k_htt_rx_netbuf_pop()
264 htt->rx_ring.netbufs_ring[idx] = NULL; in ath10k_htt_rx_netbuf_pop()
265 htt->rx_ring.paddrs_ring[idx] = 0; in ath10k_htt_rx_netbuf_pop()
268 idx &= htt->rx_ring.size_mask; in ath10k_htt_rx_netbuf_pop()
269 htt->rx_ring.sw_rd_idx.msdu_payld = idx; in ath10k_htt_rx_netbuf_pop()
270 htt->rx_ring.fill_cnt--; in ath10k_htt_rx_netbuf_pop()
272 dma_unmap_single(htt->ar->dev, in ath10k_htt_rx_netbuf_pop()
283 static int ath10k_htt_rx_amsdu_pop(struct ath10k_htt *htt, in ath10k_htt_rx_amsdu_pop() argument
287 struct ath10k *ar = htt->ar; in ath10k_htt_rx_amsdu_pop()
292 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_amsdu_pop()
297 msdu = ath10k_htt_rx_netbuf_pop(htt); in ath10k_htt_rx_amsdu_pop()
384 msdu = ath10k_htt_rx_netbuf_pop(htt); in ath10k_htt_rx_amsdu_pop()
428 struct ath10k_htt *htt = (struct ath10k_htt *)ptr; in ath10k_htt_rx_replenish_task() local
430 ath10k_htt_rx_msdu_buff_replenish(htt); in ath10k_htt_rx_replenish_task()
433 static struct sk_buff *ath10k_htt_rx_pop_paddr(struct ath10k_htt *htt, in ath10k_htt_rx_pop_paddr() argument
436 struct ath10k *ar = htt->ar; in ath10k_htt_rx_pop_paddr()
440 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_pop_paddr()
448 htt->rx_ring.fill_cnt--; in ath10k_htt_rx_pop_paddr()
450 dma_unmap_single(htt->ar->dev, rxcb->paddr, in ath10k_htt_rx_pop_paddr()
459 static int ath10k_htt_rx_pop_paddr_list(struct ath10k_htt *htt, in ath10k_htt_rx_pop_paddr_list() argument
463 struct ath10k *ar = htt->ar; in ath10k_htt_rx_pop_paddr_list()
471 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_pop_paddr_list()
479 msdu = ath10k_htt_rx_pop_paddr(htt, paddr); in ath10k_htt_rx_pop_paddr_list()
498 ath10k_warn(htt->ar, "tried to pop an incomplete frame, oops!\n"); in ath10k_htt_rx_pop_paddr_list()
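When in-order RX is active, the matches above show buffers being tracked by their DMA address: hash_add() in the fill path, hash_for_each_possible() in ath10k_htt_rx_find_skb_paddr(), and ath10k_htt_rx_pop_paddr() removing the entry once the device reports that address as complete. Here is a minimal userspace model of that address-keyed lookup, using a small chained hash in place of the kernel's hashtable.h; the structure and names are illustrative only.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define HASH_BITS 4
#define HASH_BUCKETS (1u << HASH_BITS)

/* One tracked buffer, keyed by the (fake) DMA address it was mapped to. */
struct tracked_buf {
	uintptr_t paddr;
	void *data;
	struct tracked_buf *next;
};

static struct tracked_buf *buckets[HASH_BUCKETS];

static unsigned int hash_paddr(uintptr_t paddr)
{
	return (unsigned int)(paddr >> 4) & (HASH_BUCKETS - 1);
}

/* Fill path: remember which buffer lives at which device address. */
static void track_add(struct tracked_buf *b)
{
	unsigned int h = hash_paddr(b->paddr);

	b->next = buckets[h];
	buckets[h] = b;
}

/* Completion path: find and unlink the buffer for a completed address. */
static struct tracked_buf *track_pop(uintptr_t paddr)
{
	unsigned int h = hash_paddr(paddr);
	struct tracked_buf **p = &buckets[h];

	for (; *p; p = &(*p)->next) {
		if ((*p)->paddr == paddr) {
			struct tracked_buf *found = *p;

			*p = found->next;       /* unlink, mirroring hash_del() */
			return found;
		}
	}
	return NULL;                            /* device reported an unknown address */
}

int main(void)
{
	struct tracked_buf b = { .paddr = 0x1000, .data = "payload" };

	track_add(&b);
	printf("popped %s buffer for paddr 0x%lx\n",
	       track_pop(0x1000) ? "the" : "no", (unsigned long)b.paddr);
	return 0;
}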
509 int ath10k_htt_rx_alloc(struct ath10k_htt *htt) in ath10k_htt_rx_alloc() argument
511 struct ath10k *ar = htt->ar; in ath10k_htt_rx_alloc()
515 struct timer_list *timer = &htt->rx_ring.refill_retry_timer; in ath10k_htt_rx_alloc()
517 htt->rx_confused = false; in ath10k_htt_rx_alloc()
522 htt->rx_ring.size = HTT_RX_RING_SIZE; in ath10k_htt_rx_alloc()
523 htt->rx_ring.size_mask = htt->rx_ring.size - 1; in ath10k_htt_rx_alloc()
524 htt->rx_ring.fill_level = HTT_RX_RING_FILL_LEVEL; in ath10k_htt_rx_alloc()
526 if (!is_power_of_2(htt->rx_ring.size)) { in ath10k_htt_rx_alloc()
531 htt->rx_ring.netbufs_ring = in ath10k_htt_rx_alloc()
532 kzalloc(htt->rx_ring.size * sizeof(struct sk_buff *), in ath10k_htt_rx_alloc()
534 if (!htt->rx_ring.netbufs_ring) in ath10k_htt_rx_alloc()
537 size = htt->rx_ring.size * sizeof(htt->rx_ring.paddrs_ring); in ath10k_htt_rx_alloc()
539 vaddr = dma_alloc_coherent(htt->ar->dev, size, &paddr, GFP_DMA); in ath10k_htt_rx_alloc()
543 htt->rx_ring.paddrs_ring = vaddr; in ath10k_htt_rx_alloc()
544 htt->rx_ring.base_paddr = paddr; in ath10k_htt_rx_alloc()
546 vaddr = dma_alloc_coherent(htt->ar->dev, in ath10k_htt_rx_alloc()
547 sizeof(*htt->rx_ring.alloc_idx.vaddr), in ath10k_htt_rx_alloc()
552 htt->rx_ring.alloc_idx.vaddr = vaddr; in ath10k_htt_rx_alloc()
553 htt->rx_ring.alloc_idx.paddr = paddr; in ath10k_htt_rx_alloc()
554 htt->rx_ring.sw_rd_idx.msdu_payld = htt->rx_ring.size_mask; in ath10k_htt_rx_alloc()
555 *htt->rx_ring.alloc_idx.vaddr = 0; in ath10k_htt_rx_alloc()
558 setup_timer(timer, ath10k_htt_rx_ring_refill_retry, (unsigned long)htt); in ath10k_htt_rx_alloc()
560 spin_lock_init(&htt->rx_ring.lock); in ath10k_htt_rx_alloc()
562 htt->rx_ring.fill_cnt = 0; in ath10k_htt_rx_alloc()
563 htt->rx_ring.sw_rd_idx.msdu_payld = 0; in ath10k_htt_rx_alloc()
564 hash_init(htt->rx_ring.skb_table); in ath10k_htt_rx_alloc()
566 tasklet_init(&htt->rx_replenish_task, ath10k_htt_rx_replenish_task, in ath10k_htt_rx_alloc()
567 (unsigned long)htt); in ath10k_htt_rx_alloc()
569 skb_queue_head_init(&htt->tx_compl_q); in ath10k_htt_rx_alloc()
570 skb_queue_head_init(&htt->rx_compl_q); in ath10k_htt_rx_alloc()
571 skb_queue_head_init(&htt->rx_in_ord_compl_q); in ath10k_htt_rx_alloc()
573 tasklet_init(&htt->txrx_compl_task, ath10k_htt_txrx_compl_task, in ath10k_htt_rx_alloc()
574 (unsigned long)htt); in ath10k_htt_rx_alloc()
577 htt->rx_ring.size, htt->rx_ring.fill_level); in ath10k_htt_rx_alloc()
581 dma_free_coherent(htt->ar->dev, in ath10k_htt_rx_alloc()
582 (htt->rx_ring.size * in ath10k_htt_rx_alloc()
583 sizeof(htt->rx_ring.paddrs_ring)), in ath10k_htt_rx_alloc()
584 htt->rx_ring.paddrs_ring, in ath10k_htt_rx_alloc()
585 htt->rx_ring.base_paddr); in ath10k_htt_rx_alloc()
587 kfree(htt->rx_ring.netbufs_ring); in ath10k_htt_rx_alloc()
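ath10k_htt_rx_alloc() acquires several resources in sequence (the netbufs array, the coherent paddrs ring, the coherent allocation index), and the trailing matches show the error path releasing what was already acquired in reverse order. A compact sketch of that staged-allocation and unwind shape, with malloc()/calloc()/free() standing in for kzalloc() and dma_alloc_coherent()/dma_free_coherent(); label names are illustrative, not the driver's.

#include <errno.h>
#include <stdio.h>
#include <stdlib.h>

struct rx_state {
	void **netbufs;           /* per-slot buffer pointers (kzalloc'd array in the driver) */
	unsigned int *paddrs;     /* device-visible ring (dma_alloc_coherent in the driver) */
	unsigned int *alloc_idx;  /* device-visible allocation index */
};

static int rx_alloc(struct rx_state *s, unsigned int size)
{
	s->netbufs = calloc(size, sizeof(*s->netbufs));
	if (!s->netbufs)
		goto err_netbuf;

	s->paddrs = malloc(size * sizeof(*s->paddrs));
	if (!s->paddrs)
		goto err_ring;

	s->alloc_idx = malloc(sizeof(*s->alloc_idx));
	if (!s->alloc_idx)
		goto err_idx;

	*s->alloc_idx = 0;        /* start with the hardware index at zero */
	return 0;

err_idx:
	free(s->paddrs);          /* undo in reverse order of acquisition */
err_ring:
	free(s->netbufs);
err_netbuf:
	return -ENOMEM;
}

int main(void)
{
	struct rx_state s;

	printf("rx_alloc: %d\n", rx_alloc(&s, 512));
	return 0;
}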
1561 static void ath10k_htt_rx_handler(struct ath10k_htt *htt, in ath10k_htt_rx_handler() argument
1564 struct ath10k *ar = htt->ar; in ath10k_htt_rx_handler()
1565 struct ieee80211_rx_status *rx_status = &htt->rx_status; in ath10k_htt_rx_handler()
1573 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_handler()
1575 if (htt->rx_confused) in ath10k_htt_rx_handler()
1595 ret = ath10k_htt_rx_amsdu_pop(htt, &fw_desc, in ath10k_htt_rx_handler()
1603 htt->rx_confused = true; in ath10k_htt_rx_handler()
1614 tasklet_schedule(&htt->rx_replenish_task); in ath10k_htt_rx_handler()
1617 static void ath10k_htt_rx_frag_handler(struct ath10k_htt *htt, in ath10k_htt_rx_frag_handler() argument
1620 struct ath10k *ar = htt->ar; in ath10k_htt_rx_frag_handler()
1621 struct ieee80211_rx_status *rx_status = &htt->rx_status; in ath10k_htt_rx_frag_handler()
1632 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_frag_handler()
1633 ret = ath10k_htt_rx_amsdu_pop(htt, &fw_desc, &fw_desc_len, in ath10k_htt_rx_frag_handler()
1635 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_rx_frag_handler()
1637 tasklet_schedule(&htt->rx_replenish_task); in ath10k_htt_rx_frag_handler()
1669 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_frm_tx_compl() local
1700 ath10k_txrx_tx_unref(htt, &tx_done); in ath10k_htt_rx_frm_tx_compl()
1838 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_h_rx_offload() local
1839 struct ieee80211_rx_status *status = &htt->rx_status; in ath10k_htt_rx_h_rx_offload()
1887 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_rx_in_ord_ind() local
1889 struct ieee80211_rx_status *status = &htt->rx_status; in ath10k_htt_rx_in_ord_ind()
1900 lockdep_assert_held(&htt->rx_ring.lock); in ath10k_htt_rx_in_ord_ind()
1902 if (htt->rx_confused) in ath10k_htt_rx_in_ord_ind()
1929 ret = ath10k_htt_rx_pop_paddr_list(htt, &resp->rx_in_ord_ind, &list); in ath10k_htt_rx_in_ord_ind()
1932 htt->rx_confused = true; in ath10k_htt_rx_in_ord_ind()
1963 htt->rx_confused = true; in ath10k_htt_rx_in_ord_ind()
1969 tasklet_schedule(&htt->rx_replenish_task); in ath10k_htt_rx_in_ord_ind()
1974 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_t2h_msg_handler() local
1985 if (resp->hdr.msg_type >= ar->htt.t2h_msg_types_max) { in ath10k_htt_t2h_msg_handler()
1987 resp->hdr.msg_type, ar->htt.t2h_msg_types_max); in ath10k_htt_t2h_msg_handler()
1991 type = ar->htt.t2h_msg_types[resp->hdr.msg_type]; in ath10k_htt_t2h_msg_handler()
1995 htt->target_version_major = resp->ver_resp.major; in ath10k_htt_t2h_msg_handler()
1996 htt->target_version_minor = resp->ver_resp.minor; in ath10k_htt_t2h_msg_handler()
1997 complete(&htt->target_version_received); in ath10k_htt_t2h_msg_handler()
2001 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_t2h_msg_handler()
2002 __skb_queue_tail(&htt->rx_compl_q, skb); in ath10k_htt_t2h_msg_handler()
2003 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_t2h_msg_handler()
2004 tasklet_schedule(&htt->txrx_compl_task); in ath10k_htt_t2h_msg_handler()
2012 ath10k_peer_map_event(htt, &ev); in ath10k_htt_t2h_msg_handler()
2019 ath10k_peer_unmap_event(htt, &ev); in ath10k_htt_t2h_msg_handler()
2041 ath10k_txrx_tx_unref(htt, &tx_done); in ath10k_htt_t2h_msg_handler()
2045 skb_queue_tail(&htt->tx_compl_q, skb); in ath10k_htt_t2h_msg_handler()
2046 tasklet_schedule(&htt->txrx_compl_task); in ath10k_htt_t2h_msg_handler()
2049 struct ath10k *ar = htt->ar; in ath10k_htt_t2h_msg_handler()
2063 ath10k_htt_rx_frag_handler(htt, &resp->rx_frag_ind); in ath10k_htt_t2h_msg_handler()
2101 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_t2h_msg_handler()
2102 __skb_queue_tail(&htt->rx_in_ord_compl_q, skb); in ath10k_htt_t2h_msg_handler()
2103 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_t2h_msg_handler()
2104 tasklet_schedule(&htt->txrx_compl_task); in ath10k_htt_t2h_msg_handler()
2132 struct ath10k_htt *htt = (struct ath10k_htt *)ptr; in ath10k_htt_txrx_compl_task() local
2133 struct ath10k *ar = htt->ar; in ath10k_htt_txrx_compl_task()
2137 while ((skb = skb_dequeue(&htt->tx_compl_q))) { in ath10k_htt_txrx_compl_task()
2138 ath10k_htt_rx_frm_tx_compl(htt->ar, skb); in ath10k_htt_txrx_compl_task()
2142 spin_lock_bh(&htt->rx_ring.lock); in ath10k_htt_txrx_compl_task()
2143 while ((skb = __skb_dequeue(&htt->rx_compl_q))) { in ath10k_htt_txrx_compl_task()
2145 ath10k_htt_rx_handler(htt, &resp->rx_ind); in ath10k_htt_txrx_compl_task()
2149 while ((skb = __skb_dequeue(&htt->rx_in_ord_compl_q))) { in ath10k_htt_txrx_compl_task()
2153 spin_unlock_bh(&htt->rx_ring.lock); in ath10k_htt_txrx_compl_task()
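The message handler above defers most of its work: completion indications are appended to tx_compl_q, rx_compl_q, or rx_in_ord_compl_q (the RX queues under the rx_ring lock) and ath10k_htt_txrx_compl_task() later drains those queues. A minimal single-producer model of that "enqueue in the interrupt path, drain in the bottom half" shape, using a pthread mutex where the driver uses a spinlock and a plain linked list in place of an sk_buff queue.

#include <pthread.h>
#include <stdio.h>
#include <stdlib.h>

/* A queued completion message; in the driver this is an sk_buff. */
struct msg {
	int id;
	struct msg *next;
};

static struct msg *q_head, *q_tail;
static pthread_mutex_t q_lock = PTHREAD_MUTEX_INITIALIZER;

/* Top half: queue the message and (conceptually) schedule the tasklet. */
static void enqueue_msg(struct msg *m)
{
	pthread_mutex_lock(&q_lock);
	m->next = NULL;
	if (q_tail)
		q_tail->next = m;
	else
		q_head = m;
	q_tail = m;
	pthread_mutex_unlock(&q_lock);
	/* tasklet_schedule(&txrx_compl_task) would go here in the driver */
}

/* Bottom half: drain everything queued so far while holding the lock. */
static void compl_task(void)
{
	pthread_mutex_lock(&q_lock);
	while (q_head) {
		struct msg *m = q_head;

		q_head = m->next;
		if (!q_head)
			q_tail = NULL;
		printf("processed completion %d\n", m->id);
		free(m);
	}
	pthread_mutex_unlock(&q_lock);
}

int main(void)
{
	for (int i = 0; i < 3; i++) {
		struct msg *m = malloc(sizeof(*m));

		m->id = i;
		enqueue_msg(m);
	}
	compl_task();
	return 0;
}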