Lines matching refs:htt in drivers/net/wireless/ath/ath10k/htt_tx.c

25 void __ath10k_htt_tx_dec_pending(struct ath10k_htt *htt, bool limit_mgmt_desc)  in __ath10k_htt_tx_dec_pending()  argument
28 htt->num_pending_mgmt_tx--; in __ath10k_htt_tx_dec_pending()
30 htt->num_pending_tx--; in __ath10k_htt_tx_dec_pending()
31 if (htt->num_pending_tx == htt->max_num_pending_tx - 1) in __ath10k_htt_tx_dec_pending()
32 ath10k_mac_tx_unlock(htt->ar, ATH10K_TX_PAUSE_Q_FULL); in __ath10k_htt_tx_dec_pending()
35 static void ath10k_htt_tx_dec_pending(struct ath10k_htt *htt, in ath10k_htt_tx_dec_pending() argument
38 spin_lock_bh(&htt->tx_lock); in ath10k_htt_tx_dec_pending()
39 __ath10k_htt_tx_dec_pending(htt, limit_mgmt_desc); in ath10k_htt_tx_dec_pending()
40 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_tx_dec_pending()
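
The two fragments above show the driver's locking convention: the double-underscore variant __ath10k_htt_tx_dec_pending() assumes the caller already holds htt->tx_lock, while ath10k_htt_tx_dec_pending() is the self-locking wrapper around it. A minimal userspace model of that convention, with a pthread mutex standing in for the spinlock (all names here are illustrative, not from the driver):

        #include <pthread.h>

        struct tx_state {
                pthread_mutex_t lock;   /* stands in for htt->tx_lock */
                int num_pending_tx;
        };

        /* __dec_pending: caller must hold s->lock (the "__" convention) */
        static void __dec_pending(struct tx_state *s)
        {
                s->num_pending_tx--;
        }

        /* dec_pending: self-locking wrapper around the "__" helper */
        static void dec_pending(struct tx_state *s)
        {
                pthread_mutex_lock(&s->lock);
                __dec_pending(s);
                pthread_mutex_unlock(&s->lock);
        }
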
43 static int ath10k_htt_tx_inc_pending(struct ath10k_htt *htt, in ath10k_htt_tx_inc_pending() argument
46 struct ath10k *ar = htt->ar; in ath10k_htt_tx_inc_pending()
49 spin_lock_bh(&htt->tx_lock); in ath10k_htt_tx_inc_pending()
51 if (htt->num_pending_tx >= htt->max_num_pending_tx) { in ath10k_htt_tx_inc_pending()
57 if (is_probe_resp && (htt->num_pending_mgmt_tx > in ath10k_htt_tx_inc_pending()
62 htt->num_pending_mgmt_tx++; in ath10k_htt_tx_inc_pending()
65 htt->num_pending_tx++; in ath10k_htt_tx_inc_pending()
66 if (htt->num_pending_tx == htt->max_num_pending_tx) in ath10k_htt_tx_inc_pending()
67 ath10k_mac_tx_lock(htt->ar, ATH10K_TX_PAUSE_Q_FULL); in ath10k_htt_tx_inc_pending()
70 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_tx_inc_pending()
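
ath10k_htt_tx_inc_pending() is the admission side of a watermark scheme: it rejects new frames with -EBUSY once num_pending_tx reaches max_num_pending_tx, pauses the mac tx queues (ATH10K_TX_PAUSE_Q_FULL) exactly on the transition to full, and additionally enforces a smaller cap on pending management frames for probe responses when limit_mgmt_desc is set. The decrement path above unpauses exactly on the transition back to max - 1, so each pause/unpause fires once per edge. A userspace sketch of the edge-triggered watermark (the limit value and names are illustrative):

        #include <errno.h>
        #include <stdbool.h>

        #define MAX_PENDING 1424   /* illustrative; the driver uses htt->max_num_pending_tx */

        static int num_pending;
        static bool queues_paused;

        /* called with the tx lock held: admit one frame or fail */
        static int inc_pending(void)
        {
                if (num_pending >= MAX_PENDING)
                        return -EBUSY;

                num_pending++;
                if (num_pending == MAX_PENDING)
                        queues_paused = true;    /* ~ ath10k_mac_tx_lock(Q_FULL) */
                return 0;
        }

        /* called with the tx lock held: retire one frame */
        static void dec_pending(void)
        {
                num_pending--;
                if (num_pending == MAX_PENDING - 1)
                        queues_paused = false;   /* ~ ath10k_mac_tx_unlock(Q_FULL) */
        }
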
74 int ath10k_htt_tx_alloc_msdu_id(struct ath10k_htt *htt, struct sk_buff *skb) in ath10k_htt_tx_alloc_msdu_id() argument
76 struct ath10k *ar = htt->ar; in ath10k_htt_tx_alloc_msdu_id()
79 lockdep_assert_held(&htt->tx_lock); in ath10k_htt_tx_alloc_msdu_id()
81 ret = idr_alloc(&htt->pending_tx, skb, 0, in ath10k_htt_tx_alloc_msdu_id()
82 htt->max_num_pending_tx, GFP_ATOMIC); in ath10k_htt_tx_alloc_msdu_id()
89 void ath10k_htt_tx_free_msdu_id(struct ath10k_htt *htt, u16 msdu_id) in ath10k_htt_tx_free_msdu_id() argument
91 struct ath10k *ar = htt->ar; in ath10k_htt_tx_free_msdu_id()
93 lockdep_assert_held(&htt->tx_lock); in ath10k_htt_tx_free_msdu_id()
97 idr_remove(&htt->pending_tx, msdu_id); in ath10k_htt_tx_free_msdu_id()
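
Because msdu_id doubles as the index into the per-frame txbuf and fragment-descriptor arrays, idr_alloc() is bounded to [0, max_num_pending_tx). GFP_ATOMIC is required since the callers hold htt->tx_lock, a spinlock, and must not sleep; lockdep_assert_held() enforces that expectation. A kernel-context sketch of the same IDR pairing (stand-in names, not the driver's exact code):

        #include <linux/idr.h>
        #include <linux/spinlock.h>

        static DEFINE_SPINLOCK(tx_lock);
        static DEFINE_IDR(pending_tx);

        static int alloc_id(void *cookie, int max_ids)
        {
                lockdep_assert_held(&tx_lock);

                /* GFP_ATOMIC: we are under a spinlock and must not sleep;
                 * returns the smallest free id in [0, max_ids), or a
                 * negative errno (-ENOSPC/-ENOMEM) on failure */
                return idr_alloc(&pending_tx, cookie, 0, max_ids, GFP_ATOMIC);
        }

        static void free_id(int id)
        {
                lockdep_assert_held(&tx_lock);
                idr_remove(&pending_tx, id);
        }
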
100 int ath10k_htt_tx_alloc(struct ath10k_htt *htt) in ath10k_htt_tx_alloc() argument
102 struct ath10k *ar = htt->ar; in ath10k_htt_tx_alloc()
106 htt->max_num_pending_tx); in ath10k_htt_tx_alloc()
108 spin_lock_init(&htt->tx_lock); in ath10k_htt_tx_alloc()
109 idr_init(&htt->pending_tx); in ath10k_htt_tx_alloc()
111 size = htt->max_num_pending_tx * sizeof(struct ath10k_htt_txbuf); in ath10k_htt_tx_alloc()
112 htt->txbuf.vaddr = dma_alloc_coherent(ar->dev, size, in ath10k_htt_tx_alloc()
113 &htt->txbuf.paddr, in ath10k_htt_tx_alloc()
115 if (!htt->txbuf.vaddr) { in ath10k_htt_tx_alloc()
124 size = htt->max_num_pending_tx * sizeof(struct htt_msdu_ext_desc); in ath10k_htt_tx_alloc()
125 htt->frag_desc.vaddr = dma_alloc_coherent(ar->dev, size, in ath10k_htt_tx_alloc()
126 &htt->frag_desc.paddr, in ath10k_htt_tx_alloc()
128 if (!htt->frag_desc.vaddr) { in ath10k_htt_tx_alloc()
138 size = htt->max_num_pending_tx * in ath10k_htt_tx_alloc()
140 dma_free_coherent(htt->ar->dev, size, htt->txbuf.vaddr, in ath10k_htt_tx_alloc()
141 htt->txbuf.paddr); in ath10k_htt_tx_alloc()
143 idr_destroy(&htt->pending_tx); in ath10k_htt_tx_alloc()
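
ath10k_htt_tx_alloc() sizes each DMA-coherent array as max_num_pending_tx times the per-slot descriptor size, so a frame's msdu_id selects the same slot in both arrays, and its error path unwinds in reverse acquisition order: free the txbuf array, then destroy the IDR. A kernel-context sketch of that shape, assuming stand-in globals (not the driver's exact code):

        #include <linux/dma-mapping.h>
        #include <linux/idr.h>
        #include "htt.h"   /* struct ath10k_htt_txbuf, struct htt_msdu_ext_desc */

        static struct idr pending_tx;
        static struct ath10k_htt_txbuf *txbuf_vaddr;
        static dma_addr_t txbuf_paddr;
        static struct htt_msdu_ext_desc *frag_vaddr;
        static dma_addr_t frag_paddr;

        static int tx_arrays_alloc(struct device *dev, int max_ids)
        {
                size_t size;

                idr_init(&pending_tx);

                /* one txbuf slot per possible msdu_id */
                size = max_ids * sizeof(struct ath10k_htt_txbuf);
                txbuf_vaddr = dma_alloc_coherent(dev, size, &txbuf_paddr, GFP_KERNEL);
                if (!txbuf_vaddr)
                        goto err_idr;

                /* one fragment descriptor per possible msdu_id */
                size = max_ids * sizeof(struct htt_msdu_ext_desc);
                frag_vaddr = dma_alloc_coherent(dev, size, &frag_paddr, GFP_KERNEL);
                if (!frag_vaddr)
                        goto err_txbuf;

                return 0;

        err_txbuf:
                size = max_ids * sizeof(struct ath10k_htt_txbuf);
                dma_free_coherent(dev, size, txbuf_vaddr, txbuf_paddr);
        err_idr:
                idr_destroy(&pending_tx);
                return -ENOMEM;
        }
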
150 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_tx_clean_up_pending() local
158 ath10k_txrx_tx_unref(htt, &tx_done); in ath10k_htt_tx_clean_up_pending()
163 void ath10k_htt_tx_free(struct ath10k_htt *htt) in ath10k_htt_tx_free() argument
167 idr_for_each(&htt->pending_tx, ath10k_htt_tx_clean_up_pending, htt->ar); in ath10k_htt_tx_free()
168 idr_destroy(&htt->pending_tx); in ath10k_htt_tx_free()
170 if (htt->txbuf.vaddr) { in ath10k_htt_tx_free()
171 size = htt->max_num_pending_tx * in ath10k_htt_tx_free()
173 dma_free_coherent(htt->ar->dev, size, htt->txbuf.vaddr, in ath10k_htt_tx_free()
174 htt->txbuf.paddr); in ath10k_htt_tx_free()
177 if (htt->frag_desc.vaddr) { in ath10k_htt_tx_free()
178 size = htt->max_num_pending_tx * in ath10k_htt_tx_free()
180 dma_free_coherent(htt->ar->dev, size, htt->frag_desc.vaddr, in ath10k_htt_tx_free()
181 htt->frag_desc.paddr); in ath10k_htt_tx_free()
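
Teardown runs in the opposite order, and before idr_destroy() it must drain the table: idr_for_each() invokes ath10k_htt_tx_clean_up_pending() for every still-allocated msdu_id, which completes the frame through ath10k_txrx_tx_unref() so no skb is leaked. The NULL checks on txbuf.vaddr and frag_desc.vaddr make ath10k_htt_tx_free() safe after a partially failed ath10k_htt_tx_alloc(). A sketch of the drain-then-free ordering, reusing the stand-in globals from the allocation sketch above (dev_kfree_skb_any() is a simplification of the driver's tx-completion reporting):

        #include <linux/skbuff.h>

        static int clean_up_pending(int id, void *ptr, void *ctx)
        {
                struct sk_buff *skb = ptr;

                dev_kfree_skb_any(skb);
                return 0;   /* a nonzero return would abort the idr_for_each() walk */
        }

        static void tx_arrays_free(struct device *dev, int max_ids)
        {
                /* drain every live msdu_id before destroying the table */
                idr_for_each(&pending_tx, clean_up_pending, NULL);
                idr_destroy(&pending_tx);

                /* NULL checks keep this safe after a failed allocation */
                if (txbuf_vaddr)
                        dma_free_coherent(dev, max_ids * sizeof(struct ath10k_htt_txbuf),
                                          txbuf_vaddr, txbuf_paddr);
                if (frag_vaddr)
                        dma_free_coherent(dev, max_ids * sizeof(struct htt_msdu_ext_desc),
                                          frag_vaddr, frag_paddr);
        }
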
196 int ath10k_htt_h2t_ver_req_msg(struct ath10k_htt *htt) in ath10k_htt_h2t_ver_req_msg() argument
198 struct ath10k *ar = htt->ar; in ath10k_htt_h2t_ver_req_msg()
215 ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb); in ath10k_htt_h2t_ver_req_msg()
224 int ath10k_htt_h2t_stats_req(struct ath10k_htt *htt, u8 mask, u64 cookie) in ath10k_htt_h2t_stats_req() argument
226 struct ath10k *ar = htt->ar; in ath10k_htt_h2t_stats_req()
255 ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb); in ath10k_htt_h2t_stats_req()
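
ath10k_htt_h2t_ver_req_msg() and ath10k_htt_h2t_stats_req() share one shape: allocate an skb sized for the HTT command, fill in the command header's msg_type and the body, then hand it to ath10k_htc_send() on the HTT endpoint htt->eid, freeing the skb if the send fails. A kernel-context sketch of that shape as a hypothetical helper (htt_h2t_simple_cmd() is not a real driver function; struct htt_cmd_hdr and the HTC calls are):

        #include <linux/skbuff.h>
        #include "core.h"   /* struct ath10k */
        #include "htt.h"    /* struct ath10k_htt, struct htt_cmd_hdr */

        static int htt_h2t_simple_cmd(struct ath10k_htt *htt, u8 msg_type,
                                      const void *body, size_t body_len)
        {
                struct ath10k *ar = htt->ar;
                struct sk_buff *skb;
                int ret;

                skb = ath10k_htc_alloc_skb(ar, sizeof(struct htt_cmd_hdr) + body_len);
                if (!skb)
                        return -ENOMEM;

                skb_put(skb, sizeof(struct htt_cmd_hdr) + body_len);
                ((struct htt_cmd_hdr *)skb->data)->msg_type = msg_type;
                memcpy(skb->data + sizeof(struct htt_cmd_hdr), body, body_len);

                ret = ath10k_htc_send(&ar->htc, htt->eid, skb);
                if (ret) {
                        /* sender still owns the skb on failure */
                        dev_kfree_skb_any(skb);
                        return ret;
                }
                return 0;
        }
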
266 int ath10k_htt_send_frag_desc_bank_cfg(struct ath10k_htt *htt) in ath10k_htt_send_frag_desc_bank_cfg() argument
268 struct ath10k *ar = htt->ar; in ath10k_htt_send_frag_desc_bank_cfg()
276 if (!htt->frag_desc.paddr) { in ath10k_htt_send_frag_desc_bank_cfg()
293 __cpu_to_le32(htt->frag_desc.paddr); in ath10k_htt_send_frag_desc_bank_cfg()
296 __cpu_to_le16(htt->max_num_pending_tx - 1); in ath10k_htt_send_frag_desc_bank_cfg()
298 ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb); in ath10k_htt_send_frag_desc_bank_cfg()
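
The fragment-descriptor bank config refuses to run if frag_desc.paddr is zero (the bank was never allocated), then publishes the bank's DMA base address and the last valid index, max_num_pending_tx - 1. Firmware can then locate any frame's fragment descriptor with the same arithmetic the host uses later in ath10k_htt_tx(): base plus msdu_id times the slot size. A minimal sketch of that addressing:

        #include <stdint.h>
        #include <stddef.h>

        /* illustrative: per-id slot addressing within a DMA-coherent bank */
        static uint32_t frag_desc_paddr(uint32_t bank_base_paddr, uint16_t msdu_id,
                                        size_t desc_size)
        {
                /* both host and firmware compute base + id * slot size */
                return bank_base_paddr + (uint32_t)msdu_id * (uint32_t)desc_size;
        }
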
309 int ath10k_htt_send_rx_ring_cfg_ll(struct ath10k_htt *htt) in ath10k_htt_send_rx_ring_cfg_ll() argument
311 struct ath10k *ar = htt->ar; in ath10k_htt_send_rx_ring_cfg_ll()
361 fw_idx = __le32_to_cpu(*htt->rx_ring.alloc_idx.vaddr); in ath10k_htt_send_rx_ring_cfg_ll()
364 __cpu_to_le32(htt->rx_ring.alloc_idx.paddr); in ath10k_htt_send_rx_ring_cfg_ll()
365 ring->rx_ring_base_paddr = __cpu_to_le32(htt->rx_ring.base_paddr); in ath10k_htt_send_rx_ring_cfg_ll()
366 ring->rx_ring_len = __cpu_to_le16(htt->rx_ring.size); in ath10k_htt_send_rx_ring_cfg_ll()
386 ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb); in ath10k_htt_send_rx_ring_cfg_ll()
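
The rx ring setup shows the shared-index pattern: host and firmware both see a DMA-coherent index word, so the host snapshots the firmware's fill index with __le32_to_cpu(*htt->rx_ring.alloc_idx.vaddr) and ships the index word's physical address, the ring base and the ring length in the setup message, all little-endian on the wire. A userspace model of the endian handling (the struct is an illustrative subset, not the real message layout):

        #include <stdint.h>
        #include <endian.h>

        struct rx_ring_setup {                     /* illustrative subset */
                uint32_t fw_idx_shadow_reg_paddr;  /* where the shared index lives */
                uint32_t rx_ring_base_paddr;       /* ring base, LE on the wire */
                uint16_t rx_ring_len;              /* number of ring entries */
        };

        static void fill_ring_setup(struct rx_ring_setup *m,
                                    const volatile uint32_t *alloc_idx_vaddr,
                                    uint32_t alloc_idx_paddr,
                                    uint32_t ring_base_paddr, uint16_t ring_len)
        {
                /* snapshot the firmware's current fill index */
                uint32_t fw_idx = le32toh(*alloc_idx_vaddr);
                (void)fw_idx;   /* the driver seeds its own sw index from this */

                m->fw_idx_shadow_reg_paddr = htole32(alloc_idx_paddr);
                m->rx_ring_base_paddr = htole32(ring_base_paddr);
                m->rx_ring_len = htole16(ring_len);
        }
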
395 int ath10k_htt_h2t_aggr_cfg_msg(struct ath10k_htt *htt, in ath10k_htt_h2t_aggr_cfg_msg() argument
399 struct ath10k *ar = htt->ar; in ath10k_htt_h2t_aggr_cfg_msg()
433 ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb); in ath10k_htt_h2t_aggr_cfg_msg()
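
ath10k_htt_h2t_aggr_cfg_msg() takes the maximum A-MPDU and A-MSDU subframe counts and sends them to firmware in the same alloc-fill-send shape as the other H2T messages above. A sketch of the parameter validation such a path would want; the numeric bounds are illustrative assumptions, not taken from the driver:

        #include <errno.h>
        #include <stdint.h>

        /* illustrative bounds check before building the aggregation config */
        static int check_aggr_params(uint8_t max_subfrms_ampdu,
                                     uint8_t max_subfrms_amsdu)
        {
                /* bounds here are assumptions; consult the HTT interface
                 * definition for the real limits */
                if (max_subfrms_ampdu == 0 || max_subfrms_ampdu > 64)
                        return -EINVAL;
                if (max_subfrms_amsdu == 0 || max_subfrms_amsdu > 31)
                        return -EINVAL;
                return 0;
        }
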
442 int ath10k_htt_mgmt_tx(struct ath10k_htt *htt, struct sk_buff *msdu) in ath10k_htt_mgmt_tx() argument
444 struct ath10k *ar = htt->ar; in ath10k_htt_mgmt_tx()
464 res = ath10k_htt_tx_inc_pending(htt, limit_mgmt_desc, is_probe_resp); in ath10k_htt_mgmt_tx()
472 spin_lock_bh(&htt->tx_lock); in ath10k_htt_mgmt_tx()
473 res = ath10k_htt_tx_alloc_msdu_id(htt, msdu); in ath10k_htt_mgmt_tx()
474 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_mgmt_tx()
506 skb_cb->htt.txbuf = NULL; in ath10k_htt_mgmt_tx()
508 res = ath10k_htc_send(&htt->ar->htc, htt->eid, txdesc); in ath10k_htt_mgmt_tx()
519 spin_lock_bh(&htt->tx_lock); in ath10k_htt_mgmt_tx()
520 ath10k_htt_tx_free_msdu_id(htt, msdu_id); in ath10k_htt_mgmt_tx()
521 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_mgmt_tx()
523 ath10k_htt_tx_dec_pending(htt, limit_mgmt_desc); in ath10k_htt_mgmt_tx()
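
ath10k_htt_mgmt_tx() acquires its resources in a fixed order (pending-tx slot, then msdu_id, then the actual send) and its error labels release them strictly in reverse: free the msdu_id under tx_lock, then drop the pending count. A compilable userspace model of that goto-ladder idiom (the stubs stand in for the driver calls named in the comments):

        #include <errno.h>

        static int pending;
        static int next_id;

        static int reserve_slot(void)   { pending++; return 0; }
        static void release_slot(void)  { pending--; }
        static int alloc_msdu_id(void)  { return next_id++; }
        static void free_msdu_id(int i) { (void)i; }
        static int send_frame(int id)   { (void)id; return 0; }

        static int tx_one_frame(void)
        {
                int msdu_id, res;

                res = reserve_slot();                /* ~ ath10k_htt_tx_inc_pending() */
                if (res)
                        goto err;

                msdu_id = alloc_msdu_id();           /* ~ ath10k_htt_tx_alloc_msdu_id() */
                if (msdu_id < 0) {
                        res = msdu_id;
                        goto err_release_slot;
                }

                res = send_frame(msdu_id);           /* ~ ath10k_htc_send() */
                if (res)
                        goto err_free_id;

                return 0;

        err_free_id:
                free_msdu_id(msdu_id);               /* undo in reverse order */
        err_release_slot:
                release_slot();                      /* ~ ath10k_htt_tx_dec_pending() */
        err:
                return res;
        }
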
528 int ath10k_htt_tx(struct ath10k_htt *htt, struct sk_buff *msdu) in ath10k_htt_tx() argument
530 struct ath10k *ar = htt->ar; in ath10k_htt_tx()
537 u8 tid = skb_cb->htt.tid; in ath10k_htt_tx()
555 res = ath10k_htt_tx_inc_pending(htt, limit_mgmt_desc, is_probe_resp); in ath10k_htt_tx()
559 spin_lock_bh(&htt->tx_lock); in ath10k_htt_tx()
560 res = ath10k_htt_tx_alloc_msdu_id(htt, msdu); in ath10k_htt_tx()
561 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_tx()
567 prefetch_len = min(htt->prefetch_len, msdu->len); in ath10k_htt_tx()
570 skb_cb->htt.txbuf = &htt->txbuf.vaddr[msdu_id]; in ath10k_htt_tx()
571 skb_cb->htt.txbuf_paddr = htt->txbuf.paddr + in ath10k_htt_tx()
579 } else if (!skb_cb->htt.nohwcrypt && in ath10k_htt_tx()
600 memset(&htt->frag_desc.vaddr[msdu_id], 0, in ath10k_htt_tx()
603 &htt->frag_desc.vaddr[msdu_id].frags; in ath10k_htt_tx()
604 ext_desc = &htt->frag_desc.vaddr[msdu_id]; in ath10k_htt_tx()
610 frags_paddr = htt->frag_desc.paddr + in ath10k_htt_tx()
613 frags = skb_cb->htt.txbuf->frags; in ath10k_htt_tx()
620 frags_paddr = skb_cb->htt.txbuf_paddr; in ath10k_htt_tx()
649 skb_cb->htt.txbuf->htc_hdr.eid = htt->eid; in ath10k_htt_tx()
650 skb_cb->htt.txbuf->htc_hdr.len = __cpu_to_le16( in ath10k_htt_tx()
651 sizeof(skb_cb->htt.txbuf->cmd_hdr) + in ath10k_htt_tx()
652 sizeof(skb_cb->htt.txbuf->cmd_tx) + in ath10k_htt_tx()
654 skb_cb->htt.txbuf->htc_hdr.flags = 0; in ath10k_htt_tx()
656 if (skb_cb->htt.nohwcrypt) in ath10k_htt_tx()
678 skb_cb->htt.txbuf->cmd_hdr.msg_type = HTT_H2T_MSG_TYPE_TX_FRM; in ath10k_htt_tx()
679 skb_cb->htt.txbuf->cmd_tx.flags0 = flags0; in ath10k_htt_tx()
680 skb_cb->htt.txbuf->cmd_tx.flags1 = __cpu_to_le16(flags1); in ath10k_htt_tx()
681 skb_cb->htt.txbuf->cmd_tx.len = __cpu_to_le16(msdu->len); in ath10k_htt_tx()
682 skb_cb->htt.txbuf->cmd_tx.id = __cpu_to_le16(msdu_id); in ath10k_htt_tx()
683 skb_cb->htt.txbuf->cmd_tx.frags_paddr = __cpu_to_le32(frags_paddr); in ath10k_htt_tx()
684 skb_cb->htt.txbuf->cmd_tx.peerid = __cpu_to_le16(HTT_INVALID_PEERID); in ath10k_htt_tx()
685 skb_cb->htt.txbuf->cmd_tx.freq = __cpu_to_le16(skb_cb->htt.freq); in ath10k_htt_tx()
691 (u32)skb_cb->paddr, vdev_id, tid, skb_cb->htt.freq); in ath10k_htt_tx()
699 sg_items[0].vaddr = &skb_cb->htt.txbuf->htc_hdr; in ath10k_htt_tx()
700 sg_items[0].paddr = skb_cb->htt.txbuf_paddr + in ath10k_htt_tx()
701 sizeof(skb_cb->htt.txbuf->frags); in ath10k_htt_tx()
702 sg_items[0].len = sizeof(skb_cb->htt.txbuf->htc_hdr) + in ath10k_htt_tx()
703 sizeof(skb_cb->htt.txbuf->cmd_hdr) + in ath10k_htt_tx()
704 sizeof(skb_cb->htt.txbuf->cmd_tx); in ath10k_htt_tx()
712 res = ath10k_hif_tx_sg(htt->ar, in ath10k_htt_tx()
713 htt->ar->htc.endpoint[htt->eid].ul_pipe_id, in ath10k_htt_tx()
723 spin_lock_bh(&htt->tx_lock); in ath10k_htt_tx()
724 ath10k_htt_tx_free_msdu_id(htt, msdu_id); in ath10k_htt_tx()
725 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_tx()
727 ath10k_htt_tx_dec_pending(htt, limit_mgmt_desc); in ath10k_htt_tx()
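
ath10k_htt_tx() shows why msdu_id ranges over [0, max_num_pending_tx): it indexes the coherent txbuf array directly (htt->txbuf.vaddr[msdu_id], with txbuf_paddr computed as the base plus msdu_id times sizeof(struct ath10k_htt_txbuf)), the HTC and HTT headers are written into that slot, and the frame goes to the HIF layer as a scatter-gather list whose first item covers the headers in the slot, offset past the frags area. A userspace model of that slot addressing (all field sizes are placeholders, not the real layout):

        #include <stdint.h>
        #include <stddef.h>

        /* illustrative model: one coherent array, indexed by msdu_id, holds
         * the HTC header + HTT command for each in-flight frame */
        struct txbuf_slot {
                uint8_t frags[32];     /* placeholder sizes, not the real layout */
                uint8_t htc_hdr[8];
                uint8_t cmd_hdr[1];
                uint8_t cmd_tx[16];
        };

        struct sg_item {               /* stand-in for the driver's sg item */
                const void *vaddr;
                uint32_t paddr;
                uint16_t len;
        };

        static void fill_header_sg(struct sg_item *sg, struct txbuf_slot *slots,
                                   uint32_t slots_paddr, uint16_t msdu_id)
        {
                struct txbuf_slot *slot = &slots[msdu_id];
                uint32_t slot_paddr = slots_paddr +
                                      msdu_id * (uint32_t)sizeof(*slot);

                /* the header sg item starts after the frags area, matching the
                 * driver's offset of sizeof(txbuf->frags) */
                sg->vaddr = slot->htc_hdr;
                sg->paddr = slot_paddr + offsetof(struct txbuf_slot, htc_hdr);
                sg->len = sizeof(slot->htc_hdr) + sizeof(slot->cmd_hdr) +
                          sizeof(slot->cmd_tx);
        }
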