Lines matching refs: htt (ath10k HTT TX path, htt_tx.c)

25 void __ath10k_htt_tx_dec_pending(struct ath10k_htt *htt) in __ath10k_htt_tx_dec_pending() argument
27 htt->num_pending_tx--; in __ath10k_htt_tx_dec_pending()
28 if (htt->num_pending_tx == htt->max_num_pending_tx - 1) in __ath10k_htt_tx_dec_pending()
29 ieee80211_wake_queues(htt->ar->hw); in __ath10k_htt_tx_dec_pending()
32 static void ath10k_htt_tx_dec_pending(struct ath10k_htt *htt) in ath10k_htt_tx_dec_pending() argument
34 spin_lock_bh(&htt->tx_lock); in ath10k_htt_tx_dec_pending()
35 __ath10k_htt_tx_dec_pending(htt); in ath10k_htt_tx_dec_pending()
36 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_tx_dec_pending()
39 static int ath10k_htt_tx_inc_pending(struct ath10k_htt *htt) in ath10k_htt_tx_inc_pending() argument
43 spin_lock_bh(&htt->tx_lock); in ath10k_htt_tx_inc_pending()
45 if (htt->num_pending_tx >= htt->max_num_pending_tx) { in ath10k_htt_tx_inc_pending()
50 htt->num_pending_tx++; in ath10k_htt_tx_inc_pending()
51 if (htt->num_pending_tx == htt->max_num_pending_tx) in ath10k_htt_tx_inc_pending()
52 ieee80211_stop_queues(htt->ar->hw); in ath10k_htt_tx_inc_pending()
55 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_tx_inc_pending()
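
Taken together, the two helpers above implement simple TX flow control against mac80211: a slot is reserved for every outstanding frame, the queues are stopped when num_pending_tx reaches max_num_pending_tx, and they are woken again as soon as one slot frees up. A minimal sketch of that pattern, assuming a hypothetical stripped-down context struct (the real driver keeps these fields in struct ath10k_htt):

#include <linux/spinlock.h>
#include <net/mac80211.h>

/* Hypothetical, simplified flow-control state. */
struct tx_flowctl {
	spinlock_t lock;
	int num_pending;
	int max_pending;
	struct ieee80211_hw *hw;
};

/* Reserve one TX slot; stop the mac80211 queues when the last slot
 * is taken. Returns -EBUSY when no slot is available. */
static int tx_flowctl_inc(struct tx_flowctl *fc)
{
	int ret = 0;

	spin_lock_bh(&fc->lock);

	if (fc->num_pending >= fc->max_pending) {
		ret = -EBUSY;
		goto out;
	}

	fc->num_pending++;
	if (fc->num_pending == fc->max_pending)
		ieee80211_stop_queues(fc->hw);

out:
	spin_unlock_bh(&fc->lock);
	return ret;
}

/* Release a slot; wake the queues the moment the first slot frees
 * up after the counter had hit the limit. */
static void tx_flowctl_dec(struct tx_flowctl *fc)
{
	spin_lock_bh(&fc->lock);

	fc->num_pending--;
	if (fc->num_pending == fc->max_pending - 1)
		ieee80211_wake_queues(fc->hw);

	spin_unlock_bh(&fc->lock);
}
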
59 int ath10k_htt_tx_alloc_msdu_id(struct ath10k_htt *htt, struct sk_buff *skb) in ath10k_htt_tx_alloc_msdu_id() argument
61 struct ath10k *ar = htt->ar; in ath10k_htt_tx_alloc_msdu_id()
64 lockdep_assert_held(&htt->tx_lock); in ath10k_htt_tx_alloc_msdu_id()
66 ret = idr_alloc(&htt->pending_tx, skb, 0, 0x10000, GFP_ATOMIC); in ath10k_htt_tx_alloc_msdu_id()
73 void ath10k_htt_tx_free_msdu_id(struct ath10k_htt *htt, u16 msdu_id) in ath10k_htt_tx_free_msdu_id() argument
75 struct ath10k *ar = htt->ar; in ath10k_htt_tx_free_msdu_id()
77 lockdep_assert_held(&htt->tx_lock); in ath10k_htt_tx_free_msdu_id()
81 idr_remove(&htt->pending_tx, msdu_id); in ath10k_htt_tx_free_msdu_id()
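
The MSDU-id helpers map the 16-bit cookie carried in the HTT TX descriptor back to the skb it belongs to: idr_alloc() picks a free id in [0, 0xffff] and stores the skb pointer under it, idr_remove() drops the mapping when the TX completion comes back, and both run under tx_lock (hence the lockdep_assert_held). A hedged sketch of the same idea with hypothetical helper names:

#include <linux/gfp.h>
#include <linux/idr.h>
#include <linux/skbuff.h>

/* Allocate a 16-bit id for msdu and remember the skb under it.
 * GFP_ATOMIC because the caller already holds a BH spinlock. */
static int msdu_id_alloc(struct idr *pending, struct sk_buff *msdu)
{
	/* the end bound is exclusive, so ids stay within 0..0xffff
	 * and fit the 16-bit id field of the TX descriptor */
	return idr_alloc(pending, msdu, 0, 0x10000, GFP_ATOMIC);
}

/* Drop the id -> skb mapping once the frame has been completed. */
static void msdu_id_free(struct idr *pending, u16 msdu_id)
{
	idr_remove(pending, msdu_id);
}
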
84 int ath10k_htt_tx_alloc(struct ath10k_htt *htt) in ath10k_htt_tx_alloc() argument
86 struct ath10k *ar = htt->ar; in ath10k_htt_tx_alloc()
89 htt->max_num_pending_tx); in ath10k_htt_tx_alloc()
91 spin_lock_init(&htt->tx_lock); in ath10k_htt_tx_alloc()
92 idr_init(&htt->pending_tx); in ath10k_htt_tx_alloc()
94 htt->tx_pool = dma_pool_create("ath10k htt tx pool", htt->ar->dev, in ath10k_htt_tx_alloc()
96 if (!htt->tx_pool) { in ath10k_htt_tx_alloc()
97 idr_destroy(&htt->pending_tx); in ath10k_htt_tx_alloc()
107 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_tx_clean_up_pending() local
115 spin_lock_bh(&htt->tx_lock); in ath10k_htt_tx_clean_up_pending()
116 ath10k_txrx_tx_unref(htt, &tx_done); in ath10k_htt_tx_clean_up_pending()
117 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_tx_clean_up_pending()
122 void ath10k_htt_tx_free(struct ath10k_htt *htt) in ath10k_htt_tx_free() argument
124 idr_for_each(&htt->pending_tx, ath10k_htt_tx_clean_up_pending, htt->ar); in ath10k_htt_tx_free()
125 idr_destroy(&htt->pending_tx); in ath10k_htt_tx_free()
126 dma_pool_destroy(htt->tx_pool); in ath10k_htt_tx_free()
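
Setup and teardown mirror each other: ath10k_htt_tx_alloc() initializes the lock, the pending-TX IDR and a DMA pool for the per-packet TX buffers (and destroys the IDR again if the pool cannot be created), while ath10k_htt_tx_free() first walks any entries still pending, completing them via ath10k_txrx_tx_unref(), then destroys the IDR and the pool. A condensed sketch of that lifecycle, assuming a hypothetical state struct and a caller-supplied cleanup callback (entry size and alignment are placeholders; the driver sizes the pool to its TX buffer layout):

#include <linux/dmapool.h>
#include <linux/idr.h>
#include <linux/spinlock.h>

struct htt_tx_state {
	spinlock_t tx_lock;
	struct idr pending_tx;
	struct dma_pool *tx_pool;
};

static int htt_tx_state_init(struct htt_tx_state *st, struct device *dev,
			     size_t txbuf_size)
{
	spin_lock_init(&st->tx_lock);
	idr_init(&st->pending_tx);

	/* 4-byte alignment is only an assumption here */
	st->tx_pool = dma_pool_create("htt tx pool", dev, txbuf_size, 4, 0);
	if (!st->tx_pool) {
		idr_destroy(&st->pending_tx);
		return -ENOMEM;
	}

	return 0;
}

static void htt_tx_state_free(struct htt_tx_state *st,
			      int (*cleanup)(int id, void *ptr, void *ctx),
			      void *ctx)
{
	/* complete or drop whatever is still outstanding before the
	 * IDR and the DMA pool go away */
	idr_for_each(&st->pending_tx, cleanup, ctx);
	idr_destroy(&st->pending_tx);
	dma_pool_destroy(st->tx_pool);
}
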
134 int ath10k_htt_h2t_ver_req_msg(struct ath10k_htt *htt) in ath10k_htt_h2t_ver_req_msg() argument
136 struct ath10k *ar = htt->ar; in ath10k_htt_h2t_ver_req_msg()
153 ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb); in ath10k_htt_h2t_ver_req_msg()
162 int ath10k_htt_h2t_stats_req(struct ath10k_htt *htt, u8 mask, u64 cookie) in ath10k_htt_h2t_stats_req() argument
164 struct ath10k *ar = htt->ar; in ath10k_htt_h2t_stats_req()
193 ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb); in ath10k_htt_h2t_stats_req()
204 int ath10k_htt_send_rx_ring_cfg_ll(struct ath10k_htt *htt) in ath10k_htt_send_rx_ring_cfg_ll() argument
206 struct ath10k *ar = htt->ar; in ath10k_htt_send_rx_ring_cfg_ll()
256 fw_idx = __le32_to_cpu(*htt->rx_ring.alloc_idx.vaddr); in ath10k_htt_send_rx_ring_cfg_ll()
259 __cpu_to_le32(htt->rx_ring.alloc_idx.paddr); in ath10k_htt_send_rx_ring_cfg_ll()
260 ring->rx_ring_base_paddr = __cpu_to_le32(htt->rx_ring.base_paddr); in ath10k_htt_send_rx_ring_cfg_ll()
261 ring->rx_ring_len = __cpu_to_le16(htt->rx_ring.size); in ath10k_htt_send_rx_ring_cfg_ll()
281 ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb); in ath10k_htt_send_rx_ring_cfg_ll()
290 int ath10k_htt_h2t_aggr_cfg_msg(struct ath10k_htt *htt, in ath10k_htt_h2t_aggr_cfg_msg() argument
294 struct ath10k *ar = htt->ar; in ath10k_htt_h2t_aggr_cfg_msg()
328 ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb); in ath10k_htt_h2t_aggr_cfg_msg()
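
All of the host-to-target control messages above (version request, stats request, RX ring configuration, aggregation configuration) end the same way: the HTT command is serialized into an skb and handed to the HTC layer on the HTT endpoint via ath10k_htc_send(&htt->ar->htc, htt->eid, skb); if the send fails, the caller still owns the skb and must free it. A hedged sketch of that common tail (htt_send_cmd is a hypothetical wrapper, not a function in the driver; the real callers each build their own struct htt_cmd payload first):

/* Sketch only; assumes the driver-internal htt.h/htc.h headers. */
static int htt_send_cmd(struct ath10k_htt *htt, struct sk_buff *skb)
{
	int ret;

	ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb);
	if (ret) {
		/* on failure the skb was not consumed by HTC */
		dev_kfree_skb_any(skb);
		return ret;
	}

	return 0;
}
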
337 int ath10k_htt_mgmt_tx(struct ath10k_htt *htt, struct sk_buff *msdu) in ath10k_htt_mgmt_tx() argument
339 struct ath10k *ar = htt->ar; in ath10k_htt_mgmt_tx()
349 res = ath10k_htt_tx_inc_pending(htt); in ath10k_htt_mgmt_tx()
356 spin_lock_bh(&htt->tx_lock); in ath10k_htt_mgmt_tx()
357 res = ath10k_htt_tx_alloc_msdu_id(htt, msdu); in ath10k_htt_mgmt_tx()
359 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_mgmt_tx()
363 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_mgmt_tx()
389 skb_cb->htt.txbuf = NULL; in ath10k_htt_mgmt_tx()
391 res = ath10k_htc_send(&htt->ar->htc, htt->eid, txdesc); in ath10k_htt_mgmt_tx()
402 spin_lock_bh(&htt->tx_lock); in ath10k_htt_mgmt_tx()
403 ath10k_htt_tx_free_msdu_id(htt, msdu_id); in ath10k_htt_mgmt_tx()
404 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_mgmt_tx()
406 ath10k_htt_tx_dec_pending(htt); in ath10k_htt_mgmt_tx()
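
The management TX path also shows the unwind ordering used throughout this file: reserve a pending-TX slot first, allocate an MSDU id under tx_lock, and on any later failure free the id (again under tx_lock) before handing the pending slot back. A reduced sketch of that ordering, with a hypothetical map_and_send() standing in for the DMA mapping and HTC submission that the real ath10k_htt_mgmt_tx() performs:

static int htt_mgmt_tx_sketch(struct ath10k_htt *htt, struct sk_buff *msdu,
			      int (*map_and_send)(struct ath10k_htt *htt,
						  struct sk_buff *msdu,
						  int msdu_id))
{
	int msdu_id;
	int res;

	res = ath10k_htt_tx_inc_pending(htt);
	if (res)
		return res;

	spin_lock_bh(&htt->tx_lock);
	res = ath10k_htt_tx_alloc_msdu_id(htt, msdu);
	if (res < 0) {
		spin_unlock_bh(&htt->tx_lock);
		goto err_tx_dec;
	}
	msdu_id = res;
	spin_unlock_bh(&htt->tx_lock);

	res = map_and_send(htt, msdu, msdu_id);
	if (res)
		goto err_free_msdu_id;

	return 0;

err_free_msdu_id:
	spin_lock_bh(&htt->tx_lock);
	ath10k_htt_tx_free_msdu_id(htt, msdu_id);
	spin_unlock_bh(&htt->tx_lock);
err_tx_dec:
	ath10k_htt_tx_dec_pending(htt);
	return res;
}
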
411 int ath10k_htt_tx(struct ath10k_htt *htt, struct sk_buff *msdu) in ath10k_htt_tx() argument
413 struct ath10k *ar = htt->ar; in ath10k_htt_tx()
420 u8 tid = skb_cb->htt.tid; in ath10k_htt_tx()
429 res = ath10k_htt_tx_inc_pending(htt); in ath10k_htt_tx()
433 spin_lock_bh(&htt->tx_lock); in ath10k_htt_tx()
434 res = ath10k_htt_tx_alloc_msdu_id(htt, msdu); in ath10k_htt_tx()
436 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_tx()
440 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_tx()
442 prefetch_len = min(htt->prefetch_len, msdu->len); in ath10k_htt_tx()
448 use_frags = htt->target_version_major < 3 || in ath10k_htt_tx()
451 skb_cb->htt.txbuf = dma_pool_alloc(htt->tx_pool, GFP_ATOMIC, in ath10k_htt_tx()
453 if (!skb_cb->htt.txbuf) { in ath10k_htt_tx()
457 skb_cb->htt.txbuf_paddr = paddr; in ath10k_htt_tx()
474 frags = skb_cb->htt.txbuf->frags; in ath10k_htt_tx()
484 frags_paddr = skb_cb->htt.txbuf_paddr; in ath10k_htt_tx()
508 skb_cb->htt.txbuf->htc_hdr.eid = htt->eid; in ath10k_htt_tx()
509 skb_cb->htt.txbuf->htc_hdr.len = __cpu_to_le16( in ath10k_htt_tx()
510 sizeof(skb_cb->htt.txbuf->cmd_hdr) + in ath10k_htt_tx()
511 sizeof(skb_cb->htt.txbuf->cmd_tx) + in ath10k_htt_tx()
513 skb_cb->htt.txbuf->htc_hdr.flags = 0; in ath10k_htt_tx()
533 skb_cb->htt.txbuf->cmd_hdr.msg_type = HTT_H2T_MSG_TYPE_TX_FRM; in ath10k_htt_tx()
534 skb_cb->htt.txbuf->cmd_tx.flags0 = flags0; in ath10k_htt_tx()
535 skb_cb->htt.txbuf->cmd_tx.flags1 = __cpu_to_le16(flags1); in ath10k_htt_tx()
536 skb_cb->htt.txbuf->cmd_tx.len = __cpu_to_le16(msdu->len); in ath10k_htt_tx()
537 skb_cb->htt.txbuf->cmd_tx.id = __cpu_to_le16(msdu_id); in ath10k_htt_tx()
538 skb_cb->htt.txbuf->cmd_tx.frags_paddr = __cpu_to_le32(frags_paddr); in ath10k_htt_tx()
539 skb_cb->htt.txbuf->cmd_tx.peerid = __cpu_to_le16(HTT_INVALID_PEERID); in ath10k_htt_tx()
540 skb_cb->htt.txbuf->cmd_tx.freq = __cpu_to_le16(skb_cb->htt.freq); in ath10k_htt_tx()
546 (u32)skb_cb->paddr, vdev_id, tid, skb_cb->htt.freq); in ath10k_htt_tx()
554 sg_items[0].vaddr = &skb_cb->htt.txbuf->htc_hdr; in ath10k_htt_tx()
555 sg_items[0].paddr = skb_cb->htt.txbuf_paddr + in ath10k_htt_tx()
556 sizeof(skb_cb->htt.txbuf->frags); in ath10k_htt_tx()
557 sg_items[0].len = sizeof(skb_cb->htt.txbuf->htc_hdr) + in ath10k_htt_tx()
558 sizeof(skb_cb->htt.txbuf->cmd_hdr) + in ath10k_htt_tx()
559 sizeof(skb_cb->htt.txbuf->cmd_tx); in ath10k_htt_tx()
567 res = ath10k_hif_tx_sg(htt->ar, in ath10k_htt_tx()
568 htt->ar->htc.endpoint[htt->eid].ul_pipe_id, in ath10k_htt_tx()
578 dma_pool_free(htt->tx_pool, in ath10k_htt_tx()
579 skb_cb->htt.txbuf, in ath10k_htt_tx()
580 skb_cb->htt.txbuf_paddr); in ath10k_htt_tx()
582 spin_lock_bh(&htt->tx_lock); in ath10k_htt_tx()
583 ath10k_htt_tx_free_msdu_id(htt, msdu_id); in ath10k_htt_tx()
584 spin_unlock_bh(&htt->tx_lock); in ath10k_htt_tx()
586 ath10k_htt_tx_dec_pending(htt); in ath10k_htt_tx()
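
In the data path the fragment list, the HTC header, the HTT command header and the HTT TX descriptor all live in one buffer allocated from the DMA pool, so a single scatter-gather item can cover the three headers: its virtual address is &txbuf->htc_hdr and its bus address is the pool buffer's DMA address plus the size of the fragment array that precedes it. A small sketch of that offset arithmetic with a hypothetical, simplified layout (the real one is struct ath10k_htt_txbuf, with real header structs instead of the placeholder byte arrays):

#include <linux/kernel.h>
#include <linux/types.h>

/* Hypothetical layout mirroring the idea: fragment descriptors
 * first, then the headers that are submitted as one SG item. */
struct txbuf_sketch {
	struct {
		__le32 paddr;
		__le32 len;
	} frags[2];
	u8 htc_hdr[8];		/* placeholder sizes */
	u8 cmd_hdr[1];
	u8 cmd_tx[16];
};

/* With frags[] placed first and no padding before htc_hdr, the
 * header SG item starts exactly sizeof(frags) bytes into the
 * buffer, which is why txbuf_paddr + sizeof(txbuf->frags) can be
 * used as the bus address of htc_hdr. */
static void __maybe_unused check_header_offset(void)
{
	BUILD_BUG_ON(offsetof(struct txbuf_sketch, htc_hdr) !=
		     sizeof(((struct txbuf_sketch *)NULL)->frags));
}
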