Lines matching refs: ar
(cross-reference hits for the identifier ar; every match falls in the ath10k HTT TX code, htt_tx.c. Each entry gives the source line number, the matching line, and the enclosing function; "local"/"argument" marks where ar is declared.)

32 ath10k_mac_tx_unlock(htt->ar, ATH10K_TX_PAUSE_Q_FULL);  in __ath10k_htt_tx_dec_pending()
46 struct ath10k *ar = htt->ar; in ath10k_htt_tx_inc_pending() local
58 ar->hw_params.max_probe_resp_desc_thres)) { in ath10k_htt_tx_inc_pending()
67 ath10k_mac_tx_lock(htt->ar, ATH10K_TX_PAUSE_Q_FULL); in ath10k_htt_tx_inc_pending()
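These first hits outline the driver's pending-TX accounting: the increment path pauses the mac80211 queues with ATH10K_TX_PAUSE_Q_FULL once the HTT pending count reaches its ceiling, and the decrement path unpauses them as soon as a slot frees up. A minimal sketch of that flow-control pattern, reconstructed from these matches (the num_pending_tx/max_num_pending_tx counters are assumptions beyond what the hits show; the lock/unlock helpers and pause reason are taken verbatim from them):

/* Sketch of the queue pause/unpause accounting suggested by the hits
 * above; counter names are assumed, the helpers are from the matches.
 */
static void __ath10k_htt_tx_dec_pending(struct ath10k_htt *htt)
{
	htt->num_pending_tx--;

	/* crossing back below the limit: let mac80211 queues run again */
	if (htt->num_pending_tx == htt->max_num_pending_tx - 1)
		ath10k_mac_tx_unlock(htt->ar, ATH10K_TX_PAUSE_Q_FULL);
}

static int __ath10k_htt_tx_inc_pending(struct ath10k_htt *htt)
{
	if (htt->num_pending_tx >= htt->max_num_pending_tx)
		return -EBUSY;

	htt->num_pending_tx++;

	/* just hit the limit: pause mac80211 queues until slots free up */
	if (htt->num_pending_tx == htt->max_num_pending_tx)
		ath10k_mac_tx_lock(htt->ar, ATH10K_TX_PAUSE_Q_FULL);

	return 0;
}

The max_probe_resp_desc_thres hit layers a second, probe-response-specific cap on top of this counter.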
76 struct ath10k *ar = htt->ar; in ath10k_htt_tx_alloc_msdu_id() local
84 ath10k_dbg(ar, ATH10K_DBG_HTT, "htt tx alloc msdu_id %d\n", ret); in ath10k_htt_tx_alloc_msdu_id()
91 struct ath10k *ar = htt->ar; in ath10k_htt_tx_free_msdu_id() local
95 ath10k_dbg(ar, ATH10K_DBG_HTT, "htt tx free msdu_id %hu\n", msdu_id); in ath10k_htt_tx_free_msdu_id()
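These two helpers pair each outgoing frame with an msdu_id slot in the htt->pending_tx IDR (the idr_for_each() hit further down confirms the IDR backing). A sketch, assuming idr_alloc()/idr_remove() under htt->tx_lock and an upper bound of max_num_pending_tx:

/* Sketch: msdu_id allocation backed by the htt->pending_tx IDR.
 * The range limit and GFP_ATOMIC are assumptions.
 */
int ath10k_htt_tx_alloc_msdu_id(struct ath10k_htt *htt, struct sk_buff *skb)
{
	struct ath10k *ar = htt->ar;
	int ret;

	lockdep_assert_held(&htt->tx_lock);

	ret = idr_alloc(&htt->pending_tx, skb, 0,
			htt->max_num_pending_tx, GFP_ATOMIC);

	ath10k_dbg(ar, ATH10K_DBG_HTT, "htt tx alloc msdu_id %d\n", ret);

	return ret;	/* negative errno, or the new msdu_id */
}

void ath10k_htt_tx_free_msdu_id(struct ath10k_htt *htt, u16 msdu_id)
{
	struct ath10k *ar = htt->ar;

	lockdep_assert_held(&htt->tx_lock);

	ath10k_dbg(ar, ATH10K_DBG_HTT, "htt tx free msdu_id %hu\n", msdu_id);
	idr_remove(&htt->pending_tx, msdu_id);
}

Keying the IDR by msdu_id lets the completion path map a firmware-reported id straight back to its frame.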
102 struct ath10k *ar = htt->ar; in ath10k_htt_tx_alloc() local
105 ath10k_dbg(ar, ATH10K_DBG_BOOT, "htt tx max num pending tx %d\n", in ath10k_htt_tx_alloc()
112 htt->txbuf.vaddr = dma_alloc_coherent(ar->dev, size, in ath10k_htt_tx_alloc()
116 ath10k_err(ar, "failed to alloc tx buffer\n"); in ath10k_htt_tx_alloc()
121 if (!ar->hw_params.continuous_frag_desc) in ath10k_htt_tx_alloc()
125 htt->frag_desc.vaddr = dma_alloc_coherent(ar->dev, size, in ath10k_htt_tx_alloc()
129 ath10k_warn(ar, "failed to alloc fragment desc memory\n"); in ath10k_htt_tx_alloc()
140 dma_free_coherent(htt->ar->dev, size, htt->txbuf.vaddr, in ath10k_htt_tx_alloc()
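ath10k_htt_tx_alloc() sets up two DMA-coherent regions: a per-MSDU tx descriptor buffer (txbuf) and, only when the hardware reports continuous_frag_desc, a fragment descriptor bank. The final hit is the error path freeing txbuf when the second allocation fails. A condensed sketch of that allocate-and-unwind shape (the sizeof() operands are placeholders for the real descriptor structs):

/* Sketch of the two coherent allocations with error unwinding. */
int ath10k_htt_tx_alloc(struct ath10k_htt *htt)
{
	struct ath10k *ar = htt->ar;
	size_t size;

	size = htt->max_num_pending_tx * sizeof(struct ath10k_htt_txbuf);
	htt->txbuf.vaddr = dma_alloc_coherent(ar->dev, size,
					      &htt->txbuf.paddr, GFP_KERNEL);
	if (!htt->txbuf.vaddr) {
		ath10k_err(ar, "failed to alloc tx buffer\n");
		return -ENOMEM;
	}

	/* frag desc bank only exists on continuous_frag_desc hardware */
	if (!ar->hw_params.continuous_frag_desc)
		return 0;

	size = htt->max_num_pending_tx * sizeof(struct htt_msdu_ext_desc);
	htt->frag_desc.vaddr = dma_alloc_coherent(ar->dev, size,
						  &htt->frag_desc.paddr,
						  GFP_KERNEL);
	if (!htt->frag_desc.vaddr) {
		ath10k_warn(ar, "failed to alloc fragment desc memory\n");
		/* unwind the txbuf allocation from above */
		size = htt->max_num_pending_tx *
		       sizeof(struct ath10k_htt_txbuf);
		dma_free_coherent(htt->ar->dev, size, htt->txbuf.vaddr,
				  htt->txbuf.paddr);
		return -ENOMEM;
	}

	return 0;
}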
149 struct ath10k *ar = ctx; in ath10k_htt_tx_clean_up_pending() local
150 struct ath10k_htt *htt = &ar->htt; in ath10k_htt_tx_clean_up_pending()
153 ath10k_dbg(ar, ATH10K_DBG_HTT, "force cleanup msdu_id %hu\n", msdu_id); in ath10k_htt_tx_clean_up_pending()
167 idr_for_each(&htt->pending_tx, ath10k_htt_tx_clean_up_pending, htt->ar); in ath10k_htt_tx_free()
173 dma_free_coherent(htt->ar->dev, size, htt->txbuf.vaddr, in ath10k_htt_tx_free()
180 dma_free_coherent(htt->ar->dev, size, htt->frag_desc.vaddr, in ath10k_htt_tx_free()
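Teardown mirrors this: ath10k_htt_tx_free() walks the pending_tx IDR with idr_for_each() to force-complete any in-flight msdu_ids, then releases both coherent buffers. A sketch of the cleanup callback and walk (the htt_tx_done/ath10k_txrx_tx_unref() completion shown is an assumption about how each pending frame gets released):

/* Sketch: force-complete every pending msdu_id at teardown. */
static int ath10k_htt_tx_clean_up_pending(int msdu_id, void *skb, void *ctx)
{
	struct ath10k *ar = ctx;
	struct ath10k_htt *htt = &ar->htt;
	struct htt_tx_done tx_done = {0};

	ath10k_dbg(ar, ATH10K_DBG_HTT, "force cleanup msdu_id %hu\n", msdu_id);

	tx_done.discard = 1;
	tx_done.msdu_id = msdu_id;

	ath10k_txrx_tx_unref(htt, &tx_done);

	return 0;
}

void ath10k_htt_tx_free(struct ath10k_htt *htt)
{
	idr_for_each(&htt->pending_tx, ath10k_htt_tx_clean_up_pending, htt->ar);
	idr_destroy(&htt->pending_tx);
	/* ... dma_free_coherent() for txbuf and frag_desc follows ... */
}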
185 void ath10k_htt_htc_tx_complete(struct ath10k *ar, struct sk_buff *skb) in ath10k_htt_htc_tx_complete() argument
190 void ath10k_htt_hif_tx_complete(struct ath10k *ar, struct sk_buff *skb) in ath10k_htt_hif_tx_complete() argument
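Both completion callbacks take ar purely to satisfy the callback signature; at this point in the driver they reduce to releasing the completed skb. A minimal sketch, assuming no extra bookkeeping (the HIF variant would be analogous):

void ath10k_htt_htc_tx_complete(struct ath10k *ar, struct sk_buff *skb)
{
	/* the HTC-layer transmit finished; the skb is ours to free */
	dev_kfree_skb_any(skb);
}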
198 struct ath10k *ar = htt->ar; in ath10k_htt_h2t_ver_req_msg() local
207 skb = ath10k_htc_alloc_skb(ar, len); in ath10k_htt_h2t_ver_req_msg()
215 ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb); in ath10k_htt_h2t_ver_req_msg()
226 struct ath10k *ar = htt->ar; in ath10k_htt_h2t_stats_req() local
235 skb = ath10k_htc_alloc_skb(ar, len); in ath10k_htt_h2t_stats_req()
255 ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb); in ath10k_htt_h2t_stats_req()
257 ath10k_warn(ar, "failed to send htt type stats request: %d", in ath10k_htt_h2t_stats_req()
268 struct ath10k *ar = htt->ar; in ath10k_htt_send_frag_desc_bank_cfg() local
273 if (!ar->hw_params.continuous_frag_desc) in ath10k_htt_send_frag_desc_bank_cfg()
277 ath10k_warn(ar, "invalid frag desc memory\n"); in ath10k_htt_send_frag_desc_bank_cfg()
282 skb = ath10k_htc_alloc_skb(ar, size); in ath10k_htt_send_frag_desc_bank_cfg()
298 ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb); in ath10k_htt_send_frag_desc_bank_cfg()
300 ath10k_warn(ar, "failed to send frag desc bank cfg request: %d\n", in ath10k_htt_send_frag_desc_bank_cfg()
311 struct ath10k *ar = htt->ar; in ath10k_htt_send_rx_ring_cfg_ll() local
330 skb = ath10k_htc_alloc_skb(ar, len); in ath10k_htt_send_rx_ring_cfg_ll()
386 ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb); in ath10k_htt_send_rx_ring_cfg_ll()
399 struct ath10k *ar = htt->ar; in ath10k_htt_h2t_aggr_cfg_msg() local
417 skb = ath10k_htc_alloc_skb(ar, len); in ath10k_htt_h2t_aggr_cfg_msg()
429 ath10k_dbg(ar, ATH10K_DBG_HTT, "htt h2t aggr cfg msg amsdu %d ampdu %d", in ath10k_htt_h2t_aggr_cfg_msg()
433 ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb); in ath10k_htt_h2t_aggr_cfg_msg()
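Every host-to-target control message in this stretch (version request, stats request, frag-desc bank config, RX ring config, aggregation config) follows one shape: size the command, ath10k_htc_alloc_skb(), fill the HTT header, ath10k_htc_send() on htt->eid, free the skb on failure. A sketch of that pattern using the version request as the example (treat the exact struct htt_cmd layout as an assumption):

/* Sketch of the recurring h2t send pattern seen across these hits. */
int ath10k_htt_h2t_ver_req_msg(struct ath10k_htt *htt)
{
	struct ath10k *ar = htt->ar;
	struct htt_cmd *cmd;
	struct sk_buff *skb;
	int len = sizeof(cmd->hdr) + sizeof(cmd->ver_req);
	int ret;

	skb = ath10k_htc_alloc_skb(ar, len);
	if (!skb)
		return -ENOMEM;

	skb_put(skb, len);
	cmd = (struct htt_cmd *)skb->data;
	cmd->hdr.msg_type = HTT_H2T_MSG_TYPE_VERSION_REQ;

	ret = ath10k_htc_send(&htt->ar->htc, htt->eid, skb);
	if (ret) {
		dev_kfree_skb_any(skb);
		return ret;
	}

	return 0;
}

Only the message-specific payload and the warn string differ between the five senders.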
444 struct ath10k *ar = htt->ar; in ath10k_htt_mgmt_tx() local
445 struct device *dev = ar->dev; in ath10k_htt_mgmt_tx()
457 if (ar->hw_params.max_probe_resp_desc_thres) { in ath10k_htt_mgmt_tx()
480 txdesc = ath10k_htc_alloc_skb(ar, len); in ath10k_htt_mgmt_tx()
508 res = ath10k_htc_send(&htt->ar->htc, htt->eid, txdesc); in ath10k_htt_mgmt_tx()
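The management TX path allocates an msdu_id, maps the frame for device DMA, builds a separate HTT management tx descriptor, and hands it to HTC, unwinding each step on failure. A condensed sketch of that flow (descriptor layout and goto labels are assumptions; the max_probe_resp_desc_thres handling is omitted):

/* Condensed sketch of the mgmt tx flow visible in the hits above. */
int ath10k_htt_mgmt_tx(struct ath10k_htt *htt, struct sk_buff *msdu)
{
	struct ath10k *ar = htt->ar;
	struct device *dev = ar->dev;
	struct sk_buff *txdesc;
	int len = sizeof(struct htt_cmd_hdr) + sizeof(struct htt_mgmt_tx_desc);
	int msdu_id, res;

	spin_lock_bh(&htt->tx_lock);
	msdu_id = ath10k_htt_tx_alloc_msdu_id(htt, msdu);
	spin_unlock_bh(&htt->tx_lock);
	if (msdu_id < 0)
		return msdu_id;

	/* map the frame so the target can DMA it */
	ATH10K_SKB_CB(msdu)->paddr = dma_map_single(dev, msdu->data,
						    msdu->len, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, ATH10K_SKB_CB(msdu)->paddr)) {
		res = -EIO;
		goto err_free_msdu_id;
	}

	txdesc = ath10k_htc_alloc_skb(ar, len);
	if (!txdesc) {
		res = -ENOMEM;
		goto err_unmap;
	}

	/* ... fill the HTT mgmt tx descriptor, then: */
	res = ath10k_htc_send(&htt->ar->htc, htt->eid, txdesc);
	if (res)
		goto err_free_txdesc;

	return 0;

err_free_txdesc:
	dev_kfree_skb_any(txdesc);
err_unmap:
	dma_unmap_single(dev, ATH10K_SKB_CB(msdu)->paddr,
			 msdu->len, DMA_TO_DEVICE);
err_free_msdu_id:
	spin_lock_bh(&htt->tx_lock);
	ath10k_htt_tx_free_msdu_id(htt, msdu_id);
	spin_unlock_bh(&htt->tx_lock);
	return res;
}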
530 struct ath10k *ar = htt->ar; in ath10k_htt_tx() local
531 struct device *dev = ar->dev; in ath10k_htt_tx()
548 ar->hw_params.max_probe_resp_desc_thres) { in ath10k_htt_tx()
599 if (ar->hw_params.continuous_frag_desc) { in ath10k_htt_tx()
665 !test_bit(ATH10K_FLAG_RAW_MODE, &ar->dev_flags)) { in ath10k_htt_tx()
668 if (ar->hw_params.continuous_frag_desc) in ath10k_htt_tx()
687 trace_ath10k_htt_tx(ar, msdu_id, msdu->len, vdev_id, tid); in ath10k_htt_tx()
688 ath10k_dbg(ar, ATH10K_DBG_HTT, in ath10k_htt_tx()
692 ath10k_dbg_dump(ar, ATH10K_DBG_HTT_DUMP, NULL, "htt tx msdu: ", in ath10k_htt_tx()
694 trace_ath10k_tx_hdr(ar, msdu->data, msdu->len); in ath10k_htt_tx()
695 trace_ath10k_tx_payload(ar, msdu->data, msdu->len); in ath10k_htt_tx()
712 res = ath10k_hif_tx_sg(htt->ar, in ath10k_htt_tx()
713 htt->ar->htc.endpoint[htt->eid].ul_pipe_id, in ath10k_htt_tx()
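The data path ends at ath10k_hif_tx_sg(): rather than one linear buffer, the prebuilt txbuf descriptor and the DMA-mapped msdu go out as a two-entry scatter-gather list on the HTC endpoint's upload pipe. A sketch of how these final hits fit together (field names follow the driver's ath10k_hif_sg_item; txbuf, txbuf_paddr, and msdu come from earlier in ath10k_htt_tx() and are assumed here):

/* Sketch: send the per-MSDU HTT descriptor and the frame itself as a
 * two-entry scatter-gather list on the endpoint's upload pipe.
 */
struct ath10k_hif_sg_item sg_items[2];

sg_items[0].transfer_id = 0;
sg_items[0].transfer_context = NULL;
sg_items[0].vaddr = &txbuf->htc_hdr;	/* prebuilt HTC+HTT tx descriptor */
sg_items[0].paddr = txbuf_paddr;
sg_items[0].len = sizeof(txbuf->htc_hdr) + sizeof(txbuf->cmd_hdr) +
		  sizeof(txbuf->cmd_tx);

sg_items[1].transfer_id = 0;
sg_items[1].transfer_context = NULL;
sg_items[1].vaddr = msdu->data;		/* the frame mapped earlier */
sg_items[1].paddr = ATH10K_SKB_CB(msdu)->paddr;
sg_items[1].len = msdu->len;

res = ath10k_hif_tx_sg(htt->ar,
		       htt->ar->htc.endpoint[htt->eid].ul_pipe_id,
		       sg_items, ARRAY_SIZE(sg_items));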