Lines matching refs:sc — call sites referencing sc (the struct ath_softc driver-instance pointer) in the ath9k transmit code

50 static void ath_tx_send_normal(struct ath_softc *sc, struct ath_txq *txq,
52 static void ath_tx_complete(struct ath_softc *sc, struct sk_buff *skb,
54 static void ath_tx_complete_buf(struct ath_softc *sc, struct ath_buf *bf,
57 static void ath_tx_txqaddbuf(struct ath_softc *sc, struct ath_txq *txq,
59 static void ath_tx_rc_status(struct ath_softc *sc, struct ath_buf *bf,
62 static void ath_tx_update_baw(struct ath_softc *sc, struct ath_atx_tid *tid,
64 static struct ath_buf *ath_tx_setup_buffer(struct ath_softc *sc,
80 void ath_txq_lock(struct ath_softc *sc, struct ath_txq *txq) in ath_txq_lock() argument
86 void ath_txq_unlock(struct ath_softc *sc, struct ath_txq *txq) in ath_txq_unlock() argument
92 void ath_txq_unlock_complete(struct ath_softc *sc, struct ath_txq *txq) in ath_txq_unlock_complete() argument
103 ieee80211_tx_status(sc->hw, skb); in ath_txq_unlock_complete()
106 static void ath_tx_queue_tid(struct ath_softc *sc, struct ath_txq *txq, in ath_tx_queue_tid() argument
145 static void ath_txq_skb_done(struct ath_softc *sc, struct ath_txq *txq, in ath_txq_skb_done() argument
155 txq = sc->tx.txq_map[q]; in ath_txq_skb_done()
160 txq->pending_frames < sc->tx.txq_max_pending[q]) { in ath_txq_skb_done()
162 ieee80211_wake_queue(sc->hw, info->hw_queue); in ath_txq_skb_done()
164 ieee80211_wake_queue(sc->hw, q); in ath_txq_skb_done()
170 ath_get_skb_tid(struct ath_softc *sc, struct ath_node *an, struct sk_buff *skb) in ath_get_skb_tid() argument
198 ath_tx_tid_change_state(struct ath_softc *sc, struct ath_atx_tid *tid) in ath_tx_tid_change_state() argument
216 bf = ath_tx_setup_buffer(sc, txq, tid, skb); in ath_tx_tid_change_state()
219 ath_txq_skb_done(sc, txq, skb); in ath_tx_tid_change_state()
220 ieee80211_free_txskb(sc->hw, skb); in ath_tx_tid_change_state()
227 static void ath_tx_flush_tid(struct ath_softc *sc, struct ath_atx_tid *tid) in ath_tx_flush_tid() argument
245 ath_txq_skb_done(sc, txq, skb); in ath_tx_flush_tid()
246 ieee80211_free_txskb(sc->hw, skb); in ath_tx_flush_tid()
251 ath_tx_update_baw(sc, tid, bf->bf_state.seqno); in ath_tx_flush_tid()
256 ath_tx_complete_buf(sc, bf, txq, &bf_head, &ts, 0); in ath_tx_flush_tid()
260 ath_txq_unlock(sc, txq); in ath_tx_flush_tid()
262 ath_txq_lock(sc, txq); in ath_tx_flush_tid()
266 static void ath_tx_update_baw(struct ath_softc *sc, struct ath_atx_tid *tid, in ath_tx_update_baw() argument
284 static void ath_tx_addto_baw(struct ath_softc *sc, struct ath_atx_tid *tid, in ath_tx_addto_baw() argument
303 static void ath_tid_drain(struct ath_softc *sc, struct ath_txq *txq, in ath_tid_drain() argument
321 ath_tx_complete(sc, skb, ATH_TX_ERROR, txq); in ath_tid_drain()
326 ath_tx_complete_buf(sc, bf, txq, &bf_head, &ts, 0); in ath_tid_drain()
330 static void ath_tx_set_retry(struct ath_softc *sc, struct ath_txq *txq, in ath_tx_set_retry() argument
346 dma_sync_single_for_device(sc->dev, bf->bf_buf_addr, in ath_tx_set_retry()
350 static struct ath_buf *ath_tx_get_buffer(struct ath_softc *sc) in ath_tx_get_buffer() argument
354 spin_lock_bh(&sc->tx.txbuflock); in ath_tx_get_buffer()
356 if (unlikely(list_empty(&sc->tx.txbuf))) { in ath_tx_get_buffer()
357 spin_unlock_bh(&sc->tx.txbuflock); in ath_tx_get_buffer()
361 bf = list_first_entry(&sc->tx.txbuf, struct ath_buf, list); in ath_tx_get_buffer()
364 spin_unlock_bh(&sc->tx.txbuflock); in ath_tx_get_buffer()
369 static void ath_tx_return_buffer(struct ath_softc *sc, struct ath_buf *bf) in ath_tx_return_buffer() argument
371 spin_lock_bh(&sc->tx.txbuflock); in ath_tx_return_buffer()
372 list_add_tail(&bf->list, &sc->tx.txbuf); in ath_tx_return_buffer()
373 spin_unlock_bh(&sc->tx.txbuflock); in ath_tx_return_buffer()
376 static struct ath_buf* ath_clone_txbuf(struct ath_softc *sc, struct ath_buf *bf) in ath_clone_txbuf() argument
380 tbf = ath_tx_get_buffer(sc); in ath_clone_txbuf()
388 memcpy(tbf->bf_desc, bf->bf_desc, sc->sc_ah->caps.tx_desc_len); in ath_clone_txbuf()
395 static void ath_tx_count_frames(struct ath_softc *sc, struct ath_buf *bf, in ath_tx_count_frames() argument
427 static void ath_tx_complete_aggr(struct ath_softc *sc, struct ath_txq *txq, in ath_tx_complete_aggr() argument
434 struct ieee80211_hw *hw = sc->hw; in ath_tx_complete_aggr()
476 ath_tx_complete_buf(sc, bf, txq, &bf_head, ts, 0); in ath_tx_complete_aggr()
484 tid = ath_get_skb_tid(sc, an, skb); in ath_tx_complete_aggr()
514 if (sc->sc_ah->opmode == NL80211_IFTYPE_STATION) in ath_tx_complete_aggr()
521 ath_tx_count_frames(sc, bf, ts, txok, &nframes, &nbad); in ath_tx_complete_aggr()
550 ath_tx_set_retry(sc, txq, bf->bf_mpdu, in ath_tx_complete_aggr()
574 ath_tx_update_baw(sc, tid, seqno); in ath_tx_complete_aggr()
578 ath_tx_rc_status(sc, bf, ts, nframes, nbad, txok); in ath_tx_complete_aggr()
581 ath_dynack_sample_tx_ts(sc->sc_ah, in ath_tx_complete_aggr()
586 ath_tx_complete_buf(sc, bf, txq, &bf_head, ts, in ath_tx_complete_aggr()
597 tbf = ath_clone_txbuf(sc, bf_last); in ath_tx_complete_aggr()
604 ath_tx_update_baw(sc, tid, seqno); in ath_tx_complete_aggr()
606 ath_tx_complete_buf(sc, bf, txq, in ath_tx_complete_aggr()
633 ath_tx_queue_tid(sc, txq, tid); in ath_tx_complete_aggr()
646 ath_txq_unlock(sc, txq); in ath_tx_complete_aggr()
648 ath_txq_lock(sc, txq); in ath_tx_complete_aggr()
654 ath9k_queue_reset(sc, RESET_TYPE_TX_ERROR); in ath_tx_complete_aggr()
663 static void ath_tx_process_buffer(struct ath_softc *sc, struct ath_txq *txq, in ath_tx_process_buffer() argument
678 ts->duration = ath9k_hw_get_duration(sc->sc_ah, bf->bf_desc, in ath_tx_process_buffer()
685 ath_tx_rc_status(sc, bf, ts, 1, txok ? 0 : 1, txok); in ath_tx_process_buffer()
686 ath_dynack_sample_tx_ts(sc->sc_ah, bf->bf_mpdu, ts); in ath_tx_process_buffer()
688 ath_tx_complete_buf(sc, bf, txq, bf_head, ts, txok); in ath_tx_process_buffer()
690 ath_tx_complete_aggr(sc, txq, bf, bf_head, ts, txok); in ath_tx_process_buffer()
693 ath_txq_schedule(sc, txq); in ath_tx_process_buffer()
718 static u32 ath_lookup_rate(struct ath_softc *sc, struct ath_buf *bf, in ath_lookup_rate() argument
758 frmlen = sc->tx.max_aggr_framelen[q][modeidx][rates[i].idx]; in ath_lookup_rate()
775 bt_aggr_limit = ath9k_btcoex_aggr_limit(sc, max_4ms_framelen); in ath_lookup_rate()
789 static int ath_compute_num_delims(struct ath_softc *sc, struct ath_atx_tid *tid, in ath_compute_num_delims() argument
810 !(sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA)) in ath_compute_num_delims()
817 if (first_subfrm && !AR_SREV_9580_10_OR_LATER(sc->sc_ah) && in ath_compute_num_delims()
818 (sc->sc_ah->ent_mode & AR_ENT_OTP_MIN_PKT_SIZE_DISABLE)) in ath_compute_num_delims()
860 ath_tx_get_tid_subframe(struct ath_softc *sc, struct ath_txq *txq, in ath_tx_get_tid_subframe() argument
881 bf = ath_tx_setup_buffer(sc, txq, tid, skb); in ath_tx_get_tid_subframe()
887 ath_txq_skb_done(sc, txq, skb); in ath_tx_get_tid_subframe()
888 ieee80211_free_txskb(sc->hw, skb); in ath_tx_get_tid_subframe()
925 ath_tx_update_baw(sc, tid, seqno); in ath_tx_get_tid_subframe()
926 ath_tx_complete_buf(sc, bf, txq, &bf_head, &ts, 0); in ath_tx_get_tid_subframe()
937 ath_tx_form_aggr(struct ath_softc *sc, struct ath_txq *txq, in ath_tx_form_aggr() argument
953 aggr_limit = ath_lookup_rate(sc, bf, tid); in ath_tx_form_aggr()
979 ndelim = ath_compute_num_delims(sc, tid, bf_first, fi->framelen, in ath_tx_form_aggr()
988 ath_tx_addto_baw(sc, tid, bf); in ath_tx_form_aggr()
998 bf = ath_tx_get_tid_subframe(sc, txq, tid, &tid_q); in ath_tx_form_aggr()
1027 static u32 ath_pkt_duration(struct ath_softc *sc, u8 rix, int pktlen, in ath_pkt_duration() argument
1067 void ath_update_max_aggr_framelen(struct ath_softc *sc, int queue, int txop) in ath_update_max_aggr_framelen() argument
1076 cur_ht20 = sc->tx.max_aggr_framelen[queue][MCS_HT20]; in ath_update_max_aggr_framelen()
1077 cur_ht20_sgi = sc->tx.max_aggr_framelen[queue][MCS_HT20_SGI]; in ath_update_max_aggr_framelen()
1078 cur_ht40 = sc->tx.max_aggr_framelen[queue][MCS_HT40]; in ath_update_max_aggr_framelen()
1079 cur_ht40_sgi = sc->tx.max_aggr_framelen[queue][MCS_HT40_SGI]; in ath_update_max_aggr_framelen()
1088 static u8 ath_get_rate_txpower(struct ath_softc *sc, struct ath_buf *bf, in ath_get_rate_txpower() argument
1095 struct ath_hw *ah = sc->sc_ah; in ath_get_rate_txpower()
1097 if (sc->tx99_state || !ah->tpc_enabled) in ath_get_rate_txpower()
1161 static void ath_buf_set_rate(struct ath_softc *sc, struct ath_buf *bf, in ath_buf_set_rate() argument
1164 struct ath_hw *ah = sc->sc_ah; in ath_buf_set_rate()
1172 u32 rts_thresh = sc->hw->wiphy->rts_threshold; in ath_buf_set_rate()
1225 info->rates[i].ChSel = ath_txchainmask_reduction(sc, in ath_buf_set_rate()
1227 info->rates[i].PktDuration = ath_pkt_duration(sc, rix, len, in ath_buf_set_rate()
1232 info->txpower[i] = ath_get_rate_txpower(sc, bf, rix, in ath_buf_set_rate()
1256 info->rates[i].ChSel = ath_txchainmask_reduction(sc, in ath_buf_set_rate()
1259 info->rates[i].PktDuration = ath9k_hw_computetxtime(sc->sc_ah, in ath_buf_set_rate()
1263 info->txpower[i] = ath_get_rate_txpower(sc, bf, rix, false, in ath_buf_set_rate()
1268 if (bf_isaggr(bf) && (len > sc->sc_ah->caps.rts_aggr_limit)) in ath_buf_set_rate()
1299 static void ath_tx_fill_desc(struct ath_softc *sc, struct ath_buf *bf, in ath_tx_fill_desc() argument
1302 struct ath_hw *ah = sc->sc_ah; in ath_tx_fill_desc()
1305 u32 rts_thresh = sc->hw->wiphy->rts_threshold; in ath_tx_fill_desc()
1323 info.link = (sc->tx99_state) ? bf->bf_daddr : 0; in ath_tx_fill_desc()
1328 if (!sc->tx99_state) in ath_tx_fill_desc()
1331 txq == sc->tx.uapsdq) in ath_tx_fill_desc()
1362 ath_buf_set_rate(sc, bf, &info, len, rts); in ath_tx_fill_desc()
1392 ath_tx_form_burst(struct ath_softc *sc, struct ath_txq *txq, in ath_tx_form_burst() argument
1414 bf = ath_tx_get_tid_subframe(sc, txq, tid, &tid_q); in ath_tx_form_burst()
1426 static bool ath_tx_sched_aggr(struct ath_softc *sc, struct ath_txq *txq, in ath_tx_sched_aggr() argument
1441 bf = ath_tx_get_tid_subframe(sc, txq, tid, &tid_q); in ath_tx_sched_aggr()
1455 last = ath_tx_form_aggr(sc, txq, tid, &bf_q, bf, in ath_tx_sched_aggr()
1458 ath_tx_form_burst(sc, txq, tid, &bf_q, bf, tid_q); in ath_tx_sched_aggr()
1468 ath_tx_fill_desc(sc, bf, txq, aggr_len); in ath_tx_sched_aggr()
1469 ath_tx_txqaddbuf(sc, txq, &bf_q, false); in ath_tx_sched_aggr()
1473 int ath_tx_aggr_start(struct ath_softc *sc, struct ieee80211_sta *sta, in ath_tx_aggr_start() argument
1485 ath_txq_lock(sc, txq); in ath_tx_aggr_start()
1499 ath_tx_tid_change_state(sc, txtid); in ath_tx_aggr_start()
1508 ath_txq_unlock_complete(sc, txq); in ath_tx_aggr_start()
1513 void ath_tx_aggr_stop(struct ath_softc *sc, struct ieee80211_sta *sta, u16 tid) in ath_tx_aggr_stop() argument
1519 ath_txq_lock(sc, txq); in ath_tx_aggr_stop()
1521 ath_tx_flush_tid(sc, txtid); in ath_tx_aggr_stop()
1522 ath_tx_tid_change_state(sc, txtid); in ath_tx_aggr_stop()
1523 ath_txq_unlock_complete(sc, txq); in ath_tx_aggr_stop()
1526 void ath_tx_aggr_sleep(struct ieee80211_sta *sta, struct ath_softc *sc, in ath_tx_aggr_sleep() argument
1539 ath_txq_lock(sc, txq); in ath_tx_aggr_sleep()
1542 ath_txq_unlock(sc, txq); in ath_tx_aggr_sleep()
1550 ath_txq_unlock(sc, txq); in ath_tx_aggr_sleep()
1556 void ath_tx_aggr_wakeup(struct ath_softc *sc, struct ath_node *an) in ath_tx_aggr_wakeup() argument
1567 ath_txq_lock(sc, txq); in ath_tx_aggr_wakeup()
1571 ath_tx_queue_tid(sc, txq, tid); in ath_tx_aggr_wakeup()
1572 ath_txq_schedule(sc, txq); in ath_tx_aggr_wakeup()
1575 ath_txq_unlock_complete(sc, txq); in ath_tx_aggr_wakeup()
1579 void ath_tx_aggr_resume(struct ath_softc *sc, struct ieee80211_sta *sta, in ath_tx_aggr_resume() argument
1590 ath_txq_lock(sc, txq); in ath_tx_aggr_resume()
1595 ath_tx_queue_tid(sc, txq, tid); in ath_tx_aggr_resume()
1596 ath_txq_schedule(sc, txq); in ath_tx_aggr_resume()
1599 ath_txq_unlock_complete(sc, txq); in ath_tx_aggr_resume()
1608 struct ath_softc *sc = hw->priv; in ath9k_release_buffered_frames() local
1610 struct ath_txq *txq = sc->tx.uapsdq; in ath9k_release_buffered_frames()
1627 ath_txq_lock(sc, tid->txq); in ath9k_release_buffered_frames()
1629 bf = ath_tx_get_tid_subframe(sc, sc->tx.uapsdq, tid, &tid_q); in ath9k_release_buffered_frames()
1637 ath_tx_addto_baw(sc, tid, bf); in ath9k_release_buffered_frames()
1651 ath_txq_unlock_complete(sc, tid->txq); in ath9k_release_buffered_frames()
1661 ath_txq_lock(sc, txq); in ath9k_release_buffered_frames()
1662 ath_tx_fill_desc(sc, bf, txq, 0); in ath9k_release_buffered_frames()
1663 ath_tx_txqaddbuf(sc, txq, &bf_q, false); in ath9k_release_buffered_frames()
1664 ath_txq_unlock(sc, txq); in ath9k_release_buffered_frames()
1671 struct ath_txq *ath_txq_setup(struct ath_softc *sc, int qtype, int subtype) in ath_txq_setup() argument
1673 struct ath_hw *ah = sc->sc_ah; in ath_txq_setup()
1722 if (!ATH_TXQ_SETUP(sc, axq_qnum)) { in ath_txq_setup()
1723 struct ath_txq *txq = &sc->tx.txq[axq_qnum]; in ath_txq_setup()
1734 sc->tx.txqsetup |= 1<<axq_qnum; in ath_txq_setup()
1740 return &sc->tx.txq[axq_qnum]; in ath_txq_setup()
1743 int ath_txq_update(struct ath_softc *sc, int qnum, in ath_txq_update() argument
1746 struct ath_hw *ah = sc->sc_ah; in ath_txq_update()
1750 BUG_ON(sc->tx.txq[qnum].axq_qnum != qnum); in ath_txq_update()
1760 ath_err(ath9k_hw_common(sc->sc_ah), in ath_txq_update()
1770 int ath_cabq_update(struct ath_softc *sc) in ath_cabq_update() argument
1773 struct ath_beacon_config *cur_conf = &sc->cur_chan->beacon; in ath_cabq_update()
1774 int qnum = sc->beacon.cabq->axq_qnum; in ath_cabq_update()
1776 ath9k_hw_get_txq_props(sc->sc_ah, qnum, &qi); in ath_cabq_update()
1780 ath_txq_update(sc, qnum, &qi); in ath_cabq_update()
1785 static void ath_drain_txq_list(struct ath_softc *sc, struct ath_txq *txq, in ath_drain_txq_list() argument
1802 ath_tx_return_buffer(sc, bf); in ath_drain_txq_list()
1808 ath_tx_process_buffer(sc, txq, &ts, bf, &bf_head); in ath_drain_txq_list()
1818 void ath_draintxq(struct ath_softc *sc, struct ath_txq *txq) in ath_draintxq() argument
1820 ath_txq_lock(sc, txq); in ath_draintxq()
1822 if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA) { in ath_draintxq()
1826 ath_drain_txq_list(sc, txq, &txq->txq_fifo[idx]); in ath_draintxq()
1835 ath_drain_txq_list(sc, txq, &txq->axq_q); in ath_draintxq()
1837 ath_txq_unlock_complete(sc, txq); in ath_draintxq()
1840 bool ath_drain_all_txq(struct ath_softc *sc) in ath_drain_all_txq() argument
1842 struct ath_hw *ah = sc->sc_ah; in ath_drain_all_txq()
1843 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_drain_all_txq()
1855 if (!ATH_TXQ_SETUP(sc, i)) in ath_drain_all_txq()
1858 if (!sc->tx.txq[i].axq_depth) in ath_drain_all_txq()
1861 if (ath9k_hw_numtxpending(ah, sc->tx.txq[i].axq_qnum)) in ath_drain_all_txq()
1866 RESET_STAT_INC(sc, RESET_TX_DMA_ERROR); in ath_drain_all_txq()
1872 if (!ATH_TXQ_SETUP(sc, i)) in ath_drain_all_txq()
1880 txq = &sc->tx.txq[i]; in ath_drain_all_txq()
1882 ath_draintxq(sc, txq); in ath_drain_all_txq()
1888 void ath_tx_cleanupq(struct ath_softc *sc, struct ath_txq *txq) in ath_tx_cleanupq() argument
1890 ath9k_hw_releasetxqueue(sc->sc_ah, txq->axq_qnum); in ath_tx_cleanupq()
1891 sc->tx.txqsetup &= ~(1<<txq->axq_qnum); in ath_tx_cleanupq()
1897 void ath_txq_schedule(struct ath_softc *sc, struct ath_txq *txq) in ath_txq_schedule() argument
1899 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_txq_schedule()
1910 spin_lock_bh(&sc->chan_lock); in ath_txq_schedule()
1911 tid_list = &sc->cur_chan->acq[txq->mac80211_qnum]; in ath_txq_schedule()
1914 spin_unlock_bh(&sc->chan_lock); in ath_txq_schedule()
1924 if (sc->cur_chan->stopped) in ath_txq_schedule()
1930 if (ath_tx_sched_aggr(sc, txq, tid, &stop)) in ath_txq_schedule()
1938 ath_tx_queue_tid(sc, txq, tid); in ath_txq_schedule()
1954 spin_unlock_bh(&sc->chan_lock); in ath_txq_schedule()
1957 void ath_txq_schedule_all(struct ath_softc *sc) in ath_txq_schedule_all() argument
1963 txq = sc->tx.txq_map[i]; in ath_txq_schedule_all()
1966 ath_txq_schedule(sc, txq); in ath_txq_schedule_all()
1979 static void ath_tx_txqaddbuf(struct ath_softc *sc, struct ath_txq *txq, in ath_tx_txqaddbuf() argument
1982 struct ath_hw *ah = sc->sc_ah; in ath_tx_txqaddbuf()
2028 if (!edma || sc->tx99_state) { in ath_tx_txqaddbuf()
2046 static void ath_tx_send_normal(struct ath_softc *sc, struct ath_txq *txq, in ath_tx_send_normal() argument
2059 ath_tx_addto_baw(sc, tid, bf); in ath_tx_send_normal()
2064 ath_tx_fill_desc(sc, bf, txq, fi->framelen); in ath_tx_send_normal()
2065 ath_tx_txqaddbuf(sc, txq, &bf_head, false); in ath_tx_send_normal()
2104 struct ath_softc *sc = hw->priv; in setup_frame_info() local
2106 txpower = sc->cur_chan->cur_txpower; in setup_frame_info()
2128 u8 ath_txchainmask_reduction(struct ath_softc *sc, u8 chainmask, u32 rate) in ath_txchainmask_reduction() argument
2130 struct ath_hw *ah = sc->sc_ah; in ath_txchainmask_reduction()
2147 static struct ath_buf *ath_tx_setup_buffer(struct ath_softc *sc, in ath_tx_setup_buffer() argument
2152 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_setup_buffer()
2159 bf = ath_tx_get_buffer(sc); in ath_tx_setup_buffer()
2183 bf->bf_buf_addr = dma_map_single(sc->dev, skb->data, in ath_tx_setup_buffer()
2185 if (unlikely(dma_mapping_error(sc->dev, bf->bf_buf_addr))) { in ath_tx_setup_buffer()
2188 ath_err(ath9k_hw_common(sc->sc_ah), in ath_tx_setup_buffer()
2190 ath_tx_return_buffer(sc, bf); in ath_tx_setup_buffer()
2229 struct ath_softc *sc = hw->priv; in ath_tx_prepare() local
2244 ath_assign_seq(ath9k_hw_common(sc->sc_ah), skb); in ath_tx_prepare()
2277 struct ath_softc *sc = hw->priv; in ath_tx_start() local
2304 ath_txq_lock(sc, txq); in ath_tx_start()
2305 if (txq == sc->tx.txq_map[q]) { in ath_tx_start()
2307 if (++txq->pending_frames > sc->tx.txq_max_pending[q] && in ath_tx_start()
2310 ieee80211_stop_queue(sc->hw, info->hw_queue); in ath_tx_start()
2312 ieee80211_stop_queue(sc->hw, q); in ath_tx_start()
2323 if (((avp && avp->chanctx != sc->cur_chan) || in ath_tx_start()
2324 sc->cur_chan->stopped) && !txctl->force_channel) { in ath_tx_start()
2332 tid = ath_get_skb_tid(sc, txctl->an, skb); in ath_tx_start()
2335 ath_txq_unlock(sc, txq); in ath_tx_start()
2336 txq = sc->tx.uapsdq; in ath_tx_start()
2337 ath_txq_lock(sc, txq); in ath_tx_start()
2351 ath_tx_queue_tid(sc, txq, tid); in ath_tx_start()
2353 ath_txq_schedule(sc, txq); in ath_tx_start()
2357 bf = ath_tx_setup_buffer(sc, txq, tid, skb); in ath_tx_start()
2359 ath_txq_skb_done(sc, txq, skb); in ath_tx_start()
2363 ieee80211_free_txskb(sc->hw, skb); in ath_tx_start()
2373 ath_tx_send_normal(sc, txq, tid, skb); in ath_tx_start()
2376 ath_txq_unlock(sc, txq); in ath_tx_start()
2384 struct ath_softc *sc = hw->priv; in ath_tx_cabq() local
2386 .txq = sc->beacon.cabq in ath_tx_cabq()
2397 sc->cur_chan->beacon.beacon_interval * 1000 * in ath_tx_cabq()
2398 sc->cur_chan->beacon.dtim_period / ATH_BCBUF; in ath_tx_cabq()
2406 bf = ath_tx_setup_buffer(sc, txctl.txq, NULL, skb); in ath_tx_cabq()
2412 ath_buf_set_rate(sc, bf, &info, fi->framelen, false); in ath_tx_cabq()
2438 dma_sync_single_for_device(sc->dev, bf->bf_buf_addr, in ath_tx_cabq()
2442 ath_txq_lock(sc, txctl.txq); in ath_tx_cabq()
2443 ath_tx_fill_desc(sc, bf, txctl.txq, 0); in ath_tx_cabq()
2444 ath_tx_txqaddbuf(sc, txctl.txq, &bf_q, false); in ath_tx_cabq()
2446 ath_txq_unlock(sc, txctl.txq); in ath_tx_cabq()
2453 static void ath_tx_complete(struct ath_softc *sc, struct sk_buff *skb, in ath_tx_complete() argument
2457 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_complete()
2464 if (sc->sc_ah->caldata) in ath_tx_complete()
2465 set_bit(PAPRD_PACKET_SENT, &sc->sc_ah->caldata->cal_flags); in ath_tx_complete()
2485 spin_lock_irqsave(&sc->sc_pm_lock, flags); in ath_tx_complete()
2486 if ((sc->ps_flags & PS_WAIT_FOR_TX_ACK) && !txq->axq_depth) { in ath_tx_complete()
2487 sc->ps_flags &= ~PS_WAIT_FOR_TX_ACK; in ath_tx_complete()
2490 sc->ps_flags & (PS_WAIT_FOR_BEACON | in ath_tx_complete()
2495 spin_unlock_irqrestore(&sc->sc_pm_lock, flags); in ath_tx_complete()
2498 ath_txq_skb_done(sc, txq, skb); in ath_tx_complete()
2501 static void ath_tx_complete_buf(struct ath_softc *sc, struct ath_buf *bf, in ath_tx_complete_buf() argument
2516 dma_unmap_single(sc->dev, bf->bf_buf_addr, skb->len, DMA_TO_DEVICE); in ath_tx_complete_buf()
2518 if (sc->tx99_state) in ath_tx_complete_buf()
2527 complete(&sc->paprd_complete); in ath_tx_complete_buf()
2529 ath_debug_stat_tx(sc, bf, ts, txq, tx_flags); in ath_tx_complete_buf()
2530 ath_tx_complete(sc, skb, tx_flags, txq); in ath_tx_complete_buf()
2541 spin_lock_irqsave(&sc->tx.txbuflock, flags); in ath_tx_complete_buf()
2542 list_splice_tail_init(bf_q, &sc->tx.txbuf); in ath_tx_complete_buf()
2543 spin_unlock_irqrestore(&sc->tx.txbuflock, flags); in ath_tx_complete_buf()
2546 static void ath_tx_rc_status(struct ath_softc *sc, struct ath_buf *bf, in ath_tx_rc_status() argument
2553 struct ieee80211_hw *hw = sc->hw; in ath_tx_rc_status()
2554 struct ath_hw *ah = sc->sc_ah; in ath_tx_rc_status()
2588 ah->tx_trig_level >= sc->sc_ah->config.max_txtrig_level) in ath_tx_rc_status()
2601 static void ath_tx_processq(struct ath_softc *sc, struct ath_txq *txq) in ath_tx_processq() argument
2603 struct ath_hw *ah = sc->sc_ah; in ath_tx_processq()
2612 txq->axq_qnum, ath9k_hw_gettxbuf(sc->sc_ah, txq->axq_qnum), in ath_tx_processq()
2615 ath_txq_lock(sc, txq); in ath_tx_processq()
2622 ath_txq_schedule(sc, txq); in ath_tx_processq()
2668 ath_tx_return_buffer(sc, bf_held); in ath_tx_processq()
2671 ath_tx_process_buffer(sc, txq, &ts, bf, &bf_head); in ath_tx_processq()
2673 ath_txq_unlock_complete(sc, txq); in ath_tx_processq()
2676 void ath_tx_tasklet(struct ath_softc *sc) in ath_tx_tasklet() argument
2678 struct ath_hw *ah = sc->sc_ah; in ath_tx_tasklet()
2683 if (ATH_TXQ_SETUP(sc, i) && (qcumask & (1 << i))) in ath_tx_tasklet()
2684 ath_tx_processq(sc, &sc->tx.txq[i]); in ath_tx_tasklet()
2688 void ath_tx_edma_tasklet(struct ath_softc *sc) in ath_tx_edma_tasklet() argument
2691 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_edma_tasklet()
2692 struct ath_hw *ah = sc->sc_ah; in ath_tx_edma_tasklet()
2712 if (ts.qid == sc->beacon.beaconq) { in ath_tx_edma_tasklet()
2713 sc->beacon.tx_processed = true; in ath_tx_edma_tasklet()
2714 sc->beacon.tx_last = !(ts.ts_status & ATH9K_TXERR_MASK); in ath_tx_edma_tasklet()
2717 ath_chanctx_event(sc, NULL, in ath_tx_edma_tasklet()
2721 ath9k_csa_update(sc); in ath_tx_edma_tasklet()
2725 txq = &sc->tx.txq[ts.qid]; in ath_tx_edma_tasklet()
2727 ath_txq_lock(sc, txq); in ath_tx_edma_tasklet()
2733 ath_txq_unlock(sc, txq); in ath_tx_edma_tasklet()
2740 ath_tx_return_buffer(sc, bf); in ath_tx_edma_tasklet()
2757 ath_tx_txqaddbuf(sc, txq, &bf_q, true); in ath_tx_edma_tasklet()
2766 ath_tx_process_buffer(sc, txq, &ts, bf, &bf_head); in ath_tx_edma_tasklet()
2767 ath_txq_unlock_complete(sc, txq); in ath_tx_edma_tasklet()
2775 static int ath_txstatus_setup(struct ath_softc *sc, int size) in ath_txstatus_setup() argument
2777 struct ath_descdma *dd = &sc->txsdma; in ath_txstatus_setup()
2778 u8 txs_len = sc->sc_ah->caps.txs_len; in ath_txstatus_setup()
2781 dd->dd_desc = dmam_alloc_coherent(sc->dev, dd->dd_desc_len, in ath_txstatus_setup()
2789 static int ath_tx_edma_init(struct ath_softc *sc) in ath_tx_edma_init() argument
2793 err = ath_txstatus_setup(sc, ATH_TXSTATUS_RING_SIZE); in ath_tx_edma_init()
2795 ath9k_hw_setup_statusring(sc->sc_ah, sc->txsdma.dd_desc, in ath_tx_edma_init()
2796 sc->txsdma.dd_desc_paddr, in ath_tx_edma_init()
2802 int ath_tx_init(struct ath_softc *sc, int nbufs) in ath_tx_init() argument
2804 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_init()
2807 spin_lock_init(&sc->tx.txbuflock); in ath_tx_init()
2809 error = ath_descdma_setup(sc, &sc->tx.txdma, &sc->tx.txbuf, in ath_tx_init()
2817 error = ath_descdma_setup(sc, &sc->beacon.bdma, &sc->beacon.bbuf, in ath_tx_init()
2825 INIT_DELAYED_WORK(&sc->tx_complete_work, ath_tx_complete_poll_work); in ath_tx_init()
2827 if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA) in ath_tx_init()
2828 error = ath_tx_edma_init(sc); in ath_tx_init()
2833 void ath_tx_node_init(struct ath_softc *sc, struct ath_node *an) in ath_tx_node_init() argument
2852 tid->txq = sc->tx.txq_map[acno]; in ath_tx_node_init()
2856 void ath_tx_node_cleanup(struct ath_softc *sc, struct ath_node *an) in ath_tx_node_cleanup() argument
2867 ath_txq_lock(sc, txq); in ath_tx_node_cleanup()
2872 ath_tid_drain(sc, txq, tid); in ath_tx_node_cleanup()
2875 ath_txq_unlock(sc, txq); in ath_tx_node_cleanup()
2881 int ath9k_tx99_send(struct ath_softc *sc, struct sk_buff *skb, in ath9k_tx99_send() argument
2886 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath9k_tx99_send()
2908 bf = ath_tx_setup_buffer(sc, txctl->txq, NULL, skb); in ath9k_tx99_send()
2914 ath_set_rates(sc->tx99_vif, NULL, bf); in ath9k_tx99_send()
2916 ath9k_hw_set_desc_link(sc->sc_ah, bf->bf_desc, bf->bf_daddr); in ath9k_tx99_send()
2917 ath9k_hw_tx99_start(sc->sc_ah, txctl->txq->axq_qnum); in ath9k_tx99_send()
2919 ath_tx_send_normal(sc, txctl->txq, NULL, skb); in ath9k_tx99_send()
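
Nearly every call site above threads the same struct ath_softc *sc driver-instance pointer through the transmit path, and the ath_tx_get_buffer()/ath_tx_return_buffer() lines show the tx buffer free list being guarded by sc->tx.txbuflock before the head is popped or a buffer is pushed back. The following is a minimal user-space sketch of that free-list pattern, not driver code: the struct names, field names, and helper names are hypothetical stand-ins, and a pthread mutex is used in place of the kernel's spin_lock_bh().

    #include <pthread.h>
    #include <stddef.h>
    #include <stdio.h>

    /* Hypothetical stand-ins for struct ath_softc / struct ath_buf. */
    struct buf {
            struct buf *next;
            int id;
    };

    struct softc {
            pthread_mutex_t txbuflock;   /* plays the role of sc->tx.txbuflock */
            struct buf *txbuf;           /* free list, like sc->tx.txbuf */
    };

    /* Mirrors the shape of ath_tx_get_buffer(): take the lock, pop the head,
     * drop the lock, return NULL if the list is empty. */
    static struct buf *get_buffer(struct softc *sc)
    {
            struct buf *bf = NULL;

            pthread_mutex_lock(&sc->txbuflock);
            if (sc->txbuf) {
                    bf = sc->txbuf;
                    sc->txbuf = bf->next;
            }
            pthread_mutex_unlock(&sc->txbuflock);
            return bf;
    }

    /* Mirrors the shape of ath_tx_return_buffer(): push the buffer back
     * onto the free list under the same lock. */
    static void return_buffer(struct softc *sc, struct buf *bf)
    {
            pthread_mutex_lock(&sc->txbuflock);
            bf->next = sc->txbuf;
            sc->txbuf = bf;
            pthread_mutex_unlock(&sc->txbuflock);
    }

    int main(void)
    {
            struct buf pool[2] = { { &pool[1], 0 }, { NULL, 1 } };
            struct softc sc = { PTHREAD_MUTEX_INITIALIZER, &pool[0] };
            struct buf *bf = get_buffer(&sc);

            if (bf) {
                    printf("got buffer %d\n", bf->id);
                    return_buffer(&sc, bf);
            }
            return 0;
    }

The same bracketing discipline appears throughout the listing at a coarser granularity: queue work is wrapped in ath_txq_lock(sc, txq) / ath_txq_unlock(sc, txq) (or ath_txq_unlock_complete() when completed frames must be handed to mac80211 after the lock is dropped).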