Lines matching refs: tid (ath9k transmit path)

51 			       struct ath_atx_tid *tid, struct sk_buff *skb);
62 static void ath_tx_update_baw(struct ath_softc *sc, struct ath_atx_tid *tid,
66 struct ath_atx_tid *tid,
107 struct ath_atx_tid *tid) in ath_tx_queue_tid() argument
110 struct ath_vif *avp = (struct ath_vif *) tid->an->vif->drv_priv; in ath_tx_queue_tid()
116 list = &ctx->acq[TID_TO_WME_AC(tid->tidno)]; in ath_tx_queue_tid()
117 if (list_empty(&tid->list)) in ath_tx_queue_tid()
118 list_add_tail(&tid->list, list); in ath_tx_queue_tid()
129 static void ath_send_bar(struct ath_atx_tid *tid, u16 seqno) in ath_send_bar() argument
131 if (!tid->an->sta) in ath_send_bar()
134 ieee80211_send_bar(tid->an->vif, tid->an->sta->addr, tid->tidno, in ath_send_bar()
176 static bool ath_tid_has_buffered(struct ath_atx_tid *tid) in ath_tid_has_buffered() argument
178 return !skb_queue_empty(&tid->buf_q) || !skb_queue_empty(&tid->retry_q); in ath_tid_has_buffered()
181 static struct sk_buff *ath_tid_dequeue(struct ath_atx_tid *tid) in ath_tid_dequeue() argument
185 skb = __skb_dequeue(&tid->retry_q); in ath_tid_dequeue()
187 skb = __skb_dequeue(&tid->buf_q); in ath_tid_dequeue()
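
ath_tid_has_buffered() and ath_tid_dequeue() above encode the ordering rule for a TID's two software queues: frames waiting for retransmission are always drained before newly queued traffic. A minimal standalone sketch of that rule, using a hypothetical plain linked list in place of the driver's sk_buff queues:

/* Sketch only: simplified stand-ins for the driver's sk_buff queues. */
#include <stdbool.h>
#include <stddef.h>

struct frame { struct frame *next; };

struct tid_queues {
	struct frame *retry_q;   /* frames that failed and must go out first */
	struct frame *buf_q;     /* freshly enqueued frames */
};

static struct frame *pop(struct frame **head)
{
	struct frame *f = *head;
	if (f)
		*head = f->next;
	return f;
}

/* Same shape as ath_tid_has_buffered(). */
static bool tid_has_buffered(const struct tid_queues *q)
{
	return q->retry_q || q->buf_q;
}

/* Same shape as ath_tid_dequeue(): retries drain before new frames. */
static struct frame *tid_dequeue(struct tid_queues *q)
{
	struct frame *f = pop(&q->retry_q);
	if (!f)
		f = pop(&q->buf_q);
	return f;
}
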
198 ath_tx_tid_change_state(struct ath_softc *sc, struct ath_atx_tid *tid) in ath_tx_tid_change_state() argument
200 struct ath_txq *txq = tid->txq; in ath_tx_tid_change_state()
206 skb_queue_walk_safe(&tid->buf_q, skb, tskb) { in ath_tx_tid_change_state()
216 bf = ath_tx_setup_buffer(sc, txq, tid, skb); in ath_tx_tid_change_state()
218 __skb_unlink(skb, &tid->buf_q); in ath_tx_tid_change_state()
227 static void ath_tx_flush_tid(struct ath_softc *sc, struct ath_atx_tid *tid) in ath_tx_flush_tid() argument
229 struct ath_txq *txq = tid->txq; in ath_tx_flush_tid()
241 while ((skb = __skb_dequeue(&tid->retry_q))) { in ath_tx_flush_tid()
251 ath_tx_update_baw(sc, tid, bf->bf_state.seqno); in ath_tx_flush_tid()
261 ath_send_bar(tid, tid->seq_start); in ath_tx_flush_tid()
266 static void ath_tx_update_baw(struct ath_softc *sc, struct ath_atx_tid *tid, in ath_tx_update_baw() argument
271 index = ATH_BA_INDEX(tid->seq_start, seqno); in ath_tx_update_baw()
272 cindex = (tid->baw_head + index) & (ATH_TID_MAX_BUFS - 1); in ath_tx_update_baw()
274 __clear_bit(cindex, tid->tx_buf); in ath_tx_update_baw()
276 while (tid->baw_head != tid->baw_tail && !test_bit(tid->baw_head, tid->tx_buf)) { in ath_tx_update_baw()
277 INCR(tid->seq_start, IEEE80211_SEQ_MAX); in ath_tx_update_baw()
278 INCR(tid->baw_head, ATH_TID_MAX_BUFS); in ath_tx_update_baw()
279 if (tid->bar_index >= 0) in ath_tx_update_baw()
280 tid->bar_index--; in ath_tx_update_baw()
284 static void ath_tx_addto_baw(struct ath_softc *sc, struct ath_atx_tid *tid, in ath_tx_addto_baw() argument
291 index = ATH_BA_INDEX(tid->seq_start, seqno); in ath_tx_addto_baw()
292 cindex = (tid->baw_head + index) & (ATH_TID_MAX_BUFS - 1); in ath_tx_addto_baw()
293 __set_bit(cindex, tid->tx_buf); in ath_tx_addto_baw()
296 if (index >= ((tid->baw_tail - tid->baw_head) & in ath_tx_addto_baw()
298 tid->baw_tail = cindex; in ath_tx_addto_baw()
299 INCR(tid->baw_tail, ATH_TID_MAX_BUFS); in ath_tx_addto_baw()
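
ath_tx_addto_baw() and ath_tx_update_baw() together maintain the block-ack window as a circular bitmap anchored at seq_start: adding a frame sets the bit at its offset into the window and may extend baw_tail, while completing a frame clears its bit and slides seq_start/baw_head past any leading completed entries. A compact standalone model of that window arithmetic, assuming simplified names and omitting the bar_index bookkeeping:

/* Illustrative model of the block-ack window bookkeeping shown above.
 * Constants mirror IEEE80211_SEQ_MAX / ATH_TID_MAX_BUFS; the struct and
 * helper names are simplified, and bar_index handling is left out. */
#include <stdbool.h>

#define SEQ_MAX   4096
#define MAX_BUFS  64                                      /* power of two */
#define BA_INDEX(st, seq)  (((seq) - (st)) & (SEQ_MAX - 1))
#define INCR(v, sz)        ((v) = ((v) + 1) % (sz))

struct baw {
	bool tx_buf[MAX_BUFS];   /* bit set => frame still outstanding */
	int seq_start;           /* lowest sequence number in the window */
	int baw_head, baw_tail;  /* circular indices into tx_buf */
};

/* Mirrors ath_tx_addto_baw(): mark seqno outstanding, extend the tail. */
static void baw_add(struct baw *w, int seqno)
{
	int index  = BA_INDEX(w->seq_start, seqno);
	int cindex = (w->baw_head + index) & (MAX_BUFS - 1);

	w->tx_buf[cindex] = true;
	if (index >= ((w->baw_tail - w->baw_head) & (MAX_BUFS - 1))) {
		w->baw_tail = cindex;
		INCR(w->baw_tail, MAX_BUFS);
	}
}

/* Mirrors ath_tx_update_baw(): clear the bit for a completed frame,
 * then slide the window forward over completed entries at the head. */
static void baw_complete(struct baw *w, int seqno)
{
	int index  = BA_INDEX(w->seq_start, seqno);
	int cindex = (w->baw_head + index) & (MAX_BUFS - 1);

	w->tx_buf[cindex] = false;
	while (w->baw_head != w->baw_tail && !w->tx_buf[w->baw_head]) {
		INCR(w->seq_start, SEQ_MAX);
		INCR(w->baw_head, MAX_BUFS);
	}
}
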
304 struct ath_atx_tid *tid) in ath_tid_drain() argument
316 while ((skb = ath_tid_dequeue(tid))) { in ath_tid_drain()
437 struct ath_atx_tid *tid = NULL; in ath_tx_complete_aggr() local
484 tid = ath_get_skb_tid(sc, an, skb); in ath_tx_complete_aggr()
485 seq_first = tid->seq_start; in ath_tx_complete_aggr()
496 if (isba && tid->tidno != ts->tid) in ath_tx_complete_aggr()
532 if (!BAW_WITHIN(tid->seq_start, tid->baw_size, seqno) || in ath_tx_complete_aggr()
533 !tid->active) { in ath_tx_complete_aggr()
574 ath_tx_update_baw(sc, tid, seqno); in ath_tx_complete_aggr()
604 ath_tx_update_baw(sc, tid, seqno); in ath_tx_complete_aggr()
629 ieee80211_sta_set_buffered(sta, tid->tidno, true); in ath_tx_complete_aggr()
631 skb_queue_splice_tail(&bf_pending, &tid->retry_q); in ath_tx_complete_aggr()
633 ath_tx_queue_tid(sc, txq, tid); in ath_tx_complete_aggr()
636 tid->clear_ps_filter = true; in ath_tx_complete_aggr()
643 if (BAW_WITHIN(tid->seq_start, tid->baw_size, bar_seq)) in ath_tx_complete_aggr()
644 tid->bar_index = ATH_BA_INDEX(tid->seq_start, bar_seq); in ath_tx_complete_aggr()
647 ath_send_bar(tid, ATH_BA_INDEX2SEQ(seq_first, bar_index + 1)); in ath_tx_complete_aggr()
719 struct ath_atx_tid *tid) in ath_lookup_rate() argument
726 int q = tid->txq->mac80211_qnum; in ath_lookup_rate()
779 if (tid->an->maxampdu) in ath_lookup_rate()
780 aggr_limit = min(aggr_limit, tid->an->maxampdu); in ath_lookup_rate()
789 static int ath_compute_num_delims(struct ath_softc *sc, struct ath_atx_tid *tid, in ath_compute_num_delims() argument
831 if (tid->an->mpdudensity == 0) in ath_compute_num_delims()
840 nsymbols = NUM_SYMBOLS_PER_USEC_HALFGI(tid->an->mpdudensity); in ath_compute_num_delims()
842 nsymbols = NUM_SYMBOLS_PER_USEC(tid->an->mpdudensity); in ath_compute_num_delims()
861 struct ath_atx_tid *tid, struct sk_buff_head **q) in ath_tx_get_tid_subframe() argument
870 *q = &tid->retry_q; in ath_tx_get_tid_subframe()
872 *q = &tid->buf_q; in ath_tx_get_tid_subframe()
881 bf = ath_tx_setup_buffer(sc, txq, tid, skb); in ath_tx_get_tid_subframe()
903 if (!tid->active) in ath_tx_get_tid_subframe()
915 if (!BAW_WITHIN(tid->seq_start, tid->baw_size, seqno)) in ath_tx_get_tid_subframe()
918 if (tid->bar_index > ATH_BA_INDEX(tid->seq_start, seqno)) { in ath_tx_get_tid_subframe()
925 ath_tx_update_baw(sc, tid, seqno); in ath_tx_get_tid_subframe()
938 struct ath_atx_tid *tid, struct list_head *bf_q, in ath_tx_form_aggr() argument
946 al_delta, h_baw = tid->baw_size / 2; in ath_tx_form_aggr()
953 aggr_limit = ath_lookup_rate(sc, bf, tid); in ath_tx_form_aggr()
979 ndelim = ath_compute_num_delims(sc, tid, bf_first, fi->framelen, in ath_tx_form_aggr()
988 ath_tx_addto_baw(sc, tid, bf); in ath_tx_form_aggr()
998 bf = ath_tx_get_tid_subframe(sc, txq, tid, &tid_q); in ath_tx_form_aggr()
1003 } while (ath_tid_has_buffered(tid)); in ath_tx_form_aggr()
1393 struct ath_atx_tid *tid, struct list_head *bf_q, in ath_tx_form_burst() argument
1414 bf = ath_tx_get_tid_subframe(sc, txq, tid, &tid_q); in ath_tx_form_burst()
1422 ath_set_rates(tid->an->vif, tid->an->sta, bf); in ath_tx_form_burst()
1427 struct ath_atx_tid *tid, bool *stop) in ath_tx_sched_aggr() argument
1436 if (!ath_tid_has_buffered(tid)) in ath_tx_sched_aggr()
1441 bf = ath_tx_get_tid_subframe(sc, txq, tid, &tid_q); in ath_tx_sched_aggr()
1453 ath_set_rates(tid->an->vif, tid->an->sta, bf); in ath_tx_sched_aggr()
1455 last = ath_tx_form_aggr(sc, txq, tid, &bf_q, bf, in ath_tx_sched_aggr()
1458 ath_tx_form_burst(sc, txq, tid, &bf_q, bf, tid_q); in ath_tx_sched_aggr()
1463 if (tid->clear_ps_filter || tid->an->no_ps_filter) { in ath_tx_sched_aggr()
1464 tid->clear_ps_filter = false; in ath_tx_sched_aggr()
1474 u16 tid, u16 *ssn) in ath_tx_aggr_start() argument
1482 txtid = ATH_AN_2_TID(an, tid); in ath_tx_aggr_start()
1513 void ath_tx_aggr_stop(struct ath_softc *sc, struct ieee80211_sta *sta, u16 tid) in ath_tx_aggr_stop() argument
1516 struct ath_atx_tid *txtid = ATH_AN_2_TID(an, tid); in ath_tx_aggr_stop()
1529 struct ath_atx_tid *tid; in ath_tx_aggr_sleep() local
1534 for (tidno = 0, tid = &an->tid[tidno]; in ath_tx_aggr_sleep()
1535 tidno < IEEE80211_NUM_TIDS; tidno++, tid++) { in ath_tx_aggr_sleep()
1537 txq = tid->txq; in ath_tx_aggr_sleep()
1541 if (list_empty(&tid->list)) { in ath_tx_aggr_sleep()
1546 buffered = ath_tid_has_buffered(tid); in ath_tx_aggr_sleep()
1548 list_del_init(&tid->list); in ath_tx_aggr_sleep()
1558 struct ath_atx_tid *tid; in ath_tx_aggr_wakeup() local
1562 for (tidno = 0, tid = &an->tid[tidno]; in ath_tx_aggr_wakeup()
1563 tidno < IEEE80211_NUM_TIDS; tidno++, tid++) { in ath_tx_aggr_wakeup()
1565 txq = tid->txq; in ath_tx_aggr_wakeup()
1568 tid->clear_ps_filter = true; in ath_tx_aggr_wakeup()
1570 if (ath_tid_has_buffered(tid)) { in ath_tx_aggr_wakeup()
1571 ath_tx_queue_tid(sc, txq, tid); in ath_tx_aggr_wakeup()
1582 struct ath_atx_tid *tid; in ath_tx_aggr_resume() local
1587 tid = ATH_AN_2_TID(an, tidno); in ath_tx_aggr_resume()
1588 txq = tid->txq; in ath_tx_aggr_resume()
1592 tid->baw_size = IEEE80211_MIN_AMPDU_BUF << sta->ht_cap.ampdu_factor; in ath_tx_aggr_resume()
1594 if (ath_tid_has_buffered(tid)) { in ath_tx_aggr_resume()
1595 ath_tx_queue_tid(sc, txq, tid); in ath_tx_aggr_resume()
1620 struct ath_atx_tid *tid; in ath9k_release_buffered_frames() local
1625 tid = ATH_AN_2_TID(an, i); in ath9k_release_buffered_frames()
1627 ath_txq_lock(sc, tid->txq); in ath9k_release_buffered_frames()
1629 bf = ath_tx_get_tid_subframe(sc, sc->tx.uapsdq, tid, &tid_q); in ath9k_release_buffered_frames()
1635 ath_set_rates(tid->an->vif, tid->an->sta, bf); in ath9k_release_buffered_frames()
1637 ath_tx_addto_baw(sc, tid, bf); in ath9k_release_buffered_frames()
1648 if (an->sta && !ath_tid_has_buffered(tid)) in ath9k_release_buffered_frames()
1651 ath_txq_unlock_complete(sc, tid->txq); in ath9k_release_buffered_frames()
1900 struct ath_atx_tid *tid, *last_tid; in ath_txq_schedule() local
1927 tid = list_first_entry(tid_list, struct ath_atx_tid, list); in ath_txq_schedule()
1928 list_del_init(&tid->list); in ath_txq_schedule()
1930 if (ath_tx_sched_aggr(sc, txq, tid, &stop)) in ath_txq_schedule()
1937 if (ath_tid_has_buffered(tid)) in ath_txq_schedule()
1938 ath_tx_queue_tid(sc, txq, tid); in ath_txq_schedule()
1943 if (tid == last_tid) { in ath_txq_schedule()
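
The ath_txq_schedule() matches above outline a round-robin pass over the per-access-class TID list: the first pending TID is removed, given a chance to transmit via ath_tx_sched_aggr(), and re-queued at the tail if it still has buffered frames; the pass ends when the hardware queue fills up or the rotation wraps around. A hypothetical, heavily simplified model of that rotation:

/* Sketch only: a plain FIFO of TIDs instead of the driver's list_head,
 * and a callback standing in for ath_tx_sched_aggr(). */
#include <stdbool.h>
#include <stddef.h>

struct tid_node { struct tid_node *next; bool buffered; };
struct tid_fifo { struct tid_node *head, *tail; };

static struct tid_node *fifo_pop(struct tid_fifo *q)
{
	struct tid_node *t = q->head;
	if (t) {
		q->head = t->next;
		if (!q->head)
			q->tail = NULL;
		t->next = NULL;
	}
	return t;
}

static void fifo_push(struct tid_fifo *q, struct tid_node *t)
{
	t->next = NULL;
	if (q->tail)
		q->tail->next = t;
	else
		q->head = t;
	q->tail = t;
}

/* One scheduling pass: xmit() transmits from the TID and sets *stop when
 * the hardware queue is full (the role ath_tx_sched_aggr() plays above). */
static void schedule_pass(struct tid_fifo *q,
			  void (*xmit)(struct tid_node *t, bool *stop))
{
	struct tid_node *last = q->tail, *t;
	bool stop = false;

	while ((t = fifo_pop(q)) != NULL) {
		xmit(t, &stop);
		if (t->buffered)        /* still has frames: back of the line */
			fifo_push(q, t);
		if (stop || t == last)  /* queue full, or one full rotation */
			break;
	}
}
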
2047 struct ath_atx_tid *tid, struct sk_buff *skb) in ath_tx_send_normal() argument
2057 if (tid && (tx_info->flags & IEEE80211_TX_CTL_AMPDU)) { in ath_tx_send_normal()
2059 ath_tx_addto_baw(sc, tid, bf); in ath_tx_send_normal()
2149 struct ath_atx_tid *tid, in ath_tx_setup_buffer() argument
2167 if (tid && ieee80211_is_data_present(hdr->frame_control)) { in ath_tx_setup_buffer()
2169 seqno = tid->seq_next; in ath_tx_setup_buffer()
2170 hdr->seq_ctrl = cpu_to_le16(tid->seq_next << IEEE80211_SEQ_SEQ_SHIFT); in ath_tx_setup_buffer()
2176 INCR(tid->seq_next, IEEE80211_SEQ_MAX); in ath_tx_setup_buffer()
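
The ath_tx_setup_buffer() lines above show where per-TID sequence numbers come from: the cached tid->seq_next is stamped into the frame's sequence-control field and then advanced modulo the 4096-entry sequence space. A minimal model of that assignment, with a simplified header struct in place of ieee80211_hdr:

#include <stdint.h>

#define SEQ_SEQ_SHIFT 4          /* IEEE80211_SEQ_SEQ_SHIFT */
#define SEQ_MAX       4096       /* IEEE80211_SEQ_MAX */

struct hdr { uint16_t seq_ctrl; };   /* stand-in for ieee80211_hdr */

/* Stamp the TID's next sequence number into the frame, then advance it. */
static uint16_t assign_seqno(struct hdr *h, uint16_t *seq_next)
{
	uint16_t seqno = *seq_next;

	h->seq_ctrl = (uint16_t)(seqno << SEQ_SEQ_SHIFT);   /* cpu_to_le16() in the driver */
	*seq_next = (uint16_t)((seqno + 1) % SEQ_MAX);      /* INCR(tid->seq_next, IEEE80211_SEQ_MAX) */
	return seqno;
}
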
2279 struct ath_atx_tid *tid = NULL; in ath_tx_start() local
2332 tid = ath_get_skb_tid(sc, txctl->an, skb); in ath_tx_start()
2339 WARN_ON(tid->txq != txctl->txq); in ath_tx_start()
2342 tid->clear_ps_filter = true; in ath_tx_start()
2349 __skb_queue_tail(&tid->buf_q, skb); in ath_tx_start()
2351 ath_tx_queue_tid(sc, txq, tid); in ath_tx_start()
2357 bf = ath_tx_setup_buffer(sc, txq, tid, skb); in ath_tx_start()
2373 ath_tx_send_normal(sc, txq, tid, skb); in ath_tx_start()
2835 struct ath_atx_tid *tid; in ath_tx_node_init() local
2838 for (tidno = 0, tid = &an->tid[tidno]; in ath_tx_node_init()
2840 tidno++, tid++) { in ath_tx_node_init()
2841 tid->an = an; in ath_tx_node_init()
2842 tid->tidno = tidno; in ath_tx_node_init()
2843 tid->seq_start = tid->seq_next = 0; in ath_tx_node_init()
2844 tid->baw_size = WME_MAX_BA; in ath_tx_node_init()
2845 tid->baw_head = tid->baw_tail = 0; in ath_tx_node_init()
2846 tid->active = false; in ath_tx_node_init()
2847 tid->clear_ps_filter = true; in ath_tx_node_init()
2848 __skb_queue_head_init(&tid->buf_q); in ath_tx_node_init()
2849 __skb_queue_head_init(&tid->retry_q); in ath_tx_node_init()
2850 INIT_LIST_HEAD(&tid->list); in ath_tx_node_init()
2852 tid->txq = sc->tx.txq_map[acno]; in ath_tx_node_init()
2858 struct ath_atx_tid *tid; in ath_tx_node_cleanup() local
2862 for (tidno = 0, tid = &an->tid[tidno]; in ath_tx_node_cleanup()
2863 tidno < IEEE80211_NUM_TIDS; tidno++, tid++) { in ath_tx_node_cleanup()
2865 txq = tid->txq; in ath_tx_node_cleanup()
2869 if (!list_empty(&tid->list)) in ath_tx_node_cleanup()
2870 list_del_init(&tid->list); in ath_tx_node_cleanup()
2872 ath_tid_drain(sc, txq, tid); in ath_tx_node_cleanup()
2873 tid->active = false; in ath_tx_node_cleanup()