Lines matching refs: tid (drivers/net/wireless/ath/ath9k/xmit.c)

51 			       struct ath_atx_tid *tid, struct sk_buff *skb);
62 static void ath_tx_update_baw(struct ath_softc *sc, struct ath_atx_tid *tid,
66 struct ath_atx_tid *tid,
107 struct ath_atx_tid *tid) in ath_tx_queue_tid() argument
109 struct ath_atx_ac *ac = tid->ac; in ath_tx_queue_tid()
111 struct ath_vif *avp = (struct ath_vif *) tid->an->vif->drv_priv; in ath_tx_queue_tid()
117 if (tid->sched) in ath_tx_queue_tid()
120 tid->sched = true; in ath_tx_queue_tid()
121 list_add_tail(&tid->list, &ac->tid_q); in ath_tx_queue_tid()
128 list = &ctx->acq[TID_TO_WME_AC(tid->tidno)]; in ath_tx_queue_tid()
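
The ath_tx_queue_tid() hits above (lines 107-128) show a lazy-scheduling guard: a TID is appended to its access category's tid_q at most once, with tid->sched recording membership. A minimal standalone sketch of that guard; struct tid, struct ac and queue_tid() are simplified stand-ins for the driver's types, not its real API:

    #include <stdbool.h>
    #include <stdio.h>

    /* Simplified stand-ins for the driver's structs (hypothetical). */
    struct tid {
        bool sched;              /* already on the AC's queue? */
        struct tid *next;        /* tail-queue link for the sketch */
    };

    struct ac {
        struct tid *head, *tail; /* stands in for ac->tid_q */
    };

    /* Same guard as ath_tx_queue_tid(): enqueue each TID at most once. */
    static void queue_tid(struct ac *ac, struct tid *tid)
    {
        if (tid->sched)
            return;              /* already scheduled, nothing to do */
        tid->sched = true;
        tid->next = NULL;
        if (ac->tail)
            ac->tail->next = tid;
        else
            ac->head = tid;
        ac->tail = tid;
    }

    int main(void)
    {
        struct ac ac = { 0 };
        struct tid t = { 0 };

        queue_tid(&ac, &t);
        queue_tid(&ac, &t);      /* second call is a no-op */
        printf("queued once: %s\n", ac.head == &t && !t.next ? "yes" : "no");
        return 0;
    }
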
140 static void ath_send_bar(struct ath_atx_tid *tid, u16 seqno) in ath_send_bar() argument
142 if (!tid->an->sta) in ath_send_bar()
145 ieee80211_send_bar(tid->an->vif, tid->an->sta->addr, tid->tidno, in ath_send_bar()
187 static bool ath_tid_has_buffered(struct ath_atx_tid *tid) in ath_tid_has_buffered() argument
189 return !skb_queue_empty(&tid->buf_q) || !skb_queue_empty(&tid->retry_q); in ath_tid_has_buffered()
192 static struct sk_buff *ath_tid_dequeue(struct ath_atx_tid *tid) in ath_tid_dequeue() argument
196 skb = __skb_dequeue(&tid->retry_q); in ath_tid_dequeue()
198 skb = __skb_dequeue(&tid->buf_q); in ath_tid_dequeue()
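
ath_tid_dequeue() (lines 192-198) always drains retry_q before buf_q, so retransmissions, which already hold slots in the block-ack window, leave ahead of fresh traffic. A toy illustration of that ordering; the fifo_* ring is a hypothetical stand-in for sk_buff_head:

    #include <stdbool.h>
    #include <stdio.h>

    #define QSZ 8

    /* Toy ring standing in for sk_buff_head (hypothetical). */
    struct fifo { int buf[QSZ]; unsigned head, tail; };

    static bool fifo_empty(const struct fifo *q) { return q->head == q->tail; }
    static void fifo_push(struct fifo *q, int v) { q->buf[q->tail++ % QSZ] = v; }
    static int  fifo_pop(struct fifo *q)         { return q->buf[q->head++ % QSZ]; }

    /* Mirrors ath_tid_dequeue(): retransmissions drain first, since
     * they already occupy block-ack window slots. */
    static bool tid_dequeue(struct fifo *retry_q, struct fifo *buf_q, int *out)
    {
        if (!fifo_empty(retry_q))
            *out = fifo_pop(retry_q);
        else if (!fifo_empty(buf_q))
            *out = fifo_pop(buf_q);
        else
            return false;
        return true;
    }

    int main(void)
    {
        struct fifo retry = { 0 }, fresh = { 0 };
        int v;

        fifo_push(&fresh, 100);  /* new frame */
        fifo_push(&retry, 42);   /* pending retransmission */

        while (tid_dequeue(&retry, &fresh, &v))
            printf("%d\n", v);   /* prints 42, then 100 */
        return 0;
    }
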
209 ath_tx_tid_change_state(struct ath_softc *sc, struct ath_atx_tid *tid) in ath_tx_tid_change_state() argument
211 struct ath_txq *txq = tid->ac->txq; in ath_tx_tid_change_state()
217 skb_queue_walk_safe(&tid->buf_q, skb, tskb) { in ath_tx_tid_change_state()
227 bf = ath_tx_setup_buffer(sc, txq, tid, skb); in ath_tx_tid_change_state()
229 __skb_unlink(skb, &tid->buf_q); in ath_tx_tid_change_state()
238 static void ath_tx_flush_tid(struct ath_softc *sc, struct ath_atx_tid *tid) in ath_tx_flush_tid() argument
240 struct ath_txq *txq = tid->ac->txq; in ath_tx_flush_tid()
252 while ((skb = __skb_dequeue(&tid->retry_q))) { in ath_tx_flush_tid()
262 ath_tx_update_baw(sc, tid, bf->bf_state.seqno); in ath_tx_flush_tid()
272 ath_send_bar(tid, tid->seq_start); in ath_tx_flush_tid()
277 static void ath_tx_update_baw(struct ath_softc *sc, struct ath_atx_tid *tid, in ath_tx_update_baw() argument
282 index = ATH_BA_INDEX(tid->seq_start, seqno); in ath_tx_update_baw()
283 cindex = (tid->baw_head + index) & (ATH_TID_MAX_BUFS - 1); in ath_tx_update_baw()
285 __clear_bit(cindex, tid->tx_buf); in ath_tx_update_baw()
287 while (tid->baw_head != tid->baw_tail && !test_bit(tid->baw_head, tid->tx_buf)) { in ath_tx_update_baw()
288 INCR(tid->seq_start, IEEE80211_SEQ_MAX); in ath_tx_update_baw()
289 INCR(tid->baw_head, ATH_TID_MAX_BUFS); in ath_tx_update_baw()
290 if (tid->bar_index >= 0) in ath_tx_update_baw()
291 tid->bar_index--; in ath_tx_update_baw()
295 static void ath_tx_addto_baw(struct ath_softc *sc, struct ath_atx_tid *tid, in ath_tx_addto_baw() argument
302 index = ATH_BA_INDEX(tid->seq_start, seqno); in ath_tx_addto_baw()
303 cindex = (tid->baw_head + index) & (ATH_TID_MAX_BUFS - 1); in ath_tx_addto_baw()
304 __set_bit(cindex, tid->tx_buf); in ath_tx_addto_baw()
307 if (index >= ((tid->baw_tail - tid->baw_head) & in ath_tx_addto_baw()
309 tid->baw_tail = cindex; in ath_tx_addto_baw()
310 INCR(tid->baw_tail, ATH_TID_MAX_BUFS); in ath_tx_addto_baw()
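
ath_tx_update_baw() and ath_tx_addto_baw() (lines 277-310) maintain the transmit block-ack window as a circular bitmap indexed by the modulo-4096 distance of a frame's sequence number from seq_start; a completion clears its slot and then slides the window start across any leading run of cleared slots. A self-contained sketch under those assumptions; BA_INDEX, MAX_BUFS and struct baw are simplified stand-ins, and MAX_BUFS must stay a power of two for the wraparound arithmetic:

    #include <stdbool.h>
    #include <stdio.h>

    #define SEQ_MAX  4096  /* IEEE80211_SEQ_MAX */
    #define MAX_BUFS 64    /* stand-in for ATH_TID_MAX_BUFS */

    /* Distance of seqno from the window start, modulo the 12-bit
     * sequence space (same shape as the driver's ATH_BA_INDEX). */
    #define BA_INDEX(st, seq) (((seq) - (st)) & (SEQ_MAX - 1))

    struct baw {
        unsigned seq_start;    /* oldest unacknowledged sequence number */
        unsigned head, tail;   /* circular indices into tx_buf */
        bool tx_buf[MAX_BUFS]; /* one slot per in-flight subframe */
    };

    /* Like ath_tx_addto_baw(): mark a subframe as in flight. */
    static void baw_add(struct baw *w, unsigned seqno)
    {
        unsigned index  = BA_INDEX(w->seq_start, seqno);
        unsigned cindex = (w->head + index) % MAX_BUFS;

        w->tx_buf[cindex] = true;
        if (index >= ((w->tail - w->head) % MAX_BUFS))
            w->tail = (cindex + 1) % MAX_BUFS;
    }

    /* Like ath_tx_update_baw(): clear a completed slot, then slide the
     * window start across any leading run of completed slots. */
    static void baw_complete(struct baw *w, unsigned seqno)
    {
        unsigned cindex = (w->head + BA_INDEX(w->seq_start, seqno)) % MAX_BUFS;

        w->tx_buf[cindex] = false;
        while (w->head != w->tail && !w->tx_buf[w->head]) {
            w->seq_start = (w->seq_start + 1) % SEQ_MAX; /* INCR(seq_start, ...) */
            w->head      = (w->head + 1) % MAX_BUFS;     /* INCR(baw_head, ...) */
        }
    }

    int main(void)
    {
        struct baw w = { .seq_start = 10 };

        baw_add(&w, 10);
        baw_add(&w, 11);
        baw_complete(&w, 11);              /* out of order: start must not move */
        printf("start=%u\n", w.seq_start); /* still 10 */
        baw_complete(&w, 10);              /* both done: start slides to 12 */
        printf("start=%u\n", w.seq_start);
        return 0;
    }
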
315 struct ath_atx_tid *tid) in ath_tid_drain() argument
327 while ((skb = ath_tid_dequeue(tid))) { in ath_tid_drain()
448 struct ath_atx_tid *tid = NULL; in ath_tx_complete_aggr() local
495 tid = ath_get_skb_tid(sc, an, skb); in ath_tx_complete_aggr()
496 seq_first = tid->seq_start; in ath_tx_complete_aggr()
507 if (isba && tid->tidno != ts->tid) in ath_tx_complete_aggr()
543 if (!BAW_WITHIN(tid->seq_start, tid->baw_size, seqno) || in ath_tx_complete_aggr()
544 !tid->active) { in ath_tx_complete_aggr()
585 ath_tx_update_baw(sc, tid, seqno); in ath_tx_complete_aggr()
615 ath_tx_update_baw(sc, tid, seqno); in ath_tx_complete_aggr()
640 ieee80211_sta_set_buffered(sta, tid->tidno, true); in ath_tx_complete_aggr()
642 skb_queue_splice_tail(&bf_pending, &tid->retry_q); in ath_tx_complete_aggr()
644 ath_tx_queue_tid(sc, txq, tid); in ath_tx_complete_aggr()
647 tid->ac->clear_ps_filter = true; in ath_tx_complete_aggr()
654 if (BAW_WITHIN(tid->seq_start, tid->baw_size, bar_seq)) in ath_tx_complete_aggr()
655 tid->bar_index = ATH_BA_INDEX(tid->seq_start, bar_seq); in ath_tx_complete_aggr()
658 ath_send_bar(tid, ATH_BA_INDEX2SEQ(seq_first, bar_index + 1)); in ath_tx_complete_aggr()
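
When ath_tx_complete_aggr() gives up on a subframe, the bar_index hits (lines 654-655) record its offset inside the window, and line 658 sends a BAR one position past it. Assuming the two ath9k macros keep their usual modulo-4096 shape, the BAR target sequence works out as below:

    #include <stdio.h>

    #define SEQ_MASK 4095  /* IEEE80211_SEQ_MAX - 1 */

    /* Assumed shapes of the macros seen in the listing above. */
    #define BA_INDEX(st, seq)     (((seq) - (st)) & SEQ_MASK)
    #define BA_INDEX2SEQ(st, idx) (((st) + (idx)) & SEQ_MASK)

    int main(void)
    {
        unsigned seq_first = 4094; /* seq_start sampled at line 496 */
        int bar_index = 2;         /* highest failed in-window offset */

        /* The BAR asks the receiver to move its window just past the
         * last subframe that will never be retried. */
        printf("BAR seq = %u\n", BA_INDEX2SEQ(seq_first, bar_index + 1)); /* 1 */
        return 0;
    }
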
730 struct ath_atx_tid *tid) in ath_lookup_rate() argument
737 int q = tid->ac->txq->mac80211_qnum; in ath_lookup_rate()
790 if (tid->an->maxampdu) in ath_lookup_rate()
791 aggr_limit = min(aggr_limit, tid->an->maxampdu); in ath_lookup_rate()
800 static int ath_compute_num_delims(struct ath_softc *sc, struct ath_atx_tid *tid, in ath_compute_num_delims() argument
842 if (tid->an->mpdudensity == 0) in ath_compute_num_delims()
851 nsymbols = NUM_SYMBOLS_PER_USEC_HALFGI(tid->an->mpdudensity); in ath_compute_num_delims()
853 nsymbols = NUM_SYMBOLS_PER_USEC(tid->an->mpdudensity); in ath_compute_num_delims()
872 struct ath_atx_tid *tid, struct sk_buff_head **q) in ath_tx_get_tid_subframe() argument
881 *q = &tid->retry_q; in ath_tx_get_tid_subframe()
883 *q = &tid->buf_q; in ath_tx_get_tid_subframe()
892 bf = ath_tx_setup_buffer(sc, txq, tid, skb); in ath_tx_get_tid_subframe()
914 if (!tid->active) in ath_tx_get_tid_subframe()
926 if (!BAW_WITHIN(tid->seq_start, tid->baw_size, seqno)) in ath_tx_get_tid_subframe()
929 if (tid->bar_index > ATH_BA_INDEX(tid->seq_start, seqno)) { in ath_tx_get_tid_subframe()
936 ath_tx_update_baw(sc, tid, seqno); in ath_tx_get_tid_subframe()
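
The BAW_WITHIN() check at line 926 gates transmission: a subframe may only go out while its sequence number lies inside the current window; otherwise the TID waits for completions to move seq_start forward. A sketch of the membership test, assuming the usual ath9k definition (distance from the window start, modulo 4096, compared against the window size):

    #include <stdbool.h>
    #include <stdio.h>

    #define SEQ_MASK 4095  /* IEEE80211_SEQ_MAX - 1 */

    /* Assumed shape of BAW_WITHIN(): true when seqno lies inside
     * [start, start + bawsz) modulo the 12-bit sequence space. */
    static bool baw_within(unsigned start, unsigned bawsz, unsigned seqno)
    {
        return ((seqno - start) & SEQ_MASK) < bawsz;
    }

    int main(void)
    {
        printf("%d\n", baw_within(4090, 64, 5));  /* 1: window wraps past 4095 */
        printf("%d\n", baw_within(100, 64, 99));  /* 0: just behind the start */
        return 0;
    }
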
949 struct ath_atx_tid *tid, struct list_head *bf_q, in ath_tx_form_aggr() argument
957 al_delta, h_baw = tid->baw_size / 2; in ath_tx_form_aggr()
964 aggr_limit = ath_lookup_rate(sc, bf, tid); in ath_tx_form_aggr()
990 ndelim = ath_compute_num_delims(sc, tid, bf_first, fi->framelen, in ath_tx_form_aggr()
999 ath_tx_addto_baw(sc, tid, bf); in ath_tx_form_aggr()
1009 bf = ath_tx_get_tid_subframe(sc, txq, tid, &tid_q); in ath_tx_form_aggr()
1014 } while (ath_tid_has_buffered(tid)); in ath_tx_form_aggr()
1404 struct ath_atx_tid *tid, struct list_head *bf_q, in ath_tx_form_burst() argument
1425 bf = ath_tx_get_tid_subframe(sc, txq, tid, &tid_q); in ath_tx_form_burst()
1433 ath_set_rates(tid->an->vif, tid->an->sta, bf); in ath_tx_form_burst()
1438 struct ath_atx_tid *tid, bool *stop) in ath_tx_sched_aggr() argument
1447 if (!ath_tid_has_buffered(tid)) in ath_tx_sched_aggr()
1452 bf = ath_tx_get_tid_subframe(sc, txq, tid, &tid_q); in ath_tx_sched_aggr()
1464 ath_set_rates(tid->an->vif, tid->an->sta, bf); in ath_tx_sched_aggr()
1466 last = ath_tx_form_aggr(sc, txq, tid, &bf_q, bf, in ath_tx_sched_aggr()
1469 ath_tx_form_burst(sc, txq, tid, &bf_q, bf, tid_q); in ath_tx_sched_aggr()
1474 if (tid->ac->clear_ps_filter || tid->an->no_ps_filter) { in ath_tx_sched_aggr()
1475 tid->ac->clear_ps_filter = false; in ath_tx_sched_aggr()
1485 u16 tid, u16 *ssn) in ath_tx_aggr_start() argument
1493 txtid = ATH_AN_2_TID(an, tid); in ath_tx_aggr_start()
1524 void ath_tx_aggr_stop(struct ath_softc *sc, struct ieee80211_sta *sta, u16 tid) in ath_tx_aggr_stop() argument
1527 struct ath_atx_tid *txtid = ATH_AN_2_TID(an, tid); in ath_tx_aggr_stop()
1540 struct ath_atx_tid *tid; in ath_tx_aggr_sleep() local
1546 for (tidno = 0, tid = &an->tid[tidno]; in ath_tx_aggr_sleep()
1547 tidno < IEEE80211_NUM_TIDS; tidno++, tid++) { in ath_tx_aggr_sleep()
1549 ac = tid->ac; in ath_tx_aggr_sleep()
1554 if (!tid->sched) { in ath_tx_aggr_sleep()
1559 buffered = ath_tid_has_buffered(tid); in ath_tx_aggr_sleep()
1561 tid->sched = false; in ath_tx_aggr_sleep()
1562 list_del(&tid->list); in ath_tx_aggr_sleep()
1577 struct ath_atx_tid *tid; in ath_tx_aggr_wakeup() local
1582 for (tidno = 0, tid = &an->tid[tidno]; in ath_tx_aggr_wakeup()
1583 tidno < IEEE80211_NUM_TIDS; tidno++, tid++) { in ath_tx_aggr_wakeup()
1585 ac = tid->ac; in ath_tx_aggr_wakeup()
1591 if (ath_tid_has_buffered(tid)) { in ath_tx_aggr_wakeup()
1592 ath_tx_queue_tid(sc, txq, tid); in ath_tx_aggr_wakeup()
1603 struct ath_atx_tid *tid; in ath_tx_aggr_resume() local
1608 tid = ATH_AN_2_TID(an, tidno); in ath_tx_aggr_resume()
1609 txq = tid->ac->txq; in ath_tx_aggr_resume()
1613 tid->baw_size = IEEE80211_MIN_AMPDU_BUF << sta->ht_cap.ampdu_factor; in ath_tx_aggr_resume()
1615 if (ath_tid_has_buffered(tid)) { in ath_tx_aggr_resume()
1616 ath_tx_queue_tid(sc, txq, tid); in ath_tx_aggr_resume()
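
Line 1613 resizes the window from the peer's HT capabilities when aggregation resumes. Assuming mainline's IEEE80211_MIN_AMPDU_BUF of 0x8, ampdu_factor values 0 through 3 yield windows of 8, 16, 32 and 64 frames:

    #include <stdio.h>

    #define IEEE80211_MIN_AMPDU_BUF 0x8  /* value from linux/ieee80211.h */

    int main(void)
    {
        /* tid->baw_size = IEEE80211_MIN_AMPDU_BUF << sta->ht_cap.ampdu_factor */
        for (unsigned factor = 0; factor <= 3; factor++)
            printf("ampdu_factor=%u -> baw_size=%u\n",
                   factor, IEEE80211_MIN_AMPDU_BUF << factor);
        return 0;  /* prints 8, 16, 32, 64 */
    }
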
1641 struct ath_atx_tid *tid; in ath9k_release_buffered_frames() local
1646 tid = ATH_AN_2_TID(an, i); in ath9k_release_buffered_frames()
1648 ath_txq_lock(sc, tid->ac->txq); in ath9k_release_buffered_frames()
1650 bf = ath_tx_get_tid_subframe(sc, sc->tx.uapsdq, tid, &tid_q); in ath9k_release_buffered_frames()
1656 ath_set_rates(tid->an->vif, tid->an->sta, bf); in ath9k_release_buffered_frames()
1658 ath_tx_addto_baw(sc, tid, bf); in ath9k_release_buffered_frames()
1669 if (an->sta && !ath_tid_has_buffered(tid)) in ath9k_release_buffered_frames()
1672 ath_txq_unlock_complete(sc, tid->ac->txq); in ath9k_release_buffered_frames()
1919 struct ath_atx_tid *tid, *last_tid; in ath_txq_schedule() local
1953 tid = list_first_entry(&ac->tid_q, struct ath_atx_tid, in ath_txq_schedule()
1955 list_del(&tid->list); in ath_txq_schedule()
1956 tid->sched = false; in ath_txq_schedule()
1958 if (ath_tx_sched_aggr(sc, txq, tid, &stop)) in ath_txq_schedule()
1965 if (ath_tid_has_buffered(tid)) in ath_txq_schedule()
1966 ath_tx_queue_tid(sc, txq, tid); in ath_txq_schedule()
1968 if (stop || tid == last_tid) in ath_txq_schedule()
2084 struct ath_atx_tid *tid, struct sk_buff *skb) in ath_tx_send_normal() argument
2094 if (tid && (tx_info->flags & IEEE80211_TX_CTL_AMPDU)) { in ath_tx_send_normal()
2096 ath_tx_addto_baw(sc, tid, bf); in ath_tx_send_normal()
2186 struct ath_atx_tid *tid, in ath_tx_setup_buffer() argument
2204 if (tid && ieee80211_is_data_present(hdr->frame_control)) { in ath_tx_setup_buffer()
2206 seqno = tid->seq_next; in ath_tx_setup_buffer()
2207 hdr->seq_ctrl = cpu_to_le16(tid->seq_next << IEEE80211_SEQ_SEQ_SHIFT); in ath_tx_setup_buffer()
2213 INCR(tid->seq_next, IEEE80211_SEQ_MAX); in ath_tx_setup_buffer()
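
Lines 2206-2213 stamp each data frame from the TID's private sequence counter: IEEE80211_SEQ_SEQ_SHIFT is 4 because the low four bits of seq_ctrl carry the fragment number, and the counter wraps at IEEE80211_SEQ_MAX (4096). A minimal sketch; assign_seq() is a hypothetical helper, not a driver function:

    #include <stdint.h>
    #include <stdio.h>

    #define IEEE80211_SEQ_SEQ_SHIFT 4    /* low 4 bits = fragment number */
    #define IEEE80211_SEQ_MAX       4096 /* 12-bit sequence space */

    /* Mirrors the assignment above: stamp the frame from the TID's
     * counter, then advance the counter modulo 4096 (INCR() in the
     * driver). */
    static uint16_t assign_seq(uint16_t *seq_next)
    {
        uint16_t seq_ctrl = (uint16_t)(*seq_next << IEEE80211_SEQ_SEQ_SHIFT);

        *seq_next = (*seq_next + 1) % IEEE80211_SEQ_MAX;
        return seq_ctrl; /* host order; the driver stores it little-endian */
    }

    int main(void)
    {
        uint16_t next = 4095;

        printf("seq_ctrl=0x%04x\n", assign_seq(&next)); /* 0xfff0 */
        printf("next=%u\n", next);                      /* wrapped to 0 */
        return 0;
    }
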
2316 struct ath_atx_tid *tid = NULL; in ath_tx_start() local
2369 tid = ath_get_skb_tid(sc, txctl->an, skb); in ath_tx_start()
2376 WARN_ON(tid->ac->txq != txctl->txq); in ath_tx_start()
2379 tid->ac->clear_ps_filter = true; in ath_tx_start()
2386 __skb_queue_tail(&tid->buf_q, skb); in ath_tx_start()
2388 ath_tx_queue_tid(sc, txq, tid); in ath_tx_start()
2394 bf = ath_tx_setup_buffer(sc, txq, tid, skb); in ath_tx_start()
2410 ath_tx_send_normal(sc, txq, tid, skb); in ath_tx_start()
2872 struct ath_atx_tid *tid; in ath_tx_node_init() local
2876 for (tidno = 0, tid = &an->tid[tidno]; in ath_tx_node_init()
2878 tidno++, tid++) { in ath_tx_node_init()
2879 tid->an = an; in ath_tx_node_init()
2880 tid->tidno = tidno; in ath_tx_node_init()
2881 tid->seq_start = tid->seq_next = 0; in ath_tx_node_init()
2882 tid->baw_size = WME_MAX_BA; in ath_tx_node_init()
2883 tid->baw_head = tid->baw_tail = 0; in ath_tx_node_init()
2884 tid->sched = false; in ath_tx_node_init()
2885 tid->active = false; in ath_tx_node_init()
2886 __skb_queue_head_init(&tid->buf_q); in ath_tx_node_init()
2887 __skb_queue_head_init(&tid->retry_q); in ath_tx_node_init()
2889 tid->ac = &an->ac[acno]; in ath_tx_node_init()
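
The init loop (lines 2876-2889) binds every TID to one of the four WME access categories through acno, the same mapping TID_TO_WME_AC() applies at line 128. A sketch of that standard 802.1D mapping as ath9k is assumed to define it:

    #include <stdio.h>

    /* mac80211's AC numbering (IEEE80211_AC_VO = 0 ... IEEE80211_AC_BK = 3). */
    enum { AC_VO, AC_VI, AC_BE, AC_BK };

    /* Assumed shape of TID_TO_WME_AC(): the 802.1D UP -> AC map. */
    static int tid_to_wme_ac(int tid)
    {
        switch (tid) {
        case 0: case 3: return AC_BE;
        case 1: case 2: return AC_BK;
        case 4: case 5: return AC_VI;
        default:        return AC_VO; /* TIDs 6 and 7 */
        }
    }

    int main(void)
    {
        for (int tid = 0; tid < 8; tid++)
            printf("tid %d -> ac %d\n", tid, tid_to_wme_ac(tid));
        return 0;
    }
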
2904 struct ath_atx_tid *tid; in ath_tx_node_cleanup() local
2908 for (tidno = 0, tid = &an->tid[tidno]; in ath_tx_node_cleanup()
2909 tidno < IEEE80211_NUM_TIDS; tidno++, tid++) { in ath_tx_node_cleanup()
2911 ac = tid->ac; in ath_tx_node_cleanup()
2916 if (tid->sched) { in ath_tx_node_cleanup()
2917 list_del(&tid->list); in ath_tx_node_cleanup()
2918 tid->sched = false; in ath_tx_node_cleanup()
2923 tid->ac->sched = false; in ath_tx_node_cleanup()
2926 ath_tid_drain(sc, txq, tid); in ath_tx_node_cleanup()
2927 tid->active = false; in ath_tx_node_cleanup()