Lines Matching refs:lq_sta
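
This listing is an identifier cross-reference: every line in the iwlwifi mvm rate-scaling module (rs.c) that references lq_sta, the per-station rate-scaling state, shown with its source line number and the function it appears in. As a reading aid, the sketch below collects the fields that most of these lines exercise into one place. The field names come from the listing itself; the types, array sizes, and grouping are assumptions for illustration and do not reproduce the kernel's actual struct iwl_lq_sta.

    /* Simplified, illustrative view of the per-station rate-scaling state
     * referenced throughout this listing; types and sizes are assumed. */
    #include <stdbool.h>
    #include <stdint.h>

    #define LQ_SIZE 2                 /* active table + search table (assumed) */

    struct rs_rate_sketch {           /* stand-in for the driver's rs_rate */
        int index;                    /* rate index within the column */
        int type;                     /* legacy / HT / VHT, SISO / MIMO2 */
        bool sgi, ldpc, stbc;
    };

    struct scale_tbl_sketch {         /* stand-in for iwl_scale_tbl_info */
        struct rs_rate_sketch rate;
        int column;
        const uint16_t *expected_tpt; /* expected-throughput table in use */
    };

    struct lq_sta_sketch {
        /* per-modulation rate masks (rs_get_supported_rates) */
        unsigned long active_legacy_rate, active_siso_rate, active_mimo2_rate;
        int max_legacy_rate_idx, max_siso_rate_idx, max_mimo2_rate_idx;

        /* column-search state machine (rs_stay_in_table and friends) */
        int rs_state;                 /* STAY_IN_COLUMN / SEARCH_CYCLE_* */
        bool search_better_tbl;
        int active_tbl;
        struct scale_tbl_sketch lq_info[LQ_SIZE];
        unsigned long visited_columns;
        int table_count, table_count_limit;
        int total_failed, max_failure_limit;
        int total_success, max_success_limit;
        unsigned long flush_timer, last_tx;
        int missed_rate_counter;
        unsigned int last_tpt;
        uint32_t last_rate_n_flags;

        /* capabilities discovered at init (rs_ht_init / rs_vht_init) */
        bool is_vht, ldpc, stbc_capable, bfer_capable, is_agg;
        uint8_t tx_agg_tid_en;
        int band;

        /* persistent / debugfs state, "pers" in the listing */
        struct {
            void *drv;                /* back-pointer to the mvm instance */
            uint32_t dbg_fixed_rate;
            uint8_t dbg_fixed_txp_reduction;
            int ss_force;
            uint8_t chains;
            int8_t chain_signal[3];
            int8_t last_rssi;
        } pers;

        /* the firmware LQ command ("lq" in the listing) is omitted here */
    };
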

400 				  struct iwl_lq_sta *lq_sta,
404 struct iwl_lq_sta *lq_sta,
406 static void rs_stay_in_table(struct iwl_lq_sta *lq_sta, bool force_search);
727 struct iwl_lq_sta *lq_sta, in rs_collect_tx_data() argument
739 struct lq_sta_pers *pers = &lq_sta->pers; in rs_collect_tx_data()
930 static u16 rs_get_supported_rates(struct iwl_lq_sta *lq_sta, in rs_get_supported_rates() argument
934 return lq_sta->active_legacy_rate; in rs_get_supported_rates()
936 return lq_sta->active_siso_rate; in rs_get_supported_rates()
938 return lq_sta->active_mimo2_rate; in rs_get_supported_rates()
998 static inline bool rs_rate_supported(struct iwl_lq_sta *lq_sta, in rs_rate_supported() argument
1001 return BIT(rate->index) & rs_get_supported_rates(lq_sta, rate); in rs_rate_supported()
1007 static bool rs_get_lower_rate_in_column(struct iwl_lq_sta *lq_sta, in rs_get_lower_rate_in_column() argument
1013 struct iwl_mvm *mvm = lq_sta->pers.drv; in rs_get_lower_rate_in_column()
1015 rate_mask = rs_get_supported_rates(lq_sta, rate); in rs_get_lower_rate_in_column()
1029 static void rs_get_lower_rate_down_column(struct iwl_lq_sta *lq_sta, in rs_get_lower_rate_down_column() argument
1032 struct iwl_mvm *mvm = lq_sta->pers.drv; in rs_get_lower_rate_down_column()
1039 if (lq_sta->band == IEEE80211_BAND_5GHZ) in rs_get_lower_rate_down_column()
1063 if (!rs_rate_supported(lq_sta, rate)) in rs_get_lower_rate_down_column()
1064 rs_get_lower_rate_in_column(lq_sta, rate); in rs_get_lower_rate_down_column()
1169 struct iwl_lq_sta *lq_sta = &mvmsta->lq_sta; in iwl_mvm_rs_tx_status() local
1174 if (!lq_sta) { in iwl_mvm_rs_tx_status()
1177 } else if (!lq_sta->pers.drv) { in iwl_mvm_rs_tx_status()
1192 if (lq_sta->pers.dbg_fixed_rate) { in iwl_mvm_rs_tx_status()
1211 lq_sta->pers.tx_stats[column][index].total += attempts; in iwl_mvm_rs_tx_status()
1212 lq_sta->pers.tx_stats[column][index].success += success; in iwl_mvm_rs_tx_status()
1221 (unsigned long)(lq_sta->last_tx + in iwl_mvm_rs_tx_status()
1232 lq_sta->last_tx = jiffies; in iwl_mvm_rs_tx_status()
1241 table = &lq_sta->lq; in iwl_mvm_rs_tx_status()
1256 lq_sta->missed_rate_counter++; in iwl_mvm_rs_tx_status()
1257 if (lq_sta->missed_rate_counter > IWL_MVM_RS_MISSED_RATE_MAX) { in iwl_mvm_rs_tx_status()
1258 lq_sta->missed_rate_counter = 0; in iwl_mvm_rs_tx_status()
1261 lq_sta->rs_state); in iwl_mvm_rs_tx_status()
1262 iwl_mvm_send_lq_cmd(mvm, &lq_sta->lq, false); in iwl_mvm_rs_tx_status()
1268 lq_sta->missed_rate_counter = 0; in iwl_mvm_rs_tx_status()
1270 if (!lq_sta->search_better_tbl) { in iwl_mvm_rs_tx_status()
1271 curr_tbl = &(lq_sta->lq_info[lq_sta->active_tbl]); in iwl_mvm_rs_tx_status()
1272 other_tbl = &(lq_sta->lq_info[1 - lq_sta->active_tbl]); in iwl_mvm_rs_tx_status()
1274 curr_tbl = &(lq_sta->lq_info[1 - lq_sta->active_tbl]); in iwl_mvm_rs_tx_status()
1275 other_tbl = &(lq_sta->lq_info[lq_sta->active_tbl]); in iwl_mvm_rs_tx_status()
1281 tmp_tbl = &(lq_sta->lq_info[lq_sta->active_tbl]); in iwl_mvm_rs_tx_status()
1283 tmp_tbl = &(lq_sta->lq_info[1 - lq_sta->active_tbl]); in iwl_mvm_rs_tx_status()
1291 rs_stay_in_table(lq_sta, true); in iwl_mvm_rs_tx_status()
1310 rs_collect_tx_data(mvm, lq_sta, curr_tbl, lq_rate.index, in iwl_mvm_rs_tx_status()
1316 if (lq_sta->rs_state == RS_STATE_STAY_IN_COLUMN) { in iwl_mvm_rs_tx_status()
1317 lq_sta->total_success += info->status.ampdu_ack_len; in iwl_mvm_rs_tx_status()
1318 lq_sta->total_failed += (info->status.ampdu_len - in iwl_mvm_rs_tx_status()
1346 rs_collect_tx_data(mvm, lq_sta, tmp_tbl, lq_rate.index, in iwl_mvm_rs_tx_status()
1352 if (lq_sta->rs_state == RS_STATE_STAY_IN_COLUMN) { in iwl_mvm_rs_tx_status()
1353 lq_sta->total_success += legacy_success; in iwl_mvm_rs_tx_status()
1354 lq_sta->total_failed += retries + (1 - legacy_success); in iwl_mvm_rs_tx_status()
1358 lq_sta->last_rate_n_flags = lq_hwrate; in iwl_mvm_rs_tx_status()
1363 rs_rate_scale_perform(mvm, sta, lq_sta, tid); in iwl_mvm_rs_tx_status()
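
The iwl_mvm_rs_tx_status() lines above trace the feedback path: per-rate attempt and success counts are folded into the current scale table (or into the table under test when a column search is running) via rs_collect_tx_data(), the totals feed the stay-in-column counters, and rs_rate_scale_perform() is then run to react. The sketch below shows one plausible way such a per-rate statistics window could be maintained; the window depth, the 1/128 scaling, and the aging rule are assumptions, not the driver's rs_collect_tx_data() logic.

    /* Illustrative per-rate statistics window, loosely modelled on what
     * rs_collect_tx_data() is fed; sizes and scaling are assumptions. */
    #include <stdio.h>

    #define WINDOW_MAX   62    /* assumed window depth */
    #define SR_SCALE    128    /* success ratio in 1/128ths (assumed) */

    struct rate_window {
        int counter;           /* attempts currently in the window */
        int success_counter;   /* successes currently in the window */
        int success_ratio;     /* scaled success_counter / counter */
    };

    static void collect_tx_data(struct rate_window *win, int attempts, int successes)
    {
        /* fold new attempts into the window, clamping successes to attempts */
        if (successes > attempts)
            successes = attempts;
        win->counter += attempts;
        win->success_counter += successes;

        /* keep at most WINDOW_MAX attempts; shrink both counters
         * proportionally when the window overflows (simplified aging) */
        if (win->counter > WINDOW_MAX) {
            win->success_counter =
                win->success_counter * WINDOW_MAX / win->counter;
            win->counter = WINDOW_MAX;
        }

        win->success_ratio = win->counter ?
            SR_SCALE * win->success_counter / win->counter : 0;
    }

    int main(void)
    {
        struct rate_window win = { 0 };
        collect_tx_data(&win, 10, 7);   /* 10 attempts, 7 acknowledged */
        printf("attempts=%d successes=%d ratio=%d/128\n",
               win.counter, win.success_counter, win.success_ratio);
        return 0;
    }
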
1398 struct iwl_lq_sta *lq_sta) in rs_set_stay_in_table() argument
1401 lq_sta->rs_state = RS_STATE_STAY_IN_COLUMN; in rs_set_stay_in_table()
1403 lq_sta->table_count_limit = IWL_MVM_RS_LEGACY_TABLE_COUNT; in rs_set_stay_in_table()
1404 lq_sta->max_failure_limit = IWL_MVM_RS_LEGACY_FAILURE_LIMIT; in rs_set_stay_in_table()
1405 lq_sta->max_success_limit = IWL_MVM_RS_LEGACY_SUCCESS_LIMIT; in rs_set_stay_in_table()
1407 lq_sta->table_count_limit = IWL_MVM_RS_NON_LEGACY_TABLE_COUNT; in rs_set_stay_in_table()
1408 lq_sta->max_failure_limit = IWL_MVM_RS_NON_LEGACY_FAILURE_LIMIT; in rs_set_stay_in_table()
1409 lq_sta->max_success_limit = IWL_MVM_RS_NON_LEGACY_SUCCESS_LIMIT; in rs_set_stay_in_table()
1411 lq_sta->table_count = 0; in rs_set_stay_in_table()
1412 lq_sta->total_failed = 0; in rs_set_stay_in_table()
1413 lq_sta->total_success = 0; in rs_set_stay_in_table()
1414 lq_sta->flush_timer = jiffies; in rs_set_stay_in_table()
1415 lq_sta->visited_columns = 0; in rs_set_stay_in_table()
1425 static int rs_get_max_allowed_rate(struct iwl_lq_sta *lq_sta, in rs_get_max_allowed_rate() argument
1430 return lq_sta->max_legacy_rate_idx; in rs_get_max_allowed_rate()
1432 return lq_sta->max_siso_rate_idx; in rs_get_max_allowed_rate()
1434 return lq_sta->max_mimo2_rate_idx; in rs_get_max_allowed_rate()
1439 return lq_sta->max_legacy_rate_idx; in rs_get_max_allowed_rate()
1442 static const u16 *rs_get_expected_tpt_table(struct iwl_lq_sta *lq_sta, in rs_get_expected_tpt_table() argument
1494 if (!column->sgi && !lq_sta->is_agg) /* Normal */ in rs_get_expected_tpt_table()
1496 else if (column->sgi && !lq_sta->is_agg) /* SGI */ in rs_get_expected_tpt_table()
1498 else if (!column->sgi && lq_sta->is_agg) /* AGG */ in rs_get_expected_tpt_table()
1504 static void rs_set_expected_tpt_table(struct iwl_lq_sta *lq_sta, in rs_set_expected_tpt_table() argument
1510 tbl->expected_tpt = rs_get_expected_tpt_table(lq_sta, column, rate->bw); in rs_set_expected_tpt_table()
1514 struct iwl_lq_sta *lq_sta, in rs_get_best_rate() argument
1519 &(lq_sta->lq_info[lq_sta->active_tbl]); in rs_get_best_rate()
1533 target_tpt = lq_sta->last_tpt; in rs_get_best_rate()
1576 static void rs_stay_in_table(struct iwl_lq_sta *lq_sta, bool force_search) in rs_stay_in_table() argument
1583 mvm = lq_sta->pers.drv; in rs_stay_in_table()
1584 active_tbl = lq_sta->active_tbl; in rs_stay_in_table()
1586 tbl = &(lq_sta->lq_info[active_tbl]); in rs_stay_in_table()
1589 if (lq_sta->rs_state == RS_STATE_STAY_IN_COLUMN) { in rs_stay_in_table()
1591 if (lq_sta->flush_timer) in rs_stay_in_table()
1594 (unsigned long)(lq_sta->flush_timer + in rs_stay_in_table()
1606 (lq_sta->total_failed > lq_sta->max_failure_limit) || in rs_stay_in_table()
1607 (lq_sta->total_success > lq_sta->max_success_limit) || in rs_stay_in_table()
1608 ((!lq_sta->search_better_tbl) && in rs_stay_in_table()
1609 (lq_sta->flush_timer) && (flush_interval_passed))) { in rs_stay_in_table()
1612 lq_sta->total_failed, in rs_stay_in_table()
1613 lq_sta->total_success, in rs_stay_in_table()
1617 lq_sta->rs_state = RS_STATE_SEARCH_CYCLE_STARTED; in rs_stay_in_table()
1620 lq_sta->total_failed = 0; in rs_stay_in_table()
1621 lq_sta->total_success = 0; in rs_stay_in_table()
1622 lq_sta->flush_timer = 0; in rs_stay_in_table()
1624 lq_sta->visited_columns = BIT(tbl->column); in rs_stay_in_table()
1632 lq_sta->table_count++; in rs_stay_in_table()
1633 if (lq_sta->table_count >= in rs_stay_in_table()
1634 lq_sta->table_count_limit) { in rs_stay_in_table()
1635 lq_sta->table_count = 0; in rs_stay_in_table()
1646 if (lq_sta->rs_state == RS_STATE_SEARCH_CYCLE_STARTED) { in rs_stay_in_table()
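
rs_set_stay_in_table() and rs_stay_in_table() implement the column lock: after a search cycle the algorithm stays on the winning column, with separate failure, success, and table-count limits for legacy and non-legacy columns, and only re-enters a search cycle (resetting the counters, the flush timer, and visited_columns) once a limit is crossed, the flush interval elapses, or a search is forced. The sketch below restates that exit condition in isolation; the limits, the time source, and the example numbers are placeholders rather than the driver's constants.

    /* Restatement of the stay-in-column exit condition suggested by
     * rs_stay_in_table(); limits and the time source are placeholders. */
    #include <stdbool.h>
    #include <stdio.h>

    enum rs_state { STAY_IN_COLUMN, SEARCH_CYCLE_STARTED, SEARCH_CYCLE_ENDED };

    struct column_lock {
        enum rs_state state;
        int total_failed, max_failure_limit;
        int total_success, max_success_limit;
        unsigned long flush_timer;      /* 0 means "not armed" */
        unsigned long flush_interval;   /* placeholder limit */
        bool search_better_tbl;
    };

    static bool flush_elapsed(const struct column_lock *c, unsigned long now)
    {
        return c->flush_timer && now - c->flush_timer > c->flush_interval;
    }

    /* Returns true when the lock is released and a new search cycle begins. */
    static bool maybe_leave_column(struct column_lock *c, unsigned long now,
                                   bool force_search)
    {
        if (c->state != STAY_IN_COLUMN)
            return false;

        if (force_search ||
            c->total_failed > c->max_failure_limit ||
            c->total_success > c->max_success_limit ||
            (!c->search_better_tbl && flush_elapsed(c, now))) {
            c->state = SEARCH_CYCLE_STARTED;
            c->total_failed = 0;
            c->total_success = 0;
            c->flush_timer = 0;
            return true;
        }
        return false;
    }

    int main(void)
    {
        struct column_lock c = {
            .state = STAY_IN_COLUMN,
            .total_failed = 101, .max_failure_limit = 100,
            .max_success_limit = 300, .flush_interval = 5000,
        };
        /* too many failures in this column: leave it and start searching */
        printf("left column: %s\n",
               maybe_leave_column(&c, 1000, false) ? "yes" : "no");
        return 0;
    }
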
1657 struct iwl_lq_sta *lq_sta, in rs_update_rate_tbl() argument
1660 rs_fill_lq_cmd(mvm, sta, lq_sta, &tbl->rate); in rs_update_rate_tbl()
1661 iwl_mvm_send_lq_cmd(mvm, &lq_sta->lq, false); in rs_update_rate_tbl()
1666 struct iwl_lq_sta *lq_sta, in rs_tweak_rate_tbl() argument
1704 rs_set_expected_tpt_table(lq_sta, tbl); in rs_tweak_rate_tbl()
1710 struct iwl_lq_sta *lq_sta, in rs_get_next_column() argument
1729 if (lq_sta->visited_columns & BIT(next_col_id)) { in rs_get_next_column()
1759 tpt = lq_sta->last_tpt / 100; in rs_get_next_column()
1760 expected_tpt_tbl = rs_get_expected_tpt_table(lq_sta, next_col, in rs_get_next_column()
1765 max_rate = rs_get_max_allowed_rate(lq_sta, next_col); in rs_get_next_column()
1794 struct iwl_lq_sta *lq_sta, in rs_switch_to_column() argument
1798 struct iwl_scale_tbl_info *tbl = &(lq_sta->lq_info[lq_sta->active_tbl]); in rs_switch_to_column()
1800 &(lq_sta->lq_info[(1 - lq_sta->active_tbl)]); in rs_switch_to_column()
1815 if (lq_sta->band == IEEE80211_BAND_5GHZ) in rs_switch_to_column()
1822 rate_mask = lq_sta->active_legacy_rate; in rs_switch_to_column()
1824 rate->type = lq_sta->is_vht ? LQ_VHT_SISO : LQ_HT_SISO; in rs_switch_to_column()
1825 rate_mask = lq_sta->active_siso_rate; in rs_switch_to_column()
1827 rate->type = lq_sta->is_vht ? LQ_VHT_MIMO2 : LQ_HT_MIMO2; in rs_switch_to_column()
1828 rate_mask = lq_sta->active_mimo2_rate; in rs_switch_to_column()
1835 rate->ldpc = lq_sta->ldpc; in rs_switch_to_column()
1839 rs_set_expected_tpt_table(lq_sta, search_tbl); in rs_switch_to_column()
1841 lq_sta->visited_columns |= BIT(col_id); in rs_switch_to_column()
1847 rate_idx = rs_get_best_rate(mvm, lq_sta, search_tbl, in rs_switch_to_column()
1958 struct iwl_lq_sta *lq_sta) in rs_stbc_allow() argument
1963 if (!lq_sta->stbc_capable) in rs_stbc_allow()
2084 struct iwl_lq_sta *lq_sta, in rs_tpc_perform() argument
2095 u8 cur = lq_sta->lq.reduced_tpc; in rs_tpc_perform()
2101 if (lq_sta->pers.dbg_fixed_txp_reduction <= TPC_MAX_REDUCTION) { in rs_tpc_perform()
2103 lq_sta->pers.dbg_fixed_txp_reduction); in rs_tpc_perform()
2104 lq_sta->lq.reduced_tpc = lq_sta->pers.dbg_fixed_txp_reduction; in rs_tpc_perform()
2105 return cur != lq_sta->pers.dbg_fixed_txp_reduction; in rs_tpc_perform()
2120 lq_sta->lq.reduced_tpc = TPC_NO_REDUCTION; in rs_tpc_perform()
2156 lq_sta->lq.reduced_tpc = weak; in rs_tpc_perform()
2159 lq_sta->lq.reduced_tpc = strong; in rs_tpc_perform()
2162 lq_sta->lq.reduced_tpc = TPC_NO_REDUCTION; in rs_tpc_perform()
2176 struct iwl_lq_sta *lq_sta, in rs_rate_scale_perform() argument
2195 u8 prev_agg = lq_sta->is_agg; in rs_rate_scale_perform()
2200 lq_sta->is_agg = !!sta_priv->agg_tids; in rs_rate_scale_perform()
2207 if (!lq_sta->search_better_tbl) in rs_rate_scale_perform()
2208 active_tbl = lq_sta->active_tbl; in rs_rate_scale_perform()
2210 active_tbl = 1 - lq_sta->active_tbl; in rs_rate_scale_perform()
2212 tbl = &(lq_sta->lq_info[active_tbl]); in rs_rate_scale_perform()
2215 if (prev_agg != lq_sta->is_agg) { in rs_rate_scale_perform()
2218 prev_agg, lq_sta->is_agg); in rs_rate_scale_perform()
2219 rs_set_expected_tpt_table(lq_sta, tbl); in rs_rate_scale_perform()
2227 rate_mask = rs_get_supported_rates(lq_sta, rate); in rs_rate_scale_perform()
2231 if (lq_sta->search_better_tbl) { in rs_rate_scale_perform()
2234 lq_sta->search_better_tbl = 0; in rs_rate_scale_perform()
2235 tbl = &(lq_sta->lq_info[lq_sta->active_tbl]); in rs_rate_scale_perform()
2236 rs_update_rate_tbl(mvm, sta, lq_sta, tbl); in rs_rate_scale_perform()
2270 rs_stay_in_table(lq_sta, false); in rs_rate_scale_perform()
2276 if (lq_sta->search_better_tbl) { in rs_rate_scale_perform()
2280 if (window->average_tpt > lq_sta->last_tpt) { in rs_rate_scale_perform()
2286 lq_sta->last_tpt); in rs_rate_scale_perform()
2289 lq_sta->active_tbl = active_tbl; in rs_rate_scale_perform()
2298 lq_sta->last_tpt); in rs_rate_scale_perform()
2304 active_tbl = lq_sta->active_tbl; in rs_rate_scale_perform()
2305 tbl = &(lq_sta->lq_info[active_tbl]); in rs_rate_scale_perform()
2309 current_tpt = lq_sta->last_tpt; in rs_rate_scale_perform()
2317 lq_sta->search_better_tbl = 0; in rs_rate_scale_perform()
2352 rs_stay_in_table(lq_sta, true); in rs_rate_scale_perform()
2381 if (lq_sta->rs_state == RS_STATE_STAY_IN_COLUMN) in rs_rate_scale_perform()
2382 update_lq = rs_tpc_perform(mvm, sta, lq_sta, tbl); in rs_rate_scale_perform()
2393 rs_tweak_rate_tbl(mvm, sta, lq_sta, tbl, scale_action); in rs_rate_scale_perform()
2394 rs_update_rate_tbl(mvm, sta, lq_sta, tbl); in rs_rate_scale_perform()
2397 rs_stay_in_table(lq_sta, false); in rs_rate_scale_perform()
2406 lq_sta->rs_state == RS_STATE_SEARCH_CYCLE_STARTED in rs_rate_scale_perform()
2411 lq_sta->last_tpt = current_tpt; in rs_rate_scale_perform()
2415 update_lq, done_search, lq_sta->rs_state, in rs_rate_scale_perform()
2418 next_column = rs_get_next_column(mvm, lq_sta, sta, tbl); in rs_rate_scale_perform()
2420 int ret = rs_switch_to_column(mvm, lq_sta, sta, in rs_rate_scale_perform()
2423 lq_sta->search_better_tbl = 1; in rs_rate_scale_perform()
2427 lq_sta->rs_state = RS_STATE_SEARCH_CYCLE_ENDED; in rs_rate_scale_perform()
2431 if (lq_sta->search_better_tbl) { in rs_rate_scale_perform()
2433 tbl = &(lq_sta->lq_info[(1 - lq_sta->active_tbl)]); in rs_rate_scale_perform()
2441 rs_update_rate_tbl(mvm, sta, lq_sta, tbl); in rs_rate_scale_perform()
2447 if (done_search && lq_sta->rs_state == RS_STATE_SEARCH_CYCLE_ENDED) { in rs_rate_scale_perform()
2453 tbl1 = &(lq_sta->lq_info[lq_sta->active_tbl]); in rs_rate_scale_perform()
2466 rs_set_stay_in_table(mvm, 1, lq_sta); in rs_rate_scale_perform()
2471 if ((lq_sta->last_tpt > IWL_AGG_TPT_THREHOLD) && in rs_rate_scale_perform()
2472 (lq_sta->tx_agg_tid_en & (1 << tid)) && in rs_rate_scale_perform()
2480 lq_sta, sta); in rs_rate_scale_perform()
2483 rs_set_stay_in_table(mvm, 0, lq_sta); in rs_rate_scale_perform()
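
The rs_rate_scale_perform() lines trace the main decision loop: while a probe of another column is in flight, the search table's measured average throughput is compared against last_tpt and the probed table is either adopted (active_tbl flips) or discarded; otherwise a scale-up/down decision is made on the active table, transmit-power reduction is considered while locked in a column, and a new column may be selected when a search cycle is running. Below is a hedged sketch of the adopt-or-revert step only; the thresholds and the scale-action logic are intentionally omitted and all names are illustrative.

    /* Hedged sketch of the "did the probed column beat the current one?"
     * step visible in rs_rate_scale_perform(); names are illustrative. */
    #include <stdbool.h>
    #include <stdio.h>

    struct scale_tbl { unsigned int average_tpt; };

    struct rs_sta {
        bool search_better_tbl;     /* a probe table is being measured */
        int active_tbl;             /* index 0 or 1 into lq_info[] */
        unsigned int last_tpt;      /* throughput of the table we came from */
        struct scale_tbl lq_info[2];
    };

    /* Returns the index of the table that should drive the next LQ command. */
    static int settle_search(struct rs_sta *s)
    {
        int probe = 1 - s->active_tbl;

        if (!s->search_better_tbl)
            return s->active_tbl;

        s->search_better_tbl = false;
        if (s->lq_info[probe].average_tpt > s->last_tpt)
            s->active_tbl = probe;  /* adopt the probed column */
        /* else: keep the old active table and fall back to its rate */
        return s->active_tbl;
    }

    int main(void)
    {
        struct rs_sta s = {
            .search_better_tbl = true, .active_tbl = 0, .last_tpt = 300,
            .lq_info = { { .average_tpt = 300 }, { .average_tpt = 450 } },
        };
        printf("next active table: %d\n", settle_search(&s)); /* prints 1 */
        return 0;
    }
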
2559 struct iwl_lq_sta *lq_sta) in rs_init_optimal_rate() argument
2561 struct rs_rate *rate = &lq_sta->optimal_rate; in rs_init_optimal_rate()
2563 if (lq_sta->max_mimo2_rate_idx != IWL_RATE_INVALID) in rs_init_optimal_rate()
2564 rate->type = lq_sta->is_vht ? LQ_VHT_MIMO2 : LQ_HT_MIMO2; in rs_init_optimal_rate()
2565 else if (lq_sta->max_siso_rate_idx != IWL_RATE_INVALID) in rs_init_optimal_rate()
2566 rate->type = lq_sta->is_vht ? LQ_VHT_SISO : LQ_HT_SISO; in rs_init_optimal_rate()
2567 else if (lq_sta->band == IEEE80211_BAND_5GHZ) in rs_init_optimal_rate()
2578 lq_sta->optimal_rate_mask = lq_sta->active_mimo2_rate; in rs_init_optimal_rate()
2580 lq_sta->optimal_rate_mask = lq_sta->active_siso_rate; in rs_init_optimal_rate()
2582 lq_sta->optimal_rate_mask = lq_sta->active_legacy_rate; in rs_init_optimal_rate()
2584 if (lq_sta->band == IEEE80211_BAND_5GHZ) { in rs_init_optimal_rate()
2585 lq_sta->optimal_rates = rs_optimal_rates_5ghz_legacy; in rs_init_optimal_rate()
2586 lq_sta->optimal_nentries = in rs_init_optimal_rate()
2589 lq_sta->optimal_rates = rs_optimal_rates_24ghz_legacy; in rs_init_optimal_rate()
2590 lq_sta->optimal_nentries = in rs_init_optimal_rate()
2597 lq_sta->optimal_rates = rs_optimal_rates_vht_20mhz; in rs_init_optimal_rate()
2598 lq_sta->optimal_nentries = in rs_init_optimal_rate()
2601 lq_sta->optimal_rates = rs_optimal_rates_vht_40_80mhz; in rs_init_optimal_rate()
2602 lq_sta->optimal_nentries = in rs_init_optimal_rate()
2606 lq_sta->optimal_rates = rs_optimal_rates_ht; in rs_init_optimal_rate()
2607 lq_sta->optimal_nentries = ARRAY_SIZE(rs_optimal_rates_ht); in rs_init_optimal_rate()
2613 struct iwl_lq_sta *lq_sta) in rs_get_optimal_rate() argument
2615 struct rs_rate *rate = &lq_sta->optimal_rate; in rs_get_optimal_rate()
2618 rate->index = find_first_bit(&lq_sta->optimal_rate_mask, in rs_get_optimal_rate()
2621 for (i = 0; i < lq_sta->optimal_nentries; i++) { in rs_get_optimal_rate()
2622 int rate_idx = lq_sta->optimal_rates[i].rate_idx; in rs_get_optimal_rate()
2624 if ((lq_sta->pers.last_rssi >= lq_sta->optimal_rates[i].rssi) && in rs_get_optimal_rate()
2625 (BIT(rate_idx) & lq_sta->optimal_rate_mask)) { in rs_get_optimal_rate()
2638 struct iwl_lq_sta *lq_sta, in rs_get_initial_rate() argument
2648 for (i = 0; i < ARRAY_SIZE(lq_sta->pers.chain_signal); i++) { in rs_get_initial_rate()
2649 if (!(lq_sta->pers.chains & BIT(i))) in rs_get_initial_rate()
2652 if (lq_sta->pers.chain_signal[i] > best_rssi) { in rs_get_initial_rate()
2653 best_rssi = lq_sta->pers.chain_signal[i]; in rs_get_initial_rate()
2670 rate->index = find_first_bit(&lq_sta->active_legacy_rate, in rs_get_initial_rate()
2687 (BIT(rate_idx) & lq_sta->active_legacy_rate)) { in rs_get_initial_rate()
2700 struct iwl_lq_sta *lq_sta, in rs_update_last_rssi() argument
2705 lq_sta->pers.chains = rx_status->chains; in rs_update_last_rssi()
2706 lq_sta->pers.chain_signal[0] = rx_status->chain_signal[0]; in rs_update_last_rssi()
2707 lq_sta->pers.chain_signal[1] = rx_status->chain_signal[1]; in rs_update_last_rssi()
2708 lq_sta->pers.chain_signal[2] = rx_status->chain_signal[2]; in rs_update_last_rssi()
2709 lq_sta->pers.last_rssi = S8_MIN; in rs_update_last_rssi()
2711 for (i = 0; i < ARRAY_SIZE(lq_sta->pers.chain_signal); i++) { in rs_update_last_rssi()
2712 if (!(lq_sta->pers.chains & BIT(i))) in rs_update_last_rssi()
2715 if (lq_sta->pers.chain_signal[i] > lq_sta->pers.last_rssi) in rs_update_last_rssi()
2716 lq_sta->pers.last_rssi = lq_sta->pers.chain_signal[i]; in rs_update_last_rssi()
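
rs_update_last_rssi() keeps the per-chain signal levels from the latest RX status and records the strongest one as last_rssi; rs_get_initial_rate() and rs_get_optimal_rate() later walk an RSSI-threshold table and pick the highest rate whose threshold that value still clears. A short sketch of the max-over-chains step, assuming three RX chains as the listing's array suggests.

    /* Strongest-chain RSSI selection, as suggested by rs_update_last_rssi();
     * the three-chain array and the S8_MIN sentinel mirror the listing. */
    #include <stdint.h>
    #include <stdio.h>

    #define NUM_CHAINS 3
    #define S8_MIN (-128)

    static int8_t strongest_rssi(uint8_t chains_mask,
                                 const int8_t signal[NUM_CHAINS])
    {
        int8_t best = S8_MIN;

        for (int i = 0; i < NUM_CHAINS; i++) {
            if (!(chains_mask & (1 << i)))   /* chain absent from this RX */
                continue;
            if (signal[i] > best)
                best = signal[i];
        }
        return best;
    }

    int main(void)
    {
        int8_t sig[NUM_CHAINS] = { -61, -55, -70 };
        /* mask 0x3: only chains 0 and 1 reported; prints -55 */
        printf("last_rssi = %d dBm\n", strongest_rssi(0x3, sig));
        return 0;
    }
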
2736 struct iwl_lq_sta *lq_sta, in rs_initialize_lq() argument
2744 if (!sta || !lq_sta) in rs_initialize_lq()
2747 if (!lq_sta->search_better_tbl) in rs_initialize_lq()
2748 active_tbl = lq_sta->active_tbl; in rs_initialize_lq()
2750 active_tbl = 1 - lq_sta->active_tbl; in rs_initialize_lq()
2752 tbl = &(lq_sta->lq_info[active_tbl]); in rs_initialize_lq()
2755 rs_get_initial_rate(mvm, lq_sta, band, rate); in rs_initialize_lq()
2756 rs_init_optimal_rate(mvm, sta, lq_sta); in rs_initialize_lq()
2764 rs_set_expected_tpt_table(lq_sta, tbl); in rs_initialize_lq()
2765 rs_fill_lq_cmd(mvm, sta, lq_sta, rate); in rs_initialize_lq()
2767 iwl_mvm_send_lq_cmd(mvm, &lq_sta->lq, init); in rs_initialize_lq()
2778 struct iwl_lq_sta *lq_sta = mvm_sta; in rs_get_rate() local
2793 if (lq_sta && !lq_sta->pers.drv) { in rs_get_rate()
2802 iwl_mvm_hwrate_to_tx_rate(lq_sta->last_rate_n_flags, in rs_get_rate()
2809 if (lq_sta->rs_state != RS_STATE_STAY_IN_COLUMN) { in rs_get_rate()
2810 optimal_rate = rs_get_optimal_rate(mvm, lq_sta); in rs_get_rate()
2824 struct iwl_lq_sta *lq_sta = &sta_priv->lq_sta; in rs_alloc_sta() local
2828 lq_sta->pers.drv = mvm; in rs_alloc_sta()
2830 lq_sta->pers.dbg_fixed_rate = 0; in rs_alloc_sta()
2831 lq_sta->pers.dbg_fixed_txp_reduction = TPC_INVALID; in rs_alloc_sta()
2832 lq_sta->pers.ss_force = RS_SS_FORCE_NONE; in rs_alloc_sta()
2834 lq_sta->pers.chains = 0; in rs_alloc_sta()
2835 memset(lq_sta->pers.chain_signal, 0, sizeof(lq_sta->pers.chain_signal)); in rs_alloc_sta()
2836 lq_sta->pers.last_rssi = S8_MIN; in rs_alloc_sta()
2838 return &sta_priv->lq_sta; in rs_alloc_sta()
2861 struct iwl_lq_sta *lq_sta) in rs_vht_set_enabled_rates() argument
2876 lq_sta->active_siso_rate |= BIT(i); in rs_vht_set_enabled_rates()
2894 lq_sta->active_mimo2_rate |= BIT(i); in rs_vht_set_enabled_rates()
2901 struct iwl_lq_sta *lq_sta, in rs_ht_init() argument
2908 lq_sta->active_siso_rate = ht_cap->mcs.rx_mask[0] << 1; in rs_ht_init()
2909 lq_sta->active_siso_rate |= ht_cap->mcs.rx_mask[0] & 0x1; in rs_ht_init()
2910 lq_sta->active_siso_rate &= ~((u16)0x2); in rs_ht_init()
2911 lq_sta->active_siso_rate <<= IWL_FIRST_OFDM_RATE; in rs_ht_init()
2913 lq_sta->active_mimo2_rate = ht_cap->mcs.rx_mask[1] << 1; in rs_ht_init()
2914 lq_sta->active_mimo2_rate |= ht_cap->mcs.rx_mask[1] & 0x1; in rs_ht_init()
2915 lq_sta->active_mimo2_rate &= ~((u16)0x2); in rs_ht_init()
2916 lq_sta->active_mimo2_rate <<= IWL_FIRST_OFDM_RATE; in rs_ht_init()
2920 lq_sta->ldpc = true; in rs_ht_init()
2925 lq_sta->stbc_capable = true; in rs_ht_init()
2927 lq_sta->is_vht = false; in rs_ht_init()
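
The rs_ht_init() lines build active_siso_rate and active_mimo2_rate from the HT capability rx_mask bytes with a small bit trick: the mask is shifted up by one, bit 0 is OR-ed back in, bit 1 is cleared, and the result is shifted by IWL_FIRST_OFDM_RATE. The apparent intent is to map MCS 0..7 onto the driver's rate-index space, skipping the slot that corresponds to legacy 9 Mbps (which has no HT equivalent) and placing the whole block above the CCK rates; this matches the fixed-rate masks shown later in the listing (0x1FD0, described there as "6 - 60 MBits, no 9, no CCK"). The demonstration below assumes IWL_FIRST_OFDM_RATE is 4, as inferred from those masks.

    /* Demonstration of the HT rx_mask -> driver rate-index mapping used in
     * rs_ht_init(): shift MCS 1..7 up one slot, keep MCS 0 in place, clear
     * the legacy 9 Mbps slot, then move the block above the CCK indexes.
     * IWL_FIRST_OFDM_RATE = 4 is inferred, not quoted from the driver. */
    #include <stdint.h>
    #include <stdio.h>

    #define IWL_FIRST_OFDM_RATE 4   /* index of the 6 Mbps slot (inferred) */

    static uint16_t ht_mcs_to_rate_mask(uint8_t rx_mask)
    {
        uint16_t rates = (uint16_t)rx_mask << 1;  /* MCS1..7 -> bits 2..8 */
        rates |= rx_mask & 0x1;                   /* MCS0 stays at bit 0  */
        rates &= ~(uint16_t)0x2;                  /* drop the 9 Mbps slot */
        return rates << IWL_FIRST_OFDM_RATE;      /* skip the CCK indexes */
    }

    int main(void)
    {
        /* all eight single-stream MCS supported:
         * prints 0x1FD0, i.e. "6 - 60 MBits, no 9, no CCK" */
        printf("0x%04X\n", ht_mcs_to_rate_mask(0xFF));
        return 0;
    }
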
2932 struct iwl_lq_sta *lq_sta, in rs_vht_init() argument
2935 rs_vht_set_enabled_rates(sta, vht_cap, lq_sta); in rs_vht_init()
2939 lq_sta->ldpc = true; in rs_vht_init()
2944 lq_sta->stbc_capable = true; in rs_vht_init()
2949 lq_sta->bfer_capable = true; in rs_vht_init()
2951 lq_sta->is_vht = true; in rs_vht_init()
3030 struct iwl_lq_sta *lq_sta = &sta_priv->lq_sta; in iwl_mvm_rs_rate_init() local
3035 memset(lq_sta, 0, offsetof(typeof(*lq_sta), pers)); in iwl_mvm_rs_rate_init()
3039 lq_sta->lq.sta_id = sta_priv->sta_id; in iwl_mvm_rs_rate_init()
3042 rs_rate_scale_clear_tbl_windows(mvm, &lq_sta->lq_info[j]); in iwl_mvm_rs_rate_init()
3044 lq_sta->flush_timer = 0; in iwl_mvm_rs_rate_init()
3045 lq_sta->last_tx = jiffies; in iwl_mvm_rs_rate_init()
3055 lq_sta->missed_rate_counter = IWL_MVM_RS_MISSED_RATE_MAX; in iwl_mvm_rs_rate_init()
3056 lq_sta->band = sband->band; in iwl_mvm_rs_rate_init()
3061 lq_sta->active_legacy_rate = 0; in iwl_mvm_rs_rate_init()
3063 lq_sta->active_legacy_rate |= BIT(sband->bitrates[i].hw_value); in iwl_mvm_rs_rate_init()
3067 rs_ht_init(mvm, sta, lq_sta, ht_cap); in iwl_mvm_rs_rate_init()
3069 rs_vht_init(mvm, sta, lq_sta, vht_cap); in iwl_mvm_rs_rate_init()
3071 lq_sta->max_legacy_rate_idx = in iwl_mvm_rs_rate_init()
3072 rs_get_max_rate_from_mask(lq_sta->active_legacy_rate); in iwl_mvm_rs_rate_init()
3073 lq_sta->max_siso_rate_idx = in iwl_mvm_rs_rate_init()
3074 rs_get_max_rate_from_mask(lq_sta->active_siso_rate); in iwl_mvm_rs_rate_init()
3075 lq_sta->max_mimo2_rate_idx = in iwl_mvm_rs_rate_init()
3076 rs_get_max_rate_from_mask(lq_sta->active_mimo2_rate); in iwl_mvm_rs_rate_init()
3080 lq_sta->active_legacy_rate, in iwl_mvm_rs_rate_init()
3081 lq_sta->active_siso_rate, in iwl_mvm_rs_rate_init()
3082 lq_sta->active_mimo2_rate, in iwl_mvm_rs_rate_init()
3083 lq_sta->is_vht, lq_sta->ldpc, lq_sta->stbc_capable, in iwl_mvm_rs_rate_init()
3084 lq_sta->bfer_capable); in iwl_mvm_rs_rate_init()
3086 lq_sta->max_legacy_rate_idx, in iwl_mvm_rs_rate_init()
3087 lq_sta->max_siso_rate_idx, in iwl_mvm_rs_rate_init()
3088 lq_sta->max_mimo2_rate_idx); in iwl_mvm_rs_rate_init()
3091 lq_sta->lq.single_stream_ant_msk = in iwl_mvm_rs_rate_init()
3093 lq_sta->lq.dual_stream_ant_msk = ANT_AB; in iwl_mvm_rs_rate_init()
3096 lq_sta->tx_agg_tid_en = IWL_AGG_ALL_TID; in iwl_mvm_rs_rate_init()
3097 lq_sta->is_agg = 0; in iwl_mvm_rs_rate_init()
3101 rs_initialize_lq(mvm, sta, lq_sta, band, init); in iwl_mvm_rs_rate_init()
3157 struct iwl_lq_sta *lq_sta, in rs_fill_rates_for_column() argument
3180 bottom_reached = rs_get_lower_rate_in_column(lq_sta, rate); in rs_fill_rates_for_column()
3213 struct iwl_lq_sta *lq_sta, in rs_build_rates_table() argument
3219 struct iwl_lq_cmd *lq_cmd = &lq_sta->lq; in rs_build_rates_table()
3228 rs_stbc_allow(mvm, sta, lq_sta)) in rs_build_rates_table()
3243 rs_fill_rates_for_column(mvm, lq_sta, &rate, lq_cmd->rs_table, &index, in rs_build_rates_table()
3247 rs_get_lower_rate_down_column(lq_sta, &rate); in rs_build_rates_table()
3262 rs_fill_rates_for_column(mvm, lq_sta, &rate, lq_cmd->rs_table, &index, in rs_build_rates_table()
3266 rs_get_lower_rate_down_column(lq_sta, &rate); in rs_build_rates_table()
3271 rs_fill_rates_for_column(mvm, lq_sta, &rate, lq_cmd->rs_table, &index, in rs_build_rates_table()
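
rs_build_rates_table() fills the firmware LQ command's rs_table in tiers: the initial column's rates first (rs_fill_rates_for_column), then a step down to a lower column via rs_get_lower_rate_down_column(), repeated, so the firmware has progressively more robust fallbacks to retry with. The outline below illustrates that tiering only; the retry counts, the 16-entry length, and the column ordering are placeholders, not the driver's rules.

    /* Hedged outline of the tiered fallback table built by
     * rs_build_rates_table(); counts and table length are assumed. */
    #include <stdio.h>

    #define LQ_TABLE_SIZE 16

    struct fb_rate { int column; int index; };

    /* Stand-in for rs_fill_rates_for_column(): step down within one column. */
    static int fill_column(struct fb_rate *tbl, int pos, int column,
                           int start_idx, int entries)
    {
        for (int i = 0; i < entries && pos < LQ_TABLE_SIZE; i++, pos++) {
            tbl[pos].column = column;
            tbl[pos].index = start_idx > i ? start_idx - i : 0;
        }
        return pos;
    }

    /* Stand-in for rs_get_lower_rate_down_column(): switch to a lower column. */
    static void lower_column(int *column, int *index)
    {
        *column -= 1;   /* e.g. MIMO2 -> SISO -> legacy (illustrative order) */
        *index  -= 1;   /* drop one rate step on the column switch */
    }

    int main(void)
    {
        struct fb_rate tbl[LQ_TABLE_SIZE];
        int column = 2, index = 7, pos = 0;

        pos = fill_column(tbl, pos, column, index, 3);  /* initial column */
        lower_column(&column, &index);
        pos = fill_column(tbl, pos, column, index, 3);  /* first fallback  */
        lower_column(&column, &index);
        pos = fill_column(tbl, pos, column, index, LQ_TABLE_SIZE - pos);

        for (int i = 0; i < pos; i++)
            printf("rs_table[%2d]: column %d, rate index %d\n",
                   i, tbl[i].column, tbl[i].index);
        return 0;
    }
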
3287 struct iwl_lq_cmd *lq_cmd = &mvmsta->lq_sta.lq; in rs_bfer_active_iter()
3341 struct iwl_lq_sta *lq_sta, in rs_set_lq_ss_params() argument
3344 struct iwl_lq_cmd *lq_cmd = &lq_sta->lq; in rs_set_lq_ss_params()
3360 if (lq_sta->pers.ss_force == RS_SS_FORCE_STBC) in rs_set_lq_ss_params()
3362 else if (lq_sta->pers.ss_force == RS_SS_FORCE_BFER) in rs_set_lq_ss_params()
3365 if (lq_sta->pers.ss_force != RS_SS_FORCE_NONE) { in rs_set_lq_ss_params()
3367 lq_sta->pers.ss_force); in rs_set_lq_ss_params()
3372 if (lq_sta->stbc_capable) in rs_set_lq_ss_params()
3375 if (!lq_sta->bfer_capable) in rs_set_lq_ss_params()
3399 struct iwl_lq_cmd *bfersta_lq_cmd = &bfer_mvmsta->lq_sta.lq; in rs_set_lq_ss_params()
3417 struct iwl_lq_sta *lq_sta, in rs_fill_lq_cmd() argument
3420 struct iwl_lq_cmd *lq_cmd = &lq_sta->lq; in rs_fill_lq_cmd()
3429 if (lq_sta->pers.dbg_fixed_rate) { in rs_fill_lq_cmd()
3431 lq_sta->band, in rs_fill_lq_cmd()
3432 lq_sta->pers.dbg_fixed_rate); in rs_fill_lq_cmd()
3439 rs_build_rates_table(mvm, sta, lq_sta, initial_rate); in rs_fill_lq_cmd()
3442 rs_set_lq_ss_params(mvm, sta, lq_sta, initial_rate); in rs_fill_lq_cmd()
3556 struct iwl_lq_sta *lq_sta) in rs_program_fix_rate() argument
3558 lq_sta->active_legacy_rate = 0x0FFF; /* 1 - 54 MBits, includes CCK */ in rs_program_fix_rate()
3559 lq_sta->active_siso_rate = 0x1FD0; /* 6 - 60 MBits, no 9, no CCK */ in rs_program_fix_rate()
3560 lq_sta->active_mimo2_rate = 0x1FD0; /* 6 - 60 MBits, no 9, no CCK */ in rs_program_fix_rate()
3563 lq_sta->lq.sta_id, lq_sta->pers.dbg_fixed_rate); in rs_program_fix_rate()
3565 if (lq_sta->pers.dbg_fixed_rate) { in rs_program_fix_rate()
3566 rs_fill_lq_cmd(mvm, NULL, lq_sta, NULL); in rs_program_fix_rate()
3567 iwl_mvm_send_lq_cmd(lq_sta->pers.drv, &lq_sta->lq, false); in rs_program_fix_rate()
3574 struct iwl_lq_sta *lq_sta = file->private_data; in rs_sta_dbgfs_scale_table_write() local
3580 mvm = lq_sta->pers.drv; in rs_sta_dbgfs_scale_table_write()
3587 lq_sta->pers.dbg_fixed_rate = parsed_rate; in rs_sta_dbgfs_scale_table_write()
3589 lq_sta->pers.dbg_fixed_rate = 0; in rs_sta_dbgfs_scale_table_write()
3591 rs_program_fix_rate(mvm, lq_sta); in rs_sta_dbgfs_scale_table_write()
3604 struct iwl_lq_sta *lq_sta = file->private_data; in rs_sta_dbgfs_scale_table_read() local
3606 struct iwl_scale_tbl_info *tbl = &(lq_sta->lq_info[lq_sta->active_tbl]); in rs_sta_dbgfs_scale_table_read()
3609 mvm = lq_sta->pers.drv; in rs_sta_dbgfs_scale_table_read()
3614 desc += sprintf(buff+desc, "sta_id %d\n", lq_sta->lq.sta_id); in rs_sta_dbgfs_scale_table_read()
3616 lq_sta->total_failed, lq_sta->total_success, in rs_sta_dbgfs_scale_table_read()
3617 lq_sta->active_legacy_rate); in rs_sta_dbgfs_scale_table_read()
3619 lq_sta->pers.dbg_fixed_rate); in rs_sta_dbgfs_scale_table_read()
3637 (lq_sta->is_agg) ? "AGG on" : ""); in rs_sta_dbgfs_scale_table_read()
3640 lq_sta->last_rate_n_flags); in rs_sta_dbgfs_scale_table_read()
3643 lq_sta->lq.flags, in rs_sta_dbgfs_scale_table_read()
3644 lq_sta->lq.mimo_delim, in rs_sta_dbgfs_scale_table_read()
3645 lq_sta->lq.single_stream_ant_msk, in rs_sta_dbgfs_scale_table_read()
3646 lq_sta->lq.dual_stream_ant_msk); in rs_sta_dbgfs_scale_table_read()
3650 le16_to_cpu(lq_sta->lq.agg_time_limit), in rs_sta_dbgfs_scale_table_read()
3651 lq_sta->lq.agg_disable_start_th, in rs_sta_dbgfs_scale_table_read()
3652 lq_sta->lq.agg_frame_cnt_limit); in rs_sta_dbgfs_scale_table_read()
3654 desc += sprintf(buff+desc, "reduced tpc=%d\n", lq_sta->lq.reduced_tpc); in rs_sta_dbgfs_scale_table_read()
3655 ss_params = le32_to_cpu(lq_sta->lq.ss_params); in rs_sta_dbgfs_scale_table_read()
3667 lq_sta->lq.initial_rate_index[0], in rs_sta_dbgfs_scale_table_read()
3668 lq_sta->lq.initial_rate_index[1], in rs_sta_dbgfs_scale_table_read()
3669 lq_sta->lq.initial_rate_index[2], in rs_sta_dbgfs_scale_table_read()
3670 lq_sta->lq.initial_rate_index[3]); in rs_sta_dbgfs_scale_table_read()
3673 u32 r = le32_to_cpu(lq_sta->lq.rs_table[i]); in rs_sta_dbgfs_scale_table_read()
3699 struct iwl_lq_sta *lq_sta = file->private_data; in rs_sta_dbgfs_stats_table_read() local
3706 tbl = &(lq_sta->lq_info[i]); in rs_sta_dbgfs_stats_table_read()
3711 lq_sta->active_tbl == i ? "*" : "x", in rs_sta_dbgfs_stats_table_read()
3773 struct iwl_lq_sta *lq_sta = file->private_data; in rs_sta_dbgfs_drv_tx_stats_read() local
3794 stats = &(lq_sta->pers.tx_stats[col][rate]); in rs_sta_dbgfs_drv_tx_stats_read()
3812 struct iwl_lq_sta *lq_sta = file->private_data; in rs_sta_dbgfs_drv_tx_stats_write() local
3813 memset(lq_sta->pers.tx_stats, 0, sizeof(lq_sta->pers.tx_stats)); in rs_sta_dbgfs_drv_tx_stats_write()
3829 struct iwl_lq_sta *lq_sta = file->private_data; in iwl_dbgfs_ss_force_read() local
3841 ss_force_name[lq_sta->pers.ss_force]); in iwl_dbgfs_ss_force_read()
3845 static ssize_t iwl_dbgfs_ss_force_write(struct iwl_lq_sta *lq_sta, char *buf, in iwl_dbgfs_ss_force_write() argument
3848 struct iwl_mvm *mvm = lq_sta->pers.drv; in iwl_dbgfs_ss_force_write()
3852 lq_sta->pers.ss_force = RS_SS_FORCE_NONE; in iwl_dbgfs_ss_force_write()
3854 lq_sta->pers.ss_force = RS_SS_FORCE_SISO; in iwl_dbgfs_ss_force_write()
3856 if (lq_sta->stbc_capable) { in iwl_dbgfs_ss_force_write()
3857 lq_sta->pers.ss_force = RS_SS_FORCE_STBC; in iwl_dbgfs_ss_force_write()
3864 if (lq_sta->bfer_capable) { in iwl_dbgfs_ss_force_write()
3865 lq_sta->pers.ss_force = RS_SS_FORCE_BFER; in iwl_dbgfs_ss_force_write()
3881 if (!debugfs_create_file(#name, mode, parent, lq_sta, \
3890 struct iwl_lq_sta *lq_sta = priv_sta; in rs_add_debugfs() local
3893 mvmsta = container_of(lq_sta, struct iwl_mvm_sta, lq_sta); in rs_add_debugfs()
3899 lq_sta, &rs_sta_dbgfs_scale_table_ops); in rs_add_debugfs()
3901 lq_sta, &rs_sta_dbgfs_stats_table_ops); in rs_add_debugfs()
3903 lq_sta, &rs_sta_dbgfs_drv_tx_stats_ops); in rs_add_debugfs()
3905 &lq_sta->tx_agg_tid_en); in rs_add_debugfs()
3907 &lq_sta->pers.dbg_fixed_txp_reduction); in rs_add_debugfs()
3968 struct iwl_lq_cmd *lq = &mvmsta->lq_sta.lq; in iwl_mvm_tx_protection()