ath9k dynack (dynamic ACK timeout estimation): selected excerpts
#define COMPUTE_TO	(5 * HZ)
#define LATEACK_DELAY	(10 * HZ)

/* ath_dynack_get_max_to - set max timeout according to channel width */
	const struct ath9k_channel *chan = ah->curchan;
	...

/* ath_dynack_ewma - EWMA (Exponentially Weighted Moving Average) calculation */
	return (new * (EWMA_DIV - EWMA_LEVEL) +
		old * EWMA_LEVEL) / EWMA_DIV;
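Only part of the smoothing expression is matched above; as a self-contained reading aid, here is a minimal sketch of the same EWMA step. The EWMA_LEVEL/EWMA_DIV values below are illustrative assumptions, not taken from the driver.

/* Standalone sketch of the EWMA step; constant values are illustrative. */
#define EWMA_LEVEL	96	/* weight kept from the old estimate */
#define EWMA_DIV	128

static int ewma(int old, int new)
{
	/* integer form of: (1 - L/D) * new + (L/D) * old */
	return (new * (EWMA_DIV - EWMA_LEVEL) + old * EWMA_LEVEL) / EWMA_DIV;
}

/* example: old = 64, new = 128 -> (128 * 32 + 64 * 96) / 128 = 80 */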
/* ath_dynack_get_sifs - get sifs time based on phy used */
	if (IS_CHAN_QUARTER_RATE(ah->curchan))
		...
	else if (IS_CHAN_HALF_RATE(ah->curchan))
		...
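Only the half/quarter-rate branches are matched above. As a rough sketch of the idea (the SIFS picked here is later subtracted from the frame duration), the selection could look like the following; the microsecond values are assumptions, not the driver's constants.

#include <stdbool.h>

/* Sketch of per-PHY SIFS selection, stretched on half/quarter-rate channels;
 * the numeric values for the narrow-channel cases are assumptions. */
enum phy_kind { PHY_CCK, PHY_OFDM };

static unsigned int sifs_us(enum phy_kind phy, bool quarter_rate, bool half_rate)
{
	unsigned int sifs = 10;			/* CCK/11b SIFS */

	if (phy == PHY_OFDM) {
		if (quarter_rate)
			sifs = 64;		/* assumed 5 MHz value */
		else if (half_rate)
			sifs = 32;		/* assumed 10 MHz value */
		else
			sifs = 16;		/* standard OFDM SIFS */
	}
	return sifs;
}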
/* ath_dynack_bssidmask - filter out ACK frames based on BSSID mask */
		if ((common->macaddr[i] & common->bssidmask[i]) !=
		    (mac[i] & common->bssidmask[i]))
			...
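The masked compare above decides whether a received ACK could have been addressed to us. A self-contained version of the same check, with a hypothetical name and return convention, might look like this:

#include <stdbool.h>

#define ETH_ALEN 6

/* Accept the frame only if its address matches our MAC on every bit the
 * BSSID mask covers; hypothetical standalone form of the filter above. */
static bool matches_bssidmask(const unsigned char *mac,
			      const unsigned char *own,
			      const unsigned char *mask)
{
	int i;

	for (i = 0; i < ETH_ALEN; i++)
		if ((own[i] & mask[i]) != (mac[i] & mask[i]))
			return false;
	return true;
}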
/* ath_dynack_set_timeout - configure timeouts/slottime registers */
	int slottime = (to - 3) / 2;
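For a sense of the numbers: with an ACK timeout of, say, to = 75 microseconds, the line above programs a slot time of (75 - 3) / 2 = 36 microseconds. The value 75 is only an illustrative input, not one taken from the driver.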
/* ath_dynack_compute_ackto - compute ACK timeout as the maximum STA timeout */
	struct ath_dynack *da = &ah->dynack;
	...
	list_for_each_entry(an, &da->nodes, list)
		if (an->ackto > to)
			to = an->ackto;

	if (to && da->ackto != to) {
		...
		da->ackto = to;
/* ath_dynack_compute_to - compute STA ACK timeout */
	struct ath_dynack *da = &ah->dynack;
	...
	while (da->st_rbf.h_rb != da->st_rbf.t_rb &&
	       da->ack_rbf.h_rb != da->ack_rbf.t_rb) {
		ack_ts = da->ack_rbf.tstamp[da->ack_rbf.h_rb];
		st_ts = &da->st_rbf.ts[da->st_rbf.h_rb];
		dst = da->st_rbf.addr[da->st_rbf.h_rb].h_dest;
		src = da->st_rbf.addr[da->st_rbf.h_rb].h_src;

		ath_dbg(common, DYNACK,
			"ack_ts %u st_ts %u st_dur %u [%u-%u]\n",
			ack_ts, st_ts->tstamp, st_ts->dur,
			da->ack_rbf.h_rb, da->st_rbf.h_rb);

		if (ack_ts > st_ts->tstamp + st_ts->dur) {
			ackto = ack_ts - st_ts->tstamp - st_ts->dur;

			sta = ieee80211_find_sta_by_ifaddr(ah->hw, dst,
							   src);
			if (sta) {
				an = (struct ath_node *)sta->drv_priv;
				an->ackto = ath_dynack_ewma(an->ackto,
							    ackto);
				...
					an->ackto, ackto);
				if (time_is_before_jiffies(da->lto)) {
					...
					da->lto = jiffies + COMPUTE_TO;
				}
			}
			INCR(da->ack_rbf.h_rb, ATH_DYN_BUF);
			...
			INCR(da->st_rbf.h_rb, ATH_DYN_BUF);
		} else {
			INCR(da->ack_rbf.h_rb, ATH_DYN_BUF);
			...
		}
	}
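Each loop iteration pairs the oldest tx-status entry with the oldest ACK timestamp; when the ACK was seen after the frame finished, the gap becomes a new timeout sample that feeds ath_dynack_ewma(), and at most every COMPUTE_TO jiffies the per-station maximum is pushed to the hardware. A minimal sketch of the per-sample derivation, with hypothetical names:

/* Hypothetical standalone form of the per-sample derivation used above. */
struct tx_sample {
	unsigned int tstamp;	/* tx-status timestamp */
	unsigned int dur;	/* frame duration (SIFS already subtracted) */
};

static int ackto_sample(unsigned int ack_ts, const struct tx_sample *st)
{
	/* the ACK must be timestamped after the frame ended, otherwise the
	 * two ring-buffer heads cannot belong to the same exchange */
	if (ack_ts <= st->tstamp + st->dur)
		return -1;

	return ack_ts - st->tstamp - st->dur;	/* extra round-trip delay */
}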
/* ath_dynack_sample_tx_ts - status timestamp sampling method */
	struct ath_dynack *da = &ah->dynack;
	...
	u32 dur = ts->duration;
	...
	if (!da->enabled || (info->flags & IEEE80211_TX_CTL_NO_ACK))
		return;

	spin_lock_bh(&da->qlock);

	hdr = (struct ieee80211_hdr *)skb->data;

	if (ts->ts_status & ATH9K_TXERR_XRETRY) {
		if (ieee80211_is_assoc_req(hdr->frame_control) ||
		    ieee80211_is_assoc_resp(hdr->frame_control) ||
		    ieee80211_is_auth(hdr->frame_control)) {
			...
				an = (struct ath_node *)sta->drv_priv;
				an->ackto = -1;
			...
			da->lto = jiffies + LATEACK_DELAY;
		}

		spin_unlock_bh(&da->qlock);
		return;
	}

	ridx = ts->ts_rateindex;

	da->st_rbf.ts[da->st_rbf.t_rb].tstamp = ts->ts_tstamp;

	/* ether_addr_copy() gives a false warning on gcc-10 so use memcpy()
	 * ...
	 */
	memcpy(da->st_rbf.addr[da->st_rbf.t_rb].h_dest, hdr->addr1, ETH_ALEN);
	memcpy(da->st_rbf.addr[da->st_rbf.t_rb].h_src, hdr->addr2, ETH_ALEN);

	if (!(info->status.rates[ridx].flags & IEEE80211_TX_RC_MCS)) {
		const struct ieee80211_rate *rate;
		struct ieee80211_tx_rate *rates = info->status.rates;
		...
		rate = &common->sbands[info->band].bitrates[rates[ridx].idx];
		if (info->band == NL80211_BAND_2GHZ &&
		    !(rate->flags & IEEE80211_RATE_ERP_G))
			...

		dur -= ath_dynack_get_sifs(ah, phy);
	}

	da->st_rbf.ts[da->st_rbf.t_rb].dur = dur;

	INCR(da->st_rbf.t_rb, ATH_DYN_BUF);
	if (da->st_rbf.t_rb == da->st_rbf.h_rb)
		INCR(da->st_rbf.h_rb, ATH_DYN_BUF);

	ath_dbg(common, DYNACK, "{%pM} tx sample %u [dur %u][h %u-t %u]\n",
		hdr->addr1, ts->ts_tstamp, dur, da->st_rbf.h_rb,
		da->st_rbf.t_rb);
	...
	spin_unlock_bh(&da->qlock);
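Both sampling paths push into fixed-size ring buffers and overwrite the oldest entry once the tail catches the head. A sketch of that push pattern; the buffer depth and the INCR macro below are assumptions, not the driver's definitions:

#define DYN_BUF 64					/* assumed depth */
#define INCR(v, size)	((v) = ((v) + 1) % (size))	/* assumed wrap */

struct ring {
	unsigned int head, tail;
	unsigned int tstamp[DYN_BUF];
};

/* push one timestamp; when the buffer is full, drop the oldest entry */
static void ring_push(struct ring *rb, unsigned int ts)
{
	rb->tstamp[rb->tail] = ts;
	INCR(rb->tail, DYN_BUF);
	if (rb->tail == rb->head)
		INCR(rb->head, DYN_BUF);
}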
/* ath_dynack_sample_ack_ts - ACK timestamp sampling method */
	struct ath_dynack *da = &ah->dynack;
	struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data;

	if (!da->enabled || !ath_dynack_bssidmask(ah, hdr->addr1))
		return;

	spin_lock_bh(&da->qlock);
	da->ack_rbf.tstamp[da->ack_rbf.t_rb] = ts;

	INCR(da->ack_rbf.t_rb, ATH_DYN_BUF);
	if (da->ack_rbf.t_rb == da->ack_rbf.h_rb)
		INCR(da->ack_rbf.h_rb, ATH_DYN_BUF);

	ath_dbg(common, DYNACK, "rx sample %u [h %u-t %u]\n",
		ts, da->ack_rbf.h_rb, da->ack_rbf.t_rb);
	...
	spin_unlock_bh(&da->qlock);
/* ath_dynack_node_init - init ath_node related info */
	struct ath_dynack *da = &ah->dynack;

	an->ackto = da->ackto;

	spin_lock_bh(&da->qlock);
	list_add_tail(&an->list, &da->nodes);
	spin_unlock_bh(&da->qlock);

/* ath_dynack_node_deinit - deinit ath_node related info */
	struct ath_dynack *da = &ah->dynack;

	spin_lock_bh(&da->qlock);
	list_del(&an->list);
	spin_unlock_bh(&da->qlock);
/* ath_dynack_reset - reset dynack processing */
	struct ath_dynack *da = &ah->dynack;
	...
	spin_lock_bh(&da->qlock);

	da->lto = jiffies + COMPUTE_TO;

	da->st_rbf.t_rb = 0;
	da->st_rbf.h_rb = 0;
	da->ack_rbf.t_rb = 0;
	da->ack_rbf.h_rb = 0;

	da->ackto = ath_dynack_get_max_to(ah);
	list_for_each_entry(an, &da->nodes, list)
		an->ackto = da->ackto;
	...
	ath_dynack_set_timeout(ah, da->ackto);

	spin_unlock_bh(&da->qlock);
/* ath_dynack_init - init dynack data structure */
	struct ath_dynack *da = &ah->dynack;
	...
	spin_lock_init(&da->qlock);
	INIT_LIST_HEAD(&da->nodes);

	da->ackto = 9 + 16 + 64;
	...
	ah->hw->wiphy->features |= NL80211_FEATURE_ACKTO_ESTIMATION;
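The initial value 9 + 16 + 64 = 89 microseconds reads as slot time (9) plus SIFS (16) plus an extra allowance for air-propagation and turnaround delay (64); that breakdown is an interpretation of the constants, not something the excerpt itself states.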