1 // SPDX-License-Identifier: ISC
/*
 * Netlink attribute policy for the mt76 testmode interface.
 * Indexed by MT76_TM_ATTR_*; consumed by nla_parse_deprecated() in
 * mt76_testmode_cmd() and mt76_testmode_dump() below.
 */
5 static const struct nla_policy mt76_tm_policy[NUM_MT76_TM_ATTRS] = {
6 [MT76_TM_ATTR_RESET] = { .type = NLA_FLAG },
7 [MT76_TM_ATTR_STATE] = { .type = NLA_U8 },
8 [MT76_TM_ATTR_TX_COUNT] = { .type = NLA_U32 },
9 [MT76_TM_ATTR_TX_RATE_MODE] = { .type = NLA_U8 },
10 [MT76_TM_ATTR_TX_RATE_NSS] = { .type = NLA_U8 },
11 [MT76_TM_ATTR_TX_RATE_IDX] = { .type = NLA_U8 },
12 [MT76_TM_ATTR_TX_RATE_SGI] = { .type = NLA_U8 },
13 [MT76_TM_ATTR_TX_RATE_LDPC] = { .type = NLA_U8 },
14 [MT76_TM_ATTR_TX_RATE_STBC] = { .type = NLA_U8 },
15 [MT76_TM_ATTR_TX_LTF] = { .type = NLA_U8 },
16 [MT76_TM_ATTR_TX_ANTENNA] = { .type = NLA_U8 },
17 [MT76_TM_ATTR_TX_POWER_CONTROL] = { .type = NLA_U8 },
/* nested array of per-chain power values, walked in mt76_testmode_cmd() */
18 [MT76_TM_ATTR_TX_POWER] = { .type = NLA_NESTED },
19 [MT76_TM_ATTR_FREQ_OFFSET] = { .type = NLA_U32 },
/*
 * mt76_testmode_tx_pending - feed pending test frames to the hardware queue
 *
 * Queues reference-counted clones of the template frame (td->tx_skb) while
 * frames remain pending, bounding both the in-flight window
 * (tx_queued - tx_done < 1000) and the hardware queue depth (at most half of
 * ndesc), then kicks the queue. Takes q->lock itself, so it must be called
 * without that lock held.
 */
22 void mt76_testmode_tx_pending(struct mt76_dev *dev)
24 struct mt76_testmode_data *td = &dev->test;
25 struct mt76_wcid *wcid = &dev->global_wcid;
26 struct mt76_phy *phy = &dev->phy;
27 struct sk_buff *skb = td->tx_skb;
/* nothing to do without a template frame or a pending count */
31 if (!skb || !td->tx_pending)
34 qid = skb_get_queue_mapping(skb);
37 spin_lock_bh(&q->lock);
39 while (td->tx_pending > 0 && td->tx_queued - td->tx_done < 1000 &&
40 q->queued < q->ndesc / 2) {
/* skb_get(): each queued transmission holds its own ref on the template */
43 ret = dev->queue_ops->tx_queue_skb(dev, q, skb_get(skb), wcid,
52 dev->queue_ops->kick(dev, q);
54 spin_unlock_bh(&q->lock);
/*
 * mt76_testmode_tx_init - build the template frame for test transmissions
 *
 * Allocates an skb of td->tx_msdu_len bytes, fills in a FromDS data frame
 * header addressed to/from the phy's own MAC, and translates the configured
 * testmode rate parameters (mode, index, NSS, SGI, LDPC, STBC, channel
 * width) into the frame's ieee80211_tx_info control rate. Any previous
 * template in td->tx_skb is freed before being replaced.
 */
59 mt76_testmode_tx_init(struct mt76_dev *dev)
61 struct mt76_testmode_data *td = &dev->test;
62 struct mt76_phy *phy = &dev->phy;
63 struct ieee80211_tx_info *info;
64 struct ieee80211_hdr *hdr;
/* injected data frame, FromDS (AP -> STA direction) */
66 u16 fc = IEEE80211_FTYPE_DATA | IEEE80211_STYPE_DATA |
67 IEEE80211_FCTL_FROMDS;
68 struct ieee80211_tx_rate *rate;
/* spatial streams limited by the phy's antennas ... */
69 u8 max_nss = hweight8(phy->antenna_mask);
/* ... and further by the user-configured TX antenna mask, if any */
71 if (td->tx_antenna_mask)
72 max_nss = min_t(u8, max_nss, hweight8(td->tx_antenna_mask));
74 skb = alloc_skb(td->tx_msdu_len, GFP_KERNEL);
/* drop the previous template before installing the new one */
78 dev_kfree_skb(td->tx_skb);
/* zero-filled payload; header occupies the start of the buffer */
80 hdr = __skb_put_zero(skb, td->tx_msdu_len);
81 hdr->frame_control = cpu_to_le16(fc);
/* all three addresses set to our own MAC */
82 memcpy(hdr->addr1, phy->macaddr, sizeof(phy->macaddr));
83 memcpy(hdr->addr2, phy->macaddr, sizeof(phy->macaddr));
84 memcpy(hdr->addr3, phy->macaddr, sizeof(phy->macaddr));
86 info = IEEE80211_SKB_CB(skb);
87 info->flags = IEEE80211_TX_CTL_INJECTED |
88 IEEE80211_TX_CTL_NO_ACK |
89 IEEE80211_TX_CTL_NO_PS_BUFFER;
/* rates beyond VHT are not encoded via the legacy control.rates[] below;
 * presumably handled by the driver — confirm against driver set_params */
91 if (td->tx_rate_mode > MT76_TM_TX_MODE_VHT)
94 rate = &info->control.rates[0];
96 rate->idx = td->tx_rate_idx;
98 switch (td->tx_rate_mode) {
99 case MT76_TM_TX_MODE_CCK:
/* CCK only exists on 2.4 GHz */
100 if (phy->chandef.chan->band != NL80211_BAND_2GHZ)
106 case MT76_TM_TX_MODE_OFDM:
107 if (phy->chandef.chan->band != NL80211_BAND_2GHZ)
115 case MT76_TM_TX_MODE_HT:
/* validate MCS index against the stream count (8 MCS per stream) */
116 if (rate->idx > 8 * max_nss &&
118 phy->chandef.width >= NL80211_CHAN_WIDTH_40))
121 rate->flags |= IEEE80211_TX_RC_MCS;
123 case MT76_TM_TX_MODE_VHT:
127 if (td->tx_rate_nss > max_nss)
/* VHT encodes MCS and NSS together in the rate index */
130 ieee80211_rate_set_vht(rate, td->tx_rate_idx, td->tx_rate_nss);
131 rate->flags |= IEEE80211_TX_RC_VHT_MCS;
138 rate->flags |= IEEE80211_TX_RC_SHORT_GI;
140 if (td->tx_rate_ldpc)
141 info->flags |= IEEE80211_TX_CTL_LDPC;
143 if (td->tx_rate_stbc)
144 info->flags |= IEEE80211_TX_CTL_STBC;
/* map the operating channel width onto the rate-control width flags
 * (HT/VHT only) */
146 if (td->tx_rate_mode >= MT76_TM_TX_MODE_HT) {
147 switch (phy->chandef.width) {
148 case NL80211_CHAN_WIDTH_40:
149 rate->flags |= IEEE80211_TX_RC_40_MHZ_WIDTH;
151 case NL80211_CHAN_WIDTH_80:
152 rate->flags |= IEEE80211_TX_RC_80_MHZ_WIDTH;
154 case NL80211_CHAN_WIDTH_80P80:
155 case NL80211_CHAN_WIDTH_160:
156 rate->flags |= IEEE80211_TX_RC_160_MHZ_WIDTH;
/* best-effort AC for all test frames */
163 skb_set_queue_mapping(skb, IEEE80211_AC_BE);
/*
 * mt76_testmode_tx_start - arm the configured number of test transmissions
 *
 * Loads td->tx_count into the pending counter and wakes the tx worker,
 * which drains it via mt76_testmode_tx_pending().
 */
169 mt76_testmode_tx_start(struct mt76_dev *dev)
171 struct mt76_testmode_data *td = &dev->test;
175 td->tx_pending = td->tx_count;
176 mt76_worker_schedule(&dev->tx_worker);
/*
 * mt76_testmode_tx_stop - stop test transmissions and release the template
 *
 * Quiesces the tx worker while cancelling outstanding work, then waits up
 * to 10 seconds for all already-queued frames to complete (tx_done catches
 * up with tx_queued) before freeing the template skb.
 */
180 mt76_testmode_tx_stop(struct mt76_dev *dev)
182 struct mt76_testmode_data *td = &dev->test;
/* disable/enable brackets the cancellation of pending tx state */
184 mt76_worker_disable(&dev->tx_worker);
188 mt76_worker_enable(&dev->tx_worker);
/* wait (bounded) for in-flight frames to drain */
190 wait_event_timeout(dev->tx_wait, td->tx_done == td->tx_queued, 10 * HZ);
192 dev_kfree_skb(td->tx_skb);
/*
 * mt76_testmode_param_set - mark attribute @idx as explicitly configured
 * in the param_set bitmap (array of 32-bit words).
 */
197 mt76_testmode_param_set(struct mt76_testmode_data *td, u16 idx)
199 td->param_set[idx / 32] |= BIT(idx % 32);
/*
 * mt76_testmode_param_present - test whether attribute @idx was explicitly
 * configured (counterpart of mt76_testmode_param_set()).
 */
203 mt76_testmode_param_present(struct mt76_testmode_data *td, u16 idx)
205 return td->param_set[idx / 32] & BIT(idx % 32);
/*
 * mt76_testmode_init_defaults - apply default parameters once
 *
 * Uses tx_msdu_len > 0 as the "already initialized" marker; otherwise sets
 * a 1024-byte MSDU and OFDM as the default tx rate mode.
 */
209 mt76_testmode_init_defaults(struct mt76_dev *dev)
211 struct mt76_testmode_data *td = &dev->test;
213 if (td->tx_msdu_len > 0)
216 td->tx_msdu_len = 1024;
218 td->tx_rate_mode = MT76_TM_TX_MODE_OFDM;
/*
 * __mt76_testmode_set_state - transition the testmode state machine
 *
 * Stops an active tx run when leaving TX_FRAMES, prepares the template
 * frame when entering it, then hands the transition to the driver's
 * set_state op (rolling back the tx setup on failure). On success, starts
 * transmission or clears rx statistics as appropriate and records the new
 * state. Caller is expected to hold dev->mutex (see mt76_testmode_cmd()).
 */
223 __mt76_testmode_set_state(struct mt76_dev *dev, enum mt76_testmode_state state)
225 enum mt76_testmode_state prev_state = dev->test.state;
/* leaving an active tx run: quiesce first */
228 if (prev_state == MT76_TM_STATE_TX_FRAMES)
229 mt76_testmode_tx_stop(dev);
/* entering tx: build the template frame before telling the driver */
231 if (state == MT76_TM_STATE_TX_FRAMES) {
232 err = mt76_testmode_tx_init(dev);
237 err = dev->test_ops->set_state(dev, state);
/* driver rejected the transition: undo the tx preparation */
239 if (state == MT76_TM_STATE_TX_FRAMES)
240 mt76_testmode_tx_stop(dev);
245 if (state == MT76_TM_STATE_TX_FRAMES)
246 mt76_testmode_tx_start(dev);
247 else if (state == MT76_TM_STATE_RX_FRAMES) {
/* fresh rx statistics for the new capture run */
248 memset(&dev->test.rx_stats, 0, sizeof(dev->test.rx_stats));
251 dev->test.state = state;
/*
 * mt76_testmode_set_state - validated entry point for state changes
 *
 * No-op when already OFF and asked for OFF. Any non-OFF state requires the
 * phy to be running and the hw to be in monitor mode. Switching directly
 * between two active (non-IDLE) states is done by bouncing through IDLE
 * first, so the driver always sees a clean transition.
 */
256 int mt76_testmode_set_state(struct mt76_dev *dev, enum mt76_testmode_state state)
258 struct mt76_testmode_data *td = &dev->test;
259 struct ieee80211_hw *hw = dev->phy.hw;
261 if (state == td->state && state == MT76_TM_STATE_OFF)
264 if (state > MT76_TM_STATE_OFF &&
265 (!test_bit(MT76_STATE_RUNNING, &dev->phy.state) ||
266 !(hw->conf.flags & IEEE80211_CONF_MONITOR)))
/* active -> active: go through IDLE as an intermediate step */
269 if (state != MT76_TM_STATE_IDLE &&
270 td->state != MT76_TM_STATE_IDLE) {
273 ret = __mt76_testmode_set_state(dev, MT76_TM_STATE_IDLE);
278 return __mt76_testmode_set_state(dev, state);
281 EXPORT_SYMBOL(mt76_testmode_set_state);
/*
 * mt76_tm_get_u8 - fetch an optional u8 attribute with range validation
 *
 * Reads @attr into *@dest, rejecting values outside [@min, @max].
 * Presumably a NULL @attr leaves *@dest untouched and succeeds (optional
 * attribute) — the early-exit path is not visible here; confirm.
 */
284 mt76_tm_get_u8(struct nlattr *attr, u8 *dest, u8 min, u8 max)
291 val = nla_get_u8(attr);
292 if (val < min || val > max)
/*
 * mt76_testmode_cmd - NL80211_CMD_TESTMODE handler
 *
 * Parses the user-supplied attributes against mt76_tm_policy, applies them
 * to dev->test under dev->mutex (optionally resetting to defaults first),
 * forwards the full attribute table to the driver's set_params op, records
 * which attributes were explicitly set, and finally performs the requested
 * state change, if any.
 */
299 int mt76_testmode_cmd(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
302 struct mt76_phy *phy = hw->priv;
303 struct mt76_dev *dev = phy->dev;
304 struct mt76_testmode_data *td = &dev->test;
305 struct nlattr *tb[NUM_MT76_TM_ATTRS];
313 err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
314 mt76_tm_policy, NULL);
/* all parameter/state mutation happens under the device mutex */
320 mutex_lock(&dev->mutex);
/* RESET: switch off and wipe all testmode parameters */
322 if (tb[MT76_TM_ATTR_RESET]) {
323 mt76_testmode_set_state(dev, MT76_TM_STATE_OFF);
324 memset(td, 0, sizeof(*td));
327 mt76_testmode_init_defaults(dev);
329 if (tb[MT76_TM_ATTR_TX_COUNT])
330 td->tx_count = nla_get_u32(tb[MT76_TM_ATTR_TX_COUNT]);
332 if (tb[MT76_TM_ATTR_TX_LENGTH]) {
333 u32 val = nla_get_u32(tb[MT76_TM_ATTR_TX_LENGTH]);
/* frame must at least hold an 802.11 header and fit a max-size frame */
335 if (val > IEEE80211_MAX_FRAME_LEN ||
336 val < sizeof(struct ieee80211_hdr))
339 td->tx_msdu_len = val;
342 if (tb[MT76_TM_ATTR_TX_RATE_IDX])
343 td->tx_rate_idx = nla_get_u8(tb[MT76_TM_ATTR_TX_RATE_IDX]);
/* range-checked optional u8 parameters; any failure rejects the command */
345 if (mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_MODE], &td->tx_rate_mode,
346 0, MT76_TM_TX_MODE_MAX) ||
347 mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_NSS], &td->tx_rate_nss,
348 1, hweight8(phy->antenna_mask)) ||
349 mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_SGI], &td->tx_rate_sgi, 0, 2) ||
350 mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_LDPC], &td->tx_rate_ldpc, 0, 1) ||
351 mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_STBC], &td->tx_rate_stbc, 0, 1) ||
352 mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_LTF], &td->tx_ltf, 0, 2) ||
353 mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_ANTENNA], &td->tx_antenna_mask, 1,
354 phy->antenna_mask) ||
355 mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_POWER_CONTROL],
356 &td->tx_power_control, 0, 1))
359 if (tb[MT76_TM_ATTR_FREQ_OFFSET])
360 td->freq_offset = nla_get_u32(tb[MT76_TM_ATTR_FREQ_OFFSET]);
362 if (tb[MT76_TM_ATTR_STATE]) {
/* NOTE(review): policy declares MT76_TM_ATTR_STATE as NLA_U8 (1 byte),
 * but it is read here with nla_get_u32(), which reads 4 bytes from the
 * attribute payload — type mismatch, should be nla_get_u8(). Confirm
 * against the policy table above. */
363 state = nla_get_u32(tb[MT76_TM_ATTR_STATE]);
364 if (state > MT76_TM_STATE_MAX)
/* per-chain tx power: each nested entry must be exactly one byte */
370 if (tb[MT76_TM_ATTR_TX_POWER]) {
375 nla_for_each_nested(cur, tb[MT76_TM_ATTR_TX_POWER], rem) {
376 if (nla_len(cur) != 1 ||
377 idx >= ARRAY_SIZE(td->tx_power))
380 td->tx_power[idx++] = nla_get_u8(cur);
/* let the driver consume any driver-specific attributes */
384 if (dev->test_ops->set_params) {
385 err = dev->test_ops->set_params(dev, tb, state);
/* remember which attributes were explicitly provided, for dumps */
390 for (i = MT76_TM_ATTR_STATE; i < ARRAY_SIZE(tb); i++)
392 mt76_testmode_param_set(td, i);
/* apply the state change last, after all parameters are in place */
395 if (tb[MT76_TM_ATTR_STATE])
396 err = mt76_testmode_set_state(dev, state);
399 mutex_unlock(&dev->mutex);
403 EXPORT_SYMBOL(mt76_testmode_cmd);
/*
 * mt76_testmode_dump_stats - emit tx/rx statistics into a netlink message
 *
 * Sums the per-index rx packet and FCS-error counters, emits the tx
 * pending/queued/done counts and the rx totals, then lets the driver
 * append its own statistics via the dump_stats op, if provided.
 */
406 mt76_testmode_dump_stats(struct mt76_dev *dev, struct sk_buff *msg)
408 struct mt76_testmode_data *td = &dev->test;
410 u64 rx_fcs_error = 0;
/* aggregate the per-index rx counters into single totals */
413 for (i = 0; i < ARRAY_SIZE(td->rx_stats.packets); i++) {
414 rx_packets += td->rx_stats.packets[i];
415 rx_fcs_error += td->rx_stats.fcs_error[i];
418 if (nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_PENDING, td->tx_pending) ||
419 nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_QUEUED, td->tx_queued) ||
420 nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_DONE, td->tx_done) ||
421 nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_PACKETS, rx_packets,
422 MT76_TM_STATS_ATTR_PAD) ||
423 nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_FCS_ERROR, rx_fcs_error,
424 MT76_TM_STATS_ATTR_PAD))
/* driver-specific counters, if the driver provides them */
427 if (dev->test_ops->dump_stats)
428 return dev->test_ops->dump_stats(dev, msg);
/*
 * mt76_testmode_dump - NL80211_CMD_TESTMODE dump handler
 *
 * Single-shot dump (cb->args[2] guards against repeat invocations). If the
 * caller requested statistics (MT76_TM_ATTR_STATS), emits a nested stats
 * block; otherwise emits the current testmode configuration — always the
 * core tx parameters, and optional ones only when they were explicitly set
 * (tracked via the param_set bitmap).
 */
433 int mt76_testmode_dump(struct ieee80211_hw *hw, struct sk_buff *msg,
434 struct netlink_callback *cb, void *data, int len)
436 struct mt76_phy *phy = hw->priv;
437 struct mt76_dev *dev = phy->dev;
438 struct mt76_testmode_data *td = &dev->test;
439 struct nlattr *tb[NUM_MT76_TM_ATTRS] = {};
/* dump callbacks are invoked repeatedly; only produce output once */
447 if (cb->args[2]++ > 0)
451 err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
452 mt76_tm_policy, NULL);
457 mutex_lock(&dev->mutex);
/* stats-only request: nest the statistics block and return */
459 if (tb[MT76_TM_ATTR_STATS]) {
462 a = nla_nest_start(msg, MT76_TM_ATTR_STATS);
464 err = mt76_testmode_dump_stats(dev, msg);
465 nla_nest_end(msg, a);
471 mt76_testmode_init_defaults(dev);
474 if (nla_put_u32(msg, MT76_TM_ATTR_STATE, td->state))
478 (nla_put_string(msg, MT76_TM_ATTR_MTD_PART, td->mtd_name) ||
479 nla_put_u32(msg, MT76_TM_ATTR_MTD_OFFSET, td->mtd_offset)))
482 if (nla_put_u32(msg, MT76_TM_ATTR_TX_COUNT, td->tx_count) ||
483 nla_put_u32(msg, MT76_TM_ATTR_TX_LENGTH, td->tx_msdu_len) ||
484 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE, td->tx_rate_mode) ||
485 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_NSS, td->tx_rate_nss) ||
486 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, td->tx_rate_idx) ||
487 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_SGI, td->tx_rate_sgi) ||
488 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_LDPC, td->tx_rate_ldpc) ||
489 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_STBC, td->tx_rate_stbc) ||
490 (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_LTF) &&
491 nla_put_u8(msg, MT76_TM_ATTR_TX_LTF, td->tx_ltf)) ||
492 (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_ANTENNA) &&
493 nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, td->tx_antenna_mask)) ||
494 (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER_CONTROL) &&
495 nla_put_u8(msg, MT76_TM_ATTR_TX_POWER_CONTROL, td->tx_power_control)) ||
496 (mt76_testmode_param_present(td, MT76_TM_ATTR_FREQ_OFFSET) &&
/* NOTE(review): FREQ_OFFSET is NLA_U32 in the policy and is read with
 * nla_get_u32() in mt76_testmode_cmd(), but emitted here as u8 —
 * truncates values > 255 and mismatches the attribute type; should be
 * nla_put_u32(). Confirm against userspace expectations. */
497 nla_put_u8(msg, MT76_TM_ATTR_FREQ_OFFSET, td->freq_offset)))
/* per-chain tx power values go into their own nested block */
500 if (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER)) {
501 a = nla_nest_start(msg, MT76_TM_ATTR_TX_POWER);
505 for (i = 0; i < ARRAY_SIZE(td->tx_power); i++)
506 if (nla_put_u8(msg, i, td->tx_power[i]))
509 nla_nest_end(msg, a);
515 mutex_unlock(&dev->mutex);
519 EXPORT_SYMBOL(mt76_testmode_dump);