1 // SPDX-License-Identifier: ISC
4 #include <linux/random.h>
/* Netlink attribute policy for the mt76 testmode interface.
 * Indexed by MT76_TM_ATTR_*; consumed by nla_parse_deprecated() in
 * mt76_testmode_cmd() and mt76_testmode_dump() to validate the type of
 * each user-supplied attribute before it is read.
 */
7 const struct nla_policy mt76_tm_policy[NUM_MT76_TM_ATTRS] = {
8 [MT76_TM_ATTR_RESET] = { .type = NLA_FLAG },
9 [MT76_TM_ATTR_STATE] = { .type = NLA_U8 },
10 [MT76_TM_ATTR_TX_COUNT] = { .type = NLA_U32 },
11 [MT76_TM_ATTR_TX_RATE_MODE] = { .type = NLA_U8 },
12 [MT76_TM_ATTR_TX_RATE_NSS] = { .type = NLA_U8 },
13 [MT76_TM_ATTR_TX_RATE_IDX] = { .type = NLA_U8 },
14 [MT76_TM_ATTR_TX_RATE_SGI] = { .type = NLA_U8 },
15 [MT76_TM_ATTR_TX_RATE_LDPC] = { .type = NLA_U8 },
16 [MT76_TM_ATTR_TX_RATE_STBC] = { .type = NLA_U8 },
17 [MT76_TM_ATTR_TX_LTF] = { .type = NLA_U8 },
18 [MT76_TM_ATTR_TX_ANTENNA] = { .type = NLA_U8 },
19 [MT76_TM_ATTR_TX_SPE_IDX] = { .type = NLA_U8 },
20 [MT76_TM_ATTR_TX_POWER_CONTROL] = { .type = NLA_U8 },
/* TX_POWER and DRV_DATA carry nested attribute lists, not scalars */
21 [MT76_TM_ATTR_TX_POWER] = { .type = NLA_NESTED },
22 [MT76_TM_ATTR_TX_DUTY_CYCLE] = { .type = NLA_U8 },
23 [MT76_TM_ATTR_TX_IPG] = { .type = NLA_U32 },
24 [MT76_TM_ATTR_TX_TIME] = { .type = NLA_U32 },
25 [MT76_TM_ATTR_FREQ_OFFSET] = { .type = NLA_U32 },
26 [MT76_TM_ATTR_DRV_DATA] = { .type = NLA_NESTED },
28 EXPORT_SYMBOL_GPL(mt76_tm_policy);
/* Push pending testmode TX frames to the hardware queue.
 * Called from the TX worker context; repeatedly clones td->tx_skb and
 * queues the clones until either the requested count is exhausted, the
 * in-flight limit is reached, or the HW queue is half full (leaving
 * headroom so the queue is never saturated by test traffic).
 */
30 void mt76_testmode_tx_pending(struct mt76_phy *phy)
32 struct mt76_testmode_data *td = &phy->test;
33 struct mt76_dev *dev = phy->dev;
34 struct mt76_wcid *wcid = &dev->global_wcid;
35 struct sk_buff *skb = td->tx_skb;
/* Nothing to do without a template frame or a pending count */
40 if (!skb || !td->tx_pending)
43 qid = skb_get_queue_mapping(skb);
/* Default to at most 1000 frames in flight when no explicit limit set */
46 tx_queued_limit = td->tx_queued_limit ? td->tx_queued_limit : 1000;
48 spin_lock_bh(&q->lock);
/* Stop early if the HW ring is half full to avoid starving the queue */
50 while (td->tx_pending > 0 &&
51 td->tx_queued - td->tx_done < tx_queued_limit &&
52 q->queued < q->ndesc / 2) {
/* skb_get() takes a reference so the template skb survives each send */
55 ret = dev->queue_ops->tx_queue_skb(dev, q, qid, skb_get(skb),
/* Kick the DMA engine once after batching all queued frames */
64 dev->queue_ops->kick(dev, q);
66 spin_unlock_bh(&q->lock);
/* Return the maximum MPDU length allowed for the given TX rate mode.
 * HT caps at 7935 bytes; VHT/HE use 7991 or 11454 depending on the
 * 5 GHz VHT capability advertised by the PHY; legacy CCK/OFDM fall
 * back to the generic IEEE80211_MAX_FRAME_LEN.
 */
70 mt76_testmode_max_mpdu_len(struct mt76_phy *phy, u8 tx_rate_mode)
72 switch (tx_rate_mode) {
73 case MT76_TM_TX_MODE_HT:
74 return IEEE80211_MAX_MPDU_LEN_HT_7935;
75 case MT76_TM_TX_MODE_VHT:
76 case MT76_TM_TX_MODE_HE_SU:
77 case MT76_TM_TX_MODE_HE_EXT_SU:
78 case MT76_TM_TX_MODE_HE_TB:
79 case MT76_TM_TX_MODE_HE_MU:
/* Pick the VHT MPDU cap from the PHY's advertised 5 GHz capability */
80 if (phy->sband_5g.sband.vht_cap.cap &
81 IEEE80211_VHT_CAP_MAX_MPDU_LENGTH_7991)
82 return IEEE80211_MAX_MPDU_LEN_VHT_7991;
83 return IEEE80211_MAX_MPDU_LEN_VHT_11454;
84 case MT76_TM_TX_MODE_CCK:
85 case MT76_TM_TX_MODE_OFDM:
87 return IEEE80211_MAX_FRAME_LEN;
/* Release the testmode TX template frame.
 * dev_kfree_skb() is NULL-safe, so no guard is needed when no frame
 * has been allocated yet.
 */
92 mt76_testmode_free_skb(struct mt76_phy *phy)
94 struct mt76_testmode_data *td = &phy->test;
96 dev_kfree_skb(td->tx_skb);
/* Allocate the testmode TX template frame of @len bytes.
 * Builds an injected data frame with a random payload. Frames longer
 * than MT_TXP_MAX_LEN (4095) are split into a head skb plus frag_list
 * fragments of at most MT_TXP_MAX_LEN bytes each, since that is the
 * per-buffer limit of the TX path.
 * Returns 0 on success or a negative errno (error paths not fully
 * visible in this chunk).
 */
100 int mt76_testmode_alloc_skb(struct mt76_phy *phy, u32 len)
102 #define MT_TXP_MAX_LEN 4095
/* Injected 802.11 data frame, marked FromDS */
103 u16 fc = IEEE80211_FTYPE_DATA | IEEE80211_STYPE_DATA |
104 IEEE80211_FCTL_FROMDS;
105 struct mt76_testmode_data *td = &phy->test;
106 struct sk_buff **frag_tail, *head;
107 struct ieee80211_tx_info *info;
108 struct ieee80211_hdr *hdr;
109 u32 max_len, head_len;
/* Clamp requested length into [sizeof(hdr), mode-dependent max] */
112 max_len = mt76_testmode_max_mpdu_len(phy, td->tx_rate_mode);
115 else if (len < sizeof(struct ieee80211_hdr))
116 len = sizeof(struct ieee80211_hdr);
/* Split oversized frames: head carries the header + first chunk */
118 nfrags = len / MT_TXP_MAX_LEN;
119 head_len = nfrags ? MT_TXP_MAX_LEN : len;
/* NOTE(review): QoS bit is set based on total frame length here —
 * presumably to allow A-MSDU-sized frames; confirm against the TX path.
 */
121 if (len > IEEE80211_MAX_FRAME_LEN)
122 fc |= IEEE80211_STYPE_QOS_DATA;
124 head = alloc_skb(head_len, GFP_KERNEL);
128 hdr = __skb_put_zero(head, sizeof(*hdr));
129 hdr->frame_control = cpu_to_le16(fc);
/* Addresses come from the testmode config (defaults: PHY MAC address) */
130 memcpy(hdr->addr1, td->addr[0], ETH_ALEN);
131 memcpy(hdr->addr2, td->addr[1], ETH_ALEN);
132 memcpy(hdr->addr3, td->addr[2], ETH_ALEN);
133 skb_set_queue_mapping(head, IEEE80211_AC_BE);
/* Fill the rest of the head buffer with random payload bytes */
134 get_random_bytes(__skb_put(head, head_len - sizeof(*hdr)),
135 head_len - sizeof(*hdr));
137 info = IEEE80211_SKB_CB(head);
/* Injected frame: no ACK expected, bypass powersave buffering */
138 info->flags = IEEE80211_TX_CTL_INJECTED |
139 IEEE80211_TX_CTL_NO_ACK |
140 IEEE80211_TX_CTL_NO_PS_BUFFER;
142 info->hw_queue |= FIELD_PREP(MT_TX_HW_QUEUE_PHY, phy->band_idx);
143 frag_tail = &skb_shinfo(head)->frag_list;
/* Chain remaining payload as frag_list skbs; last one carries the
 * remainder (len % MT_TXP_MAX_LEN), the others a full chunk.
 */
145 for (i = 0; i < nfrags; i++) {
146 struct sk_buff *frag;
150 frag_len = len % MT_TXP_MAX_LEN;
152 frag_len = MT_TXP_MAX_LEN;
154 frag = alloc_skb(frag_len, GFP_KERNEL);
/* On allocation failure drop the partially-built frame */
156 mt76_testmode_free_skb(phy);
161 get_random_bytes(__skb_put(frag, frag_len), frag_len);
/* Account fragment bytes in the head skb's totals */
162 head->len += frag->len;
163 head->data_len += frag->len;
166 frag_tail = &(*frag_tail)->next;
/* Replace any previously-allocated template before storing the new one */
169 mt76_testmode_free_skb(phy);
174 EXPORT_SYMBOL(mt76_testmode_alloc_skb);
/* Prepare the testmode TX template frame and its rate configuration.
 * Allocates the template skb, then translates the configured testmode
 * rate parameters (mode, index, NSS, SGI, LDPC, STBC, bandwidth) into
 * the mac80211 ieee80211_tx_rate/tx_info of the frame. Rate modes
 * above VHT (HE) are handled by the driver and skip legacy rate setup.
 * Returns 0 on success or a negative errno on invalid configuration.
 */
177 mt76_testmode_tx_init(struct mt76_phy *phy)
179 struct mt76_testmode_data *td = &phy->test;
180 struct ieee80211_tx_info *info;
181 struct ieee80211_tx_rate *rate;
/* Upper bound for spatial streams from the PHY antenna mask */
182 u8 max_nss = hweight8(phy->antenna_mask);
185 ret = mt76_testmode_alloc_skb(phy, td->tx_mpdu_len);
/* HE and later modes use driver-specific rate programming */
189 if (td->tx_rate_mode > MT76_TM_TX_MODE_VHT)
/* A restricted TX antenna mask further limits usable NSS */
192 if (td->tx_antenna_mask)
193 max_nss = min_t(u8, max_nss, hweight8(td->tx_antenna_mask));
195 info = IEEE80211_SKB_CB(td->tx_skb);
196 rate = &info->control.rates[0];
198 rate->idx = td->tx_rate_idx;
200 switch (td->tx_rate_mode) {
201 case MT76_TM_TX_MODE_CCK:
/* CCK rates exist only on 2.4 GHz */
202 if (phy->chandef.chan->band != NL80211_BAND_2GHZ)
208 case MT76_TM_TX_MODE_OFDM:
209 if (phy->chandef.chan->band != NL80211_BAND_2GHZ)
217 case MT76_TM_TX_MODE_HT:
/* HT MCS index must fit within max_nss streams (8 MCS per stream) */
218 if (rate->idx > 8 * max_nss &&
220 phy->chandef.width >= NL80211_CHAN_WIDTH_40))
223 rate->flags |= IEEE80211_TX_RC_MCS;
225 case MT76_TM_TX_MODE_VHT:
229 if (td->tx_rate_nss > max_nss)
/* Encode MCS + NSS into the VHT rate field */
232 ieee80211_rate_set_vht(rate, td->tx_rate_idx, td->tx_rate_nss);
233 rate->flags |= IEEE80211_TX_RC_VHT_MCS;
240 rate->flags |= IEEE80211_TX_RC_SHORT_GI;
242 if (td->tx_rate_ldpc)
243 info->flags |= IEEE80211_TX_CTL_LDPC;
245 if (td->tx_rate_stbc)
246 info->flags |= IEEE80211_TX_CTL_STBC;
/* Map the current channel width onto the rate bandwidth flags */
248 if (td->tx_rate_mode >= MT76_TM_TX_MODE_HT) {
249 switch (phy->chandef.width) {
250 case NL80211_CHAN_WIDTH_40:
251 rate->flags |= IEEE80211_TX_RC_40_MHZ_WIDTH;
253 case NL80211_CHAN_WIDTH_80:
254 rate->flags |= IEEE80211_TX_RC_80_MHZ_WIDTH;
256 case NL80211_CHAN_WIDTH_80P80:
257 case NL80211_CHAN_WIDTH_160:
258 rate->flags |= IEEE80211_TX_RC_160_MHZ_WIDTH;
/* Begin testmode frame transmission.
 * Arms the pending-frame counter from the configured tx_count and
 * schedules the TX worker, which drains frames via
 * mt76_testmode_tx_pending().
 */
269 mt76_testmode_tx_start(struct mt76_phy *phy)
271 struct mt76_testmode_data *td = &phy->test;
272 struct mt76_dev *dev = phy->dev;
276 td->tx_pending = td->tx_count;
277 mt76_worker_schedule(&dev->tx_worker);
/* Stop testmode frame transmission.
 * Disables the TX worker while clearing the pending state (hidden in
 * this view), re-enables it, then waits (bounded by MT76_TM_TIMEOUT)
 * for all queued frames to complete before freeing the template skb.
 */
283 mt76_testmode_tx_stop(struct mt76_phy *phy)
285 struct mt76_testmode_data *td = &phy->test;
286 struct mt76_dev *dev = phy->dev;
/* Park the worker so tx state can be reset without races */
288 mt76_worker_disable(&dev->tx_worker);
292 mt76_worker_enable(&dev->tx_worker);
/* Wait for in-flight frames to drain; give up after the timeout */
294 wait_event_timeout(dev->tx_wait, td->tx_done == td->tx_queued,
295 MT76_TM_TIMEOUT * HZ);
297 mt76_testmode_free_skb(phy);
/* Record that testmode parameter @idx has been explicitly set by the
 * user, in the per-phy param_set bitmap (32 bits per word).
 */
299 mt76_testmode_param_set(struct mt76_testmode_data *td, u16 idx)
301 td->param_set[idx / 32] |= BIT(idx % 32);
/* Return non-zero if testmode parameter @idx was explicitly set
 * (see mt76_testmode_param_set()); used by the dump path to emit only
 * user-configured attributes.
 */
305 mt76_testmode_param_present(struct mt76_testmode_data *td, u16 idx)
307 return td->param_set[idx / 32] & BIT(idx % 32);
/* Populate default testmode parameters on first use.
 * Skipped if tx_mpdu_len is already non-zero (state was initialized
 * earlier); otherwise set a 1024-byte OFDM frame and use the PHY MAC
 * address for all three 802.11 addresses.
 */
311 mt76_testmode_init_defaults(struct mt76_phy *phy)
313 struct mt76_testmode_data *td = &phy->test;
/* Already initialized — keep existing configuration */
315 if (td->tx_mpdu_len > 0)
318 td->tx_mpdu_len = 1024;
320 td->tx_rate_mode = MT76_TM_TX_MODE_OFDM;
/* addr1/addr2/addr3 all default to the PHY's own MAC address */
323 memcpy(td->addr[0], phy->macaddr, ETH_ALEN);
324 memcpy(td->addr[1], phy->macaddr, ETH_ALEN);
325 memcpy(td->addr[2], phy->macaddr, ETH_ALEN);
/* Transition the testmode state machine to @state.
 * Stops any running TX first, prepares the TX template when entering
 * TX_FRAMES, delegates the hardware transition to the driver's
 * set_state op (rolling back TX setup on failure), then starts TX or
 * clears RX statistics as appropriate and records the new state.
 * Caller is expected to hold the required locking (not visible here).
 */
329 __mt76_testmode_set_state(struct mt76_phy *phy, enum mt76_testmode_state state)
331 enum mt76_testmode_state prev_state = phy->test.state;
332 struct mt76_dev *dev = phy->dev;
/* Leaving TX_FRAMES: drain and free the current TX run */
335 if (prev_state == MT76_TM_STATE_TX_FRAMES)
336 mt76_testmode_tx_stop(phy);
338 if (state == MT76_TM_STATE_TX_FRAMES) {
339 err = mt76_testmode_tx_init(phy);
/* Driver performs the actual hardware state change */
344 err = dev->test_ops->set_state(phy, state);
/* On driver failure undo the TX preparation done above */
346 if (state == MT76_TM_STATE_TX_FRAMES)
347 mt76_testmode_tx_stop(phy);
352 if (state == MT76_TM_STATE_TX_FRAMES)
353 mt76_testmode_tx_start(phy);
354 else if (state == MT76_TM_STATE_RX_FRAMES) {
/* Fresh RX run starts from zeroed statistics */
355 memset(&phy->test.rx_stats, 0, sizeof(phy->test.rx_stats));
358 phy->test.state = state;
/* Public entry point for testmode state changes.
 * Validates preconditions: no-op when already OFF and OFF is requested;
 * any non-OFF state requires the PHY to be running and in monitor mode.
 * Transitions between two non-IDLE states go through IDLE first so the
 * driver always sees a clean intermediate state.
 * Returns 0 on success or a negative errno.
 */
363 int mt76_testmode_set_state(struct mt76_phy *phy, enum mt76_testmode_state state)
365 struct mt76_testmode_data *td = &phy->test;
366 struct ieee80211_hw *hw = phy->hw;
368 if (state == td->state && state == MT76_TM_STATE_OFF)
/* Testmode requires an active PHY configured as a monitor interface */
371 if (state > MT76_TM_STATE_OFF &&
372 (!test_bit(MT76_STATE_RUNNING, &phy->state) ||
373 !(hw->conf.flags & IEEE80211_CONF_MONITOR)))
/* Force a detour through IDLE between two active states */
376 if (state != MT76_TM_STATE_IDLE &&
377 td->state != MT76_TM_STATE_IDLE) {
380 ret = __mt76_testmode_set_state(phy, MT76_TM_STATE_IDLE);
385 return __mt76_testmode_set_state(phy, state);
388 EXPORT_SYMBOL(mt76_testmode_set_state);
/* Read an optional u8 netlink attribute into *dest with range checking.
 * Rejects values outside [min, max]. NOTE(review): the guard for a
 * missing (NULL) @attr is on lines not visible in this chunk — callers
 * pass tb[] entries that may be NULL, so presumably absent attributes
 * are treated as success with *dest untouched; confirm upstream.
 */
391 mt76_tm_get_u8(struct nlattr *attr, u8 *dest, u8 min, u8 max)
398 val = nla_get_u8(attr);
399 if (val < min || val > max)
/* Handle a testmode command from userspace (cfg80211 testmode hook).
 * Parses the attribute blob against mt76_tm_policy, applies each
 * present parameter to the per-phy testmode config under dev->mutex,
 * forwards the full attribute table to the driver's set_params op,
 * records which parameters were set, and finally applies any requested
 * state change. Returns 0 or a negative errno.
 */
406 int mt76_testmode_cmd(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
409 struct mt76_phy *phy = hw->priv;
410 struct mt76_dev *dev = phy->dev;
411 struct mt76_testmode_data *td = &phy->test;
412 struct nlattr *tb[NUM_MT76_TM_ATTRS];
420 err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
421 mt76_tm_policy, NULL);
/* Serialize against other testmode/config operations */
427 mutex_lock(&dev->mutex);
/* RESET: force testmode off and wipe all stored parameters */
429 if (tb[MT76_TM_ATTR_RESET]) {
430 mt76_testmode_set_state(phy, MT76_TM_STATE_OFF);
431 memset(td, 0, sizeof(*td));
434 mt76_testmode_init_defaults(phy);
436 if (tb[MT76_TM_ATTR_TX_COUNT])
437 td->tx_count = nla_get_u32(tb[MT76_TM_ATTR_TX_COUNT]);
439 if (tb[MT76_TM_ATTR_TX_RATE_IDX])
440 td->tx_rate_idx = nla_get_u8(tb[MT76_TM_ATTR_TX_RATE_IDX]);
/* Range-checked u8 parameters; any out-of-range value aborts the cmd */
442 if (mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_MODE], &td->tx_rate_mode,
443 0, MT76_TM_TX_MODE_MAX) ||
444 mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_NSS], &td->tx_rate_nss,
445 1, hweight8(phy->antenna_mask)) ||
446 mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_SGI], &td->tx_rate_sgi, 0, 2) ||
447 mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_LDPC], &td->tx_rate_ldpc, 0, 1) ||
448 mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_STBC], &td->tx_rate_stbc, 0, 1) ||
449 mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_LTF], &td->tx_ltf, 0, 2) ||
450 mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_ANTENNA],
451 &td->tx_antenna_mask, 0, 0xff) ||
452 mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_SPE_IDX], &td->tx_spe_idx, 0, 27) ||
453 mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_DUTY_CYCLE],
454 &td->tx_duty_cycle, 0, 99) ||
455 mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_POWER_CONTROL],
456 &td->tx_power_control, 0, 1))
/* TX length must fit the mode-dependent MPDU limit and hold a header */
459 if (tb[MT76_TM_ATTR_TX_LENGTH]) {
460 u32 val = nla_get_u32(tb[MT76_TM_ATTR_TX_LENGTH]);
462 if (val > mt76_testmode_max_mpdu_len(phy, td->tx_rate_mode) ||
463 val < sizeof(struct ieee80211_hdr))
466 td->tx_mpdu_len = val;
469 if (tb[MT76_TM_ATTR_TX_IPG])
470 td->tx_ipg = nla_get_u32(tb[MT76_TM_ATTR_TX_IPG]);
472 if (tb[MT76_TM_ATTR_TX_TIME])
473 td->tx_time = nla_get_u32(tb[MT76_TM_ATTR_TX_TIME]);
475 if (tb[MT76_TM_ATTR_FREQ_OFFSET])
476 td->freq_offset = nla_get_u32(tb[MT76_TM_ATTR_FREQ_OFFSET]);
478 if (tb[MT76_TM_ATTR_STATE]) {
479 state = nla_get_u32(tb[MT76_TM_ATTR_STATE]);
480 if (state > MT76_TM_STATE_MAX)
/* Nested per-chain TX power values, one u8 per nested attribute */
486 if (tb[MT76_TM_ATTR_TX_POWER]) {
491 nla_for_each_nested(cur, tb[MT76_TM_ATTR_TX_POWER], rem) {
492 if (nla_len(cur) != 1 ||
493 idx >= ARRAY_SIZE(td->tx_power))
496 td->tx_power[idx++] = nla_get_u8(cur);
/* Nested MAC addresses for addr1/addr2/addr3 (max 3 entries) */
500 if (tb[MT76_TM_ATTR_MAC_ADDRS]) {
505 nla_for_each_nested(cur, tb[MT76_TM_ATTR_MAC_ADDRS], rem) {
506 if (nla_len(cur) != ETH_ALEN || idx >= 3)
509 memcpy(td->addr[idx], nla_data(cur), ETH_ALEN);
/* Let the driver consume driver-specific attributes */
514 if (dev->test_ops->set_params) {
515 err = dev->test_ops->set_params(phy, tb, state);
/* Track every attribute that was present so dump can report it */
520 for (i = MT76_TM_ATTR_STATE; i < ARRAY_SIZE(tb); i++)
522 mt76_testmode_param_set(td, i);
/* Apply the state change last, after all parameters are in place */
525 if (tb[MT76_TM_ATTR_STATE])
526 err = mt76_testmode_set_state(phy, state);
529 mutex_unlock(&dev->mutex);
533 EXPORT_SYMBOL(mt76_testmode_cmd);
/* Emit testmode TX/RX statistics into the netlink message @msg.
 * Gives the driver's dump_stats op first chance to add its own
 * attributes, sums the per-queue RX packet/FCS-error counters, and
 * reports pending/queued/done TX counts plus the RX totals.
 * Returns 0 on success or a negative errno on netlink put failure.
 */
536 mt76_testmode_dump_stats(struct mt76_phy *phy, struct sk_buff *msg)
538 struct mt76_testmode_data *td = &phy->test;
539 struct mt76_dev *dev = phy->dev;
541 u64 rx_fcs_error = 0;
544 if (dev->test_ops->dump_stats) {
547 ret = dev->test_ops->dump_stats(phy, msg);
/* Aggregate RX stats across all counter slots */
552 for (i = 0; i < ARRAY_SIZE(td->rx_stats.packets); i++) {
553 rx_packets += td->rx_stats.packets[i];
554 rx_fcs_error += td->rx_stats.fcs_error[i];
557 if (nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_PENDING, td->tx_pending) ||
558 nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_QUEUED, td->tx_queued) ||
559 nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_DONE, td->tx_done) ||
560 nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_PACKETS, rx_packets,
561 MT76_TM_STATS_ATTR_PAD) ||
562 nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_FCS_ERROR, rx_fcs_error,
563 MT76_TM_STATS_ATTR_PAD))
/* Dump the current testmode configuration to userspace
 * (cfg80211 testmode dump hook). A single-shot dump guarded by
 * cb->args[2]; optionally emits only statistics when
 * MT76_TM_ATTR_STATS is requested, otherwise reports the state, MTD
 * info, and every parameter — unconditionally for the always-valid
 * ones, and only-if-set (param_present bitmap) for the optional ones.
 * Returns 0 or a negative errno.
 */
569 int mt76_testmode_dump(struct ieee80211_hw *hw, struct sk_buff *msg,
570 struct netlink_callback *cb, void *data, int len)
572 struct mt76_phy *phy = hw->priv;
573 struct mt76_dev *dev = phy->dev;
574 struct mt76_testmode_data *td = &phy->test;
575 struct nlattr *tb[NUM_MT76_TM_ATTRS] = {};
/* Only the first invocation of the dump callback produces output */
583 if (cb->args[2]++ > 0)
587 err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
588 mt76_tm_policy, NULL);
593 mutex_lock(&dev->mutex);
/* Stats-only request: nest the statistics and skip the config dump */
595 if (tb[MT76_TM_ATTR_STATS]) {
598 a = nla_nest_start(msg, MT76_TM_ATTR_STATS);
600 err = mt76_testmode_dump_stats(phy, msg);
601 nla_nest_end(msg, a);
607 mt76_testmode_init_defaults(phy);
610 if (nla_put_u32(msg, MT76_TM_ATTR_STATE, td->state))
/* Expose MTD partition info when the driver registered one */
613 if (dev->test_mtd.name &&
614 (nla_put_string(msg, MT76_TM_ATTR_MTD_PART, dev->test_mtd.name) ||
615 nla_put_u32(msg, MT76_TM_ATTR_MTD_OFFSET, dev->test_mtd.offset)))
/* Core parameters are always reported; optional ones only when the
 * user explicitly set them (param_present bitmap).
 */
618 if (nla_put_u32(msg, MT76_TM_ATTR_TX_COUNT, td->tx_count) ||
619 nla_put_u32(msg, MT76_TM_ATTR_TX_LENGTH, td->tx_mpdu_len) ||
620 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE, td->tx_rate_mode) ||
621 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_NSS, td->tx_rate_nss) ||
622 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, td->tx_rate_idx) ||
623 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_SGI, td->tx_rate_sgi) ||
624 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_LDPC, td->tx_rate_ldpc) ||
625 nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_STBC, td->tx_rate_stbc) ||
626 (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_LTF) &&
627 nla_put_u8(msg, MT76_TM_ATTR_TX_LTF, td->tx_ltf)) ||
628 (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_ANTENNA) &&
629 nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, td->tx_antenna_mask)) ||
630 (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_SPE_IDX) &&
631 nla_put_u8(msg, MT76_TM_ATTR_TX_SPE_IDX, td->tx_spe_idx)) ||
632 (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_DUTY_CYCLE) &&
633 nla_put_u8(msg, MT76_TM_ATTR_TX_DUTY_CYCLE, td->tx_duty_cycle)) ||
634 (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_IPG) &&
635 nla_put_u32(msg, MT76_TM_ATTR_TX_IPG, td->tx_ipg)) ||
636 (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_TIME) &&
637 nla_put_u32(msg, MT76_TM_ATTR_TX_TIME, td->tx_time)) ||
638 (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER_CONTROL) &&
639 nla_put_u8(msg, MT76_TM_ATTR_TX_POWER_CONTROL, td->tx_power_control)) ||
/* NOTE(review): freq_offset is stored via nla_get_u32 in the cmd path
 * but emitted here with nla_put_u8 — looks like a width mismatch that
 * truncates the reported value; confirm against mainline.
 */
640 (mt76_testmode_param_present(td, MT76_TM_ATTR_FREQ_OFFSET) &&
641 nla_put_u8(msg, MT76_TM_ATTR_FREQ_OFFSET, td->freq_offset)))
/* Nested per-chain TX power values, keyed by array index */
644 if (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER)) {
645 a = nla_nest_start(msg, MT76_TM_ATTR_TX_POWER);
649 for (i = 0; i < ARRAY_SIZE(td->tx_power); i++)
650 if (nla_put_u8(msg, i, td->tx_power[i]))
653 nla_nest_end(msg, a);
/* Nested addr1/addr2/addr3 MAC addresses */
656 if (mt76_testmode_param_present(td, MT76_TM_ATTR_MAC_ADDRS)) {
657 a = nla_nest_start(msg, MT76_TM_ATTR_MAC_ADDRS);
661 for (i = 0; i < 3; i++)
662 if (nla_put(msg, i, ETH_ALEN, td->addr[i]))
665 nla_nest_end(msg, a);
671 mutex_unlock(&dev->mutex);
675 EXPORT_SYMBOL(mt76_testmode_dump);