// SPDX-License-Identifier: ISC
#include <linux/sched.h>
#include <linux/of.h>
#include "mt76.h"

#define CHAN2G(_idx, _freq) { \
	.band = NL80211_BAND_2GHZ, \
	.center_freq = (_freq), \
	.hw_value = (_idx), \
	.max_power = 30, \
}

#define CHAN5G(_idx, _freq) { \
	.band = NL80211_BAND_5GHZ, \
	.center_freq = (_freq), \
	.hw_value = (_idx), \
	.max_power = 30, \
}

static const struct ieee80211_channel mt76_channels_2ghz[] = {
	CHAN2G(1, 2412),
	CHAN2G(2, 2417),
	CHAN2G(3, 2422),
	CHAN2G(4, 2427),
	CHAN2G(5, 2432),
	CHAN2G(6, 2437),
	CHAN2G(7, 2442),
	CHAN2G(8, 2447),
	CHAN2G(9, 2452),
	CHAN2G(10, 2457),
	CHAN2G(11, 2462),
	CHAN2G(12, 2467),
	CHAN2G(13, 2472),
	CHAN2G(14, 2484),
};

static const struct ieee80211_channel mt76_channels_5ghz[] = {
	/* ... */
};

static const struct ieee80211_tpt_blink mt76_tpt_blink[] = {
	{ .throughput =   0 * 1024, .blink_time = 334 },
	{ .throughput =   1 * 1024, .blink_time = 260 },
	{ .throughput =   5 * 1024, .blink_time = 220 },
	{ .throughput =  10 * 1024, .blink_time = 190 },
	{ .throughput =  20 * 1024, .blink_time = 170 },
	{ .throughput =  50 * 1024, .blink_time = 150 },
	{ .throughput =  70 * 1024, .blink_time = 130 },
	{ .throughput = 100 * 1024, .blink_time = 110 },
	{ .throughput = 200 * 1024, .blink_time =  80 },
	{ .throughput = 300 * 1024, .blink_time =  50 },
};

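/*
 * Register an LED class device for the radio LED, driven by the mac80211
 * throughput trigger defined above. The LED pin and polarity can be
 * overridden via a "led" child node in the device tree.
 */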
static int mt76_led_init(struct mt76_dev *dev)
{
	struct device_node *np = dev->dev->of_node;
	struct ieee80211_hw *hw = dev->hw;
	u32 led_pin;

	if (!dev->led_cdev.brightness_set && !dev->led_cdev.blink_set)
		return 0;

	snprintf(dev->led_name, sizeof(dev->led_name),
		 "mt76-%s", wiphy_name(hw->wiphy));

	dev->led_cdev.name = dev->led_name;
	dev->led_cdev.default_trigger =
		ieee80211_create_tpt_led_trigger(hw,
					IEEE80211_TPT_LEDTRIG_FL_RADIO,
					mt76_tpt_blink,
					ARRAY_SIZE(mt76_tpt_blink));

	np = of_get_child_by_name(np, "led");
	if (np) {
		if (!of_property_read_u32(np, "led-sources", &led_pin))
			dev->led_pin = led_pin;
		dev->led_al = of_property_read_bool(np, "led-active-low");
	}

	return led_classdev_register(dev->dev, &dev->led_cdev);
}

static void mt76_led_cleanup(struct mt76_dev *dev)
{
	if (!dev->led_cdev.brightness_set && !dev->led_cdev.blink_set)
		return;

	led_classdev_unregister(&dev->led_cdev);
}

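/*
 * Derive per-stream HT/VHT capabilities from the number of enabled
 * antennas: TX STBC is only advertised with more than one spatial
 * stream, and the MCS maps are limited to the available streams.
 */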
static void mt76_init_stream_cap(struct mt76_phy *phy,
				 struct ieee80211_supported_band *sband,
				 bool vht)
{
	struct ieee80211_sta_ht_cap *ht_cap = &sband->ht_cap;
	int i, nstream = hweight8(phy->antenna_mask);
	struct ieee80211_sta_vht_cap *vht_cap;
	u16 mcs_map = 0;

	if (nstream > 1)
		ht_cap->cap |= IEEE80211_HT_CAP_TX_STBC;
	else
		ht_cap->cap &= ~IEEE80211_HT_CAP_TX_STBC;

	for (i = 0; i < IEEE80211_HT_MCS_MASK_LEN; i++)
		ht_cap->mcs.rx_mask[i] = i < nstream ? 0xff : 0;

	if (!vht)
		return;

	vht_cap = &sband->vht_cap;
	if (nstream > 1)
		vht_cap->cap |= IEEE80211_VHT_CAP_TXSTBC;
	else
		vht_cap->cap &= ~IEEE80211_VHT_CAP_TXSTBC;

	for (i = 0; i < 8; i++) {
		if (i < nstream)
			mcs_map |= (IEEE80211_VHT_MCS_SUPPORT_0_9 << (i * 2));
		else
			mcs_map |=
				(IEEE80211_VHT_MCS_NOT_SUPPORTED << (i * 2));
	}
	vht_cap->vht_mcs.rx_mcs_map = cpu_to_le16(mcs_map);
	vht_cap->vht_mcs.tx_mcs_map = cpu_to_le16(mcs_map);
}

void mt76_set_stream_caps(struct mt76_phy *phy, bool vht)
{
	if (phy->cap.has_2ghz)
		mt76_init_stream_cap(phy, &phy->sband_2g.sband, false);
	if (phy->cap.has_5ghz)
		mt76_init_stream_cap(phy, &phy->sband_5g.sband, vht);
}
EXPORT_SYMBOL_GPL(mt76_set_stream_caps);

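/*
 * Common setup for a supported band: duplicate the channel template,
 * allocate per-channel survey state and advertise the default HT (and,
 * optionally, VHT) capabilities.
 */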
static int
mt76_init_sband(struct mt76_phy *phy, struct mt76_sband *msband,
		const struct ieee80211_channel *chan, int n_chan,
		struct ieee80211_rate *rates, int n_rates, bool vht)
{
	struct ieee80211_supported_band *sband = &msband->sband;
	struct ieee80211_sta_vht_cap *vht_cap;
	struct ieee80211_sta_ht_cap *ht_cap;
	struct mt76_dev *dev = phy->dev;
	void *chanlist;
	int size;

	size = n_chan * sizeof(*chan);
	chanlist = devm_kmemdup(dev->dev, chan, size, GFP_KERNEL);
	if (!chanlist)
		return -ENOMEM;

	msband->chan = devm_kcalloc(dev->dev, n_chan, sizeof(*msband->chan),
				    GFP_KERNEL);
	if (!msband->chan)
		return -ENOMEM;

	sband->channels = chanlist;
	sband->n_channels = n_chan;
	sband->bitrates = rates;
	sband->n_bitrates = n_rates;

	ht_cap = &sband->ht_cap;
	ht_cap->ht_supported = true;
	ht_cap->cap |= IEEE80211_HT_CAP_SUP_WIDTH_20_40 |
		       IEEE80211_HT_CAP_GRN_FLD |
		       IEEE80211_HT_CAP_SGI_20 |
		       IEEE80211_HT_CAP_SGI_40 |
		       (1 << IEEE80211_HT_CAP_RX_STBC_SHIFT);

	ht_cap->mcs.tx_params = IEEE80211_HT_MCS_TX_DEFINED;
	ht_cap->ampdu_factor = IEEE80211_HT_MAX_AMPDU_64K;

	mt76_init_stream_cap(phy, sband, vht);

	if (!vht)
		return 0;

	vht_cap = &sband->vht_cap;
	vht_cap->vht_supported = true;
	vht_cap->cap |= IEEE80211_VHT_CAP_RXLDPC |
			IEEE80211_VHT_CAP_RXSTBC_1 |
			IEEE80211_VHT_CAP_SHORT_GI_80 |
			IEEE80211_VHT_CAP_RX_ANTENNA_PATTERN |
			IEEE80211_VHT_CAP_TX_ANTENNA_PATTERN |
			(3 << IEEE80211_VHT_CAP_MAX_A_MPDU_LENGTH_EXPONENT_SHIFT);

	return 0;
}

static int
mt76_init_sband_2g(struct mt76_phy *phy, struct ieee80211_rate *rates,
		   int n_rates)
{
	phy->hw->wiphy->bands[NL80211_BAND_2GHZ] = &phy->sband_2g.sband;

	return mt76_init_sband(phy, &phy->sband_2g, mt76_channels_2ghz,
			       ARRAY_SIZE(mt76_channels_2ghz), rates,
			       n_rates, false);
}

static int
mt76_init_sband_5g(struct mt76_phy *phy, struct ieee80211_rate *rates,
		   int n_rates, bool vht)
{
	phy->hw->wiphy->bands[NL80211_BAND_5GHZ] = &phy->sband_5g.sband;

	return mt76_init_sband(phy, &phy->sband_5g, mt76_channels_5ghz,
			       ARRAY_SIZE(mt76_channels_5ghz), rates,
			       n_rates, vht);
}

static void
mt76_check_sband(struct mt76_phy *phy, struct mt76_sband *msband,
		 enum nl80211_band band)
{
	struct ieee80211_supported_band *sband = &msband->sband;
	bool found = false;
	int i;

	if (!sband)
		return;

	for (i = 0; i < sband->n_channels; i++) {
		if (sband->channels[i].flags & IEEE80211_CHAN_DISABLED)
			continue;

		found = true;
		break;
	}

	if (found) {
		phy->chandef.chan = &sband->channels[0];
		phy->chan_state = &msband->chan[0];
		return;
	}

	sband->n_channels = 0;
	phy->hw->wiphy->bands[band] = NULL;
}

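/* Common wiphy and ieee80211_hw capabilities advertised by all mt76 drivers */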
static void
mt76_phy_init(struct mt76_phy *phy, struct ieee80211_hw *hw)
{
	struct mt76_dev *dev = phy->dev;
	struct wiphy *wiphy = hw->wiphy;

	SET_IEEE80211_DEV(hw, dev->dev);
	SET_IEEE80211_PERM_ADDR(hw, phy->macaddr);

	wiphy->features |= NL80211_FEATURE_ACTIVE_MONITOR;
	wiphy->flags |= WIPHY_FLAG_HAS_CHANNEL_SWITCH |
			WIPHY_FLAG_SUPPORTS_TDLS |
			WIPHY_FLAG_AP_UAPSD;

	wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_CQM_RSSI_LIST);
	wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_AIRTIME_FAIRNESS);
	wiphy_ext_feature_set(wiphy, NL80211_EXT_FEATURE_AQL);

	wiphy->available_antennas_tx = dev->phy.antenna_mask;
	wiphy->available_antennas_rx = dev->phy.antenna_mask;

	hw->txq_data_size = sizeof(struct mt76_txq);
	hw->uapsd_max_sp_len = IEEE80211_WMM_IE_STA_QOSINFO_SP_ALL;

	if (!hw->max_tx_fragments)
		hw->max_tx_fragments = 16;

	ieee80211_hw_set(hw, SIGNAL_DBM);
	ieee80211_hw_set(hw, AMPDU_AGGREGATION);
	ieee80211_hw_set(hw, SUPPORTS_RC_TABLE);
	ieee80211_hw_set(hw, SUPPORT_FAST_XMIT);
	ieee80211_hw_set(hw, SUPPORTS_CLONED_SKBS);
	ieee80211_hw_set(hw, SUPPORTS_AMSDU_IN_AMPDU);
	ieee80211_hw_set(hw, SUPPORTS_REORDERING_BUFFER);

	if (!(dev->drv->drv_flags & MT_DRV_AMSDU_OFFLOAD)) {
		ieee80211_hw_set(hw, TX_AMSDU);
		ieee80211_hw_set(hw, TX_FRAG_LIST);
	}

	ieee80211_hw_set(hw, MFP_CAPABLE);
	ieee80211_hw_set(hw, AP_LINK_PS);
	ieee80211_hw_set(hw, REPORTS_TX_ACK_STATUS);

	wiphy->flags |= WIPHY_FLAG_IBSS_RSN;
	wiphy->interface_modes =
		BIT(NL80211_IFTYPE_STATION) |
		BIT(NL80211_IFTYPE_AP) |
#ifdef CONFIG_MAC80211_MESH
		BIT(NL80211_IFTYPE_MESH_POINT) |
#endif
		BIT(NL80211_IFTYPE_P2P_CLIENT) |
		BIT(NL80211_IFTYPE_P2P_GO) |
		BIT(NL80211_IFTYPE_ADHOC);
}

struct mt76_phy *
mt76_alloc_phy(struct mt76_dev *dev, unsigned int size,
	       const struct ieee80211_ops *ops)
{
	struct ieee80211_hw *hw;
	unsigned int phy_size;
	struct mt76_phy *phy;

	phy_size = ALIGN(sizeof(*phy), 8);
	hw = ieee80211_alloc_hw(size + phy_size, ops);
	if (!hw)
		return NULL;

	phy = hw->priv;
	phy->dev = dev;
	phy->hw = hw;
	phy->priv = hw->priv + phy_size;

	return phy;
}
EXPORT_SYMBOL_GPL(mt76_alloc_phy);

int mt76_register_phy(struct mt76_phy *phy, bool vht,
		      struct ieee80211_rate *rates, int n_rates)
{
	int ret;

	mt76_phy_init(phy, phy->hw);

	if (phy->cap.has_2ghz) {
		ret = mt76_init_sband_2g(phy, rates, n_rates);
		if (ret)
			return ret;
	}

	if (phy->cap.has_5ghz) {
		ret = mt76_init_sband_5g(phy, rates + 4, n_rates - 4, vht);
		if (ret)
			return ret;
	}

	wiphy_read_of_freq_limits(phy->hw->wiphy);
	mt76_check_sband(phy, &phy->sband_2g, NL80211_BAND_2GHZ);
	mt76_check_sband(phy, &phy->sband_5g, NL80211_BAND_5GHZ);

	ret = ieee80211_register_hw(phy->hw);
	if (ret)
		return ret;

	phy->dev->phy2 = phy;

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_register_phy);

void mt76_unregister_phy(struct mt76_phy *phy)
{
	struct mt76_dev *dev = phy->dev;

	mt76_tx_status_check(dev, NULL, true);
	ieee80211_unregister_hw(phy->hw);
	dev->phy2 = NULL;
}
EXPORT_SYMBOL_GPL(mt76_unregister_phy);

struct mt76_dev *
mt76_alloc_device(struct device *pdev, unsigned int size,
		  const struct ieee80211_ops *ops,
		  const struct mt76_driver_ops *drv_ops)
{
	struct ieee80211_hw *hw;
	struct mt76_phy *phy;
	struct mt76_dev *dev;
	int i;

	hw = ieee80211_alloc_hw(size, ops);
	if (!hw)
		return NULL;

	dev = hw->priv;
	dev->hw = hw;
	dev->dev = pdev;
	dev->drv = drv_ops;

	phy = &dev->phy;
	phy->dev = dev;
	phy->hw = hw;

	spin_lock_init(&dev->rx_lock);
	spin_lock_init(&dev->lock);
	spin_lock_init(&dev->cc_lock);
	mutex_init(&dev->mutex);
	init_waitqueue_head(&dev->tx_wait);
	skb_queue_head_init(&dev->status_list);

	skb_queue_head_init(&dev->mcu.res_q);
	init_waitqueue_head(&dev->mcu.wait);
	mutex_init(&dev->mcu.mutex);
	dev->tx_worker.fn = mt76_tx_worker;

	spin_lock_init(&dev->token_lock);
	idr_init(&dev->token);

	INIT_LIST_HEAD(&dev->txwi_cache);

	for (i = 0; i < ARRAY_SIZE(dev->q_rx); i++)
		skb_queue_head_init(&dev->rx_skb[i]);

	dev->wq = alloc_ordered_workqueue("mt76", 0);
	if (!dev->wq) {
		ieee80211_free_hw(hw);
		return NULL;
	}

	return dev;
}
EXPORT_SYMBOL_GPL(mt76_alloc_device);

int mt76_register_device(struct mt76_dev *dev, bool vht,
			 struct ieee80211_rate *rates, int n_rates)
{
	struct ieee80211_hw *hw = dev->hw;
	struct mt76_phy *phy = &dev->phy;
	int ret;

	dev_set_drvdata(dev->dev, dev);
	mt76_phy_init(phy, hw);

	if (phy->cap.has_2ghz) {
		ret = mt76_init_sband_2g(phy, rates, n_rates);
		if (ret)
			return ret;
	}

	if (phy->cap.has_5ghz) {
		ret = mt76_init_sband_5g(phy, rates + 4, n_rates - 4, vht);
		if (ret)
			return ret;
	}

	wiphy_read_of_freq_limits(hw->wiphy);
	mt76_check_sband(&dev->phy, &phy->sband_2g, NL80211_BAND_2GHZ);
	mt76_check_sband(&dev->phy, &phy->sband_5g, NL80211_BAND_5GHZ);

	if (IS_ENABLED(CONFIG_MT76_LEDS)) {
		ret = mt76_led_init(dev);
		if (ret)
			return ret;
	}

	ret = ieee80211_register_hw(hw);
	if (ret)
		return ret;

	WARN_ON(mt76_worker_setup(hw, &dev->tx_worker, NULL, "tx"));
	sched_set_fifo_low(dev->tx_worker.task);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_register_device);

void mt76_unregister_device(struct mt76_dev *dev)
{
	struct ieee80211_hw *hw = dev->hw;

	if (IS_ENABLED(CONFIG_MT76_LEDS))
		mt76_led_cleanup(dev);
	mt76_tx_status_check(dev, NULL, true);
	ieee80211_unregister_hw(hw);
}
EXPORT_SYMBOL_GPL(mt76_unregister_device);

void mt76_free_device(struct mt76_dev *dev)
{
	mt76_worker_teardown(&dev->tx_worker);
	if (dev->wq) {
		destroy_workqueue(dev->wq);
		dev->wq = NULL;
	}
	ieee80211_free_hw(dev->hw);
}
EXPORT_SYMBOL_GPL(mt76_free_device);

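/*
 * A-MSDU subframes that the hardware reports as separate buffers are
 * collected per RX queue in phy->rx_amsdu[q] and chained through the
 * head skb's frag_list before being queued for RX processing.
 */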
static void mt76_rx_release_amsdu(struct mt76_phy *phy, enum mt76_rxq_id q)
{
	struct sk_buff *skb = phy->rx_amsdu[q].head;
	struct mt76_dev *dev = phy->dev;

	phy->rx_amsdu[q].head = NULL;
	phy->rx_amsdu[q].tail = NULL;
	__skb_queue_tail(&dev->rx_skb[q], skb);
}

static void mt76_rx_release_burst(struct mt76_phy *phy, enum mt76_rxq_id q,
				  struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;

	if (phy->rx_amsdu[q].head &&
	    (!status->amsdu || status->first_amsdu ||
	     status->seqno != phy->rx_amsdu[q].seqno))
		mt76_rx_release_amsdu(phy, q);

	if (!phy->rx_amsdu[q].head) {
		phy->rx_amsdu[q].tail = &skb_shinfo(skb)->frag_list;
		phy->rx_amsdu[q].seqno = status->seqno;
		phy->rx_amsdu[q].head = skb;
	} else {
		*phy->rx_amsdu[q].tail = skb;
		phy->rx_amsdu[q].tail = &skb->next;
	}

	if (!status->amsdu || status->last_amsdu)
		mt76_rx_release_amsdu(phy, q);
}

void mt76_rx(struct mt76_dev *dev, enum mt76_rxq_id q, struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_phy *phy = mt76_dev_phy(dev, status->ext_phy);

	if (!test_bit(MT76_STATE_RUNNING, &phy->state)) {
		dev_kfree_skb(skb);
		return;
	}

#ifdef CONFIG_NL80211_TESTMODE
	if (phy->test.state == MT76_TM_STATE_RX_FRAMES) {
		phy->test.rx_stats.packets[q]++;
		if (status->flag & RX_FLAG_FAILED_FCS_CRC)
			phy->test.rx_stats.fcs_error[q]++;
	}
#endif

	mt76_rx_release_burst(phy, q, skb);
}
EXPORT_SYMBOL_GPL(mt76_rx);

bool mt76_has_tx_pending(struct mt76_phy *phy)
{
	struct mt76_queue *q;
	int i;

	for (i = 0; i < __MT_TXQ_MAX; i++) {
		q = phy->q_tx[i];
		if (q && q->queued)
			return true;
	}

	return false;
}
EXPORT_SYMBOL_GPL(mt76_has_tx_pending);

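/* Look up the survey state tracked for a given channel */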
static struct mt76_channel_state *
mt76_channel_state(struct mt76_phy *phy, struct ieee80211_channel *c)
{
	struct mt76_sband *msband;
	int idx;

	if (c->band == NL80211_BAND_2GHZ)
		msband = &phy->sband_2g;
	else
		msband = &phy->sband_5g;

	idx = c - &msband->sband.channels[0];
	return &msband->chan[idx];
}

void mt76_update_survey_active_time(struct mt76_phy *phy, ktime_t time)
{
	struct mt76_channel_state *state = phy->chan_state;

	state->cc_active += ktime_to_us(ktime_sub(time,
						  phy->survey_time));
	phy->survey_time = time;
}
EXPORT_SYMBOL_GPL(mt76_update_survey_active_time);

void mt76_update_survey(struct mt76_dev *dev)
{
	ktime_t cur_time;

	if (dev->drv->update_survey)
		dev->drv->update_survey(dev);

	cur_time = ktime_get_boottime();
	mt76_update_survey_active_time(&dev->phy, cur_time);
	if (dev->phy2)
		mt76_update_survey_active_time(dev->phy2, cur_time);

	if (dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME) {
		struct mt76_channel_state *state = dev->phy.chan_state;

		spin_lock_bh(&dev->cc_lock);
		state->cc_bss_rx += dev->cur_cc_bss_rx;
		dev->cur_cc_bss_rx = 0;
		spin_unlock_bh(&dev->cc_lock);
	}
}
EXPORT_SYMBOL_GPL(mt76_update_survey);

void mt76_set_channel(struct mt76_phy *phy)
{
	struct mt76_dev *dev = phy->dev;
	struct ieee80211_hw *hw = phy->hw;
	struct cfg80211_chan_def *chandef = &hw->conf.chandef;
	bool offchannel = hw->conf.flags & IEEE80211_CONF_OFFCHANNEL;
	int timeout = HZ / 5;

	wait_event_timeout(dev->tx_wait, !mt76_has_tx_pending(phy), timeout);
	mt76_update_survey(dev);

	phy->chandef = *chandef;
	phy->chan_state = mt76_channel_state(phy, chandef->chan);

	if (!offchannel)
		phy->main_chan = chandef->chan;

	if (chandef->chan != phy->main_chan)
		memset(phy->chan_state, 0, sizeof(*phy->chan_state));
}
EXPORT_SYMBOL_GPL(mt76_set_channel);

int mt76_get_survey(struct ieee80211_hw *hw, int idx,
		    struct survey_info *survey)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_sband *sband;
	struct ieee80211_channel *chan;
	struct mt76_channel_state *state;
	int ret = 0;

	mutex_lock(&dev->mutex);
	if (idx == 0 && dev->drv->update_survey)
		mt76_update_survey(dev);

	sband = &phy->sband_2g;
	if (idx >= sband->sband.n_channels) {
		idx -= sband->sband.n_channels;
		sband = &phy->sband_5g;
	}

	if (idx >= sband->sband.n_channels) {
		ret = -ENOENT;
		goto out;
	}

	chan = &sband->sband.channels[idx];
	state = mt76_channel_state(phy, chan);

	memset(survey, 0, sizeof(*survey));
	survey->channel = chan;
	survey->filled = SURVEY_INFO_TIME | SURVEY_INFO_TIME_BUSY;
	survey->filled |= dev->drv->survey_flags;
	if (state->noise)
		survey->filled |= SURVEY_INFO_NOISE_DBM;

	if (chan == phy->main_chan) {
		survey->filled |= SURVEY_INFO_IN_USE;

		if (dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME)
			survey->filled |= SURVEY_INFO_TIME_BSS_RX;
	}

	survey->time_busy = div_u64(state->cc_busy, 1000);
	survey->time_rx = div_u64(state->cc_rx, 1000);
	survey->time = div_u64(state->cc_active, 1000);
	survey->noise = state->noise;

	spin_lock_bh(&dev->cc_lock);
	survey->time_bss_rx = div_u64(state->cc_bss_rx, 1000);
	survey->time_tx = div_u64(state->cc_tx, 1000);
	spin_unlock_bh(&dev->cc_lock);

out:
	mutex_unlock(&dev->mutex);

	return ret;
}
EXPORT_SYMBOL_GPL(mt76_get_survey);

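/*
 * Record the current CCMP packet numbers for each TID so that received
 * frames can be checked for replays in mt76_check_ccmp_pn().
 */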
void mt76_wcid_key_setup(struct mt76_dev *dev, struct mt76_wcid *wcid,
			 struct ieee80211_key_conf *key)
{
	struct ieee80211_key_seq seq;
	int i;

	wcid->rx_check_pn = false;

	if (!key)
		return;

	if (key->cipher != WLAN_CIPHER_SUITE_CCMP)
		return;

	wcid->rx_check_pn = true;
	for (i = 0; i < IEEE80211_NUM_TIDS; i++) {
		ieee80211_get_key_rx_seq(key, i, &seq);
		memcpy(wcid->rx_key_pn[i], seq.ccmp.pn, sizeof(seq.ccmp.pn));
	}
}
EXPORT_SYMBOL(mt76_wcid_key_setup);

static void
mt76_rx_convert(struct mt76_dev *dev, struct sk_buff *skb,
		struct ieee80211_hw **hw,
		struct ieee80211_sta **sta)
{
	struct ieee80211_rx_status *status = IEEE80211_SKB_RXCB(skb);
	struct mt76_rx_status mstat;

	mstat = *((struct mt76_rx_status *)skb->cb);
	memset(status, 0, sizeof(*status));

	status->flag = mstat.flag;
	status->freq = mstat.freq;
	status->enc_flags = mstat.enc_flags;
	status->encoding = mstat.encoding;
	status->bw = mstat.bw;
	status->he_ru = mstat.he_ru;
	status->he_gi = mstat.he_gi;
	status->he_dcm = mstat.he_dcm;
	status->rate_idx = mstat.rate_idx;
	status->nss = mstat.nss;
	status->band = mstat.band;
	status->signal = mstat.signal;
	status->chains = mstat.chains;
	status->ampdu_reference = mstat.ampdu_ref;
	status->device_timestamp = mstat.timestamp;
	status->mactime = mstat.timestamp;

	BUILD_BUG_ON(sizeof(mstat) > sizeof(skb->cb));
	BUILD_BUG_ON(sizeof(status->chain_signal) !=
		     sizeof(mstat.chain_signal));
	memcpy(status->chain_signal, mstat.chain_signal,
	       sizeof(mstat.chain_signal));

	*sta = wcid_to_sta(mstat.wcid);
	*hw = mt76_phy_hw(dev, mstat.ext_phy);
}

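/*
 * Reject decrypted frames whose CCMP packet number does not advance
 * beyond the last value seen for this station and TID.
 */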
static int
mt76_check_ccmp_pn(struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_wcid *wcid = status->wcid;
	struct ieee80211_hdr *hdr;
	u8 tidno = status->qos_ctl & IEEE80211_QOS_CTL_TID_MASK;
	int ret;

	if (!(status->flag & RX_FLAG_DECRYPTED))
		return 0;

	if (!wcid || !wcid->rx_check_pn)
		return 0;

	if (!(status->flag & RX_FLAG_IV_STRIPPED)) {
		/*
		 * Validate the first fragment both here and in mac80211.
		 * All further fragments will be validated by mac80211 only.
		 */
		hdr = mt76_skb_get_hdr(skb);
		if (ieee80211_is_frag(hdr) &&
		    !ieee80211_is_first_frag(hdr->frame_control))
			return 0;
	}

	BUILD_BUG_ON(sizeof(status->iv) != sizeof(wcid->rx_key_pn[0]));
	ret = memcmp(status->iv, wcid->rx_key_pn[tidno],
		     sizeof(status->iv));
	if (ret <= 0)
		return -EINVAL; /* replay */

	memcpy(wcid->rx_key_pn[tidno], status->iv, sizeof(status->iv));

	if (status->flag & RX_FLAG_IV_STRIPPED)
		status->flag |= RX_FLAG_PN_VALIDATED;

	return 0;
}

static void
mt76_airtime_report(struct mt76_dev *dev, struct mt76_rx_status *status,
		    int len)
{
	struct mt76_wcid *wcid = status->wcid;
	struct ieee80211_rx_status info = {
		.enc_flags = status->enc_flags,
		.rate_idx = status->rate_idx,
		.encoding = status->encoding,
		.band = status->band,
		.nss = status->nss,
		.bw = status->bw,
	};
	struct ieee80211_sta *sta;
	u32 airtime;
	u8 tidno = status->qos_ctl & IEEE80211_QOS_CTL_TID_MASK;

	airtime = ieee80211_calc_rx_airtime(dev->hw, &info, len);
	spin_lock(&dev->cc_lock);
	dev->cur_cc_bss_rx += airtime;
	spin_unlock(&dev->cc_lock);

	if (!wcid || !wcid->sta)
		return;

	sta = container_of((void *)wcid, struct ieee80211_sta, drv_priv);
	ieee80211_sta_register_airtime(sta, tidno, 0, airtime);
}

static void
mt76_airtime_flush_ampdu(struct mt76_dev *dev)
{
	struct mt76_wcid *wcid;
	int wcid_idx;

	if (!dev->rx_ampdu_len)
		return;

	wcid_idx = dev->rx_ampdu_status.wcid_idx;
	if (wcid_idx < ARRAY_SIZE(dev->wcid))
		wcid = rcu_dereference(dev->wcid[wcid_idx]);
	else
		wcid = NULL;
	dev->rx_ampdu_status.wcid = wcid;

	mt76_airtime_report(dev, &dev->rx_ampdu_status, dev->rx_ampdu_len);

	dev->rx_ampdu_len = 0;
	dev->rx_ampdu_ref = 0;
}

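/*
 * Software RX airtime accounting: frames belonging to the same A-MPDU
 * are accumulated and reported in one go once a frame with a different
 * A-MPDU reference (or a non-aggregated frame) is seen.
 */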
static void
mt76_airtime_check(struct mt76_dev *dev, struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_wcid *wcid = status->wcid;

	if (!(dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME))
		return;

	if (!wcid || !wcid->sta) {
		struct ieee80211_hdr *hdr = mt76_skb_get_hdr(skb);

		if (status->flag & RX_FLAG_8023)
			return;

		if (!ether_addr_equal(hdr->addr1, dev->phy.macaddr))
			return;

		wcid = NULL;
	}

	if (!(status->flag & RX_FLAG_AMPDU_DETAILS) ||
	    status->ampdu_ref != dev->rx_ampdu_ref)
		mt76_airtime_flush_ampdu(dev);

	if (status->flag & RX_FLAG_AMPDU_DETAILS) {
		if (!dev->rx_ampdu_len ||
		    status->ampdu_ref != dev->rx_ampdu_ref) {
			dev->rx_ampdu_status = *status;
			dev->rx_ampdu_status.wcid_idx = wcid ? wcid->idx : 0xff;
			dev->rx_ampdu_ref = status->ampdu_ref;
		}

		dev->rx_ampdu_len += skb->len;
		return;
	}

	mt76_airtime_report(dev, status, skb->len);
}

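/*
 * Per-frame station bookkeeping: update the RSSI average and, for drivers
 * that request it, mirror the peer's power save transitions and PS-Poll /
 * U-APSD triggers into mac80211.
 */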
static void
mt76_check_sta(struct mt76_dev *dev, struct sk_buff *skb)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct ieee80211_hdr *hdr = mt76_skb_get_hdr(skb);
	struct ieee80211_sta *sta;
	struct ieee80211_hw *hw;
	struct mt76_wcid *wcid = status->wcid;
	u8 tidno = status->qos_ctl & IEEE80211_QOS_CTL_TID_MASK;
	bool ps;

	hw = mt76_phy_hw(dev, status->ext_phy);
	if (ieee80211_is_pspoll(hdr->frame_control) && !wcid &&
	    !(status->flag & RX_FLAG_8023)) {
		sta = ieee80211_find_sta_by_ifaddr(hw, hdr->addr2, NULL);
		if (sta)
			wcid = status->wcid = (struct mt76_wcid *)sta->drv_priv;
	}

	mt76_airtime_check(dev, skb);

	if (!wcid || !wcid->sta)
		return;

	sta = container_of((void *)wcid, struct ieee80211_sta, drv_priv);

	if (status->signal <= 0)
		ewma_signal_add(&wcid->rssi, -status->signal);

	wcid->inactive_count = 0;

	if (status->flag & RX_FLAG_8023)
		return;

	if (!test_bit(MT_WCID_FLAG_CHECK_PS, &wcid->flags))
		return;

	if (ieee80211_is_pspoll(hdr->frame_control)) {
		ieee80211_sta_pspoll(sta);
		return;
	}

	if (ieee80211_has_morefrags(hdr->frame_control) ||
	    !(ieee80211_is_mgmt(hdr->frame_control) ||
	      ieee80211_is_data(hdr->frame_control)))
		return;

	ps = ieee80211_has_pm(hdr->frame_control);

	if (ps && (ieee80211_is_data_qos(hdr->frame_control) ||
		   ieee80211_is_qos_nullfunc(hdr->frame_control)))
		ieee80211_sta_uapsd_trigger(sta, tidno);

	if (!!test_bit(MT_WCID_FLAG_PS, &wcid->flags) == ps)
		return;

	if (ps)
		set_bit(MT_WCID_FLAG_PS, &wcid->flags);
	else
		clear_bit(MT_WCID_FLAG_PS, &wcid->flags);

	dev->drv->sta_ps(dev, sta, ps);
	ieee80211_sta_ps_transition(sta, ps);
}

void mt76_rx_complete(struct mt76_dev *dev, struct sk_buff_head *frames,
		      struct napi_struct *napi)
{
	struct ieee80211_sta *sta;
	struct ieee80211_hw *hw;
	struct sk_buff *skb, *tmp;
	LIST_HEAD(list);

	spin_lock(&dev->rx_lock);
	while ((skb = __skb_dequeue(frames)) != NULL) {
		struct sk_buff *nskb = skb_shinfo(skb)->frag_list;

		if (mt76_check_ccmp_pn(skb)) {
			dev_kfree_skb(skb);
			continue;
		}

		skb_shinfo(skb)->frag_list = NULL;
		mt76_rx_convert(dev, skb, &hw, &sta);
		ieee80211_rx_list(hw, sta, skb, &list);

		/* subsequent A-MSDU frames */
		while (nskb) {
			skb = nskb;
			nskb = nskb->next;
			skb->next = NULL;

			mt76_rx_convert(dev, skb, &hw, &sta);
			ieee80211_rx_list(hw, sta, skb, &list);
		}
	}
	spin_unlock(&dev->rx_lock);

	if (!napi) {
		netif_receive_skb_list(&list);
		return;
	}

	list_for_each_entry_safe(skb, tmp, &list, list) {
		skb_list_del_init(skb);
		napi_gro_receive(napi, skb);
	}
}

void mt76_rx_poll_complete(struct mt76_dev *dev, enum mt76_rxq_id q,
			   struct napi_struct *napi)
{
	struct sk_buff_head frames;
	struct sk_buff *skb;

	__skb_queue_head_init(&frames);

	while ((skb = __skb_dequeue(&dev->rx_skb[q])) != NULL) {
		mt76_check_sta(dev, skb);
		mt76_rx_aggr_reorder(skb, &frames);
	}

	mt76_rx_complete(dev, &frames, napi);
}
EXPORT_SYMBOL_GPL(mt76_rx_poll_complete);

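/* Allocate driver state for a new station and hook up its TX queues */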
static int
mt76_sta_add(struct mt76_dev *dev, struct ieee80211_vif *vif,
	     struct ieee80211_sta *sta, bool ext_phy)
{
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
	int ret;
	int i;

	mutex_lock(&dev->mutex);

	ret = dev->drv->sta_add(dev, vif, sta);
	if (ret)
		goto out;

	for (i = 0; i < ARRAY_SIZE(sta->txq); i++) {
		struct mt76_txq *mtxq;

		if (!sta->txq[i])
			continue;

		mtxq = (struct mt76_txq *)sta->txq[i]->drv_priv;
		mtxq->wcid = wcid;
	}

	ewma_signal_init(&wcid->rssi);
	if (ext_phy)
		mt76_wcid_mask_set(dev->wcid_phy_mask, wcid->idx);
	wcid->ext_phy = ext_phy;
	rcu_assign_pointer(dev->wcid[wcid->idx], wcid);

out:
	mutex_unlock(&dev->mutex);

	return ret;
}

void __mt76_sta_remove(struct mt76_dev *dev, struct ieee80211_vif *vif,
		       struct ieee80211_sta *sta)
{
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
	int i, idx = wcid->idx;

	for (i = 0; i < ARRAY_SIZE(wcid->aggr); i++)
		mt76_rx_aggr_stop(dev, wcid, i);

	if (dev->drv->sta_remove)
		dev->drv->sta_remove(dev, vif, sta);

	mt76_tx_status_check(dev, wcid, true);
	mt76_wcid_mask_clear(dev->wcid_mask, idx);
	mt76_wcid_mask_clear(dev->wcid_phy_mask, idx);
}
EXPORT_SYMBOL_GPL(__mt76_sta_remove);

static void
mt76_sta_remove(struct mt76_dev *dev, struct ieee80211_vif *vif,
		struct ieee80211_sta *sta)
{
	mutex_lock(&dev->mutex);
	__mt76_sta_remove(dev, vif, sta);
	mutex_unlock(&dev->mutex);
}

int mt76_sta_state(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		   struct ieee80211_sta *sta,
		   enum ieee80211_sta_state old_state,
		   enum ieee80211_sta_state new_state)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	bool ext_phy = phy != &dev->phy;

	if (old_state == IEEE80211_STA_NOTEXIST &&
	    new_state == IEEE80211_STA_NONE)
		return mt76_sta_add(dev, vif, sta, ext_phy);

	if (old_state == IEEE80211_STA_AUTH &&
	    new_state == IEEE80211_STA_ASSOC &&
	    dev->drv->sta_assoc)
		dev->drv->sta_assoc(dev, vif, sta);

	if (old_state == IEEE80211_STA_NONE &&
	    new_state == IEEE80211_STA_NOTEXIST)
		mt76_sta_remove(dev, vif, sta);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_sta_state);

void mt76_sta_pre_rcu_remove(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
			     struct ieee80211_sta *sta)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;

	mutex_lock(&dev->mutex);
	rcu_assign_pointer(dev->wcid[wcid->idx], NULL);
	mutex_unlock(&dev->mutex);
}
EXPORT_SYMBOL_GPL(mt76_sta_pre_rcu_remove);

int mt76_get_txpower(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		     int *dbm)
{
	struct mt76_phy *phy = hw->priv;
	int n_chains = hweight8(phy->antenna_mask);
	int delta = mt76_tx_power_nss_delta(n_chains);

	*dbm = DIV_ROUND_UP(phy->txpower_cur + delta, 2);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_get_txpower);

static void
__mt76_csa_finish(void *priv, u8 *mac, struct ieee80211_vif *vif)
{
	if (vif->csa_active && ieee80211_beacon_cntdwn_is_complete(vif))
		ieee80211_csa_finish(vif);
}

void mt76_csa_finish(struct mt76_dev *dev)
{
	if (!dev->csa_complete)
		return;

	ieee80211_iterate_active_interfaces_atomic(dev->hw,
		IEEE80211_IFACE_ITER_RESUME_ALL,
		__mt76_csa_finish, dev);

	dev->csa_complete = 0;
}
EXPORT_SYMBOL_GPL(mt76_csa_finish);

static void
__mt76_csa_check(void *priv, u8 *mac, struct ieee80211_vif *vif)
{
	struct mt76_dev *dev = priv;

	if (!vif->csa_active)
		return;

	dev->csa_complete |= ieee80211_beacon_cntdwn_is_complete(vif);
}

void mt76_csa_check(struct mt76_dev *dev)
{
	ieee80211_iterate_active_interfaces_atomic(dev->hw,
		IEEE80211_IFACE_ITER_RESUME_ALL,
		__mt76_csa_check, dev);
}
EXPORT_SYMBOL_GPL(mt76_csa_check);

int
mt76_set_tim(struct ieee80211_hw *hw, struct ieee80211_sta *sta, bool set)
{
	return 0;
}
EXPORT_SYMBOL_GPL(mt76_set_tim);

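/*
 * Rebuild the CCMP header in front of the payload from the PN saved in
 * the RX status, for hardware that strips the IV on receive.
 */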
void mt76_insert_ccmp_hdr(struct sk_buff *skb, u8 key_id)
{
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	int hdr_len = ieee80211_get_hdrlen_from_skb(skb);
	u8 *hdr, *pn = status->iv;

	__skb_push(skb, 8);
	memmove(skb->data, skb->data + 8, hdr_len);
	hdr = skb->data + hdr_len;

	hdr[0] = pn[5];
	hdr[1] = pn[4];
	hdr[2] = 0;
	hdr[3] = 0x20 | (key_id << 6);
	hdr[4] = pn[3];
	hdr[5] = pn[2];
	hdr[6] = pn[1];
	hdr[7] = pn[0];

	status->flag &= ~RX_FLAG_IV_STRIPPED;
}
EXPORT_SYMBOL_GPL(mt76_insert_ccmp_hdr);

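/* Map a hardware rate index to an index into the band's bitrate table */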
int mt76_get_rate(struct mt76_dev *dev,
		  struct ieee80211_supported_band *sband,
		  int idx, bool cck)
{
	int i, offset = 0, len = sband->n_bitrates;

	if (cck) {
		if (sband == &dev->phy.sband_5g.sband)
			return 0;

		idx &= ~BIT(2); /* short preamble */
	} else if (sband == &dev->phy.sband_2g.sband) {
		offset = 4;
	}

	for (i = offset; i < len; i++) {
		if ((sband->bitrates[i].hw_value & GENMASK(7, 0)) == idx)
			return i;
	}

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_get_rate);

void mt76_sw_scan(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		  const u8 *mac)
{
	struct mt76_phy *phy = hw->priv;

	set_bit(MT76_SCANNING, &phy->state);
}
EXPORT_SYMBOL_GPL(mt76_sw_scan);

void mt76_sw_scan_complete(struct ieee80211_hw *hw, struct ieee80211_vif *vif)
{
	struct mt76_phy *phy = hw->priv;

	clear_bit(MT76_SCANNING, &phy->state);
}
EXPORT_SYMBOL_GPL(mt76_sw_scan_complete);

int mt76_get_antenna(struct ieee80211_hw *hw, u32 *tx_ant, u32 *rx_ant)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;

	mutex_lock(&dev->mutex);
	*tx_ant = phy->antenna_mask;
	*rx_ant = phy->antenna_mask;
	mutex_unlock(&dev->mutex);

	return 0;
}
EXPORT_SYMBOL_GPL(mt76_get_antenna);

struct mt76_queue *
mt76_init_queue(struct mt76_dev *dev, int qid, int idx, int n_desc,
		int ring_base)
{
	struct mt76_queue *hwq;
	int err;

	hwq = devm_kzalloc(dev->dev, sizeof(*hwq), GFP_KERNEL);
	if (!hwq)
		return ERR_PTR(-ENOMEM);

	err = dev->queue_ops->alloc(dev, hwq, idx, n_desc, 0, ring_base);
	if (err < 0)
		return ERR_PTR(err);

	return hwq;
}
EXPORT_SYMBOL_GPL(mt76_init_queue);