Lines Matching refs:mt76

20 return phy && test_bit(MT76_STATE_RUNNING, &phy->mt76->state);
33 ret = mt76_connac_mcu_set_pm(&dev->mt76,
34 dev->phy.mt76->band_idx, 0);
38 ret = mt7915_mcu_set_mac(dev, dev->phy.mt76->band_idx,
43 mt7915_mac_enable_nf(dev, dev->phy.mt76->band_idx);
47 ret = mt76_connac_mcu_set_pm(&dev->mt76,
48 phy->mt76->band_idx, 0);
52 ret = mt7915_mcu_set_mac(dev, phy->mt76->band_idx,
57 mt7915_mac_enable_nf(dev, phy->mt76->band_idx);
71 ret = mt76_connac_mcu_set_rts_thresh(&dev->mt76, 0x92b,
72 phy->mt76->band_idx);
84 set_bit(MT76_STATE_RUNNING, &phy->mt76->state);
86 if (!mt76_testmode_enabled(phy->mt76))
87 ieee80211_queue_delayed_work(hw, &phy->mt76->mac_work,
104 mutex_lock(&dev->mt76.mutex);
106 mutex_unlock(&dev->mt76.mutex);
116 cancel_delayed_work_sync(&phy->mt76->mac_work);
118 mutex_lock(&dev->mt76.mutex);
120 mt76_testmode_reset(phy->mt76, true);
122 clear_bit(MT76_STATE_RUNNING, &phy->mt76->state);
125 mt76_connac_mcu_set_pm(&dev->mt76, phy->mt76->band_idx, 1);
126 mt7915_mcu_set_mac(dev, phy->mt76->band_idx, false, false);
130 mt76_connac_mcu_set_pm(&dev->mt76, dev->phy.mt76->band_idx, 1);
131 mt7915_mcu_set_mac(dev, dev->phy.mt76->band_idx, false, false);
134 mutex_unlock(&dev->mt76.mutex);
214 mutex_lock(&dev->mt76.mutex);
216 mt76_testmode_reset(phy->mt76, true);
222 mvif->mt76.idx = __ffs64(~dev->mt76.vif_mask);
223 if (mvif->mt76.idx >= (MT7915_MAX_INTERFACES << dev->dbdc_support)) {
233 mvif->mt76.omac_idx = idx;
235 mvif->mt76.band_idx = phy->mt76->band_idx;
237 mvif->mt76.wmm_idx = vif->type != NL80211_IFTYPE_AP;
239 mvif->mt76.wmm_idx += 2;
245 dev->mt76.vif_mask |= BIT_ULL(mvif->mt76.idx);
246 phy->omac_mask |= BIT_ULL(mvif->mt76.omac_idx);
248 idx = MT7915_WTBL_RESERVED - mvif->mt76.idx;
267 (!mvif->mt76.omac_idx || mvif->mt76.omac_idx > 3))
276 rcu_assign_pointer(dev->mt76.wcid[idx], &mvif->sta.wcid);
279 mutex_unlock(&dev->mt76.mutex);
296 mutex_lock(&dev->mt76.mutex);
297 mt76_testmode_reset(phy->mt76, true);
298 mutex_unlock(&dev->mt76.mutex);
305 rcu_assign_pointer(dev->mt76.wcid[idx], NULL);
307 mutex_lock(&dev->mt76.mutex);
308 dev->mt76.vif_mask &= ~BIT_ULL(mvif->mt76.idx);
309 phy->omac_mask &= ~BIT_ULL(mvif->mt76.omac_idx);
310 mutex_unlock(&dev->mt76.mutex);
312 spin_lock_bh(&dev->mt76.sta_poll_lock);
315 spin_unlock_bh(&dev->mt76.sta_poll_lock);
317 mt76_wcid_cleanup(&dev->mt76, &msta->wcid);
325 cancel_delayed_work_sync(&phy->mt76->mac_work);
327 mutex_lock(&dev->mt76.mutex);
328 set_bit(MT76_RESET, &phy->mt76->state);
330 mt76_set_channel(phy->mt76);
350 clear_bit(MT76_RESET, &phy->mt76->state);
351 mutex_unlock(&dev->mt76.mutex);
353 mt76_txq_schedule_all(phy->mt76);
355 if (!mt76_testmode_enabled(phy->mt76))
356 ieee80211_queue_delayed_work(phy->mt76->hw,
357 &phy->mt76->mac_work,
406 mutex_lock(&dev->mt76.mutex);
408 if (cmd == SET_KEY && !sta && !mvif->mt76.cipher) {
409 mvif->mt76.cipher = mt76_connac_mcu_get_cipher(key->cipher);
421 mt76_wcid_key_setup(&dev->mt76, wcid, key);
422 err = mt76_connac_mcu_add_key(&dev->mt76, vif, &msta->bip,
426 mutex_unlock(&dev->mt76.mutex);
438 mutex_lock(&dev->mt76.mutex);
439 if (!cfg80211_chandef_valid(&phy->mt76->chandef))
448 mutex_unlock(&dev->mt76.mutex);
461 if (phy->mt76->test.state != MT76_TM_STATE_OFF) {
462 mutex_lock(&dev->mt76.mutex);
463 mt76_testmode_reset(phy->mt76, false);
464 mutex_unlock(&dev->mt76.mutex);
481 mutex_lock(&dev->mt76.mutex);
485 bool band = phy->mt76->band_idx;
500 mt76_testmode_reset(phy->mt76, true);
504 mutex_unlock(&dev->mt76.mutex);
530 bool band = phy->mt76->band_idx;
545 mutex_lock(&dev->mt76.mutex);
583 mutex_unlock(&dev->mt76.mutex);
597 if (mvif->mt76.omac_idx > HW_BSSID_MAX)
618 mutex_lock(&dev->mt76.mutex);
673 mutex_unlock(&dev->mt76.mutex);
702 mutex_lock(&dev->mt76.mutex);
711 mutex_unlock(&dev->mt76.mutex);
722 mutex_lock(&dev->mt76.mutex);
724 mutex_unlock(&dev->mt76.mutex);
734 mutex_lock(&dev->mt76.mutex);
736 mutex_unlock(&dev->mt76.mutex);
742 struct mt7915_dev *dev = container_of(mdev, struct mt7915_dev, mt76);
749 idx = mt76_wcid_alloc(dev->mt76.wcid_mask, MT7915_WTBL_STA);
780 struct mt7915_dev *dev = container_of(mdev, struct mt7915_dev, mt76);
808 struct mt76_wcid *wcid = &dev->mt76.global_wcid;
833 mutex_lock(&dev->mt76.mutex);
834 ret = mt76_connac_mcu_set_rts_thresh(&dev->mt76, val,
835 phy->mt76->band_idx);
836 mutex_unlock(&dev->mt76.mutex);
860 mutex_lock(&dev->mt76.mutex);
863 mt76_rx_aggr_start(&dev->mt76, &msta->wcid, tid, ssn,
868 mt76_rx_aggr_stop(&dev->mt76, &msta->wcid, tid);
893 mutex_unlock(&dev->mt76.mutex);
922 mutex_lock(&dev->mt76.mutex);
929 mutex_unlock(&dev->mt76.mutex);
938 bool band = phy->mt76->band_idx;
945 lockdep_assert_held(&dev->mt76.mutex);
947 n = mvif->mt76.omac_idx > HW_BSSID_MAX ? HW_BSSID_0
948 : mvif->mt76.omac_idx;
950 if (is_mt7915(&dev->mt76))
969 mutex_lock(&dev->mt76.mutex);
971 mutex_unlock(&dev->mt76.mutex);
983 bool band = phy->mt76->band_idx;
990 mutex_lock(&dev->mt76.mutex);
992 n = mvif->mt76.omac_idx > HW_BSSID_MAX ? HW_BSSID_0
993 : mvif->mt76.omac_idx;
997 if (is_mt7915(&dev->mt76))
1004 mutex_unlock(&dev->mt76.mutex);
1014 bool band = phy->mt76->band_idx;
1021 mutex_lock(&dev->mt76.mutex);
1023 n = mvif->mt76.omac_idx > HW_BSSID_MAX ? HW_BSSID_0
1024 : mvif->mt76.omac_idx;
1028 if (is_mt7915(&dev->mt76))
1035 mutex_unlock(&dev->mt76.mutex);
1044 mutex_lock(&dev->mt76.mutex);
1047 mutex_unlock(&dev->mt76.mutex);
1057 u8 band = phy->mt76->band_idx;
1062 mutex_lock(&dev->mt76.mutex);
1064 phy->mt76->antenna_mask = tx_ant;
1067 if ((is_mt7916(&dev->mt76) || is_mt7981(&dev->mt76)) &&
1069 phy->mt76->chainmask = (dev->chainmask >> chainshift) << chainshift;
1071 phy->mt76->chainmask = tx_ant << (chainshift * band);
1073 mt76_set_stream_caps(phy->mt76, true);
1077 mutex_unlock(&dev->mt76.mutex);
1092 if (is_mt7915(&phy->dev->mt76) &&
1116 if (mtk_wed_device_active(&phy->dev->mt76.mmio.wed)) {
1125 if (mtk_wed_get_rx_capa(&phy->dev->mt76.mmio.wed)) {
1153 spin_lock_bh(&dev->mt76.sta_poll_lock);
1157 spin_unlock_bh(&dev->mt76.sta_poll_lock);
1210 mt76_connac_mcu_wtbl_update_hdr_trans(&dev->mt76, vif, sta);
1226 mt76_connac_mcu_wtbl_update_hdr_trans(&dev->mt76, vif, sta);
1241 mutex_lock(&dev->mt76.mutex);
1257 mutex_unlock(&dev->mt76.mutex);
1428 if (msta->vif->mt76.idx != wi->idx)
1445 .idx = mvif->mt76.idx,
1450 mutex_lock(&dev->mt76.mutex);
1465 data[ei++] = phy->mt76->aggr_stats[i];
1554 mutex_unlock(&dev->mt76.mutex);
1561 mt76_ethtool_page_pool_stats(&dev->mt76, &data[ei], &ei);
1565 dev_err(dev->mt76.dev, "ei: %d size: %d", ei, stats_size);
1576 mutex_lock(&dev->mt76.mutex);
1578 mutex_unlock(&dev->mt76.mutex);
1590 mutex_lock(&dev->mt76.mutex);
1592 if (dev->mt76.region == NL80211_DFS_UNSET)
1625 mutex_unlock(&dev->mt76.mutex);
1642 struct mtk_wed_device *wed = &dev->mt76.mmio.wed;
1653 path->mtk_wdma.bss = mvif->mt76.idx;
1654 path->mtk_wdma.wcid = is_mt7915(&dev->mt76) ? msta->wcid.idx : 0x3ff;
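Nearly every match above follows the same pattern: driver state and MCU accesses are serialized on the shared dev->mt76.mutex, firmware calls take the embedded struct mt76_dev, and per-band hardware is addressed through phy->mt76->band_idx. A minimal sketch of that pattern, modeled on the set_rts_thresh match at lines 833-836 (the wrapper name example_set_rts is hypothetical and the required mt7915 driver headers are assumed; only the mt76 calls themselves are taken from the listing):

    /* Hypothetical wrapper illustrating the locking/band-index pattern
     * seen throughout the matches; not verbatim driver code. */
    static int example_set_rts(struct mt7915_phy *phy, u32 val)
    {
            struct mt7915_dev *dev = phy->dev;
            int ret;

            /* All state and MCU accesses in the listing are serialized
             * on the mutex embedded in the shared struct mt76_dev. */
            mutex_lock(&dev->mt76.mutex);

            /* The connac MCU helper takes the embedded mt76 device and
             * the per-band index kept in the mt76 phy. */
            ret = mt76_connac_mcu_set_rts_thresh(&dev->mt76, val,
                                                 phy->mt76->band_idx);

            mutex_unlock(&dev->mt76.mutex);

            return ret;
    }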