2 * (c) Copyright 2002-2010, Ralink Technology, Inc.
3 * Copyright (C) 2014 Felix Fietkau <nbd@openwrt.org>
4 * Copyright (C) 2015 Jakub Kicinski <kubakici@wp.pl>
5 * Copyright (C) 2018 Stanislaw Gruszka <stf_xl@wp.pl>
7 * This program is free software; you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License version 2
9 * as published by the Free Software Foundation
11 * This program is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 * GNU General Public License for more details.
23 #include "initvals_phy.h"
25 #include <linux/etherdevice.h>
/*
 * mt76x0_rf_csr_wr - write one RF register through the direct CSR window.
 * Used when the MCU path is unavailable; serialized by reg_atomic_mutex.
 * NOTE(review): this chunk is a sampled view of the original file, so
 * declarations/returns between the visible lines are not shown here.
 */
28 mt76x0_rf_csr_wr(struct mt76x0_dev *dev, u32 offset, u8 value)
33 if (test_bit(MT76_REMOVED, &dev->mt76.state))
36 bank = MT_RF_BANK(offset);
37 reg = MT_RF_REG(offset);
/*
 * FIX: the bank bound must be tested *inside* WARN_ON_ONCE().  The old
 * form "WARN_ON_ONCE(bank) > 8" compared the macro's 0/1 result against
 * 8, which is always false, so an out-of-range bank was never rejected.
 */
39 if (WARN_ON_ONCE(reg > 64) || WARN_ON_ONCE(bank > 8))
42 mutex_lock(&dev->reg_atomic_mutex);
/* Wait for any in-flight CSR transaction (KICK bit clears when done). */
44 if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100)) {
49 mt76_wr(dev, MT_RF_CSR_CFG,
50 FIELD_PREP(MT_RF_CSR_CFG_DATA, value) |
51 FIELD_PREP(MT_RF_CSR_CFG_REG_BANK, bank) |
52 FIELD_PREP(MT_RF_CSR_CFG_REG_ID, reg) |
55 trace_mt76x0_rf_write(&dev->mt76, bank, offset, value);
57 mutex_unlock(&dev->reg_atomic_mutex);
60 dev_err(dev->mt76.dev, "Error: RF write %d:%d failed:%d!!\n",
/*
 * mt76x0_rf_csr_rr - read one RF register through the direct CSR window.
 * Returns the 8-bit register value on success, negative errno otherwise
 * (per the visible error path at the bottom).
 */
67 mt76x0_rf_csr_rr(struct mt76x0_dev *dev, u32 offset)
73 if (test_bit(MT76_REMOVED, &dev->mt76.state))
76 bank = MT_RF_BANK(offset);
77 reg = MT_RF_REG(offset);
/*
 * FIX: same misplaced-parenthesis bug as the write path --
 * "WARN_ON_ONCE(bank) > 8" compared the macro's boolean result with 8
 * and could never trigger; the bank value itself belongs inside.
 */
79 if (WARN_ON_ONCE(reg > 64) || WARN_ON_ONCE(bank > 8))
82 mutex_lock(&dev->reg_atomic_mutex);
84 if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100))
87 mt76_wr(dev, MT_RF_CSR_CFG,
88 FIELD_PREP(MT_RF_CSR_CFG_REG_BANK, bank) |
89 FIELD_PREP(MT_RF_CSR_CFG_REG_ID, reg) |
/* Wait for the hardware to complete the read before sampling DATA. */
92 if (!mt76_poll(dev, MT_RF_CSR_CFG, MT_RF_CSR_CFG_KICK, 0, 100))
95 val = mt76_rr(dev, MT_RF_CSR_CFG);
/* Only trust DATA if the HW echoed back the bank/reg we requested. */
96 if (FIELD_GET(MT_RF_CSR_CFG_REG_ID, val) == reg &&
97 FIELD_GET(MT_RF_CSR_CFG_REG_BANK, val) == bank) {
98 ret = FIELD_GET(MT_RF_CSR_CFG_DATA, val);
99 trace_mt76x0_rf_read(&dev->mt76, bank, offset, ret);
102 mutex_unlock(&dev->reg_atomic_mutex);
105 dev_err(dev->mt76.dev, "Error: RF read %d:%d failed:%d!!\n",
/*
 * rf_wr - write an RF register, picking the fastest available path:
 * a one-entry reg-pair burst via the MCU when it is running, otherwise
 * the direct CSR access helper above.
 */
112 rf_wr(struct mt76x0_dev *dev, u32 offset, u8 val)
114 if (test_bit(MT76_STATE_MCU_RUNNING, &dev->mt76.state)) {
115 struct mt76_reg_pair pair = {
120 return mt76_wr_rp(dev, MT_MCU_MEMMAP_RF, &pair, 1);
/* MCU not up yet: fall back to the direct CSR register path. */
123 return mt76x0_rf_csr_wr(dev, offset, val);
/*
 * rf_rr - read an RF register via the MCU reg-pair interface when the
 * MCU is running, else via the direct CSR path.  Returns the register
 * value on success or a negative error code.
 */
128 rf_rr(struct mt76x0_dev *dev, u32 offset)
133 if (test_bit(MT76_STATE_MCU_RUNNING, &dev->mt76.state)) {
134 struct mt76_reg_pair pair = {
138 ret = mt76_rd_rp(dev, MT_MCU_MEMMAP_RF, &pair, 1);
/* CSR helper returns value-or-negative-errno in one int. */
142 ret = val = mt76x0_rf_csr_rr(dev, offset);
145 return (ret < 0) ? ret : val;
/*
 * rf_rmw - read-modify-write an RF register: clear @mask bits, then OR
 * in @val.  The masking arithmetic between the read and the write is in
 * lines not visible in this chunk; error propagation from rf_rr/rf_wr
 * is presumably handled there -- TODO confirm against the full file.
 */
149 rf_rmw(struct mt76x0_dev *dev, u32 offset, u8 mask, u8 val)
153 ret = rf_rr(dev, offset);
157 ret = rf_wr(dev, offset, val);
/* rf_set - set @val bits in an RF register (RMW with an empty clear mask). */
165 rf_set(struct mt76x0_dev *dev, u32 offset, u8 val)
167 return rf_rmw(dev, offset, 0, val);
/* rf_clear - clear @mask bits in an RF register (RMW setting nothing). */
172 rf_clear(struct mt76x0_dev *dev, u32 offset, u8 mask)
174 return rf_rmw(dev, offset, mask, 0);
/*
 * RF_RANDOM_WRITE - push a whole table of RF bank/reg/value pairs to the
 * chip in one MCU reg-pair burst.  @tab must be a real array (ARRAY_SIZE
 * is taken inside the macro, so a pointer argument would be wrong).
 */
178 #define RF_RANDOM_WRITE(dev, tab) \
179 mt76_wr_rp(dev, MT_MCU_MEMMAP_RF, \
180 tab, ARRAY_SIZE(tab))
/*
 * mt76x0_wait_bbp_ready - poll the baseband CORE version register until
 * the BBP responds; logs an error if it never becomes ready (the polling
 * loop itself sits on lines not visible in this chunk).
 */
182 int mt76x0_wait_bbp_ready(struct mt76x0_dev *dev)
188 val = mt76_rr(dev, MT_BBP(CORE, 0));
/*
 * FIX: raw printk() without a KERN_ level or device prefix; use the
 * device-aware debug helper consistent with the rest of this file.
 */
189 dev_dbg(dev->mt76.dev, "BBP version %08x\n", val);
195 dev_err(dev->mt76.dev, "Error: BBP is not ready\n");
/*
 * mt76x0_bbp_set_ctrlch - program the baseband bandwidth and control
 * channel index into the CORE/AGC/TXBE register fields.  core_val and
 * agc_val are filled by the switch cases (assignments are on lines not
 * visible in this chunk).
 */
203 mt76x0_bbp_set_ctrlch(struct mt76x0_dev *dev, enum nl80211_chan_width width,
206 int core_val, agc_val;
209 case NL80211_CHAN_WIDTH_80:
213 case NL80211_CHAN_WIDTH_40:
/* Apply bandwidth to CORE and AGC, control-channel index to AGC/TXBE. */
223 mt76_rmw_field(dev, MT_BBP(CORE, 1), MT_BBP_CORE_R1_BW, core_val);
224 mt76_rmw_field(dev, MT_BBP(AGC, 0), MT_BBP_AGC_R0_BW, agc_val);
225 mt76_rmw_field(dev, MT_BBP(AGC, 0), MT_BBP_AGC_R0_CTRL_CHAN, ctrl);
226 mt76_rmw_field(dev, MT_BBP(TXBE, 0), MT_BBP_TXBE_R0_CTRL_CHAN, ctrl);
/*
 * mt76x0_phy_get_rssi - convert the raw RXWI RSSI of chain 0 to dBm-style
 * units using the per-device calibration offset and LNA gain.
 */
229 int mt76x0_phy_get_rssi(struct mt76x0_dev *dev, struct mt76x02_rxwi *rxwi)
231 struct mt76x0_caldata *caldata = &dev->caldata;
233 return rxwi->rssi[0] + caldata->rssi_offset[0] - caldata->lna_gain;
/*
 * mt76x0_vco_cal - run the RF bank-0 VCO calibration sequence for
 * @channel.  Programs calibration mode registers R03..R06 and finally
 * kicks the calibration by setting B0.R04<7>.  Early bail if R04<6:4>
 * is not 0x3 (condition visible below; the return is on a hidden line).
 */
236 static void mt76x0_vco_cal(struct mt76x0_dev *dev, u8 channel)
240 val = rf_rr(dev, MT_RF(0, 4));
241 if ((val & 0x70) != 0x30)
245 * Calibration Mode - Open loop, closed loop, and amplitude:
247 * B0.R06.[3:1] bp_close_code: 100
248 * B0.R05.[7:0] bp_open_code: 0x0
249 * B0.R04.[2:0] cal_bits: 000
250 * B0.R03.[2:0] startup_time: 011
251 * B0.R03.[6:4] settle_time:
256 val = rf_rr(dev, MT_RF(0, 6));
259 rf_wr(dev, MT_RF(0, 6), val);
261 val = rf_rr(dev, MT_RF(0, 5));
263 rf_wr(dev, MT_RF(0, 5), 0x0);
265 val = rf_rr(dev, MT_RF(0, 4));
267 rf_wr(dev, MT_RF(0, 4), val);
/* Channel-dependent settle_time selection (values set on hidden lines). */
269 val = rf_rr(dev, MT_RF(0, 3));
271 if (channel == 1 || channel == 7 || channel == 9 || channel >= 13) {
273 } else if (channel == 3 || channel == 4 || channel == 10) {
275 } else if (channel == 2 || channel == 5 || channel == 6 ||
276 channel == 8 || channel == 11 || channel == 12) {
279 WARN(1, "Unknown channel %u\n", channel);
282 rf_wr(dev, MT_RF(0, 3), val);
284 /* TODO replace by mt76x0_rf_set(dev, MT_RF(0, 4), BIT(7)); */
/* Kick the VCO calibration: set B0.R04<7>. */
285 val = rf_rr(dev, MT_RF(0, 4));
286 val = ((val & ~(0x80)) | 0x80);
287 rf_wr(dev, MT_RF(0, 4), val);
/* mt76x0_mac_set_ctrlch - tell the MAC whether the primary 20 MHz
 * channel is the upper half of a 40 MHz channel. */
293 mt76x0_mac_set_ctrlch(struct mt76x0_dev *dev, bool primary_upper)
295 mt76_rmw_field(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_UPPER_40M,
/*
 * mt76x0_phy_set_band - switch RF and MAC between 2.4 GHz and 5 GHz:
 * load the band-specific RF table, flip the RF(5,0)/RF(6,0) band values,
 * select the band in MT_TX_BAND_CFG and set band-specific TX ALC/gain
 * correction registers.
 */
300 mt76x0_phy_set_band(struct mt76x0_dev *dev, enum nl80211_band band)
303 case NL80211_BAND_2GHZ:
304 RF_RANDOM_WRITE(dev, mt76x0_rf_2g_channel_0_tab);
306 rf_wr(dev, MT_RF(5, 0), 0x45);
307 rf_wr(dev, MT_RF(6, 0), 0x44);
309 mt76_set(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_2G);
310 mt76_clear(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_5G);
312 mt76_wr(dev, MT_TX_ALC_VGA3, 0x00050007);
313 mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x003E0002);
315 case NL80211_BAND_5GHZ:
316 RF_RANDOM_WRITE(dev, mt76x0_rf_5g_channel_0_tab);
/* Note RF(5,0)/RF(6,0) values are swapped relative to the 2G case. */
318 rf_wr(dev, MT_RF(5, 0), 0x44);
319 rf_wr(dev, MT_RF(6, 0), 0x45);
321 mt76_clear(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_2G);
322 mt76_set(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_5G);
324 mt76_wr(dev, MT_TX_ALC_VGA3, 0x00000005);
325 mt76_wr(dev, MT_TX0_RF_GAIN_CORR, 0x01010102);
/*
 * mt76x0_phy_set_chan_rf_params - program the RF PLL and band/bandwidth
 * switch tables for @channel.  @rf_bw_band packs the band in the high
 * byte and bandwidth in the low byte.  Channels in mt76x0_sdm_channel
 * use the SDM frequency plan, others the regular plan.  The PLL is then
 * programmed field-by-field (R37..R24) from the chosen freq_item.
 * This function is left byte-identical: the chunk is missing interleaved
 * lines and the register sequence is order-sensitive.
 */
333 mt76x0_phy_set_chan_rf_params(struct mt76x0_dev *dev, u8 channel, u16 rf_bw_band)
335 u16 rf_band = rf_bw_band & 0xff00;
336 u16 rf_bw = rf_bw_band & 0x00ff;
337 enum nl80211_band band;
342 const struct mt76x0_freq_item *freq_item;
/* Decide between the SDM plan and the regular frequency plan. */
344 for (i = 0; i < ARRAY_SIZE(mt76x0_sdm_channel); i++) {
345 if (channel == mt76x0_sdm_channel[i]) {
351 for (i = 0; i < ARRAY_SIZE(mt76x0_frequency_plan); i++) {
352 if (channel == mt76x0_frequency_plan[i].channel) {
353 rf_band = mt76x0_frequency_plan[i].band;
356 freq_item = &(mt76x0_sdm_frequency_plan[i]);
358 freq_item = &(mt76x0_frequency_plan[i]);
/* PLL programming sequence, bank 0 registers R37 down to R24. */
360 rf_wr(dev, MT_RF(0, 37), freq_item->pllR37);
361 rf_wr(dev, MT_RF(0, 36), freq_item->pllR36);
362 rf_wr(dev, MT_RF(0, 35), freq_item->pllR35);
363 rf_wr(dev, MT_RF(0, 34), freq_item->pllR34);
364 rf_wr(dev, MT_RF(0, 33), freq_item->pllR33);
366 rf_val = rf_rr(dev, MT_RF(0, 32));
368 rf_val |= freq_item->pllR32_b7b5;
369 rf_wr(dev, MT_RF(0, 32), rf_val);
371 /* R32<4:0> pll_den: (Denomina - 8) */
372 rf_val = rf_rr(dev, MT_RF(0, 32));
374 rf_val |= freq_item->pllR32_b4b0;
375 rf_wr(dev, MT_RF(0, 32), rf_val);
378 rf_val = rf_rr(dev, MT_RF(0, 31));
380 rf_val |= freq_item->pllR31_b7b5;
381 rf_wr(dev, MT_RF(0, 31), rf_val);
383 /* R31<4:0> pll_k(Nominator) */
384 rf_val = rf_rr(dev, MT_RF(0, 31));
386 rf_val |= freq_item->pllR31_b4b0;
387 rf_wr(dev, MT_RF(0, 31), rf_val);
389 /* R30<7> sdm_reset_n */
390 rf_val = rf_rr(dev, MT_RF(0, 30));
393 rf_wr(dev, MT_RF(0, 30), rf_val);
395 rf_wr(dev, MT_RF(0, 30), rf_val);
397 rf_val |= freq_item->pllR30_b7;
398 rf_wr(dev, MT_RF(0, 30), rf_val);
401 /* R30<6:2> sdmmash_prbs,sin */
402 rf_val = rf_rr(dev, MT_RF(0, 30));
404 rf_val |= freq_item->pllR30_b6b2;
405 rf_wr(dev, MT_RF(0, 30), rf_val);
408 rf_val = rf_rr(dev, MT_RF(0, 30));
410 rf_val |= (freq_item->pllR30_b1 << 1);
411 rf_wr(dev, MT_RF(0, 30), rf_val);
413 /* R30<0> R29<7:0> (hex) pll_n */
414 rf_val = freq_item->pll_n & 0x00FF;
415 rf_wr(dev, MT_RF(0, 29), rf_val);
417 rf_val = rf_rr(dev, MT_RF(0, 30));
419 rf_val |= ((freq_item->pll_n >> 8) & 0x0001);
420 rf_wr(dev, MT_RF(0, 30), rf_val);
422 /* R28<7:6> isi_iso */
423 rf_val = rf_rr(dev, MT_RF(0, 28));
425 rf_val |= freq_item->pllR28_b7b6;
426 rf_wr(dev, MT_RF(0, 28), rf_val);
428 /* R28<5:4> pfd_dly */
429 rf_val = rf_rr(dev, MT_RF(0, 28));
431 rf_val |= freq_item->pllR28_b5b4;
432 rf_wr(dev, MT_RF(0, 28), rf_val);
434 /* R28<3:2> clksel option */
435 rf_val = rf_rr(dev, MT_RF(0, 28));
437 rf_val |= freq_item->pllR28_b3b2;
438 rf_wr(dev, MT_RF(0, 28), rf_val);
440 /* R28<1:0> R27<7:0> R26<7:0> (hex) sdm_k */
441 rf_val = freq_item->pll_sdm_k & 0x000000FF;
442 rf_wr(dev, MT_RF(0, 26), rf_val);
444 rf_val = ((freq_item->pll_sdm_k >> 8) & 0x000000FF);
445 rf_wr(dev, MT_RF(0, 27), rf_val);
447 rf_val = rf_rr(dev, MT_RF(0, 28));
449 rf_val |= ((freq_item->pll_sdm_k >> 16) & 0x0003);
450 rf_wr(dev, MT_RF(0, 28), rf_val);
452 /* R24<1:0> xo_div */
453 rf_val = rf_rr(dev, MT_RF(0, 24));
455 rf_val |= freq_item->pllR24_b1b0;
456 rf_wr(dev, MT_RF(0, 24), rf_val);
/* Apply the bandwidth-dependent RF switch table entries. */
462 for (i = 0; i < ARRAY_SIZE(mt76x0_rf_bw_switch_tab); i++) {
463 if (rf_bw == mt76x0_rf_bw_switch_tab[i].bw_band) {
464 rf_wr(dev, mt76x0_rf_bw_switch_tab[i].rf_bank_reg,
465 mt76x0_rf_bw_switch_tab[i].value);
466 } else if ((rf_bw == (mt76x0_rf_bw_switch_tab[i].bw_band & 0xFF)) &&
467 (rf_band & mt76x0_rf_bw_switch_tab[i].bw_band)) {
468 rf_wr(dev, mt76x0_rf_bw_switch_tab[i].rf_bank_reg,
469 mt76x0_rf_bw_switch_tab[i].value);
/* Apply the band-dependent RF switch table entries. */
473 for (i = 0; i < ARRAY_SIZE(mt76x0_rf_band_switch_tab); i++) {
474 if (mt76x0_rf_band_switch_tab[i].bw_band & rf_band) {
475 rf_wr(dev, mt76x0_rf_band_switch_tab[i].rf_bank_reg,
476 mt76x0_rf_band_switch_tab[i].value);
480 mac_reg = mt76_rr(dev, MT_RF_MISC);
481 mac_reg &= ~0xC; /* Clear 0x518[3:2] */
482 mt76_wr(dev, MT_RF_MISC, mac_reg);
/* Enable the external PA bits in MT_RF_MISC if EEPROM says so. */
484 band = (rf_band & RF_G_BAND) ? NL80211_BAND_2GHZ : NL80211_BAND_5GHZ;
485 if (mt76x02_ext_pa_enabled(&dev->mt76, band)) {
487 MT_RF_MISC (offset: 0x0518)
488 [2]1'b1: enable external A band PA, 1'b0: disable external A band PA
489 [3]1'b1: enable external G band PA, 1'b0: disable external G band PA
491 if (rf_band & RF_A_BAND) {
492 mac_reg = mt76_rr(dev, MT_RF_MISC);
494 mt76_wr(dev, MT_RF_MISC, mac_reg);
496 mac_reg = mt76_rr(dev, MT_RF_MISC);
498 mt76_wr(dev, MT_RF_MISC, mac_reg);
502 for (i = 0; i < ARRAY_SIZE(mt76x0_rf_ext_pa_tab); i++)
503 if (mt76x0_rf_ext_pa_tab[i].bw_band & rf_band)
504 rf_wr(dev, mt76x0_rf_ext_pa_tab[i].rf_bank_reg,
505 mt76x0_rf_ext_pa_tab[i].value);
/* Band-specific TX gain attenuation and ALC configuration. */
508 if (rf_band & RF_G_BAND) {
509 mt76_wr(dev, MT_TX0_RF_GAIN_ATTEN, 0x63707400);
510 /* Set Atten mode = 2 For G band, Disable Tx Inc dcoc. */
511 mac_reg = mt76_rr(dev, MT_TX_ALC_CFG_1);
512 mac_reg &= 0x896400FF;
513 mt76_wr(dev, MT_TX_ALC_CFG_1, mac_reg);
515 mt76_wr(dev, MT_TX0_RF_GAIN_ATTEN, 0x686A7800);
516 /* Set Atten mode = 0 For Ext A band, Disable Tx Inc dcoc Cal. */
517 mac_reg = mt76_rr(dev, MT_TX_ALC_CFG_1);
518 mac_reg &= 0x890400FF;
519 mt76_wr(dev, MT_TX_ALC_CFG_1, mac_reg);
/*
 * mt76x0_phy_set_chan_bbp_params - write the baseband switch table
 * entries that match the current band/bandwidth.  MT_BBP(AGC, 8) is
 * special-cased: its gain field is corrected by twice the calibrated
 * LNA gain before being written.
 */
524 mt76x0_phy_set_chan_bbp_params(struct mt76x0_dev *dev, u8 channel, u16 rf_bw_band)
528 for (i = 0; i < ARRAY_SIZE(mt76x0_bbp_switch_tab); i++) {
529 const struct mt76x0_bbp_switch_item *item = &mt76x0_bbp_switch_tab[i];
530 const struct mt76_reg_pair *pair = &item->reg_pair;
/* Skip entries that do not cover every requested band/bw bit. */
532 if ((rf_bw_band & item->bw_band) != rf_bw_band)
535 if (pair->reg == MT_BBP(AGC, 8)) {
536 u32 val = pair->value;
539 gain = FIELD_GET(MT_BBP_AGC_GAIN, val);
540 gain -= dev->caldata.lna_gain * 2;
541 val &= ~MT_BBP_AGC_GAIN;
542 val |= FIELD_PREP(MT_BBP_AGC_GAIN, gain);
543 mt76_wr(dev, pair->reg, val);
545 mt76_wr(dev, pair->reg, pair->value);
/*
 * mt76x0_extra_power_over_mac - mirror selected per-rate power fields
 * from TX_PWR_CFG_1..4 into the "extra" registers TX_PWR_CFG_7..9 that
 * the MAC uses for additional rates.
 */
552 mt76x0_extra_power_over_mac(struct mt76x0_dev *dev)
556 val = ((mt76_rr(dev, MT_TX_PWR_CFG_1) & 0x00003f00) >> 8);
557 val |= ((mt76_rr(dev, MT_TX_PWR_CFG_2) & 0x00003f00) << 8);
558 mt76_wr(dev, MT_TX_PWR_CFG_7, val);
561 val = ((mt76_rr(dev, MT_TX_PWR_CFG_3) & 0x0000ff00) >> 8);
562 mt76_wr(dev, MT_TX_PWR_CFG_8, val);
564 val = ((mt76_rr(dev, MT_TX_PWR_CFG_4) & 0x0000ff00) >> 8);
565 mt76_wr(dev, MT_TX_PWR_CFG_9, val);
/*
 * mt76x0_phy_set_tx_power - load the four per-rate TX power config
 * registers from the EEPROM tables for the current band, indexed by
 * bandwidth (0 = 20 MHz, 1 = wider), then propagate the extra-rate
 * power registers.
 */
569 mt76x0_phy_set_tx_power(struct mt76x0_dev *dev, u8 channel, u8 rf_bw_band)
573 int bw = (rf_bw_band & RF_BW_20) ? 0 : 1;
575 for (i = 0; i < 4; i++) {
577 val = dev->ee->tx_pwr_cfg_2g[i][bw];
579 val = dev->ee->tx_pwr_cfg_5g[i][bw];
/* TX_PWR_CFG_0..3 are laid out 4 bytes apart. */
581 mt76_wr(dev, MT_TX_PWR_CFG_0 + 4*i, val);
584 mt76x0_extra_power_over_mac(dev);
/*
 * mt76x0_bbp_set_bw - map an nl80211 channel width onto the firmware's
 * bandwidth encoding and hand it to the MCU via the BW_SETTING function
 * select.  80P80/160/5 MHz fall through to the unsupported path.
 */
589 mt76x0_bbp_set_bw(struct mt76x0_dev *dev, enum nl80211_chan_width width)
591 enum { BW_20 = 0, BW_40 = 1, BW_80 = 2, BW_10 = 4};
596 case NL80211_CHAN_WIDTH_20_NOHT:
597 case NL80211_CHAN_WIDTH_20:
600 case NL80211_CHAN_WIDTH_40:
603 case NL80211_CHAN_WIDTH_80:
606 case NL80211_CHAN_WIDTH_10:
609 case NL80211_CHAN_WIDTH_80P80:
610 case NL80211_CHAN_WIDTH_160:
611 case NL80211_CHAN_WIDTH_5:
616 mt76x02_mcu_function_select(&dev->mt76, BW_SETTING, bw, false);
/*
 * mt76x0_phy_set_chan_pwr - apply the per-channel power value from the
 * EEPROM table.  The channel list below gives the index into
 * dev->ee->tx_pwr_per_chan; an unlisted channel trips the WARN_ON and
 * leaves the register untouched.
 */
620 mt76x0_phy_set_chan_pwr(struct mt76x0_dev *dev, u8 channel)
622 static const int mt76x0_tx_pwr_ch_list[] = {
623 1,2,3,4,5,6,7,8,9,10,11,12,13,14,
624 36,38,40,44,46,48,52,54,56,60,62,64,
625 100,102,104,108,110,112,116,118,120,124,126,128,132,134,136,140,
626 149,151,153,157,159,161,165,167,169,171,173,
632 for (i = 0; i < ARRAY_SIZE(mt76x0_tx_pwr_ch_list); i++)
633 if (mt76x0_tx_pwr_ch_list[i] == channel)
636 if (WARN_ON(i == ARRAY_SIZE(mt76x0_tx_pwr_ch_list)))
639 val = mt76_rr(dev, MT_TX_ALC_CFG_0);
641 val |= dev->ee->tx_pwr_per_chan[i];
643 mt76_wr(dev, MT_TX_ALC_CFG_0, val);
/*
 * __mt76x0_phy_set_channel - full channel switch: compute the center
 * 20 MHz channel and band/bw flags from the chandef, program BBP/MAC
 * bandwidth and control channel, rotate the extended-CCA mapping for
 * the selected 20 MHz slot, then run the RF/BBP channel programming,
 * VCO calibration and per-channel power.  Caller holds hw_atomic_mutex
 * (see mt76x0_phy_set_channel below).
 */
647 __mt76x0_phy_set_channel(struct mt76x0_dev *dev,
648 struct cfg80211_chan_def *chandef)
650 u32 ext_cca_chan[4] = {
651 [0] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 0) |
652 FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 1) |
653 FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
654 FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
655 FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(0)),
656 [1] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 1) |
657 FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 0) |
658 FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 2) |
659 FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 3) |
660 FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(1)),
661 [2] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 2) |
662 FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 3) |
663 FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
664 FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
665 FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(2)),
666 [3] = FIELD_PREP(MT_EXT_CCA_CFG_CCA0, 3) |
667 FIELD_PREP(MT_EXT_CCA_CFG_CCA1, 2) |
668 FIELD_PREP(MT_EXT_CCA_CFG_CCA2, 1) |
669 FIELD_PREP(MT_EXT_CCA_CFG_CCA3, 0) |
670 FIELD_PREP(MT_EXT_CCA_CFG_CCA_MASK, BIT(3)),
672 bool scan = test_bit(MT76_SCANNING, &dev->mt76.state);
673 int ch_group_index, freq, freq1;
678 freq = chandef->chan->center_freq;
679 freq1 = chandef->center_freq1;
680 channel = chandef->chan->hw_value;
681 rf_bw_band = (channel <= 14) ? RF_G_BAND : RF_A_BAND;
/* Shift "channel" to the center of the wide channel and pick the
 * 20 MHz slot index within it. */
683 switch (chandef->width) {
684 case NL80211_CHAN_WIDTH_40:
689 channel += 2 - ch_group_index * 4;
690 rf_bw_band |= RF_BW_40;
692 case NL80211_CHAN_WIDTH_80:
693 ch_group_index = (freq - freq1 + 30) / 20;
694 if (WARN_ON(ch_group_index < 0 || ch_group_index > 3))
696 channel += 6 - ch_group_index * 4;
697 rf_bw_band |= RF_BW_80;
701 rf_bw_band |= RF_BW_20;
705 mt76x0_bbp_set_bw(dev, chandef->width);
706 mt76x0_bbp_set_ctrlch(dev, chandef->width, ch_group_index);
707 mt76x0_mac_set_ctrlch(dev, ch_group_index & 1);
/* Rotate the extended CCA config to match the selected 20 MHz slot. */
709 mt76_rmw(dev, MT_EXT_CCA_CFG,
710 (MT_EXT_CCA_CFG_CCA0 |
711 MT_EXT_CCA_CFG_CCA1 |
712 MT_EXT_CCA_CFG_CCA2 |
713 MT_EXT_CCA_CFG_CCA3 |
714 MT_EXT_CCA_CFG_CCA_MASK),
715 ext_cca_chan[ch_group_index]);
717 mt76x0_phy_set_band(dev, chandef->chan->band);
718 mt76x0_phy_set_chan_rf_params(dev, channel, rf_bw_band);
719 mt76x0_read_rx_gain(dev);
721 /* set Japan Tx filter at channel 14 */
722 val = mt76_rr(dev, MT_BBP(CORE, 1));
727 mt76_wr(dev, MT_BBP(CORE, 1), val);
729 mt76x0_phy_set_chan_bbp_params(dev, channel, rf_bw_band);
731 /* Vendor driver don't do it */
732 /* mt76x0_phy_set_tx_power(dev, channel, rf_bw_band); */
734 mt76x0_vco_cal(dev, channel);
736 mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_RXDCOC, 1, false);
738 mt76x0_phy_set_chan_pwr(dev, channel);
740 dev->mt76.chandef = *chandef;
/*
 * mt76x0_phy_set_channel - public channel-switch entry point; wraps the
 * unlocked worker in hw_atomic_mutex to serialize against other HW ops.
 */
744 int mt76x0_phy_set_channel(struct mt76x0_dev *dev,
745 struct cfg80211_chan_def *chandef)
749 mutex_lock(&dev->hw_atomic_mutex);
750 ret = __mt76x0_phy_set_channel(dev, chandef);
751 mutex_unlock(&dev->hw_atomic_mutex);
/*
 * mt76x0_phy_recalibrate_after_assoc - run the post-association MCU
 * calibration set.  TX ALC is temporarily zeroed and register 0x2124
 * temporarily masked during calibration, then both are restored.
 */
756 void mt76x0_phy_recalibrate_after_assoc(struct mt76x0_dev *dev)
759 u8 channel = dev->mt76.chandef.chan->hw_value;
760 int is_5ghz = (dev->mt76.chandef.chan->band == NL80211_BAND_5GHZ) ? 1 : 0;
762 mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_R, 0, false);
764 mt76x0_vco_cal(dev, channel);
/* Save and zero TX ALC so calibration is not disturbed by power control. */
766 tx_alc = mt76_rr(dev, MT_TX_ALC_CFG_0);
767 mt76_wr(dev, MT_TX_ALC_CFG_0, 0);
768 usleep_range(500, 700);
/* NOTE(review): 0x2124 is an undocumented magic register; bit 0/7 are
 * cleared for the duration of calibration -- purpose unknown from here. */
770 reg_val = mt76_rr(dev, 0x2124);
771 reg_val &= 0xffffff7e;
772 mt76_wr(dev, 0x2124, reg_val);
774 mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_RXDCOC, 0, false);
776 mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_LC, is_5ghz, false);
777 mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_LOFT, is_5ghz, false);
778 mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_TXIQ, is_5ghz, false);
779 mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_TX_GROUP_DELAY,
781 mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_RXIQ, is_5ghz, false);
782 mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_RX_GROUP_DELAY,
/* Restore the saved register values after calibration completes. */
785 mt76_wr(dev, 0x2124, reg_val);
786 mt76_wr(dev, MT_TX_ALC_CFG_0, tx_alc);
789 mt76x02_mcu_calibrate(&dev->mt76, MCU_CAL_RXDCOC, 1, false);
/* mt76x0_agc_save - stash the current AGC gain field for later restore. */
792 void mt76x0_agc_save(struct mt76x0_dev *dev)
794 /* Only one RX path */
795 dev->agc_save = FIELD_GET(MT_BBP_AGC_GAIN, mt76_rr(dev, MT_BBP(AGC, 8)));
/* mt76x0_agc_restore - write back the AGC gain saved by mt76x0_agc_save(). */
798 void mt76x0_agc_restore(struct mt76x0_dev *dev)
800 mt76_rmw_field(dev, MT_BBP(AGC, 8), MT_BBP_AGC_GAIN, dev->agc_save);
/*
 * mt76x0_temp_sensor - one-shot on-die temperature measurement.
 * Saves the three RF registers it reprograms, arms the sensor via
 * MT_BBP(CORE, 34), polls for completion, converts the signed raw
 * reading using the calibrated temperature offset, then restores the
 * saved registers.
 */
803 static void mt76x0_temp_sensor(struct mt76x0_dev *dev)
805 u8 rf_b7_73, rf_b0_66, rf_b0_67;
/* NOTE(review): "rf_b0_67" actually saves/restores MT_RF(0, 73) -- the
 * register pairing is consistent (save/restore match) but the variable
 * name looks like a misnomer; confirm against the datasheet. */
810 rf_b7_73 = rf_rr(dev, MT_RF(7, 73));
811 rf_b0_66 = rf_rr(dev, MT_RF(0, 66));
812 rf_b0_67 = rf_rr(dev, MT_RF(0, 73));
814 rf_wr(dev, MT_RF(7, 73), 0x02);
815 rf_wr(dev, MT_RF(0, 66), 0x23);
816 rf_wr(dev, MT_RF(0, 73), 0x01);
/* Arm the measurement, then poll up to 2000 iterations for completion. */
818 mt76_wr(dev, MT_BBP(CORE, 34), 0x00080055);
820 for (cycle = 0; cycle < 2000; cycle++) {
821 val = mt76_rr(dev, MT_BBP(CORE, 34));
829 mt76_wr(dev, MT_BBP(CORE, 34), val);
/* Sign-extend the 8-bit raw reading. */
833 sval = mt76_rr(dev, MT_BBP(CORE, 35)) & 0xff;
835 sval &= 0x7f; /* Positive */
837 sval |= 0xffffff00; /* Negative */
839 temp = (35 * (sval - dev->caldata.temp_offset)) / 10 + 25;
/* Put the borrowed RF registers back the way we found them. */
842 rf_wr(dev, MT_RF(7, 73), rf_b7_73);
843 rf_wr(dev, MT_RF(0, 66), rf_b0_66);
844 rf_wr(dev, MT_RF(0, 73), rf_b0_67);
/*
 * mt76x0_dynamic_vga_tuning - adjust the initial VGA gain in
 * MT_BBP(AGC, 8) based on the running average RSSI; stronger signal
 * means the gain is reduced (adjustment lines are hidden in this chunk).
 * Band-specific baseline: 0x54 for 5 GHz, 0x4E for 2.4 GHz.
 */
847 static void mt76x0_dynamic_vga_tuning(struct mt76x0_dev *dev)
851 init_vga = (dev->mt76.chandef.chan->band == NL80211_BAND_5GHZ) ? 0x54 : 0x4E;
852 if (dev->avg_rssi > -60)
854 else if (dev->avg_rssi > -70)
/* The gain byte lives at bits 15:8 of AGC register 8. */
857 val = mt76_rr(dev, MT_BBP(AGC, 8));
859 val |= init_vga << 8;
860 mt76_wr(dev, MT_BBP(AGC,8), val);
/*
 * mt76x0_phy_calibrate - periodic calibration worker: retune the VGA
 * gain and read the temperature sensor, then re-arm itself after
 * MT_CALIBRATE_INTERVAL.
 */
863 static void mt76x0_phy_calibrate(struct work_struct *work)
865 struct mt76x0_dev *dev = container_of(work, struct mt76x0_dev,
868 mt76x0_dynamic_vga_tuning(dev);
869 mt76x0_temp_sensor(dev);
871 ieee80211_queue_delayed_work(dev->mt76.hw, &dev->cal_work,
872 MT_CALIBRATE_INTERVAL);
/*
 * mt76x0_phy_con_cal_onoff - start/stop connection monitoring for the
 * associated AP: record its BSSID and reset the beacon frequency-offset
 * tracking state under con_mon_lock.
 */
875 void mt76x0_phy_con_cal_onoff(struct mt76x0_dev *dev,
876 struct ieee80211_bss_conf *info)
878 /* Start/stop collecting beacon data */
879 spin_lock_bh(&dev->con_mon_lock);
880 ether_addr_copy(dev->ap_bssid, info->bssid);
882 dev->bcn_freq_off = MT_FREQ_OFFSET_INVALID;
883 spin_unlock_bh(&dev->con_mon_lock);
/*
 * mt76x0_set_rx_chains - program the RX chain selection bits (3 and 4)
 * of MT_BBP(AGC, 0) according to dev->chainmask; the read-back at the
 * end presumably flushes/verifies the write -- TODO confirm.
 */
887 mt76x0_set_rx_chains(struct mt76x0_dev *dev)
891 val = mt76_rr(dev, MT_BBP(AGC, 0));
892 val &= ~(BIT(3) | BIT(4));
894 if (dev->chainmask & BIT(1))
897 mt76_wr(dev, MT_BBP(AGC, 0), val);
900 val = mt76_rr(dev, MT_BBP(AGC, 0));
/*
 * mt76x0_set_tx_dac - enable both TX DACs when a second chain is present
 * in the chainmask, otherwise run on a single DAC.
 */
904 mt76x0_set_tx_dac(struct mt76x0_dev *dev)
906 if (dev->chainmask & BIT(1))
907 mt76_set(dev, MT_BBP(TXBE, 5), 3);
909 mt76_clear(dev, MT_BBP(TXBE, 5), 3);
/*
 * mt76x0_rf_init - one-time RF bring-up: load the static RF tables,
 * apply the 20 MHz / G-band defaults from the switch tables, program
 * the crystal frequency offset, reset the DAC, and kick an initial VCO
 * calibration.
 */
913 mt76x0_rf_init(struct mt76x0_dev *dev)
918 RF_RANDOM_WRITE(dev, mt76x0_rf_central_tab);
919 RF_RANDOM_WRITE(dev, mt76x0_rf_2g_channel_0_tab);
920 RF_RANDOM_WRITE(dev, mt76x0_rf_5g_channel_0_tab);
921 RF_RANDOM_WRITE(dev, mt76x0_rf_vga_channel_0_tab);
/* Default to the 20 MHz entries (plus G-band-specific 20 MHz ones). */
923 for (i = 0; i < ARRAY_SIZE(mt76x0_rf_bw_switch_tab); i++) {
924 const struct mt76x0_rf_switch_item *item = &mt76x0_rf_bw_switch_tab[i];
926 if (item->bw_band == RF_BW_20)
927 rf_wr(dev, item->rf_bank_reg, item->value);
928 else if (((RF_G_BAND | RF_BW_20) & item->bw_band) == (RF_G_BAND | RF_BW_20))
929 rf_wr(dev, item->rf_bank_reg, item->value);
932 for (i = 0; i < ARRAY_SIZE(mt76x0_rf_band_switch_tab); i++) {
933 if (mt76x0_rf_band_switch_tab[i].bw_band & RF_G_BAND) {
935 mt76x0_rf_band_switch_tab[i].rf_bank_reg,
936 mt76x0_rf_band_switch_tab[i].value);
941 Frequency calibration
942 E1: B0.R22<6:0>: xo_cxo<6:0>
943 E2: B0.R21<0>: xo_cxo<0>, B0.R22<7:0>: xo_cxo<8:1>
/* Clamp the calibrated XO offset to the register's valid range. */
945 rf_wr(dev, MT_RF(0, 22),
946 min_t(u8, dev->caldata.freq_offset, 0xbf));
947 val = rf_rr(dev, MT_RF(0, 22));
950 Reset the DAC (Set B0.R73<7>=1, then set B0.R73<7>=0, and then set B0.R73<7>) during power up.
952 val = rf_rr(dev, MT_RF(0, 73));
954 rf_wr(dev, MT_RF(0, 73), val);
956 rf_wr(dev, MT_RF(0, 73), val);
958 rf_wr(dev, MT_RF(0, 73), val);
961 vcocal_en (initiate VCO calibration (reset after completion)) - It should be at the end of RF configuration.
963 rf_set(dev, MT_RF(0, 4), 0x80);
/*
 * mt76x0_ant_select - configure single-antenna operation and disable
 * the BT coexistence antenna-sharing controls.
 */
966 static void mt76x0_ant_select(struct mt76x0_dev *dev)
968 /* Single antenna mode. */
969 mt76_rmw(dev, MT_WLAN_FUN_CTRL, BIT(5), BIT(6));
970 mt76_clear(dev, MT_CMB_CTRL, BIT(14) | BIT(12));
971 mt76_clear(dev, MT_COEXCFG0, BIT(2));
972 mt76_rmw(dev, MT_COEXCFG3, BIT(5) | BIT(4) | BIT(3) | BIT(2), BIT(1));
975 void mt76x0_phy_init(struct mt76x0_dev *dev)
977 INIT_DELAYED_WORK(&dev->cal_work, mt76x0_phy_calibrate);
979 mt76x0_ant_select(dev);
983 mt76x0_set_rx_chains(dev);
984 mt76x0_set_tx_dac(dev);