2 * Copyright (C) 2016 Felix Fietkau <nbd@nbd.name>
3 * Copyright (C) 2018 Lorenzo Bianconi <lorenzo.bianconi83@gmail.com>
5 * Permission to use, copy, modify, and/or distribute this software for any
6 * purpose with or without fee is hereby granted, provided that the above
7 * copyright notice and this permission notice appear in all copies.
9 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
10 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
11 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
12 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
13 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
14 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
15 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
18 #include <linux/kernel.h>
21 #include "mt76x02_phy.h"
/*
 * Program the baseband RX chain path selection.
 *
 * Reads BBP AGC register 0, adjusts it based on the low nibble of
 * dev->mt76.chainmask (the RX chain mask), then writes it back.
 * NOTE(review): the switch body and local declarations are not visible in
 * this chunk; comments below describe only the visible lines.
 */
23 void mt76x02_phy_set_rxpath(struct mt76x02_dev *dev)
27 	val = mt76_rr(dev, MT_BBP(AGC, 0));
	/* Low nibble of chainmask selects the active RX chain configuration */
30 	switch (dev->mt76.chainmask & 0xf) {
39 	mt76_wr(dev, MT_BBP(AGC, 0), val);
	/* Read-back after the write — presumably posts/flushes the register
	 * write to the hardware; TODO confirm against the register spec */
41 	val = mt76_rr(dev, MT_BBP(AGC, 0));
43 EXPORT_SYMBOL_GPL(mt76x02_phy_set_rxpath);
/*
 * Configure the TX DAC selection to match the number of TX paths.
 *
 * The TX path count is taken from bits 8..11 of dev->mt76.chainmask.
 * Both branches touch the low two bits of BBP TXBE register 5; presumably
 * set == both DACs enabled (multi-path), clear == single DAC — the branch
 * condition is not visible in this chunk, TODO confirm.
 */
45 void mt76x02_phy_set_txdac(struct mt76x02_dev *dev)
	/* High nibble (bits 8..11) of chainmask encodes the TX path count */
49 	txpath = (dev->mt76.chainmask >> 8) & 0xf;
52 	mt76_set(dev, MT_BBP(TXBE, 5), 0x3);
55 	mt76_clear(dev, MT_BBP(TXBE, 5), 0x3);
59 EXPORT_SYMBOL_GPL(mt76x02_phy_set_txdac);
/*
 * Pack four 6-bit TX power values into one 32-bit register word, one value
 * per byte lane (shifts 0/8/16/24).  BIT(6) - 1 == 0x3f masks each input
 * down to its 6 valid bits.
 * NOTE(review): the return type line and return statement are outside the
 * visible chunk; presumably returns the accumulated u32 'val'.
 */
62 mt76x02_tx_power_mask(u8 v1, u8 v2, u8 v3, u8 v4)
66 	val |= (v1 & (BIT(6) - 1)) << 0;
67 	val |= (v2 & (BIT(6) - 1)) << 8;
68 	val |= (v3 & (BIT(6) - 1)) << 16;
69 	val |= (v4 & (BIT(6) - 1)) << 24;
/*
 * Return the maximum per-rate TX power value found in the rate power table.
 *
 * Iterates over every byte of r->all and tracks the maximum.
 * NOTE(review): the declaration/initialization of 'ret' and the return
 * statement are outside the visible chunk.
 */
73 int mt76x02_get_max_rate_power(struct mt76_rate_power *r)
78 	for (i = 0; i < sizeof(r->all); i++)
79 		ret = max(ret, r->all[i]);
83 EXPORT_SYMBOL_GPL(mt76x02_get_max_rate_power);
/*
 * Clamp every entry of the rate power table to 'limit'.
 *
 * NOTE(review): the clamping assignment inside the if-body is not visible
 * in this chunk; presumably r->all[i] = limit.
 */
85 void mt76x02_limit_rate_power(struct mt76_rate_power *r, int limit)
89 	for (i = 0; i < sizeof(r->all); i++)
90 		if (r->all[i] > limit)
93 EXPORT_SYMBOL_GPL(mt76x02_limit_rate_power);
/*
 * Apply a signed offset to every entry of the rate power table.
 *
 * NOTE(review): the loop body line is not visible in this chunk;
 * presumably r->all[i] += offset.
 */
95 void mt76x02_add_rate_power_offset(struct mt76_rate_power *r, int offset)
99 	for (i = 0; i < sizeof(r->all); i++)
102 EXPORT_SYMBOL_GPL(mt76x02_add_rate_power_offset);
/*
 * Program the hardware TX power registers from the driver's rate power
 * table.
 *
 * @txp_0 / @txp_1: per-chain channel-init power values, written into the
 * CH_INIT_0 / CH_INIT_1 fields of MT_TX_ALC_CFG_0.
 *
 * Each MT_TX_PWR_CFG_* register packs four 6-bit per-rate power values
 * via mt76x02_tx_power_mask().  NOTE(review): the fourth argument of most
 * mask calls sits on continuation lines not visible in this chunk, so the
 * exact rate-to-register mapping cannot be fully confirmed here.
 */
104 void mt76x02_phy_set_txpower(struct mt76x02_dev *dev, int txp_0, int txp_1)
106 	struct mt76_rate_power *t = &dev->mt76.rate_power;
108 	mt76_rmw_field(dev, MT_TX_ALC_CFG_0, MT_TX_ALC_CFG_0_CH_INIT_0, txp_0);
109 	mt76_rmw_field(dev, MT_TX_ALC_CFG_0, MT_TX_ALC_CFG_0_CH_INIT_1, txp_1);
	/* CCK and low OFDM rates */
111 	mt76_wr(dev, MT_TX_PWR_CFG_0,
112 		mt76x02_tx_power_mask(t->cck[0], t->cck[2], t->ofdm[0],
	/* High OFDM rates and low HT MCS */
114 	mt76_wr(dev, MT_TX_PWR_CFG_1,
115 		mt76x02_tx_power_mask(t->ofdm[4], t->ofdm[6], t->ht[0],
117 	mt76_wr(dev, MT_TX_PWR_CFG_2,
118 		mt76x02_tx_power_mask(t->ht[4], t->ht[6], t->ht[8],
120 	mt76_wr(dev, MT_TX_PWR_CFG_3,
121 		mt76x02_tx_power_mask(t->ht[12], t->ht[14], t->stbc[0],
122 	mt76_wr(dev, MT_TX_PWR_CFG_4,
124 		mt76x02_tx_power_mask(t->stbc[4], t->stbc[6], 0, 0));
	/* CFG_7..CFG_9 mix OFDM/HT/VHT/STBC entries — mapping per datasheet */
125 	mt76_wr(dev, MT_TX_PWR_CFG_7,
126 		mt76x02_tx_power_mask(t->ofdm[7], t->vht[8], t->ht[7],
128 	mt76_wr(dev, MT_TX_PWR_CFG_8,
129 		mt76x02_tx_power_mask(t->ht[14], 0, t->vht[8], t->vht[9]));
130 	mt76_wr(dev, MT_TX_PWR_CFG_9,
131 		mt76x02_tx_power_mask(t->ht[7], 0, t->stbc[8], t->stbc[9]));
133 EXPORT_SYMBOL_GPL(mt76x02_phy_set_txpower);
/*
 * Program the baseband channel bandwidth and control-channel offset.
 *
 * @width: an NL80211_CHAN_WIDTH_* value, mapped to core_val/agc_val pairs
 *         written into BBP CORE register 1 and AGC register 0.
 * @ctrl:  control (primary) channel selector, written to both the AGC and
 *         TXBE control-channel fields.
 * NOTE(review): the per-case assignments and the default case are not
 * visible in this chunk.
 */
135 void mt76x02_phy_set_bw(struct mt76x02_dev *dev, int width, u8 ctrl)
137 	int core_val, agc_val;
140 	case NL80211_CHAN_WIDTH_80:
144 	case NL80211_CHAN_WIDTH_40:
154 	mt76_rmw_field(dev, MT_BBP(CORE, 1), MT_BBP_CORE_R1_BW, core_val);
155 	mt76_rmw_field(dev, MT_BBP(AGC, 0), MT_BBP_AGC_R0_BW, agc_val);
156 	mt76_rmw_field(dev, MT_BBP(AGC, 0), MT_BBP_AGC_R0_CTRL_CHAN, ctrl);
157 	mt76_rmw_field(dev, MT_BBP(TXBE, 0), MT_BBP_TXBE_R0_CTRL_CHAN, ctrl);
159 EXPORT_SYMBOL_GPL(mt76x02_phy_set_bw);
/*
 * Select the active RF band (2.4 GHz vs 5 GHz) in the TX band config
 * register; the 2G and 5G flags are kept mutually exclusive.
 *
 * NOTE(review): the signature continues on a line not visible in this
 * chunk — presumably a boolean indicating whether the primary channel is
 * the upper half of a 40 MHz pair, feeding the UPPER_40M field below;
 * TODO confirm.
 */
161 void mt76x02_phy_set_band(struct mt76x02_dev *dev, int band,
165 	case NL80211_BAND_2GHZ:
166 		mt76_set(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_2G);
167 		mt76_clear(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_5G);
169 	case NL80211_BAND_5GHZ:
170 		mt76_clear(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_2G);
171 		mt76_set(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_5G);
175 	mt76_rmw_field(dev, MT_TX_BAND_CFG, MT_TX_BAND_CFG_UPPER_40M,
178 EXPORT_SYMBOL_GPL(mt76x02_phy_set_band);
/*
 * Adapt the AGC/VGA gain adjustment based on the false-CCA counter.
 *
 * Reads the false CCA error count from MT_RX_STAT_1, caches it in
 * dev->cal.false_cca, then:
 *  - raises the gain adjustment by 2 when false CCAs are high (> 800)
 *    and headroom remains below 'limit' (16 in low-gain state, else 4);
 *  - lowers it by 2 when the channel is quiet (< 10 false CCAs) or when
 *    the adjustment is pinned at the limit with moderate noise (< 500).
 * Records whether the adjustment is at its lowest usable gain.
 * NOTE(review): the return statements are outside the visible chunk;
 * presumably returns whether the gain was changed — TODO confirm.
 */
180 bool mt76x02_phy_adjust_vga_gain(struct mt76x02_dev *dev)
	/* Allow a larger adjustment range when already in low-gain state */
182 	u8 limit = dev->cal.low_gain > 0 ? 16 : 4;
186 	false_cca = FIELD_GET(MT_RX_STAT_1_CCA_ERRORS, mt76_rr(dev, MT_RX_STAT_1));
187 	dev->cal.false_cca = false_cca;
188 	if (false_cca > 800 && dev->cal.agc_gain_adjust < limit) {
189 		dev->cal.agc_gain_adjust += 2;
191 	} else if ((false_cca < 10 && dev->cal.agc_gain_adjust > 0) ||
192 		   (dev->cal.agc_gain_adjust >= limit && false_cca < 500)) {
193 		dev->cal.agc_gain_adjust -= 2;
197 	dev->cal.agc_lowest_gain = dev->cal.agc_gain_adjust >= limit;
201 EXPORT_SYMBOL_GPL(mt76x02_phy_adjust_vga_gain);
/*
 * Snapshot the initial per-chain AGC gain values from BBP AGC registers
 * 8 and 9 into dev->cal.agc_gain_init, seed the current-gain state from
 * them, reset the low-gain tracking state, and mark gain init as done.
 * NOTE(review): the field-mask arguments to mt76_get_field() sit on
 * continuation lines not visible in this chunk.
 */
203 void mt76x02_init_agc_gain(struct mt76x02_dev *dev)
205 	dev->cal.agc_gain_init[0] = mt76_get_field(dev, MT_BBP(AGC, 8),
207 	dev->cal.agc_gain_init[1] = mt76_get_field(dev, MT_BBP(AGC, 9),
209 	memcpy(dev->cal.agc_gain_cur, dev->cal.agc_gain_init,
210 	       sizeof(dev->cal.agc_gain_cur));
	/* -1 forces the first gain calibration pass to run */
211 	dev->cal.low_gain = -1;
212 	dev->cal.gain_init_done = true;
214 EXPORT_SYMBOL_GPL(mt76x02_init_agc_gain);