1 // SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
2 /* Copyright(c) 2018-2019 Realtek Corporation
18 union phy_table_tile {
19 struct rtw_phy_cond cond;
20 struct phy_cfg_pair cfg;
23 struct phy_pg_cfg_pair {
32 struct txpwr_lmt_cfg_pair {
41 static const u32 db_invert_table[12][8] = {
47 1007, 1268, 1596, 2010},
49 794, 1000, 1259, 1585},
50 {1995, 2512, 3162, 3981,
51 5012, 6310, 7943, 10000},
52 {12589, 15849, 19953, 25119,
53 31623, 39811, 50119, 63098},
54 {79433, 100000, 125893, 158489,
55 199526, 251189, 316228, 398107},
56 {501187, 630957, 794328, 1000000,
57 1258925, 1584893, 1995262, 2511886},
58 {3162278, 3981072, 5011872, 6309573,
59 7943282, 1000000, 12589254, 15848932},
60 {19952623, 25118864, 31622777, 39810717,
61 50118723, 63095734, 79432823, 100000000},
62 {125892541, 158489319, 199526232, 251188643,
63 316227766, 398107171, 501187234, 630957345},
64 {794328235, 1000000000, 1258925412, 1584893192,
65 1995262315, 2511886432U, 3162277660U, 3981071706U}
68 enum rtw_phy_band_type {
73 void rtw_phy_init(struct rtw_dev *rtwdev)
75 struct rtw_chip_info *chip = rtwdev->chip;
76 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
79 dm_info->fa_history[3] = 0;
80 dm_info->fa_history[2] = 0;
81 dm_info->fa_history[1] = 0;
82 dm_info->fa_history[0] = 0;
83 dm_info->igi_bitmap = 0;
84 dm_info->igi_history[3] = 0;
85 dm_info->igi_history[2] = 0;
86 dm_info->igi_history[1] = 0;
88 addr = chip->dig[0].addr;
89 mask = chip->dig[0].mask;
90 dm_info->igi_history[0] = rtw_read32_mask(rtwdev, addr, mask);
93 void rtw_phy_dig_write(struct rtw_dev *rtwdev, u8 igi)
95 struct rtw_chip_info *chip = rtwdev->chip;
96 struct rtw_hal *hal = &rtwdev->hal;
100 for (path = 0; path < hal->rf_path_num; path++) {
101 addr = chip->dig[path].addr;
102 mask = chip->dig[path].mask;
103 rtw_write32_mask(rtwdev, addr, mask, igi);
107 static void rtw_phy_stat_false_alarm(struct rtw_dev *rtwdev)
109 struct rtw_chip_info *chip = rtwdev->chip;
111 chip->ops->false_alarm_statistics(rtwdev);
114 #define RA_FLOOR_TABLE_SIZE 7
115 #define RA_FLOOR_UP_GAP 3
117 static u8 rtw_phy_get_rssi_level(u8 old_level, u8 rssi)
119 u8 table[RA_FLOOR_TABLE_SIZE] = {20, 34, 38, 42, 46, 50, 100};
123 for (i = 0; i < RA_FLOOR_TABLE_SIZE; i++)
125 table[i] += RA_FLOOR_UP_GAP;
127 for (i = 0; i < RA_FLOOR_TABLE_SIZE; i++) {
128 if (rssi < table[i]) {
137 struct rtw_phy_stat_iter_data {
138 struct rtw_dev *rtwdev;
142 static void rtw_phy_stat_rssi_iter(void *data, struct ieee80211_sta *sta)
144 struct rtw_phy_stat_iter_data *iter_data = data;
145 struct rtw_dev *rtwdev = iter_data->rtwdev;
146 struct rtw_sta_info *si = (struct rtw_sta_info *)sta->drv_priv;
149 rssi = ewma_rssi_read(&si->avg_rssi);
150 rssi_level = rtw_phy_get_rssi_level(si->rssi_level, rssi);
152 rtw_fw_send_rssi_info(rtwdev, si);
154 iter_data->min_rssi = min_t(u8, rssi, iter_data->min_rssi);
157 static void rtw_phy_stat_rssi(struct rtw_dev *rtwdev)
159 struct rtw_dm_info *dm_info = &rtwdev->dm_info;
160 struct rtw_phy_stat_iter_data data = {};
162 data.rtwdev = rtwdev;
163 data.min_rssi = U8_MAX;
164 rtw_iterate_stas_atomic(rtwdev, rtw_phy_stat_rssi_iter, &data);
166 dm_info->pre_min_rssi = dm_info->min_rssi;
167 dm_info->min_rssi = data.min_rssi;
170 static void rtw_phy_statistics(struct rtw_dev *rtwdev)
172 rtw_phy_stat_rssi(rtwdev);
173 rtw_phy_stat_false_alarm(rtwdev);
/* DIG false-alarm thresholds and IGI bounds while linked ("performance") */
#define DIG_PERF_FA_TH_LOW 250
#define DIG_PERF_FA_TH_HIGH 500
#define DIG_PERF_FA_TH_EXTRA_HIGH 750
#define DIG_PERF_MAX 0x5a
#define DIG_PERF_MID 0x40
/* DIG false-alarm thresholds and IGI bounds while unlinked ("coverage") */
#define DIG_CVRG_FA_TH_LOW 2000
#define DIG_CVRG_FA_TH_HIGH 4000
#define DIG_CVRG_FA_TH_EXTRA_HIGH 5000
#define DIG_CVRG_MAX 0x2a
#define DIG_CVRG_MID 0x26
#define DIG_CVRG_MIN 0x1c
/* IGI upper bound is bounded by min_rssi plus this offset */
#define DIG_RSSI_GAIN_OFFSET 15
/*
 * Detect DIG "damping": the IGI oscillating between two values because the
 * false-alarm count flips between high and low on alternating rounds.
 * While damping is latched, DIG holds the current IGI until the minimum
 * RSSI moves away or a round counter expires.
 *
 * NOTE(review): several source lines (the `static bool` storage line,
 * declarations, brace closers and the switch case labels) were lost in
 * this copy of the file; the surviving code is kept byte-for-byte and
 * only comments were added.
 */
rtw_phy_dig_check_damping(struct rtw_dm_info *dm_info)
	u16 fa_lo = DIG_PERF_FA_TH_LOW;
	u16 fa_hi = DIG_PERF_FA_TH_HIGH;
	bool damping = false;

	min_rssi = dm_info->min_rssi;
	/* already damping: release once RSSI moved by more than 3 dB or
	 * after more than 20 rounds
	 */
	if (dm_info->damping) {
		damping_rssi = dm_info->damping_rssi;
		diff = min_rssi > damping_rssi ? min_rssi - damping_rssi :
		       damping_rssi - min_rssi;
		if (diff > 3 || dm_info->damping_cnt++ > 20) {
			dm_info->damping = false;

	igi_history = dm_info->igi_history;
	fa_history = dm_info->fa_history;
	/* last four up/down steps of the IGI trend */
	igi_bitmap = dm_info->igi_bitmap & 0xf;
	switch (igi_bitmap) {
		/* down -> up -> down -> up */
		if (igi_history[0] > igi_history[1] &&
		    igi_history[2] > igi_history[3] &&
		    igi_history[0] - igi_history[1] >= 2 &&
		    igi_history[2] - igi_history[3] >= 2 &&
		    fa_history[0] > fa_hi && fa_history[1] < fa_lo &&
		    fa_history[2] > fa_hi && fa_history[3] < fa_lo)
		/* up -> down -> down -> up */
		if (igi_history[0] > igi_history[1] &&
		    igi_history[3] > igi_history[2] &&
		    igi_history[0] - igi_history[1] >= 4 &&
		    igi_history[3] - igi_history[2] >= 2 &&
		    fa_history[0] > fa_hi && fa_history[1] < fa_lo &&
		    fa_history[2] < fa_lo && fa_history[3] > fa_hi)

	/* latch damping at the current minimum RSSI */
	dm_info->damping = true;
	dm_info->damping_cnt = 0;
	dm_info->damping_rssi = min_rssi;
252 static void rtw_phy_dig_get_boundary(struct rtw_dm_info *dm_info,
253 u8 *upper, u8 *lower, bool linked)
255 u8 dig_max, dig_min, dig_mid;
259 dig_max = DIG_PERF_MAX;
260 dig_mid = DIG_PERF_MID;
261 /* 22B=0x1c, 22C=0x20 */
263 min_rssi = max_t(u8, dm_info->min_rssi, dig_min);
265 dig_max = DIG_CVRG_MAX;
266 dig_mid = DIG_CVRG_MID;
267 dig_min = DIG_CVRG_MIN;
271 /* DIG MAX should be bounded by minimum RSSI with offset +15 */
272 dig_max = min_t(u8, dig_max, min_rssi + DIG_RSSI_GAIN_OFFSET);
274 *lower = clamp_t(u8, min_rssi, dig_min, dig_mid);
275 *upper = clamp_t(u8, *lower + DIG_RSSI_GAIN_OFFSET, dig_min, dig_max);
278 static void rtw_phy_dig_get_threshold(struct rtw_dm_info *dm_info,
279 u16 *fa_th, u8 *step, bool linked)
281 u8 min_rssi, pre_min_rssi;
283 min_rssi = dm_info->min_rssi;
284 pre_min_rssi = dm_info->pre_min_rssi;
290 fa_th[0] = DIG_PERF_FA_TH_EXTRA_HIGH;
291 fa_th[1] = DIG_PERF_FA_TH_HIGH;
292 fa_th[2] = DIG_PERF_FA_TH_LOW;
293 if (pre_min_rssi > min_rssi) {
299 fa_th[0] = DIG_CVRG_FA_TH_EXTRA_HIGH;
300 fa_th[1] = DIG_CVRG_FA_TH_HIGH;
301 fa_th[2] = DIG_CVRG_FA_TH_LOW;
305 static void rtw_phy_dig_recorder(struct rtw_dm_info *dm_info, u8 igi, u16 fa)
312 igi_bitmap = dm_info->igi_bitmap << 1 & 0xfe;
313 igi_history = dm_info->igi_history;
314 fa_history = dm_info->fa_history;
316 up = igi > igi_history[0];
319 igi_history[3] = igi_history[2];
320 igi_history[2] = igi_history[1];
321 igi_history[1] = igi_history[0];
322 igi_history[0] = igi;
324 fa_history[3] = fa_history[2];
325 fa_history[2] = fa_history[1];
326 fa_history[1] = fa_history[0];
329 dm_info->igi_bitmap = igi_bitmap;
/*
 * DIG (dynamic initial gain): raise or lower the IGI based on the current
 * false-alarm count, bounded by the RSSI of the weakest connected peer.
 *
 * NOTE(review): several source lines (local declarations, the cur_igi
 * seeding and its -2 compensation, return statements and brace closers)
 * were lost in this copy of the file; the surviving code is kept
 * byte-for-byte and only comments were added.
 */
static void rtw_phy_dig(struct rtw_dev *rtwdev)
	struct rtw_dm_info *dm_info = &rtwdev->dm_info;
	u8 upper_bound, lower_bound;
	u16 fa_th[3], fa_cnt;

	/* DIG may be frozen (e.g. during calibration) */
	if (rtw_flag_check(rtwdev, RTW_FLAG_DIG_DISABLE))

	/* hold the IGI while the loop is oscillating */
	if (rtw_phy_dig_check_damping(dm_info))

	linked = !!rtwdev->sta_cnt;

	fa_cnt = dm_info->total_fa_cnt;
	pre_igi = dm_info->igi_history[0];

	rtw_phy_dig_get_threshold(dm_info, fa_th, step, linked);

	/* test the false alarm count from the highest threshold level first,
	 * and increase it by corresponding step size
	 * note that the step size is offset by -2, compensate it afterall
	for (level = 0; level < 3; level++) {
		if (fa_cnt > fa_th[level]) {
			cur_igi += step[level];

	/* calculate the upper/lower bound by the minimum rssi we have among
	 * the peers connected with us, meanwhile make sure the igi value does
	 * not beyond the hardware limitation
	rtw_phy_dig_get_boundary(dm_info, &upper_bound, &lower_bound, linked);
	cur_igi = clamp_t(u8, cur_igi, lower_bound, upper_bound);

	/* record current igi value and false alarm statistics for further
	 * damping checks, and record the trend of igi values
	rtw_phy_dig_recorder(dm_info, cur_igi, fa_cnt);

	/* only touch hardware when the IGI actually changed */
	if (cur_igi != pre_igi)
		rtw_phy_dig_write(rtwdev, cur_igi);
385 static void rtw_phy_ra_info_update_iter(void *data, struct ieee80211_sta *sta)
387 struct rtw_dev *rtwdev = data;
388 struct rtw_sta_info *si = (struct rtw_sta_info *)sta->drv_priv;
390 rtw_update_sta_info(rtwdev, si);
393 static void rtw_phy_ra_info_update(struct rtw_dev *rtwdev)
395 if (rtwdev->watch_dog_cnt & 0x3)
398 rtw_iterate_stas_atomic(rtwdev, rtw_phy_ra_info_update_iter, rtwdev);
/* Watchdog entry point for the PHY dynamic mechanisms.
 * NOTE(review): the rtw_phy_dig() call was missing from this copy and is
 * restored -- it is the only caller of the static DIG routine above.
 */
void rtw_phy_dynamic_mechanism(struct rtw_dev *rtwdev)
{
	/* for further calculation */
	rtw_phy_statistics(rtwdev);
	rtw_phy_dig(rtwdev);
	rtw_phy_ra_info_update(rtwdev);
}
411 static u8 rtw_phy_power_2_db(s8 power)
413 if (power <= -100 || power >= 20)
421 static u64 rtw_phy_db_2_linear(u8 power_db)
427 i = (power_db - 1) >> 3;
428 j = (power_db - 1) - (i << 3);
430 linear = db_invert_table[i][j];
431 linear = i > 2 ? linear << FRAC_BITS : linear;
/*
 * Convert a linear amplitude back to dB (max 96) by searching
 * db_invert_table. Rows 0-2 hold pre-scaled (<< FRAC_BITS) entries, so
 * the input is scaled up when comparing against them.
 *
 * NOTE(review): declarations, break/goto statements and brace closers
 * were lost in this copy of the file; the surviving code is kept
 * byte-for-byte and only comments were added.
 */
static u8 rtw_phy_linear_2_db(u64 linear)
	if (linear >= db_invert_table[11][7])
		return 96; /* maximum 96 dB */

	/* find the row whose last entry covers the value */
	for (i = 0; i < 12; i++) {
		if (i <= 2 && (linear << FRAC_BITS) <= db_invert_table[i][7])
		else if (i > 2 && linear <= db_invert_table[i][7])

	/* find the column within that row */
	for (j = 0; j < 8; j++) {
		if (i <= 2 && (linear << FRAC_BITS) <= db_invert_table[i][j])
		else if (i > 2 && linear <= db_invert_table[i][j])

	if (j == 0 && i == 0)

	/* round to the nearer of the two neighbouring table entries,
	 * stepping back across a row boundary when needed (row 3 is the
	 * first unscaled row, hence the special case)
	 */
	if (db_invert_table[i][0] - linear >
	    linear - db_invert_table[i - 1][7]) {
	if (db_invert_table[3][0] - linear >
	    linear - db_invert_table[2][7]) {
	if (db_invert_table[i][j] - linear >
	    linear - db_invert_table[i][j - 1]) {

	/* table entry [i][j] corresponds to (8*i + j + 1) dB */
	dB = (i << 3) + j + 1;

/*
 * Average the per-path RF power reports in the linear domain and return
 * the result as a dB value (RSSI).
 *
 * NOTE(review): the declarations, the sum accumulation and the path_num
 * switch were lost in this copy; the shift/add expression below appears
 * to approximate a division by the path count (sum * 11 / 32 ~= sum / 3)
 * -- confirm against the original.
 */
u8 rtw_phy_rf_power_2_rssi(s8 *rf_power, u8 path_num)
	for (path = 0; path < path_num; path++) {
		power = rf_power[path];
		power_db = rtw_phy_power_2_db(power);
		linear = rtw_phy_db_2_linear(power_db);

	/* drop the FRAC_BITS fractional bits with rounding */
	sum = (sum + (1 << (FRAC_BITS - 1))) >> FRAC_BITS;
	sum = ((sum) + ((sum) << 1) + ((sum) << 3)) >> 5;

	return rtw_phy_linear_2_db(sum);
521 u32 rtw_phy_read_rf(struct rtw_dev *rtwdev, enum rtw_rf_path rf_path,
524 struct rtw_hal *hal = &rtwdev->hal;
525 struct rtw_chip_info *chip = rtwdev->chip;
526 const u32 *base_addr = chip->rf_base_addr;
527 u32 val, direct_addr;
529 if (rf_path >= hal->rf_path_num) {
530 rtw_err(rtwdev, "unsupported rf path (%d)\n", rf_path);
535 direct_addr = base_addr[rf_path] + (addr << 2);
538 val = rtw_read32_mask(rtwdev, direct_addr, mask);
543 bool rtw_phy_write_rf_reg_sipi(struct rtw_dev *rtwdev, enum rtw_rf_path rf_path,
544 u32 addr, u32 mask, u32 data)
546 struct rtw_hal *hal = &rtwdev->hal;
547 struct rtw_chip_info *chip = rtwdev->chip;
548 u32 *sipi_addr = chip->rf_sipi_addr;
553 if (rf_path >= hal->rf_path_num) {
554 rtw_err(rtwdev, "unsupported rf path (%d)\n", rf_path);
561 if (mask != RFREG_MASK) {
562 old_data = rtw_phy_read_rf(rtwdev, rf_path, addr, RFREG_MASK);
564 if (old_data == INV_RF_DATA) {
565 rtw_err(rtwdev, "Write fail, rf is disabled\n");
570 data = ((old_data) & (~mask)) | (data << shift);
573 data_and_addr = ((addr << 20) | (data & 0x000fffff)) & 0x0fffffff;
575 rtw_write32(rtwdev, sipi_addr[rf_path], data_and_addr);
582 bool rtw_phy_write_rf_reg(struct rtw_dev *rtwdev, enum rtw_rf_path rf_path,
583 u32 addr, u32 mask, u32 data)
585 struct rtw_hal *hal = &rtwdev->hal;
586 struct rtw_chip_info *chip = rtwdev->chip;
587 const u32 *base_addr = chip->rf_base_addr;
590 if (rf_path >= hal->rf_path_num) {
591 rtw_err(rtwdev, "unsupported rf path (%d)\n", rf_path);
596 direct_addr = base_addr[rf_path] + (addr << 2);
599 rtw_write32_mask(rtwdev, REG_RSV_CTRL, BITS_RFC_DIRECT, DISABLE_PI);
600 rtw_write32_mask(rtwdev, REG_WLRF1, BITS_RFC_DIRECT, DISABLE_PI);
601 rtw_write32_mask(rtwdev, direct_addr, mask, data);
605 rtw_write32_mask(rtwdev, REG_RSV_CTRL, BITS_RFC_DIRECT, ENABLE_PI);
606 rtw_write32_mask(rtwdev, REG_WLRF1, BITS_RFC_DIRECT, ENABLE_PI);
611 bool rtw_phy_write_rf_reg_mix(struct rtw_dev *rtwdev, enum rtw_rf_path rf_path,
612 u32 addr, u32 mask, u32 data)
615 return rtw_phy_write_rf_reg(rtwdev, rf_path, addr, mask, data);
617 return rtw_phy_write_rf_reg_sipi(rtwdev, rf_path, addr, mask, data);
620 void rtw_phy_setup_phy_cond(struct rtw_dev *rtwdev, u32 pkg)
622 struct rtw_hal *hal = &rtwdev->hal;
623 struct rtw_efuse *efuse = &rtwdev->efuse;
624 struct rtw_phy_cond cond = {0};
626 cond.cut = hal->cut_version ? hal->cut_version : 15;
627 cond.pkg = pkg ? pkg : 15;
629 cond.rfe = efuse->rfe_option;
631 switch (rtw_hci_type(rtwdev)) {
632 case RTW_HCI_TYPE_USB:
633 cond.intf = INTF_USB;
635 case RTW_HCI_TYPE_SDIO:
636 cond.intf = INTF_SDIO;
638 case RTW_HCI_TYPE_PCIE:
640 cond.intf = INTF_PCIE;
644 hal->phy_cond = cond;
646 rtw_dbg(rtwdev, RTW_DBG_PHY, "phy cond=0x%08x\n", *((u32 *)&hal->phy_cond));
649 static bool check_positive(struct rtw_dev *rtwdev, struct rtw_phy_cond cond)
651 struct rtw_hal *hal = &rtwdev->hal;
652 struct rtw_phy_cond drv_cond = hal->phy_cond;
654 if (cond.cut && cond.cut != drv_cond.cut)
657 if (cond.pkg && cond.pkg != drv_cond.pkg)
660 if (cond.intf && cond.intf != drv_cond.intf)
663 if (cond.rfe != drv_cond.rfe)
/*
 * Walk a conditional PHY parameter table. Entries are either branch words
 * (IF/ELSE/ENDIF carrying a rtw_phy_cond to test) or addr/data pairs;
 * a pair is written through tbl->do_cfg only while the active branch has
 * matched the driver's condition.
 *
 * NOTE(review): the switch case labels and brace closers were lost in
 * this copy of the file; the surviving code is kept byte-for-byte and
 * only comments were added.
 */
void rtw_parse_tbl_phy_cond(struct rtw_dev *rtwdev, const struct rtw_table *tbl)
	const union phy_table_tile *p = tbl->data;
	/* each tile is two u32 words, hence size / 2 entries */
	const union phy_table_tile *end = p + tbl->size / 2;
	struct rtw_phy_cond pos_cond = {0};
	bool is_matched = true, is_skipped = false;

	BUILD_BUG_ON(sizeof(union phy_table_tile) != sizeof(struct phy_cfg_pair));

	for (; p < end; p++) {
		switch (p->cond.branch) {
			/* ELSE: match only when no prior branch was taken */
			is_matched = is_skipped ? false : true;
		} else if (p->cond.neg) {
			if (check_positive(rtwdev, pos_cond)) {
		} else if (is_matched) {
			/* plain addr/data pair in an active branch */
			(*tbl->do_cfg)(rtwdev, tbl, p->cfg.addr, p->cfg.data);
712 void rtw_parse_tbl_bb_pg(struct rtw_dev *rtwdev, const struct rtw_table *tbl)
714 const struct phy_pg_cfg_pair *p = tbl->data;
715 const struct phy_pg_cfg_pair *end = p + tbl->size / 6;
717 BUILD_BUG_ON(sizeof(struct phy_pg_cfg_pair) != sizeof(u32) * 6);
719 for (; p < end; p++) {
720 if (p->addr == 0xfe || p->addr == 0xffe) {
724 phy_store_tx_power_by_rate(rtwdev, p->band, p->rf_path,
725 p->tx_num, p->addr, p->bitmask,
730 void rtw_parse_tbl_txpwr_lmt(struct rtw_dev *rtwdev,
731 const struct rtw_table *tbl)
733 const struct txpwr_lmt_cfg_pair *p = tbl->data;
734 const struct txpwr_lmt_cfg_pair *end = p + tbl->size / 6;
736 BUILD_BUG_ON(sizeof(struct txpwr_lmt_cfg_pair) != sizeof(u8) * 6);
738 for (; p < end; p++) {
739 phy_set_tx_power_limit(rtwdev, p->regd, p->band,
741 p->ch, p->txpwr_lmt);
745 void rtw_phy_cfg_mac(struct rtw_dev *rtwdev, const struct rtw_table *tbl,
748 rtw_write8(rtwdev, addr, data);
751 void rtw_phy_cfg_agc(struct rtw_dev *rtwdev, const struct rtw_table *tbl,
754 rtw_write32(rtwdev, addr, data);
757 void rtw_phy_cfg_bb(struct rtw_dev *rtwdev, const struct rtw_table *tbl,
762 else if (addr == 0xfd)
764 else if (addr == 0xfc)
766 else if (addr == 0xfb)
767 usleep_range(50, 60);
768 else if (addr == 0xfa)
770 else if (addr == 0xf9)
773 rtw_write32(rtwdev, addr, data);
776 void rtw_phy_cfg_rf(struct rtw_dev *rtwdev, const struct rtw_table *tbl,
781 } else if (addr == 0xfe) {
782 usleep_range(100, 110);
784 rtw_write_rf(rtwdev, tbl->rf_path, addr, RFREG_MASK, data);
789 static void rtw_load_rfk_table(struct rtw_dev *rtwdev)
791 struct rtw_chip_info *chip = rtwdev->chip;
793 if (!chip->rfk_init_tbl)
796 rtw_load_table(rtwdev, chip->rfk_init_tbl);
799 void rtw_phy_load_tables(struct rtw_dev *rtwdev)
801 struct rtw_chip_info *chip = rtwdev->chip;
804 rtw_load_table(rtwdev, chip->mac_tbl);
805 rtw_load_table(rtwdev, chip->bb_tbl);
806 rtw_load_table(rtwdev, chip->agc_tbl);
807 rtw_load_rfk_table(rtwdev);
809 for (rf_path = 0; rf_path < rtwdev->hal.rf_path_num; rf_path++) {
810 const struct rtw_table *tbl;
812 tbl = chip->rf_tbl[rf_path];
813 rtw_load_table(rtwdev, tbl);
817 #define bcd_to_dec_pwr_by_rate(val, i) bcd2bin(val >> (i * 8))
819 #define RTW_MAX_POWER_INDEX 0x3F
821 u8 rtw_cck_rates[] = { DESC_RATE1M, DESC_RATE2M, DESC_RATE5_5M, DESC_RATE11M };
822 u8 rtw_ofdm_rates[] = {
823 DESC_RATE6M, DESC_RATE9M, DESC_RATE12M,
824 DESC_RATE18M, DESC_RATE24M, DESC_RATE36M,
825 DESC_RATE48M, DESC_RATE54M
827 u8 rtw_ht_1s_rates[] = {
828 DESC_RATEMCS0, DESC_RATEMCS1, DESC_RATEMCS2,
829 DESC_RATEMCS3, DESC_RATEMCS4, DESC_RATEMCS5,
830 DESC_RATEMCS6, DESC_RATEMCS7
832 u8 rtw_ht_2s_rates[] = {
833 DESC_RATEMCS8, DESC_RATEMCS9, DESC_RATEMCS10,
834 DESC_RATEMCS11, DESC_RATEMCS12, DESC_RATEMCS13,
835 DESC_RATEMCS14, DESC_RATEMCS15
837 u8 rtw_vht_1s_rates[] = {
838 DESC_RATEVHT1SS_MCS0, DESC_RATEVHT1SS_MCS1,
839 DESC_RATEVHT1SS_MCS2, DESC_RATEVHT1SS_MCS3,
840 DESC_RATEVHT1SS_MCS4, DESC_RATEVHT1SS_MCS5,
841 DESC_RATEVHT1SS_MCS6, DESC_RATEVHT1SS_MCS7,
842 DESC_RATEVHT1SS_MCS8, DESC_RATEVHT1SS_MCS9
844 u8 rtw_vht_2s_rates[] = {
845 DESC_RATEVHT2SS_MCS0, DESC_RATEVHT2SS_MCS1,
846 DESC_RATEVHT2SS_MCS2, DESC_RATEVHT2SS_MCS3,
847 DESC_RATEVHT2SS_MCS4, DESC_RATEVHT2SS_MCS5,
848 DESC_RATEVHT2SS_MCS6, DESC_RATEVHT2SS_MCS7,
849 DESC_RATEVHT2SS_MCS8, DESC_RATEVHT2SS_MCS9
851 u8 rtw_cck_size = ARRAY_SIZE(rtw_cck_rates);
852 u8 rtw_ofdm_size = ARRAY_SIZE(rtw_ofdm_rates);
853 u8 rtw_ht_1s_size = ARRAY_SIZE(rtw_ht_1s_rates);
854 u8 rtw_ht_2s_size = ARRAY_SIZE(rtw_ht_2s_rates);
855 u8 rtw_vht_1s_size = ARRAY_SIZE(rtw_vht_1s_rates);
856 u8 rtw_vht_2s_size = ARRAY_SIZE(rtw_vht_2s_rates);
857 u8 *rtw_rate_section[RTW_RATE_SECTION_MAX] = {
858 rtw_cck_rates, rtw_ofdm_rates,
859 rtw_ht_1s_rates, rtw_ht_2s_rates,
860 rtw_vht_1s_rates, rtw_vht_2s_rates
862 u8 rtw_rate_size[RTW_RATE_SECTION_MAX] = {
863 ARRAY_SIZE(rtw_cck_rates),
864 ARRAY_SIZE(rtw_ofdm_rates),
865 ARRAY_SIZE(rtw_ht_1s_rates),
866 ARRAY_SIZE(rtw_ht_2s_rates),
867 ARRAY_SIZE(rtw_vht_1s_rates),
868 ARRAY_SIZE(rtw_vht_2s_rates)
/* Channel number stored at each 5G channel index, grouped by sub-band;
 * rtw_channel_to_idx() searches this table linearly.
 */
static const u8 rtw_channel_idx_5g[RTW_MAX_CHANNEL_NUM_5G] = {
	36, 38, 40, 42, 44, 46, 48, /* Band 1 */
	52, 54, 56, 58, 60, 62, 64, /* Band 2 */
	100, 102, 104, 106, 108, 110, 112, /* Band 3 */
	116, 118, 120, 122, 124, 126, 128, /* Band 3 */
	132, 134, 136, 138, 140, 142, 144, /* Band 3 */
	149, 151, 153, 155, 157, 159, 161, /* Band 4 */
	165, 167, 169, 171, 173, 175, 177}; /* Band 4 */
880 static int rtw_channel_to_idx(u8 band, u8 channel)
885 if (band == PHY_BAND_2G) {
886 ch_idx = channel - 1;
887 n_channel = RTW_MAX_CHANNEL_NUM_2G;
888 } else if (band == PHY_BAND_5G) {
889 n_channel = RTW_MAX_CHANNEL_NUM_5G;
890 for (ch_idx = 0; ch_idx < n_channel; ch_idx++)
891 if (rtw_channel_idx_5g[ch_idx] == channel)
897 if (ch_idx >= n_channel)
903 static u8 rtw_get_channel_group(u8 channel)
992 static u8 phy_get_2g_tx_power_index(struct rtw_dev *rtwdev,
993 struct rtw_2g_txpwr_idx *pwr_idx_2g,
994 enum rtw_bandwidth bandwidth,
997 struct rtw_chip_info *chip = rtwdev->chip;
1001 u8 factor = chip->txgi_factor;
1003 if (rate <= DESC_RATE11M)
1004 tx_power = pwr_idx_2g->cck_base[group];
1006 tx_power = pwr_idx_2g->bw40_base[group];
1008 if (rate >= DESC_RATE6M && rate <= DESC_RATE54M)
1009 tx_power += pwr_idx_2g->ht_1s_diff.ofdm * factor;
1011 mcs_rate = (rate >= DESC_RATEMCS0 && rate <= DESC_RATEMCS15) ||
1012 (rate >= DESC_RATEVHT1SS_MCS0 &&
1013 rate <= DESC_RATEVHT2SS_MCS9);
1014 above_2ss = (rate >= DESC_RATEMCS8 && rate <= DESC_RATEMCS15) ||
1015 (rate >= DESC_RATEVHT2SS_MCS0);
1020 switch (bandwidth) {
1024 case RTW_CHANNEL_WIDTH_20:
1025 tx_power += pwr_idx_2g->ht_1s_diff.bw20 * factor;
1027 tx_power += pwr_idx_2g->ht_2s_diff.bw20 * factor;
1029 case RTW_CHANNEL_WIDTH_40:
1030 /* bw40 is the base power */
1032 tx_power += pwr_idx_2g->ht_2s_diff.bw40 * factor;
1039 static u8 phy_get_5g_tx_power_index(struct rtw_dev *rtwdev,
1040 struct rtw_5g_txpwr_idx *pwr_idx_5g,
1041 enum rtw_bandwidth bandwidth,
1044 struct rtw_chip_info *chip = rtwdev->chip;
1049 u8 factor = chip->txgi_factor;
1051 tx_power = pwr_idx_5g->bw40_base[group];
1053 mcs_rate = (rate >= DESC_RATEMCS0 && rate <= DESC_RATEMCS15) ||
1054 (rate >= DESC_RATEVHT1SS_MCS0 &&
1055 rate <= DESC_RATEVHT2SS_MCS9);
1056 above_2ss = (rate >= DESC_RATEMCS8 && rate <= DESC_RATEMCS15) ||
1057 (rate >= DESC_RATEVHT2SS_MCS0);
1060 tx_power += pwr_idx_5g->ht_1s_diff.ofdm * factor;
1064 switch (bandwidth) {
1068 case RTW_CHANNEL_WIDTH_20:
1069 tx_power += pwr_idx_5g->ht_1s_diff.bw20 * factor;
1071 tx_power += pwr_idx_5g->ht_2s_diff.bw20 * factor;
1073 case RTW_CHANNEL_WIDTH_40:
1074 /* bw40 is the base power */
1076 tx_power += pwr_idx_5g->ht_2s_diff.bw40 * factor;
1078 case RTW_CHANNEL_WIDTH_80:
1079 /* the base idx of bw80 is the average of bw40+/bw40- */
1080 lower = pwr_idx_5g->bw40_base[group];
1081 upper = pwr_idx_5g->bw40_base[group + 1];
1083 tx_power = (lower + upper) / 2;
1084 tx_power += pwr_idx_5g->vht_1s_diff.bw80 * factor;
1086 tx_power += pwr_idx_5g->vht_2s_diff.bw80 * factor;
1093 /* set tx power level by path for each rates, note that the order of the rates
1094 * are *very* important, bacause 8822B/8821C combines every four bytes of tx
1095 * power index into a four-byte power index register, and calls set_tx_agc to
1096 * write these values into hardware
1099 void phy_set_tx_power_level_by_path(struct rtw_dev *rtwdev, u8 ch, u8 path)
1101 struct rtw_hal *hal = &rtwdev->hal;
1104 /* do not need cck rates if we are not in 2.4G */
1105 if (hal->current_band_type == RTW_BAND_2G)
1106 rs = RTW_RATE_SECTION_CCK;
1108 rs = RTW_RATE_SECTION_OFDM;
1110 for (; rs < RTW_RATE_SECTION_MAX; rs++)
1111 phy_set_tx_power_index_by_rs(rtwdev, ch, path, rs);
1114 void rtw_phy_set_tx_power_level(struct rtw_dev *rtwdev, u8 channel)
1116 struct rtw_chip_info *chip = rtwdev->chip;
1117 struct rtw_hal *hal = &rtwdev->hal;
1120 mutex_lock(&hal->tx_power_mutex);
1122 for (path = 0; path < hal->rf_path_num; path++)
1123 phy_set_tx_power_level_by_path(rtwdev, channel, path);
1125 chip->ops->set_tx_power_index(rtwdev);
1126 mutex_unlock(&hal->tx_power_mutex);
/* Forward declaration: the regulatory limit lookup is defined later in
 * this file but used by phy_get_tx_power_index() below.
 */
s8 phy_get_tx_power_limit(struct rtw_dev *rtwdev, u8 band,
			  enum rtw_bandwidth bandwidth, u8 rf_path,
			  u8 rate, u8 channel, u8 regd);
1134 u8 phy_get_tx_power_index(void *adapter, u8 rf_path, u8 rate,
1135 enum rtw_bandwidth bandwidth, u8 channel, u8 regd)
1137 struct rtw_dev *rtwdev = adapter;
1138 struct rtw_hal *hal = &rtwdev->hal;
1139 struct rtw_txpwr_idx *pwr_idx;
1145 pwr_idx = &rtwdev->efuse.txpwr_idx_table[rf_path];
1146 group = rtw_get_channel_group(channel);
1148 /* base power index for 2.4G/5G */
1149 if (channel <= 14) {
1151 tx_power = phy_get_2g_tx_power_index(rtwdev,
1152 &pwr_idx->pwr_idx_2g,
1153 bandwidth, rate, group);
1154 offset = hal->tx_pwr_by_rate_offset_2g[rf_path][rate];
1157 tx_power = phy_get_5g_tx_power_index(rtwdev,
1158 &pwr_idx->pwr_idx_5g,
1159 bandwidth, rate, group);
1160 offset = hal->tx_pwr_by_rate_offset_5g[rf_path][rate];
1163 limit = phy_get_tx_power_limit(rtwdev, band, bandwidth, rf_path,
1164 rate, channel, regd);
1171 if (tx_power > rtwdev->chip->max_power_index)
1172 tx_power = rtwdev->chip->max_power_index;
1177 void phy_set_tx_power_index_by_rs(void *adapter, u8 ch, u8 path, u8 rs)
1179 struct rtw_dev *rtwdev = adapter;
1180 struct rtw_hal *hal = &rtwdev->hal;
1181 u8 regd = rtwdev->regd.txpwr_regd;
1189 if (rs >= RTW_RATE_SECTION_MAX)
1192 rates = rtw_rate_section[rs];
1193 size = rtw_rate_size[rs];
1194 bw = hal->current_band_width;
1195 for (i = 0; i < size; i++) {
1197 pwr_idx = phy_get_tx_power_index(adapter, path, rate, bw, ch,
1199 hal->tx_pwr_tbl[path][rate] = pwr_idx;
1203 static u8 tbl_to_dec_pwr_by_rate(struct rtw_dev *rtwdev, u32 hex, u8 i)
1205 if (rtwdev->chip->is_pwr_by_rate_dec)
1206 return bcd_to_dec_pwr_by_rate(hex, i);
1208 return (hex >> (i * 8)) & 0xFF;
/*
 * Translate one power-by-rate register write (addr/mask/val) into the
 * list of DESC_* rates it configures and the per-rate power values packed
 * into the register, storing them through rate[]/pwr_by_rate[]/rate_num.
 *
 * NOTE(review): the `switch (addr)` statement, all of its case labels,
 * the *rate_num assignments, break statements and several declarations
 * were lost in this copy of the file; the surviving case bodies are kept
 * byte-for-byte and only comments were added. The cluster comments below
 * are inferred from the rate constants in each body.
 */
static void phy_get_rate_values_of_txpwr_by_rate(struct rtw_dev *rtwdev,
				u8 *pwr_by_rate, u8 *rate_num)
		/* OFDM 6M/9M/12M/18M, one power byte per rate */
		rate[0] = DESC_RATE6M;
		rate[1] = DESC_RATE9M;
		rate[2] = DESC_RATE12M;
		rate[3] = DESC_RATE18M;
		for (i = 0; i < 4; ++i)
			pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
		/* OFDM 24M/36M/48M/54M */
		rate[0] = DESC_RATE24M;
		rate[1] = DESC_RATE36M;
		rate[2] = DESC_RATE48M;
		rate[3] = DESC_RATE54M;
		for (i = 0; i < 4; ++i)
			pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
		/* CCK 1M: value lives in byte 1 of the register */
		rate[0] = DESC_RATE1M;
		pwr_by_rate[0] = bcd_to_dec_pwr_by_rate(val, 1);
		/* CCK 2M/5.5M/11M share a register; the mask tells which
		 * bytes of it are being written
		 */
		if (mask == 0xffffff00) {
			rate[0] = DESC_RATE2M;
			rate[1] = DESC_RATE5_5M;
			rate[2] = DESC_RATE11M;
			for (i = 1; i < 4; ++i)
				pwr_by_rate[i - 1] =
					tbl_to_dec_pwr_by_rate(rtwdev, val, i);
		} else if (mask == 0x000000ff) {
			rate[0] = DESC_RATE11M;
			pwr_by_rate[0] = bcd_to_dec_pwr_by_rate(val, 0);
		/* HT MCS0-3 */
		rate[0] = DESC_RATEMCS0;
		rate[1] = DESC_RATEMCS1;
		rate[2] = DESC_RATEMCS2;
		rate[3] = DESC_RATEMCS3;
		for (i = 0; i < 4; ++i)
			pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
		/* HT MCS4-7 */
		rate[0] = DESC_RATEMCS4;
		rate[1] = DESC_RATEMCS5;
		rate[2] = DESC_RATEMCS6;
		rate[3] = DESC_RATEMCS7;
		for (i = 0; i < 4; ++i)
			pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
		/* HT MCS8-11 */
		rate[0] = DESC_RATEMCS8;
		rate[1] = DESC_RATEMCS9;
		rate[2] = DESC_RATEMCS10;
		rate[3] = DESC_RATEMCS11;
		for (i = 0; i < 4; ++i)
			pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
		/* HT MCS12-15 */
		rate[0] = DESC_RATEMCS12;
		rate[1] = DESC_RATEMCS13;
		rate[2] = DESC_RATEMCS14;
		rate[3] = DESC_RATEMCS15;
		for (i = 0; i < 4; ++i)
			pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
		/* CCK 1M/2M/5.5M from bytes 1..3 (continuation line of the
		 * call was lost in this copy)
		 */
		rate[0] = DESC_RATE1M;
		rate[1] = DESC_RATE2M;
		rate[2] = DESC_RATE5_5M;
		for (i = 1; i < 4; ++i)
			pwr_by_rate[i - 1] = tbl_to_dec_pwr_by_rate(rtwdev,
		/* CCK 1M/2M/5.5M/11M */
		rate[0] = DESC_RATE1M;
		rate[1] = DESC_RATE2M;
		rate[2] = DESC_RATE5_5M;
		rate[3] = DESC_RATE11M;
		for (i = 0; i < 4; ++i)
			pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
		/* OFDM 6M/9M/12M/18M */
		rate[0] = DESC_RATE6M;
		rate[1] = DESC_RATE9M;
		rate[2] = DESC_RATE12M;
		rate[3] = DESC_RATE18M;
		for (i = 0; i < 4; ++i)
			pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
		/* OFDM 24M/36M/48M/54M */
		rate[0] = DESC_RATE24M;
		rate[1] = DESC_RATE36M;
		rate[2] = DESC_RATE48M;
		rate[3] = DESC_RATE54M;
		for (i = 0; i < 4; ++i)
			pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
		/* HT MCS0-3 */
		rate[0] = DESC_RATEMCS0;
		rate[1] = DESC_RATEMCS1;
		rate[2] = DESC_RATEMCS2;
		rate[3] = DESC_RATEMCS3;
		for (i = 0; i < 4; ++i)
			pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
		/* HT MCS4-7 */
		rate[0] = DESC_RATEMCS4;
		rate[1] = DESC_RATEMCS5;
		rate[2] = DESC_RATEMCS6;
		rate[3] = DESC_RATEMCS7;
		for (i = 0; i < 4; ++i)
			pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
		/* HT MCS8-11 */
		rate[0] = DESC_RATEMCS8;
		rate[1] = DESC_RATEMCS9;
		rate[2] = DESC_RATEMCS10;
		rate[3] = DESC_RATEMCS11;
		for (i = 0; i < 4; ++i)
			pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
		/* HT MCS12-15 */
		rate[0] = DESC_RATEMCS12;
		rate[1] = DESC_RATEMCS13;
		rate[2] = DESC_RATEMCS14;
		rate[3] = DESC_RATEMCS15;
		for (i = 0; i < 4; ++i)
			pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
		/* VHT 1SS MCS0-3 */
		rate[0] = DESC_RATEVHT1SS_MCS0;
		rate[1] = DESC_RATEVHT1SS_MCS1;
		rate[2] = DESC_RATEVHT1SS_MCS2;
		rate[3] = DESC_RATEVHT1SS_MCS3;
		for (i = 0; i < 4; ++i)
			pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
		/* VHT 1SS MCS4-7 */
		rate[0] = DESC_RATEVHT1SS_MCS4;
		rate[1] = DESC_RATEVHT1SS_MCS5;
		rate[2] = DESC_RATEVHT1SS_MCS6;
		rate[3] = DESC_RATEVHT1SS_MCS7;
		for (i = 0; i < 4; ++i)
			pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
		/* VHT 1SS MCS8-9 and 2SS MCS0-1 */
		rate[0] = DESC_RATEVHT1SS_MCS8;
		rate[1] = DESC_RATEVHT1SS_MCS9;
		rate[2] = DESC_RATEVHT2SS_MCS0;
		rate[3] = DESC_RATEVHT2SS_MCS1;
		for (i = 0; i < 4; ++i)
			pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
		/* VHT 2SS MCS2-5 */
		rate[0] = DESC_RATEVHT2SS_MCS2;
		rate[1] = DESC_RATEVHT2SS_MCS3;
		rate[2] = DESC_RATEVHT2SS_MCS4;
		rate[3] = DESC_RATEVHT2SS_MCS5;
		for (i = 0; i < 4; ++i)
			pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
		/* VHT 2SS MCS6-9 */
		rate[0] = DESC_RATEVHT2SS_MCS6;
		rate[1] = DESC_RATEVHT2SS_MCS7;
		rate[2] = DESC_RATEVHT2SS_MCS8;
		rate[3] = DESC_RATEVHT2SS_MCS9;
		for (i = 0; i < 4; ++i)
			pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
		/* HT MCS16-19 (3SS) */
		rate[0] = DESC_RATEMCS16;
		rate[1] = DESC_RATEMCS17;
		rate[2] = DESC_RATEMCS18;
		rate[3] = DESC_RATEMCS19;
		for (i = 0; i < 4; ++i)
			pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
		/* HT MCS20-23 (3SS) */
		rate[0] = DESC_RATEMCS20;
		rate[1] = DESC_RATEMCS21;
		rate[2] = DESC_RATEMCS22;
		rate[3] = DESC_RATEMCS23;
		for (i = 0; i < 4; ++i)
			pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
		/* VHT 3SS MCS0-3 */
		rate[0] = DESC_RATEVHT3SS_MCS0;
		rate[1] = DESC_RATEVHT3SS_MCS1;
		rate[2] = DESC_RATEVHT3SS_MCS2;
		rate[3] = DESC_RATEVHT3SS_MCS3;
		for (i = 0; i < 4; ++i)
			pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
		/* VHT 3SS MCS4-7 */
		rate[0] = DESC_RATEVHT3SS_MCS4;
		rate[1] = DESC_RATEVHT3SS_MCS5;
		rate[2] = DESC_RATEVHT3SS_MCS6;
		rate[3] = DESC_RATEVHT3SS_MCS7;
		for (i = 0; i < 4; ++i)
			pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
		/* VHT 3SS MCS8-9: only two rates in this register */
		rate[0] = DESC_RATEVHT3SS_MCS8;
		rate[1] = DESC_RATEVHT3SS_MCS9;
		for (i = 0; i < 2; ++i)
			pwr_by_rate[i] = tbl_to_dec_pwr_by_rate(rtwdev, val, i);
	/* default: unrecognized register address */
	rtw_warn(rtwdev, "invalid tx power index addr 0x%08x\n", addr);
1517 void phy_store_tx_power_by_rate(void *adapter, u32 band, u32 rfpath, u32 txnum,
1518 u32 regaddr, u32 bitmask, u32 data)
1520 struct rtw_dev *rtwdev = adapter;
1521 struct rtw_hal *hal = &rtwdev->hal;
1524 u8 rates[RTW_RF_PATH_MAX] = {0};
1526 s8 pwr_by_rate[RTW_RF_PATH_MAX] = {0};
1529 phy_get_rate_values_of_txpwr_by_rate(rtwdev, regaddr, bitmask, data,
1530 rates, pwr_by_rate, &rate_num);
1532 if (WARN_ON(rfpath >= RTW_RF_PATH_MAX ||
1533 (band != PHY_BAND_2G && band != PHY_BAND_5G) ||
1534 rate_num > RTW_RF_PATH_MAX))
1537 for (i = 0; i < rate_num; i++) {
1538 offset = pwr_by_rate[i];
1540 if (band == PHY_BAND_2G)
1541 hal->tx_pwr_by_rate_offset_2g[rfpath][rate] = offset;
1542 else if (band == PHY_BAND_5G)
1543 hal->tx_pwr_by_rate_offset_5g[rfpath][rate] = offset;
1550 void phy_tx_power_by_rate_config_by_path(struct rtw_hal *hal, u8 path,
1551 u8 rs, u8 size, u8 *rates)
1554 u8 base_idx, rate_idx;
1555 s8 base_2g, base_5g;
1557 if (rs >= RTW_RATE_SECTION_VHT_1S)
1558 base_idx = rates[size - 3];
1560 base_idx = rates[size - 1];
1561 base_2g = hal->tx_pwr_by_rate_offset_2g[path][base_idx];
1562 base_5g = hal->tx_pwr_by_rate_offset_5g[path][base_idx];
1563 hal->tx_pwr_by_rate_base_2g[path][rs] = base_2g;
1564 hal->tx_pwr_by_rate_base_5g[path][rs] = base_5g;
1565 for (rate = 0; rate < size; rate++) {
1566 rate_idx = rates[rate];
1567 hal->tx_pwr_by_rate_offset_2g[path][rate_idx] -= base_2g;
1568 hal->tx_pwr_by_rate_offset_5g[path][rate_idx] -= base_5g;
1572 void rtw_phy_tx_power_by_rate_config(struct rtw_hal *hal)
1576 for (path = 0; path < RTW_RF_PATH_MAX; path++) {
1577 phy_tx_power_by_rate_config_by_path(hal, path,
1578 RTW_RATE_SECTION_CCK,
1579 rtw_cck_size, rtw_cck_rates);
1580 phy_tx_power_by_rate_config_by_path(hal, path,
1581 RTW_RATE_SECTION_OFDM,
1582 rtw_ofdm_size, rtw_ofdm_rates);
1583 phy_tx_power_by_rate_config_by_path(hal, path,
1584 RTW_RATE_SECTION_HT_1S,
1585 rtw_ht_1s_size, rtw_ht_1s_rates);
1586 phy_tx_power_by_rate_config_by_path(hal, path,
1587 RTW_RATE_SECTION_HT_2S,
1588 rtw_ht_2s_size, rtw_ht_2s_rates);
1589 phy_tx_power_by_rate_config_by_path(hal, path,
1590 RTW_RATE_SECTION_VHT_1S,
1591 rtw_vht_1s_size, rtw_vht_1s_rates);
1592 phy_tx_power_by_rate_config_by_path(hal, path,
1593 RTW_RATE_SECTION_VHT_2S,
1594 rtw_vht_2s_size, rtw_vht_2s_rates);
1599 phy_tx_power_limit_config(struct rtw_hal *hal, u8 regd, u8 bw, u8 rs)
1604 for (ch = 0; ch < RTW_MAX_CHANNEL_NUM_2G; ch++) {
1605 base = hal->tx_pwr_by_rate_base_2g[0][rs];
1606 orig = hal->tx_pwr_limit_2g[regd][bw][rs][ch];
1607 hal->tx_pwr_limit_2g[regd][bw][rs][ch] -= base;
1610 for (ch = 0; ch < RTW_MAX_CHANNEL_NUM_5G; ch++) {
1611 base = hal->tx_pwr_by_rate_base_5g[0][rs];
1612 hal->tx_pwr_limit_5g[regd][bw][rs][ch] -= base;
1616 void rtw_phy_tx_power_limit_config(struct rtw_hal *hal)
1620 for (regd = 0; regd < RTW_REGD_MAX; regd++)
1621 for (bw = 0; bw < RTW_CHANNEL_WIDTH_MAX; bw++)
1622 for (rs = 0; rs < RTW_RATE_SECTION_MAX; rs++)
1623 phy_tx_power_limit_config(hal, regd, bw, rs);
1626 static s8 get_tx_power_limit(struct rtw_hal *hal, u8 bw, u8 rs, u8 ch, u8 regd)
1628 if (regd > RTW_REGD_WW)
1629 return RTW_MAX_POWER_INDEX;
1631 return hal->tx_pwr_limit_2g[regd][bw][rs][ch];
1634 s8 phy_get_tx_power_limit(struct rtw_dev *rtwdev, u8 band,
1635 enum rtw_bandwidth bw, u8 rf_path,
1636 u8 rate, u8 channel, u8 regd)
1638 struct rtw_hal *hal = &rtwdev->hal;
1643 if (rate >= DESC_RATE1M && rate <= DESC_RATE11M)
1644 rs = RTW_RATE_SECTION_CCK;
1645 else if (rate >= DESC_RATE6M && rate <= DESC_RATE54M)
1646 rs = RTW_RATE_SECTION_OFDM;
1647 else if (rate >= DESC_RATEMCS0 && rate <= DESC_RATEMCS7)
1648 rs = RTW_RATE_SECTION_HT_1S;
1649 else if (rate >= DESC_RATEMCS8 && rate <= DESC_RATEMCS15)
1650 rs = RTW_RATE_SECTION_HT_2S;
1651 else if (rate >= DESC_RATEVHT1SS_MCS0 && rate <= DESC_RATEVHT1SS_MCS9)
1652 rs = RTW_RATE_SECTION_VHT_1S;
1653 else if (rate >= DESC_RATEVHT2SS_MCS0 && rate <= DESC_RATEVHT2SS_MCS9)
1654 rs = RTW_RATE_SECTION_VHT_2S;
1658 ch_idx = rtw_channel_to_idx(band, channel);
1662 power_limit = get_tx_power_limit(hal, bw, rs, ch_idx, regd);
1667 WARN(1, "invalid arguments, band=%d, bw=%d, path=%d, rate=%d, ch=%d\n",
1668 band, bw, rf_path, rate, channel);
1669 return RTW_MAX_POWER_INDEX;
1672 void phy_set_tx_power_limit(struct rtw_dev *rtwdev, u8 regd, u8 band,
1673 u8 bw, u8 rs, u8 ch, s8 pwr_limit)
1675 struct rtw_hal *hal = &rtwdev->hal;
1678 pwr_limit = clamp_t(s8, pwr_limit,
1679 -RTW_MAX_POWER_INDEX, RTW_MAX_POWER_INDEX);
1680 ch_idx = rtw_channel_to_idx(band, ch);
1682 if (regd >= RTW_REGD_MAX || bw >= RTW_CHANNEL_WIDTH_MAX ||
1683 rs >= RTW_RATE_SECTION_MAX || ch_idx < 0) {
1685 "wrong txpwr_lmt regd=%u, band=%u bw=%u, rs=%u, ch_idx=%u, pwr_limit=%d\n",
1686 regd, band, bw, rs, ch_idx, pwr_limit);
1690 if (band == PHY_BAND_2G)
1691 hal->tx_pwr_limit_2g[regd][bw][rs][ch_idx] = pwr_limit;
1692 else if (band == PHY_BAND_5G)
1693 hal->tx_pwr_limit_5g[regd][bw][rs][ch_idx] = pwr_limit;
1697 void rtw_hw_tx_power_limit_init(struct rtw_hal *hal, u8 regd, u8 bw, u8 rs)
1702 for (ch = 0; ch < RTW_MAX_CHANNEL_NUM_2G; ch++)
1703 hal->tx_pwr_limit_2g[regd][bw][rs][ch] = RTW_MAX_POWER_INDEX;
1706 for (ch = 0; ch < RTW_MAX_CHANNEL_NUM_5G; ch++)
1707 hal->tx_pwr_limit_5g[regd][bw][rs][ch] = RTW_MAX_POWER_INDEX;
1710 void rtw_hw_init_tx_power(struct rtw_hal *hal)
1712 u8 regd, path, rate, rs, bw;
1714 /* init tx power by rate offset */
1715 for (path = 0; path < RTW_RF_PATH_MAX; path++) {
1716 for (rate = 0; rate < DESC_RATE_MAX; rate++) {
1717 hal->tx_pwr_by_rate_offset_2g[path][rate] = 0;
1718 hal->tx_pwr_by_rate_offset_5g[path][rate] = 0;
1722 /* init tx power limit */
1723 for (regd = 0; regd < RTW_REGD_MAX; regd++)
1724 for (bw = 0; bw < RTW_CHANNEL_WIDTH_MAX; bw++)
1725 for (rs = 0; rs < RTW_RATE_SECTION_MAX; rs++)
1726 rtw_hw_tx_power_limit_init(hal, regd, bw, rs);