2 * Copyright 2012-16 Advanced Micro Devices, Inc.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
26 #include "dce_clocks.h"
27 #include "dm_services.h"
28 #include "reg_helper.h"
29 #include "fixed32_32.h"
30 #include "bios_parser_interface.h"
33 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
34 #include "dcn_calcs.h"
36 #include "core_types.h"
/* Downcast the generic display_clock handle to its containing
 * DCE-specific dce_disp_clk wrapper (base is embedded as .base). */
#define TO_DCE_CLOCKS(clocks)\
container_of(clocks, struct dce_disp_clk, base)
/* Expand to the (shift, mask) pair for a register field, taken from this
 * clock object's shift/mask tables; used by the REG_* helper macros. */
#define FN(reg_name, field_name) \
clk_dce->clk_shift->field_name, clk_dce->clk_mask->field_name
/* Max clock values for each state indexed by "enum clocks_state": */
static const struct state_dependent_clocks dce80_max_clks_by_state[] = {
/* ClocksStateInvalid - should not be used */
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/* ClocksStateUltraLow - not expected to be used for DCE 8.0 */
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/* ClocksStateLow */
{ .display_clk_khz = 352000, .pixel_clk_khz = 330000},
/* ClocksStateNominal */
{ .display_clk_khz = 600000, .pixel_clk_khz = 400000 },
/* ClocksStatePerformance */
{ .display_clk_khz = 600000, .pixel_clk_khz = 400000 } };
/* Max clock values for DCE 11.0, indexed by "enum clocks_state": */
static const struct state_dependent_clocks dce110_max_clks_by_state[] = {
/*ClocksStateInvalid - should not be used*/
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/*ClocksStateUltraLow - currently by HW design team not supposed to be used*/
{ .display_clk_khz = 352000, .pixel_clk_khz = 330000 },
/*ClocksStateLow*/
{ .display_clk_khz = 352000, .pixel_clk_khz = 330000 },
/*ClocksStateNominal*/
{ .display_clk_khz = 467000, .pixel_clk_khz = 400000 },
/*ClocksStatePerformance*/
{ .display_clk_khz = 643000, .pixel_clk_khz = 400000 } };
/* Max clock values for DCE 11.2, indexed by "enum clocks_state": */
static const struct state_dependent_clocks dce112_max_clks_by_state[] = {
/*ClocksStateInvalid - should not be used*/
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/*ClocksStateUltraLow - currently by HW design team not supposed to be used*/
{ .display_clk_khz = 389189, .pixel_clk_khz = 346672 },
/*ClocksStateLow*/
{ .display_clk_khz = 459000, .pixel_clk_khz = 400000 },
/*ClocksStateNominal*/
{ .display_clk_khz = 667000, .pixel_clk_khz = 600000 },
/*ClocksStatePerformance*/
{ .display_clk_khz = 1132000, .pixel_clk_khz = 600000 } };
/* Max clock values for DCE 12.0, indexed by "enum clocks_state": */
static const struct state_dependent_clocks dce120_max_clks_by_state[] = {
/*ClocksStateInvalid - should not be used*/
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/*ClocksStateUltraLow - currently by HW design team not supposed to be used*/
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/*ClocksStateLow*/
{ .display_clk_khz = 460000, .pixel_clk_khz = 400000 },
/*ClocksStateNominal*/
{ .display_clk_khz = 670000, .pixel_clk_khz = 600000 },
/*ClocksStatePerformance*/
{ .display_clk_khz = 1133000, .pixel_clk_khz = 600000 } };
/* Starting point for each divider range.
 * Values are fixed-point dividers scaled by DIVIDER_RANGE_SCALE_FACTOR. */
enum dce_divider_range_start {
	DIVIDER_RANGE_01_START = 200, /* 2.00*/
	DIVIDER_RANGE_02_START = 1600, /* 16.00*/
	DIVIDER_RANGE_03_START = 3200, /* 32.00*/
	DIVIDER_RANGE_SCALE_FACTOR = 100 /* Results are scaled up by 100.*/
};
/* Ranges for divider identifiers (Divider ID or DID)
   mmDENTIST_DISPCLK_CNTL.DENTIST_DISPCLK_WDIVIDER*/
enum dce_divider_id_register_setting {
	DIVIDER_RANGE_01_BASE_DIVIDER_ID = 0X08,
	DIVIDER_RANGE_02_BASE_DIVIDER_ID = 0X40,
	DIVIDER_RANGE_03_BASE_DIVIDER_ID = 0X60,
	DIVIDER_RANGE_MAX_DIVIDER_ID = 0X80
};
/* Step size between each divider within a range.
   Incrementing the DENTIST_DISPCLK_WDIVIDER by one
   will increment the divider by this much.*/
enum dce_divider_range_step_size {
	DIVIDER_RANGE_01_STEP_SIZE = 25, /* 0.25*/
	DIVIDER_RANGE_02_STEP_SIZE = 50, /* 0.50*/
	DIVIDER_RANGE_03_STEP_SIZE = 100 /* 1.00 */
};
130 static bool dce_divider_range_construct(
131 struct dce_divider_range *div_range,
137 div_range->div_range_start = range_start;
138 div_range->div_range_step = range_step;
139 div_range->did_min = did_min;
140 div_range->did_max = did_max;
142 if (div_range->div_range_step == 0) {
143 div_range->div_range_step = 1;
144 /*div_range_step cannot be zero*/
147 /* Calculate this based on the other inputs.*/
148 /* See DividerRange.h for explanation of */
149 /* the relationship between divider id (DID) and a divider.*/
150 /* Number of Divider IDs = (Maximum Divider ID - Minimum Divider ID)*/
151 /* Maximum divider identified in this range =
152 * (Number of Divider IDs)*Step size between dividers
153 * + The start of this range.*/
154 div_range->div_range_end = (did_max - did_min) * range_step
159 static int dce_divider_range_calc_divider(
160 struct dce_divider_range *div_range,
163 /* Is this DID within our range?*/
164 if ((did < div_range->did_min) || (did >= div_range->did_max))
165 return INVALID_DIVIDER;
167 return ((did - div_range->did_min) * div_range->div_range_step)
168 + div_range->div_range_start;
172 static int dce_divider_range_get_divider(
173 struct dce_divider_range *div_range,
177 int div = INVALID_DIVIDER;
180 for (i = 0; i < ranges_num; i++) {
181 /* Calculate divider with given divider ID*/
182 div = dce_divider_range_calc_divider(&div_range[i], did);
183 /* Found a valid return divider*/
184 if (div != INVALID_DIVIDER)
190 static int dce_clocks_get_dp_ref_freq(struct display_clock *clk)
192 struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(clk);
193 int dprefclk_wdivider;
194 int dprefclk_src_sel;
195 int dp_ref_clk_khz = 600000;
196 int target_div = INVALID_DIVIDER;
198 /* ASSERT DP Reference Clock source is from DFS*/
199 REG_GET(DPREFCLK_CNTL, DPREFCLK_SRC_SEL, &dprefclk_src_sel);
200 ASSERT(dprefclk_src_sel == 0);
202 /* Read the mmDENTIST_DISPCLK_CNTL to get the currently
203 * programmed DID DENTIST_DPREFCLK_WDIVIDER*/
204 REG_GET(DENTIST_DISPCLK_CNTL, DENTIST_DPREFCLK_WDIVIDER, &dprefclk_wdivider);
206 /* Convert DENTIST_DPREFCLK_WDIVIDERto actual divider*/
207 target_div = dce_divider_range_get_divider(
208 clk_dce->divider_ranges,
212 if (target_div != INVALID_DIVIDER) {
213 /* Calculate the current DFS clock, in kHz.*/
214 dp_ref_clk_khz = (DIVIDER_RANGE_SCALE_FACTOR
215 * clk_dce->dentist_vco_freq_khz) / target_div;
218 /* SW will adjust DP REF Clock average value for all purposes
219 * (DP DTO / DP Audio DTO and DP GTC)
220 if clock is spread for all cases:
221 -if SS enabled on DP Ref clock and HW de-spreading enabled with SW
222 calculations for DS_INCR/DS_MODULO (this is planned to be default case)
223 -if SS enabled on DP Ref clock and HW de-spreading enabled with HW
224 calculations (not planned to be used, but average clock should still
226 -if SS enabled on DP Ref clock and HW de-spreading disabled
227 (should not be case with CIK) then SW should program all rates
228 generated according to average value (case as with previous ASICs)
230 if (clk_dce->ss_on_dprefclk && clk_dce->dprefclk_ss_divider != 0) {
231 struct fixed32_32 ss_percentage = dal_fixed32_32_div_int(
232 dal_fixed32_32_from_fraction(
233 clk_dce->dprefclk_ss_percentage,
234 clk_dce->dprefclk_ss_divider), 200);
235 struct fixed32_32 adj_dp_ref_clk_khz;
237 ss_percentage = dal_fixed32_32_sub(dal_fixed32_32_one,
240 dal_fixed32_32_mul_int(
243 dp_ref_clk_khz = dal_fixed32_32_floor(adj_dp_ref_clk_khz);
246 return dp_ref_clk_khz;
249 /* TODO: This is DCN DPREFCLK: it could be program by DENTIST by VBIOS
250 * or CLK0_CLK11 by SMU. For DCE120, it is wlays 600Mhz. Will re-visit
251 * clock implementation
253 static int dce_clocks_get_dp_ref_freq_wrkaround(struct display_clock *clk)
255 struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(clk);
256 int dp_ref_clk_khz = 600000;
258 if (clk_dce->ss_on_dprefclk && clk_dce->dprefclk_ss_divider != 0) {
259 struct fixed32_32 ss_percentage = dal_fixed32_32_div_int(
260 dal_fixed32_32_from_fraction(
261 clk_dce->dprefclk_ss_percentage,
262 clk_dce->dprefclk_ss_divider), 200);
263 struct fixed32_32 adj_dp_ref_clk_khz;
265 ss_percentage = dal_fixed32_32_sub(dal_fixed32_32_one,
268 dal_fixed32_32_mul_int(
271 dp_ref_clk_khz = dal_fixed32_32_floor(adj_dp_ref_clk_khz);
274 return dp_ref_clk_khz;
276 static enum dm_pp_clocks_state dce_get_required_clocks_state(
277 struct display_clock *clk,
278 struct state_dependent_clocks *req_clocks)
280 struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(clk);
282 enum dm_pp_clocks_state low_req_clk;
284 /* Iterate from highest supported to lowest valid state, and update
285 * lowest RequiredState with the lowest state that satisfies
286 * all required clocks
288 for (i = clk->max_clks_state; i >= DM_PP_CLOCKS_STATE_ULTRA_LOW; i--)
289 if (req_clocks->display_clk_khz >
290 clk_dce->max_clks_by_state[i].display_clk_khz
291 || req_clocks->pixel_clk_khz >
292 clk_dce->max_clks_by_state[i].pixel_clk_khz)
296 if (low_req_clk > clk->max_clks_state) {
297 DC_LOG_WARNING("%s: clocks unsupported disp_clk %d pix_clk %d",
299 req_clocks->display_clk_khz,
300 req_clocks->pixel_clk_khz);
301 low_req_clk = DM_PP_CLOCKS_STATE_INVALID;
307 static bool dce_clock_set_min_clocks_state(
308 struct display_clock *clk,
309 enum dm_pp_clocks_state clocks_state)
311 struct dm_pp_power_level_change_request level_change_req = {
314 if (clocks_state > clk->max_clks_state) {
315 /*Requested state exceeds max supported state.*/
316 DC_LOG_WARNING("Requested state exceeds max supported state");
318 } else if (clocks_state == clk->cur_min_clks_state) {
319 /*if we're trying to set the same state, we can just return
320 * since nothing needs to be done*/
324 /* get max clock state from PPLIB */
325 if (dm_pp_apply_power_level_change_request(clk->ctx, &level_change_req))
326 clk->cur_min_clks_state = clocks_state;
331 static int dce_set_clock(
332 struct display_clock *clk,
333 int requested_clk_khz)
335 struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(clk);
336 struct bp_pixel_clock_parameters pxl_clk_params = { 0 };
337 struct dc_bios *bp = clk->ctx->dc_bios;
338 int actual_clock = requested_clk_khz;
340 /* Make sure requested clock isn't lower than minimum threshold*/
341 if (requested_clk_khz > 0)
342 requested_clk_khz = max(requested_clk_khz,
343 clk_dce->dentist_vco_freq_khz / 64);
345 /* Prepare to program display clock*/
346 pxl_clk_params.target_pixel_clock = requested_clk_khz;
347 pxl_clk_params.pll_id = CLOCK_SOURCE_ID_DFS;
349 bp->funcs->program_display_engine_pll(bp, &pxl_clk_params);
351 if (clk_dce->dfs_bypass_enabled) {
353 /* Cache the fixed display clock*/
354 clk_dce->dfs_bypass_disp_clk =
355 pxl_clk_params.dfs_bypass_display_clock;
356 actual_clock = pxl_clk_params.dfs_bypass_display_clock;
359 /* from power down, we need mark the clock state as ClocksStateNominal
360 * from HWReset, so when resume we will call pplib voltage regulator.*/
361 if (requested_clk_khz == 0)
362 clk->cur_min_clks_state = DM_PP_CLOCKS_STATE_NOMINAL;
366 static int dce_psr_set_clock(
367 struct display_clock *clk,
368 int requested_clk_khz)
370 struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(clk);
371 struct dc_context *ctx = clk_dce->base.ctx;
372 struct dc *core_dc = ctx->dc;
373 struct dmcu *dmcu = core_dc->res_pool->dmcu;
374 int actual_clk_khz = requested_clk_khz;
376 actual_clk_khz = dce_set_clock(clk, requested_clk_khz);
378 dmcu->funcs->set_psr_wait_loop(dmcu, actual_clk_khz / 1000 / 7);
379 return actual_clk_khz;
382 static int dce112_set_clock(
383 struct display_clock *clk,
384 int requested_clk_khz)
386 struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(clk);
387 struct bp_set_dce_clock_parameters dce_clk_params;
388 struct dc_bios *bp = clk->ctx->dc_bios;
389 struct dc *core_dc = clk->ctx->dc;
390 struct dmcu *dmcu = core_dc->res_pool->dmcu;
391 int actual_clock = requested_clk_khz;
392 /* Prepare to program display clock*/
393 memset(&dce_clk_params, 0, sizeof(dce_clk_params));
395 /* Make sure requested clock isn't lower than minimum threshold*/
396 if (requested_clk_khz > 0)
397 requested_clk_khz = max(requested_clk_khz,
398 clk_dce->dentist_vco_freq_khz / 62);
400 dce_clk_params.target_clock_frequency = requested_clk_khz;
401 dce_clk_params.pll_id = CLOCK_SOURCE_ID_DFS;
402 dce_clk_params.clock_type = DCECLOCK_TYPE_DISPLAY_CLOCK;
404 bp->funcs->set_dce_clock(bp, &dce_clk_params);
405 actual_clock = dce_clk_params.target_clock_frequency;
407 /* from power down, we need mark the clock state as ClocksStateNominal
408 * from HWReset, so when resume we will call pplib voltage regulator.*/
409 if (requested_clk_khz == 0)
410 clk->cur_min_clks_state = DM_PP_CLOCKS_STATE_NOMINAL;
412 /*Program DP ref Clock*/
413 /*VBIOS will determine DPREFCLK frequency, so we don't set it*/
414 dce_clk_params.target_clock_frequency = 0;
415 dce_clk_params.clock_type = DCECLOCK_TYPE_DPREFCLK;
416 dce_clk_params.flags.USE_GENLOCK_AS_SOURCE_FOR_DPREFCLK =
417 (dce_clk_params.pll_id ==
418 CLOCK_SOURCE_COMBO_DISPLAY_PLL0);
420 bp->funcs->set_dce_clock(bp, &dce_clk_params);
422 if (!IS_FPGA_MAXIMUS_DC(core_dc->ctx->dce_environment)) {
423 if (clk_dce->dfs_bypass_disp_clk != actual_clock)
424 dmcu->funcs->set_psr_wait_loop(dmcu,
425 actual_clock / 1000 / 7);
428 clk_dce->dfs_bypass_disp_clk = actual_clock;
432 static void dce_clock_read_integrated_info(struct dce_disp_clk *clk_dce)
434 struct dc_debug *debug = &clk_dce->base.ctx->dc->debug;
435 struct dc_bios *bp = clk_dce->base.ctx->dc_bios;
436 struct integrated_info info = { { { 0 } } };
437 struct dc_firmware_info fw_info = { { 0 } };
440 if (bp->integrated_info)
441 info = *bp->integrated_info;
443 clk_dce->dentist_vco_freq_khz = info.dentist_vco_freq;
444 if (clk_dce->dentist_vco_freq_khz == 0) {
445 bp->funcs->get_firmware_info(bp, &fw_info);
446 clk_dce->dentist_vco_freq_khz =
447 fw_info.smu_gpu_pll_output_freq;
448 if (clk_dce->dentist_vco_freq_khz == 0)
449 clk_dce->dentist_vco_freq_khz = 3600000;
452 /*update the maximum display clock for each power state*/
453 for (i = 0; i < NUMBER_OF_DISP_CLK_VOLTAGE; ++i) {
454 enum dm_pp_clocks_state clk_state = DM_PP_CLOCKS_STATE_INVALID;
458 clk_state = DM_PP_CLOCKS_STATE_ULTRA_LOW;
462 clk_state = DM_PP_CLOCKS_STATE_LOW;
466 clk_state = DM_PP_CLOCKS_STATE_NOMINAL;
470 clk_state = DM_PP_CLOCKS_STATE_PERFORMANCE;
474 clk_state = DM_PP_CLOCKS_STATE_INVALID;
478 /*Do not allow bad VBIOS/SBIOS to override with invalid values,
479 * check for > 100MHz*/
480 if (info.disp_clk_voltage[i].max_supported_clk >= 100000)
481 clk_dce->max_clks_by_state[clk_state].display_clk_khz =
482 info.disp_clk_voltage[i].max_supported_clk;
485 if (!debug->disable_dfs_bypass && bp->integrated_info)
486 if (bp->integrated_info->gpu_cap_info & DFS_BYPASS_ENABLE)
487 clk_dce->dfs_bypass_enabled = true;
489 clk_dce->use_max_disp_clk = debug->max_disp_clk;
492 static void dce_clock_read_ss_info(struct dce_disp_clk *clk_dce)
494 struct dc_bios *bp = clk_dce->base.ctx->dc_bios;
495 int ss_info_num = bp->funcs->get_ss_entry_number(
496 bp, AS_SIGNAL_TYPE_GPU_PLL);
499 struct spread_spectrum_info info = { { 0 } };
500 enum bp_result result = bp->funcs->get_spread_spectrum_info(
501 bp, AS_SIGNAL_TYPE_GPU_PLL, 0, &info);
503 /* Based on VBIOS, VBIOS will keep entry for GPU PLL SS
504 * even if SS not enabled and in that case
505 * SSInfo.spreadSpectrumPercentage !=0 would be sign
508 if (result == BP_RESULT_OK &&
509 info.spread_spectrum_percentage != 0) {
510 clk_dce->ss_on_dprefclk = true;
511 clk_dce->dprefclk_ss_divider = info.spread_percentage_divider;
513 if (info.type.CENTER_MODE == 0) {
514 /* TODO: Currently for DP Reference clock we
515 * need only SS percentage for
517 clk_dce->dprefclk_ss_percentage =
518 info.spread_spectrum_percentage;
524 result = bp->funcs->get_spread_spectrum_info(
525 bp, AS_SIGNAL_TYPE_DISPLAY_PORT, 0, &info);
527 /* Based on VBIOS, VBIOS will keep entry for DPREFCLK SS
528 * even if SS not enabled and in that case
529 * SSInfo.spreadSpectrumPercentage !=0 would be sign
532 if (result == BP_RESULT_OK &&
533 info.spread_spectrum_percentage != 0) {
534 clk_dce->ss_on_dprefclk = true;
535 clk_dce->dprefclk_ss_divider = info.spread_percentage_divider;
537 if (info.type.CENTER_MODE == 0) {
538 /* Currently for DP Reference clock we
539 * need only SS percentage for
541 clk_dce->dprefclk_ss_percentage =
542 info.spread_spectrum_percentage;
548 static bool dce_apply_clock_voltage_request(
549 struct display_clock *clk,
550 enum dm_pp_clock_type clocks_type,
553 bool update_dp_phyclk)
555 bool send_request = false;
556 struct dm_pp_clock_for_voltage_req clock_voltage_req = {0};
558 switch (clocks_type) {
559 case DM_PP_CLOCK_TYPE_DISPLAY_CLK:
560 case DM_PP_CLOCK_TYPE_PIXELCLK:
561 case DM_PP_CLOCK_TYPE_DISPLAYPHYCLK:
568 clock_voltage_req.clk_type = clocks_type;
569 clock_voltage_req.clocks_in_khz = clocks_in_khz;
573 switch (clocks_type) {
574 case DM_PP_CLOCK_TYPE_DISPLAY_CLK:
575 if (clocks_in_khz > clk->cur_clocks_value.dispclk_in_khz) {
576 clk->cur_clocks_value.dispclk_notify_pplib_done = true;
579 clk->cur_clocks_value.dispclk_notify_pplib_done = false;
580 /* no matter incrase or decrase clock, update current clock value */
581 clk->cur_clocks_value.dispclk_in_khz = clocks_in_khz;
583 case DM_PP_CLOCK_TYPE_PIXELCLK:
584 if (clocks_in_khz > clk->cur_clocks_value.max_pixelclk_in_khz) {
585 clk->cur_clocks_value.pixelclk_notify_pplib_done = true;
588 clk->cur_clocks_value.pixelclk_notify_pplib_done = false;
589 /* no matter incrase or decrase clock, update current clock value */
590 clk->cur_clocks_value.max_pixelclk_in_khz = clocks_in_khz;
592 case DM_PP_CLOCK_TYPE_DISPLAYPHYCLK:
593 if (clocks_in_khz > clk->cur_clocks_value.max_non_dp_phyclk_in_khz) {
594 clk->cur_clocks_value.phyclk_notigy_pplib_done = true;
597 clk->cur_clocks_value.phyclk_notigy_pplib_done = false;
598 /* no matter incrase or decrase clock, update current clock value */
599 clk->cur_clocks_value.max_non_dp_phyclk_in_khz = clocks_in_khz;
607 switch (clocks_type) {
608 case DM_PP_CLOCK_TYPE_DISPLAY_CLK:
609 if (!clk->cur_clocks_value.dispclk_notify_pplib_done)
612 case DM_PP_CLOCK_TYPE_PIXELCLK:
613 if (!clk->cur_clocks_value.pixelclk_notify_pplib_done)
616 case DM_PP_CLOCK_TYPE_DISPLAYPHYCLK:
617 if (!clk->cur_clocks_value.phyclk_notigy_pplib_done)
626 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
627 if (clk->ctx->dce_version >= DCN_VERSION_1_0) {
628 struct dc *core_dc = clk->ctx->dc;
629 /*use dcfclk request voltage*/
630 clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DCFCLK;
631 clock_voltage_req.clocks_in_khz =
632 dcn_find_dcfclk_suits_all(core_dc, &clk->cur_clocks_value);
635 dm_pp_apply_clock_for_voltage_request(
636 clk->ctx, &clock_voltage_req);
638 if (update_dp_phyclk && (clocks_in_khz >
639 clk->cur_clocks_value.max_dp_phyclk_in_khz))
640 clk->cur_clocks_value.max_dp_phyclk_in_khz = clocks_in_khz;
646 static const struct display_clock_funcs dce120_funcs = {
647 .get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq_wrkaround,
648 .apply_clock_voltage_request = dce_apply_clock_voltage_request,
649 .set_clock = dce112_set_clock
652 static const struct display_clock_funcs dce112_funcs = {
653 .get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq,
654 .get_required_clocks_state = dce_get_required_clocks_state,
655 .set_min_clocks_state = dce_clock_set_min_clocks_state,
656 .set_clock = dce112_set_clock
659 static const struct display_clock_funcs dce110_funcs = {
660 .get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq,
661 .get_required_clocks_state = dce_get_required_clocks_state,
662 .set_min_clocks_state = dce_clock_set_min_clocks_state,
663 .set_clock = dce_psr_set_clock
666 static const struct display_clock_funcs dce_funcs = {
667 .get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq,
668 .get_required_clocks_state = dce_get_required_clocks_state,
669 .set_min_clocks_state = dce_clock_set_min_clocks_state,
670 .set_clock = dce_set_clock
673 static void dce_disp_clk_construct(
674 struct dce_disp_clk *clk_dce,
675 struct dc_context *ctx,
676 const struct dce_disp_clk_registers *regs,
677 const struct dce_disp_clk_shift *clk_shift,
678 const struct dce_disp_clk_mask *clk_mask)
680 struct display_clock *base = &clk_dce->base;
683 base->funcs = &dce_funcs;
685 clk_dce->regs = regs;
686 clk_dce->clk_shift = clk_shift;
687 clk_dce->clk_mask = clk_mask;
689 clk_dce->dfs_bypass_disp_clk = 0;
691 clk_dce->dprefclk_ss_percentage = 0;
692 clk_dce->dprefclk_ss_divider = 1000;
693 clk_dce->ss_on_dprefclk = false;
695 base->max_clks_state = DM_PP_CLOCKS_STATE_NOMINAL;
696 base->cur_min_clks_state = DM_PP_CLOCKS_STATE_INVALID;
698 dce_clock_read_integrated_info(clk_dce);
699 dce_clock_read_ss_info(clk_dce);
701 dce_divider_range_construct(
702 &clk_dce->divider_ranges[DIVIDER_RANGE_01],
703 DIVIDER_RANGE_01_START,
704 DIVIDER_RANGE_01_STEP_SIZE,
705 DIVIDER_RANGE_01_BASE_DIVIDER_ID,
706 DIVIDER_RANGE_02_BASE_DIVIDER_ID);
707 dce_divider_range_construct(
708 &clk_dce->divider_ranges[DIVIDER_RANGE_02],
709 DIVIDER_RANGE_02_START,
710 DIVIDER_RANGE_02_STEP_SIZE,
711 DIVIDER_RANGE_02_BASE_DIVIDER_ID,
712 DIVIDER_RANGE_03_BASE_DIVIDER_ID);
713 dce_divider_range_construct(
714 &clk_dce->divider_ranges[DIVIDER_RANGE_03],
715 DIVIDER_RANGE_03_START,
716 DIVIDER_RANGE_03_STEP_SIZE,
717 DIVIDER_RANGE_03_BASE_DIVIDER_ID,
718 DIVIDER_RANGE_MAX_DIVIDER_ID);
721 struct display_clock *dce_disp_clk_create(
722 struct dc_context *ctx,
723 const struct dce_disp_clk_registers *regs,
724 const struct dce_disp_clk_shift *clk_shift,
725 const struct dce_disp_clk_mask *clk_mask)
727 struct dce_disp_clk *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL);
729 if (clk_dce == NULL) {
734 memcpy(clk_dce->max_clks_by_state,
735 dce80_max_clks_by_state,
736 sizeof(dce80_max_clks_by_state));
738 dce_disp_clk_construct(
739 clk_dce, ctx, regs, clk_shift, clk_mask);
741 return &clk_dce->base;
744 struct display_clock *dce110_disp_clk_create(
745 struct dc_context *ctx,
746 const struct dce_disp_clk_registers *regs,
747 const struct dce_disp_clk_shift *clk_shift,
748 const struct dce_disp_clk_mask *clk_mask)
750 struct dce_disp_clk *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL);
752 if (clk_dce == NULL) {
757 memcpy(clk_dce->max_clks_by_state,
758 dce110_max_clks_by_state,
759 sizeof(dce110_max_clks_by_state));
761 dce_disp_clk_construct(
762 clk_dce, ctx, regs, clk_shift, clk_mask);
764 clk_dce->base.funcs = &dce110_funcs;
766 return &clk_dce->base;
769 struct display_clock *dce112_disp_clk_create(
770 struct dc_context *ctx,
771 const struct dce_disp_clk_registers *regs,
772 const struct dce_disp_clk_shift *clk_shift,
773 const struct dce_disp_clk_mask *clk_mask)
775 struct dce_disp_clk *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL);
777 if (clk_dce == NULL) {
782 memcpy(clk_dce->max_clks_by_state,
783 dce112_max_clks_by_state,
784 sizeof(dce112_max_clks_by_state));
786 dce_disp_clk_construct(
787 clk_dce, ctx, regs, clk_shift, clk_mask);
789 clk_dce->base.funcs = &dce112_funcs;
791 return &clk_dce->base;
794 struct display_clock *dce120_disp_clk_create(struct dc_context *ctx)
796 struct dce_disp_clk *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL);
797 struct dm_pp_clock_levels_with_voltage clk_level_info = {0};
799 if (clk_dce == NULL) {
804 memcpy(clk_dce->max_clks_by_state,
805 dce120_max_clks_by_state,
806 sizeof(dce120_max_clks_by_state));
808 dce_disp_clk_construct(
809 clk_dce, ctx, NULL, NULL, NULL);
811 clk_dce->base.funcs = &dce120_funcs;
814 if (!ctx->dc->debug.disable_pplib_clock_request &&
815 dm_pp_get_clock_levels_by_type_with_voltage(
816 ctx, DM_PP_CLOCK_TYPE_DISPLAY_CLK, &clk_level_info)
817 && clk_level_info.num_levels)
818 clk_dce->max_displ_clk_in_khz =
819 clk_level_info.data[clk_level_info.num_levels - 1].clocks_in_khz;
821 clk_dce->max_displ_clk_in_khz = 1133000;
823 return &clk_dce->base;
826 void dce_disp_clk_destroy(struct display_clock **disp_clk)
828 struct dce_disp_clk *clk_dce = TO_DCE_CLOCKS(*disp_clk);