2 * Copyright 2012-16 Advanced Micro Devices, Inc.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
26 #include "dce_clocks.h"
27 #include "dm_services.h"
28 #include "reg_helper.h"
29 #include "fixed31_32.h"
30 #include "bios_parser_interface.h"
33 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
34 #include "dcn_calcs.h"
36 #include "core_types.h"
38 #include "dal_asic_id.h"
/* Downcast a generic struct dccg pointer to its containing struct dce_dccg. */
#define TO_DCE_CLOCKS(clocks)\
	container_of(clocks, struct dce_dccg, base)

/* Shift/mask pair expansion used by the REG_* helpers from reg_helper.h;
 * expects a local "clk_dce" in scope. */
#define FN(reg_name, field_name) \
	clk_dce->clk_shift->field_name, clk_dce->clk_mask->field_name
/* Max clock values for each state indexed by "enum clocks_state": */
static const struct state_dependent_clocks dce80_max_clks_by_state[] = {
/* ClocksStateInvalid - should not be used */
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/* ClocksStateUltraLow - not expected to be used for DCE 8.0 */
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/* ClocksStateLow - NOTE(review): label inferred from the state ordering; confirm */
{ .display_clk_khz = 352000, .pixel_clk_khz = 330000},
/* ClocksStateNominal */
{ .display_clk_khz = 600000, .pixel_clk_khz = 400000 },
/* ClocksStatePerformance */
{ .display_clk_khz = 600000, .pixel_clk_khz = 400000 } };
/* DCE 11.0 max clocks per power state, indexed by "enum clocks_state". */
static const struct state_dependent_clocks dce110_max_clks_by_state[] = {
/*ClocksStateInvalid - should not be used*/
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/*ClocksStateUltraLow - currently by HW design team not supposed to be used*/
{ .display_clk_khz = 352000, .pixel_clk_khz = 330000 },
/*ClocksStateLow - NOTE(review): label inferred from the state ordering; confirm*/
{ .display_clk_khz = 352000, .pixel_clk_khz = 330000 },
/*ClocksStateNominal*/
{ .display_clk_khz = 467000, .pixel_clk_khz = 400000 },
/*ClocksStatePerformance*/
{ .display_clk_khz = 643000, .pixel_clk_khz = 400000 } };
/* DCE 11.2 max clocks per power state, indexed by "enum clocks_state". */
static const struct state_dependent_clocks dce112_max_clks_by_state[] = {
/*ClocksStateInvalid - should not be used*/
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/*ClocksStateUltraLow - currently by HW design team not supposed to be used*/
{ .display_clk_khz = 389189, .pixel_clk_khz = 346672 },
/*ClocksStateLow - NOTE(review): label inferred from the state ordering; confirm*/
{ .display_clk_khz = 459000, .pixel_clk_khz = 400000 },
/*ClocksStateNominal*/
{ .display_clk_khz = 667000, .pixel_clk_khz = 600000 },
/*ClocksStatePerformance*/
{ .display_clk_khz = 1132000, .pixel_clk_khz = 600000 } };
/* DCE 12.0 max clocks per power state, indexed by "enum clocks_state". */
static const struct state_dependent_clocks dce120_max_clks_by_state[] = {
/*ClocksStateInvalid - should not be used*/
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/*ClocksStateUltraLow - currently by HW design team not supposed to be used*/
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/*ClocksStateLow - NOTE(review): label inferred from the state ordering; confirm*/
{ .display_clk_khz = 460000, .pixel_clk_khz = 400000 },
/*ClocksStateNominal*/
{ .display_clk_khz = 670000, .pixel_clk_khz = 600000 },
/*ClocksStatePerformance*/
{ .display_clk_khz = 1133000, .pixel_clk_khz = 600000 } };
/* Starting point for each divider range.
 * All divider values are fixed-point, scaled up by
 * DIVIDER_RANGE_SCALE_FACTOR (x100). */
enum dce_divider_range_start {
	DIVIDER_RANGE_01_START = 200, /* 2.00*/
	DIVIDER_RANGE_02_START = 1600, /* 16.00*/
	DIVIDER_RANGE_03_START = 3200, /* 32.00*/
	DIVIDER_RANGE_SCALE_FACTOR = 100 /* Results are scaled up by 100.*/
};

/* Ranges for divider identifiers (Divider ID or DID)
 * mmDENTIST_DISPCLK_CNTL.DENTIST_DISPCLK_WDIVIDER.
 * Each constant is the first DID of its range; the next range's base is
 * this range's exclusive upper bound. */
enum dce_divider_id_register_setting {
	DIVIDER_RANGE_01_BASE_DIVIDER_ID = 0X08,
	DIVIDER_RANGE_02_BASE_DIVIDER_ID = 0X40,
	DIVIDER_RANGE_03_BASE_DIVIDER_ID = 0X60,
	DIVIDER_RANGE_MAX_DIVIDER_ID = 0X80
};

/* Step size between each divider within a range.
 * Incrementing the DENTIST_DISPCLK_WDIVIDER by one
 * will increment the divider by this much (scaled x100). */
enum dce_divider_range_step_size {
	DIVIDER_RANGE_01_STEP_SIZE = 25, /* 0.25*/
	DIVIDER_RANGE_02_STEP_SIZE = 50, /* 0.50*/
	DIVIDER_RANGE_03_STEP_SIZE = 100 /* 1.00 */
};
130 static bool dce_divider_range_construct(
131 struct dce_divider_range *div_range,
137 div_range->div_range_start = range_start;
138 div_range->div_range_step = range_step;
139 div_range->did_min = did_min;
140 div_range->did_max = did_max;
142 if (div_range->div_range_step == 0) {
143 div_range->div_range_step = 1;
144 /*div_range_step cannot be zero*/
147 /* Calculate this based on the other inputs.*/
148 /* See DividerRange.h for explanation of */
149 /* the relationship between divider id (DID) and a divider.*/
150 /* Number of Divider IDs = (Maximum Divider ID - Minimum Divider ID)*/
151 /* Maximum divider identified in this range =
152 * (Number of Divider IDs)*Step size between dividers
153 * + The start of this range.*/
154 div_range->div_range_end = (did_max - did_min) * range_step
159 static int dce_divider_range_calc_divider(
160 struct dce_divider_range *div_range,
163 /* Is this DID within our range?*/
164 if ((did < div_range->did_min) || (did >= div_range->did_max))
165 return INVALID_DIVIDER;
167 return ((did - div_range->did_min) * div_range->div_range_step)
168 + div_range->div_range_start;
172 static int dce_divider_range_get_divider(
173 struct dce_divider_range *div_range,
177 int div = INVALID_DIVIDER;
180 for (i = 0; i < ranges_num; i++) {
181 /* Calculate divider with given divider ID*/
182 div = dce_divider_range_calc_divider(&div_range[i], did);
183 /* Found a valid return divider*/
184 if (div != INVALID_DIVIDER)
/*
 * Compute the current DP reference clock, in kHz, from the DENTIST DFS
 * divider programmed in hardware, then de-rate it by the spread-spectrum
 * percentage when SS is enabled on DPREFCLK.  Falls back to 600000 kHz
 * when the divider cannot be resolved.
 */
static int dce_clocks_get_dp_ref_freq(struct dccg *clk)
	struct dce_dccg *clk_dce = TO_DCE_CLOCKS(clk);
	int dprefclk_wdivider;
	int dprefclk_src_sel;
	int dp_ref_clk_khz = 600000; /* default when the divider lookup fails */
	int target_div = INVALID_DIVIDER;

	/* ASSERT DP Reference Clock source is from DFS*/
	REG_GET(DPREFCLK_CNTL, DPREFCLK_SRC_SEL, &dprefclk_src_sel);
	ASSERT(dprefclk_src_sel == 0);

	/* Read the mmDENTIST_DISPCLK_CNTL to get the currently
	 * programmed DID DENTIST_DPREFCLK_WDIVIDER*/
	REG_GET(DENTIST_DISPCLK_CNTL, DENTIST_DPREFCLK_WDIVIDER, &dprefclk_wdivider);

	/* Convert DENTIST_DPREFCLK_WDIVIDER to actual divider*/
	target_div = dce_divider_range_get_divider(
		clk_dce->divider_ranges,
	if (target_div != INVALID_DIVIDER) {
		/* Calculate the current DFS clock, in kHz.
		 * Divider is scaled x100, hence the SCALE_FACTOR multiply. */
		dp_ref_clk_khz = (DIVIDER_RANGE_SCALE_FACTOR
			* clk_dce->dentist_vco_freq_khz) / target_div;

	/* SW will adjust DP REF Clock average value for all purposes
	 * (DP DTO / DP Audio DTO and DP GTC)
	 if clock is spread for all cases:
	 -if SS enabled on DP Ref clock and HW de-spreading enabled with SW
	 calculations for DS_INCR/DS_MODULO (this is planned to be default case)
	 -if SS enabled on DP Ref clock and HW de-spreading enabled with HW
	 calculations (not planned to be used, but average clock should still
	 -if SS enabled on DP Ref clock and HW de-spreading disabled
	 (should not be case with CIK) then SW should program all rates
	 generated according to average value (case as with previous ASICs)
	 */
	if (clk_dce->ss_on_dprefclk && clk_dce->dprefclk_ss_divider != 0) {
		/* NOTE(review): the divide by 200 presumably converts the
		 * stored SS percentage to a half-spread fraction — confirm. */
		struct fixed31_32 ss_percentage = dc_fixpt_div_int(
				dc_fixpt_from_fraction(
					clk_dce->dprefclk_ss_percentage,
					clk_dce->dprefclk_ss_divider), 200);
		struct fixed31_32 adj_dp_ref_clk_khz;

		/* scale the reference clock by (1 - ss_percentage) */
		ss_percentage = dc_fixpt_sub(dc_fixpt_one,
		dp_ref_clk_khz = dc_fixpt_floor(adj_dp_ref_clk_khz);

	return dp_ref_clk_khz;
/* TODO: This is DCN DPREFCLK: it could be programmed by DENTIST by VBIOS
 * or CLK0_CLK11 by SMU. For DCE120, it is always 600MHz. Will re-visit
 * clock implementation
 */
/*
 * Workaround variant: returns a fixed 600000 kHz DP reference clock
 * (instead of reading the DENTIST divider), de-rated by the DPREFCLK
 * spread-spectrum percentage when SS is enabled.
 */
static int dce_clocks_get_dp_ref_freq_wrkaround(struct dccg *clk)
	struct dce_dccg *clk_dce = TO_DCE_CLOCKS(clk);
	int dp_ref_clk_khz = 600000; /* fixed DPREFCLK for this workaround */

	if (clk_dce->ss_on_dprefclk && clk_dce->dprefclk_ss_divider != 0) {
		/* NOTE(review): the divide by 200 presumably converts the
		 * stored SS percentage to a half-spread fraction — confirm. */
		struct fixed31_32 ss_percentage = dc_fixpt_div_int(
				dc_fixpt_from_fraction(
					clk_dce->dprefclk_ss_percentage,
					clk_dce->dprefclk_ss_divider), 200);
		struct fixed31_32 adj_dp_ref_clk_khz;

		/* scale the reference clock by (1 - ss_percentage) */
		ss_percentage = dc_fixpt_sub(dc_fixpt_one,
		dp_ref_clk_khz = dc_fixpt_floor(adj_dp_ref_clk_khz);

	return dp_ref_clk_khz;
/*
 * Pick the lowest power state whose per-state max display and pixel
 * clocks satisfy the requested dispclk/phyclk.  Returns
 * DM_PP_CLOCKS_STATE_INVALID when even the max state cannot supply the
 * requested display clock.
 */
static enum dm_pp_clocks_state dce_get_required_clocks_state(
	struct dc_clocks *req_clocks)
	struct dce_dccg *clk_dce = TO_DCE_CLOCKS(clk);
	enum dm_pp_clocks_state low_req_clk;

	/* Iterate from highest supported to lowest valid state, and update
	 * lowest RequiredState with the lowest state that satisfies
	 * all required clocks
	 */
	for (i = clk->max_clks_state; i >= DM_PP_CLOCKS_STATE_ULTRA_LOW; i--)
		if (req_clocks->dispclk_khz >
				clk_dce->max_clks_by_state[i].display_clk_khz
			|| req_clocks->phyclk_khz >
				clk_dce->max_clks_by_state[i].pixel_clk_khz)

	if (low_req_clk > clk->max_clks_state) {
		/* set max clock state for high phyclock, invalid on exceeding display clock */
		if (clk_dce->max_clks_by_state[clk->max_clks_state].display_clk_khz
				< req_clocks->dispclk_khz)
			low_req_clk = DM_PP_CLOCKS_STATE_INVALID;
		low_req_clk = clk->max_clks_state;
/*
 * Program the display engine clock through the VBIOS
 * program_display_engine_pll command table.  Returns the clock actually
 * in effect, in kHz (the DFS-bypass clock when bypass is enabled).
 */
static int dce_set_clock(
	int requested_clk_khz)
	struct dce_dccg *clk_dce = TO_DCE_CLOCKS(clk);
	struct bp_pixel_clock_parameters pxl_clk_params = { 0 };
	struct dc_bios *bp = clk->ctx->dc_bios;
	int actual_clock = requested_clk_khz;

	/* Make sure requested clock isn't lower than minimum threshold*/
	if (requested_clk_khz > 0)
		requested_clk_khz = max(requested_clk_khz,
				clk_dce->dentist_vco_freq_khz / 64); /* floor: VCO/64 */

	/* Prepare to program display clock*/
	pxl_clk_params.target_pixel_clock = requested_clk_khz;
	pxl_clk_params.pll_id = CLOCK_SOURCE_ID_DFS;

	bp->funcs->program_display_engine_pll(bp, &pxl_clk_params);

	if (clk_dce->dfs_bypass_enabled) {

		/* Cache the fixed display clock*/
		clk_dce->dfs_bypass_disp_clk =
			pxl_clk_params.dfs_bypass_display_clock;
		actual_clock = pxl_clk_params.dfs_bypass_display_clock;

	/* from power down, we need mark the clock state as ClocksStateNominal
	 * from HWReset, so when resume we will call pplib voltage regulator.*/
	if (requested_clk_khz == 0)
		clk->cur_min_clks_state = DM_PP_CLOCKS_STATE_NOMINAL;
/*
 * Program the display clock via dce_set_clock(), then re-tune the DMCU
 * PSR wait loop, which depends on the resulting display clock frequency.
 * Returns the clock actually programmed, in kHz.
 */
static int dce_psr_set_clock(
	int requested_clk_khz)
	struct dce_dccg *clk_dce = TO_DCE_CLOCKS(clk);
	struct dc_context *ctx = clk_dce->base.ctx;
	struct dc *core_dc = ctx->dc;
	struct dmcu *dmcu = core_dc->res_pool->dmcu;
	int actual_clk_khz = requested_clk_khz;

	actual_clk_khz = dce_set_clock(clk, requested_clk_khz);

	/* NOTE(review): wait-loop argument = clock in MHz divided by 7;
	 * exact unit semantics to be confirmed against the DMCU code. */
	dmcu->funcs->set_psr_wait_loop(dmcu, actual_clk_khz / 1000 / 7);
	return actual_clk_khz;
/*
 * DCE 11.2+ display clock programming through the SetDceClock VBIOS
 * command table: programs DISPCLK first, then DPREFCLK (VBIOS chooses the
 * DPREFCLK frequency), and finally re-tunes the DMCU PSR wait loop when
 * the effective display clock changed.  Returns the clock actually
 * programmed, in kHz.
 */
static int dce112_set_clock(
	int requested_clk_khz)
	struct dce_dccg *clk_dce = TO_DCE_CLOCKS(clk);
	struct bp_set_dce_clock_parameters dce_clk_params;
	struct dc_bios *bp = clk->ctx->dc_bios;
	struct dc *core_dc = clk->ctx->dc;
	struct dmcu *dmcu = core_dc->res_pool->dmcu;
	int actual_clock = requested_clk_khz;
	/* Prepare to program display clock*/
	memset(&dce_clk_params, 0, sizeof(dce_clk_params));

	/* Make sure requested clock isn't lower than minimum threshold*/
	if (requested_clk_khz > 0)
		requested_clk_khz = max(requested_clk_khz,
				clk_dce->dentist_vco_freq_khz / 62); /* floor: VCO/62 */

	dce_clk_params.target_clock_frequency = requested_clk_khz;
	dce_clk_params.pll_id = CLOCK_SOURCE_ID_DFS;
	dce_clk_params.clock_type = DCECLOCK_TYPE_DISPLAY_CLOCK;

	bp->funcs->set_dce_clock(bp, &dce_clk_params);
	actual_clock = dce_clk_params.target_clock_frequency;

	/* from power down, we need mark the clock state as ClocksStateNominal
	 * from HWReset, so when resume we will call pplib voltage regulator.*/
	if (requested_clk_khz == 0)
		clk->cur_min_clks_state = DM_PP_CLOCKS_STATE_NOMINAL;

	/*Program DP ref Clock*/
	/*VBIOS will determine DPREFCLK frequency, so we don't set it*/
	dce_clk_params.target_clock_frequency = 0;
	dce_clk_params.clock_type = DCECLOCK_TYPE_DPREFCLK;
	if (!ASICREV_IS_VEGA20_P(clk->ctx->asic_id.hw_internal_rev))
		dce_clk_params.flags.USE_GENLOCK_AS_SOURCE_FOR_DPREFCLK =
			(dce_clk_params.pll_id ==
					CLOCK_SOURCE_COMBO_DISPLAY_PLL0);
		/* NOTE(review): an "else" line appears to be missing from
		 * this extract before the following assignment — confirm. */
		dce_clk_params.flags.USE_GENLOCK_AS_SOURCE_FOR_DPREFCLK = false;

	bp->funcs->set_dce_clock(bp, &dce_clk_params);

	if (!IS_FPGA_MAXIMUS_DC(core_dc->ctx->dce_environment)) {
		/* only re-tune the PSR wait loop when the clock changed */
		if (clk_dce->dfs_bypass_disp_clk != actual_clock)
			dmcu->funcs->set_psr_wait_loop(dmcu,
					actual_clock / 1000 / 7);

	clk_dce->dfs_bypass_disp_clk = actual_clock;
/*
 * Read integrated (APU/SBIOS) clock data from VBIOS: the DENTIST VCO
 * frequency (falling back to the firmware-info SMU GPU PLL frequency and
 * then to a hard-coded 3.6 GHz), the per-power-state max display clocks,
 * and the DFS-bypass capability bit.
 */
static void dce_clock_read_integrated_info(struct dce_dccg *clk_dce)
	struct dc_debug *debug = &clk_dce->base.ctx->dc->debug;
	struct dc_bios *bp = clk_dce->base.ctx->dc_bios;
	struct integrated_info info = { { { 0 } } };
	struct dc_firmware_info fw_info = { { 0 } };

	if (bp->integrated_info)
		info = *bp->integrated_info;

	clk_dce->dentist_vco_freq_khz = info.dentist_vco_freq;
	if (clk_dce->dentist_vco_freq_khz == 0) {
		/* fall back to the SMU GPU PLL output from firmware info */
		bp->funcs->get_firmware_info(bp, &fw_info);
		clk_dce->dentist_vco_freq_khz =
			fw_info.smu_gpu_pll_output_freq;
		/* last-resort hard-coded default: 3.6 GHz */
		if (clk_dce->dentist_vco_freq_khz == 0)
			clk_dce->dentist_vco_freq_khz = 3600000;

	/*update the maximum display clock for each power state*/
	for (i = 0; i < NUMBER_OF_DISP_CLK_VOLTAGE; ++i) {
		enum dm_pp_clocks_state clk_state = DM_PP_CLOCKS_STATE_INVALID;
		/* NOTE(review): the switch on the voltage index is missing
		 * from this extract; only the case bodies survive. */
		clk_state = DM_PP_CLOCKS_STATE_ULTRA_LOW;
		clk_state = DM_PP_CLOCKS_STATE_LOW;
		clk_state = DM_PP_CLOCKS_STATE_NOMINAL;
		clk_state = DM_PP_CLOCKS_STATE_PERFORMANCE;
		clk_state = DM_PP_CLOCKS_STATE_INVALID;

	/*Do not allow bad VBIOS/SBIOS to override with invalid values,
	 * check for > 100MHz*/
	if (info.disp_clk_voltage[i].max_supported_clk >= 100000)
		clk_dce->max_clks_by_state[clk_state].display_clk_khz =
			info.disp_clk_voltage[i].max_supported_clk;

	/* enable DFS bypass only when VBIOS reports the capability */
	if (!debug->disable_dfs_bypass && bp->integrated_info)
		if (bp->integrated_info->gpu_cap_info & DFS_BYPASS_ENABLE)
			clk_dce->dfs_bypass_enabled = true;
/*
 * Query VBIOS spread-spectrum tables — first for the GPU PLL, then for
 * DisplayPort — and, when SS is enabled (non-zero percentage), record the
 * percentage and divider later used to de-rate the DP reference clock.
 */
static void dce_clock_read_ss_info(struct dce_dccg *clk_dce)
	struct dc_bios *bp = clk_dce->base.ctx->dc_bios;
	int ss_info_num = bp->funcs->get_ss_entry_number(
			bp, AS_SIGNAL_TYPE_GPU_PLL);

	struct spread_spectrum_info info = { { 0 } };
	enum bp_result result = bp->funcs->get_spread_spectrum_info(
			bp, AS_SIGNAL_TYPE_GPU_PLL, 0, &info);

	/* Based on VBIOS, VBIOS will keep entry for GPU PLL SS
	 * even if SS not enabled and in that case
	 * SSInfo.spreadSpectrumPercentage !=0 would be sign
	 */
	if (result == BP_RESULT_OK &&
			info.spread_spectrum_percentage != 0) {
		clk_dce->ss_on_dprefclk = true;
		clk_dce->dprefclk_ss_divider = info.spread_percentage_divider;

		if (info.type.CENTER_MODE == 0) {
			/* TODO: Currently for DP Reference clock we
			 * need only SS percentage for
			 */
			clk_dce->dprefclk_ss_percentage =
					info.spread_spectrum_percentage;

	/* fall back to the DisplayPort-signal SS entry */
	result = bp->funcs->get_spread_spectrum_info(
			bp, AS_SIGNAL_TYPE_DISPLAY_PORT, 0, &info);

	/* Based on VBIOS, VBIOS will keep entry for DPREFCLK SS
	 * even if SS not enabled and in that case
	 * SSInfo.spreadSpectrumPercentage !=0 would be sign
	 */
	if (result == BP_RESULT_OK &&
			info.spread_spectrum_percentage != 0) {
		clk_dce->ss_on_dprefclk = true;
		clk_dce->dprefclk_ss_divider = info.spread_percentage_divider;

		if (info.type.CENTER_MODE == 0) {
			/* Currently for DP Reference clock we
			 * need only SS percentage for
			 */
			clk_dce->dprefclk_ss_percentage =
					info.spread_spectrum_percentage;
/*
 * DCE 12 clock update: raise clocks immediately, lower them only when the
 * caller says it is safe; every change is forwarded to pplib as a
 * clock-for-voltage request.
 */
static void dce12_update_clocks(struct dccg *dccg,
		struct dc_clocks *new_clocks,
	struct dm_pp_clock_for_voltage_req clock_voltage_req = {0};

	/* DISPCLK: program HW and notify pplib on raise or safe lower */
	if ((new_clocks->dispclk_khz < dccg->clks.dispclk_khz && safe_to_lower)
			|| new_clocks->dispclk_khz > dccg->clks.dispclk_khz) {
		clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DISPLAY_CLK;
		clock_voltage_req.clocks_in_khz = new_clocks->dispclk_khz;
		dccg->funcs->set_dispclk(dccg, new_clocks->dispclk_khz);
		dccg->clks.dispclk_khz = new_clocks->dispclk_khz;

		dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);

	/* PHYCLK: only the voltage request, no direct HW programming here */
	if ((new_clocks->phyclk_khz < dccg->clks.phyclk_khz && safe_to_lower)
			|| new_clocks->phyclk_khz > dccg->clks.phyclk_khz) {
		clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DISPLAYPHYCLK;
		clock_voltage_req.clocks_in_khz = new_clocks->phyclk_khz;
		dccg->clks.phyclk_khz = new_clocks->phyclk_khz;

		dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
552 static void dcn_update_clocks(struct dccg *dccg,
553 struct dc_clocks *new_clocks,
556 struct dm_pp_clock_for_voltage_req clock_voltage_req = {0};
557 bool send_request_to_increase = false;
558 bool send_request_to_lower = false;
560 if (new_clocks->dispclk_khz > dccg->clks.dispclk_khz
561 || new_clocks->phyclk_khz > dccg->clks.phyclk_khz
562 || new_clocks->fclk_khz > dccg->clks.fclk_khz
563 || new_clocks->dcfclk_khz > dccg->clks.dcfclk_khz)
564 send_request_to_increase = true;
566 #ifdef CONFIG_DRM_AMD_DC_DCN1_0
567 if (send_request_to_increase
569 struct dc *core_dc = dccg->ctx->dc;
571 /*use dcfclk to request voltage*/
572 clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DCFCLK;
573 clock_voltage_req.clocks_in_khz = dcn_find_dcfclk_suits_all(core_dc, new_clocks);
574 dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
578 if ((new_clocks->dispclk_khz < dccg->clks.dispclk_khz && safe_to_lower)
579 || new_clocks->dispclk_khz > dccg->clks.dispclk_khz) {
580 clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DISPLAY_CLK;
581 clock_voltage_req.clocks_in_khz = new_clocks->dispclk_khz;
582 /* TODO: ramp up - dccg->funcs->set_dispclk(dccg, new_clocks->dispclk_khz);*/
583 dccg->clks.dispclk_khz = new_clocks->dispclk_khz;
585 dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
586 send_request_to_lower = true;
589 if ((new_clocks->phyclk_khz < dccg->clks.phyclk_khz && safe_to_lower)
590 || new_clocks->phyclk_khz > dccg->clks.phyclk_khz) {
591 dccg->clks.phyclk_khz = new_clocks->phyclk_khz;
592 clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DISPLAYPHYCLK;
593 clock_voltage_req.clocks_in_khz = new_clocks->phyclk_khz;
595 dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
596 send_request_to_lower = true;
599 if ((new_clocks->fclk_khz < dccg->clks.fclk_khz && safe_to_lower)
600 || new_clocks->fclk_khz > dccg->clks.fclk_khz) {
601 dccg->clks.phyclk_khz = new_clocks->fclk_khz;
602 clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_FCLK;
603 clock_voltage_req.clocks_in_khz = new_clocks->fclk_khz;
605 dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
606 send_request_to_lower = true;
609 if ((new_clocks->dcfclk_khz < dccg->clks.dcfclk_khz && safe_to_lower)
610 || new_clocks->dcfclk_khz > dccg->clks.dcfclk_khz) {
611 dccg->clks.phyclk_khz = new_clocks->dcfclk_khz;
612 clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DCFCLK;
613 clock_voltage_req.clocks_in_khz = new_clocks->dcfclk_khz;
615 send_request_to_lower = true;
618 #ifdef CONFIG_DRM_AMD_DC_DCN1_0
619 if (!send_request_to_increase && send_request_to_lower
621 struct dc *core_dc = dccg->ctx->dc;
623 /*use dcfclk to request voltage*/
624 clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DCFCLK;
625 clock_voltage_req.clocks_in_khz = dcn_find_dcfclk_suits_all(core_dc, new_clocks);
626 dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
/*
 * Legacy DCE clock update: request a power-level change from pplib based
 * on the required clocks state (raise immediately, lower only when safe),
 * then program DISPCLK when it changed.
 */
static void dce_update_clocks(struct dccg *dccg,
		struct dc_clocks *new_clocks,
	struct dm_pp_power_level_change_request level_change_req;

	level_change_req.power_level = dce_get_required_clocks_state(dccg, new_clocks);
	/* get max clock state from PPLIB */
	if ((level_change_req.power_level < dccg->cur_min_clks_state && safe_to_lower)
			|| level_change_req.power_level > dccg->cur_min_clks_state) {
		/* only cache the new state when pplib accepted the request */
		if (dm_pp_apply_power_level_change_request(dccg->ctx, &level_change_req))
			dccg->cur_min_clks_state = level_change_req.power_level;

	if ((new_clocks->dispclk_khz < dccg->clks.dispclk_khz && safe_to_lower)
			|| new_clocks->dispclk_khz > dccg->clks.dispclk_khz) {
		dccg->funcs->set_dispclk(dccg, new_clocks->dispclk_khz);
		dccg->clks.dispclk_khz = new_clocks->dispclk_khz;
/* Per-ASIC vtables binding the clock query/programming/update routines. */
static const struct display_clock_funcs dcn_funcs = {
	.get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq_wrkaround,
	.set_dispclk = dce112_set_clock,
	.update_clocks = dcn_update_clocks

static const struct display_clock_funcs dce120_funcs = {
	.get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq_wrkaround,
	.set_dispclk = dce112_set_clock,
	.update_clocks = dce12_update_clocks

static const struct display_clock_funcs dce112_funcs = {
	.get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq,
	.set_dispclk = dce112_set_clock,
	.update_clocks = dce_update_clocks

static const struct display_clock_funcs dce110_funcs = {
	.get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq,
	.set_dispclk = dce_psr_set_clock,
	.update_clocks = dce_update_clocks

/* default vtable used by dce_dccg_construct() */
static const struct display_clock_funcs dce_funcs = {
	.get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq,
	.set_dispclk = dce_set_clock,
	.update_clocks = dce_update_clocks
/*
 * Common constructor shared by all dce*_dccg_create() variants: wires the
 * register/shift/mask tables, sets defaults (no SS on DPREFCLK, NOMINAL
 * max state), reads VBIOS integrated and spread-spectrum info, and builds
 * the three DENTIST divider-ID ranges used for DPREFCLK readback.
 */
static void dce_dccg_construct(
	struct dce_dccg *clk_dce,
	struct dc_context *ctx,
	const struct dce_disp_clk_registers *regs,
	const struct dce_disp_clk_shift *clk_shift,
	const struct dce_disp_clk_mask *clk_mask)
	struct dccg *base = &clk_dce->base;

	base->funcs = &dce_funcs; /* default vtable; creators may override */

	clk_dce->regs = regs;
	clk_dce->clk_shift = clk_shift;
	clk_dce->clk_mask = clk_mask;

	clk_dce->dfs_bypass_disp_clk = 0;

	/* SS defaults: disabled until VBIOS says otherwise */
	clk_dce->dprefclk_ss_percentage = 0;
	clk_dce->dprefclk_ss_divider = 1000;
	clk_dce->ss_on_dprefclk = false;

	base->max_clks_state = DM_PP_CLOCKS_STATE_NOMINAL;
	base->cur_min_clks_state = DM_PP_CLOCKS_STATE_INVALID;

	dce_clock_read_integrated_info(clk_dce);
	dce_clock_read_ss_info(clk_dce);

	/* DID range 1: [0x08, 0x40) -> dividers 2.00..., step 0.25 */
	dce_divider_range_construct(
		&clk_dce->divider_ranges[DIVIDER_RANGE_01],
		DIVIDER_RANGE_01_START,
		DIVIDER_RANGE_01_STEP_SIZE,
		DIVIDER_RANGE_01_BASE_DIVIDER_ID,
		DIVIDER_RANGE_02_BASE_DIVIDER_ID);
	/* DID range 2: [0x40, 0x60) -> dividers 16.00..., step 0.50 */
	dce_divider_range_construct(
		&clk_dce->divider_ranges[DIVIDER_RANGE_02],
		DIVIDER_RANGE_02_START,
		DIVIDER_RANGE_02_STEP_SIZE,
		DIVIDER_RANGE_02_BASE_DIVIDER_ID,
		DIVIDER_RANGE_03_BASE_DIVIDER_ID);
	/* DID range 3: [0x60, 0x80) -> dividers 32.00..., step 1.00 */
	dce_divider_range_construct(
		&clk_dce->divider_ranges[DIVIDER_RANGE_03],
		DIVIDER_RANGE_03_START,
		DIVIDER_RANGE_03_STEP_SIZE,
		DIVIDER_RANGE_03_BASE_DIVIDER_ID,
		DIVIDER_RANGE_MAX_DIVIDER_ID);
/*
 * Allocate and construct a DCE 8 display-clock-generator object using the
 * dce80 per-state clock table.  Returns NULL on allocation failure;
 * caller owns the object and frees it via dce_dccg_destroy().
 */
struct dccg *dce_dccg_create(
	struct dc_context *ctx,
	const struct dce_disp_clk_registers *regs,
	const struct dce_disp_clk_shift *clk_shift,
	const struct dce_disp_clk_mask *clk_mask)
	struct dce_dccg *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL);

	if (clk_dce == NULL) {

	memcpy(clk_dce->max_clks_by_state,
		dce80_max_clks_by_state,
		sizeof(dce80_max_clks_by_state));

		clk_dce, ctx, regs, clk_shift, clk_mask);

	return &clk_dce->base;
/*
 * Allocate and construct a DCE 11.0 dccg: dce110 clock table and the
 * PSR-aware set_dispclk vtable.  Returns NULL on allocation failure.
 */
struct dccg *dce110_dccg_create(
	struct dc_context *ctx,
	const struct dce_disp_clk_registers *regs,
	const struct dce_disp_clk_shift *clk_shift,
	const struct dce_disp_clk_mask *clk_mask)
	struct dce_dccg *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL);

	if (clk_dce == NULL) {

	memcpy(clk_dce->max_clks_by_state,
		dce110_max_clks_by_state,
		sizeof(dce110_max_clks_by_state));

		clk_dce, ctx, regs, clk_shift, clk_mask);

	clk_dce->base.funcs = &dce110_funcs;

	return &clk_dce->base;
/*
 * Allocate and construct a DCE 11.2 dccg: dce112 clock table and the
 * SetDceClock-based vtable.  Returns NULL on allocation failure.
 */
struct dccg *dce112_dccg_create(
	struct dc_context *ctx,
	const struct dce_disp_clk_registers *regs,
	const struct dce_disp_clk_shift *clk_shift,
	const struct dce_disp_clk_mask *clk_mask)
	struct dce_dccg *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL);

	if (clk_dce == NULL) {

	memcpy(clk_dce->max_clks_by_state,
		dce112_max_clks_by_state,
		sizeof(dce112_max_clks_by_state));

		clk_dce, ctx, regs, clk_shift, clk_mask);

	clk_dce->base.funcs = &dce112_funcs;

	return &clk_dce->base;
/*
 * Allocate and construct a DCE 12 dccg: dce120 clock table, no register
 * tables (SMU/VBIOS programmed), dce120 vtable.  Returns NULL on
 * allocation failure.
 */
struct dccg *dce120_dccg_create(struct dc_context *ctx)
	struct dce_dccg *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL);

	if (clk_dce == NULL) {

	memcpy(clk_dce->max_clks_by_state,
		dce120_max_clks_by_state,
		sizeof(dce120_max_clks_by_state));

		clk_dce, ctx, NULL, NULL, NULL);

	clk_dce->base.funcs = &dce120_funcs;

	return &clk_dce->base;
/*
 * Allocate and construct a DCN dccg (no per-state clock table; clocks are
 * driven through dcn_update_clocks).  Returns NULL on allocation failure.
 */
struct dccg *dcn_dccg_create(struct dc_context *ctx)
	struct dce_dccg *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL);

	if (clk_dce == NULL) {

	/* TODO strip out useful stuff out of dce constructor */
		clk_dce, ctx, NULL, NULL, NULL);

	clk_dce->base.funcs = &dcn_funcs;

	return &clk_dce->base;
842 void dce_dccg_destroy(struct dccg **dccg)
844 struct dce_dccg *clk_dce = TO_DCE_CLOCKS(*dccg);