drm/amd/display: rename display clock block to dccg
drivers/gpu/drm/amd/display/dc/dce/dce_clocks.c
/*
 * Copyright 2012-16 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: AMD
 *
 */

#include "dce_clocks.h"
#include "dm_services.h"
#include "reg_helper.h"
#include "fixed31_32.h"
#include "bios_parser_interface.h"
#include "dc.h"
#include "dmcu.h"
#if defined(CONFIG_DRM_AMD_DC_DCN1_0)
#include "dcn_calcs.h"
#endif
#include "core_types.h"
#include "dc_types.h"
#include "dal_asic_id.h"

#define TO_DCE_CLOCKS(clocks)\
        container_of(clocks, struct dce_dccg, base)

#define REG(reg) \
        (clk_dce->regs->reg)

#undef FN
#define FN(reg_name, field_name) \
        clk_dce->clk_shift->field_name, clk_dce->clk_mask->field_name

#define CTX \
        clk_dce->base.ctx
#define DC_LOGGER \
        clk->ctx->logger

/* Max clock values for each state indexed by "enum clocks_state": */
static const struct state_dependent_clocks dce80_max_clks_by_state[] = {
/* ClocksStateInvalid - should not be used */
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/* ClocksStateUltraLow - not expected to be used for DCE 8.0 */
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/* ClocksStateLow */
{ .display_clk_khz = 352000, .pixel_clk_khz = 330000 },
/* ClocksStateNominal */
{ .display_clk_khz = 600000, .pixel_clk_khz = 400000 },
/* ClocksStatePerformance */
{ .display_clk_khz = 600000, .pixel_clk_khz = 400000 } };

static const struct state_dependent_clocks dce110_max_clks_by_state[] = {
/* ClocksStateInvalid - should not be used */
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/* ClocksStateUltraLow - currently by HW design team not supposed to be used */
{ .display_clk_khz = 352000, .pixel_clk_khz = 330000 },
/* ClocksStateLow */
{ .display_clk_khz = 352000, .pixel_clk_khz = 330000 },
/* ClocksStateNominal */
{ .display_clk_khz = 467000, .pixel_clk_khz = 400000 },
/* ClocksStatePerformance */
{ .display_clk_khz = 643000, .pixel_clk_khz = 400000 } };

static const struct state_dependent_clocks dce112_max_clks_by_state[] = {
/* ClocksStateInvalid - should not be used */
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/* ClocksStateUltraLow - currently by HW design team not supposed to be used */
{ .display_clk_khz = 389189, .pixel_clk_khz = 346672 },
/* ClocksStateLow */
{ .display_clk_khz = 459000, .pixel_clk_khz = 400000 },
/* ClocksStateNominal */
{ .display_clk_khz = 667000, .pixel_clk_khz = 600000 },
/* ClocksStatePerformance */
{ .display_clk_khz = 1132000, .pixel_clk_khz = 600000 } };

static const struct state_dependent_clocks dce120_max_clks_by_state[] = {
/* ClocksStateInvalid - should not be used */
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/* ClocksStateUltraLow - currently by HW design team not supposed to be used */
{ .display_clk_khz = 0, .pixel_clk_khz = 0 },
/* ClocksStateLow */
{ .display_clk_khz = 460000, .pixel_clk_khz = 400000 },
/* ClocksStateNominal */
{ .display_clk_khz = 670000, .pixel_clk_khz = 600000 },
/* ClocksStatePerformance */
{ .display_clk_khz = 1133000, .pixel_clk_khz = 600000 } };

/* Starting point for each divider range. */
enum dce_divider_range_start {
        DIVIDER_RANGE_01_START = 200, /* 2.00 */
        DIVIDER_RANGE_02_START = 1600, /* 16.00 */
        DIVIDER_RANGE_03_START = 3200, /* 32.00 */
        DIVIDER_RANGE_SCALE_FACTOR = 100 /* Results are scaled up by 100. */
};

/* Ranges for divider identifiers (Divider ID or DID)
 * mmDENTIST_DISPCLK_CNTL.DENTIST_DISPCLK_WDIVIDER
 */
enum dce_divider_id_register_setting {
        DIVIDER_RANGE_01_BASE_DIVIDER_ID = 0X08,
        DIVIDER_RANGE_02_BASE_DIVIDER_ID = 0X40,
        DIVIDER_RANGE_03_BASE_DIVIDER_ID = 0X60,
        DIVIDER_RANGE_MAX_DIVIDER_ID = 0X80
};

/* Step size between each divider within a range.
 * Incrementing the DENTIST_DISPCLK_WDIVIDER by one
 * will increment the divider by this much.
 */
enum dce_divider_range_step_size {
        DIVIDER_RANGE_01_STEP_SIZE = 25, /* 0.25 */
        DIVIDER_RANGE_02_STEP_SIZE = 50, /* 0.50 */
        DIVIDER_RANGE_03_STEP_SIZE = 100 /* 1.00 */
};
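
/*
 * Illustrative worked example (not part of the driver): with the ranges
 * above, a DID maps to a (x100 scaled) divider as
 *
 *   divider = (DID - range_base_DID) * range_step + range_start
 *
 * e.g. DID 0x10 falls in range 01: (0x10 - 0x08) * 25  + 200  = 400  (4.00)
 *      DID 0x4A falls in range 02: (0x4A - 0x40) * 50  + 1600 = 2100 (21.00)
 *      DID 0x7F falls in range 03: (0x7F - 0x60) * 100 + 3200 = 6300 (63.00)
 */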

static bool dce_divider_range_construct(
        struct dce_divider_range *div_range,
        int range_start,
        int range_step,
        int did_min,
        int did_max)
{
        div_range->div_range_start = range_start;
        div_range->div_range_step = range_step;
        div_range->did_min = did_min;
        div_range->did_max = did_max;

        if (div_range->div_range_step == 0) {
                div_range->div_range_step = 1;
                /* div_range_step cannot be zero */
                BREAK_TO_DEBUGGER();
        }
        /* Calculate this based on the other inputs.
         * See DividerRange.h for an explanation of
         * the relationship between divider id (DID) and a divider.
         * Number of Divider IDs = (Maximum Divider ID - Minimum Divider ID)
         * Maximum divider identified in this range =
         *  (Number of Divider IDs) * Step size between dividers
         *  + The start of this range.
         */
        div_range->div_range_end = (did_max - did_min) * range_step
                + range_start;
        return true;
}

static int dce_divider_range_calc_divider(
        struct dce_divider_range *div_range,
        int did)
{
        /* Is this DID within our range? */
        if ((did < div_range->did_min) || (did >= div_range->did_max))
                return INVALID_DIVIDER;

        return ((did - div_range->did_min) * div_range->div_range_step)
                        + div_range->div_range_start;
}

static int dce_divider_range_get_divider(
        struct dce_divider_range *div_range,
        int ranges_num,
        int did)
{
        int div = INVALID_DIVIDER;
        int i;

        for (i = 0; i < ranges_num; i++) {
                /* Calculate divider with given divider ID */
                div = dce_divider_range_calc_divider(&div_range[i], did);
                /* Found a valid return divider */
                if (div != INVALID_DIVIDER)
                        break;
        }
        return div;
}
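
/*
 * Minimal usage sketch (illustrative only; clk_khz is a placeholder local,
 * and the divider_ranges array is the one built in dce_dccg_construct()
 * below): convert a WDIVIDER DID read from the register into a x100 divider,
 * then into a clock in kHz.
 *
 *   int did = 0x4A;                              // from DENTIST_DISPCLK_CNTL
 *   int div = dce_divider_range_get_divider(
 *                   clk_dce->divider_ranges, DIVIDER_RANGE_MAX, did);
 *   if (div != INVALID_DIVIDER)                  // div == 2100 here (21.00)
 *           clk_khz = (DIVIDER_RANGE_SCALE_FACTOR *
 *                           clk_dce->dentist_vco_freq_khz) / div;
 */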

static int dce_clocks_get_dp_ref_freq(struct dccg *clk)
{
        struct dce_dccg *clk_dce = TO_DCE_CLOCKS(clk);
        int dprefclk_wdivider;
        int dprefclk_src_sel;
        int dp_ref_clk_khz = 600000;
        int target_div = INVALID_DIVIDER;

        /* ASSERT DP Reference Clock source is from DFS */
        REG_GET(DPREFCLK_CNTL, DPREFCLK_SRC_SEL, &dprefclk_src_sel);
        ASSERT(dprefclk_src_sel == 0);

        /* Read mmDENTIST_DISPCLK_CNTL to get the currently
         * programmed DID DENTIST_DPREFCLK_WDIVIDER
         */
        REG_GET(DENTIST_DISPCLK_CNTL, DENTIST_DPREFCLK_WDIVIDER, &dprefclk_wdivider);

        /* Convert DENTIST_DPREFCLK_WDIVIDER to an actual divider */
        target_div = dce_divider_range_get_divider(
                        clk_dce->divider_ranges,
                        DIVIDER_RANGE_MAX,
                        dprefclk_wdivider);

        if (target_div != INVALID_DIVIDER) {
                /* Calculate the current DFS clock, in kHz. */
                dp_ref_clk_khz = (DIVIDER_RANGE_SCALE_FACTOR
                        * clk_dce->dentist_vco_freq_khz) / target_div;
        }

        /* SW will adjust the DP REF Clock average value for all purposes
         * (DP DTO / DP Audio DTO and DP GTC) if the clock is spread,
         * in all of these cases:
         * - SS enabled on DP Ref clock and HW de-spreading enabled with SW
         *   calculations for DS_INCR/DS_MODULO (planned to be the default case)
         * - SS enabled on DP Ref clock and HW de-spreading enabled with HW
         *   calculations (not planned to be used, but the average clock should
         *   still be valid)
         * - SS enabled on DP Ref clock and HW de-spreading disabled
         *   (should not be the case with CIK); then SW should program all rates
         *   generated according to the average value (as on previous ASICs)
         */
        if (clk_dce->ss_on_dprefclk && clk_dce->dprefclk_ss_divider != 0) {
                struct fixed31_32 ss_percentage = dc_fixpt_div_int(
                                dc_fixpt_from_fraction(
                                                clk_dce->dprefclk_ss_percentage,
                                                clk_dce->dprefclk_ss_divider), 200);
                struct fixed31_32 adj_dp_ref_clk_khz;

                ss_percentage = dc_fixpt_sub(dc_fixpt_one,
                                                                ss_percentage);
                adj_dp_ref_clk_khz =
                        dc_fixpt_mul_int(
                                ss_percentage,
                                dp_ref_clk_khz);
                dp_ref_clk_khz = dc_fixpt_floor(adj_dp_ref_clk_khz);
        }

        return dp_ref_clk_khz;
}
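
/*
 * Worked example (illustrative numbers only, not taken from any real board):
 * with dentist_vco_freq_khz = 3600000 and DENTIST_DPREFCLK_WDIVIDER = 0x1C,
 * the divider is (0x1C - 0x08) * 25 + 200 = 700 (7.00), so
 *
 *   dp_ref_clk_khz = 100 * 3600000 / 700 = 514285 kHz
 *
 * If SS is reported as a 0.3% downspread (e.g. dprefclk_ss_percentage = 30
 * with dprefclk_ss_divider = 100), the average clock is reduced by half the
 * spread: 514285 * (1 - 30 / 100 / 200) = 514285 * 0.9985 ~= 513513 kHz.
 */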

/* TODO: This is the DCN DPREFCLK: it could be programmed by DENTIST via
 * VBIOS, or as CLK0_CLK11 by the SMU. For DCE120 it is always 600 MHz.
 * Will revisit the clock implementation.
 */
static int dce_clocks_get_dp_ref_freq_wrkaround(struct dccg *clk)
{
        struct dce_dccg *clk_dce = TO_DCE_CLOCKS(clk);
        int dp_ref_clk_khz = 600000;

        if (clk_dce->ss_on_dprefclk && clk_dce->dprefclk_ss_divider != 0) {
                struct fixed31_32 ss_percentage = dc_fixpt_div_int(
                                dc_fixpt_from_fraction(
                                                clk_dce->dprefclk_ss_percentage,
                                                clk_dce->dprefclk_ss_divider), 200);
                struct fixed31_32 adj_dp_ref_clk_khz;

                ss_percentage = dc_fixpt_sub(dc_fixpt_one,
                                                                ss_percentage);
                adj_dp_ref_clk_khz =
                        dc_fixpt_mul_int(
                                ss_percentage,
                                dp_ref_clk_khz);
                dp_ref_clk_khz = dc_fixpt_floor(adj_dp_ref_clk_khz);
        }

        return dp_ref_clk_khz;
}

static enum dm_pp_clocks_state dce_get_required_clocks_state(
        struct dccg *clk,
        struct dc_clocks *req_clocks)
{
        struct dce_dccg *clk_dce = TO_DCE_CLOCKS(clk);
        int i;
        enum dm_pp_clocks_state low_req_clk;

        /* Iterate from the highest supported to the lowest valid state, and
         * update lowest RequiredState with the lowest state that satisfies
         * all required clocks
         */
        for (i = clk->max_clks_state; i >= DM_PP_CLOCKS_STATE_ULTRA_LOW; i--)
                if (req_clocks->dispclk_khz >
                                clk_dce->max_clks_by_state[i].display_clk_khz
                        || req_clocks->phyclk_khz >
                                clk_dce->max_clks_by_state[i].pixel_clk_khz)
                        break;

        low_req_clk = i + 1;
        if (low_req_clk > clk->max_clks_state) {
                /* Even the max state cannot satisfy the request: report
                 * invalid if the display clock is exceeded, otherwise (only
                 * phyclk too high) fall back to the max clock state.
                 */
                if (clk_dce->max_clks_by_state[clk->max_clks_state].display_clk_khz
                                < req_clocks->dispclk_khz)
                        low_req_clk = DM_PP_CLOCKS_STATE_INVALID;
                else
                        low_req_clk = clk->max_clks_state;
        }

        return low_req_clk;
}
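
/*
 * Worked example (illustrative, using the dce110 table above with
 * max_clks_state = NOMINAL): a request of dispclk = 400000 kHz and
 * phyclk = 270000 kHz passes NOMINAL (467000/400000) but fails LOW
 * (352000/330000), so the loop breaks at LOW and the function returns
 * LOW + 1 = NOMINAL. A request of dispclk = 500000 kHz fails even NOMINAL,
 * and since 500000 also exceeds the max state's display clock the function
 * returns DM_PP_CLOCKS_STATE_INVALID.
 */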

static int dce_set_clock(
        struct dccg *clk,
        int requested_clk_khz)
{
        struct dce_dccg *clk_dce = TO_DCE_CLOCKS(clk);
        struct bp_pixel_clock_parameters pxl_clk_params = { 0 };
        struct dc_bios *bp = clk->ctx->dc_bios;
        int actual_clock = requested_clk_khz;

        /* Make sure the requested clock isn't lower than the minimum threshold */
        if (requested_clk_khz > 0)
                requested_clk_khz = max(requested_clk_khz,
                                clk_dce->dentist_vco_freq_khz / 64);

        /* Prepare to program the display clock */
        pxl_clk_params.target_pixel_clock = requested_clk_khz;
        pxl_clk_params.pll_id = CLOCK_SOURCE_ID_DFS;

        bp->funcs->program_display_engine_pll(bp, &pxl_clk_params);

        if (clk_dce->dfs_bypass_enabled) {

                /* Cache the fixed display clock */
                clk_dce->dfs_bypass_disp_clk =
                        pxl_clk_params.dfs_bypass_display_clock;
                actual_clock = pxl_clk_params.dfs_bypass_display_clock;
        }

        /* Coming out of power down (HW reset) we need to mark the clock state
         * as ClocksStateNominal, so that on resume we will call the pplib
         * voltage regulator.
         */
        if (requested_clk_khz == 0)
                clk->cur_min_clks_state = DM_PP_CLOCKS_STATE_NOMINAL;
        return actual_clock;
}
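
/*
 * Illustrative example of the minimum-clock clamp above (numbers assumed):
 * with dentist_vco_freq_khz = 3600000 the lowest programmable display clock
 * is 3600000 / 64 = 56250 kHz, so a request for 30000 kHz is raised to
 * 56250 kHz before being handed to the VBIOS, while a request of 0 is left
 * untouched and only resets cur_min_clks_state.
 */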

static int dce_psr_set_clock(
        struct dccg *clk,
        int requested_clk_khz)
{
        struct dce_dccg *clk_dce = TO_DCE_CLOCKS(clk);
        struct dc_context *ctx = clk_dce->base.ctx;
        struct dc *core_dc = ctx->dc;
        struct dmcu *dmcu = core_dc->res_pool->dmcu;
        int actual_clk_khz = requested_clk_khz;

        actual_clk_khz = dce_set_clock(clk, requested_clk_khz);

        dmcu->funcs->set_psr_wait_loop(dmcu, actual_clk_khz / 1000 / 7);
        return actual_clk_khz;
}

static int dce112_set_clock(
        struct dccg *clk,
        int requested_clk_khz)
{
        struct dce_dccg *clk_dce = TO_DCE_CLOCKS(clk);
        struct bp_set_dce_clock_parameters dce_clk_params;
        struct dc_bios *bp = clk->ctx->dc_bios;
        struct dc *core_dc = clk->ctx->dc;
        struct dmcu *dmcu = core_dc->res_pool->dmcu;
        int actual_clock = requested_clk_khz;
        /* Prepare to program the display clock */
        memset(&dce_clk_params, 0, sizeof(dce_clk_params));

        /* Make sure the requested clock isn't lower than the minimum threshold */
        if (requested_clk_khz > 0)
                requested_clk_khz = max(requested_clk_khz,
                                clk_dce->dentist_vco_freq_khz / 62);

        dce_clk_params.target_clock_frequency = requested_clk_khz;
        dce_clk_params.pll_id = CLOCK_SOURCE_ID_DFS;
        dce_clk_params.clock_type = DCECLOCK_TYPE_DISPLAY_CLOCK;

        bp->funcs->set_dce_clock(bp, &dce_clk_params);
        actual_clock = dce_clk_params.target_clock_frequency;

        /* Coming out of power down (HW reset) we need to mark the clock state
         * as ClocksStateNominal, so that on resume we will call the pplib
         * voltage regulator.
         */
        if (requested_clk_khz == 0)
                clk->cur_min_clks_state = DM_PP_CLOCKS_STATE_NOMINAL;

        /* Program the DP ref clock.
         * VBIOS will determine the DPREFCLK frequency, so we don't set it.
         */
        dce_clk_params.target_clock_frequency = 0;
        dce_clk_params.clock_type = DCECLOCK_TYPE_DPREFCLK;
        if (!ASICREV_IS_VEGA20_P(clk->ctx->asic_id.hw_internal_rev))
                dce_clk_params.flags.USE_GENLOCK_AS_SOURCE_FOR_DPREFCLK =
                        (dce_clk_params.pll_id ==
                                        CLOCK_SOURCE_COMBO_DISPLAY_PLL0);
        else
                dce_clk_params.flags.USE_GENLOCK_AS_SOURCE_FOR_DPREFCLK = false;

        bp->funcs->set_dce_clock(bp, &dce_clk_params);

        if (!IS_FPGA_MAXIMUS_DC(core_dc->ctx->dce_environment)) {
                if (clk_dce->dfs_bypass_disp_clk != actual_clock)
                        dmcu->funcs->set_psr_wait_loop(dmcu,
                                        actual_clock / 1000 / 7);
        }

        clk_dce->dfs_bypass_disp_clk = actual_clock;
        return actual_clock;
}

static void dce_clock_read_integrated_info(struct dce_dccg *clk_dce)
{
        struct dc_debug *debug = &clk_dce->base.ctx->dc->debug;
        struct dc_bios *bp = clk_dce->base.ctx->dc_bios;
        struct integrated_info info = { { { 0 } } };
        struct dc_firmware_info fw_info = { { 0 } };
        int i;

        if (bp->integrated_info)
                info = *bp->integrated_info;

        clk_dce->dentist_vco_freq_khz = info.dentist_vco_freq;
        if (clk_dce->dentist_vco_freq_khz == 0) {
                bp->funcs->get_firmware_info(bp, &fw_info);
                clk_dce->dentist_vco_freq_khz =
                        fw_info.smu_gpu_pll_output_freq;
                if (clk_dce->dentist_vco_freq_khz == 0)
                        clk_dce->dentist_vco_freq_khz = 3600000;
        }

        /* Update the maximum display clock for each power state */
        for (i = 0; i < NUMBER_OF_DISP_CLK_VOLTAGE; ++i) {
                enum dm_pp_clocks_state clk_state = DM_PP_CLOCKS_STATE_INVALID;

                switch (i) {
                case 0:
                        clk_state = DM_PP_CLOCKS_STATE_ULTRA_LOW;
                        break;

                case 1:
                        clk_state = DM_PP_CLOCKS_STATE_LOW;
                        break;

                case 2:
                        clk_state = DM_PP_CLOCKS_STATE_NOMINAL;
                        break;

                case 3:
                        clk_state = DM_PP_CLOCKS_STATE_PERFORMANCE;
                        break;

                default:
                        clk_state = DM_PP_CLOCKS_STATE_INVALID;
                        break;
                }

                /* Do not allow a bad VBIOS/SBIOS to override the defaults with
                 * invalid values: only accept clocks above 100 MHz.
                 */
                if (info.disp_clk_voltage[i].max_supported_clk >= 100000)
                        clk_dce->max_clks_by_state[clk_state].display_clk_khz =
                                info.disp_clk_voltage[i].max_supported_clk;
        }

        if (!debug->disable_dfs_bypass && bp->integrated_info)
                if (bp->integrated_info->gpu_cap_info & DFS_BYPASS_ENABLE)
                        clk_dce->dfs_bypass_enabled = true;
}

static void dce_clock_read_ss_info(struct dce_dccg *clk_dce)
{
        struct dc_bios *bp = clk_dce->base.ctx->dc_bios;
        int ss_info_num = bp->funcs->get_ss_entry_number(
                        bp, AS_SIGNAL_TYPE_GPU_PLL);

        if (ss_info_num) {
                struct spread_spectrum_info info = { { 0 } };
                enum bp_result result = bp->funcs->get_spread_spectrum_info(
                                bp, AS_SIGNAL_TYPE_GPU_PLL, 0, &info);

                /* The VBIOS keeps an entry for GPU PLL SS even if SS is not
                 * enabled; a non-zero SSInfo.spreadSpectrumPercentage is the
                 * sign that SS actually is enabled.
                 */
                if (result == BP_RESULT_OK &&
                                info.spread_spectrum_percentage != 0) {
                        clk_dce->ss_on_dprefclk = true;
                        clk_dce->dprefclk_ss_divider = info.spread_percentage_divider;

                        if (info.type.CENTER_MODE == 0) {
                                /* TODO: Currently for the DP reference clock
                                 * we only need the SS percentage for
                                 * downspread.
                                 */
                                clk_dce->dprefclk_ss_percentage =
                                                info.spread_spectrum_percentage;
                        }

                        return;
                }

                result = bp->funcs->get_spread_spectrum_info(
                                bp, AS_SIGNAL_TYPE_DISPLAY_PORT, 0, &info);

                /* The VBIOS keeps an entry for DPREFCLK SS even if SS is not
                 * enabled; a non-zero SSInfo.spreadSpectrumPercentage is the
                 * sign that SS actually is enabled.
                 */
                if (result == BP_RESULT_OK &&
                                info.spread_spectrum_percentage != 0) {
                        clk_dce->ss_on_dprefclk = true;
                        clk_dce->dprefclk_ss_divider = info.spread_percentage_divider;

                        if (info.type.CENTER_MODE == 0) {
                                /* Currently for the DP reference clock we only
                                 * need the SS percentage for downspread.
                                 */
                                clk_dce->dprefclk_ss_percentage =
                                                info.spread_spectrum_percentage;
                        }
                }
        }
}

static void dce12_update_clocks(struct dccg *dccg,
                        struct dc_clocks *new_clocks,
                        bool safe_to_lower)
{
        struct dm_pp_clock_for_voltage_req clock_voltage_req = {0};

        if ((new_clocks->dispclk_khz < dccg->clks.dispclk_khz && safe_to_lower)
                        || new_clocks->dispclk_khz > dccg->clks.dispclk_khz) {
                clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DISPLAY_CLK;
                clock_voltage_req.clocks_in_khz = new_clocks->dispclk_khz;
                dccg->funcs->set_dispclk(dccg, new_clocks->dispclk_khz);
                dccg->clks.dispclk_khz = new_clocks->dispclk_khz;

                dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
        }

        if ((new_clocks->phyclk_khz < dccg->clks.phyclk_khz && safe_to_lower)
                        || new_clocks->phyclk_khz > dccg->clks.phyclk_khz) {
                clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DISPLAYPHYCLK;
                clock_voltage_req.clocks_in_khz = new_clocks->phyclk_khz;
                dccg->clks.phyclk_khz = new_clocks->phyclk_khz;

                dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
        }
}
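
/*
 * Usage sketch for the safe_to_lower pattern above (illustrative only; the
 * real callers live in the dc core and the local names here are assumed):
 * clocks may be raised at any time, but should only be dropped once the new,
 * lower-bandwidth state has actually been programmed. A typical sequence is
 *
 *   struct dc_clocks new_clocks = { .dispclk_khz = 400000,
 *                                   .phyclk_khz  = 270000 };
 *
 *   dccg->funcs->update_clocks(dccg, &new_clocks, false); // may only raise
 *   ... program the new display state ...
 *   dccg->funcs->update_clocks(dccg, &new_clocks, true);  // now allowed to lower
 */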

static void dcn_update_clocks(struct dccg *dccg,
                        struct dc_clocks *new_clocks,
                        bool safe_to_lower)
{
        struct dm_pp_clock_for_voltage_req clock_voltage_req = {0};
        bool send_request_to_increase = false;
        bool send_request_to_lower = false;

        if (new_clocks->dispclk_khz > dccg->clks.dispclk_khz
                        || new_clocks->phyclk_khz > dccg->clks.phyclk_khz
                        || new_clocks->fclk_khz > dccg->clks.fclk_khz
                        || new_clocks->dcfclk_khz > dccg->clks.dcfclk_khz)
                send_request_to_increase = true;

#ifdef CONFIG_DRM_AMD_DC_DCN1_0
        if (send_request_to_increase) {
                struct dc *core_dc = dccg->ctx->dc;

                /* use dcfclk to request voltage */
                clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DCFCLK;
                clock_voltage_req.clocks_in_khz = dcn_find_dcfclk_suits_all(core_dc, new_clocks);
                dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
        }
#endif

        if ((new_clocks->dispclk_khz < dccg->clks.dispclk_khz && safe_to_lower)
                        || new_clocks->dispclk_khz > dccg->clks.dispclk_khz) {
                clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DISPLAY_CLK;
                clock_voltage_req.clocks_in_khz = new_clocks->dispclk_khz;
                /* TODO: ramp up - dccg->funcs->set_dispclk(dccg, new_clocks->dispclk_khz); */
                dccg->clks.dispclk_khz = new_clocks->dispclk_khz;

                dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
                send_request_to_lower = true;
        }

        if ((new_clocks->phyclk_khz < dccg->clks.phyclk_khz && safe_to_lower)
                        || new_clocks->phyclk_khz > dccg->clks.phyclk_khz) {
                dccg->clks.phyclk_khz = new_clocks->phyclk_khz;
                clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DISPLAYPHYCLK;
                clock_voltage_req.clocks_in_khz = new_clocks->phyclk_khz;

                dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
                send_request_to_lower = true;
        }

        if ((new_clocks->fclk_khz < dccg->clks.fclk_khz && safe_to_lower)
                        || new_clocks->fclk_khz > dccg->clks.fclk_khz) {
                dccg->clks.fclk_khz = new_clocks->fclk_khz;
                clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_FCLK;
                clock_voltage_req.clocks_in_khz = new_clocks->fclk_khz;

                dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
                send_request_to_lower = true;
        }

        if ((new_clocks->dcfclk_khz < dccg->clks.dcfclk_khz && safe_to_lower)
                        || new_clocks->dcfclk_khz > dccg->clks.dcfclk_khz) {
                dccg->clks.dcfclk_khz = new_clocks->dcfclk_khz;
                clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DCFCLK;
                clock_voltage_req.clocks_in_khz = new_clocks->dcfclk_khz;

                send_request_to_lower = true;
        }

#ifdef CONFIG_DRM_AMD_DC_DCN1_0
        if (!send_request_to_increase && send_request_to_lower) {
                struct dc *core_dc = dccg->ctx->dc;

                /* use dcfclk to request voltage */
                clock_voltage_req.clk_type = DM_PP_CLOCK_TYPE_DCFCLK;
                clock_voltage_req.clocks_in_khz = dcn_find_dcfclk_suits_all(core_dc, new_clocks);
                dm_pp_apply_clock_for_voltage_request(dccg->ctx, &clock_voltage_req);
        }
#endif
}
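
/*
 * Note on ordering in dcn_update_clocks() (a summary of the code above, not
 * new behaviour): when any clock needs to go up, the dcfclk-based voltage
 * request is sent *before* the individual clocks are updated, so the rail is
 * already high enough; when clocks only go down, the voltage request is sent
 * *after* they have been lowered. A rough sketch of the resulting flow:
 *
 *   if (any clock increases)
 *           raise voltage (dcn_find_dcfclk_suits_all());
 *   update dispclk / phyclk / fclk / dcfclk as permitted by safe_to_lower;
 *   if (nothing increased but something was lowered)
 *           drop voltage (dcn_find_dcfclk_suits_all());
 */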

static void dce_update_clocks(struct dccg *dccg,
                        struct dc_clocks *new_clocks,
                        bool safe_to_lower)
{
        struct dm_pp_power_level_change_request level_change_req;

        level_change_req.power_level = dce_get_required_clocks_state(dccg, new_clocks);
        /* get max clock state from PPLIB */
        if ((level_change_req.power_level < dccg->cur_min_clks_state && safe_to_lower)
                        || level_change_req.power_level > dccg->cur_min_clks_state) {
                if (dm_pp_apply_power_level_change_request(dccg->ctx, &level_change_req))
                        dccg->cur_min_clks_state = level_change_req.power_level;
        }

        if ((new_clocks->dispclk_khz < dccg->clks.dispclk_khz && safe_to_lower)
                        || new_clocks->dispclk_khz > dccg->clks.dispclk_khz) {
                dccg->funcs->set_dispclk(dccg, new_clocks->dispclk_khz);
                dccg->clks.dispclk_khz = new_clocks->dispclk_khz;
        }
}

static const struct display_clock_funcs dcn_funcs = {
        .get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq_wrkaround,
        .set_dispclk = dce112_set_clock,
        .update_clocks = dcn_update_clocks
};

static const struct display_clock_funcs dce120_funcs = {
        .get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq_wrkaround,
        .set_dispclk = dce112_set_clock,
        .update_clocks = dce12_update_clocks
};

static const struct display_clock_funcs dce112_funcs = {
        .get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq,
        .set_dispclk = dce112_set_clock,
        .update_clocks = dce_update_clocks
};

static const struct display_clock_funcs dce110_funcs = {
        .get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq,
        .set_dispclk = dce_psr_set_clock,
        .update_clocks = dce_update_clocks
};

static const struct display_clock_funcs dce_funcs = {
        .get_dp_ref_clk_frequency = dce_clocks_get_dp_ref_freq,
        .set_dispclk = dce_set_clock,
        .update_clocks = dce_update_clocks
};

static void dce_dccg_construct(
        struct dce_dccg *clk_dce,
        struct dc_context *ctx,
        const struct dce_disp_clk_registers *regs,
        const struct dce_disp_clk_shift *clk_shift,
        const struct dce_disp_clk_mask *clk_mask)
{
        struct dccg *base = &clk_dce->base;

        base->ctx = ctx;
        base->funcs = &dce_funcs;

        clk_dce->regs = regs;
        clk_dce->clk_shift = clk_shift;
        clk_dce->clk_mask = clk_mask;

        clk_dce->dfs_bypass_disp_clk = 0;

        clk_dce->dprefclk_ss_percentage = 0;
        clk_dce->dprefclk_ss_divider = 1000;
        clk_dce->ss_on_dprefclk = false;

        base->max_clks_state = DM_PP_CLOCKS_STATE_NOMINAL;
        base->cur_min_clks_state = DM_PP_CLOCKS_STATE_INVALID;

        dce_clock_read_integrated_info(clk_dce);
        dce_clock_read_ss_info(clk_dce);

        dce_divider_range_construct(
                &clk_dce->divider_ranges[DIVIDER_RANGE_01],
                DIVIDER_RANGE_01_START,
                DIVIDER_RANGE_01_STEP_SIZE,
                DIVIDER_RANGE_01_BASE_DIVIDER_ID,
                DIVIDER_RANGE_02_BASE_DIVIDER_ID);
        dce_divider_range_construct(
                &clk_dce->divider_ranges[DIVIDER_RANGE_02],
                DIVIDER_RANGE_02_START,
                DIVIDER_RANGE_02_STEP_SIZE,
                DIVIDER_RANGE_02_BASE_DIVIDER_ID,
                DIVIDER_RANGE_03_BASE_DIVIDER_ID);
        dce_divider_range_construct(
                &clk_dce->divider_ranges[DIVIDER_RANGE_03],
                DIVIDER_RANGE_03_START,
                DIVIDER_RANGE_03_STEP_SIZE,
                DIVIDER_RANGE_03_BASE_DIVIDER_ID,
                DIVIDER_RANGE_MAX_DIVIDER_ID);
}

struct dccg *dce_dccg_create(
        struct dc_context *ctx,
        const struct dce_disp_clk_registers *regs,
        const struct dce_disp_clk_shift *clk_shift,
        const struct dce_disp_clk_mask *clk_mask)
{
        struct dce_dccg *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL);

        if (clk_dce == NULL) {
                BREAK_TO_DEBUGGER();
                return NULL;
        }

        memcpy(clk_dce->max_clks_by_state,
                dce80_max_clks_by_state,
                sizeof(dce80_max_clks_by_state));

        dce_dccg_construct(
                clk_dce, ctx, regs, clk_shift, clk_mask);

        return &clk_dce->base;
}

struct dccg *dce110_dccg_create(
        struct dc_context *ctx,
        const struct dce_disp_clk_registers *regs,
        const struct dce_disp_clk_shift *clk_shift,
        const struct dce_disp_clk_mask *clk_mask)
{
        struct dce_dccg *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL);

        if (clk_dce == NULL) {
                BREAK_TO_DEBUGGER();
                return NULL;
        }

        memcpy(clk_dce->max_clks_by_state,
                dce110_max_clks_by_state,
                sizeof(dce110_max_clks_by_state));

        dce_dccg_construct(
                clk_dce, ctx, regs, clk_shift, clk_mask);

        clk_dce->base.funcs = &dce110_funcs;

        return &clk_dce->base;
}

struct dccg *dce112_dccg_create(
        struct dc_context *ctx,
        const struct dce_disp_clk_registers *regs,
        const struct dce_disp_clk_shift *clk_shift,
        const struct dce_disp_clk_mask *clk_mask)
{
        struct dce_dccg *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL);

        if (clk_dce == NULL) {
                BREAK_TO_DEBUGGER();
                return NULL;
        }

        memcpy(clk_dce->max_clks_by_state,
                dce112_max_clks_by_state,
                sizeof(dce112_max_clks_by_state));

        dce_dccg_construct(
                clk_dce, ctx, regs, clk_shift, clk_mask);

        clk_dce->base.funcs = &dce112_funcs;

        return &clk_dce->base;
}

struct dccg *dce120_dccg_create(struct dc_context *ctx)
{
        struct dce_dccg *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL);

        if (clk_dce == NULL) {
                BREAK_TO_DEBUGGER();
                return NULL;
        }

        memcpy(clk_dce->max_clks_by_state,
                dce120_max_clks_by_state,
                sizeof(dce120_max_clks_by_state));

        dce_dccg_construct(
                clk_dce, ctx, NULL, NULL, NULL);

        clk_dce->base.funcs = &dce120_funcs;

        return &clk_dce->base;
}

struct dccg *dcn_dccg_create(struct dc_context *ctx)
{
        struct dce_dccg *clk_dce = kzalloc(sizeof(*clk_dce), GFP_KERNEL);

        if (clk_dce == NULL) {
                BREAK_TO_DEBUGGER();
                return NULL;
        }

        /* TODO: strip the parts DCN actually needs out of the DCE constructor */
        dce_dccg_construct(
                clk_dce, ctx, NULL, NULL, NULL);

        clk_dce->base.funcs = &dcn_funcs;

        return &clk_dce->base;
}

void dce_dccg_destroy(struct dccg **dccg)
{
        struct dce_dccg *clk_dce = TO_DCE_CLOCKS(*dccg);

        kfree(clk_dce);
        *dccg = NULL;
}
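
/*
 * Minimal lifecycle sketch for this block (illustrative only; in the driver
 * the create/destroy calls are made by the per-ASIC resource code, and the
 * register/shift/mask tables named here are assumed to exist):
 *
 *   struct dccg *dccg = dce110_dccg_create(ctx, &disp_clk_regs,
 *                                          &disp_clk_shift, &disp_clk_mask);
 *   if (dccg) {
 *           struct dc_clocks clks = { .dispclk_khz = 400000,
 *                                     .phyclk_khz  = 270000 };
 *
 *           dccg->funcs->update_clocks(dccg, &clks, false);
 *           ...
 *           dce_dccg_destroy(&dccg);   // frees and NULLs the pointer
 *   }
 */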