2 * Copyright 2012-15 Advanced Micro Devices, Inc.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
25 #include "dm_services.h"
28 #include "include/irq_service_interface.h"
29 #include "link_encoder.h"
30 #include "stream_encoder.h"
32 #include "timing_generator.h"
33 #include "transform.h"
35 #include "core_types.h"
36 #include "set_mode_types.h"
37 #include "virtual/virtual_stream_encoder.h"
38 #include "dpcd_defs.h"
40 #include "dce80/dce80_resource.h"
41 #include "dce100/dce100_resource.h"
42 #include "dce110/dce110_resource.h"
43 #include "dce112/dce112_resource.h"
44 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
45 #include "dcn10/dcn10_resource.h"
47 #include "dce120/dce120_resource.h"
/*
 * resource_parse_asic_id() - map a hardware ASIC id to a DCE/DCN IP version.
 *
 * Dispatches on asic_id.chip_family; for families spanning multiple silicon
 * revisions the hw_internal_rev checks below refine the choice.
 * NOTE(review): the switch case labels are elided in this excerpt — confirm
 * which chip family each assignment belongs to against the full file.
 */
50 enum dce_version resource_parse_asic_id(struct hw_asic_id asic_id)
52 enum dce_version dc_version = DCE_VERSION_UNKNOWN;
53 switch (asic_id.chip_family) {
56 dc_version = DCE_VERSION_8_0;
/* Kalindi/Bhavani/Godavari revisions get DCE 8.3; the rest of this family 8.1 */
59 if (ASIC_REV_IS_KALINDI(asic_id.hw_internal_rev) ||
60 ASIC_REV_IS_BHAVANI(asic_id.hw_internal_rev) ||
61 ASIC_REV_IS_GODAVARI(asic_id.hw_internal_rev))
62 dc_version = DCE_VERSION_8_3;
64 dc_version = DCE_VERSION_8_1;
67 dc_version = DCE_VERSION_11_0;
/* Tonga/Fiji use DCE 10.0, Polaris 10/11/12 use DCE 11.2 */
71 if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
72 ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
73 dc_version = DCE_VERSION_10_0;
76 if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
77 ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
78 ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
79 dc_version = DCE_VERSION_11_2;
83 dc_version = DCE_VERSION_12_0;
/* DCN 1.0 (Raven) support is compile-time gated */
85 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
87 dc_version = DCN_VERSION_1_0;
/* Unrecognized families fall back to DCE_VERSION_UNKNOWN */
91 dc_version = DCE_VERSION_UNKNOWN;
/*
 * dc_create_resource_pool() - construct the per-ASIC resource pool.
 *
 * Dispatches on dc_version to the matching dce*/dcn* factory, then reads the
 * firmware info from the VBIOS to seed the pool's reference clock.
 * Returns the new pool, or NULL if the version is unsupported or creation
 * failed (tail of the function elided in this excerpt).
 */
97 struct resource_pool *dc_create_resource_pool(
99 int num_virtual_links,
100 enum dce_version dc_version,
101 struct hw_asic_id asic_id)
103 struct resource_pool *res_pool = NULL;
105 switch (dc_version) {
106 case DCE_VERSION_8_0:
107 res_pool = dce80_create_resource_pool(
108 num_virtual_links, dc);
110 case DCE_VERSION_8_1:
111 res_pool = dce81_create_resource_pool(
112 num_virtual_links, dc);
114 case DCE_VERSION_8_3:
115 res_pool = dce83_create_resource_pool(
116 num_virtual_links, dc);
118 case DCE_VERSION_10_0:
119 res_pool = dce100_create_resource_pool(
120 num_virtual_links, dc);
122 case DCE_VERSION_11_0:
/* DCE 11.0 additionally needs the ASIC id to pick board-specific settings */
123 res_pool = dce110_create_resource_pool(
124 num_virtual_links, dc, asic_id);
126 case DCE_VERSION_11_2:
127 res_pool = dce112_create_resource_pool(
128 num_virtual_links, dc);
130 case DCE_VERSION_12_0:
131 res_pool = dce120_create_resource_pool(
132 num_virtual_links, dc);
135 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
136 case DCN_VERSION_1_0:
137 res_pool = dcn10_create_resource_pool(
138 num_virtual_links, dc);
/* Reference clock comes from the VBIOS crystal frequency */
146 if (res_pool != NULL) {
147 struct dc_firmware_info fw_info = { { 0 } };
149 if (dc->ctx->dc_bios->funcs->get_firmware_info(
150 dc->ctx->dc_bios, &fw_info) == BP_RESULT_OK) {
151 res_pool->ref_clock_inKhz = fw_info.pll_info.crystal_frequency;
/* Firmware info must be readable; a failure here is a fatal setup error */
153 ASSERT_CRITICAL(false);
/*
 * dc_destroy_resource_pool() - release the pool built by
 * dc_create_resource_pool() through the pool's own destroy callback.
 * NOTE(review): NULL guards around dc/res_pool are elided in this excerpt.
 */
159 void dc_destroy_resource_pool(struct dc *dc)
163 dc->res_pool->funcs->destroy(&dc->res_pool);
/*
 * update_num_audio() - derive audio capabilities from the DCE fuse straps.
 *
 * Fills aud_support (DP audio always on; HDMI native/on-dongle only when the
 * HDMI-disable strap is clear and the pinstrap audio bit 1 is set) and may
 * clamp *num_audio based on the audio_stream_number fuse.
 * NOTE(review): the case bodies of the switch are elided here — presumably
 * case 1 limits *num_audio to a single stream; confirm against the full file.
 */
169 static void update_num_audio(
170 const struct resource_straps *straps,
171 unsigned int *num_audio,
172 struct audio_support *aud_support)
174 aud_support->dp_audio = true;
175 aud_support->hdmi_audio_native = false;
176 aud_support->hdmi_audio_on_dongle = false;
178 if (straps->hdmi_disable == 0) {
179 if (straps->dc_pinstraps_audio & 0x2) {
180 aud_support->hdmi_audio_on_dongle = true;
181 aud_support->hdmi_audio_native = true;
185 switch (straps->audio_stream_number) {
186 case 0: /* multi streams supported */
188 case 1: /* multi streams not supported */
192 DC_ERR("DC: unexpected audio fuse!\n");
/*
 * resource_construct() - populate a resource pool's shared hardware objects.
 *
 * Reads the DCE straps, then creates (in order) the audio endpoints, the
 * physical stream encoders, and one virtual stream encoder per virtual link,
 * and finally the hardware sequencer. dynamic_audio is enabled when there are
 * fewer audio endpoints than stream encoders, so endpoints must be assigned
 * on demand. Error-path lines are elided in this excerpt.
 */
196 bool resource_construct(
197 unsigned int num_virtual_links,
199 struct resource_pool *pool,
200 const struct resource_create_funcs *create_funcs)
202 struct dc_context *ctx = dc->ctx;
203 const struct resource_caps *caps = pool->res_cap;
205 unsigned int num_audio = caps->num_audio;
206 struct resource_straps straps = {0};
208 if (create_funcs->read_dce_straps)
209 create_funcs->read_dce_straps(dc->ctx, &straps);
211 pool->audio_count = 0;
212 if (create_funcs->create_audio) {
213 /* find the total number of streams available via the
214 * AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_CONFIGURATION_DEFAULT
215 * registers (one for each pin) starting from pin 1
216 * up to the max number of audio pins.
217 * We stop on the first pin where
218 * PORT_CONNECTIVITY == 1 (as instructed by HW team).
220 update_num_audio(&straps, &num_audio, &pool->audio_support);
221 for (i = 0; i < pool->pipe_count && i < num_audio; i++) {
222 struct audio *aud = create_funcs->create_audio(ctx, i);
225 DC_ERR("DC: failed to create audio!\n");
/* Skip (and free) endpoints the hardware reports as unconnected */
229 if (!aud->funcs->endpoint_valid(aud)) {
230 aud->funcs->destroy(&aud);
234 pool->audios[i] = aud;
239 pool->stream_enc_count = 0;
240 if (create_funcs->create_stream_encoder) {
241 for (i = 0; i < caps->num_stream_encoder; i++) {
242 pool->stream_enc[i] = create_funcs->create_stream_encoder(i, ctx);
243 if (pool->stream_enc[i] == NULL)
244 DC_ERR("DC: failed to create stream_encoder!\n");
245 pool->stream_enc_count++;
/* Fewer audio endpoints than encoders -> endpoints are shared dynamically */
248 dc->caps.dynamic_audio = false;
249 if (pool->audio_count < pool->stream_enc_count) {
250 dc->caps.dynamic_audio = true;
/* Virtual links (e.g. for headless/remote display) get virtual encoders */
252 for (i = 0; i < num_virtual_links; i++) {
253 pool->stream_enc[pool->stream_enc_count] =
254 virtual_stream_encoder_create(
256 if (pool->stream_enc[pool->stream_enc_count] == NULL) {
257 DC_ERR("DC: failed to create stream_encoder!\n");
260 pool->stream_enc_count++;
263 dc->hwseq = create_funcs->create_hwseq(ctx);
269 void resource_unreference_clock_source(
270 struct resource_context *res_ctx,
271 const struct resource_pool *pool,
272 struct clock_source *clock_source)
276 for (i = 0; i < pool->clk_src_count; i++) {
277 if (pool->clock_sources[i] != clock_source)
280 res_ctx->clock_source_ref_count[i]--;
285 if (pool->dp_clock_source == clock_source)
286 res_ctx->dp_clock_source_ref_count--;
289 void resource_reference_clock_source(
290 struct resource_context *res_ctx,
291 const struct resource_pool *pool,
292 struct clock_source *clock_source)
295 for (i = 0; i < pool->clk_src_count; i++) {
296 if (pool->clock_sources[i] != clock_source)
299 res_ctx->clock_source_ref_count[i]++;
303 if (pool->dp_clock_source == clock_source)
304 res_ctx->dp_clock_source_ref_count++;
307 bool resource_are_streams_timing_synchronizable(
308 struct dc_stream_state *stream1,
309 struct dc_stream_state *stream2)
311 if (stream1->timing.h_total != stream2->timing.h_total)
314 if (stream1->timing.v_total != stream2->timing.v_total)
317 if (stream1->timing.h_addressable
318 != stream2->timing.h_addressable)
321 if (stream1->timing.v_addressable
322 != stream2->timing.v_addressable)
325 if (stream1->timing.pix_clk_khz
326 != stream2->timing.pix_clk_khz)
329 if (stream1->phy_pix_clk != stream2->phy_pix_clk
330 && (!dc_is_dp_signal(stream1->signal)
331 || !dc_is_dp_signal(stream2->signal)))
337 static bool is_sharable_clk_src(
338 const struct pipe_ctx *pipe_with_clk_src,
339 const struct pipe_ctx *pipe)
341 if (pipe_with_clk_src->clock_source == NULL)
344 if (pipe_with_clk_src->stream->signal == SIGNAL_TYPE_VIRTUAL)
347 if (dc_is_dp_signal(pipe_with_clk_src->stream->signal))
350 if (dc_is_hdmi_signal(pipe_with_clk_src->stream->signal)
351 && dc_is_dvi_signal(pipe->stream->signal))
354 if (dc_is_hdmi_signal(pipe->stream->signal)
355 && dc_is_dvi_signal(pipe_with_clk_src->stream->signal))
358 if (!resource_are_streams_timing_synchronizable(
359 pipe_with_clk_src->stream, pipe->stream))
365 struct clock_source *resource_find_used_clk_src_for_sharing(
366 struct resource_context *res_ctx,
367 struct pipe_ctx *pipe_ctx)
371 for (i = 0; i < MAX_PIPES; i++) {
372 if (is_sharable_clk_src(&res_ctx->pipe_ctx[i], pipe_ctx))
373 return res_ctx->pipe_ctx[i].clock_source;
379 static enum pixel_format convert_pixel_format_to_dalsurface(
380 enum surface_pixel_format surface_pixel_format)
382 enum pixel_format dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
384 switch (surface_pixel_format) {
385 case SURFACE_PIXEL_FORMAT_GRPH_PALETA_256_COLORS:
386 dal_pixel_format = PIXEL_FORMAT_INDEX8;
388 case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
389 dal_pixel_format = PIXEL_FORMAT_RGB565;
391 case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
392 dal_pixel_format = PIXEL_FORMAT_RGB565;
394 case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
395 dal_pixel_format = PIXEL_FORMAT_ARGB8888;
397 case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
398 dal_pixel_format = PIXEL_FORMAT_ARGB8888;
400 case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
401 dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
403 case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
404 dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
406 case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS:
407 dal_pixel_format = PIXEL_FORMAT_ARGB2101010_XRBIAS;
409 case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:
410 case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
411 dal_pixel_format = PIXEL_FORMAT_FP16;
413 case SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr:
414 case SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb:
415 dal_pixel_format = PIXEL_FORMAT_420BPP8;
417 case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCbCr:
418 case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCrCb:
419 dal_pixel_format = PIXEL_FORMAT_420BPP10;
421 case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
423 dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
426 return dal_pixel_format;
429 static void rect_swap_helper(struct rect *rect)
431 swap(rect->height, rect->width);
432 swap(rect->x, rect->y);
/*
 * calculate_viewport() - compute the surface-space viewport for a pipe.
 *
 * Intersects the stream source rect with the plane's clip rect, maps the
 * result back into surface coordinates via the dst_rect scaling ratio, and
 * derives the chroma viewport (half-resolution for 4:2:0 formats). Handles
 * 90/270 rotation by transposing the source rect, and pipe splitting by
 * halving (and offsetting) the viewport for the primary/secondary halves.
 */
435 static void calculate_viewport(struct pipe_ctx *pipe_ctx)
437 const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
438 const struct dc_stream_state *stream = pipe_ctx->stream;
439 struct scaler_data *data = &pipe_ctx->plane_res.scl_data;
440 struct rect surf_src = plane_state->src_rect;
441 struct rect clip = { 0 };
/* 4:2:0 chroma planes are subsampled by 2 in both directions */
442 int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
443 || data->format == PIXEL_FORMAT_420BPP10) ? 2 : 1;
/* pri_split: this pipe drives the left/top half; sec_split: right/bottom */
444 bool pri_split = pipe_ctx->bottom_pipe &&
445 pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state;
446 bool sec_split = pipe_ctx->top_pipe &&
447 pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state;
449 if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE ||
450 stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM) {
/* For 90/270 rotation the source rect axes are transposed */
455 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
456 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
457 rect_swap_helper(&surf_src);
459 /* The actual clip is an intersection between stream
460 * source and surface clip
462 clip.x = stream->src.x > plane_state->clip_rect.x ?
463 stream->src.x : plane_state->clip_rect.x;
465 clip.width = stream->src.x + stream->src.width <
466 plane_state->clip_rect.x + plane_state->clip_rect.width ?
467 stream->src.x + stream->src.width - clip.x :
468 plane_state->clip_rect.x + plane_state->clip_rect.width - clip.x ;
470 clip.y = stream->src.y > plane_state->clip_rect.y ?
471 stream->src.y : plane_state->clip_rect.y;
473 clip.height = stream->src.y + stream->src.height <
474 plane_state->clip_rect.y + plane_state->clip_rect.height ?
475 stream->src.y + stream->src.height - clip.y :
476 plane_state->clip_rect.y + plane_state->clip_rect.height - clip.y ;
478 /* offset = surf_src.ofs + (clip.ofs - surface->dst_rect.ofs) * scl_ratio
479 * num_pixels = clip.num_pix * scl_ratio
481 data->viewport.x = surf_src.x + (clip.x - plane_state->dst_rect.x) *
482 surf_src.width / plane_state->dst_rect.width;
483 data->viewport.width = clip.width *
484 surf_src.width / plane_state->dst_rect.width;
486 data->viewport.y = surf_src.y + (clip.y - plane_state->dst_rect.y) *
487 surf_src.height / plane_state->dst_rect.height;
488 data->viewport.height = clip.height *
489 surf_src.height / plane_state->dst_rect.height;
491 /* Round down, compensate in init */
492 data->viewport_c.x = data->viewport.x / vpc_div;
493 data->viewport_c.y = data->viewport.y / vpc_div;
/* Odd luma offsets need a half-pixel chroma phase adjustment */
494 data->inits.h_c = (data->viewport.x % vpc_div) != 0 ?
495 dal_fixed31_32_half : dal_fixed31_32_zero;
496 data->inits.v_c = (data->viewport.y % vpc_div) != 0 ?
497 dal_fixed31_32_half : dal_fixed31_32_zero;
498 /* Round up, assume original video size always even dimensions */
499 data->viewport_c.width = (data->viewport.width + vpc_div - 1) / vpc_div;
500 data->viewport_c.height = (data->viewport.height + vpc_div - 1) / vpc_div;
/* Split pipes: secondary half starts at the midpoint (elided branch above) */
504 data->viewport.x += data->viewport.width / 2;
505 data->viewport_c.x += data->viewport_c.width / 2;
506 /* Ceil offset pipe */
507 data->viewport.width = (data->viewport.width + 1) / 2;
508 data->viewport_c.width = (data->viewport_c.width + 1) / 2;
509 } else if (pri_split) {
510 data->viewport.width /= 2;
511 data->viewport_c.width /= 2;
/* Undo the earlier transpose so the viewport is in scan orientation */
514 if (plane_state->rotation == ROTATION_ANGLE_90 ||
515 plane_state->rotation == ROTATION_ANGLE_270) {
516 rect_swap_helper(&data->viewport_c);
517 rect_swap_helper(&data->viewport);
/*
 * calculate_recout() - compute the rectangle-out (active region on the
 * stream destination) for a pipe, clamped to the stream dst rect.
 *
 * Also returns in recout_skip the offset between the clipped recout origin
 * and the unclipped ("full") recout origin, which the init calculation uses
 * to line up the scaler phase. Handles top/bottom 3D and pipe splitting.
 */
521 static void calculate_recout(struct pipe_ctx *pipe_ctx, struct view *recout_skip)
523 const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
524 const struct dc_stream_state *stream = pipe_ctx->stream;
525 struct rect surf_src = plane_state->src_rect;
526 struct rect surf_clip = plane_state->clip_rect;
527 int recout_full_x, recout_full_y;
528 bool pri_split = pipe_ctx->bottom_pipe &&
529 pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state;
530 bool sec_split = pipe_ctx->top_pipe &&
531 pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state;
532 bool top_bottom_split = stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM;
/* Rotated surfaces: work on the transposed source rect */
534 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
535 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
536 rect_swap_helper(&surf_src);
/* Horizontal recout: clip start to stream dst, clamp width to dst extent */
538 pipe_ctx->plane_res.scl_data.recout.x = stream->dst.x;
539 if (stream->src.x < surf_clip.x)
540 pipe_ctx->plane_res.scl_data.recout.x += (surf_clip.x
541 - stream->src.x) * stream->dst.width
544 pipe_ctx->plane_res.scl_data.recout.width = surf_clip.width *
545 stream->dst.width / stream->src.width;
546 if (pipe_ctx->plane_res.scl_data.recout.width + pipe_ctx->plane_res.scl_data.recout.x >
547 stream->dst.x + stream->dst.width)
548 pipe_ctx->plane_res.scl_data.recout.width =
549 stream->dst.x + stream->dst.width
550 - pipe_ctx->plane_res.scl_data.recout.x;
/* Vertical recout: same scheme as horizontal */
552 pipe_ctx->plane_res.scl_data.recout.y = stream->dst.y;
553 if (stream->src.y < surf_clip.y)
554 pipe_ctx->plane_res.scl_data.recout.y += (surf_clip.y
555 - stream->src.y) * stream->dst.height
556 / stream->src.height;
558 pipe_ctx->plane_res.scl_data.recout.height = surf_clip.height *
559 stream->dst.height / stream->src.height;
560 if (pipe_ctx->plane_res.scl_data.recout.height + pipe_ctx->plane_res.scl_data.recout.y >
561 stream->dst.y + stream->dst.height)
562 pipe_ctx->plane_res.scl_data.recout.height =
563 stream->dst.y + stream->dst.height
564 - pipe_ctx->plane_res.scl_data.recout.y;
566 /* Handle h & vsplit */
567 if (sec_split && top_bottom_split) {
568 pipe_ctx->plane_res.scl_data.recout.y +=
569 pipe_ctx->plane_res.scl_data.recout.height / 2;
570 /* Floor primary pipe, ceil 2ndary pipe */
571 pipe_ctx->plane_res.scl_data.recout.height =
572 (pipe_ctx->plane_res.scl_data.recout.height + 1) / 2;
573 } else if (pri_split && top_bottom_split)
574 pipe_ctx->plane_res.scl_data.recout.height /= 2;
575 else if (pri_split || sec_split) {
576 /* HMirror XOR Secondary_pipe XOR Rotation_180 */
577 bool right_view = (sec_split != plane_state->horizontal_mirror) !=
578 (plane_state->rotation == ROTATION_ANGLE_180);
580 if (plane_state->rotation == ROTATION_ANGLE_90
581 || plane_state->rotation == ROTATION_ANGLE_270)
582 /* Secondary_pipe XOR Rotation_270 */
583 right_view = (plane_state->rotation == ROTATION_ANGLE_270) != sec_split;
/* right_view branch (elided): the right half starts at the midpoint */
586 pipe_ctx->plane_res.scl_data.recout.x +=
587 pipe_ctx->plane_res.scl_data.recout.width / 2;
588 /* Ceil offset pipe */
589 pipe_ctx->plane_res.scl_data.recout.width =
590 (pipe_ctx->plane_res.scl_data.recout.width + 1) / 2;
592 pipe_ctx->plane_res.scl_data.recout.width /= 2;
595 /* Unclipped recout offset = stream dst offset + ((surf dst offset - stream surf_src offset)
596 * * 1/ stream scaling ratio) - (surf surf_src offset * 1/ full scl
599 recout_full_x = stream->dst.x + (plane_state->dst_rect.x - stream->src.x)
600 * stream->dst.width / stream->src.width -
601 surf_src.x * plane_state->dst_rect.width / surf_src.width
602 * stream->dst.width / stream->src.width;
603 recout_full_y = stream->dst.y + (plane_state->dst_rect.y - stream->src.y)
604 * stream->dst.height / stream->src.height -
605 surf_src.y * plane_state->dst_rect.height / surf_src.height
606 * stream->dst.height / stream->src.height;
/* How far the clipped recout was pushed away from the full recout origin */
608 recout_skip->width = pipe_ctx->plane_res.scl_data.recout.x - recout_full_x;
609 recout_skip->height = pipe_ctx->plane_res.scl_data.recout.y - recout_full_y;
/*
 * calculate_scaling_ratios() - compute fixed-point h/v scaling ratios.
 *
 * Combines the surface src->dst ratio with the stream src->dst ratio
 * (composed via the div64_s64 steps below), doubles a ratio for 3D
 * side-by-side/top-and-bottom packing, and halves the chroma ratios for
 * 4:2:0 formats.
 */
612 static void calculate_scaling_ratios(struct pipe_ctx *pipe_ctx)
614 const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
615 const struct dc_stream_state *stream = pipe_ctx->stream;
616 struct rect surf_src = plane_state->src_rect;
617 const int in_w = stream->src.width;
618 const int in_h = stream->src.height;
619 const int out_w = stream->dst.width;
620 const int out_h = stream->dst.height;
622 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
623 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
624 rect_swap_helper(&surf_src);
/* NOTE(review): the numerator lines (surf_src.width / surf_src.height) of
 * these two fractions are elided in this excerpt — confirm in the full file.
 */
626 pipe_ctx->plane_res.scl_data.ratios.horz = dal_fixed31_32_from_fraction(
628 plane_state->dst_rect.width);
629 pipe_ctx->plane_res.scl_data.ratios.vert = dal_fixed31_32_from_fraction(
631 plane_state->dst_rect.height);
/* 3D packed formats squeeze two views into one frame -> double one ratio */
633 if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE)
634 pipe_ctx->plane_res.scl_data.ratios.horz.value *= 2;
635 else if (stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM)
636 pipe_ctx->plane_res.scl_data.ratios.vert.value *= 2;
/* Fold in the stream-level src->dst scaling */
638 pipe_ctx->plane_res.scl_data.ratios.vert.value = div64_s64(
639 pipe_ctx->plane_res.scl_data.ratios.vert.value * in_h, out_h);
640 pipe_ctx->plane_res.scl_data.ratios.horz.value = div64_s64(
641 pipe_ctx->plane_res.scl_data.ratios.horz.value * in_w, out_w);
643 pipe_ctx->plane_res.scl_data.ratios.horz_c = pipe_ctx->plane_res.scl_data.ratios.horz;
644 pipe_ctx->plane_res.scl_data.ratios.vert_c = pipe_ctx->plane_res.scl_data.ratios.vert;
/* Chroma is subsampled by 2 for 4:2:0 */
646 if (pipe_ctx->plane_res.scl_data.format == PIXEL_FORMAT_420BPP8
647 || pipe_ctx->plane_res.scl_data.format == PIXEL_FORMAT_420BPP10) {
648 pipe_ctx->plane_res.scl_data.ratios.horz_c.value /= 2;
649 pipe_ctx->plane_res.scl_data.ratios.vert_c.value /= 2;
/*
 * calculate_inits_and_adj_vp() - compute scaler filter init phases and make
 * final viewport adjustments.
 *
 * init        = (scaling_ratio + number_of_taps + 1) / 2
 * init_bot    = init + scaling_ratio
 * init_c      = init + truncated chroma offset from calculate_viewport()
 *
 * After the base inits, the viewport is (a) widened at the far edge when the
 * filter would read past it, and (b) shifted/phase-corrected at a non-zero
 * near edge so the first tap lands inside the viewport. The rotation/mirror
 * scan-direction flags suppress the adjustment on edges that are scanned
 * from the opposite side. Order of these steps is load-bearing: inits feed
 * the viewport adjustments which in turn feed the final inits.
 */
653 static void calculate_inits_and_adj_vp(struct pipe_ctx *pipe_ctx, struct view *recout_skip)
655 struct scaler_data *data = &pipe_ctx->plane_res.scl_data;
656 struct rect src = pipe_ctx->plane_state->src_rect;
657 int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
658 || data->format == PIXEL_FORMAT_420BPP10) ? 2 : 1;
659 bool flip_vert_scan_dir = false, flip_horz_scan_dir = false;
662 * Need to calculate the scan direction for viewport to make adjustments
664 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_180) {
665 flip_vert_scan_dir = true;
666 flip_horz_scan_dir = true;
667 } else if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90)
668 flip_vert_scan_dir = true;
669 else if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
670 flip_horz_scan_dir = true;
671 if (pipe_ctx->plane_state->horizontal_mirror)
672 flip_horz_scan_dir = !flip_horz_scan_dir;
/* Work in transposed coordinates while rotated 90/270 */
674 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
675 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270) {
676 rect_swap_helper(&src);
677 rect_swap_helper(&data->viewport_c);
678 rect_swap_helper(&data->viewport);
682 * Init calculated according to formula:
683 * init = (scaling_ratio + number_of_taps + 1) / 2
684 * init_bot = init + scaling_ratio
685 * init_c = init + truncated_vp_c_offset(from calculate viewport)
687 data->inits.h = dal_fixed31_32_div_int(
688 dal_fixed31_32_add_int(data->ratios.horz, data->taps.h_taps + 1), 2);
690 data->inits.h_c = dal_fixed31_32_add(data->inits.h_c, dal_fixed31_32_div_int(
691 dal_fixed31_32_add_int(data->ratios.horz_c, data->taps.h_taps_c + 1), 2));
693 data->inits.v = dal_fixed31_32_div_int(
694 dal_fixed31_32_add_int(data->ratios.vert, data->taps.v_taps + 1), 2);
696 data->inits.v_c = dal_fixed31_32_add(data->inits.v_c, dal_fixed31_32_div_int(
697 dal_fixed31_32_add_int(data->ratios.vert_c, data->taps.v_taps_c + 1), 2));
700 /* Adjust for viewport end clip-off */
701 if ((data->viewport.x + data->viewport.width) < (src.x + src.width) && !flip_horz_scan_dir) {
702 int vp_clip = src.x + src.width - data->viewport.width - data->viewport.x;
703 int int_part = dal_fixed31_32_floor(
704 dal_fixed31_32_sub(data->inits.h, data->ratios.horz));
706 int_part = int_part > 0 ? int_part : 0;
707 data->viewport.width += int_part < vp_clip ? int_part : vp_clip;
709 if ((data->viewport.y + data->viewport.height) < (src.y + src.height) && !flip_vert_scan_dir) {
710 int vp_clip = src.y + src.height - data->viewport.height - data->viewport.y;
711 int int_part = dal_fixed31_32_floor(
712 dal_fixed31_32_sub(data->inits.v, data->ratios.vert));
714 int_part = int_part > 0 ? int_part : 0;
715 data->viewport.height += int_part < vp_clip ? int_part : vp_clip;
717 if ((data->viewport_c.x + data->viewport_c.width) < (src.x + src.width) / vpc_div && !flip_horz_scan_dir) {
718 int vp_clip = (src.x + src.width) / vpc_div -
719 data->viewport_c.width - data->viewport_c.x;
720 int int_part = dal_fixed31_32_floor(
721 dal_fixed31_32_sub(data->inits.h_c, data->ratios.horz_c));
723 int_part = int_part > 0 ? int_part : 0;
724 data->viewport_c.width += int_part < vp_clip ? int_part : vp_clip;
726 if ((data->viewport_c.y + data->viewport_c.height) < (src.y + src.height) / vpc_div && !flip_vert_scan_dir) {
727 int vp_clip = (src.y + src.height) / vpc_div -
728 data->viewport_c.height - data->viewport_c.y;
729 int int_part = dal_fixed31_32_floor(
730 dal_fixed31_32_sub(data->inits.v_c, data->ratios.vert_c));
732 int_part = int_part > 0 ? int_part : 0;
733 data->viewport_c.height += int_part < vp_clip ? int_part : vp_clip;
736 /* Adjust for non-0 viewport offset */
737 if (data->viewport.x && !flip_horz_scan_dir) {
740 data->inits.h = dal_fixed31_32_add(data->inits.h, dal_fixed31_32_mul_int(
741 data->ratios.horz, recout_skip->width));
742 int_part = dal_fixed31_32_floor(data->inits.h) - data->viewport.x;
743 if (int_part < data->taps.h_taps) {
744 int int_adj = data->viewport.x >= (data->taps.h_taps - int_part) ?
745 (data->taps.h_taps - int_part) : data->viewport.x;
746 data->viewport.x -= int_adj;
747 data->viewport.width += int_adj;
749 } else if (int_part > data->taps.h_taps) {
750 data->viewport.x += int_part - data->taps.h_taps;
751 data->viewport.width -= int_part - data->taps.h_taps;
752 int_part = data->taps.h_taps;
/* Keep the fractional phase, re-add the clamped integer part */
754 data->inits.h.value &= 0xffffffff;
755 data->inits.h = dal_fixed31_32_add_int(data->inits.h, int_part);
758 if (data->viewport_c.x && !flip_horz_scan_dir) {
761 data->inits.h_c = dal_fixed31_32_add(data->inits.h_c, dal_fixed31_32_mul_int(
762 data->ratios.horz_c, recout_skip->width));
763 int_part = dal_fixed31_32_floor(data->inits.h_c) - data->viewport_c.x;
764 if (int_part < data->taps.h_taps_c) {
765 int int_adj = data->viewport_c.x >= (data->taps.h_taps_c - int_part) ?
766 (data->taps.h_taps_c - int_part) : data->viewport_c.x;
767 data->viewport_c.x -= int_adj;
768 data->viewport_c.width += int_adj;
770 } else if (int_part > data->taps.h_taps_c) {
771 data->viewport_c.x += int_part - data->taps.h_taps_c;
772 data->viewport_c.width -= int_part - data->taps.h_taps_c;
773 int_part = data->taps.h_taps_c;
775 data->inits.h_c.value &= 0xffffffff;
776 data->inits.h_c = dal_fixed31_32_add_int(data->inits.h_c, int_part);
779 if (data->viewport.y && !flip_vert_scan_dir) {
782 data->inits.v = dal_fixed31_32_add(data->inits.v, dal_fixed31_32_mul_int(
783 data->ratios.vert, recout_skip->height));
784 int_part = dal_fixed31_32_floor(data->inits.v) - data->viewport.y;
785 if (int_part < data->taps.v_taps) {
786 int int_adj = data->viewport.y >= (data->taps.v_taps - int_part) ?
787 (data->taps.v_taps - int_part) : data->viewport.y;
788 data->viewport.y -= int_adj;
789 data->viewport.height += int_adj;
791 } else if (int_part > data->taps.v_taps) {
792 data->viewport.y += int_part - data->taps.v_taps;
793 data->viewport.height -= int_part - data->taps.v_taps;
794 int_part = data->taps.v_taps;
796 data->inits.v.value &= 0xffffffff;
797 data->inits.v = dal_fixed31_32_add_int(data->inits.v, int_part);
800 if (data->viewport_c.y && !flip_vert_scan_dir) {
803 data->inits.v_c = dal_fixed31_32_add(data->inits.v_c, dal_fixed31_32_mul_int(
804 data->ratios.vert_c, recout_skip->height));
805 int_part = dal_fixed31_32_floor(data->inits.v_c) - data->viewport_c.y;
806 if (int_part < data->taps.v_taps_c) {
807 int int_adj = data->viewport_c.y >= (data->taps.v_taps_c - int_part) ?
808 (data->taps.v_taps_c - int_part) : data->viewport_c.y;
809 data->viewport_c.y -= int_adj;
810 data->viewport_c.height += int_adj;
812 } else if (int_part > data->taps.v_taps_c) {
813 data->viewport_c.y += int_part - data->taps.v_taps_c;
814 data->viewport_c.height -= int_part - data->taps.v_taps_c;
815 int_part = data->taps.v_taps_c;
817 data->inits.v_c.value &= 0xffffffff;
818 data->inits.v_c = dal_fixed31_32_add_int(data->inits.v_c, int_part);
821 /* Interlaced inits based on final vert inits */
822 data->inits.v_bot = dal_fixed31_32_add(data->inits.v, data->ratios.vert);
823 data->inits.v_c_bot = dal_fixed31_32_add(data->inits.v_c, data->ratios.vert_c);
/* Undo the earlier transpose for rotated surfaces */
825 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
826 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270) {
827 rect_swap_helper(&data->viewport_c);
828 rect_swap_helper(&data->viewport);
/*
 * resource_build_scaling_params() - compute the full set of scaler
 * parameters (format, ratios, viewport, recout, taps, inits) for one pipe.
 *
 * Returns false when the resulting configuration is unusable (e.g. the
 * viewport is below the 16-pixel hardware minimum, or no valid tap count
 * could be found — those early-return lines are elided in this excerpt).
 */
832 bool resource_build_scaling_params(struct pipe_ctx *pipe_ctx)
834 const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
835 struct dc_crtc_timing *timing = &pipe_ctx->stream->timing;
836 struct view recout_skip = { 0 };
838 struct dc_context *ctx = pipe_ctx->stream->ctx;
839 /* Important: scaling ratio calculation requires pixel format,
840 * lb depth calculation requires recout and taps require scaling ratios.
841 * Inits require viewport, taps, ratios and recout of split pipe
843 pipe_ctx->plane_res.scl_data.format = convert_pixel_format_to_dalsurface(
844 pipe_ctx->plane_state->format);
846 calculate_scaling_ratios(pipe_ctx);
848 calculate_viewport(pipe_ctx);
/* Hardware cannot scale viewports narrower/shorter than 16 pixels */
850 if (pipe_ctx->plane_res.scl_data.viewport.height < 16 || pipe_ctx->plane_res.scl_data.viewport.width < 16)
853 calculate_recout(pipe_ctx, &recout_skip);
856 * Setting line buffer pixel depth to 24bpp yields banding
857 * on certain displays, such as the Sharp 4k
859 pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_30BPP;
/* recout/active sizes include the timing borders */
861 pipe_ctx->plane_res.scl_data.recout.x += timing->h_border_left;
862 pipe_ctx->plane_res.scl_data.recout.y += timing->v_border_top;
864 pipe_ctx->plane_res.scl_data.h_active = timing->h_addressable + timing->h_border_left + timing->h_border_right;
865 pipe_ctx->plane_res.scl_data.v_active = timing->v_addressable + timing->v_border_top + timing->v_border_bottom;
868 /* Taps calculations */
/* Pre-DCN hardware exposes a transform (xfm); DCN exposes a dpp instead */
869 if (pipe_ctx->plane_res.xfm != NULL)
870 res = pipe_ctx->plane_res.xfm->funcs->transform_get_optimal_number_of_taps(
871 pipe_ctx->plane_res.xfm, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality);
873 if (pipe_ctx->plane_res.dpp != NULL)
874 res = pipe_ctx->plane_res.dpp->funcs->dpp_get_optimal_number_of_taps(
875 pipe_ctx->plane_res.dpp, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality);
/* If 30bpp failed, retry tap selection with a 24bpp line buffer */
877 /* Try 24 bpp linebuffer */
878 pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_24BPP;
880 if (pipe_ctx->plane_res.xfm != NULL)
881 res = pipe_ctx->plane_res.xfm->funcs->transform_get_optimal_number_of_taps(
882 pipe_ctx->plane_res.xfm,
883 &pipe_ctx->plane_res.scl_data,
884 &plane_state->scaling_quality);
886 if (pipe_ctx->plane_res.dpp != NULL)
887 res = pipe_ctx->plane_res.dpp->funcs->dpp_get_optimal_number_of_taps(
888 pipe_ctx->plane_res.dpp,
889 &pipe_ctx->plane_res.scl_data,
890 &plane_state->scaling_quality);
894 /* May need to re-check lb size after this in some obscure scenario */
895 calculate_inits_and_adj_vp(pipe_ctx, &recout_skip);
/* Debug log of the final viewport / dst rect */
898 "%s: Viewport:\nheight:%d width:%d x:%d "
899 "y:%d\n dst_rect:\nheight:%d width:%d x:%d "
902 pipe_ctx->plane_res.scl_data.viewport.height,
903 pipe_ctx->plane_res.scl_data.viewport.width,
904 pipe_ctx->plane_res.scl_data.viewport.x,
905 pipe_ctx->plane_res.scl_data.viewport.y,
906 plane_state->dst_rect.height,
907 plane_state->dst_rect.width,
908 plane_state->dst_rect.x,
909 plane_state->dst_rect.y);
/*
 * resource_build_scaling_params_for_context() - build scaling parameters for
 * every active pipe in a state; fails fast with DC_FAIL_SCALING on the first
 * pipe that cannot be configured.
 * NOTE(review): a parameter line of the signature is elided in this excerpt.
 */
915 enum dc_status resource_build_scaling_params_for_context(
917 struct dc_state *context)
/* Only pipes with both a plane and a stream attached need scaler params */
921 for (i = 0; i < MAX_PIPES; i++) {
922 if (context->res_ctx.pipe_ctx[i].plane_state != NULL &&
923 context->res_ctx.pipe_ctx[i].stream != NULL)
924 if (!resource_build_scaling_params(&context->res_ctx.pipe_ctx[i]))
925 return DC_FAIL_SCALING;
931 struct pipe_ctx *find_idle_secondary_pipe(
932 struct resource_context *res_ctx,
933 const struct resource_pool *pool)
936 struct pipe_ctx *secondary_pipe = NULL;
939 * search backwards for the second pipe to keep pipe
940 * assignment more consistent
943 for (i = pool->pipe_count - 1; i >= 0; i--) {
944 if (res_ctx->pipe_ctx[i].stream == NULL) {
945 secondary_pipe = &res_ctx->pipe_ctx[i];
946 secondary_pipe->pipe_idx = i;
952 return secondary_pipe;
955 struct pipe_ctx *resource_get_head_pipe_for_stream(
956 struct resource_context *res_ctx,
957 struct dc_stream_state *stream)
960 for (i = 0; i < MAX_PIPES; i++) {
961 if (res_ctx->pipe_ctx[i].stream == stream &&
962 !res_ctx->pipe_ctx[i].top_pipe) {
963 return &res_ctx->pipe_ctx[i];
970 static struct pipe_ctx *resource_get_tail_pipe_for_stream(
971 struct resource_context *res_ctx,
972 struct dc_stream_state *stream)
974 struct pipe_ctx *head_pipe, *tail_pipe;
975 head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);
980 tail_pipe = head_pipe->bottom_pipe;
983 head_pipe = tail_pipe;
984 tail_pipe = tail_pipe->bottom_pipe;
991 * A free_pipe for a stream is defined here as a pipe
992 * that has no surface attached yet
/*
 * acquire_free_pipe_for_stream() - get a pipe with no plane attached for a
 * stream that already has its back end (head pipe) configured.
 *
 * Preference order: the head pipe itself if still plane-less, then any pipe
 * already bound to this stream, then a pool-specific idle pipe. Early-return
 * lines for the first two cases are elided in this excerpt.
 */
994 static struct pipe_ctx *acquire_free_pipe_for_stream(
995 struct dc_state *context,
996 const struct resource_pool *pool,
997 struct dc_stream_state *stream)
1000 struct resource_context *res_ctx = &context->res_ctx;
1002 struct pipe_ctx *head_pipe = NULL;
1004 /* Find head pipe, which has the back end set up*/
1006 head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);
/* Head pipe without a plane yet can take this plane directly */
1013 if (!head_pipe->plane_state)
1016 /* Re-use pipe already acquired for this stream if available*/
1017 for (i = pool->pipe_count - 1; i >= 0; i--) {
1018 if (res_ctx->pipe_ctx[i].stream == stream &&
1019 !res_ctx->pipe_ctx[i].plane_state) {
1020 return &res_ctx->pipe_ctx[i];
1025 * At this point we have no re-useable pipe for this stream and we need
1026 * to acquire an idle one to satisfy the request
1029 if (!pool->funcs->acquire_idle_pipe_for_layer)
1032 return pool->funcs->acquire_idle_pipe_for_layer(context, pool, stream);
#if defined(CONFIG_DRM_AMD_DC_DCN1_0)
/*
 * acquire_first_split_pipe() - steal a pipe currently used as the second
 * half of a split plane (its top_pipe shares the same plane_state), unlink
 * it from the split chain, reset it, and re-seed its fixed per-index
 * resources (tg/hubp/ipp/dpp/opp) for @stream.
 *
 * Returns the pipe index on success; the not-found return is in elided
 * lines (presumably a negative error) — confirm against the full file.
 */
static int acquire_first_split_pipe(
		struct resource_context *res_ctx,
		const struct resource_pool *pool,
		struct dc_stream_state *stream)
	for (i = 0; i < pool->pipe_count; i++) {
		struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];
		/* second-half of a split: top pipe exists and shares the plane */
		if (pipe_ctx->top_pipe &&
				pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state) {
			/* unlink this pipe from the top/bottom chain */
			pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe;
			if (pipe_ctx->bottom_pipe)
				pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe;
			/* wipe, then rebuild the per-index HW resource bindings */
			memset(pipe_ctx, 0, sizeof(*pipe_ctx));
			pipe_ctx->stream_res.tg = pool->timing_generators[i];
			pipe_ctx->plane_res.hubp = pool->hubps[i];
			pipe_ctx->plane_res.ipp = pool->ipps[i];
			pipe_ctx->plane_res.dpp = pool->dpps[i];
			pipe_ctx->stream_res.opp = pool->opps[i];
			pipe_ctx->plane_res.mpcc_inst = pool->dpps[i]->inst;
			pipe_ctx->pipe_idx = i;
			pipe_ctx->stream = stream;
/*
 * dc_add_plane_to_context() - attach @plane_state to @stream inside
 * @context.
 *
 * Locates the stream's status entry, acquires a free pipe (falling back to
 * splitting an existing pipe on DCN1.0), retains the plane, links the new
 * pipe at the tail of the stream's pipe chain when it is not the head, and
 * appends the plane to the stream status. Error paths (returns false) are
 * partially elided in this excerpt.
 */
bool dc_add_plane_to_context(
		const struct dc *dc,
		struct dc_stream_state *stream,
		struct dc_plane_state *plane_state,
		struct dc_state *context)
	struct resource_pool *pool = dc->res_pool;
	struct pipe_ctx *head_pipe, *tail_pipe, *free_pipe;
	struct dc_stream_status *stream_status = NULL;
	/* find the status slot that tracks this stream's planes */
	for (i = 0; i < context->stream_count; i++)
		if (context->streams[i] == stream) {
			stream_status = &context->stream_status[i];
	if (stream_status == NULL) {
		dm_error("Existing stream not found; failed to attach surface!\n");
	if (stream_status->plane_count == MAX_SURFACE_NUM) {
		dm_error("Surface: can not attach plane_state %p! Maximum is: %d\n",
				plane_state, MAX_SURFACE_NUM);
	head_pipe = resource_get_head_pipe_for_stream(&context->res_ctx, stream);
		dm_error("Head pipe not found for stream_state %p !\n", stream);
	free_pipe = acquire_free_pipe_for_stream(context, pool, stream);
#if defined(CONFIG_DRM_AMD_DC_DCN1_0)
	/* DCN1.0 fallback: split an already-acquired pipe (guard elided) */
		int pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream);
		free_pipe = &context->res_ctx.pipe_ctx[pipe_idx];
	/* retain new surfaces */
	dc_plane_state_retain(plane_state);
	free_pipe->plane_state = plane_state;
	if (head_pipe != free_pipe) {
		/* new pipe becomes the bottom of the chain; it inherits the
		 * stream-side resources from the current tail */
		tail_pipe = resource_get_tail_pipe_for_stream(&context->res_ctx, stream);
		free_pipe->stream_res.tg = tail_pipe->stream_res.tg;
		free_pipe->stream_res.opp = tail_pipe->stream_res.opp;
		free_pipe->stream_res.stream_enc = tail_pipe->stream_res.stream_enc;
		free_pipe->stream_res.audio = tail_pipe->stream_res.audio;
		free_pipe->clock_source = tail_pipe->clock_source;
		free_pipe->top_pipe = tail_pipe;
		tail_pipe->bottom_pipe = free_pipe;
	/* assign new surfaces*/
	stream_status->plane_states[stream_status->plane_count] = plane_state;
	stream_status->plane_count++;
/*
 * dc_remove_plane_from_context() - detach @plane_state from @stream in
 * @context.
 *
 * Unlinks any pipe carrying the plane from its top/bottom chain (head
 * pipes only have their plane cleared; tail pipes are fully zeroed), then
 * releases the plane reference and compacts the stream status plane array.
 */
bool dc_remove_plane_from_context(
		const struct dc *dc,
		struct dc_stream_state *stream,
		struct dc_plane_state *plane_state,
		struct dc_state *context)
	struct dc_stream_status *stream_status = NULL;
	struct resource_pool *pool = dc->res_pool;
	for (i = 0; i < context->stream_count; i++)
		if (context->streams[i] == stream) {
			stream_status = &context->stream_status[i];
	if (stream_status == NULL) {
		dm_error("Existing stream not found; failed to remove plane.\n");
	/* release pipe for plane*/
	for (i = pool->pipe_count - 1; i >= 0; i--) {
		struct pipe_ctx *pipe_ctx;
		if (context->res_ctx.pipe_ctx[i].plane_state == plane_state) {
			pipe_ctx = &context->res_ctx.pipe_ctx[i];
			if (pipe_ctx->top_pipe)
				pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe;
			/* Second condition is to avoid setting NULL to top pipe
			 * of tail pipe making it look like head pipe in subsequent
			if (pipe_ctx->bottom_pipe && pipe_ctx->top_pipe)
				pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe;
			 * For head pipe detach surfaces from pipe for tail
			 * pipe just zero it out
			if (!pipe_ctx->top_pipe) {
				/* head pipe keeps its stream-side state */
				pipe_ctx->plane_state = NULL;
				pipe_ctx->bottom_pipe = NULL;
				/* (elided else) tail pipe is wiped completely */
				memset(pipe_ctx, 0, sizeof(*pipe_ctx));
	/* drop the status entry and release the plane reference */
	for (i = 0; i < stream_status->plane_count; i++) {
		if (stream_status->plane_states[i] == plane_state) {
			dc_plane_state_release(stream_status->plane_states[i]);
	if (i == stream_status->plane_count) {
		dm_error("Existing plane_state not found; failed to detach it!\n");
	stream_status->plane_count--;
	/* Start at the plane we've just released, and move all the planes one index forward to "trim" the array */
	for (; i < stream_status->plane_count; i++)
		stream_status->plane_states[i] = stream_status->plane_states[i + 1];
	stream_status->plane_states[stream_status->plane_count] = NULL;
/*
 * dc_rem_all_planes_for_stream() - detach every plane currently attached
 * to @stream in @context.
 *
 * Snapshots the plane list first because dc_remove_plane_from_context()
 * compacts stream_status->plane_states while iterating.
 */
bool dc_rem_all_planes_for_stream(
		const struct dc *dc,
		struct dc_stream_state *stream,
		struct dc_state *context)
	int i, old_plane_count;
	struct dc_stream_status *stream_status = NULL;
	struct dc_plane_state *del_planes[MAX_SURFACE_NUM] = { 0 };
	for (i = 0; i < context->stream_count; i++)
		if (context->streams[i] == stream) {
			stream_status = &context->stream_status[i];
	if (stream_status == NULL) {
		dm_error("Existing stream %p not found!\n", stream);
	old_plane_count = stream_status->plane_count;
	/* snapshot: removal below mutates the plane_states array */
	for (i = 0; i < old_plane_count; i++)
		del_planes[i] = stream_status->plane_states[i];
	for (i = 0; i < old_plane_count; i++)
		if (!dc_remove_plane_from_context(dc, stream, del_planes[i], context))
/*
 * add_all_planes_for_stream() - find @stream in the validation @set and
 * attach each of its planes to @context via dc_add_plane_to_context().
 *
 * Returns false if the stream is not in the set or any attach fails
 * (failure returns are in elided lines).
 */
static bool add_all_planes_for_stream(
		const struct dc *dc,
		struct dc_stream_state *stream,
		const struct dc_validation_set set[],
		struct dc_state *context)
	for (i = 0; i < set_count; i++)
		if (set[i].stream == stream)
	if (i == set_count) {
		dm_error("Stream %p not found in set!\n", stream);
	for (j = 0; j < set[i].plane_count; j++)
		if (!dc_add_plane_to_context(dc, stream, set[i].plane_states[j], context))
/*
 * dc_add_all_planes_for_stream() - convenience wrapper that packs
 * @plane_states into a single-entry dc_validation_set and delegates to
 * add_all_planes_for_stream().
 */
bool dc_add_all_planes_for_stream(
		const struct dc *dc,
		struct dc_stream_state *stream,
		struct dc_plane_state * const *plane_states,
		struct dc_state *context)
	struct dc_validation_set set;
	set.stream = stream;
	set.plane_count = plane_count;
	for (i = 0; i < plane_count; i++)
		set.plane_states[i] = plane_states[i];
	return add_all_planes_for_stream(dc, stream, &set, 1, context);
/*
 * is_timing_changed() - report whether @new_stream needs a full timing
 * reprogram relative to @cur_stream.
 *
 * Treats a NULL current stream, a changed sink (hotplug), a changed output
 * color space, or any byte difference in dc_crtc_timing as "changed".
 */
static bool is_timing_changed(struct dc_stream_state *cur_stream,
		struct dc_stream_state *new_stream)
	if (cur_stream == NULL)
	/* If sink pointer changed, it means this is a hotplug, we should do
	if (cur_stream->sink != new_stream->sink)
	/* If output color space is changed, need to reprogram info frames */
	if (cur_stream->output_color_space != new_stream->output_color_space)
	/* final verdict: raw compare of the two timing structs (memcmp
	 * call line elided) */
			&cur_stream->timing,
			&new_stream->timing,
			sizeof(struct dc_crtc_timing)) != 0;
/*
 * are_stream_backends_same() - true when two streams can share the same
 * back-end programming: same object, or neither NULL and timing unchanged.
 */
static bool are_stream_backends_same(
		struct dc_stream_state *stream_a, struct dc_stream_state *stream_b)
	if (stream_a == stream_b)
	if (stream_a == NULL || stream_b == NULL)
	if (is_timing_changed(stream_a, stream_b))
/*
 * dc_is_stream_unchanged() - public check that @stream's back end matches
 * @old_stream's; thin wrapper over are_stream_backends_same().
 */
bool dc_is_stream_unchanged(
		struct dc_stream_state *old_stream, struct dc_stream_state *stream)
	if (!are_stream_backends_same(old_stream, stream))
/*
 * dc_is_stream_scaling_unchanged() - true when the src and dst rects of
 * the two streams are byte-identical (i.e. no rescale is needed).
 */
bool dc_is_stream_scaling_unchanged(
		struct dc_stream_state *old_stream, struct dc_stream_state *stream)
	if (old_stream == stream)
	if (old_stream == NULL || stream == NULL)
	if (memcmp(&old_stream->src,
			sizeof(struct rect)) != 0)
	if (memcmp(&old_stream->dst,
			sizeof(struct rect)) != 0)
/*
 * update_stream_engine_usage() - mark @stream_enc as acquired/released in
 * the resource context's bookkeeping array (the 'acquired' bool parameter
 * is in an elided line).
 */
static void update_stream_engine_usage(
		struct resource_context *res_ctx,
		const struct resource_pool *pool,
		struct stream_encoder *stream_enc,
	for (i = 0; i < pool->stream_enc_count; i++) {
		if (pool->stream_enc[i] == stream_enc)
			res_ctx->is_stream_enc_acquired[i] = acquired;
/* TODO: release audio object */
/*
 * update_audio_usage() - mark @audio as acquired/released in the resource
 * context's bookkeeping array (the 'acquired' bool parameter is in an
 * elided line).
 */
void update_audio_usage(
		struct resource_context *res_ctx,
		const struct resource_pool *pool,
		struct audio *audio,
	for (i = 0; i < pool->audio_count; i++) {
		if (pool->audios[i] == audio)
			res_ctx->is_audio_acquired[i] = acquired;
/*
 * acquire_first_free_pipe() - take the lowest-index pipe with no stream,
 * bind the pool's per-index HW blocks to it, and attach @stream.
 *
 * Returns the pipe index; the not-found return is in elided lines
 * (presumably a negative error) — confirm against the full file.
 */
static int acquire_first_free_pipe(
		struct resource_context *res_ctx,
		const struct resource_pool *pool,
		struct dc_stream_state *stream)
	for (i = 0; i < pool->pipe_count; i++) {
		if (!res_ctx->pipe_ctx[i].stream) {
			struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];
			/* pipe i always pairs with HW block instance i */
			pipe_ctx->stream_res.tg = pool->timing_generators[i];
			pipe_ctx->plane_res.mi = pool->mis[i];
			pipe_ctx->plane_res.hubp = pool->hubps[i];
			pipe_ctx->plane_res.ipp = pool->ipps[i];
			pipe_ctx->plane_res.xfm = pool->transforms[i];
			pipe_ctx->plane_res.dpp = pool->dpps[i];
			pipe_ctx->stream_res.opp = pool->opps[i];
			pipe_ctx->plane_res.mpcc_inst = pool->dpps[i]->inst;
			pipe_ctx->pipe_idx = i;
			pipe_ctx->stream = stream;
/*
 * find_first_free_match_stream_enc_for_link() - pick a stream encoder for
 * @stream's link.
 *
 * Prefers the free encoder whose id matches the link encoder's preferred
 * engine; otherwise, for DP signals, falls back to the last free encoder
 * seen (tracked in 'j', whose declaration/update lines are elided).
 */
static struct stream_encoder *find_first_free_match_stream_enc_for_link(
		struct resource_context *res_ctx,
		const struct resource_pool *pool,
		struct dc_stream_state *stream)
	struct dc_link *link = stream->sink->link;
	for (i = 0; i < pool->stream_enc_count; i++) {
		if (!res_ctx->is_stream_enc_acquired[i] &&
				pool->stream_enc[i]) {
			/* Store first available for MST second display
			 * in daisy chain use case */
			if (pool->stream_enc[i]->id ==
					link->link_enc->preferred_engine)
				return pool->stream_enc[i];
	 * below can happen in cases when stream encoder is acquired:
	 * 1) for second MST display in chain, so preferred engine already
	 * 2) for another link, which preferred engine already acquired by any
	 * MST configuration.
	 * If signal is of DP type and preferred engine not found, return last available
	 * TODO - This is just a patch up and a generic solution is
	 * required for non DP connectors.
	if (j >= 0 && dc_is_dp_signal(stream->signal))
		return pool->stream_enc[j];
/*
 * find_first_free_audio() - pick an audio endpoint.
 *
 * First pass prefers a free audio whose index also has an acquired stream
 * encoder (matching by instance; the id parameter and match check are in
 * elided lines). Second pass takes any free audio. NOTE(review): no NULL
 * fallthrough is visible when nothing is free, and neither pass shows a
 * bounds check against pool->audios sizing — confirm in the full file.
 */
static struct audio *find_first_free_audio(
		struct resource_context *res_ctx,
		const struct resource_pool *pool,
	for (i = 0; i < pool->audio_count; i++) {
		if ((res_ctx->is_audio_acquired[i] == false) && (res_ctx->is_stream_enc_acquired[i] == true)) {
			/*we have enough audio endpoint, find the matching inst*/
			return pool->audios[i];
	/*not found the matching one, first come first serve*/
	for (i = 0; i < pool->audio_count; i++) {
		if (res_ctx->is_audio_acquired[i] == false) {
			return pool->audios[i];
/*
 * resource_is_stream_unchanged() - true if some stream in @old_context has
 * the same back end as @stream (timing-compatible match by content, not by
 * pointer identity).
 */
bool resource_is_stream_unchanged(
		struct dc_state *old_context, struct dc_stream_state *stream)
	for (i = 0; i < old_context->stream_count; i++) {
		struct dc_stream_state *old_stream = old_context->streams[i];
		if (are_stream_backends_same(old_stream, stream))
/*
 * dc_add_stream_to_ctx() - append @stream to @new_ctx (taking a reference)
 * and let the pool-specific hook allocate its resources.
 *
 * Returns DC_ERROR_UNEXPECTED when the context is already at the pipe
 * limit; otherwise propagates the hook's status (return path elided).
 */
enum dc_status dc_add_stream_to_ctx(
		struct dc_state *new_ctx,
		struct dc_stream_state *stream)
	struct dc_context *dc_ctx = dc->ctx;
	if (new_ctx->stream_count >= dc->res_pool->pipe_count) {
		DC_ERROR("Max streams reached, can add stream %p !\n", stream);
		return DC_ERROR_UNEXPECTED;
	new_ctx->streams[new_ctx->stream_count] = stream;
	dc_stream_retain(stream);
	new_ctx->stream_count++;
	res = dc->res_pool->funcs->add_stream_to_ctx(dc, new_ctx, stream);
		DC_ERROR("Adding stream %p to context failed with err %d!\n", stream, res);
/*
 * dc_remove_stream_from_ctx() - remove @stream from @new_ctx: free its
 * primary pipe's stream encoder / audio / clock source bookkeeping, give
 * the pool a chance to release its own resources, drop the stream
 * reference and compact the streams / stream_status arrays.
 */
enum dc_status dc_remove_stream_from_ctx(
		struct dc_state *new_ctx,
		struct dc_stream_state *stream)
	struct dc_context *dc_ctx = dc->ctx;
	struct pipe_ctx *del_pipe = NULL;
	/* Release primary pipe */
	for (i = 0; i < MAX_PIPES; i++) {
		if (new_ctx->res_ctx.pipe_ctx[i].stream == stream &&
				!new_ctx->res_ctx.pipe_ctx[i].top_pipe) {
			del_pipe = &new_ctx->res_ctx.pipe_ctx[i];
			ASSERT(del_pipe->stream_res.stream_enc);
			/* mark the stream encoder released (args partly elided) */
			update_stream_engine_usage(
				del_pipe->stream_res.stream_enc,
			if (del_pipe->stream_res.audio)
				/* release audio endpoint (call partly elided) */
					del_pipe->stream_res.audio,
			resource_unreference_clock_source(&new_ctx->res_ctx,
					del_pipe->clock_source);
			if (dc->res_pool->funcs->remove_stream_from_ctx)
				dc->res_pool->funcs->remove_stream_from_ctx(dc, new_ctx, stream);
			memset(del_pipe, 0, sizeof(*del_pipe));
		DC_ERROR("Pipe not found for stream %p !\n", stream);
		return DC_ERROR_UNEXPECTED;
	for (i = 0; i < new_ctx->stream_count; i++)
		if (new_ctx->streams[i] == stream)
	/* NOTE(review): when the stream is absent, i == stream_count here and
	 * this reads streams[stream_count] — appears to rely on that slot
	 * being NULL; confirm array sizing / consider i == stream_count. */
	if (new_ctx->streams[i] != stream) {
		DC_ERROR("Context doesn't have stream %p !\n", stream);
		return DC_ERROR_UNEXPECTED;
	dc_stream_release(new_ctx->streams[i]);
	new_ctx->stream_count--;
	/* Trim back arrays */
	for (; i < new_ctx->stream_count; i++) {
		new_ctx->streams[i] = new_ctx->streams[i + 1];
		new_ctx->stream_status[i] = new_ctx->stream_status[i + 1];
	new_ctx->streams[new_ctx->stream_count] = NULL;
	/* zero the now-unused trailing status slot (memset call partly elided) */
		&new_ctx->stream_status[new_ctx->stream_count],
		sizeof(new_ctx->stream_status[0]));
/*
 * copy_pipe_ctx() - copy @from_pipe_ctx into @to_pipe_ctx while preserving
 * the destination's own stream pointer and (if set) plane_state pointer.
 */
static void copy_pipe_ctx(
		const struct pipe_ctx *from_pipe_ctx, struct pipe_ctx *to_pipe_ctx)
	/* stash the fields the destination must keep across the struct copy */
	struct dc_plane_state *plane_state = to_pipe_ctx->plane_state;
	struct dc_stream_state *stream = to_pipe_ctx->stream;
	*to_pipe_ctx = *from_pipe_ctx;
	to_pipe_ctx->stream = stream;
	if (plane_state != NULL)
		to_pipe_ctx->plane_state = plane_state;
/*
 * find_pll_sharable_stream() - return a stream in @context whose PLL
 * @stream_needs_pll could share: timing-synchronizable, not DP, and not on
 * a virtual connector. Returns NULL when none (elided line).
 */
static struct dc_stream_state *find_pll_sharable_stream(
		struct dc_stream_state *stream_needs_pll,
		struct dc_state *context)
	for (i = 0; i < context->stream_count; i++) {
		struct dc_stream_state *stream_has_pll = context->streams[i];
		/* We are looking for non dp, non virtual stream */
		if (resource_are_streams_timing_synchronizable(
			stream_needs_pll, stream_has_pll)
			&& !dc_is_dp_signal(stream_has_pll->signal)
			&& stream_has_pll->sink->link->connector_signal
			!= SIGNAL_TYPE_VIRTUAL)
			return stream_has_pll;
/*
 * get_norm_pix_clk() - normalize a pixel clock for deep color.
 *
 * For non-4:2:2 encodings, scales pix_clk_khz by color depth relative to
 * 24bpp (30/24, 36/24, 48/24). YCbCr422 keeps the base clock; the
 * YCbCr420 branch body is elided. Breaks between cases are elided too.
 */
static int get_norm_pix_clk(const struct dc_crtc_timing *timing)
	uint32_t pix_clk = timing->pix_clk_khz;
	uint32_t normalized_pix_clk = pix_clk;
	if (timing->pixel_encoding == PIXEL_ENCODING_YCBCR420)
	if (timing->pixel_encoding != PIXEL_ENCODING_YCBCR422) {
		switch (timing->display_color_depth) {
		case COLOR_DEPTH_888:
			normalized_pix_clk = pix_clk;
		case COLOR_DEPTH_101010:
			normalized_pix_clk = (pix_clk * 30) / 24;
		case COLOR_DEPTH_121212:
			normalized_pix_clk = (pix_clk * 36) / 24;
		case COLOR_DEPTH_161616:
			normalized_pix_clk = (pix_clk * 48) / 24;
	return normalized_pix_clk;
/*
 * calculate_phy_pix_clks() - set @stream->phy_pix_clk: the deep-color
 * normalized clock for HDMI, the raw timing pixel clock otherwise.
 */
static void calculate_phy_pix_clks(struct dc_stream_state *stream)
	/* update actual pixel clock on all streams */
	if (dc_is_hdmi_signal(stream->signal))
		stream->phy_pix_clk = get_norm_pix_clk(
		stream->phy_pix_clk =
			stream->timing.pix_clk_khz;
/*
 * resource_map_pool_resources() - bind pool HW resources to @stream in
 * @context: a pipe (with DCN1.0 split-pipe fallback), a stream encoder,
 * and — for audio-capable signals with EDID audio modes — an audio
 * endpoint. Records the OTG and encoder instances in the stream status.
 */
enum dc_status resource_map_pool_resources(
		const struct dc *dc,
		struct dc_state *context,
		struct dc_stream_state *stream)
	const struct resource_pool *pool = dc->res_pool;
	struct dc_context *dc_ctx = dc->ctx;
	struct pipe_ctx *pipe_ctx = NULL;
	/* TODO Check if this is needed */
	/*if (!resource_is_stream_unchanged(old_context, stream)) {
			if (stream != NULL && old_context->streams[i] != NULL) {
				stream->bit_depth_params =
						old_context->streams[i]->bit_depth_params;
				stream->clamping = old_context->streams[i]->clamping;
	/* acquire new resources */
	pipe_idx = acquire_first_free_pipe(&context->res_ctx, pool, stream);
#ifdef CONFIG_DRM_AMD_DC_DCN1_0
	/* DCN1.0: no free pipe — try stealing a split pipe (guard elided) */
		pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream);
		return DC_NO_CONTROLLER_RESOURCE;
	pipe_ctx = &context->res_ctx.pipe_ctx[pipe_idx];
	pipe_ctx->stream_res.stream_enc =
		find_first_free_match_stream_enc_for_link(
			&context->res_ctx, pool, stream);
	if (!pipe_ctx->stream_res.stream_enc)
		return DC_NO_STREAM_ENG_RESOURCE;
	update_stream_engine_usage(
		&context->res_ctx, pool,
		pipe_ctx->stream_res.stream_enc,
	/* TODO: Add check if ASIC support and EDID audio */
	if (!stream->sink->converter_disable_audio &&
			dc_is_audio_capable_signal(pipe_ctx->stream->signal) &&
			stream->audio_info.mode_count) {
		pipe_ctx->stream_res.audio = find_first_free_audio(
		&context->res_ctx, pool, pipe_ctx->stream_res.stream_enc->id);
		 * Audio assigned in order first come first get.
		 * There are asics which has number of audio
		 * resources less then number of pipes
		if (pipe_ctx->stream_res.audio)
			update_audio_usage(&context->res_ctx, pool,
			pipe_ctx->stream_res.audio, true);
	/* publish the chosen instances into the stream status */
	for (i = 0; i < context->stream_count; i++)
		if (context->streams[i] == stream) {
			context->stream_status[i].primary_otg_inst = pipe_ctx->stream_res.tg->inst;
			context->stream_status[i].stream_enc_inst = pipe_ctx->stream_res.stream_enc->id;
	DC_ERROR("Stream %p not found in new ctx!\n", stream);
	return DC_ERROR_UNEXPECTED;
/* first stream in the context is used to populate the rest */
/*
 * validate_guaranteed_copy_streams() - replicate stream[0] and pipe_ctx[0]
 * into slots 1..max_streams-1, retaining each duplicated stream reference
 * and bumping stream_count (the max_streams parameter line is elided).
 */
void validate_guaranteed_copy_streams(
		struct dc_state *context,
	for (i = 1; i < max_streams; i++) {
		context->streams[i] = context->streams[0];
		copy_pipe_ctx(&context->res_ctx.pipe_ctx[0],
			      &context->res_ctx.pipe_ctx[i]);
		context->res_ctx.pipe_ctx[i].stream =
			context->res_ctx.pipe_ctx[0].stream;
		dc_stream_retain(context->streams[i]);
		context->stream_count++;
/*
 * dc_resource_state_copy_construct_current() - initialize @dst_ctx as a
 * copy of the DC's currently-committed state.
 */
void dc_resource_state_copy_construct_current(
		const struct dc *dc,
		struct dc_state *dst_ctx)
	dc_resource_state_copy_construct(dc->current_state, dst_ctx);
/*
 * dc_resource_state_construct() - minimal state init: point the new state
 * at the pool's display clock object.
 */
void dc_resource_state_construct(
		const struct dc *dc,
		struct dc_state *dst_ctx)
	dst_ctx->dis_clk = dc->res_pool->display_clock;
/*
 * dc_validate_global_state() - whole-context validation: pool-specific
 * global checks, DP clock-source reassignment for DP streams that cannot
 * share a PLL, scaling parameter build, then bandwidth validation.
 */
enum dc_status dc_validate_global_state(
		struct dc_state *new_ctx)
	enum dc_status result = DC_ERROR_UNEXPECTED;
	return DC_ERROR_UNEXPECTED;
	if (dc->res_pool->funcs->validate_global) {
		result = dc->res_pool->funcs->validate_global(dc, new_ctx);
		if (result != DC_OK)
	for (i = 0; i < new_ctx->stream_count; i++) {
		struct dc_stream_state *stream = new_ctx->streams[i];
		for (j = 0; j < dc->res_pool->pipe_count; j++) {
			struct pipe_ctx *pipe_ctx = &new_ctx->res_ctx.pipe_ctx[j];
			if (pipe_ctx->stream != stream)
			/* Switch to dp clock source only if there is
			 * no non dp stream that shares the same timing
			 * with the dp stream.
			if (dc_is_dp_signal(pipe_ctx->stream->signal) &&
				!find_pll_sharable_stream(stream, new_ctx)) {
				/* rebalance refcounts: drop the old source,
				 * take the dedicated DP clock source */
				resource_unreference_clock_source(
						pipe_ctx->clock_source);
				pipe_ctx->clock_source = dc->res_pool->dp_clock_source;
				resource_reference_clock_source(
						pipe_ctx->clock_source);
	result = resource_build_scaling_params_for_context(dc, new_ctx);
	if (result == DC_OK)
		if (!dc->res_pool->funcs->validate_bandwidth(dc, new_ctx))
			result = DC_FAIL_BANDWIDTH_VALIDATE;
/*
 * patch_gamut_packet_checksum() - recompute the gamut packet's checksum
 * byte (sb[2]) as the one's-complement-style sum over the gamut payload
 * starting at sb[3]; sb[1] holds the payload length.
 */
static void patch_gamut_packet_checksum(
		struct encoder_info_packet *gamut_packet)
	/* For gamut we recalc checksum */
	if (gamut_packet->valid) {
		uint8_t chk_sum = 0;
		/*start of the Gamut data. */
		ptr = &gamut_packet->sb[3];
		/* accumulation statement elided in this excerpt */
		for (i = 0; i <= gamut_packet->sb[1]; i++)
		gamut_packet->sb[2] = (uint8_t) (0x100 - chk_sum);
/*
 * set_avi_info_frame() - build the HDMI AVI InfoFrame (CEA-861/CTA-861)
 * for @pipe_ctx's stream into @info_packet.
 *
 * Fills pixel encoding (Y), colorimetry (C/EC), aspect ratio (M/R),
 * content type (ITC/CN), quantization (Q/YQ), VIC, bar info, computes the
 * payload checksum, and copies the raw packet bytes out. Several switch
 * bodies and assignments are elided in this excerpt.
 */
static void set_avi_info_frame(
		struct encoder_info_packet *info_packet,
		struct pipe_ctx *pipe_ctx)
	struct dc_stream_state *stream = pipe_ctx->stream;
	enum dc_color_space color_space = COLOR_SPACE_UNKNOWN;
	struct info_frame info_frame = { {0} };
	uint32_t pixel_encoding = 0;
	enum scanning_type scan_type = SCANNING_TYPE_NODATA;
	enum dc_aspect_ratio aspect = ASPECT_RATIO_NO_DATA;
	uint8_t itc_value = 0;
	uint8_t cn0_cn1 = 0;
	unsigned int cn0_cn1_value = 0;
	uint8_t *check_sum = NULL;
	uint8_t byte_index = 0;
	union hdmi_info_packet *hdmi_info = &info_frame.avi_info_packet.info_packet_hdmi;
	union display_content_support support = {0};
	unsigned int vic = pipe_ctx->stream->timing.vic;
	enum dc_timing_3d_format format;
	color_space = pipe_ctx->stream->output_color_space;
	/* unknown color space: infer from pixel encoding */
	if (color_space == COLOR_SPACE_UNKNOWN)
		color_space = (stream->timing.pixel_encoding == PIXEL_ENCODING_RGB) ?
			COLOR_SPACE_SRGB:COLOR_SPACE_YCBCR709;
	/* Initialize header */
	hdmi_info->bits.header.info_frame_type = HDMI_INFOFRAME_TYPE_AVI;
	/* InfoFrameVersion_3 is defined by CEA861F (Section 6.4), but shall
	 * not be used in HDMI 2.0 (Section 10.1) */
	hdmi_info->bits.header.version = 2;
	hdmi_info->bits.header.length = HDMI_AVI_INFOFRAME_SIZE;
	 * IDO-defined (Y2,Y1,Y0 = 1,1,1) shall not be used by devices built
	 * according to HDMI 2.0 spec (Section 10.1)
	/* per-case pixel_encoding assignments are elided in this excerpt */
	switch (stream->timing.pixel_encoding) {
	case PIXEL_ENCODING_YCBCR422:
	case PIXEL_ENCODING_YCBCR444:
	case PIXEL_ENCODING_YCBCR420:
	case PIXEL_ENCODING_RGB:
	/* Y0_Y1_Y2 : The pixel encoding */
	/* H14b AVI InfoFrame has extension on Y-field from 2 bits to 3 bits */
	hdmi_info->bits.Y0_Y1_Y2 = pixel_encoding;
	/* A0 = 1 Active Format Information valid */
	hdmi_info->bits.A0 = ACTIVE_FORMAT_VALID;
	/* B0, B1 = 3; Bar info data is valid */
	hdmi_info->bits.B0_B1 = BAR_INFO_BOTH_VALID;
	hdmi_info->bits.SC0_SC1 = PICTURE_SCALING_UNIFORM;
	/* S0, S1 : Underscan / Overscan */
	/* TODO: un-hardcode scan type */
	scan_type = SCANNING_TYPE_UNDERSCAN;
	hdmi_info->bits.S0_S1 = scan_type;
	/* C0, C1 : Colorimetry */
	if (color_space == COLOR_SPACE_YCBCR709 ||
			color_space == COLOR_SPACE_YCBCR709_LIMITED)
		hdmi_info->bits.C0_C1 = COLORIMETRY_ITU709;
	else if (color_space == COLOR_SPACE_YCBCR601 ||
			color_space == COLOR_SPACE_YCBCR601_LIMITED)
		hdmi_info->bits.C0_C1 = COLORIMETRY_ITU601;
		hdmi_info->bits.C0_C1 = COLORIMETRY_NO_DATA;
	/* BT.2020 / AdobeRGB use the extended colorimetry field */
	if (color_space == COLOR_SPACE_2020_RGB_FULLRANGE ||
			color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE ||
			color_space == COLOR_SPACE_2020_YCBCR) {
		hdmi_info->bits.EC0_EC2 = COLORIMETRYEX_BT2020RGBYCBCR;
		hdmi_info->bits.C0_C1 = COLORIMETRY_EXTENDED;
	} else if (color_space == COLOR_SPACE_ADOBERGB) {
		hdmi_info->bits.EC0_EC2 = COLORIMETRYEX_ADOBERGB;
		hdmi_info->bits.C0_C1 = COLORIMETRY_EXTENDED;
	/* TODO: un-hardcode aspect ratio */
	aspect = stream->timing.aspect_ratio;
	case ASPECT_RATIO_4_3:
	case ASPECT_RATIO_16_9:
		hdmi_info->bits.M0_M1 = aspect;
	case ASPECT_RATIO_NO_DATA:
	case ASPECT_RATIO_64_27:
	case ASPECT_RATIO_256_135:
		hdmi_info->bits.M0_M1 = 0;
	/* Active Format Aspect ratio - same as Picture Aspect Ratio. */
	hdmi_info->bits.R0_R3 = ACTIVE_FORMAT_ASPECT_RATIO_SAME_AS_PICTURE;
	/* TODO: un-hardcode cn0_cn1 and itc */
	support = stream->sink->edid_caps.content_support;
	/* map requested content type against the sink's EDID support bits;
	 * the cn0_cn1_value / itc_value assignments are elided */
	if (!support.bits.valid_content_type) {
	if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GRAPHICS) {
		if (support.bits.graphics_content == 1) {
	} else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_PHOTO) {
		if (support.bits.photo_content == 1) {
	} else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_CINEMA) {
		if (support.bits.cinema_content == 1) {
	} else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GAME) {
		if (support.bits.game_content == 1) {
	hdmi_info->bits.CN0_CN1 = cn0_cn1_value;
	hdmi_info->bits.ITC = itc_value;
	/* TODO : We should handle YCC quantization */
	/* but we do not have matrix calculation */
	if (stream->sink->edid_caps.qs_bit == 1 &&
			stream->sink->edid_caps.qy_bit == 1) {
		if (color_space == COLOR_SPACE_SRGB ||
			color_space == COLOR_SPACE_2020_RGB_FULLRANGE) {
			hdmi_info->bits.Q0_Q1 = RGB_QUANTIZATION_FULL_RANGE;
			hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_FULL_RANGE;
		} else if (color_space == COLOR_SPACE_SRGB_LIMITED ||
			color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE) {
			hdmi_info->bits.Q0_Q1 = RGB_QUANTIZATION_LIMITED_RANGE;
			hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
		hdmi_info->bits.Q0_Q1 = RGB_QUANTIZATION_DEFAULT_RANGE;
		hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
		hdmi_info->bits.Q0_Q1 = RGB_QUANTIZATION_DEFAULT_RANGE;
		hdmi_info->bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
	format = stream->timing.timing_3d_format;
	/*todo, add 3DStereo support*/
	if (format != TIMING_3D_FORMAT_NONE) {
		// Based on HDMI specs hdmi vic needs to be converted to cea vic when 3D is enabled
		switch (pipe_ctx->stream->timing.hdmi_vic) {
	hdmi_info->bits.VIC0_VIC7 = vic;
	 * PR0 - PR3 start from 0 whereas pHwPathMode->mode.timing.flags.pixel
	 * repetition start from 1 */
	hdmi_info->bits.PR0_PR3 = 0;
	 * barTop: Line Number of End of Top Bar.
	 * barBottom: Line Number of Start of Bottom Bar.
	 * barLeft: Pixel Number of End of Left Bar.
	 * barRight: Pixel Number of Start of Right Bar. */
	hdmi_info->bits.bar_top = stream->timing.v_border_top;
	hdmi_info->bits.bar_bottom = (stream->timing.v_total
			- stream->timing.v_border_bottom + 1);
	hdmi_info->bits.bar_left = stream->timing.h_border_left;
	hdmi_info->bits.bar_right = (stream->timing.h_total
			- stream->timing.h_border_right + 1);
	/* check_sum - Calculate AFMT_AVI_INFO0 ~ AFMT_AVI_INFO3 */
	check_sum = &info_frame.avi_info_packet.info_packet_hdmi.packet_raw_data.sb[0];
	*check_sum = HDMI_INFOFRAME_TYPE_AVI + HDMI_AVI_INFOFRAME_SIZE + 2;
	for (byte_index = 1; byte_index <= HDMI_AVI_INFOFRAME_SIZE; byte_index++)
		*check_sum += hdmi_info->packet_raw_data.sb[byte_index];
	/* one byte complement */
	*check_sum = (uint8_t) (0x100 - *check_sum);
	/* Store in hw_path_mode */
	info_packet->hb0 = hdmi_info->packet_raw_data.hb0;
	info_packet->hb1 = hdmi_info->packet_raw_data.hb1;
	info_packet->hb2 = hdmi_info->packet_raw_data.hb2;
	for (byte_index = 0; byte_index < sizeof(info_frame.avi_info_packet.
				info_packet_hdmi.packet_raw_data.sb); byte_index++)
		info_packet->sb[byte_index] = info_frame.avi_info_packet.
				info_packet_hdmi.packet_raw_data.sb[byte_index];
	info_packet->valid = true;
/*
 * set_vendor_info_packet() - build the HDMI Vendor Specific InfoFrame
 * (VSIF, HDMI 1.4) for @stream into @info_packet.
 *
 * Emitted only for 3D stereo timings or HDMI-VIC (4K) modes; fills the
 * IEEE OUI, HDMI_Video_Format, 3D_Structure or HDMI_VIC, and checksum.
 * The 'length' assignments and some switch scaffolding are elided.
 */
static void set_vendor_info_packet(
		struct encoder_info_packet *info_packet,
		struct dc_stream_state *stream)
	uint32_t length = 0;
	bool hdmi_vic_mode = false;
	uint8_t checksum = 0;
	enum dc_timing_3d_format format;
	// Can be different depending on packet content /*todo*/
	// unsigned int length = pPathMode->dolbyVision ? 24 : 5;
	info_packet->valid = false;
	format = stream->timing.timing_3d_format;
	if (stream->view_format == VIEW_3D_FORMAT_NONE)
		format = TIMING_3D_FORMAT_NONE;
	/* Can be different depending on packet content */
	/* HDMI-VIC path: 4K modes signalled via hdmi_vic */
	if (stream->timing.hdmi_vic != 0
			&& stream->timing.h_total >= 3840
			&& stream->timing.v_total >= 2160)
		hdmi_vic_mode = true;
	/* According to HDMI 1.4a CTS, VSIF should be sent
	 * for both 3D stereo and HDMI VIC modes.
	 * For all other modes, there is no VSIF sent. */
	if (format == TIMING_3D_FORMAT_NONE && !hdmi_vic_mode)
	/* 24bit IEEE Registration identifier (0x000c03). LSB first. */
	info_packet->sb[1] = 0x03;
	info_packet->sb[2] = 0x0C;
	info_packet->sb[3] = 0x00;
	/*PB4: 5 lower bytes = 0 (reserved). 3 higher bits = HDMI_Video_Format.
	 * The value for HDMI_Video_Format are:
	 * 0x0 (0b000) - No additional HDMI video format is presented in this
	 * 0x1 (0b001) - Extended resolution format present. 1 byte of HDMI_VIC
	 * 0x2 (0b010) - 3D format indication present. 3D_Structure and
	 * potentially 3D_Ext_Data follows
	 * 0x3..0x7 (0b011..0b111) - reserved for future use */
	if (format != TIMING_3D_FORMAT_NONE)
		info_packet->sb[4] = (2 << 5);
	else if (hdmi_vic_mode)
		info_packet->sb[4] = (1 << 5);
	/* PB5: If PB4 claims 3D timing (HDMI_Video_Format = 0x2):
	 * 4 lower bites = 0 (reserved). 4 higher bits = 3D_Structure.
	 * The value for 3D_Structure are:
	 * 0x0 - Frame Packing
	 * 0x1 - Field Alternative
	 * 0x2 - Line Alternative
	 * 0x3 - Side-by-Side (full)
	 * 0x5 - L + depth + graphics + graphics-depth
	 * 0x6 - Top-and-Bottom
	 * 0x7 - Reserved for future use
	 * 0x8 - Side-by-Side (Half)
	 * 0x9..0xE - Reserved for future use
	case TIMING_3D_FORMAT_HW_FRAME_PACKING:
	case TIMING_3D_FORMAT_SW_FRAME_PACKING:
		info_packet->sb[5] = (0x0 << 4);
	case TIMING_3D_FORMAT_SIDE_BY_SIDE:
	case TIMING_3D_FORMAT_SBS_SW_PACKED:
		info_packet->sb[5] = (0x8 << 4);
	case TIMING_3D_FORMAT_TOP_AND_BOTTOM:
	case TIMING_3D_FORMAT_TB_SW_PACKED:
		info_packet->sb[5] = (0x6 << 4);
	/*PB5: If PB4 is set to 0x1 (extended resolution format)
	 * fill PB5 with the correct HDMI VIC code */
		info_packet->sb[5] = stream->timing.hdmi_vic;
	info_packet->hb0 = HDMI_INFOFRAME_TYPE_VENDOR; /* VSIF packet type. */
	info_packet->hb1 = 0x01; /* Version */
	/* 4 lower bits = Length, 4 higher bits = 0 (reserved) */
	info_packet->hb2 = (uint8_t) (length);
	/* Calculate checksum */
	checksum += info_packet->hb0;
	checksum += info_packet->hb1;
	checksum += info_packet->hb2;
	for (i = 1; i <= length; i++)
		checksum += info_packet->sb[i];
	/* PB0 = one-byte complement checksum */
	info_packet->sb[0] = (uint8_t) (0x100 - checksum);
	info_packet->valid = true;
/*
 * set_spd_info_packet() - build the FreeSync SPD InfoFrame for @stream
 * into @info_packet.
 *
 * Uses the HDMI SPD InfoFrame header for HDMI sinks and a DP SDP header
 * for DP sinks; the payload carries the AMD OUI, FreeSync
 * supported/enabled/active flags, and min/max refresh rates in Hz,
 * finished with a one-byte-complement checksum.
 */
static void set_spd_info_packet(
		struct encoder_info_packet *info_packet,
		struct dc_stream_state *stream)
	/* SPD info packet for FreeSync */
	unsigned char checksum = 0;
	unsigned int idx, payload_size = 0;
	/* Check if Freesync is supported. Return if false. If true,
	 * set the corresponding bit in the info packet
	if (stream->freesync_ctx.supported == false)
	if (dc_is_hdmi_signal(stream->signal)) {
		/* HB0 = Packet Type = 0x83 (Source Product
		 * Descriptor InfoFrame)
		info_packet->hb0 = HDMI_INFOFRAME_TYPE_SPD;
		/* HB1 = Version = 0x01 */
		info_packet->hb1 = 0x01;
		/* HB2 = [Bits 7:5 = 0] [Bits 4:0 = Length = 0x08] */
		info_packet->hb2 = 0x08;
		payload_size = 0x08;
	} else if (dc_is_dp_signal(stream->signal)) {
		/* HB0 = Secondary-data Packet ID = 0 - Only non-zero
		 * when used to associate audio related info packets
		info_packet->hb0 = 0x00;
		/* HB1 = Packet Type = 0x83 (Source Product
		 * Descriptor InfoFrame)
		info_packet->hb1 = HDMI_INFOFRAME_TYPE_SPD;
		/* HB2 = [Bits 7:0 = Least significant eight bits -
		 * For INFOFRAME, the value must be 1Bh]
		info_packet->hb2 = 0x1B;
		/* HB3 = [Bits 7:2 = INFOFRAME SDP Version Number = 0x1]
		 * [Bits 1:0 = Most significant two bits = 0x00]
		info_packet->hb3 = 0x04;
		payload_size = 0x1B;
	/* PB1 = 0x1A (24bit AMD IEEE OUI (0x00001A) - Byte 0) */
	info_packet->sb[1] = 0x1A;
	/* PB2 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 1) */
	info_packet->sb[2] = 0x00;
	/* PB3 = 0x00 (24bit AMD IEEE OUI (0x00001A) - Byte 2) */
	info_packet->sb[3] = 0x00;
	/* PB4 = Reserved */
	info_packet->sb[4] = 0x00;
	/* PB5 = Reserved */
	info_packet->sb[5] = 0x00;
	/* PB6 = [Bits 7:3 = Reserved] */
	info_packet->sb[6] = 0x00;
	if (stream->freesync_ctx.supported == true)
		/* PB6 = [Bit 0 = FreeSync Supported] */
		info_packet->sb[6] |= 0x01;
	if (stream->freesync_ctx.enabled == true)
		/* PB6 = [Bit 1 = FreeSync Enabled] */
		info_packet->sb[6] |= 0x02;
	if (stream->freesync_ctx.active == true)
		/* PB6 = [Bit 2 = FreeSync Active] */
		info_packet->sb[6] |= 0x04;
	/* PB7 = FreeSync Minimum refresh rate (Hz) */
	info_packet->sb[7] = (unsigned char) (stream->freesync_ctx.
			min_refresh_in_micro_hz / 1000000);
	/* PB8 = FreeSync Maximum refresh rate (Hz)
	 * Note: We do not use the maximum capable refresh rate
	 * of the panel, because we should never go above the field
	 * rate of the mode timing set.
	info_packet->sb[8] = (unsigned char) (stream->freesync_ctx.
			nominal_refresh_in_micro_hz / 1000000);
	/* PB9 - PB27 = Reserved */
	for (idx = 9; idx <= 27; idx++)
		info_packet->sb[idx] = 0x00;
	/* Calculate checksum */
	checksum += info_packet->hb0;
	checksum += info_packet->hb1;
	checksum += info_packet->hb2;
	checksum += info_packet->hb3;
	for (idx = 1; idx <= payload_size; idx++)
		checksum += info_packet->sb[idx];
	/* PB0 = Checksum (one byte complement) */
	info_packet->sb[0] = (unsigned char) (0x100 - checksum);
	info_packet->valid = true;
/*
 * Build the HDR static metadata packet (CTA-861 "Dynamic Range and
 * Mastering" InfoFrame for HDMI, the equivalent SDP for DP) from
 * stream->hdr_static_metadata.  The packet is left untouched (invalid)
 * when the sink reports no HDR support.
 *
 * NOTE(review): the declarations of the locals `data` and `i` fall on
 * lines elided from this excerpt; `i` presumably starts at the first
 * payload byte after sb[0]/sb[1] -- confirm against the full file.
 */
2335 static void set_hdr_static_info_packet(
2336 struct encoder_info_packet *info_packet,
2337 struct dc_stream_state *stream)
2340 enum signal_type signal = stream->signal;
/* Nothing to send if the sink does not support HDR static metadata. */
2343 if (!stream->hdr_static_metadata.hdr_supported)
2346 if (dc_is_hdmi_signal(signal)) {
2347 info_packet->valid = true;
/* HDMI header: HB0 = InfoFrame type 0x87 (Dynamic Range and
 * Mastering), HB1 = version 1, HB2 = payload length 26 (0x1A).
 */
2349 info_packet->hb0 = 0x87;
2350 info_packet->hb1 = 0x01;
2351 info_packet->hb2 = 0x1A;
2353 } else if (dc_is_dp_signal(signal)) {
2354 info_packet->valid = true;
/* DP SDP header: HB0 = secondary-data packet id 0, HB1 = packet
 * type 0x87, HB2 = data size 0x1D, HB3 = version 0x13 in bits 7:2.
 */
2356 info_packet->hb0 = 0x00;
2357 info_packet->hb1 = 0x87;
2358 info_packet->hb2 = 0x1D;
2359 info_packet->hb3 = (0x13 << 2);
/* EOTF byte: 0x02 when the stream is HDR (presumably SMPTE ST 2084 /
 * PQ), 0x00 for traditional SDR; next byte = static metadata type 0.
 */
2363 data = stream->hdr_static_metadata.is_hdr;
2364 info_packet->sb[i++] = data ? 0x02 : 0x00;
2365 info_packet->sb[i++] = 0x00;
/* Display primaries and white point, each emitted little-endian as a
 * 16-bit value.  The /2 rescales the stored chromaticity values to
 * the InfoFrame's 0.00002 units -- presumably the source values use
 * 0.00001 steps; verify against dc_hdr_static_metadata.
 */
2367 data = stream->hdr_static_metadata.chromaticity_green_x / 2;
2368 info_packet->sb[i++] = data & 0xFF;
2369 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2371 data = stream->hdr_static_metadata.chromaticity_green_y / 2;
2372 info_packet->sb[i++] = data & 0xFF;
2373 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2375 data = stream->hdr_static_metadata.chromaticity_blue_x / 2;
2376 info_packet->sb[i++] = data & 0xFF;
2377 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2379 data = stream->hdr_static_metadata.chromaticity_blue_y / 2;
2380 info_packet->sb[i++] = data & 0xFF;
2381 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2383 data = stream->hdr_static_metadata.chromaticity_red_x / 2;
2384 info_packet->sb[i++] = data & 0xFF;
2385 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2387 data = stream->hdr_static_metadata.chromaticity_red_y / 2;
2388 info_packet->sb[i++] = data & 0xFF;
2389 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2391 data = stream->hdr_static_metadata.chromaticity_white_point_x / 2;
2392 info_packet->sb[i++] = data & 0xFF;
2393 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2395 data = stream->hdr_static_metadata.chromaticity_white_point_y / 2;
2396 info_packet->sb[i++] = data & 0xFF;
2397 info_packet->sb[i++] = (data & 0xFF00) >> 8;
/* Mastering display max/min luminance, then content light levels
 * (MaxCLL, MaxFALL), same little-endian 16-bit layout.
 */
2399 data = stream->hdr_static_metadata.max_luminance;
2400 info_packet->sb[i++] = data & 0xFF;
2401 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2403 data = stream->hdr_static_metadata.min_luminance;
2404 info_packet->sb[i++] = data & 0xFF;
2405 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2407 data = stream->hdr_static_metadata.maximum_content_light_level;
2408 info_packet->sb[i++] = data & 0xFF;
2409 info_packet->sb[i++] = (data & 0xFF00) >> 8;
2411 data = stream->hdr_static_metadata.maximum_frame_average_light_level;
2412 info_packet->sb[i++] = data & 0xFF;
2413 info_packet->sb[i++] = (data & 0xFF00) >> 8;
/* HDMI InfoFrames carry a checksum in PB0: header bytes plus all
 * payload bytes plus the checksum must sum to 0 mod 256.
 */
2415 if (dc_is_hdmi_signal(signal)) {
2416 uint32_t checksum = 0;
2418 checksum += info_packet->hb0;
2419 checksum += info_packet->hb1;
2420 checksum += info_packet->hb2;
2422 for (i = 1; i <= info_packet->hb2; i++)
2423 checksum += info_packet->sb[i];
2425 info_packet->sb[0] = 0x100 - checksum;
/* DP carries no checksum here; sb[0]/sb[1] hold the metadata
 * descriptor id and length instead.
 */
2426 } else if (dc_is_dp_signal(signal)) {
2427 info_packet->sb[0] = 0x01;
2428 info_packet->sb[1] = 0x1A;
/*
 * Build the DisplayPort VSC SDP used for PSR.  Only packet revision 2
 * (3D stereo + PSR, eDP v1.3 or higher) is produced, and only when the
 * panel reports a non-zero PSR version; otherwise the packet stays
 * invalid.
 *
 * NOTE(review): the declaration of loop variable `i` falls on a line
 * elided from this excerpt.
 */
2432 static void set_vsc_info_packet(
2433 struct encoder_info_packet *info_packet,
2434 struct dc_stream_state *stream)
2436 unsigned int vscPacketRevision = 0;
2439 /*VSC packet set to 2 when DP revision >= 1.2*/
2440 if (stream->psr_version != 0) {
2441 vscPacketRevision = 2;
2444 /* VSC packet not needed based on the features
2445 * supported by this DP display
2447 if (vscPacketRevision == 0)
2450 if (vscPacketRevision == 0x2) {
2451 /* Secondary-data Packet ID = 0*/
2452 info_packet->hb0 = 0x00;
2453 /* 07h - Packet Type Value indicating Video
2454 * Stream Configuration packet
2456 info_packet->hb1 = 0x07;
2457 /* 02h = VSC SDP supporting 3D stereo and PSR
2458 * (applies to eDP v1.3 or higher).
2460 info_packet->hb2 = 0x02;
2461 /* 08h = VSC packet supporting 3D stereo + PSR
2464 info_packet->hb3 = 0x08;
/* Payload is all zeroes for PSR-only operation. */
2466 for (i = 0; i < 28; i++)
2467 info_packet->sb[i] = 0;
2469 info_packet->valid = true;
2472 /*TODO: stereo 3D support and extend pixel encoding colorimetry*/
/*
 * Drop every reference held by @context: release each stream's plane
 * states, reset the plane count, then release the stream itself and
 * clear its slot.
 */
2475 void dc_resource_state_destruct(struct dc_state *context)
2479 for (i = 0; i < context->stream_count; i++) {
2480 for (j = 0; j < context->stream_status[i].plane_count; j++)
2481 dc_plane_state_release(
2482 context->stream_status[i].plane_states[j]);
2484 context->stream_status[i].plane_count = 0;
2485 dc_stream_release(context->streams[i]);
2486 context->streams[i] = NULL;
2491 * Copy src_ctx into dst_ctx and retain all surfaces and streams referenced
2494 void dc_resource_state_copy_construct(
2495 const struct dc_state *src_ctx,
2496 struct dc_state *dst_ctx)
/* Save the destination's own refcount: the struct assignment below
 * would otherwise clobber it with the source's.
 */
2499 struct kref refcount = dst_ctx->refcount;
2501 *dst_ctx = *src_ctx;
/* The shallow copy left top_pipe/bottom_pipe pointers aimed into
 * src_ctx's pipe array; retarget them into dst_ctx via pipe_idx.
 */
2503 for (i = 0; i < MAX_PIPES; i++) {
2504 struct pipe_ctx *cur_pipe = &dst_ctx->res_ctx.pipe_ctx[i];
2506 if (cur_pipe->top_pipe)
2507 cur_pipe->top_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->top_pipe->pipe_idx];
2509 if (cur_pipe->bottom_pipe)
2510 cur_pipe->bottom_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->bottom_pipe->pipe_idx];
/* Take a reference on every stream and plane state now shared with
 * src_ctx so both contexts own them independently.
 */
2514 for (i = 0; i < dst_ctx->stream_count; i++) {
2515 dc_stream_retain(dst_ctx->streams[i]);
2516 for (j = 0; j < dst_ctx->stream_status[i].plane_count; j++)
2517 dc_plane_state_retain(
2518 dst_ctx->stream_status[i].plane_states[j]);
2521 /* context refcount should not be overridden */
2522 dst_ctx->refcount = refcount;
/*
 * Return the first clock source in @pool whose reference count in
 * @res_ctx is zero (a free PLL).  The no-free-PLL return path falls
 * on a line elided from this excerpt (presumably NULL).
 */
2526 struct clock_source *dc_resource_find_first_free_pll(
2527 struct resource_context *res_ctx,
2528 const struct resource_pool *pool)
2532 for (i = 0; i < pool->clk_src_count; ++i) {
2533 if (res_ctx->clock_source_ref_count[i] == 0)
2534 return pool->clock_sources[i];
/*
 * Populate the stream encoder's info-packet set for @pipe_ctx.  All
 * packets are first marked invalid, then only those relevant to the
 * stream's signal type (HDMI vs DP) are built.
 */
2540 void resource_build_info_frame(struct pipe_ctx *pipe_ctx)
2542 enum signal_type signal = SIGNAL_TYPE_NONE;
2543 struct encoder_info_frame *info = &pipe_ctx->stream_res.encoder_info_frame;
2545 /* default all packets to invalid */
2546 info->avi.valid = false;
2547 info->gamut.valid = false;
2548 info->vendor.valid = false;
2549 info->spd.valid = false;
2550 info->hdrsmd.valid = false;
2551 info->vsc.valid = false;
2553 signal = pipe_ctx->stream->signal;
2555 /* HDMi and DP have different info packets*/
2556 if (dc_is_hdmi_signal(signal)) {
/* HDMI: AVI + vendor-specific + SPD (FreeSync) + HDR static metadata. */
2557 set_avi_info_frame(&info->avi, pipe_ctx);
2559 set_vendor_info_packet(&info->vendor, pipe_ctx->stream);
2561 set_spd_info_packet(&info->spd, pipe_ctx->stream);
2563 set_hdr_static_info_packet(&info->hdrsmd, pipe_ctx->stream);
2565 } else if (dc_is_dp_signal(signal)) {
/* DP: VSC (PSR) + SPD (FreeSync) + HDR static metadata. */
2566 set_vsc_info_packet(&info->vsc, pipe_ctx->stream);
2568 set_spd_info_packet(&info->spd, pipe_ctx->stream);
2570 set_hdr_static_info_packet(&info->hdrsmd, pipe_ctx->stream);
/* Fix up the gamut packet checksum regardless of signal type. */
2573 patch_gamut_packet_checksum(&info->gamut);
/*
 * Assign a clock source to the head pipe of @stream in @context:
 * the pool's dedicated DP clock source for DP/virtual signals;
 * otherwise a PLL already in use that can be shared (unless sharing is
 * disabled by config), falling back to the first free PLL.  On
 * success the chosen source's reference count is bumped in the
 * context; DC_NO_CLOCK_SOURCE_RESOURCE is returned when none is
 * available.
 */
2576 enum dc_status resource_map_clock_resources(
2577 const struct dc *dc,
2578 struct dc_state *context,
2579 struct dc_stream_state *stream)
2581 /* acquire new resources */
2582 const struct resource_pool *pool = dc->res_pool;
2583 struct pipe_ctx *pipe_ctx = resource_get_head_pipe_for_stream(
2584 &context->res_ctx, stream);
/* NOTE(review): the pipe_ctx null-check guarding this return falls on
 * a line elided from this excerpt.
 */
2587 return DC_ERROR_UNEXPECTED;
2589 if (dc_is_dp_signal(pipe_ctx->stream->signal)
2590 || pipe_ctx->stream->signal == SIGNAL_TYPE_VIRTUAL)
2591 pipe_ctx->clock_source = pool->dp_clock_source;
2593 pipe_ctx->clock_source = NULL;
/* Prefer sharing an already-programmed PLL when allowed. */
2595 if (!dc->config.disable_disp_pll_sharing)
2596 pipe_ctx->clock_source = resource_find_used_clk_src_for_sharing(
2600 if (pipe_ctx->clock_source == NULL)
2601 pipe_ctx->clock_source =
2602 dc_resource_find_first_free_pll(
2607 if (pipe_ctx->clock_source == NULL)
2608 return DC_NO_CLOCK_SOURCE_RESOURCE;
2610 resource_reference_clock_source(
2611 &context->res_ctx, pool,
2612 pipe_ctx->clock_source);
2618 * Note: We need to disable output if clock sources change,
2619 * since bios does optimization and doesn't apply if changing
2620 * PHY when not already disabled.
/*
 * Decide whether @pipe_ctx must be fully reprogrammed relative to
 * @pipe_ctx_old.  Any change of sink, signal type, audio resource,
 * clock source (on a different stream), stream encoder, or timing
 * requires reprogramming.
 *
 * NOTE(review): the return statements following each condition fall
 * on lines elided from this excerpt.
 */
2622 bool pipe_need_reprogram(
2623 struct pipe_ctx *pipe_ctx_old,
2624 struct pipe_ctx *pipe_ctx)
2626 if (!pipe_ctx_old->stream)
2629 if (pipe_ctx_old->stream->sink != pipe_ctx->stream->sink)
2632 if (pipe_ctx_old->stream->signal != pipe_ctx->stream->signal)
2635 if (pipe_ctx_old->stream_res.audio != pipe_ctx->stream_res.audio)
2638 if (pipe_ctx_old->clock_source != pipe_ctx->clock_source
2639 && pipe_ctx_old->stream != pipe_ctx->stream)
2642 if (pipe_ctx_old->stream_res.stream_enc != pipe_ctx->stream_res.stream_enc)
2645 if (is_timing_changed(pipe_ctx_old->stream, pipe_ctx->stream))
/*
 * Translate the stream's dither option and color depth into formatter
 * bit-depth-reduction flags: truncation, spatial dithering, and
 * temporal (frame modulation) dithering.  DEPTH encoding throughout:
 * 0 = reduce to 6 bpc, 1 = 8 bpc, 2 = 10 bpc.
 */
2652 void resource_build_bit_depth_reduction_params(struct dc_stream_state *stream,
2653 struct bit_depth_reduction_params *fmt_bit_depth)
2655 enum dc_dither_option option = stream->dither_option;
2656 enum dc_pixel_encoding pixel_encoding =
2657 stream->timing.pixel_encoding;
2659 memset(fmt_bit_depth, 0, sizeof(*fmt_bit_depth));
/* DEFAULT resolves to spatial dithering at the timing's color depth;
 * depths other than 6/8/10 bpc get no reduction at all.
 */
2661 if (option == DITHER_OPTION_DEFAULT) {
2662 switch (stream->timing.display_color_depth) {
2663 case COLOR_DEPTH_666:
2664 option = DITHER_OPTION_SPATIAL6;
2666 case COLOR_DEPTH_888:
2667 option = DITHER_OPTION_SPATIAL8;
2669 case COLOR_DEPTH_101010:
2670 option = DITHER_OPTION_SPATIAL10;
2673 option = DITHER_OPTION_DISABLE;
2677 if (option == DITHER_OPTION_DISABLE)
/* Truncation: TRUN6/8/10 variants enable truncation to the named
 * depth, possibly combined with spatial/temporal dithering below.
 */
2680 if (option == DITHER_OPTION_TRUN6) {
2681 fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2682 fmt_bit_depth->flags.TRUNCATE_DEPTH = 0;
2683 } else if (option == DITHER_OPTION_TRUN8 ||
2684 option == DITHER_OPTION_TRUN8_SPATIAL6 ||
2685 option == DITHER_OPTION_TRUN8_FM6) {
2686 fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2687 fmt_bit_depth->flags.TRUNCATE_DEPTH = 1;
2688 } else if (option == DITHER_OPTION_TRUN10 ||
2689 option == DITHER_OPTION_TRUN10_SPATIAL6 ||
2690 option == DITHER_OPTION_TRUN10_SPATIAL8 ||
2691 option == DITHER_OPTION_TRUN10_FM8 ||
2692 option == DITHER_OPTION_TRUN10_FM6 ||
2693 option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2694 fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2695 fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
2698 /* special case - Formatter can only reduce by 4 bits at most.
2699 * When reducing from 12 to 6 bits,
2700 * HW recommends we use trunc with round mode
2701 * (if we did nothing, trunc to 10 bits would be used)
2702 * note that any 12->10 bit reduction is ignored prior to DCE8,
2703 * as the input was 10 bits.
2705 if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM ||
2706 option == DITHER_OPTION_SPATIAL6 ||
2707 option == DITHER_OPTION_FM6) {
2708 fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2709 fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
2710 fmt_bit_depth->flags.TRUNCATE_MODE = 1;
/* Spatial dithering: RGB_RANDOM only applies to RGB pixel encoding. */
2714 * note that spatial modes 1-3 are never used
2716 if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM ||
2717 option == DITHER_OPTION_SPATIAL6 ||
2718 option == DITHER_OPTION_TRUN10_SPATIAL6 ||
2719 option == DITHER_OPTION_TRUN8_SPATIAL6) {
2720 fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2721 fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 0;
2722 fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2723 fmt_bit_depth->flags.RGB_RANDOM =
2724 (pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2725 } else if (option == DITHER_OPTION_SPATIAL8_FRAME_RANDOM ||
2726 option == DITHER_OPTION_SPATIAL8 ||
2727 option == DITHER_OPTION_SPATIAL8_FM6 ||
2728 option == DITHER_OPTION_TRUN10_SPATIAL8 ||
2729 option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2730 fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2731 fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 1;
2732 fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2733 fmt_bit_depth->flags.RGB_RANDOM =
2734 (pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2735 } else if (option == DITHER_OPTION_SPATIAL10_FRAME_RANDOM ||
2736 option == DITHER_OPTION_SPATIAL10 ||
2737 option == DITHER_OPTION_SPATIAL10_FM8 ||
2738 option == DITHER_OPTION_SPATIAL10_FM6) {
2739 fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2740 fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 2;
2741 fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2742 fmt_bit_depth->flags.RGB_RANDOM =
2743 (pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
/* Plain SPATIAL* options use a fixed pattern; all other spatial
 * combinations add frame-to-frame randomization.
 */
2746 if (option == DITHER_OPTION_SPATIAL6 ||
2747 option == DITHER_OPTION_SPATIAL8 ||
2748 option == DITHER_OPTION_SPATIAL10) {
2749 fmt_bit_depth->flags.FRAME_RANDOM = 0;
2751 fmt_bit_depth->flags.FRAME_RANDOM = 1;
2754 //////////////////////
2755 //// temporal dither
2756 //////////////////////
2757 if (option == DITHER_OPTION_FM6 ||
2758 option == DITHER_OPTION_SPATIAL8_FM6 ||
2759 option == DITHER_OPTION_SPATIAL10_FM6 ||
2760 option == DITHER_OPTION_TRUN10_FM6 ||
2761 option == DITHER_OPTION_TRUN8_FM6 ||
2762 option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2763 fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2764 fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 0;
2765 } else if (option == DITHER_OPTION_FM8 ||
2766 option == DITHER_OPTION_SPATIAL10_FM8 ||
2767 option == DITHER_OPTION_TRUN10_FM8) {
2768 fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2769 fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 1;
2770 } else if (option == DITHER_OPTION_FM10) {
2771 fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2772 fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 2;
2775 fmt_bit_depth->pixel_encoding = pixel_encoding;
/*
 * Validate @stream's timing against the first timing generator, the
 * link encoder's output capabilities, and the link's mode timing.
 * Returns DC_OK or the status of the first failing check.
 *
 * NOTE(review): the guard conditions between checks (e.g. only
 * proceeding while res == DC_OK) and the trailing arguments/return of
 * dc_link_validate_mode_timing fall on lines elided from this excerpt.
 */
2778 enum dc_status dc_validate_stream(struct dc *dc, struct dc_stream_state *stream)
2780 struct dc *core_dc = dc;
2781 struct dc_link *link = stream->sink->link;
2782 struct timing_generator *tg = core_dc->res_pool->timing_generators[0];
2783 enum dc_status res = DC_OK;
2785 calculate_phy_pix_clks(stream);
2787 if (!tg->funcs->validate_timing(tg, &stream->timing))
2788 res = DC_FAIL_CONTROLLER_VALIDATE;
2791 if (!link->link_enc->funcs->validate_output_with_stream(
2792 link->link_enc, stream))
2793 res = DC_FAIL_ENC_VALIDATE;
2795 /* TODO: validate audio ASIC caps, encoder */
2798 res = dc_link_validate_mode_timing(stream,
2805 enum dc_status dc_validate_plane(struct dc *dc, const struct dc_plane_state *plane_state)
2807 enum dc_status res = DC_OK;
2809 /* TODO For now validates pixel format only */
2810 if (dc->res_pool->funcs->validate_plane)
2811 return dc->res_pool->funcs->validate_plane(plane_state, &dc->caps);