2 * Copyright 2012-15 Advanced Micro Devices, Inc.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
25 #include "dm_services.h"
28 #include "include/irq_service_interface.h"
29 #include "link_encoder.h"
30 #include "stream_encoder.h"
32 #include "timing_generator.h"
33 #include "transform.h"
37 #include "core_types.h"
38 #include "set_mode_types.h"
39 #include "virtual/virtual_stream_encoder.h"
40 #include "dpcd_defs.h"
42 #include "dce80/dce80_resource.h"
43 #include "dce100/dce100_resource.h"
44 #include "dce110/dce110_resource.h"
45 #include "dce112/dce112_resource.h"
46 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
47 #include "dcn10/dcn10_resource.h"
49 #include "dce120/dce120_resource.h"
51 #define DC_LOGGER_INIT(logger)
53 enum dce_version resource_parse_asic_id(struct hw_asic_id asic_id)
55 enum dce_version dc_version = DCE_VERSION_UNKNOWN;
56 switch (asic_id.chip_family) {
59 dc_version = DCE_VERSION_8_0;
62 if (ASIC_REV_IS_KALINDI(asic_id.hw_internal_rev) ||
63 ASIC_REV_IS_BHAVANI(asic_id.hw_internal_rev) ||
64 ASIC_REV_IS_GODAVARI(asic_id.hw_internal_rev))
65 dc_version = DCE_VERSION_8_3;
67 dc_version = DCE_VERSION_8_1;
70 dc_version = DCE_VERSION_11_0;
74 if (ASIC_REV_IS_TONGA_P(asic_id.hw_internal_rev) ||
75 ASIC_REV_IS_FIJI_P(asic_id.hw_internal_rev)) {
76 dc_version = DCE_VERSION_10_0;
79 if (ASIC_REV_IS_POLARIS10_P(asic_id.hw_internal_rev) ||
80 ASIC_REV_IS_POLARIS11_M(asic_id.hw_internal_rev) ||
81 ASIC_REV_IS_POLARIS12_V(asic_id.hw_internal_rev)) {
82 dc_version = DCE_VERSION_11_2;
84 if (ASIC_REV_IS_VEGAM(asic_id.hw_internal_rev))
85 dc_version = DCE_VERSION_11_22;
88 if (ASICREV_IS_VEGA20_P(asic_id.hw_internal_rev))
89 dc_version = DCE_VERSION_12_1;
91 dc_version = DCE_VERSION_12_0;
93 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
95 dc_version = DCN_VERSION_1_0;
96 #if defined(CONFIG_DRM_AMD_DC_DCN1_01)
97 if (ASICREV_IS_RAVEN2(asic_id.hw_internal_rev))
98 dc_version = DCN_VERSION_1_01;
103 dc_version = DCE_VERSION_UNKNOWN;
109 struct resource_pool *dc_create_resource_pool(struct dc *dc,
110 const struct dc_init_data *init_data,
111 enum dce_version dc_version)
113 struct resource_pool *res_pool = NULL;
115 switch (dc_version) {
116 case DCE_VERSION_8_0:
117 res_pool = dce80_create_resource_pool(
118 init_data->num_virtual_links, dc);
120 case DCE_VERSION_8_1:
121 res_pool = dce81_create_resource_pool(
122 init_data->num_virtual_links, dc);
124 case DCE_VERSION_8_3:
125 res_pool = dce83_create_resource_pool(
126 init_data->num_virtual_links, dc);
128 case DCE_VERSION_10_0:
129 res_pool = dce100_create_resource_pool(
130 init_data->num_virtual_links, dc);
132 case DCE_VERSION_11_0:
133 res_pool = dce110_create_resource_pool(
134 init_data->num_virtual_links, dc,
137 case DCE_VERSION_11_2:
138 case DCE_VERSION_11_22:
139 res_pool = dce112_create_resource_pool(
140 init_data->num_virtual_links, dc);
142 case DCE_VERSION_12_0:
143 case DCE_VERSION_12_1:
144 res_pool = dce120_create_resource_pool(
145 init_data->num_virtual_links, dc);
148 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
149 case DCN_VERSION_1_0:
150 #if defined(CONFIG_DRM_AMD_DC_DCN1_01)
151 case DCN_VERSION_1_01:
153 res_pool = dcn10_create_resource_pool(init_data, dc);
161 if (res_pool != NULL) {
162 struct dc_firmware_info fw_info = { { 0 } };
164 if (dc->ctx->dc_bios->funcs->get_firmware_info(
165 dc->ctx->dc_bios, &fw_info) == BP_RESULT_OK) {
166 res_pool->ref_clocks.xtalin_clock_inKhz = fw_info.pll_info.crystal_frequency;
168 if (IS_FPGA_MAXIMUS_DC(dc->ctx->dce_environment)) {
169 // On FPGA these dividers are currently not configured by GDB
170 res_pool->ref_clocks.dccg_ref_clock_inKhz = res_pool->ref_clocks.xtalin_clock_inKhz;
171 res_pool->ref_clocks.dchub_ref_clock_inKhz = res_pool->ref_clocks.xtalin_clock_inKhz;
172 } else if (res_pool->dccg && res_pool->hubbub) {
173 // If DCCG reference frequency cannot be determined (usually means not set to xtalin) then this is a critical error
174 // as this value must be known for DCHUB programming
175 (res_pool->dccg->funcs->get_dccg_ref_freq)(res_pool->dccg,
176 fw_info.pll_info.crystal_frequency,
177 &res_pool->ref_clocks.dccg_ref_clock_inKhz);
179 // Similarly, if DCHUB reference frequency cannot be determined, then it is also a critical error
180 (res_pool->hubbub->funcs->get_dchub_ref_freq)(res_pool->hubbub,
181 res_pool->ref_clocks.dccg_ref_clock_inKhz,
182 &res_pool->ref_clocks.dchub_ref_clock_inKhz);
184 // Not all ASICs have DCCG sw component
185 res_pool->ref_clocks.dccg_ref_clock_inKhz = res_pool->ref_clocks.xtalin_clock_inKhz;
186 res_pool->ref_clocks.dchub_ref_clock_inKhz = res_pool->ref_clocks.xtalin_clock_inKhz;
189 ASSERT_CRITICAL(false);
195 void dc_destroy_resource_pool(struct dc *dc)
199 dc->res_pool->funcs->destroy(&dc->res_pool);
205 static void update_num_audio(
206 const struct resource_straps *straps,
207 unsigned int *num_audio,
208 struct audio_support *aud_support)
210 aud_support->dp_audio = true;
211 aud_support->hdmi_audio_native = false;
212 aud_support->hdmi_audio_on_dongle = false;
214 if (straps->hdmi_disable == 0) {
215 if (straps->dc_pinstraps_audio & 0x2) {
216 aud_support->hdmi_audio_on_dongle = true;
217 aud_support->hdmi_audio_native = true;
221 switch (straps->audio_stream_number) {
222 case 0: /* multi streams supported */
224 case 1: /* multi streams not supported */
228 DC_ERR("DC: unexpected audio fuse!\n");
232 bool resource_construct(
233 unsigned int num_virtual_links,
235 struct resource_pool *pool,
236 const struct resource_create_funcs *create_funcs)
238 struct dc_context *ctx = dc->ctx;
239 const struct resource_caps *caps = pool->res_cap;
241 unsigned int num_audio = caps->num_audio;
242 struct resource_straps straps = {0};
244 if (create_funcs->read_dce_straps)
245 create_funcs->read_dce_straps(dc->ctx, &straps);
247 pool->audio_count = 0;
248 if (create_funcs->create_audio) {
249 /* find the total number of streams available via the
250 * AZALIA_F0_CODEC_PIN_CONTROL_RESPONSE_CONFIGURATION_DEFAULT
251 * registers (one for each pin) starting from pin 1
252 * up to the max number of audio pins.
253 * We stop on the first pin where
254 * PORT_CONNECTIVITY == 1 (as instructed by HW team).
256 update_num_audio(&straps, &num_audio, &pool->audio_support);
257 for (i = 0; i < pool->pipe_count && i < num_audio; i++) {
258 struct audio *aud = create_funcs->create_audio(ctx, i);
261 DC_ERR("DC: failed to create audio!\n");
265 if (!aud->funcs->endpoint_valid(aud)) {
266 aud->funcs->destroy(&aud);
270 pool->audios[i] = aud;
275 pool->stream_enc_count = 0;
276 if (create_funcs->create_stream_encoder) {
277 for (i = 0; i < caps->num_stream_encoder; i++) {
278 pool->stream_enc[i] = create_funcs->create_stream_encoder(i, ctx);
279 if (pool->stream_enc[i] == NULL)
280 DC_ERR("DC: failed to create stream_encoder!\n");
281 pool->stream_enc_count++;
285 dc->caps.dynamic_audio = false;
286 if (pool->audio_count < pool->stream_enc_count) {
287 dc->caps.dynamic_audio = true;
289 for (i = 0; i < num_virtual_links; i++) {
290 pool->stream_enc[pool->stream_enc_count] =
291 virtual_stream_encoder_create(
293 if (pool->stream_enc[pool->stream_enc_count] == NULL) {
294 DC_ERR("DC: failed to create stream_encoder!\n");
297 pool->stream_enc_count++;
300 dc->hwseq = create_funcs->create_hwseq(ctx);
304 static int find_matching_clock_source(
305 const struct resource_pool *pool,
306 struct clock_source *clock_source)
311 for (i = 0; i < pool->clk_src_count; i++) {
312 if (pool->clock_sources[i] == clock_source)
318 void resource_unreference_clock_source(
319 struct resource_context *res_ctx,
320 const struct resource_pool *pool,
321 struct clock_source *clock_source)
323 int i = find_matching_clock_source(pool, clock_source);
326 res_ctx->clock_source_ref_count[i]--;
328 if (pool->dp_clock_source == clock_source)
329 res_ctx->dp_clock_source_ref_count--;
332 void resource_reference_clock_source(
333 struct resource_context *res_ctx,
334 const struct resource_pool *pool,
335 struct clock_source *clock_source)
337 int i = find_matching_clock_source(pool, clock_source);
340 res_ctx->clock_source_ref_count[i]++;
342 if (pool->dp_clock_source == clock_source)
343 res_ctx->dp_clock_source_ref_count++;
346 int resource_get_clock_source_reference(
347 struct resource_context *res_ctx,
348 const struct resource_pool *pool,
349 struct clock_source *clock_source)
351 int i = find_matching_clock_source(pool, clock_source);
354 return res_ctx->clock_source_ref_count[i];
356 if (pool->dp_clock_source == clock_source)
357 return res_ctx->dp_clock_source_ref_count;
362 bool resource_are_streams_timing_synchronizable(
363 struct dc_stream_state *stream1,
364 struct dc_stream_state *stream2)
366 if (stream1->timing.h_total != stream2->timing.h_total)
369 if (stream1->timing.v_total != stream2->timing.v_total)
372 if (stream1->timing.h_addressable
373 != stream2->timing.h_addressable)
376 if (stream1->timing.v_addressable
377 != stream2->timing.v_addressable)
380 if (stream1->timing.pix_clk_100hz
381 != stream2->timing.pix_clk_100hz)
384 if (stream1->clamping.c_depth != stream2->clamping.c_depth)
387 if (stream1->phy_pix_clk != stream2->phy_pix_clk
388 && (!dc_is_dp_signal(stream1->signal)
389 || !dc_is_dp_signal(stream2->signal)))
392 if (stream1->view_format != stream2->view_format)
397 static bool is_dp_and_hdmi_sharable(
398 struct dc_stream_state *stream1,
399 struct dc_stream_state *stream2)
401 if (stream1->ctx->dc->caps.disable_dp_clk_share)
404 if (stream1->clamping.c_depth != COLOR_DEPTH_888 ||
405 stream2->clamping.c_depth != COLOR_DEPTH_888)
412 static bool is_sharable_clk_src(
413 const struct pipe_ctx *pipe_with_clk_src,
414 const struct pipe_ctx *pipe)
416 if (pipe_with_clk_src->clock_source == NULL)
419 if (pipe_with_clk_src->stream->signal == SIGNAL_TYPE_VIRTUAL)
422 if (dc_is_dp_signal(pipe_with_clk_src->stream->signal) ||
423 (dc_is_dp_signal(pipe->stream->signal) &&
424 !is_dp_and_hdmi_sharable(pipe_with_clk_src->stream,
428 if (dc_is_hdmi_signal(pipe_with_clk_src->stream->signal)
429 && dc_is_dual_link_signal(pipe->stream->signal))
432 if (dc_is_hdmi_signal(pipe->stream->signal)
433 && dc_is_dual_link_signal(pipe_with_clk_src->stream->signal))
436 if (!resource_are_streams_timing_synchronizable(
437 pipe_with_clk_src->stream, pipe->stream))
443 struct clock_source *resource_find_used_clk_src_for_sharing(
444 struct resource_context *res_ctx,
445 struct pipe_ctx *pipe_ctx)
449 for (i = 0; i < MAX_PIPES; i++) {
450 if (is_sharable_clk_src(&res_ctx->pipe_ctx[i], pipe_ctx))
451 return res_ctx->pipe_ctx[i].clock_source;
457 static enum pixel_format convert_pixel_format_to_dalsurface(
458 enum surface_pixel_format surface_pixel_format)
460 enum pixel_format dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
462 switch (surface_pixel_format) {
463 case SURFACE_PIXEL_FORMAT_GRPH_PALETA_256_COLORS:
464 dal_pixel_format = PIXEL_FORMAT_INDEX8;
466 case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
467 dal_pixel_format = PIXEL_FORMAT_RGB565;
469 case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
470 dal_pixel_format = PIXEL_FORMAT_RGB565;
472 case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
473 dal_pixel_format = PIXEL_FORMAT_ARGB8888;
475 case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
476 dal_pixel_format = PIXEL_FORMAT_ARGB8888;
478 case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
479 dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
481 case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
482 dal_pixel_format = PIXEL_FORMAT_ARGB2101010;
484 case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS:
485 dal_pixel_format = PIXEL_FORMAT_ARGB2101010_XRBIAS;
487 case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:
488 case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
489 dal_pixel_format = PIXEL_FORMAT_FP16;
491 case SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr:
492 case SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb:
493 dal_pixel_format = PIXEL_FORMAT_420BPP8;
495 case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCbCr:
496 case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCrCb:
497 dal_pixel_format = PIXEL_FORMAT_420BPP10;
499 case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
501 dal_pixel_format = PIXEL_FORMAT_UNKNOWN;
504 return dal_pixel_format;
507 static inline void get_vp_scan_direction(
508 enum dc_rotation_angle rotation,
509 bool horizontal_mirror,
510 bool *orthogonal_rotation,
511 bool *flip_vert_scan_dir,
512 bool *flip_horz_scan_dir)
514 *orthogonal_rotation = false;
515 *flip_vert_scan_dir = false;
516 *flip_horz_scan_dir = false;
517 if (rotation == ROTATION_ANGLE_180) {
518 *flip_vert_scan_dir = true;
519 *flip_horz_scan_dir = true;
520 } else if (rotation == ROTATION_ANGLE_90) {
521 *orthogonal_rotation = true;
522 *flip_horz_scan_dir = true;
523 } else if (rotation == ROTATION_ANGLE_270) {
524 *orthogonal_rotation = true;
525 *flip_vert_scan_dir = true;
528 if (horizontal_mirror)
529 *flip_horz_scan_dir = !*flip_horz_scan_dir;
532 static void calculate_viewport(struct pipe_ctx *pipe_ctx)
534 const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
535 const struct dc_stream_state *stream = pipe_ctx->stream;
536 struct scaler_data *data = &pipe_ctx->plane_res.scl_data;
537 struct rect surf_src = plane_state->src_rect;
538 struct rect clip, dest;
539 int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
540 || data->format == PIXEL_FORMAT_420BPP10) ? 2 : 1;
541 bool pri_split = pipe_ctx->bottom_pipe &&
542 pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state;
543 bool sec_split = pipe_ctx->top_pipe &&
544 pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state;
545 bool orthogonal_rotation, flip_y_start, flip_x_start;
547 if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE ||
548 stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM) {
553 /* The actual clip is an intersection between stream
554 * source and surface clip
556 dest = plane_state->dst_rect;
557 clip.x = stream->src.x > plane_state->clip_rect.x ?
558 stream->src.x : plane_state->clip_rect.x;
560 clip.width = stream->src.x + stream->src.width <
561 plane_state->clip_rect.x + plane_state->clip_rect.width ?
562 stream->src.x + stream->src.width - clip.x :
563 plane_state->clip_rect.x + plane_state->clip_rect.width - clip.x ;
565 clip.y = stream->src.y > plane_state->clip_rect.y ?
566 stream->src.y : plane_state->clip_rect.y;
568 clip.height = stream->src.y + stream->src.height <
569 plane_state->clip_rect.y + plane_state->clip_rect.height ?
570 stream->src.y + stream->src.height - clip.y :
571 plane_state->clip_rect.y + plane_state->clip_rect.height - clip.y ;
574 * Need to calculate how scan origin is shifted in vp space
575 * to correctly rotate clip and dst
577 get_vp_scan_direction(
578 plane_state->rotation,
579 plane_state->horizontal_mirror,
580 &orthogonal_rotation,
584 if (orthogonal_rotation) {
585 swap(clip.x, clip.y);
586 swap(clip.width, clip.height);
587 swap(dest.x, dest.y);
588 swap(dest.width, dest.height);
591 clip.x = dest.x + dest.width - clip.x - clip.width;
595 clip.y = dest.y + dest.height - clip.y - clip.height;
599 /* offset = surf_src.ofs + (clip.ofs - surface->dst_rect.ofs) * scl_ratio
600 * num_pixels = clip.num_pix * scl_ratio
602 data->viewport.x = surf_src.x + (clip.x - dest.x) * surf_src.width / dest.width;
603 data->viewport.width = clip.width * surf_src.width / dest.width;
605 data->viewport.y = surf_src.y + (clip.y - dest.y) * surf_src.height / dest.height;
606 data->viewport.height = clip.height * surf_src.height / dest.height;
609 if (pri_split || sec_split) {
610 if (orthogonal_rotation) {
611 if (flip_y_start != pri_split)
612 data->viewport.height /= 2;
614 data->viewport.y += data->viewport.height / 2;
615 /* Ceil offset pipe */
616 data->viewport.height = (data->viewport.height + 1) / 2;
619 if (flip_x_start != pri_split)
620 data->viewport.width /= 2;
622 data->viewport.x += data->viewport.width / 2;
623 /* Ceil offset pipe */
624 data->viewport.width = (data->viewport.width + 1) / 2;
629 /* Round down, compensate in init */
630 data->viewport_c.x = data->viewport.x / vpc_div;
631 data->viewport_c.y = data->viewport.y / vpc_div;
632 data->inits.h_c = (data->viewport.x % vpc_div) != 0 ? dc_fixpt_half : dc_fixpt_zero;
633 data->inits.v_c = (data->viewport.y % vpc_div) != 0 ? dc_fixpt_half : dc_fixpt_zero;
635 /* Round up, assume original video size always even dimensions */
636 data->viewport_c.width = (data->viewport.width + vpc_div - 1) / vpc_div;
637 data->viewport_c.height = (data->viewport.height + vpc_div - 1) / vpc_div;
640 static void calculate_recout(struct pipe_ctx *pipe_ctx)
642 const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
643 const struct dc_stream_state *stream = pipe_ctx->stream;
644 struct rect surf_clip = plane_state->clip_rect;
645 bool pri_split = pipe_ctx->bottom_pipe &&
646 pipe_ctx->bottom_pipe->plane_state == pipe_ctx->plane_state;
647 bool sec_split = pipe_ctx->top_pipe &&
648 pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state;
649 bool top_bottom_split = stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM;
651 pipe_ctx->plane_res.scl_data.recout.x = stream->dst.x;
652 if (stream->src.x < surf_clip.x)
653 pipe_ctx->plane_res.scl_data.recout.x += (surf_clip.x
654 - stream->src.x) * stream->dst.width
657 pipe_ctx->plane_res.scl_data.recout.width = surf_clip.width *
658 stream->dst.width / stream->src.width;
659 if (pipe_ctx->plane_res.scl_data.recout.width + pipe_ctx->plane_res.scl_data.recout.x >
660 stream->dst.x + stream->dst.width)
661 pipe_ctx->plane_res.scl_data.recout.width =
662 stream->dst.x + stream->dst.width
663 - pipe_ctx->plane_res.scl_data.recout.x;
665 pipe_ctx->plane_res.scl_data.recout.y = stream->dst.y;
666 if (stream->src.y < surf_clip.y)
667 pipe_ctx->plane_res.scl_data.recout.y += (surf_clip.y
668 - stream->src.y) * stream->dst.height
669 / stream->src.height;
671 pipe_ctx->plane_res.scl_data.recout.height = surf_clip.height *
672 stream->dst.height / stream->src.height;
673 if (pipe_ctx->plane_res.scl_data.recout.height + pipe_ctx->plane_res.scl_data.recout.y >
674 stream->dst.y + stream->dst.height)
675 pipe_ctx->plane_res.scl_data.recout.height =
676 stream->dst.y + stream->dst.height
677 - pipe_ctx->plane_res.scl_data.recout.y;
679 /* Handle h & v split, handle rotation using viewport */
680 if (sec_split && top_bottom_split) {
681 pipe_ctx->plane_res.scl_data.recout.y +=
682 pipe_ctx->plane_res.scl_data.recout.height / 2;
683 /* Floor primary pipe, ceil 2ndary pipe */
684 pipe_ctx->plane_res.scl_data.recout.height =
685 (pipe_ctx->plane_res.scl_data.recout.height + 1) / 2;
686 } else if (pri_split && top_bottom_split)
687 pipe_ctx->plane_res.scl_data.recout.height /= 2;
688 else if (sec_split) {
689 pipe_ctx->plane_res.scl_data.recout.x +=
690 pipe_ctx->plane_res.scl_data.recout.width / 2;
691 /* Ceil offset pipe */
692 pipe_ctx->plane_res.scl_data.recout.width =
693 (pipe_ctx->plane_res.scl_data.recout.width + 1) / 2;
694 } else if (pri_split)
695 pipe_ctx->plane_res.scl_data.recout.width /= 2;
698 static void calculate_scaling_ratios(struct pipe_ctx *pipe_ctx)
700 const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
701 const struct dc_stream_state *stream = pipe_ctx->stream;
702 struct rect surf_src = plane_state->src_rect;
703 const int in_w = stream->src.width;
704 const int in_h = stream->src.height;
705 const int out_w = stream->dst.width;
706 const int out_h = stream->dst.height;
708 /*Swap surf_src height and width since scaling ratios are in recout rotation*/
709 if (pipe_ctx->plane_state->rotation == ROTATION_ANGLE_90 ||
710 pipe_ctx->plane_state->rotation == ROTATION_ANGLE_270)
711 swap(surf_src.height, surf_src.width);
713 pipe_ctx->plane_res.scl_data.ratios.horz = dc_fixpt_from_fraction(
715 plane_state->dst_rect.width);
716 pipe_ctx->plane_res.scl_data.ratios.vert = dc_fixpt_from_fraction(
718 plane_state->dst_rect.height);
720 if (stream->view_format == VIEW_3D_FORMAT_SIDE_BY_SIDE)
721 pipe_ctx->plane_res.scl_data.ratios.horz.value *= 2;
722 else if (stream->view_format == VIEW_3D_FORMAT_TOP_AND_BOTTOM)
723 pipe_ctx->plane_res.scl_data.ratios.vert.value *= 2;
725 pipe_ctx->plane_res.scl_data.ratios.vert.value = div64_s64(
726 pipe_ctx->plane_res.scl_data.ratios.vert.value * in_h, out_h);
727 pipe_ctx->plane_res.scl_data.ratios.horz.value = div64_s64(
728 pipe_ctx->plane_res.scl_data.ratios.horz.value * in_w, out_w);
730 pipe_ctx->plane_res.scl_data.ratios.horz_c = pipe_ctx->plane_res.scl_data.ratios.horz;
731 pipe_ctx->plane_res.scl_data.ratios.vert_c = pipe_ctx->plane_res.scl_data.ratios.vert;
733 if (pipe_ctx->plane_res.scl_data.format == PIXEL_FORMAT_420BPP8
734 || pipe_ctx->plane_res.scl_data.format == PIXEL_FORMAT_420BPP10) {
735 pipe_ctx->plane_res.scl_data.ratios.horz_c.value /= 2;
736 pipe_ctx->plane_res.scl_data.ratios.vert_c.value /= 2;
738 pipe_ctx->plane_res.scl_data.ratios.horz = dc_fixpt_truncate(
739 pipe_ctx->plane_res.scl_data.ratios.horz, 19);
740 pipe_ctx->plane_res.scl_data.ratios.vert = dc_fixpt_truncate(
741 pipe_ctx->plane_res.scl_data.ratios.vert, 19);
742 pipe_ctx->plane_res.scl_data.ratios.horz_c = dc_fixpt_truncate(
743 pipe_ctx->plane_res.scl_data.ratios.horz_c, 19);
744 pipe_ctx->plane_res.scl_data.ratios.vert_c = dc_fixpt_truncate(
745 pipe_ctx->plane_res.scl_data.ratios.vert_c, 19);
748 static inline void adjust_vp_and_init_for_seamless_clip(
753 struct fixed31_32 ratio,
754 struct fixed31_32 *init,
758 if (!flip_scan_dir) {
759 /* Adjust for viewport end clip-off */
760 if ((*vp_offset + *vp_size) < src_size) {
761 int vp_clip = src_size - *vp_size - *vp_offset;
762 int int_part = dc_fixpt_floor(dc_fixpt_sub(*init, ratio));
764 int_part = int_part > 0 ? int_part : 0;
765 *vp_size += int_part < vp_clip ? int_part : vp_clip;
768 /* Adjust for non-0 viewport offset */
772 *init = dc_fixpt_add(*init, dc_fixpt_mul_int(ratio, recout_skip));
773 int_part = dc_fixpt_floor(*init) - *vp_offset;
774 if (int_part < taps) {
775 int int_adj = *vp_offset >= (taps - int_part) ?
776 (taps - int_part) : *vp_offset;
777 *vp_offset -= int_adj;
780 } else if (int_part > taps) {
781 *vp_offset += int_part - taps;
782 *vp_size -= int_part - taps;
785 init->value &= 0xffffffff;
786 *init = dc_fixpt_add_int(*init, int_part);
789 /* Adjust for non-0 viewport offset */
791 int int_part = dc_fixpt_floor(dc_fixpt_sub(*init, ratio));
793 int_part = int_part > 0 ? int_part : 0;
794 *vp_size += int_part < *vp_offset ? int_part : *vp_offset;
795 *vp_offset -= int_part < *vp_offset ? int_part : *vp_offset;
798 /* Adjust for viewport end clip-off */
799 if ((*vp_offset + *vp_size) < src_size) {
801 int end_offset = src_size - *vp_offset - *vp_size;
804 * this is init if vp had no offset, keep in mind this is from the
805 * right side of vp due to scan direction
807 *init = dc_fixpt_add(*init, dc_fixpt_mul_int(ratio, recout_skip));
809 * this is the difference between first pixel of viewport available to read
810 * and init position, takning into account scan direction
812 int_part = dc_fixpt_floor(*init) - end_offset;
813 if (int_part < taps) {
814 int int_adj = end_offset >= (taps - int_part) ?
815 (taps - int_part) : end_offset;
818 } else if (int_part > taps) {
819 *vp_size += int_part - taps;
822 init->value &= 0xffffffff;
823 *init = dc_fixpt_add_int(*init, int_part);
828 static void calculate_inits_and_adj_vp(struct pipe_ctx *pipe_ctx)
830 const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
831 const struct dc_stream_state *stream = pipe_ctx->stream;
832 struct scaler_data *data = &pipe_ctx->plane_res.scl_data;
833 struct rect src = pipe_ctx->plane_state->src_rect;
834 int recout_skip_h, recout_skip_v, surf_size_h, surf_size_v;
835 int vpc_div = (data->format == PIXEL_FORMAT_420BPP8
836 || data->format == PIXEL_FORMAT_420BPP10) ? 2 : 1;
837 bool orthogonal_rotation, flip_vert_scan_dir, flip_horz_scan_dir;
840 * Need to calculate the scan direction for viewport to make adjustments
842 get_vp_scan_direction(
843 plane_state->rotation,
844 plane_state->horizontal_mirror,
845 &orthogonal_rotation,
847 &flip_horz_scan_dir);
849 /* Calculate src rect rotation adjusted to recout space */
850 surf_size_h = src.x + src.width;
851 surf_size_v = src.y + src.height;
852 if (flip_horz_scan_dir)
854 if (flip_vert_scan_dir)
856 if (orthogonal_rotation) {
858 swap(src.width, src.height);
861 /* Recout matching initial vp offset = recout_offset - (stream dst offset +
862 * ((surf dst offset - stream src offset) * 1/ stream scaling ratio)
863 * - (surf surf_src offset * 1/ full scl ratio))
865 recout_skip_h = data->recout.x - (stream->dst.x + (plane_state->dst_rect.x - stream->src.x)
866 * stream->dst.width / stream->src.width -
867 src.x * plane_state->dst_rect.width / src.width
868 * stream->dst.width / stream->src.width);
869 recout_skip_v = data->recout.y - (stream->dst.y + (plane_state->dst_rect.y - stream->src.y)
870 * stream->dst.height / stream->src.height -
871 src.y * plane_state->dst_rect.height / src.height
872 * stream->dst.height / stream->src.height);
873 if (orthogonal_rotation)
874 swap(recout_skip_h, recout_skip_v);
876 * Init calculated according to formula:
877 * init = (scaling_ratio + number_of_taps + 1) / 2
878 * init_bot = init + scaling_ratio
879 * init_c = init + truncated_vp_c_offset(from calculate viewport)
881 data->inits.h = dc_fixpt_truncate(dc_fixpt_div_int(
882 dc_fixpt_add_int(data->ratios.horz, data->taps.h_taps + 1), 2), 19);
884 data->inits.h_c = dc_fixpt_truncate(dc_fixpt_add(data->inits.h_c, dc_fixpt_div_int(
885 dc_fixpt_add_int(data->ratios.horz_c, data->taps.h_taps_c + 1), 2)), 19);
887 data->inits.v = dc_fixpt_truncate(dc_fixpt_div_int(
888 dc_fixpt_add_int(data->ratios.vert, data->taps.v_taps + 1), 2), 19);
890 data->inits.v_c = dc_fixpt_truncate(dc_fixpt_add(data->inits.v_c, dc_fixpt_div_int(
891 dc_fixpt_add_int(data->ratios.vert_c, data->taps.v_taps_c + 1), 2)), 19);
894 * Taps, inits and scaling ratios are in recout space need to rotate
895 * to viewport rotation before adjustment
897 adjust_vp_and_init_for_seamless_clip(
901 orthogonal_rotation ? data->taps.v_taps : data->taps.h_taps,
902 orthogonal_rotation ? data->ratios.vert : data->ratios.horz,
903 orthogonal_rotation ? &data->inits.v : &data->inits.h,
905 &data->viewport.width);
906 adjust_vp_and_init_for_seamless_clip(
909 surf_size_h / vpc_div,
910 orthogonal_rotation ? data->taps.v_taps_c : data->taps.h_taps_c,
911 orthogonal_rotation ? data->ratios.vert_c : data->ratios.horz_c,
912 orthogonal_rotation ? &data->inits.v_c : &data->inits.h_c,
914 &data->viewport_c.width);
915 adjust_vp_and_init_for_seamless_clip(
919 orthogonal_rotation ? data->taps.h_taps : data->taps.v_taps,
920 orthogonal_rotation ? data->ratios.horz : data->ratios.vert,
921 orthogonal_rotation ? &data->inits.h : &data->inits.v,
923 &data->viewport.height);
924 adjust_vp_and_init_for_seamless_clip(
927 surf_size_v / vpc_div,
928 orthogonal_rotation ? data->taps.h_taps_c : data->taps.v_taps_c,
929 orthogonal_rotation ? data->ratios.horz_c : data->ratios.vert_c,
930 orthogonal_rotation ? &data->inits.h_c : &data->inits.v_c,
932 &data->viewport_c.height);
934 /* Interlaced inits based on final vert inits */
935 data->inits.v_bot = dc_fixpt_add(data->inits.v, data->ratios.vert);
936 data->inits.v_c_bot = dc_fixpt_add(data->inits.v_c, data->ratios.vert_c);
940 bool resource_build_scaling_params(struct pipe_ctx *pipe_ctx)
942 const struct dc_plane_state *plane_state = pipe_ctx->plane_state;
943 struct dc_crtc_timing *timing = &pipe_ctx->stream->timing;
945 DC_LOGGER_INIT(pipe_ctx->stream->ctx->logger);
946 /* Important: scaling ratio calculation requires pixel format,
947 * lb depth calculation requires recout and taps require scaling ratios.
948 * Inits require viewport, taps, ratios and recout of split pipe
950 pipe_ctx->plane_res.scl_data.format = convert_pixel_format_to_dalsurface(
951 pipe_ctx->plane_state->format);
953 calculate_scaling_ratios(pipe_ctx);
955 calculate_viewport(pipe_ctx);
957 if (pipe_ctx->plane_res.scl_data.viewport.height < 16 || pipe_ctx->plane_res.scl_data.viewport.width < 16)
960 calculate_recout(pipe_ctx);
963 * Setting line buffer pixel depth to 24bpp yields banding
964 * on certain displays, such as the Sharp 4k
966 pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_30BPP;
968 pipe_ctx->plane_res.scl_data.recout.x += timing->h_border_left;
969 pipe_ctx->plane_res.scl_data.recout.y += timing->v_border_top;
971 pipe_ctx->plane_res.scl_data.h_active = timing->h_addressable + timing->h_border_left + timing->h_border_right;
972 pipe_ctx->plane_res.scl_data.v_active = timing->v_addressable + timing->v_border_top + timing->v_border_bottom;
974 /* Taps calculations */
975 if (pipe_ctx->plane_res.xfm != NULL)
976 res = pipe_ctx->plane_res.xfm->funcs->transform_get_optimal_number_of_taps(
977 pipe_ctx->plane_res.xfm, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality);
979 if (pipe_ctx->plane_res.dpp != NULL)
980 res = pipe_ctx->plane_res.dpp->funcs->dpp_get_optimal_number_of_taps(
981 pipe_ctx->plane_res.dpp, &pipe_ctx->plane_res.scl_data, &plane_state->scaling_quality);
983 /* Try 24 bpp linebuffer */
984 pipe_ctx->plane_res.scl_data.lb_params.depth = LB_PIXEL_DEPTH_24BPP;
986 if (pipe_ctx->plane_res.xfm != NULL)
987 res = pipe_ctx->plane_res.xfm->funcs->transform_get_optimal_number_of_taps(
988 pipe_ctx->plane_res.xfm,
989 &pipe_ctx->plane_res.scl_data,
990 &plane_state->scaling_quality);
992 if (pipe_ctx->plane_res.dpp != NULL)
993 res = pipe_ctx->plane_res.dpp->funcs->dpp_get_optimal_number_of_taps(
994 pipe_ctx->plane_res.dpp,
995 &pipe_ctx->plane_res.scl_data,
996 &plane_state->scaling_quality);
1000 /* May need to re-check lb size after this in some obscure scenario */
1001 calculate_inits_and_adj_vp(pipe_ctx);
1004 "%s: Viewport:\nheight:%d width:%d x:%d "
1005 "y:%d\n dst_rect:\nheight:%d width:%d x:%d "
1008 pipe_ctx->plane_res.scl_data.viewport.height,
1009 pipe_ctx->plane_res.scl_data.viewport.width,
1010 pipe_ctx->plane_res.scl_data.viewport.x,
1011 pipe_ctx->plane_res.scl_data.viewport.y,
1012 plane_state->dst_rect.height,
1013 plane_state->dst_rect.width,
1014 plane_state->dst_rect.x,
1015 plane_state->dst_rect.y);
1021 enum dc_status resource_build_scaling_params_for_context(
1022 const struct dc *dc,
1023 struct dc_state *context)
1027 for (i = 0; i < MAX_PIPES; i++) {
1028 if (context->res_ctx.pipe_ctx[i].plane_state != NULL &&
1029 context->res_ctx.pipe_ctx[i].stream != NULL)
1030 if (!resource_build_scaling_params(&context->res_ctx.pipe_ctx[i]))
1031 return DC_FAIL_SCALING;
1037 struct pipe_ctx *find_idle_secondary_pipe(
1038 struct resource_context *res_ctx,
1039 const struct resource_pool *pool,
1040 const struct pipe_ctx *primary_pipe)
1043 struct pipe_ctx *secondary_pipe = NULL;
1046 * We add a preferred pipe mapping to avoid the chance that
1047 * MPCCs already in use will need to be reassigned to other trees.
1048 * For example, if we went with the strict, assign backwards logic:
1051 * Display A on, no surface, top pipe = 0
1052 * Display B on, no surface, top pipe = 1
1055 * Display A on, no surface, top pipe = 0
1056 * Display B on, surface enable, top pipe = 1, bottom pipe = 5
1059 * Display A on, surface enable, top pipe = 0, bottom pipe = 5
1060 * Display B on, surface enable, top pipe = 1, bottom pipe = 4
1062 * The state 2->3 transition requires remapping MPCC 5 from display B
1065 * However, with the preferred pipe logic, state 2 would look like:
1068 * Display A on, no surface, top pipe = 0
1069 * Display B on, surface enable, top pipe = 1, bottom pipe = 4
1071 * This would then cause 2->3 to not require remapping any MPCCs.
1074 int preferred_pipe_idx = (pool->pipe_count - 1) - primary_pipe->pipe_idx;
1075 if (res_ctx->pipe_ctx[preferred_pipe_idx].stream == NULL) {
1076 secondary_pipe = &res_ctx->pipe_ctx[preferred_pipe_idx];
1077 secondary_pipe->pipe_idx = preferred_pipe_idx;
1082 * search backwards for the second pipe to keep pipe
1083 * assignment more consistent
1085 if (!secondary_pipe)
1086 for (i = pool->pipe_count - 1; i >= 0; i--) {
1087 if (res_ctx->pipe_ctx[i].stream == NULL) {
1088 secondary_pipe = &res_ctx->pipe_ctx[i];
1089 secondary_pipe->pipe_idx = i;
1094 return secondary_pipe;
1097 struct pipe_ctx *resource_get_head_pipe_for_stream(
1098 struct resource_context *res_ctx,
1099 struct dc_stream_state *stream)
1102 for (i = 0; i < MAX_PIPES; i++) {
1103 if (res_ctx->pipe_ctx[i].stream == stream &&
1104 !res_ctx->pipe_ctx[i].top_pipe) {
1105 return &res_ctx->pipe_ctx[i];
1112 static struct pipe_ctx *resource_get_tail_pipe_for_stream(
1113 struct resource_context *res_ctx,
1114 struct dc_stream_state *stream)
1116 struct pipe_ctx *head_pipe, *tail_pipe;
1117 head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);
1122 tail_pipe = head_pipe->bottom_pipe;
1125 head_pipe = tail_pipe;
1126 tail_pipe = tail_pipe->bottom_pipe;
1133 * A free_pipe for a stream is defined here as a pipe
1134 * that has no surface attached yet
1136 static struct pipe_ctx *acquire_free_pipe_for_stream(
1137 struct dc_state *context,
1138 const struct resource_pool *pool,
1139 struct dc_stream_state *stream)
1142 struct resource_context *res_ctx = &context->res_ctx;
1144 struct pipe_ctx *head_pipe = NULL;
1146 /* Find head pipe, which has the back end set up*/
1148 head_pipe = resource_get_head_pipe_for_stream(res_ctx, stream);
1155 if (!head_pipe->plane_state)
1158 /* Re-use pipe already acquired for this stream if available*/
1159 for (i = pool->pipe_count - 1; i >= 0; i--) {
1160 if (res_ctx->pipe_ctx[i].stream == stream &&
1161 !res_ctx->pipe_ctx[i].plane_state) {
1162 return &res_ctx->pipe_ctx[i];
1167 * At this point we have no re-useable pipe for this stream and we need
1168 * to acquire an idle one to satisfy the request
1171 if (!pool->funcs->acquire_idle_pipe_for_layer)
1174 return pool->funcs->acquire_idle_pipe_for_layer(context, pool, stream);
#if defined(CONFIG_DRM_AMD_DC_DCN1_0)
/*
 * Steal the bottom half of an existing pipe-split pair: unlink the split
 * (secondary) pipe from its tree, reset it, and rebind its per-index HW
 * resources for @stream.
 *
 * Return: the acquired pipe index, or -1 if no split pipe was found.
 */
static int acquire_first_split_pipe(
		struct resource_context *res_ctx,
		const struct resource_pool *pool,
		struct dc_stream_state *stream)
{
	int i;

	for (i = 0; i < pool->pipe_count; i++) {
		struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];

		/* A split (secondary) pipe shares its plane_state with its top pipe. */
		if (pipe_ctx->top_pipe &&
				pipe_ctx->top_pipe->plane_state == pipe_ctx->plane_state) {
			pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe;
			if (pipe_ctx->bottom_pipe)
				pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe;

			memset(pipe_ctx, 0, sizeof(*pipe_ctx));
			pipe_ctx->stream_res.tg = pool->timing_generators[i];
			pipe_ctx->plane_res.hubp = pool->hubps[i];
			pipe_ctx->plane_res.ipp = pool->ipps[i];
			pipe_ctx->plane_res.dpp = pool->dpps[i];
			pipe_ctx->stream_res.opp = pool->opps[i];
			pipe_ctx->plane_res.mpcc_inst = pool->dpps[i]->inst;
			pipe_ctx->pipe_idx = i;

			pipe_ctx->stream = stream;
			return i;
		}
	}
	return -1;
}
#endif
1212 bool dc_add_plane_to_context(
1213 const struct dc *dc,
1214 struct dc_stream_state *stream,
1215 struct dc_plane_state *plane_state,
1216 struct dc_state *context)
1219 struct resource_pool *pool = dc->res_pool;
1220 struct pipe_ctx *head_pipe, *tail_pipe, *free_pipe;
1221 struct dc_stream_status *stream_status = NULL;
1223 for (i = 0; i < context->stream_count; i++)
1224 if (context->streams[i] == stream) {
1225 stream_status = &context->stream_status[i];
1228 if (stream_status == NULL) {
1229 dm_error("Existing stream not found; failed to attach surface!\n");
1234 if (stream_status->plane_count == MAX_SURFACE_NUM) {
1235 dm_error("Surface: can not attach plane_state %p! Maximum is: %d\n",
1236 plane_state, MAX_SURFACE_NUM);
1240 head_pipe = resource_get_head_pipe_for_stream(&context->res_ctx, stream);
1243 dm_error("Head pipe not found for stream_state %p !\n", stream);
1247 tail_pipe = resource_get_tail_pipe_for_stream(&context->res_ctx, stream);
1250 free_pipe = acquire_free_pipe_for_stream(context, pool, stream);
1252 #if defined(CONFIG_DRM_AMD_DC_DCN1_0)
1254 int pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream);
1256 free_pipe = &context->res_ctx.pipe_ctx[pipe_idx];
1262 /* retain new surfaces */
1263 dc_plane_state_retain(plane_state);
1264 free_pipe->plane_state = plane_state;
1266 if (head_pipe != free_pipe) {
1267 free_pipe->stream_res.tg = tail_pipe->stream_res.tg;
1268 free_pipe->stream_res.abm = tail_pipe->stream_res.abm;
1269 free_pipe->stream_res.opp = tail_pipe->stream_res.opp;
1270 free_pipe->stream_res.stream_enc = tail_pipe->stream_res.stream_enc;
1271 free_pipe->stream_res.audio = tail_pipe->stream_res.audio;
1272 free_pipe->clock_source = tail_pipe->clock_source;
1273 free_pipe->top_pipe = tail_pipe;
1274 tail_pipe->bottom_pipe = free_pipe;
1275 } else if (free_pipe->bottom_pipe && free_pipe->bottom_pipe->plane_state == NULL) {
1276 ASSERT(free_pipe->bottom_pipe->stream_res.opp != free_pipe->stream_res.opp);
1277 free_pipe->bottom_pipe->plane_state = plane_state;
1280 /* assign new surfaces*/
1281 stream_status->plane_states[stream_status->plane_count] = plane_state;
1283 stream_status->plane_count++;
1288 struct pipe_ctx *dc_res_get_odm_bottom_pipe(struct pipe_ctx *pipe_ctx)
1290 struct pipe_ctx *bottom_pipe = pipe_ctx->bottom_pipe;
1292 /* ODM should only be updated once per otg */
1293 if (pipe_ctx->top_pipe)
1296 while (bottom_pipe) {
1297 if (bottom_pipe->stream_res.opp != pipe_ctx->stream_res.opp)
1299 bottom_pipe = bottom_pipe->bottom_pipe;
/*
 * Decide whether @pipe_ctx is the bottom half of an ODM combine pair,
 * by walking the top_pipe chain and comparing OPPs.
 *
 * NOTE(review): this extraction is missing several lines of the original
 * function (the first if-body, a loop header before the second if, its
 * body, and the return) — presumably a `while (top_pipe)` walk that sets
 * `result = true` when the chain's head drives a different OPP; confirm
 * against the full source before relying on the exact control flow.
 */
1305 static bool dc_res_is_odm_bottom_pipe(struct pipe_ctx *pipe_ctx)
1307 struct pipe_ctx *top_pipe = pipe_ctx->top_pipe;
1308 bool result = false;
/* Immediate top pipe with the same OPP means plain MPC blending above us. */
1310 if (top_pipe && top_pipe->stream_res.opp == pipe_ctx->stream_res.opp)
/* A chain head (no top_pipe) on a different OPP marks ODM combine. */
1314 if (!top_pipe->top_pipe && top_pipe->stream_res.opp != pipe_ctx->stream_res.opp)
1316 top_pipe = top_pipe->top_pipe;
1322 bool dc_remove_plane_from_context(
1323 const struct dc *dc,
1324 struct dc_stream_state *stream,
1325 struct dc_plane_state *plane_state,
1326 struct dc_state *context)
1329 struct dc_stream_status *stream_status = NULL;
1330 struct resource_pool *pool = dc->res_pool;
1332 for (i = 0; i < context->stream_count; i++)
1333 if (context->streams[i] == stream) {
1334 stream_status = &context->stream_status[i];
1338 if (stream_status == NULL) {
1339 dm_error("Existing stream not found; failed to remove plane.\n");
1343 /* release pipe for plane*/
1344 for (i = pool->pipe_count - 1; i >= 0; i--) {
1345 struct pipe_ctx *pipe_ctx = &context->res_ctx.pipe_ctx[i];
1347 if (pipe_ctx->plane_state == plane_state) {
1348 if (dc_res_is_odm_bottom_pipe(pipe_ctx)) {
1349 pipe_ctx->plane_state = NULL;
1350 pipe_ctx->bottom_pipe = NULL;
1354 if (pipe_ctx->top_pipe)
1355 pipe_ctx->top_pipe->bottom_pipe = pipe_ctx->bottom_pipe;
1357 /* Second condition is to avoid setting NULL to top pipe
1358 * of tail pipe making it look like head pipe in subsequent
1361 if (pipe_ctx->bottom_pipe && pipe_ctx->top_pipe)
1362 pipe_ctx->bottom_pipe->top_pipe = pipe_ctx->top_pipe;
1365 * For head pipe detach surfaces from pipe for tail
1366 * pipe just zero it out
1368 if (!pipe_ctx->top_pipe) {
1369 pipe_ctx->plane_state = NULL;
1370 if (!dc_res_get_odm_bottom_pipe(pipe_ctx))
1371 pipe_ctx->bottom_pipe = NULL;
1373 memset(pipe_ctx, 0, sizeof(*pipe_ctx));
1379 for (i = 0; i < stream_status->plane_count; i++) {
1380 if (stream_status->plane_states[i] == plane_state) {
1382 dc_plane_state_release(stream_status->plane_states[i]);
1387 if (i == stream_status->plane_count) {
1388 dm_error("Existing plane_state not found; failed to detach it!\n");
1392 stream_status->plane_count--;
1394 /* Start at the plane we've just released, and move all the planes one index forward to "trim" the array */
1395 for (; i < stream_status->plane_count; i++)
1396 stream_status->plane_states[i] = stream_status->plane_states[i + 1];
1398 stream_status->plane_states[stream_status->plane_count] = NULL;
1403 bool dc_rem_all_planes_for_stream(
1404 const struct dc *dc,
1405 struct dc_stream_state *stream,
1406 struct dc_state *context)
1408 int i, old_plane_count;
1409 struct dc_stream_status *stream_status = NULL;
1410 struct dc_plane_state *del_planes[MAX_SURFACE_NUM] = { 0 };
1412 for (i = 0; i < context->stream_count; i++)
1413 if (context->streams[i] == stream) {
1414 stream_status = &context->stream_status[i];
1418 if (stream_status == NULL) {
1419 dm_error("Existing stream %p not found!\n", stream);
1423 old_plane_count = stream_status->plane_count;
1425 for (i = 0; i < old_plane_count; i++)
1426 del_planes[i] = stream_status->plane_states[i];
1428 for (i = 0; i < old_plane_count; i++)
1429 if (!dc_remove_plane_from_context(dc, stream, del_planes[i], context))
1435 static bool add_all_planes_for_stream(
1436 const struct dc *dc,
1437 struct dc_stream_state *stream,
1438 const struct dc_validation_set set[],
1440 struct dc_state *context)
1444 for (i = 0; i < set_count; i++)
1445 if (set[i].stream == stream)
1448 if (i == set_count) {
1449 dm_error("Stream %p not found in set!\n", stream);
1453 for (j = 0; j < set[i].plane_count; j++)
1454 if (!dc_add_plane_to_context(dc, stream, set[i].plane_states[j], context))
1460 bool dc_add_all_planes_for_stream(
1461 const struct dc *dc,
1462 struct dc_stream_state *stream,
1463 struct dc_plane_state * const *plane_states,
1465 struct dc_state *context)
1467 struct dc_validation_set set;
1470 set.stream = stream;
1471 set.plane_count = plane_count;
1473 for (i = 0; i < plane_count; i++)
1474 set.plane_states[i] = plane_states[i];
1476 return add_all_planes_for_stream(dc, stream, &set, 1, context);
1480 static bool is_hdr_static_meta_changed(struct dc_stream_state *cur_stream,
1481 struct dc_stream_state *new_stream)
1483 if (cur_stream == NULL)
1486 if (memcmp(&cur_stream->hdr_static_metadata,
1487 &new_stream->hdr_static_metadata,
1488 sizeof(struct dc_info_packet)) != 0)
1494 static bool is_vsc_info_packet_changed(struct dc_stream_state *cur_stream,
1495 struct dc_stream_state *new_stream)
1497 if (cur_stream == NULL)
1500 if (memcmp(&cur_stream->vsc_infopacket,
1501 &new_stream->vsc_infopacket,
1502 sizeof(struct dc_info_packet)) != 0)
1508 static bool is_timing_changed(struct dc_stream_state *cur_stream,
1509 struct dc_stream_state *new_stream)
1511 if (cur_stream == NULL)
1514 /* If sink pointer changed, it means this is a hotplug, we should do
1517 if (cur_stream->sink != new_stream->sink)
1520 /* If output color space is changed, need to reprogram info frames */
1521 if (cur_stream->output_color_space != new_stream->output_color_space)
1525 &cur_stream->timing,
1526 &new_stream->timing,
1527 sizeof(struct dc_crtc_timing)) != 0;
1530 static bool are_stream_backends_same(
1531 struct dc_stream_state *stream_a, struct dc_stream_state *stream_b)
1533 if (stream_a == stream_b)
1536 if (stream_a == NULL || stream_b == NULL)
1539 if (is_timing_changed(stream_a, stream_b))
1542 if (is_hdr_static_meta_changed(stream_a, stream_b))
1545 if (stream_a->dpms_off != stream_b->dpms_off)
1548 if (is_vsc_info_packet_changed(stream_a, stream_b))
1555 * dc_is_stream_unchanged() - Compare two stream states for equivalence.
1557 * Checks if there a difference between the two states
1558 * that would require a mode change.
1560 * Does not compare cursor position or attributes.
1562 bool dc_is_stream_unchanged(
1563 struct dc_stream_state *old_stream, struct dc_stream_state *stream)
1566 if (!are_stream_backends_same(old_stream, stream))
1573 * dc_is_stream_scaling_unchanged() - Compare scaling rectangles of two streams.
1575 bool dc_is_stream_scaling_unchanged(
1576 struct dc_stream_state *old_stream, struct dc_stream_state *stream)
1578 if (old_stream == stream)
1581 if (old_stream == NULL || stream == NULL)
1584 if (memcmp(&old_stream->src,
1586 sizeof(struct rect)) != 0)
1589 if (memcmp(&old_stream->dst,
1591 sizeof(struct rect)) != 0)
1597 static void update_stream_engine_usage(
1598 struct resource_context *res_ctx,
1599 const struct resource_pool *pool,
1600 struct stream_encoder *stream_enc,
1605 for (i = 0; i < pool->stream_enc_count; i++) {
1606 if (pool->stream_enc[i] == stream_enc)
1607 res_ctx->is_stream_enc_acquired[i] = acquired;
1611 /* TODO: release audio object */
1612 void update_audio_usage(
1613 struct resource_context *res_ctx,
1614 const struct resource_pool *pool,
1615 struct audio *audio,
1619 for (i = 0; i < pool->audio_count; i++) {
1620 if (pool->audios[i] == audio)
1621 res_ctx->is_audio_acquired[i] = acquired;
1625 static int acquire_first_free_pipe(
1626 struct resource_context *res_ctx,
1627 const struct resource_pool *pool,
1628 struct dc_stream_state *stream)
1632 for (i = 0; i < pool->pipe_count; i++) {
1633 if (!res_ctx->pipe_ctx[i].stream) {
1634 struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[i];
1636 pipe_ctx->stream_res.tg = pool->timing_generators[i];
1637 pipe_ctx->plane_res.mi = pool->mis[i];
1638 pipe_ctx->plane_res.hubp = pool->hubps[i];
1639 pipe_ctx->plane_res.ipp = pool->ipps[i];
1640 pipe_ctx->plane_res.xfm = pool->transforms[i];
1641 pipe_ctx->plane_res.dpp = pool->dpps[i];
1642 pipe_ctx->stream_res.opp = pool->opps[i];
1644 pipe_ctx->plane_res.mpcc_inst = pool->dpps[i]->inst;
1645 pipe_ctx->pipe_idx = i;
1648 pipe_ctx->stream = stream;
1655 static struct stream_encoder *find_first_free_match_stream_enc_for_link(
1656 struct resource_context *res_ctx,
1657 const struct resource_pool *pool,
1658 struct dc_stream_state *stream)
1662 struct dc_link *link = stream->link;
1664 for (i = 0; i < pool->stream_enc_count; i++) {
1665 if (!res_ctx->is_stream_enc_acquired[i] &&
1666 pool->stream_enc[i]) {
1667 /* Store first available for MST second display
1668 * in daisy chain use case */
1670 if (pool->stream_enc[i]->id ==
1671 link->link_enc->preferred_engine)
1672 return pool->stream_enc[i];
1677 * below can happen in cases when stream encoder is acquired:
1678 * 1) for second MST display in chain, so preferred engine already
1680 * 2) for another link, which preferred engine already acquired by any
1681 * MST configuration.
1683 * If signal is of DP type and preferred engine not found, return last available
1685 * TODO - This is just a patch up and a generic solution is
1686 * required for non DP connectors.
1689 if (j >= 0 && link->connector_signal == SIGNAL_TYPE_DISPLAY_PORT)
1690 return pool->stream_enc[j];
1695 static struct audio *find_first_free_audio(
1696 struct resource_context *res_ctx,
1697 const struct resource_pool *pool,
1701 for (i = 0; i < pool->audio_count; i++) {
1702 if ((res_ctx->is_audio_acquired[i] == false) && (res_ctx->is_stream_enc_acquired[i] == true)) {
1703 /*we have enough audio endpoint, find the matching inst*/
1707 return pool->audios[i];
1710 /*not found the matching one, first come first serve*/
1711 for (i = 0; i < pool->audio_count; i++) {
1712 if (res_ctx->is_audio_acquired[i] == false) {
1713 return pool->audios[i];
1719 bool resource_is_stream_unchanged(
1720 struct dc_state *old_context, struct dc_stream_state *stream)
1724 for (i = 0; i < old_context->stream_count; i++) {
1725 struct dc_stream_state *old_stream = old_context->streams[i];
1727 if (are_stream_backends_same(old_stream, stream))
1735 * dc_add_stream_to_ctx() - Add a new dc_stream_state to a dc_state.
1737 enum dc_status dc_add_stream_to_ctx(
1739 struct dc_state *new_ctx,
1740 struct dc_stream_state *stream)
1743 DC_LOGGER_INIT(dc->ctx->logger);
1745 if (new_ctx->stream_count >= dc->res_pool->timing_generator_count) {
1746 DC_LOG_WARNING("Max streams reached, can't add stream %p !\n", stream);
1747 return DC_ERROR_UNEXPECTED;
1750 new_ctx->streams[new_ctx->stream_count] = stream;
1751 dc_stream_retain(stream);
1752 new_ctx->stream_count++;
1754 res = dc->res_pool->funcs->add_stream_to_ctx(dc, new_ctx, stream);
1756 DC_LOG_WARNING("Adding stream %p to context failed with err %d!\n", stream, res);
1762 * dc_remove_stream_from_ctx() - Remove a stream from a dc_state.
1764 enum dc_status dc_remove_stream_from_ctx(
1766 struct dc_state *new_ctx,
1767 struct dc_stream_state *stream)
1770 struct dc_context *dc_ctx = dc->ctx;
1771 struct pipe_ctx *del_pipe = NULL;
1773 /* Release primary pipe */
1774 for (i = 0; i < MAX_PIPES; i++) {
1775 if (new_ctx->res_ctx.pipe_ctx[i].stream == stream &&
1776 !new_ctx->res_ctx.pipe_ctx[i].top_pipe) {
1777 struct pipe_ctx *odm_pipe =
1778 dc_res_get_odm_bottom_pipe(&new_ctx->res_ctx.pipe_ctx[i]);
1780 del_pipe = &new_ctx->res_ctx.pipe_ctx[i];
1782 ASSERT(del_pipe->stream_res.stream_enc);
1783 update_stream_engine_usage(
1786 del_pipe->stream_res.stream_enc,
1789 if (del_pipe->stream_res.audio)
1793 del_pipe->stream_res.audio,
1796 resource_unreference_clock_source(&new_ctx->res_ctx,
1798 del_pipe->clock_source);
1800 if (dc->res_pool->funcs->remove_stream_from_ctx)
1801 dc->res_pool->funcs->remove_stream_from_ctx(dc, new_ctx, stream);
1803 memset(del_pipe, 0, sizeof(*del_pipe));
1805 memset(odm_pipe, 0, sizeof(*odm_pipe));
1812 DC_ERROR("Pipe not found for stream %p !\n", stream);
1813 return DC_ERROR_UNEXPECTED;
1816 for (i = 0; i < new_ctx->stream_count; i++)
1817 if (new_ctx->streams[i] == stream)
1820 if (new_ctx->streams[i] != stream) {
1821 DC_ERROR("Context doesn't have stream %p !\n", stream);
1822 return DC_ERROR_UNEXPECTED;
1825 dc_stream_release(new_ctx->streams[i]);
1826 new_ctx->stream_count--;
1828 /* Trim back arrays */
1829 for (; i < new_ctx->stream_count; i++) {
1830 new_ctx->streams[i] = new_ctx->streams[i + 1];
1831 new_ctx->stream_status[i] = new_ctx->stream_status[i + 1];
1834 new_ctx->streams[new_ctx->stream_count] = NULL;
1836 &new_ctx->stream_status[new_ctx->stream_count],
1838 sizeof(new_ctx->stream_status[0]));
1843 static struct dc_stream_state *find_pll_sharable_stream(
1844 struct dc_stream_state *stream_needs_pll,
1845 struct dc_state *context)
1849 for (i = 0; i < context->stream_count; i++) {
1850 struct dc_stream_state *stream_has_pll = context->streams[i];
1852 /* We are looking for non dp, non virtual stream */
1853 if (resource_are_streams_timing_synchronizable(
1854 stream_needs_pll, stream_has_pll)
1855 && !dc_is_dp_signal(stream_has_pll->signal)
1856 && stream_has_pll->link->connector_signal
1857 != SIGNAL_TYPE_VIRTUAL)
1858 return stream_has_pll;
1865 static int get_norm_pix_clk(const struct dc_crtc_timing *timing)
1867 uint32_t pix_clk = timing->pix_clk_100hz;
1868 uint32_t normalized_pix_clk = pix_clk;
1870 if (timing->pixel_encoding == PIXEL_ENCODING_YCBCR420)
1872 if (timing->pixel_encoding != PIXEL_ENCODING_YCBCR422) {
1873 switch (timing->display_color_depth) {
1874 case COLOR_DEPTH_888:
1875 normalized_pix_clk = pix_clk;
1877 case COLOR_DEPTH_101010:
1878 normalized_pix_clk = (pix_clk * 30) / 24;
1880 case COLOR_DEPTH_121212:
1881 normalized_pix_clk = (pix_clk * 36) / 24;
1883 case COLOR_DEPTH_161616:
1884 normalized_pix_clk = (pix_clk * 48) / 24;
1891 return normalized_pix_clk;
1894 static void calculate_phy_pix_clks(struct dc_stream_state *stream)
1896 /* update actual pixel clock on all streams */
1897 if (dc_is_hdmi_signal(stream->signal))
1898 stream->phy_pix_clk = get_norm_pix_clk(
1899 &stream->timing) / 10;
1901 stream->phy_pix_clk =
1902 stream->timing.pix_clk_100hz / 10;
1904 if (stream->timing.timing_3d_format == TIMING_3D_FORMAT_HW_FRAME_PACKING)
1905 stream->phy_pix_clk *= 2;
1908 static int acquire_resource_from_hw_enabled_state(
1909 struct resource_context *res_ctx,
1910 const struct resource_pool *pool,
1911 struct dc_stream_state *stream)
1913 struct dc_link *link = stream->link;
1916 /* Check for enabled DIG to identify enabled display */
1917 if (!link->link_enc->funcs->is_dig_enabled(link->link_enc))
1920 /* Check for which front end is used by this encoder.
1921 * Note the inst is 1 indexed, where 0 is undefined.
1922 * Note that DIG_FE can source from different OTG but our
1923 * current implementation always map 1-to-1, so this code makes
1924 * the same assumption and doesn't check OTG source.
1926 inst = link->link_enc->funcs->get_dig_frontend(link->link_enc) - 1;
1928 /* Instance should be within the range of the pool */
1929 if (inst >= pool->pipe_count)
1932 if (!res_ctx->pipe_ctx[inst].stream) {
1933 struct pipe_ctx *pipe_ctx = &res_ctx->pipe_ctx[inst];
1935 pipe_ctx->stream_res.tg = pool->timing_generators[inst];
1936 pipe_ctx->plane_res.mi = pool->mis[inst];
1937 pipe_ctx->plane_res.hubp = pool->hubps[inst];
1938 pipe_ctx->plane_res.ipp = pool->ipps[inst];
1939 pipe_ctx->plane_res.xfm = pool->transforms[inst];
1940 pipe_ctx->plane_res.dpp = pool->dpps[inst];
1941 pipe_ctx->stream_res.opp = pool->opps[inst];
1942 if (pool->dpps[inst])
1943 pipe_ctx->plane_res.mpcc_inst = pool->dpps[inst]->inst;
1944 pipe_ctx->pipe_idx = inst;
1946 pipe_ctx->stream = stream;
1953 enum dc_status resource_map_pool_resources(
1954 const struct dc *dc,
1955 struct dc_state *context,
1956 struct dc_stream_state *stream)
1958 const struct resource_pool *pool = dc->res_pool;
1960 struct dc_context *dc_ctx = dc->ctx;
1961 struct pipe_ctx *pipe_ctx = NULL;
1963 struct dc_bios *dcb = dc->ctx->dc_bios;
1965 /* TODO Check if this is needed */
1966 /*if (!resource_is_stream_unchanged(old_context, stream)) {
1967 if (stream != NULL && old_context->streams[i] != NULL) {
1968 stream->bit_depth_params =
1969 old_context->streams[i]->bit_depth_params;
1970 stream->clamping = old_context->streams[i]->clamping;
1976 calculate_phy_pix_clks(stream);
1978 /* TODO: Check Linux */
1979 if (dc->config.allow_seamless_boot_optimization &&
1980 !dcb->funcs->is_accelerated_mode(dcb)) {
1981 if (dc_validate_seamless_boot_timing(dc, stream->sink, &stream->timing))
1982 stream->apply_seamless_boot_optimization = true;
1985 if (stream->apply_seamless_boot_optimization)
1986 pipe_idx = acquire_resource_from_hw_enabled_state(
1992 /* acquire new resources */
1993 pipe_idx = acquire_first_free_pipe(&context->res_ctx, pool, stream);
1995 #ifdef CONFIG_DRM_AMD_DC_DCN1_0
1997 pipe_idx = acquire_first_split_pipe(&context->res_ctx, pool, stream);
2000 if (pipe_idx < 0 || context->res_ctx.pipe_ctx[pipe_idx].stream_res.tg == NULL)
2001 return DC_NO_CONTROLLER_RESOURCE;
2003 pipe_ctx = &context->res_ctx.pipe_ctx[pipe_idx];
2005 pipe_ctx->stream_res.stream_enc =
2006 find_first_free_match_stream_enc_for_link(
2007 &context->res_ctx, pool, stream);
2009 if (!pipe_ctx->stream_res.stream_enc)
2010 return DC_NO_STREAM_ENC_RESOURCE;
2012 update_stream_engine_usage(
2013 &context->res_ctx, pool,
2014 pipe_ctx->stream_res.stream_enc,
2017 /* TODO: Add check if ASIC support and EDID audio */
2018 if (!stream->converter_disable_audio &&
2019 dc_is_audio_capable_signal(pipe_ctx->stream->signal) &&
2020 stream->audio_info.mode_count) {
2021 pipe_ctx->stream_res.audio = find_first_free_audio(
2022 &context->res_ctx, pool, pipe_ctx->stream_res.stream_enc->id);
2025 * Audio assigned in order first come first get.
2026 * There are asics which has number of audio
2027 * resources less then number of pipes
2029 if (pipe_ctx->stream_res.audio)
2030 update_audio_usage(&context->res_ctx, pool,
2031 pipe_ctx->stream_res.audio, true);
2034 /* Add ABM to the resource if on EDP */
2035 if (pipe_ctx->stream && dc_is_embedded_signal(pipe_ctx->stream->signal))
2036 pipe_ctx->stream_res.abm = pool->abm;
2038 for (i = 0; i < context->stream_count; i++)
2039 if (context->streams[i] == stream) {
2040 context->stream_status[i].primary_otg_inst = pipe_ctx->stream_res.tg->inst;
2041 context->stream_status[i].stream_enc_inst = pipe_ctx->stream_res.stream_enc->id;
2045 DC_ERROR("Stream %p not found in new ctx!\n", stream);
2046 return DC_ERROR_UNEXPECTED;
2050 * dc_resource_state_copy_construct_current() - Creates a new dc_state from existing state
2051 * Is a shallow copy. Increments refcounts on existing streams and planes.
2052 * @dc: copy out of dc->current_state
2053 * @dst_ctx: copy into this
2055 void dc_resource_state_copy_construct_current(
2056 const struct dc *dc,
2057 struct dc_state *dst_ctx)
2059 dc_resource_state_copy_construct(dc->current_state, dst_ctx);
2063 void dc_resource_state_construct(
2064 const struct dc *dc,
2065 struct dc_state *dst_ctx)
2067 dst_ctx->dccg = dc->res_pool->clk_mgr;
2071 * dc_validate_global_state() - Determine if HW can support a given state
2072 * Checks HW resource availability and bandwidth requirement.
2073 * @dc: dc struct for this driver
2074 * @new_ctx: state to be validated
2076 * Return: DC_OK if the result can be programmed. Otherwise, an error code.
2078 enum dc_status dc_validate_global_state(
2080 struct dc_state *new_ctx)
2082 enum dc_status result = DC_ERROR_UNEXPECTED;
2086 return DC_ERROR_UNEXPECTED;
2088 if (dc->res_pool->funcs->validate_global) {
2089 result = dc->res_pool->funcs->validate_global(dc, new_ctx);
2090 if (result != DC_OK)
2094 for (i = 0; i < new_ctx->stream_count; i++) {
2095 struct dc_stream_state *stream = new_ctx->streams[i];
2097 for (j = 0; j < dc->res_pool->pipe_count; j++) {
2098 struct pipe_ctx *pipe_ctx = &new_ctx->res_ctx.pipe_ctx[j];
2100 if (pipe_ctx->stream != stream)
2103 if (dc->res_pool->funcs->get_default_swizzle_mode &&
2104 pipe_ctx->plane_state &&
2105 pipe_ctx->plane_state->tiling_info.gfx9.swizzle == DC_SW_UNKNOWN) {
2106 result = dc->res_pool->funcs->get_default_swizzle_mode(pipe_ctx->plane_state);
2107 if (result != DC_OK)
2111 /* Switch to dp clock source only if there is
2112 * no non dp stream that shares the same timing
2113 * with the dp stream.
2115 if (dc_is_dp_signal(pipe_ctx->stream->signal) &&
2116 !find_pll_sharable_stream(stream, new_ctx)) {
2118 resource_unreference_clock_source(
2121 pipe_ctx->clock_source);
2123 pipe_ctx->clock_source = dc->res_pool->dp_clock_source;
2124 resource_reference_clock_source(
2127 pipe_ctx->clock_source);
2132 result = resource_build_scaling_params_for_context(dc, new_ctx);
2134 if (result == DC_OK)
2135 if (!dc->res_pool->funcs->validate_bandwidth(dc, new_ctx))
2136 result = DC_FAIL_BANDWIDTH_VALIDATE;
2141 static void patch_gamut_packet_checksum(
2142 struct dc_info_packet *gamut_packet)
2144 /* For gamut we recalc checksum */
2145 if (gamut_packet->valid) {
2146 uint8_t chk_sum = 0;
2150 /*start of the Gamut data. */
2151 ptr = &gamut_packet->sb[3];
2153 for (i = 0; i <= gamut_packet->sb[1]; i++)
2156 gamut_packet->sb[2] = (uint8_t) (0x100 - chk_sum);
2160 static void set_avi_info_frame(
2161 struct dc_info_packet *info_packet,
2162 struct pipe_ctx *pipe_ctx)
2164 struct dc_stream_state *stream = pipe_ctx->stream;
2165 enum dc_color_space color_space = COLOR_SPACE_UNKNOWN;
2166 uint32_t pixel_encoding = 0;
2167 enum scanning_type scan_type = SCANNING_TYPE_NODATA;
2168 enum dc_aspect_ratio aspect = ASPECT_RATIO_NO_DATA;
2170 uint8_t itc_value = 0;
2171 uint8_t cn0_cn1 = 0;
2172 unsigned int cn0_cn1_value = 0;
2173 uint8_t *check_sum = NULL;
2174 uint8_t byte_index = 0;
2175 union hdmi_info_packet hdmi_info;
2176 union display_content_support support = {0};
2177 unsigned int vic = pipe_ctx->stream->timing.vic;
2178 enum dc_timing_3d_format format;
2180 memset(&hdmi_info, 0, sizeof(union hdmi_info_packet));
2182 color_space = pipe_ctx->stream->output_color_space;
2183 if (color_space == COLOR_SPACE_UNKNOWN)
2184 color_space = (stream->timing.pixel_encoding == PIXEL_ENCODING_RGB) ?
2185 COLOR_SPACE_SRGB:COLOR_SPACE_YCBCR709;
2187 /* Initialize header */
2188 hdmi_info.bits.header.info_frame_type = HDMI_INFOFRAME_TYPE_AVI;
2189 /* InfoFrameVersion_3 is defined by CEA861F (Section 6.4), but shall
2190 * not be used in HDMI 2.0 (Section 10.1) */
2191 hdmi_info.bits.header.version = 2;
2192 hdmi_info.bits.header.length = HDMI_AVI_INFOFRAME_SIZE;
2195 * IDO-defined (Y2,Y1,Y0 = 1,1,1) shall not be used by devices built
2196 * according to HDMI 2.0 spec (Section 10.1)
2199 switch (stream->timing.pixel_encoding) {
2200 case PIXEL_ENCODING_YCBCR422:
2204 case PIXEL_ENCODING_YCBCR444:
2207 case PIXEL_ENCODING_YCBCR420:
2211 case PIXEL_ENCODING_RGB:
2216 /* Y0_Y1_Y2 : The pixel encoding */
2217 /* H14b AVI InfoFrame has extension on Y-field from 2 bits to 3 bits */
2218 hdmi_info.bits.Y0_Y1_Y2 = pixel_encoding;
2220 /* A0 = 1 Active Format Information valid */
2221 hdmi_info.bits.A0 = ACTIVE_FORMAT_VALID;
2223 /* B0, B1 = 3; Bar info data is valid */
2224 hdmi_info.bits.B0_B1 = BAR_INFO_BOTH_VALID;
2226 hdmi_info.bits.SC0_SC1 = PICTURE_SCALING_UNIFORM;
2228 /* S0, S1 : Underscan / Overscan */
2229 /* TODO: un-hardcode scan type */
2230 scan_type = SCANNING_TYPE_UNDERSCAN;
2231 hdmi_info.bits.S0_S1 = scan_type;
2233 /* C0, C1 : Colorimetry */
2234 if (color_space == COLOR_SPACE_YCBCR709 ||
2235 color_space == COLOR_SPACE_YCBCR709_LIMITED)
2236 hdmi_info.bits.C0_C1 = COLORIMETRY_ITU709;
2237 else if (color_space == COLOR_SPACE_YCBCR601 ||
2238 color_space == COLOR_SPACE_YCBCR601_LIMITED)
2239 hdmi_info.bits.C0_C1 = COLORIMETRY_ITU601;
2241 hdmi_info.bits.C0_C1 = COLORIMETRY_NO_DATA;
2243 if (color_space == COLOR_SPACE_2020_RGB_FULLRANGE ||
2244 color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE ||
2245 color_space == COLOR_SPACE_2020_YCBCR) {
2246 hdmi_info.bits.EC0_EC2 = COLORIMETRYEX_BT2020RGBYCBCR;
2247 hdmi_info.bits.C0_C1 = COLORIMETRY_EXTENDED;
2248 } else if (color_space == COLOR_SPACE_ADOBERGB) {
2249 hdmi_info.bits.EC0_EC2 = COLORIMETRYEX_ADOBERGB;
2250 hdmi_info.bits.C0_C1 = COLORIMETRY_EXTENDED;
2253 /* TODO: un-hardcode aspect ratio */
2254 aspect = stream->timing.aspect_ratio;
2257 case ASPECT_RATIO_4_3:
2258 case ASPECT_RATIO_16_9:
2259 hdmi_info.bits.M0_M1 = aspect;
2262 case ASPECT_RATIO_NO_DATA:
2263 case ASPECT_RATIO_64_27:
2264 case ASPECT_RATIO_256_135:
2266 hdmi_info.bits.M0_M1 = 0;
2269 /* Active Format Aspect ratio - same as Picture Aspect Ratio. */
2270 hdmi_info.bits.R0_R3 = ACTIVE_FORMAT_ASPECT_RATIO_SAME_AS_PICTURE;
2272 /* TODO: un-hardcode cn0_cn1 and itc */
2280 support = stream->content_support;
2283 if (!support.bits.valid_content_type) {
2286 if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GRAPHICS) {
2287 if (support.bits.graphics_content == 1) {
2290 } else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_PHOTO) {
2291 if (support.bits.photo_content == 1) {
2297 } else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_CINEMA) {
2298 if (support.bits.cinema_content == 1) {
2304 } else if (cn0_cn1 == DISPLAY_CONTENT_TYPE_GAME) {
2305 if (support.bits.game_content == 1) {
2313 hdmi_info.bits.CN0_CN1 = cn0_cn1_value;
2314 hdmi_info.bits.ITC = itc_value;
2317 /* TODO : We should handle YCC quantization */
2318 /* but we do not have matrix calculation */
2319 if (stream->qs_bit == 1 &&
2320 stream->qy_bit == 1) {
2321 if (color_space == COLOR_SPACE_SRGB ||
2322 color_space == COLOR_SPACE_2020_RGB_FULLRANGE) {
2323 hdmi_info.bits.Q0_Q1 = RGB_QUANTIZATION_FULL_RANGE;
2324 hdmi_info.bits.YQ0_YQ1 = YYC_QUANTIZATION_FULL_RANGE;
2325 } else if (color_space == COLOR_SPACE_SRGB_LIMITED ||
2326 color_space == COLOR_SPACE_2020_RGB_LIMITEDRANGE) {
2327 hdmi_info.bits.Q0_Q1 = RGB_QUANTIZATION_LIMITED_RANGE;
2328 hdmi_info.bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
2330 hdmi_info.bits.Q0_Q1 = RGB_QUANTIZATION_DEFAULT_RANGE;
2331 hdmi_info.bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
2334 hdmi_info.bits.Q0_Q1 = RGB_QUANTIZATION_DEFAULT_RANGE;
2335 hdmi_info.bits.YQ0_YQ1 = YYC_QUANTIZATION_LIMITED_RANGE;
2339 format = stream->timing.timing_3d_format;
2340 /*todo, add 3DStereo support*/
2341 if (format != TIMING_3D_FORMAT_NONE) {
2342 // Based on HDMI specs hdmi vic needs to be converted to cea vic when 3D is enabled
2343 switch (pipe_ctx->stream->timing.hdmi_vic) {
2360 hdmi_info.bits.VIC0_VIC7 = vic;
2363 * PR0 - PR3 start from 0 whereas pHwPathMode->mode.timing.flags.pixel
2364 * repetition start from 1 */
2365 hdmi_info.bits.PR0_PR3 = 0;
2368 * barTop: Line Number of End of Top Bar.
2369 * barBottom: Line Number of Start of Bottom Bar.
2370 * barLeft: Pixel Number of End of Left Bar.
2371 * barRight: Pixel Number of Start of Right Bar. */
2372 hdmi_info.bits.bar_top = stream->timing.v_border_top;
2373 hdmi_info.bits.bar_bottom = (stream->timing.v_total
2374 - stream->timing.v_border_bottom + 1);
2375 hdmi_info.bits.bar_left = stream->timing.h_border_left;
2376 hdmi_info.bits.bar_right = (stream->timing.h_total
2377 - stream->timing.h_border_right + 1);
2379 /* check_sum - Calculate AFMT_AVI_INFO0 ~ AFMT_AVI_INFO3 */
2380 check_sum = &hdmi_info.packet_raw_data.sb[0];
2382 *check_sum = HDMI_INFOFRAME_TYPE_AVI + HDMI_AVI_INFOFRAME_SIZE + 2;
2384 for (byte_index = 1; byte_index <= HDMI_AVI_INFOFRAME_SIZE; byte_index++)
2385 *check_sum += hdmi_info.packet_raw_data.sb[byte_index];
2387 /* one byte complement */
2388 *check_sum = (uint8_t) (0x100 - *check_sum);
2390 /* Store in hw_path_mode */
2391 info_packet->hb0 = hdmi_info.packet_raw_data.hb0;
2392 info_packet->hb1 = hdmi_info.packet_raw_data.hb1;
2393 info_packet->hb2 = hdmi_info.packet_raw_data.hb2;
2395 for (byte_index = 0; byte_index < sizeof(hdmi_info.packet_raw_data.sb); byte_index++)
2396 info_packet->sb[byte_index] = hdmi_info.packet_raw_data.sb[byte_index];
2398 info_packet->valid = true;
2401 static void set_vendor_info_packet(
2402 struct dc_info_packet *info_packet,
2403 struct dc_stream_state *stream)
2405 /* SPD info packet for FreeSync */
2407 /* Check if Freesync is supported. Return if false. If true,
2408 * set the corresponding bit in the info packet
2410 if (!stream->vsp_infopacket.valid)
2413 *info_packet = stream->vsp_infopacket;
2416 static void set_spd_info_packet(
2417 struct dc_info_packet *info_packet,
2418 struct dc_stream_state *stream)
2420 /* SPD info packet for FreeSync */
2422 /* Check if Freesync is supported. Return if false. If true,
2423 * set the corresponding bit in the info packet
2425 if (!stream->vrr_infopacket.valid)
2428 *info_packet = stream->vrr_infopacket;
2431 static void set_dp_sdp_info_packet(
2432 struct dc_info_packet *info_packet,
2433 struct dc_stream_state *stream)
2435 /* SPD info packet for custom sdp message */
2437 /* Return if false. If true,
2438 * set the corresponding bit in the info packet
2440 if (!stream->dpsdp_infopacket.valid)
2443 *info_packet = stream->dpsdp_infopacket;
2446 static void set_hdr_static_info_packet(
2447 struct dc_info_packet *info_packet,
2448 struct dc_stream_state *stream)
2450 /* HDR Static Metadata info packet for HDR10 */
2452 if (!stream->hdr_static_metadata.valid ||
2453 stream->use_dynamic_meta)
2456 *info_packet = stream->hdr_static_metadata;
2459 static void set_vsc_info_packet(
2460 struct dc_info_packet *info_packet,
2461 struct dc_stream_state *stream)
2463 if (!stream->vsc_infopacket.valid)
2466 *info_packet = stream->vsc_infopacket;
2469 void dc_resource_state_destruct(struct dc_state *context)
2473 for (i = 0; i < context->stream_count; i++) {
2474 for (j = 0; j < context->stream_status[i].plane_count; j++)
2475 dc_plane_state_release(
2476 context->stream_status[i].plane_states[j]);
2478 context->stream_status[i].plane_count = 0;
2479 dc_stream_release(context->streams[i]);
2480 context->streams[i] = NULL;
2484 void dc_resource_state_copy_construct(
2485 const struct dc_state *src_ctx,
2486 struct dc_state *dst_ctx)
2489 struct kref refcount = dst_ctx->refcount;
2491 *dst_ctx = *src_ctx;
2493 for (i = 0; i < MAX_PIPES; i++) {
2494 struct pipe_ctx *cur_pipe = &dst_ctx->res_ctx.pipe_ctx[i];
2496 if (cur_pipe->top_pipe)
2497 cur_pipe->top_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->top_pipe->pipe_idx];
2499 if (cur_pipe->bottom_pipe)
2500 cur_pipe->bottom_pipe = &dst_ctx->res_ctx.pipe_ctx[cur_pipe->bottom_pipe->pipe_idx];
2504 for (i = 0; i < dst_ctx->stream_count; i++) {
2505 dc_stream_retain(dst_ctx->streams[i]);
2506 for (j = 0; j < dst_ctx->stream_status[i].plane_count; j++)
2507 dc_plane_state_retain(
2508 dst_ctx->stream_status[i].plane_states[j]);
2511 /* context refcount should not be overridden */
2512 dst_ctx->refcount = refcount;
2516 struct clock_source *dc_resource_find_first_free_pll(
2517 struct resource_context *res_ctx,
2518 const struct resource_pool *pool)
2522 for (i = 0; i < pool->clk_src_count; ++i) {
2523 if (res_ctx->clock_source_ref_count[i] == 0)
2524 return pool->clock_sources[i];
2530 void resource_build_info_frame(struct pipe_ctx *pipe_ctx)
2532 enum signal_type signal = SIGNAL_TYPE_NONE;
2533 struct encoder_info_frame *info = &pipe_ctx->stream_res.encoder_info_frame;
2535 /* default all packets to invalid */
2536 info->avi.valid = false;
2537 info->gamut.valid = false;
2538 info->vendor.valid = false;
2539 info->spd.valid = false;
2540 info->hdrsmd.valid = false;
2541 info->vsc.valid = false;
2542 info->dpsdp.valid = false;
2544 signal = pipe_ctx->stream->signal;
2546 /* HDMi and DP have different info packets*/
2547 if (dc_is_hdmi_signal(signal)) {
2548 set_avi_info_frame(&info->avi, pipe_ctx);
2550 set_vendor_info_packet(&info->vendor, pipe_ctx->stream);
2552 set_spd_info_packet(&info->spd, pipe_ctx->stream);
2554 set_hdr_static_info_packet(&info->hdrsmd, pipe_ctx->stream);
2556 } else if (dc_is_dp_signal(signal)) {
2557 set_vsc_info_packet(&info->vsc, pipe_ctx->stream);
2559 set_spd_info_packet(&info->spd, pipe_ctx->stream);
2561 set_hdr_static_info_packet(&info->hdrsmd, pipe_ctx->stream);
2563 set_dp_sdp_info_packet(&info->dpsdp, pipe_ctx->stream);
2566 patch_gamut_packet_checksum(&info->gamut);
2569 enum dc_status resource_map_clock_resources(
2570 const struct dc *dc,
2571 struct dc_state *context,
2572 struct dc_stream_state *stream)
2574 /* acquire new resources */
2575 const struct resource_pool *pool = dc->res_pool;
2576 struct pipe_ctx *pipe_ctx = resource_get_head_pipe_for_stream(
2577 &context->res_ctx, stream);
2580 return DC_ERROR_UNEXPECTED;
2582 if (dc_is_dp_signal(pipe_ctx->stream->signal)
2583 || pipe_ctx->stream->signal == SIGNAL_TYPE_VIRTUAL)
2584 pipe_ctx->clock_source = pool->dp_clock_source;
2586 pipe_ctx->clock_source = NULL;
2588 if (!dc->config.disable_disp_pll_sharing)
2589 pipe_ctx->clock_source = resource_find_used_clk_src_for_sharing(
2593 if (pipe_ctx->clock_source == NULL)
2594 pipe_ctx->clock_source =
2595 dc_resource_find_first_free_pll(
2600 if (pipe_ctx->clock_source == NULL)
2601 return DC_NO_CLOCK_SOURCE_RESOURCE;
2603 resource_reference_clock_source(
2604 &context->res_ctx, pool,
2605 pipe_ctx->clock_source);
2611 * Note: We need to disable output if clock sources change,
2612 * since bios does optimization and doesn't apply if changing
2613 * PHY when not already disabled.
2615 bool pipe_need_reprogram(
2616 struct pipe_ctx *pipe_ctx_old,
2617 struct pipe_ctx *pipe_ctx)
2619 if (!pipe_ctx_old->stream)
2622 if (pipe_ctx_old->stream->sink != pipe_ctx->stream->sink)
2625 if (pipe_ctx_old->stream->signal != pipe_ctx->stream->signal)
2628 if (pipe_ctx_old->stream_res.audio != pipe_ctx->stream_res.audio)
2631 if (pipe_ctx_old->clock_source != pipe_ctx->clock_source
2632 && pipe_ctx_old->stream != pipe_ctx->stream)
2635 if (pipe_ctx_old->stream_res.stream_enc != pipe_ctx->stream_res.stream_enc)
2638 if (is_timing_changed(pipe_ctx_old->stream, pipe_ctx->stream))
2641 if (is_hdr_static_meta_changed(pipe_ctx_old->stream, pipe_ctx->stream))
2644 if (pipe_ctx_old->stream->dpms_off != pipe_ctx->stream->dpms_off)
2647 if (is_vsc_info_packet_changed(pipe_ctx_old->stream, pipe_ctx->stream))
2653 void resource_build_bit_depth_reduction_params(struct dc_stream_state *stream,
2654 struct bit_depth_reduction_params *fmt_bit_depth)
2656 enum dc_dither_option option = stream->dither_option;
2657 enum dc_pixel_encoding pixel_encoding =
2658 stream->timing.pixel_encoding;
2660 memset(fmt_bit_depth, 0, sizeof(*fmt_bit_depth));
2662 if (option == DITHER_OPTION_DEFAULT) {
2663 switch (stream->timing.display_color_depth) {
2664 case COLOR_DEPTH_666:
2665 option = DITHER_OPTION_SPATIAL6;
2667 case COLOR_DEPTH_888:
2668 option = DITHER_OPTION_SPATIAL8;
2670 case COLOR_DEPTH_101010:
2671 option = DITHER_OPTION_SPATIAL10;
2674 option = DITHER_OPTION_DISABLE;
2678 if (option == DITHER_OPTION_DISABLE)
2681 if (option == DITHER_OPTION_TRUN6) {
2682 fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2683 fmt_bit_depth->flags.TRUNCATE_DEPTH = 0;
2684 } else if (option == DITHER_OPTION_TRUN8 ||
2685 option == DITHER_OPTION_TRUN8_SPATIAL6 ||
2686 option == DITHER_OPTION_TRUN8_FM6) {
2687 fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2688 fmt_bit_depth->flags.TRUNCATE_DEPTH = 1;
2689 } else if (option == DITHER_OPTION_TRUN10 ||
2690 option == DITHER_OPTION_TRUN10_SPATIAL6 ||
2691 option == DITHER_OPTION_TRUN10_SPATIAL8 ||
2692 option == DITHER_OPTION_TRUN10_FM8 ||
2693 option == DITHER_OPTION_TRUN10_FM6 ||
2694 option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2695 fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2696 fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
2699 /* special case - Formatter can only reduce by 4 bits at most.
2700 * When reducing from 12 to 6 bits,
2701 * HW recommends we use trunc with round mode
2702 * (if we did nothing, trunc to 10 bits would be used)
2703 * note that any 12->10 bit reduction is ignored prior to DCE8,
2704 * as the input was 10 bits.
2706 if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM ||
2707 option == DITHER_OPTION_SPATIAL6 ||
2708 option == DITHER_OPTION_FM6) {
2709 fmt_bit_depth->flags.TRUNCATE_ENABLED = 1;
2710 fmt_bit_depth->flags.TRUNCATE_DEPTH = 2;
2711 fmt_bit_depth->flags.TRUNCATE_MODE = 1;
2715 * note that spatial modes 1-3 are never used
2717 if (option == DITHER_OPTION_SPATIAL6_FRAME_RANDOM ||
2718 option == DITHER_OPTION_SPATIAL6 ||
2719 option == DITHER_OPTION_TRUN10_SPATIAL6 ||
2720 option == DITHER_OPTION_TRUN8_SPATIAL6) {
2721 fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2722 fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 0;
2723 fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2724 fmt_bit_depth->flags.RGB_RANDOM =
2725 (pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2726 } else if (option == DITHER_OPTION_SPATIAL8_FRAME_RANDOM ||
2727 option == DITHER_OPTION_SPATIAL8 ||
2728 option == DITHER_OPTION_SPATIAL8_FM6 ||
2729 option == DITHER_OPTION_TRUN10_SPATIAL8 ||
2730 option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2731 fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2732 fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 1;
2733 fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2734 fmt_bit_depth->flags.RGB_RANDOM =
2735 (pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2736 } else if (option == DITHER_OPTION_SPATIAL10_FRAME_RANDOM ||
2737 option == DITHER_OPTION_SPATIAL10 ||
2738 option == DITHER_OPTION_SPATIAL10_FM8 ||
2739 option == DITHER_OPTION_SPATIAL10_FM6) {
2740 fmt_bit_depth->flags.SPATIAL_DITHER_ENABLED = 1;
2741 fmt_bit_depth->flags.SPATIAL_DITHER_DEPTH = 2;
2742 fmt_bit_depth->flags.HIGHPASS_RANDOM = 1;
2743 fmt_bit_depth->flags.RGB_RANDOM =
2744 (pixel_encoding == PIXEL_ENCODING_RGB) ? 1 : 0;
2747 if (option == DITHER_OPTION_SPATIAL6 ||
2748 option == DITHER_OPTION_SPATIAL8 ||
2749 option == DITHER_OPTION_SPATIAL10) {
2750 fmt_bit_depth->flags.FRAME_RANDOM = 0;
2752 fmt_bit_depth->flags.FRAME_RANDOM = 1;
2755 //////////////////////
2756 //// temporal dither
2757 //////////////////////
2758 if (option == DITHER_OPTION_FM6 ||
2759 option == DITHER_OPTION_SPATIAL8_FM6 ||
2760 option == DITHER_OPTION_SPATIAL10_FM6 ||
2761 option == DITHER_OPTION_TRUN10_FM6 ||
2762 option == DITHER_OPTION_TRUN8_FM6 ||
2763 option == DITHER_OPTION_TRUN10_SPATIAL8_FM6) {
2764 fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2765 fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 0;
2766 } else if (option == DITHER_OPTION_FM8 ||
2767 option == DITHER_OPTION_SPATIAL10_FM8 ||
2768 option == DITHER_OPTION_TRUN10_FM8) {
2769 fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2770 fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 1;
2771 } else if (option == DITHER_OPTION_FM10) {
2772 fmt_bit_depth->flags.FRAME_MODULATION_ENABLED = 1;
2773 fmt_bit_depth->flags.FRAME_MODULATION_DEPTH = 2;
2776 fmt_bit_depth->pixel_encoding = pixel_encoding;
2779 enum dc_status dc_validate_stream(struct dc *dc, struct dc_stream_state *stream)
2781 struct dc *core_dc = dc;
2782 struct dc_link *link = stream->link;
2783 struct timing_generator *tg = core_dc->res_pool->timing_generators[0];
2784 enum dc_status res = DC_OK;
2786 calculate_phy_pix_clks(stream);
2788 if (!tg->funcs->validate_timing(tg, &stream->timing))
2789 res = DC_FAIL_CONTROLLER_VALIDATE;
2792 if (!link->link_enc->funcs->validate_output_with_stream(
2793 link->link_enc, stream))
2794 res = DC_FAIL_ENC_VALIDATE;
2797 /* TODO: validate audio ASIC caps, encoder */
2800 res = dc_link_validate_mode_timing(stream,
2807 enum dc_status dc_validate_plane(struct dc *dc, const struct dc_plane_state *plane_state)
2809 enum dc_status res = DC_OK;
2811 /* TODO For now validates pixel format only */
2812 if (dc->res_pool->funcs->validate_plane)
2813 return dc->res_pool->funcs->validate_plane(plane_state, &dc->caps);
2818 unsigned int resource_pixel_format_to_bpp(enum surface_pixel_format format)
2821 case SURFACE_PIXEL_FORMAT_GRPH_PALETA_256_COLORS:
2823 case SURFACE_PIXEL_FORMAT_VIDEO_420_YCbCr:
2824 case SURFACE_PIXEL_FORMAT_VIDEO_420_YCrCb:
2826 case SURFACE_PIXEL_FORMAT_GRPH_ARGB1555:
2827 case SURFACE_PIXEL_FORMAT_GRPH_RGB565:
2828 case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCbCr:
2829 case SURFACE_PIXEL_FORMAT_VIDEO_420_10bpc_YCrCb:
2831 case SURFACE_PIXEL_FORMAT_GRPH_ARGB8888:
2832 case SURFACE_PIXEL_FORMAT_GRPH_ABGR8888:
2833 case SURFACE_PIXEL_FORMAT_GRPH_ARGB2101010:
2834 case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010:
2835 case SURFACE_PIXEL_FORMAT_GRPH_ABGR2101010_XR_BIAS:
2837 case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616:
2838 case SURFACE_PIXEL_FORMAT_GRPH_ARGB16161616F:
2839 case SURFACE_PIXEL_FORMAT_GRPH_ABGR16161616F:
2842 ASSERT_CRITICAL(false);