1 // SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
 * Copyright (C) 2017 Linaro Ltd.
 */
7 #include <linux/iopoll.h>
8 #include <linux/list.h>
9 #include <linux/mutex.h>
10 #include <linux/pm_runtime.h>
11 #include <linux/slab.h>
12 #include <media/videobuf2-dma-sg.h>
13 #include <media/v4l2-mem2mem.h>
14 #include <asm/div64.h>
18 #include "hfi_helper.h"
19 #include "hfi_venus_io.h"
22 struct list_head list;
30 bool venus_helper_check_codec(struct venus_inst *inst, u32 v4l2_pixfmt)
32 struct venus_core *core = inst->core;
33 u32 session_type = inst->session_type;
36 switch (v4l2_pixfmt) {
37 case V4L2_PIX_FMT_H264:
38 codec = HFI_VIDEO_CODEC_H264;
40 case V4L2_PIX_FMT_H263:
41 codec = HFI_VIDEO_CODEC_H263;
43 case V4L2_PIX_FMT_MPEG1:
44 codec = HFI_VIDEO_CODEC_MPEG1;
46 case V4L2_PIX_FMT_MPEG2:
47 codec = HFI_VIDEO_CODEC_MPEG2;
49 case V4L2_PIX_FMT_MPEG4:
50 codec = HFI_VIDEO_CODEC_MPEG4;
52 case V4L2_PIX_FMT_VC1_ANNEX_G:
53 case V4L2_PIX_FMT_VC1_ANNEX_L:
54 codec = HFI_VIDEO_CODEC_VC1;
56 case V4L2_PIX_FMT_VP8:
57 codec = HFI_VIDEO_CODEC_VP8;
59 case V4L2_PIX_FMT_VP9:
60 codec = HFI_VIDEO_CODEC_VP9;
62 case V4L2_PIX_FMT_XVID:
63 codec = HFI_VIDEO_CODEC_DIVX;
65 case V4L2_PIX_FMT_HEVC:
66 codec = HFI_VIDEO_CODEC_HEVC;
72 if (session_type == VIDC_SESSION_TYPE_ENC && core->enc_codecs & codec)
75 if (session_type == VIDC_SESSION_TYPE_DEC && core->dec_codecs & codec)
80 EXPORT_SYMBOL_GPL(venus_helper_check_codec);
82 int venus_helper_queue_dpb_bufs(struct venus_inst *inst)
87 list_for_each_entry(buf, &inst->dpbbufs, list) {
88 struct hfi_frame_data fdata;
90 memset(&fdata, 0, sizeof(fdata));
91 fdata.alloc_len = buf->size;
92 fdata.device_addr = buf->da;
93 fdata.buffer_type = buf->type;
95 ret = hfi_session_process_buf(inst, &fdata);
103 EXPORT_SYMBOL_GPL(venus_helper_queue_dpb_bufs);
105 int venus_helper_free_dpb_bufs(struct venus_inst *inst)
107 struct intbuf *buf, *n;
109 list_for_each_entry_safe(buf, n, &inst->dpbbufs, list) {
110 list_del_init(&buf->list);
111 dma_free_attrs(inst->core->dev, buf->size, buf->va, buf->da,
116 INIT_LIST_HEAD(&inst->dpbbufs);
120 EXPORT_SYMBOL_GPL(venus_helper_free_dpb_bufs);
122 int venus_helper_alloc_dpb_bufs(struct venus_inst *inst)
124 struct venus_core *core = inst->core;
125 struct device *dev = core->dev;
126 enum hfi_version ver = core->res->hfi_version;
127 struct hfi_buffer_requirements bufreq;
128 u32 buftype = inst->dpb_buftype;
129 unsigned int dpb_size = 0;
135 /* no need to allocate dpb buffers */
139 if (inst->dpb_buftype == HFI_BUFFER_OUTPUT)
140 dpb_size = inst->output_buf_size;
141 else if (inst->dpb_buftype == HFI_BUFFER_OUTPUT2)
142 dpb_size = inst->output2_buf_size;
147 ret = venus_helper_get_bufreq(inst, buftype, &bufreq);
151 count = HFI_BUFREQ_COUNT_MIN(&bufreq, ver);
153 for (i = 0; i < count; i++) {
154 buf = kzalloc(sizeof(*buf), GFP_KERNEL);
161 buf->size = dpb_size;
162 buf->attrs = DMA_ATTR_WRITE_COMBINE |
163 DMA_ATTR_NO_KERNEL_MAPPING;
164 buf->va = dma_alloc_attrs(dev, buf->size, &buf->da, GFP_KERNEL,
172 list_add_tail(&buf->list, &inst->dpbbufs);
178 venus_helper_free_dpb_bufs(inst);
181 EXPORT_SYMBOL_GPL(venus_helper_alloc_dpb_bufs);
183 static int intbufs_set_buffer(struct venus_inst *inst, u32 type)
185 struct venus_core *core = inst->core;
186 struct device *dev = core->dev;
187 struct hfi_buffer_requirements bufreq;
188 struct hfi_buffer_desc bd;
193 ret = venus_helper_get_bufreq(inst, type, &bufreq);
200 for (i = 0; i < bufreq.count_actual; i++) {
201 buf = kzalloc(sizeof(*buf), GFP_KERNEL);
207 buf->type = bufreq.type;
208 buf->size = bufreq.size;
209 buf->attrs = DMA_ATTR_WRITE_COMBINE |
210 DMA_ATTR_NO_KERNEL_MAPPING;
211 buf->va = dma_alloc_attrs(dev, buf->size, &buf->da, GFP_KERNEL,
218 memset(&bd, 0, sizeof(bd));
219 bd.buffer_size = buf->size;
220 bd.buffer_type = buf->type;
222 bd.device_addr = buf->da;
224 ret = hfi_session_set_buffers(inst, &bd);
226 dev_err(dev, "set session buffers failed\n");
230 list_add_tail(&buf->list, &inst->internalbufs);
236 dma_free_attrs(dev, buf->size, buf->va, buf->da, buf->attrs);
242 static int intbufs_unset_buffers(struct venus_inst *inst)
244 struct hfi_buffer_desc bd = {0};
245 struct intbuf *buf, *n;
248 list_for_each_entry_safe(buf, n, &inst->internalbufs, list) {
249 bd.buffer_size = buf->size;
250 bd.buffer_type = buf->type;
252 bd.device_addr = buf->da;
253 bd.response_required = true;
255 ret = hfi_session_unset_buffers(inst, &bd);
257 list_del_init(&buf->list);
258 dma_free_attrs(inst->core->dev, buf->size, buf->va, buf->da,
266 static const unsigned int intbuf_types_1xx[] = {
267 HFI_BUFFER_INTERNAL_SCRATCH(HFI_VERSION_1XX),
268 HFI_BUFFER_INTERNAL_SCRATCH_1(HFI_VERSION_1XX),
269 HFI_BUFFER_INTERNAL_SCRATCH_2(HFI_VERSION_1XX),
270 HFI_BUFFER_INTERNAL_PERSIST,
271 HFI_BUFFER_INTERNAL_PERSIST_1,
274 static const unsigned int intbuf_types_4xx[] = {
275 HFI_BUFFER_INTERNAL_SCRATCH(HFI_VERSION_4XX),
276 HFI_BUFFER_INTERNAL_SCRATCH_1(HFI_VERSION_4XX),
277 HFI_BUFFER_INTERNAL_SCRATCH_2(HFI_VERSION_4XX),
278 HFI_BUFFER_INTERNAL_PERSIST,
279 HFI_BUFFER_INTERNAL_PERSIST_1,
282 int venus_helper_intbufs_alloc(struct venus_inst *inst)
284 const unsigned int *intbuf;
288 if (IS_V4(inst->core)) {
289 arr_sz = ARRAY_SIZE(intbuf_types_4xx);
290 intbuf = intbuf_types_4xx;
292 arr_sz = ARRAY_SIZE(intbuf_types_1xx);
293 intbuf = intbuf_types_1xx;
296 for (i = 0; i < arr_sz; i++) {
297 ret = intbufs_set_buffer(inst, intbuf[i]);
305 intbufs_unset_buffers(inst);
308 EXPORT_SYMBOL_GPL(venus_helper_intbufs_alloc);
/* Free all internal buffers (thin wrapper over intbufs_unset_buffers). */
int venus_helper_intbufs_free(struct venus_inst *inst)
{
	return intbufs_unset_buffers(inst);
}
EXPORT_SYMBOL_GPL(venus_helper_intbufs_free);
316 int venus_helper_intbufs_realloc(struct venus_inst *inst)
318 enum hfi_version ver = inst->core->res->hfi_version;
319 struct hfi_buffer_desc bd;
320 struct intbuf *buf, *n;
323 list_for_each_entry_safe(buf, n, &inst->internalbufs, list) {
324 if (buf->type == HFI_BUFFER_INTERNAL_PERSIST ||
325 buf->type == HFI_BUFFER_INTERNAL_PERSIST_1)
328 memset(&bd, 0, sizeof(bd));
329 bd.buffer_size = buf->size;
330 bd.buffer_type = buf->type;
332 bd.device_addr = buf->da;
333 bd.response_required = true;
335 ret = hfi_session_unset_buffers(inst, &bd);
337 dma_free_attrs(inst->core->dev, buf->size, buf->va, buf->da,
340 list_del_init(&buf->list);
344 ret = intbufs_set_buffer(inst, HFI_BUFFER_INTERNAL_SCRATCH(ver));
348 ret = intbufs_set_buffer(inst, HFI_BUFFER_INTERNAL_SCRATCH_1(ver));
352 ret = intbufs_set_buffer(inst, HFI_BUFFER_INTERNAL_SCRATCH_2(ver));
360 EXPORT_SYMBOL_GPL(venus_helper_intbufs_realloc);
362 static u32 load_per_instance(struct venus_inst *inst)
366 if (!inst || !(inst->state >= INST_INIT && inst->state < INST_STOP))
369 mbs = (ALIGN(inst->width, 16) / 16) * (ALIGN(inst->height, 16) / 16);
371 return mbs * inst->fps;
374 static u32 load_per_type(struct venus_core *core, u32 session_type)
376 struct venus_inst *inst = NULL;
379 mutex_lock(&core->lock);
380 list_for_each_entry(inst, &core->instances, list) {
381 if (inst->session_type != session_type)
384 mbs_per_sec += load_per_instance(inst);
386 mutex_unlock(&core->lock);
391 int venus_helper_load_scale_clocks(struct venus_core *core)
393 const struct freq_tbl *table = core->res->freq_tbl;
394 unsigned int num_rows = core->res->freq_tbl_size;
395 unsigned long freq = table[0].freq;
396 struct clk *clk = core->clks[0];
397 struct device *dev = core->dev;
402 mbs_per_sec = load_per_type(core, VIDC_SESSION_TYPE_ENC) +
403 load_per_type(core, VIDC_SESSION_TYPE_DEC);
405 if (mbs_per_sec > core->res->max_load)
406 dev_warn(dev, "HW is overloaded, needed: %d max: %d\n",
407 mbs_per_sec, core->res->max_load);
409 if (!mbs_per_sec && num_rows > 1) {
410 freq = table[num_rows - 1].freq;
414 for (i = 0; i < num_rows; i++) {
415 if (mbs_per_sec > table[i].load)
417 freq = table[i].freq;
422 ret = clk_set_rate(clk, freq);
426 ret = clk_set_rate(core->core0_clk, freq);
430 ret = clk_set_rate(core->core1_clk, freq);
437 dev_err(dev, "failed to set clock rate %lu (%d)\n", freq, ret);
440 EXPORT_SYMBOL_GPL(venus_helper_load_scale_clocks);
442 static void fill_buffer_desc(const struct venus_buffer *buf,
443 struct hfi_buffer_desc *bd, bool response)
445 memset(bd, 0, sizeof(*bd));
446 bd->buffer_type = HFI_BUFFER_OUTPUT;
447 bd->buffer_size = buf->size;
449 bd->device_addr = buf->dma_addr;
450 bd->response_required = response;
453 static void return_buf_error(struct venus_inst *inst,
454 struct vb2_v4l2_buffer *vbuf)
456 struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
458 if (vbuf->vb2_buf.type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
459 v4l2_m2m_src_buf_remove_by_buf(m2m_ctx, vbuf);
461 v4l2_m2m_dst_buf_remove_by_buf(m2m_ctx, vbuf);
463 v4l2_m2m_buf_done(vbuf, VB2_BUF_STATE_ERROR);
467 put_ts_metadata(struct venus_inst *inst, struct vb2_v4l2_buffer *vbuf)
469 struct vb2_buffer *vb = &vbuf->vb2_buf;
472 u64 ts_us = vb->timestamp;
474 for (i = 0; i < ARRAY_SIZE(inst->tss); i++) {
475 if (!inst->tss[i].used) {
482 dev_dbg(inst->core->dev, "%s: no free slot\n", __func__);
486 do_div(ts_us, NSEC_PER_USEC);
488 inst->tss[slot].used = true;
489 inst->tss[slot].flags = vbuf->flags;
490 inst->tss[slot].tc = vbuf->timecode;
491 inst->tss[slot].ts_us = ts_us;
492 inst->tss[slot].ts_ns = vb->timestamp;
495 void venus_helper_get_ts_metadata(struct venus_inst *inst, u64 timestamp_us,
496 struct vb2_v4l2_buffer *vbuf)
498 struct vb2_buffer *vb = &vbuf->vb2_buf;
501 for (i = 0; i < ARRAY_SIZE(inst->tss); ++i) {
502 if (!inst->tss[i].used)
505 if (inst->tss[i].ts_us != timestamp_us)
508 inst->tss[i].used = false;
509 vbuf->flags |= inst->tss[i].flags;
510 vbuf->timecode = inst->tss[i].tc;
511 vb->timestamp = inst->tss[i].ts_ns;
515 EXPORT_SYMBOL_GPL(venus_helper_get_ts_metadata);
518 session_process_buf(struct venus_inst *inst, struct vb2_v4l2_buffer *vbuf)
520 struct venus_buffer *buf = to_venus_buffer(vbuf);
521 struct vb2_buffer *vb = &vbuf->vb2_buf;
522 unsigned int type = vb->type;
523 struct hfi_frame_data fdata;
526 memset(&fdata, 0, sizeof(fdata));
527 fdata.alloc_len = buf->size;
528 fdata.device_addr = buf->dma_addr;
529 fdata.timestamp = vb->timestamp;
530 do_div(fdata.timestamp, NSEC_PER_USEC);
532 fdata.clnt_data = vbuf->vb2_buf.index;
534 if (type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
535 fdata.buffer_type = HFI_BUFFER_INPUT;
536 fdata.filled_len = vb2_get_plane_payload(vb, 0);
537 fdata.offset = vb->planes[0].data_offset;
539 if (vbuf->flags & V4L2_BUF_FLAG_LAST || !fdata.filled_len)
540 fdata.flags |= HFI_BUFFERFLAG_EOS;
542 if (inst->session_type == VIDC_SESSION_TYPE_DEC)
543 put_ts_metadata(inst, vbuf);
544 } else if (type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
545 if (inst->session_type == VIDC_SESSION_TYPE_ENC)
546 fdata.buffer_type = HFI_BUFFER_OUTPUT;
548 fdata.buffer_type = inst->opb_buftype;
549 fdata.filled_len = 0;
553 ret = hfi_session_process_buf(inst, &fdata);
560 static bool is_dynamic_bufmode(struct venus_inst *inst)
562 struct venus_core *core = inst->core;
563 struct venus_caps *caps;
566 * v4 doesn't send BUFFER_ALLOC_MODE_SUPPORTED property and supports
567 * dynamic buffer mode by default for HFI_BUFFER_OUTPUT/OUTPUT2.
572 caps = venus_caps_by_codec(core, inst->hfi_codec, inst->session_type);
576 return caps->cap_bufs_mode_dynamic;
579 int venus_helper_unregister_bufs(struct venus_inst *inst)
581 struct venus_buffer *buf, *n;
582 struct hfi_buffer_desc bd;
585 if (is_dynamic_bufmode(inst))
588 list_for_each_entry_safe(buf, n, &inst->registeredbufs, reg_list) {
589 fill_buffer_desc(buf, &bd, true);
590 ret = hfi_session_unset_buffers(inst, &bd);
591 list_del_init(&buf->reg_list);
596 EXPORT_SYMBOL_GPL(venus_helper_unregister_bufs);
598 static int session_register_bufs(struct venus_inst *inst)
600 struct venus_core *core = inst->core;
601 struct device *dev = core->dev;
602 struct hfi_buffer_desc bd;
603 struct venus_buffer *buf;
606 if (is_dynamic_bufmode(inst))
609 list_for_each_entry(buf, &inst->registeredbufs, reg_list) {
610 fill_buffer_desc(buf, &bd, false);
611 ret = hfi_session_set_buffers(inst, &bd);
613 dev_err(dev, "%s: set buffer failed\n", __func__);
621 static u32 to_hfi_raw_fmt(u32 v4l2_fmt)
624 case V4L2_PIX_FMT_NV12:
625 return HFI_COLOR_FORMAT_NV12;
626 case V4L2_PIX_FMT_NV21:
627 return HFI_COLOR_FORMAT_NV21;
635 int venus_helper_get_bufreq(struct venus_inst *inst, u32 type,
636 struct hfi_buffer_requirements *req)
638 u32 ptype = HFI_PROPERTY_CONFIG_BUFFER_REQUIREMENTS;
639 union hfi_get_property hprop;
644 memset(req, 0, sizeof(*req));
646 ret = hfi_session_get_property(inst, ptype, &hprop);
652 for (i = 0; i < HFI_BUFFER_TYPE_MAX; i++) {
653 if (hprop.bufreq[i].type != type)
657 memcpy(req, &hprop.bufreq[i], sizeof(*req));
664 EXPORT_SYMBOL_GPL(venus_helper_get_bufreq);
666 static u32 get_framesize_raw_nv12(u32 width, u32 height)
668 u32 y_stride, uv_stride, y_plane;
669 u32 y_sclines, uv_sclines, uv_plane;
672 y_stride = ALIGN(width, 128);
673 uv_stride = ALIGN(width, 128);
674 y_sclines = ALIGN(height, 32);
675 uv_sclines = ALIGN(((height + 1) >> 1), 16);
677 y_plane = y_stride * y_sclines;
678 uv_plane = uv_stride * uv_sclines + SZ_4K;
679 size = y_plane + uv_plane + SZ_8K;
681 return ALIGN(size, SZ_4K);
684 static u32 get_framesize_raw_nv12_ubwc(u32 width, u32 height)
686 u32 y_meta_stride, y_meta_plane;
687 u32 y_stride, y_plane;
688 u32 uv_meta_stride, uv_meta_plane;
689 u32 uv_stride, uv_plane;
690 u32 extradata = SZ_16K;
692 y_meta_stride = ALIGN(DIV_ROUND_UP(width, 32), 64);
693 y_meta_plane = y_meta_stride * ALIGN(DIV_ROUND_UP(height, 8), 16);
694 y_meta_plane = ALIGN(y_meta_plane, SZ_4K);
696 y_stride = ALIGN(width, 128);
697 y_plane = ALIGN(y_stride * ALIGN(height, 32), SZ_4K);
699 uv_meta_stride = ALIGN(DIV_ROUND_UP(width / 2, 16), 64);
700 uv_meta_plane = uv_meta_stride * ALIGN(DIV_ROUND_UP(height / 2, 8), 16);
701 uv_meta_plane = ALIGN(uv_meta_plane, SZ_4K);
703 uv_stride = ALIGN(width, 128);
704 uv_plane = ALIGN(uv_stride * ALIGN(height / 2, 32), SZ_4K);
706 return ALIGN(y_meta_plane + y_plane + uv_meta_plane + uv_plane +
707 max(extradata, y_stride * 48), SZ_4K);
710 u32 venus_helper_get_framesz_raw(u32 hfi_fmt, u32 width, u32 height)
713 case HFI_COLOR_FORMAT_NV12:
714 case HFI_COLOR_FORMAT_NV21:
715 return get_framesize_raw_nv12(width, height);
716 case HFI_COLOR_FORMAT_NV12_UBWC:
717 return get_framesize_raw_nv12_ubwc(width, height);
722 EXPORT_SYMBOL_GPL(venus_helper_get_framesz_raw);
724 u32 venus_helper_get_framesz(u32 v4l2_fmt, u32 width, u32 height)
730 case V4L2_PIX_FMT_MPEG:
731 case V4L2_PIX_FMT_H264:
732 case V4L2_PIX_FMT_H264_NO_SC:
733 case V4L2_PIX_FMT_H264_MVC:
734 case V4L2_PIX_FMT_H263:
735 case V4L2_PIX_FMT_MPEG1:
736 case V4L2_PIX_FMT_MPEG2:
737 case V4L2_PIX_FMT_MPEG4:
738 case V4L2_PIX_FMT_XVID:
739 case V4L2_PIX_FMT_VC1_ANNEX_G:
740 case V4L2_PIX_FMT_VC1_ANNEX_L:
741 case V4L2_PIX_FMT_VP8:
742 case V4L2_PIX_FMT_VP9:
743 case V4L2_PIX_FMT_HEVC:
752 sz = ALIGN(height, 32) * ALIGN(width, 32) * 3 / 2 / 2;
753 return ALIGN(sz, SZ_4K);
756 hfi_fmt = to_hfi_raw_fmt(v4l2_fmt);
760 return venus_helper_get_framesz_raw(hfi_fmt, width, height);
762 EXPORT_SYMBOL_GPL(venus_helper_get_framesz);
764 int venus_helper_set_input_resolution(struct venus_inst *inst,
765 unsigned int width, unsigned int height)
767 u32 ptype = HFI_PROPERTY_PARAM_FRAME_SIZE;
768 struct hfi_framesize fs;
770 fs.buffer_type = HFI_BUFFER_INPUT;
774 return hfi_session_set_property(inst, ptype, &fs);
776 EXPORT_SYMBOL_GPL(venus_helper_set_input_resolution);
778 int venus_helper_set_output_resolution(struct venus_inst *inst,
779 unsigned int width, unsigned int height,
782 u32 ptype = HFI_PROPERTY_PARAM_FRAME_SIZE;
783 struct hfi_framesize fs;
785 fs.buffer_type = buftype;
789 return hfi_session_set_property(inst, ptype, &fs);
791 EXPORT_SYMBOL_GPL(venus_helper_set_output_resolution);
793 int venus_helper_set_work_mode(struct venus_inst *inst, u32 mode)
795 const u32 ptype = HFI_PROPERTY_PARAM_WORK_MODE;
796 struct hfi_video_work_mode wm;
798 if (!IS_V4(inst->core))
801 wm.video_work_mode = mode;
803 return hfi_session_set_property(inst, ptype, &wm);
805 EXPORT_SYMBOL_GPL(venus_helper_set_work_mode);
807 int venus_helper_set_core_usage(struct venus_inst *inst, u32 usage)
809 const u32 ptype = HFI_PROPERTY_CONFIG_VIDEOCORES_USAGE;
810 struct hfi_videocores_usage_type cu;
812 if (!IS_V4(inst->core))
815 cu.video_core_enable_mask = usage;
817 return hfi_session_set_property(inst, ptype, &cu);
819 EXPORT_SYMBOL_GPL(venus_helper_set_core_usage);
821 int venus_helper_set_num_bufs(struct venus_inst *inst, unsigned int input_bufs,
822 unsigned int output_bufs,
823 unsigned int output2_bufs)
825 u32 ptype = HFI_PROPERTY_PARAM_BUFFER_COUNT_ACTUAL;
826 struct hfi_buffer_count_actual buf_count;
829 buf_count.type = HFI_BUFFER_INPUT;
830 buf_count.count_actual = input_bufs;
832 ret = hfi_session_set_property(inst, ptype, &buf_count);
836 buf_count.type = HFI_BUFFER_OUTPUT;
837 buf_count.count_actual = output_bufs;
839 ret = hfi_session_set_property(inst, ptype, &buf_count);
844 buf_count.type = HFI_BUFFER_OUTPUT2;
845 buf_count.count_actual = output2_bufs;
847 ret = hfi_session_set_property(inst, ptype, &buf_count);
852 EXPORT_SYMBOL_GPL(venus_helper_set_num_bufs);
854 int venus_helper_set_raw_format(struct venus_inst *inst, u32 hfi_format,
857 const u32 ptype = HFI_PROPERTY_PARAM_UNCOMPRESSED_FORMAT_SELECT;
858 struct hfi_uncompressed_format_select fmt;
860 fmt.buffer_type = buftype;
861 fmt.format = hfi_format;
863 return hfi_session_set_property(inst, ptype, &fmt);
865 EXPORT_SYMBOL_GPL(venus_helper_set_raw_format);
867 int venus_helper_set_color_format(struct venus_inst *inst, u32 pixfmt)
869 u32 hfi_format, buftype;
871 if (inst->session_type == VIDC_SESSION_TYPE_DEC)
872 buftype = HFI_BUFFER_OUTPUT;
873 else if (inst->session_type == VIDC_SESSION_TYPE_ENC)
874 buftype = HFI_BUFFER_INPUT;
878 hfi_format = to_hfi_raw_fmt(pixfmt);
882 return venus_helper_set_raw_format(inst, hfi_format, buftype);
884 EXPORT_SYMBOL_GPL(venus_helper_set_color_format);
886 int venus_helper_set_multistream(struct venus_inst *inst, bool out_en,
889 struct hfi_multi_stream multi = {0};
890 u32 ptype = HFI_PROPERTY_PARAM_VDEC_MULTI_STREAM;
893 multi.buffer_type = HFI_BUFFER_OUTPUT;
894 multi.enable = out_en;
896 ret = hfi_session_set_property(inst, ptype, &multi);
900 multi.buffer_type = HFI_BUFFER_OUTPUT2;
901 multi.enable = out2_en;
903 return hfi_session_set_property(inst, ptype, &multi);
905 EXPORT_SYMBOL_GPL(venus_helper_set_multistream);
907 int venus_helper_set_dyn_bufmode(struct venus_inst *inst)
909 const u32 ptype = HFI_PROPERTY_PARAM_BUFFER_ALLOC_MODE;
910 struct hfi_buffer_alloc_mode mode;
913 if (!is_dynamic_bufmode(inst))
916 mode.type = HFI_BUFFER_OUTPUT;
917 mode.mode = HFI_BUFFER_MODE_DYNAMIC;
919 ret = hfi_session_set_property(inst, ptype, &mode);
923 mode.type = HFI_BUFFER_OUTPUT2;
925 return hfi_session_set_property(inst, ptype, &mode);
927 EXPORT_SYMBOL_GPL(venus_helper_set_dyn_bufmode);
929 int venus_helper_set_bufsize(struct venus_inst *inst, u32 bufsize, u32 buftype)
931 const u32 ptype = HFI_PROPERTY_PARAM_BUFFER_SIZE_ACTUAL;
932 struct hfi_buffer_size_actual bufsz;
934 bufsz.type = buftype;
935 bufsz.size = bufsize;
937 return hfi_session_set_property(inst, ptype, &bufsz);
939 EXPORT_SYMBOL_GPL(venus_helper_set_bufsize);
941 unsigned int venus_helper_get_opb_size(struct venus_inst *inst)
943 /* the encoder has only one output */
944 if (inst->session_type == VIDC_SESSION_TYPE_ENC)
945 return inst->output_buf_size;
947 if (inst->opb_buftype == HFI_BUFFER_OUTPUT)
948 return inst->output_buf_size;
949 else if (inst->opb_buftype == HFI_BUFFER_OUTPUT2)
950 return inst->output2_buf_size;
954 EXPORT_SYMBOL_GPL(venus_helper_get_opb_size);
956 static void delayed_process_buf_func(struct work_struct *work)
958 struct venus_buffer *buf, *n;
959 struct venus_inst *inst;
962 inst = container_of(work, struct venus_inst, delayed_process_work);
964 mutex_lock(&inst->lock);
966 if (!(inst->streamon_out & inst->streamon_cap))
969 list_for_each_entry_safe(buf, n, &inst->delayed_process, ref_list) {
970 if (buf->flags & HFI_BUFFERFLAG_READONLY)
973 ret = session_process_buf(inst, &buf->vb);
975 return_buf_error(inst, &buf->vb);
977 list_del_init(&buf->ref_list);
980 mutex_unlock(&inst->lock);
983 void venus_helper_release_buf_ref(struct venus_inst *inst, unsigned int idx)
985 struct venus_buffer *buf;
987 list_for_each_entry(buf, &inst->registeredbufs, reg_list) {
988 if (buf->vb.vb2_buf.index == idx) {
989 buf->flags &= ~HFI_BUFFERFLAG_READONLY;
990 schedule_work(&inst->delayed_process_work);
995 EXPORT_SYMBOL_GPL(venus_helper_release_buf_ref);
997 void venus_helper_acquire_buf_ref(struct vb2_v4l2_buffer *vbuf)
999 struct venus_buffer *buf = to_venus_buffer(vbuf);
1001 buf->flags |= HFI_BUFFERFLAG_READONLY;
1003 EXPORT_SYMBOL_GPL(venus_helper_acquire_buf_ref);
1005 static int is_buf_refed(struct venus_inst *inst, struct vb2_v4l2_buffer *vbuf)
1007 struct venus_buffer *buf = to_venus_buffer(vbuf);
1009 if (buf->flags & HFI_BUFFERFLAG_READONLY) {
1010 list_add_tail(&buf->ref_list, &inst->delayed_process);
1011 schedule_work(&inst->delayed_process_work);
1018 struct vb2_v4l2_buffer *
1019 venus_helper_find_buf(struct venus_inst *inst, unsigned int type, u32 idx)
1021 struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
1023 if (type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
1024 return v4l2_m2m_src_buf_remove_by_idx(m2m_ctx, idx);
1026 return v4l2_m2m_dst_buf_remove_by_idx(m2m_ctx, idx);
1028 EXPORT_SYMBOL_GPL(venus_helper_find_buf);
1030 int venus_helper_vb2_buf_init(struct vb2_buffer *vb)
1032 struct venus_inst *inst = vb2_get_drv_priv(vb->vb2_queue);
1033 struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
1034 struct venus_buffer *buf = to_venus_buffer(vbuf);
1035 struct sg_table *sgt;
1037 sgt = vb2_dma_sg_plane_desc(vb, 0);
1041 buf->size = vb2_plane_size(vb, 0);
1042 buf->dma_addr = sg_dma_address(sgt->sgl);
1044 if (vb->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
1045 list_add_tail(&buf->reg_list, &inst->registeredbufs);
1049 EXPORT_SYMBOL_GPL(venus_helper_vb2_buf_init);
1051 int venus_helper_vb2_buf_prepare(struct vb2_buffer *vb)
1053 struct venus_inst *inst = vb2_get_drv_priv(vb->vb2_queue);
1054 unsigned int out_buf_size = venus_helper_get_opb_size(inst);
1055 struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
1057 if (V4L2_TYPE_IS_OUTPUT(vb->vb2_queue->type)) {
1058 if (vbuf->field == V4L2_FIELD_ANY)
1059 vbuf->field = V4L2_FIELD_NONE;
1060 if (vbuf->field != V4L2_FIELD_NONE) {
1061 dev_err(inst->core->dev, "%s field isn't supported\n",
1067 if (vb->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE &&
1068 vb2_plane_size(vb, 0) < out_buf_size)
1070 if (vb->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE &&
1071 vb2_plane_size(vb, 0) < inst->input_buf_size)
1076 EXPORT_SYMBOL_GPL(venus_helper_vb2_buf_prepare);
1078 void venus_helper_vb2_buf_queue(struct vb2_buffer *vb)
1080 struct vb2_v4l2_buffer *vbuf = to_vb2_v4l2_buffer(vb);
1081 struct venus_inst *inst = vb2_get_drv_priv(vb->vb2_queue);
1082 struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
1085 mutex_lock(&inst->lock);
1087 v4l2_m2m_buf_queue(m2m_ctx, vbuf);
1089 if (inst->session_type == VIDC_SESSION_TYPE_ENC &&
1090 !(inst->streamon_out && inst->streamon_cap))
1093 if (vb2_start_streaming_called(vb->vb2_queue)) {
1094 ret = is_buf_refed(inst, vbuf);
1098 ret = session_process_buf(inst, vbuf);
1100 return_buf_error(inst, vbuf);
1104 mutex_unlock(&inst->lock);
1106 EXPORT_SYMBOL_GPL(venus_helper_vb2_buf_queue);
1108 void venus_helper_buffers_done(struct venus_inst *inst,
1109 enum vb2_buffer_state state)
1111 struct vb2_v4l2_buffer *buf;
1113 while ((buf = v4l2_m2m_src_buf_remove(inst->m2m_ctx)))
1114 v4l2_m2m_buf_done(buf, state);
1115 while ((buf = v4l2_m2m_dst_buf_remove(inst->m2m_ctx)))
1116 v4l2_m2m_buf_done(buf, state);
1118 EXPORT_SYMBOL_GPL(venus_helper_buffers_done);
1120 void venus_helper_vb2_stop_streaming(struct vb2_queue *q)
1122 struct venus_inst *inst = vb2_get_drv_priv(q);
1123 struct venus_core *core = inst->core;
1126 mutex_lock(&inst->lock);
1128 if (inst->streamon_out & inst->streamon_cap) {
1129 ret = hfi_session_stop(inst);
1130 ret |= hfi_session_unload_res(inst);
1131 ret |= venus_helper_unregister_bufs(inst);
1132 ret |= venus_helper_intbufs_free(inst);
1133 ret |= hfi_session_deinit(inst);
1135 if (inst->session_error || core->sys_error)
1139 hfi_session_abort(inst);
1141 venus_helper_free_dpb_bufs(inst);
1143 venus_helper_load_scale_clocks(core);
1144 INIT_LIST_HEAD(&inst->registeredbufs);
1147 venus_helper_buffers_done(inst, VB2_BUF_STATE_ERROR);
1149 if (q->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
1150 inst->streamon_out = 0;
1152 inst->streamon_cap = 0;
1154 mutex_unlock(&inst->lock);
1156 EXPORT_SYMBOL_GPL(venus_helper_vb2_stop_streaming);
1158 int venus_helper_process_initial_cap_bufs(struct venus_inst *inst)
1160 struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
1161 struct v4l2_m2m_buffer *buf, *n;
1164 v4l2_m2m_for_each_dst_buf_safe(m2m_ctx, buf, n) {
1165 ret = session_process_buf(inst, &buf->vb);
1167 return_buf_error(inst, &buf->vb);
1174 EXPORT_SYMBOL_GPL(venus_helper_process_initial_cap_bufs);
1176 int venus_helper_process_initial_out_bufs(struct venus_inst *inst)
1178 struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
1179 struct v4l2_m2m_buffer *buf, *n;
1182 v4l2_m2m_for_each_src_buf_safe(m2m_ctx, buf, n) {
1183 ret = session_process_buf(inst, &buf->vb);
1185 return_buf_error(inst, &buf->vb);
1192 EXPORT_SYMBOL_GPL(venus_helper_process_initial_out_bufs);
1194 int venus_helper_vb2_start_streaming(struct venus_inst *inst)
1196 struct venus_core *core = inst->core;
1199 ret = venus_helper_intbufs_alloc(inst);
1203 ret = session_register_bufs(inst);
1207 venus_helper_load_scale_clocks(core);
1209 ret = hfi_session_load_res(inst);
1211 goto err_unreg_bufs;
1213 ret = hfi_session_start(inst);
1215 goto err_unload_res;
1220 hfi_session_unload_res(inst);
1222 venus_helper_unregister_bufs(inst);
1224 venus_helper_intbufs_free(inst);
1227 EXPORT_SYMBOL_GPL(venus_helper_vb2_start_streaming);
1229 void venus_helper_m2m_device_run(void *priv)
1231 struct venus_inst *inst = priv;
1232 struct v4l2_m2m_ctx *m2m_ctx = inst->m2m_ctx;
1233 struct v4l2_m2m_buffer *buf, *n;
1236 mutex_lock(&inst->lock);
1238 v4l2_m2m_for_each_dst_buf_safe(m2m_ctx, buf, n) {
1239 ret = session_process_buf(inst, &buf->vb);
1241 return_buf_error(inst, &buf->vb);
1244 v4l2_m2m_for_each_src_buf_safe(m2m_ctx, buf, n) {
1245 ret = session_process_buf(inst, &buf->vb);
1247 return_buf_error(inst, &buf->vb);
1250 mutex_unlock(&inst->lock);
1252 EXPORT_SYMBOL_GPL(venus_helper_m2m_device_run);
1254 void venus_helper_m2m_job_abort(void *priv)
1256 struct venus_inst *inst = priv;
1258 v4l2_m2m_job_finish(inst->m2m_dev, inst->m2m_ctx);
1260 EXPORT_SYMBOL_GPL(venus_helper_m2m_job_abort);
1262 void venus_helper_init_instance(struct venus_inst *inst)
1264 if (inst->session_type == VIDC_SESSION_TYPE_DEC) {
1265 INIT_LIST_HEAD(&inst->delayed_process);
1266 INIT_WORK(&inst->delayed_process_work,
1267 delayed_process_buf_func);
1270 EXPORT_SYMBOL_GPL(venus_helper_init_instance);
1272 static bool find_fmt_from_caps(struct venus_caps *caps, u32 buftype, u32 fmt)
1276 for (i = 0; i < caps->num_fmts; i++) {
1277 if (caps->fmts[i].buftype == buftype &&
1278 caps->fmts[i].fmt == fmt)
1285 int venus_helper_get_out_fmts(struct venus_inst *inst, u32 v4l2_fmt,
1286 u32 *out_fmt, u32 *out2_fmt, bool ubwc)
1288 struct venus_core *core = inst->core;
1289 struct venus_caps *caps;
1290 u32 ubwc_fmt, fmt = to_hfi_raw_fmt(v4l2_fmt);
1291 bool found, found_ubwc;
1293 *out_fmt = *out2_fmt = 0;
1298 caps = venus_caps_by_codec(core, inst->hfi_codec, inst->session_type);
1303 ubwc_fmt = fmt | HFI_COLOR_FORMAT_UBWC_BASE;
1304 found_ubwc = find_fmt_from_caps(caps, HFI_BUFFER_OUTPUT,
1306 found = find_fmt_from_caps(caps, HFI_BUFFER_OUTPUT2, fmt);
1308 if (found_ubwc && found) {
1309 *out_fmt = ubwc_fmt;
1315 found = find_fmt_from_caps(caps, HFI_BUFFER_OUTPUT, fmt);
1322 found = find_fmt_from_caps(caps, HFI_BUFFER_OUTPUT2, fmt);
1331 EXPORT_SYMBOL_GPL(venus_helper_get_out_fmts);
1333 int venus_helper_power_enable(struct venus_core *core, u32 session_type,
1336 void __iomem *ctrl, *stat;
1340 if (!IS_V3(core) && !IS_V4(core))
1344 if (session_type == VIDC_SESSION_TYPE_DEC)
1345 ctrl = core->base + WRAPPER_VDEC_VCODEC_POWER_CONTROL;
1347 ctrl = core->base + WRAPPER_VENC_VCODEC_POWER_CONTROL;
1356 if (session_type == VIDC_SESSION_TYPE_DEC) {
1357 ctrl = core->base + WRAPPER_VCODEC0_MMCC_POWER_CONTROL;
1358 stat = core->base + WRAPPER_VCODEC0_MMCC_POWER_STATUS;
1360 ctrl = core->base + WRAPPER_VCODEC1_MMCC_POWER_CONTROL;
1361 stat = core->base + WRAPPER_VCODEC1_MMCC_POWER_STATUS;
1367 ret = readl_poll_timeout(stat, val, val & BIT(1), 1, 100);
1373 ret = readl_poll_timeout(stat, val, !(val & BIT(1)), 1, 100);
1380 EXPORT_SYMBOL_GPL(venus_helper_power_enable);