/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */
25 #include <linux/dma-mapping.h>
26 #include <linux/hdmi.h>
29 #include <drm/drm_atomic.h>
30 #include <drm/drm_atomic_helper.h>
31 #include <drm/drm_crtc_helper.h>
32 #include <drm/drm_dp_helper.h>
33 #include <drm/drm_fb_helper.h>
34 #include <drm/drm_plane_helper.h>
35 #include <drm/drm_edid.h>
37 #include <nvif/class.h>
38 #include <nvif/cl0002.h>
39 #include <nvif/cl5070.h>
40 #include <nvif/cl507a.h>
41 #include <nvif/cl507b.h>
42 #include <nvif/cl507c.h>
43 #include <nvif/cl507d.h>
44 #include <nvif/cl507e.h>
45 #include <nvif/event.h>
47 #include "nouveau_drv.h"
48 #include "nouveau_dma.h"
49 #include "nouveau_gem.h"
50 #include "nouveau_connector.h"
51 #include "nouveau_encoder.h"
52 #include "nouveau_crtc.h"
53 #include "nouveau_fence.h"
54 #include "nouveau_fbcon.h"
55 #include "nv50_display.h"
59 #define EVO_MASTER (0x00)
60 #define EVO_FLIP(c) (0x01 + (c))
61 #define EVO_OVLY(c) (0x05 + (c))
62 #define EVO_OIMM(c) (0x09 + (c))
63 #define EVO_CURS(c) (0x0d + (c))
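
/* With up to four heads, the channel numbering above works out to: core = 0,
 * base/flip = 1..4, overlay = 5..8, overlay immediate = 9..12 and
 * cursor = 13..16 (an inference from the arithmetic in these macros, not a
 * table taken from elsewhere in this file).
 */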
65 /* offsets in shared sync bo of various structures */
66 #define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
67 #define EVO_MAST_NTFY EVO_SYNC( 0, 0x00)
68 #define EVO_FLIP_SEM0(c) EVO_SYNC((c) + 1, 0x00)
69 #define EVO_FLIP_SEM1(c) EVO_SYNC((c) + 1, 0x10)
70 #define EVO_FLIP_NTFY0(c) EVO_SYNC((c) + 1, 0x20)
71 #define EVO_FLIP_NTFY1(c) EVO_SYNC((c) + 1, 0x30)
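
/* Sketch of the sync buffer layout implied by the macros above (byte offsets
 * into the shared sync bo):
 *
 *   0x0000  EVO_MAST_NTFY       core channel notifier
 *   0x0100  EVO_FLIP_SEM0(0)    flip semaphore 0, head 0
 *   0x0110  EVO_FLIP_SEM1(0)    flip semaphore 1, head 0
 *   0x0120  EVO_FLIP_NTFY0(0)   flip notifier 0, head 0
 *   0x0130  EVO_FLIP_NTFY1(0)   flip notifier 1, head 0
 *   0x0200  EVO_FLIP_SEM0(1)    ... and so on, 0x100 bytes per head.
 */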
/******************************************************************************
 * Atomic state
 *****************************************************************************/
76 #define nv50_atom(p) container_of((p), struct nv50_atom, state)
79 struct drm_atomic_state state;
81 struct list_head outp;
86 struct nv50_outp_atom {
87 struct list_head head;
89 struct drm_encoder *encoder;
107 #define nv50_head_atom(p) container_of((p), struct nv50_head_atom, state)
109 struct nv50_head_atom {
110 struct drm_crtc_state state;
119 struct nv50_head_mode {
216 static inline struct nv50_head_atom *
217 nv50_head_atom_get(struct drm_atomic_state *state, struct drm_crtc *crtc)
219 struct drm_crtc_state *statec = drm_atomic_get_crtc_state(state, crtc);
221 return (void *)statec;
222 return nv50_head_atom(statec);
225 #define nv50_wndw_atom(p) container_of((p), struct nv50_wndw_atom, state)
227 struct nv50_wndw_atom {
228 struct drm_plane_state state;
231 struct drm_rect clip;
/******************************************************************************
 * EVO channel
 *****************************************************************************/
297 struct nvif_object user;
298 struct nvif_device *device;
302 nv50_chan_create(struct nvif_device *device, struct nvif_object *disp,
303 const s32 *oclass, u8 head, void *data, u32 size,
304 struct nv50_chan *chan)
306 struct nvif_sclass *sclass;
309 chan->device = device;
311 ret = n = nvif_object_sclass_get(disp, &sclass);
316 for (i = 0; i < n; i++) {
317 if (sclass[i].oclass == oclass[0]) {
318 ret = nvif_object_init(disp, 0, oclass[0],
319 data, size, &chan->user);
321 nvif_object_map(&chan->user, NULL, 0);
322 nvif_object_sclass_put(&sclass);
329 nvif_object_sclass_put(&sclass);
334 nv50_chan_destroy(struct nv50_chan *chan)
336 nvif_object_fini(&chan->user);
/******************************************************************************
 * PIO EVO channel
 *****************************************************************************/
344 struct nv50_chan base;
348 nv50_pioc_destroy(struct nv50_pioc *pioc)
350 nv50_chan_destroy(&pioc->base);
354 nv50_pioc_create(struct nvif_device *device, struct nvif_object *disp,
355 const s32 *oclass, u8 head, void *data, u32 size,
356 struct nv50_pioc *pioc)
358 return nv50_chan_create(device, disp, oclass, head, data, size,
/******************************************************************************
 * Overlay immediate
 *****************************************************************************/
367 struct nv50_pioc base;
371 nv50_oimm_create(struct nvif_device *device, struct nvif_object *disp,
372 int head, struct nv50_oimm *oimm)
374 struct nv50_disp_cursor_v0 args = {
377 static const s32 oclass[] = {
386 return nv50_pioc_create(device, disp, oclass, head, &args, sizeof(args),
/******************************************************************************
 * DMA EVO channel
 *****************************************************************************/
394 struct nv50_dmac_ctxdma {
395 struct list_head head;
396 struct nvif_object object;
400 struct nv50_chan base;
404 struct nvif_object sync;
405 struct nvif_object vram;
406 struct list_head ctxdma;
/* Protects against concurrent pushbuf access to this channel: the lock is
 * taken by evo_wait() (if the pushbuf reservation succeeds) and dropped
 * again by evo_kick(). */
415 nv50_dmac_ctxdma_del(struct nv50_dmac_ctxdma *ctxdma)
417 nvif_object_fini(&ctxdma->object);
418 list_del(&ctxdma->head);
422 static struct nv50_dmac_ctxdma *
423 nv50_dmac_ctxdma_new(struct nv50_dmac *dmac, struct nouveau_framebuffer *fb)
425 struct nouveau_drm *drm = nouveau_drm(fb->base.dev);
426 struct nv50_dmac_ctxdma *ctxdma;
427 const u8 kind = fb->nvbo->kind;
428 const u32 handle = 0xfb000000 | kind;
430 struct nv_dma_v0 base;
432 struct nv50_dma_v0 nv50;
433 struct gf100_dma_v0 gf100;
434 struct gf119_dma_v0 gf119;
437 u32 argc = sizeof(args.base);
440 list_for_each_entry(ctxdma, &dmac->ctxdma, head) {
441 if (ctxdma->object.handle == handle)
445 if (!(ctxdma = kzalloc(sizeof(*ctxdma), GFP_KERNEL)))
446 return ERR_PTR(-ENOMEM);
447 list_add(&ctxdma->head, &dmac->ctxdma);
449 args.base.target = NV_DMA_V0_TARGET_VRAM;
450 args.base.access = NV_DMA_V0_ACCESS_RDWR;
452 args.base.limit = drm->client.device.info.ram_user - 1;
454 if (drm->client.device.info.chipset < 0x80) {
455 args.nv50.part = NV50_DMA_V0_PART_256;
456 argc += sizeof(args.nv50);
458 if (drm->client.device.info.chipset < 0xc0) {
459 args.nv50.part = NV50_DMA_V0_PART_256;
460 args.nv50.kind = kind;
461 argc += sizeof(args.nv50);
463 if (drm->client.device.info.chipset < 0xd0) {
464 args.gf100.kind = kind;
465 argc += sizeof(args.gf100);
467 args.gf119.page = GF119_DMA_V0_PAGE_LP;
468 args.gf119.kind = kind;
469 argc += sizeof(args.gf119);
472 ret = nvif_object_init(&dmac->base.user, handle, NV_DMA_IN_MEMORY,
473 &args, argc, &ctxdma->object);
475 nv50_dmac_ctxdma_del(ctxdma);
483 nv50_dmac_destroy(struct nv50_dmac *dmac, struct nvif_object *disp)
485 struct nvif_device *device = dmac->base.device;
486 struct nv50_dmac_ctxdma *ctxdma, *ctxtmp;
488 list_for_each_entry_safe(ctxdma, ctxtmp, &dmac->ctxdma, head) {
489 nv50_dmac_ctxdma_del(ctxdma);
492 nvif_object_fini(&dmac->vram);
493 nvif_object_fini(&dmac->sync);
495 nv50_chan_destroy(&dmac->base);
498 struct device *dev = nvxx_device(device)->dev;
499 dma_free_coherent(dev, PAGE_SIZE, dmac->ptr, dmac->handle);
504 nv50_dmac_create(struct nvif_device *device, struct nvif_object *disp,
505 const s32 *oclass, u8 head, void *data, u32 size, u64 syncbuf,
506 struct nv50_dmac *dmac)
508 struct nv50_disp_core_channel_dma_v0 *args = data;
509 struct nvif_object pushbuf;
512 mutex_init(&dmac->lock);
513 INIT_LIST_HEAD(&dmac->ctxdma);
515 dmac->ptr = dma_alloc_coherent(nvxx_device(device)->dev, PAGE_SIZE,
516 &dmac->handle, GFP_KERNEL);
520 ret = nvif_object_init(&device->object, 0, NV_DMA_FROM_MEMORY,
521 &(struct nv_dma_v0) {
522 .target = NV_DMA_V0_TARGET_PCI_US,
523 .access = NV_DMA_V0_ACCESS_RD,
524 .start = dmac->handle + 0x0000,
525 .limit = dmac->handle + 0x0fff,
526 }, sizeof(struct nv_dma_v0), &pushbuf);
530 args->pushbuf = nvif_handle(&pushbuf);
532 ret = nv50_chan_create(device, disp, oclass, head, data, size,
534 nvif_object_fini(&pushbuf);
538 ret = nvif_object_init(&dmac->base.user, 0xf0000000, NV_DMA_IN_MEMORY,
539 &(struct nv_dma_v0) {
540 .target = NV_DMA_V0_TARGET_VRAM,
541 .access = NV_DMA_V0_ACCESS_RDWR,
542 .start = syncbuf + 0x0000,
543 .limit = syncbuf + 0x0fff,
544 }, sizeof(struct nv_dma_v0),
549 ret = nvif_object_init(&dmac->base.user, 0xf0000001, NV_DMA_IN_MEMORY,
550 &(struct nv_dma_v0) {
551 .target = NV_DMA_V0_TARGET_VRAM,
552 .access = NV_DMA_V0_ACCESS_RDWR,
554 .limit = device->info.ram_user - 1,
555 }, sizeof(struct nv_dma_v0),
/******************************************************************************
 * Core
 *****************************************************************************/
568 struct nv50_dmac base;
572 nv50_core_create(struct nvif_device *device, struct nvif_object *disp,
573 u64 syncbuf, struct nv50_mast *core)
575 struct nv50_disp_core_channel_dma_v0 args = {
576 .pushbuf = 0xb0007d00,
578 static const s32 oclass[] = {
579 GP102_DISP_CORE_CHANNEL_DMA,
580 GP100_DISP_CORE_CHANNEL_DMA,
581 GM200_DISP_CORE_CHANNEL_DMA,
582 GM107_DISP_CORE_CHANNEL_DMA,
583 GK110_DISP_CORE_CHANNEL_DMA,
584 GK104_DISP_CORE_CHANNEL_DMA,
585 GF110_DISP_CORE_CHANNEL_DMA,
586 GT214_DISP_CORE_CHANNEL_DMA,
587 GT206_DISP_CORE_CHANNEL_DMA,
588 GT200_DISP_CORE_CHANNEL_DMA,
589 G82_DISP_CORE_CHANNEL_DMA,
590 NV50_DISP_CORE_CHANNEL_DMA,
594 return nv50_dmac_create(device, disp, oclass, 0, &args, sizeof(args),
595 syncbuf, &core->base);
/******************************************************************************
 * Base
 *****************************************************************************/
603 struct nv50_dmac base;
609 nv50_base_create(struct nvif_device *device, struct nvif_object *disp,
610 int head, u64 syncbuf, struct nv50_sync *base)
612 struct nv50_disp_base_channel_dma_v0 args = {
613 .pushbuf = 0xb0007c00 | head,
616 static const s32 oclass[] = {
617 GK110_DISP_BASE_CHANNEL_DMA,
618 GK104_DISP_BASE_CHANNEL_DMA,
619 GF110_DISP_BASE_CHANNEL_DMA,
620 GT214_DISP_BASE_CHANNEL_DMA,
621 GT200_DISP_BASE_CHANNEL_DMA,
622 G82_DISP_BASE_CHANNEL_DMA,
623 NV50_DISP_BASE_CHANNEL_DMA,
627 return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
628 syncbuf, &base->base);
/******************************************************************************
 * Overlay
 *****************************************************************************/
636 struct nv50_dmac base;
640 nv50_ovly_create(struct nvif_device *device, struct nvif_object *disp,
641 int head, u64 syncbuf, struct nv50_ovly *ovly)
643 struct nv50_disp_overlay_channel_dma_v0 args = {
644 .pushbuf = 0xb0007e00 | head,
647 static const s32 oclass[] = {
648 GK104_DISP_OVERLAY_CONTROL_DMA,
649 GF110_DISP_OVERLAY_CONTROL_DMA,
650 GT214_DISP_OVERLAY_CHANNEL_DMA,
651 GT200_DISP_OVERLAY_CHANNEL_DMA,
652 G82_DISP_OVERLAY_CHANNEL_DMA,
653 NV50_DISP_OVERLAY_CHANNEL_DMA,
657 return nv50_dmac_create(device, disp, oclass, head, &args, sizeof(args),
658 syncbuf, &ovly->base);
662 struct nouveau_crtc base;
664 struct nouveau_bo *nvbo[1];
666 struct nv50_ovly ovly;
667 struct nv50_oimm oimm;
670 #define nv50_head(c) ((struct nv50_head *)nouveau_crtc(c))
671 #define nv50_ovly(c) (&nv50_head(c)->ovly)
672 #define nv50_oimm(c) (&nv50_head(c)->oimm)
673 #define nv50_chan(c) (&(c)->base.base)
674 #define nv50_vers(c) nv50_chan(c)->user.oclass
677 struct nvif_object *disp;
678 struct nv50_mast mast;
680 struct nouveau_bo *sync;
685 static struct nv50_disp *
686 nv50_disp(struct drm_device *dev)
688 return nouveau_display(dev)->priv;
691 #define nv50_mast(d) (&nv50_disp(d)->mast)
693 /******************************************************************************
694 * EVO channel helpers
695 *****************************************************************************/
697 evo_wait(void *evoc, int nr)
699 struct nv50_dmac *dmac = evoc;
700 struct nvif_device *device = dmac->base.device;
701 u32 put = nvif_rd32(&dmac->base.user, 0x0000) / 4;
703 mutex_lock(&dmac->lock);
704 if (put + nr >= (PAGE_SIZE / 4) - 8) {
705 dmac->ptr[put] = 0x20000000;
707 nvif_wr32(&dmac->base.user, 0x0000, 0x00000000);
708 if (nvif_msec(device, 2000,
709 if (!nvif_rd32(&dmac->base.user, 0x0004))
712 mutex_unlock(&dmac->lock);
713 pr_err("nouveau: evo channel stalled\n");
720 return dmac->ptr + put;
724 evo_kick(u32 *push, void *evoc)
726 struct nv50_dmac *dmac = evoc;
727 nvif_wr32(&dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
728 mutex_unlock(&dmac->lock);
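
/* A typical submission through these helpers looks roughly like the sketch
 * below (illustrative only; the method offset and count are examples, not
 * code lifted verbatim from this file):
 *
 *	u32 *push = evo_wait(evoc, 2);		// reserve 2 dwords, takes dmac->lock
 *	if (push) {
 *		evo_mthd(push, 0x0080, 1);	// method header: 1 dword at offset 0x0080
 *		evo_data(push, 0x00000000);	// method payload
 *		evo_kick(push, evoc);		// bump PUT, drops dmac->lock
 *	}
 */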
731 #define evo_mthd(p, m, s) do { \
732 const u32 _m = (m), _s = (s); \
733 if (drm_debug & DRM_UT_KMS) \
734 pr_err("%04x %d %s\n", _m, _s, __func__); \
735 *((p)++) = ((_s << 18) | _m); \
738 #define evo_data(p, d) do { \
739 const u32 _d = (d); \
740 if (drm_debug & DRM_UT_KMS) \
741 pr_err("\t%08x\n", _d); \
/******************************************************************************
 * Plane
 *****************************************************************************/
748 #define nv50_wndw(p) container_of((p), struct nv50_wndw, plane)
751 const struct nv50_wndw_func *func;
752 struct nv50_dmac *dmac;
754 struct drm_plane plane;
756 struct nvif_notify notify;
762 struct nv50_wndw_func {
763 void *(*dtor)(struct nv50_wndw *);
764 int (*acquire)(struct nv50_wndw *, struct nv50_wndw_atom *asyw,
765 struct nv50_head_atom *asyh);
766 void (*release)(struct nv50_wndw *, struct nv50_wndw_atom *asyw,
767 struct nv50_head_atom *asyh);
768 void (*prepare)(struct nv50_wndw *, struct nv50_head_atom *asyh,
769 struct nv50_wndw_atom *asyw);
771 void (*sema_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
772 void (*sema_clr)(struct nv50_wndw *);
773 void (*ntfy_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
774 void (*ntfy_clr)(struct nv50_wndw *);
775 int (*ntfy_wait_begun)(struct nv50_wndw *, struct nv50_wndw_atom *);
776 void (*image_set)(struct nv50_wndw *, struct nv50_wndw_atom *);
777 void (*image_clr)(struct nv50_wndw *);
778 void (*lut)(struct nv50_wndw *, struct nv50_wndw_atom *);
779 void (*point)(struct nv50_wndw *, struct nv50_wndw_atom *);
781 u32 (*update)(struct nv50_wndw *, u32 interlock);
785 nv50_wndw_wait_armed(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
788 return wndw->func->ntfy_wait_begun(wndw, asyw);
793 nv50_wndw_flush_clr(struct nv50_wndw *wndw, u32 interlock, bool flush,
794 struct nv50_wndw_atom *asyw)
796 if (asyw->clr.sema && (!asyw->set.sema || flush))
797 wndw->func->sema_clr(wndw);
798 if (asyw->clr.ntfy && (!asyw->set.ntfy || flush))
799 wndw->func->ntfy_clr(wndw);
800 if (asyw->clr.image && (!asyw->set.image || flush))
801 wndw->func->image_clr(wndw);
803 return flush ? wndw->func->update(wndw, interlock) : 0;
807 nv50_wndw_flush_set(struct nv50_wndw *wndw, u32 interlock,
808 struct nv50_wndw_atom *asyw)
811 asyw->image.mode = 0;
812 asyw->image.interval = 1;
815 if (asyw->set.sema ) wndw->func->sema_set (wndw, asyw);
816 if (asyw->set.ntfy ) wndw->func->ntfy_set (wndw, asyw);
817 if (asyw->set.image) wndw->func->image_set(wndw, asyw);
818 if (asyw->set.lut ) wndw->func->lut (wndw, asyw);
819 if (asyw->set.point) wndw->func->point (wndw, asyw);
821 return wndw->func->update(wndw, interlock);
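
/* Note on the two flush helpers above: nv50_wndw_flush_clr() only clears
 * state that is not being re-programmed in the same flush (unless a full
 * flush is forced via "flush"), while nv50_wndw_flush_set() programs
 * whatever the atomic check flagged in asyw->set and then issues the
 * window's update method.  The value returned by update() is the interlock
 * mask handed back to the caller; this summary is inferred from the code,
 * not taken from an existing comment.
 */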
825 nv50_wndw_atomic_check_release(struct nv50_wndw *wndw,
826 struct nv50_wndw_atom *asyw,
827 struct nv50_head_atom *asyh)
829 struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
830 NV_ATOMIC(drm, "%s release\n", wndw->plane.name);
831 wndw->func->release(wndw, asyw, asyh);
832 asyw->ntfy.handle = 0;
833 asyw->sema.handle = 0;
837 nv50_wndw_atomic_check_acquire(struct nv50_wndw *wndw,
838 struct nv50_wndw_atom *asyw,
839 struct nv50_head_atom *asyh)
841 struct nouveau_framebuffer *fb = nouveau_framebuffer(asyw->state.fb);
842 struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
845 NV_ATOMIC(drm, "%s acquire\n", wndw->plane.name);
848 asyw->clip.x2 = asyh->state.mode.hdisplay;
849 asyw->clip.y2 = asyh->state.mode.vdisplay;
851 asyw->image.w = fb->base.width;
852 asyw->image.h = fb->base.height;
853 asyw->image.kind = fb->nvbo->kind;
if (asyh->state.pageflip_flags & DRM_MODE_PAGE_FLIP_ASYNC)
	asyw->interval = 0;
else
	asyw->interval = 1;
860 if (asyw->image.kind) {
861 asyw->image.layout = 0;
862 if (drm->client.device.info.chipset >= 0xc0)
863 asyw->image.block = fb->nvbo->mode >> 4;
865 asyw->image.block = fb->nvbo->mode;
866 asyw->image.pitch = (fb->base.pitches[0] / 4) << 4;
868 asyw->image.layout = 1;
869 asyw->image.block = 0;
870 asyw->image.pitch = fb->base.pitches[0];
873 ret = wndw->func->acquire(wndw, asyw, asyh);
877 if (asyw->set.image) {
878 if (!(asyw->image.mode = asyw->interval ? 0 : 1))
879 asyw->image.interval = asyw->interval;
881 asyw->image.interval = 0;
888 nv50_wndw_atomic_check(struct drm_plane *plane, struct drm_plane_state *state)
890 struct nouveau_drm *drm = nouveau_drm(plane->dev);
891 struct nv50_wndw *wndw = nv50_wndw(plane);
892 struct nv50_wndw_atom *armw = nv50_wndw_atom(wndw->plane.state);
893 struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
894 struct nv50_head_atom *harm = NULL, *asyh = NULL;
895 bool varm = false, asyv = false, asym = false;
898 NV_ATOMIC(drm, "%s atomic_check\n", plane->name);
899 if (asyw->state.crtc) {
900 asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
902 return PTR_ERR(asyh);
903 asym = drm_atomic_crtc_needs_modeset(&asyh->state);
904 asyv = asyh->state.active;
907 if (armw->state.crtc) {
908 harm = nv50_head_atom_get(asyw->state.state, armw->state.crtc);
910 return PTR_ERR(harm);
911 varm = harm->state.crtc->state->active;
915 asyw->point.x = asyw->state.crtc_x;
916 asyw->point.y = asyw->state.crtc_y;
917 if (memcmp(&armw->point, &asyw->point, sizeof(asyw->point)))
918 asyw->set.point = true;
920 ret = nv50_wndw_atomic_check_acquire(wndw, asyw, asyh);
925 nv50_wndw_atomic_check_release(wndw, asyw, harm);
931 asyw->clr.ntfy = armw->ntfy.handle != 0;
932 asyw->clr.sema = armw->sema.handle != 0;
933 if (wndw->func->image_clr)
934 asyw->clr.image = armw->image.handle != 0;
935 asyw->set.lut = wndw->func->lut && asyv;
942 nv50_wndw_cleanup_fb(struct drm_plane *plane, struct drm_plane_state *old_state)
944 struct nouveau_framebuffer *fb = nouveau_framebuffer(old_state->fb);
945 struct nouveau_drm *drm = nouveau_drm(plane->dev);
947 NV_ATOMIC(drm, "%s cleanup: %p\n", plane->name, old_state->fb);
951 nouveau_bo_unpin(fb->nvbo);
955 nv50_wndw_prepare_fb(struct drm_plane *plane, struct drm_plane_state *state)
957 struct nouveau_framebuffer *fb = nouveau_framebuffer(state->fb);
958 struct nouveau_drm *drm = nouveau_drm(plane->dev);
959 struct nv50_wndw *wndw = nv50_wndw(plane);
960 struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
961 struct nv50_head_atom *asyh;
962 struct nv50_dmac_ctxdma *ctxdma;
965 NV_ATOMIC(drm, "%s prepare: %p\n", plane->name, state->fb);
969 ret = nouveau_bo_pin(fb->nvbo, TTM_PL_FLAG_VRAM, true);
973 ctxdma = nv50_dmac_ctxdma_new(wndw->dmac, fb);
974 if (IS_ERR(ctxdma)) {
975 nouveau_bo_unpin(fb->nvbo);
976 return PTR_ERR(ctxdma);
979 asyw->state.fence = reservation_object_get_excl_rcu(fb->nvbo->bo.resv);
980 asyw->image.handle = ctxdma->object.handle;
981 asyw->image.offset = fb->nvbo->bo.offset;
983 if (wndw->func->prepare) {
984 asyh = nv50_head_atom_get(asyw->state.state, asyw->state.crtc);
986 return PTR_ERR(asyh);
988 wndw->func->prepare(wndw, asyh, asyw);
994 static const struct drm_plane_helper_funcs
996 .prepare_fb = nv50_wndw_prepare_fb,
997 .cleanup_fb = nv50_wndw_cleanup_fb,
998 .atomic_check = nv50_wndw_atomic_check,
1002 nv50_wndw_atomic_destroy_state(struct drm_plane *plane,
1003 struct drm_plane_state *state)
1005 struct nv50_wndw_atom *asyw = nv50_wndw_atom(state);
1006 __drm_atomic_helper_plane_destroy_state(&asyw->state);
1010 static struct drm_plane_state *
1011 nv50_wndw_atomic_duplicate_state(struct drm_plane *plane)
1013 struct nv50_wndw_atom *armw = nv50_wndw_atom(plane->state);
1014 struct nv50_wndw_atom *asyw;
1015 if (!(asyw = kmalloc(sizeof(*asyw), GFP_KERNEL)))
1017 __drm_atomic_helper_plane_duplicate_state(plane, &asyw->state);
1019 asyw->sema = armw->sema;
1020 asyw->ntfy = armw->ntfy;
1021 asyw->image = armw->image;
1022 asyw->point = armw->point;
1023 asyw->lut = armw->lut;
1026 return &asyw->state;
1030 nv50_wndw_reset(struct drm_plane *plane)
1032 struct nv50_wndw_atom *asyw;
1034 if (WARN_ON(!(asyw = kzalloc(sizeof(*asyw), GFP_KERNEL))))
1038 plane->funcs->atomic_destroy_state(plane, plane->state);
1039 plane->state = &asyw->state;
1040 plane->state->plane = plane;
1041 plane->state->rotation = DRM_MODE_ROTATE_0;
1045 nv50_wndw_destroy(struct drm_plane *plane)
1047 struct nv50_wndw *wndw = nv50_wndw(plane);
1049 nvif_notify_fini(&wndw->notify);
1050 data = wndw->func->dtor(wndw);
1051 drm_plane_cleanup(&wndw->plane);
1055 static const struct drm_plane_funcs
1057 .update_plane = drm_atomic_helper_update_plane,
1058 .disable_plane = drm_atomic_helper_disable_plane,
1059 .destroy = nv50_wndw_destroy,
1060 .reset = nv50_wndw_reset,
1061 .atomic_duplicate_state = nv50_wndw_atomic_duplicate_state,
1062 .atomic_destroy_state = nv50_wndw_atomic_destroy_state,
1066 nv50_wndw_fini(struct nv50_wndw *wndw)
1068 nvif_notify_put(&wndw->notify);
1072 nv50_wndw_init(struct nv50_wndw *wndw)
1074 nvif_notify_get(&wndw->notify);
1078 nv50_wndw_ctor(const struct nv50_wndw_func *func, struct drm_device *dev,
1079 enum drm_plane_type type, const char *name, int index,
1080 struct nv50_dmac *dmac, const u32 *format, int nformat,
1081 struct nv50_wndw *wndw)
1088 ret = drm_universal_plane_init(dev, &wndw->plane, 0, &nv50_wndw,
1089 format, nformat, NULL,
1090 type, "%s-%d", name, index);
1094 drm_plane_helper_add(&wndw->plane, &nv50_wndw_helper);
/******************************************************************************
 * Cursor plane
 *****************************************************************************/
1101 #define nv50_curs(p) container_of((p), struct nv50_curs, wndw)
1104 struct nv50_wndw wndw;
1105 struct nvif_object chan;
1109 nv50_curs_update(struct nv50_wndw *wndw, u32 interlock)
1111 struct nv50_curs *curs = nv50_curs(wndw);
1112 nvif_wr32(&curs->chan, 0x0080, 0x00000000);
1117 nv50_curs_point(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
1119 struct nv50_curs *curs = nv50_curs(wndw);
1120 nvif_wr32(&curs->chan, 0x0084, (asyw->point.y << 16) | asyw->point.x);
1124 nv50_curs_prepare(struct nv50_wndw *wndw, struct nv50_head_atom *asyh,
1125 struct nv50_wndw_atom *asyw)
1127 u32 handle = nv50_disp(wndw->plane.dev)->mast.base.vram.handle;
1128 u32 offset = asyw->image.offset;
1129 if (asyh->curs.handle != handle || asyh->curs.offset != offset) {
1130 asyh->curs.handle = handle;
1131 asyh->curs.offset = offset;
1132 asyh->set.curs = asyh->curs.visible;
1137 nv50_curs_release(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
1138 struct nv50_head_atom *asyh)
1140 asyh->curs.visible = false;
1144 nv50_curs_acquire(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
1145 struct nv50_head_atom *asyh)
1149 ret = drm_atomic_helper_check_plane_state(&asyw->state, &asyh->state,
1151 DRM_PLANE_HELPER_NO_SCALING,
1152 DRM_PLANE_HELPER_NO_SCALING,
1154 asyh->curs.visible = asyw->state.visible;
1155 if (ret || !asyh->curs.visible)
1158 switch (asyw->state.fb->width) {
1159 case 32: asyh->curs.layout = 0; break;
1160 case 64: asyh->curs.layout = 1; break;
1165 if (asyw->state.fb->width != asyw->state.fb->height)
1168 switch (asyw->state.fb->format->format) {
1169 case DRM_FORMAT_ARGB8888: asyh->curs.format = 1; break;
1179 nv50_curs_dtor(struct nv50_wndw *wndw)
1181 struct nv50_curs *curs = nv50_curs(wndw);
1182 nvif_object_fini(&curs->chan);
1187 nv50_curs_format[] = {
1188 DRM_FORMAT_ARGB8888,
1191 static const struct nv50_wndw_func
1193 .dtor = nv50_curs_dtor,
1194 .acquire = nv50_curs_acquire,
1195 .release = nv50_curs_release,
1196 .prepare = nv50_curs_prepare,
1197 .point = nv50_curs_point,
1198 .update = nv50_curs_update,
1202 nv50_curs_new(struct nouveau_drm *drm, struct nv50_head *head,
1203 struct nv50_curs **pcurs)
1205 static const struct nvif_mclass curses[] = {
1206 { GK104_DISP_CURSOR, 0 },
1207 { GF110_DISP_CURSOR, 0 },
1208 { GT214_DISP_CURSOR, 0 },
1209 { G82_DISP_CURSOR, 0 },
1210 { NV50_DISP_CURSOR, 0 },
1213 struct nv50_disp_cursor_v0 args = {
1214 .head = head->base.index,
1216 struct nv50_disp *disp = nv50_disp(drm->dev);
1217 struct nv50_curs *curs;
1220 cid = nvif_mclass(disp->disp, curses);
1222 NV_ERROR(drm, "No supported cursor immediate class\n");
1226 if (!(curs = *pcurs = kzalloc(sizeof(*curs), GFP_KERNEL)))
1229 ret = nv50_wndw_ctor(&nv50_curs, drm->dev, DRM_PLANE_TYPE_CURSOR,
1230 "curs", head->base.index, &disp->mast.base,
1231 nv50_curs_format, ARRAY_SIZE(nv50_curs_format),
1238 ret = nvif_object_init(disp->disp, 0, curses[cid].oclass, &args,
1239 sizeof(args), &curs->chan);
1241 NV_ERROR(drm, "curs%04x allocation failed: %d\n",
1242 curses[cid].oclass, ret);
/******************************************************************************
 * Primary plane
 *****************************************************************************/
1252 #define nv50_base(p) container_of((p), struct nv50_base, wndw)
1255 struct nv50_wndw wndw;
1256 struct nv50_sync chan;
1261 nv50_base_notify(struct nvif_notify *notify)
1263 return NVIF_NOTIFY_KEEP;
1267 nv50_base_lut(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
1269 struct nv50_base *base = nv50_base(wndw);
1271 if ((push = evo_wait(&base->chan, 2))) {
1272 evo_mthd(push, 0x00e0, 1);
1273 evo_data(push, asyw->lut.enable << 30);
1274 evo_kick(push, &base->chan);
1279 nv50_base_image_clr(struct nv50_wndw *wndw)
1281 struct nv50_base *base = nv50_base(wndw);
1283 if ((push = evo_wait(&base->chan, 4))) {
1284 evo_mthd(push, 0x0084, 1);
1285 evo_data(push, 0x00000000);
1286 evo_mthd(push, 0x00c0, 1);
1287 evo_data(push, 0x00000000);
1288 evo_kick(push, &base->chan);
1293 nv50_base_image_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
1295 struct nv50_base *base = nv50_base(wndw);
1296 const s32 oclass = base->chan.base.base.user.oclass;
1298 if ((push = evo_wait(&base->chan, 10))) {
1299 evo_mthd(push, 0x0084, 1);
1300 evo_data(push, (asyw->image.mode << 8) |
1301 (asyw->image.interval << 4));
1302 evo_mthd(push, 0x00c0, 1);
1303 evo_data(push, asyw->image.handle);
1304 if (oclass < G82_DISP_BASE_CHANNEL_DMA) {
1305 evo_mthd(push, 0x0800, 5);
1306 evo_data(push, asyw->image.offset >> 8);
1307 evo_data(push, 0x00000000);
1308 evo_data(push, (asyw->image.h << 16) | asyw->image.w);
1309 evo_data(push, (asyw->image.layout << 20) |
1312 evo_data(push, (asyw->image.kind << 16) |
1313 (asyw->image.format << 8));
1315 if (oclass < GF110_DISP_BASE_CHANNEL_DMA) {
1316 evo_mthd(push, 0x0800, 5);
1317 evo_data(push, asyw->image.offset >> 8);
1318 evo_data(push, 0x00000000);
1319 evo_data(push, (asyw->image.h << 16) | asyw->image.w);
1320 evo_data(push, (asyw->image.layout << 20) |
1323 evo_data(push, asyw->image.format << 8);
1325 evo_mthd(push, 0x0400, 5);
1326 evo_data(push, asyw->image.offset >> 8);
1327 evo_data(push, 0x00000000);
1328 evo_data(push, (asyw->image.h << 16) | asyw->image.w);
1329 evo_data(push, (asyw->image.layout << 24) |
1332 evo_data(push, asyw->image.format << 8);
1334 evo_kick(push, &base->chan);
1339 nv50_base_ntfy_clr(struct nv50_wndw *wndw)
1341 struct nv50_base *base = nv50_base(wndw);
1343 if ((push = evo_wait(&base->chan, 2))) {
1344 evo_mthd(push, 0x00a4, 1);
1345 evo_data(push, 0x00000000);
1346 evo_kick(push, &base->chan);
1351 nv50_base_ntfy_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
1353 struct nv50_base *base = nv50_base(wndw);
1355 if ((push = evo_wait(&base->chan, 3))) {
1356 evo_mthd(push, 0x00a0, 2);
1357 evo_data(push, (asyw->ntfy.awaken << 30) | asyw->ntfy.offset);
1358 evo_data(push, asyw->ntfy.handle);
1359 evo_kick(push, &base->chan);
1364 nv50_base_sema_clr(struct nv50_wndw *wndw)
1366 struct nv50_base *base = nv50_base(wndw);
1368 if ((push = evo_wait(&base->chan, 2))) {
1369 evo_mthd(push, 0x0094, 1);
1370 evo_data(push, 0x00000000);
1371 evo_kick(push, &base->chan);
1376 nv50_base_sema_set(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
1378 struct nv50_base *base = nv50_base(wndw);
1380 if ((push = evo_wait(&base->chan, 5))) {
1381 evo_mthd(push, 0x0088, 4);
1382 evo_data(push, asyw->sema.offset);
1383 evo_data(push, asyw->sema.acquire);
1384 evo_data(push, asyw->sema.release);
1385 evo_data(push, asyw->sema.handle);
1386 evo_kick(push, &base->chan);
1391 nv50_base_update(struct nv50_wndw *wndw, u32 interlock)
1393 struct nv50_base *base = nv50_base(wndw);
1396 if (!(push = evo_wait(&base->chan, 2)))
1398 evo_mthd(push, 0x0080, 1);
1399 evo_data(push, interlock);
1400 evo_kick(push, &base->chan);
1402 if (base->chan.base.base.user.oclass < GF110_DISP_BASE_CHANNEL_DMA)
1403 return interlock ? 2 << (base->id * 8) : 0;
1404 return interlock ? 2 << (base->id * 4) : 0;
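
/* The returned value above is this base channel's interlock bit as used in
 * the core channel update: the different shifts suggest pre-GF110 cores use
 * an 8-bit field per head while GF110 and later use a 4-bit field.  This is
 * an inference from the two return statements, not documented behaviour.
 */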
1408 nv50_base_ntfy_wait_begun(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw)
1410 struct nouveau_drm *drm = nouveau_drm(wndw->plane.dev);
1411 struct nv50_disp *disp = nv50_disp(wndw->plane.dev);
1412 if (nvif_msec(&drm->client.device, 2000ULL,
1413 u32 data = nouveau_bo_rd32(disp->sync, asyw->ntfy.offset / 4);
1414 if ((data & 0xc0000000) == 0x40000000)
1423 nv50_base_release(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
1424 struct nv50_head_atom *asyh)
1430 nv50_base_acquire(struct nv50_wndw *wndw, struct nv50_wndw_atom *asyw,
1431 struct nv50_head_atom *asyh)
1433 const struct drm_framebuffer *fb = asyw->state.fb;
1436 if (!fb->format->depth)
1439 ret = drm_atomic_helper_check_plane_state(&asyw->state, &asyh->state,
1441 DRM_PLANE_HELPER_NO_SCALING,
1442 DRM_PLANE_HELPER_NO_SCALING,
1447 asyh->base.depth = fb->format->depth;
1448 asyh->base.cpp = fb->format->cpp[0];
1449 asyh->base.x = asyw->state.src.x1 >> 16;
1450 asyh->base.y = asyw->state.src.y1 >> 16;
1451 asyh->base.w = asyw->state.fb->width;
1452 asyh->base.h = asyw->state.fb->height;
1454 switch (fb->format->format) {
1455 case DRM_FORMAT_C8 : asyw->image.format = 0x1e; break;
1456 case DRM_FORMAT_RGB565 : asyw->image.format = 0xe8; break;
1457 case DRM_FORMAT_XRGB1555 :
1458 case DRM_FORMAT_ARGB1555 : asyw->image.format = 0xe9; break;
1459 case DRM_FORMAT_XRGB8888 :
1460 case DRM_FORMAT_ARGB8888 : asyw->image.format = 0xcf; break;
1461 case DRM_FORMAT_XBGR2101010:
1462 case DRM_FORMAT_ABGR2101010: asyw->image.format = 0xd1; break;
1463 case DRM_FORMAT_XBGR8888 :
1464 case DRM_FORMAT_ABGR8888 : asyw->image.format = 0xd5; break;
1470 asyw->lut.enable = 1;
1471 asyw->set.image = true;
1476 nv50_base_dtor(struct nv50_wndw *wndw)
1478 struct nv50_disp *disp = nv50_disp(wndw->plane.dev);
1479 struct nv50_base *base = nv50_base(wndw);
1480 nv50_dmac_destroy(&base->chan.base, disp->disp);
1485 nv50_base_format[] = {
1488 DRM_FORMAT_XRGB1555,
1489 DRM_FORMAT_ARGB1555,
1490 DRM_FORMAT_XRGB8888,
1491 DRM_FORMAT_ARGB8888,
1492 DRM_FORMAT_XBGR2101010,
1493 DRM_FORMAT_ABGR2101010,
1494 DRM_FORMAT_XBGR8888,
1495 DRM_FORMAT_ABGR8888,
1498 static const struct nv50_wndw_func
1500 .dtor = nv50_base_dtor,
1501 .acquire = nv50_base_acquire,
1502 .release = nv50_base_release,
1503 .sema_set = nv50_base_sema_set,
1504 .sema_clr = nv50_base_sema_clr,
1505 .ntfy_set = nv50_base_ntfy_set,
1506 .ntfy_clr = nv50_base_ntfy_clr,
1507 .ntfy_wait_begun = nv50_base_ntfy_wait_begun,
1508 .image_set = nv50_base_image_set,
1509 .image_clr = nv50_base_image_clr,
1510 .lut = nv50_base_lut,
1511 .update = nv50_base_update,
1515 nv50_base_new(struct nouveau_drm *drm, struct nv50_head *head,
1516 struct nv50_base **pbase)
1518 struct nv50_disp *disp = nv50_disp(drm->dev);
1519 struct nv50_base *base;
1522 if (!(base = *pbase = kzalloc(sizeof(*base), GFP_KERNEL)))
1524 base->id = head->base.index;
1525 base->wndw.ntfy = EVO_FLIP_NTFY0(base->id);
1526 base->wndw.sema = EVO_FLIP_SEM0(base->id);
1527 base->wndw.data = 0x00000000;
1529 ret = nv50_wndw_ctor(&nv50_base, drm->dev, DRM_PLANE_TYPE_PRIMARY,
1530 "base", base->id, &base->chan.base,
1531 nv50_base_format, ARRAY_SIZE(nv50_base_format),
1538 ret = nv50_base_create(&drm->client.device, disp->disp, base->id,
1539 disp->sync->bo.offset, &base->chan);
1543 return nvif_notify_init(&base->chan.base.base.user, nv50_base_notify,
1545 NV50_DISP_BASE_CHANNEL_DMA_V0_NTFY_UEVENT,
1546 &(struct nvif_notify_uevent_req) {},
1547 sizeof(struct nvif_notify_uevent_req),
1548 sizeof(struct nvif_notify_uevent_rep),
1549 &base->wndw.notify);
/******************************************************************************
 * Head
 *****************************************************************************/
1556 nv50_head_procamp(struct nv50_head *head, struct nv50_head_atom *asyh)
1558 struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
1560 if ((push = evo_wait(core, 2))) {
1561 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
1562 evo_mthd(push, 0x08a8 + (head->base.index * 0x400), 1);
1564 evo_mthd(push, 0x0498 + (head->base.index * 0x300), 1);
1565 evo_data(push, (asyh->procamp.sat.sin << 20) |
1566 (asyh->procamp.sat.cos << 8));
1567 evo_kick(push, core);
1572 nv50_head_dither(struct nv50_head *head, struct nv50_head_atom *asyh)
1574 struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
1576 if ((push = evo_wait(core, 2))) {
1577 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
1578 evo_mthd(push, 0x08a0 + (head->base.index * 0x0400), 1);
1580 if (core->base.user.oclass < GK104_DISP_CORE_CHANNEL_DMA)
1581 evo_mthd(push, 0x0490 + (head->base.index * 0x0300), 1);
1583 evo_mthd(push, 0x04a0 + (head->base.index * 0x0300), 1);
1584 evo_data(push, (asyh->dither.mode << 3) |
1585 (asyh->dither.bits << 1) |
1586 asyh->dither.enable);
1587 evo_kick(push, core);
1592 nv50_head_ovly(struct nv50_head *head, struct nv50_head_atom *asyh)
1594 struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
1598 if (asyh->base.cpp) {
1599 switch (asyh->base.cpp) {
1600 case 8: bounds |= 0x00000500; break;
1601 case 4: bounds |= 0x00000300; break;
1602 case 2: bounds |= 0x00000100; break;
1607 bounds |= 0x00000001;
1610 if ((push = evo_wait(core, 2))) {
1611 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
1612 evo_mthd(push, 0x0904 + head->base.index * 0x400, 1);
1614 evo_mthd(push, 0x04d4 + head->base.index * 0x300, 1);
1615 evo_data(push, bounds);
1616 evo_kick(push, core);
1621 nv50_head_base(struct nv50_head *head, struct nv50_head_atom *asyh)
1623 struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
1627 if (asyh->base.cpp) {
1628 switch (asyh->base.cpp) {
1629 case 8: bounds |= 0x00000500; break;
1630 case 4: bounds |= 0x00000300; break;
1631 case 2: bounds |= 0x00000100; break;
1632 case 1: bounds |= 0x00000000; break;
1637 bounds |= 0x00000001;
1640 if ((push = evo_wait(core, 2))) {
1641 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
1642 evo_mthd(push, 0x0900 + head->base.index * 0x400, 1);
1644 evo_mthd(push, 0x04d0 + head->base.index * 0x300, 1);
1645 evo_data(push, bounds);
1646 evo_kick(push, core);
1651 nv50_head_curs_clr(struct nv50_head *head)
1653 struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
1655 if ((push = evo_wait(core, 4))) {
1656 if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
1657 evo_mthd(push, 0x0880 + head->base.index * 0x400, 1);
1658 evo_data(push, 0x05000000);
1660 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
1661 evo_mthd(push, 0x0880 + head->base.index * 0x400, 1);
1662 evo_data(push, 0x05000000);
1663 evo_mthd(push, 0x089c + head->base.index * 0x400, 1);
1664 evo_data(push, 0x00000000);
1666 evo_mthd(push, 0x0480 + head->base.index * 0x300, 1);
1667 evo_data(push, 0x05000000);
1668 evo_mthd(push, 0x048c + head->base.index * 0x300, 1);
1669 evo_data(push, 0x00000000);
1671 evo_kick(push, core);
1676 nv50_head_curs_set(struct nv50_head *head, struct nv50_head_atom *asyh)
1678 struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
1680 if ((push = evo_wait(core, 5))) {
1681 if (core->base.user.oclass < G82_DISP_BASE_CHANNEL_DMA) {
1682 evo_mthd(push, 0x0880 + head->base.index * 0x400, 2);
1683 evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
1684 (asyh->curs.format << 24));
1685 evo_data(push, asyh->curs.offset >> 8);
1687 if (core->base.user.oclass < GF110_DISP_BASE_CHANNEL_DMA) {
1688 evo_mthd(push, 0x0880 + head->base.index * 0x400, 2);
1689 evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
1690 (asyh->curs.format << 24));
1691 evo_data(push, asyh->curs.offset >> 8);
1692 evo_mthd(push, 0x089c + head->base.index * 0x400, 1);
1693 evo_data(push, asyh->curs.handle);
1695 evo_mthd(push, 0x0480 + head->base.index * 0x300, 2);
1696 evo_data(push, 0x80000000 | (asyh->curs.layout << 26) |
1697 (asyh->curs.format << 24));
1698 evo_data(push, asyh->curs.offset >> 8);
1699 evo_mthd(push, 0x048c + head->base.index * 0x300, 1);
1700 evo_data(push, asyh->curs.handle);
1702 evo_kick(push, core);
1707 nv50_head_core_clr(struct nv50_head *head)
1709 struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
1711 if ((push = evo_wait(core, 2))) {
1712 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA)
1713 evo_mthd(push, 0x0874 + head->base.index * 0x400, 1);
1715 evo_mthd(push, 0x0474 + head->base.index * 0x300, 1);
1716 evo_data(push, 0x00000000);
1717 evo_kick(push, core);
1722 nv50_head_core_set(struct nv50_head *head, struct nv50_head_atom *asyh)
1724 struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
1726 if ((push = evo_wait(core, 9))) {
1727 if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
1728 evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
1729 evo_data(push, asyh->core.offset >> 8);
1730 evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
1731 evo_data(push, (asyh->core.h << 16) | asyh->core.w);
1732 evo_data(push, asyh->core.layout << 20 |
1733 (asyh->core.pitch >> 8) << 8 |
1735 evo_data(push, asyh->core.kind << 16 |
1736 asyh->core.format << 8);
1737 evo_data(push, asyh->core.handle);
1738 evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
1739 evo_data(push, (asyh->core.y << 16) | asyh->core.x);
/* EVO will complain with INVALID_STATE if we have an
 * active cursor and (re)specify HeadSetContextDmaIso
 * without also updating HeadSetOffsetCursor.
 */
1744 asyh->set.curs = asyh->curs.visible;
1746 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
1747 evo_mthd(push, 0x0860 + head->base.index * 0x400, 1);
1748 evo_data(push, asyh->core.offset >> 8);
1749 evo_mthd(push, 0x0868 + head->base.index * 0x400, 4);
1750 evo_data(push, (asyh->core.h << 16) | asyh->core.w);
1751 evo_data(push, asyh->core.layout << 20 |
1752 (asyh->core.pitch >> 8) << 8 |
1754 evo_data(push, asyh->core.format << 8);
1755 evo_data(push, asyh->core.handle);
1756 evo_mthd(push, 0x08c0 + head->base.index * 0x400, 1);
1757 evo_data(push, (asyh->core.y << 16) | asyh->core.x);
1759 evo_mthd(push, 0x0460 + head->base.index * 0x300, 1);
1760 evo_data(push, asyh->core.offset >> 8);
1761 evo_mthd(push, 0x0468 + head->base.index * 0x300, 4);
1762 evo_data(push, (asyh->core.h << 16) | asyh->core.w);
1763 evo_data(push, asyh->core.layout << 24 |
1764 (asyh->core.pitch >> 8) << 8 |
1766 evo_data(push, asyh->core.format << 8);
1767 evo_data(push, asyh->core.handle);
1768 evo_mthd(push, 0x04b0 + head->base.index * 0x300, 1);
1769 evo_data(push, (asyh->core.y << 16) | asyh->core.x);
1771 evo_kick(push, core);
1776 nv50_head_lut_clr(struct nv50_head *head)
1778 struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
1780 if ((push = evo_wait(core, 4))) {
1781 if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
1782 evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1);
1783 evo_data(push, 0x40000000);
1785 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
1786 evo_mthd(push, 0x0840 + (head->base.index * 0x400), 1);
1787 evo_data(push, 0x40000000);
1788 evo_mthd(push, 0x085c + (head->base.index * 0x400), 1);
1789 evo_data(push, 0x00000000);
1791 evo_mthd(push, 0x0440 + (head->base.index * 0x300), 1);
1792 evo_data(push, 0x03000000);
1793 evo_mthd(push, 0x045c + (head->base.index * 0x300), 1);
1794 evo_data(push, 0x00000000);
1796 evo_kick(push, core);
1801 nv50_head_lut_set(struct nv50_head *head, struct nv50_head_atom *asyh)
1803 struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
1805 if ((push = evo_wait(core, 7))) {
1806 if (core->base.user.oclass < G82_DISP_CORE_CHANNEL_DMA) {
1807 evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2);
1808 evo_data(push, asyh->base.depth == 8 ?
1809 0x80000000 : 0xc0000000);
1810 evo_data(push, asyh->lut.offset >> 8);
1812 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
1813 evo_mthd(push, 0x0840 + (head->base.index * 0x400), 2);
1814 evo_data(push, asyh->base.depth == 8 ?
1815 0x80000000 : 0xc0000000);
1816 evo_data(push, asyh->lut.offset >> 8);
1817 evo_mthd(push, 0x085c + (head->base.index * 0x400), 1);
1818 evo_data(push, asyh->lut.handle);
1820 evo_mthd(push, 0x0440 + (head->base.index * 0x300), 4);
1821 evo_data(push, 0x83000000);
1822 evo_data(push, asyh->lut.offset >> 8);
1823 evo_data(push, 0x00000000);
1824 evo_data(push, 0x00000000);
1825 evo_mthd(push, 0x045c + (head->base.index * 0x300), 1);
1826 evo_data(push, asyh->lut.handle);
1828 evo_kick(push, core);
1833 nv50_head_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
1835 struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
1836 struct nv50_head_mode *m = &asyh->mode;
1838 if ((push = evo_wait(core, 14))) {
1839 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
1840 evo_mthd(push, 0x0804 + (head->base.index * 0x400), 2);
1841 evo_data(push, 0x00800000 | m->clock);
1842 evo_data(push, m->interlace ? 0x00000002 : 0x00000000);
1843 evo_mthd(push, 0x0810 + (head->base.index * 0x400), 7);
1844 evo_data(push, 0x00000000);
1845 evo_data(push, (m->v.active << 16) | m->h.active );
1846 evo_data(push, (m->v.synce << 16) | m->h.synce );
1847 evo_data(push, (m->v.blanke << 16) | m->h.blanke );
1848 evo_data(push, (m->v.blanks << 16) | m->h.blanks );
1849 evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
1850 evo_data(push, asyh->mode.v.blankus);
1851 evo_mthd(push, 0x082c + (head->base.index * 0x400), 1);
1852 evo_data(push, 0x00000000);
1854 evo_mthd(push, 0x0410 + (head->base.index * 0x300), 6);
1855 evo_data(push, 0x00000000);
1856 evo_data(push, (m->v.active << 16) | m->h.active );
1857 evo_data(push, (m->v.synce << 16) | m->h.synce );
1858 evo_data(push, (m->v.blanke << 16) | m->h.blanke );
1859 evo_data(push, (m->v.blanks << 16) | m->h.blanks );
1860 evo_data(push, (m->v.blank2e << 16) | m->v.blank2s);
1861 evo_mthd(push, 0x042c + (head->base.index * 0x300), 2);
1862 evo_data(push, 0x00000000); /* ??? */
1863 evo_data(push, 0xffffff00);
1864 evo_mthd(push, 0x0450 + (head->base.index * 0x300), 3);
1865 evo_data(push, m->clock * 1000);
1866 evo_data(push, 0x00200000); /* ??? */
1867 evo_data(push, m->clock * 1000);
1869 evo_kick(push, core);
1874 nv50_head_view(struct nv50_head *head, struct nv50_head_atom *asyh)
1876 struct nv50_dmac *core = &nv50_disp(head->base.base.dev)->mast.base;
1878 if ((push = evo_wait(core, 10))) {
1879 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
1880 evo_mthd(push, 0x08a4 + (head->base.index * 0x400), 1);
1881 evo_data(push, 0x00000000);
1882 evo_mthd(push, 0x08c8 + (head->base.index * 0x400), 1);
1883 evo_data(push, (asyh->view.iH << 16) | asyh->view.iW);
1884 evo_mthd(push, 0x08d8 + (head->base.index * 0x400), 2);
1885 evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
1886 evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
1888 evo_mthd(push, 0x0494 + (head->base.index * 0x300), 1);
1889 evo_data(push, 0x00000000);
1890 evo_mthd(push, 0x04b8 + (head->base.index * 0x300), 1);
1891 evo_data(push, (asyh->view.iH << 16) | asyh->view.iW);
1892 evo_mthd(push, 0x04c0 + (head->base.index * 0x300), 3);
1893 evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
1894 evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
1895 evo_data(push, (asyh->view.oH << 16) | asyh->view.oW);
1897 evo_kick(push, core);
1902 nv50_head_flush_clr(struct nv50_head *head, struct nv50_head_atom *asyh, bool y)
1904 if (asyh->clr.core && (!asyh->set.core || y))
1905 nv50_head_lut_clr(head);
1906 if (asyh->clr.core && (!asyh->set.core || y))
1907 nv50_head_core_clr(head);
1908 if (asyh->clr.curs && (!asyh->set.curs || y))
1909 nv50_head_curs_clr(head);
1913 nv50_head_flush_set(struct nv50_head *head, struct nv50_head_atom *asyh)
1915 if (asyh->set.view ) nv50_head_view (head, asyh);
1916 if (asyh->set.mode ) nv50_head_mode (head, asyh);
1917 if (asyh->set.core ) nv50_head_lut_set (head, asyh);
1918 if (asyh->set.core ) nv50_head_core_set(head, asyh);
1919 if (asyh->set.curs ) nv50_head_curs_set(head, asyh);
1920 if (asyh->set.base ) nv50_head_base (head, asyh);
1921 if (asyh->set.ovly ) nv50_head_ovly (head, asyh);
1922 if (asyh->set.dither ) nv50_head_dither (head, asyh);
1923 if (asyh->set.procamp) nv50_head_procamp (head, asyh);
1927 nv50_head_atomic_check_procamp(struct nv50_head_atom *armh,
1928 struct nv50_head_atom *asyh,
1929 struct nouveau_conn_atom *asyc)
1931 const int vib = asyc->procamp.color_vibrance - 100;
1932 const int hue = asyc->procamp.vibrant_hue - 90;
1933 const int adj = (vib > 0) ? 50 : 0;
1934 asyh->procamp.sat.cos = ((vib * 2047 + adj) / 100) & 0xfff;
1935 asyh->procamp.sat.sin = ((hue * 2047) / 100) & 0xfff;
1936 asyh->set.procamp = true;
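
/* The saturation sin/cos values above are 12-bit two's-complement fractions
 * scaled so that +/-100 maps to roughly +/-2047.  Worked example (the input
 * value is hypothetical): color_vibrance = 150 gives vib = 50, adj = 50, so
 * sat.cos = (50 * 2047 + 50) / 100 = 1024 (0x400), i.e. about half of full
 * scale.
 */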
1940 nv50_head_atomic_check_dither(struct nv50_head_atom *armh,
1941 struct nv50_head_atom *asyh,
1942 struct nouveau_conn_atom *asyc)
1944 struct drm_connector *connector = asyc->state.connector;
1947 if (asyc->dither.mode == DITHERING_MODE_AUTO) {
1948 if (asyh->base.depth > connector->display_info.bpc * 3)
1949 mode = DITHERING_MODE_DYNAMIC2X2;
1951 mode = asyc->dither.mode;
1954 if (asyc->dither.depth == DITHERING_DEPTH_AUTO) {
1955 if (connector->display_info.bpc >= 8)
1956 mode |= DITHERING_DEPTH_8BPC;
1958 mode |= asyc->dither.depth;
1961 asyh->dither.enable = mode;
1962 asyh->dither.bits = mode >> 1;
1963 asyh->dither.mode = mode >> 3;
1964 asyh->set.dither = true;
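
/* The DITHERING_* property values are packed so that, after the ORs above,
 * bit 0 is the enable, bits 2:1 select the dither bit depth and the higher
 * bits select the dither mode; the three assignments simply unpack that
 * layout into the per-head state.  (Layout inferred from the shifts, not
 * from the property definitions.)
 */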
1968 nv50_head_atomic_check_view(struct nv50_head_atom *armh,
1969 struct nv50_head_atom *asyh,
1970 struct nouveau_conn_atom *asyc)
1972 struct drm_connector *connector = asyc->state.connector;
1973 struct drm_display_mode *omode = &asyh->state.adjusted_mode;
1974 struct drm_display_mode *umode = &asyh->state.mode;
1975 int mode = asyc->scaler.mode;
1977 int umode_vdisplay, omode_hdisplay, omode_vdisplay;
1979 if (connector->edid_blob_ptr)
1980 edid = (struct edid *)connector->edid_blob_ptr->data;
1984 if (!asyc->scaler.full) {
1985 if (mode == DRM_MODE_SCALE_NONE)
1988 /* Non-EDID LVDS/eDP mode. */
1989 mode = DRM_MODE_SCALE_FULLSCREEN;
/* For the user-specified mode, we must ignore doublescan and
 * the like, but honor frame packing.
 */
1995 umode_vdisplay = umode->vdisplay;
1996 if ((umode->flags & DRM_MODE_FLAG_3D_MASK) == DRM_MODE_FLAG_3D_FRAME_PACKING)
1997 umode_vdisplay += umode->vtotal;
1998 asyh->view.iW = umode->hdisplay;
1999 asyh->view.iH = umode_vdisplay;
2000 /* For the output mode, we can just use the stock helper. */
2001 drm_mode_get_hv_timing(omode, &omode_hdisplay, &omode_vdisplay);
2002 asyh->view.oW = omode_hdisplay;
2003 asyh->view.oH = omode_vdisplay;
/* Add overscan compensation if necessary; this will keep the aspect
 * ratio the same as the backend mode unless overridden by the user
 * setting both hborder and vborder properties.
 */
2009 if ((asyc->scaler.underscan.mode == UNDERSCAN_ON ||
2010 (asyc->scaler.underscan.mode == UNDERSCAN_AUTO &&
2011 drm_detect_hdmi_monitor(edid)))) {
2012 u32 bX = asyc->scaler.underscan.hborder;
2013 u32 bY = asyc->scaler.underscan.vborder;
2014 u32 r = (asyh->view.oH << 19) / asyh->view.oW;
2017 asyh->view.oW -= (bX * 2);
2018 if (bY) asyh->view.oH -= (bY * 2);
2019 else asyh->view.oH = ((asyh->view.oW * r) + (r / 2)) >> 19;
2021 asyh->view.oW -= (asyh->view.oW >> 4) + 32;
2022 if (bY) asyh->view.oH -= (bY * 2);
2023 else asyh->view.oH = ((asyh->view.oW * r) + (r / 2)) >> 19;
/* Handle CENTER/ASPECT scaling, taking into account the areas
 * removed already for overscan compensation.
 */
2031 case DRM_MODE_SCALE_CENTER:
2032 asyh->view.oW = min((u16)umode->hdisplay, asyh->view.oW);
2033 asyh->view.oH = min((u16)umode_vdisplay, asyh->view.oH);
2035 case DRM_MODE_SCALE_ASPECT:
2036 if (asyh->view.oH < asyh->view.oW) {
2037 u32 r = (asyh->view.iW << 19) / asyh->view.iH;
2038 asyh->view.oW = ((asyh->view.oH * r) + (r / 2)) >> 19;
2040 u32 r = (asyh->view.iH << 19) / asyh->view.iW;
2041 asyh->view.oH = ((asyh->view.oW * r) + (r / 2)) >> 19;
2048 asyh->set.view = true;
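
/* Both the underscan and ASPECT paths above keep the aspect ratio by working
 * with a ratio "r" stored as an unsigned 0.19 fixed-point fraction (hence the
 * << 19 and the >> 19 with r/2 added for rounding).  Hypothetical example:
 * for a 1024x768 output, r = (768 << 19) / 1024 = 393216, and recomputing
 * (1024 * r + r / 2) >> 19 gives back 768.
 */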
2052 nv50_head_atomic_check_mode(struct nv50_head *head, struct nv50_head_atom *asyh)
2054 struct drm_display_mode *mode = &asyh->state.adjusted_mode;
2055 struct nv50_head_mode *m = &asyh->mode;
2058 drm_mode_set_crtcinfo(mode, CRTC_INTERLACE_HALVE_V | CRTC_STEREO_DOUBLE);
/*
 * DRM modes are defined in terms of a repeating interval
 * starting with the active display area.  The hardware modes
 * are defined in terms of a repeating interval starting one
 * unit (pixel or line) into the sync pulse.  So, add bias.
 */
2067 m->h.active = mode->crtc_htotal;
2068 m->h.synce = mode->crtc_hsync_end - mode->crtc_hsync_start - 1;
2069 m->h.blanke = mode->crtc_hblank_end - mode->crtc_hsync_start - 1;
2070 m->h.blanks = m->h.blanke + mode->crtc_hdisplay;
2072 m->v.active = mode->crtc_vtotal;
2073 m->v.synce = mode->crtc_vsync_end - mode->crtc_vsync_start - 1;
2074 m->v.blanke = mode->crtc_vblank_end - mode->crtc_vsync_start - 1;
2075 m->v.blanks = m->v.blanke + mode->crtc_vdisplay;
2077 /*XXX: Safe underestimate, even "0" works */
2078 blankus = (m->v.active - mode->crtc_vdisplay - 2) * m->h.active;
2080 blankus /= mode->crtc_clock;
2081 m->v.blankus = blankus;
2083 if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
2084 m->v.blank2e = m->v.active + m->v.blanke;
2085 m->v.blank2s = m->v.blank2e + mode->crtc_vdisplay;
2086 m->v.active = (m->v.active * 2) + 1;
2087 m->interlace = true;
2091 m->interlace = false;
2093 m->clock = mode->crtc_clock;
2095 asyh->set.mode = true;
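
/* Worked example of the bias described above (numbers are the standard CEA
 * 1920x1080@60 timings, used here purely for illustration): with
 * crtc_hdisplay = 1920, crtc_hsync_start = 2008, crtc_hsync_end = 2052 and
 * crtc_htotal = crtc_hblank_end = 2200, the code yields h.active = 2200,
 * h.synce = 2052 - 2008 - 1 = 43, h.blanke = 2200 - 2008 - 1 = 191 and
 * h.blanks = 191 + 1920 = 2111.
 */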
2099 nv50_head_atomic_check(struct drm_crtc *crtc, struct drm_crtc_state *state)
2101 struct nouveau_drm *drm = nouveau_drm(crtc->dev);
2102 struct nv50_disp *disp = nv50_disp(crtc->dev);
2103 struct nv50_head *head = nv50_head(crtc);
2104 struct nv50_head_atom *armh = nv50_head_atom(crtc->state);
2105 struct nv50_head_atom *asyh = nv50_head_atom(state);
2106 struct nouveau_conn_atom *asyc = NULL;
2107 struct drm_connector_state *conns;
2108 struct drm_connector *conn;
2111 NV_ATOMIC(drm, "%s atomic_check %d\n", crtc->name, asyh->state.active);
2112 if (asyh->state.active) {
2113 for_each_new_connector_in_state(asyh->state.state, conn, conns, i) {
2114 if (conns->crtc == crtc) {
2115 asyc = nouveau_conn_atom(conns);
2120 if (armh->state.active) {
2122 if (asyh->state.mode_changed)
2123 asyc->set.scaler = true;
2124 if (armh->base.depth != asyh->base.depth)
2125 asyc->set.dither = true;
2129 asyc->set.mask = ~0;
2130 asyh->set.mask = ~0;
2133 if (asyh->state.mode_changed)
2134 nv50_head_atomic_check_mode(head, asyh);
2137 if (asyc->set.scaler)
2138 nv50_head_atomic_check_view(armh, asyh, asyc);
2139 if (asyc->set.dither)
2140 nv50_head_atomic_check_dither(armh, asyh, asyc);
2141 if (asyc->set.procamp)
2142 nv50_head_atomic_check_procamp(armh, asyh, asyc);
2145 if ((asyh->core.visible = (asyh->base.cpp != 0))) {
2146 asyh->core.x = asyh->base.x;
2147 asyh->core.y = asyh->base.y;
2148 asyh->core.w = asyh->base.w;
2149 asyh->core.h = asyh->base.h;
2151 if ((asyh->core.visible = asyh->curs.visible)) {
/*XXX: We need to either find some way of having the
 * primary base layer appear black, while still
 * being able to display the other layers, or we
 * need to allocate a dummy black surface here.
 */
2159 asyh->core.w = asyh->state.mode.hdisplay;
2160 asyh->core.h = asyh->state.mode.vdisplay;
2162 asyh->core.handle = disp->mast.base.vram.handle;
2163 asyh->core.offset = 0;
2164 asyh->core.format = 0xcf;
2165 asyh->core.kind = 0;
2166 asyh->core.layout = 1;
2167 asyh->core.block = 0;
2168 asyh->core.pitch = ALIGN(asyh->core.w, 64) * 4;
2169 asyh->lut.handle = disp->mast.base.vram.handle;
2170 asyh->lut.offset = head->lut.nvbo[0]->bo.offset;
2171 asyh->set.base = armh->base.cpp != asyh->base.cpp;
2172 asyh->set.ovly = armh->ovly.cpp != asyh->ovly.cpp;
2174 asyh->core.visible = false;
2175 asyh->curs.visible = false;
2180 if (!drm_atomic_crtc_needs_modeset(&asyh->state)) {
2181 if (asyh->core.visible) {
2182 if (memcmp(&armh->core, &asyh->core, sizeof(asyh->core)))
2183 asyh->set.core = true;
2185 if (armh->core.visible) {
2186 asyh->clr.core = true;
2189 if (asyh->curs.visible) {
2190 if (memcmp(&armh->curs, &asyh->curs, sizeof(asyh->curs)))
2191 asyh->set.curs = true;
2193 if (armh->curs.visible) {
2194 asyh->clr.curs = true;
2197 asyh->clr.core = armh->core.visible;
2198 asyh->clr.curs = armh->curs.visible;
2199 asyh->set.core = asyh->core.visible;
2200 asyh->set.curs = asyh->curs.visible;
2203 if (asyh->clr.mask || asyh->set.mask)
2204 nv50_atom(asyh->state.state)->lock_core = true;
2209 nv50_head_lut_load(struct drm_crtc *crtc)
2211 struct nv50_disp *disp = nv50_disp(crtc->dev);
2212 struct nv50_head *head = nv50_head(crtc);
2213 void __iomem *lut = nvbo_kmap_obj_iovirtual(head->lut.nvbo[0]);
2217 r = crtc->gamma_store;
2218 g = r + crtc->gamma_size;
2219 b = g + crtc->gamma_size;
2221 for (i = 0; i < 256; i++) {
2222 if (disp->disp->oclass < GF110_DISP) {
2223 writew((*r++ >> 2) + 0x0000, lut + (i * 0x08) + 0);
2224 writew((*g++ >> 2) + 0x0000, lut + (i * 0x08) + 2);
2225 writew((*b++ >> 2) + 0x0000, lut + (i * 0x08) + 4);
2227 /* 0x6000 interferes with the 14-bit color??? */
2228 writew((*r++ >> 2) + 0x6000, lut + (i * 0x20) + 0);
2229 writew((*g++ >> 2) + 0x6000, lut + (i * 0x20) + 2);
2230 writew((*b++ >> 2) + 0x6000, lut + (i * 0x20) + 4);
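
/* LUT layout implied by the loop above: on pre-GF110 display classes each of
 * the 256 entries is 8 bytes with red/green/blue stored as 14-bit values
 * (gamma_store >> 2); on GF110 and later the stride grows to 0x20 bytes and
 * 0x6000 is added to each component, per the question-mark comment above.
 */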
2235 static const struct drm_crtc_helper_funcs
2237 .atomic_check = nv50_head_atomic_check,
2241 nv50_head_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
2243 struct drm_modeset_acquire_ctx *ctx)
2245 nv50_head_lut_load(crtc);
2250 nv50_head_atomic_destroy_state(struct drm_crtc *crtc,
2251 struct drm_crtc_state *state)
2253 struct nv50_head_atom *asyh = nv50_head_atom(state);
2254 __drm_atomic_helper_crtc_destroy_state(&asyh->state);
2258 static struct drm_crtc_state *
2259 nv50_head_atomic_duplicate_state(struct drm_crtc *crtc)
2261 struct nv50_head_atom *armh = nv50_head_atom(crtc->state);
2262 struct nv50_head_atom *asyh;
2263 if (!(asyh = kmalloc(sizeof(*asyh), GFP_KERNEL)))
2265 __drm_atomic_helper_crtc_duplicate_state(crtc, &asyh->state);
2266 asyh->view = armh->view;
2267 asyh->mode = armh->mode;
2268 asyh->lut = armh->lut;
2269 asyh->core = armh->core;
2270 asyh->curs = armh->curs;
2271 asyh->base = armh->base;
2272 asyh->ovly = armh->ovly;
2273 asyh->dither = armh->dither;
2274 asyh->procamp = armh->procamp;
2277 return &asyh->state;
2281 __drm_atomic_helper_crtc_reset(struct drm_crtc *crtc,
2282 struct drm_crtc_state *state)
2285 crtc->funcs->atomic_destroy_state(crtc, crtc->state);
2286 crtc->state = state;
2287 crtc->state->crtc = crtc;
2291 nv50_head_reset(struct drm_crtc *crtc)
2293 struct nv50_head_atom *asyh;
2295 if (WARN_ON(!(asyh = kzalloc(sizeof(*asyh), GFP_KERNEL))))
2298 __drm_atomic_helper_crtc_reset(crtc, &asyh->state);
2302 nv50_head_destroy(struct drm_crtc *crtc)
2304 struct nv50_disp *disp = nv50_disp(crtc->dev);
2305 struct nv50_head *head = nv50_head(crtc);
2308 nv50_dmac_destroy(&head->ovly.base, disp->disp);
2309 nv50_pioc_destroy(&head->oimm.base);
2311 for (i = 0; i < ARRAY_SIZE(head->lut.nvbo); i++)
2312 nouveau_bo_unmap_unpin_unref(&head->lut.nvbo[i]);
2314 drm_crtc_cleanup(crtc);
2318 static const struct drm_crtc_funcs
2320 .reset = nv50_head_reset,
2321 .gamma_set = nv50_head_gamma_set,
2322 .destroy = nv50_head_destroy,
2323 .set_config = drm_atomic_helper_set_config,
2324 .page_flip = drm_atomic_helper_page_flip,
2325 .atomic_duplicate_state = nv50_head_atomic_duplicate_state,
2326 .atomic_destroy_state = nv50_head_atomic_destroy_state,
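/* Create one head: a DRM CRTC built from a base (primary) plane and a
 * cursor plane, plus the per-head LUT buffers and the overlay and
 * overlay-immediate channels.  On failure the partially-constructed head
 * is torn down through nv50_head_destroy().
 */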
2330 nv50_head_create(struct drm_device *dev, int index)
2332 struct nouveau_drm *drm = nouveau_drm(dev);
2333 struct nvif_device *device = &drm->client.device;
2334 struct nv50_disp *disp = nv50_disp(dev);
2335 struct nv50_head *head;
2336 struct nv50_base *base;
2337 struct nv50_curs *curs;
2338 struct drm_crtc *crtc;
2341 head = kzalloc(sizeof(*head), GFP_KERNEL);
2345 head->base.index = index;
2346 ret = nv50_base_new(drm, head, &base);
2348 ret = nv50_curs_new(drm, head, &curs);
2354 crtc = &head->base.base;
2355 drm_crtc_init_with_planes(dev, crtc, &base->wndw.plane,
2356 &curs->wndw.plane, &nv50_head_func,
2357 "head-%d", head->base.index);
2358 drm_crtc_helper_add(crtc, &nv50_head_help);
2359 drm_mode_crtc_set_gamma_size(crtc, 256);
2361 for (i = 0; i < ARRAY_SIZE(head->lut.nvbo); i++) {
2362 ret = nouveau_bo_new_pin_map(&drm->client, 1025 * 8, 0x100,
2364 &head->lut.nvbo[i]);
2369 /* allocate overlay resources */
2370 ret = nv50_oimm_create(device, disp->disp, index, &head->oimm);
2374 ret = nv50_ovly_create(device, disp->disp, index, disp->sync->bo.offset,
2381 nv50_head_destroy(crtc);
2385 /******************************************************************************
2386 * Output path helpers
2387 *****************************************************************************/
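/* Output resources (ORs) are assigned dynamically: nv50_outp_acquire() asks
 * NVKM for an OR and link for the encoder's DCB entry via the ACQUIRE method,
 * and nv50_outp_release() hands them back once the output path is disabled.
 */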
2389 nv50_outp_release(struct nouveau_encoder *nv_encoder)
2391 struct nv50_disp *disp = nv50_disp(nv_encoder->base.base.dev);
2393 struct nv50_disp_mthd_v1 base;
2396 .base.method = NV50_DISP_MTHD_V1_RELEASE,
2397 .base.hasht = nv_encoder->dcb->hasht,
2398 .base.hashm = nv_encoder->dcb->hashm,
2401 nvif_mthd(disp->disp, 0, &args, sizeof(args));
2402 nv_encoder->or = -1;
2403 nv_encoder->link = 0;
2407 nv50_outp_acquire(struct nouveau_encoder *nv_encoder)
2409 struct nouveau_drm *drm = nouveau_drm(nv_encoder->base.base.dev);
2410 struct nv50_disp *disp = nv50_disp(drm->dev);
2412 struct nv50_disp_mthd_v1 base;
2413 struct nv50_disp_acquire_v0 info;
2416 .base.method = NV50_DISP_MTHD_V1_ACQUIRE,
2417 .base.hasht = nv_encoder->dcb->hasht,
2418 .base.hashm = nv_encoder->dcb->hashm,
2422 ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
2424 NV_ERROR(drm, "error acquiring output path: %d\n", ret);
2428 nv_encoder->or = args.info.or;
2429 nv_encoder->link = args.info.link;
2434 nv50_outp_atomic_check_view(struct drm_encoder *encoder,
2435 struct drm_crtc_state *crtc_state,
2436 struct drm_connector_state *conn_state,
2437 struct drm_display_mode *native_mode)
2439 struct drm_display_mode *adjusted_mode = &crtc_state->adjusted_mode;
2440 struct drm_display_mode *mode = &crtc_state->mode;
2441 struct drm_connector *connector = conn_state->connector;
2442 struct nouveau_conn_atom *asyc = nouveau_conn_atom(conn_state);
2443 struct nouveau_drm *drm = nouveau_drm(encoder->dev);
2445 NV_ATOMIC(drm, "%s atomic_check\n", encoder->name);
2446 asyc->scaler.full = false;
2450 if (asyc->scaler.mode == DRM_MODE_SCALE_NONE) {
2451 switch (connector->connector_type) {
2452 case DRM_MODE_CONNECTOR_LVDS:
2453 case DRM_MODE_CONNECTOR_eDP:
2454 /* Force use of scaler for non-EDID modes. */
2455 if (adjusted_mode->type & DRM_MODE_TYPE_DRIVER)
2458 asyc->scaler.full = true;
2467 if (!drm_mode_equal(adjusted_mode, mode)) {
2468 drm_mode_copy(adjusted_mode, mode);
2469 crtc_state->mode_changed = true;
2476 nv50_outp_atomic_check(struct drm_encoder *encoder,
2477 struct drm_crtc_state *crtc_state,
2478 struct drm_connector_state *conn_state)
2480 struct nouveau_connector *nv_connector =
2481 nouveau_connector(conn_state->connector);
2482 return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
2483 nv_connector->native_mode);
2486 /******************************************************************************
2487 * DAC
2488 *****************************************************************************/
2490 nv50_dac_disable(struct drm_encoder *encoder)
2492 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2493 struct nv50_mast *mast = nv50_mast(encoder->dev);
2494 const int or = nv_encoder->or;
2497 if (nv_encoder->crtc) {
2498 push = evo_wait(mast, 4);
2500 if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
2501 evo_mthd(push, 0x0400 + (or * 0x080), 1);
2502 evo_data(push, 0x00000000);
2504 evo_mthd(push, 0x0180 + (or * 0x020), 1);
2505 evo_data(push, 0x00000000);
2507 evo_kick(push, mast);
2511 nv_encoder->crtc = NULL;
2512 nv50_outp_release(nv_encoder);
2516 nv50_dac_enable(struct drm_encoder *encoder)
2518 struct nv50_mast *mast = nv50_mast(encoder->dev);
2519 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2520 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
2521 struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
2524 nv50_outp_acquire(nv_encoder);
2526 push = evo_wait(mast, 8);
2528 if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
2529 u32 syncs = 0x00000000;
2531 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
2532 syncs |= 0x00000001;
2533 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
2534 syncs |= 0x00000002;
2536 evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
2537 evo_data(push, 1 << nv_crtc->index);
2538 evo_data(push, syncs);
2540 u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
2541 u32 syncs = 0x00000001;
2543 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
2544 syncs |= 0x00000008;
2545 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
2546 syncs |= 0x00000010;
2548 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2549 magic |= 0x00000001;
2551 evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
2552 evo_data(push, syncs);
2553 evo_data(push, magic);
2554 evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
2555 evo_data(push, 1 << nv_crtc->index);
2558 evo_kick(push, mast);
2561 nv_encoder->crtc = encoder->crtc;
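/* Load-detect an analog monitor by asking NVKM to run a DAC load test.  The
 * test level comes from the VBIOS (dactestval), falling back to 340 when the
 * VBIOS does not provide one.
 */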
2564 static enum drm_connector_status
2565 nv50_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
2567 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2568 struct nv50_disp *disp = nv50_disp(encoder->dev);
2570 struct nv50_disp_mthd_v1 base;
2571 struct nv50_disp_dac_load_v0 load;
2574 .base.method = NV50_DISP_MTHD_V1_DAC_LOAD,
2575 .base.hasht = nv_encoder->dcb->hasht,
2576 .base.hashm = nv_encoder->dcb->hashm,
2580 args.load.data = nouveau_drm(encoder->dev)->vbios.dactestval;
2581 if (args.load.data == 0)
2582 args.load.data = 340;
2584 ret = nvif_mthd(disp->disp, 0, &args, sizeof(args));
2585 if (ret || !args.load.load)
2586 return connector_status_disconnected;
2588 return connector_status_connected;
2591 static const struct drm_encoder_helper_funcs
2593 .atomic_check = nv50_outp_atomic_check,
2594 .enable = nv50_dac_enable,
2595 .disable = nv50_dac_disable,
2596 .detect = nv50_dac_detect
2600 nv50_dac_destroy(struct drm_encoder *encoder)
2602 drm_encoder_cleanup(encoder);
2606 static const struct drm_encoder_funcs
2608 .destroy = nv50_dac_destroy,
2612 nv50_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
2614 struct nouveau_drm *drm = nouveau_drm(connector->dev);
2615 struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
2616 struct nvkm_i2c_bus *bus;
2617 struct nouveau_encoder *nv_encoder;
2618 struct drm_encoder *encoder;
2619 int type = DRM_MODE_ENCODER_DAC;
2621 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
2624 nv_encoder->dcb = dcbe;
2626 bus = nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
2628 nv_encoder->i2c = &bus->i2c;
2630 encoder = to_drm_encoder(nv_encoder);
2631 encoder->possible_crtcs = dcbe->heads;
2632 encoder->possible_clones = 0;
2633 drm_encoder_init(connector->dev, encoder, &nv50_dac_func, type,
2634 "dac-%04x-%04x", dcbe->hasht, dcbe->hashm);
2635 drm_encoder_helper_add(encoder, &nv50_dac_help);
2637 drm_mode_connector_attach_encoder(connector, encoder);
2641 /******************************************************************************
2642 * Audio
2643 *****************************************************************************/
2645 nv50_audio_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
2647 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2648 struct nv50_disp *disp = nv50_disp(encoder->dev);
2650 struct nv50_disp_mthd_v1 base;
2651 struct nv50_disp_sor_hda_eld_v0 eld;
2654 .base.method = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
2655 .base.hasht = nv_encoder->dcb->hasht,
2656 .base.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
2657 (0x0100 << nv_crtc->index),
2660 nvif_mthd(disp->disp, 0, &args, sizeof(args));
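/* Program the HDA ELD for audio-capable sinks via the SOR_HDA_ELD method.
 * The ELD is taken from the connector's EDID; sinks without audio support
 * (drm_detect_monitor_audio()) are skipped.
 */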
2664 nv50_audio_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
2666 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2667 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
2668 struct nouveau_connector *nv_connector;
2669 struct nv50_disp *disp = nv50_disp(encoder->dev);
2672 struct nv50_disp_mthd_v1 mthd;
2673 struct nv50_disp_sor_hda_eld_v0 eld;
2675 u8 data[sizeof(nv_connector->base.eld)];
2677 .base.mthd.version = 1,
2678 .base.mthd.method = NV50_DISP_MTHD_V1_SOR_HDA_ELD,
2679 .base.mthd.hasht = nv_encoder->dcb->hasht,
2680 .base.mthd.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
2681 (0x0100 << nv_crtc->index),
2684 nv_connector = nouveau_encoder_connector_get(nv_encoder);
2685 if (!drm_detect_monitor_audio(nv_connector->edid))
2688 memcpy(args.data, nv_connector->base.eld, sizeof(args.data));
2690 nvif_mthd(disp->disp, 0, &args,
2691 sizeof(args.base) + drm_eld_size(args.data));
2694 /******************************************************************************
2695 * HDMI
2696 *****************************************************************************/
2698 nv50_hdmi_disable(struct drm_encoder *encoder, struct nouveau_crtc *nv_crtc)
2700 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2701 struct nv50_disp *disp = nv50_disp(encoder->dev);
2703 struct nv50_disp_mthd_v1 base;
2704 struct nv50_disp_sor_hdmi_pwr_v0 pwr;
2707 .base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
2708 .base.hasht = nv_encoder->dcb->hasht,
2709 .base.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
2710 (0x0100 << nv_crtc->index),
2713 nvif_mthd(disp->disp, 0, &args, sizeof(args));
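/* Enable HDMI infoframes and audio for an HDMI sink.  The AVI and (optional)
 * vendor infoframes are packed into the method payload, and max_ac_packet is
 * derived from the horizontal blanking period less the rekey value and a
 * fixed constant, divided by 32, matching the binary driver and tegra.
 */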
2717 nv50_hdmi_enable(struct drm_encoder *encoder, struct drm_display_mode *mode)
2719 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
2720 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
2721 struct nv50_disp *disp = nv50_disp(encoder->dev);
2723 struct nv50_disp_mthd_v1 base;
2724 struct nv50_disp_sor_hdmi_pwr_v0 pwr;
2725 u8 infoframes[2 * 17]; /* two frames, up to 17 bytes each */
2728 .base.method = NV50_DISP_MTHD_V1_SOR_HDMI_PWR,
2729 .base.hasht = nv_encoder->dcb->hasht,
2730 .base.hashm = (0xf0ff & nv_encoder->dcb->hashm) |
2731 (0x0100 << nv_crtc->index),
2733 .pwr.rekey = 56, /* constant used by both the binary driver and tegra */
2735 struct nouveau_connector *nv_connector;
2737 union hdmi_infoframe avi_frame;
2738 union hdmi_infoframe vendor_frame;
2742 nv_connector = nouveau_encoder_connector_get(nv_encoder);
2743 if (!drm_detect_hdmi_monitor(nv_connector->edid))
2746 ret = drm_hdmi_avi_infoframe_from_display_mode(&avi_frame.avi, mode,
2749 /* We have an AVI InfoFrame, pack it for the display */
2750 args.pwr.avi_infoframe_length
2751 = hdmi_infoframe_pack(&avi_frame, args.infoframes, 17);
2754 ret = drm_hdmi_vendor_infoframe_from_display_mode(&vendor_frame.vendor.hdmi,
2755 &nv_connector->base, mode);
2757 /* We have a Vendor InfoFrame, pack it for the display */
2758 args.pwr.vendor_infoframe_length
2759 = hdmi_infoframe_pack(&vendor_frame,
2761 + args.pwr.avi_infoframe_length,
2765 max_ac_packet = mode->htotal - mode->hdisplay;
2766 max_ac_packet -= args.pwr.rekey;
2767 max_ac_packet -= 18; /* constant from tegra */
2768 args.pwr.max_ac_packet = max_ac_packet / 32;
2770 size = sizeof(args.base)
2772 + args.pwr.avi_infoframe_length
2773 + args.pwr.vendor_infoframe_length;
2774 nvif_mthd(disp->disp, 0, &args, size);
2775 nv50_audio_enable(encoder, mode);
2778 /******************************************************************************
2779 * MST
2780 *****************************************************************************/
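/* DP MST support is split across three objects: nv50_mstm wraps the DP MST
 * topology manager for a physical SOR, nv50_mstc is the DRM connector created
 * for each MST port, and nv50_msto is the virtual DPMST encoder (one per
 * head) that routes a head to a port.
 */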
2781 #define nv50_mstm(p) container_of((p), struct nv50_mstm, mgr)
2782 #define nv50_mstc(p) container_of((p), struct nv50_mstc, connector)
2783 #define nv50_msto(p) container_of((p), struct nv50_msto, encoder)
2786 struct nouveau_encoder *outp;
2788 struct drm_dp_mst_topology_mgr mgr;
2789 struct nv50_msto *msto[4];
2797 struct nv50_mstm *mstm;
2798 struct drm_dp_mst_port *port;
2799 struct drm_connector connector;
2801 struct drm_display_mode *native;
2808 struct drm_encoder encoder;
2810 struct nv50_head *head;
2811 struct nv50_mstc *mstc;
2815 static struct drm_dp_payload *
2816 nv50_msto_payload(struct nv50_msto *msto)
2818 struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
2819 struct nv50_mstc *mstc = msto->mstc;
2820 struct nv50_mstm *mstm = mstc->mstm;
2821 int vcpi = mstc->port->vcpi.vcpi, i;
2823 NV_ATOMIC(drm, "%s: vcpi %d\n", msto->encoder.name, vcpi);
2824 for (i = 0; i < mstm->mgr.max_payloads; i++) {
2825 struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
2826 NV_ATOMIC(drm, "%s: %d: vcpi %d start 0x%02x slots 0x%02x\n",
2827 mstm->outp->base.base.name, i, payload->vcpi,
2828 payload->start_slot, payload->num_slots);
2831 for (i = 0; i < mstm->mgr.max_payloads; i++) {
2832 struct drm_dp_payload *payload = &mstm->mgr.payloads[i];
2833 if (payload->vcpi == vcpi)
2841 nv50_msto_cleanup(struct nv50_msto *msto)
2843 struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
2844 struct nv50_mstc *mstc = msto->mstc;
2845 struct nv50_mstm *mstm = mstc->mstm;
2847 NV_ATOMIC(drm, "%s: msto cleanup\n", msto->encoder.name);
2848 if (mstc->port && mstc->port->vcpi.vcpi > 0 && !nv50_msto_payload(msto))
2849 drm_dp_mst_deallocate_vcpi(&mstm->mgr, mstc->port);
2850 if (msto->disabled) {
2853 msto->disabled = false;
2858 nv50_msto_prepare(struct nv50_msto *msto)
2860 struct nouveau_drm *drm = nouveau_drm(msto->encoder.dev);
2861 struct nv50_mstc *mstc = msto->mstc;
2862 struct nv50_mstm *mstm = mstc->mstm;
2864 struct nv50_disp_mthd_v1 base;
2865 struct nv50_disp_sor_dp_mst_vcpi_v0 vcpi;
2868 .base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_VCPI,
2869 .base.hasht = mstm->outp->dcb->hasht,
2870 .base.hashm = (0xf0ff & mstm->outp->dcb->hashm) |
2871 (0x0100 << msto->head->base.index),
2874 NV_ATOMIC(drm, "%s: msto prepare\n", msto->encoder.name);
2875 if (mstc->port && mstc->port->vcpi.vcpi > 0) {
2876 struct drm_dp_payload *payload = nv50_msto_payload(msto);
2878 args.vcpi.start_slot = payload->start_slot;
2879 args.vcpi.num_slots = payload->num_slots;
2880 args.vcpi.pbn = mstc->port->vcpi.pbn;
2881 args.vcpi.aligned_pbn = mstc->port->vcpi.aligned_pbn;
2885 NV_ATOMIC(drm, "%s: %s: %02x %02x %04x %04x\n",
2886 msto->encoder.name, msto->head->base.base.name,
2887 args.vcpi.start_slot, args.vcpi.num_slots,
2888 args.vcpi.pbn, args.vcpi.aligned_pbn);
2889 nvif_mthd(&drm->display->disp, 0, &args, sizeof(args));
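/* Validate an MST stream: convert the adjusted mode clock and the connector's
 * bpc into a PBN value, then check that enough VCPI slots are available on
 * the topology before the commit is allowed to proceed.
 */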
2893 nv50_msto_atomic_check(struct drm_encoder *encoder,
2894 struct drm_crtc_state *crtc_state,
2895 struct drm_connector_state *conn_state)
2897 struct nv50_mstc *mstc = nv50_mstc(conn_state->connector);
2898 struct nv50_mstm *mstm = mstc->mstm;
2899 int bpp = conn_state->connector->display_info.bpc * 3;
2902 mstc->pbn = drm_dp_calc_pbn_mode(crtc_state->adjusted_mode.clock, bpp);
2904 slots = drm_dp_find_vcpi_slots(&mstm->mgr, mstc->pbn);
2908 return nv50_outp_atomic_check_view(encoder, crtc_state, conn_state,
2913 nv50_msto_enable(struct drm_encoder *encoder)
2915 struct nv50_head *head = nv50_head(encoder->crtc);
2916 struct nv50_msto *msto = nv50_msto(encoder);
2917 struct nv50_mstc *mstc = NULL;
2918 struct nv50_mstm *mstm = NULL;
2919 struct drm_connector *connector;
2920 struct drm_connector_list_iter conn_iter;
2925 drm_connector_list_iter_begin(encoder->dev, &conn_iter);
2926 drm_for_each_connector_iter(connector, &conn_iter) {
2927 if (connector->state->best_encoder == &msto->encoder) {
2928 mstc = nv50_mstc(connector);
2933 drm_connector_list_iter_end(&conn_iter);
2938 slots = drm_dp_find_vcpi_slots(&mstm->mgr, mstc->pbn);
2939 r = drm_dp_mst_allocate_vcpi(&mstm->mgr, mstc->port, mstc->pbn, slots);
2943 nv50_outp_acquire(mstm->outp);
2945 if (mstm->outp->link & 1)
2950 switch (mstc->connector.display_info.bpc) {
2951 case 6: depth = 0x2; break;
2952 case 8: depth = 0x5; break;
2954 default: depth = 0x6; break;
2957 mstm->outp->update(mstm->outp, head->base.index,
2958 &head->base.base.state->adjusted_mode, proto, depth);
2962 mstm->modified = true;
2966 nv50_msto_disable(struct drm_encoder *encoder)
2968 struct nv50_msto *msto = nv50_msto(encoder);
2969 struct nv50_mstc *mstc = msto->mstc;
2970 struct nv50_mstm *mstm = mstc->mstm;
2973 drm_dp_mst_reset_vcpi_slots(&mstm->mgr, mstc->port);
2975 mstm->outp->update(mstm->outp, msto->head->base.index, NULL, 0, 0);
2976 mstm->modified = true;
2978 mstm->disabled = true;
2979 msto->disabled = true;
2982 static const struct drm_encoder_helper_funcs
2984 .disable = nv50_msto_disable,
2985 .enable = nv50_msto_enable,
2986 .atomic_check = nv50_msto_atomic_check,
2990 nv50_msto_destroy(struct drm_encoder *encoder)
2992 struct nv50_msto *msto = nv50_msto(encoder);
2993 drm_encoder_cleanup(&msto->encoder);
2997 static const struct drm_encoder_funcs
2999 .destroy = nv50_msto_destroy,
3003 nv50_msto_new(struct drm_device *dev, u32 heads, const char *name, int id,
3004 struct nv50_msto **pmsto)
3006 struct nv50_msto *msto;
3009 if (!(msto = *pmsto = kzalloc(sizeof(*msto), GFP_KERNEL)))
3012 ret = drm_encoder_init(dev, &msto->encoder, &nv50_msto,
3013 DRM_MODE_ENCODER_DPMST, "%s-mst-%d", name, id);
3020 drm_encoder_helper_add(&msto->encoder, &nv50_msto_help);
3021 msto->encoder.possible_crtcs = heads;
3025 static struct drm_encoder *
3026 nv50_mstc_atomic_best_encoder(struct drm_connector *connector,
3027 struct drm_connector_state *connector_state)
3029 struct nv50_head *head = nv50_head(connector_state->crtc);
3030 struct nv50_mstc *mstc = nv50_mstc(connector);
3032 struct nv50_mstm *mstm = mstc->mstm;
3033 return &mstm->msto[head->base.index]->encoder;
3038 static struct drm_encoder *
3039 nv50_mstc_best_encoder(struct drm_connector *connector)
3041 struct nv50_mstc *mstc = nv50_mstc(connector);
3043 struct nv50_mstm *mstm = mstc->mstm;
3044 return &mstm->msto[0]->encoder;
3049 static enum drm_mode_status
3050 nv50_mstc_mode_valid(struct drm_connector *connector,
3051 struct drm_display_mode *mode)
3057 nv50_mstc_get_modes(struct drm_connector *connector)
3059 struct nv50_mstc *mstc = nv50_mstc(connector);
3062 mstc->edid = drm_dp_mst_get_edid(&mstc->connector, mstc->port->mgr, mstc->port);
3063 drm_mode_connector_update_edid_property(&mstc->connector, mstc->edid);
3065 ret = drm_add_edid_modes(&mstc->connector, mstc->edid);
3067 if (!mstc->connector.display_info.bpc)
3068 mstc->connector.display_info.bpc = 8;
3071 drm_mode_destroy(mstc->connector.dev, mstc->native);
3072 mstc->native = nouveau_conn_native_mode(&mstc->connector);
3076 static const struct drm_connector_helper_funcs
3078 .get_modes = nv50_mstc_get_modes,
3079 .mode_valid = nv50_mstc_mode_valid,
3080 .best_encoder = nv50_mstc_best_encoder,
3081 .atomic_best_encoder = nv50_mstc_atomic_best_encoder,
3084 static enum drm_connector_status
3085 nv50_mstc_detect(struct drm_connector *connector, bool force)
3087 struct nv50_mstc *mstc = nv50_mstc(connector);
3089 return connector_status_disconnected;
3090 return drm_dp_mst_detect_port(connector, mstc->port->mgr, mstc->port);
3094 nv50_mstc_destroy(struct drm_connector *connector)
3096 struct nv50_mstc *mstc = nv50_mstc(connector);
3097 drm_connector_cleanup(&mstc->connector);
3101 static const struct drm_connector_funcs
3103 .reset = nouveau_conn_reset,
3104 .detect = nv50_mstc_detect,
3105 .fill_modes = drm_helper_probe_single_connector_modes,
3106 .destroy = nv50_mstc_destroy,
3107 .atomic_duplicate_state = nouveau_conn_atomic_duplicate_state,
3108 .atomic_destroy_state = nouveau_conn_atomic_destroy_state,
3109 .atomic_set_property = nouveau_conn_atomic_set_property,
3110 .atomic_get_property = nouveau_conn_atomic_get_property,
3114 nv50_mstc_new(struct nv50_mstm *mstm, struct drm_dp_mst_port *port,
3115 const char *path, struct nv50_mstc **pmstc)
3117 struct drm_device *dev = mstm->outp->base.base.dev;
3118 struct nv50_mstc *mstc;
3121 if (!(mstc = *pmstc = kzalloc(sizeof(*mstc), GFP_KERNEL)))
3126 ret = drm_connector_init(dev, &mstc->connector, &nv50_mstc,
3127 DRM_MODE_CONNECTOR_DisplayPort);
3134 drm_connector_helper_add(&mstc->connector, &nv50_mstc_help);
3136 mstc->connector.funcs->reset(&mstc->connector);
3137 nouveau_conn_attach_properties(&mstc->connector);
3139 for (i = 0; i < ARRAY_SIZE(mstm->msto) && mstm->msto[i]; i++)
3140 drm_mode_connector_attach_encoder(&mstc->connector, &mstm->msto[i]->encoder);
3142 drm_object_attach_property(&mstc->connector.base, dev->mode_config.path_property, 0);
3143 drm_object_attach_property(&mstc->connector.base, dev->mode_config.tile_property, 0);
3144 drm_mode_connector_set_path_property(&mstc->connector, path);
3149 nv50_mstm_cleanup(struct nv50_mstm *mstm)
3151 struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
3152 struct drm_encoder *encoder;
3155 NV_ATOMIC(drm, "%s: mstm cleanup\n", mstm->outp->base.base.name);
3156 ret = drm_dp_check_act_status(&mstm->mgr);
3158 ret = drm_dp_update_payload_part2(&mstm->mgr);
3160 drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
3161 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
3162 struct nv50_msto *msto = nv50_msto(encoder);
3163 struct nv50_mstc *mstc = msto->mstc;
3164 if (mstc && mstc->mstm == mstm)
3165 nv50_msto_cleanup(msto);
3169 mstm->modified = false;
3173 nv50_mstm_prepare(struct nv50_mstm *mstm)
3175 struct nouveau_drm *drm = nouveau_drm(mstm->outp->base.base.dev);
3176 struct drm_encoder *encoder;
3179 NV_ATOMIC(drm, "%s: mstm prepare\n", mstm->outp->base.base.name);
3180 ret = drm_dp_update_payload_part1(&mstm->mgr);
3182 drm_for_each_encoder(encoder, mstm->outp->base.base.dev) {
3183 if (encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
3184 struct nv50_msto *msto = nv50_msto(encoder);
3185 struct nv50_mstc *mstc = msto->mstc;
3186 if (mstc && mstc->mstm == mstm)
3187 nv50_msto_prepare(msto);
3191 if (mstm->disabled) {
3193 nv50_outp_release(mstm->outp);
3194 mstm->disabled = false;
3199 nv50_mstm_hotplug(struct drm_dp_mst_topology_mgr *mgr)
3201 struct nv50_mstm *mstm = nv50_mstm(mgr);
3202 drm_kms_helper_hotplug_event(mstm->outp->base.base.dev);
3206 nv50_mstm_destroy_connector(struct drm_dp_mst_topology_mgr *mgr,
3207 struct drm_connector *connector)
3209 struct nouveau_drm *drm = nouveau_drm(connector->dev);
3210 struct nv50_mstc *mstc = nv50_mstc(connector);
3212 drm_connector_unregister(&mstc->connector);
3214 drm_modeset_lock_all(drm->dev);
3215 drm_fb_helper_remove_one_connector(&drm->fbcon->helper, &mstc->connector);
3217 drm_modeset_unlock_all(drm->dev);
3219 drm_connector_unreference(&mstc->connector);
3223 nv50_mstm_register_connector(struct drm_connector *connector)
3225 struct nouveau_drm *drm = nouveau_drm(connector->dev);
3227 drm_modeset_lock_all(drm->dev);
3228 drm_fb_helper_add_one_connector(&drm->fbcon->helper, connector);
3229 drm_modeset_unlock_all(drm->dev);
3231 drm_connector_register(connector);
3234 static struct drm_connector *
3235 nv50_mstm_add_connector(struct drm_dp_mst_topology_mgr *mgr,
3236 struct drm_dp_mst_port *port, const char *path)
3238 struct nv50_mstm *mstm = nv50_mstm(mgr);
3239 struct nv50_mstc *mstc;
3242 ret = nv50_mstc_new(mstm, port, path, &mstc);
3245 mstc->connector.funcs->destroy(&mstc->connector);
3249 return &mstc->connector;
3252 static const struct drm_dp_mst_topology_cbs
3254 .add_connector = nv50_mstm_add_connector,
3255 .register_connector = nv50_mstm_register_connector,
3256 .destroy_connector = nv50_mstm_destroy_connector,
3257 .hotplug = nv50_mstm_hotplug,
3261 nv50_mstm_service(struct nv50_mstm *mstm)
3263 struct drm_dp_aux *aux = mstm ? mstm->mgr.aux : NULL;
3264 bool handled = true;
3272 ret = drm_dp_dpcd_read(aux, DP_SINK_COUNT_ESI, esi, 8);
3274 drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
3278 drm_dp_mst_hpd_irq(&mstm->mgr, esi, &handled);
3282 drm_dp_dpcd_write(aux, DP_SINK_COUNT_ESI + 1, &esi[1], 3);
3287 nv50_mstm_remove(struct nv50_mstm *mstm)
3290 drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, false);
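/* Switch the sink and the SOR between MST and single-stream operation: update
 * the sink's DP_MSTM_CTRL DPCD register, then ask NVKM to reconfigure the
 * link via the SOR_DP_MST_LINK method.
 */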
3294 nv50_mstm_enable(struct nv50_mstm *mstm, u8 dpcd, int state)
3296 struct nouveau_encoder *outp = mstm->outp;
3298 struct nv50_disp_mthd_v1 base;
3299 struct nv50_disp_sor_dp_mst_link_v0 mst;
3302 .base.method = NV50_DISP_MTHD_V1_SOR_DP_MST_LINK,
3303 .base.hasht = outp->dcb->hasht,
3304 .base.hashm = outp->dcb->hashm,
3307 struct nouveau_drm *drm = nouveau_drm(outp->base.base.dev);
3308 struct nvif_object *disp = &drm->display->disp;
3312 ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CTRL, &dpcd);
3320 ret = drm_dp_dpcd_writeb(mstm->mgr.aux, DP_MSTM_CTRL, dpcd);
3325 return nvif_mthd(disp, 0, &args, sizeof(args));
3329 nv50_mstm_detect(struct nv50_mstm *mstm, u8 dpcd[8], int allow)
3336 if (dpcd[0] >= 0x12) {
3337 ret = drm_dp_dpcd_readb(mstm->mgr.aux, DP_MSTM_CAP, &dpcd[1]);
3341 if (!(dpcd[1] & DP_MST_CAP))
3347 ret = nv50_mstm_enable(mstm, dpcd[0], state);
3351 ret = drm_dp_mst_topology_mgr_set_mst(&mstm->mgr, state);
3353 return nv50_mstm_enable(mstm, dpcd[0], 0);
3355 return mstm->mgr.mst_state;
3359 nv50_mstm_fini(struct nv50_mstm *mstm)
3361 if (mstm && mstm->mgr.mst_state)
3362 drm_dp_mst_topology_mgr_suspend(&mstm->mgr);
3366 nv50_mstm_init(struct nv50_mstm *mstm)
3368 if (mstm && mstm->mgr.mst_state)
3369 drm_dp_mst_topology_mgr_resume(&mstm->mgr);
3373 nv50_mstm_del(struct nv50_mstm **pmstm)
3375 struct nv50_mstm *mstm = *pmstm;
3383 nv50_mstm_new(struct nouveau_encoder *outp, struct drm_dp_aux *aux, int aux_max,
3384 int conn_base_id, struct nv50_mstm **pmstm)
3386 const int max_payloads = hweight8(outp->dcb->heads);
3387 struct drm_device *dev = outp->base.base.dev;
3388 struct nv50_mstm *mstm;
3392 /* This is a workaround for some monitors not functioning
3393 * correctly in MST mode on initial module load. I think
3394 * some bad interaction with the VBIOS may be responsible.
3396 * A good ol' off and on again seems to work here ;)
3398 ret = drm_dp_dpcd_readb(aux, DP_DPCD_REV, &dpcd);
3399 if (ret >= 0 && dpcd >= 0x12)
3400 drm_dp_dpcd_writeb(aux, DP_MSTM_CTRL, 0);
3402 if (!(mstm = *pmstm = kzalloc(sizeof(*mstm), GFP_KERNEL)))
3405 mstm->mgr.cbs = &nv50_mstm;
3407 ret = drm_dp_mst_topology_mgr_init(&mstm->mgr, dev, aux, aux_max,
3408 max_payloads, conn_base_id);
3412 for (i = 0; i < max_payloads; i++) {
3413 ret = nv50_msto_new(dev, outp->dcb->heads, outp->base.base.name,
3422 /******************************************************************************
3423 * SOR
3424 *****************************************************************************/
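/* Write the SOR control word for a head.  nv_encoder->ctrl tracks which heads
 * currently use this SOR and the active protocol; pre-GF110 cores also carry
 * sync polarity and depth in the same word, while GF110 and later program
 * those through the per-head method at 0x0404 instead.
 */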
3426 nv50_sor_update(struct nouveau_encoder *nv_encoder, u8 head,
3427 struct drm_display_mode *mode, u8 proto, u8 depth)
3429 struct nv50_dmac *core = &nv50_mast(nv_encoder->base.base.dev)->base;
3433 nv_encoder->ctrl &= ~BIT(head);
3434 if (!(nv_encoder->ctrl & 0x0000000f))
3435 nv_encoder->ctrl = 0;
3437 nv_encoder->ctrl |= proto << 8;
3438 nv_encoder->ctrl |= BIT(head);
3441 if ((push = evo_wait(core, 6))) {
3442 if (core->base.user.oclass < GF110_DISP_CORE_CHANNEL_DMA) {
3444 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
3445 nv_encoder->ctrl |= 0x00001000;
3446 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
3447 nv_encoder->ctrl |= 0x00002000;
3448 nv_encoder->ctrl |= depth << 16;
3450 evo_mthd(push, 0x0600 + (nv_encoder->or * 0x40), 1);
3453 u32 magic = 0x31ec6000 | (head << 25);
3454 u32 syncs = 0x00000001;
3455 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
3456 syncs |= 0x00000008;
3457 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
3458 syncs |= 0x00000010;
3459 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
3460 magic |= 0x00000001;
3462 evo_mthd(push, 0x0404 + (head * 0x300), 2);
3463 evo_data(push, syncs | (depth << 6));
3464 evo_data(push, magic);
3466 evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
3468 evo_data(push, nv_encoder->ctrl);
3469 evo_kick(push, core);
3474 nv50_sor_disable(struct drm_encoder *encoder)
3476 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
3477 struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);
3479 nv_encoder->crtc = NULL;
3482 struct nvkm_i2c_aux *aux = nv_encoder->aux;
3486 int ret = nvkm_rdaux(aux, DP_SET_POWER, &pwr, 1);
3488 pwr &= ~DP_SET_POWER_MASK;
3489 pwr |= DP_SET_POWER_D3;
3490 nvkm_wraux(aux, DP_SET_POWER, &pwr, 1);
3494 nv_encoder->update(nv_encoder, nv_crtc->index, NULL, 0, 0);
3495 nv50_audio_disable(encoder, nv_crtc);
3496 nv50_hdmi_disable(&nv_encoder->base.base, nv_crtc);
3497 nv50_outp_release(nv_encoder);
3502 nv50_sor_enable(struct drm_encoder *encoder)
3504 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
3505 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
3506 struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
3508 struct nv50_disp_mthd_v1 base;
3509 struct nv50_disp_sor_lvds_script_v0 lvds;
3512 .base.method = NV50_DISP_MTHD_V1_SOR_LVDS_SCRIPT,
3513 .base.hasht = nv_encoder->dcb->hasht,
3514 .base.hashm = nv_encoder->dcb->hashm,
3516 struct nv50_disp *disp = nv50_disp(encoder->dev);
3517 struct drm_device *dev = encoder->dev;
3518 struct nouveau_drm *drm = nouveau_drm(dev);
3519 struct nouveau_connector *nv_connector;
3520 struct nvbios *bios = &drm->vbios;
3524 nv_connector = nouveau_encoder_connector_get(nv_encoder);
3525 nv_encoder->crtc = encoder->crtc;
3526 nv50_outp_acquire(nv_encoder);
3528 switch (nv_encoder->dcb->type) {
3529 case DCB_OUTPUT_TMDS:
3530 if (nv_encoder->link & 1) {
3532 /* Only enable dual-link if:
3533 * - Need to (i.e. rate > 165MHz)
3534 * - DCB says dual-link is possible
3535 * - Not an HDMI monitor, since there's no dual-link
3536 *   on HDMI.
3537 */
3538 if (mode->clock >= 165000 &&
3539 nv_encoder->dcb->duallink_possible &&
3540 !drm_detect_hdmi_monitor(nv_connector->edid))
3546 nv50_hdmi_enable(&nv_encoder->base.base, mode);
3548 case DCB_OUTPUT_LVDS:
3551 if (bios->fp_no_ddc) {
3552 if (bios->fp.dual_link)
3553 lvds.lvds.script |= 0x0100;
3554 if (bios->fp.if_is_24bit)
3555 lvds.lvds.script |= 0x0200;
3557 if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
3558 if (((u8 *)nv_connector->edid)[121] == 2)
3559 lvds.lvds.script |= 0x0100;
3561 if (mode->clock >= bios->fp.duallink_transition_clk) {
3562 lvds.lvds.script |= 0x0100;
3565 if (lvds.lvds.script & 0x0100) {
3566 if (bios->fp.strapless_is_24bit & 2)
3567 lvds.lvds.script |= 0x0200;
3569 if (bios->fp.strapless_is_24bit & 1)
3570 lvds.lvds.script |= 0x0200;
3573 if (nv_connector->base.display_info.bpc == 8)
3574 lvds.lvds.script |= 0x0200;
3577 nvif_mthd(disp->disp, 0, &lvds, sizeof(lvds));
3580 if (nv_connector->base.display_info.bpc == 6)
3583 if (nv_connector->base.display_info.bpc == 8)
3588 if (nv_encoder->link & 1)
3593 nv50_audio_enable(encoder, mode);
3600 nv_encoder->update(nv_encoder, nv_crtc->index, mode, proto, depth);
3603 static const struct drm_encoder_helper_funcs
3605 .atomic_check = nv50_outp_atomic_check,
3606 .enable = nv50_sor_enable,
3607 .disable = nv50_sor_disable,
3611 nv50_sor_destroy(struct drm_encoder *encoder)
3613 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
3614 nv50_mstm_del(&nv_encoder->dp.mstm);
3615 drm_encoder_cleanup(encoder);
3619 static const struct drm_encoder_funcs
3621 .destroy = nv50_sor_destroy,
3625 nv50_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
3627 struct nouveau_connector *nv_connector = nouveau_connector(connector);
3628 struct nouveau_drm *drm = nouveau_drm(connector->dev);
3629 struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
3630 struct nouveau_encoder *nv_encoder;
3631 struct drm_encoder *encoder;
3634 switch (dcbe->type) {
3635 case DCB_OUTPUT_LVDS: type = DRM_MODE_ENCODER_LVDS; break;
3636 case DCB_OUTPUT_TMDS:
3639 type = DRM_MODE_ENCODER_TMDS;
3643 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
3646 nv_encoder->dcb = dcbe;
3647 nv_encoder->update = nv50_sor_update;
3649 encoder = to_drm_encoder(nv_encoder);
3650 encoder->possible_crtcs = dcbe->heads;
3651 encoder->possible_clones = 0;
3652 drm_encoder_init(connector->dev, encoder, &nv50_sor_func, type,
3653 "sor-%04x-%04x", dcbe->hasht, dcbe->hashm);
3654 drm_encoder_helper_add(encoder, &nv50_sor_help);
3656 drm_mode_connector_attach_encoder(connector, encoder);
3658 if (dcbe->type == DCB_OUTPUT_DP) {
3659 struct nv50_disp *disp = nv50_disp(encoder->dev);
3660 struct nvkm_i2c_aux *aux =
3661 nvkm_i2c_aux_find(i2c, dcbe->i2c_index);
3663 if (disp->disp->oclass < GF110_DISP) {
3664 /* HW has no support for address-only
3665 * transactions, so we're required to
3666 * use custom I2C-over-AUX code.
3668 nv_encoder->i2c = &aux->i2c;
3670 nv_encoder->i2c = &nv_connector->aux.ddc;
3672 nv_encoder->aux = aux;
3675 /* TODO: Use DP Info Table to check for support. */
3676 if (disp->disp->oclass >= GF110_DISP) {
3677 ret = nv50_mstm_new(nv_encoder, &nv_connector->aux, 16,
3678 nv_connector->base.base.id,
3679 &nv_encoder->dp.mstm);
3684 struct nvkm_i2c_bus *bus =
3685 nvkm_i2c_bus_find(i2c, dcbe->i2c_index);
3687 nv_encoder->i2c = &bus->i2c;
3693 /******************************************************************************
3694 * PIOR
3695 *****************************************************************************/
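/* PIORs feed external encoders (e.g. board-level TMDS/DP transmitters).
 * Note that atomic_check doubles the adjusted mode clock before it is
 * programmed, presumably to account for how the external link is clocked.
 */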
3697 nv50_pior_atomic_check(struct drm_encoder *encoder,
3698 struct drm_crtc_state *crtc_state,
3699 struct drm_connector_state *conn_state)
3701 int ret = nv50_outp_atomic_check(encoder, crtc_state, conn_state);
3704 crtc_state->adjusted_mode.clock *= 2;
3709 nv50_pior_disable(struct drm_encoder *encoder)
3711 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
3712 struct nv50_mast *mast = nv50_mast(encoder->dev);
3713 const int or = nv_encoder->or;
3716 if (nv_encoder->crtc) {
3717 push = evo_wait(mast, 4);
3719 if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
3720 evo_mthd(push, 0x0700 + (or * 0x040), 1);
3721 evo_data(push, 0x00000000);
3723 evo_kick(push, mast);
3727 nv_encoder->crtc = NULL;
3728 nv50_outp_release(nv_encoder);
3732 nv50_pior_enable(struct drm_encoder *encoder)
3734 struct nv50_mast *mast = nv50_mast(encoder->dev);
3735 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
3736 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
3737 struct nouveau_connector *nv_connector;
3738 struct drm_display_mode *mode = &nv_crtc->base.state->adjusted_mode;
3739 u8 owner = 1 << nv_crtc->index;
3743 nv50_outp_acquire(nv_encoder);
3745 nv_connector = nouveau_encoder_connector_get(nv_encoder);
3746 switch (nv_connector->base.display_info.bpc) {
3747 case 10: depth = 0x6; break;
3748 case 8: depth = 0x5; break;
3749 case 6: depth = 0x2; break;
3750 default: depth = 0x0; break;
3753 switch (nv_encoder->dcb->type) {
3754 case DCB_OUTPUT_TMDS:
3763 push = evo_wait(mast, 8);
3765 if (nv50_vers(mast) < GF110_DISP_CORE_CHANNEL_DMA) {
3766 u32 ctrl = (depth << 16) | (proto << 8) | owner;
3767 if (mode->flags & DRM_MODE_FLAG_NHSYNC)
3769 if (mode->flags & DRM_MODE_FLAG_NVSYNC)
3771 evo_mthd(push, 0x0700 + (nv_encoder->or * 0x040), 1);
3772 evo_data(push, ctrl);
3775 evo_kick(push, mast);
3778 nv_encoder->crtc = encoder->crtc;
3781 static const struct drm_encoder_helper_funcs
3783 .atomic_check = nv50_pior_atomic_check,
3784 .enable = nv50_pior_enable,
3785 .disable = nv50_pior_disable,
3789 nv50_pior_destroy(struct drm_encoder *encoder)
3791 drm_encoder_cleanup(encoder);
3795 static const struct drm_encoder_funcs
3797 .destroy = nv50_pior_destroy,
3801 nv50_pior_create(struct drm_connector *connector, struct dcb_output *dcbe)
3803 struct nouveau_connector *nv_connector = nouveau_connector(connector);
3804 struct nouveau_drm *drm = nouveau_drm(connector->dev);
3805 struct nvkm_i2c *i2c = nvxx_i2c(&drm->client.device);
3806 struct nvkm_i2c_bus *bus = NULL;
3807 struct nvkm_i2c_aux *aux = NULL;
3808 struct i2c_adapter *ddc;
3809 struct nouveau_encoder *nv_encoder;
3810 struct drm_encoder *encoder;
3813 switch (dcbe->type) {
3814 case DCB_OUTPUT_TMDS:
3815 bus = nvkm_i2c_bus_find(i2c, NVKM_I2C_BUS_EXT(dcbe->extdev));
3816 ddc = bus ? &bus->i2c : NULL;
3817 type = DRM_MODE_ENCODER_TMDS;
3820 aux = nvkm_i2c_aux_find(i2c, NVKM_I2C_AUX_EXT(dcbe->extdev));
3821 ddc = aux ? &nv_connector->aux.ddc : NULL;
3822 type = DRM_MODE_ENCODER_TMDS;
3828 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
3831 nv_encoder->dcb = dcbe;
3832 nv_encoder->i2c = ddc;
3833 nv_encoder->aux = aux;
3835 encoder = to_drm_encoder(nv_encoder);
3836 encoder->possible_crtcs = dcbe->heads;
3837 encoder->possible_clones = 0;
3838 drm_encoder_init(connector->dev, encoder, &nv50_pior_func, type,
3839 "pior-%04x-%04x", dcbe->hasht, dcbe->hashm);
3840 drm_encoder_helper_add(encoder, &nv50_pior_help);
3842 drm_mode_connector_attach_encoder(connector, encoder);
3846 /******************************************************************************
3847 * Atomic
3848 *****************************************************************************/
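/* Kick a core channel update.  Any modified MST topologies are prepared
 * (payloads allocated) beforehand and cleaned up afterwards, and completion
 * is detected by polling a notifier word in the shared sync buffer, with a
 * two-second timeout.
 */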
3851 nv50_disp_atomic_commit_core(struct nouveau_drm *drm, u32 interlock)
3853 struct nv50_disp *disp = nv50_disp(drm->dev);
3854 struct nv50_dmac *core = &disp->mast.base;
3855 struct nv50_mstm *mstm;
3856 struct drm_encoder *encoder;
3859 NV_ATOMIC(drm, "commit core %08x\n", interlock);
3861 drm_for_each_encoder(encoder, drm->dev) {
3862 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
3863 mstm = nouveau_encoder(encoder)->dp.mstm;
3864 if (mstm && mstm->modified)
3865 nv50_mstm_prepare(mstm);
3869 if ((push = evo_wait(core, 5))) {
3870 evo_mthd(push, 0x0084, 1);
3871 evo_data(push, 0x80000000);
3872 evo_mthd(push, 0x0080, 2);
3873 evo_data(push, interlock);
3874 evo_data(push, 0x00000000);
3875 nouveau_bo_wr32(disp->sync, 0, 0x00000000);
3876 evo_kick(push, core);
3877 if (nvif_msec(&drm->client.device, 2000ULL,
3878 if (nouveau_bo_rd32(disp->sync, 0))
3882 NV_ERROR(drm, "EVO timeout\n");
3885 drm_for_each_encoder(encoder, drm->dev) {
3886 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
3887 mstm = nouveau_encoder(encoder)->dp.mstm;
3888 if (mstm && mstm->modified)
3889 nv50_mstm_cleanup(mstm);
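/* Apply an atomic state to the hardware.  The ordering follows the EVO
 * interlocking rules: disable heads, planes and output paths first (flushing
 * the core channel where required), then enable output paths, heads and
 * planes, and finally submit an interlocked core update and wait for the
 * windows to report completion.
 */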
3895 nv50_disp_atomic_commit_tail(struct drm_atomic_state *state)
3897 struct drm_device *dev = state->dev;
3898 struct drm_crtc_state *new_crtc_state, *old_crtc_state;
3899 struct drm_crtc *crtc;
3900 struct drm_plane_state *new_plane_state;
3901 struct drm_plane *plane;
3902 struct nouveau_drm *drm = nouveau_drm(dev);
3903 struct nv50_disp *disp = nv50_disp(dev);
3904 struct nv50_atom *atom = nv50_atom(state);
3905 struct nv50_outp_atom *outp, *outt;
3906 u32 interlock_core = 0;
3907 u32 interlock_chan = 0;
3910 NV_ATOMIC(drm, "commit %d %d\n", atom->lock_core, atom->flush_disable);
3911 drm_atomic_helper_wait_for_fences(dev, state, false);
3912 drm_atomic_helper_wait_for_dependencies(state);
3913 drm_atomic_helper_update_legacy_modeset_state(dev, state);
3915 if (atom->lock_core)
3916 mutex_lock(&disp->mutex);
3918 /* Disable head(s). */
3919 for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
3920 struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
3921 struct nv50_head *head = nv50_head(crtc);
3923 NV_ATOMIC(drm, "%s: clr %04x (set %04x)\n", crtc->name,
3924 asyh->clr.mask, asyh->set.mask);
3925 if (old_crtc_state->active && !new_crtc_state->active)
3926 drm_crtc_vblank_off(crtc);
3928 if (asyh->clr.mask) {
3929 nv50_head_flush_clr(head, asyh, atom->flush_disable);
3930 interlock_core |= 1;
3934 /* Disable plane(s). */
3935 for_each_new_plane_in_state(state, plane, new_plane_state, i) {
3936 struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
3937 struct nv50_wndw *wndw = nv50_wndw(plane);
3939 NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", plane->name,
3940 asyw->clr.mask, asyw->set.mask);
3941 if (!asyw->clr.mask)
3944 interlock_chan |= nv50_wndw_flush_clr(wndw, interlock_core,
3945 atom->flush_disable,
3949 /* Disable output path(s). */
3950 list_for_each_entry(outp, &atom->outp, head) {
3951 const struct drm_encoder_helper_funcs *help;
3952 struct drm_encoder *encoder;
3954 encoder = outp->encoder;
3955 help = encoder->helper_private;
3957 NV_ATOMIC(drm, "%s: clr %02x (set %02x)\n", encoder->name,
3958 outp->clr.mask, outp->set.mask);
3960 if (outp->clr.mask) {
3961 help->disable(encoder);
3962 interlock_core |= 1;
3963 if (outp->flush_disable) {
3964 nv50_disp_atomic_commit_core(drm, interlock_chan);
3971 /* Flush disable. */
3972 if (interlock_core) {
3973 if (atom->flush_disable) {
3974 nv50_disp_atomic_commit_core(drm, interlock_chan);
3980 /* Update output path(s). */
3981 list_for_each_entry_safe(outp, outt, &atom->outp, head) {
3982 const struct drm_encoder_helper_funcs *help;
3983 struct drm_encoder *encoder;
3985 encoder = outp->encoder;
3986 help = encoder->helper_private;
3988 NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", encoder->name,
3989 outp->set.mask, outp->clr.mask);
3991 if (outp->set.mask) {
3992 help->enable(encoder);
3996 list_del(&outp->head);
4000 /* Update head(s). */
4001 for_each_oldnew_crtc_in_state(state, crtc, old_crtc_state, new_crtc_state, i) {
4002 struct nv50_head_atom *asyh = nv50_head_atom(new_crtc_state);
4003 struct nv50_head *head = nv50_head(crtc);
4005 NV_ATOMIC(drm, "%s: set %04x (clr %04x)\n", crtc->name,
4006 asyh->set.mask, asyh->clr.mask);
4008 if (asyh->set.mask) {
4009 nv50_head_flush_set(head, asyh);
4013 if (new_crtc_state->active) {
4014 if (!old_crtc_state->active)
4015 drm_crtc_vblank_on(crtc);
4016 if (new_crtc_state->event)
4017 drm_crtc_vblank_get(crtc);
4021 /* Update plane(s). */
4022 for_each_new_plane_in_state(state, plane, new_plane_state, i) {
4023 struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
4024 struct nv50_wndw *wndw = nv50_wndw(plane);
4026 NV_ATOMIC(drm, "%s: set %02x (clr %02x)\n", plane->name,
4027 asyw->set.mask, asyw->clr.mask);
4028 if ( !asyw->set.mask &&
4029 (!asyw->clr.mask || atom->flush_disable))
4032 interlock_chan |= nv50_wndw_flush_set(wndw, interlock_core, asyw);
4036 if (interlock_core) {
4037 if (!interlock_chan && atom->state.legacy_cursor_update) {
4038 u32 *push = evo_wait(&disp->mast, 2);
4040 evo_mthd(push, 0x0080, 1);
4041 evo_data(push, 0x00000000);
4042 evo_kick(push, &disp->mast);
4045 nv50_disp_atomic_commit_core(drm, interlock_chan);
4049 if (atom->lock_core)
4050 mutex_unlock(&disp->mutex);
4052 /* Wait for HW to signal completion. */
4053 for_each_new_plane_in_state(state, plane, new_plane_state, i) {
4054 struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
4055 struct nv50_wndw *wndw = nv50_wndw(plane);
4056 int ret = nv50_wndw_wait_armed(wndw, asyw);
4058 NV_ERROR(drm, "%s: timeout\n", plane->name);
4061 for_each_new_crtc_in_state(state, crtc, new_crtc_state, i) {
4062 if (new_crtc_state->event) {
4063 unsigned long flags;
4064 /* Get correct count/ts if racing with vblank irq */
4065 if (new_crtc_state->active)
4066 drm_crtc_accurate_vblank_count(crtc);
4067 spin_lock_irqsave(&crtc->dev->event_lock, flags);
4068 drm_crtc_send_vblank_event(crtc, new_crtc_state->event);
4069 spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
4071 new_crtc_state->event = NULL;
4072 if (new_crtc_state->active)
4073 drm_crtc_vblank_put(crtc);
4077 drm_atomic_helper_commit_hw_done(state);
4078 drm_atomic_helper_cleanup_planes(dev, state);
4079 drm_atomic_helper_commit_cleanup_done(state);
4080 drm_atomic_state_put(state);
4084 nv50_disp_atomic_commit_work(struct work_struct *work)
4086 struct drm_atomic_state *state =
4087 container_of(work, typeof(*state), commit_work);
4088 nv50_disp_atomic_commit_tail(state);
4092 nv50_disp_atomic_commit(struct drm_device *dev,
4093 struct drm_atomic_state *state, bool nonblock)
4095 struct nouveau_drm *drm = nouveau_drm(dev);
4096 struct nv50_disp *disp = nv50_disp(dev);
4097 struct drm_plane_state *new_plane_state;
4098 struct drm_plane *plane;
4099 struct drm_crtc *crtc;
4100 bool active = false;
4103 ret = pm_runtime_get_sync(dev->dev);
4104 if (ret < 0 && ret != -EACCES)
4107 ret = drm_atomic_helper_setup_commit(state, nonblock);
4111 INIT_WORK(&state->commit_work, nv50_disp_atomic_commit_work);
4113 ret = drm_atomic_helper_prepare_planes(dev, state);
4118 ret = drm_atomic_helper_wait_for_fences(dev, state, true);
4123 ret = drm_atomic_helper_swap_state(state, true);
4127 for_each_new_plane_in_state(state, plane, new_plane_state, i) {
4128 struct nv50_wndw_atom *asyw = nv50_wndw_atom(new_plane_state);
4129 struct nv50_wndw *wndw = nv50_wndw(plane);
4131 if (asyw->set.image) {
4132 asyw->ntfy.handle = wndw->dmac->sync.handle;
4133 asyw->ntfy.offset = wndw->ntfy;
4134 asyw->ntfy.awaken = false;
4135 asyw->set.ntfy = true;
4136 nouveau_bo_wr32(disp->sync, wndw->ntfy / 4, 0x00000000);
4141 drm_atomic_state_get(state);
4144 queue_work(system_unbound_wq, &state->commit_work);
4146 nv50_disp_atomic_commit_tail(state);
4148 drm_for_each_crtc(crtc, dev) {
4149 if (crtc->state->enable) {
4150 if (!drm->have_disp_power_ref) {
4151 drm->have_disp_power_ref = true;
4159 if (!active && drm->have_disp_power_ref) {
4160 pm_runtime_put_autosuspend(dev->dev);
4161 drm->have_disp_power_ref = false;
4166 drm_atomic_helper_cleanup_planes(dev, state);
4168 pm_runtime_put_autosuspend(dev->dev);
4172 static struct nv50_outp_atom *
4173 nv50_disp_outp_atomic_add(struct nv50_atom *atom, struct drm_encoder *encoder)
4175 struct nv50_outp_atom *outp;
4177 list_for_each_entry(outp, &atom->outp, head) {
4178 if (outp->encoder == encoder)
4182 outp = kzalloc(sizeof(*outp), GFP_KERNEL);
4184 return ERR_PTR(-ENOMEM);
4186 list_add(&outp->head, &atom->outp);
4187 outp->encoder = encoder;
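/* Track output path (encoder) changes in the nv50_atom.  A modeset that turns
 * a path off records a clr.ctrl entry (and forces a flush-disable for MST
 * encoders), while one that turns a path on records set.ctrl; both cases
 * require the core channel lock to be taken during commit.
 */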
4192 nv50_disp_outp_atomic_check_clr(struct nv50_atom *atom,
4193 struct drm_connector_state *old_connector_state)
4195 struct drm_encoder *encoder = old_connector_state->best_encoder;
4196 struct drm_crtc_state *old_crtc_state, *new_crtc_state;
4197 struct drm_crtc *crtc;
4198 struct nv50_outp_atom *outp;
4200 if (!(crtc = old_connector_state->crtc))
4203 old_crtc_state = drm_atomic_get_old_crtc_state(&atom->state, crtc);
4204 new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
4205 if (old_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
4206 outp = nv50_disp_outp_atomic_add(atom, encoder);
4208 return PTR_ERR(outp);
4210 if (outp->encoder->encoder_type == DRM_MODE_ENCODER_DPMST) {
4211 outp->flush_disable = true;
4212 atom->flush_disable = true;
4214 outp->clr.ctrl = true;
4215 atom->lock_core = true;
4222 nv50_disp_outp_atomic_check_set(struct nv50_atom *atom,
4223 struct drm_connector_state *connector_state)
4225 struct drm_encoder *encoder = connector_state->best_encoder;
4226 struct drm_crtc_state *new_crtc_state;
4227 struct drm_crtc *crtc;
4228 struct nv50_outp_atom *outp;
4230 if (!(crtc = connector_state->crtc))
4233 new_crtc_state = drm_atomic_get_new_crtc_state(&atom->state, crtc);
4234 if (new_crtc_state->active && drm_atomic_crtc_needs_modeset(new_crtc_state)) {
4235 outp = nv50_disp_outp_atomic_add(atom, encoder);
4237 return PTR_ERR(outp);
4239 outp->set.ctrl = true;
4240 atom->lock_core = true;
4247 nv50_disp_atomic_check(struct drm_device *dev, struct drm_atomic_state *state)
4249 struct nv50_atom *atom = nv50_atom(state);
4250 struct drm_connector_state *old_connector_state, *new_connector_state;
4251 struct drm_connector *connector;
4254 ret = drm_atomic_helper_check(dev, state);
4258 for_each_oldnew_connector_in_state(state, connector, old_connector_state, new_connector_state, i) {
4259 ret = nv50_disp_outp_atomic_check_clr(atom, old_connector_state);
4263 ret = nv50_disp_outp_atomic_check_set(atom, new_connector_state);
4272 nv50_disp_atomic_state_clear(struct drm_atomic_state *state)
4274 struct nv50_atom *atom = nv50_atom(state);
4275 struct nv50_outp_atom *outp, *outt;
4277 list_for_each_entry_safe(outp, outt, &atom->outp, head) {
4278 list_del(&outp->head);
4282 drm_atomic_state_default_clear(state);
4286 nv50_disp_atomic_state_free(struct drm_atomic_state *state)
4288 struct nv50_atom *atom = nv50_atom(state);
4289 drm_atomic_state_default_release(&atom->state);
4293 static struct drm_atomic_state *
4294 nv50_disp_atomic_state_alloc(struct drm_device *dev)
4296 struct nv50_atom *atom;
4297 if (!(atom = kzalloc(sizeof(*atom), GFP_KERNEL)) ||
4298 drm_atomic_state_init(dev, &atom->state) < 0) {
4302 INIT_LIST_HEAD(&atom->outp);
4303 return &atom->state;
4306 static const struct drm_mode_config_funcs
4308 .fb_create = nouveau_user_framebuffer_create,
4309 .output_poll_changed = drm_fb_helper_output_poll_changed,
4310 .atomic_check = nv50_disp_atomic_check,
4311 .atomic_commit = nv50_disp_atomic_commit,
4312 .atomic_state_alloc = nv50_disp_atomic_state_alloc,
4313 .atomic_state_clear = nv50_disp_atomic_state_clear,
4314 .atomic_state_free = nv50_disp_atomic_state_free,
4317 /******************************************************************************
4318 * Init
4319 *****************************************************************************/
4322 nv50_display_fini(struct drm_device *dev)
4324 struct nouveau_encoder *nv_encoder;
4325 struct drm_encoder *encoder;
4326 struct drm_plane *plane;
4328 drm_for_each_plane(plane, dev) {
4329 struct nv50_wndw *wndw = nv50_wndw(plane);
4330 if (plane->funcs != &nv50_wndw)
4332 nv50_wndw_fini(wndw);
4335 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
4336 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
4337 nv_encoder = nouveau_encoder(encoder);
4338 nv50_mstm_fini(nv_encoder->dp.mstm);
4344 nv50_display_init(struct drm_device *dev)
4346 struct drm_encoder *encoder;
4347 struct drm_plane *plane;
4348 struct drm_crtc *crtc;
4351 push = evo_wait(nv50_mast(dev), 32);
4355 evo_mthd(push, 0x0088, 1);
4356 evo_data(push, nv50_mast(dev)->base.sync.handle);
4357 evo_kick(push, nv50_mast(dev));
4359 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
4360 if (encoder->encoder_type != DRM_MODE_ENCODER_DPMST) {
4361 struct nouveau_encoder *nv_encoder =
4362 nouveau_encoder(encoder);
4363 nv50_mstm_init(nv_encoder->dp.mstm);
4367 drm_for_each_crtc(crtc, dev) {
4368 nv50_head_lut_load(crtc);
4371 drm_for_each_plane(plane, dev) {
4372 struct nv50_wndw *wndw = nv50_wndw(plane);
4373 if (plane->funcs != &nv50_wndw)
4375 nv50_wndw_init(wndw);
4382 nv50_display_destroy(struct drm_device *dev)
4384 struct nv50_disp *disp = nv50_disp(dev);
4386 nv50_dmac_destroy(&disp->mast.base, disp->disp);
4388 nouveau_bo_unmap(disp->sync);
4390 nouveau_bo_unpin(disp->sync);
4391 nouveau_bo_ref(NULL, &disp->sync);
4393 nouveau_display(dev)->priv = NULL;
4397 MODULE_PARM_DESC(atomic, "Expose atomic ioctl (default: disabled)");
4398 static int nouveau_atomic = 0;
4399 module_param_named(atomic, nouveau_atomic, int, 0400);
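/* Create the display: allocate the shared sync buffer and the core (master)
 * EVO channel, create a head for each CRTC the device exposes, then create
 * encoders and connectors from the VBIOS DCB table, culling any connectors
 * that end up without an encoder.
 */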
4402 nv50_display_create(struct drm_device *dev)
4404 struct nvif_device *device = &nouveau_drm(dev)->client.device;
4405 struct nouveau_drm *drm = nouveau_drm(dev);
4406 struct dcb_table *dcb = &drm->vbios.dcb;
4407 struct drm_connector *connector, *tmp;
4408 struct nv50_disp *disp;
4409 struct dcb_output *dcbe;
4412 disp = kzalloc(sizeof(*disp), GFP_KERNEL);
4416 mutex_init(&disp->mutex);
4418 nouveau_display(dev)->priv = disp;
4419 nouveau_display(dev)->dtor = nv50_display_destroy;
4420 nouveau_display(dev)->init = nv50_display_init;
4421 nouveau_display(dev)->fini = nv50_display_fini;
4422 disp->disp = &nouveau_display(dev)->disp;
4423 dev->mode_config.funcs = &nv50_disp_func;
4425 dev->driver->driver_features |= DRIVER_ATOMIC;
4427 /* small shared memory area we use for notifiers and semaphores */
4428 ret = nouveau_bo_new(&drm->client, 4096, 0x1000, TTM_PL_FLAG_VRAM,
4429 0, 0x0000, NULL, NULL, &disp->sync);
4431 ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM, true);
4433 ret = nouveau_bo_map(disp->sync);
4435 nouveau_bo_unpin(disp->sync);
4438 nouveau_bo_ref(NULL, &disp->sync);
4444 /* allocate master evo channel */
4445 ret = nv50_core_create(device, disp->disp, disp->sync->bo.offset,
4450 /* create crtc objects to represent the hw heads */
4451 if (disp->disp->oclass >= GF110_DISP)
4452 crtcs = nvif_rd32(&device->object, 0x612004) & 0xf;
4456 for (i = 0; i < fls(crtcs); i++) {
4457 if (!(crtcs & (1 << i)))
4459 ret = nv50_head_create(dev, i);
4464 /* create encoder/connector objects based on VBIOS DCB table */
4465 for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
4466 connector = nouveau_connector_create(dev, dcbe->connector);
4467 if (IS_ERR(connector))
4470 if (dcbe->location == DCB_LOC_ON_CHIP) {
4471 switch (dcbe->type) {
4472 case DCB_OUTPUT_TMDS:
4473 case DCB_OUTPUT_LVDS:
4475 ret = nv50_sor_create(connector, dcbe);
4477 case DCB_OUTPUT_ANALOG:
4478 ret = nv50_dac_create(connector, dcbe);
4485 ret = nv50_pior_create(connector, dcbe);
4489 NV_WARN(drm, "failed to create encoder %d/%d/%d: %d\n",
4490 dcbe->location, dcbe->type,
4491 ffs(dcbe->or) - 1, ret);
4496 /* cull any connectors we created that don't have an encoder */
4497 list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
4498 if (connector->encoder_ids[0])
4501 NV_WARN(drm, "%s has no encoders, removing\n",
4503 connector->funcs->destroy(connector);
4508 nv50_display_destroy(dev);