1 // SPDX-License-Identifier: GPL-2.0-or-later
3 #include <drm/drm_gem_vram_helper.h>
4 #include <drm/drm_device.h>
5 #include <drm/drm_mode.h>
6 #include <drm/drm_prime.h>
7 #include <drm/drm_vram_mm_helper.h>
8 #include <drm/ttm/ttm_page_alloc.h>
/**
 * DOC: overview
 *
 * This library provides a GEM buffer object that is backed by video RAM
 * (VRAM). It can be used for framebuffer devices with dedicated memory.
 */

/*
 * Buffer-object helpers
 */
21 static void drm_gem_vram_cleanup(struct drm_gem_vram_object *gbo)
23 /* We got here via ttm_bo_put(), which means that the
24 * TTM buffer object in 'bo' has already been cleaned
25 * up; only release the GEM object.
27 drm_gem_object_release(&gbo->gem);
/* Releases the GEM object and frees the backing storage of @gbo. */
static void drm_gem_vram_destroy(struct drm_gem_vram_object *gbo)
{
	drm_gem_vram_cleanup(gbo);
	kfree(gbo); /* allocated with kzalloc() in drm_gem_vram_create() */
}
/* TTM destroy callback; invoked when the BO's final reference is dropped. */
static void ttm_buffer_object_destroy(struct ttm_buffer_object *bo)
{
	struct drm_gem_vram_object *gbo = drm_gem_vram_of_bo(bo);

	drm_gem_vram_destroy(gbo);
}
43 static void drm_gem_vram_placement(struct drm_gem_vram_object *gbo,
44 unsigned long pl_flag)
49 gbo->placement.placement = gbo->placements;
50 gbo->placement.busy_placement = gbo->placements;
52 if (pl_flag & TTM_PL_FLAG_VRAM)
53 gbo->placements[c++].flags = TTM_PL_FLAG_WC |
54 TTM_PL_FLAG_UNCACHED |
57 if (pl_flag & TTM_PL_FLAG_SYSTEM)
58 gbo->placements[c++].flags = TTM_PL_MASK_CACHING |
62 gbo->placements[c++].flags = TTM_PL_MASK_CACHING |
65 gbo->placement.num_placement = c;
66 gbo->placement.num_busy_placement = c;
68 for (i = 0; i < c; ++i) {
69 gbo->placements[i].fpfn = 0;
70 gbo->placements[i].lpfn = 0;
74 static int drm_gem_vram_init(struct drm_device *dev,
75 struct ttm_bo_device *bdev,
76 struct drm_gem_vram_object *gbo,
77 size_t size, unsigned long pg_align,
83 ret = drm_gem_object_init(dev, &gbo->gem, size);
87 acc_size = ttm_bo_dma_acc_size(bdev, size, sizeof(*gbo));
90 drm_gem_vram_placement(gbo, TTM_PL_FLAG_VRAM | TTM_PL_FLAG_SYSTEM);
92 ret = ttm_bo_init(bdev, &gbo->bo, size, ttm_bo_type_device,
93 &gbo->placement, pg_align, interruptible, acc_size,
94 NULL, NULL, ttm_buffer_object_destroy);
96 goto err_drm_gem_object_release;
100 err_drm_gem_object_release:
101 drm_gem_object_release(&gbo->gem);
106 * drm_gem_vram_create() - Creates a VRAM-backed GEM object
107 * @dev: the DRM device
108 * @bdev: the TTM BO device backing the object
109 * @size: the buffer size in bytes
110 * @pg_align: the buffer's alignment in multiples of the page size
111 * @interruptible: sleep interruptible if waiting for memory
114 * A new instance of &struct drm_gem_vram_object on success, or
115 * an ERR_PTR()-encoded error code otherwise.
117 struct drm_gem_vram_object *drm_gem_vram_create(struct drm_device *dev,
118 struct ttm_bo_device *bdev,
120 unsigned long pg_align,
123 struct drm_gem_vram_object *gbo;
126 gbo = kzalloc(sizeof(*gbo), GFP_KERNEL);
128 return ERR_PTR(-ENOMEM);
130 ret = drm_gem_vram_init(dev, bdev, gbo, size, pg_align, interruptible);
140 EXPORT_SYMBOL(drm_gem_vram_create);
143 * drm_gem_vram_put() - Releases a reference to a VRAM-backed GEM object
144 * @gbo: the GEM VRAM object
146 * See ttm_bo_put() for more information.
148 void drm_gem_vram_put(struct drm_gem_vram_object *gbo)
150 ttm_bo_put(&gbo->bo);
152 EXPORT_SYMBOL(drm_gem_vram_put);
155 * drm_gem_vram_reserve() - Reserves a VRAM-backed GEM object
156 * @gbo: the GEM VRAM object
157 * @no_wait: don't wait for buffer object to become available
159 * See ttm_bo_reserve() for more information.
163 * a negative error code otherwise
165 int drm_gem_vram_reserve(struct drm_gem_vram_object *gbo, bool no_wait)
167 return ttm_bo_reserve(&gbo->bo, true, no_wait, NULL);
169 EXPORT_SYMBOL(drm_gem_vram_reserve);
172 * drm_gem_vram_unreserve() - \
173 Release a reservation acquired by drm_gem_vram_reserve()
174 * @gbo: the GEM VRAM object
176 * See ttm_bo_unreserve() for more information.
178 void drm_gem_vram_unreserve(struct drm_gem_vram_object *gbo)
180 ttm_bo_unreserve(&gbo->bo);
182 EXPORT_SYMBOL(drm_gem_vram_unreserve);
185 * drm_gem_vram_mmap_offset() - Returns a GEM VRAM object's mmap offset
186 * @gbo: the GEM VRAM object
188 * See drm_vma_node_offset_addr() for more information.
191 * The buffer object's offset for userspace mappings on success, or
192 * 0 if no offset is allocated.
194 u64 drm_gem_vram_mmap_offset(struct drm_gem_vram_object *gbo)
196 return drm_vma_node_offset_addr(&gbo->bo.vma_node);
198 EXPORT_SYMBOL(drm_gem_vram_mmap_offset);
201 * drm_gem_vram_offset() - \
202 Returns a GEM VRAM object's offset in video memory
203 * @gbo: the GEM VRAM object
205 * This function returns the buffer object's offset in the device's video
206 * memory. The buffer object has to be pinned to %TTM_PL_VRAM.
209 * The buffer object's offset in video memory on success, or
210 * a negative errno code otherwise.
212 s64 drm_gem_vram_offset(struct drm_gem_vram_object *gbo)
214 if (WARN_ON_ONCE(!gbo->pin_count))
216 return gbo->bo.offset;
218 EXPORT_SYMBOL(drm_gem_vram_offset);
221 * drm_gem_vram_pin() - Pins a GEM VRAM object in a region.
222 * @gbo: the GEM VRAM object
223 * @pl_flag: a bitmask of possible memory regions
225 * Pinning a buffer object ensures that it is not evicted from
226 * a memory region. A pinned buffer object has to be unpinned before
227 * it can be pinned to another region.
231 * a negative error code otherwise.
233 int drm_gem_vram_pin(struct drm_gem_vram_object *gbo, unsigned long pl_flag)
236 struct ttm_operation_ctx ctx = { false, false };
238 ret = ttm_bo_reserve(&gbo->bo, true, false, NULL);
245 drm_gem_vram_placement(gbo, pl_flag);
246 for (i = 0; i < gbo->placement.num_placement; ++i)
247 gbo->placements[i].flags |= TTM_PL_FLAG_NO_EVICT;
249 ret = ttm_bo_validate(&gbo->bo, &gbo->placement, &ctx);
251 goto err_ttm_bo_unreserve;
255 ttm_bo_unreserve(&gbo->bo);
259 err_ttm_bo_unreserve:
260 ttm_bo_unreserve(&gbo->bo);
263 EXPORT_SYMBOL(drm_gem_vram_pin);
266 * drm_gem_vram_pin_reserved() - Pins a GEM VRAM object in a region.
267 * @gbo: the GEM VRAM object
268 * @pl_flag: a bitmask of possible memory regions
270 * Pinning a buffer object ensures that it is not evicted from
271 * a memory region. A pinned buffer object has to be unpinned before
272 * it can be pinned to another region.
274 * This function pins a GEM VRAM object that has already been
275 * reserved. Use drm_gem_vram_pin() if possible.
279 * a negative error code otherwise.
281 int drm_gem_vram_pin_reserved(struct drm_gem_vram_object *gbo,
282 unsigned long pl_flag)
285 struct ttm_operation_ctx ctx = { false, false };
287 if (gbo->pin_count) {
292 drm_gem_vram_placement(gbo, pl_flag);
293 for (i = 0; i < gbo->placement.num_placement; ++i)
294 gbo->placements[i].flags |= TTM_PL_FLAG_NO_EVICT;
296 ret = ttm_bo_validate(&gbo->bo, &gbo->placement, &ctx);
304 EXPORT_SYMBOL(drm_gem_vram_pin_reserved);
307 * drm_gem_vram_unpin() - Unpins a GEM VRAM object
308 * @gbo: the GEM VRAM object
312 * a negative error code otherwise.
314 int drm_gem_vram_unpin(struct drm_gem_vram_object *gbo)
317 struct ttm_operation_ctx ctx = { false, false };
319 ret = ttm_bo_reserve(&gbo->bo, true, false, NULL);
323 if (WARN_ON_ONCE(!gbo->pin_count))
330 for (i = 0; i < gbo->placement.num_placement ; ++i)
331 gbo->placements[i].flags &= ~TTM_PL_FLAG_NO_EVICT;
333 ret = ttm_bo_validate(&gbo->bo, &gbo->placement, &ctx);
335 goto err_ttm_bo_unreserve;
338 ttm_bo_unreserve(&gbo->bo);
342 err_ttm_bo_unreserve:
343 ttm_bo_unreserve(&gbo->bo);
346 EXPORT_SYMBOL(drm_gem_vram_unpin);
349 * drm_gem_vram_unpin_reserved() - Unpins a GEM VRAM object
350 * @gbo: the GEM VRAM object
352 * This function unpins a GEM VRAM object that has already been
353 * reserved. Use drm_gem_vram_unpin() if possible.
357 * a negative error code otherwise.
359 int drm_gem_vram_unpin_reserved(struct drm_gem_vram_object *gbo)
362 struct ttm_operation_ctx ctx = { false, false };
364 if (WARN_ON_ONCE(!gbo->pin_count))
371 for (i = 0; i < gbo->placement.num_placement ; ++i)
372 gbo->placements[i].flags &= ~TTM_PL_FLAG_NO_EVICT;
374 ret = ttm_bo_validate(&gbo->bo, &gbo->placement, &ctx);
380 EXPORT_SYMBOL(drm_gem_vram_unpin_reserved);
383 * drm_gem_vram_kmap_at() - Maps a GEM VRAM object into kernel address space
384 * @gbo: the GEM VRAM object
385 * @map: establish a mapping if necessary
386 * @is_iomem: returns true if the mapped memory is I/O memory, or false \
387 otherwise; can be NULL
388 * @kmap: the mapping's kmap object
390 * This function maps the buffer object into the kernel's address space
391 * or returns the current mapping. If the parameter map is false, the
392 * function only queries the current mapping, but does not establish a
396 * The buffers virtual address if mapped, or
397 * NULL if not mapped, or
398 * an ERR_PTR()-encoded error code otherwise.
400 void *drm_gem_vram_kmap_at(struct drm_gem_vram_object *gbo, bool map,
401 bool *is_iomem, struct ttm_bo_kmap_obj *kmap)
405 if (kmap->virtual || !map)
408 ret = ttm_bo_kmap(&gbo->bo, 0, gbo->bo.num_pages, kmap);
414 return kmap->virtual;
415 if (!kmap->virtual) {
419 return ttm_kmap_obj_virtual(kmap, is_iomem);
421 EXPORT_SYMBOL(drm_gem_vram_kmap_at);
424 * drm_gem_vram_kmap() - Maps a GEM VRAM object into kernel address space
425 * @gbo: the GEM VRAM object
426 * @map: establish a mapping if necessary
427 * @is_iomem: returns true if the mapped memory is I/O memory, or false \
428 otherwise; can be NULL
430 * This function maps the buffer object into the kernel's address space
431 * or returns the current mapping. If the parameter map is false, the
432 * function only queries the current mapping, but does not establish a
436 * The buffers virtual address if mapped, or
437 * NULL if not mapped, or
438 * an ERR_PTR()-encoded error code otherwise.
440 void *drm_gem_vram_kmap(struct drm_gem_vram_object *gbo, bool map,
443 return drm_gem_vram_kmap_at(gbo, map, is_iomem, &gbo->kmap);
445 EXPORT_SYMBOL(drm_gem_vram_kmap);
448 * drm_gem_vram_kunmap_at() - Unmaps a GEM VRAM object
449 * @gbo: the GEM VRAM object
450 * @kmap: the mapping's kmap object
452 void drm_gem_vram_kunmap_at(struct drm_gem_vram_object *gbo,
453 struct ttm_bo_kmap_obj *kmap)
459 kmap->virtual = NULL;
461 EXPORT_SYMBOL(drm_gem_vram_kunmap_at);
464 * drm_gem_vram_kunmap() - Unmaps a GEM VRAM object
465 * @gbo: the GEM VRAM object
467 void drm_gem_vram_kunmap(struct drm_gem_vram_object *gbo)
469 drm_gem_vram_kunmap_at(gbo, &gbo->kmap);
471 EXPORT_SYMBOL(drm_gem_vram_kunmap);
474 * drm_gem_vram_fill_create_dumb() - \
475 Helper for implementing &struct drm_driver.dumb_create
476 * @file: the DRM file
477 * @dev: the DRM device
478 * @bdev: the TTM BO device managing the buffer object
479 * @pg_align: the buffer's alignment in multiples of the page size
480 * @interruptible: sleep interruptible if waiting for memory
481 * @args: the arguments as provided to \
482 &struct drm_driver.dumb_create
484 * This helper function fills &struct drm_mode_create_dumb, which is used
485 * by &struct drm_driver.dumb_create. Implementations of this interface
486 * should forwards their arguments to this helper, plus the driver-specific
491 * a negative error code otherwise.
493 int drm_gem_vram_fill_create_dumb(struct drm_file *file,
494 struct drm_device *dev,
495 struct ttm_bo_device *bdev,
496 unsigned long pg_align,
498 struct drm_mode_create_dumb *args)
501 struct drm_gem_vram_object *gbo;
505 pitch = args->width * ((args->bpp + 7) / 8);
506 size = pitch * args->height;
508 size = roundup(size, PAGE_SIZE);
512 gbo = drm_gem_vram_create(dev, bdev, size, pg_align, interruptible);
516 ret = drm_gem_handle_create(file, &gbo->gem, &handle);
518 goto err_drm_gem_object_put_unlocked;
520 drm_gem_object_put_unlocked(&gbo->gem);
524 args->handle = handle;
528 err_drm_gem_object_put_unlocked:
529 drm_gem_object_put_unlocked(&gbo->gem);
532 EXPORT_SYMBOL(drm_gem_vram_fill_create_dumb);
/*
 * Helpers for struct ttm_bo_driver
 */
538 static bool drm_is_gem_vram(struct ttm_buffer_object *bo)
540 return (bo->destroy == ttm_buffer_object_destroy);
544 * drm_gem_vram_bo_driver_evict_flags() - \
545 Implements &struct ttm_bo_driver.evict_flags
546 * @bo: TTM buffer object. Refers to &struct drm_gem_vram_object.bo
547 * @pl: TTM placement information.
549 void drm_gem_vram_bo_driver_evict_flags(struct ttm_buffer_object *bo,
550 struct ttm_placement *pl)
552 struct drm_gem_vram_object *gbo;
554 /* TTM may pass BOs that are not GEM VRAM BOs. */
555 if (!drm_is_gem_vram(bo))
558 gbo = drm_gem_vram_of_bo(bo);
559 drm_gem_vram_placement(gbo, TTM_PL_FLAG_SYSTEM);
560 *pl = gbo->placement;
562 EXPORT_SYMBOL(drm_gem_vram_bo_driver_evict_flags);
565 * drm_gem_vram_bo_driver_verify_access() - \
566 Implements &struct ttm_bo_driver.verify_access
567 * @bo: TTM buffer object. Refers to &struct drm_gem_vram_object.bo
568 * @filp: File pointer.
572 * a negative errno code otherwise.
574 int drm_gem_vram_bo_driver_verify_access(struct ttm_buffer_object *bo,
577 struct drm_gem_vram_object *gbo = drm_gem_vram_of_bo(bo);
579 return drm_vma_node_verify_access(&gbo->gem.vma_node,
582 EXPORT_SYMBOL(drm_gem_vram_bo_driver_verify_access);
585 * drm_gem_vram_mm_funcs - Functions for &struct drm_vram_mm
587 * Most users of @struct drm_gem_vram_object will also use
588 * @struct drm_vram_mm. This instance of &struct drm_vram_mm_funcs
589 * can be used to connect both.
591 const struct drm_vram_mm_funcs drm_gem_vram_mm_funcs = {
592 .evict_flags = drm_gem_vram_bo_driver_evict_flags,
593 .verify_access = drm_gem_vram_bo_driver_verify_access
595 EXPORT_SYMBOL(drm_gem_vram_mm_funcs);
/*
 * Helpers for struct drm_driver
 */
/**
 * drm_gem_vram_driver_gem_free_object_unlocked() - \
	Implements &struct drm_driver.gem_free_object_unlocked
 * @gem:	GEM object. Refers to &struct drm_gem_vram_object.gem
 */
void drm_gem_vram_driver_gem_free_object_unlocked(struct drm_gem_object *gem)
{
	struct drm_gem_vram_object *gbo = drm_gem_vram_of_gem(gem);

	drm_gem_vram_put(gbo);
}
EXPORT_SYMBOL(drm_gem_vram_driver_gem_free_object_unlocked);
615 * drm_gem_vram_driver_create_dumb() - \
616 Implements &struct drm_driver.dumb_create
617 * @file: the DRM file
618 * @dev: the DRM device
619 * @args: the arguments as provided to \
620 &struct drm_driver.dumb_create
622 * This function requires the driver to use @drm_device.vram_mm for its
623 * instance of VRAM MM.
627 * a negative error code otherwise.
629 int drm_gem_vram_driver_dumb_create(struct drm_file *file,
630 struct drm_device *dev,
631 struct drm_mode_create_dumb *args)
633 if (WARN_ONCE(!dev->vram_mm, "VRAM MM not initialized"))
636 return drm_gem_vram_fill_create_dumb(file, dev, &dev->vram_mm->bdev, 0,
639 EXPORT_SYMBOL(drm_gem_vram_driver_dumb_create);
642 * drm_gem_vram_driver_dumb_mmap_offset() - \
643 Implements &struct drm_driver.dumb_mmap_offset
644 * @file: DRM file pointer.
646 * @handle: GEM handle
647 * @offset: Returns the mapping's memory offset on success
651 * a negative errno code otherwise.
653 int drm_gem_vram_driver_dumb_mmap_offset(struct drm_file *file,
654 struct drm_device *dev,
655 uint32_t handle, uint64_t *offset)
657 struct drm_gem_object *gem;
658 struct drm_gem_vram_object *gbo;
660 gem = drm_gem_object_lookup(file, handle);
664 gbo = drm_gem_vram_of_gem(gem);
665 *offset = drm_gem_vram_mmap_offset(gbo);
667 drm_gem_object_put_unlocked(gem);
671 EXPORT_SYMBOL(drm_gem_vram_driver_dumb_mmap_offset);
/*
 * PRIME helpers for struct drm_driver
 */
678 * drm_gem_vram_driver_gem_prime_pin() - \
679 Implements &struct drm_driver.gem_prime_pin
680 * @gem: The GEM object to pin
684 * a negative errno code otherwise.
686 int drm_gem_vram_driver_gem_prime_pin(struct drm_gem_object *gem)
688 struct drm_gem_vram_object *gbo = drm_gem_vram_of_gem(gem);
690 return drm_gem_vram_pin(gbo, DRM_GEM_VRAM_PL_FLAG_VRAM);
692 EXPORT_SYMBOL(drm_gem_vram_driver_gem_prime_pin);
/**
 * drm_gem_vram_driver_gem_prime_unpin() - \
	Implements &struct drm_driver.gem_prime_unpin
 * @gem:	The GEM object to unpin
 */
void drm_gem_vram_driver_gem_prime_unpin(struct drm_gem_object *gem)
{
	struct drm_gem_vram_object *gbo = drm_gem_vram_of_gem(gem);

	drm_gem_vram_unpin(gbo);
}
EXPORT_SYMBOL(drm_gem_vram_driver_gem_prime_unpin);
708 * drm_gem_vram_driver_gem_prime_vmap() - \
709 Implements &struct drm_driver.gem_prime_vmap
710 * @gem: The GEM object to map
713 * The buffers virtual address on success, or
716 void *drm_gem_vram_driver_gem_prime_vmap(struct drm_gem_object *gem)
718 struct drm_gem_vram_object *gbo = drm_gem_vram_of_gem(gem);
722 ret = drm_gem_vram_pin(gbo, DRM_GEM_VRAM_PL_FLAG_VRAM);
725 base = drm_gem_vram_kmap(gbo, true, NULL);
727 drm_gem_vram_unpin(gbo);
732 EXPORT_SYMBOL(drm_gem_vram_driver_gem_prime_vmap);
/**
 * drm_gem_vram_driver_gem_prime_vunmap() - \
	Implements &struct drm_driver.gem_prime_vunmap
 * @gem:	The GEM object to unmap
 * @vaddr:	The mapping's base address
 */
void drm_gem_vram_driver_gem_prime_vunmap(struct drm_gem_object *gem,
					  void *vaddr)
{
	struct drm_gem_vram_object *gbo = drm_gem_vram_of_gem(gem);

	/* Release in reverse order of gem_prime_vmap: unmap, then unpin. */
	drm_gem_vram_kunmap(gbo);
	drm_gem_vram_unpin(gbo);
}
EXPORT_SYMBOL(drm_gem_vram_driver_gem_prime_vunmap);
751 * drm_gem_vram_driver_gem_prime_mmap() - \
752 Implements &struct drm_driver.gem_prime_mmap
753 * @gem: The GEM object to map
754 * @vma: The VMA describing the mapping
758 * a negative errno code otherwise.
760 int drm_gem_vram_driver_gem_prime_mmap(struct drm_gem_object *gem,
761 struct vm_area_struct *vma)
763 struct drm_gem_vram_object *gbo = drm_gem_vram_of_gem(gem);
765 gbo->gem.vma_node.vm_node.start = gbo->bo.vma_node.vm_node.start;
766 return drm_gem_prime_mmap(gem, vma);
768 EXPORT_SYMBOL(drm_gem_vram_driver_gem_prime_mmap);