/linux-6.12.1/drivers/media/common/videobuf2/

videobuf2-dma-contig.c
   53  static unsigned long vb2_dc_get_contiguous_size(struct sg_table *sgt)  in vb2_dc_get_contiguous_size() argument
   56  dma_addr_t expected = sg_dma_address(sgt->sgl);  in vb2_dc_get_contiguous_size()
   60  for_each_sgtable_dma_sg(sgt, s, i) {  in vb2_dc_get_contiguous_size()
  126  struct sg_table *sgt = buf->dma_sgt;  in vb2_dc_prepare() local
  140  dma_sync_sgtable_for_device(buf->dev, sgt, buf->dma_dir);  in vb2_dc_prepare()
  146  struct sg_table *sgt = buf->dma_sgt;  in vb2_dc_finish() local
  160  dma_sync_sgtable_for_cpu(buf->dev, sgt, buf->dma_dir);  in vb2_dc_finish()
  313  struct sg_table sgt;  member
  323  struct sg_table *sgt;  in vb2_dc_dmabuf_ops_attach() local
  331  sgt = &attach->sgt;  in vb2_dc_dmabuf_ops_attach()
       [all …]

videobuf2-dma-sg.c
  105  struct sg_table *sgt;  in vb2_dma_sg_alloc() local
  145  sgt = &buf->sg_table;  in vb2_dma_sg_alloc()
  150  if (dma_map_sgtable(buf->dev, sgt, buf->dma_dir,  in vb2_dma_sg_alloc()
  182  struct sg_table *sgt = &buf->sg_table;  in vb2_dma_sg_put() local
  188  dma_unmap_sgtable(buf->dev, sgt, buf->dma_dir,  in vb2_dma_sg_put()
  204  struct sg_table *sgt = buf->dma_sgt;  in vb2_dma_sg_prepare() local
  209  dma_sync_sgtable_for_device(buf->dev, sgt, buf->dma_dir);  in vb2_dma_sg_prepare()
  215  struct sg_table *sgt = buf->dma_sgt;  in vb2_dma_sg_finish() local
  220  dma_sync_sgtable_for_cpu(buf->dev, sgt, buf->dma_dir);  in vb2_dma_sg_finish()
  227  struct sg_table *sgt;  in vb2_dma_sg_get_userptr() local
       [all …]

videobuf2-vmalloc.c
  208  struct sg_table sgt;  member
  218  struct sg_table *sgt;  in vb2_vmalloc_dmabuf_ops_attach() local
  228  sgt = &attach->sgt;  in vb2_vmalloc_dmabuf_ops_attach()
  229  ret = sg_alloc_table(sgt, num_pages, GFP_KERNEL);  in vb2_vmalloc_dmabuf_ops_attach()
  234  for_each_sgtable_sg(sgt, sg, i) {  in vb2_vmalloc_dmabuf_ops_attach()
  238  sg_free_table(sgt);  in vb2_vmalloc_dmabuf_ops_attach()
  255  struct sg_table *sgt;  in vb2_vmalloc_dmabuf_ops_detach() local
  260  sgt = &attach->sgt;  in vb2_vmalloc_dmabuf_ops_detach()
  264  dma_unmap_sgtable(db_attach->dev, sgt, attach->dma_dir, 0);  in vb2_vmalloc_dmabuf_ops_detach()
  265  sg_free_table(sgt);  in vb2_vmalloc_dmabuf_ops_detach()
       [all …]
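The three videobuf2 backends above show the usual lifecycle of a scatter-gather table: build or wrap the pages, dma_map_sgtable(), bracket I/O with dma_sync_sgtable_for_device()/_for_cpu(), then unmap and free. A minimal sketch of the contiguity walk behind vb2_dc_get_contiguous_size(), which the dma-contig backend uses to check that a mapped buffer came out as a single DMA segment:

```c
#include <linux/scatterlist.h>
#include <linux/dma-mapping.h>

/*
 * Count how many bytes from the start of a DMA-mapped sg_table are
 * contiguous in DMA address space.
 */
static unsigned long sgt_contiguous_size(struct sg_table *sgt)
{
	dma_addr_t expected = sg_dma_address(sgt->sgl);
	unsigned long size = 0;
	struct scatterlist *s;
	unsigned int i;

	/* Walk DMA segments (nents), not CPU pages (orig_nents). */
	for_each_sgtable_dma_sg(sgt, s, i) {
		if (sg_dma_address(s) != expected)
			break;
		expected += sg_dma_len(s);
		size += sg_dma_len(s);
	}
	return size;
}
```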
/linux-6.12.1/drivers/gpu/drm/i915/

i915_mm.c
   37  struct sgt_iter sgt;  member
   46  return (r->sgt.dma + r->sgt.curr + r->iobase) >> PAGE_SHIFT;  in sgt_pfn()
   48  return r->sgt.pfn + (r->sgt.curr >> PAGE_SHIFT);  in sgt_pfn()
   55  if (GEM_WARN_ON(!r->sgt.sgp))  in remap_sg()
   63  r->sgt.curr += PAGE_SIZE;  in remap_sg()
   64  if (r->sgt.curr >= r->sgt.max)  in remap_sg()
   65  r->sgt = __sgt_iter(__sg_next(r->sgt.sgp), use_dma(r->iobase));  in remap_sg()
  138  .sgt = __sgt_iter(sgl, use_dma(iobase)),  in remap_io_sg()
  146  while (offset >= sg_dma_len(r.sgt.sgp) >> PAGE_SHIFT) {  in remap_io_sg()
  147  offset -= sg_dma_len(r.sgt.sgp) >> PAGE_SHIFT;  in remap_io_sg()
       [all …]
/linux-6.12.1/drivers/gpu/drm/tests/

drm_gem_shmem_test.c
   70  struct sg_table *sgt;  in drm_gem_shmem_test_obj_create_private() local
   78  sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);  in drm_gem_shmem_test_obj_create_private()
   79  KUNIT_ASSERT_NOT_NULL(test, sgt);  in drm_gem_shmem_test_obj_create_private()
   81  ret = kunit_add_action_or_reset(test, kfree_wrapper, sgt);  in drm_gem_shmem_test_obj_create_private()
   84  ret = sg_alloc_table(sgt, 1, GFP_KERNEL);  in drm_gem_shmem_test_obj_create_private()
   87  ret = kunit_add_action_or_reset(test, sg_free_table_wrapper, sgt);  in drm_gem_shmem_test_obj_create_private()
   90  sg_init_one(sgt->sgl, buf, TEST_SIZE);  in drm_gem_shmem_test_obj_create_private()
  100  ret = dma_map_sgtable(drm_dev->dev, sgt, DMA_BIDIRECTIONAL, 0);  in drm_gem_shmem_test_obj_create_private()
  107  gem_obj = drm_gem_shmem_prime_import_sg_table(drm_dev, &attach_mock, sgt);  in drm_gem_shmem_test_obj_create_private()
  114  kunit_remove_action(test, sg_free_table_wrapper, sgt);  in drm_gem_shmem_test_obj_create_private()
       [all …]
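The KUnit helper above wraps a plain kernel buffer in a one-entry table before importing it as a shmem GEM object. A condensed sketch of that setup with the KUnit cleanup actions dropped for brevity; dev and buf stand in for the test's drm_dev->dev and its allocated buffer:

```c
#include <linux/scatterlist.h>
#include <linux/dma-mapping.h>
#include <linux/slab.h>
#include <linux/err.h>

/* Wrap one kmalloc'ed buffer in a single-entry sg_table and DMA-map it. */
static struct sg_table *map_single_buffer(struct device *dev, void *buf,
					  size_t size)
{
	struct sg_table *sgt;
	int ret;

	sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		return ERR_PTR(-ENOMEM);

	ret = sg_alloc_table(sgt, 1, GFP_KERNEL);
	if (ret)
		goto err_free;

	/* Point the single entry at the buffer (must not be vmalloc memory). */
	sg_init_one(sgt->sgl, buf, size);

	ret = dma_map_sgtable(dev, sgt, DMA_BIDIRECTIONAL, 0);
	if (ret)
		goto err_free_table;

	return sgt;

err_free_table:
	sg_free_table(sgt);
err_free:
	kfree(sgt);
	return ERR_PTR(ret);
}
```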
/linux-6.12.1/drivers/gpu/drm/tegra/

gem.c
   47  static inline unsigned int sgt_dma_count_chunks(struct sg_table *sgt)  in sgt_dma_count_chunks() argument
   49  return sg_dma_count_chunks(sgt->sgl, sgt->nents);  in sgt_dma_count_chunks()
   88  map->sgt = dma_buf_map_attachment_unlocked(map->attach, direction);  in tegra_bo_pin()
   89  if (IS_ERR(map->sgt)) {  in tegra_bo_pin()
   91  err = PTR_ERR(map->sgt);  in tegra_bo_pin()
   92  map->sgt = NULL;  in tegra_bo_pin()
   96  err = sgt_dma_count_chunks(map->sgt);  in tegra_bo_pin()
  106  map->sgt = kzalloc(sizeof(*map->sgt), GFP_KERNEL);  in tegra_bo_pin()
  107  if (!map->sgt) {  in tegra_bo_pin()
  117  err = sg_alloc_table_from_pages(map->sgt, obj->pages, obj->num_pages, 0, gem->size,  in tegra_bo_pin()
       [all …]
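tegra_bo_pin() shows the importer side of dma-buf: map the attachment and read the DMA address out of the resulting table. A condensed sketch of that branch; the caller must keep the table around and later release it with dma_buf_unmap_attachment_unlocked():

```c
#include <linux/dma-buf.h>
#include <linux/scatterlist.h>
#include <linux/err.h>

/*
 * Map an existing dma-buf attachment and return its first DMA address.
 * The address alone is only usable if the buffer mapped contiguously
 * (one DMA chunk); drivers without an IOMMU must check that.
 */
static struct sg_table *pin_attachment(struct dma_buf_attachment *attach,
				       dma_addr_t *addr)
{
	struct sg_table *sgt;

	sgt = dma_buf_map_attachment_unlocked(attach, DMA_TO_DEVICE);
	if (IS_ERR(sgt))
		return sgt;

	*addr = sg_dma_address(sgt->sgl);
	return sgt;
}
```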
/linux-6.12.1/drivers/gpu/drm/armada/

armada_gem.c
   68  if (dobj->sgt)  in armada_gem_free_object()
   70  dobj->sgt, DMA_TO_DEVICE);  in armada_gem_free_object()
  392  struct sg_table *sgt;  in armada_gem_prime_map_dma_buf() local
  395  sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);  in armada_gem_prime_map_dma_buf()
  396  if (!sgt)  in armada_gem_prime_map_dma_buf()
  404  if (sg_alloc_table(sgt, count, GFP_KERNEL))  in armada_gem_prime_map_dma_buf()
  409  for_each_sgtable_sg(sgt, sg, i) {  in armada_gem_prime_map_dma_buf()
  419  if (dma_map_sgtable(attach->dev, sgt, dir, 0))  in armada_gem_prime_map_dma_buf()
  423  if (sg_alloc_table(sgt, 1, GFP_KERNEL))  in armada_gem_prime_map_dma_buf()
  426  sg_set_page(sgt->sgl, dobj->page, dobj->obj.size, 0);  in armada_gem_prime_map_dma_buf()
       [all …]
/linux-6.12.1/drivers/gpu/drm/virtio/

virtgpu_vram.c
   75  struct sg_table *sgt;  in virtio_gpu_vram_map_dma_buf() local
   79  sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);  in virtio_gpu_vram_map_dma_buf()
   80  if (!sgt)  in virtio_gpu_vram_map_dma_buf()
   90  return sgt;  in virtio_gpu_vram_map_dma_buf()
   93  ret = sg_alloc_table(sgt, 1, GFP_KERNEL);  in virtio_gpu_vram_map_dma_buf()
  104  sg_set_page(sgt->sgl, NULL, vram->vram_node.size, 0);  in virtio_gpu_vram_map_dma_buf()
  105  sg_dma_address(sgt->sgl) = addr;  in virtio_gpu_vram_map_dma_buf()
  106  sg_dma_len(sgt->sgl) = vram->vram_node.size;  in virtio_gpu_vram_map_dma_buf()
  108  return sgt;  in virtio_gpu_vram_map_dma_buf()
  110  sg_free_table(sgt);  in virtio_gpu_vram_map_dma_buf()
       [all …]
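virtio_gpu_vram_map_dma_buf() fills a one-entry table by hand because VRAM has no struct page backing: the page pointer stays NULL and the DMA address and length are assigned directly. A sketch of that shape, where addr and size come from whatever bus region the exporter owns:

```c
#include <linux/scatterlist.h>
#include <linux/dma-mapping.h>
#include <linux/slab.h>
#include <linux/err.h>

/*
 * Build a pageless, pre-mapped one-entry sg_table for a region that
 * already has a bus/DMA address (e.g. PCI BAR backed VRAM).
 */
static struct sg_table *sgt_for_dma_region(dma_addr_t addr, size_t size)
{
	struct sg_table *sgt;
	int ret;

	sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		return ERR_PTR(-ENOMEM);

	ret = sg_alloc_table(sgt, 1, GFP_KERNEL);
	if (ret) {
		kfree(sgt);
		return ERR_PTR(ret);
	}

	/* No struct page behind this memory; set length/offset only. */
	sg_set_page(sgt->sgl, NULL, size, 0);
	sg_dma_address(sgt->sgl) = addr;
	sg_dma_len(sgt->sgl) = size;

	return sgt;
}
```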
/linux-6.12.1/drivers/gpu/drm/mediatek/

mtk_gem.c
  197  struct sg_table *sgt;  in mtk_gem_prime_get_sg_table() local
  200  sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);  in mtk_gem_prime_get_sg_table()
  201  if (!sgt)  in mtk_gem_prime_get_sg_table()
  204  ret = dma_get_sgtable_attrs(priv->dma_dev, sgt, mtk_gem->cookie,  in mtk_gem_prime_get_sg_table()
  209  kfree(sgt);  in mtk_gem_prime_get_sg_table()
  213  return sgt;  in mtk_gem_prime_get_sg_table()
  240  struct sg_table *sgt = NULL;  in mtk_gem_prime_vmap() local
  246  sgt = mtk_gem_prime_get_sg_table(obj);  in mtk_gem_prime_vmap()
  247  if (IS_ERR(sgt))  in mtk_gem_prime_vmap()
  248  return PTR_ERR(sgt);  in mtk_gem_prime_vmap()
       [all …]
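mtk_gem_prime_get_sg_table() derives a scatter list from a dma_alloc_attrs() cookie with dma_get_sgtable_attrs(). A minimal sketch, where cookie, dma_addr, and size are whatever the earlier dma_alloc_attrs() call returned for the buffer:

```c
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/err.h>

/*
 * Describe a DMA-coherent allocation as an sg_table so it can be
 * exported (e.g. through PRIME).
 */
static struct sg_table *sgt_from_coherent(struct device *dev, void *cookie,
					  dma_addr_t dma_addr, size_t size)
{
	struct sg_table *sgt;
	int ret;

	sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		return ERR_PTR(-ENOMEM);

	ret = dma_get_sgtable_attrs(dev, sgt, cookie, dma_addr, size, 0);
	if (ret) {
		kfree(sgt);
		return ERR_PTR(ret);
	}
	return sgt;
}
```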
/linux-6.12.1/drivers/xen/

gntdev-dmabuf.c
   46  struct sg_table *sgt;  member
   65  struct sg_table *sgt;  member
  198  struct sg_table *sgt;  in dmabuf_pages_to_sgt() local
  201  sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);  in dmabuf_pages_to_sgt()
  202  if (!sgt) {  in dmabuf_pages_to_sgt()
  207  ret = sg_alloc_table_from_pages(sgt, pages, nr_pages, 0,  in dmabuf_pages_to_sgt()
  213  return sgt;  in dmabuf_pages_to_sgt()
  216  kfree(sgt);  in dmabuf_pages_to_sgt()
  241  struct sg_table *sgt = gntdev_dmabuf_attach->sgt;  in dmabuf_exp_ops_detach() local
  243  if (sgt) {  in dmabuf_exp_ops_detach()
       [all …]
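dmabuf_pages_to_sgt() is the page-array case: sg_alloc_table_from_pages() packs an array of struct page pointers into as few entries as the physically contiguous runs allow. A sketch of the same shape:

```c
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/err.h>

/*
 * Turn an array of pages into an sg_table; physically contiguous pages
 * are coalesced into single entries.
 */
static struct sg_table *pages_to_sgt(struct page **pages,
				     unsigned int nr_pages)
{
	struct sg_table *sgt;
	int ret;

	sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		return ERR_PTR(-ENOMEM);

	ret = sg_alloc_table_from_pages(sgt, pages, nr_pages, 0,
					(unsigned long)nr_pages << PAGE_SHIFT,
					GFP_KERNEL);
	if (ret) {
		kfree(sgt);
		return ERR_PTR(ret);
	}
	return sgt;
}
```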
/linux-6.12.1/drivers/hwtracing/intel_th/

msu-sink.c
   51  static int msu_sink_alloc_window(void *data, struct sg_table **sgt, size_t size)  in msu_sink_alloc_window() argument
   64  ret = sg_alloc_table(*sgt, nents, GFP_KERNEL);  in msu_sink_alloc_window()
   68  priv->sgts[priv->nr_sgts++] = *sgt;  in msu_sink_alloc_window()
   70  for_each_sg((*sgt)->sgl, sg_ptr, nents, i) {  in msu_sink_alloc_window()
   84  static void msu_sink_free_window(void *data, struct sg_table *sgt)  in msu_sink_free_window() argument
   90  for_each_sg(sgt->sgl, sg_ptr, sgt->nents, i) {  in msu_sink_free_window()
   95  sg_free_table(sgt);  in msu_sink_free_window()
   99  static int msu_sink_ready(void *data, struct sg_table *sgt, size_t bytes)  in msu_sink_ready() argument
  103  intel_th_msc_window_unlock(priv->dev, sgt);  in msu_sink_ready()
/linux-6.12.1/kernel/dma/

mapping.c
  288  int dma_map_sgtable(struct device *dev, struct sg_table *sgt,  in dma_map_sgtable() argument
  293  nents = __dma_map_sg_attrs(dev, sgt->sgl, sgt->orig_nents, dir, attrs);  in dma_map_sgtable()
  296  sgt->nents = nents;  in dma_map_sgtable()
  481  int dma_get_sgtable_attrs(struct device *dev, struct sg_table *sgt,  in dma_get_sgtable_attrs() argument
  488  return dma_direct_get_sgtable(dev, sgt, cpu_addr, dma_addr,  in dma_get_sgtable_attrs()
  491  return iommu_dma_get_sgtable(dev, sgt, cpu_addr, dma_addr,  in dma_get_sgtable_attrs()
  495  return ops->get_sgtable(dev, sgt, cpu_addr, dma_addr, size, attrs);  in dma_get_sgtable_attrs()
  733  struct sg_table *sgt;  in alloc_single_sgt() local
  736  sgt = kmalloc(sizeof(*sgt), gfp);  in alloc_single_sgt()
  737  if (!sgt)  in alloc_single_sgt()
       [all …]
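mapping.c is where the nents/orig_nents split originates: dma_map_sgtable() stores the number of DMA segments the mapping produced in sgt->nents, while sgt->orig_nents keeps the CPU-side entry count from allocation. An IOMMU may merge entries, so nents can be smaller than orig_nents, which is why callers iterate with for_each_sgtable_dma_sg() after mapping and for_each_sgtable_sg() before. A usage sketch:

```c
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/printk.h>

static int map_and_walk(struct device *dev, struct sg_table *sgt)
{
	struct scatterlist *sg;
	unsigned int i;
	int ret;

	/* On success this sets sgt->nents (nents <= orig_nents). */
	ret = dma_map_sgtable(dev, sgt, DMA_TO_DEVICE, 0);
	if (ret)
		return ret;

	/* Walk DMA segments, not CPU pages. */
	for_each_sgtable_dma_sg(sgt, sg, i)
		pr_debug("seg %u: %pad + %u\n", i,
			 &sg_dma_address(sg), sg_dma_len(sg));

	dma_unmap_sgtable(dev, sgt, DMA_TO_DEVICE, 0);
	return 0;
}
```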
/linux-6.12.1/drivers/gpu/drm/i915/gem/

i915_gem_dmabuf.c
   32  struct sg_table *sgt;  in i915_gem_map_dma_buf() local
   40  sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);  in i915_gem_map_dma_buf()
   41  if (!sgt) {  in i915_gem_map_dma_buf()
   46  ret = sg_alloc_table(sgt, obj->mm.pages->orig_nents, GFP_KERNEL);  in i915_gem_map_dma_buf()
   50  dst = sgt->sgl;  in i915_gem_map_dma_buf()
   56  ret = dma_map_sgtable(attach->dev, sgt, dir, DMA_ATTR_SKIP_CPU_SYNC);  in i915_gem_map_dma_buf()
   60  return sgt;  in i915_gem_map_dma_buf()
   63  sg_free_table(sgt);  in i915_gem_map_dma_buf()
   65  kfree(sgt);  in i915_gem_map_dma_buf()
  240  struct sg_table *sgt;  in i915_gem_object_get_pages_dmabuf() local
       [all …]
/linux-6.12.1/drivers/media/platform/nvidia/tegra-vde/

dmabuf-cache.c
   27  struct sg_table *sgt;  member
   41  dma_buf_unmap_attachment_unlocked(entry->a, entry->sgt, entry->dma_dir);  in tegra_vde_release_entry()
   72  struct sg_table *sgt;  in tegra_vde_dmabuf_cache_map() local
   93  *addrp = sg_dma_address(entry->sgt->sgl);  in tegra_vde_dmabuf_cache_map()
  105  sgt = dma_buf_map_attachment_unlocked(attachment, dma_dir);  in tegra_vde_dmabuf_cache_map()
  106  if (IS_ERR(sgt)) {  in tegra_vde_dmabuf_cache_map()
  108  err = PTR_ERR(sgt);  in tegra_vde_dmabuf_cache_map()
  112  if (!vde->domain && sgt->nents > 1) {  in tegra_vde_dmabuf_cache_map()
  125  err = tegra_vde_iommu_map(vde, sgt, &iova, dmabuf->size);  in tegra_vde_dmabuf_cache_map()
  131  *addrp = sg_dma_address(sgt->sgl);  in tegra_vde_dmabuf_cache_map()
       [all …]
/linux-6.12.1/drivers/gpu/drm/

drm_gem_shmem_helper.c
  142  drm_prime_gem_destroy(obj, shmem->sgt);  in drm_gem_shmem_free()
  148  if (shmem->sgt) {  in drm_gem_shmem_free()
  149  dma_unmap_sgtable(obj->dev->dev, shmem->sgt,  in drm_gem_shmem_free()
  151  sg_free_table(shmem->sgt);  in drm_gem_shmem_free()
  152  kfree(shmem->sgt);  in drm_gem_shmem_free()
  452  dma_unmap_sgtable(dev->dev, shmem->sgt, DMA_BIDIRECTIONAL, 0);  in drm_gem_shmem_purge()
  453  sg_free_table(shmem->sgt);  in drm_gem_shmem_purge()
  454  kfree(shmem->sgt);  in drm_gem_shmem_purge()
  455  shmem->sgt = NULL;  in drm_gem_shmem_purge()
  681  struct sg_table *sgt;  in drm_gem_shmem_get_pages_sgt_locked() local
       [all …]

drm_prime.c
  655  struct sg_table *sgt;  in drm_gem_map_dma_buf() local
  664  sgt = obj->funcs->get_sg_table(obj);  in drm_gem_map_dma_buf()
  665  if (IS_ERR(sgt))  in drm_gem_map_dma_buf()
  666  return sgt;  in drm_gem_map_dma_buf()
  668  ret = dma_map_sgtable(attach->dev, sgt, dir,  in drm_gem_map_dma_buf()
  671  sg_free_table(sgt);  in drm_gem_map_dma_buf()
  672  kfree(sgt);  in drm_gem_map_dma_buf()
  673  sgt = ERR_PTR(ret);  in drm_gem_map_dma_buf()
  676  return sgt;  in drm_gem_map_dma_buf()
  689  struct sg_table *sgt,  in drm_gem_unmap_dma_buf() argument
       [all …]
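drm_gem_map_dma_buf() is the generic exporter path: ask the GEM object for its table, then map it for the attaching device. A condensed sketch of that flow, with error unwinding mirroring the hits above:

```c
#include <linux/dma-buf.h>
#include <linux/dma-mapping.h>
#include <linux/slab.h>
#include <linux/err.h>
#include <drm/drm_gem.h>

/* Exporter-side map: produce an sg_table mapped for the importer's device. */
static struct sg_table *gem_map_dma_buf(struct dma_buf_attachment *attach,
					enum dma_data_direction dir)
{
	struct drm_gem_object *obj = attach->dmabuf->priv;
	struct sg_table *sgt;
	int ret;

	sgt = obj->funcs->get_sg_table(obj);
	if (IS_ERR(sgt))
		return sgt;

	/* Exporter handles CPU cache coherency itself, so skip the sync. */
	ret = dma_map_sgtable(attach->dev, sgt, dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (ret) {
		sg_free_table(sgt);
		kfree(sgt);
		return ERR_PTR(ret);
	}
	return sgt;
}
```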
/linux-6.12.1/net/ceph/

crypto.c
  161  static int setup_sgtable(struct sg_table *sgt, struct scatterlist *prealloc_sg,  in setup_sgtable() argument
  173  memset(sgt, 0, sizeof(*sgt));  in setup_sgtable()
  183  ret = sg_alloc_table(sgt, chunk_cnt, GFP_NOFS);  in setup_sgtable()
  189  sgt->sgl = prealloc_sg;  in setup_sgtable()
  190  sgt->nents = sgt->orig_nents = 1;  in setup_sgtable()
  193  for_each_sg(sgt->sgl, sg, sgt->orig_nents, i) {  in setup_sgtable()
  213  static void teardown_sgtable(struct sg_table *sgt)  in teardown_sgtable() argument
  215  if (sgt->orig_nents > 1)  in teardown_sgtable()
  216  sg_free_table(sgt);  in teardown_sgtable()
  223  struct sg_table sgt;  in ceph_aes_crypt() local
       [all …]
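setup_sgtable() in net/ceph avoids an allocation in the common case: when one chunk suffices it points sgt->sgl at a caller-provided scatterlist and sets nents = orig_nents = 1, and teardown_sgtable() frees only tables that were actually allocated. A sketch of the pair; in the real code chunk_cnt is computed from the buffer's page alignment, here it is simply a parameter:

```c
#include <linux/scatterlist.h>
#include <linux/string.h>

/* Use a caller-preallocated entry for one chunk, else allocate a table. */
static int setup_sgtable_sketch(struct sg_table *sgt,
				struct scatterlist *prealloc_sg,
				unsigned int chunk_cnt)
{
	if (chunk_cnt == 1) {
		memset(sgt, 0, sizeof(*sgt));
		sg_init_table(prealloc_sg, 1);
		sgt->sgl = prealloc_sg;
		sgt->nents = sgt->orig_nents = 1;
		return 0;
	}
	return sg_alloc_table(sgt, chunk_cnt, GFP_NOFS);
}

static void teardown_sgtable_sketch(struct sg_table *sgt)
{
	/* Only tables we actually allocated need freeing. */
	if (sgt->orig_nents > 1)
		sg_free_table(sgt);
}
```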
/linux-6.12.1/include/linux/

scatterlist.h
   46  struct sg_table sgt;    /* The scatter list table */  member
  201  #define for_each_sgtable_sg(sgt, sg, i) \  argument
  202          for_each_sg((sgt)->sgl, sg, (sgt)->orig_nents, i)
  209  #define for_each_sgtable_dma_sg(sgt, sg, i) \  argument
  210          for_each_sg((sgt)->sgl, sg, (sgt)->nents, i)
  437  void sg_free_append_table(struct sg_append_table *sgt);
  441  int sg_alloc_append_table_from_pages(struct sg_append_table *sgt,
  446  int sg_alloc_table_from_pages_segment(struct sg_table *sgt, struct page **pages,
  471  static inline int sg_alloc_table_from_pages(struct sg_table *sgt,  in sg_alloc_table_from_pages() argument
  477  return sg_alloc_table_from_pages_segment(sgt, pages, n_pages, offset,  in sg_alloc_table_from_pages()
       [all …]
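scatterlist.h defines both iteration flavors (orig_nents for CPU-side walks, nents for DMA-side walks) and the from-pages constructors; sg_alloc_table_from_pages() is a thin inline wrapper over sg_alloc_table_from_pages_segment() with an effectively unlimited segment size. When a device caps its DMA segment length, the _segment variant can be called directly. A sketch, where the 64 KiB cap is an example value and real code would query dma_get_max_seg_size(dev):

```c
#include <linux/scatterlist.h>
#include <linux/sizes.h>

/* Build a table whose entries never exceed a fixed max segment size. */
static int pages_to_sgt_capped(struct sg_table *sgt, struct page **pages,
			       unsigned int n_pages, unsigned long size)
{
	/* Example cap; substitute the device's real limit. */
	return sg_alloc_table_from_pages_segment(sgt, pages, n_pages, 0,
						 size, SZ_64K, GFP_KERNEL);
}
```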
/linux-6.12.1/drivers/gpu/drm/xe/

xe_dma_buf.c
   95  struct sg_table *sgt;  in xe_dma_buf_map() local
  112  sgt = drm_prime_pages_to_sg(obj->dev,  in xe_dma_buf_map()
  115  if (IS_ERR(sgt))  in xe_dma_buf_map()
  116  return sgt;  in xe_dma_buf_map()
  118  if (dma_map_sgtable(attach->dev, sgt, dir,  in xe_dma_buf_map()
  128  dir, &sgt);  in xe_dma_buf_map()
  136  return sgt;  in xe_dma_buf_map()
  139  sg_free_table(sgt);  in xe_dma_buf_map()
  140  kfree(sgt);  in xe_dma_buf_map()
  145  struct sg_table *sgt,  in xe_dma_buf_unmap() argument
       [all …]
/linux-6.12.1/drivers/gpu/drm/amd/amdgpu/

amdgpu_dma_buf.c
  117  struct sg_table *sgt;  in amdgpu_dma_buf_map() local
  141  sgt = drm_prime_pages_to_sg(obj->dev,  in amdgpu_dma_buf_map()
  144  if (IS_ERR(sgt))  in amdgpu_dma_buf_map()
  145  return sgt;  in amdgpu_dma_buf_map()
  147  if (dma_map_sgtable(attach->dev, sgt, dir,  in amdgpu_dma_buf_map()
  155  dir, &sgt);  in amdgpu_dma_buf_map()
  163  return sgt;  in amdgpu_dma_buf_map()
  166  sg_free_table(sgt);  in amdgpu_dma_buf_map()
  167  kfree(sgt);  in amdgpu_dma_buf_map()
  181  struct sg_table *sgt,  in amdgpu_dma_buf_unmap() argument
       [all …]
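xe_dma_buf_map() and amdgpu_dma_buf_map() share the same system-memory branch: convert the object's page array with drm_prime_pages_to_sg(), then DMA-map the result for the importer's device. A sketch of that branch under those assumptions:

```c
#include <linux/dma-mapping.h>
#include <linux/slab.h>
#include <linux/err.h>
#include <drm/drm_prime.h>

static struct sg_table *map_pages_for_importer(struct drm_device *drm,
					       struct device *importer,
					       struct page **pages,
					       unsigned int nr_pages,
					       enum dma_data_direction dir)
{
	struct sg_table *sgt;
	int ret;

	/* Coalesce the page array into an sg_table. */
	sgt = drm_prime_pages_to_sg(drm, pages, nr_pages);
	if (IS_ERR(sgt))
		return sgt;

	ret = dma_map_sgtable(importer, sgt, dir, DMA_ATTR_SKIP_CPU_SYNC);
	if (ret) {
		sg_free_table(sgt);
		kfree(sgt);
		return ERR_PTR(ret);
	}
	return sgt;
}
```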
/linux-6.12.1/drivers/infiniband/core/

umem_dmabuf.c
   17  struct sg_table *sgt;  in ib_umem_dmabuf_map_pages() local
   29  if (umem_dmabuf->sgt)  in ib_umem_dmabuf_map_pages()
   32  sgt = dma_buf_map_attachment(umem_dmabuf->attach,  in ib_umem_dmabuf_map_pages()
   34  if (IS_ERR(sgt))  in ib_umem_dmabuf_map_pages()
   35  return PTR_ERR(sgt);  in ib_umem_dmabuf_map_pages()
   42  for_each_sgtable_dma_sg(sgt, sg, i) {  in ib_umem_dmabuf_map_pages()
   65  umem_dmabuf->umem.sgt_append.sgt.sgl = umem_dmabuf->first_sg;  in ib_umem_dmabuf_map_pages()
   66  umem_dmabuf->umem.sgt_append.sgt.nents = nmap;  in ib_umem_dmabuf_map_pages()
   67  umem_dmabuf->sgt = sgt;  in ib_umem_dmabuf_map_pages()
   90  if (!umem_dmabuf->sgt)  in ib_umem_dmabuf_unmap_pages()
       [all …]
/linux-6.12.1/drivers/gpu/drm/imagination/

pvr_gem.c
  216  if (shmem_obj->sgt)  in pvr_gem_object_vmap()
  217  dma_sync_sgtable_for_cpu(dev, shmem_obj->sgt, DMA_BIDIRECTIONAL);  in pvr_gem_object_vmap()
  256  if (shmem_obj->sgt)  in pvr_gem_object_vunmap()
  257  dma_sync_sgtable_for_device(dev, shmem_obj->sgt, DMA_BIDIRECTIONAL);  in pvr_gem_object_vunmap()
  341  struct sg_table *sgt;  in pvr_gem_object_create() local
  357  sgt = drm_gem_shmem_get_pages_sgt(shmem_obj);  in pvr_gem_object_create()
  358  if (IS_ERR(sgt)) {  in pvr_gem_object_create()
  359  err = PTR_ERR(sgt);  in pvr_gem_object_create()
  363  dma_sync_sgtable_for_device(shmem_obj->base.dev->dev, sgt,  in pvr_gem_object_create()
  400  WARN_ON(!shmem_obj->sgt);  in pvr_gem_get_dma_addr()
       [all …]
/linux-6.12.1/drivers/gpu/drm/etnaviv/

etnaviv_gem.c
   23  struct sg_table *sgt = etnaviv_obj->sgt;  in etnaviv_gem_scatter_map() local
   30  dma_map_sgtable(dev->dev, sgt, DMA_BIDIRECTIONAL, 0);  in etnaviv_gem_scatter_map()
   36  struct sg_table *sgt = etnaviv_obj->sgt;  in etnaviv_gem_scatterlist_unmap() local
   54  dma_unmap_sgtable(dev->dev, sgt, DMA_BIDIRECTIONAL, 0);  in etnaviv_gem_scatterlist_unmap()
   75  if (etnaviv_obj->sgt) {  in put_pages()
   77  sg_free_table(etnaviv_obj->sgt);  in put_pages()
   78  kfree(etnaviv_obj->sgt);  in put_pages()
   79  etnaviv_obj->sgt = NULL;  in put_pages()
  101  if (!etnaviv_obj->sgt) {  in etnaviv_gem_get_pages()
  104  struct sg_table *sgt;  in etnaviv_gem_get_pages() local
       [all …]
/linux-6.12.1/drivers/gpu/drm/lima/

lima_gem.c
   29  struct sg_table sgt;  in lima_heap_alloc() local
   67  ret = sg_alloc_table_from_pages(&sgt, pages, i, 0,  in lima_heap_alloc()
   72  if (bo->base.sgt) {  in lima_heap_alloc()
   73  dma_unmap_sgtable(dev, bo->base.sgt, DMA_BIDIRECTIONAL, 0);  in lima_heap_alloc()
   74  sg_free_table(bo->base.sgt);  in lima_heap_alloc()
   76  bo->base.sgt = kmalloc(sizeof(*bo->base.sgt), GFP_KERNEL);  in lima_heap_alloc()
   77  if (!bo->base.sgt) {  in lima_heap_alloc()
   83  ret = dma_map_sgtable(dev, &sgt, DMA_BIDIRECTIONAL, 0);  in lima_heap_alloc()
   87  *bo->base.sgt = sgt;  in lima_heap_alloc()
   99  dma_unmap_sgtable(dev, &sgt, DMA_BIDIRECTIONAL, 0);  in lima_heap_alloc()
       [all …]
/linux-6.12.1/drivers/gpu/drm/rockchip/

rockchip_drm_gem.c
   41  ret = iommu_map_sgtable(private->domain, rk_obj->dma_addr, rk_obj->sgt,  in rockchip_gem_iommu_map()
   90  rk_obj->sgt = drm_prime_pages_to_sg(rk_obj->base.dev,  in rockchip_gem_get_pages()
   92  if (IS_ERR(rk_obj->sgt)) {  in rockchip_gem_get_pages()
   93  ret = PTR_ERR(rk_obj->sgt);  in rockchip_gem_get_pages()
  104  for_each_sgtable_sg(rk_obj->sgt, s, i)  in rockchip_gem_get_pages()
  107  dma_sync_sgtable_for_device(drm->dev, rk_obj->sgt, DMA_TO_DEVICE);  in rockchip_gem_get_pages()
  118  sg_free_table(rk_obj->sgt);  in rockchip_gem_put_pages()
  119  kfree(rk_obj->sgt);  in rockchip_gem_put_pages()
  339  dma_unmap_sgtable(drm->dev, rk_obj->sgt,  in rockchip_gem_free_object()
  342  drm_prime_gem_destroy(obj, rk_obj->sgt);  in rockchip_gem_free_object()
       [all …]
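rockchip_gem_iommu_map() bypasses the DMA API and installs the table into the driver's own IOMMU domain with iommu_map_sgtable(). A sketch of that step; the IOVA allocation and the protection flags are the caller's responsibility, and a short mapping is rolled back the way the driver does it:

```c
#include <linux/iommu.h>
#include <linux/scatterlist.h>

/* Map every segment of an sg_table at a fixed IOVA in a private domain. */
static int map_sgt_into_domain(struct iommu_domain *domain,
			       unsigned long iova, struct sg_table *sgt,
			       size_t size)
{
	ssize_t mapped;

	mapped = iommu_map_sgtable(domain, iova, sgt,
				   IOMMU_READ | IOMMU_WRITE);
	if (mapped < 0)
		return mapped;

	/* A short map means the domain ran out of space: roll back. */
	if ((size_t)mapped < size) {
		iommu_unmap(domain, iova, mapped);
		return -ENOMEM;
	}
	return 0;
}
```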