Searched refs:sgt (Results 1 - 80 of 80) sorted by relevance

/linux-4.1.27/drivers/gpu/drm/exynos/
exynos_drm_dmabuf.c
21 struct sg_table sgt; member in struct:exynos_drm_dmabuf_attachment
51 struct sg_table *sgt; exynos_gem_detach_dma_buf() local
56 sgt = &exynos_attach->sgt; exynos_gem_detach_dma_buf()
59 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, exynos_gem_detach_dma_buf()
62 sg_free_table(sgt); exynos_gem_detach_dma_buf()
76 struct sg_table *sgt = NULL; exynos_gem_map_dma_buf() local
80 /* just return current sgt if already requested. */ exynos_gem_map_dma_buf()
82 return &exynos_attach->sgt; exynos_gem_map_dma_buf()
90 sgt = &exynos_attach->sgt; exynos_gem_map_dma_buf()
92 ret = sg_alloc_table(sgt, buf->sgt->orig_nents, GFP_KERNEL); exynos_gem_map_dma_buf()
94 DRM_ERROR("failed to alloc sgt.\n"); exynos_gem_map_dma_buf()
100 rd = buf->sgt->sgl; exynos_gem_map_dma_buf()
101 wr = sgt->sgl; exynos_gem_map_dma_buf()
102 for (i = 0; i < sgt->orig_nents; ++i) { exynos_gem_map_dma_buf()
109 nents = dma_map_sg(attach->dev, sgt->sgl, sgt->orig_nents, dir); exynos_gem_map_dma_buf()
112 sg_free_table(sgt); exynos_gem_map_dma_buf()
113 sgt = ERR_PTR(-EIO); exynos_gem_map_dma_buf()
126 return sgt; exynos_gem_map_dma_buf()
130 struct sg_table *sgt, exynos_gem_unmap_dma_buf()
202 struct sg_table *sgt; exynos_dmabuf_prime_import() local
231 sgt = dma_buf_map_attachment(attach, DMA_BIDIRECTIONAL); exynos_dmabuf_prime_import()
232 if (IS_ERR(sgt)) { exynos_dmabuf_prime_import()
233 ret = PTR_ERR(sgt); exynos_dmabuf_prime_import()
249 sgl = sgt->sgl; exynos_dmabuf_prime_import()
254 if (sgt->nents == 1) { exynos_dmabuf_prime_import()
255 /* always physically continuous memory if sgt->nents is 1. */ exynos_dmabuf_prime_import()
268 buffer->sgt = sgt; exynos_dmabuf_prime_import()
280 dma_buf_unmap_attachment(attach, sgt, DMA_BIDIRECTIONAL); exynos_dmabuf_prime_import()
129 exynos_gem_unmap_dma_buf(struct dma_buf_attachment *attach, struct sg_table *sgt, enum dma_data_direction dir) exynos_gem_unmap_dma_buf() argument
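
Note: the exynos_gem_map_dma_buf() hits above follow a common dma-buf export pattern: clone the exporter's sg_table entry by entry, then DMA-map the clone for the attaching device. A minimal sketch of that pattern, assuming a hypothetical clone_and_map_sgt() helper; the real driver embeds the table in its attachment struct, while this sketch heap-allocates it to stay self-contained.

#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static struct sg_table *clone_and_map_sgt(struct device *dev,
					  struct sg_table *src,
					  enum dma_data_direction dir)
{
	struct scatterlist *rd, *wr;
	struct sg_table *sgt;
	int i, ret;

	sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		return ERR_PTR(-ENOMEM);

	ret = sg_alloc_table(sgt, src->orig_nents, GFP_KERNEL);
	if (ret) {
		kfree(sgt);
		return ERR_PTR(ret);
	}

	/* Copy page/offset/length from the exporter's table... */
	rd = src->sgl;
	wr = sgt->sgl;
	for (i = 0; i < sgt->orig_nents; i++) {
		sg_set_page(wr, sg_page(rd), rd->length, rd->offset);
		rd = sg_next(rd);
		wr = sg_next(wr);
	}

	/* ...then map the copy for the importing device. */
	if (!dma_map_sg(dev, sgt->sgl, sgt->orig_nents, dir)) {
		sg_free_table(sgt);
		kfree(sgt);
		return ERR_PTR(-EIO);
	}

	return sgt;
}
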
exynos_drm_buf.h
23 /* allocate physical memory region and setup sgt. */
28 /* release physical memory region, and sgt. */
exynos_drm_buf.c
93 buf->sgt = drm_prime_pages_to_sg(buf->pages, nr_pages); lowlevel_buffer_allocate()
94 if (IS_ERR(buf->sgt)) { lowlevel_buffer_allocate()
96 ret = PTR_ERR(buf->sgt); lowlevel_buffer_allocate()
129 sg_free_table(buf->sgt); lowlevel_buffer_deallocate()
131 kfree(buf->sgt); lowlevel_buffer_deallocate()
132 buf->sgt = NULL; lowlevel_buffer_deallocate()
exynos_drm_gem.c
90 if (!buf->sgt) exynos_drm_gem_map_buf()
98 sgl = buf->sgt->sgl; exynos_drm_gem_map_buf()
99 for_each_sg(buf->sgt->sgl, sgl, buf->sgt->nents, i) { exynos_drm_gem_map_buf()
479 struct sg_table *sgt, exynos_gem_map_sgt_with_dma()
486 nents = dma_map_sg(drm_dev->dev, sgt->sgl, sgt->nents, dir); exynos_gem_map_sgt_with_dma()
498 struct sg_table *sgt, exynos_gem_unmap_sgt_from_dma()
501 dma_unmap_sg(drm_dev->dev, sgt->sgl, sgt->nents, dir); exynos_gem_unmap_sgt_from_dma()
513 drm_prime_gem_destroy(obj, buf->sgt); exynos_drm_gem_free_object()
478 exynos_gem_map_sgt_with_dma(struct drm_device *drm_dev, struct sg_table *sgt, enum dma_data_direction dir) exynos_gem_map_sgt_with_dma() argument
497 exynos_gem_unmap_sgt_from_dma(struct drm_device *drm_dev, struct sg_table *sgt, enum dma_data_direction dir) exynos_gem_unmap_sgt_from_dma() argument
exynos_drm_gem.h
33 * @sgt: sg table to transfer page data.
46 struct sg_table *sgt; member in struct:exynos_drm_gem_buf
172 /* map sgt with dma region. */
174 struct sg_table *sgt,
177 /* unmap sgt from dma region. */
179 struct sg_table *sgt,
exynos_drm_g2d.c
195 struct sg_table *sgt; member in struct:g2d_cmdlist_userptr
382 exynos_gem_unmap_sgt_from_dma(drm_dev, g2d_userptr->sgt, g2d_userptr_put_dma_addr()
394 sg_free_table(g2d_userptr->sgt); g2d_userptr_put_dma_addr()
395 kfree(g2d_userptr->sgt); g2d_userptr_put_dma_addr()
412 struct sg_table *sgt; g2d_userptr_get_dma_addr() local
510 sgt = kzalloc(sizeof(*sgt), GFP_KERNEL); g2d_userptr_get_dma_addr()
511 if (!sgt) { g2d_userptr_get_dma_addr()
516 ret = sg_alloc_table_from_pages(sgt, pages, npages, offset, g2d_userptr_get_dma_addr()
519 DRM_ERROR("failed to get sgt from pages.\n"); g2d_userptr_get_dma_addr()
523 g2d_userptr->sgt = sgt; g2d_userptr_get_dma_addr()
525 ret = exynos_gem_map_sgt_with_dma(drm_dev, g2d_userptr->sgt, g2d_userptr_get_dma_addr()
528 DRM_ERROR("failed to map sgt with dma region.\n"); g2d_userptr_get_dma_addr()
532 g2d_userptr->dma_addr = sgt->sgl[0].dma_address; g2d_userptr_get_dma_addr()
547 sg_free_table(sgt); g2d_userptr_get_dma_addr()
550 kfree(sgt); g2d_userptr_get_dma_addr()
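
Note: g2d_userptr_get_dma_addr() above shows the userptr path: pinned user pages become an sg_table via sg_alloc_table_from_pages(), which also coalesces physically contiguous pages into fewer, larger entries. A hedged sketch of just that step; the helper name userptr_pages_to_sgt() is invented for illustration.

#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static struct sg_table *userptr_pages_to_sgt(struct page **pages,
					     unsigned int npages,
					     unsigned long offset,
					     unsigned long size)
{
	struct sg_table *sgt;
	int ret;

	sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		return ERR_PTR(-ENOMEM);

	/* Contiguous pages collapse into single larger entries. */
	ret = sg_alloc_table_from_pages(sgt, pages, npages, offset,
					size, GFP_KERNEL);
	if (ret) {
		kfree(sgt);
		return ERR_PTR(ret);
	}
	return sgt;
}
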
/linux-4.1.27/drivers/media/v4l2-core/
videobuf2-dma-contig.c
53 static void vb2_dc_sgt_foreach_page(struct sg_table *sgt, vb2_dc_sgt_foreach_page() argument
59 for_each_sg(sgt->sgl, s, sgt->orig_nents, i) { vb2_dc_sgt_foreach_page()
70 static unsigned long vb2_dc_get_contiguous_size(struct sg_table *sgt) vb2_dc_get_contiguous_size() argument
73 dma_addr_t expected = sg_dma_address(sgt->sgl); vb2_dc_get_contiguous_size()
77 for_each_sg(sgt->sgl, s, sgt->nents, i) { vb2_dc_get_contiguous_size()
117 struct sg_table *sgt = buf->dma_sgt; vb2_dc_prepare() local
120 if (!sgt || buf->db_attach) vb2_dc_prepare()
123 dma_sync_sg_for_device(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir); vb2_dc_prepare()
129 struct sg_table *sgt = buf->dma_sgt; vb2_dc_finish() local
132 if (!sgt || buf->db_attach) vb2_dc_finish()
135 dma_sync_sg_for_cpu(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir); vb2_dc_finish()
233 struct sg_table sgt; member in struct:vb2_dc_attachment
243 struct sg_table *sgt; vb2_dc_dmabuf_ops_attach() local
251 sgt = &attach->sgt; vb2_dc_dmabuf_ops_attach()
255 ret = sg_alloc_table(sgt, buf->sgt_base->orig_nents, GFP_KERNEL); vb2_dc_dmabuf_ops_attach()
262 wr = sgt->sgl; vb2_dc_dmabuf_ops_attach()
263 for (i = 0; i < sgt->orig_nents; ++i) { vb2_dc_dmabuf_ops_attach()
279 struct sg_table *sgt; vb2_dc_dmabuf_ops_detach() local
284 sgt = &attach->sgt; vb2_dc_dmabuf_ops_detach()
288 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, vb2_dc_dmabuf_ops_detach()
290 sg_free_table(sgt); vb2_dc_dmabuf_ops_detach()
301 struct sg_table *sgt; vb2_dc_dmabuf_ops_map() local
306 sgt = &attach->sgt; vb2_dc_dmabuf_ops_map()
310 return sgt; vb2_dc_dmabuf_ops_map()
315 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, vb2_dc_dmabuf_ops_map()
321 ret = dma_map_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, dma_dir); vb2_dc_dmabuf_ops_map()
332 return sgt; vb2_dc_dmabuf_ops_map()
336 struct sg_table *sgt, enum dma_data_direction dma_dir) vb2_dc_dmabuf_ops_unmap()
382 struct sg_table *sgt; vb2_dc_get_base_sgt() local
384 sgt = kmalloc(sizeof(*sgt), GFP_KERNEL); vb2_dc_get_base_sgt()
385 if (!sgt) { vb2_dc_get_base_sgt()
390 ret = dma_get_sgtable(buf->dev, sgt, buf->vaddr, buf->dma_addr, vb2_dc_get_base_sgt()
394 kfree(sgt); vb2_dc_get_base_sgt()
398 return sgt; vb2_dc_get_base_sgt()
517 struct sg_table *sgt = buf->dma_sgt; vb2_dc_put_userptr() local
519 if (sgt) { vb2_dc_put_userptr()
527 dma_unmap_sg_attrs(buf->dev, sgt->sgl, sgt->orig_nents, vb2_dc_put_userptr()
530 vb2_dc_sgt_foreach_page(sgt, vb2_dc_put_dirty_page); vb2_dc_put_userptr()
532 sg_free_table(sgt); vb2_dc_put_userptr()
533 kfree(sgt); vb2_dc_put_userptr()
582 struct sg_table *sgt; vb2_dc_get_userptr() local
655 sgt = kzalloc(sizeof(*sgt), GFP_KERNEL); vb2_dc_get_userptr()
656 if (!sgt) { vb2_dc_get_userptr()
662 ret = sg_alloc_table_from_pages(sgt, pages, n_pages, vb2_dc_get_userptr()
677 sgt->nents = dma_map_sg_attrs(buf->dev, sgt->sgl, sgt->orig_nents, vb2_dc_get_userptr()
679 if (sgt->nents <= 0) { vb2_dc_get_userptr()
685 contig_size = vb2_dc_get_contiguous_size(sgt); vb2_dc_get_userptr()
693 buf->dma_addr = sg_dma_address(sgt->sgl); vb2_dc_get_userptr()
695 buf->dma_sgt = sgt; vb2_dc_get_userptr()
700 dma_unmap_sg_attrs(buf->dev, sgt->sgl, sgt->orig_nents, vb2_dc_get_userptr()
705 vb2_dc_sgt_foreach_page(sgt, put_page); vb2_dc_get_userptr()
706 sg_free_table(sgt); vb2_dc_get_userptr()
709 kfree(sgt); vb2_dc_get_userptr()
735 struct sg_table *sgt; vb2_dc_map_dmabuf() local
749 sgt = dma_buf_map_attachment(buf->db_attach, buf->dma_dir); vb2_dc_map_dmabuf()
750 if (IS_ERR(sgt)) { vb2_dc_map_dmabuf()
756 contig_size = vb2_dc_get_contiguous_size(sgt); vb2_dc_map_dmabuf()
760 dma_buf_unmap_attachment(buf->db_attach, sgt, buf->dma_dir); vb2_dc_map_dmabuf()
764 buf->dma_addr = sg_dma_address(sgt->sgl); vb2_dc_map_dmabuf()
765 buf->dma_sgt = sgt; vb2_dc_map_dmabuf()
774 struct sg_table *sgt = buf->dma_sgt; vb2_dc_unmap_dmabuf() local
781 if (WARN_ON(!sgt)) { vb2_dc_unmap_dmabuf()
790 dma_buf_unmap_attachment(buf->db_attach, sgt, buf->dma_dir); vb2_dc_unmap_dmabuf()
335 vb2_dc_dmabuf_ops_unmap(struct dma_buf_attachment *db_attach, struct sg_table *sgt, enum dma_data_direction dma_dir) vb2_dc_dmabuf_ops_unmap() argument
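
Note: vb2_dc_get_contiguous_size(), hit above, is the key check in this allocator: after dma_map_sg() it counts bytes only while each mapped entry's DMA address continues where the previous one ended. Reconstructed from the fragments above:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static unsigned long sgt_contiguous_size(struct sg_table *sgt)
{
	struct scatterlist *s;
	dma_addr_t expected = sg_dma_address(sgt->sgl);
	unsigned long size = 0;
	unsigned int i;

	for_each_sg(sgt->sgl, s, sgt->nents, i) {
		if (sg_dma_address(s) != expected)
			break;	/* hole in the DMA address space */
		expected = sg_dma_address(s) + sg_dma_len(s);
		size += sg_dma_len(s);
	}
	return size;
}
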
videobuf2-dma-sg.c
107 struct sg_table *sgt; vb2_dma_sg_alloc() local
145 sgt = &buf->sg_table; vb2_dma_sg_alloc()
150 if (dma_map_sg_attrs(buf->dev, sgt->sgl, sgt->nents, vb2_dma_sg_alloc()
181 struct sg_table *sgt = &buf->sg_table; vb2_dma_sg_put() local
190 dma_unmap_sg_attrs(buf->dev, sgt->sgl, sgt->nents, vb2_dma_sg_put()
206 struct sg_table *sgt = buf->dma_sgt; vb2_dma_sg_prepare() local
212 dma_sync_sg_for_device(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir); vb2_dma_sg_prepare()
218 struct sg_table *sgt = buf->dma_sgt; vb2_dma_sg_finish() local
224 dma_sync_sg_for_cpu(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir); vb2_dma_sg_finish()
241 struct sg_table *sgt; vb2_dma_sg_get_userptr() local
312 sgt = &buf->sg_table; vb2_dma_sg_get_userptr()
317 if (dma_map_sg_attrs(buf->dev, sgt->sgl, sgt->nents, vb2_dma_sg_get_userptr()
346 struct sg_table *sgt = &buf->sg_table; vb2_dma_sg_put_userptr() local
354 dma_unmap_sg_attrs(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir, &attrs); vb2_dma_sg_put_userptr()
436 struct sg_table sgt; member in struct:vb2_dma_sg_attachment
446 struct sg_table *sgt; vb2_dma_sg_dmabuf_ops_attach() local
454 sgt = &attach->sgt; vb2_dma_sg_dmabuf_ops_attach()
458 ret = sg_alloc_table(sgt, buf->dma_sgt->orig_nents, GFP_KERNEL); vb2_dma_sg_dmabuf_ops_attach()
465 wr = sgt->sgl; vb2_dma_sg_dmabuf_ops_attach()
466 for (i = 0; i < sgt->orig_nents; ++i) { vb2_dma_sg_dmabuf_ops_attach()
482 struct sg_table *sgt; vb2_dma_sg_dmabuf_ops_detach() local
487 sgt = &attach->sgt; vb2_dma_sg_dmabuf_ops_detach()
491 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, vb2_dma_sg_dmabuf_ops_detach()
493 sg_free_table(sgt); vb2_dma_sg_dmabuf_ops_detach()
504 struct sg_table *sgt; vb2_dma_sg_dmabuf_ops_map() local
509 sgt = &attach->sgt; vb2_dma_sg_dmabuf_ops_map()
513 return sgt; vb2_dma_sg_dmabuf_ops_map()
518 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, vb2_dma_sg_dmabuf_ops_map()
524 ret = dma_map_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, dma_dir); vb2_dma_sg_dmabuf_ops_map()
535 return sgt; vb2_dma_sg_dmabuf_ops_map()
539 struct sg_table *sgt, enum dma_data_direction dma_dir) vb2_dma_sg_dmabuf_ops_unmap()
613 struct sg_table *sgt; vb2_dma_sg_map_dmabuf() local
626 sgt = dma_buf_map_attachment(buf->db_attach, buf->dma_dir); vb2_dma_sg_map_dmabuf()
627 if (IS_ERR(sgt)) { vb2_dma_sg_map_dmabuf()
632 buf->dma_sgt = sgt; vb2_dma_sg_map_dmabuf()
641 struct sg_table *sgt = buf->dma_sgt; vb2_dma_sg_unmap_dmabuf() local
648 if (WARN_ON(!sgt)) { vb2_dma_sg_unmap_dmabuf()
657 dma_buf_unmap_attachment(buf->db_attach, sgt, buf->dma_dir); vb2_dma_sg_unmap_dmabuf()
538 vb2_dma_sg_dmabuf_ops_unmap(struct dma_buf_attachment *db_attach, struct sg_table *sgt, enum dma_data_direction dma_dir) vb2_dma_sg_dmabuf_ops_unmap() argument
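
Note: the vb2_dma_sg_dmabuf_ops_map() fragments above (and the vb2_dc_ ones before them) implement a small cache: the attachment keeps one sg_table and remaps only when the requested DMA direction changes. A simplified sketch, with a hypothetical struct cached_attachment standing in for the driver's attachment type:

#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

struct cached_attachment {		/* hypothetical stand-in */
	struct sg_table sgt;
	enum dma_data_direction dma_dir; /* DMA_NONE until first map */
};

static struct sg_table *cached_map(struct device *dev,
				   struct cached_attachment *attach,
				   enum dma_data_direction dir)
{
	struct sg_table *sgt = &attach->sgt;

	/* Already mapped in the right direction: reuse it. */
	if (attach->dma_dir == dir)
		return sgt;

	/* Direction changed: drop the stale mapping first. */
	if (attach->dma_dir != DMA_NONE)
		dma_unmap_sg(dev, sgt->sgl, sgt->orig_nents, attach->dma_dir);

	if (!dma_map_sg(dev, sgt->sgl, sgt->orig_nents, dir))
		return ERR_PTR(-EIO);

	attach->dma_dir = dir;
	return sgt;
}
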
videobuf2-vmalloc.c
220 struct sg_table sgt; member in struct:vb2_vmalloc_attachment
230 struct sg_table *sgt; vb2_vmalloc_dmabuf_ops_attach() local
240 sgt = &attach->sgt; vb2_vmalloc_dmabuf_ops_attach()
241 ret = sg_alloc_table(sgt, num_pages, GFP_KERNEL); vb2_vmalloc_dmabuf_ops_attach()
246 for_each_sg(sgt->sgl, sg, sgt->nents, i) { vb2_vmalloc_dmabuf_ops_attach()
250 sg_free_table(sgt); vb2_vmalloc_dmabuf_ops_attach()
267 struct sg_table *sgt; vb2_vmalloc_dmabuf_ops_detach() local
272 sgt = &attach->sgt; vb2_vmalloc_dmabuf_ops_detach()
276 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, vb2_vmalloc_dmabuf_ops_detach()
278 sg_free_table(sgt); vb2_vmalloc_dmabuf_ops_detach()
289 struct sg_table *sgt; vb2_vmalloc_dmabuf_ops_map() local
294 sgt = &attach->sgt; vb2_vmalloc_dmabuf_ops_map()
298 return sgt; vb2_vmalloc_dmabuf_ops_map()
303 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, vb2_vmalloc_dmabuf_ops_map()
309 ret = dma_map_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, dma_dir); vb2_vmalloc_dmabuf_ops_map()
320 return sgt; vb2_vmalloc_dmabuf_ops_map()
324 struct sg_table *sgt, enum dma_data_direction dma_dir) vb2_vmalloc_dmabuf_ops_unmap()
323 vb2_vmalloc_dmabuf_ops_unmap(struct dma_buf_attachment *db_attach, struct sg_table *sgt, enum dma_data_direction dma_dir) vb2_vmalloc_dmabuf_ops_unmap() argument
/linux-4.1.27/drivers/gpu/drm/tegra/
gem.c
38 static dma_addr_t tegra_bo_pin(struct host1x_bo *bo, struct sg_table **sgt) tegra_bo_pin() argument
45 static void tegra_bo_unpin(struct host1x_bo *bo, struct sg_table *sgt) tegra_bo_unpin() argument
117 err = iommu_map_sg(tegra->domain, bo->paddr, bo->sgt->sgl, tegra_bo_iommu_map()
118 bo->sgt->nents, prot); tegra_bo_iommu_map()
181 sg_free_table(bo->sgt); tegra_bo_free()
182 kfree(bo->sgt); tegra_bo_free()
192 struct sg_table *sgt; tegra_bo_get_pages() local
201 sgt = drm_prime_pages_to_sg(bo->pages, bo->num_pages); tegra_bo_get_pages()
202 if (IS_ERR(sgt)) tegra_bo_get_pages()
214 for_each_sg(sgt->sgl, s, sgt->nents, i) tegra_bo_get_pages()
217 if (dma_map_sg(drm->dev, sgt->sgl, sgt->nents, DMA_TO_DEVICE) == 0) tegra_bo_get_pages()
220 bo->sgt = sgt; tegra_bo_get_pages()
225 sg_free_table(sgt); tegra_bo_get_pages()
226 kfree(sgt); tegra_bo_get_pages()
227 sgt = ERR_PTR(-ENOMEM); tegra_bo_get_pages()
230 return PTR_ERR(sgt); tegra_bo_get_pages()
336 bo->sgt = dma_buf_map_attachment(attach, DMA_TO_DEVICE); tegra_bo_import()
337 if (!bo->sgt) { tegra_bo_import()
342 if (IS_ERR(bo->sgt)) { tegra_bo_import()
343 err = PTR_ERR(bo->sgt); tegra_bo_import()
352 if (bo->sgt->nents > 1) { tegra_bo_import()
357 bo->paddr = sg_dma_address(bo->sgt->sgl); tegra_bo_import()
365 if (!IS_ERR_OR_NULL(bo->sgt)) tegra_bo_import()
366 dma_buf_unmap_attachment(attach, bo->sgt, DMA_TO_DEVICE); tegra_bo_import()
385 dma_buf_unmap_attachment(gem->import_attach, bo->sgt, tegra_bo_free_object()
521 struct sg_table *sgt; tegra_gem_prime_map_dma_buf() local
523 sgt = kmalloc(sizeof(*sgt), GFP_KERNEL); tegra_gem_prime_map_dma_buf()
524 if (!sgt) tegra_gem_prime_map_dma_buf()
531 if (sg_alloc_table(sgt, bo->num_pages, GFP_KERNEL)) tegra_gem_prime_map_dma_buf()
534 for_each_sg(sgt->sgl, sg, bo->num_pages, i) tegra_gem_prime_map_dma_buf()
537 if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) tegra_gem_prime_map_dma_buf()
540 if (sg_alloc_table(sgt, 1, GFP_KERNEL)) tegra_gem_prime_map_dma_buf()
543 sg_dma_address(sgt->sgl) = bo->paddr; tegra_gem_prime_map_dma_buf()
544 sg_dma_len(sgt->sgl) = gem->size; tegra_gem_prime_map_dma_buf()
547 return sgt; tegra_gem_prime_map_dma_buf()
550 sg_free_table(sgt); tegra_gem_prime_map_dma_buf()
551 kfree(sgt); tegra_gem_prime_map_dma_buf()
556 struct sg_table *sgt, tegra_gem_prime_unmap_dma_buf()
563 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, dir); tegra_gem_prime_unmap_dma_buf()
565 sg_free_table(sgt); tegra_gem_prime_unmap_dma_buf()
566 kfree(sgt); tegra_gem_prime_unmap_dma_buf()
555 tegra_gem_prime_unmap_dma_buf(struct dma_buf_attachment *attach, struct sg_table *sgt, enum dma_data_direction dir) tegra_gem_prime_unmap_dma_buf() argument
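
Note: tegra_gem_prime_map_dma_buf() above has two paths: page-backed BOs go through sg_alloc_table() plus dma_map_sg(), while physically contiguous BOs get a one-entry table whose DMA address is filled in by hand. A sketch of the contiguous fallback; contig_to_sgt() is an invented name, and paddr/size stand in for bo->paddr and gem->size.

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static struct sg_table *contig_to_sgt(dma_addr_t paddr, size_t size)
{
	struct sg_table *sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);

	if (!sgt)
		return NULL;
	if (sg_alloc_table(sgt, 1, GFP_KERNEL)) {
		kfree(sgt);
		return NULL;
	}
	/* No dma_map_sg() here: the device address is already known. */
	sg_dma_address(sgt->sgl) = paddr;
	sg_dma_len(sgt->sgl) = size;
	return sgt;
}
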
gem.h
37 struct sg_table *sgt; member in struct:tegra_bo
/linux-4.1.27/drivers/gpu/drm/udl/
udl_dmabuf.c
26 struct sg_table sgt; member in struct:udl_drm_dmabuf_attachment
54 struct sg_table *sgt; udl_detach_dma_buf() local
62 sgt = &udl_attach->sgt; udl_detach_dma_buf()
65 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, udl_detach_dma_buf()
68 sg_free_table(sgt); udl_detach_dma_buf()
80 struct sg_table *sgt = NULL; udl_map_dma_buf() local
88 /* just return current sgt if already requested. */ udl_map_dma_buf()
90 return &udl_attach->sgt; udl_map_dma_buf()
103 DRM_ERROR("failed to allocate sgt.\n"); udl_map_dma_buf()
107 sgt = &udl_attach->sgt; udl_map_dma_buf()
109 ret = sg_alloc_table(sgt, obj->sg->orig_nents, GFP_KERNEL); udl_map_dma_buf()
111 DRM_ERROR("failed to alloc sgt.\n"); udl_map_dma_buf()
118 wr = sgt->sgl; udl_map_dma_buf()
119 for (i = 0; i < sgt->orig_nents; ++i) { udl_map_dma_buf()
126 nents = dma_map_sg(attach->dev, sgt->sgl, sgt->orig_nents, dir); udl_map_dma_buf()
129 sg_free_table(sgt); udl_map_dma_buf()
130 sgt = ERR_PTR(-EIO); udl_map_dma_buf()
141 return sgt; udl_map_dma_buf()
145 struct sg_table *sgt, udl_unmap_dma_buf()
144 udl_unmap_dma_buf(struct dma_buf_attachment *attach, struct sg_table *sgt, enum dma_data_direction dir) udl_unmap_dma_buf() argument
/linux-4.1.27/drivers/gpu/drm/armada/
armada_gem.c
72 dma_buf_unmap_attachment(dobj->obj.import_attach, dobj->sgt, armada_gem_free_object()
427 struct sg_table *sgt; armada_gem_prime_map_dma_buf() local
430 sgt = kmalloc(sizeof(*sgt), GFP_KERNEL); armada_gem_prime_map_dma_buf()
431 if (!sgt) armada_gem_prime_map_dma_buf()
439 if (sg_alloc_table(sgt, count, GFP_KERNEL)) armada_gem_prime_map_dma_buf()
444 for_each_sg(sgt->sgl, sg, count, i) { armada_gem_prime_map_dma_buf()
456 if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) { armada_gem_prime_map_dma_buf()
457 num = sgt->nents; armada_gem_prime_map_dma_buf()
462 if (sg_alloc_table(sgt, 1, GFP_KERNEL)) armada_gem_prime_map_dma_buf()
465 sg_set_page(sgt->sgl, dobj->page, dobj->obj.size, 0); armada_gem_prime_map_dma_buf()
467 if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) armada_gem_prime_map_dma_buf()
471 if (sg_alloc_table(sgt, 1, GFP_KERNEL)) armada_gem_prime_map_dma_buf()
473 sg_dma_address(sgt->sgl) = dobj->dev_addr; armada_gem_prime_map_dma_buf()
474 sg_dma_len(sgt->sgl) = dobj->obj.size; armada_gem_prime_map_dma_buf()
478 return sgt; armada_gem_prime_map_dma_buf()
481 for_each_sg(sgt->sgl, sg, num, i) armada_gem_prime_map_dma_buf()
484 sg_free_table(sgt); armada_gem_prime_map_dma_buf()
486 kfree(sgt); armada_gem_prime_map_dma_buf()
491 struct sg_table *sgt, enum dma_data_direction dir) armada_gem_prime_unmap_dma_buf()
498 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, dir); armada_gem_prime_unmap_dma_buf()
502 for_each_sg(sgt->sgl, sg, sgt->nents, i) armada_gem_prime_unmap_dma_buf()
506 sg_free_table(sgt); armada_gem_prime_unmap_dma_buf()
507 kfree(sgt); armada_gem_prime_unmap_dma_buf()
594 dobj->sgt = dma_buf_map_attachment(dobj->obj.import_attach, armada_gem_map_import()
596 if (!dobj->sgt) { armada_gem_map_import()
600 if (IS_ERR(dobj->sgt)) { armada_gem_map_import()
601 ret = PTR_ERR(dobj->sgt); armada_gem_map_import()
602 dobj->sgt = NULL; armada_gem_map_import()
606 if (dobj->sgt->nents > 1) { armada_gem_map_import()
610 if (sg_dma_len(dobj->sgt->sgl) < dobj->obj.size) { armada_gem_map_import()
614 dobj->dev_addr = sg_dma_address(dobj->sgt->sgl); armada_gem_map_import()
490 armada_gem_prime_unmap_dma_buf(struct dma_buf_attachment *attach, struct sg_table *sgt, enum dma_data_direction dir) armada_gem_prime_unmap_dma_buf() argument
armada_gem.h
21 struct sg_table *sgt; /* for imported */ member in struct:armada_gem_object
armada_fb.c
129 if (obj->obj.import_attach && !obj->sgt) { armada_fb_create()
/linux-4.1.27/drivers/spi/
spi-pxa2xx-dma.c
29 struct sg_table *sgt; pxa2xx_spi_map_dma_buffer() local
34 sgt = &drv_data->tx_sgt; pxa2xx_spi_map_dma_buffer()
39 sgt = &drv_data->rx_sgt; pxa2xx_spi_map_dma_buffer()
45 if (nents != sgt->nents) { pxa2xx_spi_map_dma_buffer()
48 sg_free_table(sgt); pxa2xx_spi_map_dma_buffer()
49 ret = sg_alloc_table(sgt, nents, GFP_ATOMIC); pxa2xx_spi_map_dma_buffer()
55 for_each_sg(sgt->sgl, sg, sgt->nents, i) { pxa2xx_spi_map_dma_buffer()
67 nents = dma_map_sg(dmadev, sgt->sgl, sgt->nents, dir); pxa2xx_spi_map_dma_buffer()
78 struct sg_table *sgt; pxa2xx_spi_unmap_dma_buffer() local
82 sgt = &drv_data->tx_sgt; pxa2xx_spi_unmap_dma_buffer()
85 sgt = &drv_data->rx_sgt; pxa2xx_spi_unmap_dma_buffer()
88 dma_unmap_sg(dmadev, sgt->sgl, sgt->nents, dir); pxa2xx_spi_unmap_dma_buffer()
167 struct sg_table *sgt; pxa2xx_spi_dma_prepare_one() local
190 sgt = &drv_data->tx_sgt; pxa2xx_spi_dma_prepare_one()
198 sgt = &drv_data->rx_sgt; pxa2xx_spi_dma_prepare_one()
209 return dmaengine_prep_slave_sg(chan, sgt->sgl, nents, dir, pxa2xx_spi_dma_prepare_one()
spi-ep93xx.c
439 struct sg_table *sgt; ep93xx_spi_dma_prepare() local
456 sgt = &espi->rx_sgt; ep93xx_spi_dma_prepare()
463 sgt = &espi->tx_sgt; ep93xx_spi_dma_prepare()
484 if (nents != sgt->nents) { ep93xx_spi_dma_prepare()
485 sg_free_table(sgt); ep93xx_spi_dma_prepare()
487 ret = sg_alloc_table(sgt, nents, GFP_KERNEL); ep93xx_spi_dma_prepare()
493 for_each_sg(sgt->sgl, sg, sgt->nents, i) { ep93xx_spi_dma_prepare()
513 nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); ep93xx_spi_dma_prepare()
517 txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, dir, DMA_CTRL_ACK); ep93xx_spi_dma_prepare()
519 dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); ep93xx_spi_dma_prepare()
537 struct sg_table *sgt; ep93xx_spi_dma_finish() local
541 sgt = &espi->rx_sgt; ep93xx_spi_dma_finish()
544 sgt = &espi->tx_sgt; ep93xx_spi_dma_finish()
547 dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); ep93xx_spi_dma_finish()
spi.c
475 struct sg_table *sgt, void *buf, size_t len, spi_map_buf()
486 ret = sg_alloc_table(sgt, sgs, GFP_KERNEL); spi_map_buf()
496 sg_free_table(sgt); spi_map_buf()
499 sg_set_page(&sgt->sgl[i], vm_page, spi_map_buf()
503 sg_set_buf(&sgt->sgl[i], sg_buf, min); spi_map_buf()
511 ret = dma_map_sg(dev, sgt->sgl, sgt->nents, dir); spi_map_buf()
515 sg_free_table(sgt); spi_map_buf()
519 sgt->nents = ret; spi_map_buf()
525 struct sg_table *sgt, enum dma_data_direction dir) spi_unmap_buf()
527 if (sgt->orig_nents) { spi_unmap_buf()
528 dma_unmap_sg(dev, sgt->sgl, sgt->orig_nents, dir); spi_unmap_buf()
529 sg_free_table(sgt); spi_unmap_buf()
474 spi_map_buf(struct spi_master *master, struct device *dev, struct sg_table *sgt, void *buf, size_t len, enum dma_data_direction dir) spi_map_buf() argument
524 spi_unmap_buf(struct spi_master *master, struct device *dev, struct sg_table *sgt, enum dma_data_direction dir) spi_unmap_buf() argument
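
Note: spi_map_buf() above generalizes this for SPI transfers, splitting vmalloc buffers page by page with vmalloc_to_page() + sg_set_page(). For a plain lowmem buffer the pattern reduces to a one-entry table, sketched below; map_one_buf() is a hypothetical name, and note how sgt->nents is rewritten with dma_map_sg()'s return value, as spi_map_buf() itself does.

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int map_one_buf(struct device *dev, struct sg_table *sgt,
		       void *buf, size_t len, enum dma_data_direction dir)
{
	int ret = sg_alloc_table(sgt, 1, GFP_KERNEL);

	if (ret)
		return ret;
	/* Assumes buf is lowmem (not vmalloc), so sg_set_buf() is safe. */
	sg_set_buf(sgt->sgl, buf, len);

	ret = dma_map_sg(dev, sgt->sgl, sgt->nents, dir);
	if (ret == 0) {
		sg_free_table(sgt);
		return -ENOMEM;
	}
	sgt->nents = ret;	/* dma_map_sg() may coalesce entries */
	return 0;
}
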
spi-s3c64xx.c
277 struct sg_table *sgt) prepare_dma()
303 desc = dmaengine_prep_slave_sg(dma->ch, sgt->sgl, sgt->nents, prepare_dma()
276 prepare_dma(struct s3c64xx_spi_dma_data *dma, struct sg_table *sgt) prepare_dma() argument
/linux-4.1.27/drivers/gpu/drm/msm/
msm_iommu.c
47 struct sg_table *sgt, unsigned len, int prot) msm_iommu_map()
56 if (!domain || !sgt) msm_iommu_map()
59 for_each_sg(sgt->sgl, sg, sgt->nents, i) { msm_iommu_map()
77 for_each_sg(sgt->sgl, sg, i, j) { msm_iommu_map()
86 struct sg_table *sgt, unsigned len) msm_iommu_unmap()
94 for_each_sg(sgt->sgl, sg, sgt->nents, i) { msm_iommu_unmap()
46 msm_iommu_map(struct msm_mmu *mmu, uint32_t iova, struct sg_table *sgt, unsigned len, int prot) msm_iommu_map() argument
85 msm_iommu_unmap(struct msm_mmu *mmu, uint32_t iova, struct sg_table *sgt, unsigned len) msm_iommu_unmap() argument
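
Note: msm_iommu_map()/msm_iommu_unmap() above walk the sgt and program the IOMMU one entry at a time. A self-contained sketch of that walk with unwind-on-failure; map_sgt_to_iommu() is an invented name, while iommu_map()/iommu_unmap() are the real IOMMU API.

#include <linux/iommu.h>
#include <linux/scatterlist.h>

static int map_sgt_to_iommu(struct iommu_domain *domain, unsigned long iova,
			    struct sg_table *sgt, int prot)
{
	struct scatterlist *sg;
	unsigned long da = iova;
	int i, j, ret;

	for_each_sg(sgt->sgl, sg, sgt->nents, i) {
		ret = iommu_map(domain, da, sg_phys(sg), sg->length, prot);
		if (ret)
			goto fail;
		da += sg->length;
	}
	return 0;

fail:
	/* Unwind the i entries mapped so far. */
	da = iova;
	for_each_sg(sgt->sgl, sg, i, j) {
		iommu_unmap(domain, da, sg->length);
		da += sg->length;
	}
	return ret;
}
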
msm_gem_prime.c
26 BUG_ON(!msm_obj->sgt); /* should have already pinned! */ msm_gem_prime_get_sg_table()
27 return msm_obj->sgt; msm_gem_prime_get_sg_table()
msm_mmu.h
26 int (*map)(struct msm_mmu *mmu, uint32_t iova, struct sg_table *sgt,
28 int (*unmap)(struct msm_mmu *mmu, uint32_t iova, struct sg_table *sgt,
msm_gem.c
92 msm_obj->sgt = drm_prime_pages_to_sg(p, npages); get_pages()
93 if (IS_ERR(msm_obj->sgt)) { get_pages()
94 dev_err(dev->dev, "failed to allocate sgt\n"); get_pages()
95 return ERR_CAST(msm_obj->sgt); get_pages()
104 dma_map_sg(dev->dev, msm_obj->sgt->sgl, get_pages()
105 msm_obj->sgt->nents, DMA_BIDIRECTIONAL); get_pages()
120 dma_unmap_sg(obj->dev->dev, msm_obj->sgt->sgl, put_pages()
121 msm_obj->sgt->nents, DMA_BIDIRECTIONAL); put_pages()
122 sg_free_table(msm_obj->sgt); put_pages()
123 kfree(msm_obj->sgt); put_pages()
304 ret = mmu->funcs->map(mmu, offset, msm_obj->sgt, msm_gem_get_iova_locked()
529 mmu->funcs->unmap(mmu, offset, msm_obj->sgt, obj->size); msm_gem_free_object()
666 uint32_t size, struct sg_table *sgt) msm_gem_import()
689 msm_obj->sgt = sgt; msm_gem_import()
696 ret = drm_prime_sg_to_page_addr_arrays(sgt, msm_obj->pages, NULL, npages); msm_gem_import()
665 msm_gem_import(struct drm_device *dev, uint32_t size, struct sg_table *sgt) msm_gem_import() argument
msm_gem.h
52 struct sg_table *sgt; member in struct:msm_gem_object
msm_drv.h
216 uint32_t size, struct sg_table *sgt);
/linux-4.1.27/drivers/gpu/drm/
drm_prime.c
70 struct sg_table *sgt; member in struct:drm_prime_attachment
145 struct sg_table *sgt; drm_gem_map_detach() local
153 sgt = prime_attach->sgt; drm_gem_map_detach()
154 if (sgt) { drm_gem_map_detach()
156 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, drm_gem_map_detach()
158 sg_free_table(sgt); drm_gem_map_detach()
161 kfree(sgt); drm_gem_map_detach()
185 struct sg_table *sgt; drm_gem_map_dma_buf() local
192 return prime_attach->sgt; drm_gem_map_dma_buf()
201 sgt = obj->dev->driver->gem_prime_get_sg_table(obj); drm_gem_map_dma_buf()
203 if (!IS_ERR(sgt)) { drm_gem_map_dma_buf()
204 if (!dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir)) { drm_gem_map_dma_buf()
205 sg_free_table(sgt); drm_gem_map_dma_buf()
206 kfree(sgt); drm_gem_map_dma_buf()
207 sgt = ERR_PTR(-ENOMEM); drm_gem_map_dma_buf()
209 prime_attach->sgt = sgt; drm_gem_map_dma_buf()
214 return sgt; drm_gem_map_dma_buf()
218 struct sg_table *sgt, drm_gem_unmap_dma_buf()
501 struct sg_table *sgt; drm_gem_prime_import() local
526 sgt = dma_buf_map_attachment(attach, DMA_BIDIRECTIONAL); drm_gem_prime_import()
527 if (IS_ERR(sgt)) { drm_gem_prime_import()
528 ret = PTR_ERR(sgt); drm_gem_prime_import()
532 obj = dev->driver->gem_prime_import_sg_table(dev, attach, sgt); drm_gem_prime_import()
543 dma_buf_unmap_attachment(attach, sgt, DMA_BIDIRECTIONAL); drm_gem_prime_import()
701 * @sgt: scatter-gather table to convert
709 int drm_prime_sg_to_page_addr_arrays(struct sg_table *sgt, struct page **pages, drm_prime_sg_to_page_addr_arrays() argument
720 for_each_sg(sgt->sgl, sg, sgt->nents, count) { drm_prime_sg_to_page_addr_arrays()
217 drm_gem_unmap_dma_buf(struct dma_buf_attachment *attach, struct sg_table *sgt, enum dma_data_direction dir) drm_gem_unmap_dma_buf() argument
drm_gem_cma_helper.c
196 drm_prime_gem_destroy(gem_obj, cma_obj->sgt); drm_gem_cma_free_object()
415 struct sg_table *sgt; drm_gem_cma_prime_get_sg_table() local
418 sgt = kzalloc(sizeof(*sgt), GFP_KERNEL); drm_gem_cma_prime_get_sg_table()
419 if (!sgt) drm_gem_cma_prime_get_sg_table()
422 ret = dma_get_sgtable(obj->dev->dev, sgt, cma_obj->vaddr, drm_gem_cma_prime_get_sg_table()
427 return sgt; drm_gem_cma_prime_get_sg_table()
430 kfree(sgt); drm_gem_cma_prime_get_sg_table()
440 * @sgt: scatter/gather table of pinned pages
455 struct sg_table *sgt) drm_gem_cma_prime_import_sg_table()
459 if (sgt->nents != 1) drm_gem_cma_prime_import_sg_table()
467 cma_obj->paddr = sg_dma_address(sgt->sgl); drm_gem_cma_prime_import_sg_table()
468 cma_obj->sgt = sgt; drm_gem_cma_prime_import_sg_table()
453 drm_gem_cma_prime_import_sg_table(struct drm_device *dev, struct dma_buf_attachment *attach, struct sg_table *sgt) drm_gem_cma_prime_import_sg_table() argument
/linux-4.1.27/include/drm/
drm_gem_cma_helper.h
11 * @sgt: scatter/gather table for imported PRIME buffers
17 struct sg_table *sgt; member in struct:drm_gem_cma_object
64 struct sg_table *sgt);
drmP.h
592 struct sg_table *sgt);
1009 extern int drm_prime_sg_to_page_addr_arrays(struct sg_table *sgt, struct page **pages,
/linux-4.1.27/drivers/gpu/drm/rockchip/
rockchip_drm_gem.c
267 struct sg_table *sgt; rockchip_gem_prime_get_sg_table() local
270 sgt = kzalloc(sizeof(*sgt), GFP_KERNEL); rockchip_gem_prime_get_sg_table()
271 if (!sgt) rockchip_gem_prime_get_sg_table()
274 ret = dma_get_sgtable_attrs(drm->dev, sgt, rk_obj->kvaddr, rockchip_gem_prime_get_sg_table()
278 DRM_ERROR("failed to allocate sgt, %d\n", ret); rockchip_gem_prime_get_sg_table()
279 kfree(sgt); rockchip_gem_prime_get_sg_table()
283 return sgt; rockchip_gem_prime_get_sg_table()
rockchip_drm_gem.h
32 struct sg_table *sgt);
/linux-4.1.27/drivers/gpu/drm/vmwgfx/
vmwgfx_buffer.c
209 struct sg_table sgt; member in struct:vmw_ttm_tt
316 __sg_page_iter_start(&viter->iter, vsgt->sgt->sgl, vmw_piter_start()
317 vsgt->sgt->orig_nents, p_offset); vmw_piter_start()
336 dma_unmap_sg(dev, vmw_tt->sgt.sgl, vmw_tt->sgt.nents, vmw_ttm_unmap_from_dma()
338 vmw_tt->sgt.nents = vmw_tt->sgt.orig_nents; vmw_ttm_unmap_from_dma()
359 ret = dma_map_sg(dev, vmw_tt->sgt.sgl, vmw_tt->sgt.orig_nents, vmw_ttm_map_for_dma()
364 vmw_tt->sgt.nents = ret; vmw_ttm_map_for_dma()
397 vsgt->sgt = &vmw_tt->sgt; vmw_ttm_map_dma()
412 ret = sg_alloc_table_from_pages(&vmw_tt->sgt, vsgt->pages, vmw_ttm_map_dma()
420 if (vsgt->num_pages > vmw_tt->sgt.nents) { vmw_ttm_map_dma()
423 vmw_tt->sgt.nents); vmw_ttm_map_dma()
452 sg_free_table(vmw_tt->vsgt.sgt); vmw_ttm_map_dma()
453 vmw_tt->vsgt.sgt = NULL; vmw_ttm_map_dma()
472 if (!vmw_tt->vsgt.sgt) vmw_ttm_unmap_dma()
479 sg_free_table(vmw_tt->vsgt.sgt); vmw_ttm_unmap_dma()
480 vmw_tt->vsgt.sgt = NULL; vmw_ttm_unmap_dma()
vmwgfx_drv.h
229 * @sgt: Pointer to a struct sg_table with binding information
236 struct sg_table *sgt; member in struct:vmw_sg_table
/linux-4.1.27/net/ceph/
crypto.c
98 * Dispose of @sgt with teardown_sgtable().
105 static int setup_sgtable(struct sg_table *sgt, struct scatterlist *prealloc_sg, setup_sgtable() argument
117 memset(sgt, 0, sizeof(*sgt)); setup_sgtable()
127 ret = sg_alloc_table(sgt, chunk_cnt, GFP_NOFS); setup_sgtable()
133 sgt->sgl = prealloc_sg; setup_sgtable()
134 sgt->nents = sgt->orig_nents = 1; setup_sgtable()
137 for_each_sg(sgt->sgl, sg, sgt->orig_nents, i) { setup_sgtable()
157 static void teardown_sgtable(struct sg_table *sgt) teardown_sgtable() argument
159 if (sgt->orig_nents > 1) teardown_sgtable()
160 sg_free_table(sgt); teardown_sgtable()
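
Note: setup_sgtable()/teardown_sgtable() above show an allocation-avoidance trick: payloads that fit one chunk skip sg_alloc_table() and point the table at a caller-provided scatterlist, so teardown frees the table only when orig_nents > 1. The single-chunk fast path, condensed from the fragments above:

#include <linux/scatterlist.h>
#include <linux/string.h>

/* Single-chunk fast path: no allocation, the table aliases a
 * scatterlist the caller owns (typically on its stack). */
static void sgt_init_single(struct sg_table *sgt,
			    struct scatterlist *prealloc_sg,
			    void *buf, unsigned int len)
{
	memset(sgt, 0, sizeof(*sgt));
	sg_init_one(prealloc_sg, buf, len);
	sgt->sgl = prealloc_sg;
	sgt->nents = sgt->orig_nents = 1;
}
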
/linux-4.1.27/drivers/gpu/host1x/
job.h
46 struct sg_table *sgt; member in struct:host1x_job_unpin_data
job.c
185 struct sg_table *sgt; pin_job() local
192 phys_addr = host1x_bo_pin(reloc->target.bo, &sgt); pin_job()
198 job->unpins[job->num_unpins].sgt = sgt; pin_job()
204 struct sg_table *sgt; pin_job() local
211 phys_addr = host1x_bo_pin(g->bo, &sgt); pin_job()
217 job->unpins[job->num_unpins].sgt = sgt; pin_job()
575 host1x_bo_unpin(unpin->bo, unpin->sgt); host1x_job_unpin()
/linux-4.1.27/include/linux/
host1x.h
63 dma_addr_t (*pin)(struct host1x_bo *bo, struct sg_table **sgt);
64 void (*unpin)(struct host1x_bo *bo, struct sg_table *sgt);
92 struct sg_table **sgt) host1x_bo_pin()
94 return bo->ops->pin(bo, sgt); host1x_bo_pin()
97 static inline void host1x_bo_unpin(struct host1x_bo *bo, struct sg_table *sgt) host1x_bo_unpin() argument
99 bo->ops->unpin(bo, sgt); host1x_bo_unpin()
91 host1x_bo_pin(struct host1x_bo *bo, struct sg_table **sgt) host1x_bo_pin() argument
dma-mapping.h
27 int (*get_sgtable)(struct device *dev, struct sg_table *sgt, void *,
scatterlist.h
237 int sg_alloc_table_from_pages(struct sg_table *sgt,
/linux-4.1.27/drivers/crypto/qce/
dma.h
58 qce_sgtable_add(struct sg_table *sgt, struct scatterlist *sg_add);
dma.c
110 qce_sgtable_add(struct sg_table *sgt, struct scatterlist *new_sgl) qce_sgtable_add() argument
112 struct scatterlist *sg = sgt->sgl, *sg_last = NULL; qce_sgtable_add()
/linux-4.1.27/include/asm-generic/
dma-mapping-common.h
223 dma_common_get_sgtable(struct device *dev, struct sg_table *sgt,
227 dma_get_sgtable_attrs(struct device *dev, struct sg_table *sgt, void *cpu_addr, dma_get_sgtable_attrs() argument
233 return ops->get_sgtable(dev, sgt, cpu_addr, dma_addr, size, dma_get_sgtable_attrs()
235 return dma_common_get_sgtable(dev, sgt, cpu_addr, dma_addr, size); dma_get_sgtable_attrs()
/linux-4.1.27/drivers/media/pci/cx88/
cx88-vbi.c
132 struct sg_table *sgt = vb2_dma_sg_plane_desc(vb, 0); buffer_prepare() local
145 cx88_risc_buffer(dev->pci, &buf->risc, sgt->sgl, buffer_prepare()
cx88-video.c
452 struct sg_table *sgt = vb2_dma_sg_plane_desc(vb, 0); buffer_prepare() local
463 sgt->sgl, 0, UNSET, buffer_prepare()
468 sgt->sgl, UNSET, 0, buffer_prepare()
473 sgt->sgl, buffer_prepare()
480 sgt->sgl, buffer_prepare()
488 sgt->sgl, 0, buf->bpl, buffer_prepare()
cx88-mpeg.c
230 struct sg_table *sgt = vb2_dma_sg_plane_desc(&buf->vb, 0); cx8802_buf_prepare() local
238 rc = cx88_risc_databuffer(dev->pci, risc, sgt->sgl, cx8802_buf_prepare()
/linux-4.1.27/drivers/hsi/controllers/
omap_ssi_port.c
205 omap_ssi->gdd_trn[lch].sg = msg->sgt.sgl; ssi_claim_lch()
227 err = dma_map_sg(&ssi->device, msg->sgt.sgl, msg->sgt.nents, ssi_start_dma()
241 d_addr = sg_dma_address(msg->sgt.sgl); ssi_start_dma()
243 err = dma_map_sg(&ssi->device, msg->sgt.sgl, msg->sgt.nents, ssi_start_dma()
255 s_addr = sg_dma_address(msg->sgt.sgl); ssi_start_dma()
269 writew_relaxed(SSI_BYTES_TO_FRAMES(msg->sgt.sgl->length), ssi_start_dma()
320 if ((msg->sgt.nents) && (msg->sgt.sgl->length > sizeof(u32))) ssi_start_transfer()
375 if (msg->sgt.nents > 1) ssi_async()
430 msg->channel, msg, msg->sgt.sgl->length, list_for_each_safe()
862 if ((!msg->sgt.nents) || (!msg->sgt.sgl->length)) { ssi_pio_complete()
871 buf = sg_virt(msg->sgt.sgl) + msg->actual_len; ssi_pio_complete()
881 if (msg->actual_len >= msg->sgt.sgl->length) ssi_pio_complete()
omap_ssi.c
226 dma_unmap_sg(&ssi->device, msg->sgt.sgl, msg->sgt.nents, dir); ssi_gdd_complete()
247 msg->actual_len = sg_dma_len(msg->sgt.sgl); ssi_gdd_complete()
/linux-4.1.27/drivers/base/
dma-mapping.c
228 int dma_common_get_sgtable(struct device *dev, struct sg_table *sgt, dma_common_get_sgtable() argument
234 ret = sg_alloc_table(sgt, 1, GFP_KERNEL); dma_common_get_sgtable()
238 sg_set_page(sgt->sgl, page, PAGE_ALIGN(size), 0); dma_common_get_sgtable()
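
Note: dma_common_get_sgtable() above is the generic backend behind the dma_get_sgtable() calls seen in the vb2 and CMA hits: it re-describes an existing coherent allocation as a one-entry sg_table. Typical driver-side usage, sketched below; coherent_to_sgt() is a hypothetical wrapper, and dma_get_sgtable() is assumed available (it is on architectures using the common dma-mapping header).

#include <linux/dma-mapping.h>
#include <linux/slab.h>

static struct sg_table *coherent_to_sgt(struct device *dev, void *vaddr,
					dma_addr_t dma_addr, size_t size)
{
	struct sg_table *sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);

	if (!sgt)
		return NULL;
	/* Builds a single-entry table covering the coherent region. */
	if (dma_get_sgtable(dev, sgt, vaddr, dma_addr, size)) {
		kfree(sgt);
		return NULL;
	}
	return sgt;
}
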
/linux-4.1.27/drivers/hsi/clients/
cmt_speech.c
212 u32 *data = sg_virt(msg->sgt.sgl); cs_set_cmd()
218 u32 *data = sg_virt(msg->sgt.sgl); cs_get_cmd()
272 kfree(sg_virt(msg->sgt.sgl)); cs_free_cmds()
294 sg_init_one(msg->sgt.sgl, buf, sizeof(*buf)); cs_alloc_cmds()
480 msg->sgt.nents = 1; cs_hsi_peek_on_control_complete()
510 msg->sgt.nents = 0; cs_hsi_read_on_control()
555 msg->sgt.nents = 1; cs_hsi_write_on_control()
632 sg_init_one(msg->sgt.sgl, address, hi->buf_size); cs_hsi_peek_on_data_complete()
633 msg->sgt.nents = 1; cs_hsi_peek_on_data_complete()
675 sg_init_one(rxmsg->sgt.sgl, (void *)hi->mmap_base, 0); cs_hsi_read_on_data()
676 rxmsg->sgt.nents = 0; cs_hsi_read_on_data()
727 sg_init_one(txmsg->sgt.sgl, address, hi->buf_size); cs_hsi_write_on_data()
hsi_char.c
160 kfree(sg_virt(msg->sgt.sgl)); hsc_msg_free()
199 sg_init_one(msg->sgt.sgl, buf, alloc_size); hsc_msg_alloc()
230 return msg->sgt.sgl->length; hsc_msg_len_get()
235 msg->sgt.sgl->length = len; hsc_msg_len_set()
468 sg_virt(msg->sgt.sgl), hsc_msg_len_get(msg)); hsc_read()
504 if (copy_from_user(sg_virt(msg->sgt.sgl), (void __user *)buf, len)) { hsc_write()
ssi_protocol.c
167 data = sg_virt(msg->sgt.sgl); ssip_set_cmd()
175 data = sg_virt(msg->sgt.sgl); ssip_get_cmd()
186 BUG_ON(msg->sgt.nents != (unsigned int)(skb_shinfo(skb)->nr_frags + 1)); ssip_skb_to_msg()
188 sg = msg->sgt.sgl; ssip_skb_to_msg()
258 kfree(sg_virt(msg->sgt.sgl)); ssip_free_cmds()
278 sg_init_one(msg->sgt.sgl, buf, sizeof(*buf)); ssip_alloc_cmds()
/linux-4.1.27/drivers/media/pci/cx25821/
cx25821-video.c
166 struct sg_table *sgt = vb2_dma_sg_plane_desc(vb, 0); cx25821_buffer_prepare() local
197 sgt->sgl, 0, UNSET, cx25821_buffer_prepare()
202 sgt->sgl, UNSET, 0, cx25821_buffer_prepare()
211 sgt->sgl, line0_offset, cx25821_buffer_prepare()
217 sgt->sgl, cx25821_buffer_prepare()
223 sgt->sgl, cx25821_buffer_prepare()
/linux-4.1.27/arch/c6x/include/asm/
dma-mapping.h
108 static inline int dma_get_sgtable(struct device *dev, struct sg_table *sgt, dma_get_sgtable() argument
/linux-4.1.27/arch/cris/include/asm/
dma-mapping.h
164 extern int dma_common_get_sgtable(struct device *dev, struct sg_table *sgt,
/linux-4.1.27/arch/frv/include/asm/
dma-mapping.h
143 static inline int dma_get_sgtable(struct device *dev, struct sg_table *sgt, dma_get_sgtable() argument
/linux-4.1.27/arch/m68k/include/asm/
dma-mapping.h
116 extern int dma_common_get_sgtable(struct device *dev, struct sg_table *sgt,
/linux-4.1.27/drivers/scsi/
nsp32.c
868 nsp32_sgtable *sgt = data->cur_lunt->sglun->sgt; nsp32_setup_sg_table() local
872 if (sgt == NULL) { nsp32_setup_sg_table()
887 sgt[i].addr = cpu_to_le32(sg_dma_address(sg)); scsi_for_each_sg()
888 sgt[i].len = cpu_to_le32(sg_dma_len(sg)); scsi_for_each_sg()
890 if (le32_to_cpu(sgt[i].len) > 0x10000) { scsi_for_each_sg()
892 "can't transfer over 64KB at a time, size=0x%lx", le32_to_cpu(sgt[i].len)); scsi_for_each_sg()
898 le32_to_cpu(sgt[i].addr), scsi_for_each_sg()
899 le32_to_cpu(sgt[i].len )); scsi_for_each_sg()
903 l = le32_to_cpu(sgt[num-1].len);
904 sgt[num-1].len = cpu_to_le32(l | SGTEND);
1725 nsp32_sgtable *sgt = data->cur_lunt->sglun->sgt; nsp32_adjust_busfree() local
1732 s_sacklen -= le32_to_cpu(sgt[old_entry].addr) & 3; nsp32_adjust_busfree()
1735 * calculate new_entry from sack count and each sgt[].len nsp32_adjust_busfree()
1740 sentlen += (le32_to_cpu(sgt[new_entry].len) & ~SGTEND); nsp32_adjust_busfree()
1746 /* all sgt is processed */ nsp32_adjust_busfree()
1761 len = le32_to_cpu(sgt[new_entry].len); nsp32_adjust_busfree()
1762 addr = le32_to_cpu(sgt[new_entry].addr); nsp32_adjust_busfree()
1764 sgt[new_entry].addr = cpu_to_le32(addr); nsp32_adjust_busfree()
1765 sgt[new_entry].len = cpu_to_le32(restlen); nsp32_adjust_busfree()
nsp32.h
455 nsp32_sgtable sgt[NSP32_SG_SIZE+1]; /* SG table */ member in struct:_nsp32_sglun
579 int cur_entry; /* current sgt entry */
/linux-4.1.27/drivers/media/pci/cx23885/
cx23885-vbi.c
144 struct sg_table *sgt = vb2_dma_sg_plane_desc(vb, 0); buffer_prepare() local
155 sgt->sgl, buffer_prepare()
cx23885-video.c
336 struct sg_table *sgt = vb2_dma_sg_plane_desc(vb, 0); buffer_prepare() local
348 sgt->sgl, 0, UNSET, buffer_prepare()
353 sgt->sgl, UNSET, 0, buffer_prepare()
381 sgt->sgl, line0_offset, buffer_prepare()
388 sgt->sgl, buffer_prepare()
395 sgt->sgl, buffer_prepare()
cx23885-core.c
1456 struct sg_table *sgt = vb2_dma_sg_plane_desc(&buf->vb, 0); cx23885_buf_prepare() local
1464 sgt->sgl, cx23885_buf_prepare()
/linux-4.1.27/lib/
scatterlist.c
347 * @sgt: The sg table header to use
364 int sg_alloc_table_from_pages(struct sg_table *sgt, sg_alloc_table_from_pages() argument
381 ret = sg_alloc_table(sgt, chunks, gfp_mask); sg_alloc_table_from_pages()
387 for_each_sg(sgt->sgl, s, sgt->orig_nents, i) { sg_alloc_table_from_pages()
/linux-4.1.27/drivers/input/touchscreen/
sur40.c
375 struct sg_table *sgt; sur40_process_video() local
419 sgt = vb2_dma_sg_plane_desc(&new_buf->vb, 0); sur40_process_video()
423 sgt->sgl, sgt->nents, sur40_video_format.sizeimage, 0); sur40_process_video()
/linux-4.1.27/drivers/media/platform/omap3isp/
ispccdc.h
60 struct sg_table sgt; member in struct:ispccdc_lsc_config_req::__anon5670
ispstat.h
39 struct sg_table sgt; member in struct:ispstat_buffer
ispstat.c
164 dma_sync_sg_for_device(stat->isp->dev, buf->sgt.sgl, isp_stat_buf_sync_for_device()
165 buf->sgt.nents, DMA_FROM_DEVICE); isp_stat_buf_sync_for_device()
174 dma_sync_sg_for_cpu(stat->isp->dev, buf->sgt.sgl, isp_stat_buf_sync_for_cpu()
175 buf->sgt.nents, DMA_FROM_DEVICE); isp_stat_buf_sync_for_cpu()
345 sg_free_table(&buf->sgt); isp_stat_bufs_free()
373 ret = dma_get_sgtable(dev, &buf->sgt, buf->virt_addr, buf->dma_addr, isp_stat_bufs_alloc_one()
ispccdc.c
362 sg_free_table(&req->table.sgt); ccdc_lsc_free_request()
445 ret = dma_get_sgtable(isp->dev, &req->table.sgt, ccdc_lsc_config()
451 dma_sync_sg_for_cpu(isp->dev, req->table.sgt.sgl, ccdc_lsc_config()
452 req->table.sgt.nents, DMA_TO_DEVICE); ccdc_lsc_config()
460 dma_sync_sg_for_device(isp->dev, req->table.sgt.sgl, ccdc_lsc_config()
461 req->table.sgt.nents, DMA_TO_DEVICE); ccdc_lsc_config()
/linux-4.1.27/arch/xtensa/include/asm/
dma-mapping.h
181 static inline int dma_get_sgtable(struct device *dev, struct sg_table *sgt, dma_get_sgtable() argument
/linux-4.1.27/arch/mn10300/include/asm/
dma-mapping.h
179 static inline int dma_get_sgtable(struct device *dev, struct sg_table *sgt, dma_get_sgtable() argument
/linux-4.1.27/arch/nios2/include/asm/
dma-mapping.h
133 extern int dma_common_get_sgtable(struct device *dev, struct sg_table *sgt,
/linux-4.1.27/arch/blackfin/include/asm/
dma-mapping.h
160 extern int dma_common_get_sgtable(struct device *dev, struct sg_table *sgt,
/linux-4.1.27/arch/metag/include/asm/
dma-mapping.h
184 extern int dma_common_get_sgtable(struct device *dev, struct sg_table *sgt,
/linux-4.1.27/arch/arc/include/asm/
dma-mapping.h
49 extern int dma_common_get_sgtable(struct device *dev, struct sg_table *sgt,
/linux-4.1.27/include/linux/hsi/
hsi.h
190 * @sgt: Head of the scatterlist array
204 struct sg_table sgt; member in struct:hsi_msg
/linux-4.1.27/arch/parisc/include/asm/
dma-mapping.h
252 static inline int dma_get_sgtable(struct device *dev, struct sg_table *sgt, dma_get_sgtable() argument
/linux-4.1.27/drivers/hsi/
hsi.c
525 sg_free_table(&msg->sgt); hsi_free_msg()
553 err = sg_alloc_table(&msg->sgt, nents, flags); hsi_alloc_msg()
/linux-4.1.27/arch/arm/include/asm/
dma-mapping.h
333 extern int arm_dma_get_sgtable(struct device *dev, struct sg_table *sgt,
/linux-4.1.27/arch/arm/mm/
dma-mapping.c
767 int arm_dma_get_sgtable(struct device *dev, struct sg_table *sgt, arm_dma_get_sgtable() argument
774 ret = sg_alloc_table(sgt, 1, GFP_KERNEL); arm_dma_get_sgtable()
778 sg_set_page(sgt->sgl, page, PAGE_ALIGN(size), 0); arm_dma_get_sgtable()
1454 static int arm_iommu_get_sgtable(struct device *dev, struct sg_table *sgt, arm_iommu_get_sgtable() argument
1464 return sg_alloc_table_from_pages(sgt, pages, count, 0, size, arm_iommu_get_sgtable()
/linux-4.1.27/drivers/gpu/drm/qxl/
H A Dqxl_drv.h545 struct sg_table *sgt);
/linux-4.1.27/arch/avr32/include/asm/
H A Ddma-mapping.h342 extern int dma_common_get_sgtable(struct device *dev, struct sg_table *sgt,

Completed in 1488 milliseconds