Searched refs:sgl (Results 1 – 200 of 271) sorted by relevance

/linux-4.1.27/drivers/misc/genwqe/
Dcard_utils.c297 int genwqe_alloc_sync_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl, in genwqe_alloc_sync_sgl() argument
303 sgl->fpage_offs = offset_in_page((unsigned long)user_addr); in genwqe_alloc_sync_sgl()
304 sgl->fpage_size = min_t(size_t, PAGE_SIZE-sgl->fpage_offs, user_size); in genwqe_alloc_sync_sgl()
305 sgl->nr_pages = DIV_ROUND_UP(sgl->fpage_offs + user_size, PAGE_SIZE); in genwqe_alloc_sync_sgl()
306 sgl->lpage_size = (user_size - sgl->fpage_size) % PAGE_SIZE; in genwqe_alloc_sync_sgl()
309 __func__, user_addr, user_size, sgl->nr_pages, in genwqe_alloc_sync_sgl()
310 sgl->fpage_offs, sgl->fpage_size, sgl->lpage_size); in genwqe_alloc_sync_sgl()
312 sgl->user_addr = user_addr; in genwqe_alloc_sync_sgl()
313 sgl->user_size = user_size; in genwqe_alloc_sync_sgl()
314 sgl->sgl_size = genwqe_sgl_size(sgl->nr_pages); in genwqe_alloc_sync_sgl()
[all …]
Dcard_base.h354 struct sg_entry *sgl; member
372 int genwqe_alloc_sync_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl,
375 int genwqe_setup_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl,
378 int genwqe_free_sync_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl);
/linux-4.1.27/lib/
Dscatterlist.c74 struct scatterlist *sg_last(struct scatterlist *sgl, unsigned int nents) in sg_last() argument
77 struct scatterlist *ret = &sgl[nents - 1]; in sg_last()
82 for_each_sg(sgl, sg, nents, i) in sg_last()
87 BUG_ON(sgl[0].sg_magic != SG_MAGIC); in sg_last()
104 void sg_init_table(struct scatterlist *sgl, unsigned int nents) in sg_init_table() argument
106 memset(sgl, 0, sizeof(*sgl) * nents); in sg_init_table()
111 sgl[i].sg_magic = SG_MAGIC; in sg_init_table()
114 sg_mark_end(&sgl[nents - 1]); in sg_init_table()
180 struct scatterlist *sgl, *next; in __sg_free_table() local
182 if (unlikely(!table->sgl)) in __sg_free_table()
[all …]
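
For orientation, the sg_init_table()/sg_set_page()/for_each_sg() trio that these lib/scatterlist.c hits implement is normally used as follows. This is a minimal, hypothetical sketch in kernel context; the function and its parameters are illustrative, not code from the tree:

    #include <linux/kernel.h>
    #include <linux/mm.h>
    #include <linux/scatterlist.h>

    /* Build a small on-stack scatterlist over caller-supplied pages
     * and walk it back.  Purely illustrative. */
    static int example_build_sgl(struct page **pages, unsigned int nents)
    {
            struct scatterlist sgl[4];
            struct scatterlist *sg;
            int i;

            if (nents == 0 || nents > ARRAY_SIZE(sgl))
                    return -EINVAL;

            sg_init_table(sgl, nents);      /* zero the entries, mark the last one */
            for_each_sg(sgl, sg, nents, i)
                    sg_set_page(sg, pages[i], PAGE_SIZE, 0);

            return 0;
    }
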
Dkfifo.c308 static int setup_sgl_buf(struct scatterlist *sgl, void *buf, in setup_sgl_buf() argument
334 sg_set_page(sgl, page, l - off, off); in setup_sgl_buf()
335 sgl = sg_next(sgl); in setup_sgl_buf()
336 if (++n == nents || sgl == NULL) in setup_sgl_buf()
343 sg_set_page(sgl, page, len, off); in setup_sgl_buf()
347 static unsigned int setup_sgl(struct __kfifo *fifo, struct scatterlist *sgl, in setup_sgl() argument
363 n = setup_sgl_buf(sgl, fifo->data + off, nents, l); in setup_sgl()
364 n += setup_sgl_buf(sgl + n, fifo->data, nents - n, len - l); in setup_sgl()
370 struct scatterlist *sgl, int nents, unsigned int len) in __kfifo_dma_in_prepare() argument
378 return setup_sgl(fifo, sgl, nents, len, fifo->in); in __kfifo_dma_in_prepare()
[all …]
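
The setup_sgl()/setup_sgl_buf() helpers above back the kfifo DMA macros; a hedged sketch of the intended calling sequence follows (the fifo geometry and all names are assumptions, not from the tree):

    #include <linux/kfifo.h>
    #include <linux/scatterlist.h>

    static DECLARE_KFIFO(ex_fifo, unsigned char, 4096);  /* INIT_KFIFO() at probe time */

    /* Expose the fifo's free space as a scatterlist, let a DMA engine
     * fill it, then commit what was actually received. */
    static void example_dma_in(unsigned int rx_len)
    {
            struct scatterlist sgl[2];      /* the ring may wrap: up to two runs */
            unsigned int nents;

            nents = kfifo_dma_in_prepare(&ex_fifo, sgl, ARRAY_SIZE(sgl), rx_len);
            if (!nents)
                    return;                 /* not enough free space */

            /* ... submit sgl/nents to the DMA engine, wait for completion ... */

            kfifo_dma_in_finish(&ex_fifo, rx_len);  /* commit the received bytes */
    }
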
Dswiotlb.c872 swiotlb_map_sg_attrs(struct device *hwdev, struct scatterlist *sgl, int nelems, in swiotlb_map_sg_attrs() argument
880 for_each_sg(sgl, sg, nelems, i) { in swiotlb_map_sg_attrs()
892 swiotlb_unmap_sg_attrs(hwdev, sgl, i, dir, in swiotlb_map_sg_attrs()
894 sg_dma_len(sgl) = 0; in swiotlb_map_sg_attrs()
907 swiotlb_map_sg(struct device *hwdev, struct scatterlist *sgl, int nelems, in swiotlb_map_sg() argument
910 return swiotlb_map_sg_attrs(hwdev, sgl, nelems, dir, NULL); in swiotlb_map_sg()
919 swiotlb_unmap_sg_attrs(struct device *hwdev, struct scatterlist *sgl, in swiotlb_unmap_sg_attrs() argument
927 for_each_sg(sgl, sg, nelems, i) in swiotlb_unmap_sg_attrs()
934 swiotlb_unmap_sg(struct device *hwdev, struct scatterlist *sgl, int nelems, in swiotlb_unmap_sg() argument
937 return swiotlb_unmap_sg_attrs(hwdev, sgl, nelems, dir, NULL); in swiotlb_unmap_sg()
[all …]
/linux-4.1.27/crypto/
Dalgif_skcipher.c59 struct af_alg_sgl sgl; member
86 struct scatterlist *sgl; in skcipher_free_async_sgls() local
91 af_alg_free_sg(&rsgl->sgl); in skcipher_free_async_sgls()
95 sgl = sreq->tsg; in skcipher_free_async_sgls()
96 n = sg_nents(sgl); in skcipher_free_async_sgls()
97 for_each_sg(sgl, sg, n, i) in skcipher_free_async_sgls()
135 struct skcipher_sg_list *sgl; in skcipher_alloc_sgl() local
138 sgl = list_entry(ctx->tsgl.prev, struct skcipher_sg_list, list); in skcipher_alloc_sgl()
140 sg = sgl->sg; in skcipher_alloc_sgl()
142 if (!sg || sgl->cur >= MAX_SGL_ENTS) { in skcipher_alloc_sgl()
[all …]
Dalgif_aead.c81 struct aead_sg_list *sgl = &ctx->tsgl; in aead_put_sgl() local
82 struct scatterlist *sg = sgl->sg; in aead_put_sgl()
85 for (i = 0; i < sgl->cur; i++) { in aead_put_sgl()
92 sgl->cur = 0; in aead_put_sgl()
173 struct aead_sg_list *sgl = &ctx->tsgl; in aead_sendmsg() local
219 sg = sgl->sg + sgl->cur - 1; in aead_sendmsg()
250 if (sgl->cur >= ALG_MAX_PAGES) { in aead_sendmsg()
256 sg = sgl->sg + sgl->cur; in aead_sendmsg()
277 sgl->cur++; in aead_sendmsg()
304 struct aead_sg_list *sgl = &ctx->tsgl; in aead_sendpage() local
[all …]
Daf_alg.c391 int af_alg_make_sg(struct af_alg_sgl *sgl, struct iov_iter *iter, int len) in af_alg_make_sg() argument
397 n = iov_iter_get_pages(iter, sgl->pages, len, ALG_MAX_PAGES, &off); in af_alg_make_sg()
405 sg_init_table(sgl->sg, npages + 1); in af_alg_make_sg()
410 sg_set_page(sgl->sg + i, sgl->pages[i], plen, off); in af_alg_make_sg()
415 sg_mark_end(sgl->sg + npages - 1); in af_alg_make_sg()
416 sgl->npages = npages; in af_alg_make_sg()
429 void af_alg_free_sg(struct af_alg_sgl *sgl) in af_alg_free_sg() argument
433 for (i = 0; i < sgl->npages; i++) in af_alg_free_sg()
434 put_page(sgl->pages[i]); in af_alg_free_sg()
Dalgif_hash.c25 struct af_alg_sgl sgl; member
71 len = af_alg_make_sg(&ctx->sgl, &msg->msg_iter, len); in hash_sendmsg()
77 ahash_request_set_crypt(&ctx->req, ctx->sgl.sg, NULL, len); in hash_sendmsg()
81 af_alg_free_sg(&ctx->sgl); in hash_sendmsg()
116 sg_init_table(ctx->sgl.sg, 1); in hash_sendpage()
117 sg_set_page(ctx->sgl.sg, page, size, offset); in hash_sendpage()
119 ahash_request_set_crypt(&ctx->req, ctx->sgl.sg, ctx->result, size); in hash_sendpage()
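
The algif_hash hooks above are driven from user space through an AF_ALG socket. As a point of reference, here is a small user-space counterpart, a sketch of the documented socket interface (error checking omitted; not code from the tree):

    #include <stdio.h>
    #include <unistd.h>
    #include <sys/socket.h>
    #include <linux/if_alg.h>

    int main(void)
    {
            struct sockaddr_alg sa = {
                    .salg_family = AF_ALG,
                    .salg_type   = "hash",
                    .salg_name   = "sha256",
            };
            unsigned char digest[32];
            int tfm, op;

            tfm = socket(AF_ALG, SOCK_SEQPACKET, 0);
            bind(tfm, (struct sockaddr *)&sa, sizeof(sa));
            op = accept(tfm, NULL, 0);              /* one operation instance */

            send(op, "abc", 3, 0);                  /* lands in hash_sendmsg() */
            read(op, digest, sizeof(digest));       /* final digest */

            for (int i = 0; i < 32; i++)
                    printf("%02x", digest[i]);
            printf("\n");
            close(op);
            close(tfm);
            return 0;
    }
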
/linux-4.1.27/tools/virtio/linux/
Dscatterlist.h92 struct scatterlist *sgl) in sg_chain() argument
104 prv[prv_nents - 1].page_link = ((unsigned long) sgl | 0x01) & ~0x02; in sg_chain()
159 static inline void sg_init_table(struct scatterlist *sgl, unsigned int nents) in sg_init_table() argument
161 memset(sgl, 0, sizeof(*sgl) * nents); in sg_init_table()
166 sgl[i].sg_magic = SG_MAGIC; in sg_init_table()
169 sg_mark_end(&sgl[nents - 1]); in sg_init_table()
/linux-4.1.27/arch/tile/include/asm/
Dhv_driver.h42 HV_SGL sgl[/* sgl_len */], __hv64 offset, in tile_hv_dev_preada()
45 return hv_dev_preada(devhdl, flags, sgl_len, sgl, in tile_hv_dev_preada()
52 HV_SGL sgl[/* sgl_len */], __hv64 offset, in tile_hv_dev_pwritea()
55 return hv_dev_pwritea(devhdl, flags, sgl_len, sgl, in tile_hv_dev_pwritea()
/linux-4.1.27/include/linux/
Dscatterlist.h13 struct scatterlist *sgl; /* the list */ member
137 struct scatterlist *sgl) in sg_chain() argument
153 prv[prv_nents - 1].page_link = ((unsigned long) sgl | 0x01) & ~0x02; in sg_chain()
242 size_t sg_copy_from_buffer(struct scatterlist *sgl, unsigned int nents,
244 size_t sg_copy_to_buffer(struct scatterlist *sgl, unsigned int nents,
247 size_t sg_pcopy_from_buffer(struct scatterlist *sgl, unsigned int nents,
249 size_t sg_pcopy_to_buffer(struct scatterlist *sgl, unsigned int nents,
346 void sg_miter_start(struct sg_mapping_iter *miter, struct scatterlist *sgl,
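
A hedged sketch of the two consumer-side helpers declared above, sg_copy_to_buffer() and the sg_mapping_iter API (the function and its names are illustrative):

    #include <linux/scatterlist.h>

    /* Drain an sgl either in one shot or chunk by chunk.  Illustrative. */
    static size_t example_drain(struct scatterlist *sgl, unsigned int nents,
                                void *buf, size_t buflen)
    {
            struct sg_mapping_iter miter;
            size_t copied;

            /* One-shot copy of up to buflen bytes out of the list. */
            copied = sg_copy_to_buffer(sgl, nents, buf, buflen);

            /* The manual equivalent: each sg_miter_next() maps one chunk. */
            sg_miter_start(&miter, sgl, nents, SG_MITER_FROM_SG);
            while (sg_miter_next(&miter))
                    ;       /* miter.addr / miter.length describe the chunk */
            sg_miter_stop(&miter);

            return copied;
    }
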
Dkfifo.h658 #define kfifo_dma_in_prepare(fifo, sgl, nents, len) \ argument
661 struct scatterlist *__sgl = (sgl); \
709 #define kfifo_dma_out_prepare(fifo, sgl, nents, len) \ argument
712 struct scatterlist *__sgl = (sgl); \
792 struct scatterlist *sgl, int nents, unsigned int len);
795 struct scatterlist *sgl, int nents, unsigned int len);
814 struct scatterlist *sgl, int nents, unsigned int len, size_t recsize);
820 struct scatterlist *sgl, int nents, unsigned int len, size_t recsize);
Ddma-mapping.h267 #define dma_map_sg_attrs(dev, sgl, nents, dir, attrs) \ argument
268 dma_map_sg(dev, sgl, nents, dir)
270 #define dma_unmap_sg_attrs(dev, sgl, nents, dir, attrs) \ argument
271 dma_unmap_sg(dev, sgl, nents, dir)
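
These wrappers reduce to the plain dma_map_sg()/dma_unmap_sg() pair. The canonical streaming pattern is sketched below (device and sg_table setup assumed elsewhere); note the asymmetry that the device is programmed with the returned nents while unmapping uses orig_nents:

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    static int example_map_for_device(struct device *dev, struct sg_table *sgt)
    {
            int nents;

            nents = dma_map_sg(dev, sgt->sgl, sgt->orig_nents, DMA_TO_DEVICE);
            if (nents == 0)
                    return -EIO;    /* mapping failed; nothing to unmap */

            /* ... program the device with the nents mapped segments ... */

            dma_unmap_sg(dev, sgt->sgl, sgt->orig_nents, DMA_TO_DEVICE);
            return 0;
    }
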
Dswiotlb.h82 swiotlb_map_sg_attrs(struct device *hwdev, struct scatterlist *sgl, int nelems,
86 swiotlb_unmap_sg_attrs(struct device *hwdev, struct scatterlist *sgl,
Ddmaengine.h159 struct data_chunk sgl[0]; member
662 struct dma_chan *chan, struct scatterlist *sgl,
713 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, in dmaengine_prep_slave_sg() argument
716 return chan->device->device_prep_slave_sg(chan, sgl, sg_len, in dmaengine_prep_slave_sg()
723 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, in dmaengine_prep_rio_sg() argument
727 return chan->device->device_prep_slave_sg(chan, sgl, sg_len, in dmaengine_prep_rio_sg()
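
A hedged sketch of the usual caller sequence around dmaengine_prep_slave_sg() as declared above (channel acquisition, slave config, and completion handling elided; names illustrative):

    #include <linux/dmaengine.h>
    #include <linux/scatterlist.h>

    static int example_submit_slave_sg(struct dma_chan *chan,
                                       struct scatterlist *sgl,
                                       unsigned int nents)
    {
            struct dma_async_tx_descriptor *desc;
            dma_cookie_t cookie;

            desc = dmaengine_prep_slave_sg(chan, sgl, nents, DMA_MEM_TO_DEV,
                                           DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
            if (!desc)
                    return -ENOMEM;

            cookie = dmaengine_submit(desc);        /* queue the descriptor */
            if (dma_submit_error(cookie))
                    return -EIO;

            dma_async_issue_pending(chan);          /* kick the channel */
            return 0;
    }
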
/linux-4.1.27/drivers/gpu/drm/exynos/
Dexynos_drm_dmabuf.c59 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, in exynos_gem_detach_dma_buf()
100 rd = buf->sgt->sgl; in exynos_gem_map_dma_buf()
101 wr = sgt->sgl; in exynos_gem_map_dma_buf()
109 nents = dma_map_sg(attach->dev, sgt->sgl, sgt->orig_nents, dir); in exynos_gem_map_dma_buf()
203 struct scatterlist *sgl; in exynos_dmabuf_prime_import() local
249 sgl = sgt->sgl; in exynos_dmabuf_prime_import()
252 buffer->dma_addr = sg_dma_address(sgl); in exynos_dmabuf_prime_import()
Dexynos_drm_gem.c86 struct scatterlist *sgl; in exynos_drm_gem_map_buf() local
98 sgl = buf->sgt->sgl; in exynos_drm_gem_map_buf()
99 for_each_sg(buf->sgt->sgl, sgl, buf->sgt->nents, i) { in exynos_drm_gem_map_buf()
100 if (page_offset < (sgl->length >> PAGE_SHIFT)) in exynos_drm_gem_map_buf()
102 page_offset -= (sgl->length >> PAGE_SHIFT); in exynos_drm_gem_map_buf()
105 pfn = __phys_to_pfn(sg_phys(sgl)) + page_offset; in exynos_drm_gem_map_buf()
486 nents = dma_map_sg(drm_dev->dev, sgt->sgl, sgt->nents, dir); in exynos_gem_map_sgt_with_dma()
501 dma_unmap_sg(drm_dev->dev, sgt->sgl, sgt->nents, dir); in exynos_gem_unmap_sgt_from_dma()
/linux-4.1.27/arch/arm64/mm/
Ddma-mapping.c229 static int __swiotlb_map_sg_attrs(struct device *dev, struct scatterlist *sgl, in __swiotlb_map_sg_attrs() argument
236 ret = swiotlb_map_sg_attrs(dev, sgl, nelems, dir, attrs); in __swiotlb_map_sg_attrs()
238 for_each_sg(sgl, sg, ret, i) in __swiotlb_map_sg_attrs()
246 struct scatterlist *sgl, int nelems, in __swiotlb_unmap_sg_attrs() argument
254 for_each_sg(sgl, sg, nelems, i) in __swiotlb_unmap_sg_attrs()
257 swiotlb_unmap_sg_attrs(dev, sgl, nelems, dir, attrs); in __swiotlb_unmap_sg_attrs()
279 struct scatterlist *sgl, int nelems, in __swiotlb_sync_sg_for_cpu() argument
286 for_each_sg(sgl, sg, nelems, i) in __swiotlb_sync_sg_for_cpu()
289 swiotlb_sync_sg_for_cpu(dev, sgl, nelems, dir); in __swiotlb_sync_sg_for_cpu()
293 struct scatterlist *sgl, int nelems, in __swiotlb_sync_sg_for_device() argument
[all …]
/linux-4.1.27/arch/microblaze/kernel/
Ddma.c54 static int dma_direct_map_sg(struct device *dev, struct scatterlist *sgl, in dma_direct_map_sg() argument
62 for_each_sg(sgl, sg, nents, i) { in dma_direct_map_sg()
131 struct scatterlist *sgl, int nents, in dma_direct_sync_sg_for_cpu() argument
139 for_each_sg(sgl, sg, nents, i) in dma_direct_sync_sg_for_cpu()
145 struct scatterlist *sgl, int nents, in dma_direct_sync_sg_for_device() argument
153 for_each_sg(sgl, sg, nents, i) in dma_direct_sync_sg_for_device()
/linux-4.1.27/drivers/scsi/lpfc/
Dlpfc_scsi.c182 struct sli4_sge *sgl = (struct sli4_sge *)lpfc_cmd->fcp_bpl; in lpfc_sli4_set_rsp_sgl_last() local
183 if (sgl) { in lpfc_sli4_set_rsp_sgl_last()
184 sgl += 1; in lpfc_sli4_set_rsp_sgl_last()
185 sgl->word2 = le32_to_cpu(sgl->word2); in lpfc_sli4_set_rsp_sgl_last()
186 bf_set(lpfc_sli4_sge_last, sgl, 1); in lpfc_sli4_set_rsp_sgl_last()
187 sgl->word2 = cpu_to_le32(sgl->word2); in lpfc_sli4_set_rsp_sgl_last()
794 struct sli4_sge *sgl; in lpfc_new_scsi_buf_s4() local
874 sgl = (struct sli4_sge *)psb->fcp_bpl; in lpfc_new_scsi_buf_s4()
884 sgl->addr_hi = cpu_to_le32(putPaddrHigh(pdma_phys_fcp_cmd)); in lpfc_new_scsi_buf_s4()
885 sgl->addr_lo = cpu_to_le32(putPaddrLow(pdma_phys_fcp_cmd)); in lpfc_new_scsi_buf_s4()
[all …]
Dlpfc_sli.c8056 struct sli4_sge *sgl = NULL; in lpfc_sli4_bpl2sgl() local
8067 sgl = (struct sli4_sge *)sglq->sgl; in lpfc_sli4_bpl2sgl()
8089 sgl->addr_hi = bpl->addrHigh; in lpfc_sli4_bpl2sgl()
8090 sgl->addr_lo = bpl->addrLow; in lpfc_sli4_bpl2sgl()
8092 sgl->word2 = le32_to_cpu(sgl->word2); in lpfc_sli4_bpl2sgl()
8094 bf_set(lpfc_sli4_sge_last, sgl, 1); in lpfc_sli4_bpl2sgl()
8096 bf_set(lpfc_sli4_sge_last, sgl, 0); in lpfc_sli4_bpl2sgl()
8101 sgl->sge_len = cpu_to_le32(bde.tus.f.bdeSize); in lpfc_sli4_bpl2sgl()
8113 bf_set(lpfc_sli4_sge_offset, sgl, offset); in lpfc_sli4_bpl2sgl()
8114 bf_set(lpfc_sli4_sge_type, sgl, in lpfc_sli4_bpl2sgl()
[all …]
/linux-4.1.27/drivers/media/platform/
Dm2m-deinterlace.c257 ctx->xt->sgl[0].size = s_width; in deinterlace_issue_dma()
258 ctx->xt->sgl[0].icg = s_width; in deinterlace_issue_dma()
264 ctx->xt->sgl[0].size = s_width; in deinterlace_issue_dma()
265 ctx->xt->sgl[0].icg = s_width; in deinterlace_issue_dma()
271 ctx->xt->sgl[0].size = s_width / 2; in deinterlace_issue_dma()
272 ctx->xt->sgl[0].icg = s_width / 2; in deinterlace_issue_dma()
278 ctx->xt->sgl[0].size = s_width / 2; in deinterlace_issue_dma()
279 ctx->xt->sgl[0].icg = s_width / 2; in deinterlace_issue_dma()
285 ctx->xt->sgl[0].size = s_width / 2; in deinterlace_issue_dma()
286 ctx->xt->sgl[0].icg = s_width / 2; in deinterlace_issue_dma()
[all …]
/linux-4.1.27/drivers/net/ethernet/intel/ixgbe/
Dixgbe_fcoe.c53 ddp->sgl = NULL; in ixgbe_fcoe_clear_ddp()
134 if (ddp->sgl) in ixgbe_fcoe_ddp_put()
135 dma_unmap_sg(&adapter->pdev->dev, ddp->sgl, ddp->sgc, in ixgbe_fcoe_ddp_put()
157 struct scatterlist *sgl, unsigned int sgc, in ixgbe_fcoe_ddp_setup() argument
176 if (!netdev || !sgl) in ixgbe_fcoe_ddp_setup()
192 if (ddp->sgl) { in ixgbe_fcoe_ddp_setup()
194 xid, ddp->sgl, ddp->sgc); in ixgbe_fcoe_ddp_setup()
212 dmacount = dma_map_sg(&adapter->pdev->dev, sgl, sgc, DMA_FROM_DEVICE); in ixgbe_fcoe_ddp_setup()
225 ddp->sgl = sgl; in ixgbe_fcoe_ddp_setup()
229 for_each_sg(sgl, sg, dmacount, i) { in ixgbe_fcoe_ddp_setup()
[all …]
Dixgbe_fcoe.h64 struct scatterlist *sgl; member
Dixgbe.h917 struct scatterlist *sgl, unsigned int sgc);
919 struct scatterlist *sgl, unsigned int sgc);
/linux-4.1.27/drivers/scsi/esas2r/
Desas2r_io.c224 struct esas2r_mem_desc *sgl; in esas2r_build_sg_list_sge() local
231 sgl = esas2r_alloc_sgl(a); in esas2r_build_sg_list_sge()
233 if (unlikely(sgl == NULL)) in esas2r_build_sg_list_sge()
244 memcpy(sgl->virt_addr, sgc->sge.a64.last, sgelen); in esas2r_build_sg_list_sge()
248 (struct atto_vda_sge *)((u8 *)sgl->virt_addr + in esas2r_build_sg_list_sge()
253 (struct atto_vda_sge *)((u8 *)sgl->virt_addr in esas2r_build_sg_list_sge()
260 cpu_to_le64(sgl->phys_addr); in esas2r_build_sg_list_sge()
302 list_add(&sgl->next_desc, &rq->sg_table_head); in esas2r_build_sg_list_sge()
376 struct esas2r_mem_desc *sgl; in esas2r_build_prd_iblk() local
449 sgl = esas2r_alloc_sgl(a); in esas2r_build_prd_iblk()
[all …]
Desas2r_init.c841 struct esas2r_mem_desc *sgl; in esas2r_init_adapter_struct() local
909 for (i = 0, sgl = a->sg_list_mds; i < num_sg_lists; i++, sgl++) { in esas2r_init_adapter_struct()
910 sgl->size = sgl_page_size; in esas2r_init_adapter_struct()
912 list_add_tail(&sgl->next_desc, &a->free_sg_list_head); in esas2r_init_adapter_struct()
914 if (!esas2r_initmem_alloc(a, sgl, ESAS2R_SGL_ALIGN)) { in esas2r_init_adapter_struct()
Desas2r.h1153 struct list_head *sgl; in esas2r_alloc_sgl() local
1158 sgl = a->free_sg_list_head.next; in esas2r_alloc_sgl()
1159 result = list_entry(sgl, struct esas2r_mem_desc, next_desc); in esas2r_alloc_sgl()
1160 list_del_init(sgl); in esas2r_alloc_sgl()
/linux-4.1.27/drivers/media/v4l2-core/
Dvideobuf2-dma-contig.c59 for_each_sg(sgt->sgl, s, sgt->orig_nents, i) { in vb2_dc_sgt_foreach_page()
73 dma_addr_t expected = sg_dma_address(sgt->sgl); in vb2_dc_get_contiguous_size()
77 for_each_sg(sgt->sgl, s, sgt->nents, i) { in vb2_dc_get_contiguous_size()
123 dma_sync_sg_for_device(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir); in vb2_dc_prepare()
135 dma_sync_sg_for_cpu(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir); in vb2_dc_finish()
261 rd = buf->sgt_base->sgl; in vb2_dc_dmabuf_ops_attach()
262 wr = sgt->sgl; in vb2_dc_dmabuf_ops_attach()
288 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, in vb2_dc_dmabuf_ops_detach()
315 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, in vb2_dc_dmabuf_ops_map()
321 ret = dma_map_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, dma_dir); in vb2_dc_dmabuf_ops_map()
[all …]
Dvideobuf2-dma-sg.c150 if (dma_map_sg_attrs(buf->dev, sgt->sgl, sgt->nents, in vb2_dma_sg_alloc()
190 dma_unmap_sg_attrs(buf->dev, sgt->sgl, sgt->nents, in vb2_dma_sg_put()
212 dma_sync_sg_for_device(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir); in vb2_dma_sg_prepare()
224 dma_sync_sg_for_cpu(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir); in vb2_dma_sg_finish()
317 if (dma_map_sg_attrs(buf->dev, sgt->sgl, sgt->nents, in vb2_dma_sg_get_userptr()
354 dma_unmap_sg_attrs(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir, &attrs); in vb2_dma_sg_put_userptr()
464 rd = buf->dma_sgt->sgl; in vb2_dma_sg_dmabuf_ops_attach()
465 wr = sgt->sgl; in vb2_dma_sg_dmabuf_ops_attach()
491 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, in vb2_dma_sg_dmabuf_ops_detach()
518 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, in vb2_dma_sg_dmabuf_ops_map()
[all …]
Dvideobuf2-vmalloc.c246 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in vb2_vmalloc_dmabuf_ops_attach()
276 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, in vb2_vmalloc_dmabuf_ops_detach()
303 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, in vb2_vmalloc_dmabuf_ops_map()
309 ret = dma_map_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, dma_dir); in vb2_vmalloc_dmabuf_ops_map()
/linux-4.1.27/drivers/gpu/drm/armada/
Darmada_gem.c444 for_each_sg(sgt->sgl, sg, count, i) { in armada_gem_prime_map_dma_buf()
456 if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) { in armada_gem_prime_map_dma_buf()
465 sg_set_page(sgt->sgl, dobj->page, dobj->obj.size, 0); in armada_gem_prime_map_dma_buf()
467 if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) in armada_gem_prime_map_dma_buf()
473 sg_dma_address(sgt->sgl) = dobj->dev_addr; in armada_gem_prime_map_dma_buf()
474 sg_dma_len(sgt->sgl) = dobj->obj.size; in armada_gem_prime_map_dma_buf()
481 for_each_sg(sgt->sgl, sg, num, i) in armada_gem_prime_map_dma_buf()
498 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, dir); in armada_gem_prime_unmap_dma_buf()
502 for_each_sg(sgt->sgl, sg, sgt->nents, i) in armada_gem_prime_unmap_dma_buf()
610 if (sg_dma_len(dobj->sgt->sgl) < dobj->obj.size) { in armada_gem_map_import()
[all …]
/linux-4.1.27/drivers/infiniband/core/
Dumem.c54 ib_dma_unmap_sg(dev, umem->sg_head.sgl, in __ib_umem_release()
58 for_each_sg(umem->sg_head.sgl, sg, umem->npages, i) { in __ib_umem_release()
188 sg_list_start = umem->sg_head.sgl; in ib_umem_get()
215 umem->sg_head.sgl, in ib_umem_get()
326 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, i) in ib_umem_page_count()
355 ret = sg_pcopy_to_buffer(umem->sg_head.sgl, umem->nmap, dst, length, in ib_umem_copy_from()
/linux-4.1.27/drivers/infiniband/hw/cxgb3/
Diwch_qp.c77 wqe->send.sgl[i].stag = cpu_to_be32(wr->sg_list[i].lkey); in build_rdma_send()
78 wqe->send.sgl[i].len = cpu_to_be32(wr->sg_list[i].length); in build_rdma_send()
79 wqe->send.sgl[i].to = cpu_to_be64(wr->sg_list[i].addr); in build_rdma_send()
103 wqe->write.sgl[0].stag = wr->ex.imm_data; in build_rdma_write()
104 wqe->write.sgl[0].len = cpu_to_be32(0); in build_rdma_write()
114 wqe->write.sgl[i].stag = in build_rdma_write()
116 wqe->write.sgl[i].len = in build_rdma_write()
118 wqe->write.sgl[i].to = in build_rdma_write()
264 wqe->recv.sgl[i].stag = cpu_to_be32(wr->sg_list[i].lkey); in build_rdma_recv()
265 wqe->recv.sgl[i].len = cpu_to_be32(wr->sg_list[i].length); in build_rdma_recv()
[all …]
Dcxio_wr.h176 struct t3_sge sgl[T3_MAX_SGE]; /* 4+ */ member
238 struct t3_sge sgl[T3_MAX_SGE]; /* 5+ */ member
274 struct t3_sge sgl[T3_MAX_SGE]; /* 3+ */ member
/linux-4.1.27/drivers/dma/sh/
Dshdma-base.c564 struct scatterlist *sgl, unsigned int sg_len, dma_addr_t *addr, in shdma_prep_sg() argument
574 for_each_sg(sgl, sg, sg_len, i) in shdma_prep_sg()
591 for_each_sg(sgl, sg, sg_len, i) { in shdma_prep_sg()
665 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, in shdma_prep_slave_sg() argument
688 return shdma_prep_sg(schan, sgl, sg_len, &slave_addr, in shdma_prep_slave_sg()
706 struct scatterlist *sgl; in shdma_prep_dma_cyclic() local
734 sgl = kcalloc(sg_len, sizeof(*sgl), GFP_KERNEL); in shdma_prep_dma_cyclic()
735 if (!sgl) in shdma_prep_dma_cyclic()
738 sg_init_table(sgl, sg_len); in shdma_prep_dma_cyclic()
743 sg_set_page(&sgl[i], pfn_to_page(PFN_DOWN(src)), period_len, in shdma_prep_dma_cyclic()
[all …]
Drcar-dmac.c823 rcar_dmac_chan_prep_sg(struct rcar_dmac_chan *chan, struct scatterlist *sgl, in rcar_dmac_chan_prep_sg() argument
855 for_each_sg(sgl, sg, sg_len, i) { in rcar_dmac_chan_prep_sg()
1004 struct scatterlist sgl; in rcar_dmac_prep_dma_memcpy() local
1009 sg_init_table(&sgl, 1); in rcar_dmac_prep_dma_memcpy()
1010 sg_set_page(&sgl, pfn_to_page(PFN_DOWN(dma_src)), len, in rcar_dmac_prep_dma_memcpy()
1012 sg_dma_address(&sgl) = dma_src; in rcar_dmac_prep_dma_memcpy()
1013 sg_dma_len(&sgl) = len; in rcar_dmac_prep_dma_memcpy()
1015 return rcar_dmac_chan_prep_sg(rchan, &sgl, 1, dma_dest, in rcar_dmac_prep_dma_memcpy()
1020 rcar_dmac_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, in rcar_dmac_prep_slave_sg() argument
1037 return rcar_dmac_chan_prep_sg(rchan, sgl, sg_len, dev_addr, in rcar_dmac_prep_slave_sg()
[all …]
Dusb-dmac.c419 usb_dmac_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, in usb_dmac_prep_slave_sg() argument
440 for_each_sg(sgl, sg, sg_len, i) { in usb_dmac_prep_slave_sg()
/linux-4.1.27/drivers/staging/android/ion/
Dion_carveout_heap.c64 struct page *page = sg_page(table->sgl); in ion_carveout_heap_phys()
97 sg_set_page(table->sgl, pfn_to_page(PFN_DOWN(paddr)), size, 0); in ion_carveout_heap_allocate()
113 struct page *page = sg_page(table->sgl); in ion_carveout_heap_free()
119 dma_sync_sg_for_device(NULL, table->sgl, table->nents, in ion_carveout_heap_free()
Dion_chunk_heap.c67 sg = table->sgl; in ion_chunk_heap_allocate()
82 sg = table->sgl; in ion_chunk_heap_allocate()
108 dma_sync_sg_for_device(NULL, table->sgl, table->nents, in ion_chunk_heap_free()
111 for_each_sg(table->sgl, sg, table->nents, i) { in ion_chunk_heap_free()
Dion_heap.c48 for_each_sg(table->sgl, sg, table->nents, i) { in ion_heap_map_kernel()
81 for_each_sg(table->sgl, sg, table->nents, i) { in ion_heap_map_user()
118 static int ion_heap_sglist_zero(struct scatterlist *sgl, unsigned int nents, in ion_heap_sglist_zero() argument
126 for_each_sg_page(sgl, &piter, nents, 0) { in ion_heap_sglist_zero()
151 return ion_heap_sglist_zero(table->sgl, table->nents, pgprot); in ion_heap_buffer_zero()
Dion_system_heap.c160 sg = table->sgl; in ion_system_heap_allocate()
193 for_each_sg(table->sgl, sg, table->nents, i) in ion_system_heap_free()
344 sg_set_page(table->sgl, page, len, 0); in ion_system_contig_heap_allocate()
364 struct page *page = sg_page(table->sgl); in ion_system_contig_heap_free()
379 struct page *page = sg_page(table->sgl); in ion_system_contig_heap_phys()
Dion_test.c66 for_each_sg_page(table->sgl, &sg_iter, table->nents, offset_page) { in ion_handle_test_dma()
Dion.c232 for_each_sg(table->sgl, sg, table->nents, i) { in ion_buffer_create()
255 for_each_sg(buffer->sg_table->sgl, sg, buffer->sg_table->nents, i) in ion_buffer_create()
1220 dma_sync_sg_for_device(NULL, buffer->sg_table->sgl, in ion_sync_for_device()
/linux-4.1.27/drivers/gpu/drm/omapdrm/
Domap_gem_dmabuf.c48 sg_init_table(sg->sgl, 1); in omap_gem_map_dma_buf()
49 sg_dma_len(sg->sgl) = obj->size; in omap_gem_map_dma_buf()
50 sg_set_page(sg->sgl, pfn_to_page(PFN_DOWN(paddr)), obj->size, 0); in omap_gem_map_dma_buf()
51 sg_dma_address(sg->sgl) = paddr; in omap_gem_map_dma_buf()
/linux-4.1.27/drivers/scsi/bnx2fc/
Dbnx2fc_hwi.c1470 struct fcoe_ext_mul_sges_ctx *sgl; in bnx2fc_init_seq_cleanup_task() local
1518 task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_addr.lo = in bnx2fc_init_seq_cleanup_task()
1520 task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_addr.hi = in bnx2fc_init_seq_cleanup_task()
1522 task->txwr_only.sgl_ctx.sgl.mul_sgl.sgl_size = in bnx2fc_init_seq_cleanup_task()
1524 task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_off = in bnx2fc_init_seq_cleanup_task()
1526 task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_idx = i; in bnx2fc_init_seq_cleanup_task()
1536 sgl = &task->rxwr_only.union_ctx.read_info.sgl_ctx.sgl; in bnx2fc_init_seq_cleanup_task()
1537 sgl->mul_sgl.cur_sge_addr.lo = (u32)phys_addr; in bnx2fc_init_seq_cleanup_task()
1538 sgl->mul_sgl.cur_sge_addr.hi = (u32)((u64)phys_addr >> 32); in bnx2fc_init_seq_cleanup_task()
1539 sgl->mul_sgl.sgl_size = bd_count; in bnx2fc_init_seq_cleanup_task()
[all …]
/linux-4.1.27/include/linux/mtd/
Dubi.h252 int ubi_leb_read_sg(struct ubi_volume_desc *desc, int lnum, struct ubi_sgl *sgl,
280 struct ubi_sgl *sgl, int offset, int len) in ubi_read_sg() argument
282 return ubi_leb_read_sg(desc, lnum, sgl, offset, len, 0); in ubi_read_sg()
/linux-4.1.27/drivers/scsi/
Dstorvsc_drv.c558 static void destroy_bounce_buffer(struct scatterlist *sgl, in destroy_bounce_buffer() argument
565 page_buf = sg_page((&sgl[i])); in destroy_bounce_buffer()
570 kfree(sgl); in destroy_bounce_buffer()
573 static int do_bounce_buffer(struct scatterlist *sgl, unsigned int sg_count) in do_bounce_buffer() argument
585 if (sgl[i].offset + sgl[i].length != PAGE_SIZE) in do_bounce_buffer()
589 if (sgl[i].offset != 0) in do_bounce_buffer()
593 if (sgl[i].length != PAGE_SIZE || sgl[i].offset != 0) in do_bounce_buffer()
600 static struct scatterlist *create_bounce_buffer(struct scatterlist *sgl, in create_bounce_buffer() argument
1558 struct scatterlist *sgl; in storvsc_queuecommand() local
1617 sgl = (struct scatterlist *)scsi_sglist(scmnd); in storvsc_queuecommand()
[all …]
D3w-xxxx.c590 command_packet->byte8.param.sgl[0].address = param_value; in tw_aen_read_queue()
591 command_packet->byte8.param.sgl[0].length = sizeof(TW_Sector); in tw_aen_read_queue()
732 command_packet->byte8.param.sgl[0].address = param_value; in tw_aen_drain_queue()
733 command_packet->byte8.param.sgl[0].length = sizeof(TW_Sector); in tw_aen_drain_queue()
966 tw_ioctl->firmware_command.byte8.param.sgl[0].address = dma_handle + sizeof(TW_New_Ioctl) - 1; in tw_chrdev_ioctl()
967 tw_ioctl->firmware_command.byte8.param.sgl[0].length = data_buffer_length_adjusted; in tw_chrdev_ioctl()
970 tw_ioctl->firmware_command.byte8.io.sgl[0].address = dma_handle + sizeof(TW_New_Ioctl) - 1; in tw_chrdev_ioctl()
971 tw_ioctl->firmware_command.byte8.io.sgl[0].length = data_buffer_length_adjusted; in tw_chrdev_ioctl()
1154 command_packet->byte8.param.sgl[0].address = param_value; in tw_setfeature()
1155 command_packet->byte8.param.sgl[0].length = sizeof(TW_Sector); in tw_setfeature()
[all …]
Dvmw_pvscsi.c61 struct pvscsi_sg_list *sgl; member
340 sge = &ctx->sgl->sge[0]; in pvscsi_create_sg()
373 ctx->sglPA = pci_map_single(adapter->dev, ctx->sgl, in pvscsi_map_buffers()
1162 free_pages((unsigned long)ctx->sgl, get_order(SGL_SIZE)); in pvscsi_free_sgls()
1254 ctx->sgl = (void *)__get_free_pages(GFP_KERNEL, in pvscsi_allocate_sg()
1257 BUG_ON(!IS_ALIGNED(((unsigned long)ctx->sgl), PAGE_SIZE)); in pvscsi_allocate_sg()
1258 if (!ctx->sgl) { in pvscsi_allocate_sg()
1260 free_pages((unsigned long)ctx->sgl, in pvscsi_allocate_sg()
1262 ctx->sgl = NULL; in pvscsi_allocate_sg()
D3w-xxxx.h307 TW_SG_Entry sgl[TW_MAX_SGL_LENGTH]; member
311 TW_SG_Entry sgl[TW_MAX_SGL_LENGTH]; member
D3w-sas.h232 TW_SG_Entry_ISO sgl[TW_LIBERATOR_MAX_SGL_LENGTH_OLD]; member
236 TW_SG_Entry_ISO sgl[TW_LIBERATOR_MAX_SGL_LENGTH_OLD]; member
Dscsi_lib.c568 static void scsi_sg_free(struct scatterlist *sgl, unsigned int nents) in scsi_sg_free() argument
573 mempool_free(sgl, sgp->pool); in scsi_sg_free()
601 sg_init_table(sdb->table.sgl, nents); in scsi_alloc_sgtable()
604 first_chunk = sdb->table.sgl; in scsi_alloc_sgtable()
1097 count = blk_rq_map_sg(req->q, req, sdb->table.sgl); in scsi_init_sgtable()
1168 prot_sdb->table.sgl); in scsi_init_io()
1931 cmd->sdb.table.sgl = sg; in scsi_mq_prep_fn()
1940 cmd->prot_sdb->table.sgl = in scsi_mq_prep_fn()
1949 bidi_sdb->table.sgl = in scsi_mq_prep_fn()
3087 void *scsi_kmap_atomic_sg(struct scatterlist *sgl, int sg_count, in scsi_kmap_atomic_sg() argument
[all …]
D3w-9xxx.h501 TW_SG_Entry sgl[TW_ESCALADE_MAX_SGL_LENGTH]; member
505 TW_SG_Entry sgl[TW_ESCALADE_MAX_SGL_LENGTH]; member
D3w-sas.c426 …command_packet->byte8_offset.param.sgl[0].address = TW_CPU_TO_SGL(tw_dev->generic_buffer_phys[requ… in twl_aen_sync_time()
427 command_packet->byte8_offset.param.sgl[0].length = TW_CPU_TO_SGL(TW_SECTOR_SIZE); in twl_aen_sync_time()
686 TW_SG_Entry_ISO *sgl; in twl_load_sgl() local
708 sgl = (TW_SG_Entry_ISO *)((u32 *)oldcommand+oldcommand->size - (sizeof(TW_SG_Entry_ISO)/4) + pae +… in twl_load_sgl()
709 sgl->address = TW_CPU_TO_SGL(dma_handle + sizeof(TW_Ioctl_Buf_Apache) - 1); in twl_load_sgl()
710 sgl->length = TW_CPU_TO_SGL(length); in twl_load_sgl()
954 …command_packet->byte8_offset.param.sgl[0].address = TW_CPU_TO_SGL(tw_dev->generic_buffer_phys[requ… in twl_get_param()
955 command_packet->byte8_offset.param.sgl[0].length = TW_CPU_TO_SGL(TW_SECTOR_SIZE); in twl_get_param()
D3w-9xxx.c491 …command_packet->byte8_offset.param.sgl[0].address = TW_CPU_TO_SGL(tw_dev->generic_buffer_phys[requ… in twa_aen_sync_time()
492 command_packet->byte8_offset.param.sgl[0].length = cpu_to_le32(TW_SECTOR_SIZE); in twa_aen_sync_time()
1079 …command_packet->byte8_offset.param.sgl[0].address = TW_CPU_TO_SGL(tw_dev->generic_buffer_phys[requ… in twa_get_param()
1080 command_packet->byte8_offset.param.sgl[0].length = cpu_to_le32(TW_SECTOR_SIZE); in twa_get_param()
1382 TW_SG_Entry *sgl; in twa_load_sgl() local
1405 sgl = (TW_SG_Entry *)((u32 *)oldcommand+oldcommand->size - (sizeof(TW_SG_Entry)/4) + pae); in twa_load_sgl()
1407 sgl = (TW_SG_Entry *)((u32 *)oldcommand+TW_SGL_OUT(oldcommand->opcode__sgloffset)); in twa_load_sgl()
1408 sgl->address = TW_CPU_TO_SGL(dma_handle + sizeof(TW_Ioctl_Buf_Apache) - 1); in twa_load_sgl()
1409 sgl->length = cpu_to_le32(length); in twa_load_sgl()
Dmegaraid.c1417 struct scatterlist *sgl; in mega_cmd_done() local
1552 sgl = scsi_sglist(cmd); in mega_cmd_done()
1553 if( sg_page(sgl) ) { in mega_cmd_done()
1554 c = *(unsigned char *) sg_virt(&sgl[0]); in mega_cmd_done()
1757 scb->sgl[idx].address = sg_dma_address(sg); in mega_build_sglist()
1758 *len += scb->sgl[idx].length = sg_dma_len(sg); in mega_build_sglist()
2921 scb->sgl = NULL; in mega_init_scb()
2936 scb->sgl = (mega_sglist *)scb->sgl64; in mega_init_scb()
2938 if( !scb->sgl ) { in mega_init_scb()
/linux-4.1.27/arch/sparc/kernel/
Dioport.c549 static int pci32_map_sg(struct device *device, struct scatterlist *sgl, in pci32_map_sg() argument
557 for_each_sg(sgl, sg, nents, n) { in pci32_map_sg()
568 static void pci32_unmap_sg(struct device *dev, struct scatterlist *sgl, in pci32_unmap_sg() argument
576 for_each_sg(sgl, sg, nents, n) { in pci32_unmap_sg()
614 static void pci32_sync_sg_for_cpu(struct device *dev, struct scatterlist *sgl, in pci32_sync_sg_for_cpu() argument
621 for_each_sg(sgl, sg, nents, n) { in pci32_sync_sg_for_cpu()
627 static void pci32_sync_sg_for_device(struct device *device, struct scatterlist *sgl, in pci32_sync_sg_for_device() argument
634 for_each_sg(sgl, sg, nents, n) { in pci32_sync_sg_for_device()
/linux-4.1.27/drivers/xen/
Dswiotlb-xen.c546 xen_swiotlb_map_sg_attrs(struct device *hwdev, struct scatterlist *sgl, in xen_swiotlb_map_sg_attrs() argument
555 for_each_sg(sgl, sg, nelems, i) { in xen_swiotlb_map_sg_attrs()
572 xen_swiotlb_unmap_sg_attrs(hwdev, sgl, i, dir, in xen_swiotlb_map_sg_attrs()
574 sg_dma_len(sgl) = 0; in xen_swiotlb_map_sg_attrs()
607 xen_swiotlb_unmap_sg_attrs(struct device *hwdev, struct scatterlist *sgl, in xen_swiotlb_unmap_sg_attrs() argument
616 for_each_sg(sgl, sg, nelems, i) in xen_swiotlb_unmap_sg_attrs()
630 xen_swiotlb_sync_sg(struct device *hwdev, struct scatterlist *sgl, in xen_swiotlb_sync_sg() argument
637 for_each_sg(sgl, sg, nelems, i) in xen_swiotlb_sync_sg()
Dxen-scsiback.c134 struct scatterlist *sgl; member
286 kfree(req->sgl); in scsiback_fast_flush_area()
287 req->sgl = NULL; in scsiback_fast_flush_area()
407 pending_req->sgl, pending_req->n_sg, in scsiback_cmd_exec()
523 pending_req->sgl = kmalloc_array(nr_segments, in scsiback_gnttab_data_map()
525 if (!pending_req->sgl) in scsiback_gnttab_data_map()
528 sg_init_table(pending_req->sgl, nr_segments); in scsiback_gnttab_data_map()
562 for_each_sg(pending_req->sgl, sg, nr_segments, i) { in scsiback_gnttab_data_map()
/linux-4.1.27/drivers/net/ethernet/intel/i40e/
Di40e_fcoe.c166 if (ddp->sgl) { in i40e_fcoe_ddp_unmap()
167 dma_unmap_sg(&pf->pdev->dev, ddp->sgl, ddp->sgc, in i40e_fcoe_ddp_unmap()
169 ddp->sgl = NULL; in i40e_fcoe_ddp_unmap()
740 if (!ddp->sgl) in i40e_fcoe_handle_offload()
816 struct scatterlist *sgl, unsigned int sgc, in i40e_fcoe_ddp_setup() argument
847 if (ddp->sgl) { in i40e_fcoe_ddp_setup()
849 xid, ddp->sgl, ddp->sgc); in i40e_fcoe_ddp_setup()
866 dmacount = dma_map_sg(&pf->pdev->dev, sgl, sgc, DMA_FROM_DEVICE); in i40e_fcoe_ddp_setup()
869 sgl, sgc); in i40e_fcoe_ddp_setup()
883 for_each_sg(sgl, sg, dmacount, i) { in i40e_fcoe_ddp_setup()
[all …]
Di40e_fcoe.h109 struct scatterlist *sgl; member
/linux-4.1.27/drivers/gpu/drm/i915/
Di915_gem_dmabuf.c64 src = obj->pages->sgl; in i915_gem_map_dma_buf()
65 dst = st->sgl; in i915_gem_map_dma_buf()
72 if (!dma_map_sg(attachment->dev, st->sgl, st->nents, dir)) { in i915_gem_map_dma_buf()
100 dma_unmap_sg(attachment->dev, sg->sgl, sg->nents, dir); in i915_gem_unmap_dma_buf()
139 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, 0) in i915_gem_dmabuf_vmap()
Di915_gem_tiling.c515 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, 0) { in i915_gem_object_do_bit_17_swizzle()
545 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, 0) { in i915_gem_object_save_bit_17_swizzle()
Di915_gem_render_state.c88 page = sg_page(so->obj->pages->sgl); in render_state_setup()
Di915_gem_gtt.c557 for_each_sg_page(pages->sgl, &sg_iter, pages->nents, 0) { in gen8_ppgtt_insert_entries()
1142 for_each_sg_page(pages->sgl, &sg_iter, pages->nents, 0) { in gen6_ppgtt_insert_entries()
1723 obj->pages->sgl, obj->pages->nents, in i915_gem_gtt_prepare_object()
1753 for_each_sg_page(st->sgl, &sg_iter, st->nents, 0) { in gen8_ggtt_insert_entries()
1799 for_each_sg_page(st->sgl, &sg_iter, st->nents, 0) { in gen6_ggtt_insert_entries()
1991 obj->pages->sgl, obj->pages->nents, in i915_gem_gtt_finish_object()
2611 struct scatterlist *sg = st->sgl; in rotate_pages()
2674 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, 0) { in intel_rotate_fb_obj_pages()
Di915_gem_userptr.c527 for_each_sg((*st)->sgl, sg, num_pages, n) in st_set_pages()
715 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, 0) { in i915_gem_userptr_put_pages()
Di915_gem_stolen.c362 sg = st->sgl; in i915_pages_create_for_stolen()
/linux-4.1.27/drivers/gpu/drm/tegra/
Dgem.c117 err = iommu_map_sg(tegra->domain, bo->paddr, bo->sgt->sgl, in tegra_bo_iommu_map()
214 for_each_sg(sgt->sgl, s, sgt->nents, i) in tegra_bo_get_pages()
217 if (dma_map_sg(drm->dev, sgt->sgl, sgt->nents, DMA_TO_DEVICE) == 0) in tegra_bo_get_pages()
357 bo->paddr = sg_dma_address(bo->sgt->sgl); in tegra_bo_import()
534 for_each_sg(sgt->sgl, sg, bo->num_pages, i) in tegra_gem_prime_map_dma_buf()
537 if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) in tegra_gem_prime_map_dma_buf()
543 sg_dma_address(sgt->sgl) = bo->paddr; in tegra_gem_prime_map_dma_buf()
544 sg_dma_len(sgt->sgl) = gem->size; in tegra_gem_prime_map_dma_buf()
563 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, dir); in tegra_gem_prime_unmap_dma_buf()
/linux-4.1.27/drivers/gpu/drm/msm/
Dmsm_iommu.c59 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in msm_iommu_map()
77 for_each_sg(sgt->sgl, sg, i, j) { in msm_iommu_map()
94 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in msm_iommu_unmap()
Dmsm_gem.c104 dma_map_sg(dev->dev, msm_obj->sgt->sgl, in get_pages()
120 dma_unmap_sg(obj->dev->dev, msm_obj->sgt->sgl, in put_pages()
/linux-4.1.27/arch/ia64/sn/pci/
Dpci_dma.c241 static void sn_dma_unmap_sg(struct device *dev, struct scatterlist *sgl, in sn_dma_unmap_sg() argument
252 for_each_sg(sgl, sg, nhwentries, i) { in sn_dma_unmap_sg()
274 static int sn_dma_map_sg(struct device *dev, struct scatterlist *sgl, in sn_dma_map_sg() argument
279 struct scatterlist *saved_sg = sgl, *sg; in sn_dma_map_sg()
292 for_each_sg(sgl, sg, nhwentries, i) { in sn_dma_map_sg()
/linux-4.1.27/drivers/gpu/drm/udl/
Dudl_dmabuf.c65 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, in udl_detach_dma_buf()
117 rd = obj->sg->sgl; in udl_map_dma_buf()
118 wr = sgt->sgl; in udl_map_dma_buf()
126 nents = dma_map_sg(attach->dev, sgt->sgl, sgt->orig_nents, dir); in udl_map_dma_buf()
/linux-4.1.27/arch/powerpc/kernel/
Ddma.c128 static int dma_direct_map_sg(struct device *dev, struct scatterlist *sgl, in dma_direct_map_sg() argument
135 for_each_sg(sgl, sg, nents, i) { in dma_direct_map_sg()
196 struct scatterlist *sgl, int nents, in dma_direct_sync_sg() argument
202 for_each_sg(sgl, sg, nents, i) in dma_direct_sync_sg()
Dvio.c559 struct scatterlist *sgl; in vio_dma_iommu_map_sg() local
564 for (sgl = sglist; count < nelems; count++, sgl++) in vio_dma_iommu_map_sg()
565 alloc_size += roundup(sgl->length, IOMMU_PAGE_SIZE(tbl)); in vio_dma_iommu_map_sg()
580 for (sgl = sglist, count = 0; count < ret; count++, sgl++) in vio_dma_iommu_map_sg()
581 alloc_size -= roundup(sgl->dma_length, IOMMU_PAGE_SIZE(tbl)); in vio_dma_iommu_map_sg()
595 struct scatterlist *sgl; in vio_dma_iommu_unmap_sg() local
600 for (sgl = sglist; count < nelems; count++, sgl++) in vio_dma_iommu_unmap_sg()
601 alloc_size += roundup(sgl->dma_length, IOMMU_PAGE_SIZE(tbl)); in vio_dma_iommu_unmap_sg()
Dibmebus.c106 struct scatterlist *sgl, in ibmebus_map_sg() argument
113 for_each_sg(sgl, sg, nents, i) { in ibmebus_map_sg()
/linux-4.1.27/drivers/net/ethernet/chelsio/cxgb4vf/
Dsge.c159 struct ulptx_sgl *sgl; /* scatter/gather list in TX Queue */ member
307 const struct ulptx_sgl *sgl, const struct sge_txq *tq) in unmap_sgl() argument
313 dma_unmap_single(dev, be64_to_cpu(sgl->addr0), in unmap_sgl()
314 be32_to_cpu(sgl->len0), DMA_TO_DEVICE); in unmap_sgl()
316 dma_unmap_page(dev, be64_to_cpu(sgl->addr0), in unmap_sgl()
317 be32_to_cpu(sgl->len0), DMA_TO_DEVICE); in unmap_sgl()
325 for (p = sgl->sge; nfrags >= 2; nfrags -= 2) { in unmap_sgl()
394 unmap_sgl(dev, sdesc->skb, sdesc->sgl, tq); in free_tx_desc()
909 struct ulptx_sgl *sgl, u64 *end, unsigned int start, in write_sgl() argument
920 sgl->len0 = htonl(len); in write_sgl()
[all …]
/linux-4.1.27/include/xen/
Dswiotlb-xen.h28 xen_swiotlb_map_sg_attrs(struct device *hwdev, struct scatterlist *sgl,
33 xen_swiotlb_unmap_sg_attrs(struct device *hwdev, struct scatterlist *sgl,
/linux-4.1.27/include/crypto/
Dif_alg.h78 int af_alg_make_sg(struct af_alg_sgl *sgl, struct iov_iter *iter, int len);
79 void af_alg_free_sg(struct af_alg_sgl *sgl);
/linux-4.1.27/drivers/mtd/ubi/
Deba.c516 struct ubi_sgl *sgl, int lnum, int offset, int len, in ubi_eba_read_leb_sg() argument
524 ubi_assert(sgl->list_pos < UBI_MAX_SG_COUNT); in ubi_eba_read_leb_sg()
525 sg = &sgl->sg[sgl->list_pos]; in ubi_eba_read_leb_sg()
526 if (len < sg->length - sgl->page_pos) in ubi_eba_read_leb_sg()
529 to_read = sg->length - sgl->page_pos; in ubi_eba_read_leb_sg()
532 sg_virt(sg) + sgl->page_pos, offset, in ubi_eba_read_leb_sg()
540 sgl->page_pos += to_read; in ubi_eba_read_leb_sg()
541 if (sgl->page_pos == sg->length) { in ubi_eba_read_leb_sg()
542 sgl->list_pos++; in ubi_eba_read_leb_sg()
543 sgl->page_pos = 0; in ubi_eba_read_leb_sg()
[all …]
Dkapi.c472 int ubi_leb_read_sg(struct ubi_volume_desc *desc, int lnum, struct ubi_sgl *sgl, in ubi_leb_read_sg() argument
488 err = ubi_eba_read_leb_sg(ubi, vol, sgl, lnum, offset, len, check); in ubi_leb_read_sg()
/linux-4.1.27/drivers/char/
Dvirtio_console.c878 struct sg_list *sgl = sd->u.data; in pipe_to_sg() local
881 if (sgl->n == sgl->size) in pipe_to_sg()
891 sg_set_page(&(sgl->sg[sgl->n]), buf->page, len, buf->offset); in pipe_to_sg()
910 sg_set_page(&(sgl->sg[sgl->n]), page, len, offset); in pipe_to_sg()
912 sgl->n++; in pipe_to_sg()
913 sgl->len += len; in pipe_to_sg()
924 struct sg_list sgl; in port_fops_splice_write() local
931 .u.data = &sgl, in port_fops_splice_write()
963 sgl.n = 0; in port_fops_splice_write()
964 sgl.len = 0; in port_fops_splice_write()
[all …]
/linux-4.1.27/drivers/spi/
Dspi-pxa2xx-dma.c55 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in pxa2xx_spi_map_dma_buffer()
67 nents = dma_map_sg(dmadev, sgt->sgl, sgt->nents, dir); in pxa2xx_spi_map_dma_buffer()
88 dma_unmap_sg(dmadev, sgt->sgl, sgt->nents, dir); in pxa2xx_spi_unmap_dma_buffer()
209 return dmaengine_prep_slave_sg(chan, sgt->sgl, nents, dir, in pxa2xx_spi_dma_prepare_one()
Dspi-pl022.c813 dma_unmap_sg(pl022->dma_tx_channel->device->dev, pl022->sgt_tx.sgl, in unmap_free_dma_scatter()
815 dma_unmap_sg(pl022->dma_rx_channel->device->dev, pl022->sgt_rx.sgl, in unmap_free_dma_scatter()
826 BUG_ON(!pl022->sgt_rx.sgl); in dma_callback()
840 pl022->sgt_rx.sgl, in dma_callback()
844 for_each_sg(pl022->sgt_rx.sgl, sg, pl022->sgt_rx.nents, i) { in dma_callback()
854 for_each_sg(pl022->sgt_tx.sgl, sg, pl022->sgt_tx.nents, i) { in dma_callback()
891 for_each_sg(sgtab->sgl, sg, sgtab->nents, i) { in setup_dma_scatter()
912 for_each_sg(sgtab->sgl, sg, sgtab->nents, i) { in setup_dma_scatter()
1066 rx_sglen = dma_map_sg(rxchan->device->dev, pl022->sgt_rx.sgl, in configure_dma()
1071 tx_sglen = dma_map_sg(txchan->device->dev, pl022->sgt_tx.sgl, in configure_dma()
[all …]
Dspi-ep93xx.c493 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in ep93xx_spi_dma_prepare()
513 nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); in ep93xx_spi_dma_prepare()
517 txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, dir, DMA_CTRL_ACK); in ep93xx_spi_dma_prepare()
519 dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); in ep93xx_spi_dma_prepare()
547 dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); in ep93xx_spi_dma_finish()
Dspi-qup.c291 struct scatterlist *sgl; in spi_qup_prep_sg() local
299 sgl = xfer->tx_sg.sgl; in spi_qup_prep_sg()
303 sgl = xfer->rx_sg.sgl; in spi_qup_prep_sg()
306 desc = dmaengine_prep_slave_sg(chan, sgl, nents, dir, flags); in spi_qup_prep_sg()
Dspi-dw-mid.c168 xfer->tx_sg.sgl, in dw_spi_dma_prepare_tx()
214 xfer->rx_sg.sgl, in dw_spi_dma_prepare_rx()
Dspi-img-spfi.c338 rxdesc = dmaengine_prep_slave_sg(spfi->rx_ch, xfer->rx_sg.sgl, in img_spfi_start_dma()
362 txdesc = dmaengine_prep_slave_sg(spfi->tx_ch, xfer->tx_sg.sgl, in img_spfi_start_dma()
Dspi-rockchip.c457 rs->rx_sg.sgl, rs->rx_sg.nents, in rockchip_spi_prepare_dma()
474 rs->tx_sg.sgl, rs->tx_sg.nents, in rockchip_spi_prepare_dma()
Dspi.c499 sg_set_page(&sgt->sgl[i], vm_page, in spi_map_buf()
503 sg_set_buf(&sgt->sgl[i], sg_buf, min); in spi_map_buf()
511 ret = dma_map_sg(dev, sgt->sgl, sgt->nents, dir); in spi_map_buf()
528 dma_unmap_sg(dev, sgt->sgl, sgt->orig_nents, dir); in spi_unmap_buf()
/linux-4.1.27/net/ceph/
Dcrypto.c133 sgt->sgl = prealloc_sg; in setup_sgtable()
137 for_each_sg(sgt->sgl, sg, sgt->orig_nents, i) { in setup_sgtable()
204 ret = crypto_blkcipher_encrypt(&desc, sg_out.sgl, sg_in, in ceph_aes_encrypt()
267 ret = crypto_blkcipher_encrypt(&desc, sg_out.sgl, sg_in, in ceph_aes_encrypt2()
320 ret = crypto_blkcipher_decrypt(&desc, sg_out, sg_in.sgl, src_len); in ceph_aes_decrypt()
386 ret = crypto_blkcipher_decrypt(&desc, sg_out, sg_in.sgl, src_len); in ceph_aes_decrypt2()
/linux-4.1.27/drivers/tty/serial/
Dimx.c476 struct scatterlist *sgl = &sport->tx_sgl[0]; in dma_tx_callback() local
483 dma_unmap_sg(sport->port.dev, sgl, sport->dma_tx_nents, DMA_TO_DEVICE); in dma_tx_callback()
517 struct scatterlist *sgl = sport->tx_sgl; in imx_dma_tx() local
531 sg_init_one(sgl, xmit->buf + xmit->tail, sport->tx_bytes); in imx_dma_tx()
534 sg_init_table(sgl, 2); in imx_dma_tx()
535 sg_set_buf(sgl, xmit->buf + xmit->tail, in imx_dma_tx()
537 sg_set_buf(sgl + 1, xmit->buf, xmit->head); in imx_dma_tx()
540 ret = dma_map_sg(dev, sgl, sport->dma_tx_nents, DMA_TO_DEVICE); in imx_dma_tx()
545 desc = dmaengine_prep_slave_sg(chan, sgl, sport->dma_tx_nents, in imx_dma_tx()
548 dma_unmap_sg(dev, sgl, sport->dma_tx_nents, in imx_dma_tx()
[all …]
Dmxs-auart.c227 struct scatterlist *sgl = &s->tx_sgl; in mxs_auart_dma_tx() local
241 sg_init_one(sgl, s->tx_dma_buf, size); in mxs_auart_dma_tx()
242 dma_map_sg(s->dev, sgl, 1, DMA_TO_DEVICE); in mxs_auart_dma_tx()
243 desc = dmaengine_prep_slave_sg(channel, sgl, in mxs_auart_dma_tx()
554 struct scatterlist *sgl = &s->rx_sgl; in mxs_auart_dma_prep_rx() local
570 sg_init_one(sgl, s->rx_dma_buf, UART_XMIT_SIZE); in mxs_auart_dma_prep_rx()
571 dma_map_sg(s->dev, sgl, 1, DMA_FROM_DEVICE); in mxs_auart_dma_prep_rx()
572 desc = dmaengine_prep_slave_sg(channel, sgl, 1, DMA_DEV_TO_MEM, in mxs_auart_dma_prep_rx()
/linux-4.1.27/drivers/infiniband/hw/qib/
Dqib_dma.c94 static int qib_map_sg(struct ib_device *dev, struct scatterlist *sgl, in qib_map_sg() argument
104 for_each_sg(sgl, sg, nents, i) { in qib_map_sg()
Dqib_mr.c269 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) { in qib_reg_user_mr()
/linux-4.1.27/drivers/infiniband/hw/ipath/
Dipath_dma.c101 static int ipath_map_sg(struct ib_device *dev, struct scatterlist *sgl, in ipath_map_sg() argument
111 for_each_sg(sgl, sg, nents, i) { in ipath_map_sg()
Dipath_mr.c224 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) { in ipath_reg_user_mr()
/linux-4.1.27/drivers/scsi/isci/
Drequest.c582 stp_req->sgl.offset = 0; in sci_stp_pio_request_construct()
583 stp_req->sgl.set = SCU_SGL_ELEMENT_PAIR_A; in sci_stp_pio_request_construct()
587 stp_req->sgl.index = 0; in sci_stp_pio_request_construct()
590 stp_req->sgl.index = -1; in sci_stp_pio_request_construct()
1261 struct scu_sgl_element *sgl; in pio_sgl_next() local
1264 struct isci_stp_pio_sgl *pio_sgl = &stp_req->sgl; in pio_sgl_next()
1268 sgl = NULL; in pio_sgl_next()
1272 sgl = NULL; in pio_sgl_next()
1275 sgl = &sgl_pair->B; in pio_sgl_next()
1280 sgl = NULL; in pio_sgl_next()
[all …]
Drequest.h79 } sgl; member
/linux-4.1.27/arch/alpha/kernel/
Dpci-noop.c141 static int alpha_noop_map_sg(struct device *dev, struct scatterlist *sgl, int nents, in alpha_noop_map_sg() argument
147 for_each_sg(sgl, sg, nents, i) { in alpha_noop_map_sg()
/linux-4.1.27/drivers/mmc/host/
Dmxs-mmc.c230 struct scatterlist * sgl; in mxs_mmc_prep_dma() local
237 sgl = data->sg; in mxs_mmc_prep_dma()
241 sgl = (struct scatterlist *) ssp->ssp_pio_words; in mxs_mmc_prep_dma()
246 sgl, sg_len, ssp->slave_dirn, flags); in mxs_mmc_prep_dma()
356 struct scatterlist *sgl = data->sg, *sg; in mxs_mmc_adtc() local
402 for_each_sg(sgl, sg, sg_len, i) in mxs_mmc_adtc()
/linux-4.1.27/drivers/scsi/megaraid/
Dmegaraid_sas_base.c1246 &pthru->sgl); in megasas_build_dcdb()
1250 &pthru->sgl); in megasas_build_dcdb()
1253 &pthru->sgl); in megasas_build_dcdb()
1389 &ldio->sgl); in megasas_build_ldio()
1392 ldio->sge_count = megasas_make_sgl64(instance, scp, &ldio->sgl); in megasas_build_ldio()
1394 ldio->sge_count = megasas_make_sgl32(instance, scp, &ldio->sgl); in megasas_build_ldio()
1479 mfi_sgl = &ldio->sgl; in megasas_dump_pending_frames()
1489 mfi_sgl = &pthru->sgl; in megasas_dump_pending_frames()
1934 dcmd->sgl.sge32[0].phys_addr = in megasas_get_ld_vf_affiliation_111()
1937 dcmd->sgl.sge32[0].phys_addr = new_affiliation_111_h; in megasas_get_ld_vf_affiliation_111()
[all …]
Dmegaraid_sas.h1318 union megasas_sgl sgl; /*28h */ member
1345 union megasas_sgl sgl; /*30h */ member
1372 union megasas_sgl sgl; /*28h */ member
1424 } sgl; member
1453 } sgl; member
1918 struct iovec sgl[MAX_IOCTL_SGE]; member
1941 struct compat_iovec sgl[MAX_IOCTL_SGE]; member
Dmegaraid_mbox.c1354 struct scatterlist *sgl; in megaraid_mbox_mksgl() local
1373 scsi_for_each_sg(scp, sgl, sgcnt, i) { in megaraid_mbox_mksgl()
1374 ccb->sgl64[i].address = sg_dma_address(sgl); in megaraid_mbox_mksgl()
1375 ccb->sgl64[i].length = sg_dma_len(sgl); in megaraid_mbox_mksgl()
1563 struct scatterlist *sgl; in DEF_SCSI_QCMD() local
1566 sgl = scsi_sglist(scp); in DEF_SCSI_QCMD()
1567 if (sg_page(sgl)) { in DEF_SCSI_QCMD()
1568 vaddr = (caddr_t) sg_virt(&sgl[0]); in DEF_SCSI_QCMD()
2224 struct scatterlist *sgl; in megaraid_mbox_dpc() local
2308 sgl = scsi_sglist(scp); in megaraid_mbox_dpc()
[all …]
/linux-4.1.27/drivers/infiniband/ulp/iser/
Diser_memory.c250 struct scatterlist *sg, *sgl = data->sg; in iser_sg_to_page_vec() local
257 *offset = (u64) sgl[0].offset & ~MASK_4K; in iser_sg_to_page_vec()
261 for_each_sg(sgl, sg, data->dma_nents, i) { in iser_sg_to_page_vec()
302 struct scatterlist *sg, *sgl, *next_sg = NULL; in iser_data_buf_aligned_len() local
309 sgl = data->sg; in iser_data_buf_aligned_len()
310 start_addr = ib_sg_dma_address(ibdev, sgl); in iser_data_buf_aligned_len()
312 for_each_sg(sgl, sg, data->dma_nents, i) { in iser_data_buf_aligned_len()
/linux-4.1.27/drivers/hsi/clients/
Dhsi_char.c160 kfree(sg_virt(msg->sgt.sgl)); in hsc_msg_free()
199 sg_init_one(msg->sgt.sgl, buf, alloc_size); in hsc_msg_alloc()
230 return msg->sgt.sgl->length; in hsc_msg_len_get()
235 msg->sgt.sgl->length = len; in hsc_msg_len_set()
468 sg_virt(msg->sgt.sgl), hsc_msg_len_get(msg)); in hsc_read()
504 if (copy_from_user(sg_virt(msg->sgt.sgl), (void __user *)buf, len)) { in hsc_write()
Dcmt_speech.c212 u32 *data = sg_virt(msg->sgt.sgl); in cs_set_cmd()
218 u32 *data = sg_virt(msg->sgt.sgl); in cs_get_cmd()
272 kfree(sg_virt(msg->sgt.sgl)); in cs_free_cmds()
294 sg_init_one(msg->sgt.sgl, buf, sizeof(*buf)); in cs_alloc_cmds()
632 sg_init_one(msg->sgt.sgl, address, hi->buf_size); in cs_hsi_peek_on_data_complete()
675 sg_init_one(rxmsg->sgt.sgl, (void *)hi->mmap_base, 0); in cs_hsi_read_on_data()
727 sg_init_one(txmsg->sgt.sgl, address, hi->buf_size); in cs_hsi_write_on_data()
Dssi_protocol.c167 data = sg_virt(msg->sgt.sgl); in ssip_set_cmd()
175 data = sg_virt(msg->sgt.sgl); in ssip_get_cmd()
188 sg = msg->sgt.sgl; in ssip_skb_to_msg()
258 kfree(sg_virt(msg->sgt.sgl)); in ssip_free_cmds()
278 sg_init_one(msg->sgt.sgl, buf, sizeof(*buf)); in ssip_alloc_cmds()
/linux-4.1.27/drivers/mtd/nand/
Dlpc32xx_mlc.c197 struct scatterlist sgl; member
398 sg_init_one(&host->sgl, mem, len); in lpc32xx_xmit_dma()
400 res = dma_map_sg(host->dma_chan->device->dev, &host->sgl, 1, in lpc32xx_xmit_dma()
406 desc = dmaengine_prep_slave_sg(host->dma_chan, &host->sgl, 1, dir, in lpc32xx_xmit_dma()
422 dma_unmap_sg(host->dma_chan->device->dev, &host->sgl, 1, in lpc32xx_xmit_dma()
426 dma_unmap_sg(host->dma_chan->device->dev, &host->sgl, 1, in lpc32xx_xmit_dma()
Dlpc32xx_slc.c212 struct scatterlist sgl; member
448 sg_init_one(&host->sgl, mem, len); in lpc32xx_xmit_dma()
450 res = dma_map_sg(host->dma_chan->device->dev, &host->sgl, 1, in lpc32xx_xmit_dma()
456 desc = dmaengine_prep_slave_sg(host->dma_chan, &host->sgl, 1, dir, in lpc32xx_xmit_dma()
472 dma_unmap_sg(host->dma_chan->device->dev, &host->sgl, 1, in lpc32xx_xmit_dma()
477 dma_unmap_sg(host->dma_chan->device->dev, &host->sgl, 1, in lpc32xx_xmit_dma()
/linux-4.1.27/drivers/dma/
Dimx-dma.c809 struct dma_chan *chan, struct scatterlist *sgl, in imxdma_prep_slave_sg() argument
824 for_each_sg(sgl, sg, sg_len, i) { in imxdma_prep_slave_sg()
830 if (sg_dma_len(sgl) & 3 || sgl->dma_address & 3) in imxdma_prep_slave_sg()
834 if (sg_dma_len(sgl) & 1 || sgl->dma_address & 1) in imxdma_prep_slave_sg()
844 desc->sg = sgl; in imxdma_prep_slave_sg()
976 desc->x = xt->sgl[0].size; in imxdma_prep_dma_interleaved()
978 desc->w = xt->sgl[0].icg + desc->x; in imxdma_prep_dma_interleaved()
Dcoh901318_lli.c233 struct scatterlist *sgl, unsigned int nents, in coh901318_lli_fill_sg() argument
258 for_each_sg(sgl, sg, nents, i) { in coh901318_lli_fill_sg()
Dtimb_dma.c509 struct scatterlist *sgl, unsigned int sg_len, in td_prep_slave_sg() argument
520 if (!sgl || !sg_len) { in td_prep_slave_sg()
540 for_each_sg(sgl, sg, sg_len, i) { in td_prep_slave_sg()
Dk3dma.c465 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sglen, in k3_dma_prep_slave_sg() argument
475 if (sgl == NULL) in k3_dma_prep_slave_sg()
478 for_each_sg(sgl, sg, sglen, i) { in k3_dma_prep_slave_sg()
493 for_each_sg(sgl, sg, sglen, i) { in k3_dma_prep_slave_sg()
Ddma-jz4780.c296 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, in jz4780_dma_prep_slave_sg() argument
310 sg_dma_address(&sgl[i]), in jz4780_dma_prep_slave_sg()
311 sg_dma_len(&sgl[i]), in jz4780_dma_prep_slave_sg()
Dmxs-dma.c500 struct dma_chan *chan, struct scatterlist *sgl, in mxs_dma_prep_slave_sg() argument
543 pio = (u32 *) sgl; in mxs_dma_prep_slave_sg()
558 for_each_sg(sgl, sg, sg_len, i) { in mxs_dma_prep_slave_sg()
Dmmp_pdma.c526 mmp_pdma_prep_slave_sg(struct dma_chan *dchan, struct scatterlist *sgl, in mmp_pdma_prep_slave_sg() argument
537 if ((sgl == NULL) || (sg_len == 0)) in mmp_pdma_prep_slave_sg()
542 for_each_sg(sgl, sg, sg_len, i) { in mmp_pdma_prep_slave_sg()
544 avail = sg_dma_len(sgl); in mmp_pdma_prep_slave_sg()
Dsirf-dma.c532 sdesc->xlen = xt->sgl[0].size / SIRFSOC_DMA_WORD_LEN; in sirfsoc_dma_prep_interleaved()
533 sdesc->width = (xt->sgl[0].size + xt->sgl[0].icg) / in sirfsoc_dma_prep_interleaved()
Ddma-jz4740.c390 struct dma_chan *c, struct scatterlist *sgl, in jz4740_dma_prep_slave_sg() argument
403 for_each_sg(sgl, sg, sg_len, i) { in jz4740_dma_prep_slave_sg()
Dmoxart-dma.c269 struct dma_chan *chan, struct scatterlist *sgl, in moxart_prep_slave_sg() argument
319 for_each_sg(sgl, sgent, sg_len, i) { in moxart_prep_slave_sg()
Dimg-mdc-dma.c452 struct dma_chan *chan, struct scatterlist *sgl, in mdc_prep_slave_sg() argument
464 if (!sgl) in mdc_prep_slave_sg()
478 for_each_sg(sgl, sg, sg_len, i) { in mdc_prep_slave_sg()
Dqcom_bam_dma.c587 struct scatterlist *sgl, unsigned int sg_len, in bam_prep_slave_sg() argument
606 for_each_sg(sgl, sg, sg_len, i) in bam_prep_slave_sg()
630 for_each_sg(sgl, sg, sg_len, i) { in bam_prep_slave_sg()
Dedma.c449 struct dma_chan *chan, struct scatterlist *sgl, in edma_prep_slave_sg() argument
462 if (unlikely(!echan || !sgl || !sg_len)) in edma_prep_slave_sg()
513 for_each_sg(sgl, sg, sg_len, i) { in edma_prep_slave_sg()
Dsun6i-dma.c561 struct dma_chan *chan, struct scatterlist *sgl, in sun6i_dma_prep_slave_sg() argument
574 if (!sgl) in sun6i_dma_prep_slave_sg()
586 for_each_sg(sgl, sg, sg_len, i) { in sun6i_dma_prep_slave_sg()
Dcoh901318.c2290 coh901318_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, in coh901318_prep_slave_sg() argument
2309 if (!sgl) in coh901318_prep_slave_sg()
2311 if (sg_dma_len(sgl) == 0) in coh901318_prep_slave_sg()
2357 for_each_sg(sgl, sg, sg_len, i) { in coh901318_prep_slave_sg()
2380 ret = coh901318_lli_fill_sg(&cohc->base->pool, lli, sgl, sg_len, in coh901318_prep_slave_sg()
Ds3c24xx-dma.c980 struct dma_chan *chan, struct scatterlist *sgl, in s3c24xx_dma_prep_slave_sg() argument
996 sg_dma_len(sgl), s3cchan->name); in s3c24xx_dma_prep_slave_sg()
1047 for_each_sg(sgl, sg, sg_len, tmp) { in s3c24xx_dma_prep_slave_sg()
Dnbpfaxi.c998 struct dma_chan *dchan, struct scatterlist *sgl, unsigned int sg_len, in nbpf_prep_slave_sg() argument
1011 return nbpf_prep_sg(chan, sgl, &slave_sg, sg_len, in nbpf_prep_slave_sg()
1016 return nbpf_prep_sg(chan, &slave_sg, sgl, sg_len, in nbpf_prep_slave_sg()
Dmpc512x_dma.c688 mpc_dma_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, in mpc_dma_prep_slave_sg() argument
710 for_each_sg(sgl, sg, sg_len, i) { in mpc_dma_prep_slave_sg()
Dep93xx_dma.c1025 ep93xx_dma_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, in ep93xx_dma_prep_slave_sg() argument
1047 for_each_sg(sgl, sg, sg_len, i) { in ep93xx_dma_prep_slave_sg()
Dpch_dma.c583 struct scatterlist *sgl, unsigned int sg_len, in pd_prep_slave_sg() argument
611 for_each_sg(sgl, sg, sg_len, i) { in pd_prep_slave_sg()
/linux-4.1.27/drivers/infiniband/hw/mlx5/
Dmem.c78 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) { in mlx5_ib_cont_pages()
180 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) { in __mlx5_ib_populate_pas()
Ddoorbell.c77 db->dma = sg_dma_address(page->umem->sg_head.sgl) + (virt & ~PAGE_MASK); in mlx5_ib_db_map_user()
/linux-4.1.27/drivers/net/ethernet/chelsio/cxgb4/
Dsge.c150 struct ulptx_sgl *sgl; member
322 const struct ulptx_sgl *sgl, const struct sge_txq *q) in unmap_sgl() argument
328 dma_unmap_single(dev, be64_to_cpu(sgl->addr0), ntohl(sgl->len0), in unmap_sgl()
331 dma_unmap_page(dev, be64_to_cpu(sgl->addr0), ntohl(sgl->len0), in unmap_sgl()
340 for (p = sgl->sge; nfrags >= 2; nfrags -= 2) { in unmap_sgl()
401 unmap_sgl(dev, d->skb, d->sgl, q); in free_tx_desc()
853 struct ulptx_sgl *sgl, u64 *end, unsigned int start, in write_sgl() argument
864 sgl->len0 = htonl(len); in write_sgl()
865 sgl->addr0 = cpu_to_be64(addr[0] + start); in write_sgl()
868 sgl->len0 = htonl(skb_frag_size(&si->frags[0])); in write_sgl()
[all …]
/linux-4.1.27/drivers/media/platform/xilinx/
Dxilinx-dma.h100 struct data_chunk sgl[1]; member
Dxilinx-dma.c363 dma->sgl[0].size = dma->format.width * dma->fmtinfo->bpp; in xvip_dma_buffer_queue()
364 dma->sgl[0].icg = dma->format.bytesperline - dma->sgl[0].size; in xvip_dma_buffer_queue()
/linux-4.1.27/drivers/target/
Dtarget_core_transport.c1336 transport_generic_map_mem_to_cmd(struct se_cmd *cmd, struct scatterlist *sgl, in transport_generic_map_mem_to_cmd() argument
1339 if (!sgl || !sgl_count) in transport_generic_map_mem_to_cmd()
1353 cmd->t_data_sg = sgl; in transport_generic_map_mem_to_cmd()
1394 struct scatterlist *sgl, u32 sgl_count, in target_submit_cmd_map_sgls() argument
1460 BUG_ON(!sgl); in target_submit_cmd_map_sgls()
1474 if (sgl) in target_submit_cmd_map_sgls()
1475 buf = kmap(sg_page(sgl)) + sgl->offset; in target_submit_cmd_map_sgls()
1478 memset(buf, 0, sgl->length); in target_submit_cmd_map_sgls()
1479 kunmap(sg_page(sgl)); in target_submit_cmd_map_sgls()
1483 rc = transport_generic_map_mem_to_cmd(se_cmd, sgl, sgl_count, in target_submit_cmd_map_sgls()
[all …]
Dtarget_core_pscsi.c873 pscsi_map_sg(struct se_cmd *cmd, struct scatterlist *sgl, u32 sgl_nents, in pscsi_map_sg() argument
881 int nr_pages = (cmd->data_length + sgl[0].offset + in pscsi_map_sg()
890 for_each_sg(sgl, sg, sgl_nents, i) { in pscsi_map_sg()
988 struct scatterlist *sgl = cmd->t_data_sg; in pscsi_execute_cmd() local
1010 if (!sgl) { in pscsi_execute_cmd()
1024 ret = pscsi_map_sg(cmd, sgl, sgl_nents, data_direction, &hbio); in pscsi_execute_cmd()
Dtarget_core_file.c319 static int fd_do_rw(struct se_cmd *cmd, struct scatterlist *sgl, in fd_do_rw() argument
338 for_each_sg(sgl, sg, sgl_nents, i) { in fd_do_rw()
611 fd_execute_rw(struct se_cmd *cmd, struct scatterlist *sgl, u32 sgl_nents, in fd_execute_rw() argument
641 ret = fd_do_rw(cmd, sgl, sgl_nents, 0); in fd_execute_rw()
675 ret = fd_do_rw(cmd, sgl, sgl_nents, 1); in fd_execute_rw()
Dtarget_core_rd.c477 rd_execute_rw(struct se_cmd *cmd, struct scatterlist *sgl, u32 sgl_nents, in rd_execute_rw() argument
521 sg_miter_start(&m, sgl, sgl_nents, in rd_execute_rw()
Dtarget_core_iblock.c675 iblock_execute_rw(struct se_cmd *cmd, struct scatterlist *sgl, u32 sgl_nents, in iblock_execute_rw() argument
750 for_each_sg(sgl, sg, sgl_nents, i) { in iblock_execute_rw()
/linux-4.1.27/include/scsi/
Dscsi_cmnd.h178 return cmd->sdb.table.sgl; in scsi_sglist()
315 return cmd->prot_sdb ? cmd->prot_sdb->table.sgl : NULL; in scsi_prot_sglist()
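
scsi_sglist() and scsi_prot_sglist() are the accessors through which the SCSI LLDs further down (csiostor, qla2xxx, virthba) reach a command's data and protection scatterlists. A hedged sketch of the usual traversal, using only helpers from scsi_cmnd.h; the function itself is invented for illustration:

    #include <scsi/scsi_cmnd.h>

    /* Illustrative only: sum the data bytes a command carries by
     * walking the same table scsi_sglist() returns. */
    static unsigned int my_cmd_data_bytes(struct scsi_cmnd *sc)
    {
            struct scatterlist *sg;
            unsigned int total = 0;
            int i;

            scsi_for_each_sg(sc, sg, scsi_sg_count(sc), i)
                    total += sg->length;

            return total;
    }
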
/linux-4.1.27/drivers/scsi/csiostor/
Dcsio_scsi.c291 struct ulptx_sgl *sgl) in csio_scsi_init_ultptx_dsgl() argument
301 sgl->cmd_nsge = htonl(ULPTX_CMD_V(ULP_TX_SC_DSGL) | ULPTX_MORE_F | in csio_scsi_init_ultptx_dsgl()
307 sgl->addr0 = cpu_to_be64(sg_dma_address(sgel)); in csio_scsi_init_ultptx_dsgl()
308 sgl->len0 = cpu_to_be32(sg_dma_len(sgel)); in csio_scsi_init_ultptx_dsgl()
309 sge_pair = (struct ulptx_sge_pair *)(sgl + 1); in csio_scsi_init_ultptx_dsgl()
331 sgl->addr0 = cpu_to_be64(dma_buf->paddr); in csio_scsi_init_ultptx_dsgl()
332 sgl->len0 = cpu_to_be32( in csio_scsi_init_ultptx_dsgl()
334 sge_pair = (struct ulptx_sge_pair *)(sgl + 1); in csio_scsi_init_ultptx_dsgl()
365 struct ulptx_sgl *sgl; in csio_scsi_init_read_wr() local
397 sgl = (struct ulptx_sgl *)((uintptr_t)wrp + in csio_scsi_init_read_wr()
[all …]
/linux-4.1.27/drivers/hsi/controllers/
Domap_ssi_port.c205 omap_ssi->gdd_trn[lch].sg = msg->sgt.sgl; in ssi_claim_lch()
227 err = dma_map_sg(&ssi->device, msg->sgt.sgl, msg->sgt.nents, in ssi_start_dma()
241 d_addr = sg_dma_address(msg->sgt.sgl); in ssi_start_dma()
243 err = dma_map_sg(&ssi->device, msg->sgt.sgl, msg->sgt.nents, in ssi_start_dma()
255 s_addr = sg_dma_address(msg->sgt.sgl); in ssi_start_dma()
269 writew_relaxed(SSI_BYTES_TO_FRAMES(msg->sgt.sgl->length), in ssi_start_dma()
320 if ((msg->sgt.nents) && (msg->sgt.sgl->length > sizeof(u32))) in ssi_start_transfer()
430 msg->channel, msg, msg->sgt.sgl->length, in ssi_flush_queue()
862 if ((!msg->sgt.nents) || (!msg->sgt.sgl->length)) { in ssi_pio_complete()
871 buf = sg_virt(msg->sgt.sgl) + msg->actual_len; in ssi_pio_complete()
[all …]
Domap_ssi.c226 dma_unmap_sg(&ssi->device, msg->sgt.sgl, msg->sgt.nents, dir); in ssi_gdd_complete()
247 msg->actual_len = sg_dma_len(msg->sgt.sgl); in ssi_gdd_complete()
/linux-4.1.27/drivers/net/wireless/brcm80211/brcmfmac/
Dbcmsdh.c519 struct scatterlist *sgl; in brcmf_sdiod_sglist_rw() local
563 mmc_dat.sg = sdiodev->sgtable.sgl; in brcmf_sdiod_sglist_rw()
579 sgl = sdiodev->sgtable.sgl; in brcmf_sdiod_sglist_rw()
589 sg_set_buf(sgl, pkt_data, sg_data_sz); in brcmf_sdiod_sglist_rw()
592 sgl = sg_next(sgl); in brcmf_sdiod_sglist_rw()
660 sg_init_table(sdiodev->sgtable.sgl, sdiodev->sgtable.orig_nents); in brcmf_sdiod_sglist_rw()
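
The brcmfmac path above reuses one preallocated sg_table per transfer: it points successive sgtable.sgl entries at packet buffers with sg_set_buf(), advances with sg_next(), and re-initializes the table once the request completes. A minimal sketch of that fill pattern, assuming sg_alloc_table() already sized the table; the buffer array and count are hypothetical:

    #include <linux/scatterlist.h>

    /* Sketch: point the first 'n' entries of a preallocated table at
     * caller-supplied buffers, brcmf_sdiod_sglist_rw()-style. */
    static void fill_sgtable(struct sg_table *sgt, void **bufs,
                             unsigned int *lens, unsigned int n)
    {
            struct scatterlist *sg = sgt->sgl;
            unsigned int i;

            for (i = 0; i < n && sg; i++) {
                    sg_set_buf(sg, bufs[i], lens[i]);
                    sg = sg_next(sg);
            }
    }
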
/linux-4.1.27/drivers/message/fusion/
Dmptctl.c130 static void kfree_sgl(MptSge_t *sgl, dma_addr_t sgl_dma,
793 MptSge_t *sgl, *sgIn; in mptctl_do_fw_download() local
878 if ((sgl = kbuf_alloc_2_sgl(fwlen, sgdir, sge_offset, in mptctl_do_fw_download()
902 iocp->name, sgl, numfrags)); in mptctl_do_fw_download()
909 sgIn = sgl; in mptctl_do_fw_download()
975 if (sgl) in mptctl_do_fw_download()
976 kfree_sgl(sgl, sgl_dma, buflist, iocp); in mptctl_do_fw_download()
1005 kfree_sgl(sgl, sgl_dma, buflist, iocp); in mptctl_do_fw_download()
1031 MptSge_t *sgl; in kbuf_alloc_2_sgl() local
1075 sgl = sglbuf; in kbuf_alloc_2_sgl()
[all …]
/linux-4.1.27/drivers/media/pci/saa7134/
Dsaa7134-vbi.c124 if (dma->sgl->offset) { in buffer_prepare()
134 return saa7134_pgtable_build(dev->pci, &dmaq->pt, dma->sgl, dma->nents, in buffer_prepare()
Dsaa7134-ts.c110 return saa7134_pgtable_build(dev->pci, &dmaq->pt, dma->sgl, dma->nents, in saa7134_ts_buffer_prepare()
/linux-4.1.27/drivers/block/
Dnvme-scsi.c380 struct sg_iovec sgl; in nvme_trans_copy_to_user() local
383 not_copied = copy_from_user(&sgl, hdr->dxferp + in nvme_trans_copy_to_user()
388 xfer_len = min(remaining, sgl.iov_len); in nvme_trans_copy_to_user()
389 not_copied = copy_to_user(sgl.iov_base, index, in nvme_trans_copy_to_user()
421 struct sg_iovec sgl; in nvme_trans_copy_from_user() local
424 not_copied = copy_from_user(&sgl, hdr->dxferp + in nvme_trans_copy_from_user()
429 xfer_len = min(remaining, sgl.iov_len); in nvme_trans_copy_from_user()
430 not_copied = copy_from_user(index, sgl.iov_base, in nvme_trans_copy_from_user()
2097 struct sg_iovec sgl; in nvme_trans_do_nvme_io() local
2099 retcode = copy_from_user(&sgl, hdr->dxferp + in nvme_trans_do_nvme_io()
[all …]
/linux-4.1.27/drivers/infiniband/hw/mlx4/
Ddoorbell.c75 db->dma = sg_dma_address(page->umem->sg_head.sgl) + (virt & ~PAGE_MASK); in mlx4_ib_db_map_user()
/linux-4.1.27/drivers/scsi/be2iscsi/
Dbe_mgmt.h55 struct amap_mcc_sge sgl[19]; member
84 struct mcc_sge sgl[19]; member
Dbe_main.c3164 struct be_dma_mem *sgl) in be_sgl_create_contiguous() argument
3169 WARN_ON(!sgl); in be_sgl_create_contiguous()
3171 sgl->va = virtual_address; in be_sgl_create_contiguous()
3172 sgl->dma = (unsigned long)physical_address; in be_sgl_create_contiguous()
3173 sgl->size = length; in be_sgl_create_contiguous()
3178 static void be_sgl_destroy_contiguous(struct be_dma_mem *sgl) in be_sgl_destroy_contiguous() argument
3180 memset(sgl, 0, sizeof(*sgl)); in be_sgl_destroy_contiguous()
3185 struct mem_array *pmem, struct be_dma_mem *sgl) in hwi_build_be_sgl_arr() argument
3187 if (sgl->va) in hwi_build_be_sgl_arr()
3188 be_sgl_destroy_contiguous(sgl); in hwi_build_be_sgl_arr()
[all …]
Dbe_cmds.h45 struct be_sge sgl[19]; /* used by non-embedded cmds */ member
629 return &wrb->payload.sgl[0]; in nonembedded_sgl()
/linux-4.1.27/drivers/infiniband/hw/cxgb4/
Dmem.c65 struct ulptx_sgl *sgl; in _c4iw_write_mem_dma_aligned() local
74 wr_len = roundup(sizeof(*req) + sizeof(*sgl), 16); in _c4iw_write_mem_dma_aligned()
94 sgl = (struct ulptx_sgl *)(req + 1); in _c4iw_write_mem_dma_aligned()
95 sgl->cmd_nsge = cpu_to_be32(ULPTX_CMD_V(ULP_TX_SC_DSGL) | in _c4iw_write_mem_dma_aligned()
97 sgl->len0 = cpu_to_be32(len); in _c4iw_write_mem_dma_aligned()
98 sgl->addr0 = cpu_to_be64(data); in _c4iw_write_mem_dma_aligned()
754 for_each_sg(mhp->umem->sg_head.sgl, sg, mhp->umem->nmap, entry) { in c4iw_reg_user_mr()
/linux-4.1.27/drivers/gpu/drm/
Ddrm_cache.c112 for_each_sg_page(st->sgl, &sg_iter, st->nents, 0) in drm_clflush_sg()
Ddrm_prime.c156 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, in drm_gem_map_detach()
204 if (!dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir)) { in drm_gem_map_dma_buf()
720 for_each_sg(sgt->sgl, sg, sgt->nents, count) { in drm_prime_sg_to_page_addr_arrays()
Ddrm_gem_cma_helper.c467 cma_obj->paddr = sg_dma_address(sgt->sgl); in drm_gem_cma_prime_import_sg_table()
/linux-4.1.27/drivers/mtd/nand/gpmi-nand/
Dgpmi-lib.c1123 struct scatterlist *sgl; in gpmi_send_command() local
1143 sgl = &this->cmd_sgl; in gpmi_send_command()
1145 sg_init_one(sgl, this->cmd_buffer, this->command_length); in gpmi_send_command()
1146 dma_map_sg(this->dev, sgl, 1, DMA_TO_DEVICE); in gpmi_send_command()
1148 sgl, 1, DMA_MEM_TO_DEV, in gpmi_send_command()
Dgpmi-nand.c384 struct scatterlist *sgl = &this->data_sgl; in prepare_data_dma() local
390 sg_init_one(sgl, this->upper_buf, this->upper_len); in prepare_data_dma()
391 ret = dma_map_sg(this->dev, sgl, 1, dr); in prepare_data_dma()
401 sg_init_one(sgl, this->data_buffer_dma, this->upper_len); in prepare_data_dma()
406 dma_map_sg(this->dev, sgl, 1, dr); in prepare_data_dma()
/linux-4.1.27/drivers/media/pci/cx25821/
Dcx25821-video.c197 sgt->sgl, 0, UNSET, in cx25821_buffer_prepare()
202 sgt->sgl, UNSET, 0, in cx25821_buffer_prepare()
211 sgt->sgl, line0_offset, in cx25821_buffer_prepare()
217 sgt->sgl, in cx25821_buffer_prepare()
223 sgt->sgl, in cx25821_buffer_prepare()
/linux-4.1.27/drivers/media/pci/tw68/
Dtw68-video.c474 tw68_risc_buffer(dev->pci, buf, dma->sgl, in tw68_buf_prepare()
478 tw68_risc_buffer(dev->pci, buf, dma->sgl, in tw68_buf_prepare()
482 tw68_risc_buffer(dev->pci, buf, dma->sgl, in tw68_buf_prepare()
487 tw68_risc_buffer(dev->pci, buf, dma->sgl, in tw68_buf_prepare()
493 tw68_risc_buffer(dev->pci, buf, dma->sgl, in tw68_buf_prepare()
/linux-4.1.27/net/8021q/
Dvlan_dev.c383 struct scatterlist *sgl, unsigned int sgc) in vlan_dev_fcoe_ddp_setup() argument
390 rc = ops->ndo_fcoe_ddp_setup(real_dev, xid, sgl, sgc); in vlan_dev_fcoe_ddp_setup()
441 struct scatterlist *sgl, unsigned int sgc) in vlan_dev_fcoe_ddp_target() argument
448 rc = ops->ndo_fcoe_ddp_target(real_dev, xid, sgl, sgc); in vlan_dev_fcoe_ddp_target()
/linux-4.1.27/drivers/char/agp/
Dintel-gtt.c109 for_each_sg(st->sgl, sg, num_entries, i) in intel_gtt_map_memory()
113 st->sgl, st->nents, PCI_DMA_BIDIRECTIONAL)) in intel_gtt_map_memory()
131 st.sgl = sg_list; in intel_gtt_unmap_memory()
853 for_each_sg(st->sgl, sg, st->nents, i) { in intel_gtt_insert_sg_entries()
919 mem->sg_list = st.sgl; in intel_fake_agp_insert_entries()
/linux-4.1.27/drivers/net/ethernet/chelsio/cxgb3/
Dsge.c1052 const struct sg_ent *sgl, in write_wr_hdr_sgl() argument
1077 const u64 *fp = (const u64 *)sgl; in write_wr_hdr_sgl()
1144 struct sg_ent *sgp, sgl[MAX_SKB_FRAGS / 2 + 1]; in write_tx_pkt_wr() local
1198 sgp = ndesc == 1 ? (struct sg_ent *)&d->flit[flits] : sgl; in write_tx_pkt_wr()
1201 write_wr_hdr_sgl(ndesc, skb, d, pidx, q, sgl, flits, sgl_flits, gen, in write_tx_pkt_wr()
1551 const struct sg_ent *sgl, int sgl_flits) in setup_deferred_unmapping() argument
1558 for (p = dui->addr; sgl_flits >= 3; sgl++, sgl_flits -= 3) { in setup_deferred_unmapping()
1559 *p++ = be64_to_cpu(sgl->addr[0]); in setup_deferred_unmapping()
1560 *p++ = be64_to_cpu(sgl->addr[1]); in setup_deferred_unmapping()
1563 *p = be64_to_cpu(sgl->addr[0]); in setup_deferred_unmapping()
[all …]
/linux-4.1.27/Documentation/dmaengine/
Dclient.txt88 struct dma_chan *chan, struct scatterlist *sgl,
108 nr_sg = dma_map_sg(chan->device->dev, sgl, sg_len);
112 desc = dmaengine_prep_slave_sg(chan, sgl, nr_sg, direction, flags);
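
client.txt's recipe above maps the list and then hands it to the channel driver. A fuller sketch of that documented sequence, in order map, prep, submit, issue; channel acquisition and completion handling are omitted, and my_submit_tx is an invented name:

    #include <linux/dmaengine.h>
    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    /* Minimal slave-DMA client sketch, assuming 'chan' came from the
     * usual channel-request path and 'sgl' describes a mem-to-device
     * buffer. */
    static int my_submit_tx(struct dma_chan *chan, struct scatterlist *sgl,
                            unsigned int sg_len)
    {
            struct dma_async_tx_descriptor *desc;
            dma_cookie_t cookie;
            int nr_sg;

            /* Map the scatterlist for the device behind the channel. */
            nr_sg = dma_map_sg(chan->device->dev, sgl, sg_len, DMA_TO_DEVICE);
            if (!nr_sg)
                    return -ENOMEM;

            /* Ask the driver to build one mem-to-device transaction. */
            desc = dmaengine_prep_slave_sg(chan, sgl, nr_sg, DMA_MEM_TO_DEV,
                                           DMA_PREP_INTERRUPT);
            if (!desc)
                    goto unmap;

            cookie = dmaengine_submit(desc);
            if (dma_submit_error(cookie))
                    goto unmap;

            /* Nothing runs until the pending queue is kicked. */
            dma_async_issue_pending(chan);
            return 0;

    unmap:
            dma_unmap_sg(chan->device->dev, sgl, sg_len, DMA_TO_DEVICE);
            return -EIO;
    }
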
/linux-4.1.27/drivers/gpu/drm/vmwgfx/
Dvmwgfx_buffer.c316 __sg_page_iter_start(&viter->iter, vsgt->sgt->sgl, in vmw_piter_start()
336 dma_unmap_sg(dev, vmw_tt->sgt.sgl, vmw_tt->sgt.nents, in vmw_ttm_unmap_from_dma()
359 ret = dma_map_sg(dev, vmw_tt->sgt.sgl, vmw_tt->sgt.orig_nents, in vmw_ttm_map_for_dma()
/linux-4.1.27/drivers/dma/xilinx/
Dxilinx_vdma.c942 if (!xt->numf || !xt->sgl[0].size) in xilinx_vdma_dma_prep_interleaved()
965 hw->hsize = xt->sgl[0].size; in xilinx_vdma_dma_prep_interleaved()
966 hw->stride = (xt->sgl[0].icg + xt->sgl[0].size) << in xilinx_vdma_dma_prep_interleaved()
/linux-4.1.27/drivers/media/pci/cx23885/
Dcx23885-video.c348 sgt->sgl, 0, UNSET, in buffer_prepare()
353 sgt->sgl, UNSET, 0, in buffer_prepare()
381 sgt->sgl, line0_offset, in buffer_prepare()
388 sgt->sgl, in buffer_prepare()
395 sgt->sgl, in buffer_prepare()
Dcx23885-vbi.c155 sgt->sgl, in buffer_prepare()
/linux-4.1.27/arch/m68k/fpsp040/
Dx_unfl.S144 | ;1=sgl, 2=dbl
224 | ;If destination format is sgl/dbl,
Dround.S26 | sgl = $0001xxxx
148 bfextu LOCAL_HI(%a0){#24:#2},%d3 |sgl prec. g-r are 2 bits right
149 movel #30,%d2 |of the sgl prec. limits
179 .set ad_1_sgl,0x00000100 | constant to add 1 to l-bit in sgl prec
204 andil #0xffffff00,LOCAL_HI(%a0) |truncate bits beyond sgl limit
Dutil.S235 bra end_ovfr |inf is same for all precisions (ext,dbl,sgl)
393 | 10 1 sgl
477 | ;smallest +sgl denorm
Dx_store.S88 | ;ext=00, sgl=01, dbl=10
Dbugfix.S191 | Check for opclass 0. If not, go and check for opclass 2 and sgl.
353 cmpiw #0x4400,%d0 |test for opclass 2 and size=sgl
/linux-4.1.27/drivers/crypto/qce/
Ddma.c112 struct scatterlist *sg = sgt->sgl, *sg_last = NULL; in qce_sgtable_add()
Dablkcipher.c117 rctx->dst_sg = rctx->dst_tbl.sgl; in qce_ablkcipher_async_req_handle()
/linux-4.1.27/drivers/crypto/
Domap-sham.c153 struct scatterlist sgl; member
584 sg_init_table(&ctx->sgl, 1); in omap_sham_xmit_dma()
585 ctx->sgl.page_link = ctx->sg->page_link; in omap_sham_xmit_dma()
586 ctx->sgl.offset = ctx->sg->offset; in omap_sham_xmit_dma()
587 sg_dma_len(&ctx->sgl) = len32; in omap_sham_xmit_dma()
588 sg_dma_address(&ctx->sgl) = sg_dma_address(ctx->sg); in omap_sham_xmit_dma()
590 tx = dmaengine_prep_slave_sg(dd->dma_lch, &ctx->sgl, 1, in omap_sham_xmit_dma()
/linux-4.1.27/drivers/mmc/core/
Dsdio_ops.c165 data.sg = sgtable.sgl; in mmc_io_rw_extended()
/linux-4.1.27/drivers/dma/hsu/
Dhsu.c223 struct dma_chan *chan, struct scatterlist *sgl, in hsu_dma_prep_slave_sg() argument
236 for_each_sg(sgl, sg, sg_len, i) { in hsu_dma_prep_slave_sg()
/linux-4.1.27/drivers/rapidio/devices/
Dtsi721_dma.c757 struct scatterlist *sgl, unsigned int sg_len, in tsi721_prep_rio_sg() argument
767 if (!sgl || !sg_len) { in tsi721_prep_rio_sg()
806 desc->sg = sgl; in tsi721_prep_rio_sg()
/linux-4.1.27/drivers/crypto/qat/qat_common/
Dqat_algs.c658 struct scatterlist *sgl, in qat_alg_sgl_to_bufl() argument
665 int n = sg_nents(sgl), assoc_n = sg_nents(assoc); in qat_alg_sgl_to_bufl()
707 for_each_sg(sgl, sg, n, i) { in qat_alg_sgl_to_bufl()
726 if (sgl != sglout) { in qat_alg_sgl_to_bufl()
784 if (sgl != sglout && buflout) { in qat_alg_sgl_to_bufl()
/linux-4.1.27/drivers/gpu/drm/radeon/
Dradeon_ttm.c575 nents = dma_map_sg(rdev->dev, ttm->sg->sgl, ttm->sg->nents, direction); in radeon_ttm_tt_pin_userptr()
603 if (!ttm->sg->sgl) in radeon_ttm_tt_unpin_userptr()
607 dma_unmap_sg(rdev->dev, ttm->sg->sgl, ttm->sg->nents, direction); in radeon_ttm_tt_unpin_userptr()
609 for_each_sg_page(ttm->sg->sgl, &sg_iter, ttm->sg->nents, 0) { in radeon_ttm_tt_unpin_userptr()
/linux-4.1.27/drivers/scsi/cxgbi/
Dlibcxgbi.c1375 struct scatterlist *sgl, in ddp_make_gl() argument
1381 struct scatterlist *sg = sgl; in ddp_make_gl()
1414 for (i = 1, sg = sg_next(sgl), j = 0; i < sgcnt; in ddp_make_gl()
1547 struct scatterlist *sgl, unsigned int sgcnt, gfp_t gfp) in cxgbi_ddp_reserve() argument
1567 gl = ddp_make_gl(xferlen, sgl, sgcnt, cdev->pdev, gfp); in cxgbi_ddp_reserve()
1718 scsi_in(sc)->table.sgl, in task_reserve_itt()
2020 static int sgl_seek_offset(struct scatterlist *sgl, unsigned int sgcnt, in sgl_seek_offset() argument
2027 for_each_sg(sgl, sg, sgcnt, i) { in sgl_seek_offset()
2178 sdb->table.sgl, sdb->table.nents, in cxgbi_conn_init_pdu()
/linux-4.1.27/drivers/media/pci/cx88/
Dcx88-video.c463 sgt->sgl, 0, UNSET, in buffer_prepare()
468 sgt->sgl, UNSET, 0, in buffer_prepare()
473 sgt->sgl, in buffer_prepare()
480 sgt->sgl, in buffer_prepare()
488 sgt->sgl, 0, buf->bpl, in buffer_prepare()
Dcx88-vbi.c145 cx88_risc_buffer(dev->pci, &buf->risc, sgt->sgl, in buffer_prepare()
/linux-4.1.27/arch/tile/include/hv/
Dhypervisor.h1842 HV_SGL sgl[/* sgl_len */], __hv64 offset, HV_IntArg intarg);
1888 HV_SGL sgl[/* sgl_len */], __hv64 offset, HV_IntArg intarg);
/linux-4.1.27/drivers/base/
Ddma-mapping.c238 sg_set_page(sgt->sgl, page, PAGE_ALIGN(size), 0); in dma_common_get_sgtable()
/linux-4.1.27/drivers/scsi/qla2xxx/
Dqla_iocb.c1045 struct scatterlist *sg, *sgl; in qla24xx_walk_and_build_sglist() local
1054 sgl = scsi_sglist(cmd); in qla24xx_walk_and_build_sglist()
1057 sgl = tc->sg; in qla24xx_walk_and_build_sglist()
1065 for_each_sg(sgl, sg, tot_dsds, i) { in qla24xx_walk_and_build_sglist()
1135 struct scatterlist *sg, *sgl; in qla24xx_walk_and_build_prot_sglist() local
1144 sgl = scsi_prot_sglist(cmd); in qla24xx_walk_and_build_prot_sglist()
1148 sgl = tc->prot_sg; in qla24xx_walk_and_build_prot_sglist()
1157 for_each_sg(sgl, sg, tot_dsds, i) { in qla24xx_walk_and_build_prot_sglist()
/linux-4.1.27/drivers/crypto/ccp/
Dccp-crypto-main.c305 for (sg = table->sgl; sg; sg = sg_next(sg)) in ccp_crypto_sg_table_add()
/linux-4.1.27/arch/powerpc/platforms/ps3/
Dsystem-bus.c642 static int ps3_sb_map_sg(struct device *_dev, struct scatterlist *sgl, in ps3_sb_map_sg() argument
653 for_each_sg(sgl, sg, nents, i) { in ps3_sb_map_sg()
/linux-4.1.27/drivers/misc/carma/
Dcarma-fpga-program.c565 for_each_sg(table.sgl, sg, num_pages, i) { in fpga_program_dma()
602 tx = dmaengine_prep_dma_sg(chan, table.sgl, num_pages, in fpga_program_dma()
Dcarma-fpga.c483 sg = table->sgl; in data_setup_corl_table()
704 src_sg = priv->corl_table.sgl; in data_submit_dma()
/linux-4.1.27/drivers/media/platform/omap3isp/
Dispstat.c164 dma_sync_sg_for_device(stat->isp->dev, buf->sgt.sgl, in isp_stat_buf_sync_for_device()
174 dma_sync_sg_for_cpu(stat->isp->dev, buf->sgt.sgl, in isp_stat_buf_sync_for_cpu()
/linux-4.1.27/drivers/media/pci/solo6x10/
Dsolo6x10-v4l2-enc.c331 for_each_sg(vbuf->sgl, sg, vbuf->nents, i) { in solo_send_desc()
751 sg_copy_from_buffer(vbuf->sgl, vbuf->nents, in solo_enc_buf_finish()
755 sg_copy_from_buffer(vbuf->sgl, vbuf->nents, in solo_enc_buf_finish()
/linux-4.1.27/drivers/staging/unisys/virthba/
Dvirthba.c818 struct scatterlist *sgl = NULL; in virthba_queue_command_lck() local
876 sgl = scsi_sglist(scsicmd); in virthba_queue_command_lck()
878 for_each_sg(sgl, sg, scsi_sg_count(scsicmd), i) { in virthba_queue_command_lck()
/linux-4.1.27/arch/powerpc/boot/dts/
Dwarp.dts143 compatible = "pika,fpga-sgl";
/linux-4.1.27/arch/m68k/ifpsp060/src/
Dfpsp.S1400 bsr.l funimp_skew # skew sgl or dbl inputs
1433 cmpi.b %d0,&0x11 # is class = 2 & fmt = sgl?
1977 # "non-skewed" operand for cases of sgl and dbl src INFs,NANs, and DENORMs.
2304 cmpi.b %d0,&0x1 # was src sgl?
3197 short tbl_operr - tbl_operr # sgl prec shouldn't happen
3329 # denorm operand in the sgl or dbl format. NANs also become skewed and must be
3367 short fsnan_out_s - tbl_snan # sgl prec shouldn't happen
3792 # in the sgl or dbl format.
5309 # here, the operation may underflow iff the precision is sgl or dbl.
11546 # norms/denorms into ext/sgl/dbl precision. #
[all …]
Dfplsp.S395 set SGL_LO, 0x3f81 # min sgl prec exponent
396 set SGL_HI, 0x407e # max sgl prec exponent
496 set sgl_thresh, 0x3f81 # minimum sgl exponent
574 fmov.s 0x8(%a6),%fp0 # load sgl input
751 fmov.s 0x8(%a6),%fp0 # load sgl input
928 fmov.s 0x8(%a6),%fp0 # load sgl input
1105 fmov.s 0x8(%a6),%fp0 # load sgl input
1282 fmov.s 0x8(%a6),%fp0 # load sgl input
1459 fmov.s 0x8(%a6),%fp0 # load sgl input
1636 fmov.s 0x8(%a6),%fp0 # load sgl input
[all …]
/linux-4.1.27/drivers/scsi/fcoe/
Dfcoe.c1032 struct scatterlist *sgl, unsigned int sgc) in fcoe_ddp_setup() argument
1038 xid, sgl, in fcoe_ddp_setup()
1054 struct scatterlist *sgl, unsigned int sgc) in fcoe_ddp_target() argument
1060 sgl, sgc); in fcoe_ddp_target()
