Searched refs:sgl (Results 1 – 200 of 291) sorted by relevance

/linux-4.4.14/drivers/misc/genwqe/
Dcard_utils.c298 int genwqe_alloc_sync_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl, in genwqe_alloc_sync_sgl() argument
304 sgl->fpage_offs = offset_in_page((unsigned long)user_addr); in genwqe_alloc_sync_sgl()
305 sgl->fpage_size = min_t(size_t, PAGE_SIZE-sgl->fpage_offs, user_size); in genwqe_alloc_sync_sgl()
306 sgl->nr_pages = DIV_ROUND_UP(sgl->fpage_offs + user_size, PAGE_SIZE); in genwqe_alloc_sync_sgl()
307 sgl->lpage_size = (user_size - sgl->fpage_size) % PAGE_SIZE; in genwqe_alloc_sync_sgl()
310 __func__, user_addr, user_size, sgl->nr_pages, in genwqe_alloc_sync_sgl()
311 sgl->fpage_offs, sgl->fpage_size, sgl->lpage_size); in genwqe_alloc_sync_sgl()
313 sgl->user_addr = user_addr; in genwqe_alloc_sync_sgl()
314 sgl->user_size = user_size; in genwqe_alloc_sync_sgl()
315 sgl->sgl_size = genwqe_sgl_size(sgl->nr_pages); in genwqe_alloc_sync_sgl()
[all …]
Dcard_base.h354 struct sg_entry *sgl; member
372 int genwqe_alloc_sync_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl,
375 int genwqe_setup_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl,
378 int genwqe_free_sync_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl);
/linux-4.4.14/lib/
Dscatterlist.c106 struct scatterlist *sg_last(struct scatterlist *sgl, unsigned int nents) in sg_last() argument
111 for_each_sg(sgl, sg, nents, i) in sg_last()
115 BUG_ON(sgl[0].sg_magic != SG_MAGIC); in sg_last()
132 void sg_init_table(struct scatterlist *sgl, unsigned int nents) in sg_init_table() argument
134 memset(sgl, 0, sizeof(*sgl) * nents); in sg_init_table()
139 sgl[i].sg_magic = SG_MAGIC; in sg_init_table()
142 sg_mark_end(&sgl[nents - 1]); in sg_init_table()
208 struct scatterlist *sgl, *next; in __sg_free_table() local
210 if (unlikely(!table->sgl)) in __sg_free_table()
213 sgl = table->sgl; in __sg_free_table()
[all …]
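
A minimal sketch of the core scatterlist API matched above, for orientation (buffer names are hypothetical, error handling omitted; sg_set_buf() assumes lowmem buffers):

#include <linux/kernel.h>
#include <linux/scatterlist.h>

static void sgl_demo(void *buf_a, void *buf_b, unsigned int len)
{
        struct scatterlist sgl[2];
        struct scatterlist *sg;
        int i;

        sg_init_table(sgl, 2);            /* zero entries, set SG_MAGIC, mark the last one */
        sg_set_buf(&sgl[0], buf_a, len);
        sg_set_buf(&sgl[1], buf_b, len);

        for_each_sg(sgl, sg, 2, i)        /* walks entries, following any chaining */
                pr_info("entry %d: length %u\n", i, sg->length);
}
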
Dkfifo.c308 static int setup_sgl_buf(struct scatterlist *sgl, void *buf, in setup_sgl_buf() argument
334 sg_set_page(sgl, page, l - off, off); in setup_sgl_buf()
335 sgl = sg_next(sgl); in setup_sgl_buf()
336 if (++n == nents || sgl == NULL) in setup_sgl_buf()
343 sg_set_page(sgl, page, len, off); in setup_sgl_buf()
347 static unsigned int setup_sgl(struct __kfifo *fifo, struct scatterlist *sgl, in setup_sgl() argument
363 n = setup_sgl_buf(sgl, fifo->data + off, nents, l); in setup_sgl()
364 n += setup_sgl_buf(sgl + n, fifo->data, nents - n, len - l); in setup_sgl()
370 struct scatterlist *sgl, int nents, unsigned int len) in __kfifo_dma_in_prepare() argument
378 return setup_sgl(fifo, sgl, nents, len, fifo->in); in __kfifo_dma_in_prepare()
[all …]
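
setup_sgl()/setup_sgl_buf() above back the kfifo DMA helpers declared in kfifo.h; a rough sketch of the prepare/finish pattern (fifo name and length are illustrative, the DMA engine programming is elided, and note that prepare may clamp len to the free space):

#include <linux/kfifo.h>
#include <linux/scatterlist.h>

static DECLARE_KFIFO(rx_fifo, unsigned char, 4096);

static int stage_dma_in(unsigned int len)
{
        struct scatterlist sg[2];        /* two entries: the ring may wrap once */
        unsigned int nents;

        nents = kfifo_dma_in_prepare(&rx_fifo, sg, ARRAY_SIZE(sg), len);
        if (!nents)
                return -ENOSPC;          /* fifo full, or sg[] too short */

        /* ... hand sg[0..nents-1] to the DMA engine and wait for completion ... */

        kfifo_dma_in_finish(&rx_fifo, len);
        return 0;
}
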
Dswiotlb.c879 swiotlb_map_sg_attrs(struct device *hwdev, struct scatterlist *sgl, int nelems, in swiotlb_map_sg_attrs() argument
887 for_each_sg(sgl, sg, nelems, i) { in swiotlb_map_sg_attrs()
899 swiotlb_unmap_sg_attrs(hwdev, sgl, i, dir, in swiotlb_map_sg_attrs()
901 sg_dma_len(sgl) = 0; in swiotlb_map_sg_attrs()
914 swiotlb_map_sg(struct device *hwdev, struct scatterlist *sgl, int nelems, in swiotlb_map_sg() argument
917 return swiotlb_map_sg_attrs(hwdev, sgl, nelems, dir, NULL); in swiotlb_map_sg()
926 swiotlb_unmap_sg_attrs(struct device *hwdev, struct scatterlist *sgl, in swiotlb_unmap_sg_attrs() argument
934 for_each_sg(sgl, sg, nelems, i) in swiotlb_unmap_sg_attrs()
941 swiotlb_unmap_sg(struct device *hwdev, struct scatterlist *sgl, int nelems, in swiotlb_unmap_sg() argument
944 return swiotlb_unmap_sg_attrs(hwdev, sgl, nelems, dir, NULL); in swiotlb_unmap_sg()
[all …]
/linux-4.4.14/crypto/
Dalgif_skcipher.c59 struct af_alg_sgl sgl; member
78 struct scatterlist *sgl; in skcipher_free_async_sgls() local
83 af_alg_free_sg(&rsgl->sgl); in skcipher_free_async_sgls()
87 sgl = sreq->tsg; in skcipher_free_async_sgls()
88 n = sg_nents(sgl); in skcipher_free_async_sgls()
89 for_each_sg(sgl, sg, n, i) in skcipher_free_async_sgls()
124 struct skcipher_sg_list *sgl; in skcipher_alloc_sgl() local
127 sgl = list_entry(ctx->tsgl.prev, struct skcipher_sg_list, list); in skcipher_alloc_sgl()
129 sg = sgl->sg; in skcipher_alloc_sgl()
131 if (!sg || sgl->cur >= MAX_SGL_ENTS) { in skcipher_alloc_sgl()
[all …]
Dalgif_aead.c82 struct aead_sg_list *sgl = &ctx->tsgl; in aead_put_sgl() local
83 struct scatterlist *sg = sgl->sg; in aead_put_sgl()
86 for (i = 0; i < sgl->cur; i++) { in aead_put_sgl()
94 sgl->cur = 0; in aead_put_sgl()
175 struct aead_sg_list *sgl = &ctx->tsgl; in aead_sendmsg() local
221 sg = sgl->sg + sgl->cur - 1; in aead_sendmsg()
252 if (sgl->cur >= ALG_MAX_PAGES) { in aead_sendmsg()
258 sg = sgl->sg + sgl->cur; in aead_sendmsg()
279 sgl->cur++; in aead_sendmsg()
306 struct aead_sg_list *sgl = &ctx->tsgl; in aead_sendpage() local
[all …]
Daf_alg.c394 int af_alg_make_sg(struct af_alg_sgl *sgl, struct iov_iter *iter, int len) in af_alg_make_sg() argument
400 n = iov_iter_get_pages(iter, sgl->pages, len, ALG_MAX_PAGES, &off); in af_alg_make_sg()
408 sg_init_table(sgl->sg, npages + 1); in af_alg_make_sg()
413 sg_set_page(sgl->sg + i, sgl->pages[i], plen, off); in af_alg_make_sg()
418 sg_mark_end(sgl->sg + npages - 1); in af_alg_make_sg()
419 sgl->npages = npages; in af_alg_make_sg()
432 void af_alg_free_sg(struct af_alg_sgl *sgl) in af_alg_free_sg() argument
436 for (i = 0; i < sgl->npages; i++) in af_alg_free_sg()
437 put_page(sgl->pages[i]); in af_alg_free_sg()
Dalgif_hash.c25 struct af_alg_sgl sgl; member
71 len = af_alg_make_sg(&ctx->sgl, &msg->msg_iter, len); in hash_sendmsg()
77 ahash_request_set_crypt(&ctx->req, ctx->sgl.sg, NULL, len); in hash_sendmsg()
81 af_alg_free_sg(&ctx->sgl); in hash_sendmsg()
116 sg_init_table(ctx->sgl.sg, 1); in hash_sendpage()
117 sg_set_page(ctx->sgl.sg, page, size, offset); in hash_sendpage()
119 ahash_request_set_crypt(&ctx->req, ctx->sgl.sg, ctx->result, size); in hash_sendpage()
/linux-4.4.14/tools/virtio/linux/
Dscatterlist.h92 struct scatterlist *sgl) in sg_chain() argument
104 prv[prv_nents - 1].page_link = ((unsigned long) sgl | 0x01) & ~0x02; in sg_chain()
159 static inline void sg_init_table(struct scatterlist *sgl, unsigned int nents) in sg_init_table() argument
161 memset(sgl, 0, sizeof(*sgl) * nents); in sg_init_table()
166 sgl[i].sg_magic = SG_MAGIC; in sg_init_table()
169 sg_mark_end(&sgl[nents - 1]); in sg_init_table()
/linux-4.4.14/include/linux/
Dscatterlist.h39 struct scatterlist *sgl; /* the list */ member
162 struct scatterlist *sgl) in sg_chain() argument
174 prv[prv_nents - 1].page_link = ((unsigned long) sgl | 0x01) & ~0x02; in sg_chain()
269 size_t sg_copy_buffer(struct scatterlist *sgl, unsigned int nents, void *buf,
272 size_t sg_copy_from_buffer(struct scatterlist *sgl, unsigned int nents,
274 size_t sg_copy_to_buffer(struct scatterlist *sgl, unsigned int nents,
277 size_t sg_pcopy_from_buffer(struct scatterlist *sgl, unsigned int nents,
279 size_t sg_pcopy_to_buffer(struct scatterlist *sgl, unsigned int nents,
376 void sg_miter_start(struct sg_mapping_iter *miter, struct scatterlist *sgl,
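
The sg_copy_*_buffer() helpers declared above move data between a scatterlist and a linear buffer; a short sketch (names are hypothetical):

#include <linux/printk.h>
#include <linux/scatterlist.h>

static void bounce_demo(struct scatterlist *sgl, unsigned int nents,
                        void *bounce, size_t len)
{
        if (sg_copy_from_buffer(sgl, nents, bounce, len) != len)  /* buffer -> sg pages */
                pr_warn("short copy into sg list\n");

        if (sg_copy_to_buffer(sgl, nents, bounce, len) != len)    /* sg pages -> buffer */
                pr_warn("short copy out of sg list\n");
}
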
Dkfifo.h658 #define kfifo_dma_in_prepare(fifo, sgl, nents, len) \ argument
661 struct scatterlist *__sgl = (sgl); \
709 #define kfifo_dma_out_prepare(fifo, sgl, nents, len) \ argument
712 struct scatterlist *__sgl = (sgl); \
792 struct scatterlist *sgl, int nents, unsigned int len);
795 struct scatterlist *sgl, int nents, unsigned int len);
814 struct scatterlist *sgl, int nents, unsigned int len, size_t recsize);
820 struct scatterlist *sgl, int nents, unsigned int len, size_t recsize);
Ddma-mapping.h271 #define dma_map_sg_attrs(dev, sgl, nents, dir, attrs) \ argument
272 dma_map_sg(dev, sgl, nents, dir)
274 #define dma_unmap_sg_attrs(dev, sgl, nents, dir, attrs) \ argument
275 dma_unmap_sg(dev, sgl, nents, dir)
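
dma_map_sg()/dma_unmap_sg() wrapped above are the portable entry points behind the per-arch and per-bus map_sg implementations throughout this listing; a hedged sketch of the usual calling pattern (dev and sgt are assumed to be set up already):

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int map_for_device(struct device *dev, struct sg_table *sgt)
{
        struct scatterlist *sg;
        int i, nents;

        nents = dma_map_sg(dev, sgt->sgl, sgt->orig_nents, DMA_TO_DEVICE);
        if (!nents)
                return -ENOMEM;           /* mapping failed */

        /* an IOMMU may merge entries: iterate only the mapped count */
        for_each_sg(sgt->sgl, sg, nents, i)
                pr_debug("seg %d: dma %pad len %u\n",
                         i, &sg_dma_address(sg), sg_dma_len(sg));

        /* unmap with the original nents, not the possibly smaller mapped count */
        dma_unmap_sg(dev, sgt->sgl, sgt->orig_nents, DMA_TO_DEVICE);
        return 0;
}
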
Ddmaengine.h169 struct data_chunk sgl[0]; member
721 struct dma_chan *chan, struct scatterlist *sgl,
775 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, in dmaengine_prep_slave_sg() argument
778 return chan->device->device_prep_slave_sg(chan, sgl, sg_len, in dmaengine_prep_slave_sg()
785 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, in dmaengine_prep_rio_sg() argument
789 return chan->device->device_prep_slave_sg(chan, sgl, sg_len, in dmaengine_prep_rio_sg()
957 static inline size_t dmaengine_get_icg(bool inc, bool sgl, size_t icg, in dmaengine_get_icg() argument
963 else if (sgl) in dmaengine_get_icg()
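
dmaengine_prep_slave_sg() above is the wrapper that the slave-DMA users later in this listing go through; the usual prep/submit/issue sequence looks roughly like this (channel configuration and sg mapping assumed done):

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

static int submit_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
                           unsigned int sg_len)
{
        struct dma_async_tx_descriptor *desc;
        dma_cookie_t cookie;

        desc = dmaengine_prep_slave_sg(chan, sgl, sg_len, DMA_MEM_TO_DEV,
                                       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!desc)
                return -EINVAL;

        cookie = dmaengine_submit(desc);  /* queue the descriptor */
        if (dma_submit_error(cookie))
                return -EIO;

        dma_async_issue_pending(chan);    /* actually start the transfer */
        return 0;
}
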
Dswiotlb.h82 swiotlb_map_sg_attrs(struct device *hwdev, struct scatterlist *sgl, int nelems,
86 swiotlb_unmap_sg_attrs(struct device *hwdev, struct scatterlist *sgl,
Dmpi.h76 MPI mpi_read_raw_from_sgl(struct scatterlist *sgl, unsigned int len);
/linux-4.4.14/arch/tile/include/asm/
Dhv_driver.h42 HV_SGL sgl[/* sgl_len */], __hv64 offset, in tile_hv_dev_preada()
45 return hv_dev_preada(devhdl, flags, sgl_len, sgl, in tile_hv_dev_preada()
52 HV_SGL sgl[/* sgl_len */], __hv64 offset, in tile_hv_dev_pwritea()
55 return hv_dev_pwritea(devhdl, flags, sgl_len, sgl, in tile_hv_dev_pwritea()
/linux-4.4.14/lib/mpi/
Dmpicoder.c350 int mpi_write_to_sgl(MPI a, struct scatterlist *sgl, unsigned *nbytes, in mpi_write_to_sgl() argument
367 buf_len = sgl->length; in mpi_write_to_sgl()
368 p2 = sg_virt(sgl); in mpi_write_to_sgl()
409 sgl = sg_next(sgl); in mpi_write_to_sgl()
410 if (!sgl) in mpi_write_to_sgl()
412 buf_len = sgl->length; in mpi_write_to_sgl()
413 p2 = sg_virt(sgl); in mpi_write_to_sgl()
437 MPI mpi_read_raw_from_sgl(struct scatterlist *sgl, unsigned int len) in mpi_read_raw_from_sgl() argument
446 ents = sg_nents(sgl); in mpi_read_raw_from_sgl()
448 for_each_sg(sgl, sg, ents, i) { in mpi_read_raw_from_sgl()
[all …]
/linux-4.4.14/arch/arm64/mm/
Ddma-mapping.c226 static int __swiotlb_map_sg_attrs(struct device *dev, struct scatterlist *sgl, in __swiotlb_map_sg_attrs() argument
233 ret = swiotlb_map_sg_attrs(dev, sgl, nelems, dir, attrs); in __swiotlb_map_sg_attrs()
235 for_each_sg(sgl, sg, ret, i) in __swiotlb_map_sg_attrs()
243 struct scatterlist *sgl, int nelems, in __swiotlb_unmap_sg_attrs() argument
251 for_each_sg(sgl, sg, nelems, i) in __swiotlb_unmap_sg_attrs()
254 swiotlb_unmap_sg_attrs(dev, sgl, nelems, dir, attrs); in __swiotlb_unmap_sg_attrs()
276 struct scatterlist *sgl, int nelems, in __swiotlb_sync_sg_for_cpu() argument
283 for_each_sg(sgl, sg, nelems, i) in __swiotlb_sync_sg_for_cpu()
286 swiotlb_sync_sg_for_cpu(dev, sgl, nelems, dir); in __swiotlb_sync_sg_for_cpu()
290 struct scatterlist *sgl, int nelems, in __swiotlb_sync_sg_for_device() argument
[all …]
/linux-4.4.14/arch/microblaze/kernel/
Ddma.c54 static int dma_direct_map_sg(struct device *dev, struct scatterlist *sgl, in dma_direct_map_sg() argument
62 for_each_sg(sgl, sg, nents, i) { in dma_direct_map_sg()
131 struct scatterlist *sgl, int nents, in dma_direct_sync_sg_for_cpu() argument
139 for_each_sg(sgl, sg, nents, i) in dma_direct_sync_sg_for_cpu()
145 struct scatterlist *sgl, int nents, in dma_direct_sync_sg_for_device() argument
153 for_each_sg(sgl, sg, nents, i) in dma_direct_sync_sg_for_device()
/linux-4.4.14/drivers/scsi/lpfc/
Dlpfc_scsi.c182 struct sli4_sge *sgl = (struct sli4_sge *)lpfc_cmd->fcp_bpl; in lpfc_sli4_set_rsp_sgl_last() local
183 if (sgl) { in lpfc_sli4_set_rsp_sgl_last()
184 sgl += 1; in lpfc_sli4_set_rsp_sgl_last()
185 sgl->word2 = le32_to_cpu(sgl->word2); in lpfc_sli4_set_rsp_sgl_last()
186 bf_set(lpfc_sli4_sge_last, sgl, 1); in lpfc_sli4_set_rsp_sgl_last()
187 sgl->word2 = cpu_to_le32(sgl->word2); in lpfc_sli4_set_rsp_sgl_last()
794 struct sli4_sge *sgl; in lpfc_new_scsi_buf_s4() local
874 sgl = (struct sli4_sge *)psb->fcp_bpl; in lpfc_new_scsi_buf_s4()
884 sgl->addr_hi = cpu_to_le32(putPaddrHigh(pdma_phys_fcp_cmd)); in lpfc_new_scsi_buf_s4()
885 sgl->addr_lo = cpu_to_le32(putPaddrLow(pdma_phys_fcp_cmd)); in lpfc_new_scsi_buf_s4()
[all …]
Dlpfc_sli.c8056 struct sli4_sge *sgl = NULL; in lpfc_sli4_bpl2sgl() local
8067 sgl = (struct sli4_sge *)sglq->sgl; in lpfc_sli4_bpl2sgl()
8089 sgl->addr_hi = bpl->addrHigh; in lpfc_sli4_bpl2sgl()
8090 sgl->addr_lo = bpl->addrLow; in lpfc_sli4_bpl2sgl()
8092 sgl->word2 = le32_to_cpu(sgl->word2); in lpfc_sli4_bpl2sgl()
8094 bf_set(lpfc_sli4_sge_last, sgl, 1); in lpfc_sli4_bpl2sgl()
8096 bf_set(lpfc_sli4_sge_last, sgl, 0); in lpfc_sli4_bpl2sgl()
8101 sgl->sge_len = cpu_to_le32(bde.tus.f.bdeSize); in lpfc_sli4_bpl2sgl()
8113 bf_set(lpfc_sli4_sge_offset, sgl, offset); in lpfc_sli4_bpl2sgl()
8114 bf_set(lpfc_sli4_sge_type, sgl, in lpfc_sli4_bpl2sgl()
[all …]
/linux-4.4.14/drivers/media/v4l2-core/
Dvideobuf2-dma-contig.c53 dma_addr_t expected = sg_dma_address(sgt->sgl); in vb2_dc_get_contiguous_size()
57 for_each_sg(sgt->sgl, s, sgt->nents, i) { in vb2_dc_get_contiguous_size()
103 dma_sync_sg_for_device(buf->dev, sgt->sgl, sgt->orig_nents, in vb2_dc_prepare()
116 dma_sync_sg_for_cpu(buf->dev, sgt->sgl, sgt->orig_nents, buf->dma_dir); in vb2_dc_finish()
242 rd = buf->sgt_base->sgl; in vb2_dc_dmabuf_ops_attach()
243 wr = sgt->sgl; in vb2_dc_dmabuf_ops_attach()
269 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, in vb2_dc_dmabuf_ops_detach()
295 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, in vb2_dc_dmabuf_ops_map()
301 sgt->nents = dma_map_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, in vb2_dc_dmabuf_ops_map()
428 dma_unmap_sg_attrs(buf->dev, sgt->sgl, sgt->orig_nents, in vb2_dc_put_userptr()
[all …]
Dvideobuf2-dma-sg.c150 sgt->nents = dma_map_sg_attrs(buf->dev, sgt->sgl, sgt->orig_nents, in vb2_dma_sg_alloc()
191 dma_unmap_sg_attrs(buf->dev, sgt->sgl, sgt->orig_nents, in vb2_dma_sg_put()
213 dma_sync_sg_for_device(buf->dev, sgt->sgl, sgt->orig_nents, in vb2_dma_sg_prepare()
226 dma_sync_sg_for_cpu(buf->dev, sgt->sgl, sgt->orig_nents, buf->dma_dir); in vb2_dma_sg_finish()
269 sgt->nents = dma_map_sg_attrs(buf->dev, sgt->sgl, sgt->orig_nents, in vb2_dma_sg_get_userptr()
300 dma_unmap_sg_attrs(buf->dev, sgt->sgl, sgt->orig_nents, buf->dma_dir, in vb2_dma_sg_put_userptr()
408 rd = buf->dma_sgt->sgl; in vb2_dma_sg_dmabuf_ops_attach()
409 wr = sgt->sgl; in vb2_dma_sg_dmabuf_ops_attach()
435 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, in vb2_dma_sg_dmabuf_ops_detach()
461 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, in vb2_dma_sg_dmabuf_ops_map()
[all …]
Dvideobuf2-vmalloc.c228 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in vb2_vmalloc_dmabuf_ops_attach()
258 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, in vb2_vmalloc_dmabuf_ops_detach()
284 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, in vb2_vmalloc_dmabuf_ops_map()
290 sgt->nents = dma_map_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, in vb2_vmalloc_dmabuf_ops_map()
/linux-4.4.14/drivers/media/platform/
Dm2m-deinterlace.c258 ctx->xt->sgl[0].size = s_width; in deinterlace_issue_dma()
259 ctx->xt->sgl[0].icg = s_width; in deinterlace_issue_dma()
265 ctx->xt->sgl[0].size = s_width; in deinterlace_issue_dma()
266 ctx->xt->sgl[0].icg = s_width; in deinterlace_issue_dma()
272 ctx->xt->sgl[0].size = s_width / 2; in deinterlace_issue_dma()
273 ctx->xt->sgl[0].icg = s_width / 2; in deinterlace_issue_dma()
279 ctx->xt->sgl[0].size = s_width / 2; in deinterlace_issue_dma()
280 ctx->xt->sgl[0].icg = s_width / 2; in deinterlace_issue_dma()
286 ctx->xt->sgl[0].size = s_width / 2; in deinterlace_issue_dma()
287 ctx->xt->sgl[0].icg = s_width / 2; in deinterlace_issue_dma()
[all …]
/linux-4.4.14/drivers/net/ethernet/intel/ixgbe/
Dixgbe_fcoe.c53 ddp->sgl = NULL; in ixgbe_fcoe_clear_ddp()
134 if (ddp->sgl) in ixgbe_fcoe_ddp_put()
135 dma_unmap_sg(&adapter->pdev->dev, ddp->sgl, ddp->sgc, in ixgbe_fcoe_ddp_put()
157 struct scatterlist *sgl, unsigned int sgc, in ixgbe_fcoe_ddp_setup() argument
176 if (!netdev || !sgl) in ixgbe_fcoe_ddp_setup()
192 if (ddp->sgl) { in ixgbe_fcoe_ddp_setup()
194 xid, ddp->sgl, ddp->sgc); in ixgbe_fcoe_ddp_setup()
212 dmacount = dma_map_sg(&adapter->pdev->dev, sgl, sgc, DMA_FROM_DEVICE); in ixgbe_fcoe_ddp_setup()
225 ddp->sgl = sgl; in ixgbe_fcoe_ddp_setup()
229 for_each_sg(sgl, sg, dmacount, i) { in ixgbe_fcoe_ddp_setup()
[all …]
Dixgbe_fcoe.h64 struct scatterlist *sgl; member
Dixgbe.h933 struct scatterlist *sgl, unsigned int sgc);
935 struct scatterlist *sgl, unsigned int sgc);
/linux-4.4.14/drivers/dma/
Ddma-axi-dmac.c339 struct dma_chan *c, struct scatterlist *sgl, in axi_dmac_prep_slave_sg() argument
355 for_each_sg(sgl, sg, sg_len, i) { in axi_dmac_prep_slave_sg()
439 dst_icg = dmaengine_get_dst_icg(xt, &xt->sgl[0]); in axi_dmac_prep_interleaved()
440 src_icg = dmaengine_get_src_icg(xt, &xt->sgl[0]); in axi_dmac_prep_interleaved()
443 if (!axi_dmac_check_len(chan, xt->sgl[0].size) || in axi_dmac_prep_interleaved()
446 if (xt->sgl[0].size + dst_icg > chan->max_length || in axi_dmac_prep_interleaved()
447 xt->sgl[0].size + src_icg > chan->max_length) in axi_dmac_prep_interleaved()
452 if (chan->max_length / xt->sgl[0].size < xt->numf) in axi_dmac_prep_interleaved()
454 if (!axi_dmac_check_len(chan, xt->sgl[0].size * xt->numf)) in axi_dmac_prep_interleaved()
464 desc->sg[0].src_stride = xt->sgl[0].size + src_icg; in axi_dmac_prep_interleaved()
[all …]
Dimx-dma.c809 struct dma_chan *chan, struct scatterlist *sgl, in imxdma_prep_slave_sg() argument
824 for_each_sg(sgl, sg, sg_len, i) { in imxdma_prep_slave_sg()
830 if (sg_dma_len(sgl) & 3 || sgl->dma_address & 3) in imxdma_prep_slave_sg()
834 if (sg_dma_len(sgl) & 1 || sgl->dma_address & 1) in imxdma_prep_slave_sg()
844 desc->sg = sgl; in imxdma_prep_slave_sg()
976 desc->x = xt->sgl[0].size; in imxdma_prep_dma_interleaved()
978 desc->w = xt->sgl[0].icg + desc->x; in imxdma_prep_dma_interleaved()
Dcoh901318_lli.c233 struct scatterlist *sgl, unsigned int nents, in coh901318_lli_fill_sg() argument
258 for_each_sg(sgl, sg, nents, i) { in coh901318_lli_fill_sg()
Dat_hdmac.c719 struct data_chunk *first = xt->sgl; in atc_prep_dma_interleaved()
743 struct data_chunk *chunk = xt->sgl + i; in atc_prep_dma_interleaved()
745 if ((chunk->size != xt->sgl->size) || in atc_prep_dma_interleaved()
991 struct scatterlist *sgl, in atc_prep_dma_memset_sg() argument
1007 if (unlikely(!sgl || !sg_len)) { in atc_prep_dma_memset_sg()
1021 for_each_sg(sgl, sg, sg_len, i) { in atc_prep_dma_memset_sg()
1076 atc_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, in atc_prep_slave_sg() argument
1117 for_each_sg(sgl, sg, sg_len, i) { in atc_prep_slave_sg()
1158 for_each_sg(sgl, sg, sg_len, i) { in atc_prep_slave_sg()
Dat_xdmac.c634 at_xdmac_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, in at_xdmac_prep_slave_sg() argument
646 if (!sgl) in at_xdmac_prep_slave_sg()
666 for_each_sg(sgl, sg, sg_len, i) { in at_xdmac_prep_slave_sg()
968 xt, xt->sgl); in at_xdmac_prep_interleaved()
982 chunk = xt->sgl + i; in at_xdmac_prep_interleaved()
1217 at_xdmac_prep_dma_memset_sg(struct dma_chan *chan, struct scatterlist *sgl, in at_xdmac_prep_dma_memset_sg() argument
1228 if (!sgl) in at_xdmac_prep_dma_memset_sg()
1235 for_each_sg(sgl, sg, sg_len, i) { in at_xdmac_prep_dma_memset_sg()
Dtimb_dma.c505 struct scatterlist *sgl, unsigned int sg_len, in td_prep_slave_sg() argument
516 if (!sgl || !sg_len) { in td_prep_slave_sg()
536 for_each_sg(sgl, sg, sg_len, i) { in td_prep_slave_sg()
Dk3dma.c464 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sglen, in k3_dma_prep_slave_sg() argument
474 if (sgl == NULL) in k3_dma_prep_slave_sg()
477 for_each_sg(sgl, sg, sglen, i) { in k3_dma_prep_slave_sg()
492 for_each_sg(sgl, sg, sglen, i) { in k3_dma_prep_slave_sg()
Dmxs-dma.c500 struct dma_chan *chan, struct scatterlist *sgl, in mxs_dma_prep_slave_sg() argument
543 pio = (u32 *) sgl; in mxs_dma_prep_slave_sg()
558 for_each_sg(sgl, sg, sg_len, i) { in mxs_dma_prep_slave_sg()
Dmmp_pdma.c525 mmp_pdma_prep_slave_sg(struct dma_chan *dchan, struct scatterlist *sgl, in mmp_pdma_prep_slave_sg() argument
536 if ((sgl == NULL) || (sg_len == 0)) in mmp_pdma_prep_slave_sg()
541 for_each_sg(sgl, sg, sg_len, i) { in mmp_pdma_prep_slave_sg()
543 avail = sg_dma_len(sgl); in mmp_pdma_prep_slave_sg()
Dzx296702_dma.c549 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sglen, in zx_dma_prep_slave_sg() argument
559 if (!sgl) in zx_dma_prep_slave_sg()
565 for_each_sg(sgl, sg, sglen, i) { in zx_dma_prep_slave_sg()
577 for_each_sg(sgl, sg, sglen, i) { in zx_dma_prep_slave_sg()
Ddma-jz4780.c309 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, in jz4780_dma_prep_slave_sg() argument
324 sg_dma_address(&sgl[i]), in jz4780_dma_prep_slave_sg()
325 sg_dma_len(&sgl[i]), in jz4780_dma_prep_slave_sg()
Dmoxart-dma.c269 struct dma_chan *chan, struct scatterlist *sgl, in moxart_prep_slave_sg() argument
319 for_each_sg(sgl, sgent, sg_len, i) { in moxart_prep_slave_sg()
Ddma-jz4740.c390 struct dma_chan *c, struct scatterlist *sgl, in jz4740_dma_prep_slave_sg() argument
403 for_each_sg(sgl, sg, sg_len, i) { in jz4740_dma_prep_slave_sg()
Dqcom_bam_dma.c587 struct scatterlist *sgl, unsigned int sg_len, in bam_prep_slave_sg() argument
606 for_each_sg(sgl, sg, sg_len, i) in bam_prep_slave_sg()
630 for_each_sg(sgl, sg, sg_len, i) { in bam_prep_slave_sg()
Dimg-mdc-dma.c452 struct dma_chan *chan, struct scatterlist *sgl, in mdc_prep_slave_sg() argument
464 if (!sgl) in mdc_prep_slave_sg()
478 for_each_sg(sgl, sg, sg_len, i) { in mdc_prep_slave_sg()
Dsun6i-dma.c561 struct dma_chan *chan, struct scatterlist *sgl, in sun6i_dma_prep_slave_sg() argument
574 if (!sgl) in sun6i_dma_prep_slave_sg()
586 for_each_sg(sgl, sg, sg_len, i) { in sun6i_dma_prep_slave_sg()
Dsun4i-dma.c771 sun4i_dma_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, in sun4i_dma_prep_slave_sg() argument
785 if (!sgl) in sun4i_dma_prep_slave_sg()
818 for_each_sg(sgl, sg, sg_len, i) { in sun4i_dma_prep_slave_sg()
Didma64.c303 struct dma_chan *chan, struct scatterlist *sgl, in idma64_prep_slave_sg() argument
316 for_each_sg(sgl, sg, sg_len, i) { in idma64_prep_slave_sg()
Dcoh901318.c2290 coh901318_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, in coh901318_prep_slave_sg() argument
2309 if (!sgl) in coh901318_prep_slave_sg()
2311 if (sg_dma_len(sgl) == 0) in coh901318_prep_slave_sg()
2357 for_each_sg(sgl, sg, sg_len, i) { in coh901318_prep_slave_sg()
2380 ret = coh901318_lli_fill_sg(&cohc->base->pool, lli, sgl, sg_len, in coh901318_prep_slave_sg()
Dsirf-dma.c732 sdesc->xlen = xt->sgl[0].size / SIRFSOC_DMA_WORD_LEN; in sirfsoc_dma_prep_interleaved()
733 sdesc->width = (xt->sgl[0].size + xt->sgl[0].icg) / in sirfsoc_dma_prep_interleaved()
Ds3c24xx-dma.c980 struct dma_chan *chan, struct scatterlist *sgl, in s3c24xx_dma_prep_slave_sg() argument
996 sg_dma_len(sgl), s3cchan->name); in s3c24xx_dma_prep_slave_sg()
1047 for_each_sg(sgl, sg, sg_len, tmp) { in s3c24xx_dma_prep_slave_sg()
Dnbpfaxi.c998 struct dma_chan *dchan, struct scatterlist *sgl, unsigned int sg_len, in nbpf_prep_slave_sg() argument
1011 return nbpf_prep_sg(chan, sgl, &slave_sg, sg_len, in nbpf_prep_slave_sg()
1016 return nbpf_prep_sg(chan, &slave_sg, sgl, sg_len, in nbpf_prep_slave_sg()
Dpxa_dma.c1019 pxad_prep_slave_sg(struct dma_chan *dchan, struct scatterlist *sgl, in pxad_prep_slave_sg() argument
1031 if ((sgl == NULL) || (sg_len == 0)) in pxad_prep_slave_sg()
1038 for_each_sg(sgl, sg, sg_len, i) in pxad_prep_slave_sg()
1044 for_each_sg(sgl, sg, sg_len, i) { in pxad_prep_slave_sg()
/linux-4.4.14/drivers/scsi/esas2r/
Desas2r_io.c224 struct esas2r_mem_desc *sgl; in esas2r_build_sg_list_sge() local
231 sgl = esas2r_alloc_sgl(a); in esas2r_build_sg_list_sge()
233 if (unlikely(sgl == NULL)) in esas2r_build_sg_list_sge()
244 memcpy(sgl->virt_addr, sgc->sge.a64.last, sgelen); in esas2r_build_sg_list_sge()
248 (struct atto_vda_sge *)((u8 *)sgl->virt_addr + in esas2r_build_sg_list_sge()
253 (struct atto_vda_sge *)((u8 *)sgl->virt_addr in esas2r_build_sg_list_sge()
260 cpu_to_le64(sgl->phys_addr); in esas2r_build_sg_list_sge()
302 list_add(&sgl->next_desc, &rq->sg_table_head); in esas2r_build_sg_list_sge()
376 struct esas2r_mem_desc *sgl; in esas2r_build_prd_iblk() local
449 sgl = esas2r_alloc_sgl(a); in esas2r_build_prd_iblk()
[all …]
Desas2r_init.c841 struct esas2r_mem_desc *sgl; in esas2r_init_adapter_struct() local
909 for (i = 0, sgl = a->sg_list_mds; i < num_sg_lists; i++, sgl++) { in esas2r_init_adapter_struct()
910 sgl->size = sgl_page_size; in esas2r_init_adapter_struct()
912 list_add_tail(&sgl->next_desc, &a->free_sg_list_head); in esas2r_init_adapter_struct()
914 if (!esas2r_initmem_alloc(a, sgl, ESAS2R_SGL_ALIGN)) { in esas2r_init_adapter_struct()
Desas2r.h1153 struct list_head *sgl; in esas2r_alloc_sgl() local
1158 sgl = a->free_sg_list_head.next; in esas2r_alloc_sgl()
1159 result = list_entry(sgl, struct esas2r_mem_desc, next_desc); in esas2r_alloc_sgl()
1160 list_del_init(sgl); in esas2r_alloc_sgl()
/linux-4.4.14/drivers/infiniband/core/
Dumem.c54 ib_dma_unmap_sg(dev, umem->sg_head.sgl, in __ib_umem_release()
58 for_each_sg(umem->sg_head.sgl, sg, umem->npages, i) { in __ib_umem_release()
188 sg_list_start = umem->sg_head.sgl; in ib_umem_get()
215 umem->sg_head.sgl, in ib_umem_get()
326 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, i) in ib_umem_page_count()
355 ret = sg_pcopy_to_buffer(umem->sg_head.sgl, umem->nmap, dst, length, in ib_umem_copy_from()
/linux-4.4.14/drivers/infiniband/hw/cxgb3/
Diwch_qp.c77 wqe->send.sgl[i].stag = cpu_to_be32(wr->sg_list[i].lkey); in build_rdma_send()
78 wqe->send.sgl[i].len = cpu_to_be32(wr->sg_list[i].length); in build_rdma_send()
79 wqe->send.sgl[i].to = cpu_to_be64(wr->sg_list[i].addr); in build_rdma_send()
103 wqe->write.sgl[0].stag = wr->ex.imm_data; in build_rdma_write()
104 wqe->write.sgl[0].len = cpu_to_be32(0); in build_rdma_write()
114 wqe->write.sgl[i].stag = in build_rdma_write()
116 wqe->write.sgl[i].len = in build_rdma_write()
118 wqe->write.sgl[i].to = in build_rdma_write()
265 wqe->recv.sgl[i].stag = cpu_to_be32(wr->sg_list[i].lkey); in build_rdma_recv()
266 wqe->recv.sgl[i].len = cpu_to_be32(wr->sg_list[i].length); in build_rdma_recv()
[all …]
Dcxio_wr.h176 struct t3_sge sgl[T3_MAX_SGE]; /* 4+ */ member
238 struct t3_sge sgl[T3_MAX_SGE]; /* 5+ */ member
274 struct t3_sge sgl[T3_MAX_SGE]; /* 3+ */ member
/linux-4.4.14/drivers/gpu/drm/armada/
Darmada_gem.c445 for_each_sg(sgt->sgl, sg, count, i) { in armada_gem_prime_map_dma_buf()
457 if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) { in armada_gem_prime_map_dma_buf()
466 sg_set_page(sgt->sgl, dobj->page, dobj->obj.size, 0); in armada_gem_prime_map_dma_buf()
468 if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) in armada_gem_prime_map_dma_buf()
474 sg_dma_address(sgt->sgl) = dobj->dev_addr; in armada_gem_prime_map_dma_buf()
475 sg_dma_len(sgt->sgl) = dobj->obj.size; in armada_gem_prime_map_dma_buf()
482 for_each_sg(sgt->sgl, sg, num, i) in armada_gem_prime_map_dma_buf()
499 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, dir); in armada_gem_prime_unmap_dma_buf()
503 for_each_sg(sgt->sgl, sg, sgt->nents, i) in armada_gem_prime_unmap_dma_buf()
611 if (sg_dma_len(dobj->sgt->sgl) < dobj->obj.size) { in armada_gem_map_import()
[all …]
/linux-4.4.14/drivers/dma/sh/
Dshdma-base.c564 struct scatterlist *sgl, unsigned int sg_len, dma_addr_t *addr, in shdma_prep_sg() argument
574 for_each_sg(sgl, sg, sg_len, i) in shdma_prep_sg()
591 for_each_sg(sgl, sg, sg_len, i) { in shdma_prep_sg()
665 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, in shdma_prep_slave_sg() argument
688 return shdma_prep_sg(schan, sgl, sg_len, &slave_addr, in shdma_prep_slave_sg()
706 struct scatterlist *sgl; in shdma_prep_dma_cyclic() local
734 sgl = kcalloc(sg_len, sizeof(*sgl), GFP_KERNEL); in shdma_prep_dma_cyclic()
735 if (!sgl) in shdma_prep_dma_cyclic()
738 sg_init_table(sgl, sg_len); in shdma_prep_dma_cyclic()
743 sg_set_page(&sgl[i], pfn_to_page(PFN_DOWN(src)), period_len, in shdma_prep_dma_cyclic()
[all …]
Drcar-dmac.c828 rcar_dmac_chan_prep_sg(struct rcar_dmac_chan *chan, struct scatterlist *sgl, in rcar_dmac_chan_prep_sg() argument
860 for_each_sg(sgl, sg, sg_len, i) { in rcar_dmac_chan_prep_sg()
1009 struct scatterlist sgl; in rcar_dmac_prep_dma_memcpy() local
1014 sg_init_table(&sgl, 1); in rcar_dmac_prep_dma_memcpy()
1015 sg_set_page(&sgl, pfn_to_page(PFN_DOWN(dma_src)), len, in rcar_dmac_prep_dma_memcpy()
1017 sg_dma_address(&sgl) = dma_src; in rcar_dmac_prep_dma_memcpy()
1018 sg_dma_len(&sgl) = len; in rcar_dmac_prep_dma_memcpy()
1020 return rcar_dmac_chan_prep_sg(rchan, &sgl, 1, dma_dest, in rcar_dmac_prep_dma_memcpy()
1025 rcar_dmac_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, in rcar_dmac_prep_slave_sg() argument
1042 return rcar_dmac_chan_prep_sg(rchan, sgl, sg_len, dev_addr, in rcar_dmac_prep_slave_sg()
[all …]
Dusb-dmac.c419 usb_dmac_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, in usb_dmac_prep_slave_sg() argument
440 for_each_sg(sgl, sg, sg_len, i) { in usb_dmac_prep_slave_sg()
/linux-4.4.14/drivers/scsi/bnx2fc/
Dbnx2fc_hwi.c1470 struct fcoe_ext_mul_sges_ctx *sgl; in bnx2fc_init_seq_cleanup_task() local
1518 task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_addr.lo = in bnx2fc_init_seq_cleanup_task()
1520 task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_addr.hi = in bnx2fc_init_seq_cleanup_task()
1522 task->txwr_only.sgl_ctx.sgl.mul_sgl.sgl_size = in bnx2fc_init_seq_cleanup_task()
1524 task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_off = in bnx2fc_init_seq_cleanup_task()
1526 task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_idx = i; in bnx2fc_init_seq_cleanup_task()
1536 sgl = &task->rxwr_only.union_ctx.read_info.sgl_ctx.sgl; in bnx2fc_init_seq_cleanup_task()
1537 sgl->mul_sgl.cur_sge_addr.lo = (u32)phys_addr; in bnx2fc_init_seq_cleanup_task()
1538 sgl->mul_sgl.cur_sge_addr.hi = (u32)((u64)phys_addr >> 32); in bnx2fc_init_seq_cleanup_task()
1539 sgl->mul_sgl.sgl_size = bd_count; in bnx2fc_init_seq_cleanup_task()
[all …]
/linux-4.4.14/drivers/gpu/drm/omapdrm/
Domap_gem_dmabuf.c48 sg_init_table(sg->sgl, 1); in omap_gem_map_dma_buf()
49 sg_dma_len(sg->sgl) = obj->size; in omap_gem_map_dma_buf()
50 sg_set_page(sg->sgl, pfn_to_page(PFN_DOWN(paddr)), obj->size, 0); in omap_gem_map_dma_buf()
51 sg_dma_address(sg->sgl) = paddr; in omap_gem_map_dma_buf()
/linux-4.4.14/drivers/staging/android/ion/
Dion_carveout_heap.c64 struct page *page = sg_page(table->sgl); in ion_carveout_heap_phys()
97 sg_set_page(table->sgl, pfn_to_page(PFN_DOWN(paddr)), size, 0); in ion_carveout_heap_allocate()
113 struct page *page = sg_page(table->sgl); in ion_carveout_heap_free()
119 dma_sync_sg_for_device(NULL, table->sgl, table->nents, in ion_carveout_heap_free()
Dion_chunk_heap.c67 sg = table->sgl; in ion_chunk_heap_allocate()
82 sg = table->sgl; in ion_chunk_heap_allocate()
108 dma_sync_sg_for_device(NULL, table->sgl, table->nents, in ion_chunk_heap_free()
111 for_each_sg(table->sgl, sg, table->nents, i) { in ion_chunk_heap_free()
Dion_heap.c48 for_each_sg(table->sgl, sg, table->nents, i) { in ion_heap_map_kernel()
81 for_each_sg(table->sgl, sg, table->nents, i) { in ion_heap_map_user()
118 static int ion_heap_sglist_zero(struct scatterlist *sgl, unsigned int nents, in ion_heap_sglist_zero() argument
126 for_each_sg_page(sgl, &piter, nents, 0) { in ion_heap_sglist_zero()
151 return ion_heap_sglist_zero(table->sgl, table->nents, pgprot); in ion_heap_buffer_zero()
Dion_system_heap.c160 sg = table->sgl; in ion_system_heap_allocate()
196 for_each_sg(table->sgl, sg, table->nents, i) in ion_system_heap_free()
359 sg_set_page(table->sgl, page, len, 0); in ion_system_contig_heap_allocate()
379 struct page *page = sg_page(table->sgl); in ion_system_contig_heap_free()
394 struct page *page = sg_page(table->sgl); in ion_system_contig_heap_phys()
Dion_test.c66 for_each_sg_page(table->sgl, &sg_iter, table->nents, offset_page) { in ion_handle_test_dma()
/linux-4.4.14/include/linux/mtd/
Dubi.h252 int ubi_leb_read_sg(struct ubi_volume_desc *desc, int lnum, struct ubi_sgl *sgl,
280 struct ubi_sgl *sgl, int offset, int len) in ubi_read_sg() argument
282 return ubi_leb_read_sg(desc, lnum, sgl, offset, len, 0); in ubi_read_sg()
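
ubi_read_sg() above is a thin wrapper around ubi_leb_read_sg(), which fills a caller-prepared struct ubi_sgl (see eba.c later in this listing); a rough single-entry sketch, assuming the ubi_sgl_init() helper from 4.4's ubi.h (desc, lnum and buf are illustrative):

#include <linux/mtd/ubi.h>
#include <linux/scatterlist.h>

static int read_leb_into_buf(struct ubi_volume_desc *desc, int lnum,
                             void *buf, int len)
{
        struct ubi_sgl usgl;

        ubi_sgl_init(&usgl);              /* reset list_pos/page_pos */
        sg_init_table(usgl.sg, 1);
        sg_set_buf(&usgl.sg[0], buf, len);

        return ubi_read_sg(desc, lnum, &usgl, 0, len);
}
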
/linux-4.4.14/drivers/spi/
Dspi-bcm2835.c250 struct scatterlist *sgl; in bcm2835_spi_prepare_sg() local
262 sgl = tfr->tx_sg.sgl; in bcm2835_spi_prepare_sg()
269 sgl = tfr->rx_sg.sgl; in bcm2835_spi_prepare_sg()
273 desc = dmaengine_prep_slave_sg(chan, sgl, nents, dir, flags); in bcm2835_spi_prepare_sg()
292 struct scatterlist *sgl; in bcm2835_check_sg_length() local
295 for_each_sg(sgt->sgl, sgl, (int)sgt->nents - 1, i) { in bcm2835_check_sg_length()
296 if (sg_dma_len(sgl) % 4) in bcm2835_check_sg_length()
Dspi-pxa2xx-dma.c55 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in pxa2xx_spi_map_dma_buffer()
67 nents = dma_map_sg(dmadev, sgt->sgl, sgt->nents, dir); in pxa2xx_spi_map_dma_buffer()
88 dma_unmap_sg(dmadev, sgt->sgl, sgt->nents, dir); in pxa2xx_spi_unmap_dma_buffer()
209 return dmaengine_prep_slave_sg(chan, sgt->sgl, nents, dir, in pxa2xx_spi_dma_prepare_one()
Dspi-pl022.c813 dma_unmap_sg(pl022->dma_tx_channel->device->dev, pl022->sgt_tx.sgl, in unmap_free_dma_scatter()
815 dma_unmap_sg(pl022->dma_rx_channel->device->dev, pl022->sgt_rx.sgl, in unmap_free_dma_scatter()
826 BUG_ON(!pl022->sgt_rx.sgl); in dma_callback()
840 pl022->sgt_rx.sgl, in dma_callback()
844 for_each_sg(pl022->sgt_rx.sgl, sg, pl022->sgt_rx.nents, i) { in dma_callback()
854 for_each_sg(pl022->sgt_tx.sgl, sg, pl022->sgt_tx.nents, i) { in dma_callback()
891 for_each_sg(sgtab->sgl, sg, sgtab->nents, i) { in setup_dma_scatter()
912 for_each_sg(sgtab->sgl, sg, sgtab->nents, i) { in setup_dma_scatter()
1066 rx_sglen = dma_map_sg(rxchan->device->dev, pl022->sgt_rx.sgl, in configure_dma()
1071 tx_sglen = dma_map_sg(txchan->device->dev, pl022->sgt_tx.sgl, in configure_dma()
[all …]
Dspi-ep93xx.c493 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in ep93xx_spi_dma_prepare()
513 nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); in ep93xx_spi_dma_prepare()
517 txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, dir, DMA_CTRL_ACK); in ep93xx_spi_dma_prepare()
519 dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); in ep93xx_spi_dma_prepare()
547 dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); in ep93xx_spi_dma_finish()
Dspi-qup.c291 struct scatterlist *sgl; in spi_qup_prep_sg() local
299 sgl = xfer->tx_sg.sgl; in spi_qup_prep_sg()
303 sgl = xfer->rx_sg.sgl; in spi_qup_prep_sg()
306 desc = dmaengine_prep_slave_sg(chan, sgl, nents, dir, flags); in spi_qup_prep_sg()
Dspi-dw-mid.c168 xfer->tx_sg.sgl, in dw_spi_dma_prepare_tx()
214 xfer->rx_sg.sgl, in dw_spi_dma_prepare_rx()
Dspi-img-spfi.c338 rxdesc = dmaengine_prep_slave_sg(spfi->rx_ch, xfer->rx_sg.sgl, in img_spfi_start_dma()
362 txdesc = dmaengine_prep_slave_sg(spfi->tx_ch, xfer->tx_sg.sgl, in img_spfi_start_dma()
Dspi-mt65xx.c368 mdata->tx_sgl = xfer->tx_sg.sgl; in mtk_spi_dma_transfer()
370 mdata->rx_sgl = xfer->rx_sg.sgl; in mtk_spi_dma_transfer()
Dspi-rockchip.c462 rs->rx_sg.sgl, rs->rx_sg.nents, in rockchip_spi_prepare_dma()
479 rs->tx_sg.sgl, rs->tx_sg.nents, in rockchip_spi_prepare_dma()
/linux-4.4.14/arch/sparc/kernel/
Dioport.c549 static int pci32_map_sg(struct device *device, struct scatterlist *sgl, in pci32_map_sg() argument
557 for_each_sg(sgl, sg, nents, n) { in pci32_map_sg()
568 static void pci32_unmap_sg(struct device *dev, struct scatterlist *sgl, in pci32_unmap_sg() argument
576 for_each_sg(sgl, sg, nents, n) { in pci32_unmap_sg()
614 static void pci32_sync_sg_for_cpu(struct device *dev, struct scatterlist *sgl, in pci32_sync_sg_for_cpu() argument
621 for_each_sg(sgl, sg, nents, n) { in pci32_sync_sg_for_cpu()
627 static void pci32_sync_sg_for_device(struct device *device, struct scatterlist *sgl, in pci32_sync_sg_for_device() argument
634 for_each_sg(sgl, sg, nents, n) { in pci32_sync_sg_for_device()
/linux-4.4.14/drivers/xen/
Dswiotlb-xen.c539 xen_swiotlb_map_sg_attrs(struct device *hwdev, struct scatterlist *sgl, in xen_swiotlb_map_sg_attrs() argument
548 for_each_sg(sgl, sg, nelems, i) { in xen_swiotlb_map_sg_attrs()
565 xen_swiotlb_unmap_sg_attrs(hwdev, sgl, i, dir, in xen_swiotlb_map_sg_attrs()
567 sg_dma_len(sgl) = 0; in xen_swiotlb_map_sg_attrs()
600 xen_swiotlb_unmap_sg_attrs(struct device *hwdev, struct scatterlist *sgl, in xen_swiotlb_unmap_sg_attrs() argument
609 for_each_sg(sgl, sg, nelems, i) in xen_swiotlb_unmap_sg_attrs()
623 xen_swiotlb_sync_sg(struct device *hwdev, struct scatterlist *sgl, in xen_swiotlb_sync_sg() argument
630 for_each_sg(sgl, sg, nelems, i) in xen_swiotlb_sync_sg()
Dxen-scsiback.c129 struct scatterlist *sgl; member
279 kfree(req->sgl); in scsiback_fast_flush_area()
280 req->sgl = NULL; in scsiback_fast_flush_area()
401 pending_req->sgl, pending_req->n_sg, in scsiback_cmd_exec()
517 pending_req->sgl = kmalloc_array(nr_segments, in scsiback_gnttab_data_map()
519 if (!pending_req->sgl) in scsiback_gnttab_data_map()
522 sg_init_table(pending_req->sgl, nr_segments); in scsiback_gnttab_data_map()
556 for_each_sg(pending_req->sgl, sg, nr_segments, i) { in scsiback_gnttab_data_map()
/linux-4.4.14/drivers/net/ethernet/intel/i40e/
Di40e_fcoe.c169 if (ddp->sgl) { in i40e_fcoe_ddp_unmap()
170 dma_unmap_sg(&pf->pdev->dev, ddp->sgl, ddp->sgc, in i40e_fcoe_ddp_unmap()
172 ddp->sgl = NULL; in i40e_fcoe_ddp_unmap()
740 if (!ddp->sgl) in i40e_fcoe_handle_offload()
816 struct scatterlist *sgl, unsigned int sgc, in i40e_fcoe_ddp_setup() argument
847 if (ddp->sgl) { in i40e_fcoe_ddp_setup()
849 xid, ddp->sgl, ddp->sgc); in i40e_fcoe_ddp_setup()
866 dmacount = dma_map_sg(&pf->pdev->dev, sgl, sgc, DMA_FROM_DEVICE); in i40e_fcoe_ddp_setup()
869 sgl, sgc); in i40e_fcoe_ddp_setup()
883 for_each_sg(sgl, sg, dmacount, i) { in i40e_fcoe_ddp_setup()
[all …]
Di40e_fcoe.h109 struct scatterlist *sgl; member
/linux-4.4.14/drivers/gpu/drm/i915/
Di915_gem_dmabuf.c64 src = obj->pages->sgl; in i915_gem_map_dma_buf()
65 dst = st->sgl; in i915_gem_map_dma_buf()
72 if (!dma_map_sg(attachment->dev, st->sgl, st->nents, dir)) { in i915_gem_map_dma_buf()
100 dma_unmap_sg(attachment->dev, sg->sgl, sg->nents, dir); in i915_gem_unmap_dma_buf()
139 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, 0) in i915_gem_dmabuf_vmap()
Di915_gem_userptr.c493 for_each_sg((*st)->sgl, sg, num_pages, n) in st_set_pages()
607 obj->get_page.sg = obj->pages->sgl; in __i915_gem_userptr_get_pages_worker()
760 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, 0) { in i915_gem_userptr_put_pages()
Di915_gem_render_state.c106 page = sg_page(so->obj->pages->sgl); in render_state_setup()
Di915_guc_submission.c449 sg_dma_address(client->client_obj->pages->sgl); in guc_init_ctx_desc()
467 sg_pcopy_from_buffer(sg->sgl, sg->nents, &desc, sizeof(desc), in guc_init_ctx_desc()
480 sg_pcopy_from_buffer(sg->sgl, sg->nents, &desc, sizeof(desc), in guc_fini_ctx_desc()
Di915_gem_fence.c755 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, 0) { in i915_gem_object_do_bit_17_swizzle()
793 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, 0) { in i915_gem_object_save_bit_17_swizzle()
Di915_gem_gtt.c829 __sg_page_iter_start(&sg_iter, pages->sgl, sg_nents(pages->sgl), 0); in gen8_ppgtt_insert_entries()
1842 for_each_sg_page(pages->sgl, &sg_iter, pages->nents, 0) { in gen6_ppgtt_insert_entries()
2326 obj->pages->sgl, obj->pages->nents, in i915_gem_gtt_prepare_object()
2356 for_each_sg_page(st->sgl, &sg_iter, st->nents, 0) { in gen8_ggtt_insert_entries()
2402 for_each_sg_page(st->sgl, &sg_iter, st->nents, 0) { in gen6_ggtt_insert_entries()
2602 dma_unmap_sg(&dev->pdev->dev, obj->pages->sgl, obj->pages->nents, in i915_gem_gtt_finish_object()
3280 sg = st->sgl; in rotate_pages()
3340 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, 0) { in intel_rotate_fb_obj_pages()
3408 sg = st->sgl; in intel_partial_pages()
3410 for_each_sg_page(obj->pages->sgl, &obj_sg_iter, obj->pages->nents, in intel_partial_pages()
/linux-4.4.14/drivers/gpu/drm/tegra/
Dgem.c117 err = iommu_map_sg(tegra->domain, bo->paddr, bo->sgt->sgl, in tegra_bo_iommu_map()
211 for_each_sg(bo->sgt->sgl, s, bo->sgt->nents, i) in tegra_bo_get_pages()
214 dma_sync_sg_for_device(drm->dev, bo->sgt->sgl, bo->sgt->nents, in tegra_bo_get_pages()
348 bo->paddr = sg_dma_address(bo->sgt->sgl); in tegra_bo_import()
525 for_each_sg(sgt->sgl, sg, bo->num_pages, i) in tegra_gem_prime_map_dma_buf()
528 if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) in tegra_gem_prime_map_dma_buf()
534 sg_dma_address(sgt->sgl) = bo->paddr; in tegra_gem_prime_map_dma_buf()
535 sg_dma_len(sgt->sgl) = gem->size; in tegra_gem_prime_map_dma_buf()
554 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, dir); in tegra_gem_prime_unmap_dma_buf()
/linux-4.4.14/drivers/gpu/drm/msm/
Dmsm_iommu.c59 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in msm_iommu_map()
77 for_each_sg(sgt->sgl, sg, i, j) { in msm_iommu_map()
94 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in msm_iommu_unmap()
Dmsm_gem.c104 dma_map_sg(dev->dev, msm_obj->sgt->sgl, in get_pages()
120 dma_unmap_sg(obj->dev->dev, msm_obj->sgt->sgl, in put_pages()
/linux-4.4.14/arch/ia64/sn/pci/
Dpci_dma.c241 static void sn_dma_unmap_sg(struct device *dev, struct scatterlist *sgl, in sn_dma_unmap_sg() argument
252 for_each_sg(sgl, sg, nhwentries, i) { in sn_dma_unmap_sg()
274 static int sn_dma_map_sg(struct device *dev, struct scatterlist *sgl, in sn_dma_map_sg() argument
279 struct scatterlist *saved_sg = sgl, *sg; in sn_dma_map_sg()
292 for_each_sg(sgl, sg, nhwentries, i) { in sn_dma_map_sg()
/linux-4.4.14/arch/h8300/kernel/
Ddma.c49 static int map_sg(struct device *dev, struct scatterlist *sgl, in map_sg() argument
56 for_each_sg(sgl, sg, nents, i) { in map_sg()
/linux-4.4.14/drivers/gpu/drm/udl/
Dudl_dmabuf.c65 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, in udl_detach_dma_buf()
117 rd = obj->sg->sgl; in udl_map_dma_buf()
118 wr = sgt->sgl; in udl_map_dma_buf()
126 nents = dma_map_sg(attach->dev, sgt->sgl, sgt->orig_nents, dir); in udl_map_dma_buf()
/linux-4.4.14/drivers/net/ethernet/chelsio/cxgb4vf/
Dsge.c159 struct ulptx_sgl *sgl; /* scatter/gather list in TX Queue */ member
307 const struct ulptx_sgl *sgl, const struct sge_txq *tq) in unmap_sgl() argument
313 dma_unmap_single(dev, be64_to_cpu(sgl->addr0), in unmap_sgl()
314 be32_to_cpu(sgl->len0), DMA_TO_DEVICE); in unmap_sgl()
316 dma_unmap_page(dev, be64_to_cpu(sgl->addr0), in unmap_sgl()
317 be32_to_cpu(sgl->len0), DMA_TO_DEVICE); in unmap_sgl()
325 for (p = sgl->sge; nfrags >= 2; nfrags -= 2) { in unmap_sgl()
394 unmap_sgl(dev, sdesc->skb, sdesc->sgl, tq); in free_tx_desc()
907 struct ulptx_sgl *sgl, u64 *end, unsigned int start, in write_sgl() argument
918 sgl->len0 = htonl(len); in write_sgl()
[all …]
/linux-4.4.14/include/crypto/
Dif_alg.h78 int af_alg_make_sg(struct af_alg_sgl *sgl, struct iov_iter *iter, int len);
79 void af_alg_free_sg(struct af_alg_sgl *sgl);
/linux-4.4.14/include/xen/
Dswiotlb-xen.h28 xen_swiotlb_map_sg_attrs(struct device *hwdev, struct scatterlist *sgl,
33 xen_swiotlb_unmap_sg_attrs(struct device *hwdev, struct scatterlist *sgl,
/linux-4.4.14/drivers/mtd/ubi/
Deba.c516 struct ubi_sgl *sgl, int lnum, int offset, int len, in ubi_eba_read_leb_sg() argument
524 ubi_assert(sgl->list_pos < UBI_MAX_SG_COUNT); in ubi_eba_read_leb_sg()
525 sg = &sgl->sg[sgl->list_pos]; in ubi_eba_read_leb_sg()
526 if (len < sg->length - sgl->page_pos) in ubi_eba_read_leb_sg()
529 to_read = sg->length - sgl->page_pos; in ubi_eba_read_leb_sg()
532 sg_virt(sg) + sgl->page_pos, offset, in ubi_eba_read_leb_sg()
540 sgl->page_pos += to_read; in ubi_eba_read_leb_sg()
541 if (sgl->page_pos == sg->length) { in ubi_eba_read_leb_sg()
542 sgl->list_pos++; in ubi_eba_read_leb_sg()
543 sgl->page_pos = 0; in ubi_eba_read_leb_sg()
[all …]
Dkapi.c472 int ubi_leb_read_sg(struct ubi_volume_desc *desc, int lnum, struct ubi_sgl *sgl, in ubi_leb_read_sg() argument
488 err = ubi_eba_read_leb_sg(ubi, vol, sgl, lnum, offset, len, check); in ubi_leb_read_sg()
/linux-4.4.14/drivers/char/
Dvirtio_console.c878 struct sg_list *sgl = sd->u.data; in pipe_to_sg() local
881 if (sgl->n == sgl->size) in pipe_to_sg()
891 sg_set_page(&(sgl->sg[sgl->n]), buf->page, len, buf->offset); in pipe_to_sg()
910 sg_set_page(&(sgl->sg[sgl->n]), page, len, offset); in pipe_to_sg()
912 sgl->n++; in pipe_to_sg()
913 sgl->len += len; in pipe_to_sg()
924 struct sg_list sgl; in port_fops_splice_write() local
931 .u.data = &sgl, in port_fops_splice_write()
963 sgl.n = 0; in port_fops_splice_write()
964 sgl.len = 0; in port_fops_splice_write()
[all …]
/linux-4.4.14/drivers/scsi/
D3w-xxxx.c590 command_packet->byte8.param.sgl[0].address = param_value; in tw_aen_read_queue()
591 command_packet->byte8.param.sgl[0].length = sizeof(TW_Sector); in tw_aen_read_queue()
732 command_packet->byte8.param.sgl[0].address = param_value; in tw_aen_drain_queue()
733 command_packet->byte8.param.sgl[0].length = sizeof(TW_Sector); in tw_aen_drain_queue()
966 tw_ioctl->firmware_command.byte8.param.sgl[0].address = dma_handle + sizeof(TW_New_Ioctl) - 1; in tw_chrdev_ioctl()
967 tw_ioctl->firmware_command.byte8.param.sgl[0].length = data_buffer_length_adjusted; in tw_chrdev_ioctl()
970 tw_ioctl->firmware_command.byte8.io.sgl[0].address = dma_handle + sizeof(TW_New_Ioctl) - 1; in tw_chrdev_ioctl()
971 tw_ioctl->firmware_command.byte8.io.sgl[0].length = data_buffer_length_adjusted; in tw_chrdev_ioctl()
1154 command_packet->byte8.param.sgl[0].address = param_value; in tw_setfeature()
1155 command_packet->byte8.param.sgl[0].length = sizeof(TW_Sector); in tw_setfeature()
[all …]
Dvmw_pvscsi.c61 struct pvscsi_sg_list *sgl; member
340 sge = &ctx->sgl->sge[0]; in pvscsi_create_sg()
373 ctx->sglPA = pci_map_single(adapter->dev, ctx->sgl, in pvscsi_map_buffers()
1162 free_pages((unsigned long)ctx->sgl, get_order(SGL_SIZE)); in pvscsi_free_sgls()
1254 ctx->sgl = (void *)__get_free_pages(GFP_KERNEL, in pvscsi_allocate_sg()
1257 BUG_ON(!IS_ALIGNED(((unsigned long)ctx->sgl), PAGE_SIZE)); in pvscsi_allocate_sg()
1258 if (!ctx->sgl) { in pvscsi_allocate_sg()
1260 free_pages((unsigned long)ctx->sgl, in pvscsi_allocate_sg()
1262 ctx->sgl = NULL; in pvscsi_allocate_sg()
D3w-xxxx.h307 TW_SG_Entry sgl[TW_MAX_SGL_LENGTH]; member
311 TW_SG_Entry sgl[TW_MAX_SGL_LENGTH]; member
D3w-sas.h232 TW_SG_Entry_ISO sgl[TW_LIBERATOR_MAX_SGL_LENGTH_OLD]; member
236 TW_SG_Entry_ISO sgl[TW_LIBERATOR_MAX_SGL_LENGTH_OLD]; member
Dscsi_lib.c569 static void scsi_sg_free(struct scatterlist *sgl, unsigned int nents) in scsi_sg_free() argument
574 mempool_free(sgl, sgp->pool); in scsi_sg_free()
602 sg_init_table(sdb->table.sgl, nents); in scsi_alloc_sgtable()
605 first_chunk = sdb->table.sgl; in scsi_alloc_sgtable()
1098 count = blk_rq_map_sg(req->q, req, sdb->table.sgl); in scsi_init_sgtable()
1169 prot_sdb->table.sgl); in scsi_init_io()
1931 cmd->sdb.table.sgl = sg; in scsi_mq_prep_fn()
1940 cmd->prot_sdb->table.sgl = in scsi_mq_prep_fn()
1949 bidi_sdb->table.sgl = in scsi_mq_prep_fn()
3096 void *scsi_kmap_atomic_sg(struct scatterlist *sgl, int sg_count, in scsi_kmap_atomic_sg() argument
[all …]
D3w-9xxx.h501 TW_SG_Entry sgl[TW_ESCALADE_MAX_SGL_LENGTH]; member
505 TW_SG_Entry sgl[TW_ESCALADE_MAX_SGL_LENGTH]; member
D3w-sas.c426 …command_packet->byte8_offset.param.sgl[0].address = TW_CPU_TO_SGL(tw_dev->generic_buffer_phys[requ… in twl_aen_sync_time()
427 command_packet->byte8_offset.param.sgl[0].length = TW_CPU_TO_SGL(TW_SECTOR_SIZE); in twl_aen_sync_time()
686 TW_SG_Entry_ISO *sgl; in twl_load_sgl() local
708 sgl = (TW_SG_Entry_ISO *)((u32 *)oldcommand+oldcommand->size - (sizeof(TW_SG_Entry_ISO)/4) + pae +… in twl_load_sgl()
709 sgl->address = TW_CPU_TO_SGL(dma_handle + sizeof(TW_Ioctl_Buf_Apache) - 1); in twl_load_sgl()
710 sgl->length = TW_CPU_TO_SGL(length); in twl_load_sgl()
954 …command_packet->byte8_offset.param.sgl[0].address = TW_CPU_TO_SGL(tw_dev->generic_buffer_phys[requ… in twl_get_param()
955 command_packet->byte8_offset.param.sgl[0].length = TW_CPU_TO_SGL(TW_SECTOR_SIZE); in twl_get_param()
D3w-9xxx.c491 …command_packet->byte8_offset.param.sgl[0].address = TW_CPU_TO_SGL(tw_dev->generic_buffer_phys[requ… in twa_aen_sync_time()
492 command_packet->byte8_offset.param.sgl[0].length = cpu_to_le32(TW_SECTOR_SIZE); in twa_aen_sync_time()
1079 …command_packet->byte8_offset.param.sgl[0].address = TW_CPU_TO_SGL(tw_dev->generic_buffer_phys[requ… in twa_get_param()
1080 command_packet->byte8_offset.param.sgl[0].length = cpu_to_le32(TW_SECTOR_SIZE); in twa_get_param()
1382 TW_SG_Entry *sgl; in twa_load_sgl() local
1405 sgl = (TW_SG_Entry *)((u32 *)oldcommand+oldcommand->size - (sizeof(TW_SG_Entry)/4) + pae); in twa_load_sgl()
1407 sgl = (TW_SG_Entry *)((u32 *)oldcommand+TW_SGL_OUT(oldcommand->opcode__sgloffset)); in twa_load_sgl()
1408 sgl->address = TW_CPU_TO_SGL(dma_handle + sizeof(TW_Ioctl_Buf_Apache) - 1); in twa_load_sgl()
1409 sgl->length = cpu_to_le32(length); in twa_load_sgl()
Dstorvsc_drv.c1380 struct scatterlist *sgl; in storvsc_queuecommand() local
1446 sgl = (struct scatterlist *)scsi_sglist(scmnd); in storvsc_queuecommand()
1464 payload->range.offset = sgl[0].offset; in storvsc_queuecommand()
1466 cur_sgl = sgl; in storvsc_queuecommand()
Dmegaraid.c1417 struct scatterlist *sgl; in mega_cmd_done() local
1551 sgl = scsi_sglist(cmd); in mega_cmd_done()
1552 if( sg_page(sgl) ) { in mega_cmd_done()
1553 c = *(unsigned char *) sg_virt(&sgl[0]); in mega_cmd_done()
1755 scb->sgl[idx].address = sg_dma_address(sg); in mega_build_sglist()
1756 *len += scb->sgl[idx].length = sg_dma_len(sg); in mega_build_sglist()
2918 scb->sgl = NULL; in mega_init_scb()
2933 scb->sgl = (mega_sglist *)scb->sgl64; in mega_init_scb()
2935 if( !scb->sgl ) { in mega_init_scb()
/linux-4.4.14/arch/powerpc/kernel/
Ddma.c196 static int dma_direct_map_sg(struct device *dev, struct scatterlist *sgl, in dma_direct_map_sg() argument
203 for_each_sg(sgl, sg, nents, i) { in dma_direct_map_sg()
252 struct scatterlist *sgl, int nents, in dma_direct_sync_sg() argument
258 for_each_sg(sgl, sg, nents, i) in dma_direct_sync_sg()
Dvio.c559 struct scatterlist *sgl; in vio_dma_iommu_map_sg() local
564 for_each_sg(sglist, sgl, nelems, count) in vio_dma_iommu_map_sg()
565 alloc_size += roundup(sgl->length, IOMMU_PAGE_SIZE(tbl)); in vio_dma_iommu_map_sg()
580 for_each_sg(sglist, sgl, ret, count) in vio_dma_iommu_map_sg()
581 alloc_size -= roundup(sgl->dma_length, IOMMU_PAGE_SIZE(tbl)); in vio_dma_iommu_map_sg()
595 struct scatterlist *sgl; in vio_dma_iommu_unmap_sg() local
600 for_each_sg(sglist, sgl, nelems, count) in vio_dma_iommu_unmap_sg()
601 alloc_size += roundup(sgl->dma_length, IOMMU_PAGE_SIZE(tbl)); in vio_dma_iommu_unmap_sg()
Dibmebus.c106 struct scatterlist *sgl, in ibmebus_map_sg() argument
113 for_each_sg(sgl, sg, nents, i) { in ibmebus_map_sg()
/linux-4.4.14/net/ceph/
Dcrypto.c129 sgt->sgl = prealloc_sg; in setup_sgtable()
133 for_each_sg(sgt->sgl, sg, sgt->orig_nents, i) { in setup_sgtable()
200 ret = crypto_blkcipher_encrypt(&desc, sg_out.sgl, sg_in, in ceph_aes_encrypt()
263 ret = crypto_blkcipher_encrypt(&desc, sg_out.sgl, sg_in, in ceph_aes_encrypt2()
316 ret = crypto_blkcipher_decrypt(&desc, sg_out, sg_in.sgl, src_len); in ceph_aes_decrypt()
382 ret = crypto_blkcipher_decrypt(&desc, sg_out, sg_in.sgl, src_len); in ceph_aes_decrypt2()
/linux-4.4.14/drivers/staging/rdma/ipath/
Dipath_dma.c101 static int ipath_map_sg(struct ib_device *dev, struct scatterlist *sgl, in ipath_map_sg() argument
111 for_each_sg(sgl, sg, nents, i) { in ipath_map_sg()
Dipath_mr.c224 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) { in ipath_reg_user_mr()
/linux-4.4.14/drivers/infiniband/hw/qib/
Dqib_dma.c94 static int qib_map_sg(struct ib_device *dev, struct scatterlist *sgl, in qib_map_sg() argument
104 for_each_sg(sgl, sg, nents, i) { in qib_map_sg()
/linux-4.4.14/drivers/staging/rdma/hfi1/
Ddma.c111 static int hfi1_map_sg(struct ib_device *dev, struct scatterlist *sgl, in hfi1_map_sg() argument
122 for_each_sg(sgl, sg, nents, i) { in hfi1_map_sg()
/linux-4.4.14/drivers/scsi/isci/
Drequest.c582 stp_req->sgl.offset = 0; in sci_stp_pio_request_construct()
583 stp_req->sgl.set = SCU_SGL_ELEMENT_PAIR_A; in sci_stp_pio_request_construct()
587 stp_req->sgl.index = 0; in sci_stp_pio_request_construct()
590 stp_req->sgl.index = -1; in sci_stp_pio_request_construct()
1261 struct scu_sgl_element *sgl; in pio_sgl_next() local
1264 struct isci_stp_pio_sgl *pio_sgl = &stp_req->sgl; in pio_sgl_next()
1268 sgl = NULL; in pio_sgl_next()
1272 sgl = NULL; in pio_sgl_next()
1275 sgl = &sgl_pair->B; in pio_sgl_next()
1280 sgl = NULL; in pio_sgl_next()
[all …]
Drequest.h79 } sgl; member
/linux-4.4.14/arch/alpha/kernel/
Dpci-noop.c141 static int alpha_noop_map_sg(struct device *dev, struct scatterlist *sgl, int nents, in alpha_noop_map_sg() argument
147 for_each_sg(sgl, sg, nents, i) { in alpha_noop_map_sg()
/linux-4.4.14/drivers/tty/serial/
Dimx.c479 struct scatterlist *sgl = &sport->tx_sgl[0]; in dma_tx_callback() local
486 dma_unmap_sg(sport->port.dev, sgl, sport->dma_tx_nents, DMA_TO_DEVICE); in dma_tx_callback()
520 struct scatterlist *sgl = sport->tx_sgl; in imx_dma_tx() local
534 sg_init_one(sgl, xmit->buf + xmit->tail, sport->tx_bytes); in imx_dma_tx()
537 sg_init_table(sgl, 2); in imx_dma_tx()
538 sg_set_buf(sgl, xmit->buf + xmit->tail, in imx_dma_tx()
540 sg_set_buf(sgl + 1, xmit->buf, xmit->head); in imx_dma_tx()
543 ret = dma_map_sg(dev, sgl, sport->dma_tx_nents, DMA_TO_DEVICE); in imx_dma_tx()
548 desc = dmaengine_prep_slave_sg(chan, sgl, sport->dma_tx_nents, in imx_dma_tx()
551 dma_unmap_sg(dev, sgl, sport->dma_tx_nents, in imx_dma_tx()
[all …]
Dmxs-auart.c229 struct scatterlist *sgl = &s->tx_sgl; in mxs_auart_dma_tx() local
243 sg_init_one(sgl, s->tx_dma_buf, size); in mxs_auart_dma_tx()
244 dma_map_sg(s->dev, sgl, 1, DMA_TO_DEVICE); in mxs_auart_dma_tx()
245 desc = dmaengine_prep_slave_sg(channel, sgl, in mxs_auart_dma_tx()
556 struct scatterlist *sgl = &s->rx_sgl; in mxs_auart_dma_prep_rx() local
572 sg_init_one(sgl, s->rx_dma_buf, UART_XMIT_SIZE); in mxs_auart_dma_prep_rx()
573 dma_map_sg(s->dev, sgl, 1, DMA_FROM_DEVICE); in mxs_auart_dma_prep_rx()
574 desc = dmaengine_prep_slave_sg(channel, sgl, 1, DMA_DEV_TO_MEM, in mxs_auart_dma_prep_rx()
/linux-4.4.14/drivers/mmc/host/
Dmxs-mmc.c230 struct scatterlist * sgl; in mxs_mmc_prep_dma() local
237 sgl = data->sg; in mxs_mmc_prep_dma()
241 sgl = (struct scatterlist *) ssp->ssp_pio_words; in mxs_mmc_prep_dma()
246 sgl, sg_len, ssp->slave_dirn, flags); in mxs_mmc_prep_dma()
356 struct scatterlist *sgl = data->sg, *sg; in mxs_mmc_adtc() local
402 for_each_sg(sgl, sg, sg_len, i) in mxs_mmc_adtc()
/linux-4.4.14/drivers/scsi/megaraid/
Dmegaraid_sas_base.c1314 &pthru->sgl); in megasas_build_dcdb()
1318 &pthru->sgl); in megasas_build_dcdb()
1321 &pthru->sgl); in megasas_build_dcdb()
1457 &ldio->sgl); in megasas_build_ldio()
1460 ldio->sge_count = megasas_make_sgl64(instance, scp, &ldio->sgl); in megasas_build_ldio()
1462 ldio->sge_count = megasas_make_sgl32(instance, scp, &ldio->sgl); in megasas_build_ldio()
1547 mfi_sgl = &ldio->sgl; in megasas_dump_pending_frames()
1556 mfi_sgl = &pthru->sgl; in megasas_dump_pending_frames()
2045 dcmd->sgl.sge32[0].phys_addr = in megasas_get_ld_vf_affiliation_111()
2048 dcmd->sgl.sge32[0].phys_addr = in megasas_get_ld_vf_affiliation_111()
[all …]
Dmegaraid_sas.h1361 union megasas_sgl sgl; /*28h */ member
1388 union megasas_sgl sgl; /*30h */ member
1415 union megasas_sgl sgl; /*28h */ member
1467 } sgl; member
1496 } sgl; member
1978 struct iovec sgl[MAX_IOCTL_SGE]; member
2001 struct compat_iovec sgl[MAX_IOCTL_SGE]; member
Dmegaraid_mbox.c1354 struct scatterlist *sgl; in megaraid_mbox_mksgl() local
1373 scsi_for_each_sg(scp, sgl, sgcnt, i) { in megaraid_mbox_mksgl()
1374 ccb->sgl64[i].address = sg_dma_address(sgl); in megaraid_mbox_mksgl()
1375 ccb->sgl64[i].length = sg_dma_len(sgl); in megaraid_mbox_mksgl()
1563 struct scatterlist *sgl; in DEF_SCSI_QCMD() local
1566 sgl = scsi_sglist(scp); in DEF_SCSI_QCMD()
1567 if (sg_page(sgl)) { in DEF_SCSI_QCMD()
1568 vaddr = (caddr_t) sg_virt(&sgl[0]); in DEF_SCSI_QCMD()
2224 struct scatterlist *sgl; in megaraid_mbox_dpc() local
2308 sgl = scsi_sglist(scp); in megaraid_mbox_dpc()
[all …]
Dmegaraid_sas_fusion.c772 dcmd->sgl.sge32[0].phys_addr = cpu_to_le32(pd_seq_h); in megasas_sync_pd_seq_num()
773 dcmd->sgl.sge32[0].length = cpu_to_le32(pd_seq_map_sz); in megasas_sync_pd_seq_num()
873 dcmd->sgl.sge32[0].phys_addr = cpu_to_le32(ci_h); in megasas_get_ld_map_info()
874 dcmd->sgl.sge32[0].length = cpu_to_le32(size_map_info); in megasas_get_ld_map_info()
975 dcmd->sgl.sge32[0].phys_addr = cpu_to_le32(ci_h); in megasas_sync_map_info()
976 dcmd->sgl.sge32[0].length = cpu_to_le32(size_map_info); in megasas_sync_map_info()
/linux-4.4.14/drivers/hsi/clients/
Dhsi_char.c160 kfree(sg_virt(msg->sgt.sgl)); in hsc_msg_free()
199 sg_init_one(msg->sgt.sgl, buf, alloc_size); in hsc_msg_alloc()
230 return msg->sgt.sgl->length; in hsc_msg_len_get()
235 msg->sgt.sgl->length = len; in hsc_msg_len_set()
468 sg_virt(msg->sgt.sgl), hsc_msg_len_get(msg)); in hsc_read()
504 if (copy_from_user(sg_virt(msg->sgt.sgl), (void __user *)buf, len)) { in hsc_write()
Dcmt_speech.c212 u32 *data = sg_virt(msg->sgt.sgl); in cs_set_cmd()
218 u32 *data = sg_virt(msg->sgt.sgl); in cs_get_cmd()
272 kfree(sg_virt(msg->sgt.sgl)); in cs_free_cmds()
294 sg_init_one(msg->sgt.sgl, buf, sizeof(*buf)); in cs_alloc_cmds()
637 sg_init_one(msg->sgt.sgl, address, hi->buf_size); in cs_hsi_peek_on_data_complete()
680 sg_init_one(rxmsg->sgt.sgl, (void *)hi->mmap_base, 0); in cs_hsi_read_on_data()
732 sg_init_one(txmsg->sgt.sgl, address, hi->buf_size); in cs_hsi_write_on_data()
Dssi_protocol.c167 data = sg_virt(msg->sgt.sgl); in ssip_set_cmd()
175 data = sg_virt(msg->sgt.sgl); in ssip_get_cmd()
188 sg = msg->sgt.sgl; in ssip_skb_to_msg()
258 kfree(sg_virt(msg->sgt.sgl)); in ssip_free_cmds()
278 sg_init_one(msg->sgt.sgl, buf, sizeof(*buf)); in ssip_alloc_cmds()
/linux-4.4.14/drivers/mtd/nand/
Dlpc32xx_mlc.c197 struct scatterlist sgl; member
398 sg_init_one(&host->sgl, mem, len); in lpc32xx_xmit_dma()
400 res = dma_map_sg(host->dma_chan->device->dev, &host->sgl, 1, in lpc32xx_xmit_dma()
406 desc = dmaengine_prep_slave_sg(host->dma_chan, &host->sgl, 1, dir, in lpc32xx_xmit_dma()
422 dma_unmap_sg(host->dma_chan->device->dev, &host->sgl, 1, in lpc32xx_xmit_dma()
426 dma_unmap_sg(host->dma_chan->device->dev, &host->sgl, 1, in lpc32xx_xmit_dma()
Dlpc32xx_slc.c215 struct scatterlist sgl; member
451 sg_init_one(&host->sgl, mem, len); in lpc32xx_xmit_dma()
453 res = dma_map_sg(host->dma_chan->device->dev, &host->sgl, 1, in lpc32xx_xmit_dma()
459 desc = dmaengine_prep_slave_sg(host->dma_chan, &host->sgl, 1, dir, in lpc32xx_xmit_dma()
475 dma_unmap_sg(host->dma_chan->device->dev, &host->sgl, 1, in lpc32xx_xmit_dma()
480 dma_unmap_sg(host->dma_chan->device->dev, &host->sgl, 1, in lpc32xx_xmit_dma()
/linux-4.4.14/drivers/target/
Dtarget_core_transport.c1353 transport_generic_map_mem_to_cmd(struct se_cmd *cmd, struct scatterlist *sgl, in transport_generic_map_mem_to_cmd() argument
1356 if (!sgl || !sgl_count) in transport_generic_map_mem_to_cmd()
1370 cmd->t_data_sg = sgl; in transport_generic_map_mem_to_cmd()
1411 struct scatterlist *sgl, u32 sgl_count, in target_submit_cmd_map_sgls() argument
1478 BUG_ON(!sgl); in target_submit_cmd_map_sgls()
1492 if (sgl) in target_submit_cmd_map_sgls()
1493 buf = kmap(sg_page(sgl)) + sgl->offset; in target_submit_cmd_map_sgls()
1496 memset(buf, 0, sgl->length); in target_submit_cmd_map_sgls()
1497 kunmap(sg_page(sgl)); in target_submit_cmd_map_sgls()
1501 rc = transport_generic_map_mem_to_cmd(se_cmd, sgl, sgl_count, in target_submit_cmd_map_sgls()
[all …]
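
target_submit_cmd_map_sgls() above peeks at the head of a caller-supplied SGL through kmap() when it needs to scrub a small payload before failing a command. The same move, reduced to a hypothetical helper:

#include <linux/highmem.h>
#include <linux/scatterlist.h>
#include <linux/string.h>

static void my_zero_first_sg(struct scatterlist *sgl)
{
        void *buf = kmap(sg_page(sgl)) + sgl->offset;   /* map the head page */

        memset(buf, 0, sgl->length);
        kunmap(sg_page(sgl));
}
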
Dtarget_core_file.c250 u32 block_size, struct scatterlist *sgl, in fd_do_rw() argument
266 for_each_sg(sgl, sg, sgl_nents, i) { in fd_do_rw()
511 fd_execute_rw(struct se_cmd *cmd, struct scatterlist *sgl, u32 sgl_nents, in fd_execute_rw() argument
544 sgl, sgl_nents, cmd->data_length, 0); in fd_execute_rw()
567 sgl, sgl_nents, cmd->data_length, 1); in fd_execute_rw()
Dtarget_core_pscsi.c878 pscsi_map_sg(struct se_cmd *cmd, struct scatterlist *sgl, u32 sgl_nents, in pscsi_map_sg() argument
886 int nr_pages = (cmd->data_length + sgl[0].offset + in pscsi_map_sg()
895 for_each_sg(sgl, sg, sgl_nents, i) { in pscsi_map_sg()
993 struct scatterlist *sgl = cmd->t_data_sg; in pscsi_execute_cmd() local
1015 if (!sgl) { in pscsi_execute_cmd()
1029 ret = pscsi_map_sg(cmd, sgl, sgl_nents, data_direction, &hbio); in pscsi_execute_cmd()
Dtarget_core_rd.c442 rd_execute_rw(struct se_cmd *cmd, struct scatterlist *sgl, u32 sgl_nents, in rd_execute_rw() argument
486 sg_miter_start(&m, sgl, sgl_nents, in rd_execute_rw()
Dtarget_core_iblock.c635 iblock_execute_rw(struct se_cmd *cmd, struct scatterlist *sgl, u32 sgl_nents, in iblock_execute_rw() argument
692 for_each_sg(sgl, sg, sgl_nents, i) { in iblock_execute_rw()
/linux-4.4.14/drivers/nvme/host/
Dscsi.c242 struct sg_iovec sgl; in nvme_trans_copy_to_user() local
245 if (copy_from_user(&sgl, hdr->dxferp + in nvme_trans_copy_to_user()
249 xfer_len = min(remaining, sgl.iov_len); in nvme_trans_copy_to_user()
250 if (copy_to_user(sgl.iov_base, index, xfer_len)) in nvme_trans_copy_to_user()
277 struct sg_iovec sgl; in nvme_trans_copy_from_user() local
280 if (copy_from_user(&sgl, hdr->dxferp + in nvme_trans_copy_from_user()
284 xfer_len = min(remaining, sgl.iov_len); in nvme_trans_copy_from_user()
285 if (copy_from_user(index, sgl.iov_base, xfer_len)) in nvme_trans_copy_from_user()
1677 struct sg_iovec sgl; in nvme_trans_do_nvme_io() local
1679 retcode = copy_from_user(&sgl, hdr->dxferp + in nvme_trans_do_nvme_io()
[all …]
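
The nvme_trans_copy_to_user()/..._from_user() hits walk a user-space array of struct sg_iovec behind hdr->dxferp, moving the payload piecewise. A condensed sketch of the copy-out direction (my_copy_to_iovecs is hypothetical and drops the driver's extra bookkeeping):

#include <linux/kernel.h>
#include <linux/uaccess.h>
#include <scsi/sg.h>

static int my_copy_to_iovecs(struct sg_io_hdr *hdr, void *from,
                             size_t remaining)
{
        int i;

        for (i = 0; i < hdr->iovec_count && remaining > 0; i++) {
                struct sg_iovec sgl;
                size_t xfer_len;

                if (copy_from_user(&sgl, hdr->dxferp + i * sizeof(sgl),
                                   sizeof(sgl)))
                        return -EFAULT;

                xfer_len = min(remaining, sgl.iov_len);
                if (copy_to_user(sgl.iov_base, from, xfer_len))
                        return -EFAULT;

                from += xfer_len;
                remaining -= xfer_len;
        }
        return 0;
}
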
/linux-4.4.14/drivers/infiniband/hw/mlx5/
Dmem.c78 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) { in mlx5_ib_cont_pages()
180 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) { in __mlx5_ib_populate_pas()
Ddoorbell.c77 db->dma = sg_dma_address(page->umem->sg_head.sgl) + (virt & ~PAGE_MASK); in mlx5_ib_db_map_user()
/linux-4.4.14/drivers/media/platform/xilinx/
Dxilinx-dma.h100 struct data_chunk sgl[1]; member
Dxilinx-dma.c366 dma->sgl[0].size = dma->format.width * dma->fmtinfo->bpp; in xvip_dma_buffer_queue()
367 dma->sgl[0].icg = dma->format.bytesperline - dma->sgl[0].size; in xvip_dma_buffer_queue()
/linux-4.4.14/include/scsi/
Dscsi_cmnd.h178 return cmd->sdb.table.sgl; in scsi_sglist()
315 return cmd->prot_sdb ? cmd->prot_sdb->table.sgl : NULL; in scsi_prot_sglist()
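
scsi_sglist() and scsi_prot_sglist() above are the accessors low-level drivers use to reach a command's data and protection tables; iteration then goes through scsi_for_each_sg(). For instance, summing the mapped DMA lengths (my_mapped_len is hypothetical and only valid after scsi_dma_map() has run):

#include <linux/scatterlist.h>
#include <scsi/scsi_cmnd.h>

static unsigned int my_mapped_len(struct scsi_cmnd *cmd)
{
        struct scatterlist *sg;
        unsigned int total = 0;
        int i;

        scsi_for_each_sg(cmd, sg, scsi_sg_count(cmd), i)
                total += sg_dma_len(sg);

        return total;
}
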
/linux-4.4.14/drivers/scsi/csiostor/
Dcsio_scsi.c291 struct ulptx_sgl *sgl) in csio_scsi_init_ultptx_dsgl() argument
301 sgl->cmd_nsge = htonl(ULPTX_CMD_V(ULP_TX_SC_DSGL) | ULPTX_MORE_F | in csio_scsi_init_ultptx_dsgl()
307 sgl->addr0 = cpu_to_be64(sg_dma_address(sgel)); in csio_scsi_init_ultptx_dsgl()
308 sgl->len0 = cpu_to_be32(sg_dma_len(sgel)); in csio_scsi_init_ultptx_dsgl()
309 sge_pair = (struct ulptx_sge_pair *)(sgl + 1); in csio_scsi_init_ultptx_dsgl()
331 sgl->addr0 = cpu_to_be64(dma_buf->paddr); in csio_scsi_init_ultptx_dsgl()
332 sgl->len0 = cpu_to_be32( in csio_scsi_init_ultptx_dsgl()
334 sge_pair = (struct ulptx_sge_pair *)(sgl + 1); in csio_scsi_init_ultptx_dsgl()
365 struct ulptx_sgl *sgl; in csio_scsi_init_read_wr() local
397 sgl = (struct ulptx_sgl *)((uintptr_t)wrp + in csio_scsi_init_read_wr()
[all …]
/linux-4.4.14/drivers/hsi/controllers/
Domap_ssi_port.c205 omap_ssi->gdd_trn[lch].sg = msg->sgt.sgl; in ssi_claim_lch()
227 err = dma_map_sg(&ssi->device, msg->sgt.sgl, msg->sgt.nents, in ssi_start_dma()
241 d_addr = sg_dma_address(msg->sgt.sgl); in ssi_start_dma()
243 err = dma_map_sg(&ssi->device, msg->sgt.sgl, msg->sgt.nents, in ssi_start_dma()
255 s_addr = sg_dma_address(msg->sgt.sgl); in ssi_start_dma()
269 writew_relaxed(SSI_BYTES_TO_FRAMES(msg->sgt.sgl->length), in ssi_start_dma()
320 if ((msg->sgt.nents) && (msg->sgt.sgl->length > sizeof(u32))) in ssi_start_transfer()
430 msg->channel, msg, msg->sgt.sgl->length, in ssi_flush_queue()
862 if ((!msg->sgt.nents) || (!msg->sgt.sgl->length)) { in ssi_pio_complete()
871 buf = sg_virt(msg->sgt.sgl) + msg->actual_len; in ssi_pio_complete()
[all …]
Domap_ssi.c226 dma_unmap_sg(&ssi->device, msg->sgt.sgl, msg->sgt.nents, dir); in ssi_gdd_complete()
247 msg->actual_len = sg_dma_len(msg->sgt.sgl); in ssi_gdd_complete()
/linux-4.4.14/drivers/net/wireless/brcm80211/brcmfmac/
Dbcmsdh.c520 struct scatterlist *sgl; in brcmf_sdiod_sglist_rw() local
564 mmc_dat.sg = sdiodev->sgtable.sgl; in brcmf_sdiod_sglist_rw()
580 sgl = sdiodev->sgtable.sgl; in brcmf_sdiod_sglist_rw()
590 sg_set_buf(sgl, pkt_data, sg_data_sz); in brcmf_sdiod_sglist_rw()
593 sgl = sg_next(sgl); in brcmf_sdiod_sglist_rw()
661 sg_init_table(sdiodev->sgtable.sgl, sdiodev->sgtable.orig_nents); in brcmf_sdiod_sglist_rw()
/linux-4.4.14/drivers/message/fusion/
Dmptctl.c130 static void kfree_sgl(MptSge_t *sgl, dma_addr_t sgl_dma,
793 MptSge_t *sgl, *sgIn; in mptctl_do_fw_download() local
878 if ((sgl = kbuf_alloc_2_sgl(fwlen, sgdir, sge_offset, in mptctl_do_fw_download()
902 iocp->name, sgl, numfrags)); in mptctl_do_fw_download()
909 sgIn = sgl; in mptctl_do_fw_download()
975 if (sgl) in mptctl_do_fw_download()
976 kfree_sgl(sgl, sgl_dma, buflist, iocp); in mptctl_do_fw_download()
1005 kfree_sgl(sgl, sgl_dma, buflist, iocp); in mptctl_do_fw_download()
1031 MptSge_t *sgl; in kbuf_alloc_2_sgl() local
1079 sgl = sglbuf; in kbuf_alloc_2_sgl()
[all …]
/linux-4.4.14/drivers/media/pci/saa7134/
Dsaa7134-vbi.c127 if (dma->sgl->offset) { in buffer_prepare()
137 return saa7134_pgtable_build(dev->pci, &dmaq->pt, dma->sgl, dma->nents, in buffer_prepare()
Dsaa7134-ts.c114 return saa7134_pgtable_build(dev->pci, &dmaq->pt, dma->sgl, dma->nents, in saa7134_ts_buffer_prepare()
/linux-4.4.14/drivers/net/ethernet/chelsio/cxgb4/
Dsge.c140 struct ulptx_sgl *sgl; member
312 const struct ulptx_sgl *sgl, const struct sge_txq *q) in unmap_sgl() argument
318 dma_unmap_single(dev, be64_to_cpu(sgl->addr0), ntohl(sgl->len0), in unmap_sgl()
321 dma_unmap_page(dev, be64_to_cpu(sgl->addr0), ntohl(sgl->len0), in unmap_sgl()
330 for (p = sgl->sge; nfrags >= 2; nfrags -= 2) { in unmap_sgl()
391 unmap_sgl(dev, d->skb, d->sgl, q); in free_tx_desc()
851 struct ulptx_sgl *sgl, u64 *end, unsigned int start, in write_sgl() argument
862 sgl->len0 = htonl(len); in write_sgl()
863 sgl->addr0 = cpu_to_be64(addr[0] + start); in write_sgl()
866 sgl->len0 = htonl(skb_frag_size(&si->frags[0])); in write_sgl()
[all …]
/linux-4.4.14/drivers/infiniband/hw/mlx4/
Ddoorbell.c75 db->dma = sg_dma_address(page->umem->sg_head.sgl) + (virt & ~PAGE_MASK); in mlx4_ib_db_map_user()
/linux-4.4.14/drivers/infiniband/hw/cxgb4/
Dmem.c65 struct ulptx_sgl *sgl; in _c4iw_write_mem_dma_aligned() local
74 wr_len = roundup(sizeof(*req) + sizeof(*sgl), 16); in _c4iw_write_mem_dma_aligned()
94 sgl = (struct ulptx_sgl *)(req + 1); in _c4iw_write_mem_dma_aligned()
95 sgl->cmd_nsge = cpu_to_be32(ULPTX_CMD_V(ULP_TX_SC_DSGL) | in _c4iw_write_mem_dma_aligned()
97 sgl->len0 = cpu_to_be32(len); in _c4iw_write_mem_dma_aligned()
98 sgl->addr0 = cpu_to_be64(data); in _c4iw_write_mem_dma_aligned()
754 for_each_sg(mhp->umem->sg_head.sgl, sg, mhp->umem->nmap, entry) { in c4iw_reg_user_mr()
/linux-4.4.14/drivers/mtd/nand/gpmi-nand/
Dgpmi-lib.c1123 struct scatterlist *sgl; in gpmi_send_command() local
1143 sgl = &this->cmd_sgl; in gpmi_send_command()
1145 sg_init_one(sgl, this->cmd_buffer, this->command_length); in gpmi_send_command()
1146 dma_map_sg(this->dev, sgl, 1, DMA_TO_DEVICE); in gpmi_send_command()
1148 sgl, 1, DMA_MEM_TO_DEV, in gpmi_send_command()
Dgpmi-nand.c384 struct scatterlist *sgl = &this->data_sgl; in prepare_data_dma() local
390 sg_init_one(sgl, this->upper_buf, this->upper_len); in prepare_data_dma()
391 ret = dma_map_sg(this->dev, sgl, 1, dr); in prepare_data_dma()
401 sg_init_one(sgl, this->data_buffer_dma, this->upper_len); in prepare_data_dma()
406 dma_map_sg(this->dev, sgl, 1, dr); in prepare_data_dma()
/linux-4.4.14/drivers/gpu/drm/exynos/
Dexynos_drm_gem.c383 nents = dma_map_sg(drm_dev->dev, sgt->sgl, sgt->nents, dir); in exynos_gem_map_sgt_with_dma()
398 dma_unmap_sg(drm_dev->dev, sgt->sgl, sgt->nents, dir); in exynos_gem_unmap_sgt_from_dma()
574 exynos_gem->dma_addr = sg_dma_address(sgt->sgl); in exynos_drm_gem_prime_import_sg_table()
/linux-4.4.14/drivers/gpu/drm/
Ddrm_cache.c112 for_each_sg_page(st->sgl, &sg_iter, st->nents, 0) in drm_clflush_sg()
Ddrm_prime.c156 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, in drm_gem_map_detach()
204 if (!dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir)) { in drm_gem_map_dma_buf()
722 for_each_sg(sgt->sgl, sg, sgt->nents, count) { in drm_prime_sg_to_page_addr_arrays()
Ddrm_gem_cma_helper.c459 cma_obj->paddr = sg_dma_address(sgt->sgl); in drm_gem_cma_prime_import_sg_table()
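
drm_gem_map_dma_buf() above maps the exporter's table for the attaching device, and the CMA import helper then relies on the result being one contiguous run before reading sg_dma_address(sgt->sgl). Both checks condensed into one hypothetical helper (the real flow splits them across two callbacks):

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int my_import_contig(struct device *attach_dev, struct sg_table *sgt,
                            dma_addr_t *paddr)
{
        int nents = dma_map_sg(attach_dev, sgt->sgl, sgt->nents,
                               DMA_BIDIRECTIONAL);

        if (nents <= 0)
                return -ENOMEM;
        if (nents != 1) {       /* CMA-style import needs one contiguous run */
                dma_unmap_sg(attach_dev, sgt->sgl, sgt->nents,
                             DMA_BIDIRECTIONAL);
                return -EINVAL;
        }

        *paddr = sg_dma_address(sgt->sgl);
        return 0;
}
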
/linux-4.4.14/drivers/scsi/be2iscsi/
Dbe_mgmt.h55 struct amap_mcc_sge sgl[19]; member
84 struct mcc_sge sgl[19]; member
Dbe_main.c3185 struct be_dma_mem *sgl) in be_sgl_create_contiguous() argument
3190 WARN_ON(!sgl); in be_sgl_create_contiguous()
3192 sgl->va = virtual_address; in be_sgl_create_contiguous()
3193 sgl->dma = (unsigned long)physical_address; in be_sgl_create_contiguous()
3194 sgl->size = length; in be_sgl_create_contiguous()
3199 static void be_sgl_destroy_contiguous(struct be_dma_mem *sgl) in be_sgl_destroy_contiguous() argument
3201 memset(sgl, 0, sizeof(*sgl)); in be_sgl_destroy_contiguous()
3206 struct mem_array *pmem, struct be_dma_mem *sgl) in hwi_build_be_sgl_arr() argument
3208 if (sgl->va) in hwi_build_be_sgl_arr()
3209 be_sgl_destroy_contiguous(sgl); in hwi_build_be_sgl_arr()
[all …]
/linux-4.4.14/drivers/media/pci/cx25821/
Dcx25821-video.c199 sgt->sgl, 0, UNSET, in cx25821_buffer_prepare()
204 sgt->sgl, UNSET, 0, in cx25821_buffer_prepare()
213 sgt->sgl, line0_offset, in cx25821_buffer_prepare()
219 sgt->sgl, in cx25821_buffer_prepare()
225 sgt->sgl, in cx25821_buffer_prepare()
/linux-4.4.14/drivers/media/pci/tw68/
Dtw68-video.c477 tw68_risc_buffer(dev->pci, buf, dma->sgl, in tw68_buf_prepare()
481 tw68_risc_buffer(dev->pci, buf, dma->sgl, in tw68_buf_prepare()
485 tw68_risc_buffer(dev->pci, buf, dma->sgl, in tw68_buf_prepare()
490 tw68_risc_buffer(dev->pci, buf, dma->sgl, in tw68_buf_prepare()
496 tw68_risc_buffer(dev->pci, buf, dma->sgl, in tw68_buf_prepare()
/linux-4.4.14/net/8021q/
Dvlan_dev.c383 struct scatterlist *sgl, unsigned int sgc) in vlan_dev_fcoe_ddp_setup() argument
390 rc = ops->ndo_fcoe_ddp_setup(real_dev, xid, sgl, sgc); in vlan_dev_fcoe_ddp_setup()
441 struct scatterlist *sgl, unsigned int sgc) in vlan_dev_fcoe_ddp_target() argument
448 rc = ops->ndo_fcoe_ddp_target(real_dev, xid, sgl, sgc); in vlan_dev_fcoe_ddp_target()
/linux-4.4.14/drivers/crypto/qce/
Ddma.c60 struct scatterlist *sg = sgt->sgl, *sg_last = NULL; in qce_sgtable_add()
/linux-4.4.14/drivers/char/agp/
Dintel-gtt.c109 for_each_sg(st->sgl, sg, num_entries, i) in intel_gtt_map_memory()
113 st->sgl, st->nents, PCI_DMA_BIDIRECTIONAL)) in intel_gtt_map_memory()
131 st.sgl = sg_list; in intel_gtt_unmap_memory()
853 for_each_sg(st->sgl, sg, st->nents, i) { in intel_gtt_insert_sg_entries()
919 mem->sg_list = st.sgl; in intel_fake_agp_insert_entries()
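
intel_gtt_map_memory() above is the canonical build-then-map sequence: allocate a (possibly chained) table, point each entry at a page, then map the lot for the device. Roughly, under hypothetical naming (my_map_pages):

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static int my_map_pages(struct device *dev, struct sg_table *st,
                        struct page **pages, unsigned int num)
{
        struct scatterlist *sg;
        unsigned int i;

        if (sg_alloc_table(st, num, GFP_KERNEL))
                return -ENOMEM;

        for_each_sg(st->sgl, sg, num, i)
                sg_set_page(sg, pages[i], PAGE_SIZE, 0);

        st->nents = dma_map_sg(dev, st->sgl, st->orig_nents,
                               DMA_BIDIRECTIONAL);
        if (!st->nents) {
                sg_free_table(st);
                return -ENOMEM;
        }
        return 0;
}
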
/linux-4.4.14/drivers/net/ethernet/chelsio/cxgb3/
Dsge.c1052 const struct sg_ent *sgl, in write_wr_hdr_sgl() argument
1077 const u64 *fp = (const u64 *)sgl; in write_wr_hdr_sgl()
1144 struct sg_ent *sgp, sgl[MAX_SKB_FRAGS / 2 + 1]; in write_tx_pkt_wr() local
1198 sgp = ndesc == 1 ? (struct sg_ent *)&d->flit[flits] : sgl; in write_tx_pkt_wr()
1201 write_wr_hdr_sgl(ndesc, skb, d, pidx, q, sgl, flits, sgl_flits, gen, in write_tx_pkt_wr()
1551 const struct sg_ent *sgl, int sgl_flits) in setup_deferred_unmapping() argument
1558 for (p = dui->addr; sgl_flits >= 3; sgl++, sgl_flits -= 3) { in setup_deferred_unmapping()
1559 *p++ = be64_to_cpu(sgl->addr[0]); in setup_deferred_unmapping()
1560 *p++ = be64_to_cpu(sgl->addr[1]); in setup_deferred_unmapping()
1563 *p = be64_to_cpu(sgl->addr[0]); in setup_deferred_unmapping()
[all …]
/linux-4.4.14/Documentation/dmaengine/
Dclient.txt88 struct dma_chan *chan, struct scatterlist *sgl,
108 nr_sg = dma_map_sg(chan->device->dev, sgl, sg_len);
112 desc = dmaengine_prep_slave_sg(chan, sgl, nr_sg, direction, flags);
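
The client.txt lines above give the three API steps; strung together with the usual error handling they look like this (a sketch, not the literal documentation example; the my_-prefixed names are invented):

#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>

static void my_dma_done(void *param)    /* hypothetical completion hook */
{
        /* unmap the scatterlist and complete the request here */
}

static int my_issue_slave_sg(struct dma_chan *chan, struct scatterlist *sgl,
                             unsigned int sg_len)
{
        struct dma_async_tx_descriptor *desc;
        dma_cookie_t cookie;
        int nr_sg;

        nr_sg = dma_map_sg(chan->device->dev, sgl, sg_len, DMA_TO_DEVICE);
        if (!nr_sg)
                return -ENOMEM;

        desc = dmaengine_prep_slave_sg(chan, sgl, nr_sg, DMA_MEM_TO_DEV,
                                       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!desc)
                goto unmap;

        desc->callback = my_dma_done;
        desc->callback_param = NULL;

        cookie = dmaengine_submit(desc);
        if (dma_submit_error(cookie))
                goto unmap;

        dma_async_issue_pending(chan);
        return 0;

unmap:
        dma_unmap_sg(chan->device->dev, sgl, sg_len, DMA_TO_DEVICE);
        return -EIO;
}
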
/linux-4.4.14/drivers/gpu/drm/vmwgfx/
Dvmwgfx_buffer.c329 __sg_page_iter_start(&viter->iter, vsgt->sgt->sgl, in vmw_piter_start()
349 dma_unmap_sg(dev, vmw_tt->sgt.sgl, vmw_tt->sgt.nents, in vmw_ttm_unmap_from_dma()
372 ret = dma_map_sg(dev, vmw_tt->sgt.sgl, vmw_tt->sgt.orig_nents, in vmw_ttm_map_for_dma()
/linux-4.4.14/drivers/infiniband/ulp/iser/
Diser_memory.c153 struct scatterlist *sg, *sgl = data->sg; in iser_sg_to_page_vec() local
160 *offset = (u64) sgl[0].offset & ~MASK_4K; in iser_sg_to_page_vec()
164 for_each_sg(sgl, sg, data->dma_nents, i) { in iser_sg_to_page_vec()
/linux-4.4.14/drivers/dma/xilinx/
Dxilinx_vdma.c942 if (!xt->numf || !xt->sgl[0].size) in xilinx_vdma_dma_prep_interleaved()
965 hw->hsize = xt->sgl[0].size; in xilinx_vdma_dma_prep_interleaved()
966 hw->stride = (xt->sgl[0].icg + xt->sgl[0].size) << in xilinx_vdma_dma_prep_interleaved()
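
xilinx_vdma_dma_prep_interleaved() above consumes a dma_interleaved_template in which sgl[0].size is the payload per line and sgl[0].icg the inter-line gap. Filling one for a device-to-memory video frame might look like this (my_prep_frame and its parameters are hypothetical; drivers read the template during the prep call, so it can be freed afterwards):

#include <linux/dmaengine.h>
#include <linux/slab.h>

static struct dma_async_tx_descriptor *
my_prep_frame(struct dma_chan *chan, dma_addr_t buf,
              unsigned int bytes_per_line, unsigned int stride,
              unsigned int height)
{
        struct dma_interleaved_template *xt;
        struct dma_async_tx_descriptor *desc;

        xt = kzalloc(sizeof(*xt) + sizeof(struct data_chunk), GFP_KERNEL);
        if (!xt)
                return NULL;

        xt->dir = DMA_DEV_TO_MEM;
        xt->dst_start = buf;
        xt->dst_inc = true;
        xt->numf = height;                      /* one frame row per line */
        xt->frame_size = 1;                     /* a single chunk per row */
        xt->sgl[0].size = bytes_per_line;       /* payload bytes per line */
        xt->sgl[0].icg = stride - bytes_per_line;       /* inter-line gap */

        desc = dmaengine_prep_interleaved_dma(chan, xt, DMA_PREP_INTERRUPT);
        kfree(xt);
        return desc;
}
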
/linux-4.4.14/drivers/media/pci/cx23885/
Dcx23885-video.c349 sgt->sgl, 0, UNSET, in buffer_prepare()
354 sgt->sgl, UNSET, 0, in buffer_prepare()
382 sgt->sgl, line0_offset, in buffer_prepare()
389 sgt->sgl, in buffer_prepare()
396 sgt->sgl, in buffer_prepare()
Dcx23885-vbi.c156 sgt->sgl, in buffer_prepare()
/linux-4.4.14/arch/m68k/fpsp040/
Dx_unfl.S144 | ;1=sgl, 2=dbl
224 | ;If destination format is sgl/dbl,
Dround.S26 | sgl = $0001xxxx
148 bfextu LOCAL_HI(%a0){#24:#2},%d3 |sgl prec. g-r are 2 bits right
149 movel #30,%d2 |of the sgl prec. limits
179 .set ad_1_sgl,0x00000100 | constant to add 1 to l-bit in sgl prec
204 andil #0xffffff00,LOCAL_HI(%a0) |truncate bits beyond sgl limit
Dutil.S235 bra end_ovfr |inf is same for all precisions (ext,dbl,sgl)
393 | 10 1 sgl
477 | ;smallest +sgl denorm
Dx_store.S88 | ;ext=00, sgl=01, dbl=10
Dbugfix.S191 | Check for opclass 0. If not, go and check for opclass 2 and sgl.
353 cmpiw #0x4400,%d0 |test for opclass 2 and size=sgl
/linux-4.4.14/drivers/iommu/
Ddma-iommu.c313 sg_miter_start(&miter, sgt.sgl, sgt.orig_nents, SG_MITER_FROM_SG); in iommu_dma_alloc()
320 if (iommu_map_sg(domain, dma_addr, sgt.sgl, sgt.orig_nents, prot) in iommu_dma_alloc()
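
iommu_dma_alloc() above uses an sg_mapping_iter to touch the pages of a fresh table without first building a contiguous kernel mapping. The same iterator pattern, reduced to a hypothetical helper that zeroes a table:

#include <linux/scatterlist.h>
#include <linux/string.h>

static void my_zero_table(struct sg_table *sgt)
{
        struct sg_mapping_iter miter;

        sg_miter_start(&miter, sgt->sgl, sgt->orig_nents, SG_MITER_TO_SG);
        while (sg_miter_next(&miter))
                memset(miter.addr, 0, miter.length);    /* one kmapped chunk */
        sg_miter_stop(&miter);
}
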
/linux-4.4.14/drivers/misc/mic/scif/
Dscif_debugfs.c91 for_each_sg(window->st->sgl, sg, window->st->nents, j) in scif_display_window()
Dscif_rma.c239 window->st->sgl, window->st->nents, in scif_unmap_window()
555 for_each_sg(window->st->sgl, sg, window->st->nents, i) in scif_iommu_map()
558 err = dma_map_sg(&remote_dev->sdev->dev, window->st->sgl, in scif_iommu_map()
563 sg = window->st->sgl; in scif_iommu_map()
/linux-4.4.14/drivers/dma/hsu/
Dhsu.c215 struct dma_chan *chan, struct scatterlist *sgl, in hsu_dma_prep_slave_sg() argument
228 for_each_sg(sgl, sg, sg_len, i) { in hsu_dma_prep_slave_sg()
/linux-4.4.14/drivers/crypto/
Domap-sham.c153 struct scatterlist sgl; member
590 sg_init_table(&ctx->sgl, 1); in omap_sham_xmit_dma()
591 sg_assign_page(&ctx->sgl, sg_page(ctx->sg)); in omap_sham_xmit_dma()
592 ctx->sgl.offset = ctx->sg->offset; in omap_sham_xmit_dma()
593 sg_dma_len(&ctx->sgl) = len32; in omap_sham_xmit_dma()
594 sg_dma_address(&ctx->sgl) = sg_dma_address(ctx->sg); in omap_sham_xmit_dma()
596 tx = dmaengine_prep_slave_sg(dd->dma_lch, &ctx->sgl, 1, in omap_sham_xmit_dma()
/linux-4.4.14/drivers/mmc/core/
Dsdio_ops.c165 data.sg = sgtable.sgl; in mmc_io_rw_extended()
/linux-4.4.14/drivers/rapidio/devices/
Dtsi721_dma.c757 struct scatterlist *sgl, unsigned int sg_len, in tsi721_prep_rio_sg() argument
767 if (!sgl || !sg_len) { in tsi721_prep_rio_sg()
806 desc->sg = sgl; in tsi721_prep_rio_sg()
/linux-4.4.14/drivers/crypto/qat/qat_common/
Dqat_algs.c663 struct scatterlist *sgl, in qat_alg_sgl_to_bufl() argument
669 int n = sg_nents(sgl); in qat_alg_sgl_to_bufl()
690 for_each_sg(sgl, sg, n, i) { in qat_alg_sgl_to_bufl()
709 if (sgl != sglout) { in qat_alg_sgl_to_bufl()
761 if (sgl != sglout && buflout) { in qat_alg_sgl_to_bufl()
/linux-4.4.14/drivers/gpu/drm/amd/amdgpu/
Damdgpu_ttm.c537 nents = dma_map_sg(adev->dev, ttm->sg->sgl, ttm->sg->nents, direction); in amdgpu_ttm_tt_pin_userptr()
565 if (!ttm->sg->sgl) in amdgpu_ttm_tt_unpin_userptr()
569 dma_unmap_sg(adev->dev, ttm->sg->sgl, ttm->sg->nents, direction); in amdgpu_ttm_tt_unpin_userptr()
571 for_each_sg_page(ttm->sg->sgl, &sg_iter, ttm->sg->nents, 0) { in amdgpu_ttm_tt_unpin_userptr()
/linux-4.4.14/drivers/gpu/drm/radeon/
Dradeon_ttm.c575 nents = dma_map_sg(rdev->dev, ttm->sg->sgl, ttm->sg->nents, direction); in radeon_ttm_tt_pin_userptr()
603 if (!ttm->sg->sgl) in radeon_ttm_tt_unpin_userptr()
607 dma_unmap_sg(rdev->dev, ttm->sg->sgl, ttm->sg->nents, direction); in radeon_ttm_tt_unpin_userptr()
609 for_each_sg_page(ttm->sg->sgl, &sg_iter, ttm->sg->nents, 0) { in radeon_ttm_tt_unpin_userptr()
/linux-4.4.14/drivers/scsi/cxgbi/
Dlibcxgbi.c1379 struct scatterlist *sgl, in ddp_make_gl() argument
1385 struct scatterlist *sg = sgl; in ddp_make_gl()
1418 for (i = 1, sg = sg_next(sgl), j = 0; i < sgcnt; in ddp_make_gl()
1551 struct scatterlist *sgl, unsigned int sgcnt, gfp_t gfp) in cxgbi_ddp_reserve() argument
1571 gl = ddp_make_gl(xferlen, sgl, sgcnt, cdev->pdev, gfp); in cxgbi_ddp_reserve()
1722 scsi_in(sc)->table.sgl, in task_reserve_itt()
2024 static int sgl_seek_offset(struct scatterlist *sgl, unsigned int sgcnt, in sgl_seek_offset() argument
2031 for_each_sg(sgl, sg, sgcnt, i) { in sgl_seek_offset()
2182 sdb->table.sgl, sdb->table.nents, in cxgbi_conn_init_pdu()
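
sgl_seek_offset() above answers the recurring question of which entry holds a given byte offset. Its core loop, restated as a hypothetical helper:

#include <linux/scatterlist.h>

static int my_seek_offset(struct scatterlist *sgl, unsigned int sgcnt,
                          unsigned int offset, struct scatterlist **sgp,
                          unsigned int *sg_off)
{
        struct scatterlist *sg;
        int i;

        for_each_sg(sgl, sg, sgcnt, i) {
                if (offset < sg->length) {
                        *sgp = sg;
                        *sg_off = offset;   /* residual offset inside entry */
                        return 0;
                }
                offset -= sg->length;
        }
        return -EFAULT;
}
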
/linux-4.4.14/drivers/media/pci/cx88/
Dcx88-video.c462 sgt->sgl, 0, UNSET, in buffer_prepare()
467 sgt->sgl, UNSET, 0, in buffer_prepare()
472 sgt->sgl, in buffer_prepare()
479 sgt->sgl, in buffer_prepare()
487 sgt->sgl, 0, buf->bpl, in buffer_prepare()
Dcx88-vbi.c144 cx88_risc_buffer(dev->pci, &buf->risc, sgt->sgl, in buffer_prepare()
/linux-4.4.14/arch/tile/include/hv/
Dhypervisor.h1900 HV_SGL sgl[/* sgl_len */], __hv64 offset, HV_IntArg intarg);
1946 HV_SGL sgl[/* sgl_len */], __hv64 offset, HV_IntArg intarg);
/linux-4.4.14/drivers/base/
Ddma-mapping.c238 sg_set_page(sgt->sgl, page, PAGE_ALIGN(size), 0); in dma_common_get_sgtable()
/linux-4.4.14/drivers/scsi/qla2xxx/
Dqla_iocb.c1031 struct scatterlist *sg, *sgl; in qla24xx_walk_and_build_sglist() local
1039 sgl = scsi_sglist(cmd); in qla24xx_walk_and_build_sglist()
1041 sgl = tc->sg; in qla24xx_walk_and_build_sglist()
1048 for_each_sg(sgl, sg, tot_dsds, i) { in qla24xx_walk_and_build_sglist()
1118 struct scatterlist *sg, *sgl; in qla24xx_walk_and_build_prot_sglist() local
1127 sgl = scsi_prot_sglist(cmd); in qla24xx_walk_and_build_prot_sglist()
1131 sgl = tc->prot_sg; in qla24xx_walk_and_build_prot_sglist()
1140 for_each_sg(sgl, sg, tot_dsds, i) { in qla24xx_walk_and_build_prot_sglist()
/linux-4.4.14/drivers/crypto/ccp/
Dccp-crypto-main.c305 for (sg = table->sgl; sg; sg = sg_next(sg)) in ccp_crypto_sg_table_add()
/linux-4.4.14/arch/powerpc/platforms/ps3/
Dsystem-bus.c642 static int ps3_sb_map_sg(struct device *_dev, struct scatterlist *sgl, in ps3_sb_map_sg() argument
653 for_each_sg(sgl, sg, nents, i) { in ps3_sb_map_sg()
/linux-4.4.14/drivers/media/platform/omap3isp/
Dispstat.c164 dma_sync_sg_for_device(stat->isp->dev, buf->sgt.sgl, in isp_stat_buf_sync_for_device()
174 dma_sync_sg_for_cpu(stat->isp->dev, buf->sgt.sgl, in isp_stat_buf_sync_for_cpu()
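
The ispstat hits close the listing with the streaming-DMA ownership handoff: sync the table toward the device before the hardware writes statistics, then back to the CPU before parsing them. A hypothetical sketch (my_collect_stats and my_run_hw are invented):

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static void my_run_hw(void)
{
        /* hypothetical: start the device and wait for its interrupt */
}

static void my_collect_stats(struct device *dev, struct sg_table *sgt)
{
        /* the device gets the buffer first ... */
        dma_sync_sg_for_device(dev, sgt->sgl, sgt->nents, DMA_FROM_DEVICE);

        my_run_hw();

        /* ... then the CPU reclaims it before reading */
        dma_sync_sg_for_cpu(dev, sgt->sgl, sgt->nents, DMA_FROM_DEVICE);
}
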
