Searched refs:sgl (Results 1 - 200 of 285) sorted by relevance


/linux-4.1.27/crypto/
algif_skcipher.c:59 struct af_alg_sgl sgl; member in struct:skcipher_async_rsgl
86 struct scatterlist *sgl; skcipher_free_async_sgls() local
91 af_alg_free_sg(&rsgl->sgl); skcipher_free_async_sgls()
95 sgl = sreq->tsg; skcipher_free_async_sgls()
96 n = sg_nents(sgl); skcipher_free_async_sgls()
97 for_each_sg(sgl, sg, n, i) skcipher_free_async_sgls()
135 struct skcipher_sg_list *sgl; skcipher_alloc_sgl() local
138 sgl = list_entry(ctx->tsgl.prev, struct skcipher_sg_list, list); skcipher_alloc_sgl()
140 sg = sgl->sg; skcipher_alloc_sgl()
142 if (!sg || sgl->cur >= MAX_SGL_ENTS) { skcipher_alloc_sgl()
143 sgl = sock_kmalloc(sk, sizeof(*sgl) + skcipher_alloc_sgl()
144 sizeof(sgl->sg[0]) * (MAX_SGL_ENTS + 1), skcipher_alloc_sgl()
146 if (!sgl) skcipher_alloc_sgl()
149 sg_init_table(sgl->sg, MAX_SGL_ENTS + 1); skcipher_alloc_sgl()
150 sgl->cur = 0; skcipher_alloc_sgl()
153 scatterwalk_sg_chain(sg, MAX_SGL_ENTS + 1, sgl->sg); skcipher_alloc_sgl()
155 list_add_tail(&sgl->list, &ctx->tsgl); skcipher_alloc_sgl()
165 struct skcipher_sg_list *sgl; skcipher_pull_sgl() local
170 sgl = list_first_entry(&ctx->tsgl, struct skcipher_sg_list, skcipher_pull_sgl()
172 sg = sgl->sg; skcipher_pull_sgl()
174 for (i = 0; i < sgl->cur; i++) { skcipher_pull_sgl()
193 list_del(&sgl->list); skcipher_pull_sgl()
194 sock_kfree_s(sk, sgl, skcipher_pull_sgl()
195 sizeof(*sgl) + sizeof(sgl->sg[0]) * skcipher_pull_sgl()
312 struct skcipher_sg_list *sgl; skcipher_sendmsg() local
359 sgl = list_entry(ctx->tsgl.prev, skcipher_sendmsg()
361 sg = sgl->sg + sgl->cur - 1; skcipher_sendmsg()
393 sgl = list_entry(ctx->tsgl.prev, struct skcipher_sg_list, list); skcipher_sendmsg()
394 sg = sgl->sg; skcipher_sendmsg()
395 if (sgl->cur) skcipher_sendmsg()
396 sg_unmark_end(sg + sgl->cur - 1); skcipher_sendmsg()
398 i = sgl->cur; skcipher_sendmsg()
419 sgl->cur++; skcipher_sendmsg()
420 } while (len && sgl->cur < MAX_SGL_ENTS); skcipher_sendmsg()
423 sg_mark_end(sg + sgl->cur - 1); skcipher_sendmsg()
445 struct skcipher_sg_list *sgl; skcipher_sendpage() local
469 sgl = list_entry(ctx->tsgl.prev, struct skcipher_sg_list, list); skcipher_sendpage()
471 if (sgl->cur) skcipher_sendpage()
472 sg_unmark_end(sgl->sg + sgl->cur - 1); skcipher_sendpage()
474 sg_mark_end(sgl->sg + sgl->cur); skcipher_sendpage()
476 sg_set_page(sgl->sg + sgl->cur, page, size, offset); skcipher_sendpage()
477 sgl->cur++; skcipher_sendpage()
492 struct skcipher_sg_list *sgl; skcipher_all_sg_nents() local
496 list_for_each_entry(sgl, &ctx->tsgl, list) { skcipher_all_sg_nents()
497 sg = sgl->sg; skcipher_all_sg_nents()
513 struct skcipher_sg_list *sgl; skcipher_recvmsg_async() local
553 sgl = list_first_entry(&ctx->tsgl, skcipher_recvmsg_async()
555 sg = sgl->sg; skcipher_recvmsg_async()
584 /* Need to take over the tx sgl from ctx skcipher_recvmsg_async()
601 used = af_alg_make_sg(&rsgl->sgl, &msg->msg_iter, used); skcipher_recvmsg_async()
606 af_alg_link_sg(&last_rsgl->sgl, &rsgl->sgl); skcipher_recvmsg_async()
617 ablkcipher_request_set_crypt(req, sreq->tsg, sreq->first_sgl.sgl.sg, skcipher_recvmsg_async()
643 struct skcipher_sg_list *sgl; skcipher_recvmsg_sync() local
651 sgl = list_first_entry(&ctx->tsgl, skcipher_recvmsg_sync()
653 sg = sgl->sg; skcipher_recvmsg_sync()
algif_aead.c:81 struct aead_sg_list *sgl = &ctx->tsgl; aead_put_sgl() local
82 struct scatterlist *sg = sgl->sg; aead_put_sgl()
85 for (i = 0; i < sgl->cur; i++) { aead_put_sgl()
92 sgl->cur = 0; aead_put_sgl()
173 struct aead_sg_list *sgl = &ctx->tsgl; aead_sendmsg() local
219 sg = sgl->sg + sgl->cur - 1; aead_sendmsg()
250 if (sgl->cur >= ALG_MAX_PAGES) { aead_sendmsg()
256 sg = sgl->sg + sgl->cur; aead_sendmsg()
277 sgl->cur++; aead_sendmsg()
304 struct aead_sg_list *sgl = &ctx->tsgl; aead_sendpage() local
310 if (sgl->cur >= ALG_MAX_PAGES) aead_sendpage()
330 sg_set_page(sgl->sg + sgl->cur, page, size, offset); aead_sendpage()
331 sgl->cur++; aead_sendpage()
357 struct aead_sg_list *sgl = &ctx->tsgl; aead_recvmsg() local
463 sg = sgl->sg + i; aead_recvmsg()
af_alg.c:391 int af_alg_make_sg(struct af_alg_sgl *sgl, struct iov_iter *iter, int len) af_alg_make_sg() argument
397 n = iov_iter_get_pages(iter, sgl->pages, len, ALG_MAX_PAGES, &off); af_alg_make_sg()
405 sg_init_table(sgl->sg, npages + 1); af_alg_make_sg()
410 sg_set_page(sgl->sg + i, sgl->pages[i], plen, off); af_alg_make_sg()
415 sg_mark_end(sgl->sg + npages - 1); af_alg_make_sg()
416 sgl->npages = npages; af_alg_make_sg()
429 void af_alg_free_sg(struct af_alg_sgl *sgl) af_alg_free_sg() argument
433 for (i = 0; i < sgl->npages; i++) af_alg_free_sg()
434 put_page(sgl->pages[i]); af_alg_free_sg()
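A minimal sketch (not from the tree) of how the two af_alg helpers above pair up, assuming a kernel-internal caller that already holds an iov_iter; error paths and the cipher submission are trimmed, and the function name is hypothetical:

#include <crypto/if_alg.h>

static int example_pin_and_free(struct af_alg_sgl *sgl,
				struct iov_iter *iter, int len)
{
	int used = af_alg_make_sg(sgl, iter, len); /* pins user pages, fills sgl->sg */

	if (used < 0)
		return used;
	/* ... hand sgl->sg to the crypto request here ... */
	af_alg_free_sg(sgl); /* put_page() on every pinned page */
	return used;
}
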
algif_hash.c:25 struct af_alg_sgl sgl; member in struct:hash_ctx
71 len = af_alg_make_sg(&ctx->sgl, &msg->msg_iter, len); hash_sendmsg()
77 ahash_request_set_crypt(&ctx->req, ctx->sgl.sg, NULL, len); hash_sendmsg()
81 af_alg_free_sg(&ctx->sgl); hash_sendmsg()
116 sg_init_table(ctx->sgl.sg, 1); hash_sendpage()
117 sg_set_page(ctx->sgl.sg, page, size, offset); hash_sendpage()
119 ahash_request_set_crypt(&ctx->req, ctx->sgl.sg, ctx->result, size); hash_sendpage()
/linux-4.1.27/drivers/misc/genwqe/
card_utils.c:290 * genwqe_alloc_sync_sgl() - Allocate memory for sgl and overlapping pages
292 * Allocates memory for sgl and overlapping pages. Pages which might
297 int genwqe_alloc_sync_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl, genwqe_alloc_sync_sgl() argument
303 sgl->fpage_offs = offset_in_page((unsigned long)user_addr); genwqe_alloc_sync_sgl()
304 sgl->fpage_size = min_t(size_t, PAGE_SIZE-sgl->fpage_offs, user_size); genwqe_alloc_sync_sgl()
305 sgl->nr_pages = DIV_ROUND_UP(sgl->fpage_offs + user_size, PAGE_SIZE); genwqe_alloc_sync_sgl()
306 sgl->lpage_size = (user_size - sgl->fpage_size) % PAGE_SIZE; genwqe_alloc_sync_sgl()
309 __func__, user_addr, user_size, sgl->nr_pages, genwqe_alloc_sync_sgl()
310 sgl->fpage_offs, sgl->fpage_size, sgl->lpage_size); genwqe_alloc_sync_sgl()
312 sgl->user_addr = user_addr; genwqe_alloc_sync_sgl()
313 sgl->user_size = user_size; genwqe_alloc_sync_sgl()
314 sgl->sgl_size = genwqe_sgl_size(sgl->nr_pages); genwqe_alloc_sync_sgl()
316 if (get_order(sgl->sgl_size) > MAX_ORDER) { genwqe_alloc_sync_sgl()
322 sgl->sgl = __genwqe_alloc_consistent(cd, sgl->sgl_size, genwqe_alloc_sync_sgl()
323 &sgl->sgl_dma_addr); genwqe_alloc_sync_sgl()
324 if (sgl->sgl == NULL) { genwqe_alloc_sync_sgl()
331 if ((sgl->fpage_size != 0) && (sgl->fpage_size != PAGE_SIZE)) { genwqe_alloc_sync_sgl()
332 sgl->fpage = __genwqe_alloc_consistent(cd, PAGE_SIZE, genwqe_alloc_sync_sgl()
333 &sgl->fpage_dma_addr); genwqe_alloc_sync_sgl()
334 if (sgl->fpage == NULL) genwqe_alloc_sync_sgl()
338 if (copy_from_user(sgl->fpage + sgl->fpage_offs, genwqe_alloc_sync_sgl()
339 user_addr, sgl->fpage_size)) { genwqe_alloc_sync_sgl()
344 if (sgl->lpage_size != 0) { genwqe_alloc_sync_sgl()
345 sgl->lpage = __genwqe_alloc_consistent(cd, PAGE_SIZE, genwqe_alloc_sync_sgl()
346 &sgl->lpage_dma_addr); genwqe_alloc_sync_sgl()
347 if (sgl->lpage == NULL) genwqe_alloc_sync_sgl()
351 if (copy_from_user(sgl->lpage, user_addr + user_size - genwqe_alloc_sync_sgl()
352 sgl->lpage_size, sgl->lpage_size)) { genwqe_alloc_sync_sgl()
360 __genwqe_free_consistent(cd, PAGE_SIZE, sgl->fpage, genwqe_alloc_sync_sgl()
361 sgl->fpage_dma_addr); genwqe_alloc_sync_sgl()
363 __genwqe_free_consistent(cd, sgl->sgl_size, sgl->sgl, genwqe_alloc_sync_sgl()
364 sgl->sgl_dma_addr); genwqe_alloc_sync_sgl()
368 int genwqe_setup_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl, genwqe_setup_sgl() argument
375 size_t size = sgl->user_size; genwqe_setup_sgl()
378 map_offs = sgl->fpage_offs; /* offset in first page */ genwqe_setup_sgl()
380 s = &sgl->sgl[0]; /* first set of 8 entries */ genwqe_setup_sgl()
382 while (p < sgl->nr_pages) { genwqe_setup_sgl()
388 s[j].target_addr = cpu_to_be64(sgl->sgl_dma_addr + dma_offs); genwqe_setup_sgl()
397 if ((p == 0) && (sgl->fpage != NULL)) { genwqe_setup_sgl()
398 daddr = sgl->fpage_dma_addr + map_offs; genwqe_setup_sgl()
400 } else if ((p == sgl->nr_pages - 1) && genwqe_setup_sgl()
401 (sgl->lpage != NULL)) { genwqe_setup_sgl()
402 daddr = sgl->lpage_dma_addr; genwqe_setup_sgl()
421 if (p == sgl->nr_pages) genwqe_setup_sgl()
437 if (p == sgl->nr_pages) genwqe_setup_sgl()
445 s -= 8; /* full shift needed on previous sgl block */ genwqe_setup_sgl()
459 * genwqe_free_sync_sgl() - Free memory for sgl and overlapping pages
462 * the sgl and the cached pages. Data is being transferred from cached genwqe_free_sync_sgl()
465 int genwqe_free_sync_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl) genwqe_free_sync_sgl() argument
470 if (sgl->fpage) { genwqe_free_sync_sgl()
471 if (copy_to_user(sgl->user_addr, sgl->fpage + sgl->fpage_offs, genwqe_free_sync_sgl()
472 sgl->fpage_size)) { genwqe_free_sync_sgl()
477 __genwqe_free_consistent(cd, PAGE_SIZE, sgl->fpage, genwqe_free_sync_sgl()
478 sgl->fpage_dma_addr); genwqe_free_sync_sgl()
479 sgl->fpage = NULL; genwqe_free_sync_sgl()
480 sgl->fpage_dma_addr = 0; genwqe_free_sync_sgl()
482 if (sgl->lpage) { genwqe_free_sync_sgl()
483 if (copy_to_user(sgl->user_addr + sgl->user_size - genwqe_free_sync_sgl()
484 sgl->lpage_size, sgl->lpage, genwqe_free_sync_sgl()
485 sgl->lpage_size)) { genwqe_free_sync_sgl()
490 __genwqe_free_consistent(cd, PAGE_SIZE, sgl->lpage, genwqe_free_sync_sgl()
491 sgl->lpage_dma_addr); genwqe_free_sync_sgl()
492 sgl->lpage = NULL; genwqe_free_sync_sgl()
493 sgl->lpage_dma_addr = 0; genwqe_free_sync_sgl()
495 __genwqe_free_consistent(cd, sgl->sgl_size, sgl->sgl, genwqe_free_sync_sgl()
496 sgl->sgl_dma_addr); genwqe_free_sync_sgl()
498 sgl->sgl = NULL; genwqe_free_sync_sgl()
499 sgl->sgl_dma_addr = 0x0; genwqe_free_sync_sgl()
500 sgl->sgl_size = 0; genwqe_free_sync_sgl()
card_base.h:344 * @sgl: scatter gather list needs to be 128 byte aligned
345 * @sgl_dma_addr: dma address of sgl
346 * @sgl_size: size of area used for sgl
354 struct sg_entry *sgl; member in struct:genwqe_sgl
355 size_t sgl_size; /* size of sgl */
372 int genwqe_alloc_sync_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl,
375 int genwqe_setup_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl,
378 int genwqe_free_sync_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl);
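The first/last-page bookkeeping in genwqe_alloc_sync_sgl() above is easiest to see with concrete numbers. A sketch with illustrative values, assuming PAGE_SIZE == 4096 (not code from the tree):

#include <linux/kernel.h>
#include <linux/mm.h>

static void sgl_geometry_example(void)
{
	unsigned long user_addr = 0x1000f00;	/* not page aligned */
	size_t user_size = 10000;

	size_t fpage_offs = offset_in_page(user_addr);		/* 3840 */
	size_t fpage_size = min_t(size_t, PAGE_SIZE - fpage_offs,
				  user_size);			/* 256 */
	unsigned int nr_pages = DIV_ROUND_UP(fpage_offs + user_size,
					     PAGE_SIZE);	/* 4 */
	size_t lpage_size = (user_size - fpage_size) % PAGE_SIZE; /* 1552 */

	pr_info("pages=%u first=%zu last=%zu\n",
		nr_pages, fpage_size, lpage_size);
}
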
/linux-4.1.27/drivers/scsi/fnic/
fnic_io.h:24 #define FNIC_MAX_SG_DESC_CNT 256 /* Maximum descriptors per sgl */
42 FNIC_SGL_CACHE_DFLT = 0, /* cache with default size sgl */
43 FNIC_SGL_CACHE_MAX, /* cache with max size sgl */
44 FNIC_SGL_NUM_CACHES /* number of sgl caches */
56 struct host_sg_desc *sgl_list; /* sgl list */
57 void *sgl_list_alloc; /* sgl list address used for free */
59 dma_addr_t sgl_list_pa; /* dma address for sgl list */
cq_exch_desc.h:149 CQ_SGL_ERR_SGL_LCL_ADDR_ERR, /* sgl access to local vnic addr illegal*/
150 CQ_SGL_ERR_ADDR_RSP_ERR, /* sgl address error */
151 CQ_SGL_ERR_DATA_RSP_ERR, /* sgl data rsp error */
155 CQ_SGL_ERR_DATA_LCL_ADDR_ERR,/* sgl data buf to local vnic addr ill */
/linux-4.1.27/lib/
scatterlist.c:62 * @sgl: First entry in the scatterlist
69 * Note that the @sgl@ pointer passed in need not be the first one,
71 * exist from @sgl@.
74 struct scatterlist *sg_last(struct scatterlist *sgl, unsigned int nents) sg_last() argument
77 struct scatterlist *ret = &sgl[nents - 1]; sg_last()
82 for_each_sg(sgl, sg, nents, i) sg_last()
87 BUG_ON(sgl[0].sg_magic != SG_MAGIC); sg_last()
96 * @sgl: The SG table
104 void sg_init_table(struct scatterlist *sgl, unsigned int nents) sg_init_table() argument
106 memset(sgl, 0, sizeof(*sgl) * nents); sg_init_table()
111 sgl[i].sg_magic = SG_MAGIC; sg_init_table()
114 sg_mark_end(&sgl[nents - 1]); sg_init_table()
180 struct scatterlist *sgl, *next; __sg_free_table() local
182 if (unlikely(!table->sgl)) __sg_free_table()
185 sgl = table->sgl; __sg_free_table()
197 next = sg_chain_ptr(&sgl[max_ents - 1]); __sg_free_table()
209 free_fn(sgl, alloc_size); __sg_free_table()
210 sgl = next; __sg_free_table()
213 table->sgl = NULL; __sg_free_table()
305 table->sgl = sg; __sg_alloc_table()
387 for_each_sg(sgt->sgl, s, sgt->orig_nents, i) { sg_alloc_table_from_pages()
447 * @sgl: sg list to iterate over
456 void sg_miter_start(struct sg_mapping_iter *miter, struct scatterlist *sgl, sg_miter_start() argument
461 __sg_page_iter_start(&miter->piter, sgl, nents, 0); sg_miter_start()
610 * @sgl: The SG list
621 static size_t sg_copy_buffer(struct scatterlist *sgl, unsigned int nents, sg_copy_buffer() argument
635 sg_miter_start(&miter, sgl, nents, sg_flags); sg_copy_buffer()
663 * @sgl: The SG list
671 size_t sg_copy_from_buffer(struct scatterlist *sgl, unsigned int nents, sg_copy_from_buffer() argument
674 return sg_copy_buffer(sgl, nents, buf, buflen, 0, false); sg_copy_from_buffer()
680 * @sgl: The SG list
688 size_t sg_copy_to_buffer(struct scatterlist *sgl, unsigned int nents, sg_copy_to_buffer() argument
691 return sg_copy_buffer(sgl, nents, buf, buflen, 0, true); sg_copy_to_buffer()
697 * @sgl: The SG list
706 size_t sg_pcopy_from_buffer(struct scatterlist *sgl, unsigned int nents, sg_pcopy_from_buffer() argument
709 return sg_copy_buffer(sgl, nents, buf, buflen, skip, false); sg_pcopy_from_buffer()
715 * @sgl: The SG list
724 size_t sg_pcopy_to_buffer(struct scatterlist *sgl, unsigned int nents, sg_pcopy_to_buffer() argument
727 return sg_copy_buffer(sgl, nents, buf, buflen, skip, true); sg_pcopy_to_buffer()
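A minimal sketch of the copy helpers listed above: stage two pages in a small on-stack table, then drain them into a linear buffer. Names are hypothetical and process context is assumed:

#include <linux/scatterlist.h>

static size_t example_drain(struct page *a, struct page *b, void *buf)
{
	struct scatterlist sg[2];

	sg_init_table(sg, 2);	/* zeroes entries, marks sg[1] as last */
	sg_set_page(&sg[0], a, PAGE_SIZE, 0);
	sg_set_page(&sg[1], b, PAGE_SIZE, 0);

	return sg_copy_to_buffer(sg, 2, buf, 2 * PAGE_SIZE);
}
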
kfifo.c:308 static int setup_sgl_buf(struct scatterlist *sgl, void *buf, setup_sgl_buf() argument
334 sg_set_page(sgl, page, l - off, off); setup_sgl_buf()
335 sgl = sg_next(sgl); setup_sgl_buf()
336 if (++n == nents || sgl == NULL) setup_sgl_buf()
343 sg_set_page(sgl, page, len, off); setup_sgl_buf()
347 static unsigned int setup_sgl(struct __kfifo *fifo, struct scatterlist *sgl, setup_sgl() argument
363 n = setup_sgl_buf(sgl, fifo->data + off, nents, l); setup_sgl()
364 n += setup_sgl_buf(sgl + n, fifo->data, nents - n, len - l); setup_sgl()
370 struct scatterlist *sgl, int nents, unsigned int len) __kfifo_dma_in_prepare()
378 return setup_sgl(fifo, sgl, nents, len, fifo->in); __kfifo_dma_in_prepare()
383 struct scatterlist *sgl, int nents, unsigned int len) __kfifo_dma_out_prepare()
391 return setup_sgl(fifo, sgl, nents, len, fifo->out); __kfifo_dma_out_prepare()
562 struct scatterlist *sgl, int nents, unsigned int len, size_t recsize) __kfifo_dma_in_prepare_r()
571 return setup_sgl(fifo, sgl, nents, len, fifo->in + recsize); __kfifo_dma_in_prepare_r()
585 struct scatterlist *sgl, int nents, unsigned int len, size_t recsize) __kfifo_dma_out_prepare_r()
594 return setup_sgl(fifo, sgl, nents, len, fifo->out + recsize); __kfifo_dma_out_prepare_r()
369 __kfifo_dma_in_prepare(struct __kfifo *fifo, struct scatterlist *sgl, int nents, unsigned int len) __kfifo_dma_in_prepare() argument
382 __kfifo_dma_out_prepare(struct __kfifo *fifo, struct scatterlist *sgl, int nents, unsigned int len) __kfifo_dma_out_prepare() argument
561 __kfifo_dma_in_prepare_r(struct __kfifo *fifo, struct scatterlist *sgl, int nents, unsigned int len, size_t recsize) __kfifo_dma_in_prepare_r() argument
584 __kfifo_dma_out_prepare_r(struct __kfifo *fifo, struct scatterlist *sgl, int nents, unsigned int len, size_t recsize) __kfifo_dma_out_prepare_r() argument
swiotlb.c:872 swiotlb_map_sg_attrs(struct device *hwdev, struct scatterlist *sgl, int nelems, swiotlb_map_sg_attrs() argument
880 for_each_sg(sgl, sg, nelems, i) { for_each_sg()
892 swiotlb_unmap_sg_attrs(hwdev, sgl, i, dir, for_each_sg()
894 sg_dma_len(sgl) = 0; for_each_sg()
907 swiotlb_map_sg(struct device *hwdev, struct scatterlist *sgl, int nelems, swiotlb_map_sg() argument
910 return swiotlb_map_sg_attrs(hwdev, sgl, nelems, dir, NULL); swiotlb_map_sg()
919 swiotlb_unmap_sg_attrs(struct device *hwdev, struct scatterlist *sgl, swiotlb_unmap_sg_attrs() argument
927 for_each_sg(sgl, sg, nelems, i) swiotlb_unmap_sg_attrs()
934 swiotlb_unmap_sg(struct device *hwdev, struct scatterlist *sgl, int nelems, swiotlb_unmap_sg() argument
937 return swiotlb_unmap_sg_attrs(hwdev, sgl, nelems, dir, NULL); swiotlb_unmap_sg()
949 swiotlb_sync_sg(struct device *hwdev, struct scatterlist *sgl, swiotlb_sync_sg() argument
956 for_each_sg(sgl, sg, nelems, i) swiotlb_sync_sg()
/linux-4.1.27/arch/tile/include/asm/
hv_driver.h:42 HV_SGL sgl[/* sgl_len */], __hv64 offset, tile_hv_dev_preada()
45 return hv_dev_preada(devhdl, flags, sgl_len, sgl, tile_hv_dev_preada()
52 HV_SGL sgl[/* sgl_len */], __hv64 offset, tile_hv_dev_pwritea()
55 return hv_dev_pwritea(devhdl, flags, sgl_len, sgl, tile_hv_dev_pwritea()
/linux-4.1.27/tools/virtio/linux/
scatterlist.h:85 * @sgl: Second scatterlist
88 * Links @prv@ and @sgl@ together, to form a longer scatterlist.
92 struct scatterlist *sgl) sg_chain()
104 prv[prv_nents - 1].page_link = ((unsigned long) sgl | 0x01) & ~0x02; sg_chain()
159 static inline void sg_init_table(struct scatterlist *sgl, unsigned int nents) sg_init_table() argument
161 memset(sgl, 0, sizeof(*sgl) * nents); sg_init_table()
166 sgl[i].sg_magic = SG_MAGIC; sg_init_table()
169 sg_mark_end(&sgl[nents - 1]); sg_init_table()
91 sg_chain(struct scatterlist *prv, unsigned int prv_nents, struct scatterlist *sgl) sg_chain() argument
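The `((unsigned long) sgl | 0x01) & ~0x02` in sg_chain() above encodes the entry type in the low bits of page_link: bit 0 flags a chain pointer, bit 1 flags the last entry, and chaining clears any stale end mark. The two predicates below restate that encoding; they mirror the kernel's own sg_is_chain()/sg_is_last() helpers and are shown only as a sketch:

#include <linux/scatterlist.h>

static bool entry_is_chain(const struct scatterlist *sg)
{
	return sg->page_link & 0x01;	/* set when the entry points at the next list */
}

static bool entry_is_last(const struct scatterlist *sg)
{
	return sg->page_link & 0x02;	/* set by sg_mark_end(), cleared by sg_chain() */
}
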
/linux-4.1.27/arch/parisc/math-emu/
fpudispatch.c:424 case 0: /* sgl/sgl */ decode_0c()
426 case 1: /* sgl/dbl */ decode_0c()
429 case 2: /* dbl/sgl */ decode_0c()
437 case 0: /* sgl/sgl */ decode_0c()
440 case 1: /* sgl/dbl */ decode_0c()
443 case 2: /* dbl/sgl */ decode_0c()
452 case 0: /* sgl/sgl */ decode_0c()
455 case 1: /* sgl/dbl */ decode_0c()
458 case 2: /* dbl/sgl */ decode_0c()
467 case 0: /* sgl/sgl */ decode_0c()
470 case 1: /* sgl/dbl */ decode_0c()
473 case 2: /* dbl/sgl */ decode_0c()
482 case 0: /* sgl/sgl */ decode_0c()
485 case 1: /* sgl/dbl */ decode_0c()
488 case 2: /* dbl/sgl */ decode_0c()
497 case 0: /* sgl/sgl */ decode_0c()
500 case 1: /* sgl/dbl */ decode_0c()
503 case 2: /* dbl/sgl */ decode_0c()
512 case 0: /* sgl/sgl */ decode_0c()
515 case 1: /* sgl/dbl */ decode_0c()
518 case 2: /* dbl/sgl */ decode_0c()
831 case 0: /* sgl/sgl */ decode_0e()
833 case 1: /* sgl/dbl */ decode_0e()
836 case 2: /* dbl/sgl */ decode_0e()
844 case 0: /* sgl/sgl */ decode_0e()
847 case 1: /* sgl/dbl */ decode_0e()
850 case 2: /* dbl/sgl */ decode_0e()
859 case 0: /* sgl/sgl */ decode_0e()
862 case 1: /* sgl/dbl */ decode_0e()
865 case 2: /* dbl/sgl */ decode_0e()
874 case 0: /* sgl/sgl */ decode_0e()
877 case 1: /* sgl/dbl */ decode_0e()
880 case 2: /* dbl/sgl */ decode_0e()
889 case 0: /* sgl/sgl */ decode_0e()
892 case 1: /* sgl/dbl */ decode_0e()
895 case 2: /* dbl/sgl */ decode_0e()
904 case 0: /* sgl/sgl */ decode_0e()
907 case 1: /* sgl/dbl */ decode_0e()
910 case 2: /* dbl/sgl */ decode_0e()
919 case 0: /* sgl/sgl */ decode_0e()
922 case 1: /* sgl/dbl */ decode_0e()
925 case 2: /* dbl/sgl */ decode_0e()
1135 fmt = extru(ir, fpmultifmt, 1); /* get sgl/dbl flag */ decode_06()
1154 /* special case FMPYCFXT, see sgl case below */ decode_06()
1273 fmt = extru(ir, fpmultifmt, 1); /* get sgl/dbl flag */ decode_26()
/linux-4.1.27/arch/microblaze/kernel/
dma.c:54 static int dma_direct_map_sg(struct device *dev, struct scatterlist *sgl, dma_direct_map_sg() argument
62 for_each_sg(sgl, sg, nents, i) { for_each_sg()
131 struct scatterlist *sgl, int nents, dma_direct_sync_sg_for_cpu()
139 for_each_sg(sgl, sg, nents, i) dma_direct_sync_sg_for_cpu()
145 struct scatterlist *sgl, int nents, dma_direct_sync_sg_for_device()
153 for_each_sg(sgl, sg, nents, i) dma_direct_sync_sg_for_device()
130 dma_direct_sync_sg_for_cpu(struct device *dev, struct scatterlist *sgl, int nents, enum dma_data_direction direction) dma_direct_sync_sg_for_cpu() argument
144 dma_direct_sync_sg_for_device(struct device *dev, struct scatterlist *sgl, int nents, enum dma_data_direction direction) dma_direct_sync_sg_for_device() argument
/linux-4.1.27/arch/arm64/mm/
dma-mapping.c:229 static int __swiotlb_map_sg_attrs(struct device *dev, struct scatterlist *sgl, __swiotlb_map_sg_attrs() argument
236 ret = swiotlb_map_sg_attrs(dev, sgl, nelems, dir, attrs); __swiotlb_map_sg_attrs()
238 for_each_sg(sgl, sg, ret, i) __swiotlb_map_sg_attrs()
246 struct scatterlist *sgl, int nelems, __swiotlb_unmap_sg_attrs()
254 for_each_sg(sgl, sg, nelems, i) __swiotlb_unmap_sg_attrs()
257 swiotlb_unmap_sg_attrs(dev, sgl, nelems, dir, attrs); __swiotlb_unmap_sg_attrs()
279 struct scatterlist *sgl, int nelems, __swiotlb_sync_sg_for_cpu()
286 for_each_sg(sgl, sg, nelems, i) __swiotlb_sync_sg_for_cpu()
289 swiotlb_sync_sg_for_cpu(dev, sgl, nelems, dir); __swiotlb_sync_sg_for_cpu()
293 struct scatterlist *sgl, int nelems, __swiotlb_sync_sg_for_device()
299 swiotlb_sync_sg_for_device(dev, sgl, nelems, dir); __swiotlb_sync_sg_for_device()
301 for_each_sg(sgl, sg, nelems, i) __swiotlb_sync_sg_for_device()
245 __swiotlb_unmap_sg_attrs(struct device *dev, struct scatterlist *sgl, int nelems, enum dma_data_direction dir, struct dma_attrs *attrs) __swiotlb_unmap_sg_attrs() argument
278 __swiotlb_sync_sg_for_cpu(struct device *dev, struct scatterlist *sgl, int nelems, enum dma_data_direction dir) __swiotlb_sync_sg_for_cpu() argument
292 __swiotlb_sync_sg_for_device(struct device *dev, struct scatterlist *sgl, int nelems, enum dma_data_direction dir) __swiotlb_sync_sg_for_device() argument
/linux-4.1.27/drivers/gpu/drm/exynos/
exynos_drm_dmabuf.c:59 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, exynos_gem_detach_dma_buf()
100 rd = buf->sgt->sgl; exynos_gem_map_dma_buf()
101 wr = sgt->sgl; exynos_gem_map_dma_buf()
109 nents = dma_map_sg(attach->dev, sgt->sgl, sgt->orig_nents, dir); exynos_gem_map_dma_buf()
111 DRM_ERROR("failed to map sgl with iommu.\n"); exynos_gem_map_dma_buf()
203 struct scatterlist *sgl; exynos_dmabuf_prime_import() local
249 sgl = sgt->sgl; exynos_dmabuf_prime_import()
252 buffer->dma_addr = sg_dma_address(sgl); exynos_dmabuf_prime_import()
exynos_drm_gem.c:86 struct scatterlist *sgl; exynos_drm_gem_map_buf() local
98 sgl = buf->sgt->sgl; exynos_drm_gem_map_buf()
99 for_each_sg(buf->sgt->sgl, sgl, buf->sgt->nents, i) { exynos_drm_gem_map_buf()
100 if (page_offset < (sgl->length >> PAGE_SHIFT)) exynos_drm_gem_map_buf()
102 page_offset -= (sgl->length >> PAGE_SHIFT); exynos_drm_gem_map_buf()
105 pfn = __phys_to_pfn(sg_phys(sgl)) + page_offset; exynos_drm_gem_map_buf()
486 nents = dma_map_sg(drm_dev->dev, sgt->sgl, sgt->nents, dir); exynos_gem_map_sgt_with_dma()
488 DRM_ERROR("failed to map sgl with dma.\n"); exynos_gem_map_sgt_with_dma()
501 dma_unmap_sg(drm_dev->dev, sgt->sgl, sgt->nents, dir); exynos_gem_unmap_sgt_from_dma()
/linux-4.1.27/samples/kfifo/
dma-example.c:66 printk(KERN_INFO "DMA sgl entries: %d\n", nents); example_init()
68 /* fifo is full and no sgl was created */ example_init()
96 printk(KERN_INFO "DMA sgl entries: %d\n", nents); example_init()
98 /* no data was available and no sgl was created */ example_init()
/linux-4.1.27/include/xen/
swiotlb-xen.h:28 xen_swiotlb_map_sg_attrs(struct device *hwdev, struct scatterlist *sgl,
33 xen_swiotlb_unmap_sg_attrs(struct device *hwdev, struct scatterlist *sgl,
/linux-4.1.27/drivers/gpu/drm/msm/
msm_iommu.c:59 for_each_sg(sgt->sgl, sg, sgt->nents, i) { msm_iommu_map()
77 for_each_sg(sgt->sgl, sg, i, j) { msm_iommu_map()
94 for_each_sg(sgt->sgl, sg, sgt->nents, i) { msm_iommu_unmap()
msm_gem.c:104 dma_map_sg(dev->dev, msm_obj->sgt->sgl, get_pages()
120 dma_unmap_sg(obj->dev->dev, msm_obj->sgt->sgl, put_pages()
/linux-4.1.27/drivers/infiniband/hw/cxgb3/
iwch_qp.c:77 wqe->send.sgl[i].stag = cpu_to_be32(wr->sg_list[i].lkey); build_rdma_send()
78 wqe->send.sgl[i].len = cpu_to_be32(wr->sg_list[i].length); build_rdma_send()
79 wqe->send.sgl[i].to = cpu_to_be64(wr->sg_list[i].addr); build_rdma_send()
103 wqe->write.sgl[0].stag = wr->ex.imm_data; build_rdma_write()
104 wqe->write.sgl[0].len = cpu_to_be32(0); build_rdma_write()
114 wqe->write.sgl[i].stag = build_rdma_write()
116 wqe->write.sgl[i].len = build_rdma_write()
118 wqe->write.sgl[i].to = build_rdma_write()
264 wqe->recv.sgl[i].stag = cpu_to_be32(wr->sg_list[i].lkey); build_rdma_recv()
265 wqe->recv.sgl[i].len = cpu_to_be32(wr->sg_list[i].length); build_rdma_recv()
268 wqe->recv.sgl[i].to = cpu_to_be64(((u32)wr->sg_list[i].addr) & build_rdma_recv()
275 wqe->recv.sgl[i].stag = 0; build_rdma_recv()
276 wqe->recv.sgl[i].len = 0; build_rdma_recv()
277 wqe->recv.sgl[i].to = 0; build_rdma_recv()
330 wqe->recv.sgl[i].stag = 0; build_zero_stag_recv()
331 wqe->recv.sgl[i].len = cpu_to_be32(wr->sg_list[i].length); build_zero_stag_recv()
332 wqe->recv.sgl[i].to = cpu_to_be64(wr->sg_list[i].addr); build_zero_stag_recv()
338 wqe->recv.sgl[i].stag = 0; build_zero_stag_recv()
339 wqe->recv.sgl[i].len = 0; build_zero_stag_recv()
340 wqe->recv.sgl[i].to = 0; build_zero_stag_recv()
540 struct ib_sge sgl; iwch_bind_mw() local
570 sgl.addr = mw_bind->bind_info.addr; iwch_bind_mw()
571 sgl.lkey = mw_bind->bind_info.mr->lkey; iwch_bind_mw()
572 sgl.length = mw_bind->bind_info.length; iwch_bind_mw()
583 err = iwch_sgl2pbl_map(rhp, &sgl, 1, &pbl_addr, &page_size); iwch_bind_mw()
794 term = (struct terminate_message *)wqe->send.sgl; iwch_post_terminate()
cxio_wr.h:176 struct t3_sge sgl[T3_MAX_SGE]; /* 4+ */ member in struct:t3_send_wr
238 struct t3_sge sgl[T3_MAX_SGE]; /* 5+ */ member in struct:t3_rdma_write_wr
274 struct t3_sge sgl[T3_MAX_SGE]; /* 3+ */ member in struct:t3_receive_wr
/linux-4.1.27/drivers/staging/android/ion/
ion_carveout_heap.c:64 struct page *page = sg_page(table->sgl); ion_carveout_heap_phys()
97 sg_set_page(table->sgl, pfn_to_page(PFN_DOWN(paddr)), size, 0); ion_carveout_heap_allocate()
113 struct page *page = sg_page(table->sgl); ion_carveout_heap_free()
119 dma_sync_sg_for_device(NULL, table->sgl, table->nents, ion_carveout_heap_free()
ion_chunk_heap.c:67 sg = table->sgl; ion_chunk_heap_allocate()
82 sg = table->sgl; ion_chunk_heap_allocate()
108 dma_sync_sg_for_device(NULL, table->sgl, table->nents, ion_chunk_heap_free()
111 for_each_sg(table->sgl, sg, table->nents, i) { ion_chunk_heap_free()
ion_heap.c:48 for_each_sg(table->sgl, sg, table->nents, i) { ion_heap_map_kernel()
81 for_each_sg(table->sgl, sg, table->nents, i) { ion_heap_map_user()
118 static int ion_heap_sglist_zero(struct scatterlist *sgl, unsigned int nents, ion_heap_sglist_zero() argument
126 for_each_sg_page(sgl, &piter, nents, 0) { ion_heap_sglist_zero()
151 return ion_heap_sglist_zero(table->sgl, table->nents, pgprot); ion_heap_buffer_zero()
ion_system_heap.c:160 sg = table->sgl; ion_system_heap_allocate()
193 for_each_sg(table->sgl, sg, table->nents, i) ion_system_heap_free()
344 sg_set_page(table->sgl, page, len, 0); ion_system_contig_heap_allocate()
364 struct page *page = sg_page(table->sgl); ion_system_contig_heap_free()
379 struct page *page = sg_page(table->sgl); ion_system_contig_heap_phys()
ion_test.c:66 for_each_sg_page(table->sgl, &sg_iter, table->nents, offset_page) { ion_handle_test_dma()
ion.c:232 for_each_sg(table->sgl, sg, table->nents, i) { ion_buffer_create()
255 for_each_sg(buffer->sg_table->sgl, sg, buffer->sg_table->nents, i) ion_buffer_create()
1220 dma_sync_sg_for_device(NULL, buffer->sg_table->sgl, ion_sync_for_device()
/linux-4.1.27/drivers/gpu/drm/omapdrm/
omap_gem_dmabuf.c:48 sg_init_table(sg->sgl, 1); omap_gem_map_dma_buf()
49 sg_dma_len(sg->sgl) = obj->size; omap_gem_map_dma_buf()
50 sg_set_page(sg->sgl, pfn_to_page(PFN_DOWN(paddr)), obj->size, 0); omap_gem_map_dma_buf()
51 sg_dma_address(sg->sgl) = paddr; omap_gem_map_dma_buf()
/linux-4.1.27/drivers/gpu/drm/armada/
armada_gem.c:444 for_each_sg(sgt->sgl, sg, count, i) { armada_gem_prime_map_dma_buf()
456 if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) { armada_gem_prime_map_dma_buf()
465 sg_set_page(sgt->sgl, dobj->page, dobj->obj.size, 0); armada_gem_prime_map_dma_buf()
467 if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) armada_gem_prime_map_dma_buf()
473 sg_dma_address(sgt->sgl) = dobj->dev_addr; armada_gem_prime_map_dma_buf()
474 sg_dma_len(sgt->sgl) = dobj->obj.size; armada_gem_prime_map_dma_buf()
481 for_each_sg(sgt->sgl, sg, num, i) armada_gem_prime_map_dma_buf()
498 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, dir); armada_gem_prime_unmap_dma_buf()
502 for_each_sg(sgt->sgl, sg, sgt->nents, i) armada_gem_prime_unmap_dma_buf()
610 if (sg_dma_len(dobj->sgt->sgl) < dobj->obj.size) { armada_gem_map_import()
614 dobj->dev_addr = sg_dma_address(dobj->sgt->sgl); armada_gem_map_import()
/linux-4.1.27/include/crypto/
if_alg.h:78 int af_alg_make_sg(struct af_alg_sgl *sgl, struct iov_iter *iter, int len);
79 void af_alg_free_sg(struct af_alg_sgl *sgl);
/linux-4.1.27/drivers/media/platform/
m2m-deinterlace.c:257 ctx->xt->sgl[0].size = s_width; deinterlace_issue_dma()
258 ctx->xt->sgl[0].icg = s_width; deinterlace_issue_dma()
264 ctx->xt->sgl[0].size = s_width; deinterlace_issue_dma()
265 ctx->xt->sgl[0].icg = s_width; deinterlace_issue_dma()
271 ctx->xt->sgl[0].size = s_width / 2; deinterlace_issue_dma()
272 ctx->xt->sgl[0].icg = s_width / 2; deinterlace_issue_dma()
278 ctx->xt->sgl[0].size = s_width / 2; deinterlace_issue_dma()
279 ctx->xt->sgl[0].icg = s_width / 2; deinterlace_issue_dma()
285 ctx->xt->sgl[0].size = s_width / 2; deinterlace_issue_dma()
286 ctx->xt->sgl[0].icg = s_width / 2; deinterlace_issue_dma()
292 ctx->xt->sgl[0].size = s_width / 2; deinterlace_issue_dma()
293 ctx->xt->sgl[0].icg = s_width / 2; deinterlace_issue_dma()
299 ctx->xt->sgl[0].size = s_width; deinterlace_issue_dma()
300 ctx->xt->sgl[0].icg = s_width; deinterlace_issue_dma()
306 ctx->xt->sgl[0].size = s_width / 2; deinterlace_issue_dma()
307 ctx->xt->sgl[0].icg = s_width / 2; deinterlace_issue_dma()
313 ctx->xt->sgl[0].size = s_width / 2; deinterlace_issue_dma()
314 ctx->xt->sgl[0].icg = s_width / 2; deinterlace_issue_dma()
320 ctx->xt->sgl[0].size = s_width * 2; deinterlace_issue_dma()
321 ctx->xt->sgl[0].icg = s_width * 2; deinterlace_issue_dma()
327 ctx->xt->sgl[0].size = s_width * 2; deinterlace_issue_dma()
328 ctx->xt->sgl[0].icg = s_width * 2; deinterlace_issue_dma()
335 ctx->xt->sgl[0].size = s_width * 2; deinterlace_issue_dma()
336 ctx->xt->sgl[0].icg = s_width * 2; deinterlace_issue_dma()
/linux-4.1.27/drivers/media/v4l2-core/
videobuf2-dma-contig.c:59 for_each_sg(sgt->sgl, s, sgt->orig_nents, i) { vb2_dc_sgt_foreach_page()
73 dma_addr_t expected = sg_dma_address(sgt->sgl); vb2_dc_get_contiguous_size()
77 for_each_sg(sgt->sgl, s, sgt->nents, i) { vb2_dc_get_contiguous_size()
123 dma_sync_sg_for_device(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir); vb2_dc_prepare()
135 dma_sync_sg_for_cpu(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir); vb2_dc_finish()
261 rd = buf->sgt_base->sgl; vb2_dc_dmabuf_ops_attach()
262 wr = sgt->sgl; vb2_dc_dmabuf_ops_attach()
288 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, vb2_dc_dmabuf_ops_detach()
315 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, vb2_dc_dmabuf_ops_map()
321 ret = dma_map_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, dma_dir); vb2_dc_dmabuf_ops_map()
527 dma_unmap_sg_attrs(buf->dev, sgt->sgl, sgt->orig_nents, vb2_dc_put_userptr()
677 sgt->nents = dma_map_sg_attrs(buf->dev, sgt->sgl, sgt->orig_nents, vb2_dc_get_userptr()
693 buf->dma_addr = sg_dma_address(sgt->sgl); vb2_dc_get_userptr()
700 dma_unmap_sg_attrs(buf->dev, sgt->sgl, sgt->orig_nents, vb2_dc_get_userptr()
764 buf->dma_addr = sg_dma_address(sgt->sgl); vb2_dc_map_dmabuf()
videobuf2-dma-sg.c:150 if (dma_map_sg_attrs(buf->dev, sgt->sgl, sgt->nents, vb2_dma_sg_alloc()
190 dma_unmap_sg_attrs(buf->dev, sgt->sgl, sgt->nents, vb2_dma_sg_put()
212 dma_sync_sg_for_device(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir); vb2_dma_sg_prepare()
224 dma_sync_sg_for_cpu(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir); vb2_dma_sg_finish()
317 if (dma_map_sg_attrs(buf->dev, sgt->sgl, sgt->nents, vb2_dma_sg_get_userptr()
354 dma_unmap_sg_attrs(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir, &attrs); vb2_dma_sg_put_userptr()
464 rd = buf->dma_sgt->sgl; vb2_dma_sg_dmabuf_ops_attach()
465 wr = sgt->sgl; vb2_dma_sg_dmabuf_ops_attach()
491 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, vb2_dma_sg_dmabuf_ops_detach()
518 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, vb2_dma_sg_dmabuf_ops_map()
524 ret = dma_map_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, dma_dir); vb2_dma_sg_dmabuf_ops_map()
videobuf2-vmalloc.c:246 for_each_sg(sgt->sgl, sg, sgt->nents, i) { vb2_vmalloc_dmabuf_ops_attach()
276 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, vb2_vmalloc_dmabuf_ops_detach()
303 dma_unmap_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, vb2_vmalloc_dmabuf_ops_map()
309 ret = dma_map_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, dma_dir); vb2_vmalloc_dmabuf_ops_map()
videobuf-dma-sg.c:125 dprintk(2, "sgl: oops - no page\n"); videobuf_pages_to_sg()
130 dprintk(2, "sgl: oops - highmem page\n"); videobuf_pages_to_sg()
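The vb2_dc_get_contiguous_size() hits above boil down to the walk sketched here: follow the mapped list and stop at the first address gap. A sketch assuming an already-mapped sg_table:

#include <linux/scatterlist.h>

static unsigned long contiguous_size(struct sg_table *sgt)
{
	dma_addr_t expected = sg_dma_address(sgt->sgl);
	unsigned long size = 0;
	struct scatterlist *s;
	unsigned int i;

	for_each_sg(sgt->sgl, s, sgt->nents, i) {
		if (sg_dma_address(s) != expected)
			break;		/* first discontiguity ends the run */
		expected = sg_dma_address(s) + sg_dma_len(s);
		size += sg_dma_len(s);
	}
	return size;
}
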
/linux-4.1.27/drivers/net/ethernet/intel/ixgbe/
ixgbe_fcoe.c:53 ddp->sgl = NULL; ixgbe_fcoe_clear_ddp()
134 if (ddp->sgl) ixgbe_fcoe_ddp_put()
135 dma_unmap_sg(&adapter->pdev->dev, ddp->sgl, ddp->sgc, ixgbe_fcoe_ddp_put()
151 * @sgl: the scatter-gather list for this request
157 struct scatterlist *sgl, unsigned int sgc, ixgbe_fcoe_ddp_setup()
176 if (!netdev || !sgl) ixgbe_fcoe_ddp_setup()
192 if (ddp->sgl) { ixgbe_fcoe_ddp_setup()
193 e_err(drv, "xid 0x%x w/ non-null sgl=%p nents=%d\n", ixgbe_fcoe_ddp_setup()
194 xid, ddp->sgl, ddp->sgc); ixgbe_fcoe_ddp_setup()
211 /* setup dma from scsi command sgl */ ixgbe_fcoe_ddp_setup()
212 dmacount = dma_map_sg(&adapter->pdev->dev, sgl, sgc, DMA_FROM_DEVICE); ixgbe_fcoe_ddp_setup()
225 ddp->sgl = sgl; ixgbe_fcoe_ddp_setup()
229 for_each_sg(sgl, sg, dmacount, i) { for_each_sg()
347 dma_unmap_sg(&adapter->pdev->dev, sgl, sgc, DMA_FROM_DEVICE);
357 * @sgl: the scatter-gather list for this request
368 struct scatterlist *sgl, unsigned int sgc) ixgbe_fcoe_ddp_get()
370 return ixgbe_fcoe_ddp_setup(netdev, xid, sgl, sgc, 0); ixgbe_fcoe_ddp_get()
377 * @sgl: the scatter-gather list for this request
389 struct scatterlist *sgl, unsigned int sgc) ixgbe_fcoe_ddp_target()
391 return ixgbe_fcoe_ddp_setup(netdev, xid, sgl, sgc, 1); ixgbe_fcoe_ddp_target()
465 dma_unmap_sg(&adapter->pdev->dev, ddp->sgl, ixgbe_fcoe_ddp()
468 ddp->sgl = NULL; ixgbe_fcoe_ddp()
156 ixgbe_fcoe_ddp_setup(struct net_device *netdev, u16 xid, struct scatterlist *sgl, unsigned int sgc, int target_mode) ixgbe_fcoe_ddp_setup() argument
367 ixgbe_fcoe_ddp_get(struct net_device *netdev, u16 xid, struct scatterlist *sgl, unsigned int sgc) ixgbe_fcoe_ddp_get() argument
388 ixgbe_fcoe_ddp_target(struct net_device *netdev, u16 xid, struct scatterlist *sgl, unsigned int sgc) ixgbe_fcoe_ddp_target() argument
ixgbe_fcoe.h:64 struct scatterlist *sgl; member in struct:ixgbe_fcoe_ddp
/linux-4.1.27/include/linux/
scatterlist.h:13 struct scatterlist *sgl; /* the list */ member in struct:sg_table
130 * @sgl: Second scatterlist
133 * Links @prv@ and @sgl@ together, to form a longer scatterlist.
137 struct scatterlist *sgl) sg_chain()
153 prv[prv_nents - 1].page_link = ((unsigned long) sgl | 0x01) & ~0x02; sg_chain()
242 size_t sg_copy_from_buffer(struct scatterlist *sgl, unsigned int nents,
244 size_t sg_copy_to_buffer(struct scatterlist *sgl, unsigned int nents,
247 size_t sg_pcopy_from_buffer(struct scatterlist *sgl, unsigned int nents,
249 size_t sg_pcopy_to_buffer(struct scatterlist *sgl, unsigned int nents,
346 void sg_miter_start(struct sg_mapping_iter *miter, struct scatterlist *sgl,
136 sg_chain(struct scatterlist *prv, unsigned int prv_nents, struct scatterlist *sgl) sg_chain() argument
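A hedged sketch tying the sg_table declaration above to its allocation helpers: build a table, walk table.sgl with for_each_sg(), then release it (the fill step is elided and the function name is hypothetical):

#include <linux/gfp.h>
#include <linux/scatterlist.h>

static int example_table(unsigned int nents)
{
	struct sg_table table;
	struct scatterlist *sg;
	unsigned int i;
	int ret = sg_alloc_table(&table, nents, GFP_KERNEL);

	if (ret)
		return ret;
	for_each_sg(table.sgl, sg, table.nents, i)
		;	/* sg_set_page() each entry here */
	sg_free_table(&table);
	return 0;
}
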
swiotlb.h:82 swiotlb_map_sg_attrs(struct device *hwdev, struct scatterlist *sgl, int nelems,
86 swiotlb_unmap_sg_attrs(struct device *hwdev, struct scatterlist *sgl,
dma-mapping.h:267 #define dma_map_sg_attrs(dev, sgl, nents, dir, attrs) \
268 dma_map_sg(dev, sgl, nents, dir)
270 #define dma_unmap_sg_attrs(dev, sgl, nents, dir, attrs) \
271 dma_unmap_sg(dev, sgl, nents, dir)
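The map/unmap pairing used by the drivers throughout this listing, in its smallest form; dev and a populated table are assumed, a return of 0 from dma_map_sg() means failure, and unmap takes the original entry count:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int example_map(struct device *dev, struct sg_table *sgt)
{
	int nents = dma_map_sg(dev, sgt->sgl, sgt->orig_nents, DMA_TO_DEVICE);

	if (nents == 0)
		return -EIO;	/* mapping failed */
	/* ... device works through the nents coalesced segments ... */
	dma_unmap_sg(dev, sgt->sgl, sgt->orig_nents, DMA_TO_DEVICE);
	return 0;
}
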
dmaengine.h:139 * @src_sgl: If the 'icg' of sgl[] applies to Source (scattered read).
142 * @dst_sgl: If the 'icg' of sgl[] applies to Destination (scattered write).
146 * @frame_size: Number of chunks in a frame i.e, size of sgl[].
147 * @sgl: Array of {chunk,icg} pairs that make up a frame.
159 struct data_chunk sgl[0]; member in struct:dma_interleaved_template
662 struct dma_chan *chan, struct scatterlist *sgl,
713 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, dmaengine_prep_slave_sg()
716 return chan->device->device_prep_slave_sg(chan, sgl, sg_len, dmaengine_prep_slave_sg()
723 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, dmaengine_prep_rio_sg()
727 return chan->device->device_prep_slave_sg(chan, sgl, sg_len, dmaengine_prep_rio_sg()
712 dmaengine_prep_slave_sg( struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, enum dma_transfer_direction dir, unsigned long flags) dmaengine_prep_slave_sg() argument
722 dmaengine_prep_rio_sg( struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, enum dma_transfer_direction dir, unsigned long flags, struct rio_dma_ext *rio_ext) dmaengine_prep_rio_sg() argument
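A minimal use of dmaengine_prep_slave_sg() as declared above; the channel and the already-mapped list are assumed to come from the usual dmaengine setup, and names here are illustrative:

#include <linux/dmaengine.h>

static int example_submit(struct dma_chan *chan, struct scatterlist *sgl,
			  unsigned int sg_len)
{
	struct dma_async_tx_descriptor *desc;

	desc = dmaengine_prep_slave_sg(chan, sgl, sg_len,
				       DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
	if (!desc)
		return -ENOMEM;
	dmaengine_submit(desc);
	dma_async_issue_pending(chan);
	return 0;
}
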
kfifo.h:648 * @sgl: pointer to the scatterlist array
658 #define kfifo_dma_in_prepare(fifo, sgl, nents, len) \
661 struct scatterlist *__sgl = (sgl); \
697 * @sgl: pointer to the scatterlist array
709 #define kfifo_dma_out_prepare(fifo, sgl, nents, len) \
712 struct scatterlist *__sgl = (sgl); \
792 struct scatterlist *sgl, int nents, unsigned int len);
795 struct scatterlist *sgl, int nents, unsigned int len);
814 struct scatterlist *sgl, int nents, unsigned int len, size_t recsize);
820 struct scatterlist *sgl, int nents, unsigned int len, size_t recsize);
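A sketch of the kfifo DMA prepare/finish dance declared above; the actual mapping and transfer are omitted, and the fifo declaration is illustrative:

#include <linux/kfifo.h>
#include <linux/scatterlist.h>

static DEFINE_KFIFO(fifo, unsigned char, 4096);

static void example_dma_in(unsigned int len)
{
	struct scatterlist sg[2];	/* the fifo may wrap: up to two chunks */
	unsigned int nents;

	nents = kfifo_dma_in_prepare(&fifo, sg, ARRAY_SIZE(sg), len);
	if (!nents)
		return;		/* fifo full, no sgl was built */
	/* ... map sg, run the transfer, wait for completion ... */
	kfifo_dma_in_finish(&fifo, len);	/* commit the bytes just written */
}
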
/linux-4.1.27/drivers/scsi/lpfc/
lpfc_scsi.c:182 struct sli4_sge *sgl = (struct sli4_sge *)lpfc_cmd->fcp_bpl; lpfc_sli4_set_rsp_sgl_last() local
183 if (sgl) { lpfc_sli4_set_rsp_sgl_last()
184 sgl += 1; lpfc_sli4_set_rsp_sgl_last()
185 sgl->word2 = le32_to_cpu(sgl->word2); lpfc_sli4_set_rsp_sgl_last()
186 bf_set(lpfc_sli4_sge_last, sgl, 1); lpfc_sli4_set_rsp_sgl_last()
187 sgl->word2 = cpu_to_le32(sgl->word2); lpfc_sli4_set_rsp_sgl_last()
617 * For single SCSI buffer sgl with non-contiguous xri, if any, it shall use
648 /* a hole in xri block, form a sgl posting block */ list_for_each_entry_safe()
657 /* enough sgls for non-embed sgl mbox command */ list_for_each_entry_safe()
667 /* end of repost sgl list condition for SCSI buffers */ list_for_each_entry_safe()
670 /* last sgl posting block */ list_for_each_entry_safe()
674 /* last single sgl with non-contiguous xri */ list_for_each_entry_safe()
693 /* success, put on SCSI buffer sgl list */ list_for_each_entry_safe()
713 /* put posted SCSI buffer-sgl posted on SCSI buffer sgl list */ list_for_each_entry_safe()
729 /* Push SCSI buffers with sgl posted to the available list */
794 struct sli4_sge *sgl; lpfc_new_scsi_buf_s4() local
874 sgl = (struct sli4_sge *)psb->fcp_bpl; lpfc_new_scsi_buf_s4()
884 sgl->addr_hi = cpu_to_le32(putPaddrHigh(pdma_phys_fcp_cmd)); lpfc_new_scsi_buf_s4()
885 sgl->addr_lo = cpu_to_le32(putPaddrLow(pdma_phys_fcp_cmd)); lpfc_new_scsi_buf_s4()
886 sgl->word2 = le32_to_cpu(sgl->word2); lpfc_new_scsi_buf_s4()
887 bf_set(lpfc_sli4_sge_last, sgl, 0); lpfc_new_scsi_buf_s4()
888 sgl->word2 = cpu_to_le32(sgl->word2); lpfc_new_scsi_buf_s4()
889 sgl->sge_len = cpu_to_le32(sizeof(struct fcp_cmnd)); lpfc_new_scsi_buf_s4()
890 sgl++; lpfc_new_scsi_buf_s4()
893 sgl->addr_hi = cpu_to_le32(putPaddrHigh(pdma_phys_fcp_rsp)); lpfc_new_scsi_buf_s4()
894 sgl->addr_lo = cpu_to_le32(putPaddrLow(pdma_phys_fcp_rsp)); lpfc_new_scsi_buf_s4()
895 sgl->word2 = le32_to_cpu(sgl->word2); lpfc_new_scsi_buf_s4()
896 bf_set(lpfc_sli4_sge_last, sgl, 1); lpfc_new_scsi_buf_s4()
897 sgl->word2 = cpu_to_le32(sgl->word2); lpfc_new_scsi_buf_s4()
898 sgl->sge_len = cpu_to_le32(sizeof(struct fcp_rsp)); lpfc_new_scsi_buf_s4()
908 * We are setting the bpl to point to our sgl. An sgl's lpfc_new_scsi_buf_s4()
2232 * @sgl: pointer to buffer list for protection groups
2259 struct sli4_sge *sgl, int datasegcnt) lpfc_bg_setup_sgl()
2294 diseed = (struct sli4_sge_diseed *) sgl; lpfc_bg_setup_sgl()
2296 bf_set(lpfc_sli4_sge_type, sgl, LPFC_SGE_TYPE_DISEED); lpfc_bg_setup_sgl()
2331 sgl++; lpfc_bg_setup_sgl()
2337 sgl->addr_lo = cpu_to_le32(putPaddrLow(physaddr)); scsi_for_each_sg()
2338 sgl->addr_hi = cpu_to_le32(putPaddrHigh(physaddr)); scsi_for_each_sg()
2340 bf_set(lpfc_sli4_sge_last, sgl, 1); scsi_for_each_sg()
2342 bf_set(lpfc_sli4_sge_last, sgl, 0); scsi_for_each_sg()
2343 bf_set(lpfc_sli4_sge_offset, sgl, dma_offset); scsi_for_each_sg()
2344 bf_set(lpfc_sli4_sge_type, sgl, LPFC_SGE_TYPE_DATA); scsi_for_each_sg()
2346 sgl->sge_len = cpu_to_le32(dma_len); scsi_for_each_sg()
2349 sgl++; scsi_for_each_sg()
2361 * @sgl: pointer to buffer list for protection groups
2396 struct sli4_sge *sgl, int datacnt, int protcnt) lpfc_bg_setup_sgl_prot()
2455 diseed = (struct sli4_sge_diseed *) sgl; lpfc_bg_setup_sgl_prot()
2457 bf_set(lpfc_sli4_sge_type, sgl, LPFC_SGE_TYPE_DISEED); lpfc_bg_setup_sgl_prot()
2498 /* advance sgl and increment bde count */ lpfc_bg_setup_sgl_prot()
2500 sgl++; lpfc_bg_setup_sgl_prot()
2510 sgl->word2 = 0; lpfc_bg_setup_sgl_prot()
2511 bf_set(lpfc_sli4_sge_type, sgl, LPFC_SGE_TYPE_DIF); lpfc_bg_setup_sgl_prot()
2512 sgl->addr_hi = le32_to_cpu(putPaddrHigh(protphysaddr)); lpfc_bg_setup_sgl_prot()
2513 sgl->addr_lo = le32_to_cpu(putPaddrLow(protphysaddr)); lpfc_bg_setup_sgl_prot()
2514 sgl->word2 = cpu_to_le32(sgl->word2); lpfc_bg_setup_sgl_prot()
2520 if ((sgl->addr_lo & 0xfff) + protgroup_len > 0x1000) { lpfc_bg_setup_sgl_prot()
2521 protgroup_remainder = 0x1000 - (sgl->addr_lo & 0xfff); lpfc_bg_setup_sgl_prot()
2546 sgl++; lpfc_bg_setup_sgl_prot()
2566 sgl->addr_lo = cpu_to_le32(putPaddrLow(dataphysaddr)); lpfc_bg_setup_sgl_prot()
2567 sgl->addr_hi = cpu_to_le32(putPaddrHigh(dataphysaddr)); lpfc_bg_setup_sgl_prot()
2568 bf_set(lpfc_sli4_sge_last, sgl, 0); lpfc_bg_setup_sgl_prot()
2569 bf_set(lpfc_sli4_sge_offset, sgl, dma_offset); lpfc_bg_setup_sgl_prot()
2570 bf_set(lpfc_sli4_sge_type, sgl, LPFC_SGE_TYPE_DATA); lpfc_bg_setup_sgl_prot()
2572 sgl->sge_len = cpu_to_le32(dma_len); lpfc_bg_setup_sgl_prot()
2588 sgl++; lpfc_bg_setup_sgl_prot()
2594 bf_set(lpfc_sli4_sge_last, sgl, 1); lpfc_bg_setup_sgl_prot()
2599 sgl++; lpfc_bg_setup_sgl_prot()
3080 /* If we have a prot sgl, save the DIF buffer */ lpfc_parse_bg_err()
3235 struct sli4_sge *sgl = (struct sli4_sge *)lpfc_cmd->fcp_bpl; lpfc_scsi_prep_dma_buf_s4() local
3262 sgl += 1; lpfc_scsi_prep_dma_buf_s4()
3264 sgl->word2 = le32_to_cpu(sgl->word2); lpfc_scsi_prep_dma_buf_s4()
3265 bf_set(lpfc_sli4_sge_last, sgl, 0); lpfc_scsi_prep_dma_buf_s4()
3266 sgl->word2 = cpu_to_le32(sgl->word2); lpfc_scsi_prep_dma_buf_s4()
3267 sgl += 1; lpfc_scsi_prep_dma_buf_s4()
3268 first_data_sgl = sgl; lpfc_scsi_prep_dma_buf_s4()
3293 sgl->addr_lo = cpu_to_le32(putPaddrLow(physaddr)); scsi_for_each_sg()
3294 sgl->addr_hi = cpu_to_le32(putPaddrHigh(physaddr)); scsi_for_each_sg()
3295 sgl->word2 = le32_to_cpu(sgl->word2); scsi_for_each_sg()
3297 bf_set(lpfc_sli4_sge_last, sgl, 1); scsi_for_each_sg()
3299 bf_set(lpfc_sli4_sge_last, sgl, 0); scsi_for_each_sg()
3300 bf_set(lpfc_sli4_sge_offset, sgl, dma_offset); scsi_for_each_sg()
3301 bf_set(lpfc_sli4_sge_type, sgl, LPFC_SGE_TYPE_DATA); scsi_for_each_sg()
3302 sgl->word2 = cpu_to_le32(sgl->word2); scsi_for_each_sg()
3303 sgl->sge_len = cpu_to_le32(dma_len); scsi_for_each_sg()
3305 sgl++; scsi_for_each_sg()
3319 sgl += 1;
3321 sgl->word2 = le32_to_cpu(sgl->word2);
3322 bf_set(lpfc_sli4_sge_last, sgl, 1);
3323 sgl->word2 = cpu_to_le32(sgl->word2);
3365 struct sli4_sge *sgl = (struct sli4_sge *)(lpfc_cmd->fcp_bpl); lpfc_bg_scsi_prep_dma_buf_s4() local
3373 * Start the lpfc command prep by bumping the sgl beyond fcp_cmnd lpfc_bg_scsi_prep_dma_buf_s4()
3389 sgl += 1; lpfc_bg_scsi_prep_dma_buf_s4()
3391 sgl->word2 = le32_to_cpu(sgl->word2); lpfc_bg_scsi_prep_dma_buf_s4()
3392 bf_set(lpfc_sli4_sge_last, sgl, 0); lpfc_bg_scsi_prep_dma_buf_s4()
3393 sgl->word2 = cpu_to_le32(sgl->word2); lpfc_bg_scsi_prep_dma_buf_s4()
3395 sgl += 1; lpfc_bg_scsi_prep_dma_buf_s4()
3410 num_sge = lpfc_bg_setup_sgl(phba, scsi_cmnd, sgl, lpfc_bg_scsi_prep_dma_buf_s4()
3441 num_sge = lpfc_bg_setup_sgl_prot(phba, scsi_cmnd, sgl, lpfc_bg_scsi_prep_dma_buf_s4()
2258 lpfc_bg_setup_sgl(struct lpfc_hba *phba, struct scsi_cmnd *sc, struct sli4_sge *sgl, int datasegcnt) lpfc_bg_setup_sgl() argument
2395 lpfc_bg_setup_sgl_prot(struct lpfc_hba *phba, struct scsi_cmnd *sc, struct sli4_sge *sgl, int datacnt, int protcnt) lpfc_bg_setup_sgl_prot() argument
lpfc_sli.c:898 * __lpfc_sli_get_sglq - Allocates an iocb object from sgl pool
2779 * exchange busy so sgl (xri) lpfc_sli_process_sol_iocb()
2812 * sgl (xri) process. lpfc_sli_process_sol_iocb()
6083 * lpfc_sli4_repost_els_sgl_list - Repost the els buffers sgl pages as block
6091 * buffer sgl with non-contiguous xri, if any, it shall use embedded SGL post
6125 /* a hole in xri block, form a sgl posting block */ lpfc_sli4_repost_els_sgl_list()
6134 /* enough sgls for non-embed sgl mbox command */ lpfc_sli4_repost_els_sgl_list()
6144 /* keep track of last sgl's xritag */ lpfc_sli4_repost_els_sgl_list()
6147 /* end of repost sgl list condition for els buffers */ lpfc_sli4_repost_els_sgl_list()
6158 /* successful, put sgl to posted list */ lpfc_sli4_repost_els_sgl_list()
6162 /* Failure, put sgl to free list */ lpfc_sli4_repost_els_sgl_list()
6166 "sgl, xritag:x%x\n", lpfc_sli4_repost_els_sgl_list()
6184 /* success, put sgl list to posted sgl list */ lpfc_sli4_repost_els_sgl_list()
6187 /* Failure, put sgl list to free sgl list */ lpfc_sli4_repost_els_sgl_list()
6192 "3160 Failed to post els sgl-list, " lpfc_sli4_repost_els_sgl_list()
6205 /* reset els sgl post count for next round of posting */ lpfc_sli4_repost_els_sgl_list()
6224 "3161 Failure to post els sgl to port.\n"); lpfc_sli4_repost_els_sgl_list()
6494 /* update host els and scsi xri-sgl sizes and mappings */ lpfc_sli4_hba_setup()
6498 "1400 Failed to update xri-sgl size and " lpfc_sli4_hba_setup()
6503 /* register the els sgl pool to the port */ lpfc_sli4_hba_setup()
6507 "0582 Error %d during els sgl post " lpfc_sli4_hba_setup()
6513 /* register the allocated scsi sgl pool to the port */ lpfc_sli4_hba_setup()
6517 "0383 Error %d during scsi sgl post " lpfc_sli4_hba_setup()
8033 * lpfc_sli4_bpl2sgl - Convert the bpl/bde to a sgl.
8039 * to a sgl list for the sli4 hardware. The physical address
8056 struct sli4_sge *sgl = NULL; lpfc_sli4_bpl2sgl() local
8067 sgl = (struct sli4_sge *)sglq->sgl; lpfc_sli4_bpl2sgl()
8089 sgl->addr_hi = bpl->addrHigh; lpfc_sli4_bpl2sgl()
8090 sgl->addr_lo = bpl->addrLow; lpfc_sli4_bpl2sgl()
8092 sgl->word2 = le32_to_cpu(sgl->word2); lpfc_sli4_bpl2sgl()
8094 bf_set(lpfc_sli4_sge_last, sgl, 1); lpfc_sli4_bpl2sgl()
8096 bf_set(lpfc_sli4_sge_last, sgl, 0); lpfc_sli4_bpl2sgl()
8098 * can assign it to the sgl. lpfc_sli4_bpl2sgl()
8101 sgl->sge_len = cpu_to_le32(bde.tus.f.bdeSize); lpfc_sli4_bpl2sgl()
8102 /* The offsets in the sgl need to be accumulated lpfc_sli4_bpl2sgl()
8113 bf_set(lpfc_sli4_sge_offset, sgl, offset); lpfc_sli4_bpl2sgl()
8114 bf_set(lpfc_sli4_sge_type, sgl, lpfc_sli4_bpl2sgl()
8118 sgl->word2 = cpu_to_le32(sgl->word2); lpfc_sli4_bpl2sgl()
8120 sgl++; lpfc_sli4_bpl2sgl()
8125 * before putting them in the sgl. lpfc_sli4_bpl2sgl()
8127 sgl->addr_hi = lpfc_sli4_bpl2sgl()
8129 sgl->addr_lo = lpfc_sli4_bpl2sgl()
8131 sgl->word2 = le32_to_cpu(sgl->word2); lpfc_sli4_bpl2sgl()
8132 bf_set(lpfc_sli4_sge_last, sgl, 1); lpfc_sli4_bpl2sgl()
8133 sgl->word2 = cpu_to_le32(sgl->word2); lpfc_sli4_bpl2sgl()
8134 sgl->sge_len = lpfc_sli4_bpl2sgl()
8233 * can assign it to the sgl. lpfc_sli4_iocb2wqe()
8702 /* These IO's already have an XRI and a mapped sgl. */ __lpfc_sli_issue_iocb_s4()
14218 * This routine will post the sgl pages for the IO that has the xritag
14227 * the second sgl can have between 1 and 256 entries.
14391 * @post_sgl_list: pointer to els sgl entry list.
14392 * @count: number of els sgl entries on the list.
14394 * This routine is invoked to post a block of driver's sgl pages to the
14405 struct lpfc_mbx_post_uembed_sgl_page1 *sgl; lpfc_sli4_post_els_sgl_list() local
14420 "2559 Block sgl registration required DMA " lpfc_sli4_post_els_sgl_list()
14443 sgl = (struct lpfc_mbx_post_uembed_sgl_page1 *)viraddr; lpfc_sli4_post_els_sgl_list()
14444 sgl_pg_pairs = &sgl->sgl_pg_pairs; lpfc_sli4_post_els_sgl_list()
14466 bf_set(lpfc_post_sgl_pages_xri, sgl, xritag_start);
14467 bf_set(lpfc_post_sgl_pages_xricnt, sgl, phba->sli4_hba.els_xri_cnt);
14468 sgl->word0 = cpu_to_le32(sgl->word0);
14475 shdr = (union lpfc_sli4_cfg_shdr *) &sgl->cfg_shdr;
14491 * lpfc_sli4_post_scsi_sgl_block - post a block of scsi sgl list to firmware
14496 * This routine is invoked to post a block of @count scsi sgl pages from a
14507 struct lpfc_mbx_post_uembed_sgl_page1 *sgl; lpfc_sli4_post_scsi_sgl_block() local
14524 "0217 Block sgl registration required DMA " lpfc_sli4_post_scsi_sgl_block()
14553 sgl = (struct lpfc_mbx_post_uembed_sgl_page1 *)viraddr; lpfc_sli4_post_scsi_sgl_block()
14554 sgl_pg_pairs = &sgl->sgl_pg_pairs; lpfc_sli4_post_scsi_sgl_block()
14577 bf_set(lpfc_post_sgl_pages_xri, sgl, xritag_start);
14578 bf_set(lpfc_post_sgl_pages_xricnt, sgl, pg_pairs);
14580 sgl->word0 = cpu_to_le32(sgl->word0);
14588 shdr = (union lpfc_sli4_cfg_shdr *) &sgl->cfg_shdr;
17035 fail_msg = "to convert bpl to sgl"; lpfc_drain_txq()
lpfc_sli4.h:628 enum lpfc_sge_type buff_type; /* is this a scsi sgl */
634 struct sli4_sge *sgl; /* pre-assigned SGL */ member in struct:lpfc_sglq
lpfc_init.c:3071 * lpfc_sli4_xri_sgl_update - update xri-sgl sizing and mapping
3075 * scsi sgl lists, and then goes through all sgls to update the physical
3077 * current els and allocated scsi sgl lists are 0s.
3094 * update on pci function's els xri-sgl list lpfc_sli4_xri_sgl_update()
3098 /* els xri-sgl expanded */ lpfc_sli4_xri_sgl_update()
3101 "3157 ELS xri-sgl count increased from " lpfc_sli4_xri_sgl_update()
3111 "ELS sgl entry:%d\n", i); lpfc_sli4_xri_sgl_update()
3126 sglq_entry->sgl = sglq_entry->virt; lpfc_sli4_xri_sgl_update()
3127 memset(sglq_entry->sgl, 0, LPFC_BPL_SIZE); lpfc_sli4_xri_sgl_update()
3137 /* els xri-sgl shrunk */ lpfc_sli4_xri_sgl_update()
3140 "3158 ELS xri-sgl count decreased from " lpfc_sli4_xri_sgl_update()
3165 "3163 ELS xri-sgl count unchanged: %d\n", lpfc_sli4_xri_sgl_update()
3178 "ELS sgl\n"); lpfc_sli4_xri_sgl_update()
3187 * update on pci function's allocated scsi xri-sgl list lpfc_sli4_xri_sgl_update()
3196 "2401 Current allocated SCSI xri-sgl count:%d, " lpfc_sli4_xri_sgl_update()
5386 /* Initialize sgl lists per host */ lpfc_sli4_driver_resource_setup()
5389 /* Allocate and initialize active sgl array */ lpfc_sli4_driver_resource_setup()
5393 "1430 Failed to initialize sgl list.\n"); lpfc_sli4_driver_resource_setup()
5542 /* Free the ELS sgl list */ lpfc_sli4_driver_resource_unset()
5777 * lpfc_free_sgl_list - Free a given sgl list.
5779 * @sglq_list: pointer to the head of sgl list.
5781 * This routine is invoked to free a give sgl list and memory.
5796 * lpfc_free_els_sgl_list - Free els sgl list.
5799 * This routine is invoked to free the driver's els sgl list and memory.
5814 /* Now free the sgl list */ lpfc_free_els_sgl_list()
5822 * This routine is invoked to allocate the driver's active sgl memory.
5854 * lpfc_init_sgl_list - Allocate and initialize sgl list.
5857 * This routine is invoked to allocate and initialize the driver's sgl
5858 * list and set up the sgl xritag tag array accordingly.
5868 /* els xri-sgl book keeping */ lpfc_init_sgl_list()
/linux-4.1.27/drivers/gpu/drm/tegra/
gem.c:117 err = iommu_map_sg(tegra->domain, bo->paddr, bo->sgt->sgl, tegra_bo_iommu_map()
214 for_each_sg(sgt->sgl, s, sgt->nents, i) tegra_bo_get_pages()
217 if (dma_map_sg(drm->dev, sgt->sgl, sgt->nents, DMA_TO_DEVICE) == 0) tegra_bo_get_pages()
357 bo->paddr = sg_dma_address(bo->sgt->sgl); tegra_bo_import()
534 for_each_sg(sgt->sgl, sg, bo->num_pages, i) tegra_gem_prime_map_dma_buf()
537 if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) tegra_gem_prime_map_dma_buf()
543 sg_dma_address(sgt->sgl) = bo->paddr; tegra_gem_prime_map_dma_buf()
544 sg_dma_len(sgt->sgl) = gem->size; tegra_gem_prime_map_dma_buf()
563 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, dir); tegra_gem_prime_unmap_dma_buf()
/linux-4.1.27/drivers/gpu/drm/i915/
i915_gem_dmabuf.c:64 src = obj->pages->sgl; i915_gem_map_dma_buf()
65 dst = st->sgl; i915_gem_map_dma_buf()
72 if (!dma_map_sg(attachment->dev, st->sgl, st->nents, dir)) { i915_gem_map_dma_buf()
100 dma_unmap_sg(attachment->dev, sg->sgl, sg->nents, dir); i915_gem_unmap_dma_buf()
139 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, 0) i915_gem_dmabuf_vmap()
i915_gem_render_state.c:88 page = sg_page(so->obj->pages->sgl); render_state_setup()
i915_gem_gtt.c:557 for_each_sg_page(pages->sgl, &sg_iter, pages->nents, 0) { gen8_ppgtt_insert_entries()
1142 for_each_sg_page(pages->sgl, &sg_iter, pages->nents, 0) { gen6_ppgtt_insert_entries()
1723 obj->pages->sgl, obj->pages->nents, i915_gem_gtt_prepare_object()
1753 for_each_sg_page(st->sgl, &sg_iter, st->nents, 0) { gen8_ggtt_insert_entries()
1799 for_each_sg_page(st->sgl, &sg_iter, st->nents, 0) { gen6_ggtt_insert_entries()
1991 obj->pages->sgl, obj->pages->nents, i915_gem_gtt_finish_object()
2611 struct scatterlist *sg = st->sgl; rotate_pages()
2674 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, 0) { intel_rotate_fb_obj_pages()
i915_gem_tiling.c:515 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, 0) { i915_gem_object_do_bit_17_swizzle()
545 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, 0) { i915_gem_object_save_bit_17_swizzle()
/linux-4.1.27/drivers/gpu/drm/udl/
udl_dmabuf.c:65 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, udl_detach_dma_buf()
117 rd = obj->sg->sgl; udl_map_dma_buf()
118 wr = sgt->sgl; udl_map_dma_buf()
126 nents = dma_map_sg(attach->dev, sgt->sgl, sgt->orig_nents, dir); udl_map_dma_buf()
128 DRM_ERROR("failed to map sgl with iommu.\n"); udl_map_dma_buf()
/linux-4.1.27/drivers/scsi/bnx2fc/
bnx2fc_hwi.c:1470 struct fcoe_ext_mul_sges_ctx *sgl; bnx2fc_init_seq_cleanup_task() local
1518 task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_addr.lo = bnx2fc_init_seq_cleanup_task()
1520 task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_addr.hi = bnx2fc_init_seq_cleanup_task()
1522 task->txwr_only.sgl_ctx.sgl.mul_sgl.sgl_size = bnx2fc_init_seq_cleanup_task()
1524 task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_off = bnx2fc_init_seq_cleanup_task()
1526 task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_idx = i; bnx2fc_init_seq_cleanup_task()
1536 sgl = &task->rxwr_only.union_ctx.read_info.sgl_ctx.sgl; bnx2fc_init_seq_cleanup_task()
1537 sgl->mul_sgl.cur_sge_addr.lo = (u32)phys_addr; bnx2fc_init_seq_cleanup_task()
1538 sgl->mul_sgl.cur_sge_addr.hi = (u32)((u64)phys_addr >> 32); bnx2fc_init_seq_cleanup_task()
1539 sgl->mul_sgl.sgl_size = bd_count; bnx2fc_init_seq_cleanup_task()
1540 sgl->mul_sgl.cur_sge_off = offset; /*adjusted offset */ bnx2fc_init_seq_cleanup_task()
1541 sgl->mul_sgl.cur_sge_idx = i; bnx2fc_init_seq_cleanup_task()
1593 struct fcoe_ext_mul_sges_ctx *sgl; bnx2fc_init_mp_task() local
1619 task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_addr.lo = bnx2fc_init_mp_task()
1621 task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_addr.hi = bnx2fc_init_mp_task()
1623 task->txwr_only.sgl_ctx.sgl.mul_sgl.sgl_size = 1; bnx2fc_init_mp_task()
1674 sgl = &task->rxwr_only.union_ctx.read_info.sgl_ctx.sgl; bnx2fc_init_mp_task()
1676 sgl->mul_sgl.cur_sge_addr.lo = (u32)mp_req->mp_resp_bd_dma; bnx2fc_init_mp_task()
1677 sgl->mul_sgl.cur_sge_addr.hi = bnx2fc_init_mp_task()
1679 sgl->mul_sgl.sgl_size = 1; bnx2fc_init_mp_task()
1691 struct fcoe_ext_mul_sges_ctx *sgl; bnx2fc_init_task() local
1729 task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_addr.lo = bnx2fc_init_task()
1731 task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_addr.hi = bnx2fc_init_task()
1733 task->txwr_only.sgl_ctx.sgl.mul_sgl.sgl_size = bnx2fc_init_task()
1792 sgl = &task->rxwr_only.union_ctx.read_info.sgl_ctx.sgl; bnx2fc_init_task()
1822 sgl->mul_sgl.cur_sge_addr.lo = (u32)bd_tbl->bd_tbl_dma; bnx2fc_init_task()
1823 sgl->mul_sgl.cur_sge_addr.hi = bnx2fc_init_task()
1825 sgl->mul_sgl.sgl_size = bd_count; bnx2fc_init_task()
1828 sgl->mul_sgl.cur_sge_addr.lo = (u32)bd_tbl->bd_tbl_dma; bnx2fc_init_task()
1829 sgl->mul_sgl.cur_sge_addr.hi = bnx2fc_init_task()
1831 sgl->mul_sgl.sgl_size = bd_count; bnx2fc_init_task()
/linux-4.1.27/drivers/infiniband/core/
H A Dumem.c54 ib_dma_unmap_sg(dev, umem->sg_head.sgl, __ib_umem_release()
58 for_each_sg(umem->sg_head.sgl, sg, umem->npages, i) { __ib_umem_release()
188 sg_list_start = umem->sg_head.sgl; ib_umem_get()
215 umem->sg_head.sgl,
326 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, i) ib_umem_page_count()
355 ret = sg_pcopy_to_buffer(umem->sg_head.sgl, umem->nmap, dst, length, ib_umem_copy_from()
/linux-4.1.27/drivers/media/platform/xilinx/
H A Dxilinx-dma.h75 * @sgl: data chunk structure for dma_interleaved_template
100 struct data_chunk sgl[1]; member in struct:xvip_dma
H A Dxilinx-dma.c363 dma->sgl[0].size = dma->format.width * dma->fmtinfo->bpp; xvip_dma_buffer_queue()
364 dma->sgl[0].icg = dma->format.bytesperline - dma->sgl[0].size; xvip_dma_buffer_queue()
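Those two xvip_dma_buffer_queue() lines carry the whole interleaved-DMA setup: the chunk size is one line of pixel payload, and the inter-chunk gap (icg) is whatever padding the line stride adds on top. A hedged illustration with invented numbers:

#include <linux/dmaengine.h>

static void fill_video_chunk(struct data_chunk *chunk)
{
	unsigned int width = 1920;		/* pixels per line (example) */
	unsigned int bpp = 2;			/* bytes per pixel (example) */
	unsigned int bytesperline = 4096;	/* line stride (example) */

	chunk->size = width * bpp;		  /* payload bytes per line */
	chunk->icg = bytesperline - chunk->size;  /* padding up to the stride */
}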
/linux-4.1.27/arch/alpha/kernel/
H A Dpci-noop.c141 static int alpha_noop_map_sg(struct device *dev, struct scatterlist *sgl, int nents, alpha_noop_map_sg() argument
147 for_each_sg(sgl, sg, nents, i) { for_each_sg()
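alpha_noop_map_sg() above is about the smallest map_sg there is: with no IOMMU, each entry's bus address is simply its physical address. A sketch of that shape (name illustrative; the real alpha code also validates the device's DMA mask):

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int my_noop_map_sg(struct device *dev, struct scatterlist *sgl,
			  int nents, enum dma_data_direction dir)
{
	struct scatterlist *sg;
	int i;

	for_each_sg(sgl, sg, nents, i) {
		sg->dma_address = sg_phys(sg);	/* identity mapping */
		sg_dma_len(sg) = sg->length;
	}
	return nents;
}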
/linux-4.1.27/drivers/net/ethernet/intel/i40e/
H A Di40e_fcoe.c166 if (ddp->sgl) { i40e_fcoe_ddp_unmap()
167 dma_unmap_sg(&pf->pdev->dev, ddp->sgl, ddp->sgc, i40e_fcoe_ddp_unmap()
169 ddp->sgl = NULL; i40e_fcoe_ddp_unmap()
740 if (!ddp->sgl) i40e_fcoe_handle_offload()
809 * @sgl: the scatter-gather list for this request
816 struct scatterlist *sgl, unsigned int sgc, i40e_fcoe_ddp_setup()
847 if (ddp->sgl) { i40e_fcoe_ddp_setup()
848 dev_info(&pf->pdev->dev, "xid 0x%x w/ non-null sgl=%p nents=%d\n", i40e_fcoe_ddp_setup()
849 xid, ddp->sgl, ddp->sgc); i40e_fcoe_ddp_setup()
865 /* setup dma from scsi command sgl */ i40e_fcoe_ddp_setup()
866 dmacount = dma_map_sg(&pf->pdev->dev, sgl, sgc, DMA_FROM_DEVICE); i40e_fcoe_ddp_setup()
868 dev_info(&pf->pdev->dev, "dma_map_sg for sgl %p, sgc %d failed\n", i40e_fcoe_ddp_setup()
869 sgl, sgc); i40e_fcoe_ddp_setup()
883 for_each_sg(sgl, sg, dmacount, i) { for_each_sg()
927 ddp->sgl = sgl;
942 dma_unmap_sg(&pf->pdev->dev, sgl, sgc, DMA_FROM_DEVICE);
952 * @sgl: the scatter-gather list for this request
963 struct scatterlist *sgl, unsigned int sgc) i40e_fcoe_ddp_get()
965 return i40e_fcoe_ddp_setup(netdev, xid, sgl, sgc, 0); i40e_fcoe_ddp_get()
972 * @sgl: the scatter-gather list for this request
984 struct scatterlist *sgl, unsigned int sgc) i40e_fcoe_ddp_target()
986 return i40e_fcoe_ddp_setup(netdev, xid, sgl, sgc, 1); i40e_fcoe_ddp_target()
815 i40e_fcoe_ddp_setup(struct net_device *netdev, u16 xid, struct scatterlist *sgl, unsigned int sgc, int target_mode) i40e_fcoe_ddp_setup() argument
962 i40e_fcoe_ddp_get(struct net_device *netdev, u16 xid, struct scatterlist *sgl, unsigned int sgc) i40e_fcoe_ddp_get() argument
983 i40e_fcoe_ddp_target(struct net_device *netdev, u16 xid, struct scatterlist *sgl, unsigned int sgc) i40e_fcoe_ddp_target() argument
H A Di40e_fcoe.h109 struct scatterlist *sgl; member in struct:i40e_fcoe_ddp
/linux-4.1.27/drivers/dma/sh/
H A Drcar-dmac.c823 rcar_dmac_chan_prep_sg(struct rcar_dmac_chan *chan, struct scatterlist *sgl, rcar_dmac_chan_prep_sg() argument
855 for_each_sg(sgl, sg, sg_len, i) { for_each_sg()
900 "chan%u: chunk %p/%p sgl %u@%p, %u/%u %pad -> %pad\n", for_each_sg()
1004 struct scatterlist sgl; rcar_dmac_prep_dma_memcpy() local
1009 sg_init_table(&sgl, 1); rcar_dmac_prep_dma_memcpy()
1010 sg_set_page(&sgl, pfn_to_page(PFN_DOWN(dma_src)), len, rcar_dmac_prep_dma_memcpy()
1012 sg_dma_address(&sgl) = dma_src; rcar_dmac_prep_dma_memcpy()
1013 sg_dma_len(&sgl) = len; rcar_dmac_prep_dma_memcpy()
1015 return rcar_dmac_chan_prep_sg(rchan, &sgl, 1, dma_dest, rcar_dmac_prep_dma_memcpy()
1020 rcar_dmac_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, rcar_dmac_prep_slave_sg() argument
1037 return rcar_dmac_chan_prep_sg(rchan, sgl, sg_len, dev_addr, rcar_dmac_prep_slave_sg()
1050 struct scatterlist *sgl; rcar_dmac_prep_dma_cyclic() local
1075 sgl = kcalloc(sg_len, sizeof(*sgl), GFP_NOWAIT); rcar_dmac_prep_dma_cyclic()
1076 if (!sgl) rcar_dmac_prep_dma_cyclic()
1079 sg_init_table(sgl, sg_len); rcar_dmac_prep_dma_cyclic()
1084 sg_set_page(&sgl[i], pfn_to_page(PFN_DOWN(src)), period_len, rcar_dmac_prep_dma_cyclic()
1086 sg_dma_address(&sgl[i]) = src; rcar_dmac_prep_dma_cyclic()
1087 sg_dma_len(&sgl[i]) = period_len; rcar_dmac_prep_dma_cyclic()
1092 desc = rcar_dmac_chan_prep_sg(rchan, sgl, sg_len, dev_addr, rcar_dmac_prep_dma_cyclic()
1095 kfree(sgl); rcar_dmac_prep_dma_cyclic()
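rcar_dmac_prep_dma_memcpy() above funnels memcpy through the same rcar_dmac_chan_prep_sg() path as slave and cyclic transfers by dressing the source up as a one-entry scatterlist. A sketch of just that wrapping step, assuming dma_src is the bus address of an already-mapped, physically contiguous buffer of len bytes:

#include <linux/mm.h>
#include <linux/scatterlist.h>

static void memcpy_as_single_sg(struct scatterlist *sgl,
				dma_addr_t dma_src, size_t len)
{
	sg_init_table(sgl, 1);
	sg_set_page(sgl, pfn_to_page(PFN_DOWN(dma_src)), len,
		    offset_in_page(dma_src));
	sg_dma_address(sgl) = dma_src;	/* buffer is already mapped, */
	sg_dma_len(sgl) = len;		/* so fill the DMA fields by hand */
}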
H A Dshdma-base.c564 struct scatterlist *sgl, unsigned int sg_len, dma_addr_t *addr, shdma_prep_sg()
574 for_each_sg(sgl, sg, sg_len, i) shdma_prep_sg()
591 for_each_sg(sgl, sg, sg_len, i) { for_each_sg()
665 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, shdma_prep_slave_sg()
688 return shdma_prep_sg(schan, sgl, sg_len, &slave_addr, shdma_prep_slave_sg()
706 struct scatterlist *sgl; shdma_prep_dma_cyclic() local
734 sgl = kcalloc(sg_len, sizeof(*sgl), GFP_KERNEL); shdma_prep_dma_cyclic()
735 if (!sgl) shdma_prep_dma_cyclic()
738 sg_init_table(sgl, sg_len); shdma_prep_dma_cyclic()
743 sg_set_page(&sgl[i], pfn_to_page(PFN_DOWN(src)), period_len, shdma_prep_dma_cyclic()
745 sg_dma_address(&sgl[i]) = src; shdma_prep_dma_cyclic()
746 sg_dma_len(&sgl[i]) = period_len; shdma_prep_dma_cyclic()
749 desc = shdma_prep_sg(schan, sgl, sg_len, &slave_addr, shdma_prep_dma_cyclic()
752 kfree(sgl); shdma_prep_dma_cyclic()
563 shdma_prep_sg(struct shdma_chan *schan, struct scatterlist *sgl, unsigned int sg_len, dma_addr_t *addr, enum dma_transfer_direction direction, unsigned long flags, bool cyclic) shdma_prep_sg() argument
664 shdma_prep_slave_sg( struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, enum dma_transfer_direction direction, unsigned long flags, void *context) shdma_prep_slave_sg() argument
H A Dusb-dmac.c419 usb_dmac_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, usb_dmac_prep_slave_sg() argument
440 for_each_sg(sgl, sg, sg_len, i) { for_each_sg()
/linux-4.1.27/drivers/spi/
H A Dspi-pxa2xx-dma.c55 for_each_sg(sgt->sgl, sg, sgt->nents, i) { pxa2xx_spi_map_dma_buffer()
67 nents = dma_map_sg(dmadev, sgt->sgl, sgt->nents, dir); pxa2xx_spi_map_dma_buffer()
88 dma_unmap_sg(dmadev, sgt->sgl, sgt->nents, dir); pxa2xx_spi_unmap_dma_buffer()
209 return dmaengine_prep_slave_sg(chan, sgt->sgl, nents, dir, pxa2xx_spi_dma_prepare_one()
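pxa2xx_spi_dma_prepare_one() and its map/unmap helpers above share one shape: dma_map_sg() the table, hand the mapped entry count to dmaengine_prep_slave_sg(), and unmap again if prep fails. A minimal hedged sketch of that sequence (the helper name my_prep_sg is illustrative):

#include <linux/dma-mapping.h>
#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

static struct dma_async_tx_descriptor *
my_prep_sg(struct dma_chan *chan, struct sg_table *sgt,
	   enum dma_transfer_direction dir)
{
	enum dma_data_direction map_dir =
		(dir == DMA_MEM_TO_DEV) ? DMA_TO_DEVICE : DMA_FROM_DEVICE;
	struct dma_async_tx_descriptor *txd;
	int nents;

	nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->nents, map_dir);
	if (!nents)
		return NULL;

	txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, dir,
				      DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!txd)	/* prep failed: release the mapping */
		dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, map_dir);
	return txd;
}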
H A Dspi-qup.c291 struct scatterlist *sgl; spi_qup_prep_sg() local
299 sgl = xfer->tx_sg.sgl; spi_qup_prep_sg()
303 sgl = xfer->rx_sg.sgl; spi_qup_prep_sg()
306 desc = dmaengine_prep_slave_sg(chan, sgl, nents, dir, flags); spi_qup_prep_sg()
H A Dspi-dw-mid.c168 xfer->tx_sg.sgl, dw_spi_dma_prepare_tx()
214 xfer->rx_sg.sgl, dw_spi_dma_prepare_rx()
H A Dspi-pl022.c813 dma_unmap_sg(pl022->dma_tx_channel->device->dev, pl022->sgt_tx.sgl, unmap_free_dma_scatter()
815 dma_unmap_sg(pl022->dma_rx_channel->device->dev, pl022->sgt_rx.sgl, unmap_free_dma_scatter()
826 BUG_ON(!pl022->sgt_rx.sgl); dma_callback()
840 pl022->sgt_rx.sgl, dma_callback()
844 for_each_sg(pl022->sgt_rx.sgl, sg, pl022->sgt_rx.nents, i) { dma_callback()
854 for_each_sg(pl022->sgt_tx.sgl, sg, pl022->sgt_tx.nents, i) { dma_callback()
891 for_each_sg(sgtab->sgl, sg, sgtab->nents, i) { setup_dma_scatter()
912 for_each_sg(sgtab->sgl, sg, sgtab->nents, i) { setup_dma_scatter()
1066 rx_sglen = dma_map_sg(rxchan->device->dev, pl022->sgt_rx.sgl, configure_dma()
1071 tx_sglen = dma_map_sg(txchan->device->dev, pl022->sgt_tx.sgl, configure_dma()
1078 pl022->sgt_rx.sgl, configure_dma()
1086 pl022->sgt_tx.sgl, configure_dma()
1110 dma_unmap_sg(txchan->device->dev, pl022->sgt_tx.sgl, configure_dma()
1113 dma_unmap_sg(rxchan->device->dev, pl022->sgt_rx.sgl, configure_dma()
H A Dspi-ep93xx.c493 for_each_sg(sgt->sgl, sg, sgt->nents, i) { ep93xx_spi_dma_prepare()
513 nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); ep93xx_spi_dma_prepare()
517 txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, dir, DMA_CTRL_ACK); ep93xx_spi_dma_prepare()
519 dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); ep93xx_spi_dma_prepare()
547 dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); ep93xx_spi_dma_finish()
H A Dspi-img-spfi.c338 rxdesc = dmaengine_prep_slave_sg(spfi->rx_ch, xfer->rx_sg.sgl, img_spfi_start_dma()
362 txdesc = dmaengine_prep_slave_sg(spfi->tx_ch, xfer->tx_sg.sgl, img_spfi_start_dma()
H A Dspi-rockchip.c457 rs->rx_sg.sgl, rs->rx_sg.nents, rockchip_spi_prepare_dma()
474 rs->tx_sg.sgl, rs->tx_sg.nents, rockchip_spi_prepare_dma()
/linux-4.1.27/arch/powerpc/kernel/
H A Ddma.c128 static int dma_direct_map_sg(struct device *dev, struct scatterlist *sgl, dma_direct_map_sg() argument
135 for_each_sg(sgl, sg, nents, i) { for_each_sg()
196 struct scatterlist *sgl, int nents, dma_direct_sync_sg()
202 for_each_sg(sgl, sg, nents, i) dma_direct_sync_sg()
195 dma_direct_sync_sg(struct device *dev, struct scatterlist *sgl, int nents, enum dma_data_direction direction) dma_direct_sync_sg() argument
H A Dvio.c559 struct scatterlist *sgl; vio_dma_iommu_map_sg() local
564 for (sgl = sglist; count < nelems; count++, sgl++) vio_dma_iommu_map_sg()
565 alloc_size += roundup(sgl->length, IOMMU_PAGE_SIZE(tbl)); vio_dma_iommu_map_sg()
580 for (sgl = sglist, count = 0; count < ret; count++, sgl++) vio_dma_iommu_map_sg()
581 alloc_size -= roundup(sgl->dma_length, IOMMU_PAGE_SIZE(tbl)); vio_dma_iommu_map_sg()
595 struct scatterlist *sgl; vio_dma_iommu_unmap_sg() local
600 for (sgl = sglist; count < nelems; count++, sgl++) vio_dma_iommu_unmap_sg()
601 alloc_size += roundup(sgl->dma_length, IOMMU_PAGE_SIZE(tbl)); vio_dma_iommu_unmap_sg()
H A Dibmebus.c106 struct scatterlist *sgl, ibmebus_map_sg()
113 for_each_sg(sgl, sg, nents, i) { for_each_sg()
105 ibmebus_map_sg(struct device *dev, struct scatterlist *sgl, int nents, enum dma_data_direction direction, struct dma_attrs *attrs) ibmebus_map_sg() argument
/linux-4.1.27/drivers/scsi/
H A Dstorvsc_drv.c558 static void destroy_bounce_buffer(struct scatterlist *sgl, destroy_bounce_buffer() argument
565 page_buf = sg_page((&sgl[i])); destroy_bounce_buffer()
570 kfree(sgl); destroy_bounce_buffer()
573 static int do_bounce_buffer(struct scatterlist *sgl, unsigned int sg_count) do_bounce_buffer() argument
585 if (sgl[i].offset + sgl[i].length != PAGE_SIZE) do_bounce_buffer()
589 if (sgl[i].offset != 0) do_bounce_buffer()
593 if (sgl[i].length != PAGE_SIZE || sgl[i].offset != 0) do_bounce_buffer()
600 static struct scatterlist *create_bounce_buffer(struct scatterlist *sgl, create_bounce_buffer() argument
632 /* Assume the original sgl has enough room */ copy_from_bounce_buffer()
1558 struct scatterlist *sgl; storvsc_queuecommand() local
1617 sgl = (struct scatterlist *)scsi_sglist(scmnd); storvsc_queuecommand()
1625 /* check if we need to bounce the sgl */ storvsc_queuecommand()
1626 if (do_bounce_buffer(sgl, scsi_sg_count(scmnd)) != -1) { storvsc_queuecommand()
1628 create_bounce_buffer(sgl, sg_count, storvsc_queuecommand()
1638 copy_to_bounce_buffer(sgl, storvsc_queuecommand()
1641 sgl = cmd_request->bounce_sgl; storvsc_queuecommand()
1662 payload->range.offset = sgl[0].offset; storvsc_queuecommand()
1664 cur_sgl = sgl; storvsc_queuecommand()
H A D3w-9xxx.c64 2.26.02.003 - Correctly handle single sgl's with use_sg=1.
170 "Last sgl length: %4d\n" twa_show_stats()
171 "Max sgl length: %4d\n" twa_show_stats()
491 command_packet->byte8_offset.param.sgl[0].address = TW_CPU_TO_SGL(tw_dev->generic_buffer_phys[request_id]); twa_aen_sync_time()
492 command_packet->byte8_offset.param.sgl[0].length = cpu_to_le32(TW_SECTOR_SIZE); twa_aen_sync_time()
1079 command_packet->byte8_offset.param.sgl[0].address = TW_CPU_TO_SGL(tw_dev->generic_buffer_phys[request_id]); twa_get_param()
1080 command_packet->byte8_offset.param.sgl[0].length = cpu_to_le32(TW_SECTOR_SIZE); twa_get_param()
1132 /* Turn on 64-bit sgl support if we need to */ twa_initconnection()
1346 /* Report residual bytes for single sgl */ twa_interrupt()
1382 TW_SG_Entry *sgl; twa_load_sgl() local
1405 sgl = (TW_SG_Entry *)((u32 *)oldcommand+oldcommand->size - (sizeof(TW_SG_Entry)/4) + pae); twa_load_sgl()
1407 sgl = (TW_SG_Entry *)((u32 *)oldcommand+TW_SGL_OUT(oldcommand->opcode__sgloffset)); twa_load_sgl()
1408 sgl->address = TW_CPU_TO_SGL(dma_handle + sizeof(TW_Ioctl_Buf_Apache) - 1); twa_load_sgl()
1409 sgl->length = cpu_to_le32(length); twa_load_sgl()
1866 TW_PRINTK(tw_dev->host, TW_DRIVER, 0x2e, "Found unaligned sgl address during execute scsi"); scsi_for_each_sg()
1879 TW_PRINTK(tw_dev->host, TW_DRIVER, 0x2f, "Found unaligned sgl address during internal post");
H A D3w-xxxx.c505 "Last sgl length: %4d\n" tw_show_stats()
506 "Max sgl length: %4d\n" tw_show_stats()
590 command_packet->byte8.param.sgl[0].address = param_value; tw_aen_read_queue()
591 command_packet->byte8.param.sgl[0].length = sizeof(TW_Sector); tw_aen_read_queue()
732 command_packet->byte8.param.sgl[0].address = param_value; tw_aen_drain_queue()
733 command_packet->byte8.param.sgl[0].length = sizeof(TW_Sector); tw_aen_drain_queue()
966 tw_ioctl->firmware_command.byte8.param.sgl[0].address = dma_handle + sizeof(TW_New_Ioctl) - 1; tw_chrdev_ioctl()
967 tw_ioctl->firmware_command.byte8.param.sgl[0].length = data_buffer_length_adjusted; tw_chrdev_ioctl()
970 tw_ioctl->firmware_command.byte8.io.sgl[0].address = dma_handle + sizeof(TW_New_Ioctl) - 1; tw_chrdev_ioctl()
971 tw_ioctl->firmware_command.byte8.io.sgl[0].length = data_buffer_length_adjusted; tw_chrdev_ioctl()
1154 command_packet->byte8.param.sgl[0].address = param_value; tw_setfeature()
1155 command_packet->byte8.param.sgl[0].length = sizeof(TW_Sector); tw_setfeature()
1426 command_packet->byte8.param.sgl[0].address = param_value; tw_scsiop_inquiry()
1427 command_packet->byte8.param.sgl[0].length = sizeof(TW_Sector); tw_scsiop_inquiry()
1536 command_packet->byte8.param.sgl[0].address = param_value; tw_scsiop_mode_sense()
1537 command_packet->byte8.param.sgl[0].length = sizeof(TW_Sector); tw_scsiop_mode_sense()
1626 command_packet->byte8.param.sgl[0].address = param_value; tw_scsiop_read_capacity()
1627 command_packet->byte8.param.sgl[0].length = sizeof(TW_Sector); tw_scsiop_read_capacity()
1749 command_packet->byte8.io.sgl[i].address = sg_dma_address(sg); tw_scsiop_read_write()
1750 command_packet->byte8.io.sgl[i].length = sg_dma_len(sg); tw_scsiop_read_write()
1871 command_packet->byte8.param.sgl[0].address = param_value; tw_scsiop_test_unit_ready()
1872 command_packet->byte8.param.sgl[0].length = sizeof(TW_Sector); tw_scsiop_test_unit_ready()
H A Dvmw_pvscsi.c61 struct pvscsi_sg_list *sgl; member in struct:pvscsi_ctx
340 sge = &ctx->sgl->sge[0]; pvscsi_create_sg()
373 ctx->sglPA = pci_map_single(adapter->dev, ctx->sgl, pvscsi_map_buffers()
1162 free_pages((unsigned long)ctx->sgl, get_order(SGL_SIZE)); pvscsi_free_sgls()
1254 ctx->sgl = (void *)__get_free_pages(GFP_KERNEL, pvscsi_allocate_sg()
1257 BUG_ON(!IS_ALIGNED(((unsigned long)ctx->sgl), PAGE_SIZE)); pvscsi_allocate_sg()
1258 if (!ctx->sgl) { pvscsi_allocate_sg()
1260 free_pages((unsigned long)ctx->sgl, pvscsi_allocate_sg()
1262 ctx->sgl = NULL; pvscsi_allocate_sg()
H A D3w-sas.c175 "Last sgl length: %4d\n" twl_show_stats()
176 "Max sgl length: %4d\n" twl_show_stats()
426 command_packet->byte8_offset.param.sgl[0].address = TW_CPU_TO_SGL(tw_dev->generic_buffer_phys[request_id]); twl_aen_sync_time()
427 command_packet->byte8_offset.param.sgl[0].length = TW_CPU_TO_SGL(TW_SECTOR_SIZE); twl_aen_sync_time()
686 TW_SG_Entry_ISO *sgl; twl_load_sgl() local
708 sgl = (TW_SG_Entry_ISO *)((u32 *)oldcommand+oldcommand->size - (sizeof(TW_SG_Entry_ISO)/4) + pae + (sizeof(dma_addr_t) > 4 ? 1 : 0)); twl_load_sgl()
709 sgl->address = TW_CPU_TO_SGL(dma_handle + sizeof(TW_Ioctl_Buf_Apache) - 1); twl_load_sgl()
710 sgl->length = TW_CPU_TO_SGL(length); twl_load_sgl()
954 command_packet->byte8_offset.param.sgl[0].address = TW_CPU_TO_SGL(tw_dev->generic_buffer_phys[request_id]); twl_get_param()
955 command_packet->byte8_offset.param.sgl[0].length = TW_CPU_TO_SGL(TW_SECTOR_SIZE); twl_get_param()
999 /* Turn on 64-bit sgl support if we need to */ twl_initconnection()
1218 /* Report residual bytes for single sgl */ twl_interrupt()
H A D3w-sas.h232 TW_SG_Entry_ISO sgl[TW_LIBERATOR_MAX_SGL_LENGTH_OLD]; member in struct:TW_Command::__anon8516::__anon8517
236 TW_SG_Entry_ISO sgl[TW_LIBERATOR_MAX_SGL_LENGTH_OLD]; member in struct:TW_Command::__anon8516::__anon8518
H A Dscsi_lib.c568 static void scsi_sg_free(struct scatterlist *sgl, unsigned int nents) scsi_sg_free() argument
573 mempool_free(sgl, sgp->pool); scsi_sg_free()
601 sg_init_table(sdb->table.sgl, nents); scsi_alloc_sgtable()
604 first_chunk = sdb->table.sgl; scsi_alloc_sgtable()
1097 count = blk_rq_map_sg(req->q, req, sdb->table.sgl); scsi_init_sgtable()
1168 prot_sdb->table.sgl); scsi_init_io()
1931 cmd->sdb.table.sgl = sg; scsi_mq_prep_fn()
1940 cmd->prot_sdb->table.sgl = scsi_mq_prep_fn()
1949 bidi_sdb->table.sgl = scsi_mq_prep_fn()
3080 * @sgl: scatter-gather list
3087 void *scsi_kmap_atomic_sg(struct scatterlist *sgl, int sg_count, scsi_kmap_atomic_sg() argument
3097 for_each_sg(sgl, sg, sg_count, i) { for_each_sg()
H A D3w-xxxx.h307 TW_SG_Entry sgl[TW_MAX_SGL_LENGTH]; member in struct:TW_Command::__anon8524::__anon8525
311 TW_SG_Entry sgl[TW_MAX_SGL_LENGTH]; member in struct:TW_Command::__anon8524::__anon8526
H A D3w-9xxx.h501 TW_SG_Entry sgl[TW_ESCALADE_MAX_SGL_LENGTH]; member in struct:TW_Command::__anon8508::__anon8509
505 TW_SG_Entry sgl[TW_ESCALADE_MAX_SGL_LENGTH]; member in struct:TW_Command::__anon8508::__anon8510
/linux-4.1.27/drivers/scsi/esas2r/
H A Desas2r_io.c224 struct esas2r_mem_desc *sgl; esas2r_build_sg_list_sge() local
231 sgl = esas2r_alloc_sgl(a); esas2r_build_sg_list_sge()
233 if (unlikely(sgl == NULL)) esas2r_build_sg_list_sge()
244 memcpy(sgl->virt_addr, sgc->sge.a64.last, sgelen); esas2r_build_sg_list_sge()
248 (struct atto_vda_sge *)((u8 *)sgl->virt_addr + esas2r_build_sg_list_sge()
253 (struct atto_vda_sge *)((u8 *)sgl->virt_addr esas2r_build_sg_list_sge()
260 cpu_to_le64(sgl->phys_addr); esas2r_build_sg_list_sge()
302 list_add(&sgl->next_desc, &rq->sg_table_head); esas2r_build_sg_list_sge()
376 struct esas2r_mem_desc *sgl; esas2r_build_prd_iblk() local
449 sgl = esas2r_alloc_sgl(a); esas2r_build_prd_iblk()
451 if (unlikely(sgl == NULL)) esas2r_build_prd_iblk()
458 list_add(&sgl->next_desc, &rq->sg_table_head); esas2r_build_prd_iblk()
471 cpu_to_le64(sgl->phys_addr); esas2r_build_prd_iblk()
479 (struct atto_physical_region_description *)sgl esas2r_build_prd_iblk()
H A Desas2r_init.c841 struct esas2r_mem_desc *sgl; esas2r_init_adapter_struct() local
909 for (i = 0, sgl = a->sg_list_mds; i < num_sg_lists; i++, sgl++) { esas2r_init_adapter_struct()
910 sgl->size = sgl_page_size; esas2r_init_adapter_struct()
912 list_add_tail(&sgl->next_desc, &a->free_sg_list_head); esas2r_init_adapter_struct()
914 if (!esas2r_initmem_alloc(a, sgl, ESAS2R_SGL_ALIGN)) { esas2r_init_adapter_struct()
H A Desas2r.h1153 struct list_head *sgl; esas2r_alloc_sgl() local
1158 sgl = a->free_sg_list_head.next; esas2r_alloc_sgl()
1159 result = list_entry(sgl, struct esas2r_mem_desc, next_desc); esas2r_alloc_sgl()
1160 list_del_init(sgl); esas2r_alloc_sgl()
/linux-4.1.27/net/ceph/
H A Dcrypto.c133 sgt->sgl = prealloc_sg; setup_sgtable()
137 for_each_sg(sgt->sgl, sg, sgt->orig_nents, i) { setup_sgtable()
204 ret = crypto_blkcipher_encrypt(&desc, sg_out.sgl, sg_in, ceph_aes_encrypt()
267 ret = crypto_blkcipher_encrypt(&desc, sg_out.sgl, sg_in, ceph_aes_encrypt2()
320 ret = crypto_blkcipher_decrypt(&desc, sg_out, sg_in.sgl, src_len); ceph_aes_decrypt()
386 ret = crypto_blkcipher_decrypt(&desc, sg_out, sg_in.sgl, src_len); ceph_aes_decrypt2()
/linux-4.1.27/drivers/infiniband/hw/ipath/
H A Dipath_dma.c101 static int ipath_map_sg(struct ib_device *dev, struct scatterlist *sgl, ipath_map_sg() argument
111 for_each_sg(sgl, sg, nents, i) { for_each_sg()
H A Dipath_mr.c224 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) { ipath_reg_user_mr()
/linux-4.1.27/drivers/infiniband/hw/qib/
H A Dqib_dma.c94 static int qib_map_sg(struct ib_device *dev, struct scatterlist *sgl, qib_map_sg() argument
104 for_each_sg(sgl, sg, nents, i) { for_each_sg()
/linux-4.1.27/arch/sparc/kernel/
H A Dioport.c549 static int pci32_map_sg(struct device *device, struct scatterlist *sgl, pci32_map_sg() argument
557 for_each_sg(sgl, sg, nents, n) { for_each_sg()
568 static void pci32_unmap_sg(struct device *dev, struct scatterlist *sgl, pci32_unmap_sg() argument
576 for_each_sg(sgl, sg, nents, n) { for_each_sg()
614 static void pci32_sync_sg_for_cpu(struct device *dev, struct scatterlist *sgl, pci32_sync_sg_for_cpu() argument
621 for_each_sg(sgl, sg, nents, n) { for_each_sg()
627 static void pci32_sync_sg_for_device(struct device *device, struct scatterlist *sgl, pci32_sync_sg_for_device() argument
634 for_each_sg(sgl, sg, nents, n) { for_each_sg()
/linux-4.1.27/drivers/xen/
H A Dswiotlb-xen.c546 xen_swiotlb_map_sg_attrs(struct device *hwdev, struct scatterlist *sgl, xen_swiotlb_map_sg_attrs() argument
555 for_each_sg(sgl, sg, nelems, i) { for_each_sg()
572 xen_swiotlb_unmap_sg_attrs(hwdev, sgl, i, dir, for_each_sg()
574 sg_dma_len(sgl) = 0; for_each_sg()
607 xen_swiotlb_unmap_sg_attrs(struct device *hwdev, struct scatterlist *sgl, xen_swiotlb_unmap_sg_attrs() argument
616 for_each_sg(sgl, sg, nelems, i) xen_swiotlb_unmap_sg_attrs()
630 xen_swiotlb_sync_sg(struct device *hwdev, struct scatterlist *sgl, xen_swiotlb_sync_sg() argument
637 for_each_sg(sgl, sg, nelems, i) xen_swiotlb_sync_sg()
H A Dxen-scsiback.c134 struct scatterlist *sgl; member in struct:vscsibk_pend
286 kfree(req->sgl); scsiback_fast_flush_area()
287 req->sgl = NULL; scsiback_fast_flush_area()
407 pending_req->sgl, pending_req->n_sg, scsiback_cmd_exec()
522 /* free of (sgl) in fast_flush_area() */ scsiback_gnttab_data_map()
523 pending_req->sgl = kmalloc_array(nr_segments, scsiback_gnttab_data_map()
525 if (!pending_req->sgl) scsiback_gnttab_data_map()
528 sg_init_table(pending_req->sgl, nr_segments); scsiback_gnttab_data_map()
562 for_each_sg(pending_req->sgl, sg, nr_segments, i) { scsiback_gnttab_data_map()
/linux-4.1.27/drivers/scsi/isci/
H A Drequest.c582 stp_req->sgl.offset = 0; sci_stp_pio_request_construct()
583 stp_req->sgl.set = SCU_SGL_ELEMENT_PAIR_A; sci_stp_pio_request_construct()
587 stp_req->sgl.index = 0; sci_stp_pio_request_construct()
590 stp_req->sgl.index = -1; sci_stp_pio_request_construct()
1261 struct scu_sgl_element *sgl; pio_sgl_next() local
1264 struct isci_stp_pio_sgl *pio_sgl = &stp_req->sgl; pio_sgl_next()
1268 sgl = NULL; pio_sgl_next()
1272 sgl = NULL; pio_sgl_next()
1275 sgl = &sgl_pair->B; pio_sgl_next()
1280 sgl = NULL; pio_sgl_next()
1285 sgl = &sgl_pair->A; pio_sgl_next()
1289 return sgl; pio_sgl_next()
1319 /* transmit DATA_FIS from (current sgl + offset) for input
1320 * parameter length. current sgl and offset are already stored in the IO request
1334 sgl_pair = to_sgl_element_pair(ireq, stp_req->sgl.index); sci_stp_request_pio_data_out_trasmit_data_frame()
1335 if (stp_req->sgl.set == SCU_SGL_ELEMENT_PAIR_A) sci_stp_request_pio_data_out_trasmit_data_frame()
1355 struct scu_sgl_element *sgl; sci_stp_request_pio_data_out_transmit_data() local
1359 offset = stp_req->sgl.offset; sci_stp_request_pio_data_out_transmit_data()
1360 sgl_pair = to_sgl_element_pair(ireq, stp_req->sgl.index); sci_stp_request_pio_data_out_transmit_data()
1361 if (WARN_ONCE(!sgl_pair, "%s: null sgl element", __func__)) sci_stp_request_pio_data_out_transmit_data()
1364 if (stp_req->sgl.set == SCU_SGL_ELEMENT_PAIR_A) { sci_stp_request_pio_data_out_transmit_data()
1365 sgl = &sgl_pair->A; sci_stp_request_pio_data_out_transmit_data()
1368 sgl = &sgl_pair->B; sci_stp_request_pio_data_out_transmit_data()
1381 /* update the current sgl, offset and save for future */ sci_stp_request_pio_data_out_transmit_data()
1382 sgl = pio_sgl_next(stp_req); sci_stp_request_pio_data_out_transmit_data()
1389 sgl->address_lower += stp_req->pio_len; sci_stp_request_pio_data_out_transmit_data()
1393 stp_req->sgl.offset = offset; sci_stp_request_pio_data_out_transmit_data()
1701 /* setup sgl */ scu_atapi_construct_task_context()
2033 if (stp_req->sgl.index < 0) { sci_io_request_frame_handler()
2915 else /* unmap the sgl dma addresses */ isci_request_io_request_complete()
3352 /* map the sgl addresses, if present. isci_io_request_build()
H A Drequest.h69 * @sgl - track pio transfer progress as we iterate through the sgl
79 } sgl; member in struct:isci_stp_request
/linux-4.1.27/arch/ia64/sn/pci/
H A Dpci_dma.c241 static void sn_dma_unmap_sg(struct device *dev, struct scatterlist *sgl, sn_dma_unmap_sg() argument
252 for_each_sg(sgl, sg, nhwentries, i) { for_each_sg()
274 static int sn_dma_map_sg(struct device *dev, struct scatterlist *sgl, sn_dma_map_sg() argument
279 struct scatterlist *saved_sg = sgl, *sg; sn_dma_map_sg()
292 for_each_sg(sgl, sg, nhwentries, i) { for_each_sg()
/linux-4.1.27/drivers/infiniband/ulp/iser/
H A Diser_memory.c250 struct scatterlist *sg, *sgl = data->sg; iser_sg_to_page_vec() local
257 *offset = (u64) sgl[0].offset & ~MASK_4K; iser_sg_to_page_vec()
261 for_each_sg(sgl, sg, data->dma_nents, i) { iser_sg_to_page_vec()
302 struct scatterlist *sg, *sgl, *next_sg = NULL; iser_data_buf_aligned_len() local
309 sgl = data->sg; iser_data_buf_aligned_len()
310 start_addr = ib_sg_dma_address(ibdev, sgl); iser_data_buf_aligned_len()
312 for_each_sg(sgl, sg, data->dma_nents, i) { iser_data_buf_aligned_len()
/linux-4.1.27/drivers/mtd/ubi/
H A Deba.c506 * @sgl: UBI scatter gather list to store the read data
516 struct ubi_sgl *sgl, int lnum, int offset, int len, ubi_eba_read_leb_sg()
524 ubi_assert(sgl->list_pos < UBI_MAX_SG_COUNT); ubi_eba_read_leb_sg()
525 sg = &sgl->sg[sgl->list_pos]; ubi_eba_read_leb_sg()
526 if (len < sg->length - sgl->page_pos) ubi_eba_read_leb_sg()
529 to_read = sg->length - sgl->page_pos; ubi_eba_read_leb_sg()
532 sg_virt(sg) + sgl->page_pos, offset, ubi_eba_read_leb_sg()
540 sgl->page_pos += to_read; ubi_eba_read_leb_sg()
541 if (sgl->page_pos == sg->length) { ubi_eba_read_leb_sg()
542 sgl->list_pos++; ubi_eba_read_leb_sg()
543 sgl->page_pos = 0; ubi_eba_read_leb_sg()
549 sgl->list_pos++; ubi_eba_read_leb_sg()
550 sgl->page_pos = 0; ubi_eba_read_leb_sg()
515 ubi_eba_read_leb_sg(struct ubi_device *ubi, struct ubi_volume *vol, struct ubi_sgl *sgl, int lnum, int offset, int len, int check) ubi_eba_read_leb_sg() argument
/linux-4.1.27/drivers/char/
H A Dvirtio_console.c878 struct sg_list *sgl = sd->u.data; pipe_to_sg() local
881 if (sgl->n == sgl->size) pipe_to_sg()
891 sg_set_page(&(sgl->sg[sgl->n]), buf->page, len, buf->offset); pipe_to_sg()
910 sg_set_page(&(sgl->sg[sgl->n]), page, len, offset); pipe_to_sg()
912 sgl->n++; pipe_to_sg()
913 sgl->len += len; pipe_to_sg()
924 struct sg_list sgl; port_fops_splice_write() local
931 .u.data = &sgl, port_fops_splice_write()
963 sgl.n = 0; port_fops_splice_write()
964 sgl.len = 0; port_fops_splice_write()
965 sgl.size = pipe->nrbufs; port_fops_splice_write()
966 sgl.sg = buf->sg; port_fops_splice_write()
967 sg_init_table(sgl.sg, sgl.size); port_fops_splice_write()
971 ret = __send_to_port(port, buf->sg, sgl.n, sgl.len, buf, true); port_fops_splice_write()
/linux-4.1.27/drivers/tty/serial/
H A Dimx.c476 struct scatterlist *sgl = &sport->tx_sgl[0]; dma_tx_callback() local
483 dma_unmap_sg(sport->port.dev, sgl, sport->dma_tx_nents, DMA_TO_DEVICE); dma_tx_callback()
517 struct scatterlist *sgl = sport->tx_sgl; imx_dma_tx() local
531 sg_init_one(sgl, xmit->buf + xmit->tail, sport->tx_bytes); imx_dma_tx()
534 sg_init_table(sgl, 2); imx_dma_tx()
535 sg_set_buf(sgl, xmit->buf + xmit->tail, imx_dma_tx()
537 sg_set_buf(sgl + 1, xmit->buf, xmit->head); imx_dma_tx()
540 ret = dma_map_sg(dev, sgl, sport->dma_tx_nents, DMA_TO_DEVICE); imx_dma_tx()
545 desc = dmaengine_prep_slave_sg(chan, sgl, sport->dma_tx_nents, imx_dma_tx()
548 dma_unmap_sg(dev, sgl, sport->dma_tx_nents, imx_dma_tx()
903 struct scatterlist *sgl = &sport->rx_sgl; dma_rx_callback() local
910 dma_unmap_sg(sport->port.dev, sgl, 1, DMA_FROM_DEVICE); dma_rx_callback()
949 struct scatterlist *sgl = &sport->rx_sgl; start_rx_dma() local
955 sg_init_one(sgl, sport->rx_buf, RX_BUF_SIZE); start_rx_dma()
956 ret = dma_map_sg(dev, sgl, 1, DMA_FROM_DEVICE); start_rx_dma()
961 desc = dmaengine_prep_slave_sg(chan, sgl, 1, DMA_DEV_TO_MEM, start_rx_dma()
964 dma_unmap_sg(dev, sgl, 1, DMA_FROM_DEVICE); start_rx_dma()
1228 struct scatterlist *sgl = &sport->tx_sgl[0]; imx_flush_buffer() local
1238 dma_unmap_sg(sport->port.dev, sgl, sport->dma_tx_nents, imx_flush_buffer()
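imx_dma_tx() above (lines 531-537) handles the UART's circular xmit buffer by emitting either one or two sg entries, so a wrapped tail-to-end plus start-to-head pair drains in a single DMA transfer. A hedged sketch of that decision, assuming a non-empty struct circ_buf ring as in serial_core:

#include <linux/circ_buf.h>
#include <linux/scatterlist.h>
#include <linux/serial_core.h>

static int xmit_to_sgl(struct circ_buf *xmit, struct scatterlist *sgl)
{
	if (xmit->tail < xmit->head) {		/* contiguous region */
		sg_init_one(sgl, xmit->buf + xmit->tail,
			    xmit->head - xmit->tail);
		return 1;
	}
	sg_init_table(sgl, 2);			/* wrapped: two pieces */
	sg_set_buf(&sgl[0], xmit->buf + xmit->tail,
		   UART_XMIT_SIZE - xmit->tail);
	sg_set_buf(&sgl[1], xmit->buf, xmit->head);
	return 2;			/* pass as nents to dma_map_sg() */
}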
H A Dmxs-auart.c227 struct scatterlist *sgl = &s->tx_sgl; mxs_auart_dma_tx() local
241 sg_init_one(sgl, s->tx_dma_buf, size); mxs_auart_dma_tx()
242 dma_map_sg(s->dev, sgl, 1, DMA_TO_DEVICE); mxs_auart_dma_tx()
243 desc = dmaengine_prep_slave_sg(channel, sgl, mxs_auart_dma_tx()
554 struct scatterlist *sgl = &s->rx_sgl; mxs_auart_dma_prep_rx() local
570 sg_init_one(sgl, s->rx_dma_buf, UART_XMIT_SIZE); mxs_auart_dma_prep_rx()
571 dma_map_sg(s->dev, sgl, 1, DMA_FROM_DEVICE); mxs_auart_dma_prep_rx()
572 desc = dmaengine_prep_slave_sg(channel, sgl, 1, DMA_DEV_TO_MEM, mxs_auart_dma_prep_rx()
/linux-4.1.27/drivers/mmc/host/
H A Dmxs-mmc.c230 struct scatterlist * sgl; mxs_mmc_prep_dma() local
237 sgl = data->sg; mxs_mmc_prep_dma()
241 sgl = (struct scatterlist *) ssp->ssp_pio_words; mxs_mmc_prep_dma()
246 sgl, sg_len, ssp->slave_dirn, flags); mxs_mmc_prep_dma()
356 struct scatterlist *sgl = data->sg, *sg; mxs_mmc_adtc() local
402 for_each_sg(sgl, sg, sg_len, i) mxs_mmc_adtc()
/linux-4.1.27/drivers/mtd/nand/
H A Dlpc32xx_mlc.c197 struct scatterlist sgl; member in struct:lpc32xx_nand_host
398 sg_init_one(&host->sgl, mem, len); lpc32xx_xmit_dma()
400 res = dma_map_sg(host->dma_chan->device->dev, &host->sgl, 1, lpc32xx_xmit_dma()
406 desc = dmaengine_prep_slave_sg(host->dma_chan, &host->sgl, 1, dir, lpc32xx_xmit_dma()
422 dma_unmap_sg(host->dma_chan->device->dev, &host->sgl, 1, lpc32xx_xmit_dma()
426 dma_unmap_sg(host->dma_chan->device->dev, &host->sgl, 1, lpc32xx_xmit_dma()
H A Dlpc32xx_slc.c212 struct scatterlist sgl; member in struct:lpc32xx_nand_host
448 sg_init_one(&host->sgl, mem, len); lpc32xx_xmit_dma()
450 res = dma_map_sg(host->dma_chan->device->dev, &host->sgl, 1, lpc32xx_xmit_dma()
456 desc = dmaengine_prep_slave_sg(host->dma_chan, &host->sgl, 1, dir, lpc32xx_xmit_dma()
472 dma_unmap_sg(host->dma_chan->device->dev, &host->sgl, 1, lpc32xx_xmit_dma()
477 dma_unmap_sg(host->dma_chan->device->dev, &host->sgl, 1, lpc32xx_xmit_dma()
/linux-4.1.27/drivers/net/ethernet/chelsio/cxgb4vf/
H A Dsge.c159 struct ulptx_sgl *sgl; /* scatter/gather list in TX Queue */ member in struct:tx_sw_desc
307 const struct ulptx_sgl *sgl, const struct sge_txq *tq) unmap_sgl()
313 dma_unmap_single(dev, be64_to_cpu(sgl->addr0), unmap_sgl()
314 be32_to_cpu(sgl->len0), DMA_TO_DEVICE); unmap_sgl()
316 dma_unmap_page(dev, be64_to_cpu(sgl->addr0), unmap_sgl()
317 be32_to_cpu(sgl->len0), DMA_TO_DEVICE); unmap_sgl()
325 for (p = sgl->sge; nfrags >= 2; nfrags -= 2) { unmap_sgl()
394 unmap_sgl(dev, sdesc->skb, sdesc->sgl, tq); free_tx_desc()
895 * @sgl: starting location for writing the SGL
909 struct ulptx_sgl *sgl, u64 *end, unsigned int start, write_sgl()
920 sgl->len0 = htonl(len); write_sgl()
921 sgl->addr0 = cpu_to_be64(addr[0] + start); write_sgl()
924 sgl->len0 = htonl(skb_frag_size(&si->frags[0])); write_sgl()
925 sgl->addr0 = cpu_to_be64(addr[1]); write_sgl()
928 sgl->cmd_nsge = htonl(ULPTX_CMD_V(ULP_TX_SC_DSGL) | write_sgl()
937 to = (u8 *)end > (u8 *)tq->stat ? buf : sgl->sge; write_sgl()
951 unsigned int part0 = (u8 *)tq->stat - (u8 *)sgl->sge, part1; write_sgl()
954 memcpy(sgl->sge, buf, part0); write_sgl()
1403 struct ulptx_sgl *sgl = (struct ulptx_sgl *)(cpl + 1); t4vf_eth_xmit() local
1414 if (unlikely((void *)sgl == (void *)tq->stat)) { t4vf_eth_xmit()
1415 sgl = (void *)tq->desc; t4vf_eth_xmit()
1419 write_sgl(skb, tq, sgl, end, 0, addr); t4vf_eth_xmit()
1426 tq->sdesc[last_desc].sgl = sgl; t4vf_eth_xmit()
306 unmap_sgl(struct device *dev, const struct sk_buff *skb, const struct ulptx_sgl *sgl, const struct sge_txq *tq) unmap_sgl() argument
908 write_sgl(const struct sk_buff *skb, struct sge_txq *tq, struct ulptx_sgl *sgl, u64 *end, unsigned int start, const dma_addr_t *addr) write_sgl() argument
/linux-4.1.27/drivers/hsi/clients/
H A Dhsi_char.c160 kfree(sg_virt(msg->sgt.sgl)); hsc_msg_free()
199 sg_init_one(msg->sgt.sgl, buf, alloc_size); hsc_msg_alloc()
230 return msg->sgt.sgl->length; hsc_msg_len_get()
235 msg->sgt.sgl->length = len; hsc_msg_len_set()
468 sg_virt(msg->sgt.sgl), hsc_msg_len_get(msg)); hsc_read()
504 if (copy_from_user(sg_virt(msg->sgt.sgl), (void __user *)buf, len)) { hsc_write()
H A Dcmt_speech.c212 u32 *data = sg_virt(msg->sgt.sgl); cs_set_cmd()
218 u32 *data = sg_virt(msg->sgt.sgl); cs_get_cmd()
272 kfree(sg_virt(msg->sgt.sgl)); cs_free_cmds()
294 sg_init_one(msg->sgt.sgl, buf, sizeof(*buf)); cs_alloc_cmds()
632 sg_init_one(msg->sgt.sgl, address, hi->buf_size); cs_hsi_peek_on_data_complete()
675 sg_init_one(rxmsg->sgt.sgl, (void *)hi->mmap_base, 0); cs_hsi_read_on_data()
727 sg_init_one(txmsg->sgt.sgl, address, hi->buf_size); cs_hsi_write_on_data()
H A Dssi_protocol.c167 data = sg_virt(msg->sgt.sgl); ssip_set_cmd()
175 data = sg_virt(msg->sgt.sgl); ssip_get_cmd()
188 sg = msg->sgt.sgl; ssip_skb_to_msg()
258 kfree(sg_virt(msg->sgt.sgl)); ssip_free_cmds()
278 sg_init_one(msg->sgt.sgl, buf, sizeof(*buf)); ssip_alloc_cmds()
/linux-4.1.27/drivers/gpu/drm/
H A Ddrm_cache.c112 for_each_sg_page(st->sgl, &sg_iter, st->nents, 0) drm_clflush_sg()
H A Ddrm_prime.c156 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, drm_gem_map_detach()
204 if (!dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir)) { drm_gem_map_dma_buf()
720 for_each_sg(sgt->sgl, sg, sgt->nents, count) { drm_prime_sg_to_page_addr_arrays()
/linux-4.1.27/drivers/infiniband/hw/mlx4/
H A Ddoorbell.c75 db->dma = sg_dma_address(page->umem->sg_head.sgl) + (virt & ~PAGE_MASK); mlx4_ib_db_map_user()
/linux-4.1.27/drivers/infiniband/hw/mlx5/
H A Ddoorbell.c77 db->dma = sg_dma_address(page->umem->sg_head.sgl) + (virt & ~PAGE_MASK); mlx5_ib_db_map_user()
H A Dmem.c78 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) { mlx5_ib_cont_pages()
180 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) { __mlx5_ib_populate_pas()
/linux-4.1.27/drivers/dma/
H A Dimx-dma.c809 struct dma_chan *chan, struct scatterlist *sgl, imxdma_prep_slave_sg()
824 for_each_sg(sgl, sg, sg_len, i) { for_each_sg()
830 if (sg_dma_len(sgl) & 3 || sgl->dma_address & 3)
834 if (sg_dma_len(sgl) & 1 || sgl->dma_address & 1)
844 desc->sg = sgl;
976 desc->x = xt->sgl[0].size; imxdma_prep_dma_interleaved()
978 desc->w = xt->sgl[0].icg + desc->x; imxdma_prep_dma_interleaved()
808 imxdma_prep_slave_sg( struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, enum dma_transfer_direction direction, unsigned long flags, void *context) imxdma_prep_slave_sg() argument
H A Dcoh901318_lli.c233 struct scatterlist *sgl, unsigned int nents, coh901318_lli_fill_sg()
258 for_each_sg(sgl, sg, nents, i) { for_each_sg()
231 coh901318_lli_fill_sg(struct coh901318_pool *pool, struct coh901318_lli *lli, struct scatterlist *sgl, unsigned int nents, dma_addr_t dev_addr, u32 ctrl_chained, u32 ctrl, u32 ctrl_last, enum dma_transfer_direction dir, u32 ctrl_irq_mask) coh901318_lli_fill_sg() argument
H A Dk3dma.c465 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sglen, k3_dma_prep_slave_sg()
475 if (sgl == NULL) k3_dma_prep_slave_sg()
478 for_each_sg(sgl, sg, sglen, i) { for_each_sg()
493 for_each_sg(sgl, sg, sglen, i) { for_each_sg()
464 k3_dma_prep_slave_sg( struct dma_chan *chan, struct scatterlist *sgl, unsigned int sglen, enum dma_transfer_direction dir, unsigned long flags, void *context) k3_dma_prep_slave_sg() argument
H A Ddma-jz4780.c296 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, jz4780_dma_prep_slave_sg()
310 sg_dma_address(&sgl[i]), jz4780_dma_prep_slave_sg()
311 sg_dma_len(&sgl[i]), jz4780_dma_prep_slave_sg()
295 jz4780_dma_prep_slave_sg( struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, enum dma_transfer_direction direction, unsigned long flags) jz4780_dma_prep_slave_sg() argument
H A Dimg-mdc-dma.c452 struct dma_chan *chan, struct scatterlist *sgl, mdc_prep_slave_sg()
464 if (!sgl) mdc_prep_slave_sg()
478 for_each_sg(sgl, sg, sg_len, i) { for_each_sg()
451 mdc_prep_slave_sg( struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, enum dma_transfer_direction dir, unsigned long flags, void *context) mdc_prep_slave_sg() argument
H A Dmxs-dma.c500 struct dma_chan *chan, struct scatterlist *sgl, mxs_dma_prep_slave_sg()
543 pio = (u32 *) sgl; mxs_dma_prep_slave_sg()
558 for_each_sg(sgl, sg, sg_len, i) { for_each_sg()
499 mxs_dma_prep_slave_sg( struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, enum dma_transfer_direction direction, unsigned long flags, void *context) mxs_dma_prep_slave_sg() argument
H A Dsirf-dma.c532 sdesc->xlen = xt->sgl[0].size / SIRFSOC_DMA_WORD_LEN; sirfsoc_dma_prep_interleaved()
533 sdesc->width = (xt->sgl[0].size + xt->sgl[0].icg) / sirfsoc_dma_prep_interleaved()
H A Dtimb_dma.c509 struct scatterlist *sgl, unsigned int sg_len, td_prep_slave_sg()
520 if (!sgl || !sg_len) { td_prep_slave_sg()
540 for_each_sg(sgl, sg, sg_len, i) { for_each_sg()
508 td_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, enum dma_transfer_direction direction, unsigned long flags, void *context) td_prep_slave_sg() argument
H A Ddma-jz4740.c390 struct dma_chan *c, struct scatterlist *sgl, jz4740_dma_prep_slave_sg()
403 for_each_sg(sgl, sg, sg_len, i) { for_each_sg()
389 jz4740_dma_prep_slave_sg( struct dma_chan *c, struct scatterlist *sgl, unsigned int sg_len, enum dma_transfer_direction direction, unsigned long flags, void *context) jz4740_dma_prep_slave_sg() argument
H A Dmoxart-dma.c269 struct dma_chan *chan, struct scatterlist *sgl, moxart_prep_slave_sg()
319 for_each_sg(sgl, sgent, sg_len, i) { for_each_sg()
268 moxart_prep_slave_sg( struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, enum dma_transfer_direction dir, unsigned long tx_flags, void *context) moxart_prep_slave_sg() argument
H A Dep93xx_dma.c1016 * @sgl: list of buffers to transfer
1017 * @sg_len: number of entries in @sgl
1025 ep93xx_dma_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, ep93xx_dma_prep_slave_sg() argument
1047 for_each_sg(sgl, sg, sg_len, i) { for_each_sg()
H A Dmmp_pdma.c526 mmp_pdma_prep_slave_sg(struct dma_chan *dchan, struct scatterlist *sgl, mmp_pdma_prep_slave_sg() argument
537 if ((sgl == NULL) || (sg_len == 0)) mmp_pdma_prep_slave_sg()
542 for_each_sg(sgl, sg, sg_len, i) { for_each_sg()
544 avail = sg_dma_len(sgl); for_each_sg()
H A Dqcom_bam_dma.c580 * @sgl: scatter gather list
587 struct scatterlist *sgl, unsigned int sg_len, bam_prep_slave_sg()
606 for_each_sg(sgl, sg, sg_len, i) bam_prep_slave_sg()
630 for_each_sg(sgl, sg, sg_len, i) { for_each_sg()
586 bam_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, enum dma_transfer_direction direction, unsigned long flags, void *context) bam_prep_slave_sg() argument
H A Dcppi41.c493 struct dma_chan *chan, struct scatterlist *sgl, unsigned sg_len, cppi41_dma_prep_slave_sg()
504 for_each_sg(sgl, sg, sg_len, i) { for_each_sg()
492 cppi41_dma_prep_slave_sg( struct dma_chan *chan, struct scatterlist *sgl, unsigned sg_len, enum dma_transfer_direction dir, unsigned long tx_flags, void *context) cppi41_dma_prep_slave_sg() argument
H A Dpch_dma.c583 struct scatterlist *sgl, unsigned int sg_len, pd_prep_slave_sg()
611 for_each_sg(sgl, sg, sg_len, i) { for_each_sg()
582 pd_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, enum dma_transfer_direction direction, unsigned long flags, void *context) pd_prep_slave_sg() argument
H A Dedma.c449 struct dma_chan *chan, struct scatterlist *sgl, edma_prep_slave_sg()
462 if (unlikely(!echan || !sgl || !sg_len)) edma_prep_slave_sg()
513 for_each_sg(sgl, sg, sg_len, i) { for_each_sg()
448 edma_prep_slave_sg( struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, enum dma_transfer_direction direction, unsigned long tx_flags, void *context) edma_prep_slave_sg() argument
H A Ds3c24xx-dma.c980 struct dma_chan *chan, struct scatterlist *sgl, s3c24xx_dma_prep_slave_sg()
996 sg_dma_len(sgl), s3cchan->name); s3c24xx_dma_prep_slave_sg()
1047 for_each_sg(sgl, sg, sg_len, tmp) { for_each_sg()
979 s3c24xx_dma_prep_slave_sg( struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, enum dma_transfer_direction direction, unsigned long flags, void *context) s3c24xx_dma_prep_slave_sg() argument
H A Dsun6i-dma.c561 struct dma_chan *chan, struct scatterlist *sgl, sun6i_dma_prep_slave_sg()
574 if (!sgl) sun6i_dma_prep_slave_sg()
586 for_each_sg(sgl, sg, sg_len, i) { for_each_sg()
560 sun6i_dma_prep_slave_sg( struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, enum dma_transfer_direction dir, unsigned long flags, void *context) sun6i_dma_prep_slave_sg() argument
H A Dcoh901318.c2290 coh901318_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, coh901318_prep_slave_sg() argument
2309 if (!sgl) coh901318_prep_slave_sg()
2311 if (sg_dma_len(sgl) == 0) coh901318_prep_slave_sg()
2357 for_each_sg(sgl, sg, sg_len, i) { for_each_sg()
2380 ret = coh901318_lli_fill_sg(&cohc->base->pool, lli, sgl, sg_len,
H A Dat_xdmac.c591 at_xdmac_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, at_xdmac_prep_slave_sg() argument
603 if (!sgl) at_xdmac_prep_slave_sg()
623 for_each_sg(sgl, sg, sg_len, i) { for_each_sg()
/linux-4.1.27/drivers/message/fusion/
H A Dmptctl.c130 static void kfree_sgl(MptSge_t *sgl, dma_addr_t sgl_dma,
793 MptSge_t *sgl, *sgIn; mptctl_do_fw_download() local
874 * Set the sge_offset to the start of the sgl (bytes). mptctl_do_fw_download()
878 if ((sgl = kbuf_alloc_2_sgl(fwlen, sgdir, sge_offset, mptctl_do_fw_download()
901 dctlprintk(iocp, printk(MYIOC_s_DEBUG_FMT "DbG: sgl buffer = %p, sgfrags = %d\n", mptctl_do_fw_download()
902 iocp->name, sgl, numfrags)); mptctl_do_fw_download()
905 * Parse SG list, copying sgl itself, mptctl_do_fw_download()
909 sgIn = sgl; mptctl_do_fw_download()
975 if (sgl) mptctl_do_fw_download()
976 kfree_sgl(sgl, sgl_dma, buflist, iocp); mptctl_do_fw_download()
1005 kfree_sgl(sgl, sgl_dma, buflist, iocp); mptctl_do_fw_download()
1020 * sglbuf_dma - pointer to the (dma) sgl
1022 * pointer to the (virtual) sgl if successful.
1031 MptSge_t *sgl; kbuf_alloc_2_sgl() local
1069 * sgl = sglbuf = point to beginning of sg buffer kbuf_alloc_2_sgl()
1075 sgl = sglbuf; kbuf_alloc_2_sgl()
1097 sgl->FlagsLength = (0x10000000|sgdir|this_alloc); kbuf_alloc_2_sgl()
1100 sgl->Address = dma_addr; kbuf_alloc_2_sgl()
1104 sgl++; kbuf_alloc_2_sgl()
1131 sgl[-1].FlagsLength |= 0xC1000000; kbuf_alloc_2_sgl()
1171 kfree_sgl(MptSge_t *sgl, dma_addr_t sgl_dma, struct buflist *buflist, MPT_ADAPTER *ioc) kfree_sgl() argument
1173 MptSge_t *sg = sgl; kfree_sgl()
1220 pci_free_consistent(ioc->pcidev, MAX_SGL_BYTES, sgl, sgl_dma); kfree_sgl()
/linux-4.1.27/drivers/scsi/megaraid/
H A Dmegaraid_sas_base.c1246 &pthru->sgl); megasas_build_dcdb()
1250 &pthru->sgl); megasas_build_dcdb()
1253 &pthru->sgl); megasas_build_dcdb()
1389 &ldio->sgl); megasas_build_ldio()
1392 ldio->sge_count = megasas_make_sgl64(instance, scp, &ldio->sgl); megasas_build_ldio()
1394 ldio->sge_count = megasas_make_sgl32(instance, scp, &ldio->sgl); megasas_build_ldio()
1479 mfi_sgl = &ldio->sgl; megasas_dump_pending_frames()
1489 mfi_sgl = &pthru->sgl; megasas_dump_pending_frames()
1500 printk(KERN_ERR "megasas: sgl len : 0x%x, sgl addr : 0x%llx ", megasas_dump_pending_frames()
1504 printk(KERN_ERR "megasas: sgl len : 0x%x, sgl addr : 0x%x ", megasas_dump_pending_frames()
1934 dcmd->sgl.sge32[0].phys_addr = megasas_get_ld_vf_affiliation_111()
1937 dcmd->sgl.sge32[0].phys_addr = new_affiliation_111_h; megasas_get_ld_vf_affiliation_111()
1939 dcmd->sgl.sge32[0].length = megasas_get_ld_vf_affiliation_111()
2050 dcmd->sgl.sge32[0].phys_addr = instance->vf_affiliation_h; megasas_get_ld_vf_affiliation_12()
2052 dcmd->sgl.sge32[0].phys_addr = new_affiliation_h; megasas_get_ld_vf_affiliation_12()
2054 dcmd->sgl.sge32[0].length = (MAX_LOGICAL_DRIVES + 1) * megasas_get_ld_vf_affiliation_12()
2216 dcmd->sgl.sge32[0].phys_addr = instance->hb_host_mem_h; megasas_sriov_start_heartbeat()
2217 dcmd->sgl.sge32[0].length = sizeof(struct MR_CTRL_HB_HOST_MEM); megasas_sriov_start_heartbeat()
3837 dcmd->sgl.sge32[0].phys_addr = cpu_to_le32(ci_h); megasas_get_pd_list()
3838 dcmd->sgl.sge32[0].length = cpu_to_le32(MEGASAS_MAX_PD * sizeof(struct MR_PD_LIST)); megasas_get_pd_list()
3936 dcmd->sgl.sge32[0].phys_addr = cpu_to_le32(ci_h); megasas_get_ld_list()
3937 dcmd->sgl.sge32[0].length = cpu_to_le32(sizeof(struct MR_LD_LIST)); megasas_get_ld_list()
4029 dcmd->sgl.sge32[0].phys_addr = cpu_to_le32(ci_h); megasas_ld_list_query()
4030 dcmd->sgl.sge32[0].length = cpu_to_le32(sizeof(struct MR_LD_TARGETID_LIST)); megasas_ld_list_query()
4168 dcmd->sgl.sge32[0].phys_addr = cpu_to_le32(ci_h); megasas_get_ctrl_info()
4169 dcmd->sgl.sge32[0].length = cpu_to_le32(sizeof(struct megasas_ctrl_info)); megasas_get_ctrl_info()
4239 dcmd->sgl.sge32[0].phys_addr = cpu_to_le32(instance->crash_dump_h); megasas_set_crash_dump_params()
4240 dcmd->sgl.sge32[0].length = cpu_to_le32(CRASH_DMA_BUF_SIZE); megasas_set_crash_dump_params()
4815 dcmd->sgl.sge32[0].phys_addr = cpu_to_le32(el_info_h); megasas_get_seq_num()
4816 dcmd->sgl.sge32[0].length = cpu_to_le32(sizeof(struct megasas_evt_log_info)); megasas_get_seq_num()
4945 dcmd->sgl.sge32[0].phys_addr = cpu_to_le32(instance->evt_detail_h); megasas_register_aen()
4946 dcmd->sgl.sge32[0].length = cpu_to_le32(sizeof(struct megasas_evt_detail)); megasas_register_aen()
6102 if (!ioc->sgl[i].iov_len) megasas_mgmt_fw_ioctl()
6106 ioc->sgl[i].iov_len, megasas_mgmt_fw_ioctl()
6120 kern_sge32[i].length = cpu_to_le32(ioc->sgl[i].iov_len); megasas_mgmt_fw_ioctl()
6126 if (copy_from_user(kbuff_arr[i], ioc->sgl[i].iov_base, megasas_mgmt_fw_ioctl()
6127 (u32) (ioc->sgl[i].iov_len))) { megasas_mgmt_fw_ioctl()
6163 if (copy_to_user(ioc->sgl[i].iov_base, kbuff_arr[i], megasas_mgmt_fw_ioctl()
6164 ioc->sgl[i].iov_len)) { megasas_mgmt_fw_ioctl()
6432 if (get_user(ptr, &cioc->sgl[i].iov_base) || megasas_mgmt_compat_ioctl_fw()
6433 put_user(compat_ptr(ptr), &ioc->sgl[i].iov_base) || megasas_mgmt_compat_ioctl_fw()
6434 copy_in_user(&ioc->sgl[i].iov_len, megasas_mgmt_compat_ioctl_fw()
6435 &cioc->sgl[i].iov_len, sizeof(compat_size_t))) megasas_mgmt_compat_ioctl_fw()
H A Dmegaraid_sas.h1318 union megasas_sgl sgl; /*28h */ member in struct:megasas_io_frame
1345 union megasas_sgl sgl; /*30h */ member in struct:megasas_pthru_frame
1372 union megasas_sgl sgl; /*28h */ member in struct:megasas_dcmd_frame
1424 } sgl; member in struct:megasas_smp_frame
1453 } sgl; member in struct:megasas_stp_frame
1918 struct iovec sgl[MAX_IOCTL_SGE]; member in struct:megasas_iocpacket
1941 struct compat_iovec sgl[MAX_IOCTL_SGE]; member in struct:compat_megasas_iocpacket
H A Dmegaraid_mbox.c1354 struct scatterlist *sgl; megaraid_mbox_mksgl() local
1373 scsi_for_each_sg(scp, sgl, sgcnt, i) { scsi_for_each_sg()
1374 ccb->sgl64[i].address = sg_dma_address(sgl); scsi_for_each_sg()
1375 ccb->sgl64[i].length = sg_dma_len(sgl); scsi_for_each_sg()
1563 struct scatterlist *sgl; megaraid_mbox_build_cmd() local
1566 sgl = scsi_sglist(scp); megaraid_mbox_build_cmd()
1567 if (sg_page(sgl)) { megaraid_mbox_build_cmd()
1568 vaddr = (caddr_t) sg_virt(&sgl[0]); megaraid_mbox_build_cmd()
2224 struct scatterlist *sgl; megaraid_mbox_dpc() local
2308 sgl = scsi_sglist(scp); megaraid_mbox_dpc()
2309 if (sg_page(sgl)) { megaraid_mbox_dpc()
2310 c = *(unsigned char *) sg_virt(&sgl[0]); megaraid_mbox_dpc()
/linux-4.1.27/drivers/hsi/controllers/
H A Domap_ssi_port.c205 omap_ssi->gdd_trn[lch].sg = msg->sgt.sgl; ssi_claim_lch()
227 err = dma_map_sg(&ssi->device, msg->sgt.sgl, msg->sgt.nents, ssi_start_dma()
241 d_addr = sg_dma_address(msg->sgt.sgl); ssi_start_dma()
243 err = dma_map_sg(&ssi->device, msg->sgt.sgl, msg->sgt.nents, ssi_start_dma()
255 s_addr = sg_dma_address(msg->sgt.sgl); ssi_start_dma()
269 writew_relaxed(SSI_BYTES_TO_FRAMES(msg->sgt.sgl->length), ssi_start_dma()
320 if ((msg->sgt.nents) && (msg->sgt.sgl->length > sizeof(u32))) ssi_start_transfer()
430 msg->channel, msg, msg->sgt.sgl->length, list_for_each_safe()
862 if ((!msg->sgt.nents) || (!msg->sgt.sgl->length)) { ssi_pio_complete()
871 buf = sg_virt(msg->sgt.sgl) + msg->actual_len; ssi_pio_complete()
881 if (msg->actual_len >= msg->sgt.sgl->length) ssi_pio_complete()
H A Domap_ssi.c226 dma_unmap_sg(&ssi->device, msg->sgt.sgl, msg->sgt.nents, dir); ssi_gdd_complete()
247 msg->actual_len = sg_dma_len(msg->sgt.sgl); ssi_gdd_complete()
/linux-4.1.27/drivers/net/ethernet/chelsio/cxgb4/
H A Dsge.c150 struct ulptx_sgl *sgl; member in struct:tx_sw_desc
322 const struct ulptx_sgl *sgl, const struct sge_txq *q) unmap_sgl()
328 dma_unmap_single(dev, be64_to_cpu(sgl->addr0), ntohl(sgl->len0), unmap_sgl()
331 dma_unmap_page(dev, be64_to_cpu(sgl->addr0), ntohl(sgl->len0), unmap_sgl()
340 for (p = sgl->sge; nfrags >= 2; nfrags -= 2) { unmap_sgl()
401 unmap_sgl(dev, d->skb, d->sgl, q); free_tx_desc()
839 * @sgl: starting location for writing the SGL
847 * main body except for the first @start bytes. @sgl must be 16-byte
850 * wrap around, i.e., @end > @sgl.
853 struct ulptx_sgl *sgl, u64 *end, unsigned int start, write_sgl()
864 sgl->len0 = htonl(len); write_sgl()
865 sgl->addr0 = cpu_to_be64(addr[0] + start); write_sgl()
868 sgl->len0 = htonl(skb_frag_size(&si->frags[0])); write_sgl()
869 sgl->addr0 = cpu_to_be64(addr[1]); write_sgl()
872 sgl->cmd_nsge = htonl(ULPTX_CMD_V(ULP_TX_SC_DSGL) | write_sgl()
881 to = (u8 *)end > (u8 *)q->stat ? buf : sgl->sge; write_sgl()
895 unsigned int part0 = (u8 *)q->stat - (u8 *)sgl->sge, part1; write_sgl()
898 memcpy(sgl->sge, buf, part0); write_sgl()
1276 q->q.sdesc[last_desc].sgl = (struct ulptx_sgl *)(cpl + 1); t4_eth_xmit()
321 unmap_sgl(struct device *dev, const struct sk_buff *skb, const struct ulptx_sgl *sgl, const struct sge_txq *q) unmap_sgl() argument
852 write_sgl(const struct sk_buff *skb, struct sge_txq *q, struct ulptx_sgl *sgl, u64 *end, unsigned int start, const dma_addr_t *addr) write_sgl() argument
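The write_sgl() comment above and the part0/part1 copy (lines 895-898) describe how an SGL that would run past the end of the hardware ring is staged in a stack buffer and split: part0 fills up to the ring's status page, the remainder wraps to the ring base. A hedged sketch of that split copy, with invented names:

#include <linux/kernel.h>
#include <linux/string.h>

static void copy_wrapped(void *ring_end, void *ring_base,
			 void *dst, const void *buf, size_t len)
{
	size_t part0 = min_t(size_t, len, (char *)ring_end - (char *)dst);

	memcpy(dst, buf, part0);		/* up to the end of the ring */
	if (len > part0)			/* wrap to the ring base */
		memcpy(ring_base, (const char *)buf + part0, len - part0);
}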
/linux-4.1.27/drivers/media/pci/saa7134/
H A Dsaa7134-vbi.c124 if (dma->sgl->offset) { buffer_prepare()
134 return saa7134_pgtable_build(dev->pci, &dmaq->pt, dma->sgl, dma->nents, buffer_prepare()
H A Dsaa7134-ts.c110 return saa7134_pgtable_build(dev->pci, &dmaq->pt, dma->sgl, dma->nents, saa7134_ts_buffer_prepare()
/linux-4.1.27/drivers/scsi/be2iscsi/
H A Dbe_mgmt.h55 struct amap_mcc_sge sgl[19]; member in union:amap_mcc_wrb_payload::__anon8670
84 struct mcc_sge sgl[19]; member in union:mcc_wrb_payload::__anon8671
H A Dbe_main.c3164 struct be_dma_mem *sgl) be_sgl_create_contiguous()
3169 WARN_ON(!sgl); be_sgl_create_contiguous()
3171 sgl->va = virtual_address; be_sgl_create_contiguous()
3172 sgl->dma = (unsigned long)physical_address; be_sgl_create_contiguous()
3173 sgl->size = length; be_sgl_create_contiguous()
3178 static void be_sgl_destroy_contiguous(struct be_dma_mem *sgl) be_sgl_destroy_contiguous() argument
3180 memset(sgl, 0, sizeof(*sgl)); be_sgl_destroy_contiguous()
3185 struct mem_array *pmem, struct be_dma_mem *sgl) hwi_build_be_sgl_arr()
3187 if (sgl->va) hwi_build_be_sgl_arr()
3188 be_sgl_destroy_contiguous(sgl); hwi_build_be_sgl_arr()
3192 pmem->size, sgl); hwi_build_be_sgl_arr()
3197 struct mem_array *pmem, struct be_dma_mem *sgl) hwi_build_be_sgl_by_offset()
3199 if (sgl->va) hwi_build_be_sgl_by_offset()
3200 be_sgl_destroy_contiguous(sgl); hwi_build_be_sgl_by_offset()
3204 pmem->size, sgl); hwi_build_be_sgl_by_offset()
3467 struct be_dma_mem sgl; beiscsi_post_template_hdr() local
3477 hwi_build_be_sgl_arr(phba, pm_arr, &sgl); beiscsi_post_template_hdr()
3479 &phba->ctrl, &sgl); beiscsi_post_template_hdr()
3502 struct be_dma_mem sgl; beiscsi_post_pages() local
3516 hwi_build_be_sgl_arr(phba, pm_arr, &sgl); beiscsi_post_pages()
3517 status = be_cmd_iscsi_post_sgl_pages(&phba->ctrl, &sgl, beiscsi_post_pages()
3523 "BM_%d : post sgl failed.\n"); beiscsi_post_pages()
3568 struct be_dma_mem sgl; beiscsi_create_wrb_rings() local
3643 hwi_build_be_sgl_by_offset(phba, &pwrb_arr[i], &sgl); beiscsi_create_wrb_rings()
3644 status = be_cmd_wrbq_create(&phba->ctrl, &sgl, beiscsi_create_wrb_rings()
4694 * the wrb and sgl if needed for the command. And it will prep
3162 be_sgl_create_contiguous(void *virtual_address, u64 physical_address, u32 length, struct be_dma_mem *sgl) be_sgl_create_contiguous() argument
3184 hwi_build_be_sgl_arr(struct beiscsi_hba *phba, struct mem_array *pmem, struct be_dma_mem *sgl) hwi_build_be_sgl_arr() argument
3196 hwi_build_be_sgl_by_offset(struct beiscsi_hba *phba, struct mem_array *pmem, struct be_dma_mem *sgl) hwi_build_be_sgl_by_offset() argument
/linux-4.1.27/drivers/net/wireless/brcm80211/brcmfmac/
H A Dbcmsdh.c519 struct scatterlist *sgl; brcmf_sdiod_sglist_rw() local
563 mmc_dat.sg = sdiodev->sgtable.sgl; brcmf_sdiod_sglist_rw()
579 sgl = sdiodev->sgtable.sgl; brcmf_sdiod_sglist_rw()
589 sg_set_buf(sgl, pkt_data, sg_data_sz); brcmf_sdiod_sglist_rw()
592 sgl = sg_next(sgl); brcmf_sdiod_sglist_rw()
660 sg_init_table(sdiodev->sgtable.sgl, sdiodev->sgtable.orig_nents);
/linux-4.1.27/arch/m68k/fpsp040/
H A Dx_store.S88 | ;ext=00, sgl=01, dbl=10
184 | sgl_exp = ext_exp - $3fff(ext bias) + $7f(sgl bias)
H A Dx_unfl.S144 | ;1=sgl, 2=dbl
224 | ;If destination format is sgl/dbl,
H A Dround.S26 | sgl = $0001xxxx
148 bfextu LOCAL_HI(%a0){#24:#2},%d3 |sgl prec. g-r are 2 bits right
149 movel #30,%d2 |of the sgl prec. limits
179 .set ad_1_sgl,0x00000100 | constant to add 1 to l-bit in sgl prec
204 andil #0xffffff00,LOCAL_HI(%a0) |truncate bits beyond sgl limit
H A Dutil.S44 | ;largest magnitude +sgl in ext
235 bra end_ovfr |inf is same for all precisions (ext,dbl,sgl)
393 | 10 1 sgl
477 | ;smallest +sgl denorm
H A Dres_func.S1542 | 46fffe00 in sgl prec = 400d0000fffe000000000000 in ext prec
1545 | c7000000 in sgl prec = c00e00008000000000000000 in ext prec
1563 | 46ffff00 in sgl prec = 400d0000ffff000000000000 in ext prec
1571 | c7000080 in sgl prec = c00e00008000800000000000 in ext prec
1587 | 42fe0000 in sgl prec = 40050000fe00000000000000 in ext prec
1590 | c3000000 in sgl prec = c00600008000000000000000 in ext prec
1608 | 42ff0000 in sgl prec = 40050000ff00000000000000 in ext prec
1616 | c3008000 in sgl prec = c00600008080000000000000 in ext prec
1738 | ;formats sgl, dbl, ext are
1803 movel #1,%d0 |set round precision to sgl
1890 | d0 is the round precision (=1 for sgl; =2 for dbl)
H A Dget_op.S250 bnes src_sd_dnrm |if bit 5 set, handle sgl/dbl denorms
278 btstb #4,CMDREG1B(%a6) |differentiate between sgl/dbl denorm
281 movew #0x3f81,%d1 |write bias for sgl denorm
300 bsr mk_norm |convert sgl/dbl denorm to norm
H A Dbugfix.S191 | Check for opclass 0. If not, go and check for opclass 2 and sgl.
353 cmpiw #0x4400,%d0 |test for opclass 2 and size=sgl
/linux-4.1.27/drivers/media/pci/cx25821/
H A Dcx25821-video.c197 sgt->sgl, 0, UNSET, cx25821_buffer_prepare()
202 sgt->sgl, UNSET, 0, cx25821_buffer_prepare()
211 sgt->sgl, line0_offset, cx25821_buffer_prepare()
217 sgt->sgl, cx25821_buffer_prepare()
223 sgt->sgl, cx25821_buffer_prepare()
/linux-4.1.27/drivers/infiniband/hw/cxgb4/
H A Dmem.c65 struct ulptx_sgl *sgl; _c4iw_write_mem_dma_aligned() local
74 wr_len = roundup(sizeof(*req) + sizeof(*sgl), 16); _c4iw_write_mem_dma_aligned()
94 sgl = (struct ulptx_sgl *)(req + 1); _c4iw_write_mem_dma_aligned()
95 sgl->cmd_nsge = cpu_to_be32(ULPTX_CMD_V(ULP_TX_SC_DSGL) | _c4iw_write_mem_dma_aligned()
97 sgl->len0 = cpu_to_be32(len); _c4iw_write_mem_dma_aligned()
98 sgl->addr0 = cpu_to_be64(data); _c4iw_write_mem_dma_aligned()
754 for_each_sg(mhp->umem->sg_head.sgl, sg, mhp->umem->nmap, entry) { c4iw_reg_user_mr()
/linux-4.1.27/drivers/target/
H A Dtarget_core_rd.c477 rd_execute_rw(struct se_cmd *cmd, struct scatterlist *sgl, u32 sgl_nents, rd_execute_rw() argument
521 sg_miter_start(&m, sgl, sgl_nents, rd_execute_rw()
530 pr_debug("RD[%u]: invalid sgl %p len %zu\n", rd_execute_rw()
H A Dtarget_core_transport.c1336 transport_generic_map_mem_to_cmd(struct se_cmd *cmd, struct scatterlist *sgl, transport_generic_map_mem_to_cmd() argument
1339 if (!sgl || !sgl_count) transport_generic_map_mem_to_cmd()
1353 cmd->t_data_sg = sgl; transport_generic_map_mem_to_cmd()
1377 * @sgl: struct scatterlist memory for unidirectional mapping
1394 struct scatterlist *sgl, u32 sgl_count, target_submit_cmd_map_sgls()
1460 BUG_ON(!sgl); target_submit_cmd_map_sgls()
1474 if (sgl) target_submit_cmd_map_sgls()
1475 buf = kmap(sg_page(sgl)) + sgl->offset; target_submit_cmd_map_sgls()
1478 memset(buf, 0, sgl->length); target_submit_cmd_map_sgls()
1479 kunmap(sg_page(sgl)); target_submit_cmd_map_sgls()
1483 rc = transport_generic_map_mem_to_cmd(se_cmd, sgl, sgl_count, target_submit_cmd_map_sgls()
2156 static inline void transport_free_sgl(struct scatterlist *sgl, int nents) transport_free_sgl() argument
2161 for_each_sg(sgl, sg, nents, count) transport_free_sgl()
2164 kfree(sgl); transport_free_sgl()
2282 target_alloc_sgl(struct scatterlist **sgl, unsigned int *nents, u32 length, target_alloc_sgl() argument
2308 *sgl = sg; target_alloc_sgl()
1391 target_submit_cmd_map_sgls(struct se_cmd *se_cmd, struct se_session *se_sess, unsigned char *cdb, unsigned char *sense, u32 unpacked_lun, u32 data_length, int task_attr, int data_dir, int flags, struct scatterlist *sgl, u32 sgl_count, struct scatterlist *sgl_bidi, u32 sgl_bidi_count, struct scatterlist *sgl_prot, u32 sgl_prot_count) target_submit_cmd_map_sgls() argument
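
target_alloc_sgl() and transport_free_sgl() above allocate and release a page-backed table. A hedged sketch of the allocation side (alloc_page_sgl is illustrative; the real code also honors a per-entry length cap and an optional zeroing flag):

    #include <linux/scatterlist.h>
    #include <linux/slab.h>

    /* Sketch of the target_alloc_sgl() pattern: flat (unchained) table,
     * one page per entry, unwound on failure as transport_free_sgl() does. */
    static struct scatterlist *alloc_page_sgl(unsigned int nents)
    {
            struct scatterlist *sgl;
            unsigned int i;

            sgl = kcalloc(nents, sizeof(*sgl), GFP_KERNEL);
            if (!sgl)
                    return NULL;
            sg_init_table(sgl, nents);      /* clears entries, marks the end */

            for (i = 0; i < nents; i++) {
                    struct page *page = alloc_page(GFP_KERNEL);

                    if (!page)
                            goto out_free;
                    sg_set_page(&sgl[i], page, PAGE_SIZE, 0);
            }
            return sgl;

    out_free:
            while (i--)
                    __free_page(sg_page(&sgl[i]));
            kfree(sgl);
            return NULL;
    }
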
H A Dtarget_core_pscsi.c873 pscsi_map_sg(struct se_cmd *cmd, struct scatterlist *sgl, u32 sgl_nents, pscsi_map_sg() argument
881 int nr_pages = (cmd->data_length + sgl[0].offset + pscsi_map_sg()
890 for_each_sg(sgl, sg, sgl_nents, i) { for_each_sg()
988 struct scatterlist *sgl = cmd->t_data_sg; pscsi_execute_cmd() local
1010 if (!sgl) { pscsi_execute_cmd()
1024 ret = pscsi_map_sg(cmd, sgl, sgl_nents, data_direction, &hbio); pscsi_execute_cmd()
H A Dtarget_core_file.c319 static int fd_do_rw(struct se_cmd *cmd, struct scatterlist *sgl, fd_do_rw() argument
338 for_each_sg(sgl, sg, sgl_nents, i) { for_each_sg()
611 fd_execute_rw(struct se_cmd *cmd, struct scatterlist *sgl, u32 sgl_nents, fd_execute_rw() argument
641 ret = fd_do_rw(cmd, sgl, sgl_nents, 0); fd_execute_rw()
675 ret = fd_do_rw(cmd, sgl, sgl_nents, 1); fd_execute_rw()
H A Dtarget_core_iblock.c675 iblock_execute_rw(struct se_cmd *cmd, struct scatterlist *sgl, u32 sgl_nents, iblock_execute_rw() argument
750 for_each_sg(sgl, sg, sgl_nents, i) { for_each_sg()
/linux-4.1.27/include/linux/mtd/
H A Dubi.h252 int ubi_leb_read_sg(struct ubi_volume_desc *desc, int lnum, struct ubi_sgl *sgl,
280 struct ubi_sgl *sgl, int offset, int len) ubi_read_sg()
282 return ubi_leb_read_sg(desc, lnum, sgl, offset, len, 0); ubi_read_sg()
279 ubi_read_sg(struct ubi_volume_desc *desc, int lnum, struct ubi_sgl *sgl, int offset, int len) ubi_read_sg() argument
/linux-4.1.27/drivers/mmc/core/
H A Dsdio_ops.c165 data.sg = sgtable.sgl; mmc_io_rw_extended()
/linux-4.1.27/drivers/media/pci/cx88/
H A Dcx88-vbi.c145 cx88_risc_buffer(dev->pci, &buf->risc, sgt->sgl, buffer_prepare()
H A Dcx88-video.c463 sgt->sgl, 0, UNSET, buffer_prepare()
468 sgt->sgl, UNSET, 0, buffer_prepare()
473 sgt->sgl, buffer_prepare()
480 sgt->sgl, buffer_prepare()
488 sgt->sgl, 0, buf->bpl, buffer_prepare()
/linux-4.1.27/drivers/crypto/qce/
H A Ddma.c112 struct scatterlist *sg = sgt->sgl, *sg_last = NULL; qce_sgtable_add()
H A Dablkcipher.c117 rctx->dst_sg = rctx->dst_tbl.sgl; qce_ablkcipher_async_req_handle()
/linux-4.1.27/drivers/block/
H A Dnvme-scsi.c380 struct sg_iovec sgl; nvme_trans_copy_to_user() local
383 not_copied = copy_from_user(&sgl, hdr->dxferp + nvme_trans_copy_to_user()
388 xfer_len = min(remaining, sgl.iov_len); nvme_trans_copy_to_user()
389 not_copied = copy_to_user(sgl.iov_base, index, nvme_trans_copy_to_user()
421 struct sg_iovec sgl; nvme_trans_copy_from_user() local
424 not_copied = copy_from_user(&sgl, hdr->dxferp + nvme_trans_copy_from_user()
429 xfer_len = min(remaining, sgl.iov_len); nvme_trans_copy_from_user()
430 not_copied = copy_from_user(index, sgl.iov_base, nvme_trans_copy_from_user()
2097 struct sg_iovec sgl; nvme_trans_do_nvme_io() local
2099 retcode = copy_from_user(&sgl, hdr->dxferp + nvme_trans_do_nvme_io()
2104 unit_len = sgl.iov_len; nvme_trans_do_nvme_io()
2106 next_mapping_addr = sgl.iov_base; nvme_trans_do_nvme_io()
2174 struct sg_iovec sgl; nvme_trans_io() local
2205 not_copied = copy_from_user(&sgl, hdr->dxferp + nvme_trans_io()
2210 sum_iov_len += sgl.iov_len; nvme_trans_io()
2212 if (sgl.iov_len % (1 << ns->lba_shift) != 0) { nvme_trans_io()
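
The nvme-scsi fragments above pull one struct sg_iovec at a time out of a user-space array, and nvme_trans_io() rejects any iov_len that is not a multiple of the LBA size. A hedged sketch of that validation loop (sum_user_iov is illustrative; lba_shift as in the driver):

    #include <linux/uaccess.h>
    #include <scsi/sg.h>

    /* Sketch: total and sanity-check a userspace iovec array the way
     * nvme_trans_io() above does before building the actual transfer. */
    static int sum_user_iov(struct sg_iovec __user *uiov, int cnt,
                            int lba_shift, size_t *total)
    {
            struct sg_iovec sgl;
            int i;

            *total = 0;
            for (i = 0; i < cnt; i++) {
                    if (copy_from_user(&sgl, &uiov[i], sizeof(sgl)))
                            return -EFAULT;
                    if (sgl.iov_len % (1u << lba_shift))    /* LBA-aligned only */
                            return -EINVAL;
                    *total += sgl.iov_len;
            }
            return 0;
    }
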
/linux-4.1.27/net/8021q/
H A Dvlan_dev.c383 struct scatterlist *sgl, unsigned int sgc) vlan_dev_fcoe_ddp_setup()
390 rc = ops->ndo_fcoe_ddp_setup(real_dev, xid, sgl, sgc); vlan_dev_fcoe_ddp_setup()
441 struct scatterlist *sgl, unsigned int sgc) vlan_dev_fcoe_ddp_target()
448 rc = ops->ndo_fcoe_ddp_target(real_dev, xid, sgl, sgc); vlan_dev_fcoe_ddp_target()
382 vlan_dev_fcoe_ddp_setup(struct net_device *dev, u16 xid, struct scatterlist *sgl, unsigned int sgc) vlan_dev_fcoe_ddp_setup() argument
440 vlan_dev_fcoe_ddp_target(struct net_device *dev, u16 xid, struct scatterlist *sgl, unsigned int sgc) vlan_dev_fcoe_ddp_target() argument
/linux-4.1.27/drivers/crypto/
H A Domap-sham.c153 struct scatterlist sgl; member in struct:omap_sham_reqctx
580 * set correctly so use a local SG entry (sgl) with the omap_sham_xmit_dma()
584 sg_init_table(&ctx->sgl, 1); omap_sham_xmit_dma()
585 ctx->sgl.page_link = ctx->sg->page_link; omap_sham_xmit_dma()
586 ctx->sgl.offset = ctx->sg->offset; omap_sham_xmit_dma()
587 sg_dma_len(&ctx->sgl) = len32; omap_sham_xmit_dma()
588 sg_dma_address(&ctx->sgl) = sg_dma_address(ctx->sg); omap_sham_xmit_dma()
590 tx = dmaengine_prep_slave_sg(dd->dma_lch, &ctx->sgl, 1, omap_sham_xmit_dma()
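
omap_sham_xmit_dma() above hand-builds a one-entry local scatterlist because it must alias an existing entry while clamping its length for the DMA engine. When no aliasing is needed, the stock helper for a single linear buffer is sg_init_one(); a trivial sketch (describe_buf is illustrative):

    #include <linux/scatterlist.h>

    /* Sketch: one sg entry covering one linear buffer. Equivalent to
     * sg_init_table(sg, 1) followed by sg_set_buf(sg, buf, len). */
    static void describe_buf(struct scatterlist *sg, const void *buf,
                             unsigned int len)
    {
            sg_init_one(sg, buf, len);
    }
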
/linux-4.1.27/drivers/dma/hsu/
H A Dhsu.c223 struct dma_chan *chan, struct scatterlist *sgl, hsu_dma_prep_slave_sg()
236 for_each_sg(sgl, sg, sg_len, i) { for_each_sg()
222 hsu_dma_prep_slave_sg( struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, enum dma_transfer_direction direction, unsigned long flags, void *context) hsu_dma_prep_slave_sg() argument
/linux-4.1.27/include/scsi/
H A Dscsi_cmnd.h178 return cmd->sdb.table.sgl; scsi_sglist()
315 return cmd->prot_sdb ? cmd->prot_sdb->table.sgl : NULL; scsi_prot_sglist()
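
scsi_sglist() and scsi_prot_sglist() above hand back a command's data and protection tables. A sketch of consuming the data table with the scsi_for_each_sg() helper (cmd_sgl_bytes is an illustrative name):

    #include <scsi/scsi_cmnd.h>

    /* Sketch: total the bytes described by a SCSI command's data sgl,
     * using the accessors quoted from scsi_cmnd.h above. */
    static unsigned int cmd_sgl_bytes(struct scsi_cmnd *cmd)
    {
            struct scatterlist *sg;
            unsigned int total = 0;
            int i;

            scsi_for_each_sg(cmd, sg, scsi_sg_count(cmd), i)
                    total += sg->length;
            return total;
    }
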
/linux-4.1.27/drivers/scsi/csiostor/
H A Dcsio_scsi.c286 * @sgl: ULP TX SGL pointer.
291 struct ulptx_sgl *sgl) csio_scsi_init_ultptx_dsgl()
301 sgl->cmd_nsge = htonl(ULPTX_CMD_V(ULP_TX_SC_DSGL) | ULPTX_MORE_F | csio_scsi_init_ultptx_dsgl()
307 sgl->addr0 = cpu_to_be64(sg_dma_address(sgel)); csio_scsi_init_ultptx_dsgl()
308 sgl->len0 = cpu_to_be32(sg_dma_len(sgel)); csio_scsi_init_ultptx_dsgl()
309 sge_pair = (struct ulptx_sge_pair *)(sgl + 1); csio_scsi_init_ultptx_dsgl()
331 sgl->addr0 = cpu_to_be64(dma_buf->paddr); csio_scsi_init_ultptx_dsgl()
332 sgl->len0 = cpu_to_be32( csio_scsi_init_ultptx_dsgl()
334 sge_pair = (struct ulptx_sge_pair *)(sgl + 1); csio_scsi_init_ultptx_dsgl()
365 struct ulptx_sgl *sgl; csio_scsi_init_read_wr() local
397 sgl = (struct ulptx_sgl *)((uintptr_t)wrp + csio_scsi_init_read_wr()
401 csio_scsi_init_ultptx_dsgl(hw, req, sgl); csio_scsi_init_read_wr()
418 struct ulptx_sgl *sgl; csio_scsi_init_write_wr() local
450 sgl = (struct ulptx_sgl *)((uintptr_t)wrp + csio_scsi_init_write_wr()
454 csio_scsi_init_ultptx_dsgl(hw, req, sgl); csio_scsi_init_write_wr()
290 csio_scsi_init_ultptx_dsgl(struct csio_hw *hw, struct csio_ioreq *req, struct ulptx_sgl *sgl) csio_scsi_init_ultptx_dsgl() argument
/linux-4.1.27/arch/m68k/ifpsp060/src/
H A Dpfpsp.S414 set SGL_LO, 0x3f81 # min sgl prec exponent
415 set SGL_HI, 0x407e # max sgl prec exponent
515 set sgl_thresh, 0x3f81 # minimum sgl exponent
1399 bsr.l funimp_skew # skew sgl or dbl inputs
1432 cmpi.b %d0,&0x11 # is class = 2 & fmt = sgl?
1976 # "non-skewed" operand for cases of sgl and dbl src INFs,NANs, and DENORMs.
2303 cmpi.b %d0,&0x1 # was src sgl?
3132 # denorm operand in the sgl or dbl format. NANs also become skewed, but can't
3196 short tbl_operr - tbl_operr # sgl prec shouldn't happen
3328 # denorm operand in the sgl or dbl format. NANs also become skewed and must be
3366 short fsnan_out_s - tbl_snan # sgl prec shouldn't happen
3432 lsr.l &0x8,%d1 # shift mantissa for sgl
3433 or.l %d1,%d0 # create sgl SNAN
3447 lsr.l &0x8,%d1 # shift mantissa for sgl
3448 or.l %d1,%d0 # create sgl SNAN
3791 # in the sgl or dbl format.
6142 # sgl = $0004xxxx #
6205 cmpi.b %d1, &s_mode # is prec = sgl?
6222 cmpi.b %d1, &s_mode # is prec = sgl?
6239 cmpi.b %d1, &s_mode # is prec = sgl?
6245 set ad_1_sgl, 0x00000100 # constant to add 1 to l-bit in sgl prec
6262 and.l &0xffffff00, FTEMP_HI(%a0) # truncate bits beyond sgl limit
6315 cmpi.b %d1, &s_mode # is prec sgl?
6345 bne.b ext_grs_not_ext # no; go handle sgl or dbl
6358 cmpi.b %d1, &s_mode # is rnd prec = sgl?
6362 # sgl:
6374 bfextu FTEMP_HI(%a0){&24:&2}, %d3 # sgl prec. g-r are 2 bits right
6375 mov.l &30, %d2 # of the sgl prec. limits
6730 # set_tag_s(): return the optype of the input sgl fp number #
6903 mov.w &s_mode,%d1 # force rnd prec = sgl
7054 # _round() - needed to create EXOP for sgl/dbl precision #
7056 # ovf_res() - create default overflow result for sgl/dbl precision#
7057 # unf_res() - create default underflow result for sgl/dbl prec. #
7059 # dst_sgl() - create rounded sgl precision result. #
7074 # OVFL/UNFL occurred for a sgl or dbl operand #
7084 # For sgl or dbl precision, overflow or underflow can occur. If #
7159 ori.l &0x00800000,%d1 # make smallest sgl
7205 ori.l &0x00800000,%d1 # make smallest sgl
7252 ori.l &0x00800000,%d1 # make smallest sgl
7336 ori.b &s_mode*0x10,%d0 # insert sgl prec
7459 # call ovf_res() w/ sgl prec and the correct rnd mode to create the default
7464 mov.l L_SCR3(%a6),%d0 # pass: sgl prec,rnd mode
7814 # sgl_exp = ext_exp - $3fff(ext bias) + $7f(sgl bias) #
7977 # norms/denorms into ext/sgl/dbl precision. #
8000 ori.b &s_mode*0x10,%d0 # insert sgl prec
8133 bne.b fmul_ovfl_ena_sd # it's sgl or dbl
8244 bne.b fmul_unfl_ena_sd # no, sgl or dbl
8474 # sgl/dbl, must scale exponent and perform an "fmove". Check to see #
8485 ori.b &s_mode*0x10,%d0 # insert sgl precision
8505 bne.w fin_not_ext # no, so go handle dbl or sgl
8525 bne.w fin_not_ext # no, so go handle dbl or sgl
8562 cmpi.b %d0,&s_mode*0x10 # separate sgl/dbl prec
8794 # norms/denorms into ext/sgl/dbl precision. #
8812 long 0x3fff - 0x407e # sgl overflow exponent
8818 ori.b &s_mode*0x10,%d0 # insert sgl prec
8946 bne.b fdiv_ovfl_ena_sd # no, do sgl or dbl
9014 bne.b fdiv_unfl_ena_sd # no, sgl or dbl
9231 # scale_to_zero_src() - scale sgl/dbl source exponent #
9247 # norms/denorms into ext/sgl/dbl precisions. Extended precision can be #
9259 ori.b &s_mode*0x10,%d0 # insert sgl precision
9278 bne.w fneg_not_ext # no; go handle sgl or dbl
9302 bne.b fneg_not_ext # no; go handle sgl or dbl
9342 cmpi.b %d0,&s_mode*0x10 # separate sgl/dbl prec
9882 ori.b &s_mode*0x10,%d0 # insert sgl precision
9901 bne.b fabs_not_ext # no; go handle sgl or dbl
9960 cmpi.b %d0,&s_mode*0x10 # separate sgl/dbl prec
10379 # norms/denorms into ext/sgl/dbl precision. #
10430 fsglmul.x FP_SCR0(%a6),%fp0 # execute sgl multiply
10457 fsglmul.x FP_SCR0(%a6),%fp0 # execute sgl multiply
10506 fsglmul.x FP_SCR0(%a6),%fp0 # execute sgl multiply
10528 fsglmul.x FP_SCR0(%a6),%fp0 # execute sgl multiply
10558 fsglmul.x FP_SCR0(%a6),%fp1 # execute sgl multiply
10583 fsglmul.x FP_SCR0(%a6),%fp0 # execute sgl multiply
10611 fsglmul.x FP_SCR0(%a6),%fp1 # execute sgl multiply
10720 # norms/denorms into ext/sgl/dbl precision. #
10777 fsgldiv.x FP_SCR0(%a6),%fp0 # perform sgl divide
10861 fsgldiv.x FP_SCR0(%a6),%fp0 # execute sgl divide
10891 fsgldiv.x FP_SCR0(%a6),%fp1 # execute sgl divide
10919 fsgldiv.x FP_SCR0(%a6),%fp0 # execute sgl divide
10946 fsgldiv.x FP_SCR0(%a6),%fp1 # execute sgl divide
11069 ori.b &s_mode*0x10,%d0 # insert sgl prec
11144 long 0x407f # sgl ovfl
11149 long 0x3f81 # sgl unfl
11173 bne.b fadd_ovfl_ena_sd # no; prec = sgl or dbl
11238 bne.b fadd_unfl_ena_sd # no; sgl or dbl
11522 ori.b &s_mode*0x10,%d0 # insert sgl prec
11597 long 0x407f # sgl ovfl
11602 long 0x3f81 # sgl unfl
11954 # norms/denorms into ext/sgl/dbl precision. #
11967 ori.b &s_mode*0x10,%d0 # insert sgl precision
11990 bne.b fsqrt_not_ext # no; go handle sgl or dbl
12007 bne.b fsqrt_not_ext # no; go handle sgl or dbl
12021 cmpi.b %d0,&s_mode*0x10 # separate sgl/dbl prec
H A Dfplsp.S395 set SGL_LO, 0x3f81 # min sgl prec exponent
396 set SGL_HI, 0x407e # max sgl prec exponent
496 set sgl_thresh, 0x3f81 # minimum sgl exponent
574 fmov.s 0x8(%a6),%fp0 # load sgl input
751 fmov.s 0x8(%a6),%fp0 # load sgl input
928 fmov.s 0x8(%a6),%fp0 # load sgl input
1105 fmov.s 0x8(%a6),%fp0 # load sgl input
1282 fmov.s 0x8(%a6),%fp0 # load sgl input
1459 fmov.s 0x8(%a6),%fp0 # load sgl input
1636 fmov.s 0x8(%a6),%fp0 # load sgl input
1813 fmov.s 0x8(%a6),%fp0 # load sgl input
1990 fmov.s 0x8(%a6),%fp0 # load sgl input
2167 fmov.s 0x8(%a6),%fp0 # load sgl input
2344 fmov.s 0x8(%a6),%fp0 # load sgl input
2521 fmov.s 0x8(%a6),%fp0 # load sgl input
2698 fmov.s 0x8(%a6),%fp0 # load sgl input
2875 fmov.s 0x8(%a6),%fp0 # load sgl input
3052 fmov.s 0x8(%a6),%fp0 # load sgl input
3229 fmov.s 0x8(%a6),%fp0 # load sgl input
3406 fmov.s 0x8(%a6),%fp0 # load sgl input
3583 fmov.s 0x8(%a6),%fp0 # load sgl input
3760 fmov.s 0x8(%a6),%fp0 # load sgl input
3937 fmov.s 0x8(%a6),%fp0 # load sgl input
4114 fmov.s 0x8(%a6),%fp0 # load sgl input
4297 fmov.s 0x8(%a6),%fp0 # load sgl dst
4303 fmov.s 0xc(%a6),%fp0 # load sgl src
4501 fmov.s 0x8(%a6),%fp0 # load sgl dst
4507 fmov.s 0xc(%a6),%fp0 # load sgl src
4705 fmov.s 0x8(%a6),%fp0 # load sgl dst
4711 fmov.s 0xc(%a6),%fp0 # load sgl src
5203 # here, the operation may underflow iff the precision is sgl or dbl.
10048 cmpi.b %d1,&0x40 # is precision sgl?
10051 tst.l LOCAL_LO(%a0) # is lo lw of sgl set?
10290 fmov.s 0x8(%sp),%fp0 # load sgl dst
10292 fadd.s 0x8(%sp),%fp0 # fadd w/ sgl src
10314 fmov.s 0x8(%sp),%fp0 # load sgl dst
10316 fsub.s 0x8(%sp),%fp0 # fsub w/ sgl src
10338 fmov.s 0x8(%sp),%fp0 # load sgl dst
10340 fmul.s 0x8(%sp),%fp0 # fmul w/ sgl src
10362 fmov.s 0x8(%sp),%fp0 # load sgl dst
10364 fdiv.s 0x8(%sp),%fp0 # fdiv w/ sgl src
10384 fabs.s 0x4(%sp),%fp0 # fabs w/ sgl src
10399 fneg.s 0x4(%sp),%fp0 # fneg w/ sgl src
10414 fsqrt.s 0x4(%sp),%fp0 # fsqrt w/ sgl src
10429 fint.s 0x4(%sp),%fp0 # fint w/ sgl src
10444 fintrz.s 0x4(%sp),%fp0 # fintrz w/ sgl src
H A Dfpsp.S415 set SGL_LO, 0x3f81 # min sgl prec exponent
416 set SGL_HI, 0x407e # max sgl prec exponent
516 set sgl_thresh, 0x3f81 # minimum sgl exponent
1400 bsr.l funimp_skew # skew sgl or dbl inputs
1433 cmpi.b %d0,&0x11 # is class = 2 & fmt = sgl?
1977 # "non-skewed" operand for cases of sgl and dbl src INFs,NANs, and DENORMs.
2304 cmpi.b %d0,&0x1 # was src sgl?
3133 # denorm operand in the sgl or dbl format. NANs also become skewed, but can't
3197 short tbl_operr - tbl_operr # sgl prec shouldn't happen
3329 # denorm operand in the sgl or dbl format. NANs also become skewed and must be
3367 short fsnan_out_s - tbl_snan # sgl prec shouldn't happen
3433 lsr.l &0x8,%d1 # shift mantissa for sgl
3434 or.l %d1,%d0 # create sgl SNAN
3448 lsr.l &0x8,%d1 # shift mantissa for sgl
3449 or.l %d1,%d0 # create sgl SNAN
3792 # in the sgl or dbl format.
5309 # here, the operation may underflow iff the precision is sgl or dbl.
10298 bne.b ovfl_sc_dbl # no; sgl
10300 tst.l LOCAL_LO(%a0) # is lo lw of sgl set?
10446 long 0x3f810000, 0x00000000, 0x00000000, 0x0 # ZERO;sgl
10447 long 0x3f810000, 0x00000000, 0x00000000, 0x0 # ZERO;sgl
10448 long 0x3f810000, 0x00000000, 0x00000000, 0x0 # ZERO;sgl
10449 long 0x3f810000, 0x00000100, 0x00000000, 0x0 # MIN; sgl
10466 long 0xbf810000, 0x00000000, 0x00000000, 0x0 # ZERO;sgl
10467 long 0xbf810000, 0x00000000, 0x00000000, 0x0 # ZERO;sgl
10468 long 0xbf810000, 0x00000100, 0x00000000, 0x0 # MIN; sgl
10469 long 0xbf810000, 0x00000000, 0x00000000, 0x0 # ZERO;sgl
11546 # norms/denorms into ext/sgl/dbl precision. #
11569 ori.b &s_mode*0x10,%d0 # insert sgl prec
11702 bne.b fmul_ovfl_ena_sd # it's sgl or dbl
11813 bne.b fmul_unfl_ena_sd # no, sgl or dbl
12043 # sgl/dbl, must scale exponent and perform an "fmove". Check to see #
12054 ori.b &s_mode*0x10,%d0 # insert sgl precision
12074 bne.w fin_not_ext # no, so go handle dbl or sgl
12094 bne.w fin_not_ext # no, so go handle dbl or sgl
12131 cmpi.b %d0,&s_mode*0x10 # separate sgl/dbl prec
12363 # norms/denorms into ext/sgl/dbl precision. #
12381 long 0x3fff - 0x407e # sgl overflow exponent
12387 ori.b &s_mode*0x10,%d0 # insert sgl prec
12515 bne.b fdiv_ovfl_ena_sd # no, do sgl or dbl
12583 bne.b fdiv_unfl_ena_sd # no, sgl or dbl
12800 # scale_to_zero_src() - scale sgl/dbl source exponent #
12816 # norms/denorms into ext/sgl/dbl precisions. Extended precision can be #
12828 ori.b &s_mode*0x10,%d0 # insert sgl precision
12847 bne.w fneg_not_ext # no; go handle sgl or dbl
12871 bne.b fneg_not_ext # no; go handle sgl or dbl
12911 cmpi.b %d0,&s_mode*0x10 # separate sgl/dbl prec
13451 ori.b &s_mode*0x10,%d0 # insert sgl precision
13470 bne.b fabs_not_ext # no; go handle sgl or dbl
13529 cmpi.b %d0,&s_mode*0x10 # separate sgl/dbl prec
13948 # norms/denorms into ext/sgl/dbl precision. #
13999 fsglmul.x FP_SCR0(%a6),%fp0 # execute sgl multiply
14026 fsglmul.x FP_SCR0(%a6),%fp0 # execute sgl multiply
14075 fsglmul.x FP_SCR0(%a6),%fp0 # execute sgl multiply
14097 fsglmul.x FP_SCR0(%a6),%fp0 # execute sgl multiply
14127 fsglmul.x FP_SCR0(%a6),%fp1 # execute sgl multiply
14152 fsglmul.x FP_SCR0(%a6),%fp0 # execute sgl multiply
14180 fsglmul.x FP_SCR0(%a6),%fp1 # execute sgl multiply
14289 # norms/denorms into ext/sgl/dbl precision. #
14346 fsgldiv.x FP_SCR0(%a6),%fp0 # perform sgl divide
14430 fsgldiv.x FP_SCR0(%a6),%fp0 # execute sgl divide
14460 fsgldiv.x FP_SCR0(%a6),%fp1 # execute sgl divide
14488 fsgldiv.x FP_SCR0(%a6),%fp0 # execute sgl divide
14515 fsgldiv.x FP_SCR0(%a6),%fp1 # execute sgl divide
14638 ori.b &s_mode*0x10,%d0 # insert sgl prec
14713 long 0x407f # sgl ovfl
14718 long 0x3f81 # sgl unfl
14742 bne.b fadd_ovfl_ena_sd # no; prec = sgl or dbl
14807 bne.b fadd_unfl_ena_sd # no; sgl or dbl
15091 ori.b &s_mode*0x10,%d0 # insert sgl prec
15166 long 0x407f # sgl ovfl
15171 long 0x3f81 # sgl unfl
15523 # norms/denorms into ext/sgl/dbl precision. #
15536 ori.b &s_mode*0x10,%d0 # insert sgl precision
15559 bne.b fsqrt_not_ext # no; go handle sgl or dbl
15576 bne.b fsqrt_not_ext # no; go handle sgl or dbl
15590 cmpi.b %d0,&s_mode*0x10 # separate sgl/dbl prec
19562 # set_tag_s() - determine sgl prec optype tag #
19766 bsr.l fetch_dreg # fetch sgl in d0
19769 lea L_SCR1(%a6), %a0 # pass: ptr to the sgl
19770 bsr.l set_tag_s # determine sgl type
19943 lea L_SCR1(%a6), %a0 # pass: ptr to sgl src op
19947 cmpi.b %d0, &DENORM # is it a sgl DENORM?
19950 cmpi.b %d0, &SNAN # is it a sgl SNAN?
19964 # must convert sgl denorm format to an Xprec denorm fmt suitable for
19966 # %a0 : points to sgl denorm
19969 bfextu (%a0){&9:&23}, %d0 # fetch sgl hi(_mantissa)
19989 # convert sgl to ext SNAN
19990 # %a0 : points to sgl SNAN
20153 # _round() - needed to create EXOP for sgl/dbl precision #
20155 # ovf_res() - create default overflow result for sgl/dbl precision#
20156 # unf_res() - create default underflow result for sgl/dbl prec. #
20158 # dst_sgl() - create rounded sgl precision result. #
20173 # OVFL/UNFL occurred for a sgl or dbl operand #
20183 # For sgl or dbl precision, overflow or underflow can occur. If #
20258 ori.l &0x00800000,%d1 # make smallest sgl
20304 ori.l &0x00800000,%d1 # make smallest sgl
20351 ori.l &0x00800000,%d1 # make smallest sgl
20435 ori.b &s_mode*0x10,%d0 # insert sgl prec
20558 # call ovf_res() w/ sgl prec and the correct rnd mode to create the default
20563 mov.l L_SCR3(%a6),%d0 # pass: sgl prec,rnd mode
20913 # sgl_exp = ext_exp - $3fff(ext bias) + $7f(sgl bias) #
22020 # sgl = $0004xxxx #
22083 cmpi.b %d1, &s_mode # is prec = sgl?
22100 cmpi.b %d1, &s_mode # is prec = sgl?
22117 cmpi.b %d1, &s_mode # is prec = sgl?
22123 set ad_1_sgl, 0x00000100 # constant to add 1 to l-bit in sgl prec
22140 and.l &0xffffff00, FTEMP_HI(%a0) # truncate bits beyond sgl limit
22193 cmpi.b %d1, &s_mode # is prec sgl?
22223 bne.b ext_grs_not_ext # no; go handle sgl or dbl
22236 cmpi.b %d1, &s_mode # is rnd prec = sgl?
22240 # sgl:
22252 bfextu FTEMP_HI(%a0){&24:&2}, %d3 # sgl prec. g-r are 2 bits right
22253 mov.l &30, %d2 # of the sgl prec. limits
22608 # set_tag_s(): return the optype of the input sgl fp number #
22781 mov.w &s_mode,%d1 # force rnd prec = sgl
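
As in fpsp040 above, "sgl" throughout the 060 FPSP sources means single precision. The round.S/fpsp.S lines quoted above round the internal mantissa to sgl by adding ad_1_sgl ($00000100) at the l-bit and masking with $ffffff00; a C illustration of those two constants, assuming (as the comments imply) that the 24-bit sgl mantissa sits in bits 31..8 of the high mantissa longword:

    /* Illustration only: round up at the l-bit, then truncate the high
     * mantissa word to sgl precision, per ad_1_sgl and the mask above. */
    static unsigned int sgl_round_up(unsigned int hi_mantissa)
    {
            hi_mantissa += 0x00000100;       /* ad_1_sgl: +1 at the sgl l-bit */
            return hi_mantissa & 0xffffff00; /* drop bits beyond the sgl limit */
    }
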
/linux-4.1.27/drivers/scsi/cxgbi/
H A Dlibcxgbi.c1375 struct scatterlist *sgl, ddp_make_gl()
1381 struct scatterlist *sg = sgl; ddp_make_gl()
1406 "xfer %u, sgl %u, gl max %u.\n", xferlen, sgcnt, npages); ddp_make_gl()
1414 for (i = 1, sg = sg_next(sgl), j = 0; i < sgcnt; ddp_make_gl()
1421 /* make sure the sgl is fit for ddp: ddp_make_gl()
1547 struct scatterlist *sgl, unsigned int sgcnt, gfp_t gfp) cxgbi_ddp_reserve()
1567 gl = ddp_make_gl(xferlen, sgl, sgcnt, cdev->pdev, gfp); cxgbi_ddp_reserve()
1718 scsi_in(sc)->table.sgl, task_reserve_itt()
2020 static int sgl_seek_offset(struct scatterlist *sgl, unsigned int sgcnt, sgl_seek_offset() argument
2027 for_each_sg(sgl, sg, sgcnt, i) { for_each_sg()
2178 sdb->table.sgl, sdb->table.nents, cxgbi_conn_init_pdu()
2181 pr_warn("tpdu, sgl %u, bad offset %u/%u.\n", cxgbi_conn_init_pdu()
2188 pr_warn("tpdu, sgl %u, bad offset %u + %u.\n", cxgbi_conn_init_pdu()
1374 ddp_make_gl(unsigned int xferlen, struct scatterlist *sgl, unsigned int sgcnt, struct pci_dev *pdev, gfp_t gfp) ddp_make_gl() argument
1545 cxgbi_ddp_reserve(struct cxgbi_sock *csk, unsigned int *tagp, unsigned int sw_tag, unsigned int xferlen, struct scatterlist *sgl, unsigned int sgcnt, gfp_t gfp) cxgbi_ddp_reserve() argument
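
sgl_seek_offset() above locates the sg entry that contains a given byte offset before PDU construction. A generic sketch of that seek (seek_sgl and its out-parameter are illustrative, simplified from libcxgbi.c):

    #include <linux/scatterlist.h>

    /* Sketch: find the entry covering 'offset' and the residual offset
     * inside it, as sgl_seek_offset() in libcxgbi.c above does. */
    static struct scatterlist *seek_sgl(struct scatterlist *sgl,
                                        unsigned int sgcnt, unsigned int offset,
                                        unsigned int *off_in_sg)
    {
            struct scatterlist *sg;
            int i;

            for_each_sg(sgl, sg, sgcnt, i) {
                    if (offset < sg->length) {
                            *off_in_sg = offset;
                            return sg;
                    }
                    offset -= sg->length;
            }
            return NULL;    /* offset lies past the end of the list */
    }
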
/linux-4.1.27/drivers/media/pci/cx23885/
H A Dcx23885-video.c348 sgt->sgl, 0, UNSET, buffer_prepare()
353 sgt->sgl, UNSET, 0, buffer_prepare()
381 sgt->sgl, line0_offset, buffer_prepare()
388 sgt->sgl, buffer_prepare()
395 sgt->sgl, buffer_prepare()
H A Dcx23885-vbi.c155 sgt->sgl, buffer_prepare()
/linux-4.1.27/drivers/media/pci/tw68/
H A Dtw68-video.c474 tw68_risc_buffer(dev->pci, buf, dma->sgl, tw68_buf_prepare()
478 tw68_risc_buffer(dev->pci, buf, dma->sgl, tw68_buf_prepare()
482 tw68_risc_buffer(dev->pci, buf, dma->sgl, tw68_buf_prepare()
487 tw68_risc_buffer(dev->pci, buf, dma->sgl, tw68_buf_prepare()
493 tw68_risc_buffer(dev->pci, buf, dma->sgl, tw68_buf_prepare()
/linux-4.1.27/drivers/crypto/qat/qat_common/
H A Dqat_algs.c658 struct scatterlist *sgl, qat_alg_sgl_to_bufl()
665 int n = sg_nents(sgl), assoc_n = sg_nents(assoc); qat_alg_sgl_to_bufl()
707 for_each_sg(sgl, sg, n, i) { for_each_sg()
726 if (sgl != sglout) {
784 if (sgl != sglout && buflout) {
656 qat_alg_sgl_to_bufl(struct qat_crypto_instance *inst, struct scatterlist *assoc, struct scatterlist *sgl, struct scatterlist *sglout, uint8_t *iv, uint8_t ivlen, struct qat_crypto_request *qat_req) qat_alg_sgl_to_bufl() argument
/linux-4.1.27/drivers/char/agp/
H A Dintel-gtt.c109 for_each_sg(st->sgl, sg, num_entries, i) intel_gtt_map_memory()
113 st->sgl, st->nents, PCI_DMA_BIDIRECTIONAL)) intel_gtt_map_memory()
131 st.sgl = sg_list; intel_gtt_unmap_memory()
853 for_each_sg(st->sgl, sg, st->nents, i) { intel_gtt_insert_sg_entries()
919 mem->sg_list = st.sgl; intel_fake_agp_insert_entries()
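
intel_gtt_map_memory() above maps a whole sg_table for device access and must cope with the IOMMU coalescing entries. The canonical map/unmap pair, sketched with the generic DMA API (the driver itself uses the pci_* wrappers; map_table/unmap_table are illustrative):

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    /* Sketch: map an sg_table and record how many entries survived
     * coalescing; unmap must be passed the original entry count. */
    static int map_table(struct device *dev, struct sg_table *st)
    {
            int nents = dma_map_sg(dev, st->sgl, st->orig_nents,
                                   DMA_BIDIRECTIONAL);

            if (!nents)
                    return -ENOMEM;     /* mapping failed */
            st->nents = nents;          /* may be smaller than orig_nents */
            return 0;
    }

    static void unmap_table(struct device *dev, struct sg_table *st)
    {
            dma_unmap_sg(dev, st->sgl, st->orig_nents, DMA_BIDIRECTIONAL);
    }
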
/linux-4.1.27/drivers/rapidio/devices/
H A Dtsi721_dma.c757 struct scatterlist *sgl, unsigned int sg_len, tsi721_prep_rio_sg()
767 if (!sgl || !sg_len) { tsi721_prep_rio_sg()
806 desc->sg = sgl; tsi721_prep_rio_sg()
756 tsi721_prep_rio_sg(struct dma_chan *dchan, struct scatterlist *sgl, unsigned int sg_len, enum dma_transfer_direction dir, unsigned long flags, void *tinfo) tsi721_prep_rio_sg() argument
/linux-4.1.27/drivers/gpu/drm/vmwgfx/
H A Dvmwgfx_buffer.c316 __sg_page_iter_start(&viter->iter, vsgt->sgt->sgl, vmw_piter_start()
336 dma_unmap_sg(dev, vmw_tt->sgt.sgl, vmw_tt->sgt.nents, vmw_ttm_unmap_from_dma()
359 ret = dma_map_sg(dev, vmw_tt->sgt.sgl, vmw_tt->sgt.orig_nents, vmw_ttm_map_for_dma()
/linux-4.1.27/drivers/dma/xilinx/
H A Dxilinx_vdma.c942 if (!xt->numf || !xt->sgl[0].size) xilinx_vdma_dma_prep_interleaved()
965 hw->hsize = xt->sgl[0].size; xilinx_vdma_dma_prep_interleaved()
966 hw->stride = (xt->sgl[0].icg + xt->sgl[0].size) << xilinx_vdma_dma_prep_interleaved()
/linux-4.1.27/drivers/base/
H A Ddma-mapping.c238 sg_set_page(sgt->sgl, page, PAGE_ALIGN(size), 0); dma_common_get_sgtable()
/linux-4.1.27/drivers/gpu/drm/radeon/
H A Dradeon_ttm.c575 nents = dma_map_sg(rdev->dev, ttm->sg->sgl, ttm->sg->nents, direction); radeon_ttm_tt_pin_userptr()
603 if (!ttm->sg->sgl) radeon_ttm_tt_unpin_userptr()
607 dma_unmap_sg(rdev->dev, ttm->sg->sgl, ttm->sg->nents, direction); radeon_ttm_tt_unpin_userptr()
609 for_each_sg_page(ttm->sg->sgl, &sg_iter, ttm->sg->nents, 0) { radeon_ttm_tt_unpin_userptr()
/linux-4.1.27/drivers/mtd/nand/gpmi-nand/
H A Dgpmi-lib.c1123 struct scatterlist *sgl; gpmi_send_command() local
1143 sgl = &this->cmd_sgl; gpmi_send_command()
1145 sg_init_one(sgl, this->cmd_buffer, this->command_length); gpmi_send_command()
1146 dma_map_sg(this->dev, sgl, 1, DMA_TO_DEVICE); gpmi_send_command()
1148 sgl, 1, DMA_MEM_TO_DEV, gpmi_send_command()
H A Dgpmi-nand.c384 struct scatterlist *sgl = &this->data_sgl; prepare_data_dma() local
390 sg_init_one(sgl, this->upper_buf, this->upper_len); prepare_data_dma()
391 ret = dma_map_sg(this->dev, sgl, 1, dr); prepare_data_dma()
401 sg_init_one(sgl, this->data_buffer_dma, this->upper_len); prepare_data_dma()
406 dma_map_sg(this->dev, sgl, 1, dr); prepare_data_dma()
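
gpmi_send_command() above shows the stock dmaengine slave sequence: describe the buffer with sg_init_one(), map it, prep, submit. A condensed sketch assuming a channel has already been requested (send_buf is illustrative; completion handling and the post-completion unmap are omitted):

    #include <linux/dmaengine.h>
    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    /* Sketch of the gpmi-lib.c pattern quoted above: one-entry sgl,
     * dma_map_sg(), dmaengine_prep_slave_sg(), submit, kick the channel. */
    static int send_buf(struct dma_chan *chan, struct device *dev,
                        void *buf, size_t len)
    {
            struct dma_async_tx_descriptor *desc;
            struct scatterlist sgl;

            sg_init_one(&sgl, buf, len);
            if (!dma_map_sg(dev, &sgl, 1, DMA_TO_DEVICE))
                    return -ENOMEM;

            desc = dmaengine_prep_slave_sg(chan, &sgl, 1, DMA_MEM_TO_DEV,
                                           DMA_PREP_INTERRUPT);
            if (!desc) {
                    dma_unmap_sg(dev, &sgl, 1, DMA_TO_DEVICE);
                    return -EINVAL;
            }
            dmaengine_submit(desc);
            dma_async_issue_pending(chan);
            return 0;
    }
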
/linux-4.1.27/drivers/net/ethernet/chelsio/cxgb3/
H A Dsge.c1037 * @sgl: the SGL
1052 const struct sg_ent *sgl, write_wr_hdr_sgl()
1077 const u64 *fp = (const u64 *)sgl; write_wr_hdr_sgl()
1144 struct sg_ent *sgp, sgl[MAX_SKB_FRAGS / 2 + 1]; write_tx_pkt_wr() local
1198 sgp = ndesc == 1 ? (struct sg_ent *)&d->flit[flits] : sgl; write_tx_pkt_wr()
1201 write_wr_hdr_sgl(ndesc, skb, d, pidx, q, sgl, flits, sgl_flits, gen, write_tx_pkt_wr()
1551 const struct sg_ent *sgl, int sgl_flits) setup_deferred_unmapping()
1558 for (p = dui->addr; sgl_flits >= 3; sgl++, sgl_flits -= 3) { setup_deferred_unmapping()
1559 *p++ = be64_to_cpu(sgl->addr[0]); setup_deferred_unmapping()
1560 *p++ = be64_to_cpu(sgl->addr[1]); setup_deferred_unmapping()
1563 *p = be64_to_cpu(sgl->addr[0]); setup_deferred_unmapping()
1584 struct sg_ent *sgp, sgl[MAX_SKB_FRAGS / 2 + 1]; write_ofld_wr() local
1600 sgp = ndesc == 1 ? (struct sg_ent *)&d->flit[flits] : sgl; write_ofld_wr()
1610 write_wr_hdr_sgl(ndesc, skb, d, pidx, q, sgl, flits, sgl_flits, write_ofld_wr()
1049 write_wr_hdr_sgl(unsigned int ndesc, struct sk_buff *skb, struct tx_desc *d, unsigned int pidx, const struct sge_txq *q, const struct sg_ent *sgl, unsigned int flits, unsigned int sgl_flits, unsigned int gen, __be32 wr_hi, __be32 wr_lo) write_wr_hdr_sgl() argument
1550 setup_deferred_unmapping(struct sk_buff *skb, struct pci_dev *pdev, const struct sg_ent *sgl, int sgl_flits) setup_deferred_unmapping() argument
/linux-4.1.27/drivers/scsi/qla2xxx/
H A Dqla_iocb.c1045 struct scatterlist *sg, *sgl; qla24xx_walk_and_build_sglist() local
1054 sgl = scsi_sglist(cmd); qla24xx_walk_and_build_sglist()
1057 sgl = tc->sg; qla24xx_walk_and_build_sglist()
1065 for_each_sg(sgl, sg, tot_dsds, i) { for_each_sg()
1135 struct scatterlist *sg, *sgl; qla24xx_walk_and_build_prot_sglist() local
1144 sgl = scsi_prot_sglist(cmd); qla24xx_walk_and_build_prot_sglist()
1148 sgl = tc->prot_sg; qla24xx_walk_and_build_prot_sglist()
1157 for_each_sg(sgl, sg, tot_dsds, i) { for_each_sg()
/linux-4.1.27/include/uapi/linux/genwqe/
H A Dgenwqe_card.h387 #define ATS_TYPE_SGL_RD 0x6ull /* sgl read only */
388 #define ATS_TYPE_SGL_RDWR 0x7ull /* sgl read/write */
/linux-4.1.27/arch/powerpc/platforms/ps3/
H A Dsystem-bus.c642 static int ps3_sb_map_sg(struct device *_dev, struct scatterlist *sgl, ps3_sb_map_sg() argument
653 for_each_sg(sgl, sg, nents, i) { ps3_sb_map_sg()
/linux-4.1.27/drivers/scsi/pm8001/
H A Dpm80xx_hwi.h700 __le32 addr_low; /* dword 12: sgl low for normal I/O. */
702 __le32 addr_high; /* dword 13: sgl hi for normal I/O */
719 __le32 enc_addr_low; /* dword 28: Encryption sgl addr low */
720 __le32 enc_addr_high; /* dword 29: Encryption sgl addr hi */
/linux-4.1.27/drivers/crypto/ccp/
H A Dccp-crypto-aes-cmac.c138 sg = rctx->data_sg.sgl; ccp_do_cmac_update()
H A Dccp-crypto-main.c305 for (sg = table->sgl; sg; sg = sg_next(sg)) ccp_crypto_sg_table_add()
H A Dccp-crypto-sha.c113 sg = rctx->data_sg.sgl; ccp_do_sha_update()
/linux-4.1.27/drivers/media/pci/solo6x10/
H A Dsolo6x10-v4l2-enc.c331 for_each_sg(vbuf->sgl, sg, vbuf->nents, i) { solo_send_desc()
751 sg_copy_from_buffer(vbuf->sgl, vbuf->nents, solo_enc_buf_finish()
755 sg_copy_from_buffer(vbuf->sgl, vbuf->nents, solo_enc_buf_finish()
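
solo_enc_buf_finish() above stages header bytes into the capture buffer with sg_copy_from_buffer(), which walks the list internally so callers need no iterator. A one-call sketch (stage_header is an illustrative name):

    #include <linux/scatterlist.h>

    /* Sketch: copy a linear header into the memory an sgl describes;
     * returns the number of bytes actually copied. */
    static size_t stage_header(struct scatterlist *sgl, unsigned int nents,
                               void *hdr, size_t len)
    {
            return sg_copy_from_buffer(sgl, nents, hdr, len);
    }
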
/linux-4.1.27/drivers/scsi/fcoe/
H A Dfcoe.c1026 * @sgl: The scatterlist describing this transfer
1032 struct scatterlist *sgl, unsigned int sgc) fcoe_ddp_setup()
1038 xid, sgl, fcoe_ddp_setup()
1048 * @sgl: The scatterlist describing this transfer
1054 struct scatterlist *sgl, unsigned int sgc) fcoe_ddp_target()
1060 sgl, sgc); fcoe_ddp_target()
1031 fcoe_ddp_setup(struct fc_lport *lport, u16 xid, struct scatterlist *sgl, unsigned int sgc) fcoe_ddp_setup() argument
1053 fcoe_ddp_target(struct fc_lport *lport, u16 xid, struct scatterlist *sgl, unsigned int sgc) fcoe_ddp_target() argument
/linux-4.1.27/drivers/staging/unisys/virthba/
H A Dvirthba.c818 struct scatterlist *sgl = NULL; virthba_queue_command_lck() local
876 sgl = scsi_sglist(scsicmd); virthba_queue_command_lck()
878 for_each_sg(sgl, sg, scsi_sg_count(scsicmd), i) { for_each_sg()
/linux-4.1.27/drivers/dma/dw/
H A Dcore.c761 dwc_prep_slave_sg(struct dma_chan *chan, struct scatterlist *sgl, dwc_prep_slave_sg() argument
802 for_each_sg(sgl, sg, sg_len, i) { for_each_sg()
859 for_each_sg(sgl, sg, sg_len, i) { for_each_sg()

Completed in 6927 milliseconds
