
Searched refs:sg (Results 1 – 200 of 696) sorted by relevance

/linux-4.1.27/tools/virtio/linux/
scatterlist.h
13 #define sg_is_chain(sg) ((sg)->page_link & 0x01) argument
14 #define sg_is_last(sg) ((sg)->page_link & 0x02) argument
15 #define sg_chain_ptr(sg) \ argument
16 ((struct scatterlist *) ((sg)->page_link & ~0x03))
28 static inline void sg_assign_page(struct scatterlist *sg, struct page *page) in sg_assign_page() argument
30 unsigned long page_link = sg->page_link & 0x3; in sg_assign_page()
38 BUG_ON(sg->sg_magic != SG_MAGIC); in sg_assign_page()
39 BUG_ON(sg_is_chain(sg)); in sg_assign_page()
41 sg->page_link = page_link | (unsigned long) page; in sg_assign_page()
58 static inline void sg_set_page(struct scatterlist *sg, struct page *page, in sg_set_page() argument
[all …]
/linux-4.1.27/include/linux/
scatterlist.h
42 #define sg_is_chain(sg) ((sg)->page_link & 0x01) argument
43 #define sg_is_last(sg) ((sg)->page_link & 0x02) argument
44 #define sg_chain_ptr(sg) \ argument
45 ((struct scatterlist *) ((sg)->page_link & ~0x03))
57 static inline void sg_assign_page(struct scatterlist *sg, struct page *page) in sg_assign_page() argument
59 unsigned long page_link = sg->page_link & 0x3; in sg_assign_page()
67 BUG_ON(sg->sg_magic != SG_MAGIC); in sg_assign_page()
68 BUG_ON(sg_is_chain(sg)); in sg_assign_page()
70 sg->page_link = page_link | (unsigned long) page; in sg_assign_page()
87 static inline void sg_set_page(struct scatterlist *sg, struct page *page, in sg_set_page() argument
[all …]
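
The two headers above are the canonical definition of the scatterlist encoding: the low two bits of page_link carry the chain (0x01) and last (0x02) markers, which is why sg_assign_page() preserves bits 0-1 and traps on chain entries. A minimal sketch of filling a table with this 4.1-era API (function and buffer names are illustrative, not from the tree):

#include <linux/kernel.h>
#include <linux/scatterlist.h>

/*
 * Sketch: describe two kernel buffers with one scatterlist table.
 * sg_init_table() zeroes every entry and sets the 0x02 "last" bit
 * on the final one, so sg_is_last() terminates later walks;
 * sg_set_buf() stores the backing page in page_link, leaving the
 * low bits free for the flags tested by sg_is_chain()/sg_is_last().
 */
static void fill_sg_sketch(struct scatterlist sgl[2],
			   void *buf_a, void *buf_b, size_t len)
{
	sg_init_table(sgl, 2);
	sg_set_buf(&sgl[0], buf_a, len);
	sg_set_buf(&sgl[1], buf_b, len);	/* still marked "last" */
}
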
dma-debug.h
47 extern void debug_dma_map_sg(struct device *dev, struct scatterlist *sg,
79 struct scatterlist *sg,
83 struct scatterlist *sg,
123 static inline void debug_dma_map_sg(struct device *dev, struct scatterlist *sg, in debug_dma_map_sg() argument
173 struct scatterlist *sg, in debug_dma_sync_sg_for_cpu() argument
179 struct scatterlist *sg, in debug_dma_sync_sg_for_device() argument
swiotlb.h
74 swiotlb_map_sg(struct device *hwdev, struct scatterlist *sg, int nents,
78 swiotlb_unmap_sg(struct device *hwdev, struct scatterlist *sg, int nents,
95 swiotlb_sync_sg_for_cpu(struct device *hwdev, struct scatterlist *sg,
103 swiotlb_sync_sg_for_device(struct device *hwdev, struct scatterlist *sg,
/linux-4.1.27/drivers/crypto/qce/
dma.c
57 int qce_mapsg(struct device *dev, struct scatterlist *sg, int nents, in qce_mapsg() argument
63 while (sg) { in qce_mapsg()
64 err = dma_map_sg(dev, sg, 1, dir); in qce_mapsg()
67 sg = sg_next(sg); in qce_mapsg()
70 err = dma_map_sg(dev, sg, nents, dir); in qce_mapsg()
78 void qce_unmapsg(struct device *dev, struct scatterlist *sg, int nents, in qce_unmapsg() argument
82 while (sg) { in qce_unmapsg()
83 dma_unmap_sg(dev, sg, 1, dir); in qce_unmapsg()
84 sg = sg_next(sg); in qce_unmapsg()
87 dma_unmap_sg(dev, sg, nents, dir); in qce_unmapsg()
[all …]
sha.c
239 struct scatterlist *sg_last, *sg; in qce_ahash_update() local
281 sg = sg_last = req->src; in qce_ahash_update()
283 while (len < nbytes && sg) { in qce_ahash_update()
284 if (len + sg_dma_len(sg) > nbytes) in qce_ahash_update()
286 len += sg_dma_len(sg); in qce_ahash_update()
287 sg_last = sg; in qce_ahash_update()
288 sg = sg_next(sg); in qce_ahash_update()
297 sg_init_table(rctx->sg, 2); in qce_ahash_update()
298 sg_set_buf(rctx->sg, rctx->tmpbuf, rctx->buflen); in qce_ahash_update()
299 scatterwalk_sg_chain(rctx->sg, 2, req->src); in qce_ahash_update()
[all …]
ablkcipher.c
70 struct scatterlist *sg; in qce_ablkcipher_async_req_handle() local
104 sg = qce_sgtable_add(&rctx->dst_tbl, req->dst); in qce_ablkcipher_async_req_handle()
105 if (IS_ERR(sg)) { in qce_ablkcipher_async_req_handle()
106 ret = PTR_ERR(sg); in qce_ablkcipher_async_req_handle()
110 sg = qce_sgtable_add(&rctx->dst_tbl, &rctx->result_sg); in qce_ablkcipher_async_req_handle()
111 if (IS_ERR(sg)) { in qce_ablkcipher_async_req_handle()
112 ret = PTR_ERR(sg); in qce_ablkcipher_async_req_handle()
116 sg_mark_end(sg); in qce_ablkcipher_async_req_handle()
/linux-4.1.27/drivers/crypto/caam/
sg_sw_sec4.h
33 sg_to_sec4_sg(struct scatterlist *sg, int sg_count, in sg_to_sec4_sg() argument
37 dma_to_sec4_sg_one(sec4_sg_ptr, sg_dma_address(sg), in sg_to_sec4_sg()
38 sg_dma_len(sg), offset); in sg_to_sec4_sg()
40 sg = sg_next(sg); in sg_to_sec4_sg()
50 static inline void sg_to_sec4_sg_last(struct scatterlist *sg, int sg_count, in sg_to_sec4_sg_last() argument
54 sec4_sg_ptr = sg_to_sec4_sg(sg, sg_count, sec4_sg_ptr, offset); in sg_to_sec4_sg_last()
62 struct scatterlist *sg = sg_list; in __sg_count() local
67 nbytes -= sg->length; in __sg_count()
68 if (!sg_is_last(sg) && (sg + 1)->length == 0) in __sg_count()
70 sg = sg_next(sg); in __sg_count()
[all …]
/linux-4.1.27/Documentation/scsi/
scsi-generic.txt
1 Notes on Linux SCSI Generic (sg) driver
6 The SCSI Generic driver (sg) is one of the four "high level" SCSI device
10 Thus sg is used for scanners, CD writers and reading audio CDs digitally
18 Major versions of the sg driver
20 There are three major versions of sg found in the linux kernel (lk):
21 - sg version 1 (original) from 1992 to early 1999 (lk 2.2.5) .
23 - sg version 2 from lk 2.2.6 in the 2.2 series. It is based on
25 - sg version 3 found in the lk 2.4 series (and the lk 2.5 series).
31 The most recent documentation of the sg driver is kept at the Linux
34 This describes the sg version 3 driver found in the lk 2.4 series.
[all …]
/linux-4.1.27/crypto/
scatterwalk.c
33 void scatterwalk_start(struct scatter_walk *walk, struct scatterlist *sg) in scatterwalk_start() argument
35 walk->sg = sg; in scatterwalk_start()
37 BUG_ON(!sg->length); in scatterwalk_start()
39 walk->offset = sg->offset; in scatterwalk_start()
56 page = sg_page(walk->sg) + ((walk->offset - 1) >> PAGE_SHIFT); in scatterwalk_pagedone()
64 if (walk->offset >= walk->sg->offset + walk->sg->length) in scatterwalk_pagedone()
65 scatterwalk_start(walk, sg_next(walk->sg)); in scatterwalk_pagedone()
103 void scatterwalk_map_and_copy(void *buf, struct scatterlist *sg, in scatterwalk_map_and_copy() argument
113 scatterwalk_start(&walk, sg); in scatterwalk_map_and_copy()
115 if (start < offset + sg->length) in scatterwalk_map_and_copy()
[all …]
algif_skcipher.c
31 struct scatterlist sg[0]; member
87 struct scatterlist *sg; in skcipher_free_async_sgls() local
97 for_each_sg(sgl, sg, n, i) in skcipher_free_async_sgls()
98 put_page(sg_page(sg)); in skcipher_free_async_sgls()
136 struct scatterlist *sg = NULL; in skcipher_alloc_sgl() local
140 sg = sgl->sg; in skcipher_alloc_sgl()
142 if (!sg || sgl->cur >= MAX_SGL_ENTS) { in skcipher_alloc_sgl()
144 sizeof(sgl->sg[0]) * (MAX_SGL_ENTS + 1), in skcipher_alloc_sgl()
149 sg_init_table(sgl->sg, MAX_SGL_ENTS + 1); in skcipher_alloc_sgl()
152 if (sg) in skcipher_alloc_sgl()
[all …]
algif_aead.c
28 struct scatterlist sg[ALG_MAX_PAGES]; member
82 struct scatterlist *sg = sgl->sg; in aead_put_sgl() local
86 if (!sg_page(sg + i)) in aead_put_sgl()
89 put_page(sg_page(sg + i)); in aead_put_sgl()
90 sg_assign_page(sg + i, NULL); in aead_put_sgl()
215 struct scatterlist *sg = NULL; in aead_sendmsg() local
219 sg = sgl->sg + sgl->cur - 1; in aead_sendmsg()
221 PAGE_SIZE - sg->offset - sg->length); in aead_sendmsg()
222 err = memcpy_from_msg(page_address(sg_page(sg)) + in aead_sendmsg()
223 sg->offset + sg->length, in aead_sendmsg()
[all …]
tcrypt.c
76 struct scatterlist *sg, int blen, int secs) in test_cipher_jiffies() argument
85 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen); in test_cipher_jiffies()
87 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen); in test_cipher_jiffies()
99 struct scatterlist *sg, int blen) in test_cipher_cycles() argument
110 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen); in test_cipher_cycles()
112 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen); in test_cipher_cycles()
124 ret = crypto_blkcipher_encrypt(desc, sg, sg, blen); in test_cipher_cycles()
126 ret = crypto_blkcipher_decrypt(desc, sg, sg, blen); in test_cipher_cycles()
247 static void sg_init_aead(struct scatterlist *sg, char *xbuf[XBUFSIZE], in sg_init_aead() argument
260 sg_init_table(sg, np); in sg_init_aead()
[all …]
twofish_common.c
500 ctx->s[2][i] = mds[2][q1[(a) ^ sc] ^ sg]; \
508 ctx->s[2][i] = mds[2][q1[q0[(a) ^ sc] ^ sg] ^ sk]; \
516 ctx->s[2][i] = mds[2][q1[q0[q0[(a) ^ sc] ^ sg] ^ sk] ^ so]; \
593 u8 sa = 0, sb = 0, sc = 0, sd = 0, se = 0, sf = 0, sg = 0, sh = 0; in __twofish_setkey() local
618 CALC_S (se, sf, sg, sh, 8, 0x00, 0x2D, 0x01, 0x2D); /* 01 A4 02 A4 */ in __twofish_setkey()
619 CALC_S (se, sf, sg, sh, 9, 0x2D, 0xA4, 0x44, 0x8A); /* A4 56 A1 55 */ in __twofish_setkey()
620 CALC_S (se, sf, sg, sh, 10, 0x8A, 0xD5, 0xBF, 0xD1); /* 55 82 FC 87 */ in __twofish_setkey()
621 CALC_S (se, sf, sg, sh, 11, 0xD1, 0x7F, 0x3D, 0x99); /* 87 F3 C1 5A */ in __twofish_setkey()
622 CALC_S (se, sf, sg, sh, 12, 0x99, 0x46, 0x66, 0x96); /* 5A 1E 47 58 */ in __twofish_setkey()
623 CALC_S (se, sf, sg, sh, 13, 0x96, 0x3C, 0x5B, 0xED); /* 58 C6 AE DB */ in __twofish_setkey()
[all …]
ahash.c
69 struct scatterlist *sg; in hash_walk_new_entry() local
71 sg = walk->sg; in hash_walk_new_entry()
72 walk->offset = sg->offset; in hash_walk_new_entry()
73 walk->pg = sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT); in hash_walk_new_entry()
75 walk->entrylen = sg->length; in hash_walk_new_entry()
125 walk->sg = sg_next(walk->sg); in crypto_hash_walk_done()
142 walk->sg = req->src; in crypto_hash_walk_first()
160 walk->sg = req->src; in crypto_ahash_walk_first()
172 struct scatterlist *sg, unsigned int len) in crypto_hash_walk_first_compat() argument
182 walk->sg = sg; in crypto_hash_walk_first_compat()
shash.c
277 struct scatterlist *sg = req->src; in shash_ahash_digest() local
278 unsigned int offset = sg->offset; in shash_ahash_digest()
282 if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) { in shash_ahash_digest()
285 data = kmap_atomic(sg_page(sg)); in shash_ahash_digest()
390 static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg, in shash_compat_update() argument
398 for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len); in shash_compat_update()
412 static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg, in shash_compat_digest() argument
415 unsigned int offset = sg->offset; in shash_compat_digest()
418 if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) { in shash_compat_digest()
425 data = kmap_atomic(sg_page(sg)); in shash_compat_digest()
[all …]
authenc.c
41 struct scatterlist *sg; member
134 ahash_request_set_crypt(ahreq, areq_ctx->sg, ahreq->result, in authenc_geniv_ahash_update_done()
144 scatterwalk_map_and_copy(ahreq->result, areq_ctx->sg, in authenc_geniv_ahash_update_done()
163 scatterwalk_map_and_copy(ahreq->result, areq_ctx->sg, in authenc_geniv_ahash_done()
187 ahash_request_set_crypt(ahreq, areq_ctx->sg, ahreq->result, in authenc_verify_ahash_update_done()
200 scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen, in authenc_verify_ahash_update_done()
239 scatterwalk_map_and_copy(ihash, areq_ctx->sg, areq_ctx->cryptlen, in authenc_verify_ahash_done()
286 ahash_request_set_crypt(ahreq, areq_ctx->sg, hash, in crypto_authenc_ahash_fb()
312 ahash_request_set_crypt(ahreq, areq_ctx->sg, hash, in crypto_authenc_ahash()
361 areq_ctx->sg = dst; in crypto_authenc_genicv()
[all …]
/linux-4.1.27/lib/
scatterlist.c
25 struct scatterlist *sg_next(struct scatterlist *sg) in sg_next() argument
28 BUG_ON(sg->sg_magic != SG_MAGIC); in sg_next()
30 if (sg_is_last(sg)) in sg_next()
33 sg++; in sg_next()
34 if (unlikely(sg_is_chain(sg))) in sg_next()
35 sg = sg_chain_ptr(sg); in sg_next()
37 return sg; in sg_next()
50 int sg_nents(struct scatterlist *sg) in sg_nents() argument
53 for (nents = 0; sg; sg = sg_next(sg)) in sg_nents()
79 struct scatterlist *sg, *ret = NULL; in sg_last()
[all …]
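
sg_next() above is the generic walker: it steps to the adjacent entry, transparently hops across chain entries via sg_chain_ptr(), and returns NULL past the entry marked last. A small sketch built only on that walk, in the style of the sg_nents() shown above (the helper name is illustrative):

#include <linux/scatterlist.h>

/*
 * Sketch: total the bytes covered by a possibly-chained list.
 * The loop mirrors sg_nents() above, relying on sg_next() to
 * follow chain entries and to stop after the "last" entry.
 */
static size_t sg_total_bytes_sketch(struct scatterlist *sgl)
{
	struct scatterlist *sg;
	size_t total = 0;

	for (sg = sgl; sg; sg = sg_next(sg))
		total += sg->length;
	return total;
}
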
swiotlb.c
875 struct scatterlist *sg; in swiotlb_map_sg_attrs() local
880 for_each_sg(sgl, sg, nelems, i) { in swiotlb_map_sg_attrs()
881 phys_addr_t paddr = sg_phys(sg); in swiotlb_map_sg_attrs()
885 !dma_capable(hwdev, dev_addr, sg->length)) { in swiotlb_map_sg_attrs()
886 phys_addr_t map = map_single(hwdev, sg_phys(sg), in swiotlb_map_sg_attrs()
887 sg->length, dir); in swiotlb_map_sg_attrs()
891 swiotlb_full(hwdev, sg->length, dir, 0); in swiotlb_map_sg_attrs()
897 sg->dma_address = phys_to_dma(hwdev, map); in swiotlb_map_sg_attrs()
899 sg->dma_address = dev_addr; in swiotlb_map_sg_attrs()
900 sg_dma_len(sg) = sg->length; in swiotlb_map_sg_attrs()
[all …]
/linux-4.1.27/arch/nios2/mm/
dma-mapping.c
59 int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, in dma_map_sg() argument
66 for_each_sg(sg, sg, nents, i) { in dma_map_sg()
69 addr = sg_virt(sg); in dma_map_sg()
71 __dma_sync_for_device(addr, sg->length, direction); in dma_map_sg()
72 sg->dma_address = sg_phys(sg); in dma_map_sg()
104 void dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nhwentries, in dma_unmap_sg() argument
115 for_each_sg(sg, sg, nhwentries, i) { in dma_unmap_sg()
116 addr = sg_virt(sg); in dma_unmap_sg()
118 __dma_sync_for_cpu(addr, sg->length, direction); in dma_unmap_sg()
161 void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, int nelems, in dma_sync_sg_for_cpu() argument
[all …]
/linux-4.1.27/samples/kfifo/
dma-example.c
28 struct scatterlist sg[10]; in example_init() local
64 sg_init_table(sg, ARRAY_SIZE(sg)); in example_init()
65 nents = kfifo_dma_in_prepare(&fifo, sg, ARRAY_SIZE(sg), FIFO_SIZE); in example_init()
79 i, sg[i].page_link, sg[i].offset, sg[i].length); in example_init()
81 if (sg_is_last(&sg[i])) in example_init()
95 nents = kfifo_dma_out_prepare(&fifo, sg, ARRAY_SIZE(sg), 8); in example_init()
108 i, sg[i].page_link, sg[i].offset, sg[i].length); in example_init()
110 if (sg_is_last(&sg[i])) in example_init()
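This kfifo sample shows only the prepare half of the pattern. A hedged sketch of the usual prepare/finish pairing follows ("fifo", "len", and the helper name are illustrative; the kfifo_dma_* macros are from <linux/kfifo.h>, and the finish call is assumed to commit the bytes the DMA engine actually produced):

#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kfifo.h>
#include <linux/scatterlist.h>

/*
 * Sketch: map the fifo's free space as SG entries, let a DMA
 * engine fill them, then commit the produced bytes. DMA setup,
 * completion waiting, and error handling are elided.
 */
static int fifo_dma_in_sketch(struct kfifo *fifo, unsigned int len)
{
	struct scatterlist sg[10];
	unsigned int nents;

	nents = kfifo_dma_in_prepare(fifo, sg, ARRAY_SIZE(sg), len);
	if (!nents)
		return -ENOSPC;		/* no room in the fifo */
	/* ... start DMA on sg[0..nents-1] and wait for it ... */
	kfifo_dma_in_finish(fifo, len);	/* commit what was written */
	return 0;
}
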
/linux-4.1.27/drivers/media/pci/tw68/
tw68-risc.c
46 struct scatterlist *sg; in tw68_risc_field() local
62 sg = sglist; in tw68_risc_field()
65 while (offset && offset >= sg_dma_len(sg)) { in tw68_risc_field()
66 offset -= sg_dma_len(sg); in tw68_risc_field()
67 sg = sg_next(sg); in tw68_risc_field()
69 if (bpl <= sg_dma_len(sg) - offset) { in tw68_risc_field()
73 *(rp++) = cpu_to_le32(sg_dma_address(sg) + offset); in tw68_risc_field()
84 done = (sg_dma_len(sg) - offset); in tw68_risc_field()
88 *(rp++) = cpu_to_le32(sg_dma_address(sg) + offset); in tw68_risc_field()
90 sg = sg_next(sg); in tw68_risc_field()
[all …]
/linux-4.1.27/arch/tile/kernel/
pci-dma.c
207 struct scatterlist *sg; in tile_dma_map_sg() local
214 for_each_sg(sglist, sg, nents, i) { in tile_dma_map_sg()
215 sg->dma_address = sg_phys(sg); in tile_dma_map_sg()
216 __dma_prep_pa_range(sg->dma_address, sg->length, direction); in tile_dma_map_sg()
218 sg->dma_length = sg->length; in tile_dma_map_sg()
229 struct scatterlist *sg; in tile_dma_unmap_sg() local
233 for_each_sg(sglist, sg, nents, i) { in tile_dma_unmap_sg()
234 sg->dma_address = sg_phys(sg); in tile_dma_unmap_sg()
235 __dma_complete_pa_range(sg->dma_address, sg->length, in tile_dma_unmap_sg()
284 struct scatterlist *sg; in tile_dma_sync_sg_for_cpu() local
[all …]
/linux-4.1.27/include/crypto/
scatterwalk.h
37 struct scatterlist *sg, in scatterwalk_crypto_chain() argument
41 head->length += sg->length; in scatterwalk_crypto_chain()
42 sg = sg_next(sg); in scatterwalk_crypto_chain()
45 if (sg) in scatterwalk_crypto_chain()
46 scatterwalk_sg_chain(head, num, sg); in scatterwalk_crypto_chain()
54 return !(((sg_page(walk_in->sg) - sg_page(walk_out->sg)) << PAGE_SHIFT) + in scatterwalk_samebuf()
60 unsigned int len = walk->sg->offset + walk->sg->length - walk->offset; in scatterwalk_pagelen()
86 return sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT); in scatterwalk_page()
94 void scatterwalk_start(struct scatter_walk *walk, struct scatterlist *sg);
100 void scatterwalk_map_and_copy(void *buf, struct scatterlist *sg,
[all …]
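
scatterwalk_map_and_copy() above is the workhorse the authenc/algif snippets in these results rely on; in 4.1 its signature is (buf, sg, start, nbytes, out), where "out" selects the copy direction. A short sketch of the tag-extraction pattern authenc.c uses (names are illustrative):

#include <crypto/scatterwalk.h>
#include <linux/types.h>

/*
 * Sketch: copy "taglen" bytes starting at offset "cryptlen" out of
 * an SG list into a flat buffer. out = 0 means SG -> buffer; a
 * nonzero "out" would write the buffer back into the SG list.
 */
static void read_tag_sketch(struct scatterlist *src, unsigned int cryptlen,
			    u8 *tag, unsigned int taglen)
{
	scatterwalk_map_and_copy(tag, src, cryptlen, taglen, 0);
}
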
/linux-4.1.27/arch/metag/include/asm/
dma-mapping.h
51 struct scatterlist *sg; in dma_map_sg() local
57 for_each_sg(sglist, sg, nents, i) { in dma_map_sg()
58 BUG_ON(!sg_page(sg)); in dma_map_sg()
60 sg->dma_address = sg_phys(sg); in dma_map_sg()
61 dma_sync_for_device(sg_virt(sg), sg->length, direction); in dma_map_sg()
90 struct scatterlist *sg; in dma_unmap_sg() local
96 for_each_sg(sglist, sg, nhwentries, i) { in dma_unmap_sg()
97 BUG_ON(!sg_page(sg)); in dma_unmap_sg()
99 sg->dma_address = sg_phys(sg); in dma_unmap_sg()
100 dma_sync_for_cpu(sg_virt(sg), sg->length, direction); in dma_unmap_sg()
[all …]
/linux-4.1.27/arch/c6x/kernel/
dma.c
67 struct scatterlist *sg; in dma_map_sg() local
70 for_each_sg(sglist, sg, nents, i) in dma_map_sg()
71 sg->dma_address = dma_map_single(dev, sg_virt(sg), sg->length, in dma_map_sg()
84 struct scatterlist *sg; in dma_unmap_sg() local
87 for_each_sg(sglist, sg, nents, i) in dma_unmap_sg()
88 dma_unmap_single(dev, sg_dma_address(sg), sg->length, dir); in dma_unmap_sg()
117 struct scatterlist *sg; in dma_sync_sg_for_cpu() local
120 for_each_sg(sglist, sg, nents, i) in dma_sync_sg_for_cpu()
121 dma_sync_single_for_cpu(dev, sg_dma_address(sg), in dma_sync_sg_for_cpu()
122 sg->length, dir); in dma_sync_sg_for_cpu()
[all …]
/linux-4.1.27/drivers/scsi/aacraid/
commctrl.c
560 if (user_srbcmd->sg.count > ARRAY_SIZE(sg_list)) { in aac_send_raw_srb()
562 le32_to_cpu(srbcmd->sg.count))); in aac_send_raw_srb()
567 ((user_srbcmd->sg.count & 0xff) * sizeof(struct sgentry)); in aac_send_raw_srb()
568 actual_fibsize64 = actual_fibsize + (user_srbcmd->sg.count & 0xff) * in aac_send_raw_srb()
576 actual_fibsize, actual_fibsize64, user_srbcmd->sg.count, in aac_send_raw_srb()
582 if ((data_dir == DMA_NONE) && user_srbcmd->sg.count) { in aac_send_raw_srb()
589 struct user_sgmap64* upsg = (struct user_sgmap64*)&user_srbcmd->sg; in aac_send_raw_srb()
590 struct sgmap64* psg = (struct sgmap64*)&srbcmd->sg; in aac_send_raw_srb()
600 if (upsg->sg[i].count > in aac_send_raw_srb()
609 p = kmalloc(upsg->sg[i].count,GFP_KERNEL|__GFP_DMA); in aac_send_raw_srb()
[all …]
aachba.c
1183 ret = aac_build_sgraw(cmd, &readcmd->sg); in aac_read_raw_io()
1188 ((le32_to_cpu(readcmd->sg.count)-1) * sizeof(struct sgentryraw)); in aac_read_raw_io()
1219 ret = aac_build_sg64(cmd, &readcmd->sg); in aac_read_block64()
1223 ((le32_to_cpu(readcmd->sg.count) - 1) * in aac_read_block64()
1254 ret = aac_build_sg(cmd, &readcmd->sg); in aac_read_block()
1258 ((le32_to_cpu(readcmd->sg.count) - 1) * in aac_read_block()
1315 ret = aac_build_sgraw(cmd, &writecmd->sg); in aac_write_raw_io()
1320 ((le32_to_cpu(writecmd->sg.count)-1) * sizeof (struct sgentryraw)); in aac_write_raw_io()
1351 ret = aac_build_sg64(cmd, &writecmd->sg); in aac_write_block64()
1355 ((le32_to_cpu(writecmd->sg.count) - 1) * in aac_write_block64()
[all …]
/linux-4.1.27/net/sunrpc/auth_gss/
gss_krb5_crypto.c
61 struct scatterlist sg[1]; in krb5_encrypt() local
78 sg_init_one(sg, out, length); in krb5_encrypt()
80 ret = crypto_blkcipher_encrypt_iv(&desc, sg, sg, length); in krb5_encrypt()
95 struct scatterlist sg[1]; in krb5_decrypt() local
111 sg_init_one(sg, out, length); in krb5_decrypt()
113 ret = crypto_blkcipher_decrypt_iv(&desc, sg, sg, length); in krb5_decrypt()
120 checksummer(struct scatterlist *sg, void *data) in checksummer() argument
124 return crypto_hash_update(desc, sg, sg->length); in checksummer()
156 struct scatterlist sg[1]; in make_checksum_hmac_md5() local
194 sg_init_one(sg, rc4salt, 4); in make_checksum_hmac_md5()
[all …]
/linux-4.1.27/arch/mips/mm/
dma-default.c
265 static int mips_dma_map_sg(struct device *dev, struct scatterlist *sg, in mips_dma_map_sg() argument
270 for (i = 0; i < nents; i++, sg++) { in mips_dma_map_sg()
272 __dma_sync(sg_page(sg), sg->offset, sg->length, in mips_dma_map_sg()
275 sg->dma_length = sg->length; in mips_dma_map_sg()
277 sg->dma_address = plat_map_dma_mem_page(dev, sg_page(sg)) + in mips_dma_map_sg()
278 sg->offset; in mips_dma_map_sg()
294 static void mips_dma_unmap_sg(struct device *dev, struct scatterlist *sg, in mips_dma_unmap_sg() argument
300 for (i = 0; i < nhwentries; i++, sg++) { in mips_dma_unmap_sg()
303 __dma_sync(sg_page(sg), sg->offset, sg->length, in mips_dma_unmap_sg()
305 plat_unmap_dma_mem(dev, sg->dma_address, sg->length, direction); in mips_dma_unmap_sg()
[all …]
/linux-4.1.27/arch/sparc/mm/
iommu.c
238 static void iommu_get_scsi_sgl_gflush(struct device *dev, struct scatterlist *sg, int sz) in iommu_get_scsi_sgl_gflush() argument
245 n = (sg->length + sg->offset + PAGE_SIZE-1) >> PAGE_SHIFT; in iommu_get_scsi_sgl_gflush()
246 sg->dma_address = iommu_get_one(dev, sg_page(sg), n) + sg->offset; in iommu_get_scsi_sgl_gflush()
247 sg->dma_length = sg->length; in iommu_get_scsi_sgl_gflush()
248 sg = sg_next(sg); in iommu_get_scsi_sgl_gflush()
252 static void iommu_get_scsi_sgl_pflush(struct device *dev, struct scatterlist *sg, int sz) in iommu_get_scsi_sgl_pflush() argument
260 n = (sg->length + sg->offset + PAGE_SIZE-1) >> PAGE_SHIFT; in iommu_get_scsi_sgl_pflush()
267 if ((page = (unsigned long) page_address(sg_page(sg))) != 0) { in iommu_get_scsi_sgl_pflush()
277 sg->dma_address = iommu_get_one(dev, sg_page(sg), n) + sg->offset; in iommu_get_scsi_sgl_pflush()
278 sg->dma_length = sg->length; in iommu_get_scsi_sgl_pflush()
[all …]
io-unit.c
153 static void iounit_get_scsi_sgl(struct device *dev, struct scatterlist *sg, int sz) in iounit_get_scsi_sgl() argument
162 sg->dma_address = iounit_get_area(iounit, (unsigned long) sg_virt(sg), sg->length); in iounit_get_scsi_sgl()
163 sg->dma_length = sg->length; in iounit_get_scsi_sgl()
164 sg = sg_next(sg); in iounit_get_scsi_sgl()
183 static void iounit_release_scsi_sgl(struct device *dev, struct scatterlist *sg, int sz) in iounit_release_scsi_sgl() argument
192 len = ((sg->dma_address & ~PAGE_MASK) + sg->length + (PAGE_SIZE-1)) >> PAGE_SHIFT; in iounit_release_scsi_sgl()
193 vaddr = (sg->dma_address - IOUNIT_DMA_BASE) >> PAGE_SHIFT; in iounit_release_scsi_sgl()
197 sg = sg_next(sg); in iounit_release_scsi_sgl()
/linux-4.1.27/net/rxrpc/
rxkad.c
117 struct scatterlist sg[2]; in rxkad_prime_packet_security() local
140 sg_init_one(&sg[0], &tmpbuf, sizeof(tmpbuf)); in rxkad_prime_packet_security()
141 sg_init_one(&sg[1], &tmpbuf, sizeof(tmpbuf)); in rxkad_prime_packet_security()
142 crypto_blkcipher_encrypt_iv(&desc, &sg[0], &sg[1], sizeof(tmpbuf)); in rxkad_prime_packet_security()
161 struct scatterlist sg[2]; in rxkad_secure_packet_auth() local
184 sg_init_one(&sg[0], &tmpbuf, sizeof(tmpbuf)); in rxkad_secure_packet_auth()
185 sg_init_one(&sg[1], &tmpbuf, sizeof(tmpbuf)); in rxkad_secure_packet_auth()
186 crypto_blkcipher_encrypt_iv(&desc, &sg[0], &sg[1], sizeof(tmpbuf)); in rxkad_secure_packet_auth()
208 struct scatterlist sg[16]; in rxkad_secure_packet_encrypt() local
230 sg_init_one(&sg[0], sechdr, sizeof(rxkhdr)); in rxkad_secure_packet_encrypt()
[all …]
/linux-4.1.27/arch/microblaze/kernel/
dma.c
58 struct scatterlist *sg; in dma_direct_map_sg() local
62 for_each_sg(sgl, sg, nents, i) { in dma_direct_map_sg()
63 sg->dma_address = sg_phys(sg); in dma_direct_map_sg()
64 __dma_sync(page_to_phys(sg_page(sg)) + sg->offset, in dma_direct_map_sg()
65 sg->length, direction); in dma_direct_map_sg()
134 struct scatterlist *sg; in dma_direct_sync_sg_for_cpu() local
139 for_each_sg(sgl, sg, nents, i) in dma_direct_sync_sg_for_cpu()
140 __dma_sync(sg->dma_address, sg->length, direction); in dma_direct_sync_sg_for_cpu()
148 struct scatterlist *sg; in dma_direct_sync_sg_for_device() local
153 for_each_sg(sgl, sg, nents, i) in dma_direct_sync_sg_for_device()
[all …]
/linux-4.1.27/arch/xtensa/include/asm/
dma-mapping.h
55 dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, in dma_map_sg() argument
62 for (i = 0; i < nents; i++, sg++ ) { in dma_map_sg()
63 BUG_ON(!sg_page(sg)); in dma_map_sg()
65 sg->dma_address = sg_phys(sg); in dma_map_sg()
66 consistent_sync(sg_virt(sg), sg->length, direction); in dma_map_sg()
89 dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nhwentries, in dma_unmap_sg() argument
127 dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, int nelems, in dma_sync_sg_for_cpu() argument
131 for (i = 0; i < nelems; i++, sg++) in dma_sync_sg_for_cpu()
132 consistent_sync(sg_virt(sg), sg->length, dir); in dma_sync_sg_for_cpu()
136 dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg, int nelems, in dma_sync_sg_for_device() argument
[all …]
/linux-4.1.27/drivers/gpu/drm/msm/
msm_iommu.c
51 struct scatterlist *sg; in msm_iommu_map() local
59 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in msm_iommu_map()
60 u32 pa = sg_phys(sg) - sg->offset; in msm_iommu_map()
61 size_t bytes = sg->length + sg->offset; in msm_iommu_map()
77 for_each_sg(sgt->sgl, sg, i, j) { in msm_iommu_map()
78 size_t bytes = sg->length + sg->offset; in msm_iommu_map()
90 struct scatterlist *sg; in msm_iommu_unmap() local
94 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in msm_iommu_unmap()
95 size_t bytes = sg->length + sg->offset; in msm_iommu_unmap()
msm_gem_prime.c
54 struct dma_buf_attachment *attach, struct sg_table *sg) in msm_gem_prime_import_sg_table() argument
56 return msm_gem_import(dev, attach->dmabuf->size, sg); in msm_gem_prime_import_sg_table()
/linux-4.1.27/arch/blackfin/kernel/
dma-mapping.c
119 struct scatterlist *sg; in dma_map_sg() local
122 for_each_sg(sg_list, sg, nents, i) { in dma_map_sg()
123 sg->dma_address = (dma_addr_t) sg_virt(sg); in dma_map_sg()
124 __dma_sync(sg_dma_address(sg), sg_dma_len(sg), direction); in dma_map_sg()
134 struct scatterlist *sg; in dma_sync_sg_for_device() local
137 for_each_sg(sg_list, sg, nelems, i) { in dma_sync_sg_for_device()
138 sg->dma_address = (dma_addr_t) sg_virt(sg); in dma_sync_sg_for_device()
139 __dma_sync(sg_dma_address(sg), sg_dma_len(sg), direction); in dma_sync_sg_for_device()
/linux-4.1.27/drivers/gpu/drm/omapdrm/
omap_gem_dmabuf.c
29 struct sg_table *sg; in omap_gem_map_dma_buf() local
33 sg = kzalloc(sizeof(*sg), GFP_KERNEL); in omap_gem_map_dma_buf()
34 if (!sg) in omap_gem_map_dma_buf()
44 ret = sg_alloc_table(sg, 1, GFP_KERNEL); in omap_gem_map_dma_buf()
48 sg_init_table(sg->sgl, 1); in omap_gem_map_dma_buf()
49 sg_dma_len(sg->sgl) = obj->size; in omap_gem_map_dma_buf()
50 sg_set_page(sg->sgl, pfn_to_page(PFN_DOWN(paddr)), obj->size, 0); in omap_gem_map_dma_buf()
51 sg_dma_address(sg->sgl) = paddr; in omap_gem_map_dma_buf()
56 return sg; in omap_gem_map_dma_buf()
58 kfree(sg); in omap_gem_map_dma_buf()
[all …]
/linux-4.1.27/arch/alpha/kernel/
pci_iommu.c
504 sg_classify(struct device *dev, struct scatterlist *sg, struct scatterlist *end, in sg_classify() argument
512 leader = sg; in sg_classify()
519 for (++sg; sg < end; ++sg) { in sg_classify()
521 addr = SG_ENT_PHYS_ADDRESS(sg); in sg_classify()
522 len = sg->length; in sg_classify()
528 sg->dma_address = -1; in sg_classify()
531 sg->dma_address = -2; in sg_classify()
538 leader = sg; in sg_classify()
560 struct scatterlist *sg; in sg_fill() local
617 sg = leader; in sg_fill()
[all …]
pci-noop.c
145 struct scatterlist *sg; in alpha_noop_map_sg() local
147 for_each_sg(sgl, sg, nents, i) { in alpha_noop_map_sg()
150 BUG_ON(!sg_page(sg)); in alpha_noop_map_sg()
151 va = sg_virt(sg); in alpha_noop_map_sg()
152 sg_dma_address(sg) = (dma_addr_t)virt_to_phys(va); in alpha_noop_map_sg()
153 sg_dma_len(sg) = sg->length; in alpha_noop_map_sg()
/linux-4.1.27/drivers/target/tcm_fc/
tfc_io.c
66 struct scatterlist *sg = NULL; in ft_queue_data_in() local
100 sg = se_cmd->t_data_sg; in ft_queue_data_in()
101 mem_len = sg->length; in ft_queue_data_in()
102 mem_off = sg->offset; in ft_queue_data_in()
103 page = sg_page(sg); in ft_queue_data_in()
118 sg = sg_next(sg); in ft_queue_data_in()
119 mem_len = min((size_t)sg->length, remaining); in ft_queue_data_in()
120 mem_off = sg->offset; in ft_queue_data_in()
121 page = sg_page(sg); in ft_queue_data_in()
222 struct scatterlist *sg = NULL; in ft_recv_write_data() local
[all …]
/linux-4.1.27/drivers/mmc/card/
queue.c
147 struct scatterlist *sg; in mmc_alloc_sg() local
149 sg = kmalloc(sizeof(struct scatterlist)*sg_len, GFP_KERNEL); in mmc_alloc_sg()
150 if (!sg) in mmc_alloc_sg()
154 sg_init_table(sg, sg_len); in mmc_alloc_sg()
157 return sg; in mmc_alloc_sg()
253 mqrq_cur->sg = mmc_alloc_sg(1, &ret); in mmc_init_queue()
262 mqrq_prev->sg = mmc_alloc_sg(1, &ret); in mmc_init_queue()
281 mqrq_cur->sg = mmc_alloc_sg(host->max_segs, &ret); in mmc_init_queue()
286 mqrq_prev->sg = mmc_alloc_sg(host->max_segs, &ret); in mmc_init_queue()
309 kfree(mqrq_cur->sg); in mmc_init_queue()
[all …]
mmc_test.c
84 struct scatterlist *sg; member
191 struct mmc_request *mrq, struct scatterlist *sg, unsigned sg_len, in mmc_test_prepare_mrq() argument
221 mrq->data->sg = sg; in mmc_test_prepare_mrq()
278 struct scatterlist sg; in mmc_test_buffer_transfer() local
284 sg_init_one(&sg, buffer, blksz); in mmc_test_buffer_transfer()
286 mmc_test_prepare_mrq(test, &mrq, &sg, 1, addr, 1, blksz, write); in mmc_test_buffer_transfer()
399 struct scatterlist *sg = NULL; in mmc_test_map_sg() local
418 if (sg) in mmc_test_map_sg()
419 sg = sg_next(sg); in mmc_test_map_sg()
421 sg = sglist; in mmc_test_map_sg()
[all …]
/linux-4.1.27/drivers/staging/android/ion/
ion_chunk_heap.c
44 struct scatterlist *sg; in ion_chunk_heap_allocate() local
67 sg = table->sgl; in ion_chunk_heap_allocate()
73 sg_set_page(sg, pfn_to_page(PFN_DOWN(paddr)), in ion_chunk_heap_allocate()
75 sg = sg_next(sg); in ion_chunk_heap_allocate()
82 sg = table->sgl; in ion_chunk_heap_allocate()
84 gen_pool_free(chunk_heap->pool, page_to_phys(sg_page(sg)), in ion_chunk_heap_allocate()
85 sg->length); in ion_chunk_heap_allocate()
86 sg = sg_next(sg); in ion_chunk_heap_allocate()
99 struct scatterlist *sg; in ion_chunk_heap_free() local
111 for_each_sg(table->sgl, sg, table->nents, i) { in ion_chunk_heap_free()
[all …]
ion_heap.c
31 struct scatterlist *sg; in ion_heap_map_kernel() local
48 for_each_sg(table->sgl, sg, table->nents, i) { in ion_heap_map_kernel()
49 int npages_this_entry = PAGE_ALIGN(sg->length) / PAGE_SIZE; in ion_heap_map_kernel()
50 struct page *page = sg_page(sg); in ion_heap_map_kernel()
77 struct scatterlist *sg; in ion_heap_map_user() local
81 for_each_sg(table->sgl, sg, table->nents, i) { in ion_heap_map_user()
82 struct page *page = sg_page(sg); in ion_heap_map_user()
84 unsigned long len = sg->length; in ion_heap_map_user()
86 if (offset >= sg->length) { in ion_heap_map_user()
87 offset -= sg->length; in ion_heap_map_user()
[all …]
ion_system_heap.c
129 struct scatterlist *sg; in ion_system_heap_allocate() local
160 sg = table->sgl; in ion_system_heap_allocate()
162 sg_set_page(sg, page, PAGE_SIZE << compound_order(page), 0); in ion_system_heap_allocate()
163 sg = sg_next(sg); in ion_system_heap_allocate()
185 struct scatterlist *sg; in ion_system_heap_free() local
193 for_each_sg(table->sgl, sg, table->nents, i) in ion_system_heap_free()
194 free_buffer_page(sys_heap, buffer, sg_page(sg)); in ion_system_heap_free()
/linux-4.1.27/drivers/usb/storage/
protocol.c
139 struct scatterlist *sg = *sgptr; in usb_stor_access_xfer_buf() local
143 if (sg) in usb_stor_access_xfer_buf()
144 nents = sg_nents(sg); in usb_stor_access_xfer_buf()
146 sg = scsi_sglist(srb); in usb_stor_access_xfer_buf()
148 sg_miter_start(&miter, sg, nents, dir == FROM_XFER_BUF ? in usb_stor_access_xfer_buf()
163 if (*offset + len < miter.piter.sg->length) { in usb_stor_access_xfer_buf()
165 *sgptr = miter.piter.sg; in usb_stor_access_xfer_buf()
168 *sgptr = sg_next(miter.piter.sg); in usb_stor_access_xfer_buf()
185 struct scatterlist *sg = NULL; in usb_stor_set_xfer_buf() local
188 buflen = usb_stor_access_xfer_buf(buffer, buflen, srb, &sg, &offset, in usb_stor_set_xfer_buf()
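
usb_stor_access_xfer_buf() above leans on the sg_miter iterator, which maps one SG page at a time so PIO code never has to do the page arithmetic by hand. A hedged sketch of the same pattern (function and parameter names are illustrative):

#include <linux/kernel.h>
#include <linux/scatterlist.h>
#include <linux/string.h>

/*
 * Sketch: copy a flat buffer into a scattered one. SG_MITER_TO_SG
 * tells the iterator the mapping will be written; miter.addr and
 * miter.length describe the chunk currently mapped.
 */
static size_t copy_to_sg_sketch(struct scatterlist *sgl, int nents,
				const u8 *src, size_t len)
{
	struct sg_mapping_iter miter;
	size_t copied = 0;

	sg_miter_start(&miter, sgl, nents, SG_MITER_TO_SG);
	while (copied < len && sg_miter_next(&miter)) {
		size_t n = min_t(size_t, miter.length, len - copied);

		memcpy(miter.addr, src + copied, n);
		copied += n;
	}
	sg_miter_stop(&miter);
	return copied;
}
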
/linux-4.1.27/drivers/infiniband/ulp/iser/
iser_memory.c
45 struct scatterlist *sg; in iser_free_bounce_sg() local
48 for_each_sg(data->sg, sg, data->size, count) in iser_free_bounce_sg()
49 __free_page(sg_page(sg)); in iser_free_bounce_sg()
51 kfree(data->sg); in iser_free_bounce_sg()
53 data->sg = data->orig_sg; in iser_free_bounce_sg()
62 struct scatterlist *sg; in iser_alloc_bounce_sg() local
67 sg = kcalloc(nents, sizeof(*sg), GFP_ATOMIC); in iser_alloc_bounce_sg()
68 if (!sg) in iser_alloc_bounce_sg()
71 sg_init_table(sg, nents); in iser_alloc_bounce_sg()
79 sg_set_page(&sg[i], page, page_len, 0); in iser_alloc_bounce_sg()
[all …]
/linux-4.1.27/include/asm-generic/
scatterlist.h
26 #define sg_dma_address(sg) ((sg)->dma_address) argument
29 #define sg_dma_len(sg) ((sg)->dma_length) argument
31 #define sg_dma_len(sg) ((sg)->length) argument
dma-mapping-common.h
46 static inline int dma_map_sg_attrs(struct device *dev, struct scatterlist *sg, in dma_map_sg_attrs() argument
54 for_each_sg(sg, s, nents, i) in dma_map_sg_attrs()
57 ents = ops->map_sg(dev, sg, nents, dir, attrs); in dma_map_sg_attrs()
59 debug_dma_map_sg(dev, sg, nents, ents, dir); in dma_map_sg_attrs()
64 static inline void dma_unmap_sg_attrs(struct device *dev, struct scatterlist *sg, in dma_unmap_sg_attrs() argument
71 debug_dma_unmap_sg(dev, sg, nents, dir); in dma_unmap_sg_attrs()
73 ops->unmap_sg(dev, sg, nents, dir, attrs); in dma_unmap_sg_attrs()
155 dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, in dma_sync_sg_for_cpu() argument
162 ops->sync_sg_for_cpu(dev, sg, nelems, dir); in dma_sync_sg_for_cpu()
163 debug_dma_sync_sg_for_cpu(dev, sg, nelems, dir); in dma_sync_sg_for_cpu()
[all …]
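
dma_map_sg_attrs()/dma_unmap_sg_attrs() above are the generic layer behind every per-arch dma_map_sg in these results. The calling convention worth noting: an IOMMU may merge entries, so the device is programmed with the returned count, while the unmap uses the original nents. A minimal sketch (helper name is illustrative):

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

/*
 * Sketch: map a scatterlist for device reads, hand the (possibly
 * merged) DMA segments to the hardware, then unmap. Note that
 * dma_unmap_sg() takes the original nents, not the mapped count.
 */
static int map_for_device_sketch(struct device *dev,
				 struct scatterlist *sgl, int nents)
{
	struct scatterlist *sg;
	int i, mapped;

	mapped = dma_map_sg(dev, sgl, nents, DMA_TO_DEVICE);
	if (!mapped)
		return -ENOMEM;

	for_each_sg(sgl, sg, mapped, i)
		dev_dbg(dev, "seg %d: %pad+%u\n", i,
			&sg->dma_address, sg_dma_len(sg));

	dma_unmap_sg(dev, sgl, nents, DMA_TO_DEVICE);
	return 0;
}
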
pci-dma-compat.h
65 pci_map_sg(struct pci_dev *hwdev, struct scatterlist *sg, in pci_map_sg() argument
68 …return dma_map_sg(hwdev == NULL ? NULL : &hwdev->dev, sg, nents, (enum dma_data_direction)directio… in pci_map_sg()
72 pci_unmap_sg(struct pci_dev *hwdev, struct scatterlist *sg, in pci_unmap_sg() argument
75 dma_unmap_sg(hwdev == NULL ? NULL : &hwdev->dev, sg, nents, (enum dma_data_direction)direction); in pci_unmap_sg()
93 pci_dma_sync_sg_for_cpu(struct pci_dev *hwdev, struct scatterlist *sg, in pci_dma_sync_sg_for_cpu() argument
96 …dma_sync_sg_for_cpu(hwdev == NULL ? NULL : &hwdev->dev, sg, nelems, (enum dma_data_direction)direc… in pci_dma_sync_sg_for_cpu()
100 pci_dma_sync_sg_for_device(struct pci_dev *hwdev, struct scatterlist *sg, in pci_dma_sync_sg_for_device() argument
103 …dma_sync_sg_for_device(hwdev == NULL ? NULL : &hwdev->dev, sg, nelems, (enum dma_data_direction)di… in pci_dma_sync_sg_for_device()
dma-mapping-broken.h
47 dma_map_sg(struct device *dev, struct scatterlist *sg, int nents,
51 dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nhwentries,
72 dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, int nelems,
/linux-4.1.27/drivers/s390/scsi/
zfcp_qdio.h
179 int zfcp_qdio_sg_one_sbale(struct scatterlist *sg) in zfcp_qdio_sg_one_sbale() argument
181 return sg_is_last(sg) && sg->length <= ZFCP_QDIO_SBALE_LEN; in zfcp_qdio_sg_one_sbale()
232 unsigned int zfcp_qdio_sbale_count(struct scatterlist *sg) in zfcp_qdio_sbale_count() argument
236 for (; sg; sg = sg_next(sg)) in zfcp_qdio_sbale_count()
247 unsigned int zfcp_qdio_real_bytes(struct scatterlist *sg) in zfcp_qdio_real_bytes() argument
251 for (; sg; sg = sg_next(sg)) in zfcp_qdio_real_bytes()
252 real_bytes += sg->length; in zfcp_qdio_real_bytes()
zfcp_aux.c
547 void zfcp_sg_free_table(struct scatterlist *sg, int count) in zfcp_sg_free_table() argument
551 for (i = 0; i < count; i++, sg++) in zfcp_sg_free_table()
552 if (sg) in zfcp_sg_free_table()
553 free_page((unsigned long) sg_virt(sg)); in zfcp_sg_free_table()
566 int zfcp_sg_setup_table(struct scatterlist *sg, int count) in zfcp_sg_setup_table() argument
571 sg_init_table(sg, count); in zfcp_sg_setup_table()
572 for (i = 0; i < count; i++, sg++) { in zfcp_sg_setup_table()
575 zfcp_sg_free_table(sg, i); in zfcp_sg_setup_table()
578 sg_set_buf(sg, addr, PAGE_SIZE); in zfcp_sg_setup_table()
/linux-4.1.27/drivers/gpu/drm/udl/
udl_dmabuf.c
101 obj->sg = drm_prime_pages_to_sg(obj->pages, page_count); in udl_map_dma_buf()
102 if (IS_ERR(obj->sg)) { in udl_map_dma_buf()
104 return ERR_CAST(obj->sg); in udl_map_dma_buf()
109 ret = sg_alloc_table(sgt, obj->sg->orig_nents, GFP_KERNEL); in udl_map_dma_buf()
117 rd = obj->sg->sgl; in udl_map_dma_buf()
217 struct sg_table *sg, in udl_prime_create() argument
230 obj->sg = sg; in udl_prime_create()
237 drm_prime_sg_to_page_addr_arrays(sg, obj->pages, NULL, npages); in udl_prime_create()
247 struct sg_table *sg; in udl_gem_prime_import() local
261 sg = dma_buf_map_attachment(attach, DMA_BIDIRECTIONAL); in udl_gem_prime_import()
[all …]
/linux-4.1.27/arch/powerpc/kernel/
dma.c
132 struct scatterlist *sg; in dma_direct_map_sg() local
135 for_each_sg(sgl, sg, nents, i) { in dma_direct_map_sg()
136 sg->dma_address = sg_phys(sg) + get_dma_offset(dev); in dma_direct_map_sg()
137 sg->dma_length = sg->length; in dma_direct_map_sg()
138 __dma_sync_page(sg_page(sg), sg->offset, sg->length, direction); in dma_direct_map_sg()
144 static void dma_direct_unmap_sg(struct device *dev, struct scatterlist *sg, in dma_direct_unmap_sg() argument
199 struct scatterlist *sg; in dma_direct_sync_sg() local
202 for_each_sg(sgl, sg, nents, i) in dma_direct_sync_sg()
203 __dma_sync_page(sg_page(sg), sg->offset, sg->length, direction); in dma_direct_sync_sg()
/linux-4.1.27/arch/ia64/sn/pci/
pci_dma.c
21 #define SG_ENT_VIRT_ADDRESS(sg) (sg_virt((sg))) argument
248 struct scatterlist *sg; in sn_dma_unmap_sg() local
252 for_each_sg(sgl, sg, nhwentries, i) { in sn_dma_unmap_sg()
253 provider->dma_unmap(pdev, sg->dma_address, dir); in sn_dma_unmap_sg()
254 sg->dma_address = (dma_addr_t) NULL; in sn_dma_unmap_sg()
255 sg->dma_length = 0; in sn_dma_unmap_sg()
279 struct scatterlist *saved_sg = sgl, *sg; in sn_dma_map_sg() local
292 for_each_sg(sgl, sg, nhwentries, i) { in sn_dma_map_sg()
294 phys_addr = SG_ENT_PHYS_ADDRESS(sg); in sn_dma_map_sg()
298 sg->length, in sn_dma_map_sg()
[all …]
/linux-4.1.27/drivers/xen/
swiotlb-xen.c
550 struct scatterlist *sg; in xen_swiotlb_map_sg_attrs() local
555 for_each_sg(sgl, sg, nelems, i) { in xen_swiotlb_map_sg_attrs()
556 phys_addr_t paddr = sg_phys(sg); in xen_swiotlb_map_sg_attrs()
561 !dma_capable(hwdev, dev_addr, sg->length) || in xen_swiotlb_map_sg_attrs()
562 range_straddles_page_boundary(paddr, sg->length)) { in xen_swiotlb_map_sg_attrs()
565 sg_phys(sg), in xen_swiotlb_map_sg_attrs()
566 sg->length, in xen_swiotlb_map_sg_attrs()
580 sg->length, in xen_swiotlb_map_sg_attrs()
583 sg->dma_address = xen_phys_to_bus(map); in xen_swiotlb_map_sg_attrs()
591 sg->length, in xen_swiotlb_map_sg_attrs()
[all …]
/linux-4.1.27/arch/sparc/kernel/
ioport.c
379 static int sbus_map_sg(struct device *dev, struct scatterlist *sg, int n, in sbus_map_sg() argument
382 mmu_get_scsi_sgl(dev, sg, n); in sbus_map_sg()
386 static void sbus_unmap_sg(struct device *dev, struct scatterlist *sg, int n, in sbus_unmap_sg() argument
389 mmu_release_scsi_sgl(dev, sg, n); in sbus_unmap_sg()
392 static void sbus_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, in sbus_sync_sg_for_cpu() argument
398 static void sbus_sync_sg_for_device(struct device *dev, struct scatterlist *sg, in sbus_sync_sg_for_device() argument
553 struct scatterlist *sg; in pci32_map_sg() local
557 for_each_sg(sgl, sg, nents, n) { in pci32_map_sg()
558 sg->dma_address = sg_phys(sg); in pci32_map_sg()
559 sg->dma_length = sg->length; in pci32_map_sg()
[all …]
iommu_common.h
42 struct scatterlist *sg) in is_span_boundary() argument
45 int nr = iommu_num_pages(paddr, outs->dma_length + sg->length, in is_span_boundary()
iommu.c
590 static unsigned long fetch_sg_ctx(struct iommu *iommu, struct scatterlist *sg) in fetch_sg_ctx() argument
599 bus_addr = sg->dma_address & IO_PAGE_MASK; in fetch_sg_ctx()
613 struct scatterlist *sg; in dma_4u_unmap_sg() local
626 sg = sglist; in dma_4u_unmap_sg()
628 dma_addr_t dma_handle = sg->dma_address; in dma_4u_unmap_sg()
629 unsigned int len = sg->dma_length; in dma_4u_unmap_sg()
652 sg = sg_next(sg); in dma_4u_unmap_sg()
705 struct scatterlist *sg, *sgprv; in dma_4u_sync_sg_for_cpu() local
731 for_each_sg(sglist, sg, nelems, i) { in dma_4u_sync_sg_for_cpu()
732 if (sg->dma_length == 0) in dma_4u_sync_sg_for_cpu()
[all …]
/linux-4.1.27/net/rds/
iw_rdma.c
83 struct scatterlist *sg, unsigned int nents);
251 static void rds_iw_set_scatterlist(struct rds_iw_scatterlist *sg, argument
254 sg->list = list;
255 sg->len = sg_len;
256 sg->dma_len = 0;
257 sg->dma_npages = 0;
258 sg->bytes = 0;
262 struct rds_iw_scatterlist *sg) argument
268 WARN_ON(sg->dma_len);
270 sg->dma_len = ib_dma_map_sg(dev, sg->list, sg->len, DMA_BIDIRECTIONAL);
[all …]
message.c
271 struct scatterlist *sg; in rds_message_copy_from_user() local
279 sg = rm->data.op_sg; in rds_message_copy_from_user()
283 if (!sg_page(sg)) { in rds_message_copy_from_user()
284 ret = rds_page_remainder_alloc(sg, iov_iter_count(from), in rds_message_copy_from_user()
293 sg->length - sg_off); in rds_message_copy_from_user()
296 nbytes = copy_page_from_iter(sg_page(sg), sg->offset + sg_off, in rds_message_copy_from_user()
303 if (sg_off == sg->length) in rds_message_copy_from_user()
304 sg++; in rds_message_copy_from_user()
313 struct scatterlist *sg; in rds_message_inc_copy_to_user() local
323 sg = rm->data.op_sg; in rds_message_inc_copy_to_user()
[all …]
tcp_send.c
81 unsigned int hdr_off, unsigned int sg, unsigned int off) in rds_tcp_xmit() argument
119 while (sg < rm->data.op_nents) { in rds_tcp_xmit()
121 sg_page(&rm->data.op_sg[sg]), in rds_tcp_xmit()
122 rm->data.op_sg[sg].offset + off, in rds_tcp_xmit()
123 rm->data.op_sg[sg].length - off, in rds_tcp_xmit()
125 rdsdebug("tcp sendpage %p:%u:%u ret %d\n", (void *)sg_page(&rm->data.op_sg[sg]), in rds_tcp_xmit()
126 rm->data.op_sg[sg].offset + off, rm->data.op_sg[sg].length - off, in rds_tcp_xmit()
133 if (off == rm->data.op_sg[sg].length) { in rds_tcp_xmit()
135 sg++; in rds_tcp_xmit()
ib.h
238 struct scatterlist *sg, unsigned int sg_dma_len, int direction) in rds_ib_dma_sync_sg_for_cpu() argument
244 ib_sg_dma_address(dev, &sg[i]), in rds_ib_dma_sync_sg_for_cpu()
245 ib_sg_dma_len(dev, &sg[i]), in rds_ib_dma_sync_sg_for_cpu()
252 struct scatterlist *sg, unsigned int sg_dma_len, int direction) in rds_ib_dma_sync_sg_for_device() argument
258 ib_sg_dma_address(dev, &sg[i]), in rds_ib_dma_sync_sg_for_device()
259 ib_sg_dma_len(dev, &sg[i]), in rds_ib_dma_sync_sg_for_device()
305 void *rds_ib_get_mr(struct scatterlist *sg, unsigned long nents,
345 unsigned int hdr_off, unsigned int sg, unsigned int off);
rdma.c
179 struct scatterlist *sg; in __rds_rdma_map() local
246 sg = kcalloc(nents, sizeof(*sg), GFP_KERNEL); in __rds_rdma_map()
247 if (!sg) { in __rds_rdma_map()
252 sg_init_table(sg, nents); in __rds_rdma_map()
256 sg_set_page(&sg[i], pages[i], PAGE_SIZE, 0); in __rds_rdma_map()
264 trans_private = rs->rs_transport->get_mr(sg, nents, rs, in __rds_rdma_map()
269 put_page(sg_page(&sg[i])); in __rds_rdma_map()
270 kfree(sg); in __rds_rdma_map()
669 struct scatterlist *sg; in rds_cmsg_rdma_args() local
671 sg = &op->op_sg[op->op_nents + j]; in rds_cmsg_rdma_args()
[all …]
ib_rdma.c
58 struct scatterlist *sg; member
368 struct scatterlist *sg, unsigned int nents) in rds_ib_map_fmr() argument
371 struct scatterlist *scat = sg; in rds_ib_map_fmr()
379 sg_dma_len = ib_dma_map_sg(dev, sg, nents, in rds_ib_map_fmr()
437 ibmr->sg = scat; in rds_ib_map_fmr()
458 ib_dma_sync_sg_for_cpu(rds_ibdev->dev, ibmr->sg, in rds_ib_sync_mr()
462 ib_dma_sync_sg_for_device(rds_ibdev->dev, ibmr->sg, in rds_ib_sync_mr()
474 ibmr->sg, ibmr->sg_len, in __rds_ib_teardown_mr()
484 struct page *page = sg_page(&ibmr->sg[i]); in __rds_ib_teardown_mr()
492 kfree(ibmr->sg); in __rds_ib_teardown_mr()
[all …]
iw.h
238 struct scatterlist *sg, unsigned int sg_dma_len, int direction) in rds_iw_dma_sync_sg_for_cpu() argument
244 ib_sg_dma_address(dev, &sg[i]), in rds_iw_dma_sync_sg_for_cpu()
245 ib_sg_dma_len(dev, &sg[i]), in rds_iw_dma_sync_sg_for_cpu()
252 struct scatterlist *sg, unsigned int sg_dma_len, int direction) in rds_iw_dma_sync_sg_for_device() argument
258 ib_sg_dma_address(dev, &sg[i]), in rds_iw_dma_sync_sg_for_device()
259 ib_sg_dma_len(dev, &sg[i]), in rds_iw_dma_sync_sg_for_device()
315 void *rds_iw_get_mr(struct scatterlist *sg, unsigned long nents,
353 unsigned int hdr_off, unsigned int sg, unsigned int off);
loop.c
61 unsigned int hdr_off, unsigned int sg, in rds_loop_xmit() argument
64 struct scatterlist *sgp = &rm->data.op_sg[sg]; in rds_loop_xmit()
75 BUG_ON(hdr_off || sg || off); in rds_loop_xmit()
/linux-4.1.27/drivers/mmc/host/
tmio_mmc_dma.c
49 struct scatterlist *sg = host->sg_ptr, *sg_tmp; in tmio_mmc_start_dma_rx() local
57 for_each_sg(sg, sg_tmp, host->sg_len, i) { in tmio_mmc_start_dma_rx()
66 if ((!aligned && (host->sg_len > 1 || sg->length > PAGE_CACHE_SIZE || in tmio_mmc_start_dma_rx()
72 if (sg->length < TMIO_MMC_MIN_DMA_LEN) { in tmio_mmc_start_dma_rx()
81 sg_init_one(&host->bounce_sg, host->bounce_buf, sg->length); in tmio_mmc_start_dma_rx()
83 sg = host->sg_ptr; in tmio_mmc_start_dma_rx()
86 ret = dma_map_sg(chan->device->dev, sg, host->sg_len, DMA_FROM_DEVICE); in tmio_mmc_start_dma_rx()
88 desc = dmaengine_prep_slave_sg(chan, sg, ret, in tmio_mmc_start_dma_rx()
125 struct scatterlist *sg = host->sg_ptr, *sg_tmp; in tmio_mmc_start_dma_tx() local
133 for_each_sg(sg, sg_tmp, host->sg_len, i) { in tmio_mmc_start_dma_tx()
[all …]
atmel-mci.c
185 struct scatterlist *sg; member
759 buf_size = sg_dma_len(host->sg); in atmci_pdc_set_single_buf()
760 atmci_writel(host, pointer_reg, sg_dma_address(host->sg)); in atmci_pdc_set_single_buf()
775 atmci_writel(host, counter_reg, sg_dma_len(host->sg) / 4); in atmci_pdc_set_single_buf()
776 host->data_size -= sg_dma_len(host->sg); in atmci_pdc_set_single_buf()
778 host->sg = sg_next(host->sg); in atmci_pdc_set_single_buf()
803 data->sg, data->sg_len, in atmci_pdc_cleanup()
825 sg_copy_from_buffer(host->data->sg, host->data->sg_len, in atmci_pdc_complete()
842 data->sg, data->sg_len, in atmci_dma_cleanup()
907 host->sg = data->sg; in atmci_prepare_data()
[all …]
mxcmmc.c
291 struct scatterlist *sg; in mxcmci_swap_buffers() local
294 for_each_sg(data->sg, sg, data->sg_len, i) in mxcmci_swap_buffers()
295 buffer_swap32(sg_virt(sg), sg->length); in mxcmci_swap_buffers()
306 struct scatterlist *sg; in mxcmci_setup_data() local
323 for_each_sg(data->sg, sg, data->sg_len, i) { in mxcmci_setup_data()
324 if (sg->offset & 3 || sg->length & 3 || sg->length < 512) { in mxcmci_setup_data()
340 nents = dma_map_sg(host->dma->device->dev, data->sg, in mxcmci_setup_data()
346 data->sg, data->sg_len, slave_dirn, in mxcmci_setup_data()
350 dma_unmap_sg(host->dma->device->dev, data->sg, data->sg_len, in mxcmci_setup_data()
460 dma_unmap_sg(host->dma->device->dev, data->sg, data->sg_len, in mxcmci_finish_data()
[all …]
usdhi6rol0.c
180 struct scatterlist *sg; /* current SG segment */ member
311 struct scatterlist *sg) in usdhi6_blk_bounce() argument
318 data->blksz, data->blocks, sg->offset); in usdhi6_blk_bounce()
345 host->sg = data->sg; in usdhi6_sg_prep()
347 host->offset = host->sg->offset; in usdhi6_sg_prep()
354 struct scatterlist *sg = data->sg_len > 1 ? host->sg : data->sg; in usdhi6_sg_map() local
355 size_t head = PAGE_SIZE - sg->offset; in usdhi6_sg_map()
359 if (WARN(sg_dma_len(sg) % data->blksz, in usdhi6_sg_map()
361 sg_dma_len(sg), data->blksz)) in usdhi6_sg_map()
364 host->pg.page = sg_page(sg); in usdhi6_sg_map()
[all …]
tmio_mmc.h
118 static inline char *tmio_mmc_kmap_atomic(struct scatterlist *sg, in tmio_mmc_kmap_atomic() argument
122 return kmap_atomic(sg_page(sg)) + sg->offset; in tmio_mmc_kmap_atomic()
125 static inline void tmio_mmc_kunmap_atomic(struct scatterlist *sg, in tmio_mmc_kunmap_atomic() argument
128 kunmap_atomic(virt - sg->offset); in tmio_mmc_kunmap_atomic()
tifm_sd.c
170 struct scatterlist *sg = r_data->sg; in tifm_sd_transfer_data() local
178 cnt = sg[host->sg_pos].length - host->block_pos; in tifm_sd_transfer_data()
191 cnt = sg[host->sg_pos].length; in tifm_sd_transfer_data()
193 off = sg[host->sg_pos].offset + host->block_pos; in tifm_sd_transfer_data()
195 pg = nth_page(sg_page(&sg[host->sg_pos]), off >> PAGE_SHIFT); in tifm_sd_transfer_data()
226 struct scatterlist *sg = r_data->sg; in tifm_sd_bounce_block() local
234 cnt = sg[host->sg_pos].length - host->block_pos; in tifm_sd_bounce_block()
240 cnt = sg[host->sg_pos].length; in tifm_sd_bounce_block()
242 off = sg[host->sg_pos].offset + host->block_pos; in tifm_sd_bounce_block()
244 pg = nth_page(sg_page(&sg[host->sg_pos]), off >> PAGE_SHIFT); in tifm_sd_bounce_block()
[all …]
au1xmmc.c
353 dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len, host->dma.dir); in au1xmmc_data_complete()
399 struct scatterlist *sg; in au1xmmc_send_pio() local
407 sg = &data->sg[host->pio.index]; in au1xmmc_send_pio()
408 sg_ptr = sg_virt(sg) + host->pio.offset; in au1xmmc_send_pio()
411 sg_len = data->sg[host->pio.index].length - host->pio.offset; in au1xmmc_send_pio()
454 struct scatterlist *sg; in au1xmmc_receive_pio() local
464 sg = &data->sg[host->pio.index]; in au1xmmc_receive_pio()
465 sg_ptr = sg_virt(sg) + host->pio.offset; in au1xmmc_receive_pio()
468 sg_len = sg_dma_len(&data->sg[host->pio.index]) - host->pio.offset; in au1xmmc_receive_pio()
630 host->dma.len = dma_map_sg(mmc_dev(host->mmc), data->sg, in au1xmmc_prepare_data()
[all …]
android-goldfish.c
225 uint8_t *dest = (uint8_t *)sg_virt(data->sg); in goldfish_mmc_xfer_done()
226 memcpy(dest, host->virt_base, data->sg->length); in goldfish_mmc_xfer_done()
228 host->data->bytes_xfered += data->sg->length; in goldfish_mmc_xfer_done()
229 dma_unmap_sg(mmc_dev(host->mmc), data->sg, host->sg_len, in goldfish_mmc_xfer_done()
398 host->sg_len = dma_map_sg(mmc_dev(host->mmc), data->sg, in goldfish_mmc_prepare_data()
408 const uint8_t *src = (uint8_t *)sg_virt(data->sg); in goldfish_mmc_prepare_data()
409 memcpy(host->virt_base, src, data->sg->length); in goldfish_mmc_prepare_data()
sh_mmcif.c
289 struct scatterlist *sg = data->sg; in sh_mmcif_start_dma_rx() local
295 ret = dma_map_sg(chan->device->dev, sg, data->sg_len, in sh_mmcif_start_dma_rx()
299 desc = dmaengine_prep_slave_sg(chan, sg, ret, in sh_mmcif_start_dma_rx()
338 struct scatterlist *sg = data->sg; in sh_mmcif_start_dma_tx() local
344 ret = dma_map_sg(chan->device->dev, sg, data->sg_len, in sh_mmcif_start_dma_tx()
348 desc = dmaengine_prep_slave_sg(chan, sg, ret, in sh_mmcif_start_dma_tx()
569 BUG_ON(host->sg_blkidx > data->sg->length); in sh_mmcif_next_block()
571 if (host->sg_blkidx == data->sg->length) { in sh_mmcif_next_block()
574 host->pio_ptr = sg_virt(++data->sg); in sh_mmcif_next_block()
597 u32 *p = sg_virt(data->sg); in sh_mmcif_read_block()
[all …]
/linux-4.1.27/arch/arc/include/asm/
dma-mapping.h
122 dma_map_sg(struct device *dev, struct scatterlist *sg, in dma_map_sg() argument
128 for_each_sg(sg, s, nents, i) in dma_map_sg()
136 dma_unmap_sg(struct device *dev, struct scatterlist *sg, in dma_unmap_sg() argument
142 for_each_sg(sg, s, nents, i) in dma_unmap_sg()
181 dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, int nelems, in dma_sync_sg_for_cpu() argument
186 for (i = 0; i < nelems; i++, sg++) in dma_sync_sg_for_cpu()
187 _dma_cache_sync((unsigned int)sg_virt(sg), sg->length, dir); in dma_sync_sg_for_cpu()
191 dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg, int nelems, in dma_sync_sg_for_device() argument
196 for (i = 0; i < nelems; i++, sg++) in dma_sync_sg_for_device()
197 _dma_cache_sync((unsigned int)sg_virt(sg), sg->length, dir); in dma_sync_sg_for_device()
/linux-4.1.27/drivers/gpu/drm/nouveau/
nouveau_sgdma.c
32 if (ttm->sg) { in nv04_sgdma_bind()
33 node->sg = ttm->sg; in nv04_sgdma_bind()
36 node->sg = NULL; in nv04_sgdma_bind()
67 if (ttm->sg) { in nv50_sgdma_bind()
68 node->sg = ttm->sg; in nv50_sgdma_bind()
71 node->sg = NULL; in nv50_sgdma_bind()
/linux-4.1.27/block/
blk-merge.c
161 struct scatterlist **sg, int *nsegs, int *cluster) in __blk_segment_map_sg() argument
166 if (*sg && *cluster) { in __blk_segment_map_sg()
167 if ((*sg)->length + nbytes > queue_max_segment_size(q)) in __blk_segment_map_sg()
175 (*sg)->length += nbytes; in __blk_segment_map_sg()
178 if (!*sg) in __blk_segment_map_sg()
179 *sg = sglist; in __blk_segment_map_sg()
191 sg_unmark_end(*sg); in __blk_segment_map_sg()
192 *sg = sg_next(*sg); in __blk_segment_map_sg()
195 sg_set_page(*sg, bvec->bv_page, nbytes, bvec->bv_offset); in __blk_segment_map_sg()
203 struct scatterlist **sg) in __blk_bios_map_sg() argument
[all …]
blk-integrity.c
93 struct scatterlist *sg = NULL; in blk_rq_map_integrity_sg() local
107 if (sg->length + iv.bv_len > queue_max_segment_size(q)) in blk_rq_map_integrity_sg()
110 sg->length += iv.bv_len; in blk_rq_map_integrity_sg()
113 if (!sg) in blk_rq_map_integrity_sg()
114 sg = sglist; in blk_rq_map_integrity_sg()
116 sg_unmark_end(sg); in blk_rq_map_integrity_sg()
117 sg = sg_next(sg); in blk_rq_map_integrity_sg()
120 sg_set_page(sg, iv.bv_page, iv.bv_len, iv.bv_offset); in blk_rq_map_integrity_sg()
128 if (sg) in blk_rq_map_integrity_sg()
129 sg_mark_end(sg); in blk_rq_map_integrity_sg()
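
blk-merge.c above implements the segment coalescing that blk_rq_map_sg() performs for block drivers; blk-integrity.c is the same walk applied to integrity metadata. A hedged sketch of the driver-side call (helper name is illustrative; the table must be sized for the queue's segment limit):

#include <linux/blkdev.h>
#include <linux/scatterlist.h>

/*
 * Sketch: flatten a request's bios into a driver-owned scatterlist.
 * blk_rq_map_sg() fills the table using the merge rules above and
 * returns the number of entries, ready to hand to dma_map_sg().
 */
static int prep_request_sketch(struct request_queue *q, struct request *rq,
			       struct scatterlist *sgl)
{
	sg_init_table(sgl, queue_max_segments(q));
	return blk_rq_map_sg(q, rq, sgl);
}
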
/linux-4.1.27/arch/m68k/kernel/
dma.c
123 void dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg, int nents, in dma_sync_sg_for_device() argument
128 for (i = 0; i < nents; sg++, i++) in dma_sync_sg_for_device()
129 dma_sync_single_for_device(dev, sg->dma_address, sg->length, dir); in dma_sync_sg_for_device()
154 int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, in dma_map_sg() argument
159 for (i = 0; i < nents; sg++, i++) { in dma_map_sg()
160 sg->dma_address = sg_phys(sg); in dma_map_sg()
161 dma_sync_single_for_device(dev, sg->dma_address, sg->length, dir); in dma_map_sg()
/linux-4.1.27/arch/avr32/include/asm/
dma-mapping.h
212 dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, in dma_map_sg() argument
220 sg[i].dma_address = page_to_bus(sg_page(&sg[i])) + sg[i].offset; in dma_map_sg()
221 virt = sg_virt(&sg[i]); in dma_map_sg()
222 dma_cache_sync(dev, virt, sg[i].length, direction); in dma_map_sg()
240 dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nhwentries, in dma_unmap_sg() argument
313 dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, in dma_sync_sg_for_cpu() argument
324 dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg, in dma_sync_sg_for_device() argument
330 dma_cache_sync(dev, sg_virt(&sg[i]), sg[i].length, direction); in dma_sync_sg_for_device()
/linux-4.1.27/drivers/crypto/
bfin_crc.c
83 struct scatterlist *sg; /* sg list head for this update*/ member
105 struct scatterlist *sg = sg_list; in sg_count() local
111 while (!sg_is_last(sg)) { in sg_count()
113 sg = sg_next(sg); in sg_count()
125 struct scatterlist *sg = NULL; in sg_get() local
128 for_each_sg(sg_list, sg, nents, i) in sg_get()
132 return sg; in sg_get()
187 struct scatterlist *sg; in bfin_crypto_crc_config_dma() local
196 dma_map_sg(crc->dev, ctx->sg, ctx->sg_nents, DMA_TO_DEVICE); in bfin_crypto_crc_config_dma()
198 for_each_sg(ctx->sg, sg, ctx->sg_nents, j) { in bfin_crypto_crc_config_dma()
[all …]
atmel-sha.c
92 struct scatterlist *sg; member
163 count = min(ctx->sg->length - ctx->offset, ctx->total); in atmel_sha_append_sg()
173 if ((ctx->sg->length == 0) && !sg_is_last(ctx->sg)) { in atmel_sha_append_sg()
174 ctx->sg = sg_next(ctx->sg); in atmel_sha_append_sg()
181 scatterwalk_map_and_copy(ctx->buffer + ctx->bufcnt, ctx->sg, in atmel_sha_append_sg()
188 if (ctx->offset == ctx->sg->length) { in atmel_sha_append_sg()
189 ctx->sg = sg_next(ctx->sg); in atmel_sha_append_sg()
190 if (ctx->sg) in atmel_sha_append_sg()
430 struct scatterlist sg[2]; in atmel_sha_xmit_dma() local
441 sg_init_table(sg, 2); in atmel_sha_xmit_dma()
[all …]
sahara.c
277 static int sahara_sg_length(struct scatterlist *sg, in sahara_sg_length() argument
285 sg_list = sg; in sahara_sg_length()
479 struct scatterlist *sg; in sahara_hw_descriptor_create() local
528 sg = dev->in_sg; in sahara_hw_descriptor_create()
530 dev->hw_link[i]->len = sg->length; in sahara_hw_descriptor_create()
531 dev->hw_link[i]->p = sg->dma_address; in sahara_hw_descriptor_create()
536 sg = sg_next(sg); in sahara_hw_descriptor_create()
542 sg = dev->out_sg; in sahara_hw_descriptor_create()
544 dev->hw_link[j]->len = sg->length; in sahara_hw_descriptor_create()
545 dev->hw_link[j]->p = sg->dma_address; in sahara_hw_descriptor_create()
[all …]
s5p-sss.c
230 static void s5p_set_dma_indata(struct s5p_aes_dev *dev, struct scatterlist *sg) in s5p_set_dma_indata() argument
232 SSS_WRITE(dev, FCBRDMAS, sg_dma_address(sg)); in s5p_set_dma_indata()
233 SSS_WRITE(dev, FCBRDMAL, sg_dma_len(sg)); in s5p_set_dma_indata()
236 static void s5p_set_dma_outdata(struct s5p_aes_dev *dev, struct scatterlist *sg) in s5p_set_dma_outdata() argument
238 SSS_WRITE(dev, FCBTDMAS, sg_dma_address(sg)); in s5p_set_dma_outdata()
239 SSS_WRITE(dev, FCBTDMAL, sg_dma_len(sg)); in s5p_set_dma_outdata()
259 static int s5p_set_outdata(struct s5p_aes_dev *dev, struct scatterlist *sg) in s5p_set_outdata() argument
263 if (!IS_ALIGNED(sg_dma_len(sg), AES_BLOCK_SIZE)) { in s5p_set_outdata()
267 if (!sg_dma_len(sg)) { in s5p_set_outdata()
272 err = dma_map_sg(dev->dev, sg, 1, DMA_FROM_DEVICE); in s5p_set_outdata()
[all …]
Dimg-hash.c96 struct scatterlist *sg; member
217 if (ctx->sg) in img_hash_dma_callback()
221 static int img_hash_xmit_dma(struct img_hash_dev *hdev, struct scatterlist *sg) in img_hash_xmit_dma() argument
226 ctx->dma_ct = dma_map_sg(hdev->dev, sg, 1, DMA_MEM_TO_DEV); in img_hash_xmit_dma()
234 sg, in img_hash_xmit_dma()
241 dma_unmap_sg(hdev->dev, sg, 1, DMA_MEM_TO_DEV); in img_hash_xmit_dma()
256 ctx->bufcnt = sg_copy_to_buffer(hdev->req->src, sg_nents(ctx->sg), in img_hash_write_via_cpu()
364 if (!ctx->sg) in img_hash_dma_task()
367 addr = sg_virt(ctx->sg); in img_hash_dma_task()
368 nbytes = ctx->sg->length - ctx->offset; in img_hash_dma_task()
[all …]
Domap-sham.c152 struct scatterlist *sg; member
585 ctx->sgl.page_link = ctx->sg->page_link; in omap_sham_xmit_dma()
586 ctx->sgl.offset = ctx->sg->offset; in omap_sham_xmit_dma()
588 sg_dma_address(&ctx->sgl) = sg_dma_address(ctx->sg); in omap_sham_xmit_dma()
641 while (ctx->sg) { in omap_sham_append_sg()
642 vaddr = kmap_atomic(sg_page(ctx->sg)); in omap_sham_append_sg()
643 vaddr += ctx->sg->offset; in omap_sham_append_sg()
647 ctx->sg->length - ctx->offset); in omap_sham_append_sg()
655 if (ctx->offset == ctx->sg->length) { in omap_sham_append_sg()
656 ctx->sg = sg_next(ctx->sg); in omap_sham_append_sg()
[all …]
Dtalitos.c737 static int talitos_map_sg(struct device *dev, struct scatterlist *sg, in talitos_map_sg() argument
742 while (sg) { in talitos_map_sg()
743 dma_map_sg(dev, sg, 1, dir); in talitos_map_sg()
744 sg = sg_next(sg); in talitos_map_sg()
747 dma_map_sg(dev, sg, nents, dir); in talitos_map_sg()
751 static void talitos_unmap_sg_chain(struct device *dev, struct scatterlist *sg, in talitos_unmap_sg_chain() argument
754 while (sg) { in talitos_unmap_sg_chain()
755 dma_unmap_sg(dev, sg, 1, dir); in talitos_unmap_sg_chain()
756 sg = sg_next(sg); in talitos_unmap_sg_chain()
824 struct scatterlist *sg; in ipsec_esp_encrypt_done() local
[all …]
/linux-4.1.27/drivers/infiniband/hw/qib/
Dqib_dma.c97 struct scatterlist *sg; in qib_map_sg() local
104 for_each_sg(sgl, sg, nents, i) { in qib_map_sg()
105 addr = (u64) page_address(sg_page(sg)); in qib_map_sg()
111 sg->dma_address = addr + sg->offset; in qib_map_sg()
113 sg->dma_length = sg->length; in qib_map_sg()
120 struct scatterlist *sg, int nents, in qib_unmap_sg() argument
/linux-4.1.27/drivers/infiniband/hw/ipath/
Dipath_dma.c104 struct scatterlist *sg; in ipath_map_sg() local
111 for_each_sg(sgl, sg, nents, i) { in ipath_map_sg()
112 addr = (u64) page_address(sg_page(sg)); in ipath_map_sg()
118 sg->dma_address = addr + sg->offset; in ipath_map_sg()
120 sg->dma_length = sg->length; in ipath_map_sg()
127 struct scatterlist *sg, int nents, in ipath_unmap_sg() argument
/linux-4.1.27/arch/arm/mach-rpc/
Ddma.c56 static void iomd_get_next_sg(struct scatterlist *sg, struct iomd_dma *idma) in iomd_get_next_sg() argument
60 if (idma->dma.sg) { in iomd_get_next_sg()
61 sg->dma_address = idma->dma_addr; in iomd_get_next_sg()
62 offset = sg->dma_address & ~PAGE_MASK; in iomd_get_next_sg()
72 sg->length = end - TRANSFER_SIZE; in iomd_get_next_sg()
79 idma->dma.sg = sg_next(idma->dma.sg); in iomd_get_next_sg()
80 idma->dma_addr = idma->dma.sg->dma_address; in iomd_get_next_sg()
81 idma->dma_len = idma->dma.sg->length; in iomd_get_next_sg()
84 idma->dma.sg = NULL; in iomd_get_next_sg()
90 sg->dma_address = 0; in iomd_get_next_sg()
[all …]
/linux-4.1.27/drivers/scsi/arm/
Dscsi.h27 static inline int copy_SCp_to_sg(struct scatterlist *sg, struct scsi_pointer *SCp, int max) in copy_SCp_to_sg() argument
36 sg_set_buf(sg, SCp->ptr, SCp->this_residual); in copy_SCp_to_sg()
43 *(++sg) = *src_sg; in copy_SCp_to_sg()
44 sg_mark_end(sg); in copy_SCp_to_sg()
100 struct scatterlist *sg; in init_SCp() local
103 scsi_for_each_sg(SCpnt, sg, sg_count, i) in init_SCp()
104 len += sg->length; in init_SCp()
/linux-4.1.27/net/mac80211/
Daes_gmac.c27 struct scatterlist sg[3], ct[1]; in ieee80211_aes_gmac() local
40 sg_init_table(sg, 3); in ieee80211_aes_gmac()
41 sg_set_buf(&sg[0], aad, AAD_LEN); in ieee80211_aes_gmac()
42 sg_set_buf(&sg[1], data, data_len - GMAC_MIC_LEN); in ieee80211_aes_gmac()
43 sg_set_buf(&sg[2], zero, GMAC_MIC_LEN); in ieee80211_aes_gmac()
53 aead_request_set_assoc(aead_req, sg, AAD_LEN + data_len); in ieee80211_aes_gmac()
/linux-4.1.27/net/9p/
Dtrans_virtio.c92 struct scatterlist sg[VIRTQUEUE_NUM]; member
185 static int pack_sg_list(struct scatterlist *sg, int start, in pack_sg_list() argument
197 sg_unmark_end(&sg[index]); in pack_sg_list()
198 sg_set_buf(&sg[index++], data, s); in pack_sg_list()
203 sg_mark_end(&sg[index - 1]); in pack_sg_list()
224 pack_sg_list_p(struct scatterlist *sg, int start, int limit, in pack_sg_list_p() argument
241 sg_unmark_end(&sg[index]); in pack_sg_list_p()
242 sg_set_page(&sg[index++], pdata[i++], s, data_off); in pack_sg_list_p()
249 sg_mark_end(&sg[index - 1]); in pack_sg_list_p()
277 out = pack_sg_list(chan->sg, 0, in p9_virtio_request()
[all …]
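
pack_sg_list() in trans_virtio.c above reuses one fixed table across requests, so it clears any stale end marker before filling each slot and sets a fresh one on the last slot used. A simplified sketch of that pattern (splitting at PAGE_SIZE granularity rather than at exact page boundaries, as the real code does):

#include <linux/kernel.h>
#include <linux/scatterlist.h>

/* Fill a fixed-size table incrementally and terminate it at the last
 * used slot (simplified sketch of the 9p pattern). */
static int demo_pack_sg(struct scatterlist *sg, int limit,
			char *data, int count)
{
	int index = 0;

	while (count && index < limit) {
		int s = min_t(int, count, PAGE_SIZE);

		sg_unmark_end(&sg[index]);	/* slot may carry an old end mark */
		sg_set_buf(&sg[index++], data, s);
		data += s;
		count -= s;
	}
	if (index)
		sg_mark_end(&sg[index - 1]);

	return index;
}
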
/linux-4.1.27/arch/mn10300/include/asm/
Ddma-mapping.h54 struct scatterlist *sg; in dma_map_sg() local
60 for_each_sg(sglist, sg, nents, i) { in dma_map_sg()
61 BUG_ON(!sg_page(sg)); in dma_map_sg()
63 sg->dma_address = sg_phys(sg); in dma_map_sg()
71 void dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nhwentries, in dma_unmap_sg() argument
123 void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, in dma_sync_sg_for_cpu() argument
129 void dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg, in dma_sync_sg_for_device() argument
/linux-4.1.27/drivers/crypto/ccp/
Dccp-crypto-main.c303 struct scatterlist *sg, *sg_last = NULL; in ccp_crypto_sg_table_add() local
305 for (sg = table->sgl; sg; sg = sg_next(sg)) in ccp_crypto_sg_table_add()
306 if (!sg_page(sg)) in ccp_crypto_sg_table_add()
308 BUG_ON(!sg); in ccp_crypto_sg_table_add()
310 for (; sg && sg_add; sg = sg_next(sg), sg_add = sg_next(sg_add)) { in ccp_crypto_sg_table_add()
311 sg_set_page(sg, sg_page(sg_add), sg_add->length, in ccp_crypto_sg_table_add()
313 sg_last = sg; in ccp_crypto_sg_table_add()
Dccp-ops.c54 struct scatterlist *sg; member
490 struct scatterlist *sg, u64 len, in ccp_init_sg_workarea() argument
495 wa->sg = sg; in ccp_init_sg_workarea()
496 if (!sg) in ccp_init_sg_workarea()
499 wa->nents = sg_nents(sg); in ccp_init_sg_workarea()
500 wa->length = sg->length; in ccp_init_sg_workarea()
510 wa->dma_sg = sg; in ccp_init_sg_workarea()
513 wa->dma_count = dma_map_sg(dev, sg, wa->nents, dma_dir); in ccp_init_sg_workarea()
524 if (!wa->sg) in ccp_update_sg_workarea()
529 if (wa->sg_used == wa->sg->length) { in ccp_update_sg_workarea()
[all …]
Dccp-crypto-aes-cmac.c64 struct scatterlist *sg, *cmac_key_sg = NULL; in ccp_do_cmac_update() local
117 sg = NULL; in ccp_do_cmac_update()
120 sg = ccp_crypto_sg_table_add(&rctx->data_sg, &rctx->buf_sg); in ccp_do_cmac_update()
124 sg = ccp_crypto_sg_table_add(&rctx->data_sg, req->src); in ccp_do_cmac_update()
134 sg = ccp_crypto_sg_table_add(&rctx->data_sg, &rctx->pad_sg); in ccp_do_cmac_update()
136 if (sg) { in ccp_do_cmac_update()
137 sg_mark_end(sg); in ccp_do_cmac_update()
138 sg = rctx->data_sg.sgl; in ccp_do_cmac_update()
156 rctx->cmd.u.aes.src = sg; in ccp_do_cmac_update()
Dccp-crypto-sha.c63 struct scatterlist *sg; in ccp_do_sha_update() local
96 sg = NULL; in ccp_do_sha_update()
109 sg = ccp_crypto_sg_table_add(&rctx->data_sg, &rctx->buf_sg); in ccp_do_sha_update()
110 sg = ccp_crypto_sg_table_add(&rctx->data_sg, req->src); in ccp_do_sha_update()
111 sg_mark_end(sg); in ccp_do_sha_update()
113 sg = rctx->data_sg.sgl; in ccp_do_sha_update()
117 sg = &rctx->buf_sg; in ccp_do_sha_update()
119 sg = req->src; in ccp_do_sha_update()
130 rctx->cmd.u.sha.src = sg; in ccp_do_sha_update()
/linux-4.1.27/drivers/scsi/
Dlibiscsi_tcp.c98 struct scatterlist *sg, unsigned int offset) in iscsi_tcp_segment_init_sg() argument
100 segment->sg = sg; in iscsi_tcp_segment_init_sg()
102 segment->size = min(sg->length - offset, in iscsi_tcp_segment_init_sg()
118 struct scatterlist *sg; in iscsi_tcp_segment_map() local
120 if (segment->data != NULL || !segment->sg) in iscsi_tcp_segment_map()
123 sg = segment->sg; in iscsi_tcp_segment_map()
125 BUG_ON(sg->length == 0); in iscsi_tcp_segment_map()
133 if (page_count(sg_page(sg)) >= 1 && !recv) in iscsi_tcp_segment_map()
138 segment->sg_mapped = kmap_atomic(sg_page(sg)); in iscsi_tcp_segment_map()
142 segment->sg_mapped = kmap(sg_page(sg)); in iscsi_tcp_segment_map()
[all …]
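
iscsi_tcp_segment_map() above shows the kmap rules for touching sg payloads from the CPU. A reduced sketch, assuming the entry does not cross a page boundary (the iscsi code handles that case separately):

#include <linux/highmem.h>
#include <linux/scatterlist.h>
#include <linux/string.h>

/* Fill one sg entry's payload; the pointer is valid only while the atomic
 * mapping is held, and only if offset + length stays within one page. */
static void demo_sg_fill(struct scatterlist *sg, int val)
{
	void *vaddr = kmap_atomic(sg_page(sg));

	memset(vaddr + sg->offset, val, sg->length);
	kunmap_atomic(vaddr);
}
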
Djazz_esp.c47 static int jazz_esp_map_sg(struct esp *esp, struct scatterlist *sg, in jazz_esp_map_sg() argument
50 return dma_map_sg(esp->dev, sg, num_sg, dir); in jazz_esp_map_sg()
59 static void jazz_esp_unmap_sg(struct esp *esp, struct scatterlist *sg, in jazz_esp_unmap_sg() argument
62 dma_unmap_sg(esp->dev, sg, num_sg, dir); in jazz_esp_unmap_sg()
/linux-4.1.27/arch/arm64/mm/
Ddma-mapping.c233 struct scatterlist *sg; in __swiotlb_map_sg_attrs() local
238 for_each_sg(sgl, sg, ret, i) in __swiotlb_map_sg_attrs()
239 __dma_map_area(phys_to_virt(dma_to_phys(dev, sg->dma_address)), in __swiotlb_map_sg_attrs()
240 sg->length, dir); in __swiotlb_map_sg_attrs()
250 struct scatterlist *sg; in __swiotlb_unmap_sg_attrs() local
254 for_each_sg(sgl, sg, nelems, i) in __swiotlb_unmap_sg_attrs()
255 __dma_unmap_area(phys_to_virt(dma_to_phys(dev, sg->dma_address)), in __swiotlb_unmap_sg_attrs()
256 sg->length, dir); in __swiotlb_unmap_sg_attrs()
282 struct scatterlist *sg; in __swiotlb_sync_sg_for_cpu() local
286 for_each_sg(sgl, sg, nelems, i) in __swiotlb_sync_sg_for_cpu()
[all …]
/linux-4.1.27/arch/parisc/include/asm/
Ddma-mapping.h16 …int (*map_sg)(struct device *dev, struct scatterlist *sg, int nents, enum dma_data_direction dire…
17 …void (*unmap_sg)(struct device *dev, struct scatterlist *sg, int nhwents, enum dma_data_direction …
20 …void (*dma_sync_sg_for_cpu)(struct device *dev, struct scatterlist *sg, int nelems, enum dma_data_…
21 …void (*dma_sync_sg_for_device)(struct device *dev, struct scatterlist *sg, int nelems, enum dma_da…
95 dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, in dma_map_sg() argument
98 return hppa_dma_ops->map_sg(dev, sg, nents, direction); in dma_map_sg()
102 dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nhwentries, in dma_unmap_sg() argument
105 hppa_dma_ops->unmap_sg(dev, sg, nhwentries, direction); in dma_unmap_sg()
158 dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, int nelems, in dma_sync_sg_for_cpu() argument
162 hppa_dma_ops->dma_sync_sg_for_cpu(dev, sg, nelems, direction); in dma_sync_sg_for_cpu()
[all …]
/linux-4.1.27/drivers/target/
Dtarget_core_iblock.c461 struct scatterlist *sg; in iblock_execute_write_same() local
472 sg = &cmd->t_data_sg[0]; in iblock_execute_write_same()
475 sg->length != cmd->se_dev->dev_attrib.block_size) { in iblock_execute_write_same()
477 " block_size: %u\n", cmd->t_data_nents, sg->length, in iblock_execute_write_same()
497 while (bio_add_page(bio, sg_page(sg), sg->length, sg->offset) in iblock_execute_write_same()
498 != sg->length) { in iblock_execute_write_same()
509 block_lba += sg->length >> IBLOCK_LBA_SHIFT; in iblock_execute_write_same()
636 struct scatterlist *sg; in iblock_alloc_bip() local
658 for_each_sg(cmd->t_prot_sg, sg, cmd->t_prot_nents, i) { in iblock_alloc_bip()
660 rc = bio_integrity_add_page(bio, sg_page(sg), sg->length, in iblock_alloc_bip()
[all …]
Dtarget_core_sbc.c328 struct scatterlist *sg; in xdreadwrite_callback() local
363 for_each_sg(cmd->t_bidi_data_sg, sg, cmd->t_bidi_data_nents, count) { in xdreadwrite_callback()
364 addr = kmap_atomic(sg_page(sg)); in xdreadwrite_callback()
370 for (i = 0; i < sg->length; i++) in xdreadwrite_callback()
371 *(addr + sg->offset + i) ^= *(buf + offset + i); in xdreadwrite_callback()
373 offset += sg->length; in xdreadwrite_callback()
419 struct scatterlist *write_sg = NULL, *sg; in compare_and_write_callback() local
478 for_each_sg(cmd->t_bidi_data_sg, sg, cmd->t_bidi_data_nents, i) { in compare_and_write_callback()
479 addr = (unsigned char *)kmap_atomic(sg_page(sg)); in compare_and_write_callback()
485 len = min(sg->length, compare_len); in compare_and_write_callback()
[all …]
Dtarget_core_rd.c86 struct scatterlist *sg; in rd_release_sgl_table() local
90 sg = sg_table[i].sg_table; in rd_release_sgl_table()
94 pg = sg_page(&sg[j]); in rd_release_sgl_table()
100 kfree(sg); in rd_release_sgl_table()
138 struct scatterlist *sg; in rd_allocate_sgl_table() local
157 sg = kcalloc(sg_per_table + chain_entry, sizeof(*sg), in rd_allocate_sgl_table()
159 if (!sg) { in rd_allocate_sgl_table()
165 sg_init_table(sg, sg_per_table + chain_entry); in rd_allocate_sgl_table()
171 max_sg_per_table + 1, sg); in rd_allocate_sgl_table()
176 sg_table[i].sg_table = sg; in rd_allocate_sgl_table()
[all …]
Dtarget_core_user.c278 struct scatterlist *sg; in tcmu_queue_cmd_ring() local
363 for_each_sg(se_cmd->t_data_sg, sg, se_cmd->t_data_nents, i) { in tcmu_queue_cmd_ring()
364 size_t copy_bytes = min((size_t)sg->length, in tcmu_queue_cmd_ring()
366 void *from = kmap_atomic(sg_page(sg)) + sg->offset; in tcmu_queue_cmd_ring()
384 if (sg->length != copy_bytes) { in tcmu_queue_cmd_ring()
386 copy_bytes = sg->length - copy_bytes; in tcmu_queue_cmd_ring()
484 struct scatterlist *sg; in tcmu_handle_completion() local
488 for_each_sg(se_cmd->t_data_sg, sg, se_cmd->t_data_nents, i) { in tcmu_handle_completion()
493 copy_bytes = min((size_t)sg->length, in tcmu_handle_completion()
496 to = kmap_atomic(sg_page(sg)) + sg->offset; in tcmu_handle_completion()
[all …]
/linux-4.1.27/drivers/net/wireless/orinoco/
Dmic.c51 struct scatterlist sg[2]; in orinoco_mic() local
68 sg_init_table(sg, 2); in orinoco_mic()
69 sg_set_buf(&sg[0], hdr, sizeof(hdr)); in orinoco_mic()
70 sg_set_buf(&sg[1], data, data_len); in orinoco_mic()
77 return crypto_hash_digest(&desc, sg, data_len + sizeof(hdr), in orinoco_mic()
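
orinoco_mic() above drives the 4.1-era synchronous hash API over a two-entry list, so header and payload are digested in one pass without copying them together. A self-contained sketch of that call sequence (buffer names are illustrative; error handling condensed):

#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

/* Digest header + payload through one scatterlist (sketch). */
static int demo_michael_mic(const u8 *key, u8 *hdr, unsigned int hdr_len,
			    u8 *data, unsigned int data_len, u8 *mic)
{
	struct crypto_hash *tfm;
	struct hash_desc desc;
	struct scatterlist sg[2];
	int err;

	tfm = crypto_alloc_hash("michael_mic", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	sg_init_table(sg, 2);
	sg_set_buf(&sg[0], hdr, hdr_len);
	sg_set_buf(&sg[1], data, data_len);

	desc.tfm = tfm;
	desc.flags = 0;

	err = crypto_hash_setkey(tfm, key, 8);	/* Michael uses a 64-bit key */
	if (!err)
		err = crypto_hash_digest(&desc, sg, hdr_len + data_len, mic);

	crypto_free_hash(tfm);
	return err;
}
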
/linux-4.1.27/drivers/mmc/core/
Dsd_ops.c254 struct scatterlist sg; in mmc_app_send_scr() local
284 data.sg = &sg; in mmc_app_send_scr()
287 sg_init_one(&sg, data_buf, 8); in mmc_app_send_scr()
313 struct scatterlist sg; in mmc_sd_switch() local
335 data.sg = &sg; in mmc_sd_switch()
338 sg_init_one(&sg, resp, 64); in mmc_sd_switch()
358 struct scatterlist sg; in mmc_app_sd_status() local
380 data.sg = &sg; in mmc_app_sd_status()
383 sg_init_one(&sg, ssr, 64); in mmc_app_sd_status()
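
Each mmc helper above wires a single kmalloc'd buffer into the request with sg_init_one(), which is sg_init_table() of one entry plus sg_set_buf() plus the end mark in a single call. A minimal sketch of the hookup (sketch; only the sg-related fields of mmc_data are shown):

#include <linux/mmc/core.h>
#include <linux/scatterlist.h>

/* Point an mmc_data at one linear buffer. The buffer must be DMA-able
 * kernel memory (kmalloc'd, not on the stack). */
static void demo_mmc_data_one(struct mmc_data *data, struct scatterlist *sg,
			      void *buf, unsigned int len)
{
	sg_init_one(sg, buf, len);
	data->sg = sg;
	data->sg_len = 1;
}
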
Dsdio_ops.c127 struct scatterlist sg, *sg_ptr; in mmc_io_rw_extended() local
165 data.sg = sgtable.sgl; in mmc_io_rw_extended()
168 for_each_sg(data.sg, sg_ptr, data.sg_len, i) { in mmc_io_rw_extended()
175 data.sg = &sg; in mmc_io_rw_extended()
178 sg_init_one(&sg, buf, left_size); in mmc_io_rw_extended()
Dmmc_ops.c296 struct scatterlist sg; in mmc_send_cxd_data() local
314 data.sg = &sg; in mmc_send_cxd_data()
317 sg_init_one(&sg, buf, len); in mmc_send_cxd_data()
585 struct scatterlist sg; in mmc_send_tuning() local
624 data.sg = &sg; in mmc_send_tuning()
626 sg_init_one(&sg, data_buf, size); in mmc_send_tuning()
656 struct scatterlist sg; in mmc_send_bus_test() local
703 data.sg = &sg; in mmc_send_bus_test()
706 sg_init_one(&sg, data_buf, len); in mmc_send_bus_test()
/linux-4.1.27/drivers/target/iscsi/
Discsi_target_auth.c190 struct scatterlist sg; in chap_server_compute_md5() local
263 sg_init_one(&sg, &chap->id, 1); in chap_server_compute_md5()
264 ret = crypto_hash_update(&desc, &sg, 1); in chap_server_compute_md5()
271 sg_init_one(&sg, &auth->password, strlen(auth->password)); in chap_server_compute_md5()
272 ret = crypto_hash_update(&desc, &sg, strlen(auth->password)); in chap_server_compute_md5()
279 sg_init_one(&sg, chap->challenge, CHAP_CHALLENGE_LENGTH); in chap_server_compute_md5()
280 ret = crypto_hash_update(&desc, &sg, CHAP_CHALLENGE_LENGTH); in chap_server_compute_md5()
392 sg_init_one(&sg, &id_as_uchar, 1); in chap_server_compute_md5()
393 ret = crypto_hash_update(&desc, &sg, 1); in chap_server_compute_md5()
400 sg_init_one(&sg, auth->password_mutual, in chap_server_compute_md5()
[all …]
/linux-4.1.27/include/linux/platform_data/
Ddma-ste-dma40.h184 struct scatterlist sg; in stedma40_slave_mem() local
185 sg_init_table(&sg, 1); in stedma40_slave_mem()
186 sg.dma_address = addr; in stedma40_slave_mem()
187 sg.length = size; in stedma40_slave_mem()
189 return dmaengine_prep_slave_sg(chan, &sg, 1, direction, flags); in stedma40_slave_mem()
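
stedma40_slave_mem() above builds a throwaway one-entry list around an address that is already a bus address, so the fields are assigned directly and no dma_map_sg() call is made. A generic sketch of the same trick for any slave channel (names are illustrative):

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>

/* Describe one pre-mapped region to a dmaengine slave channel (sketch;
 * 'addr' must already be a bus address). */
static struct dma_async_tx_descriptor *
demo_prep_one_region(struct dma_chan *chan, dma_addr_t addr,
		     unsigned int size, enum dma_transfer_direction dir)
{
	struct scatterlist sg;

	sg_init_table(&sg, 1);
	sg.dma_address = addr;	/* already mapped: no dma_map_sg() */
	sg.length = size;

	return dmaengine_prep_slave_sg(chan, &sg, 1, dir,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
}
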
/linux-4.1.27/drivers/spi/
Dspi-topcliff-pch.c931 struct scatterlist *sg; in pch_spi_handle_dma() local
1018 sg = dma->sg_rx_p; in pch_spi_handle_dma()
1019 for (i = 0; i < num; i++, sg++) { in pch_spi_handle_dma()
1021 sg->offset = size * i; in pch_spi_handle_dma()
1022 sg->offset = sg->offset * (*bpw / 8); in pch_spi_handle_dma()
1023 sg_set_page(sg, virt_to_page(dma->rx_buf_virt), rem, in pch_spi_handle_dma()
1024 sg->offset); in pch_spi_handle_dma()
1025 sg_dma_len(sg) = rem; in pch_spi_handle_dma()
1027 sg->offset = size * (i - 1) + rem; in pch_spi_handle_dma()
1028 sg->offset = sg->offset * (*bpw / 8); in pch_spi_handle_dma()
[all …]
/linux-4.1.27/drivers/dma/ipu/
Dipu_idmac.c777 struct idmac_tx_desc *desc, struct scatterlist *sg, int buf_idx) in ipu_submit_buffer() argument
791 ipu_update_channel_buffer(ichan, buf_idx, sg_dma_address(sg)); in ipu_submit_buffer()
795 sg, chan_id, buf_idx); in ipu_submit_buffer()
804 struct scatterlist *sg; in ipu_submit_channel_buffers() local
807 for (i = 0, sg = desc->sg; i < 2 && sg; i++) { in ipu_submit_channel_buffers()
808 if (!ichan->sg[i]) { in ipu_submit_channel_buffers()
809 ichan->sg[i] = sg; in ipu_submit_channel_buffers()
811 ret = ipu_submit_buffer(ichan, desc, sg, i); in ipu_submit_channel_buffers()
815 sg = sg_next(sg); in ipu_submit_channel_buffers()
850 dma_addr_t dma_1 = sg_is_last(desc->sg) ? 0 : in idmac_tx_submit()
[all …]
/linux-4.1.27/drivers/virtio/
Dvirtio_ring.c132 struct scatterlist *sg; in virtqueue_add() local
207 for (sg = sgs[n]; sg; sg = sg_next(sg)) { in virtqueue_add()
209 desc[i].addr = cpu_to_virtio64(_vq->vdev, sg_phys(sg)); in virtqueue_add()
210 desc[i].len = cpu_to_virtio32(_vq->vdev, sg->length); in virtqueue_add()
216 for (sg = sgs[n]; sg; sg = sg_next(sg)) { in virtqueue_add()
218 desc[i].addr = cpu_to_virtio64(_vq->vdev, sg_phys(sg)); in virtqueue_add()
219 desc[i].len = cpu_to_virtio32(_vq->vdev, sg->length); in virtqueue_add()
283 struct scatterlist *sg; in virtqueue_add_sgs() local
284 for (sg = sgs[i]; sg; sg = sg_next(sg)) in virtqueue_add_sgs()
305 struct scatterlist *sg, unsigned int num, in virtqueue_add_outbuf() argument
[all …]
Dvirtio_balloon.c115 struct scatterlist sg; in tell_host() local
118 sg_init_one(&sg, vb->pfns, sizeof(vb->pfns[0]) * vb->num_pfns); in tell_host()
121 virtqueue_add_outbuf(vq, &sg, 1, vb, GFP_KERNEL); in tell_host()
263 struct scatterlist sg; in stats_handle_request() local
272 sg_init_one(&sg, vb->stats, sizeof(vb->stats)); in stats_handle_request()
273 virtqueue_add_outbuf(vq, &sg, 1, vb, GFP_KERNEL); in stats_handle_request()
402 struct scatterlist sg; in init_vqs() local
409 sg_init_one(&sg, vb->stats, sizeof vb->stats); in init_vqs()
410 if (virtqueue_add_outbuf(vb->stats_vq, &sg, 1, vb, GFP_KERNEL) in init_vqs()
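
Both virtio_balloon call sites above follow the same produce-then-notify shape. A condensed sketch, assuming the caller later reaps 'token' via virtqueue_get_buf() when the host consumes the buffer:

#include <linux/gfp.h>
#include <linux/scatterlist.h>
#include <linux/virtio.h>

/* Expose one outgoing buffer to the host and kick the queue (sketch). */
static int demo_post_outbuf(struct virtqueue *vq, void *buf,
			    unsigned int len, void *token)
{
	struct scatterlist sg;
	int err;

	sg_init_one(&sg, buf, len);
	err = virtqueue_add_outbuf(vq, &sg, 1, token, GFP_KERNEL);
	if (err)
		return err;

	virtqueue_kick(vq);
	return 0;
}
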
/linux-4.1.27/drivers/media/pci/cx25821/
Dcx25821-core.c1011 struct scatterlist *sg; in cx25821_risc_field() local
1025 sg = sglist; in cx25821_risc_field()
1027 while (offset && offset >= sg_dma_len(sg)) { in cx25821_risc_field()
1028 offset -= sg_dma_len(sg); in cx25821_risc_field()
1029 sg = sg_next(sg); in cx25821_risc_field()
1031 if (bpl <= sg_dma_len(sg) - offset) { in cx25821_risc_field()
1035 *(rp++) = cpu_to_le32(sg_dma_address(sg) + offset); in cx25821_risc_field()
1042 (sg_dma_len(sg) - offset)); in cx25821_risc_field()
1043 *(rp++) = cpu_to_le32(sg_dma_address(sg) + offset); in cx25821_risc_field()
1045 todo -= (sg_dma_len(sg) - offset); in cx25821_risc_field()
[all …]
/linux-4.1.27/drivers/infiniband/hw/mlx5/
Dmem.c57 struct scatterlist *sg; in mlx5_ib_cont_pages() local
78 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) { in mlx5_ib_cont_pages()
79 len = sg_dma_len(sg) >> page_shift; in mlx5_ib_cont_pages()
80 pfn = sg_dma_address(sg) >> page_shift; in mlx5_ib_cont_pages()
161 struct scatterlist *sg; in __mlx5_ib_populate_pas() local
180 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) { in __mlx5_ib_populate_pas()
181 len = sg_dma_len(sg) >> umem_page_shift; in __mlx5_ib_populate_pas()
182 base = sg_dma_address(sg); in __mlx5_ib_populate_pas()
/linux-4.1.27/drivers/infiniband/core/
Dumem.c49 struct scatterlist *sg; in __ib_umem_release() local
58 for_each_sg(umem->sg_head.sgl, sg, umem->npages, i) { in __ib_umem_release()
60 page = sg_page(sg); in __ib_umem_release()
96 struct scatterlist *sg, *sg_list_start; in ib_umem_get() local
203 for_each_sg(sg_list_start, sg, ret, i) { in ib_umem_get()
207 sg_set_page(sg, page_list[i], PAGE_SIZE, 0); in ib_umem_get()
211 sg_list_start = sg; in ib_umem_get()
318 struct scatterlist *sg; in ib_umem_page_count() local
326 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, i) in ib_umem_page_count()
327 n += sg_dma_len(sg) >> shift; in ib_umem_page_count()
/linux-4.1.27/arch/sh/kernel/
Ddma-nommu.c26 static int nommu_map_sg(struct device *dev, struct scatterlist *sg, in nommu_map_sg() argument
33 WARN_ON(nents == 0 || sg[0].length == 0); in nommu_map_sg()
35 for_each_sg(sg, s, nents, i) { in nommu_map_sg()
54 static void nommu_sync_sg(struct device *dev, struct scatterlist *sg, in nommu_sync_sg() argument
60 for_each_sg(sg, s, nelems, i) in nommu_sync_sg()
/linux-4.1.27/drivers/net/ethernet/micrel/
Dks8842.c146 struct scatterlist sg; member
154 struct scatterlist sg; member
443 sg_dma_len(&ctl->sg) = skb->len + sizeof(u32); in ks8842_tx_frame_dma()
454 sg_dma_address(&ctl->sg), 0, sg_dma_len(&ctl->sg), in ks8842_tx_frame_dma()
458 if (sg_dma_len(&ctl->sg) % 4) in ks8842_tx_frame_dma()
459 sg_dma_len(&ctl->sg) += 4 - sg_dma_len(&ctl->sg) % 4; in ks8842_tx_frame_dma()
462 &ctl->sg, 1, DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT); in ks8842_tx_frame_dma()
556 struct scatterlist *sg = &ctl->sg; in __ks8842_start_new_rx_dma() local
561 sg_init_table(sg, 1); in __ks8842_start_new_rx_dma()
562 sg_dma_address(sg) = dma_map_single(adapter->dev, in __ks8842_start_new_rx_dma()
[all …]
/linux-4.1.27/drivers/gpu/drm/i915/
Di915_gem_dmabuf.c93 struct sg_table *sg, in i915_gem_unmap_dma_buf() argument
100 dma_unmap_sg(attachment->dev, sg->sgl, sg->nents, dir); in i915_gem_unmap_dma_buf()
101 sg_free_table(sg); in i915_gem_unmap_dma_buf()
102 kfree(sg); in i915_gem_unmap_dma_buf()
252 struct sg_table *sg; in i915_gem_object_get_pages_dmabuf() local
254 sg = dma_buf_map_attachment(obj->base.import_attach, DMA_BIDIRECTIONAL); in i915_gem_object_get_pages_dmabuf()
255 if (IS_ERR(sg)) in i915_gem_object_get_pages_dmabuf()
256 return PTR_ERR(sg); in i915_gem_object_get_pages_dmabuf()
258 obj->pages = sg; in i915_gem_object_get_pages_dmabuf()
/linux-4.1.27/drivers/scsi/libfc/
Dfc_libfc.c112 struct scatterlist *sg, in fc_copy_buffer_to_sglist() argument
119 while (remaining > 0 && sg) { in fc_copy_buffer_to_sglist()
123 if (*offset >= sg->length) { in fc_copy_buffer_to_sglist()
131 *offset -= sg->length; in fc_copy_buffer_to_sglist()
132 sg = sg_next(sg); in fc_copy_buffer_to_sglist()
135 sg_bytes = min(remaining, sg->length - *offset); in fc_copy_buffer_to_sglist()
141 off = *offset + sg->offset; in fc_copy_buffer_to_sglist()
144 page_addr = kmap_atomic(sg_page(sg) + (off >> PAGE_SHIFT)); in fc_copy_buffer_to_sglist()
/linux-4.1.27/drivers/gpu/drm/
Ddrm_scatter.c70 if (drm_core_check_feature(dev, DRIVER_SG) && dev->sg && in drm_legacy_sg_cleanup()
72 drm_sg_cleanup(dev->sg); in drm_legacy_sg_cleanup()
73 dev->sg = NULL; in drm_legacy_sg_cleanup()
97 if (dev->sg) in drm_legacy_sg_alloc()
149 dev->sg = entry; in drm_legacy_sg_alloc()
210 entry = dev->sg; in drm_legacy_sg_free()
211 dev->sg = NULL; in drm_legacy_sg_free()
Ddrm_prime.c678 struct sg_table *sg = NULL; in drm_prime_pages_to_sg() local
681 sg = kmalloc(sizeof(struct sg_table), GFP_KERNEL); in drm_prime_pages_to_sg()
682 if (!sg) { in drm_prime_pages_to_sg()
687 ret = sg_alloc_table_from_pages(sg, pages, nr_pages, 0, in drm_prime_pages_to_sg()
692 return sg; in drm_prime_pages_to_sg()
694 kfree(sg); in drm_prime_pages_to_sg()
713 struct scatterlist *sg; in drm_prime_sg_to_page_addr_arrays() local
720 for_each_sg(sgt->sgl, sg, sgt->nents, count) { in drm_prime_sg_to_page_addr_arrays()
721 len = sg->length; in drm_prime_sg_to_page_addr_arrays()
722 page = sg_page(sg); in drm_prime_sg_to_page_addr_arrays()
[all …]
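
drm_prime_pages_to_sg() above relies on sg_alloc_table_from_pages(), which also merges physically contiguous pages into single entries. A trimmed sketch of the allocation and cleanup path:

#include <linux/err.h>
#include <linux/mm.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Turn a pinned page array into an sg table (sketch). */
static struct sg_table *demo_pages_to_sgt(struct page **pages,
					  unsigned int nr_pages)
{
	struct sg_table *sgt;
	int ret;

	sgt = kmalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		return ERR_PTR(-ENOMEM);

	ret = sg_alloc_table_from_pages(sgt, pages, nr_pages, 0,
					(unsigned long)nr_pages << PAGE_SHIFT,
					GFP_KERNEL);
	if (ret) {
		kfree(sgt);
		return ERR_PTR(ret);
	}

	return sgt;
}
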
/linux-4.1.27/drivers/dma/sh/
Dusb-dmac.c63 struct usb_dmac_sg sg[0]; member
200 struct usb_dmac_sg *sg = desc->sg + index; in usb_dmac_chan_start_sg() local
206 dst_addr = sg->mem_addr; in usb_dmac_chan_start_sg()
208 src_addr = sg->mem_addr; in usb_dmac_chan_start_sg()
212 chan->index, sg, sg->size, &src_addr, &dst_addr); in usb_dmac_chan_start_sg()
217 DIV_ROUND_UP(sg->size, USB_DMAC_XFER_SIZE)); in usb_dmac_chan_start_sg()
218 usb_dmac_chan_write(chan, USB_DMATEND, usb_dmac_calc_tend(sg->size)); in usb_dmac_chan_start_sg()
272 desc = kzalloc(sizeof(*desc) + sg_len * sizeof(desc->sg[0]), gfp); in usb_dmac_desc_alloc()
425 struct scatterlist *sg; in usb_dmac_prep_slave_sg() local
440 for_each_sg(sgl, sg, sg_len, i) { in usb_dmac_prep_slave_sg()
[all …]
Dshdma-base.c567 struct scatterlist *sg; in shdma_prep_sg() local
574 for_each_sg(sgl, sg, sg_len, i) in shdma_prep_sg()
575 chunks += DIV_ROUND_UP(sg_dma_len(sg), schan->max_xfer_len); in shdma_prep_sg()
591 for_each_sg(sgl, sg, sg_len, i) { in shdma_prep_sg()
592 dma_addr_t sg_addr = sg_dma_address(sg); in shdma_prep_sg()
593 size_t len = sg_dma_len(sg); in shdma_prep_sg()
600 i, sg, len, &sg_addr); in shdma_prep_sg()
647 struct scatterlist sg; in shdma_prep_memcpy() local
654 sg_init_table(&sg, 1); in shdma_prep_memcpy()
655 sg_set_page(&sg, pfn_to_page(PFN_DOWN(dma_src)), len, in shdma_prep_memcpy()
[all …]
/linux-4.1.27/drivers/memstick/core/
Dms_block.c98 static int msb_sg_compare_to_buffer(struct scatterlist *sg, in msb_sg_compare_to_buffer() argument
104 sg_miter_start(&miter, sg, sg_nents(sg), in msb_sg_compare_to_buffer()
344 struct scatterlist sg[2]; in h_msb_read_page() local
434 sg_init_table(sg, ARRAY_SIZE(sg)); in h_msb_read_page()
435 msb_sg_copy(msb->current_sg, sg, ARRAY_SIZE(sg), in h_msb_read_page()
439 memstick_init_req_sg(mrq, MS_TPC_READ_LONG_DATA, sg); in h_msb_read_page()
480 struct scatterlist sg[2]; in h_msb_write_block() local
559 sg_init_table(sg, ARRAY_SIZE(sg)); in h_msb_write_block()
561 if (msb_sg_copy(msb->current_sg, sg, ARRAY_SIZE(sg), in h_msb_write_block()
566 memstick_init_req_sg(mrq, MS_TPC_WRITE_LONG_DATA, sg); in h_msb_write_block()
[all …]
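
msb_sg_compare_to_buffer() above uses the sg mapping iterator, which hides the kmap/offset bookkeeping of a manual walk. A minimal sketch with the same flags that only totals the bytes it visits:

#include <linux/scatterlist.h>

/* Walk an sg list chunk by chunk with the mapping iterator (sketch).
 * miter.addr is a mapped pointer while sg_miter_next() returns true. */
static size_t demo_sg_total(struct scatterlist *sgl)
{
	struct sg_mapping_iter miter;
	size_t total = 0;

	sg_miter_start(&miter, sgl, sg_nents(sgl),
		       SG_MITER_ATOMIC | SG_MITER_FROM_SG);
	while (sg_miter_next(&miter))
		total += miter.length;
	sg_miter_stop(&miter);

	return total;
}
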
/linux-4.1.27/drivers/staging/i2o/
Di2o_config.c604 struct sg_simple_element *sg; in i2o_cfg_passthru32() local
611 sg = (struct sg_simple_element *)((&msg->u.head[0]) + in i2o_cfg_passthru32()
626 if (!(sg[i].flag_count & 0x10000000 in i2o_cfg_passthru32()
630 c->name, i, sg[i].flag_count); in i2o_cfg_passthru32()
634 sg_size = sg[i].flag_count & 0xffffff; in i2o_cfg_passthru32()
646 if (sg[i]. in i2o_cfg_passthru32()
651 (void __user *)(unsigned long)sg[i]. in i2o_cfg_passthru32()
661 sg[i].addr_bus = (u32) p->phys; in i2o_cfg_passthru32()
677 struct sg_simple_element *sg; in i2o_cfg_passthru32() local
703 sg = (struct sg_simple_element *)(rmsg + sg_offset); in i2o_cfg_passthru32()
[all …]
Dmemory.c127 int i2o_dma_map_sg(struct i2o_controller *c, struct scatterlist *sg, in i2o_dma_map_sg() argument
144 sg_count = dma_map_sg(&c->pdev->dev, sg, sg_count, direction); in i2o_dma_map_sg()
158 *mptr++ = cpu_to_le32(sg_flags | sg_dma_len(sg)); in i2o_dma_map_sg()
159 *mptr++ = cpu_to_le32(i2o_dma_low(sg_dma_address(sg))); in i2o_dma_map_sg()
162 *mptr++ = cpu_to_le32(i2o_dma_high(sg_dma_address(sg))); in i2o_dma_map_sg()
164 sg = sg_next(sg); in i2o_dma_map_sg()
/linux-4.1.27/drivers/dma/
Ddma-jz4740.c112 struct jz4740_dma_sg sg[]; member
294 struct jz4740_dma_sg *sg; in jz4740_dma_start_transfer() local
310 sg = &chan->desc->sg[chan->next_sg]; in jz4740_dma_start_transfer()
313 src_addr = sg->addr; in jz4740_dma_start_transfer()
317 dst_addr = sg->addr; in jz4740_dma_start_transfer()
322 sg->len >> chan->transfer_shift); in jz4740_dma_start_transfer()
396 struct scatterlist *sg; in jz4740_dma_prep_slave_sg() local
403 for_each_sg(sgl, sg, sg_len, i) { in jz4740_dma_prep_slave_sg()
404 desc->sg[i].addr = sg_dma_address(sg); in jz4740_dma_prep_slave_sg()
405 desc->sg[i].len = sg_dma_len(sg); in jz4740_dma_prep_slave_sg()
[all …]
Dsa11x0-dma.c85 struct sa11x0_dma_sg sg[0]; member
166 struct sa11x0_dma_sg *sg; in sa11x0_dma_start_sg() local
202 sg = &txd->sg[p->sg_load++]; in sa11x0_dma_start_sg()
216 writel_relaxed(sg->addr, base + dbsx); in sa11x0_dma_start_sg()
217 writel_relaxed(sg->len, base + dbtx); in sa11x0_dma_start_sg()
222 'A' + (dbsx == DMA_DBSB), sg->addr, in sa11x0_dma_start_sg()
223 'A' + (dbtx == DMA_DBTB), sg->len); in sa11x0_dma_start_sg()
470 i, txd->sg[i].addr, txd->sg[i].len); in sa11x0_dma_tx_status()
471 if (addr >= txd->sg[i].addr && in sa11x0_dma_tx_status()
472 addr < txd->sg[i].addr + txd->sg[i].len) { in sa11x0_dma_tx_status()
[all …]
Dtimb_dma.c158 struct scatterlist *sg, bool last) in td_fill_desc() argument
160 if (sg_dma_len(sg) > USHRT_MAX) { in td_fill_desc()
166 if (sg_dma_len(sg) % sizeof(u32)) { in td_fill_desc()
168 sg_dma_len(sg)); in td_fill_desc()
173 dma_desc, (unsigned long long)sg_dma_address(sg)); in td_fill_desc()
175 dma_desc[7] = (sg_dma_address(sg) >> 24) & 0xff; in td_fill_desc()
176 dma_desc[6] = (sg_dma_address(sg) >> 16) & 0xff; in td_fill_desc()
177 dma_desc[5] = (sg_dma_address(sg) >> 8) & 0xff; in td_fill_desc()
178 dma_desc[4] = (sg_dma_address(sg) >> 0) & 0xff; in td_fill_desc()
180 dma_desc[3] = (sg_dma_len(sg) >> 8) & 0xff; in td_fill_desc()
[all …]
Domap-dma.c75 struct omap_sg sg[0]; member
362 struct omap_sg *sg = d->sg + idx; in omap_dma_start_sg() local
375 omap_dma_chan_write(c, cxsa, sg->addr); in omap_dma_start_sg()
378 omap_dma_chan_write(c, CEN, sg->en); in omap_dma_start_sg()
379 omap_dma_chan_write(c, CFN, sg->fn); in omap_dma_start_sg()
594 static size_t omap_dma_sg_size(struct omap_sg *sg) in omap_dma_sg_size() argument
596 return sg->en * sg->fn; in omap_dma_sg_size()
605 size += omap_dma_sg_size(&d->sg[i]); in omap_dma_desc_size()
616 size_t this_size = omap_dma_sg_size(&d->sg[i]) * es_size; in omap_dma_desc_size_pos()
620 else if (addr >= d->sg[i].addr && in omap_dma_desc_size_pos()
[all …]
Dcoh901318_lli.c239 struct scatterlist *sg; in coh901318_lli_fill_sg() local
258 for_each_sg(sgl, sg, nents, i) { in coh901318_lli_fill_sg()
259 if (sg_is_chain(sg)) { in coh901318_lli_fill_sg()
273 src = sg_dma_address(sg); in coh901318_lli_fill_sg()
276 dst = sg_dma_address(sg); in coh901318_lli_fill_sg()
278 bytes_to_transfer = sg_dma_len(sg); in coh901318_lli_fill_sg()
Dimx-dma.c143 struct scatterlist *sg; member
286 struct scatterlist *sg = d->sg; in imxdma_sg_next() local
289 now = min(d->len, sg_dma_len(sg)); in imxdma_sg_next()
294 imx_dmav1_writel(imxdma, sg->dma_address, in imxdma_sg_next()
297 imx_dmav1_writel(imxdma, sg->dma_address, in imxdma_sg_next()
329 d->sg && imxdma_hw_chain(imxdmac)) { in imxdma_enable_hw()
330 d->sg = sg_next(d->sg); in imxdma_enable_hw()
331 if (d->sg) { in imxdma_enable_hw()
448 if (desc->sg) { in dma_irq_handle_channel()
450 desc->sg = sg_next(desc->sg); in dma_irq_handle_channel()
[all …]
Dmoxart-dma.c130 struct moxart_sg sg[0]; member
311 d = kzalloc(sizeof(*d) + sg_len * sizeof(d->sg[0]), GFP_ATOMIC); in moxart_prep_slave_sg()
320 d->sg[i].addr = sg_dma_address(sgent); in moxart_prep_slave_sg()
321 d->sg[i].len = sg_dma_len(sgent); in moxart_prep_slave_sg()
406 struct moxart_sg *sg = ch->desc->sg + idx; in moxart_dma_start_sg() local
409 moxart_dma_set_params(ch, sg->addr, d->dev_addr); in moxart_dma_start_sg()
411 moxart_dma_set_params(ch, d->dev_addr, sg->addr); in moxart_dma_start_sg()
413 moxart_set_transfer_params(ch, sg->len); in moxart_dma_start_sg()
456 size += d->sg[i].len; in moxart_dma_desc_size()
/linux-4.1.27/drivers/staging/rtl8192e/
Drtllib_crypt_wep.c105 struct scatterlist sg; in prism2_wep_encrypt() local
152 sg_init_one(&sg, pos, len+4); in prism2_wep_encrypt()
154 return crypto_blkcipher_encrypt(&desc, &sg, &sg, len + 4); in prism2_wep_encrypt()
179 struct scatterlist sg; in prism2_wep_decrypt() local
201 sg_init_one(&sg, pos, plen+4); in prism2_wep_decrypt()
203 if (crypto_blkcipher_decrypt(&desc, &sg, &sg, plen + 4)) in prism2_wep_decrypt()
Drtllib_crypt_tkip.c310 struct scatterlist sg; in rtllib_tkip_encrypt() local
359 sg_init_one(&sg, pos, len+4); in rtllib_tkip_encrypt()
363 ret = crypto_blkcipher_encrypt(&desc, &sg, &sg, len + 4); in rtllib_tkip_encrypt()
393 struct scatterlist sg; in rtllib_tkip_decrypt() local
453 sg_init_one(&sg, pos, plen+4); in rtllib_tkip_decrypt()
456 if (crypto_blkcipher_decrypt(&desc, &sg, &sg, plen + 4)) { in rtllib_tkip_decrypt()
509 struct scatterlist sg[2]; in michael_mic() local
515 sg_init_table(sg, 2); in michael_mic()
516 sg_set_buf(&sg[0], hdr, 16); in michael_mic()
517 sg_set_buf(&sg[1], data, data_len); in michael_mic()
[all …]
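
The WEP/TKIP helpers above encrypt in place by passing the same one-entry list as both source and destination to the 4.1-era synchronous blkcipher API. A self-contained sketch using the same 'ecb(arc4)' transform those helpers allocate:

#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

/* Encrypt a buffer in place through a one-entry sg list (sketch). */
static int demo_encrypt_inplace(const u8 *key, unsigned int klen,
				u8 *data, unsigned int len)
{
	struct crypto_blkcipher *tfm;
	struct blkcipher_desc desc;
	struct scatterlist sg;
	int err;

	tfm = crypto_alloc_blkcipher("ecb(arc4)", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	desc.tfm = tfm;
	desc.flags = 0;

	err = crypto_blkcipher_setkey(tfm, key, klen);
	if (!err) {
		sg_init_one(&sg, data, len);
		err = crypto_blkcipher_encrypt(&desc, &sg, &sg, len);
	}

	crypto_free_blkcipher(tfm);
	return err;
}
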
/linux-4.1.27/drivers/media/pci/ivtv/
Divtv-udma.c77 struct scatterlist *sg; in ivtv_udma_fill_sg_array() local
79 for (i = 0, sg = dma->SGlist; i < dma->SG_length; i++, sg = sg_next(sg)) { in ivtv_udma_fill_sg_array()
80 dma->SGarray[i].size = cpu_to_le32(sg_dma_len(sg)); in ivtv_udma_fill_sg_array()
81 dma->SGarray[i].src = cpu_to_le32(sg_dma_address(sg)); in ivtv_udma_fill_sg_array()
83 buffer_offset += sg_dma_len(sg); in ivtv_udma_fill_sg_array()
85 split -= sg_dma_len(sg); in ivtv_udma_fill_sg_array()
/linux-4.1.27/drivers/staging/rtl8192u/ieee80211/
Dieee80211_crypt_wep.c111 struct scatterlist sg; in prism2_wep_encrypt() local
157 sg_init_one(&sg, pos, len+4); in prism2_wep_encrypt()
159 return crypto_blkcipher_encrypt(&desc, &sg, &sg, len + 4); in prism2_wep_encrypt()
183 struct scatterlist sg; in prism2_wep_decrypt() local
207 sg_init_one(&sg, pos, plen+4); in prism2_wep_decrypt()
209 if (crypto_blkcipher_decrypt(&desc, &sg, &sg, plen + 4)) in prism2_wep_decrypt()
Dieee80211_crypt_tkip.c315 struct scatterlist sg; in ieee80211_tkip_encrypt() local
369 sg_init_one(&sg, pos, len+4); in ieee80211_tkip_encrypt()
370 ret= crypto_blkcipher_encrypt(&desc, &sg, &sg, len + 4); in ieee80211_tkip_encrypt()
399 struct scatterlist sg; in ieee80211_tkip_decrypt() local
455 sg_init_one(&sg, pos, plen+4); in ieee80211_tkip_decrypt()
457 if (crypto_blkcipher_decrypt(&desc, &sg, &sg, plen + 4)) { in ieee80211_tkip_decrypt()
505 struct scatterlist sg[2]; in michael_mic() local
512 sg_init_table(sg, 2); in michael_mic()
513 sg_set_buf(&sg[0], hdr, 16); in michael_mic()
514 sg_set_buf(&sg[1], data, data_len); in michael_mic()
[all …]
/linux-4.1.27/net/wireless/
Dlib80211_crypt_wep.c139 struct scatterlist sg; in lib80211_wep_encrypt() local
169 sg_init_one(&sg, pos, len + 4); in lib80211_wep_encrypt()
170 return crypto_blkcipher_encrypt(&desc, &sg, &sg, len + 4); in lib80211_wep_encrypt()
187 struct scatterlist sg; in lib80211_wep_decrypt() local
209 sg_init_one(&sg, pos, plen + 4); in lib80211_wep_decrypt()
210 if (crypto_blkcipher_decrypt(&desc, &sg, &sg, plen + 4)) in lib80211_wep_decrypt()
Dlib80211_crypt_tkip.c360 struct scatterlist sg; in lib80211_tkip_encrypt() local
386 sg_init_one(&sg, pos, len + 4); in lib80211_tkip_encrypt()
387 return crypto_blkcipher_encrypt(&desc, &sg, &sg, len + 4); in lib80211_tkip_encrypt()
414 struct scatterlist sg; in lib80211_tkip_decrypt() local
469 sg_init_one(&sg, pos, plen + 4); in lib80211_tkip_decrypt()
470 if (crypto_blkcipher_decrypt(&desc, &sg, &sg, plen + 4)) { in lib80211_tkip_decrypt()
512 struct scatterlist sg[2]; in michael_mic() local
518 sg_init_table(sg, 2); in michael_mic()
519 sg_set_buf(&sg[0], hdr, 16); in michael_mic()
520 sg_set_buf(&sg[1], data, data_len); in michael_mic()
[all …]
/linux-4.1.27/drivers/rapidio/devices/
Dtsi721_dma.c338 struct scatterlist *sg, u32 sys_size) in tsi721_desc_fill_init() argument
355 (u64)sg_dma_address(sg) & 0xffffffff); in tsi721_desc_fill_init()
356 bd_ptr->t1.bufptr_hi = cpu_to_le32((u64)sg_dma_address(sg) >> 32); in tsi721_desc_fill_init()
422 struct scatterlist *sg; in tsi721_submit_sg() local
457 for_each_sg(desc->sg, sg, desc->sg_len, i) { in tsi721_submit_sg()
461 (unsigned long long)sg_dma_address(sg), sg_dma_len(sg)); in tsi721_submit_sg()
463 if (sg_dma_len(sg) > TSI721_BDMA_MAX_BCOUNT) { in tsi721_submit_sg()
474 if (next_addr == sg_dma_address(sg) && in tsi721_submit_sg()
475 bcount + sg_dma_len(sg) <= TSI721_BDMA_MAX_BCOUNT) { in tsi721_submit_sg()
477 bcount += sg_dma_len(sg); in tsi721_submit_sg()
[all …]
/linux-4.1.27/drivers/crypto/nx/
Dnx.c97 struct nx_sg *sg; in nx_build_sg_list() local
121 for (sg = sg_head; sg_len < *len; sg++) { in nx_build_sg_list()
124 sg->addr = sg_addr; in nx_build_sg_list()
128 next_page = (sg->addr & PAGE_MASK) + PAGE_SIZE; in nx_build_sg_list()
129 sg->len = min_t(u64, sg_addr, next_page) - sg->addr; in nx_build_sg_list()
130 sg_len += sg->len; in nx_build_sg_list()
139 if ((sg - sg_head) == sgmax) { in nx_build_sg_list()
142 sg++; in nx_build_sg_list()
149 return sg; in nx_build_sg_list()
192 scatterwalk_start(&walk, sg_next(walk.sg)); in nx_walk_and_build()
[all …]
/linux-4.1.27/arch/powerpc/platforms/powernv/
Dopal.c889 struct opal_sg_list *sg, *first = NULL; in opal_vmalloc_to_sg_list() local
892 sg = kzalloc(PAGE_SIZE, GFP_KERNEL); in opal_vmalloc_to_sg_list()
893 if (!sg) in opal_vmalloc_to_sg_list()
896 first = sg; in opal_vmalloc_to_sg_list()
902 sg->entry[i].data = cpu_to_be64(data); in opal_vmalloc_to_sg_list()
903 sg->entry[i].length = cpu_to_be64(length); in opal_vmalloc_to_sg_list()
913 sg->length = cpu_to_be64( in opal_vmalloc_to_sg_list()
916 sg->next = cpu_to_be64(__pa(next)); in opal_vmalloc_to_sg_list()
917 sg = next; in opal_vmalloc_to_sg_list()
924 sg->length = cpu_to_be64(i * sizeof(struct opal_sg_entry) + 16); in opal_vmalloc_to_sg_list()
[all …]
/linux-4.1.27/drivers/usb/host/whci/
Dqset.c266 struct scatterlist *sg; in qset_copy_bounce_to_sg() local
273 sg = std->bounce_sg; in qset_copy_bounce_to_sg()
279 len = min(sg->length - offset, remaining); in qset_copy_bounce_to_sg()
280 memcpy(sg_virt(sg) + offset, bounce, len); in qset_copy_bounce_to_sg()
286 if (offset >= sg->length) { in qset_copy_bounce_to_sg()
287 sg = sg_next(sg); in qset_copy_bounce_to_sg()
439 struct scatterlist *sg; in qset_add_urb_sg() local
450 for_each_sg(urb->sg, sg, urb->num_mapped_sgs, i) { in qset_add_urb_sg()
460 dma_addr = sg_dma_address(sg); in qset_add_urb_sg()
461 dma_remaining = min_t(size_t, sg_dma_len(sg), remaining); in qset_add_urb_sg()
[all …]
/linux-4.1.27/drivers/infiniband/hw/mthca/
Dmthca_wqe.h117 struct ib_sge *sg) in mthca_set_data_seg() argument
119 dseg->byte_count = cpu_to_be32(sg->length); in mthca_set_data_seg()
120 dseg->lkey = cpu_to_be32(sg->lkey); in mthca_set_data_seg()
121 dseg->addr = cpu_to_be64(sg->addr); in mthca_set_data_seg()
/linux-4.1.27/arch/x86/kernel/
Dpci-nommu.c56 static int nommu_map_sg(struct device *hwdev, struct scatterlist *sg, in nommu_map_sg() argument
63 WARN_ON(nents == 0 || sg[0].length == 0); in nommu_map_sg()
65 for_each_sg(sg, s, nents, i) { in nommu_map_sg()
85 struct scatterlist *sg, int nelems, in nommu_sync_sg_for_device() argument
Damd_gart_64.c288 static void gart_unmap_sg(struct device *dev, struct scatterlist *sg, int nents, in gart_unmap_sg() argument
294 for_each_sg(sg, s, nents, i) { in gart_unmap_sg()
302 static int dma_map_sg_nonforce(struct device *dev, struct scatterlist *sg, in dma_map_sg_nonforce() argument
312 for_each_sg(sg, s, nents, i) { in dma_map_sg_nonforce()
319 gart_unmap_sg(dev, sg, i, dir, NULL); in dma_map_sg_nonforce()
321 sg[0].dma_length = 0; in dma_map_sg_nonforce()
389 static int gart_map_sg(struct device *dev, struct scatterlist *sg, int nents, in gart_map_sg() argument
406 start_sg = sg; in gart_map_sg()
407 sgmap = sg; in gart_map_sg()
412 for_each_sg(sg, s, nents, i) { in gart_map_sg()
[all …]
/linux-4.1.27/drivers/dma/hsu/
Dhsu.c82 hsu_chan_writel(hsuc, HSU_CH_DxSAR(i), desc->sg[i].addr); in hsu_dma_chan_start()
83 hsu_chan_writel(hsuc, HSU_CH_DxTSR(i), desc->sg[i].len); in hsu_dma_chan_start()
205 desc->sg = kcalloc(nents, sizeof(*desc->sg), GFP_NOWAIT); in hsu_dma_alloc_desc()
206 if (!desc->sg) { in hsu_dma_alloc_desc()
218 kfree(desc->sg); in hsu_dma_desc_free()
229 struct scatterlist *sg; in hsu_dma_prep_slave_sg() local
236 for_each_sg(sgl, sg, sg_len, i) { in hsu_dma_prep_slave_sg()
237 desc->sg[i].addr = sg_dma_address(sg); in hsu_dma_prep_slave_sg()
238 desc->sg[i].len = sg_dma_len(sg); in hsu_dma_prep_slave_sg()
266 bytes += desc->sg[i].len; in hsu_dma_desc_size()
/linux-4.1.27/drivers/net/
Dvirtio_net.c71 struct scatterlist sg[MAX_SKB_FRAGS + 2]; member
94 struct scatterlist sg[MAX_SKB_FRAGS + 2]; member
543 sg_init_table(rq->sg, MAX_SKB_FRAGS + 2); in add_recvbuf_small()
544 sg_set_buf(rq->sg, hdr, vi->hdr_len); in add_recvbuf_small()
545 skb_to_sgvec(skb, rq->sg + 1, 0, skb->len); in add_recvbuf_small()
547 err = virtqueue_add_inbuf(rq->vq, rq->sg, 2, skb, gfp); in add_recvbuf_small()
561 sg_init_table(rq->sg, MAX_SKB_FRAGS + 2); in add_recvbuf_big()
571 sg_set_buf(&rq->sg[i], page_address(first), PAGE_SIZE); in add_recvbuf_big()
587 sg_set_buf(&rq->sg[0], p, vi->hdr_len); in add_recvbuf_big()
591 sg_set_buf(&rq->sg[1], p + offset, PAGE_SIZE - offset); in add_recvbuf_big()
[all …]
/linux-4.1.27/drivers/usb/wusbcore/
Dcrypto.c209 struct scatterlist sg[4], sg_dst; in wusb_ccm_mac() local
254 sg_init_table(sg, ARRAY_SIZE(sg)); in wusb_ccm_mac()
255 sg_set_buf(&sg[0], &b0, sizeof(b0)); in wusb_ccm_mac()
256 sg_set_buf(&sg[1], &b1, sizeof(b1)); in wusb_ccm_mac()
257 sg_set_buf(&sg[2], b, blen); in wusb_ccm_mac()
259 sg_set_buf(&sg[3], bzero, zero_padding); in wusb_ccm_mac()
264 result = crypto_blkcipher_encrypt(&desc, &sg_dst, sg, dst_size); in wusb_ccm_mac()
/linux-4.1.27/drivers/staging/rts5208/
Drtsx_transport.c69 struct scatterlist *sg = in rtsx_stor_access_xfer_buf() local
79 struct page *page = sg_page(sg) + in rtsx_stor_access_xfer_buf()
80 ((sg->offset + *offset) >> PAGE_SHIFT); in rtsx_stor_access_xfer_buf()
82 (sg->offset + *offset) & (PAGE_SIZE-1); in rtsx_stor_access_xfer_buf()
83 unsigned int sglen = sg->length - *offset; in rtsx_stor_access_xfer_buf()
95 ++sg; in rtsx_stor_access_xfer_buf()
324 struct scatterlist *sg, int num_sg, unsigned int *index, in rtsx_transfer_sglist_adma_partial() argument
337 if ((sg == NULL) || (num_sg <= 0) || !offset || !index) in rtsx_transfer_sglist_adma_partial()
366 sg_cnt = dma_map_sg(&(rtsx->pci->dev), sg, num_sg, dma_dir); in rtsx_transfer_sglist_adma_partial()
369 sg_ptr = sg; in rtsx_transfer_sglist_adma_partial()
[all …]
/linux-4.1.27/arch/arm/mach-ks8695/
DMakefile15 obj-$(CONFIG_MACH_LITE300) += board-sg.o
16 obj-$(CONFIG_MACH_SG310) += board-sg.o
17 obj-$(CONFIG_MACH_SE4200) += board-sg.o
/linux-4.1.27/arch/sparc/include/asm/
Ddma.h112 #define mmu_get_scsi_sgl(dev,sg,sz) \ argument
113 sparc32_dma_ops->get_scsi_sgl(dev, sg, sz)
116 #define mmu_release_scsi_sgl(dev,sg,sz) \ argument
117 sparc32_dma_ops->release_scsi_sgl(dev, sg, sz)
/linux-4.1.27/drivers/media/pci/cx88/
Dcx88-core.c81 struct scatterlist *sg; in cx88_risc_field() local
94 sg = sglist; in cx88_risc_field()
96 while (offset && offset >= sg_dma_len(sg)) { in cx88_risc_field()
97 offset -= sg_dma_len(sg); in cx88_risc_field()
98 sg = sg_next(sg); in cx88_risc_field()
104 if (bpl <= sg_dma_len(sg)-offset) { in cx88_risc_field()
107 *(rp++)=cpu_to_le32(sg_dma_address(sg)+offset); in cx88_risc_field()
113 (sg_dma_len(sg)-offset)); in cx88_risc_field()
114 *(rp++)=cpu_to_le32(sg_dma_address(sg)+offset); in cx88_risc_field()
115 todo -= (sg_dma_len(sg)-offset); in cx88_risc_field()
[all …]
/linux-4.1.27/tools/virtio/
Dvringh_test.c326 struct scatterlist sg[4]; in parallel_test() local
351 sg_init_table(sg, num_sg = 3); in parallel_test()
352 sg_set_buf(&sg[0], (void *)dbuf, 1); in parallel_test()
353 sg_set_buf(&sg[1], (void *)dbuf + 1, 2); in parallel_test()
354 sg_set_buf(&sg[2], (void *)dbuf + 3, 1); in parallel_test()
357 sg_init_table(sg, num_sg = 2); in parallel_test()
358 sg_set_buf(&sg[0], (void *)dbuf, 1); in parallel_test()
359 sg_set_buf(&sg[1], (void *)dbuf + 1, 3); in parallel_test()
362 sg_init_table(sg, num_sg = 1); in parallel_test()
363 sg_set_buf(&sg[0], (void *)dbuf, 4); in parallel_test()
[all …]
/linux-4.1.27/drivers/block/
Dcpqarray.c945 c->req.sg[i].size = tmp_sg[i].length; in do_ida_request()
946 c->req.sg[i].addr = (__u32) pci_map_page(h->pci_dev, in do_ida_request()
1031 pci_unmap_page(hba[cmd->ctlr]->pci_dev, cmd->req.sg[i].addr, in complete_command()
1032 cmd->req.sg[i].size, ddir); in complete_command()
1257 p = memdup_user(io->sg[0].addr, io->sg[0].size); in ida_ctlr_ioctl()
1266 c->req.sg[0].size = io->sg[0].size; in ida_ctlr_ioctl()
1267 c->req.sg[0].addr = pci_map_single(h->pci_dev, p, in ida_ctlr_ioctl()
1268 c->req.sg[0].size, PCI_DMA_BIDIRECTIONAL); in ida_ctlr_ioctl()
1274 p = kmalloc(io->sg[0].size, GFP_KERNEL); in ida_ctlr_ioctl()
1282 c->req.sg[0].size = io->sg[0].size; in ida_ctlr_ioctl()
[all …]
Dxen-blkfront.c79 struct scatterlist *sg; member
405 struct scatterlist *sg; in blkif_queue_request() local
454 nseg = blk_rq_map_sg(req->q, req, info->shadow[id].sg); in blkif_queue_request()
497 for_each_sg(info->shadow[id].sg, sg, nseg, i) { in blkif_queue_request()
498 fsect = sg->offset >> 9; in blkif_queue_request()
499 lsect = fsect + (sg->length >> 9) - 1; in blkif_queue_request()
525 gnt_list_entry = get_grant(&gref_head, page_to_pfn(sg_page(sg)), info); in blkif_queue_request()
534 BUG_ON(sg->offset + sg->length > PAGE_SIZE); in blkif_queue_request()
537 bvec_data = kmap_atomic(sg_page(sg)); in blkif_queue_request()
548 memcpy(shared_data + sg->offset, in blkif_queue_request()
[all …]
/linux-4.1.27/drivers/net/irda/
Dsa1100_ir.c50 struct scatterlist sg; member
115 return sg_dma_len(&buf->sg) - state.residue; in sa1100_irda_dma_xferred()
150 desc = dmaengine_prep_slave_sg(chan, &buf->sg, 1, dir, in sa1100_irda_dma_start()
180 sg_set_buf(&si->dma_rx.sg, si->dma_rx.skb->data, HPSIR_MAX_RXLEN); in sa1100_irda_rx_alloc()
181 if (dma_map_sg(si->dma_rx.dev, &si->dma_rx.sg, 1, DMA_FROM_DEVICE) == 0) { in sa1100_irda_rx_alloc()
230 dma_unmap_sg(si->dma_tx.dev, &si->dma_tx.sg, 1, DMA_TO_DEVICE); in sa1100_irda_sirtxdma_irq()
235 dev->stats.tx_bytes += sg_dma_len(&si->dma_tx.sg); in sa1100_irda_sirtxdma_irq()
263 sg_set_buf(&si->dma_tx.sg, si->tx_buff.data, si->tx_buff.len); in sa1100_irda_sir_tx_start()
264 if (dma_map_sg(si->dma_tx.dev, &si->dma_tx.sg, 1, DMA_TO_DEVICE) == 0) { in sa1100_irda_sir_tx_start()
378 dma_unmap_sg(si->dma_tx.dev, &si->dma_tx.sg, 1, in sa1100_irda_firtxdma_irq()
[all …]
/linux-4.1.27/arch/frv/mb93090-mb00/
Dpci-dma-nommu.c122 int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, in dma_map_sg() argument
128 frv_cache_wback_inv(sg_dma_address(&sg[i]), in dma_map_sg()
129 sg_dma_address(&sg[i]) + sg_dma_len(&sg[i])); in dma_map_sg()
/linux-4.1.27/arch/frv/include/asm/
Ddma-mapping.h34 extern int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents,
38 void dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nhwentries, in dma_unmap_sg() argument
85 void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, int nelems, in dma_sync_sg_for_cpu() argument
91 void dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg, int nelems, in dma_sync_sg_for_device() argument
/linux-4.1.27/arch/mips/loongson/common/
Ddma-swiotlb.c68 static int loongson_dma_map_sg(struct device *dev, struct scatterlist *sg, in loongson_dma_map_sg() argument
72 int r = swiotlb_map_sg_attrs(dev, sg, nents, dir, NULL); in loongson_dma_map_sg()
87 struct scatterlist *sg, int nents, in loongson_dma_sync_sg_for_device() argument
90 swiotlb_sync_sg_for_device(dev, sg, nents, dir); in loongson_dma_sync_sg_for_device()
/linux-4.1.27/net/ipv6/
Desp6.c150 struct scatterlist *sg; in esp6_output() local
209 sg = asg + sglists; in esp6_output()
233 sg_init_table(sg, nfrags); in esp6_output()
234 skb_to_sgvec(skb, sg, in esp6_output()
248 aead_givcrypt_set_crypt(req, sg, sg, clen, iv); in esp6_output()
336 struct scatterlist *sg; in esp6_input() local
376 sg = asg + sglists; in esp6_input()
385 sg_init_table(sg, nfrags); in esp6_input()
386 skb_to_sgvec(skb, sg, sizeof(*esph) + crypto_aead_ivsize(aead), elen); in esp6_input()
398 aead_request_set_crypt(req, sg, sg, elen, iv); in esp6_input()
Dah6.c343 struct scatterlist *sg; in ah6_output() local
380 sg = ah_req_sg(ahash, req); in ah6_output()
381 seqhisg = sg + nfrags; in ah6_output()
425 sg_init_table(sg, nfrags + sglists); in ah6_output()
426 skb_to_sgvec_nomark(skb, sg, 0, skb->len); in ah6_output()
433 ahash_request_set_crypt(req, sg, icv, skb->len + seqhi_len); in ah6_output()
524 struct scatterlist *sg; in ah6_input() local
587 sg = ah_req_sg(ahash, req); in ah6_input()
588 seqhisg = sg + nfrags; in ah6_input()
603 sg_init_table(sg, nfrags + sglists); in ah6_input()
[all …]
/linux-4.1.27/sound/soc/sh/
Dsiu_pcm.c124 struct scatterlist sg; in siu_pcm_wr_set() local
127 sg_init_table(&sg, 1); in siu_pcm_wr_set()
128 sg_set_page(&sg, pfn_to_page(PFN_DOWN(buff)), in siu_pcm_wr_set()
130 sg_dma_len(&sg) = size; in siu_pcm_wr_set()
131 sg_dma_address(&sg) = buff; in siu_pcm_wr_set()
134 &sg, 1, DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT | DMA_CTRL_ACK); in siu_pcm_wr_set()
172 struct scatterlist sg; in siu_pcm_rd_set() local
177 sg_init_table(&sg, 1); in siu_pcm_rd_set()
178 sg_set_page(&sg, pfn_to_page(PFN_DOWN(buff)), in siu_pcm_rd_set()
180 sg_dma_len(&sg) = size; in siu_pcm_rd_set()
[all …]
/linux-4.1.27/net/ipv4/
Dah4.c154 struct scatterlist *sg; in ah_output() local
185 sg = ah_req_sg(ahash, req); in ah_output()
186 seqhisg = sg + nfrags; in ah_output()
222 sg_init_table(sg, nfrags + sglists); in ah_output()
223 skb_to_sgvec_nomark(skb, sg, 0, skb->len); in ah_output()
230 ahash_request_set_crypt(req, sg, icv, skb->len + seqhi_len); in ah_output()
307 struct scatterlist *sg; in ah_input() local
370 sg = ah_req_sg(ahash, req); in ah_input()
371 seqhisg = sg + nfrags; in ah_input()
390 sg_init_table(sg, nfrags + sglists); in ah_input()
[all …]
Desp4.c122 struct scatterlist *sg; in esp_output() local
182 sg = asg + sglists; in esp_output()
241 sg_init_table(sg, nfrags); in esp_output()
242 skb_to_sgvec(skb, sg, in esp_output()
256 aead_givcrypt_set_crypt(req, sg, sg, clen, iv); in esp_output()
386 struct scatterlist *sg; in esp_input() local
422 sg = asg + sglists; in esp_input()
431 sg_init_table(sg, nfrags); in esp_input()
432 skb_to_sgvec(skb, sg, sizeof(*esph) + crypto_aead_ivsize(aead), elen); in esp_input()
444 aead_request_set_crypt(req, sg, sg, elen, iv); in esp_input()
/linux-4.1.27/drivers/ata/
Dpata_pxa.c60 static void pxa_load_dmac(struct scatterlist *sg, struct ata_queued_cmd *qc) in pxa_load_dmac() argument
67 cpu_addr = sg_dma_address(sg); in pxa_load_dmac()
68 cpu_len = sg_dma_len(sg); in pxa_load_dmac()
109 struct scatterlist *sg; in pxa_qc_prep() local
119 for_each_sg(qc->sg, sg, qc->n_elem, si) in pxa_qc_prep()
120 pxa_load_dmac(sg, qc); in pxa_qc_prep()
/linux-4.1.27/drivers/media/pci/bt8xx/
Dbttv-risc.c54 struct scatterlist *sg; in bttv_risc_packed() local
80 sg = sglist; in bttv_risc_packed()
85 while (offset && offset >= sg_dma_len(sg)) { in bttv_risc_packed()
86 offset -= sg_dma_len(sg); in bttv_risc_packed()
87 sg = sg_next(sg); in bttv_risc_packed()
89 if (bpl <= sg_dma_len(sg)-offset) { in bttv_risc_packed()
93 *(rp++)=cpu_to_le32(sg_dma_address(sg)+offset); in bttv_risc_packed()
99 (sg_dma_len(sg)-offset)); in bttv_risc_packed()
100 *(rp++)=cpu_to_le32(sg_dma_address(sg)+offset); in bttv_risc_packed()
101 todo -= (sg_dma_len(sg)-offset); in bttv_risc_packed()
[all …]
/linux-4.1.27/arch/arm/kernel/
Ddma.c124 void set_dma_sg (unsigned int chan, struct scatterlist *sg, int nr_sg) in set_dma_sg() argument
131 dma->sg = sg; in set_dma_sg()
148 dma->sg = NULL; in __set_dma_addr()
165 dma->sg = NULL; in set_dma_count()
/linux-4.1.27/include/scsi/
Dscsi_cmnd.h162 extern void *scsi_kmap_atomic_sg(struct scatterlist *sg, int sg_count,
196 #define scsi_for_each_sg(cmd, sg, nseg, __i) \ argument
197 for_each_sg(scsi_sglist(cmd), sg, nseg, __i)
323 #define scsi_for_each_prot_sg(cmd, sg, nseg, __i) \ argument
324 for_each_sg(scsi_prot_sglist(cmd), sg, nseg, __i)
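
scsi_for_each_sg() above is the LLD-facing iterator over a command's data list; a sketch that totals the transfer length, the same computation init_SCp() performs in the arm scsi.h excerpt earlier in this listing:

#include <scsi/scsi_cmnd.h>

/* Total a command's data transfer length from the LLD side (sketch). */
static unsigned int demo_cmd_data_bytes(struct scsi_cmnd *cmd)
{
	struct scatterlist *sg;
	unsigned int len = 0;
	int i;

	scsi_for_each_sg(cmd, sg, scsi_sg_count(cmd), i)
		len += sg->length;

	return len;
}
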
/linux-4.1.27/arch/arm/mm/
Ddma-mapping.c901 int arm_dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, in arm_dma_map_sg() argument
908 for_each_sg(sg, s, nents, i) { in arm_dma_map_sg()
920 for_each_sg(sg, s, i, j) in arm_dma_map_sg()
935 void arm_dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nents, in arm_dma_unmap_sg() argument
943 for_each_sg(sg, s, nents, i) in arm_dma_unmap_sg()
954 void arm_dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, in arm_dma_sync_sg_for_cpu() argument
961 for_each_sg(sg, s, nents, i) in arm_dma_sync_sg_for_cpu()
973 void arm_dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg, in arm_dma_sync_sg_for_device() argument
980 for_each_sg(sg, s, nents, i) in arm_dma_sync_sg_for_device()
1492 static int __map_sg_chunk(struct device *dev, struct scatterlist *sg, in __map_sg_chunk() argument
[all …]
/linux-4.1.27/drivers/crypto/amcc/
Dcrypto4xx_core.c551 struct scatterlist *sg; in crypto4xx_copy_pkt_to_dst() local
558 sg = &dst[i]; in crypto4xx_copy_pkt_to_dst()
559 sg_len = sg->length; in crypto4xx_copy_pkt_to_dst()
560 addr = dma_map_page(dev->core_dev->device, sg_page(sg), in crypto4xx_copy_pkt_to_dst()
561 sg->offset, sg->length, DMA_TO_DEVICE); in crypto4xx_copy_pkt_to_dst()
564 len = (nbytes <= sg->length) ? nbytes : sg->length; in crypto4xx_copy_pkt_to_dst()
574 len = (sg->length < len) ? sg->length : len; in crypto4xx_copy_pkt_to_dst()
749 struct scatterlist *sg = sg_list; in get_sg_count() local
754 if (sg->length > nbytes) in get_sg_count()
756 nbytes -= sg->length; in get_sg_count()
[all …]
/linux-4.1.27/drivers/ide/
Dau1xxx-ide.c219 struct scatterlist *sg; in auide_build_dmatable() local
227 sg = hwif->sg_table; in auide_build_dmatable()
228 while (i && sg_dma_len(sg)) { in auide_build_dmatable()
232 cur_addr = sg_dma_address(sg); in auide_build_dmatable()
233 cur_len = sg_dma_len(sg); in auide_build_dmatable()
253 sg_phys(sg), tc, flags)) { in auide_build_dmatable()
259 sg_phys(sg), tc, flags)) { in auide_build_dmatable()
268 sg = sg_next(sg); in auide_build_dmatable()
/linux-4.1.27/arch/nios2/include/asm/
Ddma-mapping.h78 extern int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents,
84 extern void dma_unmap_sg(struct device *dev, struct scatterlist *sg,
96 extern void dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg,
98 extern void dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg,
/linux-4.1.27/arch/cris/include/asm/
Ddma-mapping.h56 dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, in dma_map_sg() argument
80 dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nhwentries, in dma_unmap_sg() argument
113 dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, int nelems, in dma_sync_sg_for_cpu() argument
119 dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg, int nelems, in dma_sync_sg_for_device() argument
/linux-4.1.27/drivers/net/ppp/
Dppp_mppe.c68 setup_sg(struct scatterlist *sg, const void *address, unsigned int length) in setup_sg() argument
70 sg_set_buf(sg, address, length); in setup_sg()
139 struct scatterlist sg[4]; in get_new_key_from_sha() local
142 sg_init_table(sg, 4); in get_new_key_from_sha()
144 nbytes = setup_sg(&sg[0], state->master_key, state->keylen); in get_new_key_from_sha()
145 nbytes += setup_sg(&sg[1], sha_pad->sha_pad1, in get_new_key_from_sha()
147 nbytes += setup_sg(&sg[2], state->session_key, state->keylen); in get_new_key_from_sha()
148 nbytes += setup_sg(&sg[3], sha_pad->sha_pad2, in get_new_key_from_sha()
154 crypto_hash_digest(&desc, sg, nbytes, state->sha1_digest); in get_new_key_from_sha()
/linux-4.1.27/drivers/tty/serial/
Damba-pl011.c119 struct scatterlist sg; member
141 struct scatterlist sg; member
239 static int pl011_sgbuf_init(struct dma_chan *chan, struct pl011_sgbuf *sg, in pl011_sgbuf_init() argument
244 sg->buf = dma_alloc_coherent(chan->device->dev, in pl011_sgbuf_init()
246 if (!sg->buf) in pl011_sgbuf_init()
249 sg_init_table(&sg->sg, 1); in pl011_sgbuf_init()
250 sg_set_page(&sg->sg, phys_to_page(dma_addr), in pl011_sgbuf_init()
252 sg_dma_address(&sg->sg) = dma_addr; in pl011_sgbuf_init()
253 sg_dma_len(&sg->sg) = PL011_DMA_BUFFER_SIZE; in pl011_sgbuf_init()
258 static void pl011_sgbuf_free(struct dma_chan *chan, struct pl011_sgbuf *sg, in pl011_sgbuf_free() argument
[all …]
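
(Aside: a sketch, under stated assumptions, of pl011's one-entry scatterlist over a coherent buffer; EXAMPLE_BUF_SIZE is made up, and phys_to_page() is an ARM-ism — the driver avoids virt_to_page() because coherent memory need not be lowmem.)

    #include <linux/dma-mapping.h>
    #include <linux/dmaengine.h>
    #include <linux/scatterlist.h>

    #define EXAMPLE_BUF_SIZE 4096    /* assumption; the driver sizes its own */

    static int example_sgbuf_init(struct dma_chan *chan, struct scatterlist *sg,
                                  dma_addr_t *dma_addr)
    {
        void *buf = dma_alloc_coherent(chan->device->dev, EXAMPLE_BUF_SIZE,
                                       dma_addr, GFP_KERNEL);
        if (!buf)
            return -ENOMEM;

        sg_init_table(sg, 1);
        sg_set_page(sg, phys_to_page(*dma_addr), EXAMPLE_BUF_SIZE, 0);
        sg_dma_address(sg) = *dma_addr;  /* already mapped: no dma_map_sg() */
        sg_dma_len(sg) = EXAMPLE_BUF_SIZE;
        return 0;
    }
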
Dpch_uart.c798 struct scatterlist *sg = priv->sg_tx_p; in pch_dma_tx_complete() local
801 for (i = 0; i < priv->nent; i++, sg++) { in pch_dma_tx_complete()
802 xmit->tail += sg_dma_len(sg); in pch_dma_tx_complete()
803 port->icount.tx += sg_dma_len(sg); in pch_dma_tx_complete()
807 dma_unmap_sg(port->dev, sg, priv->nent, DMA_TO_DEVICE); in pch_dma_tx_complete()
869 struct scatterlist *sg; in dma_handle_rx() local
872 sg = &priv->sg_rx; in dma_handle_rx()
876 sg_dma_len(sg) = priv->trigger_level; in dma_handle_rx()
879 sg_dma_len(sg), (unsigned long)priv->rx_buf_virt & in dma_handle_rx()
882 sg_dma_address(sg) = priv->rx_buf_dma; in dma_handle_rx()
[all …]
/linux-4.1.27/arch/openrisc/kernel/
Ddma.c179 or1k_map_sg(struct device *dev, struct scatterlist *sg, in or1k_map_sg() argument
186 for_each_sg(sg, s, nents, i) { in or1k_map_sg()
195 or1k_unmap_sg(struct device *dev, struct scatterlist *sg, in or1k_unmap_sg() argument
202 for_each_sg(sg, s, nents, i) { in or1k_unmap_sg()
/linux-4.1.27/drivers/scsi/qla2xxx/
Dqla_iocb.c200 struct scatterlist *sg; in qla2x00_build_scsi_iocbs_32() local
223 scsi_for_each_sg(cmd, sg, tot_dsds, i) { in qla2x00_build_scsi_iocbs_32()
237 *cur_dsd++ = cpu_to_le32(sg_dma_address(sg)); in qla2x00_build_scsi_iocbs_32()
238 *cur_dsd++ = cpu_to_le32(sg_dma_len(sg)); in qla2x00_build_scsi_iocbs_32()
258 struct scatterlist *sg; in qla2x00_build_scsi_iocbs_64() local
281 scsi_for_each_sg(cmd, sg, tot_dsds, i) { in qla2x00_build_scsi_iocbs_64()
296 sle_dma = sg_dma_address(sg); in qla2x00_build_scsi_iocbs_64()
299 *cur_dsd++ = cpu_to_le32(sg_dma_len(sg)); in qla2x00_build_scsi_iocbs_64()
711 struct scatterlist *sg; in qla24xx_build_scsi_iocbs() local
749 scsi_for_each_sg(cmd, sg, tot_dsds, i) { in qla24xx_build_scsi_iocbs()
[all …]
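
(Aside: a minimal sketch of the scsi_for_each_sg() walk the qla2xxx 32-bit path shows; cur_dsd is an illustrative pointer into a command IOCB.)

    #include <scsi/scsi_cmnd.h>

    static void example_fill_dsds(struct scsi_cmnd *cmd, int tot_dsds,
                                  __le32 *cur_dsd)
    {
        struct scatterlist *sg;
        int i;

        /* tot_dsds comes from a prior dma_map_sg() of the command's sglist */
        scsi_for_each_sg(cmd, sg, tot_dsds, i) {
            *cur_dsd++ = cpu_to_le32(sg_dma_address(sg));
            *cur_dsd++ = cpu_to_le32(sg_dma_len(sg));
        }
    }
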
/linux-4.1.27/fs/ext4/
Dcrypto_fname.c68 struct scatterlist sg[1]; in ext4_fname_encrypt() local
104 sg_init_table(sg, 1); in ext4_fname_encrypt()
105 sg_set_page(sg, ctx->workpage, PAGE_SIZE, 0); in ext4_fname_encrypt()
106 ablkcipher_request_set_crypt(req, sg, sg, ciphertext_len, iv); in ext4_fname_encrypt()
142 struct scatterlist sg[1]; in ext4_fname_decrypt() local
176 sg_init_table(sg, 1); in ext4_fname_decrypt()
177 sg_set_page(sg, ctx->workpage, PAGE_SIZE, 0); in ext4_fname_decrypt()
178 ablkcipher_request_set_crypt(req, sg, sg, iname->len, iv); in ext4_fname_decrypt()
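
(Aside: a hedged sketch of the single-sg, in-place ablkcipher call these ext4 hits show; the transform name, GFP flags and error handling are assumptions, and key setup plus async completion (-EINPROGRESS/-EBUSY) are omitted.)

    #include <linux/crypto.h>
    #include <linux/err.h>
    #include <linux/scatterlist.h>

    static int example_encrypt_page(struct page *workpage, unsigned int len,
                                    u8 *iv)
    {
        struct crypto_ablkcipher *tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
        struct ablkcipher_request *req;
        struct scatterlist sg[1];
        int err;

        if (IS_ERR(tfm))
            return PTR_ERR(tfm);
        req = ablkcipher_request_alloc(tfm, GFP_NOFS);
        if (!req) {
            crypto_free_ablkcipher(tfm);
            return -ENOMEM;
        }

        /* a real caller sets a key first; same sg as src and dst = in place */
        sg_init_table(sg, 1);
        sg_set_page(sg, workpage, PAGE_SIZE, 0);
        ablkcipher_request_set_crypt(req, sg, sg, len, iv);
        err = crypto_ablkcipher_encrypt(req);

        ablkcipher_request_free(req);
        crypto_free_ablkcipher(tfm);
        return err;
    }
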
/linux-4.1.27/arch/blackfin/include/asm/
Ddma-mapping.h99 extern int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents,
103 dma_unmap_sg(struct device *dev, struct scatterlist *sg, in dma_unmap_sg() argument
140 dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, int nents, in dma_sync_sg_for_cpu() argument
147 dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg,
/linux-4.1.27/drivers/scsi/isci/
Drequest.c105 static void init_sgl_element(struct scu_sgl_element *e, struct scatterlist *sg) in init_sgl_element() argument
107 e->length = sg_dma_len(sg); in init_sgl_element()
108 e->address_upper = upper_32_bits(sg_dma_address(sg)); in init_sgl_element()
109 e->address_lower = lower_32_bits(sg_dma_address(sg)); in init_sgl_element()
117 struct scatterlist *sg = NULL; in sci_request_build_sgl() local
124 sg = task->scatter; in sci_request_build_sgl()
126 while (sg) { in sci_request_build_sgl()
128 init_sgl_element(&scu_sg->A, sg); in sci_request_build_sgl()
129 sg = sg_next(sg); in sci_request_build_sgl()
130 if (sg) { in sci_request_build_sgl()
[all …]
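
(Aside: a minimal sketch of splitting 64-bit DMA addresses into a hardware SGL the way init_sgl_element() above does; struct hw_sge is a made-up element layout.)

    #include <linux/kernel.h>
    #include <linux/scatterlist.h>

    struct hw_sge {               /* illustrative, not a real controller's */
        u32 addr_lo;
        u32 addr_hi;
        u32 len;
    };

    static void example_fill_sgl(struct scatterlist *sg, int nents,
                                 struct hw_sge *sgl)
    {
        struct scatterlist *s;
        int i;

        for_each_sg(sg, s, nents, i) {
            sgl[i].addr_lo = lower_32_bits(sg_dma_address(s));
            sgl[i].addr_hi = upper_32_bits(sg_dma_address(s));
            sgl[i].len = sg_dma_len(s);
        }
    }
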
/linux-4.1.27/Documentation/video4linux/
Dpxa_camera.txt99 | desc-sg[0] | ... | desc-sg[last] | finisher/linker |
104 - desc-sg[i]: i-th descriptor, transferring the i-th sg
107 - linker: has ddadr= desc-sg[0] of next video buffer, dcmd=0
109 For the next schema, let's assume d0=desc-sg[0] .. dN=desc-sg[N],
/linux-4.1.27/drivers/staging/rtl8192e/rtl8192e/
Drtl_crypto.h165 struct scatterlist *sg, unsigned int nsg);
167 void (*dit_digest)(struct crypto_tfm *tfm, struct scatterlist *sg,
277 struct scatterlist *sg, in crypto_digest_update() argument
281 tfm->crt_digest.dit_update(tfm, sg, nsg); in crypto_digest_update()
291 struct scatterlist *sg, in crypto_digest_digest() argument
295 tfm->crt_digest.dit_digest(tfm, sg, nsg, out); in crypto_digest_digest()
/linux-4.1.27/drivers/gpu/drm/radeon/
Dradeon_ttm.c568 r = sg_alloc_table_from_pages(ttm->sg, ttm->pages, ttm->num_pages, 0, in radeon_ttm_tt_pin_userptr()
575 nents = dma_map_sg(rdev->dev, ttm->sg->sgl, ttm->sg->nents, direction); in radeon_ttm_tt_pin_userptr()
576 if (nents != ttm->sg->nents) in radeon_ttm_tt_pin_userptr()
579 drm_prime_sg_to_page_addr_arrays(ttm->sg, ttm->pages, in radeon_ttm_tt_pin_userptr()
585 kfree(ttm->sg); in radeon_ttm_tt_pin_userptr()
603 if (!ttm->sg->sgl) in radeon_ttm_tt_unpin_userptr()
607 dma_unmap_sg(rdev->dev, ttm->sg->sgl, ttm->sg->nents, direction); in radeon_ttm_tt_unpin_userptr()
609 for_each_sg_page(ttm->sg->sgl, &sg_iter, ttm->sg->nents, 0) { in radeon_ttm_tt_unpin_userptr()
618 sg_free_table(ttm->sg); in radeon_ttm_tt_unpin_userptr()
724 ttm->sg = kcalloc(1, sizeof(struct sg_table), GFP_KERNEL); in radeon_ttm_tt_populate()
[all …]
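
(Aside: a condensed sketch of the pin-userptr sequence above — build an sg_table from an array of pages, then DMA-map it; the direction and the treatment of a partial map as failure are assumptions.)

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    static int example_map_user_pages(struct device *dev, struct sg_table *sgt,
                                      struct page **pages, unsigned int npages)
    {
        int r, nents;

        r = sg_alloc_table_from_pages(sgt, pages, npages, 0,
                                      (unsigned long)npages << PAGE_SHIFT,
                                      GFP_KERNEL);
        if (r)
            return r;

        nents = dma_map_sg(dev, sgt->sgl, sgt->nents, DMA_BIDIRECTIONAL);
        if (nents != sgt->nents) {      /* partial map: give everything back */
            sg_free_table(sgt);
            return -ENOMEM;
        }
        return 0;
    }

sg_alloc_table_from_pages() merges physically contiguous pages, so sgt->nents may be smaller than npages; comparing dma_map_sg()'s return value against sgt->nents, as the radeon hit does, catches partial mappings.
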
/linux-4.1.27/net/ipx/
Dipx_route.c261 struct sockaddr_ipx *sg, *st; in ipxrtr_ioctl() local
267 sg = (struct sockaddr_ipx *)&rt.rt_gateway; in ipxrtr_ioctl()
272 sg->sipx_family != AF_IPX || in ipxrtr_ioctl()
283 f.ipx_router_network = sg->sipx_network; in ipxrtr_ioctl()
284 memcpy(f.ipx_router_node, sg->sipx_node, IPX_NODE_LEN); in ipxrtr_ioctl()
/linux-4.1.27/drivers/misc/carma/
Dcarma-fpga.c466 struct scatterlist *sg; in data_setup_corl_table() local
483 sg = table->sgl; in data_setup_corl_table()
485 sg_dma_address(sg) = fpga_start_addr(priv, i); in data_setup_corl_table()
486 sg_dma_len(sg) = REG_BLOCK_SIZE; in data_setup_corl_table()
487 sg = sg_next(sg); in data_setup_corl_table()
491 sg_dma_address(sg) = SYS_FPGA_BLOCK; in data_setup_corl_table()
492 sg_dma_len(sg) = REG_BLOCK_SIZE; in data_setup_corl_table()
493 sg = sg_next(sg); in data_setup_corl_table()
499 sg_dma_address(sg) = fpga_block_addr(priv, i, j); in data_setup_corl_table()
500 sg_dma_len(sg) = info->blk_size; in data_setup_corl_table()
[all …]
/linux-4.1.27/drivers/usb/misc/
Dusbtest.c477 static void free_sglist(struct scatterlist *sg, int nents) in free_sglist() argument
481 if (!sg) in free_sglist()
484 if (!sg_page(&sg[i])) in free_sglist()
486 kfree(sg_virt(&sg[i])); in free_sglist()
488 kfree(sg); in free_sglist()
494 struct scatterlist *sg; in alloc_sglist() local
504 sg = kmalloc_array(nents, sizeof(*sg), GFP_KERNEL); in alloc_sglist()
505 if (!sg) in alloc_sglist()
507 sg_init_table(sg, nents); in alloc_sglist()
515 free_sglist(sg, i); in alloc_sglist()
[all …]
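
(Aside: a sketch of alloc_sglist()'s allocate-or-unwind pattern; the kzalloc size and flags are illustrative.)

    #include <linux/scatterlist.h>
    #include <linux/slab.h>

    static struct scatterlist *example_alloc_sglist(int nents, unsigned int size)
    {
        struct scatterlist *sg;
        int i;

        sg = kmalloc_array(nents, sizeof(*sg), GFP_KERNEL);
        if (!sg)
            return NULL;
        sg_init_table(sg, nents);       /* zeroes entries, marks the last one */

        for (i = 0; i < nents; i++) {
            void *buf = kzalloc(size, GFP_KERNEL);

            if (!buf) {                 /* unwind the i buffers already hung */
                while (--i >= 0)
                    kfree(sg_virt(&sg[i]));
                kfree(sg);
                return NULL;
            }
            sg_set_buf(&sg[i], buf, size);
        }
        return sg;
    }
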
/linux-4.1.27/drivers/i2c/busses/
Di2c-at91.c87 struct scatterlist sg; member
189 dma_unmap_single(dev->dev, sg_dma_address(&dma->sg), in at91_twi_dma_cleanup()
217 dma_unmap_single(dev->dev, sg_dma_address(&dev->dma.sg), in at91_twi_write_data_dma_callback()
252 sg_dma_len(&dma->sg) = dev->buf_len; in at91_twi_write_data_dma()
253 sg_dma_address(&dma->sg) = dma_addr; in at91_twi_write_data_dma()
255 txdesc = dmaengine_prep_slave_sg(chan_tx, &dma->sg, 1, DMA_MEM_TO_DEV, in at91_twi_write_data_dma()
316 dma_unmap_single(dev->dev, sg_dma_address(&dev->dma.sg), in at91_twi_read_data_dma_callback()
344 dma->sg.dma_address = dma_addr; in at91_twi_read_data_dma()
345 sg_dma_len(&dma->sg) = dev->buf_len - 2; in at91_twi_read_data_dma()
347 rxdesc = dmaengine_prep_slave_sg(chan_rx, &dma->sg, 1, DMA_DEV_TO_MEM, in at91_twi_read_data_dma()
[all …]
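
(Aside: a minimal sketch of the one-entry dmaengine submission the i2c-at91 hits show; the channel, flags and callback wiring are assumptions.)

    #include <linux/dmaengine.h>

    static int example_submit_tx(struct dma_chan *chan, struct scatterlist *sg,
                                 dma_async_tx_callback done, void *param)
    {
        struct dma_async_tx_descriptor *txdesc;

        txdesc = dmaengine_prep_slave_sg(chan, sg, 1, DMA_MEM_TO_DEV,
                                         DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!txdesc)
            return -EINVAL;

        txdesc->callback = done;
        txdesc->callback_param = param;
        dmaengine_submit(txdesc);       /* queue the descriptor... */
        dma_async_issue_pending(chan);  /* ...and kick the channel */
        return 0;
    }
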
/linux-4.1.27/arch/powerpc/platforms/ps3/
Dsystem-bus.c650 struct scatterlist *sg; in ps3_sb_map_sg() local
653 for_each_sg(sgl, sg, nents, i) { in ps3_sb_map_sg()
654 int result = ps3_dma_map(dev->d_region, sg_phys(sg), in ps3_sb_map_sg()
655 sg->length, &sg->dma_address, 0); in ps3_sb_map_sg()
663 sg->dma_length = sg->length; in ps3_sb_map_sg()
670 static int ps3_ioc0_map_sg(struct device *_dev, struct scatterlist *sg, in ps3_ioc0_map_sg() argument
679 static void ps3_sb_unmap_sg(struct device *_dev, struct scatterlist *sg, in ps3_sb_unmap_sg() argument
687 static void ps3_ioc0_unmap_sg(struct device *_dev, struct scatterlist *sg, in ps3_ioc0_unmap_sg() argument
