in_sg             467 crypto/asymmetric_keys/asym_tpm.c 	struct scatterlist in_sg, out_sg;
in_sg             494 crypto/asymmetric_keys/asym_tpm.c 	sg_init_one(&in_sg, in, params->in_len);
in_sg             496 crypto/asymmetric_keys/asym_tpm.c 	akcipher_request_set_crypt(req, &in_sg, &out_sg, params->in_len,
in_sg             170 crypto/asymmetric_keys/public_key.c 	struct scatterlist in_sg, out_sg;
in_sg             210 crypto/asymmetric_keys/public_key.c 	sg_init_one(&in_sg, in, params->in_len);
in_sg             212 crypto/asymmetric_keys/public_key.c 	akcipher_request_set_crypt(req, &in_sg, &out_sg, params->in_len,
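
The two asymmetric_keys call sites above (asym_tpm.c and public_key.c) wrap one flat input buffer and one flat output buffer in single-entry scatterlists before queuing the akcipher request. A minimal sketch of that pattern, assuming kmalloc'd (linearly mapped) buffers and a request that already carries its key; the function and buffer names are hypothetical:

	#include <crypto/akcipher.h>
	#include <linux/scatterlist.h>

	static int akcipher_on_flat_bufs(struct akcipher_request *req,
					 void *in_buf, unsigned int in_len,
					 void *out_buf, unsigned int out_len)
	{
		struct scatterlist in_sg, out_sg;

		sg_init_one(&in_sg, in_buf, in_len);	/* one contiguous segment */
		sg_init_one(&out_sg, out_buf, out_len);
		akcipher_request_set_crypt(req, &in_sg, &out_sg, in_len, out_len);

		/* may also return -EINPROGRESS if the tfm is asynchronous */
		return crypto_akcipher_encrypt(req);
	}
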
in_sg             101 crypto/rsa-pkcs1pad.c 	struct scatterlist in_sg[2], out_sg[1];
in_sg             258 crypto/rsa-pkcs1pad.c 	pkcs1pad_sg_set_buf(req_ctx->in_sg, req_ctx->in_buf,
in_sg             266 crypto/rsa-pkcs1pad.c 	akcipher_request_set_crypt(&req_ctx->child_req, req_ctx->in_sg,
in_sg             417 crypto/rsa-pkcs1pad.c 	pkcs1pad_sg_set_buf(req_ctx->in_sg, req_ctx->in_buf,
in_sg             425 crypto/rsa-pkcs1pad.c 	akcipher_request_set_crypt(&req_ctx->child_req, req_ctx->in_sg,
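
rsa-pkcs1pad.c declares in_sg with two slots because the padded message is presented as the driver's own padding buffer chained to the caller's source list. A sketch of how such a two-slot prefix can be built from the stock scatterlist helpers (sg_prefix_buf is a hypothetical name, not the driver's pkcs1pad_sg_set_buf, which has more cases to handle):

	#include <linux/scatterlist.h>

	/* First slot carries our buffer; if a follow-on list exists, the
	 * second slot becomes a chain link pointing at it.
	 */
	static void sg_prefix_buf(struct scatterlist sg[2], void *buf,
				  size_t len, struct scatterlist *next)
	{
		sg_init_table(sg, next ? 2 : 1);
		sg_set_buf(sg, buf, len);
		if (next)
			sg_chain(sg, 2, next);
	}
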
in_sg             112 drivers/crypto/atmel-tdes.c 	struct scatterlist	*in_sg;
in_sg             326 drivers/crypto/atmel-tdes.c 		dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE);
in_sg             513 drivers/crypto/atmel-tdes.c 		in = IS_ALIGNED((u32)dd->in_sg->offset, sizeof(u32)) &&
in_sg             514 drivers/crypto/atmel-tdes.c 			IS_ALIGNED(dd->in_sg->length, dd->ctx->block_size);
in_sg             519 drivers/crypto/atmel-tdes.c 		if (sg_dma_len(dd->in_sg) != sg_dma_len(dd->out_sg))
in_sg             525 drivers/crypto/atmel-tdes.c 		count = min_t(size_t, dd->total, sg_dma_len(dd->in_sg));
in_sg             528 drivers/crypto/atmel-tdes.c 		err = dma_map_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE);
in_sg             538 drivers/crypto/atmel-tdes.c 			dma_unmap_sg(dd->dev, dd->in_sg, 1,
in_sg             543 drivers/crypto/atmel-tdes.c 		addr_in = sg_dma_address(dd->in_sg);
in_sg             550 drivers/crypto/atmel-tdes.c 		count = atmel_tdes_sg_copy(&dd->in_sg, &dd->in_offset,
in_sg             567 drivers/crypto/atmel-tdes.c 		dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE);
in_sg             619 drivers/crypto/atmel-tdes.c 	dd->in_sg = req->src;
in_sg             651 drivers/crypto/atmel-tdes.c 			dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE);
in_sg            1113 drivers/crypto/atmel-tdes.c 			dd->in_sg = sg_next(dd->in_sg);
in_sg            1115 drivers/crypto/atmel-tdes.c 			if (!dd->in_sg || !dd->out_sg)
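
atmel-tdes.c shows the classic map/program/unmap life cycle for one-entry lists, one per direction. A condensed sketch of that life cycle with the register programming elided; dma_map_sg() returns the number of entries actually mapped, so 0 means failure:

	#include <linux/dma-mapping.h>
	#include <linux/scatterlist.h>

	static int one_entry_dma(struct device *dev, struct scatterlist *in_sg,
				 struct scatterlist *out_sg)
	{
		dma_addr_t addr_in, addr_out;

		if (!dma_map_sg(dev, in_sg, 1, DMA_TO_DEVICE))
			return -EINVAL;
		if (!dma_map_sg(dev, out_sg, 1, DMA_FROM_DEVICE)) {
			dma_unmap_sg(dev, in_sg, 1, DMA_TO_DEVICE);
			return -EINVAL;
		}

		addr_in = sg_dma_address(in_sg);
		addr_out = sg_dma_address(out_sg);
		dev_dbg(dev, "in %pad out %pad len %u\n", &addr_in, &addr_out,
			sg_dma_len(in_sg));
		/* ...program the engine with these, run the transfer,
		 * then on completion:
		 */
		dma_unmap_sg(dev, out_sg, 1, DMA_FROM_DEVICE);
		dma_unmap_sg(dev, in_sg, 1, DMA_TO_DEVICE);
		return 0;
	}
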
in_sg             164 drivers/crypto/nx/nx-aes-ccm.c 	struct nx_sg *nx_insg = nx_ctx->in_sg;
in_sg             252 drivers/crypto/nx/nx-aes-ccm.c 		nx_ctx->op.inlen = (nx_ctx->in_sg - nx_insg) *
in_sg             285 drivers/crypto/nx/nx-aes-ccm.c 			nx_insg = nx_walk_and_build(nx_ctx->in_sg,
in_sg             299 drivers/crypto/nx/nx-aes-ccm.c 			nx_ctx->op_aead.inlen = (nx_ctx->in_sg - nx_insg) *
in_sg             107 drivers/crypto/nx/nx-aes-gcm.c 	struct nx_sg *nx_sg = nx_ctx->in_sg;
in_sg             137 drivers/crypto/nx/nx-aes-gcm.c 		nx_sg = nx_walk_and_build(nx_ctx->in_sg, max_sg_len,
in_sg             145 drivers/crypto/nx/nx-aes-gcm.c 		nx_ctx->op_aead.inlen = (nx_ctx->in_sg - nx_sg)
in_sg             205 drivers/crypto/nx/nx-aes-gcm.c 		nx_sg = nx_walk_and_build(nx_ctx->in_sg, max_sg_len,
in_sg             213 drivers/crypto/nx/nx-aes-gcm.c 		nx_ctx->op.inlen = (nx_ctx->in_sg - nx_sg)
in_sg             251 drivers/crypto/nx/nx-aes-gcm.c 	struct nx_sg *in_sg, *out_sg;
in_sg             271 drivers/crypto/nx/nx-aes-gcm.c 	in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) desc->info,
in_sg             284 drivers/crypto/nx/nx-aes-gcm.c 	nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
in_sg              63 drivers/crypto/nx/nx-aes-xcbc.c 	struct nx_sg *in_sg, *out_sg;
in_sg              81 drivers/crypto/nx/nx-aes-xcbc.c 	in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) keys, &len,
in_sg              93 drivers/crypto/nx/nx-aes-xcbc.c 	nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
in_sg             108 drivers/crypto/nx/nx-aes-xcbc.c 	in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) keys[1], &len,
in_sg             121 drivers/crypto/nx/nx-aes-xcbc.c 	nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
in_sg             172 drivers/crypto/nx/nx-aes-xcbc.c 	struct nx_sg *in_sg;
in_sg             195 drivers/crypto/nx/nx-aes-xcbc.c 	in_sg = nx_ctx->in_sg;
in_sg             230 drivers/crypto/nx/nx-aes-xcbc.c 			in_sg = nx_build_sg_list(nx_ctx->in_sg,
in_sg             241 drivers/crypto/nx/nx-aes-xcbc.c 		in_sg = nx_build_sg_list(in_sg,
in_sg             251 drivers/crypto/nx/nx-aes-xcbc.c 		nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) *
in_sg             280 drivers/crypto/nx/nx-aes-xcbc.c 		in_sg = nx_ctx->in_sg;
in_sg             297 drivers/crypto/nx/nx-aes-xcbc.c 	struct nx_sg *in_sg, *out_sg;
in_sg             324 drivers/crypto/nx/nx-aes-xcbc.c 	in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *)sctx->buffer,
in_sg             341 drivers/crypto/nx/nx-aes-xcbc.c 	nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
in_sg             104 drivers/crypto/nx/nx-sha256.c 		struct nx_sg *in_sg = nx_ctx->in_sg;
in_sg             108 drivers/crypto/nx/nx-sha256.c 			in_sg = nx_build_sg_list(in_sg,
in_sg             117 drivers/crypto/nx/nx-sha256.c 			used_sgs = in_sg - nx_ctx->in_sg;
in_sg             132 drivers/crypto/nx/nx-sha256.c 		in_sg = nx_build_sg_list(in_sg, (u8 *) data,
in_sg             135 drivers/crypto/nx/nx-sha256.c 		nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
in_sg             181 drivers/crypto/nx/nx-sha256.c 	struct nx_sg *in_sg, *out_sg;
in_sg             210 drivers/crypto/nx/nx-sha256.c 	in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) sctx->buf,
in_sg             226 drivers/crypto/nx/nx-sha256.c 	nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
in_sg             104 drivers/crypto/nx/nx-sha512.c 		struct nx_sg *in_sg = nx_ctx->in_sg;
in_sg             108 drivers/crypto/nx/nx-sha512.c 			in_sg = nx_build_sg_list(in_sg,
in_sg             116 drivers/crypto/nx/nx-sha512.c 			used_sgs = in_sg - nx_ctx->in_sg;
in_sg             131 drivers/crypto/nx/nx-sha512.c 		in_sg = nx_build_sg_list(in_sg, (u8 *) data,
in_sg             134 drivers/crypto/nx/nx-sha512.c 		nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
in_sg             184 drivers/crypto/nx/nx-sha512.c 	struct nx_sg *in_sg, *out_sg;
in_sg             219 drivers/crypto/nx/nx-sha512.c 	in_sg = nx_build_sg_list(nx_ctx->in_sg, sctx->buf, &len,
in_sg             231 drivers/crypto/nx/nx-sha512.c 	nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg);
in_sg             268 drivers/crypto/nx/nx.c 	struct nx_sg *nx_insg = nx_ctx->in_sg;
in_sg             293 drivers/crypto/nx/nx.c 	nx_ctx->op.inlen = trim_sg_list(nx_ctx->in_sg, nx_insg, delta, nbytes);
in_sg             313 drivers/crypto/nx/nx.c 	nx_ctx->op.in = __pa(nx_ctx->in_sg);
in_sg             321 drivers/crypto/nx/nx.c 		nx_ctx->op_aead.in = __pa(nx_ctx->in_sg);
in_sg             675 drivers/crypto/nx/nx.c 	nx_ctx->in_sg = (struct nx_sg *)((u8 *)nx_ctx->csbcpb + NX_PAGE_SIZE);
in_sg             676 drivers/crypto/nx/nx.c 	nx_ctx->out_sg = (struct nx_sg *)((u8 *)nx_ctx->in_sg + NX_PAGE_SIZE);
in_sg             751 drivers/crypto/nx/nx.c 	nx_ctx->in_sg = NULL;
in_sg             126 drivers/crypto/nx/nx.h 	struct nx_sg *in_sg;      /* aligned pointer into kmem to an sg list */
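
The NX driver carves its kzalloc'd context so that the csbcpb, in_sg and out_sg regions each occupy one NX_PAGE_SIZE page (nx.c lines 675-676 above), then fills the in_sg page through nx_build_sg_list(), which returns the next free slot; op.inlen is then derived from the pointer difference between the list base and that slot, scaled by sizeof(struct nx_sg). A generic sketch of that append-and-measure convention; struct hw_sg and hw_sg_append() are hypothetical stand-ins for the driver's own types:

	#include <linux/compiler.h>
	#include <linux/mm.h>		/* __pa() */
	#include <linux/types.h>

	struct hw_sg {			/* one hardware scatter/gather entry */
		u64 addr;
		u32 len;
	} __packed;

	static struct hw_sg *hw_sg_append(struct hw_sg *slot, void *buf, u32 len)
	{
		slot->addr = __pa(buf);	/* the engine wants physical addresses */
		slot->len = len;
		return slot + 1;	/* next free slot, for the caller's math */
	}

	/* usage: end = hw_sg_append(base, buf, len);
	 *        inlen = (end - base) * sizeof(struct hw_sg);
	 */
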
in_sg              33 drivers/crypto/omap-aes-gcm.c 	dd->in_sg = NULL;
in_sg              53 drivers/crypto/omap-aes-gcm.c 	dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE);
in_sg             137 drivers/crypto/omap-aes-gcm.c 	dd->in_sg = dd->in_sgl;
in_sg             158 drivers/crypto/omap-aes-gcm.c 	dd->in_sg_len = sg_nents_for_len(dd->in_sg, alen + clen);
in_sg             268 drivers/crypto/omap-aes.c 			      struct scatterlist *in_sg,
in_sg             277 drivers/crypto/omap-aes.c 		scatterwalk_start(&dd->in_walk, dd->in_sg);
in_sg             286 drivers/crypto/omap-aes.c 	dma_sync_sg_for_device(dd->dev, dd->in_sg, in_sg_len, DMA_TO_DEVICE);
in_sg             305 drivers/crypto/omap-aes.c 	tx_in = dmaengine_prep_slave_sg(dd->dma_lch_in, in_sg, in_sg_len,
in_sg             357 drivers/crypto/omap-aes.c 		err = dma_map_sg(dd->dev, dd->in_sg, dd->in_sg_len,
in_sg             372 drivers/crypto/omap-aes.c 	err = omap_aes_crypt_dma(dd, dd->in_sg, dd->out_sg, dd->in_sg_len,
in_sg             375 drivers/crypto/omap-aes.c 		dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE);
in_sg             432 drivers/crypto/omap-aes.c 	dd->in_sg = req->src;
in_sg             440 drivers/crypto/omap-aes.c 	ret = omap_crypto_align_sg(&dd->in_sg, dd->total, AES_BLOCK_SIZE,
in_sg             452 drivers/crypto/omap-aes.c 	dd->in_sg_len = sg_nents_for_len(dd->in_sg, dd->total);
in_sg             491 drivers/crypto/omap-aes.c 		dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE);
in_sg             892 drivers/crypto/omap-aes.c 		BUG_ON(!dd->in_sg);
in_sg             894 drivers/crypto/omap-aes.c 		BUG_ON(_calc_walked(in) > dd->in_sg->length);
in_sg             896 drivers/crypto/omap-aes.c 		src = sg_virt(dd->in_sg) + _calc_walked(in);
in_sg             902 drivers/crypto/omap-aes.c 			if (dd->in_sg->length == _calc_walked(in)) {
in_sg             903 drivers/crypto/omap-aes.c 				dd->in_sg = sg_next(dd->in_sg);
in_sg             904 drivers/crypto/omap-aes.c 				if (dd->in_sg) {
in_sg             906 drivers/crypto/omap-aes.c 							  dd->in_sg);
in_sg             907 drivers/crypto/omap-aes.c 					src = sg_virt(dd->in_sg) +
in_sg             178 drivers/crypto/omap-aes.h 	struct scatterlist		*in_sg;
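
The omap-aes IRQ path above does PIO: it reads the source through sg_virt() and hops to the next entry with sg_next() once the walked byte count reaches the entry's length. A sketch of that walk as a word-at-a-time iterator, assuming lowmem pages (sg_virt() needs a kernel mapping) and entry lengths that are multiples of 4, which the driver's alignment checks guarantee:

	#include <linux/scatterlist.h>

	static u32 *sg_take_word(struct scatterlist **sgp, unsigned int *off)
	{
		if (*off == (*sgp)->length) {	/* current entry exhausted */
			*sgp = sg_next(*sgp);	/* NULL at the end of the list */
			*off = 0;
			if (!*sgp)
				return NULL;
		}
		*off += sizeof(u32);
		return (u32 *)(sg_virt(*sgp) + *off) - 1; /* word just claimed */
	}

The same walk appears almost verbatim in omap-des.c below.
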
in_sg             151 drivers/crypto/omap-des.c 	struct scatterlist		*in_sg;
in_sg             377 drivers/crypto/omap-des.c 		struct scatterlist *in_sg, struct scatterlist *out_sg,
in_sg             387 drivers/crypto/omap-des.c 		scatterwalk_start(&dd->in_walk, dd->in_sg);
in_sg             396 drivers/crypto/omap-des.c 	dma_sync_sg_for_device(dd->dev, dd->in_sg, in_sg_len, DMA_TO_DEVICE);
in_sg             415 drivers/crypto/omap-des.c 	tx_in = dmaengine_prep_slave_sg(dd->dma_lch_in, in_sg, in_sg_len,
in_sg             466 drivers/crypto/omap-des.c 		err = dma_map_sg(dd->dev, dd->in_sg, dd->in_sg_len,
in_sg             481 drivers/crypto/omap-des.c 	err = omap_des_crypt_dma(tfm, dd->in_sg, dd->out_sg, dd->in_sg_len,
in_sg             484 drivers/crypto/omap-des.c 		dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE);
in_sg             543 drivers/crypto/omap-des.c 	dd->in_sg = req->src;
in_sg             551 drivers/crypto/omap-des.c 	ret = omap_crypto_align_sg(&dd->in_sg, dd->total, DES_BLOCK_SIZE,
in_sg             563 drivers/crypto/omap-des.c 	dd->in_sg_len = sg_nents_for_len(dd->in_sg, dd->total);
in_sg             605 drivers/crypto/omap-des.c 		dma_unmap_sg(dd->dev, dd->in_sg, dd->in_sg_len, DMA_TO_DEVICE);
in_sg             863 drivers/crypto/omap-des.c 		BUG_ON(!dd->in_sg);
in_sg             865 drivers/crypto/omap-des.c 		BUG_ON(_calc_walked(in) > dd->in_sg->length);
in_sg             867 drivers/crypto/omap-des.c 		src = sg_virt(dd->in_sg) + _calc_walked(in);
in_sg             873 drivers/crypto/omap-des.c 			if (dd->in_sg->length == _calc_walked(in)) {
in_sg             874 drivers/crypto/omap-des.c 				dd->in_sg = sg_next(dd->in_sg);
in_sg             875 drivers/crypto/omap-des.c 				if (dd->in_sg) {
in_sg             877 drivers/crypto/omap-des.c 							  dd->in_sg);
in_sg             878 drivers/crypto/omap-des.c 					src = sg_virt(dd->in_sg) +
in_sg             183 drivers/crypto/sahara.c 	struct scatterlist	*in_sg;
in_sg             221 drivers/crypto/sahara.c 	struct scatterlist	*in_sg;
in_sg             470 drivers/crypto/sahara.c 	dev->nb_in_sg = sg_nents_for_len(dev->in_sg, dev->total);
in_sg             486 drivers/crypto/sahara.c 	ret = dma_map_sg(dev->device, dev->in_sg, dev->nb_in_sg,
in_sg             501 drivers/crypto/sahara.c 	sg = dev->in_sg;
in_sg             544 drivers/crypto/sahara.c 	dma_unmap_sg(dev->device, dev->in_sg, dev->nb_in_sg,
in_sg             565 drivers/crypto/sahara.c 	dev->in_sg = req->src;
in_sg             594 drivers/crypto/sahara.c 	dma_unmap_sg(dev->device, dev->in_sg, dev->nb_in_sg,
in_sg             802 drivers/crypto/sahara.c 	dev->in_sg = rctx->in_sg;
in_sg             804 drivers/crypto/sahara.c 	dev->nb_in_sg = sg_nents_for_len(dev->in_sg, rctx->total);
in_sg             815 drivers/crypto/sahara.c 	sg = dev->in_sg;
in_sg             816 drivers/crypto/sahara.c 	ret = dma_map_sg(dev->device, dev->in_sg, dev->nb_in_sg, DMA_TO_DEVICE);
in_sg             973 drivers/crypto/sahara.c 		rctx->in_sg = rctx->in_sg_chain;
in_sg             979 drivers/crypto/sahara.c 			rctx->in_sg = req->src;
in_sg             981 drivers/crypto/sahara.c 			rctx->in_sg = rctx->in_sg_chain;
in_sg             983 drivers/crypto/sahara.c 		sg_init_one(rctx->in_sg, rctx->rembuf, rctx->buf_cnt);
in_sg             987 drivers/crypto/sahara.c 		rctx->in_sg = req->src;
in_sg             989 drivers/crypto/sahara.c 		req->src = rctx->in_sg;
in_sg            1037 drivers/crypto/sahara.c 		dma_unmap_sg(dev->device, dev->in_sg, dev->nb_in_sg,
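
sahara.c sizes its mappings with sg_nents_for_len(), which counts how many entries are needed to cover the request length and returns -EINVAL if the list is too short; the same count must then be passed to dma_map_sg()/dma_unmap_sg(). A minimal sketch of that pairing:

	#include <linux/dma-mapping.h>
	#include <linux/scatterlist.h>

	static int map_request_src(struct device *dev, struct scatterlist *src,
				   unsigned int total, int *nentsp)
	{
		int nents = sg_nents_for_len(src, total);

		if (nents < 0)		/* list shorter than total */
			return nents;
		if (!dma_map_sg(dev, src, nents, DMA_TO_DEVICE))
			return -ENOMEM;
		*nentsp = nents;	/* keep for dma_unmap_sg() later */
		return 0;
	}
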
in_sg             107 drivers/crypto/stm32/stm32-cryp.c #define _walked_in              (cryp->in_walk.offset - cryp->in_sg->offset)
in_sg             151 drivers/crypto/stm32/stm32-cryp.c 	struct scatterlist      *in_sg;
in_sg             317 drivers/crypto/stm32/stm32-cryp.c 	ret = stm32_cryp_check_aligned(cryp->in_sg, cryp->total_in,
in_sg             366 drivers/crypto/stm32/stm32-cryp.c 	sg_copy_buf(buf_in, cryp->in_sg, 0, cryp->total_in, 0);
in_sg             369 drivers/crypto/stm32/stm32-cryp.c 	cryp->in_sg = &cryp->in_sgl;
in_sg             981 drivers/crypto/stm32/stm32-cryp.c 	cryp->in_sg = req ? req->src : areq->src;
in_sg             985 drivers/crypto/stm32/stm32-cryp.c 	cryp->in_sg_len = sg_nents_for_len(cryp->in_sg, cryp->total_in);
in_sg            1003 drivers/crypto/stm32/stm32-cryp.c 	scatterwalk_start(&cryp->in_walk, cryp->in_sg);
in_sg            1090 drivers/crypto/stm32/stm32-cryp.c 	if (unlikely(cryp->in_sg->length == _walked_in)) {
in_sg            1091 drivers/crypto/stm32/stm32-cryp.c 		cryp->in_sg = sg_next(cryp->in_sg);
in_sg            1092 drivers/crypto/stm32/stm32-cryp.c 		if (cryp->in_sg) {
in_sg            1093 drivers/crypto/stm32/stm32-cryp.c 			scatterwalk_start(&cryp->in_walk, cryp->in_sg);
in_sg            1094 drivers/crypto/stm32/stm32-cryp.c 			return (sg_virt(cryp->in_sg) + _walked_in);
in_sg            1188 drivers/crypto/stm32/stm32-cryp.c 		scatterwalk_map_and_copy(in_tag, cryp->in_sg,
in_sg            1287 drivers/crypto/stm32/stm32-cryp.c 	src = sg_virt(cryp->in_sg) + _walked_in;
in_sg            1535 drivers/crypto/stm32/stm32-cryp.c 	src = sg_virt(cryp->in_sg) + _walked_in;
in_sg            1588 drivers/crypto/stm32/stm32-cryp.c 	src = sg_virt(cryp->in_sg) + _walked_in;
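
For the authentication tag, stm32-cryp.c uses scatterwalk_map_and_copy(), which kmaps its way through the list and copies nbytes starting at a byte offset; the final argument selects the direction (0: sg to buffer, 1: buffer to sg). A sketch reading a 16-byte tag that trails the ciphertext:

	#include <crypto/scatterwalk.h>
	#include <linux/types.h>

	static void read_trailing_tag(struct scatterlist *sg,
				      unsigned int cryptlen, u8 tag[16])
	{
		/* copy 16 bytes out of the list, right after the payload */
		scatterwalk_map_and_copy(tag, sg, cryptlen, 16, 0);
	}
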
in_sg             149 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c 	struct scatterlist *in_sg = areq->src;
in_sg             190 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c 	while (in_sg && no_chunk == 1) {
in_sg             191 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c 		if (in_sg->length % 4)
in_sg             193 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c 		in_sg = sg_next(in_sg);
in_sg             183 drivers/crypto/sunxi-ss/sun4i-ss-hash.c 	struct scatterlist *in_sg = areq->src;
in_sg             248 drivers/crypto/sunxi-ss/sun4i-ss-hash.c 	while (in_sg && i == 1) {
in_sg             249 drivers/crypto/sunxi-ss/sun4i-ss-hash.c 		if (in_sg->length % 4)
in_sg             251 drivers/crypto/sunxi-ss/sun4i-ss-hash.c 		in_sg = sg_next(in_sg);
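
Both sun4i-ss files run the same pre-flight probe: the Security System FIFO is fed 32-bit words, so a single sg entry whose length is not a multiple of 4 disqualifies the whole request from the hardware path. The probe, reduced to a predicate:

	#include <linux/scatterlist.h>

	static bool sg_list_word_aligned(struct scatterlist *sg)
	{
		while (sg) {
			if (sg->length % 4)
				return false;	/* fall back to software */
			sg = sg_next(sg);
		}
		return true;
	}
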
in_sg            1005 drivers/staging/wusbcore/wa-xfer.c static struct scatterlist *wa_xfer_create_subset_sg(struct scatterlist *in_sg,
in_sg            1012 drivers/staging/wusbcore/wa-xfer.c 	struct scatterlist *current_xfer_sg = in_sg;
in_sg              80 lib/sg_split.c 	struct scatterlist *in_sg, *out_sg;
in_sg              84 lib/sg_split.c 		in_sg = split->in_sg0;
in_sg              87 lib/sg_split.c 			*out_sg = *in_sg;
in_sg              96 lib/sg_split.c 			in_sg = sg_next(in_sg);
in_sg             106 lib/sg_split.c 	struct scatterlist *in_sg, *out_sg;
in_sg             110 lib/sg_split.c 		in_sg = split->in_sg0;
in_sg             113 lib/sg_split.c 			sg_dma_address(out_sg) = sg_dma_address(in_sg);
in_sg             114 lib/sg_split.c 			sg_dma_len(out_sg) = sg_dma_len(in_sg);
in_sg             119 lib/sg_split.c 			in_sg = sg_next(in_sg);
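
The two lib/sg_split.c loops above duplicate the page slots and, for mapped input, the DMA address/length fields into freshly allocated output lists. Callers reach them through sg_split(); a sketch of a two-way split, assuming the sg_split() prototype from <linux/scatterlist.h> and that the caller kfree()s out[0]/out[1] when done with them:

	#include <linux/scatterlist.h>

	static int split_in_two(struct scatterlist *in, int in_mapped_nents,
				size_t len0, size_t len1,
				struct scatterlist **out, int *out_nents)
	{
		size_t sizes[2] = { len0, len1 };

		/* skip = 0: start splitting at the first byte of "in" */
		return sg_split(in, in_mapped_nents, 0, 2, sizes,
				out, out_nents, GFP_KERNEL);
	}
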
in_sg             149 net/vmw_vsock/virtio_transport.c 		int ret, in_sg = 0, out_sg = 0;
in_sg             174 net/vmw_vsock/virtio_transport.c 		ret = virtqueue_add_sgs(vq, sgs, out_sg, in_sg, pkt, GFP_KERNEL);
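
Note that in virtio_transport.c in_sg is not a scatterlist at all but the count of device-writable entries handed to virtqueue_add_sgs(): the sgs[] array carries the driver-writable ("out") entries first, then the device-writable ("in") ones, and the two counts mark the boundary. A sketch of the transmit-side call, where everything is "out" (queue_pkt and its parameters are hypothetical):

	#include <linux/scatterlist.h>
	#include <linux/virtio.h>

	static int queue_pkt(struct virtqueue *vq, void *hdr, size_t hdr_len,
			     void *payload, size_t payload_len, void *cookie)
	{
		struct scatterlist hdr_sg, payload_sg, *sgs[2];
		int out_sg = 0, in_sg = 0;

		sg_init_one(&hdr_sg, hdr, hdr_len);
		sgs[out_sg++] = &hdr_sg;
		if (payload_len) {
			sg_init_one(&payload_sg, payload, payload_len);
			sgs[out_sg++] = &payload_sg;
		}

		/* cookie is returned by virtqueue_get_buf() on completion */
		return virtqueue_add_sgs(vq, sgs, out_sg, in_sg, cookie,
					 GFP_KERNEL);
	}
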