sg_in            1793 crypto/drbg.c  	sg_init_table(&drbg->sg_in, 1);
sg_in            1824 crypto/drbg.c  	struct scatterlist *sg_in = &drbg->sg_in, *sg_out = &drbg->sg_out;
sg_in            1830 crypto/drbg.c  		sg_set_buf(sg_in, inbuf, inlen);
sg_in            1835 crypto/drbg.c  		sg_set_buf(sg_in, drbg->outscratchpad, scratchpad_use);
sg_in            1842 crypto/drbg.c  		skcipher_request_set_crypt(drbg->ctr_req, sg_in, sg_out,
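
The crypto/drbg.c hits above show the DRBG's CTR-mode operation: one-entry scatterlists embedded in the state (initialized once, line 1793) are re-pointed per call at either the caller's input or the output scratchpad, then both are handed to skcipher_request_set_crypt(). A minimal sketch of that flow, assuming a preallocated request whose callback was set to crypto_req_done(); the my_* names and the out scratch buffer are illustrative, not the DRBG's real helpers:

#include <crypto/skcipher.h>
#include <linux/scatterlist.h>

struct my_ctx {
	struct skcipher_request *req;		/* preallocated CTR request */
	struct crypto_wait wait;		/* callback: crypto_req_done */
	struct scatterlist sg_in, sg_out;	/* one entry each */
	u8 *out;				/* output scratch buffer */
};

static void my_ctx_init_sgls(struct my_ctx *ctx)
{
	/* done once at allocation time, as at drbg.c line 1793 */
	sg_init_table(&ctx->sg_in, 1);
	sg_init_table(&ctx->sg_out, 1);
	crypto_init_wait(&ctx->wait);
}

static int my_ctr_once(struct my_ctx *ctx, const u8 *in, u32 len, u8 *iv)
{
	/* re-point the single entries at this call's buffers */
	sg_set_buf(&ctx->sg_in, in, len);
	sg_set_buf(&ctx->sg_out, ctx->out, len);
	skcipher_request_set_crypt(ctx->req, &ctx->sg_in, &ctx->sg_out,
				   len, iv);
	return crypto_wait_req(crypto_skcipher_encrypt(ctx->req),
			       &ctx->wait);
}
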
sg_in             103 drivers/block/cryptoloop.c 	struct scatterlist sg_in;
sg_in             115 drivers/block/cryptoloop.c 	sg_init_table(&sg_in, 1);
sg_in             136 drivers/block/cryptoloop.c 		sg_set_page(&sg_in, in_page, sz, in_offs);
sg_in             139 drivers/block/cryptoloop.c 		skcipher_request_set_crypt(req, &sg_in, &sg_out, sz, iv);
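
cryptoloop builds the same one-entry lists on the stack, but points them at page fragments with sg_set_page() so each sz-byte chunk is transformed without copying. A hedged sketch of one loop iteration, assuming the request's tfm and callback are already configured (my_crypt_chunk is a hypothetical helper):

#include <crypto/skcipher.h>
#include <linux/scatterlist.h>

static int my_crypt_chunk(struct skcipher_request *req, bool encrypt,
			  struct page *in_page, unsigned int in_offs,
			  struct page *out_page, unsigned int out_offs,
			  unsigned int sz, u8 *iv)
{
	struct scatterlist sg_in, sg_out;

	sg_init_table(&sg_in, 1);
	sg_init_table(&sg_out, 1);
	/* map the page fragments directly; no bounce buffer needed */
	sg_set_page(&sg_in, in_page, sz, in_offs);
	sg_set_page(&sg_out, out_page, sz, out_offs);

	skcipher_request_set_crypt(req, &sg_in, &sg_out, sz, iv);
	return encrypt ? crypto_skcipher_encrypt(req)
		       : crypto_skcipher_decrypt(req);
}
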
sg_in              39 drivers/crypto/qce/dma.h int qce_dma_prep_sgs(struct qce_dma_data *dma, struct scatterlist *sg_in,
sg_in             154 drivers/crypto/rockchip/rk3288_crypto.c 	struct scatterlist *sg_in, *sg_out;
sg_in             156 drivers/crypto/rockchip/rk3288_crypto.c 	sg_in = dev->aligned ? dev->sg_src : &dev->sg_tmp;
sg_in             157 drivers/crypto/rockchip/rk3288_crypto.c 	dma_unmap_sg(dev->dev, sg_in, 1, DMA_TO_DEVICE);
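
The rk3288 hit illustrates the bookkeeping that goes with DMA mapping: the driver maps either the caller's list (dev->sg_src) or an aligned bounce entry (dev->sg_tmp), and the unmap must target whichever list was actually mapped. A sketch of that pairing, with an illustrative struct standing in for the driver state:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

struct my_dev {
	struct device *dev;
	bool aligned;			/* caller's SG usable as-is? */
	struct scatterlist *sg_src;	/* caller's list */
	struct scatterlist sg_tmp;	/* aligned bounce entry */
};

static void my_unmap_src(struct my_dev *d)
{
	/* unmap whichever list dma_map_sg() was called on */
	struct scatterlist *sg_in = d->aligned ? d->sg_src : &d->sg_tmp;

	dma_unmap_sg(d->dev, sg_in, 1, DMA_TO_DEVICE);
}
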
sg_in              82 drivers/md/dm-crypt.c 	struct scatterlist sg_in[4];
sg_in             509 drivers/md/dm-crypt.c 		sg = crypt_get_sg_data(cc, dmreq->sg_in);
sg_in             658 drivers/md/dm-crypt.c 		sg = crypt_get_sg_data(cc, dmreq->sg_in);
sg_in            1005 drivers/md/dm-crypt.c 	sg_init_table(dmreq->sg_in, 4);
sg_in            1006 drivers/md/dm-crypt.c 	sg_set_buf(&dmreq->sg_in[0], sector, sizeof(uint64_t));
sg_in            1007 drivers/md/dm-crypt.c 	sg_set_buf(&dmreq->sg_in[1], org_iv, cc->iv_size);
sg_in            1008 drivers/md/dm-crypt.c 	sg_set_page(&dmreq->sg_in[2], bv_in.bv_page, cc->sector_size, bv_in.bv_offset);
sg_in            1009 drivers/md/dm-crypt.c 	sg_set_buf(&dmreq->sg_in[3], tag, cc->integrity_tag_size);
sg_in            1035 drivers/md/dm-crypt.c 		aead_request_set_crypt(req, dmreq->sg_in, dmreq->sg_out,
sg_in            1042 drivers/md/dm-crypt.c 		aead_request_set_crypt(req, dmreq->sg_in, dmreq->sg_out,
sg_in            1069 drivers/md/dm-crypt.c 	struct scatterlist *sg_in, *sg_out;
sg_in            1095 drivers/md/dm-crypt.c 	sg_in  = &dmreq->sg_in[0];
sg_in            1098 drivers/md/dm-crypt.c 	sg_init_table(sg_in, 1);
sg_in            1099 drivers/md/dm-crypt.c 	sg_set_page(sg_in, bv_in.bv_page, cc->sector_size, bv_in.bv_offset);
sg_in            1120 drivers/md/dm-crypt.c 	skcipher_request_set_crypt(req, sg_in, sg_out, cc->sector_size, iv);
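
dm-crypt is the richest user here. For authenticated (AEAD) modes it lays out a fixed 4-entry scatterlist per sector, [ sector number | IV | sector data | integrity tag ], marks the first two entries as associated data, and falls back to a plain single-entry list for skcipher modes (lines 1095-1099). A hedged reconstruction of the AEAD layout, with illustrative names and sizes:

#include <crypto/aead.h>
#include <linux/scatterlist.h>

static void my_build_aead_sector(struct aead_request *req,
				 struct scatterlist sg_in[4],
				 struct scatterlist *sg_out,
				 __le64 *sector,
				 u8 *org_iv, unsigned int iv_size,
				 struct page *page, unsigned int offs,
				 unsigned int sector_size,
				 u8 *tag, unsigned int tag_size, u8 *iv)
{
	sg_init_table(sg_in, 4);
	sg_set_buf(&sg_in[0], sector, sizeof(*sector));	/* AAD, part 1 */
	sg_set_buf(&sg_in[1], org_iv, iv_size);		/* AAD, part 2 */
	sg_set_page(&sg_in[2], page, sector_size, offs);/* payload */
	sg_set_buf(&sg_in[3], tag, tag_size);		/* auth tag */

	/* first two entries are authenticated but not encrypted */
	aead_request_set_ad(req, sizeof(*sector) + iv_size);
	/* encrypt: cryptlen is the payload; decrypt adds tag_size */
	aead_request_set_crypt(req, sg_in, sg_out, sector_size, iv);
}
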
sg_in             128 include/crypto/drbg.h 	struct scatterlist sg_in, sg_out;	/* CTR mode SGLs */
sg_in              55 net/tls/tls_device_fallback.c 	struct scatterlist sg_in[3];
sg_in              82 net/tls/tls_device_fallback.c 	sg_init_table(sg_in, ARRAY_SIZE(sg_in));
sg_in              84 net/tls/tls_device_fallback.c 	sg_set_buf(sg_in, aad, TLS_AAD_SPACE_SIZE);
sg_in              86 net/tls/tls_device_fallback.c 	chain_to_walk(sg_in + 1, in);
sg_in             113 net/tls/tls_device_fallback.c 	aead_request_set_crypt(aead_req, sg_in, sg_out, len, iv);
sg_in             141 net/tls/tls_device_fallback.c 			   struct crypto_aead *aead, struct scatterlist *sg_in,
sg_in             148 net/tls/tls_device_fallback.c 	scatterwalk_start(&in, sg_in);
sg_in             228 net/tls/tls_device_fallback.c static int fill_sg_in(struct scatterlist *sg_in,
sg_in             275 net/tls/tls_device_fallback.c 		sg_set_page(sg_in + i, skb_frag_page(frag),
sg_in             281 net/tls/tls_device_fallback.c 			sg_in[i].length += remaining;
sg_in             286 net/tls/tls_device_fallback.c 	if (skb_to_sgvec(skb, &sg_in[i], tcp_payload_offset, payload_len) < 0)
sg_in             309 net/tls/tls_device_fallback.c 				   struct scatterlist *sg_in,
sg_in             350 net/tls/tls_device_fallback.c 	if (tls_enc_records(aead_req, ctx->aead_send, sg_in, sg_out, aad, iv,
sg_in             378 net/tls/tls_device_fallback.c 	struct scatterlist *sg_in, sg_out[3];
sg_in             394 net/tls/tls_device_fallback.c 	sg_in = kmalloc_array(sg_in_max_elements, sizeof(*sg_in), GFP_ATOMIC);
sg_in             395 net/tls/tls_device_fallback.c 	if (!sg_in)
sg_in             398 net/tls/tls_device_fallback.c 	sg_init_table(sg_in, sg_in_max_elements);
sg_in             401 net/tls/tls_device_fallback.c 	if (fill_sg_in(sg_in, skb, ctx, &rcd_sn, &sync_size, &resync_sgs)) {
sg_in             408 net/tls/tls_device_fallback.c 	nskb = tls_enc_skb(tls_ctx, sg_out, sg_in, skb, sync_size, rcd_sn);
sg_in             412 net/tls/tls_device_fallback.c 		put_page(sg_page(&sg_in[--resync_sgs]));
sg_in             413 net/tls/tls_device_fallback.c 	kfree(sg_in);
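
tls_device_fallback.c re-encrypts TLS records in software when the device cannot: sg_in[0] carries the AAD, the packet payload follows (the file's internal chain_to_walk() splices in the current walk position), and fill_sg_in() gathers the skb frags into an array sized with kmalloc_array(). A simplified sketch of the input layout, using plain sg_chain() in place of the file-local chain_to_walk(); names are illustrative:

#include <crypto/aead.h>
#include <linux/kernel.h>
#include <linux/scatterlist.h>

static int my_tls_record_encrypt(struct aead_request *req,
				 u8 *aad, unsigned int aad_len,
				 struct scatterlist *payload,
				 struct scatterlist *sg_out,
				 unsigned int data_len, u8 *iv)
{
	struct scatterlist sg_in[2];

	sg_init_table(sg_in, ARRAY_SIZE(sg_in));
	sg_set_buf(&sg_in[0], aad, aad_len);
	sg_chain(sg_in, 2, payload);	/* slot 1 becomes a chain link */

	aead_request_set_ad(req, aad_len);
	/* data_len is the plaintext; sg_out also needs tag space */
	aead_request_set_crypt(req, sg_in, sg_out, data_len, iv);
	return crypto_aead_encrypt(req);
}
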
sg_in             462 security/keys/encrypted-keys/encrypted.c 	struct scatterlist sg_in[2];
sg_in             478 security/keys/encrypted-keys/encrypted.c 	sg_init_table(sg_in, 2);
sg_in             479 security/keys/encrypted-keys/encrypted.c 	sg_set_buf(&sg_in[0], epayload->decrypted_data,
sg_in             481 security/keys/encrypted-keys/encrypted.c 	sg_set_page(&sg_in[1], ZERO_PAGE(0), AES_BLOCK_SIZE, 0);
sg_in             487 security/keys/encrypted-keys/encrypted.c 	skcipher_request_set_crypt(req, sg_in, sg_out, encrypted_datalen, iv);
sg_in             564 security/keys/encrypted-keys/encrypted.c 	struct scatterlist sg_in[1];
sg_in             585 security/keys/encrypted-keys/encrypted.c 	sg_init_table(sg_in, 1);
sg_in             587 security/keys/encrypted-keys/encrypted.c 	sg_set_buf(sg_in, epayload->encrypted_data, encrypted_datalen);
sg_in             593 security/keys/encrypted-keys/encrypted.c 	skcipher_request_set_crypt(req, sg_in, sg_out, encrypted_datalen, iv);
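
Finally, encrypted-keys shows a neat padding trick: the plaintext is rarely a multiple of AES_BLOCK_SIZE, so instead of copying into a padded buffer, sg_in[1] points the tail of the final CBC block at ZERO_PAGE(0). A sketch under those assumptions (my_cbc_encrypt_padded is hypothetical):

#include <crypto/aes.h>
#include <crypto/skcipher.h>
#include <linux/mm.h>		/* ZERO_PAGE() */
#include <linux/scatterlist.h>

static int my_cbc_encrypt_padded(struct skcipher_request *req,
				 u8 *data, unsigned int datalen,
				 struct scatterlist *sg_out, u8 *iv)
{
	struct scatterlist sg_in[2];
	unsigned int encrypted_datalen = roundup(datalen, AES_BLOCK_SIZE);

	sg_init_table(sg_in, 2);
	sg_set_buf(&sg_in[0], data, datalen);
	/* read the pad bytes straight from the shared zero page */
	sg_set_page(&sg_in[1], ZERO_PAGE(0), AES_BLOCK_SIZE, 0);

	skcipher_request_set_crypt(req, sg_in, sg_out,
				   encrypted_datalen, iv);
	return crypto_skcipher_encrypt(req);
}
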