Lines Matching refs:walk
316 struct blkcipher_walk *walk) in ecb_aes_crypt() argument
318 int ret = blkcipher_walk_virt(desc, walk); in ecb_aes_crypt()
321 while ((nbytes = walk->nbytes)) { in ecb_aes_crypt()
324 u8 *out = walk->dst.virt.addr; in ecb_aes_crypt()
325 u8 *in = walk->src.virt.addr; in ecb_aes_crypt()
332 ret = blkcipher_walk_done(desc, walk, nbytes); in ecb_aes_crypt()
343 struct blkcipher_walk walk; in ecb_aes_encrypt() local
348 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_aes_encrypt()
349 return ecb_aes_crypt(desc, sctx->enc, sctx->key, &walk); in ecb_aes_encrypt()
357 struct blkcipher_walk walk; in ecb_aes_decrypt() local
362 blkcipher_walk_init(&walk, dst, src, nbytes); in ecb_aes_decrypt()
363 return ecb_aes_crypt(desc, sctx->dec, sctx->key, &walk); in ecb_aes_decrypt()
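The ecb_aes_* lines above show the simplest use of the walk: the encrypt/decrypt wrappers only initialize it and pick a function code plus key, while ecb_aes_crypt() maps each contiguous segment, processes whole AES blocks, and hands the remainder back to the walker. A minimal sketch of that pattern, assuming the legacy blkcipher walk API; the *_sketch names and the hw_km() helper standing in for the CPACF instruction are illustrative, not the driver's own symbols:

/* Sketch only: assumes <crypto/algapi.h>, <crypto/aes.h> and the driver's
 * s390_aes_ctx. hw_km() is a hypothetical stand-in for the CPACF KM call. */
static int hw_km(long func, void *param, u8 *dest, const u8 *src, unsigned int len);

static int ecb_walk_sketch(struct blkcipher_desc *desc, long func, void *key,
                           struct blkcipher_walk *walk)
{
        int ret = blkcipher_walk_virt(desc, walk);   /* map the first segment */
        unsigned int nbytes;

        while ((nbytes = walk->nbytes)) {
                /* process only whole AES blocks from this segment */
                unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
                u8 *out = walk->dst.virt.addr;
                u8 *in = walk->src.virt.addr;

                ret = hw_km(func, key, out, in, n);  /* hypothetical CPACF call */
                if (ret < 0)
                        return ret;

                /* report the unprocessed tail; this also advances the walk */
                nbytes &= AES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        }
        return ret;
}

static int ecb_encrypt_sketch(struct blkcipher_desc *desc,
                              struct scatterlist *dst, struct scatterlist *src,
                              unsigned int nbytes)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_walk_sketch(desc, sctx->enc, sctx->key, &walk);
}

Because ECB keeps no chaining state, nothing has to survive from one walk segment to the next, which is why the wrappers can pass sctx->key straight through.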
445 struct blkcipher_walk *walk) in cbc_aes_crypt() argument
448 int ret = blkcipher_walk_virt(desc, walk); in cbc_aes_crypt()
449 unsigned int nbytes = walk->nbytes; in cbc_aes_crypt()
458 memcpy(param.iv, walk->iv, AES_BLOCK_SIZE); in cbc_aes_crypt()
463 u8 *out = walk->dst.virt.addr; in cbc_aes_crypt()
464 u8 *in = walk->src.virt.addr; in cbc_aes_crypt()
471 ret = blkcipher_walk_done(desc, walk, nbytes); in cbc_aes_crypt()
472 } while ((nbytes = walk->nbytes)); in cbc_aes_crypt()
473 memcpy(walk->iv, param.iv, AES_BLOCK_SIZE); in cbc_aes_crypt()
484 struct blkcipher_walk walk; in cbc_aes_encrypt() local
489 blkcipher_walk_init(&walk, dst, src, nbytes); in cbc_aes_encrypt()
490 return cbc_aes_crypt(desc, sctx->enc, &walk); in cbc_aes_encrypt()
498 struct blkcipher_walk walk; in cbc_aes_decrypt() local
503 blkcipher_walk_init(&walk, dst, src, nbytes); in cbc_aes_decrypt()
504 return cbc_aes_crypt(desc, sctx->dec, &walk); in cbc_aes_decrypt()
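For cbc_aes_crypt() the loop has the same shape, but the chaining value lives in a parameter block next to the key: it is seeded from walk->iv before the loop and copied back after the last segment so the caller sees the final IV. A hedged sketch of that flow; the parameter layout, the key/key_len fields on the context, and the hw_kmc() helper (assumed to update the chaining value in place) are assumptions for illustration:

/* Sketch only: hw_kmc() is a hypothetical stand-in for the CPACF KMC call. */
static int hw_kmc(long func, void *param, u8 *dest, const u8 *src, unsigned int len);

static int cbc_walk_sketch(struct blkcipher_desc *desc, long func,
                           struct blkcipher_walk *walk)
{
        struct s390_aes_ctx *sctx = crypto_blkcipher_ctx(desc->tfm);
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes = walk->nbytes;
        struct {
                u8 iv[AES_BLOCK_SIZE];          /* assumed layout: chaining value... */
                u8 key[AES_MAX_KEY_SIZE];       /* ...followed by the key */
        } param;

        if (!nbytes)
                return ret;

        memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);     /* seed from the request IV */
        memcpy(param.key, sctx->key, sctx->key_len);
        do {
                unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
                u8 *out = walk->dst.virt.addr;
                u8 *in = walk->src.virt.addr;

                ret = hw_kmc(func, &param, out, in, n); /* hypothetical; chains via param.iv */
                if (ret < 0)
                        return ret;

                nbytes &= AES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        } while ((nbytes = walk->nbytes));
        memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);     /* export the final IV */

        return ret;
}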
618 struct blkcipher_walk *walk) in xts_aes_crypt() argument
621 int ret = blkcipher_walk_virt(desc, walk); in xts_aes_crypt()
622 unsigned int nbytes = walk->nbytes; in xts_aes_crypt()
637 memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak)); in xts_aes_crypt()
648 out = walk->dst.virt.addr; in xts_aes_crypt()
649 in = walk->src.virt.addr; in xts_aes_crypt()
656 ret = blkcipher_walk_done(desc, walk, nbytes); in xts_aes_crypt()
657 } while ((nbytes = walk->nbytes)); in xts_aes_crypt()
667 struct blkcipher_walk walk; in xts_aes_encrypt() local
672 blkcipher_walk_init(&walk, dst, src, nbytes); in xts_aes_encrypt()
673 return xts_aes_crypt(desc, xts_ctx->enc, xts_ctx, &walk); in xts_aes_encrypt()
681 struct blkcipher_walk walk; in xts_aes_decrypt() local
686 blkcipher_walk_init(&walk, dst, src, nbytes); in xts_aes_decrypt()
687 return xts_aes_crypt(desc, xts_ctx->dec, xts_ctx, &walk); in xts_aes_decrypt()
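The xts_aes_* lines add one step before the same kind of loop: the tweak is taken from walk->iv and pre-processed into the parameter block that the bulk operation then consumes, so only that bulk step touches the walk buffers. A sketch of that ordering; the key/pcc_key fields on xts_ctx, the simplified parameter block, and the hw_pcc()/hw_km() helpers are illustrative assumptions (the real parameter blocks are wider):

/* Sketch only: hw_pcc()/hw_km() are hypothetical stand-ins for the CPACF
 * tweak pre-computation and bulk KM calls. */
static int hw_pcc(long func, const u8 *key, u8 *tweak);
static int hw_km(long func, void *param, u8 *dest, const u8 *src, unsigned int len);

static int xts_walk_sketch(struct blkcipher_desc *desc, long func,
                           struct s390_xts_ctx *xts_ctx,
                           struct blkcipher_walk *walk)
{
        int ret = blkcipher_walk_virt(desc, walk);
        unsigned int nbytes = walk->nbytes;
        struct {
                u8 key[32];                     /* assumed: data key... */
                u8 tweak[AES_BLOCK_SIZE];       /* ...plus the processed tweak */
        } xts_param;
        u8 tweak[AES_BLOCK_SIZE];

        if (!nbytes)
                return ret;

        /* derive the per-request tweak from the IV once, outside the loop */
        memcpy(tweak, walk->iv, sizeof(tweak));
        ret = hw_pcc(func, xts_ctx->pcc_key, tweak);    /* hypothetical pre-compute step */
        if (ret < 0)
                return ret;

        memcpy(xts_param.key, xts_ctx->key, sizeof(xts_param.key));
        memcpy(xts_param.tweak, tweak, sizeof(xts_param.tweak));
        do {
                unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);
                u8 *out = walk->dst.virt.addr;
                u8 *in = walk->src.virt.addr;

                ret = hw_km(func, &xts_param, out, in, n);      /* hypothetical bulk call */
                if (ret < 0)
                        return ret;

                nbytes &= AES_BLOCK_SIZE - 1;
                ret = blkcipher_walk_done(desc, walk, nbytes);
        } while ((nbytes = walk->nbytes));

        return ret;
}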
778 struct s390_aes_ctx *sctx, struct blkcipher_walk *walk) in ctr_aes_crypt() argument
780 int ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE); in ctr_aes_crypt()
785 if (!walk->nbytes) in ctr_aes_crypt()
791 memcpy(ctrptr, walk->iv, AES_BLOCK_SIZE); in ctr_aes_crypt()
792 while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) { in ctr_aes_crypt()
793 out = walk->dst.virt.addr; in ctr_aes_crypt()
794 in = walk->src.virt.addr; in ctr_aes_crypt()
815 ret = blkcipher_walk_done(desc, walk, nbytes); in ctr_aes_crypt()
821 memcpy(walk->iv, ctrptr, AES_BLOCK_SIZE); in ctr_aes_crypt()
825 memcpy(walk->iv, ctrptr, AES_BLOCK_SIZE); in ctr_aes_crypt()
831 out = walk->dst.virt.addr; in ctr_aes_crypt()
832 in = walk->src.virt.addr; in ctr_aes_crypt()
839 ret = blkcipher_walk_done(desc, walk, 0); in ctr_aes_crypt()
840 memcpy(walk->iv, ctrbuf, AES_BLOCK_SIZE); in ctr_aes_crypt()
851 struct blkcipher_walk walk; in ctr_aes_encrypt() local
853 blkcipher_walk_init(&walk, dst, src, nbytes); in ctr_aes_encrypt()
854 return ctr_aes_crypt(desc, sctx->enc, sctx, &walk); in ctr_aes_encrypt()
862 struct blkcipher_walk walk; in ctr_aes_decrypt() local
864 blkcipher_walk_init(&walk, dst, src, nbytes); in ctr_aes_decrypt()
865 return ctr_aes_crypt(desc, sctx->dec, sctx, &walk); in ctr_aes_decrypt()
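ctr_aes_crypt() differs the most: it uses blkcipher_walk_virt_block() so each step yields at least one full block, keeps a running counter seeded from walk->iv, and finishes a trailing partial block by encrypting the counter into a stack buffer and copying only the bytes that remain, after which the advanced counter is written back to walk->iv. A condensed sketch of that control flow with single-block counter handling; hw_kmctr() is a hypothetical stand-in for the CPACF call, and the real driver additionally batches counters into a shared, lock-protected page for throughput:

/* Sketch only: hw_kmctr() is a hypothetical stand-in for the CPACF KMCTR call. */
static int hw_kmctr(long func, void *key, u8 *dest, const u8 *src,
                    unsigned int len, u8 *counter);

static int ctr_walk_sketch(struct blkcipher_desc *desc, long func,
                           struct s390_aes_ctx *sctx, struct blkcipher_walk *walk)
{
        /* ask the walker for at least one full block per step */
        int ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
        u8 ctr[AES_BLOCK_SIZE], buf[AES_BLOCK_SIZE];
        unsigned int nbytes;

        if (!walk->nbytes)
                return ret;

        memcpy(ctr, walk->iv, AES_BLOCK_SIZE);          /* counter starts at the IV */
        while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
                u8 *out = walk->dst.virt.addr;
                u8 *in = walk->src.virt.addr;

                while (nbytes >= AES_BLOCK_SIZE) {
                        ret = hw_kmctr(func, sctx->key, out, in,
                                       AES_BLOCK_SIZE, ctr);    /* hypothetical call */
                        if (ret < 0)
                                return ret;
                        crypto_inc(ctr, AES_BLOCK_SIZE);        /* advance the counter */
                        out += AES_BLOCK_SIZE;
                        in += AES_BLOCK_SIZE;
                        nbytes -= AES_BLOCK_SIZE;
                }
                ret = blkcipher_walk_done(desc, walk, nbytes);
        }

        if (nbytes) {
                /* trailing partial block: encrypt into a full-size buffer and
                   copy only the bytes that actually remain */
                u8 *out = walk->dst.virt.addr;
                u8 *in = walk->src.virt.addr;

                ret = hw_kmctr(func, sctx->key, buf, in, AES_BLOCK_SIZE, ctr);
                if (ret < 0)
                        return ret;
                memcpy(out, buf, nbytes);
                crypto_inc(ctr, AES_BLOCK_SIZE);
                ret = blkcipher_walk_done(desc, walk, 0);
        }
        memcpy(walk->iv, ctr, AES_BLOCK_SIZE);          /* export the advanced counter */

        return ret;
}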