Lines matching refs:walk, grouped by function:

in ecb_encrypt():
    378      struct blkcipher_walk walk;                                  (local)
    381      blkcipher_walk_init(&walk, dst, src, nbytes);
    382      err = blkcipher_walk_virt(desc, &walk);
    386      while ((nbytes = walk.nbytes)) {
    387              aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
    390              err = blkcipher_walk_done(desc, &walk, nbytes);

in ecb_decrypt():
    402      struct blkcipher_walk walk;                                  (local)
    405      blkcipher_walk_init(&walk, dst, src, nbytes);
    406      err = blkcipher_walk_virt(desc, &walk);
    410      while ((nbytes = walk.nbytes)) {
    411              aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
    414              err = blkcipher_walk_done(desc, &walk, nbytes);
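
Both ECB helpers follow the same blkcipher-walk pattern: initialize the walk over the source and destination scatterlists, map each contiguous chunk with blkcipher_walk_virt(), encrypt or decrypt the whole-block prefix of that chunk, and report the sub-block remainder back to blkcipher_walk_done(), which advances the walk. A minimal sketch of the encrypt side follows; the aes_ctx() lookup, the remainder bookkeeping, and the kernel_fpu_begin()/kernel_fpu_end() bracket are assumptions filling the lines the listing elides, not quotes from it.

    static int ecb_encrypt(struct blkcipher_desc *desc,
                           struct scatterlist *dst, struct scatterlist *src,
                           unsigned int nbytes)
    {
            /* assumed helper: fetch the aligned AES key schedule */
            struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
            struct blkcipher_walk walk;
            int err;

            blkcipher_walk_init(&walk, dst, src, nbytes);
            err = blkcipher_walk_virt(desc, &walk);

            kernel_fpu_begin();
            while ((nbytes = walk.nbytes)) {
                    /* encrypt the whole-block prefix of this chunk */
                    aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                                  nbytes & AES_BLOCK_MASK);
                    nbytes &= AES_BLOCK_SIZE - 1;
                    /* tell the walk how many bytes remain unprocessed */
                    err = blkcipher_walk_done(desc, &walk, nbytes);
            }
            kernel_fpu_end();

            return err;
    }

ecb_decrypt() is identical apart from calling aesni_ecb_dec(); ECB has no chaining value, which is why neither helper touches walk.iv.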

in cbc_encrypt():
    426      struct blkcipher_walk walk;                                  (local)
    429      blkcipher_walk_init(&walk, dst, src, nbytes);
    430      err = blkcipher_walk_virt(desc, &walk);
    434      while ((nbytes = walk.nbytes)) {
    435              aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
    436                            nbytes & AES_BLOCK_MASK, walk.iv);
    438              err = blkcipher_walk_done(desc, &walk, nbytes);

in cbc_decrypt():
    450      struct blkcipher_walk walk;                                  (local)
    453      blkcipher_walk_init(&walk, dst, src, nbytes);
    454      err = blkcipher_walk_virt(desc, &walk);
    458      while ((nbytes = walk.nbytes)) {
    459              aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
    460                            nbytes & AES_BLOCK_MASK, walk.iv);
    462              err = blkcipher_walk_done(desc, &walk, nbytes);
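
CBC adds exactly one thing to that pattern: walk.iv, the chaining value, is passed to the primitive so each plaintext block can be XORed with the previous ciphertext block, and the last ciphertext block is left behind in walk.iv for the next chunk. As a reference for what the aesni_cbc_enc() call computes over whole blocks, here is a hedged plain-C equivalent; aes_encrypt_block() is a hypothetical single-block primitive, not a function from this listing.

    /* Reference semantics of CBC encryption over whole blocks.  On
     * return, iv holds the last ciphertext block, ready for the next
     * chunk of the walk.  aes_encrypt_block() is hypothetical. */
    static void cbc_enc_ref(struct crypto_aes_ctx *ctx, u8 *dst,
                            const u8 *src, unsigned int len, u8 *iv)
    {
            while (len >= AES_BLOCK_SIZE) {
                    crypto_xor(iv, src, AES_BLOCK_SIZE);   /* P_i ^ C_{i-1} */
                    aes_encrypt_block(ctx, iv, iv);        /* C_i = E_K(P_i ^ C_{i-1}) */
                    memcpy(dst, iv, AES_BLOCK_SIZE);
                    src += AES_BLOCK_SIZE;
                    dst += AES_BLOCK_SIZE;
                    len -= AES_BLOCK_SIZE;
            }
    }

This dependency chain is why CBC encryption is serial, while cbc_decrypt() can process many blocks in parallel before the final XOR; that is where the hardware helper gains most.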

in ctr_crypt_final():
    471                              struct blkcipher_walk *walk)         (argument)
    473      u8 *ctrblk = walk->iv;
    475      u8 *src = walk->src.virt.addr;
    476      u8 *dst = walk->dst.virt.addr;
    477      unsigned int nbytes = walk->nbytes;
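
The matched declarations only hint at the body of ctr_crypt_final(). CTR mode turns the block cipher into a stream cipher, so a trailing partial block is finished by encrypting the counter once and XORing only the bytes that remain. A sketch of the body, assuming the single-block aesni_enc() helper and the kernel's crypto_xor()/crypto_inc() utilities:

    static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
                                struct blkcipher_walk *walk)
    {
            u8 *ctrblk = walk->iv;
            u8 keystream[AES_BLOCK_SIZE];
            u8 *src = walk->src.virt.addr;
            u8 *dst = walk->dst.virt.addr;
            unsigned int nbytes = walk->nbytes;

            aesni_enc(ctx, keystream, ctrblk);  /* keystream = E_K(counter) */
            crypto_xor(keystream, src, nbytes); /* fold in the plaintext tail */
            memcpy(dst, keystream, nbytes);
            crypto_inc(ctrblk, AES_BLOCK_SIZE); /* advance the counter */
    }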

in ctr_crypt():
    509      struct blkcipher_walk walk;                                  (local)
    512      blkcipher_walk_init(&walk, dst, src, nbytes);
    513      err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
    517      while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
    518              aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
    519                                nbytes & AES_BLOCK_MASK, walk.iv);
    521              err = blkcipher_walk_done(desc, &walk, nbytes);
    523      if (walk.nbytes) {
    524              ctr_crypt_final(ctx, &walk);
    525              err = blkcipher_walk_done(desc, &walk, 0);
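
Reassembled, ctr_crypt() differs from the ECB/CBC walkers in two ways: it uses blkcipher_walk_virt_block() so the walk never yields a chunk smaller than a block until the very end, and it finishes any sub-block tail with ctr_crypt_final() before closing the walk. A sketch, with the same assumed aes_ctx() lookup and FPU bracket as in the ECB example:

    static int ctr_crypt(struct blkcipher_desc *desc,
                         struct scatterlist *dst, struct scatterlist *src,
                         unsigned int nbytes)
    {
            struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
            struct blkcipher_walk walk;
            int err;

            blkcipher_walk_init(&walk, dst, src, nbytes);
            /* chunks stay >= AES_BLOCK_SIZE until the final one */
            err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);

            kernel_fpu_begin();
            while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
                    aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                                      nbytes & AES_BLOCK_MASK, walk.iv);
                    nbytes &= AES_BLOCK_SIZE - 1;
                    err = blkcipher_walk_done(desc, &walk, nbytes);
            }
            if (walk.nbytes) {
                    /* partial final block: one more keystream block */
                    ctr_crypt_final(ctx, &walk);
                    err = blkcipher_walk_done(desc, &walk, 0);
            }
            kernel_fpu_end();

            return err;
    }

Passing 0 to the final blkcipher_walk_done() tells the walk machinery that nothing is left outstanding.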