Lines matching refs:AES_BLOCK_SIZE (each hit is shown with its source line number and, where applicable, the enclosing function)
48 u8 b[AES_BLOCK_SIZE];
179 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt()
183 walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
202 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_decrypt()
206 walk.nbytes % AES_BLOCK_SIZE); in ecb_decrypt()
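
The ecb_encrypt()/ecb_decrypt() hits above are the standard blkcipher walk loop: process as many whole AES blocks as the current walk chunk holds, then hand the sub-block remainder back to blkcipher_walk_done(). A minimal sketch of that shape, assuming a crypto_aes_ctx software key schedule and a hypothetical bulk helper my_aes_ecb_encrypt() standing in for whatever routine the file actually calls:

#include <linux/crypto.h>
#include <crypto/aes.h>
#include <crypto/algapi.h>

/* assumed bulk helper: encrypts 'blocks' full AES blocks with the given
 * round keys; stands in for the file's real (likely SIMD) routine */
void my_aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
			int rounds, int blocks);

static int ecb_encrypt_sketch(struct blkcipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int rounds = 6 + ctx->key_length / 4; /* 10/12/14 for AES-128/192/256 */
	struct blkcipher_walk walk;
	unsigned int blocks;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		my_aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key_enc, rounds, blocks);
		/* remainder is < AES_BLOCK_SIZE; the walk carries it forward */
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

ecb_decrypt() mirrors this loop with the decryption key schedule (ctx->key_dec).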
225 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in cbc_encrypt()
230 walk.nbytes % AES_BLOCK_SIZE); in cbc_encrypt()
249 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in cbc_decrypt()
254 walk.nbytes % AES_BLOCK_SIZE); in cbc_decrypt()
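
cbc_encrypt()/cbc_decrypt() follow the same loop; the structural difference these hits imply is that the chaining value has to survive from one walk chunk to the next, which the walk exposes as walk.iv. A hedged sketch, with the same includes as the ECB sketch and another hypothetical helper:

/* assumed helper: CBC-encrypts 'blocks' blocks, reading and updating the
 * 16-byte chaining value in 'iv' so it carries across walk chunks */
void my_aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[],
			int rounds, int blocks, u8 iv[]);

static int cbc_encrypt_sketch(struct blkcipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int rounds = 6 + ctx->key_length / 4;
	struct blkcipher_walk walk;
	unsigned int blocks;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		my_aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key_enc, rounds, blocks, walk.iv);
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}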
269 err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE); in ctr_encrypt()
272 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ctr_encrypt()
276 nbytes -= blocks * AES_BLOCK_SIZE; in ctr_encrypt()
277 if (nbytes && nbytes == walk.nbytes % AES_BLOCK_SIZE) in ctr_encrypt()
280 walk.nbytes % AES_BLOCK_SIZE); in ctr_encrypt()
283 u8 *tdst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE; in ctr_encrypt()
284 u8 *tsrc = walk.src.virt.addr + blocks * AES_BLOCK_SIZE; in ctr_encrypt()
285 u8 __aligned(8) tail[AES_BLOCK_SIZE]; in ctr_encrypt()
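
The ctr_encrypt() hits show two extra twists. First, the walk is started with blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE), so intermediate chunks are never split below one block. Second, lines 276-285 handle a trailing partial block: whole blocks go through the bulk routine, and the final keystream block is produced into an 8-byte-aligned stack buffer from which only the remaining bytes are copied out. A sketch of that shape, assuming the same includes as above plus <linux/string.h>, with hypothetical helpers:

/* assumed helpers: bulk CTR over whole blocks (incrementing 'ctr'), and a
 * single-block encryption used to build the last keystream block */
void my_aes_ctr_blocks(u8 out[], u8 const in[], u32 const rk[],
		       int rounds, int blocks, u8 ctr[]);
void my_aes_encrypt_block(u8 out[], u8 const in[], u32 const rk[], int rounds);

static int ctr_encrypt_sketch(struct blkcipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int rounds = 6 + ctx->key_length / 4;
	struct blkcipher_walk walk;
	unsigned int blocks;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	/* ask the walk for at least one full block per intermediate chunk */
	err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		my_aes_ctr_blocks(walk.dst.virt.addr, walk.src.virt.addr,
				  ctx->key_enc, rounds, blocks, walk.iv);
		nbytes -= blocks * AES_BLOCK_SIZE;
		if (nbytes && nbytes == walk.nbytes % AES_BLOCK_SIZE)
			break;		/* only the partial tail is left */
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
	if (nbytes) {
		u8 *tdst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
		u8 *tsrc = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;
		u8 __aligned(8) tail[AES_BLOCK_SIZE];

		/* keystream for the final counter value, XORed over the
		 * remaining plaintext; only 'nbytes' bytes are written out */
		my_aes_encrypt_block(tail, walk.iv, ctx->key_enc, rounds);
		crypto_xor(tail, tsrc, nbytes);
		memcpy(tdst, tail, nbytes);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	return err;
}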
316 for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) { in xts_encrypt()
321 walk.nbytes % AES_BLOCK_SIZE); in xts_encrypt()
341 for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) { in xts_decrypt()
346 walk.nbytes % AES_BLOCK_SIZE); in xts_decrypt()
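
In the XTS hits the loop carries a 'first' flag: it is set only on the initial pass, which is when the helper has to turn walk.iv into the starting tweak (conventionally by encrypting it with the second XTS key); later passes continue from the running tweak stored back in walk.iv. A sketch under those assumptions, with a made-up two-key context and helper:

/* assumed two-key XTS context and bulk helper; when 'first' is set the helper
 * derives the initial tweak from 'iv' using rk2, otherwise it continues from
 * the tweak already stored there */
struct my_aes_xts_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx key2;
};

void my_aes_xts_encrypt(u8 out[], u8 const in[], u32 const rk1[],
			int rounds, int blocks, u32 const rk2[],
			u8 iv[], int first);

static int xts_encrypt_sketch(struct blkcipher_desc *desc,
			      struct scatterlist *dst,
			      struct scatterlist *src, unsigned int nbytes)
{
	struct my_aes_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int rounds = 6 + ctx->key1.key_length / 4;
	struct blkcipher_walk walk;
	unsigned int blocks;
	int err, first;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		my_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key1.key_enc, rounds, blocks,
				   ctx->key2.key_enc, walk.iv, first);
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

xts_decrypt() has the same shape, using ctx->key1.key_dec for the data blocks.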
359 .cra_blocksize = AES_BLOCK_SIZE,
367 .ivsize = AES_BLOCK_SIZE,
378 .cra_blocksize = AES_BLOCK_SIZE,
386 .ivsize = AES_BLOCK_SIZE,
405 .ivsize = AES_BLOCK_SIZE,
416 .cra_blocksize = AES_BLOCK_SIZE,
424 .ivsize = AES_BLOCK_SIZE,
434 .cra_blocksize = AES_BLOCK_SIZE,
444 .ivsize = AES_BLOCK_SIZE,
454 .cra_blocksize = AES_BLOCK_SIZE,
464 .ivsize = AES_BLOCK_SIZE,
484 .ivsize = AES_BLOCK_SIZE,
494 .cra_blocksize = AES_BLOCK_SIZE,
504 .ivsize = AES_BLOCK_SIZE,
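
The remaining hits (lines 359-504) are all algorithm definitions: every mode advertises a 16-byte .cra_blocksize, and the IV-bearing modes (CBC, CTR, XTS) also advertise a 16-byte .ivsize. For the old blkcipher interface those fields sit in entries shaped roughly like the CBC example below; the driver name, priority and setkey routine are placeholders rather than the file's actual values, and cbc_encrypt()/cbc_decrypt() are assumed to be the functions seen in the hits above.

static struct crypto_alg cbc_aes_alg_sketch = {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-sketch",	/* placeholder */
	.cra_priority		= 200,			/* placeholder */
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,	/* 16-byte cipher block */
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,	/* one block of IV */
		.setkey		= crypto_aes_set_key,
		.encrypt	= cbc_encrypt,
		.decrypt	= cbc_decrypt,
	},
};

An ECB entry would look the same minus the .ivsize field, and registration would go through crypto_register_alg() or crypto_register_algs() as usual (needs <linux/module.h> for THIS_MODULE in addition to the includes above).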