
Searched refs:ctrblk (Results 1 – 15 of 15) sorted by relevance

/linux-4.1.27/arch/x86/crypto/
blowfish_glue.c
267 u8 *ctrblk = walk->iv; in ctr_crypt_final() local
273 blowfish_enc_blk(ctx, keystream, ctrblk); in ctr_crypt_final()
277 crypto_inc(ctrblk, BF_BLOCK_SIZE); in ctr_crypt_final()
288 u64 ctrblk = be64_to_cpu(*(__be64 *)walk->iv); in __ctr_crypt() local
302 ctrblocks[0] = cpu_to_be64(ctrblk++); in __ctr_crypt()
303 ctrblocks[1] = cpu_to_be64(ctrblk++); in __ctr_crypt()
304 ctrblocks[2] = cpu_to_be64(ctrblk++); in __ctr_crypt()
305 ctrblocks[3] = cpu_to_be64(ctrblk++); in __ctr_crypt()
323 ctrblocks[0] = cpu_to_be64(ctrblk++); in __ctr_crypt()
332 *(__be64 *)walk->iv = cpu_to_be64(ctrblk); in __ctr_crypt()
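The __ctr_crypt() hits above keep the counter in a host-order u64, emit each counter block in big-endian form, and store the incremented value back into walk->iv when the blocks are done. Below is a minimal userspace sketch of that counter handling for a 64-bit block cipher; toy_block_encrypt(), be64_load()/be64_store() and the other names are stand-ins of mine, not the kernel's code.

    #include <stdint.h>
    #include <stdio.h>

    #define BLOCK_SIZE 8  /* 64-bit block cipher, as with Blowfish/DES3-EDE */

    /* Stand-in for the real block cipher: XOR with a fixed pad. */
    static void toy_block_encrypt(uint8_t out[BLOCK_SIZE], const uint8_t in[BLOCK_SIZE])
    {
        for (int i = 0; i < BLOCK_SIZE; i++)
            out[i] = in[i] ^ 0xA5;
    }

    static uint64_t be64_load(const uint8_t *p)
    {
        uint64_t v = 0;
        for (int i = 0; i < 8; i++)
            v = (v << 8) | p[i];
        return v;
    }

    static void be64_store(uint8_t *p, uint64_t v)
    {
        for (int i = 7; i >= 0; i--) {
            p[i] = (uint8_t)v;
            v >>= 8;
        }
    }

    /* CTR over whole blocks: keep the counter as a host-order u64,
     * encrypt its big-endian encoding, XOR into the data, and write
     * the final counter value back into the IV. */
    static void ctr_crypt_blocks(uint8_t *buf, size_t nblocks, uint8_t iv[BLOCK_SIZE])
    {
        uint64_t ctr = be64_load(iv);
        uint8_t ctrblock[BLOCK_SIZE], keystream[BLOCK_SIZE];

        for (size_t i = 0; i < nblocks; i++) {
            be64_store(ctrblock, ctr++);
            toy_block_encrypt(keystream, ctrblock);
            for (int j = 0; j < BLOCK_SIZE; j++)
                buf[i * BLOCK_SIZE + j] ^= keystream[j];
        }
        be64_store(iv, ctr);   /* IV now holds the next counter value */
    }

    int main(void)
    {
        uint8_t iv[BLOCK_SIZE] = {0}, data[16] = "16 bytes of text";

        ctr_crypt_blocks(data, 2, iv);                            /* encrypt */
        ctr_crypt_blocks(data, 2, (uint8_t[BLOCK_SIZE]){0});      /* decrypt = same op */
        printf("%.16s\n", (char *)data);
        return 0;
    }

Keeping the counter as an integer and only serializing it per block is what lets the real code above queue several consecutive ctrblocks[] entries for the 3-way/AVX routines in one pass.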
des3_ede_glue.c
273 u8 *ctrblk = walk->iv; in ctr_crypt_final() local
279 des3_ede_enc_blk(ctx, keystream, ctrblk); in ctr_crypt_final()
283 crypto_inc(ctrblk, DES3_EDE_BLOCK_SIZE); in ctr_crypt_final()
294 u64 ctrblk = be64_to_cpu(*(__be64 *)walk->iv); in __ctr_crypt() local
301 ctrblocks[0] = cpu_to_be64(ctrblk++); in __ctr_crypt()
302 ctrblocks[1] = cpu_to_be64(ctrblk++); in __ctr_crypt()
303 ctrblocks[2] = cpu_to_be64(ctrblk++); in __ctr_crypt()
322 ctrblocks[0] = cpu_to_be64(ctrblk++); in __ctr_crypt()
333 *(__be64 *)walk->iv = cpu_to_be64(ctrblk); in __ctr_crypt()
glue_helper.c
224 le128 ctrblk; in glue_ctr_crypt_final_128bit() local
227 be128_to_le128(&ctrblk, (be128 *)walk->iv); in glue_ctr_crypt_final_128bit()
230 fn_ctr(ctx, &tmp, &tmp, &ctrblk); in glue_ctr_crypt_final_128bit()
233 le128_to_be128((be128 *)walk->iv, &ctrblk); in glue_ctr_crypt_final_128bit()
245 le128 ctrblk; in __glue_ctr_crypt_128bit() local
249 be128_to_le128(&ctrblk, (be128 *)walk->iv); in __glue_ctr_crypt_128bit()
258 gctx->funcs[i].fn_u.ctr(ctx, dst, src, &ctrblk); in __glue_ctr_crypt_128bit()
271 le128_to_be128((be128 *)walk->iv, &ctrblk); in __glue_ctr_crypt_128bit()
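glue_helper.c works on 128-bit blocks: the IV is converted from be128 into a local le128 copy so the per-cipher routines can advance it with ordinary 64-bit arithmetic, then converted back to big-endian when the walk ends. A rough illustration of a 128-bit counter increment with carry follows; struct ctr128 and the helper names are mine, not the kernel's le128/be128 API.

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical 128-bit counter, low word first (analogous in spirit
     * to the kernel's le128, but not the same type). */
    struct ctr128 {
        uint64_t lo;
        uint64_t hi;
    };

    /* Add n to the counter, propagating the carry into the high word. */
    static void ctr128_add(struct ctr128 *c, uint64_t n)
    {
        uint64_t old = c->lo;

        c->lo += n;
        if (c->lo < old)      /* unsigned wrap means a carry occurred */
            c->hi++;
    }

    /* Serialize to the big-endian byte order a CTR IV uses on the wire. */
    static void ctr128_to_be_bytes(const struct ctr128 *c, uint8_t out[16])
    {
        for (int i = 0; i < 8; i++) {
            out[i]     = (uint8_t)(c->hi >> (56 - 8 * i));
            out[8 + i] = (uint8_t)(c->lo >> (56 - 8 * i));
        }
    }

    int main(void)
    {
        struct ctr128 c = { .lo = UINT64_MAX, .hi = 0 };
        uint8_t iv[16];

        ctr128_add(&c, 1);            /* overflows lo, carries into hi */
        ctr128_to_be_bytes(&c, iv);
        for (int i = 0; i < 16; i++)
            printf("%02x", iv[i]);
        printf("\n");                 /* prints 00000000000000010000000000000000 */
        return 0;
    }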
cast5_avx_glue.c
253 u8 *ctrblk = walk->iv; in ctr_crypt_final() local
259 __cast5_encrypt(ctx, keystream, ctrblk); in ctr_crypt_final()
263 crypto_inc(ctrblk, CAST5_BLOCK_SIZE); in ctr_crypt_final()
292 u64 ctrblk; in __ctr_crypt() local
297 ctrblk = *(u64 *)walk->iv; in __ctr_crypt()
300 __cast5_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk); in __ctr_crypt()
301 *dst ^= ctrblk; in __ctr_crypt()
twofish_glue_3way.c
67 be128 ctrblk; in twofish_enc_blk_ctr() local
72 le128_to_be128(&ctrblk, iv); in twofish_enc_blk_ctr()
75 twofish_enc_blk(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk); in twofish_enc_blk_ctr()
76 u128_xor(dst, dst, (u128 *)&ctrblk); in twofish_enc_blk_ctr()
cast6_avx_glue.c
74 be128 ctrblk; in cast6_crypt_ctr() local
76 le128_to_be128(&ctrblk, iv); in cast6_crypt_ctr()
79 __cast6_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk); in cast6_crypt_ctr()
80 u128_xor(dst, src, (u128 *)&ctrblk); in cast6_crypt_ctr()
serpent_avx_glue.c
71 be128 ctrblk; in __serpent_crypt_ctr() local
73 le128_to_be128(&ctrblk, iv); in __serpent_crypt_ctr()
76 __serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk); in __serpent_crypt_ctr()
77 u128_xor(dst, src, (u128 *)&ctrblk); in __serpent_crypt_ctr()
serpent_sse2_glue.c
64 be128 ctrblk; in serpent_crypt_ctr() local
66 le128_to_be128(&ctrblk, iv); in serpent_crypt_ctr()
69 __serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk); in serpent_crypt_ctr()
70 u128_xor(dst, src, (u128 *)&ctrblk); in serpent_crypt_ctr()
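The twofish, cast6 and serpent helpers above all share one per-block shape: snapshot the IV into a local counter block, advance the IV, encrypt the snapshot in place, and XOR the result into the output. A schematic standalone version of that shape follows, with a placeholder 16-byte cipher and a byte-wise IV increment; every name in it is hypothetical.

    #include <stdint.h>
    #include <string.h>
    #include <stdio.h>

    #define BLK 16

    /* Placeholder for a keyed 16-byte block permutation. */
    static void toy_encrypt_block(uint8_t b[BLK])
    {
        for (int i = 0; i < BLK; i++)
            b[i] = (uint8_t)(b[i] * 5 + 1);
    }

    /* Big-endian increment of the IV, last byte first. */
    static void iv_inc(uint8_t iv[BLK])
    {
        for (int i = BLK - 1; i >= 0; i--)
            if (++iv[i] != 0)
                break;
    }

    /* One CTR block: snapshot the IV, bump the IV, encrypt the snapshot,
     * XOR it into dst. Mirrors the shape of the per-block helpers above. */
    static void ctr_one_block(uint8_t *dst, const uint8_t *src, uint8_t iv[BLK])
    {
        uint8_t ctrblk[BLK];

        memcpy(ctrblk, iv, BLK);
        iv_inc(iv);
        toy_encrypt_block(ctrblk);
        for (int i = 0; i < BLK; i++)
            dst[i] = src[i] ^ ctrblk[i];
    }

    int main(void)
    {
        uint8_t iv[BLK] = {0}, in[BLK] = {0}, out[BLK];

        ctr_one_block(out, in, iv);
        printf("iv[15] after one block: %u\n", iv[15]);  /* 1 */
        return 0;
    }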
camellia_glue.c
1294 be128 ctrblk; in camellia_crypt_ctr() local
1299 le128_to_be128(&ctrblk, iv); in camellia_crypt_ctr()
1302 camellia_enc_blk_xor(ctx, (u8 *)dst, (u8 *)&ctrblk); in camellia_crypt_ctr()
aesni-intel_glue.c
473 u8 *ctrblk = walk->iv; in ctr_crypt_final() local
479 aesni_enc(ctx, keystream, ctrblk); in ctr_crypt_final()
482 crypto_inc(ctrblk, AES_BLOCK_SIZE); in ctr_crypt_final()
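ctr_crypt_final() in these glue files handles the trailing partial block: the current counter is encrypted once into a keystream buffer, only the remaining bytes are XORed (so the tail is never padded out to a full block), and the counter is then bumped. A self-contained sketch of that tail handling, with a stand-in cipher call and hypothetical names:

    #include <stdint.h>
    #include <stdio.h>

    #define BLK 16

    /* Stand-in for the real block encrypt: fills the keystream
     * from the counter block in a trivially reversible way. */
    static void toy_encrypt(uint8_t out[BLK], const uint8_t in[BLK])
    {
        for (int i = 0; i < BLK; i++)
            out[i] = in[i] ^ 0x3C;
    }

    /* Big-endian byte-wise increment, in the spirit of crypto_inc(). */
    static void ctr_inc(uint8_t ctrblk[BLK])
    {
        for (int i = BLK - 1; i >= 0; i--)
            if (++ctrblk[i] != 0)
                break;
    }

    /* Final partial block: encrypt the counter once, XOR only nbytes of
     * tail data, then advance the counter for any caller that continues. */
    static void ctr_crypt_final(uint8_t *dst, const uint8_t *src, size_t nbytes,
                                uint8_t ctrblk[BLK])
    {
        uint8_t keystream[BLK];

        toy_encrypt(keystream, ctrblk);
        for (size_t i = 0; i < nbytes; i++)
            dst[i] = src[i] ^ keystream[i];
        ctr_inc(ctrblk);
    }

    int main(void)
    {
        uint8_t iv[BLK] = {0}, tail[5] = "tail", out[5];

        ctr_crypt_final(out, tail, 5, iv);
        printf("counter low byte is now %u\n", iv[BLK - 1]);  /* 1 */
        return 0;
    }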
/linux-4.1.27/crypto/
ctr.c
60 u8 *ctrblk = walk->iv; in crypto_ctr_crypt_final() local
67 crypto_cipher_encrypt_one(tfm, keystream, ctrblk); in crypto_ctr_crypt_final()
71 crypto_inc(ctrblk, bsize); in crypto_ctr_crypt_final()
80 u8 *ctrblk = walk->iv; in crypto_ctr_crypt_segment() local
87 fn(crypto_cipher_tfm(tfm), dst, ctrblk); in crypto_ctr_crypt_segment()
91 crypto_inc(ctrblk, bsize); in crypto_ctr_crypt_segment()
108 u8 *ctrblk = walk->iv; in crypto_ctr_crypt_inplace() local
115 fn(crypto_cipher_tfm(tfm), keystream, ctrblk); in crypto_ctr_crypt_inplace()
119 crypto_inc(ctrblk, bsize); in crypto_ctr_crypt_inplace()
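The generic crypto/ctr.c walker splits the work into a final partial block, a segment path where source and destination differ, and an in-place path. The in-place case is the interesting one for ctrblk handling: because the data is overwritten as it is processed, the keystream has to be produced into a separate buffer before the XOR. A rough sketch of that in-place loop under a toy 16-byte cipher (block_inc() plays the role crypto_inc() plays above; all names are mine):

    #include <stdint.h>
    #include <string.h>
    #include <stdio.h>

    #define BSIZE 16

    /* Toy block transform standing in for the real cipher. */
    static void toy_encrypt(uint8_t out[BSIZE], const uint8_t in[BSIZE])
    {
        for (int i = 0; i < BSIZE; i++)
            out[i] = (uint8_t)(in[i] + 0x55);
    }

    /* Byte-wise big-endian increment over the whole block. */
    static void block_inc(uint8_t *ctrblk, size_t bsize)
    {
        for (size_t i = bsize; i-- > 0; )
            if (++ctrblk[i] != 0)
                break;
    }

    /* In-place CTR: since the data is overwritten as we go, the counter
     * must be encrypted into a separate keystream buffer first. */
    static void ctr_crypt_inplace(uint8_t *data, size_t nblocks, uint8_t ctrblk[BSIZE])
    {
        uint8_t keystream[BSIZE];

        for (size_t b = 0; b < nblocks; b++) {
            toy_encrypt(keystream, ctrblk);
            for (int i = 0; i < BSIZE; i++)
                data[b * BSIZE + i] ^= keystream[i];
            block_inc(ctrblk, BSIZE);
        }
    }

    int main(void)
    {
        uint8_t iv[BSIZE] = {0};
        uint8_t buf[32] = "an in-place CTR demo buffer....";

        ctr_crypt_inplace(buf, 2, iv);        /* encrypt */
        memset(iv, 0, sizeof(iv));
        ctr_crypt_inplace(buf, 2, iv);        /* decrypt */
        printf("%.31s\n", (char *)buf);
        return 0;
    }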
/linux-4.1.27/arch/s390/crypto/
des_s390.c
27 static u8 *ctrblk; variable
398 ctrptr = ctrblk; in ctr_desall_crypt()
405 if (ctrptr == ctrblk) in ctr_desall_crypt()
412 if (ctrptr == ctrblk) in ctr_desall_crypt()
426 if (ctrptr == ctrblk) { in ctr_desall_crypt()
575 ctrblk = (u8 *) __get_free_page(GFP_KERNEL); in des_s390_init()
576 if (!ctrblk) { in des_s390_init()
606 if (ctrblk) { in des_s390_exit()
609 free_page((unsigned long) ctrblk); in des_s390_exit()
aes_s390.c
35 static u8 *ctrblk; variable
789 ctrptr = ctrblk; in ctr_aes_crypt()
796 if (ctrptr == ctrblk) in ctr_aes_crypt()
803 if (ctrptr == ctrblk) in ctr_aes_crypt()
817 if (ctrptr == ctrblk) { in ctr_aes_crypt()
938 ctrblk = (u8 *) __get_free_page(GFP_KERNEL); in aes_s390_init()
939 if (!ctrblk) { in aes_s390_init()
945 free_page((unsigned long) ctrblk); in aes_s390_init()
970 free_page((unsigned long) ctrblk); in aes_s390_fini()
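On s390, ctrblk is a single page allocated at module init (and freed on exit), and the drivers fall back to single-block operation when it is not in use or could not be allocated; the point of the page is to hold a run of consecutive counter values so one hardware call can process many blocks. A small sketch of batching counters into such a buffer; the page size, helper names and fallback comment are assumptions of mine, not the driver's code.

    #include <stdint.h>
    #include <stdlib.h>
    #include <string.h>
    #include <stdio.h>

    #define BLK 16

    /* Big-endian increment of one 16-byte counter block. */
    static void ctr_inc(uint8_t ctr[BLK])
    {
        for (int i = BLK - 1; i >= 0; i--)
            if (++ctr[i] != 0)
                break;
    }

    /* Fill buf with n consecutive counter blocks, starting from *iv,
     * and leave iv holding the next unused counter value. This mimics
     * the idea of batching counters into a preallocated ctrblk page. */
    static void fill_ctr_run(uint8_t *buf, size_t n, uint8_t iv[BLK])
    {
        for (size_t i = 0; i < n; i++) {
            memcpy(buf + i * BLK, iv, BLK);
            ctr_inc(iv);
        }
    }

    int main(void)
    {
        size_t nblocks = 4096 / BLK;          /* one page worth of counters */
        uint8_t *page = malloc(nblocks * BLK);
        uint8_t iv[BLK] = {0};

        if (!page)
            return 1;                         /* real code falls back to one block at a time */
        fill_ctr_run(page, nblocks, iv);
        printf("last counter low byte: %u\n", page[(nblocks - 1) * BLK + BLK - 1]);
        free(page);
        return 0;
    }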
/linux-4.1.27/drivers/crypto/vmx/
aes_ctr.c
93 u8 *ctrblk = walk->iv; in p8_aes_ctr_final() local
102 aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key); in p8_aes_ctr_final()
107 crypto_inc(ctrblk, AES_BLOCK_SIZE); in p8_aes_ctr_final()
/linux-4.1.27/arch/sparc/crypto/
aes_glue.c
339 u8 *ctrblk = walk->iv; in ctr_crypt_final() local
345 ctx->ops->ecb_encrypt(&ctx->key[0], (const u64 *)ctrblk, in ctr_crypt_final()
349 crypto_inc(ctrblk, AES_BLOCK_SIZE); in ctr_crypt_final()