Searched refs:ctrblk (Results 1 – 15 of 15) sorted by relevance

/linux-4.4.14/arch/x86/crypto/
blowfish_glue.c
267 u8 *ctrblk = walk->iv; in ctr_crypt_final() local
273 blowfish_enc_blk(ctx, keystream, ctrblk); in ctr_crypt_final()
277 crypto_inc(ctrblk, BF_BLOCK_SIZE); in ctr_crypt_final()
288 u64 ctrblk = be64_to_cpu(*(__be64 *)walk->iv); in __ctr_crypt() local
302 ctrblocks[0] = cpu_to_be64(ctrblk++); in __ctr_crypt()
303 ctrblocks[1] = cpu_to_be64(ctrblk++); in __ctr_crypt()
304 ctrblocks[2] = cpu_to_be64(ctrblk++); in __ctr_crypt()
305 ctrblocks[3] = cpu_to_be64(ctrblk++); in __ctr_crypt()
323 ctrblocks[0] = cpu_to_be64(ctrblk++); in __ctr_crypt()
332 *(__be64 *)walk->iv = cpu_to_be64(ctrblk); in __ctr_crypt()
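Taken together, the ctr_crypt_final() and __ctr_crypt() hits above show the two halves of the x86 CTR glue: a per-block keystream path for the partial final block, and a batched path that emits several big-endian counter blocks at once. Below is a minimal userspace sketch of the batched 64-bit counter pattern; enc_blk_xor_4way() is a hypothetical stand-in for the kernel's 4-way cipher routine, and htobe64()/be64toh() stand in for cpu_to_be64()/be64_to_cpu().

#include <stdint.h>
#include <endian.h>

void ctr_batch_4way(void *ctx, uint64_t dst[4], const uint64_t src[4],
                    uint64_t *iv_be,
                    void (*enc_blk_xor_4way)(void *ctx, uint64_t *dst,
                                             const uint64_t *src,
                                             const uint64_t *ctrblocks))
{
    /* The counter is kept in CPU byte order while it is incremented ... */
    uint64_t ctrblk = be64toh(*iv_be);
    uint64_t ctrblocks[4];

    /* ... but each counter block is written out big-endian, mirroring the
     * cpu_to_be64(ctrblk++) stores in __ctr_crypt() above. */
    ctrblocks[0] = htobe64(ctrblk++);
    ctrblocks[1] = htobe64(ctrblk++);
    ctrblocks[2] = htobe64(ctrblk++);
    ctrblocks[3] = htobe64(ctrblk++);

    /* Encrypt the four counter blocks and XOR the keystream onto the data. */
    enc_blk_xor_4way(ctx, dst, src, ctrblocks);

    /* Store the advanced counter back as the IV for the next chunk. */
    *iv_be = htobe64(ctrblk);
}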
des3_ede_glue.c
273 u8 *ctrblk = walk->iv; in ctr_crypt_final() local
279 des3_ede_enc_blk(ctx, keystream, ctrblk); in ctr_crypt_final()
283 crypto_inc(ctrblk, DES3_EDE_BLOCK_SIZE); in ctr_crypt_final()
294 u64 ctrblk = be64_to_cpu(*(__be64 *)walk->iv); in __ctr_crypt() local
301 ctrblocks[0] = cpu_to_be64(ctrblk++); in __ctr_crypt()
302 ctrblocks[1] = cpu_to_be64(ctrblk++); in __ctr_crypt()
303 ctrblocks[2] = cpu_to_be64(ctrblk++); in __ctr_crypt()
322 ctrblocks[0] = cpu_to_be64(ctrblk++); in __ctr_crypt()
333 *(__be64 *)walk->iv = cpu_to_be64(ctrblk); in __ctr_crypt()
glue_helper.c
224 le128 ctrblk; in glue_ctr_crypt_final_128bit() local
227 be128_to_le128(&ctrblk, (be128 *)walk->iv); in glue_ctr_crypt_final_128bit()
230 fn_ctr(ctx, &tmp, &tmp, &ctrblk); in glue_ctr_crypt_final_128bit()
233 le128_to_be128((be128 *)walk->iv, &ctrblk); in glue_ctr_crypt_final_128bit()
245 le128 ctrblk; in __glue_ctr_crypt_128bit() local
249 be128_to_le128(&ctrblk, (be128 *)walk->iv); in __glue_ctr_crypt_128bit()
258 gctx->funcs[i].fn_u.ctr(ctx, dst, src, &ctrblk); in __glue_ctr_crypt_128bit()
271 le128_to_be128((be128 *)walk->iv, &ctrblk); in __glue_ctr_crypt_128bit()
cast5_avx_glue.c
252 u8 *ctrblk = walk->iv; in ctr_crypt_final() local
258 __cast5_encrypt(ctx, keystream, ctrblk); in ctr_crypt_final()
262 crypto_inc(ctrblk, CAST5_BLOCK_SIZE); in ctr_crypt_final()
291 u64 ctrblk; in __ctr_crypt() local
296 ctrblk = *(u64 *)walk->iv; in __ctr_crypt()
299 __cast5_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk); in __ctr_crypt()
300 *dst ^= ctrblk; in __ctr_crypt()
twofish_glue_3way.c
67 be128 ctrblk; in twofish_enc_blk_ctr() local
72 le128_to_be128(&ctrblk, iv); in twofish_enc_blk_ctr()
75 twofish_enc_blk(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk); in twofish_enc_blk_ctr()
76 u128_xor(dst, dst, (u128 *)&ctrblk); in twofish_enc_blk_ctr()
serpent_sse2_glue.c
64 be128 ctrblk; in serpent_crypt_ctr() local
66 le128_to_be128(&ctrblk, iv); in serpent_crypt_ctr()
69 __serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk); in serpent_crypt_ctr()
70 u128_xor(dst, src, (u128 *)&ctrblk); in serpent_crypt_ctr()
cast6_avx_glue.c
73 be128 ctrblk; in cast6_crypt_ctr() local
75 le128_to_be128(&ctrblk, iv); in cast6_crypt_ctr()
78 __cast6_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk); in cast6_crypt_ctr()
79 u128_xor(dst, src, (u128 *)&ctrblk); in cast6_crypt_ctr()
serpent_avx_glue.c
70 be128 ctrblk; in __serpent_crypt_ctr() local
72 le128_to_be128(&ctrblk, iv); in __serpent_crypt_ctr()
75 __serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk); in __serpent_crypt_ctr()
76 u128_xor(dst, src, (u128 *)&ctrblk); in __serpent_crypt_ctr()
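The twofish, serpent, and cast6 hits above all use the same single-block 128-bit CTR step: convert the little-endian counter to a big-endian block, encrypt it in place, then XOR the result onto the data. A simplified userspace sketch of that step follows; the u128 type, le128_to_be128() helper, and encrypt_block() callback here are stand-ins for the kernel's types and routines, not the actual glue API.

#include <stdint.h>
#include <endian.h>

typedef struct { uint64_t a, b; } u128;      /* two 64-bit halves */

/* Turn the little-endian counter into a big-endian block: swap the halves
 * and byte-swap each half to big-endian. */
static void le128_to_be128(u128 *dst, const u128 *src)
{
    dst->a = htobe64(src->b);
    dst->b = htobe64(src->a);
}

void crypt_ctr_128(void *ctx, u128 *dst, const u128 *src, const u128 *iv,
                   void (*encrypt_block)(void *ctx, uint8_t *out,
                                         const uint8_t *in))
{
    u128 ctrblk;

    le128_to_be128(&ctrblk, iv);                   /* counter -> BE block */
    encrypt_block(ctx, (uint8_t *)&ctrblk,
                  (uint8_t *)&ctrblk);             /* keystream block */
    dst->a = src->a ^ ctrblk.a;                    /* u128_xor equivalent */
    dst->b = src->b ^ ctrblk.b;
}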
camellia_glue.c
1294 be128 ctrblk; in camellia_crypt_ctr() local
1299 le128_to_be128(&ctrblk, iv); in camellia_crypt_ctr()
1302 camellia_enc_blk_xor(ctx, (u8 *)dst, (u8 *)&ctrblk); in camellia_crypt_ctr()
aesni-intel_glue.c
474 u8 *ctrblk = walk->iv; in ctr_crypt_final() local
480 aesni_enc(ctx, keystream, ctrblk); in ctr_crypt_final()
483 crypto_inc(ctrblk, AES_BLOCK_SIZE); in ctr_crypt_final()
/linux-4.4.14/crypto/
ctr.c
60 u8 *ctrblk = walk->iv; in crypto_ctr_crypt_final() local
67 crypto_cipher_encrypt_one(tfm, keystream, ctrblk); in crypto_ctr_crypt_final()
71 crypto_inc(ctrblk, bsize); in crypto_ctr_crypt_final()
80 u8 *ctrblk = walk->iv; in crypto_ctr_crypt_segment() local
87 fn(crypto_cipher_tfm(tfm), dst, ctrblk); in crypto_ctr_crypt_segment()
91 crypto_inc(ctrblk, bsize); in crypto_ctr_crypt_segment()
108 u8 *ctrblk = walk->iv; in crypto_ctr_crypt_inplace() local
115 fn(crypto_cipher_tfm(tfm), keystream, ctrblk); in crypto_ctr_crypt_inplace()
119 crypto_inc(ctrblk, bsize); in crypto_ctr_crypt_inplace()
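crypto/ctr.c is the generic template that the arch-specific glue code above mirrors: encrypt the counter block to get one block of keystream, XOR it onto the (possibly partial) data, and advance the counter with crypto_inc(). A minimal userspace sketch of that final-block pattern, assuming a generic encrypt_block(ctx, out, in) primitive rather than the kernel cipher API:

#include <stdint.h>
#include <stddef.h>

/* Big-endian increment of the counter block, like the kernel's crypto_inc(). */
static void ctr_inc(uint8_t *ctrblk, size_t bsize)
{
    for (size_t i = bsize; i-- > 0; )
        if (++ctrblk[i] != 0)
            break;
}

void ctr_crypt_final(void *ctx, uint8_t *dst, const uint8_t *src,
                     size_t nbytes, uint8_t *ctrblk, size_t bsize,
                     void (*encrypt_block)(void *ctx, uint8_t *out,
                                           const uint8_t *in))
{
    uint8_t keystream[64];          /* large enough for common block sizes */

    /* Encrypt the counter block to produce one block of keystream ... */
    encrypt_block(ctx, keystream, ctrblk);

    /* ... XOR it onto the (possibly partial) final block ... */
    for (size_t i = 0; i < nbytes; i++)
        dst[i] = src[i] ^ keystream[i];

    /* ... and advance the counter for any later request on the same IV. */
    ctr_inc(ctrblk, bsize);
}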
/linux-4.4.14/arch/s390/crypto/
des_s390.c
28 static u8 *ctrblk; variable
399 ctrptr = ctrblk; in ctr_desall_crypt()
406 if (ctrptr == ctrblk) in ctr_desall_crypt()
413 if (ctrptr == ctrblk) in ctr_desall_crypt()
427 if (ctrptr == ctrblk) { in ctr_desall_crypt()
576 ctrblk = (u8 *) __get_free_page(GFP_KERNEL); in des_s390_init()
577 if (!ctrblk) { in des_s390_init()
607 if (ctrblk) { in des_s390_exit()
610 free_page((unsigned long) ctrblk); in des_s390_exit()
aes_s390.c
36 static u8 *ctrblk; variable
790 ctrptr = ctrblk; in ctr_aes_crypt()
797 if (ctrptr == ctrblk) in ctr_aes_crypt()
804 if (ctrptr == ctrblk) in ctr_aes_crypt()
818 if (ctrptr == ctrblk) { in ctr_aes_crypt()
939 ctrblk = (u8 *) __get_free_page(GFP_KERNEL); in aes_s390_init()
940 if (!ctrblk) { in aes_s390_init()
946 free_page((unsigned long) ctrblk); in aes_s390_init()
971 free_page((unsigned long) ctrblk); in aes_s390_fini()
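Unlike the x86 glue code, the s390 drivers keep a single shared page, ctrblk, allocated once at module init and used to stage pre-built counter blocks; the init and exit hits above show its lifecycle. A rough sketch of that allocation pattern, with the module framing here being illustrative rather than the actual des_s390.c/aes_s390.c code:

#include <linux/module.h>
#include <linux/gfp.h>
#include <linux/errno.h>
#include <linux/types.h>

static u8 *ctrblk;

static int __init ctr_page_init(void)
{
	/* One page is enough to stage a run of pre-built counter blocks. */
	ctrblk = (u8 *) __get_free_page(GFP_KERNEL);
	if (!ctrblk)
		return -ENOMEM;
	return 0;
}

static void __exit ctr_page_exit(void)
{
	if (ctrblk)
		free_page((unsigned long) ctrblk);
}

module_init(ctr_page_init);
module_exit(ctr_page_exit);
MODULE_LICENSE("GPL");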
/linux-4.4.14/drivers/crypto/vmx/
aes_ctr.c
96 u8 *ctrblk = walk->iv; in p8_aes_ctr_final() local
105 aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key); in p8_aes_ctr_final()
110 crypto_inc(ctrblk, AES_BLOCK_SIZE); in p8_aes_ctr_final()
/linux-4.4.14/arch/sparc/crypto/
aes_glue.c
339 u8 *ctrblk = walk->iv; in ctr_crypt_final() local
345 ctx->ops->ecb_encrypt(&ctx->key[0], (const u64 *)ctrblk, in ctr_crypt_final()
349 crypto_inc(ctrblk, AES_BLOCK_SIZE); in ctr_crypt_final()