/linux-4.4.14/include/crypto/ |
D | gf128mul.h |
      160 void gf128mul_lle(be128 *a, const be128 *b);
      162 void gf128mul_bbe(be128 *a, const be128 *b);
      165 void gf128mul_x_ble(be128 *a, const be128 *b);
      170 be128 t[256];
      173 struct gf128mul_4k *gf128mul_init_4k_lle(const be128 *g);
      174 struct gf128mul_4k *gf128mul_init_4k_bbe(const be128 *g);
      175 void gf128mul_4k_lle(be128 *a, struct gf128mul_4k *t);
      176 void gf128mul_4k_bbe(be128 *a, struct gf128mul_4k *t);
      194 struct gf128mul_64k *gf128mul_init_64k_lle(const be128 *g);
      195 struct gf128mul_64k *gf128mul_init_64k_bbe(const be128 *g);
      [all …]
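Taken together, the declarations above form the table-driven GF(2^128) multiply API. A minimal sketch of how the 4k-table variant is typically driven (ghash-generic.c below follows this pattern); gf128mul_free_4k() and the error handling are assumptions, not shown in the listing:

    #include <linux/errno.h>
    #include <crypto/gf128mul.h>

    /* Sketch only: build a 4 KB lookup table from the constant factor h,
     * multiply a run of blocks against it in place, then release it.
     * gf128mul_free_4k() is assumed from the same header. */
    static int multiply_blocks_by_h(be128 *blocks, unsigned int n, const be128 *h)
    {
            struct gf128mul_4k *t = gf128mul_init_4k_lle(h);
            unsigned int i;

            if (!t)
                    return -ENOMEM;

            for (i = 0; i < n; i++)
                    gf128mul_4k_lle(&blocks[i], t);  /* blocks[i] = blocks[i] * h (lle bit order) */

            gf128mul_free_4k(t);
            return 0;
    }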
|
D | b128ops.h |
      58 } be128;   typedef
      70 static inline void be128_xor(be128 *r, const be128 *p, const be128 *q)   in be128_xor()
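A user-space sketch of what the two hits above describe: be128 is a plain pair of big-endian 64-bit halves, and be128_xor() combines two such blocks half by half. The field names mirror the kernel's declarations but are reproduced here as assumptions:

    #include <stdint.h>

    /* Illustrative stand-in for the kernel's be128 (two 64-bit halves,
     * each stored big-endian). */
    typedef struct {
            uint64_t a;     /* most significant half  */
            uint64_t b;     /* least significant half */
    } be128_sketch;

    /* XOR is byte-order agnostic, so the halves can be combined directly. */
    static inline void be128_xor_sketch(be128_sketch *r, const be128_sketch *p,
                                        const be128_sketch *q)
    {
            r->a = p->a ^ q->a;
            r->b = p->b ^ q->b;
    }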
|
D | lrw.h |
      24 be128 mulinc[128];
      31 be128 *tbuf;
|
D | xts.h | 12 be128 *tbuf;
|
/linux-4.4.14/crypto/ |
D | gf128mul.c |
      125 static void gf128mul_x_lle(be128 *r, const be128 *x)   in gf128mul_x_lle()
      135 static void gf128mul_x_bbe(be128 *r, const be128 *x)   in gf128mul_x_bbe()
      145 void gf128mul_x_ble(be128 *r, const be128 *x)   in gf128mul_x_ble()
      156 static void gf128mul_x8_lle(be128 *x)   in gf128mul_x8_lle()
      166 static void gf128mul_x8_bbe(be128 *x)   in gf128mul_x8_bbe()
      176 void gf128mul_lle(be128 *r, const be128 *b)   in gf128mul_lle()
      178 be128 p[8];   in gf128mul_lle()
      214 void gf128mul_bbe(be128 *r, const be128 *b)   in gf128mul_bbe()
      216 be128 p[8];   in gf128mul_bbe()
      266 struct gf128mul_64k *gf128mul_init_64k_lle(const be128 *g)   in gf128mul_init_64k_lle()
      [all …]
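The gf128mul_x_*() hits above are the "multiply by x" primitives in the three bit orderings (lle, bbe, ble). The ble form is the tweak doubling used by XTS; below is a self-contained sketch of the underlying arithmetic, not the kernel's code (which works on be128 fields), assuming the 128-bit value is held as two host-order halves:

    #include <stdint.h>

    /* Double a 128-bit tweak in GF(2^128), little-endian block convention:
     * shift the whole value left by one bit and, if a bit fell off the top,
     * reduce modulo x^128 + x^7 + x^2 + x + 1 (the constant 0x87). */
    static void gf128_double_ble(uint64_t *lo, uint64_t *hi)
    {
            uint64_t carry = *hi >> 63;          /* bit shifted out of x^127 */

            *hi = (*hi << 1) | (*lo >> 63);
            *lo = (*lo << 1) ^ (carry * 0x87);
    }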
|
D | lrw.c |
      50 be128 tmp = { 0 };   in lrw_init_table()
      57 ctx->table = gf128mul_init_64k_bbe((be128 *)tweak);   in lrw_init_table()
      100 be128 t;
      105 static inline void inc(be128 *iv)   in inc()
      121 static inline int get_index128(be128 *block)   in get_index128()
      149 be128 *iv;   in crypt()
      161 iv = (be128 *)w->iv;   in crypt()
      226 be128 *iv, *src, *dst, *t;   in lrw_crypt()
      227 be128 *t_buf = req->tbuf;   in lrw_crypt()
      240 src = (be128 *)walk.src.virt.addr;   in lrw_crypt()
      [all …]
|
D | xts.c |
      81 be128 *t;
      112 s.t = (be128 *)w->iv;   in crypt()
      177 be128 *src, *dst, *t;   in xts_crypt()
      178 be128 *t_buf = req->tbuf;   in xts_crypt()
      191 src = (be128 *)walk.src.virt.addr;   in xts_crypt()
      192 dst = (be128 *)walk.dst.virt.addr;   in xts_crypt()
      225 *(be128 *)walk.iv = *t;   in xts_crypt()
      233 src = (be128 *)walk.src.virt.addr;   in xts_crypt()
      234 dst = (be128 *)walk.dst.virt.addr;   in xts_crypt()
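The xts.c hits trace the standard XTS per-block dance around the tweak t. A hedged sketch of that pattern using the helpers listed in this section; encrypt_block() is a placeholder for the real cipher call, not a kernel function:

    #include <crypto/b128ops.h>
    #include <crypto/gf128mul.h>

    static void xts_one_block(be128 *dst, const be128 *src, be128 *t,
                              void (*encrypt_block)(be128 *))
    {
            be128_xor(dst, src, t);         /* PP = P xor T            */
            encrypt_block(dst);             /* CC = E_K1(PP)           */
            be128_xor(dst, dst, t);         /* C  = CC xor T           */
            gf128mul_x_ble(t, t);           /* T  = T * x, next block  */
    }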
|
D | ghash-generic.c |
      56 ctx->gf128 = gf128mul_init_4k_lle((be128 *)key);   in ghash_setkey()
      84 gf128mul_4k_lle((be128 *)dst, ctx->gf128);   in ghash_update()
      89 gf128mul_4k_lle((be128 *)dst, ctx->gf128);   in ghash_update()
      113 gf128mul_4k_lle((be128 *)dst, ctx->gf128);   in ghash_flush()
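The ghash_update()/ghash_flush() hits show GHASH folding each 16-byte block into the digest through the precomputed lle table. A minimal sketch of that folding step; the XOR half of the step is implied by the surrounding code rather than visible in the listing:

    #include <crypto/b128ops.h>
    #include <crypto/gf128mul.h>

    static void ghash_fold_block(be128 *digest, const be128 *block,
                                 struct gf128mul_4k *h_table)
    {
            be128_xor(digest, digest, block);   /* X = X xor block          */
            gf128mul_4k_lle(digest, h_table);   /* X = X * H over GF(2^128) */
    }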
|
D | gcm.c |
      119 be128 hash;   in crypto_gcm_setkey()
      167 err = crypto_ahash_setkey(ghash, (u8 *)&data->hash, sizeof(be128));   in crypto_gcm_setkey()
|
/linux-4.4.14/arch/x86/crypto/ |
D | twofish_glue_3way.c |
      67 be128 ctrblk;   in twofish_enc_blk_ctr()
      83 be128 ctrblks[3];   in twofish_enc_blk_ctr_3way()
      235 be128 buf[3];   in lrw_encrypt()
      252 be128 buf[3];   in lrw_decrypt()
      303 be128 buf[3];   in xts_encrypt()
      321 be128 buf[3];   in xts_decrypt()
|
D | serpent_sse2_glue.c |
      64 be128 ctrblk;   in serpent_crypt_ctr()
      76 be128 ctrblks[SERPENT_PARALLEL_BLOCKS];   in serpent_crypt_ctr_xway()
      248 be128 buf[SERPENT_PARALLEL_BLOCKS];   in lrw_encrypt()
      274 be128 buf[SERPENT_PARALLEL_BLOCKS];   in lrw_decrypt()
      336 be128 buf[SERPENT_PARALLEL_BLOCKS];   in xts_encrypt()
      363 be128 buf[SERPENT_PARALLEL_BLOCKS];   in xts_decrypt()
|
D | glue_helper.c |
      227 be128_to_le128(&ctrblk, (be128 *)walk->iv);   in glue_ctr_crypt_final_128bit()
      233 le128_to_be128((be128 *)walk->iv, &ctrblk);   in glue_ctr_crypt_final_128bit()
      249 be128_to_le128(&ctrblk, (be128 *)walk->iv);   in __glue_ctr_crypt_128bit()
      271 le128_to_be128((be128 *)walk->iv, &ctrblk);   in __glue_ctr_crypt_128bit()
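The glue_helper.c hits convert the big-endian counter block to le128 before doing arithmetic on it and convert it back afterwards. A sketch of the arithmetic in between, assuming the counter is held as two host-order halves once converted:

    #include <stdint.h>

    struct ctr128 {
            uint64_t lo;
            uint64_t hi;
    };

    /* Increment a 128-bit counter with carry from the low half into the
     * high half; this is the kind of step the CTR glue performs between
     * be128_to_le128() and le128_to_be128(). */
    static void ctr128_inc(struct ctr128 *c)
    {
            if (++c->lo == 0)
                    c->hi++;
    }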
|
D | camellia_glue.c |
      1294 be128 ctrblk;   in camellia_crypt_ctr()
      1308 be128 ctrblks[2];   in camellia_crypt_ctr_2way()
      1461 be128 buf[2 * 4];   in lrw_encrypt()
      1478 be128 buf[2 * 4];   in lrw_decrypt()
      1529 be128 buf[2 * 4];   in xts_encrypt()
      1547 be128 buf[2 * 4];   in xts_decrypt()
|
D | ghash-clmulni-intel_glue.c | 59 be128 *x = (be128 *)key; in ghash_setkey()
|
D | cast6_avx_glue.c |
      73 be128 ctrblk;   in cast6_crypt_ctr()
      265 be128 buf[CAST6_PARALLEL_BLOCKS];   in lrw_encrypt()
      291 be128 buf[CAST6_PARALLEL_BLOCKS];   in lrw_decrypt()
|
D | serpent_avx_glue.c |
      70 be128 ctrblk;   in __serpent_crypt_ctr()
      275 be128 buf[SERPENT_PARALLEL_BLOCKS];   in lrw_encrypt()
      301 be128 buf[SERPENT_PARALLEL_BLOCKS];   in lrw_decrypt()
|
D | serpent_avx2_glue.c |
      239 be128 buf[SERPENT_AVX2_PARALLEL_BLOCKS];   in lrw_encrypt()
      265 be128 buf[SERPENT_AVX2_PARALLEL_BLOCKS];   in lrw_decrypt()
|
D | twofish_avx_glue.c |
      269 be128 buf[TWOFISH_PARALLEL_BLOCKS];   in lrw_encrypt()
      295 be128 buf[TWOFISH_PARALLEL_BLOCKS];   in lrw_decrypt()
|
D | camellia_aesni_avx_glue.c |
      265 be128 buf[CAMELLIA_AESNI_PARALLEL_BLOCKS];   in lrw_encrypt()
      291 be128 buf[CAMELLIA_AESNI_PARALLEL_BLOCKS];   in lrw_decrypt()
|
D | camellia_aesni_avx2_glue.c |
      273 be128 buf[CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS];   in lrw_encrypt()
      299 be128 buf[CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS];   in lrw_decrypt()
|
D | aesni-intel_glue.c |
      594 be128 buf[8];   in lrw_encrypt()
      618 be128 buf[8];   in lrw_decrypt()
      745 be128 buf[8];   in xts_encrypt()
      770 be128 buf[8];   in xts_decrypt()
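Every lrw_encrypt()/lrw_decrypt()/xts_encrypt()/xts_decrypt() hit in the x86 glue files above is the same pattern: a small on-stack be128 scratch array handed to the generic LRW/XTS walker through the request's tbuf pointer. A hedged sketch of that shape; only tbuf is confirmed by the headers listed earlier, while the tbuflen field, the xts_crypt() signature and the omitted callbacks are assumptions for illustration:

    #include <linux/crypto.h>
    #include <linux/scatterlist.h>
    #include <crypto/xts.h>

    static int example_xts_encrypt(struct blkcipher_desc *desc,
                                   struct scatterlist *dst,
                                   struct scatterlist *src,
                                   unsigned int nbytes)
    {
            be128 buf[8];                    /* scratch tweaks, 8 blocks per pass */
            struct xts_crypt_req req = {
                    .tbuf = buf,             /* confirmed field (include/crypto/xts.h) */
                    .tbuflen = sizeof(buf),  /* assumed companion length field */
                    /* tweak/crypt contexts and callbacks omitted in this sketch */
            };

            return xts_crypt(desc, dst, src, nbytes, &req);
    }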
|
/linux-4.4.14/arch/x86/include/asm/crypto/ |
D | glue_helper.h |
      78 static inline void le128_to_be128(be128 *dst, const le128 *src)   in le128_to_be128()
      84 static inline void be128_to_le128(le128 *dst, const be128 *src)   in be128_to_le128()
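The two helpers above just swap each 64-bit half between the big-endian be128 layout (used by the generic code and on the wire) and the little-endian le128 layout the x86 assembly prefers. A user-space mirror of the idea, assuming a little-endian host and illustrative struct layouts:

    #include <stdint.h>

    typedef struct { uint64_t a, b; } be128_demo;   /* halves stored big-endian    */
    typedef struct { uint64_t a, b; } le128_demo;   /* halves stored little-endian */

    static inline void le128_to_be128_demo(be128_demo *dst, const le128_demo *src)
    {
            /* on a little-endian host the le64 halves are already in host
             * order, so each half only needs one byte swap */
            dst->a = __builtin_bswap64(src->a);
            dst->b = __builtin_bswap64(src->b);
    }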
|