/linux-4.1.27/arch/arm/crypto/

  aesbs-glue.c
      19  #define BIT_SLICED_KEY_MAXSIZE (128 * (AES_MAXNR - 1) + 2 * AES_BLOCK_SIZE)
     116  u32 blocks = walk.nbytes / AES_BLOCK_SIZE;    in aesbs_cbc_encrypt()
     123  crypto_xor(src, iv, AES_BLOCK_SIZE);    in aesbs_cbc_encrypt()
     126  src += AES_BLOCK_SIZE;    in aesbs_cbc_encrypt()
     128  memcpy(walk.iv, iv, AES_BLOCK_SIZE);    in aesbs_cbc_encrypt()
     133  crypto_xor(walk.iv, src, AES_BLOCK_SIZE);    in aesbs_cbc_encrypt()
     135  memcpy(walk.iv, dst, AES_BLOCK_SIZE);    in aesbs_cbc_encrypt()
     136  src += AES_BLOCK_SIZE;    in aesbs_cbc_encrypt()
     137  dst += AES_BLOCK_SIZE;    in aesbs_cbc_encrypt()
     140  err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);    in aesbs_cbc_encrypt()
     [all …]
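The aesbs_cbc_encrypt() hits above show the textbook CBC chaining loop: XOR the plaintext block into the running IV, encrypt, and carry the ciphertext forward as the IV for the next block. A minimal sketch of that pattern in plain C, assuming a hypothetical one-block primitive aes_encrypt_block() in place of the bit-sliced NEON routine:

    #include <string.h>

    #define AES_BLOCK_SIZE 16

    /* Hypothetical one-block ECB primitive; stands in for the NEON code. */
    void aes_encrypt_block(const void *key, unsigned char *dst,
                           const unsigned char *src);

    static void cbc_encrypt_blocks(const void *key, unsigned char *iv,
                                   const unsigned char *src, unsigned char *dst,
                                   unsigned int blocks)
    {
            while (blocks--) {
                    unsigned int i;

                    /* the crypto_xor() step in the kernel code */
                    for (i = 0; i < AES_BLOCK_SIZE; i++)
                            iv[i] ^= src[i];
                    aes_encrypt_block(key, dst, iv);
                    /* the ciphertext becomes the IV for the next block */
                    memcpy(iv, dst, AES_BLOCK_SIZE);
                    src += AES_BLOCK_SIZE;
                    dst += AES_BLOCK_SIZE;
            }
    }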
  aes-ce-glue.c
      48  u8 b[AES_BLOCK_SIZE];
     179  while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {    in ecb_encrypt()
     183  walk.nbytes % AES_BLOCK_SIZE);    in ecb_encrypt()
     202  while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {    in ecb_decrypt()
     206  walk.nbytes % AES_BLOCK_SIZE);    in ecb_decrypt()
     225  while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {    in cbc_encrypt()
     230  walk.nbytes % AES_BLOCK_SIZE);    in cbc_encrypt()
     249  while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {    in cbc_decrypt()
     254  walk.nbytes % AES_BLOCK_SIZE);    in cbc_decrypt()
     269  err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);    in ctr_encrypt()
     [all …]
  aes_glue.c
      66  .cra_blocksize = AES_BLOCK_SIZE,
/linux-4.1.27/net/mac80211/

  aes_cmac.c
      31  for (i = 0; i < AES_BLOCK_SIZE - 1; i++)    in gf_mulx()
      33  pad[AES_BLOCK_SIZE - 1] <<= 1;    in gf_mulx()
      35  pad[AES_BLOCK_SIZE - 1] ^= 0x87;    in gf_mulx()
      42  u8 cbc[AES_BLOCK_SIZE], pad[AES_BLOCK_SIZE];    in aes_cmac_vector()
      46  memset(cbc, 0, AES_BLOCK_SIZE);    in aes_cmac_vector()
      57  while (left >= AES_BLOCK_SIZE) {    in aes_cmac_vector()
      58  for (i = 0; i < AES_BLOCK_SIZE; i++) {    in aes_cmac_vector()
      66  if (left > AES_BLOCK_SIZE)    in aes_cmac_vector()
      68  left -= AES_BLOCK_SIZE;    in aes_cmac_vector()
      71  memset(pad, 0, AES_BLOCK_SIZE);    in aes_cmac_vector()
      [all …]
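The gf_mulx() lines are the GF(2^128) doubling that derives the CMAC subkeys (RFC 4493): shift the 16-byte block left by one bit and, if the top bit fell off, fold the reduction constant 0x87 into the last byte. A self-contained reconstruction consistent with the fragments above:

    typedef unsigned char u8;

    #define AES_BLOCK_SIZE 16

    static void gf_mulx(u8 pad[AES_BLOCK_SIZE])
    {
            int i, carry = pad[0] & 0x80;   /* remember the bit shifted out */

            for (i = 0; i < AES_BLOCK_SIZE - 1; i++)
                    pad[i] = (pad[i] << 1) | (pad[i + 1] >> 7);
            pad[AES_BLOCK_SIZE - 1] <<= 1;
            if (carry)
                    pad[AES_BLOCK_SIZE - 1] ^= 0x87;  /* reduction polynomial */
    }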
  aes_gmac.c
      32  u8 zero[GMAC_MIC_LEN], iv[AES_BLOCK_SIZE];    in ieee80211_aes_gmac()
      47  iv[AES_BLOCK_SIZE - 1] = 0x01;    in ieee80211_aes_gmac()
  wpa.c
     408  u8 aad[2 * AES_BLOCK_SIZE];    in ccmp_encrypt_skb()
     409  u8 b_0[AES_BLOCK_SIZE];    in ccmp_encrypt_skb()
     529  u8 aad[2 * AES_BLOCK_SIZE];    in ieee80211_crypto_ccmp_decrypt()
     530  u8 b_0[AES_BLOCK_SIZE];    in ieee80211_crypto_ccmp_decrypt()
     563  j_0[AES_BLOCK_SIZE - 1] = 0x01;    in gcmp_special_blocks()
     633  u8 aad[2 * AES_BLOCK_SIZE];    in gcmp_encrypt_skb()
     634  u8 j_0[AES_BLOCK_SIZE];    in gcmp_encrypt_skb()
     752  u8 aad[2 * AES_BLOCK_SIZE];    in ieee80211_crypto_gcmp_decrypt()
     753  u8 j_0[AES_BLOCK_SIZE];    in ieee80211_crypto_gcmp_decrypt()
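gcmp_special_blocks() setting the last byte of j_0 to 0x01 is the GCM J_0 construction for a 96-bit nonce (NIST SP 800-38D): the nonce fills the first 12 bytes and the trailing 32-bit block counter starts at 1. As a hypothetical helper:

    #include <string.h>

    #define AES_BLOCK_SIZE 16

    static void gcm_make_j0(unsigned char j_0[AES_BLOCK_SIZE],
                            const unsigned char nonce[12])
    {
            memcpy(j_0, nonce, 12);           /* 96-bit IV */
            memset(j_0 + 12, 0, 3);           /* high bytes of the counter */
            j_0[AES_BLOCK_SIZE - 1] = 0x01;   /* counter starts at 1 */
    }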
/linux-4.1.27/arch/arm64/crypto/

  aes-glue.c
     112  for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {    in ecb_encrypt()
     115  err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);    in ecb_encrypt()
     134  for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {    in ecb_decrypt()
     137  err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);    in ecb_decrypt()
     156  for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {    in cbc_encrypt()
     160  err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);    in cbc_encrypt()
     179  for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {    in cbc_decrypt()
     183  err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);    in cbc_decrypt()
     199  err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);    in ctr_encrypt()
     203  while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {    in ctr_encrypt()
     [all …]
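Every glue file in this directory repeats the same idiom of the 4.1-era blkcipher walk API: process the whole blocks the walker exposes in each chunk, then hand the sub-block remainder back through blkcipher_walk_done() so it carries into the next iteration. The shape, stripped of mode-specific detail (do_blocks() is a placeholder for the per-mode assembler routine, and error handling is elided):

    static int walk_shape(struct blkcipher_desc *desc, struct scatterlist *dst,
                          struct scatterlist *src, unsigned int nbytes)
    {
            struct blkcipher_walk walk;
            unsigned int blocks;
            int err;

            blkcipher_walk_init(&walk, dst, src, nbytes);
            err = blkcipher_walk_virt(desc, &walk);

            while ((blocks = walk.nbytes / AES_BLOCK_SIZE)) {
                    /* per-mode work on 'blocks' whole blocks */
                    do_blocks(walk.dst.virt.addr, walk.src.virt.addr, blocks);
                    /* report the unprocessed tail; the walker carries it over */
                    err = blkcipher_walk_done(desc, &walk,
                                              walk.nbytes % AES_BLOCK_SIZE);
            }
            return err;
    }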
  aes-ce-ccm-glue.c
      71  __be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];    in ccm_init_mac()
      89  memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);    in ccm_init_mac()
     102  memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);    in ccm_init_mac()
     154  u8 __aligned(8) mac[AES_BLOCK_SIZE];    in ccm_encrypt()
     155  u8 buf[AES_BLOCK_SIZE];    in ccm_encrypt()
     169  memcpy(buf, req->iv, AES_BLOCK_SIZE);    in ccm_encrypt()
     173  AES_BLOCK_SIZE);    in ccm_encrypt()
     176  u32 tail = walk.nbytes % AES_BLOCK_SIZE;    in ccm_encrypt()
     210  u8 __aligned(8) mac[AES_BLOCK_SIZE];    in ccm_decrypt()
     211  u8 buf[AES_BLOCK_SIZE];    in ccm_decrypt()
     [all …]
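ccm_init_mac() is assembling the CCM B_0 block from RFC 3610: one flags byte, a 15 - L byte nonce, and the message length written big-endian into the trailing L bytes; zeroing those same trailing IV bytes afterwards turns the layout into the initial counter block A_0. A hypothetical helper showing the layout (not the arm64 entry point itself):

    #include <string.h>

    #define AES_BLOCK_SIZE 16

    /* Build CCM B_0: 'flags' must already encode the MAC and length-field
     * sizes, 'nonce' is 15 - l_len bytes, and the message length fills the
     * last l_len bytes in big-endian order. */
    static void ccm_make_b0(unsigned char b0[AES_BLOCK_SIZE],
                            unsigned char flags, const unsigned char *nonce,
                            unsigned int l_len, unsigned long long msglen)
    {
            unsigned int i;

            b0[0] = flags;
            memcpy(b0 + 1, nonce, 15 - l_len);
            for (i = 0; i < l_len; i++)
                    b0[AES_BLOCK_SIZE - 1 - i] = (unsigned char)(msglen >> (8 * i));
    }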
  aes-ce-cipher.c
      24  u8 b[AES_BLOCK_SIZE];
     242  .cra_blocksize = AES_BLOCK_SIZE,
/linux-4.1.27/drivers/crypto/nx/

  nx-aes-xcbc.c
      35  u8 state[AES_BLOCK_SIZE];
      37  u8 buffer[AES_BLOCK_SIZE];
      76  u8 keys[2][AES_BLOCK_SIZE];    in nx_xcbc_empty()
      83  memcpy(key, csbcpb->cpb.aes_xcbc.key, AES_BLOCK_SIZE);    in nx_xcbc_empty()
      84  memcpy(csbcpb->cpb.aes_ecb.key, key, AES_BLOCK_SIZE);    in nx_xcbc_empty()
     120  memcpy(csbcpb->cpb.aes_ecb.key, keys[0], AES_BLOCK_SIZE);    in nx_xcbc_empty()
     127  len = AES_BLOCK_SIZE;    in nx_xcbc_empty()
     131  if (len != AES_BLOCK_SIZE)    in nx_xcbc_empty()
     146  memcpy(csbcpb->cpb.aes_xcbc.key, key, AES_BLOCK_SIZE);    in nx_xcbc_empty()
     203  if (total <= AES_BLOCK_SIZE) {    in nx_xcbc_update()
     [all …]
  nx-aes-gcm.c
     136  if (nbytes <= AES_BLOCK_SIZE) {    in nx_gca()
     179  AES_BLOCK_SIZE);    in nx_gca()
     188  memcpy(out, csbcpb_aead->cpb.aes_gca.out_pat, AES_BLOCK_SIZE);    in nx_gca()
     215  memcpy(csbcpb->cpb.aes_gcm.iv_or_cnt, desc->info, AES_BLOCK_SIZE);    in gmac()
     247  csbcpb->cpb.aes_gcm.out_pat_or_mac, AES_BLOCK_SIZE);    in gmac()
     249  csbcpb->cpb.aes_gcm.out_s0, AES_BLOCK_SIZE);    in gmac()
     271  char out[AES_BLOCK_SIZE];    in gcm_empty()
     289  len = AES_BLOCK_SIZE;    in gcm_empty()
     295  if (len != AES_BLOCK_SIZE)    in gcm_empty()
     398  memcpy(desc.info, csbcpb->cpb.aes_gcm.out_cnt, AES_BLOCK_SIZE);    in gcm_aes_nx_crypt()
     [all …]
  nx-aes-ccm.c
     326  AES_BLOCK_SIZE);    in generate_pat()
     340  memcpy(out, result, AES_BLOCK_SIZE);    in generate_pat()
     399  memcpy(desc->info, csbcpb->cpb.aes_ccm.out_ctr, AES_BLOCK_SIZE);    in ccm_nx_decrypt()
     401  csbcpb->cpb.aes_ccm.out_pat_or_mac, AES_BLOCK_SIZE);    in ccm_nx_decrypt()
     403  csbcpb->cpb.aes_ccm.out_s0, AES_BLOCK_SIZE);    in ccm_nx_decrypt()
     467  memcpy(desc->info, csbcpb->cpb.aes_ccm.out_ctr, AES_BLOCK_SIZE);    in ccm_nx_encrypt()
     469  csbcpb->cpb.aes_ccm.out_pat_or_mac, AES_BLOCK_SIZE);    in ccm_nx_encrypt()
     471  csbcpb->cpb.aes_ccm.out_s0, AES_BLOCK_SIZE);    in ccm_nx_encrypt()
     576  .ivsize = AES_BLOCK_SIZE,
     577  .maxauthsize = AES_BLOCK_SIZE,
     [all …]
  nx-aes-cbc.c
     102  memcpy(desc->info, csbcpb->cpb.aes_cbc.cv, AES_BLOCK_SIZE);    in cbc_aes_nx_crypt()
     135  .cra_blocksize = AES_BLOCK_SIZE,
     145  .ivsize = AES_BLOCK_SIZE,
  nx-aes-ctr.c
     115  memcpy(desc->info, csbcpb->cpb.aes_cbc.cv, AES_BLOCK_SIZE);    in ctr_aes_nx_crypt()
     161  .ivsize = AES_BLOCK_SIZE,
  nx.c
     246  data_back = (abs(oplen) / AES_BLOCK_SIZE) * sg->len;    in trim_sg_list()
     247  data_back = *nbytes - (data_back & ~(AES_BLOCK_SIZE - 1));    in trim_sg_list()
     291  memcpy(iv, desc->info, AES_BLOCK_SIZE);    in nx_build_sg_lists()
     301  delta = *nbytes - (*nbytes & ~(AES_BLOCK_SIZE - 1));    in nx_build_sg_lists()
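trim_sg_list() and nx_build_sg_lists() lean on AES_BLOCK_SIZE being a power of two: ANDing a byte count with ~(AES_BLOCK_SIZE - 1) clears the low four bits and rounds it down to a whole number of blocks, so the subtraction that follows leaves exactly the partial-block tail. For example:

    #define AES_BLOCK_SIZE 16

    unsigned int nbytes = 70;
    unsigned int whole = nbytes & ~(AES_BLOCK_SIZE - 1);  /* 70 -> 64 */
    unsigned int delta = nbytes - whole;                  /* 6, i.e. 70 % 16 */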
  nx-aes-ecb.c
     135  .cra_blocksize = AES_BLOCK_SIZE,
/linux-4.1.27/arch/s390/crypto/

  aes_s390.c
     147  AES_BLOCK_SIZE);    in aes_encrypt()
     151  AES_BLOCK_SIZE);    in aes_encrypt()
     155  AES_BLOCK_SIZE);    in aes_encrypt()
     172  AES_BLOCK_SIZE);    in aes_decrypt()
     176  AES_BLOCK_SIZE);    in aes_decrypt()
     180  AES_BLOCK_SIZE);    in aes_decrypt()
     216  .cra_blocksize = AES_BLOCK_SIZE,
     323  unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1);    in ecb_aes_crypt()
     331  nbytes &= AES_BLOCK_SIZE - 1;    in ecb_aes_crypt()
     397  .cra_blocksize = AES_BLOCK_SIZE,
     [all …]
/linux-4.1.27/drivers/crypto/ccp/

  ccp-crypto.h
      91  u8 k1[AES_BLOCK_SIZE];
      92  u8 k2[AES_BLOCK_SIZE];
      97  u8 iv[AES_BLOCK_SIZE];
     101  u8 rfc3686_iv[AES_BLOCK_SIZE];
     119  u8 iv[AES_BLOCK_SIZE];
     123  u8 buf[AES_BLOCK_SIZE];
     127  u8 pad[AES_BLOCK_SIZE];
     135  u8 iv[AES_BLOCK_SIZE];
     138  u8 buf[AES_BLOCK_SIZE];
  ccp-crypto-aes.c
      35  memcpy(req->info, rctx->iv, AES_BLOCK_SIZE);    in ccp_aes_complete()
      84  (req->nbytes & (AES_BLOCK_SIZE - 1)))    in ccp_aes_crypt()
      91  memcpy(rctx->iv, req->info, AES_BLOCK_SIZE);    in ccp_aes_crypt()
      93  iv_len = AES_BLOCK_SIZE;    in ccp_aes_crypt()
     223  .cra_blocksize = AES_BLOCK_SIZE,
     274  .blocksize = AES_BLOCK_SIZE,
     282  .blocksize = AES_BLOCK_SIZE,
     283  .ivsize = AES_BLOCK_SIZE,
     290  .blocksize = AES_BLOCK_SIZE,
     291  .ivsize = AES_BLOCK_SIZE,
     [all …]
  ccp-crypto-aes-xts.c
      92  memcpy(req->info, rctx->iv, AES_BLOCK_SIZE);    in ccp_aes_xts_complete()
     131  if (req->nbytes & (AES_BLOCK_SIZE - 1))    in ccp_aes_xts_crypt()
     160  memcpy(rctx->iv, req->info, AES_BLOCK_SIZE);    in ccp_aes_xts_crypt()
     161  sg_init_one(&rctx->iv_sg, rctx->iv, AES_BLOCK_SIZE);    in ccp_aes_xts_crypt()
     172  rctx->cmd.u.xts.iv_len = AES_BLOCK_SIZE;    in ccp_aes_xts_crypt()
     246  alg->cra_blocksize = AES_BLOCK_SIZE;    in ccp_register_aes_xts_alg()
     255  alg->cra_ablkcipher.ivsize = AES_BLOCK_SIZE;    in ccp_register_aes_xts_alg()
  ccp-ops.c
     894  if (aes->src_len & (AES_BLOCK_SIZE - 1))    in ccp_run_aes_cmac_cmd()
     897  if (aes->iv_len != AES_BLOCK_SIZE)    in ccp_run_aes_cmac_cmd()
     904  if (aes->cmac_key_len != AES_BLOCK_SIZE)    in ccp_run_aes_cmac_cmd()
     955  dm_offset = CCP_KSB_BYTES - AES_BLOCK_SIZE;    in ccp_run_aes_cmac_cmd()
     966  AES_BLOCK_SIZE, DMA_TO_DEVICE);    in ccp_run_aes_cmac_cmd()
     971  ccp_prepare_data(&src, NULL, &op, AES_BLOCK_SIZE, true);    in ccp_run_aes_cmac_cmd()
    1014  dm_offset = CCP_KSB_BYTES - AES_BLOCK_SIZE;    in ccp_run_aes_cmac_cmd()
    1050  (aes->src_len & (AES_BLOCK_SIZE - 1)))    in ccp_run_aes_cmd()
    1057  if (aes->iv_len != AES_BLOCK_SIZE)    in ccp_run_aes_cmd()
    1110  dm_offset = CCP_KSB_BYTES - AES_BLOCK_SIZE;    in ccp_run_aes_cmd()
    [all …]
  ccp-crypto-aes-cmac.c
     155  rctx->cmd.u.aes.iv_len = AES_BLOCK_SIZE;    in ccp_do_cmac_update()
     378  halg->digestsize = AES_BLOCK_SIZE;    in ccp_register_aes_cmac_algs()
     387  base->cra_blocksize = AES_BLOCK_SIZE;    in ccp_register_aes_cmac_algs()
/linux-4.1.27/drivers/crypto/

  padlock-aes.c
      33  #define ecb_fetch_bytes (ecb_fetch_blocks * AES_BLOCK_SIZE)
      37  #define cbc_fetch_bytes (cbc_fetch_blocks * AES_BLOCK_SIZE)
     214  u8 buf[AES_BLOCK_SIZE * (MAX_ECB_FETCH_BLOCKS - 1) + PADLOCK_ALIGNMENT - 1];    in ecb_crypt_copy()
     217  memcpy(tmp, in, count * AES_BLOCK_SIZE);    in ecb_crypt_copy()
     228  u8 buf[AES_BLOCK_SIZE * (MAX_CBC_FETCH_BLOCKS - 1) + PADLOCK_ALIGNMENT - 1];    in cbc_crypt_copy()
     231  memcpy(tmp, in, count * AES_BLOCK_SIZE);    in cbc_crypt_copy()
     327  .cra_blocksize = AES_BLOCK_SIZE,
     360  nbytes / AES_BLOCK_SIZE);    in ecb_aes_encrypt()
     361  nbytes &= AES_BLOCK_SIZE - 1;    in ecb_aes_encrypt()
     389  nbytes / AES_BLOCK_SIZE);    in ecb_aes_decrypt()
     [all …]
  geode-aes.c
     229  op->len = AES_BLOCK_SIZE;    in geode_encrypt()
     250  op->len = AES_BLOCK_SIZE;    in geode_decrypt()
     289  .cra_blocksize = AES_BLOCK_SIZE,
     323  op->len = nbytes - (nbytes % AES_BLOCK_SIZE);    in geode_cbc_decrypt()
     355  op->len = nbytes - (nbytes % AES_BLOCK_SIZE);    in geode_cbc_encrypt()
     399  .cra_blocksize = AES_BLOCK_SIZE,
     411  .ivsize = AES_BLOCK_SIZE,
     435  op->len = nbytes - (nbytes % AES_BLOCK_SIZE);    in geode_ecb_decrypt()
     465  op->len = nbytes - (nbytes % AES_BLOCK_SIZE);    in geode_ecb_encrypt()
     485  .cra_blocksize = AES_BLOCK_SIZE,
  atmel-aes.c
     658  dd->buflen &= ~(AES_BLOCK_SIZE - 1);    in atmel_aes_buff_init()
     738  if (!IS_ALIGNED(req->nbytes, AES_BLOCK_SIZE)) {    in atmel_aes_crypt()
     742  ctx->block_size = AES_BLOCK_SIZE;    in atmel_aes_crypt()
     965  .cra_blocksize = AES_BLOCK_SIZE,
     985  .cra_blocksize = AES_BLOCK_SIZE,
     995  .ivsize = AES_BLOCK_SIZE,
    1006  .cra_blocksize = AES_BLOCK_SIZE,
    1016  .ivsize = AES_BLOCK_SIZE,
    1027  .cra_blocksize = AES_BLOCK_SIZE,
    1037  .ivsize = AES_BLOCK_SIZE,
    [all …]
  s5p-sss.c
     263  if (!IS_ALIGNED(sg_dma_len(sg), AES_BLOCK_SIZE)) {    in s5p_set_outdata()
     289  if (!IS_ALIGNED(sg_dma_len(sg), AES_BLOCK_SIZE)) {    in s5p_set_indata()
     538  if (!IS_ALIGNED(req->nbytes, AES_BLOCK_SIZE)) {    in s5p_aes_crypt()
     603  .cra_blocksize = AES_BLOCK_SIZE,
     624  .cra_blocksize = AES_BLOCK_SIZE,
     633  .ivsize = AES_BLOCK_SIZE,
  omap-aes.c
      97  #define AES_BLOCK_WORDS (AES_BLOCK_SIZE >> 2)
     564  if (!IS_ALIGNED(sg->length, AES_BLOCK_SIZE))    in omap_aes_check_aligned()
     729  if (!IS_ALIGNED(req->nbytes, AES_BLOCK_SIZE)) {    in omap_aes_crypt()
     840  .cra_blocksize = AES_BLOCK_SIZE,
     862  .cra_blocksize = AES_BLOCK_SIZE,
     872  .ivsize = AES_BLOCK_SIZE,
     888  .cra_blocksize = AES_BLOCK_SIZE,
     899  .ivsize = AES_BLOCK_SIZE,
    1049  dd->total -= AES_BLOCK_SIZE;    in omap_aes_irq()
  ixp4xx_crypto.c
    1268  .cra_blocksize = AES_BLOCK_SIZE,
    1272  .ivsize = AES_BLOCK_SIZE,
    1282  .cra_blocksize = AES_BLOCK_SIZE,
    1294  .cra_blocksize = AES_BLOCK_SIZE,
    1298  .ivsize = AES_BLOCK_SIZE,
    1308  .cra_blocksize = AES_BLOCK_SIZE,
    1312  .ivsize = AES_BLOCK_SIZE,
    1376  .cra_blocksize = AES_BLOCK_SIZE,
    1378  .ivsize = AES_BLOCK_SIZE,
    1389  .cra_blocksize = AES_BLOCK_SIZE,
    [all …]
  picoxcell_crypto.c
     177  u8 salt[AES_BLOCK_SIZE];
    1312  .cra_blocksize = AES_BLOCK_SIZE,
    1322  .ivsize = AES_BLOCK_SIZE,
    1339  .cra_blocksize = AES_BLOCK_SIZE,
    1472  .cra_blocksize = AES_BLOCK_SIZE,
    1482  .ivsize = AES_BLOCK_SIZE,
    1502  .cra_blocksize = AES_BLOCK_SIZE,
    1512  .ivsize = AES_BLOCK_SIZE,
    1531  .cra_blocksize = AES_BLOCK_SIZE,
    1541  .ivsize = AES_BLOCK_SIZE,
  talitos.c
    1924  .cra_blocksize = AES_BLOCK_SIZE,
    1927  .ivsize = AES_BLOCK_SIZE,
    1963  .cra_blocksize = AES_BLOCK_SIZE,
    1966  .ivsize = AES_BLOCK_SIZE,
    2002  .cra_blocksize = AES_BLOCK_SIZE,
    2005  .ivsize = AES_BLOCK_SIZE,
    2041  .cra_blocksize = AES_BLOCK_SIZE,
    2044  .ivsize = AES_BLOCK_SIZE,
    2080  .cra_blocksize = AES_BLOCK_SIZE,
    2083  .ivsize = AES_BLOCK_SIZE,
    [all …]
  sahara.c
     490  dev->hw_desc[idx]->len1 = AES_BLOCK_SIZE;    in sahara_hw_descriptor_create()
     673  if (!IS_ALIGNED(req->nbytes, AES_BLOCK_SIZE)) {    in sahara_aes_crypt()
    1279  .cra_blocksize = AES_BLOCK_SIZE,
    1299  .cra_blocksize = AES_BLOCK_SIZE,
    1309  .ivsize = AES_BLOCK_SIZE,
  mxs-dcp.c
     790  .cra_blocksize = AES_BLOCK_SIZE,
     813  .cra_blocksize = AES_BLOCK_SIZE,
     824  .ivsize = AES_BLOCK_SIZE,
  n2_core.c
    1223  .block_size = AES_BLOCK_SIZE,
    1236  .block_size = AES_BLOCK_SIZE,
    1240  .ivsize = AES_BLOCK_SIZE,
    1250  .block_size = AES_BLOCK_SIZE,
    1254  .ivsize = AES_BLOCK_SIZE,
  mv_cesa.c
     955  .cra_blocksize = AES_BLOCK_SIZE,
     963  .ivsize = AES_BLOCK_SIZE,
/linux-4.1.27/drivers/crypto/vmx/

  aes_ctr.c
      94  u8 keystream[AES_BLOCK_SIZE];    in p8_aes_ctr_final()
     107  crypto_inc(ctrblk, AES_BLOCK_SIZE);    in p8_aes_ctr_final()
     128  ret = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);    in p8_aes_ctr_crypt()
     129  while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {    in p8_aes_ctr_crypt()
     134  (nbytes & AES_BLOCK_MASK)/AES_BLOCK_SIZE, &ctx->enc_key, walk.iv);    in p8_aes_ctr_crypt()
     137  crypto_inc(walk.iv, AES_BLOCK_SIZE);    in p8_aes_ctr_crypt()
     138  nbytes &= AES_BLOCK_SIZE - 1;    in p8_aes_ctr_crypt()
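p8_aes_ctr_final() is the standard CTR tail: once fewer than AES_BLOCK_SIZE bytes remain, encrypt the counter block once and XOR only the leftover bytes with that keystream. A generic sketch, with aes_encrypt_block() again standing in as a hypothetical one-block primitive rather than the VMX routine:

    #define AES_BLOCK_SIZE 16

    /* Hypothetical one-block primitive. */
    void aes_encrypt_block(const void *key, unsigned char *dst,
                           const unsigned char *src);

    static void ctr_final(const void *key, const unsigned char *ctrblk,
                          const unsigned char *src, unsigned char *dst,
                          unsigned int nbytes /* < AES_BLOCK_SIZE */)
    {
            unsigned char keystream[AES_BLOCK_SIZE];
            unsigned int i;

            /* one keystream block from the current counter value */
            aes_encrypt_block(key, keystream, ctrblk);
            for (i = 0; i < nbytes; i++)
                    dst[i] = src[i] ^ keystream[i];
    }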
  aes_cbc.c
     119  nbytes &= AES_BLOCK_SIZE - 1;    in p8_aes_cbc_encrypt()
     155  nbytes &= AES_BLOCK_SIZE - 1;    in p8_aes_cbc_decrypt()
     174  .cra_blocksize = AES_BLOCK_SIZE,
  aesp8-ppc.h
       4  #define AES_BLOCK_MASK (~(AES_BLOCK_SIZE-1))
  aes.c
     130  .cra_blocksize = AES_BLOCK_SIZE,
/linux-4.1.27/arch/powerpc/crypto/

  aes-spe-glue.c
     193  nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1);    in ppc_ecb_encrypt()
     221  nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1);    in ppc_ecb_decrypt()
     249  nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1);    in ppc_cbc_encrypt()
     277  nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1);    in ppc_cbc_decrypt()
     301  err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);    in ppc_ctr_crypt()
     306  nbytes : pbytes & ~(AES_BLOCK_SIZE - 1);    in ppc_ctr_crypt()
     337  nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1);    in ppc_xts_encrypt()
     368  nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1);    in ppc_xts_decrypt()
     396  .cra_blocksize = AES_BLOCK_SIZE,
     414  .cra_blocksize = AES_BLOCK_SIZE,
     [all …]
/linux-4.1.27/arch/sparc/crypto/

  aes_glue.c
     213  #define AES_BLOCK_MASK (~(AES_BLOCK_SIZE-1))
     237  nbytes &= AES_BLOCK_SIZE - 1;    in ecb_encrypt()
     267  nbytes &= AES_BLOCK_SIZE - 1;    in ecb_decrypt()
     297  nbytes &= AES_BLOCK_SIZE - 1;    in cbc_encrypt()
     328  nbytes &= AES_BLOCK_SIZE - 1;    in cbc_decrypt()
     340  u64 keystream[AES_BLOCK_SIZE / sizeof(u64)];    in ctr_crypt_final()
     346  keystream, AES_BLOCK_SIZE);    in ctr_crypt_final()
     349  crypto_inc(ctrblk, AES_BLOCK_SIZE);    in ctr_crypt_final()
     361  err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);    in ctr_crypt()
     365  while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {    in ctr_crypt()
     [all …]
/linux-4.1.27/arch/x86/crypto/

  aesni-intel_glue.c
      70  #define AES_BLOCK_MASK (~(AES_BLOCK_SIZE-1))
     389  nbytes &= AES_BLOCK_SIZE - 1;    in ecb_encrypt()
     413  nbytes &= AES_BLOCK_SIZE - 1;    in ecb_decrypt()
     437  nbytes &= AES_BLOCK_SIZE - 1;    in cbc_encrypt()
     461  nbytes &= AES_BLOCK_SIZE - 1;    in cbc_decrypt()
     474  u8 keystream[AES_BLOCK_SIZE];    in ctr_crypt_final()
     482  crypto_inc(ctrblk, AES_BLOCK_SIZE);    in ctr_crypt_final()
     513  err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);    in ctr_crypt()
     517  while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {    in ctr_crypt()
     520  nbytes &= AES_BLOCK_SIZE - 1;    in ctr_crypt()
     [all …]
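ctr_crypt() advances the counter with crypto_inc(), which treats the 16-byte block as a single big-endian integer. The equivalent logic, spelled out:

    #define AES_BLOCK_SIZE 16

    /* Big-endian increment of the counter block, as crypto_inc() does:
     * bump the last byte and ripple the carry while bytes wrap to zero. */
    static void ctr_increment(unsigned char ctrblk[AES_BLOCK_SIZE])
    {
            int i;

            for (i = AES_BLOCK_SIZE - 1; i >= 0; i--)
                    if (++ctrblk[i])
                            break;
    }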
  aes_glue.c
      40  .cra_blocksize = AES_BLOCK_SIZE,
/linux-4.1.27/drivers/crypto/qat/qat_common/

  qat_algs.c
     117  uint8_t salt[AES_BLOCK_SIZE];
     325  cipher_cd_ctrl->cipher_state_sz = AES_BLOCK_SIZE >> 3;    in qat_alg_aead_init_enc_session()
     411  cipher_cd_ctrl->cipher_state_sz = AES_BLOCK_SIZE >> 3;    in qat_alg_aead_init_dec_session()
     468  cd_ctrl->cipher_state_sz = AES_BLOCK_SIZE >> 3;    in qat_alg_ablkcipher_init_com()
     524  if (crypto_rng_get_bytes(crypto_default_rng, ctx->salt, AES_BLOCK_SIZE))    in qat_alg_aead_init_sessions()
     850  areq->iv, AES_BLOCK_SIZE, qat_req);    in qat_alg_aead_dec()
     864  cipher_param->cipher_offset = areq->assoclen + AES_BLOCK_SIZE;    in qat_alg_aead_dec()
     865  memcpy(cipher_param->u.cipher_IV_array, areq->iv, AES_BLOCK_SIZE);    in qat_alg_aead_dec()
     869  cipher_param->cipher_length + AES_BLOCK_SIZE;    in qat_alg_aead_dec()
     894  iv, AES_BLOCK_SIZE, qat_req);    in qat_alg_aead_enc_internal()
     [all …]
/linux-4.1.27/drivers/crypto/qce/

  ablkcipher.c
     276  .blocksize = AES_BLOCK_SIZE,
     277  .ivsize = AES_BLOCK_SIZE,
     285  .blocksize = AES_BLOCK_SIZE,
     286  .ivsize = AES_BLOCK_SIZE,
     294  .blocksize = AES_BLOCK_SIZE,
     295  .ivsize = AES_BLOCK_SIZE,
     303  .blocksize = AES_BLOCK_SIZE,
     304  .ivsize = AES_BLOCK_SIZE,
  common.h
      27  #define QCE_AES_IV_LENGTH AES_BLOCK_SIZE
      29  #define QCE_MAX_IV_SIZE AES_BLOCK_SIZE
/linux-4.1.27/include/crypto/

  aes.h
      16  #define AES_BLOCK_SIZE 16    macro
/linux-4.1.27/drivers/crypto/ux500/cryp/

  cryp_core.c
     285  u32 iv[AES_BLOCK_SIZE / 4];    in cfg_ivs()
    1116  .cra_blocksize = AES_BLOCK_SIZE,
    1141  .cra_blocksize = AES_BLOCK_SIZE,
    1166  .cra_blocksize = AES_BLOCK_SIZE,
    1179  .ivsize = AES_BLOCK_SIZE,
    1192  .cra_blocksize = AES_BLOCK_SIZE,
    1205  .ivsize = AES_BLOCK_SIZE,
/linux-4.1.27/drivers/crypto/caam/

  caamalg.c
    3516  .blocksize = AES_BLOCK_SIZE,
    3525  .ivsize = AES_BLOCK_SIZE,
    3535  .blocksize = AES_BLOCK_SIZE,
    3544  .ivsize = AES_BLOCK_SIZE,
    3554  .blocksize = AES_BLOCK_SIZE,
    3563  .ivsize = AES_BLOCK_SIZE,
    3574  .blocksize = AES_BLOCK_SIZE,
    3583  .ivsize = AES_BLOCK_SIZE,
    3594  .blocksize = AES_BLOCK_SIZE,
    3603  .ivsize = AES_BLOCK_SIZE,
    [all …]
/linux-4.1.27/drivers/staging/rtl8188eu/include/

  rtw_security.h
      41  #define AES_BLOCK_SIZE 16    macro
/linux-4.1.27/drivers/staging/rtl8723au/include/

  rtw_security.h
      27  #define AES_BLOCK_SIZE 16    macro
/linux-4.1.27/crypto/

  aes_generic.c
    1447  .cra_blocksize = AES_BLOCK_SIZE,
/linux-4.1.27/drivers/crypto/amcc/

  crypto4xx_core.c
    1136  .cra_blocksize = AES_BLOCK_SIZE,