
Searched refs: AES_BLOCK_SIZE (Results 1 – 54 of 54) sorted by relevance

/linux-4.4.14/arch/arm/crypto/
aesbs-glue.c
19 #define BIT_SLICED_KEY_MAXSIZE (128 * (AES_MAXNR - 1) + 2 * AES_BLOCK_SIZE)
116 u32 blocks = walk.nbytes / AES_BLOCK_SIZE; in aesbs_cbc_encrypt()
123 crypto_xor(src, iv, AES_BLOCK_SIZE); in aesbs_cbc_encrypt()
126 src += AES_BLOCK_SIZE; in aesbs_cbc_encrypt()
128 memcpy(walk.iv, iv, AES_BLOCK_SIZE); in aesbs_cbc_encrypt()
133 crypto_xor(walk.iv, src, AES_BLOCK_SIZE); in aesbs_cbc_encrypt()
135 memcpy(walk.iv, dst, AES_BLOCK_SIZE); in aesbs_cbc_encrypt()
136 src += AES_BLOCK_SIZE; in aesbs_cbc_encrypt()
137 dst += AES_BLOCK_SIZE; in aesbs_cbc_encrypt()
140 err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE); in aesbs_cbc_encrypt()
[all …]
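
The aesbs-glue.c hits above show the usual CBC-encrypt walk: XOR each plaintext block into the running IV, encrypt, carry the ciphertext forward as the next IV, and hand the sub-block remainder back through blkcipher_walk_done(). A minimal user-space sketch of that chaining, assuming a hypothetical one-block primitive aes_encrypt_block() in place of the NEON helpers:

/*
 * Sketch of the CBC chaining pattern seen in aesbs_cbc_encrypt().
 * aes_encrypt_block() is a hypothetical 16-byte-block AES primitive.
 */
#include <stddef.h>
#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

void aes_encrypt_block(const void *key, uint8_t dst[AES_BLOCK_SIZE],
                       const uint8_t src[AES_BLOCK_SIZE]); /* hypothetical */

/* Encrypt the full blocks in place; return the leftover byte count. */
static size_t cbc_encrypt_blocks(const void *key, uint8_t *buf, size_t len,
                                 uint8_t iv[AES_BLOCK_SIZE])
{
        size_t blocks = len / AES_BLOCK_SIZE;

        while (blocks--) {
                size_t i;

                /* XOR the plaintext block into the running IV ... */
                for (i = 0; i < AES_BLOCK_SIZE; i++)
                        iv[i] ^= buf[i];
                /* ... encrypt it; the ciphertext becomes the next IV. */
                aes_encrypt_block(key, iv, iv);
                memcpy(buf, iv, AES_BLOCK_SIZE);
                buf += AES_BLOCK_SIZE;
        }

        /* Like blkcipher_walk_done(), report the sub-block tail. */
        return len % AES_BLOCK_SIZE;
}
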
aes-ce-glue.c
48 u8 b[AES_BLOCK_SIZE];
179 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_encrypt()
183 walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
202 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ecb_decrypt()
206 walk.nbytes % AES_BLOCK_SIZE); in ecb_decrypt()
225 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in cbc_encrypt()
230 walk.nbytes % AES_BLOCK_SIZE); in cbc_encrypt()
249 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in cbc_decrypt()
254 walk.nbytes % AES_BLOCK_SIZE); in cbc_decrypt()
269 err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE); in ctr_encrypt()
[all …]
aes_glue.c
66 .cra_blocksize = AES_BLOCK_SIZE,
/linux-4.4.14/arch/arm64/crypto/
aes-glue.c
112 for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) { in ecb_encrypt()
115 err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_encrypt()
134 for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) { in ecb_decrypt()
137 err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE); in ecb_decrypt()
156 for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) { in cbc_encrypt()
160 err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE); in cbc_encrypt()
179 for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) { in cbc_decrypt()
183 err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE); in cbc_decrypt()
199 err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE); in ctr_encrypt()
203 while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) { in ctr_encrypt()
[all …]
aes-ce-ccm-glue.c
71 __be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8]; in ccm_init_mac()
89 memcpy(maciv, req->iv, AES_BLOCK_SIZE - l); in ccm_init_mac()
102 memset(&req->iv[AES_BLOCK_SIZE - l], 0, l); in ccm_init_mac()
158 u8 __aligned(8) mac[AES_BLOCK_SIZE]; in ccm_encrypt()
159 u8 buf[AES_BLOCK_SIZE]; in ccm_encrypt()
173 memcpy(buf, req->iv, AES_BLOCK_SIZE); in ccm_encrypt()
182 AES_BLOCK_SIZE); in ccm_encrypt()
185 u32 tail = walk.nbytes % AES_BLOCK_SIZE; in ccm_encrypt()
223 u8 __aligned(8) mac[AES_BLOCK_SIZE]; in ccm_decrypt()
224 u8 buf[AES_BLOCK_SIZE]; in ccm_decrypt()
[all …]
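
In ccm_init_mac() above, the last l bytes of the 16-byte IV are zeroed so CTR mode starts counting from block 0, where l is the CCM length-field width. A sketch of just that step, assuming the RFC 3610 counter-block layout (flags byte, nonce, l-byte counter) with the field width encoded in the low bits of the flags byte:

/* Sketch: zero the CCM counter field before CTR processing starts. */
#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE 16

static void ccm_prepare_ctr_iv(uint8_t iv[AES_BLOCK_SIZE])
{
        /* RFC 3610: low 3 bits of the flags byte hold L - 1, so the
         * last l bytes of the block belong to the running counter. */
        unsigned int l = (iv[0] & 7) + 1;

        memset(&iv[AES_BLOCK_SIZE - l], 0, l);
}
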
aes-ce-cipher.c
24 u8 b[AES_BLOCK_SIZE];
242 .cra_blocksize = AES_BLOCK_SIZE,
/linux-4.4.14/drivers/crypto/nx/
nx-aes-xcbc.c
35 u8 state[AES_BLOCK_SIZE];
37 u8 buffer[AES_BLOCK_SIZE];
76 u8 keys[2][AES_BLOCK_SIZE]; in nx_xcbc_empty()
83 memcpy(key, csbcpb->cpb.aes_xcbc.key, AES_BLOCK_SIZE); in nx_xcbc_empty()
84 memcpy(csbcpb->cpb.aes_ecb.key, key, AES_BLOCK_SIZE); in nx_xcbc_empty()
120 memcpy(csbcpb->cpb.aes_ecb.key, keys[0], AES_BLOCK_SIZE); in nx_xcbc_empty()
127 len = AES_BLOCK_SIZE; in nx_xcbc_empty()
131 if (len != AES_BLOCK_SIZE) in nx_xcbc_empty()
146 memcpy(csbcpb->cpb.aes_xcbc.key, key, AES_BLOCK_SIZE); in nx_xcbc_empty()
203 if (total <= AES_BLOCK_SIZE) { in nx_xcbc_update()
[all …]
nx-aes-gcm.c
123 if (nbytes <= AES_BLOCK_SIZE) { in nx_gca()
166 AES_BLOCK_SIZE); in nx_gca()
175 memcpy(out, csbcpb_aead->cpb.aes_gca.out_pat, AES_BLOCK_SIZE); in nx_gca()
204 memcpy(csbcpb->cpb.aes_gcm.iv_or_cnt, desc->info, AES_BLOCK_SIZE); in gmac()
236 csbcpb->cpb.aes_gcm.out_pat_or_mac, AES_BLOCK_SIZE); in gmac()
238 csbcpb->cpb.aes_gcm.out_s0, AES_BLOCK_SIZE); in gmac()
261 char out[AES_BLOCK_SIZE]; in gcm_empty()
279 len = AES_BLOCK_SIZE; in gcm_empty()
285 if (len != AES_BLOCK_SIZE) in gcm_empty()
391 memcpy(desc.info, csbcpb->cpb.aes_gcm.out_cnt, AES_BLOCK_SIZE); in gcm_aes_nx_crypt()
[all …]
nx-aes-ccm.c
323 AES_BLOCK_SIZE); in generate_pat()
336 memcpy(out, result, AES_BLOCK_SIZE); in generate_pat()
396 memcpy(desc->info, csbcpb->cpb.aes_ccm.out_ctr, AES_BLOCK_SIZE); in ccm_nx_decrypt()
398 csbcpb->cpb.aes_ccm.out_pat_or_mac, AES_BLOCK_SIZE); in ccm_nx_decrypt()
400 csbcpb->cpb.aes_ccm.out_s0, AES_BLOCK_SIZE); in ccm_nx_decrypt()
465 memcpy(desc->info, csbcpb->cpb.aes_ccm.out_ctr, AES_BLOCK_SIZE); in ccm_nx_encrypt()
467 csbcpb->cpb.aes_ccm.out_pat_or_mac, AES_BLOCK_SIZE); in ccm_nx_encrypt()
469 csbcpb->cpb.aes_ccm.out_s0, AES_BLOCK_SIZE); in ccm_nx_encrypt()
569 .ivsize = AES_BLOCK_SIZE,
570 .maxauthsize = AES_BLOCK_SIZE,
[all …]
nx-aes-cbc.c
102 memcpy(desc->info, csbcpb->cpb.aes_cbc.cv, AES_BLOCK_SIZE); in cbc_aes_nx_crypt()
135 .cra_blocksize = AES_BLOCK_SIZE,
145 .ivsize = AES_BLOCK_SIZE,
nx.c
246 data_back = (abs(oplen) / AES_BLOCK_SIZE) * sg->len; in trim_sg_list()
247 data_back = *nbytes - (data_back & ~(AES_BLOCK_SIZE - 1)); in trim_sg_list()
291 memcpy(iv, desc->info, AES_BLOCK_SIZE); in nx_build_sg_lists()
301 delta = *nbytes - (*nbytes & ~(AES_BLOCK_SIZE - 1)); in nx_build_sg_lists()
nx-aes-ecb.c
135 .cra_blocksize = AES_BLOCK_SIZE,
nx-aes-ctr.c
115 memcpy(desc->info, csbcpb->cpb.aes_cbc.cv, AES_BLOCK_SIZE); in ctr_aes_nx_crypt()
/linux-4.4.14/net/mac80211/
aes_cmac.c
31 for (i = 0; i < AES_BLOCK_SIZE - 1; i++) in gf_mulx()
33 pad[AES_BLOCK_SIZE - 1] <<= 1; in gf_mulx()
35 pad[AES_BLOCK_SIZE - 1] ^= 0x87; in gf_mulx()
42 u8 cbc[AES_BLOCK_SIZE], pad[AES_BLOCK_SIZE]; in aes_cmac_vector()
46 memset(cbc, 0, AES_BLOCK_SIZE); in aes_cmac_vector()
57 while (left >= AES_BLOCK_SIZE) { in aes_cmac_vector()
58 for (i = 0; i < AES_BLOCK_SIZE; i++) { in aes_cmac_vector()
66 if (left > AES_BLOCK_SIZE) in aes_cmac_vector()
68 left -= AES_BLOCK_SIZE; in aes_cmac_vector()
71 memset(pad, 0, AES_BLOCK_SIZE); in aes_cmac_vector()
[all …]
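
The aes_cmac.c hits show the GF(2^128) doubling used to derive CMAC subkeys: shift the 16-byte block left by one bit and, if a bit was shifted out, fold it back in by XORing 0x87 into the last byte. A standalone sketch consistent with the visible lines; the per-byte shift body, elided in the excerpt, is reconstructed from RFC 4493:

/* Sketch of gf_mulx(): doubling in GF(2^128) for CMAC subkeys. */
#include <stdint.h>

#define AES_BLOCK_SIZE 16

static void gf_mulx(uint8_t pad[AES_BLOCK_SIZE])
{
        int i;
        int carry = pad[0] & 0x80;      /* remember the bit shifted out */

        /* Shift the whole 128-bit value left by one bit. */
        for (i = 0; i < AES_BLOCK_SIZE - 1; i++)
                pad[i] = (uint8_t)((pad[i] << 1) | (pad[i + 1] >> 7));
        pad[AES_BLOCK_SIZE - 1] <<= 1;

        /* Reduce modulo the field polynomial x^128 + x^7 + x^2 + x + 1. */
        if (carry)
                pad[AES_BLOCK_SIZE - 1] ^= 0x87;
}
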
aes_gmac.c
32 u8 zero[GMAC_MIC_LEN], iv[AES_BLOCK_SIZE]; in ieee80211_aes_gmac()
48 iv[AES_BLOCK_SIZE - 1] = 0x01; in ieee80211_aes_gmac()
wpa.c
411 u8 aad[2 * AES_BLOCK_SIZE]; in ccmp_encrypt_skb()
412 u8 b_0[AES_BLOCK_SIZE]; in ccmp_encrypt_skb()
534 u8 aad[2 * AES_BLOCK_SIZE]; in ieee80211_crypto_ccmp_decrypt()
535 u8 b_0[AES_BLOCK_SIZE]; in ieee80211_crypto_ccmp_decrypt()
569 j_0[AES_BLOCK_SIZE - 1] = 0x01; in gcmp_special_blocks()
639 u8 aad[2 * AES_BLOCK_SIZE]; in gcmp_encrypt_skb()
640 u8 j_0[AES_BLOCK_SIZE]; in gcmp_encrypt_skb()
760 u8 aad[2 * AES_BLOCK_SIZE]; in ieee80211_crypto_gcmp_decrypt()
761 u8 j_0[AES_BLOCK_SIZE]; in ieee80211_crypto_gcmp_decrypt()
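
gcmp_special_blocks() above sets the last byte of j_0 to 0x01, which matches the standard GCM J_0 construction for a 96-bit IV: J_0 = IV || 0^31 || 1. A sketch of that block layout, assuming a 12-byte GCMP nonce:

/* Sketch: GCM J_0 (initial counter block) for a 96-bit nonce. */
#include <stdint.h>
#include <string.h>

#define AES_BLOCK_SIZE 16
#define GCM_NONCE_LEN  12       /* assumed 96-bit nonce */

static void gcm_build_j0(uint8_t j_0[AES_BLOCK_SIZE],
                         const uint8_t nonce[GCM_NONCE_LEN])
{
        memcpy(j_0, nonce, GCM_NONCE_LEN);
        memset(j_0 + GCM_NONCE_LEN, 0, AES_BLOCK_SIZE - GCM_NONCE_LEN - 1);
        j_0[AES_BLOCK_SIZE - 1] = 0x01; /* 32-bit block counter starts at 1 */
}
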
/linux-4.4.14/arch/s390/crypto/
aes_s390.c
148 AES_BLOCK_SIZE); in aes_encrypt()
152 AES_BLOCK_SIZE); in aes_encrypt()
156 AES_BLOCK_SIZE); in aes_encrypt()
173 AES_BLOCK_SIZE); in aes_decrypt()
177 AES_BLOCK_SIZE); in aes_decrypt()
181 AES_BLOCK_SIZE); in aes_decrypt()
217 .cra_blocksize = AES_BLOCK_SIZE,
324 unsigned int n = nbytes & ~(AES_BLOCK_SIZE - 1); in ecb_aes_crypt()
332 nbytes &= AES_BLOCK_SIZE - 1; in ecb_aes_crypt()
398 .cra_blocksize = AES_BLOCK_SIZE,
[all …]
/linux-4.4.14/drivers/crypto/ccp/
ccp-crypto.h
91 u8 k1[AES_BLOCK_SIZE];
92 u8 k2[AES_BLOCK_SIZE];
97 u8 iv[AES_BLOCK_SIZE];
101 u8 rfc3686_iv[AES_BLOCK_SIZE];
119 u8 iv[AES_BLOCK_SIZE];
123 u8 buf[AES_BLOCK_SIZE];
127 u8 pad[AES_BLOCK_SIZE];
135 u8 iv[AES_BLOCK_SIZE];
138 u8 buf[AES_BLOCK_SIZE];
ccp-crypto-aes.c
35 memcpy(req->info, rctx->iv, AES_BLOCK_SIZE); in ccp_aes_complete()
84 (req->nbytes & (AES_BLOCK_SIZE - 1))) in ccp_aes_crypt()
91 memcpy(rctx->iv, req->info, AES_BLOCK_SIZE); in ccp_aes_crypt()
93 iv_len = AES_BLOCK_SIZE; in ccp_aes_crypt()
223 .cra_blocksize = AES_BLOCK_SIZE,
274 .blocksize = AES_BLOCK_SIZE,
282 .blocksize = AES_BLOCK_SIZE,
283 .ivsize = AES_BLOCK_SIZE,
290 .blocksize = AES_BLOCK_SIZE,
291 .ivsize = AES_BLOCK_SIZE,
[all …]
ccp-crypto-aes-xts.c
92 memcpy(req->info, rctx->iv, AES_BLOCK_SIZE); in ccp_aes_xts_complete()
131 if (req->nbytes & (AES_BLOCK_SIZE - 1)) in ccp_aes_xts_crypt()
160 memcpy(rctx->iv, req->info, AES_BLOCK_SIZE); in ccp_aes_xts_crypt()
161 sg_init_one(&rctx->iv_sg, rctx->iv, AES_BLOCK_SIZE); in ccp_aes_xts_crypt()
172 rctx->cmd.u.xts.iv_len = AES_BLOCK_SIZE; in ccp_aes_xts_crypt()
246 alg->cra_blocksize = AES_BLOCK_SIZE; in ccp_register_aes_xts_alg()
255 alg->cra_ablkcipher.ivsize = AES_BLOCK_SIZE; in ccp_register_aes_xts_alg()
ccp-ops.c
898 if (aes->src_len & (AES_BLOCK_SIZE - 1)) in ccp_run_aes_cmac_cmd()
901 if (aes->iv_len != AES_BLOCK_SIZE) in ccp_run_aes_cmac_cmd()
908 if (aes->cmac_key_len != AES_BLOCK_SIZE) in ccp_run_aes_cmac_cmd()
959 dm_offset = CCP_KSB_BYTES - AES_BLOCK_SIZE; in ccp_run_aes_cmac_cmd()
970 AES_BLOCK_SIZE, DMA_TO_DEVICE); in ccp_run_aes_cmac_cmd()
975 ccp_prepare_data(&src, NULL, &op, AES_BLOCK_SIZE, true); in ccp_run_aes_cmac_cmd()
1018 dm_offset = CCP_KSB_BYTES - AES_BLOCK_SIZE; in ccp_run_aes_cmac_cmd()
1054 (aes->src_len & (AES_BLOCK_SIZE - 1))) in ccp_run_aes_cmd()
1061 if (aes->iv_len != AES_BLOCK_SIZE) in ccp_run_aes_cmd()
1114 dm_offset = CCP_KSB_BYTES - AES_BLOCK_SIZE; in ccp_run_aes_cmd()
[all …]
ccp-crypto-aes-cmac.c
168 rctx->cmd.u.aes.iv_len = AES_BLOCK_SIZE; in ccp_do_cmac_update()
396 halg->digestsize = AES_BLOCK_SIZE; in ccp_register_aes_cmac_algs()
405 base->cra_blocksize = AES_BLOCK_SIZE; in ccp_register_aes_cmac_algs()
/linux-4.4.14/drivers/crypto/vmx/
aes_ctr.c
97 u8 keystream[AES_BLOCK_SIZE]; in p8_aes_ctr_final()
110 crypto_inc(ctrblk, AES_BLOCK_SIZE); in p8_aes_ctr_final()
133 ret = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE); in p8_aes_ctr_crypt()
134 while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) { in p8_aes_ctr_crypt()
142 AES_BLOCK_SIZE, in p8_aes_ctr_crypt()
148 inc = (nbytes & AES_BLOCK_MASK) / AES_BLOCK_SIZE; in p8_aes_ctr_crypt()
151 crypto_inc(walk.iv, AES_BLOCK_SIZE); in p8_aes_ctr_crypt()
153 nbytes &= AES_BLOCK_SIZE - 1; in p8_aes_ctr_crypt()
aes_cbc.c
128 nbytes &= AES_BLOCK_SIZE - 1; in p8_aes_cbc_encrypt()
169 nbytes &= AES_BLOCK_SIZE - 1; in p8_aes_cbc_decrypt()
189 .cra_blocksize = AES_BLOCK_SIZE,
aesp8-ppc.h
4 #define AES_BLOCK_MASK (~(AES_BLOCK_SIZE-1))
aes.c
139 .cra_blocksize = AES_BLOCK_SIZE,
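
aesp8-ppc.h above (like the sparc and x86 glue further down) defines AES_BLOCK_MASK as ~(AES_BLOCK_SIZE - 1). ANDing a length with the mask rounds it down to a whole number of blocks, and ANDing with AES_BLOCK_SIZE - 1 gives the tail the walk loops leave for later, which is why so many hits end with nbytes &= AES_BLOCK_SIZE - 1. A two-function illustration:

/* Rounding a byte count to whole AES blocks with AES_BLOCK_MASK. */
#include <stddef.h>

#define AES_BLOCK_SIZE 16
#define AES_BLOCK_MASK (~(AES_BLOCK_SIZE - 1))

static size_t full_block_bytes(size_t nbytes)
{
        return nbytes & AES_BLOCK_MASK;         /* e.g. 100 -> 96 */
}

static size_t tail_bytes(size_t nbytes)
{
        return nbytes & (AES_BLOCK_SIZE - 1);   /* e.g. 100 -> 4 */
}
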
/linux-4.4.14/drivers/crypto/
padlock-aes.c
33 #define ecb_fetch_bytes (ecb_fetch_blocks * AES_BLOCK_SIZE)
37 #define cbc_fetch_bytes (cbc_fetch_blocks * AES_BLOCK_SIZE)
214 u8 buf[AES_BLOCK_SIZE * (MAX_ECB_FETCH_BLOCKS - 1) + PADLOCK_ALIGNMENT - 1]; in ecb_crypt_copy()
217 memcpy(tmp, in, count * AES_BLOCK_SIZE); in ecb_crypt_copy()
228 u8 buf[AES_BLOCK_SIZE * (MAX_CBC_FETCH_BLOCKS - 1) + PADLOCK_ALIGNMENT - 1]; in cbc_crypt_copy()
231 memcpy(tmp, in, count * AES_BLOCK_SIZE); in cbc_crypt_copy()
327 .cra_blocksize = AES_BLOCK_SIZE,
360 nbytes / AES_BLOCK_SIZE); in ecb_aes_encrypt()
361 nbytes &= AES_BLOCK_SIZE - 1; in ecb_aes_encrypt()
389 nbytes / AES_BLOCK_SIZE); in ecb_aes_decrypt()
[all …]
geode-aes.c
229 op->len = AES_BLOCK_SIZE; in geode_encrypt()
250 op->len = AES_BLOCK_SIZE; in geode_decrypt()
289 .cra_blocksize = AES_BLOCK_SIZE,
323 op->len = nbytes - (nbytes % AES_BLOCK_SIZE); in geode_cbc_decrypt()
355 op->len = nbytes - (nbytes % AES_BLOCK_SIZE); in geode_cbc_encrypt()
399 .cra_blocksize = AES_BLOCK_SIZE,
411 .ivsize = AES_BLOCK_SIZE,
435 op->len = nbytes - (nbytes % AES_BLOCK_SIZE); in geode_ecb_decrypt()
465 op->len = nbytes - (nbytes % AES_BLOCK_SIZE); in geode_ecb_encrypt()
485 .cra_blocksize = AES_BLOCK_SIZE,
atmel-aes.c
658 dd->buflen &= ~(AES_BLOCK_SIZE - 1); in atmel_aes_buff_init()
738 if (!IS_ALIGNED(req->nbytes, AES_BLOCK_SIZE)) { in atmel_aes_crypt()
742 ctx->block_size = AES_BLOCK_SIZE; in atmel_aes_crypt()
965 .cra_blocksize = AES_BLOCK_SIZE,
985 .cra_blocksize = AES_BLOCK_SIZE,
995 .ivsize = AES_BLOCK_SIZE,
1006 .cra_blocksize = AES_BLOCK_SIZE,
1016 .ivsize = AES_BLOCK_SIZE,
1027 .cra_blocksize = AES_BLOCK_SIZE,
1037 .ivsize = AES_BLOCK_SIZE,
[all …]
omap-aes.c
98 #define AES_BLOCK_WORDS (AES_BLOCK_SIZE >> 2)
558 if (!IS_ALIGNED(total, AES_BLOCK_SIZE)) in omap_aes_check_aligned()
564 if (!IS_ALIGNED(sg->length, AES_BLOCK_SIZE)) in omap_aes_check_aligned()
582 total = ALIGN(dd->total, AES_BLOCK_SIZE); in omap_aes_copy_sgs()
654 len = ALIGN(dd->total, AES_BLOCK_SIZE); in omap_aes_handle_queue()
702 len = ALIGN(dd->total_save, AES_BLOCK_SIZE); in omap_aes_done_task()
838 .cra_blocksize = AES_BLOCK_SIZE,
860 .cra_blocksize = AES_BLOCK_SIZE,
870 .ivsize = AES_BLOCK_SIZE,
886 .cra_blocksize = AES_BLOCK_SIZE,
[all …]
s5p-sss.c
268 if (!IS_ALIGNED(sg_dma_len(sg), AES_BLOCK_SIZE)) { in s5p_set_outdata()
294 if (!IS_ALIGNED(sg_dma_len(sg), AES_BLOCK_SIZE)) { in s5p_set_indata()
520 if (!IS_ALIGNED(req->nbytes, AES_BLOCK_SIZE)) { in s5p_aes_crypt()
585 .cra_blocksize = AES_BLOCK_SIZE,
606 .cra_blocksize = AES_BLOCK_SIZE,
615 .ivsize = AES_BLOCK_SIZE,
ixp4xx_crypto.c
1249 .cra_blocksize = AES_BLOCK_SIZE,
1253 .ivsize = AES_BLOCK_SIZE,
1263 .cra_blocksize = AES_BLOCK_SIZE,
1275 .cra_blocksize = AES_BLOCK_SIZE,
1279 .ivsize = AES_BLOCK_SIZE,
1289 .cra_blocksize = AES_BLOCK_SIZE,
1293 .ivsize = AES_BLOCK_SIZE,
1357 .cra_blocksize = AES_BLOCK_SIZE,
1359 .ivsize = AES_BLOCK_SIZE,
1369 .cra_blocksize = AES_BLOCK_SIZE,
[all …]
picoxcell_crypto.c
1223 .cra_blocksize = AES_BLOCK_SIZE,
1233 .ivsize = AES_BLOCK_SIZE,
1250 .cra_blocksize = AES_BLOCK_SIZE,
1390 .cra_blocksize = AES_BLOCK_SIZE,
1398 .ivsize = AES_BLOCK_SIZE,
1420 .cra_blocksize = AES_BLOCK_SIZE,
1428 .ivsize = AES_BLOCK_SIZE,
1450 .cra_blocksize = AES_BLOCK_SIZE,
1458 .ivsize = AES_BLOCK_SIZE,
talitos.c
2116 .cra_blocksize = AES_BLOCK_SIZE,
2119 .ivsize = AES_BLOCK_SIZE,
2158 .cra_blocksize = AES_BLOCK_SIZE,
2161 .ivsize = AES_BLOCK_SIZE,
2200 .cra_blocksize = AES_BLOCK_SIZE,
2203 .ivsize = AES_BLOCK_SIZE,
2242 .cra_blocksize = AES_BLOCK_SIZE,
2245 .ivsize = AES_BLOCK_SIZE,
2284 .cra_blocksize = AES_BLOCK_SIZE,
2287 .ivsize = AES_BLOCK_SIZE,
[all …]
sahara.c
464 dev->hw_desc[idx]->len1 = AES_BLOCK_SIZE; in sahara_hw_descriptor_create()
647 if (!IS_ALIGNED(req->nbytes, AES_BLOCK_SIZE)) { in sahara_aes_crypt()
1215 .cra_blocksize = AES_BLOCK_SIZE,
1235 .cra_blocksize = AES_BLOCK_SIZE,
1245 .ivsize = AES_BLOCK_SIZE,
mxs-dcp.c
790 .cra_blocksize = AES_BLOCK_SIZE,
813 .cra_blocksize = AES_BLOCK_SIZE,
824 .ivsize = AES_BLOCK_SIZE,
n2_core.c
1223 .block_size = AES_BLOCK_SIZE,
1236 .block_size = AES_BLOCK_SIZE,
1240 .ivsize = AES_BLOCK_SIZE,
1250 .block_size = AES_BLOCK_SIZE,
1254 .ivsize = AES_BLOCK_SIZE,
mv_cesa.c
960 .cra_blocksize = AES_BLOCK_SIZE,
968 .ivsize = AES_BLOCK_SIZE,
/linux-4.4.14/arch/powerpc/crypto/
aes-spe-glue.c
193 nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1); in ppc_ecb_encrypt()
221 nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1); in ppc_ecb_decrypt()
249 nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1); in ppc_cbc_encrypt()
277 nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1); in ppc_cbc_decrypt()
301 err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE); in ppc_ctr_crypt()
306 nbytes : pbytes & ~(AES_BLOCK_SIZE - 1); in ppc_ctr_crypt()
337 nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1); in ppc_xts_encrypt()
368 nbytes - MAX_BYTES : nbytes & (AES_BLOCK_SIZE - 1); in ppc_xts_decrypt()
396 .cra_blocksize = AES_BLOCK_SIZE,
414 .cra_blocksize = AES_BLOCK_SIZE,
[all …]
/linux-4.4.14/arch/sparc/crypto/
aes_glue.c
213 #define AES_BLOCK_MASK (~(AES_BLOCK_SIZE-1))
237 nbytes &= AES_BLOCK_SIZE - 1; in ecb_encrypt()
267 nbytes &= AES_BLOCK_SIZE - 1; in ecb_decrypt()
297 nbytes &= AES_BLOCK_SIZE - 1; in cbc_encrypt()
328 nbytes &= AES_BLOCK_SIZE - 1; in cbc_decrypt()
340 u64 keystream[AES_BLOCK_SIZE / sizeof(u64)]; in ctr_crypt_final()
346 keystream, AES_BLOCK_SIZE); in ctr_crypt_final()
349 crypto_inc(ctrblk, AES_BLOCK_SIZE); in ctr_crypt_final()
361 err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE); in ctr_crypt()
365 while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) { in ctr_crypt()
[all …]
/linux-4.4.14/arch/x86/crypto/
aesni-intel_glue.c
48 #define AES_BLOCK_MASK (~(AES_BLOCK_SIZE - 1))
390 nbytes &= AES_BLOCK_SIZE - 1; in ecb_encrypt()
414 nbytes &= AES_BLOCK_SIZE - 1; in ecb_decrypt()
438 nbytes &= AES_BLOCK_SIZE - 1; in cbc_encrypt()
462 nbytes &= AES_BLOCK_SIZE - 1; in cbc_decrypt()
475 u8 keystream[AES_BLOCK_SIZE]; in ctr_crypt_final()
483 crypto_inc(ctrblk, AES_BLOCK_SIZE); in ctr_crypt_final()
514 err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE); in ctr_crypt()
518 while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) { in ctr_crypt()
521 nbytes &= AES_BLOCK_SIZE - 1; in ctr_crypt()
[all …]
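
ctr_crypt_final() above (and its sparc and vmx counterparts) handles a trailing partial block in CTR mode: encrypt the counter block into a local keystream buffer, XOR only the bytes that remain, then bump the counter. A sketch of the same idea, with aes_encrypt_block() again a hypothetical one-block primitive and the increment mirroring what crypto_inc() does:

/* Sketch of final partial-block handling in CTR mode. */
#include <stddef.h>
#include <stdint.h>

#define AES_BLOCK_SIZE 16

void aes_encrypt_block(const void *key, uint8_t dst[AES_BLOCK_SIZE],
                       const uint8_t src[AES_BLOCK_SIZE]); /* hypothetical */

static void ctr_final(const void *key, uint8_t *dst, const uint8_t *src,
                      size_t nbytes, uint8_t ctrblk[AES_BLOCK_SIZE])
{
        uint8_t keystream[AES_BLOCK_SIZE];
        size_t i;

        /* Keystream for the last (short) block. */
        aes_encrypt_block(key, keystream, ctrblk);
        for (i = 0; i < nbytes; i++)
                dst[i] = src[i] ^ keystream[i];

        /* Big-endian increment of the counter block, as crypto_inc() does. */
        for (i = AES_BLOCK_SIZE; i-- > 0; )
                if (++ctrblk[i] != 0)
                        break;
}
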
aes_glue.c
40 .cra_blocksize = AES_BLOCK_SIZE,
/linux-4.4.14/drivers/crypto/qce/
ablkcipher.c
266 .blocksize = AES_BLOCK_SIZE,
267 .ivsize = AES_BLOCK_SIZE,
275 .blocksize = AES_BLOCK_SIZE,
276 .ivsize = AES_BLOCK_SIZE,
284 .blocksize = AES_BLOCK_SIZE,
285 .ivsize = AES_BLOCK_SIZE,
293 .blocksize = AES_BLOCK_SIZE,
294 .ivsize = AES_BLOCK_SIZE,
common.h
27 #define QCE_AES_IV_LENGTH AES_BLOCK_SIZE
29 #define QCE_MAX_IV_SIZE AES_BLOCK_SIZE
/linux-4.4.14/drivers/crypto/qat/qat_common/
qat_algs.c
317 cipher_cd_ctrl->cipher_state_sz = AES_BLOCK_SIZE >> 3; in qat_alg_aead_init_enc_session()
404 cipher_cd_ctrl->cipher_state_sz = AES_BLOCK_SIZE >> 3; in qat_alg_aead_init_dec_session()
461 cd_ctrl->cipher_state_sz = AES_BLOCK_SIZE >> 3; in qat_alg_ablkcipher_init_com()
841 memcpy(cipher_param->u.cipher_IV_array, areq->iv, AES_BLOCK_SIZE); in qat_alg_aead_dec()
883 memcpy(cipher_param->u.cipher_IV_array, iv, AES_BLOCK_SIZE); in qat_alg_aead_enc()
1008 memcpy(cipher_param->u.cipher_IV_array, req->info, AES_BLOCK_SIZE); in qat_alg_ablkcipher_encrypt()
1045 memcpy(cipher_param->u.cipher_IV_array, req->info, AES_BLOCK_SIZE); in qat_alg_ablkcipher_decrypt()
1157 .cra_blocksize = AES_BLOCK_SIZE,
1166 .ivsize = AES_BLOCK_SIZE,
1174 .cra_blocksize = AES_BLOCK_SIZE,
[all …]
/linux-4.4.14/drivers/crypto/sunxi-ss/
sun4i-ss-core.c
91 .cra_blocksize = AES_BLOCK_SIZE,
101 .ivsize = AES_BLOCK_SIZE,
113 .cra_blocksize = AES_BLOCK_SIZE,
123 .ivsize = AES_BLOCK_SIZE,
/linux-4.4.14/drivers/crypto/caam/
caamalg.c
2886 .blocksize = AES_BLOCK_SIZE,
2896 .ivsize = AES_BLOCK_SIZE,
2946 .ivsize = AES_BLOCK_SIZE,
2972 .blocksize = AES_BLOCK_SIZE,
2981 .ivsize = AES_BLOCK_SIZE,
3000 .maxauthsize = AES_BLOCK_SIZE,
3018 .maxauthsize = AES_BLOCK_SIZE,
3037 .maxauthsize = AES_BLOCK_SIZE,
3182 .cra_blocksize = AES_BLOCK_SIZE,
3188 .ivsize = AES_BLOCK_SIZE,
[all …]
/linux-4.4.14/include/crypto/
aes.h
16 #define AES_BLOCK_SIZE 16 macro
/linux-4.4.14/drivers/crypto/marvell/
cipher.c
730 .cra_blocksize = AES_BLOCK_SIZE,
752 memcpy(tmpl->ctx.blkcipher.iv, req->info, AES_BLOCK_SIZE); in mv_cesa_cbc_aes_op()
781 .cra_blocksize = AES_BLOCK_SIZE,
791 .ivsize = AES_BLOCK_SIZE,
/linux-4.4.14/drivers/crypto/ux500/cryp/
cryp_core.c
285 u32 iv[AES_BLOCK_SIZE / 4]; in cfg_ivs()
1116 .cra_blocksize = AES_BLOCK_SIZE,
1141 .cra_blocksize = AES_BLOCK_SIZE,
1166 .cra_blocksize = AES_BLOCK_SIZE,
1179 .ivsize = AES_BLOCK_SIZE,
1192 .cra_blocksize = AES_BLOCK_SIZE,
1205 .ivsize = AES_BLOCK_SIZE,
/linux-4.4.14/drivers/staging/rtl8723au/include/
rtw_security.h
27 #define AES_BLOCK_SIZE 16 macro
/linux-4.4.14/drivers/staging/rtl8188eu/include/
rtw_security.h
41 #define AES_BLOCK_SIZE 16 macro
/linux-4.4.14/crypto/
aes_generic.c
1447 .cra_blocksize = AES_BLOCK_SIZE,
/linux-4.4.14/drivers/crypto/amcc/
crypto4xx_core.c
1116 .cra_blocksize = AES_BLOCK_SIZE,