Searched refs: cryptlen (Results 1 – 23 of 23), sorted by relevance

/linux-4.4.14/crypto/
authencesn.c
111 unsigned int cryptlen = req->cryptlen; in crypto_authenc_esn_genicv_tail() local
117 scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0); in crypto_authenc_esn_genicv_tail()
120 scatterwalk_map_and_copy(hash, dst, assoclen + cryptlen, authsize, 1); in crypto_authenc_esn_genicv_tail()
145 unsigned int cryptlen = req->cryptlen; in crypto_authenc_esn_genicv() local
155 scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 1); in crypto_authenc_esn_genicv()
161 ahash_request_set_crypt(ahreq, dst, hash, assoclen + cryptlen); in crypto_authenc_esn_genicv()
201 unsigned int cryptlen = req->cryptlen; in crypto_authenc_esn_encrypt() local
221 ablkcipher_request_set_crypt(abreq, src, dst, cryptlen, req->iv); in crypto_authenc_esn_encrypt()
242 unsigned int cryptlen = req->cryptlen - authsize; in crypto_authenc_esn_decrypt_tail() local
250 scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0); in crypto_authenc_esn_decrypt_tail()
[all …]
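The authencesn matches above (like the plain authenc ones further down) all call scatterwalk_map_and_copy() at an offset built from assoclen + cryptlen, which is where the ICV sits in the usual AEAD buffer layout: associated data, then payload, then tag. Below is a minimal sketch of that pattern, not code from this tree; the demo_* helpers and their arguments are illustrative.

#include <crypto/aead.h>
#include <crypto/scatterwalk.h>

/*
 * Illustrative only.  With the layout assoc data || payload || ICV, the
 * genicv path writes the freshly computed ICV at offset
 * assoclen + cryptlen (out = 1 copies buffer -> scatterlist), while the
 * decrypt path reads the received ICV from assoclen + cryptlen - authsize,
 * because cryptlen still includes the tag there (out = 0 copies
 * scatterlist -> buffer).
 */
static void demo_store_icv(struct aead_request *req, u8 *icv,
                           unsigned int authsize)
{
        scatterwalk_map_and_copy(icv, req->dst,
                                 req->assoclen + req->cryptlen,
                                 authsize, 1);
}

static void demo_load_icv(struct aead_request *req, u8 *icv,
                          unsigned int authsize)
{
        scatterwalk_map_and_copy(icv, req->src,
                                 req->assoclen + req->cryptlen - authsize,
                                 authsize, 0);
}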
ccm.c
130 unsigned int cryptlen) in format_input() argument
148 return set_msg_len(info + 16 - l, cryptlen, l); in format_input()
252 unsigned int cryptlen) in crypto_ccm_auth() argument
264 err = format_input(odata, req, cryptlen); in crypto_ccm_auth()
280 if (cryptlen) in crypto_ccm_auth()
281 get_data_to_compute(cipher, pctx, plain, cryptlen); in crypto_ccm_auth()
296 req->assoclen + req->cryptlen, in crypto_ccm_encrypt_done()
352 unsigned int cryptlen = req->cryptlen; in crypto_ccm_encrypt() local
361 err = crypto_ccm_auth(req, sg_next(pctx->src), cryptlen); in crypto_ccm_encrypt()
372 ablkcipher_request_set_crypt(abreq, pctx->src, dst, cryptlen + 16, iv); in crypto_ccm_encrypt()
[all …]
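In ccm.c, format_input() packs cryptlen into the trailing L bytes of the CCM B0 block through set_msg_len(). The userspace sketch below mirrors that encoding (big-endian, right-aligned in csize bytes); demo_set_msg_len is a hypothetical name and the error handling is simplified.

#include <stdint.h>
#include <string.h>
#include <arpa/inet.h>  /* htonl() */

/*
 * Sketch of the CCM length encoding: msglen is stored big-endian,
 * right-aligned in the last csize bytes of the B0 length field.
 */
static int demo_set_msg_len(uint8_t *block, unsigned int msglen, int csize)
{
        uint32_t be;

        memset(block, 0, csize);        /* clear the whole length field */
        block += csize;

        if (csize >= 4)
                csize = 4;
        else if (msglen > (1u << (8 * csize)))
                return -1;              /* too long to encode in L bytes */

        be = htonl(msglen);
        /* keep only the low-order csize bytes, right-aligned */
        memcpy(block - csize, (uint8_t *)&be + 4 - csize, csize);
        return 0;
}

Here csize is the CCM L parameter (15 minus the nonce length); the same encoding appears again in generate_b0() in nx-aes-ccm.c further down.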
chacha20poly1305.c
47 __le64 cryptlen; member
67 unsigned int cryptlen; member
103 req->assoclen + rctx->cryptlen, in poly_verify_tag()
115 req->assoclen + rctx->cryptlen, in poly_copy_tag()
148 rctx->cryptlen, creq->iv); in chacha_decrypt()
160 if (rctx->cryptlen == req->cryptlen) /* encrypting */ in poly_tail_continue()
183 len = cpu_to_le64(rctx->cryptlen); in poly_tail()
184 memcpy(&preq->tail.cryptlen, &len, sizeof(len)); in poly_tail()
213 padlen = (bs - (rctx->cryptlen % bs)) % bs; in poly_cipherpad()
243 if (rctx->cryptlen == req->cryptlen) /* encrypting */ in poly_cipher()
[all …]
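The chacha20poly1305.c matches show the RFC 7539 framing: the data fed to Poly1305 is zero-padded to a 16-byte boundary (poly_cipherpad) and terminated by a block carrying the associated-data length and cryptlen as little-endian 64-bit values (poly_tail). A small illustrative sketch of that arithmetic, with hypothetical demo_* names:

#include <stdint.h>
#include <stddef.h>

#define POLY1305_BLOCK_SIZE 16

/* Padding needed to bring len up to a Poly1305 block boundary. */
static size_t demo_poly_padlen(size_t len)
{
        return (POLY1305_BLOCK_SIZE - (len % POLY1305_BLOCK_SIZE)) %
               POLY1305_BLOCK_SIZE;
}

/* Final Poly1305 block: le64 assoclen followed by le64 cryptlen. */
static void demo_poly_tail(uint8_t out[16], uint64_t assoclen,
                           uint64_t cryptlen)
{
        for (int i = 0; i < 8; i++) {
                out[i]     = (uint8_t)(assoclen >> (8 * i));
                out[8 + i] = (uint8_t)(cryptlen >> (8 * i));
        }
}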
gcm.c
62 unsigned int cryptlen; member
221 unsigned int cryptlen) in crypto_gcm_init_crypt() argument
233 cryptlen + sizeof(pctx->auth_tag), in crypto_gcm_init_crypt()
274 lengths.b = cpu_to_be64(gctx->cryptlen * 8); in gcm_hash_len()
335 remain = gcm_remain(gctx->cryptlen); in gcm_hash_crypt_continue()
364 if (gctx->cryptlen) in gcm_hash_assoc_remain_continue()
366 gctx->src, gctx->cryptlen, flags) ?: in gcm_hash_assoc_remain_continue()
462 req->assoclen + req->cryptlen, in gcm_enc_copy_hash()
473 gctx->cryptlen = req->cryptlen; in gcm_encrypt_continue()
501 crypto_gcm_init_crypt(req, req->cryptlen); in crypto_gcm_encrypt()
[all …]
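gcm_hash_len() above multiplies cryptlen by 8 because the final GHASH block holds bit lengths: big-endian 64-bit counts of the associated data and of the ciphertext. A minimal sketch with a hypothetical helper name:

#include <stdint.h>

/* Build the 16-byte GHASH length block: be64(assoclen*8) || be64(cryptlen*8). */
static void demo_gcm_len_block(uint8_t out[16], uint64_t assoclen,
                               uint64_t cryptlen)
{
        uint64_t abits = assoclen * 8, cbits = cryptlen * 8;

        for (int i = 0; i < 8; i++) {
                out[i]     = (uint8_t)(abits >> (56 - 8 * i));
                out[8 + i] = (uint8_t)(cbits >> (56 - 8 * i));
        }
}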
echainiv.c
105 if (req->cryptlen < ivsize) in echainiv_encrypt()
121 req->assoclen + req->cryptlen); in echainiv_encrypt()
139 req->cryptlen, info); in echainiv_encrypt()
160 if (req->cryptlen < ivsize) in echainiv_decrypt()
170 req->cryptlen - ivsize, req->iv); in echainiv_decrypt()
authenc.c
131 req->assoclen + req->cryptlen, in authenc_geniv_ahash_done()
155 req->assoclen + req->cryptlen); in crypto_authenc_genicv()
163 scatterwalk_map_and_copy(hash, req->dst, req->assoclen + req->cryptlen, in crypto_authenc_genicv()
203 unsigned int cryptlen = req->cryptlen; in crypto_authenc_encrypt() local
225 ablkcipher_request_set_crypt(abreq, src, dst, cryptlen, req->iv); in crypto_authenc_encrypt()
267 req->cryptlen - authsize, req->iv); in crypto_authenc_decrypt_tail()
304 req->assoclen + req->cryptlen - authsize); in crypto_authenc_decrypt()
seqiv.c
158 if (req->cryptlen < ivsize) in seqiv_aead_encrypt()
173 req->assoclen + req->cryptlen); in seqiv_aead_encrypt()
193 req->cryptlen - ivsize, info); in seqiv_aead_encrypt()
214 if (req->cryptlen < ivsize + crypto_aead_authsize(geniv)) in seqiv_aead_decrypt()
224 req->cryptlen - ivsize, req->iv); in seqiv_aead_decrypt()
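echainiv and seqiv are IV generators: the generated IV occupies the first ivsize bytes of the crypt region, so both reject requests with cryptlen < ivsize (plus authsize on decrypt) and hand the inner AEAD cryptlen - ivsize bytes, counting the IV as extra associated data. A sketch of that bookkeeping, with hypothetical names:

/*
 * Sketch of the geniv length split: the IV sits in the first ivsize
 * bytes of the crypt region, so the inner AEAD sees it as associated
 * data and processes ivsize fewer payload bytes.
 */
struct demo_geniv_lens {
        unsigned int inner_assoclen;
        unsigned int inner_cryptlen;
};

static int demo_geniv_split(unsigned int assoclen, unsigned int cryptlen,
                            unsigned int ivsize, unsigned int authsize,
                            int decrypt, struct demo_geniv_lens *out)
{
        /* mirrors the req->cryptlen < ivsize (+ authsize) checks above */
        if (cryptlen < ivsize + (decrypt ? authsize : 0))
                return -1;

        out->inner_assoclen = assoclen + ivsize;
        out->inner_cryptlen = cryptlen - ivsize;
        return 0;
}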
skcipher.c
66 return crypt(&desc, req->dst, req->src, req->cryptlen); in skcipher_crypt_blkcipher()
155 ablkcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, in skcipher_crypt_ablkcipher()
pcrypt.c
170 req->cryptlen, req->iv); in pcrypt_aead_encrypt()
212 req->cryptlen, req->iv); in pcrypt_aead_decrypt()
/linux-4.4.14/include/crypto/
aead.h
90 unsigned int cryptlen; member
356 if (req->cryptlen < crypto_aead_authsize(aead)) in crypto_aead_decrypt()
512 unsigned int cryptlen, u8 *iv) in aead_request_set_crypt() argument
516 req->cryptlen = cryptlen; in aead_request_set_crypt()
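aead.h is where cryptlen is defined: it is a member of struct aead_request, recorded by aead_request_set_crypt(), and crypto_aead_decrypt() rejects requests whose cryptlen is smaller than the auth tag. Below is a hedged caller-side sketch, not code from this tree: a kernel context is assumed, key and scatterlist setup are omitted, and demo_aead_encrypt is a hypothetical name. On encryption cryptlen is the plaintext length and dst needs cryptlen + authsize bytes after the associated data; on decryption cryptlen covers ciphertext plus tag.

#include <crypto/aead.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/errno.h>

/*
 * Caller-side sketch.  src/dst are laid out as assoc data || payload and
 * the key has already been set.  A real caller must pass a completion
 * callback and handle -EINPROGRESS/-EBUSY; this only shows how cryptlen
 * is supplied.
 */
static int demo_aead_encrypt(struct crypto_aead *tfm,
                             struct scatterlist *src, struct scatterlist *dst,
                             unsigned int assoclen, unsigned int ptlen, u8 *iv)
{
        struct aead_request *req;
        int err;

        req = aead_request_alloc(tfm, GFP_KERNEL);
        if (!req)
                return -ENOMEM;

        aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP, NULL, NULL);
        aead_request_set_ad(req, assoclen);
        /* encrypt: cryptlen is the plaintext length */
        aead_request_set_crypt(req, src, dst, ptlen, iv);
        err = crypto_aead_encrypt(req);
        /* for decryption, cryptlen would instead be ptlen + authsize:
         * ciphertext plus the trailing tag */

        aead_request_free(req);
        return err;
}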
skcipher.h
30 unsigned int cryptlen; member
497 unsigned int cryptlen, void *iv) in skcipher_request_set_crypt() argument
501 req->cryptlen = cryptlen; in skcipher_request_set_crypt()
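skcipher.h mirrors the same pattern for plain symmetric ciphers, where cryptlen is simply the number of bytes to transform. Another hypothetical sketch, with the same caveats as above:

#include <crypto/skcipher.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/errno.h>

/* Same shape as the AEAD sketch; cryptlen is just the byte count. */
static int demo_skcipher_encrypt(struct crypto_skcipher *tfm,
                                 struct scatterlist *src,
                                 struct scatterlist *dst,
                                 unsigned int cryptlen, void *iv)
{
        struct skcipher_request *req;
        int err;

        req = skcipher_request_alloc(tfm, GFP_KERNEL);
        if (!req)
                return -ENOMEM;

        skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
                                      NULL, NULL);
        skcipher_request_set_crypt(req, src, dst, cryptlen, iv);
        err = crypto_skcipher_encrypt(req);

        skcipher_request_free(req);
        return err;
}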
/linux-4.4.14/arch/arm64/crypto/
aes-ce-ccm-glue.c
160 u32 len = req->cryptlen; in ccm_encrypt()
206 scatterwalk_map_and_copy(mac, dst, req->cryptlen, in ccm_encrypt()
225 u32 len = req->cryptlen - authsize; in ccm_decrypt()
271 scatterwalk_map_and_copy(buf, src, req->cryptlen - authsize, in ccm_decrypt()
/linux-4.4.14/drivers/crypto/
talitos.c
1032 unsigned int offset, int cryptlen, in sg_to_link_tbl_offset() argument
1038 while (cryptlen && sg && n_sg--) { in sg_to_link_tbl_offset()
1048 if (len > cryptlen) in sg_to_link_tbl_offset()
1049 len = cryptlen; in sg_to_link_tbl_offset()
1056 cryptlen -= len; in sg_to_link_tbl_offset()
1071 int cryptlen, in sg_to_link_tbl() argument
1074 return sg_to_link_tbl_offset(sg, sg_count, 0, cryptlen, in sg_to_link_tbl()
1091 unsigned int cryptlen = areq->cryptlen; in ipsec_esp() local
1139 desc->ptr[4].len = cpu_to_be16(cryptlen); in ipsec_esp()
1142 sg_link_tbl_len = cryptlen; in ipsec_esp()
[all …]
ixp4xx_crypto.c
352 int decryptlen = req->assoclen + req->cryptlen - authsize; in finish_scattered_hmac()
996 unsigned int cryptlen; in aead_perform() local
1012 cryptlen = req->cryptlen; in aead_perform()
1016 cryptlen = req->cryptlen -authsize; in aead_perform()
1032 crypt->auth_len = req->assoclen + cryptlen; in aead_perform()
1083 req->src, cryptlen, authsize, 0); in aead_perform()
1184 return aead_perform(req, 1, req->assoclen, req->cryptlen, req->iv); in aead_encrypt()
1189 return aead_perform(req, 0, req->assoclen, req->cryptlen, req->iv); in aead_decrypt()
picoxcell_crypto.c
335 total = areq->assoclen + areq->cryptlen; in spacc_aead_make_ddts()
421 unsigned total = areq->assoclen + areq->cryptlen + in spacc_aead_free_ddts()
531 aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen, in spacc_aead_do_fallback()
568 proc_len = aead_req->cryptlen + assoc_len; in spacc_aead_submit()
/linux-4.4.14/drivers/crypto/qce/
cipher.h
53 unsigned int cryptlen; member
common.c
198 unsigned int enckeylen, unsigned int cryptlen) in qce_xtskey() argument
209 xtsdusize = min_t(u32, QCE_SECTOR_SIZE, cryptlen); in qce_xtskey()
354 rctx->cryptlen); in qce_setup_regs_ablkcipher()
375 qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen); in qce_setup_regs_ablkcipher()
ablkcipher.c
75 rctx->cryptlen = req->nbytes; in qce_ablkcipher_async_req_handle()
/linux-4.4.14/drivers/crypto/nx/
nx-aes-ccm.c
146 unsigned int cryptlen, u8 *b0) in generate_b0() argument
163 rc = set_msg_len(b0 + 16 - l, cryptlen, l); in generate_b0()
347 unsigned int nbytes = req->cryptlen; in ccm_nx_decrypt()
425 unsigned int nbytes = req->cryptlen; in ccm_nx_encrypt()
nx-aes-gcm.c
328 unsigned int nbytes = req->cryptlen; in gcm_aes_nx_crypt()
/linux-4.4.14/arch/x86/crypto/
aesni-intel_glue.c
981 assoc = kmalloc(req->cryptlen + auth_tag_len + req->assoclen, in helper_rfc4106_encrypt()
986 req->assoclen + req->cryptlen, 0); in helper_rfc4106_encrypt()
992 aesni_gcm_enc_tfm(aes_ctx, dst, src, req->cryptlen, iv, in helper_rfc4106_encrypt()
994 dst + req->cryptlen, auth_tag_len); in helper_rfc4106_encrypt()
1010 req->cryptlen + auth_tag_len, 1); in helper_rfc4106_encrypt()
1040 tempCipherLen = (unsigned long)(req->cryptlen - auth_tag_len); in helper_rfc4106_decrypt()
1064 assoc = kmalloc(req->cryptlen + req->assoclen, GFP_ATOMIC); in helper_rfc4106_decrypt()
1068 req->assoclen + req->cryptlen, 0); in helper_rfc4106_decrypt()
/linux-4.4.14/drivers/crypto/caam/
caamalg.c
2059 append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen, in init_aead_job()
2078 req->assoclen + req->cryptlen + authsize, in init_aead_job()
2082 req->assoclen + req->cryptlen - authsize, in init_aead_job()
2104 if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen)) in init_gcm_job()
2282 src_nents = sg_count(req->src, req->assoclen + req->cryptlen); in aead_edesc_alloc()
2284 req->assoclen + req->cryptlen + in aead_edesc_alloc()
2288 req->assoclen + req->cryptlen + in aead_edesc_alloc()
2514 req->assoclen + req->cryptlen, 1); in aead_decrypt()
2542 if (req->cryptlen < ivsize) in aead_givdecrypt()
2545 req->cryptlen -= ivsize; in aead_givdecrypt()
/linux-4.4.14/drivers/crypto/qat/qat_common/
qat_algs.c
839 cipher_param->cipher_length = areq->cryptlen - digst_size; in qat_alg_aead_dec()
884 cipher_param->cipher_length = areq->cryptlen; in qat_alg_aead_enc()
888 auth_param->auth_len = areq->assoclen + areq->cryptlen; in qat_alg_aead_enc()
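Taken together, the driver matches (ixp4xx, talitos, qat, caam, the arm64 CCM glue, and so on) repeat one convention: encryption ciphers cryptlen bytes and authenticates assoclen + cryptlen, while decryption treats the trailing authsize bytes as the tag, so the cipher length becomes cryptlen - authsize. A small illustrative helper, with hypothetical names, capturing that arithmetic:

/*
 * Illustrative summary of the AEAD length convention used by the
 * drivers above.  enc != 0 means encryption.
 */
struct demo_aead_lens {
        unsigned int cipher_len;        /* bytes run through the cipher */
        unsigned int auth_len;          /* bytes covered by the hash */
};

static struct demo_aead_lens
demo_aead_lengths(unsigned int assoclen, unsigned int cryptlen,
                  unsigned int authsize, int enc)
{
        struct demo_aead_lens l;

        l.cipher_len = enc ? cryptlen : cryptlen - authsize;
        l.auth_len = assoclen + l.cipher_len;
        return l;
}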