cryptlen 281 arch/arm/crypto/aes-ce-glue.c int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
cryptlen 292 arch/arm/crypto/aes-ce-glue.c if (req->cryptlen <= AES_BLOCK_SIZE) {
cryptlen 293 arch/arm/crypto/aes-ce-glue.c if (req->cryptlen < AES_BLOCK_SIZE)
cryptlen 308 arch/arm/crypto/aes-ce-glue.c if (req->cryptlen == AES_BLOCK_SIZE)
cryptlen 311 arch/arm/crypto/aes-ce-glue.c dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
cryptlen 314 arch/arm/crypto/aes-ce-glue.c subreq.cryptlen);
cryptlen 319 arch/arm/crypto/aes-ce-glue.c req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
cryptlen 339 arch/arm/crypto/aes-ce-glue.c int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
cryptlen 350 arch/arm/crypto/aes-ce-glue.c if (req->cryptlen <= AES_BLOCK_SIZE) {
cryptlen 351 arch/arm/crypto/aes-ce-glue.c if (req->cryptlen < AES_BLOCK_SIZE)
cryptlen 366 arch/arm/crypto/aes-ce-glue.c if (req->cryptlen == AES_BLOCK_SIZE)
cryptlen 369 arch/arm/crypto/aes-ce-glue.c dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
cryptlen 372 arch/arm/crypto/aes-ce-glue.c subreq.cryptlen);
cryptlen 377 arch/arm/crypto/aes-ce-glue.c req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
cryptlen 459 arch/arm/crypto/aes-ce-glue.c int tail = req->cryptlen % AES_BLOCK_SIZE;
cryptlen 465 arch/arm/crypto/aes-ce-glue.c if (req->cryptlen < AES_BLOCK_SIZE)
cryptlen 471 arch/arm/crypto/aes-ce-glue.c int xts_blocks = DIV_ROUND_UP(req->cryptlen,
cryptlen 506 arch/arm/crypto/aes-ce-glue.c dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
cryptlen 508 arch/arm/crypto/aes-ce-glue.c dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
cryptlen 531 arch/arm/crypto/aes-ce-glue.c int tail = req->cryptlen % AES_BLOCK_SIZE;
cryptlen 537 arch/arm/crypto/aes-ce-glue.c if (req->cryptlen < AES_BLOCK_SIZE)
cryptlen 543 arch/arm/crypto/aes-ce-glue.c int xts_blocks = DIV_ROUND_UP(req->cryptlen,
cryptlen 578 arch/arm/crypto/aes-ce-glue.c dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
cryptlen 580 arch/arm/crypto/aes-ce-glue.c dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
cryptlen 335 arch/arm/crypto/aes-neonbs-glue.c int tail = req->cryptlen % AES_BLOCK_SIZE;
cryptlen 341 arch/arm/crypto/aes-neonbs-glue.c if (req->cryptlen < AES_BLOCK_SIZE)
cryptlen 350 arch/arm/crypto/aes-neonbs-glue.c req->cryptlen - tail, req->iv);
cryptlen 382 arch/arm/crypto/aes-neonbs-glue.c scatterwalk_map_and_copy(buf, req->dst, req->cryptlen - AES_BLOCK_SIZE,
cryptlen 385 arch/arm/crypto/aes-neonbs-glue.c scatterwalk_map_and_copy(buf, req->src, req->cryptlen, tail, 0);
cryptlen 396 arch/arm/crypto/aes-neonbs-glue.c scatterwalk_map_and_copy(buf, req->dst, req->cryptlen - AES_BLOCK_SIZE,
cryptlen 97 arch/arm/crypto/chacha-neon-glue.c if (req->cryptlen <= CHACHA_BLOCK_SIZE || !crypto_simd_usable())
cryptlen 111 arch/arm/crypto/chacha-neon-glue.c if (req->cryptlen <= CHACHA_BLOCK_SIZE || !crypto_simd_usable())
cryptlen 235 arch/arm64/crypto/aes-ce-ccm-glue.c u32 len = req->cryptlen;
cryptlen 279 arch/arm64/crypto/aes-ce-ccm-glue.c scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
cryptlen 293 arch/arm64/crypto/aes-ce-ccm-glue.c u32 len = req->cryptlen - authsize;
cryptlen 339 arch/arm64/crypto/aes-ce-ccm-glue.c req->assoclen + req->cryptlen - authsize,
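The cts(cbc(aes)) handlers above all derive cbc_blocks as DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2: everything except the final two blocks takes the plain CBC path, and the remaining req->cryptlen - cbc_blocks * AES_BLOCK_SIZE bytes go through the ciphertext-stealing step. A minimal sketch of that split, assuming only the kernel's DIV_ROUND_UP and AES_BLOCK_SIZE; the helper name is made up:

    /* Hypothetical helper: bytes that can take the bulk CBC path. */
    static unsigned int cts_cbc_bulk_bytes(unsigned int cryptlen)
    {
            /* hold back the last two (possibly partial) blocks for CTS */
            int cbc_blocks = DIV_ROUND_UP(cryptlen, AES_BLOCK_SIZE) - 2;

            return cbc_blocks > 0 ? cbc_blocks * AES_BLOCK_SIZE : 0;
    }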
cryptlen 294 arch/arm64/crypto/aes-glue.c int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
cryptlen 304 arch/arm64/crypto/aes-glue.c if (req->cryptlen <= AES_BLOCK_SIZE) {
cryptlen 305 arch/arm64/crypto/aes-glue.c if (req->cryptlen < AES_BLOCK_SIZE)
cryptlen 320 arch/arm64/crypto/aes-glue.c if (req->cryptlen == AES_BLOCK_SIZE)
cryptlen 323 arch/arm64/crypto/aes-glue.c dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
cryptlen 326 arch/arm64/crypto/aes-glue.c subreq.cryptlen);
cryptlen 331 arch/arm64/crypto/aes-glue.c req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
cryptlen 351 arch/arm64/crypto/aes-glue.c int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
cryptlen 361 arch/arm64/crypto/aes-glue.c if (req->cryptlen <= AES_BLOCK_SIZE) {
cryptlen 362 arch/arm64/crypto/aes-glue.c if (req->cryptlen < AES_BLOCK_SIZE)
cryptlen 377 arch/arm64/crypto/aes-glue.c if (req->cryptlen == AES_BLOCK_SIZE)
cryptlen 380 arch/arm64/crypto/aes-glue.c dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
cryptlen 383 arch/arm64/crypto/aes-glue.c subreq.cryptlen);
cryptlen 388 arch/arm64/crypto/aes-glue.c req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
cryptlen 530 arch/arm64/crypto/aes-glue.c int tail = req->cryptlen % AES_BLOCK_SIZE;
cryptlen 536 arch/arm64/crypto/aes-glue.c if (req->cryptlen < AES_BLOCK_SIZE)
cryptlen 542 arch/arm64/crypto/aes-glue.c int xts_blocks = DIV_ROUND_UP(req->cryptlen,
cryptlen 577 arch/arm64/crypto/aes-glue.c dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
cryptlen 579 arch/arm64/crypto/aes-glue.c dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
cryptlen 602 arch/arm64/crypto/aes-glue.c int tail = req->cryptlen % AES_BLOCK_SIZE;
cryptlen 608 arch/arm64/crypto/aes-glue.c if (req->cryptlen < AES_BLOCK_SIZE)
cryptlen 614 arch/arm64/crypto/aes-glue.c int xts_blocks = DIV_ROUND_UP(req->cryptlen,
cryptlen 649 arch/arm64/crypto/aes-glue.c dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
cryptlen 651 arch/arm64/crypto/aes-glue.c dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
cryptlen 323 arch/arm64/crypto/aes-neonbs-glue.c int tail = req->cryptlen % (8 * AES_BLOCK_SIZE);
cryptlen 332 arch/arm64/crypto/aes-neonbs-glue.c if (req->cryptlen < AES_BLOCK_SIZE)
cryptlen 337 arch/arm64/crypto/aes-neonbs-glue.c int xts_blocks = DIV_ROUND_UP(req->cryptlen,
cryptlen 394 arch/arm64/crypto/aes-neonbs-glue.c dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
cryptlen 396 arch/arm64/crypto/aes-neonbs-glue.c dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);
cryptlen 94 arch/arm64/crypto/chacha-neon-glue.c if (req->cryptlen <= CHACHA_BLOCK_SIZE || !crypto_simd_usable())
cryptlen 108 arch/arm64/crypto/chacha-neon-glue.c if (req->cryptlen <= CHACHA_BLOCK_SIZE || !crypto_simd_usable())
cryptlen 402 arch/arm64/crypto/ghash-ce-glue.c u64 dg[], u8 tag[], int cryptlen)
cryptlen 408 arch/arm64/crypto/ghash-ce-glue.c lengths.b = cpu_to_be64(cryptlen * 8);
cryptlen 529 arch/arm64/crypto/ghash-ce-glue.c gcm_final(req, ctx, dg, tag, req->cryptlen);
cryptlen 532 arch/arm64/crypto/ghash-ce-glue.c scatterwalk_map_and_copy(tag, req->dst, req->assoclen + req->cryptlen,
cryptlen 664 arch/arm64/crypto/ghash-ce-glue.c gcm_final(req, ctx, dg, tag, req->cryptlen - authsize);
cryptlen 668 arch/arm64/crypto/ghash-ce-glue.c req->assoclen + req->cryptlen - authsize,
cryptlen 995 arch/s390/crypto/aes_s390.c unsigned int pclen = req->cryptlen;
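gcm_final() in ghash-ce-glue.c folds the bit lengths of the associated data and the ciphertext into the last GHASH block, and on decryption is called with req->cryptlen - authsize because the tag is not part of the ciphertext proper. A sketch of that length block, assuming only __be64 and cpu_to_be64; the struct and function names are illustrative:

    struct gcm_len_block {
            __be64 a;       /* assoclen in bits */
            __be64 b;       /* cryptlen in bits, cf. line 408 above */
    };

    static void gcm_fill_len_block(struct gcm_len_block *lengths,
                                   u64 assoclen, u64 cryptlen)
    {
            lengths->a = cpu_to_be64(assoclen * 8);
            lengths->b = cpu_to_be64(cryptlen * 8);
    }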
cryptlen 44 arch/x86/crypto/aegis128-aesni-glue.c void *state, void *tag_xor, unsigned int cryptlen,
cryptlen 169 arch/x86/crypto/aegis128-aesni-glue.c unsigned int cryptlen,
cryptlen 184 arch/x86/crypto/aegis128-aesni-glue.c crypto_aegis128_aesni_final(&state, tag_xor, req->assoclen, cryptlen);
cryptlen 200 arch/x86/crypto/aegis128-aesni-glue.c unsigned int cryptlen = req->cryptlen;
cryptlen 202 arch/x86/crypto/aegis128-aesni-glue.c crypto_aegis128_aesni_crypt(req, &tag, cryptlen, &OPS);
cryptlen 205 arch/x86/crypto/aegis128-aesni-glue.c req->assoclen + cryptlen, authsize, 1);
cryptlen 222 arch/x86/crypto/aegis128-aesni-glue.c unsigned int cryptlen = req->cryptlen - authsize;
cryptlen 225 arch/x86/crypto/aegis128-aesni-glue.c req->assoclen + cryptlen, authsize, 0);
cryptlen 227 arch/x86/crypto/aegis128-aesni-glue.c crypto_aegis128_aesni_crypt(req, &tag, cryptlen, &OPS);
cryptlen 712 arch/x86/crypto/aesni-intel_glue.c unsigned long left = req->cryptlen;
cryptlen 820 arch/x86/crypto/aesni-intel_glue.c req->assoclen + req->cryptlen -
cryptlen 831 arch/x86/crypto/aesni-intel_glue.c req->assoclen + req->cryptlen,
cryptlen 169 arch/x86/crypto/chacha_glue.c if (req->cryptlen <= CHACHA_BLOCK_SIZE || !crypto_simd_usable())
cryptlen 192 arch/x86/crypto/chacha_glue.c if (req->cryptlen <= CHACHA_BLOCK_SIZE || !crypto_simd_usable())
cryptlen 265 arch/x86/crypto/glue_helper.c const bool cts = (req->cryptlen % XTS_BLOCK_SIZE);
cryptlen 273 arch/x86/crypto/glue_helper.c if (req->cryptlen < XTS_BLOCK_SIZE)
cryptlen 279 arch/x86/crypto/glue_helper.c tail = req->cryptlen % XTS_BLOCK_SIZE + XTS_BLOCK_SIZE;
cryptlen 286 arch/x86/crypto/glue_helper.c req->cryptlen - tail, req->iv);
cryptlen 316 arch/x86/crypto/glue_helper.c dst = src = scatterwalk_ffwd(s, req->src, req->cryptlen);
cryptlen 318 arch/x86/crypto/glue_helper.c dst = scatterwalk_ffwd(d, req->dst, req->cryptlen);
cryptlen 232 crypto/adiantum.c const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;
cryptlen 261 crypto/adiantum.c const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;
cryptlen 295 crypto/adiantum.c const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;
cryptlen 335 crypto/adiantum.c const unsigned int bulk_len = req->cryptlen - BLOCKCIPHER_BLOCK_SIZE;
cryptlen 340 crypto/adiantum.c if (req->cryptlen < BLOCKCIPHER_BLOCK_SIZE)
cryptlen 382 crypto/adiantum.c if (round_up(stream_len, CHACHA_BLOCK_SIZE) <= req->cryptlen)
cryptlen 92 crypto/aead.c unsigned int cryptlen = req->cryptlen;
cryptlen 100 crypto/aead.c crypto_stats_aead_encrypt(cryptlen, alg, ret);
cryptlen 109 crypto/aead.c unsigned int cryptlen = req->cryptlen;
cryptlen 115 crypto/aead.c else if (req->cryptlen < crypto_aead_authsize(aead))
cryptlen 119 crypto/aead.c crypto_stats_aead_decrypt(cryptlen, alg, ret);
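The crypto/aead.c entries encode the core AEAD convention: on encryption req->cryptlen is the plaintext length, while on decryption it covers ciphertext plus tag, so a request shorter than the authsize can be rejected outright. A reduced sketch of the decrypt-side check (error handling trimmed to the essentials):

    static int aead_check_decrypt_len(struct aead_request *req)
    {
            struct crypto_aead *aead = crypto_aead_reqtfm(req);

            /* cryptlen includes the authentication tag on decryption */
            if (req->cryptlen < crypto_aead_authsize(aead))
                    return -EINVAL;
            return 0;
    }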
cryptlen 349 crypto/aegis128-core.c u64 assoclen, u64 cryptlen)
cryptlen 352 crypto/aegis128-core.c u64 cryptbits = cryptlen * 8;
cryptlen 395 crypto/aegis128-core.c unsigned int cryptlen,
cryptlen 405 crypto/aegis128-core.c crypto_aegis128_final(&state, tag_xor, req->assoclen, cryptlen);
cryptlen 418 crypto/aegis128-core.c unsigned int cryptlen = req->cryptlen;
cryptlen 425 crypto/aegis128-core.c crypto_aegis128_crypt(req, &tag, cryptlen, ops);
cryptlen 427 crypto/aegis128-core.c scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
cryptlen 443 crypto/aegis128-core.c unsigned int cryptlen = req->cryptlen - authsize;
cryptlen 445 crypto/aegis128-core.c scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
cryptlen 453 crypto/aegis128-core.c crypto_aegis128_crypt(req, &tag, cryptlen, ops);
cryptlen 1091 crypto/algapi.c void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
cryptlen 1098 crypto/algapi.c atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
cryptlen 1104 crypto/algapi.c void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
cryptlen 1111 crypto/algapi.c atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
cryptlen 1264 crypto/algapi.c void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
cryptlen 1271 crypto/algapi.c atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
cryptlen 1277 crypto/algapi.c void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
cryptlen 1284 crypto/algapi.c atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
cryptlen 135 crypto/authenc.c req->assoclen + req->cryptlen,
cryptlen 159 crypto/authenc.c req->assoclen + req->cryptlen);
cryptlen 167 crypto/authenc.c scatterwalk_map_and_copy(hash, req->dst, req->assoclen + req->cryptlen,
cryptlen 210 crypto/authenc.c unsigned int cryptlen = req->cryptlen;
cryptlen 230 crypto/authenc.c skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv);
cryptlen 269 crypto/authenc.c req->cryptlen - authsize, req->iv);
cryptlen 306 crypto/authenc.c req->assoclen + req->cryptlen - authsize);
cryptlen 107 crypto/authencesn.c unsigned int cryptlen = req->cryptlen;
cryptlen 113 crypto/authencesn.c scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0);
cryptlen 116 crypto/authencesn.c scatterwalk_map_and_copy(hash, dst, assoclen + cryptlen, authsize, 1);
cryptlen 141 crypto/authencesn.c unsigned int cryptlen = req->cryptlen;
cryptlen 151 crypto/authencesn.c scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 1);
cryptlen 157 crypto/authencesn.c ahash_request_set_crypt(ahreq, dst, hash, assoclen + cryptlen);
cryptlen 200 crypto/authencesn.c unsigned int cryptlen = req->cryptlen;
cryptlen 220 crypto/authencesn.c skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv);
cryptlen 241 crypto/authencesn.c unsigned int cryptlen = req->cryptlen - authsize;
cryptlen 252 crypto/authencesn.c scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0);
cryptlen 266 crypto/authencesn.c skcipher_request_set_crypt(skreq, dst, dst, cryptlen, req->iv);
cryptlen 291 crypto/authencesn.c unsigned int cryptlen = req->cryptlen;
cryptlen 297 crypto/authencesn.c cryptlen -= authsize;
cryptlen 300 crypto/authencesn.c err = crypto_authenc_esn_copy(req, assoclen + cryptlen);
cryptlen 305 crypto/authencesn.c scatterwalk_map_and_copy(ihash, req->src, assoclen + cryptlen,
cryptlen 314 crypto/authencesn.c scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 1);
cryptlen 320 crypto/authencesn.c ahash_request_set_crypt(ahreq, dst, ohash, assoclen + cryptlen);
cryptlen 136 crypto/ccm.c unsigned int cryptlen)
cryptlen 154 crypto/ccm.c return set_msg_len(info + 16 - l, cryptlen, l);
cryptlen 177 crypto/ccm.c unsigned int cryptlen)
cryptlen 190 crypto/ccm.c err = format_input(odata, req, cryptlen);
cryptlen 226 crypto/ccm.c cryptlen += ilen;
cryptlen 229 crypto/ccm.c ahash_request_set_crypt(ahreq, plain, pctx->odata, cryptlen);
cryptlen 244 crypto/ccm.c req->assoclen + req->cryptlen,
cryptlen 300 crypto/ccm.c unsigned int cryptlen = req->cryptlen;
cryptlen 309 crypto/ccm.c err = crypto_ccm_auth(req, sg_next(pctx->src), cryptlen);
cryptlen 320 crypto/ccm.c skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
cryptlen 326 crypto/ccm.c scatterwalk_map_and_copy(odata, sg_next(dst), cryptlen,
cryptlen 338 crypto/ccm.c unsigned int cryptlen = req->cryptlen - authsize;
cryptlen 346 crypto/ccm.c err = crypto_ccm_auth(req, dst, cryptlen);
cryptlen 361 crypto/ccm.c unsigned int cryptlen = req->cryptlen;
cryptlen 367 crypto/ccm.c cryptlen -= authsize;
cryptlen 373 crypto/ccm.c scatterwalk_map_and_copy(authtag, sg_next(pctx->src), cryptlen,
cryptlen 385 crypto/ccm.c skcipher_request_set_crypt(skreq, pctx->src, dst, cryptlen + 16, iv);
cryptlen 390 crypto/ccm.c err = crypto_ccm_auth(req, sg_next(dst), cryptlen);
cryptlen 680 crypto/ccm.c req->cryptlen, iv);
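set_msg_len(), called from format_input() in crypto/ccm.c, stores the message length big-endian in the trailing l octets of the 16-byte B0 block. A sketch modelled on those lines (simplified, not a verbatim copy of the tree):

    static int ccm_set_msg_len(u8 *block, unsigned int msglen, int csize)
    {
            __be32 data;

            memset(block, 0, csize);
            block += csize;

            if (csize >= 4)
                    csize = 4;
            else if (msglen > (1 << (8 * csize)))
                    return -EOVERFLOW;      /* length does not fit in l octets */

            data = cpu_to_be32(msglen);
            memcpy(block - csize, (u8 *)&data + 4 - csize, csize);
            return 0;
    }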
cryptlen 41 crypto/chacha20poly1305.c __le64 cryptlen;
cryptlen 61 crypto/chacha20poly1305.c unsigned int cryptlen;
cryptlen 103 crypto/chacha20poly1305.c req->assoclen + rctx->cryptlen,
cryptlen 115 crypto/chacha20poly1305.c req->assoclen + rctx->cryptlen,
cryptlen 133 crypto/chacha20poly1305.c if (rctx->cryptlen == 0)
cryptlen 147 crypto/chacha20poly1305.c rctx->cryptlen, creq->iv);
cryptlen 160 crypto/chacha20poly1305.c if (rctx->cryptlen == req->cryptlen) /* encrypting */
cryptlen 180 crypto/chacha20poly1305.c preq->tail.cryptlen = cpu_to_le64(rctx->cryptlen);
cryptlen 209 crypto/chacha20poly1305.c padlen = -rctx->cryptlen % POLY1305_BLOCK_SIZE;
cryptlen 238 crypto/chacha20poly1305.c if (rctx->cryptlen == req->cryptlen) /* encrypting */
cryptlen 246 crypto/chacha20poly1305.c ahash_request_set_crypt(&preq->req, crypt, NULL, rctx->cryptlen);
cryptlen 409 crypto/chacha20poly1305.c if (req->cryptlen == 0)
cryptlen 423 crypto/chacha20poly1305.c req->cryptlen, creq->iv);
cryptlen 436 crypto/chacha20poly1305.c rctx->cryptlen = req->cryptlen;
cryptlen 458 crypto/chacha20poly1305.c rctx->cryptlen = req->cryptlen - POLY1305_DIGEST_SIZE;
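The chacha20poly1305.c entries show the RFC 7539 tail: each Poly1305 section is padded to a 16-byte boundary (padlen = -cryptlen % POLY1305_BLOCK_SIZE relies on unsigned wraparound to yield the distance to the next boundary), then the AD and ciphertext lengths are appended as little-endian 64-bit words. A sketch of filling that tail; the struct mirrors the field at line 41 but is reconstructed here:

    struct poly_tail_sketch {
            __le64 assoclen;
            __le64 cryptlen;
    };

    static void poly_fill_tail(struct poly_tail_sketch *tail,
                               unsigned int assoclen, unsigned int cryptlen)
    {
            tail->assoclen = cpu_to_le64(assoclen);
            tail->cryptlen = cpu_to_le64(cryptlen);
    }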
cryptlen 299 crypto/cryptd.c skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
cryptlen 327 crypto/cryptd.c skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
cryptlen 218 crypto/ctr.c req->cryptlen, iv);
cryptlen 114 crypto/cts.c lastn = req->cryptlen - offset;
cryptlen 154 crypto/cts.c unsigned int nbytes = req->cryptlen;
cryptlen 196 crypto/cts.c lastn = req->cryptlen - offset;
cryptlen 248 crypto/cts.c unsigned int nbytes = req->cryptlen;
cryptlen 1839 crypto/drbg.c u32 cryptlen = min3(inlen, outlen, (u32)DRBG_OUTSCRATCHLEN);
cryptlen 1843 crypto/drbg.c cryptlen, drbg->V);
cryptlen 1851 crypto/drbg.c memcpy(outbuf, drbg->outscratchpad, cryptlen);
cryptlen 1852 crypto/drbg.c memzero_explicit(drbg->outscratchpad, cryptlen);
cryptlen 1854 crypto/drbg.c outlen -= cryptlen;
cryptlen 1855 crypto/drbg.c outbuf += cryptlen;
cryptlen 37 crypto/echainiv.c if (req->cryptlen < ivsize)
cryptlen 51 crypto/echainiv.c req->assoclen + req->cryptlen,
cryptlen 62 crypto/echainiv.c req->cryptlen, info);
cryptlen 94 crypto/echainiv.c if (req->cryptlen < ivsize)
cryptlen 104 crypto/echainiv.c req->cryptlen - ivsize, req->iv);
cryptlen 167 crypto/essiv.c skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
cryptlen 263 crypto/essiv.c aead_request_set_crypt(subreq, src, req->dst, req->cryptlen, req->iv);
cryptlen 59 crypto/gcm.c unsigned int cryptlen;
cryptlen 184 crypto/gcm.c unsigned int cryptlen)
cryptlen 196 crypto/gcm.c cryptlen + sizeof(pctx->auth_tag),
cryptlen 237 crypto/gcm.c lengths.b = cpu_to_be64(gctx->cryptlen * 8);
cryptlen 298 crypto/gcm.c remain = gcm_remain(gctx->cryptlen);
cryptlen 327 crypto/gcm.c if (gctx->cryptlen)
cryptlen 329 crypto/gcm.c gctx->src, gctx->cryptlen, flags) ?:
cryptlen 425 crypto/gcm.c req->assoclen + req->cryptlen,
cryptlen 436 crypto/gcm.c gctx->cryptlen = req->cryptlen;
cryptlen 464 crypto/gcm.c crypto_gcm_init_crypt(req, req->cryptlen);
cryptlen 478 crypto/gcm.c unsigned int cryptlen = req->cryptlen - authsize;
cryptlen 482 crypto/gcm.c req->assoclen + cryptlen, authsize, 0);
cryptlen 502 crypto/gcm.c crypto_gcm_init_crypt(req, gctx->cryptlen);
cryptlen 513 crypto/gcm.c unsigned int cryptlen = req->cryptlen;
cryptlen 516 crypto/gcm.c cryptlen -= authsize;
cryptlen 521 crypto/gcm.c gctx->cryptlen = cryptlen;
cryptlen 796 crypto/gcm.c req->cryptlen, iv);
cryptlen 1013 crypto/gcm.c aead_request_set_ad(subreq, req->assoclen + req->cryptlen -
cryptlen 1014 crypto/gcm.c subreq->cryptlen);
cryptlen 1024 crypto/gcm.c unsigned int nbytes = req->assoclen + req->cryptlen -
cryptlen 129 crypto/keywrap.c u64 t = 6 * ((req->cryptlen) >> 3);
cryptlen 137 crypto/keywrap.c if (req->cryptlen < (2 * SEMIBSIZE) || req->cryptlen % SEMIBSIZE)
cryptlen 153 crypto/keywrap.c unsigned int nbytes = req->cryptlen;
cryptlen 207 crypto/keywrap.c if (req->cryptlen < (2 * SEMIBSIZE) || req->cryptlen % SEMIBSIZE)
cryptlen 226 crypto/keywrap.c unsigned int nbytes = req->cryptlen;
cryptlen 233 crypto/lrw.c req->cryptlen, req->iv);
cryptlen 113 crypto/pcrypt.c req->cryptlen, req->iv);
cryptlen 158 crypto/pcrypt.c req->cryptlen, req->iv);
cryptlen 61 crypto/seqiv.c if (req->cryptlen < ivsize)
cryptlen 77 crypto/seqiv.c req->assoclen + req->cryptlen,
cryptlen 99 crypto/seqiv.c req->cryptlen - ivsize, info);
cryptlen 120 crypto/seqiv.c if (req->cryptlen < ivsize + crypto_aead_authsize(geniv))
cryptlen 130 crypto/seqiv.c req->cryptlen - ivsize, req->iv);
cryptlen 453 crypto/skcipher.c walk->total = req->cryptlen;
cryptlen 555 crypto/skcipher.c walk->total = req->cryptlen;
cryptlen 564 crypto/skcipher.c walk->total = req->cryptlen;
cryptlen 575 crypto/skcipher.c walk->total = req->cryptlen - crypto_aead_authsize(tfm);
cryptlen 635 crypto/skcipher.c return crypt(&desc, req->dst, req->src, req->cryptlen);
cryptlen 731 crypto/skcipher.c ablkcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
cryptlen 848 crypto/skcipher.c unsigned int cryptlen = req->cryptlen;
cryptlen 856 crypto/skcipher.c crypto_stats_skcipher_encrypt(cryptlen, ret, alg);
cryptlen 865 crypto/skcipher.c unsigned int cryptlen = req->cryptlen;
cryptlen 873 crypto/skcipher.c crypto_stats_skcipher_decrypt(cryptlen, ret, alg);
cryptlen 1932 crypto/testmgr.c if (req->cryptlen != (enc ? vec->plen : vec->clen) ||
cryptlen 1943 crypto/testmgr.c if (req->cryptlen != (enc ? vec->plen : vec->clen))
cryptlen 2521 crypto/testmgr.c if (req->cryptlen != vec->len ||
cryptlen 2531 crypto/testmgr.c if (req->cryptlen != vec->len)
cryptlen 91 crypto/xts.c const bool cts = (req->cryptlen % XTS_BLOCK_SIZE);
cryptlen 167 crypto/xts.c int offset = req->cryptlen & ~(XTS_BLOCK_SIZE - 1);
cryptlen 170 crypto/xts.c int tail = req->cryptlen % XTS_BLOCK_SIZE;
cryptlen 211 crypto/xts.c if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {
cryptlen 231 crypto/xts.c if (!err && unlikely(req->cryptlen % XTS_BLOCK_SIZE)) {
cryptlen 247 crypto/xts.c if (req->cryptlen < XTS_BLOCK_SIZE)
cryptlen 253 crypto/xts.c req->cryptlen & ~(XTS_BLOCK_SIZE - 1), NULL);
cryptlen 272 crypto/xts.c if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))
cryptlen 289 crypto/xts.c if (err || likely((req->cryptlen % XTS_BLOCK_SIZE) == 0))
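crypto/xts.c splits a request at req->cryptlen & ~(XTS_BLOCK_SIZE - 1): the aligned prefix is bulk-processed, a nonzero req->cryptlen % XTS_BLOCK_SIZE triggers the ciphertext-stealing path, and anything below one block is rejected. glue_helper.c additionally holds back one full block together with the partial tail. A sketch of that arithmetic (the helper name is made up):

    /* Hypothetical helper: how many tail bytes to defer to the CTS pass. */
    static int xts_cts_tail(unsigned int cryptlen)
    {
            if (cryptlen < XTS_BLOCK_SIZE)
                    return -EINVAL;                 /* below one block */

            if (cryptlen % XTS_BLOCK_SIZE == 0)
                    return 0;                       /* aligned: no stealing */

            /* the partial block steals from the last full block */
            return cryptlen % XTS_BLOCK_SIZE + XTS_BLOCK_SIZE;
    }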
cryptlen 77 drivers/crypto/amcc/crypto4xx_alg.c if (check_blocksize && !IS_ALIGNED(req->cryptlen, AES_BLOCK_SIZE))
cryptlen 84 drivers/crypto/amcc/crypto4xx_alg.c req->cryptlen, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out,
cryptlen 236 drivers/crypto/amcc/crypto4xx_alg.c req->cryptlen, iv, AES_IV_SIZE,
cryptlen 251 drivers/crypto/amcc/crypto4xx_alg.c req->cryptlen, iv, AES_IV_SIZE,
cryptlen 262 drivers/crypto/amcc/crypto4xx_alg.c unsigned int nblks = ALIGN(req->cryptlen, AES_BLOCK_SIZE) /
cryptlen 279 drivers/crypto/amcc/crypto4xx_alg.c req->cryptlen, req->iv);
cryptlen 370 drivers/crypto/amcc/crypto4xx_alg.c aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
cryptlen 468 drivers/crypto/amcc/crypto4xx_alg.c unsigned int len = req->cryptlen;
cryptlen 614 drivers/crypto/amcc/crypto4xx_alg.c unsigned int len = req->cryptlen;
cryptlen 530 drivers/crypto/amcc/crypto4xx_core.c req->cryptlen, req->dst);
cryptlen 596 drivers/crypto/amcc/crypto4xx_core.c scatterwalk_map_and_copy(icv, dst, aead_req->cryptlen,
cryptlen 601 drivers/crypto/amcc/crypto4xx_core.c aead_req->assoclen + aead_req->cryptlen -
cryptlen 1574 drivers/crypto/atmel-aes.c ctx->textlen = req->cryptlen - (enc ? 0 : authsize);
cryptlen 2216 drivers/crypto/atmel-aes.c if (!enc && req->cryptlen < authsize)
cryptlen 2218 drivers/crypto/atmel-aes.c rctx->textlen = req->cryptlen - (enc ? 0 : authsize);
cryptlen 1117 drivers/crypto/axis/artpec6_crypto.c req->dst, req->cryptlen);
cryptlen 1162 drivers/crypto/axis/artpec6_crypto.c req->dst, req->cryptlen);
cryptlen 1183 drivers/crypto/axis/artpec6_crypto.c unsigned int nblks = ALIGN(req->cryptlen, AES_BLOCK_SIZE) /
cryptlen 1210 drivers/crypto/axis/artpec6_crypto.c req->cryptlen, req->iv);
cryptlen 1290 drivers/crypto/axis/artpec6_crypto.c if (req->cryptlen < AES_BLOCK_SIZE)
cryptlen 1790 drivers/crypto/axis/artpec6_crypto.c ret = artpec6_crypto_setup_sg_descrs_out(common, &walk, areq->cryptlen);
cryptlen 1796 drivers/crypto/axis/artpec6_crypto.c ret = artpec6_crypto_setup_sg_descrs_in(common, &walk, areq->cryptlen);
cryptlen 1803 drivers/crypto/axis/artpec6_crypto.c size_t pad = ALIGN(areq->cryptlen, AES_BLOCK_SIZE) -
cryptlen 1804 drivers/crypto/axis/artpec6_crypto.c areq->cryptlen;
cryptlen 1909 drivers/crypto/axis/artpec6_crypto.c input_length = areq->cryptlen;
cryptlen 1973 drivers/crypto/axis/artpec6_crypto.c size_t output_len = areq->cryptlen;
cryptlen 2167 drivers/crypto/axis/artpec6_crypto.c cipher_req->cryptlen - AES_BLOCK_SIZE,
cryptlen 2179 drivers/crypto/axis/artpec6_crypto.c cipher_req->cryptlen - AES_BLOCK_SIZE,
cryptlen 2202 drivers/crypto/axis/artpec6_crypto.c areq->assoclen + areq->cryptlen -
cryptlen 1593 drivers/crypto/bcm/cipher.c result_len = req->cryptlen;
cryptlen 2582 drivers/crypto/bcm/cipher.c if ((rctx->is_encrypt && (req->cryptlen == 0)) ||
cryptlen 2583 drivers/crypto/bcm/cipher.c (!rctx->is_encrypt && (req->cryptlen == ctx->digestsize))) {
cryptlen 2624 drivers/crypto/bcm/cipher.c payload_len = req->cryptlen;
cryptlen 2719 drivers/crypto/bcm/cipher.c rctx->total_todo = req->cryptlen;
cryptlen 3107 drivers/crypto/bcm/cipher.c flow_log("%s() cryptlen:%u %08x\n", __func__, req->cryptlen,
cryptlen 3108 drivers/crypto/bcm/cipher.c req->cryptlen);
cryptlen 3109 drivers/crypto/bcm/cipher.c dump_sg(req->src, 0, req->cryptlen + req->assoclen);
cryptlen 3117 drivers/crypto/bcm/cipher.c flow_log("%s() cryptlen:%u\n", __func__, req->cryptlen);
cryptlen 3118 drivers/crypto/bcm/cipher.c dump_sg(req->src, 0, req->cryptlen + req->assoclen);
cryptlen 1038 drivers/crypto/caam/caamalg.c edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
cryptlen 1078 drivers/crypto/caam/caamalg.c edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
cryptlen 1118 drivers/crypto/caam/caamalg.c append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
cryptlen 1141 drivers/crypto/caam/caamalg.c req->assoclen + req->cryptlen + authsize,
cryptlen 1145 drivers/crypto/caam/caamalg.c req->assoclen + req->cryptlen - authsize,
cryptlen 1165 drivers/crypto/caam/caamalg.c if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
cryptlen 1282 drivers/crypto/caam/caamalg.c (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);
cryptlen 1286 drivers/crypto/caam/caamalg.c edesc->src_nents > 1 ? 100 : req->cryptlen, 1);
cryptlen 1302 drivers/crypto/caam/caamalg.c append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options);
cryptlen 1315 drivers/crypto/caam/caamalg.c append_seq_out_ptr(desc, dst_dma, req->cryptlen + ivsize, out_options);
cryptlen 1337 drivers/crypto/caam/caamalg.c src_len = req->assoclen + req->cryptlen;
cryptlen 1354 drivers/crypto/caam/caamalg.c src_len = req->assoclen + req->cryptlen +
cryptlen 1649 drivers/crypto/caam/caamalg.c req->assoclen + req->cryptlen, 1);
cryptlen 1694 drivers/crypto/caam/caamalg.c src_nents = sg_nents_for_len(req->src, req->cryptlen);
cryptlen 1697 drivers/crypto/caam/caamalg.c req->cryptlen);
cryptlen 1702 drivers/crypto/caam/caamalg.c dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
cryptlen 1705 drivers/crypto/caam/caamalg.c req->cryptlen);
cryptlen 1800 drivers/crypto/caam/caamalg.c sg_to_sec4_sg(req->src, req->cryptlen, edesc->sec4_sg +
cryptlen 1804 drivers/crypto/caam/caamalg.c sg_to_sec4_sg(req->dst, req->cryptlen, edesc->sec4_sg +
cryptlen 1846 drivers/crypto/caam/caamalg.c if (!req->cryptlen)
cryptlen 1883 drivers/crypto/caam/caamalg.c if (!req->cryptlen)
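The caamalg.c entries repeat the standard AEAD buffer arithmetic: the input is always assoclen + cryptlen, while the output grows by authsize on encryption (tag appended) and shrinks by authsize on decryption (tag consumed). A sketch of that bookkeeping; the function is illustrative, not a caam API:

    static void aead_io_lengths(bool encrypt, unsigned int assoclen,
                                unsigned int cryptlen, unsigned int authsize,
                                unsigned int *src_len, unsigned int *dst_len)
    {
            *src_len = assoclen + cryptlen;
            *dst_len = encrypt ? assoclen + cryptlen + authsize
                               : assoclen + cryptlen - authsize;
    }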
cryptlen 971 drivers/crypto/caam/caamalg_qi.c src_len = req->assoclen + req->cryptlen +
cryptlen 990 drivers/crypto/caam/caamalg_qi.c src_len = req->assoclen + req->cryptlen;
cryptlen 1132 drivers/crypto/caam/caamalg_qi.c out_len = req->assoclen + req->cryptlen +
cryptlen 1134 drivers/crypto/caam/caamalg_qi.c in_len = 4 + ivsize + req->assoclen + req->cryptlen;
cryptlen 1229 drivers/crypto/caam/caamalg_qi.c edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
cryptlen 1267 drivers/crypto/caam/caamalg_qi.c src_nents = sg_nents_for_len(req->src, req->cryptlen);
cryptlen 1270 drivers/crypto/caam/caamalg_qi.c req->cryptlen);
cryptlen 1275 drivers/crypto/caam/caamalg_qi.c dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
cryptlen 1278 drivers/crypto/caam/caamalg_qi.c req->cryptlen);
cryptlen 1363 drivers/crypto/caam/caamalg_qi.c sg_to_qm_sg(req->src, req->cryptlen, sg_table + 1, 0);
cryptlen 1366 drivers/crypto/caam/caamalg_qi.c sg_to_qm_sg(req->dst, req->cryptlen, sg_table + dst_sg_idx, 0);
cryptlen 1384 drivers/crypto/caam/caamalg_qi.c ivsize + req->cryptlen, 0);
cryptlen 1388 drivers/crypto/caam/caamalg_qi.c sizeof(*sg_table), req->cryptlen + ivsize,
cryptlen 1392 drivers/crypto/caam/caamalg_qi.c sizeof(*sg_table), req->cryptlen + ivsize,
cryptlen 1405 drivers/crypto/caam/caamalg_qi.c if (!req->cryptlen)
cryptlen 378 drivers/crypto/caam/caamalg_qi2.c src_len = req->assoclen + req->cryptlen;
cryptlen 423 drivers/crypto/caam/caamalg_qi2.c src_len = req->assoclen + req->cryptlen +
cryptlen 544 drivers/crypto/caam/caamalg_qi2.c out_len = req->assoclen + req->cryptlen +
cryptlen 546 drivers/crypto/caam/caamalg_qi2.c in_len = 4 + ivsize + req->assoclen + req->cryptlen;
cryptlen 1134 drivers/crypto/caam/caamalg_qi2.c src_nents = sg_nents_for_len(req->src, req->cryptlen);
cryptlen 1137 drivers/crypto/caam/caamalg_qi2.c req->cryptlen);
cryptlen 1142 drivers/crypto/caam/caamalg_qi2.c dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
cryptlen 1145 drivers/crypto/caam/caamalg_qi2.c req->cryptlen);
cryptlen 1227 drivers/crypto/caam/caamalg_qi2.c sg_to_qm_sg(req->src, req->cryptlen, sg_table + 1, 0);
cryptlen 1230 drivers/crypto/caam/caamalg_qi2.c sg_to_qm_sg(req->dst, req->cryptlen, sg_table + dst_sg_idx, 0);
cryptlen 1247 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_len(in_fle, req->cryptlen + ivsize);
cryptlen 1248 drivers/crypto/caam/caamalg_qi2.c dpaa2_fl_set_len(out_fle, req->cryptlen + ivsize);
cryptlen 1417 drivers/crypto/caam/caamalg_qi2.c edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
cryptlen 1455 drivers/crypto/caam/caamalg_qi2.c edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
cryptlen 1480 drivers/crypto/caam/caamalg_qi2.c if (!req->cryptlen)
cryptlen 1511 drivers/crypto/caam/caamalg_qi2.c if (!req->cryptlen)
cryptlen 149 drivers/crypto/cavium/nitrox/nitrox_aead.c creq->gph.param0 = cpu_to_be16(rctx->cryptlen);
cryptlen 150 drivers/crypto/cavium/nitrox/nitrox_aead.c creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen);
cryptlen 200 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->cryptlen = areq->cryptlen;
cryptlen 202 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->srclen = areq->assoclen + areq->cryptlen;
cryptlen 231 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->cryptlen = areq->cryptlen - aead->authsize;
cryptlen 233 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->srclen = areq->cryptlen + areq->assoclen;
cryptlen 419 drivers/crypto/cavium/nitrox/nitrox_aead.c aead_rctx->cryptlen = areq->cryptlen;
cryptlen 421 drivers/crypto/cavium/nitrox/nitrox_aead.c aead_rctx->srclen = aead_rctx->assoclen + aead_rctx->cryptlen;
cryptlen 451 drivers/crypto/cavium/nitrox/nitrox_aead.c aead_rctx->cryptlen = areq->cryptlen - aead->authsize;
cryptlen 454 drivers/crypto/cavium/nitrox/nitrox_aead.c areq->cryptlen - GCM_RFC4106_IV_SIZE + areq->assoclen;
cryptlen 231 drivers/crypto/cavium/nitrox/nitrox_req.h unsigned int cryptlen;
cryptlen 150 drivers/crypto/cavium/nitrox/nitrox_skcipher.c skreq->cryptlen);
cryptlen 171 drivers/crypto/cavium/nitrox/nitrox_skcipher.c skreq->cryptlen);
cryptlen 223 drivers/crypto/cavium/nitrox/nitrox_skcipher.c creq->gph.param0 = cpu_to_be16(skreq->cryptlen);
cryptlen 129 drivers/crypto/ccp/ccp-crypto-aes-galois.c rctx->cmd.u.aes.src_len = req->cryptlen;
cryptlen 240 drivers/crypto/ccree/cc_aead.c areq->cryptlen, areq->assoclen);
cryptlen 245 drivers/crypto/ccree/cc_aead.c u32 skip = areq->cryptlen + areq_ctx->dst_offset;
cryptlen 749 drivers/crypto/ccree/cc_aead.c areq_ctx->cryptlen > 0)
cryptlen 759 drivers/crypto/ccree/cc_aead.c areq_ctx->cryptlen > 0)
cryptlen 796 drivers/crypto/ccree/cc_aead.c areq_ctx->cryptlen, NS_BIT);
cryptlen 846 drivers/crypto/ccree/cc_aead.c if (areq_ctx->cryptlen == 0)
cryptlen 855 drivers/crypto/ccree/cc_aead.c areq_ctx->src_offset), areq_ctx->cryptlen,
cryptlen 860 drivers/crypto/ccree/cc_aead.c areq_ctx->cryptlen, NS_BIT, 0);
cryptlen 982 drivers/crypto/ccree/cc_aead.c if (req_ctx->cryptlen == 0)
cryptlen 1329 drivers/crypto/ccree/cc_aead.c (req->cryptlen - ctx->authsize) : req->cryptlen;
cryptlen 1332 drivers/crypto/ccree/cc_aead.c req->cryptlen < ctx->authsize)
cryptlen 1498 drivers/crypto/ccree/cc_aead.c if (req_ctx->cryptlen)
cryptlen 1557 drivers/crypto/ccree/cc_aead.c unsigned int cryptlen = (req_ctx->gen_ctx.op_type ==
cryptlen 1559 drivers/crypto/ccree/cc_aead.c req->cryptlen :
cryptlen 1560 drivers/crypto/ccree/cc_aead.c (req->cryptlen - ctx->authsize);
cryptlen 1581 drivers/crypto/ccree/cc_aead.c rc = set_msg_len(b0 + 16 - l, cryptlen, l); /* Write L'. */
cryptlen 1721 drivers/crypto/ccree/cc_aead.c if (req_ctx->cryptlen && !req_ctx->plaintext_authenticate_only) {
cryptlen 1833 drivers/crypto/ccree/cc_aead.c if (req_ctx->cryptlen)
cryptlen 1847 drivers/crypto/ccree/cc_aead.c unsigned int cryptlen = (req_ctx->gen_ctx.op_type ==
cryptlen 1849 drivers/crypto/ccree/cc_aead.c req->cryptlen :
cryptlen 1850 drivers/crypto/ccree/cc_aead.c (req->cryptlen - ctx->authsize);
cryptlen 1854 drivers/crypto/ccree/cc_aead.c __func__, cryptlen, req_ctx->assoclen, ctx->authsize);
cryptlen 1872 drivers/crypto/ccree/cc_aead.c temp64 = cpu_to_be64(cryptlen * 8);
cryptlen 1881 drivers/crypto/ccree/cc_aead.c GCM_BLOCK_RFC4_IV_SIZE + cryptlen) * 8);
cryptlen 1919 drivers/crypto/ccree/cc_aead.c sg_virt(req->dst), req->dst->offset, req->cryptlen);
cryptlen 1926 drivers/crypto/ccree/cc_aead.c req->cryptlen, areq_ctx->assoclen);
cryptlen 96 drivers/crypto/ccree/cc_aead.h unsigned int cryptlen;
cryptlen 68 drivers/crypto/ccree/cc_buffer_mgr.c u32 skip = areq_ctx->assoclen + req->cryptlen;
cryptlen 547 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->assoclen, req->cryptlen);
cryptlen 737 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->src_sgl, areq_ctx->cryptlen,
cryptlen 777 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->src_sgl, areq_ctx->cryptlen,
cryptlen 781 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst_sgl, areq_ctx->cryptlen,
cryptlen 809 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst_sgl, areq_ctx->cryptlen,
cryptlen 813 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->src_sgl, areq_ctx->cryptlen,
cryptlen 849 drivers/crypto/ccree/cc_buffer_mgr.c unsigned int size_for_map = areq_ctx->assoclen + req->cryptlen;
cryptlen 896 drivers/crypto/ccree/cc_buffer_mgr.c size_for_map = areq_ctx->assoclen + req->cryptlen;
cryptlen 1033 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->cryptlen = (areq_ctx->gen_ctx.op_type ==
cryptlen 1035 drivers/crypto/ccree/cc_buffer_mgr.c req->cryptlen :
cryptlen 1036 drivers/crypto/ccree/cc_buffer_mgr.c (req->cryptlen - authsize);
cryptlen 1115 drivers/crypto/ccree/cc_buffer_mgr.c size_to_map = req->cryptlen + areq_ctx->assoclen;
cryptlen 861 drivers/crypto/ccree/cc_cipher.c unsigned int nbytes = req->cryptlen;
cryptlen 192 drivers/crypto/chelsio/chcr_algo.c req->cryptlen - authsize);
cryptlen 2273 drivers/crypto/chelsio/chcr_algo.c if (reqctx->op && req->cryptlen < authsize)
cryptlen 2298 drivers/crypto/chelsio/chcr_algo.c if (((req->cryptlen - (op_type ? authsize : 0)) == 0) ||
cryptlen 2315 drivers/crypto/chelsio/chcr_algo.c aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
cryptlen 2345 drivers/crypto/chelsio/chcr_algo.c if (req->cryptlen == 0)
cryptlen 2357 drivers/crypto/chelsio/chcr_algo.c dnents = sg_nents_xlen(req->dst, req->assoclen + req->cryptlen +
cryptlen 2360 drivers/crypto/chelsio/chcr_algo.c snents = sg_nents_xlen(req->src, req->assoclen + req->cryptlen,
cryptlen 2366 drivers/crypto/chelsio/chcr_algo.c reqctx->imm = (transhdr_len + req->assoclen + req->cryptlen) <
cryptlen 2368 drivers/crypto/chelsio/chcr_algo.c temp = reqctx->imm ? roundup(req->assoclen + req->cryptlen, 16)
cryptlen 2396 drivers/crypto/chelsio/chcr_algo.c chcr_req->sec_cpl.pldlen = htonl(req->assoclen + IV + req->cryptlen);
cryptlen 2448 drivers/crypto/chelsio/chcr_algo.c kctx_len + (reqctx->imm ? (req->assoclen + req->cryptlen) : 0);
cryptlen 2470 drivers/crypto/chelsio/chcr_algo.c dst_size = req->assoclen + req->cryptlen + (op_type ?
cryptlen 2472 drivers/crypto/chelsio/chcr_algo.c if (!req->cryptlen || !dst_size)
cryptlen 2516 drivers/crypto/chelsio/chcr_algo.c dst_size = req->assoclen + req->cryptlen + (op_type ?
cryptlen 2518 drivers/crypto/chelsio/chcr_algo.c if (!req->cryptlen || !dst_size)
cryptlen 2548 drivers/crypto/chelsio/chcr_algo.c buf, req->cryptlen + req->assoclen, 0);
cryptlen 2554 drivers/crypto/chelsio/chcr_algo.c ulptx_walk_add_sg(&ulp_walk, req->src, req->cryptlen +
cryptlen 2573 drivers/crypto/chelsio/chcr_algo.c temp = req->assoclen + req->cryptlen +
cryptlen 2767 drivers/crypto/chelsio/chcr_algo.c req->cryptlen - m : req->cryptlen, l);
cryptlen 2831 drivers/crypto/chelsio/chcr_algo.c auth_offset = req->cryptlen ?
cryptlen 2834 drivers/crypto/chelsio/chcr_algo.c if (crypto_aead_authsize(tfm) != req->cryptlen)
cryptlen 2844 drivers/crypto/chelsio/chcr_algo.c htonl(req->assoclen + IV + req->cryptlen + ccm_xtra);
cryptlen 2915 drivers/crypto/chelsio/chcr_algo.c dnents = sg_nents_xlen(req->dst, req->assoclen + req->cryptlen
cryptlen 2920 drivers/crypto/chelsio/chcr_algo.c snents = sg_nents_xlen(req->src, req->assoclen + req->cryptlen,
cryptlen 2925 drivers/crypto/chelsio/chcr_algo.c reqctx->imm = (transhdr_len + req->assoclen + req->cryptlen +
cryptlen 2927 drivers/crypto/chelsio/chcr_algo.c temp = reqctx->imm ? roundup(req->assoclen + req->cryptlen +
cryptlen 2966 drivers/crypto/chelsio/chcr_algo.c kctx_len + (reqctx->imm ? (req->assoclen + req->cryptlen +
cryptlen 3007 drivers/crypto/chelsio/chcr_algo.c dnents = sg_nents_xlen(req->dst, req->assoclen + req->cryptlen +
cryptlen 3010 drivers/crypto/chelsio/chcr_algo.c snents = sg_nents_xlen(req->src, req->assoclen + req->cryptlen,
cryptlen 3016 drivers/crypto/chelsio/chcr_algo.c reqctx->imm = (transhdr_len + req->assoclen + req->cryptlen) <=
cryptlen 3018 drivers/crypto/chelsio/chcr_algo.c temp = reqctx->imm ? roundup(req->assoclen + req->cryptlen, 16) :
cryptlen 3042 drivers/crypto/chelsio/chcr_algo.c htonl(req->assoclen + IV + req->cryptlen);
cryptlen 3082 drivers/crypto/chelsio/chcr_algo.c kctx_len + (reqctx->imm ? (req->assoclen + req->cryptlen) : 0);
cryptlen 730 drivers/crypto/hisilicon/sec/sec_algs.c ret = sec_alg_alloc_and_calc_split_sizes(skreq->cryptlen, &split_sizes,
cryptlen 147 drivers/crypto/inside-secure/safexcel_cipher.c u32 cryptlen, u32 assoclen, u32 digestsize)
cryptlen 165 drivers/crypto/inside-secure/safexcel_cipher.c cryptlen -= digestsize;
cryptlen 188 drivers/crypto/inside-secure/safexcel_cipher.c if (likely(cryptlen)) {
cryptlen 192 drivers/crypto/inside-secure/safexcel_cipher.c token[10].packet_length = cryptlen;
cryptlen 236 drivers/crypto/inside-secure/safexcel_cipher.c cbcmaciv[14] = cryptlen >> 8;
cryptlen 237 drivers/crypto/inside-secure/safexcel_cipher.c cbcmaciv[15] = cryptlen & 255;
cryptlen 251 drivers/crypto/inside-secure/safexcel_cipher.c if (likely(cryptlen)) {
cryptlen 258 drivers/crypto/inside-secure/safexcel_cipher.c cryptlen &= 15;
cryptlen 259 drivers/crypto/inside-secure/safexcel_cipher.c token[11].packet_length = cryptlen ? 16 - cryptlen : 0;
cryptlen 492 drivers/crypto/inside-secure/safexcel_cipher.c unsigned int cryptlen,
cryptlen 539 drivers/crypto/inside-secure/safexcel_cipher.c (cryptlen -
cryptlen 551 drivers/crypto/inside-secure/safexcel_cipher.c unsigned int cryptlen, unsigned int assoclen,
cryptlen 564 drivers/crypto/inside-secure/safexcel_cipher.c unsigned int totlen_src = cryptlen + assoclen;
cryptlen 686 drivers/crypto/inside-secure/safexcel_cipher.c sreq->direction, cryptlen,
cryptlen 690 drivers/crypto/inside-secure/safexcel_cipher.c cryptlen);
cryptlen 846 drivers/crypto/inside-secure/safexcel_cipher.c req->dst, req->cryptlen, sreq,
cryptlen 870 drivers/crypto/inside-secure/safexcel_cipher.c req->cryptlen + crypto_aead_authsize(tfm),
cryptlen 918 drivers/crypto/inside-secure/safexcel_cipher.c req->dst, req->cryptlen, 0, 0, input_iv,
cryptlen 942 drivers/crypto/inside-secure/safexcel_cipher.c req->cryptlen, req->assoclen,
cryptlen 2025 drivers/crypto/inside-secure/safexcel_cipher.c if (req->cryptlen < XTS_BLOCK_SIZE)
cryptlen 2033 drivers/crypto/inside-secure/safexcel_cipher.c if (req->cryptlen < XTS_BLOCK_SIZE)
cryptlen 347 drivers/crypto/ixp4xx_crypto.c int decryptlen = req->assoclen + req->cryptlen - authsize;
cryptlen 985 drivers/crypto/ixp4xx_crypto.c unsigned int cryptlen;
cryptlen 1001 drivers/crypto/ixp4xx_crypto.c cryptlen = req->cryptlen;
cryptlen 1005 drivers/crypto/ixp4xx_crypto.c cryptlen = req->cryptlen -authsize;
cryptlen 1021 drivers/crypto/ixp4xx_crypto.c crypt->auth_len = req->assoclen + cryptlen;
cryptlen 1070 drivers/crypto/ixp4xx_crypto.c req->src, cryptlen, authsize, 0);
cryptlen 1204 drivers/crypto/ixp4xx_crypto.c return aead_perform(req, 1, req->assoclen, req->cryptlen, req->iv);
cryptlen 1209 drivers/crypto/ixp4xx_crypto.c return aead_perform(req, 0, req->assoclen, req->cryptlen, req->iv);
cryptlen 42 drivers/crypto/marvell/cipher.c mv_cesa_req_dma_iter_init(&iter->base, req->cryptlen);
cryptlen 86 drivers/crypto/marvell/cipher.c size_t len = min_t(size_t, req->cryptlen - sreq->offset,
cryptlen 127 drivers/crypto/marvell/cipher.c if (sreq->offset < req->cryptlen)
cryptlen 205 drivers/crypto/marvell/cipher.c atomic_sub(skreq->cryptlen, &engine->load);
cryptlen 418 drivers/crypto/marvell/cipher.c if (!IS_ALIGNED(req->cryptlen, blksize))
cryptlen 421 drivers/crypto/marvell/cipher.c creq->src_nents = sg_nents_for_len(req->src, req->cryptlen);
cryptlen 426 drivers/crypto/marvell/cipher.c creq->dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
cryptlen 454 drivers/crypto/marvell/cipher.c engine = mv_cesa_select_engine(req->cryptlen);
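drivers/crypto/marvell/cipher.c walks req->cryptlen in slices because the CESA engine has a bounded SRAM window: each pass handles min(cryptlen - offset, window) bytes and the request is resubmitted while offset < cryptlen. A sketch of that chunking, with a made-up window size in place of the driver's constant:

    #define SRAM_WINDOW 2048    /* stand-in for the engine's real limit */

    static size_t next_chunk_len(unsigned int cryptlen, unsigned int offset)
    {
            /* never hand the engine more than it can hold at once */
            return min_t(size_t, cryptlen - offset, SRAM_WINDOW);
    }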
cryptlen 963 drivers/crypto/mediatek/mtk-aes.c u32 len = req->assoclen + req->cryptlen;
cryptlen 996 drivers/crypto/mediatek/mtk-aes.c gctx->textlen = req->cryptlen - (enc ? 0 : gctx->authsize);
cryptlen 134 drivers/crypto/nx/nx-aes-ccm.c unsigned int cryptlen, u8 *b0)
cryptlen 151 drivers/crypto/nx/nx-aes-ccm.c rc = set_msg_len(b0 + 16 - l, cryptlen, l);
cryptlen 335 drivers/crypto/nx/nx-aes-ccm.c unsigned int nbytes = req->cryptlen;
cryptlen 413 drivers/crypto/nx/nx-aes-ccm.c unsigned int nbytes = req->cryptlen;
cryptlen 317 drivers/crypto/nx/nx-aes-gcm.c unsigned int nbytes = req->cryptlen;
cryptlen 90 drivers/crypto/omap-aes-gcm.c int alen, clen, cryptlen, assoclen, ret;
cryptlen 98 drivers/crypto/omap-aes-gcm.c cryptlen = req->cryptlen;
cryptlen 104 drivers/crypto/omap-aes-gcm.c cryptlen -= authlen;
cryptlen 107 drivers/crypto/omap-aes-gcm.c clen = ALIGN(cryptlen, AES_BLOCK_SIZE);
cryptlen 109 drivers/crypto/omap-aes-gcm.c nsg = !!(assoclen && cryptlen);
cryptlen 125 drivers/crypto/omap-aes-gcm.c if (cryptlen) {
cryptlen 128 drivers/crypto/omap-aes-gcm.c ret = omap_crypto_align_sg(&tmp, cryptlen,
cryptlen 138 drivers/crypto/omap-aes-gcm.c dd->total = cryptlen;
cryptlen 151 drivers/crypto/omap-aes-gcm.c ret = omap_crypto_align_sg(&dd->out_sg, cryptlen,
cryptlen 321 drivers/crypto/omap-aes-gcm.c if (assoclen + req->cryptlen == 0) {
cryptlen 321 drivers/crypto/picoxcell_crypto.c total = areq->assoclen + areq->cryptlen;
cryptlen 415 drivers/crypto/picoxcell_crypto.c unsigned total = areq->assoclen + areq->cryptlen +
cryptlen 541 drivers/crypto/picoxcell_crypto.c aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
cryptlen 578 drivers/crypto/picoxcell_crypto.c proc_len = aead_req->cryptlen + assoc_len;
cryptlen 890 drivers/crypto/qat/qat_common/qat_algs.c cipher_param->cipher_length = areq->cryptlen - digst_size;
cryptlen 935 drivers/crypto/qat/qat_common/qat_algs.c cipher_param->cipher_length = areq->cryptlen;
cryptlen 939 drivers/crypto/qat/qat_common/qat_algs.c auth_param->auth_len = areq->assoclen + areq->cryptlen;
cryptlen 67 drivers/crypto/qce/ablkcipher.c rctx->cryptlen = req->nbytes;
cryptlen 45 drivers/crypto/qce/cipher.h unsigned int cryptlen;
cryptlen 190 drivers/crypto/qce/common.c unsigned int enckeylen, unsigned int cryptlen)
cryptlen 201 drivers/crypto/qce/common.c xtsdusize = min_t(u32, QCE_SECTOR_SIZE, cryptlen);
cryptlen 346 drivers/crypto/qce/common.c rctx->cryptlen);
cryptlen 367 drivers/crypto/qce/common.c qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen);
cryptlen 464 drivers/crypto/stm32/stm32-cryp.c return is_encrypt(cryp) ? cryp->areq->cryptlen :
cryptlen 465 drivers/crypto/stm32/stm32-cryp.c cryp->areq->cryptlen - cryp->authsize;
cryptlen 969 drivers/crypto/stm32/stm32-cryp.c cryp->total_in = areq->assoclen + areq->cryptlen;
cryptlen 1127 drivers/crypto/stm32/stm32-cryp.c size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
cryptlen 1128 drivers/crypto/stm32/stm32-cryp.c cryp->areq->cryptlen - AES_BLOCK_SIZE;
cryptlen 30 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c unsigned int ileft = areq->cryptlen;
cryptlen 31 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c unsigned int oleft = areq->cryptlen;
cryptlen 37 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c if (!areq->cryptlen)
cryptlen 70 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c ileft = areq->cryptlen / 4;
cryptlen 71 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c oleft = areq->cryptlen / 4;
cryptlen 132 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c areq->cryptlen, areq->iv);
cryptlen 163 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c unsigned int ileft = areq->cryptlen;
cryptlen 164 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c unsigned int oleft = areq->cryptlen;
cryptlen 174 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c if (!areq->cryptlen)
cryptlen 183 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c if (areq->cryptlen % algt->alg.crypto.base.cra_blocksize)
cryptlen 231 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c ileft = areq->cryptlen;
cryptlen 232 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c oleft = areq->cryptlen;
cryptlen 283 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c oi, mi.length, ileft, areq->cryptlen, rx_cnt,
cryptlen 284 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c oo, mo.length, oleft, areq->cryptlen, tx_cnt, ob);
cryptlen 999 drivers/crypto/talitos.c unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
cryptlen 1009 drivers/crypto/talitos.c cryptlen + authsize, areq->assoclen);
cryptlen 1019 drivers/crypto/talitos.c areq->assoclen + cryptlen - ivsize);
cryptlen 1104 drivers/crypto/talitos.c int cryptlen = datalen + elen;
cryptlen 1106 drivers/crypto/talitos.c while (cryptlen && sg && n_sg--) {
cryptlen 1116 drivers/crypto/talitos.c if (len > cryptlen)
cryptlen 1117 drivers/crypto/talitos.c len = cryptlen;
cryptlen 1131 drivers/crypto/talitos.c cryptlen -= len;
cryptlen 1206 drivers/crypto/talitos.c unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
cryptlen 1225 drivers/crypto/talitos.c areq->assoclen + cryptlen);
cryptlen 1256 drivers/crypto/talitos.c ret = talitos_sg_map_ext(dev, areq->src, cryptlen, edesc, &desc->ptr[4],
cryptlen 1276 drivers/crypto/talitos.c ret = talitos_sg_map_ext(dev, areq->dst, cryptlen, edesc, &desc->ptr[5],
cryptlen 1297 drivers/crypto/talitos.c sg_count, areq->assoclen + cryptlen, tbl_off);
cryptlen 1326 drivers/crypto/talitos.c unsigned int cryptlen,
cryptlen 1342 drivers/crypto/talitos.c if (cryptlen + authsize > max_len) {
cryptlen 1348 drivers/crypto/talitos.c src_len = assoclen + cryptlen + authsize;
cryptlen 1358 drivers/crypto/talitos.c src_len = assoclen + cryptlen + (encrypt ? 0 : authsize);
cryptlen 1365 drivers/crypto/talitos.c dst_len = assoclen + cryptlen + (encrypt ? authsize : 0);
cryptlen 1426 drivers/crypto/talitos.c unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
cryptlen 1429 drivers/crypto/talitos.c iv, areq->assoclen, cryptlen,
cryptlen 1488 drivers/crypto/talitos.c req->assoclen + req->cryptlen - authsize);
cryptlen 1580 drivers/crypto/talitos.c unsigned int cryptlen = areq->nbytes;
cryptlen 1598 drivers/crypto/talitos.c cryptlen);
cryptlen 1606 drivers/crypto/talitos.c sg_count = talitos_sg_map(dev, areq->src, cryptlen, edesc,
cryptlen 1618 drivers/crypto/talitos.c ret = talitos_sg_map(dev, areq->dst, cryptlen, edesc, &desc->ptr[4],
cryptlen 87 drivers/crypto/vmx/aes_xts.c if (req->cryptlen < AES_BLOCK_SIZE)
cryptlen 90 drivers/crypto/vmx/aes_xts.c if (!crypto_simd_usable() || (req->cryptlen % XTS_BLOCK_SIZE) != 0) {
cryptlen 3506 drivers/md/dm-integrity.c section_req->cryptlen = (size_t)ic->journal_section_sectors << SECTOR_SHIFT;
cryptlen 85 include/crypto/aead.h unsigned int cryptlen;
cryptlen 483 include/crypto/aead.h unsigned int cryptlen, u8 *iv)
cryptlen 487 include/crypto/aead.h req->cryptlen = cryptlen;
cryptlen 25 include/crypto/skcipher.h unsigned int cryptlen;
cryptlen 610 include/crypto/skcipher.h unsigned int cryptlen, void *iv)
cryptlen 614 include/crypto/skcipher.h req->cryptlen = cryptlen;
cryptlen 599 include/linux/crypto.h void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret);
cryptlen 600 include/linux/crypto.h void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret);
cryptlen 614 include/linux/crypto.h void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
cryptlen 615 include/linux/crypto.h void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg);
cryptlen 625 include/linux/crypto.h static inline void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret)
cryptlen 627 include/linux/crypto.h static inline void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg, int ret)
cryptlen 655 include/linux/crypto.h static inline void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg)
cryptlen 657 include/linux/crypto.h static inline void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret, struct crypto_alg *alg)
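The include/crypto setters at the end of the listing are where callers establish the cryptlen conventions seen throughout. A usage sketch, assuming the request, scatterlists and IV were prepared elsewhere; the function names are illustrative:

    /* Encrypt: cryptlen is the plaintext length; the cipher emits
     * ptlen + authsize bytes (tag appended at assoclen + cryptlen). */
    static int example_encrypt(struct aead_request *req,
                               struct scatterlist *src,
                               struct scatterlist *dst,
                               unsigned int ptlen, u8 *iv)
    {
            aead_request_set_crypt(req, src, dst, ptlen, iv);
            return crypto_aead_encrypt(req);
    }

    /* Decrypt: cryptlen covers ciphertext plus tag; a mismatching tag
     * yields -EBADMSG. */
    static int example_decrypt(struct aead_request *req,
                               struct scatterlist *src,
                               struct scatterlist *dst,
                               unsigned int taggedlen, u8 *iv)
    {
            aead_request_set_crypt(req, src, dst, taggedlen, iv);
            return crypto_aead_decrypt(req);
    }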