assoclen 98 arch/arm64/crypto/aes-ce-ccm-glue.c if (req->assoclen)
assoclen 146 arch/arm64/crypto/aes-ce-ccm-glue.c u32 len = req->assoclen;
assoclen 242 arch/arm64/crypto/aes-ce-ccm-glue.c if (req->assoclen)
assoclen 279 arch/arm64/crypto/aes-ce-ccm-glue.c scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
assoclen 300 arch/arm64/crypto/aes-ce-ccm-glue.c if (req->assoclen)
assoclen 339 arch/arm64/crypto/aes-ce-ccm-glue.c req->assoclen + req->cryptlen - authsize,
assoclen 371 arch/arm64/crypto/ghash-ce-glue.c u32 len = req->assoclen;
assoclen 407 arch/arm64/crypto/ghash-ce-glue.c lengths.a = cpu_to_be64(req->assoclen * 8);
assoclen 431 arch/arm64/crypto/ghash-ce-glue.c if (req->assoclen)
assoclen 532 arch/arm64/crypto/ghash-ce-glue.c scatterwalk_map_and_copy(tag, req->dst, req->assoclen + req->cryptlen,
assoclen 551 arch/arm64/crypto/ghash-ce-glue.c if (req->assoclen)
assoclen 668 arch/arm64/crypto/ghash-ce-glue.c req->assoclen + req->cryptlen - authsize,
assoclen 994 arch/s390/crypto/aes_s390.c unsigned int aadlen = req->assoclen;
assoclen 45 arch/x86/crypto/aegis128-aesni-glue.c unsigned int assoclen);
assoclen 71 arch/x86/crypto/aegis128-aesni-glue.c unsigned int assoclen)
assoclen 78 arch/x86/crypto/aegis128-aesni-glue.c while (assoclen != 0) {
assoclen 79 arch/x86/crypto/aegis128-aesni-glue.c unsigned int size = scatterwalk_clamp(&walk, assoclen);
assoclen 104 arch/x86/crypto/aegis128-aesni-glue.c assoclen -= size;
assoclen 108 arch/x86/crypto/aegis128-aesni-glue.c scatterwalk_done(&walk, 0, assoclen);
assoclen 182 arch/x86/crypto/aegis128-aesni-glue.c crypto_aegis128_aesni_process_ad(&state, req->src, req->assoclen);
assoclen 184 arch/x86/crypto/aegis128-aesni-glue.c crypto_aegis128_aesni_final(&state, tag_xor, req->assoclen, cryptlen);
assoclen 205 arch/x86/crypto/aegis128-aesni-glue.c req->assoclen + cryptlen, authsize, 1);
assoclen 225 arch/x86/crypto/aegis128-aesni-glue.c req->assoclen + cryptlen, authsize, 0);
assoclen 704 arch/x86/crypto/aesni-intel_glue.c unsigned int assoclen, u8 *hash_subkey,
assoclen 737 arch/x86/crypto/aesni-intel_glue.c if (req->src->length >= assoclen && req->src->length &&
assoclen 744 arch/x86/crypto/aesni-intel_glue.c assocmem = kmalloc(assoclen, GFP_ATOMIC);
assoclen 749 arch/x86/crypto/aesni-intel_glue.c scatterwalk_map_and_copy(assoc, req->src, 0, assoclen, 0);
assoclen 753 arch/x86/crypto/aesni-intel_glue.c src_sg = scatterwalk_ffwd(src_start, req->src, req->assoclen);
assoclen 757 arch/x86/crypto/aesni-intel_glue.c req->assoclen);
assoclen 764 arch/x86/crypto/aesni-intel_glue.c hash_subkey, assoc, assoclen);
assoclen 820 arch/x86/crypto/aesni-intel_glue.c req->assoclen + req->cryptlen -
assoclen 831 arch/x86/crypto/aesni-intel_glue.c req->assoclen + req->cryptlen,
assoclen 837 arch/x86/crypto/aesni-intel_glue.c static int gcmaes_encrypt(struct aead_request *req, unsigned int assoclen,
assoclen 840 arch/x86/crypto/aesni-intel_glue.c return gcmaes_crypt_by_sg(true, req, assoclen, hash_subkey, iv,
assoclen 844 arch/x86/crypto/aesni-intel_glue.c static int gcmaes_decrypt(struct aead_request *req, unsigned int assoclen,
assoclen 847 arch/x86/crypto/aesni-intel_glue.c return gcmaes_crypt_by_sg(false, req, assoclen, hash_subkey, iv,
assoclen 863 arch/x86/crypto/aesni-intel_glue.c if (unlikely(req->assoclen != 16 && req->assoclen != 20))
assoclen 873 arch/x86/crypto/aesni-intel_glue.c return gcmaes_encrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
assoclen 886 arch/x86/crypto/aesni-intel_glue.c if (unlikely(req->assoclen != 16 && req->assoclen != 20))
assoclen 900 arch/x86/crypto/aesni-intel_glue.c return gcmaes_decrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
assoclen 1019 arch/x86/crypto/aesni-intel_glue.c return gcmaes_encrypt(req, req->assoclen, ctx->hash_subkey, iv,
assoclen 1034 arch/x86/crypto/aesni-intel_glue.c return gcmaes_decrypt(req, req->assoclen, ctx->hash_subkey, iv,
assoclen 283 crypto/aegis128-core.c unsigned int assoclen)
assoclen 290 crypto/aegis128-core.c while (assoclen != 0) {
assoclen 291 crypto/aegis128-core.c unsigned int size = scatterwalk_clamp(&walk, assoclen);
assoclen 314 crypto/aegis128-core.c assoclen -= size;
assoclen 317 crypto/aegis128-core.c scatterwalk_done(&walk, 0, assoclen);
assoclen 349 crypto/aegis128-core.c u64 assoclen, u64 cryptlen)
assoclen 351 crypto/aegis128-core.c u64 assocbits = assoclen * 8;
assoclen 403 crypto/aegis128-core.c crypto_aegis128_process_ad(&state, req->src, req->assoclen);
assoclen 405 crypto/aegis128-core.c crypto_aegis128_final(&state, tag_xor, req->assoclen, cryptlen);
assoclen 427 crypto/aegis128-core.c scatterwalk_map_and_copy(tag.bytes, req->dst, req->assoclen + cryptlen,
assoclen 445 crypto/aegis128-core.c scatterwalk_map_and_copy(tag.bytes, req->src, req->assoclen + cryptlen,
assoclen 135 crypto/authenc.c req->assoclen + req->cryptlen,
assoclen 159 crypto/authenc.c req->assoclen + req->cryptlen);
assoclen 167 crypto/authenc.c scatterwalk_map_and_copy(hash, req->dst, req->assoclen + req->cryptlen,
assoclen 196 crypto/authenc.c skcipher_request_set_crypt(skreq, req->src, req->dst, req->assoclen,
assoclen 216 crypto/authenc.c src = scatterwalk_ffwd(areq_ctx->src, req->src, req->assoclen);
assoclen 224 crypto/authenc.c dst = scatterwalk_ffwd(areq_ctx->dst, req->dst, req->assoclen);
assoclen 259 crypto/authenc.c src = scatterwalk_ffwd(areq_ctx->src, req->src, req->assoclen);
assoclen 263 crypto/authenc.c dst = scatterwalk_ffwd(areq_ctx->dst, req->dst, req->assoclen);
assoclen 306 crypto/authenc.c req->assoclen + req->cryptlen - authsize);
assoclen 106 crypto/authencesn.c unsigned int assoclen = req->assoclen;
assoclen 113 crypto/authencesn.c scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0);
assoclen 116 crypto/authencesn.c scatterwalk_map_and_copy(hash, dst, assoclen + cryptlen, authsize, 1);
assoclen 140 crypto/authencesn.c unsigned int assoclen = req->assoclen;
assoclen 151 crypto/authencesn.c scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 1);
assoclen 157 crypto/authencesn.c ahash_request_set_crypt(ahreq, dst, hash, assoclen + cryptlen);
assoclen 199 crypto/authencesn.c unsigned int assoclen = req->assoclen;
assoclen 205 crypto/authencesn.c src = scatterwalk_ffwd(areq_ctx->src, req->src, assoclen);
assoclen 209 crypto/authencesn.c err = crypto_authenc_esn_copy(req, assoclen);
assoclen 214 crypto/authencesn.c dst = scatterwalk_ffwd(areq_ctx->dst, req->dst, assoclen);
assoclen 242 crypto/authencesn.c unsigned int assoclen = req->assoclen;
assoclen 252 crypto/authencesn.c scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 0);
assoclen 261 crypto/authencesn.c dst = scatterwalk_ffwd(areq_ctx->dst, dst, assoclen);
assoclen 290 crypto/authencesn.c unsigned int assoclen = req->assoclen;
assoclen 300 crypto/authencesn.c err = crypto_authenc_esn_copy(req, assoclen + cryptlen);
assoclen 305 crypto/authencesn.c scatterwalk_map_and_copy(ihash, req->src, assoclen + cryptlen,
assoclen 314 crypto/authencesn.c scatterwalk_map_and_copy(tmp + 1, dst, assoclen + cryptlen, 4, 1);
assoclen 320 crypto/authencesn.c ahash_request_set_crypt(ahreq, dst, ohash, assoclen + cryptlen);
assoclen 151 crypto/ccm.c if (req->assoclen)
assoclen 183 crypto/ccm.c unsigned int assoclen = req->assoclen;
assoclen 198 crypto/ccm.c if (assoclen) {
assoclen 199 crypto/ccm.c ilen = format_adata(idata, assoclen);
assoclen 209 crypto/ccm.c ahash_request_set_crypt(ahreq, sg, NULL, assoclen + ilen + 16);
assoclen 218 crypto/ccm.c ilen = 16 - (assoclen + ilen) % 16;
assoclen 244 crypto/ccm.c req->assoclen + req->cryptlen,
assoclen 278 crypto/ccm.c sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
assoclen 285 crypto/ccm.c sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
assoclen 659 crypto/ccm.c scatterwalk_map_and_copy(iv + 16, req->src, 0, req->assoclen - 8, 0);
assoclen 662 crypto/ccm.c sg_set_buf(rctx->src, iv + 16, req->assoclen - 8);
assoclen 663 crypto/ccm.c sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
assoclen 669 crypto/ccm.c sg_set_buf(rctx->dst, iv + 16, req->assoclen - 8);
assoclen 670 crypto/ccm.c sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
assoclen 681 crypto/ccm.c aead_request_set_ad(subreq, req->assoclen - 8);
assoclen 688 crypto/ccm.c if (req->assoclen != 16 && req->assoclen != 20)
assoclen 698 crypto/ccm.c if (req->assoclen != 16 && req->assoclen != 20)
assoclen 40 crypto/chacha20poly1305.c __le64 assoclen;
assoclen 63 crypto/chacha20poly1305.c unsigned int assoclen;
assoclen 103 crypto/chacha20poly1305.c req->assoclen + rctx->cryptlen,
assoclen 115 crypto/chacha20poly1305.c req->assoclen + rctx->cryptlen,
assoclen 138 crypto/chacha20poly1305.c src = scatterwalk_ffwd(rctx->src, req->src, req->assoclen);
assoclen 141 crypto/chacha20poly1305.c dst = scatterwalk_ffwd(rctx->dst, req->dst, req->assoclen);
assoclen 179 crypto/chacha20poly1305.c preq->tail.assoclen = cpu_to_le64(rctx->assoclen);
assoclen 241 crypto/chacha20poly1305.c crypt = scatterwalk_ffwd(rctx->src, crypt, req->assoclen);
assoclen 268 crypto/chacha20poly1305.c padlen = -rctx->assoclen % POLY1305_BLOCK_SIZE;
assoclen 299 crypto/chacha20poly1305.c ahash_request_set_crypt(&preq->req, req->src, NULL, rctx->assoclen);
assoclen 370 crypto/chacha20poly1305.c rctx->assoclen = req->assoclen;
assoclen 373 crypto/chacha20poly1305.c if (rctx->assoclen < 8)
assoclen 375 crypto/chacha20poly1305.c rctx->assoclen -= 8;
assoclen 414 crypto/chacha20poly1305.c src = scatterwalk_ffwd(rctx->src, req->src, req->assoclen);
assoclen 417 crypto/chacha20poly1305.c dst = scatterwalk_ffwd(rctx->dst, req->dst, req->assoclen);
assoclen 51 crypto/echainiv.c req->assoclen + req->cryptlen,
assoclen 63 crypto/echainiv.c aead_request_set_ad(subreq, req->assoclen);
assoclen 69 crypto/echainiv.c scatterwalk_map_and_copy(info, req->dst, req->assoclen, ivsize, 1);
assoclen 105 crypto/echainiv.c aead_request_set_ad(subreq, req->assoclen + ivsize);
assoclen 107 crypto/echainiv.c scatterwalk_map_and_copy(req->iv, req->src, req->assoclen, ivsize, 0);
assoclen 215 crypto/essiv.c req->assoclen - crypto_aead_ivsize(tfm),
assoclen 220 crypto/essiv.c int ssize = req->assoclen - ivsize;
assoclen 252 crypto/essiv.c sg = scatterwalk_ffwd(rctx->sg + 2, req->src, req->assoclen);
assoclen 260 crypto/essiv.c aead_request_set_ad(subreq, req->assoclen);
assoclen 170 crypto/gcm.c sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
assoclen 177 crypto/gcm.c sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
assoclen 236 crypto/gcm.c lengths.a = cpu_to_be64(req->assoclen * 8);
assoclen 355 crypto/gcm.c remain = gcm_remain(req->assoclen);
assoclen 381 crypto/gcm.c if (req->assoclen)
assoclen 383 crypto/gcm.c req->src, req->assoclen, flags) ?:
assoclen 425 crypto/gcm.c req->assoclen + req->cryptlen,
assoclen 482 crypto/gcm.c req->assoclen + cryptlen, authsize, 0);
assoclen 772 crypto/gcm.c scatterwalk_map_and_copy(iv + GCM_AES_IV_SIZE, req->src, 0, req->assoclen - 8, 0);
assoclen 778 crypto/gcm.c sg_set_buf(rctx->src, iv + GCM_AES_IV_SIZE, req->assoclen - 8);
assoclen 779 crypto/gcm.c sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
assoclen 785 crypto/gcm.c sg_set_buf(rctx->dst, iv + GCM_AES_IV_SIZE, req->assoclen - 8);
assoclen 786 crypto/gcm.c sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
assoclen 797 crypto/gcm.c aead_request_set_ad(subreq, req->assoclen - 8);
assoclen 806 crypto/gcm.c err = crypto_ipsec_check_assoclen(req->assoclen);
assoclen 819 crypto/gcm.c err = crypto_ipsec_check_assoclen(req->assoclen);
assoclen 1013 crypto/gcm.c aead_request_set_ad(subreq, req->assoclen + req->cryptlen -
assoclen 1024 crypto/gcm.c unsigned int nbytes = req->assoclen + req->cryptlen -
assoclen 1037 crypto/gcm.c return crypto_ipsec_check_assoclen(req->assoclen) ?:
assoclen 1043 crypto/gcm.c return crypto_ipsec_check_assoclen(req->assoclen) ?:
assoclen 114 crypto/pcrypt.c aead_request_set_ad(creq, req->assoclen);
assoclen 159 crypto/pcrypt.c aead_request_set_ad(creq, req->assoclen);
assoclen 77 crypto/seqiv.c req->assoclen + req->cryptlen,
assoclen 100 crypto/seqiv.c aead_request_set_ad(subreq, req->assoclen + ivsize);
assoclen 103 crypto/seqiv.c scatterwalk_map_and_copy(info, req->dst, req->assoclen, ivsize, 1);
assoclen 131 crypto/seqiv.c aead_request_set_ad(subreq, req->assoclen + ivsize);
assoclen 133 crypto/seqiv.c scatterwalk_map_and_copy(req->iv, req->src, req->assoclen, ivsize, 0);
assoclen 528 crypto/skcipher.c scatterwalk_copychunks(NULL, &walk->in, req->assoclen, 2);
assoclen 529 crypto/skcipher.c scatterwalk_copychunks(NULL, &walk->out, req->assoclen, 2);
assoclen 1933 crypto/testmgr.c req->assoclen != vec->alen ||
assoclen 1945 crypto/testmgr.c if (req->assoclen != vec->alen)
assoclen 352 drivers/crypto/amcc/crypto4xx_alg.c if (req->assoclen & 0x3 || req->assoclen > 1020)
assoclen 372 drivers/crypto/amcc/crypto4xx_alg.c aead_request_set_ad(subreq, req->assoclen);
assoclen 489 drivers/crypto/amcc/crypto4xx_alg.c sa, ctx->sa_len, req->assoclen, rctx->dst);
assoclen 628 drivers/crypto/amcc/crypto4xx_alg.c ctx->sa_len, req->assoclen, rctx->dst);
assoclen 601 drivers/crypto/amcc/crypto4xx_core.c aead_req->assoclen + aead_req->cryptlen -
assoclen 686 drivers/crypto/amcc/crypto4xx_core.c const unsigned int assoclen,
assoclen 722 drivers/crypto/amcc/crypto4xx_core.c tmp = sg_nents_for_len(src, assoclen + datalen);
assoclen 731 drivers/crypto/amcc/crypto4xx_core.c if (assoclen) {
assoclen 732 drivers/crypto/amcc/crypto4xx_core.c nbytes += assoclen;
assoclen 733 drivers/crypto/amcc/crypto4xx_core.c dst = scatterwalk_ffwd(_dst, dst, assoclen);
assoclen 823 drivers/crypto/amcc/crypto4xx_core.c sa->sa_command_1.bf.hash_crypto_offset = (assoclen >> 2);
assoclen 922 drivers/crypto/amcc/crypto4xx_core.c pd->pd_ctl_len.w = 0x00400000 | (assoclen + datalen);
assoclen 160 drivers/crypto/amcc/crypto4xx_core.h const unsigned int assoclen,
assoclen 1580 drivers/crypto/atmel-aes.c if (likely(req->assoclen != 0 || ctx->textlen != 0))
assoclen 1601 drivers/crypto/atmel-aes.c atmel_aes_write(dd, AES_AADLENR, req->assoclen);
assoclen 1605 drivers/crypto/atmel-aes.c if (unlikely(req->assoclen == 0)) {
assoclen 1611 drivers/crypto/atmel-aes.c padlen = atmel_aes_padlen(req->assoclen, AES_BLOCK_SIZE);
assoclen 1612 drivers/crypto/atmel-aes.c if (unlikely(req->assoclen + padlen > dd->buflen))
assoclen 1614 drivers/crypto/atmel-aes.c sg_copy_to_buffer(req->src, sg_nents(req->src), dd->buf, req->assoclen);
assoclen 1618 drivers/crypto/atmel-aes.c dd->datalen = req->assoclen + padlen;
assoclen 1649 drivers/crypto/atmel-aes.c src = scatterwalk_ffwd(ctx->src, req->src, req->assoclen);
assoclen 1651 drivers/crypto/atmel-aes.c scatterwalk_ffwd(ctx->dst, req->dst, req->assoclen));
assoclen 1689 drivers/crypto/atmel-aes.c data[0] = cpu_to_be64(req->assoclen * 8);
assoclen 1730 drivers/crypto/atmel-aes.c offset = req->assoclen + ctx->textlen;
assoclen 2029 drivers/crypto/atmel-aes.c req->src, req->assoclen,
assoclen 2050 drivers/crypto/atmel-aes.c src = scatterwalk_ffwd(rctx->src, req->src, req->assoclen);
assoclen 2054 drivers/crypto/atmel-aes.c dst = scatterwalk_ffwd(rctx->dst, req->dst, req->assoclen);
assoclen 2104 drivers/crypto/atmel-aes.c offs = req->assoclen + rctx->textlen;
assoclen 2225 drivers/crypto/atmel-aes.c if (!rctx->textlen && !req->assoclen)
assoclen 41 drivers/crypto/atmel-authenc.h struct scatterlist *assoc, unsigned int assoclen,
assoclen 2230 drivers/crypto/atmel-sha.c u32 assoclen;
assoclen 2392 drivers/crypto/atmel-sha.c struct scatterlist *assoc, unsigned int assoclen,
assoclen 2403 drivers/crypto/atmel-sha.c if (unlikely(!IS_ALIGNED(assoclen, sizeof(u32))))
assoclen 2409 drivers/crypto/atmel-sha.c authctx->assoclen = assoclen;
assoclen 2442 drivers/crypto/atmel-sha.c msg_size = authctx->assoclen + authctx->textlen;
assoclen 2449 drivers/crypto/atmel-sha.c return atmel_sha_cpu_start(dd, authctx->assoc, authctx->assoclen,
assoclen 1915 drivers/crypto/axis/artpec6_crypto.c __cpu_to_be64(8*areq->assoclen);
assoclen 1935 drivers/crypto/axis/artpec6_crypto.c count = areq->assoclen;
assoclen 1940 drivers/crypto/axis/artpec6_crypto.c if (!IS_ALIGNED(areq->assoclen, 16)) {
assoclen 1941 drivers/crypto/axis/artpec6_crypto.c size_t assoc_pad = 16 - (areq->assoclen % 16);
assoclen 1981 drivers/crypto/axis/artpec6_crypto.c count = artpec6_crypto_walk_advance(&walk, areq->assoclen);
assoclen 2202 drivers/crypto/axis/artpec6_crypto.c areq->assoclen + areq->cryptlen -
assoclen 1357 drivers/crypto/bcm/cipher.c aead_parms.assoc_size = req->assoclen;
assoclen 1430 drivers/crypto/bcm/cipher.c spu->spu_ccm_update_iv(digestsize, &cipher_parms, req->assoclen,
assoclen 1459 drivers/crypto/bcm/cipher.c req->assoclen + rctx->total_sent -
assoclen 1584 drivers/crypto/bcm/cipher.c if (req->assoclen)
assoclen 1586 drivers/crypto/bcm/cipher.c req->assoclen);
assoclen 1595 drivers/crypto/bcm/cipher.c icv_offset = req->assoclen + rctx->total_sent;
assoclen 1604 drivers/crypto/bcm/cipher.c dump_sg(req->dst, req->assoclen, result_len);
assoclen 2581 drivers/crypto/bcm/cipher.c (req->assoclen == 0)) {
assoclen 2605 drivers/crypto/bcm/cipher.c (req->assoclen == 0)) {
assoclen 2618 drivers/crypto/bcm/cipher.c req->assoclen != 16 && req->assoclen != 20) {
assoclen 2626 drivers/crypto/bcm/cipher.c payload_len += req->assoclen;
assoclen 2707 drivers/crypto/bcm/cipher.c if (req->assoclen > MAX_ASSOC_SIZE) {
assoclen 2710 drivers/crypto/bcm/cipher.c __func__, req->assoclen, MAX_ASSOC_SIZE);
assoclen 2735 drivers/crypto/bcm/cipher.c if (spu_sg_at_offset(req->src, req->assoclen, &rctx->src_sg,
assoclen 2753 drivers/crypto/bcm/cipher.c if (spu_sg_at_offset(req->dst, req->assoclen, &rctx->dst_sg,
assoclen 2780 drivers/crypto/bcm/cipher.c flow_log(" assoc: %p, assoclen %u\n", rctx->assoc, req->assoclen);
assoclen 3109 drivers/crypto/bcm/cipher.c dump_sg(req->src, 0, req->cryptlen + req->assoclen);
assoclen 3110 drivers/crypto/bcm/cipher.c flow_log(" assoc_len:%u\n", req->assoclen);
assoclen 3118 drivers/crypto/bcm/cipher.c dump_sg(req->src, 0, req->cryptlen + req->assoclen);
assoclen 3119 drivers/crypto/bcm/cipher.c flow_log(" assoc_len:%u\n", req->assoclen);
assoclen 401 drivers/crypto/bcm/cipher.h unsigned int assoclen, unsigned int chunksize,
assoclen 1166 drivers/crypto/bcm/spu.c unsigned int assoclen,
assoclen 1211 drivers/crypto/bcm/spu.c adata = (assoclen > 0); /* adata = 1 if any associated data */
assoclen 271 drivers/crypto/bcm/spu.h unsigned int assoclen,
assoclen 1360 drivers/crypto/bcm/spu2.c unsigned int assoclen, unsigned int chunksize,
assoclen 214 drivers/crypto/bcm/spu2.h unsigned int assoclen, unsigned int chunksize,
assoclen 1118 drivers/crypto/caam/caamalg.c append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
assoclen 1141 drivers/crypto/caam/caamalg.c req->assoclen + req->cryptlen + authsize,
assoclen 1145 drivers/crypto/caam/caamalg.c req->assoclen + req->cryptlen - authsize,
assoclen 1161 drivers/crypto/caam/caamalg.c append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
assoclen 1165 drivers/crypto/caam/caamalg.c if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
assoclen 1185 drivers/crypto/caam/caamalg.c unsigned int assoclen = req->assoclen;
assoclen 1199 drivers/crypto/caam/caamalg.c assoclen -= ivsize;
assoclen 1202 drivers/crypto/caam/caamalg.c append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen);
assoclen 1251 drivers/crypto/caam/caamalg.c append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
assoclen 1253 drivers/crypto/caam/caamalg.c append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);
assoclen 1337 drivers/crypto/caam/caamalg.c src_len = req->assoclen + req->cryptlen;
assoclen 1354 drivers/crypto/caam/caamalg.c src_len = req->assoclen + req->cryptlen +
assoclen 1560 drivers/crypto/caam/caamalg.c return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_encrypt(req);
assoclen 1634 drivers/crypto/caam/caamalg.c return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_decrypt(req);
assoclen 1649 drivers/crypto/caam/caamalg.c req->assoclen + req->cryptlen, 1);
assoclen 805 drivers/crypto/caam/caamalg_qi.c unsigned int assoclen;
assoclen 971 drivers/crypto/caam/caamalg_qi.c src_len = req->assoclen + req->cryptlen +
assoclen 990 drivers/crypto/caam/caamalg_qi.c src_len = req->assoclen + req->cryptlen;
assoclen 1096 drivers/crypto/caam/caamalg_qi.c edesc->assoclen = cpu_to_caam32(req->assoclen);
assoclen 1097 drivers/crypto/caam/caamalg_qi.c edesc->assoclen_dma = dma_map_single(qidev, &edesc->assoclen, 4,
assoclen 1132 drivers/crypto/caam/caamalg_qi.c out_len = req->assoclen + req->cryptlen +
assoclen 1134 drivers/crypto/caam/caamalg_qi.c in_len = 4 + ivsize + req->assoclen + req->cryptlen;
assoclen 1197 drivers/crypto/caam/caamalg_qi.c return crypto_ipsec_check_assoclen(req->assoclen) ? : aead_crypt(req,
assoclen 1203 drivers/crypto/caam/caamalg_qi.c return crypto_ipsec_check_assoclen(req->assoclen) ? : aead_crypt(req,
assoclen 378 drivers/crypto/caam/caamalg_qi2.c src_len = req->assoclen + req->cryptlen;
assoclen 423 drivers/crypto/caam/caamalg_qi2.c src_len = req->assoclen + req->cryptlen +
assoclen 506 drivers/crypto/caam/caamalg_qi2.c edesc->assoclen = cpu_to_caam32(req->assoclen - ivsize);
assoclen 508 drivers/crypto/caam/caamalg_qi2.c edesc->assoclen = cpu_to_caam32(req->assoclen);
assoclen 509 drivers/crypto/caam/caamalg_qi2.c edesc->assoclen_dma = dma_map_single(dev, &edesc->assoclen, 4,
assoclen 544 drivers/crypto/caam/caamalg_qi2.c out_len = req->assoclen + req->cryptlen +
assoclen 546 drivers/crypto/caam/caamalg_qi2.c in_len = 4 + ivsize + req->assoclen + req->cryptlen;
assoclen 1388 drivers/crypto/caam/caamalg_qi2.c return crypto_ipsec_check_assoclen(req->assoclen) ? : aead_encrypt(req);
assoclen 1393 drivers/crypto/caam/caamalg_qi2.c return crypto_ipsec_check_assoclen(req->assoclen) ? : aead_decrypt(req);
assoclen 115 drivers/crypto/caam/caamalg_qi2.h unsigned int assoclen;
assoclen 150 drivers/crypto/cavium/nitrox/nitrox_aead.c creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen);
assoclen 151 drivers/crypto/cavium/nitrox/nitrox_aead.c creq->gph.param2 = cpu_to_be16(rctx->ivsize + rctx->assoclen);
assoclen 201 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->assoclen = areq->assoclen;
assoclen 202 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->srclen = areq->assoclen + areq->cryptlen;
assoclen 232 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->assoclen = areq->assoclen;
assoclen 233 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->srclen = areq->cryptlen + areq->assoclen;
assoclen 367 drivers/crypto/cavium/nitrox/nitrox_aead.c unsigned int assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE;
assoclen 370 drivers/crypto/cavium/nitrox/nitrox_aead.c if (areq->assoclen != 16 && areq->assoclen != 20)
assoclen 373 drivers/crypto/cavium/nitrox/nitrox_aead.c scatterwalk_map_and_copy(rctx->assoc, areq->src, 0, assoclen, 0);
assoclen 375 drivers/crypto/cavium/nitrox/nitrox_aead.c sg_set_buf(rctx->src, rctx->assoc, assoclen);
assoclen 376 drivers/crypto/cavium/nitrox/nitrox_aead.c sg = scatterwalk_ffwd(rctx->src + 1, areq->src, areq->assoclen);
assoclen 382 drivers/crypto/cavium/nitrox/nitrox_aead.c sg_set_buf(rctx->dst, rctx->assoc, assoclen);
assoclen 383 drivers/crypto/cavium/nitrox/nitrox_aead.c sg = scatterwalk_ffwd(rctx->dst + 1, areq->dst, areq->assoclen);
assoclen 420 drivers/crypto/cavium/nitrox/nitrox_aead.c aead_rctx->assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE;
assoclen 421 drivers/crypto/cavium/nitrox/nitrox_aead.c aead_rctx->srclen = aead_rctx->assoclen + aead_rctx->cryptlen;
assoclen 452 drivers/crypto/cavium/nitrox/nitrox_aead.c aead_rctx->assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE;
assoclen 454 drivers/crypto/cavium/nitrox/nitrox_aead.c areq->cryptlen - GCM_RFC4106_IV_SIZE + areq->assoclen;
assoclen 232 drivers/crypto/cavium/nitrox/nitrox_req.h unsigned int assoclen;
assoclen 130 drivers/crypto/ccp/ccp-crypto-aes-galois.c rctx->cmd.u.aes.aad_len = req->assoclen;
assoclen 65 drivers/crypto/ccree/cc_aead.c return ((req->assoclen == 16) || (req->assoclen == 20));
assoclen 240 drivers/crypto/ccree/cc_aead.c areq->cryptlen, areq->assoclen);
assoclen 746 drivers/crypto/ccree/cc_aead.c areq_ctx->assoclen, NS_BIT);
assoclen 1099 drivers/crypto/ccree/cc_aead.c if (areq_ctx->assoclen > 0)
assoclen 1327 drivers/crypto/ccree/cc_aead.c unsigned int assoclen = areq_ctx->assoclen;
assoclen 1350 drivers/crypto/ccree/cc_aead.c if (!IS_ALIGNED(assoclen, sizeof(u32)))
assoclen 1361 drivers/crypto/ccree/cc_aead.c if (!IS_ALIGNED(assoclen, DES_BLOCK_SIZE))
assoclen 1486 drivers/crypto/ccree/cc_aead.c if (req_ctx->assoclen > 0) {
assoclen 1578 drivers/crypto/ccree/cc_aead.c if (req_ctx->assoclen > 0)
assoclen 1589 drivers/crypto/ccree/cc_aead.c req_ctx->ccm_hdr_size = format_ccm_a0(a0, req_ctx->assoclen);
assoclen 1621 drivers/crypto/ccree/cc_aead.c areq_ctx->assoclen -= CCM_BLOCK_IV_SIZE;
assoclen 1829 drivers/crypto/ccree/cc_aead.c if (req_ctx->assoclen > 0)
assoclen 1854 drivers/crypto/ccree/cc_aead.c __func__, cryptlen, req_ctx->assoclen, ctx->authsize);
assoclen 1870 drivers/crypto/ccree/cc_aead.c temp64 = cpu_to_be64(req_ctx->assoclen * 8);
assoclen 1880 drivers/crypto/ccree/cc_aead.c temp64 = cpu_to_be64((req_ctx->assoclen +
assoclen 1901 drivers/crypto/ccree/cc_aead.c areq_ctx->assoclen -= GCM_BLOCK_RFC4_IV_SIZE;
assoclen 1926 drivers/crypto/ccree/cc_aead.c req->cryptlen, areq_ctx->assoclen);
assoclen 2044 drivers/crypto/ccree/cc_aead.c areq_ctx->assoclen = req->assoclen;
assoclen 2067 drivers/crypto/ccree/cc_aead.c dev_err(dev, "invalid Assoclen:%u\n", req->assoclen);
assoclen 2075 drivers/crypto/ccree/cc_aead.c areq_ctx->assoclen = req->assoclen;
assoclen 2096 drivers/crypto/ccree/cc_aead.c areq_ctx->assoclen = req->assoclen;
assoclen 2117 drivers/crypto/ccree/cc_aead.c dev_err(dev, "invalid Assoclen:%u\n", req->assoclen);
assoclen 2125 drivers/crypto/ccree/cc_aead.c areq_ctx->assoclen = req->assoclen;
assoclen 2236 drivers/crypto/ccree/cc_aead.c dev_err(dev, "invalid Assoclen:%u\n", req->assoclen);
assoclen 2244 drivers/crypto/ccree/cc_aead.c areq_ctx->assoclen = req->assoclen;
assoclen 2267 drivers/crypto/ccree/cc_aead.c dev_err(dev, "invalid Assoclen:%u\n", req->assoclen);
assoclen 2278 drivers/crypto/ccree/cc_aead.c areq_ctx->assoclen = req->assoclen;
assoclen 2301 drivers/crypto/ccree/cc_aead.c dev_err(dev, "invalid Assoclen:%u\n", req->assoclen);
assoclen 2309 drivers/crypto/ccree/cc_aead.c areq_ctx->assoclen = req->assoclen;
assoclen 2332 drivers/crypto/ccree/cc_aead.c dev_err(dev, "invalid Assoclen:%u\n", req->assoclen);
assoclen 2343 drivers/crypto/ccree/cc_aead.c areq_ctx->assoclen = req->assoclen;
assoclen 69 drivers/crypto/ccree/cc_aead.h u32 assoclen; /* internal assoclen */
assoclen 68 drivers/crypto/ccree/cc_buffer_mgr.c u32 skip = areq_ctx->assoclen + req->cryptlen;
assoclen 321 drivers/crypto/ccree/cc_buffer_mgr.c unsigned int assoclen)
assoclen 337 drivers/crypto/ccree/cc_buffer_mgr.c if (assoclen > 0) {
assoclen 547 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->assoclen, req->cryptlen);
assoclen 635 drivers/crypto/ccree/cc_buffer_mgr.c unsigned int size_of_assoc = areq_ctx->assoclen;
assoclen 646 drivers/crypto/ccree/cc_buffer_mgr.c if (areq_ctx->assoclen == 0) {
assoclen 690 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->assoclen, 0, is_last,
assoclen 849 drivers/crypto/ccree/cc_buffer_mgr.c unsigned int size_for_map = areq_ctx->assoclen + req->cryptlen;
assoclen 853 drivers/crypto/ccree/cc_buffer_mgr.c u32 size_to_skip = areq_ctx->assoclen;
assoclen 896 drivers/crypto/ccree/cc_buffer_mgr.c size_for_map = areq_ctx->assoclen + req->cryptlen;
assoclen 1064 drivers/crypto/ccree/cc_buffer_mgr.c &sg_data, areq_ctx->assoclen);
assoclen 1115 drivers/crypto/ccree/cc_buffer_mgr.c size_to_map = req->cryptlen + areq_ctx->assoclen;
assoclen 191 drivers/crypto/chelsio/chcr_algo.c authsize, req->assoclen +
assoclen 2300 drivers/crypto/chelsio/chcr_algo.c (req->assoclen > aadmax) ||
assoclen 2317 drivers/crypto/chelsio/chcr_algo.c aead_request_set_ad(subreq, req->assoclen);
assoclen 2357 drivers/crypto/chelsio/chcr_algo.c dnents = sg_nents_xlen(req->dst, req->assoclen + req->cryptlen +
assoclen 2360 drivers/crypto/chelsio/chcr_algo.c snents = sg_nents_xlen(req->src, req->assoclen + req->cryptlen,
assoclen 2366 drivers/crypto/chelsio/chcr_algo.c reqctx->imm = (transhdr_len + req->assoclen + req->cryptlen) <
assoclen 2368 drivers/crypto/chelsio/chcr_algo.c temp = reqctx->imm ? roundup(req->assoclen + req->cryptlen, 16)
assoclen 2396 drivers/crypto/chelsio/chcr_algo.c chcr_req->sec_cpl.pldlen = htonl(req->assoclen + IV + req->cryptlen);
assoclen 2399 drivers/crypto/chelsio/chcr_algo.c null ? 0 : IV + req->assoclen,
assoclen 2400 drivers/crypto/chelsio/chcr_algo.c req->assoclen + IV + 1,
assoclen 2404 drivers/crypto/chelsio/chcr_algo.c null ? 0 : req->assoclen + IV + 1,
assoclen 2448 drivers/crypto/chelsio/chcr_algo.c kctx_len + (reqctx->imm ? (req->assoclen + req->cryptlen) : 0);
assoclen 2470 drivers/crypto/chelsio/chcr_algo.c dst_size = req->assoclen + req->cryptlen + (op_type ?
assoclen 2516 drivers/crypto/chelsio/chcr_algo.c dst_size = req->assoclen + req->cryptlen + (op_type ?
assoclen 2548 drivers/crypto/chelsio/chcr_algo.c buf, req->cryptlen + req->assoclen, 0);
assoclen 2555 drivers/crypto/chelsio/chcr_algo.c req->assoclen, 0);
assoclen 2573 drivers/crypto/chelsio/chcr_algo.c temp = req->assoclen + req->cryptlen +
assoclen 2763 drivers/crypto/chelsio/chcr_algo.c if (req->assoclen)
assoclen 2785 drivers/crypto/chelsio/chcr_algo.c unsigned int assoclen)
assoclen 2800 drivers/crypto/chelsio/chcr_algo.c if (assoclen)
assoclen 2802 drivers/crypto/chelsio/chcr_algo.c htons(assoclen);
assoclen 2822 drivers/crypto/chelsio/chcr_algo.c unsigned int assoclen;
assoclen 2825 drivers/crypto/chelsio/chcr_algo.c assoclen = req->assoclen - 8;
assoclen 2827 drivers/crypto/chelsio/chcr_algo.c assoclen = req->assoclen;
assoclen 2829 drivers/crypto/chelsio/chcr_algo.c ((assoclen) ? CCM_AAD_FIELD_SIZE : 0);
assoclen 2832 drivers/crypto/chelsio/chcr_algo.c (req->assoclen + IV + 1 + ccm_xtra) : 0;
assoclen 2844 drivers/crypto/chelsio/chcr_algo.c htonl(req->assoclen + IV + req->cryptlen + ccm_xtra);
assoclen 2847 drivers/crypto/chelsio/chcr_algo.c 1 + IV, IV + assoclen + ccm_xtra,
assoclen 2848 drivers/crypto/chelsio/chcr_algo.c req->assoclen + IV + 1 + ccm_xtra, 0);
assoclen 2874 drivers/crypto/chelsio/chcr_algo.c if (req->assoclen != 16 && req->assoclen != 20) {
assoclen 2876 drivers/crypto/chelsio/chcr_algo.c req->assoclen);
assoclen 2896 drivers/crypto/chelsio/chcr_algo.c unsigned int sub_type, assoclen = req->assoclen;
assoclen 2906 drivers/crypto/chelsio/chcr_algo.c assoclen -= 8;
assoclen 2907 drivers/crypto/chelsio/chcr_algo.c reqctx->b0_len = CCM_B0_SIZE + (assoclen ? CCM_AAD_FIELD_SIZE : 0);
assoclen 2915 drivers/crypto/chelsio/chcr_algo.c dnents = sg_nents_xlen(req->dst, req->assoclen + req->cryptlen
assoclen 2920 drivers/crypto/chelsio/chcr_algo.c snents = sg_nents_xlen(req->src, req->assoclen + req->cryptlen,
assoclen 2925 drivers/crypto/chelsio/chcr_algo.c reqctx->imm = (transhdr_len + req->assoclen + req->cryptlen +
assoclen 2927 drivers/crypto/chelsio/chcr_algo.c temp = reqctx->imm ? roundup(req->assoclen + req->cryptlen +
assoclen 2958 drivers/crypto/chelsio/chcr_algo.c error = ccm_format_packet(req, ivptr, sub_type, reqctx->op, assoclen);
assoclen 2966 drivers/crypto/chelsio/chcr_algo.c kctx_len + (reqctx->imm ? (req->assoclen + req->cryptlen +
assoclen 2992 drivers/crypto/chelsio/chcr_algo.c unsigned int dst_size = 0, temp = 0, kctx_len, assoclen = req->assoclen;
assoclen 3001 drivers/crypto/chelsio/chcr_algo.c assoclen = req->assoclen - 8;
assoclen 3007 drivers/crypto/chelsio/chcr_algo.c dnents = sg_nents_xlen(req->dst, req->assoclen + req->cryptlen +
assoclen 3010 drivers/crypto/chelsio/chcr_algo.c snents = sg_nents_xlen(req->src, req->assoclen + req->cryptlen,
assoclen 3016 drivers/crypto/chelsio/chcr_algo.c reqctx->imm = (transhdr_len + req->assoclen + req->cryptlen) <=
assoclen 3018 drivers/crypto/chelsio/chcr_algo.c temp = reqctx->imm ? roundup(req->assoclen + req->cryptlen, 16) :
assoclen 3042 drivers/crypto/chelsio/chcr_algo.c htonl(req->assoclen + IV + req->cryptlen);
assoclen 3044 drivers/crypto/chelsio/chcr_algo.c assoclen ? 1 + IV : 0,
assoclen 3045 drivers/crypto/chelsio/chcr_algo.c assoclen ? IV + assoclen : 0,
assoclen 3046 drivers/crypto/chelsio/chcr_algo.c req->assoclen + IV + 1, 0);
assoclen 3048 drivers/crypto/chelsio/chcr_algo.c FILL_SEC_CPL_AUTHINSERT(0, req->assoclen + IV + 1,
assoclen 3082 drivers/crypto/chelsio/chcr_algo.c kctx_len + (reqctx->imm ? (req->assoclen + req->cryptlen) : 0);
assoclen 147 drivers/crypto/inside-secure/safexcel_cipher.c u32 cryptlen, u32 assoclen, u32 digestsize)
assoclen 186 drivers/crypto/inside-secure/safexcel_cipher.c token[6].packet_length = assoclen;
assoclen 226 drivers/crypto/inside-secure/safexcel_cipher.c ((assoclen > 0) << 1);
assoclen 232 drivers/crypto/inside-secure/safexcel_cipher.c cbcmaciv[0] |= ((assoclen > 0) << 6) | ((digestsize - 2) << 2);
assoclen 239 drivers/crypto/inside-secure/safexcel_cipher.c if (assoclen) {
assoclen 240 drivers/crypto/inside-secure/safexcel_cipher.c *aadlen = cpu_to_le32(cpu_to_be16(assoclen));
assoclen 241 drivers/crypto/inside-secure/safexcel_cipher.c assoclen += 2;
assoclen 248 drivers/crypto/inside-secure/safexcel_cipher.c assoclen &= 15;
assoclen 249 drivers/crypto/inside-secure/safexcel_cipher.c token[7].packet_length = assoclen ? 16 - assoclen : 0;
assoclen 551 drivers/crypto/inside-secure/safexcel_cipher.c unsigned int cryptlen, unsigned int assoclen,
assoclen 564 drivers/crypto/inside-secure/safexcel_cipher.c unsigned int totlen_src = cryptlen + assoclen;
assoclen 687 drivers/crypto/inside-secure/safexcel_cipher.c assoclen, digestsize);
assoclen 705 drivers/crypto/inside-secure/safexcel_cipher.c if (assoclen) {
assoclen 706 drivers/crypto/inside-secure/safexcel_cipher.c if (assoclen >= len) {
assoclen 707 drivers/crypto/inside-secure/safexcel_cipher.c assoclen -= len;
assoclen 712 drivers/crypto/inside-secure/safexcel_cipher.c assoclen,
assoclen 713 drivers/crypto/inside-secure/safexcel_cipher.c len - assoclen);
assoclen 714 drivers/crypto/inside-secure/safexcel_cipher.c assoclen = 0;
assoclen 942 drivers/crypto/inside-secure/safexcel_cipher.c req->cryptlen, req->assoclen,
assoclen 347 drivers/crypto/ixp4xx_crypto.c int decryptlen = req->assoclen + req->cryptlen - authsize;
assoclen 1021 drivers/crypto/ixp4xx_crypto.c crypt->auth_len = req->assoclen + cryptlen;
assoclen 1204 drivers/crypto/ixp4xx_crypto.c return aead_perform(req, 1, req->assoclen, req->cryptlen, req->iv);
assoclen 1209 drivers/crypto/ixp4xx_crypto.c return aead_perform(req, 0, req->assoclen, req->cryptlen, req->iv);
assoclen 890 drivers/crypto/mediatek/mtk-aes.c info->cmd[cnt++] = AES_GCM_CMD0 | cpu_to_le32(req->assoclen);
assoclen 891 drivers/crypto/mediatek/mtk-aes.c info->cmd[cnt++] = AES_GCM_CMD1 | cpu_to_le32(req->assoclen);
assoclen 963 drivers/crypto/mediatek/mtk-aes.c u32 len = req->assoclen + req->cryptlen;
assoclen 999 drivers/crypto/mediatek/mtk-aes.c if (!gctx->textlen && !req->assoclen)
assoclen 133 drivers/crypto/nx/nx-aes-ccm.c static int generate_b0(u8 *iv, unsigned int assoclen, unsigned int authsize,
assoclen 148 drivers/crypto/nx/nx-aes-ccm.c if (assoclen)
assoclen 161 drivers/crypto/nx/nx-aes-ccm.c unsigned int assoclen,
assoclen 188 drivers/crypto/nx/nx-aes-ccm.c if (!assoclen) {
assoclen 190 drivers/crypto/nx/nx-aes-ccm.c } else if (assoclen <= 14) {
assoclen 196 drivers/crypto/nx/nx-aes-ccm.c iauth_len = assoclen;
assoclen 197 drivers/crypto/nx/nx-aes-ccm.c } else if (assoclen <= 65280) {
assoclen 211 drivers/crypto/nx/nx-aes-ccm.c rc = generate_b0(iv, assoclen, authsize, nbytes, b0);
assoclen 221 drivers/crypto/nx/nx-aes-ccm.c if (assoclen <= 65280) {
assoclen 222 drivers/crypto/nx/nx-aes-ccm.c *(u16 *)b1 = assoclen;
assoclen 227 drivers/crypto/nx/nx-aes-ccm.c *(u32 *)&b1[2] = assoclen;
assoclen 234 drivers/crypto/nx/nx-aes-ccm.c if (!assoclen) {
assoclen 236 drivers/crypto/nx/nx-aes-ccm.c } else if (assoclen <= 14) {
assoclen 268 drivers/crypto/nx/nx-aes-ccm.c atomic64_add(assoclen, &nx_ctx->stats->aes_bytes);
assoclen 282 drivers/crypto/nx/nx-aes-ccm.c to_process = min_t(u32, assoclen - processed,
assoclen 290 drivers/crypto/nx/nx-aes-ccm.c if ((to_process + processed) < assoclen) {
assoclen 316 drivers/crypto/nx/nx-aes-ccm.c atomic64_add(assoclen, &nx_ctx->stats->aes_bytes);
assoclen 319 drivers/crypto/nx/nx-aes-ccm.c } while (processed < assoclen);
assoclen 331 drivers/crypto/nx/nx-aes-ccm.c unsigned int assoclen)
assoclen 348 drivers/crypto/nx/nx-aes-ccm.c req->src, nbytes + req->assoclen, authsize,
assoclen 351 drivers/crypto/nx/nx-aes-ccm.c rc = generate_pat(desc->info, req, nx_ctx, authsize, nbytes, assoclen,
assoclen 371 drivers/crypto/nx/nx-aes-ccm.c &to_process, processed + req->assoclen,
assoclen 409 drivers/crypto/nx/nx-aes-ccm.c unsigned int assoclen)
assoclen 421 drivers/crypto/nx/nx-aes-ccm.c rc = generate_pat(desc->info, req, nx_ctx, authsize, nbytes, assoclen,
assoclen 440 drivers/crypto/nx/nx-aes-ccm.c &to_process, processed + req->assoclen,
assoclen 472 drivers/crypto/nx/nx-aes-ccm.c req->dst, nbytes + req->assoclen, authsize,
assoclen 493 drivers/crypto/nx/nx-aes-ccm.c return ccm_nx_encrypt(req, &desc, req->assoclen - 8);
assoclen 507 drivers/crypto/nx/nx-aes-ccm.c return ccm_nx_encrypt(req, &desc, req->assoclen);
assoclen 523 drivers/crypto/nx/nx-aes-ccm.c return ccm_nx_decrypt(req, &desc, req->assoclen - 8);
assoclen 537 drivers/crypto/nx/nx-aes-ccm.c return ccm_nx_decrypt(req, &desc, req->assoclen);
assoclen 102 drivers/crypto/nx/nx-aes-gcm.c unsigned int assoclen)
assoclen 108 drivers/crypto/nx/nx-aes-gcm.c unsigned int nbytes = assoclen;
assoclen 159 drivers/crypto/nx/nx-aes-gcm.c atomic64_add(assoclen, &(nx_ctx->stats->aes_bytes));
assoclen 170 drivers/crypto/nx/nx-aes-gcm.c unsigned int assoclen)
assoclen 177 drivers/crypto/nx/nx-aes-gcm.c unsigned int nbytes = assoclen;
assoclen 232 drivers/crypto/nx/nx-aes-gcm.c atomic64_add(assoclen, &(nx_ctx->stats->aes_bytes));
assoclen 310 drivers/crypto/nx/nx-aes-gcm.c unsigned int assoclen)
assoclen 329 drivers/crypto/nx/nx-aes-gcm.c if (assoclen == 0)
assoclen 332 drivers/crypto/nx/nx-aes-gcm.c rc = gmac(req, &desc, assoclen);
assoclen 340 drivers/crypto/nx/nx-aes-gcm.c csbcpb->cpb.aes_gcm.bit_length_aad = assoclen * 8;
assoclen 341 drivers/crypto/nx/nx-aes-gcm.c if (assoclen) {
assoclen 343 drivers/crypto/nx/nx-aes-gcm.c assoclen);
assoclen 363 drivers/crypto/nx/nx-aes-gcm.c processed + req->assoclen,
assoclen 400 drivers/crypto/nx/nx-aes-gcm.c req->dst, req->assoclen + nbytes,
assoclen 408 drivers/crypto/nx/nx-aes-gcm.c itag, req->src, req->assoclen + nbytes,
assoclen 427 drivers/crypto/nx/nx-aes-gcm.c return gcm_aes_nx_crypt(req, 1, req->assoclen);
assoclen 437 drivers/crypto/nx/nx-aes-gcm.c return gcm_aes_nx_crypt(req, 0, req->assoclen);
assoclen 451 drivers/crypto/nx/nx-aes-gcm.c if (req->assoclen < 8)
assoclen 454 drivers/crypto/nx/nx-aes-gcm.c return gcm_aes_nx_crypt(req, 1, req->assoclen - 8);
assoclen 468 drivers/crypto/nx/nx-aes-gcm.c if (req->assoclen < 8)
assoclen 471 drivers/crypto/nx/nx-aes-gcm.c return gcm_aes_nx_crypt(req, 0, req->assoclen - 8);
assoclen 58 drivers/crypto/omap-aes-gcm.c dd->aead_req->assoclen, dd->total,
assoclen 64 drivers/crypto/omap-aes-gcm.c dd->total + dd->aead_req->assoclen,
assoclen 90 drivers/crypto/omap-aes-gcm.c int alen, clen, cryptlen, assoclen, ret;
assoclen 97 drivers/crypto/omap-aes-gcm.c assoclen = req->assoclen;
assoclen 101 drivers/crypto/omap-aes-gcm.c assoclen -= 8;
assoclen 106 drivers/crypto/omap-aes-gcm.c alen = ALIGN(assoclen, AES_BLOCK_SIZE);
assoclen 109 drivers/crypto/omap-aes-gcm.c nsg = !!(assoclen && cryptlen);
assoclen 114 drivers/crypto/omap-aes-gcm.c if (assoclen) {
assoclen 116 drivers/crypto/omap-aes-gcm.c ret = omap_crypto_align_sg(&tmp, assoclen,
assoclen 126 drivers/crypto/omap-aes-gcm.c tmp = scatterwalk_ffwd(sg_arr, req->src, req->assoclen);
assoclen 139 drivers/crypto/omap-aes-gcm.c dd->assoc_len = assoclen;
assoclen 145 drivers/crypto/omap-aes-gcm.c dd->out_sg = scatterwalk_ffwd(sg_arr, req->dst, assoclen);
assoclen 231 drivers/crypto/omap-aes-gcm.c dd->total + dd->aead_req->assoclen,
assoclen 308 drivers/crypto/omap-aes-gcm.c int err, assoclen;
assoclen 318 drivers/crypto/omap-aes-gcm.c assoclen = req->assoclen - 8;
assoclen 320 drivers/crypto/omap-aes-gcm.c assoclen = req->assoclen;
assoclen 321 drivers/crypto/omap-aes-gcm.c if (assoclen + req->cryptlen == 0) {
assoclen 321 drivers/crypto/picoxcell_crypto.c total = areq->assoclen + areq->cryptlen;
assoclen 385 drivers/crypto/picoxcell_crypto.c total = req->is_encrypt ? 0 : areq->assoclen;
assoclen 415 drivers/crypto/picoxcell_crypto.c unsigned total = areq->assoclen + areq->cryptlen +
assoclen 543 drivers/crypto/picoxcell_crypto.c aead_request_set_ad(subreq, req->assoclen);
assoclen 577 drivers/crypto/picoxcell_crypto.c assoc_len = aead_req->assoclen;
assoclen 891 drivers/crypto/qat/qat_common/qat_algs.c cipher_param->cipher_offset = areq->assoclen;
assoclen 895 drivers/crypto/qat/qat_common/qat_algs.c auth_param->auth_len = areq->assoclen + cipher_param->cipher_length;
assoclen 936 drivers/crypto/qat/qat_common/qat_algs.c cipher_param->cipher_offset = areq->assoclen;
assoclen 939 drivers/crypto/qat/qat_common/qat_algs.c auth_param->auth_len = areq->assoclen + areq->cryptlen;
assoclen 507 drivers/crypto/stm32/stm32-cryp.c if (cryp->areq->assoclen)
assoclen 604 drivers/crypto/stm32/stm32-cryp.c if (cryp->areq->assoclen) {
assoclen 969 drivers/crypto/stm32/stm32-cryp.c cryp->total_in = areq->assoclen + areq->cryptlen;
assoclen 1008 drivers/crypto/stm32/stm32-cryp.c scatterwalk_advance(&cryp->out_walk, cryp->areq->assoclen);
assoclen 1009 drivers/crypto/stm32/stm32-cryp.c cryp->total_out -= cryp->areq->assoclen;
assoclen 1059 drivers/crypto/stm32/stm32-cryp.c if (unlikely(!cryp->areq->assoclen &&
assoclen 1120 drivers/crypto/stm32/stm32-cryp.c size_bit = cryp->areq->assoclen * 8;
assoclen 1545 drivers/crypto/stm32/stm32-cryp.c cryp->areq->assoclen) {
assoclen 1589 drivers/crypto/stm32/stm32-cryp.c alen = cryp->areq->assoclen;
assoclen 1592 drivers/crypto/stm32/stm32-cryp.c if (cryp->areq->assoclen <= 65280) {
assoclen 1009 drivers/crypto/talitos.c cryptlen + authsize, areq->assoclen);
assoclen 1019 drivers/crypto/talitos.c areq->assoclen + cryptlen - ivsize);
assoclen 1225 drivers/crypto/talitos.c areq->assoclen + cryptlen);
assoclen 1232 drivers/crypto/talitos.c ret = talitos_sg_map(dev, areq->src, areq->assoclen, edesc,
assoclen 1257 drivers/crypto/talitos.c sg_count, areq->assoclen, tbl_off, elen,
assoclen 1277 drivers/crypto/talitos.c sg_count, areq->assoclen, tbl_off, elen,
assoclen 1297 drivers/crypto/talitos.c sg_count, areq->assoclen + cryptlen, tbl_off);
assoclen 1325 drivers/crypto/talitos.c unsigned int assoclen,
assoclen 1348 drivers/crypto/talitos.c src_len = assoclen + cryptlen + authsize;
assoclen 1358 drivers/crypto/talitos.c src_len = assoclen + cryptlen + (encrypt ? 0 : authsize);
assoclen 1365 drivers/crypto/talitos.c dst_len = assoclen + cryptlen + (encrypt ? authsize : 0);
assoclen 1429 drivers/crypto/talitos.c iv, areq->assoclen, cryptlen,
assoclen 1488 drivers/crypto/talitos.c req->assoclen + req->cryptlen - authsize);
assoclen 84 include/crypto/aead.h unsigned int assoclen;
assoclen 500 include/crypto/aead.h unsigned int assoclen)
assoclen 502 include/crypto/aead.h req->assoclen = assoclen;
assoclen 51 include/crypto/gcm.h static inline int crypto_ipsec_check_assoclen(unsigned int assoclen)
assoclen 53 include/crypto/gcm.h switch (assoclen) {
assoclen 371 net/ipv4/esp4.c int assoclen;
assoclen 381 net/ipv4/esp4.c assoclen = sizeof(struct ip_esp_hdr);
assoclen 386 net/ipv4/esp4.c assoclen += sizeof(__be32);
assoclen 413 net/ipv4/esp4.c assoclen + ivlen + esp->clen + alen);
assoclen 441 net/ipv4/esp4.c assoclen + ivlen + esp->clen + alen);
assoclen 452 net/ipv4/esp4.c aead_request_set_ad(req, assoclen);
assoclen 704 net/ipv4/esp4.c int assoclen;
assoclen 718 net/ipv4/esp4.c assoclen = sizeof(struct ip_esp_hdr);
assoclen 723 net/ipv4/esp4.c assoclen += seqhilen;
assoclen 774 net/ipv4/esp4.c aead_request_set_ad(req, assoclen);
assoclen 313 net/ipv6/esp6.c int assoclen;
assoclen 323 net/ipv6/esp6.c assoclen = sizeof(struct ip_esp_hdr);
assoclen 328 net/ipv6/esp6.c assoclen += sizeof(__be32);
assoclen 354 net/ipv6/esp6.c assoclen + ivlen + esp->clen + alen);
assoclen 382 net/ipv6/esp6.c assoclen + ivlen + esp->clen + alen);
assoclen 393 net/ipv6/esp6.c aead_request_set_ad(req, assoclen);
assoclen 599 net/ipv6/esp6.c int assoclen;
assoclen 617 net/ipv6/esp6.c assoclen = sizeof(struct ip_esp_hdr);
assoclen 622 net/ipv6/esp6.c assoclen += seqhilen;
assoclen 673 net/ipv6/esp6.c aead_request_set_ad(req, assoclen);
assoclen 654 net/mac802154/llsec.c int authlen, assoclen, datalen, rc;
assoclen 665 net/mac802154/llsec.c assoclen = skb->mac_len;
assoclen 672 net/mac802154/llsec.c sg_init_one(&sg, skb_mac_header(skb), assoclen + datalen + authlen);
assoclen 675 net/mac802154/llsec.c assoclen += datalen;
assoclen 681 net/mac802154/llsec.c aead_request_set_ad(req, assoclen);
assoclen 860 net/mac802154/llsec.c int authlen, datalen, assoclen, rc;
assoclen 871 net/mac802154/llsec.c assoclen = skb->mac_len;
assoclen 876 net/mac802154/llsec.c sg_init_one(&sg, skb_mac_header(skb), assoclen + datalen);
assoclen 879 net/mac802154/llsec.c assoclen += datalen - authlen;
assoclen 885 net/mac802154/llsec.c aead_request_set_ad(req, assoclen);
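For reference, the common pattern behind most of the hits above: a caller sets assoclen with aead_request_set_ad() (include/crypto/aead.h line 500), the first assoclen bytes of the src/dst scatterlists are treated as associated data, and implementations locate the tag at offset assoclen + cryptlen. The snippet below is a minimal, hypothetical sketch of that usage for an in-place gcm(aes) encryption, not taken from any of the indexed files; the buffer layout assumed is [AAD (assoclen) | plaintext (cryptlen) | 16 bytes of tag space].

/*
 * Hypothetical sketch: in-place AEAD encryption with associated data.
 * Assumes buf holds assoclen bytes of AAD, cryptlen bytes of plaintext,
 * and 16 spare bytes where gcm(aes) will write the tag.
 */
#include <crypto/aead.h>
#include <linux/scatterlist.h>
#include <linux/err.h>

static int example_gcm_encrypt(u8 *buf, unsigned int assoclen,
			       unsigned int cryptlen,
			       const u8 *key, unsigned int keylen,
			       u8 *iv /* 12-byte GCM IV */)
{
	struct crypto_aead *tfm;
	struct aead_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_aead_setkey(tfm, key, keylen) ?:
	      crypto_aead_setauthsize(tfm, 16);
	if (err)
		goto out_free_tfm;

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/* One scatterlist covering AAD, plaintext and tag space. */
	sg_init_one(&sg, buf, assoclen + cryptlen + 16);

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  crypto_req_done, &wait);
	aead_request_set_crypt(req, &sg, &sg, cryptlen, iv);
	aead_request_set_ad(req, assoclen);	/* stores req->assoclen */

	/* Tag ends up at buf[assoclen + cryptlen .. assoclen + cryptlen + 15]. */
	err = crypto_wait_req(crypto_aead_encrypt(req), &wait);

	aead_request_free(req);
out_free_tfm:
	crypto_free_aead(tfm);
	return err;
}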