areq 319 crypto/adiantum.c static void adiantum_streamcipher_done(struct crypto_async_request *areq, areq 322 crypto/adiantum.c struct skcipher_request *req = areq->data; areq 650 crypto/af_alg.c static void af_alg_free_areq_sgls(struct af_alg_async_req *areq) areq 652 crypto/af_alg.c struct sock *sk = areq->sk; areq 660 crypto/af_alg.c list_for_each_entry_safe(rsgl, tmp, &areq->rsgl_list, list) { areq 664 crypto/af_alg.c if (rsgl != &areq->first_rsgl) areq 668 crypto/af_alg.c tsgl = areq->tsgl; areq 670 crypto/af_alg.c for_each_sg(tsgl, sg, areq->tsgl_entries, i) { areq 676 crypto/af_alg.c sock_kfree_s(sk, tsgl, areq->tsgl_entries * sizeof(*tsgl)); areq 1017 crypto/af_alg.c void af_alg_free_resources(struct af_alg_async_req *areq) areq 1019 crypto/af_alg.c struct sock *sk = areq->sk; areq 1021 crypto/af_alg.c af_alg_free_areq_sgls(areq); areq 1022 crypto/af_alg.c sock_kfree_s(sk, areq, areq->areqlen); areq 1037 crypto/af_alg.c struct af_alg_async_req *areq = _req->data; areq 1038 crypto/af_alg.c struct sock *sk = areq->sk; areq 1039 crypto/af_alg.c struct kiocb *iocb = areq->iocb; areq 1043 crypto/af_alg.c resultlen = areq->outlen; areq 1045 crypto/af_alg.c af_alg_free_resources(areq); areq 1086 crypto/af_alg.c struct af_alg_async_req *areq = sock_kmalloc(sk, areqlen, GFP_KERNEL); areq 1088 crypto/af_alg.c if (unlikely(!areq)) areq 1091 crypto/af_alg.c areq->areqlen = areqlen; areq 1092 crypto/af_alg.c areq->sk = sk; areq 1093 crypto/af_alg.c areq->last_rsgl = NULL; areq 1094 crypto/af_alg.c INIT_LIST_HEAD(&areq->rsgl_list); areq 1095 crypto/af_alg.c areq->tsgl = NULL; areq 1096 crypto/af_alg.c areq->tsgl_entries = 0; areq 1098 crypto/af_alg.c return areq; areq 1115 crypto/af_alg.c struct af_alg_async_req *areq, size_t maxsize, areq 1134 crypto/af_alg.c if (list_empty(&areq->rsgl_list)) { areq 1135 crypto/af_alg.c rsgl = &areq->first_rsgl; areq 1143 crypto/af_alg.c list_add_tail(&rsgl->list, &areq->rsgl_list); areq 1153 crypto/af_alg.c if (areq->last_rsgl) areq 1154 crypto/af_alg.c af_alg_link_sg(&areq->last_rsgl->sgl, &rsgl->sgl); areq 1156 crypto/af_alg.c areq->last_rsgl = rsgl; areq 318 crypto/ahash.c struct ahash_request *areq = req->data; areq 321 crypto/ahash.c ahash_notify_einprogress(areq); areq 335 crypto/ahash.c ahash_restore_req(areq, err); areq 338 crypto/ahash.c areq->base.complete(&areq->base, err); areq 418 crypto/ahash.c struct ahash_request *areq = req->data; areq 423 crypto/ahash.c ahash_restore_req(areq, err); areq 425 crypto/ahash.c areq->base.complete(&areq->base, err); areq 446 crypto/ahash.c struct ahash_request *areq = req->data; areq 449 crypto/ahash.c ahash_notify_einprogress(areq); areq 453 crypto/ahash.c areq->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; areq 455 crypto/ahash.c err = ahash_def_finup_finish1(areq, err); areq 456 crypto/ahash.c if (areq->priv) areq 459 crypto/ahash.c areq->base.complete(&areq->base, err); areq 100 crypto/algif_aead.c struct af_alg_async_req *areq; areq 153 crypto/algif_aead.c areq = af_alg_alloc_areq(sk, sizeof(struct af_alg_async_req) + areq 155 crypto/algif_aead.c if (IS_ERR(areq)) areq 156 crypto/algif_aead.c return PTR_ERR(areq); areq 159 crypto/algif_aead.c err = af_alg_get_rsgl(sk, msg, flags, areq, outlen, &usedpages); areq 213 crypto/algif_aead.c rsgl_src = areq->first_rsgl.sgl.sg; areq 227 crypto/algif_aead.c areq->first_rsgl.sgl.sg, processed); areq 245 crypto/algif_aead.c areq->first_rsgl.sgl.sg, outlen); areq 250 crypto/algif_aead.c areq->tsgl_entries = af_alg_count_tsgl(sk, processed, areq 252 crypto/algif_aead.c if 
(!areq->tsgl_entries) areq 253 crypto/algif_aead.c areq->tsgl_entries = 1; areq 254 crypto/algif_aead.c areq->tsgl = sock_kmalloc(sk, array_size(sizeof(*areq->tsgl), areq 255 crypto/algif_aead.c areq->tsgl_entries), areq 257 crypto/algif_aead.c if (!areq->tsgl) { areq 261 crypto/algif_aead.c sg_init_table(areq->tsgl, areq->tsgl_entries); areq 264 crypto/algif_aead.c af_alg_pull_tsgl(sk, processed, areq->tsgl, processed - as); areq 269 crypto/algif_aead.c struct af_alg_sgl *sgl_prev = &areq->last_rsgl->sgl; areq 273 crypto/algif_aead.c areq->tsgl); areq 276 crypto/algif_aead.c rsgl_src = areq->tsgl; areq 280 crypto/algif_aead.c aead_request_set_crypt(&areq->cra_u.aead_req, rsgl_src, areq 281 crypto/algif_aead.c areq->first_rsgl.sgl.sg, used, ctx->iv); areq 282 crypto/algif_aead.c aead_request_set_ad(&areq->cra_u.aead_req, ctx->aead_assoclen); areq 283 crypto/algif_aead.c aead_request_set_tfm(&areq->cra_u.aead_req, tfm); areq 288 crypto/algif_aead.c areq->iocb = msg->msg_iocb; areq 291 crypto/algif_aead.c areq->outlen = outlen; areq 293 crypto/algif_aead.c aead_request_set_callback(&areq->cra_u.aead_req, areq 295 crypto/algif_aead.c af_alg_async_cb, areq); areq 296 crypto/algif_aead.c err = ctx->enc ? crypto_aead_encrypt(&areq->cra_u.aead_req) : areq 297 crypto/algif_aead.c crypto_aead_decrypt(&areq->cra_u.aead_req); areq 306 crypto/algif_aead.c aead_request_set_callback(&areq->cra_u.aead_req, areq 310 crypto/algif_aead.c crypto_aead_encrypt(&areq->cra_u.aead_req) : areq 311 crypto/algif_aead.c crypto_aead_decrypt(&areq->cra_u.aead_req), areq 317 crypto/algif_aead.c af_alg_free_resources(areq); areq 60 crypto/algif_skcipher.c struct af_alg_async_req *areq; areq 71 crypto/algif_skcipher.c areq = af_alg_alloc_areq(sk, sizeof(struct af_alg_async_req) + areq 73 crypto/algif_skcipher.c if (IS_ERR(areq)) areq 74 crypto/algif_skcipher.c return PTR_ERR(areq); areq 77 crypto/algif_skcipher.c err = af_alg_get_rsgl(sk, msg, flags, areq, -1, &len); areq 96 crypto/algif_skcipher.c areq->tsgl_entries = af_alg_count_tsgl(sk, len, 0); areq 97 crypto/algif_skcipher.c if (!areq->tsgl_entries) areq 98 crypto/algif_skcipher.c areq->tsgl_entries = 1; areq 99 crypto/algif_skcipher.c areq->tsgl = sock_kmalloc(sk, array_size(sizeof(*areq->tsgl), areq 100 crypto/algif_skcipher.c areq->tsgl_entries), areq 102 crypto/algif_skcipher.c if (!areq->tsgl) { areq 106 crypto/algif_skcipher.c sg_init_table(areq->tsgl, areq->tsgl_entries); areq 107 crypto/algif_skcipher.c af_alg_pull_tsgl(sk, len, areq->tsgl, 0); areq 110 crypto/algif_skcipher.c skcipher_request_set_tfm(&areq->cra_u.skcipher_req, tfm); areq 111 crypto/algif_skcipher.c skcipher_request_set_crypt(&areq->cra_u.skcipher_req, areq->tsgl, areq 112 crypto/algif_skcipher.c areq->first_rsgl.sgl.sg, len, ctx->iv); areq 117 crypto/algif_skcipher.c areq->iocb = msg->msg_iocb; areq 120 crypto/algif_skcipher.c areq->outlen = len; areq 122 crypto/algif_skcipher.c skcipher_request_set_callback(&areq->cra_u.skcipher_req, areq 124 crypto/algif_skcipher.c af_alg_async_cb, areq); areq 126 crypto/algif_skcipher.c crypto_skcipher_encrypt(&areq->cra_u.skcipher_req) : areq 127 crypto/algif_skcipher.c crypto_skcipher_decrypt(&areq->cra_u.skcipher_req); areq 136 crypto/algif_skcipher.c skcipher_request_set_callback(&areq->cra_u.skcipher_req, areq 141 crypto/algif_skcipher.c crypto_skcipher_encrypt(&areq->cra_u.skcipher_req) : areq 142 crypto/algif_skcipher.c crypto_skcipher_decrypt(&areq->cra_u.skcipher_req), areq 148 crypto/algif_skcipher.c af_alg_free_resources(areq); areq 122 
crypto/authenc.c static void authenc_geniv_ahash_done(struct crypto_async_request *areq, int err) areq 124 crypto/authenc.c struct aead_request *req = areq->data; areq 176 crypto/authenc.c struct aead_request *areq = req->data; areq 181 crypto/authenc.c err = crypto_authenc_genicv(areq, 0); areq 184 crypto/authenc.c authenc_request_complete(areq, err); areq 274 crypto/authenc.c static void authenc_verify_ahash_done(struct crypto_async_request *areq, areq 277 crypto/authenc.c struct aead_request *req = areq->data; areq 120 crypto/authencesn.c static void authenc_esn_geniv_ahash_done(struct crypto_async_request *areq, areq 123 crypto/authencesn.c struct aead_request *req = areq->data; areq 169 crypto/authencesn.c struct aead_request *areq = req->data; areq 172 crypto/authencesn.c err = crypto_authenc_esn_genicv(areq, 0); areq 174 crypto/authencesn.c authenc_esn_request_complete(areq, err); areq 271 crypto/authencesn.c static void authenc_esn_verify_ahash_done(struct crypto_async_request *areq, areq 274 crypto/authencesn.c struct aead_request *req = areq->data; areq 235 crypto/ccm.c static void crypto_ccm_encrypt_done(struct crypto_async_request *areq, int err) areq 237 crypto/ccm.c struct aead_request *req = areq->data; areq 331 crypto/ccm.c static void crypto_ccm_decrypt_done(struct crypto_async_request *areq, areq 334 crypto/ccm.c struct aead_request *req = areq->data; areq 120 crypto/chacha20poly1305.c static void chacha_decrypt_done(struct crypto_async_request *areq, int err) areq 122 crypto/chacha20poly1305.c async_done_continue(areq->data, err, poly_verify_tag); areq 166 crypto/chacha20poly1305.c static void poly_tail_done(struct crypto_async_request *areq, int err) areq 168 crypto/chacha20poly1305.c async_done_continue(areq->data, err, poly_tail_continue); areq 196 crypto/chacha20poly1305.c static void poly_cipherpad_done(struct crypto_async_request *areq, int err) areq 198 crypto/chacha20poly1305.c async_done_continue(areq->data, err, poly_tail); areq 225 crypto/chacha20poly1305.c static void poly_cipher_done(struct crypto_async_request *areq, int err) areq 227 crypto/chacha20poly1305.c async_done_continue(areq->data, err, poly_cipherpad); areq 255 crypto/chacha20poly1305.c static void poly_adpad_done(struct crypto_async_request *areq, int err) areq 257 crypto/chacha20poly1305.c async_done_continue(areq->data, err, poly_cipher); areq 284 crypto/chacha20poly1305.c static void poly_ad_done(struct crypto_async_request *areq, int err) areq 286 crypto/chacha20poly1305.c async_done_continue(areq->data, err, poly_adpad); areq 308 crypto/chacha20poly1305.c static void poly_setkey_done(struct crypto_async_request *areq, int err) areq 310 crypto/chacha20poly1305.c async_done_continue(areq->data, err, poly_ad); areq 334 crypto/chacha20poly1305.c static void poly_init_done(struct crypto_async_request *areq, int err) areq 336 crypto/chacha20poly1305.c async_done_continue(areq->data, err, poly_setkey); areq 357 crypto/chacha20poly1305.c static void poly_genkey_done(struct crypto_async_request *areq, int err) areq 359 crypto/chacha20poly1305.c async_done_continue(areq->data, err, poly_init); areq 396 crypto/chacha20poly1305.c static void chacha_encrypt_done(struct crypto_async_request *areq, int err) areq 398 crypto/chacha20poly1305.c async_done_continue(areq->data, err, poly_genkey); areq 785 crypto/cryptd.c static void cryptd_aead_encrypt(struct crypto_async_request *areq, int err) areq 787 crypto/cryptd.c struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm); areq 791 crypto/cryptd.c req = 
container_of(areq, struct aead_request, base); areq 795 crypto/cryptd.c static void cryptd_aead_decrypt(struct crypto_async_request *areq, int err) areq 797 crypto/cryptd.c struct cryptd_aead_ctx *ctx = crypto_tfm_ctx(areq->tfm); areq 801 crypto/cryptd.c req = container_of(areq, struct aead_request, base); areq 92 crypto/cts.c static void cts_cbc_crypt_done(struct crypto_async_request *areq, int err) areq 94 crypto/cts.c struct skcipher_request *req = areq->data; areq 132 crypto/cts.c static void crypto_cts_encrypt_done(struct crypto_async_request *areq, int err) areq 134 crypto/cts.c struct skcipher_request *req = areq->data; areq 226 crypto/cts.c static void crypto_cts_decrypt_done(struct crypto_async_request *areq, int err) areq 228 crypto/cts.c struct skcipher_request *req = areq->data; areq 151 crypto/essiv.c static void essiv_skcipher_done(struct crypto_async_request *areq, int err) areq 153 crypto/essiv.c struct skcipher_request *req = areq->data; areq 186 crypto/essiv.c static void essiv_aead_done(struct crypto_async_request *areq, int err) areq 188 crypto/essiv.c struct aead_request *req = areq->data; areq 206 crypto/gcm.c static void gcm_hash_len_done(struct crypto_async_request *areq, int err); areq 255 crypto/gcm.c static void gcm_hash_len_done(struct crypto_async_request *areq, int err) areq 257 crypto/gcm.c struct aead_request *req = areq->data; areq 276 crypto/gcm.c static void gcm_hash_crypt_remain_done(struct crypto_async_request *areq, areq 279 crypto/gcm.c struct aead_request *req = areq->data; areq 307 crypto/gcm.c static void gcm_hash_crypt_done(struct crypto_async_request *areq, int err) areq 309 crypto/gcm.c struct aead_request *req = areq->data; areq 335 crypto/gcm.c static void gcm_hash_assoc_remain_done(struct crypto_async_request *areq, areq 338 crypto/gcm.c struct aead_request *req = areq->data; areq 364 crypto/gcm.c static void gcm_hash_assoc_done(struct crypto_async_request *areq, int err) areq 366 crypto/gcm.c struct aead_request *req = areq->data; areq 389 crypto/gcm.c static void gcm_hash_init_done(struct crypto_async_request *areq, int err) areq 391 crypto/gcm.c struct aead_request *req = areq->data; areq 442 crypto/gcm.c static void gcm_encrypt_done(struct crypto_async_request *areq, int err) areq 444 crypto/gcm.c struct aead_request *req = areq->data; areq 486 crypto/gcm.c static void gcm_decrypt_done(struct crypto_async_request *areq, int err) areq 488 crypto/gcm.c struct aead_request *req = areq->data; areq 209 crypto/lrw.c static void crypt_done(struct crypto_async_request *areq, int err) areq 211 crypto/lrw.c struct skcipher_request *req = areq->data; areq 67 crypto/pcrypt.c static void pcrypt_aead_done(struct crypto_async_request *areq, int err) areq 69 crypto/pcrypt.c struct aead_request *req = areq->data; areq 147 crypto/xts.c static void cts_done(struct crypto_async_request *areq, int err) areq 149 crypto/xts.c struct skcipher_request *req = areq->data; areq 201 crypto/xts.c static void encrypt_done(struct crypto_async_request *areq, int err) areq 203 crypto/xts.c struct skcipher_request *req = areq->data; areq 221 crypto/xts.c static void decrypt_done(struct crypto_async_request *areq, int err) areq 223 crypto/xts.c struct skcipher_request *req = areq->data; areq 186 drivers/crypto/atmel-aes.c struct crypto_async_request *areq; areq 494 drivers/crypto/atmel-aes.c struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq); areq 529 drivers/crypto/atmel-aes.c dd->areq->complete(dd->areq, err); areq 936 drivers/crypto/atmel-aes.c struct 
crypto_async_request *areq, *backlog; areq 950 drivers/crypto/atmel-aes.c areq = crypto_dequeue_request(&dd->queue); areq 951 drivers/crypto/atmel-aes.c if (areq) areq 955 drivers/crypto/atmel-aes.c if (!areq) areq 961 drivers/crypto/atmel-aes.c ctx = crypto_tfm_ctx(areq->tfm); areq 963 drivers/crypto/atmel-aes.c dd->areq = areq; areq 965 drivers/crypto/atmel-aes.c start_async = (areq != new_areq); areq 983 drivers/crypto/atmel-aes.c struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq); areq 1013 drivers/crypto/atmel-aes.c struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq); areq 1069 drivers/crypto/atmel-aes.c struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq); areq 1530 drivers/crypto/atmel-aes.c struct aead_request *req = aead_request_cast(dd->areq); areq 1567 drivers/crypto/atmel-aes.c struct aead_request *req = aead_request_cast(dd->areq); areq 1590 drivers/crypto/atmel-aes.c struct aead_request *req = aead_request_cast(dd->areq); areq 1625 drivers/crypto/atmel-aes.c struct aead_request *req = aead_request_cast(dd->areq); areq 1673 drivers/crypto/atmel-aes.c struct aead_request *req = aead_request_cast(dd->areq); areq 1718 drivers/crypto/atmel-aes.c struct aead_request *req = aead_request_cast(dd->areq); areq 1857 drivers/crypto/atmel-aes.c struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq); areq 1882 drivers/crypto/atmel-aes.c struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq); areq 1987 drivers/crypto/atmel-aes.c struct aead_request *req = aead_request_cast(dd->areq); areq 1997 drivers/crypto/atmel-aes.c struct aead_request *req = aead_request_cast(dd->areq); areq 2016 drivers/crypto/atmel-aes.c struct aead_request *req = aead_request_cast(dd->areq); areq 2037 drivers/crypto/atmel-aes.c struct aead_request *req = aead_request_cast(dd->areq); areq 2079 drivers/crypto/atmel-aes.c struct aead_request *req = aead_request_cast(dd->areq); areq 2092 drivers/crypto/atmel-aes.c struct aead_request *req = aead_request_cast(dd->areq); areq 50 drivers/crypto/atmel-ecc.c static void atmel_ecdh_done(struct atmel_i2c_work_data *work_data, void *areq, areq 53 drivers/crypto/atmel-ecc.c struct kpp_request *req = areq; areq 262 drivers/crypto/atmel-i2c.c work_data->cbk(work_data, work_data->areq, status); areq 267 drivers/crypto/atmel-i2c.c void *areq, int status), areq 268 drivers/crypto/atmel-i2c.c void *areq) areq 271 drivers/crypto/atmel-i2c.c work_data->areq = areq; areq 163 drivers/crypto/atmel-i2c.h void (*cbk)(struct atmel_i2c_work_data *work_data, void *areq, areq 165 drivers/crypto/atmel-i2c.h void *areq; areq 174 drivers/crypto/atmel-i2c.h void *areq, int status), areq 175 drivers/crypto/atmel-i2c.h void *areq); areq 2238 drivers/crypto/atmel-sha.c static void atmel_sha_authenc_complete(struct crypto_async_request *areq, areq 2241 drivers/crypto/atmel-sha.c struct ahash_request *req = areq->data; areq 22 drivers/crypto/atmel-sha204a.c void *areq, int status) areq 25 drivers/crypto/atmel-sha204a.c struct hwrng *rng = areq; areq 359 drivers/crypto/axis/artpec6_crypto.c static int artpec6_crypto_prepare_aead(struct aead_request *areq); areq 360 drivers/crypto/axis/artpec6_crypto.c static int artpec6_crypto_prepare_crypto(struct skcipher_request *areq); areq 361 drivers/crypto/axis/artpec6_crypto.c static int artpec6_crypto_prepare_hash(struct ahash_request *areq); areq 1309 drivers/crypto/axis/artpec6_crypto.c static int artpec6_crypto_prepare_hash(struct ahash_request *areq) areq 1311 
drivers/crypto/axis/artpec6_crypto.c struct artpec6_hashalg_context *ctx = crypto_tfm_ctx(areq->base.tfm); areq 1312 drivers/crypto/axis/artpec6_crypto.c struct artpec6_hash_request_context *req_ctx = ahash_request_ctx(areq); areq 1313 drivers/crypto/axis/artpec6_crypto.c size_t digestsize = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq)); areq 1316 drivers/crypto/axis/artpec6_crypto.c crypto_ahash_tfm(crypto_ahash_reqtfm(areq))); areq 1402 drivers/crypto/axis/artpec6_crypto.c size_t total_bytes = areq->nbytes + req_ctx->partial_bytes; areq 1428 drivers/crypto/axis/artpec6_crypto.c artpec6_crypto_walk_init(&walk, areq->src); areq 1438 drivers/crypto/axis/artpec6_crypto.c size_t sg_rem = areq->nbytes - sg_skip; areq 1440 drivers/crypto/axis/artpec6_crypto.c sg_pcopy_to_buffer(areq->src, sg_nents(areq->src), areq 1497 drivers/crypto/axis/artpec6_crypto.c error = artpec6_crypto_setup_in_descr(common, areq->result, areq 1665 drivers/crypto/axis/artpec6_crypto.c static int artpec6_crypto_prepare_crypto(struct skcipher_request *areq) areq 1669 drivers/crypto/axis/artpec6_crypto.c struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq); areq 1681 drivers/crypto/axis/artpec6_crypto.c req_ctx = skcipher_request_ctx(areq); areq 1783 drivers/crypto/axis/artpec6_crypto.c ret = artpec6_crypto_setup_out_descr(common, areq->iv, iv_len, areq 1789 drivers/crypto/axis/artpec6_crypto.c artpec6_crypto_walk_init(&walk, areq->src); areq 1790 drivers/crypto/axis/artpec6_crypto.c ret = artpec6_crypto_setup_sg_descrs_out(common, &walk, areq->cryptlen); areq 1795 drivers/crypto/axis/artpec6_crypto.c artpec6_crypto_walk_init(&walk, areq->dst); areq 1796 drivers/crypto/axis/artpec6_crypto.c ret = artpec6_crypto_setup_sg_descrs_in(common, &walk, areq->cryptlen); areq 1803 drivers/crypto/axis/artpec6_crypto.c size_t pad = ALIGN(areq->cryptlen, AES_BLOCK_SIZE) - areq 1804 drivers/crypto/axis/artpec6_crypto.c areq->cryptlen; areq 1832 drivers/crypto/axis/artpec6_crypto.c static int artpec6_crypto_prepare_aead(struct aead_request *areq) areq 1837 drivers/crypto/axis/artpec6_crypto.c struct artpec6_cryptotfm_context *ctx = crypto_tfm_ctx(areq->base.tfm); areq 1838 drivers/crypto/axis/artpec6_crypto.c struct artpec6_crypto_aead_req_ctx *req_ctx = aead_request_ctx(areq); areq 1839 drivers/crypto/axis/artpec6_crypto.c struct crypto_aead *cipher = crypto_aead_reqtfm(areq); areq 1909 drivers/crypto/axis/artpec6_crypto.c input_length = areq->cryptlen; areq 1915 drivers/crypto/axis/artpec6_crypto.c __cpu_to_be64(8*areq->assoclen); areq 1920 drivers/crypto/axis/artpec6_crypto.c memcpy(req_ctx->hw_ctx.J0, areq->iv, crypto_aead_ivsize(cipher)); areq 1932 drivers/crypto/axis/artpec6_crypto.c artpec6_crypto_walk_init(&walk, areq->src); areq 1935 drivers/crypto/axis/artpec6_crypto.c count = areq->assoclen; areq 1940 drivers/crypto/axis/artpec6_crypto.c if (!IS_ALIGNED(areq->assoclen, 16)) { areq 1941 drivers/crypto/axis/artpec6_crypto.c size_t assoc_pad = 16 - (areq->assoclen % 16); areq 1973 drivers/crypto/axis/artpec6_crypto.c size_t output_len = areq->cryptlen; areq 1978 drivers/crypto/axis/artpec6_crypto.c artpec6_crypto_walk_init(&walk, areq->dst); areq 1981 drivers/crypto/axis/artpec6_crypto.c count = artpec6_crypto_walk_advance(&walk, areq->assoclen); areq 2189 drivers/crypto/axis/artpec6_crypto.c struct aead_request *areq = container_of(req, areq 2191 drivers/crypto/axis/artpec6_crypto.c struct crypto_aead *aead = crypto_aead_reqtfm(areq); areq 2192 drivers/crypto/axis/artpec6_crypto.c struct 
artpec6_crypto_aead_req_ctx *req_ctx = aead_request_ctx(areq); areq 2198 drivers/crypto/axis/artpec6_crypto.c sg_pcopy_to_buffer(areq->src, areq 2199 drivers/crypto/axis/artpec6_crypto.c sg_nents(areq->src), areq 2202 drivers/crypto/axis/artpec6_crypto.c areq->assoclen + areq->cryptlen - areq 306 drivers/crypto/bcm/cipher.c struct crypto_async_request *areq = rctx->parent; areq 308 drivers/crypto/bcm/cipher.c container_of(areq, struct ablkcipher_request, base); areq 506 drivers/crypto/bcm/cipher.c struct crypto_async_request *areq = rctx->parent; areq 507 drivers/crypto/bcm/cipher.c struct ablkcipher_request *req = ablkcipher_request_cast(areq); areq 692 drivers/crypto/bcm/cipher.c struct crypto_async_request *areq = rctx->parent; areq 693 drivers/crypto/bcm/cipher.c struct ahash_request *req = ahash_request_cast(areq); areq 983 drivers/crypto/bcm/cipher.c struct crypto_async_request *areq = rctx->parent; areq 984 drivers/crypto/bcm/cipher.c struct ahash_request *req = ahash_request_cast(areq); areq 1034 drivers/crypto/bcm/cipher.c struct crypto_async_request *areq = rctx->parent; areq 1035 drivers/crypto/bcm/cipher.c struct ahash_request *req = ahash_request_cast(areq); areq 1299 drivers/crypto/bcm/cipher.c struct crypto_async_request *areq = rctx->parent; areq 1300 drivers/crypto/bcm/cipher.c struct aead_request *req = container_of(areq, areq 1569 drivers/crypto/bcm/cipher.c struct crypto_async_request *areq = rctx->parent; areq 1570 drivers/crypto/bcm/cipher.c struct aead_request *req = container_of(areq, areq 1646 drivers/crypto/bcm/cipher.c struct crypto_async_request *areq = rctx->parent; areq 1653 drivers/crypto/bcm/cipher.c if (areq) areq 1654 drivers/crypto/bcm/cipher.c areq->complete(areq, err); areq 2636 drivers/crypto/bcm/cipher.c static void aead_complete(struct crypto_async_request *areq, int err) areq 2639 drivers/crypto/bcm/cipher.c container_of(areq, struct aead_request, base); areq 2645 drivers/crypto/bcm/cipher.c areq->tfm = crypto_aead_tfm(aead); areq 2647 drivers/crypto/bcm/cipher.c areq->complete = rctx->old_complete; areq 2648 drivers/crypto/bcm/cipher.c areq->data = rctx->old_data; areq 2650 drivers/crypto/bcm/cipher.c areq->complete(areq, err); areq 126 drivers/crypto/caam/caamalg_qi2.c static struct caam_request *to_caam_req(struct crypto_async_request *areq) areq 128 drivers/crypto/caam/caamalg_qi2.c switch (crypto_tfm_alg_type(areq->tfm)) { areq 130 drivers/crypto/caam/caamalg_qi2.c return skcipher_request_ctx(skcipher_request_cast(areq)); areq 132 drivers/crypto/caam/caamalg_qi2.c return aead_request_ctx(container_of(areq, struct aead_request, areq 135 drivers/crypto/caam/caamalg_qi2.c return ahash_request_ctx(ahash_request_cast(areq)); areq 1290 drivers/crypto/caam/caamalg_qi2.c struct crypto_async_request *areq = cbk_ctx; areq 1291 drivers/crypto/caam/caamalg_qi2.c struct aead_request *req = container_of(areq, struct aead_request, areq 1293 drivers/crypto/caam/caamalg_qi2.c struct caam_request *req_ctx = to_caam_req(areq); areq 1311 drivers/crypto/caam/caamalg_qi2.c struct crypto_async_request *areq = cbk_ctx; areq 1312 drivers/crypto/caam/caamalg_qi2.c struct aead_request *req = container_of(areq, struct aead_request, areq 1314 drivers/crypto/caam/caamalg_qi2.c struct caam_request *req_ctx = to_caam_req(areq); areq 1398 drivers/crypto/caam/caamalg_qi2.c struct crypto_async_request *areq = cbk_ctx; areq 1399 drivers/crypto/caam/caamalg_qi2.c struct skcipher_request *req = skcipher_request_cast(areq); areq 1400 drivers/crypto/caam/caamalg_qi2.c struct 
caam_request *req_ctx = to_caam_req(areq); areq 1436 drivers/crypto/caam/caamalg_qi2.c struct crypto_async_request *areq = cbk_ctx; areq 1437 drivers/crypto/caam/caamalg_qi2.c struct skcipher_request *req = skcipher_request_cast(areq); areq 1438 drivers/crypto/caam/caamalg_qi2.c struct caam_request *req_ctx = to_caam_req(areq); areq 3345 drivers/crypto/caam/caamalg_qi2.c struct crypto_async_request *areq = cbk_ctx; areq 3346 drivers/crypto/caam/caamalg_qi2.c struct ahash_request *req = ahash_request_cast(areq); areq 3372 drivers/crypto/caam/caamalg_qi2.c struct crypto_async_request *areq = cbk_ctx; areq 3373 drivers/crypto/caam/caamalg_qi2.c struct ahash_request *req = ahash_request_cast(areq); areq 3402 drivers/crypto/caam/caamalg_qi2.c struct crypto_async_request *areq = cbk_ctx; areq 3403 drivers/crypto/caam/caamalg_qi2.c struct ahash_request *req = ahash_request_cast(areq); areq 3429 drivers/crypto/caam/caamalg_qi2.c struct crypto_async_request *areq = cbk_ctx; areq 3430 drivers/crypto/caam/caamalg_qi2.c struct ahash_request *req = ahash_request_cast(areq); areq 356 drivers/crypto/caam/jr.c u32 status, void *areq), areq 357 drivers/crypto/caam/jr.c void *areq) areq 387 drivers/crypto/caam/jr.c head_entry->cbkarg = areq; areq 16 drivers/crypto/caam/jr.h void *areq), areq 17 drivers/crypto/caam/jr.h void *areq); areq 176 drivers/crypto/cavium/nitrox/nitrox_aead.c struct aead_request *areq = arg; areq 177 drivers/crypto/cavium/nitrox/nitrox_aead.c struct nitrox_aead_rctx *rctx = aead_request_ctx(areq); areq 186 drivers/crypto/cavium/nitrox/nitrox_aead.c areq->base.complete(&areq->base, err); areq 189 drivers/crypto/cavium/nitrox/nitrox_aead.c static int nitrox_aes_gcm_enc(struct aead_request *areq) areq 191 drivers/crypto/cavium/nitrox/nitrox_aead.c struct crypto_aead *aead = crypto_aead_reqtfm(areq); areq 193 drivers/crypto/cavium/nitrox/nitrox_aead.c struct nitrox_aead_rctx *rctx = aead_request_ctx(areq); areq 198 drivers/crypto/cavium/nitrox/nitrox_aead.c memcpy(fctx->crypto.iv, areq->iv, GCM_AES_SALT_SIZE); areq 200 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->cryptlen = areq->cryptlen; areq 201 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->assoclen = areq->assoclen; areq 202 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->srclen = areq->assoclen + areq->cryptlen; areq 204 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->iv = &areq->iv[GCM_AES_SALT_SIZE]; areq 206 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->flags = areq->base.flags; areq 208 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->src = areq->src; areq 209 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->dst = areq->dst; areq 217 drivers/crypto/cavium/nitrox/nitrox_aead.c areq); areq 220 drivers/crypto/cavium/nitrox/nitrox_aead.c static int nitrox_aes_gcm_dec(struct aead_request *areq) areq 222 drivers/crypto/cavium/nitrox/nitrox_aead.c struct crypto_aead *aead = crypto_aead_reqtfm(areq); areq 224 drivers/crypto/cavium/nitrox/nitrox_aead.c struct nitrox_aead_rctx *rctx = aead_request_ctx(areq); areq 229 drivers/crypto/cavium/nitrox/nitrox_aead.c memcpy(fctx->crypto.iv, areq->iv, GCM_AES_SALT_SIZE); areq 231 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->cryptlen = areq->cryptlen - aead->authsize; areq 232 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->assoclen = areq->assoclen; areq 233 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->srclen = areq->cryptlen + areq->assoclen; areq 235 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->iv = &areq->iv[GCM_AES_SALT_SIZE]; areq 237 
drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->flags = areq->base.flags; areq 239 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->src = areq->src; areq 240 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->dst = areq->dst; areq 248 drivers/crypto/cavium/nitrox/nitrox_aead.c areq); areq 363 drivers/crypto/cavium/nitrox/nitrox_aead.c static int nitrox_rfc4106_set_aead_rctx_sglist(struct aead_request *areq) areq 365 drivers/crypto/cavium/nitrox/nitrox_aead.c struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq); areq 367 drivers/crypto/cavium/nitrox/nitrox_aead.c unsigned int assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE; areq 370 drivers/crypto/cavium/nitrox/nitrox_aead.c if (areq->assoclen != 16 && areq->assoclen != 20) areq 373 drivers/crypto/cavium/nitrox/nitrox_aead.c scatterwalk_map_and_copy(rctx->assoc, areq->src, 0, assoclen, 0); areq 376 drivers/crypto/cavium/nitrox/nitrox_aead.c sg = scatterwalk_ffwd(rctx->src + 1, areq->src, areq->assoclen); areq 380 drivers/crypto/cavium/nitrox/nitrox_aead.c if (areq->src != areq->dst) { areq 383 drivers/crypto/cavium/nitrox/nitrox_aead.c sg = scatterwalk_ffwd(rctx->dst + 1, areq->dst, areq->assoclen); areq 389 drivers/crypto/cavium/nitrox/nitrox_aead.c aead_rctx->dst = (areq->src == areq->dst) ? rctx->src : rctx->dst; areq 396 drivers/crypto/cavium/nitrox/nitrox_aead.c struct aead_request *areq = arg; areq 397 drivers/crypto/cavium/nitrox/nitrox_aead.c struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq); areq 407 drivers/crypto/cavium/nitrox/nitrox_aead.c areq->base.complete(&areq->base, err); areq 410 drivers/crypto/cavium/nitrox/nitrox_aead.c static int nitrox_rfc4106_enc(struct aead_request *areq) areq 412 drivers/crypto/cavium/nitrox/nitrox_aead.c struct crypto_aead *aead = crypto_aead_reqtfm(areq); areq 414 drivers/crypto/cavium/nitrox/nitrox_aead.c struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq); areq 419 drivers/crypto/cavium/nitrox/nitrox_aead.c aead_rctx->cryptlen = areq->cryptlen; areq 420 drivers/crypto/cavium/nitrox/nitrox_aead.c aead_rctx->assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE; areq 423 drivers/crypto/cavium/nitrox/nitrox_aead.c aead_rctx->iv = areq->iv; areq 425 drivers/crypto/cavium/nitrox/nitrox_aead.c aead_rctx->flags = areq->base.flags; areq 429 drivers/crypto/cavium/nitrox/nitrox_aead.c ret = nitrox_rfc4106_set_aead_rctx_sglist(areq); areq 439 drivers/crypto/cavium/nitrox/nitrox_aead.c nitrox_rfc4106_callback, areq); areq 442 drivers/crypto/cavium/nitrox/nitrox_aead.c static int nitrox_rfc4106_dec(struct aead_request *areq) areq 444 drivers/crypto/cavium/nitrox/nitrox_aead.c struct crypto_aead *aead = crypto_aead_reqtfm(areq); areq 446 drivers/crypto/cavium/nitrox/nitrox_aead.c struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq); areq 451 drivers/crypto/cavium/nitrox/nitrox_aead.c aead_rctx->cryptlen = areq->cryptlen - aead->authsize; areq 452 drivers/crypto/cavium/nitrox/nitrox_aead.c aead_rctx->assoclen = areq->assoclen - GCM_RFC4106_IV_SIZE; areq 454 drivers/crypto/cavium/nitrox/nitrox_aead.c areq->cryptlen - GCM_RFC4106_IV_SIZE + areq->assoclen; areq 456 drivers/crypto/cavium/nitrox/nitrox_aead.c aead_rctx->iv = areq->iv; areq 458 drivers/crypto/cavium/nitrox/nitrox_aead.c aead_rctx->flags = areq->base.flags; areq 462 drivers/crypto/cavium/nitrox/nitrox_aead.c ret = nitrox_rfc4106_set_aead_rctx_sglist(areq); areq 472 drivers/crypto/cavium/nitrox/nitrox_aead.c nitrox_rfc4106_callback, areq); areq 214 drivers/crypto/ccree/cc_aead.c struct aead_request *areq = (struct aead_request 
*)cc_req; areq 215 drivers/crypto/ccree/cc_aead.c struct aead_req_ctx *areq_ctx = aead_request_ctx(areq); areq 223 drivers/crypto/ccree/cc_aead.c cc_unmap_aead_request(dev, areq); areq 226 drivers/crypto/ccree/cc_aead.c areq->iv = areq_ctx->backup_iv; areq 239 drivers/crypto/ccree/cc_aead.c sg_zero_buffer(areq->dst, sg_nents(areq->dst), areq 240 drivers/crypto/ccree/cc_aead.c areq->cryptlen, areq->assoclen); areq 245 drivers/crypto/ccree/cc_aead.c u32 skip = areq->cryptlen + areq_ctx->dst_offset; areq 252 drivers/crypto/ccree/cc_aead.c aead_request_complete(areq, err); areq 731 drivers/crypto/ccree/cc_aead.c static void cc_set_assoc_desc(struct aead_request *areq, unsigned int flow_mode, areq 734 drivers/crypto/ccree/cc_aead.c struct crypto_aead *tfm = crypto_aead_reqtfm(areq); areq 736 drivers/crypto/ccree/cc_aead.c struct aead_req_ctx *areq_ctx = aead_request_ctx(areq); areq 745 drivers/crypto/ccree/cc_aead.c set_din_type(&desc[idx], DMA_DLLI, sg_dma_address(areq->src), areq 770 drivers/crypto/ccree/cc_aead.c static void cc_proc_authen_desc(struct aead_request *areq, areq 775 drivers/crypto/ccree/cc_aead.c struct aead_req_ctx *areq_ctx = aead_request_ctx(areq); areq 778 drivers/crypto/ccree/cc_aead.c struct crypto_aead *tfm = crypto_aead_reqtfm(areq); areq 834 drivers/crypto/ccree/cc_aead.c static void cc_proc_cipher_desc(struct aead_request *areq, areq 840 drivers/crypto/ccree/cc_aead.c struct aead_req_ctx *areq_ctx = aead_request_ctx(areq); areq 842 drivers/crypto/ccree/cc_aead.c struct crypto_aead *tfm = crypto_aead_reqtfm(areq); areq 748 drivers/crypto/ccree/cc_cipher.c unsigned int nbytes, void *areq, areq 52 drivers/crypto/ccree/cc_hash.c static void cc_setup_xcbc(struct ahash_request *areq, struct cc_hw_desc desc[], areq 55 drivers/crypto/ccree/cc_hash.c static void cc_setup_cmac(struct ahash_request *areq, struct cc_hw_desc desc[], areq 2124 drivers/crypto/ccree/cc_hash.c static void cc_setup_xcbc(struct ahash_request *areq, struct cc_hw_desc desc[], areq 2128 drivers/crypto/ccree/cc_hash.c struct ahash_req_ctx *state = ahash_request_ctx(areq); areq 2129 drivers/crypto/ccree/cc_hash.c struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq); areq 2181 drivers/crypto/ccree/cc_hash.c static void cc_setup_cmac(struct ahash_request *areq, struct cc_hw_desc desc[], areq 2185 drivers/crypto/ccree/cc_hash.c struct ahash_req_ctx *state = ahash_request_ctx(areq); areq 2186 drivers/crypto/ccree/cc_hash.c struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq); areq 2069 drivers/crypto/chelsio/chcr_algo.c static int chcr_ahash_export(struct ahash_request *areq, void *out) areq 2071 drivers/crypto/chelsio/chcr_algo.c struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); areq 2083 drivers/crypto/chelsio/chcr_algo.c static int chcr_ahash_import(struct ahash_request *areq, const void *in) areq 2085 drivers/crypto/chelsio/chcr_algo.c struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); areq 2181 drivers/crypto/chelsio/chcr_algo.c static int chcr_sha_init(struct ahash_request *areq) areq 2183 drivers/crypto/chelsio/chcr_algo.c struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); areq 2184 drivers/crypto/chelsio/chcr_algo.c struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq); areq 2203 drivers/crypto/chelsio/chcr_algo.c static int chcr_hmac_init(struct ahash_request *areq) areq 2205 drivers/crypto/chelsio/chcr_algo.c struct chcr_ahash_req_ctx *req_ctx = ahash_request_ctx(areq); areq 2206 drivers/crypto/chelsio/chcr_algo.c struct crypto_ahash *rtfm = crypto_ahash_reqtfm(areq); 
areq 2211 drivers/crypto/chelsio/chcr_algo.c chcr_sha_init(areq); areq 496 drivers/crypto/inside-secure/safexcel_cipher.c struct skcipher_request *areq = skcipher_request_cast(async); areq 497 drivers/crypto/inside-secure/safexcel_cipher.c struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq); areq 537 drivers/crypto/inside-secure/safexcel_cipher.c sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv, areq 555 drivers/crypto/inside-secure/safexcel_cipher.c struct skcipher_request *areq = skcipher_request_cast(base); areq 556 drivers/crypto/inside-secure/safexcel_cipher.c struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq); areq 595 drivers/crypto/inside-secure/safexcel_cipher.c sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv, areq 174 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_ahash_enqueue(struct ahash_request *areq); areq 182 drivers/crypto/inside-secure/safexcel_hash.c struct ahash_request *areq = ahash_request_cast(async); areq 183 drivers/crypto/inside-secure/safexcel_hash.c struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq); areq 184 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_req *sreq = ahash_request_ctx(areq); areq 202 drivers/crypto/inside-secure/safexcel_hash.c dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE); areq 234 drivers/crypto/inside-secure/safexcel_hash.c areq->nbytes = 0; areq 235 drivers/crypto/inside-secure/safexcel_hash.c safexcel_ahash_enqueue(areq); areq 241 drivers/crypto/inside-secure/safexcel_hash.c memcpy(areq->result, sreq->state, areq 257 drivers/crypto/inside-secure/safexcel_hash.c struct ahash_request *areq = ahash_request_cast(async); areq 258 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_req *req = ahash_request_ctx(areq); areq 259 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); areq 271 drivers/crypto/inside-secure/safexcel_hash.c cache_len = queued - areq->nbytes; areq 285 drivers/crypto/inside-secure/safexcel_hash.c sg_pcopy_to_buffer(areq->src, sg_nents(areq->src), areq 287 drivers/crypto/inside-secure/safexcel_hash.c areq->nbytes - extra); areq 323 drivers/crypto/inside-secure/safexcel_hash.c if (!areq->nbytes) areq 327 drivers/crypto/inside-secure/safexcel_hash.c req->nents = dma_map_sg(priv->dev, areq->src, areq 328 drivers/crypto/inside-secure/safexcel_hash.c sg_nents_for_len(areq->src, areq 329 drivers/crypto/inside-secure/safexcel_hash.c areq->nbytes), areq 336 drivers/crypto/inside-secure/safexcel_hash.c for_each_sg(areq->src, sg, req->nents, i) { areq 383 drivers/crypto/inside-secure/safexcel_hash.c safexcel_rdr_req_set(priv, ring, rdesc, &areq->base); areq 395 drivers/crypto/inside-secure/safexcel_hash.c dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE); areq 416 drivers/crypto/inside-secure/safexcel_hash.c struct ahash_request *areq = ahash_request_cast(async); areq 417 drivers/crypto/inside-secure/safexcel_hash.c struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq); areq 464 drivers/crypto/inside-secure/safexcel_hash.c struct ahash_request *areq = ahash_request_cast(async); areq 465 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_req *req = ahash_request_ctx(areq); areq 485 drivers/crypto/inside-secure/safexcel_hash.c struct ahash_request *areq = ahash_request_cast(async); areq 486 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); areq 503 
drivers/crypto/inside-secure/safexcel_hash.c struct ahash_request *areq = ahash_request_cast(async); areq 504 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_req *req = ahash_request_ctx(areq); areq 557 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_ahash_cache(struct ahash_request *areq) areq 559 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_req *req = ahash_request_ctx(areq); areq 571 drivers/crypto/inside-secure/safexcel_hash.c if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) { areq 572 drivers/crypto/inside-secure/safexcel_hash.c sg_pcopy_to_buffer(areq->src, sg_nents(areq->src), areq 574 drivers/crypto/inside-secure/safexcel_hash.c areq->nbytes, 0); areq 582 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_ahash_enqueue(struct ahash_request *areq) areq 584 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); areq 585 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_req *req = ahash_request_ctx(areq); areq 620 drivers/crypto/inside-secure/safexcel_hash.c EIP197_GFP_FLAGS(areq->base), areq 629 drivers/crypto/inside-secure/safexcel_hash.c ret = crypto_enqueue_request(&priv->ring[ring].queue, &areq->base); areq 638 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_ahash_update(struct ahash_request *areq) areq 640 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_req *req = ahash_request_ctx(areq); areq 644 drivers/crypto/inside-secure/safexcel_hash.c if (!areq->nbytes) areq 648 drivers/crypto/inside-secure/safexcel_hash.c ret = safexcel_ahash_cache(areq); areq 651 drivers/crypto/inside-secure/safexcel_hash.c req->len += areq->nbytes; areq 658 drivers/crypto/inside-secure/safexcel_hash.c return safexcel_ahash_enqueue(areq); areq 663 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_ahash_final(struct ahash_request *areq) areq 665 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_req *req = ahash_request_ctx(areq); areq 666 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); areq 670 drivers/crypto/inside-secure/safexcel_hash.c if (unlikely(!req->len && !areq->nbytes)) { areq 677 drivers/crypto/inside-secure/safexcel_hash.c memcpy(areq->result, md5_zero_message_hash, areq 680 drivers/crypto/inside-secure/safexcel_hash.c memcpy(areq->result, sha1_zero_message_hash, areq 683 drivers/crypto/inside-secure/safexcel_hash.c memcpy(areq->result, sha224_zero_message_hash, areq 686 drivers/crypto/inside-secure/safexcel_hash.c memcpy(areq->result, sha256_zero_message_hash, areq 689 drivers/crypto/inside-secure/safexcel_hash.c memcpy(areq->result, sha384_zero_message_hash, areq 692 drivers/crypto/inside-secure/safexcel_hash.c memcpy(areq->result, sha512_zero_message_hash, areq 698 drivers/crypto/inside-secure/safexcel_hash.c !areq->nbytes)) { areq 735 drivers/crypto/inside-secure/safexcel_hash.c return safexcel_ahash_enqueue(areq); areq 738 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_ahash_finup(struct ahash_request *areq) areq 740 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_req *req = ahash_request_ctx(areq); areq 744 drivers/crypto/inside-secure/safexcel_hash.c safexcel_ahash_update(areq); areq 745 drivers/crypto/inside-secure/safexcel_hash.c return safexcel_ahash_final(areq); areq 748 drivers/crypto/inside-secure/safexcel_hash.c static int 
safexcel_ahash_export(struct ahash_request *areq, void *out) areq 750 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_req *req = ahash_request_ctx(areq); areq 764 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_ahash_import(struct ahash_request *areq, const void *in) areq 766 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_req *req = ahash_request_ctx(areq); areq 770 drivers/crypto/inside-secure/safexcel_hash.c ret = crypto_ahash_init(areq); areq 801 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_sha1_init(struct ahash_request *areq) areq 803 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); areq 804 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_req *req = ahash_request_ctx(areq); areq 816 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_sha1_digest(struct ahash_request *areq) areq 818 drivers/crypto/inside-secure/safexcel_hash.c int ret = safexcel_sha1_init(areq); areq 823 drivers/crypto/inside-secure/safexcel_hash.c return safexcel_ahash_finup(areq); areq 876 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_hmac_sha1_init(struct ahash_request *areq) areq 878 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); areq 879 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_req *req = ahash_request_ctx(areq); areq 898 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_hmac_sha1_digest(struct ahash_request *areq) areq 900 drivers/crypto/inside-secure/safexcel_hash.c int ret = safexcel_hmac_sha1_init(areq); areq 905 drivers/crypto/inside-secure/safexcel_hash.c return safexcel_ahash_finup(areq); areq 924 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_hmac_init_pad(struct ahash_request *areq, areq 940 drivers/crypto/inside-secure/safexcel_hash.c ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG, areq 943 drivers/crypto/inside-secure/safexcel_hash.c ahash_request_set_crypt(areq, &sg, ipad, keylen); areq 946 drivers/crypto/inside-secure/safexcel_hash.c ret = crypto_ahash_digest(areq); areq 959 drivers/crypto/inside-secure/safexcel_hash.c keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq)); areq 973 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_hmac_init_iv(struct ahash_request *areq, areq 981 drivers/crypto/inside-secure/safexcel_hash.c ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG, areq 984 drivers/crypto/inside-secure/safexcel_hash.c ahash_request_set_crypt(areq, &sg, pad, blocksize); areq 987 drivers/crypto/inside-secure/safexcel_hash.c ret = crypto_ahash_init(areq); areq 991 drivers/crypto/inside-secure/safexcel_hash.c req = ahash_request_ctx(areq); areq 995 drivers/crypto/inside-secure/safexcel_hash.c ret = crypto_ahash_update(areq); areq 1003 drivers/crypto/inside-secure/safexcel_hash.c return crypto_ahash_export(areq, state); areq 1009 drivers/crypto/inside-secure/safexcel_hash.c struct ahash_request *areq; areq 1019 drivers/crypto/inside-secure/safexcel_hash.c areq = ahash_request_alloc(tfm, GFP_KERNEL); areq 1020 drivers/crypto/inside-secure/safexcel_hash.c if (!areq) { areq 1036 drivers/crypto/inside-secure/safexcel_hash.c ret = safexcel_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad); areq 1040 drivers/crypto/inside-secure/safexcel_hash.c ret = safexcel_hmac_init_iv(areq, blocksize, ipad, istate); areq 1044 
drivers/crypto/inside-secure/safexcel_hash.c ret = safexcel_hmac_init_iv(areq, blocksize, opad, ostate); areq 1049 drivers/crypto/inside-secure/safexcel_hash.c ahash_request_free(areq); areq 1118 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_sha256_init(struct ahash_request *areq) areq 1120 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); areq 1121 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_req *req = ahash_request_ctx(areq); areq 1133 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_sha256_digest(struct ahash_request *areq) areq 1135 drivers/crypto/inside-secure/safexcel_hash.c int ret = safexcel_sha256_init(areq); areq 1140 drivers/crypto/inside-secure/safexcel_hash.c return safexcel_ahash_finup(areq); areq 1173 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_sha224_init(struct ahash_request *areq) areq 1175 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); areq 1176 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_req *req = ahash_request_ctx(areq); areq 1188 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_sha224_digest(struct ahash_request *areq) areq 1190 drivers/crypto/inside-secure/safexcel_hash.c int ret = safexcel_sha224_init(areq); areq 1195 drivers/crypto/inside-secure/safexcel_hash.c return safexcel_ahash_finup(areq); areq 1235 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_hmac_sha224_init(struct ahash_request *areq) areq 1237 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); areq 1238 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_req *req = ahash_request_ctx(areq); areq 1257 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_hmac_sha224_digest(struct ahash_request *areq) areq 1259 drivers/crypto/inside-secure/safexcel_hash.c int ret = safexcel_hmac_sha224_init(areq); areq 1264 drivers/crypto/inside-secure/safexcel_hash.c return safexcel_ahash_finup(areq); areq 1305 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_hmac_sha256_init(struct ahash_request *areq) areq 1307 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); areq 1308 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_req *req = ahash_request_ctx(areq); areq 1327 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_hmac_sha256_digest(struct ahash_request *areq) areq 1329 drivers/crypto/inside-secure/safexcel_hash.c int ret = safexcel_hmac_sha256_init(areq); areq 1334 drivers/crypto/inside-secure/safexcel_hash.c return safexcel_ahash_finup(areq); areq 1368 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_sha512_init(struct ahash_request *areq) areq 1370 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); areq 1371 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_req *req = ahash_request_ctx(areq); areq 1383 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_sha512_digest(struct ahash_request *areq) areq 1385 drivers/crypto/inside-secure/safexcel_hash.c int ret = safexcel_sha512_init(areq); areq 1390 drivers/crypto/inside-secure/safexcel_hash.c return safexcel_ahash_finup(areq); areq 1423 
drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_sha384_init(struct ahash_request *areq) areq 1425 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); areq 1426 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_req *req = ahash_request_ctx(areq); areq 1438 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_sha384_digest(struct ahash_request *areq) areq 1440 drivers/crypto/inside-secure/safexcel_hash.c int ret = safexcel_sha384_init(areq); areq 1445 drivers/crypto/inside-secure/safexcel_hash.c return safexcel_ahash_finup(areq); areq 1485 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_hmac_sha512_init(struct ahash_request *areq) areq 1487 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); areq 1488 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_req *req = ahash_request_ctx(areq); areq 1507 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_hmac_sha512_digest(struct ahash_request *areq) areq 1509 drivers/crypto/inside-secure/safexcel_hash.c int ret = safexcel_hmac_sha512_init(areq); areq 1514 drivers/crypto/inside-secure/safexcel_hash.c return safexcel_ahash_finup(areq); areq 1555 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_hmac_sha384_init(struct ahash_request *areq) areq 1557 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); areq 1558 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_req *req = ahash_request_ctx(areq); areq 1577 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_hmac_sha384_digest(struct ahash_request *areq) areq 1579 drivers/crypto/inside-secure/safexcel_hash.c int ret = safexcel_hmac_sha384_init(areq); areq 1584 drivers/crypto/inside-secure/safexcel_hash.c return safexcel_ahash_finup(areq); areq 1618 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_md5_init(struct ahash_request *areq) areq 1620 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); areq 1621 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_req *req = ahash_request_ctx(areq); areq 1633 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_md5_digest(struct ahash_request *areq) areq 1635 drivers/crypto/inside-secure/safexcel_hash.c int ret = safexcel_md5_init(areq); areq 1640 drivers/crypto/inside-secure/safexcel_hash.c return safexcel_ahash_finup(areq); areq 1673 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_hmac_md5_init(struct ahash_request *areq) areq 1675 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq)); areq 1676 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_req *req = ahash_request_ctx(areq); areq 1703 drivers/crypto/inside-secure/safexcel_hash.c static int safexcel_hmac_md5_digest(struct ahash_request *areq) areq 1705 drivers/crypto/inside-secure/safexcel_hash.c int ret = safexcel_hmac_md5_init(areq); areq 1710 drivers/crypto/inside-secure/safexcel_hash.c return safexcel_ahash_finup(areq); areq 267 drivers/crypto/mediatek/mtk-aes.c aes->areq->complete(aes->areq, err); areq 417 drivers/crypto/mediatek/mtk-aes.c struct ablkcipher_request *req = ablkcipher_request_cast(aes->areq); areq 513 
drivers/crypto/mediatek/mtk-aes.c struct crypto_async_request *areq, *backlog; areq 526 drivers/crypto/mediatek/mtk-aes.c areq = crypto_dequeue_request(&aes->queue); areq 527 drivers/crypto/mediatek/mtk-aes.c if (areq) areq 531 drivers/crypto/mediatek/mtk-aes.c if (!areq) areq 537 drivers/crypto/mediatek/mtk-aes.c ctx = crypto_tfm_ctx(areq->tfm); areq 541 drivers/crypto/mediatek/mtk-aes.c aes->areq = areq; areq 555 drivers/crypto/mediatek/mtk-aes.c struct ablkcipher_request *req = ablkcipher_request_cast(aes->areq); areq 574 drivers/crypto/mediatek/mtk-aes.c struct ablkcipher_request *req = ablkcipher_request_cast(aes->areq); areq 623 drivers/crypto/mediatek/mtk-aes.c struct ablkcipher_request *req = ablkcipher_request_cast(aes->areq); areq 881 drivers/crypto/mediatek/mtk-aes.c struct aead_request *req = aead_request_cast(aes->areq); areq 961 drivers/crypto/mediatek/mtk-aes.c struct aead_request *req = aead_request_cast(aes->areq); areq 148 drivers/crypto/mediatek/mtk-platform.h struct crypto_async_request *areq; areq 415 drivers/crypto/omap-aes.c void *areq) areq 417 drivers/crypto/omap-aes.c struct ablkcipher_request *req = container_of(areq, struct ablkcipher_request, base); areq 470 drivers/crypto/omap-aes.c void *areq) areq 472 drivers/crypto/omap-aes.c struct ablkcipher_request *req = container_of(areq, struct ablkcipher_request, base); areq 526 drivers/crypto/omap-des.c void *areq) areq 528 drivers/crypto/omap-des.c struct ablkcipher_request *req = container_of(areq, struct ablkcipher_request, base); areq 583 drivers/crypto/omap-des.c void *areq) areq 585 drivers/crypto/omap-des.c struct ablkcipher_request *req = container_of(areq, struct ablkcipher_request, base); areq 706 drivers/crypto/omap-des.c void *areq); areq 708 drivers/crypto/omap-des.c void *areq); areq 310 drivers/crypto/picoxcell_crypto.c static int spacc_aead_make_ddts(struct aead_request *areq) areq 312 drivers/crypto/picoxcell_crypto.c struct crypto_aead *aead = crypto_aead_reqtfm(areq); areq 313 drivers/crypto/picoxcell_crypto.c struct spacc_req *req = aead_request_ctx(areq); areq 321 drivers/crypto/picoxcell_crypto.c total = areq->assoclen + areq->cryptlen; areq 325 drivers/crypto/picoxcell_crypto.c src_nents = sg_nents_for_len(areq->src, total); areq 334 drivers/crypto/picoxcell_crypto.c if (areq->src != areq->dst) { areq 335 drivers/crypto/picoxcell_crypto.c dst_nents = sg_nents_for_len(areq->dst, total); areq 356 drivers/crypto/picoxcell_crypto.c src_ents = dma_map_sg(engine->dev, areq->src, src_nents, areq 361 drivers/crypto/picoxcell_crypto.c dst_ents = dma_map_sg(engine->dev, areq->dst, dst_nents, areq 365 drivers/crypto/picoxcell_crypto.c dma_unmap_sg(engine->dev, areq->src, src_nents, areq 370 drivers/crypto/picoxcell_crypto.c src_ents = dma_map_sg(engine->dev, areq->src, src_nents, areq 381 drivers/crypto/picoxcell_crypto.c for_each_sg(areq->src, cur, src_ents, i) areq 385 drivers/crypto/picoxcell_crypto.c total = req->is_encrypt ? 
0 : areq->assoclen; areq 386 drivers/crypto/picoxcell_crypto.c for_each_sg(areq->dst, cur, dst_ents, i) { areq 412 drivers/crypto/picoxcell_crypto.c struct aead_request *areq = container_of(req->req, struct aead_request, areq 414 drivers/crypto/picoxcell_crypto.c struct crypto_aead *aead = crypto_aead_reqtfm(areq); areq 415 drivers/crypto/picoxcell_crypto.c unsigned total = areq->assoclen + areq->cryptlen + areq 419 drivers/crypto/picoxcell_crypto.c int nents = sg_nents_for_len(areq->src, total); areq 427 drivers/crypto/picoxcell_crypto.c if (areq->src != areq->dst) { areq 428 drivers/crypto/picoxcell_crypto.c dma_unmap_sg(engine->dev, areq->src, nents, DMA_TO_DEVICE); areq 429 drivers/crypto/picoxcell_crypto.c nents = sg_nents_for_len(areq->dst, total); areq 434 drivers/crypto/picoxcell_crypto.c dma_unmap_sg(engine->dev, areq->dst, nents, DMA_FROM_DEVICE); areq 436 drivers/crypto/picoxcell_crypto.c dma_unmap_sg(engine->dev, areq->src, nents, DMA_BIDIRECTIONAL); areq 825 drivers/crypto/qat/qat_common/qat_algs.c struct aead_request *areq = qat_req->aead_req; areq 832 drivers/crypto/qat/qat_common/qat_algs.c areq->base.complete(&areq->base, res); areq 840 drivers/crypto/qat/qat_common/qat_algs.c struct ablkcipher_request *areq = qat_req->ablkcipher_req; areq 849 drivers/crypto/qat/qat_common/qat_algs.c memcpy(areq->info, qat_req->iv, AES_BLOCK_SIZE); areq 853 drivers/crypto/qat/qat_common/qat_algs.c areq->base.complete(&areq->base, res); areq 865 drivers/crypto/qat/qat_common/qat_algs.c static int qat_alg_aead_dec(struct aead_request *areq) areq 867 drivers/crypto/qat/qat_common/qat_algs.c struct crypto_aead *aead_tfm = crypto_aead_reqtfm(areq); areq 870 drivers/crypto/qat/qat_common/qat_algs.c struct qat_crypto_request *qat_req = aead_request_ctx(areq); areq 877 drivers/crypto/qat/qat_common/qat_algs.c ret = qat_alg_sgl_to_bufl(ctx->inst, areq->src, areq->dst, qat_req); areq 884 drivers/crypto/qat/qat_common/qat_algs.c qat_req->aead_req = areq; areq 890 drivers/crypto/qat/qat_common/qat_algs.c cipher_param->cipher_length = areq->cryptlen - digst_size; areq 891 drivers/crypto/qat/qat_common/qat_algs.c cipher_param->cipher_offset = areq->assoclen; areq 892 drivers/crypto/qat/qat_common/qat_algs.c memcpy(cipher_param->u.cipher_IV_array, areq->iv, AES_BLOCK_SIZE); areq 895 drivers/crypto/qat/qat_common/qat_algs.c auth_param->auth_len = areq->assoclen + cipher_param->cipher_length; areq 907 drivers/crypto/qat/qat_common/qat_algs.c static int qat_alg_aead_enc(struct aead_request *areq) areq 909 drivers/crypto/qat/qat_common/qat_algs.c struct crypto_aead *aead_tfm = crypto_aead_reqtfm(areq); areq 912 drivers/crypto/qat/qat_common/qat_algs.c struct qat_crypto_request *qat_req = aead_request_ctx(areq); areq 916 drivers/crypto/qat/qat_common/qat_algs.c uint8_t *iv = areq->iv; areq 919 drivers/crypto/qat/qat_common/qat_algs.c ret = qat_alg_sgl_to_bufl(ctx->inst, areq->src, areq->dst, qat_req); areq 926 drivers/crypto/qat/qat_common/qat_algs.c qat_req->aead_req = areq; areq 935 drivers/crypto/qat/qat_common/qat_algs.c cipher_param->cipher_length = areq->cryptlen; areq 936 drivers/crypto/qat/qat_common/qat_algs.c cipher_param->cipher_offset = areq->assoclen; areq 939 drivers/crypto/qat/qat_common/qat_algs.c auth_param->auth_len = areq->assoclen + areq->cryptlen; areq 180 drivers/crypto/qat/qat_common/qat_asym_algs.c } areq; areq 188 drivers/crypto/qat/qat_common/qat_asym_algs.c struct kpp_request *areq = req->areq.dh; areq 195 drivers/crypto/qat/qat_common/qat_asym_algs.c if (areq->src) { areq 204 
areq 206 drivers/crypto/qat/qat_common/qat_asym_algs.c scatterwalk_map_and_copy(req->dst_align, areq->dst, 0,
areq 207 drivers/crypto/qat/qat_common/qat_asym_algs.c areq->dst_len, 1);
areq 222 drivers/crypto/qat/qat_common/qat_asym_algs.c kpp_request_complete(areq, err);
areq 287 drivers/crypto/qat/qat_common/qat_asym_algs.c qat_req->areq.dh = req;
areq 556 drivers/crypto/qat/qat_common/qat_asym_algs.c struct akcipher_request *areq = req->areq.rsa;
areq 570 drivers/crypto/qat/qat_common/qat_asym_algs.c areq->dst_len = req->ctx.rsa->key_sz;
areq 572 drivers/crypto/qat/qat_common/qat_asym_algs.c scatterwalk_map_and_copy(req->dst_align, areq->dst, 0,
areq 573 drivers/crypto/qat/qat_common/qat_asym_algs.c areq->dst_len, 1);
areq 588 drivers/crypto/qat/qat_common/qat_asym_algs.c akcipher_request_complete(areq, err);
areq 594 drivers/crypto/qat/qat_common/qat_asym_algs.c struct qat_asym_request *areq = (void *)(__force long)resp->opaque;
areq 596 drivers/crypto/qat/qat_common/qat_asym_algs.c areq->cb(resp);
areq 713 drivers/crypto/qat/qat_common/qat_asym_algs.c qat_req->areq.rsa = req;
areq 849 drivers/crypto/qat/qat_common/qat_asym_algs.c qat_req->areq.rsa = req;
areq 141 drivers/crypto/stm32/stm32-cryp.c struct aead_request *areq;
areq 464 drivers/crypto/stm32/stm32-cryp.c return is_encrypt(cryp) ? cryp->areq->cryptlen :
areq 465 drivers/crypto/stm32/stm32-cryp.c cryp->areq->cryptlen - cryp->authsize;
areq 474 drivers/crypto/stm32/stm32-cryp.c memcpy(iv, cryp->areq->iv, 12);
areq 497 drivers/crypto/stm32/stm32-cryp.c memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
areq 507 drivers/crypto/stm32/stm32-cryp.c if (cryp->areq->assoclen)
areq 604 drivers/crypto/stm32/stm32-cryp.c if (cryp->areq->assoclen) {
areq 668 drivers/crypto/stm32/stm32-cryp.c crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
areq 684 drivers/crypto/stm32/stm32-cryp.c static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq);
areq 686 drivers/crypto/stm32/stm32-cryp.c void *areq);
areq 700 drivers/crypto/stm32/stm32-cryp.c static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq);
areq 702 drivers/crypto/stm32/stm32-cryp.c void *areq);
areq 912 drivers/crypto/stm32/stm32-cryp.c struct aead_request *areq)
areq 919 drivers/crypto/stm32/stm32-cryp.c if (!req && !areq)
areq 923 drivers/crypto/stm32/stm32-cryp.c crypto_aead_ctx(crypto_aead_reqtfm(areq));
areq 930 drivers/crypto/stm32/stm32-cryp.c rctx = req ? ablkcipher_request_ctx(req) : aead_request_ctx(areq);
areq 941 drivers/crypto/stm32/stm32-cryp.c cryp->areq = NULL;
areq 966 drivers/crypto/stm32/stm32-cryp.c cryp->areq = areq;
areq 968 drivers/crypto/stm32/stm32-cryp.c cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
areq 969 drivers/crypto/stm32/stm32-cryp.c cryp->total_in = areq->assoclen + areq->cryptlen;
areq 981 drivers/crypto/stm32/stm32-cryp.c cryp->in_sg = req ? req->src : areq->src;
areq 982 drivers/crypto/stm32/stm32-cryp.c cryp->out_sg = req ? req->dst : areq->dst;
areq 1008 drivers/crypto/stm32/stm32-cryp.c scatterwalk_advance(&cryp->out_walk, cryp->areq->assoclen);
areq 1009 drivers/crypto/stm32/stm32-cryp.c cryp->total_out -= cryp->areq->assoclen;
areq 1017 drivers/crypto/stm32/stm32-cryp.c void *areq)
areq 1019 drivers/crypto/stm32/stm32-cryp.c struct ablkcipher_request *req = container_of(areq,
areq 1026 drivers/crypto/stm32/stm32-cryp.c static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq)
areq 1028 drivers/crypto/stm32/stm32-cryp.c struct ablkcipher_request *req = container_of(areq,
areq 1041 drivers/crypto/stm32/stm32-cryp.c static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine, void *areq)
areq 1043 drivers/crypto/stm32/stm32-cryp.c struct aead_request *req = container_of(areq, struct aead_request,
areq 1049 drivers/crypto/stm32/stm32-cryp.c static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq)
areq 1051 drivers/crypto/stm32/stm32-cryp.c struct aead_request *req = container_of(areq, struct aead_request,
areq 1059 drivers/crypto/stm32/stm32-cryp.c if (unlikely(!cryp->areq->assoclen &&
areq 1120 drivers/crypto/stm32/stm32-cryp.c size_bit = cryp->areq->assoclen * 8;
areq 1127 drivers/crypto/stm32/stm32-cryp.c size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
areq 1128 drivers/crypto/stm32/stm32-cryp.c cryp->areq->cryptlen - AES_BLOCK_SIZE;
areq 1140 drivers/crypto/stm32/stm32-cryp.c memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
areq 1545 drivers/crypto/stm32/stm32-cryp.c cryp->areq->assoclen) {
areq 1589 drivers/crypto/stm32/stm32-cryp.c alen = cryp->areq->assoclen;
areq 1592 drivers/crypto/stm32/stm32-cryp.c if (cryp->areq->assoclen <= 65280) {
areq 826 drivers/crypto/stm32/stm32-hash.c static int stm32_hash_one_request(struct crypto_engine *engine, void *areq);
areq 827 drivers/crypto/stm32/stm32-hash.c static int stm32_hash_prepare_req(struct crypto_engine *engine, void *areq);
areq 835 drivers/crypto/stm32/stm32-hash.c static int stm32_hash_prepare_req(struct crypto_engine *engine, void *areq)
areq 837 drivers/crypto/stm32/stm32-hash.c struct ahash_request *req = container_of(areq, struct ahash_request,
areq 856 drivers/crypto/stm32/stm32-hash.c static int stm32_hash_one_request(struct crypto_engine *engine, void *areq)
areq 858 drivers/crypto/stm32/stm32-hash.c struct ahash_request *req = container_of(areq, struct ahash_request,
areq 15 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c static int noinline_for_stack sun4i_ss_opti_poll(struct skcipher_request *areq)
areq 17 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
areq 21 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *ctx = skcipher_request_ctx(areq);
areq 30 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c unsigned int ileft = areq->cryptlen;
areq 31 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c unsigned int oleft = areq->cryptlen;
areq 37 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c if (!areq->cryptlen)
areq 40 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c if (!areq->src || !areq->dst) {
areq 50 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c if (areq->iv) {
areq 52 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c v = *(u32 *)(areq->iv + i * 4);
areq 58 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c sg_miter_start(&mi, areq->src, sg_nents(areq->src),
areq 60 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c sg_miter_start(&mo, areq->dst, sg_nents(areq->dst),
areq 70 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c ileft = areq->cryptlen / 4;
areq 71 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c oleft = areq->cryptlen / 4;
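The omap-aes, omap-des, stm32-cryp and stm32-hash entries show the crypto_engine convention: the engine hands the driver an opaque void *areq, and the driver recovers the typed request with container_of() on the embedded base member. A sketch of a do_one_request-style handler under that assumption; the demo_* names are invented, while crypto_finalize_hash_request() is the engine's real completion helper:

/*
 * Sketch only: recovering the typed request from the opaque void *areq
 * passed by crypto_engine, as the container_of() references above do.
 */
#include <crypto/engine.h>
#include <crypto/hash.h>
#include <linux/kernel.h>

static int demo_hash_one_request(struct crypto_engine *engine, void *areq)
{
        /* areq points at req->base, so container_of() yields the request. */
        struct ahash_request *req = container_of(areq, struct ahash_request,
                                                 base);
        struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);

        pr_debug("%s: digestsize=%u nbytes=%u\n", __func__,
                 crypto_ahash_digestsize(tfm), req->nbytes);

        /* ... program the hardware with req->src / req->nbytes here ... */

        /* Report completion back to the engine (normally from the IRQ path). */
        crypto_finalize_hash_request(engine, req, 0);
        return 0;
}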
areq 104 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c if (areq->iv) {
areq 107 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c *(u32 *)(areq->iv + i * 4) = v;
areq 120 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c static int noinline_for_stack sun4i_ss_cipher_poll_fallback(struct skcipher_request *areq)
areq 122 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
areq 124 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *ctx = skcipher_request_ctx(areq);
areq 129 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c skcipher_request_set_callback(subreq, areq->base.flags, NULL,
areq 131 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c skcipher_request_set_crypt(subreq, areq->src, areq->dst,
areq 132 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c areq->cryptlen, areq->iv);
areq 143 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c static int sun4i_ss_cipher_poll(struct skcipher_request *areq)
areq 145 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
areq 149 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct scatterlist *in_sg = areq->src;
areq 150 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct scatterlist *out_sg = areq->dst;
areq 152 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *ctx = skcipher_request_ctx(areq);
areq 163 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c unsigned int ileft = areq->cryptlen;
areq 164 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c unsigned int oleft = areq->cryptlen;
areq 174 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c if (!areq->cryptlen)
areq 177 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c if (!areq->src || !areq->dst) {
areq 183 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c if (areq->cryptlen % algt->alg.crypto.base.cra_blocksize)
areq 202 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c return sun4i_ss_opti_poll(areq);
areq 205 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c return sun4i_ss_cipher_poll_fallback(areq);
areq 212 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c if (areq->iv) {
areq 214 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c v = *(u32 *)(areq->iv + i * 4);
areq 220 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c sg_miter_start(&mi, areq->src, sg_nents(areq->src),
areq 222 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c sg_miter_start(&mo, areq->dst, sg_nents(areq->dst),
areq 231 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c ileft = areq->cryptlen;
areq 232 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c oleft = areq->cryptlen;
areq 283 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c oi, mi.length, ileft, areq->cryptlen, rx_cnt,
areq 284 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c oo, mo.length, oleft, areq->cryptlen, tx_cnt, ob);
areq 330 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c if (areq->iv) {
areq 333 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c *(u32 *)(areq->iv + i * 4) = v;
areq 347 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c int sun4i_ss_cbc_aes_encrypt(struct skcipher_request *areq)
areq 349 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
areq 351 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
areq 355 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c return sun4i_ss_cipher_poll(areq);
areq 358 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c int sun4i_ss_cbc_aes_decrypt(struct skcipher_request *areq)
areq 360 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
areq 362 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
areq 366 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c return sun4i_ss_cipher_poll(areq);
areq 370 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c int sun4i_ss_ecb_aes_encrypt(struct skcipher_request *areq)
areq 372 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
areq 374 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
areq 378 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c return sun4i_ss_cipher_poll(areq);
areq 381 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c int sun4i_ss_ecb_aes_decrypt(struct skcipher_request *areq)
areq 383 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
areq 385 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
areq 389 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c return sun4i_ss_cipher_poll(areq);
areq 393 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c int sun4i_ss_cbc_des_encrypt(struct skcipher_request *areq)
areq 395 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
areq 397 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
areq 401 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c return sun4i_ss_cipher_poll(areq);
areq 404 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c int sun4i_ss_cbc_des_decrypt(struct skcipher_request *areq)
areq 406 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
areq 408 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
areq 412 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c return sun4i_ss_cipher_poll(areq);
areq 416 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c int sun4i_ss_ecb_des_encrypt(struct skcipher_request *areq)
areq 418 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
areq 420 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
areq 424 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c return sun4i_ss_cipher_poll(areq);
areq 427 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c int sun4i_ss_ecb_des_decrypt(struct skcipher_request *areq)
areq 429 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
areq 431 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
areq 435 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c return sun4i_ss_cipher_poll(areq);
areq 439 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c int sun4i_ss_cbc_des3_encrypt(struct skcipher_request *areq)
areq 441 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
areq 443 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
areq 447 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c return sun4i_ss_cipher_poll(areq);
areq 450 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c int sun4i_ss_cbc_des3_decrypt(struct skcipher_request *areq)
areq 452 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
areq 454 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
areq 458 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c return sun4i_ss_cipher_poll(areq);
areq 462 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c int sun4i_ss_ecb_des3_encrypt(struct skcipher_request *areq)
areq 464 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
areq 466 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
areq 470 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c return sun4i_ss_cipher_poll(areq);
areq 473 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c int sun4i_ss_ecb_des3_decrypt(struct skcipher_request *areq)
areq 475 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
areq 477 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
areq 481 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c return sun4i_ss_cipher_poll(areq);
areq 34 drivers/crypto/sunxi-ss/sun4i-ss-hash.c int sun4i_hash_init(struct ahash_request *areq)
areq 36 drivers/crypto/sunxi-ss/sun4i-ss-hash.c struct sun4i_req_ctx *op = ahash_request_ctx(areq);
areq 37 drivers/crypto/sunxi-ss/sun4i-ss-hash.c struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
areq 49 drivers/crypto/sunxi-ss/sun4i-ss-hash.c int sun4i_hash_export_md5(struct ahash_request *areq, void *out)
areq 51 drivers/crypto/sunxi-ss/sun4i-ss-hash.c struct sun4i_req_ctx *op = ahash_request_ctx(areq);
areq 72 drivers/crypto/sunxi-ss/sun4i-ss-hash.c int sun4i_hash_import_md5(struct ahash_request *areq, const void *in)
areq 74 drivers/crypto/sunxi-ss/sun4i-ss-hash.c struct sun4i_req_ctx *op = ahash_request_ctx(areq);
areq 78 drivers/crypto/sunxi-ss/sun4i-ss-hash.c sun4i_hash_init(areq);
areq 91 drivers/crypto/sunxi-ss/sun4i-ss-hash.c int sun4i_hash_export_sha1(struct ahash_request *areq, void *out)
areq 93 drivers/crypto/sunxi-ss/sun4i-ss-hash.c struct sun4i_req_ctx *op = ahash_request_ctx(areq);
areq 115 drivers/crypto/sunxi-ss/sun4i-ss-hash.c int sun4i_hash_import_sha1(struct ahash_request *areq, const void *in)
areq 117 drivers/crypto/sunxi-ss/sun4i-ss-hash.c struct sun4i_req_ctx *op = ahash_request_ctx(areq);
areq 121 drivers/crypto/sunxi-ss/sun4i-ss-hash.c sun4i_hash_init(areq);
areq 164 drivers/crypto/sunxi-ss/sun4i-ss-hash.c static int sun4i_hash(struct ahash_request *areq)
areq 179 drivers/crypto/sunxi-ss/sun4i-ss-hash.c struct sun4i_req_ctx *op = ahash_request_ctx(areq);
areq 180 drivers/crypto/sunxi-ss/sun4i-ss-hash.c struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
areq 183 drivers/crypto/sunxi-ss/sun4i-ss-hash.c struct scatterlist *in_sg = areq->src;
areq 190 drivers/crypto/sunxi-ss/sun4i-ss-hash.c __func__, crypto_tfm_alg_name(areq->base.tfm),
areq 191 drivers/crypto/sunxi-ss/sun4i-ss-hash.c op->byte_count, areq->nbytes, op->mode,
areq 194 drivers/crypto/sunxi-ss/sun4i-ss-hash.c if (unlikely(!areq->nbytes) && !(op->flags & SS_HASH_FINAL))
areq 198 drivers/crypto/sunxi-ss/sun4i-ss-hash.c if (unlikely(areq->nbytes > UINT_MAX - op->len)) {
areq 203 drivers/crypto/sunxi-ss/sun4i-ss-hash.c if (op->len + areq->nbytes < 64 && !(op->flags & SS_HASH_FINAL)) {
areq 205 drivers/crypto/sunxi-ss/sun4i-ss-hash.c copied = sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
areq 206 drivers/crypto/sunxi-ss/sun4i-ss-hash.c op->buf + op->len, areq->nbytes, 0);
areq 230 drivers/crypto/sunxi-ss/sun4i-ss-hash.c end = ((areq->nbytes + op->len) / 64) * 64 - op->len;
areq 232 drivers/crypto/sunxi-ss/sun4i-ss-hash.c if (end > areq->nbytes || areq->nbytes - end > 63) {
areq 234 drivers/crypto/sunxi-ss/sun4i-ss-hash.c end, areq->nbytes);
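The sun4i_ss_cipher_poll_fallback() entries above show the usual software-fallback shape: mirror src, dst, cryptlen and iv onto a sub-request for a fallback skcipher and run that instead. A sketch of the idea with an invented demo_* context; the real driver's context layout, allocation strategy and flags handling may differ:

/*
 * Sketch only: forwarding a request to a fallback skcipher. Assumes
 * process context (GFP_KERNEL) and a fallback allocated elsewhere with
 * CRYPTO_ALG_NEED_FALLBACK.
 */
#include <crypto/internal/skcipher.h>
#include <linux/slab.h>

struct demo_cipher_ctx {
        struct crypto_skcipher *fallback;
};

static int demo_cipher_fallback(struct skcipher_request *areq, bool enc)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
        struct demo_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_request *subreq;
        int err;

        subreq = skcipher_request_alloc(ctx->fallback, GFP_KERNEL);
        if (!subreq)
                return -ENOMEM;

        /* Mirror the original request onto the fallback transform. */
        skcipher_request_set_callback(subreq, areq->base.flags, NULL, NULL);
        skcipher_request_set_crypt(subreq, areq->src, areq->dst,
                                   areq->cryptlen, areq->iv);

        err = enc ? crypto_skcipher_encrypt(subreq) :
                    crypto_skcipher_decrypt(subreq);

        skcipher_request_free(subreq);
        return err;
}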
areq 240 drivers/crypto/sunxi-ss/sun4i-ss-hash.c if (areq->nbytes < 4)
areq 243 drivers/crypto/sunxi-ss/sun4i-ss-hash.c end = ((areq->nbytes + op->len) / 4) * 4 - op->len;
areq 253 drivers/crypto/sunxi-ss/sun4i-ss-hash.c if (i == 1 && !op->len && areq->nbytes)
areq 257 drivers/crypto/sunxi-ss/sun4i-ss-hash.c sg_miter_start(&mi, areq->src, sg_nents(areq->src),
areq 297 drivers/crypto/sunxi-ss/sun4i-ss-hash.c in_r = min_t(size_t, mi.length - in_i, areq->nbytes - i);
areq 321 drivers/crypto/sunxi-ss/sun4i-ss-hash.c if ((areq->nbytes - i) < 64) {
areq 322 drivers/crypto/sunxi-ss/sun4i-ss-hash.c while (i < areq->nbytes && in_i < mi.length && op->len < 64) {
areq 324 drivers/crypto/sunxi-ss/sun4i-ss-hash.c in_r = min(areq->nbytes - i, 64 - op->len);
areq 356 drivers/crypto/sunxi-ss/sun4i-ss-hash.c i, SS_TIMEOUT, v, areq->nbytes);
areq 453 drivers/crypto/sunxi-ss/sun4i-ss-hash.c i, SS_TIMEOUT, v, areq->nbytes);
areq 471 drivers/crypto/sunxi-ss/sun4i-ss-hash.c memcpy(areq->result + i * 4, &v, 4);
areq 476 drivers/crypto/sunxi-ss/sun4i-ss-hash.c memcpy(areq->result + i * 4, &v, 4);
areq 486 drivers/crypto/sunxi-ss/sun4i-ss-hash.c int sun4i_hash_final(struct ahash_request *areq)
areq 488 drivers/crypto/sunxi-ss/sun4i-ss-hash.c struct sun4i_req_ctx *op = ahash_request_ctx(areq);
areq 491 drivers/crypto/sunxi-ss/sun4i-ss-hash.c return sun4i_hash(areq);
areq 494 drivers/crypto/sunxi-ss/sun4i-ss-hash.c int sun4i_hash_update(struct ahash_request *areq)
areq 496 drivers/crypto/sunxi-ss/sun4i-ss-hash.c struct sun4i_req_ctx *op = ahash_request_ctx(areq);
areq 499 drivers/crypto/sunxi-ss/sun4i-ss-hash.c return sun4i_hash(areq);
areq 503 drivers/crypto/sunxi-ss/sun4i-ss-hash.c int sun4i_hash_finup(struct ahash_request *areq)
areq 505 drivers/crypto/sunxi-ss/sun4i-ss-hash.c struct sun4i_req_ctx *op = ahash_request_ctx(areq);
areq 508 drivers/crypto/sunxi-ss/sun4i-ss-hash.c return sun4i_hash(areq);
areq 512 drivers/crypto/sunxi-ss/sun4i-ss-hash.c int sun4i_hash_digest(struct ahash_request *areq)
areq 515 drivers/crypto/sunxi-ss/sun4i-ss-hash.c struct sun4i_req_ctx *op = ahash_request_ctx(areq);
areq 517 drivers/crypto/sunxi-ss/sun4i-ss-hash.c err = sun4i_hash_init(areq);
areq 522 drivers/crypto/sunxi-ss/sun4i-ss-hash.c return sun4i_hash(areq);
areq 180 drivers/crypto/sunxi-ss/sun4i-ss.h int sun4i_hash_init(struct ahash_request *areq);
areq 181 drivers/crypto/sunxi-ss/sun4i-ss.h int sun4i_hash_update(struct ahash_request *areq);
areq 182 drivers/crypto/sunxi-ss/sun4i-ss.h int sun4i_hash_final(struct ahash_request *areq);
areq 183 drivers/crypto/sunxi-ss/sun4i-ss.h int sun4i_hash_finup(struct ahash_request *areq);
areq 184 drivers/crypto/sunxi-ss/sun4i-ss.h int sun4i_hash_digest(struct ahash_request *areq);
areq 185 drivers/crypto/sunxi-ss/sun4i-ss.h int sun4i_hash_export_md5(struct ahash_request *areq, void *out);
areq 186 drivers/crypto/sunxi-ss/sun4i-ss.h int sun4i_hash_import_md5(struct ahash_request *areq, const void *in);
areq 187 drivers/crypto/sunxi-ss/sun4i-ss.h int sun4i_hash_export_sha1(struct ahash_request *areq, void *out);
areq 188 drivers/crypto/sunxi-ss/sun4i-ss.h int sun4i_hash_import_sha1(struct ahash_request *areq, const void *in);
areq 190 drivers/crypto/sunxi-ss/sun4i-ss.h int sun4i_ss_cbc_aes_encrypt(struct skcipher_request *areq);
areq 191 drivers/crypto/sunxi-ss/sun4i-ss.h int sun4i_ss_cbc_aes_decrypt(struct skcipher_request *areq);
areq 192 drivers/crypto/sunxi-ss/sun4i-ss.h int sun4i_ss_ecb_aes_encrypt(struct skcipher_request *areq);
areq 193 drivers/crypto/sunxi-ss/sun4i-ss.h int sun4i_ss_ecb_aes_decrypt(struct skcipher_request *areq);
areq 195 drivers/crypto/sunxi-ss/sun4i-ss.h int sun4i_ss_cbc_des_encrypt(struct skcipher_request *areq);
areq 196 drivers/crypto/sunxi-ss/sun4i-ss.h int sun4i_ss_cbc_des_decrypt(struct skcipher_request *areq);
areq 197 drivers/crypto/sunxi-ss/sun4i-ss.h int sun4i_ss_ecb_des_encrypt(struct skcipher_request *areq);
areq 198 drivers/crypto/sunxi-ss/sun4i-ss.h int sun4i_ss_ecb_des_decrypt(struct skcipher_request *areq);
areq 200 drivers/crypto/sunxi-ss/sun4i-ss.h int sun4i_ss_cbc_des3_encrypt(struct skcipher_request *areq);
areq 201 drivers/crypto/sunxi-ss/sun4i-ss.h int sun4i_ss_cbc_des3_decrypt(struct skcipher_request *areq);
areq 202 drivers/crypto/sunxi-ss/sun4i-ss.h int sun4i_ss_ecb_des3_encrypt(struct skcipher_request *areq);
areq 203 drivers/crypto/sunxi-ss/sun4i-ss.h int sun4i_ss_ecb_des3_decrypt(struct skcipher_request *areq);
areq 993 drivers/crypto/talitos.c struct aead_request *areq, bool encrypt)
areq 995 drivers/crypto/talitos.c struct crypto_aead *aead = crypto_aead_reqtfm(areq);
areq 999 drivers/crypto/talitos.c unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
areq 1008 drivers/crypto/talitos.c talitos_sg_unmap(dev, edesc, areq->src, areq->dst,
areq 1009 drivers/crypto/talitos.c cryptlen + authsize, areq->assoclen);
areq 1018 drivers/crypto/talitos.c sg_pcopy_to_buffer(areq->dst, dst_nents, ctx->iv, ivsize,
areq 1019 drivers/crypto/talitos.c areq->assoclen + cryptlen - ivsize);
areq 1030 drivers/crypto/talitos.c struct aead_request *areq = context;
areq 1031 drivers/crypto/talitos.c struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
areq 1037 drivers/crypto/talitos.c ipsec_esp_unmap(dev, edesc, areq, true);
areq 1043 drivers/crypto/talitos.c aead_request_complete(areq, err);
areq 1195 drivers/crypto/talitos.c static int ipsec_esp(struct talitos_edesc *edesc, struct aead_request *areq,
areq 1201 drivers/crypto/talitos.c struct crypto_aead *aead = crypto_aead_reqtfm(areq);
areq 1206 drivers/crypto/talitos.c unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
areq 1224 drivers/crypto/talitos.c sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
areq 1225 drivers/crypto/talitos.c areq->assoclen + cryptlen);
areq 1227 drivers/crypto/talitos.c sg_count = dma_map_sg(dev, areq->src, sg_count,
areq 1228 drivers/crypto/talitos.c (areq->src == areq->dst) ?
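The sun4i-ss-hash.c entries show update, final, finup and digest all funnelling into a single worker (sun4i_hash()), differing only in flags kept in the per-request context from ahash_request_ctx(). A sketch of that entry-point layering; the flag values and demo_* names are invented, not the driver's:

/*
 * Sketch only: ahash entry points that share one worker and record
 * what was requested in per-request flags.
 */
#include <crypto/internal/hash.h>

#define DEMO_HASH_UPDATE        0x1
#define DEMO_HASH_FINAL         0x2

struct demo_hash_req_ctx {
        unsigned long flags;
};

static int demo_hash(struct ahash_request *areq)
{
        /* ... feed areq->src / areq->nbytes to the engine, honour flags ... */
        return 0;
}

static int demo_hash_update(struct ahash_request *areq)
{
        struct demo_hash_req_ctx *op = ahash_request_ctx(areq);

        op->flags |= DEMO_HASH_UPDATE;
        return demo_hash(areq);
}

static int demo_hash_final(struct ahash_request *areq)
{
        struct demo_hash_req_ctx *op = ahash_request_ctx(areq);

        op->flags |= DEMO_HASH_FINAL;
        return demo_hash(areq);
}

static int demo_hash_finup(struct ahash_request *areq)
{
        struct demo_hash_req_ctx *op = ahash_request_ctx(areq);

        op->flags |= DEMO_HASH_UPDATE | DEMO_HASH_FINAL;
        return demo_hash(areq);
}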
areq 1232 drivers/crypto/talitos.c ret = talitos_sg_map(dev, areq->src, areq->assoclen, edesc,
areq 1256 drivers/crypto/talitos.c ret = talitos_sg_map_ext(dev, areq->src, cryptlen, edesc, &desc->ptr[4],
areq 1257 drivers/crypto/talitos.c sg_count, areq->assoclen, tbl_off, elen,
areq 1266 drivers/crypto/talitos.c if (areq->src != areq->dst) {
areq 1269 drivers/crypto/talitos.c dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
areq 1276 drivers/crypto/talitos.c ret = talitos_sg_map_ext(dev, areq->dst, cryptlen, edesc, &desc->ptr[5],
areq 1277 drivers/crypto/talitos.c sg_count, areq->assoclen, tbl_off, elen,
areq 1296 drivers/crypto/talitos.c talitos_sg_map(dev, areq->dst, authsize, edesc, &desc->ptr[6],
areq 1297 drivers/crypto/talitos.c sg_count, areq->assoclen + cryptlen, tbl_off);
areq 1310 drivers/crypto/talitos.c ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
areq 1312 drivers/crypto/talitos.c ipsec_esp_unmap(dev, edesc, areq, encrypt);
areq 1419 drivers/crypto/talitos.c static struct talitos_edesc *aead_edesc_alloc(struct aead_request *areq, u8 *iv,
areq 1422 drivers/crypto/talitos.c struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
areq 1426 drivers/crypto/talitos.c unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
areq 1428 drivers/crypto/talitos.c return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
areq 1429 drivers/crypto/talitos.c iv, areq->assoclen, cryptlen,
areq 1431 drivers/crypto/talitos.c areq->base.flags, encrypt);
areq 1538 drivers/crypto/talitos.c struct ablkcipher_request *areq)
areq 1542 drivers/crypto/talitos.c talitos_sg_unmap(dev, edesc, areq->src, areq->dst, areq->nbytes, 0);
areq 1554 drivers/crypto/talitos.c struct ablkcipher_request *areq = context;
areq 1555 drivers/crypto/talitos.c struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
areq 1562 drivers/crypto/talitos.c common_nonsnoop_unmap(dev, edesc, areq);
areq 1563 drivers/crypto/talitos.c memcpy(areq->info, ctx->iv, ivsize);
areq 1567 drivers/crypto/talitos.c areq->base.complete(&areq->base, err);
areq 1571 drivers/crypto/talitos.c struct ablkcipher_request *areq,
areq 1576 drivers/crypto/talitos.c struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
areq 1580 drivers/crypto/talitos.c unsigned int cryptlen = areq->nbytes;
areq 1597 drivers/crypto/talitos.c sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
areq 1600 drivers/crypto/talitos.c sg_count = dma_map_sg(dev, areq->src, sg_count,
areq 1601 drivers/crypto/talitos.c (areq->src == areq->dst) ?
areq 1606 drivers/crypto/talitos.c sg_count = talitos_sg_map(dev, areq->src, cryptlen, edesc,
areq 1612 drivers/crypto/talitos.c if (areq->src != areq->dst) {
areq 1615 drivers/crypto/talitos.c dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
areq 1618 drivers/crypto/talitos.c ret = talitos_sg_map(dev, areq->dst, cryptlen, edesc, &desc->ptr[4],
areq 1633 drivers/crypto/talitos.c ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
areq 1635 drivers/crypto/talitos.c common_nonsnoop_unmap(dev, edesc, areq);
areq 1642 drivers/crypto/talitos.c areq, bool encrypt)
areq 1644 drivers/crypto/talitos.c struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
areq 1648 drivers/crypto/talitos.c return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
areq 1649 drivers/crypto/talitos.c areq->info, 0, areq->nbytes, 0, ivsize, 0,
areq 1650 drivers/crypto/talitos.c areq->base.flags, encrypt);
areq 1653 drivers/crypto/talitos.c static int ablkcipher_encrypt(struct ablkcipher_request *areq)
areq 1655 drivers/crypto/talitos.c struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
areq 1661 drivers/crypto/talitos.c if (!areq->nbytes)
areq 1664 drivers/crypto/talitos.c if (areq->nbytes % blocksize)
areq 1668 drivers/crypto/talitos.c edesc = ablkcipher_edesc_alloc(areq, true);
areq 1675 drivers/crypto/talitos.c return common_nonsnoop(edesc, areq, ablkcipher_done);
areq 1678 drivers/crypto/talitos.c static int ablkcipher_decrypt(struct ablkcipher_request *areq)
areq 1680 drivers/crypto/talitos.c struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
areq 1686 drivers/crypto/talitos.c if (!areq->nbytes)
areq 1689 drivers/crypto/talitos.c if (areq->nbytes % blocksize)
areq 1693 drivers/crypto/talitos.c edesc = ablkcipher_edesc_alloc(areq, false);
areq 1699 drivers/crypto/talitos.c return common_nonsnoop(edesc, areq, ablkcipher_done);
areq 1704 drivers/crypto/talitos.c struct ahash_request *areq)
areq 1706 drivers/crypto/talitos.c struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
areq 1746 drivers/crypto/talitos.c struct ahash_request *areq = context;
areq 1749 drivers/crypto/talitos.c struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
areq 1756 drivers/crypto/talitos.c common_nonsnoop_hash_unmap(dev, edesc, areq);
areq 1760 drivers/crypto/talitos.c areq->base.complete(&areq->base, err);
areq 1785 drivers/crypto/talitos.c struct ahash_request *areq, unsigned int length,
areq 1790 drivers/crypto/talitos.c struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
areq 1792 drivers/crypto/talitos.c struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
areq 1848 drivers/crypto/talitos.c areq->result, DMA_FROM_DEVICE);
areq 1902 drivers/crypto/talitos.c ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
areq 1904 drivers/crypto/talitos.c common_nonsnoop_hash_unmap(dev, edesc, areq);
areq 1910 drivers/crypto/talitos.c static struct talitos_edesc *ahash_edesc_alloc(struct ahash_request *areq,
areq 1913 drivers/crypto/talitos.c struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
areq 1915 drivers/crypto/talitos.c struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
areq 1923 drivers/crypto/talitos.c nbytes, 0, 0, 0, areq->base.flags, false);
areq 1926 drivers/crypto/talitos.c static int ahash_init(struct ahash_request *areq)
areq 1928 drivers/crypto/talitos.c struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
areq 1931 drivers/crypto/talitos.c struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
areq 1956 drivers/crypto/talitos.c static int ahash_init_sha224_swinit(struct ahash_request *areq)
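A detail that recurs in the talitos (and earlier picoxcell) entries is the in-place test areq->src == areq->dst: one scatterlist mapped DMA_BIDIRECTIONAL when source and destination coincide, separate DMA_TO_DEVICE / DMA_FROM_DEVICE mappings otherwise. A sketch of such a mapping helper with invented demo_* names and simplified error handling:

/*
 * Sketch only: DMA-mapping an AEAD request's scatterlists, choosing the
 * direction from whether the operation is in place.
 */
#include <crypto/aead.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int demo_map_aead_sgs(struct device *dev, struct aead_request *areq,
                             int src_nents, int dst_nents)
{
        if (areq->src == areq->dst) {
                /* In place: a single bidirectional mapping is enough. */
                if (!dma_map_sg(dev, areq->src, src_nents, DMA_BIDIRECTIONAL))
                        return -ENOMEM;
                return 0;
        }

        if (!dma_map_sg(dev, areq->src, src_nents, DMA_TO_DEVICE))
                return -ENOMEM;

        if (!dma_map_sg(dev, areq->dst, dst_nents, DMA_FROM_DEVICE)) {
                dma_unmap_sg(dev, areq->src, src_nents, DMA_TO_DEVICE);
                return -ENOMEM;
        }

        return 0;
}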
areq 1958 drivers/crypto/talitos.c struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
areq 1973 drivers/crypto/talitos.c ahash_init(areq);
areq 1979 drivers/crypto/talitos.c static int ahash_process_req(struct ahash_request *areq, unsigned int nbytes)
areq 1981 drivers/crypto/talitos.c struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
areq 1983 drivers/crypto/talitos.c struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
areq 1998 drivers/crypto/talitos.c nents = sg_nents_for_len(areq->src, nbytes);
areq 2003 drivers/crypto/talitos.c sg_copy_to_buffer(areq->src, nents,
areq 2030 drivers/crypto/talitos.c sg_chain(req_ctx->bufsl, 2, areq->src);
areq 2039 drivers/crypto/talitos.c nents = sg_nents_for_len(areq->src, offset);
areq 2044 drivers/crypto/talitos.c sg_copy_to_buffer(areq->src, nents,
areq 2047 drivers/crypto/talitos.c req_ctx->psrc = scatterwalk_ffwd(req_ctx->bufsl, areq->src,
areq 2050 drivers/crypto/talitos.c req_ctx->psrc = areq->src;
areq 2053 drivers/crypto/talitos.c nents = sg_nents_for_len(areq->src, nbytes);
areq 2058 drivers/crypto/talitos.c sg_pcopy_to_buffer(areq->src, nents,
areq 2066 drivers/crypto/talitos.c edesc = ahash_edesc_alloc(areq, nbytes_to_hash);
areq 2088 drivers/crypto/talitos.c return common_nonsnoop_hash(edesc, areq, nbytes_to_hash, ahash_done);
areq 2091 drivers/crypto/talitos.c static int ahash_update(struct ahash_request *areq)
areq 2093 drivers/crypto/talitos.c struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
areq 2097 drivers/crypto/talitos.c return ahash_process_req(areq, areq->nbytes);
areq 2100 drivers/crypto/talitos.c static int ahash_final(struct ahash_request *areq)
areq 2102 drivers/crypto/talitos.c struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
areq 2106 drivers/crypto/talitos.c return ahash_process_req(areq, 0);
areq 2109 drivers/crypto/talitos.c static int ahash_finup(struct ahash_request *areq)
areq 2111 drivers/crypto/talitos.c struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
areq 2115 drivers/crypto/talitos.c return ahash_process_req(areq, areq->nbytes);
areq 2118 drivers/crypto/talitos.c static int ahash_digest(struct ahash_request *areq)
areq 2120 drivers/crypto/talitos.c struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
areq 2121 drivers/crypto/talitos.c struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
areq 2123 drivers/crypto/talitos.c ahash->init(areq);
areq 2126 drivers/crypto/talitos.c return ahash_process_req(areq, areq->nbytes);
areq 2129 drivers/crypto/talitos.c static int ahash_export(struct ahash_request *areq, void *out)
areq 2131 drivers/crypto/talitos.c struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
areq 2133 drivers/crypto/talitos.c struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
areq 2154 drivers/crypto/talitos.c static int ahash_import(struct ahash_request *areq, const void *in)
areq 2156 drivers/crypto/talitos.c struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
areq 2157 drivers/crypto/talitos.c struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
areq 831 drivers/crypto/ux500/cryp/cryp_core.c static int ablk_dma_crypt(struct ablkcipher_request *areq)
areq 833 drivers/crypto/ux500/cryp/cryp_core.c struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
areq 843 drivers/crypto/ux500/cryp/cryp_core.c ctx->datalen = areq->nbytes;
areq 844 drivers/crypto/ux500/cryp/cryp_core.c ctx->outlen = areq->nbytes;
areq 855 drivers/crypto/ux500/cryp/cryp_core.c ctx->device->dma.nents_src = get_nents(areq->src, ctx->datalen);
areq 856 drivers/crypto/ux500/cryp/cryp_core.c ctx->device->dma.nents_dst = get_nents(areq->dst, ctx->outlen);
areq 861 drivers/crypto/ux500/cryp/cryp_core.c bytes_written = cryp_dma_write(ctx, areq->src, ctx->datalen);
areq 862 drivers/crypto/ux500/cryp/cryp_core.c bytes_read = cryp_dma_read(ctx, areq->dst, bytes_written);
areq 888 drivers/crypto/ux500/cryp/cryp_core.c static int ablk_crypt(struct ablkcipher_request *areq)
areq 891 drivers/crypto/ux500/cryp/cryp_core.c struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
areq 905 drivers/crypto/ux500/cryp/cryp_core.c ablkcipher_walk_init(&walk, areq->dst, areq->src, areq->nbytes);
areq 906 drivers/crypto/ux500/cryp/cryp_core.c ret = ablkcipher_walk_phys(areq, &walk);
areq 929 drivers/crypto/ux500/cryp/cryp_core.c ret = ablkcipher_walk_done(areq, &walk, nbytes);
areq 1024 drivers/crypto/ux500/cryp/cryp_core.c static int cryp_blk_encrypt(struct ablkcipher_request *areq)
areq 1026 drivers/crypto/ux500/cryp/cryp_core.c struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
areq 1036 drivers/crypto/ux500/cryp/cryp_core.c return ablk_dma_crypt(areq);
areq 1039 drivers/crypto/ux500/cryp/cryp_core.c return ablk_crypt(areq);
areq 1042 drivers/crypto/ux500/cryp/cryp_core.c static int cryp_blk_decrypt(struct ablkcipher_request *areq)
areq 1044 drivers/crypto/ux500/cryp/cryp_core.c struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
areq 1053 drivers/crypto/ux500/cryp/cryp_core.c return ablk_dma_crypt(areq);
areq 1056 drivers/crypto/ux500/cryp/cryp_core.c return ablk_crypt(areq);
areq 75 include/crypto/engine.h void *areq);
areq 77 include/crypto/engine.h void *areq);
areq 79 include/crypto/engine.h void *areq);
areq 234 include/crypto/if_alg.h void af_alg_free_resources(struct af_alg_async_req *areq);
areq 241 include/crypto/if_alg.h struct af_alg_async_req *areq, size_t maxsize,
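The include/crypto/engine.h entries at the end are the three void *areq hooks of struct crypto_engine_op. Drivers such as stm32-cryp and stm32-hash fill them in through a struct crypto_engine_ctx embedded at the start of their transform context, which is how the engine finds the ops via crypto_tfm_ctx(). A sketch of that wiring with invented demo_* names:

/*
 * Sketch only: wiring the crypto_engine_op hooks. The engine looks up
 * crypto_engine_ctx at the start of the tfm context, so it is placed first.
 */
#include <crypto/engine.h>

struct demo_tfm_ctx {
        struct crypto_engine_ctx enginectx;    /* must stay the first member */
};

static int demo_prepare_req(struct crypto_engine *engine, void *areq)
{
        return 0;       /* e.g. DMA-map the request described by areq */
}

static int demo_do_one_req(struct crypto_engine *engine, void *areq)
{
        return 0;       /* start the hardware on this request */
}

static void demo_init_engine_ops(struct demo_tfm_ctx *ctx)
{
        ctx->enginectx.op.prepare_request = demo_prepare_req;
        ctx->enginectx.op.unprepare_request = NULL;
        ctx->enginectx.op.do_one_request = demo_do_one_req;
}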