async_req          68 crypto/crypto_engine.c 	struct crypto_async_request *async_req, *backlog;
async_req         113 crypto/crypto_engine.c 	async_req = crypto_dequeue_request(&engine->queue);
async_req         114 crypto/crypto_engine.c 	if (!async_req)
async_req         117 crypto/crypto_engine.c 	engine->cur_req = async_req;
async_req         137 crypto/crypto_engine.c 	enginectx = crypto_tfm_ctx(async_req->tfm);
async_req         140 crypto/crypto_engine.c 		ret = enginectx->op.prepare_request(engine, async_req);
async_req         153 crypto/crypto_engine.c 	ret = enginectx->op.do_one_request(engine, async_req);
async_req         161 crypto/crypto_engine.c 	crypto_finalize_request(engine, async_req, ret);
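
The crypto_engine.c hits above are the engine's request pump: dequeue, record as cur_req, run the optional prepare_request hook, then do_one_request, finalizing on synchronous failure. A condensed sketch of that flow, with locking, backlog handling, and the kworker plumbing elided (not the exact upstream function):

	#include <crypto/engine.h>

	/* Condensed sketch of the crypto_pump_requests() flow seen above;
	 * locking, backlog notification and engine-state checks elided. */
	static void pump_one_request(struct crypto_engine *engine)
	{
		struct crypto_async_request *async_req;
		struct crypto_engine_ctx *enginectx;
		int ret;

		async_req = crypto_dequeue_request(&engine->queue);
		if (!async_req)
			return;

		engine->cur_req = async_req;

		enginectx = crypto_tfm_ctx(async_req->tfm);
		if (enginectx->op.prepare_request) {
			ret = enginectx->op.prepare_request(engine, async_req);
			if (ret)
				goto finalize;
		}

		/* zero means the hardware completes asynchronously and the
		 * driver finalizes later; nonzero is a synchronous failure */
		ret = enginectx->op.do_one_request(engine, async_req);
	finalize:
		if (ret)
			crypto_finalize_request(engine, async_req, ret);
	}
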
async_req         216 crypto/rsa-pkcs1pad.c 	struct crypto_async_request async_req;
async_req         221 crypto/rsa-pkcs1pad.c 	async_req.data = req->base.data;
async_req         222 crypto/rsa-pkcs1pad.c 	async_req.tfm = crypto_akcipher_tfm(crypto_akcipher_reqtfm(req));
async_req         223 crypto/rsa-pkcs1pad.c 	async_req.flags = child_async_req->flags;
async_req         224 crypto/rsa-pkcs1pad.c 	req->base.complete(&async_req, pkcs1pad_encrypt_sign_complete(req, err));
async_req         334 crypto/rsa-pkcs1pad.c 	struct crypto_async_request async_req;
async_req         339 crypto/rsa-pkcs1pad.c 	async_req.data = req->base.data;
async_req         340 crypto/rsa-pkcs1pad.c 	async_req.tfm = crypto_akcipher_tfm(crypto_akcipher_reqtfm(req));
async_req         341 crypto/rsa-pkcs1pad.c 	async_req.flags = child_async_req->flags;
async_req         342 crypto/rsa-pkcs1pad.c 	req->base.complete(&async_req, pkcs1pad_decrypt_complete(req, err));
async_req         512 crypto/rsa-pkcs1pad.c 	struct crypto_async_request async_req;
async_req         517 crypto/rsa-pkcs1pad.c 	async_req.data = req->base.data;
async_req         518 crypto/rsa-pkcs1pad.c 	async_req.tfm = crypto_akcipher_tfm(crypto_akcipher_reqtfm(req));
async_req         519 crypto/rsa-pkcs1pad.c 	async_req.flags = child_async_req->flags;
async_req         520 crypto/rsa-pkcs1pad.c 	req->base.complete(&async_req, pkcs1pad_verify_complete(req, err));
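
The three rsa-pkcs1pad callbacks above share one completion-forwarding trick: the child RSA request completes with its own base request, and the callback builds a stack-local crypto_async_request mirroring the caller's original request (data and tfm from the outer request, flags from the child) before invoking the original completion. A minimal sketch, assuming (as in the upstream pattern) that the child request's .data cookie was set to the outer akcipher_request at submission time:

	#include <crypto/akcipher.h>

	/* The stack-local async_req impersonates the caller's request so
	 * the original completion sees a consistent tfm/data pair. */
	static void pkcs1pad_forward_complete(struct crypto_async_request *child_async_req,
					      int err)
	{
		struct akcipher_request *req = child_async_req->data;
		struct crypto_async_request async_req;

		async_req.data = req->base.data;
		async_req.tfm = crypto_akcipher_tfm(crypto_akcipher_reqtfm(req));
		async_req.flags = child_async_req->flags;
		req->base.complete(&async_req, err);	/* err already post-processed
							 * by the per-op helper */
	}
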
async_req         526 drivers/crypto/amcc/crypto4xx_core.c 	req = skcipher_request_cast(pd_uinfo->async_req);
async_req         558 drivers/crypto/amcc/crypto4xx_core.c 	ahash_req = ahash_request_cast(pd_uinfo->async_req);
async_req         574 drivers/crypto/amcc/crypto4xx_core.c 	struct aead_request *aead_req = container_of(pd_uinfo->async_req, struct aead_request, base);
async_req         639 drivers/crypto/amcc/crypto4xx_core.c 	switch (crypto_tfm_alg_type(pd_uinfo->async_req->tfm)) {
async_req         815 drivers/crypto/amcc/crypto4xx_core.c 	pd_uinfo->async_req = req;
async_req          70 drivers/crypto/amcc/crypto4xx_core.h 	struct crypto_async_request *async_req; 	/* base crypto request that we pick up from the crypto queue */
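
crypto4xx stashes the base request in its per-descriptor bookkeeping at submission (line 815 above) and recovers the typed request on completion by switching on the tfm's algorithm type (line 639). A sketch of that dispatch; pd_uinfo is reduced to the one field shown in the listing, and the handle_*_done() helpers are hypothetical stand-ins for the driver's per-type done routines (assumes <crypto/skcipher.h>, <crypto/aead.h>, <crypto/hash.h>):

	/* Per-descriptor state, reduced to the field from crypto4xx_core.h. */
	struct pd_uinfo_sketch {
		struct crypto_async_request *async_req;
	};

	static void crypto4xx_dispatch_done(struct pd_uinfo_sketch *pd_uinfo, int err)
	{
		struct crypto_async_request *async_req = pd_uinfo->async_req;

		switch (crypto_tfm_alg_type(async_req->tfm)) {
		case CRYPTO_ALG_TYPE_SKCIPHER:
			handle_cipher_done(skcipher_request_cast(async_req), err);
			break;
		case CRYPTO_ALG_TYPE_AEAD:
			handle_aead_done(container_of(async_req,
						      struct aead_request, base), err);
			break;
		default:	/* ahash */
			handle_hash_done(ahash_request_cast(async_req), err);
			break;
		}
	}
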
async_req        1054 drivers/crypto/atmel-sha.c 	struct crypto_async_request *async_req, *backlog;
async_req        1070 drivers/crypto/atmel-sha.c 	async_req = crypto_dequeue_request(&dd->queue);
async_req        1071 drivers/crypto/atmel-sha.c 	if (async_req)
async_req        1076 drivers/crypto/atmel-sha.c 	if (!async_req)
async_req        1082 drivers/crypto/atmel-sha.c 	ctx = crypto_tfm_ctx(async_req->tfm);
async_req        1084 drivers/crypto/atmel-sha.c 	dd->req = ahash_request_cast(async_req);
async_req         588 drivers/crypto/atmel-tdes.c 	struct crypto_async_request *async_req, *backlog;
async_req         602 drivers/crypto/atmel-tdes.c 	async_req = crypto_dequeue_request(&dd->queue);
async_req         603 drivers/crypto/atmel-tdes.c 	if (async_req)
async_req         607 drivers/crypto/atmel-tdes.c 	if (!async_req)
async_req         613 drivers/crypto/atmel-tdes.c 	req = ablkcipher_request_cast(async_req);
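
The atmel-sha and atmel-tdes hits above, and the img-hash, mtk-sha, omap-sham, and s5p-sss hits further down, all repeat the canonical handle-queue shape: peek the backlog and dequeue under the device lock, mark the device busy if a request came out, then (lock dropped) notify the backlogged request with -EINPROGRESS and cast the base request to its algorithm-specific type. A hypothetical condensation; dd_sketch stands in for each driver's device structure:

	#include <crypto/algapi.h>
	#include <crypto/hash.h>

	struct dd_sketch {
		spinlock_t lock;
		struct crypto_queue queue;
		bool busy;
	};

	static struct ahash_request *handle_queue_sketch(struct dd_sketch *dd)
	{
		struct crypto_async_request *async_req, *backlog;
		unsigned long flags;

		spin_lock_irqsave(&dd->lock, flags);
		backlog = crypto_get_backlog(&dd->queue);
		async_req = crypto_dequeue_request(&dd->queue);
		if (async_req)
			dd->busy = true;
		spin_unlock_irqrestore(&dd->lock, flags);

		if (!async_req)
			return NULL;

		if (backlog)
			backlog->complete(backlog, -EINPROGRESS);

		/* cipher drivers use ablkcipher_request_cast() instead */
		return ahash_request_cast(async_req);
	}
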
async_req          23 drivers/crypto/ccp/ccp-crypto-aes-cmac.c static int ccp_aes_cmac_complete(struct crypto_async_request *async_req, int ret)
async_req          26 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	struct ahash_request *req = ahash_request_cast(async_req);
async_req          24 drivers/crypto/ccp/ccp-crypto-aes-galois.c static int ccp_aes_gcm_complete(struct crypto_async_request *async_req, int ret)
async_req          62 drivers/crypto/ccp/ccp-crypto-aes-xts.c static int ccp_aes_xts_complete(struct crypto_async_request *async_req, int ret)
async_req          64 drivers/crypto/ccp/ccp-crypto-aes-xts.c 	struct ablkcipher_request *req = ablkcipher_request_cast(async_req);
async_req          22 drivers/crypto/ccp/ccp-crypto-aes.c static int ccp_aes_complete(struct crypto_async_request *async_req, int ret)
async_req          24 drivers/crypto/ccp/ccp-crypto-aes.c 	struct ablkcipher_request *req = ablkcipher_request_cast(async_req);
async_req         139 drivers/crypto/ccp/ccp-crypto-aes.c static int ccp_aes_rfc3686_complete(struct crypto_async_request *async_req, int ret)
async_req         142 drivers/crypto/ccp/ccp-crypto-aes.c 	struct ablkcipher_request *req = ablkcipher_request_cast(async_req);
async_req         148 drivers/crypto/ccp/ccp-crypto-aes.c 	return ccp_aes_complete(async_req, ret);
async_req          21 drivers/crypto/ccp/ccp-crypto-des3.c static int ccp_des3_complete(struct crypto_async_request *async_req, int ret)
async_req          23 drivers/crypto/ccp/ccp-crypto-des3.c 	struct ablkcipher_request *req = ablkcipher_request_cast(async_req);
async_req          44 drivers/crypto/ccp/ccp-crypto-rsa.c static int ccp_rsa_complete(struct crypto_async_request *async_req, int ret)
async_req          46 drivers/crypto/ccp/ccp-crypto-rsa.c 	struct akcipher_request *req = akcipher_request_cast(async_req);
async_req          25 drivers/crypto/ccp/ccp-crypto-sha.c static int ccp_sha_complete(struct crypto_async_request *async_req, int ret)
async_req          27 drivers/crypto/ccp/ccp-crypto-sha.c 	struct ahash_request *req = ahash_request_cast(async_req);
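
Each ccp hit above is the same callback shape: the CCP core completes with the base request plus a status, and the per-algorithm handler casts back to its own request type before fixing up IVs or digests. Line 148 additionally shows layering, with the rfc3686 variant delegating to the plain-AES completion. A sketch of both, with the actual bookkeeping elided:

	static int ccp_aes_complete_sketch(struct crypto_async_request *async_req,
					   int ret)
	{
		struct ablkcipher_request *req = ablkcipher_request_cast(async_req);

		if (ret)
			return ret;
		(void)req;	/* on success the next IV would be copied
				 * back into req->info (elided here) */
		return 0;
	}

	static int ccp_aes_rfc3686_complete_sketch(struct crypto_async_request *async_req,
						   int ret)
	{
		/* restore rfc3686-specific request state first (elided),
		 * then delegate to the base completion */
		return ccp_aes_complete_sketch(async_req, ret);
	}
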
async_req        1912 drivers/crypto/hifn_795x.c 	struct crypto_async_request *async_req;
async_req        1928 drivers/crypto/hifn_795x.c 	while ((async_req = crypto_dequeue_request(&dev->queue))) {
async_req        1929 drivers/crypto/hifn_795x.c 		req = ablkcipher_request_cast(async_req);
async_req        2039 drivers/crypto/hifn_795x.c 	struct crypto_async_request *async_req, *backlog;
async_req        2047 drivers/crypto/hifn_795x.c 		async_req = crypto_dequeue_request(&dev->queue);
async_req        2050 drivers/crypto/hifn_795x.c 		if (!async_req)
async_req        2056 drivers/crypto/hifn_795x.c 		req = ablkcipher_request_cast(async_req);
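
hifn_795x shows two distinct queue uses: the first hit drains the whole queue in a while loop (completing everything still pending, e.g. when the device is flushed), while the second is the usual dequeue-one path. A sketch of the drain; the errno is illustrative, not the driver's exact choice:

	static void drain_queue_sketch(struct crypto_queue *queue)
	{
		struct crypto_async_request *async_req;

		while ((async_req = crypto_dequeue_request(queue))) {
			struct ablkcipher_request *req =
					ablkcipher_request_cast(async_req);

			req->base.complete(&req->base, -ENODEV);
		}
	}
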
async_req         497 drivers/crypto/img-hash.c 	struct crypto_async_request *async_req, *backlog;
async_req         513 drivers/crypto/img-hash.c 	async_req = crypto_dequeue_request(&hdev->queue);
async_req         514 drivers/crypto/img-hash.c 	if (async_req)
async_req         519 drivers/crypto/img-hash.c 	if (!async_req)
async_req         525 drivers/crypto/img-hash.c 	req = ahash_request_cast(async_req);
async_req         654 drivers/crypto/mediatek/mtk-sha.c 	struct crypto_async_request *async_req, *backlog;
async_req         669 drivers/crypto/mediatek/mtk-sha.c 	async_req = crypto_dequeue_request(&sha->queue);
async_req         670 drivers/crypto/mediatek/mtk-sha.c 	if (async_req)
async_req         674 drivers/crypto/mediatek/mtk-sha.c 	if (!async_req)
async_req         680 drivers/crypto/mediatek/mtk-sha.c 	req = ahash_request_cast(async_req);
async_req        1117 drivers/crypto/omap-sham.c 	struct crypto_async_request *async_req, *backlog;
async_req        1131 drivers/crypto/omap-sham.c 	async_req = crypto_dequeue_request(&dd->queue);
async_req        1132 drivers/crypto/omap-sham.c 	if (async_req)
async_req        1136 drivers/crypto/omap-sham.c 	if (!async_req)
async_req        1142 drivers/crypto/omap-sham.c 	req = ahash_request_cast(async_req);
async_req          19 drivers/crypto/qce/ablkcipher.c 	struct crypto_async_request *async_req = data;
async_req          20 drivers/crypto/qce/ablkcipher.c 	struct ablkcipher_request *req = ablkcipher_request_cast(async_req);
async_req          22 drivers/crypto/qce/ablkcipher.c 	struct qce_alg_template *tmpl = to_cipher_tmpl(async_req->tfm);
async_req          52 drivers/crypto/qce/ablkcipher.c qce_ablkcipher_async_req_handle(struct crypto_async_request *async_req)
async_req          54 drivers/crypto/qce/ablkcipher.c 	struct ablkcipher_request *req = ablkcipher_request_cast(async_req);
async_req          57 drivers/crypto/qce/ablkcipher.c 	struct qce_alg_template *tmpl = to_cipher_tmpl(async_req->tfm);
async_req         128 drivers/crypto/qce/ablkcipher.c 			       qce_ablkcipher_done, async_req);
async_req         134 drivers/crypto/qce/ablkcipher.c 	ret = qce_start(async_req, tmpl->crypto_alg_type, req->nbytes, 0);
async_req         222 drivers/crypto/qce/common.c static int qce_setup_regs_ahash(struct crypto_async_request *async_req, u32 totallen, u32 offset)
async_req         225 drivers/crypto/qce/common.c 	struct ahash_request *req = ahash_request_cast(async_req);
async_req         226 drivers/crypto/qce/common.c 	struct crypto_ahash *ahash = __crypto_ahash_cast(async_req->tfm);
async_req         228 drivers/crypto/qce/common.c 	struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);
async_req         231 drivers/crypto/qce/common.c 	unsigned int blocksize = crypto_tfm_alg_blocksize(async_req->tfm);
async_req         307 drivers/crypto/qce/common.c static int qce_setup_regs_ablkcipher(struct crypto_async_request *async_req, u32 totallen, u32 offset)
async_req         310 drivers/crypto/qce/common.c 	struct ablkcipher_request *req = ablkcipher_request_cast(async_req);
async_req         312 drivers/crypto/qce/common.c 	struct qce_cipher_ctx *ctx = crypto_tfm_ctx(async_req->tfm);
async_req         313 drivers/crypto/qce/common.c 	struct qce_alg_template *tmpl = to_cipher_tmpl(async_req->tfm);
async_req         388 drivers/crypto/qce/common.c int qce_start(struct crypto_async_request *async_req, u32 type, u32 totallen, u32 offset)
async_req         393 drivers/crypto/qce/common.c 		return qce_setup_regs_ablkcipher(async_req, totallen, offset);
async_req         395 drivers/crypto/qce/common.c 		return qce_setup_regs_ahash(async_req, totallen, offset);
async_req          91 drivers/crypto/qce/common.h int qce_start(struct crypto_async_request *async_req, u32 type, u32 totallen, u32 offset)
async_req          55 drivers/crypto/qce/core.c static int qce_handle_request(struct crypto_async_request *async_req)
async_req          59 drivers/crypto/qce/core.c 	u32 type = crypto_tfm_alg_type(async_req->tfm);
async_req          65 drivers/crypto/qce/core.c 		ret = ops->async_req_handle(async_req);
async_req          75 drivers/crypto/qce/core.c 	struct crypto_async_request *async_req, *backlog;
async_req          91 drivers/crypto/qce/core.c 	async_req = crypto_dequeue_request(&qce->queue);
async_req          92 drivers/crypto/qce/core.c 	if (async_req)
async_req          93 drivers/crypto/qce/core.c 		qce->req = async_req;
async_req          97 drivers/crypto/qce/core.c 	if (!async_req)
async_req         106 drivers/crypto/qce/core.c 	err = qce_handle_request(async_req);
async_req          57 drivers/crypto/qce/core.h 	int (*async_req_handle)(struct crypto_async_request *async_req);
async_req          31 drivers/crypto/qce/sha.c 	struct crypto_async_request *async_req = data;
async_req          32 drivers/crypto/qce/sha.c 	struct ahash_request *req = ahash_request_cast(async_req);
async_req          35 drivers/crypto/qce/sha.c 	struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);
async_req          68 drivers/crypto/qce/sha.c static int qce_ahash_async_req_handle(struct crypto_async_request *async_req)
async_req          70 drivers/crypto/qce/sha.c 	struct ahash_request *req = ahash_request_cast(async_req);
async_req          72 drivers/crypto/qce/sha.c 	struct qce_sha_ctx *ctx = crypto_tfm_ctx(async_req->tfm);
async_req          73 drivers/crypto/qce/sha.c 	struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm);
async_req         103 drivers/crypto/qce/sha.c 			       &rctx->result_sg, 1, qce_ahash_done, async_req);
async_req         109 drivers/crypto/qce/sha.c 	ret = qce_start(async_req, tmpl->crypto_alg_type, 0, 0);
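
Taken together, the qce hits form one dispatch chain: core.c reads the tfm's algorithm type and calls the matching ops table's async_req_handle; the handler (ablkcipher.c or sha.c) casts the request, registers its done callback with async_req as the DMA-completion cookie, and calls qce_start; qce_start (common.c) switches on the type again to program the registers. A simplified rendering of the two dispatch points; qce_ops_lookup() is a hypothetical stand-in for the driver's ops table walk:

	/* qce_ops_lookup() is hypothetical; async_req_handle is the real
	 * ops-table hook shown in core.h above. */
	static int qce_handle_request_sketch(struct crypto_async_request *async_req)
	{
		u32 type = crypto_tfm_alg_type(async_req->tfm);
		const struct qce_algo_ops *ops = qce_ops_lookup(type);

		return ops ? ops->async_req_handle(async_req) : -ENOENT;
	}

	static int qce_start_sketch(struct crypto_async_request *async_req, u32 type,
				    u32 totallen, u32 offset)
	{
		switch (type) {
		case CRYPTO_ALG_TYPE_ABLKCIPHER:
			return qce_setup_regs_ablkcipher(async_req, totallen, offset);
		case CRYPTO_ALG_TYPE_AHASH:
			return qce_setup_regs_ahash(async_req, totallen, offset);
		default:
			return -EINVAL;
		}
	}
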
async_req         185 drivers/crypto/rockchip/rk3288_crypto.c 			      struct crypto_async_request *async_req)
async_req         191 drivers/crypto/rockchip/rk3288_crypto.c 	ret = crypto_enqueue_request(&dev->queue, async_req);
async_req         206 drivers/crypto/rockchip/rk3288_crypto.c 	struct crypto_async_request *async_req, *backlog;
async_req         213 drivers/crypto/rockchip/rk3288_crypto.c 	async_req = crypto_dequeue_request(&dev->queue);
async_req         215 drivers/crypto/rockchip/rk3288_crypto.c 	if (!async_req) {
async_req         227 drivers/crypto/rockchip/rk3288_crypto.c 	dev->async_req = async_req;
async_req         230 drivers/crypto/rockchip/rk3288_crypto.c 		dev->complete(dev->async_req, err);
async_req         238 drivers/crypto/rockchip/rk3288_crypto.c 		dev->complete(dev->async_req, dev->err);
async_req         244 drivers/crypto/rockchip/rk3288_crypto.c 		dev->complete(dev->async_req, dev->err);
async_req         196 drivers/crypto/rockchip/rk3288_crypto.h 	struct crypto_async_request	*async_req;
async_req         227 drivers/crypto/rockchip/rk3288_crypto.h 		       struct crypto_async_request *async_req);
async_req         200 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 		ablkcipher_request_cast(dev->async_req);
async_req         248 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 		ablkcipher_request_cast(dev->async_req);
async_req         273 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 		ablkcipher_request_cast(dev->async_req);
async_req         296 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 		ablkcipher_request_cast(dev->async_req);
async_req         316 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 		ablkcipher_request_cast(dev->async_req);
async_req         343 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 		ablkcipher_request_cast(dev->async_req);
async_req         371 drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c 		dev->complete(dev->async_req, 0);
async_req          48 drivers/crypto/rockchip/rk3288_crypto_ahash.c 	struct ahash_request *req = ahash_request_cast(dev->async_req);
async_req         195 drivers/crypto/rockchip/rk3288_crypto_ahash.c 	struct ahash_request *req = ahash_request_cast(dev->async_req);
async_req         232 drivers/crypto/rockchip/rk3288_crypto_ahash.c 	struct ahash_request *req = ahash_request_cast(dev->async_req);
async_req         264 drivers/crypto/rockchip/rk3288_crypto_ahash.c 		dev->complete(dev->async_req, 0);
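
The rk3288 hits split the pattern into a producer and a consumer: rk_crypto_enqueue pushes the base request under the device lock (line 191) and kicks a tasklet, which dequeues, stores the request in dev->async_req, and reports failures through dev->complete (lines 213-244). A condensed pairing; the device structure and its start hook are simplifications, not the driver's exact layout:

	struct rk_dev_sketch {
		spinlock_t lock;
		struct crypto_queue queue;
		struct tasklet_struct queue_task;
		struct crypto_async_request *async_req;	/* as in rk3288_crypto.h */
		int err;
		int (*start)(struct rk_dev_sketch *dev);
		void (*complete)(struct crypto_async_request *req, int err);
	};

	static int rk_enqueue_sketch(struct rk_dev_sketch *dev,
				     struct crypto_async_request *async_req)
	{
		unsigned long flags;
		int ret;

		spin_lock_irqsave(&dev->lock, flags);
		ret = crypto_enqueue_request(&dev->queue, async_req);
		spin_unlock_irqrestore(&dev->lock, flags);
		tasklet_schedule(&dev->queue_task);
		return ret;
	}

	static void rk_dequeue_sketch(struct rk_dev_sketch *dev)	/* tasklet body */
	{
		struct crypto_async_request *async_req;

		async_req = crypto_dequeue_request(&dev->queue);
		if (!async_req)
			return;

		dev->async_req = async_req;
		if (dev->start(dev))
			dev->complete(dev->async_req, dev->err);
	}
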
async_req        1375 drivers/crypto/s5p-sss.c 	struct crypto_async_request *async_req, *backlog;
async_req        1391 drivers/crypto/s5p-sss.c 	async_req = crypto_dequeue_request(&dd->hash_queue);
async_req        1392 drivers/crypto/s5p-sss.c 	if (async_req)
async_req        1397 drivers/crypto/s5p-sss.c 	if (!async_req)
async_req        1403 drivers/crypto/s5p-sss.c 	req = ahash_request_cast(async_req);
async_req        2006 drivers/crypto/s5p-sss.c 	struct crypto_async_request *async_req, *backlog;
async_req        2012 drivers/crypto/s5p-sss.c 	async_req = crypto_dequeue_request(&dev->queue);
async_req        2014 drivers/crypto/s5p-sss.c 	if (!async_req) {
async_req        2024 drivers/crypto/s5p-sss.c 	dev->req = ablkcipher_request_cast(async_req);
async_req        1051 drivers/crypto/sahara.c 	struct crypto_async_request *async_req;
async_req        1060 drivers/crypto/sahara.c 		async_req = crypto_dequeue_request(&dev->queue);
async_req        1066 drivers/crypto/sahara.c 		if (async_req) {
async_req        1067 drivers/crypto/sahara.c 			if (crypto_tfm_alg_type(async_req->tfm) ==
async_req        1070 drivers/crypto/sahara.c 					ahash_request_cast(async_req);
async_req        1075 drivers/crypto/sahara.c 					ablkcipher_request_cast(async_req);
async_req        1080 drivers/crypto/sahara.c 			async_req->complete(async_req, ret);
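
sahara's queue thread combines dispatch and completion in one place: it checks the tfm's type to pick the hash or cipher path and then calls the base request's completion directly. Condensed; sha_process() and aes_process() are hypothetical stand-ins for the driver's helpers:

	static void sahara_run_one_sketch(struct crypto_queue *queue)
	{
		struct crypto_async_request *async_req;
		int ret;

		async_req = crypto_dequeue_request(queue);
		if (!async_req)
			return;

		if (crypto_tfm_alg_type(async_req->tfm) == CRYPTO_ALG_TYPE_AHASH)
			ret = sha_process(ahash_request_cast(async_req));
		else
			ret = aes_process(ablkcipher_request_cast(async_req));

		async_req->complete(async_req, ret);
	}
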
async_req        1136 drivers/md/dm-crypt.c static void kcryptd_async_done(struct crypto_async_request *async_req, int error);
async_req        1672 drivers/md/dm-crypt.c static void kcryptd_async_done(struct crypto_async_request *async_req, int error)
async_req        1675 drivers/md/dm-crypt.c 	struct dm_crypt_request *dmreq = async_req->data;
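
dm-crypt rides the .data cookie instead of a cast: the per-sector dm_crypt_request is attached as the callback data when the cipher request is submitted, so the completion recovers it directly. A sketch; the -EINPROGRESS branch reflects the backlog notification convention, and the bio bookkeeping is elided:

	static void kcryptd_async_done_sketch(struct crypto_async_request *async_req,
					      int error)
	{
		struct dm_crypt_request *dmreq = async_req->data;

		if (error == -EINPROGRESS)
			return;		/* backlog went in-flight, not done yet */

		(void)dmreq;		/* ...complete the convert context and
					 * the originating bio here... */
	}
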
async_req         189 drivers/net/usb/rtl8150.c 	struct async_req *req = (struct async_req *)urb->context;
async_req         202 drivers/net/usb/rtl8150.c 	struct async_req *req;
async_req         204 drivers/net/usb/rtl8150.c 	req = kmalloc(sizeof(struct async_req), GFP_ATOMIC);
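
Unlike everything above, the rtl8150 struct async_req has nothing to do with the crypto API: it is a driver-private container for asynchronous USB register writes, kmalloc'ed with GFP_ATOMIC and carried to the completion handler through urb->context. A sketch with assumed field names:

	#include <linux/usb.h>
	#include <linux/slab.h>

	struct async_req {			/* field names are assumptions */
		struct urb *dev_urb;
		struct usb_ctrlrequest dev_ctrl;
	};

	static void async_write_cb_sketch(struct urb *urb)
	{
		struct async_req *req = (struct async_req *)urb->context;

		kfree(req);			/* allocated GFP_ATOMIC at submit */
		usb_free_urb(urb);
	}
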
async_req         115 drivers/nvme/host/tcp.c 	struct nvme_tcp_request async_req;
async_req         161 drivers/nvme/host/tcp.c 	return req == &req->queue->ctrl->async_req;
async_req        1115 drivers/nvme/host/tcp.c 	struct nvme_tcp_request *async = &ctrl->async_req;
async_req        1123 drivers/nvme/host/tcp.c 	struct nvme_tcp_request *async = &ctrl->async_req;
async_req        1505 drivers/nvme/host/tcp.c 	if (to_tcp_ctrl(ctrl)->async_req.pdu) {
async_req        1507 drivers/nvme/host/tcp.c 		to_tcp_ctrl(ctrl)->async_req.pdu = NULL;
async_req        2021 drivers/nvme/host/tcp.c 	struct nvme_tcp_cmd_pdu *pdu = ctrl->async_req.pdu;
async_req        2037 drivers/nvme/host/tcp.c 	ctrl->async_req.state = NVME_TCP_SEND_CMD_PDU;
async_req        2038 drivers/nvme/host/tcp.c 	ctrl->async_req.offset = 0;
async_req        2039 drivers/nvme/host/tcp.c 	ctrl->async_req.curr_bio = NULL;
async_req        2040 drivers/nvme/host/tcp.c 	ctrl->async_req.data_len = 0;
async_req        2042 drivers/nvme/host/tcp.c 	nvme_tcp_queue_request(&ctrl->async_req);
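
nvme-tcp pre-allocates one async-event request per controller (the async_req member at tcp.c line 115), identifies it by address rather than by tag (line 161), and submits it by resetting its send state and queueing it. Condensed from the lines above; the PDU setup and the alloc/free paths (lines 1505-1507, 2021) are elided:

	static void nvme_tcp_submit_async_event_sketch(struct nvme_tcp_ctrl *ctrl)
	{
		ctrl->async_req.state = NVME_TCP_SEND_CMD_PDU;
		ctrl->async_req.offset = 0;
		ctrl->async_req.curr_bio = NULL;
		ctrl->async_req.data_len = 0;

		nvme_tcp_queue_request(&ctrl->async_req);
	}
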