Home
last modified time | relevance | path

Searched refs: async_req (Results 1 – 26 of 26) sorted by relevance

/linux-4.1.27/drivers/crypto/qce/
Dcore.c62 static int qce_handle_request(struct crypto_async_request *async_req) in qce_handle_request() argument
66 u32 type = crypto_tfm_alg_type(async_req->tfm); in qce_handle_request()
72 ret = ops->async_req_handle(async_req); in qce_handle_request()
82 struct crypto_async_request *async_req, *backlog; in qce_handle_queue() local
98 async_req = crypto_dequeue_request(&qce->queue); in qce_handle_queue()
99 if (async_req) in qce_handle_queue()
100 qce->req = async_req; in qce_handle_queue()
104 if (!async_req) in qce_handle_queue()
113 err = qce_handle_request(async_req); in qce_handle_queue()
Dcommon.c230 static int qce_setup_regs_ahash(struct crypto_async_request *async_req, in qce_setup_regs_ahash() argument
233 struct ahash_request *req = ahash_request_cast(async_req); in qce_setup_regs_ahash()
234 struct crypto_ahash *ahash = __crypto_ahash_cast(async_req->tfm); in qce_setup_regs_ahash()
236 struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm); in qce_setup_regs_ahash()
239 unsigned int blocksize = crypto_tfm_alg_blocksize(async_req->tfm); in qce_setup_regs_ahash()
315 static int qce_setup_regs_ablkcipher(struct crypto_async_request *async_req, in qce_setup_regs_ablkcipher() argument
318 struct ablkcipher_request *req = ablkcipher_request_cast(async_req); in qce_setup_regs_ablkcipher()
320 struct qce_cipher_ctx *ctx = crypto_tfm_ctx(async_req->tfm); in qce_setup_regs_ablkcipher()
321 struct qce_alg_template *tmpl = to_cipher_tmpl(async_req->tfm); in qce_setup_regs_ablkcipher()
396 int qce_start(struct crypto_async_request *async_req, u32 type, u32 totallen, in qce_start() argument
[all …]
Dablkcipher.c27 struct crypto_async_request *async_req = data; in qce_ablkcipher_done() local
28 struct ablkcipher_request *req = ablkcipher_request_cast(async_req); in qce_ablkcipher_done()
30 struct qce_alg_template *tmpl = to_cipher_tmpl(async_req->tfm); in qce_ablkcipher_done()
62 qce_ablkcipher_async_req_handle(struct crypto_async_request *async_req) in qce_ablkcipher_async_req_handle() argument
64 struct ablkcipher_request *req = ablkcipher_request_cast(async_req); in qce_ablkcipher_async_req_handle()
67 struct qce_alg_template *tmpl = to_cipher_tmpl(async_req->tfm); in qce_ablkcipher_async_req_handle()
136 qce_ablkcipher_done, async_req); in qce_ablkcipher_async_req_handle()
142 ret = qce_start(async_req, tmpl->crypto_alg_type, req->nbytes, 0); in qce_ablkcipher_async_req_handle()
Dsha.c39 struct crypto_async_request *async_req = data; in qce_ahash_done() local
40 struct ahash_request *req = ahash_request_cast(async_req); in qce_ahash_done()
43 struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm); in qce_ahash_done()
77 static int qce_ahash_async_req_handle(struct crypto_async_request *async_req) in qce_ahash_async_req_handle() argument
79 struct ahash_request *req = ahash_request_cast(async_req); in qce_ahash_async_req_handle()
81 struct qce_sha_ctx *ctx = crypto_tfm_ctx(async_req->tfm); in qce_ahash_async_req_handle()
82 struct qce_alg_template *tmpl = to_ahash_tmpl(async_req->tfm); in qce_ahash_async_req_handle()
109 &rctx->result_sg, 1, qce_ahash_done, async_req); in qce_ahash_async_req_handle()
115 ret = qce_start(async_req, tmpl->crypto_alg_type, 0, 0); in qce_ahash_async_req_handle()
Dcore.h65 int (*async_req_handle)(struct crypto_async_request *async_req);
Dcommon.h99 int qce_start(struct crypto_async_request *async_req, u32 type, u32 totallen,
/linux-4.1.27/drivers/crypto/ccp/
Dccp-crypto-aes.c25 static int ccp_aes_complete(struct crypto_async_request *async_req, int ret) in ccp_aes_complete() argument
27 struct ablkcipher_request *req = ablkcipher_request_cast(async_req); in ccp_aes_complete()
143 static int ccp_aes_rfc3686_complete(struct crypto_async_request *async_req, in ccp_aes_rfc3686_complete() argument
146 struct ablkcipher_request *req = ablkcipher_request_cast(async_req); in ccp_aes_rfc3686_complete()
152 return ccp_aes_complete(async_req, ret); in ccp_aes_rfc3686_complete()
Dccp-crypto-aes-xts.c84 static int ccp_aes_xts_complete(struct crypto_async_request *async_req, int ret) in ccp_aes_xts_complete() argument
86 struct ablkcipher_request *req = ablkcipher_request_cast(async_req); in ccp_aes_xts_complete()
Dccp-crypto-aes-cmac.c26 static int ccp_aes_cmac_complete(struct crypto_async_request *async_req, in ccp_aes_cmac_complete() argument
29 struct ahash_request *req = ahash_request_cast(async_req); in ccp_aes_cmac_complete()
Dccp-crypto-sha.c26 static int ccp_sha_complete(struct crypto_async_request *async_req, int ret) in ccp_sha_complete() argument
28 struct ahash_request *req = ahash_request_cast(async_req); in ccp_sha_complete()
/linux-4.1.27/drivers/crypto/
Dmv_cesa.c597 struct crypto_async_request *async_req = NULL; in queue_manag() local
608 async_req = crypto_dequeue_request(&cpg->queue); in queue_manag()
609 if (async_req) { in queue_manag()
621 if (async_req) { in queue_manag()
622 if (crypto_tfm_alg_type(async_req->tfm) != in queue_manag()
625 ablkcipher_request_cast(async_req); in queue_manag()
629 ahash_request_cast(async_req); in queue_manag()
632 async_req = NULL; in queue_manag()
Dbfin_crc.c302 struct crypto_async_request *async_req, *backlog; in bfin_crypto_crc_handle_queue() local
319 async_req = crypto_dequeue_request(&crc->queue); in bfin_crypto_crc_handle_queue()
320 if (async_req) in bfin_crypto_crc_handle_queue()
324 if (!async_req) in bfin_crypto_crc_handle_queue()
330 req = ahash_request_cast(async_req); in bfin_crypto_crc_handle_queue()
Ds5p-sss.c484 struct crypto_async_request *async_req, *backlog; in s5p_tasklet_cb() local
490 async_req = crypto_dequeue_request(&dev->queue); in s5p_tasklet_cb()
492 if (!async_req) { in s5p_tasklet_cb()
502 dev->req = ablkcipher_request_cast(async_req); in s5p_tasklet_cb()
Dsahara.c1102 struct crypto_async_request *async_req; in sahara_queue_manage() local
1111 async_req = crypto_dequeue_request(&dev->queue); in sahara_queue_manage()
1117 if (async_req) { in sahara_queue_manage()
1118 if (crypto_tfm_alg_type(async_req->tfm) == in sahara_queue_manage()
1121 ahash_request_cast(async_req); in sahara_queue_manage()
1126 ablkcipher_request_cast(async_req); in sahara_queue_manage()
1131 async_req->complete(async_req, ret); in sahara_queue_manage()
Dimg-hash.c497 struct crypto_async_request *async_req, *backlog; in img_hash_handle_queue() local
513 async_req = crypto_dequeue_request(&hdev->queue); in img_hash_handle_queue()
514 if (async_req) in img_hash_handle_queue()
519 if (!async_req) in img_hash_handle_queue()
525 req = ahash_request_cast(async_req); in img_hash_handle_queue()
Domap-des.c592 struct crypto_async_request *async_req, *backlog; in omap_des_handle_queue() local
606 async_req = crypto_dequeue_request(&dd->queue); in omap_des_handle_queue()
607 if (async_req) in omap_des_handle_queue()
611 if (!async_req) in omap_des_handle_queue()
617 req = ablkcipher_request_cast(async_req); in omap_des_handle_queue()
Domap-aes.c610 struct crypto_async_request *async_req, *backlog; in omap_aes_handle_queue() local
624 async_req = crypto_dequeue_request(&dd->queue); in omap_aes_handle_queue()
625 if (async_req) in omap_aes_handle_queue()
629 if (!async_req) in omap_aes_handle_queue()
635 req = ablkcipher_request_cast(async_req); in omap_aes_handle_queue()
Datmel-tdes.c591 struct crypto_async_request *async_req, *backlog; in atmel_tdes_handle_queue() local
605 async_req = crypto_dequeue_request(&dd->queue); in atmel_tdes_handle_queue()
606 if (async_req) in atmel_tdes_handle_queue()
610 if (!async_req) in atmel_tdes_handle_queue()
616 req = ablkcipher_request_cast(async_req); in atmel_tdes_handle_queue()
Datmel-sha.c832 struct crypto_async_request *async_req, *backlog; in atmel_sha_handle_queue() local
847 async_req = crypto_dequeue_request(&dd->queue); in atmel_sha_handle_queue()
848 if (async_req) in atmel_sha_handle_queue()
853 if (!async_req) in atmel_sha_handle_queue()
859 req = ahash_request_cast(async_req); in atmel_sha_handle_queue()
Datmel-aes.c565 struct crypto_async_request *async_req, *backlog; in atmel_aes_handle_queue() local
579 async_req = crypto_dequeue_request(&dd->queue); in atmel_aes_handle_queue()
580 if (async_req) in atmel_aes_handle_queue()
584 if (!async_req) in atmel_aes_handle_queue()
590 req = ablkcipher_request_cast(async_req); in atmel_aes_handle_queue()
Dhifn_795x.c2020 struct crypto_async_request *async_req; in hifn_flush() local
2036 while ((async_req = crypto_dequeue_request(&dev->queue))) { in hifn_flush()
2037 req = container_of(async_req, struct ablkcipher_request, base); in hifn_flush()
2139 struct crypto_async_request *async_req, *backlog; in hifn_process_queue() local
2147 async_req = crypto_dequeue_request(&dev->queue); in hifn_process_queue()
2150 if (!async_req) in hifn_process_queue()
2156 req = container_of(async_req, struct ablkcipher_request, base); in hifn_process_queue()
Domap-sham.c1010 struct crypto_async_request *async_req, *backlog; in omap_sham_handle_queue() local
1023 async_req = crypto_dequeue_request(&dd->queue); in omap_sham_handle_queue()
1024 if (async_req) in omap_sham_handle_queue()
1028 if (!async_req) in omap_sham_handle_queue()
1034 req = ahash_request_cast(async_req); in omap_sham_handle_queue()
/linux-4.1.27/drivers/net/usb/
Drtl8150.c144 struct async_req { struct
172 struct async_req *req = (struct async_req *)urb->context; in async_set_reg_cb()
185 struct async_req *req; in async_set_registers()
187 req = kmalloc(sizeof(struct async_req), GFP_ATOMIC); in async_set_registers()
/linux-4.1.27/drivers/crypto/amcc/
Dcrypto4xx_core.h66 struct crypto_async_request *async_req; /* base crypto request member
Dcrypto4xx_core.c635 ablk_req = ablkcipher_request_cast(pd_uinfo->async_req); in crypto4xx_ablkcipher_done()
659 ahash_req = ahash_request_cast(pd_uinfo->async_req); in crypto4xx_ahash_done()
679 if (crypto_tfm_alg_type(pd_uinfo->async_req->tfm) == in crypto4xx_pd_done()
857 pd_uinfo->async_req = req; in crypto4xx_build_pd()
/linux-4.1.27/drivers/md/
Ddm-crypt.c882 static void kcryptd_async_done(struct crypto_async_request *async_req,
1342 static void kcryptd_async_done(struct crypto_async_request *async_req, in kcryptd_async_done() argument
1345 struct dm_crypt_request *dmreq = async_req->data; in kcryptd_async_done()