sec_req           383 drivers/crypto/hisilicon/sec/sec_algs.c static int sec_send_request(struct sec_request *sec_req, struct sec_queue *queue)
sec_req           388 drivers/crypto/hisilicon/sec/sec_algs.c 	mutex_lock(&sec_req->lock);
sec_req           389 drivers/crypto/hisilicon/sec/sec_algs.c 	list_for_each_entry_safe(el, temp, &sec_req->elements, head) {
sec_req           403 drivers/crypto/hisilicon/sec/sec_algs.c 			ret = sec_queue_send(queue, &el->req, sec_req);
sec_req           415 drivers/crypto/hisilicon/sec/sec_algs.c 	mutex_unlock(&sec_req->lock);
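The hits above are the body of sec_send_request(): it walks the request's element list under sec_req->lock and hands each element's hardware descriptor to the queue, passing sec_req through as the completion cookie. A condensed sketch reconstructed only from these hits; the queue-full/backlog handling between the send and the unlock is elided, and stopping on the first sec_queue_send() failure is an assumption:

static int sec_send_request(struct sec_request *sec_req,
			    struct sec_queue *queue)
{
	struct sec_request_el *el, *temp;
	int ret = 0;

	mutex_lock(&sec_req->lock);
	list_for_each_entry_safe(el, temp, &sec_req->elements, head) {
		/* el->req is the per-element hardware descriptor;
		 * sec_req comes back as the cookie on completion. */
		ret = sec_queue_send(queue, &el->req, sec_req);
		if (ret)
			break;	/* assumption: bail out on first failure */
	}
	mutex_unlock(&sec_req->lock);

	return ret;
}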
sec_req           426 drivers/crypto/hisilicon/sec/sec_algs.c 	struct sec_request *sec_req = skcipher_request_ctx(skreq);
sec_req           429 drivers/crypto/hisilicon/sec/sec_algs.c 	struct sec_alg_tfm_ctx *ctx = sec_req->tfm_ctx;
sec_req           435 drivers/crypto/hisilicon/sec/sec_algs.c 	sec_req_el = list_first_entry(&sec_req->elements, struct sec_request_el,
sec_req           443 drivers/crypto/hisilicon/sec/sec_algs.c 		sec_req->err = -EINVAL;
sec_req           494 drivers/crypto/hisilicon/sec/sec_algs.c 				       nextrequest->sec_req);
sec_req           513 drivers/crypto/hisilicon/sec/sec_algs.c 	mutex_lock(&sec_req->lock);
sec_req           515 drivers/crypto/hisilicon/sec/sec_algs.c 	mutex_unlock(&sec_req->lock);
sec_req           522 drivers/crypto/hisilicon/sec/sec_algs.c 	mutex_lock(&sec_req->lock);
sec_req           523 drivers/crypto/hisilicon/sec/sec_algs.c 	done = list_empty(&sec_req->elements);
sec_req           524 drivers/crypto/hisilicon/sec/sec_algs.c 	mutex_unlock(&sec_req->lock);
sec_req           527 drivers/crypto/hisilicon/sec/sec_algs.c 			dma_unmap_single(dev, sec_req->dma_iv,
sec_req           531 drivers/crypto/hisilicon/sec/sec_algs.c 		dma_unmap_sg(dev, skreq->src, sec_req->len_in,
sec_req           534 drivers/crypto/hisilicon/sec/sec_algs.c 			dma_unmap_sg(dev, skreq->dst, sec_req->len_out,
sec_req           536 drivers/crypto/hisilicon/sec/sec_algs.c 		skreq->base.complete(&skreq->base, sec_req->err);
sec_req           542 drivers/crypto/hisilicon/sec/sec_algs.c 	struct sec_request *sec_req = shadow;
sec_req           544 drivers/crypto/hisilicon/sec/sec_algs.c 	sec_req->cb(resp, sec_req->req_base);
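These hits are the completion side: sec_skcipher_alg_callback() plus the small sec_alg_callback() trampoline at the end, which recovers sec_req from the queue cookie and dispatches sec_req->cb. The oldest element is taken from sec_req->elements, a bad hardware status is latched in sec_req->err, and only once the element list is empty are the IV and the src/dst scatterlists unmapped and the original request completed. A minimal sketch of that tail, in which hw_error and dev stand in for the real callback arguments, and the DMA directions and element teardown are assumptions not visible in the listing:

/* Sketch only: 'hw_error' and 'dev' are stand-ins; per-element teardown
 * and backlog processing are elided. */
static void sec_skcipher_complete_sketch(struct skcipher_request *skreq,
					 struct device *dev, bool hw_error)
{
	struct sec_request *sec_req = skcipher_request_ctx(skreq);
	struct crypto_skcipher *atfm = crypto_skcipher_reqtfm(skreq);
	struct sec_request_el *sec_req_el;
	bool done;

	sec_req_el = list_first_entry(&sec_req->elements,
				      struct sec_request_el, head);
	if (hw_error)
		sec_req->err = -EINVAL;	/* latched for the final completion */

	mutex_lock(&sec_req->lock);
	list_del(&sec_req_el->head);		/* assumption */
	done = list_empty(&sec_req->elements);
	mutex_unlock(&sec_req->lock);

	if (!done)
		return;

	if (skreq->iv)				/* assumption: IV was mapped */
		dma_unmap_single(dev, sec_req->dma_iv,
				 crypto_skcipher_ivsize(atfm),
				 DMA_TO_DEVICE);
	dma_unmap_sg(dev, skreq->src, sec_req->len_in, DMA_BIDIRECTIONAL);
	if (skreq->src != skreq->dst)
		dma_unmap_sg(dev, skreq->dst, sec_req->len_out,
			     DMA_BIDIRECTIONAL);
	skreq->base.complete(&skreq->base, sec_req->err);
}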
sec_req           713 drivers/crypto/hisilicon/sec/sec_algs.c 	struct sec_request *sec_req = skcipher_request_ctx(skreq);
sec_req           724 drivers/crypto/hisilicon/sec/sec_algs.c 	mutex_init(&sec_req->lock);
sec_req           725 drivers/crypto/hisilicon/sec/sec_algs.c 	sec_req->req_base = &skreq->base;
sec_req           726 drivers/crypto/hisilicon/sec/sec_algs.c 	sec_req->err = 0;
sec_req           728 drivers/crypto/hisilicon/sec/sec_algs.c 	sec_req->len_in = sg_nents(skreq->src);
sec_req           734 drivers/crypto/hisilicon/sec/sec_algs.c 	sec_req->num_elements = steps;
sec_req           736 drivers/crypto/hisilicon/sec/sec_algs.c 				   &splits_in_nents, sec_req->len_in,
sec_req           742 drivers/crypto/hisilicon/sec/sec_algs.c 		sec_req->len_out = sg_nents(skreq->dst);
sec_req           745 drivers/crypto/hisilicon/sec/sec_algs.c 					   sec_req->len_out, info->dev);
sec_req           750 drivers/crypto/hisilicon/sec/sec_algs.c 	sec_req->tfm_ctx = ctx;
sec_req           751 drivers/crypto/hisilicon/sec/sec_algs.c 	sec_req->cb = sec_skcipher_alg_callback;
sec_req           752 drivers/crypto/hisilicon/sec/sec_algs.c 	INIT_LIST_HEAD(&sec_req->elements);
sec_req           760 drivers/crypto/hisilicon/sec/sec_algs.c 		sec_req->dma_iv = dma_map_single(info->dev, skreq->iv,
sec_req           763 drivers/crypto/hisilicon/sec/sec_algs.c 		if (dma_mapping_error(info->dev, sec_req->dma_iv)) {
sec_req           783 drivers/crypto/hisilicon/sec/sec_algs.c 		el->req.cipher_iv_addr_lo = lower_32_bits(sec_req->dma_iv);
sec_req           784 drivers/crypto/hisilicon/sec/sec_algs.c 		el->req.cipher_iv_addr_hi = upper_32_bits(sec_req->dma_iv);
sec_req           785 drivers/crypto/hisilicon/sec/sec_algs.c 		el->sec_req = sec_req;
sec_req           786 drivers/crypto/hisilicon/sec/sec_algs.c 		list_add_tail(&el->head, &sec_req->elements);
sec_req           814 drivers/crypto/hisilicon/sec/sec_algs.c 			list_add_tail(&sec_req->backlog_head, &ctx->backlog);
sec_req           822 drivers/crypto/hisilicon/sec/sec_algs.c 	ret = sec_send_request(sec_req, queue);
sec_req           838 drivers/crypto/hisilicon/sec/sec_algs.c 	list_for_each_entry_safe(el, temp, &sec_req->elements, head) {
sec_req           843 drivers/crypto/hisilicon/sec/sec_algs.c 		dma_unmap_single(info->dev, sec_req->dma_iv,
sec_req           849 drivers/crypto/hisilicon/sec/sec_algs.c 				    splits_out_nents, sec_req->len_out,
sec_req           853 drivers/crypto/hisilicon/sec/sec_algs.c 			    sec_req->len_in, info->dev);
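The largest cluster is the setup path in sec_alg_skcipher_crypto(): the sec_request lives in the skcipher request context, is given a lock, an element list and the completion callback, the source/destination scatterlists are counted and split into per-element chunks, the IV is mapped once and its DMA address copied into every element, and the finished request is either parked on the tfm's backlog or sent immediately; the final hits are the reverse-order unwind on failure. A heavily condensed sketch of the fields touched, in which build_one_element() and sec_queue_is_busy_sketch() are hypothetical stand-ins for element construction, queue-busy detection and the locking around the backlog, none of which appear in the listing:

/* Condensed sketch; helpers marked "hypothetical" do not exist in the
 * driver under these names, and the error unwind is omitted. */
static int sec_skcipher_setup_sketch(struct skcipher_request *skreq,
				     struct sec_alg_tfm_ctx *ctx,
				     struct device *dev,
				     struct sec_queue *queue, int steps)
{
	struct sec_request *sec_req = skcipher_request_ctx(skreq);
	struct crypto_skcipher *atfm = crypto_skcipher_reqtfm(skreq);
	struct sec_request_el *el;
	int ret, i;

	mutex_init(&sec_req->lock);
	sec_req->req_base = &skreq->base;
	sec_req->err = 0;
	sec_req->len_in = sg_nents(skreq->src);
	sec_req->len_out = sg_nents(skreq->dst);
	sec_req->num_elements = steps;
	sec_req->tfm_ctx = ctx;
	sec_req->cb = sec_skcipher_alg_callback;
	INIT_LIST_HEAD(&sec_req->elements);

	/* One IV mapping is shared by every element of the request. */
	sec_req->dma_iv = dma_map_single(dev, skreq->iv,
					 crypto_skcipher_ivsize(atfm),
					 DMA_TO_DEVICE);
	if (dma_mapping_error(dev, sec_req->dma_iv))
		return -ENOMEM;

	for (i = 0; i < steps; i++) {
		el = build_one_element(skreq, i);	/* hypothetical */
		el->req.cipher_iv_addr_lo = lower_32_bits(sec_req->dma_iv);
		el->req.cipher_iv_addr_hi = upper_32_bits(sec_req->dma_iv);
		el->sec_req = sec_req;
		list_add_tail(&el->head, &sec_req->elements);
	}

	if (sec_queue_is_busy_sketch(queue)) {		/* hypothetical */
		list_add_tail(&sec_req->backlog_head, &ctx->backlog);
		ret = -EBUSY;	/* standard async-crypto "backlogged" return */
	} else {
		ret = sec_send_request(sec_req, queue);
	}

	return ret;
}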
sec_req           311 drivers/crypto/hisilicon/sec/sec_drv.h 	struct sec_request *sec_req;
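The single header hit is the back-pointer inside struct sec_request_el (sec_drv.h), which is what lets the completion path get from a finished element back to its owning request. A partial reconstruction of the two structures, listing only the members that the hits above actually touch; the real definitions in sec_drv.h contain further fields, and the member types shown here are inferred rather than copied:

/* Partial, inferred view; not the full sec_drv.h definitions. */
struct sec_request {
	struct list_head	elements;	/* list of sec_request_el */
	struct list_head	backlog_head;	/* linkage on ctx->backlog */
	struct mutex		lock;
	struct sec_alg_tfm_ctx	*tfm_ctx;
	struct crypto_async_request *req_base;	/* &skreq->base */
	void			(*cb)(void *resp,
				      struct crypto_async_request *req_base);
	dma_addr_t		dma_iv;
	int			len_in;		/* nents of skreq->src */
	int			len_out;	/* nents of skreq->dst */
	int			num_elements;
	int			err;
};

struct sec_request_el {
	struct list_head	head;		/* on sec_request.elements */
	struct sec_request	*sec_req;	/* back-pointer (the hit above) */
	struct sec_bd_info	req;		/* hardware descriptor (inferred type) */
};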