req_fq            126 drivers/crypto/caam/qi.c 		ret = qman_enqueue(req->drv_ctx->req_fq, &fd);
req_fq            178 drivers/crypto/caam/qi.c 	struct qman_fq *req_fq;
req_fq            181 drivers/crypto/caam/qi.c 	req_fq = kzalloc(sizeof(*req_fq), GFP_ATOMIC);
req_fq            182 drivers/crypto/caam/qi.c 	if (!req_fq)
req_fq            185 drivers/crypto/caam/qi.c 	req_fq->cb.ern = caam_fq_ern_cb;
req_fq            186 drivers/crypto/caam/qi.c 	req_fq->cb.fqs = NULL;
req_fq            189 drivers/crypto/caam/qi.c 				QMAN_FQ_FLAG_TO_DCPORTAL, req_fq);
req_fq            205 drivers/crypto/caam/qi.c 	ret = qman_init_fq(req_fq, fq_sched_flag, &opts);
req_fq            211 drivers/crypto/caam/qi.c 	dev_dbg(qidev, "Allocated request FQ %u for CPU %u\n", req_fq->fqid,
req_fq            213 drivers/crypto/caam/qi.c 	return req_fq;
req_fq            216 drivers/crypto/caam/qi.c 	qman_destroy_fq(req_fq);
req_fq            218 drivers/crypto/caam/qi.c 	kfree(req_fq);
req_fq            329 drivers/crypto/caam/qi.c 	old_fq = drv_ctx->req_fq;
req_fq            340 drivers/crypto/caam/qi.c 	drv_ctx->req_fq = new_fq;
req_fq            348 drivers/crypto/caam/qi.c 		drv_ctx->req_fq = old_fq;
req_fq            379 drivers/crypto/caam/qi.c 		drv_ctx->req_fq = old_fq;
req_fq            447 drivers/crypto/caam/qi.c 	drv_ctx->req_fq = create_caam_req_fq(qidev, drv_ctx->rsp_fq, hwdesc,
req_fq            449 drivers/crypto/caam/qi.c 	if (IS_ERR(drv_ctx->req_fq)) {
req_fq            493 drivers/crypto/caam/qi.c 	if (kill_fq(drv_ctx->qidev, drv_ctx->req_fq))
req_fq             63 drivers/crypto/caam/qi.h 	struct qman_fq *req_fq;
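
Taken together, the qi.c hits above trace the life cycle of a per-driver-context request frame queue: allocation and ERN-callback setup (lines 181-186), creation against the CAAM dedicated-channel portal (line 189), initialisation and scheduling (line 205), the enqueue fast path (line 126), the old/new FQ swap when a shared descriptor is updated (lines 329-379), and teardown (lines 216-218, 493). Below is a minimal sketch of the create path reconstructed from those hits; the prototype is inferred from the call site at line 447, and the qm_mcc_initfq programming (destination work queue, context_a/context_b wiring) is an assumption added for illustration rather than something shown in the listing.

	#include <linux/err.h>
	#include <linux/slab.h>
	#include <soc/fsl/qman.h>

	/* Stand-in for the real ERN handler hooked at qi.c:185. */
	static void caam_fq_ern_cb(struct qman_portal *qm, struct qman_fq *fq,
				   const union qm_mr_entry *msg)
	{
	}

	static struct qman_fq *create_caam_req_fq(struct device *qidev,
						   struct qman_fq *rsp_fq,
						   dma_addr_t hwdesc,
						   int fq_sched_flag)
	{
		struct qm_mcc_initfq opts;
		struct qman_fq *req_fq;
		int ret;

		/* qi.c:181-186 -- allocate the FQ object, hook the ERN callback */
		req_fq = kzalloc(sizeof(*req_fq), GFP_ATOMIC);
		if (!req_fq)
			return ERR_PTR(-ENOMEM);

		req_fq->cb.ern = caam_fq_ern_cb;
		req_fq->cb.fqs = NULL;

		/* qi.c:189 -- dynamic FQID, enqueues target the CAAM DCP portal */
		ret = qman_create_fq(0, QMAN_FQ_FLAG_DYNAMIC_FQID |
					QMAN_FQ_FLAG_TO_DCPORTAL, req_fq);
		if (ret)
			goto create_fail;

		/*
		 * Assumed FQD programming: context_b points at the response FQ,
		 * context_a at the DMA address of the shared descriptor (hwdesc).
		 */
		memset(&opts, 0, sizeof(opts));
		opts.we_mask = cpu_to_be16(QM_INITFQ_WE_DESTWQ |
					   QM_INITFQ_WE_CONTEXTB |
					   QM_INITFQ_WE_CONTEXTA);
		qm_fqd_set_destwq(&opts.fqd, qm_channel_caam, 2);
		opts.fqd.context_b = cpu_to_be32(qman_fq_fqid(rsp_fq));
		qm_fqd_context_a_set64(&opts.fqd, hwdesc);

		/* qi.c:205 -- schedule (or park) the FQ per fq_sched_flag */
		ret = qman_init_fq(req_fq, fq_sched_flag, &opts);
		if (ret)
			goto init_fail;

		return req_fq;			/* qi.c:213 */

	init_fail:
		qman_destroy_fq(req_fq);	/* qi.c:216 */
	create_fail:
		kfree(req_fq);			/* qi.c:218 */
		return ERR_PTR(ret);
	}

The update path implied by lines 329-379 builds a replacement FQ the same way, saves the current drv_ctx->req_fq as old_fq, points drv_ctx->req_fq at the new FQ, and restores old_fq if draining or re-initialisation fails, so in-flight requests never see a half-torn-down queue.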