drv_ctx            69 drivers/crypto/caam/caamalg_qi.c 	struct caam_drv_ctx *drv_ctx[NUM_OP];
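Note: the array above is the only drv_ctx member confirmed by this listing; it sits in the per-tfm context of caamalg_qi.c and holds one QI driver context per operation direction. A minimal sketch of the surrounding structure, assuming the optype enum from qi.h and member names (shared-descriptor buffers, qidev, a lock guarding lazy creation) inferred from the call sites further down:

	/* Hedged sketch; only drv_ctx[NUM_OP] (caamalg_qi.c:69) appears in the
	 * listing, every other member is an assumption based on later call sites.
	 */
	enum optype {
		ENCRYPT,
		DECRYPT,
		NUM_OP
	};

	struct caam_ctx {
		u32 sh_desc_enc[DESC_MAX_USED_LEN];	/* assumed: encrypt shared descriptor */
		u32 sh_desc_dec[DESC_MAX_USED_LEN];	/* assumed: decrypt shared descriptor */
		struct device *qidev;			/* used at caamalg_qi.c:856 */
		spinlock_t lock;			/* assumed: serializes lazy init */
		struct caam_drv_ctx *drv_ctx[NUM_OP];	/* caamalg_qi.c:69 */
	};
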
drv_ctx           250 drivers/crypto/caam/caamalg_qi.c 	if (ctx->drv_ctx[ENCRYPT]) {
drv_ctx           251 drivers/crypto/caam/caamalg_qi.c 		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
drv_ctx           259 drivers/crypto/caam/caamalg_qi.c 	if (ctx->drv_ctx[DECRYPT]) {
drv_ctx           260 drivers/crypto/caam/caamalg_qi.c 		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
drv_ctx           377 drivers/crypto/caam/caamalg_qi.c 	if (ctx->drv_ctx[ENCRYPT]) {
drv_ctx           378 drivers/crypto/caam/caamalg_qi.c 		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
drv_ctx           386 drivers/crypto/caam/caamalg_qi.c 	if (ctx->drv_ctx[DECRYPT]) {
drv_ctx           387 drivers/crypto/caam/caamalg_qi.c 		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
drv_ctx           487 drivers/crypto/caam/caamalg_qi.c 	if (ctx->drv_ctx[ENCRYPT]) {
drv_ctx           488 drivers/crypto/caam/caamalg_qi.c 		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
drv_ctx           496 drivers/crypto/caam/caamalg_qi.c 	if (ctx->drv_ctx[DECRYPT]) {
drv_ctx           497 drivers/crypto/caam/caamalg_qi.c 		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
drv_ctx           595 drivers/crypto/caam/caamalg_qi.c 	if (ctx->drv_ctx[ENCRYPT]) {
drv_ctx           596 drivers/crypto/caam/caamalg_qi.c 		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
drv_ctx           604 drivers/crypto/caam/caamalg_qi.c 	if (ctx->drv_ctx[DECRYPT]) {
drv_ctx           605 drivers/crypto/caam/caamalg_qi.c 		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
drv_ctx           642 drivers/crypto/caam/caamalg_qi.c 	if (ctx->drv_ctx[ENCRYPT]) {
drv_ctx           643 drivers/crypto/caam/caamalg_qi.c 		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
drv_ctx           651 drivers/crypto/caam/caamalg_qi.c 	if (ctx->drv_ctx[DECRYPT]) {
drv_ctx           652 drivers/crypto/caam/caamalg_qi.c 		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
drv_ctx           763 drivers/crypto/caam/caamalg_qi.c 	if (ctx->drv_ctx[ENCRYPT]) {
drv_ctx           764 drivers/crypto/caam/caamalg_qi.c 		ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
drv_ctx           772 drivers/crypto/caam/caamalg_qi.c 	if (ctx->drv_ctx[DECRYPT]) {
drv_ctx           773 drivers/crypto/caam/caamalg_qi.c 		ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
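The blocks at caamalg_qi.c:250-773 above are one and the same rekey pattern, repeated at the end of each setkey handler: if a driver context already exists for a direction, the freshly generated shared descriptor is pushed to it with caam_drv_ctx_update(). A minimal sketch of that pattern, with the sh_desc buffer names and error strings assumed rather than taken from the listing:

	/* Hedged sketch of the tail of a setkey handler in caamalg_qi.c. */
	static int example_setkey_update(struct caam_ctx *ctx)
	{
		int ret;

		if (ctx->drv_ctx[ENCRYPT]) {
			ret = caam_drv_ctx_update(ctx->drv_ctx[ENCRYPT],
						  ctx->sh_desc_enc);	/* assumed buffer name */
			if (ret) {
				dev_err(ctx->qidev, "driver enc context update failed\n");
				return ret;
			}
		}

		if (ctx->drv_ctx[DECRYPT]) {
			ret = caam_drv_ctx_update(ctx->drv_ctx[DECRYPT],
						  ctx->sh_desc_dec);	/* assumed buffer name */
			if (ret) {
				dev_err(ctx->qidev, "driver dec context update failed\n");
				return ret;
			}
		}

		return 0;
	}
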
drv_ctx           839 drivers/crypto/caam/caamalg_qi.c 	struct caam_drv_ctx *drv_ctx = ctx->drv_ctx[type];
drv_ctx           842 drivers/crypto/caam/caamalg_qi.c 	if (unlikely(!drv_ctx)) {
drv_ctx           846 drivers/crypto/caam/caamalg_qi.c 		drv_ctx = ctx->drv_ctx[type];
drv_ctx           847 drivers/crypto/caam/caamalg_qi.c 		if (!drv_ctx) {
drv_ctx           856 drivers/crypto/caam/caamalg_qi.c 			drv_ctx = caam_drv_ctx_init(ctx->qidev, &cpu, desc);
drv_ctx           857 drivers/crypto/caam/caamalg_qi.c 			if (!IS_ERR_OR_NULL(drv_ctx))
drv_ctx           858 drivers/crypto/caam/caamalg_qi.c 				drv_ctx->op_type = type;
drv_ctx           860 drivers/crypto/caam/caamalg_qi.c 			ctx->drv_ctx[type] = drv_ctx;
drv_ctx           866 drivers/crypto/caam/caamalg_qi.c 	return drv_ctx;
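caamalg_qi.c:839-866 above implement lazy, double-checked creation of the driver context the first time a direction is used. A minimal sketch of that logic; the lock and the shared-descriptor selection are assumptions, since those lines are not part of the listing:

	/* Hedged sketch of get_drv_ctx(); the init/op_type/store sequence follows
	 * caamalg_qi.c:846-860, the lock and descriptor choice are assumed.
	 */
	static struct caam_drv_ctx *get_drv_ctx_sketch(struct caam_ctx *ctx,
						       enum optype type)
	{
		struct caam_drv_ctx *drv_ctx = ctx->drv_ctx[type];

		if (unlikely(!drv_ctx)) {
			spin_lock(&ctx->lock);			/* assumed lock */

			/* re-read: another CPU may have created it meanwhile */
			drv_ctx = ctx->drv_ctx[type];
			if (!drv_ctx) {
				u32 *desc = type == ENCRYPT ? ctx->sh_desc_enc :
							      ctx->sh_desc_dec;
				int cpu = smp_processor_id();	/* preemption off under the lock */

				drv_ctx = caam_drv_ctx_init(ctx->qidev, &cpu, desc);
				if (!IS_ERR_OR_NULL(drv_ctx))
					drv_ctx->op_type = type;

				/* stored unconditionally, as at caamalg_qi.c:860 */
				ctx->drv_ctx[type] = drv_ctx;
			}

			spin_unlock(&ctx->lock);
		}

		return drv_ctx;
	}
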
drv_ctx           957 drivers/crypto/caam/caamalg_qi.c 	struct caam_drv_ctx *drv_ctx;
drv_ctx           959 drivers/crypto/caam/caamalg_qi.c 	drv_ctx = get_drv_ctx(ctx, encrypt ? ENCRYPT : DECRYPT);
drv_ctx           960 drivers/crypto/caam/caamalg_qi.c 	if (IS_ERR_OR_NULL(drv_ctx))
drv_ctx           961 drivers/crypto/caam/caamalg_qi.c 		return (struct aead_edesc *)drv_ctx;
drv_ctx          1094 drivers/crypto/caam/caamalg_qi.c 	edesc->drv_req.drv_ctx = drv_ctx;
drv_ctx          1261 drivers/crypto/caam/caamalg_qi.c 	struct caam_drv_ctx *drv_ctx;
drv_ctx          1263 drivers/crypto/caam/caamalg_qi.c 	drv_ctx = get_drv_ctx(ctx, encrypt ? ENCRYPT : DECRYPT);
drv_ctx          1264 drivers/crypto/caam/caamalg_qi.c 	if (IS_ERR_OR_NULL(drv_ctx))
drv_ctx          1265 drivers/crypto/caam/caamalg_qi.c 		return (struct skcipher_edesc *)drv_ctx;
drv_ctx          1360 drivers/crypto/caam/caamalg_qi.c 	edesc->drv_req.drv_ctx = drv_ctx;
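At caamalg_qi.c:1094 and 1360 the freshly built extended descriptor is tied to the driver context it will be submitted on, for the aead and skcipher paths respectively. A minimal sketch of the submission step that follows, assuming the caam_drv_req callback/app_ctx members from qi.h and helper names (aead_done, aead_unmap) that are not confirmed by this listing:

	/* Hedged sketch of submitting an aead request; only .drv_ctx is
	 * confirmed above, the other members and helpers are assumptions.
	 */
	edesc->drv_req.app_ctx = req;		/* assumed: back-pointer to the crypto request */
	edesc->drv_req.cbk = aead_done;		/* assumed completion callback */
	edesc->drv_req.drv_ctx = drv_ctx;	/* caamalg_qi.c:1094 / 1360 */

	ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
	if (!ret) {
		ret = -EINPROGRESS;		/* completion arrives via the response FQ */
	} else {
		aead_unmap(ctx->qidev, edesc, req);	/* assumed cleanup helpers */
		qi_cache_free(edesc);
	}
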
drv_ctx          2454 drivers/crypto/caam/caamalg_qi.c 	ctx->drv_ctx[ENCRYPT] = NULL;
drv_ctx          2455 drivers/crypto/caam/caamalg_qi.c 	ctx->drv_ctx[DECRYPT] = NULL;
drv_ctx          2482 drivers/crypto/caam/caamalg_qi.c 	caam_drv_ctx_rel(ctx->drv_ctx[ENCRYPT]);
drv_ctx          2483 drivers/crypto/caam/caamalg_qi.c 	caam_drv_ctx_rel(ctx->drv_ctx[DECRYPT]);
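caamalg_qi.c:2454-2483 are the two ends of the tfm lifetime: the init path only clears both slots (contexts are created lazily in get_drv_ctx()), and the exit path releases whatever was created. A minimal sketch, with the wrapper function names assumed:

	/* Hedged sketch of the tfm-level lifecycle around drv_ctx[]. */
	static int example_cra_init(struct caam_ctx *ctx)
	{
		ctx->drv_ctx[ENCRYPT] = NULL;	/* caamalg_qi.c:2454 */
		ctx->drv_ctx[DECRYPT] = NULL;	/* caamalg_qi.c:2455 */
		return 0;
	}

	static void example_cra_exit(struct caam_ctx *ctx)
	{
		/* caam_drv_ctx_rel() tolerates NULL and ERR_PTR values (qi.c:489) */
		caam_drv_ctx_rel(ctx->drv_ctx[ENCRYPT]);	/* caamalg_qi.c:2482 */
		caam_drv_ctx_rel(ctx->drv_ctx[DECRYPT]);	/* caamalg_qi.c:2483 */
	}
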
drv_ctx           126 drivers/crypto/caam/qi.c 		ret = qman_enqueue(req->drv_ctx->req_fq, &fd);
drv_ctx           163 drivers/crypto/caam/qi.c 	dma_unmap_single(drv_req->drv_ctx->qidev, qm_fd_addr(fd),
drv_ctx           315 drivers/crypto/caam/qi.c int caam_drv_ctx_update(struct caam_drv_ctx *drv_ctx, u32 *sh_desc)
drv_ctx           320 drivers/crypto/caam/qi.c 	struct device *qidev = drv_ctx->qidev;
drv_ctx           329 drivers/crypto/caam/qi.c 	old_fq = drv_ctx->req_fq;
drv_ctx           332 drivers/crypto/caam/qi.c 	new_fq = create_caam_req_fq(drv_ctx->qidev, drv_ctx->rsp_fq,
drv_ctx           333 drivers/crypto/caam/qi.c 				    drv_ctx->context_a, 0);
drv_ctx           340 drivers/crypto/caam/qi.c 	drv_ctx->req_fq = new_fq;
drv_ctx           348 drivers/crypto/caam/qi.c 		drv_ctx->req_fq = old_fq;
drv_ctx           360 drivers/crypto/caam/qi.c 	drv_ctx->prehdr[0] = cpu_to_caam32((1 << PREHDR_RSLS_SHIFT) |
drv_ctx           362 drivers/crypto/caam/qi.c 	drv_ctx->prehdr[1] = cpu_to_caam32(PREHDR_ABS);
drv_ctx           363 drivers/crypto/caam/qi.c 	memcpy(drv_ctx->sh_desc, sh_desc, desc_bytes(sh_desc));
drv_ctx           364 drivers/crypto/caam/qi.c 	dma_sync_single_for_device(qidev, drv_ctx->context_a,
drv_ctx           365 drivers/crypto/caam/qi.c 				   sizeof(drv_ctx->sh_desc) +
drv_ctx           366 drivers/crypto/caam/qi.c 				   sizeof(drv_ctx->prehdr),
drv_ctx           379 drivers/crypto/caam/qi.c 		drv_ctx->req_fq = old_fq;
drv_ctx           398 drivers/crypto/caam/qi.c 	struct caam_drv_ctx *drv_ctx;
drv_ctx           408 drivers/crypto/caam/qi.c 	drv_ctx = kzalloc(sizeof(*drv_ctx), GFP_ATOMIC);
drv_ctx           409 drivers/crypto/caam/qi.c 	if (!drv_ctx)
drv_ctx           416 drivers/crypto/caam/qi.c 	drv_ctx->prehdr[0] = cpu_to_caam32((1 << PREHDR_RSLS_SHIFT) |
drv_ctx           418 drivers/crypto/caam/qi.c 	drv_ctx->prehdr[1] = cpu_to_caam32(PREHDR_ABS);
drv_ctx           419 drivers/crypto/caam/qi.c 	memcpy(drv_ctx->sh_desc, sh_desc, desc_bytes(sh_desc));
drv_ctx           420 drivers/crypto/caam/qi.c 	size = sizeof(drv_ctx->prehdr) + sizeof(drv_ctx->sh_desc);
drv_ctx           421 drivers/crypto/caam/qi.c 	hwdesc = dma_map_single(qidev, drv_ctx->prehdr, size,
drv_ctx           425 drivers/crypto/caam/qi.c 		kfree(drv_ctx);
drv_ctx           428 drivers/crypto/caam/qi.c 	drv_ctx->context_a = hwdesc;
drv_ctx           441 drivers/crypto/caam/qi.c 	drv_ctx->cpu = *cpu;
drv_ctx           444 drivers/crypto/caam/qi.c 	drv_ctx->rsp_fq = per_cpu(pcpu_qipriv.rsp_fq, drv_ctx->cpu);
drv_ctx           447 drivers/crypto/caam/qi.c 	drv_ctx->req_fq = create_caam_req_fq(qidev, drv_ctx->rsp_fq, hwdesc,
drv_ctx           449 drivers/crypto/caam/qi.c 	if (IS_ERR(drv_ctx->req_fq)) {
drv_ctx           452 drivers/crypto/caam/qi.c 		kfree(drv_ctx);
drv_ctx           456 drivers/crypto/caam/qi.c 	drv_ctx->qidev = qidev;
drv_ctx           457 drivers/crypto/caam/qi.c 	return drv_ctx;
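qi.c:398-457 build a driver context: the preheader and shared descriptor are copied in and DMA-mapped as one region (context_a), a CPU is chosen and recorded (qi.c:441), its per-CPU response FQ is attached (qi.c:444), and a request FQ toward the SEC is created (qi.c:447). From the caller's side the second argument is an in/out CPU hint; a minimal sketch, where the remapping of an unusable hint is an assumption:

	/* Hedged caller-side sketch of caam_drv_ctx_init(). */
	int cpu = raw_smp_processor_id();	/* preferred CPU; may be rewritten (assumed) */
	struct caam_drv_ctx *drv_ctx;

	drv_ctx = caam_drv_ctx_init(qidev, &cpu, sh_desc);
	if (IS_ERR(drv_ctx))
		return PTR_ERR(drv_ctx);

	/* completions for this context arrive on the response FQ of that CPU (qi.c:444) */
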
drv_ctx           487 drivers/crypto/caam/qi.c void caam_drv_ctx_rel(struct caam_drv_ctx *drv_ctx)
drv_ctx           489 drivers/crypto/caam/qi.c 	if (IS_ERR_OR_NULL(drv_ctx))
drv_ctx           493 drivers/crypto/caam/qi.c 	if (kill_fq(drv_ctx->qidev, drv_ctx->req_fq))
drv_ctx           494 drivers/crypto/caam/qi.c 		dev_err(drv_ctx->qidev, "Crypto session req FQ kill failed\n");
drv_ctx           496 drivers/crypto/caam/qi.c 	dma_unmap_single(drv_ctx->qidev, drv_ctx->context_a,
drv_ctx           497 drivers/crypto/caam/qi.c 			 sizeof(drv_ctx->sh_desc) + sizeof(drv_ctx->prehdr),
drv_ctx           499 drivers/crypto/caam/qi.c 	kfree(drv_ctx);
drv_ctx           597 drivers/crypto/caam/qi.c 	dma_unmap_single(drv_req->drv_ctx->qidev, qm_fd_addr(fd),
drv_ctx            86 drivers/crypto/caam/qi.h 	struct caam_drv_ctx *drv_ctx;
drv_ctx           129 drivers/crypto/caam/qi.h bool caam_drv_ctx_busy(struct caam_drv_ctx *drv_ctx);
drv_ctx           141 drivers/crypto/caam/qi.h int caam_drv_ctx_update(struct caam_drv_ctx *drv_ctx, u32 *sh_desc);
drv_ctx           147 drivers/crypto/caam/qi.h void caam_drv_ctx_rel(struct caam_drv_ctx *drv_ctx);
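The prototypes above (qi.h:129-147), together with caam_drv_ctx_init() and caam_qi_enqueue() from the same header, form the per-session interface that caamalg_qi.c drives. A minimal end-to-end sketch of the lifecycle, assuming caller-provided shared descriptors and wrapper names that are not part of the listing:

	/* Hedged lifecycle sketch of the caam_drv_ctx_* API. */
	#include "qi.h"

	static struct caam_drv_ctx *session_start(struct device *qidev, u32 *sh_desc)
	{
		int cpu = raw_smp_processor_id();	/* CPU hint, see qi.c:441-444 */

		/* DMA-maps prehdr + sh_desc and creates the request FQ */
		return caam_drv_ctx_init(qidev, &cpu, sh_desc);
	}

	static int session_rekey(struct caam_drv_ctx *drv_ctx, u32 *new_sh_desc)
	{
		/* swaps in a fresh request FQ, then republishes the descriptor (qi.c:315-379) */
		return caam_drv_ctx_update(drv_ctx, new_sh_desc);
	}

	static void session_end(struct caam_drv_ctx *drv_ctx)
	{
		/* safe on NULL/ERR_PTR; kills the request FQ and unmaps context_a */
		caam_drv_ctx_rel(drv_ctx);
	}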