drv_req   807 drivers/crypto/caam/caamalg_qi.c    struct caam_drv_req drv_req;
drv_req   827 drivers/crypto/caam/caamalg_qi.c    struct caam_drv_req drv_req;
drv_req   914 drivers/crypto/caam/caamalg_qi.c    static void aead_done(struct caam_drv_req *drv_req, u32 status)
drv_req   918 drivers/crypto/caam/caamalg_qi.c    struct aead_request *aead_req = drv_req->app_ctx;
drv_req   928 drivers/crypto/caam/caamalg_qi.c    edesc = container_of(drv_req, typeof(*edesc), drv_req);
drv_req  1092 drivers/crypto/caam/caamalg_qi.c    edesc->drv_req.app_ctx = req;
drv_req  1093 drivers/crypto/caam/caamalg_qi.c    edesc->drv_req.cbk = aead_done;
drv_req  1094 drivers/crypto/caam/caamalg_qi.c    edesc->drv_req.drv_ctx = drv_ctx;
drv_req  1136 drivers/crypto/caam/caamalg_qi.c    fd_sgt = &edesc->drv_req.fd_sgt[0];
drv_req  1174 drivers/crypto/caam/caamalg_qi.c    ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
drv_req  1207 drivers/crypto/caam/caamalg_qi.c    static void skcipher_done(struct caam_drv_req *drv_req, u32 status)
drv_req  1210 drivers/crypto/caam/caamalg_qi.c    struct skcipher_request *req = drv_req->app_ctx;
drv_req  1219 drivers/crypto/caam/caamalg_qi.c    edesc = container_of(drv_req, typeof(*edesc), drv_req);
drv_req  1358 drivers/crypto/caam/caamalg_qi.c    edesc->drv_req.app_ctx = req;
drv_req  1359 drivers/crypto/caam/caamalg_qi.c    edesc->drv_req.cbk = skcipher_done;
drv_req  1360 drivers/crypto/caam/caamalg_qi.c    edesc->drv_req.drv_ctx = drv_ctx;
drv_req  1381 drivers/crypto/caam/caamalg_qi.c    fd_sgt = &edesc->drv_req.fd_sgt[0];
drv_req  1416 drivers/crypto/caam/caamalg_qi.c    ret = caam_qi_enqueue(ctx->qidev, &edesc->drv_req);
drv_req   145 drivers/crypto/caam/qi.c            struct caam_drv_req *drv_req;
drv_req   156 drivers/crypto/caam/qi.c            drv_req = caam_iova_to_virt(priv->domain, qm_fd_addr_get64(fd));
drv_req   157 drivers/crypto/caam/qi.c            if (!drv_req) {
drv_req   163 drivers/crypto/caam/qi.c            dma_unmap_single(drv_req->drv_ctx->qidev, qm_fd_addr(fd),
drv_req   164 drivers/crypto/caam/qi.c            sizeof(drv_req->fd_sgt), DMA_BIDIRECTIONAL);
drv_req   167 drivers/crypto/caam/qi.c            drv_req->cbk(drv_req, be32_to_cpu(fd->status));
drv_req   169 drivers/crypto/caam/qi.c            drv_req->cbk(drv_req, JRSTA_SSRC_QI);
drv_req   563 drivers/crypto/caam/qi.c            struct caam_drv_req *drv_req;
drv_req   590 drivers/crypto/caam/qi.c            drv_req = caam_iova_to_virt(priv->domain, qm_fd_addr_get64(fd));
drv_req   591 drivers/crypto/caam/qi.c            if (unlikely(!drv_req)) {
drv_req   597 drivers/crypto/caam/qi.c            dma_unmap_single(drv_req->drv_ctx->qidev, qm_fd_addr(fd),
drv_req   598 drivers/crypto/caam/qi.c            sizeof(drv_req->fd_sgt), DMA_BIDIRECTIONAL);
drv_req   600 drivers/crypto/caam/qi.c            drv_req->cbk(drv_req, status);
drv_req    34 drivers/crypto/caam/qi.h            typedef void (*caam_qi_cbk)(struct caam_drv_req *drv_req, u32 status);
drv_req   904 drivers/gpu/drm/i915/display/intel_display_power.c  u32 drv_req = I915_READ(regs->driver);
drv_req   906 drivers/gpu/drm/i915/display/intel_display_power.c  if (!(drv_req & mask))
drv_req   907 drivers/gpu/drm/i915/display/intel_display_power.c  I915_WRITE(regs->driver, drv_req | mask);
drv_req   148 drivers/ide/ide-io.c                u8 drv_req = ata_misc_request(rq) && rq->rq_disk;
drv_req   153 drivers/ide/ide-io.c                if ((media == ide_floppy || media == ide_tape) && drv_req) {
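
Taken together, the caam entries above trace one request/completion pattern: a driver-private extended descriptor embeds a struct caam_drv_req by value; the submit path fills in cbk, drv_ctx and app_ctx and hands the request to caam_qi_enqueue() (caamalg_qi.c:1092-1094, 1174); the dequeue path in qi.c maps the frame descriptor address back to the request and invokes drv_req->cbk (qi.c:167, 600); and the completion handlers recover the enclosing descriptor with container_of() (caamalg_qi.c:928, 1219). Below is a minimal user-space sketch of that embed-and-recover idiom, not the kernel code itself: the struct layout is abridged (fd_sgt omitted), and my_edesc, fake_done and some_driver_state are hypothetical names introduced for illustration.

    #include <stdio.h>
    #include <stddef.h>

    typedef unsigned int u32;               /* stand-in for the kernel's u32 */

    struct caam_drv_req;

    /* Callback contract, as declared at qi.h:34. */
    typedef void (*caam_qi_cbk)(struct caam_drv_req *drv_req, u32 status);

    /* Abridged model of struct caam_drv_req. */
    struct caam_drv_req {
            caam_qi_cbk cbk;                /* completion callback, e.g. aead_done */
            void *drv_ctx;                  /* per-algorithm driver context */
            void *app_ctx;                  /* crypto API request being serviced */
    };

    /* Hypothetical extended descriptor embedding the request by value,
     * the way caamalg_qi.c's edesc structures do. */
    struct my_edesc {
            int some_driver_state;
            struct caam_drv_req drv_req;    /* embedded, not a pointer */
    };

    /* Minimal container_of, as used at caamalg_qi.c:928 and 1219. */
    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    /* Completion handler: recover the enclosing descriptor from the
     * embedded request, mirroring aead_done()/skcipher_done(). */
    static void fake_done(struct caam_drv_req *drv_req, u32 status)
    {
            struct my_edesc *edesc =
                    container_of(drv_req, struct my_edesc, drv_req);

            printf("status=0x%x state=%d app_ctx=%p\n",
                   status, edesc->some_driver_state, drv_req->app_ctx);
    }

    int main(void)
    {
            struct my_edesc edesc = { .some_driver_state = 42 };

            /* Submit side, cf. caamalg_qi.c:1092-1094. */
            edesc.drv_req.app_ctx = &edesc;  /* stand-in for the aead_request */
            edesc.drv_req.cbk = fake_done;
            edesc.drv_req.drv_ctx = NULL;

            /* Dequeue side invokes the callback, cf. qi.c:600. */
            edesc.drv_req.cbk(&edesc.drv_req, 0);
            return 0;
    }

Embedding the request by value is what makes the container_of() recovery in the completion handlers safe: the request and its descriptor share one allocation, so no back-pointer has to be carried through the hardware queue. The i915 and ide-io.c hits at the end of the listing are unrelated local variables that merely reuse the drv_req name.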