fcp_req   81 drivers/nvme/host/fc.c        struct nvmefc_fcp_req fcp_req;
fcp_req  191 drivers/nvme/host/fc.c        return container_of(fcpreq, struct nvme_fc_fcp_op, fcp_req);
fcp_req 1481 drivers/nvme/host/fc.c        fc_dma_unmap_single(ctrl->lport->dev, op->fcp_req.rspdma,
fcp_req 1483 drivers/nvme/host/fc.c        fc_dma_unmap_single(ctrl->lport->dev, op->fcp_req.cmddma,
fcp_req 1518 drivers/nvme/host/fc.c        &op->fcp_req);
fcp_req 1558 drivers/nvme/host/fc.c        struct nvmefc_fcp_req *freq = &op->fcp_req;
fcp_req 1607 drivers/nvme/host/fc.c        fc_dma_sync_single_for_cpu(ctrl->lport->dev, op->fcp_req.rspdma,
fcp_req 1724 drivers/nvme/host/fc.c        op->fcp_req.cmdaddr = &op->cmd_iu;
fcp_req 1725 drivers/nvme/host/fc.c        op->fcp_req.cmdlen = sizeof(op->cmd_iu);
fcp_req 1726 drivers/nvme/host/fc.c        op->fcp_req.rspaddr = &op->rsp_iu;
fcp_req 1727 drivers/nvme/host/fc.c        op->fcp_req.rsplen = sizeof(op->rsp_iu);
fcp_req 1728 drivers/nvme/host/fc.c        op->fcp_req.done = nvme_fc_fcpio_done;
fcp_req 1738 drivers/nvme/host/fc.c        op->fcp_req.cmddma = fc_dma_map_single(ctrl->lport->dev,
fcp_req 1740 drivers/nvme/host/fc.c        if (fc_dma_mapping_error(ctrl->lport->dev, op->fcp_req.cmddma)) {
fcp_req 1747 drivers/nvme/host/fc.c        op->fcp_req.rspdma = fc_dma_map_single(ctrl->lport->dev,
fcp_req 1750 drivers/nvme/host/fc.c        if (fc_dma_mapping_error(ctrl->lport->dev, op->fcp_req.rspdma)) {
fcp_req 1774 drivers/nvme/host/fc.c        op->op.fcp_req.first_sgl = &op->sgl[0];
fcp_req 1775 drivers/nvme/host/fc.c        op->op.fcp_req.private = &op->priv[0];
fcp_req 1807 drivers/nvme/host/fc.c        aen_op->fcp_req.private = private;
fcp_req 1825 drivers/nvme/host/fc.c        if (!aen_op->fcp_req.private)
fcp_req 1830 drivers/nvme/host/fc.c        kfree(aen_op->fcp_req.private);
fcp_req 1831 drivers/nvme/host/fc.c        aen_op->fcp_req.private = NULL;
fcp_req 2133 drivers/nvme/host/fc.c        struct nvmefc_fcp_req *freq = &op->fcp_req;
fcp_req 2168 drivers/nvme/host/fc.c        struct nvmefc_fcp_req *freq = &op->fcp_req;
fcp_req 2239 drivers/nvme/host/fc.c        op->fcp_req.payload_length = data_len;
fcp_req 2240 drivers/nvme/host/fc.c        op->fcp_req.io_dir = io_dir;
fcp_req 2241 drivers/nvme/host/fc.c        op->fcp_req.transferred_length = 0;
fcp_req 2242 drivers/nvme/host/fc.c        op->fcp_req.rcv_rsplen = 0;
fcp_req 2243 drivers/nvme/host/fc.c        op->fcp_req.status = NVME_SC_SUCCESS;
fcp_req 2244 drivers/nvme/host/fc.c        op->fcp_req.sqid = cpu_to_le16(queue->qnum);
fcp_req 2276 drivers/nvme/host/fc.c        fc_dma_sync_single_for_device(ctrl->lport->dev, op->fcp_req.cmddma,
fcp_req 2287 drivers/nvme/host/fc.c        queue->lldd_handle, &op->fcp_req);
fcp_req  114 drivers/nvme/target/fc.c      struct nvmefc_tgt_fcp_req *fcp_req;
fcp_req  552 drivers/nvme/target/fc.c      fcpreq = deferfcp->fcp_req;
fcp_req  726 drivers/nvme/target/fc.c      deferfcp->fcp_req);
fcp_req  729 drivers/nvme/target/fc.c      deferfcp->fcp_req);
fcp_req  732 drivers/nvme/target/fc.c      deferfcp->fcp_req);
fcp_req 2363 drivers/nvme/target/fc.c      deferfcp->fcp_req = fcpreq;
fcp_req  724 drivers/scsi/lpfc/lpfc_nvmet.c rsp = &ctxp->ctx.fcp_req;
fcp_req  935 drivers/scsi/lpfc/lpfc_nvmet.c container_of(rsp, struct lpfc_nvmet_rcv_ctx, ctx.fcp_req);
fcp_req 1069 drivers/scsi/lpfc/lpfc_nvmet.c container_of(req, struct lpfc_nvmet_rcv_ctx, ctx.fcp_req);
fcp_req 1131 drivers/scsi/lpfc/lpfc_nvmet.c container_of(rsp, struct lpfc_nvmet_rcv_ctx, ctx.fcp_req);
fcp_req 1174 drivers/scsi/lpfc/lpfc_nvmet.c container_of(rsp, struct lpfc_nvmet_rcv_ctx, ctx.fcp_req);
fcp_req 1666 drivers/scsi/lpfc/lpfc_nvmet.c req = &ctxp->ctx.fcp_req;
fcp_req 1712 drivers/scsi/lpfc/lpfc_nvmet.c rsp = &ctxp->ctx.fcp_req;
fcp_req 1786 drivers/scsi/lpfc/lpfc_nvmet.c &ctxp->ctx.fcp_req);
fcp_req 1891 drivers/scsi/lpfc/lpfc_nvmet.c if (ctxp->ctx.fcp_req.op == NVMET_FCOP_RSP)
fcp_req 2088 drivers/scsi/lpfc/lpfc_nvmet.c rc = nvmet_fc_rcv_fcp_req(phba->targetport, &ctxp->ctx.fcp_req,
fcp_req 2590 drivers/scsi/lpfc/lpfc_nvmet.c struct nvmefc_tgt_fcp_req *rsp = &ctxp->ctx.fcp_req;
fcp_req  109 drivers/scsi/lpfc/lpfc_nvmet.h struct nvmefc_tgt_fcp_req fcp_req;
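The host-side hits all follow one pattern: drivers/nvme/host/fc.c embeds the LLDD-facing struct nvmefc_fcp_req as the fcp_req member of its per-command op structure (fc.c:81), fills in the command/response IU addresses, DMA handles and done callback before issuing, and on completion recovers the op from the request pointer with container_of() (fc.c:191). Below is a minimal sketch of that embedding, assuming trimmed field lists and an illustrative helper name; only the member names that appear in the listing are taken from the source.

/* Sketch only: fields trimmed, types simplified, helper name illustrative. */
#include <linux/kernel.h>	/* container_of() */
#include <linux/types.h>	/* u32, dma_addr_t */

struct nvmefc_fcp_req {
	void		*cmdaddr;			/* fc.c:1724 */
	void		*rspaddr;			/* fc.c:1726 */
	u32		cmdlen;
	u32		rsplen;
	dma_addr_t	cmddma;				/* fc.c:1738 */
	dma_addr_t	rspdma;				/* fc.c:1747 */
	void		(*done)(struct nvmefc_fcp_req *req);	/* fc.c:1728 */
	/* ... first_sgl, private, payload_length, io_dir, status, sqid ... */
};

struct nvme_fc_fcp_op {
	/* ... driver-private per-command state ... */
	struct nvmefc_fcp_req	fcp_req;	/* handed to the LLDD */
};

/* When the LLDD completes a request it passes back the nvmefc_fcp_req
 * pointer; the host driver gets its own op back via the embedded member
 * (same construct as the fc.c:191 hit; function name here is made up).
 */
static inline struct nvme_fc_fcp_op *
nvme_fcp_req_to_op(struct nvmefc_fcp_req *fcpreq)
{
	return container_of(fcpreq, struct nvme_fc_fcp_op, fcp_req);
}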
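The lpfc target-side hits use the same idea one level deeper: struct nvmefc_tgt_fcp_req is embedded as ctx.fcp_req inside struct lpfc_nvmet_rcv_ctx (lpfc_nvmet.h:109), handed to nvmet_fc_rcv_fcp_req() (lpfc_nvmet.c:2088), and mapped back to the receive context with container_of() through the nested member path ctx.fcp_req. A sketch under stated assumptions: the surrounding union named ctx and the field types are inferred for illustration, the listing only shows the member path and the op field.

/* Sketch only: layout of ctx and field types are assumptions. */
#include <linux/kernel.h>	/* container_of() */
#include <linux/types.h>

struct nvmefc_tgt_fcp_req {
	u8	op;		/* compared against NVMET_FCOP_RSP at lpfc_nvmet.c:1891 */
	/* ... transport <-> LLDD request fields trimmed ... */
};

struct lpfc_nvmet_rcv_ctx {
	union {
		struct nvmefc_tgt_fcp_req	fcp_req;	/* lpfc_nvmet.h:109 */
		/* ... other request variants assumed ... */
	} ctx;
	/* ... driver-private state ... */
};

/* container_of() accepts a nested member designator, so a callback that
 * receives the transport request can step back out to the lpfc receive
 * context (same construct as the lpfc_nvmet.c:935/1069/1131/1174 hits;
 * helper name here is made up).
 */
static inline struct lpfc_nvmet_rcv_ctx *
lpfc_rsp_to_ctxp(struct nvmefc_tgt_fcp_req *rsp)
{
	return container_of(rsp, struct lpfc_nvmet_rcv_ctx, ctx.fcp_req);
}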