cur_sg 132 drivers/fpga/zynq-fpga.c struct scatterlist *cur_sg;
cur_sg 167 drivers/fpga/zynq-fpga.c while (priv->cur_sg) {
cur_sg 172 drivers/fpga/zynq-fpga.c addr = sg_dma_address(priv->cur_sg);
cur_sg 173 drivers/fpga/zynq-fpga.c len = sg_dma_len(priv->cur_sg);
cur_sg 181 drivers/fpga/zynq-fpga.c priv->cur_sg = NULL;
cur_sg 183 drivers/fpga/zynq-fpga.c priv->cur_sg = sg_next(priv->cur_sg);
cur_sg 199 drivers/fpga/zynq-fpga.c if (first && priv->cur_sg) {
cur_sg 202 drivers/fpga/zynq-fpga.c } else if (!priv->cur_sg) {
cur_sg 224 drivers/fpga/zynq-fpga.c (intr_status & IXR_DMA_DONE_MASK) && priv->cur_sg) {
cur_sg 426 drivers/fpga/zynq-fpga.c priv->cur_sg = sgt->sgl;
cur_sg 435 drivers/fpga/zynq-fpga.c priv->cur_sg = NULL;
cur_sg 453 drivers/fpga/zynq-fpga.c if (priv->cur_sg ||
cur_sg 498 drivers/gpu/drm/via/via_dmablit.c drm_via_sg_info_t *cur_sg;
cur_sg 516 drivers/gpu/drm/via/via_dmablit.c cur_sg = blitq->blits[cur_released];
cur_sg 523 drivers/gpu/drm/via/via_dmablit.c via_free_sg_info(dev->pdev, cur_sg);
cur_sg 524 drivers/gpu/drm/via/via_dmablit.c kfree(cur_sg);
cur_sg 134 drivers/mmc/host/moxart-mmc.c struct scatterlist *cur_sg;
cur_sg 154 drivers/mmc/host/moxart-mmc.c host->cur_sg = data->sg;
cur_sg 156 drivers/mmc/host/moxart-mmc.c host->data_remain = host->cur_sg->length;
cur_sg 167 drivers/mmc/host/moxart-mmc.c host->cur_sg++;
cur_sg 171 drivers/mmc/host/moxart-mmc.c host->data_remain = host->cur_sg->length;
cur_sg 314 drivers/mmc/host/moxart-mmc.c sgp = sg_virt(host->cur_sg);
cur_sg 240 drivers/mmc/host/wbsd.c host->cur_sg = data->sg;
cur_sg 244 drivers/mmc/host/wbsd.c host->remain = host->cur_sg->length;
cur_sg 252 drivers/mmc/host/wbsd.c host->cur_sg++;
cur_sg 260 drivers/mmc/host/wbsd.c host->remain = host->cur_sg->length;
cur_sg 268 drivers/mmc/host/wbsd.c return kmap_atomic(sg_page(host->cur_sg)) + host->cur_sg->offset;
cur_sg 151 drivers/mmc/host/wbsd.h struct scatterlist *cur_sg; /* Current SG entry */
cur_sg 70 drivers/nvme/target/tcp.c struct scatterlist *cur_sg;
cur_sg 340 drivers/nvme/target/tcp.c cmd->cur_sg = cmd->req.sg;
cur_sg 523 drivers/nvme/target/tcp.c while (cmd->cur_sg) {
cur_sg 524 drivers/nvme/target/tcp.c struct page *page = sg_page(cmd->cur_sg);
cur_sg 525 drivers/nvme/target/tcp.c u32 left = cmd->cur_sg->length - cmd->offset;
cur_sg 542 drivers/nvme/target/tcp.c if (cmd->offset == cmd->cur_sg->length) {
cur_sg 543 drivers/nvme/target/tcp.c cmd->cur_sg = sg_next(cmd->cur_sg);
cur_sg 397 drivers/scsi/esp_scsi.c spriv->cur_sg = sg;
cur_sg 411 drivers/scsi/esp_scsi.c return sg_dma_address(p->cur_sg) +
cur_sg 412 drivers/scsi/esp_scsi.c (sg_dma_len(p->cur_sg) -
cur_sg 450 drivers/scsi/esp_scsi.c p->prv_sg = p->cur_sg;
cur_sg 451 drivers/scsi/esp_scsi.c p->cur_sg = sg_next(p->cur_sg);
cur_sg 452 drivers/scsi/esp_scsi.c p->cur_residue = sg_dma_len(p->cur_sg);
cur_sg 473 drivers/scsi/esp_scsi.c ent->saved_cur_sg = spriv->cur_sg;
cur_sg 488 drivers/scsi/esp_scsi.c spriv->cur_sg = ent->saved_cur_sg;
cur_sg 1363 drivers/scsi/esp_scsi.c ptr = scsi_kmap_atomic_sg(p->cur_sg, p->num_sg,
cur_sg 1655 drivers/scsi/esp_scsi.c if (spriv->cur_residue == sg_dma_len(spriv->cur_sg)) {
cur_sg 1656 drivers/scsi/esp_scsi.c spriv->cur_sg = spriv->prv_sg;
cur_sg 255 drivers/scsi/esp_scsi.h struct scatterlist *cur_sg;
cur_sg 4560 drivers/scsi/qla2xxx/qla_def.h struct scatterlist *cur_sg; /* IN */
cur_sg 855 drivers/scsi/qla2xxx/qla_iocb.c sg = sgx->cur_sg;
cur_sg 879 drivers/scsi/qla2xxx/qla_iocb.c sgx->cur_sg = sg;
cur_sg 910 drivers/scsi/qla2xxx/qla_iocb.c sgx.cur_sg = scsi_sglist(cmd);
cur_sg 917 drivers/scsi/qla2xxx/qla_iocb.c sgx.cur_sg = tc->sg;
cur_sg 1788 drivers/scsi/qla2xxx/qla_iocb.c sgx.cur_sg = scsi_sglist(cmd);
cur_sg 2123 drivers/scsi/qla2xxx/qla_iocb.c sgx.cur_sg = scsi_sglist(cmd);
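The hits above share one idiom: cur_sg is a cursor over a struct scatterlist, pointed at the head of the list (zynq-fpga.c:426, wbsd.c:240, tcp.c:340), consumed one entry at a time, and advanced with sg_next() until it goes NULL. Below is a minimal sketch of that pattern, assuming a hypothetical driver context; struct my_dev, my_dev_start() and my_dev_issue_all() are illustrative names, not taken from any of the listed drivers, and the step that programs the DMA engine is elided.

#include <linux/printk.h>
#include <linux/scatterlist.h>

struct my_dev {
	struct scatterlist *cur_sg;	/* next unprocessed SG entry, NULL when done */
};

/* Point the cursor at the head of a DMA-mapped scatterlist. */
static void my_dev_start(struct my_dev *priv, struct sg_table *sgt)
{
	priv->cur_sg = sgt->sgl;
}

/* Walk the list, one DMA segment per entry; sg_next() returns NULL at the end. */
static void my_dev_issue_all(struct my_dev *priv)
{
	while (priv->cur_sg) {
		dma_addr_t addr = sg_dma_address(priv->cur_sg);
		unsigned int len = sg_dma_len(priv->cur_sg);

		/* a real driver would hand addr/len to its DMA engine here */
		pr_debug("segment: %pad + %u\n", &addr, len);

		priv->cur_sg = sg_next(priv->cur_sg);
	}
}

Two variations are visible in the listing. wbsd.c:252 and moxart-mmc.c:167 advance with host->cur_sg++, which is only safe because those drivers use a flat scatterlist array; sg_next() additionally follows chained lists. And the PIO paths do not use DMA addresses at all: they map the current entry into the CPU's address space via sg_virt(host->cur_sg) (moxart-mmc.c:314) or kmap_atomic(sg_page(host->cur_sg)) + host->cur_sg->offset (wbsd.c:268) and copy data by hand.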