nseg              990 drivers/atm/fore200e.c     for (i = 0; i < rpd->nseg; i++)
nseg             1010 drivers/atm/fore200e.c     for (i = 0; i < rpd->nseg; i++) {
nseg             1060 drivers/atm/fore200e.c     for (i = 0; i < rpd->nseg; i++) {
nseg             1651 drivers/atm/fore200e.c     tpd->spec.nseg   = 1;
nseg              114 drivers/atm/fore200e.h         u32               nseg   :  8,    /* number of transmit segments */
nseg              163 drivers/atm/fore200e.h     u32               nseg;              /* number of receive segments  */
nseg              962 drivers/block/xen-blkback/blkback.c 	int indirect_grefs, rc, n, nseg, i;
nseg              965 drivers/block/xen-blkback/blkback.c 	nseg = pending_req->nr_segs;
nseg              966 drivers/block/xen-blkback/blkback.c 	indirect_grefs = INDIRECT_PAGES(nseg);
nseg              976 drivers/block/xen-blkback/blkback.c 	for (n = 0, i = 0; n < nseg; n++) {
nseg             1230 drivers/block/xen-blkback/blkback.c 	unsigned int nseg;
nseg             1276 drivers/block/xen-blkback/blkback.c 	nseg = req->operation == BLKIF_OP_INDIRECT ?
nseg             1279 drivers/block/xen-blkback/blkback.c 	if (unlikely(nseg == 0 && operation_flags != REQ_PREFLUSH) ||
nseg             1281 drivers/block/xen-blkback/blkback.c 		     (nseg > BLKIF_MAX_SEGMENTS_PER_REQUEST)) ||
nseg             1283 drivers/block/xen-blkback/blkback.c 		     (nseg > MAX_INDIRECT_SEGMENTS))) {
nseg             1284 drivers/block/xen-blkback/blkback.c 		pr_debug("Bad number of segments in request (%d)\n", nseg);
nseg             1295 drivers/block/xen-blkback/blkback.c 	pending_req->nr_segs   = nseg;
nseg             1300 drivers/block/xen-blkback/blkback.c 		for (i = 0; i < nseg; i++) {
nseg             1331 drivers/block/xen-blkback/blkback.c 	for (i = 0; i < nseg; i++) {
nseg             1362 drivers/block/xen-blkback/blkback.c 	for (i = 0; i < nseg; i++) {
nseg             1369 drivers/block/xen-blkback/blkback.c 			int nr_iovecs = min_t(int, (nseg-i), BIO_MAX_PAGES);
nseg             2223 drivers/net/ethernet/atheros/atlx/atl1.c 			int i, nseg;
nseg             2226 drivers/net/ethernet/atheros/atlx/atl1.c 			nseg = (data_len + ATL1_MAX_TX_BUF_LEN - 1) /
nseg             2228 drivers/net/ethernet/atheros/atlx/atl1.c 			for (i = 0; i < nseg; i++) {
nseg             2260 drivers/net/ethernet/atheros/atlx/atl1.c 		u16 i, nseg;
nseg             2264 drivers/net/ethernet/atheros/atlx/atl1.c 		nseg = (buf_len + ATL1_MAX_TX_BUF_LEN - 1) /
nseg             2266 drivers/net/ethernet/atheros/atlx/atl1.c 		for (i = 0; i < nseg; i++) {
nseg              509 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 	int i, ret, nseg;
nseg              514 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 	nseg = DIV_ROUND_UP(max_size, chunk_size);
nseg              516 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 	chunks = kzalloc(array_size(sizeof(*chunks), nseg), GFP_KERNEL);
nseg              522 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 	for (i = 0; i < nseg; i++) {
nseg              544 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 	for (i = 0; i < nseg; i++) {
nseg              565 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 	desc_sz = struct_size(desc, descs, nseg);
nseg              572 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 	desc->chunk_cnt = cpu_to_le32(nseg);
nseg              573 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 	for (i = 0; i < nseg; i++) {
nseg              605 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 	i = nseg;
nseg              610 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 	i = nseg;
nseg              359 drivers/nvme/host/pci.c 		unsigned int size, unsigned int nseg, bool use_sgl)
nseg              364 drivers/nvme/host/pci.c 		alloc_size = sizeof(__le64 *) * nvme_pci_npages_sgl(nseg);
nseg              368 drivers/nvme/host/pci.c 	return alloc_size + sizeof(struct scatterlist) * nseg;
nseg              507 drivers/nvme/host/pci.c 	int nseg = blk_rq_nr_phys_segments(req);
nseg              510 drivers/nvme/host/pci.c 	if (nseg == 0)
nseg              513 drivers/nvme/host/pci.c 	avg_seg_size = DIV_ROUND_UP(blk_rq_payload_bytes(req), nseg);
nseg              226 drivers/scsi/aacraid/aachba.c 				int pages, int nseg, int nseg_new);
nseg             3911 drivers/scsi/aacraid/aachba.c 	int nseg;
nseg             3920 drivers/scsi/aacraid/aachba.c 	nseg = scsi_dma_map(scsicmd);
nseg             3921 drivers/scsi/aacraid/aachba.c 	if (nseg <= 0)
nseg             3922 drivers/scsi/aacraid/aachba.c 		return nseg;
nseg             3924 drivers/scsi/aacraid/aachba.c 	psg->count = cpu_to_le32(nseg);
nseg             3926 drivers/scsi/aacraid/aachba.c 	scsi_for_each_sg(scsicmd, sg, nseg, i) {
nseg             3952 drivers/scsi/aacraid/aachba.c 	int nseg;
nseg             3962 drivers/scsi/aacraid/aachba.c 	nseg = scsi_dma_map(scsicmd);
nseg             3963 drivers/scsi/aacraid/aachba.c 	if (nseg <= 0)
nseg             3964 drivers/scsi/aacraid/aachba.c 		return nseg;
nseg             3966 drivers/scsi/aacraid/aachba.c 	scsi_for_each_sg(scsicmd, sg, nseg, i) {
nseg             3974 drivers/scsi/aacraid/aachba.c 	psg->count = cpu_to_le32(nseg);
nseg             3994 drivers/scsi/aacraid/aachba.c 	int nseg;
nseg             4007 drivers/scsi/aacraid/aachba.c 	nseg = scsi_dma_map(scsicmd);
nseg             4008 drivers/scsi/aacraid/aachba.c 	if (nseg <= 0)
nseg             4009 drivers/scsi/aacraid/aachba.c 		return nseg;
nseg             4011 drivers/scsi/aacraid/aachba.c 	scsi_for_each_sg(scsicmd, sg, nseg, i) {
nseg             4022 drivers/scsi/aacraid/aachba.c 	psg->count = cpu_to_le32(nseg);
nseg             4043 drivers/scsi/aacraid/aachba.c 	int nseg;
nseg             4048 drivers/scsi/aacraid/aachba.c 	nseg = scsi_dma_map(scsicmd);
nseg             4049 drivers/scsi/aacraid/aachba.c 	if (nseg <= 0)
nseg             4050 drivers/scsi/aacraid/aachba.c 		return nseg;
nseg             4052 drivers/scsi/aacraid/aachba.c 	scsi_for_each_sg(scsicmd, sg, nseg, i) {
nseg             4068 drivers/scsi/aacraid/aachba.c 		} else if ((i+1) < nseg && cur_size != rio2->sgeNominalSize) {
nseg             4084 drivers/scsi/aacraid/aachba.c 	rio2->sgeCnt = cpu_to_le32(nseg);
nseg             4088 drivers/scsi/aacraid/aachba.c 		int j, nseg_new = nseg, err_found;
nseg             4092 drivers/scsi/aacraid/aachba.c 			for (j = 1; j < nseg - 1; ++j) {
nseg             4103 drivers/scsi/aacraid/aachba.c 			int ret = aac_convert_sgraw2(rio2, i, nseg, nseg_new);
nseg             4120 drivers/scsi/aacraid/aachba.c static int aac_convert_sgraw2(struct aac_raw_io2 *rio2, int pages, int nseg, int nseg_new)
nseg             4133 drivers/scsi/aacraid/aachba.c 	for (i = 1, pos = 1; i < nseg-1; ++i) {
nseg             4145 drivers/scsi/aacraid/aachba.c 	sge[pos] = rio2->sge[nseg-1];
nseg             4161 drivers/scsi/aacraid/aachba.c 	int nseg;
nseg             4167 drivers/scsi/aacraid/aachba.c 	nseg = scsi_dma_map(scsicmd);
nseg             4168 drivers/scsi/aacraid/aachba.c 	if (nseg <= 0) {
nseg             4169 drivers/scsi/aacraid/aachba.c 		byte_count = nseg;
nseg             4173 drivers/scsi/aacraid/aachba.c 	if (nseg > HBA_MAX_SG_EMBEDDED)
nseg             4178 drivers/scsi/aacraid/aachba.c 	scsi_for_each_sg(scsicmd, sg, nseg, i) {
nseg             4203 drivers/scsi/aacraid/aachba.c 	if (nseg <= HBA_MAX_SG_EMBEDDED) {
nseg             4204 drivers/scsi/aacraid/aachba.c 		hbacmd->emb_data_desc_count = cpu_to_le32(nseg);
nseg              324 drivers/scsi/aha1740.c 	int ecbno, nseg;
nseg              395 drivers/scsi/aha1740.c 	nseg = scsi_dma_map(SCpnt);
nseg              396 drivers/scsi/aha1740.c 	BUG_ON(nseg < 0);
nseg              397 drivers/scsi/aha1740.c 	if (nseg) {
nseg              406 drivers/scsi/aha1740.c 		scsi_for_each_sg(SCpnt, sg, nseg, i) {
nseg              410 drivers/scsi/aha1740.c 		host->ecb[ecbno].datalen = nseg * sizeof(struct aha1740_chain);
nseg             6368 drivers/scsi/aic7xxx/aic79xx_core.c ahd_dmamap_cb(void *arg, bus_dma_segment_t *segs, int nseg, int error) 
nseg             1564 drivers/scsi/aic7xxx/aic79xx_osm.c 	int nseg;
nseg             1566 drivers/scsi/aic7xxx/aic79xx_osm.c 	nseg = scsi_dma_map(cmd);
nseg             1567 drivers/scsi/aic7xxx/aic79xx_osm.c 	if (nseg < 0)
nseg             1634 drivers/scsi/aic7xxx/aic79xx_osm.c 	if (nseg > 0) {
nseg             1641 drivers/scsi/aic7xxx/aic79xx_osm.c 		scsi_for_each_sg(cmd, cur_seg, nseg, i) {
nseg             1649 drivers/scsi/aic7xxx/aic79xx_osm.c 					  i == (nseg - 1));
nseg             4700 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_dmamap_cb(void *arg, bus_dma_segment_t *segs, int nseg, int error) 
nseg             1428 drivers/scsi/aic7xxx/aic7xxx_osm.c 	int nseg;
nseg             1455 drivers/scsi/aic7xxx/aic7xxx_osm.c 	nseg = scsi_dma_map(cmd);
nseg             1456 drivers/scsi/aic7xxx/aic7xxx_osm.c 	if (nseg < 0)
nseg             1519 drivers/scsi/aic7xxx/aic7xxx_osm.c 	if (nseg > 0) {
nseg             1530 drivers/scsi/aic7xxx/aic7xxx_osm.c 		scsi_for_each_sg(cmd, cur_seg, nseg, i) {
nseg             1734 drivers/scsi/arcmsr/arcmsr_hba.c 	int nseg;
nseg             1743 drivers/scsi/arcmsr/arcmsr_hba.c 	nseg = scsi_dma_map(pcmd);
nseg             1744 drivers/scsi/arcmsr/arcmsr_hba.c 	if (unlikely(nseg > acb->host->sg_tablesize || nseg < 0))
nseg             1746 drivers/scsi/arcmsr/arcmsr_hba.c 	scsi_for_each_sg(pcmd, sg, nseg, i) {
nseg             1768 drivers/scsi/arcmsr/arcmsr_hba.c 	arcmsr_cdb->sgcount = (uint8_t)nseg;
nseg              881 drivers/scsi/dc395x.c 	int nseg;
nseg              903 drivers/scsi/dc395x.c 	nseg = scsi_dma_map(cmd);
nseg              904 drivers/scsi/dc395x.c 	BUG_ON(nseg < 0);
nseg              906 drivers/scsi/dc395x.c 	if (dir == PCI_DMA_NONE || !nseg) {
nseg              917 drivers/scsi/dc395x.c 		srb->sg_count = nseg;
nseg             2198 drivers/scsi/dpt_i2o.c 	int nseg;
nseg             2270 drivers/scsi/dpt_i2o.c 	nseg = scsi_dma_map(cmd);
nseg             2271 drivers/scsi/dpt_i2o.c 	BUG_ON(nseg < 0);
nseg             2272 drivers/scsi/dpt_i2o.c 	if (nseg) {
nseg             2276 drivers/scsi/dpt_i2o.c 		scsi_for_each_sg(cmd, sg, nseg, i) {
nseg             2285 drivers/scsi/dpt_i2o.c 			if (i == nseg - 1)
nseg              852 drivers/scsi/hptiop.c 	int idx, nseg;
nseg              854 drivers/scsi/hptiop.c 	nseg = scsi_dma_map(scp);
nseg              855 drivers/scsi/hptiop.c 	BUG_ON(nseg < 0);
nseg              856 drivers/scsi/hptiop.c 	if (!nseg)
nseg              859 drivers/scsi/hptiop.c 	HPT_SCP(scp)->sgcnt = nseg;
nseg             1297 drivers/scsi/ibmvscsi/ibmvfc.c static void ibmvfc_map_sg_list(struct scsi_cmnd *scmd, int nseg,
nseg             1303 drivers/scsi/ibmvscsi/ibmvfc.c 	scsi_for_each_sg(scmd, sg, nseg, i) {
nseg              652 drivers/scsi/ibmvscsi/ibmvscsi.c static int map_sg_list(struct scsi_cmnd *cmd, int nseg,
nseg              659 drivers/scsi/ibmvscsi/ibmvscsi.c 	scsi_for_each_sg(cmd, sg, nseg, i) {
nseg             2536 drivers/scsi/initio.c 	int i, nseg;
nseg             2573 drivers/scsi/initio.c 	nseg = scsi_dma_map(cmnd);
nseg             2574 drivers/scsi/initio.c 	BUG_ON(nseg < 0);
nseg             2575 drivers/scsi/initio.c 	if (nseg) {
nseg             2582 drivers/scsi/initio.c 		cblk->sglen = nseg;
nseg             5920 drivers/scsi/ipr.c 	int i, nseg;
nseg             5932 drivers/scsi/ipr.c 	nseg = scsi_dma_map(scsi_cmd);
nseg             5933 drivers/scsi/ipr.c 	if (nseg < 0) {
nseg             5939 drivers/scsi/ipr.c 	ipr_cmd->dma_use_sg = nseg;
nseg             5972 drivers/scsi/ipr.c 	int i, nseg;
nseg             5984 drivers/scsi/ipr.c 	nseg = scsi_dma_map(scsi_cmd);
nseg             5985 drivers/scsi/ipr.c 	if (nseg < 0) {
nseg             5990 drivers/scsi/ipr.c 	ipr_cmd->dma_use_sg = nseg;
nseg             1322 drivers/scsi/lpfc/lpfc_nvme.c 	int nseg, i, j;
nseg             1358 drivers/scsi/lpfc/lpfc_nvme.c 		nseg = nCmd->sg_cnt;
nseg             1363 drivers/scsi/lpfc/lpfc_nvme.c 		for (i = 0; i < nseg; i++) {
nseg             1367 drivers/scsi/lpfc/lpfc_nvme.c 						i, nseg);
nseg             1373 drivers/scsi/lpfc/lpfc_nvme.c 			if ((num_bde + 1) == nseg) {
nseg             1383 drivers/scsi/lpfc/lpfc_nvme.c 				    ((nseg - 1) != i)) {
nseg             1408 drivers/scsi/lpfc/lpfc_nvme.c 				if ((nseg - 1) == i)
nseg              846 drivers/scsi/lpfc/lpfc_scsi.c 	int nseg, datadir = scsi_cmnd->sc_data_direction;
nseg              863 drivers/scsi/lpfc/lpfc_scsi.c 		nseg = dma_map_sg(&phba->pcidev->dev, scsi_sglist(scsi_cmnd),
nseg              865 drivers/scsi/lpfc/lpfc_scsi.c 		if (unlikely(!nseg))
nseg              868 drivers/scsi/lpfc/lpfc_scsi.c 		lpfc_cmd->seg_cnt = nseg;
nseg              890 drivers/scsi/lpfc/lpfc_scsi.c 		scsi_for_each_sg(scsi_cmnd, sgel, nseg, num_bde) {
nseg              895 drivers/scsi/lpfc/lpfc_scsi.c 			    nseg <= LPFC_EXT_DATA_BDE_COUNT) {
nseg             3056 drivers/scsi/lpfc/lpfc_scsi.c 	int nseg, i, j;
nseg             3075 drivers/scsi/lpfc/lpfc_scsi.c 		nseg = scsi_dma_map(scsi_cmnd);
nseg             3076 drivers/scsi/lpfc/lpfc_scsi.c 		if (unlikely(nseg <= 0))
nseg             3085 drivers/scsi/lpfc/lpfc_scsi.c 		lpfc_cmd->seg_cnt = nseg;
nseg             3112 drivers/scsi/lpfc/lpfc_scsi.c 		for (i = 0; i < nseg; i++) {
nseg             3114 drivers/scsi/lpfc/lpfc_scsi.c 			if ((num_bde + 1) == nseg) {
nseg             3124 drivers/scsi/lpfc/lpfc_scsi.c 				    ((nseg - 1) != i)) {
nseg             3150 drivers/scsi/lpfc/lpfc_scsi.c 				if ((nseg - 1) == i)
nseg              364 drivers/scsi/mac53c94.c 	int i, dma_cmd, total, nseg;
nseg              370 drivers/scsi/mac53c94.c 	nseg = scsi_dma_map(cmd);
nseg              371 drivers/scsi/mac53c94.c 	BUG_ON(nseg < 0);
nseg              372 drivers/scsi/mac53c94.c 	if (!nseg)
nseg              380 drivers/scsi/mac53c94.c 	scsi_for_each_sg(cmd, scl, nseg, i) {
nseg             2903 drivers/scsi/megaraid/megaraid_sas_fusion.c 			rctx->nseg = 0x1;
nseg             2965 drivers/scsi/megaraid/megaraid_sas_fusion.c 			rctx->nseg = 0x1;
nseg             3134 drivers/scsi/megaraid/megaraid_sas_fusion.c 				pRAID_Context->nseg = 0x1;
nseg             3234 drivers/scsi/megaraid/megaraid_sas_fusion.c 	io_request->RaidContext.raid_context.nseg = 0;
nseg              105 drivers/scsi/megaraid/megaraid_sas_fusion.h 	u8 nseg:4;
nseg              109 drivers/scsi/megaraid/megaraid_sas_fusion.h 	u8 nseg:4;
nseg             1269 drivers/scsi/mesh.c 		int nseg;
nseg             1273 drivers/scsi/mesh.c 		nseg = scsi_dma_map(cmd);
nseg             1274 drivers/scsi/mesh.c 		BUG_ON(nseg < 0);
nseg             1276 drivers/scsi/mesh.c 		if (nseg) {
nseg             1280 drivers/scsi/mesh.c 			scsi_for_each_sg(cmd, scl, nseg, i) {
nseg             3149 drivers/scsi/pmcraid.c 	int i, nseg;
nseg             3161 drivers/scsi/pmcraid.c 	nseg = scsi_dma_map(scsi_cmd);
nseg             3163 drivers/scsi/pmcraid.c 	if (nseg < 0) {
nseg             3166 drivers/scsi/pmcraid.c 	} else if (nseg > PMCRAID_MAX_IOADLS) {
nseg             3169 drivers/scsi/pmcraid.c 			"sg count is (%d) more than allowed!\n", nseg);
nseg             3179 drivers/scsi/pmcraid.c 	ioadl = pmcraid_init_ioadls(cmd, nseg);
nseg             3182 drivers/scsi/pmcraid.c 	scsi_for_each_sg(scsi_cmd, sglist, nseg, i) {
nseg              324 drivers/scsi/qla2xxx/qla_iocb.c 	int		nseg;
nseg              367 drivers/scsi/qla2xxx/qla_iocb.c 		nseg = dma_map_sg(&ha->pdev->dev, scsi_sglist(cmd),
nseg              369 drivers/scsi/qla2xxx/qla_iocb.c 		if (unlikely(!nseg))
nseg              372 drivers/scsi/qla2xxx/qla_iocb.c 		nseg = 0;
nseg              374 drivers/scsi/qla2xxx/qla_iocb.c 	tot_dsds = nseg;
nseg             1593 drivers/scsi/qla2xxx/qla_iocb.c 	int		nseg;
nseg             1629 drivers/scsi/qla2xxx/qla_iocb.c 		nseg = dma_map_sg(&ha->pdev->dev, scsi_sglist(cmd),
nseg             1631 drivers/scsi/qla2xxx/qla_iocb.c 		if (unlikely(!nseg))
nseg             1634 drivers/scsi/qla2xxx/qla_iocb.c 		nseg = 0;
nseg             1636 drivers/scsi/qla2xxx/qla_iocb.c 	tot_dsds = nseg;
nseg             1724 drivers/scsi/qla2xxx/qla_iocb.c 	int			nseg;
nseg             1774 drivers/scsi/qla2xxx/qla_iocb.c 		nseg = dma_map_sg(&ha->pdev->dev, scsi_sglist(cmd),
nseg             1776 drivers/scsi/qla2xxx/qla_iocb.c 		if (unlikely(!nseg))
nseg             1791 drivers/scsi/qla2xxx/qla_iocb.c 			nseg = 0;
nseg             1794 drivers/scsi/qla2xxx/qla_iocb.c 				nseg++;
nseg             1797 drivers/scsi/qla2xxx/qla_iocb.c 		nseg = 0;
nseg             1800 drivers/scsi/qla2xxx/qla_iocb.c 	tot_dsds = nseg;
nseg             1804 drivers/scsi/qla2xxx/qla_iocb.c 		nseg = dma_map_sg(&ha->pdev->dev, scsi_prot_sglist(cmd),
nseg             1806 drivers/scsi/qla2xxx/qla_iocb.c 		if (unlikely(!nseg))
nseg             1813 drivers/scsi/qla2xxx/qla_iocb.c 			nseg = scsi_bufflen(cmd) / cmd->device->sector_size;
nseg             1816 drivers/scsi/qla2xxx/qla_iocb.c 		nseg = 0;
nseg             1821 drivers/scsi/qla2xxx/qla_iocb.c 	tot_prot_dsds = nseg;
nseg             1822 drivers/scsi/qla2xxx/qla_iocb.c 	tot_dsds += nseg;
nseg             1910 drivers/scsi/qla2xxx/qla_iocb.c 	int		nseg;
nseg             1949 drivers/scsi/qla2xxx/qla_iocb.c 		nseg = dma_map_sg(&ha->pdev->dev, scsi_sglist(cmd),
nseg             1951 drivers/scsi/qla2xxx/qla_iocb.c 		if (unlikely(!nseg))
nseg             1954 drivers/scsi/qla2xxx/qla_iocb.c 		nseg = 0;
nseg             1956 drivers/scsi/qla2xxx/qla_iocb.c 	tot_dsds = nseg;
nseg             2045 drivers/scsi/qla2xxx/qla_iocb.c 	int			nseg;
nseg             2109 drivers/scsi/qla2xxx/qla_iocb.c 		nseg = dma_map_sg(&ha->pdev->dev, scsi_sglist(cmd),
nseg             2111 drivers/scsi/qla2xxx/qla_iocb.c 		if (unlikely(!nseg))
nseg             2126 drivers/scsi/qla2xxx/qla_iocb.c 			nseg = 0;
nseg             2129 drivers/scsi/qla2xxx/qla_iocb.c 				nseg++;
nseg             2132 drivers/scsi/qla2xxx/qla_iocb.c 		nseg = 0;
nseg             2135 drivers/scsi/qla2xxx/qla_iocb.c 	tot_dsds = nseg;
nseg             2139 drivers/scsi/qla2xxx/qla_iocb.c 		nseg = dma_map_sg(&ha->pdev->dev, scsi_prot_sglist(cmd),
nseg             2141 drivers/scsi/qla2xxx/qla_iocb.c 		if (unlikely(!nseg))
nseg             2148 drivers/scsi/qla2xxx/qla_iocb.c 			nseg = scsi_bufflen(cmd) / cmd->device->sector_size;
nseg             2151 drivers/scsi/qla2xxx/qla_iocb.c 		nseg = 0;
nseg             2156 drivers/scsi/qla2xxx/qla_iocb.c 	tot_prot_dsds = nseg;
nseg             2157 drivers/scsi/qla2xxx/qla_iocb.c 	tot_dsds += nseg;
nseg             3079 drivers/scsi/qla2xxx/qla_iocb.c 	int		nseg;
nseg             3128 drivers/scsi/qla2xxx/qla_iocb.c 		nseg = dma_map_sg(&ha->pdev->dev, scsi_sglist(cmd),
nseg             3130 drivers/scsi/qla2xxx/qla_iocb.c 		if (unlikely(!nseg))
nseg             3133 drivers/scsi/qla2xxx/qla_iocb.c 		nseg = 0;
nseg             3135 drivers/scsi/qla2xxx/qla_iocb.c 	tot_dsds = nseg;
nseg             3076 drivers/scsi/qla2xxx/qla_mr.c 	int		nseg;
nseg             3107 drivers/scsi/qla2xxx/qla_mr.c 		nseg = dma_map_sg(&ha->pdev->dev, scsi_sglist(cmd),
nseg             3109 drivers/scsi/qla2xxx/qla_mr.c 		if (unlikely(!nseg))
nseg             3112 drivers/scsi/qla2xxx/qla_mr.c 		nseg = 0;
nseg             3114 drivers/scsi/qla2xxx/qla_mr.c 	tot_dsds = nseg;
nseg              278 drivers/scsi/qla4xxx/ql4_iocb.c 	int nseg;
nseg              308 drivers/scsi/qla4xxx/ql4_iocb.c 	nseg = scsi_dma_map(cmd);
nseg              309 drivers/scsi/qla4xxx/ql4_iocb.c 	if (nseg < 0)
nseg              311 drivers/scsi/qla4xxx/ql4_iocb.c 	tot_dsds = nseg;
nseg               25 drivers/scsi/scsi_lib_dma.c 	int nseg = 0;
nseg               30 drivers/scsi/scsi_lib_dma.c 		nseg = dma_map_sg(dev, scsi_sglist(cmd), scsi_sg_count(cmd),
nseg               32 drivers/scsi/scsi_lib_dma.c 		if (unlikely(!nseg))
nseg               35 drivers/scsi/scsi_lib_dma.c 	return nseg;
nseg              432 drivers/scsi/stex.c 	int i, nseg;
nseg              435 drivers/scsi/stex.c 	nseg = scsi_dma_map(cmd);
nseg              436 drivers/scsi/stex.c 	BUG_ON(nseg < 0);
nseg              437 drivers/scsi/stex.c 	if (nseg) {
nseg              440 drivers/scsi/stex.c 		ccb->sg_count = nseg;
nseg              441 drivers/scsi/stex.c 		dst->sg_count = cpu_to_le16((u16)nseg);
nseg              446 drivers/scsi/stex.c 		scsi_for_each_sg(cmd, sg, nseg, i) {
nseg              454 drivers/scsi/stex.c 	return nseg;
nseg              464 drivers/scsi/stex.c 	int i, nseg;
nseg              467 drivers/scsi/stex.c 	nseg = scsi_dma_map(cmd);
nseg              468 drivers/scsi/stex.c 	BUG_ON(nseg < 0);
nseg              469 drivers/scsi/stex.c 	if (nseg) {
nseg              472 drivers/scsi/stex.c 		ccb->sg_count = nseg;
nseg              473 drivers/scsi/stex.c 		dst->sg_count = cpu_to_le16((u16)nseg);
nseg              478 drivers/scsi/stex.c 		scsi_for_each_sg(cmd, sg, nseg, i) {
nseg              487 drivers/scsi/stex.c 	return nseg;
nseg              203 include/scsi/scsi_cmnd.h #define scsi_for_each_sg(cmd, sg, nseg, __i)			\
nseg              204 include/scsi/scsi_cmnd.h 	for_each_sg(scsi_sglist(cmd), sg, nseg, __i)
nseg              307 include/scsi/scsi_cmnd.h #define scsi_for_each_prot_sg(cmd, sg, nseg, __i)		\
nseg              308 include/scsi/scsi_cmnd.h 	for_each_sg(scsi_prot_sglist(cmd), sg, nseg, __i)
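
The hits above converge on one idiom shared by most of the SCSI low-level drivers in this index (aacraid, stex, dc395x, hptiop, initio, mesh, ...): scsi_dma_map() returns the number of mapped DMA segments in nseg (0 when the command carries no data, negative on a mapping failure), and the driver then walks the mapped scatterlist with the scsi_for_each_sg() wrapper defined at include/scsi/scsi_cmnd.h:203 above. The sketch below condenses that flow; struct my_sg_entry and build_sg_table() are hypothetical names introduced only for illustration and do not appear in any of the files listed.

/*
 * Hypothetical helper condensing the nseg pattern visible in the listing:
 * map the command's scatterlist, remember the segment count, then emit
 * one hardware SG descriptor per mapped segment.
 */
#include <linux/scatterlist.h>
#include <scsi/scsi_cmnd.h>

struct my_sg_entry {			/* illustrative descriptor layout */
	__le64 addr;
	__le32 len;
};

static int build_sg_table(struct scsi_cmnd *cmd, struct my_sg_entry *tbl)
{
	struct scatterlist *sg;
	int i, nseg;

	nseg = scsi_dma_map(cmd);	/* number of DMA segments */
	if (nseg <= 0)
		return nseg;		/* 0: no data, <0: mapping error */

	/* scsi_for_each_sg() expands to for_each_sg(scsi_sglist(cmd), ...) */
	scsi_for_each_sg(cmd, sg, nseg, i) {
		tbl[i].addr = cpu_to_le64(sg_dma_address(sg));
		tbl[i].len  = cpu_to_le32(sg_dma_len(sg));
	}

	return nseg;			/* caller records this as its SG count */
}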