
Searched refs:sg_count (Results 1 – 62 of 62) sorted by relevance

/linux-4.1.27/drivers/staging/i2o/
memory.c
33 u16 sg_count = in i2o_sg_tablesize() local
42 sg_count -= 2; in i2o_sg_tablesize()
43 sg_count /= 3; in i2o_sg_tablesize()
45 sg_count /= 2; in i2o_sg_tablesize()
47 if (c->short_req && (sg_count > 8)) in i2o_sg_tablesize()
48 sg_count = 8; in i2o_sg_tablesize()
50 return sg_count; in i2o_sg_tablesize()
128 int sg_count, enum dma_data_direction direction, u32 ** sg_ptr) in i2o_dma_map_sg() argument
144 sg_count = dma_map_sg(&c->pdev->dev, sg, sg_count, direction); in i2o_dma_map_sg()
145 if (!sg_count) in i2o_dma_map_sg()
[all …]
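The memory.c hits above outline how i2o_sg_tablesize() turns the message frame size into a usable SG element count: reserve two words for the SG list header, divide the remaining 32-bit words by the per-element cost (three words with 64-bit addressing, two without), and cap short requests at eight elements. A minimal sketch of that arithmetic follows; the parameter names (frame_words, dma64, short_req) are illustrative, not the driver's actual fields.

```c
#include <linux/types.h>

/* Illustrative sizing helper modelled on the i2o_sg_tablesize() hits
 * above; parameter names are made up for the sketch.
 */
static u16 sketch_sg_tablesize(u16 frame_words, bool dma64, bool short_req)
{
	u16 sg_count = frame_words;

	sg_count -= 2;		/* reserve the SG list header words */
	if (dma64)
		sg_count /= 3;	/* addr_lo + addr_hi + flags/length */
	else
		sg_count /= 2;	/* addr + flags/length */

	if (short_req && sg_count > 8)
		sg_count = 8;	/* short message frames hold at most 8 */

	return sg_count;
}
```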
i2o_config.c
537 u32 sg_count = 0; in i2o_cfg_passthru32() local
613 sg_count = in i2o_cfg_passthru32()
615 if (sg_count > SG_TABLESIZE) { in i2o_cfg_passthru32()
617 c->name, sg_count); in i2o_cfg_passthru32()
622 for (i = 0; i < sg_count; i++) { in i2o_cfg_passthru32()
640 c->name, sg_size, i, sg_count); in i2o_cfg_passthru32()
699 sg_count = in i2o_cfg_passthru32()
704 for (j = 0; j < sg_count; j++) { in i2o_cfg_passthru32()
788 u32 sg_count = 0; in i2o_cfg_passthru() local
856 sg_count = in i2o_cfg_passthru()
[all …]
i2o.h
700 struct scatterlist *sg, int sg_count,
/linux-4.1.27/drivers/crypto/caam/
sg_sw_sec4.h
33 sg_to_sec4_sg(struct scatterlist *sg, int sg_count, in sg_to_sec4_sg() argument
36 while (sg_count) { in sg_to_sec4_sg()
41 sg_count--; in sg_to_sec4_sg()
50 static inline void sg_to_sec4_sg_last(struct scatterlist *sg, int sg_count, in sg_to_sec4_sg_last() argument
54 sec4_sg_ptr = sg_to_sec4_sg(sg, sg_count, sec4_sg_ptr, offset); in sg_to_sec4_sg_last()
77 static inline int sg_count(struct scatterlist *sg_list, int nbytes, in sg_count() function
caamalg.c
2631 assoc_nents = sg_count(req->assoc, req->assoclen, &assoc_chained); in aead_edesc_alloc()
2634 src_nents = sg_count(req->src, req->cryptlen, &src_chained); in aead_edesc_alloc()
2635 dst_nents = sg_count(req->dst, in aead_edesc_alloc()
2640 src_nents = sg_count(req->src, in aead_edesc_alloc()
2858 assoc_nents = sg_count(req->assoc, req->assoclen, &assoc_chained); in aead_giv_edesc_alloc()
2859 src_nents = sg_count(req->src, req->cryptlen, &src_chained); in aead_giv_edesc_alloc()
2862 dst_nents = sg_count(req->dst, req->cryptlen + ctx->authsize, in aead_giv_edesc_alloc()
3074 src_nents = sg_count(req->src, req->nbytes, &src_chained); in ablkcipher_edesc_alloc()
3077 dst_nents = sg_count(req->dst, req->nbytes, &dst_chained); in ablkcipher_edesc_alloc()
3253 src_nents = sg_count(req->src, req->nbytes, &src_chained); in ablkcipher_giv_edesc_alloc()
[all …]
caamhash.c
1089 src_nents = sg_count(req->src, req->nbytes, &chained); in ahash_digest()
1441 src_nents = sg_count(req->src, req->nbytes - (*next_buflen), in ahash_update_first()
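Note that caam, picoxcell and bfin_crc each define their own local sg_count() helper (distinct from the sg_count variables elsewhere in this list) that walks a scatterlist and reports how many entries cover a byte count. A hedged sketch of that idiom, using only the generic scatterlist API and omitting the chained-list bookkeeping the caam version also does:

```c
#include <linux/scatterlist.h>

/* Count the scatterlist entries needed to cover nbytes. Sketch of the
 * driver-local sg_count() helpers hit above, not the CAAM code itself.
 */
static int sketch_sg_count(struct scatterlist *sg_list, int nbytes)
{
	struct scatterlist *sg = sg_list;
	int nents = 0;

	while (sg && nbytes > 0) {
		nents++;
		nbytes -= sg->length;
		sg = sg_next(sg);	/* NULL after the last entry */
	}

	return nents;
}
```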
/linux-4.1.27/drivers/spi/
spi-mxs.c
181 int sg_count; in mxs_spi_txrx_dma() local
209 for (sg_count = 0; sg_count < sgs; sg_count++) { in mxs_spi_txrx_dma()
217 if ((sg_count + 1 == sgs) && (flags & TXRX_DEASSERT_CS)) in mxs_spi_txrx_dma()
225 dma_xfer[sg_count].pio[0] = ctrl0; in mxs_spi_txrx_dma()
226 dma_xfer[sg_count].pio[3] = min; in mxs_spi_txrx_dma()
235 sg_init_table(&dma_xfer[sg_count].sg, 1); in mxs_spi_txrx_dma()
236 sg_set_page(&dma_xfer[sg_count].sg, vm_page, in mxs_spi_txrx_dma()
239 sg_init_one(&dma_xfer[sg_count].sg, buf, min); in mxs_spi_txrx_dma()
242 ret = dma_map_sg(ssp->dev, &dma_xfer[sg_count].sg, 1, in mxs_spi_txrx_dma()
250 (struct scatterlist *)dma_xfer[sg_count].pio, in mxs_spi_txrx_dma()
[all …]
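The spi-mxs loop above prepares one single-entry scatterlist per DMA chunk (sg_init_one() or sg_set_page()) and maps it with dma_map_sg() before chaining the descriptor. A reduced sketch of that per-segment step, with the PIO words and vmalloc handling left out:

```c
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/errno.h>

/* Map one buffer chunk as a single-entry scatterlist, as spi-mxs does
 * per transfer segment. Sketch only; error paths trimmed.
 */
static int sketch_map_chunk(struct device *dev, struct scatterlist *sg,
			    void *buf, size_t len,
			    enum dma_data_direction dir)
{
	sg_init_one(sg, buf, len);
	if (!dma_map_sg(dev, sg, 1, dir))
		return -EFAULT;		/* nothing mapped */
	return 0;
}
```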
/linux-4.1.27/drivers/crypto/
talitos.c
905 static int sg_to_link_tbl(struct scatterlist *sg, int sg_count, in sg_to_link_tbl() argument
908 int n_sg = sg_count; in sg_to_link_tbl()
925 sg_count--; in sg_to_link_tbl()
934 return sg_count; in sg_to_link_tbl()
952 int sg_count, ret; in ipsec_esp() local
970 sg_count = sg_to_link_tbl(areq->assoc, edesc->assoc_nents - 1, in ipsec_esp()
974 tbl_ptr += sg_count - 1; in ipsec_esp()
1013 sg_count = talitos_map_sg(dev, areq->src, edesc->src_nents ? : 1, in ipsec_esp()
1018 if (sg_count == 1) { in ipsec_esp()
1026 sg_count = sg_to_link_tbl(areq->src, sg_count, sg_link_tbl_len, in ipsec_esp()
[all …]
picoxcell_crypto.c
264 static int sg_count(struct scatterlist *sg_list, int nbytes) in sg_count() function
300 nents = sg_count(payload, nbytes); in spacc_sg_to_ddt()
328 unsigned nents = sg_count(areq->src, areq->cryptlen); in spacc_aead_make_ddts()
348 sg_count(areq->assoc, areq->assoclen), DMA_TO_DEVICE); in spacc_aead_make_ddts()
412 unsigned nents = sg_count(areq->src, areq->cryptlen); in spacc_aead_free_ddts()
417 sg_count(areq->dst, areq->cryptlen), in spacc_aead_free_ddts()
423 sg_count(areq->assoc, areq->assoclen), DMA_TO_DEVICE); in spacc_aead_free_ddts()
435 unsigned nents = sg_count(payload, nbytes); in spacc_free_ddt()
bfin_crc.c
103 static int sg_count(struct scatterlist *sg_list) in sg_count() function
163 if (sg_count(req->src) > CRC_MAX_DMA_DESC) { in bfin_crypto_crc_init()
380 nsg = ctx->sg_nents = sg_count(ctx->sg); in bfin_crypto_crc_handle_queue()
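talitos' sg_to_link_tbl() and picoxcell's spacc_sg_to_ddt() both translate a DMA-mapped scatterlist into a hardware descriptor table, marking the final entry. A generic sketch of that conversion; hw_desc is a made-up stand-in for the device-specific link-table or DDT entry, and the endianness conversion real drivers apply is omitted:

```c
#include <linux/scatterlist.h>
#include <linux/types.h>

/* Hypothetical hardware SG descriptor; real drivers use their own
 * link-table / DDT layout and byte order.
 */
struct hw_desc {
	dma_addr_t addr;
	u32 len;
	u32 flags;
};

/* One descriptor per mapped entry, last one flagged. sg_count must be
 * the value returned by dma_map_sg().
 */
static int sketch_sg_to_desc_tbl(struct scatterlist *sgl, int sg_count,
				 struct hw_desc *tbl, u32 last_flag)
{
	struct scatterlist *sg;
	int i;

	for_each_sg(sgl, sg, sg_count, i) {
		tbl[i].addr  = sg_dma_address(sg);
		tbl[i].len   = sg_dma_len(sg);
		tbl[i].flags = 0;
	}
	if (sg_count > 0)
		tbl[sg_count - 1].flags = last_flag;

	return sg_count;
}
```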
/linux-4.1.27/drivers/scsi/
storvsc_drv.c
559 unsigned int sg_count) in destroy_bounce_buffer() argument
564 for (i = 0; i < sg_count; i++) { in destroy_bounce_buffer()
573 static int do_bounce_buffer(struct scatterlist *sgl, unsigned int sg_count) in do_bounce_buffer() argument
578 if (sg_count < 2) in do_bounce_buffer()
582 for (i = 0; i < sg_count; i++) { in do_bounce_buffer()
587 } else if (i == sg_count - 1) { in do_bounce_buffer()
601 unsigned int sg_count, in create_bounce_buffer() argument
1559 unsigned int sg_count = 0; in storvsc_queuecommand() local
1618 sg_count = scsi_sg_count(scmnd); in storvsc_queuecommand()
1624 if (sg_count) { in storvsc_queuecommand()
[all …]
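storvsc's do_bounce_buffer() decides whether a bounce buffer is required by checking that the first element runs to the end of its page, the last starts at offset 0, and every middle element is exactly one full page. A sketch of that rule, written with the generic iterator rather than the array indexing the driver uses:

```c
#include <linux/scatterlist.h>
#include <linux/mm.h>

/* Return true if the list can be handed to the host as-is under the
 * rule the storvsc hits above imply; otherwise a bounce buffer is
 * needed. Sketch, not the driver function.
 */
static bool sketch_sgl_needs_no_bounce(struct scatterlist *sgl,
				       unsigned int sg_count)
{
	struct scatterlist *sg;
	unsigned int i;

	if (sg_count < 2)
		return true;

	for_each_sg(sgl, sg, sg_count, i) {
		if (i == 0) {
			/* first entry must run to the end of its page */
			if (sg->offset + sg->length != PAGE_SIZE)
				return false;
		} else if (i == sg_count - 1) {
			/* last entry must start on a page boundary */
			if (sg->offset != 0)
				return false;
		} else {
			/* middle entries must be exactly one full page */
			if (sg->offset != 0 || sg->length != PAGE_SIZE)
				return false;
		}
	}
	return true;
}
```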
qlogicpti.c
895 int sg_count; in load_cmd() local
898 sg_count = dma_map_sg(&qpti->op->dev, sg, in load_cmd()
903 cmd->segment_cnt = sg_count; in load_cmd()
906 n = sg_count; in load_cmd()
913 sg_count -= 4; in load_cmd()
915 while (sg_count > 0) { in load_cmd()
930 n = sg_count; in load_cmd()
937 sg_count -= n; in load_cmd()
aha1542.c
378 int mbo, sg_count; in aha1542_queuecommand() local
402 sg_count = scsi_sg_count(cmd); in aha1542_queuecommand()
403 cptr = kmalloc(sizeof(*cptr) * sg_count, GFP_KERNEL | GFP_DMA); in aha1542_queuecommand()
456 scsi_for_each_sg(cmd, sg, sg_count, i) { in aha1542_queuecommand()
461 any2scsi(ccb[mbo].datalen, sg_count * sizeof(struct chain)); in aha1542_queuecommand()
stex.c
183 __le16 sg_count; member
286 int sg_count; member
425 ccb->sg_count = nseg; in stex_map_sg()
426 dst->sg_count = cpu_to_le16((u16)nseg); in stex_map_sg()
457 ccb->sg_count = nseg; in stex_ss_map_sg()
458 dst->sg_count = cpu_to_le16((u16)nseg); in stex_ss_map_sg()
534 addr += (hba->ccb[tag].sg_count+4)/11; in stex_ss_send_cmd()
675 hba->ccb[tag].sg_count = 0; in stex_queuecommand_lck()
1731 hba->ccb[tag].sg_count = 0; in stex_hba_stop()
ips.h
430 uint8_t sg_count; member
639 uint8_t sg_count; member
654 uint16_t sg_count; member
1105 int sg_count; member
dc395x.c
236 u8 sg_count; /* No of HW sg entries for this request */ member
990 srb->sg_count = 0; in build_srb()
1019 srb->sg_count = nseg; in build_srb()
1024 srb->sg_count); in build_srb()
1026 scsi_for_each_sg(cmd, sg, srb->sg_count, i) { in build_srb()
1033 sgp += srb->sg_count - 1; in build_srb()
1212 srb->segment_x, srb->sg_count, srb->sg_index, in dump_register_info()
1939 for (; idx < srb->sg_count; psge++, idx++) in sg_verify_length()
1969 for (idx = srb->sg_index; idx < srb->sg_count; idx++) { in sg_update_list()
2284 srb->sg_count, &offset, &len); in data_in_phase0()
[all …]
dpt_i2o.c
1715 u32 sg_count = 0; in adpt_i2o_passthru() local
1761 sg_count = (size - sg_offset*4) / sizeof(struct sg_simple_element); in adpt_i2o_passthru()
1762 if (sg_count > pHba->sg_tablesize){ in adpt_i2o_passthru()
1763 printk(KERN_DEBUG"%s:IOCTL SG List too large (%u)\n", pHba->name,sg_count); in adpt_i2o_passthru()
1768 for(i = 0; i < sg_count; i++) { in adpt_i2o_passthru()
1781 pHba->name,sg_size,i,sg_count); in adpt_i2o_passthru()
1848 sg_count = (size - sg_offset*4) / sizeof(struct sg_simple_element); in adpt_i2o_passthru()
1852 for (j = 0; j < sg_count; j++) { in adpt_i2o_passthru()
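adpt_i2o_passthru() (like i2o_cfg_passthru() earlier in the list) derives sg_count from a user-supplied message, dividing the bytes past the SG offset by the element size, and rejects the request if it exceeds the controller's SG table size. A sketch of that bounds check; the two-word sg_simple_element layout below is an assumption used only for sizing in the sketch:

```c
#include <linux/types.h>
#include <linux/errno.h>

/* Assumed simple-SG element layout, used only to size the check. */
struct sketch_sg_simple_element {
	u32 flag_count;
	u32 addr_bus;
};

/* size is the message length in bytes, sg_offset the SG list offset in
 * 32-bit words, sg_tablesize the controller limit. Sketch only.
 */
static int sketch_check_passthru_sg(u32 size, u32 sg_offset, u32 sg_tablesize)
{
	u32 sg_count;

	if (sg_offset * 4 > size)
		return -EINVAL;

	sg_count = (size - sg_offset * 4) /
		   sizeof(struct sketch_sg_simple_element);
	if (sg_count > sg_tablesize)
		return -EINVAL;		/* "IOCTL SG List too large" */

	return sg_count;
}
```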
mvumi.c
204 void *sgl_p, unsigned char *sg_count) in mvumi_make_sgl() argument
214 *sg_count = pci_map_sg(mhba->pdev, sg, sgnum, in mvumi_make_sgl()
216 if (*sg_count > mhba->max_sge) { in mvumi_make_sgl()
219 *sg_count, mhba->max_sge); in mvumi_make_sgl()
222 for (i = 0; i < *sg_count; i++) { in mvumi_make_sgl()
228 if ((i + 1) == *sg_count) in mvumi_make_sgl()
244 *sg_count = 1; in mvumi_make_sgl()
sr.c
454 int i, size = 0, sg_count = scsi_sg_count(SCpnt); in sr_init_command() local
456 scsi_for_each_sg(SCpnt, sg, sg_count, i) in sr_init_command()
3w-sas.c
298 int i, sg_count; in twl_scsiop_execute_scsi() local
340 sg_count = scsi_dma_map(srb); in twl_scsiop_execute_scsi()
341 if (sg_count <= 0) in twl_scsiop_execute_scsi()
344 scsi_for_each_sg(srb, sg, sg_count, i) { in twl_scsiop_execute_scsi()
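Most of the SCSI drivers in this list (3w-sas, 3w-9xxx, aha1542, 53c700, ips, fnic, bnx2i, bnx2fc, ...) share one idiom: scsi_dma_map() returns the number of mapped entries, and scsi_for_each_sg() walks exactly that many to fill the adapter's descriptors. A condensed sketch of the pattern; hw_sg_entry is a hypothetical stand-in for the adapter-specific descriptor:

```c
#include <scsi/scsi_cmnd.h>
#include <linux/scatterlist.h>

/* Hypothetical adapter SG descriptor. */
struct hw_sg_entry {
	dma_addr_t addr;
	u32 len;
};

/* Map the command's data buffer and copy the mapped scatterlist into
 * hardware descriptors; returns the entry count (<= 0 means no data or
 * a mapping failure, as in the drivers above).
 */
static int sketch_build_hw_sgl(struct scsi_cmnd *cmd,
			       struct hw_sg_entry *hw_sgl)
{
	struct scatterlist *sg;
	int sg_count, i;

	sg_count = scsi_dma_map(cmd);
	if (sg_count <= 0)
		return sg_count;

	scsi_for_each_sg(cmd, sg, sg_count, i) {
		hw_sgl[i].addr = sg_dma_address(sg);
		hw_sgl[i].len  = sg_dma_len(sg);
	}

	return sg_count;	/* programmed into the request header */
}
```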
libiscsi_tcp.c
369 struct scatterlist *sg_list, unsigned int sg_count, in iscsi_segment_seek_sg() argument
377 for_each_sg(sg_list, sg, sg_count, i) { in iscsi_segment_seek_sg()
hptiop.c
1013 int sg_count = 0; in hptiop_queuecommand_lck() local
1050 sg_count = hptiop_buildsgl(scp, req->sg_list); in hptiop_queuecommand_lck()
1051 if (!sg_count) in hptiop_queuecommand_lck()
1064 + sg_count * sizeof(struct hpt_iopsg)); in hptiop_queuecommand_lck()
53c700.c
1872 int sg_count; in NCR_700_queuecommand_lck() local
1877 sg_count = scsi_dma_map(SCp); in NCR_700_queuecommand_lck()
1878 BUG_ON(sg_count < 0); in NCR_700_queuecommand_lck()
1880 scsi_for_each_sg(SCp, sg, sg_count, i) { in NCR_700_queuecommand_lck()
ips.c
2680 scb->sg_count = scsi_dma_map(SC); in ips_next()
2681 BUG_ON(scb->sg_count < 0); in ips_next()
2682 if (scb->sg_count) { in ips_next()
2688 scsi_for_each_sg(SC, sg, scb->sg_count, i) { in ips_next()
3592 scb->cmd.basic_io.sg_count = scb->sg_len; in ips_send_cmd()
3638 scb->cmd.basic_io.sg_count = scb->sg_len; in ips_send_cmd()
3786 tapeDCDB->sg_count = scb->sg_len; in ips_send_cmd()
3825 scb->dcdb.sg_count = scb->sg_len; in ips_send_cmd()
5691 scb->cmd.basic_io.sg_count = 0; in ips_read_adapter_status()
5734 scb->cmd.basic_io.sg_count = 0; in ips_read_subsystem_parameters()
3w-9xxx.c
1806 int i, sg_count; in DEF_SCSI_QCMD() local
1858 sg_count = scsi_dma_map(srb); in DEF_SCSI_QCMD()
1859 if (sg_count < 0) in DEF_SCSI_QCMD()
1862 scsi_for_each_sg(srb, sg, sg_count, i) { in DEF_SCSI_QCMD()
scsi_lib.c
3087 void *scsi_kmap_atomic_sg(struct scatterlist *sgl, int sg_count, in scsi_kmap_atomic_sg() argument
3097 for_each_sg(sgl, sg, sg_count, i) { in scsi_kmap_atomic_sg()
3104 if (unlikely(i == sg_count)) { in scsi_kmap_atomic_sg()
3107 __func__, sg_len, *offset, sg_count); in scsi_kmap_atomic_sg()
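scsi_lib.c exports scsi_kmap_atomic_sg() (its declaration appears under include/scsi/ further down the list), which maps the page of a scatterlist that contains a given byte offset. A hedged usage sketch, assuming the 4.1-era contract that on return *offset has been rewritten to the in-page offset and *len to the bytes addressable from the returned mapping:

```c
#include <scsi/scsi_cmnd.h>
#include <linux/kernel.h>
#include <linux/string.h>

/* Peek at a few bytes of a command's data buffer through the atomic
 * kmap helper hit above. Sketch; assumes the contract described in the
 * lead-in and trims error handling.
 */
static void sketch_peek_buffer(struct scsi_cmnd *cmd, size_t byte_offset,
			       u8 *out, size_t count)
{
	size_t offset = byte_offset;	/* in: offset into the whole list */
	size_t len;
	void *buf;

	buf = scsi_kmap_atomic_sg(scsi_sglist(cmd), scsi_sg_count(cmd),
				  &offset, &len);
	if (!buf)
		return;

	/* out: offset is now page-relative, len is what is contiguous */
	memcpy(out, buf + offset, min(count, len));

	scsi_kunmap_atomic_sg(buf);
}
```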
hpsa_cmd.h
552 u8 sg_count; /* Number of sg elements */ member
FlashPoint.c
4953 unsigned char sg_count, i; in FPT_busMstrSGDataXferStart() local
4962 sg_count = 0; in FPT_busMstrSGDataXferStart()
4972 while ((sg_count < (unsigned char)SG_BUF_CNT) && in FPT_busMstrSGDataXferStart()
4982 if ((!sg_count) && (pcurrSCCB->Sccb_SGoffset)) { in FPT_busMstrSGDataXferStart()
4998 sg_count++; in FPT_busMstrSGDataXferStart()
5004 WR_HARPOON(p_port + hp_sg_addr, (sg_count << 4)); in FPT_busMstrSGDataXferStart()
atp870u.c
717 unsigned long sg_count; in DEF_SCSI_QCMD() local
848 sg_count = scsi_dma_map(workreq); in DEF_SCSI_QCMD()
904 scsi_for_each_sg(workreq, sgpnt, sg_count, j) { in DEF_SCSI_QCMD()
hpsa.c
792 c->busaddr |= (h->ioaccel2_blockFetchTable[cp->sg_count]); in set_ioaccel2_performant_mode()
3606 cp->sg_count = (u8) use_sg; in hpsa_scsi_ioaccel2_queue_command()
7585 VERIFY_OFFSET(sg_count, 45); in verify_offsets()
advansys.c
1827 ADV_DCNT sg_count; /* SG element count. */ member
2664 (ulong)b->sg_list[i].sg_count); in asc_prt_adv_sgblock()
8060 sg_block->sg_list[i].sg_count = in adv_get_sglist()
/linux-4.1.27/drivers/scsi/arm/
scsi.h
101 unsigned i, sg_count = scsi_sg_count(SCpnt); in init_SCp() local
103 scsi_for_each_sg(SCpnt, sg, sg_count, i) in init_SCp()
/linux-4.1.27/drivers/scsi/fnic/
fnic_scsi.c
321 int sg_count) in fnic_queue_wq_copy_desc() argument
334 if (sg_count) { in fnic_queue_wq_copy_desc()
337 for_each_sg(scsi_sglist(sc), sg, sg_count, i) { in fnic_queue_wq_copy_desc()
347 sizeof(io_req->sgl_list[0]) * sg_count, in fnic_queue_wq_copy_desc()
423 int sg_count = 0; in fnic_queuecommand_lck() local
476 sg_count = scsi_dma_map(sc); in fnic_queuecommand_lck()
477 if (sg_count < 0) { in fnic_queuecommand_lck()
480 sg_count, CMD_STATE(sc)); in fnic_queuecommand_lck()
486 io_req->sgl_cnt = sg_count; in fnic_queuecommand_lck()
488 if (sg_count > FNIC_DFLT_SG_DESC_CNT) in fnic_queuecommand_lck()
[all …]
/linux-4.1.27/include/scsi/
libiscsi_tcp.h
117 struct scatterlist *sg_list, unsigned int sg_count,
scsi_cmnd.h
162 extern void *scsi_kmap_atomic_sg(struct scatterlist *sg, int sg_count,
/linux-4.1.27/drivers/crypto/ccp/
ccp-crypto-aes-cmac.c
67 unsigned int need_pad, sg_count; in ccp_do_cmac_update() local
110 sg_count = (nbytes) ? sg_nents(req->src) + 2 : 2; in ccp_do_cmac_update()
113 ret = sg_alloc_table(&rctx->data_sg, sg_count, gfp); in ccp_do_cmac_update()
ccp-crypto-sha.c
66 unsigned int sg_count; in ccp_do_sha_update() local
103 sg_count = sg_nents(req->src) + 1; in ccp_do_sha_update()
104 ret = sg_alloc_table(&rctx->data_sg, sg_count, gfp); in ccp_do_sha_update()
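The ccp hits size a temporary sg_table from sg_nents(req->src) plus one or two extra slots (for the driver's own buffer and optional padding) before copying entries across. A sketch of that allocation step; extra_ents is a generic parameter standing in for the driver's +1/+2 choice:

```c
#include <linux/scatterlist.h>
#include <linux/gfp.h>

/* Allocate an sg_table large enough for a source scatterlist plus a
 * few driver-private entries, as the ccp hits above do.
 */
static int sketch_alloc_data_sg(struct sg_table *tbl,
				struct scatterlist *src, unsigned int nbytes,
				unsigned int extra_ents, gfp_t gfp)
{
	unsigned int sg_count;

	sg_count = nbytes ? sg_nents(src) + extra_ents : extra_ents;

	return sg_alloc_table(tbl, sg_count, gfp);	/* 0 on success */
}
```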
/linux-4.1.27/drivers/mmc/host/
rtsx_pci_sdmmc.c
57 int sg_count; member
188 host->sg_count = count; in sd_pre_dma_transfer()
489 err = rtsx_pci_dma_transfer(pcr, data->sg, host->sg_count, 1, 10000); in sd_read_long_data()
547 err = rtsx_pci_dma_transfer(pcr, data->sg, host->sg_count, 0, 10000); in sd_write_long_data()
560 if (host->sg_count < 0) { in sd_rw_multi()
561 data->error = host->sg_count; in sd_rw_multi()
563 __func__, host->sg_count); in sd_rw_multi()
sdhci.c
512 host->sg_count = sdhci_pre_dma_transfer(host, data); in sdhci_adma_table_pre()
513 if (host->sg_count < 0) in sdhci_adma_table_pre()
521 for_each_sg(data->sg, sg, host->sg_count, i) { in sdhci_adma_table_pre()
628 for_each_sg(data->sg, sg, host->sg_count, i) in sdhci_adma_table_post()
640 for_each_sg(data->sg, sg, host->sg_count, i) { in sdhci_adma_table_post()
2163 int sg_count; in sdhci_pre_dma_transfer() local
2167 return data->sg_count; in sdhci_pre_dma_transfer()
2172 sg_count = dma_map_sg(mmc_dev(host->mmc), data->sg, data->sg_len, in sdhci_pre_dma_transfer()
2176 if (sg_count == 0) in sdhci_pre_dma_transfer()
2179 data->sg_count = sg_count; in sdhci_pre_dma_transfer()
[all …]
sdhci.h
468 int sg_count; /* Mapped sg entries */ member
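sdhci_pre_dma_transfer() maps data->sg once with dma_map_sg(), treats a return of 0 as failure, and caches the count in data->sg_count (the mmc_data member listed under include/linux/mmc/core.h) so the ADMA table build and teardown can reuse it. A hedged sketch of that step; the host_cookie bookkeeping sdhci uses for pre-mapped requests is reduced here to a simple cached-count check:

```c
#include <linux/mmc/core.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>

/* Map an MMC request's scatterlist once and cache the mapped entry
 * count in data->sg_count, roughly as the sdhci hits above do.
 */
static int sketch_pre_dma_transfer(struct device *dev, struct mmc_data *data,
				   enum dma_data_direction dir)
{
	int sg_count;

	if (data->sg_count)		/* already mapped for this request */
		return data->sg_count;

	sg_count = dma_map_sg(dev, data->sg, data->sg_len, dir);
	if (sg_count == 0)
		return -ENOSPC;		/* mapping failed */

	data->sg_count = sg_count;
	return sg_count;
}
```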
/linux-4.1.27/sound/soc/intel/atom/
sst-mfld-dsp.h
384 __u16 sg_count; member
410 u8 sg_count; member
sst-mfld-platform-compress.c
159 str_params.aparams.sg_count = 1; in sst_platform_compr_set_params()
sst-mfld-platform-pcm.c
145 alloc_param->sg_count = 1; in sst_fill_alloc_params()
/linux-4.1.27/sound/soc/intel/atom/sst/
sst_stream.c
54 alloc_param.sg_count = str_params->aparams.sg_count; in sst_alloc_stream_mrfld()
/linux-4.1.27/drivers/memstick/host/
r592.c
280 int len, sg_count; in r592_transfer_fifo_dma() local
298 sg_count = dma_map_sg(&dev->pci_dev->dev, &dev->req->sg, 1, is_write ? in r592_transfer_fifo_dma()
301 if (sg_count != 1 || in r592_transfer_fifo_dma()
/linux-4.1.27/include/linux/mmc/
core.h
124 int sg_count; /* mapped sg entries */ member
/linux-4.1.27/drivers/infiniband/hw/ipath/
ipath_sdma.c
693 if (tx->txreq.sg_count > ipath_sdma_descq_freecnt(dd)) { in ipath_sdma_verbs_send()
798 dd->ipath_sdma_descq_added += tx->txreq.sg_count; in ipath_sdma_verbs_send()
ipath_kernel.h
206 int sg_count; member
ipath_verbs.c
1167 tx->txreq.sg_count = ndesc; in ipath_verbs_send_dma()
1191 tx->txreq.sg_count = 1; in ipath_verbs_send_dma()
/linux-4.1.27/drivers/infiniband/hw/qib/
Dqib_sdma.c555 if (tx->txreq.sg_count > qib_sdma_descq_freecnt(ppd)) { in qib_sdma_verbs_send()
651 ppd->sdma_descq_added += tx->txreq.sg_count; in qib_sdma_verbs_send()
qib_verbs.c
1052 if (qp->s_tx->txreq.sg_count > avail) in qib_verbs_sdma_desc_avail()
1054 avail -= qp->s_tx->txreq.sg_count; in qib_verbs_sdma_desc_avail()
1193 tx->txreq.sg_count = ndesc; in qib_verbs_send_dma()
1217 tx->txreq.sg_count = 1; in qib_verbs_send_dma()
qib.h
247 int sg_count; member
/linux-4.1.27/drivers/scsi/aic7xxx/
aic7xxx_osm.c
475 if ((scb->sg_count + 1) > AHC_NSEG) in ahc_linux_map_seg()
1521 scb->sg_count = 0; in ahc_linux_run_command()
1544 scb->sg_count += consumed; in ahc_linux_run_command()
1565 scb->sg_count = 0; in ahc_linux_run_command()
aic7xxx.h
578 u_int sg_count;/* How full ahc_dma_seg is */ member
aic7xxx_core.c
413 if (scb->sg_count == 0) in ahc_sync_sglist()
419 /*len*/sizeof(struct ahc_dma_seg) * scb->sg_count, op); in ahc_sync_sglist()
1143 scb->sg_count = 1; in ahc_handle_seqint()
1405 ahc_get_transfer_length(scb), scb->sg_count); in ahc_handle_seqint()
1406 if (scb->sg_count > 0) { in ahc_handle_seqint()
1407 for (i = 0; i < scb->sg_count; i++) { in ahc_handle_seqint()
2121 if (scb->sg_count > 0) {
2122 for (i = 0; i < scb->sg_count; i++) {
aic79xx.h
621 u_int sg_count;/* How full ahd_dma_seg is */ member
aic79xx_core.c
417 scb->sg_count++; in ahd_sg_setup()
538 if (scb->sg_count == 0) in ahd_sync_sglist()
544 /*len*/ahd_sg_size(ahd) * scb->sg_count, op); in ahd_sync_sglist()
863 if (scb->sg_count != 0) in ahd_queue_scb()
1739 if (scb->sg_count > 0) { in ahd_dump_sglist()
1744 for (i = 0; i < scb->sg_count; i++) { in ahd_dump_sglist()
1762 for (i = 0; i < scb->sg_count; i++) { in ahd_dump_sglist()
2234 ahd_get_transfer_length(scb), scb->sg_count); in ahd_handle_seqint()
9067 scb->sg_count = 0; in ahd_handle_scsi_status()
aic79xx_osm.c
835 reset_scb->sg_count = 0; in ahd_linux_dev_reset()
1634 scb->sg_count = 0; in ahd_linux_run_command()
/linux-4.1.27/drivers/scsi/bnx2i/
bnx2i_iscsi.c
150 int sg_count; in bnx2i_map_scsi_sg() local
157 sg_count = scsi_dma_map(sc); in bnx2i_map_scsi_sg()
159 scsi_for_each_sg(sc, sg, sg_count, i) { in bnx2i_map_scsi_sg()
/linux-4.1.27/drivers/scsi/bnx2fc/
bnx2fc_io.c
1650 int sg_count = 0; in bnx2fc_map_sg() local
1661 sg_count = dma_map_sg(&hba->pcidev->dev, scsi_sglist(sc), in bnx2fc_map_sg()
1663 scsi_for_each_sg(sc, sg, sg_count, i) { in bnx2fc_map_sg()
/linux-4.1.27/drivers/block/
sx8.c
322 u8 sg_count; member
905 msg->sg_count = n_elem; in carm_rq_fn()
/linux-4.1.27/drivers/vhost/
scsi.c
862 struct scatterlist *sg, int sg_count) in vhost_scsi_iov_to_sgl() argument
873 for (i = 0; i < sg_count; i++) { in vhost_scsi_iov_to_sgl()