Searched refs:sg_count (Results 1 - 63 of 63) sorted by relevance

/linux-4.1.27/drivers/crypto/caam/
sg_sw_sec4.h
33 sg_to_sec4_sg(struct scatterlist *sg, int sg_count, sg_to_sec4_sg() argument
36 while (sg_count) { sg_to_sec4_sg()
41 sg_count--; sg_to_sec4_sg()
50 static inline void sg_to_sec4_sg_last(struct scatterlist *sg, int sg_count, sg_to_sec4_sg_last() argument
54 sec4_sg_ptr = sg_to_sec4_sg(sg, sg_count, sec4_sg_ptr, offset); sg_to_sec4_sg_last()
77 static inline int sg_count(struct scatterlist *sg_list, int nbytes, sg_count() function
caamalg.c
2631 assoc_nents = sg_count(req->assoc, req->assoclen, &assoc_chained); aead_edesc_alloc()
2634 src_nents = sg_count(req->src, req->cryptlen, &src_chained); aead_edesc_alloc()
2635 dst_nents = sg_count(req->dst, aead_edesc_alloc()
2640 src_nents = sg_count(req->src, aead_edesc_alloc()
2858 assoc_nents = sg_count(req->assoc, req->assoclen, &assoc_chained); aead_giv_edesc_alloc()
2859 src_nents = sg_count(req->src, req->cryptlen, &src_chained); aead_giv_edesc_alloc()
2862 dst_nents = sg_count(req->dst, req->cryptlen + ctx->authsize, aead_giv_edesc_alloc()
3074 src_nents = sg_count(req->src, req->nbytes, &src_chained); ablkcipher_edesc_alloc()
3077 dst_nents = sg_count(req->dst, req->nbytes, &dst_chained); ablkcipher_edesc_alloc()
3253 src_nents = sg_count(req->src, req->nbytes, &src_chained); ablkcipher_giv_edesc_alloc()
3256 dst_nents = sg_count(req->dst, req->nbytes, &dst_chained); ablkcipher_giv_edesc_alloc()
caamhash.c
1089 src_nents = sg_count(req->src, req->nbytes, &chained); ahash_digest()
1441 src_nents = sg_count(req->src, req->nbytes - (*next_buflen), ahash_update_first()
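
The caam hits above all funnel into the sg_count() helper declared at sg_sw_sec4.h line 77: the aead, ablkcipher and ahash paths call it to learn how many scatterlist entries a request spans before sizing their descriptor tables. A minimal sketch of such a byte-bounded counter (illustrative only, not the exact caam code; reporting a *chained flag is inferred from the &*_chained arguments visible above):

#include <linux/scatterlist.h>

/* Sketch: count how many scatterlist entries are needed to cover nbytes,
 * and note whether the list uses chaining (so callers know it cannot be
 * treated as one flat array). */
static int sg_count_sketch(struct scatterlist *sg_list, int nbytes,
                           bool *chained)
{
        struct scatterlist *sg = sg_list;
        int nents = 0;

        *chained = false;
        while (nbytes > 0 && sg) {
                nents++;
                nbytes -= sg->length;
                if (!sg_is_last(sg) && (sg + 1) != sg_next(sg))
                        *chained = true;        /* next entry reached via a chain link */
                sg = sg_next(sg);
        }
        return nents;
}
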
/linux-4.1.27/drivers/staging/i2o/
memory.c
33 u16 sg_count = i2o_sg_tablesize() local
42 sg_count -= 2; i2o_sg_tablesize()
43 sg_count /= 3; i2o_sg_tablesize()
45 sg_count /= 2; i2o_sg_tablesize()
47 if (c->short_req && (sg_count > 8)) i2o_sg_tablesize()
48 sg_count = 8; i2o_sg_tablesize()
50 return sg_count; i2o_sg_tablesize()
116 * @sg_count: number of elements in the SG list
128 int sg_count, enum dma_data_direction direction, u32 ** sg_ptr) i2o_dma_map_sg()
144 sg_count = dma_map_sg(&c->pdev->dev, sg, sg_count, direction); i2o_dma_map_sg()
145 if (!sg_count) i2o_dma_map_sg()
155 while (sg_count-- > 0) { i2o_dma_map_sg()
156 if (!sg_count) i2o_dma_map_sg()
127 i2o_dma_map_sg(struct i2o_controller *c, struct scatterlist *sg, int sg_count, enum dma_data_direction direction, u32 ** sg_ptr) i2o_dma_map_sg() argument
i2o_config.c
537 u32 sg_count = 0; i2o_cfg_passthru32() local
613 sg_count = i2o_cfg_passthru32()
615 if (sg_count > SG_TABLESIZE) { i2o_cfg_passthru32()
617 c->name, sg_count); i2o_cfg_passthru32()
622 for (i = 0; i < sg_count; i++) { i2o_cfg_passthru32()
640 c->name, sg_size, i, sg_count); i2o_cfg_passthru32()
699 sg_count = i2o_cfg_passthru32()
704 for (j = 0; j < sg_count; j++) { i2o_cfg_passthru32()
788 u32 sg_count = 0; i2o_cfg_passthru() local
856 sg_count = i2o_cfg_passthru()
858 if (sg_count > SG_TABLESIZE) { i2o_cfg_passthru()
860 c->name, sg_count); i2o_cfg_passthru()
865 for (i = 0; i < sg_count; i++) { i2o_cfg_passthru()
882 c->name, sg_size, i, sg_count); i2o_cfg_passthru()
939 sg_count = i2o_cfg_passthru()
944 for (j = 0; j < sg_count; j++) { i2o_cfg_passthru()
i2o.h
700 struct scatterlist *sg, int sg_count,
/linux-4.1.27/drivers/spi/
spi-mxs.c
181 int sg_count; mxs_spi_txrx_dma() local
209 for (sg_count = 0; sg_count < sgs; sg_count++) { mxs_spi_txrx_dma()
217 if ((sg_count + 1 == sgs) && (flags & TXRX_DEASSERT_CS)) mxs_spi_txrx_dma()
225 dma_xfer[sg_count].pio[0] = ctrl0; mxs_spi_txrx_dma()
226 dma_xfer[sg_count].pio[3] = min; mxs_spi_txrx_dma()
235 sg_init_table(&dma_xfer[sg_count].sg, 1); mxs_spi_txrx_dma()
236 sg_set_page(&dma_xfer[sg_count].sg, vm_page, mxs_spi_txrx_dma()
239 sg_init_one(&dma_xfer[sg_count].sg, buf, min); mxs_spi_txrx_dma()
242 ret = dma_map_sg(ssp->dev, &dma_xfer[sg_count].sg, 1, mxs_spi_txrx_dma()
250 (struct scatterlist *)dma_xfer[sg_count].pio, mxs_spi_txrx_dma()
253 sg_count ? DMA_PREP_INTERRUPT : 0); mxs_spi_txrx_dma()
262 &dma_xfer[sg_count].sg, 1, mxs_spi_txrx_dma()
296 while (--sg_count >= 0) { mxs_spi_txrx_dma()
298 dma_unmap_sg(ssp->dev, &dma_xfer[sg_count].sg, 1, mxs_spi_txrx_dma()
/linux-4.1.27/drivers/scsi/arm/
scsi.h
101 unsigned i, sg_count = scsi_sg_count(SCpnt); init_SCp() local
103 scsi_for_each_sg(SCpnt, sg, sg_count, i) init_SCp()
/linux-4.1.27/drivers/crypto/
talitos.c
905 static int sg_to_link_tbl(struct scatterlist *sg, int sg_count, sg_to_link_tbl() argument
908 int n_sg = sg_count; sg_to_link_tbl()
925 sg_count--; sg_to_link_tbl()
934 return sg_count; sg_to_link_tbl()
952 int sg_count, ret; ipsec_esp() local
970 sg_count = sg_to_link_tbl(areq->assoc, edesc->assoc_nents - 1, ipsec_esp()
974 tbl_ptr += sg_count - 1; ipsec_esp()
1013 sg_count = talitos_map_sg(dev, areq->src, edesc->src_nents ? : 1, ipsec_esp()
1018 if (sg_count == 1) { ipsec_esp()
1026 sg_count = sg_to_link_tbl(areq->src, sg_count, sg_link_tbl_len, ipsec_esp()
1028 if (sg_count > 1) { ipsec_esp()
1046 sg_count = talitos_map_sg(dev, areq->dst, ipsec_esp()
1050 if (sg_count == 1) { ipsec_esp()
1058 sg_count = sg_to_link_tbl(areq->dst, sg_count, cryptlen, ipsec_esp()
1062 tbl_ptr += sg_count - 1; ipsec_esp()
1093 static int sg_count(struct scatterlist *sg_list, int nbytes, bool *chained) sg_count() function
1148 assoc_nents = sg_count(assoc, assoclen, &assoc_chained); talitos_edesc_alloc()
1158 src_nents = sg_count(src, cryptlen + authsize, &src_chained); talitos_edesc_alloc()
1162 src_nents = sg_count(src, cryptlen + (encrypt ? 0 : authsize), talitos_edesc_alloc()
1165 dst_nents = sg_count(dst, cryptlen + (encrypt ? authsize : 0), talitos_edesc_alloc()
1374 int sg_count, ret; common_nonsnoop() local
1396 sg_count = talitos_map_sg(dev, areq->src, edesc->src_nents ? : 1, common_nonsnoop()
1401 if (sg_count == 1) { common_nonsnoop()
1404 sg_count = sg_to_link_tbl(areq->src, sg_count, cryptlen, common_nonsnoop()
1406 if (sg_count > 1) { common_nonsnoop()
1424 sg_count = talitos_map_sg(dev, areq->dst, common_nonsnoop()
1428 if (sg_count == 1) { common_nonsnoop()
1438 sg_count = sg_to_link_tbl(areq->dst, sg_count, cryptlen, common_nonsnoop()
1563 int sg_count, ret; common_nonsnoop_hash() local
1594 sg_count = talitos_map_sg(dev, req_ctx->psrc, common_nonsnoop_hash()
1598 if (sg_count == 1) { common_nonsnoop_hash()
1601 sg_count = sg_to_link_tbl(req_ctx->psrc, sg_count, length, common_nonsnoop_hash()
1603 if (sg_count > 1) { common_nonsnoop_hash()
1712 sg_count(areq->src, nbytes, &chained), ahash_process_req()
1745 int nents = sg_count(areq->src, nbytes, &chained); ahash_process_req()
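
The talitos lines repeat one shape three times (ipsec_esp(), common_nonsnoop(), common_nonsnoop_hash()): DMA-map the scatterlist, use the single mapped segment directly when sg_count comes back as 1, and otherwise turn the list into a link table the crypto engine walks on its own. A rough sketch of that decision, with simplified stand-ins for the driver's structures (link_tbl_entry_sketch and map_src_sketch are illustrative names, not the real talitos types):

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

struct link_tbl_entry_sketch {  /* simplified; the real talitos descriptor entry has more fields */
        __be64 addr;
        __be32 len;
};

static int map_src_sketch(struct device *dev, struct scatterlist *src,
                          int src_nents, struct link_tbl_entry_sketch *tbl,
                          dma_addr_t *single_ptr)
{
        struct scatterlist *sg;
        int sg_count, i;

        sg_count = dma_map_sg(dev, src, src_nents ? : 1, DMA_TO_DEVICE);
        if (sg_count == 1) {
                /* one contiguous mapping: no table needed, point at it directly */
                *single_ptr = sg_dma_address(src);
                return sg_count;
        }

        /* several segments: emit one link-table entry per mapped segment */
        for_each_sg(src, sg, sg_count, i) {
                tbl[i].addr = cpu_to_be64(sg_dma_address(sg));
                tbl[i].len = cpu_to_be32(sg_dma_len(sg));
        }
        return sg_count;
}
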
picoxcell_crypto.c
264 static int sg_count(struct scatterlist *sg_list, int nbytes) sg_count() function
300 nents = sg_count(payload, nbytes); spacc_sg_to_ddt()
328 unsigned nents = sg_count(areq->src, areq->cryptlen); spacc_aead_make_ddts()
348 sg_count(areq->assoc, areq->assoclen), DMA_TO_DEVICE); spacc_aead_make_ddts()
412 unsigned nents = sg_count(areq->src, areq->cryptlen); spacc_aead_free_ddts()
417 sg_count(areq->dst, areq->cryptlen), spacc_aead_free_ddts()
423 sg_count(areq->assoc, areq->assoclen), DMA_TO_DEVICE); spacc_aead_free_ddts()
435 unsigned nents = sg_count(payload, nbytes); spacc_free_ddt()
bfin_crc.c
103 static int sg_count(struct scatterlist *sg_list) sg_count() function
163 if (sg_count(req->src) > CRC_MAX_DMA_DESC) { bfin_crypto_crc_init()
380 nsg = ctx->sg_nents = sg_count(ctx->sg); bfin_crypto_crc_handle_queue()
/linux-4.1.27/drivers/scsi/
storvsc_drv.c
559 unsigned int sg_count) destroy_bounce_buffer()
564 for (i = 0; i < sg_count; i++) { destroy_bounce_buffer()
573 static int do_bounce_buffer(struct scatterlist *sgl, unsigned int sg_count) do_bounce_buffer() argument
578 if (sg_count < 2) do_bounce_buffer()
582 for (i = 0; i < sg_count; i++) { do_bounce_buffer()
587 } else if (i == sg_count - 1) { do_bounce_buffer()
601 unsigned int sg_count, create_bounce_buffer()
1559 unsigned int sg_count = 0; storvsc_queuecommand() local
1618 sg_count = scsi_sg_count(scmnd); storvsc_queuecommand()
1624 if (sg_count) { storvsc_queuecommand()
1628 create_bounce_buffer(sgl, sg_count, storvsc_queuecommand()
1639 cmd_request->bounce_sgl, sg_count); storvsc_queuecommand()
1642 sg_count = cmd_request->bounce_sgl_count; storvsc_queuecommand()
1646 if (sg_count > MAX_PAGE_BUFFER_COUNT) { storvsc_queuecommand()
1648 payload_sz = (sg_count * sizeof(void *) + storvsc_queuecommand()
1665 for (i = 0; i < sg_count; i++) { storvsc_queuecommand()
558 destroy_bounce_buffer(struct scatterlist *sgl, unsigned int sg_count) destroy_bounce_buffer() argument
600 create_bounce_buffer(struct scatterlist *sgl, unsigned int sg_count, unsigned int len, int write) create_bounce_buffer() argument
qlogicpti.c
895 int sg_count; load_cmd() local
898 sg_count = dma_map_sg(&qpti->op->dev, sg, load_cmd()
903 cmd->segment_cnt = sg_count; load_cmd()
906 n = sg_count; load_cmd()
913 sg_count -= 4;
915 while (sg_count > 0) {
930 n = sg_count;
937 sg_count -= n;
stex.c
183 __le16 sg_count; member in struct:st_sgtable
286 int sg_count; member in struct:st_ccb
425 ccb->sg_count = nseg; stex_map_sg()
426 dst->sg_count = cpu_to_le16((u16)nseg); stex_map_sg()
457 ccb->sg_count = nseg; stex_ss_map_sg()
458 dst->sg_count = cpu_to_le16((u16)nseg); stex_ss_map_sg()
534 addr += (hba->ccb[tag].sg_count+4)/11; stex_ss_send_cmd()
675 hba->ccb[tag].sg_count = 0; stex_queuecommand_lck()
1731 hba->ccb[tag].sg_count = 0; stex_hba_stop()
aha1542.c
378 int mbo, sg_count; aha1542_queuecommand() local
402 sg_count = scsi_sg_count(cmd); aha1542_queuecommand()
403 cptr = kmalloc(sizeof(*cptr) * sg_count, GFP_KERNEL | GFP_DMA); aha1542_queuecommand()
456 scsi_for_each_sg(cmd, sg, sg_count, i) { scsi_for_each_sg()
461 any2scsi(ccb[mbo].datalen, sg_count * sizeof(struct chain));
mvumi.c
199 * @sg_count return the number of SG elements
204 void *sgl_p, unsigned char *sg_count) mvumi_make_sgl()
214 *sg_count = pci_map_sg(mhba->pdev, sg, sgnum, mvumi_make_sgl()
216 if (*sg_count > mhba->max_sge) { mvumi_make_sgl()
219 *sg_count, mhba->max_sge); mvumi_make_sgl()
222 for (i = 0; i < *sg_count; i++) { mvumi_make_sgl()
228 if ((i + 1) == *sg_count) mvumi_make_sgl()
244 *sg_count = 1; mvumi_make_sgl()
203 mvumi_make_sgl(struct mvumi_hba *mhba, struct scsi_cmnd *scmd, void *sgl_p, unsigned char *sg_count) mvumi_make_sgl() argument
dc395x.c
236 u8 sg_count; /* No of HW sg entries for this request */ member in struct:ScsiReqBlk
990 srb->sg_count = 0; build_srb()
1019 srb->sg_count = nseg; build_srb()
1024 srb->sg_count); build_srb()
1026 scsi_for_each_sg(cmd, sg, srb->sg_count, i) { build_srb()
1033 sgp += srb->sg_count - 1; build_srb()
1212 srb->segment_x, srb->sg_count, srb->sg_index, dump_register_info()
1939 for (; idx < srb->sg_count; psge++, idx++) sg_verify_length()
1969 for (idx = srb->sg_index; idx < srb->sg_count; idx++) { sg_update_list()
2284 srb->sg_count, &offset, &len); data_in_phase0()
2411 srb->total_xfer_length, srb->sg_index, srb->sg_count); data_io_transfer()
2414 if (srb->sg_index >= srb->sg_count) { data_io_transfer()
2446 ((u32)(srb->sg_count - data_io_transfer()
2501 srb->sg_count, &offset, &len); data_io_transfer()
2536 if (srb->sg_count) { data_io_transfer()
3323 srb, scsi_sg_count(cmd), srb->sg_index, srb->sg_count, srb_done()
3705 srb->sg_count = 1; request_sense()
ips.h
430 uint8_t sg_count; member in struct:__anon8996
639 uint8_t sg_count; member in struct:__anon9013
654 uint16_t sg_count; member in struct:__anon9014
1105 int sg_count; member in struct:ips_scb
dpt_i2o.c
1715 u32 sg_count = 0; adpt_i2o_passthru() local
1761 sg_count = (size - sg_offset*4) / sizeof(struct sg_simple_element); adpt_i2o_passthru()
1762 if (sg_count > pHba->sg_tablesize){ adpt_i2o_passthru()
1763 printk(KERN_DEBUG"%s:IOCTL SG List too large (%u)\n", pHba->name,sg_count); adpt_i2o_passthru()
1768 for(i = 0; i < sg_count; i++) { adpt_i2o_passthru()
1781 pHba->name,sg_size,i,sg_count); adpt_i2o_passthru()
1848 sg_count = (size - sg_offset*4) / sizeof(struct sg_simple_element); adpt_i2o_passthru()
1852 for (j = 0; j < sg_count; j++) { adpt_i2o_passthru()
hptiop.c
1013 int sg_count = 0; hptiop_queuecommand_lck() local
1050 sg_count = hptiop_buildsgl(scp, req->sg_list); hptiop_queuecommand_lck()
1051 if (!sg_count) hptiop_queuecommand_lck()
1064 + sg_count * sizeof(struct hpt_iopsg)); hptiop_queuecommand_lck()
3w-9xxx.c
1806 int i, sg_count; twa_scsiop_execute_scsi() local
1858 sg_count = scsi_dma_map(srb); twa_scsiop_execute_scsi()
1859 if (sg_count < 0) twa_scsiop_execute_scsi()
1862 scsi_for_each_sg(srb, sg, sg_count, i) { scsi_for_each_sg()
3w-sas.c
298 int i, sg_count; twl_scsiop_execute_scsi() local
340 sg_count = scsi_dma_map(srb); twl_scsiop_execute_scsi()
341 if (sg_count <= 0) twl_scsiop_execute_scsi()
344 scsi_for_each_sg(srb, sg, sg_count, i) { scsi_for_each_sg()
53c700.c
1872 int sg_count; NCR_700_queuecommand_lck() local
1877 sg_count = scsi_dma_map(SCp); NCR_700_queuecommand_lck()
1878 BUG_ON(sg_count < 0); NCR_700_queuecommand_lck()
1880 scsi_for_each_sg(SCp, sg, sg_count, i) { scsi_for_each_sg()
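
3w-9xxx, 3w-sas and 53c700 above (and ips.c, atp870u.c and bnx2i further down) all use the same mid-layer idiom: scsi_dma_map() maps the command's scatterlist and returns the number of mapped segments (negative on failure, 0 for no data), and scsi_for_each_sg() walks exactly that many entries to fill the controller's SG descriptors. A minimal sketch, where struct my_sg_desc is a made-up stand-in for the hardware descriptor format:

#include <scsi/scsi_cmnd.h>

struct my_sg_desc {             /* illustrative hardware SG element */
        __le64 addr;
        __le32 len;
};

static int build_sgl_sketch(struct scsi_cmnd *cmd, struct my_sg_desc *desc)
{
        struct scatterlist *sg;
        int i, sg_count;

        sg_count = scsi_dma_map(cmd);
        if (sg_count < 0)
                return sg_count;        /* mapping failed; caller must not issue the command */

        scsi_for_each_sg(cmd, sg, sg_count, i) {
                desc[i].addr = cpu_to_le64(sg_dma_address(sg));
                desc[i].len = cpu_to_le32(sg_dma_len(sg));
        }
        return sg_count;                /* 0 means the command carries no data */
}
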
sr.c
454 int i, size = 0, sg_count = scsi_sg_count(SCpnt); sr_init_command() local
456 scsi_for_each_sg(SCpnt, sg, sg_count, i) sr_init_command()
libiscsi_tcp.c
369 struct scatterlist *sg_list, unsigned int sg_count, iscsi_segment_seek_sg()
377 for_each_sg(sg_list, sg, sg_count, i) { for_each_sg()
368 iscsi_segment_seek_sg(struct iscsi_segment *segment, struct scatterlist *sg_list, unsigned int sg_count, unsigned int offset, size_t size, iscsi_segment_done_fn_t *done, struct hash_desc *hash) iscsi_segment_seek_sg() argument
scsi_lib.c
3081 * @sg_count: number of segments in sg
3087 void *scsi_kmap_atomic_sg(struct scatterlist *sgl, int sg_count, scsi_kmap_atomic_sg() argument
3097 for_each_sg(sgl, sg, sg_count, i) { for_each_sg()
3104 if (unlikely(i == sg_count)) {
3107 __func__, sg_len, *offset, sg_count);
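
scsi_kmap_atomic_sg() above takes an sg_count so its walk is bounded: it scans that many entries for the one containing byte *offset, rewrites *offset to be relative to that entry, and (in the real function) kmaps the corresponding page. A sketch of just the seek step (seek_sg_sketch is an illustrative name, not kernel API):

#include <linux/scatterlist.h>

static struct scatterlist *seek_sg_sketch(struct scatterlist *sgl,
                                          int sg_count, size_t *offset)
{
        struct scatterlist *sg;
        size_t consumed = 0;
        int i;

        for_each_sg(sgl, sg, sg_count, i) {
                if (*offset < consumed + sg->length) {
                        *offset -= consumed;    /* now relative to this entry */
                        return sg;
                }
                consumed += sg->length;
        }
        return NULL;    /* offset lies past the end of the list */
}
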
ips.c
2680 scb->sg_count = scsi_dma_map(SC); ips_next()
2681 BUG_ON(scb->sg_count < 0); ips_next()
2682 if (scb->sg_count) { ips_next()
2688 scsi_for_each_sg(SC, sg, scb->sg_count, i) { ips_next()
3592 scb->cmd.basic_io.sg_count = scb->sg_len; ips_send_cmd()
3638 scb->cmd.basic_io.sg_count = scb->sg_len; ips_send_cmd()
3786 tapeDCDB->sg_count = scb->sg_len; ips_send_cmd()
3825 scb->dcdb.sg_count = scb->sg_len; ips_send_cmd()
5691 scb->cmd.basic_io.sg_count = 0; ips_read_adapter_status()
5734 scb->cmd.basic_io.sg_count = 0; ips_read_subsystem_parameters()
hpsa_cmd.h
552 u8 sg_count; /* Number of sg elements */ member in struct:io_accel2_cmd
FlashPoint.c
4953 unsigned char sg_count, i; FPT_busMstrSGDataXferStart() local
4962 sg_count = 0; FPT_busMstrSGDataXferStart()
4972 while ((sg_count < (unsigned char)SG_BUF_CNT) && FPT_busMstrSGDataXferStart()
4982 if ((!sg_count) && (pcurrSCCB->Sccb_SGoffset)) { FPT_busMstrSGDataXferStart()
4998 sg_count++; FPT_busMstrSGDataXferStart()
5004 WR_HARPOON(p_port + hp_sg_addr, (sg_count << 4)); FPT_busMstrSGDataXferStart()
atp870u.c
717 unsigned long sg_count; send_s870() local
848 sg_count = scsi_dma_map(workreq); send_s870()
904 scsi_for_each_sg(workreq, sgpnt, sg_count, j) { scsi_for_each_sg()
hpsa.c
792 c->busaddr |= (h->ioaccel2_blockFetchTable[cp->sg_count]); set_ioaccel2_performant_mode()
3606 cp->sg_count = (u8) use_sg;
7585 VERIFY_OFFSET(sg_count, 45); verify_offsets()
advansys.c
1827 ADV_DCNT sg_count; /* SG element count. */ member in struct:asc_sg_block::__anon8560
2662 printk(" [%u]: sg_addr 0x%lx, sg_count 0x%lx\n", asc_prt_adv_sgblock()
2664 (ulong)b->sg_list[i].sg_count); asc_prt_adv_sgblock()
8060 sg_block->sg_list[i].sg_count = adv_get_sglist()
/linux-4.1.27/drivers/crypto/ccp/
ccp-crypto-aes-cmac.c
67 unsigned int need_pad, sg_count; ccp_do_cmac_update() local
110 sg_count = (nbytes) ? sg_nents(req->src) + 2 : 2; ccp_do_cmac_update()
113 ret = sg_alloc_table(&rctx->data_sg, sg_count, gfp); ccp_do_cmac_update()
ccp-crypto-sha.c
66 unsigned int sg_count; ccp_do_sha_update() local
103 sg_count = sg_nents(req->src) + 1; ccp_do_sha_update()
104 ret = sg_alloc_table(&rctx->data_sg, sg_count, gfp); ccp_do_sha_update()
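
Both ccp hits size a fresh sg_table from the incoming request: sg_nents() counts the entries in req->src, extra slots are reserved for the driver's own context/padding blocks (one extra for SHA, two for CMAC per the lines above), and sg_alloc_table() allocates the result. A compact sketch of that sizing step (alloc_data_sg_sketch and the extra parameter are illustrative, not the driver's code):

#include <linux/scatterlist.h>

static int alloc_data_sg_sketch(struct sg_table *table,
                                struct scatterlist *src, unsigned int nbytes,
                                unsigned int extra, gfp_t gfp)
{
        /* an empty payload still needs the extra driver-owned entries */
        unsigned int sg_count = nbytes ? sg_nents(src) + extra : extra;

        return sg_alloc_table(table, sg_count, gfp);
}
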
/linux-4.1.27/drivers/mmc/host/
rtsx_pci_sdmmc.c
57 int sg_count; member in struct:realtek_pci_sdmmc
188 host->sg_count = count; sd_pre_dma_transfer()
489 err = rtsx_pci_dma_transfer(pcr, data->sg, host->sg_count, 1, 10000); sd_read_long_data()
547 err = rtsx_pci_dma_transfer(pcr, data->sg, host->sg_count, 0, 10000); sd_write_long_data()
560 if (host->sg_count < 0) { sd_rw_multi()
561 data->error = host->sg_count; sd_rw_multi()
562 dev_dbg(sdmmc_dev(host), "%s: sg_count = %d is invalid\n", sd_rw_multi()
563 __func__, host->sg_count); sd_rw_multi()
sdhci.c
512 host->sg_count = sdhci_pre_dma_transfer(host, data); sdhci_adma_table_pre()
513 if (host->sg_count < 0) sdhci_adma_table_pre()
521 for_each_sg(data->sg, sg, host->sg_count, i) { sdhci_adma_table_pre()
628 for_each_sg(data->sg, sg, host->sg_count, i) sdhci_adma_table_post()
640 for_each_sg(data->sg, sg, host->sg_count, i) { sdhci_adma_table_post()
2163 int sg_count; sdhci_pre_dma_transfer() local
2167 return data->sg_count; sdhci_pre_dma_transfer()
2172 sg_count = dma_map_sg(mmc_dev(host->mmc), data->sg, data->sg_len, sdhci_pre_dma_transfer()
2176 if (sg_count == 0) sdhci_pre_dma_transfer()
2179 data->sg_count = sg_count; sdhci_pre_dma_transfer()
2182 return sg_count; sdhci_pre_dma_transfer()
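
sdhci_pre_dma_transfer() above is the caching variant of the same mapping step: if the block layer's pre_req hook already mapped the request, the cached data->sg_count is reused; otherwise the scatterlist is mapped now and the count stored for the post hook to unmap later. A simplified sketch of that shape (the host_cookie test as the "already mapped" marker and the -ENOSPC error value are assumptions of the sketch, not lifted from this version of the driver):

#include <linux/dma-mapping.h>
#include <linux/mmc/core.h>

static int pre_dma_transfer_sketch(struct device *dev, struct mmc_data *data)
{
        int sg_count;

        if (data->host_cookie)          /* assumed marker set by the pre_req path */
                return data->sg_count;  /* already mapped: reuse the cached count */

        sg_count = dma_map_sg(dev, data->sg, data->sg_len,
                              (data->flags & MMC_DATA_WRITE) ?
                              DMA_TO_DEVICE : DMA_FROM_DEVICE);
        if (sg_count == 0)
                return -ENOSPC;         /* arbitrary error choice for the sketch */

        data->sg_count = sg_count;      /* cached for the post hook's unmap */
        return sg_count;
}
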
sdhci.h
468 int sg_count; /* Mapped sg entries */ member in struct:sdhci_host
/linux-4.1.27/include/scsi/
libiscsi_tcp.h
117 struct scatterlist *sg_list, unsigned int sg_count,
scsi_cmnd.h
162 extern void *scsi_kmap_atomic_sg(struct scatterlist *sg, int sg_count,
/linux-4.1.27/sound/soc/intel/atom/sst/
sst_stream.c
54 alloc_param.sg_count = str_params->aparams.sg_count; sst_alloc_stream_mrfld()
/linux-4.1.27/drivers/scsi/fnic/
fnic_scsi.c
321 int sg_count) fnic_queue_wq_copy_desc()
334 if (sg_count) { fnic_queue_wq_copy_desc()
337 for_each_sg(scsi_sglist(sc), sg, sg_count, i) { fnic_queue_wq_copy_desc() local
347 sizeof(io_req->sgl_list[0]) * sg_count, fnic_queue_wq_copy_desc()
423 int sg_count = 0; fnic_queuecommand_lck() local
476 sg_count = scsi_dma_map(sc); fnic_queuecommand_lck()
477 if (sg_count < 0) { fnic_queuecommand_lck()
480 sg_count, CMD_STATE(sc)); fnic_queuecommand_lck()
486 io_req->sgl_cnt = sg_count; fnic_queuecommand_lck()
488 if (sg_count > FNIC_DFLT_SG_DESC_CNT) fnic_queuecommand_lck()
491 if (sg_count) { fnic_queuecommand_lck()
532 ret = fnic_queue_wq_copy_desc(fnic, wq, io_req, sc, sg_count); fnic_queuecommand_lck()
572 sg_count, cmd_trace, fnic_queuecommand_lck()
317 fnic_queue_wq_copy_desc(struct fnic *fnic, struct vnic_wq_copy *wq, struct fnic_io_req *io_req, struct scsi_cmnd *sc, int sg_count) fnic_queue_wq_copy_desc() argument
/linux-4.1.27/drivers/memstick/host/
r592.c
280 int len, sg_count; r592_transfer_fifo_dma() local
298 sg_count = dma_map_sg(&dev->pci_dev->dev, &dev->req->sg, 1, is_write ? r592_transfer_fifo_dma()
301 if (sg_count != 1 || r592_transfer_fifo_dma()
/linux-4.1.27/sound/soc/intel/atom/
sst-mfld-dsp.h
384 __u16 sg_count; member in struct:snd_sst_alloc_params_ext
410 u8 sg_count; member in struct:snd_sst_alloc_mrfld
sst-mfld-platform-compress.c
159 str_params.aparams.sg_count = 1; sst_platform_compr_set_params()
sst-mfld-platform-pcm.c
145 alloc_param->sg_count = 1; sst_fill_alloc_params()
/linux-4.1.27/include/linux/mmc/
core.h
124 int sg_count; /* mapped sg entries */ member in struct:mmc_data
/linux-4.1.27/drivers/infiniband/hw/ipath/
ipath_sdma.c
693 if (tx->txreq.sg_count > ipath_sdma_descq_freecnt(dd)) { ipath_sdma_verbs_send()
798 dd->ipath_sdma_descq_added += tx->txreq.sg_count; ipath_sdma_verbs_send()
ipath_verbs.c
1167 tx->txreq.sg_count = ndesc; ipath_verbs_send_dma()
1191 tx->txreq.sg_count = 1; ipath_verbs_send_dma()
ipath_kernel.h
206 int sg_count; member in struct:ipath_sdma_txreq
/linux-4.1.27/drivers/infiniband/hw/qib/
qib_verbs.c
1052 if (qp->s_tx->txreq.sg_count > avail) qib_verbs_sdma_desc_avail()
1054 avail -= qp->s_tx->txreq.sg_count; qib_verbs_sdma_desc_avail()
1193 tx->txreq.sg_count = ndesc; qib_verbs_send_dma()
1217 tx->txreq.sg_count = 1; qib_verbs_send_dma()
qib_sdma.c
555 if (tx->txreq.sg_count > qib_sdma_descq_freecnt(ppd)) { qib_sdma_verbs_send()
651 ppd->sdma_descq_added += tx->txreq.sg_count; qib_sdma_verbs_send()
qib.h
247 int sg_count; member in struct:qib_sdma_txreq
/linux-4.1.27/drivers/scsi/aic7xxx/
aic7xxx_osm.c
475 if ((scb->sg_count + 1) > AHC_NSEG) ahc_linux_map_seg()
1521 scb->sg_count = 0; ahc_linux_run_command()
1531 * The sg_count may be larger than nseg if ahc_linux_run_command()
1544 scb->sg_count += consumed; scsi_for_each_sg()
1565 scb->sg_count = 0;
aic7xxx_core.c
413 if (scb->sg_count == 0) ahc_sync_sglist()
419 /*len*/sizeof(struct ahc_dma_seg) * scb->sg_count, op); ahc_sync_sglist()
1143 scb->sg_count = 1; ahc_handle_seqint()
1405 ahc_get_transfer_length(scb), scb->sg_count); ahc_handle_seqint()
1406 if (scb->sg_count > 0) { ahc_handle_seqint()
1407 for (i = 0; i < scb->sg_count; i++) { ahc_handle_seqint()
2121 if (scb->sg_count > 0) {
2122 for (i = 0; i < scb->sg_count; i++) {
aic79xx_core.c
417 scb->sg_count++; ahd_sg_setup()
538 if (scb->sg_count == 0) ahd_sync_sglist()
544 /*len*/ahd_sg_size(ahd) * scb->sg_count, op); ahd_sync_sglist()
863 if (scb->sg_count != 0) ahd_queue_scb()
1739 if (scb->sg_count > 0) { ahd_dump_sglist()
1744 for (i = 0; i < scb->sg_count; i++) { ahd_dump_sglist()
1762 for (i = 0; i < scb->sg_count; i++) { ahd_dump_sglist()
2234 ahd_get_transfer_length(scb), scb->sg_count); ahd_handle_seqint()
9067 scb->sg_count = 0; ahd_handle_scsi_status()
aic79xx_osm.c
835 reset_scb->sg_count = 0; ahd_linux_dev_reset()
1634 scb->sg_count = 0; ahd_linux_run_command()
aic79xx.h
621 u_int sg_count;/* How full ahd_dma_seg is */ member in struct:scb
aic7xxx.h
578 u_int sg_count;/* How full ahc_dma_seg is */ member in struct:scb
/linux-4.1.27/drivers/scsi/bnx2fc/
bnx2fc_io.c
1650 int sg_count = 0; bnx2fc_map_sg() local
1661 sg_count = dma_map_sg(&hba->pcidev->dev, scsi_sglist(sc), bnx2fc_map_sg()
1663 scsi_for_each_sg(sc, sg, sg_count, i) { scsi_for_each_sg()
/linux-4.1.27/drivers/scsi/bnx2i/
bnx2i_iscsi.c
150 int sg_count; bnx2i_map_scsi_sg() local
157 sg_count = scsi_dma_map(sc); bnx2i_map_scsi_sg()
159 scsi_for_each_sg(sc, sg, sg_count, i) { scsi_for_each_sg()
/linux-4.1.27/drivers/block/
sx8.c
322 u8 sg_count; member in struct:carm_msg_rw
905 msg->sg_count = n_elem; carm_rq_fn()
/linux-4.1.27/drivers/vhost/
scsi.c
862 struct scatterlist *sg, int sg_count) vhost_scsi_iov_to_sgl()
873 for (i = 0; i < sg_count; i++) { vhost_scsi_iov_to_sgl()
860 vhost_scsi_iov_to_sgl(struct vhost_scsi_cmd *cmd, bool write, struct iov_iter *iter, struct scatterlist *sg, int sg_count) vhost_scsi_iov_to_sgl() argument
/linux-4.1.27/drivers/scsi/pcmcia/
nsp_cs.c
198 "SCpnt=0x%p target=%d lun=%llu sglist=0x%p bufflen=%d sg_count=%d", nsp_queuecommand_lck()

Completed in 2467 milliseconds