Searched refs:sg_len (Results 1 – 127 of 127) sorted by relevance

/linux-4.4.14/drivers/mmc/host/
Dtmio_mmc_dma.c57 for_each_sg(sg, sg_tmp, host->sg_len, i) { in tmio_mmc_start_dma_rx()
66 if ((!aligned && (host->sg_len > 1 || sg->length > PAGE_CACHE_SIZE || in tmio_mmc_start_dma_rx()
86 ret = dma_map_sg(chan->device->dev, sg, host->sg_len, DMA_FROM_DEVICE); in tmio_mmc_start_dma_rx()
99 __func__, host->sg_len, ret, cookie, host->mrq); in tmio_mmc_start_dma_rx()
120 desc, cookie, host->sg_len); in tmio_mmc_start_dma_rx()
133 for_each_sg(sg, sg_tmp, host->sg_len, i) { in tmio_mmc_start_dma_tx()
142 if ((!aligned && (host->sg_len > 1 || sg->length > PAGE_CACHE_SIZE || in tmio_mmc_start_dma_tx()
166 ret = dma_map_sg(chan->device->dev, sg, host->sg_len, DMA_TO_DEVICE); in tmio_mmc_start_dma_tx()
179 __func__, host->sg_len, ret, cookie, host->mrq); in tmio_mmc_start_dma_tx()
248 host->sg_ptr, host->sg_len, in tmio_mmc_tasklet_fn()
[all …]
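
The tmio entries above show the usual host-side read path: the request's scatterlist is mapped with dma_map_sg() and the mapped count, not the original sg_len, is handed to the dmaengine. A minimal sketch of that sequence, with start_dma_rx_sketch and its parameters as illustrative names only:

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/errno.h>

/* Illustrative sketch: map a read request's scatterlist and queue it on a
 * dmaengine channel, mirroring the tmio_mmc_start_dma_rx() flow above. */
static int start_dma_rx_sketch(struct dma_chan *chan,
                               struct scatterlist *sg, unsigned int sg_len)
{
        struct dma_async_tx_descriptor *desc;
        int nents;

        nents = dma_map_sg(chan->device->dev, sg, sg_len, DMA_FROM_DEVICE);
        if (nents <= 0)
                return -EINVAL;

        /* The count given to the prep callback is the mapped count, which
         * may be smaller than the sg_len originally set up by the core. */
        desc = dmaengine_prep_slave_sg(chan, sg, nents, DMA_DEV_TO_MEM,
                                       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!desc) {
                dma_unmap_sg(chan->device->dev, sg, sg_len, DMA_FROM_DEVICE);
                return -ENOMEM;
        }

        dmaengine_submit(desc);
        dma_async_issue_pending(chan);
        return 0;
}
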
Dandroid-goldfish.c127 unsigned int sg_len; member
229 dma_unmap_sg(mmc_dev(host->mmc), data->sg, host->sg_len, in goldfish_mmc_xfer_done()
234 host->sg_len = 0; in goldfish_mmc_xfer_done()
371 unsigned sg_len; in goldfish_mmc_prepare_data() local
391 sg_len = (data->blocks == 1) ? 1 : data->sg_len; in goldfish_mmc_prepare_data()
398 host->sg_len = dma_map_sg(mmc_dev(host->mmc), data->sg, in goldfish_mmc_prepare_data()
399 sg_len, dma_data_dir); in goldfish_mmc_prepare_data()
Dmxs-mmc.c158 data->sg_len, ssp->dma_dir); in mxs_mmc_request_done()
231 unsigned int sg_len; in mxs_mmc_prep_dma() local
236 data->sg_len, ssp->dma_dir); in mxs_mmc_prep_dma()
238 sg_len = data->sg_len; in mxs_mmc_prep_dma()
242 sg_len = SSP_PIO_NUM; in mxs_mmc_prep_dma()
246 sgl, sg_len, ssp->slave_dirn, flags); in mxs_mmc_prep_dma()
253 data->sg_len, ssp->dma_dir); in mxs_mmc_prep_dma()
357 unsigned int sg_len = data->sg_len; in mxs_mmc_adtc() local
402 for_each_sg(sgl, sg, sg_len, i) in mxs_mmc_adtc()
Djz4740_mmc.c118 int sg_len; member
153 int sg_len; member
220 dma_unmap_sg(chan->device->dev, data->sg, data->sg_len, dir); in jz4740_mmc_dma_unmap()
231 int sg_len; in jz4740_mmc_prepare_dma_data() local
245 sg_len = dma_map_sg(chan->device->dev, in jz4740_mmc_prepare_dma_data()
247 data->sg_len, in jz4740_mmc_prepare_dma_data()
251 sg_len = next_data->sg_len; in jz4740_mmc_prepare_dma_data()
252 next_data->sg_len = 0; in jz4740_mmc_prepare_dma_data()
255 if (sg_len <= 0) { in jz4740_mmc_prepare_dma_data()
262 next->sg_len = sg_len; in jz4740_mmc_prepare_dma_data()
[all …]
Dau1xmmc.c353 dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len, host->dma.dir); in au1xmmc_data_complete()
396 int sg_len, max, count; in au1xmmc_send_pio() local
411 sg_len = data->sg[host->pio.index].length - host->pio.offset; in au1xmmc_send_pio()
414 max = (sg_len > host->pio.len) ? host->pio.len : sg_len; in au1xmmc_send_pio()
433 if (count == sg_len) { in au1xmmc_send_pio()
451 int max, count, sg_len = 0; in au1xmmc_receive_pio() local
468 sg_len = sg_dma_len(&data->sg[host->pio.index]) - host->pio.offset; in au1xmmc_receive_pio()
471 if (sg_len < max) in au1xmmc_receive_pio()
472 max = sg_len; in au1xmmc_receive_pio()
510 if (sg_len && count == sg_len) { in au1xmmc_receive_pio()
[all …]
Dtifm_sd.c106 int sg_len; member
175 if (host->sg_pos == host->sg_len) in tifm_sd_transfer_data()
182 if (host->sg_pos == host->sg_len) { in tifm_sd_transfer_data()
238 if (host->sg_pos == host->sg_len) in tifm_sd_bounce_block()
271 if (host->sg_pos == host->sg_len) in tifm_sd_set_dma_data()
279 if (host->sg_pos == host->sg_len) in tifm_sd_set_dma_data()
287 if (host->sg_pos == host->sg_len) in tifm_sd_set_dma_data()
666 host->sg_len = r_data->sg_len; in tifm_sd_request()
680 host->sg_len = tifm_map_sg(sock, r_data->sg, in tifm_sd_request()
681 r_data->sg_len, in tifm_sd_request()
[all …]
Domap.c149 unsigned int sg_len; member
427 dma_unmap_sg(dev, data->sg, host->sg_len, dma_data_dir); in mmc_omap_release_dma()
451 host->sg_len = 0; in mmc_omap_xfer_done()
508 host->sg_len = 0; in mmc_omap_abort_xfer()
673 BUG_ON(host->sg_idx == host->sg_len); in mmc_omap_xfer_data()
952 unsigned sg_len; in mmc_omap_prepare_data() local
973 sg_len = (data->blocks == 1) ? 1 : data->sg_len; in mmc_omap_prepare_data()
976 for_each_sg(data->sg, sg, sg_len, i) { in mmc_omap_prepare_data()
1035 host->sg_len = dma_map_sg(c->device->dev, data->sg, sg_len, in mmc_omap_prepare_data()
1037 if (host->sg_len == 0) in mmc_omap_prepare_data()
[all …]
Datmel-mci.c185 unsigned int sg_len; member
802 data->sg, data->sg_len, in atmci_pdc_cleanup()
824 sg_copy_from_buffer(host->data->sg, host->data->sg_len, in atmci_pdc_complete()
841 data->sg, data->sg_len, in atmci_dma_cleanup()
907 host->sg_len = data->sg_len; in atmci_prepare_data()
943 unsigned int sg_len; in atmci_prepare_data_pdc() local
972 sg_len = dma_map_sg(&host->pdev->dev, data->sg, data->sg_len, dir); in atmci_prepare_data_pdc()
976 sg_copy_to_buffer(host->data->sg, host->data->sg_len, in atmci_prepare_data_pdc()
1021 for_each_sg(data->sg, sg, data->sg_len, i) { in atmci_prepare_data_dma()
1049 data->sg_len, direction); in atmci_prepare_data_dma()
[all …]
Ddw_mmc.c416 data->sg_len, in dw_mci_dma_cleanup()
457 data->sg_len, in dw_mci_dmac_complete_dma()
473 unsigned int sg_len) in dw_mci_translate_sglist() argument
483 for (i = 0; i < sg_len; i++) { in dw_mci_translate_sglist()
528 for (i = 0; i < sg_len; i++) { in dw_mci_translate_sglist()
573 static int dw_mci_idmac_start_dma(struct dw_mci *host, unsigned int sg_len) in dw_mci_idmac_start_dma() argument
577 dw_mci_translate_sglist(host, host->data, sg_len); in dw_mci_idmac_start_dma()
690 unsigned int sg_len) in dw_mci_edmac_start_dma() argument
696 u32 sg_elems = host->data->sg_len; in dw_mci_edmac_start_dma()
724 sg_len, cfg.direction, in dw_mci_edmac_start_dma()
[all …]
Ddavinci_mmc.c214 unsigned int sg_len; member
438 host->sg_len, in mmc_davinci_send_dma_request()
460 host->sg_len, in mmc_davinci_send_dma_request()
485 host->sg_len = dma_map_sg(mmc_dev(host->mmc), data->sg, data->sg_len, in mmc_davinci_start_dma_transfer()
491 for (i = 0; i < host->sg_len; i++) { in mmc_davinci_start_dma_transfer()
494 data->sg, data->sg_len, in mmc_davinci_start_dma_transfer()
622 host->sg_len = data->sg_len; in mmc_davinci_prepare_data()
829 dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len, in mmc_davinci_xfer_done()
Dmxcmmc.c294 for_each_sg(data->sg, sg, data->sg_len, i) in mxcmci_swap_buffers()
323 for_each_sg(data->sg, sg, data->sg_len, i) { in mxcmci_setup_data()
341 data->sg_len, host->dma_dir); in mxcmci_setup_data()
342 if (nents != data->sg_len) in mxcmci_setup_data()
346 data->sg, data->sg_len, slave_dirn, in mxcmci_setup_data()
350 dma_unmap_sg(host->dma->device->dev, data->sg, data->sg_len, in mxcmci_setup_data()
460 dma_unmap_sg(host->dma->device->dev, data->sg, data->sg_len, in mxcmci_finish_data()
621 for_each_sg(data->sg, sg, data->sg_len, i) { in mxcmci_transfer_data()
628 for_each_sg(data->sg, sg, data->sg_len, i) { in mxcmci_transfer_data()
Dsh_mmcif.c309 ret = dma_map_sg(chan->device->dev, sg, data->sg_len, in sh_mmcif_start_dma_rx()
325 __func__, data->sg_len, ret, cookie); in sh_mmcif_start_dma_rx()
346 desc, cookie, data->sg_len); in sh_mmcif_start_dma_rx()
359 ret = dma_map_sg(chan->device->dev, sg, data->sg_len, in sh_mmcif_start_dma_tx()
375 __func__, data->sg_len, ret, cookie); in sh_mmcif_start_dma_tx()
621 if (++host->sg_idx < data->sg_len) in sh_mmcif_next_block()
627 return host->sg_idx != data->sg_len; in sh_mmcif_next_block()
670 if (!data->sg_len || !data->sg->length) in sh_mmcif_multi_read()
750 if (!data->sg_len || !data->sg->length) in sh_mmcif_multi_write()
1217 data->sg, data->sg_len, in sh_mmcif_end_cmd()
[all …]
Dusdhi6rol0.c317 __func__, host->mrq->cmd->opcode, data->sg_len, in usdhi6_blk_bounce()
354 struct scatterlist *sg = data->sg_len > 1 ? host->sg : data->sg; in usdhi6_sg_map()
398 struct scatterlist *sg = data->sg_len > 1 ? in usdhi6_sg_unmap()
540 data->sg_len, DMA_FROM_DEVICE); in usdhi6_dma_stop_unmap()
543 data->sg_len, DMA_TO_DEVICE); in usdhi6_dma_stop_unmap()
583 ret = dma_map_sg(chan->device->dev, sg, data->sg_len, data_dir); in usdhi6_dma_setup()
597 __func__, data->sg_len, ret, cookie, desc); in usdhi6_dma_setup()
627 __func__, data->sg_len, data->blocks, data->blksz); in usdhi6_dma_kill()
903 data ? data->blksz : 0, data ? data->sg_len : 0)) in usdhi6_request_done()
1044 "Bad SG of %u: %ux%u @ %u\n", data->sg_len, in usdhi6_rq_start()
[all …]
Dsunxi-mmc.c310 for (i = 0; i < data->sg_len; i++) { in sunxi_mmc_init_idma_des()
350 dma_len = dma_map_sg(mmc_dev(host->mmc), data->sg, data->sg_len, in sunxi_mmc_map_dma()
357 for_each_sg(data->sg, sg, data->sg_len, i) { in sunxi_mmc_map_dma()
501 dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len, in sunxi_mmc_finalize_request()
852 dma_unmap_sg(mmc_dev(mmc), data->sg, data->sg_len, in sunxi_mmc_request()
Dwmt-sdmmc.c307 req->data->sg_len, DMA_TO_DEVICE); in wmt_complete_data_request()
310 req->data->sg_len, DMA_FROM_DEVICE); in wmt_complete_data_request()
624 req->data->sg_len, DMA_TO_DEVICE); in wmt_mci_request()
630 req->data->sg_len, DMA_FROM_DEVICE); in wmt_mci_request()
Dmoxart-mmc.c155 host->num_sg = data->sg_len; in moxart_init_sg()
278 data->sg_len, dir_data); in moxart_transfer_dma()
303 data->sg, data->sg_len, in moxart_transfer_dma()
Ds3cmci.c328 if (host->pio_sgptr >= host->mrq->data->sg_len) { in get_data_buffer()
330 host->pio_sgptr, host->mrq->data->sg_len); in get_data_buffer()
341 host->pio_sgptr, host->mrq->data->sg_len); in get_data_buffer()
1107 dma_map_sg(mmc_dev(host->mmc), data->sg, data->sg_len, in s3cmci_prepare_dma()
1111 desc = dmaengine_prep_slave_sg(host->dma, data->sg, data->sg_len, in s3cmci_prepare_dma()
1124 dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len, in s3cmci_prepare_dma()
Dtmio_mmc.h65 unsigned int sg_len; member
Dmmci.c421 sg_miter_start(&host->sg_miter, data->sg, data->sg_len, flags); in mmci_init_sg()
521 dma_unmap_sg(chan->device->dev, data->sg, data->sg_len, dir); in mmci_dma_unmap()
606 nr_sg = dma_map_sg(device->dev, data->sg, data->sg_len, buffer_dirn); in __mmci_dma_prep_data()
625 dma_unmap_sg(device->dev, data->sg, data->sg_len, buffer_dirn); in __mmci_dma_prep_data()
660 data->sg_len, data->blksz, data->blocks, data->flags); in mmci_dma_start_data()
Dbfin_sdh.c158 host->dma_len = dma_map_sg(mmc_dev(host->mmc), data->sg, data->sg_len, host->dma_dir); in sdh_setup_data()
302 dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len, in sdh_data_done()
Drtsx_pci_sdmmc.c176 count = rtsx_pci_dma_map_sg(pcr, data->sg, data->sg_len, read); in sd_pre_dma_transfer()
219 rtsx_pci_dma_unmap_sg(pcr, data->sg, data->sg_len, read); in sdmmc_post_req()
608 sg_copy_from_buffer(data->sg, data->sg_len, buf, data->blksz); in sd_normal_rw()
610 sg_copy_to_buffer(data->sg, data->sg_len, buf, data->blksz); in sd_normal_rw()
Dsdhci.c621 data->sg_len, direction); in sdhci_adma_table_post()
641 data->sg_len, direction); in sdhci_adma_table_post()
783 for_each_sg(data->sg, sg, data->sg_len, i) { in sdhci_prepare_data()
818 for_each_sg(data->sg, sg, data->sg_len, i) { in sdhci_prepare_data()
894 sg_miter_start(&host->sg_miter, data->sg, data->sg_len, flags); in sdhci_prepare_data()
968 data->sg, data->sg_len, in sdhci_finish_data()
2143 dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len, in sdhci_post_req()
2162 sg_count = dma_map_sg(mmc_dev(host->mmc), data->sg, data->sg_len, in sdhci_pre_dma_transfer()
Domap_hsmmc.c1058 host->data->sg, host->data->sg_len, in omap_hsmmc_dma_cleanup()
1355 data->sg, data->sg_len, in omap_hsmmc_dma_callback()
1390 dma_len = dma_map_sg(chan->device->dev, data->sg, data->sg_len, in omap_hsmmc_pre_dma_transfer()
1424 for (i = 0; i < data->sg_len; i++) { in omap_hsmmc_setup_dma_transfer()
1456 tx = dmaengine_prep_slave_sg(chan, data->sg, data->sg_len, in omap_hsmmc_setup_dma_transfer()
1567 dma_unmap_sg(c->device->dev, data->sg, data->sg_len, in omap_hsmmc_post_req()
Dcb710-mmc.c277 sg_miter_start(&miter, data->sg, data->sg_len, SG_MITER_TO_SG); in cb710_mmc_receive()
321 sg_miter_start(&miter, data->sg, data->sg_len, SG_MITER_FROM_SG); in cb710_mmc_send()
Dtmio_mmc_pio.c73 host->sg_len = data->sg_len; in tmio_mmc_init_sg()
83 return --host->sg_len; in tmio_mmc_next_sg()
Dvub300.c1450 data->sg_len, 0, GFP_KERNEL); in __command_read_data()
1494 sg_copy_from_buffer(data->sg, data->sg_len, buf, in __command_read_data()
1519 sg_copy_to_buffer(data->sg, data->sg_len, in __command_write_data()
1543 sg_copy_to_buffer(data->sg, data->sg_len, buf, in __command_write_data()
1565 sg_copy_to_buffer(data->sg, data->sg_len, buf, sizeof(buf)); in __command_write_data()
1568 data->sg_len, 0, GFP_KERNEL); in __command_write_data()
Dwbsd.c246 host->num_sg = data->sg_len; in wbsd_init_sg()
284 len = data->sg_len; in wbsd_sg_to_dma()
301 len = data->sg_len; in wbsd_dma_to_sg()
Dpxamci.c231 host->dma_len = dma_map_sg(chan->device->dev, data->sg, data->sg_len, in pxamci_setup_data()
368 data->sg, data->sg_len, host->dma_dir); in pxamci_data_done()
Drtsx_usb_sdmmc.c533 data->sg_len, NULL, 10000); in sd_rw_multi()
579 sg_copy_from_buffer(data->sg, data->sg_len, buf, data->blksz); in sd_normal_rw()
581 sg_copy_to_buffer(data->sg, data->sg_len, buf, data->blksz); in sd_normal_rw()
Dsdricoh_cs.c315 data->sg_len, data->sg->length); in sdricoh_request()
Dvia-sdmmc.c493 count = dma_map_sg(mmc_dev(host->mmc), data->sg, data->sg_len, in via_sdc_preparedata()
637 dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len, in via_sdc_finish_data()
Dtoshsd.c491 sg_miter_start(&host->sg_miter, data->sg, data->sg_len, flags); in toshsd_start_data()
Dmtk-sd.c458 data->sg_count = dma_map_sg(host->dev, data->sg, data->sg_len, in msdc_prepare_data()
473 dma_unmap_sg(host->dev, data->sg, data->sg_len, in msdc_unprepare_data()
Dmvsdio.c134 data->sg_len, dma_dir); in mvsd_setup_data()
Dmmc_spi.c910 for (sg = data->sg, n_sg = data->sg_len; n_sg; n_sg--, sg++) { in mmc_spi_data_do()
/linux-4.4.14/drivers/mmc/card/
Dqueue.c143 static struct scatterlist *mmc_alloc_sg(int sg_len, int *err) in mmc_alloc_sg() argument
147 sg = kmalloc(sizeof(struct scatterlist)*sg_len, GFP_KERNEL); in mmc_alloc_sg()
152 sg_init_table(sg, sg_len); in mmc_alloc_sg()
455 unsigned int sg_len = 0; in mmc_queue_packed_map_sg() local
471 sg_len++; in mmc_queue_packed_map_sg()
476 sg_len += blk_rq_map_sg(mq->queue, req, __sg); in mmc_queue_packed_map_sg()
477 __sg = sg + (sg_len - 1); in mmc_queue_packed_map_sg()
480 sg_mark_end(sg + (sg_len - 1)); in mmc_queue_packed_map_sg()
481 return sg_len; in mmc_queue_packed_map_sg()
489 unsigned int sg_len; in mmc_queue_map_sg() local
[all …]
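
mmc_alloc_sg() above illustrates how a scatterlist of sg_len entries is created before it is filled from a block request. A small sketch of the same allocation pattern, assuming an illustrative helper name:

#include <linux/scatterlist.h>
#include <linux/slab.h>

/* Sketch only: reserve sg_len scatterlist entries and initialise the table
 * so the final entry is marked as the end of the list. */
static struct scatterlist *alloc_sg_sketch(unsigned int sg_len)
{
        struct scatterlist *sg;

        sg = kmalloc_array(sg_len, sizeof(*sg), GFP_KERNEL);
        if (!sg)
                return NULL;

        sg_init_table(sg, sg_len);
        return sg;
}
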
Dmmc_test.c82 unsigned int sg_len; member
161 unsigned int *sg_len; member
191 struct mmc_request *mrq, struct scatterlist *sg, unsigned sg_len, in mmc_test_prepare_mrq() argument
222 mrq->data->sg_len = sg_len; in mmc_test_prepare_mrq()
391 unsigned int *sg_len, int min_sg_len) in mmc_test_map_sg() argument
401 *sg_len = 0; in mmc_test_map_sg()
420 *sg_len += 1; in mmc_test_map_sg()
444 unsigned int *sg_len) in mmc_test_map_sg_max_scatter() argument
453 *sg_len = 0; in mmc_test_map_sg_max_scatter()
475 *sg_len += 1; in mmc_test_map_sg_max_scatter()
[all …]
Dblock.c490 data.sg_len = 1; in __mmc_blk_ioctl_cmd()
803 data.sg_len = 1; in mmc_sd_num_wr_blocks()
1597 brq->data.sg_len = mmc_queue_map_sg(mq, mqrq); in mmc_blk_rw_rq_prep()
1607 for_each_sg(brq->data.sg, sg, brq->data.sg_len, i) { in mmc_blk_rw_rq_prep()
1615 brq->data.sg_len = i; in mmc_blk_rw_rq_prep()
1824 brq->data.sg_len = mmc_queue_map_sg(mq, mqrq); in mmc_blk_packed_hdr_wrq_prep()
/linux-4.4.14/drivers/dma/sh/
Dusb-dmac.c58 unsigned int sg_len; member
266 static int usb_dmac_desc_alloc(struct usb_dmac_chan *chan, unsigned int sg_len, in usb_dmac_desc_alloc() argument
272 desc = kzalloc(sizeof(*desc) + sg_len * sizeof(desc->sg[0]), gfp); in usb_dmac_desc_alloc()
276 desc->sg_allocated_len = sg_len; in usb_dmac_desc_alloc()
302 unsigned int sg_len, gfp_t gfp) in usb_dmac_desc_get() argument
310 if (sg_len <= desc->sg_allocated_len) { in usb_dmac_desc_get()
319 if (!usb_dmac_desc_alloc(chan, sg_len, gfp)) { in usb_dmac_desc_get()
420 unsigned int sg_len, enum dma_transfer_direction dir, in usb_dmac_prep_slave_sg() argument
428 if (!sg_len) { in usb_dmac_prep_slave_sg()
430 "%s: bad parameter: len=%d\n", __func__, sg_len); in usb_dmac_prep_slave_sg()
[all …]
Dshdma-base.c564 struct scatterlist *sgl, unsigned int sg_len, dma_addr_t *addr, in shdma_prep_sg() argument
574 for_each_sg(sgl, sg, sg_len, i) in shdma_prep_sg()
591 for_each_sg(sgl, sg, sg_len, i) { in shdma_prep_sg()
665 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, in shdma_prep_slave_sg() argument
680 if (slave_id < 0 || !sg_len) { in shdma_prep_slave_sg()
682 __func__, sg_len, slave_id); in shdma_prep_slave_sg()
688 return shdma_prep_sg(schan, sgl, sg_len, &slave_addr, in shdma_prep_slave_sg()
703 unsigned int sg_len = buf_len / period_len; in shdma_prep_dma_cyclic() local
714 if (sg_len > SHDMA_MAX_SG_LEN) { in shdma_prep_dma_cyclic()
716 sg_len, SHDMA_MAX_SG_LEN); in shdma_prep_dma_cyclic()
[all …]
Drcar-dmac.c829 unsigned int sg_len, dma_addr_t dev_addr, in rcar_dmac_chan_prep_sg() argument
860 for_each_sg(sgl, sg, sg_len, i) { in rcar_dmac_chan_prep_sg()
1026 unsigned int sg_len, enum dma_transfer_direction dir, in rcar_dmac_prep_slave_sg() argument
1033 if (rchan->mid_rid < 0 || !sg_len) { in rcar_dmac_prep_slave_sg()
1036 __func__, sg_len, rchan->mid_rid); in rcar_dmac_prep_slave_sg()
1042 return rcar_dmac_chan_prep_sg(rchan, sgl, sg_len, dev_addr, in rcar_dmac_prep_slave_sg()
1057 unsigned int sg_len; in rcar_dmac_prep_dma_cyclic() local
1068 sg_len = buf_len / period_len; in rcar_dmac_prep_dma_cyclic()
1069 if (sg_len > RCAR_DMAC_MAX_SG_LEN) { in rcar_dmac_prep_dma_cyclic()
1072 rchan->index, sg_len, RCAR_DMAC_MAX_SG_LEN); in rcar_dmac_prep_dma_cyclic()
[all …]
/linux-4.4.14/net/rds/
Dib_rdma.c59 unsigned int sg_len; member
488 ibmr->sg_len = nents; in rds_ib_map_fmr()
527 ibmr->sg, ibmr->sg_len, in __rds_ib_teardown_mr()
533 if (ibmr->sg_len) { in __rds_ib_teardown_mr()
536 for (i = 0; i < ibmr->sg_len; ++i) { in __rds_ib_teardown_mr()
548 ibmr->sg_len = 0; in __rds_ib_teardown_mr()
554 unsigned int pinned = ibmr->sg_len; in rds_ib_teardown_mr()
700 unpinned += ibmr->sg_len; in rds_ib_flush_mr_pool()
762 rdsdebug("RDS/IB: free_mr nents %u\n", ibmr->sg_len); in rds_ib_free_mr()
770 atomic_add(ibmr->sg_len, &pool->free_pinned); in rds_ib_free_mr()
Diw_rdma.c251 struct scatterlist *list, unsigned int sg_len) argument
254 sg->len = sg_len;
729 unsigned int sg_len) argument
736 rds_iw_set_scatterlist(&mapping->m_sg, sg, sg_len);
/linux-4.4.14/drivers/dma/
Dfsl-edma.c503 int sg_len) in fsl_edma_alloc_desc() argument
508 fsl_desc = kzalloc(sizeof(*fsl_desc) + sizeof(struct fsl_edma_sw_tcd) * sg_len, in fsl_edma_alloc_desc()
514 fsl_desc->n_tcds = sg_len; in fsl_edma_alloc_desc()
515 for (i = 0; i < sg_len; i++) { in fsl_edma_alloc_desc()
539 int sg_len, i; in fsl_edma_prep_dma_cyclic() local
546 sg_len = buf_len / period_len; in fsl_edma_prep_dma_cyclic()
547 fsl_desc = fsl_edma_alloc_desc(fsl_chan, sg_len); in fsl_edma_prep_dma_cyclic()
556 for (i = 0; i < sg_len; i++) { in fsl_edma_prep_dma_cyclic()
561 last_sg = fsl_desc->tcd[(i + 1) % sg_len].ptcd; in fsl_edma_prep_dma_cyclic()
586 unsigned int sg_len, enum dma_transfer_direction direction, in fsl_edma_prep_slave_sg() argument
[all …]
Dste_dma40_ll.c266 int sg_len, in d40_phy_sg_to_lli() argument
284 for_each_sg(sg, current_sg, sg_len, i) { in d40_phy_sg_to_lli()
291 if (i == sg_len - 1) in d40_phy_sg_to_lli()
418 int sg_len, in d40_log_sg_to_lli() argument
433 for_each_sg(sg, current_sg, sg_len, i) { in d40_log_sg_to_lli()
Dmxs-dma.c501 unsigned int sg_len, enum dma_transfer_direction direction, in mxs_dma_prep_slave_sg() argument
516 if (sg_len + (append ? idx : 0) > NUM_CCW) { in mxs_dma_prep_slave_sg()
519 sg_len, NUM_CCW); in mxs_dma_prep_slave_sg()
545 for (j = 0; j < sg_len;) in mxs_dma_prep_slave_sg()
555 ccw->bits |= BF_CCW(sg_len, PIO_NUM); in mxs_dma_prep_slave_sg()
558 for_each_sg(sgl, sg, sg_len, i) { in mxs_dma_prep_slave_sg()
579 if (i + 1 == sg_len) { in mxs_dma_prep_slave_sg()
Dste_dma40_ll.h442 int sg_len,
454 int sg_len,
Dmoxart-dma.c270 unsigned int sg_len, enum dma_transfer_direction dir, in moxart_prep_slave_sg() argument
311 d = kzalloc(sizeof(*d) + sg_len * sizeof(d->sg[0]), GFP_ATOMIC); in moxart_prep_slave_sg()
319 for_each_sg(sgl, sgent, sg_len, i) { in moxart_prep_slave_sg()
324 d->sglen = sg_len; in moxart_prep_slave_sg()
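
moxart_prep_slave_sg() and idma64_prep_slave_sg() above share a common shape: allocate one descriptor with sg_len per-segment slots, then walk the mapped list with for_each_sg(), copying each entry's DMA address and length. A sketch of that shape, with demo_desc and prep_sg_sketch as made-up names rather than any driver's API:

#include <linux/dmaengine.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

struct demo_seg {
        dma_addr_t addr;
        u32 len;
};

struct demo_desc {
        unsigned int nents;
        struct demo_seg seg[];
};

/* Sketch only: one allocation sized for sg_len segments, filled from the
 * already-mapped scatterlist. */
static struct demo_desc *prep_sg_sketch(struct scatterlist *sgl,
                                        unsigned int sg_len)
{
        struct demo_desc *d;
        struct scatterlist *sg;
        unsigned int i;

        d = kzalloc(sizeof(*d) + sg_len * sizeof(d->seg[0]), GFP_ATOMIC);
        if (!d)
                return NULL;

        for_each_sg(sgl, sg, sg_len, i) {
                d->seg[i].addr = sg_dma_address(sg);
                d->seg[i].len = sg_dma_len(sg);
        }
        d->nents = sg_len;
        return d;
}
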
Ddma-jz4740.c391 unsigned int sg_len, enum dma_transfer_direction direction, in jz4740_dma_prep_slave_sg() argument
399 desc = jz4740_dma_alloc_desc(sg_len); in jz4740_dma_prep_slave_sg()
403 for_each_sg(sgl, sg, sg_len, i) { in jz4740_dma_prep_slave_sg()
408 desc->num_sgs = sg_len; in jz4740_dma_prep_slave_sg()
Dep93xx_dma.c1026 unsigned int sg_len, enum dma_transfer_direction dir, in ep93xx_dma_prep_slave_sg() argument
1047 for_each_sg(sgl, sg, sg_len, i) { in ep93xx_dma_prep_slave_sg()
1048 size_t sg_len = sg_dma_len(sg); in ep93xx_dma_prep_slave_sg() local
1050 if (sg_len > DMA_MAX_CHAN_BYTES) { in ep93xx_dma_prep_slave_sg()
1052 sg_len); in ep93xx_dma_prep_slave_sg()
1069 desc->size = sg_len; in ep93xx_dma_prep_slave_sg()
Dtimb_dma.c505 struct scatterlist *sgl, unsigned int sg_len, in td_prep_slave_sg() argument
516 if (!sgl || !sg_len) { in td_prep_slave_sg()
536 for_each_sg(sgl, sg, sg_len, i) { in td_prep_slave_sg()
544 i == (sg_len - 1)); in td_prep_slave_sg()
Didma64.c304 unsigned int sg_len, enum dma_transfer_direction direction, in idma64_prep_slave_sg() argument
312 desc = idma64_alloc_desc(sg_len); in idma64_prep_slave_sg()
316 for_each_sg(sgl, sg, sg_len, i) { in idma64_prep_slave_sg()
331 desc->ndesc = sg_len; in idma64_prep_slave_sg()
Dste_dma40.c1039 static int d40_sg_2_dmalen(struct scatterlist *sgl, int sg_len, in d40_sg_2_dmalen() argument
1047 for_each_sg(sgl, sg, sg_len, i) { in d40_sg_2_dmalen()
2139 unsigned int sg_len, dma_addr_t src_dev_addr, in d40_prep_sg_log() argument
2147 ret = d40_log_sg_to_lli(sg_src, sg_len, in d40_prep_sg_log()
2154 ret = d40_log_sg_to_lli(sg_dst, sg_len, in d40_prep_sg_log()
2167 unsigned int sg_len, dma_addr_t src_dev_addr, in d40_prep_sg_phy() argument
2179 ret = d40_phy_sg_to_lli(sg_src, sg_len, src_dev_addr, in d40_prep_sg_phy()
2185 ret = d40_phy_sg_to_lli(sg_dst, sg_len, dst_dev_addr, in d40_prep_sg_phy()
2199 unsigned int sg_len, unsigned long dma_flags) in d40_prep_desc() argument
2209 desc->lli_len = d40_sg_2_dmalen(sg, sg_len, cfg->src_info.data_width, in d40_prep_desc()
[all …]
Ds3c24xx-dma.c892 unsigned sg_len; in s3c24xx_dma_prep_dma_cyclic() local
953 sg_len = size / period; in s3c24xx_dma_prep_dma_cyclic()
955 for (i = 0; i < sg_len; i++) { in s3c24xx_dma_prep_dma_cyclic()
965 if (i == sg_len - 1) in s3c24xx_dma_prep_dma_cyclic()
981 unsigned int sg_len, enum dma_transfer_direction direction, in s3c24xx_dma_prep_slave_sg() argument
1047 for_each_sg(sgl, sg, sg_len, tmp) { in s3c24xx_dma_prep_slave_sg()
Dedma.c195 u32 sg_len; member
765 edesc->sg_len = 0; in edma_execute()
771 edesc->sg_len += edesc->pset[j].len; in edma_execute()
1027 unsigned int sg_len, enum dma_transfer_direction direction, in edma_prep_slave_sg() argument
1039 if (unlikely(!echan || !sgl || !sg_len)) in edma_prep_slave_sg()
1060 edesc = kzalloc(sizeof(*edesc) + sg_len * sizeof(edesc->pset[0]), in edma_prep_slave_sg()
1067 edesc->pset_nr = sg_len; in edma_prep_slave_sg()
1073 nslots = min_t(unsigned, MAX_NR_SG, sg_len); in edma_prep_slave_sg()
1089 for_each_sg(sgl, sg, sg_len, i) { in edma_prep_slave_sg()
1113 if (i == sg_len - 1) in edma_prep_slave_sg()
[all …]
Dat_hdmac.c992 unsigned int sg_len, int value, in atc_prep_dma_memset_sg() argument
1005 value, sg_len, flags); in atc_prep_dma_memset_sg()
1007 if (unlikely(!sgl || !sg_len)) { in atc_prep_dma_memset_sg()
1021 for_each_sg(sgl, sg, sg_len, i) { in atc_prep_dma_memset_sg()
1077 unsigned int sg_len, enum dma_transfer_direction direction, in atc_prep_slave_sg() argument
1095 sg_len, in atc_prep_slave_sg()
1099 if (unlikely(!atslave || !sg_len)) { in atc_prep_slave_sg()
1117 for_each_sg(sgl, sg, sg_len, i) { in atc_prep_slave_sg()
1158 for_each_sg(sgl, sg, sg_len, i) { in atc_prep_slave_sg()
Ddma-jz4780.c309 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, in jz4780_dma_prep_slave_sg() argument
318 desc = jz4780_dma_desc_alloc(jzchan, sg_len, DMA_SLAVE); in jz4780_dma_prep_slave_sg()
322 for (i = 0; i < sg_len; i++) { in jz4780_dma_prep_slave_sg()
332 if (i != (sg_len - 1)) { in jz4780_dma_prep_slave_sg()
Ddma-axi-dmac.c340 unsigned int sg_len, enum dma_transfer_direction direction, in axi_dmac_prep_slave_sg() argument
351 desc = axi_dmac_alloc_desc(sg_len); in axi_dmac_prep_slave_sg()
355 for_each_sg(sgl, sg, sg_len, i) { in axi_dmac_prep_slave_sg()
Dimx-sdma.c1150 unsigned int sg_len, enum dma_transfer_direction direction, in sdma_prep_slave_sg() argument
1168 sg_len, channel); in sdma_prep_slave_sg()
1175 if (sg_len > NUM_BD) { in sdma_prep_slave_sg()
1177 channel, sg_len, NUM_BD); in sdma_prep_slave_sg()
1183 for_each_sg(sgl, sg, sg_len, i) { in sdma_prep_slave_sg()
1226 if (i + 1 == sg_len) { in sdma_prep_slave_sg()
1240 sdmac->num_bd = sg_len; in sdma_prep_slave_sg()
Dat_xdmac.c635 unsigned int sg_len, enum dma_transfer_direction direction, in at_xdmac_prep_slave_sg() argument
655 __func__, sg_len, in at_xdmac_prep_slave_sg()
666 for_each_sg(sgl, sg, sg_len, i) { in at_xdmac_prep_slave_sg()
1218 unsigned int sg_len, int value, in at_xdmac_prep_dma_memset_sg() argument
1232 __func__, sg_len, value, flags); in at_xdmac_prep_dma_memset_sg()
1235 for_each_sg(sgl, sg, sg_len, i) { in at_xdmac_prep_dma_memset_sg()
1339 if ((i == (sg_len - 1)) && in at_xdmac_prep_dma_memset_sg()
Dcoh901318.c1272 unsigned int sg_len; member
1531 desc->sg_len = 0; in coh901318_desc_get()
2291 unsigned int sg_len, enum dma_transfer_direction direction, in coh901318_prep_slave_sg() argument
2317 __func__, sg_len, direction); in coh901318_prep_slave_sg()
2357 for_each_sg(sgl, sg, sg_len, i) { in coh901318_prep_slave_sg()
2380 ret = coh901318_lli_fill_sg(&cohc->base->pool, lli, sgl, sg_len, in coh901318_prep_slave_sg()
Dmpc512x_dma.c689 unsigned int sg_len, enum dma_transfer_direction direction, in mpc_dma_prep_slave_sg() argument
704 if (sg_len != 1) in mpc_dma_prep_slave_sg()
710 for_each_sg(sgl, sg, sg_len, i) { in mpc_dma_prep_slave_sg()
Dqcom_bam_dma.c587 struct scatterlist *sgl, unsigned int sg_len, in bam_prep_slave_sg() argument
606 for_each_sg(sgl, sg, sg_len, i) in bam_prep_slave_sg()
630 for_each_sg(sgl, sg, sg_len, i) { in bam_prep_slave_sg()
Dpch_dma.c579 struct scatterlist *sgl, unsigned int sg_len, in pd_prep_slave_sg() argument
592 if (unlikely(!sg_len)) { in pd_prep_slave_sg()
607 for_each_sg(sgl, sg, sg_len, i) { in pd_prep_slave_sg()
Dmmp_pdma.c526 unsigned int sg_len, enum dma_transfer_direction dir, in mmp_pdma_prep_slave_sg() argument
536 if ((sgl == NULL) || (sg_len == 0)) in mmp_pdma_prep_slave_sg()
541 for_each_sg(sgl, sg, sg_len, i) { in mmp_pdma_prep_slave_sg()
Dtegra20-apb-dma.c939 struct dma_chan *dc, struct scatterlist *sgl, unsigned int sg_len, in tegra_dma_prep_slave_sg() argument
957 if (sg_len < 1) { in tegra_dma_prep_slave_sg()
958 dev_err(tdc2dev(tdc), "Invalid segment length %d\n", sg_len); in tegra_dma_prep_slave_sg()
993 for_each_sg(sgl, sg, sg_len, i) { in tegra_dma_prep_slave_sg()
Dnbpfaxi.c998 struct dma_chan *dchan, struct scatterlist *sgl, unsigned int sg_len, in nbpf_prep_slave_sg() argument
1011 return nbpf_prep_sg(chan, sgl, &slave_sg, sg_len, in nbpf_prep_slave_sg()
1016 return nbpf_prep_sg(chan, &slave_sg, sgl, sg_len, in nbpf_prep_slave_sg()
Dimx-dma.c810 unsigned int sg_len, enum dma_transfer_direction direction, in imxdma_prep_slave_sg() argument
824 for_each_sg(sgl, sg, sg_len, i) { in imxdma_prep_slave_sg()
845 desc->sgcount = sg_len; in imxdma_prep_slave_sg()
Dpxa_dma.c1020 unsigned int sg_len, enum dma_transfer_direction dir, in pxad_prep_slave_sg() argument
1031 if ((sgl == NULL) || (sg_len == 0)) in pxad_prep_slave_sg()
1038 for_each_sg(sgl, sg, sg_len, i) in pxad_prep_slave_sg()
1044 for_each_sg(sgl, sg, sg_len, i) { in pxad_prep_slave_sg()
Dtxx9dmac.c811 unsigned int sg_len, enum dma_transfer_direction direction, in txx9dmac_prep_slave_sg() argument
829 if (unlikely(!sg_len)) in txx9dmac_prep_slave_sg()
834 for_each_sg(sgl, sg, sg_len, i) { in txx9dmac_prep_slave_sg()
Dimg-mdc-dma.c453 unsigned int sg_len, enum dma_transfer_direction dir, in mdc_prep_slave_sg() argument
478 for_each_sg(sgl, sg, sg_len, i) { in mdc_prep_slave_sg()
Dcppi41.c493 struct dma_chan *chan, struct scatterlist *sgl, unsigned sg_len, in cppi41_dma_prep_slave_sg() argument
504 for_each_sg(sgl, sg, sg_len, i) { in cppi41_dma_prep_slave_sg()
Dsun6i-dma.c562 unsigned int sg_len, enum dma_transfer_direction dir, in sun6i_dma_prep_slave_sg() argument
586 for_each_sg(sgl, sg, sg_len, i) { in sun6i_dma_prep_slave_sg()
Dsun4i-dma.c772 unsigned int sg_len, enum dma_transfer_direction dir, in sun4i_dma_prep_slave_sg() argument
818 for_each_sg(sgl, sg, sg_len, i) { in sun4i_dma_prep_slave_sg()
Dpl330.c2660 unsigned int sg_len, enum dma_transfer_direction direction, in pl330_prep_slave_sg() argument
2669 if (unlikely(!pch || !sgl || !sg_len)) in pl330_prep_slave_sg()
2676 for_each_sg(sgl, sg, sg_len, i) { in pl330_prep_slave_sg()
Damba-pl08x.c1586 unsigned int sg_len, enum dma_transfer_direction direction, in pl08x_prep_slave_sg() argument
1603 for_each_sg(sgl, sg, sg_len, tmp) { in pl08x_prep_slave_sg()
/linux-4.4.14/drivers/mmc/core/
Dsdio_ops.c166 data.sg_len = nents; in mmc_io_rw_extended()
168 for_each_sg(data.sg, sg_ptr, data.sg_len, i) { in mmc_io_rw_extended()
176 data.sg_len = 1; in mmc_io_rw_extended()
Dsd_ops.c285 data.sg_len = 1; in mmc_app_send_scr()
336 data.sg_len = 1; in mmc_sd_switch()
381 data.sg_len = 1; in mmc_app_sd_status()
Dmmc_ops.c316 data.sg_len = 1; in mmc_send_cxd_data()
633 data.sg_len = 1; in mmc_send_tuning()
715 data.sg_len = 1; in mmc_send_bus_test()
Dcore.c279 for_each_sg(mrq->data->sg, sg, mrq->data->sg_len, i) in mmc_start_request()
/linux-4.4.14/drivers/rapidio/devices/
Dtsi721_dma.c457 for_each_sg(desc->sg, sg, desc->sg_len, i) { in tsi721_submit_sg()
460 i, desc->sg_len, in tsi721_submit_sg()
494 desc->sg_len -= i; in tsi721_submit_sg()
524 desc->sg_len = 0; in tsi721_submit_sg()
599 if (desc->sg_len == 0) { in tsi721_dma_tasklet()
757 struct scatterlist *sgl, unsigned int sg_len, in tsi721_prep_rio_sg() argument
767 if (!sgl || !sg_len) { in tsi721_prep_rio_sg()
805 desc->sg_len = sg_len; in tsi721_prep_rio_sg()
Dtsi721.h657 unsigned int sg_len; member
/linux-4.4.14/drivers/crypto/nx/
Dnx.c96 unsigned int sg_len = 0; in nx_build_sg_list() local
121 for (sg = sg_head; sg_len < *len; sg++) { in nx_build_sg_list()
130 sg_len += sg->len; in nx_build_sg_list()
133 is_vmalloc_addr(start_addr + sg_len)) { in nx_build_sg_list()
135 start_addr + sg_len)); in nx_build_sg_list()
136 end_addr = sg_addr + *len - sg_len; in nx_build_sg_list()
146 *len = sg_len; in nx_build_sg_list()
/linux-4.4.14/drivers/dma/hsu/
Dhsu.c216 unsigned int sg_len, enum dma_transfer_direction direction, in hsu_dma_prep_slave_sg() argument
224 desc = hsu_dma_alloc_desc(sg_len); in hsu_dma_prep_slave_sg()
228 for_each_sg(sgl, sg, sg_len, i) { in hsu_dma_prep_slave_sg()
233 desc->nents = sg_len; in hsu_dma_prep_slave_sg()
/linux-4.4.14/drivers/scsi/bnx2fc/
Dbnx2fc_io.c18 static int bnx2fc_split_bd(struct bnx2fc_cmd *io_req, u64 addr, int sg_len,
1556 static int bnx2fc_split_bd(struct bnx2fc_cmd *io_req, u64 addr, int sg_len, in bnx2fc_split_bd() argument
1563 while (sg_len) { in bnx2fc_split_bd()
1564 if (sg_len >= BNX2FC_BD_SPLIT_SZ) in bnx2fc_split_bd()
1567 frag_size = sg_len; in bnx2fc_split_bd()
1575 sg_len -= frag_size; in bnx2fc_split_bd()
1592 unsigned int sg_len; in bnx2fc_map_sg() local
1603 sg_len = sg_dma_len(sg); in bnx2fc_map_sg()
1605 if (sg_len > BNX2FC_MAX_BD_LEN) { in bnx2fc_map_sg()
1606 sg_frags = bnx2fc_split_bd(io_req, addr, sg_len, in bnx2fc_map_sg()
[all …]
/linux-4.4.14/include/linux/
Ddmaengine.h722 unsigned int sg_len, enum dma_transfer_direction direction,
775 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, in dmaengine_prep_slave_sg() argument
778 return chan->device->device_prep_slave_sg(chan, sgl, sg_len, in dmaengine_prep_slave_sg()
785 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, in dmaengine_prep_rio_sg() argument
789 return chan->device->device_prep_slave_sg(chan, sgl, sg_len, in dmaengine_prep_rio_sg()
Drio.h440 unsigned int sg_len; /* size of scatter list */ member
/linux-4.4.14/drivers/ata/
Dlibata-sff.c2642 u32 sg_len, len; in ata_bmdma_fill_sg() local
2649 sg_len = sg_dma_len(sg); in ata_bmdma_fill_sg()
2651 while (sg_len) { in ata_bmdma_fill_sg()
2653 len = sg_len; in ata_bmdma_fill_sg()
2654 if ((offset + sg_len) > 0x10000) in ata_bmdma_fill_sg()
2662 sg_len -= len; in ata_bmdma_fill_sg()
2693 u32 sg_len, len, blen; in ata_bmdma_fill_sg_dumb() local
2700 sg_len = sg_dma_len(sg); in ata_bmdma_fill_sg_dumb()
2702 while (sg_len) { in ata_bmdma_fill_sg_dumb()
2704 len = sg_len; in ata_bmdma_fill_sg_dumb()
[all …]
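
ata_bmdma_fill_sg() above splits each mapped segment so that no PRD entry crosses a 64 KiB boundary. A sketch of that split loop, where emit_prd() is a hypothetical callback standing in for the driver's table write:

#include <linux/types.h>

/* Sketch only: chop one DMA segment into pieces that each stay inside a
 * single 64 KiB window, as the BMDMA PRD format requires. */
static void fill_prd_sketch(dma_addr_t addr, u32 sg_len,
                            void (*emit_prd)(dma_addr_t addr, u32 len))
{
        while (sg_len) {
                u32 offset = addr & 0xffff;
                u32 len = sg_len;

                if (offset + sg_len > 0x10000)
                        len = 0x10000 - offset;

                emit_prd(addr, len);
                addr += len;
                sg_len -= len;
        }
}
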
Dpata_macio.c529 u32 addr, sg_len, len; in pata_macio_qc_prep() local
536 sg_len = sg_dma_len(sg); in pata_macio_qc_prep()
538 while (sg_len) { in pata_macio_qc_prep()
542 len = (sg_len < MAX_DBDMA_SEG) ? sg_len : MAX_DBDMA_SEG; in pata_macio_qc_prep()
550 sg_len -= len; in pata_macio_qc_prep()
Dsata_promise.c604 u32 sg_len; in pdc_fill_sg() local
611 sg_len = sg_dma_len(sg); in pdc_fill_sg()
613 while (sg_len) { in pdc_fill_sg()
615 len = sg_len; in pdc_fill_sg()
616 if ((offset + sg_len) > 0x10000) in pdc_fill_sg()
624 sg_len -= len; in pdc_fill_sg()
Dsata_fsl.c468 u32 sg_len = sg_dma_len(sg); in sata_fsl_fill_sg() local
471 (unsigned long long)sg_addr, sg_len); in sata_fsl_fill_sg()
477 if (unlikely(sg_len & 0x03)) in sata_fsl_fill_sg()
479 sg_len); in sata_fsl_fill_sg()
491 ttl_dwords += sg_len; in sata_fsl_fill_sg()
493 prd->ddc_and_ext = cpu_to_le32(data_snoop | (sg_len & ~0x03)); in sata_fsl_fill_sg()
501 indirect_ext_segment_sz += sg_len; in sata_fsl_fill_sg()
Dsata_sil.c322 u32 sg_len = sg_dma_len(sg); in sil_fill_sg() local
325 prd->flags_len = cpu_to_le32(sg_len); in sil_fill_sg()
326 VPRINTK("PRD[%u] = (0x%X, 0x%X)\n", si, addr, sg_len); in sil_fill_sg()
Dacard-ahci.c242 u32 sg_len = sg_dma_len(sg); in acard_ahci_fill_sg() local
251 acard_sg[si].size = cpu_to_le32(sg_len); in acard_ahci_fill_sg()
Dsata_rcar.c536 u32 addr, sg_len; in sata_rcar_bmdma_fill_sg() local
543 sg_len = sg_dma_len(sg); in sata_rcar_bmdma_fill_sg()
546 prd[si].flags_len = cpu_to_le32(sg_len); in sata_rcar_bmdma_fill_sg()
547 VPRINTK("PRD[%u] = (0x%X, 0x%X)\n", si, addr, sg_len); in sata_rcar_bmdma_fill_sg()
Dsata_nv.c2018 u32 sg_len, len; in nv_swncq_fill_sg() local
2021 sg_len = sg_dma_len(sg); in nv_swncq_fill_sg()
2023 while (sg_len) { in nv_swncq_fill_sg()
2025 len = sg_len; in nv_swncq_fill_sg()
2026 if ((offset + sg_len) > 0x10000) in nv_swncq_fill_sg()
2033 sg_len -= len; in nv_swncq_fill_sg()
Dsata_mv.c1810 u32 sg_len = sg_dma_len(sg); in mv_fill_sg() local
1812 while (sg_len) { in mv_fill_sg()
1814 u32 len = sg_len; in mv_fill_sg()
1824 sg_len -= len; in mv_fill_sg()
Dlibahci.c1518 u32 sg_len = sg_dma_len(sg); in ahci_fill_sg() local
1522 ahci_sg[si].flags_size = cpu_to_le32(sg_len - 1); in ahci_fill_sg()
/linux-4.4.14/drivers/scsi/
Dgdth_ioctl.h38 u32 sg_len; /* length */ member
44 u32 sg_len; /* length */ member
Da100u2w.c646 scb->sg_len = cpu_to_le32(0); in orc_device_reset()
861 scb->sg_len = cpu_to_le32(0); in inia100_build_scb()
873 scb->sg_len = cpu_to_le32((u32) (count_sg * 8)); in inia100_build_scb()
880 scb->sg_len = cpu_to_le32(0); in inia100_build_scb()
Dscsi_lib.c3100 size_t sg_len = 0, len_complete = 0; in scsi_kmap_atomic_sg() local
3107 len_complete = sg_len; /* Complete sg-entries */ in scsi_kmap_atomic_sg()
3108 sg_len += sg->length; in scsi_kmap_atomic_sg()
3109 if (sg_len > *offset) in scsi_kmap_atomic_sg()
3116 __func__, sg_len, *offset, sg_count); in scsi_kmap_atomic_sg()
3129 sg_len = PAGE_SIZE - *offset; in scsi_kmap_atomic_sg()
3130 if (*len > sg_len) in scsi_kmap_atomic_sg()
3131 *len = sg_len; in scsi_kmap_atomic_sg()
Da100u2w.h154 u32 sg_len; /*10 SG list # * 8 */ member
Dgdth.c2535 cmdp->u.cache64.sg_lst[i].sg_len = sg_dma_len(sl); in gdth_fill_cache_cmd()
2547 cmdp->u.cache.sg_lst[i].sg_len = sg_dma_len(sl); in gdth_fill_cache_cmd()
2565 cmdp->u.cache64.sg_lst[0].sg_len)); in gdth_fill_cache_cmd()
2574 cmdp->u.cache.sg_lst[0].sg_len)); in gdth_fill_cache_cmd()
2710 cmdp->u.raw64.sg_lst[i].sg_len = sg_dma_len(sl); in gdth_fill_raw_cmd()
2722 cmdp->u.raw.sg_lst[i].sg_len = sg_dma_len(sl); in gdth_fill_raw_cmd()
2738 cmdp->u.raw64.sg_lst[0].sg_len)); in gdth_fill_raw_cmd()
2746 cmdp->u.raw.sg_lst[0].sg_len)); in gdth_fill_raw_cmd()
4204 gen.command.u.cache64.sg_lst[0].sg_len = gen.data_len; in ioc_general()
4205 gen.command.u.cache64.sg_lst[1].sg_len = 0; in ioc_general()
[all …]
Dips.c1821 ++scb->sg_len; in ips_fill_scb_sg_single()
1859 scb->sg_len = 0; in ips_flash_firmware()
1928 scb->sg_len = 0; in ips_usrcmd()
2693 scb->sg_len = 0; in ips_next()
3567 if (!scb->sg_len) { in ips_send_cmd()
3588 scb->cmd.basic_io.sg_count = scb->sg_len; in ips_send_cmd()
3613 if (!scb->sg_len) { in ips_send_cmd()
3634 scb->cmd.basic_io.sg_count = scb->sg_len; in ips_send_cmd()
3749 if (!scb->sg_len) { in ips_send_cmd()
3782 tapeDCDB->sg_count = scb->sg_len; in ips_send_cmd()
[all …]
Dips.h1097 uint32_t sg_len; member
1123 uint32_t sg_len; member
/linux-4.4.14/include/linux/dma/
Dipu-dma.h150 unsigned int sg_len; /* tx-descriptor. */ member
/linux-4.4.14/drivers/i2c/busses/
Di2c-at91.c276 unsigned int sg_len = 1; in at91_twi_write_data_dma() local
298 sg_len = 0; in at91_twi_write_data_dma()
302 sg = &dma->sg[sg_len++]; in at91_twi_write_data_dma()
309 sg = &dma->sg[sg_len++]; in at91_twi_write_data_dma()
327 txdesc = dmaengine_prep_slave_sg(chan_tx, dma->sg, sg_len, in at91_twi_write_data_dma()
/linux-4.4.14/include/linux/mmc/
Ddw_mmc.h230 int (*start)(struct dw_mci *host, unsigned int sg_len);
Dcore.h123 unsigned int sg_len; /* size of scatter list */ member
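
core.h declares sg_len as the size of the scatter list attached to a struct mmc_data. For the single-buffer commands seen in sd_ops.c and mmc_ops.c above, the core simply points sg at one entry and sets sg_len to 1; a sketch of that, with fill_single_buf_sketch as an illustrative helper:

#include <linux/mmc/core.h>
#include <linux/scatterlist.h>

/* Sketch only: describe one contiguous buffer as a single-entry
 * scatterlist for an MMC data transfer. */
static void fill_single_buf_sketch(struct mmc_data *data,
                                   struct scatterlist *sg,
                                   void *buf, unsigned int len)
{
        sg_init_one(sg, buf, len);

        data->sg = sg;
        data->sg_len = 1;
        data->blksz = len;
        data->blocks = 1;
        data->flags = MMC_DATA_READ;
}
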
/linux-4.4.14/drivers/crypto/amcc/
Dcrypto4xx_core.c550 u32 sg_len; in crypto4xx_copy_pkt_to_dst() local
559 sg_len = sg->length; in crypto4xx_copy_pkt_to_dst()
580 sg_len -= len; in crypto4xx_copy_pkt_to_dst()
581 if (sg_len) { in crypto4xx_copy_pkt_to_dst()
584 &sg_len, &this_sd, &offset, &nbytes)) in crypto4xx_copy_pkt_to_dst()
/linux-4.4.14/drivers/crypto/ux500/hash/
Dhash_alg.h314 int sg_len; member
Dhash_core.c170 ctx->device->dma.sg_len = dma_map_sg(channel->device->dev, in hash_set_dma_transfer()
174 if (!ctx->device->dma.sg_len) { in hash_set_dma_transfer()
183 ctx->device->dma.sg, ctx->device->dma.sg_len, in hash_set_dma_transfer()
207 ctx->device->dma.sg_len, DMA_TO_DEVICE); in hash_dma_done()
/linux-4.4.14/drivers/scsi/be2iscsi/
Dbe_main.c2324 unsigned int sg_len, index; in hwi_write_sgl_v2() local
2339 sg_len = sg_dma_len(sg); in hwi_write_sgl_v2()
2349 sg_len); in hwi_write_sgl_v2()
2350 sge_len = sg_len; in hwi_write_sgl_v2()
2354 sg_len = sg_dma_len(sg); in hwi_write_sgl_v2()
2364 sg_len); in hwi_write_sgl_v2()
2399 sg_len = sg_dma_len(sg); in hwi_write_sgl_v2()
2405 AMAP_SET_BITS(struct amap_iscsi_sge, len, psgl, sg_len); in hwi_write_sgl_v2()
2408 offset += sg_len; in hwi_write_sgl_v2()
2419 unsigned int sg_len, index; in hwi_write_sgl() local
[all …]
/linux-4.4.14/Documentation/dmaengine/
Dclient.txt89 unsigned int sg_len, enum dma_data_direction direction,
108 nr_sg = dma_map_sg(chan->device->dev, sgl, sg_len);
/linux-4.4.14/drivers/scsi/bfa/
Dbfi.h126 sg_len:28; member
128 u32 sg_len:28,
Dbfa_fcpim.c2500 sge->sg_len = pgdlen; in bfa_ioim_send_ioreq()
2511 sgpge->sg_len = sg_dma_len(sg); in bfa_ioim_send_ioreq()
2512 pgcumsz += sgpge->sg_len; in bfa_ioim_send_ioreq()
2530 sgpge->sg_len = pgcumsz; in bfa_ioim_send_ioreq()
2536 sgpge->sg_len = pgcumsz; in bfa_ioim_send_ioreq()
2550 sge->sg_len = pgdlen; in bfa_ioim_send_ioreq()
Dbfa_ioc.h64 u32 sg_len; member
Dbfad_bsg.c3212 return sge->sg_len; in bfad_fcxp_get_req_sglen_cb()
3234 return sge->sg_len; in bfad_fcxp_get_rsp_sglen_cb()
3283 sg_table->sg_len = buf_info->size; in bfad_fcxp_map_sg()
/linux-4.4.14/drivers/dma/ipu/
Dipu_idmac.c1323 for_each_sg(desc->sg, sg, desc->sg_len, k) { in ipu_gc_tasklet()
1338 struct scatterlist *sgl, unsigned int sg_len, in idmac_prep_slave_sg() argument
1366 desc->sg_len = sg_len; in idmac_prep_slave_sg()
/linux-4.4.14/drivers/scsi/csiostor/
Dcsio_scsi.c566 uint32_t sg_len = 0; in csio_setup_ddp() local
571 sg_len = sg_dma_len(sgel); in csio_setup_ddp()
578 sg_addr, sg_len); in csio_setup_ddp()
584 ((buf_off + sg_len) & (ddp_pagesz - 1))) { in csio_setup_ddp()
587 "(%llx:%d)\n", sg_addr, sg_len); in csio_setup_ddp()
/linux-4.4.14/drivers/dma/dw/
Dcore.c762 unsigned int sg_len, enum dma_transfer_direction direction, in dwc_prep_slave_sg() argument
781 if (unlikely(!is_slave_direction(direction) || !sg_len)) in dwc_prep_slave_sg()
802 for_each_sg(sgl, sg, sg_len, i) { in dwc_prep_slave_sg()
859 for_each_sg(sgl, sg, sg_len, i) { in dwc_prep_slave_sg()
/linux-4.4.14/drivers/tty/serial/
Datmel_serial.c831 unsigned int tx_len, part1_len, part2_len, sg_len; in atmel_tx_dma() local
863 sg_len = 0; in atmel_tx_dma()
866 sg = &sgl[sg_len++]; in atmel_tx_dma()
874 sg = &sgl[sg_len++]; in atmel_tx_dma()
887 sg_len, in atmel_tx_dma()
/linux-4.4.14/drivers/scsi/bnx2i/
Dbnx2i_iscsi.c151 int sg_len; in bnx2i_map_scsi_sg() local
160 sg_len = sg_dma_len(sg); in bnx2i_map_scsi_sg()
164 bd[bd_count].buffer_length = sg_len; in bnx2i_map_scsi_sg()
169 byte_count += sg_len; in bnx2i_map_scsi_sg()
/linux-4.4.14/drivers/scsi/qla2xxx/
Dqla_iocb.c870 uint32_t cumulative_partial, sg_len; in qla24xx_get_one_block_sg() local
880 sg_len = sg_dma_len(sg); in qla24xx_get_one_block_sg()
884 if ((cumulative_partial + (sg_len - sgx->bytes_consumed)) >= blk_sz) { in qla24xx_get_one_block_sg()
890 sgx->dma_len = sg_len - sgx->bytes_consumed; in qla24xx_get_one_block_sg()
897 if (sg_len == sgx->bytes_consumed) { in qla24xx_get_one_block_sg()
/linux-4.4.14/drivers/crypto/ccp/
Dccp-ops.c680 struct scatterlist *sg, u64 sg_len, in ccp_init_data() argument
688 ret = ccp_init_sg_workarea(&data->sg_wa, cmd_q->ccp->dev, sg, sg_len, in ccp_init_data()
/linux-4.4.14/drivers/net/wireless/ath/ath6kl/
Dsdio.c255 data->sg_len = scat_req->scat_entries; in ath6kl_sdio_setup_scat_data()
/linux-4.4.14/drivers/rapidio/
Drio.c1588 return dmaengine_prep_rio_sg(dchan, data->sg, data->sg_len, in rio_dma_prep_xfer()
/linux-4.4.14/drivers/net/wireless/brcm80211/brcmfmac/
Dbcmsdh.c613 mmc_dat.sg_len = sg_cnt; in brcmf_sdiod_sglist_rw()