/linux-4.1.27/lib/
D | scatterlist.c | 52 int nents; in sg_nents() local 53 for (nents = 0; sg; sg = sg_next(sg)) in sg_nents() 54 nents++; in sg_nents() 55 return nents; in sg_nents() 74 struct scatterlist *sg_last(struct scatterlist *sgl, unsigned int nents) in sg_last() argument 77 struct scatterlist *ret = &sgl[nents - 1]; in sg_last() 82 for_each_sg(sgl, sg, nents, i) in sg_last() 104 void sg_init_table(struct scatterlist *sgl, unsigned int nents) in sg_init_table() argument 106 memset(sgl, 0, sizeof(*sgl) * nents); in sg_init_table() 110 for (i = 0; i < nents; i++) in sg_init_table() [all …]
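The lib/scatterlist.c matches above are the core helpers: sg_nents() counts entries by walking sg_next() until the end marker, and sg_init_table() zeroes an array and marks its last entry. A minimal sketch of how the two pair up, assuming buf_a and buf_b are kmalloc'd (lowmem) buffers and example_build_sgl() is a hypothetical helper:

    #include <linux/kernel.h>
    #include <linux/scatterlist.h>

    /* Build a two-entry scatterlist and count it back with sg_nents(). */
    static int example_build_sgl(void *buf_a, void *buf_b, size_t len)
    {
            struct scatterlist sgl[2];

            sg_init_table(sgl, ARRAY_SIZE(sgl));   /* zero + mark last entry */
            sg_set_buf(&sgl[0], buf_a, len);
            sg_set_buf(&sgl[1], buf_b, len);

            return sg_nents(sgl);   /* walks sg_next(), returns 2 here */
    }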
|
D | kfifo.c | 309 int nents, unsigned int len) in setup_sgl_buf() argument 316 if (!nents) in setup_sgl_buf() 336 if (++n == nents || sgl == NULL) in setup_sgl_buf() 348 int nents, unsigned int len, unsigned int off) in setup_sgl() argument 363 n = setup_sgl_buf(sgl, fifo->data + off, nents, l); in setup_sgl() 364 n += setup_sgl_buf(sgl + n, fifo->data, nents - n, len - l); in setup_sgl() 370 struct scatterlist *sgl, int nents, unsigned int len) in __kfifo_dma_in_prepare() argument 378 return setup_sgl(fifo, sgl, nents, len, fifo->in); in __kfifo_dma_in_prepare() 383 struct scatterlist *sgl, int nents, unsigned int len) in __kfifo_dma_out_prepare() argument 391 return setup_sgl(fifo, sgl, nents, len, fifo->out); in __kfifo_dma_out_prepare() [all …]
|
D | dma-debug.c | 573 unsigned int nents, i; in debug_dma_assert_idle() local 585 nents = radix_tree_gang_lookup(&dma_active_cacheline, results, cln, in debug_dma_assert_idle() 587 for (i = 0; i < nents; i++) { in debug_dma_assert_idle() 1355 int nents, int mapped_ents, int direction) in debug_dma_map_sg() argument 1376 entry->sg_call_ents = nents; in debug_dma_map_sg()
|
/linux-4.1.27/arch/c6x/kernel/ |
D | dma.c | 65 int nents, enum dma_data_direction dir) in dma_map_sg() argument 70 for_each_sg(sglist, sg, nents, i) in dma_map_sg() 74 debug_dma_map_sg(dev, sglist, nents, nents, dir); in dma_map_sg() 76 return nents; in dma_map_sg() 82 int nents, enum dma_data_direction dir) in dma_unmap_sg() argument 87 for_each_sg(sglist, sg, nents, i) in dma_unmap_sg() 90 debug_dma_unmap_sg(dev, sglist, nents, dir); in dma_unmap_sg() 115 int nents, enum dma_data_direction dir) in dma_sync_sg_for_cpu() argument 120 for_each_sg(sglist, sg, nents, i) in dma_sync_sg_for_cpu() 124 debug_dma_sync_sg_for_cpu(dev, sglist, nents, dir); in dma_sync_sg_for_cpu() [all …]
|
/linux-4.1.27/samples/kfifo/ |
D | dma-example.c | 27 unsigned int nents; in example_init() local 65 nents = kfifo_dma_in_prepare(&fifo, sg, ARRAY_SIZE(sg), FIFO_SIZE); in example_init() 66 printk(KERN_INFO "DMA sgl entries: %d\n", nents); in example_init() 67 if (!nents) { in example_init() 75 for (i = 0; i < nents; i++) { in example_init() 95 nents = kfifo_dma_out_prepare(&fifo, sg, ARRAY_SIZE(sg), 8); in example_init() 96 printk(KERN_INFO "DMA sgl entries: %d\n", nents); in example_init() 97 if (!nents) { in example_init() 104 for (i = 0; i < nents; i++) { in example_init()
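The sample above drives the kfifo DMA helpers implemented in kfifo.c: kfifo_dma_in_prepare() describes the fifo's free space as scatterlist entries (at most two, because the ring buffer can wrap) and returns 0 when there is nothing to map. A stripped-down sketch of the same in-direction flow, with hypothetical names and assuming the DMA engine filled everything that was requested:

    #include <linux/kernel.h>
    #include <linux/kfifo.h>
    #include <linux/scatterlist.h>

    #define EX_FIFO_SIZE 32                        /* must be a power of two */

    static DEFINE_KFIFO(ex_fifo, unsigned char, EX_FIFO_SIZE);

    static void example_kfifo_dma_in(void)
    {
            struct scatterlist sg[2];
            unsigned int nents;

            /* describe the free space of the fifo for a DMA "in" transfer */
            nents = kfifo_dma_in_prepare(&ex_fifo, sg, ARRAY_SIZE(sg),
                                         EX_FIFO_SIZE);
            if (!nents)
                    return;                        /* fifo is full */

            /* ... map sg[0..nents-1] and run the transfer here ... */

            /* commit the bytes the DMA actually wrote into the fifo */
            kfifo_dma_in_finish(&ex_fifo, EX_FIFO_SIZE);
    }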
|
/linux-4.1.27/drivers/crypto/qce/ |
D | dma.c | 57 int qce_mapsg(struct device *dev, struct scatterlist *sg, int nents, in qce_mapsg() argument 70 err = dma_map_sg(dev, sg, nents, dir); in qce_mapsg() 75 return nents; in qce_mapsg() 78 void qce_unmapsg(struct device *dev, struct scatterlist *sg, int nents, in qce_unmapsg() argument 87 dma_unmap_sg(dev, sg, nents, dir); in qce_unmapsg() 93 int nents = 0; in qce_countsg() local 99 nents++; in qce_countsg() 106 return nents; in qce_countsg() 135 int nents, unsigned long flags, in qce_dma_prep_sg() argument 142 if (!sg || !nents) in qce_dma_prep_sg() [all …]
|
D | dma.h | 53 void qce_unmapsg(struct device *dev, struct scatterlist *sg, int nents, 55 int qce_mapsg(struct device *dev, struct scatterlist *sg, int nents,
|
/linux-4.1.27/drivers/crypto/caam/ |
D | sg_sw_sec4.h | 89 unsigned int nents, enum dma_data_direction dir, in dma_map_sg_chained() argument 94 for (i = 0; i < nents; i++) { in dma_map_sg_chained() 99 dma_map_sg(dev, sg, nents, dir); in dma_map_sg_chained() 101 return nents; in dma_map_sg_chained() 105 unsigned int nents, enum dma_data_direction dir, in dma_unmap_sg_chained() argument 110 for (i = 0; i < nents; i++) { in dma_unmap_sg_chained() 115 dma_unmap_sg(dev, sg, nents, dir); in dma_unmap_sg_chained() 117 return nents; in dma_unmap_sg_chained()
|
/linux-4.1.27/drivers/spi/ |
D | spi-pxa2xx-dma.c | 26 int i, nents, len = drv_data->len; in pxa2xx_spi_map_dma_buffer() local 44 nents = DIV_ROUND_UP(len, SZ_2K); in pxa2xx_spi_map_dma_buffer() 45 if (nents != sgt->nents) { in pxa2xx_spi_map_dma_buffer() 49 ret = sg_alloc_table(sgt, nents, GFP_ATOMIC); in pxa2xx_spi_map_dma_buffer() 55 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in pxa2xx_spi_map_dma_buffer() 67 nents = dma_map_sg(dmadev, sgt->sgl, sgt->nents, dir); in pxa2xx_spi_map_dma_buffer() 68 if (!nents) in pxa2xx_spi_map_dma_buffer() 71 return nents; in pxa2xx_spi_map_dma_buffer() 88 dma_unmap_sg(dmadev, sgt->sgl, sgt->nents, dir); in pxa2xx_spi_unmap_dma_buffer() 168 int nents, ret; in pxa2xx_spi_dma_prepare_one() local [all …]
|
D | spi-ep93xx.c | 443 int i, ret, nents; in ep93xx_spi_dma_prepare() local 483 nents = DIV_ROUND_UP(len, PAGE_SIZE); in ep93xx_spi_dma_prepare() 484 if (nents != sgt->nents) { in ep93xx_spi_dma_prepare() 487 ret = sg_alloc_table(sgt, nents, GFP_KERNEL); in ep93xx_spi_dma_prepare() 493 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in ep93xx_spi_dma_prepare() 513 nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); in ep93xx_spi_dma_prepare() 514 if (!nents) in ep93xx_spi_dma_prepare() 517 txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, dir, DMA_CTRL_ACK); in ep93xx_spi_dma_prepare() 519 dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); in ep93xx_spi_dma_prepare() 547 dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir); in ep93xx_spi_dma_finish()
|
D | spi-qup.c | 294 unsigned int nents; in spi_qup_prep_sg() local 298 nents = xfer->tx_sg.nents; in spi_qup_prep_sg() 302 nents = xfer->rx_sg.nents; in spi_qup_prep_sg() 306 desc = dmaengine_prep_slave_sg(chan, sgl, nents, dir, flags); in spi_qup_prep_sg()
|
D | spi-dw-mid.c | 169 xfer->tx_sg.nents, in dw_spi_dma_prepare_tx() 215 xfer->rx_sg.nents, in dw_spi_dma_prepare_rx()
|
D | spi-pl022.c | 814 pl022->sgt_tx.nents, DMA_TO_DEVICE); in unmap_free_dma_scatter() 816 pl022->sgt_rx.nents, DMA_FROM_DEVICE); in unmap_free_dma_scatter() 841 pl022->sgt_rx.nents, in dma_callback() 844 for_each_sg(pl022->sgt_rx.sgl, sg, pl022->sgt_rx.nents, i) { in dma_callback() 854 for_each_sg(pl022->sgt_tx.sgl, sg, pl022->sgt_tx.nents, i) { in dma_callback() 891 for_each_sg(sgtab->sgl, sg, sgtab->nents, i) { in setup_dma_scatter() 912 for_each_sg(sgtab->sgl, sg, sgtab->nents, i) { in setup_dma_scatter() 1067 pl022->sgt_rx.nents, DMA_FROM_DEVICE); in configure_dma() 1072 pl022->sgt_tx.nents, DMA_TO_DEVICE); in configure_dma() 1111 pl022->sgt_tx.nents, DMA_TO_DEVICE); in configure_dma() [all …]
|
D | spi-img-spfi.c | 339 xfer->rx_sg.nents, in img_spfi_start_dma() 363 xfer->tx_sg.nents, in img_spfi_start_dma()
|
D | spi-rockchip.c | 457 rs->rx_sg.sgl, rs->rx_sg.nents, in rockchip_spi_prepare_dma() 474 rs->tx_sg.sgl, rs->tx_sg.nents, in rockchip_spi_prepare_dma()
|
D | spi-imx.c | 907 tx->sgl, tx->nents, DMA_MEM_TO_DEV, in spi_imx_dma_transfer() 919 rx->sgl, rx->nents, DMA_DEV_TO_MEM, in spi_imx_dma_transfer()
|
D | spi-rspi.c | 526 rx->sgl, rx->nents, DMA_FROM_DEVICE, in rspi_dma_transfer() 546 tx->sgl, tx->nents, DMA_TO_DEVICE, in rspi_dma_transfer()
|
D | spi.c | 511 ret = dma_map_sg(dev, sgt->sgl, sgt->nents, dir); in spi_map_buf() 519 sgt->nents = ret; in spi_map_buf()
|
D | spi-s3c64xx.c | 303 desc = dmaengine_prep_slave_sg(dma->ch, sgt->sgl, sgt->nents, in prepare_dma()
|
/linux-4.1.27/drivers/parisc/ |
D | ccio-rm-dma.c | 115 static int ccio_map_sg(struct pci_dev *dev, struct scatterlist *sglist, int nents, int direction) in ccio_map_sg() argument 117 int tmp = nents; in ccio_map_sg() 120 while (nents) { in ccio_map_sg() 123 nents--; in ccio_map_sg() 131 static void ccio_unmap_sg(struct pci_dev *dev, struct scatterlist *sglist, int nents, int direction) in ccio_unmap_sg() argument 134 while (nents) { in ccio_unmap_sg() 136 nents--; in ccio_unmap_sg()
|
D | iommu-helpers.h | 14 iommu_fill_pdir(struct ioc *ioc, struct scatterlist *startsg, int nents, in iommu_fill_pdir() argument 29 while (nents-- > 0) { in iommu_fill_pdir() 33 DBG_RUN_SG(" %d : %08lx/%05x %p/%05x\n", nents, in iommu_fill_pdir() 101 struct scatterlist *startsg, int nents, in iommu_coalesce_chunks() argument 113 while (nents > 0) { in iommu_coalesce_chunks() 130 while(--nents > 0) { in iommu_coalesce_chunks()
|
D | sba_iommu.c | 274 sba_dump_sg( struct ioc *ioc, struct scatterlist *startsg, int nents) in sba_dump_sg() argument 276 while (nents-- > 0) { in sba_dump_sg() 278 nents, in sba_dump_sg() 935 sba_map_sg(struct device *dev, struct scatterlist *sglist, int nents, in sba_map_sg() argument 942 DBG_RUN_SG("%s() START %d entries\n", __func__, nents); in sba_map_sg() 947 if (nents == 1) { in sba_map_sg() 959 sba_dump_sg(ioc, sglist, nents); in sba_map_sg() 976 coalesced = iommu_coalesce_chunks(ioc, dev, sglist, nents, sba_alloc_range); in sba_map_sg() 986 filled = iommu_fill_pdir(ioc, sglist, nents, 0, sba_io_pdir_entry); in sba_map_sg() 995 sba_dump_sg(ioc, sglist, nents); in sba_map_sg() [all …]
|
D | ccio-dma.c | 901 ccio_map_sg(struct device *dev, struct scatterlist *sglist, int nents, in ccio_map_sg() argument 914 DBG_RUN_SG("%s() START %d entries\n", __func__, nents); in ccio_map_sg() 917 if (nents == 1) { in ccio_map_sg() 925 for(i = 0; i < nents; i++) in ccio_map_sg() 942 coalesced = iommu_coalesce_chunks(ioc, dev, sglist, nents, ccio_alloc_range); in ccio_map_sg() 952 filled = iommu_fill_pdir(ioc, sglist, nents, hint, ccio_io_pdir_entry); in ccio_map_sg() 978 ccio_unmap_sg(struct device *dev, struct scatterlist *sglist, int nents, in ccio_unmap_sg() argument 987 __func__, nents, sg_virt(sglist), sglist->length); in ccio_unmap_sg() 993 while(sg_dma_len(sglist) && nents--) { in ccio_unmap_sg() 1003 DBG_RUN_SG("%s() DONE (nents %d)\n", __func__, nents); in ccio_unmap_sg()
|
/linux-4.1.27/include/linux/ |
D | scatterlist.h | 14 unsigned int nents; /* number of mapped entries */ member 242 size_t sg_copy_from_buffer(struct scatterlist *sgl, unsigned int nents, 244 size_t sg_copy_to_buffer(struct scatterlist *sgl, unsigned int nents, 247 size_t sg_pcopy_from_buffer(struct scatterlist *sgl, unsigned int nents, 249 size_t sg_pcopy_to_buffer(struct scatterlist *sgl, unsigned int nents, 280 struct scatterlist *sglist, unsigned int nents, 308 #define for_each_sg_page(sglist, piter, nents, pgoffset) \ argument 309 for (__sg_page_iter_start((piter), (sglist), (nents), (pgoffset)); \ 347 unsigned int nents, unsigned int flags);
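The scatterlist.h declarations above include the buffer-copy helpers, which take the entry count (nents) rather than a byte count and return the number of bytes actually copied. A small sketch of a round trip through a caller-provided S/G list (the helper name is made up):

    #include <linux/scatterlist.h>

    /* Copy a linear staging buffer into an S/G list and read it back. */
    static size_t example_sg_copy(struct scatterlist *sgl, unsigned int nents,
                                  void *staging, size_t len)
    {
            size_t copied = sg_copy_from_buffer(sgl, nents, staging, len);

            if (copied != len)
                    return copied;   /* S/G list holds fewer than len bytes */

            return sg_copy_to_buffer(sgl, nents, staging, len);
    }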
|
D | dma-mapping.h | 42 int nents, enum dma_data_direction dir, 45 struct scatterlist *sg, int nents, 55 struct scatterlist *sg, int nents, 58 struct scatterlist *sg, int nents, 267 #define dma_map_sg_attrs(dev, sgl, nents, dir, attrs) \ argument 268 dma_map_sg(dev, sgl, nents, dir) 270 #define dma_unmap_sg_attrs(dev, sgl, nents, dir, attrs) \ argument 271 dma_unmap_sg(dev, sgl, nents, dir)
|
D | iommu.h | 152 struct scatterlist *sg, unsigned int nents, int prot); 201 struct scatterlist *sg,unsigned int nents, 290 unsigned int nents, int prot) in iommu_map_sg() argument 292 return domain->ops->map_sg(domain, iova, sg, nents, prot); in iommu_map_sg() 349 unsigned int nents, int prot) in iommu_map_sg() argument
|
D | kfifo.h | 658 #define kfifo_dma_in_prepare(fifo, sgl, nents, len) \ argument 662 int __nents = (nents); \ 709 #define kfifo_dma_out_prepare(fifo, sgl, nents, len) \ argument 713 int __nents = (nents); \ 792 struct scatterlist *sgl, int nents, unsigned int len); 795 struct scatterlist *sgl, int nents, unsigned int len); 814 struct scatterlist *sgl, int nents, unsigned int len, size_t recsize); 820 struct scatterlist *sgl, int nents, unsigned int len, size_t recsize);
|
D | tifm.h | 148 int tifm_map_sg(struct tifm_dev *sock, struct scatterlist *sg, int nents, 150 void tifm_unmap_sg(struct tifm_dev *sock, struct scatterlist *sg, int nents,
|
D | swiotlb.h | 74 swiotlb_map_sg(struct device *hwdev, struct scatterlist *sg, int nents, 78 swiotlb_unmap_sg(struct device *hwdev, struct scatterlist *sg, int nents,
|
D | dma-debug.h | 48 int nents, int mapped_ents, int direction); 124 int nents, int mapped_ents, int direction) in debug_dma_map_sg() argument
|
D | nvme.h | 137 int nents; /* Used in scatterlist */ member
|
D | usb.h | 1664 struct scatterlist *sg, int nents); 1758 int nents,
|
/linux-4.1.27/arch/microblaze/kernel/ |
D | dma.c | 55 int nents, enum dma_data_direction direction, in dma_direct_map_sg() argument 62 for_each_sg(sgl, sg, nents, i) { in dma_direct_map_sg() 68 return nents; in dma_direct_map_sg() 131 struct scatterlist *sgl, int nents, in dma_direct_sync_sg_for_cpu() argument 139 for_each_sg(sgl, sg, nents, i) in dma_direct_sync_sg_for_cpu() 145 struct scatterlist *sgl, int nents, in dma_direct_sync_sg_for_device() argument 153 for_each_sg(sgl, sg, nents, i) in dma_direct_sync_sg_for_device()
|
/linux-4.1.27/arch/x86/kernel/ |
D | pci-nommu.c | 57 int nents, enum dma_data_direction dir, in nommu_map_sg() argument 63 WARN_ON(nents == 0 || sg[0].length == 0); in nommu_map_sg() 65 for_each_sg(sg, s, nents, i) { in nommu_map_sg() 73 return nents; in nommu_map_sg()
|
D | amd_gart_64.c | 288 static void gart_unmap_sg(struct device *dev, struct scatterlist *sg, int nents, in gart_unmap_sg() argument 294 for_each_sg(sg, s, nents, i) { in gart_unmap_sg() 303 int nents, int dir) in dma_map_sg_nonforce() argument 312 for_each_sg(sg, s, nents, i) { in dma_map_sg_nonforce() 320 nents = 0; in dma_map_sg_nonforce() 330 return nents; in dma_map_sg_nonforce() 389 static int gart_map_sg(struct device *dev, struct scatterlist *sg, int nents, in gart_map_sg() argument 398 if (nents == 0) in gart_map_sg() 412 for_each_sg(sg, s, nents, i) { in gart_map_sg() 452 if (out < nents) { in gart_map_sg() [all …]
|
/linux-4.1.27/arch/sh/kernel/ |
D | dma-nommu.c | 27 int nents, enum dma_data_direction dir, in nommu_map_sg() argument 33 WARN_ON(nents == 0 || sg[0].length == 0); in nommu_map_sg() 35 for_each_sg(sg, s, nents, i) { in nommu_map_sg() 44 return nents; in nommu_map_sg()
|
/linux-4.1.27/arch/avr32/include/asm/ |
D | dma-mapping.h | 212 dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, in dma_map_sg() argument 217 for (i = 0; i < nents; i++) { in dma_map_sg() 225 return nents; in dma_map_sg() 314 int nents, enum dma_data_direction direction) in dma_sync_sg_for_cpu() argument 325 int nents, enum dma_data_direction direction) in dma_sync_sg_for_device() argument 329 for (i = 0; i < nents; i++) { in dma_sync_sg_for_device()
|
/linux-4.1.27/drivers/mmc/core/ |
D | sdio_ops.c | 129 unsigned int nents, left_size, i; in mmc_io_rw_extended() local 160 nents = (left_size - 1) / seg_size + 1; in mmc_io_rw_extended() 161 if (nents > 1) { in mmc_io_rw_extended() 162 if (sg_alloc_table(&sgtable, nents, GFP_KERNEL)) in mmc_io_rw_extended() 166 data.sg_len = nents; in mmc_io_rw_extended() 185 if (nents > 1) in mmc_io_rw_extended()
|
/linux-4.1.27/arch/m68k/kernel/ |
D | dma.c | 123 void dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg, int nents, in dma_sync_sg_for_device() argument 128 for (i = 0; i < nents; sg++, i++) in dma_sync_sg_for_device() 154 int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, in dma_map_sg() argument 159 for (i = 0; i < nents; sg++, i++) { in dma_map_sg() 163 return nents; in dma_map_sg()
|
/linux-4.1.27/arch/tile/kernel/ |
D | pci-dma.c | 204 int nents, enum dma_data_direction direction, in tile_dma_map_sg() argument 212 WARN_ON(nents == 0 || sglist->length == 0); in tile_dma_map_sg() 214 for_each_sg(sglist, sg, nents, i) { in tile_dma_map_sg() 222 return nents; in tile_dma_map_sg() 226 int nents, enum dma_data_direction direction, in tile_dma_unmap_sg() argument 233 for_each_sg(sglist, sg, nents, i) { in tile_dma_unmap_sg() 377 int nents, enum dma_data_direction direction, in tile_pci_dma_map_sg() argument 385 WARN_ON(nents == 0 || sglist->length == 0); in tile_pci_dma_map_sg() 387 for_each_sg(sglist, sg, nents, i) { in tile_pci_dma_map_sg() 397 return nents; in tile_pci_dma_map_sg() [all …]
|
/linux-4.1.27/tools/virtio/linux/ |
D | scatterlist.h | 159 static inline void sg_init_table(struct scatterlist *sgl, unsigned int nents) in sg_init_table() argument 161 memset(sgl, 0, sizeof(*sgl) * nents); in sg_init_table() 165 for (i = 0; i < nents; i++) in sg_init_table() 169 sg_mark_end(&sgl[nents - 1]); in sg_init_table()
|
/linux-4.1.27/arch/openrisc/kernel/ |
D | dma.c | 180 int nents, enum dma_data_direction dir, in or1k_map_sg() argument 186 for_each_sg(sg, s, nents, i) { in or1k_map_sg() 191 return nents; in or1k_map_sg() 196 int nents, enum dma_data_direction dir, in or1k_unmap_sg() argument 202 for_each_sg(sg, s, nents, i) { in or1k_unmap_sg()
|
/linux-4.1.27/arch/ia64/hp/common/ |
D | sba_iommu.c | 395 sba_dump_sg( struct ioc *ioc, struct scatterlist *startsg, int nents) in sba_dump_sg() argument 397 while (nents-- > 0) { in sba_dump_sg() 398 printk(KERN_DEBUG " %d : DMA %08lx/%05x CPU %p\n", nents, in sba_dump_sg() 406 sba_check_sg( struct ioc *ioc, struct scatterlist *startsg, int nents) in sba_check_sg() argument 409 int the_nents = nents; in sba_check_sg() 413 sba_dump_sg(NULL, startsg, nents); in sba_check_sg() 1231 int nents) in sba_fill_pdir() argument 1238 while (nents-- > 0) { in sba_fill_pdir() 1245 nents, startsg->dma_address, cnt, in sba_fill_pdir() 1249 nents, startsg->dma_address, cnt, in sba_fill_pdir() [all …]
|
/linux-4.1.27/arch/c6x/include/asm/ |
D | dma-mapping.h | 54 int nents, enum dma_data_direction direction); 57 int nents, enum dma_data_direction direction); 88 int nents, enum dma_data_direction dir); 91 int nents, enum dma_data_direction dir);
|
/linux-4.1.27/drivers/infiniband/hw/qib/ |
D | qib_dma.c | 95 int nents, enum dma_data_direction direction) in qib_map_sg() argument 100 int ret = nents; in qib_map_sg() 104 for_each_sg(sgl, sg, nents, i) { in qib_map_sg() 120 struct scatterlist *sg, int nents, in qib_unmap_sg() argument
|
/linux-4.1.27/drivers/infiniband/hw/ipath/ |
D | ipath_dma.c | 102 int nents, enum dma_data_direction direction) in ipath_map_sg() argument 107 int ret = nents; in ipath_map_sg() 111 for_each_sg(sgl, sg, nents, i) { in ipath_map_sg() 127 struct scatterlist *sg, int nents, in ipath_unmap_sg() argument
|
/linux-4.1.27/arch/powerpc/kernel/ |
D | dma.c | 129 int nents, enum dma_data_direction direction, in dma_direct_map_sg() argument 135 for_each_sg(sgl, sg, nents, i) { in dma_direct_map_sg() 141 return nents; in dma_direct_map_sg() 145 int nents, enum dma_data_direction direction, in dma_direct_unmap_sg() argument 196 struct scatterlist *sgl, int nents, in dma_direct_sync_sg() argument 202 for_each_sg(sgl, sg, nents, i) in dma_direct_sync_sg()
|
D | ibmebus.c | 107 int nents, enum dma_data_direction direction, in ibmebus_map_sg() argument 113 for_each_sg(sgl, sg, nents, i) { in ibmebus_map_sg() 118 return nents; in ibmebus_map_sg() 123 int nents, enum dma_data_direction direction, in ibmebus_unmap_sg() argument
|
/linux-4.1.27/arch/frv/mb93090-mb00/ |
D | pci-dma.c | 53 int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, in dma_map_sg() argument 64 for (i = 0; i < nents; i++) { in dma_map_sg() 78 return nents; in dma_map_sg()
|
D | pci-dma-nommu.c | 122 int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, in dma_map_sg() argument 127 for (i=0; i<nents; i++) in dma_map_sg() 133 return nents; in dma_map_sg()
|
/linux-4.1.27/arch/mips/loongson/common/ |
D | dma-swiotlb.c | 69 int nents, enum dma_data_direction dir, in loongson_dma_map_sg() argument 72 int r = swiotlb_map_sg_attrs(dev, sg, nents, dir, NULL); in loongson_dma_map_sg() 87 struct scatterlist *sg, int nents, in loongson_dma_sync_sg_for_device() argument 90 swiotlb_sync_sg_for_device(dev, sg, nents, dir); in loongson_dma_sync_sg_for_device()
|
/linux-4.1.27/include/asm-generic/ |
D | dma-mapping-common.h | 47 int nents, enum dma_data_direction dir, in dma_map_sg_attrs() argument 54 for_each_sg(sg, s, nents, i) in dma_map_sg_attrs() 57 ents = ops->map_sg(dev, sg, nents, dir, attrs); in dma_map_sg_attrs() 59 debug_dma_map_sg(dev, sg, nents, ents, dir); in dma_map_sg_attrs() 65 int nents, enum dma_data_direction dir, in dma_unmap_sg_attrs() argument 71 debug_dma_unmap_sg(dev, sg, nents, dir); in dma_unmap_sg_attrs() 73 ops->unmap_sg(dev, sg, nents, dir, attrs); in dma_unmap_sg_attrs()
|
D | pci-dma-compat.h | 66 int nents, int direction) in pci_map_sg() argument 68 …return dma_map_sg(hwdev == NULL ? NULL : &hwdev->dev, sg, nents, (enum dma_data_direction)directio… in pci_map_sg() 73 int nents, int direction) in pci_unmap_sg() argument 75 dma_unmap_sg(hwdev == NULL ? NULL : &hwdev->dev, sg, nents, (enum dma_data_direction)direction); in pci_unmap_sg()
|
D | dma-mapping-broken.h | 47 dma_map_sg(struct device *dev, struct scatterlist *sg, int nents,
|
/linux-4.1.27/arch/arc/include/asm/ |
D | dma-mapping.h | 123 int nents, enum dma_data_direction dir) in dma_map_sg() argument 128 for_each_sg(sg, s, nents, i) in dma_map_sg() 132 return nents; in dma_map_sg() 137 int nents, enum dma_data_direction dir) in dma_unmap_sg() argument 142 for_each_sg(sg, s, nents, i) in dma_unmap_sg()
|
/linux-4.1.27/arch/mn10300/include/asm/ |
D | dma-mapping.h | 51 int dma_map_sg(struct device *dev, struct scatterlist *sglist, int nents, in dma_map_sg() argument 58 WARN_ON(nents == 0 || sglist[0].length == 0); in dma_map_sg() 60 for_each_sg(sglist, sg, nents, i) { in dma_map_sg() 67 return nents; in dma_map_sg()
|
/linux-4.1.27/drivers/gpu/drm/exynos/ |
D | exynos_drm_dmabuf.c | 59 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, in exynos_gem_detach_dma_buf() 78 int nents, ret; in exynos_gem_map_dma_buf() local 109 nents = dma_map_sg(attach->dev, sgt->sgl, sgt->orig_nents, dir); in exynos_gem_map_dma_buf() 110 if (!nents) { in exynos_gem_map_dma_buf() 254 if (sgt->nents == 1) { in exynos_dmabuf_prime_import()
|
D | exynos_drm_gem.c | 99 for_each_sg(buf->sgt->sgl, sgl, buf->sgt->nents, i) { in exynos_drm_gem_map_buf() 482 int nents; in exynos_gem_map_sgt_with_dma() local 486 nents = dma_map_sg(drm_dev->dev, sgt->sgl, sgt->nents, dir); in exynos_gem_map_sgt_with_dma() 487 if (!nents) { in exynos_gem_map_sgt_with_dma() 490 return nents; in exynos_gem_map_sgt_with_dma() 501 dma_unmap_sg(drm_dev->dev, sgt->sgl, sgt->nents, dir); in exynos_gem_unmap_sgt_from_dma()
|
/linux-4.1.27/arch/hexagon/kernel/ |
D | dma.c | 118 int nents, enum dma_data_direction dir, in hexagon_map_sg() argument 124 WARN_ON(nents == 0 || sg[0].length == 0); in hexagon_map_sg() 126 for_each_sg(sg, s, nents, i) { in hexagon_map_sg() 137 return nents; in hexagon_map_sg()
|
/linux-4.1.27/arch/sparc/kernel/ |
D | ioport.c | 550 int nents, enum dma_data_direction dir, in pci32_map_sg() argument 557 for_each_sg(sgl, sg, nents, n) { in pci32_map_sg() 561 return nents; in pci32_map_sg() 569 int nents, enum dma_data_direction dir, in pci32_unmap_sg() argument 576 for_each_sg(sgl, sg, nents, n) { in pci32_unmap_sg() 615 int nents, enum dma_data_direction dir) in pci32_sync_sg_for_cpu() argument 621 for_each_sg(sgl, sg, nents, n) { in pci32_sync_sg_for_cpu() 628 int nents, enum dma_data_direction dir) in pci32_sync_sg_for_device() argument 634 for_each_sg(sgl, sg, nents, n) { in pci32_sync_sg_for_device()
|
/linux-4.1.27/arch/parisc/kernel/ |
D | pci-dma.c | 478 static int pa11_dma_map_sg(struct device *dev, struct scatterlist *sglist, int nents, enum dma_data… in pa11_dma_map_sg() argument 484 for (i = 0; i < nents; i++, sglist++ ) { in pa11_dma_map_sg() 490 return nents; in pa11_dma_map_sg() 493 static void pa11_dma_unmap_sg(struct device *dev, struct scatterlist *sglist, int nents, enum dma_d… in pa11_dma_unmap_sg() argument 504 for (i = 0; i < nents; i++, sglist++ ) in pa11_dma_unmap_sg() 523 static void pa11_dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sglist, int nents, enu… in pa11_dma_sync_sg_for_cpu() argument 529 for (i = 0; i < nents; i++, sglist++ ) in pa11_dma_sync_sg_for_cpu() 533 static void pa11_dma_sync_sg_for_device(struct device *dev, struct scatterlist *sglist, int nents, … in pa11_dma_sync_sg_for_device() argument 539 for (i = 0; i < nents; i++, sglist++ ) in pa11_dma_sync_sg_for_device()
|
/linux-4.1.27/drivers/usb/storage/ |
D | protocol.c | 141 unsigned int nents = scsi_sg_count(srb); in usb_stor_access_xfer_buf() local 144 nents = sg_nents(sg); in usb_stor_access_xfer_buf() 148 sg_miter_start(&miter, sg, nents, dir == FROM_XFER_BUF ? in usb_stor_access_xfer_buf()
|
D | isd200.c | 469 srb->sdb.table.nents = buff ? 1 : 0; in isd200_set_srb()
|
D | uas.c | 417 urb->num_sgs = udev->bus->sg_tablesize ? sdb->table.nents : 0; in uas_alloc_data_urb()
|
/linux-4.1.27/net/rds/ |
D | message.c | 220 struct scatterlist *rds_message_alloc_sgs(struct rds_message *rm, int nents) in rds_message_alloc_sgs() argument 225 WARN_ON(rm->m_used_sgs + nents > rm->m_total_sgs); in rds_message_alloc_sgs() 226 WARN_ON(!nents); in rds_message_alloc_sgs() 228 if (rm->m_used_sgs + nents > rm->m_total_sgs) in rds_message_alloc_sgs() 232 sg_init_table(sg_ret, nents); in rds_message_alloc_sgs() 233 rm->m_used_sgs += nents; in rds_message_alloc_sgs()
|
D | rdma.c | 183 unsigned int nents; in __rds_rdma_map() local 245 nents = ret; in __rds_rdma_map() 246 sg = kcalloc(nents, sizeof(*sg), GFP_KERNEL); in __rds_rdma_map() 251 WARN_ON(!nents); in __rds_rdma_map() 252 sg_init_table(sg, nents); in __rds_rdma_map() 255 for (i = 0 ; i < nents; i++) in __rds_rdma_map() 258 rdsdebug("RDS: trans_private nents is %u\n", nents); in __rds_rdma_map() 264 trans_private = rs->rs_transport->get_mr(sg, nents, rs, in __rds_rdma_map() 268 for (i = 0 ; i < nents; i++) in __rds_rdma_map()
|
D | ib_rdma.c | 368 struct scatterlist *sg, unsigned int nents) in rds_ib_map_fmr() argument 379 sg_dma_len = ib_dma_map_sg(dev, sg, nents, in rds_ib_map_fmr() 438 ibmr->sg_len = nents; in rds_ib_map_fmr() 743 void *rds_ib_get_mr(struct scatterlist *sg, unsigned long nents, in rds_ib_get_mr() argument 767 ret = rds_ib_map_fmr(rds_ibdev, ibmr, sg, nents); in rds_ib_get_mr()
|
D | iw_rdma.c | 83 struct scatterlist *sg, unsigned int nents); 591 void *rds_iw_get_mr(struct scatterlist *sg, unsigned long nents, argument 625 ret = rds_iw_map_fastreg(rds_iwdev->mr_pool, ibmr, sg, nents);
|
D | rds.h | 658 struct rds_message *rds_message_alloc(unsigned int nents, gfp_t gfp); 659 struct scatterlist *rds_message_alloc_sgs(struct rds_message *rm, int nents);
|
D | ib.h | 305 void *rds_ib_get_mr(struct scatterlist *sg, unsigned long nents,
|
D | iw.h | 315 void *rds_iw_get_mr(struct scatterlist *sg, unsigned long nents,
|
/linux-4.1.27/drivers/infiniband/hw/usnic/ |
D | usnic_uiom.c | 88 for_each_sg(chunk->page_list, sg, chunk->nents, i) { in usnic_uiom_put_pages() 168 chunk->nents = min_t(int, ret, USNIC_UIOM_PAGE_CHUNK); in usnic_uiom_get_pages() 169 sg_init_table(chunk->page_list, chunk->nents); in usnic_uiom_get_pages() 170 for_each_sg(chunk->page_list, sg, chunk->nents, i) { in usnic_uiom_get_pages() 177 cur_base += chunk->nents * PAGE_SIZE; in usnic_uiom_get_pages() 178 ret -= chunk->nents; in usnic_uiom_get_pages() 179 off += chunk->nents; in usnic_uiom_get_pages() 266 for (i = 0; i < chunk->nents; i++, va += PAGE_SIZE) { in usnic_uiom_map_sorted_intervals() 317 if (i == chunk->nents) { in usnic_uiom_map_sorted_intervals()
|
D | usnic_uiom.h | 63 int nents; member
|
/linux-4.1.27/arch/arm/mm/ |
D | dma-mapping.c | 901 int arm_dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, in arm_dma_map_sg() argument 908 for_each_sg(sg, s, nents, i) { in arm_dma_map_sg() 917 return nents; in arm_dma_map_sg() 935 void arm_dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nents, in arm_dma_unmap_sg() argument 943 for_each_sg(sg, s, nents, i) in arm_dma_unmap_sg() 955 int nents, enum dma_data_direction dir) in arm_dma_sync_sg_for_cpu() argument 961 for_each_sg(sg, s, nents, i) in arm_dma_sync_sg_for_cpu() 974 int nents, enum dma_data_direction dir) in arm_dma_sync_sg_for_device() argument 980 for_each_sg(sg, s, nents, i) in arm_dma_sync_sg_for_device() 1536 static int __iommu_map_sg(struct device *dev, struct scatterlist *sg, int nents, in __iommu_map_sg() argument [all …]
|
/linux-4.1.27/arch/blackfin/kernel/ |
D | dma-mapping.c | 116 dma_map_sg(struct device *dev, struct scatterlist *sg_list, int nents, in dma_map_sg() argument 122 for_each_sg(sg_list, sg, nents, i) { in dma_map_sg() 127 return nents; in dma_map_sg()
|
/linux-4.1.27/drivers/gpu/drm/i915/ |
D | i915_gem_dmabuf.c | 60 ret = sg_alloc_table(st, obj->pages->nents, GFP_KERNEL); in i915_gem_map_dma_buf() 66 for (i = 0; i < obj->pages->nents; i++) { in i915_gem_map_dma_buf() 72 if (!dma_map_sg(attachment->dev, st->sgl, st->nents, dir)) { in i915_gem_map_dma_buf() 100 dma_unmap_sg(attachment->dev, sg->sgl, sg->nents, dir); in i915_gem_unmap_dma_buf() 139 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, 0) in i915_gem_dmabuf_vmap()
|
D | i915_gem_tiling.c | 515 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, 0) { in i915_gem_object_do_bit_17_swizzle() 545 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, 0) { in i915_gem_object_save_bit_17_swizzle()
|
D | i915_gem_gtt.c | 557 for_each_sg_page(pages->sgl, &sg_iter, pages->nents, 0) { in gen8_ppgtt_insert_entries() 1142 for_each_sg_page(pages->sgl, &sg_iter, pages->nents, 0) { in gen6_ppgtt_insert_entries() 1723 obj->pages->sgl, obj->pages->nents, in i915_gem_gtt_prepare_object() 1753 for_each_sg_page(st->sgl, &sg_iter, st->nents, 0) { in gen8_ggtt_insert_entries() 1799 for_each_sg_page(st->sgl, &sg_iter, st->nents, 0) { in gen6_ggtt_insert_entries() 1991 obj->pages->sgl, obj->pages->nents, in i915_gem_gtt_finish_object() 2613 st->nents = 0; in rotate_pages() 2618 st->nents++; in rotate_pages() 2674 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, 0) { in intel_rotate_fb_obj_pages()
|
D | i915_gem.c | 637 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, in i915_gem_shmem_pread() 962 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, in i915_gem_shmem_pwrite() 1991 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, 0) { in i915_gem_object_put_pages_gtt() 2074 st->nents = 0; in i915_gem_object_get_pages_gtt() 2097 st->nents++; in i915_gem_object_get_pages_gtt() 2106 st->nents++; in i915_gem_object_get_pages_gtt() 2133 for_each_sg_page(st->sgl, &sg_iter, st->nents, 0) in i915_gem_object_get_pages_gtt()
|
D | i915_gem_userptr.c | 715 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, 0) { in i915_gem_userptr_put_pages()
|
D | i915_cmd_parser.c | 839 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, first_page) { in vmap_batch()
|
D | i915_drv.h | 2648 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, n) in i915_gem_object_get_page()
|
/linux-4.1.27/arch/metag/include/asm/ |
D | dma-mapping.h | 48 dma_map_sg(struct device *dev, struct scatterlist *sglist, int nents, in dma_map_sg() argument 55 WARN_ON(nents == 0 || sglist[0].length == 0); in dma_map_sg() 57 for_each_sg(sglist, sg, nents, i) { in dma_map_sg() 64 return nents; in dma_map_sg()
|
/linux-4.1.27/arch/alpha/kernel/ |
D | pci-noop.c | 141 static int alpha_noop_map_sg(struct device *dev, struct scatterlist *sgl, int nents, in alpha_noop_map_sg() argument 147 for_each_sg(sgl, sg, nents, i) { in alpha_noop_map_sg() 156 return nents; in alpha_noop_map_sg()
|
D | pci_iommu.c | 653 int nents, enum dma_data_direction dir, in alpha_pci_map_sg() argument 668 if (nents == 1) { in alpha_pci_map_sg() 677 end = sg + nents; in alpha_pci_map_sg() 731 int nents, enum dma_data_direction dir, in alpha_pci_unmap_sg() argument 757 for (end = sg + nents; sg < end; ++sg) { in alpha_pci_unmap_sg() 771 sg - end + nents, addr, size); in alpha_pci_unmap_sg() 779 sg - end + nents, addr, size); in alpha_pci_unmap_sg() 784 sg - end + nents, addr, size); in alpha_pci_unmap_sg() 803 DBGA("pci_unmap_sg: %ld entries\n", nents - (end - sg)); in alpha_pci_unmap_sg()
|
/linux-4.1.27/drivers/gpu/drm/udl/ |
D | udl_dmabuf.c | 65 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, in udl_detach_dma_buf() 83 int nents, ret; in udl_map_dma_buf() local 126 nents = dma_map_sg(attach->dev, sgt->sgl, sgt->orig_nents, dir); in udl_map_dma_buf() 127 if (!nents) { in udl_map_dma_buf()
|
/linux-4.1.27/arch/nios2/mm/ |
D | dma-mapping.c | 59 int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, in dma_map_sg() argument 66 for_each_sg(sg, sg, nents, i) { in dma_map_sg() 76 return nents; in dma_map_sg()
|
/linux-4.1.27/drivers/staging/android/ion/ |
D | ion_heap.c | 48 for_each_sg(table->sgl, sg, table->nents, i) { in ion_heap_map_kernel() 81 for_each_sg(table->sgl, sg, table->nents, i) { in ion_heap_map_user() 118 static int ion_heap_sglist_zero(struct scatterlist *sgl, unsigned int nents, in ion_heap_sglist_zero() argument 126 for_each_sg_page(sgl, &piter, nents, 0) { in ion_heap_sglist_zero() 151 return ion_heap_sglist_zero(table->sgl, table->nents, pgprot); in ion_heap_buffer_zero()
|
D | ion_chunk_heap.c | 108 dma_sync_sg_for_device(NULL, table->sgl, table->nents, in ion_chunk_heap_free() 111 for_each_sg(table->sgl, sg, table->nents, i) { in ion_chunk_heap_free()
|
D | ion_carveout_heap.c | 119 dma_sync_sg_for_device(NULL, table->sgl, table->nents, in ion_carveout_heap_free()
|
D | ion_test.c | 66 for_each_sg_page(table->sgl, &sg_iter, table->nents, offset_page) { in ion_handle_test_dma()
|
D | ion_system_heap.c | 193 for_each_sg(table->sgl, sg, table->nents, i) in ion_system_heap_free()
|
D | ion.c | 232 for_each_sg(table->sgl, sg, table->nents, i) { in ion_buffer_create() 255 for_each_sg(buffer->sg_table->sgl, sg, buffer->sg_table->nents, i) in ion_buffer_create() 1221 buffer->sg_table->nents, DMA_BIDIRECTIONAL); in ion_sync_for_device()
|
/linux-4.1.27/arch/xtensa/include/asm/ |
D | dma-mapping.h | 55 dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, in dma_map_sg() argument 62 for (i = 0; i < nents; i++, sg++ ) { in dma_map_sg() 69 return nents; in dma_map_sg()
|
/linux-4.1.27/arch/blackfin/include/asm/ |
D | dma-mapping.h | 99 extern int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, 140 dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg, int nents, in dma_sync_sg_for_cpu() argument 148 int nents, enum dma_data_direction dir);
|
/linux-4.1.27/drivers/dma/hsu/ |
D | hsu.c | 80 count = (desc->nents - desc->active) % HSU_DMA_CHAN_NR_DESC; in hsu_dma_chan_start() 183 } else if (desc->active < desc->nents) { in hsu_dma_irq() 197 static struct hsu_dma_desc *hsu_dma_alloc_desc(unsigned int nents) in hsu_dma_alloc_desc() argument 205 desc->sg = kcalloc(nents, sizeof(*desc->sg), GFP_NOWAIT); in hsu_dma_alloc_desc() 241 desc->nents = sg_len; in hsu_dma_prep_slave_sg() 265 for (i = desc->active; i < desc->nents; i++) in hsu_dma_desc_size()
|
D | hsu.h | 70 unsigned int nents; member
|
/linux-4.1.27/drivers/misc/ |
D | tifm_core.c | 298 int tifm_map_sg(struct tifm_dev *sock, struct scatterlist *sg, int nents, in tifm_map_sg() argument 301 return pci_map_sg(to_pci_dev(sock->dev.parent), sg, nents, direction); in tifm_map_sg() 305 void tifm_unmap_sg(struct tifm_dev *sock, struct scatterlist *sg, int nents, in tifm_unmap_sg() argument 308 pci_unmap_sg(to_pci_dev(sock->dev.parent), sg, nents, direction); in tifm_unmap_sg()
|
/linux-4.1.27/drivers/gpu/drm/armada/ |
D | armada_gem.c | 456 if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) { in armada_gem_prime_map_dma_buf() 457 num = sgt->nents; in armada_gem_prime_map_dma_buf() 467 if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) in armada_gem_prime_map_dma_buf() 498 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, dir); in armada_gem_prime_unmap_dma_buf() 502 for_each_sg(sgt->sgl, sg, sgt->nents, i) in armada_gem_prime_unmap_dma_buf() 606 if (dobj->sgt->nents > 1) { in armada_gem_map_import()
|
/linux-4.1.27/drivers/scsi/ |
D | scsi_lib.c | 554 static inline unsigned int scsi_sgtable_index(unsigned short nents) in scsi_sgtable_index() argument 558 BUG_ON(nents > SCSI_MAX_SG_SEGMENTS); in scsi_sgtable_index() 560 if (nents <= 8) in scsi_sgtable_index() 563 index = get_count_order(nents) - 3; in scsi_sgtable_index() 568 static void scsi_sg_free(struct scatterlist *sgl, unsigned int nents) in scsi_sg_free() argument 572 sgp = scsi_sg_pools + scsi_sgtable_index(nents); in scsi_sg_free() 576 static struct scatterlist *scsi_sg_alloc(unsigned int nents, gfp_t gfp_mask) in scsi_sg_alloc() argument 580 sgp = scsi_sg_pools + scsi_sgtable_index(nents); in scsi_sg_alloc() 591 static int scsi_alloc_sgtable(struct scsi_data_buffer *sdb, int nents, bool mq) in scsi_alloc_sgtable() argument 596 BUG_ON(!nents); in scsi_alloc_sgtable() [all …]
|
D | iscsi_tcp.c | 518 sdb->table.nents, offset, in iscsi_sw_tcp_pdu_init()
|
D | libiscsi_tcp.c | 711 sdb->table.nents, in iscsi_tcp_hdr_dissect()
|
D | scsi_debug.c | 913 act_len = sg_copy_from_buffer(sdb->table.sgl, sdb->table.nents, in fill_from_dev_buffer() 2388 ret = func(sdb->table.sgl, sdb->table.nents, in do_device_access() 2395 ret += func(sdb->table.sgl, sdb->table.nents, in do_device_access() 3401 sg_miter_start(&miter, sdb->table.sgl, sdb->table.nents, in resp_xdwriteread()
|
D | scsi_error.c | 946 scmd->sdb.table.nents = scmd->sdb.table.orig_nents = 1; in scsi_eh_prep_cmnd()
|
/linux-4.1.27/drivers/scsi/libfc/ |
D | fc_libfc.c | 113 u32 *nents, size_t *offset, in fc_copy_buffer_to_sglist() argument 128 if (!(*nents)) in fc_copy_buffer_to_sglist() 130 --(*nents); in fc_copy_buffer_to_sglist()
|
D | fc_libfc.h | 136 u32 *nents, size_t *offset,
|
D | fc_lport.c | 156 u32 nents; member 1940 fc_copy_buffer_to_sglist(buf, len, info->sg, &info->nents, in fc_lport_bsg_resp() 2008 info->nents = job->reply_payload.sg_cnt; in fc_lport_els_request() 2069 info->nents = job->reply_payload.sg_cnt; in fc_lport_ct_request()
|
D | fc_fcp.c | 454 u32 nents; in fc_fcp_recv_data() local 492 nents = scsi_sg_count(sc); in fc_fcp_recv_data() 495 copy_len = fc_copy_buffer_to_sglist(buf, len, sg, &nents, in fc_fcp_recv_data() 499 copy_len = fc_copy_buffer_to_sglist(buf, len, sg, &nents, in fc_fcp_recv_data()
|
/linux-4.1.27/drivers/gpu/drm/tegra/ |
D | gem.c | 118 bo->sgt->nents, prot); in tegra_bo_iommu_map() 214 for_each_sg(sgt->sgl, s, sgt->nents, i) in tegra_bo_get_pages() 217 if (dma_map_sg(drm->dev, sgt->sgl, sgt->nents, DMA_TO_DEVICE) == 0) in tegra_bo_get_pages() 352 if (bo->sgt->nents > 1) { in tegra_bo_import() 537 if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) in tegra_gem_prime_map_dma_buf() 563 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, dir); in tegra_gem_prime_unmap_dma_buf()
|
/linux-4.1.27/drivers/dma/ |
D | coh901318_lli.c | 233 struct scatterlist *sgl, unsigned int nents, in coh901318_lli_fill_sg() argument 258 for_each_sg(sgl, sg, nents, i) { in coh901318_lli_fill_sg() 265 } else if (i == nents - 1) in coh901318_lli_fill_sg()
|
D | coh901318.h | 136 struct scatterlist *sg, unsigned int nents,
|
/linux-4.1.27/drivers/gpu/drm/msm/ |
D | msm_iommu.c | 59 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in msm_iommu_map() 94 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in msm_iommu_unmap()
|
D | msm_gem.c | 105 msm_obj->sgt->nents, DMA_BIDIRECTIONAL); in get_pages() 121 msm_obj->sgt->nents, DMA_BIDIRECTIONAL); in put_pages()
|
/linux-4.1.27/arch/powerpc/platforms/ps3/ |
D | system-bus.c | 643 int nents, enum dma_data_direction direction, struct dma_attrs *attrs) in ps3_sb_map_sg() argument 653 for_each_sg(sgl, sg, nents, i) { in ps3_sb_map_sg() 666 return nents; in ps3_sb_map_sg() 671 int nents, in ps3_ioc0_map_sg() argument 680 int nents, enum dma_data_direction direction, struct dma_attrs *attrs) in ps3_sb_unmap_sg() argument 688 int nents, enum dma_data_direction direction, in ps3_ioc0_unmap_sg() argument
|
/linux-4.1.27/drivers/media/v4l2-core/ |
D | videobuf2-dma-sg.c | 150 if (dma_map_sg_attrs(buf->dev, sgt->sgl, sgt->nents, in vb2_dma_sg_alloc() 190 dma_unmap_sg_attrs(buf->dev, sgt->sgl, sgt->nents, in vb2_dma_sg_put() 212 dma_sync_sg_for_device(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir); in vb2_dma_sg_prepare() 224 dma_sync_sg_for_cpu(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir); in vb2_dma_sg_finish() 317 if (dma_map_sg_attrs(buf->dev, sgt->sgl, sgt->nents, in vb2_dma_sg_get_userptr() 354 dma_unmap_sg_attrs(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir, &attrs); in vb2_dma_sg_put_userptr()
|
D | videobuf2-dma-contig.c | 77 for_each_sg(sgt->sgl, s, sgt->nents, i) { in vb2_dc_get_contiguous_size() 123 dma_sync_sg_for_device(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir); in vb2_dc_prepare() 135 dma_sync_sg_for_cpu(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir); in vb2_dc_finish() 677 sgt->nents = dma_map_sg_attrs(buf->dev, sgt->sgl, sgt->orig_nents, in vb2_dc_get_userptr() 679 if (sgt->nents <= 0) { in vb2_dc_get_userptr()
|
D | videobuf2-vmalloc.c | 246 for_each_sg(sgt->sgl, sg, sgt->nents, i) { in vb2_vmalloc_dmabuf_ops_attach()
|
/linux-4.1.27/arch/cris/include/asm/ |
D | dma-mapping.h | 56 dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, in dma_map_sg() argument 60 return nents; in dma_map_sg()
|
/linux-4.1.27/include/rdma/ |
D | ib_verbs.h | 1453 struct scatterlist *sg, int nents, 1456 struct scatterlist *sg, int nents, 2245 struct scatterlist *sg, int nents, in ib_dma_map_sg() argument 2249 return dev->dma_ops->map_sg(dev, sg, nents, direction); in ib_dma_map_sg() 2250 return dma_map_sg(dev->dma_device, sg, nents, direction); in ib_dma_map_sg() 2261 struct scatterlist *sg, int nents, in ib_dma_unmap_sg() argument 2265 dev->dma_ops->unmap_sg(dev, sg, nents, direction); in ib_dma_unmap_sg() 2267 dma_unmap_sg(dev->dma_device, sg, nents, direction); in ib_dma_unmap_sg() 2271 struct scatterlist *sg, int nents, in ib_dma_map_sg_attrs() argument 2275 return dma_map_sg_attrs(dev->dma_device, sg, nents, direction, attrs); in ib_dma_map_sg_attrs() [all …]
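The ib_verbs.h lines above show how the InfiniBand wrappers dispatch: ib_dma_map_sg()/ib_dma_unmap_sg() go through the device's dma_ops when present and otherwise fall back to dma_map_sg()/dma_unmap_sg(). A hedged sketch of the usual caller-side pattern (function name hypothetical):

    #include <linux/dma-mapping.h>
    #include <rdma/ib_verbs.h>

    static int example_ib_map(struct ib_device *ibdev, struct scatterlist *sg,
                              int nents)
    {
            int count = ib_dma_map_sg(ibdev, sg, nents, DMA_BIDIRECTIONAL);

            if (!count)
                    return -EIO;

            /* build work requests from ib_sg_dma_address()/ib_sg_dma_len() */

            ib_dma_unmap_sg(ibdev, sg, nents, DMA_BIDIRECTIONAL);
            return 0;
    }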
|
/linux-4.1.27/arch/parisc/include/asm/ |
D | dma-mapping.h | 16 …int (*map_sg)(struct device *dev, struct scatterlist *sg, int nents, enum dma_data_direction dire… 95 dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, in dma_map_sg() argument 98 return hppa_dma_ops->map_sg(dev, sg, nents, direction); in dma_map_sg()
|
/linux-4.1.27/drivers/gpu/drm/vmwgfx/ |
D | vmwgfx_buffer.c | 336 dma_unmap_sg(dev, vmw_tt->sgt.sgl, vmw_tt->sgt.nents, in vmw_ttm_unmap_from_dma() 338 vmw_tt->sgt.nents = vmw_tt->sgt.orig_nents; in vmw_ttm_unmap_from_dma() 364 vmw_tt->sgt.nents = ret; in vmw_ttm_map_for_dma() 420 if (vsgt->num_pages > vmw_tt->sgt.nents) { in vmw_ttm_map_dma() 423 vmw_tt->sgt.nents); in vmw_ttm_map_dma()
|
/linux-4.1.27/drivers/crypto/ |
D | picoxcell_crypto.c | 295 unsigned nents, mapped_ents; in spacc_sg_to_ddt() local 300 nents = sg_count(payload, nbytes); in spacc_sg_to_ddt() 301 mapped_ents = dma_map_sg(engine->dev, payload, nents, dir); in spacc_sg_to_ddt() 317 dma_unmap_sg(engine->dev, payload, nents, dir); in spacc_sg_to_ddt() 328 unsigned nents = sg_count(areq->src, areq->cryptlen); in spacc_aead_make_ddts() local 350 src_ents = dma_map_sg(engine->dev, areq->src, nents, in spacc_aead_make_ddts() 352 dst_ents = dma_map_sg(engine->dev, areq->dst, nents, in spacc_aead_make_ddts() 355 src_ents = dma_map_sg(engine->dev, areq->src, nents, in spacc_aead_make_ddts() 412 unsigned nents = sg_count(areq->src, areq->cryptlen); in spacc_aead_free_ddts() local 415 dma_unmap_sg(engine->dev, areq->src, nents, DMA_TO_DEVICE); in spacc_aead_free_ddts() [all …]
|
D | mxs-dcp.c | 259 const int nents = sg_nents(req->src); in mxs_dcp_aes_block_crypt() local 289 for_each_sg(req->src, src, nents, i) { in mxs_dcp_aes_block_crypt() 568 const int nents = sg_nents(req->src); in dcp_sha_req_to_buf() local 583 for_each_sg(req->src, src, nents, i) { in dcp_sha_req_to_buf()
|
D | img-hash.c | 97 size_t nents; member 399 ctx->bufcnt = sg_pcopy_to_buffer(ctx->sgfirst, ctx->nents, in img_hash_dma_task() 407 tbc = sg_pcopy_to_buffer(ctx->sgfirst, ctx->nents, in img_hash_dma_task() 639 ctx->nents = sg_nents(ctx->sg); in img_hash_digest()
|
D | bfin_crc.c | 122 static struct scatterlist *sg_get(struct scatterlist *sg_list, unsigned int nents, in sg_get() argument 128 for_each_sg(sg_list, sg, nents, i) in sg_get()
|
D | talitos.c | 738 unsigned int nents, enum dma_data_direction dir, in talitos_map_sg() argument 747 dma_map_sg(dev, sg, nents, dir); in talitos_map_sg() 748 return nents; in talitos_map_sg() 1745 int nents = sg_count(areq->src, nbytes, &chained); in ahash_process_req() local 1746 sg_pcopy_to_buffer(areq->src, nents, in ahash_process_req()
|
/linux-4.1.27/drivers/infiniband/hw/cxgb3/ |
D | cxio_dbg.c | 139 void cxio_dump_rqt(struct cxio_rdev *rdev, u32 hwtid, int nents) in cxio_dump_rqt() argument 142 int size = nents * 64; in cxio_dump_rqt()
|
D | cxio_hal.h | 207 void cxio_dump_rqt(struct cxio_rdev *rdev, u32 hwtid, int nents);
|
/linux-4.1.27/drivers/media/common/saa7146/ |
D | saa7146_core.c | 191 pt->nents = pages; in saa7146_vmalloc_build_pgtable() 192 slen = pci_map_sg(pci,pt->slist,pt->nents,PCI_DMA_FROMDEVICE); in saa7146_vmalloc_build_pgtable() 202 pci_unmap_sg(pci, pt->slist, pt->nents, PCI_DMA_FROMDEVICE); in saa7146_vmalloc_build_pgtable() 216 pci_unmap_sg(pci, pt->slist, pt->nents, PCI_DMA_FROMDEVICE); in saa7146_vfree_destroy_pgtable()
|
/linux-4.1.27/arch/mips/mm/ |
D | dma-default.c | 266 int nents, enum dma_data_direction direction, struct dma_attrs *attrs) in mips_dma_map_sg() argument 270 for (i = 0; i < nents; i++, sg++) { in mips_dma_map_sg() 281 return nents; in mips_dma_map_sg()
|
/linux-4.1.27/include/scsi/ |
D | scsi_cmnd.h | 173 return cmd->sdb.table.nents; in scsi_sg_count() 310 return cmd->prot_sdb ? cmd->prot_sdb->table.nents : 0; in scsi_prot_sg_count()
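scsi_cmnd.h above defines scsi_sg_count() as the nents of the command's data table (and scsi_prot_sg_count() for the protection table). A small sketch of walking a command's data scatterlist, using a hypothetical helper:

    #include <scsi/scsi_cmnd.h>

    /* Sum the segment lengths of a command's data scatterlist. */
    static unsigned int example_scsi_sg_bytes(struct scsi_cmnd *cmd)
    {
            struct scatterlist *sg;
            unsigned int total = 0;
            int i;

            scsi_for_each_sg(cmd, sg, scsi_sg_count(cmd), i)
                    total += sg->length;

            return total;           /* normally equals scsi_bufflen(cmd) */
    }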
|
/linux-4.1.27/arch/powerpc/platforms/cell/ |
D | iommu.c | 619 int nents, enum dma_data_direction direction, in dma_fixed_map_sg() argument 623 return dma_direct_ops.map_sg(dev, sg, nents, direction, attrs); in dma_fixed_map_sg() 626 nents, device_to_mask(dev), in dma_fixed_map_sg() 631 int nents, enum dma_data_direction direction, in dma_fixed_unmap_sg() argument 635 dma_direct_ops.unmap_sg(dev, sg, nents, direction, attrs); in dma_fixed_unmap_sg() 637 ppc_iommu_unmap_sg(cell_get_iommu_table(dev), sg, nents, in dma_fixed_unmap_sg()
|
/linux-4.1.27/drivers/gpu/drm/radeon/ |
D | radeon_ttm.c | 534 unsigned pinned = 0, nents; in radeon_ttm_tt_pin_userptr() local 575 nents = dma_map_sg(rdev->dev, ttm->sg->sgl, ttm->sg->nents, direction); in radeon_ttm_tt_pin_userptr() 576 if (nents != ttm->sg->nents) in radeon_ttm_tt_pin_userptr() 607 dma_unmap_sg(rdev->dev, ttm->sg->sgl, ttm->sg->nents, direction); in radeon_ttm_tt_unpin_userptr() 609 for_each_sg_page(ttm->sg->sgl, &sg_iter, ttm->sg->nents, 0) { in radeon_ttm_tt_unpin_userptr()
|
/linux-4.1.27/drivers/net/wireless/brcm80211/brcmfmac/ |
D | bcmsdh.c | 886 uint nents; in brcmf_sdiod_sgtable_alloc() local 892 nents = max_t(uint, BRCMF_DEFAULT_RXGLOM_SIZE, brcmf_sdiod_txglomsz); in brcmf_sdiod_sgtable_alloc() 893 nents += (nents >> 4) + 1; in brcmf_sdiod_sgtable_alloc() 895 WARN_ON(nents > sdiodev->max_segment_count); in brcmf_sdiod_sgtable_alloc() 897 brcmf_dbg(TRACE, "nents=%d\n", nents); in brcmf_sdiod_sgtable_alloc() 898 err = sg_alloc_table(&sdiodev->sgtable, nents, GFP_KERNEL); in brcmf_sdiod_sgtable_alloc()
|
/linux-4.1.27/drivers/gpu/drm/ |
D | drm_cache.c | 112 for_each_sg_page(st->sgl, &sg_iter, st->nents, 0) in drm_clflush_sg()
|
D | drm_prime.c | 156 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, in drm_gem_map_detach() 204 if (!dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir)) { in drm_gem_map_dma_buf() 720 for_each_sg(sgt->sgl, sg, sgt->nents, count) { in drm_prime_sg_to_page_addr_arrays()
|
D | drm_gem_cma_helper.c | 459 if (sgt->nents != 1) in drm_gem_cma_prime_import_sg_table()
|
/linux-4.1.27/arch/mips/cavium-octeon/ |
D | dma-octeon.c | 138 int nents, enum dma_data_direction direction, struct dma_attrs *attrs) in octeon_dma_map_sg() argument 140 int r = swiotlb_map_sg_attrs(dev, sg, nents, direction, attrs); in octeon_dma_map_sg()
|
/linux-4.1.27/drivers/staging/lustre/lnet/klnds/o2iblnd/ |
D | o2iblnd.h | 915 struct scatterlist *sg, int nents, in kiblnd_dma_map_sg() argument 918 return ib_dma_map_sg(dev, sg, nents, direction); in kiblnd_dma_map_sg() 922 struct scatterlist *sg, int nents, in kiblnd_dma_unmap_sg() argument 925 ib_dma_unmap_sg(dev, sg, nents, direction); in kiblnd_dma_unmap_sg()
|
/linux-4.1.27/drivers/staging/i2o/ |
D | i2o_block.c | 320 int nents; in i2o_block_sglist_alloc() local 324 nents = blk_rq_map_sg(ireq->req->q, ireq->req, ireq->sg_table); in i2o_block_sglist_alloc() 331 ireq->sg_nents = nents; in i2o_block_sglist_alloc() 333 return i2o_dma_map_sg(c, ireq->sg_table, nents, direction, mptr); in i2o_block_sglist_alloc()
|
/linux-4.1.27/drivers/crypto/ux500/hash/ |
D | hash_core.c | 171 ctx->device->dma.sg, ctx->device->dma.nents, in hash_set_dma_transfer() 515 int nents = 0; in hash_get_nents() local 519 nents++; in hash_get_nents() 536 return nents; in hash_get_nents() 925 ctx->device->dma.nents = hash_get_nents(req->src, req->nbytes, NULL); in hash_dma_final() 926 if (!ctx->device->dma.nents) { in hash_dma_final() 929 ret = ctx->device->dma.nents; in hash_dma_final()
|
D | hash_alg.h | 316 int nents; member
|
/linux-4.1.27/drivers/infiniband/ulp/iser/ |
D | iser_memory.c | 65 int i = 0, nents = DIV_ROUND_UP(length, PAGE_SIZE); in iser_alloc_bounce_sg() local 67 sg = kcalloc(nents, sizeof(*sg), GFP_ATOMIC); in iser_alloc_bounce_sg() 71 sg_init_table(sg, nents); in iser_alloc_bounce_sg() 87 data->size = nents; in iser_alloc_bounce_sg()
|
/linux-4.1.27/arch/frv/include/asm/ |
D | dma-mapping.h | 34 extern int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents,
|
/linux-4.1.27/drivers/hsi/ |
D | hsi.c | 541 struct hsi_msg *hsi_alloc_msg(unsigned int nents, gfp_t flags) in hsi_alloc_msg() argument 550 if (!nents) in hsi_alloc_msg() 553 err = sg_alloc_table(&msg->sgt, nents, flags); in hsi_alloc_msg()
|
/linux-4.1.27/arch/m68k/include/asm/ |
D | dma-mapping.h | 96 int nents, enum dma_data_direction dir) in dma_sync_sg_for_cpu() argument
|
/linux-4.1.27/drivers/block/ |
D | nvme-core.c | 410 iod->nents = 0; in iod_init() 619 if (iod->nents) { in req_completion() 620 dma_unmap_sg(&nvmeq->dev->pci_dev->dev, iod->sg, iod->nents, in req_completion() 858 iod->nents = blk_rq_map_sg(req->q, req, iod->sg); in nvme_queue_rq() 859 if (!iod->nents) in nvme_queue_rq() 862 if (!dma_map_sg(nvmeq->q_dmadev, iod->sg, iod->nents, dma_dir)) in nvme_queue_rq() 868 iod->nents, dma_dir); in nvme_queue_rq() 1677 int i, err, count, nents, offset; in nvme_map_user_pages() local 1715 iod->nents = count; in nvme_map_user_pages() 1717 nents = dma_map_sg(&dev->pci_dev->dev, sg, count, in nvme_map_user_pages() [all …]
|
/linux-4.1.27/arch/nios2/include/asm/ |
D | dma-mapping.h | 78 extern int dma_map_sg(struct device *dev, struct scatterlist *sg, int nents,
|
/linux-4.1.27/drivers/infiniband/ulp/isert/ |
D | ib_isert.h | 102 int nents; member
|
D | ib_isert.c | 1652 struct scatterlist *sg, u32 nents, u32 length, u32 offset, in isert_map_data_buf() argument 1665 data->nents = min_t(unsigned int, nents - data->sg_off, in isert_map_data_buf() 1670 data->dma_nents = ib_dma_map_sg(ib_dev, data->sg, data->nents, in isert_map_data_buf() 1678 isert_cmd, data->dma_nents, data->sg, data->nents, data->len); in isert_map_data_buf() 1688 ib_dma_unmap_sg(ib_dev, data->sg, data->nents, data->dma_dir); in isert_unmap_data_buf() 2459 ib_sge = kzalloc(sizeof(struct ib_sge) * data->nents, GFP_KERNEL); in isert_map_rdma() 2467 wr->send_wr_num = DIV_ROUND_UP(data->nents, isert_conn->max_sge); in isert_map_rdma() 2610 fr_desc, mem->nents, mem->offset); in isert_fast_reg_mr() 2612 pagelist_len = isert_map_fr_pagelist(ib_dev, mem->sg, mem->nents, in isert_fast_reg_mr()
|
/linux-4.1.27/Documentation/ |
D | DMA-API.txt | 297 int nents, enum dma_data_direction direction) 300 than <nents> passed in if some elements of the scatter/gather list are 315 int i, count = dma_map_sg(dev, sglist, nents, direction); 323 where nents is the number of entries in the sglist. 330 Then you should loop count times (note: this can be less than nents times) 342 Note: <nents> must be the number you passed in, *not* the number of 387 int nents, enum dma_data_direction dir, 392 int nents, enum dma_data_direction dir, 422 n = dma_map_sg_attrs(dev, sg, nents, DMA_TO_DEVICE, &attr);
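The DMA-API.txt lines above state the central rules for scatterlist mapping: dma_map_sg() may return fewer mapped entries than the nents passed in (entries can be coalesced), the mapped segments are walked with the returned count, and dma_unmap_sg() must be given the original nents, never the returned count. A minimal sketch of that pattern, assuming dev, sglist and nents come from the caller:

    #include <linux/kernel.h>
    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    static int example_map_sg(struct device *dev, struct scatterlist *sglist,
                              int nents)
    {
            struct scatterlist *sg;
            int i, count;

            count = dma_map_sg(dev, sglist, nents, DMA_TO_DEVICE);
            if (!count)
                    return -ENOMEM;

            /* count may be smaller than nents; walk only the mapped entries */
            for_each_sg(sglist, sg, count, i) {
                    dma_addr_t bus = sg_dma_address(sg);
                    unsigned int len = sg_dma_len(sg);

                    pr_debug("segment %d: %pad+%u\n", i, &bus, len);
                    /* program the device with bus/len here */
            }

            /* unmap with the ORIGINAL nents, not the returned count */
            dma_unmap_sg(dev, sglist, nents, DMA_TO_DEVICE);
            return 0;
    }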
|
D | DMA-API-HOWTO.txt | 619 int i, count = dma_map_sg(dev, sglist, nents, direction); 627 where nents is the number of entries in the sglist. 637 Then you should loop count times (note: this can be less than nents times) 643 dma_unmap_sg(dev, sglist, nents, direction); 647 PLEASE NOTE: The 'nents' argument to the dma_unmap_sg call must be 668 dma_sync_sg_for_cpu(dev, sglist, nents, direction); 680 dma_sync_sg_for_device(dev, sglist, nents, direction);
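DMA-API-HOWTO.txt adds the streaming-sync half of the same contract: if the CPU needs to touch the data between transfers, bracket the access with dma_sync_sg_for_cpu() and dma_sync_sg_for_device(), and pass the original nents to the sync and unmap calls, not the count dma_map_sg() returned. A sketch of that reuse pattern under the same assumptions as above:

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    static void example_streaming_sync(struct device *dev,
                                       struct scatterlist *sglist, int nents)
    {
            int count = dma_map_sg(dev, sglist, nents, DMA_FROM_DEVICE);

            if (!count)
                    return;

            /* ... first DMA transfer completes ... */

            dma_sync_sg_for_cpu(dev, sglist, nents, DMA_FROM_DEVICE);
            /* the CPU may now read the buffers behind sglist */

            dma_sync_sg_for_device(dev, sglist, nents, DMA_FROM_DEVICE);
            /* ... hand the buffers back and start the next transfer ... */

            dma_unmap_sg(dev, sglist, nents, DMA_FROM_DEVICE);
    }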
|
/linux-4.1.27/drivers/char/agp/ |
D | intel-gtt.c | 113 st->sgl, st->nents, PCI_DMA_BIDIRECTIONAL)) in intel_gtt_map_memory() 132 st.orig_nents = st.nents = num_sg; in intel_gtt_unmap_memory() 853 for_each_sg(st->sgl, sg, st->nents, i) { in intel_gtt_insert_sg_entries() 920 mem->num_sg = st.nents; in intel_fake_agp_insert_entries()
|
/linux-4.1.27/drivers/hsi/clients/ |
D | cmt_speech.c | 480 msg->sgt.nents = 1; in cs_hsi_peek_on_control_complete() 510 msg->sgt.nents = 0; in cs_hsi_read_on_control() 555 msg->sgt.nents = 1; in cs_hsi_write_on_control() 633 msg->sgt.nents = 1; in cs_hsi_peek_on_data_complete() 676 rxmsg->sgt.nents = 0; in cs_hsi_read_on_data()
|
D | ssi_protocol.c | 186 BUG_ON(msg->sgt.nents != (unsigned int)(skb_shinfo(skb)->nr_frags + 1)); in ssip_skb_to_msg()
|
/linux-4.1.27/drivers/usb/core/ |
D | message.c | 361 int nents, size_t length, gfp_t mem_flags) in usb_sg_init() argument 370 || nents <= 0) in usb_sg_init() 382 io->entries = nents; in usb_sg_init() 416 urb->num_sgs = nents; in usb_sg_init() 424 for_each_sg(sg, sg2, nents, j) in usb_sg_init()
|
D | usb.c | 902 struct scatterlist *sg, int nents) 914 return dma_map_sg(controller, sg, nents,
|
/linux-4.1.27/drivers/usb/misc/ |
D | usbtest.c | 477 static void free_sglist(struct scatterlist *sg, int nents) in free_sglist() argument 483 for (i = 0; i < nents; i++) { in free_sglist() 492 alloc_sglist(int nents, int max, int vary, struct usbtest_dev *dev, int pipe) in alloc_sglist() argument 504 sg = kmalloc_array(nents, sizeof(*sg), GFP_KERNEL); in alloc_sglist() 507 sg_init_table(sg, nents); in alloc_sglist() 509 for (i = 0; i < nents; i++) { in alloc_sglist() 558 int nents in perform_sglist() argument 572 sg, nents, 0, GFP_KERNEL); in perform_sglist()
|
/linux-4.1.27/drivers/media/pci/saa7134/ |
D | saa7134-vbi.c | 134 return saa7134_pgtable_build(dev->pci, &dmaq->pt, dma->sgl, dma->nents, in buffer_prepare()
|
D | saa7134-ts.c | 110 return saa7134_pgtable_build(dev->pci, &dmaq->pt, dma->sgl, dma->nents, in saa7134_ts_buffer_prepare()
|
D | saa7134-video.c | 898 return saa7134_pgtable_build(dev->pci, &dmaq->pt, dma->sgl, dma->nents, in buffer_prepare()
|
/linux-4.1.27/drivers/block/mtip32xx/ |
D | mtip32xx.c | 1759 int nents) in fill_command_sg() argument 1768 for (n = 0; n < nents; n++) { in fill_command_sg() 2379 struct mtip_cmd *command, int nents, in mtip_hw_submit_io() argument 2389 nents = dma_map_sg(&dd->pdev->dev, command->sg, nents, dma_dir); in mtip_hw_submit_io() 2393 command->scatter_ents = nents; in mtip_hw_submit_io() 2423 fill_command_sg(dd, command, nents); in mtip_hw_submit_io() 2431 (nents << 16) | 5 | AHCI_CMD_PREFETCH); in mtip_hw_submit_io() 3698 unsigned int nents; in mtip_submit_request() local 3727 nents = blk_rq_map_sg(hctx->queue, rq, cmd->sg); in mtip_submit_request() 3730 mtip_hw_submit_io(dd, rq, cmd, nents, hctx); in mtip_submit_request()
|
/linux-4.1.27/drivers/hsi/controllers/ |
D | omap_ssi_port.c | 227 err = dma_map_sg(&ssi->device, msg->sgt.sgl, msg->sgt.nents, in ssi_start_dma() 243 err = dma_map_sg(&ssi->device, msg->sgt.sgl, msg->sgt.nents, in ssi_start_dma() 320 if ((msg->sgt.nents) && (msg->sgt.sgl->length > sizeof(u32))) in ssi_start_transfer() 375 if (msg->sgt.nents > 1) in ssi_async() 862 if ((!msg->sgt.nents) || (!msg->sgt.sgl->length)) { in ssi_pio_complete()
|
D | omap_ssi.c | 226 dma_unmap_sg(&ssi->device, msg->sgt.sgl, msg->sgt.nents, dir); in ssi_gdd_complete()
|
/linux-4.1.27/drivers/usb/wusbcore/ |
D | wa-xfer.c | 1025 nents; in wa_xfer_create_subset_sg() local 1048 nents = DIV_ROUND_UP((bytes_to_transfer + in wa_xfer_create_subset_sg() 1053 out_sg = kmalloc((sizeof(struct scatterlist) * nents), GFP_ATOMIC); in wa_xfer_create_subset_sg() 1055 sg_init_table(out_sg, nents); in wa_xfer_create_subset_sg() 1064 nents = 0; in wa_xfer_create_subset_sg() 1084 nents++; in wa_xfer_create_subset_sg() 1090 *out_num_sgs = nents; in wa_xfer_create_subset_sg()
|
/linux-4.1.27/crypto/ |
D | algif_skcipher.c | 494 int nents = 0; in skcipher_all_sg_nents() local 502 nents += sg_nents(sg); in skcipher_all_sg_nents() 504 return nents; in skcipher_all_sg_nents()
|
/linux-4.1.27/include/media/ |
D | saa7146.h | 71 int nents; member
|
/linux-4.1.27/drivers/mmc/host/ |
D | mxcmmc.c | 308 int i, nents; in mxcmci_setup_data() local 340 nents = dma_map_sg(host->dma->device->dev, data->sg, in mxcmci_setup_data() 342 if (nents != data->sg_len) in mxcmci_setup_data()
|
/linux-4.1.27/drivers/media/platform/omap3isp/ |
D | ispstat.c | 165 buf->sgt.nents, DMA_FROM_DEVICE); in isp_stat_buf_sync_for_device() 175 buf->sgt.nents, DMA_FROM_DEVICE); in isp_stat_buf_sync_for_cpu()
|
D | ispccdc.c | 452 req->table.sgt.nents, DMA_TO_DEVICE); in ccdc_lsc_config() 461 req->table.sgt.nents, DMA_TO_DEVICE); in ccdc_lsc_config()
|
/linux-4.1.27/drivers/media/pci/solo6x10/ |
D | solo6x10-v4l2-enc.c | 331 for_each_sg(vbuf->sgl, sg, vbuf->nents, i) { in solo_send_desc() 751 sg_copy_from_buffer(vbuf->sgl, vbuf->nents, in solo_enc_buf_finish() 755 sg_copy_from_buffer(vbuf->sgl, vbuf->nents, in solo_enc_buf_finish()
|
/linux-4.1.27/drivers/iommu/ |
D | iommu.c | 1137 struct scatterlist *sg, unsigned int nents, int prot) in default_iommu_map_sg() argument 1149 for_each_sg(sg, s, nents, i) { in default_iommu_map_sg()
|
/linux-4.1.27/drivers/crypto/ux500/cryp/ |
D | cryp_core.c | 813 int nents = 0; in get_nents() local 818 nents++; in get_nents() 821 return nents; in get_nents()
|
/linux-4.1.27/fs/proc/ |
D | base.c | 2085 unsigned int nents) in proc_pident_lookup() argument 2100 last = &ents[nents - 1]; in proc_pident_lookup() 2118 const struct pid_entry *ents, unsigned int nents) in proc_pident_readdir() argument 2129 if (ctx->pos >= nents + 2) in proc_pident_readdir() 2132 for (p = ents + (ctx->pos - 2); p <= ents + nents - 1; p++) { in proc_pident_readdir()
|
/linux-4.1.27/drivers/crypto/ccp/ |
D | ccp-ops.c | 55 unsigned int nents; member 484 dma_unmap_sg(wa->dma_dev, wa->dma_sg, wa->nents, wa->dma_dir); in ccp_sg_free() 499 wa->nents = sg_nents(sg); in ccp_init_sg_workarea() 513 wa->dma_count = dma_map_sg(dev, sg, wa->nents, dma_dir); in ccp_init_sg_workarea()
|
/linux-4.1.27/drivers/scsi/cxgbi/ |
D | libcxgbi.c | 1719 scsi_in(sc)->table.nents, in task_reserve_itt() 1725 scsi_in(sc)->table.nents); in task_reserve_itt() 2178 sdb->table.sgl, sdb->table.nents, in cxgbi_conn_init_pdu() 2182 sdb->table.nents, tdata->offset, sdb->length); in cxgbi_conn_init_pdu() 2189 sdb->table.nents, tdata->offset, tdata->count); in cxgbi_conn_init_pdu()
|
/linux-4.1.27/drivers/infiniband/hw/ehca/ |
D | ehca_mrmw.c | 2514 int nents, enum dma_data_direction direction) in ehca_dma_map_sg() argument 2519 for_each_sg(sgl, sg, nents, i) { in ehca_dma_map_sg() 2528 return nents; in ehca_dma_map_sg() 2532 int nents, enum dma_data_direction direction) in ehca_dma_unmap_sg() argument
|
/linux-4.1.27/drivers/target/ |
D | target_core_transport.c | 2156 static inline void transport_free_sgl(struct scatterlist *sgl, int nents) in transport_free_sgl() argument 2161 for_each_sg(sgl, sg, nents, count) in transport_free_sgl() 2282 target_alloc_sgl(struct scatterlist **sgl, unsigned int *nents, u32 length, in target_alloc_sgl() argument 2309 *nents = nent; in target_alloc_sgl()
|
/linux-4.1.27/Documentation/usb/ |
D | dma.txt | 98 struct scatterlist *sg, int nents);
|
/linux-4.1.27/net/ceph/ |
D | crypto.c | 134 sgt->nents = sgt->orig_nents = 1; in setup_sgtable()
|
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/ |
D | base.c | 91 for_each_sg(mem->sg->sgl, sg, mem->sg->nents, i) { in nvkm_vm_map_sg_table()
|
/linux-4.1.27/drivers/media/pci/ttpci/ |
D | budget-core.c | 199 pci_dma_sync_sg_for_cpu(budget->dev->pci, budget->pt.slist, budget->pt.nents, PCI_DMA_FROMDEVICE); in vpeirq()
|
D | av7110.c | 1286 pci_dma_sync_sg_for_cpu(budget->dev->pci, budget->pt.slist, budget->pt.nents, PCI_DMA_FROMDEVICE); in vpeirq()
|
/linux-4.1.27/drivers/input/touchscreen/ |
D | sur40.c | 423 sgt->sgl, sgt->nents, sur40_video_format.sizeimage, 0); in sur40_process_video()
|
/linux-4.1.27/drivers/char/ |
D | virtio_console.c | 616 int nents, size_t in_count, in __send_to_port() argument 630 err = virtqueue_add_outbuf(out_vq, sg, nents, data, GFP_ATOMIC); in __send_to_port()
|
/linux-4.1.27/drivers/target/loopback/ |
D | tcm_loop.c | 151 sgl_bidi_count = sdb->table.nents; in tcm_loop_submission_work()
|
/linux-4.1.27/drivers/infiniband/ulp/srp/ |
D | ib_srp.c | 1512 int len, nents, count; in srp_map_data() local 1531 nents = scsi_sg_count(scmnd); in srp_map_data() 1537 count = ib_dma_map_sg(ibdev, scat, nents, scmnd->sc_data_direction); in srp_map_data()
|
/linux-4.1.27/drivers/media/platform/marvell-ccic/ |
D | mcam-core.c | 1208 for_each_sg(sg_table->sgl, sg, sg_table->nents, i) { in mcam_vb_sg_buf_prepare()
|