nents 666 arch/alpha/kernel/pci_iommu.c int nents, enum dma_data_direction dir, nents 681 arch/alpha/kernel/pci_iommu.c if (nents == 1) { nents 690 arch/alpha/kernel/pci_iommu.c end = sg + nents; nents 744 arch/alpha/kernel/pci_iommu.c int nents, enum dma_data_direction dir, nents 770 arch/alpha/kernel/pci_iommu.c for (end = sg + nents; sg < end; ++sg) { nents 784 arch/alpha/kernel/pci_iommu.c sg - end + nents, addr, size); nents 792 arch/alpha/kernel/pci_iommu.c sg - end + nents, addr, size); nents 797 arch/alpha/kernel/pci_iommu.c sg - end + nents, addr, size); nents 816 arch/alpha/kernel/pci_iommu.c DBGA("pci_unmap_sg: %ld entries\n", nents - (end - sg)); nents 118 arch/arm/mm/dma-mapping-nommu.c int nents, enum dma_data_direction dir, nents 124 arch/arm/mm/dma-mapping-nommu.c for_each_sg(sgl, sg, nents, i) { nents 130 arch/arm/mm/dma-mapping-nommu.c return nents; nents 134 arch/arm/mm/dma-mapping-nommu.c int nents, enum dma_data_direction dir, nents 140 arch/arm/mm/dma-mapping-nommu.c for_each_sg(sgl, sg, nents, i) nents 157 arch/arm/mm/dma-mapping-nommu.c int nents, enum dma_data_direction dir) nents 162 arch/arm/mm/dma-mapping-nommu.c for_each_sg(sgl, sg, nents, i) nents 167 arch/arm/mm/dma-mapping-nommu.c int nents, enum dma_data_direction dir) nents 172 arch/arm/mm/dma-mapping-nommu.c for_each_sg(sgl, sg, nents, i) nents 1006 arch/arm/mm/dma-mapping.c int arm_dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, nents 1013 arch/arm/mm/dma-mapping.c for_each_sg(sg, s, nents, i) { nents 1022 arch/arm/mm/dma-mapping.c return nents; nents 1040 arch/arm/mm/dma-mapping.c void arm_dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nents, nents 1048 arch/arm/mm/dma-mapping.c for_each_sg(sg, s, nents, i) nents 1060 arch/arm/mm/dma-mapping.c int nents, enum dma_data_direction dir) nents 1066 arch/arm/mm/dma-mapping.c for_each_sg(sg, s, nents, i) nents 1079 arch/arm/mm/dma-mapping.c int nents, enum dma_data_direction dir) nents 1085 arch/arm/mm/dma-mapping.c for_each_sg(sg, s, nents, i) nents 1658 arch/arm/mm/dma-mapping.c static int __iommu_map_sg(struct device *dev, struct scatterlist *sg, int nents, nents 1668 arch/arm/mm/dma-mapping.c for (i = 1; i < nents; i++) { nents 1717 arch/arm/mm/dma-mapping.c int nents, enum dma_data_direction dir, unsigned long attrs) nents 1719 arch/arm/mm/dma-mapping.c return __iommu_map_sg(dev, sg, nents, dir, attrs, true); nents 1735 arch/arm/mm/dma-mapping.c int nents, enum dma_data_direction dir, unsigned long attrs) nents 1737 arch/arm/mm/dma-mapping.c return __iommu_map_sg(dev, sg, nents, dir, attrs, false); nents 1741 arch/arm/mm/dma-mapping.c int nents, enum dma_data_direction dir, nents 1747 arch/arm/mm/dma-mapping.c for_each_sg(sg, s, nents, i) { nents 1768 arch/arm/mm/dma-mapping.c int nents, enum dma_data_direction dir, nents 1771 arch/arm/mm/dma-mapping.c __iommu_unmap_sg(dev, sg, nents, dir, attrs, true); nents 1784 arch/arm/mm/dma-mapping.c void arm_iommu_unmap_sg(struct device *dev, struct scatterlist *sg, int nents, nents 1788 arch/arm/mm/dma-mapping.c __iommu_unmap_sg(dev, sg, nents, dir, attrs, false); nents 1799 arch/arm/mm/dma-mapping.c int nents, enum dma_data_direction dir) nents 1804 arch/arm/mm/dma-mapping.c for_each_sg(sg, s, nents, i) nents 1817 arch/arm/mm/dma-mapping.c int nents, enum dma_data_direction dir) nents 1822 arch/arm/mm/dma-mapping.c for_each_sg(sg, s, nents, i) nents 387 arch/ia64/hp/common/sba_iommu.c sba_dump_sg( struct ioc *ioc, struct scatterlist *startsg, int nents) nents 389 
arch/ia64/hp/common/sba_iommu.c while (nents-- > 0) { nents 390 arch/ia64/hp/common/sba_iommu.c printk(KERN_DEBUG " %d : DMA %08lx/%05x CPU %p\n", nents, nents 398 arch/ia64/hp/common/sba_iommu.c sba_check_sg( struct ioc *ioc, struct scatterlist *startsg, int nents) nents 401 arch/ia64/hp/common/sba_iommu.c int the_nents = nents; nents 405 arch/ia64/hp/common/sba_iommu.c sba_dump_sg(NULL, startsg, nents); nents 1201 arch/ia64/hp/common/sba_iommu.c int nents) nents 1208 arch/ia64/hp/common/sba_iommu.c while (nents-- > 0) { nents 1215 arch/ia64/hp/common/sba_iommu.c nents, startsg->dma_address, cnt, nents 1219 arch/ia64/hp/common/sba_iommu.c nents, startsg->dma_address, cnt, nents 1296 arch/ia64/hp/common/sba_iommu.c int nents) nents 1307 arch/ia64/hp/common/sba_iommu.c while (nents > 0) { nents 1325 arch/ia64/hp/common/sba_iommu.c while (--nents > 0) { nents 1418 arch/ia64/hp/common/sba_iommu.c int nents, enum dma_data_direction dir, nents 1431 arch/ia64/hp/common/sba_iommu.c int nents, enum dma_data_direction dir, nents 1443 arch/ia64/hp/common/sba_iommu.c DBG_RUN_SG("%s() START %d entries\n", __func__, nents); nents 1450 arch/ia64/hp/common/sba_iommu.c for_each_sg(sglist, sg, nents, filled) { nents 1458 arch/ia64/hp/common/sba_iommu.c if (nents == 1) { nents 1471 arch/ia64/hp/common/sba_iommu.c sba_dump_sg(ioc, sglist, nents); nents 1487 arch/ia64/hp/common/sba_iommu.c coalesced = sba_coalesce_chunks(ioc, dev, sglist, nents); nents 1489 arch/ia64/hp/common/sba_iommu.c sba_unmap_sg_attrs(dev, sglist, nents, dir, attrs); nents 1501 arch/ia64/hp/common/sba_iommu.c filled = sba_fill_pdir(ioc, sglist, nents); nents 1507 arch/ia64/hp/common/sba_iommu.c sba_dump_sg(ioc, sglist, nents); nents 1530 arch/ia64/hp/common/sba_iommu.c int nents, enum dma_data_direction dir, nents 1539 arch/ia64/hp/common/sba_iommu.c __func__, nents, sba_sg_address(sglist), sglist->length); nents 1550 arch/ia64/hp/common/sba_iommu.c while (nents && sglist->dma_length) { nents 1555 arch/ia64/hp/common/sba_iommu.c nents--; nents 1558 arch/ia64/hp/common/sba_iommu.c DBG_RUN_SG("%s() DONE (nents %d)\n", __func__, nents); nents 608 arch/mips/jazz/jazzdma.c int nents, enum dma_data_direction dir, unsigned long attrs) nents 613 arch/mips/jazz/jazzdma.c for_each_sg(sglist, sg, nents, i) { nents 623 arch/mips/jazz/jazzdma.c return nents; nents 627 arch/mips/jazz/jazzdma.c int nents, enum dma_data_direction dir, unsigned long attrs) nents 632 arch/mips/jazz/jazzdma.c for_each_sg(sglist, sg, nents, i) { nents 653 arch/mips/jazz/jazzdma.c struct scatterlist *sgl, int nents, enum dma_data_direction dir) nents 658 arch/mips/jazz/jazzdma.c for_each_sg(sgl, sg, nents, i) nents 663 arch/mips/jazz/jazzdma.c struct scatterlist *sgl, int nents, enum dma_data_direction dir) nents 668 arch/mips/jazz/jazzdma.c for_each_sg(sgl, sg, nents, i) nents 184 arch/powerpc/kernel/dma-iommu.c struct scatterlist *sgl, int nents, enum dma_data_direction dir) nents 187 arch/powerpc/kernel/dma-iommu.c dma_direct_sync_sg_for_cpu(dev, sgl, nents, dir); nents 191 arch/powerpc/kernel/dma-iommu.c struct scatterlist *sgl, int nents, enum dma_data_direction dir) nents 194 arch/powerpc/kernel/dma-iommu.c dma_direct_sync_sg_for_device(dev, sgl, nents, dir); nents 633 arch/powerpc/platforms/ps3/system-bus.c int nents, enum dma_data_direction direction, unsigned long attrs) nents 643 arch/powerpc/platforms/ps3/system-bus.c for_each_sg(sgl, sg, nents, i) { nents 656 arch/powerpc/platforms/ps3/system-bus.c return nents; nents 661 arch/powerpc/platforms/ps3/system-bus.c 
int nents, nents 670 arch/powerpc/platforms/ps3/system-bus.c int nents, enum dma_data_direction direction, unsigned long attrs) nents 678 arch/powerpc/platforms/ps3/system-bus.c int nents, enum dma_data_direction direction, nents 107 arch/powerpc/platforms/pseries/ibmebus.c int nents, enum dma_data_direction direction, nents 113 arch/powerpc/platforms/pseries/ibmebus.c for_each_sg(sgl, sg, nents, i) { nents 118 arch/powerpc/platforms/pseries/ibmebus.c return nents; nents 123 arch/powerpc/platforms/pseries/ibmebus.c int nents, enum dma_data_direction direction, nents 161 arch/sparc/mm/io-unit.c static int iounit_map_sg(struct device *dev, struct scatterlist *sgl, int nents, nents 171 arch/sparc/mm/io-unit.c for_each_sg(sgl, sg, nents, i) { nents 176 arch/sparc/mm/io-unit.c return nents; nents 195 arch/sparc/mm/io-unit.c int nents, enum dma_data_direction dir, unsigned long attrs) nents 203 arch/sparc/mm/io-unit.c for_each_sg(sgl, sg, nents, i) { nents 244 arch/sparc/mm/iommu.c int nents, enum dma_data_direction dir, unsigned long attrs, nents 250 arch/sparc/mm/iommu.c for_each_sg(sgl, sg, nents, j) { nents 258 arch/sparc/mm/iommu.c return nents; nents 262 arch/sparc/mm/iommu.c int nents, enum dma_data_direction dir, unsigned long attrs) nents 265 arch/sparc/mm/iommu.c return __sbus_iommu_map_sg(dev, sgl, nents, dir, attrs, false); nents 269 arch/sparc/mm/iommu.c int nents, enum dma_data_direction dir, unsigned long attrs) nents 271 arch/sparc/mm/iommu.c return __sbus_iommu_map_sg(dev, sgl, nents, dir, attrs, true); nents 294 arch/sparc/mm/iommu.c int nents, enum dma_data_direction dir, unsigned long attrs) nents 299 arch/sparc/mm/iommu.c for_each_sg(sgl, sg, nents, i) { nents 279 arch/x86/kernel/amd_gart_64.c static void gart_unmap_sg(struct device *dev, struct scatterlist *sg, int nents, nents 285 arch/x86/kernel/amd_gart_64.c for_each_sg(sg, s, nents, i) { nents 294 arch/x86/kernel/amd_gart_64.c int nents, int dir) nents 303 arch/x86/kernel/amd_gart_64.c for_each_sg(sg, s, nents, i) { nents 311 arch/x86/kernel/amd_gart_64.c nents = 0; nents 321 arch/x86/kernel/amd_gart_64.c return nents; nents 380 arch/x86/kernel/amd_gart_64.c static int gart_map_sg(struct device *dev, struct scatterlist *sg, int nents, nents 389 arch/x86/kernel/amd_gart_64.c if (nents == 0) nents 400 arch/x86/kernel/amd_gart_64.c for_each_sg(sg, s, nents, i) { nents 440 arch/x86/kernel/amd_gart_64.c if (out < nents) { nents 452 arch/x86/kernel/amd_gart_64.c out = dma_map_sg_nonforce(dev, sg, nents, dir); nents 460 arch/x86/kernel/amd_gart_64.c for_each_sg(sg, s, nents, i) nents 222 crypto/essiv.c int nents; nents 227 crypto/essiv.c nents = sg_nents_for_len(req->src, ssize); nents 228 crypto/essiv.c if (nents < 0) nents 234 crypto/essiv.c if (unlikely(nents > 1)) { nents 496 crypto/testmgr.c unsigned int nents; nents 547 crypto/testmgr.c tsgl->nents = 0; nents 555 crypto/testmgr.c partitions[tsgl->nents].div = &divs[i]; nents 556 crypto/testmgr.c partitions[tsgl->nents].length = len_this_sg; nents 557 crypto/testmgr.c tsgl->nents++; nents 561 crypto/testmgr.c if (tsgl->nents == 0) { nents 562 crypto/testmgr.c partitions[tsgl->nents].div = &divs[0]; nents 563 crypto/testmgr.c partitions[tsgl->nents].length = 0; nents 564 crypto/testmgr.c tsgl->nents++; nents 566 crypto/testmgr.c partitions[tsgl->nents - 1].length += len_remaining; nents 569 crypto/testmgr.c sg_init_table(tsgl->sgl, tsgl->nents); nents 570 crypto/testmgr.c for (i = 0; i < tsgl->nents; i++) { nents 605 crypto/testmgr.c sg_mark_end(&tsgl->sgl[tsgl->nents - 
1]); nents 607 crypto/testmgr.c memcpy(tsgl->sgl_saved, tsgl->sgl, tsgl->nents * sizeof(tsgl->sgl[0])); nents 630 crypto/testmgr.c for (i = 0; i < tsgl->nents; i++) { nents 664 crypto/testmgr.c for (i = 0; i < tsgl->nents; i++) { nents 734 crypto/testmgr.c tsgls->dst.nents = tsgls->src.nents; nents 1138 crypto/testmgr.c if (tsgl->nents != 1) nents 1173 crypto/testmgr.c for (i = 0; i < tsgl->nents; i++) { nents 1174 crypto/testmgr.c if (i + 1 == tsgl->nents && nents 1357 crypto/testmgr.c for (i = 0; i < tsgl->nents; i++) { nents 1459 drivers/block/mtip32xx/mtip32xx.c int nents) nents 1468 drivers/block/mtip32xx/mtip32xx.c for_each_sg(command->sg, sg, nents, n) { nents 2067 drivers/block/mtip32xx/mtip32xx.c unsigned int nents; nents 2070 drivers/block/mtip32xx/mtip32xx.c nents = blk_rq_map_sg(hctx->queue, rq, command->sg); nents 2071 drivers/block/mtip32xx/mtip32xx.c nents = dma_map_sg(&dd->pdev->dev, command->sg, nents, dma_dir); nents 2075 drivers/block/mtip32xx/mtip32xx.c command->scatter_ents = nents; nents 2105 drivers/block/mtip32xx/mtip32xx.c fill_command_sg(dd, command, nents); nents 2114 drivers/block/mtip32xx/mtip32xx.c hdr->opts = cpu_to_le32((nents << 16) | 5 | AHCI_CMD_PREFETCH); nents 114 drivers/char/agp/intel-gtt.c st->sgl, st->nents, PCI_DMA_BIDIRECTIONAL)) nents 133 drivers/char/agp/intel-gtt.c st.orig_nents = st.nents = num_sg; nents 867 drivers/char/agp/intel-gtt.c for_each_sg(st->sgl, sg, st->nents, i) { nents 936 drivers/char/agp/intel-gtt.c mem->num_sg = st.nents; nents 605 drivers/char/virtio_console.c int nents, size_t in_count, nents 619 drivers/char/virtio_console.c err = virtqueue_add_outbuf(out_vq, sg, nents, data, GFP_ATOMIC); nents 176 drivers/crypto/atmel-aes.c int nents; nents 642 drivers/crypto/atmel-aes.c int nents; nents 647 drivers/crypto/atmel-aes.c for (nents = 0; sg; sg = sg_next(sg), ++nents) { nents 655 drivers/crypto/atmel-aes.c dma->nents = nents+1; nents 673 drivers/crypto/atmel-aes.c int nents = dma->nents; nents 678 drivers/crypto/atmel-aes.c while (--nents > 0 && sg) nents 714 drivers/crypto/atmel-aes.c dd->src.nents = 1; nents 720 drivers/crypto/atmel-aes.c dd->dst.nents = 1; nents 729 drivers/crypto/atmel-aes.c dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents, nents 735 drivers/crypto/atmel-aes.c dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents, nents 740 drivers/crypto/atmel-aes.c dd->dst.sg_len = dma_map_sg(dd->dev, dd->dst.sg, dd->dst.nents, nents 743 drivers/crypto/atmel-aes.c dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents, nents 755 drivers/crypto/atmel-aes.c dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents, nents 761 drivers/crypto/atmel-aes.c dma_unmap_sg(dd->dev, dd->dst.sg, dd->dst.nents, nents 767 drivers/crypto/atmel-aes.c dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents, nents 124 drivers/crypto/atmel-sha.c int nents; nents 1462 drivers/crypto/atmel-sha.c int nents; nents 1464 drivers/crypto/atmel-sha.c for (nents = 0; sg; sg = sg_next(sg), ++nents) { nents 1473 drivers/crypto/atmel-sha.c dma->nents = nents + 1; nents 1494 drivers/crypto/atmel-sha.c int nents; nents 1497 drivers/crypto/atmel-sha.c dma_unmap_sg(dd->dev, dma->sg, dma->nents, DMA_TO_DEVICE); nents 1500 drivers/crypto/atmel-sha.c for (nents = 0; nents < dma->nents - 1; ++nents) nents 1528 drivers/crypto/atmel-sha.c sg_len = dma_map_sg(dd->dev, dma->sg, dma->nents, DMA_TO_DEVICE); nents 1559 drivers/crypto/atmel-sha.c dma_unmap_sg(dd->dev, dma->sg, dma->nents, DMA_TO_DEVICE); nents 2159 drivers/crypto/bcm/cipher.c int nents; nents 2169 
drivers/crypto/bcm/cipher.c nents = sg_nents(req->src); nents 2180 drivers/crypto/bcm/cipher.c if (sg_copy_to_buffer(req->src, nents, tmpbuf, req->nbytes) != nents 2253 drivers/crypto/bcm/cipher.c int nents; nents 2263 drivers/crypto/bcm/cipher.c nents = sg_nents(req->src); nents 2278 drivers/crypto/bcm/cipher.c if (sg_copy_to_buffer(req->src, nents, tmpbuf, req->nbytes) != nents 54 drivers/crypto/bcm/util.c unsigned int nents = sg_nents(src); nents 56 drivers/crypto/bcm/util.c copied = sg_pcopy_to_buffer(src, nents, dest, len, skip); nents 60 drivers/crypto/bcm/util.c flow_log("sg with %u entries and skip %u\n", nents, skip); nents 73 drivers/crypto/bcm/util.c unsigned int nents = sg_nents(dest); nents 75 drivers/crypto/bcm/util.c copied = sg_pcopy_from_buffer(dest, nents, src, len, skip); nents 79 drivers/crypto/bcm/util.c flow_log("sg with %u entries and skip %u\n", nents, skip); nents 746 drivers/crypto/caam/caamhash.c struct ahash_request *req, int nents, nents 753 drivers/crypto/caam/caamhash.c if (nents > 1 || first_sg) { nents 756 drivers/crypto/caam/caamhash.c pad_sg_nents(first_sg + nents); nents 81 drivers/crypto/cavium/nitrox/nitrox_aead.c int nents = sg_nents_for_len(src, buflen); nents 84 drivers/crypto/cavium/nitrox/nitrox_aead.c if (nents < 0) nents 85 drivers/crypto/cavium/nitrox/nitrox_aead.c return nents; nents 88 drivers/crypto/cavium/nitrox/nitrox_aead.c nents += 1; nents 90 drivers/crypto/cavium/nitrox/nitrox_aead.c ret = alloc_src_req_buf(nkreq, nents, ivsize); nents 95 drivers/crypto/cavium/nitrox/nitrox_aead.c nitrox_creq_set_src_sg(nkreq, nents, ivsize, src, buflen); nents 103 drivers/crypto/cavium/nitrox/nitrox_aead.c int nents = sg_nents_for_len(dst, buflen); nents 106 drivers/crypto/cavium/nitrox/nitrox_aead.c if (nents < 0) nents 107 drivers/crypto/cavium/nitrox/nitrox_aead.c return nents; nents 110 drivers/crypto/cavium/nitrox/nitrox_aead.c nents += 3; nents 114 drivers/crypto/cavium/nitrox/nitrox_aead.c ret = alloc_dst_req_buf(nkreq, nents); nents 120 drivers/crypto/cavium/nitrox/nitrox_aead.c nitrox_creq_set_dst_sg(nkreq, nents, ivsize, dst, buflen); nents 547 drivers/crypto/cavium/nitrox/nitrox_req.h static inline void *alloc_req_buf(int nents, int extralen, gfp_t gfp) nents 551 drivers/crypto/cavium/nitrox/nitrox_req.h size = sizeof(struct scatterlist) * nents; nents 613 drivers/crypto/cavium/nitrox/nitrox_req.h int nents, int ivsize) nents 617 drivers/crypto/cavium/nitrox/nitrox_req.h nkreq->src = alloc_req_buf(nents, ivsize, creq->gfp); nents 635 drivers/crypto/cavium/nitrox/nitrox_req.h int nents, int ivsize, nents 644 drivers/crypto/cavium/nitrox/nitrox_req.h sg_init_table(sg, nents); nents 659 drivers/crypto/cavium/nitrox/nitrox_req.h int nents) nents 664 drivers/crypto/cavium/nitrox/nitrox_req.h nkreq->dst = alloc_req_buf(nents, extralen, creq->gfp); nents 693 drivers/crypto/cavium/nitrox/nitrox_req.h int nents, int ivsize, nents 702 drivers/crypto/cavium/nitrox/nitrox_req.h sg_init_table(sg, nents); nents 161 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c int i, nents, ret = 0; nents 163 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c nents = dma_map_sg(dev, req->src, sg_nents(req->src), nents 165 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c if (!nents) nents 168 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c for_each_sg(req->src, sg, nents, i) nents 172 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c sr->in.sgmap_cnt = nents; nents 180 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c dma_unmap_sg(dev, req->src, nents, DMA_BIDIRECTIONAL); nents 189 
drivers/crypto/cavium/nitrox/nitrox_reqmgr.c int nents, ret = 0; nents 191 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c nents = dma_map_sg(dev, req->dst, sg_nents(req->dst), nents 193 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c if (!nents) nents 197 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c sr->out.sgmap_cnt = nents; nents 205 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c dma_unmap_sg(dev, req->dst, nents, DMA_BIDIRECTIONAL); nents 140 drivers/crypto/cavium/nitrox/nitrox_skcipher.c int nents = sg_nents(skreq->src) + 1; nents 144 drivers/crypto/cavium/nitrox/nitrox_skcipher.c ret = alloc_src_req_buf(nkreq, nents, ivsize); nents 149 drivers/crypto/cavium/nitrox/nitrox_skcipher.c nitrox_creq_set_src_sg(nkreq, nents, ivsize, skreq->src, nents 158 drivers/crypto/cavium/nitrox/nitrox_skcipher.c int nents = sg_nents(skreq->dst) + 3; nents 164 drivers/crypto/cavium/nitrox/nitrox_skcipher.c ret = alloc_dst_req_buf(nkreq, nents); nents 170 drivers/crypto/cavium/nitrox/nitrox_skcipher.c nitrox_creq_set_dst_sg(nkreq, nents, ivsize, skreq->dst, nents 468 drivers/crypto/ccp/ccp-dev.h int nents; nents 66 drivers/crypto/ccp/ccp-ops.c dma_unmap_sg(wa->dma_dev, wa->dma_sg, wa->nents, wa->dma_dir); nents 81 drivers/crypto/ccp/ccp-ops.c wa->nents = sg_nents_for_len(sg, len); nents 82 drivers/crypto/ccp/ccp-ops.c if (wa->nents < 0) nents 83 drivers/crypto/ccp/ccp-ops.c return wa->nents; nents 97 drivers/crypto/ccp/ccp-ops.c wa->dma_count = dma_map_sg(dev, sg, wa->nents, dma_dir); nents 35 drivers/crypto/ccree/cc_buffer_mgr.c int nents[MAX_NUM_OF_BUFFERS_IN_MLLI]; nents 88 drivers/crypto/ccree/cc_buffer_mgr.c unsigned int nents = 0; nents 93 drivers/crypto/ccree/cc_buffer_mgr.c nents++; nents 101 drivers/crypto/ccree/cc_buffer_mgr.c dev_dbg(dev, "nents %d last bytes %d\n", nents, *lbytes); nents 102 drivers/crypto/ccree/cc_buffer_mgr.c return nents; nents 118 drivers/crypto/ccree/cc_buffer_mgr.c u32 nents; nents 120 drivers/crypto/ccree/cc_buffer_mgr.c nents = sg_nents_for_len(sg, end); nents 121 drivers/crypto/ccree/cc_buffer_mgr.c sg_copy_buffer(sg, nents, (void *)dest, (end - to_skip + 1), to_skip, nents 259 drivers/crypto/ccree/cc_buffer_mgr.c sgl_data->nents[index] = 1; nents 272 drivers/crypto/ccree/cc_buffer_mgr.c unsigned int nents, struct scatterlist *sgl, nents 279 drivers/crypto/ccree/cc_buffer_mgr.c index, nents, sgl, data_len, is_last_table); nents 280 drivers/crypto/ccree/cc_buffer_mgr.c sgl_data->nents[index] = nents; nents 293 drivers/crypto/ccree/cc_buffer_mgr.c unsigned int nbytes, int direction, u32 *nents, nents 298 drivers/crypto/ccree/cc_buffer_mgr.c *nents = cc_get_sgl_nents(dev, sg, nbytes, lbytes); nents 299 drivers/crypto/ccree/cc_buffer_mgr.c if (*nents > max_sg_nents) { nents 300 drivers/crypto/ccree/cc_buffer_mgr.c *nents = 0; nents 302 drivers/crypto/ccree/cc_buffer_mgr.c *nents, max_sg_nents); nents 306 drivers/crypto/ccree/cc_buffer_mgr.c ret = dma_map_sg(dev, sg, *nents, direction); nents 308 drivers/crypto/ccree/cc_buffer_mgr.c *nents = 0; nents 546 drivers/crypto/ccree/cc_buffer_mgr.c sg_virt(req->src), areq_ctx->src.nents, areq_ctx->assoc.nents, nents 648 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->assoc.nents = 0; nents 652 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->assoc.nents); nents 665 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->assoc.nents = mapped_nents; nents 673 drivers/crypto/ccree/cc_buffer_mgr.c (areq_ctx->assoc.nents + 1), nents 688 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->assoc.nents); nents 689 drivers/crypto/ccree/cc_buffer_mgr.c 
cc_add_sg_entry(dev, sg_data, areq_ctx->assoc.nents, req->src, nents 736 drivers/crypto/ccree/cc_buffer_mgr.c cc_add_sg_entry(dev, sg_data, areq_ctx->src.nents, nents 742 drivers/crypto/ccree/cc_buffer_mgr.c cc_is_icv_frag(areq_ctx->src.nents, authsize, nents 766 drivers/crypto/ccree/cc_buffer_mgr.c sg = &areq_ctx->src_sgl[areq_ctx->src.nents - 1]; nents 776 drivers/crypto/ccree/cc_buffer_mgr.c cc_add_sg_entry(dev, sg_data, areq_ctx->src.nents, nents 780 drivers/crypto/ccree/cc_buffer_mgr.c cc_add_sg_entry(dev, sg_data, areq_ctx->dst.nents, nents 786 drivers/crypto/ccree/cc_buffer_mgr.c cc_is_icv_frag(areq_ctx->src.nents, authsize, nents 798 drivers/crypto/ccree/cc_buffer_mgr.c sg = &areq_ctx->src_sgl[areq_ctx->src.nents - 1]; nents 808 drivers/crypto/ccree/cc_buffer_mgr.c cc_add_sg_entry(dev, sg_data, areq_ctx->dst.nents, nents 812 drivers/crypto/ccree/cc_buffer_mgr.c cc_add_sg_entry(dev, sg_data, areq_ctx->src.nents, nents 818 drivers/crypto/ccree/cc_buffer_mgr.c cc_is_icv_frag(areq_ctx->dst.nents, authsize, nents 822 drivers/crypto/ccree/cc_buffer_mgr.c sg = &areq_ctx->dst_sgl[areq_ctx->dst.nents - 1]; nents 891 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->src.nents = src_mapped_nents; nents 934 drivers/crypto/ccree/cc_buffer_mgr.c areq_ctx->dst.nents = dst_mapped_nents; nents 29 drivers/crypto/ccree/cc_buffer_mgr.h unsigned int nents; //sg nents nents 143 drivers/crypto/chelsio/chcr_algo.c int nents = 0; nents 160 drivers/crypto/chelsio/chcr_algo.c nents += DIV_ROUND_UP(less, entlen); nents 165 drivers/crypto/chelsio/chcr_algo.c return nents; nents 384 drivers/crypto/chelsio/chcr_algo.c walk->nents = 0; nents 403 drivers/crypto/chelsio/chcr_algo.c CPL_RX_PHYS_DSGL_NOOFSGENTR_V(walk->nents)); nents 418 drivers/crypto/chelsio/chcr_algo.c j = walk->nents; nents 424 drivers/crypto/chelsio/chcr_algo.c walk->nents = j; nents 434 drivers/crypto/chelsio/chcr_algo.c unsigned int j = walk->nents; nents 471 drivers/crypto/chelsio/chcr_algo.c walk->nents = j; nents 478 drivers/crypto/chelsio/chcr_algo.c walk->nents = 0; nents 488 drivers/crypto/chelsio/chcr_algo.c ULPTX_NSGE_V(walk->nents)); nents 499 drivers/crypto/chelsio/chcr_algo.c if (walk->nents == 0) { nents 509 drivers/crypto/chelsio/chcr_algo.c walk->nents++; nents 534 drivers/crypto/chelsio/chcr_algo.c if (sg && (walk->nents == 0)) { nents 539 drivers/crypto/chelsio/chcr_algo.c walk->nents++; nents 557 drivers/crypto/chelsio/chcr_algo.c walk->nents++; nents 770 drivers/crypto/chelsio/chcr_algo.c int nents; nents 776 drivers/crypto/chelsio/chcr_algo.c nents = sg_nents_xlen(reqctx->dstsg, wrparam->bytes, CHCR_DST_SG_SIZE, nents 778 drivers/crypto/chelsio/chcr_algo.c dst_size = get_space_for_phys_dsgl(nents); nents 781 drivers/crypto/chelsio/chcr_algo.c nents = sg_nents_xlen(reqctx->srcsg, wrparam->bytes, nents 784 drivers/crypto/chelsio/chcr_algo.c (sgl_len(nents) * 8); nents 1501 drivers/crypto/chelsio/chcr_algo.c unsigned int nents = 0, transhdr_len; nents 1511 drivers/crypto/chelsio/chcr_algo.c nents = sg_nents_xlen(req_ctx->hctx_wr.srcsg, param->sg_len, nents 1513 drivers/crypto/chelsio/chcr_algo.c nents += param->bfr_len ? 
1 : 0; nents 1515 drivers/crypto/chelsio/chcr_algo.c param->sg_len, 16) : (sgl_len(nents) * 8); nents 196 drivers/crypto/chelsio/chcr_crypto.h unsigned int nents; nents 205 drivers/crypto/chelsio/chcr_crypto.h unsigned int nents; nents 95 drivers/crypto/img-hash.c size_t nents; nents 399 drivers/crypto/img-hash.c ctx->bufcnt = sg_pcopy_to_buffer(ctx->sgfirst, ctx->nents, nents 407 drivers/crypto/img-hash.c tbc = sg_pcopy_to_buffer(ctx->sgfirst, ctx->nents, nents 665 drivers/crypto/img-hash.c ctx->nents = sg_nents(ctx->sg); nents 35 drivers/crypto/inside-secure/safexcel_hash.c int nents; nents 201 drivers/crypto/inside-secure/safexcel_hash.c if (sreq->nents) { nents 202 drivers/crypto/inside-secure/safexcel_hash.c dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE); nents 203 drivers/crypto/inside-secure/safexcel_hash.c sreq->nents = 0; nents 327 drivers/crypto/inside-secure/safexcel_hash.c req->nents = dma_map_sg(priv->dev, areq->src, nents 331 drivers/crypto/inside-secure/safexcel_hash.c if (!req->nents) { nents 336 drivers/crypto/inside-secure/safexcel_hash.c for_each_sg(areq->src, sg, req->nents, i) { nents 395 drivers/crypto/inside-secure/safexcel_hash.c dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE); nents 194 drivers/crypto/mediatek/mtk-aes.c int nents; nents 199 drivers/crypto/mediatek/mtk-aes.c for (nents = 0; sg; sg = sg_next(sg), ++nents) { nents 207 drivers/crypto/mediatek/mtk-aes.c dma->nents = nents + 1; nents 232 drivers/crypto/mediatek/mtk-aes.c int nents = dma->nents; nents 237 drivers/crypto/mediatek/mtk-aes.c while (--nents > 0 && sg) nents 283 drivers/crypto/mediatek/mtk-aes.c int nents; nents 286 drivers/crypto/mediatek/mtk-aes.c for (nents = 0; nents < slen; ++nents, ssg = sg_next(ssg)) { nents 291 drivers/crypto/mediatek/mtk-aes.c if (nents == 0) { nents 306 drivers/crypto/mediatek/mtk-aes.c for (nents = 0; nents < dlen; ++nents, dsg = sg_next(dsg)) { nents 311 drivers/crypto/mediatek/mtk-aes.c if (nents == 0) nents 347 drivers/crypto/mediatek/mtk-aes.c dma_unmap_sg(cryp->dev, aes->src.sg, aes->src.nents, nents 353 drivers/crypto/mediatek/mtk-aes.c dma_unmap_sg(cryp->dev, aes->dst.sg, aes->dst.nents, nents 359 drivers/crypto/mediatek/mtk-aes.c dma_unmap_sg(cryp->dev, aes->src.sg, aes->src.nents, nents 385 drivers/crypto/mediatek/mtk-aes.c aes->src.nents, nents 392 drivers/crypto/mediatek/mtk-aes.c aes->src.nents, DMA_TO_DEVICE); nents 397 drivers/crypto/mediatek/mtk-aes.c aes->dst.nents, DMA_FROM_DEVICE); nents 399 drivers/crypto/mediatek/mtk-aes.c dma_unmap_sg(cryp->dev, aes->src.sg, aes->src.nents, nents 490 drivers/crypto/mediatek/mtk-aes.c aes->src.nents = 1; nents 496 drivers/crypto/mediatek/mtk-aes.c aes->dst.nents = 1; nents 938 drivers/crypto/mediatek/mtk-aes.c aes->src.nents = 1; nents 944 drivers/crypto/mediatek/mtk-aes.c aes->dst.nents = 1; nents 113 drivers/crypto/mediatek/mtk-platform.h int nents; nents 284 drivers/crypto/mxs-dcp.c const int nents = sg_nents(req->src); nents 316 drivers/crypto/mxs-dcp.c for_each_sg(req->src, src, nents, i) { nents 283 drivers/crypto/picoxcell_crypto.c int nents; nents 285 drivers/crypto/picoxcell_crypto.c nents = sg_nents_for_len(payload, nbytes); nents 286 drivers/crypto/picoxcell_crypto.c if (nents < 0) { nents 290 drivers/crypto/picoxcell_crypto.c mapped_ents = dma_map_sg(engine->dev, payload, nents, dir); nents 306 drivers/crypto/picoxcell_crypto.c dma_unmap_sg(engine->dev, payload, nents, dir); nents 419 drivers/crypto/picoxcell_crypto.c int nents = sg_nents_for_len(areq->src, total); nents 
422 drivers/crypto/picoxcell_crypto.c if (unlikely(nents < 0)) { nents 428 drivers/crypto/picoxcell_crypto.c dma_unmap_sg(engine->dev, areq->src, nents, DMA_TO_DEVICE); nents 429 drivers/crypto/picoxcell_crypto.c nents = sg_nents_for_len(areq->dst, total); nents 430 drivers/crypto/picoxcell_crypto.c if (unlikely(nents < 0)) { nents 434 drivers/crypto/picoxcell_crypto.c dma_unmap_sg(engine->dev, areq->dst, nents, DMA_FROM_DEVICE); nents 436 drivers/crypto/picoxcell_crypto.c dma_unmap_sg(engine->dev, areq->src, nents, DMA_BIDIRECTIONAL); nents 446 drivers/crypto/picoxcell_crypto.c int nents = sg_nents_for_len(payload, nbytes); nents 448 drivers/crypto/picoxcell_crypto.c if (nents < 0) { nents 453 drivers/crypto/picoxcell_crypto.c dma_unmap_sg(req->engine->dev, payload, nents, dir); nents 75 drivers/crypto/qce/dma.c int nents, unsigned long flags, nents 82 drivers/crypto/qce/dma.c if (!sg || !nents) nents 85 drivers/crypto/qce/dma.c desc = dmaengine_prep_slave_sg(chan, sg, nents, dir, flags); nents 140 drivers/crypto/stm32/stm32-hash.c int nents; nents 551 drivers/crypto/stm32/stm32-hash.c rctx->nents = sg_nents(rctx->sg); nents 553 drivers/crypto/stm32/stm32-hash.c if (rctx->nents < 0) nents 564 drivers/crypto/stm32/stm32-hash.c for_each_sg(rctx->sg, tsg, rctx->nents, i) { nents 573 drivers/crypto/stm32/stm32-hash.c rctx->sg, rctx->nents, nents 1990 drivers/crypto/talitos.c int nents; nents 1998 drivers/crypto/talitos.c nents = sg_nents_for_len(areq->src, nbytes); nents 1999 drivers/crypto/talitos.c if (nents < 0) { nents 2001 drivers/crypto/talitos.c return nents; nents 2003 drivers/crypto/talitos.c sg_copy_to_buffer(areq->src, nents, nents 2039 drivers/crypto/talitos.c nents = sg_nents_for_len(areq->src, offset); nents 2040 drivers/crypto/talitos.c if (nents < 0) { nents 2042 drivers/crypto/talitos.c return nents; nents 2044 drivers/crypto/talitos.c sg_copy_to_buffer(areq->src, nents, nents 2053 drivers/crypto/talitos.c nents = sg_nents_for_len(areq->src, nbytes); nents 2054 drivers/crypto/talitos.c if (nents < 0) { nents 2056 drivers/crypto/talitos.c return nents; nents 2058 drivers/crypto/talitos.c sg_pcopy_to_buffer(areq->src, nents, nents 820 drivers/crypto/ux500/cryp/cryp_core.c int nents = 0; nents 825 drivers/crypto/ux500/cryp/cryp_core.c nents++; nents 828 drivers/crypto/ux500/cryp/cryp_core.c return nents; nents 316 drivers/crypto/ux500/hash/hash_alg.h int nents; nents 156 drivers/crypto/ux500/hash/hash_core.c ctx->device->dma.sg, ctx->device->dma.nents, nents 500 drivers/crypto/ux500/hash/hash_core.c int nents = 0; nents 504 drivers/crypto/ux500/hash/hash_core.c nents++; nents 521 drivers/crypto/ux500/hash/hash_core.c return nents; nents 911 drivers/crypto/ux500/hash/hash_core.c ctx->device->dma.nents = hash_get_nents(req->src, req->nbytes, NULL); nents 912 drivers/crypto/ux500/hash/hash_core.c if (!ctx->device->dma.nents) { nents 915 drivers/crypto/ux500/hash/hash_core.c ret = ctx->device->dma.nents; nents 64 drivers/dma-buf/udmabuf.c if (!dma_map_sg(at->dev, sg->sgl, sg->nents, direction)) { nents 80 drivers/dma-buf/udmabuf.c dma_unmap_sg(at->dev, sg->sgl, sg->nents, direction); nents 136 drivers/dma/coh901318.h struct scatterlist *sg, unsigned int nents, nents 233 drivers/dma/coh901318_lli.c struct scatterlist *sgl, unsigned int nents, nents 258 drivers/dma/coh901318_lli.c for_each_sg(sgl, sg, nents, i) { nents 265 drivers/dma/coh901318_lli.c } else if (i == nents - 1) nents 77 drivers/dma/hsu/hsu.c count = desc->nents - desc->active; nents 219 drivers/dma/hsu/hsu.c } else if 
(desc->active < desc->nents) { nents 233 drivers/dma/hsu/hsu.c static struct hsu_dma_desc *hsu_dma_alloc_desc(unsigned int nents) nents 241 drivers/dma/hsu/hsu.c desc->sg = kcalloc(nents, sizeof(*desc->sg), GFP_NOWAIT); nents 279 drivers/dma/hsu/hsu.c desc->nents = sg_len; nents 304 drivers/dma/hsu/hsu.c for (i = desc->active; i < desc->nents; i++) nents 71 drivers/dma/hsu/hsu.h unsigned int nents; nents 112 drivers/fpga/fpga-mgr.c sg_miter_start(&miter, sgt->sgl, sgt->nents, SG_MITER_FROM_SG); nents 127 drivers/fpga/fpga-mgr.c len = sg_copy_to_buffer(sgt->sgl, sgt->nents, buf, nents 191 drivers/fpga/fpga-mgr.c sg_miter_start(&miter, sgt->sgl, sgt->nents, SG_MITER_FROM_SG); nents 400 drivers/fpga/zynq-fpga.c for_each_sg(sgt->sgl, sg, sgt->nents, i) { nents 409 drivers/fpga/zynq-fpga.c dma_map_sg(mgr->dev.parent, sgt->sgl, sgt->nents, DMA_TO_DEVICE); nents 481 drivers/fpga/zynq-fpga.c dma_unmap_sg(mgr->dev.parent, sgt->sgl, sgt->nents, DMA_TO_DEVICE); nents 942 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c unsigned nents; nents 958 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c nents = dma_map_sg(adev->dev, ttm->sg->sgl, ttm->sg->nents, direction); nents 959 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c if (nents != ttm->sg->nents) nents 990 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c dma_unmap_sg(adev->dev, ttm->sg->sgl, ttm->sg->nents, direction); nents 410 drivers/gpu/drm/armada/armada_gem.c if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) { nents 411 drivers/gpu/drm/armada/armada_gem.c num = sgt->nents; nents 421 drivers/gpu/drm/armada/armada_gem.c if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) nents 452 drivers/gpu/drm/armada/armada_gem.c dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, dir); nents 456 drivers/gpu/drm/armada/armada_gem.c for_each_sg(sgt->sgl, sg, sgt->nents, i) nents 553 drivers/gpu/drm/armada/armada_gem.c if (dobj->sgt->nents > 1) { nents 129 drivers/gpu/drm/drm_cache.c for_each_sg_page(st->sgl, &sg_iter, st->nents, 0) nents 474 drivers/gpu/drm/drm_gem_cma_helper.c if (sgt->nents != 1) { nents 480 drivers/gpu/drm/drm_gem_cma_helper.c for_each_sg(sgt->sgl, s, sgt->nents, i) { nents 121 drivers/gpu/drm/drm_gem_shmem_helper.c shmem->sgt->nents, DMA_BIDIRECTIONAL); nents 394 drivers/gpu/drm/drm_gem_shmem_helper.c shmem->sgt->nents, DMA_BIDIRECTIONAL); nents 628 drivers/gpu/drm/drm_gem_shmem_helper.c dma_map_sg(obj->dev->dev, sgt->sgl, sgt->nents, DMA_BIDIRECTIONAL); nents 627 drivers/gpu/drm/drm_prime.c if (!dma_map_sg_attrs(attach->dev, sgt->sgl, sgt->nents, dir, nents 653 drivers/gpu/drm/drm_prime.c dma_unmap_sg_attrs(attach->dev, sgt->sgl, sgt->nents, dir, nents 957 drivers/gpu/drm/drm_prime.c for_each_sg(sgt->sgl, sg, sgt->nents, count) { nents 30 drivers/gpu/drm/etnaviv/etnaviv_gem.c dma_map_sg(dev->dev, sgt->sgl, sgt->nents, DMA_BIDIRECTIONAL); nents 54 drivers/gpu/drm/etnaviv/etnaviv_gem.c dma_unmap_sg(dev->dev, sgt->sgl, sgt->nents, DMA_BIDIRECTIONAL); nents 408 drivers/gpu/drm/etnaviv/etnaviv_gem.c etnaviv_obj->sgt->nents, nents 425 drivers/gpu/drm/etnaviv/etnaviv_gem.c etnaviv_obj->sgt->nents, nents 82 drivers/gpu/drm/etnaviv/etnaviv_mmu.c for_each_sg(sgt->sgl, sg, sgt->nents, i) { nents 116 drivers/gpu/drm/etnaviv/etnaviv_mmu.c for_each_sg(sgt->sgl, sg, sgt->nents, i) { nents 244 drivers/gpu/drm/etnaviv/etnaviv_mmu.c sgt->nents == 1 && !(etnaviv_obj->flags & ETNA_BO_FORCE_MMU)) { nents 399 drivers/gpu/drm/exynos/exynos_drm_g2d.c g2d_userptr->sgt->nents, DMA_BIDIRECTIONAL); nents 514 drivers/gpu/drm/exynos/exynos_drm_g2d.c if 
(!dma_map_sg(to_dma_dev(g2d->drm_dev), sgt->sgl, sgt->nents, nents 503 drivers/gpu/drm/exynos/exynos_drm_gem.c if (sgt->nents == 1) { nents 39 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c ret = sg_alloc_table(st, obj->mm.pages->nents, GFP_KERNEL); nents 45 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c for (i = 0; i < obj->mm.pages->nents; i++) { nents 51 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c if (!dma_map_sg(attachment->dev, st->sgl, st->nents, dir)) { nents 74 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c dma_unmap_sg(attachment->dev, sg->sgl, sg->nents, dir); nents 78 drivers/gpu/drm/i915/gem/i915_gem_internal.c st->nents = 0; nents 99 drivers/gpu/drm/i915/gem/i915_gem_internal.c st->nents++; nents 80 drivers/gpu/drm/i915/gem/i915_gem_shmem.c st->nents = 0; nents 140 drivers/gpu/drm/i915/gem/i915_gem_shmem.c st->nents++; nents 76 drivers/gpu/drm/i915/gem/selftests/huge_pages.c st->nents = 0; nents 99 drivers/gpu/drm/i915/gem/selftests/huge_pages.c st->nents++; nents 201 drivers/gpu/drm/i915/gem/selftests/huge_pages.c st->nents = 0; nents 217 drivers/gpu/drm/i915/gem/selftests/huge_pages.c st->nents++; nents 254 drivers/gpu/drm/i915/gem/selftests/huge_pages.c st->nents = 1; nents 31 drivers/gpu/drm/i915/gem/selftests/mock_dmabuf.c if (!dma_map_sg(attachment->dev, st->sgl, st->nents, dir)) { nents 49 drivers/gpu/drm/i915/gem/selftests/mock_dmabuf.c dma_unmap_sg(attachment->dev, st->sgl, st->nents, dir); nents 595 drivers/gpu/drm/i915/gt/uc/intel_uc_fw.c return sg_pcopy_to_buffer(pages->sgl, pages->nents, dst, size, offset); nents 2148 drivers/gpu/drm/i915/i915_gem_gtt.c pages->sgl, pages->nents, nents 2542 drivers/gpu/drm/i915/i915_gem_gtt.c dma_unmap_sg(kdev, pages->sgl, pages->nents, PCI_DMA_BIDIRECTIONAL); nents 3368 drivers/gpu/drm/i915/i915_gem_gtt.c st->nents++; nents 3404 drivers/gpu/drm/i915/i915_gem_gtt.c st->nents = 0; nents 3449 drivers/gpu/drm/i915/i915_gem_gtt.c st->nents++; nents 3485 drivers/gpu/drm/i915/i915_gem_gtt.c st->nents = 0; nents 3530 drivers/gpu/drm/i915/i915_gem_gtt.c st->nents = 0; nents 3541 drivers/gpu/drm/i915/i915_gem_gtt.c st->nents++; nents 15 drivers/gpu/drm/i915/i915_scatterlist.c if (orig_st->nents == orig_st->orig_nents) nents 18 drivers/gpu/drm/i915/i915_scatterlist.c if (sg_alloc_table(&new_st, orig_st->nents, GFP_KERNEL | __GFP_NOWARN)) nents 22 drivers/gpu/drm/i915/i915_scatterlist.c for_each_sg(orig_st->sgl, sg, orig_st->nents, i) { nents 560 drivers/gpu/drm/i915/selftests/i915_vma.c if (vma->pages->nents > rotated_size(a, b)) { nents 562 drivers/gpu/drm/i915/selftests/i915_vma.c rotated_size(a, b), vma->pages->nents); nents 51 drivers/gpu/drm/i915/selftests/scatterlist.c for_each_sg(pt->st.sgl, sg, pt->st.nents, n) { nents 53 drivers/gpu/drm/i915/selftests/scatterlist.c unsigned int npages = npages_fn(n, pt->st.nents, rnd); nents 89 drivers/gpu/drm/i915/selftests/scatterlist.c for_each_sg_page(pt->st.sgl, &sgiter, pt->st.nents, 0) { nents 259 drivers/gpu/drm/i915/selftests/scatterlist.c pt->st.nents = n; nents 346 drivers/gpu/drm/i915/selftests/scatterlist.c pt.st.nents != prime) { nents 348 drivers/gpu/drm/i915/selftests/scatterlist.c pt.st.nents, pt.st.orig_nents, prime); nents 226 drivers/gpu/drm/mediatek/mtk_drm_gem.c for_each_sg(sg->sgl, s, sg->nents, i) { nents 57 drivers/gpu/drm/msm/msm_gem.c msm_obj->sgt->nents, DMA_BIDIRECTIONAL); nents 60 drivers/gpu/drm/msm/msm_gem.c msm_obj->sgt->nents, DMA_BIDIRECTIONAL); nents 70 drivers/gpu/drm/msm/msm_gem.c msm_obj->sgt->nents, DMA_BIDIRECTIONAL); nents 73 drivers/gpu/drm/msm/msm_gem.c 
msm_obj->sgt->nents, DMA_BIDIRECTIONAL); nents 49 drivers/gpu/drm/msm/msm_gpummu.c for_each_sg(sgt->sgl, sg, sgt->nents, i) { nents 48 drivers/gpu/drm/msm/msm_iommu.c ret = iommu_map_sg(iommu->domain, iova, sgt->sgl, sgt->nents, prot); nents 156 drivers/gpu/drm/omapdrm/omap_gem.c if ((omap_obj->flags & OMAP_BO_MEM_DMABUF) && omap_obj->sgt->nents == 1) nents 45 drivers/gpu/drm/panfrost/panfrost_gem.c bo->sgts[i].nents, DMA_BIDIRECTIONAL); nents 256 drivers/gpu/drm/panfrost/panfrost_mmu.c for_each_sg(sgt->sgl, sgl, sgt->nents, count) { nents 520 drivers/gpu/drm/panfrost/panfrost_mmu.c if (!dma_map_sg(pfdev->dev, sgt->sgl, sgt->nents, DMA_BIDIRECTIONAL)) { nents 489 drivers/gpu/drm/radeon/radeon_ttm.c unsigned pinned = 0, nents; nents 530 drivers/gpu/drm/radeon/radeon_ttm.c nents = dma_map_sg(rdev->dev, ttm->sg->sgl, ttm->sg->nents, direction); nents 531 drivers/gpu/drm/radeon/radeon_ttm.c if (nents != ttm->sg->nents) nents 562 drivers/gpu/drm/radeon/radeon_ttm.c dma_unmap_sg(rdev->dev, ttm->sg->sgl, ttm->sg->nents, direction); nents 564 drivers/gpu/drm/radeon/radeon_ttm.c for_each_sg_page(ttm->sg->sgl, &sg_iter, ttm->sg->nents, 0) { nents 39 drivers/gpu/drm/rockchip/rockchip_drm_gem.c rk_obj->sgt->nents, prot); nents 100 drivers/gpu/drm/rockchip/rockchip_drm_gem.c for_each_sg(rk_obj->sgt->sgl, s, rk_obj->sgt->nents, i) nents 103 drivers/gpu/drm/rockchip/rockchip_drm_gem.c dma_sync_sg_for_device(drm->dev, rk_obj->sgt->sgl, rk_obj->sgt->nents, nents 353 drivers/gpu/drm/rockchip/rockchip_drm_gem.c rk_obj->sgt->nents, DMA_BIDIRECTIONAL); nents 495 drivers/gpu/drm/rockchip/rockchip_drm_gem.c int count = dma_map_sg(drm->dev, sg->sgl, sg->nents, nents 502 drivers/gpu/drm/rockchip/rockchip_drm_gem.c dma_unmap_sg(drm->dev, sg->sgl, sg->nents, nents 139 drivers/gpu/drm/tegra/gem.c bo->sgt->nents, prot); nents 206 drivers/gpu/drm/tegra/gem.c dma_unmap_sg(drm->dev, bo->sgt->sgl, bo->sgt->nents, nents 232 drivers/gpu/drm/tegra/gem.c err = dma_map_sg(drm->dev, bo->sgt->sgl, bo->sgt->nents, nents 363 drivers/gpu/drm/tegra/gem.c if (bo->sgt->nents > 1) { nents 520 drivers/gpu/drm/tegra/gem.c if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) nents 546 drivers/gpu/drm/tegra/gem.c dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, dir); nents 565 drivers/gpu/drm/tegra/gem.c dma_sync_sg_for_cpu(drm->dev, bo->sgt->sgl, bo->sgt->nents, nents 579 drivers/gpu/drm/tegra/gem.c dma_sync_sg_for_device(drm->dev, bo->sgt->sgl, bo->sgt->nents, nents 54 drivers/gpu/drm/udl/udl_dmabuf.c dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, nents 73 drivers/gpu/drm/udl/udl_dmabuf.c int nents, ret; nents 116 drivers/gpu/drm/udl/udl_dmabuf.c nents = dma_map_sg(attach->dev, sgt->sgl, sgt->orig_nents, dir); nents 117 drivers/gpu/drm/udl/udl_dmabuf.c if (!nents) { nents 96 drivers/gpu/drm/v3d/v3d_mmu.c for_each_sg(shmem_obj->sgt->sgl, sgl, shmem_obj->sgt->nents, count) { nents 496 drivers/gpu/drm/virtio/virtgpu_vq.c bo->pages->sgl, bo->pages->nents, nents 517 drivers/gpu/drm/virtio/virtgpu_vq.c uint32_t nents, nents 528 drivers/gpu/drm/virtio/virtgpu_vq.c cmd_p->nr_entries = cpu_to_le32(nents); nents 531 drivers/gpu/drm/virtio/virtgpu_vq.c vbuf->data_size = sizeof(*ents) * nents; nents 903 drivers/gpu/drm/virtio/virtgpu_vq.c bo->pages->sgl, bo->pages->nents, nents 968 drivers/gpu/drm/virtio/virtgpu_vq.c int si, nents; nents 983 drivers/gpu/drm/virtio/virtgpu_vq.c obj->pages->sgl, obj->pages->nents, nents 985 drivers/gpu/drm/virtio/virtgpu_vq.c nents = obj->mapped; nents 987 drivers/gpu/drm/virtio/virtgpu_vq.c nents = 
obj->pages->nents; nents 991 drivers/gpu/drm/virtio/virtgpu_vq.c ents = kmalloc_array(nents, sizeof(struct virtio_gpu_mem_entry), nents 998 drivers/gpu/drm/virtio/virtgpu_vq.c for_each_sg(obj->pages->sgl, sg, nents, si) { nents 1007 drivers/gpu/drm/virtio/virtgpu_vq.c ents, nents, nents 365 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c dma_unmap_sg(dev, vmw_tt->sgt.sgl, vmw_tt->sgt.nents, nents 367 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c vmw_tt->sgt.nents = vmw_tt->sgt.orig_nents; nents 393 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c vmw_tt->sgt.nents = ret; nents 452 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c if (vsgt->num_pages > vmw_tt->sgt.nents) { nents 455 drivers/gpu/drm/vmwgfx/vmwgfx_ttm_buffer.c vmw_tt->sgt.nents); nents 218 drivers/gpu/drm/xen/xen_drm_front_gem.c size, sgt->nents); nents 145 drivers/gpu/host1x/job.c for_each_sg(sgt->sgl, sg, sgt->nents, j) nents 159 drivers/gpu/host1x/job.c sgt->sgl, sgt->nents, IOMMU_READ); nents 472 drivers/hsi/clients/cmt_speech.c msg->sgt.nents = 1; nents 502 drivers/hsi/clients/cmt_speech.c msg->sgt.nents = 0; nents 547 drivers/hsi/clients/cmt_speech.c msg->sgt.nents = 1; nents 625 drivers/hsi/clients/cmt_speech.c msg->sgt.nents = 1; nents 668 drivers/hsi/clients/cmt_speech.c rxmsg->sgt.nents = 0; nents 176 drivers/hsi/clients/ssi_protocol.c BUG_ON(msg->sgt.nents != (unsigned int)(skb_shinfo(skb)->nr_frags + 1)); nents 192 drivers/hsi/controllers/omap_ssi_core.c dma_unmap_sg(&ssi->device, msg->sgt.sgl, msg->sgt.nents, dir); nents 231 drivers/hsi/controllers/omap_ssi_port.c err = dma_map_sg(&ssi->device, msg->sgt.sgl, msg->sgt.nents, nents 248 drivers/hsi/controllers/omap_ssi_port.c err = dma_map_sg(&ssi->device, msg->sgt.sgl, msg->sgt.nents, nents 330 drivers/hsi/controllers/omap_ssi_port.c if ((msg->sgt.nents) && (msg->sgt.sgl->length > sizeof(u32))) nents 386 drivers/hsi/controllers/omap_ssi_port.c if (msg->sgt.nents > 1) nents 905 drivers/hsi/controllers/omap_ssi_port.c if ((!msg->sgt.nents) || (!msg->sgt.sgl->length)) { nents 536 drivers/hsi/hsi_core.c struct hsi_msg *hsi_alloc_msg(unsigned int nents, gfp_t flags) nents 545 drivers/hsi/hsi_core.c if (!nents) nents 548 drivers/hsi/hsi_core.c err = sg_alloc_table(&msg->sgt, nents, flags); nents 54 drivers/hwtracing/intel_th/msu-sink.c unsigned int nents; nents 62 drivers/hwtracing/intel_th/msu-sink.c nents = DIV_ROUND_UP(size, PAGE_SIZE); nents 64 drivers/hwtracing/intel_th/msu-sink.c ret = sg_alloc_table(*sgt, nents, GFP_KERNEL); nents 70 drivers/hwtracing/intel_th/msu-sink.c for_each_sg((*sgt)->sgl, sg_ptr, nents, i) { nents 77 drivers/hwtracing/intel_th/msu-sink.c return nents; nents 87 drivers/hwtracing/intel_th/msu-sink.c for_each_sg(sgt->sgl, sg_ptr, sgt->nents, i) { nents 91 drivers/infiniband/core/rw.c u32 nents = min(sg_cnt, pages_per_mr); nents 100 drivers/infiniband/core/rw.c ret = ib_map_mr_sg(reg->mr, sg, nents, &offset, PAGE_SIZE); nents 101 drivers/infiniband/core/rw.c if (ret < 0 || ret < nents) { nents 136 drivers/infiniband/core/rw.c u32 nents = min(sg_cnt, pages_per_mr); nents 168 drivers/infiniband/core/rw.c sg_cnt -= nents; nents 169 drivers/infiniband/core/rw.c for (j = 0; j < nents; j++) nents 77 drivers/infiniband/core/umem.c int *nents) nents 122 drivers/infiniband/core/umem.c (*nents)++; nents 2828 drivers/infiniband/core/verbs.c struct scatterlist *sglist, unsigned int nents, nents 2833 drivers/infiniband/core/verbs.c biter->__sg_nents = nents; nents 75 drivers/infiniband/hw/usnic/usnic_uiom.c for_each_sg(chunk->page_list, sg, chunk->nents, i) { nents 165 
drivers/infiniband/hw/usnic/usnic_uiom.c chunk->nents = min_t(int, ret, USNIC_UIOM_PAGE_CHUNK); nents 166 drivers/infiniband/hw/usnic/usnic_uiom.c sg_init_table(chunk->page_list, chunk->nents); nents 167 drivers/infiniband/hw/usnic/usnic_uiom.c for_each_sg(chunk->page_list, sg, chunk->nents, i) { nents 174 drivers/infiniband/hw/usnic/usnic_uiom.c cur_base += chunk->nents * PAGE_SIZE; nents 175 drivers/infiniband/hw/usnic/usnic_uiom.c ret -= chunk->nents; nents 176 drivers/infiniband/hw/usnic/usnic_uiom.c off += chunk->nents; nents 264 drivers/infiniband/hw/usnic/usnic_uiom.c for (i = 0; i < chunk->nents; i++, va += PAGE_SIZE) { nents 315 drivers/infiniband/hw/usnic/usnic_uiom.c if (i == chunk->nents) { nents 79 drivers/infiniband/hw/usnic/usnic_uiom.h int nents; nents 417 drivers/infiniband/sw/siw/siw_mem.c int got, nents = min_t(int, num_pages, PAGES_PER_CHUNK); nents 420 drivers/infiniband/sw/siw/siw_mem.c kcalloc(nents, sizeof(struct page *), GFP_KERNEL); nents 426 drivers/infiniband/sw/siw/siw_mem.c while (nents) { nents 429 drivers/infiniband/sw/siw/siw_mem.c rv = get_user_pages(first_page_va, nents, nents 438 drivers/infiniband/sw/siw/siw_mem.c nents -= rv; nents 1806 drivers/infiniband/ulp/srp/ib_srp.c int i, len, nents, count, ret; nents 1829 drivers/infiniband/ulp/srp/ib_srp.c nents = scsi_sg_count(scmnd); nents 1836 drivers/infiniband/ulp/srp/ib_srp.c count = ib_dma_map_sg(ibdev, scat, nents, scmnd->sc_data_direction); nents 913 drivers/infiniband/ulp/srpt/ib_srpt.c ret = target_alloc_sgl(&ctx->sg, &ctx->nents, size, false, nents 919 drivers/infiniband/ulp/srpt/ib_srpt.c ctx->sg, ctx->nents, 0, remote_addr, rkey, dir); nents 921 drivers/infiniband/ulp/srpt/ib_srpt.c target_free_sgl(ctx->sg, ctx->nents); nents 936 drivers/infiniband/ulp/srpt/ib_srpt.c prev_nents = ctx->nents; nents 938 drivers/infiniband/ulp/srpt/ib_srpt.c *sg_cnt += ctx->nents; nents 948 drivers/infiniband/ulp/srpt/ib_srpt.c ctx->sg, ctx->nents, dir); nents 949 drivers/infiniband/ulp/srpt/ib_srpt.c target_free_sgl(ctx->sg, ctx->nents); nents 966 drivers/infiniband/ulp/srpt/ib_srpt.c ctx->sg, ctx->nents, dir); nents 967 drivers/infiniband/ulp/srpt/ib_srpt.c target_free_sgl(ctx->sg, ctx->nents); nents 197 drivers/infiniband/ulp/srpt/ib_srpt.h unsigned int nents; nents 585 drivers/input/touchscreen/sur40.c sgt->sgl, sgt->nents, sur40->pix_fmt.sizeimage, 0); nents 2798 drivers/iommu/arm-smmu-v3.c unsigned int nents = 1 << cmdq->q.llq.max_n_shift; nents 2804 drivers/iommu/arm-smmu-v3.c bitmap = (atomic_long_t *)bitmap_zalloc(nents, GFP_KERNEL); nents 735 drivers/iommu/dma-iommu.c static int __finalise_sg(struct device *dev, struct scatterlist *sg, int nents, nents 743 drivers/iommu/dma-iommu.c for_each_sg(sg, s, nents, i) { nents 788 drivers/iommu/dma-iommu.c static void __invalidate_sg(struct scatterlist *sg, int nents) nents 793 drivers/iommu/dma-iommu.c for_each_sg(sg, s, nents, i) { nents 811 drivers/iommu/dma-iommu.c int nents, enum dma_data_direction dir, unsigned long attrs) nents 824 drivers/iommu/dma-iommu.c iommu_dma_sync_sg_for_device(dev, sg, nents, dir); nents 832 drivers/iommu/dma-iommu.c for_each_sg(sg, s, nents, i) { nents 873 drivers/iommu/dma-iommu.c if (iommu_map_sg(domain, iova, sg, nents, prot) < iova_len) nents 876 drivers/iommu/dma-iommu.c return __finalise_sg(dev, sg, nents, iova); nents 881 drivers/iommu/dma-iommu.c __invalidate_sg(sg, nents); nents 886 drivers/iommu/dma-iommu.c int nents, enum dma_data_direction dir, unsigned long attrs) nents 893 drivers/iommu/dma-iommu.c 
iommu_dma_sync_sg_for_cpu(dev, sg, nents, dir); nents 900 drivers/iommu/dma-iommu.c for_each_sg(sg_next(sg), tmp, nents - 1, i) { nents 1996 drivers/iommu/iommu.c struct scatterlist *sg, unsigned int nents, int prot) nents 2003 drivers/iommu/iommu.c while (i <= nents) { nents 2022 drivers/iommu/iommu.c if (++i < nents) nents 179 drivers/media/common/saa7146/saa7146_core.c pt->nents = pages; nents 180 drivers/media/common/saa7146/saa7146_core.c slen = pci_map_sg(pci,pt->slist,pt->nents,PCI_DMA_FROMDEVICE); nents 190 drivers/media/common/saa7146/saa7146_core.c pci_unmap_sg(pci, pt->slist, pt->nents, PCI_DMA_FROMDEVICE); nents 204 drivers/media/common/saa7146/saa7146_core.c pci_unmap_sg(pci, pt->slist, pt->nents, PCI_DMA_FROMDEVICE); nents 56 drivers/media/common/videobuf2/videobuf2-dma-contig.c for_each_sg(sgt->sgl, s, sgt->nents, i) { nents 311 drivers/media/common/videobuf2/videobuf2-dma-contig.c sgt->nents = dma_map_sg_attrs(db_attach->dev, sgt->sgl, sgt->orig_nents, nents 313 drivers/media/common/videobuf2/videobuf2-dma-contig.c if (!sgt->nents) { nents 532 drivers/media/common/videobuf2/videobuf2-dma-contig.c sgt->nents = dma_map_sg_attrs(buf->dev, sgt->sgl, sgt->orig_nents, nents 534 drivers/media/common/videobuf2/videobuf2-dma-contig.c if (sgt->nents <= 0) { nents 145 drivers/media/common/videobuf2/videobuf2-dma-sg.c sgt->nents = dma_map_sg_attrs(buf->dev, sgt->sgl, sgt->orig_nents, nents 147 drivers/media/common/videobuf2/videobuf2-dma-sg.c if (!sgt->nents) nents 261 drivers/media/common/videobuf2/videobuf2-dma-sg.c sgt->nents = dma_map_sg_attrs(buf->dev, sgt->sgl, sgt->orig_nents, nents 263 drivers/media/common/videobuf2/videobuf2-dma-sg.c if (!sgt->nents) nents 446 drivers/media/common/videobuf2/videobuf2-dma-sg.c sgt->nents = dma_map_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, nents 448 drivers/media/common/videobuf2/videobuf2-dma-sg.c if (!sgt->nents) { nents 233 drivers/media/common/videobuf2/videobuf2-vmalloc.c for_each_sg(sgt->sgl, sg, sgt->nents, i) { nents 295 drivers/media/common/videobuf2/videobuf2-vmalloc.c sgt->nents = dma_map_sg(db_attach->dev, sgt->sgl, sgt->orig_nents, nents 297 drivers/media/common/videobuf2/videobuf2-vmalloc.c if (!sgt->nents) { nents 81 drivers/media/pci/cobalt/cobalt-v4l2.c !s->is_output, sg_desc->nents, size, nents 872 drivers/media/pci/intel/ipu3/ipu3-cio2.c if (sg->nents && sg->sgl) nents 876 drivers/media/pci/intel/ipu3/ipu3-cio2.c for_each_sg_dma_page (sg->sgl, &sg_iter, sg->nents, 0) { nents 101 drivers/media/pci/saa7134/saa7134-ts.c return saa7134_pgtable_build(dev->pci, &dmaq->pt, dma->sgl, dma->nents, nents 124 drivers/media/pci/saa7134/saa7134-vbi.c return saa7134_pgtable_build(dev->pci, &dmaq->pt, dma->sgl, dma->nents, nents 933 drivers/media/pci/saa7134/saa7134-video.c return saa7134_pgtable_build(dev->pci, &dmaq->pt, dma->sgl, dma->nents, nents 322 drivers/media/pci/solo6x10/solo6x10-v4l2-enc.c for_each_sg(vbuf->sgl, sg, vbuf->nents, i) { nents 743 drivers/media/pci/solo6x10/solo6x10-v4l2-enc.c sg_copy_from_buffer(sgt->sgl, sgt->nents, nents 747 drivers/media/pci/solo6x10/solo6x10-v4l2-enc.c sg_copy_from_buffer(sgt->sgl, sgt->nents, nents 1252 drivers/media/pci/ttpci/av7110.c pci_dma_sync_sg_for_cpu(budget->dev->pci, budget->pt.slist, budget->pt.nents, PCI_DMA_FROMDEVICE); nents 183 drivers/media/pci/ttpci/budget-core.c pci_dma_sync_sg_for_cpu(budget->dev->pci, budget->pt.slist, budget->pt.nents, PCI_DMA_FROMDEVICE); nents 197 drivers/media/pci/tw686x/tw686x-video.c for_each_sg(vbuf->sgl, sg, vbuf->nents, i) { nents 617 
drivers/media/platform/marvell-ccic/mcam-core.c sg_table->nents * sizeof(struct mcam_dma_desc)); nents 1260 drivers/media/platform/marvell-ccic/mcam-core.c for_each_sg(sg_table->sgl, sg, sg_table->nents, i) { nents 450 drivers/media/platform/omap3isp/ispccdc.c req->table.sgt.nents, DMA_TO_DEVICE); nents 459 drivers/media/platform/omap3isp/ispccdc.c req->table.sgt.nents, DMA_TO_DEVICE); nents 165 drivers/media/platform/omap3isp/ispstat.c buf->sgt.nents, DMA_FROM_DEVICE); nents 175 drivers/media/platform/omap3isp/ispstat.c buf->sgt.nents, DMA_FROM_DEVICE); nents 1394 drivers/media/platform/pxa_camera.c ret = sg_split(sgt->sgl, sgt->nents, 0, nb_channels, nents 131 drivers/media/platform/rockchip/rga/rga-buf.c for_each_sg(sgt->sgl, sgl, sgt->nents, i) { nents 369 drivers/media/platform/via-camera.c viafb_dma_copy_out_sg(cam->cb_offsets[bufn], sgt->sgl, sgt->nents); nents 915 drivers/media/platform/vsp1/vsp1_drm.c return dma_map_sg_attrs(vsp1->bus_master, sgt->sgl, sgt->nents, nents 924 drivers/media/platform/vsp1/vsp1_drm.c dma_unmap_sg_attrs(vsp1->bus_master, sgt->sgl, sgt->nents, nents 495 drivers/misc/fastrpc.c if (!dma_map_sg(attachment->dev, table->sgl, table->nents, dir)) nents 505 drivers/misc/fastrpc.c dma_unmap_sg(attach->dev, table->sgl, table->nents, dir); nents 3108 drivers/misc/habanalabs/goya/goya.c int nents, enum dma_data_direction dir) nents 3113 drivers/misc/habanalabs/goya/goya.c if (!dma_map_sg(&hdev->pdev->dev, sgl, nents, dir)) nents 3117 drivers/misc/habanalabs/goya/goya.c for_each_sg(sgl, sg, nents, i) nents 3124 drivers/misc/habanalabs/goya/goya.c int nents, enum dma_data_direction dir) nents 3130 drivers/misc/habanalabs/goya/goya.c for_each_sg(sgl, sg, nents, i) nents 3133 drivers/misc/habanalabs/goya/goya.c dma_unmap_sg(&hdev->pdev->dev, sgl, nents, dir); nents 3145 drivers/misc/habanalabs/goya/goya.c for_each_sg(sgt->sgl, sg, sgt->nents, count) { nents 3153 drivers/misc/habanalabs/goya/goya.c while ((count + 1) < sgt->nents) { nents 3201 drivers/misc/habanalabs/goya/goya.c userptr->sgt->nents, dir); nents 3657 drivers/misc/habanalabs/goya/goya.c for_each_sg(sgt->sgl, sg, sgt->nents, count) { nents 3664 drivers/misc/habanalabs/goya/goya.c while ((count + 1) < sgt->nents) { nents 539 drivers/misc/habanalabs/habanalabs.h struct scatterlist *sgl, int nents, nents 543 drivers/misc/habanalabs/habanalabs.h struct scatterlist *sgl, int nents, nents 194 drivers/misc/habanalabs/memory.c userptr->sgt->nents, DMA_BIDIRECTIONAL); nents 673 drivers/misc/habanalabs/memory.c for_each_sg(userptr->sgt->sgl, sg, userptr->sgt->nents, i) { nents 702 drivers/misc/habanalabs/memory.c for_each_sg(userptr->sgt->sgl, sg, userptr->sgt->nents, i) { nents 1338 drivers/misc/habanalabs/memory.c userptr->sgt->nents, nents 188 drivers/misc/mic/host/mic_boot.c int nents, enum dma_data_direction dir, nents 197 drivers/misc/mic/host/mic_boot.c ret = dma_map_sg(&mdev->pdev->dev, sg, nents, dir); nents 201 drivers/misc/mic/host/mic_boot.c for_each_sg(sg, s, nents, i) { nents 207 drivers/misc/mic/host/mic_boot.c return nents; nents 213 drivers/misc/mic/host/mic_boot.c dma_unmap_sg(&mdev->pdev->dev, sg, nents, dir); nents 218 drivers/misc/mic/host/mic_boot.c struct scatterlist *sg, int nents, nents 228 drivers/misc/mic/host/mic_boot.c for_each_sg(sg, s, nents, i) { nents 233 drivers/misc/mic/host/mic_boot.c dma_unmap_sg(&mdev->pdev->dev, sg, nents, dir); nents 66 drivers/misc/mic/scif/scif_debugfs.c for_each_sg(window->st->sgl, sg, window->st->nents, j) nents 232 drivers/misc/mic/scif/scif_rma.c 
window->st->sgl, window->st->nents, nents 539 drivers/misc/mic/scif/scif_rma.c for_each_sg(window->st->sgl, sg, window->st->nents, i) nents 543 drivers/misc/mic/scif/scif_rma.c window->st->nents, DMA_BIDIRECTIONAL); nents 294 drivers/misc/tifm_core.c int tifm_map_sg(struct tifm_dev *sock, struct scatterlist *sg, int nents, nents 297 drivers/misc/tifm_core.c return pci_map_sg(to_pci_dev(sock->dev.parent), sg, nents, direction); nents 301 drivers/misc/tifm_core.c void tifm_unmap_sg(struct tifm_dev *sock, struct scatterlist *sg, int nents, nents 304 drivers/misc/tifm_core.c pci_unmap_sg(to_pci_dev(sock->dev.parent), sg, nents, direction); nents 122 drivers/mmc/core/sdio_ops.c unsigned int nents, left_size, i; nents 151 drivers/mmc/core/sdio_ops.c nents = DIV_ROUND_UP(left_size, seg_size); nents 152 drivers/mmc/core/sdio_ops.c if (nents > 1) { nents 153 drivers/mmc/core/sdio_ops.c if (sg_alloc_table(&sgtable, nents, GFP_KERNEL)) nents 157 drivers/mmc/core/sdio_ops.c data.sg_len = nents; nents 175 drivers/mmc/core/sdio_ops.c if (nents > 1) nents 303 drivers/mmc/host/mxcmmc.c int i, nents; nents 332 drivers/mmc/host/mxcmmc.c nents = dma_map_sg(host->dma->device->dev, data->sg, nents 334 drivers/mmc/host/mxcmmc.c if (nents != data->sg_len) nents 3537 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c adapter->hma.sgt->nents, PCI_DMA_BIDIRECTIONAL); nents 3624 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c sgt->nents = dma_map_sg(adapter->pdev_dev, sgl, sgt->orig_nents, nents 3626 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c if (!sgt->nents) { nents 3634 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c adapter->hma.phy_addr = kcalloc(sgt->nents, sizeof(dma_addr_t), nents 3639 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c for_each_sg(sgl, iter, sgt->nents, i) { nents 3644 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c ncmds = DIV_ROUND_UP(sgt->nents, HMA_MAX_ADDR_IN_CMD); nents 3659 drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c naddr = sgt->nents % HMA_MAX_ADDR_IN_CMD; nents 93 drivers/net/ethernet/chelsio/libcxgb/libcxgb_ppm.h int nents; nents 755 drivers/net/wireless/broadcom/brcm80211/brcmfmac/bcmsdh.c uint nents; nents 771 drivers/net/wireless/broadcom/brcm80211/brcmfmac/bcmsdh.c nents = max_t(uint, BRCMF_DEFAULT_RXGLOM_SIZE, nents 773 drivers/net/wireless/broadcom/brcm80211/brcmfmac/bcmsdh.c nents += (nents >> 4) + 1; nents 775 drivers/net/wireless/broadcom/brcm80211/brcmfmac/bcmsdh.c WARN_ON(nents > sdiodev->max_segment_count); nents 777 drivers/net/wireless/broadcom/brcm80211/brcmfmac/bcmsdh.c brcmf_dbg(TRACE, "nents=%d\n", nents); nents 778 drivers/net/wireless/broadcom/brcm80211/brcmfmac/bcmsdh.c err = sg_alloc_table(&sdiodev->sgtable, nents, GFP_KERNEL); nents 628 drivers/net/wireless/intel/iwlwifi/fw/dbg.c int alloc_size, nents, i; nents 633 drivers/net/wireless/intel/iwlwifi/fw/dbg.c nents = DIV_ROUND_UP(size, PAGE_SIZE); nents 634 drivers/net/wireless/intel/iwlwifi/fw/dbg.c table = kcalloc(nents, sizeof(*table), GFP_KERNEL); nents 637 drivers/net/wireless/intel/iwlwifi/fw/dbg.c sg_init_table(table, nents); nents 90 drivers/nvme/host/fc.c u32 nents; nents 967 drivers/nvme/host/fc.c fc_map_sg(struct scatterlist *sg, int nents) nents 972 drivers/nvme/host/fc.c WARN_ON(nents == 0 || sg[0].length == 0); nents 974 drivers/nvme/host/fc.c for_each_sg(sg, s, nents, i) { nents 980 drivers/nvme/host/fc.c return nents; nents 984 drivers/nvme/host/fc.c fc_dma_map_sg(struct device *dev, struct scatterlist *sg, int nents, nents 987 drivers/nvme/host/fc.c return dev ? 
nents 991 drivers/nvme/host/fc.c fc_dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nents,
nents 995 drivers/nvme/host/fc.c dma_unmap_sg(dev, sg, nents, dir);
nents 2148 drivers/nvme/host/fc.c op->nents = blk_rq_map_sg(rq->q, rq, freq->sg_table.sgl);
nents 2149 drivers/nvme/host/fc.c WARN_ON(op->nents > blk_rq_nr_phys_segments(rq));
nents 2151 drivers/nvme/host/fc.c op->nents, rq_dma_dir(rq));
nents 2173 drivers/nvme/host/fc.c fc_dma_unmap_sg(ctrl->lport->dev, freq->sg_table.sgl, op->nents,
nents 206 drivers/nvme/host/pci.c int nents; /* Used in scatterlist */
nents 537 drivers/nvme/host/pci.c WARN_ON_ONCE(!iod->nents);
nents 540 drivers/nvme/host/pci.c pci_p2pdma_unmap_sg(dev->dev, iod->sg, iod->nents,
nents 543 drivers/nvme/host/pci.c dma_unmap_sg(dev->dev, iod->sg, iod->nents, rq_dma_dir(req));
nents 571 drivers/nvme/host/pci.c static void nvme_print_sgl(struct scatterlist *sgl, int nents)
nents 576 drivers/nvme/host/pci.c for_each_sg(sgl, sg, nents, i) {
nents 672 drivers/nvme/host/pci.c WARN(DO_ONCE(nvme_print_sgl, iod->sg, iod->nents),
nents 674 drivers/nvme/host/pci.c blk_rq_payload_bytes(req), iod->nents);
nents 822 drivers/nvme/host/pci.c iod->nents = blk_rq_map_sg(req->q, req, iod->sg);
nents 823 drivers/nvme/host/pci.c if (!iod->nents)
nents 828 drivers/nvme/host/pci.c iod->nents, rq_dma_dir(req), DMA_ATTR_NO_WARN);
nents 830 drivers/nvme/host/pci.c nr_mapped = dma_map_sg_attrs(dev->dev, iod->sg, iod->nents,
nents 875 drivers/nvme/host/pci.c iod->nents = 0;
nents 61 drivers/nvme/host/rdma.c int nents;
nents 1164 drivers/nvme/host/rdma.c ib_dma_unmap_sg(ibdev, req->sg_table.sgl, req->nents, rq_dma_dir(rq));
nents 1287 drivers/nvme/host/rdma.c req->nents = blk_rq_map_sg(rq->q, rq, req->sg_table.sgl);
nents 1289 drivers/nvme/host/rdma.c count = ib_dma_map_sg(ibdev, req->sg_table.sgl, req->nents,
nents 1319 drivers/nvme/host/rdma.c ib_dma_unmap_sg(ibdev, req->sg_table.sgl, req->nents, rq_dma_dir(rq));
nents 289 drivers/nvme/target/fc.c fc_map_sg(struct scatterlist *sg, int nents)
nents 294 drivers/nvme/target/fc.c WARN_ON(nents == 0 || sg[0].length == 0);
nents 296 drivers/nvme/target/fc.c for_each_sg(sg, s, nents, i) {
nents 302 drivers/nvme/target/fc.c return nents;
nents 306 drivers/nvme/target/fc.c fc_dma_map_sg(struct device *dev, struct scatterlist *sg, int nents,
nents 309 drivers/nvme/target/fc.c return dev ? dma_map_sg(dev, sg, nents, dir) : fc_map_sg(sg, nents);
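
The drivers/nvme/host/pci.c lines above trace the request-based flow: blk_rq_map_sg() fills the iod's scatterlist and yields nents, which then goes to dma_map_sg_attrs(); the mapped count (nr_mapped) can be smaller once an IOMMU coalesces segments. A condensed sketch of that flow under a hypothetical example_map_rq(), with the p2pdma branch and descriptor building elided:

static blk_status_t example_map_rq(struct device *dmadev, struct request *req,
                                   struct scatterlist *sg)
{
        int nents, nr_mapped;

        sg_init_table(sg, blk_rq_nr_phys_segments(req));
        nents = blk_rq_map_sg(req->q, req, sg);
        if (!nents)
                return BLK_STS_IOERR;

        nr_mapped = dma_map_sg_attrs(dmadev, sg, nents,
                                     rq_dma_dir(req), DMA_ATTR_NO_WARN);
        if (!nr_mapped)
                return BLK_STS_RESOURCE;

        /* build hardware descriptors from the nr_mapped segments here */
        return BLK_STS_OK;
}
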
nents 313 drivers/nvme/target/fc.c fc_dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nents,
nents 317 drivers/nvme/target/fc.c dma_unmap_sg(dev, sg, nents, dir);
nents 908 drivers/parisc/ccio-dma.c ccio_map_sg(struct device *dev, struct scatterlist *sglist, int nents,
nents 923 drivers/parisc/ccio-dma.c DBG_RUN_SG("%s() START %d entries\n", __func__, nents);
nents 926 drivers/parisc/ccio-dma.c if (nents == 1) {
nents 934 drivers/parisc/ccio-dma.c for(i = 0; i < nents; i++)
nents 951 drivers/parisc/ccio-dma.c coalesced = iommu_coalesce_chunks(ioc, dev, sglist, nents, ccio_alloc_range);
nents 961 drivers/parisc/ccio-dma.c filled = iommu_fill_pdir(ioc, sglist, nents, hint, ccio_io_pdir_entry);
nents 987 drivers/parisc/ccio-dma.c ccio_unmap_sg(struct device *dev, struct scatterlist *sglist, int nents,
nents 1000 drivers/parisc/ccio-dma.c __func__, nents, sg_virt(sglist), sglist->length);
nents 1006 drivers/parisc/ccio-dma.c while(sg_dma_len(sglist) && nents--) {
nents 1016 drivers/parisc/ccio-dma.c DBG_RUN_SG("%s() DONE (nents %d)\n", __func__, nents);
nents 15 drivers/parisc/iommu-helpers.h iommu_fill_pdir(struct ioc *ioc, struct scatterlist *startsg, int nents,
nents 30 drivers/parisc/iommu-helpers.h while (nents-- > 0) {
nents 34 drivers/parisc/iommu-helpers.h DBG_RUN_SG(" %d : %08lx/%05x %p/%05x\n", nents,
nents 102 drivers/parisc/iommu-helpers.h struct scatterlist *startsg, int nents,
nents 114 drivers/parisc/iommu-helpers.h while (nents > 0) {
nents 131 drivers/parisc/iommu-helpers.h while(--nents > 0) {
nents 273 drivers/parisc/sba_iommu.c sba_dump_sg( struct ioc *ioc, struct scatterlist *startsg, int nents)
nents 275 drivers/parisc/sba_iommu.c while (nents-- > 0) {
nents 277 drivers/parisc/sba_iommu.c nents,
nents 939 drivers/parisc/sba_iommu.c sba_map_sg(struct device *dev, struct scatterlist *sglist, int nents,
nents 946 drivers/parisc/sba_iommu.c DBG_RUN_SG("%s() START %d entries\n", __func__, nents);
nents 953 drivers/parisc/sba_iommu.c if (nents == 1) {
nents 965 drivers/parisc/sba_iommu.c sba_dump_sg(ioc, sglist, nents);
nents 982 drivers/parisc/sba_iommu.c coalesced = iommu_coalesce_chunks(ioc, dev, sglist, nents, sba_alloc_range);
nents 992 drivers/parisc/sba_iommu.c filled = iommu_fill_pdir(ioc, sglist, nents, 0, sba_io_pdir_entry);
nents 1000 drivers/parisc/sba_iommu.c sba_dump_sg(ioc, sglist, nents);
nents 1023 drivers/parisc/sba_iommu.c sba_unmap_sg(struct device *dev, struct scatterlist *sglist, int nents,
nents 1032 drivers/parisc/sba_iommu.c __func__, nents, sg_virt(sglist), sglist->length);
nents 1050 drivers/parisc/sba_iommu.c while (sg_dma_len(sglist) && nents--) {
nents 1061 drivers/parisc/sba_iommu.c DBG_RUN_SG("%s() DONE (nents %d)\n", __func__, nents);
nents 354 drivers/pci/controller/vmd.c static int vmd_map_sg(struct device *dev, struct scatterlist *sg, int nents,
nents 357 drivers/pci/controller/vmd.c return dma_map_sg_attrs(to_vmd_dev(dev), sg, nents, dir, attrs);
nents 360 drivers/pci/controller/vmd.c static void vmd_unmap_sg(struct device *dev, struct scatterlist *sg, int nents,
nents 363 drivers/pci/controller/vmd.c dma_unmap_sg_attrs(to_vmd_dev(dev), sg, nents, dir, attrs);
nents 379 drivers/pci/controller/vmd.c int nents, enum dma_data_direction dir)
nents 381 drivers/pci/controller/vmd.c dma_sync_sg_for_cpu(to_vmd_dev(dev), sg, nents, dir);
nents 385 drivers/pci/controller/vmd.c int nents, enum dma_data_direction dir)
nents 387 drivers/pci/controller/vmd.c dma_sync_sg_for_device(to_vmd_dev(dev), sg, nents, dir);
nents 742 drivers/pci/p2pdma.c unsigned int *nents, u32 length)
nents 758 drivers/pci/p2pdma.c *nents = 1;
nents 816 drivers/pci/p2pdma.c struct device *dev, struct scatterlist *sg, int nents)
nents 832 drivers/pci/p2pdma.c for_each_sg(sg, s, nents, i) {
nents 839 drivers/pci/p2pdma.c return nents;
nents 856 drivers/pci/p2pdma.c int nents, enum dma_data_direction dir, unsigned long attrs)
nents 869 drivers/pci/p2pdma.c return dma_map_sg_attrs(dev, sg, nents, dir, attrs);
nents 871 drivers/pci/p2pdma.c return __pci_p2pdma_map_sg(p2p_pgmap, dev, sg, nents);
nents 889 drivers/pci/p2pdma.c int nents, enum dma_data_direction dir, unsigned long attrs)
nents 904 drivers/pci/p2pdma.c dma_unmap_sg_attrs(dev, sg, nents, dir, attrs);
nents 578 drivers/rapidio/devices/rio_mport_cdev.c req->sgt.sgl, req->sgt.nents, req->dir);
nents 617 drivers/rapidio/devices/rio_mport_cdev.c struct sg_table *sgt, int nents, enum dma_transfer_direction dir,
nents 623 drivers/rapidio/devices/rio_mport_cdev.c tx_data.sg_len = nents;
nents 695 drivers/rapidio/devices/rio_mport_cdev.c enum rio_transfer_sync sync, int nents)
nents 719 drivers/rapidio/devices/rio_mport_cdev.c tx = prep_dma_xfer(chan, xfer, sgt, nents, dir,
nents 819 drivers/rapidio/devices/rio_mport_cdev.c int nents;
nents 935 drivers/rapidio/devices/rio_mport_cdev.c nents = dma_map_sg(chan->device->dev,
nents 936 drivers/rapidio/devices/rio_mport_cdev.c req->sgt.sgl, req->sgt.nents, dir);
nents 937 drivers/rapidio/devices/rio_mport_cdev.c if (nents == 0) {
nents 943 drivers/rapidio/devices/rio_mport_cdev.c ret = do_dma_request(req, xfer, sync, nents);
nents 281 drivers/scsi/be2iscsi/be_main.c unsigned int i, nents;
nents 304 drivers/scsi/be2iscsi/be_main.c nents = 0;
nents 318 drivers/scsi/be2iscsi/be_main.c if (nents == BE_INVLDT_CMD_TBL_SZ) {
nents 335 drivers/scsi/be2iscsi/be_main.c inv_tbl->tbl[nents].cid = beiscsi_conn->beiscsi_conn_cid;
nents 336 drivers/scsi/be2iscsi/be_main.c inv_tbl->tbl[nents].icd = io_task->psgl_handle->sgl_index;
nents 337 drivers/scsi/be2iscsi/be_main.c inv_tbl->task[nents] = task;
nents 338 drivers/scsi/be2iscsi/be_main.c nents++;
nents 344 drivers/scsi/be2iscsi/be_main.c if (!nents)
nents 354 drivers/scsi/be2iscsi/be_main.c if (beiscsi_mgmt_invalidate_icds(phba, &inv_tbl->tbl[0], nents)) {
nents 362 drivers/scsi/be2iscsi/be_main.c for (i = 0; i < nents; i++)
nents 1499 drivers/scsi/be2iscsi/be_mgmt.c unsigned int nents)
nents 1509 drivers/scsi/be2iscsi/be_mgmt.c if (!nents || nents > BE_INVLDT_CMD_TBL_SZ)
nents 1538 drivers/scsi/be2iscsi/be_mgmt.c for (i = 0; i < nents; i++) {
nents 168 drivers/scsi/be2iscsi/be_mgmt.h unsigned int nents);
nents 1223 drivers/scsi/cxgbi/libcxgbi.c *sgcnt = sdb->table.nents;
nents 1314 drivers/scsi/cxgbi/libcxgbi.c static int cxgbi_ddp_sgl_check(struct scatterlist *sgl, int nents)
nents 1317 drivers/scsi/cxgbi/libcxgbi.c int last_sgidx = nents - 1;
nents 1320 drivers/scsi/cxgbi/libcxgbi.c for (i = 0; i < nents; i++, sg = sg_next(sg)) {
nents 1327 drivers/scsi/cxgbi/libcxgbi.c i, nents, sg->offset, sg->length);
nents 1345 drivers/scsi/cxgbi/libcxgbi.c unsigned int sgcnt = ttinfo->nents;
nents 1360 drivers/scsi/cxgbi/libcxgbi.c xferlen, ttinfo->nents);
nents 1447 drivers/scsi/cxgbi/libcxgbi.c dma_unmap_sg(&ppm->pdev->dev, ttinfo->sgl, ttinfo->nents,
nents 1475 drivers/scsi/cxgbi/libcxgbi.c scmd_get_params(sc, &ttinfo->sgl, &ttinfo->nents,
nents 1484 drivers/scsi/cxgbi/libcxgbi.c ttinfo->nents);
nents 1987 drivers/scsi/cxgbi/libcxgbi.c sdb->table.sgl, sdb->table.nents,
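
The rio_mport_cdev.c entries above show the map/use/unmap discipline around the returned count: dma_map_sg() yields the number of coalesced segments, the consumer is handed that count, but the later unmap passes the same nents that was originally given to dma_map_sg(). A sketch of that round trip under a hypothetical example_dma_roundtrip():

static int example_dma_roundtrip(struct device *dev, struct sg_table *sgt,
                                 enum dma_data_direction dir)
{
        struct scatterlist *s;
        int mapped, i;

        mapped = dma_map_sg(dev, sgt->sgl, sgt->orig_nents, dir);
        if (mapped == 0)
                return -EFAULT;

        /* consume only the coalesced segments */
        for_each_sg(sgt->sgl, s, mapped, i) {
                dma_addr_t addr = sg_dma_address(s);
                unsigned int len = sg_dma_len(s);

                /* program addr/len into the hardware here */
                (void)addr; (void)len;
        }

        /* unmap with the count passed to dma_map_sg(), not "mapped" */
        dma_unmap_sg(dev, sgt->sgl, sgt->orig_nents, dir);
        return 0;
}
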
nents 1991 drivers/scsi/cxgbi/libcxgbi.c sdb->table.nents, tdata->offset, sdb->length);
nents 1998 drivers/scsi/cxgbi/libcxgbi.c sdb->table.nents, tdata->offset, tdata->count);
nents 523 drivers/scsi/iscsi_tcp.c sdb->table.nents, offset,
nents 486 drivers/scsi/libfc/fc_fcp.c u32 nents;
nents 524 drivers/scsi/libfc/fc_fcp.c nents = scsi_sg_count(sc);
nents 527 drivers/scsi/libfc/fc_fcp.c copy_len = fc_copy_buffer_to_sglist(buf, len, sg, &nents,
nents 531 drivers/scsi/libfc/fc_fcp.c copy_len = fc_copy_buffer_to_sglist(buf, len, sg, &nents,
nents 101 drivers/scsi/libfc/fc_libfc.c u32 *nents, size_t *offset,
nents 116 drivers/scsi/libfc/fc_libfc.c if (!(*nents))
nents 118 drivers/scsi/libfc/fc_libfc.c --(*nents);
nents 124 drivers/scsi/libfc/fc_libfc.h u32 *nents, size_t *offset,
nents 144 drivers/scsi/libfc/fc_lport.c u32 nents;
nents 1911 drivers/scsi/libfc/fc_lport.c fc_copy_buffer_to_sglist(buf, len, info->sg, &info->nents,
nents 1978 drivers/scsi/libfc/fc_lport.c info->nents = job->reply_payload.sg_cnt;
nents 2038 drivers/scsi/libfc/fc_lport.c info->nents = job->reply_payload.sg_cnt;
nents 714 drivers/scsi/libiscsi_tcp.c sdb->table.nents,
nents 1013 drivers/scsi/scsi_debug.c act_len = sg_copy_from_buffer(sdb->table.sgl, sdb->table.nents,
nents 1037 drivers/scsi/scsi_debug.c act_len = sg_pcopy_from_buffer(sdb->table.sgl, sdb->table.nents,
nents 2503 drivers/scsi/scsi_debug.c ret = sg_copy_buffer(sdb->table.sgl, sdb->table.nents,
nents 2510 drivers/scsi/scsi_debug.c ret += sg_copy_buffer(sdb->table.sgl, sdb->table.nents,
nents 990 drivers/scsi/scsi_error.c scmd->sdb.table.nents = scmd->sdb.table.orig_nents = 1;
nents 556 drivers/scsi/scsi_lib.c if (cmd->sdb.table.nents)
nents 1003 drivers/scsi/scsi_lib.c BUG_ON(count > sdb->table.nents);
nents 1004 drivers/scsi/scsi_lib.c sdb->table.nents = count;
nents 1061 drivers/scsi/scsi_lib.c cmd->prot_sdb->table.nents = count;
nents 231 drivers/spi/spi-at91-usart.c xfer->rx_sg.nents,
nents 240 drivers/spi/spi-at91-usart.c xfer->tx_sg.nents,
nents 767 drivers/spi/spi-atmel.c xfer->rx_sg.nents,
nents 786 drivers/spi/spi-atmel.c xfer->tx_sg.nents,
nents 667 drivers/spi/spi-bcm2835.c unsigned int nents;
nents 677 drivers/spi/spi-bcm2835.c nents = tfr->tx_sg.nents;
nents 683 drivers/spi/spi-bcm2835.c nents = tfr->rx_sg.nents;
nents 688 drivers/spi/spi-bcm2835.c desc = dmaengine_prep_slave_sg(chan, sgl, nents, dir, flags);
nents 631 drivers/spi/spi-davinci.c t->rx_sg.sgl, t->rx_sg.nents, DMA_DEV_TO_MEM,
nents 643 drivers/spi/spi-davinci.c t->tx_sg.nents = t->rx_sg.nents;
nents 647 drivers/spi/spi-davinci.c t->tx_sg.sgl, t->tx_sg.nents, DMA_MEM_TO_DEV,
nents 161 drivers/spi/spi-dw-mid.c xfer->tx_sg.nents,
nents 207 drivers/spi/spi-dw-mid.c xfer->rx_sg.nents,
nents 281 drivers/spi/spi-ep93xx.c int i, ret, nents;
nents 321 drivers/spi/spi-ep93xx.c nents = DIV_ROUND_UP(len, PAGE_SIZE);
nents 322 drivers/spi/spi-ep93xx.c if (nents != sgt->nents) {
nents 325 drivers/spi/spi-ep93xx.c ret = sg_alloc_table(sgt, nents, GFP_KERNEL);
nents 331 drivers/spi/spi-ep93xx.c for_each_sg(sgt->sgl, sg, sgt->nents, i) {
nents 351 drivers/spi/spi-ep93xx.c nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->nents, dir);
nents 352 drivers/spi/spi-ep93xx.c if (!nents)
nents 355 drivers/spi/spi-ep93xx.c txd = dmaengine_prep_slave_sg(chan, sgt->sgl, nents, conf.direction,
nents 358 drivers/spi/spi-ep93xx.c dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir);
nents 387 drivers/spi/spi-ep93xx.c dma_unmap_sg(chan->device->dev, sgt->sgl, sgt->nents, dir);
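
The scsi_debug.c entries above fill a command's scatterlist from a linear buffer with the sg_copy_from_buffer() family, whose declarations appear under include/linux/scatterlist.h later in this listing. A minimal sketch of that use, with a hypothetical example_fill_resp() built on the scsi_sglist()/scsi_sg_count() accessors (scsi_sg_count() returns cmd->sdb.table.nents, as the include/scsi/scsi_cmnd.h entry below shows):

static int example_fill_resp(struct scsi_cmnd *cmd, const u8 *buf, size_t len)
{
        size_t copied;

        /* scatter the linear response buffer across the command's sg list */
        copied = sg_copy_from_buffer(scsi_sglist(cmd), scsi_sg_count(cmd),
                                     buf, len);
        return copied == len ? 0 : -EIO;
}
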
nents 580 drivers/spi/spi-fsl-lpspi.c rx->sgl, rx->nents, DMA_DEV_TO_MEM,
nents 592 drivers/spi/spi-fsl-lpspi.c tx->sgl, tx->nents, DMA_MEM_TO_DEV,
nents 336 drivers/spi/spi-img-spfi.c xfer->rx_sg.nents,
nents 360 drivers/spi/spi-img-spfi.c xfer->tx_sg.nents,
nents 1341 drivers/spi/spi-imx.c struct scatterlist *last_sg = sg_last(rx->sgl, rx->nents);
nents 1372 drivers/spi/spi-imx.c rx->sgl, rx->nents, DMA_DEV_TO_MEM,
nents 1384 drivers/spi/spi-imx.c tx->sgl, tx->nents, DMA_MEM_TO_DEV,
nents 410 drivers/spi/spi-omap2-mcspi.c xfer->tx_sg.nents,
nents 491 drivers/spi/spi-omap2-mcspi.c ret = sg_split(xfer->rx_sg.sgl, xfer->rx_sg.nents,
nents 265 drivers/spi/spi-pic32-sqi.c int nents, i;
nents 277 drivers/spi/spi-pic32-sqi.c nents = xfer->rx_sg.nents;
nents 281 drivers/spi/spi-pic32-sqi.c nents = xfer->tx_sg.nents;
nents 296 drivers/spi/spi-pic32-sqi.c for_each_sg(sgl, sg, nents, i) {
nents 314 drivers/spi/spi-pic32.c xfer->rx_sg.nents,
nents 324 drivers/spi/spi-pic32.c xfer->tx_sg.nents,
nents 525 drivers/spi/spi-pic32.c if (transfer->rx_sg.nents && transfer->tx_sg.nents) {
nents 799 drivers/spi/spi-pl022.c pl022->sgt_tx.nents, DMA_TO_DEVICE);
nents 801 drivers/spi/spi-pl022.c pl022->sgt_rx.nents, DMA_FROM_DEVICE);
nents 826 drivers/spi/spi-pl022.c pl022->sgt_rx.nents,
nents 829 drivers/spi/spi-pl022.c for_each_sg(pl022->sgt_rx.sgl, sg, pl022->sgt_rx.nents, i) {
nents 839 drivers/spi/spi-pl022.c for_each_sg(pl022->sgt_tx.sgl, sg, pl022->sgt_tx.nents, i) {
nents 875 drivers/spi/spi-pl022.c for_each_sg(sgtab->sgl, sg, sgtab->nents, i) {
nents 896 drivers/spi/spi-pl022.c for_each_sg(sgtab->sgl, sg, sgtab->nents, i) {
nents 1051 drivers/spi/spi-pl022.c pl022->sgt_rx.nents, DMA_FROM_DEVICE);
nents 1056 drivers/spi/spi-pl022.c pl022->sgt_tx.nents, DMA_TO_DEVICE);
nents 1095 drivers/spi/spi-pl022.c pl022->sgt_tx.nents, DMA_TO_DEVICE);
nents 1098 drivers/spi/spi-pl022.c pl022->sgt_rx.nents, DMA_FROM_DEVICE);
nents 118 drivers/spi/spi-pxa2xx-dma.c return dmaengine_prep_slave_sg(chan, sgt->sgl, sgt->nents, dir,
nents 390 drivers/spi/spi-qup.c unsigned int nents, enum dma_transfer_direction dir,
nents 404 drivers/spi/spi-qup.c desc = dmaengine_prep_slave_sg(chan, sgl, nents, dir, flags);
nents 426 drivers/spi/spi-qup.c u32 *nents)
nents 439 drivers/spi/spi-qup.c (*nents)++;
nents 405 drivers/spi/spi-rockchip.c xfer->rx_sg.sgl, xfer->rx_sg.nents,
nents 427 drivers/spi/spi-rockchip.c xfer->tx_sg.sgl, xfer->tx_sg.nents,
nents 533 drivers/spi/spi-rspi.c rx->nents, DMA_DEV_TO_MEM,
nents 553 drivers/spi/spi-rspi.c tx->nents, DMA_MEM_TO_DEV,
nents 294 drivers/spi/spi-s3c64xx.c desc = dmaengine_prep_slave_sg(dma->ch, sgt->sgl, sgt->nents,
nents 504 drivers/spi/spi-sprd.c desc = dmaengine_prep_slave_sg(dma_chan, sg->sgl, sg->nents, dir, flags);
nents 226 drivers/spi/spi-stm32-qspi.c desc = dmaengine_prep_slave_sg(dma_ch, sgt.sgl, sgt.nents,
nents 247 drivers/spi/spi-stm32-qspi.c t_out = sgt.nents * STM32_COMP_TIMEOUT_MS;
nents 1294 drivers/spi/spi-stm32.c xfer->rx_sg.nents,
nents 1306 drivers/spi/spi-stm32.c xfer->tx_sg.nents,
nents 468 drivers/spi/spi-ti-qspi.c for_each_sg(rx_sg.sgl, sg, rx_sg.nents, i) {
nents 874 drivers/spi/spi.c ret = dma_map_sg(dev, sgt->sgl, sgt->nents, dir);
nents 882 drivers/spi/spi.c sgt->nents = ret;
nents 150 drivers/staging/android/ion/ion.c ret = sg_alloc_table(new_table, table->nents, GFP_KERNEL);
nents 157 drivers/staging/android/ion/ion.c for_each_sg(table->sgl, sg, table->nents, i) {
nents 230 drivers/staging/android/ion/ion.c if (!dma_map_sg(attachment->dev, table->sgl, table->nents,
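
The SPI driver entries above, spi-ep93xx.c in particular, show the canonical dmaengine pairing: map the sg_table for the channel's device, then hand the mapped count to dmaengine_prep_slave_sg(). A sketch of that pairing under a hypothetical example_prep_sg(); the flag choice is an assumption typical of slave transfers:

static struct dma_async_tx_descriptor *
example_prep_sg(struct dma_chan *chan, struct sg_table *sgt,
                enum dma_transfer_direction dir)
{
        enum dma_data_direction map_dir =
                dir == DMA_MEM_TO_DEV ? DMA_TO_DEVICE : DMA_FROM_DEVICE;
        int nents;

        nents = dma_map_sg(chan->device->dev, sgt->sgl, sgt->orig_nents,
                           map_dir);
        if (!nents)
                return NULL;

        /* the dmaengine consumes the mapped segment count, not orig_nents */
        return dmaengine_prep_slave_sg(chan, sgt->sgl, nents, dir,
                                       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
}
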
nents 241 drivers/staging/android/ion/ion.c dma_unmap_sg(attachment->dev, table->sgl, table->nents, direction);
nents 312 drivers/staging/android/ion/ion.c dma_sync_sg_for_cpu(a->dev, a->table->sgl, a->table->nents,
nents 335 drivers/staging/android/ion/ion.c dma_sync_sg_for_device(a->dev, a->table->sgl, a->table->nents,
nents 41 drivers/staging/android/ion/ion_heap.c for_each_sg(table->sgl, sg, table->nents, i) {
nents 74 drivers/staging/android/ion/ion_heap.c for_each_sg(table->sgl, sg, table->nents, i) {
nents 112 drivers/staging/android/ion/ion_heap.c static int ion_heap_sglist_zero(struct scatterlist *sgl, unsigned int nents,
nents 120 drivers/staging/android/ion/ion_heap.c for_each_sg_page(sgl, &piter, nents, 0) {
nents 145 drivers/staging/android/ion/ion_heap.c return ion_heap_sglist_zero(table->sgl, table->nents, pgprot);
nents 165 drivers/staging/android/ion/ion_system_heap.c for_each_sg(table->sgl, sg, table->nents, i)
nents 95 drivers/staging/kpc2000/kpc_dma/fileops.c acd->mapped_entry_count = dma_map_sg(&ldev->pldev->dev, acd->sgt.sgl, acd->sgt.nents, ldev->dir);
nents 189 drivers/staging/kpc2000/kpc_dma/fileops.c dma_unmap_sg(&ldev->pldev->dev, acd->sgt.sgl, acd->sgt.nents, ldev->dir);
nents 220 drivers/staging/kpc2000/kpc_dma/fileops.c dma_unmap_sg(&acd->ldev->pldev->dev, acd->sgt.sgl, acd->sgt.nents, acd->ldev->dir);
nents 199 drivers/staging/media/ipu3/ipu3-dmamap.c int nents, struct imgu_css_map *map)
nents 207 drivers/staging/media/ipu3/ipu3-dmamap.c for_each_sg(sglist, sg, nents, i) {
nents 211 drivers/staging/media/ipu3/ipu3-dmamap.c if (i != nents - 1 && !PAGE_ALIGNED(sg->length))
nents 219 drivers/staging/media/ipu3/ipu3-dmamap.c nents, size >> shift);
nents 230 drivers/staging/media/ipu3/ipu3-dmamap.c sglist, nents) < size)
nents 16 drivers/staging/media/ipu3/ipu3-dmamap.h int nents, struct imgu_css_map *map);
nents 298 drivers/staging/media/ipu3/ipu3-mmu.c struct scatterlist *sg, unsigned int nents)
nents 306 drivers/staging/media/ipu3/ipu3-mmu.c for_each_sg(sg, s, nents, i) {
nents 315 drivers/staging/media/ipu3/ipu3-mmu.c if (i == nents - 1 && !IS_ALIGNED(s->length, IPU3_PAGE_SIZE))
nents 35 drivers/staging/media/ipu3/ipu3-mmu.h struct scatterlist *sg, unsigned int nents);
nents 318 drivers/staging/media/ipu3/ipu3-v4l2.c return imgu_dmamap_map_sg(imgu, sg->sgl, sg->nents, &buf->map);
nents 109 drivers/staging/media/tegra-vde/dmabuf-cache.c if (!vde->domain && sgt->nents > 1) {
nents 39 drivers/staging/media/tegra-vde/iommu.c size = iommu_map_sg(vde->domain, addr, sgt->sgl, sgt->nents,
nents 1011 drivers/staging/wusbcore/wa-xfer.c nents;
nents 1034 drivers/staging/wusbcore/wa-xfer.c nents = DIV_ROUND_UP((bytes_to_transfer +
nents 1039 drivers/staging/wusbcore/wa-xfer.c out_sg = kmalloc((sizeof(struct scatterlist) * nents), GFP_ATOMIC);
nents 1041 drivers/staging/wusbcore/wa-xfer.c sg_init_table(out_sg, nents);
nents 1050 drivers/staging/wusbcore/wa-xfer.c nents = 0;
nents 1070 drivers/staging/wusbcore/wa-xfer.c nents++;
nents 1076 drivers/staging/wusbcore/wa-xfer.c *out_num_sgs = nents;
nents 153 drivers/target/iscsi/cxgbit/cxgbit_ddp.c unsigned int nents)
nents 155 drivers/target/iscsi/cxgbit/cxgbit_ddp.c unsigned int last_sgidx = nents - 1;
nents 158 drivers/target/iscsi/cxgbit/cxgbit_ddp.c for (i = 0; i < nents; i++, sg = sg_next(sg)) {
nents 177 drivers/target/iscsi/cxgbit/cxgbit_ddp.c unsigned int sgcnt = ttinfo->nents;
nents 184 drivers/target/iscsi/cxgbit/cxgbit_ddp.c xferlen, ttinfo->nents);
nents 246 drivers/target/iscsi/cxgbit/cxgbit_ddp.c ttinfo->nents = cmd->se_cmd.t_data_nents;
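
The ion.c entries above come from a dma-buf exporter's map_dma_buf/unmap_dma_buf callbacks, which map the buffer's table for whichever device is attaching. A sketch of such a callback; stashing the per-attachment table in attachment->priv at attach time is an assumption for illustration:

static struct sg_table *
example_map_dma_buf(struct dma_buf_attachment *attachment,
                    enum dma_data_direction direction)
{
        /* assumed: a per-attachment copy of the table, set up at attach */
        struct sg_table *table = attachment->priv;

        if (!dma_map_sg(attachment->dev, table->sgl, table->orig_nents,
                        direction))
                return ERR_PTR(-ENOMEM);
        return table;
}
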
nents 251 drivers/target/iscsi/cxgbit/cxgbit_ddp.c csk, cmd, cmd->se_cmd.data_length, ttinfo->nents);
nents 254 drivers/target/iscsi/cxgbit/cxgbit_ddp.c ttinfo->nents = 0;
nents 286 drivers/target/iscsi/cxgbit/cxgbit_ddp.c ttinfo->nents, DMA_FROM_DEVICE);
nents 829 drivers/target/iscsi/cxgbit/cxgbit_target.c unsigned int nents, u32 skip)
nents 847 drivers/target/iscsi/cxgbit/cxgbit_target.c consumed += sg_pcopy_from_buffer(sg, nents, (void *)buf,
nents 2409 drivers/target/target_core_transport.c void target_free_sgl(struct scatterlist *sgl, int nents)
nents 2411 drivers/target/target_core_transport.c sgl_free_n_order(sgl, nents, 0);
nents 2516 drivers/target/target_core_transport.c target_alloc_sgl(struct scatterlist **sgl, unsigned int *nents, u32 length,
nents 2521 drivers/target/target_core_transport.c *sgl = sgl_alloc_order(length, 0, chainable, gfp, nents);
nents 369 drivers/usb/core/message.c int nents, size_t length, gfp_t mem_flags)
nents 378 drivers/usb/core/message.c || nents <= 0)
nents 390 drivers/usb/core/message.c io->entries = nents;
nents 424 drivers/usb/core/message.c urb->num_sgs = nents;
nents 432 drivers/usb/core/message.c for_each_sg(sg, sg2, nents, j)
nents 1062 drivers/usb/gadget/function/f_fs.c req->num_sgs = io_data->sgt.nents;
nents 1106 drivers/usb/gadget/function/f_fs.c req->num_sgs = io_data->sgt.nents;
nents 521 drivers/usb/misc/usbtest.c static void free_sglist(struct scatterlist *sg, int nents)
nents 527 drivers/usb/misc/usbtest.c for (i = 0; i < nents; i++) {
nents 536 drivers/usb/misc/usbtest.c alloc_sglist(int nents, int max, int vary, struct usbtest_dev *dev, int pipe)
nents 548 drivers/usb/misc/usbtest.c sg = kmalloc_array(nents, sizeof(*sg), GFP_KERNEL);
nents 551 drivers/usb/misc/usbtest.c sg_init_table(sg, nents);
nents 553 drivers/usb/misc/usbtest.c for (i = 0; i < nents; i++) {
nents 606 drivers/usb/misc/usbtest.c int nents
nents 622 drivers/usb/misc/usbtest.c sg, nents, 0, GFP_KERNEL);
nents 460 drivers/usb/storage/isd200.c srb->sdb.table.nents = buff ? 1 : 0;
nents 128 drivers/usb/storage/protocol.c unsigned int nents = scsi_sg_count(srb);
nents 131 drivers/usb/storage/protocol.c nents = sg_nents(sg);
nents 135 drivers/usb/storage/protocol.c sg_miter_start(&miter, sg, nents, dir == FROM_XFER_BUF ?
nents 461 drivers/usb/storage/uas.c urb->num_sgs = udev->bus->sg_tablesize ? sdb->table.nents : 0;
nents 462 drivers/usb/usbip/stub_rx.c int nents;
nents 492 drivers/usb/usbip/stub_rx.c sgl = sgl_alloc(buf_len, GFP_KERNEL, &nents);
nents 512 drivers/usb/usbip/stub_rx.c num_urbs = nents;
nents 542 drivers/usb/usbip/stub_rx.c priv->urbs[0]->num_sgs = nents;
nents 559 drivers/usb/usbip/stub_rx.c for_each_sg(sgl, sg, nents, i) {
nents 251 drivers/xen/gntdev-dmabuf.c sgt->nents,
nents 291 drivers/xen/gntdev-dmabuf.c if (!dma_map_sg_attrs(attach->dev, sgt->sgl, sgt->nents, dir,
nents 656 drivers/xen/gntdev-dmabuf.c for_each_sg_page(sgt->sgl, &sg_iter, sgt->nents, 0) {
nents 2505 fs/proc/base.c const struct pid_entry *ents, unsigned int nents)
nents 2516 fs/proc/base.c if (ctx->pos >= nents + 2)
nents 2519 fs/proc/base.c for (p = ents + (ctx->pos - 2); p < ents + nents; p++) {
nents 34 include/linux/dma-debug.h int nents, int mapped_ents, int direction);
nents 100 include/linux/dma-debug.h int nents, int mapped_ents, int direction)
nents 105 include/linux/dma-mapping.h int nents, enum dma_data_direction dir,
nents 108 include/linux/dma-mapping.h struct scatterlist *sg, int nents,
nents 124 include/linux/dma-mapping.h struct scatterlist *sg, int nents,
nents 127 include/linux/dma-mapping.h struct scatterlist *sg, int nents,
nents 206 include/linux/dma-mapping.h int dma_direct_map_sg(struct device *dev, struct scatterlist *sgl, int nents,
nents 216 include/linux/dma-mapping.h struct scatterlist *sgl, int nents, enum dma_data_direction dir);
nents 223 include/linux/dma-mapping.h struct scatterlist *sgl, int nents, enum dma_data_direction dir)
nents 234 include/linux/dma-mapping.h int nents, enum dma_data_direction dir, unsigned long attrs);
nents 238 include/linux/dma-mapping.h struct scatterlist *sgl, int nents, enum dma_data_direction dir);
nents 245 include/linux/dma-mapping.h struct scatterlist *sgl, int nents, enum dma_data_direction dir,
nents 254 include/linux/dma-mapping.h struct scatterlist *sgl, int nents, enum dma_data_direction dir)
nents 312 include/linux/dma-mapping.h int nents, enum dma_data_direction dir,
nents 320 include/linux/dma-mapping.h ents = dma_direct_map_sg(dev, sg, nents, dir, attrs);
nents 322 include/linux/dma-mapping.h ents = ops->map_sg(dev, sg, nents, dir, attrs);
nents 324 include/linux/dma-mapping.h debug_dma_map_sg(dev, sg, nents, ents, dir);
nents 330 include/linux/dma-mapping.h int nents, enum dma_data_direction dir,
nents 336 include/linux/dma-mapping.h debug_dma_unmap_sg(dev, sg, nents, dir);
nents 338 include/linux/dma-mapping.h dma_direct_unmap_sg(dev, sg, nents, dir, attrs);
nents 340 include/linux/dma-mapping.h ops->unmap_sg(dev, sg, nents, dir, attrs);
nents 480 include/linux/dma-mapping.h int nents, enum dma_data_direction dir, unsigned long attrs)
nents 485 include/linux/dma-mapping.h struct scatterlist *sg, int nents, enum dma_data_direction dir,
nents 775 include/linux/dmaengine.h unsigned int nents, int value, unsigned long flags);
nents 430 include/linux/iommu.h struct scatterlist *sg,unsigned int nents, int prot);
nents 680 include/linux/iommu.h unsigned int nents, int prot)
nents 645 include/linux/kfifo.h #define kfifo_dma_in_prepare(fifo, sgl, nents, len) \
nents 649 include/linux/kfifo.h int __nents = (nents); \
nents 696 include/linux/kfifo.h #define kfifo_dma_out_prepare(fifo, sgl, nents, len) \
nents 700 include/linux/kfifo.h int __nents = (nents); \
nents 779 include/linux/kfifo.h struct scatterlist *sgl, int nents, unsigned int len);
nents 782 include/linux/kfifo.h struct scatterlist *sgl, int nents, unsigned int len);
nents 801 include/linux/kfifo.h struct scatterlist *sgl, int nents, unsigned int len, size_t recsize);
nents 807 include/linux/kfifo.h struct scatterlist *sgl, int nents, unsigned int len, size_t recsize);
nents 66 include/linux/pci-dma-compat.h int nents, int direction)
nents 68 include/linux/pci-dma-compat.h return dma_map_sg(&hwdev->dev, sg, nents, (enum dma_data_direction)direction);
nents 73 include/linux/pci-dma-compat.h int nents, int direction)
nents 75 include/linux/pci-dma-compat.h dma_unmap_sg(&hwdev->dev, sg, nents, (enum dma_data_direction)direction);
nents 30 include/linux/pci-p2pdma.h unsigned int *nents, u32 length);
nents 34 include/linux/pci-p2pdma.h int nents, enum dma_data_direction dir, unsigned long attrs);
nents 36 include/linux/pci-p2pdma.h int nents, enum dma_data_direction dir, unsigned long attrs);
nents 75 include/linux/pci-p2pdma.h unsigned int *nents, u32 length)
nents 87 include/linux/pci-p2pdma.h struct scatterlist *sg, int nents, enum dma_data_direction dir,
nents 93 include/linux/pci-p2pdma.h struct scatterlist *sg, int nents, enum dma_data_direction dir,
nents 123 include/linux/pci-p2pdma.h int nents, enum dma_data_direction dir)
nents 125 include/linux/pci-p2pdma.h return pci_p2pdma_map_sg_attrs(dev, sg, nents, dir, 0);
nents 129 include/linux/pci-p2pdma.h struct scatterlist *sg, int nents, enum dma_data_direction dir)
nents 131 include/linux/pci-p2pdma.h pci_p2pdma_unmap_sg_attrs(dev, sg, nents, dir, 0);
nents 44 include/linux/scatterlist.h unsigned int nents; /* number of mapped entries */
nents 249 include/linux/scatterlist.h unsigned int nents)
nents 251 include/linux/scatterlist.h sg_mark_end(&sgl[nents - 1]);
nents 289 include/linux/scatterlist.h void sgl_free_n_order(struct scatterlist *sgl, int nents, int order);
nents 294 include/linux/scatterlist.h size_t sg_copy_buffer(struct scatterlist *sgl, unsigned int nents, void *buf,
nents 297 include/linux/scatterlist.h size_t sg_copy_from_buffer(struct scatterlist *sgl, unsigned int nents,
nents 299 include/linux/scatterlist.h size_t sg_copy_to_buffer(struct scatterlist *sgl, unsigned int nents,
nents 302 include/linux/scatterlist.h size_t sg_pcopy_from_buffer(struct scatterlist *sgl, unsigned int nents,
nents 304 include/linux/scatterlist.h size_t sg_pcopy_to_buffer(struct scatterlist *sgl, unsigned int nents,
nents 306 include/linux/scatterlist.h size_t sg_zero_buffer(struct scatterlist *sgl, unsigned int nents,
nents 337 include/linux/scatterlist.h int sg_alloc_table_chained(struct sg_table *table, int nents,
nents 376 include/linux/scatterlist.h struct scatterlist *sglist, unsigned int nents,
nents 408 include/linux/scatterlist.h #define for_each_sg_page(sglist, piter, nents, pgoffset) \
nents 409 include/linux/scatterlist.h for (__sg_page_iter_start((piter), (sglist), (nents), (pgoffset)); \
nents 462 include/linux/scatterlist.h unsigned int nents, unsigned int flags);
nents 144 include/linux/tifm.h int tifm_map_sg(struct tifm_dev *sock, struct scatterlist *sg, int nents,
nents 146 include/linux/tifm.h void tifm_unmap_sg(struct tifm_dev *sock, struct scatterlist *sg, int nents,
nents 1771 include/linux/usb.h struct scatterlist *sg, int nents);
nents 1879 include/linux/usb.h int nents,
nents 72 include/media/drv-intf/saa7146.h int nents;
nents 2745 include/rdma/ib_verbs.h unsigned int nents,
nents 2770 include/rdma/ib_verbs.h #define rdma_for_each_block(sglist, biter, nents, pgsz) \
nents 2771 include/rdma/ib_verbs.h for (__rdma_block_iter_start(biter, sglist, nents, \
nents 4004 include/rdma/ib_verbs.h struct scatterlist *sg, int nents,
nents 4007 include/rdma/ib_verbs.h return dma_map_sg(dev->dma_device, sg, nents, direction);
nents 4018 include/rdma/ib_verbs.h struct scatterlist *sg, int nents,
nents 4021 include/rdma/ib_verbs.h dma_unmap_sg(dev->dma_device, sg, nents, direction);
nents 4025 include/rdma/ib_verbs.h struct scatterlist *sg, int nents,
nents 4029 include/rdma/ib_verbs.h return dma_map_sg_attrs(dev->dma_device, sg, nents, direction,
nents 4034 include/rdma/ib_verbs.h struct scatterlist *sg, int nents,
nents 4038 include/rdma/ib_verbs.h dma_unmap_sg_attrs(dev->dma_device, sg, nents, direction, dma_attrs);
nents 180 include/scsi/scsi_cmnd.h return cmd->sdb.table.nents;
nents 294 include/scsi/scsi_cmnd.h return cmd->prot_sdb ? cmd->prot_sdb->table.nents : 0;
nents 205 include/target/target_core_fabric.h int target_alloc_sgl(struct scatterlist **sgl, unsigned int *nents,
nents 207 include/target/target_core_fabric.h void target_free_sgl(struct scatterlist *sgl, int nents);
nents 102 include/trace/events/rpcrdma.h __field(int, nents)
nents 113 include/trace/events/rpcrdma.h __entry->nents = mr->mr_nents;
nents 124 include/trace/events/rpcrdma.h __entry->nents < __entry->nsegs ? "more" : "last"
nents 150 include/trace/events/rpcrdma.h __field(int, nents)
nents 160 include/trace/events/rpcrdma.h __entry->nents = mr->mr_nents;
nents 171 include/trace/events/rpcrdma.h __entry->nents < __entry->nsegs ? "more" : "last"
nents 904 include/trace/events/rpcrdma.h __field(int, nents)
nents 911 include/trace/events/rpcrdma.h __entry->nents = sg_nents;
nents 917 include/trace/events/rpcrdma.h __entry->nents
nents 934 include/trace/events/rpcrdma.h __field(int, nents)
nents 942 include/trace/events/rpcrdma.h __entry->nents = mr->mr_nents;
nents 948 include/trace/events/rpcrdma.h __entry->num_mapped, __entry->nents
nents 569 kernel/dma/debug.c unsigned int nents, i;
nents 581 kernel/dma/debug.c nents = radix_tree_gang_lookup(&dma_active_cacheline, results, cln,
nents 583 kernel/dma/debug.c for (i = 0; i < nents; i++) {
nents 1347 kernel/dma/debug.c int nents, int mapped_ents, int direction)
nents 1368 kernel/dma/debug.c entry->sg_call_ents = nents;
nents 240 kernel/dma/direct.c struct scatterlist *sgl, int nents, enum dma_data_direction dir)
nents 245 kernel/dma/direct.c for_each_sg(sgl, sg, nents, i) {
nents 279 kernel/dma/direct.c struct scatterlist *sgl, int nents, enum dma_data_direction dir)
nents 284 kernel/dma/direct.c for_each_sg(sgl, sg, nents, i) {
nents 314 kernel/dma/direct.c int nents, enum dma_data_direction dir, unsigned long attrs)
nents 319 kernel/dma/direct.c for_each_sg(sgl, sg, nents, i)
nents 352 kernel/dma/direct.c int dma_direct_map_sg(struct device *dev, struct scatterlist *sgl, int nents,
nents 358 kernel/dma/direct.c for_each_sg(sgl, sg, nents, i) {
nents 366 kernel/dma/direct.c return nents;
nents 38 kernel/dma/virt.c int nents, enum dma_data_direction dir,
nents 44 kernel/dma/virt.c for_each_sg(sgl, sg, nents, i) {
nents 50 kernel/dma/virt.c return nents;
nents 296 lib/kfifo.c int nents, unsigned int len)
nents 303 lib/kfifo.c if (!nents)
nents 323 lib/kfifo.c if (++n == nents || sgl == NULL)
nents 335 lib/kfifo.c int nents, unsigned int len, unsigned int off)
nents 350 lib/kfifo.c n = setup_sgl_buf(sgl, fifo->data + off, nents, l);
nents 351 lib/kfifo.c n += setup_sgl_buf(sgl + n, fifo->data, nents - n, len - l);
nents 357 lib/kfifo.c struct scatterlist *sgl, int nents, unsigned int len)
nents 365 lib/kfifo.c return setup_sgl(fifo, sgl, nents, len, fifo->in);
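
The lib/kfifo.c lines above implement the kfifo DMA helpers declared earlier in this listing: setup_sgl() may emit two scatterlist entries because the ring buffer can wrap. A usage sketch modeled on samples/kfifo/dma-example.c, which also appears near the end of this listing; FIFO_SIZE and the fifo declaration here are assumptions in the spirit of that sample:

#define FIFO_SIZE 32    /* must be a power of two */
static DECLARE_KFIFO(fifo, unsigned char, FIFO_SIZE);

static void example_fifo_dma_in(unsigned int transferred)
{
        struct scatterlist sg[2];       /* a wrapped ring needs at most two */
        unsigned int nents;

        nents = kfifo_dma_in_prepare(&fifo, sg, ARRAY_SIZE(sg), FIFO_SIZE);
        if (!nents)
                return;                 /* no free space in the fifo */

        /* submit sg[0..nents-1] to the DMA engine; once "transferred"
         * bytes have landed, publish them to readers: */
        kfifo_dma_in_finish(&fifo, transferred);
}
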
nents 370 lib/kfifo.c struct scatterlist *sgl, int nents, unsigned int len)
nents 378 lib/kfifo.c return setup_sgl(fifo, sgl, nents, len, fifo->out);
nents 549 lib/kfifo.c struct scatterlist *sgl, int nents, unsigned int len, size_t recsize)
nents 551 lib/kfifo.c BUG_ON(!nents);
nents 558 lib/kfifo.c return setup_sgl(fifo, sgl, nents, len, fifo->in + recsize);
nents 572 lib/kfifo.c struct scatterlist *sgl, int nents, unsigned int len, size_t recsize)
nents 574 lib/kfifo.c BUG_ON(!nents);
nents 581 lib/kfifo.c return setup_sgl(fifo, sgl, nents, len, fifo->out + recsize);
nents 261 lib/mpi/mpicoder.c int nents;
nents 269 lib/mpi/mpicoder.c nents = sg_nents_for_len(sgl, nbytes);
nents 270 lib/mpi/mpicoder.c if (nents < 0)
nents 273 lib/mpi/mpicoder.c sg_miter_start(&miter, sgl, nents, SG_MITER_ATOMIC | SG_MITER_TO_SG);
nents 47 lib/scatterlist.c int nents;
nents 48 lib/scatterlist.c for (nents = 0; sg; sg = sg_next(sg))
nents 49 lib/scatterlist.c nents++;
nents 50 lib/scatterlist.c return nents;
nents 70 lib/scatterlist.c int nents;
nents 76 lib/scatterlist.c for (nents = 0, total = 0; sg; sg = sg_next(sg)) {
nents 77 lib/scatterlist.c nents++;
nents 80 lib/scatterlist.c return nents;
nents 101 lib/scatterlist.c struct scatterlist *sg_last(struct scatterlist *sgl, unsigned int nents)
nents 106 lib/scatterlist.c for_each_sg(sgl, sg, nents, i)
nents 124 lib/scatterlist.c void sg_init_table(struct scatterlist *sgl, unsigned int nents)
nents 126 lib/scatterlist.c memset(sgl, 0, sizeof(*sgl) * nents);
nents 127 lib/scatterlist.c sg_init_marker(sgl, nents);
nents 149 lib/scatterlist.c static struct scatterlist *sg_kmalloc(unsigned int nents, gfp_t gfp_mask)
nents 151 lib/scatterlist.c if (nents == SG_MAX_SINGLE_ALLOC) {
nents 165 lib/scatterlist.c return kmalloc_array(nents, sizeof(struct scatterlist),
nents 169 lib/scatterlist.c static void sg_kfree(struct scatterlist *sg, unsigned int nents)
nents 171 lib/scatterlist.c if (nents == SG_MAX_SINGLE_ALLOC) {
nents 266 lib/scatterlist.c int __sg_alloc_table(struct sg_table *table, unsigned int nents,
nents 278 lib/scatterlist.c if (nents == 0)
nents 281 lib/scatterlist.c if (WARN_ON_ONCE(nents > max_ents))
nents 285 lib/scatterlist.c left = nents;
nents 312 lib/scatterlist.c table->nents = ++table->orig_nents;
nents 318 lib/scatterlist.c table->nents = table->orig_nents += sg_size;
nents 355 lib/scatterlist.c int sg_alloc_table(struct sg_table *table, unsigned int nents, gfp_t gfp_mask)
nents 359 lib/scatterlist.c ret = __sg_alloc_table(table, nents, SG_MAX_SINGLE_ALLOC,
nents 560 lib/scatterlist.c void sgl_free_n_order(struct scatterlist *sgl, int nents, int order)
nents 566 lib/scatterlist.c for_each_sg(sgl, sg, nents, i) {
nents 601 lib/scatterlist.c struct scatterlist *sglist, unsigned int nents,
nents 605 lib/scatterlist.c piter->__nents = nents;
nents 675 lib/scatterlist.c unsigned int nents, unsigned int flags)
nents 679 lib/scatterlist.c __sg_page_iter_start(&miter->piter, sgl, nents, 0);
nents 840 lib/scatterlist.c size_t sg_copy_buffer(struct scatterlist *sgl, unsigned int nents, void *buf,
nents 852 lib/scatterlist.c sg_miter_start(&miter, sgl, nents, sg_flags);
nents 886 lib/scatterlist.c size_t sg_copy_from_buffer(struct scatterlist *sgl, unsigned int nents,
nents 889 lib/scatterlist.c return sg_copy_buffer(sgl, nents, (void *)buf, buflen, 0, false);
nents 903 lib/scatterlist.c size_t sg_copy_to_buffer(struct scatterlist *sgl, unsigned int nents,
nents 906 lib/scatterlist.c return sg_copy_buffer(sgl, nents, buf, buflen, 0, true);
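
The lib/scatterlist.c lines above show that the whole sg_copy_buffer() family is built on the sg mapping iterator started by sg_miter_start(). A minimal independent use of that iterator, as a hypothetical example_count_zero_bytes() that walks nents entries page chunk by page chunk:

static size_t example_count_zero_bytes(struct scatterlist *sgl,
                                       unsigned int nents)
{
        struct sg_mapping_iter miter;
        size_t zeros = 0, i;

        sg_miter_start(&miter, sgl, nents, SG_MITER_FROM_SG);
        while (sg_miter_next(&miter)) {
                u8 *p = miter.addr;     /* mapped chunk of the current page */

                for (i = 0; i < miter.length; i++)
                        zeros += (p[i] == 0);
        }
        sg_miter_stop(&miter);
        return zeros;
}
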
nents 921 lib/scatterlist.c size_t sg_pcopy_from_buffer(struct scatterlist *sgl, unsigned int nents,
nents 924 lib/scatterlist.c return sg_copy_buffer(sgl, nents, (void *)buf, buflen, skip, false);
nents 939 lib/scatterlist.c size_t sg_pcopy_to_buffer(struct scatterlist *sgl, unsigned int nents,
nents 942 lib/scatterlist.c return sg_copy_buffer(sgl, nents, buf, buflen, skip, true);
nents 955 lib/scatterlist.c size_t sg_zero_buffer(struct scatterlist *sgl, unsigned int nents,
nents 962 lib/scatterlist.c sg_miter_start(&miter, sgl, nents, sg_flags);
nents 40 lib/sg_pool.c static inline unsigned int sg_pool_index(unsigned short nents)
nents 44 lib/sg_pool.c BUG_ON(nents > SG_CHUNK_SIZE);
nents 46 lib/sg_pool.c if (nents <= 8)
nents 49 lib/sg_pool.c index = get_count_order(nents) - 3;
nents 54 lib/sg_pool.c static void sg_pool_free(struct scatterlist *sgl, unsigned int nents)
nents 58 lib/sg_pool.c sgp = sg_pools + sg_pool_index(nents);
nents 62 lib/sg_pool.c static struct scatterlist *sg_pool_alloc(unsigned int nents, gfp_t gfp_mask)
nents 66 lib/sg_pool.c sgp = sg_pools + sg_pool_index(nents);
nents 111 lib/sg_pool.c int sg_alloc_table_chained(struct sg_table *table, int nents,
nents 116 lib/sg_pool.c BUG_ON(!nents);
nents 119 lib/sg_pool.c if (nents <= nents_first_chunk) {
nents 120 lib/sg_pool.c table->nents = table->orig_nents = nents;
nents 121 lib/sg_pool.c sg_init_table(table->sgl, nents);
nents 132 lib/sg_pool.c ret = __sg_alloc_table(table, nents, SG_CHUNK_SIZE,
nents 13 lib/sg_split.c int nents;
nents 20 lib/sg_split.c static int sg_calculate_split(struct scatterlist *in, int nents, int nb_splits,
nents 32 lib/sg_split.c splitters[i].nents = 0;
nents 35 lib/sg_split.c for_each_sg(in, sg, nents, i) {
nents 48 lib/sg_split.c curr->nents++;
nents 59 lib/sg_split.c curr->nents = 1;
nents 86 lib/sg_split.c for (j = 0; j < split->nents; j++, out_sg++) {
nents 112 lib/sg_split.c for (j = 0; j < split->nents; j++, out_sg++) {
nents 168 lib/sg_split.c splitters[i].out_sg = kmalloc_array(splitters[i].nents,
nents 190 lib/sg_split.c out_mapped_nents[i] = splitters[i].nents;
nents 189 net/ceph/crypto.c sgt->nents = sgt->orig_nents = 1;
nents 100 net/rds/ib_fmr.c unsigned int nents)
nents 112 net/rds/ib_fmr.c sg_dma_len = ib_dma_map_sg(dev, sg, nents, DMA_BIDIRECTIONAL);
nents 127 net/rds/ib_fmr.c ib_dma_unmap_sg(dev, sg, nents,
nents 136 net/rds/ib_fmr.c ib_dma_unmap_sg(dev, sg, nents,
nents 149 net/rds/ib_fmr.c ib_dma_unmap_sg(dev, sg, nents, DMA_BIDIRECTIONAL);
nents 156 net/rds/ib_fmr.c ib_dma_unmap_sg(dev, sg, nents, DMA_BIDIRECTIONAL);
nents 172 net/rds/ib_fmr.c ib_dma_unmap_sg(dev, sg, nents, DMA_BIDIRECTIONAL);
nents 182 net/rds/ib_fmr.c ibmr->sg_len = nents;
nents 200 net/rds/ib_fmr.c unsigned long nents,
nents 207 net/rds/ib_fmr.c ibmr = rds_ib_alloc_fmr(rds_ibdev, nents);
nents 213 net/rds/ib_fmr.c ret = rds_ib_map_fmr(rds_ibdev, ibmr, sg, nents);
nents 404 net/rds/ib_frmr.c unsigned long nents, u32 *key)
nents 418 net/rds/ib_frmr.c ibmr = rds_ib_alloc_frmr(rds_ibdev, nents);
nents 426 net/rds/ib_frmr.c ret = rds_ib_map_frmr(rds_ibdev, ibmr->pool, ibmr, sg, nents);
nents 123 net/rds/ib_mr.h void *rds_ib_get_mr(struct scatterlist *sg, unsigned long nents,
nents 146 net/rds/ib_mr.h unsigned long nents, u32 *key);
nents 529 net/rds/ib_rdma.c void *rds_ib_get_mr(struct scatterlist *sg, unsigned long nents,
nents 553 net/rds/ib_rdma.c ibmr = rds_ib_reg_frmr(rds_ibdev, ic, sg, nents, key_ret);
nents 555 net/rds/ib_rdma.c ibmr = rds_ib_reg_fmr(rds_ibdev, sg, nents, key_ret);
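
The lib/sg_split.c lines above implement sg_split(), which carves one mapped scatterlist into several output lists that borrow the input's pages, and which the pxa_camera and spi-omap2-mcspi entries earlier in this listing consume. A sketch of a two-way split, assuming the in-tree signature from lib/sg_split.c and its header; sizes and names here are illustrative:

static int example_split(struct scatterlist *in, int in_mapped_nents,
                         size_t first_bytes, size_t second_bytes)
{
        size_t split_sizes[2] = { first_bytes, second_bytes };
        struct scatterlist *out[2];
        int out_mapped_nents[2];
        int ret;

        ret = sg_split(in, in_mapped_nents, 0, 2, split_sizes,
                       out, out_mapped_nents, GFP_KERNEL);
        if (ret)
                return ret;

        /* out[0]/out[1] are kmalloc'ed by sg_split(); the caller owns
         * them and must kfree() both when done */
        kfree(out[0]);
        kfree(out[1]);
        return 0;
}
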
nents 311 net/rds/message.c struct scatterlist *rds_message_alloc_sgs(struct rds_message *rm, int nents)
nents 316 net/rds/message.c if (nents <= 0) {
nents 321 net/rds/message.c if (rm->m_used_sgs + nents > rm->m_total_sgs) {
nents 323 net/rds/message.c rm->m_total_sgs, rm->m_used_sgs, nents);
nents 328 net/rds/message.c sg_init_table(sg_ret, nents);
nents 329 net/rds/message.c rm->m_used_sgs += nents;
nents 184 net/rds/rdma.c unsigned int nents;
nents 254 net/rds/rdma.c nents = ret;
nents 255 net/rds/rdma.c sg = kcalloc(nents, sizeof(*sg), GFP_KERNEL);
nents 260 net/rds/rdma.c WARN_ON(!nents);
nents 261 net/rds/rdma.c sg_init_table(sg, nents);
nents 264 net/rds/rdma.c for (i = 0 ; i < nents; i++)
nents 267 net/rds/rdma.c rdsdebug("RDS: trans_private nents is %u\n", nents);
nents 273 net/rds/rdma.c trans_private = rs->rs_transport->get_mr(sg, nents, rs,
nents 278 net/rds/rdma.c for (i = 0 ; i < nents; i++)
nents 851 net/rds/rds.h struct rds_message *rds_message_alloc(unsigned int nents, gfp_t gfp);
nents 852 net/rds/rds.h struct scatterlist *rds_message_alloc_sgs(struct rds_message *rm, int nents);
nents 409 net/smc/smc_ib.c buf_slot->sgt[SMC_SINGLE_LINK].nents, i) {
nents 429 net/smc/smc_ib.c buf_slot->sgt[SMC_SINGLE_LINK].nents, i) {
nents 25 samples/kfifo/dma-example.c unsigned int nents;
nents 63 samples/kfifo/dma-example.c nents = kfifo_dma_in_prepare(&fifo, sg, ARRAY_SIZE(sg), FIFO_SIZE);
nents 64 samples/kfifo/dma-example.c printk(KERN_INFO "DMA sgl entries: %d\n", nents);
nents 65 samples/kfifo/dma-example.c if (!nents) {
nents 73 samples/kfifo/dma-example.c for (i = 0; i < nents; i++) {
nents 93 samples/kfifo/dma-example.c nents = kfifo_dma_out_prepare(&fifo, sg, ARRAY_SIZE(sg), 8);
nents 94 samples/kfifo/dma-example.c printk(KERN_INFO "DMA sgl entries: %d\n", nents);
nents 95 samples/kfifo/dma-example.c if (!nents) {
nents 102 samples/kfifo/dma-example.c for (i = 0; i < nents; i++) {
nents 849 samples/vfio-mdev/mbochs.c if (!dma_map_sg(at->dev, sg->sgl, sg->nents, direction))
nents 70 tools/testing/scatterlist/main.c assert(st.nents == test->expected_segments);
nents 149 tools/virtio/linux/scatterlist.h static inline void sg_init_table(struct scatterlist *sgl, unsigned int nents)
nents 151 tools/virtio/linux/scatterlist.h memset(sgl, 0, sizeof(*sgl) * nents);
nents 152 tools/virtio/linux/scatterlist.h sg_mark_end(&sgl[nents - 1]);