sg                212 arch/alpha/include/asm/core_marvel.h 		unsigned sg : 1;	/* <1>			*/
sg                517 arch/alpha/kernel/pci_iommu.c sg_classify(struct device *dev, struct scatterlist *sg, struct scatterlist *end,
sg                525 arch/alpha/kernel/pci_iommu.c 	leader = sg;
sg                532 arch/alpha/kernel/pci_iommu.c 	for (++sg; sg < end; ++sg) {
sg                534 arch/alpha/kernel/pci_iommu.c 		addr = SG_ENT_PHYS_ADDRESS(sg);
sg                535 arch/alpha/kernel/pci_iommu.c 		len = sg->length;
sg                541 arch/alpha/kernel/pci_iommu.c 			sg->dma_address = -1;
sg                544 arch/alpha/kernel/pci_iommu.c 			sg->dma_address = -2;
sg                551 arch/alpha/kernel/pci_iommu.c 			leader = sg;
sg                573 arch/alpha/kernel/pci_iommu.c 	struct scatterlist *sg;
sg                630 arch/alpha/kernel/pci_iommu.c 	sg = leader;
sg                633 arch/alpha/kernel/pci_iommu.c 		struct scatterlist *last_sg = sg;
sg                636 arch/alpha/kernel/pci_iommu.c 		size = sg->length;
sg                637 arch/alpha/kernel/pci_iommu.c 		paddr = SG_ENT_PHYS_ADDRESS(sg);
sg                639 arch/alpha/kernel/pci_iommu.c 		while (sg+1 < end && (int) sg[1].dma_address == -1) {
sg                640 arch/alpha/kernel/pci_iommu.c 			size += sg[1].length;
sg                641 arch/alpha/kernel/pci_iommu.c 			sg++;
sg                654 arch/alpha/kernel/pci_iommu.c 		while (++last_sg <= sg) {
sg                660 arch/alpha/kernel/pci_iommu.c 	} while (++sg < end && (int) sg->dma_address < 0);
sg                665 arch/alpha/kernel/pci_iommu.c static int alpha_pci_map_sg(struct device *dev, struct scatterlist *sg,
sg                682 arch/alpha/kernel/pci_iommu.c 		sg->dma_length = sg->length;
sg                683 arch/alpha/kernel/pci_iommu.c 		sg->dma_address
sg                684 arch/alpha/kernel/pci_iommu.c 		  = pci_map_single_1(pdev, SG_ENT_VIRT_ADDRESS(sg),
sg                685 arch/alpha/kernel/pci_iommu.c 				     sg->length, dac_allowed);
sg                686 arch/alpha/kernel/pci_iommu.c 		return sg->dma_address != DMA_MAPPING_ERROR;
sg                689 arch/alpha/kernel/pci_iommu.c 	start = sg;
sg                690 arch/alpha/kernel/pci_iommu.c 	end = sg + nents;
sg                693 arch/alpha/kernel/pci_iommu.c 	sg_classify(dev, sg, end, alpha_mv.mv_pci_tbi != 0);
sg                710 arch/alpha/kernel/pci_iommu.c 	for (out = sg; sg < end; ++sg) {
sg                711 arch/alpha/kernel/pci_iommu.c 		if ((int) sg->dma_address < 0)
sg                713 arch/alpha/kernel/pci_iommu.c 		if (sg_fill(dev, sg, end, out, arena, max_dma, dac_allowed) < 0)
sg                743 arch/alpha/kernel/pci_iommu.c static void alpha_pci_unmap_sg(struct device *dev, struct scatterlist *sg,
sg                770 arch/alpha/kernel/pci_iommu.c 	for (end = sg + nents; sg < end; ++sg) {
sg                776 arch/alpha/kernel/pci_iommu.c 		addr = sg->dma_address;
sg                777 arch/alpha/kernel/pci_iommu.c 		size = sg->dma_length;
sg                784 arch/alpha/kernel/pci_iommu.c 			      sg - end + nents, addr, size);
sg                792 arch/alpha/kernel/pci_iommu.c 			      sg - end + nents, addr, size);
sg                797 arch/alpha/kernel/pci_iommu.c 		     sg - end + nents, addr, size);
sg                816 arch/alpha/kernel/pci_iommu.c 	DBGA("pci_unmap_sg: %ld entries\n", nents - (end - sg));
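
Example: the alpha routines above are the arch backend of the generic DMA API; a driver consumes them roughly like this. A minimal sketch, assuming a valid dev and a populated sglist; note that dma_map_sg() may return fewer entries than it was given (the classify/coalesce pass above merges adjacent pages), so only the returned count is iterated, while dma_unmap_sg() still takes the original nents.

	#include <linux/dma-mapping.h>
	#include <linux/scatterlist.h>

	static int example_do_dma(struct device *dev,
				  struct scatterlist *sglist, int nents)
	{
		struct scatterlist *sg;
		int i, mapped;

		mapped = dma_map_sg(dev, sglist, nents, DMA_TO_DEVICE);
		if (mapped == 0)
			return -ENOMEM;

		/* Iterate only the 'mapped' entries the IOMMU produced. */
		for_each_sg(sglist, sg, mapped, i) {
			dma_addr_t addr = sg_dma_address(sg);

			dev_dbg(dev, "seg %d: %pad + %u\n", i, &addr,
				sg_dma_len(sg));
		}

		/* ... run the transfer ... then unmap with the original count. */
		dma_unmap_sg(dev, sglist, nents, DMA_TO_DEVICE);
		return 0;
	}
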
sg                 99 arch/arm/include/asm/dma.h extern void set_dma_sg(unsigned int chan, struct scatterlist *sg, int nr_sg);
sg                 29 arch/arm/include/asm/mach/dma.h 	struct scatterlist *sg;		/* DMA Scatter-Gather List	*/
sg                 87 arch/arm/kernel/dma-isa.c 		if (!dma->sg) {
sg                 92 arch/arm/kernel/dma-isa.c 			dma->sg = &dma->buf;
sg                121 arch/arm/kernel/dma.c void set_dma_sg (unsigned int chan, struct scatterlist *sg, int nr_sg)
sg                128 arch/arm/kernel/dma.c 	dma->sg = sg;
sg                145 arch/arm/kernel/dma.c 	dma->sg = NULL;
sg                162 arch/arm/kernel/dma.c 	dma->sg = NULL;
sg                 58 arch/arm/mach-rpc/dma.c 	if (idma->dma.sg) {
sg                 77 arch/arm/mach-rpc/dma.c 				idma->dma.sg = sg_next(idma->dma.sg);
sg                 78 arch/arm/mach-rpc/dma.c 				idma->dma_addr = idma->dma.sg->dma_address;
sg                 79 arch/arm/mach-rpc/dma.c 				idma->dma_len = idma->dma.sg->length;
sg                 82 arch/arm/mach-rpc/dma.c 				idma->dma.sg = NULL;
sg                168 arch/arm/mach-rpc/dma.c 		if (!idma->dma.sg) {
sg                169 arch/arm/mach-rpc/dma.c 			idma->dma.sg = &idma->dma.buf;
sg                178 arch/arm/mach-rpc/dma.c 		idma->dma_addr = idma->dma.sg->dma_address;
sg                179 arch/arm/mach-rpc/dma.c 		idma->dma_len = idma->dma.sg->length;
sg                272 arch/arm/mach-rpc/dma.c 	if (fdma->dma.sg)
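
Example: set_dma_sg() above queues a whole scatterlist on a hardware channel, which the mach-rpc code then walks entry by entry from its interrupt path. A sketch of the caller side, assuming the channel came from request_dma() and the list is already DMA-mapped; set_dma_mode()/enable_dma() are the usual ARM ISA-DMA calls.

	#include <asm/dma.h>

	static void example_start_isa_dma(unsigned int chan,
					  struct scatterlist *sg, int nr_sg)
	{
		disable_dma(chan);
		set_dma_sg(chan, sg, nr_sg);	/* hand over the whole list */
		set_dma_mode(chan, DMA_MODE_READ);
		enable_dma(chan);		/* the core walks sg entries */
	}
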
sg                122 arch/arm/mm/dma-mapping-nommu.c 	struct scatterlist *sg;
sg                124 arch/arm/mm/dma-mapping-nommu.c 	for_each_sg(sgl, sg, nents, i) {
sg                125 arch/arm/mm/dma-mapping-nommu.c 		sg_dma_address(sg) = sg_phys(sg);
sg                126 arch/arm/mm/dma-mapping-nommu.c 		sg_dma_len(sg) = sg->length;
sg                127 arch/arm/mm/dma-mapping-nommu.c 		__dma_page_cpu_to_dev(sg_dma_address(sg), sg_dma_len(sg), dir);
sg                137 arch/arm/mm/dma-mapping-nommu.c 	struct scatterlist *sg;
sg                140 arch/arm/mm/dma-mapping-nommu.c 	for_each_sg(sgl, sg, nents, i)
sg                141 arch/arm/mm/dma-mapping-nommu.c 		__dma_page_dev_to_cpu(sg_dma_address(sg), sg_dma_len(sg), dir);
sg                159 arch/arm/mm/dma-mapping-nommu.c 	struct scatterlist *sg;
sg                162 arch/arm/mm/dma-mapping-nommu.c 	for_each_sg(sgl, sg, nents, i)
sg                163 arch/arm/mm/dma-mapping-nommu.c 		__dma_page_cpu_to_dev(sg_dma_address(sg), sg_dma_len(sg), dir);
sg                169 arch/arm/mm/dma-mapping-nommu.c 	struct scatterlist *sg;
sg                172 arch/arm/mm/dma-mapping-nommu.c 	for_each_sg(sgl, sg, nents, i)
sg                173 arch/arm/mm/dma-mapping-nommu.c 		__dma_page_dev_to_cpu(sg_dma_address(sg), sg_dma_len(sg), dir);
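
Example: the nommu path above is the degenerate map_sg: with no IOMMU, the DMA address is simply the physical address, and only cache maintenance remains. The same 1:1 shape in isolation (a sketch; the arch sync hook is elided):

	#include <linux/scatterlist.h>

	static int example_map_sg_1to1(struct scatterlist *sgl, int nents)
	{
		struct scatterlist *sg;
		int i;

		for_each_sg(sgl, sg, nents, i) {
			sg_dma_address(sg) = sg_phys(sg);	/* bus == phys */
			sg_dma_len(sg) = sg->length;
			/* arch_sync_dma_for_device(...) would go here */
		}
		return nents;
	}
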
sg               1006 arch/arm/mm/dma-mapping.c int arm_dma_map_sg(struct device *dev, struct scatterlist *sg, int nents,
sg               1013 arch/arm/mm/dma-mapping.c 	for_each_sg(sg, s, nents, i) {
sg               1025 arch/arm/mm/dma-mapping.c 	for_each_sg(sg, s, i, j)
sg               1040 arch/arm/mm/dma-mapping.c void arm_dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nents,
sg               1048 arch/arm/mm/dma-mapping.c 	for_each_sg(sg, s, nents, i)
sg               1059 arch/arm/mm/dma-mapping.c void arm_dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg,
sg               1066 arch/arm/mm/dma-mapping.c 	for_each_sg(sg, s, nents, i)
sg               1078 arch/arm/mm/dma-mapping.c void arm_dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg,
sg               1085 arch/arm/mm/dma-mapping.c 	for_each_sg(sg, s, nents, i)
sg               1615 arch/arm/mm/dma-mapping.c static int __map_sg_chunk(struct device *dev, struct scatterlist *sg,
sg               1634 arch/arm/mm/dma-mapping.c 	for (count = 0, s = sg; count < (size >> PAGE_SHIFT); s = sg_next(s)) {
sg               1658 arch/arm/mm/dma-mapping.c static int __iommu_map_sg(struct device *dev, struct scatterlist *sg, int nents,
sg               1662 arch/arm/mm/dma-mapping.c 	struct scatterlist *s = sg, *dma = sg, *start = sg;
sg               1699 arch/arm/mm/dma-mapping.c 	for_each_sg(sg, s, count, i)
sg               1716 arch/arm/mm/dma-mapping.c int arm_coherent_iommu_map_sg(struct device *dev, struct scatterlist *sg,
sg               1719 arch/arm/mm/dma-mapping.c 	return __iommu_map_sg(dev, sg, nents, dir, attrs, true);
sg               1734 arch/arm/mm/dma-mapping.c int arm_iommu_map_sg(struct device *dev, struct scatterlist *sg,
sg               1737 arch/arm/mm/dma-mapping.c 	return __iommu_map_sg(dev, sg, nents, dir, attrs, false);
sg               1740 arch/arm/mm/dma-mapping.c static void __iommu_unmap_sg(struct device *dev, struct scatterlist *sg,
sg               1747 arch/arm/mm/dma-mapping.c 	for_each_sg(sg, s, nents, i) {
sg               1767 arch/arm/mm/dma-mapping.c void arm_coherent_iommu_unmap_sg(struct device *dev, struct scatterlist *sg,
sg               1771 arch/arm/mm/dma-mapping.c 	__iommu_unmap_sg(dev, sg, nents, dir, attrs, true);
sg               1784 arch/arm/mm/dma-mapping.c void arm_iommu_unmap_sg(struct device *dev, struct scatterlist *sg, int nents,
sg               1788 arch/arm/mm/dma-mapping.c 	__iommu_unmap_sg(dev, sg, nents, dir, attrs, false);
sg               1798 arch/arm/mm/dma-mapping.c void arm_iommu_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg,
sg               1804 arch/arm/mm/dma-mapping.c 	for_each_sg(sg, s, nents, i)
sg               1816 arch/arm/mm/dma-mapping.c void arm_iommu_sync_sg_for_device(struct device *dev, struct scatterlist *sg,
sg               1822 arch/arm/mm/dma-mapping.c 	for_each_sg(sg, s, nents, i)
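
Example: arm_dma_map_sg() above maps entry by entry and, on failure, unwinds only the entries already mapped; the second for_each_sg() deliberately uses i, not nents, as its count. The same shape sketched with dma_map_page() standing in for the per-entry mapping:

	#include <linux/dma-mapping.h>
	#include <linux/scatterlist.h>

	static int example_map_sg_unwind(struct device *dev,
					 struct scatterlist *sgl, int nents)
	{
		struct scatterlist *s;
		int i, j;

		for_each_sg(sgl, s, nents, i) {
			s->dma_address = dma_map_page(dev, sg_page(s),
						      s->offset, s->length,
						      DMA_TO_DEVICE);
			if (dma_mapping_error(dev, s->dma_address))
				goto bad_mapping;
			sg_dma_len(s) = s->length;
		}
		return nents;

	bad_mapping:
		/* Undo only what succeeded: count i, not nents. */
		for_each_sg(sgl, s, i, j)
			dma_unmap_page(dev, s->dma_address, s->length,
				       DMA_TO_DEVICE);
		return 0;
	}
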
sg                167 arch/arm64/crypto/aes-ce-ccm-glue.c 			scatterwalk_start(&walk, sg_next(walk.sg));
sg                381 arch/arm64/crypto/ghash-ce-glue.c 			scatterwalk_start(&walk, sg_next(walk.sg));
sg                247 arch/ia64/hp/common/sba_iommu.c #define sba_sg_address(sg)	sg_virt((sg))
sg               1440 arch/ia64/hp/common/sba_iommu.c 	struct scatterlist *sg;
sg               1450 arch/ia64/hp/common/sba_iommu.c 		for_each_sg(sglist, sg, nents, filled) {
sg               1451 arch/ia64/hp/common/sba_iommu.c 			sg->dma_length = sg->length;
sg               1452 arch/ia64/hp/common/sba_iommu.c 			sg->dma_address = virt_to_phys(sba_sg_address(sg));
sg                611 arch/mips/jazz/jazzdma.c 	struct scatterlist *sg;
sg                613 arch/mips/jazz/jazzdma.c 	for_each_sg(sglist, sg, nents, i) {
sg                615 arch/mips/jazz/jazzdma.c 			arch_sync_dma_for_device(dev, sg_phys(sg), sg->length,
sg                617 arch/mips/jazz/jazzdma.c 		sg->dma_address = vdma_alloc(sg_phys(sg), sg->length);
sg                618 arch/mips/jazz/jazzdma.c 		if (sg->dma_address == DMA_MAPPING_ERROR)
sg                620 arch/mips/jazz/jazzdma.c 		sg_dma_len(sg) = sg->length;
sg                630 arch/mips/jazz/jazzdma.c 	struct scatterlist *sg;
sg                632 arch/mips/jazz/jazzdma.c 	for_each_sg(sglist, sg, nents, i) {
sg                634 arch/mips/jazz/jazzdma.c 			arch_sync_dma_for_cpu(dev, sg_phys(sg), sg->length,
sg                636 arch/mips/jazz/jazzdma.c 		vdma_free(sg->dma_address);
sg                655 arch/mips/jazz/jazzdma.c 	struct scatterlist *sg;
sg                658 arch/mips/jazz/jazzdma.c 	for_each_sg(sgl, sg, nents, i)
sg                659 arch/mips/jazz/jazzdma.c 		arch_sync_dma_for_device(dev, sg_phys(sg), sg->length, dir);
sg                665 arch/mips/jazz/jazzdma.c 	struct scatterlist *sg;
sg                668 arch/mips/jazz/jazzdma.c 	for_each_sg(sgl, sg, nents, i)
sg                669 arch/mips/jazz/jazzdma.c 		arch_sync_dma_for_cpu(dev, sg_phys(sg), sg->length, dir);
sg                376 arch/powerpc/include/asm/opal.h void opal_free_sg_list(struct opal_sg_list *sg);
sg                567 arch/powerpc/kernel/iommu.c 	struct scatterlist *sg;
sg                574 arch/powerpc/kernel/iommu.c 	sg = sglist;
sg                577 arch/powerpc/kernel/iommu.c 		dma_addr_t dma_handle = sg->dma_address;
sg                579 arch/powerpc/kernel/iommu.c 		if (sg->dma_length == 0)
sg                581 arch/powerpc/kernel/iommu.c 		npages = iommu_num_pages(dma_handle, sg->dma_length,
sg                584 arch/powerpc/kernel/iommu.c 		sg = sg_next(sg);
sg                166 arch/powerpc/platforms/512x/mpc512x_lpbfifo.c 	struct scatterlist sg;
sg                248 arch/powerpc/platforms/512x/mpc512x_lpbfifo.c 	sg_init_table(&sg, 1);
sg                250 arch/powerpc/platforms/512x/mpc512x_lpbfifo.c 	sg_dma_address(&sg) = dma_map_single(dma_dev->dev,
sg                252 arch/powerpc/platforms/512x/mpc512x_lpbfifo.c 	if (dma_mapping_error(dma_dev->dev, sg_dma_address(&sg)))
sg                255 arch/powerpc/platforms/512x/mpc512x_lpbfifo.c 	lpbfifo.ram_bus_addr = sg_dma_address(&sg); /* For freeing later */
sg                257 arch/powerpc/platforms/512x/mpc512x_lpbfifo.c 	sg_dma_len(&sg) = lpbfifo.req->size;
sg                259 arch/powerpc/platforms/512x/mpc512x_lpbfifo.c 	dma_tx = dmaengine_prep_slave_sg(lpbfifo.chan, &sg,
sg                327 arch/powerpc/platforms/512x/mpc512x_lpbfifo.c 	dma_unmap_single(dma_dev->dev, sg_dma_address(&sg),
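
Example: the lpbfifo code above builds a one-entry scatterlist by hand around dma_map_single(); with the sg_init_one() helper plus the dmaengine slave API the same shape looks like this (a sketch; callback wiring and cleanup are trimmed, and the channel/buffer belong to the caller):

	#include <linux/dmaengine.h>
	#include <linux/scatterlist.h>

	static int example_one_seg_slave(struct dma_chan *chan,
					 struct device *dev,
					 void *buf, size_t len)
	{
		struct dma_async_tx_descriptor *tx;
		struct scatterlist sg;

		sg_init_one(&sg, buf, len);
		sg_dma_address(&sg) = dma_map_single(dev, buf, len,
						     DMA_TO_DEVICE);
		if (dma_mapping_error(dev, sg_dma_address(&sg)))
			return -ENOMEM;
		sg_dma_len(&sg) = len;

		tx = dmaengine_prep_slave_sg(chan, &sg, 1, DMA_MEM_TO_DEV,
					     DMA_PREP_INTERRUPT);
		if (!tx) {
			dma_unmap_single(dev, sg_dma_address(&sg), len,
					 DMA_TO_DEVICE);
			return -EINVAL;
		}
		dmaengine_submit(tx);
		dma_async_issue_pending(chan);
		return 0;
	}
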
sg                 27 arch/powerpc/platforms/powernv/opal-sensor-groups.c 	struct attribute_group sg;
sg                129 arch/powerpc/platforms/powernv/opal-sensor-groups.c static int add_attr_group(const __be32 *ops, int len, struct sensor_group *sg,
sg                138 arch/powerpc/platforms/powernv/opal-sensor-groups.c 				add_attr(handle, &sg->sgattrs[count], j);
sg                139 arch/powerpc/platforms/powernv/opal-sensor-groups.c 				sg->sg.attrs[count] =
sg                140 arch/powerpc/platforms/powernv/opal-sensor-groups.c 					&sg->sgattrs[count].attr.attr;
sg                144 arch/powerpc/platforms/powernv/opal-sensor-groups.c 	return sysfs_create_group(sg_kobj, &sg->sg);
sg                162 arch/powerpc/platforms/powernv/opal-sensor-groups.c 	struct device_node *sg, *node;
sg                165 arch/powerpc/platforms/powernv/opal-sensor-groups.c 	sg = of_find_compatible_node(NULL, NULL, "ibm,opal-sensor-group");
sg                166 arch/powerpc/platforms/powernv/opal-sensor-groups.c 	if (!sg) {
sg                171 arch/powerpc/platforms/powernv/opal-sensor-groups.c 	sgs = kcalloc(of_get_child_count(sg), sizeof(*sgs), GFP_KERNEL);
sg                181 arch/powerpc/platforms/powernv/opal-sensor-groups.c 	for_each_child_of_node(sg, node) {
sg                198 arch/powerpc/platforms/powernv/opal-sensor-groups.c 		sgs[i].sg.attrs = kcalloc(nr_attrs + 1,
sg                199 arch/powerpc/platforms/powernv/opal-sensor-groups.c 					  sizeof(*sgs[i].sg.attrs),
sg                202 arch/powerpc/platforms/powernv/opal-sensor-groups.c 		if (!sgs[i].sg.attrs) {
sg                217 arch/powerpc/platforms/powernv/opal-sensor-groups.c 		sgs[i].sg.name = sgs[i].name;
sg                220 arch/powerpc/platforms/powernv/opal-sensor-groups.c 				sgs[i].sg.name);
sg                231 arch/powerpc/platforms/powernv/opal-sensor-groups.c 		kfree(sgs[i].sg.attrs);
sg               1049 arch/powerpc/platforms/powernv/opal.c 	struct opal_sg_list *sg, *first = NULL;
sg               1052 arch/powerpc/platforms/powernv/opal.c 	sg = kzalloc(PAGE_SIZE, GFP_KERNEL);
sg               1053 arch/powerpc/platforms/powernv/opal.c 	if (!sg)
sg               1056 arch/powerpc/platforms/powernv/opal.c 	first = sg;
sg               1062 arch/powerpc/platforms/powernv/opal.c 		sg->entry[i].data = cpu_to_be64(data);
sg               1063 arch/powerpc/platforms/powernv/opal.c 		sg->entry[i].length = cpu_to_be64(length);
sg               1073 arch/powerpc/platforms/powernv/opal.c 			sg->length = cpu_to_be64(
sg               1076 arch/powerpc/platforms/powernv/opal.c 			sg->next = cpu_to_be64(__pa(next));
sg               1077 arch/powerpc/platforms/powernv/opal.c 			sg = next;
sg               1084 arch/powerpc/platforms/powernv/opal.c 	sg->length = cpu_to_be64(i * sizeof(struct opal_sg_entry) + 16);
sg               1094 arch/powerpc/platforms/powernv/opal.c void opal_free_sg_list(struct opal_sg_list *sg)
sg               1096 arch/powerpc/platforms/powernv/opal.c 	while (sg) {
sg               1097 arch/powerpc/platforms/powernv/opal.c 		uint64_t next = be64_to_cpu(sg->next);
sg               1099 arch/powerpc/platforms/powernv/opal.c 		kfree(sg);
sg               1102 arch/powerpc/platforms/powernv/opal.c 			sg = __va(next);
sg               1104 arch/powerpc/platforms/powernv/opal.c 			sg = NULL;
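
Example: opal_free_sg_list() above walks a physically chained list: each page carries a 16-byte header (length, next) followed by packed entries, with 'next' a physical address converted back via __va(). A sketch of walking such a chain, assuming only the opal_sg_list/opal_sg_entry fields visible in the code above (powerpc only):

	#include <asm/opal.h>	/* struct opal_sg_list / opal_sg_entry */

	static void example_walk_opal_sg(struct opal_sg_list *sg)
	{
		while (sg) {
			u64 next = be64_to_cpu(sg->next);
			int i, n = (be64_to_cpu(sg->length) - 16) /
				   sizeof(struct opal_sg_entry);

			for (i = 0; i < n; i++)
				pr_info("chunk 0x%llx, %llu bytes\n",
					be64_to_cpu(sg->entry[i].data),
					be64_to_cpu(sg->entry[i].length));

			sg = next ? __va(next) : NULL;
		}
	}
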
sg                640 arch/powerpc/platforms/ps3/system-bus.c 	struct scatterlist *sg;
sg                643 arch/powerpc/platforms/ps3/system-bus.c 	for_each_sg(sgl, sg, nents, i) {
sg                644 arch/powerpc/platforms/ps3/system-bus.c 		int result = ps3_dma_map(dev->d_region, sg_phys(sg),
sg                645 arch/powerpc/platforms/ps3/system-bus.c 					sg->length, &sg->dma_address, 0);
sg                653 arch/powerpc/platforms/ps3/system-bus.c 		sg->dma_length = sg->length;
sg                660 arch/powerpc/platforms/ps3/system-bus.c static int ps3_ioc0_map_sg(struct device *_dev, struct scatterlist *sg,
sg                669 arch/powerpc/platforms/ps3/system-bus.c static void ps3_sb_unmap_sg(struct device *_dev, struct scatterlist *sg,
sg                677 arch/powerpc/platforms/ps3/system-bus.c static void ps3_ioc0_unmap_sg(struct device *_dev, struct scatterlist *sg,
sg                110 arch/powerpc/platforms/pseries/ibmebus.c 	struct scatterlist *sg;
sg                113 arch/powerpc/platforms/pseries/ibmebus.c 	for_each_sg(sgl, sg, nents, i) {
sg                114 arch/powerpc/platforms/pseries/ibmebus.c 		sg->dma_address = (dma_addr_t) sg_virt(sg);
sg                115 arch/powerpc/platforms/pseries/ibmebus.c 		sg->dma_length = sg->length;
sg                122 arch/powerpc/platforms/pseries/ibmebus.c 			     struct scatterlist *sg,
sg                835 arch/s390/crypto/aes_s390.c static void gcm_walk_start(struct gcm_sg_walk *gw, struct scatterlist *sg,
sg                840 arch/s390/crypto/aes_s390.c 	scatterwalk_start(&gw->walk, sg);
sg                849 arch/s390/crypto/aes_s390.c 		nextsg = sg_next(gw->walk.sg);
sg                124 arch/s390/include/asm/gmap.h int gmap_shadow_valid(struct gmap *sg, unsigned long asce, int edat_level);
sg                125 arch/s390/include/asm/gmap.h int gmap_shadow_r2t(struct gmap *sg, unsigned long saddr, unsigned long r2t,
sg                127 arch/s390/include/asm/gmap.h int gmap_shadow_r3t(struct gmap *sg, unsigned long saddr, unsigned long r3t,
sg                129 arch/s390/include/asm/gmap.h int gmap_shadow_sgt(struct gmap *sg, unsigned long saddr, unsigned long sgt,
sg                131 arch/s390/include/asm/gmap.h int gmap_shadow_pgt(struct gmap *sg, unsigned long saddr, unsigned long pgt,
sg                133 arch/s390/include/asm/gmap.h int gmap_shadow_pgt_lookup(struct gmap *sg, unsigned long saddr,
sg                135 arch/s390/include/asm/gmap.h int gmap_shadow_page(struct gmap *sg, unsigned long saddr, pte_t pte);
sg                982 arch/s390/kvm/gaccess.c static int kvm_s390_shadow_tables(struct gmap *sg, unsigned long saddr,
sg                994 arch/s390/kvm/gaccess.c 	parent = sg->parent;
sg                996 arch/s390/kvm/gaccess.c 	asce.val = sg->orig_asce;
sg               1046 arch/s390/kvm/gaccess.c 		if (sg->edat_level >= 1)
sg               1050 arch/s390/kvm/gaccess.c 		rc = gmap_shadow_r2t(sg, saddr, rfte.val, *fake);
sg               1071 arch/s390/kvm/gaccess.c 		if (sg->edat_level >= 1)
sg               1076 arch/s390/kvm/gaccess.c 		rc = gmap_shadow_r3t(sg, saddr, rste.val, *fake);
sg               1095 arch/s390/kvm/gaccess.c 		if (rtte.cr && asce.p && sg->edat_level >= 2)
sg               1097 arch/s390/kvm/gaccess.c 		if (rtte.fc && sg->edat_level >= 2) {
sg               1106 arch/s390/kvm/gaccess.c 		if (sg->edat_level >= 1)
sg               1111 arch/s390/kvm/gaccess.c 		rc = gmap_shadow_sgt(sg, saddr, rtte.val, *fake);
sg               1133 arch/s390/kvm/gaccess.c 		if (ste.fc && sg->edat_level >= 1) {
sg               1142 arch/s390/kvm/gaccess.c 		rc = gmap_shadow_pgt(sg, saddr, ste.val, *fake);
sg               1164 arch/s390/kvm/gaccess.c int kvm_s390_shadow_fault(struct kvm_vcpu *vcpu, struct gmap *sg,
sg               1173 arch/s390/kvm/gaccess.c 	down_read(&sg->mm->mmap_sem);
sg               1181 arch/s390/kvm/gaccess.c 	rc = gmap_shadow_pgt_lookup(sg, saddr, &pgt, &dat_protection, &fake);
sg               1183 arch/s390/kvm/gaccess.c 		rc = kvm_s390_shadow_tables(sg, saddr, &pgt, &dat_protection,
sg               1192 arch/s390/kvm/gaccess.c 		rc = gmap_read_table(sg->parent, pgt + vaddr.px * 8, &pte.val);
sg               1200 arch/s390/kvm/gaccess.c 		rc = gmap_shadow_page(sg, saddr, __pte(pte.val));
sg               1202 arch/s390/kvm/gaccess.c 	up_read(&sg->mm->mmap_sem);
sg                241 arch/s390/mm/gmap.c 	struct gmap *sg, *next;
sg                247 arch/s390/mm/gmap.c 		list_for_each_entry_safe(sg, next, &gmap->children, list) {
sg                248 arch/s390/mm/gmap.c 			list_del(&sg->list);
sg                249 arch/s390/mm/gmap.c 			gmap_put(sg);
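
Example: in the s390 files, sg names a shadow gmap (a nested-guest address space), not a scatterlist. The teardown above needs the _safe list iterator because each child is unlinked while the list is being walked; the pattern in isolation, with a toy struct:

	#include <linux/list.h>
	#include <linux/slab.h>

	struct child { struct list_head list; /* payload */ };

	static void example_drain(struct list_head *children)
	{
		struct child *c, *next;

		/* _safe caches the next node, so list_del() here is legal. */
		list_for_each_entry_safe(c, next, children, list) {
			list_del(&c->list);
			kfree(c);
		}
	}
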
sg               1177 arch/s390/mm/gmap.c static inline void gmap_insert_rmap(struct gmap *sg, unsigned long vmaddr,
sg               1182 arch/s390/mm/gmap.c 	BUG_ON(!gmap_is_shadow(sg));
sg               1183 arch/s390/mm/gmap.c 	slot = radix_tree_lookup_slot(&sg->host_to_rmap, vmaddr >> PAGE_SHIFT);
sg               1186 arch/s390/mm/gmap.c 							&sg->guest_table_lock);
sg               1187 arch/s390/mm/gmap.c 		radix_tree_replace_slot(&sg->host_to_rmap, slot, rmap);
sg               1190 arch/s390/mm/gmap.c 		radix_tree_insert(&sg->host_to_rmap, vmaddr >> PAGE_SHIFT,
sg               1205 arch/s390/mm/gmap.c static int gmap_protect_rmap(struct gmap *sg, unsigned long raddr,
sg               1215 arch/s390/mm/gmap.c 	BUG_ON(!gmap_is_shadow(sg));
sg               1216 arch/s390/mm/gmap.c 	parent = sg->parent;
sg               1233 arch/s390/mm/gmap.c 			spin_lock(&sg->guest_table_lock);
sg               1237 arch/s390/mm/gmap.c 				gmap_insert_rmap(sg, vmaddr, rmap);
sg               1238 arch/s390/mm/gmap.c 			spin_unlock(&sg->guest_table_lock);
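
Example: gmap_insert_rmap() above chains a new rmap in front of any existing entry for the same host page: look the slot up under guest_table_lock, then either replace the slot in place or insert fresh (an earlier radix_tree_preload() keeps the insert from failing). The chain-in-front idiom with toy types (a sketch; the lock protocol follows the code above):

	#include <linux/radix-tree.h>
	#include <linux/spinlock.h>

	struct rmap { struct rmap *next; unsigned long raddr; };

	static void example_insert_rmap(struct radix_tree_root *tree,
					spinlock_t *lock, unsigned long index,
					struct rmap *new)
	{
		struct rmap *old;
		void __rcu **slot;

		slot = radix_tree_lookup_slot(tree, index);
		if (slot) {
			old = radix_tree_deref_slot_protected(slot, lock);
			new->next = old;		/* chain in front */
			radix_tree_replace_slot(tree, slot, new);
		} else {
			new->next = NULL;
			radix_tree_insert(tree, index, new);	/* preloaded */
		}
	}
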
sg               1286 arch/s390/mm/gmap.c static void gmap_unshadow_page(struct gmap *sg, unsigned long raddr)
sg               1290 arch/s390/mm/gmap.c 	BUG_ON(!gmap_is_shadow(sg));
sg               1291 arch/s390/mm/gmap.c 	table = gmap_table_walk(sg, raddr, 0); /* get page table pointer */
sg               1294 arch/s390/mm/gmap.c 	gmap_call_notifier(sg, raddr, raddr + _PAGE_SIZE - 1);
sg               1295 arch/s390/mm/gmap.c 	ptep_unshadow_pte(sg->mm, raddr, (pte_t *) table);
sg               1306 arch/s390/mm/gmap.c static void __gmap_unshadow_pgt(struct gmap *sg, unsigned long raddr,
sg               1311 arch/s390/mm/gmap.c 	BUG_ON(!gmap_is_shadow(sg));
sg               1323 arch/s390/mm/gmap.c static void gmap_unshadow_pgt(struct gmap *sg, unsigned long raddr)
sg               1328 arch/s390/mm/gmap.c 	BUG_ON(!gmap_is_shadow(sg));
sg               1329 arch/s390/mm/gmap.c 	ste = gmap_table_walk(sg, raddr, 1); /* get segment pointer */
sg               1332 arch/s390/mm/gmap.c 	gmap_call_notifier(sg, raddr, raddr + _SEGMENT_SIZE - 1);
sg               1337 arch/s390/mm/gmap.c 	__gmap_unshadow_pgt(sg, raddr, pgt);
sg               1352 arch/s390/mm/gmap.c static void __gmap_unshadow_sgt(struct gmap *sg, unsigned long raddr,
sg               1359 arch/s390/mm/gmap.c 	BUG_ON(!gmap_is_shadow(sg));
sg               1365 arch/s390/mm/gmap.c 		__gmap_unshadow_pgt(sg, raddr, pgt);
sg               1380 arch/s390/mm/gmap.c static void gmap_unshadow_sgt(struct gmap *sg, unsigned long raddr)
sg               1385 arch/s390/mm/gmap.c 	BUG_ON(!gmap_is_shadow(sg));
sg               1386 arch/s390/mm/gmap.c 	r3e = gmap_table_walk(sg, raddr, 2); /* get region-3 pointer */
sg               1389 arch/s390/mm/gmap.c 	gmap_call_notifier(sg, raddr, raddr + _REGION3_SIZE - 1);
sg               1394 arch/s390/mm/gmap.c 	__gmap_unshadow_sgt(sg, raddr, sgt);
sg               1409 arch/s390/mm/gmap.c static void __gmap_unshadow_r3t(struct gmap *sg, unsigned long raddr,
sg               1416 arch/s390/mm/gmap.c 	BUG_ON(!gmap_is_shadow(sg));
sg               1422 arch/s390/mm/gmap.c 		__gmap_unshadow_sgt(sg, raddr, sgt);
sg               1437 arch/s390/mm/gmap.c static void gmap_unshadow_r3t(struct gmap *sg, unsigned long raddr)
sg               1442 arch/s390/mm/gmap.c 	BUG_ON(!gmap_is_shadow(sg));
sg               1443 arch/s390/mm/gmap.c 	r2e = gmap_table_walk(sg, raddr, 3); /* get region-2 pointer */
sg               1446 arch/s390/mm/gmap.c 	gmap_call_notifier(sg, raddr, raddr + _REGION2_SIZE - 1);
sg               1451 arch/s390/mm/gmap.c 	__gmap_unshadow_r3t(sg, raddr, r3t);
sg               1466 arch/s390/mm/gmap.c static void __gmap_unshadow_r2t(struct gmap *sg, unsigned long raddr,
sg               1473 arch/s390/mm/gmap.c 	BUG_ON(!gmap_is_shadow(sg));
sg               1479 arch/s390/mm/gmap.c 		__gmap_unshadow_r3t(sg, raddr, r3t);
sg               1494 arch/s390/mm/gmap.c static void gmap_unshadow_r2t(struct gmap *sg, unsigned long raddr)
sg               1499 arch/s390/mm/gmap.c 	BUG_ON(!gmap_is_shadow(sg));
sg               1500 arch/s390/mm/gmap.c 	r1e = gmap_table_walk(sg, raddr, 4); /* get region-1 pointer */
sg               1503 arch/s390/mm/gmap.c 	gmap_call_notifier(sg, raddr, raddr + _REGION1_SIZE - 1);
sg               1508 arch/s390/mm/gmap.c 	__gmap_unshadow_r2t(sg, raddr, r2t);
sg               1523 arch/s390/mm/gmap.c static void __gmap_unshadow_r1t(struct gmap *sg, unsigned long raddr,
sg               1530 arch/s390/mm/gmap.c 	BUG_ON(!gmap_is_shadow(sg));
sg               1536 arch/s390/mm/gmap.c 		__gmap_unshadow_r2t(sg, raddr, r2t);
sg               1553 arch/s390/mm/gmap.c static void gmap_unshadow(struct gmap *sg)
sg               1557 arch/s390/mm/gmap.c 	BUG_ON(!gmap_is_shadow(sg));
sg               1558 arch/s390/mm/gmap.c 	if (sg->removed)
sg               1560 arch/s390/mm/gmap.c 	sg->removed = 1;
sg               1561 arch/s390/mm/gmap.c 	gmap_call_notifier(sg, 0, -1UL);
sg               1562 arch/s390/mm/gmap.c 	gmap_flush_tlb(sg);
sg               1563 arch/s390/mm/gmap.c 	table = (unsigned long *)(sg->asce & _ASCE_ORIGIN);
sg               1564 arch/s390/mm/gmap.c 	switch (sg->asce & _ASCE_TYPE_MASK) {
sg               1566 arch/s390/mm/gmap.c 		__gmap_unshadow_r1t(sg, 0, table);
sg               1569 arch/s390/mm/gmap.c 		__gmap_unshadow_r2t(sg, 0, table);
sg               1572 arch/s390/mm/gmap.c 		__gmap_unshadow_r3t(sg, 0, table);
sg               1575 arch/s390/mm/gmap.c 		__gmap_unshadow_sgt(sg, 0, table);
sg               1593 arch/s390/mm/gmap.c 	struct gmap *sg;
sg               1595 arch/s390/mm/gmap.c 	list_for_each_entry(sg, &parent->children, list) {
sg               1596 arch/s390/mm/gmap.c 		if (sg->orig_asce != asce || sg->edat_level != edat_level ||
sg               1597 arch/s390/mm/gmap.c 		    sg->removed)
sg               1599 arch/s390/mm/gmap.c 		if (!sg->initialized)
sg               1601 arch/s390/mm/gmap.c 		refcount_inc(&sg->ref_count);
sg               1602 arch/s390/mm/gmap.c 		return sg;
sg               1619 arch/s390/mm/gmap.c int gmap_shadow_valid(struct gmap *sg, unsigned long asce, int edat_level)
sg               1621 arch/s390/mm/gmap.c 	if (sg->removed)
sg               1623 arch/s390/mm/gmap.c 	return sg->orig_asce == asce && sg->edat_level == edat_level;
sg               1645 arch/s390/mm/gmap.c 	struct gmap *sg, *new;
sg               1652 arch/s390/mm/gmap.c 	sg = gmap_find_shadow(parent, asce, edat_level);
sg               1654 arch/s390/mm/gmap.c 	if (sg)
sg               1655 arch/s390/mm/gmap.c 		return sg;
sg               1670 arch/s390/mm/gmap.c 	sg = gmap_find_shadow(parent, asce, edat_level);
sg               1671 arch/s390/mm/gmap.c 	if (sg) {
sg               1674 arch/s390/mm/gmap.c 		return sg;
sg               1678 arch/s390/mm/gmap.c 		list_for_each_entry(sg, &parent->children, list) {
sg               1679 arch/s390/mm/gmap.c 			if (sg->orig_asce & _ASCE_REAL_SPACE) {
sg               1680 arch/s390/mm/gmap.c 				spin_lock(&sg->guest_table_lock);
sg               1681 arch/s390/mm/gmap.c 				gmap_unshadow(sg);
sg               1682 arch/s390/mm/gmap.c 				spin_unlock(&sg->guest_table_lock);
sg               1683 arch/s390/mm/gmap.c 				list_del(&sg->list);
sg               1684 arch/s390/mm/gmap.c 				gmap_put(sg);
sg               1734 arch/s390/mm/gmap.c int gmap_shadow_r2t(struct gmap *sg, unsigned long saddr, unsigned long r2t,
sg               1742 arch/s390/mm/gmap.c 	BUG_ON(!gmap_is_shadow(sg));
sg               1752 arch/s390/mm/gmap.c 	spin_lock(&sg->guest_table_lock);
sg               1753 arch/s390/mm/gmap.c 	table = gmap_table_walk(sg, saddr, 4); /* get region-1 pointer */
sg               1769 arch/s390/mm/gmap.c 	if (sg->edat_level >= 1)
sg               1771 arch/s390/mm/gmap.c 	list_add(&page->lru, &sg->crst_list);
sg               1775 arch/s390/mm/gmap.c 		spin_unlock(&sg->guest_table_lock);
sg               1778 arch/s390/mm/gmap.c 	spin_unlock(&sg->guest_table_lock);
sg               1784 arch/s390/mm/gmap.c 	rc = gmap_protect_rmap(sg, raddr, origin + offset, len);
sg               1785 arch/s390/mm/gmap.c 	spin_lock(&sg->guest_table_lock);
sg               1787 arch/s390/mm/gmap.c 		table = gmap_table_walk(sg, saddr, 4);
sg               1794 arch/s390/mm/gmap.c 		gmap_unshadow_r2t(sg, raddr);
sg               1796 arch/s390/mm/gmap.c 	spin_unlock(&sg->guest_table_lock);
sg               1799 arch/s390/mm/gmap.c 	spin_unlock(&sg->guest_table_lock);
sg               1818 arch/s390/mm/gmap.c int gmap_shadow_r3t(struct gmap *sg, unsigned long saddr, unsigned long r3t,
sg               1826 arch/s390/mm/gmap.c 	BUG_ON(!gmap_is_shadow(sg));
sg               1836 arch/s390/mm/gmap.c 	spin_lock(&sg->guest_table_lock);
sg               1837 arch/s390/mm/gmap.c 	table = gmap_table_walk(sg, saddr, 3); /* get region-2 pointer */
sg               1853 arch/s390/mm/gmap.c 	if (sg->edat_level >= 1)
sg               1855 arch/s390/mm/gmap.c 	list_add(&page->lru, &sg->crst_list);
sg               1859 arch/s390/mm/gmap.c 		spin_unlock(&sg->guest_table_lock);
sg               1862 arch/s390/mm/gmap.c 	spin_unlock(&sg->guest_table_lock);
sg               1868 arch/s390/mm/gmap.c 	rc = gmap_protect_rmap(sg, raddr, origin + offset, len);
sg               1869 arch/s390/mm/gmap.c 	spin_lock(&sg->guest_table_lock);
sg               1871 arch/s390/mm/gmap.c 		table = gmap_table_walk(sg, saddr, 3);
sg               1878 arch/s390/mm/gmap.c 		gmap_unshadow_r3t(sg, raddr);
sg               1880 arch/s390/mm/gmap.c 	spin_unlock(&sg->guest_table_lock);
sg               1883 arch/s390/mm/gmap.c 	spin_unlock(&sg->guest_table_lock);
sg               1902 arch/s390/mm/gmap.c int gmap_shadow_sgt(struct gmap *sg, unsigned long saddr, unsigned long sgt,
sg               1910 arch/s390/mm/gmap.c 	BUG_ON(!gmap_is_shadow(sg) || (sgt & _REGION3_ENTRY_LARGE));
sg               1920 arch/s390/mm/gmap.c 	spin_lock(&sg->guest_table_lock);
sg               1921 arch/s390/mm/gmap.c 	table = gmap_table_walk(sg, saddr, 2); /* get region-3 pointer */
sg               1937 arch/s390/mm/gmap.c 	if (sg->edat_level >= 1)
sg               1939 arch/s390/mm/gmap.c 	list_add(&page->lru, &sg->crst_list);
sg               1943 arch/s390/mm/gmap.c 		spin_unlock(&sg->guest_table_lock);
sg               1946 arch/s390/mm/gmap.c 	spin_unlock(&sg->guest_table_lock);
sg               1952 arch/s390/mm/gmap.c 	rc = gmap_protect_rmap(sg, raddr, origin + offset, len);
sg               1953 arch/s390/mm/gmap.c 	spin_lock(&sg->guest_table_lock);
sg               1955 arch/s390/mm/gmap.c 		table = gmap_table_walk(sg, saddr, 2);
sg               1962 arch/s390/mm/gmap.c 		gmap_unshadow_sgt(sg, raddr);
sg               1964 arch/s390/mm/gmap.c 	spin_unlock(&sg->guest_table_lock);
sg               1967 arch/s390/mm/gmap.c 	spin_unlock(&sg->guest_table_lock);
sg               1986 arch/s390/mm/gmap.c int gmap_shadow_pgt_lookup(struct gmap *sg, unsigned long saddr,
sg               1994 arch/s390/mm/gmap.c 	BUG_ON(!gmap_is_shadow(sg));
sg               1995 arch/s390/mm/gmap.c 	spin_lock(&sg->guest_table_lock);
sg               1996 arch/s390/mm/gmap.c 	table = gmap_table_walk(sg, saddr, 1); /* get segment pointer */
sg               2007 arch/s390/mm/gmap.c 	spin_unlock(&sg->guest_table_lock);
sg               2026 arch/s390/mm/gmap.c int gmap_shadow_pgt(struct gmap *sg, unsigned long saddr, unsigned long pgt,
sg               2034 arch/s390/mm/gmap.c 	BUG_ON(!gmap_is_shadow(sg) || (pgt & _SEGMENT_ENTRY_LARGE));
sg               2036 arch/s390/mm/gmap.c 	page = page_table_alloc_pgste(sg->mm);
sg               2044 arch/s390/mm/gmap.c 	spin_lock(&sg->guest_table_lock);
sg               2045 arch/s390/mm/gmap.c 	table = gmap_table_walk(sg, saddr, 1); /* get segment pointer */
sg               2060 arch/s390/mm/gmap.c 	list_add(&page->lru, &sg->pt_list);
sg               2064 arch/s390/mm/gmap.c 		spin_unlock(&sg->guest_table_lock);
sg               2067 arch/s390/mm/gmap.c 	spin_unlock(&sg->guest_table_lock);
sg               2071 arch/s390/mm/gmap.c 	rc = gmap_protect_rmap(sg, raddr, origin, PAGE_SIZE);
sg               2072 arch/s390/mm/gmap.c 	spin_lock(&sg->guest_table_lock);
sg               2074 arch/s390/mm/gmap.c 		table = gmap_table_walk(sg, saddr, 1);
sg               2081 arch/s390/mm/gmap.c 		gmap_unshadow_pgt(sg, raddr);
sg               2083 arch/s390/mm/gmap.c 	spin_unlock(&sg->guest_table_lock);
sg               2086 arch/s390/mm/gmap.c 	spin_unlock(&sg->guest_table_lock);
sg               2105 arch/s390/mm/gmap.c int gmap_shadow_page(struct gmap *sg, unsigned long saddr, pte_t pte)
sg               2115 arch/s390/mm/gmap.c 	BUG_ON(!gmap_is_shadow(sg));
sg               2116 arch/s390/mm/gmap.c 	parent = sg->parent;
sg               2137 arch/s390/mm/gmap.c 			spin_lock(&sg->guest_table_lock);
sg               2139 arch/s390/mm/gmap.c 			tptep = (pte_t *) gmap_table_walk(sg, saddr, 0);
sg               2141 arch/s390/mm/gmap.c 				spin_unlock(&sg->guest_table_lock);
sg               2146 arch/s390/mm/gmap.c 			rc = ptep_shadow_pte(sg->mm, saddr, sptep, tptep, pte);
sg               2149 arch/s390/mm/gmap.c 				gmap_insert_rmap(sg, vmaddr, rmap);
sg               2154 arch/s390/mm/gmap.c 			spin_unlock(&sg->guest_table_lock);
sg               2173 arch/s390/mm/gmap.c static void gmap_shadow_notify(struct gmap *sg, unsigned long vmaddr,
sg               2179 arch/s390/mm/gmap.c 	BUG_ON(!gmap_is_shadow(sg));
sg               2181 arch/s390/mm/gmap.c 	spin_lock(&sg->guest_table_lock);
sg               2182 arch/s390/mm/gmap.c 	if (sg->removed) {
sg               2183 arch/s390/mm/gmap.c 		spin_unlock(&sg->guest_table_lock);
sg               2187 arch/s390/mm/gmap.c 	start = sg->orig_asce & _ASCE_ORIGIN;
sg               2188 arch/s390/mm/gmap.c 	end = start + ((sg->orig_asce & _ASCE_TABLE_LENGTH) + 1) * PAGE_SIZE;
sg               2189 arch/s390/mm/gmap.c 	if (!(sg->orig_asce & _ASCE_REAL_SPACE) && gaddr >= start &&
sg               2192 arch/s390/mm/gmap.c 		gmap_unshadow(sg);
sg               2193 arch/s390/mm/gmap.c 		spin_unlock(&sg->guest_table_lock);
sg               2194 arch/s390/mm/gmap.c 		list_del(&sg->list);
sg               2195 arch/s390/mm/gmap.c 		gmap_put(sg);
sg               2199 arch/s390/mm/gmap.c 	head = radix_tree_delete(&sg->host_to_rmap, vmaddr >> PAGE_SHIFT);
sg               2205 arch/s390/mm/gmap.c 			gmap_unshadow_r2t(sg, raddr);
sg               2208 arch/s390/mm/gmap.c 			gmap_unshadow_r3t(sg, raddr);
sg               2211 arch/s390/mm/gmap.c 			gmap_unshadow_sgt(sg, raddr);
sg               2214 arch/s390/mm/gmap.c 			gmap_unshadow_pgt(sg, raddr);
sg               2217 arch/s390/mm/gmap.c 			gmap_unshadow_page(sg, raddr);
sg               2222 arch/s390/mm/gmap.c 	spin_unlock(&sg->guest_table_lock);
sg               2240 arch/s390/mm/gmap.c 	struct gmap *gmap, *sg, *next;
sg               2257 arch/s390/mm/gmap.c 			list_for_each_entry_safe(sg, next,
sg               2259 arch/s390/mm/gmap.c 				gmap_shadow_notify(sg, vmaddr, gaddr);
sg                437 arch/s390/pci/pci_dma.c static int __s390_dma_map_sg(struct device *dev, struct scatterlist *sg,
sg                457 arch/s390/pci/pci_dma.c 	for (s = sg; dma_addr < dma_addr_base + size; s = sg_next(s)) {
sg                484 arch/s390/pci/pci_dma.c static int s390_dma_map_sg(struct device *dev, struct scatterlist *sg,
sg                488 arch/s390/pci/pci_dma.c 	struct scatterlist *s = sg, *start = sg, *dma = sg;
sg                524 arch/s390/pci/pci_dma.c 	for_each_sg(sg, s, count, i)
sg                531 arch/s390/pci/pci_dma.c static void s390_dma_unmap_sg(struct device *dev, struct scatterlist *sg,
sg                538 arch/s390/pci/pci_dma.c 	for_each_sg(sg, s, nr_elements, i) {
sg                107 arch/sparc/include/asm/ldc.h 	       struct scatterlist *sg, int num_sg,
sg                590 arch/sparc/kernel/iommu.c static unsigned long fetch_sg_ctx(struct iommu *iommu, struct scatterlist *sg)
sg                599 arch/sparc/kernel/iommu.c 		bus_addr = sg->dma_address & IO_PAGE_MASK;
sg                613 arch/sparc/kernel/iommu.c 	struct scatterlist *sg;
sg                626 arch/sparc/kernel/iommu.c 	sg = sglist;
sg                628 arch/sparc/kernel/iommu.c 		dma_addr_t dma_handle = sg->dma_address;
sg                629 arch/sparc/kernel/iommu.c 		unsigned int len = sg->dma_length;
sg                652 arch/sparc/kernel/iommu.c 		sg = sg_next(sg);
sg                705 arch/sparc/kernel/iommu.c 	struct scatterlist *sg, *sgprv;
sg                731 arch/sparc/kernel/iommu.c 	for_each_sg(sglist, sg, nelems, i) {
sg                732 arch/sparc/kernel/iommu.c 		if (sg->dma_length == 0)
sg                734 arch/sparc/kernel/iommu.c 		sgprv = sg;
sg                 42 arch/sparc/kernel/iommu_common.h 				   struct scatterlist *sg)
sg                 45 arch/sparc/kernel/iommu_common.h 	int nr = iommu_num_pages(paddr, outs->dma_length + sg->length,
sg               2120 arch/sparc/kernel/ldc.c static int sg_count_one(struct scatterlist *sg)
sg               2122 arch/sparc/kernel/ldc.c 	unsigned long base = page_to_pfn(sg_page(sg)) << PAGE_SHIFT;
sg               2123 arch/sparc/kernel/ldc.c 	long len = sg->length;
sg               2125 arch/sparc/kernel/ldc.c 	if ((sg->offset | len) & (8UL - 1))
sg               2128 arch/sparc/kernel/ldc.c 	return pages_in_region(base + sg->offset, len);
sg               2131 arch/sparc/kernel/ldc.c static int sg_count_pages(struct scatterlist *sg, int num_sg)
sg               2138 arch/sparc/kernel/ldc.c 		int err = sg_count_one(sg + i);
sg               2148 arch/sparc/kernel/ldc.c 	       struct scatterlist *sg, int num_sg,
sg               2162 arch/sparc/kernel/ldc.c 	err = sg_count_pages(sg, num_sg);
sg               2184 arch/sparc/kernel/ldc.c 	for_each_sg(sg, s, num_sg, i) {
sg                630 arch/sparc/kernel/pci_sun4v.c 	struct scatterlist *sg;
sg                646 arch/sparc/kernel/pci_sun4v.c 	sg = sglist;
sg                648 arch/sparc/kernel/pci_sun4v.c 		dma_addr_t dma_handle = sg->dma_address;
sg                649 arch/sparc/kernel/pci_sun4v.c 		unsigned int len = sg->dma_length;
sg                670 arch/sparc/kernel/pci_sun4v.c 		sg = sg_next(sg);
sg                165 arch/sparc/mm/io-unit.c 	struct scatterlist *sg;
sg                171 arch/sparc/mm/io-unit.c 	for_each_sg(sgl, sg, nents, i) {
sg                172 arch/sparc/mm/io-unit.c 		sg->dma_address = iounit_get_area(iounit, (unsigned long) sg_virt(sg), sg->length);
sg                173 arch/sparc/mm/io-unit.c 		sg->dma_length = sg->length;
sg                199 arch/sparc/mm/io-unit.c 	struct scatterlist *sg;
sg                203 arch/sparc/mm/io-unit.c 	for_each_sg(sgl, sg, nents, i) {
sg                204 arch/sparc/mm/io-unit.c 		len = ((sg->dma_address & ~PAGE_MASK) + sg->length + (PAGE_SIZE-1)) >> PAGE_SHIFT;
sg                205 arch/sparc/mm/io-unit.c 		vaddr = (sg->dma_address - IOUNIT_DMA_BASE) >> PAGE_SHIFT;
sg                247 arch/sparc/mm/iommu.c 	struct scatterlist *sg;
sg                250 arch/sparc/mm/iommu.c 	for_each_sg(sgl, sg, nents, j) {
sg                251 arch/sparc/mm/iommu.c 		sg->dma_address =__sbus_iommu_map_page(dev, sg_page(sg),
sg                252 arch/sparc/mm/iommu.c 				sg->offset, sg->length, per_page_flush);
sg                253 arch/sparc/mm/iommu.c 		if (sg->dma_address == DMA_MAPPING_ERROR)
sg                255 arch/sparc/mm/iommu.c 		sg->dma_length = sg->length;
sg                296 arch/sparc/mm/iommu.c 	struct scatterlist *sg;
sg                299 arch/sparc/mm/iommu.c 	for_each_sg(sgl, sg, nents, i) {
sg                300 arch/sparc/mm/iommu.c 		sbus_iommu_unmap_page(dev, sg->dma_address, sg->length, dir,
sg                302 arch/sparc/mm/iommu.c 		sg->dma_address = 0x21212121;
sg                 86 arch/unicore32/include/asm/assembler.h 	.else;	.ifc	\cond, sg
sg                279 arch/x86/kernel/amd_gart_64.c static void gart_unmap_sg(struct device *dev, struct scatterlist *sg, int nents,
sg                285 arch/x86/kernel/amd_gart_64.c 	for_each_sg(sg, s, nents, i) {
sg                293 arch/x86/kernel/amd_gart_64.c static int dma_map_sg_nonforce(struct device *dev, struct scatterlist *sg,
sg                303 arch/x86/kernel/amd_gart_64.c 	for_each_sg(sg, s, nents, i) {
sg                310 arch/x86/kernel/amd_gart_64.c 					gart_unmap_sg(dev, sg, i, dir, 0);
sg                312 arch/x86/kernel/amd_gart_64.c 				sg[0].dma_length = 0;
sg                380 arch/x86/kernel/amd_gart_64.c static int gart_map_sg(struct device *dev, struct scatterlist *sg, int nents,
sg                394 arch/x86/kernel/amd_gart_64.c 	start_sg	= sg;
sg                395 arch/x86/kernel/amd_gart_64.c 	sgmap		= sg;
sg                400 arch/x86/kernel/amd_gart_64.c 	for_each_sg(sg, s, nents, i) {
sg                448 arch/x86/kernel/amd_gart_64.c 	gart_unmap_sg(dev, sg, out, dir, 0);
sg                452 arch/x86/kernel/amd_gart_64.c 		out = dma_map_sg_nonforce(dev, sg, nents, dir);
sg                460 arch/x86/kernel/amd_gart_64.c 	for_each_sg(sg, s, nents, i)
sg                349 arch/x86/kernel/pci-calgary_64.c static int calgary_map_sg(struct device *dev, struct scatterlist *sg,
sg                360 arch/x86/kernel/pci-calgary_64.c 	for_each_sg(sg, s, nelems, i) {
sg                383 arch/x86/kernel/pci-calgary_64.c 	calgary_unmap_sg(dev, sg, nelems, dir, 0);
sg                384 arch/x86/kernel/pci-calgary_64.c 	for_each_sg(sg, s, nelems, i) {
sg                385 arch/x86/kernel/pci-calgary_64.c 		sg->dma_address = DMA_MAPPING_ERROR;
sg                386 arch/x86/kernel/pci-calgary_64.c 		sg->dma_length = 0;
sg                 72 block/blk-integrity.c 	struct scatterlist *sg = NULL;
sg                 82 block/blk-integrity.c 			if (sg->length + iv.bv_len > queue_max_segment_size(q))
sg                 85 block/blk-integrity.c 			sg->length += iv.bv_len;
sg                 88 block/blk-integrity.c 			if (!sg)
sg                 89 block/blk-integrity.c 				sg = sglist;
sg                 91 block/blk-integrity.c 				sg_unmark_end(sg);
sg                 92 block/blk-integrity.c 				sg = sg_next(sg);
sg                 95 block/blk-integrity.c 			sg_set_page(sg, iv.bv_page, iv.bv_len, iv.bv_offset);
sg                103 block/blk-integrity.c 	if (sg)
sg                104 block/blk-integrity.c 		sg_mark_end(sg);
sg                381 block/blk-merge.c static inline struct scatterlist *blk_next_sg(struct scatterlist **sg,
sg                384 block/blk-merge.c 	if (!*sg)
sg                394 block/blk-merge.c 	sg_unmark_end(*sg);
sg                395 block/blk-merge.c 	return sg_next(*sg);
sg                400 block/blk-merge.c 		struct scatterlist **sg)
sg                421 block/blk-merge.c 		*sg = blk_next_sg(sg, sglist);
sg                422 block/blk-merge.c 		sg_set_page(*sg, page, len, offset);
sg                433 block/blk-merge.c 		struct scatterlist *sglist, struct scatterlist **sg)
sg                435 block/blk-merge.c 	*sg = blk_next_sg(sg, sglist);
sg                436 block/blk-merge.c 	sg_set_page(*sg, bv.bv_page, bv.bv_len, bv.bv_offset);
sg                443 block/blk-merge.c 			   struct bio_vec *bvprv, struct scatterlist **sg)
sg                448 block/blk-merge.c 	if (!*sg)
sg                451 block/blk-merge.c 	if ((*sg)->length + nbytes > queue_max_segment_size(q))
sg                457 block/blk-merge.c 	(*sg)->length += nbytes;
sg                464 block/blk-merge.c 			     struct scatterlist **sg)
sg                479 block/blk-merge.c 			    __blk_segment_map_sg_merge(q, &bvec, &bvprv, sg))
sg                483 block/blk-merge.c 				nsegs += __blk_bvec_map_sg(bvec, sglist, sg);
sg                485 block/blk-merge.c 				nsegs += blk_bvec_map_sg(q, &bvec, sglist, sg);
sg                505 block/blk-merge.c 	struct scatterlist *sg = NULL;
sg                509 block/blk-merge.c 		nsegs = __blk_bvec_map_sg(rq->special_vec, sglist, &sg);
sg                511 block/blk-merge.c 		nsegs = __blk_bvec_map_sg(bio_iovec(rq->bio), sglist, &sg);
sg                513 block/blk-merge.c 		nsegs = __blk_bios_map_sg(q, rq->bio, sglist, &sg);
sg                520 block/blk-merge.c 		sg->length += pad_len;
sg                528 block/blk-merge.c 		sg_unmark_end(sg);
sg                529 block/blk-merge.c 		sg = sg_next(sg);
sg                530 block/blk-merge.c 		sg_set_page(sg, virt_to_page(q->dma_drain_buffer),
sg                538 block/blk-merge.c 	if (sg)
sg                539 block/blk-merge.c 		sg_mark_end(sg);
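
Example: blk_rq_map_sg() above flattens a request's bios into a driver-supplied table, merging physically adjacent vectors and appending the optional drain buffer. A block driver consumes it roughly like this (a sketch; table sizing and queue setup are the caller's):

	#include <linux/blkdev.h>
	#include <linux/dma-mapping.h>
	#include <linux/scatterlist.h>

	static int example_prep_rq(struct request *rq, struct device *dev,
				   struct scatterlist *table, int max_segs)
	{
		int nsegs, mapped;

		sg_init_table(table, max_segs);
		nsegs = blk_rq_map_sg(rq->q, rq, table);

		mapped = dma_map_sg(dev, table, nsegs, rq_dma_dir(rq));
		if (!mapped)
			return -ENOMEM;
		return mapped;	/* program this many entries into the HW */
	}
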
sg                 81 crypto/ablkcipher.c 		scatterwalk_start(&walk->out, sg_next(walk->out.sg));
sg                281 crypto/ablkcipher.c 	scatterwalk_start(&walk->in, walk->in.sg);
sg                282 crypto/ablkcipher.c 	scatterwalk_start(&walk->out, walk->out.sg);
sg                124 crypto/adiantum.c 		struct scatterlist sg;
sg                149 crypto/adiantum.c 	sg_init_one(&data->sg, data->derived_keys, sizeof(data->derived_keys));
sg                155 crypto/adiantum.c 	skcipher_request_set_crypt(&data->req, &data->sg, &data->sg,
sg                407 crypto/af_alg.c 	sg_init_table(sgl->sg, npages + 1);
sg                412 crypto/af_alg.c 		sg_set_page(sgl->sg + i, sgl->pages[i], plen, off);
sg                417 crypto/af_alg.c 	sg_mark_end(sgl->sg + npages - 1);
sg                427 crypto/af_alg.c 	sg_unmark_end(sgl_prev->sg + sgl_prev->npages - 1);
sg                428 crypto/af_alg.c 	sg_chain(sgl_prev->sg, sgl_prev->npages + 1, sgl_new->sg);
sg                491 crypto/af_alg.c 	struct scatterlist *sg = NULL;
sg                495 crypto/af_alg.c 		sg = sgl->sg;
sg                497 crypto/af_alg.c 	if (!sg || sgl->cur >= MAX_SGL_ENTS) {
sg                499 crypto/af_alg.c 				   struct_size(sgl, sg, (MAX_SGL_ENTS + 1)),
sg                504 crypto/af_alg.c 		sg_init_table(sgl->sg, MAX_SGL_ENTS + 1);
sg                507 crypto/af_alg.c 		if (sg)
sg                508 crypto/af_alg.c 			sg_chain(sg, MAX_SGL_ENTS + 1, sgl->sg);
sg                539 crypto/af_alg.c 		const struct scatterlist *sg = sgl->sg;
sg                545 crypto/af_alg.c 			if (offset >= sg[i].length) {
sg                546 crypto/af_alg.c 				offset -= sg[i].length;
sg                547 crypto/af_alg.c 				bytes -= sg[i].length;
sg                551 crypto/af_alg.c 			bytes_count = sg[i].length - offset;
sg                589 crypto/af_alg.c 	struct scatterlist *sg;
sg                595 crypto/af_alg.c 		sg = sgl->sg;
sg                598 crypto/af_alg.c 			size_t plen = min_t(size_t, used, sg[i].length);
sg                599 crypto/af_alg.c 			struct page *page = sg_page(sg + i);
sg                617 crypto/af_alg.c 						    sg[i].offset + dst_offset);
sg                623 crypto/af_alg.c 			sg[i].length -= plen;
sg                624 crypto/af_alg.c 			sg[i].offset += plen;
sg                629 crypto/af_alg.c 			if (sg[i].length)
sg                633 crypto/af_alg.c 			sg_assign_page(sg + i, NULL);
sg                637 crypto/af_alg.c 		sock_kfree_s(sk, sgl, struct_size(sgl, sg, MAX_SGL_ENTS + 1));
sg                657 crypto/af_alg.c 	struct scatterlist *sg;
sg                670 crypto/af_alg.c 		for_each_sg(tsgl, sg, areq->tsgl_entries, i) {
sg                671 crypto/af_alg.c 			if (!sg_page(sg))
sg                673 crypto/af_alg.c 			put_page(sg_page(sg));
sg                864 crypto/af_alg.c 		struct scatterlist *sg;
sg                872 crypto/af_alg.c 			sg = sgl->sg + sgl->cur - 1;
sg                874 crypto/af_alg.c 				    PAGE_SIZE - sg->offset - sg->length);
sg                876 crypto/af_alg.c 			err = memcpy_from_msg(page_address(sg_page(sg)) +
sg                877 crypto/af_alg.c 					      sg->offset + sg->length,
sg                882 crypto/af_alg.c 			sg->length += len;
sg                883 crypto/af_alg.c 			ctx->merge = (sg->offset + sg->length) &
sg                907 crypto/af_alg.c 		sg = sgl->sg;
sg                909 crypto/af_alg.c 			sg_unmark_end(sg + sgl->cur - 1);
sg                916 crypto/af_alg.c 			sg_assign_page(sg + i, alloc_page(GFP_KERNEL));
sg                917 crypto/af_alg.c 			if (!sg_page(sg + i)) {
sg                922 crypto/af_alg.c 			err = memcpy_from_msg(page_address(sg_page(sg + i)),
sg                925 crypto/af_alg.c 				__free_page(sg_page(sg + i));
sg                926 crypto/af_alg.c 				sg_assign_page(sg + i, NULL);
sg                930 crypto/af_alg.c 			sg[i].length = plen;
sg                939 crypto/af_alg.c 			sg_mark_end(sg + sgl->cur - 1);
sg                994 crypto/af_alg.c 		sg_unmark_end(sgl->sg + sgl->cur - 1);
sg                996 crypto/af_alg.c 	sg_mark_end(sgl->sg + sgl->cur);
sg                999 crypto/af_alg.c 	sg_set_page(sgl->sg + sgl->cur, page, size, offset);
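
Example: af_alg_sendmsg() above grows its list in MAX_SGL_ENTS-sized tables, each allocated with one spare slot so sg_chain() can splice the next table on. The splice in isolation (a sketch; N and the single backing page are arbitrary):

	#include <linux/scatterlist.h>

	#define N 4	/* payload slots per table, arbitrary here */

	static void example_chain(struct scatterlist *first,
				  struct scatterlist *second, struct page *pg)
	{
		int i;

		sg_init_table(first, N + 1);	/* N payload + 1 chain slot */
		sg_init_table(second, N + 1);
		for (i = 0; i < N; i++) {
			sg_set_page(&first[i], pg, PAGE_SIZE, 0);
			sg_set_page(&second[i], pg, PAGE_SIZE, 0);
		}

		sg_chain(first, N + 1, second);	/* first[N] becomes a link */
		sg_mark_end(&second[N - 1]);	/* logical end of the list */
	}
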
sg                 66 crypto/ahash.c 	struct scatterlist *sg;
sg                 68 crypto/ahash.c 	sg = walk->sg;
sg                 69 crypto/ahash.c 	walk->offset = sg->offset;
sg                 70 crypto/ahash.c 	walk->pg = sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT);
sg                 72 crypto/ahash.c 	walk->entrylen = sg->length;
sg                123 crypto/ahash.c 	walk->sg = sg_next(walk->sg);
sg                140 crypto/ahash.c 	walk->sg = req->src;
sg                158 crypto/ahash.c 	walk->sg = req->src;
sg                184 crypto/algif_aead.c 			struct scatterlist *process_sg = tsgl->sg + i;
sg                213 crypto/algif_aead.c 	rsgl_src = areq->first_rsgl.sgl.sg;
sg                227 crypto/algif_aead.c 					   areq->first_rsgl.sgl.sg, processed);
sg                245 crypto/algif_aead.c 					   areq->first_rsgl.sgl.sg, outlen);
sg                271 crypto/algif_aead.c 			sg_unmark_end(sgl_prev->sg + sgl_prev->npages - 1);
sg                272 crypto/algif_aead.c 			sg_chain(sgl_prev->sg, sgl_prev->npages + 1,
sg                281 crypto/algif_aead.c 			       areq->first_rsgl.sgl.sg, used, ctx->iv);
sg                100 crypto/algif_hash.c 		ahash_request_set_crypt(&ctx->req, ctx->sgl.sg, NULL, len);
sg                143 crypto/algif_hash.c 	sg_init_table(ctx->sgl.sg, 1);
sg                144 crypto/algif_hash.c 	sg_set_page(ctx->sgl.sg, page, size, offset);
sg                153 crypto/algif_hash.c 	ahash_request_set_crypt(&ctx->req, ctx->sgl.sg, ctx->result, size);
sg                112 crypto/algif_skcipher.c 				   areq->first_rsgl.sgl.sg, len, ctx->iv);
sg                334 crypto/blkcipher.c 	scatterwalk_start(&walk->in, walk->in.sg);
sg                335 crypto/blkcipher.c 	scatterwalk_start(&walk->out, walk->out.sg);
sg                184 crypto/ccm.c   	struct scatterlist sg[3];
sg                194 crypto/ccm.c   	sg_init_table(sg, 3);
sg                195 crypto/ccm.c   	sg_set_buf(&sg[0], odata, 16);
sg                200 crypto/ccm.c   		sg_set_buf(&sg[1], idata, ilen);
sg                201 crypto/ccm.c   		sg_chain(sg, 3, req->src);
sg                204 crypto/ccm.c   		sg_chain(sg, 2, req->src);
sg                209 crypto/ccm.c   	ahash_request_set_crypt(ahreq, sg, NULL, assoclen + ilen + 16);
sg                221 crypto/ccm.c   		sg_init_table(sg, 2);
sg                222 crypto/ccm.c   		sg_set_buf(&sg[0], idata, ilen);
sg                224 crypto/ccm.c   			sg_chain(sg, 2, plain);
sg                225 crypto/ccm.c   		plain = sg;
sg                261 crypto/ccm.c   	struct scatterlist *sg;
sg                278 crypto/ccm.c   	sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
sg                279 crypto/ccm.c   	if (sg != pctx->src + 1)
sg                280 crypto/ccm.c   		sg_chain(pctx->src, 2, sg);
sg                285 crypto/ccm.c   		sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
sg                286 crypto/ccm.c   		if (sg != pctx->dst + 1)
sg                287 crypto/ccm.c   			sg_chain(pctx->dst, 2, sg);
sg                649 crypto/ccm.c   	struct scatterlist *sg;
sg                663 crypto/ccm.c   	sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
sg                664 crypto/ccm.c   	if (sg != rctx->src + 1)
sg                665 crypto/ccm.c   		sg_chain(rctx->src, 2, sg);
sg                670 crypto/ccm.c   		sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
sg                671 crypto/ccm.c   		if (sg != rctx->dst + 1)
sg                672 crypto/ccm.c   			sg_chain(rctx->dst, 2, sg);
sg                 60 crypto/cts.c   	struct scatterlist sg[2];
sg                109 crypto/cts.c   	struct scatterlist *sg;
sg                116 crypto/cts.c   	sg = scatterwalk_ffwd(rctx->sg, req->dst, offset - bsize);
sg                117 crypto/cts.c   	scatterwalk_map_and_copy(d + bsize, sg, 0, bsize, 0);
sg                122 crypto/cts.c   	scatterwalk_map_and_copy(d, sg, 0, bsize + lastn, 1);
sg                128 crypto/cts.c   	skcipher_request_set_crypt(subreq, sg, sg, bsize, req->iv);
sg                190 crypto/cts.c   	struct scatterlist *sg;
sg                198 crypto/cts.c   	sg = scatterwalk_ffwd(rctx->sg, req->dst, offset - bsize);
sg                201 crypto/cts.c   	scatterwalk_map_and_copy(d + bsize, sg, 0, bsize, 0);
sg                215 crypto/cts.c   	scatterwalk_map_and_copy(d, sg, 0, bsize + lastn, 1);
sg                222 crypto/cts.c   	skcipher_request_set_crypt(subreq, sg, sg, bsize, space);
sg                 60 crypto/essiv.c 	struct scatterlist		sg[4];
sg                221 crypto/essiv.c 		struct scatterlist *sg;
sg                232 crypto/essiv.c 		sg_init_table(rctx->sg, 4);
sg                245 crypto/essiv.c 			sg_set_buf(rctx->sg, rctx->assoc, ssize);
sg                247 crypto/essiv.c 			sg_set_page(rctx->sg, sg_page(req->src), ssize,
sg                251 crypto/essiv.c 		sg_set_buf(rctx->sg + 1, iv, ivsize);
sg                252 crypto/essiv.c 		sg = scatterwalk_ffwd(rctx->sg + 2, req->src, req->assoclen);
sg                253 crypto/essiv.c 		if (sg != rctx->sg + 2)
sg                254 crypto/essiv.c 			sg_chain(rctx->sg, 3, sg);
sg                256 crypto/essiv.c 		src = rctx->sg;
sg                 70 crypto/gcm.c   	struct scatterlist sg;
sg                 80 crypto/gcm.c   	struct scatterlist sg;
sg                105 crypto/gcm.c   		struct scatterlist sg[1];
sg                125 crypto/gcm.c   	sg_init_one(data->sg, &data->hash, sizeof(data->hash));
sg                131 crypto/gcm.c   	skcipher_request_set_crypt(&data->req, data->sg, data->sg,
sg                162 crypto/gcm.c   	struct scatterlist *sg;
sg                170 crypto/gcm.c   	sg = scatterwalk_ffwd(pctx->src + 1, req->src, req->assoclen);
sg                171 crypto/gcm.c   	if (sg != pctx->src + 1)
sg                172 crypto/gcm.c   		sg_chain(pctx->src, 2, sg);
sg                177 crypto/gcm.c   		sg = scatterwalk_ffwd(pctx->dst + 1, req->dst, req->assoclen);
sg                178 crypto/gcm.c   		if (sg != pctx->dst + 1)
sg                179 crypto/gcm.c   			sg_chain(pctx->dst, 2, sg);
sg                226 crypto/gcm.c   	return gcm_hash_update(req, compl, &gcm_zeroes->sg, remain, flags);
sg                239 crypto/gcm.c   	sg_init_one(&pctx->sg, pctx->iauth_tag, 16);
sg                241 crypto/gcm.c   	ahash_request_set_crypt(ahreq, &pctx->sg,
sg                768 crypto/gcm.c   	struct scatterlist *sg;
sg                779 crypto/gcm.c   	sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
sg                780 crypto/gcm.c   	if (sg != rctx->src + 1)
sg                781 crypto/gcm.c   		sg_chain(rctx->src, 2, sg);
sg                786 crypto/gcm.c   		sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
sg                787 crypto/gcm.c   		if (sg != rctx->dst + 1)
sg                788 crypto/gcm.c   			sg_chain(rctx->dst, 2, sg);
sg               1220 crypto/gcm.c   	sg_init_one(&gcm_zeroes->sg, gcm_zeroes->buf, sizeof(gcm_zeroes->buf));
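
Example: ccm, gcm and essiv above share one idiom: scatterwalk_ffwd() skips a prefix (the AAD) inside req->src and returns either a pointer into the original list or a two-entry stub written into the scratch slots; only in the first case must the local header be stitched to the result with sg_chain(). In isolation (a sketch mirroring the gcm code above):

	#include <crypto/scatterwalk.h>

	/* Build [hdr | src beyond 'skip' bytes]; pre[] needs 3 slots. */
	static struct scatterlist *
	example_hdr_then_payload(struct scatterlist pre[3],
				 void *hdr, unsigned int hlen,
				 struct scatterlist *src, unsigned int skip)
	{
		struct scatterlist *sg;

		sg_init_table(pre, 3);
		sg_set_buf(pre, hdr, hlen);

		sg = scatterwalk_ffwd(pre + 1, src, skip);
		if (sg != pre + 1)		/* landed on an entry boundary */
			sg_chain(pre, 2, sg);	/* pre[1] becomes a chain link */

		return pre;
	}
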
sg                102 crypto/keywrap.c 				     struct scatterlist *sg,
sg                111 crypto/keywrap.c 	while (sg) {
sg                112 crypto/keywrap.c 		if (sg->length > skip) {
sg                113 crypto/keywrap.c 			scatterwalk_start(walk, sg);
sg                117 crypto/keywrap.c 			skip -= sg->length;
sg                119 crypto/keywrap.c 		sg = sg_next(sg);
sg                161 crypto/rsa-pkcs1pad.c static void pkcs1pad_sg_set_buf(struct scatterlist *sg, void *buf, size_t len,
sg                166 crypto/rsa-pkcs1pad.c 	sg_init_table(sg, nsegs);
sg                167 crypto/rsa-pkcs1pad.c 	sg_set_buf(sg, buf, len);
sg                170 crypto/rsa-pkcs1pad.c 		sg_chain(sg, nsegs, next);
sg                 55 crypto/scatterwalk.c void scatterwalk_map_and_copy(void *buf, struct scatterlist *sg,
sg                 64 crypto/scatterwalk.c 	sg = scatterwalk_ffwd(tmp, sg, start);
sg                 66 crypto/scatterwalk.c 	scatterwalk_start(&walk, sg);
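
As the cts.c and scatterwalk.c hits above show, scatterwalk_map_and_copy(buf, sg, start, nbytes, out) copies nbytes between a linear buffer and a scatterlist starting start bytes in: out == 0 reads out of the list, out == 1 writes back into it. An illustrative pair of calls, where req, offset and bsize are assumed from the cts.c context:

	u8 d[2 * 16];

	/* lift the last two blocks out of the destination list ... */
	scatterwalk_map_and_copy(d, req->dst, offset - bsize, 2 * bsize, 0);
	/* ... and, after rearranging them, write the result back */
	scatterwalk_map_and_copy(d, req->dst, offset - bsize, 2 * bsize, 1);
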
sg                297 crypto/shash.c 	struct scatterlist *sg;
sg                302 crypto/shash.c 	    (sg = req->src, offset = sg->offset,
sg                303 crypto/shash.c 	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
sg                306 crypto/shash.c 		data = kmap_atomic(sg_page(sg));
sg                112 crypto/tcrypt.c static void sg_init_aead(struct scatterlist *sg, char *xbuf[XBUFSIZE],
sg                126 crypto/tcrypt.c 	sg_init_table(sg, np + 1);
sg                128 crypto/tcrypt.c 	sg_set_buf(&sg[0], assoc, aad_size);
sg                133 crypto/tcrypt.c 		sg_set_buf(&sg[k + 1], xbuf[k], PAGE_SIZE);
sg                136 crypto/tcrypt.c 		sg_set_buf(&sg[k + 1], xbuf[k], rem);
sg                147 crypto/tcrypt.c 	struct scatterlist sg[XBUFSIZE];
sg                381 crypto/tcrypt.c 				sg_init_aead(cur->sg, cur->xbuf,
sg                395 crypto/tcrypt.c 							       cur->sg,
sg                407 crypto/tcrypt.c 				aead_request_set_crypt(cur->req, cur->sg,
sg                528 crypto/tcrypt.c 	struct scatterlist *sg;
sg                561 crypto/tcrypt.c 	sg = kmalloc(sizeof(*sg) * 9 * 2, GFP_KERNEL);
sg                562 crypto/tcrypt.c 	if (!sg)
sg                564 crypto/tcrypt.c 	sgout = &sg[9];
sg                629 crypto/tcrypt.c 			sg_init_aead(sg, xbuf, *b_size + (enc ? 0 : authsize),
sg                645 crypto/tcrypt.c 				aead_request_set_crypt(req, sgout, sg,
sg                657 crypto/tcrypt.c 			aead_request_set_crypt(req, sg, sgout,
sg                684 crypto/tcrypt.c 	kfree(sg);
sg                695 crypto/tcrypt.c static void test_hash_sg_init(struct scatterlist *sg)
sg                699 crypto/tcrypt.c 	sg_init_table(sg, TVMEMSIZE);
sg                701 crypto/tcrypt.c 		sg_set_buf(sg + i, tvmem[i], PAGE_SIZE);
sg                714 crypto/tcrypt.c 	struct scatterlist sg[XBUFSIZE];
sg                846 crypto/tcrypt.c 		sg_init_table(data[i].sg, XBUFSIZE);
sg                848 crypto/tcrypt.c 			sg_set_buf(data[i].sg + j, data[i].xbuf[j], PAGE_SIZE);
sg                871 crypto/tcrypt.c 			ahash_request_set_crypt(data[k].req, data[k].sg,
sg               1057 crypto/tcrypt.c 	struct scatterlist sg[TVMEMSIZE];
sg               1080 crypto/tcrypt.c 	test_hash_sg_init(sg);
sg               1109 crypto/tcrypt.c 		ahash_request_set_crypt(req, sg, output, speed[i].plen);
sg               1148 crypto/tcrypt.c 	struct scatterlist sg[XBUFSIZE];
sg               1359 crypto/tcrypt.c 				sg_init_table(cur->sg, pages);
sg               1362 crypto/tcrypt.c 					sg_set_buf(cur->sg + p, cur->xbuf[p],
sg               1369 crypto/tcrypt.c 				sg_set_buf(cur->sg + p, cur->xbuf[p], k);
sg               1372 crypto/tcrypt.c 				skcipher_request_set_crypt(cur->req, cur->sg,
sg               1373 crypto/tcrypt.c 							   cur->sg, *b_size,
sg               1535 crypto/tcrypt.c 			struct scatterlist sg[TVMEMSIZE];
sg               1568 crypto/tcrypt.c 			sg_init_table(sg, DIV_ROUND_UP(k, PAGE_SIZE));
sg               1571 crypto/tcrypt.c 				sg_set_buf(sg, tvmem[0] + *keysize,
sg               1576 crypto/tcrypt.c 					sg_set_buf(sg + j, tvmem[j], PAGE_SIZE);
sg               1581 crypto/tcrypt.c 				sg_set_buf(sg + j, tvmem[j], k);
sg               1584 crypto/tcrypt.c 				sg_set_buf(sg, tvmem[0] + *keysize, *b_size);
sg               1591 crypto/tcrypt.c 			skcipher_request_set_crypt(req, sg, sg, *b_size, iv);
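
sg_init_aead() and test_hash_sg_init() above build their tables the usual tcrypt way: one entry per backing page, with the tail entry trimmed to the remainder. A condensed sketch under the same assumption tcrypt makes, namely an xbuf[] array of page-sized buffers and len > 0:

#include <linux/scatterlist.h>

static void fill_sg(struct scatterlist *sg, char *xbuf[], unsigned int len)
{
	unsigned int np = DIV_ROUND_UP(len, PAGE_SIZE);
	unsigned int k;

	sg_init_table(sg, np);
	for (k = 0; k + 1 < np; k++, len -= PAGE_SIZE)
		sg_set_buf(&sg[k], xbuf[k], PAGE_SIZE);
	sg_set_buf(&sg[k], xbuf[k], len);	/* remainder (<= PAGE_SIZE) */
}
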
sg                631 crypto/testmgr.c 		struct scatterlist *sg = &tsgl->sgl_ptr[i];
sg                632 crypto/testmgr.c 		unsigned int len = sg->length;
sg                633 crypto/testmgr.c 		unsigned int offset = sg->offset;
sg                646 crypto/testmgr.c 		actual_output = page_address(sg_page(sg)) + offset;
sg               1081 crypto/testmgr.c static inline const void *sg_data(struct scatterlist *sg)
sg               1083 crypto/testmgr.c 	return page_address(sg_page(sg)) + sg->offset;
sg                486 crypto/twofish_common.c    ctx->s[2][i] = mds[2][q1[(a) ^ sc] ^ sg]; \
sg                494 crypto/twofish_common.c    ctx->s[2][i] = mds[2][q1[q0[(a) ^ sc] ^ sg] ^ sk]; \
sg                502 crypto/twofish_common.c    ctx->s[2][i] = mds[2][q1[q0[q0[(a) ^ sc] ^ sg] ^ sk] ^ so]; \
sg                579 crypto/twofish_common.c 	u8 sa = 0, sb = 0, sc = 0, sd = 0, se = 0, sf = 0, sg = 0, sh = 0;
sg                604 crypto/twofish_common.c 	CALC_S (se, sf, sg, sh, 8, 0x00, 0x2D, 0x01, 0x2D); /* 01 A4 02 A4 */
sg                605 crypto/twofish_common.c 	CALC_S (se, sf, sg, sh, 9, 0x2D, 0xA4, 0x44, 0x8A); /* A4 56 A1 55 */
sg                606 crypto/twofish_common.c 	CALC_S (se, sf, sg, sh, 10, 0x8A, 0xD5, 0xBF, 0xD1); /* 55 82 FC 87 */
sg                607 crypto/twofish_common.c 	CALC_S (se, sf, sg, sh, 11, 0xD1, 0x7F, 0x3D, 0x99); /* 87 F3 C1 5A */
sg                608 crypto/twofish_common.c 	CALC_S (se, sf, sg, sh, 12, 0x99, 0x46, 0x66, 0x96); /* 5A 1E 47 58 */
sg                609 crypto/twofish_common.c 	CALC_S (se, sf, sg, sh, 13, 0x96, 0x3C, 0x5B, 0xED); /* 58 C6 AE DB */
sg                610 crypto/twofish_common.c 	CALC_S (se, sf, sg, sh, 14, 0xED, 0x37, 0x4F, 0xE0); /* DB 68 3D 9E */
sg                611 crypto/twofish_common.c 	CALC_S (se, sf, sg, sh, 15, 0xE0, 0xD0, 0x8C, 0x17); /* 9E E5 19 03 */
sg                 36 crypto/xts.c   	struct scatterlist sg[2];
sg                174 crypto/xts.c   	rctx->tail = scatterwalk_ffwd(rctx->sg, req->dst,
sg                184 drivers/ata/acard-ahci.c 	struct scatterlist *sg;
sg                193 drivers/ata/acard-ahci.c 	for_each_sg(qc->sg, sg, qc->n_elem, si) {
sg                194 drivers/ata/acard-ahci.c 		dma_addr_t addr = sg_dma_address(sg);
sg                195 drivers/ata/acard-ahci.c 		u32 sg_len = sg_dma_len(sg);
sg               1595 drivers/ata/libahci.c 	struct scatterlist *sg;
sg               1604 drivers/ata/libahci.c 	for_each_sg(qc->sg, sg, qc->n_elem, si) {
sg               1605 drivers/ata/libahci.c 		dma_addr_t addr = sg_dma_address(sg);
sg               1606 drivers/ata/libahci.c 		u32 sg_len = sg_dma_len(sg);
sg               1607 drivers/ata/libata-core.c 		struct scatterlist *sg;
sg               1609 drivers/ata/libata-core.c 		for_each_sg(sgl, sg, n_elem, i)
sg               1610 drivers/ata/libata-core.c 			buflen += sg->length;
sg               1728 drivers/ata/libata-core.c 	struct scatterlist *psg = NULL, sg;
sg               1733 drivers/ata/libata-core.c 		sg_init_one(&sg, buf, buflen);
sg               1734 drivers/ata/libata-core.c 		psg = &sg;
sg               4998 drivers/ata/libata-core.c void ata_sg_init(struct ata_queued_cmd *qc, struct scatterlist *sg,
sg               5001 drivers/ata/libata-core.c 	qc->sg = sg;
sg               5003 drivers/ata/libata-core.c 	qc->cursg = qc->sg;
sg               5020 drivers/ata/libata-core.c 	struct scatterlist *sg = qc->sg;
sg               5023 drivers/ata/libata-core.c 	WARN_ON_ONCE(sg == NULL);
sg               5028 drivers/ata/libata-core.c 		dma_unmap_sg(ap->dev, sg, qc->orig_n_elem, dir);
sg               5031 drivers/ata/libata-core.c 	qc->sg = NULL;
sg               5054 drivers/ata/libata-core.c 	n_elem = dma_map_sg(ap->dev, qc->sg, qc->n_elem, qc->dma_dir);
sg               5454 drivers/ata/libata-core.c 	if (ata_is_data(prot) && (!qc->sg || !qc->n_elem || !qc->nbytes))
sg                853 drivers/ata/libata-scsi.c 		qc->sg = scsi_sglist(cmd);
sg                781 drivers/ata/libata-sff.c 	struct scatterlist *sg;
sg                787 drivers/ata/libata-sff.c 	sg = qc->cursg;
sg                788 drivers/ata/libata-sff.c 	if (unlikely(!sg)) {
sg                795 drivers/ata/libata-sff.c 	page = sg_page(sg);
sg                796 drivers/ata/libata-sff.c 	offset = sg->offset + qc->cursg_ofs;
sg                803 drivers/ata/libata-sff.c 	count = min(sg->length - qc->cursg_ofs, bytes);
sg                819 drivers/ata/libata-sff.c 	if (qc->cursg_ofs == sg->length) {
sg               2579 drivers/ata/libata-sff.c 	struct scatterlist *sg;
sg               2583 drivers/ata/libata-sff.c 	for_each_sg(qc->sg, sg, qc->n_elem, si) {
sg               2591 drivers/ata/libata-sff.c 		addr = (u32) sg_dma_address(sg);
sg               2592 drivers/ata/libata-sff.c 		sg_len = sg_dma_len(sg);
sg               2630 drivers/ata/libata-sff.c 	struct scatterlist *sg;
sg               2634 drivers/ata/libata-sff.c 	for_each_sg(qc->sg, sg, qc->n_elem, si) {
sg               2642 drivers/ata/libata-sff.c 		addr = (u32) sg_dma_address(sg);
sg               2643 drivers/ata/libata-sff.c 		sg_len = sg_dma_len(sg);
sg                 55 drivers/ata/libata.h 				     int dma_dir, struct scatterlist *sg,
sg                431 drivers/ata/pata_arasan_cf.c static int sg_xfer(struct arasan_cf_dev *acdev, struct scatterlist *sg)
sg                439 drivers/ata/pata_arasan_cf.c 	sglen = sg_dma_len(sg);
sg                441 drivers/ata/pata_arasan_cf.c 		src = sg_dma_address(sg);
sg                444 drivers/ata/pata_arasan_cf.c 		dest = sg_dma_address(sg);
sg                523 drivers/ata/pata_arasan_cf.c 	struct scatterlist *sg;
sg                536 drivers/ata/pata_arasan_cf.c 	for_each_sg(qc->sg, sg, qc->n_elem, temp) {
sg                537 drivers/ata/pata_arasan_cf.c 		ret = sg_xfer(acdev, sg);
sg                712 drivers/ata/pata_ep93xx.c 	txd = dmaengine_prep_slave_sg(channel, qc->sg, qc->n_elem, qc->dma_dir,
sg                 50 drivers/ata/pata_ftide010.c 	struct sata_gemini *sg;
sg                274 drivers/ata/pata_ftide010.c 	struct sata_gemini *sg = ftide->sg;
sg                284 drivers/ata/pata_ftide010.c 		ret = gemini_sata_start_bridge(sg, 0);
sg                290 drivers/ata/pata_ftide010.c 		ret = gemini_sata_start_bridge(sg, 1);
sg                297 drivers/ata/pata_ftide010.c 		ret = gemini_sata_start_bridge(sg, 0);
sg                304 drivers/ata/pata_ftide010.c 		ret = gemini_sata_start_bridge(sg, 1);
sg                317 drivers/ata/pata_ftide010.c 	struct sata_gemini *sg = ftide->sg;
sg                321 drivers/ata/pata_ftide010.c 		gemini_sata_stop_bridge(sg, 0);
sg                325 drivers/ata/pata_ftide010.c 		gemini_sata_stop_bridge(sg, 1);
sg                330 drivers/ata/pata_ftide010.c 		gemini_sata_stop_bridge(sg, 0);
sg                335 drivers/ata/pata_ftide010.c 		gemini_sata_stop_bridge(sg, 1);
sg                355 drivers/ata/pata_ftide010.c 	struct sata_gemini *sg;
sg                359 drivers/ata/pata_ftide010.c 	sg = gemini_sata_bridge_get();
sg                360 drivers/ata/pata_ftide010.c 	if (IS_ERR(sg))
sg                361 drivers/ata/pata_ftide010.c 		return PTR_ERR(sg);
sg                362 drivers/ata/pata_ftide010.c 	ftide->sg = sg;
sg                364 drivers/ata/pata_ftide010.c 	muxmode = gemini_sata_get_muxmode(sg);
sg                375 drivers/ata/pata_ftide010.c 	if (gemini_sata_bridge_enabled(sg, is_ata1))
sg                238 drivers/ata/pata_icside.c 	set_dma_sg(state->dma, qc->sg, qc->n_elem);
sg                518 drivers/ata/pata_macio.c 	struct scatterlist *sg;
sg                531 drivers/ata/pata_macio.c 	for_each_sg(qc->sg, sg, qc->n_elem, si) {
sg                538 drivers/ata/pata_macio.c 		addr = (u32) sg_dma_address(sg);
sg                539 drivers/ata/pata_macio.c 		sg_len = sg_dma_len(sg);
sg                449 drivers/ata/pata_mpc52xx.c 	struct scatterlist *sg;
sg                457 drivers/ata/pata_mpc52xx.c 	for_each_sg(qc->sg, sg, qc->n_elem, si) {
sg                458 drivers/ata/pata_mpc52xx.c 		dma_addr_t cur_addr = sg_dma_address(sg);
sg                459 drivers/ata/pata_mpc52xx.c 		u32 cur_len = sg_dma_len(sg);
sg                548 drivers/ata/pata_octeon_cf.c 	qc->cursg = qc->sg;
sg                564 drivers/ata/pata_octeon_cf.c 	struct scatterlist *sg;
sg                569 drivers/ata/pata_octeon_cf.c 	sg = qc->cursg;
sg                570 drivers/ata/pata_octeon_cf.c 	BUG_ON(!sg);
sg                601 drivers/ata/pata_octeon_cf.c 	mio_boot_dma_cfg.s.size = sg_dma_len(sg) / 2 - 1;
sg                606 drivers/ata/pata_octeon_cf.c 	mio_boot_dma_cfg.s.adr = sg_dma_address(sg);
sg                609 drivers/ata/pata_octeon_cf.c 		(mio_boot_dma_cfg.s.rw) ? "write" : "read", sg->length,
sg                 57 drivers/ata/pata_pxa.c 	tx = dmaengine_prep_slave_sg(pd->dma_chan, qc->sg, qc->n_elem, dir,
sg                258 drivers/ata/pdc_adma.c 	struct scatterlist *sg;
sg                266 drivers/ata/pdc_adma.c 	for_each_sg(qc->sg, sg, qc->n_elem, si) {
sg                270 drivers/ata/pdc_adma.c 		addr = (u32)sg_dma_address(sg);
sg                274 drivers/ata/pdc_adma.c 		len = sg_dma_len(sg) >> 3;
sg                388 drivers/ata/sata_dwc_460ex.c 	desc = dmaengine_prep_slave_sg(hsdevp->chan, qc->sg, qc->n_elem,
sg                399 drivers/ata/sata_dwc_460ex.c 		qc->sg, qc->n_elem, &hsdev->dmadr);
sg                435 drivers/ata/sata_fsl.c 	struct scatterlist *sg;
sg                455 drivers/ata/sata_fsl.c 	for_each_sg(qc->sg, sg, qc->n_elem, si) {
sg                456 drivers/ata/sata_fsl.c 		dma_addr_t sg_addr = sg_dma_address(sg);
sg                457 drivers/ata/sata_fsl.c 		u32 sg_len = sg_dma_len(sg);
sg                471 drivers/ata/sata_fsl.c 		    sg_next(sg) != NULL) {
sg                126 drivers/ata/sata_gemini.c bool gemini_sata_bridge_enabled(struct sata_gemini *sg, bool is_ata1)
sg                128 drivers/ata/sata_gemini.c 	if (!sg->sata_bridge)
sg                134 drivers/ata/sata_gemini.c 	if ((sg->muxmode == GEMINI_MUXMODE_2) &&
sg                137 drivers/ata/sata_gemini.c 	if ((sg->muxmode == GEMINI_MUXMODE_3) &&
sg                145 drivers/ata/sata_gemini.c enum gemini_muxmode gemini_sata_get_muxmode(struct sata_gemini *sg)
sg                147 drivers/ata/sata_gemini.c 	return sg->muxmode;
sg                151 drivers/ata/sata_gemini.c static int gemini_sata_setup_bridge(struct sata_gemini *sg,
sg                161 drivers/ata/sata_gemini.c 		if (sg->muxmode == GEMINI_MUXMODE_2)
sg                163 drivers/ata/sata_gemini.c 		writel(val, sg->base + GEMINI_SATA0_CTRL);
sg                167 drivers/ata/sata_gemini.c 		if (sg->muxmode == GEMINI_MUXMODE_3)
sg                169 drivers/ata/sata_gemini.c 		writel(val, sg->base + GEMINI_SATA1_CTRL);
sg                180 drivers/ata/sata_gemini.c 			val = readl(sg->base + GEMINI_SATA0_STATUS);
sg                182 drivers/ata/sata_gemini.c 			val = readl(sg->base + GEMINI_SATA1_STATUS);
sg                189 drivers/ata/sata_gemini.c 	dev_info(sg->dev, "SATA%d PHY %s\n", bridge,
sg                195 drivers/ata/sata_gemini.c int gemini_sata_start_bridge(struct sata_gemini *sg, unsigned int bridge)
sg                201 drivers/ata/sata_gemini.c 		pclk = sg->sata0_pclk;
sg                203 drivers/ata/sata_gemini.c 		pclk = sg->sata1_pclk;
sg                208 drivers/ata/sata_gemini.c 	ret = gemini_sata_setup_bridge(sg, bridge);
sg                216 drivers/ata/sata_gemini.c void gemini_sata_stop_bridge(struct sata_gemini *sg, unsigned int bridge)
sg                219 drivers/ata/sata_gemini.c 		clk_disable(sg->sata0_pclk);
sg                221 drivers/ata/sata_gemini.c 		clk_disable(sg->sata1_pclk);
sg                225 drivers/ata/sata_gemini.c int gemini_sata_reset_bridge(struct sata_gemini *sg,
sg                229 drivers/ata/sata_gemini.c 		reset_control_reset(sg->sata0_reset);
sg                231 drivers/ata/sata_gemini.c 		reset_control_reset(sg->sata1_reset);
sg                233 drivers/ata/sata_gemini.c 	return gemini_sata_setup_bridge(sg, bridge);
sg                237 drivers/ata/sata_gemini.c static int gemini_sata_bridge_init(struct sata_gemini *sg)
sg                239 drivers/ata/sata_gemini.c 	struct device *dev = sg->dev;
sg                243 drivers/ata/sata_gemini.c 	sg->sata0_pclk = devm_clk_get(dev, "SATA0_PCLK");
sg                244 drivers/ata/sata_gemini.c 	if (IS_ERR(sg->sata0_pclk)) {
sg                248 drivers/ata/sata_gemini.c 	sg->sata1_pclk = devm_clk_get(dev, "SATA1_PCLK");
sg                249 drivers/ata/sata_gemini.c 	if (IS_ERR(sg->sata1_pclk)) {
sg                254 drivers/ata/sata_gemini.c 	ret = clk_prepare_enable(sg->sata0_pclk);
sg                259 drivers/ata/sata_gemini.c 	ret = clk_prepare_enable(sg->sata1_pclk);
sg                262 drivers/ata/sata_gemini.c 		clk_disable_unprepare(sg->sata0_pclk);
sg                266 drivers/ata/sata_gemini.c 	sg->sata0_reset = devm_reset_control_get_exclusive(dev, "sata0");
sg                267 drivers/ata/sata_gemini.c 	if (IS_ERR(sg->sata0_reset)) {
sg                269 drivers/ata/sata_gemini.c 		clk_disable_unprepare(sg->sata1_pclk);
sg                270 drivers/ata/sata_gemini.c 		clk_disable_unprepare(sg->sata0_pclk);
sg                271 drivers/ata/sata_gemini.c 		return PTR_ERR(sg->sata0_reset);
sg                273 drivers/ata/sata_gemini.c 	sg->sata1_reset = devm_reset_control_get_exclusive(dev, "sata1");
sg                274 drivers/ata/sata_gemini.c 	if (IS_ERR(sg->sata1_reset)) {
sg                276 drivers/ata/sata_gemini.c 		clk_disable_unprepare(sg->sata1_pclk);
sg                277 drivers/ata/sata_gemini.c 		clk_disable_unprepare(sg->sata0_pclk);
sg                278 drivers/ata/sata_gemini.c 		return PTR_ERR(sg->sata1_reset);
sg                281 drivers/ata/sata_gemini.c 	sata_id = readl(sg->base + GEMINI_SATA_ID);
sg                282 drivers/ata/sata_gemini.c 	sata_phy_id = readl(sg->base + GEMINI_SATA_PHY_ID);
sg                283 drivers/ata/sata_gemini.c 	sg->sata_bridge = true;
sg                284 drivers/ata/sata_gemini.c 	clk_disable(sg->sata0_pclk);
sg                285 drivers/ata/sata_gemini.c 	clk_disable(sg->sata1_pclk);
sg                319 drivers/ata/sata_gemini.c 	struct sata_gemini *sg;
sg                327 drivers/ata/sata_gemini.c 	sg = devm_kzalloc(dev, sizeof(*sg), GFP_KERNEL);
sg                328 drivers/ata/sata_gemini.c 	if (!sg)
sg                330 drivers/ata/sata_gemini.c 	sg->dev = dev;
sg                336 drivers/ata/sata_gemini.c 	sg->base = devm_ioremap_resource(dev, res);
sg                337 drivers/ata/sata_gemini.c 	if (IS_ERR(sg->base))
sg                338 drivers/ata/sata_gemini.c 		return PTR_ERR(sg->base);
sg                348 drivers/ata/sata_gemini.c 		ret = gemini_sata_bridge_init(sg);
sg                354 drivers/ata/sata_gemini.c 		sg->ide_pins = true;
sg                356 drivers/ata/sata_gemini.c 	if (!sg->sata_bridge && !sg->ide_pins) {
sg                372 drivers/ata/sata_gemini.c 	sg->muxmode = muxmode;
sg                388 drivers/ata/sata_gemini.c 	if (sg->ide_pins) {
sg                395 drivers/ata/sata_gemini.c 	platform_set_drvdata(pdev, sg);
sg                396 drivers/ata/sata_gemini.c 	sg_singleton = sg;
sg                401 drivers/ata/sata_gemini.c 	if (sg->sata_bridge) {
sg                402 drivers/ata/sata_gemini.c 		clk_unprepare(sg->sata1_pclk);
sg                403 drivers/ata/sata_gemini.c 		clk_unprepare(sg->sata0_pclk);
sg                410 drivers/ata/sata_gemini.c 	struct sata_gemini *sg = platform_get_drvdata(pdev);
sg                412 drivers/ata/sata_gemini.c 	if (sg->sata_bridge) {
sg                413 drivers/ata/sata_gemini.c 		clk_unprepare(sg->sata1_pclk);
sg                414 drivers/ata/sata_gemini.c 		clk_unprepare(sg->sata0_pclk);
sg                 16 drivers/ata/sata_gemini.h bool gemini_sata_bridge_enabled(struct sata_gemini *sg, bool is_ata1);
sg                 17 drivers/ata/sata_gemini.h enum gemini_muxmode gemini_sata_get_muxmode(struct sata_gemini *sg);
sg                 18 drivers/ata/sata_gemini.h int gemini_sata_start_bridge(struct sata_gemini *sg, unsigned int bridge);
sg                 19 drivers/ata/sata_gemini.h void gemini_sata_stop_bridge(struct sata_gemini *sg, unsigned int bridge);
sg                 20 drivers/ata/sata_gemini.h int gemini_sata_reset_bridge(struct sata_gemini *sg, unsigned int bridge);
sg                460 drivers/ata/sata_inic162x.c 	struct scatterlist *sg;
sg                470 drivers/ata/sata_inic162x.c 	for_each_sg(qc->sg, sg, qc->n_elem, si) {
sg                471 drivers/ata/sata_inic162x.c 		prd->mad = cpu_to_le32(sg_dma_address(sg));
sg                472 drivers/ata/sata_inic162x.c 		prd->len = cpu_to_le16(sg_dma_len(sg));
sg               1788 drivers/ata/sata_mv.c 	struct scatterlist *sg;
sg               1793 drivers/ata/sata_mv.c 	for_each_sg(qc->sg, sg, qc->n_elem, si) {
sg               1794 drivers/ata/sata_mv.c 		dma_addr_t addr = sg_dma_address(sg);
sg               1795 drivers/ata/sata_mv.c 		u32 sg_len = sg_dma_len(sg);
sg               1292 drivers/ata/sata_nv.c 			      struct scatterlist *sg,
sg               1304 drivers/ata/sata_nv.c 	aprd->addr  = cpu_to_le64(((u64)sg_dma_address(sg)));
sg               1305 drivers/ata/sata_nv.c 	aprd->len   = cpu_to_le32(((u32)sg_dma_len(sg))); /* len in bytes */
sg               1314 drivers/ata/sata_nv.c 	struct scatterlist *sg;
sg               1319 drivers/ata/sata_nv.c 	for_each_sg(qc->sg, sg, qc->n_elem, si) {
sg               1322 drivers/ata/sata_nv.c 		nv_adma_fill_aprd(qc, sg, si, aprd);
sg               1969 drivers/ata/sata_nv.c 	struct scatterlist *sg;
sg               1977 drivers/ata/sata_nv.c 	for_each_sg(qc->sg, sg, qc->n_elem, si) {
sg               1981 drivers/ata/sata_nv.c 		addr = (u32)sg_dma_address(sg);
sg               1982 drivers/ata/sata_nv.c 		sg_len = sg_dma_len(sg);
sg                577 drivers/ata/sata_promise.c 	struct scatterlist *sg;
sg                586 drivers/ata/sata_promise.c 	for_each_sg(qc->sg, sg, qc->n_elem, si) {
sg                594 drivers/ata/sata_promise.c 		addr = (u32) sg_dma_address(sg);
sg                595 drivers/ata/sata_promise.c 		sg_len = sg_dma_len(sg);
sg                238 drivers/ata/sata_qstor.c 	struct scatterlist *sg;
sg                244 drivers/ata/sata_qstor.c 	for_each_sg(qc->sg, sg, qc->n_elem, si) {
sg                248 drivers/ata/sata_qstor.c 		addr = sg_dma_address(sg);
sg                252 drivers/ata/sata_qstor.c 		len = sg_dma_len(sg);
sg                531 drivers/ata/sata_rcar.c 	struct scatterlist *sg;
sg                534 drivers/ata/sata_rcar.c 	for_each_sg(qc->sg, sg, qc->n_elem, si) {
sg                541 drivers/ata/sata_rcar.c 		addr = (u32)sg_dma_address(sg);
sg                542 drivers/ata/sata_rcar.c 		sg_len = sg_dma_len(sg);
sg                295 drivers/ata/sata_sil.c 	struct scatterlist *sg;
sg                301 drivers/ata/sata_sil.c 	for_each_sg(qc->sg, sg, qc->n_elem, si) {
sg                305 drivers/ata/sata_sil.c 		u32 addr = (u32) sg_dma_address(sg);
sg                306 drivers/ata/sata_sil.c 		u32 sg_len = sg_dma_len(sg);
sg                772 drivers/ata/sata_sil24.c 	struct scatterlist *sg;
sg                776 drivers/ata/sata_sil24.c 	for_each_sg(qc->sg, sg, qc->n_elem, si) {
sg                777 drivers/ata/sata_sil24.c 		sge->addr = cpu_to_le64(sg_dma_address(sg));
sg                778 drivers/ata/sata_sil24.c 		sge->cnt = cpu_to_le32(sg_dma_len(sg));
sg                439 drivers/ata/sata_sx4.c 	struct scatterlist *sg;
sg                459 drivers/ata/sata_sx4.c 	for_each_sg(qc->sg, sg, qc->n_elem, si) {
sg                460 drivers/ata/sata_sx4.c 		buf[idx++] = cpu_to_le32(sg_dma_address(sg));
sg                461 drivers/ata/sata_sx4.c 		buf[idx++] = cpu_to_le32(sg_dma_len(sg));
sg                462 drivers/ata/sata_sx4.c 		total_len += sg_dma_len(sg);
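
Nearly every ATA driver above (acard-ahci, libahci, libata-sff, sata_mv, sata_nv, sata_promise, sata_qstor, sata_rcar, sata_sil, sata_sil24, sata_sx4, ...) repeats one loop: after ata_sg_setup() has DMA-mapped qc->sg, walk the mapped entries with for_each_sg() and emit one hardware descriptor per entry. A generic sketch; struct my_prd and fill_prd() are hypothetical stand-ins for each controller's real descriptor format:

#include <linux/libata.h>
#include <linux/scatterlist.h>

struct my_prd {
	__le32 addr;	/* DMA address of the segment */
	__le32 len;	/* its length in bytes */
};

static void fill_prd(struct my_prd *tbl, struct ata_queued_cmd *qc)
{
	struct scatterlist *sg;
	unsigned int si;

	for_each_sg(qc->sg, sg, qc->n_elem, si) {
		tbl[si].addr = cpu_to_le32(sg_dma_address(sg));
		tbl[si].len  = cpu_to_le32(sg_dma_len(sg));
	}
}
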
sg               1464 drivers/block/mtip32xx/mtip32xx.c 	struct scatterlist *sg;
sg               1468 drivers/block/mtip32xx/mtip32xx.c 	for_each_sg(command->sg, sg, nents, n) {
sg               1469 drivers/block/mtip32xx/mtip32xx.c 		dma_len = sg_dma_len(sg);
sg               1474 drivers/block/mtip32xx/mtip32xx.c 		command_sg->dba	=  cpu_to_le32(sg_dma_address(sg));
sg               1476 drivers/block/mtip32xx/mtip32xx.c 			cpu_to_le32((sg_dma_address(sg) >> 16) >> 16);
sg               2070 drivers/block/mtip32xx/mtip32xx.c 	nents = blk_rq_map_sg(hctx->queue, rq, command->sg);
sg               2071 drivers/block/mtip32xx/mtip32xx.c 	nents = dma_map_sg(&dd->pdev->dev, command->sg, nents, dma_dir);
sg               2590 drivers/block/mtip32xx/mtip32xx.c 	dma_unmap_sg(&dd->pdev->dev, cmd->sg, cmd->scatter_ents,
sg               3529 drivers/block/mtip32xx/mtip32xx.c 	sg_init_table(cmd->sg, MTIP_MAX_SG);
sg                312 drivers/block/mtip32xx/mtip32xx.h 		struct scatterlist sg[MTIP_MAX_SG]; /* Scatter list entries */
sg                172 drivers/block/skd_main.c 	struct scatterlist *sg;
sg                623 drivers/block/skd_main.c 	struct scatterlist *sgl = &skreq->sg[0], *sg;
sg                648 drivers/block/skd_main.c 	for_each_sg(sgl, sg, n_sg, i) {
sg                650 drivers/block/skd_main.c 		u32 cnt = sg_dma_len(sg);
sg                651 drivers/block/skd_main.c 		uint64_t dma_addr = sg_dma_address(sg);
sg                690 drivers/block/skd_main.c 	dma_unmap_sg(&skdev->pdev->dev, &skreq->sg[0], skreq->n_sg,
sg               2746 drivers/block/skd_main.c 	skreq->sg = (void *)(skreq + 1);
sg               2747 drivers/block/skd_main.c 	sg_init_table(skreq->sg, skd_sgs_per_request);
sg                463 drivers/block/sunvdc.c 	struct scatterlist sg[MAX_RING_COOKIES];
sg                484 drivers/block/sunvdc.c 	sg_init_table(sg, port->ring_cookies);
sg                485 drivers/block/sunvdc.c 	nsg = blk_rq_map_sg(req->q, req, sg);
sg                489 drivers/block/sunvdc.c 		len += sg[i].length;
sg                493 drivers/block/sunvdc.c 	err = ldc_map_sg(port->vio.lp, sg, nsg,
sg                260 drivers/block/sx8.c 	struct scatterlist		sg[CARM_MAX_REQ_SG];
sg                322 drivers/block/sx8.c 	struct carm_msg_sg sg[32];
sg                339 drivers/block/sx8.c 	struct carm_msg_sg sg[8];
sg                621 drivers/block/sx8.c 	ab->sg[0].start	= cpu_to_le32(host->shm_dma + (PDC_SHM_SIZE >> 1));
sg                622 drivers/block/sx8.c 	ab->sg[0].len	= cpu_to_le32(65536);
sg                711 drivers/block/sx8.c 	struct scatterlist *sg;
sg                717 drivers/block/sx8.c 	sg_init_table(crq->sg, CARM_MAX_REQ_SG);
sg                726 drivers/block/sx8.c 	sg = &crq->sg[0];
sg                727 drivers/block/sx8.c 	n_elem = blk_rq_map_sg(q, rq, sg);
sg                732 drivers/block/sx8.c 	n_elem = dma_map_sg(&host->pdev->dev, sg, n_elem, carm_rq_dir(rq));
sg                767 drivers/block/sx8.c 	msg_size = sizeof(struct carm_msg_rw) - sizeof(msg->sg);
sg                769 drivers/block/sx8.c 		struct carm_msg_sg *carm_sg = &msg->sg[i];
sg                770 drivers/block/sx8.c 		carm_sg->start = cpu_to_le32(sg_dma_address(&crq->sg[i]));
sg                771 drivers/block/sx8.c 		carm_sg->len = cpu_to_le32(sg_dma_len(&crq->sg[i]));
sg                792 drivers/block/sx8.c 	dma_unmap_sg(&host->pdev->dev, &crq->sg[0], n_elem, carm_rq_dir(rq));
sg                932 drivers/block/sx8.c 		dma_unmap_sg(&host->pdev->dev, &crq->sg[0], crq->n_elem,
sg                 82 drivers/block/virtio_blk.c 	struct scatterlist sg[];
sg                343 drivers/block/virtio_blk.c 	num = blk_rq_map_sg(hctx->queue, req, vbr->sg);
sg                353 drivers/block/virtio_blk.c 		err = virtblk_add_req_scsi(vblk->vqs[qid].vq, vbr, vbr->sg, num);
sg                355 drivers/block/virtio_blk.c 		err = virtblk_add_req(vblk->vqs[qid].vq, vbr, vbr->sg, num);
sg                761 drivers/block/virtio_blk.c 	sg_init_table(vbr->sg, vblk->sg_elems);
sg                103 drivers/block/xen-blkfront.c 	struct scatterlist *sg;
sg                712 drivers/block/xen-blkfront.c 	struct scatterlist *sg;
sg                742 drivers/block/xen-blkfront.c 	num_sg = blk_rq_map_sg(req->q, req, rinfo->shadow[id].sg);
sg                745 drivers/block/xen-blkfront.c 	for_each_sg(rinfo->shadow[id].sg, sg, num_sg, i)
sg                746 drivers/block/xen-blkfront.c 	       num_grant += gnttab_count_grant(sg->offset, sg->length);
sg                813 drivers/block/xen-blkfront.c 	for_each_sg(rinfo->shadow[id].sg, sg, num_sg, i) {
sg                814 drivers/block/xen-blkfront.c 		BUG_ON(sg->offset + sg->length > PAGE_SIZE);
sg                817 drivers/block/xen-blkfront.c 			setup.bvec_off = sg->offset;
sg                818 drivers/block/xen-blkfront.c 			setup.bvec_data = kmap_atomic(sg_page(sg));
sg                821 drivers/block/xen-blkfront.c 		gnttab_foreach_grant_in_range(sg_page(sg),
sg                822 drivers/block/xen-blkfront.c 					      sg->offset,
sg                823 drivers/block/xen-blkfront.c 					      sg->length,
sg               1318 drivers/block/xen-blkfront.c 		kvfree(rinfo->shadow[i].sg);
sg               1319 drivers/block/xen-blkfront.c 		rinfo->shadow[i].sg = NULL;
sg               1424 drivers/block/xen-blkfront.c 	struct scatterlist *sg;
sg               1478 drivers/block/xen-blkfront.c 		for_each_sg(s->sg, sg, num_sg, i) {
sg               1479 drivers/block/xen-blkfront.c 			BUG_ON(sg->offset + sg->length > PAGE_SIZE);
sg               1481 drivers/block/xen-blkfront.c 			data.bvec_offset = sg->offset;
sg               1482 drivers/block/xen-blkfront.c 			data.bvec_data = kmap_atomic(sg_page(sg));
sg               1484 drivers/block/xen-blkfront.c 			gnttab_foreach_grant_in_range(sg_page(sg),
sg               1485 drivers/block/xen-blkfront.c 						      sg->offset,
sg               1486 drivers/block/xen-blkfront.c 						      sg->length,
sg               2241 drivers/block/xen-blkfront.c 		rinfo->shadow[i].sg = kvcalloc(psegs,
sg               2242 drivers/block/xen-blkfront.c 					       sizeof(rinfo->shadow[i].sg[0]),
sg               2250 drivers/block/xen-blkfront.c 			(rinfo->shadow[i].sg == NULL) ||
sg               2254 drivers/block/xen-blkfront.c 		sg_init_table(rinfo->shadow[i].sg, psegs);
sg               2265 drivers/block/xen-blkfront.c 		kvfree(rinfo->shadow[i].sg);
sg               2266 drivers/block/xen-blkfront.c 		rinfo->shadow[i].sg = NULL;
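
The block drivers above (mtip32xx, skd, sunvdc, sx8, virtio_blk, xen-blkfront) share the same request-mapping flow: scatter the request's bios into a preallocated table with blk_rq_map_sg(), then hand the table to dma_map_sg() before building hardware descriptors. A compact sketch; MY_MAX_SG and my_map_request() are hypothetical, and the DMA direction is passed in, much as the sx8 hit does with its own helper:

#include <linux/blkdev.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

#define MY_MAX_SG 32	/* must match the queue's segment limit */

static int my_map_request(struct device *dev, struct request_queue *q,
			  struct request *rq, struct scatterlist *sgl,
			  enum dma_data_direction dir)
{
	int nents;

	sg_init_table(sgl, MY_MAX_SG);
	nents = blk_rq_map_sg(q, rq, sgl);	/* fill from the bios */
	if (!nents)
		return -EIO;

	nents = dma_map_sg(dev, sgl, nents, dir);
	if (!nents)
		return -ENOMEM;

	return nents;	/* entries to turn into hardware descriptors */
}
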
sg                102 drivers/char/agp/intel-gtt.c 	struct scatterlist *sg;
sg                110 drivers/char/agp/intel-gtt.c 	for_each_sg(st->sgl, sg, num_entries, i)
sg                111 drivers/char/agp/intel-gtt.c 		sg_set_page(sg, pages[i], PAGE_SIZE, 0);
sg                859 drivers/char/agp/intel-gtt.c 	struct scatterlist *sg;
sg                867 drivers/char/agp/intel-gtt.c 	for_each_sg(st->sgl, sg, st->nents, i) {
sg                868 drivers/char/agp/intel-gtt.c 		len = sg_dma_len(sg) >> PAGE_SHIFT;
sg                870 drivers/char/agp/intel-gtt.c 			dma_addr_t addr = sg_dma_address(sg) + (m << PAGE_SHIFT);
sg                 43 drivers/char/hw_random/virtio-rng.c 	struct scatterlist sg;
sg                 45 drivers/char/hw_random/virtio-rng.c 	sg_init_one(&sg, buf, size);
sg                 48 drivers/char/hw_random/virtio-rng.c 	virtqueue_add_inbuf(vi->vq, &sg, 1, buf, GFP_KERNEL);
sg                115 drivers/char/virtio_console.c 	struct scatterlist sg[0];
sg                362 drivers/char/virtio_console.c 		struct page *page = sg_page(&buf->sg[i]);
sg                423 drivers/char/virtio_console.c 	buf = kmalloc(struct_size(buf, sg, pages), GFP_KERNEL);
sg                493 drivers/char/virtio_console.c 	struct scatterlist sg[1];
sg                496 drivers/char/virtio_console.c 	sg_init_one(sg, buf->buf, buf->size);
sg                498 drivers/char/virtio_console.c 	ret = virtqueue_add_inbuf(vq, sg, 1, buf, GFP_ATOMIC);
sg                550 drivers/char/virtio_console.c 	struct scatterlist sg[1];
sg                565 drivers/char/virtio_console.c 	sg_init_one(sg, &portdev->cpkt, sizeof(struct virtio_console_control));
sg                567 drivers/char/virtio_console.c 	if (virtqueue_add_outbuf(vq, sg, 1, &portdev->cpkt, GFP_ATOMIC) == 0) {
sg                604 drivers/char/virtio_console.c static ssize_t __send_to_port(struct port *port, struct scatterlist *sg,
sg                619 drivers/char/virtio_console.c 	err = virtqueue_add_outbuf(out_vq, sg, nents, data, GFP_ATOMIC);
sg                811 drivers/char/virtio_console.c 	struct scatterlist sg[1];
sg                845 drivers/char/virtio_console.c 	sg_init_one(sg, buf->buf, count);
sg                846 drivers/char/virtio_console.c 	ret = __send_to_port(port, sg, 1, count, buf, nonblock);
sg                861 drivers/char/virtio_console.c 	struct scatterlist *sg;
sg                880 drivers/char/virtio_console.c 		sg_set_page(&(sgl->sg[sgl->n]), buf->page, len, buf->offset);
sg                899 drivers/char/virtio_console.c 		sg_set_page(&(sgl->sg[sgl->n]), page, len, offset);
sg                955 drivers/char/virtio_console.c 	sgl.sg = buf->sg;
sg                956 drivers/char/virtio_console.c 	sg_init_table(sgl.sg, sgl.size);
sg                960 drivers/char/virtio_console.c 		ret = __send_to_port(port, buf->sg, sgl.n, sgl.len, buf, true);
sg               1121 drivers/char/virtio_console.c 	struct scatterlist sg[1];
sg               1136 drivers/char/virtio_console.c 	sg_init_one(sg, data, count);
sg               1137 drivers/char/virtio_console.c 	ret = __send_to_port(port, sg, 1, count, data, false);
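
virtio-rng and virtio_console above both post linear buffers by wrapping them in a one-entry list with sg_init_one() and queueing that with virtqueue_add_inbuf()/virtqueue_add_outbuf(). A minimal sketch of the outbound half (post_buf() is a hypothetical name):

#include <linux/scatterlist.h>
#include <linux/virtio.h>

static int post_buf(struct virtqueue *vq, void *buf, unsigned int len)
{
	struct scatterlist sg;
	int err;

	sg_init_one(&sg, buf, len);	/* one-entry list over buf */
	err = virtqueue_add_outbuf(vq, &sg, 1, buf, GFP_ATOMIC);
	if (!err)
		virtqueue_kick(vq);	/* notify the host */
	return err;
}
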
sg                 13 drivers/clk/sprd/gate.c static void clk_gate_toggle(const struct sprd_gate *sg, bool en)
sg                 15 drivers/clk/sprd/gate.c 	const struct sprd_clk_common *common = &sg->common;
sg                 17 drivers/clk/sprd/gate.c 	bool set = sg->flags & CLK_GATE_SET_TO_DISABLE ? true : false;
sg                 24 drivers/clk/sprd/gate.c 		reg |= sg->enable_mask;
sg                 26 drivers/clk/sprd/gate.c 		reg &= ~sg->enable_mask;
sg                 31 drivers/clk/sprd/gate.c static void clk_sc_gate_toggle(const struct sprd_gate *sg, bool en)
sg                 33 drivers/clk/sprd/gate.c 	const struct sprd_clk_common *common = &sg->common;
sg                 34 drivers/clk/sprd/gate.c 	bool set = sg->flags & CLK_GATE_SET_TO_DISABLE ? 1 : 0;
sg                 45 drivers/clk/sprd/gate.c 	offset = set ? sg->sc_offset : sg->sc_offset * 2;
sg                 48 drivers/clk/sprd/gate.c 			  sg->enable_mask);
sg                 53 drivers/clk/sprd/gate.c 	struct sprd_gate *sg = hw_to_sprd_gate(hw);
sg                 55 drivers/clk/sprd/gate.c 	clk_gate_toggle(sg, false);
sg                 60 drivers/clk/sprd/gate.c 	struct sprd_gate *sg = hw_to_sprd_gate(hw);
sg                 62 drivers/clk/sprd/gate.c 	clk_gate_toggle(sg, true);
sg                 69 drivers/clk/sprd/gate.c 	struct sprd_gate *sg = hw_to_sprd_gate(hw);
sg                 71 drivers/clk/sprd/gate.c 	clk_sc_gate_toggle(sg, false);
sg                 76 drivers/clk/sprd/gate.c 	struct sprd_gate *sg = hw_to_sprd_gate(hw);
sg                 78 drivers/clk/sprd/gate.c 	clk_sc_gate_toggle(sg, true);
sg                 84 drivers/clk/sprd/gate.c 	struct sprd_gate *sg = hw_to_sprd_gate(hw);
sg                 85 drivers/clk/sprd/gate.c 	struct sprd_clk_common *common = &sg->common;
sg                 90 drivers/clk/sprd/gate.c 	if (sg->flags & CLK_GATE_SET_TO_DISABLE)
sg                 91 drivers/clk/sprd/gate.c 		reg ^= sg->enable_mask;
sg                 93 drivers/clk/sprd/gate.c 	reg &= sg->enable_mask;
sg                829 drivers/crypto/amcc/crypto4xx_core.c 		struct scatterlist *sg;
sg                840 drivers/crypto/amcc/crypto4xx_core.c 		sg = src;
sg                844 drivers/crypto/amcc/crypto4xx_core.c 			len = min(sg->length, nbytes);
sg                846 drivers/crypto/amcc/crypto4xx_core.c 				sg_page(sg), sg->offset, len, DMA_TO_DEVICE);
sg                853 drivers/crypto/amcc/crypto4xx_core.c 			nbytes -= sg->length;
sg                856 drivers/crypto/amcc/crypto4xx_core.c 			sg = sg_next(sg);
sg                175 drivers/crypto/atmel-aes.c 	struct scatterlist	*sg;
sg                638 drivers/crypto/atmel-aes.c 				    struct scatterlist *sg,
sg                647 drivers/crypto/atmel-aes.c 	for (nents = 0; sg; sg = sg_next(sg), ++nents) {
sg                648 drivers/crypto/atmel-aes.c 		if (!IS_ALIGNED(sg->offset, sizeof(u32)))
sg                651 drivers/crypto/atmel-aes.c 		if (len <= sg->length) {
sg                656 drivers/crypto/atmel-aes.c 			dma->remainder = sg->length - len;
sg                657 drivers/crypto/atmel-aes.c 			sg->length = len;
sg                661 drivers/crypto/atmel-aes.c 		if (!IS_ALIGNED(sg->length, dd->ctx->block_size))
sg                664 drivers/crypto/atmel-aes.c 		len -= sg->length;
sg                672 drivers/crypto/atmel-aes.c 	struct scatterlist *sg = dma->sg;
sg                678 drivers/crypto/atmel-aes.c 	while (--nents > 0 && sg)
sg                679 drivers/crypto/atmel-aes.c 		sg = sg_next(sg);
sg                681 drivers/crypto/atmel-aes.c 	if (!sg)
sg                684 drivers/crypto/atmel-aes.c 	sg->length += dma->remainder;
sg                696 drivers/crypto/atmel-aes.c 	dd->src.sg = src;
sg                697 drivers/crypto/atmel-aes.c 	dd->dst.sg = dst;
sg                713 drivers/crypto/atmel-aes.c 			dd->src.sg = &dd->aligned_sg;
sg                719 drivers/crypto/atmel-aes.c 			dd->dst.sg = &dd->aligned_sg;
sg                728 drivers/crypto/atmel-aes.c 	if (dd->src.sg == dd->dst.sg) {
sg                729 drivers/crypto/atmel-aes.c 		dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents,
sg                735 drivers/crypto/atmel-aes.c 		dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents,
sg                740 drivers/crypto/atmel-aes.c 		dd->dst.sg_len = dma_map_sg(dd->dev, dd->dst.sg, dd->dst.nents,
sg                743 drivers/crypto/atmel-aes.c 			dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
sg                754 drivers/crypto/atmel-aes.c 	if (dd->src.sg == dd->dst.sg) {
sg                755 drivers/crypto/atmel-aes.c 		dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
sg                758 drivers/crypto/atmel-aes.c 		if (dd->src.sg != &dd->aligned_sg)
sg                761 drivers/crypto/atmel-aes.c 		dma_unmap_sg(dd->dev, dd->dst.sg, dd->dst.nents,
sg                764 drivers/crypto/atmel-aes.c 		if (dd->dst.sg != &dd->aligned_sg)
sg                767 drivers/crypto/atmel-aes.c 		dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
sg                770 drivers/crypto/atmel-aes.c 		if (dd->src.sg != &dd->aligned_sg)
sg                774 drivers/crypto/atmel-aes.c 	if (dd->dst.sg == &dd->aligned_sg)
sg                818 drivers/crypto/atmel-aes.c 	desc = dmaengine_prep_slave_sg(dma->chan, dma->sg, dma->sg_len, dir,
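
The atmel-aes map/unmap hits above show the standard in-place split: when source and destination are the same list, map it once bidirectionally; otherwise map each list with its own direction and unwind the first mapping if the second fails. Condensed into one hypothetical helper:

#include <linux/dma-mapping.h>

static int map_src_dst(struct device *dev,
		       struct scatterlist *src, int src_nents,
		       struct scatterlist *dst, int dst_nents)
{
	if (src == dst)	/* in-place: one bidirectional mapping */
		return dma_map_sg(dev, src, src_nents, DMA_BIDIRECTIONAL) ?
		       0 : -EFAULT;

	if (!dma_map_sg(dev, src, src_nents, DMA_TO_DEVICE))
		return -EFAULT;
	if (!dma_map_sg(dev, dst, dst_nents, DMA_FROM_DEVICE)) {
		dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
		return -EFAULT;
	}
	return 0;
}
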
sg                 99 drivers/crypto/atmel-sha.c 	struct scatterlist	*sg;
sg                123 drivers/crypto/atmel-sha.c 	struct scatterlist	*sg;
sg                307 drivers/crypto/atmel-sha.c 		count = min(ctx->sg->length - ctx->offset, ctx->total);
sg                317 drivers/crypto/atmel-sha.c 			if ((ctx->sg->length == 0) && !sg_is_last(ctx->sg)) {
sg                318 drivers/crypto/atmel-sha.c 				ctx->sg = sg_next(ctx->sg);
sg                325 drivers/crypto/atmel-sha.c 		scatterwalk_map_and_copy(ctx->buffer + ctx->bufcnt, ctx->sg,
sg                332 drivers/crypto/atmel-sha.c 		if (ctx->offset == ctx->sg->length) {
sg                333 drivers/crypto/atmel-sha.c 			ctx->sg = sg_next(ctx->sg);
sg                334 drivers/crypto/atmel-sha.c 			if (ctx->sg)
sg                642 drivers/crypto/atmel-sha.c 	struct scatterlist sg[2];
sg                653 drivers/crypto/atmel-sha.c 		sg_init_table(sg, 2);
sg                654 drivers/crypto/atmel-sha.c 		sg_dma_address(&sg[0]) = dma_addr1;
sg                655 drivers/crypto/atmel-sha.c 		sg_dma_len(&sg[0]) = length1;
sg                656 drivers/crypto/atmel-sha.c 		sg_dma_address(&sg[1]) = dma_addr2;
sg                657 drivers/crypto/atmel-sha.c 		sg_dma_len(&sg[1]) = length2;
sg                658 drivers/crypto/atmel-sha.c 		in_desc = dmaengine_prep_slave_sg(dd->dma_lch_in.chan, sg, 2,
sg                661 drivers/crypto/atmel-sha.c 		sg_init_table(sg, 1);
sg                662 drivers/crypto/atmel-sha.c 		sg_dma_address(&sg[0]) = dma_addr1;
sg                663 drivers/crypto/atmel-sha.c 		sg_dma_len(&sg[0]) = length1;
sg                664 drivers/crypto/atmel-sha.c 		in_desc = dmaengine_prep_slave_sg(dd->dma_lch_in.chan, sg, 1,
sg                763 drivers/crypto/atmel-sha.c 	struct scatterlist *sg;
sg                775 drivers/crypto/atmel-sha.c 	sg = ctx->sg;
sg                777 drivers/crypto/atmel-sha.c 	if (!IS_ALIGNED(sg->offset, sizeof(u32)))
sg                780 drivers/crypto/atmel-sha.c 	if (!sg_is_last(sg) && !IS_ALIGNED(sg->length, ctx->block_size))
sg                784 drivers/crypto/atmel-sha.c 	length = min(ctx->total, sg->length);
sg                786 drivers/crypto/atmel-sha.c 	if (sg_is_last(sg)) {
sg                806 drivers/crypto/atmel-sha.c 		sg = ctx->sg;
sg                826 drivers/crypto/atmel-sha.c 			ctx->sg = sg;
sg                827 drivers/crypto/atmel-sha.c 			if (!dma_map_sg(dd->dev, ctx->sg, 1,
sg                837 drivers/crypto/atmel-sha.c 			return atmel_sha_xmit_start(dd, sg_dma_address(ctx->sg),
sg                842 drivers/crypto/atmel-sha.c 	if (!dma_map_sg(dd->dev, ctx->sg, 1, DMA_TO_DEVICE)) {
sg                850 drivers/crypto/atmel-sha.c 	return atmel_sha_xmit_start(dd, sg_dma_address(ctx->sg), length, 0,
sg                859 drivers/crypto/atmel-sha.c 		dma_unmap_sg(dd->dev, ctx->sg, 1, DMA_TO_DEVICE);
sg                860 drivers/crypto/atmel-sha.c 		if (ctx->sg->length == ctx->offset) {
sg                861 drivers/crypto/atmel-sha.c 			ctx->sg = sg_next(ctx->sg);
sg                862 drivers/crypto/atmel-sha.c 			if (ctx->sg)
sg               1166 drivers/crypto/atmel-sha.c 	ctx->sg = req->src;
sg               1455 drivers/crypto/atmel-sha.c 					struct scatterlist *sg,
sg               1464 drivers/crypto/atmel-sha.c 	for (nents = 0; sg; sg = sg_next(sg), ++nents) {
sg               1465 drivers/crypto/atmel-sha.c 		if (!IS_ALIGNED(sg->offset, sizeof(u32)))
sg               1472 drivers/crypto/atmel-sha.c 		if (len <= sg->length) {
sg               1474 drivers/crypto/atmel-sha.c 			dma->last_sg_length = sg->length;
sg               1475 drivers/crypto/atmel-sha.c 			sg->length = ALIGN(len, sizeof(u32));
sg               1480 drivers/crypto/atmel-sha.c 		if (!IS_ALIGNED(sg->length, bs))
sg               1483 drivers/crypto/atmel-sha.c 		len -= sg->length;
sg               1493 drivers/crypto/atmel-sha.c 	struct scatterlist *sg;
sg               1497 drivers/crypto/atmel-sha.c 	dma_unmap_sg(dd->dev, dma->sg, dma->nents, DMA_TO_DEVICE);
sg               1499 drivers/crypto/atmel-sha.c 	sg = dma->sg;
sg               1501 drivers/crypto/atmel-sha.c 		sg = sg_next(sg);
sg               1502 drivers/crypto/atmel-sha.c 	sg->length = dma->last_sg_length;
sg               1527 drivers/crypto/atmel-sha.c 	dma->sg = src;
sg               1528 drivers/crypto/atmel-sha.c 	sg_len = dma_map_sg(dd->dev, dma->sg, dma->nents, DMA_TO_DEVICE);
sg               1540 drivers/crypto/atmel-sha.c 	desc = dmaengine_prep_slave_sg(chan, dma->sg, sg_len, DMA_MEM_TO_DEV,
sg               1559 drivers/crypto/atmel-sha.c 	dma_unmap_sg(dd->dev, dma->sg, dma->nents, DMA_TO_DEVICE);
sg               1597 drivers/crypto/atmel-sha.c 		scatterwalk_map_and_copy(ctx->buffer, ctx->sg,
sg               1617 drivers/crypto/atmel-sha.c 			       struct scatterlist *sg,
sg               1637 drivers/crypto/atmel-sha.c 	ctx->sg = sg;
sg               1643 drivers/crypto/atmel-sha.c 	scatterwalk_map_and_copy(ctx->buffer, ctx->sg,
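
The atmel-sha alignment hits above trim the final list entry so the DMA'd length is 32-bit aligned, remember the original length, and put it back once the transfer is unmapped. The save/restore pair in isolation, assuming the caller has already located the entry that crosses the requested length:

#include <linux/scatterlist.h>

/* Before mapping: clamp the crossing entry, returning its old length. */
static unsigned int sg_trim_last(struct scatterlist *sg, size_t len)
{
	unsigned int saved = sg->length;

	sg->length = ALIGN(len, sizeof(u32));
	return saved;
}

/* After dma_unmap_sg(): walk back to that entry and undo the trim. */
static void sg_restore_last(struct scatterlist *sg, int nents,
			    unsigned int saved)
{
	while (--nents > 0)
		sg = sg_next(sg);
	sg->length = saved;
}
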
sg                147 drivers/crypto/atmel-tdes.c static int atmel_tdes_sg_copy(struct scatterlist **sg, size_t *offset,
sg                153 drivers/crypto/atmel-tdes.c 		count = min((*sg)->length - *offset, total);
sg                159 drivers/crypto/atmel-tdes.c 		scatterwalk_map_and_copy(buf + off, *sg, *offset, count, out);
sg                166 drivers/crypto/atmel-tdes.c 		if (*offset == (*sg)->length) {
sg                167 drivers/crypto/atmel-tdes.c 			*sg = sg_next(*sg);
sg                168 drivers/crypto/atmel-tdes.c 			if (*sg)
sg                439 drivers/crypto/atmel-tdes.c 	struct scatterlist sg[2];
sg                471 drivers/crypto/atmel-tdes.c 	sg_init_table(&sg[0], 1);
sg                472 drivers/crypto/atmel-tdes.c 	sg_dma_address(&sg[0]) = dma_addr_in;
sg                473 drivers/crypto/atmel-tdes.c 	sg_dma_len(&sg[0]) = length;
sg                475 drivers/crypto/atmel-tdes.c 	sg_init_table(&sg[1], 1);
sg                476 drivers/crypto/atmel-tdes.c 	sg_dma_address(&sg[1]) = dma_addr_out;
sg                477 drivers/crypto/atmel-tdes.c 	sg_dma_len(&sg[1]) = length;
sg                479 drivers/crypto/atmel-tdes.c 	in_desc = dmaengine_prep_slave_sg(dd->dma_lch_in.chan, &sg[0],
sg                485 drivers/crypto/atmel-tdes.c 	out_desc = dmaengine_prep_slave_sg(dd->dma_lch_out.chan, &sg[1],
sg                224 drivers/crypto/axis/artpec6_crypto.c 	struct scatterlist *sg;
sg                381 drivers/crypto/axis/artpec6_crypto.c 	struct scatterlist *sg;
sg                386 drivers/crypto/axis/artpec6_crypto.c 				     struct scatterlist *sg)
sg                388 drivers/crypto/axis/artpec6_crypto.c 	awalk->sg = sg;
sg                395 drivers/crypto/axis/artpec6_crypto.c 	while (nbytes && awalk->sg) {
sg                398 drivers/crypto/axis/artpec6_crypto.c 		WARN_ON(awalk->offset > awalk->sg->length);
sg                400 drivers/crypto/axis/artpec6_crypto.c 		piece = min(nbytes, (size_t)awalk->sg->length - awalk->offset);
sg                403 drivers/crypto/axis/artpec6_crypto.c 		if (awalk->offset == awalk->sg->length) {
sg                404 drivers/crypto/axis/artpec6_crypto.c 			awalk->sg = sg_next(awalk->sg);
sg                416 drivers/crypto/axis/artpec6_crypto.c 	WARN_ON(awalk->sg->length == awalk->offset);
sg                418 drivers/crypto/axis/artpec6_crypto.c 	return awalk->sg->length - awalk->offset;
sg                424 drivers/crypto/axis/artpec6_crypto.c 	return sg_phys(awalk->sg) + awalk->offset;
sg                437 drivers/crypto/axis/artpec6_crypto.c 		sg_pcopy_from_buffer(b->sg,
sg                807 drivers/crypto/axis/artpec6_crypto.c 	bbuf->sg = walk->sg;
sg                830 drivers/crypto/axis/artpec6_crypto.c 	while (walk->sg && count) {
sg                857 drivers/crypto/axis/artpec6_crypto.c 							 sg_page(walk->sg),
sg                858 drivers/crypto/axis/artpec6_crypto.c 							 walk->sg->offset +
sg                893 drivers/crypto/axis/artpec6_crypto.c 	while (walk->sg && count) {
sg                904 drivers/crypto/axis/artpec6_crypto.c 			sg_pcopy_to_buffer(walk->sg, 1, buf, chunk,
sg                914 drivers/crypto/axis/artpec6_crypto.c 							 sg_page(walk->sg),
sg                915 drivers/crypto/axis/artpec6_crypto.c 							 walk->sg->offset +
sg               1436 drivers/crypto/axis/artpec6_crypto.c 		if (walk.sg) {
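
The artpec6 walker above keeps an (sg, offset) cursor and advances it byte-wise, hopping to sg_next() whenever an entry is consumed. The advance step as a standalone sketch (struct sg_cursor is a hypothetical name for the driver's walk state):

#include <linux/scatterlist.h>

struct sg_cursor {
	struct scatterlist *sg;
	size_t offset;		/* position inside *sg */
};

/* Advance by nbytes; returns how many bytes did not fit in the list. */
static size_t cursor_advance(struct sg_cursor *c, size_t nbytes)
{
	while (nbytes && c->sg) {
		size_t piece = min_t(size_t, nbytes,
				     c->sg->length - c->offset);

		c->offset += piece;
		nbytes -= piece;
		if (c->offset == c->sg->length) {	/* entry consumed */
			c->sg = sg_next(c->sg);
			c->offset = 0;
		}
	}
	return nbytes;
}
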
sg                139 drivers/crypto/bcm/cipher.c 	struct scatterlist *sg;	/* used to build sgs in mbox message */
sg                148 drivers/crypto/bcm/cipher.c 	sg = mssg->spu.dst;
sg                149 drivers/crypto/bcm/cipher.c 	sg_init_table(sg, rx_frag_num);
sg                151 drivers/crypto/bcm/cipher.c 	sg_set_buf(sg++, rctx->msg_buf.spu_resp_hdr, ctx->spu_resp_hdr_len);
sg                156 drivers/crypto/bcm/cipher.c 		sg_set_buf(sg++, rctx->msg_buf.c.supdt_tweak,
sg                160 drivers/crypto/bcm/cipher.c 	datalen = spu_msg_sg_add(&sg, &rctx->dst_sg, &rctx->dst_skip,
sg                170 drivers/crypto/bcm/cipher.c 		sg_set_buf(sg++, rctx->msg_buf.c.supdt_tweak, SPU_SUPDT_LEN);
sg                173 drivers/crypto/bcm/cipher.c 		sg_set_buf(sg++, rctx->msg_buf.rx_stat_pad, stat_pad_len);
sg                176 drivers/crypto/bcm/cipher.c 	sg_set_buf(sg, rctx->msg_buf.rx_stat, spu->spu_rx_status_len());
sg                206 drivers/crypto/bcm/cipher.c 	struct scatterlist *sg;	/* used to build sgs in mbox message */
sg                216 drivers/crypto/bcm/cipher.c 	sg = mssg->spu.src;
sg                217 drivers/crypto/bcm/cipher.c 	sg_init_table(sg, tx_frag_num);
sg                219 drivers/crypto/bcm/cipher.c 	sg_set_buf(sg++, rctx->msg_buf.bcm_spu_req_hdr,
sg                225 drivers/crypto/bcm/cipher.c 		sg_set_buf(sg++, rctx->msg_buf.iv_ctr, SPU_XTS_TWEAK_SIZE);
sg                228 drivers/crypto/bcm/cipher.c 	datalen = spu_msg_sg_add(&sg, &rctx->src_sg, &rctx->src_skip,
sg                237 drivers/crypto/bcm/cipher.c 		sg_set_buf(sg++, rctx->msg_buf.spu_req_pad, pad_len);
sg                242 drivers/crypto/bcm/cipher.c 		sg_set_buf(sg, rctx->msg_buf.tx_stat, stat_len);
sg                568 drivers/crypto/bcm/cipher.c 	struct scatterlist *sg;	/* used to build sgs in mbox message */
sg                576 drivers/crypto/bcm/cipher.c 	sg = mssg->spu.dst;
sg                577 drivers/crypto/bcm/cipher.c 	sg_init_table(sg, rx_frag_num);
sg                579 drivers/crypto/bcm/cipher.c 	sg_set_buf(sg++, rctx->msg_buf.spu_resp_hdr, ctx->spu_resp_hdr_len);
sg                582 drivers/crypto/bcm/cipher.c 	sg_set_buf(sg++, rctx->msg_buf.digest, digestsize);
sg                585 drivers/crypto/bcm/cipher.c 		sg_set_buf(sg++, rctx->msg_buf.rx_stat_pad, stat_pad_len);
sg                588 drivers/crypto/bcm/cipher.c 	sg_set_buf(sg, rctx->msg_buf.rx_stat, spu->spu_rx_status_len());
sg                622 drivers/crypto/bcm/cipher.c 	struct scatterlist *sg;	/* used to build sgs in mbox message */
sg                631 drivers/crypto/bcm/cipher.c 	sg = mssg->spu.src;
sg                632 drivers/crypto/bcm/cipher.c 	sg_init_table(sg, tx_frag_num);
sg                634 drivers/crypto/bcm/cipher.c 	sg_set_buf(sg++, rctx->msg_buf.bcm_spu_req_hdr,
sg                638 drivers/crypto/bcm/cipher.c 		sg_set_buf(sg++, rctx->hash_carry, hash_carry_len);
sg                642 drivers/crypto/bcm/cipher.c 		datalen = spu_msg_sg_add(&sg, &rctx->src_sg, &rctx->src_skip,
sg                652 drivers/crypto/bcm/cipher.c 		sg_set_buf(sg++, rctx->msg_buf.spu_req_pad, pad_len);
sg                657 drivers/crypto/bcm/cipher.c 		sg_set_buf(sg, rctx->msg_buf.tx_stat, stat_len);
sg               1088 drivers/crypto/bcm/cipher.c 	struct scatterlist *sg;	/* used to build sgs in mbox message */
sg               1122 drivers/crypto/bcm/cipher.c 	sg = mssg->spu.dst;
sg               1123 drivers/crypto/bcm/cipher.c 	sg_init_table(sg, rx_frag_num);
sg               1126 drivers/crypto/bcm/cipher.c 	sg_set_buf(sg++, rctx->msg_buf.spu_resp_hdr, ctx->spu_resp_hdr_len);
sg               1134 drivers/crypto/bcm/cipher.c 		sg_set_buf(sg++, rctx->msg_buf.a.resp_aad, assoc_buf_len);
sg               1142 drivers/crypto/bcm/cipher.c 		datalen = spu_msg_sg_add(&sg, &rctx->dst_sg, &rctx->dst_skip,
sg               1154 drivers/crypto/bcm/cipher.c 		sg_set_buf(sg++, rctx->msg_buf.a.gcmpad, data_padlen);
sg               1158 drivers/crypto/bcm/cipher.c 	sg_set_buf(sg++, rctx->msg_buf.digest, digestsize);
sg               1163 drivers/crypto/bcm/cipher.c 		sg_set_buf(sg++, rctx->msg_buf.rx_stat_pad, stat_pad_len);
sg               1167 drivers/crypto/bcm/cipher.c 	sg_set_buf(sg, rctx->msg_buf.rx_stat, spu->spu_rx_status_len());
sg               1211 drivers/crypto/bcm/cipher.c 	struct scatterlist *sg;	/* used to build sgs in mbox message */
sg               1224 drivers/crypto/bcm/cipher.c 	sg = mssg->spu.src;
sg               1225 drivers/crypto/bcm/cipher.c 	sg_init_table(sg, tx_frag_num);
sg               1227 drivers/crypto/bcm/cipher.c 	sg_set_buf(sg++, rctx->msg_buf.bcm_spu_req_hdr,
sg               1232 drivers/crypto/bcm/cipher.c 		written = spu_msg_sg_add(&sg, &assoc_sg, &assoc_offset,
sg               1242 drivers/crypto/bcm/cipher.c 		sg_set_buf(sg++, rctx->msg_buf.iv_ctr, aead_iv_len);
sg               1246 drivers/crypto/bcm/cipher.c 		sg_set_buf(sg++, rctx->msg_buf.a.req_aad_pad, aad_pad_len);
sg               1254 drivers/crypto/bcm/cipher.c 		written = spu_msg_sg_add(&sg, &rctx->src_sg, &rctx->src_skip,
sg               1265 drivers/crypto/bcm/cipher.c 		sg_set_buf(sg++, rctx->msg_buf.spu_req_pad, pad_len);
sg               1269 drivers/crypto/bcm/cipher.c 		sg_set_buf(sg++, rctx->msg_buf.digest, ctx->digestsize);
sg               1274 drivers/crypto/bcm/cipher.c 		sg_set_buf(sg, rctx->msg_buf.tx_stat, stat_len);
sg                 27 drivers/crypto/bcm/util.c int spu_sg_at_offset(struct scatterlist *sg, unsigned int skip,
sg                 35 drivers/crypto/bcm/util.c 	next_index = sg->length;
sg                 37 drivers/crypto/bcm/util.c 		sg = sg_next(sg);
sg                 39 drivers/crypto/bcm/util.c 		if (!sg)
sg                 41 drivers/crypto/bcm/util.c 		next_index += sg->length;
sg                 45 drivers/crypto/bcm/util.c 	*sge = sg;
sg                 95 drivers/crypto/bcm/util.c 	struct scatterlist *sg;
sg                102 drivers/crypto/bcm/util.c 	if (spu_sg_at_offset(sg_list, skip, &sg, &offset) < 0)
sg                105 drivers/crypto/bcm/util.c 	while (sg && (nbytes > 0)) {
sg                107 drivers/crypto/bcm/util.c 		nbytes -= (sg->length - offset);
sg                109 drivers/crypto/bcm/util.c 		sg = sg_next(sg);
sg                137 drivers/crypto/bcm/util.c 	struct scatterlist *sg;	/* an entry in from_sg */
sg                150 drivers/crypto/bcm/util.c 	for_each_sg(from, sg, from_nents, i) {
sg                152 drivers/crypto/bcm/util.c 		entry_len = sg->length - skip;
sg                154 drivers/crypto/bcm/util.c 		offset = sg->offset + skip;
sg                156 drivers/crypto/bcm/util.c 			sg_set_page(to++, sg_page(sg), frag_len, offset);
sg                166 drivers/crypto/bcm/util.c 	*from_sg = sg;
sg                272 drivers/crypto/bcm/util.c void __dump_sg(struct scatterlist *sg, unsigned int skip, unsigned int len)
sg                282 drivers/crypto/bcm/util.c 			sg_copy_part_to_buf(sg, dbuf, count, idx);
sg                 55 drivers/crypto/bcm/util.h void __dump_sg(struct scatterlist *sg, unsigned int skip, unsigned int len);
sg                 57 drivers/crypto/bcm/util.h #define dump_sg(sg, skip, len)     __dump_sg(sg, skip, len)
sg                 66 drivers/crypto/bcm/util.h #define dump_sg(sg, skip, len) do {} while (0)
sg                 70 drivers/crypto/bcm/util.h int spu_sg_at_offset(struct scatterlist *sg, unsigned int skip,
sg                754 drivers/crypto/caam/caamhash.c 		struct sec4_sg_entry *sg = edesc->sec4_sg;
sg                755 drivers/crypto/caam/caamhash.c 		unsigned int sgsize = sizeof(*sg) *
sg                758 drivers/crypto/caam/caamhash.c 		sg_to_sec4_sg_last(req->src, to_hash, sg + first_sg, 0);
sg                760 drivers/crypto/caam/caamhash.c 		src_dma = dma_map_single(ctx->jrdev, sg, sgsize, DMA_TO_DEVICE);
sg                 17 drivers/crypto/caam/error.c 		  int rowsize, int groupsize, struct scatterlist *sg,
sg                 25 drivers/crypto/caam/error.c 	for (it = sg; it && tlen > 0 ; it = sg_next(it)) {
sg                 47 drivers/crypto/caam/error.c 		  int rowsize, int groupsize, struct scatterlist *sg,
sg                 21 drivers/crypto/caam/error.h 		  int rowsize, int groupsize, struct scatterlist *sg,
sg                 57 drivers/crypto/caam/sg_sw_qm.h sg_to_qm_sg(struct scatterlist *sg, int len,
sg                 63 drivers/crypto/caam/sg_sw_qm.h 		ent_len = min_t(int, sg_dma_len(sg), len);
sg                 65 drivers/crypto/caam/sg_sw_qm.h 		dma_to_qm_sg_one(qm_sg_ptr, sg_dma_address(sg), ent_len,
sg                 68 drivers/crypto/caam/sg_sw_qm.h 		sg = sg_next(sg);
sg                 78 drivers/crypto/caam/sg_sw_qm.h static inline void sg_to_qm_sg_last(struct scatterlist *sg, int len,
sg                 81 drivers/crypto/caam/sg_sw_qm.h 	qm_sg_ptr = sg_to_qm_sg(sg, len, qm_sg_ptr, offset);
sg                 28 drivers/crypto/caam/sg_sw_qm2.h sg_to_qm_sg(struct scatterlist *sg, int len,
sg                 34 drivers/crypto/caam/sg_sw_qm2.h 		ent_len = min_t(int, sg_dma_len(sg), len);
sg                 36 drivers/crypto/caam/sg_sw_qm2.h 		dma_to_qm_sg_one(qm_sg_ptr, sg_dma_address(sg), ent_len,
sg                 39 drivers/crypto/caam/sg_sw_qm2.h 		sg = sg_next(sg);
sg                 49 drivers/crypto/caam/sg_sw_qm2.h static inline void sg_to_qm_sg_last(struct scatterlist *sg, int len,
sg                 53 drivers/crypto/caam/sg_sw_qm2.h 	qm_sg_ptr = sg_to_qm_sg(sg, len, qm_sg_ptr, offset);
sg                 48 drivers/crypto/caam/sg_sw_sec4.h sg_to_sec4_sg(struct scatterlist *sg, int len,
sg                 54 drivers/crypto/caam/sg_sw_sec4.h 		ent_len = min_t(int, sg_dma_len(sg), len);
sg                 56 drivers/crypto/caam/sg_sw_sec4.h 		dma_to_sec4_sg_one(sec4_sg_ptr, sg_dma_address(sg), ent_len,
sg                 59 drivers/crypto/caam/sg_sw_sec4.h 		sg = sg_next(sg);
sg                 77 drivers/crypto/caam/sg_sw_sec4.h static inline void sg_to_sec4_sg_last(struct scatterlist *sg, int len,
sg                 81 drivers/crypto/caam/sg_sw_sec4.h 	sec4_sg_ptr = sg_to_sec4_sg(sg, len, sec4_sg_ptr, offset);
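
All three caam helpers above (sg_sw_qm.h, sg_sw_qm2.h, sg_sw_sec4.h) share one loop shape: clamp each DMA-mapped entry to the bytes still needed and emit one hardware S/G descriptor per entry; the *_last variants then flag the final descriptor. A sketch of that shape — struct hw_sg_entry and hw_sg_fill_one() are hypothetical stand-ins for the qm/sec4 descriptor types:

#include <linux/kernel.h>
#include <linux/scatterlist.h>

struct hw_sg_entry {		/* hypothetical hardware descriptor */
	u64 addr;
	u32 len;
	u32 flags;
};

static void hw_sg_fill_one(struct hw_sg_entry *e, dma_addr_t addr, u32 len)
{
	e->addr = addr;		/* real helpers also handle endianness/flags */
	e->len = len;
	e->flags = 0;
}

static struct hw_sg_entry *
sg_to_hw_sg(struct scatterlist *sg, int len, struct hw_sg_entry *hw)
{
	int ent_len;

	while (len) {		/* caller guarantees the list covers len */
		ent_len = min_t(int, sg_dma_len(sg), len);
		hw_sg_fill_one(hw++, sg_dma_address(sg), ent_len);
		len -= ent_len;
		sg = sg_next(sg);
	}
	return hw;		/* one past the last descriptor written */
}
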
sg                368 drivers/crypto/cavium/nitrox/nitrox_aead.c 	struct scatterlist *sg;
sg                376 drivers/crypto/cavium/nitrox/nitrox_aead.c 	sg = scatterwalk_ffwd(rctx->src + 1, areq->src, areq->assoclen);
sg                377 drivers/crypto/cavium/nitrox/nitrox_aead.c 	if (sg != rctx->src + 1)
sg                378 drivers/crypto/cavium/nitrox/nitrox_aead.c 		sg_chain(rctx->src, 2, sg);
sg                383 drivers/crypto/cavium/nitrox/nitrox_aead.c 		sg = scatterwalk_ffwd(rctx->dst + 1, areq->dst, areq->assoclen);
sg                384 drivers/crypto/cavium/nitrox/nitrox_aead.c 		if (sg != rctx->dst + 1)
sg                385 drivers/crypto/cavium/nitrox/nitrox_aead.c 			sg_chain(rctx->dst, 2, sg);
sg                474 drivers/crypto/cavium/nitrox/nitrox_req.h 	struct scatterlist *sg;
sg                565 drivers/crypto/cavium/nitrox/nitrox_req.h static inline struct scatterlist *create_single_sg(struct scatterlist *sg,
sg                568 drivers/crypto/cavium/nitrox/nitrox_req.h 	sg_set_buf(sg, buf, buflen);
sg                569 drivers/crypto/cavium/nitrox/nitrox_req.h 	sg++;
sg                570 drivers/crypto/cavium/nitrox/nitrox_req.h 	return sg;
sg                586 drivers/crypto/cavium/nitrox/nitrox_req.h 	struct scatterlist *sg = to_sg;
sg                594 drivers/crypto/cavium/nitrox/nitrox_req.h 		sg_set_buf(sg, sg_virt(from_sg), sglen);
sg                596 drivers/crypto/cavium/nitrox/nitrox_req.h 		sg++;
sg                599 drivers/crypto/cavium/nitrox/nitrox_req.h 	return sg;
sg                639 drivers/crypto/cavium/nitrox/nitrox_req.h 	struct scatterlist *sg;
sg                643 drivers/crypto/cavium/nitrox/nitrox_req.h 	sg = creq->src;
sg                644 drivers/crypto/cavium/nitrox/nitrox_req.h 	sg_init_table(sg, nents);
sg                653 drivers/crypto/cavium/nitrox/nitrox_req.h 	sg = create_single_sg(sg, iv, ivsize);
sg                655 drivers/crypto/cavium/nitrox/nitrox_req.h 	create_multi_sg(sg, src, buflen);
sg                697 drivers/crypto/cavium/nitrox/nitrox_req.h 	struct scatterlist *sg;
sg                701 drivers/crypto/cavium/nitrox/nitrox_req.h 	sg = creq->dst;
sg                702 drivers/crypto/cavium/nitrox/nitrox_req.h 	sg_init_table(sg, nents);
sg                711 drivers/crypto/cavium/nitrox/nitrox_req.h 	sg = create_single_sg(sg, creq->orh, ORH_HLEN);
sg                713 drivers/crypto/cavium/nitrox/nitrox_req.h 	sg = create_single_sg(sg, iv, ivsize);
sg                715 drivers/crypto/cavium/nitrox/nitrox_req.h 	sg = create_multi_sg(sg, dst, buflen);
sg                717 drivers/crypto/cavium/nitrox/nitrox_req.h 	create_single_sg(sg, creq->comp, COMP_HLEN);
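
The nitrox_req.h fragments assemble one flat table from fixed headers plus the data: sg_init_table() sizes it, then create_single_sg()/create_multi_sg() fill successive entries. A sketch of the output-list layout under assumed sizes (the ORH_HLEN/COMP_HLEN values here are illustrative, not the driver's):

#include <linux/scatterlist.h>

#define ORH_HLEN	8	/* illustrative header sizes */
#define COMP_HLEN	8

static void build_out_list(struct scatterlist *sgl, void *orh, void *iv,
			   unsigned int ivsize, void *data,
			   unsigned int datalen, void *comp)
{
	sg_init_table(sgl, 4);			/* also marks the end entry */
	sg_set_buf(&sgl[0], orh, ORH_HLEN);	/* response header */
	sg_set_buf(&sgl[1], iv, ivsize);	/* IV */
	sg_set_buf(&sgl[2], data, datalen);	/* payload */
	sg_set_buf(&sgl[3], comp, COMP_HLEN);	/* completion word */
}
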
sg                 60 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 	dma_unmap_sg(dev, sr->in.sg, sr->in.sgmap_cnt, DMA_BIDIRECTIONAL);
sg                 64 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 	sr->in.sg = NULL;
sg                 67 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 	dma_unmap_sg(dev, sr->out.sg, sr->out.sgmap_cnt,
sg                 72 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 	sr->out.sg = NULL;
sg                110 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 	struct scatterlist *sg;
sg                125 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 	sg = sgtbl->sg;
sg                128 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 		for (j = 0; j < 4 && sg; j++) {
sg                129 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 			sgcomp[i].len[j] = cpu_to_be16(sg_dma_len(sg));
sg                130 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 			sgcomp[i].dma[j] = cpu_to_be64(sg_dma_address(sg));
sg                131 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 			sg = sg_next(sg);
sg                160 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 	struct scatterlist *sg = req->src;
sg                168 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 	for_each_sg(req->src, sg, nents, i)
sg                169 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 		sr->in.total_bytes += sg_dma_len(sg);
sg                171 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 	sr->in.sg = req->src;
sg                196 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 	sr->out.sg = req->dst;
sg                207 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c 	sr->out.sg = NULL;
sg                 61 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	struct scatterlist *sg, *cmac_key_sg = NULL;
sg                114 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	sg = NULL;
sg                117 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 		sg = ccp_crypto_sg_table_add(&rctx->data_sg, &rctx->buf_sg);
sg                118 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 		if (!sg) {
sg                125 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 		sg = ccp_crypto_sg_table_add(&rctx->data_sg, req->src);
sg                126 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 		if (!sg) {
sg                140 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 		sg = ccp_crypto_sg_table_add(&rctx->data_sg, &rctx->pad_sg);
sg                141 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 		if (!sg) {
sg                146 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	if (sg) {
sg                147 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 		sg_mark_end(sg);
sg                148 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 		sg = rctx->data_sg.sgl;
sg                166 drivers/crypto/ccp/ccp-crypto-aes-cmac.c 	rctx->cmd.u.aes.src = sg;
sg                309 drivers/crypto/ccp/ccp-crypto-main.c 	struct scatterlist *sg, *sg_last = NULL;
sg                311 drivers/crypto/ccp/ccp-crypto-main.c 	for (sg = table->sgl; sg; sg = sg_next(sg))
sg                312 drivers/crypto/ccp/ccp-crypto-main.c 		if (!sg_page(sg))
sg                314 drivers/crypto/ccp/ccp-crypto-main.c 	if (WARN_ON(!sg))
sg                317 drivers/crypto/ccp/ccp-crypto-main.c 	for (; sg && sg_add; sg = sg_next(sg), sg_add = sg_next(sg_add)) {
sg                318 drivers/crypto/ccp/ccp-crypto-main.c 		sg_set_page(sg, sg_page(sg_add), sg_add->length,
sg                320 drivers/crypto/ccp/ccp-crypto-main.c 		sg_last = sg;
sg                 62 drivers/crypto/ccp/ccp-crypto-sha.c 	struct scatterlist *sg;
sg                 95 drivers/crypto/ccp/ccp-crypto-sha.c 	sg = NULL;
sg                108 drivers/crypto/ccp/ccp-crypto-sha.c 		sg = ccp_crypto_sg_table_add(&rctx->data_sg, &rctx->buf_sg);
sg                109 drivers/crypto/ccp/ccp-crypto-sha.c 		if (!sg) {
sg                113 drivers/crypto/ccp/ccp-crypto-sha.c 		sg = ccp_crypto_sg_table_add(&rctx->data_sg, req->src);
sg                114 drivers/crypto/ccp/ccp-crypto-sha.c 		if (!sg) {
sg                118 drivers/crypto/ccp/ccp-crypto-sha.c 		sg_mark_end(sg);
sg                120 drivers/crypto/ccp/ccp-crypto-sha.c 		sg = rctx->data_sg.sgl;
sg                124 drivers/crypto/ccp/ccp-crypto-sha.c 		sg = &rctx->buf_sg;
sg                126 drivers/crypto/ccp/ccp-crypto-sha.c 		sg = req->src;
sg                158 drivers/crypto/ccp/ccp-crypto-sha.c 	rctx->cmd.u.sha.src = sg;
sg                467 drivers/crypto/ccp/ccp-dev.h 	struct scatterlist *sg;
sg                 72 drivers/crypto/ccp/ccp-ops.c 				struct scatterlist *sg, u64 len,
sg                 77 drivers/crypto/ccp/ccp-ops.c 	wa->sg = sg;
sg                 78 drivers/crypto/ccp/ccp-ops.c 	if (!sg)
sg                 81 drivers/crypto/ccp/ccp-ops.c 	wa->nents = sg_nents_for_len(sg, len);
sg                 94 drivers/crypto/ccp/ccp-ops.c 	wa->dma_sg = sg;
sg                 97 drivers/crypto/ccp/ccp-ops.c 	wa->dma_count = dma_map_sg(dev, sg, wa->nents, dma_dir);
sg                108 drivers/crypto/ccp/ccp-ops.c 	if (!wa->sg)
sg                113 drivers/crypto/ccp/ccp-ops.c 	if (wa->sg_used == wa->sg->length) {
sg                114 drivers/crypto/ccp/ccp-ops.c 		wa->sg = sg_next(wa->sg);
sg                177 drivers/crypto/ccp/ccp-ops.c 			   struct scatterlist *sg, unsigned int sg_offset,
sg                185 drivers/crypto/ccp/ccp-ops.c 	scatterwalk_map_and_copy(wa->address + wa_offset, sg, sg_offset, len,
sg                191 drivers/crypto/ccp/ccp-ops.c 			    struct scatterlist *sg, unsigned int sg_offset,
sg                196 drivers/crypto/ccp/ccp-ops.c 	scatterwalk_map_and_copy(wa->address + wa_offset, sg, sg_offset, len,
sg                202 drivers/crypto/ccp/ccp-ops.c 				   struct scatterlist *sg,
sg                209 drivers/crypto/ccp/ccp-ops.c 	rc = ccp_set_dm_area(wa, wa_offset, sg, sg_offset, len);
sg                227 drivers/crypto/ccp/ccp-ops.c 				    struct scatterlist *sg,
sg                243 drivers/crypto/ccp/ccp-ops.c 	ccp_get_dm_area(wa, wa_offset, sg, sg_offset, len);
sg                253 drivers/crypto/ccp/ccp-ops.c 			 struct scatterlist *sg, u64 sg_len,
sg                261 drivers/crypto/ccp/ccp-ops.c 	ret = ccp_init_sg_workarea(&data->sg_wa, cmd_q->ccp->dev, sg, sg_len,
sg                288 drivers/crypto/ccp/ccp-ops.c 	if (!sg_wa->sg)
sg                296 drivers/crypto/ccp/ccp-ops.c 	scatterwalk_map_and_copy(dm_wa->address, sg_wa->sg, sg_wa->sg_used,
sg                302 drivers/crypto/ccp/ccp-ops.c 		nbytes = min(sg_wa->sg->length - sg_wa->sg_used,
sg                334 drivers/crypto/ccp/ccp-ops.c 	sg_src_len = sg_dma_len(src->sg_wa.sg) - src->sg_wa.sg_used;
sg                338 drivers/crypto/ccp/ccp-ops.c 		sg_dst_len = sg_dma_len(dst->sg_wa.sg) - dst->sg_wa.sg_used;
sg                368 drivers/crypto/ccp/ccp-ops.c 		op->src.u.dma.address = sg_dma_address(src->sg_wa.sg);
sg                389 drivers/crypto/ccp/ccp-ops.c 			op->dst.u.dma.address = sg_dma_address(dst->sg_wa.sg);
sg               1745 drivers/crypto/ccp/ccp-ops.c 		struct scatterlist sg;
sg               1758 drivers/crypto/ccp/ccp-ops.c 		sg_init_one(&sg, hmac_buf, block_size + digest_size);
sg               1789 drivers/crypto/ccp/ccp-ops.c 		hmac_cmd.u.sha.src = &sg;
sg               2029 drivers/crypto/ccp/ccp-ops.c 		if (!dst.sg_wa.sg ||
sg               2030 drivers/crypto/ccp/ccp-ops.c 		    (dst.sg_wa.sg->length < src.sg_wa.sg->length)) {
sg               2041 drivers/crypto/ccp/ccp-ops.c 		op.src.u.dma.address = sg_dma_address(src.sg_wa.sg);
sg               2043 drivers/crypto/ccp/ccp-ops.c 		op.src.u.dma.length = sg_dma_len(src.sg_wa.sg);
sg               2046 drivers/crypto/ccp/ccp-ops.c 		op.dst.u.dma.address = sg_dma_address(dst.sg_wa.sg);
sg               2056 drivers/crypto/ccp/ccp-ops.c 		dst.sg_wa.sg_used += src.sg_wa.sg->length;
sg               2057 drivers/crypto/ccp/ccp-ops.c 		if (dst.sg_wa.sg_used == dst.sg_wa.sg->length) {
sg               2058 drivers/crypto/ccp/ccp-ops.c 			dst.sg_wa.sg = sg_next(dst.sg_wa.sg);
sg               2061 drivers/crypto/ccp/ccp-ops.c 		src.sg_wa.sg = sg_next(src.sg_wa.sg);
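
ccp-ops.c drives everything through a small "sg workarea": the current entry plus a count of bytes already consumed from it. A sketch of the init/advance pair implied by the fragments, with the field set reduced to what the visible lines use:

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

struct sg_workarea {
	struct scatterlist *sg;		/* current entry */
	unsigned int sg_used;		/* bytes consumed from it */
	int nents;
};

static int sg_workarea_init(struct sg_workarea *wa, struct device *dev,
			    struct scatterlist *sg, u64 len,
			    enum dma_data_direction dir)
{
	wa->sg = sg;
	wa->sg_used = 0;
	if (!sg)
		return 0;

	wa->nents = sg_nents_for_len(sg, len);	/* entries covering len */
	if (wa->nents < 0)
		return wa->nents;

	return dma_map_sg(dev, sg, wa->nents, dir) ? 0 : -ENOMEM;
}

static void sg_workarea_advance(struct sg_workarea *wa, unsigned int nbytes)
{
	if (!wa->sg)
		return;

	wa->sg_used += nbytes;
	if (wa->sg_used == wa->sg->length) {	/* entry exhausted: step on */
		wa->sg = sg_next(wa->sg);
		wa->sg_used = 0;
	}
}
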
sg                115 drivers/crypto/ccree/cc_buffer_mgr.c void cc_copy_sg_portion(struct device *dev, u8 *dest, struct scatterlist *sg,
sg                120 drivers/crypto/ccree/cc_buffer_mgr.c 	nents = sg_nents_for_len(sg, end);
sg                121 drivers/crypto/ccree/cc_buffer_mgr.c 	sg_copy_buffer(sg, nents, (void *)dest, (end - to_skip + 1), to_skip,
sg                292 drivers/crypto/ccree/cc_buffer_mgr.c static int cc_map_sg(struct device *dev, struct scatterlist *sg,
sg                298 drivers/crypto/ccree/cc_buffer_mgr.c 	*nents = cc_get_sgl_nents(dev, sg, nbytes, lbytes);
sg                306 drivers/crypto/ccree/cc_buffer_mgr.c 	ret = dma_map_sg(dev, sg, *nents, direction);
sg                705 drivers/crypto/ccree/cc_buffer_mgr.c 	struct scatterlist *sg;
sg                711 drivers/crypto/ccree/cc_buffer_mgr.c 		sg = areq_ctx->src_sgl;
sg                714 drivers/crypto/ccree/cc_buffer_mgr.c 		sg = areq_ctx->dst_sgl;
sg                718 drivers/crypto/ccree/cc_buffer_mgr.c 	areq_ctx->icv_dma_addr = sg_dma_address(sg) + offset;
sg                719 drivers/crypto/ccree/cc_buffer_mgr.c 	areq_ctx->icv_virt_addr = sg_virt(sg) + offset;
sg                732 drivers/crypto/ccree/cc_buffer_mgr.c 	struct scatterlist *sg;
sg                766 drivers/crypto/ccree/cc_buffer_mgr.c 			sg = &areq_ctx->src_sgl[areq_ctx->src.nents - 1];
sg                768 drivers/crypto/ccree/cc_buffer_mgr.c 			areq_ctx->icv_dma_addr = sg_dma_address(sg) +
sg                770 drivers/crypto/ccree/cc_buffer_mgr.c 			areq_ctx->icv_virt_addr = sg_virt(sg) +
sg                798 drivers/crypto/ccree/cc_buffer_mgr.c 			sg = &areq_ctx->src_sgl[areq_ctx->src.nents - 1];
sg                800 drivers/crypto/ccree/cc_buffer_mgr.c 			areq_ctx->icv_dma_addr = sg_dma_address(sg) +
sg                802 drivers/crypto/ccree/cc_buffer_mgr.c 			areq_ctx->icv_virt_addr = sg_virt(sg) +
sg                822 drivers/crypto/ccree/cc_buffer_mgr.c 			sg = &areq_ctx->dst_sgl[areq_ctx->dst.nents - 1];
sg                824 drivers/crypto/ccree/cc_buffer_mgr.c 			areq_ctx->icv_dma_addr = sg_dma_address(sg) +
sg                826 drivers/crypto/ccree/cc_buffer_mgr.c 			areq_ctx->icv_virt_addr = sg_virt(sg) +
sg                 67 drivers/crypto/ccree/cc_buffer_mgr.h void cc_copy_sg_portion(struct device *dev, u8 *dest, struct scatterlist *sg,
sg                139 drivers/crypto/chelsio/chcr_algo.c static int sg_nents_xlen(struct scatterlist *sg, unsigned int reqlen,
sg                147 drivers/crypto/chelsio/chcr_algo.c 	while (sg && skip) {
sg                148 drivers/crypto/chelsio/chcr_algo.c 		if (sg_dma_len(sg) <= skip) {
sg                149 drivers/crypto/chelsio/chcr_algo.c 			skip -= sg_dma_len(sg);
sg                151 drivers/crypto/chelsio/chcr_algo.c 			sg = sg_next(sg);
sg                158 drivers/crypto/chelsio/chcr_algo.c 	while (sg && reqlen) {
sg                159 drivers/crypto/chelsio/chcr_algo.c 		less = min(reqlen, sg_dma_len(sg) - skip_len);
sg                163 drivers/crypto/chelsio/chcr_algo.c 		sg = sg_next(sg);
sg                428 drivers/crypto/chelsio/chcr_algo.c 			   struct scatterlist *sg,
sg                439 drivers/crypto/chelsio/chcr_algo.c 	while (sg && skip) {
sg                440 drivers/crypto/chelsio/chcr_algo.c 		if (sg_dma_len(sg) <= skip) {
sg                441 drivers/crypto/chelsio/chcr_algo.c 			skip -= sg_dma_len(sg);
sg                443 drivers/crypto/chelsio/chcr_algo.c 			sg = sg_next(sg);
sg                450 drivers/crypto/chelsio/chcr_algo.c 	while (left_size && sg) {
sg                451 drivers/crypto/chelsio/chcr_algo.c 		len = min_t(u32, left_size, sg_dma_len(sg) - skip_len);
sg                456 drivers/crypto/chelsio/chcr_algo.c 			walk->to->addr[j % 8] = cpu_to_be64(sg_dma_address(sg) +
sg                464 drivers/crypto/chelsio/chcr_algo.c 		walk->last_sg = sg;
sg                465 drivers/crypto/chelsio/chcr_algo.c 		walk->last_sg_len = min_t(u32, left_size, sg_dma_len(sg) -
sg                467 drivers/crypto/chelsio/chcr_algo.c 		left_size -= min_t(u32, left_size, sg_dma_len(sg) - skip_len);
sg                469 drivers/crypto/chelsio/chcr_algo.c 		sg = sg_next(sg);
sg                513 drivers/crypto/chelsio/chcr_algo.c 					struct scatterlist *sg,
sg                523 drivers/crypto/chelsio/chcr_algo.c 	while (sg && skip) {
sg                524 drivers/crypto/chelsio/chcr_algo.c 		if (sg_dma_len(sg) <= skip) {
sg                525 drivers/crypto/chelsio/chcr_algo.c 			skip -= sg_dma_len(sg);
sg                527 drivers/crypto/chelsio/chcr_algo.c 			sg = sg_next(sg);
sg                533 drivers/crypto/chelsio/chcr_algo.c 	WARN(!sg, "SG should not be null here\n");
sg                534 drivers/crypto/chelsio/chcr_algo.c 	if (sg && (walk->nents == 0)) {
sg                535 drivers/crypto/chelsio/chcr_algo.c 		small = min_t(unsigned int, sg_dma_len(sg) - skip_len, len);
sg                538 drivers/crypto/chelsio/chcr_algo.c 		walk->sgl->addr0 = cpu_to_be64(sg_dma_address(sg) + skip_len);
sg                541 drivers/crypto/chelsio/chcr_algo.c 		walk->last_sg = sg;
sg                544 drivers/crypto/chelsio/chcr_algo.c 		if (sg_dma_len(sg) == skip_len) {
sg                545 drivers/crypto/chelsio/chcr_algo.c 			sg = sg_next(sg);
sg                550 drivers/crypto/chelsio/chcr_algo.c 	while (sg && len) {
sg                551 drivers/crypto/chelsio/chcr_algo.c 		small = min(sg_dma_len(sg) - skip_len, len);
sg                555 drivers/crypto/chelsio/chcr_algo.c 			cpu_to_be64(sg_dma_address(sg) + skip_len);
sg                562 drivers/crypto/chelsio/chcr_algo.c 		walk->last_sg = sg;
sg                564 drivers/crypto/chelsio/chcr_algo.c 		if (sg_dma_len(sg) == skip_len) {
sg                565 drivers/crypto/chelsio/chcr_algo.c 			sg = sg_next(sg);
sg                333 drivers/crypto/chelsio/chcr_crypto.h int sg_nents_len_skip(struct scatterlist *sg, u64 len, u64 skip);
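
All three chelsio walks above open with the same two-phase skip-then-consume loop. A sketch of the counting variant (sg_nents_xlen): burn off 'skip' bytes, possibly landing mid-entry, then count entries until 'reqlen' is covered. The driver's per-entry length cap (which can split one entry into several ULPTX units) is omitted here:

#include <linux/kernel.h>
#include <linux/scatterlist.h>

static int nents_for_range(struct scatterlist *sg, unsigned int reqlen,
			   unsigned int skip)
{
	unsigned int less, skip_len = 0;
	int nents = 0;

	while (sg && skip) {
		if (sg_dma_len(sg) <= skip) {
			skip -= sg_dma_len(sg);
			sg = sg_next(sg);
		} else {
			skip_len = skip;	/* offset into this entry */
			skip = 0;
		}
	}

	while (sg && reqlen) {
		less = min(reqlen, sg_dma_len(sg) - skip_len);
		nents++;
		reqlen -= less;
		skip_len = 0;			/* only first entry is offset */
		sg = sg_next(sg);
	}
	return nents;
}
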
sg                183 drivers/crypto/hisilicon/sec/sec_algs.c 	struct scatterlist *sg;
sg                189 drivers/crypto/hisilicon/sec/sec_algs.c 	for_each_sg(sgl, sg, count, i) {
sg                209 drivers/crypto/hisilicon/sec/sec_algs.c 		sgl_current->sge_entries[sge_index].buf = sg_dma_address(sg);
sg                210 drivers/crypto/hisilicon/sec/sec_algs.c 		sgl_current->sge_entries[sge_index].len = sg_dma_len(sg);
sg                211 drivers/crypto/hisilicon/sec/sec_algs.c 		sgl_current->data_bytes_in_sgl += sg_dma_len(sg);
sg                155 drivers/crypto/hisilicon/sgl.c 	struct scatterlist *sg;
sg                174 drivers/crypto/hisilicon/sgl.c 	for_each_sg(sgl, sg, sg_n, i) {
sg                175 drivers/crypto/hisilicon/sgl.c 		sg_map_to_hw_sg(sg, curr_hw_sge);
sg                 94 drivers/crypto/img-hash.c 	struct scatterlist	*sg;
sg                217 drivers/crypto/img-hash.c 	if (ctx->sg)
sg                221 drivers/crypto/img-hash.c static int img_hash_xmit_dma(struct img_hash_dev *hdev, struct scatterlist *sg)
sg                226 drivers/crypto/img-hash.c 	ctx->dma_ct = dma_map_sg(hdev->dev, sg, 1, DMA_TO_DEVICE);
sg                234 drivers/crypto/img-hash.c 				       sg,
sg                241 drivers/crypto/img-hash.c 		dma_unmap_sg(hdev->dev, sg, 1, DMA_TO_DEVICE);
sg                256 drivers/crypto/img-hash.c 	ctx->bufcnt = sg_copy_to_buffer(hdev->req->src, sg_nents(ctx->sg),
sg                364 drivers/crypto/img-hash.c 	if (!hdev->req || !ctx->sg)
sg                367 drivers/crypto/img-hash.c 	addr = sg_virt(ctx->sg);
sg                368 drivers/crypto/img-hash.c 	nbytes = ctx->sg->length - ctx->offset;
sg                402 drivers/crypto/img-hash.c 		ctx->sg = sg_next(ctx->sg);
sg                403 drivers/crypto/img-hash.c 		while (ctx->sg && (ctx->bufcnt < 4)) {
sg                404 drivers/crypto/img-hash.c 			len = ctx->sg->length;
sg                411 drivers/crypto/img-hash.c 			if (tbc >= ctx->sg->length) {
sg                412 drivers/crypto/img-hash.c 				ctx->sg = sg_next(ctx->sg);
sg                424 drivers/crypto/img-hash.c 		ctx->sg = sg_next(ctx->sg);
sg                433 drivers/crypto/img-hash.c 		dma_unmap_sg(hdev->dev, ctx->sg, ctx->dma_ct, DMA_TO_DEVICE);
sg                663 drivers/crypto/img-hash.c 	ctx->sg = req->src;
sg                665 drivers/crypto/img-hash.c 	ctx->nents = sg_nents(ctx->sg);
sg                562 drivers/crypto/inside-secure/safexcel_cipher.c 	struct scatterlist *sg;
sg                645 drivers/crypto/inside-secure/safexcel_cipher.c 	for_each_sg(src, sg, sreq->nr_src, i) {
sg                646 drivers/crypto/inside-secure/safexcel_cipher.c 		int len = sg_dma_len(sg);
sg                654 drivers/crypto/inside-secure/safexcel_cipher.c 					   sg_dma_address(sg), len, totlen,
sg                693 drivers/crypto/inside-secure/safexcel_cipher.c 	for_each_sg(dst, sg, sreq->nr_dst, i) {
sg                695 drivers/crypto/inside-secure/safexcel_cipher.c 		u32 len = sg_dma_len(sg);
sg                711 drivers/crypto/inside-secure/safexcel_cipher.c 						   sg_dma_address(sg) +
sg                717 drivers/crypto/inside-secure/safexcel_cipher.c 						   sg_dma_address(sg),
sg                263 drivers/crypto/inside-secure/safexcel_hash.c 	struct scatterlist *sg;
sg                336 drivers/crypto/inside-secure/safexcel_hash.c 	for_each_sg(areq->src, sg, req->nents, i) {
sg                337 drivers/crypto/inside-secure/safexcel_hash.c 		int sglen = sg_dma_len(sg);
sg                345 drivers/crypto/inside-secure/safexcel_hash.c 					   sg_dma_address(sg),
sg                929 drivers/crypto/inside-secure/safexcel_hash.c 	struct scatterlist sg;
sg                942 drivers/crypto/inside-secure/safexcel_hash.c 		sg_init_one(&sg, keydup, keylen);
sg                943 drivers/crypto/inside-secure/safexcel_hash.c 		ahash_request_set_crypt(areq, &sg, ipad, keylen);
sg                978 drivers/crypto/inside-secure/safexcel_hash.c 	struct scatterlist sg;
sg                983 drivers/crypto/inside-secure/safexcel_hash.c 	sg_init_one(&sg, pad, blocksize);
sg                984 drivers/crypto/inside-secure/safexcel_hash.c 	ahash_request_set_crypt(areq, &sg, pad, blocksize);
sg                781 drivers/crypto/ixp4xx_crypto.c 		struct scatterlist *sg,	unsigned nbytes,
sg                785 drivers/crypto/ixp4xx_crypto.c 	for (; nbytes > 0; sg = sg_next(sg)) {
sg                786 drivers/crypto/ixp4xx_crypto.c 		unsigned len = min(nbytes, sg->length);
sg                792 drivers/crypto/ixp4xx_crypto.c 		ptr = sg_virt(sg);
sg                798 drivers/crypto/ixp4xx_crypto.c 		sg_dma_address(sg) = dma_map_single(dev, ptr, len, dir);
sg                803 drivers/crypto/ixp4xx_crypto.c 		buf->phys_addr = sg_dma_address(sg);
sg                329 drivers/crypto/marvell/cesa.h 	struct scatterlist *sg;
sg                786 drivers/crypto/marvell/cesa.h 					    struct scatterlist *sg,
sg                791 drivers/crypto/marvell/cesa.h 	iter->sg = sg;
sg                800 drivers/crypto/marvell/cesa.h 		   sg_dma_len(sgiter->sg) - sgiter->offset);
sg                633 drivers/crypto/marvell/hash.c 	if (iter.src.sg) {
sg               1091 drivers/crypto/marvell/hash.c 	struct scatterlist sg;
sg               1096 drivers/crypto/marvell/hash.c 	sg_init_one(&sg, pad, blocksize);
sg               1097 drivers/crypto/marvell/hash.c 	ahash_request_set_crypt(req, &sg, pad, blocksize);
sg               1125 drivers/crypto/marvell/hash.c 	struct scatterlist sg;
sg               1140 drivers/crypto/marvell/hash.c 		sg_init_one(&sg, keydup, keylen);
sg               1141 drivers/crypto/marvell/hash.c 		ahash_request_set_crypt(req, &sg, ipad, keylen);
sg                 19 drivers/crypto/marvell/tdma.c 	if (!sgiter->sg)
sg                 24 drivers/crypto/marvell/tdma.c 	if (sgiter->offset == sg_dma_len(sgiter->sg)) {
sg                 25 drivers/crypto/marvell/tdma.c 		if (sg_is_last(sgiter->sg))
sg                 28 drivers/crypto/marvell/tdma.c 		sgiter->sg = sg_next(sgiter->sg);
sg                336 drivers/crypto/marvell/tdma.c 			src = sg_dma_address(sgiter->sg) + sgiter->offset;
sg                338 drivers/crypto/marvell/tdma.c 			dst = sg_dma_address(sgiter->sg) + sgiter->offset;
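
The marvell/cesa iterator (cesa.h and tdma.c fragments above) bounds each DMA transfer by both the operation's remaining bytes and what is left in the current mapped entry, then steps to the next entry when the current one is drained. A simplified rendering; the real iterator also carries per-operation offsets that are not fully visible in the listing:

#include <linux/kernel.h>
#include <linux/scatterlist.h>

struct sg_iter {
	struct scatterlist *sg;		/* current mapped entry */
	unsigned int offset;		/* bytes consumed in it */
};

static unsigned int sg_iter_xfer_len(struct sg_iter *it, unsigned int op_left)
{
	return min(op_left, sg_dma_len(it->sg) - it->offset);
}

static bool sg_iter_next(struct sg_iter *it, unsigned int transferred)
{
	it->offset += transferred;
	if (it->offset == sg_dma_len(it->sg)) {
		if (sg_is_last(it->sg))
			return false;		/* whole list consumed */
		it->offset = 0;
		it->sg = sg_next(it->sg);
	}
	return true;
}
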
sg                191 drivers/crypto/mediatek/mtk-aes.c static bool mtk_aes_check_aligned(struct scatterlist *sg, size_t len,
sg                199 drivers/crypto/mediatek/mtk-aes.c 	for (nents = 0; sg; sg = sg_next(sg), ++nents) {
sg                200 drivers/crypto/mediatek/mtk-aes.c 		if (!IS_ALIGNED(sg->offset, sizeof(u32)))
sg                203 drivers/crypto/mediatek/mtk-aes.c 		if (len <= sg->length) {
sg                208 drivers/crypto/mediatek/mtk-aes.c 			dma->remainder = sg->length - len;
sg                209 drivers/crypto/mediatek/mtk-aes.c 			sg->length = len;
sg                213 drivers/crypto/mediatek/mtk-aes.c 		if (!IS_ALIGNED(sg->length, AES_BLOCK_SIZE))
sg                216 drivers/crypto/mediatek/mtk-aes.c 		len -= sg->length;
sg                231 drivers/crypto/mediatek/mtk-aes.c 	struct scatterlist *sg = dma->sg;
sg                237 drivers/crypto/mediatek/mtk-aes.c 	while (--nents > 0 && sg)
sg                238 drivers/crypto/mediatek/mtk-aes.c 		sg = sg_next(sg);
sg                240 drivers/crypto/mediatek/mtk-aes.c 	if (!sg)
sg                243 drivers/crypto/mediatek/mtk-aes.c 	sg->length += dma->remainder;
sg                281 drivers/crypto/mediatek/mtk-aes.c 	struct scatterlist *ssg = aes->src.sg, *dsg = aes->dst.sg;
sg                346 drivers/crypto/mediatek/mtk-aes.c 	if (aes->src.sg == aes->dst.sg) {
sg                347 drivers/crypto/mediatek/mtk-aes.c 		dma_unmap_sg(cryp->dev, aes->src.sg, aes->src.nents,
sg                350 drivers/crypto/mediatek/mtk-aes.c 		if (aes->src.sg != &aes->aligned_sg)
sg                353 drivers/crypto/mediatek/mtk-aes.c 		dma_unmap_sg(cryp->dev, aes->dst.sg, aes->dst.nents,
sg                356 drivers/crypto/mediatek/mtk-aes.c 		if (aes->dst.sg != &aes->aligned_sg)
sg                359 drivers/crypto/mediatek/mtk-aes.c 		dma_unmap_sg(cryp->dev, aes->src.sg, aes->src.nents,
sg                362 drivers/crypto/mediatek/mtk-aes.c 		if (aes->src.sg != &aes->aligned_sg)
sg                366 drivers/crypto/mediatek/mtk-aes.c 	if (aes->dst.sg == &aes->aligned_sg)
sg                383 drivers/crypto/mediatek/mtk-aes.c 	if (aes->src.sg == aes->dst.sg) {
sg                384 drivers/crypto/mediatek/mtk-aes.c 		aes->src.sg_len = dma_map_sg(cryp->dev, aes->src.sg,
sg                391 drivers/crypto/mediatek/mtk-aes.c 		aes->src.sg_len = dma_map_sg(cryp->dev, aes->src.sg,
sg                396 drivers/crypto/mediatek/mtk-aes.c 		aes->dst.sg_len = dma_map_sg(cryp->dev, aes->dst.sg,
sg                399 drivers/crypto/mediatek/mtk-aes.c 			dma_unmap_sg(cryp->dev, aes->src.sg, aes->src.nents,
sg                471 drivers/crypto/mediatek/mtk-aes.c 	aes->src.sg = src;
sg                472 drivers/crypto/mediatek/mtk-aes.c 	aes->dst.sg = dst;
sg                489 drivers/crypto/mediatek/mtk-aes.c 			aes->src.sg = &aes->aligned_sg;
sg                495 drivers/crypto/mediatek/mtk-aes.c 			aes->dst.sg = &aes->aligned_sg;
sg                921 drivers/crypto/mediatek/mtk-aes.c 	aes->src.sg = src;
sg                922 drivers/crypto/mediatek/mtk-aes.c 	aes->dst.sg = dst;
sg                937 drivers/crypto/mediatek/mtk-aes.c 			aes->src.sg = &aes->aligned_sg;
sg                943 drivers/crypto/mediatek/mtk-aes.c 			aes->dst.sg = &aes->aligned_sg;
sg               1024 drivers/crypto/mediatek/mtk-aes.c 		struct scatterlist sg[1];
sg               1063 drivers/crypto/mediatek/mtk-aes.c 	sg_init_one(data->sg, &data->hash, AES_BLOCK_SIZE);
sg               1068 drivers/crypto/mediatek/mtk-aes.c 	skcipher_request_set_crypt(&data->req, data->sg, data->sg,
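
mtk_aes_check_aligned() (fragments above) accepts a list only if every entry is word-aligned and block-sized; the entry that crosses the requested length is truncated, and the cut is remembered so mtk_aes_restore_sg() can undo it after the DMA. A sketch of the check side, with the struct reduced to the fields the lines show:

#include <linux/kernel.h>
#include <linux/scatterlist.h>

struct dma_trunc {
	int nents;			/* entries covering the request */
	unsigned int remainder;		/* bytes sliced off the last entry */
};

static bool sg_check_aligned(struct scatterlist *sg, size_t len,
			     struct dma_trunc *dma, unsigned int blksz)
{
	int nents;

	for (nents = 0; sg; sg = sg_next(sg), ++nents) {
		if (!IS_ALIGNED(sg->offset, sizeof(u32)))
			return false;

		if (len <= sg->length) {
			if (!IS_ALIGNED(len, blksz))
				return false;
			/* truncate final entry; remember what was cut */
			dma->remainder = sg->length - len;
			sg->length = len;
			dma->nents = nents + 1;
			return true;
		}

		if (!IS_ALIGNED(sg->length, blksz))
			return false;

		len -= sg->length;
	}
	return false;
}
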
sg                112 drivers/crypto/mediatek/mtk-platform.h 	struct scatterlist *sg;
sg                 89 drivers/crypto/mediatek/mtk-sha.c 	struct scatterlist *sg;
sg                186 drivers/crypto/mediatek/mtk-sha.c 		count = min(ctx->sg->length - ctx->offset, ctx->total);
sg                196 drivers/crypto/mediatek/mtk-sha.c 			if ((ctx->sg->length == 0) && !sg_is_last(ctx->sg)) {
sg                197 drivers/crypto/mediatek/mtk-sha.c 				ctx->sg = sg_next(ctx->sg);
sg                204 drivers/crypto/mediatek/mtk-sha.c 		scatterwalk_map_and_copy(ctx->buffer + ctx->bufcnt, ctx->sg,
sg                211 drivers/crypto/mediatek/mtk-sha.c 		if (ctx->offset == ctx->sg->length) {
sg                212 drivers/crypto/mediatek/mtk-sha.c 			ctx->sg = sg_next(ctx->sg);
sg                213 drivers/crypto/mediatek/mtk-sha.c 			if (ctx->sg)
sg                517 drivers/crypto/mediatek/mtk-sha.c 	struct scatterlist *sg;
sg                525 drivers/crypto/mediatek/mtk-sha.c 	sg = ctx->sg;
sg                527 drivers/crypto/mediatek/mtk-sha.c 	if (!IS_ALIGNED(sg->offset, sizeof(u32)))
sg                530 drivers/crypto/mediatek/mtk-sha.c 	if (!sg_is_last(sg) && !IS_ALIGNED(sg->length, ctx->bs))
sg                534 drivers/crypto/mediatek/mtk-sha.c 	len = min(ctx->total, sg->length);
sg                536 drivers/crypto/mediatek/mtk-sha.c 	if (sg_is_last(sg)) {
sg                558 drivers/crypto/mediatek/mtk-sha.c 		sg = ctx->sg;
sg                579 drivers/crypto/mediatek/mtk-sha.c 			ctx->sg = sg;
sg                580 drivers/crypto/mediatek/mtk-sha.c 			if (!dma_map_sg(cryp->dev, ctx->sg, 1, DMA_TO_DEVICE)) {
sg                586 drivers/crypto/mediatek/mtk-sha.c 			return mtk_sha_xmit(cryp, sha, sg_dma_address(ctx->sg),
sg                591 drivers/crypto/mediatek/mtk-sha.c 	if (!dma_map_sg(cryp->dev, ctx->sg, 1, DMA_TO_DEVICE)) {
sg                598 drivers/crypto/mediatek/mtk-sha.c 	return mtk_sha_xmit(cryp, sha, sg_dma_address(ctx->sg),
sg                721 drivers/crypto/mediatek/mtk-sha.c 		dma_unmap_sg(cryp->dev, ctx->sg, 1, DMA_TO_DEVICE);
sg                722 drivers/crypto/mediatek/mtk-sha.c 		if (ctx->sg->length == ctx->offset) {
sg                723 drivers/crypto/mediatek/mtk-sha.c 			ctx->sg = sg_next(ctx->sg);
sg                724 drivers/crypto/mediatek/mtk-sha.c 			if (ctx->sg)
sg                751 drivers/crypto/mediatek/mtk-sha.c 	ctx->sg = req->src;
sg                 84 drivers/crypto/nx/nx.c 	struct nx_sg *sg;
sg                108 drivers/crypto/nx/nx.c 	for (sg = sg_head; sg_len < *len; sg++) {
sg                111 drivers/crypto/nx/nx.c 		sg->addr = sg_addr;
sg                115 drivers/crypto/nx/nx.c 		next_page = (sg->addr & PAGE_MASK) + PAGE_SIZE;
sg                116 drivers/crypto/nx/nx.c 		sg->len = min_t(u64, sg_addr, next_page) - sg->addr;
sg                117 drivers/crypto/nx/nx.c 		sg_len += sg->len;
sg                126 drivers/crypto/nx/nx.c 		if ((sg - sg_head) == sgmax) {
sg                129 drivers/crypto/nx/nx.c 			sg++;
sg                136 drivers/crypto/nx/nx.c 	return sg;
sg                179 drivers/crypto/nx/nx.c 			scatterwalk_start(&walk, sg_next(walk.sg));
sg                205 drivers/crypto/nx/nx.c static long int trim_sg_list(struct nx_sg *sg,
sg                214 drivers/crypto/nx/nx.c 	while (delta && end > sg) {
sg                231 drivers/crypto/nx/nx.c 	oplen = (sg - end) * sizeof(struct nx_sg);
sg                233 drivers/crypto/nx/nx.c 		data_back = (abs(oplen) / AES_BLOCK_SIZE) *  sg->len;
sg                 17 drivers/crypto/omap-crypto.c 				     struct scatterlist **sg,
sg                 20 drivers/crypto/omap-crypto.c 	int n = sg_nents(*sg);
sg                 24 drivers/crypto/omap-crypto.c 		new_sg = kmalloc_array(n, sizeof(*sg), GFP_KERNEL);
sg                 33 drivers/crypto/omap-crypto.c 	while (*sg && total) {
sg                 34 drivers/crypto/omap-crypto.c 		int len = (*sg)->length;
sg                 41 drivers/crypto/omap-crypto.c 			sg_set_page(tmp, sg_page(*sg), len, (*sg)->offset);
sg                 47 drivers/crypto/omap-crypto.c 		*sg = sg_next(*sg);
sg                 50 drivers/crypto/omap-crypto.c 	*sg = new_sg;
sg                 55 drivers/crypto/omap-crypto.c static int omap_crypto_copy_sgs(int total, int bs, struct scatterlist **sg,
sg                 73 drivers/crypto/omap-crypto.c 		scatterwalk_map_and_copy(buf, *sg, 0, total, 0);
sg                 83 drivers/crypto/omap-crypto.c 	*sg = new_sg;
sg                 88 drivers/crypto/omap-crypto.c static int omap_crypto_check_sg(struct scatterlist *sg, int total, int bs,
sg                 97 drivers/crypto/omap-crypto.c 	while (sg) {
sg                100 drivers/crypto/omap-crypto.c 		if (!IS_ALIGNED(sg->offset, 4))
sg                102 drivers/crypto/omap-crypto.c 		if (!IS_ALIGNED(sg->length, bs))
sg                105 drivers/crypto/omap-crypto.c 		if (page_zonenum(sg_page(sg)) != ZONE_DMA)
sg                109 drivers/crypto/omap-crypto.c 		len += sg->length;
sg                110 drivers/crypto/omap-crypto.c 		sg = sg_next(sg);
sg                125 drivers/crypto/omap-crypto.c int omap_crypto_align_sg(struct scatterlist **sg, int total, int bs,
sg                136 drivers/crypto/omap-crypto.c 		ret = omap_crypto_check_sg(*sg, total, bs, flags);
sg                139 drivers/crypto/omap-crypto.c 		ret = omap_crypto_copy_sgs(total, bs, sg, new_sg, flags);
sg                144 drivers/crypto/omap-crypto.c 		ret = omap_crypto_copy_sg_lists(total, bs, sg, new_sg, flags);
sg                150 drivers/crypto/omap-crypto.c 		sg_set_buf(new_sg, sg_virt(*sg), (*sg)->length);
sg                157 drivers/crypto/omap-crypto.c void omap_crypto_cleanup(struct scatterlist *sg, struct scatterlist *orig,
sg                170 drivers/crypto/omap-crypto.c 	buf = sg_virt(sg);
sg                179 drivers/crypto/omap-crypto.c 		kfree(sg);
sg                 27 drivers/crypto/omap-crypto.h int omap_crypto_align_sg(struct scatterlist **sg, int total, int bs,
sg                 30 drivers/crypto/omap-crypto.h void omap_crypto_cleanup(struct scatterlist *sg, struct scatterlist *orig,
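
omap_crypto_copy_sg_lists() (and the near-identical copies in omap-sham and s5p-sss further down) clones page/length/offset triplets into a fresh table so the copy can be truncated or re-aligned without disturbing the caller's list. A sketch with error handling reduced to a NULL return:

#include <linux/kernel.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static struct scatterlist *clone_sg(struct scatterlist *sg, int total)
{
	int n = sg_nents(sg);
	struct scatterlist *new_sg, *tmp, *prev = NULL;

	new_sg = kmalloc_array(n, sizeof(*new_sg), GFP_KERNEL);
	if (!new_sg)
		return NULL;

	sg_init_table(new_sg, n);
	tmp = new_sg;

	while (sg && total) {
		int len = min_t(int, sg->length, total);

		/* copy the page reference, not the data */
		sg_set_page(tmp, sg_page(sg), len, sg->offset);
		prev = tmp;
		tmp = sg_next(tmp);
		total -= len;
		sg = sg_next(sg);
	}
	if (prev)
		sg_mark_end(prev);	/* copy may be shorter than n */

	return new_sg;
}
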
sg                152 drivers/crypto/omap-sham.c 	struct scatterlist	*sg;
sg                542 drivers/crypto/omap-sham.c 	sg_miter_start(&mi, ctx->sg, ctx->sg_len,
sg                593 drivers/crypto/omap-sham.c 	if (!dma_map_sg(dd->dev, ctx->sg, ctx->sg_len, DMA_TO_DEVICE)) {
sg                610 drivers/crypto/omap-sham.c 	tx = dmaengine_prep_slave_sg(dd->dma_lch, ctx->sg, ctx->sg_len,
sg                641 drivers/crypto/omap-sham.c 				   struct scatterlist *sg, int bs, int new_len)
sg                643 drivers/crypto/omap-sham.c 	int n = sg_nents(sg);
sg                650 drivers/crypto/omap-sham.c 	ctx->sg = kmalloc_array(n, sizeof(*sg), GFP_KERNEL);
sg                651 drivers/crypto/omap-sham.c 	if (!ctx->sg)
sg                654 drivers/crypto/omap-sham.c 	sg_init_table(ctx->sg, n);
sg                656 drivers/crypto/omap-sham.c 	tmp = ctx->sg;
sg                666 drivers/crypto/omap-sham.c 	while (sg && new_len) {
sg                667 drivers/crypto/omap-sham.c 		int len = sg->length - offset;
sg                670 drivers/crypto/omap-sham.c 			offset -= sg->length;
sg                680 drivers/crypto/omap-sham.c 			sg_set_page(tmp, sg_page(sg), len, sg->offset);
sg                687 drivers/crypto/omap-sham.c 		sg = sg_next(sg);
sg                698 drivers/crypto/omap-sham.c 			      struct scatterlist *sg, int bs, int new_len)
sg                717 drivers/crypto/omap-sham.c 	scatterwalk_map_and_copy(buf + ctx->bufcnt, sg, ctx->offset,
sg                721 drivers/crypto/omap-sham.c 	ctx->sg = ctx->sgl;
sg                730 drivers/crypto/omap-sham.c static int omap_sham_align_sgs(struct scatterlist *sg,
sg                737 drivers/crypto/omap-sham.c 	struct scatterlist *sg_tmp = sg;
sg                741 drivers/crypto/omap-sham.c 	if (!sg || !sg->length || !nbytes)
sg                798 drivers/crypto/omap-sham.c 		return omap_sham_copy_sgs(rctx, sg, bs, new_len);
sg                800 drivers/crypto/omap-sham.c 		return omap_sham_copy_sg_lists(rctx, sg, bs, new_len);
sg                803 drivers/crypto/omap-sham.c 	rctx->sg = sg;
sg                870 drivers/crypto/omap-sham.c 		rctx->sg = rctx->sgl;
sg                878 drivers/crypto/omap-sham.c 		rctx->sg = rctx->sgl;
sg                914 drivers/crypto/omap-sham.c 	dma_unmap_sg(dd->dev, ctx->sg, ctx->sg_len, DMA_TO_DEVICE);
sg               1085 drivers/crypto/omap-sham.c 		free_pages((unsigned long)sg_virt(ctx->sg),
sg               1086 drivers/crypto/omap-sham.c 			   get_order(ctx->sg->length + ctx->bufcnt));
sg               1089 drivers/crypto/omap-sham.c 		kfree(ctx->sg);
sg               1091 drivers/crypto/omap-sham.c 	ctx->sg = NULL;
sg                719 drivers/crypto/qat/qat_common/qat_algs.c 	struct scatterlist *sg;
sg                734 drivers/crypto/qat/qat_common/qat_algs.c 	for_each_sg(sgl, sg, n, i) {
sg                737 drivers/crypto/qat/qat_common/qat_algs.c 		if (!sg->length)
sg                740 drivers/crypto/qat/qat_common/qat_algs.c 		bufl->bufers[y].addr = dma_map_single(dev, sg_virt(sg),
sg                741 drivers/crypto/qat/qat_common/qat_algs.c 						      sg->length,
sg                743 drivers/crypto/qat/qat_common/qat_algs.c 		bufl->bufers[y].len = sg->length;
sg                767 drivers/crypto/qat/qat_common/qat_algs.c 		for_each_sg(sglout, sg, n, i) {
sg                770 drivers/crypto/qat/qat_common/qat_algs.c 			if (!sg->length)
sg                773 drivers/crypto/qat/qat_common/qat_algs.c 			bufers[y].addr = dma_map_single(dev, sg_virt(sg),
sg                774 drivers/crypto/qat/qat_common/qat_algs.c 							sg->length,
sg                778 drivers/crypto/qat/qat_common/qat_algs.c 			bufers[y].len = sg->length;
sg                 60 drivers/crypto/qce/ablkcipher.c 	struct scatterlist *sg;
sg                 98 drivers/crypto/qce/ablkcipher.c 	sg = qce_sgtable_add(&rctx->dst_tbl, req->dst);
sg                 99 drivers/crypto/qce/ablkcipher.c 	if (IS_ERR(sg)) {
sg                100 drivers/crypto/qce/ablkcipher.c 		ret = PTR_ERR(sg);
sg                104 drivers/crypto/qce/ablkcipher.c 	sg = qce_sgtable_add(&rctx->dst_tbl, &rctx->result_sg);
sg                105 drivers/crypto/qce/ablkcipher.c 	if (IS_ERR(sg)) {
sg                106 drivers/crypto/qce/ablkcipher.c 		ret = PTR_ERR(sg);
sg                110 drivers/crypto/qce/ablkcipher.c 	sg_mark_end(sg);
sg                 52 drivers/crypto/qce/dma.c 	struct scatterlist *sg = sgt->sgl, *sg_last = NULL;
sg                 54 drivers/crypto/qce/dma.c 	while (sg) {
sg                 55 drivers/crypto/qce/dma.c 		if (!sg_page(sg))
sg                 57 drivers/crypto/qce/dma.c 		sg = sg_next(sg);
sg                 60 drivers/crypto/qce/dma.c 	if (!sg)
sg                 63 drivers/crypto/qce/dma.c 	while (new_sgl && sg) {
sg                 64 drivers/crypto/qce/dma.c 		sg_set_page(sg, sg_page(new_sgl), new_sgl->length,
sg                 66 drivers/crypto/qce/dma.c 		sg_last = sg;
sg                 67 drivers/crypto/qce/dma.c 		sg = sg_next(sg);
sg                 74 drivers/crypto/qce/dma.c static int qce_dma_prep_sg(struct dma_chan *chan, struct scatterlist *sg,
sg                 82 drivers/crypto/qce/dma.c 	if (!sg || !nents)
sg                 85 drivers/crypto/qce/dma.c 	desc = dmaengine_prep_slave_sg(chan, sg, nents, dir, flags);
sg                232 drivers/crypto/qce/sha.c 	struct scatterlist *sg_last, *sg;
sg                274 drivers/crypto/qce/sha.c 	sg = sg_last = req->src;
sg                276 drivers/crypto/qce/sha.c 	while (len < nbytes && sg) {
sg                277 drivers/crypto/qce/sha.c 		if (len + sg_dma_len(sg) > nbytes)
sg                279 drivers/crypto/qce/sha.c 		len += sg_dma_len(sg);
sg                280 drivers/crypto/qce/sha.c 		sg_last = sg;
sg                281 drivers/crypto/qce/sha.c 		sg = sg_next(sg);
sg                290 drivers/crypto/qce/sha.c 		sg_init_table(rctx->sg, 2);
sg                291 drivers/crypto/qce/sha.c 		sg_set_buf(rctx->sg, rctx->tmpbuf, rctx->buflen);
sg                292 drivers/crypto/qce/sha.c 		sg_chain(rctx->sg, 2, req->src);
sg                293 drivers/crypto/qce/sha.c 		req->src = rctx->sg;
sg                317 drivers/crypto/qce/sha.c 	sg_init_one(rctx->sg, rctx->tmpbuf, rctx->buflen);
sg                319 drivers/crypto/qce/sha.c 	req->src = rctx->sg;
sg                351 drivers/crypto/qce/sha.c 	struct scatterlist sg;
sg                395 drivers/crypto/qce/sha.c 	sg_init_one(&sg, buf, keylen);
sg                396 drivers/crypto/qce/sha.c 	ahash_request_set_crypt(req, &sg, ctx->authkey, keylen);
sg                 54 drivers/crypto/qce/sha.h 	struct scatterlist sg[2];
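
qce's ahash update (sha.c fragments above) keeps leftover tail bytes in rctx->tmpbuf and, when new data arrives, prepends them with a two-entry chained table so the hardware consumes buffer-then-request as one logical list. The trick in isolation:

#include <linux/scatterlist.h>

static struct scatterlist *
prepend_buf(struct scatterlist pre[2], void *buf, unsigned int buflen,
	    struct scatterlist *src)
{
	sg_init_table(pre, 2);
	sg_set_buf(pre, buf, buflen);	/* entry 0: buffered tail bytes */
	sg_chain(pre, 2, src);		/* entry 1 becomes a chain link */
	return pre;			/* use as the new source list */
}
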
sg                363 drivers/crypto/s5p-sss.c 	struct scatterlist	*sg;
sg                439 drivers/crypto/s5p-sss.c 			       const struct scatterlist *sg)
sg                441 drivers/crypto/s5p-sss.c 	SSS_WRITE(dev, FCBRDMAS, sg_dma_address(sg));
sg                442 drivers/crypto/s5p-sss.c 	SSS_WRITE(dev, FCBRDMAL, sg_dma_len(sg));
sg                446 drivers/crypto/s5p-sss.c 				const struct scatterlist *sg)
sg                448 drivers/crypto/s5p-sss.c 	SSS_WRITE(dev, FCBTDMAS, sg_dma_address(sg));
sg                449 drivers/crypto/s5p-sss.c 	SSS_WRITE(dev, FCBTDMAL, sg_dma_len(sg));
sg                452 drivers/crypto/s5p-sss.c static void s5p_free_sg_cpy(struct s5p_aes_dev *dev, struct scatterlist **sg)
sg                456 drivers/crypto/s5p-sss.c 	if (!*sg)
sg                460 drivers/crypto/s5p-sss.c 	free_pages((unsigned long)sg_virt(*sg), get_order(len));
sg                462 drivers/crypto/s5p-sss.c 	kfree(*sg);
sg                463 drivers/crypto/s5p-sss.c 	*sg = NULL;
sg                466 drivers/crypto/s5p-sss.c static void s5p_sg_copy_buf(void *buf, struct scatterlist *sg,
sg                474 drivers/crypto/s5p-sss.c 	scatterwalk_start(&walk, sg);
sg                542 drivers/crypto/s5p-sss.c static int s5p_set_outdata(struct s5p_aes_dev *dev, struct scatterlist *sg)
sg                544 drivers/crypto/s5p-sss.c 	if (!sg->length)
sg                547 drivers/crypto/s5p-sss.c 	if (!dma_map_sg(dev->dev, sg, 1, DMA_FROM_DEVICE))
sg                550 drivers/crypto/s5p-sss.c 	dev->sg_dst = sg;
sg                555 drivers/crypto/s5p-sss.c static int s5p_set_indata(struct s5p_aes_dev *dev, struct scatterlist *sg)
sg                557 drivers/crypto/s5p-sss.c 	if (!sg->length)
sg                560 drivers/crypto/s5p-sss.c 	if (!dma_map_sg(dev->dev, sg, 1, DMA_TO_DEVICE))
sg                563 drivers/crypto/s5p-sss.c 	dev->sg_src = sg;
sg                629 drivers/crypto/s5p-sss.c 				 const struct scatterlist *sg)
sg                632 drivers/crypto/s5p-sss.c 	SSS_WRITE(dev, FCHRDMAS, sg_dma_address(sg));
sg                633 drivers/crypto/s5p-sss.c 	SSS_WRITE(dev, FCHRDMAL, sg_dma_len(sg)); /* DMA starts */
sg                989 drivers/crypto/s5p-sss.c 	cnt = dma_map_sg(dd->dev, ctx->sg, ctx->sg_len, DMA_TO_DEVICE);
sg                997 drivers/crypto/s5p-sss.c 	dd->hash_sg_iter = ctx->sg;
sg               1025 drivers/crypto/s5p-sss.c 			     struct scatterlist *sg, unsigned int new_len)
sg               1043 drivers/crypto/s5p-sss.c 	scatterwalk_map_and_copy(buf + ctx->bufcnt, sg, ctx->skip,
sg               1047 drivers/crypto/s5p-sss.c 	ctx->sg = ctx->sgl;
sg               1071 drivers/crypto/s5p-sss.c 				  struct scatterlist *sg, unsigned int new_len)
sg               1073 drivers/crypto/s5p-sss.c 	unsigned int skip = ctx->skip, n = sg_nents(sg);
sg               1080 drivers/crypto/s5p-sss.c 	ctx->sg = kmalloc_array(n, sizeof(*sg), GFP_KERNEL);
sg               1081 drivers/crypto/s5p-sss.c 	if (!ctx->sg) {
sg               1086 drivers/crypto/s5p-sss.c 	sg_init_table(ctx->sg, n);
sg               1088 drivers/crypto/s5p-sss.c 	tmp = ctx->sg;
sg               1098 drivers/crypto/s5p-sss.c 	while (sg && skip >= sg->length) {
sg               1099 drivers/crypto/s5p-sss.c 		skip -= sg->length;
sg               1100 drivers/crypto/s5p-sss.c 		sg = sg_next(sg);
sg               1103 drivers/crypto/s5p-sss.c 	while (sg && new_len) {
sg               1104 drivers/crypto/s5p-sss.c 		len = sg->length - skip;
sg               1109 drivers/crypto/s5p-sss.c 		sg_set_page(tmp, sg_page(sg), len, sg->offset + skip);
sg               1116 drivers/crypto/s5p-sss.c 		sg = sg_next(sg);
sg               1141 drivers/crypto/s5p-sss.c 				struct scatterlist *sg,
sg               1146 drivers/crypto/s5p-sss.c 	struct scatterlist *sg_tmp = sg;
sg               1148 drivers/crypto/s5p-sss.c 	if (!sg || !sg->length || !new_len)
sg               1181 drivers/crypto/s5p-sss.c 		return s5p_hash_copy_sgs(ctx, sg, new_len);
sg               1183 drivers/crypto/s5p-sss.c 		return s5p_hash_copy_sg_lists(ctx, sg, new_len);
sg               1193 drivers/crypto/s5p-sss.c 		sg_chain(ctx->sgl, 2, sg);
sg               1194 drivers/crypto/s5p-sss.c 		ctx->sg = ctx->sgl;
sg               1197 drivers/crypto/s5p-sss.c 		ctx->sg = sg;
sg               1282 drivers/crypto/s5p-sss.c 		ctx->sg = ctx->sgl;
sg               1303 drivers/crypto/s5p-sss.c 	dma_unmap_sg(dd->dev, ctx->sg, ctx->sg_len, DMA_TO_DEVICE);
sg               1334 drivers/crypto/s5p-sss.c 		free_pages((unsigned long)sg_virt(ctx->sg),
sg               1335 drivers/crypto/s5p-sss.c 			   get_order(ctx->sg->length));
sg               1338 drivers/crypto/s5p-sss.c 		kfree(ctx->sg);
sg               1340 drivers/crypto/s5p-sss.c 	ctx->sg = NULL;
sg               1861 drivers/crypto/s5p-sss.c static bool s5p_is_sg_aligned(struct scatterlist *sg)
sg               1863 drivers/crypto/s5p-sss.c 	while (sg) {
sg               1864 drivers/crypto/s5p-sss.c 		if (!IS_ALIGNED(sg->length, AES_BLOCK_SIZE))
sg               1866 drivers/crypto/s5p-sss.c 		sg = sg_next(sg);
sg               1875 drivers/crypto/s5p-sss.c 	struct scatterlist *sg;
sg               1879 drivers/crypto/s5p-sss.c 	sg = req->src;
sg               1880 drivers/crypto/s5p-sss.c 	if (!s5p_is_sg_aligned(sg)) {
sg               1883 drivers/crypto/s5p-sss.c 		err = s5p_make_sg_cpy(dev, sg, &dev->sg_src_cpy);
sg               1887 drivers/crypto/s5p-sss.c 		sg = dev->sg_src_cpy;
sg               1890 drivers/crypto/s5p-sss.c 	err = s5p_set_indata(dev, sg);
sg               1902 drivers/crypto/s5p-sss.c 	struct scatterlist *sg;
sg               1906 drivers/crypto/s5p-sss.c 	sg = req->dst;
sg               1907 drivers/crypto/s5p-sss.c 	if (!s5p_is_sg_aligned(sg)) {
sg               1910 drivers/crypto/s5p-sss.c 		err = s5p_make_sg_cpy(dev, sg, &dev->sg_dst_cpy);
sg               1914 drivers/crypto/s5p-sss.c 		sg = dev->sg_dst_cpy;
sg               1917 drivers/crypto/s5p-sss.c 	err = s5p_set_outdata(dev, sg);
sg                444 drivers/crypto/sahara.c 	struct scatterlist *sg;
sg                501 drivers/crypto/sahara.c 	sg = dev->in_sg;
sg                503 drivers/crypto/sahara.c 		dev->hw_link[i]->len = sg->length;
sg                504 drivers/crypto/sahara.c 		dev->hw_link[i]->p = sg->dma_address;
sg                509 drivers/crypto/sahara.c 			sg = sg_next(sg);
sg                515 drivers/crypto/sahara.c 	sg = dev->out_sg;
sg                517 drivers/crypto/sahara.c 		dev->hw_link[j]->len = sg->length;
sg                518 drivers/crypto/sahara.c 		dev->hw_link[j]->p = sg->dma_address;
sg                523 drivers/crypto/sahara.c 			sg = sg_next(sg);
sg                798 drivers/crypto/sahara.c 	struct scatterlist *sg;
sg                815 drivers/crypto/sahara.c 	sg = dev->in_sg;
sg                821 drivers/crypto/sahara.c 		dev->hw_link[i]->len = sg->length;
sg                822 drivers/crypto/sahara.c 		dev->hw_link[i]->p = sg->dma_address;
sg                827 drivers/crypto/sahara.c 			sg = sg_next(sg);
sg                906 drivers/crypto/sahara.c static int sahara_walk_and_recalc(struct scatterlist *sg, unsigned int nbytes)
sg                908 drivers/crypto/sahara.c 	if (!sg || !sg->length)
sg                911 drivers/crypto/sahara.c 	while (nbytes && sg) {
sg                912 drivers/crypto/sahara.c 		if (nbytes <= sg->length) {
sg                913 drivers/crypto/sahara.c 			sg->length = nbytes;
sg                914 drivers/crypto/sahara.c 			sg_mark_end(sg);
sg                917 drivers/crypto/sahara.c 		nbytes -= sg->length;
sg                918 drivers/crypto/sahara.c 		sg = sg_next(sg);
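
sahara_walk_and_recalc() above trims the source/destination lists to the exact request size: shorten the entry that crosses 'nbytes' and sg_mark_end() it so a later dma_map_sg() stops there. A sketch written as a nents-returning variant; the entry count and the -EINVAL fallback are assumptions, since the driver's own return convention is not fully visible in the listing:

#include <linux/errno.h>
#include <linux/scatterlist.h>

static int sg_truncate(struct scatterlist *sg, unsigned int nbytes)
{
	int nents = 0;

	while (nbytes && sg) {
		nents++;
		if (nbytes <= sg->length) {
			sg->length = nbytes;
			sg_mark_end(sg);	/* also clears any chain bit */
			return nents;
		}
		nbytes -= sg->length;
		sg = sg_next(sg);
	}
	return -EINVAL;				/* list shorter than nbytes */
}
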
sg                285 drivers/crypto/stm32/stm32-cryp.c static int stm32_cryp_check_aligned(struct scatterlist *sg, size_t total,
sg                296 drivers/crypto/stm32/stm32-cryp.c 	while (sg) {
sg                297 drivers/crypto/stm32/stm32-cryp.c 		if (!IS_ALIGNED(sg->offset, sizeof(u32)))
sg                300 drivers/crypto/stm32/stm32-cryp.c 		if (!IS_ALIGNED(sg->length, align))
sg                303 drivers/crypto/stm32/stm32-cryp.c 		len += sg->length;
sg                304 drivers/crypto/stm32/stm32-cryp.c 		sg = sg_next(sg);
sg                328 drivers/crypto/stm32/stm32-cryp.c static void sg_copy_buf(void *buf, struct scatterlist *sg,
sg                336 drivers/crypto/stm32/stm32-cryp.c 	scatterwalk_start(&walk, sg);
sg                133 drivers/crypto/stm32/stm32-hash.c 	struct scatterlist	*sg;
sg                300 drivers/crypto/stm32/stm32-hash.c 		count = min(rctx->sg->length - rctx->offset, rctx->total);
sg                304 drivers/crypto/stm32/stm32-hash.c 			if ((rctx->sg->length == 0) && !sg_is_last(rctx->sg)) {
sg                305 drivers/crypto/stm32/stm32-hash.c 				rctx->sg = sg_next(rctx->sg);
sg                312 drivers/crypto/stm32/stm32-hash.c 		scatterwalk_map_and_copy(rctx->buffer + rctx->bufcnt, rctx->sg,
sg                319 drivers/crypto/stm32/stm32-hash.c 		if (rctx->offset == rctx->sg->length) {
sg                320 drivers/crypto/stm32/stm32-hash.c 			rctx->sg = sg_next(rctx->sg);
sg                321 drivers/crypto/stm32/stm32-hash.c 			if (rctx->sg)
sg                408 drivers/crypto/stm32/stm32-hash.c 			       struct scatterlist *sg, int length, int mdma)
sg                415 drivers/crypto/stm32/stm32-hash.c 	in_desc = dmaengine_prep_slave_sg(hdev->dma_lch, sg, 1,
sg                543 drivers/crypto/stm32/stm32-hash.c 	struct scatterlist sg[1], *tsg;
sg                548 drivers/crypto/stm32/stm32-hash.c 	rctx->sg = hdev->req->src;
sg                551 drivers/crypto/stm32/stm32-hash.c 	rctx->nents = sg_nents(rctx->sg);
sg                564 drivers/crypto/stm32/stm32-hash.c 	for_each_sg(rctx->sg, tsg, rctx->nents, i) {
sg                565 drivers/crypto/stm32/stm32-hash.c 		len = sg->length;
sg                567 drivers/crypto/stm32/stm32-hash.c 		sg[0] = *tsg;
sg                568 drivers/crypto/stm32/stm32-hash.c 		if (sg_is_last(sg)) {
sg                570 drivers/crypto/stm32/stm32-hash.c 				len = (ALIGN(sg->length, 16) - 16);
sg                573 drivers/crypto/stm32/stm32-hash.c 					rctx->sg, rctx->nents,
sg                574 drivers/crypto/stm32/stm32-hash.c 					rctx->buffer, sg->length - len,
sg                575 drivers/crypto/stm32/stm32-hash.c 					rctx->total - sg->length + len);
sg                577 drivers/crypto/stm32/stm32-hash.c 				sg->length = len;
sg                579 drivers/crypto/stm32/stm32-hash.c 				if (!(IS_ALIGNED(sg->length, sizeof(u32)))) {
sg                580 drivers/crypto/stm32/stm32-hash.c 					len = sg->length;
sg                581 drivers/crypto/stm32/stm32-hash.c 					sg->length = ALIGN(sg->length,
sg                587 drivers/crypto/stm32/stm32-hash.c 		rctx->dma_ct = dma_map_sg(hdev->dev, sg, 1,
sg                594 drivers/crypto/stm32/stm32-hash.c 		err = stm32_hash_xmit_dma(hdev, sg, len,
sg                595 drivers/crypto/stm32/stm32-hash.c 					  !sg_is_last(sg));
sg                597 drivers/crypto/stm32/stm32-hash.c 		dma_unmap_sg(hdev->dev, sg, 1, DMA_TO_DEVICE);
sg                655 drivers/crypto/stm32/stm32-hash.c 	struct scatterlist *sg;
sg                666 drivers/crypto/stm32/stm32-hash.c 		for_each_sg(req->src, sg, sg_nents(req->src), i) {
sg                667 drivers/crypto/stm32/stm32-hash.c 			if ((!IS_ALIGNED(sg->length, sizeof(u32))) &&
sg                668 drivers/crypto/stm32/stm32-hash.c 			    (!sg_is_last(sg)))
sg                903 drivers/crypto/stm32/stm32-hash.c 	rctx->sg = req->src;
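
mtk-sha and stm32-hash above share the same append-to-buffer walk: copy from the current entry into a linear staging buffer, hopping to the next entry once its bytes are consumed. A composite sketch of that shared loop (field names follow the fragments; the struct is a reduction, not either driver's context):

#include <crypto/scatterwalk.h>
#include <linux/kernel.h>
#include <linux/scatterlist.h>

struct hash_walk {
	struct scatterlist *sg;	/* current entry */
	size_t offset;		/* consumed bytes within it */
	size_t total;		/* bytes still to hash */
	u8 *buffer;
	size_t bufcnt;
};

static void append_sg(struct hash_walk *w, size_t room)
{
	while (w->total && room && w->sg) {
		size_t count = min3(w->sg->length - w->offset,
				    w->total, room);

		/* offset is relative to w->sg, which we pass as list head */
		scatterwalk_map_and_copy(w->buffer + w->bufcnt, w->sg,
					 w->offset, count, 0);
		w->bufcnt += count;
		w->offset += count;
		w->total -= count;
		room -= count;

		if (w->offset == w->sg->length) {
			w->sg = sg_next(w->sg);
			w->offset = 0;
		}
	}
}
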
sg               1098 drivers/crypto/talitos.c static int sg_to_link_tbl_offset(struct scatterlist *sg, int sg_count,
sg               1106 drivers/crypto/talitos.c 	while (cryptlen && sg && n_sg--) {
sg               1107 drivers/crypto/talitos.c 		unsigned int len = sg_dma_len(sg);
sg               1121 drivers/crypto/talitos.c 				       sg_dma_address(sg) + offset, datalen, 0);
sg               1128 drivers/crypto/talitos.c 			       sg_dma_address(sg) + offset, len, 0);
sg               1136 drivers/crypto/talitos.c 		sg = sg_next(sg);
sg               2189 drivers/crypto/talitos.c 	struct scatterlist sg[1];
sg               2205 drivers/crypto/talitos.c 	sg_init_one(&sg[0], key, keylen);
sg               2207 drivers/crypto/talitos.c 	ahash_request_set_crypt(req, sg, hash, keylen);
sg                521 drivers/crypto/ux500/cryp/cryp_core.c 				 struct scatterlist *sg,
sg                531 drivers/crypto/ux500/cryp/cryp_core.c 	if (unlikely(!IS_ALIGNED((unsigned long)sg, 4))) {
sg                533 drivers/crypto/ux500/cryp/cryp_core.c 			"aligned! Addr: 0x%08lx", __func__, (unsigned long)sg);
sg                540 drivers/crypto/ux500/cryp/cryp_core.c 		ctx->device->dma.sg_src = sg;
sg                564 drivers/crypto/ux500/cryp/cryp_core.c 		ctx->device->dma.sg_dst = sg;
sg                626 drivers/crypto/ux500/cryp/cryp_core.c static int cryp_dma_write(struct cryp_ctx *ctx, struct scatterlist *sg,
sg                629 drivers/crypto/ux500/cryp/cryp_core.c 	int error = cryp_set_dma_transfer(ctx, sg, len, DMA_TO_DEVICE);
sg                641 drivers/crypto/ux500/cryp/cryp_core.c static int cryp_dma_read(struct cryp_ctx *ctx, struct scatterlist *sg, int len)
sg                643 drivers/crypto/ux500/cryp/cryp_core.c 	int error = cryp_set_dma_transfer(ctx, sg, len, DMA_FROM_DEVICE);
sg                818 drivers/crypto/ux500/cryp/cryp_core.c static int get_nents(struct scatterlist *sg, int nbytes)
sg                823 drivers/crypto/ux500/cryp/cryp_core.c 		nbytes -= sg->length;
sg                824 drivers/crypto/ux500/cryp/cryp_core.c 		sg = sg_next(sg);
sg                315 drivers/crypto/ux500/hash/hash_alg.h 	struct scatterlist	*sg;
sg                138 drivers/crypto/ux500/hash/hash_core.c static int hash_set_dma_transfer(struct hash_ctx *ctx, struct scatterlist *sg,
sg                151 drivers/crypto/ux500/hash/hash_core.c 	sg->length = ALIGN(sg->length, HASH_DMA_ALIGN_SIZE);
sg                154 drivers/crypto/ux500/hash/hash_core.c 	ctx->device->dma.sg = sg;
sg                156 drivers/crypto/ux500/hash/hash_core.c 			ctx->device->dma.sg, ctx->device->dma.nents,
sg                168 drivers/crypto/ux500/hash/hash_core.c 			ctx->device->dma.sg, ctx->device->dma.sg_len,
sg                191 drivers/crypto/ux500/hash/hash_core.c 	dma_unmap_sg(chan->device->dev, ctx->device->dma.sg,
sg                196 drivers/crypto/ux500/hash/hash_core.c 			  struct scatterlist *sg, int len)
sg                198 drivers/crypto/ux500/hash/hash_core.c 	int error = hash_set_dma_transfer(ctx, sg, len, DMA_TO_DEVICE);
sg                498 drivers/crypto/ux500/hash/hash_core.c static int hash_get_nents(struct scatterlist *sg, int size, bool *aligned)
sg                503 drivers/crypto/ux500/hash/hash_core.c 	while (size > 0 && sg) {
sg                505 drivers/crypto/ux500/hash/hash_core.c 		size -= sg->length;
sg                508 drivers/crypto/ux500/hash/hash_core.c 		if ((aligned && !IS_ALIGNED(sg->offset, HASH_DMA_ALIGN_SIZE)) ||
sg                509 drivers/crypto/ux500/hash/hash_core.c 		    (!IS_ALIGNED(sg->length, HASH_DMA_ALIGN_SIZE) && size > 0))
sg                512 drivers/crypto/ux500/hash/hash_core.c 		sg = sg_next(sg);
sg                532 drivers/crypto/ux500/hash/hash_core.c static bool hash_dma_valid_data(struct scatterlist *sg, int datasize)
sg                537 drivers/crypto/ux500/hash/hash_core.c 	if (hash_get_nents(sg, datasize, &aligned) < 1)
sg                 88 drivers/crypto/virtio/virtio_crypto_algs.c static u64 virtio_crypto_alg_sg_nents_length(struct scatterlist *sg)
sg                 92 drivers/crypto/virtio/virtio_crypto_algs.c 	for (total = 0; sg; sg = sg_next(sg))
sg                 93 drivers/crypto/virtio/virtio_crypto_algs.c 		total += sg->length;
sg                360 drivers/crypto/virtio/virtio_crypto_algs.c 	struct scatterlist *sg;
sg                454 drivers/crypto/virtio/virtio_crypto_algs.c 	for (sg = req->src; src_nents; sg = sg_next(sg), src_nents--)
sg                455 drivers/crypto/virtio/virtio_crypto_algs.c 		sgs[num_out++] = sg;
sg                458 drivers/crypto/virtio/virtio_crypto_algs.c 	for (sg = req->dst; sg; sg = sg_next(sg))
sg                459 drivers/crypto/virtio/virtio_crypto_algs.c 		sgs[num_out + num_in++] = sg;
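
The virtio-crypto entries above total a request's scatterlist by walking it with sg_next() until the chain ends, then feed each entry into the sgs[] array handed to the virtqueue. A minimal sketch of the length walk, restating the listed helper:

#include <linux/scatterlist.h>

/* Sum the byte length of every entry reachable through sg_next(). */
static u64 sg_nents_length(struct scatterlist *sg)
{
	u64 total;

	for (total = 0; sg; sg = sg_next(sg))
		total += sg->length;

	return total;
}
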
sg                 53 drivers/dma-buf/udmabuf.c 	struct sg_table *sg;
sg                 56 drivers/dma-buf/udmabuf.c 	sg = kzalloc(sizeof(*sg), GFP_KERNEL);
sg                 57 drivers/dma-buf/udmabuf.c 	if (!sg)
sg                 59 drivers/dma-buf/udmabuf.c 	ret = sg_alloc_table_from_pages(sg, ubuf->pages, ubuf->pagecount,
sg                 64 drivers/dma-buf/udmabuf.c 	if (!dma_map_sg(at->dev, sg->sgl, sg->nents, direction)) {
sg                 68 drivers/dma-buf/udmabuf.c 	return sg;
sg                 71 drivers/dma-buf/udmabuf.c 	sg_free_table(sg);
sg                 72 drivers/dma-buf/udmabuf.c 	kfree(sg);
sg                 77 drivers/dma-buf/udmabuf.c 			  struct sg_table *sg,
sg                 80 drivers/dma-buf/udmabuf.c 	dma_unmap_sg(at->dev, sg->sgl, sg->nents, direction);
sg                 81 drivers/dma-buf/udmabuf.c 	sg_free_table(sg);
sg                 82 drivers/dma-buf/udmabuf.c 	kfree(sg);
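
The udmabuf entries show the usual map_dma_buf flow: allocate an sg_table, fill it from the pinned page array with sg_alloc_table_from_pages(), DMA-map it, and unwind both steps on failure. A hedged sketch of that flow (the helper name and error values are illustrative, not udmabuf's own):

#include <linux/dma-mapping.h>
#include <linux/err.h>
#include <linux/mm.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static struct sg_table *map_pages_for_dma(struct device *dev,
					  struct page **pages,
					  unsigned long pagecount,
					  enum dma_data_direction dir)
{
	struct sg_table *sgt;
	int ret;

	sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		return ERR_PTR(-ENOMEM);

	ret = sg_alloc_table_from_pages(sgt, pages, pagecount, 0,
					pagecount << PAGE_SHIFT, GFP_KERNEL);
	if (ret < 0)
		goto err_free;

	/* dma_map_sg() returns 0 on failure, not a negative errno. */
	if (!dma_map_sg(dev, sgt->sgl, sgt->nents, dir)) {
		ret = -EINVAL;
		goto err_table;
	}

	return sgt;

err_table:
	sg_free_table(sgt);
err_free:
	kfree(sgt);
	return ERR_PTR(ret);
}
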
sg                397 drivers/dma/altera-msgdma.c 	struct scatterlist *sg;
sg                401 drivers/dma/altera-msgdma.c 	for_each_sg(sgl, sg, sg_len, i)
sg                402 drivers/dma/altera-msgdma.c 		desc_cnt += DIV_ROUND_UP(sg_dma_len(sg), MSGDMA_MAX_TRANS_LEN);
sg               2057 drivers/dma/amba-pl08x.c 	struct scatterlist *sg;
sg               2068 drivers/dma/amba-pl08x.c 	for_each_sg(sgl, sg, sg_len, tmp) {
sg               2070 drivers/dma/amba-pl08x.c 				      sg_dma_address(sg),
sg               2071 drivers/dma/amba-pl08x.c 				      sg_dma_len(sg));
sg                986 drivers/dma/at_hdmac.c 	struct scatterlist	*sg;
sg               1009 drivers/dma/at_hdmac.c 	for_each_sg(sgl, sg, sg_len, i) {
sg               1010 drivers/dma/at_hdmac.c 		dma_addr_t dest = sg_dma_address(sg);
sg               1011 drivers/dma/at_hdmac.c 		size_t len = sg_dma_len(sg);
sg               1079 drivers/dma/at_hdmac.c 	struct scatterlist	*sg;
sg               1105 drivers/dma/at_hdmac.c 		for_each_sg(sgl, sg, sg_len, i) {
sg               1114 drivers/dma/at_hdmac.c 			mem = sg_dma_address(sg);
sg               1115 drivers/dma/at_hdmac.c 			len = sg_dma_len(sg);
sg               1146 drivers/dma/at_hdmac.c 		for_each_sg(sgl, sg, sg_len, i) {
sg               1155 drivers/dma/at_hdmac.c 			mem = sg_dma_address(sg);
sg               1156 drivers/dma/at_hdmac.c 			len = sg_dma_len(sg);
sg                639 drivers/dma/at_xdmac.c 	struct scatterlist		*sg;
sg                665 drivers/dma/at_xdmac.c 	for_each_sg(sgl, sg, sg_len, i) {
sg                669 drivers/dma/at_xdmac.c 		len = sg_dma_len(sg);
sg                670 drivers/dma/at_xdmac.c 		mem = sg_dma_address(sg);
sg               1235 drivers/dma/at_xdmac.c 	struct scatterlist	*sg, *psg = NULL, *ppsg = NULL;
sg               1246 drivers/dma/at_xdmac.c 	for_each_sg(sgl, sg, sg_len, i) {
sg               1248 drivers/dma/at_xdmac.c 			__func__, &sg_dma_address(sg), sg_dma_len(sg),
sg               1251 drivers/dma/at_xdmac.c 						   sg_dma_address(sg),
sg               1252 drivers/dma/at_xdmac.c 						   sg_dma_len(sg),
sg               1264 drivers/dma/at_xdmac.c 			stride = sg_dma_address(sg) -
sg               1323 drivers/dma/at_xdmac.c 				   sg_dma_address(sg) < sg_dma_address(psg)) {
sg               1351 drivers/dma/at_xdmac.c 		    sg_dma_len(psg) == sg_dma_len(sg)) {
sg               1377 drivers/dma/at_xdmac.c 		psg = sg;
sg               1379 drivers/dma/at_xdmac.c 		len += sg_dma_len(sg);
sg               1271 drivers/dma/coh901318.c 	struct scatterlist *sg;
sg               1534 drivers/dma/coh901318.c 		desc->sg = NULL;
sg               2276 drivers/dma/coh901318.c 	struct scatterlist *sg;
sg               2335 drivers/dma/coh901318.c 	for_each_sg(sgl, sg, sg_len, i) {
sg               2337 drivers/dma/coh901318.c 		size = sg_dma_len(sg);
sg                136 drivers/dma/coh901318.h 		      struct scatterlist *sg, unsigned int nents,
sg                239 drivers/dma/coh901318_lli.c 	struct scatterlist *sg;
sg                258 drivers/dma/coh901318_lli.c 	for_each_sg(sgl, sg, nents, i) {
sg                259 drivers/dma/coh901318_lli.c 		if (sg_is_chain(sg)) {
sg                273 drivers/dma/coh901318_lli.c 			src = sg_dma_address(sg);
sg                276 drivers/dma/coh901318_lli.c 			dst = sg_dma_address(sg);
sg                278 drivers/dma/coh901318_lli.c 		bytes_to_transfer = sg_dma_len(sg);
sg                105 drivers/dma/dma-axi-dmac.c 	struct axi_dmac_sg sg[];
sg                199 drivers/dma/dma-axi-dmac.c 	struct axi_dmac_sg *sg;
sg                216 drivers/dma/dma-axi-dmac.c 	sg = &desc->sg[desc->num_submitted];
sg                219 drivers/dma/dma-axi-dmac.c 	if (sg->id != AXI_DMAC_SG_UNUSED) {
sg                220 drivers/dma/dma-axi-dmac.c 		sg->schedule_when_free = true;
sg                236 drivers/dma/dma-axi-dmac.c 	sg->id = axi_dmac_read(dmac, AXI_DMAC_REG_TRANSFER_ID);
sg                239 drivers/dma/dma-axi-dmac.c 		axi_dmac_write(dmac, AXI_DMAC_REG_DEST_ADDRESS, sg->dest_addr);
sg                240 drivers/dma/dma-axi-dmac.c 		axi_dmac_write(dmac, AXI_DMAC_REG_DEST_STRIDE, sg->dest_stride);
sg                244 drivers/dma/dma-axi-dmac.c 		axi_dmac_write(dmac, AXI_DMAC_REG_SRC_ADDRESS, sg->src_addr);
sg                245 drivers/dma/dma-axi-dmac.c 		axi_dmac_write(dmac, AXI_DMAC_REG_SRC_STRIDE, sg->src_stride);
sg                260 drivers/dma/dma-axi-dmac.c 	axi_dmac_write(dmac, AXI_DMAC_REG_X_LENGTH, sg->x_len - 1);
sg                261 drivers/dma/dma-axi-dmac.c 	axi_dmac_write(dmac, AXI_DMAC_REG_Y_LENGTH, sg->y_len - 1);
sg                273 drivers/dma/dma-axi-dmac.c 	struct axi_dmac_sg *sg)
sg                276 drivers/dma/dma-axi-dmac.c 		return sg->x_len * sg->y_len;
sg                278 drivers/dma/dma-axi-dmac.c 		return sg->x_len;
sg                285 drivers/dma/dma-axi-dmac.c 	struct axi_dmac_sg *sg;
sg                296 drivers/dma/dma-axi-dmac.c 				sg = &desc->sg[i];
sg                297 drivers/dma/dma-axi-dmac.c 				if (sg->id == AXI_DMAC_SG_UNUSED)
sg                299 drivers/dma/dma-axi-dmac.c 				if (sg->id == id) {
sg                301 drivers/dma/dma-axi-dmac.c 					sg->partial_len = len;
sg                332 drivers/dma/dma-axi-dmac.c 	struct axi_dmac_sg *sg;
sg                343 drivers/dma/dma-axi-dmac.c 		sg = &active->sg[i];
sg                344 drivers/dma/dma-axi-dmac.c 		total = axi_dmac_total_sg_bytes(chan, sg);
sg                345 drivers/dma/dma-axi-dmac.c 		rslt->residue += (total - sg->partial_len);
sg                353 drivers/dma/dma-axi-dmac.c 	struct axi_dmac_sg *sg;
sg                365 drivers/dma/dma-axi-dmac.c 		sg = &active->sg[active->num_completed];
sg                366 drivers/dma/dma-axi-dmac.c 		if (sg->id == AXI_DMAC_SG_UNUSED) /* Not yet submitted */
sg                368 drivers/dma/dma-axi-dmac.c 		if (!(BIT(sg->id) & completed_transfers))
sg                371 drivers/dma/dma-axi-dmac.c 		sg->id = AXI_DMAC_SG_UNUSED;
sg                372 drivers/dma/dma-axi-dmac.c 		if (sg->schedule_when_free) {
sg                373 drivers/dma/dma-axi-dmac.c 			sg->schedule_when_free = false;
sg                377 drivers/dma/dma-axi-dmac.c 		if (sg->partial_len)
sg                384 drivers/dma/dma-axi-dmac.c 		    sg->partial_len) {
sg                471 drivers/dma/dma-axi-dmac.c 	desc = kzalloc(struct_size(desc, sg, num_sgs), GFP_NOWAIT);
sg                476 drivers/dma/dma-axi-dmac.c 		desc->sg[i].id = AXI_DMAC_SG_UNUSED;
sg                486 drivers/dma/dma-axi-dmac.c 	struct axi_dmac_sg *sg)
sg                503 drivers/dma/dma-axi-dmac.c 				sg->dest_addr = addr;
sg                505 drivers/dma/dma-axi-dmac.c 				sg->src_addr = addr;
sg                506 drivers/dma/dma-axi-dmac.c 			sg->x_len = segment_size;
sg                507 drivers/dma/dma-axi-dmac.c 			sg->y_len = 1;
sg                508 drivers/dma/dma-axi-dmac.c 			sg++;
sg                514 drivers/dma/dma-axi-dmac.c 			sg->dest_addr = addr;
sg                516 drivers/dma/dma-axi-dmac.c 			sg->src_addr = addr;
sg                517 drivers/dma/dma-axi-dmac.c 		sg->x_len = len;
sg                518 drivers/dma/dma-axi-dmac.c 		sg->y_len = 1;
sg                519 drivers/dma/dma-axi-dmac.c 		sg++;
sg                523 drivers/dma/dma-axi-dmac.c 	return sg;
sg                534 drivers/dma/dma-axi-dmac.c 	struct scatterlist *sg;
sg                542 drivers/dma/dma-axi-dmac.c 	for_each_sg(sgl, sg, sg_len, i)
sg                543 drivers/dma/dma-axi-dmac.c 		num_sgs += DIV_ROUND_UP(sg_dma_len(sg), chan->max_length);
sg                549 drivers/dma/dma-axi-dmac.c 	dsg = desc->sg;
sg                551 drivers/dma/dma-axi-dmac.c 	for_each_sg(sgl, sg, sg_len, i) {
sg                552 drivers/dma/dma-axi-dmac.c 		if (!axi_dmac_check_addr(chan, sg_dma_address(sg)) ||
sg                553 drivers/dma/dma-axi-dmac.c 		    !axi_dmac_check_len(chan, sg_dma_len(sg))) {
sg                558 drivers/dma/dma-axi-dmac.c 		dsg = axi_dmac_fill_linear_sg(chan, direction, sg_dma_address(sg), 1,
sg                559 drivers/dma/dma-axi-dmac.c 			sg_dma_len(sg), dsg);
sg                594 drivers/dma/dma-axi-dmac.c 		period_len, desc->sg);
sg                649 drivers/dma/dma-axi-dmac.c 		desc->sg[0].src_addr = xt->src_start;
sg                650 drivers/dma/dma-axi-dmac.c 		desc->sg[0].src_stride = xt->sgl[0].size + src_icg;
sg                654 drivers/dma/dma-axi-dmac.c 		desc->sg[0].dest_addr = xt->dst_start;
sg                655 drivers/dma/dma-axi-dmac.c 		desc->sg[0].dest_stride = xt->sgl[0].size + dst_icg;
sg                659 drivers/dma/dma-axi-dmac.c 		desc->sg[0].x_len = xt->sgl[0].size;
sg                660 drivers/dma/dma-axi-dmac.c 		desc->sg[0].y_len = xt->numf;
sg                662 drivers/dma/dma-axi-dmac.c 		desc->sg[0].x_len = xt->sgl[0].size * xt->numf;
sg                663 drivers/dma/dma-axi-dmac.c 		desc->sg[0].y_len = 1;
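
Several drivers in this stretch (altera-msgdma, dma-axi-dmac, and later pxa_dma, bam_dma, shdma-base) pre-count how many hardware descriptors a scatterlist needs by clipping each entry to the controller's maximum transfer length. A minimal sketch of that counting pass, with max_length standing in for the per-channel limit:

#include <linux/kernel.h>
#include <linux/scatterlist.h>

static unsigned int count_hw_segments(struct scatterlist *sgl,
				      unsigned int sg_len,
				      unsigned int max_length)
{
	struct scatterlist *sg;
	unsigned int num_sgs = 0;
	int i;

	/* An entry of len bytes splits into ceil(len / max_length) pieces. */
	for_each_sg(sgl, sg, sg_len, i)
		num_sgs += DIV_ROUND_UP(sg_dma_len(sg), max_length);

	return num_sgs;
}
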
sg                327 drivers/dma/dw-edma/dw-edma-core.c 	struct scatterlist *sg = NULL;
sg                342 drivers/dma/dw-edma/dw-edma-core.c 		if (xfer->xfer.sg.len < 1)
sg                363 drivers/dma/dw-edma/dw-edma-core.c 		cnt = xfer->xfer.sg.len;
sg                364 drivers/dma/dw-edma/dw-edma-core.c 		sg = xfer->xfer.sg.sgl;
sg                368 drivers/dma/dw-edma/dw-edma-core.c 		if (!xfer->cyclic && !sg)
sg                384 drivers/dma/dw-edma/dw-edma-core.c 			burst->sz = sg_dma_len(sg);
sg                394 drivers/dma/dw-edma/dw-edma-core.c 				burst->dar = sg_dma_address(sg);
sg                402 drivers/dma/dw-edma/dw-edma-core.c 				src_addr += sg_dma_len(sg);
sg                409 drivers/dma/dw-edma/dw-edma-core.c 				burst->sar = sg_dma_address(sg);
sg                417 drivers/dma/dw-edma/dw-edma-core.c 				dst_addr += sg_dma_len(sg);
sg                422 drivers/dma/dw-edma/dw-edma-core.c 			sg = sg_next(sg);
sg                444 drivers/dma/dw-edma/dw-edma-core.c 	xfer.xfer.sg.sgl = sgl;
sg                445 drivers/dma/dw-edma/dw-edma-core.c 	xfer.xfer.sg.len = len;
sg                145 drivers/dma/dw-edma/dw-edma-core.h 		struct dw_edma_sg	sg;
sg                639 drivers/dma/dw/core.c 	struct scatterlist	*sg;
sg                663 drivers/dma/dw/core.c 		for_each_sg(sgl, sg, sg_len, i) {
sg                668 drivers/dma/dw/core.c 			mem = sg_dma_address(sg);
sg                669 drivers/dma/dw/core.c 			len = sg_dma_len(sg);
sg                713 drivers/dma/dw/core.c 		for_each_sg(sgl, sg, sg_len, i) {
sg                718 drivers/dma/dw/core.c 			mem = sg_dma_address(sg);
sg                719 drivers/dma/dw/core.c 			len = sg_dma_len(sg);
sg               1039 drivers/dma/ep93xx_dma.c 	struct scatterlist *sg;
sg               1057 drivers/dma/ep93xx_dma.c 	for_each_sg(sgl, sg, sg_len, i) {
sg               1058 drivers/dma/ep93xx_dma.c 		size_t len = sg_dma_len(sg);
sg               1073 drivers/dma/ep93xx_dma.c 			desc->src_addr = sg_dma_address(sg);
sg               1077 drivers/dma/ep93xx_dma.c 			desc->dst_addr = sg_dma_address(sg);
sg                518 drivers/dma/fsl-edma-common.c 	struct scatterlist *sg;
sg                547 drivers/dma/fsl-edma-common.c 	for_each_sg(sgl, sg, sg_len, i) {
sg                552 drivers/dma/fsl-edma-common.c 			src_addr = sg_dma_address(sg);
sg                558 drivers/dma/fsl-edma-common.c 			dst_addr = sg_dma_address(sg);
sg                563 drivers/dma/fsl-edma-common.c 		iter = sg_dma_len(sg) / nbytes;
sg                 79 drivers/dma/hsu/hsu.c 		hsu_chan_writel(hsuc, HSU_CH_DxSAR(i), desc->sg[i].addr);
sg                 80 drivers/dma/hsu/hsu.c 		hsu_chan_writel(hsuc, HSU_CH_DxTSR(i), desc->sg[i].len);
sg                241 drivers/dma/hsu/hsu.c 	desc->sg = kcalloc(nents, sizeof(*desc->sg), GFP_NOWAIT);
sg                242 drivers/dma/hsu/hsu.c 	if (!desc->sg) {
sg                254 drivers/dma/hsu/hsu.c 	kfree(desc->sg);
sg                265 drivers/dma/hsu/hsu.c 	struct scatterlist *sg;
sg                272 drivers/dma/hsu/hsu.c 	for_each_sg(sgl, sg, sg_len, i) {
sg                273 drivers/dma/hsu/hsu.c 		desc->sg[i].addr = sg_dma_address(sg);
sg                274 drivers/dma/hsu/hsu.c 		desc->sg[i].len = sg_dma_len(sg);
sg                276 drivers/dma/hsu/hsu.c 		desc->length += sg_dma_len(sg);
sg                305 drivers/dma/hsu/hsu.c 		bytes += desc->sg[i].len;
sg                 70 drivers/dma/hsu/hsu.h 	struct hsu_dma_sg *sg;
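
The hsu entries allocate a flat array of address/length pairs with kcalloc() and copy each mapped scatterlist entry into it while accumulating the total transfer length. A sketch of that copy, with the element type reduced to a hypothetical addr/len pair:

#include <linux/scatterlist.h>
#include <linux/slab.h>

struct flat_sg {
	dma_addr_t addr;
	unsigned int len;
};

static struct flat_sg *flatten_sgl(struct scatterlist *sgl,
				   unsigned int sg_len, size_t *total)
{
	struct flat_sg *fsg;
	struct scatterlist *sg;
	int i;

	fsg = kcalloc(sg_len, sizeof(*fsg), GFP_NOWAIT);
	if (!fsg)
		return NULL;

	*total = 0;
	for_each_sg(sgl, sg, sg_len, i) {
		fsg[i].addr = sg_dma_address(sg);
		fsg[i].len = sg_dma_len(sg);
		*total += sg_dma_len(sg);
	}

	return fsg;
}
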
sg                299 drivers/dma/idma64.c 	struct scatterlist *sg;
sg                306 drivers/dma/idma64.c 	for_each_sg(sgl, sg, sg_len, i) {
sg                317 drivers/dma/idma64.c 		hw->phys = sg_dma_address(sg);
sg                318 drivers/dma/idma64.c 		hw->len = sg_dma_len(sg);
sg                455 drivers/dma/img-mdc-dma.c 	struct scatterlist *sg;
sg                474 drivers/dma/img-mdc-dma.c 	for_each_sg(sgl, sg, sg_len, i) {
sg                475 drivers/dma/img-mdc-dma.c 		dma_addr_t buf = sg_dma_address(sg);
sg                476 drivers/dma/img-mdc-dma.c 		size_t buf_len = sg_dma_len(sg);
sg                137 drivers/dma/imx-dma.c 	struct scatterlist		*sg;
sg                285 drivers/dma/imx-dma.c 	struct scatterlist *sg = d->sg;
sg                288 drivers/dma/imx-dma.c 	now = min_t(size_t, d->len, sg_dma_len(sg));
sg                293 drivers/dma/imx-dma.c 		imx_dmav1_writel(imxdma, sg->dma_address,
sg                296 drivers/dma/imx-dma.c 		imx_dmav1_writel(imxdma, sg->dma_address,
sg                326 drivers/dma/imx-dma.c 			d->sg && imxdma_hw_chain(imxdmac)) {
sg                327 drivers/dma/imx-dma.c 		d->sg = sg_next(d->sg);
sg                328 drivers/dma/imx-dma.c 		if (d->sg) {
sg                445 drivers/dma/imx-dma.c 	if (desc->sg) {
sg                447 drivers/dma/imx-dma.c 		desc->sg = sg_next(desc->sg);
sg                449 drivers/dma/imx-dma.c 		if (desc->sg) {
sg                587 drivers/dma/imx-dma.c 				d->sg, d->sgcount, d->len,
sg                598 drivers/dma/imx-dma.c 				d->sg, d->sgcount, d->len,
sg                821 drivers/dma/imx-dma.c 	struct scatterlist *sg;
sg                831 drivers/dma/imx-dma.c 	for_each_sg(sgl, sg, sg_len, i) {
sg                832 drivers/dma/imx-dma.c 		dma_length += sg_dma_len(sg);
sg                851 drivers/dma/imx-dma.c 	desc->sg = sgl;
sg                907 drivers/dma/imx-dma.c 	desc->sg = imxdmac->sg_list;
sg               1449 drivers/dma/imx-sdma.c 	struct scatterlist *sg;
sg               1461 drivers/dma/imx-sdma.c 	for_each_sg(sgl, sg, sg_len, i) {
sg               1465 drivers/dma/imx-sdma.c 		bd->buffer_addr = sg->dma_address;
sg               1467 drivers/dma/imx-sdma.c 		count = sg_dma_len(sg);
sg               1484 drivers/dma/imx-sdma.c 			if (count & 3 || sg->dma_address & 3)
sg               1489 drivers/dma/imx-sdma.c 			if (count & 1 || sg->dma_address & 1)
sg               1508 drivers/dma/imx-sdma.c 				i, count, (u64)sg->dma_address,
sg                774 drivers/dma/ipu/ipu_idmac.c 	struct idmac_tx_desc *desc, struct scatterlist *sg, int buf_idx)
sg                788 drivers/dma/ipu/ipu_idmac.c 	ipu_update_channel_buffer(ichan, buf_idx, sg_dma_address(sg));
sg                792 drivers/dma/ipu/ipu_idmac.c 		sg, chan_id, buf_idx);
sg                801 drivers/dma/ipu/ipu_idmac.c 	struct scatterlist *sg;
sg                804 drivers/dma/ipu/ipu_idmac.c 	for (i = 0, sg = desc->sg; i < 2 && sg; i++) {
sg                805 drivers/dma/ipu/ipu_idmac.c 		if (!ichan->sg[i]) {
sg                806 drivers/dma/ipu/ipu_idmac.c 			ichan->sg[i] = sg;
sg                808 drivers/dma/ipu/ipu_idmac.c 			ret = ipu_submit_buffer(ichan, desc, sg, i);
sg                812 drivers/dma/ipu/ipu_idmac.c 			sg = sg_next(sg);
sg                847 drivers/dma/ipu/ipu_idmac.c 		dma_addr_t dma_1 = sg_is_last(desc->sg) ? 0 :
sg                848 drivers/dma/ipu/ipu_idmac.c 			sg_dma_address(&desc->sg[1]);
sg                850 drivers/dma/ipu/ipu_idmac.c 		WARN_ON(ichan->sg[0] || ichan->sg[1]);
sg                858 drivers/dma/ipu/ipu_idmac.c 						 sg_dma_address(&desc->sg[0]),
sg                864 drivers/dma/ipu/ipu_idmac.c 	dev_dbg(dev, "Submitting sg %p\n", &desc->sg[0]);
sg               1124 drivers/dma/ipu/ipu_idmac.c 	struct idmac_tx_desc **desc, struct scatterlist *sg)
sg               1126 drivers/dma/ipu/ipu_idmac.c 	struct scatterlist *sgnew = sg ? sg_next(sg) : NULL;
sg               1138 drivers/dma/ipu/ipu_idmac.c 	return (*desc)->sg;
sg               1158 drivers/dma/ipu/ipu_idmac.c 	struct scatterlist **sg, *sgnext, *sgnew = NULL;
sg               1207 drivers/dma/ipu/ipu_idmac.c 		ichan->sg[ichan->active_buffer] = NULL;
sg               1221 drivers/dma/ipu/ipu_idmac.c 	sg = &ichan->sg[ichan->active_buffer];
sg               1222 drivers/dma/ipu/ipu_idmac.c 	sgnext = ichan->sg[!ichan->active_buffer];
sg               1224 drivers/dma/ipu/ipu_idmac.c 	if (!*sg) {
sg               1233 drivers/dma/ipu/ipu_idmac.c 		irq, (u64)sg_dma_address(*sg),
sg               1238 drivers/dma/ipu/ipu_idmac.c 	sgnew = idmac_sg_next(ichan, &descnew, *sg);
sg               1247 drivers/dma/ipu/ipu_idmac.c 		if (!WARN_ON(sg_next(*sg)))
sg               1249 drivers/dma/ipu/ipu_idmac.c 		ichan->sg[!ichan->active_buffer] = sgnew;
sg               1265 drivers/dma/ipu/ipu_idmac.c 	if (unlikely(!sg_next(*sg)) || !sgnext) {
sg               1274 drivers/dma/ipu/ipu_idmac.c 	*sg = sgnew;
sg               1311 drivers/dma/ipu/ipu_idmac.c 		struct scatterlist *sg;
sg               1319 drivers/dma/ipu/ipu_idmac.c 				for_each_sg(desc->sg, sg, desc->sg_len, k) {
sg               1320 drivers/dma/ipu/ipu_idmac.c 					if (ichan->sg[0] == sg)
sg               1321 drivers/dma/ipu/ipu_idmac.c 						ichan->sg[0] = NULL;
sg               1322 drivers/dma/ipu/ipu_idmac.c 					else if (ichan->sg[1] == sg)
sg               1323 drivers/dma/ipu/ipu_idmac.c 						ichan->sg[1] = NULL;
sg               1363 drivers/dma/ipu/ipu_idmac.c 		desc->sg	= sgl;
sg               1414 drivers/dma/ipu/ipu_idmac.c 	ichan->sg[0] = NULL;
sg               1415 drivers/dma/ipu/ipu_idmac.c 	ichan->sg[1] = NULL;
sg               1453 drivers/dma/ipu/ipu_idmac.c 	ichan->sg[0] = NULL;
sg               1454 drivers/dma/ipu/ipu_idmac.c 	ichan->sg[1] = NULL;
sg                539 drivers/dma/k3dma.c 	struct scatterlist *sg;
sg                548 drivers/dma/k3dma.c 	for_each_sg(sgl, sg, sglen, i) {
sg                549 drivers/dma/k3dma.c 		avail = sg_dma_len(sg);
sg                560 drivers/dma/k3dma.c 	for_each_sg(sgl, sg, sglen, i) {
sg                561 drivers/dma/k3dma.c 		addr = sg_dma_address(sg);
sg                562 drivers/dma/k3dma.c 		avail = sg_dma_len(sg);
sg                533 drivers/dma/mmp_pdma.c 	struct scatterlist *sg;
sg                544 drivers/dma/mmp_pdma.c 	for_each_sg(sgl, sg, sg_len, i) {
sg                545 drivers/dma/mmp_pdma.c 		addr = sg_dma_address(sg);
sg                130 drivers/dma/moxart-dma.c 	struct moxart_sg		sg[0];
sg                312 drivers/dma/moxart-dma.c 	d = kzalloc(struct_size(d, sg, sg_len), GFP_ATOMIC);
sg                321 drivers/dma/moxart-dma.c 		d->sg[i].addr = sg_dma_address(sgent);
sg                322 drivers/dma/moxart-dma.c 		d->sg[i].len = sg_dma_len(sgent);
sg                407 drivers/dma/moxart-dma.c 	struct moxart_sg *sg = ch->desc->sg + idx;
sg                410 drivers/dma/moxart-dma.c 		moxart_dma_set_params(ch, sg->addr, d->dev_addr);
sg                412 drivers/dma/moxart-dma.c 		moxart_dma_set_params(ch, d->dev_addr, sg->addr);
sg                414 drivers/dma/moxart-dma.c 	moxart_set_transfer_params(ch, sg->len);
sg                457 drivers/dma/moxart-dma.c 		size += d->sg[i].len;
sg                695 drivers/dma/mpc512x_dma.c 	struct scatterlist *sg;
sg                706 drivers/dma/mpc512x_dma.c 	for_each_sg(sgl, sg, sg_len, i) {
sg                743 drivers/dma/mpc512x_dma.c 			tcd->daddr = sg_dma_address(sg);
sg                745 drivers/dma/mpc512x_dma.c 			if (!IS_ALIGNED(sg_dma_address(sg), mchan->dwidth))
sg                751 drivers/dma/mpc512x_dma.c 			tcd->saddr = sg_dma_address(sg);
sg                754 drivers/dma/mpc512x_dma.c 			if (!IS_ALIGNED(sg_dma_address(sg), mchan->swidth))
sg                765 drivers/dma/mpc512x_dma.c 			tcd->nbytes = sg_dma_len(sg);
sg                773 drivers/dma/mpc512x_dma.c 			len = sg_dma_len(sg);
sg                502 drivers/dma/mxs-dma.c 	struct scatterlist *sg;
sg                554 drivers/dma/mxs-dma.c 		for_each_sg(sgl, sg, sg_len, i) {
sg                555 drivers/dma/mxs-dma.c 			if (sg_dma_len(sg) > MAX_XFER_BYTES) {
sg                557 drivers/dma/mxs-dma.c 						sg_dma_len(sg), MAX_XFER_BYTES);
sg                564 drivers/dma/mxs-dma.c 			ccw->bufaddr = sg->dma_address;
sg                565 drivers/dma/mxs-dma.c 			ccw->xfer_bytes = sg_dma_len(sg);
sg                155 drivers/dma/nbpfaxi.c 	struct list_head sg;
sg                384 drivers/dma/nbpfaxi.c 	struct nbpf_link_desc *ldesc = list_first_entry(&desc->sg, struct nbpf_link_desc, node);
sg                719 drivers/dma/nbpfaxi.c 		INIT_LIST_HEAD(&desc->sg);
sg                743 drivers/dma/nbpfaxi.c 	list_for_each_entry_safe(ldesc, tmp, &desc->sg, node)
sg                820 drivers/dma/nbpfaxi.c 			list_move_tail(&ldesc->node, &desc->sg);
sg                957 drivers/dma/nbpfaxi.c 	list_for_each_entry(ldesc, &desc->sg, node) {
sg                886 drivers/dma/owl-dma.c 	struct scatterlist *sg;
sg                897 drivers/dma/owl-dma.c 	for_each_sg(sgl, sg, sg_len, i) {
sg                898 drivers/dma/owl-dma.c 		addr = sg_dma_address(sg);
sg                899 drivers/dma/owl-dma.c 		len = sg_dma_len(sg);
sg                576 drivers/dma/pch_dma.c 	struct scatterlist *sg;
sg                595 drivers/dma/pch_dma.c 	for_each_sg(sgl, sg, sg_len, i) {
sg                602 drivers/dma/pch_dma.c 		desc->regs.mem_addr = sg_dma_address(sg);
sg                603 drivers/dma/pch_dma.c 		desc->regs.size = sg_dma_len(sg);
sg               2835 drivers/dma/pl330.c 	struct scatterlist *sg;
sg               2848 drivers/dma/pl330.c 	for_each_sg(sgl, sg, sg_len, i) {
sg               2870 drivers/dma/pl330.c 			fill_px(&desc->px, pch->fifo_dma, sg_dma_address(sg),
sg               2871 drivers/dma/pl330.c 				sg_dma_len(sg));
sg               2875 drivers/dma/pl330.c 			fill_px(&desc->px, sg_dma_address(sg), pch->fifo_dma,
sg               2876 drivers/dma/pl330.c 				sg_dma_len(sg));
sg               2882 drivers/dma/pl330.c 		desc->bytes_requested = sg_dma_len(sg);
sg                978 drivers/dma/pxa_dma.c 	struct scatterlist *sg;
sg                990 drivers/dma/pxa_dma.c 	for_each_sg(sgl, sg, sg_len, i)
sg                991 drivers/dma/pxa_dma.c 		nb_desc += DIV_ROUND_UP(sg_dma_len(sg), PDMA_MAX_DESC_BYTES);
sg                996 drivers/dma/pxa_dma.c 	for_each_sg(sgl, sg, sg_len, i) {
sg                997 drivers/dma/pxa_dma.c 		dma = sg_dma_address(sg);
sg                998 drivers/dma/pxa_dma.c 		avail = sg_dma_len(sg);
sg                614 drivers/dma/qcom/bam_dma.c 	struct scatterlist *sg;
sg                626 drivers/dma/qcom/bam_dma.c 	for_each_sg(sgl, sg, sg_len, i)
sg                627 drivers/dma/qcom/bam_dma.c 		num_alloc += DIV_ROUND_UP(sg_dma_len(sg), BAM_FIFO_SIZE);
sg                648 drivers/dma/qcom/bam_dma.c 	for_each_sg(sgl, sg, sg_len, i) {
sg                649 drivers/dma/qcom/bam_dma.c 		unsigned int remainder = sg_dma_len(sg);
sg                656 drivers/dma/qcom/bam_dma.c 			desc->addr = cpu_to_le32(sg_dma_address(sg) +
sg                985 drivers/dma/s3c24xx-dma.c 	struct scatterlist *sg;
sg               1042 drivers/dma/s3c24xx-dma.c 	for_each_sg(sgl, sg, sg_len, tmp) {
sg               1050 drivers/dma/s3c24xx-dma.c 		dsg->len = sg_dma_len(sg);
sg               1052 drivers/dma/s3c24xx-dma.c 			dsg->src_addr = sg_dma_address(sg);
sg               1056 drivers/dma/s3c24xx-dma.c 			dsg->dst_addr = sg_dma_address(sg);
sg                 81 drivers/dma/sa11x0-dma.c 	struct sa11x0_dma_sg	sg[0];
sg                162 drivers/dma/sa11x0-dma.c 	struct sa11x0_dma_sg *sg;
sg                198 drivers/dma/sa11x0-dma.c 	sg = &txd->sg[p->sg_load++];
sg                212 drivers/dma/sa11x0-dma.c 	writel_relaxed(sg->addr, base + dbsx);
sg                213 drivers/dma/sa11x0-dma.c 	writel_relaxed(sg->len, base + dbtx);
sg                218 drivers/dma/sa11x0-dma.c 		'A' + (dbsx == DMA_DBSB), sg->addr,
sg                219 drivers/dma/sa11x0-dma.c 		'A' + (dbtx == DMA_DBTB), sg->len);
sg                466 drivers/dma/sa11x0-dma.c 					i, txd->sg[i].addr, txd->sg[i].len);
sg                467 drivers/dma/sa11x0-dma.c 				if (addr >= txd->sg[i].addr &&
sg                468 drivers/dma/sa11x0-dma.c 				    addr < txd->sg[i].addr + txd->sg[i].len) {
sg                471 drivers/dma/sa11x0-dma.c 					len = txd->sg[i].len -
sg                472 drivers/dma/sa11x0-dma.c 						(addr - txd->sg[i].addr);
sg                482 drivers/dma/sa11x0-dma.c 					i, txd->sg[i].addr, txd->sg[i].len);
sg                483 drivers/dma/sa11x0-dma.c 				bytes += txd->sg[i].len;
sg                523 drivers/dma/sa11x0-dma.c 	struct dma_chan *chan, struct scatterlist *sg, unsigned int sglen,
sg                543 drivers/dma/sa11x0-dma.c 	for_each_sg(sg, sgent, sglen, i) {
sg                556 drivers/dma/sa11x0-dma.c 	txd = kzalloc(struct_size(txd, sg, j), GFP_ATOMIC);
sg                563 drivers/dma/sa11x0-dma.c 	for_each_sg(sg, sgent, sglen, i) {
sg                585 drivers/dma/sa11x0-dma.c 			txd->sg[j].addr = addr;
sg                586 drivers/dma/sa11x0-dma.c 			txd->sg[j].len = tlen;
sg                626 drivers/dma/sa11x0-dma.c 	txd = kzalloc(struct_size(txd, sg, sglen), GFP_ATOMIC);
sg                643 drivers/dma/sa11x0-dma.c 			txd->sg[k].addr = addr;
sg                644 drivers/dma/sa11x0-dma.c 			txd->sg[k].len = tlen;
sg                901 drivers/dma/sh/rcar-dmac.c 	struct scatterlist *sg;
sg                930 drivers/dma/sh/rcar-dmac.c 	for_each_sg(sgl, sg, sg_len, i) {
sg                931 drivers/dma/sh/rcar-dmac.c 		dma_addr_t mem_addr = sg_dma_address(sg);
sg                932 drivers/dma/sh/rcar-dmac.c 		unsigned int len = sg_dma_len(sg);
sg                982 drivers/dma/sh/rcar-dmac.c 				chan->index, chunk, desc, i, sg, size, len,
sg                564 drivers/dma/sh/shdma-base.c 	struct scatterlist *sg;
sg                571 drivers/dma/sh/shdma-base.c 	for_each_sg(sgl, sg, sg_len, i)
sg                572 drivers/dma/sh/shdma-base.c 		chunks += DIV_ROUND_UP(sg_dma_len(sg), schan->max_xfer_len);
sg                588 drivers/dma/sh/shdma-base.c 	for_each_sg(sgl, sg, sg_len, i) {
sg                589 drivers/dma/sh/shdma-base.c 		dma_addr_t sg_addr = sg_dma_address(sg);
sg                590 drivers/dma/sh/shdma-base.c 		size_t len = sg_dma_len(sg);
sg                597 drivers/dma/sh/shdma-base.c 				i, sg, len, &sg_addr);
sg                644 drivers/dma/sh/shdma-base.c 	struct scatterlist sg;
sg                651 drivers/dma/sh/shdma-base.c 	sg_init_table(&sg, 1);
sg                652 drivers/dma/sh/shdma-base.c 	sg_set_page(&sg, pfn_to_page(PFN_DOWN(dma_src)), len,
sg                654 drivers/dma/sh/shdma-base.c 	sg_dma_address(&sg) = dma_src;
sg                655 drivers/dma/sh/shdma-base.c 	sg_dma_len(&sg) = len;
sg                657 drivers/dma/sh/shdma-base.c 	return shdma_prep_sg(schan, &sg, 1, &dma_dest, DMA_MEM_TO_MEM,
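
shdma-base's memcpy path above reuses the slave-sg machinery by wrapping a single already-mapped address range in a one-entry scatterlist built on the stack. A minimal sketch of that construction:

#include <linux/mm.h>
#include <linux/pfn.h>
#include <linux/scatterlist.h>

static void build_single_entry_sg(struct scatterlist *sg,
				  dma_addr_t dma_src, size_t len)
{
	sg_init_table(sg, 1);
	/* Back the entry with its page so sg_virt() and offsets stay coherent. */
	sg_set_page(sg, pfn_to_page(PFN_DOWN(dma_src)), len,
		    offset_in_page(dma_src));
	sg_dma_address(sg) = dma_src;
	sg_dma_len(sg) = len;
}
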
sg                 60 drivers/dma/sh/usb-dmac.c 	struct usb_dmac_sg sg[];
sg                197 drivers/dma/sh/usb-dmac.c 	struct usb_dmac_sg *sg = desc->sg + index;
sg                203 drivers/dma/sh/usb-dmac.c 		dst_addr = sg->mem_addr;
sg                205 drivers/dma/sh/usb-dmac.c 		src_addr = sg->mem_addr;
sg                209 drivers/dma/sh/usb-dmac.c 		chan->index, sg, sg->size, &src_addr, &dst_addr);
sg                214 drivers/dma/sh/usb-dmac.c 			    DIV_ROUND_UP(sg->size, USB_DMAC_XFER_SIZE));
sg                215 drivers/dma/sh/usb-dmac.c 	usb_dmac_chan_write(chan, USB_DMATEND, usb_dmac_calc_tend(sg->size));
sg                269 drivers/dma/sh/usb-dmac.c 	desc = kzalloc(struct_size(desc, sg, sg_len), gfp);
sg                422 drivers/dma/sh/usb-dmac.c 	struct scatterlist *sg;
sg                437 drivers/dma/sh/usb-dmac.c 	for_each_sg(sgl, sg, sg_len, i) {
sg                438 drivers/dma/sh/usb-dmac.c 		desc->sg[i].mem_addr = sg_dma_address(sg);
sg                439 drivers/dma/sh/usb-dmac.c 		desc->sg[i].size = sg_dma_len(sg);
sg                471 drivers/dma/sh/usb-dmac.c 	struct usb_dmac_sg *sg = desc->sg + sg_index;
sg                472 drivers/dma/sh/usb-dmac.c 	u32 mem_addr = sg->mem_addr & 0xffffffff;
sg                473 drivers/dma/sh/usb-dmac.c 	unsigned int residue = sg->size;
sg                520 drivers/dma/sh/usb-dmac.c 		residue += desc->sg[i].size;
sg                929 drivers/dma/sprd-dma.c 	struct scatterlist *sg;
sg                963 drivers/dma/sprd-dma.c 	for_each_sg(sgl, sg, sglen, i) {
sg                964 drivers/dma/sprd-dma.c 		len = sg_dma_len(sg);
sg                967 drivers/dma/sprd-dma.c 			src = sg_dma_address(sg);
sg                971 drivers/dma/sprd-dma.c 			dst = sg_dma_address(sg);
sg                513 drivers/dma/st_fdma.c 	struct scatterlist *sg;
sg                531 drivers/dma/st_fdma.c 	for_each_sg(sgl, sg, sg_len, i) {
sg                540 drivers/dma/st_fdma.c 			hw_node->saddr = sg_dma_address(sg);
sg                542 drivers/dma/st_fdma.c 			hw_node->daddr = sg_dma_address(sg);
sg                544 drivers/dma/st_fdma.c 		hw_node->nbytes = sg_dma_len(sg);
sg                545 drivers/dma/st_fdma.c 		hw_node->generic.length = sg_dma_len(sg);
sg               1023 drivers/dma/ste_dma40.c 	struct scatterlist *sg;
sg               1028 drivers/dma/ste_dma40.c 	for_each_sg(sgl, sg, sg_len, i) {
sg               1029 drivers/dma/ste_dma40.c 		ret = d40_size_2_dmalen(sg_dma_len(sg),
sg               2171 drivers/dma/ste_dma40.c d40_prep_desc(struct d40_chan *chan, struct scatterlist *sg,
sg               2183 drivers/dma/ste_dma40.c 	desc->lli_len = d40_sg_2_dmalen(sg, sg_len, cfg->src_info.data_width,
sg               2515 drivers/dma/ste_dma40.c 	struct scatterlist *sg;
sg               2518 drivers/dma/ste_dma40.c 	sg = kcalloc(periods + 1, sizeof(struct scatterlist), GFP_NOWAIT);
sg               2519 drivers/dma/ste_dma40.c 	if (!sg)
sg               2523 drivers/dma/ste_dma40.c 		sg_dma_address(&sg[i]) = dma_addr;
sg               2524 drivers/dma/ste_dma40.c 		sg_dma_len(&sg[i]) = period_len;
sg               2528 drivers/dma/ste_dma40.c 	sg_chain(sg, periods + 1, sg);
sg               2530 drivers/dma/ste_dma40.c 	txd = d40_prep_sg(chan, sg, sg, periods, direction,
sg               2533 drivers/dma/ste_dma40.c 	kfree(sg);
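
For cyclic transfers, ste_dma40 above allocates periods + 1 scatterlist entries, fills one per period, and turns the spare tail entry into a chain link back to the head with sg_chain(), so the lli builder sees an endless loop; the temporary list is freed once the descriptors are built. A hedged sketch (the per-period address step assumes a contiguous ring buffer):

#include <linux/scatterlist.h>
#include <linux/slab.h>

static struct scatterlist *build_cyclic_sg(dma_addr_t buf_addr,
					   size_t period_len,
					   unsigned int periods)
{
	struct scatterlist *sg;
	unsigned int i;

	sg = kcalloc(periods + 1, sizeof(*sg), GFP_NOWAIT);
	if (!sg)
		return NULL;

	for (i = 0; i < periods; i++) {
		sg_dma_address(&sg[i]) = buf_addr + i * period_len;
		sg_dma_len(&sg[i]) = period_len;
	}

	/* The (periods + 1)-th entry becomes a link back to the start. */
	sg_chain(sg, periods + 1, sg);

	return sg;
}
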
sg                265 drivers/dma/ste_dma40_ll.c int d40_phy_sg_to_lli(struct scatterlist *sg,
sg                277 drivers/dma/ste_dma40_ll.c 	struct scatterlist *current_sg = sg;
sg                284 drivers/dma/ste_dma40_ll.c 	for_each_sg(sg, current_sg, sg_len, i) {
sg                417 drivers/dma/ste_dma40_ll.c int d40_log_sg_to_lli(struct scatterlist *sg,
sg                425 drivers/dma/ste_dma40_ll.c 	struct scatterlist *current_sg = sg;
sg                433 drivers/dma/ste_dma40_ll.c 	for_each_sg(sg, current_sg, sg_len, i) {
sg                441 drivers/dma/ste_dma40_ll.h int d40_phy_sg_to_lli(struct scatterlist *sg,
sg                453 drivers/dma/ste_dma40_ll.h int d40_log_sg_to_lli(struct scatterlist *sg,
sg                835 drivers/dma/stm32-dma.c 	struct scatterlist *sg;
sg                860 drivers/dma/stm32-dma.c 	for_each_sg(sgl, sg, sg_len, i) {
sg                862 drivers/dma/stm32-dma.c 					       sg_dma_len(sg));
sg                866 drivers/dma/stm32-dma.c 		desc->sg_req[i].len = sg_dma_len(sg);
sg                878 drivers/dma/stm32-dma.c 		desc->sg_req[i].chan_reg.dma_sm0ar = sg_dma_address(sg);
sg                879 drivers/dma/stm32-dma.c 		desc->sg_req[i].chan_reg.dma_sm1ar = sg_dma_address(sg);
sg                733 drivers/dma/stm32-mdma.c 	struct scatterlist *sg;
sg                738 drivers/dma/stm32-mdma.c 	for_each_sg(sgl, sg, sg_len, i) {
sg                739 drivers/dma/stm32-mdma.c 		if (sg_dma_len(sg) > STM32_MDMA_MAX_BLOCK_LEN) {
sg                745 drivers/dma/stm32-mdma.c 			src_addr = sg_dma_address(sg);
sg                749 drivers/dma/stm32-mdma.c 							sg_dma_len(sg));
sg                754 drivers/dma/stm32-mdma.c 			dst_addr = sg_dma_address(sg);
sg                757 drivers/dma/stm32-mdma.c 							sg_dma_len(sg));
sg                766 drivers/dma/stm32-mdma.c 					dst_addr, sg_dma_len(sg), ctcr, ctbr,
sg                776 drivers/dma/sun4i-dma.c 	struct scatterlist *sg;
sg                814 drivers/dma/sun4i-dma.c 	for_each_sg(sgl, sg, sg_len, i) {
sg                817 drivers/dma/sun4i-dma.c 			srcaddr = sg_dma_address(sg);
sg                821 drivers/dma/sun4i-dma.c 			dstaddr = sg_dma_address(sg);
sg                838 drivers/dma/sun4i-dma.c 							sg_dma_len(sg),
sg                842 drivers/dma/sun4i-dma.c 							sg_dma_len(sg),
sg                692 drivers/dma/sun6i-dma.c 	struct scatterlist *sg;
sg                710 drivers/dma/sun6i-dma.c 	for_each_sg(sgl, sg, sg_len, i) {
sg                715 drivers/dma/sun6i-dma.c 		v_lli->len = sg_dma_len(sg);
sg                719 drivers/dma/sun6i-dma.c 			v_lli->src = sg_dma_address(sg);
sg                728 drivers/dma/sun6i-dma.c 				&sconfig->dst_addr, &sg_dma_address(sg),
sg                729 drivers/dma/sun6i-dma.c 				sg_dma_len(sg), flags);
sg                733 drivers/dma/sun6i-dma.c 			v_lli->dst = sg_dma_address(sg);
sg                741 drivers/dma/sun6i-dma.c 				&sg_dma_address(sg), &sconfig->src_addr,
sg                742 drivers/dma/sun6i-dma.c 				sg_dma_len(sg), flags);
sg               1008 drivers/dma/tegra20-apb-dma.c 	struct scatterlist *sg;
sg               1064 drivers/dma/tegra20-apb-dma.c 	for_each_sg(sgl, sg, sg_len, i) {
sg               1067 drivers/dma/tegra20-apb-dma.c 		mem = sg_dma_address(sg);
sg               1068 drivers/dma/tegra20-apb-dma.c 		len = sg_dma_len(sg);
sg                592 drivers/dma/ti/cppi41.c 	struct scatterlist *sg;
sg                607 drivers/dma/ti/cppi41.c 	for_each_sg(sgl, sg, sg_len, i) {
sg                612 drivers/dma/ti/cppi41.c 		addr = lower_32_bits(sg_dma_address(sg));
sg                613 drivers/dma/ti/cppi41.c 		len = sg_dma_len(sg);
sg               1064 drivers/dma/ti/edma.c 	struct scatterlist *sg;
sg               1114 drivers/dma/ti/edma.c 	for_each_sg(sgl, sg, sg_len, i) {
sg               1117 drivers/dma/ti/edma.c 			dst_addr = sg_dma_address(sg);
sg               1119 drivers/dma/ti/edma.c 			src_addr = sg_dma_address(sg);
sg               1123 drivers/dma/ti/edma.c 				       sg_dma_len(sg), direction);
sg               1130 drivers/dma/ti/edma.c 		edesc->residue += sg_dma_len(sg);
sg                105 drivers/dma/ti/omap-dma.c 	struct omap_sg sg[0];
sg                235 drivers/dma/ti/omap-dma.c 			if (d->sg[i].t2_desc)
sg                236 drivers/dma/ti/omap-dma.c 				dma_pool_free(od->desc_pool, d->sg[i].t2_desc,
sg                237 drivers/dma/ti/omap-dma.c 					      d->sg[i].t2_desc_paddr);
sg                247 drivers/dma/ti/omap-dma.c 	struct omap_sg *sg = &d->sg[idx];
sg                248 drivers/dma/ti/omap-dma.c 	struct omap_type2_desc *t2_desc = sg->t2_desc;
sg                251 drivers/dma/ti/omap-dma.c 		d->sg[idx - 1].t2_desc->next_desc = sg->t2_desc_paddr;
sg                255 drivers/dma/ti/omap-dma.c 	t2_desc->en = sg->en;
sg                256 drivers/dma/ti/omap-dma.c 	t2_desc->addr = sg->addr;
sg                257 drivers/dma/ti/omap-dma.c 	t2_desc->fn = sg->fn & 0xffff;
sg                264 drivers/dma/ti/omap-dma.c 		t2_desc->cdei = sg->ei;
sg                266 drivers/dma/ti/omap-dma.c 		t2_desc->cdfi = sg->fi;
sg                274 drivers/dma/ti/omap-dma.c 		t2_desc->csei = sg->ei;
sg                276 drivers/dma/ti/omap-dma.c 		t2_desc->csfi = sg->fi;
sg                408 drivers/dma/ti/omap-dma.c 		omap_dma_chan_write(c, CNDP, d->sg[0].t2_desc_paddr);
sg                506 drivers/dma/ti/omap-dma.c 	struct omap_sg *sg = d->sg + c->sgidx;
sg                519 drivers/dma/ti/omap-dma.c 	omap_dma_chan_write(c, cxsa, sg->addr);
sg                520 drivers/dma/ti/omap-dma.c 	omap_dma_chan_write(c, cxei, sg->ei);
sg                521 drivers/dma/ti/omap-dma.c 	omap_dma_chan_write(c, cxfi, sg->fi);
sg                522 drivers/dma/ti/omap-dma.c 	omap_dma_chan_write(c, CEN, sg->en);
sg                523 drivers/dma/ti/omap-dma.c 	omap_dma_chan_write(c, CFN, sg->fn);
sg                712 drivers/dma/ti/omap-dma.c static size_t omap_dma_sg_size(struct omap_sg *sg)
sg                714 drivers/dma/ti/omap-dma.c 	return sg->en * sg->fn;
sg                723 drivers/dma/ti/omap-dma.c 		size += omap_dma_sg_size(&d->sg[i]);
sg                734 drivers/dma/ti/omap-dma.c 		size_t this_size = omap_dma_sg_size(&d->sg[i]) * es_size;
sg                738 drivers/dma/ti/omap-dma.c 		else if (addr >= d->sg[i].addr &&
sg                739 drivers/dma/ti/omap-dma.c 			 addr < d->sg[i].addr + this_size)
sg                740 drivers/dma/ti/omap-dma.c 			size += d->sg[i].addr + this_size - addr;
sg                931 drivers/dma/ti/omap-dma.c 	d = kzalloc(struct_size(d, sg, sglen), GFP_ATOMIC);
sg               1028 drivers/dma/ti/omap-dma.c 		struct omap_sg *osg = &d->sg[i];
sg               1054 drivers/dma/ti/omap-dma.c 			struct omap_sg *osg = &d->sg[i];
sg               1108 drivers/dma/ti/omap-dma.c 	d = kzalloc(sizeof(*d) + sizeof(d->sg[0]), GFP_ATOMIC);
sg               1116 drivers/dma/ti/omap-dma.c 	d->sg[0].addr = buf_addr;
sg               1117 drivers/dma/ti/omap-dma.c 	d->sg[0].en = period_len / es_bytes[es];
sg               1118 drivers/dma/ti/omap-dma.c 	d->sg[0].fn = buf_len / period_len;
sg               1176 drivers/dma/ti/omap-dma.c 	d = kzalloc(sizeof(*d) + sizeof(d->sg[0]), GFP_ATOMIC);
sg               1188 drivers/dma/ti/omap-dma.c 	d->sg[0].en = len / BIT(data_type);
sg               1189 drivers/dma/ti/omap-dma.c 	d->sg[0].fn = 1;
sg               1190 drivers/dma/ti/omap-dma.c 	d->sg[0].addr = dest;
sg               1220 drivers/dma/ti/omap-dma.c 	struct omap_sg *sg;
sg               1231 drivers/dma/ti/omap-dma.c 	d = kzalloc(sizeof(*d) + sizeof(d->sg[0]), GFP_ATOMIC);
sg               1239 drivers/dma/ti/omap-dma.c 	sg = &d->sg[0];
sg               1243 drivers/dma/ti/omap-dma.c 	sg->en = xt->sgl[0].size / BIT(data_type);
sg               1244 drivers/dma/ti/omap-dma.c 	sg->fn = xt->numf;
sg               1245 drivers/dma/ti/omap-dma.c 	sg->addr = xt->dst_start;
sg               1268 drivers/dma/ti/omap-dma.c 		sg->ei = 1;
sg               1269 drivers/dma/ti/omap-dma.c 		sg->fi = dst_icg + 1;
sg               1272 drivers/dma/ti/omap-dma.c 		sg->fi = 0;
sg                146 drivers/dma/timb_dma.c 	struct scatterlist *sg, bool last)
sg                148 drivers/dma/timb_dma.c 	if (sg_dma_len(sg) > USHRT_MAX) {
sg                154 drivers/dma/timb_dma.c 	if (sg_dma_len(sg) % sizeof(u32)) {
sg                156 drivers/dma/timb_dma.c 			sg_dma_len(sg));
sg                161 drivers/dma/timb_dma.c 		dma_desc, (unsigned long long)sg_dma_address(sg));
sg                163 drivers/dma/timb_dma.c 	dma_desc[7] = (sg_dma_address(sg) >> 24) & 0xff;
sg                164 drivers/dma/timb_dma.c 	dma_desc[6] = (sg_dma_address(sg) >> 16) & 0xff;
sg                165 drivers/dma/timb_dma.c 	dma_desc[5] = (sg_dma_address(sg) >> 8) & 0xff;
sg                166 drivers/dma/timb_dma.c 	dma_desc[4] = (sg_dma_address(sg) >> 0) & 0xff;
sg                168 drivers/dma/timb_dma.c 	dma_desc[3] = (sg_dma_len(sg) >> 8) & 0xff;
sg                169 drivers/dma/timb_dma.c 	dma_desc[2] = (sg_dma_len(sg) >> 0) & 0xff;
sg                497 drivers/dma/timb_dma.c 	struct scatterlist *sg;
sg                521 drivers/dma/timb_dma.c 	for_each_sg(sgl, sg, sg_len, i) {
sg                528 drivers/dma/timb_dma.c 		err = td_fill_desc(td_chan, td_desc->desc_list + desc_usage, sg,
sg                813 drivers/dma/txx9dmac.c 	struct scatterlist *sg;
sg                827 drivers/dma/txx9dmac.c 	for_each_sg(sgl, sg, sg_len, i) {
sg                838 drivers/dma/txx9dmac.c 		mem = sg_dma_address(sg);
sg                848 drivers/dma/txx9dmac.c 			desc->hwdesc.CNTR = sg_dma_len(sg);
sg                857 drivers/dma/txx9dmac.c 			desc->hwdesc32.CNTR = sg_dma_len(sg);
sg                110 drivers/dma/uniphier-mdmac.c 	struct scatterlist *sg;
sg                114 drivers/dma/uniphier-mdmac.c 	sg = &md->sgl[md->sg_cur];
sg                118 drivers/dma/uniphier-mdmac.c 		src_addr = sg_dma_address(sg);
sg                125 drivers/dma/uniphier-mdmac.c 		dest_addr = sg_dma_address(sg);
sg                128 drivers/dma/uniphier-mdmac.c 	chunk_size = sg_dma_len(sg);
sg               1784 drivers/dma/xilinx/xilinx_dma.c 	struct scatterlist *sg;
sg               1801 drivers/dma/xilinx/xilinx_dma.c 	for_each_sg(sgl, sg, sg_len, i) {
sg               1805 drivers/dma/xilinx/xilinx_dma.c 		while (sg_used < sg_dma_len(sg)) {
sg               1817 drivers/dma/xilinx/xilinx_dma.c 			copy = xilinx_dma_calc_copysize(chan, sg_dma_len(sg),
sg               1822 drivers/dma/xilinx/xilinx_dma.c 			xilinx_axidma_buf(chan, hw, sg_dma_address(sg),
sg                552 drivers/dma/zx_dma.c 	struct scatterlist *sg;
sg                562 drivers/dma/zx_dma.c 	for_each_sg(sgl, sg, sglen, i) {
sg                563 drivers/dma/zx_dma.c 		avail = sg_dma_len(sg);
sg                574 drivers/dma/zx_dma.c 	for_each_sg(sgl, sg, sglen, i) {
sg                575 drivers/dma/zx_dma.c 		addr = sg_dma_address(sg);
sg                576 drivers/dma/zx_dma.c 		avail = sg_dma_len(sg);
sg               1384 drivers/firewire/sbp2.c 	struct scatterlist *sg = scsi_sglist(orb->cmd);
sg               1402 drivers/firewire/sbp2.c 			cpu_to_be32(sg_dma_address(sg));
sg               1404 drivers/firewire/sbp2.c 			cpu_to_be32(COMMAND_ORB_DATA_SIZE(sg_dma_len(sg)));
sg               1408 drivers/firewire/sbp2.c 	for_each_sg(sg, sg, n, i) {
sg               1409 drivers/firewire/sbp2.c 		orb->page_table[i].high = cpu_to_be32(sg_dma_len(sg) << 16);
sg               1410 drivers/firewire/sbp2.c 		orb->page_table[i].low = cpu_to_be32(sg_dma_address(sg));
sg                392 drivers/fpga/zynq-fpga.c 	struct scatterlist *sg;
sg                400 drivers/fpga/zynq-fpga.c 	for_each_sg(sgt->sgl, sg, sgt->nents, i) {
sg                401 drivers/fpga/zynq-fpga.c 		if ((sg->offset % 8) || (sg->length % 4)) {
sg                117 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		uint64_t size, u32 domain, bool sg)
sg                131 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	} else if (domain == AMDGPU_GEM_DOMAIN_CPU && !sg) {
sg                163 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		uint64_t size, u32 domain, bool sg)
sg                174 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	} else if (domain == AMDGPU_GEM_DOMAIN_CPU && !sg) {
sg                198 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	bool sg = (bo->preferred_domains == AMDGPU_GEM_DOMAIN_CPU);
sg                202 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		sg = false;
sg                205 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	unreserve_mem_limit(adev, amdgpu_bo_size(bo), domain, sg);
sg                773 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct sg_table *sg = kmalloc(sizeof(*sg), GFP_KERNEL);
sg                775 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (!sg)
sg                777 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (sg_alloc_table(sg, 1, GFP_KERNEL)) {
sg                778 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		kfree(sg);
sg                781 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	sg->sgl->dma_address = addr;
sg                782 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	sg->sgl->length = size;
sg                784 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	sg->sgl->dma_length = size;
sg                786 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	return sg;
sg               1078 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	struct sg_table *sg = NULL;
sg               1115 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		sg = create_doorbell_sg(*offset, size);
sg               1116 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		if (!sg)
sg               1160 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	ret = amdgpu_amdkfd_reserve_mem_limit(adev, size, alloc_domain, !!sg);
sg               1183 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		bo->tbo.sg = sg;
sg               1184 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		bo->tbo.ttm->sg = sg;
sg               1214 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	unreserve_mem_limit(adev, size, alloc_domain, !!sg);
sg               1219 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (sg) {
sg               1220 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		sg_free_table(sg);
sg               1221 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		kfree(sg);
sg               1285 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 	if (mem->bo->tbo.sg) {
sg               1286 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		sg_free_table(mem->bo->tbo.sg);
sg               1287 drivers/gpu/drm/amd/amdgpu/amdgpu_amdkfd_gpuvm.c 		kfree(mem->bo->tbo.sg);
sg                368 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 				 struct sg_table *sg)
sg                388 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	bo->tbo.sg = sg;
sg                389 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.c 	bo->tbo.ttm->sg = sg;
sg                 32 drivers/gpu/drm/amd/amdgpu/amdgpu_dma_buf.h 				 struct sg_table *sg);
sg                 86 drivers/gpu/drm/amd/amdgpu/amdgpu_object.c 		drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg);
sg                950 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	r = sg_alloc_table_from_pages(ttm->sg, ttm->pages, ttm->num_pages, 0,
sg                958 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	nents = dma_map_sg(adev->dev, ttm->sg->sgl, ttm->sg->nents, direction);
sg                959 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (nents != ttm->sg->nents)
sg                963 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	drm_prime_sg_to_page_addr_arrays(ttm->sg, ttm->pages,
sg                969 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	kfree(ttm->sg);
sg                986 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (!ttm->sg->sgl)
sg                990 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	dma_unmap_sg(adev->dev, ttm->sg->sgl, ttm->sg->nents, direction);
sg                992 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	sg_free_table(ttm->sg);
sg               1259 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		ttm->sg = kzalloc(sizeof(struct sg_table), GFP_KERNEL);
sg               1260 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		if (!ttm->sg)
sg               1268 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	if (slave && ttm->sg) {
sg               1269 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		drm_prime_sg_to_page_addr_arrays(ttm->sg, ttm->pages,
sg               1301 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 		kfree(ttm->sg);
sg                380 drivers/gpu/drm/armada/armada_gem.c 	struct scatterlist *sg;
sg                398 drivers/gpu/drm/armada/armada_gem.c 		for_each_sg(sgt->sgl, sg, count, i) {
sg                407 drivers/gpu/drm/armada/armada_gem.c 			sg_set_page(sg, page, PAGE_SIZE, 0);
sg                435 drivers/gpu/drm/armada/armada_gem.c 	for_each_sg(sgt->sgl, sg, num, i)
sg                436 drivers/gpu/drm/armada/armada_gem.c 		put_page(sg_page(sg));
sg                455 drivers/gpu/drm/armada/armada_gem.c 		struct scatterlist *sg;
sg                456 drivers/gpu/drm/armada/armada_gem.c 		for_each_sg(sgt->sgl, sg, sgt->nents, i)
sg                457 drivers/gpu/drm/armada/armada_gem.c 			put_page(sg_page(sg));
sg                 63 drivers/gpu/drm/ati_pcigart.c 	struct drm_sg_mem *entry = dev->sg;
sg                103 drivers/gpu/drm/ati_pcigart.c 	struct drm_sg_mem *entry = dev->sg;
sg                316 drivers/gpu/drm/drm_bufs.c 		if (!dev->sg) {
sg                320 drivers/gpu/drm/drm_bufs.c 		map->offset += (unsigned long)dev->sg->virtual;
sg               1174 drivers/gpu/drm/drm_bufs.c 					+ (unsigned long)dev->sg->virtual);
sg                795 drivers/gpu/drm/drm_prime.c 	struct sg_table *sg = NULL;
sg                798 drivers/gpu/drm/drm_prime.c 	sg = kmalloc(sizeof(struct sg_table), GFP_KERNEL);
sg                799 drivers/gpu/drm/drm_prime.c 	if (!sg) {
sg                804 drivers/gpu/drm/drm_prime.c 	ret = sg_alloc_table_from_pages(sg, pages, nr_pages, 0,
sg                809 drivers/gpu/drm/drm_prime.c 	return sg;
sg                811 drivers/gpu/drm/drm_prime.c 	kfree(sg);
sg                951 drivers/gpu/drm/drm_prime.c 	struct scatterlist *sg;
sg                957 drivers/gpu/drm/drm_prime.c 	for_each_sg(sgt->sgl, sg, sgt->nents, count) {
sg                958 drivers/gpu/drm/drm_prime.c 		len = sg->length;
sg                959 drivers/gpu/drm/drm_prime.c 		page = sg_page(sg);
sg                960 drivers/gpu/drm/drm_prime.c 		addr = sg_dma_address(sg);
sg                988 drivers/gpu/drm/drm_prime.c void drm_prime_gem_destroy(struct drm_gem_object *obj, struct sg_table *sg)
sg                993 drivers/gpu/drm/drm_prime.c 	if (sg)
sg                994 drivers/gpu/drm/drm_prime.c 		dma_buf_unmap_attachment(attach, sg, DMA_BIDIRECTIONAL);
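
drm_prime converts between page arrays and sg tables in both directions: the allocation path above wraps sg_alloc_table_from_pages(), and the for_each_sg() loop expands each mapped entry back into per-page pointers and DMA addresses. A minimal sketch of that expansion (bounds handling simplified):

#include <linux/mm.h>
#include <linux/scatterlist.h>

static void sg_to_page_addr_arrays(struct sg_table *sgt, struct page **pages,
				   dma_addr_t *addrs, unsigned long max_entries)
{
	struct scatterlist *sg;
	unsigned long index = 0;
	unsigned int count;

	for_each_sg(sgt->sgl, sg, sgt->nents, count) {
		struct page *page = sg_page(sg);
		dma_addr_t addr = sg_dma_address(sg);
		unsigned int len = sg->length;

		/* One array slot per PAGE_SIZE chunk of this entry. */
		while (len > 0 && index < max_entries) {
			if (pages)
				pages[index] = page++;
			if (addrs)
				addrs[index] = addr;
			addr += PAGE_SIZE;
			len -= PAGE_SIZE;
			index++;
		}
	}
}
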
sg                 75 drivers/gpu/drm/drm_scatter.c 	if (drm_core_check_feature(dev, DRIVER_SG) && dev->sg &&
sg                 77 drivers/gpu/drm/drm_scatter.c 		drm_sg_cleanup(dev->sg);
sg                 78 drivers/gpu/drm/drm_scatter.c 		dev->sg = NULL;
sg                102 drivers/gpu/drm/drm_scatter.c 	if (dev->sg)
sg                154 drivers/gpu/drm/drm_scatter.c 	dev->sg = entry;
sg                215 drivers/gpu/drm/drm_scatter.c 	entry = dev->sg;
sg                216 drivers/gpu/drm/drm_scatter.c 	dev->sg = NULL;
sg                348 drivers/gpu/drm/drm_vm.c 	struct drm_sg_mem *entry = dev->sg;
sg                360 drivers/gpu/drm/drm_vm.c 	map_offset = map->offset - (unsigned long)dev->sg->virtual;
sg                 59 drivers/gpu/drm/etnaviv/etnaviv_drv.h 	struct dma_buf_attachment *attach, struct sg_table *sg);
sg                 74 drivers/gpu/drm/etnaviv/etnaviv_mmu.c {	struct scatterlist *sg;
sg                 82 drivers/gpu/drm/etnaviv/etnaviv_mmu.c 	for_each_sg(sgt->sgl, sg, sgt->nents, i) {
sg                 83 drivers/gpu/drm/etnaviv/etnaviv_mmu.c 		u32 pa = sg_dma_address(sg) - sg->offset;
sg                 84 drivers/gpu/drm/etnaviv/etnaviv_mmu.c 		size_t bytes = sg_dma_len(sg) + sg->offset;
sg                100 drivers/gpu/drm/etnaviv/etnaviv_mmu.c 	for_each_sg(sgt->sgl, sg, i, j) {
sg                101 drivers/gpu/drm/etnaviv/etnaviv_mmu.c 		size_t bytes = sg_dma_len(sg) + sg->offset;
sg                112 drivers/gpu/drm/etnaviv/etnaviv_mmu.c 	struct scatterlist *sg;
sg                116 drivers/gpu/drm/etnaviv/etnaviv_mmu.c 	for_each_sg(sgt->sgl, sg, sgt->nents, i) {
sg                117 drivers/gpu/drm/etnaviv/etnaviv_mmu.c 		size_t bytes = sg_dma_len(sg) + sg->offset;
sg                 69 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c 				   struct sg_table *sg,
sg                 74 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c 	dma_unmap_sg(attachment->dev, sg->sgl, sg->nents, dir);
sg                 75 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c 	sg_free_table(sg);
sg                 76 drivers/gpu/drm/i915/gem/i915_gem_dmabuf.c 	kfree(sg);
sg                 24 drivers/gpu/drm/i915/gem/i915_gem_internal.c 	struct scatterlist *sg;
sg                 26 drivers/gpu/drm/i915/gem/i915_gem_internal.c 	for (sg = st->sgl; sg; sg = __sg_next(sg)) {
sg                 27 drivers/gpu/drm/i915/gem/i915_gem_internal.c 		if (sg_page(sg))
sg                 28 drivers/gpu/drm/i915/gem/i915_gem_internal.c 			__free_pages(sg_page(sg), get_order(sg->length));
sg                 39 drivers/gpu/drm/i915/gem/i915_gem_internal.c 	struct scatterlist *sg;
sg                 77 drivers/gpu/drm/i915/gem/i915_gem_internal.c 	sg = st->sgl;
sg                 97 drivers/gpu/drm/i915/gem/i915_gem_internal.c 		sg_set_page(sg, page, PAGE_SIZE << order, 0);
sg                103 drivers/gpu/drm/i915/gem/i915_gem_internal.c 			sg_mark_end(sg);
sg                107 drivers/gpu/drm/i915/gem/i915_gem_internal.c 		sg = __sg_next(sg);
sg                132 drivers/gpu/drm/i915/gem/i915_gem_internal.c 	sg_set_page(sg, NULL, 0, 0);
sg                133 drivers/gpu/drm/i915/gem/i915_gem_internal.c 	sg_mark_end(sg);
sg                181 drivers/gpu/drm/i915/gem/i915_gem_object_types.h 			unsigned int sg;
sg                 52 drivers/gpu/drm/i915/gem/i915_gem_pages.c 	obj->mm.page_sizes.sg = 0;
sg                 55 drivers/gpu/drm/i915/gem/i915_gem_pages.c 			obj->mm.page_sizes.sg |= BIT(i);
sg                 57 drivers/gpu/drm/i915/gem/i915_gem_pages.c 	GEM_BUG_ON(!HAS_PAGE_SIZES(i915, obj->mm.page_sizes.sg));
sg                177 drivers/gpu/drm/i915/gem/i915_gem_pages.c 	obj->mm.page_sizes.phys = obj->mm.page_sizes.sg = 0;
sg                379 drivers/gpu/drm/i915/gem/i915_gem_pages.c 	struct scatterlist *sg;
sg                405 drivers/gpu/drm/i915/gem/i915_gem_pages.c 	sg = iter->sg_pos;
sg                407 drivers/gpu/drm/i915/gem/i915_gem_pages.c 	count = __sg_page_count(sg);
sg                422 drivers/gpu/drm/i915/gem/i915_gem_pages.c 		ret = radix_tree_insert(&iter->radix, idx, sg);
sg                434 drivers/gpu/drm/i915/gem/i915_gem_pages.c 		sg = ____sg_next(sg);
sg                435 drivers/gpu/drm/i915/gem/i915_gem_pages.c 		count = __sg_page_count(sg);
sg                439 drivers/gpu/drm/i915/gem/i915_gem_pages.c 	iter->sg_pos = sg;
sg                452 drivers/gpu/drm/i915/gem/i915_gem_pages.c 		sg = ____sg_next(sg);
sg                453 drivers/gpu/drm/i915/gem/i915_gem_pages.c 		count = __sg_page_count(sg);
sg                457 drivers/gpu/drm/i915/gem/i915_gem_pages.c 	return sg;
sg                462 drivers/gpu/drm/i915/gem/i915_gem_pages.c 	sg = radix_tree_lookup(&iter->radix, n);
sg                463 drivers/gpu/drm/i915/gem/i915_gem_pages.c 	GEM_BUG_ON(!sg);
sg                472 drivers/gpu/drm/i915/gem/i915_gem_pages.c 	if (unlikely(xa_is_value(sg))) {
sg                473 drivers/gpu/drm/i915/gem/i915_gem_pages.c 		unsigned long base = xa_to_value(sg);
sg                475 drivers/gpu/drm/i915/gem/i915_gem_pages.c 		sg = radix_tree_lookup(&iter->radix, base);
sg                476 drivers/gpu/drm/i915/gem/i915_gem_pages.c 		GEM_BUG_ON(!sg);
sg                483 drivers/gpu/drm/i915/gem/i915_gem_pages.c 	return sg;
sg                489 drivers/gpu/drm/i915/gem/i915_gem_pages.c 	struct scatterlist *sg;
sg                494 drivers/gpu/drm/i915/gem/i915_gem_pages.c 	sg = i915_gem_object_get_sg(obj, n, &offset);
sg                495 drivers/gpu/drm/i915/gem/i915_gem_pages.c 	return nth_page(sg_page(sg), offset);
sg                517 drivers/gpu/drm/i915/gem/i915_gem_pages.c 	struct scatterlist *sg;
sg                520 drivers/gpu/drm/i915/gem/i915_gem_pages.c 	sg = i915_gem_object_get_sg(obj, n, &offset);
sg                523 drivers/gpu/drm/i915/gem/i915_gem_pages.c 		*len = sg_dma_len(sg) - (offset << PAGE_SHIFT);
sg                525 drivers/gpu/drm/i915/gem/i915_gem_pages.c 	return sg_dma_address(sg) + (offset << PAGE_SHIFT);
sg                 24 drivers/gpu/drm/i915/gem/i915_gem_phys.c 	struct scatterlist *sg;
sg                 52 drivers/gpu/drm/i915/gem/i915_gem_phys.c 	sg = st->sgl;
sg                 53 drivers/gpu/drm/i915/gem/i915_gem_phys.c 	sg->offset = 0;
sg                 54 drivers/gpu/drm/i915/gem/i915_gem_phys.c 	sg->length = obj->base.size;
sg                 56 drivers/gpu/drm/i915/gem/i915_gem_phys.c 	sg_assign_page(sg, (struct page *)vaddr);
sg                 57 drivers/gpu/drm/i915/gem/i915_gem_phys.c 	sg_dma_address(sg) = dma;
sg                 58 drivers/gpu/drm/i915/gem/i915_gem_phys.c 	sg_dma_len(sg) = obj->base.size;
sg                 80 drivers/gpu/drm/i915/gem/i915_gem_phys.c 	__i915_gem_object_set_pages(obj, st, sg->length);
sg                 33 drivers/gpu/drm/i915/gem/i915_gem_shmem.c 	struct scatterlist *sg;
sg                 79 drivers/gpu/drm/i915/gem/i915_gem_shmem.c 	sg = st->sgl;
sg                134 drivers/gpu/drm/i915/gem/i915_gem_shmem.c 		    sg->length >= max_segment ||
sg                137 drivers/gpu/drm/i915/gem/i915_gem_shmem.c 				sg_page_sizes |= sg->length;
sg                138 drivers/gpu/drm/i915/gem/i915_gem_shmem.c 				sg = sg_next(sg);
sg                141 drivers/gpu/drm/i915/gem/i915_gem_shmem.c 			sg_set_page(sg, page, PAGE_SIZE, 0);
sg                143 drivers/gpu/drm/i915/gem/i915_gem_shmem.c 			sg->length += PAGE_SIZE;
sg                150 drivers/gpu/drm/i915/gem/i915_gem_shmem.c 	if (sg) { /* loop terminated early; short sg table */
sg                151 drivers/gpu/drm/i915/gem/i915_gem_shmem.c 		sg_page_sizes |= sg->length;
sg                152 drivers/gpu/drm/i915/gem/i915_gem_shmem.c 		sg_mark_end(sg);
sg                188 drivers/gpu/drm/i915/gem/i915_gem_shmem.c 	sg_mark_end(sg);
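
The shmem entries show the coalescing idiom: a page that is physically adjacent to the previous one extends the current segment rather than opening a new entry, bounded by max_segment. A self-contained sketch over a plain page array; coalesce_pages() is hypothetical and assumes st was allocated with room for n entries, n > 0.

#include <linux/scatterlist.h>

static unsigned int coalesce_pages(struct sg_table *st, struct page **pages,
				   unsigned int n, unsigned int max_segment)
{
	struct scatterlist *sg = st->sgl;
	unsigned int i, nents = 1;

	sg_set_page(sg, pages[0], PAGE_SIZE, 0);
	for (i = 1; i < n; i++) {
		if (page_to_pfn(pages[i]) == page_to_pfn(pages[i - 1]) + 1 &&
		    sg->length + PAGE_SIZE <= max_segment) {
			sg->length += PAGE_SIZE;	/* merge into current segment */
		} else {
			sg = sg_next(sg);
			sg_set_page(sg, pages[i], PAGE_SIZE, 0);
			nents++;
		}
	}

	sg_mark_end(sg);
	return nents;	/* caller records this in st->nents */
}
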
sg                479 drivers/gpu/drm/i915/gem/i915_gem_stolen.c 	struct scatterlist *sg;
sg                497 drivers/gpu/drm/i915/gem/i915_gem_stolen.c 	sg = st->sgl;
sg                498 drivers/gpu/drm/i915/gem/i915_gem_stolen.c 	sg->offset = 0;
sg                499 drivers/gpu/drm/i915/gem/i915_gem_stolen.c 	sg->length = size;
sg                501 drivers/gpu/drm/i915/gem/i915_gem_stolen.c 	sg_dma_address(sg) = (dma_addr_t)dev_priv->dsm.start + offset;
sg                502 drivers/gpu/drm/i915/gem/i915_gem_stolen.c 	sg_dma_len(sg) = size;
sg                 15 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c 	struct scatterlist *sg;
sg                 17 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c 	for (sg = pages->sgl; sg && nreal--; sg = __sg_next(sg))
sg                 18 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c 		__free_page(sg_page(sg));
sg                 29 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c 	struct scatterlist *sg, *src, *end;
sg                 42 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c 	sg = pages->sgl;
sg                 48 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c 			sg_mark_end(sg);
sg                 52 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c 		sg_set_page(sg, page, PAGE_SIZE, 0);
sg                 53 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c 		sg = __sg_next(sg);
sg                 56 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c 		for (end = sg, src = pages->sgl; sg; sg = __sg_next(sg)) {
sg                 57 drivers/gpu/drm/i915/gem/selftests/huge_gem_object.c 			sg_set_page(sg, sg_page(src), PAGE_SIZE, 0);
sg                 45 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	struct scatterlist *sg;
sg                 47 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	for (sg = st->sgl; sg; sg = __sg_next(sg)) {
sg                 48 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 		if (sg_page(sg))
sg                 49 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 			__free_pages(sg_page(sg), get_order(sg->length));
sg                 61 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	struct scatterlist *sg;
sg                 75 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	sg = st->sgl;
sg                 97 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 			sg_set_page(sg, page, page_size, 0);
sg                103 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 				sg_mark_end(sg);
sg                107 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 			sg = __sg_next(sg);
sg                124 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	sg_set_page(sg, NULL, 0, 0);
sg                125 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	sg_mark_end(sg);
sg                185 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	struct scatterlist *sg;
sg                200 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	sg = st->sgl;
sg                210 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 		sg->offset = 0;
sg                211 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 		sg->length = len;
sg                212 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 		sg_dma_len(sg) = len;
sg                213 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 		sg_dma_address(sg) = page_size;
sg                221 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 			sg_mark_end(sg);
sg                225 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 		sg = sg_next(sg);
sg                241 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	struct scatterlist *sg;
sg                253 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	sg = st->sgl;
sg                259 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	sg->offset = 0;
sg                260 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	sg->length = obj->base.size;
sg                261 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	sg_dma_len(sg) = obj->base.size;
sg                262 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	sg_dma_address(sg) = page_size;
sg                266 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	__i915_gem_object_set_pages(obj, st, sg->length);
sg                338 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	if (!HAS_PAGE_SIZES(i915, vma->page_sizes.sg)) {
sg                340 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 		       vma->page_sizes.sg & ~supported, supported);
sg                356 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	if (vma->page_sizes.sg != obj->mm.page_sizes.sg) {
sg                358 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 		       vma->page_sizes.sg, obj->mm.page_sizes.sg);
sg                422 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 			if (vma->page_sizes.sg != combination) {
sg                424 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 				       vma->page_sizes.sg, combination);
sg                493 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 		obj->mm.page_sizes.sg = page_size;
sg                677 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 		if (vma->page_sizes.sg & I915_GTT_PAGE_SIZE_64K) {
sg                814 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 			obj->mm.page_sizes.sg &= ~I915_GTT_PAGE_SIZE_2M;
sg                833 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 			if (!offset && vma->page_sizes.sg & I915_GTT_PAGE_SIZE_64K) {
sg               1008 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	if (obj->mm.page_sizes.sg & I915_GTT_PAGE_SIZE_64K)
sg               1011 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	max_page_size = rounddown_pow_of_two(obj->mm.page_sizes.sg);
sg               1056 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 		if (obj->mm.page_sizes.sg & I915_GTT_PAGE_SIZE_64K)
sg               1157 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 			obj->mm.page_sizes.sg = page_sizes;
sg               1360 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 		if (vma->page_sizes.sg < page_size) {
sg               1573 drivers/gpu/drm/i915/gem/selftests/huge_pages.c 	if (obj->mm.page_sizes.sg || obj->mm.page_sizes.phys) {
sg                 14 drivers/gpu/drm/i915/gem/selftests/mock_dmabuf.c 	struct scatterlist *sg;
sg                 25 drivers/gpu/drm/i915/gem/selftests/mock_dmabuf.c 	sg = st->sgl;
sg                 27 drivers/gpu/drm/i915/gem/selftests/mock_dmabuf.c 		sg_set_page(sg, mock->pages[i], PAGE_SIZE, 0);
sg                 28 drivers/gpu/drm/i915/gem/selftests/mock_dmabuf.c 		sg = sg_next(sg);
sg                 44 drivers/gpu/drm/i915/gvt/dmabuf.c 	struct scatterlist *sg;
sg                 66 drivers/gpu/drm/i915/gvt/dmabuf.c 	for_each_sg(st->sgl, sg, page_num, i) {
sg                 67 drivers/gpu/drm/i915/gvt/dmabuf.c 		sg->offset = 0;
sg                 68 drivers/gpu/drm/i915/gvt/dmabuf.c 		sg->length = PAGE_SIZE;
sg                 69 drivers/gpu/drm/i915/gvt/dmabuf.c 		sg_dma_address(sg) =
sg                 71 drivers/gpu/drm/i915/gvt/dmabuf.c 		sg_dma_len(sg) = PAGE_SIZE;
sg               1158 drivers/gpu/drm/i915/i915_gem_gtt.c 	struct scatterlist *sg;
sg               1161 drivers/gpu/drm/i915/i915_gem_gtt.c 	struct scatterlist *sg = vma->pages->sgl;
sg               1162 drivers/gpu/drm/i915/i915_gem_gtt.c 	dma_addr_t addr = sg_dma_address(sg);
sg               1163 drivers/gpu/drm/i915/i915_gem_gtt.c 	return (struct sgt_dma) { sg, addr, addr + sg->length };
sg               1181 drivers/gpu/drm/i915/i915_gem_gtt.c 		GEM_BUG_ON(iter->sg->length < I915_GTT_PAGE_SIZE);
sg               1186 drivers/gpu/drm/i915/i915_gem_gtt.c 			iter->sg = __sg_next(iter->sg);
sg               1187 drivers/gpu/drm/i915/i915_gem_gtt.c 			if (!iter->sg) {
sg               1192 drivers/gpu/drm/i915/i915_gem_gtt.c 			iter->dma = sg_dma_address(iter->sg);
sg               1193 drivers/gpu/drm/i915/i915_gem_gtt.c 			iter->max = iter->dma + iter->sg->length;
sg               1221 drivers/gpu/drm/i915/i915_gem_gtt.c 	dma_addr_t rem = iter->sg->length;
sg               1236 drivers/gpu/drm/i915/i915_gem_gtt.c 		if (vma->page_sizes.sg & I915_GTT_PAGE_SIZE_2M &&
sg               1253 drivers/gpu/drm/i915/i915_gem_gtt.c 			    vma->page_sizes.sg & I915_GTT_PAGE_SIZE_64K &&
sg               1263 drivers/gpu/drm/i915/i915_gem_gtt.c 			GEM_BUG_ON(iter->sg->length < page_size);
sg               1270 drivers/gpu/drm/i915/i915_gem_gtt.c 				iter->sg = __sg_next(iter->sg);
sg               1271 drivers/gpu/drm/i915/i915_gem_gtt.c 				if (!iter->sg)
sg               1274 drivers/gpu/drm/i915/i915_gem_gtt.c 				rem = iter->sg->length;
sg               1275 drivers/gpu/drm/i915/i915_gem_gtt.c 				iter->dma = sg_dma_address(iter->sg);
sg               1300 drivers/gpu/drm/i915/i915_gem_gtt.c 		      !iter->sg && IS_ALIGNED(vma->node.start +
sg               1331 drivers/gpu/drm/i915/i915_gem_gtt.c 	} while (iter->sg);
sg               1342 drivers/gpu/drm/i915/i915_gem_gtt.c 	if (vma->page_sizes.sg > I915_GTT_PAGE_SIZE) {
sg               1661 drivers/gpu/drm/i915/i915_gem_gtt.c 		GEM_BUG_ON(iter.sg->length < I915_GTT_PAGE_SIZE);
sg               1666 drivers/gpu/drm/i915/i915_gem_gtt.c 			iter.sg = __sg_next(iter.sg);
sg               1667 drivers/gpu/drm/i915/i915_gem_gtt.c 			if (!iter.sg)
sg               1670 drivers/gpu/drm/i915/i915_gem_gtt.c 			iter.dma = sg_dma_address(iter.sg);
sg               1671 drivers/gpu/drm/i915/i915_gem_gtt.c 			iter.max = iter.dma + iter.sg->length;
sg               3360 drivers/gpu/drm/i915/i915_gem_gtt.c 	     struct sg_table *st, struct scatterlist *sg)
sg               3373 drivers/gpu/drm/i915/i915_gem_gtt.c 			sg_set_page(sg, NULL, I915_GTT_PAGE_SIZE, 0);
sg               3374 drivers/gpu/drm/i915/i915_gem_gtt.c 			sg_dma_address(sg) =
sg               3376 drivers/gpu/drm/i915/i915_gem_gtt.c 			sg_dma_len(sg) = I915_GTT_PAGE_SIZE;
sg               3377 drivers/gpu/drm/i915/i915_gem_gtt.c 			sg = sg_next(sg);
sg               3382 drivers/gpu/drm/i915/i915_gem_gtt.c 	return sg;
sg               3391 drivers/gpu/drm/i915/i915_gem_gtt.c 	struct scatterlist *sg;
sg               3405 drivers/gpu/drm/i915/i915_gem_gtt.c 	sg = st->sgl;
sg               3408 drivers/gpu/drm/i915/i915_gem_gtt.c 		sg = rotate_pages(obj, rot_info->plane[i].offset,
sg               3410 drivers/gpu/drm/i915/i915_gem_gtt.c 				  rot_info->plane[i].stride, st, sg);
sg               3429 drivers/gpu/drm/i915/i915_gem_gtt.c 	    struct sg_table *st, struct scatterlist *sg)
sg               3451 drivers/gpu/drm/i915/i915_gem_gtt.c 			sg_set_page(sg, NULL, length, 0);
sg               3452 drivers/gpu/drm/i915/i915_gem_gtt.c 			sg_dma_address(sg) = addr;
sg               3453 drivers/gpu/drm/i915/i915_gem_gtt.c 			sg_dma_len(sg) = length;
sg               3454 drivers/gpu/drm/i915/i915_gem_gtt.c 			sg = sg_next(sg);
sg               3463 drivers/gpu/drm/i915/i915_gem_gtt.c 	return sg;
sg               3472 drivers/gpu/drm/i915/i915_gem_gtt.c 	struct scatterlist *sg;
sg               3486 drivers/gpu/drm/i915/i915_gem_gtt.c 	sg = st->sgl;
sg               3489 drivers/gpu/drm/i915/i915_gem_gtt.c 		sg = remap_pages(obj, rem_info->plane[i].offset,
sg               3491 drivers/gpu/drm/i915/i915_gem_gtt.c 				 rem_info->plane[i].stride, st, sg);
sg               3513 drivers/gpu/drm/i915/i915_gem_gtt.c 	struct scatterlist *sg, *iter;
sg               3529 drivers/gpu/drm/i915/i915_gem_gtt.c 	sg = st->sgl;
sg               3536 drivers/gpu/drm/i915/i915_gem_gtt.c 		sg_set_page(sg, NULL, len, 0);
sg               3537 drivers/gpu/drm/i915/i915_gem_gtt.c 		sg_dma_address(sg) =
sg               3539 drivers/gpu/drm/i915/i915_gem_gtt.c 		sg_dma_len(sg) = len;
sg               3544 drivers/gpu/drm/i915/i915_gem_gtt.c 			sg_mark_end(sg);
sg               3550 drivers/gpu/drm/i915/i915_gem_gtt.c 		sg = __sg_next(sg);
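
rotate_pages(), remap_pages() and the partial-view code above all emit page-less entries that carry only a DMA address and length, describing a rearranged view of pages mapped elsewhere. The shared step, as a hypothetical helper:

#include <linux/scatterlist.h>

static struct scatterlist *emit_dma_entry(struct scatterlist *sg,
					  dma_addr_t addr, unsigned int len)
{
	sg_set_page(sg, NULL, len, 0);	/* a view has no backing page */
	sg_dma_address(sg) = addr;
	sg_dma_len(sg) = len;
	return sg_next(sg);
}
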
sg                 53 drivers/gpu/drm/i915/i915_gpu_error.c static void __sg_set_buf(struct scatterlist *sg,
sg                 56 drivers/gpu/drm/i915/i915_gpu_error.c 	sg->page_link = (unsigned long)virt_to_page(addr);
sg                 57 drivers/gpu/drm/i915/i915_gpu_error.c 	sg->offset = offset_in_page(addr);
sg                 58 drivers/gpu/drm/i915/i915_gpu_error.c 	sg->length = len;
sg                 59 drivers/gpu/drm/i915/i915_gpu_error.c 	sg->dma_address = it;
sg                642 drivers/gpu/drm/i915/i915_gpu_error.c 		struct scatterlist *sg;
sg                644 drivers/gpu/drm/i915/i915_gpu_error.c 		for (sg = sgl; !sg_is_chain(sg); sg++) {
sg                645 drivers/gpu/drm/i915/i915_gpu_error.c 			kfree(sg_virt(sg));
sg                646 drivers/gpu/drm/i915/i915_gpu_error.c 			if (sg_is_last(sg))
sg                650 drivers/gpu/drm/i915/i915_gpu_error.c 		sg = sg_is_last(sg) ? NULL : sg_chain_ptr(sg);
sg                652 drivers/gpu/drm/i915/i915_gpu_error.c 		sgl = sg;
sg                831 drivers/gpu/drm/i915/i915_gpu_error.c 	struct scatterlist *sg;
sg                843 drivers/gpu/drm/i915/i915_gpu_error.c 	sg = READ_ONCE(error->fit);
sg                844 drivers/gpu/drm/i915/i915_gpu_error.c 	if (!sg || off < sg->dma_address)
sg                845 drivers/gpu/drm/i915/i915_gpu_error.c 		sg = error->sgl;
sg                846 drivers/gpu/drm/i915/i915_gpu_error.c 	if (!sg)
sg                849 drivers/gpu/drm/i915/i915_gpu_error.c 	pos = sg->dma_address;
sg                854 drivers/gpu/drm/i915/i915_gpu_error.c 		if (sg_is_chain(sg)) {
sg                855 drivers/gpu/drm/i915/i915_gpu_error.c 			sg = sg_chain_ptr(sg);
sg                856 drivers/gpu/drm/i915/i915_gpu_error.c 			GEM_BUG_ON(sg_is_chain(sg));
sg                859 drivers/gpu/drm/i915/i915_gpu_error.c 		len = sg->length;
sg                865 drivers/gpu/drm/i915/i915_gpu_error.c 		start = sg->offset;
sg                874 drivers/gpu/drm/i915/i915_gpu_error.c 		GEM_BUG_ON(!len || len > sg->length);
sg                876 drivers/gpu/drm/i915/i915_gpu_error.c 		memcpy(buf, page_address(sg_page(sg)) + start, len);
sg                884 drivers/gpu/drm/i915/i915_gpu_error.c 			WRITE_ONCE(error->fit, sg);
sg                887 drivers/gpu/drm/i915/i915_gpu_error.c 	} while (!sg_is_last(sg++));
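
The error-state reader above walks its scatterlist by hand with sg_is_chain()/sg_chain_ptr() instead of sg_next(), because it also repurposes sg->dma_address as a file offset. A sketch of that manual traversal in isolation:

#include <linux/scatterlist.h>

static void walk_chained(struct scatterlist *sgl,
			 void (*fn)(struct scatterlist *sg))
{
	struct scatterlist *sg = sgl;

	while (sg) {
		if (sg_is_chain(sg)) {
			sg = sg_chain_ptr(sg);	/* link entry: no data */
			continue;
		}
		fn(sg);
		if (sg_is_last(sg))
			break;
		sg++;	/* entries within one array are contiguous */
	}
}
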
sg                 12 drivers/gpu/drm/i915/i915_scatterlist.c 	struct scatterlist *sg, *new_sg;
sg                 22 drivers/gpu/drm/i915/i915_scatterlist.c 	for_each_sg(orig_st->sgl, sg, orig_st->nents, i) {
sg                 23 drivers/gpu/drm/i915/i915_scatterlist.c 		sg_set_page(new_sg, sg_page(sg), sg->length, 0);
sg                 24 drivers/gpu/drm/i915/i915_scatterlist.c 		sg_dma_address(new_sg) = sg_dma_address(sg);
sg                 25 drivers/gpu/drm/i915/i915_scatterlist.c 		sg_dma_len(new_sg) = sg_dma_len(sg);
sg                 42 drivers/gpu/drm/i915/i915_scatterlist.h static inline int __sg_page_count(const struct scatterlist *sg)
sg                 44 drivers/gpu/drm/i915/i915_scatterlist.h 	return sg->length >> PAGE_SHIFT;
sg                 47 drivers/gpu/drm/i915/i915_scatterlist.h static inline struct scatterlist *____sg_next(struct scatterlist *sg)
sg                 49 drivers/gpu/drm/i915/i915_scatterlist.h 	++sg;
sg                 50 drivers/gpu/drm/i915/i915_scatterlist.h 	if (unlikely(sg_is_chain(sg)))
sg                 51 drivers/gpu/drm/i915/i915_scatterlist.h 		sg = sg_chain_ptr(sg);
sg                 52 drivers/gpu/drm/i915/i915_scatterlist.h 	return sg;
sg                 64 drivers/gpu/drm/i915/i915_scatterlist.h static inline struct scatterlist *__sg_next(struct scatterlist *sg)
sg                 66 drivers/gpu/drm/i915/i915_scatterlist.h 	return sg_is_last(sg) ? NULL : ____sg_next(sg);
sg                 95 drivers/gpu/drm/i915/i915_scatterlist.h static inline unsigned int i915_sg_page_sizes(struct scatterlist *sg)
sg                100 drivers/gpu/drm/i915/i915_scatterlist.h 	while (sg) {
sg                101 drivers/gpu/drm/i915/i915_scatterlist.h 		GEM_BUG_ON(sg->offset);
sg                102 drivers/gpu/drm/i915/i915_scatterlist.h 		GEM_BUG_ON(!IS_ALIGNED(sg->length, PAGE_SIZE));
sg                103 drivers/gpu/drm/i915/i915_scatterlist.h 		page_sizes |= sg->length;
sg                104 drivers/gpu/drm/i915/i915_scatterlist.h 		sg = __sg_next(sg);
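
The i915_sg_page_sizes() helper appears above only in fragments. A completed sketch of the same idea, using the stock sg_next() instead of i915's __sg_next(): because every segment length is page aligned, OR-ing the lengths together yields a bitmask of the page sizes present.

#include <linux/scatterlist.h>

static unsigned int sg_page_sizes_sketch(struct scatterlist *sg)
{
	unsigned int page_sizes = 0;

	while (sg) {
		/* lengths are page aligned, so low bits are never set */
		page_sizes |= sg->length;
		sg = sg_next(sg);	/* returns NULL after the last entry */
	}

	return page_sizes;
}

The mm.page_sizes.sg tests elsewhere in this listing consume exactly such a mask, e.g. checking it against I915_GTT_PAGE_SIZE_64K.
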
sg                625 drivers/gpu/drm/i915/i915_vma.c 		    vma->page_sizes.sg > I915_GTT_PAGE_SIZE) {
sg                633 drivers/gpu/drm/i915/i915_vma.c 				rounddown_pow_of_two(vma->page_sizes.sg |
sg                645 drivers/gpu/drm/i915/i915_vma.c 			if (vma->page_sizes.sg & I915_GTT_PAGE_SIZE_64K)
sg                 62 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 	struct scatterlist *sg;
sg                 78 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 	for (sg = pages->sgl; sg; sg = sg_next(sg)) {
sg                 82 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 		sg_set_page(sg, pfn_to_page(PFN_BIAS), len, 0);
sg                 83 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 		sg_dma_address(sg) = page_to_phys(sg_page(sg));
sg                 84 drivers/gpu/drm/i915/selftests/i915_gem_gtt.c 		sg_dma_len(sg) = len;
sg                368 drivers/gpu/drm/i915/selftests/i915_vma.c 	       struct scatterlist *sg)
sg                377 drivers/gpu/drm/i915/selftests/i915_vma.c 			if (!sg) {
sg                386 drivers/gpu/drm/i915/selftests/i915_vma.c 			if (sg_dma_len(sg) != PAGE_SIZE) {
sg                388 drivers/gpu/drm/i915/selftests/i915_vma.c 				       sg_dma_len(sg), PAGE_SIZE,
sg                393 drivers/gpu/drm/i915/selftests/i915_vma.c 			if (sg_dma_address(sg) != src) {
sg                399 drivers/gpu/drm/i915/selftests/i915_vma.c 			sg = sg_next(sg);
sg                403 drivers/gpu/drm/i915/selftests/i915_vma.c 	return sg;
sg                418 drivers/gpu/drm/i915/selftests/i915_vma.c 		struct scatterlist *sg)
sg                429 drivers/gpu/drm/i915/selftests/i915_vma.c 			if (!sg) {
sg                436 drivers/gpu/drm/i915/selftests/i915_vma.c 				left = sg_dma_len(sg);
sg                444 drivers/gpu/drm/i915/selftests/i915_vma.c 				       sg_dma_len(sg), PAGE_SIZE,
sg                449 drivers/gpu/drm/i915/selftests/i915_vma.c 			if (sg_dma_address(sg) + offset != src) {
sg                460 drivers/gpu/drm/i915/selftests/i915_vma.c 				sg = sg_next(sg);
sg                464 drivers/gpu/drm/i915/selftests/i915_vma.c 	return sg;
sg                529 drivers/gpu/drm/i915/selftests/i915_vma.c 					struct scatterlist *sg;
sg                580 drivers/gpu/drm/i915/selftests/i915_vma.c 					sg = vma->pages->sgl;
sg                583 drivers/gpu/drm/i915/selftests/i915_vma.c 							sg = assert_rotated(obj, &view.rotated, n, sg);
sg                585 drivers/gpu/drm/i915/selftests/i915_vma.c 							sg = assert_remapped(obj, &view.remapped, n, sg);
sg                586 drivers/gpu/drm/i915/selftests/i915_vma.c 						if (IS_ERR(sg)) {
sg                 47 drivers/gpu/drm/i915/selftests/scatterlist.c 	struct scatterlist *sg;
sg                 51 drivers/gpu/drm/i915/selftests/scatterlist.c 	for_each_sg(pt->st.sgl, sg, pt->st.nents, n) {
sg                 52 drivers/gpu/drm/i915/selftests/scatterlist.c 		struct page *page = sg_page(sg);
sg                 61 drivers/gpu/drm/i915/selftests/scatterlist.c 		if (sg->length != npages * PAGE_SIZE) {
sg                 63 drivers/gpu/drm/i915/selftests/scatterlist.c 			       __func__, who, npages * PAGE_SIZE, sg->length);
sg                220 drivers/gpu/drm/i915/selftests/scatterlist.c 	struct scatterlist *sg;
sg                228 drivers/gpu/drm/i915/selftests/scatterlist.c 	GEM_BUG_ON(overflows_type(count * PAGE_SIZE, sg->length));
sg                236 drivers/gpu/drm/i915/selftests/scatterlist.c 	sg = pt->st.sgl;
sg                249 drivers/gpu/drm/i915/selftests/scatterlist.c 			sg = sg_next(sg);
sg                250 drivers/gpu/drm/i915/selftests/scatterlist.c 		sg_set_page(sg, pfn_to_page(pfn), npages * PAGE_SIZE, 0);
sg                252 drivers/gpu/drm/i915/selftests/scatterlist.c 		GEM_BUG_ON(page_to_pfn(sg_page(sg)) != pfn);
sg                253 drivers/gpu/drm/i915/selftests/scatterlist.c 		GEM_BUG_ON(sg->length != npages * PAGE_SIZE);
sg                254 drivers/gpu/drm/i915/selftests/scatterlist.c 		GEM_BUG_ON(sg->offset != 0);
sg                258 drivers/gpu/drm/i915/selftests/scatterlist.c 	sg_mark_end(sg);
sg                 86 drivers/gpu/drm/mediatek/mtk_drm_gem.c 	if (mtk_gem->sg)
sg                 87 drivers/gpu/drm/mediatek/mtk_drm_gem.c 		drm_prime_gem_destroy(obj, mtk_gem->sg);
sg                212 drivers/gpu/drm/mediatek/mtk_drm_gem.c 			struct dma_buf_attachment *attach, struct sg_table *sg)
sg                225 drivers/gpu/drm/mediatek/mtk_drm_gem.c 	expected = sg_dma_address(sg->sgl);
sg                226 drivers/gpu/drm/mediatek/mtk_drm_gem.c 	for_each_sg(sg->sgl, s, sg->nents, i) {
sg                235 drivers/gpu/drm/mediatek/mtk_drm_gem.c 	mtk_gem->dma_addr = sg_dma_address(sg->sgl);
sg                236 drivers/gpu/drm/mediatek/mtk_drm_gem.c 	mtk_gem->sg = sg;
sg                 31 drivers/gpu/drm/mediatek/mtk_drm_gem.h 	struct sg_table		*sg;
sg                 47 drivers/gpu/drm/mediatek/mtk_drm_gem.h 			struct dma_buf_attachment *attach, struct sg_table *sg);
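
The mediatek import path above only accepts a dma-buf whose mapped segments form one contiguous DMA run. A sketch of that check; sgt_is_contiguous() is a hypothetical name.

#include <linux/scatterlist.h>

static bool sgt_is_contiguous(struct sg_table *sgt)
{
	struct scatterlist *s;
	dma_addr_t expected = sg_dma_address(sgt->sgl);
	unsigned int i;

	for_each_sg(sgt->sgl, s, sgt->nents, i) {
		if (sg_dma_address(s) != expected)
			return false;	/* gap: not usable without an IOMMU */
		expected = sg_dma_address(s) + sg_dma_len(s);
	}

	return true;
}
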
sg                293 drivers/gpu/drm/msm/msm_drv.h 		struct dma_buf_attachment *attach, struct sg_table *sg);
sg                 47 drivers/gpu/drm/msm/msm_gem_prime.c 		struct dma_buf_attachment *attach, struct sg_table *sg)
sg                 49 drivers/gpu/drm/msm/msm_gem_prime.c 	return msm_gem_import(dev, attach->dmabuf, sg);
sg                 40 drivers/gpu/drm/msm/msm_gpummu.c 	struct scatterlist *sg;
sg                 49 drivers/gpu/drm/msm/msm_gpummu.c 	for_each_sg(sgt->sgl, sg, sgt->nents, i) {
sg                 50 drivers/gpu/drm/msm/msm_gpummu.c 		dma_addr_t addr = sg->dma_address;
sg                 51 drivers/gpu/drm/msm/msm_gpummu.c 		for (j = 0; j < sg->length / GPUMMU_PAGE_SIZE; j++, idx++) {
sg                297 drivers/gpu/drm/nouveau/nouveau_bo.c 		struct sg_table *sg, struct dma_resv *robj)
sg                299 drivers/gpu/drm/nouveau/nouveau_bo.c 	int type = sg ? ttm_bo_type_sg : ttm_bo_type_device;
sg                310 drivers/gpu/drm/nouveau/nouveau_bo.c 			  acc_size, sg, robj, nouveau_bo_del_ttm);
sg                322 drivers/gpu/drm/nouveau/nouveau_bo.c 	       struct sg_table *sg, struct dma_resv *robj,
sg                333 drivers/gpu/drm/nouveau/nouveau_bo.c 	ret = nouveau_bo_init(nvbo, size, align, flags, sg, robj);
sg               1596 drivers/gpu/drm/nouveau/nouveau_bo.c 	if (slave && ttm->sg) {
sg               1598 drivers/gpu/drm/nouveau/nouveau_bo.c 		drm_prime_sg_to_page_addr_arrays(ttm->sg, ttm->pages,
sg                 77 drivers/gpu/drm/nouveau/nouveau_bo.h 		     struct sg_table *sg, struct dma_resv *robj);
sg                 79 drivers/gpu/drm/nouveau/nouveau_bo.h 		    u32 tile_mode, u32 tile_flags, struct sg_table *sg,
sg                 52 drivers/gpu/drm/nouveau/nouveau_gem.c 		drm_prime_gem_destroy(gem, nvbo->bo.sg);
sg                119 drivers/gpu/drm/nouveau/nouveau_mem.c 	if (tt->ttm.sg) args.sgl = tt->ttm.sg->sgl;
sg                 60 drivers/gpu/drm/nouveau/nouveau_prime.c 							 struct sg_table *sg)
sg                 91 drivers/gpu/drm/nouveau/nouveau_prime.c 	ret = nouveau_bo_init(nvbo, size, align, flags, sg, robj);
sg                 23 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c 	struct sg_table *sg;
sg                 27 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c 	sg = kzalloc(sizeof(*sg), GFP_KERNEL);
sg                 28 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c 	if (!sg)
sg                 38 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c 	ret = sg_alloc_table(sg, 1, GFP_KERNEL);
sg                 42 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c 	sg_init_table(sg->sgl, 1);
sg                 43 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c 	sg_dma_len(sg->sgl) = obj->size;
sg                 44 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c 	sg_set_page(sg->sgl, pfn_to_page(PFN_DOWN(dma_addr)), obj->size, 0);
sg                 45 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c 	sg_dma_address(sg->sgl) = dma_addr;
sg                 50 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c 	return sg;
sg                 52 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c 	kfree(sg);
sg                 57 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c 		struct sg_table *sg, enum dma_data_direction dir)
sg                 61 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c 	sg_free_table(sg);
sg                 62 drivers/gpu/drm/omapdrm/omap_gem_dmabuf.c 	kfree(sg);
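
omap_gem_dmabuf.c wraps a single contiguous buffer in a one-entry table. A sketch of the same construction, assuming the buffer has valid struct pages behind it; sgt_for_contig() is hypothetical.

#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/err.h>
#include <linux/pfn.h>

static struct sg_table *sgt_for_contig(dma_addr_t dma_addr, size_t size)
{
	struct sg_table *sgt;

	sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);
	if (!sgt)
		return ERR_PTR(-ENOMEM);

	if (sg_alloc_table(sgt, 1, GFP_KERNEL)) {
		kfree(sgt);
		return ERR_PTR(-ENOMEM);
	}

	sg_set_page(sgt->sgl, pfn_to_page(PFN_DOWN(dma_addr)), size, 0);
	sg_dma_address(sgt->sgl) = dma_addr;
	sg_dma_len(sgt->sgl) = size;

	return sgt;
}
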
sg                327 drivers/gpu/drm/r128/r128_cce.c 		    (unsigned long)dev->sg->virtual;
sg                367 drivers/gpu/drm/r128/r128_cce.c 	if (dev_priv->is_pci && !dev->sg) {
sg                544 drivers/gpu/drm/r128/r128_cce.c 		dev_priv->cce_buffers_offset = (unsigned long)dev->sg->virtual;
sg                154 drivers/gpu/drm/radeon/radeon_drv.c 							struct sg_table *sg);
sg                 89 drivers/gpu/drm/radeon/radeon_object.c 		drm_prime_gem_destroy(&bo->tbo.base, bo->tbo.sg);
sg                185 drivers/gpu/drm/radeon/radeon_object.c 		     u32 domain, u32 flags, struct sg_table *sg,
sg                199 drivers/gpu/drm/radeon/radeon_object.c 	} else if (sg) {
sg                264 drivers/gpu/drm/radeon/radeon_object.c 			sg, resv, &radeon_ttm_bo_destroy);
sg                128 drivers/gpu/drm/radeon/radeon_object.h 			    struct sg_table *sg,
sg                 64 drivers/gpu/drm/radeon/radeon_prime.c 							struct sg_table *sg)
sg                 73 drivers/gpu/drm/radeon/radeon_prime.c 			       RADEON_GEM_DOMAIN_GTT, 0, sg, resv, &bo);
sg                523 drivers/gpu/drm/radeon/radeon_ttm.c 	r = sg_alloc_table_from_pages(ttm->sg, ttm->pages, ttm->num_pages, 0,
sg                530 drivers/gpu/drm/radeon/radeon_ttm.c 	nents = dma_map_sg(rdev->dev, ttm->sg->sgl, ttm->sg->nents, direction);
sg                531 drivers/gpu/drm/radeon/radeon_ttm.c 	if (nents != ttm->sg->nents)
sg                534 drivers/gpu/drm/radeon/radeon_ttm.c 	drm_prime_sg_to_page_addr_arrays(ttm->sg, ttm->pages,
sg                540 drivers/gpu/drm/radeon/radeon_ttm.c 	kfree(ttm->sg);
sg                558 drivers/gpu/drm/radeon/radeon_ttm.c 	if (!ttm->sg->sgl)
sg                562 drivers/gpu/drm/radeon/radeon_ttm.c 	dma_unmap_sg(rdev->dev, ttm->sg->sgl, ttm->sg->nents, direction);
sg                564 drivers/gpu/drm/radeon/radeon_ttm.c 	for_each_sg_page(ttm->sg->sgl, &sg_iter, ttm->sg->nents, 0) {
sg                573 drivers/gpu/drm/radeon/radeon_ttm.c 	sg_free_table(ttm->sg);
sg                674 drivers/gpu/drm/radeon/radeon_ttm.c 		ttm->sg = kzalloc(sizeof(struct sg_table), GFP_KERNEL);
sg                675 drivers/gpu/drm/radeon/radeon_ttm.c 		if (!ttm->sg)
sg                683 drivers/gpu/drm/radeon/radeon_ttm.c 	if (slave && ttm->sg) {
sg                684 drivers/gpu/drm/radeon/radeon_ttm.c 		drm_prime_sg_to_page_addr_arrays(ttm->sg, ttm->pages,
sg                713 drivers/gpu/drm/radeon/radeon_ttm.c 		kfree(ttm->sg);
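
The radeon userptr path above pins user pages, folds them into a table with sg_alloc_table_from_pages(), and DMA-maps the result. A condensed sketch; map_user_pages() is hypothetical, and radeon additionally treats any segment merging as a failure.

#include <linux/scatterlist.h>
#include <linux/dma-mapping.h>

static int map_user_pages(struct device *dev, struct sg_table *sgt,
			  struct page **pages, unsigned int npages,
			  enum dma_data_direction dir)
{
	int nents, ret;

	ret = sg_alloc_table_from_pages(sgt, pages, npages, 0,
					(unsigned long)npages << PAGE_SHIFT,
					GFP_KERNEL);
	if (ret)
		return ret;

	nents = dma_map_sg(dev, sgt->sgl, sgt->orig_nents, dir);
	if (!nents) {
		sg_free_table(sgt);
		return -ENOMEM;
	}

	sgt->nents = nents;	/* the IOMMU may have merged segments */
	return 0;
}
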
sg                482 drivers/gpu/drm/rockchip/rockchip_drm_gem.c 			  struct sg_table *sg,
sg                485 drivers/gpu/drm/rockchip/rockchip_drm_gem.c 	rk_obj->sgt = sg;
sg                492 drivers/gpu/drm/rockchip/rockchip_drm_gem.c 			struct sg_table *sg,
sg                495 drivers/gpu/drm/rockchip/rockchip_drm_gem.c 	int count = dma_map_sg(drm->dev, sg->sgl, sg->nents,
sg                500 drivers/gpu/drm/rockchip/rockchip_drm_gem.c 	if (rockchip_sg_get_contiguous_size(sg, count) < attach->dmabuf->size) {
sg                502 drivers/gpu/drm/rockchip/rockchip_drm_gem.c 		dma_unmap_sg(drm->dev, sg->sgl, sg->nents,
sg                507 drivers/gpu/drm/rockchip/rockchip_drm_gem.c 	rk_obj->dma_addr = sg_dma_address(sg->sgl);
sg                508 drivers/gpu/drm/rockchip/rockchip_drm_gem.c 	rk_obj->sgt = sg;
sg                515 drivers/gpu/drm/rockchip/rockchip_drm_gem.c 				   struct sg_table *sg)
sg                526 drivers/gpu/drm/rockchip/rockchip_drm_gem.c 		ret = rockchip_gem_iommu_map_sg(drm, attach, sg, rk_obj);
sg                528 drivers/gpu/drm/rockchip/rockchip_drm_gem.c 		ret = rockchip_gem_dma_map_sg(drm, attach, sg, rk_obj);
sg                 33 drivers/gpu/drm/rockchip/rockchip_drm_gem.h 				   struct sg_table *sg);
sg                511 drivers/gpu/drm/tegra/gem.c 		struct scatterlist *sg;
sg                517 drivers/gpu/drm/tegra/gem.c 		for_each_sg(sgt->sgl, sg, bo->num_pages, i)
sg                518 drivers/gpu/drm/tegra/gem.c 			sg_set_page(sg, bo->pages[i], PAGE_SIZE, 0);
sg               1295 drivers/gpu/drm/ttm/ttm_bo.c 			 struct sg_table *sg,
sg               1347 drivers/gpu/drm/ttm/ttm_bo.c 	bo->sg = sg;
sg               1410 drivers/gpu/drm/ttm/ttm_bo.c 		struct sg_table *sg,
sg               1419 drivers/gpu/drm/ttm/ttm_bo.c 				   sg, resv, destroy);
sg                235 drivers/gpu/drm/ttm/ttm_tt.c 	ttm->sg = bo->sg;
sg                 91 drivers/gpu/drm/udl/udl_dmabuf.c 	obj->sg = drm_prime_pages_to_sg(obj->pages, page_count);
sg                 92 drivers/gpu/drm/udl/udl_dmabuf.c 	if (IS_ERR(obj->sg)) {
sg                 94 drivers/gpu/drm/udl/udl_dmabuf.c 		return ERR_CAST(obj->sg);
sg                 99 drivers/gpu/drm/udl/udl_dmabuf.c 	ret = sg_alloc_table(sgt, obj->sg->orig_nents, GFP_KERNEL);
sg                107 drivers/gpu/drm/udl/udl_dmabuf.c 	rd = obj->sg->sgl;
sg                189 drivers/gpu/drm/udl/udl_dmabuf.c 			    struct sg_table *sg,
sg                202 drivers/gpu/drm/udl/udl_dmabuf.c 	obj->sg = sg;
sg                209 drivers/gpu/drm/udl/udl_dmabuf.c 	drm_prime_sg_to_page_addr_arrays(sg, obj->pages, NULL, npages);
sg                219 drivers/gpu/drm/udl/udl_dmabuf.c 	struct sg_table *sg;
sg                233 drivers/gpu/drm/udl/udl_dmabuf.c 	sg = dma_buf_map_attachment(attach, DMA_BIDIRECTIONAL);
sg                234 drivers/gpu/drm/udl/udl_dmabuf.c 	if (IS_ERR(sg)) {
sg                235 drivers/gpu/drm/udl/udl_dmabuf.c 		ret = PTR_ERR(sg);
sg                239 drivers/gpu/drm/udl/udl_dmabuf.c 	ret = udl_prime_create(dev, dma_buf->size, sg, &uobj);
sg                249 drivers/gpu/drm/udl/udl_dmabuf.c 	dma_buf_unmap_attachment(attach, sg, DMA_BIDIRECTIONAL);
sg                 83 drivers/gpu/drm/udl/udl_drv.h 	struct sg_table *sg;
sg                189 drivers/gpu/drm/udl/udl_gem.c 		drm_prime_gem_destroy(gem_obj, obj->sg);
sg                361 drivers/gpu/drm/vgem/vgem_drv.c 			struct dma_buf_attachment *attach, struct sg_table *sg)
sg                372 drivers/gpu/drm/vgem/vgem_drv.c 	obj->table = sg;
sg                967 drivers/gpu/drm/virtio/virtgpu_vq.c 	struct scatterlist *sg;
sg                998 drivers/gpu/drm/virtio/virtgpu_vq.c 	for_each_sg(obj->pages->sgl, sg, nents, si) {
sg               1000 drivers/gpu/drm/virtio/virtgpu_vq.c 					    ? sg_dma_address(sg)
sg               1001 drivers/gpu/drm/virtio/virtgpu_vq.c 					    : sg_phys(sg));
sg               1002 drivers/gpu/drm/virtio/virtgpu_vq.c 		ents[si].length = cpu_to_le32(sg->length);
sg                129 drivers/gpu/host1x/job.c 		struct scatterlist *sg;
sg                145 drivers/gpu/host1x/job.c 			for_each_sg(sgt->sgl, sg, sgt->nents, j)
sg                146 drivers/gpu/host1x/job.c 				gather_size += sg->length;
sg                173 drivers/hsi/clients/ssi_protocol.c 	struct scatterlist *sg;
sg                178 drivers/hsi/clients/ssi_protocol.c 	sg = msg->sgt.sgl;
sg                179 drivers/hsi/clients/ssi_protocol.c 	sg_set_buf(sg, skb->data, skb_headlen(skb));
sg                181 drivers/hsi/clients/ssi_protocol.c 		sg = sg_next(sg);
sg                182 drivers/hsi/clients/ssi_protocol.c 		BUG_ON(!sg);
sg                184 drivers/hsi/clients/ssi_protocol.c 		sg_set_page(sg, skb_frag_page(frag), skb_frag_size(frag),
sg                115 drivers/hsi/controllers/omap_ssi.h 	struct scatterlist	*sg;
sg                200 drivers/hsi/controllers/omap_ssi_port.c 			omap_ssi->gdd_trn[lch].sg = msg->sgt.sgl;
sg                322 drivers/hwtracing/intel_th/msu.c 	struct scatterlist *sg;
sg                326 drivers/hwtracing/intel_th/msu.c 	for_each_sg(win->sgt->sgl, sg, win->nr_segs, blk) {
sg                327 drivers/hwtracing/intel_th/msu.c 		struct msc_block_desc *bdesc = sg_virt(sg);
sg                410 drivers/hwtracing/intel_th/msu.c 	struct scatterlist *sg;
sg                421 drivers/hwtracing/intel_th/msu.c 	for_each_sg(win->sgt->sgl, sg, win->nr_segs, blk) {
sg                422 drivers/hwtracing/intel_th/msu.c 		struct msc_block_desc *bdesc = sg_virt(sg);
sg                425 drivers/hwtracing/intel_th/msu.c 			return sg;
sg                653 drivers/hwtracing/intel_th/msu.c 	struct scatterlist *sg;
sg                660 drivers/hwtracing/intel_th/msu.c 		for_each_sg(win->sgt->sgl, sg, win->nr_segs, blk) {
sg                661 drivers/hwtracing/intel_th/msu.c 			struct msc_block_desc *bdesc = sg_virt(sg);
sg               1120 drivers/hwtracing/intel_th/msu.c 	struct scatterlist *sg;
sg               1123 drivers/hwtracing/intel_th/msu.c 	for_each_sg(win->sgt->sgl, sg, win->nr_segs, i) {
sg               1124 drivers/hwtracing/intel_th/msu.c 		struct page *page = sg_page(sg);
sg               1128 drivers/hwtracing/intel_th/msu.c 				  sg_virt(sg), sg_dma_address(sg));
sg               1174 drivers/hwtracing/intel_th/msu.c 		struct scatterlist *sg;
sg               1190 drivers/hwtracing/intel_th/msu.c 		for_each_sg(win->sgt->sgl, sg, win->nr_segs, blk) {
sg               1191 drivers/hwtracing/intel_th/msu.c 			struct msc_block_desc *bdesc = sg_virt(sg);
sg               1205 drivers/hwtracing/intel_th/msu.c 				dma_addr_t addr = sg_dma_address(sg_next(sg));
sg               1211 drivers/hwtracing/intel_th/msu.c 			bdesc->block_sz = sg->length / 64;
sg               1370 drivers/hwtracing/intel_th/msu.c 	struct scatterlist *sg;
sg               1385 drivers/hwtracing/intel_th/msu.c 	for_each_sg(win->sgt->sgl, sg, win->nr_segs, blk) {
sg               1386 drivers/hwtracing/intel_th/msu.c 		struct page *page = sg_page(sg);
sg               1387 drivers/hwtracing/intel_th/msu.c 		size_t pgsz = PFN_DOWN(sg->length);
sg                108 drivers/i2c/busses/i2c-at91-master.c 		dma_unmap_single(dev->dev, sg_dma_address(&dma->sg[0]),
sg                140 drivers/i2c/busses/i2c-at91-master.c 	dma_unmap_single(dev->dev, sg_dma_address(&dev->dma.sg[0]),
sg                180 drivers/i2c/busses/i2c-at91-master.c 		struct scatterlist *sg;
sg                187 drivers/i2c/busses/i2c-at91-master.c 			sg = &dma->sg[sg_len++];
sg                188 drivers/i2c/busses/i2c-at91-master.c 			sg_dma_len(sg) = part1_len;
sg                189 drivers/i2c/busses/i2c-at91-master.c 			sg_dma_address(sg) = dma_addr;
sg                194 drivers/i2c/busses/i2c-at91-master.c 			sg = &dma->sg[sg_len++];
sg                195 drivers/i2c/busses/i2c-at91-master.c 			sg_dma_len(sg) = part2_len;
sg                196 drivers/i2c/busses/i2c-at91-master.c 			sg_dma_address(sg) = dma_addr + part1_len;
sg                208 drivers/i2c/busses/i2c-at91-master.c 		sg_dma_len(&dma->sg[0]) = dev->buf_len;
sg                209 drivers/i2c/busses/i2c-at91-master.c 		sg_dma_address(&dma->sg[0]) = dma_addr;
sg                212 drivers/i2c/busses/i2c-at91-master.c 	txdesc = dmaengine_prep_slave_sg(chan_tx, dma->sg, sg_len,
sg                282 drivers/i2c/busses/i2c-at91-master.c 	dma_unmap_single(dev->dev, sg_dma_address(&dev->dma.sg[0]),
sg                328 drivers/i2c/busses/i2c-at91-master.c 	sg_dma_len(&dma->sg[0]) = buf_len;
sg                329 drivers/i2c/busses/i2c-at91-master.c 	sg_dma_address(&dma->sg[0]) = dma_addr;
sg                331 drivers/i2c/busses/i2c-at91-master.c 	rxdesc = dmaengine_prep_slave_sg(chan_rx, dma->sg, 1, DMA_DEV_TO_MEM,
sg                751 drivers/i2c/busses/i2c-at91-master.c 	sg_init_table(dma->sg, 2);
sg                117 drivers/i2c/busses/i2c-at91.h 	struct scatterlist sg[2];
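
i2c-at91 describes one dma_map_single()'d buffer with up to two scatterlist entries so a transfer can be cut at an arbitrary point (its alternate-command mode). Since the buffer is already mapped, only the DMA fields are filled in; a sketch, with split_into_two_sg() as a hypothetical name:

#include <linux/scatterlist.h>

static void split_into_two_sg(struct scatterlist sg[2], dma_addr_t dma_addr,
			      unsigned int part1_len, unsigned int part2_len)
{
	sg_init_table(sg, 2);	/* also marks sg[1] as the end */

	sg_dma_address(&sg[0]) = dma_addr;
	sg_dma_len(&sg[0]) = part1_len;

	sg_dma_address(&sg[1]) = dma_addr + part1_len;
	sg_dma_len(&sg[1]) = part2_len;
}

For an unsplit transfer the driver fills only sg[0] and passes a segment count of 1 to dmaengine_prep_slave_sg().
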
sg                225 drivers/i2c/busses/i2c-qup.c 	struct	scatterlist *sg;
sg                602 drivers/i2c/busses/i2c-qup.c static int qup_sg_set_buf(struct scatterlist *sg, void *buf,
sg                608 drivers/i2c/busses/i2c-qup.c 	sg_set_buf(sg, buf, buflen);
sg                609 drivers/i2c/busses/i2c-qup.c 	ret = dma_map_sg(qup->dev, sg, 1, dir);
sg                674 drivers/i2c/busses/i2c-qup.c 			ret = qup_sg_set_buf(&qup->brx.sg[qup->brx.sg_cnt++],
sg                681 drivers/i2c/busses/i2c-qup.c 			ret = qup_sg_set_buf(&qup->brx.sg[qup->brx.sg_cnt++],
sg                691 drivers/i2c/busses/i2c-qup.c 		ret = qup_sg_set_buf(&qup->btx.sg[qup->btx.sg_cnt++],
sg                705 drivers/i2c/busses/i2c-qup.c 			ret = qup_sg_set_buf(&qup->btx.sg[qup->btx.sg_cnt++],
sg                712 drivers/i2c/busses/i2c-qup.c 			ret = qup_sg_set_buf(&qup->btx.sg[qup->btx.sg_cnt++],
sg                742 drivers/i2c/busses/i2c-qup.c 		ret = qup_sg_set_buf(&qup->brx.sg[rx_cnt++],
sg                750 drivers/i2c/busses/i2c-qup.c 	ret = qup_sg_set_buf(&qup->btx.sg[tx_cnt++], &qup->btx.tag.start[0],
sg                755 drivers/i2c/busses/i2c-qup.c 	txd = dmaengine_prep_slave_sg(qup->btx.dma, qup->btx.sg, tx_cnt,
sg                778 drivers/i2c/busses/i2c-qup.c 		rxd = dmaengine_prep_slave_sg(qup->brx.dma, qup->brx.sg,
sg                824 drivers/i2c/busses/i2c-qup.c 	dma_unmap_sg(qup->dev, qup->btx.sg, tx_cnt, DMA_TO_DEVICE);
sg                827 drivers/i2c/busses/i2c-qup.c 		dma_unmap_sg(qup->dev, qup->brx.sg, rx_cnt,
sg               1715 drivers/i2c/busses/i2c-qup.c 		qup->btx.sg = devm_kcalloc(&pdev->dev,
sg               1716 drivers/i2c/busses/i2c-qup.c 					   blocks, sizeof(*qup->btx.sg),
sg               1718 drivers/i2c/busses/i2c-qup.c 		if (!qup->btx.sg) {
sg               1722 drivers/i2c/busses/i2c-qup.c 		sg_init_table(qup->btx.sg, blocks);
sg               1724 drivers/i2c/busses/i2c-qup.c 		qup->brx.sg = devm_kcalloc(&pdev->dev,
sg               1725 drivers/i2c/busses/i2c-qup.c 					   blocks, sizeof(*qup->brx.sg),
sg               1727 drivers/i2c/busses/i2c-qup.c 		if (!qup->brx.sg) {
sg               1731 drivers/i2c/busses/i2c-qup.c 		sg_init_table(qup->brx.sg, blocks);
sg                138 drivers/i2c/busses/i2c-rcar.c 	struct scatterlist sg;
sg                368 drivers/i2c/busses/i2c-rcar.c 	dma_unmap_single(chan->device->dev, sg_dma_address(&priv->sg),
sg                369 drivers/i2c/busses/i2c-rcar.c 			 sg_dma_len(&priv->sg), priv->dma_direction);
sg                398 drivers/i2c/busses/i2c-rcar.c 	priv->pos += sg_dma_len(&priv->sg);
sg                442 drivers/i2c/busses/i2c-rcar.c 	sg_dma_len(&priv->sg) = len;
sg                443 drivers/i2c/busses/i2c-rcar.c 	sg_dma_address(&priv->sg) = dma_addr;
sg                447 drivers/i2c/busses/i2c-rcar.c 	txdesc = dmaengine_prep_slave_sg(chan, &priv->sg, 1,
sg                963 drivers/i2c/busses/i2c-rcar.c 	sg_init_table(&priv->sg, 1);
sg                136 drivers/i2c/busses/i2c-sh_mobile.c 	struct scatterlist sg;
sg                452 drivers/i2c/busses/i2c-sh_mobile.c 	dma_unmap_single(chan->device->dev, sg_dma_address(&pd->sg),
sg                544 drivers/i2c/busses/i2c-sh_mobile.c 	sg_dma_len(&pd->sg) = pd->msg->len;
sg                545 drivers/i2c/busses/i2c-sh_mobile.c 	sg_dma_address(&pd->sg) = dma_addr;
sg                549 drivers/i2c/busses/i2c-sh_mobile.c 	txdesc = dmaengine_prep_slave_sg(chan, &pd->sg, 1,
sg                877 drivers/i2c/busses/i2c-sh_mobile.c 	sg_init_table(&pd->sg, 1);
sg                219 drivers/ide/au1xxx-ide.c 	struct scatterlist *sg;
sg                227 drivers/ide/au1xxx-ide.c 	sg = hwif->sg_table;
sg                228 drivers/ide/au1xxx-ide.c 	while (i && sg_dma_len(sg)) {
sg                232 drivers/ide/au1xxx-ide.c 		cur_addr = sg_dma_address(sg);
sg                233 drivers/ide/au1xxx-ide.c 		cur_len = sg_dma_len(sg);
sg                253 drivers/ide/au1xxx-ide.c 					sg_phys(sg), tc, flags)) {
sg                259 drivers/ide/au1xxx-ide.c 					sg_phys(sg), tc, flags)) {
sg                268 drivers/ide/au1xxx-ide.c 		sg = sg_next(sg);
sg                121 drivers/ide/ide-dma-sff.c 	struct scatterlist *sg;
sg                124 drivers/ide/ide-dma-sff.c 	for_each_sg(hwif->sg_table, sg, cmd->sg_nents, i) {
sg                127 drivers/ide/ide-dma-sff.c 		cur_addr = sg_dma_address(sg);
sg                128 drivers/ide/ide-dma-sff.c 		cur_len = sg_dma_len(sg);
sg                136 drivers/ide/ide-dma.c 	struct scatterlist *sg = hwif->sg_table;
sg                144 drivers/ide/ide-dma.c 	i = dma_map_sg(hwif->dev, sg, cmd->sg_nents, cmd->sg_dma_direction);
sg                236 drivers/ide/ide-io.c 	struct scatterlist *sg = hwif->sg_table;
sg                239 drivers/ide/ide-io.c 	cmd->sg_nents = blk_rq_map_sg(drive->queue, rq, sg);
sg                230 drivers/ide/ide-taskfile.c 	struct scatterlist *sg = hwif->sg_table;
sg                238 drivers/ide/ide-taskfile.c 		cursg = cmd->cursg = sg;
sg               1459 drivers/ide/pmac.c 	struct scatterlist *sg;
sg               1472 drivers/ide/pmac.c 	sg = hwif->sg_table;
sg               1473 drivers/ide/pmac.c 	while (i && sg_dma_len(sg)) {
sg               1477 drivers/ide/pmac.c 		cur_addr = sg_dma_address(sg);
sg               1478 drivers/ide/pmac.c 		cur_len = sg_dma_len(sg);
sg               1506 drivers/ide/pmac.c 		sg = sg_next(sg);
sg                240 drivers/ide/tx4939ide.c 	struct scatterlist *sg;
sg                242 drivers/ide/tx4939ide.c 	for_each_sg(hwif->sg_table, sg, cmd->sg_nents, i) {
sg                245 drivers/ide/tx4939ide.c 		cur_addr = sg_dma_address(sg);
sg                246 drivers/ide/tx4939ide.c 		cur_len = sg_dma_len(sg);
sg                 86 drivers/infiniband/core/rw.c 		struct rdma_rw_reg_ctx *reg, struct scatterlist *sg,
sg                100 drivers/infiniband/core/rw.c 	ret = ib_map_mr_sg(reg->mr, sg, nents, &offset, PAGE_SIZE);
sg                119 drivers/infiniband/core/rw.c 		u8 port_num, struct scatterlist *sg, u32 sg_cnt, u32 offset,
sg                138 drivers/infiniband/core/rw.c 		ret = rdma_rw_init_one_mr(qp, port_num, reg, sg, sg_cnt,
sg                170 drivers/infiniband/core/rw.c 			sg = sg_next(sg);
sg                190 drivers/infiniband/core/rw.c 		struct scatterlist *sg, u32 sg_cnt, u32 offset,
sg                221 drivers/infiniband/core/rw.c 		for (j = 0; j < nr_sge; j++, sg = sg_next(sg)) {
sg                222 drivers/infiniband/core/rw.c 			sge->addr = sg_dma_address(sg) + offset;
sg                223 drivers/infiniband/core/rw.c 			sge->length = sg_dma_len(sg) - offset;
sg                246 drivers/infiniband/core/rw.c 		struct scatterlist *sg, u32 offset, u64 remote_addr, u32 rkey,
sg                254 drivers/infiniband/core/rw.c 	ctx->single.sge.addr = sg_dma_address(sg) + offset;
sg                255 drivers/infiniband/core/rw.c 	ctx->single.sge.length = sg_dma_len(sg) - offset;
sg                271 drivers/infiniband/core/rw.c static void rdma_rw_unmap_sg(struct ib_device *dev, struct scatterlist *sg,
sg                274 drivers/infiniband/core/rw.c 	if (is_pci_p2pdma_page(sg_page(sg)))
sg                275 drivers/infiniband/core/rw.c 		pci_p2pdma_unmap_sg(dev->dma_device, sg, sg_cnt, dir);
sg                277 drivers/infiniband/core/rw.c 		ib_dma_unmap_sg(dev, sg, sg_cnt, dir);
sg                280 drivers/infiniband/core/rw.c static int rdma_rw_map_sg(struct ib_device *dev, struct scatterlist *sg,
sg                283 drivers/infiniband/core/rw.c 	if (is_pci_p2pdma_page(sg_page(sg)))
sg                284 drivers/infiniband/core/rw.c 		return pci_p2pdma_map_sg(dev->dma_device, sg, sg_cnt, dir);
sg                285 drivers/infiniband/core/rw.c 	return ib_dma_map_sg(dev, sg, sg_cnt, dir);
sg                304 drivers/infiniband/core/rw.c 		struct scatterlist *sg, u32 sg_cnt, u32 sg_offset,
sg                310 drivers/infiniband/core/rw.c 	ret = rdma_rw_map_sg(dev, sg, sg_cnt, dir);
sg                319 drivers/infiniband/core/rw.c 		u32 len = sg_dma_len(sg);
sg                324 drivers/infiniband/core/rw.c 		sg = sg_next(sg);
sg                334 drivers/infiniband/core/rw.c 		ret = rdma_rw_init_mr_wrs(ctx, qp, port_num, sg, sg_cnt,
sg                337 drivers/infiniband/core/rw.c 		ret = rdma_rw_init_map_wrs(ctx, qp, sg, sg_cnt, sg_offset,
sg                340 drivers/infiniband/core/rw.c 		ret = rdma_rw_init_single_wr(ctx, qp, sg, sg_offset,
sg                349 drivers/infiniband/core/rw.c 	rdma_rw_unmap_sg(dev, sg, sg_cnt, dir);
sg                372 drivers/infiniband/core/rw.c 		u8 port_num, struct scatterlist *sg, u32 sg_cnt,
sg                389 drivers/infiniband/core/rw.c 	ret = ib_dma_map_sg(dev, sg, sg_cnt, dir);
sg                421 drivers/infiniband/core/rw.c 	ret = ib_map_mr_sg_pi(ctx->reg->mr, sg, sg_cnt, NULL, prot_sg,
sg                466 drivers/infiniband/core/rw.c 	ib_dma_unmap_sg(dev, sg, sg_cnt, dir);
sg                578 drivers/infiniband/core/rw.c 		struct scatterlist *sg, u32 sg_cnt, enum dma_data_direction dir)
sg                599 drivers/infiniband/core/rw.c 	rdma_rw_unmap_sg(qp->pd->device, sg, sg_cnt, dir);
sg                616 drivers/infiniband/core/rw.c 		u8 port_num, struct scatterlist *sg, u32 sg_cnt,
sg                626 drivers/infiniband/core/rw.c 	ib_dma_unmap_sg(qp->pd->device, sg, sg_cnt, dir);
sg                 73 drivers/infiniband/core/umem.c static struct scatterlist *ib_umem_add_sg_table(struct scatterlist *sg,
sg                 82 drivers/infiniband/core/umem.c 	bool first = !sg_page(sg);
sg                 87 drivers/infiniband/core/umem.c 	if (!first && (page_to_pfn(sg_page(sg)) + (sg->length >> PAGE_SHIFT) ==
sg                108 drivers/infiniband/core/umem.c 			if ((max_seg_sz - sg->length) >= (len << PAGE_SHIFT)) {
sg                109 drivers/infiniband/core/umem.c 				sg_set_page(sg, sg_page(sg),
sg                110 drivers/infiniband/core/umem.c 					    sg->length + (len << PAGE_SHIFT),
sg                120 drivers/infiniband/core/umem.c 			sg = sg_next(sg);
sg                123 drivers/infiniband/core/umem.c 		sg_set_page(sg, first_page, len << PAGE_SHIFT, 0);
sg                127 drivers/infiniband/core/umem.c 	return sg;
sg                148 drivers/infiniband/core/umem.c 	struct scatterlist *sg;
sg                164 drivers/infiniband/core/umem.c 	for_each_sg(umem->sg_head.sgl, sg, umem->nmap, i) {
sg                168 drivers/infiniband/core/umem.c 		mask |= (sg_dma_address(sg) + pgoff) ^ va;
sg                169 drivers/infiniband/core/umem.c 		va += sg_dma_len(sg) - pgoff;
sg                206 drivers/infiniband/core/umem.c 	struct scatterlist *sg;
sg                274 drivers/infiniband/core/umem.c 	sg = umem->sg_head.sgl;
sg                291 drivers/infiniband/core/umem.c 		sg = ib_umem_add_sg_table(sg, page_list, ret,
sg                298 drivers/infiniband/core/umem.c 	sg_mark_end(sg);
sg                351 drivers/infiniband/core/umem.c 	struct scatterlist *sg;
sg                353 drivers/infiniband/core/umem.c 	for_each_sg(umem->sg_head.sgl, sg, umem->nmap, i)
sg                354 drivers/infiniband/core/umem.c 		n += sg_dma_len(sg) >> PAGE_SHIFT;
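
The last umem loop above sums device-visible pages across the mapped table; in isolation:

#include <linux/scatterlist.h>

static unsigned long count_dma_pages(struct sg_table *sgt)
{
	struct scatterlist *sg;
	unsigned long n = 0;
	unsigned int i;

	for_each_sg(sgt->sgl, sg, sgt->nents, i)
		n += sg_dma_len(sg) >> PAGE_SHIFT;

	return n;
}
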
sg               2514 drivers/infiniband/core/verbs.c int ib_map_mr_sg(struct ib_mr *mr, struct scatterlist *sg, int sg_nents,
sg               2522 drivers/infiniband/core/verbs.c 	return mr->device->ops.map_mr_sg(mr, sg, sg_nents, sg_offset);
sg               2549 drivers/infiniband/core/verbs.c 	struct scatterlist *sg;
sg               2562 drivers/infiniband/core/verbs.c 	for_each_sg(sgl, sg, sg_nents, i) {
sg               2563 drivers/infiniband/core/verbs.c 		u64 dma_addr = sg_dma_address(sg) + sg_offset;
sg               2565 drivers/infiniband/core/verbs.c 		unsigned int dma_len = sg_dma_len(sg) - sg_offset;
sg               2590 drivers/infiniband/core/verbs.c 				sg_offset = prev_addr - sg_dma_address(sg);
sg               3356 drivers/infiniband/hw/bnxt_re/ib_verbs.c int bnxt_re_map_mr_sg(struct ib_mr *ib_mr, struct scatterlist *sg, int sg_nents,
sg               3362 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	return ib_sg_to_pages(ib_mr, sg, sg_nents, sg_offset, bnxt_re_set_page);
sg                200 drivers/infiniband/hw/bnxt_re/ib_verbs.h int bnxt_re_map_mr_sg(struct ib_mr *ib_mr, struct scatterlist *sg, int sg_nents,
sg                655 drivers/infiniband/hw/cxgb3/iwch_provider.c static int iwch_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg,
sg                662 drivers/infiniband/hw/cxgb3/iwch_provider.c 	return ib_sg_to_pages(ibmr, sg, sg_nents, sg_offset, iwch_set_page);
sg                984 drivers/infiniband/hw/cxgb4/iw_cxgb4.h int c4iw_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg, int sg_nents,
sg                785 drivers/infiniband/hw/cxgb4/mem.c int c4iw_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg, int sg_nents,
sg                792 drivers/infiniband/hw/cxgb4/mem.c 	return ib_sg_to_pages(ibmr, sg, sg_nents, sg_offset, c4iw_set_page);
sg               1197 drivers/infiniband/hw/hns/hns_roce_device.h int hns_roce_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg, int sg_nents,
sg                 46 drivers/infiniband/hw/hns/hns_roce_hw_v1.c static void set_data_seg(struct hns_roce_wqe_data_seg *dseg, struct ib_sge *sg)
sg                 48 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 	dseg->lkey = cpu_to_le32(sg->lkey);
sg                 49 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 	dseg->addr = cpu_to_le64(sg->addr);
sg                 50 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 	dseg->len  = cpu_to_le32(sg->length);
sg                 52 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 			    struct ib_sge *sg)
sg                 54 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	dseg->lkey = cpu_to_le32(sg->lkey);
sg                 55 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	dseg->addr = cpu_to_le64(sg->addr);
sg                 56 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	dseg->len  = cpu_to_le32(sg->length);
sg                116 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	struct ib_sge *sg;
sg                127 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	sg = wr->sg_list + num_in_wqe;
sg                142 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 			set_data_seg_v2(dseg++, sg + i);
sg                148 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 			set_data_seg_v2(dseg++, sg + fi_sge_num + i);
sg                153 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 			set_data_seg_v2(dseg++, sg + i);
sg               1434 drivers/infiniband/hw/hns/hns_roce_mr.c int hns_roce_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg, int sg_nents,
sg               1441 drivers/infiniband/hw/hns/hns_roce_mr.c 	return ib_sg_to_pages(ibmr, sg, sg_nents, sg_offset, hns_roce_set_page);
sg               1631 drivers/infiniband/hw/i40iw/i40iw_verbs.c static int i40iw_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg,
sg               1637 drivers/infiniband/hw/i40iw/i40iw_verbs.c 	return ib_sg_to_pages(ibmr, sg, sg_nents, sg_offset, i40iw_set_page);
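
bnxt_re, iwch, c4iw, hns, i40iw (and, further down, ocrdma and qedr) all implement map_mr_sg the same way: delegate to ib_sg_to_pages() with a driver callback that records each page's DMA address in the MR's page list. A generic sketch with hypothetical foo_ names:

#include <rdma/ib_verbs.h>

struct foo_mr {
	struct ib_mr ibmr;
	u64 *pages;		/* DMA page array handed to the hardware */
	u32 npages;
	u32 max_pages;
};

static int foo_set_page(struct ib_mr *ibmr, u64 addr)
{
	struct foo_mr *mr = container_of(ibmr, struct foo_mr, ibmr);

	if (mr->npages == mr->max_pages)
		return -ENOMEM;

	mr->pages[mr->npages++] = addr;
	return 0;
}

static int foo_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg,
			 int sg_nents, unsigned int *sg_offset)
{
	struct foo_mr *mr = container_of(ibmr, struct foo_mr, ibmr);

	mr->npages = 0;
	return ib_sg_to_pages(ibmr, sg, sg_nents, sg_offset, foo_set_page);
}

ib_sg_to_pages() returns the number of scatterlist elements it consumed, so callers can detect a partially mapped MR.
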
sg                742 drivers/infiniband/hw/mlx4/mlx4_ib.h int mlx4_ib_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg, int sg_nents,
sg                193 drivers/infiniband/hw/mlx4/mr.c 	struct scatterlist *sg;
sg                203 drivers/infiniband/hw/mlx4/mr.c 	for_each_sg(umem->sg_head.sgl, sg, umem->nmap, i) {
sg                204 drivers/infiniband/hw/mlx4/mr.c 		if (cur_start_addr + len == sg_dma_address(sg)) {
sg                206 drivers/infiniband/hw/mlx4/mr.c 			len += sg_dma_len(sg);
sg                222 drivers/infiniband/hw/mlx4/mr.c 		cur_start_addr = sg_dma_address(sg);
sg                223 drivers/infiniband/hw/mlx4/mr.c 		len = sg_dma_len(sg);
sg                267 drivers/infiniband/hw/mlx4/mr.c 	struct scatterlist *sg;
sg                274 drivers/infiniband/hw/mlx4/mr.c 	for_each_sg(umem->sg_head.sgl, sg, umem->nmap, i) {
sg                280 drivers/infiniband/hw/mlx4/mr.c 			current_block_start = sg_dma_address(sg);
sg                308 drivers/infiniband/hw/mlx4/mr.c 		next_block_start = sg_dma_address(sg);
sg                333 drivers/infiniband/hw/mlx4/mr.c 			current_block_len = sg_dma_len(sg);
sg                341 drivers/infiniband/hw/mlx4/mr.c 		current_block_len += sg_dma_len(sg);
sg                806 drivers/infiniband/hw/mlx4/mr.c int mlx4_ib_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg, int sg_nents,
sg                817 drivers/infiniband/hw/mlx4/mr.c 	rc = ib_sg_to_pages(ibmr, sg, sg_nents, sg_offset, mlx4_set_page);
sg               3466 drivers/infiniband/hw/mlx4/qp.c static void set_data_seg(struct mlx4_wqe_data_seg *dseg, struct ib_sge *sg)
sg               3468 drivers/infiniband/hw/mlx4/qp.c 	dseg->lkey       = cpu_to_be32(sg->lkey);
sg               3469 drivers/infiniband/hw/mlx4/qp.c 	dseg->addr       = cpu_to_be64(sg->addr);
sg               3481 drivers/infiniband/hw/mlx4/qp.c 	dseg->byte_count = cpu_to_be32(sg->length);
sg               3484 drivers/infiniband/hw/mlx4/qp.c static void __set_data_seg(struct mlx4_wqe_data_seg *dseg, struct ib_sge *sg)
sg               3486 drivers/infiniband/hw/mlx4/qp.c 	dseg->byte_count = cpu_to_be32(sg->length);
sg               3487 drivers/infiniband/hw/mlx4/qp.c 	dseg->lkey       = cpu_to_be32(sg->lkey);
sg               3488 drivers/infiniband/hw/mlx4/qp.c 	dseg->addr       = cpu_to_be64(sg->addr);
sg                 56 drivers/infiniband/hw/mlx5/mem.c 	struct scatterlist *sg;
sg                 65 drivers/infiniband/hw/mlx5/mem.c 	for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) {
sg                 66 drivers/infiniband/hw/mlx5/mem.c 		len = sg_dma_len(sg) >> PAGE_SHIFT;
sg                 67 drivers/infiniband/hw/mlx5/mem.c 		pfn = sg_dma_address(sg) >> PAGE_SHIFT;
sg                138 drivers/infiniband/hw/mlx5/mem.c 	struct scatterlist *sg;
sg                155 drivers/infiniband/hw/mlx5/mem.c 	for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) {
sg                156 drivers/infiniband/hw/mlx5/mem.c 		len = sg_dma_len(sg) >> PAGE_SHIFT;
sg                157 drivers/infiniband/hw/mlx5/mem.c 		base = sg_dma_address(sg);
sg               1175 drivers/infiniband/hw/mlx5/mlx5_ib.h int mlx5_ib_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg, int sg_nents,
sg                928 drivers/infiniband/hw/mlx5/mr.c 	struct ib_sge sg;
sg                984 drivers/infiniband/hw/mlx5/mr.c 	sg.addr = dma;
sg                985 drivers/infiniband/hw/mlx5/mr.c 	sg.lkey = dev->umrc.pd->local_dma_lkey;
sg                991 drivers/infiniband/hw/mlx5/mr.c 	wr.wr.sg_list = &sg;
sg               1012 drivers/infiniband/hw/mlx5/mr.c 		sg.length = ALIGN(npages * desc_size,
sg               1031 drivers/infiniband/hw/mlx5/mr.c 		wr.xlt_size = sg.length;
sg               2070 drivers/infiniband/hw/mlx5/mr.c 	struct scatterlist *sg = sgl;
sg               2076 drivers/infiniband/hw/mlx5/mr.c 	mr->ibmr.iova = sg_dma_address(sg) + sg_offset;
sg               2079 drivers/infiniband/hw/mlx5/mr.c 	for_each_sg(sgl, sg, sg_nents, i) {
sg               2082 drivers/infiniband/hw/mlx5/mr.c 		klms[i].va = cpu_to_be64(sg_dma_address(sg) + sg_offset);
sg               2083 drivers/infiniband/hw/mlx5/mr.c 		klms[i].bcount = cpu_to_be32(sg_dma_len(sg) - sg_offset);
sg               2085 drivers/infiniband/hw/mlx5/mr.c 		mr->ibmr.length += sg_dma_len(sg) - sg_offset;
sg               2097 drivers/infiniband/hw/mlx5/mr.c 		sg = meta_sgl;
sg               2099 drivers/infiniband/hw/mlx5/mr.c 		for_each_sg(meta_sgl, sg, meta_sg_nents, j) {
sg               2102 drivers/infiniband/hw/mlx5/mr.c 			klms[i + j].va = cpu_to_be64(sg_dma_address(sg) +
sg               2104 drivers/infiniband/hw/mlx5/mr.c 			klms[i + j].bcount = cpu_to_be32(sg_dma_len(sg) -
sg               2107 drivers/infiniband/hw/mlx5/mr.c 			mr->ibmr.length += sg_dma_len(sg) - sg_offset;
sg               2310 drivers/infiniband/hw/mlx5/mr.c int mlx5_ib_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg, int sg_nents,
sg               2323 drivers/infiniband/hw/mlx5/mr.c 		n = mlx5_ib_sg_to_klms(mr, sg, sg_nents, sg_offset, NULL, 0,
sg               2326 drivers/infiniband/hw/mlx5/mr.c 		n = ib_sg_to_pages(ibmr, sg, sg_nents, sg_offset,
sg               1707 drivers/infiniband/hw/mlx5/odp.c 		struct ib_sge *sg = &sg_list[i];
sg               1710 drivers/infiniband/hw/mlx5/odp.c 		ret = pagefault_single_data_segment(dev, pd, sg->lkey, sg->addr,
sg               1711 drivers/infiniband/hw/mlx5/odp.c 						    sg->length,
sg               4151 drivers/infiniband/hw/mlx5/qp.c static void set_data_ptr_seg(struct mlx5_wqe_data_seg *dseg, struct ib_sge *sg)
sg               4153 drivers/infiniband/hw/mlx5/qp.c 	dseg->byte_count = cpu_to_be32(sg->length);
sg               4154 drivers/infiniband/hw/mlx5/qp.c 	dseg->lkey       = cpu_to_be32(sg->lkey);
sg               4155 drivers/infiniband/hw/mlx5/qp.c 	dseg->addr       = cpu_to_be64(sg->addr);
sg                117 drivers/infiniband/hw/mthca/mthca_wqe.h 					       struct ib_sge *sg)
sg                119 drivers/infiniband/hw/mthca/mthca_wqe.h 	dseg->byte_count = cpu_to_be32(sg->length);
sg                120 drivers/infiniband/hw/mthca/mthca_wqe.h 	dseg->lkey       = cpu_to_be32(sg->lkey);
sg                121 drivers/infiniband/hw/mthca/mthca_wqe.h 	dseg->addr       = cpu_to_be64(sg->addr);
sg               2976 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c int ocrdma_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg, int sg_nents,
sg               2983 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	return ib_sg_to_pages(ibmr, sg, sg_nents, sg_offset, ocrdma_set_page);
sg                107 drivers/infiniband/hw/ocrdma/ocrdma_verbs.h int ocrdma_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg, int sg_nents,
sg               1288 drivers/infiniband/hw/qedr/verbs.c 	struct scatterlist *sg;
sg               1308 drivers/infiniband/hw/qedr/verbs.c 	sg = srq->prod_umem->sg_head.sgl;
sg               1309 drivers/infiniband/hw/qedr/verbs.c 	srq->hw_srq.phy_prod_pair_addr = sg_dma_address(sg);
sg               2842 drivers/infiniband/hw/qedr/verbs.c int qedr_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg,
sg               2850 drivers/infiniband/hw/qedr/verbs.c 	return ib_sg_to_pages(ibmr, sg, sg_nents, NULL, qedr_set_page);
sg                 84 drivers/infiniband/hw/qedr/verbs.h int qedr_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg,
sg                 70 drivers/infiniband/hw/usnic/usnic_uiom.c 	struct scatterlist *sg;
sg                 75 drivers/infiniband/hw/usnic/usnic_uiom.c 		for_each_sg(chunk->page_list, sg, chunk->nents, i) {
sg                 76 drivers/infiniband/hw/usnic/usnic_uiom.c 			page = sg_page(sg);
sg                 77 drivers/infiniband/hw/usnic/usnic_uiom.c 			pa = sg_phys(sg);
sg                 90 drivers/infiniband/hw/usnic/usnic_uiom.c 	struct scatterlist *sg;
sg                167 drivers/infiniband/hw/usnic/usnic_uiom.c 			for_each_sg(chunk->page_list, sg, chunk->nents, i) {
sg                168 drivers/infiniband/hw/usnic/usnic_uiom.c 				sg_set_page(sg, page_list[i + off],
sg                170 drivers/infiniband/hw/usnic/usnic_uiom.c 				pa = sg_phys(sg);
sg                312 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c int pvrdma_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg, int sg_nents,
sg                321 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	ret = ib_sg_to_pages(ibmr, sg, sg_nents, sg_offset, pvrdma_set_page);
sg                410 drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.h int pvrdma_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg,
sg                632 drivers/infiniband/sw/rdmavt/mr.c int rvt_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg,
sg                640 drivers/infiniband/sw/rdmavt/mr.c 	ret = ib_sg_to_pages(ibmr, sg, sg_nents, sg_offset, rvt_set_page);
sg                 84 drivers/infiniband/sw/rdmavt/mr.h int rvt_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg,
sg               1026 drivers/infiniband/sw/rxe/rxe_verbs.c static int rxe_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg,
sg               1034 drivers/infiniband/sw/rxe/rxe_verbs.c 	n = ib_sg_to_pages(ibmr, sg, sg_nents, sg_offset, rxe_set_page);
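
Most map_mr_sg implementations indexed above (mlx4, ocrdma, qedr, pvrdma, rdmavt, rxe) reduce to the same two pieces: a per-page callback and one call to ib_sg_to_pages(). A sketch of that shape; the `my_` names and the flat page array are illustrative stand-ins for each driver's descriptor table:

	#include <rdma/ib_verbs.h>

	struct my_mr {
		struct ib_mr ibmr;
		u64 *pages;		/* driver-owned page list */
		u32 npages;
		u32 max_pages;
	};

	/* Called once per ibmr->page_size chunk by ib_sg_to_pages(). */
	static int my_set_page(struct ib_mr *ibmr, u64 addr)
	{
		struct my_mr *mr = container_of(ibmr, struct my_mr, ibmr);

		if (mr->npages == mr->max_pages)
			return -ENOMEM;
		mr->pages[mr->npages++] = addr;
		return 0;
	}

	static int my_map_mr_sg(struct ib_mr *ibmr, struct scatterlist *sg,
				int sg_nents, unsigned int *sg_offset)
	{
		struct my_mr *mr = container_of(ibmr, struct my_mr, ibmr);

		mr->npages = 0;
		return ib_sg_to_pages(ibmr, sg, sg_nents, sg_offset, my_set_page);
	}
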
sg                198 drivers/infiniband/ulp/iser/iscsi_iser.h 	struct scatterlist *sg;
sg                391 drivers/infiniband/ulp/iser/iser_initiator.c 		data_buf->sg = scsi_sglist(sc);
sg                397 drivers/infiniband/ulp/iser/iser_initiator.c 		prot_buf->sg  = scsi_prot_sglist(sc);
sg                142 drivers/infiniband/ulp/iser/iser_memory.c 	struct scatterlist *sg;
sg                145 drivers/infiniband/ulp/iser/iser_memory.c 	for_each_sg(data->sg, sg, data->dma_nents, i)
sg                148 drivers/infiniband/ulp/iser/iser_memory.c 			 i, (unsigned long)sg_dma_address(sg),
sg                149 drivers/infiniband/ulp/iser/iser_memory.c 			 sg_page(sg), sg->offset, sg->length, sg_dma_len(sg));
sg                172 drivers/infiniband/ulp/iser/iser_memory.c 	data->dma_nents = ib_dma_map_sg(dev, data->sg, data->size, dma_dir);
sg                187 drivers/infiniband/ulp/iser/iser_memory.c 	ib_dma_unmap_sg(dev, data->sg, data->size, dir);
sg                194 drivers/infiniband/ulp/iser/iser_memory.c 	struct scatterlist *sg = mem->sg;
sg                206 drivers/infiniband/ulp/iser/iser_memory.c 	reg->sge.addr = sg_dma_address(&sg[0]);
sg                207 drivers/infiniband/ulp/iser/iser_memory.c 	reg->sge.length = sg_dma_len(&sg[0]);
sg                241 drivers/infiniband/ulp/iser/iser_memory.c 	plen = ib_sg_to_pages(&page_vec->fake_mr, mem->sg,
sg                405 drivers/infiniband/ulp/iser/iser_memory.c 	ret = ib_map_mr_sg_pi(mr, mem->sg, mem->dma_nents, NULL,
sg                406 drivers/infiniband/ulp/iser/iser_memory.c 			      sig_mem->sg, sig_mem->dma_nents, NULL, SZ_4K);
sg                454 drivers/infiniband/ulp/iser/iser_memory.c 	n = ib_map_mr_sg(mr, mem->sg, mem->dma_nents, NULL, SIZE_4K);
sg               1160 drivers/infiniband/ulp/isert/ib_isert.c 		sg_init_table(&isert_cmd->sg, 1);
sg               1161 drivers/infiniband/ulp/isert/ib_isert.c 		cmd->se_cmd.t_data_sg = &isert_cmd->sg;
sg               1163 drivers/infiniband/ulp/isert/ib_isert.c 		sg_set_buf(&isert_cmd->sg, &rx_desc->data[0], imm_data_len);
sg                128 drivers/infiniband/ulp/isert/ib_isert.h 	struct scatterlist	sg;
sg               1544 drivers/infiniband/ulp/srp/ib_srp.c 		srp_map_desc(state, sg_dma_address(state->sg) + sg_offset,
sg               1545 drivers/infiniband/ulp/srp/ib_srp.c 			     sg_dma_len(state->sg) - sg_offset,
sg               1559 drivers/infiniband/ulp/srp/ib_srp.c 	n = ib_map_mr_sg(desc->mr, state->sg, sg_nents, sg_offset_p,
sg               1601 drivers/infiniband/ulp/srp/ib_srp.c 			    struct scatterlist *sg)
sg               1605 drivers/infiniband/ulp/srp/ib_srp.c 	dma_addr_t dma_addr = sg_dma_address(sg);
sg               1606 drivers/infiniband/ulp/srp/ib_srp.c 	unsigned int dma_len = sg_dma_len(sg);
sg               1647 drivers/infiniband/ulp/srp/ib_srp.c 	struct scatterlist *sg;
sg               1654 drivers/infiniband/ulp/srp/ib_srp.c 	for_each_sg(scat, sg, count, i) {
sg               1655 drivers/infiniband/ulp/srp/ib_srp.c 		ret = srp_map_sg_entry(state, ch, sg);
sg               1675 drivers/infiniband/ulp/srp/ib_srp.c 	state->sg = scat;
sg               1689 drivers/infiniband/ulp/srp/ib_srp.c 			state->sg = sg_next(state->sg);
sg               1700 drivers/infiniband/ulp/srp/ib_srp.c 	struct scatterlist *sg;
sg               1703 drivers/infiniband/ulp/srp/ib_srp.c 	for_each_sg(scat, sg, count, i) {
sg               1704 drivers/infiniband/ulp/srp/ib_srp.c 		srp_map_desc(state, sg_dma_address(sg), sg_dma_len(sg),
sg               1739 drivers/infiniband/ulp/srp/ib_srp.c 		state.sg = idb_sg;
sg               1804 drivers/infiniband/ulp/srp/ib_srp.c 	struct scatterlist *scat, *sg;
sg               1853 drivers/infiniband/ulp/srp/ib_srp.c 		for_each_sg(scat, sg, count, i) {
sg               1854 drivers/infiniband/ulp/srp/ib_srp.c 			sge[i].addr   = sg_dma_address(sg);
sg               1855 drivers/infiniband/ulp/srp/ib_srp.c 			sge[i].length = sg_dma_len(sg);
sg                343 drivers/infiniband/ulp/srp/ib_srp.h 		struct scatterlist	*sg;
sg                889 drivers/infiniband/ulp/srpt/ib_srpt.c 		struct srp_direct_buf *db, int nbufs, struct scatterlist **sg,
sg                913 drivers/infiniband/ulp/srpt/ib_srpt.c 		ret = target_alloc_sgl(&ctx->sg, &ctx->nents, size, false,
sg                919 drivers/infiniband/ulp/srpt/ib_srpt.c 				ctx->sg, ctx->nents, 0, remote_addr, rkey, dir);
sg                921 drivers/infiniband/ulp/srpt/ib_srpt.c 			target_free_sgl(ctx->sg, ctx->nents);
sg                930 drivers/infiniband/ulp/srpt/ib_srpt.c 			sg_chain(prev, prev_nents + 1, ctx->sg);
sg                932 drivers/infiniband/ulp/srpt/ib_srpt.c 			*sg = ctx->sg;
sg                935 drivers/infiniband/ulp/srpt/ib_srpt.c 		prev = ctx->sg;
sg                948 drivers/infiniband/ulp/srpt/ib_srpt.c 				ctx->sg, ctx->nents, dir);
sg                949 drivers/infiniband/ulp/srpt/ib_srpt.c 		target_free_sgl(ctx->sg, ctx->nents);
sg                966 drivers/infiniband/ulp/srpt/ib_srpt.c 				ctx->sg, ctx->nents, dir);
sg                967 drivers/infiniband/ulp/srpt/ib_srpt.c 		target_free_sgl(ctx->sg, ctx->nents);
sg               1014 drivers/infiniband/ulp/srpt/ib_srpt.c 		struct scatterlist **sg, unsigned int *sg_cnt, u64 *data_len,
sg               1042 drivers/infiniband/ulp/srpt/ib_srpt.c 		return srpt_alloc_rw_ctxs(ioctx, db, 1, sg, sg_cnt);
sg               1061 drivers/infiniband/ulp/srpt/ib_srpt.c 				sg, sg_cnt);
sg               1093 drivers/infiniband/ulp/srpt/ib_srpt.c 		*sg = &ioctx->imm_sg;
sg               1492 drivers/infiniband/ulp/srpt/ib_srpt.c 	struct scatterlist *sg = NULL;
sg               1521 drivers/infiniband/ulp/srpt/ib_srpt.c 			       &sg, &sg_cnt, &data_len, ch->imm_data_offset);
sg               1534 drivers/infiniband/ulp/srpt/ib_srpt.c 			       sg, sg_cnt, NULL, 0, NULL, 0);
sg                196 drivers/infiniband/ulp/srpt/ib_srpt.h 	struct scatterlist	*sg;
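
The srpt hits above grow a scatterlist incrementally with sg_chain(). The subtle point is that the earlier table must be allocated one slot longer than its payload, because sg_chain() rewrites that last slot into a link entry. A sketch with illustrative sizes:

	#include <linux/scatterlist.h>
	#include <linux/slab.h>

	/* Chain table 'b' (m entries) onto table 'a' (n payload entries).
	 * 'a' carries n + 1 slots; sg_chain() turns slot n into a chain
	 * entry pointing at 'b'. */
	static struct scatterlist *chain_tables(unsigned int n, unsigned int m)
	{
		struct scatterlist *a, *b;

		a = kmalloc_array(n + 1, sizeof(*a), GFP_KERNEL);
		b = kmalloc_array(m, sizeof(*b), GFP_KERNEL);
		if (!a || !b) {
			kfree(a);
			kfree(b);
			return NULL;
		}
		sg_init_table(a, n + 1);
		sg_init_table(b, m);
		/* ... fill a[0..n-1] and b[0..m-1] with sg_set_page() ... */
		sg_chain(a, n + 1, b);
		return a;
	}
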
sg                606 drivers/iommu/dma-iommu.c 		struct scatterlist *sg;
sg                609 drivers/iommu/dma-iommu.c 		for_each_sg(sgt.sgl, sg, sgt.orig_nents, i)
sg                610 drivers/iommu/dma-iommu.c 			arch_dma_prep_coherent(sg_page(sg), sg->length);
sg                680 drivers/iommu/dma-iommu.c 	struct scatterlist *sg;
sg                686 drivers/iommu/dma-iommu.c 	for_each_sg(sgl, sg, nelems, i)
sg                687 drivers/iommu/dma-iommu.c 		arch_sync_dma_for_cpu(dev, sg_phys(sg), sg->length, dir);
sg                694 drivers/iommu/dma-iommu.c 	struct scatterlist *sg;
sg                700 drivers/iommu/dma-iommu.c 	for_each_sg(sgl, sg, nelems, i)
sg                701 drivers/iommu/dma-iommu.c 		arch_sync_dma_for_device(dev, sg_phys(sg), sg->length, dir);
sg                735 drivers/iommu/dma-iommu.c static int __finalise_sg(struct device *dev, struct scatterlist *sg, int nents,
sg                738 drivers/iommu/dma-iommu.c 	struct scatterlist *s, *cur = sg;
sg                743 drivers/iommu/dma-iommu.c 	for_each_sg(sg, s, nents, i) {
sg                788 drivers/iommu/dma-iommu.c static void __invalidate_sg(struct scatterlist *sg, int nents)
sg                793 drivers/iommu/dma-iommu.c 	for_each_sg(sg, s, nents, i) {
sg                810 drivers/iommu/dma-iommu.c static int iommu_dma_map_sg(struct device *dev, struct scatterlist *sg,
sg                824 drivers/iommu/dma-iommu.c 		iommu_dma_sync_sg_for_device(dev, sg, nents, dir);
sg                832 drivers/iommu/dma-iommu.c 	for_each_sg(sg, s, nents, i) {
sg                873 drivers/iommu/dma-iommu.c 	if (iommu_map_sg(domain, iova, sg, nents, prot) < iova_len)
sg                876 drivers/iommu/dma-iommu.c 	return __finalise_sg(dev, sg, nents, iova);
sg                881 drivers/iommu/dma-iommu.c 	__invalidate_sg(sg, nents);
sg                885 drivers/iommu/dma-iommu.c static void iommu_dma_unmap_sg(struct device *dev, struct scatterlist *sg,
sg                893 drivers/iommu/dma-iommu.c 		iommu_dma_sync_sg_for_cpu(dev, sg, nents, dir);
sg                899 drivers/iommu/dma-iommu.c 	start = sg_dma_address(sg);
sg                900 drivers/iommu/dma-iommu.c 	for_each_sg(sg_next(sg), tmp, nents - 1, i) {
sg                903 drivers/iommu/dma-iommu.c 		sg = tmp;
sg                905 drivers/iommu/dma-iommu.c 	end = sg_dma_address(sg) + sg_dma_len(sg);
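
iommu_dma_unmap_sg() above cannot trust sg->length after mapping; it rebuilds the mapped IOVA span from the DMA fields (first entry's address through the last mapped entry's end). A condensed sketch of the same recovery, assuming map_sg left merged entries with a zero sg_dma_len():

	#include <linux/scatterlist.h>

	/* Recover the contiguous IOVA range [start, end) that map_sg
	 * produced, mirroring the unmap path above. */
	static void mapped_iova_span(struct scatterlist *sgl, int nents,
				     dma_addr_t *start, dma_addr_t *end)
	{
		struct scatterlist *sg, *last = sgl;
		int i;

		for_each_sg(sgl, sg, nents, i)
			if (sg_dma_len(sg))
				last = sg;
		*start = sg_dma_address(sgl);
		*end = sg_dma_address(last) + sg_dma_len(last);
	}
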
sg               2224 drivers/iommu/intel-iommu.c 			    struct scatterlist *sg, unsigned long phys_pfn,
sg               2240 drivers/iommu/intel-iommu.c 	if (!sg) {
sg               2249 drivers/iommu/intel-iommu.c 			unsigned int pgoff = sg->offset & ~PAGE_MASK;
sg               2251 drivers/iommu/intel-iommu.c 			sg_res = aligned_nrpages(sg->offset, sg->length);
sg               2252 drivers/iommu/intel-iommu.c 			sg->dma_address = ((dma_addr_t)iov_pfn << VTD_PAGE_SHIFT) + pgoff;
sg               2253 drivers/iommu/intel-iommu.c 			sg->dma_length = sg->length;
sg               2254 drivers/iommu/intel-iommu.c 			pteval = (sg_phys(sg) - pgoff) | prot;
sg               2333 drivers/iommu/intel-iommu.c 			sg = sg_next(sg);
sg               2339 drivers/iommu/intel-iommu.c 			  struct scatterlist *sg, unsigned long phys_pfn,
sg               2346 drivers/iommu/intel-iommu.c 	ret = __domain_mapping(domain, iov_pfn, sg, phys_pfn, nr_pages, prot);
sg               2359 drivers/iommu/intel-iommu.c 				    struct scatterlist *sg, unsigned long nr_pages,
sg               2362 drivers/iommu/intel-iommu.c 	return domain_mapping(domain, iov_pfn, sg, 0, nr_pages, prot);
sg               3693 drivers/iommu/intel-iommu.c 	struct scatterlist *sg;
sg               3699 drivers/iommu/intel-iommu.c 	for_each_sg(sglist, sg, nelems, i) {
sg               3700 drivers/iommu/intel-iommu.c 		nrpages += aligned_nrpages(sg_dma_address(sg), sg_dma_len(sg));
sg               3717 drivers/iommu/intel-iommu.c 	struct scatterlist *sg;
sg               3731 drivers/iommu/intel-iommu.c 	for_each_sg(sglist, sg, nelems, i)
sg               3732 drivers/iommu/intel-iommu.c 		size += aligned_nrpages(sg->offset, sg->length);
sg               3951 drivers/iommu/intel-iommu.c 	struct scatterlist *sg;
sg               3954 drivers/iommu/intel-iommu.c 	for_each_sg(sglist, sg, nelems, i)
sg               3955 drivers/iommu/intel-iommu.c 		bounce_unmap_page(dev, sg->dma_address,
sg               3956 drivers/iommu/intel-iommu.c 				  sg_dma_len(sg), dir, attrs);
sg               3964 drivers/iommu/intel-iommu.c 	struct scatterlist *sg;
sg               3966 drivers/iommu/intel-iommu.c 	for_each_sg(sglist, sg, nelems, i) {
sg               3967 drivers/iommu/intel-iommu.c 		sg->dma_address = bounce_map_page(dev, sg_page(sg),
sg               3968 drivers/iommu/intel-iommu.c 						  sg->offset, sg->length,
sg               3970 drivers/iommu/intel-iommu.c 		if (sg->dma_address == DMA_MAPPING_ERROR)
sg               3972 drivers/iommu/intel-iommu.c 		sg_dma_len(sg) = sg->length;
sg               4000 drivers/iommu/intel-iommu.c 	struct scatterlist *sg;
sg               4003 drivers/iommu/intel-iommu.c 	for_each_sg(sglist, sg, nelems, i)
sg               4004 drivers/iommu/intel-iommu.c 		bounce_sync_single(dev, sg_dma_address(sg),
sg               4005 drivers/iommu/intel-iommu.c 				   sg_dma_len(sg), dir, SYNC_FOR_CPU);
sg               4012 drivers/iommu/intel-iommu.c 	struct scatterlist *sg;
sg               4015 drivers/iommu/intel-iommu.c 	for_each_sg(sglist, sg, nelems, i)
sg               4016 drivers/iommu/intel-iommu.c 		bounce_sync_single(dev, sg_dma_address(sg),
sg               4017 drivers/iommu/intel-iommu.c 				   sg_dma_len(sg), dir, SYNC_FOR_DEVICE);
sg               1996 drivers/iommu/iommu.c 		    struct scatterlist *sg, unsigned int nents, int prot)
sg               2004 drivers/iommu/iommu.c 		phys_addr_t s_phys = sg_phys(sg);
sg               2016 drivers/iommu/iommu.c 			len += sg->length;
sg               2018 drivers/iommu/iommu.c 			len = sg->length;
sg               2023 drivers/iommu/iommu.c 			sg = sg_next(sg);
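
The iommu.c hits above show the generic iommu_map_sg() merging physically contiguous entries before mapping. A condensed sketch of that coalescing loop under the same era's iommu_map() signature; alignment checks and unmap-on-error are deliberately omitted:

	#include <linux/iommu.h>
	#include <linux/scatterlist.h>

	/* Merge physically contiguous entries so each iommu_map() call
	 * covers the longest possible run; returns bytes mapped. */
	static size_t map_sg_coalesced(struct iommu_domain *domain,
				       unsigned long iova,
				       struct scatterlist *sg,
				       unsigned int nents, int prot)
	{
		size_t mapped = 0, len = 0;
		phys_addr_t start = 0;
		unsigned int i;

		for (i = 0; i < nents; i++, sg = sg_next(sg)) {
			phys_addr_t phys = sg_phys(sg);

			if (len && phys == start + len) {
				len += sg->length;	/* extend current run */
				continue;
			}
			if (len) {
				if (iommu_map(domain, iova + mapped, start, len, prot))
					return mapped;
				mapped += len;
			}
			start = phys;
			len = sg->length;
		}
		if (len && !iommu_map(domain, iova + mapped, start, len, prot))
			mapped += len;
		return mapped;
	}
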
sg                225 drivers/iommu/virtio-iommu.c 	struct scatterlist *sg[2] = { &top_sg, &bottom_sg };
sg                248 drivers/iommu/virtio-iommu.c 	ret = virtqueue_add_sgs(vq, sg, 1, 1, req, GFP_ATOMIC);
sg                252 drivers/iommu/virtio-iommu.c 			ret = virtqueue_add_sgs(vq, sg, 1, 1, req, GFP_ATOMIC);
sg                563 drivers/iommu/virtio-iommu.c 	struct scatterlist sg[1];
sg                576 drivers/iommu/virtio-iommu.c 		sg_init_one(sg, evt, sizeof(*evt));
sg                577 drivers/iommu/virtio-iommu.c 		ret = virtqueue_add_inbuf(vq, sg, 1, evt, GFP_ATOMIC);
sg                987 drivers/iommu/virtio-iommu.c 	struct scatterlist sg[1];
sg                998 drivers/iommu/virtio-iommu.c 		sg_init_one(sg, &evts[i], sizeof(*evts));
sg                999 drivers/iommu/virtio-iommu.c 		ret = virtqueue_add_inbuf(vq, sg, 1, &evts[i], GFP_KERNEL);
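
The virtio-iommu hits above post one device-readable ("out") buffer and one device-writable ("in") buffer in a single virtqueue_add_sgs() call. A minimal sketch of that pattern; `req`/`resp` are illustrative buffers, and the request pointer doubles as the completion token, as in the indexed code:

	#include <linux/scatterlist.h>
	#include <linux/virtio.h>

	static int post_request(struct virtqueue *vq, void *req, size_t req_len,
				void *resp, size_t resp_len)
	{
		struct scatterlist out, in;
		struct scatterlist *sgs[2] = { &out, &in };

		sg_init_one(&out, req, req_len);	/* driver -> device */
		sg_init_one(&in, resp, resp_len);	/* device -> driver */
		return virtqueue_add_sgs(vq, sgs, 1, 1, req, GFP_ATOMIC);
	}
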
sg                576 drivers/mailbox/bcm-flexrm-mailbox.c 	struct scatterlist *sg;
sg                580 drivers/mailbox/bcm-flexrm-mailbox.c 	for (sg = msg->spu.src; sg; sg = sg_next(sg)) {
sg                581 drivers/mailbox/bcm-flexrm-mailbox.c 		if (sg->length & 0xf) {
sg                582 drivers/mailbox/bcm-flexrm-mailbox.c 			if (sg->length > SRC_LENGTH_MASK)
sg                585 drivers/mailbox/bcm-flexrm-mailbox.c 			if (sg->length > (MSRC_LENGTH_MASK * 16))
sg                589 drivers/mailbox/bcm-flexrm-mailbox.c 	for (sg = msg->spu.dst; sg; sg = sg_next(sg)) {
sg                590 drivers/mailbox/bcm-flexrm-mailbox.c 		if (sg->length & 0xf) {
sg                591 drivers/mailbox/bcm-flexrm-mailbox.c 			if (sg->length > DST_LENGTH_MASK)
sg                594 drivers/mailbox/bcm-flexrm-mailbox.c 			if (sg->length > (MDST_LENGTH_MASK * 16))
sg                706 drivers/mailbox/bcm-pdc-mailbox.c static int pdc_tx_list_sg_add(struct pdc_state *pdcs, struct scatterlist *sg)
sg                721 drivers/mailbox/bcm-pdc-mailbox.c 	num_desc = (u32)sg_nents(sg);
sg                735 drivers/mailbox/bcm-pdc-mailbox.c 		pdcs->src_sg[pdcs->txout] = sg;
sg                739 drivers/mailbox/bcm-pdc-mailbox.c 	while (sg) {
sg                749 drivers/mailbox/bcm-pdc-mailbox.c 		bufcnt = sg_dma_len(sg);
sg                750 drivers/mailbox/bcm-pdc-mailbox.c 		databufptr = sg_dma_address(sg);
sg                762 drivers/mailbox/bcm-pdc-mailbox.c 		sg = sg_next(sg);
sg                763 drivers/mailbox/bcm-pdc-mailbox.c 		if (!sg)
sg                874 drivers/mailbox/bcm-pdc-mailbox.c static int pdc_rx_list_sg_add(struct pdc_state *pdcs, struct scatterlist *sg)
sg                888 drivers/mailbox/bcm-pdc-mailbox.c 	num_desc = (u32)sg_nents(sg);
sg                897 drivers/mailbox/bcm-pdc-mailbox.c 	while (sg) {
sg                907 drivers/mailbox/bcm-pdc-mailbox.c 		bufcnt = sg_dma_len(sg);
sg                908 drivers/mailbox/bcm-pdc-mailbox.c 		databufptr = sg_dma_address(sg);
sg                921 drivers/mailbox/bcm-pdc-mailbox.c 		sg = sg_next(sg);
sg               1104 drivers/mailbox/bcm-pdc-mailbox.c static u32 pdc_desc_count(struct scatterlist *sg)
sg               1108 drivers/mailbox/bcm-pdc-mailbox.c 	while (sg) {
sg               1109 drivers/mailbox/bcm-pdc-mailbox.c 		cnt += ((sg->length / PDC_DMA_BUF_MAX) + 1);
sg               1110 drivers/mailbox/bcm-pdc-mailbox.c 		sg = sg_next(sg);
sg                224 drivers/md/dm-crypt.c 					     struct scatterlist *sg);
sg                504 drivers/md/dm-crypt.c 	struct scatterlist *sg;
sg                509 drivers/md/dm-crypt.c 		sg = crypt_get_sg_data(cc, dmreq->sg_in);
sg                510 drivers/md/dm-crypt.c 		src = kmap_atomic(sg_page(sg));
sg                511 drivers/md/dm-crypt.c 		r = crypt_iv_lmk_one(cc, iv, dmreq, src + sg->offset);
sg                522 drivers/md/dm-crypt.c 	struct scatterlist *sg;
sg                529 drivers/md/dm-crypt.c 	sg = crypt_get_sg_data(cc, dmreq->sg_out);
sg                530 drivers/md/dm-crypt.c 	dst = kmap_atomic(sg_page(sg));
sg                531 drivers/md/dm-crypt.c 	r = crypt_iv_lmk_one(cc, iv, dmreq, dst + sg->offset);
sg                535 drivers/md/dm-crypt.c 		crypto_xor(dst + sg->offset, iv, cc->iv_size);
sg                650 drivers/md/dm-crypt.c 	struct scatterlist *sg;
sg                658 drivers/md/dm-crypt.c 		sg = crypt_get_sg_data(cc, dmreq->sg_in);
sg                659 drivers/md/dm-crypt.c 		src = kmap_atomic(sg_page(sg));
sg                660 drivers/md/dm-crypt.c 		r = crypt_iv_tcw_whitening(cc, dmreq, src + sg->offset);
sg                676 drivers/md/dm-crypt.c 	struct scatterlist *sg;
sg                684 drivers/md/dm-crypt.c 	sg = crypt_get_sg_data(cc, dmreq->sg_out);
sg                685 drivers/md/dm-crypt.c 	dst = kmap_atomic(sg_page(sg));
sg                686 drivers/md/dm-crypt.c 	r = crypt_iv_tcw_whitening(cc, dmreq, dst + sg->offset);
sg                811 drivers/md/dm-crypt.c 					     struct scatterlist *sg)
sg                814 drivers/md/dm-crypt.c 		return &sg[2];
sg                816 drivers/md/dm-crypt.c 	return sg;
sg               3370 drivers/md/dm-integrity.c 			struct scatterlist *sg;
sg               3393 drivers/md/dm-integrity.c 			sg = kvmalloc_array(ic->journal_pages + 1,
sg               3396 drivers/md/dm-integrity.c 			if (!sg) {
sg               3401 drivers/md/dm-integrity.c 			sg_init_table(sg, ic->journal_pages + 1);
sg               3405 drivers/md/dm-integrity.c 				sg_set_buf(&sg[i], va, PAGE_SIZE);
sg               3407 drivers/md/dm-integrity.c 			sg_set_buf(&sg[i], &ic->commit_ids, sizeof ic->commit_ids);
sg               3409 drivers/md/dm-integrity.c 			skcipher_request_set_crypt(req, sg, sg,
sg               3415 drivers/md/dm-integrity.c 			kvfree(sg);
sg               3470 drivers/md/dm-integrity.c 				struct scatterlist sg;
sg               3478 drivers/md/dm-integrity.c 				sg_init_one(&sg, crypt_data, crypt_len);
sg               3479 drivers/md/dm-integrity.c 				skcipher_request_set_crypt(req, &sg, &sg, crypt_len, crypt_iv);
sg                100 drivers/md/dm-verity-target.c 	struct scatterlist sg;
sg                103 drivers/md/dm-verity-target.c 		sg_init_one(&sg, data, len);
sg                104 drivers/md/dm-verity-target.c 		ahash_request_set_crypt(req, &sg, NULL, len);
sg                111 drivers/md/dm-verity-target.c 			sg_init_table(&sg, 1);
sg                112 drivers/md/dm-verity-target.c 			sg_set_page(&sg, vmalloc_to_page(data), this_step, offset_in_page(data));
sg                113 drivers/md/dm-verity-target.c 			ahash_request_set_crypt(req, &sg, NULL, this_step);
sg                372 drivers/md/dm-verity-target.c 	struct scatterlist sg;
sg                380 drivers/md/dm-verity-target.c 		sg_init_table(&sg, 1);
sg                391 drivers/md/dm-verity-target.c 		sg_set_page(&sg, bv.bv_page, len, bv.bv_offset);
sg                392 drivers/md/dm-verity-target.c 		ahash_request_set_crypt(req, &sg, NULL, len);
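
The dm-verity and dm-integrity hits above feed linear buffers to the crypto layer through one-entry scatterlists. A sketch of that pattern, assuming `tfm` was allocated with a CRYPTO_ALG_ASYNC mask so the digest completes synchronously, and that `data` is not vmalloc()ed; the vmalloc case is exactly why the indexed dm-verity code goes through vmalloc_to_page() + sg_set_page() instead:

	#include <crypto/hash.h>
	#include <linux/scatterlist.h>

	static int digest_buffer(struct crypto_ahash *tfm, const void *data,
				 unsigned int len, u8 *out)
	{
		struct ahash_request *req;
		struct scatterlist sg;
		int ret;

		req = ahash_request_alloc(tfm, GFP_KERNEL);
		if (!req)
			return -ENOMEM;
		sg_init_one(&sg, data, len);		/* lowmem buffer only */
		ahash_request_set_callback(req, 0, NULL, NULL);
		ahash_request_set_crypt(req, &sg, out, len);
		ret = crypto_ahash_digest(req);
		ahash_request_free(req);
		return ret;
	}
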
sg                218 drivers/media/common/videobuf2/videobuf2-vmalloc.c 	struct scatterlist *sg;
sg                233 drivers/media/common/videobuf2/videobuf2-vmalloc.c 	for_each_sg(sgt->sgl, sg, sgt->nents, i) {
sg                241 drivers/media/common/videobuf2/videobuf2-vmalloc.c 		sg_set_page(sg, page, PAGE_SIZE, 0);
sg                 42 drivers/media/pci/bt8xx/bttv-risc.c 	struct scatterlist *sg;
sg                 68 drivers/media/pci/bt8xx/bttv-risc.c 	sg = sglist;
sg                 73 drivers/media/pci/bt8xx/bttv-risc.c 		while (offset && offset >= sg_dma_len(sg)) {
sg                 74 drivers/media/pci/bt8xx/bttv-risc.c 			offset -= sg_dma_len(sg);
sg                 75 drivers/media/pci/bt8xx/bttv-risc.c 			sg = sg_next(sg);
sg                 77 drivers/media/pci/bt8xx/bttv-risc.c 		if (bpl <= sg_dma_len(sg)-offset) {
sg                 81 drivers/media/pci/bt8xx/bttv-risc.c 			*(rp++)=cpu_to_le32(sg_dma_address(sg)+offset);
sg                 87 drivers/media/pci/bt8xx/bttv-risc.c 					    (sg_dma_len(sg)-offset));
sg                 88 drivers/media/pci/bt8xx/bttv-risc.c 			*(rp++)=cpu_to_le32(sg_dma_address(sg)+offset);
sg                 89 drivers/media/pci/bt8xx/bttv-risc.c 			todo -= (sg_dma_len(sg)-offset);
sg                 91 drivers/media/pci/bt8xx/bttv-risc.c 			sg = sg_next(sg);
sg                 92 drivers/media/pci/bt8xx/bttv-risc.c 			while (todo > sg_dma_len(sg)) {
sg                 94 drivers/media/pci/bt8xx/bttv-risc.c 						    sg_dma_len(sg));
sg                 95 drivers/media/pci/bt8xx/bttv-risc.c 				*(rp++)=cpu_to_le32(sg_dma_address(sg));
sg                 96 drivers/media/pci/bt8xx/bttv-risc.c 				todo -= sg_dma_len(sg);
sg                 97 drivers/media/pci/bt8xx/bttv-risc.c 				sg = sg_next(sg);
sg                101 drivers/media/pci/bt8xx/bttv-risc.c 			*(rp++)=cpu_to_le32(sg_dma_address(sg));
sg               1136 drivers/media/pci/cx23885/cx23885-core.c 	struct scatterlist *sg;
sg               1151 drivers/media/pci/cx23885/cx23885-core.c 	sg = sglist;
sg               1153 drivers/media/pci/cx23885/cx23885-core.c 		while (offset && offset >= sg_dma_len(sg)) {
sg               1154 drivers/media/pci/cx23885/cx23885-core.c 			offset -= sg_dma_len(sg);
sg               1155 drivers/media/pci/cx23885/cx23885-core.c 			sg = sg_next(sg);
sg               1163 drivers/media/pci/cx23885/cx23885-core.c 		if (bpl <= sg_dma_len(sg)-offset) {
sg               1166 drivers/media/pci/cx23885/cx23885-core.c 			*(rp++) = cpu_to_le32(sg_dma_address(sg)+offset);
sg               1173 drivers/media/pci/cx23885/cx23885-core.c 					    (sg_dma_len(sg)-offset));
sg               1174 drivers/media/pci/cx23885/cx23885-core.c 			*(rp++) = cpu_to_le32(sg_dma_address(sg)+offset);
sg               1176 drivers/media/pci/cx23885/cx23885-core.c 			todo -= (sg_dma_len(sg)-offset);
sg               1178 drivers/media/pci/cx23885/cx23885-core.c 			sg = sg_next(sg);
sg               1179 drivers/media/pci/cx23885/cx23885-core.c 			while (todo > sg_dma_len(sg)) {
sg               1181 drivers/media/pci/cx23885/cx23885-core.c 						    sg_dma_len(sg));
sg               1182 drivers/media/pci/cx23885/cx23885-core.c 				*(rp++) = cpu_to_le32(sg_dma_address(sg));
sg               1184 drivers/media/pci/cx23885/cx23885-core.c 				todo -= sg_dma_len(sg);
sg               1185 drivers/media/pci/cx23885/cx23885-core.c 				sg = sg_next(sg);
sg               1188 drivers/media/pci/cx23885/cx23885-core.c 			*(rp++) = cpu_to_le32(sg_dma_address(sg));
sg                998 drivers/media/pci/cx25821/cx25821-core.c 	struct scatterlist *sg;
sg               1012 drivers/media/pci/cx25821/cx25821-core.c 	sg = sglist;
sg               1014 drivers/media/pci/cx25821/cx25821-core.c 		while (offset && offset >= sg_dma_len(sg)) {
sg               1015 drivers/media/pci/cx25821/cx25821-core.c 			offset -= sg_dma_len(sg);
sg               1016 drivers/media/pci/cx25821/cx25821-core.c 			sg = sg_next(sg);
sg               1018 drivers/media/pci/cx25821/cx25821-core.c 		if (bpl <= sg_dma_len(sg) - offset) {
sg               1022 drivers/media/pci/cx25821/cx25821-core.c 			*(rp++) = cpu_to_le32(sg_dma_address(sg) + offset);
sg               1029 drivers/media/pci/cx25821/cx25821-core.c 					(sg_dma_len(sg) - offset));
sg               1030 drivers/media/pci/cx25821/cx25821-core.c 			*(rp++) = cpu_to_le32(sg_dma_address(sg) + offset);
sg               1032 drivers/media/pci/cx25821/cx25821-core.c 			todo -= (sg_dma_len(sg) - offset);
sg               1034 drivers/media/pci/cx25821/cx25821-core.c 			sg = sg_next(sg);
sg               1035 drivers/media/pci/cx25821/cx25821-core.c 			while (todo > sg_dma_len(sg)) {
sg               1037 drivers/media/pci/cx25821/cx25821-core.c 						sg_dma_len(sg));
sg               1038 drivers/media/pci/cx25821/cx25821-core.c 				*(rp++) = cpu_to_le32(sg_dma_address(sg));
sg               1040 drivers/media/pci/cx25821/cx25821-core.c 				todo -= sg_dma_len(sg);
sg               1041 drivers/media/pci/cx25821/cx25821-core.c 				sg = sg_next(sg);
sg               1044 drivers/media/pci/cx25821/cx25821-core.c 			*(rp++) = cpu_to_le32(sg_dma_address(sg));
sg               1109 drivers/media/pci/cx25821/cx25821-core.c 	struct scatterlist *sg;
sg               1117 drivers/media/pci/cx25821/cx25821-core.c 	sg = sglist;
sg               1119 drivers/media/pci/cx25821/cx25821-core.c 		while (offset && offset >= sg_dma_len(sg)) {
sg               1120 drivers/media/pci/cx25821/cx25821-core.c 			offset -= sg_dma_len(sg);
sg               1121 drivers/media/pci/cx25821/cx25821-core.c 			sg = sg_next(sg);
sg               1129 drivers/media/pci/cx25821/cx25821-core.c 		if (bpl <= sg_dma_len(sg) - offset) {
sg               1133 drivers/media/pci/cx25821/cx25821-core.c 			*(rp++) = cpu_to_le32(sg_dma_address(sg) + offset);
sg               1140 drivers/media/pci/cx25821/cx25821-core.c 					(sg_dma_len(sg) - offset));
sg               1141 drivers/media/pci/cx25821/cx25821-core.c 			*(rp++) = cpu_to_le32(sg_dma_address(sg) + offset);
sg               1143 drivers/media/pci/cx25821/cx25821-core.c 			todo -= (sg_dma_len(sg) - offset);
sg               1145 drivers/media/pci/cx25821/cx25821-core.c 			sg = sg_next(sg);
sg               1146 drivers/media/pci/cx25821/cx25821-core.c 			while (todo > sg_dma_len(sg)) {
sg               1148 drivers/media/pci/cx25821/cx25821-core.c 						sg_dma_len(sg));
sg               1149 drivers/media/pci/cx25821/cx25821-core.c 				*(rp++) = cpu_to_le32(sg_dma_address(sg));
sg               1151 drivers/media/pci/cx25821/cx25821-core.c 				todo -= sg_dma_len(sg);
sg               1152 drivers/media/pci/cx25821/cx25821-core.c 				sg = sg_next(sg);
sg               1155 drivers/media/pci/cx25821/cx25821-core.c 			*(rp++) = cpu_to_le32(sg_dma_address(sg));
sg                 75 drivers/media/pci/cx88/cx88-core.c 	struct scatterlist *sg;
sg                 88 drivers/media/pci/cx88/cx88-core.c 	sg = sglist;
sg                 90 drivers/media/pci/cx88/cx88-core.c 		while (offset && offset >= sg_dma_len(sg)) {
sg                 91 drivers/media/pci/cx88/cx88-core.c 			offset -= sg_dma_len(sg);
sg                 92 drivers/media/pci/cx88/cx88-core.c 			sg = sg_next(sg);
sg                 98 drivers/media/pci/cx88/cx88-core.c 		if (bpl <= sg_dma_len(sg) - offset) {
sg                102 drivers/media/pci/cx88/cx88-core.c 			*(rp++) = cpu_to_le32(sg_dma_address(sg) + offset);
sg                108 drivers/media/pci/cx88/cx88-core.c 					      (sg_dma_len(sg) - offset));
sg                109 drivers/media/pci/cx88/cx88-core.c 			*(rp++) = cpu_to_le32(sg_dma_address(sg) + offset);
sg                110 drivers/media/pci/cx88/cx88-core.c 			todo -= (sg_dma_len(sg) - offset);
sg                112 drivers/media/pci/cx88/cx88-core.c 			sg = sg_next(sg);
sg                113 drivers/media/pci/cx88/cx88-core.c 			while (todo > sg_dma_len(sg)) {
sg                115 drivers/media/pci/cx88/cx88-core.c 						      sg_dma_len(sg));
sg                116 drivers/media/pci/cx88/cx88-core.c 				*(rp++) = cpu_to_le32(sg_dma_address(sg));
sg                117 drivers/media/pci/cx88/cx88-core.c 				todo -= sg_dma_len(sg);
sg                118 drivers/media/pci/cx88/cx88-core.c 				sg = sg_next(sg);
sg                121 drivers/media/pci/cx88/cx88-core.c 			*(rp++) = cpu_to_le32(sg_dma_address(sg));
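
The bttv, cx23885, cx25821, and cx88 RISC builders indexed above all share one walk: skip `offset` bytes into the mapped list, then emit one write per chunk of a `bpl`-byte scanline, crossing entry boundaries as needed. The skeleton, stripped of the per-chip instruction encoding (`emit` stands in for the RISC write; assumes offset + bpl fits in the mapped list):

	#include <linux/kernel.h>
	#include <linux/scatterlist.h>

	static void emit_scanline(struct scatterlist *sg, unsigned int offset,
				  unsigned int bpl,
				  void (*emit)(dma_addr_t addr, unsigned int len))
	{
		/* advance to the entry containing 'offset' */
		while (offset >= sg_dma_len(sg)) {
			offset -= sg_dma_len(sg);
			sg = sg_next(sg);
		}
		while (bpl) {
			unsigned int chunk = min(bpl, sg_dma_len(sg) - offset);

			emit(sg_dma_address(sg) + offset, chunk);
			bpl -= chunk;
			offset = 0;
			if (bpl)
				sg = sg_next(sg);
		}
	}
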
sg                848 drivers/media/pci/intel/ipu3/ipu3-cio2.c 	struct sg_table *sg;
sg                868 drivers/media/pci/intel/ipu3/ipu3-cio2.c 	sg = vb2_dma_sg_plane_desc(vb, 0);
sg                869 drivers/media/pci/intel/ipu3/ipu3-cio2.c 	if (!sg)
sg                872 drivers/media/pci/intel/ipu3/ipu3-cio2.c 	if (sg->nents && sg->sgl)
sg                873 drivers/media/pci/intel/ipu3/ipu3-cio2.c 		b->offset = sg->sgl->offset;
sg                876 drivers/media/pci/intel/ipu3/ipu3-cio2.c 	for_each_sg_dma_page (sg->sgl, &sg_iter, sg->nents, 0) {
sg                 65 drivers/media/pci/ivtv/ivtv-udma.c 	struct scatterlist *sg;
sg                 67 drivers/media/pci/ivtv/ivtv-udma.c 	for_each_sg(dma->SGlist, sg, dma->SG_length, i) {
sg                 68 drivers/media/pci/ivtv/ivtv-udma.c 		dma->SGarray[i].size = cpu_to_le32(sg_dma_len(sg));
sg                 69 drivers/media/pci/ivtv/ivtv-udma.c 		dma->SGarray[i].src = cpu_to_le32(sg_dma_address(sg));
sg                 71 drivers/media/pci/ivtv/ivtv-udma.c 		buffer_offset += sg_dma_len(sg);
sg                 73 drivers/media/pci/ivtv/ivtv-udma.c 		split -= sg_dma_len(sg);
sg                313 drivers/media/pci/solo6x10/solo6x10-v4l2-enc.c 	struct scatterlist *sg;
sg                322 drivers/media/pci/solo6x10/solo6x10-v4l2-enc.c 	for_each_sg(vbuf->sgl, sg, vbuf->nents, i) {
sg                329 drivers/media/pci/solo6x10/solo6x10-v4l2-enc.c 		dma = sg_dma_address(sg);
sg                330 drivers/media/pci/solo6x10/solo6x10-v4l2-enc.c 		len = sg_dma_len(sg);
sg                 38 drivers/media/pci/tw68/tw68-risc.c 	struct scatterlist *sg;
sg                 54 drivers/media/pci/tw68/tw68-risc.c 	sg = sglist;
sg                 57 drivers/media/pci/tw68/tw68-risc.c 		while (offset && offset >= sg_dma_len(sg)) {
sg                 58 drivers/media/pci/tw68/tw68-risc.c 			offset -= sg_dma_len(sg);
sg                 59 drivers/media/pci/tw68/tw68-risc.c 			sg = sg_next(sg);
sg                 61 drivers/media/pci/tw68/tw68-risc.c 		if (bpl <= sg_dma_len(sg) - offset) {
sg                 65 drivers/media/pci/tw68/tw68-risc.c 			*(rp++) = cpu_to_le32(sg_dma_address(sg) + offset);
sg                 76 drivers/media/pci/tw68/tw68-risc.c 			done = (sg_dma_len(sg) - offset);
sg                 80 drivers/media/pci/tw68/tw68-risc.c 			*(rp++) = cpu_to_le32(sg_dma_address(sg) + offset);
sg                 82 drivers/media/pci/tw68/tw68-risc.c 			sg = sg_next(sg);
sg                 84 drivers/media/pci/tw68/tw68-risc.c 			while (todo > sg_dma_len(sg)) {
sg                 87 drivers/media/pci/tw68/tw68-risc.c 						sg_dma_len(sg));
sg                 88 drivers/media/pci/tw68/tw68-risc.c 				*(rp++) = cpu_to_le32(sg_dma_address(sg));
sg                 89 drivers/media/pci/tw68/tw68-risc.c 				todo -= sg_dma_len(sg);
sg                 90 drivers/media/pci/tw68/tw68-risc.c 				sg = sg_next(sg);
sg                 91 drivers/media/pci/tw68/tw68-risc.c 				done += sg_dma_len(sg);
sg                 98 drivers/media/pci/tw68/tw68-risc.c 				*(rp++) = cpu_to_le32(sg_dma_address(sg));
sg                190 drivers/media/pci/tw686x/tw686x-video.c 	struct scatterlist *sg;
sg                197 drivers/media/pci/tw686x/tw686x-video.c 	for_each_sg(vbuf->sgl, sg, vbuf->nents, i) {
sg                198 drivers/media/pci/tw686x/tw686x-video.c 		dma_addr_t phys = sg_dma_address(sg);
sg                199 drivers/media/pci/tw686x/tw686x-video.c 		len = sg_dma_len(sg);
sg               1257 drivers/media/platform/marvell-ccic/mcam-core.c 	struct scatterlist *sg;
sg               1260 drivers/media/platform/marvell-ccic/mcam-core.c 	for_each_sg(sg_table->sgl, sg, sg_table->nents, i) {
sg               1261 drivers/media/platform/marvell-ccic/mcam-core.c 		desc->dma_addr = sg_dma_address(sg);
sg               1262 drivers/media/platform/marvell-ccic/mcam-core.c 		desc->segment_len = sg_dma_len(sg);
sg                678 drivers/media/platform/pxa_camera.c 	struct scatterlist		*sg[3];
sg                861 drivers/media/platform/pxa_camera.c 				struct scatterlist *sg, int sglen)
sg                866 drivers/media/platform/pxa_camera.c 	tx = dmaengine_prep_slave_sg(dma_chan, sg, sglen, DMA_DEV_TO_MEM,
sg               1362 drivers/media/platform/pxa_camera.c 		kfree(buf->sg[i]);
sg               1364 drivers/media/platform/pxa_camera.c 		buf->sg[i] = NULL;
sg               1395 drivers/media/platform/pxa_camera.c 		       buf->plane_sizes, buf->sg, buf->sg_len, GFP_KERNEL);
sg               1403 drivers/media/platform/pxa_camera.c 					   buf->sg[i], buf->sg_len[i]);
sg                266 drivers/memstick/core/memstick.c 			  const struct scatterlist *sg)
sg                274 drivers/memstick/core/memstick.c 	mrq->sg = *sg;
sg                 94 drivers/memstick/core/ms_block.c static int msb_sg_compare_to_buffer(struct scatterlist *sg,
sg                100 drivers/memstick/core/ms_block.c 	sg_miter_start(&miter, sg, sg_nents(sg),
sg                340 drivers/memstick/core/ms_block.c 	struct scatterlist sg[2];
sg                430 drivers/memstick/core/ms_block.c 		sg_init_table(sg, ARRAY_SIZE(sg));
sg                431 drivers/memstick/core/ms_block.c 		msb_sg_copy(msb->current_sg, sg, ARRAY_SIZE(sg),
sg                435 drivers/memstick/core/ms_block.c 		memstick_init_req_sg(mrq, MS_TPC_READ_LONG_DATA, sg);
sg                476 drivers/memstick/core/ms_block.c 	struct scatterlist sg[2];
sg                555 drivers/memstick/core/ms_block.c 		sg_init_table(sg, ARRAY_SIZE(sg));
sg                557 drivers/memstick/core/ms_block.c 		if (msb_sg_copy(msb->current_sg, sg, ARRAY_SIZE(sg),
sg                562 drivers/memstick/core/ms_block.c 		memstick_init_req_sg(mrq, MS_TPC_WRITE_LONG_DATA, sg);
sg                856 drivers/memstick/core/ms_block.c 					struct scatterlist *sg,  int offset)
sg                868 drivers/memstick/core/ms_block.c 		sg_miter_start(&miter, sg, sg_nents(sg),
sg                907 drivers/memstick/core/ms_block.c 		msb->current_sg = sg;
sg                975 drivers/memstick/core/ms_block.c 	struct scatterlist sg;
sg                978 drivers/memstick/core/ms_block.c 	sg_init_one(&sg, msb->block_buffer, msb->block_size);
sg                983 drivers/memstick/core/ms_block.c 				NULL, &sg, page * msb->page_size);
sg                997 drivers/memstick/core/ms_block.c 			u16 pba, u32 lba, struct scatterlist *sg, int offset)
sg               1000 drivers/memstick/core/ms_block.c 	BUG_ON(sg->length < msb->page_size);
sg               1041 drivers/memstick/core/ms_block.c 		msb->current_sg = sg;
sg               1055 drivers/memstick/core/ms_block.c 			error = msb_verify_block(msb, pba, sg, offset);
sg               1112 drivers/memstick/core/ms_block.c 	struct scatterlist *sg, int offset)
sg               1137 drivers/memstick/core/ms_block.c 		error = msb_write_block(msb, new_pba, lba, sg, offset);
sg               1189 drivers/memstick/core/ms_block.c 	struct scatterlist sg;
sg               1213 drivers/memstick/core/ms_block.c 		sg_init_one(&sg, page, sizeof(*page));
sg               1214 drivers/memstick/core/ms_block.c 		if (msb_read_page(msb, pba, 0, &extra, &sg, 0)) {
sg               1252 drivers/memstick/core/ms_block.c 	struct scatterlist sg;
sg               1286 drivers/memstick/core/ms_block.c 	sg_init_one(&sg, buffer, size_to_read);
sg               1289 drivers/memstick/core/ms_block.c 		error = msb_read_page(msb, pba, page, NULL, &sg, offset);
sg               1528 drivers/memstick/core/ms_block.c 	struct scatterlist sg;
sg               1545 drivers/memstick/core/ms_block.c 	sg_init_one(&sg, msb->cache , msb->block_size);
sg               1557 drivers/memstick/core/ms_block.c 		error = msb_read_page(msb, pba, page, &extra, &sg, offset);
sg               1578 drivers/memstick/core/ms_block.c 	error = msb_update_block(msb, msb->cache_block_lba, &sg, 0);
sg               1600 drivers/memstick/core/ms_block.c 	int page, bool add_to_cache_only, struct scatterlist *sg, int offset)
sg               1631 drivers/memstick/core/ms_block.c 	msb_sg_copy(sg, sg_tmp, ARRAY_SIZE(sg_tmp), offset, msb->page_size);
sg               1641 drivers/memstick/core/ms_block.c 				int page, struct scatterlist *sg, int offset)
sg               1654 drivers/memstick/core/ms_block.c 		msb_sg_copy(sg, sg_tmp, ARRAY_SIZE(sg_tmp),
sg               1663 drivers/memstick/core/ms_block.c 		error = msb_read_page(msb, pba, page, NULL, sg, offset);
sg               1667 drivers/memstick/core/ms_block.c 		msb_cache_write(msb, lba, page, true, sg, offset);
sg               1803 drivers/memstick/core/ms_block.c 	int page, struct scatterlist *sg, size_t len, int *sucessfuly_written)
sg               1816 drivers/memstick/core/ms_block.c 			error = msb_update_block(msb, lba, sg, offset);
sg               1826 drivers/memstick/core/ms_block.c 		error = msb_cache_write(msb, lba, page, false, sg, offset);
sg               1843 drivers/memstick/core/ms_block.c 		int page, struct scatterlist *sg, int len, int *sucessfuly_read)
sg               1851 drivers/memstick/core/ms_block.c 		error = msb_cache_read(msb, lba, page, sg, offset);
sg               1872 drivers/memstick/core/ms_block.c 	struct scatterlist *sg = msb->prealloc_sg;
sg               1898 drivers/memstick/core/ms_block.c 		blk_rq_map_sg(msb->queue, req, sg);
sg               1906 drivers/memstick/core/ms_block.c 			error = msb_do_read_request(msb, lba, page, sg,
sg               1909 drivers/memstick/core/ms_block.c 			error = msb_do_write_request(msb, lba, page, sg,
sg                309 drivers/memstick/host/jmb38x_ms.c 		length = host->req->sg.length - host->block_pos;
sg                310 drivers/memstick/host/jmb38x_ms.c 		off = host->req->sg.offset + host->block_pos;
sg                320 drivers/memstick/host/jmb38x_ms.c 			pg = nth_page(sg_page(&host->req->sg),
sg                404 drivers/memstick/host/jmb38x_ms.c 		data_len = host->req->sg.length;
sg                418 drivers/memstick/host/jmb38x_ms.c 		if (1 != dma_map_sg(&host->chip->pdev->dev, &host->req->sg, 1,
sg                425 drivers/memstick/host/jmb38x_ms.c 		data_len = sg_dma_len(&host->req->sg);
sg                426 drivers/memstick/host/jmb38x_ms.c 		writel(sg_dma_address(&host->req->sg),
sg                486 drivers/memstick/host/jmb38x_ms.c 		dma_unmap_sg(&host->chip->pdev->dev, &host->req->sg, 1,
sg                236 drivers/memstick/host/r592.c 	r592_write_reg(dev, R592_FIFO_DMA, sg_dma_address(&dev->req->sg));
sg                283 drivers/memstick/host/r592.c 	len = dev->req->sg.length;
sg                295 drivers/memstick/host/r592.c 	sg_count = dma_map_sg(&dev->pci_dev->dev, &dev->req->sg, 1, is_write ?
sg                298 drivers/memstick/host/r592.c 	if (sg_count != 1 || sg_dma_len(&dev->req->sg) < R592_LFIFO_SIZE) {
sg                312 drivers/memstick/host/r592.c 	dma_unmap_sg(&dev->pci_dev->dev, &dev->req->sg, 1, is_write ?
sg                432 drivers/memstick/host/r592.c 	sg_miter_start(&miter, &dev->req->sg, 1, SG_MITER_ATOMIC |
sg                466 drivers/memstick/host/r592.c 		dev->req->sg.length : dev->req->data_len;
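
The r592 and ms_block hits above use the sg_miter interface for PIO: map one chunk at a time, copy through miter.addr, and report how much was consumed. A sketch of the read direction; SG_MITER_ATOMIC means no sleeping between next and stop:

	#include <linux/kernel.h>
	#include <linux/scatterlist.h>
	#include <linux/string.h>

	static void sg_pio_read(struct scatterlist *sgl, void *dst, size_t len)
	{
		struct sg_mapping_iter miter;

		sg_miter_start(&miter, sgl, sg_nents(sgl),
			       SG_MITER_ATOMIC | SG_MITER_FROM_SG);
		while (len && sg_miter_next(&miter)) {
			size_t n = min(miter.length, len);

			memcpy(dst, miter.addr, n);
			dst += n;
			len -= n;
			miter.consumed = n;	/* tell the iterator how far we got */
		}
		sg_miter_stop(&miter);
	}
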
sg                129 drivers/memstick/host/rtsx_pci_ms.c 		u8 tpc, u8 cfg, struct scatterlist *sg)
sg                133 drivers/memstick/host/rtsx_pci_ms.c 	unsigned int length = sg->length;
sg                180 drivers/memstick/host/rtsx_pci_ms.c 	err = rtsx_pci_transfer_data(pcr, sg, 1, data_dir == READ, 10000);
sg                353 drivers/memstick/host/rtsx_pci_ms.c 				req->tpc, cfg, &(req->sg));
sg                231 drivers/memstick/host/rtsx_usb_ms.c 		u8 tpc, u8 cfg, struct scatterlist *sg)
sg                235 drivers/memstick/host/rtsx_usb_ms.c 	unsigned int length = sg->length;
sg                296 drivers/memstick/host/rtsx_usb_ms.c 	err = rtsx_usb_transfer_data(ucr, pipe, sg, length,
sg                471 drivers/memstick/host/rtsx_usb_ms.c 				req->tpc, cfg, &(req->sg));
sg                191 drivers/memstick/host/tifm_ms.c 		length = host->req->sg.length - host->block_pos;
sg                192 drivers/memstick/host/tifm_ms.c 		off = host->req->sg.offset + host->block_pos;
sg                204 drivers/memstick/host/tifm_ms.c 			pg = nth_page(sg_page(&host->req->sg),
sg                266 drivers/memstick/host/tifm_ms.c 		data_len = host->req->sg.length;
sg                280 drivers/memstick/host/tifm_ms.c 		if (1 != tifm_map_sg(sock, &host->req->sg, 1,
sg                287 drivers/memstick/host/tifm_ms.c 		data_len = sg_dma_len(&host->req->sg);
sg                300 drivers/memstick/host/tifm_ms.c 		writel(sg_dma_address(&host->req->sg),
sg                351 drivers/memstick/host/tifm_ms.c 		tifm_unmap_sg(sock, &host->req->sg, 1,
sg                608 drivers/memstick/host/tifm_ms.c 			tifm_unmap_sg(sock, &host->req->sg, 1,
sg               1160 drivers/message/fusion/mptctl.c 	MptSge_t	*sg = sgl;
sg               1166 drivers/message/fusion/mptctl.c 	if (sg->FlagsLength & 0x04000000)
sg               1171 drivers/message/fusion/mptctl.c 	nib = (sg->FlagsLength & 0xF0000000) >> 28;
sg               1176 drivers/message/fusion/mptctl.c 		} else if (sg->Address) {
sg               1181 drivers/message/fusion/mptctl.c 			dma_addr = sg->Address;
sg               1188 drivers/message/fusion/mptctl.c 		sg++;
sg               1190 drivers/message/fusion/mptctl.c 		nib = (le32_to_cpu(sg->FlagsLength) & 0xF0000000) >> 28;
sg               1194 drivers/message/fusion/mptctl.c 	if (sg->Address) {
sg               1199 drivers/message/fusion/mptctl.c 		dma_addr = sg->Address;
sg                184 drivers/message/fusion/mptscsih.c 	struct scatterlist *sg;
sg                216 drivers/message/fusion/mptscsih.c 	sg = scsi_sglist(SCpnt);
sg                238 drivers/message/fusion/mptscsih.c 		thisxfer = sg_dma_len(sg);
sg                241 drivers/message/fusion/mptscsih.c 			sg = sg_next(sg);
sg                246 drivers/message/fusion/mptscsih.c 		v2 = sg_dma_address(sg);
sg                250 drivers/message/fusion/mptscsih.c 		sg = sg_next(sg);
sg                266 drivers/message/fusion/mptscsih.c 		thisxfer = sg_dma_len(sg);
sg                268 drivers/message/fusion/mptscsih.c 		v2 = sg_dma_address(sg);
sg                497 drivers/misc/cardreader/rtsx_pcr.c 	struct scatterlist *sg;
sg                514 drivers/misc/cardreader/rtsx_pcr.c 	for_each_sg(sglist, sg, count, i) {
sg                515 drivers/misc/cardreader/rtsx_pcr.c 		addr = sg_dma_address(sg);
sg                516 drivers/misc/cardreader/rtsx_pcr.c 		len = sg_dma_len(sg);
sg                 41 drivers/misc/cardreader/rtsx_usb.c 		unsigned int pipe, struct scatterlist *sg, int num_sg,
sg                 49 drivers/misc/cardreader/rtsx_usb.c 			sg, num_sg, length, GFP_NOIO);
sg               3110 drivers/misc/habanalabs/goya/goya.c 	struct scatterlist *sg;
sg               3117 drivers/misc/habanalabs/goya/goya.c 	for_each_sg(sgl, sg, nents, i)
sg               3118 drivers/misc/habanalabs/goya/goya.c 		sg->dma_address += HOST_PHYS_BASE;
sg               3126 drivers/misc/habanalabs/goya/goya.c 	struct scatterlist *sg;
sg               3130 drivers/misc/habanalabs/goya/goya.c 	for_each_sg(sgl, sg, nents, i)
sg               3131 drivers/misc/habanalabs/goya/goya.c 		sg->dma_address -= HOST_PHYS_BASE;
sg               3138 drivers/misc/habanalabs/goya/goya.c 	struct scatterlist *sg, *sg_next_iter;
sg               3145 drivers/misc/habanalabs/goya/goya.c 	for_each_sg(sgt->sgl, sg, sgt->nents, count) {
sg               3147 drivers/misc/habanalabs/goya/goya.c 		len = sg_dma_len(sg);
sg               3148 drivers/misc/habanalabs/goya/goya.c 		addr = sg_dma_address(sg);
sg               3154 drivers/misc/habanalabs/goya/goya.c 			sg_next_iter = sg_next(sg);
sg               3165 drivers/misc/habanalabs/goya/goya.c 				sg = sg_next_iter;
sg               3596 drivers/misc/habanalabs/goya/goya.c 	struct scatterlist *sg, *sg_next_iter;
sg               3657 drivers/misc/habanalabs/goya/goya.c 	for_each_sg(sgt->sgl, sg, sgt->nents, count) {
sg               3658 drivers/misc/habanalabs/goya/goya.c 		len = sg_dma_len(sg);
sg               3659 drivers/misc/habanalabs/goya/goya.c 		dma_addr = sg_dma_address(sg);
sg               3665 drivers/misc/habanalabs/goya/goya.c 			sg_next_iter = sg_next(sg);
sg               3676 drivers/misc/habanalabs/goya/goya.c 				sg = sg_next_iter;
sg                624 drivers/misc/habanalabs/memory.c static u32 get_sg_info(struct scatterlist *sg, dma_addr_t *dma_addr)
sg                626 drivers/misc/habanalabs/memory.c 	*dma_addr = sg_dma_address(sg);
sg                628 drivers/misc/habanalabs/memory.c 	return ((((*dma_addr) & (PAGE_SIZE - 1)) + sg_dma_len(sg)) +
sg                650 drivers/misc/habanalabs/memory.c 	struct scatterlist *sg;
sg                673 drivers/misc/habanalabs/memory.c 	for_each_sg(userptr->sgt->sgl, sg, userptr->sgt->nents, i) {
sg                674 drivers/misc/habanalabs/memory.c 		npages = get_sg_info(sg, &dma_addr);
sg                702 drivers/misc/habanalabs/memory.c 	for_each_sg(userptr->sgt->sgl, sg, userptr->sgt->nents, i) {
sg                703 drivers/misc/habanalabs/memory.c 		npages = get_sg_info(sg, &dma_addr);
sg                187 drivers/misc/mic/host/mic_boot.c static int __mic_dma_map_sg(struct device *dev, struct scatterlist *sg,
sg                197 drivers/misc/mic/host/mic_boot.c 	ret = dma_map_sg(&mdev->pdev->dev, sg, nents, dir);
sg                201 drivers/misc/mic/host/mic_boot.c 	for_each_sg(sg, s, nents, i) {
sg                209 drivers/misc/mic/host/mic_boot.c 	for_each_sg(sg, s, i, j) {
sg                213 drivers/misc/mic/host/mic_boot.c 	dma_unmap_sg(&mdev->pdev->dev, sg, nents, dir);
sg                218 drivers/misc/mic/host/mic_boot.c 			       struct scatterlist *sg, int nents,
sg                228 drivers/misc/mic/host/mic_boot.c 	for_each_sg(sg, s, nents, i) {
sg                233 drivers/misc/mic/host/mic_boot.c 	dma_unmap_sg(&mdev->pdev->dev, sg, nents, dir);
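
Both the habanalabs/goya and mic hits above post-process a mapped list, shifting each dma_address into the device's view of host memory and restoring it before unmap (goya adds/subtracts HOST_PHYS_BASE). A sketch of that pairing with an illustrative `offset`; the restore before dma_unmap_sg() matters because the core uses the stored DMA addresses to tear the mapping down:

	#include <linux/dma-mapping.h>
	#include <linux/scatterlist.h>

	static int map_sg_offset(struct device *dev, struct scatterlist *sgl,
				 int nents, enum dma_data_direction dir,
				 u64 offset)
	{
		struct scatterlist *s;
		int i, mapped;

		mapped = dma_map_sg(dev, sgl, nents, dir);
		if (!mapped)
			return -ENOMEM;
		for_each_sg(sgl, s, mapped, i)
			s->dma_address += offset;	/* device-visible base */
		return mapped;
	}

	static void unmap_sg_offset(struct device *dev, struct scatterlist *sgl,
				    int nents, enum dma_data_direction dir,
				    u64 offset)
	{
		struct scatterlist *s;
		int i;

		for_each_sg(sgl, s, nents, i)
			s->dma_address -= offset;	/* back to CPU view */
		dma_unmap_sg(dev, sgl, nents, dir);
	}
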
sg                 43 drivers/misc/mic/scif/scif_debugfs.c 	struct scatterlist *sg;
sg                 66 drivers/misc/mic/scif/scif_debugfs.c 		for_each_sg(window->st->sgl, sg, window->st->nents, j)
sg                 68 drivers/misc/mic/scif/scif_debugfs.c 				   j, sg_dma_address(sg), sg_dma_len(sg));
sg                354 drivers/misc/mic/scif/scif_nodeqp.c static void scif_p2p_freesg(struct scatterlist *sg)
sg                356 drivers/misc/mic/scif/scif_nodeqp.c 	kfree(sg);
sg                362 drivers/misc/mic/scif/scif_nodeqp.c 	struct scatterlist *sg;
sg                366 drivers/misc/mic/scif/scif_nodeqp.c 	sg = kcalloc(page_cnt, sizeof(struct scatterlist), GFP_KERNEL);
sg                367 drivers/misc/mic/scif/scif_nodeqp.c 	if (!sg)
sg                369 drivers/misc/mic/scif/scif_nodeqp.c 	sg_init_table(sg, page_cnt);
sg                372 drivers/misc/mic/scif/scif_nodeqp.c 		sg_set_page(&sg[i], page, page_size, 0);
sg                375 drivers/misc/mic/scif/scif_nodeqp.c 	return sg;
sg                527 drivers/misc/mic/scif/scif_rma.c 	struct scatterlist *sg;
sg                539 drivers/misc/mic/scif/scif_rma.c 	for_each_sg(window->st->sgl, sg, window->st->nents, i)
sg                540 drivers/misc/mic/scif/scif_rma.c 		sg_set_page(sg, pin->pages[i], PAGE_SIZE, 0x0);
sg                547 drivers/misc/mic/scif/scif_rma.c 	sg = window->st->sgl;
sg                548 drivers/misc/mic/scif/scif_rma.c 	for (i = 0; sg; i++) {
sg                551 drivers/misc/mic/scif/scif_rma.c 		window->dma_addr[i] = sg_dma_address(sg);
sg                552 drivers/misc/mic/scif/scif_rma.c 		window->num_pages[i] = sg_dma_len(sg) >> PAGE_SHIFT;
sg                553 drivers/misc/mic/scif/scif_rma.c 		last_da = sg_dma_address(sg) + sg_dma_len(sg);
sg                554 drivers/misc/mic/scif/scif_rma.c 		while ((sg = sg_next(sg)) && sg_dma_address(sg) == last_da) {
sg                556 drivers/misc/mic/scif/scif_rma.c 				(sg_dma_len(sg) >> PAGE_SHIFT);
sg                558 drivers/misc/mic/scif/scif_rma.c 				sg_dma_len(sg);
sg                294 drivers/misc/tifm_core.c int tifm_map_sg(struct tifm_dev *sock, struct scatterlist *sg, int nents,
sg                297 drivers/misc/tifm_core.c 	return pci_map_sg(to_pci_dev(sock->dev.parent), sg, nents, direction);
sg                301 drivers/misc/tifm_core.c void tifm_unmap_sg(struct tifm_dev *sock, struct scatterlist *sg, int nents,
sg                304 drivers/misc/tifm_core.c 	pci_unmap_sg(to_pci_dev(sock->dev.parent), sg, nents, direction);
sg                497 drivers/mmc/core/block.c 	struct scatterlist sg;
sg                523 drivers/mmc/core/block.c 		data.sg = &sg;
sg                528 drivers/mmc/core/block.c 		sg_init_one(data.sg, idata->buf, idata->buf_bytes);
sg                913 drivers/mmc/core/block.c 	struct scatterlist sg;
sg                934 drivers/mmc/core/block.c 	data.sg = &sg;
sg                945 drivers/mmc/core/block.c 	sg_init_one(&sg, blocks, 4);
sg               1383 drivers/mmc/core/block.c 	brq->data.sg = mqrq->sg;
sg               1392 drivers/mmc/core/block.c 		struct scatterlist *sg;
sg               1394 drivers/mmc/core/block.c 		for_each_sg(brq->data.sg, sg, brq->data.sg_len, i) {
sg               1395 drivers/mmc/core/block.c 			data_size -= sg->length;
sg               1397 drivers/mmc/core/block.c 				sg->length += data_size;
sg                301 drivers/mmc/core/core.c 	struct scatterlist *sg;
sg                318 drivers/mmc/core/core.c 		for_each_sg(mrq->data->sg, sg, mrq->data->sg_len, i)
sg                319 drivers/mmc/core/core.c 			sz += sg->length;
sg                254 drivers/mmc/core/mmc_ops.c 	struct scatterlist sg;
sg                272 drivers/mmc/core/mmc_ops.c 	data.sg = &sg;
sg                275 drivers/mmc/core/mmc_ops.c 	sg_init_one(&sg, buf, len);
sg                616 drivers/mmc/core/mmc_ops.c 	struct scatterlist sg;
sg                652 drivers/mmc/core/mmc_ops.c 	data.sg = &sg;
sg                654 drivers/mmc/core/mmc_ops.c 	sg_init_one(&sg, data_buf, size);
sg                712 drivers/mmc/core/mmc_ops.c 	struct scatterlist sg;
sg                759 drivers/mmc/core/mmc_ops.c 	data.sg = &sg;
sg                762 drivers/mmc/core/mmc_ops.c 	sg_init_one(&sg, data_buf, len);
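
The mmc_ops hits above repeat one pattern: wrap a kernel buffer in a single scatterlist entry and tie cmd and data together in an mmc_request. A sketch of a one-block read built that way; the opcode, argument, and response flags are illustrative:

	#include <linux/mmc/core.h>
	#include <linux/mmc/host.h>
	#include <linux/scatterlist.h>

	static int read_one_block(struct mmc_host *host, u32 opcode, u32 arg,
				  void *buf, unsigned int len)
	{
		struct mmc_request mrq = {};
		struct mmc_command cmd = {};
		struct mmc_data data = {};
		struct scatterlist sg;

		cmd.opcode = opcode;
		cmd.arg = arg;
		cmd.flags = MMC_RSP_R1 | MMC_CMD_ADTC;

		data.blksz = len;
		data.blocks = 1;
		data.flags = MMC_DATA_READ;
		data.sg = &sg;
		data.sg_len = 1;
		sg_init_one(&sg, buf, len);

		mrq.cmd = &cmd;
		mrq.data = &data;
		mmc_wait_for_req(host, &mrq);	/* blocks until done */
		return cmd.error ? cmd.error : data.error;
	}
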
sg                 84 drivers/mmc/core/mmc_test.c 	struct scatterlist *sg;
sg                209 drivers/mmc/core/mmc_test.c 	struct mmc_request *mrq, struct scatterlist *sg, unsigned sg_len,
sg                240 drivers/mmc/core/mmc_test.c 	mrq->data->sg = sg;
sg                296 drivers/mmc/core/mmc_test.c 	struct scatterlist sg;
sg                302 drivers/mmc/core/mmc_test.c 	sg_init_one(&sg, buffer, blksz);
sg                304 drivers/mmc/core/mmc_test.c 	mmc_test_prepare_mrq(test, &mrq, &sg, 1, addr, 1, blksz, write);
sg                412 drivers/mmc/core/mmc_test.c 	struct scatterlist *sg = NULL;
sg                431 drivers/mmc/core/mmc_test.c 			if (sg)
sg                432 drivers/mmc/core/mmc_test.c 				sg = sg_next(sg);
sg                434 drivers/mmc/core/mmc_test.c 				sg = sglist;
sg                435 drivers/mmc/core/mmc_test.c 			if (!sg)
sg                437 drivers/mmc/core/mmc_test.c 			sg_set_page(sg, mem->arr[i].page, len, 0);
sg                448 drivers/mmc/core/mmc_test.c 	if (sg)
sg                449 drivers/mmc/core/mmc_test.c 		sg_mark_end(sg);
sg                465 drivers/mmc/core/mmc_test.c 	struct scatterlist *sg = NULL;
sg                486 drivers/mmc/core/mmc_test.c 			if (sg)
sg                487 drivers/mmc/core/mmc_test.c 				sg = sg_next(sg);
sg                489 drivers/mmc/core/mmc_test.c 				sg = sglist;
sg                490 drivers/mmc/core/mmc_test.c 			if (!sg)
sg                492 drivers/mmc/core/mmc_test.c 			sg_set_page(sg, virt_to_page(addr), len, 0);
sg                500 drivers/mmc/core/mmc_test.c 	if (sg)
sg                501 drivers/mmc/core/mmc_test.c 		sg_mark_end(sg);
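
The mmc_test loops above build a bounded table entry by entry and terminate it explicitly. A condensed sketch of that shape, without the test rig's page-reuse wrap: sg_init_table(), one sg_set_page() per chunk, then sg_mark_end() so consumers see only the filled prefix:

	#include <linux/scatterlist.h>

	static unsigned int fill_sg_table(struct scatterlist *sgl,
					  unsigned int max_ents,
					  struct page **pages, unsigned int npages,
					  unsigned int len_per_page)
	{
		struct scatterlist *sg = NULL;
		unsigned int i, used = 0;

		sg_init_table(sgl, max_ents);
		for (i = 0; i < npages && used < max_ents; i++) {
			sg = sg ? sg_next(sg) : sgl;
			sg_set_page(sg, pages[i], len_per_page, 0);
			used++;
		}
		if (sg)
			sg_mark_end(sg);	/* truncate to the filled prefix */
		return used;
	}
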
sg                839 drivers/mmc/core/mmc_test.c 				      struct scatterlist *sg, unsigned sg_len,
sg                860 drivers/mmc/core/mmc_test.c 		mmc_test_prepare_mrq(test, mrq, sg, sg_len, dev_addr, blocks,
sg                884 drivers/mmc/core/mmc_test.c 	struct scatterlist *sg, unsigned sg_len, unsigned dev_addr,
sg                896 drivers/mmc/core/mmc_test.c 	mmc_test_prepare_mrq(test, &mrq, sg, sg_len, dev_addr,
sg                917 drivers/mmc/core/mmc_test.c 	struct scatterlist sg;
sg                923 drivers/mmc/core/mmc_test.c 	sg_init_one(&sg, test->buffer, blocks * blksz);
sg                925 drivers/mmc/core/mmc_test.c 	mmc_test_prepare_mrq(test, &mrq, &sg, 1, 0, blocks, blksz, write);
sg                941 drivers/mmc/core/mmc_test.c 	struct scatterlist *sg, unsigned sg_len, unsigned dev_addr,
sg                954 drivers/mmc/core/mmc_test.c 	sg_copy_from_buffer(sg, sg_len, test->scratch, BUFFER_SIZE);
sg                961 drivers/mmc/core/mmc_test.c 	ret = mmc_test_simple_transfer(test, sg, sg_len, dev_addr,
sg               1001 drivers/mmc/core/mmc_test.c 		sg_copy_to_buffer(sg, sg_len, test->scratch, BUFFER_SIZE);
sg               1027 drivers/mmc/core/mmc_test.c 	struct scatterlist sg;
sg               1033 drivers/mmc/core/mmc_test.c 	sg_init_one(&sg, test->buffer, 512);
sg               1035 drivers/mmc/core/mmc_test.c 	return mmc_test_simple_transfer(test, &sg, 1, 0, 1, 512, 1);
sg               1041 drivers/mmc/core/mmc_test.c 	struct scatterlist sg;
sg               1047 drivers/mmc/core/mmc_test.c 	sg_init_one(&sg, test->buffer, 512);
sg               1049 drivers/mmc/core/mmc_test.c 	return mmc_test_simple_transfer(test, &sg, 1, 0, 1, 512, 0);
sg               1054 drivers/mmc/core/mmc_test.c 	struct scatterlist sg;
sg               1056 drivers/mmc/core/mmc_test.c 	sg_init_one(&sg, test->buffer, 512);
sg               1058 drivers/mmc/core/mmc_test.c 	return mmc_test_transfer(test, &sg, 1, 0, 1, 512, 1);
sg               1063 drivers/mmc/core/mmc_test.c 	struct scatterlist sg;
sg               1065 drivers/mmc/core/mmc_test.c 	sg_init_one(&sg, test->buffer, 512);
sg               1067 drivers/mmc/core/mmc_test.c 	return mmc_test_transfer(test, &sg, 1, 0, 1, 512, 0);
sg               1073 drivers/mmc/core/mmc_test.c 	struct scatterlist sg;
sg               1086 drivers/mmc/core/mmc_test.c 	sg_init_one(&sg, test->buffer, size);
sg               1088 drivers/mmc/core/mmc_test.c 	return mmc_test_transfer(test, &sg, 1, 0, size / 512, 512, 1);
sg               1094 drivers/mmc/core/mmc_test.c 	struct scatterlist sg;
sg               1107 drivers/mmc/core/mmc_test.c 	sg_init_one(&sg, test->buffer, size);
sg               1109 drivers/mmc/core/mmc_test.c 	return mmc_test_transfer(test, &sg, 1, 0, size / 512, 512, 0);
sg               1115 drivers/mmc/core/mmc_test.c 	struct scatterlist sg;
sg               1121 drivers/mmc/core/mmc_test.c 		sg_init_one(&sg, test->buffer, i);
sg               1122 drivers/mmc/core/mmc_test.c 		ret = mmc_test_transfer(test, &sg, 1, 0, 1, i, 1);
sg               1133 drivers/mmc/core/mmc_test.c 	struct scatterlist sg;
sg               1139 drivers/mmc/core/mmc_test.c 		sg_init_one(&sg, test->buffer, i);
sg               1140 drivers/mmc/core/mmc_test.c 		ret = mmc_test_transfer(test, &sg, 1, 0, 1, i, 0);
sg               1151 drivers/mmc/core/mmc_test.c 	struct scatterlist sg;
sg               1157 drivers/mmc/core/mmc_test.c 		sg_init_one(&sg, test->buffer, i);
sg               1158 drivers/mmc/core/mmc_test.c 		ret = mmc_test_transfer(test, &sg, 1, 0, 1, i, 1);
sg               1169 drivers/mmc/core/mmc_test.c 	struct scatterlist sg;
sg               1175 drivers/mmc/core/mmc_test.c 		sg_init_one(&sg, test->buffer, i);
sg               1176 drivers/mmc/core/mmc_test.c 		ret = mmc_test_transfer(test, &sg, 1, 0, 1, i, 0);
sg               1187 drivers/mmc/core/mmc_test.c 	struct scatterlist sg;
sg               1190 drivers/mmc/core/mmc_test.c 		sg_init_one(&sg, test->buffer + i, 512);
sg               1191 drivers/mmc/core/mmc_test.c 		ret = mmc_test_transfer(test, &sg, 1, 0, 1, 512, 1);
sg               1202 drivers/mmc/core/mmc_test.c 	struct scatterlist sg;
sg               1205 drivers/mmc/core/mmc_test.c 		sg_init_one(&sg, test->buffer + i, 512);
sg               1206 drivers/mmc/core/mmc_test.c 		ret = mmc_test_transfer(test, &sg, 1, 0, 1, 512, 0);
sg               1218 drivers/mmc/core/mmc_test.c 	struct scatterlist sg;
sg               1232 drivers/mmc/core/mmc_test.c 		sg_init_one(&sg, test->buffer + i, size);
sg               1233 drivers/mmc/core/mmc_test.c 		ret = mmc_test_transfer(test, &sg, 1, 0, size / 512, 512, 1);
sg               1245 drivers/mmc/core/mmc_test.c 	struct scatterlist sg;
sg               1259 drivers/mmc/core/mmc_test.c 		sg_init_one(&sg, test->buffer + i, size);
sg               1260 drivers/mmc/core/mmc_test.c 		ret = mmc_test_transfer(test, &sg, 1, 0, size / 512, 512, 0);
sg               1322 drivers/mmc/core/mmc_test.c 	struct scatterlist sg;
sg               1324 drivers/mmc/core/mmc_test.c 	sg_init_table(&sg, 1);
sg               1325 drivers/mmc/core/mmc_test.c 	sg_set_page(&sg, test->highmem, 512, 0);
sg               1327 drivers/mmc/core/mmc_test.c 	return mmc_test_transfer(test, &sg, 1, 0, 1, 512, 1);
sg               1332 drivers/mmc/core/mmc_test.c 	struct scatterlist sg;
sg               1334 drivers/mmc/core/mmc_test.c 	sg_init_table(&sg, 1);
sg               1335 drivers/mmc/core/mmc_test.c 	sg_set_page(&sg, test->highmem, 512, 0);
sg               1337 drivers/mmc/core/mmc_test.c 	return mmc_test_transfer(test, &sg, 1, 0, 1, 512, 0);
sg               1343 drivers/mmc/core/mmc_test.c 	struct scatterlist sg;
sg               1356 drivers/mmc/core/mmc_test.c 	sg_init_table(&sg, 1);
sg               1357 drivers/mmc/core/mmc_test.c 	sg_set_page(&sg, test->highmem, size, 0);
sg               1359 drivers/mmc/core/mmc_test.c 	return mmc_test_transfer(test, &sg, 1, 0, size / 512, 512, 1);
sg               1365 drivers/mmc/core/mmc_test.c 	struct scatterlist sg;
sg               1378 drivers/mmc/core/mmc_test.c 	sg_init_table(&sg, 1);
sg               1379 drivers/mmc/core/mmc_test.c 	sg_set_page(&sg, test->highmem, size, 0);
sg               1381 drivers/mmc/core/mmc_test.c 	return mmc_test_transfer(test, &sg, 1, 0, size / 512, 512, 0);
sg               1407 drivers/mmc/core/mmc_test.c 		err = mmc_test_map_sg_max_scatter(t->mem, sz, t->sg,
sg               1411 drivers/mmc/core/mmc_test.c 		err = mmc_test_map_sg(t->mem, sz, t->sg, 1, t->max_segs,
sg               1428 drivers/mmc/core/mmc_test.c 	return mmc_test_simple_transfer(test, t->sg, t->sg_len, dev_addr,
sg               1468 drivers/mmc/core/mmc_test.c 		ret = mmc_test_nonblock_transfer(test, t->sg, t->sg_len,
sg               1527 drivers/mmc/core/mmc_test.c 	kfree(t->sg);
sg               1581 drivers/mmc/core/mmc_test.c 	t->sg = kmalloc_array(t->max_segs, sizeof(*t->sg), GFP_KERNEL);
sg               1582 drivers/mmc/core/mmc_test.c 	if (!t->sg) {
sg               2369 drivers/mmc/core/mmc_test.c 	mmc_test_prepare_mrq(test, mrq, t->sg, t->sg_len, dev_addr, t->blocks,
sg                164 drivers/mmc/core/queue.c 	struct scatterlist *sg;
sg                166 drivers/mmc/core/queue.c 	sg = kmalloc_array(sg_len, sizeof(*sg), gfp);
sg                167 drivers/mmc/core/queue.c 	if (sg)
sg                168 drivers/mmc/core/queue.c 		sg_init_table(sg, sg_len);
sg                170 drivers/mmc/core/queue.c 	return sg;
sg                211 drivers/mmc/core/queue.c 	mq_rq->sg = mmc_alloc_sg(mmc_get_max_segments(host), gfp);
sg                212 drivers/mmc/core/queue.c 	if (!mq_rq->sg)
sg                222 drivers/mmc/core/queue.c 	kfree(mq_rq->sg);
sg                223 drivers/mmc/core/queue.c 	mq_rq->sg = NULL;
sg                523 drivers/mmc/core/queue.c 	return blk_rq_map_sg(mq->queue, req, mqrq->sg);
sg                 66 drivers/mmc/core/queue.h 	struct scatterlist	*sg;
sg                216 drivers/mmc/core/sd_ops.c 	struct scatterlist sg;
sg                242 drivers/mmc/core/sd_ops.c 	data.sg = &sg;
sg                245 drivers/mmc/core/sd_ops.c 	sg_init_one(&sg, scr, 8);
sg                270 drivers/mmc/core/sd_ops.c 	struct scatterlist sg;
sg                289 drivers/mmc/core/sd_ops.c 	data.sg = &sg;
sg                292 drivers/mmc/core/sd_ops.c 	sg_init_one(&sg, resp, 64);
sg                312 drivers/mmc/core/sd_ops.c 	struct scatterlist sg;
sg                330 drivers/mmc/core/sd_ops.c 	data.sg = &sg;
sg                333 drivers/mmc/core/sd_ops.c 	sg_init_one(&sg, ssr, 64);
sg                120 drivers/mmc/core/sdio_ops.c 	struct scatterlist sg, *sg_ptr;
sg                156 drivers/mmc/core/sdio_ops.c 		data.sg = sgtable.sgl;
sg                159 drivers/mmc/core/sdio_ops.c 		for_each_sg(data.sg, sg_ptr, data.sg_len, i) {
sg                165 drivers/mmc/core/sdio_ops.c 		data.sg = &sg;
sg                168 drivers/mmc/core/sdio_ops.c 		sg_init_one(&sg, buf, left_size);
sg                 56 drivers/mmc/host/alcor.c 	struct scatterlist *sg;
sg                130 drivers/mmc/host/alcor.c 	if (!host->sg) {
sg                135 drivers/mmc/host/alcor.c 	if (!sg_dma_len(host->sg)) {
sg                141 drivers/mmc/host/alcor.c 	addr = (u32)sg_dma_address(host->sg);
sg                144 drivers/mmc/host/alcor.c 	host->sg = sg_next(host->sg);
sg                235 drivers/mmc/host/alcor.c 	sg_miter_start(&host->sg_miter, data->sg, data->sg_len, flags);
sg                251 drivers/mmc/host/alcor.c 	host->sg = data->sg;
sg                781 drivers/mmc/host/alcor.c 	struct scatterlist *sg;
sg                808 drivers/mmc/host/alcor.c 	for_each_sg(data->sg, sg, data->sg_len, i) {
sg                809 drivers/mmc/host/alcor.c 		if (sg->length != AU6601_MAX_DMA_BLOCK_SIZE)
sg                811 drivers/mmc/host/alcor.c 		if (sg->offset != 0)
sg                817 drivers/mmc/host/alcor.c 	sg_len = dma_map_sg(host->dev, data->sg, data->sg_len,
sg                837 drivers/mmc/host/alcor.c 			     data->sg,
sg                216 drivers/mmc/host/android-goldfish.c 			sg_copy_from_buffer(data->sg, 1, host->virt_base,
sg                217 drivers/mmc/host/android-goldfish.c 					data->sg->length);
sg                219 drivers/mmc/host/android-goldfish.c 		host->data->bytes_xfered += data->sg->length;
sg                220 drivers/mmc/host/android-goldfish.c 		dma_unmap_sg(mmc_dev(mmc_from_priv(host)), data->sg,
sg                383 drivers/mmc/host/android-goldfish.c 	host->sg_len = dma_map_sg(mmc_dev(mmc_from_priv(host)), data->sg,
sg                393 drivers/mmc/host/android-goldfish.c 		sg_copy_to_buffer(data->sg, 1, host->virt_base,
sg                394 drivers/mmc/host/android-goldfish.c 				data->sg->length);
sg                307 drivers/mmc/host/atmel-mci.c 	struct scatterlist	*sg;
sg                856 drivers/mmc/host/atmel-mci.c 		buf_size = sg_dma_len(host->sg);
sg                857 drivers/mmc/host/atmel-mci.c 		atmci_writel(host, pointer_reg, sg_dma_address(host->sg));
sg                872 drivers/mmc/host/atmel-mci.c 		atmci_writel(host, counter_reg, sg_dma_len(host->sg) / 4);
sg                873 drivers/mmc/host/atmel-mci.c 		host->data_size -= sg_dma_len(host->sg);
sg                875 drivers/mmc/host/atmel-mci.c 			host->sg = sg_next(host->sg);
sg                900 drivers/mmc/host/atmel-mci.c 				data->sg, data->sg_len,
sg                921 drivers/mmc/host/atmel-mci.c 		sg_copy_from_buffer(host->data->sg, host->data->sg_len,
sg                938 drivers/mmc/host/atmel-mci.c 				data->sg, data->sg_len,
sg               1002 drivers/mmc/host/atmel-mci.c 	host->sg = data->sg;
sg               1044 drivers/mmc/host/atmel-mci.c 	host->sg = data->sg;
sg               1063 drivers/mmc/host/atmel-mci.c 	dma_map_sg(&host->pdev->dev, data->sg, data->sg_len,
sg               1068 drivers/mmc/host/atmel-mci.c 		sg_copy_to_buffer(host->data->sg, host->data->sg_len,
sg               1086 drivers/mmc/host/atmel-mci.c 	struct scatterlist		*sg;
sg               1096 drivers/mmc/host/atmel-mci.c 	host->sg = NULL;
sg               1111 drivers/mmc/host/atmel-mci.c 	for_each_sg(data->sg, sg, data->sg_len, i) {
sg               1112 drivers/mmc/host/atmel-mci.c 		if (sg->offset & 3 || sg->length & 3)
sg               1138 drivers/mmc/host/atmel-mci.c 	sglen = dma_map_sg(chan->device->dev, data->sg,
sg               1143 drivers/mmc/host/atmel-mci.c 			data->sg, sglen, slave_dirn,
sg               1154 drivers/mmc/host/atmel-mci.c 	dma_unmap_sg(chan->device->dev, data->sg, data->sg_len,
sg               1925 drivers/mmc/host/atmel-mci.c 	struct scatterlist	*sg = host->sg;
sg               1934 drivers/mmc/host/atmel-mci.c 		if (likely(offset + 4 <= sg->length)) {
sg               1935 drivers/mmc/host/atmel-mci.c 			sg_pcopy_from_buffer(sg, 1, &value, sizeof(u32), offset);
sg               1940 drivers/mmc/host/atmel-mci.c 			if (offset == sg->length) {
sg               1941 drivers/mmc/host/atmel-mci.c 				flush_dcache_page(sg_page(sg));
sg               1942 drivers/mmc/host/atmel-mci.c 				host->sg = sg = sg_next(sg);
sg               1944 drivers/mmc/host/atmel-mci.c 				if (!sg || !host->sg_len)
sg               1950 drivers/mmc/host/atmel-mci.c 			unsigned int remaining = sg->length - offset;
sg               1952 drivers/mmc/host/atmel-mci.c 			sg_pcopy_from_buffer(sg, 1, &value, remaining, offset);
sg               1955 drivers/mmc/host/atmel-mci.c 			flush_dcache_page(sg_page(sg));
sg               1956 drivers/mmc/host/atmel-mci.c 			host->sg = sg = sg_next(sg);
sg               1958 drivers/mmc/host/atmel-mci.c 			if (!sg || !host->sg_len)
sg               1962 drivers/mmc/host/atmel-mci.c 			sg_pcopy_from_buffer(sg, 1, (u8 *)&value + remaining,
sg               1992 drivers/mmc/host/atmel-mci.c 	struct scatterlist	*sg = host->sg;
sg               2000 drivers/mmc/host/atmel-mci.c 		if (likely(offset + 4 <= sg->length)) {
sg               2001 drivers/mmc/host/atmel-mci.c 			sg_pcopy_to_buffer(sg, 1, &value, sizeof(u32), offset);
sg               2006 drivers/mmc/host/atmel-mci.c 			if (offset == sg->length) {
sg               2007 drivers/mmc/host/atmel-mci.c 				host->sg = sg = sg_next(sg);
sg               2009 drivers/mmc/host/atmel-mci.c 				if (!sg || !host->sg_len)
sg               2015 drivers/mmc/host/atmel-mci.c 			unsigned int remaining = sg->length - offset;
sg               2018 drivers/mmc/host/atmel-mci.c 			sg_pcopy_to_buffer(sg, 1, &value, remaining, offset);
sg               2021 drivers/mmc/host/atmel-mci.c 			host->sg = sg = sg_next(sg);
sg               2023 drivers/mmc/host/atmel-mci.c 			if (!sg || !host->sg_len) {
sg               2029 drivers/mmc/host/atmel-mci.c 			sg_pcopy_to_buffer(sg, 1, (u8 *)&value + remaining,
sg                352 drivers/mmc/host/au1xmmc.c 	dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len, host->dma.dir);
sg                398 drivers/mmc/host/au1xmmc.c 	struct scatterlist *sg;
sg                406 drivers/mmc/host/au1xmmc.c 	sg = &data->sg[host->pio.index];
sg                407 drivers/mmc/host/au1xmmc.c 	sg_ptr = kmap_atomic(sg_page(sg)) + sg->offset + host->pio.offset;
sg                410 drivers/mmc/host/au1xmmc.c 	sg_len = data->sg[host->pio.index].length - host->pio.offset;
sg                454 drivers/mmc/host/au1xmmc.c 	struct scatterlist *sg;
sg                464 drivers/mmc/host/au1xmmc.c 		sg = &data->sg[host->pio.index];
sg                465 drivers/mmc/host/au1xmmc.c 		sg_ptr = kmap_atomic(sg_page(sg)) + sg->offset + host->pio.offset;
sg                468 drivers/mmc/host/au1xmmc.c 		sg_len = sg_dma_len(&data->sg[host->pio.index]) - host->pio.offset;
sg                632 drivers/mmc/host/au1xmmc.c 	host->dma.len = dma_map_sg(mmc_dev(host->mmc), data->sg,
sg                648 drivers/mmc/host/au1xmmc.c 			struct scatterlist *sg = &data->sg[i];
sg                649 drivers/mmc/host/au1xmmc.c 			int sg_len = sg->length;
sg                658 drivers/mmc/host/au1xmmc.c 					sg_phys(sg), len, flags);
sg                661 drivers/mmc/host/au1xmmc.c 					sg_phys(sg), len, flags);
sg                684 drivers/mmc/host/au1xmmc.c 	dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len,
sg                477 drivers/mmc/host/bcm2835.c 		struct scatterlist *sg;
sg                484 drivers/mmc/host/bcm2835.c 		for_each_sg(data->sg, sg, data->sg_len, i) {
sg                485 drivers/mmc/host/bcm2835.c 			if (sg_is_last(sg)) {
sg                486 drivers/mmc/host/bcm2835.c 				WARN_ON(sg->length < len);
sg                487 drivers/mmc/host/bcm2835.c 				sg->length -= len;
sg                488 drivers/mmc/host/bcm2835.c 				host->drain_page = sg_page(sg);
sg                489 drivers/mmc/host/bcm2835.c 				host->drain_offset = sg->offset + sg->length;
sg                501 drivers/mmc/host/bcm2835.c 	sg_len = dma_map_sg(dma_chan->device->dev, data->sg, data->sg_len,
sg                506 drivers/mmc/host/bcm2835.c 	desc = dmaengine_prep_slave_sg(dma_chan, data->sg, sg_len, dir_slave,
sg                510 drivers/mmc/host/bcm2835.c 		dma_unmap_sg(dma_chan->device->dev, data->sg, sg_len, dir_data);
sg                566 drivers/mmc/host/bcm2835.c 		sg_miter_start(&host->sg_miter, data->sg, data->sg_len, flags);
sg               1064 drivers/mmc/host/bcm2835.c 			     data->sg, data->sg_len,
sg                377 drivers/mmc/host/cavium.c 	dma_unmap_sg(host->dev, data->sg, data->sg_len, get_dma_dir(data));
sg                397 drivers/mmc/host/cavium.c 	dma_unmap_sg(host->dev, data->sg, data->sg_len, get_dma_dir(data));
sg                522 drivers/mmc/host/cavium.c 	count = dma_map_sg(host->dev, data->sg, data->sg_len,
sg                534 drivers/mmc/host/cavium.c 			      (sg_dma_len(&data->sg[0]) / 8) - 1);
sg                536 drivers/mmc/host/cavium.c 	addr = sg_dma_address(&data->sg[0]);
sg                542 drivers/mmc/host/cavium.c 		 (rw) ? "W" : "R", sg_dma_len(&data->sg[0]), count);
sg                555 drivers/mmc/host/cavium.c 	struct scatterlist *sg;
sg                559 drivers/mmc/host/cavium.c 	count = dma_map_sg(host->dev, data->sg, data->sg_len,
sg                569 drivers/mmc/host/cavium.c 	for_each_sg(data->sg, sg, count, i) {
sg                571 drivers/mmc/host/cavium.c 		addr = sg_dma_address(sg);
sg                592 drivers/mmc/host/cavium.c 				       sg_dma_len(sg) / 8 - 1);
sg                599 drivers/mmc/host/cavium.c 			 (rw) ? "W" : "R", sg_dma_len(sg), i, count);
sg                612 drivers/mmc/host/cavium.c 	dma_unmap_sg(host->dev, data->sg, data->sg_len, get_dma_dir(data));
sg                658 drivers/mmc/host/cavium.c 	if (!mrq->data || !mrq->data->sg || !mrq->data->sg_len ||
sg                711 drivers/mmc/host/cavium.c 	sg_miter_start(&host->smi, mrq->data->sg, mrq->data->sg_len,
sg                724 drivers/mmc/host/cavium.c 	sg_miter_start(smi, mrq->data->sg, mrq->data->sg_len, SG_MITER_FROM_SG);
sg                274 drivers/mmc/host/cb710-mmc.c 	sg_miter_start(&miter, data->sg, data->sg_len, SG_MITER_TO_SG);
sg                318 drivers/mmc/host/cb710-mmc.c 	sg_miter_start(&miter, data->sg, data->sg_len, SG_MITER_FROM_SG);
sg                431 drivers/mmc/host/cqhci.c 	sg_count = dma_map_sg(mmc_dev(host), data->sg,
sg                474 drivers/mmc/host/cqhci.c 	struct scatterlist *sg;
sg                485 drivers/mmc/host/cqhci.c 	for_each_sg(data->sg, sg, sg_count, i) {
sg                486 drivers/mmc/host/cqhci.c 		addr = sg_dma_address(sg);
sg                487 drivers/mmc/host/cqhci.c 		len = sg_dma_len(sg);
sg                547 drivers/mmc/host/cqhci.c 		dma_unmap_sg(mmc_dev(host), data->sg, data->sg_len,
sg                201 drivers/mmc/host/davinci_mmc.c 	struct scatterlist *sg;
sg                219 drivers/mmc/host/davinci_mmc.c 	host->buffer_bytes_left = sg_dma_len(host->sg);
sg                220 drivers/mmc/host/davinci_mmc.c 	host->buffer = sg_virt(host->sg);
sg                232 drivers/mmc/host/davinci_mmc.c 		host->sg = sg_next(host->data->sg);
sg                419 drivers/mmc/host/davinci_mmc.c 				data->sg,
sg                441 drivers/mmc/host/davinci_mmc.c 				data->sg,
sg                467 drivers/mmc/host/davinci_mmc.c 	host->sg_len = dma_map_sg(mmc_dev(host->mmc), data->sg, data->sg_len,
sg                472 drivers/mmc/host/davinci_mmc.c 		if (sg_dma_len(data->sg + i) & mask) {
sg                474 drivers/mmc/host/davinci_mmc.c 				     data->sg, data->sg_len,
sg                580 drivers/mmc/host/davinci_mmc.c 		host->sg = host->data->sg;
sg                786 drivers/mmc/host/davinci_mmc.c 		dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len,
sg                437 drivers/mmc/host/dw_mmc.c 			     data->sg,
sg                480 drivers/mmc/host/dw_mmc.c 				    data->sg,
sg                586 drivers/mmc/host/dw_mmc.c 		unsigned int length = sg_dma_len(&data->sg[i]);
sg                588 drivers/mmc/host/dw_mmc.c 		u64 mem_addr = sg_dma_address(&data->sg[i]);
sg                658 drivers/mmc/host/dw_mmc.c 		unsigned int length = sg_dma_len(&data->sg[i]);
sg                660 drivers/mmc/host/dw_mmc.c 		u32 mem_addr = sg_dma_address(&data->sg[i]);
sg                777 drivers/mmc/host/dw_mmc.c 	struct scatterlist *sgl = host->data->sg;
sg                872 drivers/mmc/host/dw_mmc.c 	struct scatterlist *sg;
sg                889 drivers/mmc/host/dw_mmc.c 	for_each_sg(data->sg, sg, data->sg_len, i) {
sg                890 drivers/mmc/host/dw_mmc.c 		if (sg->offset & 3 || sg->length & 3)
sg                895 drivers/mmc/host/dw_mmc.c 			    data->sg,
sg                935 drivers/mmc/host/dw_mmc.c 			     data->sg,
sg               1142 drivers/mmc/host/dw_mmc.c 	host->sg = NULL;
sg               1158 drivers/mmc/host/dw_mmc.c 		sg_miter_start(&host->sg_miter, data->sg, data->sg_len, flags);
sg               1159 drivers/mmc/host/dw_mmc.c 		host->sg = data->sg;
sg               1716 drivers/mmc/host/dw_mmc.c 	if (host->sg) {
sg               1718 drivers/mmc/host/dw_mmc.c 		host->sg = NULL;
sg               2498 drivers/mmc/host/dw_mmc.c 		host->sg = sg_miter->piter.sg;
sg               2532 drivers/mmc/host/dw_mmc.c 	host->sg = NULL;
sg               2553 drivers/mmc/host/dw_mmc.c 		host->sg = sg_miter->piter.sg;
sg               2586 drivers/mmc/host/dw_mmc.c 	host->sg = NULL;
sg               2672 drivers/mmc/host/dw_mmc.c 				if (host->sg != NULL)
sg               2683 drivers/mmc/host/dw_mmc.c 			if (host->dir_status == DW_MCI_RECV_STATUS && host->sg)
sg               2689 drivers/mmc/host/dw_mmc.c 			if (host->dir_status == DW_MCI_SEND_STATUS && host->sg)
sg                163 drivers/mmc/host/dw_mmc.h 	struct scatterlist	*sg;
sg                242 drivers/mmc/host/jz4740_mmc.c 	dma_unmap_sg(chan->device->dev, data->sg, data->sg_len, dir);
sg                261 drivers/mmc/host/jz4740_mmc.c 			data->sg,
sg                303 drivers/mmc/host/jz4740_mmc.c 	desc = dmaengine_prep_slave_sg(chan, data->sg, sg_count,
sg                705 drivers/mmc/host/jz4740_mmc.c 	sg_miter_start(&host->miter, data->sg, data->sg_len, direction);
sg                229 drivers/mmc/host/meson-gx-mmc.c 	struct scatterlist *sg;
sg                250 drivers/mmc/host/meson-gx-mmc.c 	for_each_sg(data->sg, sg, data->sg_len, i)
sg                252 drivers/mmc/host/meson-gx-mmc.c 		if (sg->offset & 7) {
sg                286 drivers/mmc/host/meson-gx-mmc.c 	data->sg_count = dma_map_sg(mmc_dev(mmc), data->sg, data->sg_len,
sg                298 drivers/mmc/host/meson-gx-mmc.c 		dma_unmap_sg(mmc_dev(mmc), data->sg, data->sg_len,
sg                707 drivers/mmc/host/meson-gx-mmc.c 	struct scatterlist *sg;
sg                719 drivers/mmc/host/meson-gx-mmc.c 	for_each_sg(data->sg, sg, data->sg_count, i) {
sg                720 drivers/mmc/host/meson-gx-mmc.c 		unsigned int len = sg_dma_len(sg);
sg                731 drivers/mmc/host/meson-gx-mmc.c 		desc[i].cmd_data = sg_dma_address(sg);
sg                783 drivers/mmc/host/meson-gx-mmc.c 			sg_copy_to_buffer(data->sg, data->sg_len,
sg                953 drivers/mmc/host/meson-gx-mmc.c 		sg_copy_from_buffer(data->sg, data->sg_len,
sg                311 drivers/mmc/host/meson-mx-sdio.c 	struct scatterlist *sg;
sg                316 drivers/mmc/host/meson-mx-sdio.c 	sg = data->sg;
sg                317 drivers/mmc/host/meson-mx-sdio.c 	if (sg->offset & 3 || sg->length & 3) {
sg                320 drivers/mmc/host/meson-mx-sdio.c 			sg->offset, sg->length);
sg                324 drivers/mmc/host/meson-mx-sdio.c 	dma_len = dma_map_sg(mmc_dev(mmc), data->sg, data->sg_len,
sg                351 drivers/mmc/host/meson-mx-sdio.c 		writel(sg_dma_address(mrq->data->sg),
sg                450 drivers/mmc/host/meson-mx-sdio.c 		dma_unmap_sg(mmc_dev(host->mmc), cmd->data->sg,
sg                872 drivers/mmc/host/mmc_spi.c 	struct scatterlist	*sg;
sg                894 drivers/mmc/host/mmc_spi.c 	for_each_sg(data->sg, sg, data->sg_len, n_sg) {
sg                898 drivers/mmc/host/mmc_spi.c 		unsigned		length = sg->length;
sg                906 drivers/mmc/host/mmc_spi.c 			if ((sg->offset != 0 || length != PAGE_SIZE)
sg                910 drivers/mmc/host/mmc_spi.c 			dma_addr = dma_map_page(dma_dev, sg_page(sg), 0,
sg                917 drivers/mmc/host/mmc_spi.c 				t->tx_dma = dma_addr + sg->offset;
sg                919 drivers/mmc/host/mmc_spi.c 				t->rx_dma = dma_addr + sg->offset;
sg                923 drivers/mmc/host/mmc_spi.c 		kmap_addr = kmap(sg_page(sg));
sg                925 drivers/mmc/host/mmc_spi.c 			t->tx_buf = kmap_addr + sg->offset;
sg                927 drivers/mmc/host/mmc_spi.c 			t->rx_buf = kmap_addr + sg->offset;
sg                954 drivers/mmc/host/mmc_spi.c 			flush_kernel_dcache_page(sg_page(sg));
sg                955 drivers/mmc/host/mmc_spi.c 		kunmap(sg_page(sg));
sg                600 drivers/mmc/host/mmci.c 	sg_miter_start(&host->sg_miter, data->sg, data->sg_len, flags);
sg                721 drivers/mmc/host/mmci.c 	dma_unmap_sg(chan->device->dev, data->sg, data->sg_len,
sg                826 drivers/mmc/host/mmci.c 	nr_sg = dma_map_sg(device->dev, data->sg, data->sg_len,
sg                835 drivers/mmc/host/mmci.c 	desc = dmaengine_prep_slave_sg(chan, data->sg, nr_sg,
sg                846 drivers/mmc/host/mmci.c 	dma_unmap_sg(device->dev, data->sg, data->sg_len,
sg                 31 drivers/mmc/host/mmci_stm32_sdmmc.c 	struct scatterlist *sg;
sg                 38 drivers/mmc/host/mmci_stm32_sdmmc.c 	for_each_sg(data->sg, sg, data->sg_len - 1, i) {
sg                 39 drivers/mmc/host/mmci_stm32_sdmmc.c 		if (!IS_ALIGNED(sg_dma_address(data->sg), sizeof(u32)) ||
sg                 40 drivers/mmc/host/mmci_stm32_sdmmc.c 		    !IS_ALIGNED(sg_dma_len(data->sg), SDMMC_IDMA_BURST)) {
sg                 43 drivers/mmc/host/mmci_stm32_sdmmc.c 				data->sg->offset, data->sg->length);
sg                 48 drivers/mmc/host/mmci_stm32_sdmmc.c 	if (!IS_ALIGNED(sg_dma_address(data->sg), sizeof(u32))) {
sg                 51 drivers/mmc/host/mmci_stm32_sdmmc.c 			data->sg->offset, data->sg->length);
sg                 64 drivers/mmc/host/mmci_stm32_sdmmc.c 			    data->sg,
sg                 89 drivers/mmc/host/mmci_stm32_sdmmc.c 	dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len,
sg                129 drivers/mmc/host/mmci_stm32_sdmmc.c 	struct scatterlist *sg;
sg                133 drivers/mmc/host/mmci_stm32_sdmmc.c 		writel_relaxed(sg_dma_address(data->sg),
sg                140 drivers/mmc/host/mmci_stm32_sdmmc.c 	for_each_sg(data->sg, sg, data->sg_len, i) {
sg                144 drivers/mmc/host/mmci_stm32_sdmmc.c 		desc[i].idmabase = sg_dma_address(sg);
sg                145 drivers/mmc/host/mmci_stm32_sdmmc.c 		desc[i].idmasize = sg_dma_len(sg);
sg                154 drivers/mmc/host/moxart-mmc.c 	host->cur_sg = data->sg;
sg                275 drivers/mmc/host/moxart-mmc.c 	len = dma_map_sg(dma_chan->device->dev, data->sg,
sg                279 drivers/mmc/host/moxart-mmc.c 		desc = dmaengine_prep_slave_sg(dma_chan, data->sg,
sg                301 drivers/mmc/host/moxart-mmc.c 		     data->sg, data->sg_len,
sg                337 drivers/mmc/host/mtk-sd.c 	struct scatterlist *sg;	/* I/O scatter list */
sg                617 drivers/mmc/host/mtk-sd.c 	struct scatterlist *sg;
sg                621 drivers/mmc/host/mtk-sd.c 	sg = data->sg;
sg                634 drivers/mmc/host/mtk-sd.c 	for_each_sg(data->sg, sg, data->sg_count, j) {
sg                635 drivers/mmc/host/mtk-sd.c 		dma_address = sg_dma_address(sg);
sg                636 drivers/mmc/host/mtk-sd.c 		dma_len = sg_dma_len(sg);
sg                683 drivers/mmc/host/mtk-sd.c 		data->sg_count = dma_map_sg(host->dev, data->sg, data->sg_len,
sg                696 drivers/mmc/host/mtk-sd.c 		dma_unmap_sg(host->dev, data->sg, data->sg_len,
sg                 98 drivers/mmc/host/mvsdio.c 		(u32)sg_virt(data->sg), data->blocks, data->blksz,
sg                107 drivers/mmc/host/mvsdio.c 	if (nodma || (data->blksz | data->sg->offset) & 3 ||
sg                108 drivers/mmc/host/mvsdio.c 	    ((!(data->flags & MMC_DATA_READ) && data->sg->offset & 0x3f))) {
sg                118 drivers/mmc/host/mvsdio.c 		host->pio_ptr = sg_virt(data->sg);
sg                127 drivers/mmc/host/mvsdio.c 					    data->sg, data->sg_len,
sg                129 drivers/mmc/host/mvsdio.c 		phys_addr = sg_dma_address(data->sg);
sg                295 drivers/mmc/host/mvsdio.c 		dma_unmap_sg(mmc_dev(host->mmc), data->sg, host->sg_frags,
sg                286 drivers/mmc/host/mxcmmc.c 	struct scatterlist *sg;
sg                289 drivers/mmc/host/mxcmmc.c 	for_each_sg(data->sg, sg, data->sg_len, i)
sg                290 drivers/mmc/host/mxcmmc.c 		buffer_swap32(sg_virt(sg), sg->length);
sg                301 drivers/mmc/host/mxcmmc.c 	struct scatterlist *sg;
sg                315 drivers/mmc/host/mxcmmc.c 	for_each_sg(data->sg, sg, data->sg_len, i) {
sg                316 drivers/mmc/host/mxcmmc.c 		if (sg->offset & 3 || sg->length & 3 || sg->length < 512) {
sg                332 drivers/mmc/host/mxcmmc.c 	nents = dma_map_sg(host->dma->device->dev, data->sg,
sg                338 drivers/mmc/host/mxcmmc.c 		data->sg, data->sg_len, slave_dirn,
sg                342 drivers/mmc/host/mxcmmc.c 		dma_unmap_sg(host->dma->device->dev, data->sg, data->sg_len,
sg                452 drivers/mmc/host/mxcmmc.c 		dma_unmap_sg(host->dma->device->dev, data->sg, data->sg_len,
sg                606 drivers/mmc/host/mxcmmc.c 	struct scatterlist *sg;
sg                613 drivers/mmc/host/mxcmmc.c 		for_each_sg(data->sg, sg, data->sg_len, i) {
sg                614 drivers/mmc/host/mxcmmc.c 			stat = mxcmci_pull(host, sg_virt(sg), sg->length);
sg                617 drivers/mmc/host/mxcmmc.c 			host->datasize += sg->length;
sg                620 drivers/mmc/host/mxcmmc.c 		for_each_sg(data->sg, sg, data->sg_len, i) {
sg                621 drivers/mmc/host/mxcmmc.c 			stat = mxcmci_push(host, sg_virt(sg), sg->length);
sg                624 drivers/mmc/host/mxcmmc.c 			host->datasize += sg->length;
sg                147 drivers/mmc/host/mxs-mmc.c 		dma_unmap_sg(mmc_dev(host->mmc), data->sg,
sg                225 drivers/mmc/host/mxs-mmc.c 		dma_map_sg(mmc_dev(host->mmc), data->sg,
sg                227 drivers/mmc/host/mxs-mmc.c 		sgl = data->sg;
sg                242 drivers/mmc/host/mxs-mmc.c 			dma_unmap_sg(mmc_dev(host->mmc), data->sg,
sg                349 drivers/mmc/host/mxs-mmc.c 	struct scatterlist *sgl = data->sg, *sg;
sg                395 drivers/mmc/host/mxs-mmc.c 	for_each_sg(sgl, sg, sg_len, i)
sg                396 drivers/mmc/host/mxs-mmc.c 		data_size += sg->length;
sg                424 drivers/mmc/host/omap.c 	dma_unmap_sg(dev, data->sg, host->sg_len, dma_data_dir);
sg                645 drivers/mmc/host/omap.c 	struct scatterlist *sg;
sg                647 drivers/mmc/host/omap.c 	sg = host->data->sg + host->sg_idx;
sg                648 drivers/mmc/host/omap.c 	host->buffer_bytes_left = sg->length;
sg                649 drivers/mmc/host/omap.c 	host->buffer = sg_virt(sg);
sg                948 drivers/mmc/host/omap.c 	struct scatterlist *sg;
sg                973 drivers/mmc/host/omap.c 	for_each_sg(data->sg, sg, sg_len, i) {
sg                974 drivers/mmc/host/omap.c 		if ((sg->length % block_size) != 0) {
sg               1034 drivers/mmc/host/omap.c 		host->sg_len = dma_map_sg(c->device->dev, data->sg, sg_len,
sg               1039 drivers/mmc/host/omap.c 		tx = dmaengine_prep_slave_sg(c, data->sg, host->sg_len,
sg                925 drivers/mmc/host/omap_hsmmc.c 			host->data->sg, host->data->sg_len,
sg               1182 drivers/mmc/host/omap_hsmmc.c 			     data->sg, data->sg_len,
sg               1215 drivers/mmc/host/omap_hsmmc.c 		dma_len = dma_map_sg(chan->device->dev, data->sg, data->sg_len,
sg               1259 drivers/mmc/host/omap_hsmmc.c 		sgl = data->sg + i;
sg               1281 drivers/mmc/host/omap_hsmmc.c 	tx = dmaengine_prep_slave_sg(chan, data->sg, data->sg_len,
sg               1401 drivers/mmc/host/omap_hsmmc.c 		dma_unmap_sg(c->device->dev, data->sg, data->sg_len,
sg                206 drivers/mmc/host/pxamci.c 	host->dma_len = dma_map_sg(chan->device->dev, data->sg, data->sg_len,
sg                209 drivers/mmc/host/pxamci.c 	tx = dmaengine_prep_slave_sg(chan, data->sg, host->dma_len, direction,
sg                343 drivers/mmc/host/pxamci.c 		     data->sg, data->sg_len, host->dma_dir);
sg                179 drivers/mmc/host/renesas_sdhi_internal_dmac.c 	struct scatterlist *sg = host->sg_ptr;
sg                185 drivers/mmc/host/renesas_sdhi_internal_dmac.c 	if (!dma_map_sg(&host->pdev->dev, sg, host->sg_len,
sg                190 drivers/mmc/host/renesas_sdhi_internal_dmac.c 	if (!IS_ALIGNED(sg_dma_address(sg), 8))
sg                208 drivers/mmc/host/renesas_sdhi_internal_dmac.c 					    sg_dma_address(sg));
sg                215 drivers/mmc/host/renesas_sdhi_internal_dmac.c 	dma_unmap_sg(&host->pdev->dev, sg, host->sg_len, mmc_get_dma_dir(data));
sg                155 drivers/mmc/host/renesas_sdhi_sys_dmac.c 	struct scatterlist *sg = host->sg_ptr, *sg_tmp;
sg                163 drivers/mmc/host/renesas_sdhi_sys_dmac.c 	for_each_sg(sg, sg_tmp, host->sg_len, i) {
sg                172 drivers/mmc/host/renesas_sdhi_sys_dmac.c 	if ((!aligned && (host->sg_len > 1 || sg->length > PAGE_SIZE ||
sg                178 drivers/mmc/host/renesas_sdhi_sys_dmac.c 	if (sg->length < TMIO_MMC_MIN_DMA_LEN)
sg                183 drivers/mmc/host/renesas_sdhi_sys_dmac.c 		sg_init_one(&host->bounce_sg, host->bounce_buf, sg->length);
sg                185 drivers/mmc/host/renesas_sdhi_sys_dmac.c 		sg = host->sg_ptr;
sg                188 drivers/mmc/host/renesas_sdhi_sys_dmac.c 	ret = dma_map_sg(chan->device->dev, sg, host->sg_len, DMA_FROM_DEVICE);
sg                190 drivers/mmc/host/renesas_sdhi_sys_dmac.c 		desc = dmaengine_prep_slave_sg(chan, sg, ret, DMA_DEV_TO_MEM,
sg                227 drivers/mmc/host/renesas_sdhi_sys_dmac.c 	struct scatterlist *sg = host->sg_ptr, *sg_tmp;
sg                235 drivers/mmc/host/renesas_sdhi_sys_dmac.c 	for_each_sg(sg, sg_tmp, host->sg_len, i) {
sg                244 drivers/mmc/host/renesas_sdhi_sys_dmac.c 	if ((!aligned && (host->sg_len > 1 || sg->length > PAGE_SIZE ||
sg                250 drivers/mmc/host/renesas_sdhi_sys_dmac.c 	if (sg->length < TMIO_MMC_MIN_DMA_LEN)
sg                256 drivers/mmc/host/renesas_sdhi_sys_dmac.c 		void *sg_vaddr = tmio_mmc_kmap_atomic(sg, &flags);
sg                258 drivers/mmc/host/renesas_sdhi_sys_dmac.c 		sg_init_one(&host->bounce_sg, host->bounce_buf, sg->length);
sg                260 drivers/mmc/host/renesas_sdhi_sys_dmac.c 		tmio_mmc_kunmap_atomic(sg, &flags, sg_vaddr);
sg                262 drivers/mmc/host/renesas_sdhi_sys_dmac.c 		sg = host->sg_ptr;
sg                265 drivers/mmc/host/renesas_sdhi_sys_dmac.c 	ret = dma_map_sg(chan->device->dev, sg, host->sg_len, DMA_TO_DEVICE);
sg                267 drivers/mmc/host/renesas_sdhi_sys_dmac.c 		desc = dmaengine_prep_slave_sg(chan, sg, ret, DMA_MEM_TO_DEV,
sg                163 drivers/mmc/host/rtsx_pci_sdmmc.c 		count = rtsx_pci_dma_map_sg(pcr, data->sg, data->sg_len, read);
sg                205 drivers/mmc/host/rtsx_pci_sdmmc.c 	rtsx_pci_dma_unmap_sg(pcr, data->sg, data->sg_len, read);
sg                475 drivers/mmc/host/rtsx_pci_sdmmc.c 	err = rtsx_pci_dma_transfer(pcr, data->sg, host->sg_count, 1, 10000);
sg                533 drivers/mmc/host/rtsx_pci_sdmmc.c 	err = rtsx_pci_dma_transfer(pcr, data->sg, host->sg_count, 0, 10000);
sg                594 drivers/mmc/host/rtsx_pci_sdmmc.c 		sg_copy_from_buffer(data->sg, data->sg_len, buf, data->blksz);
sg                596 drivers/mmc/host/rtsx_pci_sdmmc.c 		sg_copy_to_buffer(data->sg, data->sg_len, buf, data->blksz);
sg                521 drivers/mmc/host/rtsx_usb_sdmmc.c 	err = rtsx_usb_transfer_data(ucr, pipe, data->sg, data_len,
sg                568 drivers/mmc/host/rtsx_usb_sdmmc.c 		sg_copy_from_buffer(data->sg, data->sg_len, buf, data->blksz);
sg                570 drivers/mmc/host/rtsx_usb_sdmmc.c 		sg_copy_to_buffer(data->sg, data->sg_len, buf, data->blksz);
sg                320 drivers/mmc/host/s3cmci.c 	struct scatterlist *sg;
sg                333 drivers/mmc/host/s3cmci.c 	sg = &host->mrq->data->sg[host->pio_sgptr];
sg                335 drivers/mmc/host/s3cmci.c 	*bytes = sg->length;
sg                336 drivers/mmc/host/s3cmci.c 	*pointer = sg_virt(sg);
sg               1091 drivers/mmc/host/s3cmci.c 	dma_map_sg(mmc_dev(host->mmc), data->sg, data->sg_len,
sg               1095 drivers/mmc/host/s3cmci.c 	desc = dmaengine_prep_slave_sg(host->dma, data->sg, data->sg_len,
sg               1108 drivers/mmc/host/s3cmci.c 	dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len,
sg                508 drivers/mmc/host/sdhci-of-esdhc.c 	dmastart = sg_dma_address(host->data->sg);
sg                624 drivers/mmc/host/sdhci.c 			sg_copy_to_buffer(data->sg, data->sg_len,
sg                638 drivers/mmc/host/sdhci.c 				      data->sg, data->sg_len,
sg                651 drivers/mmc/host/sdhci.c static char *sdhci_kmap_atomic(struct scatterlist *sg, unsigned long *flags)
sg                654 drivers/mmc/host/sdhci.c 	return kmap_atomic(sg_page(sg)) + sg->offset;
sg                701 drivers/mmc/host/sdhci.c 	struct scatterlist *sg;
sg                720 drivers/mmc/host/sdhci.c 	for_each_sg(data->sg, sg, host->sg_count, i) {
sg                721 drivers/mmc/host/sdhci.c 		addr = sg_dma_address(sg);
sg                722 drivers/mmc/host/sdhci.c 		len = sg_dma_len(sg);
sg                734 drivers/mmc/host/sdhci.c 				buffer = sdhci_kmap_atomic(sg, &flags);
sg                781 drivers/mmc/host/sdhci.c 	struct scatterlist *sg;
sg                791 drivers/mmc/host/sdhci.c 		for_each_sg(data->sg, sg, host->sg_count, i)
sg                792 drivers/mmc/host/sdhci.c 			if (sg_dma_address(sg) & SDHCI_ADMA2_MASK) {
sg                798 drivers/mmc/host/sdhci.c 			dma_sync_sg_for_cpu(mmc_dev(host->mmc), data->sg,
sg                803 drivers/mmc/host/sdhci.c 			for_each_sg(data->sg, sg, host->sg_count, i) {
sg                804 drivers/mmc/host/sdhci.c 				if (sg_dma_address(sg) & SDHCI_ADMA2_MASK) {
sg                806 drivers/mmc/host/sdhci.c 					       (sg_dma_address(sg) & SDHCI_ADMA2_MASK);
sg                808 drivers/mmc/host/sdhci.c 					buffer = sdhci_kmap_atomic(sg, &flags);
sg                831 drivers/mmc/host/sdhci.c 		return sg_dma_address(host->data->sg);
sg               1044 drivers/mmc/host/sdhci.c 		struct scatterlist *sg;
sg               1077 drivers/mmc/host/sdhci.c 			for_each_sg(data->sg, sg, data->sg_len, i) {
sg               1078 drivers/mmc/host/sdhci.c 				if (sg->length & length_mask) {
sg               1080 drivers/mmc/host/sdhci.c 					    sg->length);
sg               1084 drivers/mmc/host/sdhci.c 				if (sg->offset & offset_mask) {
sg               1122 drivers/mmc/host/sdhci.c 		sg_miter_start(&host->sg_miter, data->sg, data->sg_len, flags);
sg               2542 drivers/mmc/host/sdhci.c 		dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len,
sg               2678 drivers/mmc/host/sdhci.c 					sg_copy_from_buffer(data->sg,
sg               2692 drivers/mmc/host/sdhci.c 				dma_unmap_sg(mmc_dev(host->mmc), data->sg,
sg                298 drivers/mmc/host/sdricoh_cs.c 			data->sg_len, data->sg->length);
sg                307 drivers/mmc/host/sdricoh_cs.c 			page = sg_page(data->sg);
sg                309 drivers/mmc/host/sdricoh_cs.c 			buf = kmap(page) + data->sg->offset + (len * i);
sg                290 drivers/mmc/host/sh_mmcif.c 	struct scatterlist *sg = data->sg;
sg                297 drivers/mmc/host/sh_mmcif.c 	ret = dma_map_sg(chan->device->dev, sg, data->sg_len,
sg                301 drivers/mmc/host/sh_mmcif.c 		desc = dmaengine_prep_slave_sg(chan, sg, ret,
sg                340 drivers/mmc/host/sh_mmcif.c 	struct scatterlist *sg = data->sg;
sg                347 drivers/mmc/host/sh_mmcif.c 	ret = dma_map_sg(chan->device->dev, sg, data->sg_len,
sg                351 drivers/mmc/host/sh_mmcif.c 		desc = dmaengine_prep_slave_sg(chan, sg, ret,
sg                605 drivers/mmc/host/sh_mmcif.c 	BUG_ON(host->sg_blkidx > data->sg->length);
sg                607 drivers/mmc/host/sh_mmcif.c 	if (host->sg_blkidx == data->sg->length) {
sg                610 drivers/mmc/host/sh_mmcif.c 			host->pio_ptr = sg_virt(++data->sg);
sg                634 drivers/mmc/host/sh_mmcif.c 	u32 *p = sg_virt(data->sg);
sg                658 drivers/mmc/host/sh_mmcif.c 	if (!data->sg_len || !data->sg->length)
sg                667 drivers/mmc/host/sh_mmcif.c 	host->pio_ptr = sg_virt(data->sg);
sg                685 drivers/mmc/host/sh_mmcif.c 	BUG_ON(!data->sg->length);
sg                714 drivers/mmc/host/sh_mmcif.c 	u32 *p = sg_virt(data->sg);
sg                738 drivers/mmc/host/sh_mmcif.c 	if (!data->sg_len || !data->sg->length)
sg                747 drivers/mmc/host/sh_mmcif.c 	host->pio_ptr = sg_virt(data->sg);
sg                765 drivers/mmc/host/sh_mmcif.c 	BUG_ON(!data->sg->length);
sg               1135 drivers/mmc/host/sh_mmcif.c 			     data->sg, data->sg_len,
sg               1139 drivers/mmc/host/sh_mmcif.c 			     data->sg, data->sg_len,
sg                369 drivers/mmc/host/sunxi-mmc.c 		if (data->sg[i].length == max_len)
sg                372 drivers/mmc/host/sunxi-mmc.c 			pdes[i].buf_size = cpu_to_le32(data->sg[i].length);
sg                376 drivers/mmc/host/sunxi-mmc.c 			cpu_to_le32(sg_dma_address(&data->sg[i]));
sg                397 drivers/mmc/host/sunxi-mmc.c 	struct scatterlist *sg;
sg                399 drivers/mmc/host/sunxi-mmc.c 	dma_len = dma_map_sg(mmc_dev(host->mmc), data->sg, data->sg_len,
sg                406 drivers/mmc/host/sunxi-mmc.c 	for_each_sg(data->sg, sg, data->sg_len, i) {
sg                407 drivers/mmc/host/sunxi-mmc.c 		if (sg->offset & 3 || sg->length & 3) {
sg                410 drivers/mmc/host/sunxi-mmc.c 				sg->offset, sg->length);
sg                550 drivers/mmc/host/sunxi-mmc.c 		dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len,
sg               1072 drivers/mmc/host/sunxi-mmc.c 			dma_unmap_sg(mmc_dev(mmc), data->sg, data->sg_len,
sg                166 drivers/mmc/host/tifm_sd.c 	struct scatterlist *sg = r_data->sg;
sg                174 drivers/mmc/host/tifm_sd.c 		cnt = sg[host->sg_pos].length - host->block_pos;
sg                187 drivers/mmc/host/tifm_sd.c 			cnt = sg[host->sg_pos].length;
sg                189 drivers/mmc/host/tifm_sd.c 		off = sg[host->sg_pos].offset + host->block_pos;
sg                191 drivers/mmc/host/tifm_sd.c 		pg = nth_page(sg_page(&sg[host->sg_pos]), off >> PAGE_SHIFT);
sg                222 drivers/mmc/host/tifm_sd.c 	struct scatterlist *sg = r_data->sg;
sg                230 drivers/mmc/host/tifm_sd.c 		cnt = sg[host->sg_pos].length - host->block_pos;
sg                236 drivers/mmc/host/tifm_sd.c 			cnt = sg[host->sg_pos].length;
sg                238 drivers/mmc/host/tifm_sd.c 		off = sg[host->sg_pos].offset + host->block_pos;
sg                240 drivers/mmc/host/tifm_sd.c 		pg = nth_page(sg_page(&sg[host->sg_pos]), off >> PAGE_SHIFT);
sg                264 drivers/mmc/host/tifm_sd.c 	struct scatterlist *sg = NULL;
sg                279 drivers/mmc/host/tifm_sd.c 	dma_len = sg_dma_len(&r_data->sg[host->sg_pos]) - host->block_pos;
sg                285 drivers/mmc/host/tifm_sd.c 		dma_len = sg_dma_len(&r_data->sg[host->sg_pos]);
sg                299 drivers/mmc/host/tifm_sd.c 		sg = &r_data->sg[host->sg_pos];
sg                308 drivers/mmc/host/tifm_sd.c 		sg = &host->bounce_buf;
sg                315 drivers/mmc/host/tifm_sd.c 	writel(sg_dma_address(sg) + dma_off, sock->addr + SOCK_DMA_ADDRESS);
sg                677 drivers/mmc/host/tifm_sd.c 			host->sg_len = tifm_map_sg(sock, r_data->sg,
sg                764 drivers/mmc/host/tifm_sd.c 			tifm_unmap_sg(sock, r_data->sg, r_data->sg_len,
sg                213 drivers/mmc/host/tmio_mmc.h static inline char *tmio_mmc_kmap_atomic(struct scatterlist *sg,
sg                217 drivers/mmc/host/tmio_mmc.h 	return kmap_atomic(sg_page(sg)) + sg->offset;
sg                220 drivers/mmc/host/tmio_mmc.h static inline void tmio_mmc_kunmap_atomic(struct scatterlist *sg,
sg                223 drivers/mmc/host/tmio_mmc.h 	kunmap_atomic(virt - sg->offset);
sg                117 drivers/mmc/host/tmio_mmc_core.c 	host->sg_ptr = data->sg;
sg                118 drivers/mmc/host/tmio_mmc_core.c 	host->sg_orig = data->sg;
sg                479 drivers/mmc/host/toshsd.c 		data->blksz, data->blocks, data->sg->offset);
sg                488 drivers/mmc/host/toshsd.c 	sg_miter_start(&host->sg_miter, data->sg, data->sg_len, flags);
sg                249 drivers/mmc/host/uniphier-sd.c 	struct scatterlist *sg = host->sg_ptr;
sg                261 drivers/mmc/host/uniphier-sd.c 	if (!IS_ALIGNED(sg->offset, 8))
sg                272 drivers/mmc/host/uniphier-sd.c 	sg_len = dma_map_sg(mmc_dev(host->mmc), sg, 1, priv->dma_dir);
sg                283 drivers/mmc/host/uniphier-sd.c 	dma_addr = sg_dma_address(data->sg);
sg                178 drivers/mmc/host/usdhi6rol0.c 	struct scatterlist *sg;	/* current SG segment */
sg                314 drivers/mmc/host/usdhi6rol0.c 			      struct scatterlist *sg)
sg                321 drivers/mmc/host/usdhi6rol0.c 		data->blksz, data->blocks, sg->offset);
sg                348 drivers/mmc/host/usdhi6rol0.c 	host->sg = data->sg;
sg                350 drivers/mmc/host/usdhi6rol0.c 	host->offset = host->sg->offset;
sg                357 drivers/mmc/host/usdhi6rol0.c 	struct scatterlist *sg = data->sg_len > 1 ? host->sg : data->sg;
sg                358 drivers/mmc/host/usdhi6rol0.c 	size_t head = PAGE_SIZE - sg->offset;
sg                362 drivers/mmc/host/usdhi6rol0.c 	if (WARN(sg_dma_len(sg) % data->blksz,
sg                364 drivers/mmc/host/usdhi6rol0.c 		 sg_dma_len(sg), data->blksz))
sg                367 drivers/mmc/host/usdhi6rol0.c 	host->pg.page = sg_page(sg);
sg                369 drivers/mmc/host/usdhi6rol0.c 	host->offset = sg->offset;
sg                382 drivers/mmc/host/usdhi6rol0.c 		usdhi6_blk_bounce(host, sg);
sg                388 drivers/mmc/host/usdhi6rol0.c 		sg->offset, host->mrq->cmd->opcode, host->mrq);
sg                401 drivers/mmc/host/usdhi6rol0.c 		struct scatterlist *sg = data->sg_len > 1 ?
sg                402 drivers/mmc/host/usdhi6rol0.c 			host->sg : data->sg;
sg                417 drivers/mmc/host/usdhi6rol0.c 		if (!force && sg_dma_len(sg) + sg->offset >
sg                463 drivers/mmc/host/usdhi6rol0.c 	total = host->sg->offset + sg_dma_len(host->sg);
sg                472 drivers/mmc/host/usdhi6rol0.c 			usdhi6_blk_bounce(host, host->sg);
sg                486 drivers/mmc/host/usdhi6rol0.c 		struct scatterlist *next = sg_next(host->sg);
sg                492 drivers/mmc/host/usdhi6rol0.c 		host->sg = next;
sg                505 drivers/mmc/host/usdhi6rol0.c 	host->pg.page = nth_page(sg_page(host->sg), host->page_idx);
sg                542 drivers/mmc/host/usdhi6rol0.c 		dma_unmap_sg(host->chan_rx->device->dev, data->sg,
sg                545 drivers/mmc/host/usdhi6rol0.c 		dma_unmap_sg(host->chan_tx->device->dev, data->sg,
sg                569 drivers/mmc/host/usdhi6rol0.c 	struct scatterlist *sg = data->sg;
sg                586 drivers/mmc/host/usdhi6rol0.c 	ret = dma_map_sg(chan->device->dev, sg, data->sg_len, data_dir);
sg                589 drivers/mmc/host/usdhi6rol0.c 		desc = dmaengine_prep_slave_sg(chan, sg, ret, dir,
sg               1045 drivers/mmc/host/usdhi6rol0.c 		     data->sg->offset % 4))
sg               1048 drivers/mmc/host/usdhi6rol0.c 				data->blksz, data->blocks, data->sg->offset);
sg               1063 drivers/mmc/host/usdhi6rol0.c 			data->sg->offset, mrq->stop ? " + stop" : "");
sg               1094 drivers/mmc/host/usdhi6rol0.c 	host->sg = NULL;
sg               1673 drivers/mmc/host/usdhi6rol0.c 	struct scatterlist *sg;
sg               1705 drivers/mmc/host/usdhi6rol0.c 		sg = host->sg ?: data->sg;
sg               1710 drivers/mmc/host/usdhi6rol0.c 			sg_dma_len(sg), sg->offset);
sg                299 drivers/mmc/host/ushc.c 				  NULL, data->sg->length,
sg                302 drivers/mmc/host/ushc.c 		ushc->data_urb->sg = data->sg;
sg                490 drivers/mmc/host/via-sdmmc.c 	count = dma_map_sg(mmc_dev(host->mmc), data->sg, data->sg_len,
sg                495 drivers/mmc/host/via-sdmmc.c 	via_set_ddma(host, sg_dma_address(data->sg), sg_dma_len(data->sg),
sg                634 drivers/mmc/host/via-sdmmc.c 	dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len,
sg               1443 drivers/mmc/host/vub300.c 				     pipe, 0, data->sg,
sg               1488 drivers/mmc/host/vub300.c 				sg_copy_from_buffer(data->sg, data->sg_len, buf,
sg               1513 drivers/mmc/host/vub300.c 		sg_copy_to_buffer(data->sg, data->sg_len,
sg               1537 drivers/mmc/host/vub300.c 			sg_copy_to_buffer(data->sg, data->sg_len, buf,
sg               1559 drivers/mmc/host/vub300.c 		sg_copy_to_buffer(data->sg, data->sg_len, buf, sizeof(buf));
sg               1561 drivers/mmc/host/vub300.c 				     pipe, 0, data->sg,
sg                240 drivers/mmc/host/wbsd.c 	host->cur_sg = data->sg;
sg                277 drivers/mmc/host/wbsd.c 		len += data->sg[i].length;
sg                278 drivers/mmc/host/wbsd.c 	sg_copy_to_buffer(data->sg, data->sg_len, host->dma_buffer, len);
sg                287 drivers/mmc/host/wbsd.c 		len += data->sg[i].length;
sg                288 drivers/mmc/host/wbsd.c 	sg_copy_from_buffer(data->sg, data->sg_len, host->dma_buffer, len);
sg                303 drivers/mmc/host/wmt-sdmmc.c 		dma_unmap_sg(mmc_dev(priv->mmc), req->data->sg,
sg                306 drivers/mmc/host/wmt-sdmmc.c 		dma_unmap_sg(mmc_dev(priv->mmc), req->data->sg,
sg                569 drivers/mmc/host/wmt-sdmmc.c 	struct scatterlist *sg;
sg                620 drivers/mmc/host/wmt-sdmmc.c 			sg_cnt = dma_map_sg(mmc_dev(mmc), req->data->sg,
sg                626 drivers/mmc/host/wmt-sdmmc.c 			sg_cnt = dma_map_sg(mmc_dev(mmc), req->data->sg,
sg                636 drivers/mmc/host/wmt-sdmmc.c 		for_each_sg(req->data->sg, sg, sg_cnt, i) {
sg                638 drivers/mmc/host/wmt-sdmmc.c 			while (offset < sg_dma_len(sg)) {
sg                640 drivers/mmc/host/wmt-sdmmc.c 						sg_dma_address(sg)+offset,
sg                824 drivers/mtd/nand/raw/marvell_nand.c 	struct scatterlist sg;
sg                830 drivers/mtd/nand/raw/marvell_nand.c 	sg_init_one(&sg, nfc->dma_buf, dma_len);
sg                831 drivers/mtd/nand/raw/marvell_nand.c 	dma_map_sg(nfc->dma_chan->device->dev, &sg, 1, direction);
sg                832 drivers/mtd/nand/raw/marvell_nand.c 	tx = dmaengine_prep_slave_sg(nfc->dma_chan, &sg, 1,
sg                849 drivers/mtd/nand/raw/marvell_nand.c 	dma_unmap_sg(nfc->dma_chan->device->dev, &sg, 1, direction);
sg                461 drivers/mtd/nand/raw/omap2.c 	struct scatterlist sg;
sg                470 drivers/mtd/nand/raw/omap2.c 	sg_init_one(&sg, addr, len);
sg                471 drivers/mtd/nand/raw/omap2.c 	n = dma_map_sg(info->dma->device->dev, &sg, 1, dir);
sg                478 drivers/mtd/nand/raw/omap2.c 	tx = dmaengine_prep_slave_sg(info->dma, &sg, n,
sg                513 drivers/mtd/nand/raw/omap2.c 	dma_unmap_sg(info->dma->device->dev, &sg, 1, dir);
sg                517 drivers/mtd/nand/raw/omap2.c 	dma_unmap_sg(info->dma->device->dev, &sg, 1, dir);
sg                898 drivers/mtd/nand/raw/stm32_fmc2_nand.c 	struct scatterlist *sg;
sg                915 drivers/mtd/nand/raw/stm32_fmc2_nand.c 	for_each_sg(fmc2->dma_data_sg.sgl, sg, eccsteps, s) {
sg                916 drivers/mtd/nand/raw/stm32_fmc2_nand.c 		sg_set_buf(sg, p, eccsize);
sg                946 drivers/mtd/nand/raw/stm32_fmc2_nand.c 		for_each_sg(fmc2->dma_ecc_sg.sgl, sg, eccsteps, s) {
sg                947 drivers/mtd/nand/raw/stm32_fmc2_nand.c 			sg_set_buf(sg, p, fmc2->dma_ecc_len);
sg                349 drivers/mtd/nand/raw/sunxi_nand.c 				    struct scatterlist *sg)
sg                361 drivers/mtd/nand/raw/sunxi_nand.c 	sg_init_one(sg, buf, nchunks * chunksize);
sg                362 drivers/mtd/nand/raw/sunxi_nand.c 	ret = dma_map_sg(nfc->dev, sg, 1, ddir);
sg                366 drivers/mtd/nand/raw/sunxi_nand.c 	dmad = dmaengine_prep_slave_sg(nfc->dmac, sg, 1, tdir, DMA_CTRL_ACK);
sg                392 drivers/mtd/nand/raw/sunxi_nand.c 	dma_unmap_sg(nfc->dev, sg, 1, ddir);
sg                398 drivers/mtd/nand/raw/sunxi_nand.c 				     struct scatterlist *sg)
sg                400 drivers/mtd/nand/raw/sunxi_nand.c 	dma_unmap_sg(nfc->dev, sg, 1, ddir);
sg                913 drivers/mtd/nand/raw/sunxi_nand.c 	struct scatterlist sg;
sg                921 drivers/mtd/nand/raw/sunxi_nand.c 				       DMA_FROM_DEVICE, &sg);
sg                944 drivers/mtd/nand/raw/sunxi_nand.c 	sunxi_nfc_dma_op_cleanup(nfc, DMA_FROM_DEVICE, &sg);
sg               1278 drivers/mtd/nand/raw/sunxi_nand.c 	struct scatterlist sg;
sg               1288 drivers/mtd/nand/raw/sunxi_nand.c 				       DMA_TO_DEVICE, &sg);
sg               1320 drivers/mtd/nand/raw/sunxi_nand.c 	sunxi_nfc_dma_op_cleanup(nfc, DMA_TO_DEVICE, &sg);
sg                236 drivers/mtd/nand/raw/tango_nand.c 	struct scatterlist sg;
sg                241 drivers/mtd/nand/raw/tango_nand.c 	sg_init_one(&sg, buf, len);
sg                242 drivers/mtd/nand/raw/tango_nand.c 	if (dma_map_sg(chan->device->dev, &sg, 1, dir) != 1)
sg                246 drivers/mtd/nand/raw/tango_nand.c 	desc = dmaengine_prep_slave_sg(chan, &sg, 1, tdir, DMA_PREP_INTERRUPT);
sg                270 drivers/mtd/nand/raw/tango_nand.c 	dma_unmap_sg(chan->device->dev, &sg, 1, dir);
sg                305 drivers/mtd/ubi/block.c 	blk_rq_map_sg(req->q, req, pdu->usgl.sg);
sg                337 drivers/mtd/ubi/block.c 	sg_init_table(pdu->usgl.sg, UBI_MAX_SG_COUNT);
sg                754 drivers/mtd/ubi/eba.c 	struct scatterlist *sg;
sg                758 drivers/mtd/ubi/eba.c 		sg = &sgl->sg[sgl->list_pos];
sg                759 drivers/mtd/ubi/eba.c 		if (len < sg->length - sgl->page_pos)
sg                762 drivers/mtd/ubi/eba.c 			to_read = sg->length - sgl->page_pos;
sg                765 drivers/mtd/ubi/eba.c 				       sg_virt(sg) + sgl->page_pos, offset,
sg                774 drivers/mtd/ubi/eba.c 			if (sgl->page_pos == sg->length) {
sg                481 drivers/net/caif/caif_virtio.c 						       struct scatterlist *sg)
sg                512 drivers/net/caif/caif_virtio.c 	sg_init_one(sg, buf_info->vaddr + pad_len,
sg                526 drivers/net/caif/caif_virtio.c 	struct scatterlist sg;
sg                548 drivers/net/caif/caif_virtio.c 	buf_info = cfv_alloc_and_copy_to_shm(cfv, skb, &sg);
sg                557 drivers/net/caif/caif_virtio.c 			buf_info = cfv_alloc_and_copy_to_shm(cfv, skb, &sg);
sg                575 drivers/net/caif/caif_virtio.c 	ret = virtqueue_add_outbuf(cfv->vq_tx, &sg, 1, buf_info, GFP_ATOMIC);
sg                138 drivers/net/ethernet/cavium/liquidio/lio_core.c 			g->sg = lio->glists_virt_base[i] +
sg               1490 drivers/net/ethernet/cavium/liquidio/lio_main.c 			 g->sg[0].ptr[0], (skb->len - skb->data_len),
sg               1498 drivers/net/ethernet/cavium/liquidio/lio_main.c 			       g->sg[(i >> 2)].ptr[(i & 3)],
sg               1533 drivers/net/ethernet/cavium/liquidio/lio_main.c 			 g->sg[0].ptr[0], (skb->len - skb->data_len),
sg               1541 drivers/net/ethernet/cavium/liquidio/lio_main.c 			       g->sg[(i >> 2)].ptr[(i & 3)],
sg               2445 drivers/net/ethernet/cavium/liquidio/lio_main.c 		memset(g->sg, 0, g->sg_size);
sg               2447 drivers/net/ethernet/cavium/liquidio/lio_main.c 		g->sg[0].ptr[0] = dma_map_single(&oct->pci_dev->dev,
sg               2451 drivers/net/ethernet/cavium/liquidio/lio_main.c 		if (dma_mapping_error(&oct->pci_dev->dev, g->sg[0].ptr[0])) {
sg               2457 drivers/net/ethernet/cavium/liquidio/lio_main.c 		add_sg_size(&g->sg[0], (skb->len - skb->data_len), 0);
sg               2464 drivers/net/ethernet/cavium/liquidio/lio_main.c 			g->sg[(i >> 2)].ptr[(i & 3)] =
sg               2470 drivers/net/ethernet/cavium/liquidio/lio_main.c 					      g->sg[i >> 2].ptr[i & 3])) {
sg               2472 drivers/net/ethernet/cavium/liquidio/lio_main.c 						 g->sg[0].ptr[0],
sg               2478 drivers/net/ethernet/cavium/liquidio/lio_main.c 						       g->sg[j >> 2].ptr[j & 3],
sg               2487 drivers/net/ethernet/cavium/liquidio/lio_main.c 			add_sg_size(&g->sg[(i >> 2)], skb_frag_size(frag),
sg                835 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 			 g->sg[0].ptr[0], (skb->len - skb->data_len),
sg                843 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 			       g->sg[(i >> 2)].ptr[(i & 3)],
sg                879 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 			 g->sg[0].ptr[0], (skb->len - skb->data_len),
sg                887 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 			       g->sg[(i >> 2)].ptr[(i & 3)],
sg               1519 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 		memset(g->sg, 0, g->sg_size);
sg               1521 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 		g->sg[0].ptr[0] = dma_map_single(&oct->pci_dev->dev,
sg               1525 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 		if (dma_mapping_error(&oct->pci_dev->dev, g->sg[0].ptr[0])) {
sg               1530 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 		add_sg_size(&g->sg[0], (skb->len - skb->data_len), 0);
sg               1537 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 			g->sg[(i >> 2)].ptr[(i & 3)] =
sg               1542 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 					      g->sg[i >> 2].ptr[i & 3])) {
sg               1544 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 						 g->sg[0].ptr[0],
sg               1550 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 						       g->sg[j >> 2].ptr[j & 3],
sg               1559 drivers/net/ethernet/cavium/liquidio/lio_vf_main.c 			add_sg_size(&g->sg[(i >> 2)], skb_frag_size(frag),
sg                 63 drivers/net/ethernet/cavium/liquidio/octeon_network.h 	struct octeon_sg_entry *sg;
sg                 46 drivers/net/ethernet/freescale/dpaa/dpaa_eth_trace.h 	fd_format_name(sg)
sg                588 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c 	swa->sg.skb = skb;
sg                589 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c 	swa->sg.scl = scl;
sg                590 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c 	swa->sg.num_sg = num_sg;
sg                591 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c 	swa->sg.sgt_size = sgt_buf_size;
sg                703 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c 		skb = swa->sg.skb;
sg                706 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c 		dma_unmap_sg(dev, swa->sg.scl, swa->sg.num_sg,
sg                708 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c 		kfree(swa->sg.scl);
sg                711 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c 		dma_unmap_single(dev, fd_addr, swa->sg.sgt_size,
sg                113 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.h 		} sg;
sg               1771 drivers/net/ethernet/ibm/emac/core.c 			goto sg;
sg               1818 drivers/net/ethernet/ibm/emac/core.c 	sg:
sg                141 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 	struct scatterlist *sg;
sg                205 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 	for_each_sg(sgl, sg, dmacount, i) {
sg                206 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 		addr = sg_dma_address(sg);
sg                207 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 		len = sg_dma_len(sg);
sg                 60 drivers/net/ethernet/mellanox/mlx4/icm.c 		dma_unmap_sg(&dev->persist->pdev->dev, chunk->sg, chunk->npages,
sg                 64 drivers/net/ethernet/mellanox/mlx4/icm.c 		__free_pages(sg_page(&chunk->sg[i]),
sg                 65 drivers/net/ethernet/mellanox/mlx4/icm.c 			     get_order(chunk->sg[i].length));
sg                175 drivers/net/ethernet/mellanox/mlx4/icm.c 				sg_init_table(chunk->sg, MLX4_ICM_CHUNK_LEN);
sg                191 drivers/net/ethernet/mellanox/mlx4/icm.c 			ret = mlx4_alloc_icm_pages(&chunk->sg[chunk->npages],
sg                208 drivers/net/ethernet/mellanox/mlx4/icm.c 						chunk->sg, chunk->npages,
sg                222 drivers/net/ethernet/mellanox/mlx4/icm.c 		chunk->nsg = dma_map_sg(&dev->persist->pdev->dev, chunk->sg,
sg                347 drivers/net/ethernet/mellanox/mlx4/icm.c 				len = sg_dma_len(&chunk->sg[i]);
sg                348 drivers/net/ethernet/mellanox/mlx4/icm.c 				dma_addr = sg_dma_address(&chunk->sg[i]);
sg                355 drivers/net/ethernet/mellanox/mlx4/icm.c 				page = sg_page(&chunk->sg[i]);
sg                 62 drivers/net/ethernet/mellanox/mlx4/icm.h 		struct scatterlist	sg[MLX4_ICM_CHUNK_LEN];
sg                130 drivers/net/ethernet/mellanox/mlx4/icm.h 		return sg_dma_address(&iter->chunk->sg[iter->page_idx]);
sg                138 drivers/net/ethernet/mellanox/mlx4/icm.h 		return sg_dma_len(&iter->chunk->sg[iter->page_idx]);
sg                 54 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 	if (unlikely(!buf->sg[0].data))
sg                 58 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 	buf->sg[0].dma_addr = dma_map_single(dma_device, buf->sg[0].data,
sg                 59 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 					     buf->sg[0].size, buf->dma_dir);
sg                 60 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 	err = dma_mapping_error(dma_device, buf->sg[0].dma_addr);
sg                 67 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 	if (!buf->sg[1].data)
sg                 70 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 	buf->sg[1].dma_addr = dma_map_single(dma_device, buf->sg[1].data,
sg                 71 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 					     buf->sg[1].size, buf->dma_dir);
sg                 72 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 	err = dma_mapping_error(dma_device, buf->sg[1].dma_addr);
sg                 75 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 		dma_unmap_single(dma_device, buf->sg[0].dma_addr,
sg                 76 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 				 buf->sg[0].size, buf->dma_dir);
sg                 90 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 	if (buf->sg[1].data)
sg                 91 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 		dma_unmap_single(dma_device, buf->sg[1].dma_addr,
sg                 92 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 				 buf->sg[1].size, buf->dma_dir);
sg                 94 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 	if (likely(buf->sg[0].data))
sg                 95 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 		dma_unmap_single(dma_device, buf->sg[0].dma_addr,
sg                 96 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 				 buf->sg[0].size, buf->dma_dir);
sg                117 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 	data->byte_count = cpu_to_be32(buf->sg[0].size);
sg                119 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 	data->addr = cpu_to_be64(buf->sg[0].dma_addr);
sg                154 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 	for (sgi = 0; sgi < ARRAY_SIZE(buf->sg); sgi++) {
sg                155 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 		if (!buf->sg[sgi].data)
sg                157 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 		data->byte_count = cpu_to_be32(buf->sg[sgi].size);
sg                159 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 		data->addr = cpu_to_be64(buf->sg[sgi].dma_addr);
sg                212 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 	buf->sg[0].data = (void *)(buf + 1);
sg                213 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 	buf->sg[0].size = MLX5_FPGA_RECV_SIZE;
sg                276 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 	buf->sg[0].size = be32_to_cpu(cqe->byte_cnt);
sg                278 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 		      buf->sg[0].size);
sg                281 drivers/net/ethernet/mellanox/mlx5/core/fpga/conn.c 	buf->sg[0].size = MLX5_FPGA_RECV_SIZE;
sg                175 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c 	struct mlx5_ifc_fpga_ipsec_cmd_resp *resp = buf->sg[0].data;
sg                181 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c 	if (buf->sg[0].size < sizeof(*resp)) {
sg                183 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c 			       buf->sg[0].size, sizeof(*resp));
sg                239 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c 	context->buf.sg[0].size = cmd_size;
sg                240 drivers/net/ethernet/mellanox/mlx5/core/fpga/ipsec.c 	context->buf.sg[0].data = &context->command;
sg                 81 drivers/net/ethernet/mellanox/mlx5/core/fpga/sdk.h 	struct mlx5_fpga_dma_entry sg[2];
sg                177 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 		u32 syndrome = MLX5_GET(tls_resp, resp->sg[0].data, syndrome);
sg                228 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	buf->sg[0].data = cmd;
sg                229 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	buf->sg[0].size = MLX5_TLS_COMMAND_SIZE;
sg                258 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	buf->sg[0].data = cmd;
sg                259 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	buf->sg[0].size = MLX5_TLS_COMMAND_SIZE;
sg                317 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 		ctx->syndrome = MLX5_GET(tls_resp, resp->sg[0].data, syndrome);
sg                355 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	buf->sg[0].data = cmd;
sg                356 drivers/net/ethernet/mellanox/mlx5/core/fpga/tls.c 	buf->sg[0].size = MLX5_TLS_COMMAND_SIZE;
sg                134 drivers/net/ethernet/micrel/ks8842.c 	struct scatterlist sg;
sg                142 drivers/net/ethernet/micrel/ks8842.c 	struct scatterlist sg;
sg                431 drivers/net/ethernet/micrel/ks8842.c 	sg_dma_len(&ctl->sg) = skb->len + sizeof(u32);
sg                442 drivers/net/ethernet/micrel/ks8842.c 		sg_dma_address(&ctl->sg), 0, sg_dma_len(&ctl->sg),
sg                446 drivers/net/ethernet/micrel/ks8842.c 	if (sg_dma_len(&ctl->sg) % 4)
sg                447 drivers/net/ethernet/micrel/ks8842.c 		sg_dma_len(&ctl->sg) += 4 - sg_dma_len(&ctl->sg) % 4;
sg                450 drivers/net/ethernet/micrel/ks8842.c 		&ctl->sg, 1, DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
sg                544 drivers/net/ethernet/micrel/ks8842.c 	struct scatterlist *sg = &ctl->sg;
sg                549 drivers/net/ethernet/micrel/ks8842.c 		sg_init_table(sg, 1);
sg                550 drivers/net/ethernet/micrel/ks8842.c 		sg_dma_address(sg) = dma_map_single(adapter->dev,
sg                552 drivers/net/ethernet/micrel/ks8842.c 		if (dma_mapping_error(adapter->dev, sg_dma_address(sg))) {
sg                554 drivers/net/ethernet/micrel/ks8842.c 			sg_dma_address(sg) = 0;
sg                558 drivers/net/ethernet/micrel/ks8842.c 		sg_dma_len(sg) = DMA_BUFFER_SIZE;
sg                561 drivers/net/ethernet/micrel/ks8842.c 			sg, 1, DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT);
sg                573 drivers/net/ethernet/micrel/ks8842.c 		sg_dma_address(sg) = 0;
sg                579 drivers/net/ethernet/micrel/ks8842.c 	if (sg_dma_address(sg))
sg                580 drivers/net/ethernet/micrel/ks8842.c 		dma_unmap_single(adapter->dev, sg_dma_address(sg),
sg                582 drivers/net/ethernet/micrel/ks8842.c 	sg_dma_address(sg) = 0;
sg                596 drivers/net/ethernet/micrel/ks8842.c 	dma_addr_t addr = sg_dma_address(&ctl->sg);
sg                872 drivers/net/ethernet/micrel/ks8842.c 	if (sg_dma_address(&rx_ctl->sg))
sg                873 drivers/net/ethernet/micrel/ks8842.c 		dma_unmap_single(adapter->dev, sg_dma_address(&rx_ctl->sg),
sg                875 drivers/net/ethernet/micrel/ks8842.c 	sg_dma_address(&rx_ctl->sg) = 0;
sg                898 drivers/net/ethernet/micrel/ks8842.c 	if (sg_dma_address(&tx_ctl->sg))
sg                899 drivers/net/ethernet/micrel/ks8842.c 		dma_unmap_single(adapter->dev, sg_dma_address(&tx_ctl->sg),
sg                901 drivers/net/ethernet/micrel/ks8842.c 	sg_dma_address(&tx_ctl->sg) = 0;
sg                925 drivers/net/ethernet/micrel/ks8842.c 	sg_init_table(&tx_ctl->sg, 1);
sg                941 drivers/net/ethernet/micrel/ks8842.c 	sg_dma_address(&tx_ctl->sg) = dma_map_single(adapter->dev,
sg                943 drivers/net/ethernet/micrel/ks8842.c 	if (dma_mapping_error(adapter->dev, sg_dma_address(&tx_ctl->sg))) {
sg                945 drivers/net/ethernet/micrel/ks8842.c 		sg_dma_address(&tx_ctl->sg) = 0;
sg                504 drivers/net/ethernet/natsemi/ns83820.c 	__le32 *sg;
sg                523 drivers/net/ethernet/natsemi/ns83820.c 	sg = dev->rx_info.descs + (next_empty * DESC_SIZE);
sg                531 drivers/net/ethernet/natsemi/ns83820.c 	build_rx_desc(dev, sg, 0, buf, cmdsts, 0);
sg                618 drivers/net/macsec.c 					     struct scatterlist **sg,
sg                638 drivers/net/macsec.c 	*sg = (struct scatterlist *)(tmp + sg_offset);
sg                650 drivers/net/macsec.c 	struct scatterlist *sg;
sg                733 drivers/net/macsec.c 	req = macsec_alloc_req(tx_sa->key.tfm, &iv, &sg, ret);
sg                742 drivers/net/macsec.c 	sg_init_table(sg, ret);
sg                743 drivers/net/macsec.c 	ret = skb_to_sgvec(skb, sg, 0, skb->len);
sg                754 drivers/net/macsec.c 		aead_request_set_crypt(req, sg, sg, len, iv);
sg                757 drivers/net/macsec.c 		aead_request_set_crypt(req, sg, sg, 0, iv);
sg                930 drivers/net/macsec.c 	struct scatterlist *sg;
sg                947 drivers/net/macsec.c 	req = macsec_alloc_req(rx_sa->key.tfm, &iv, &sg, ret);
sg                956 drivers/net/macsec.c 	sg_init_table(sg, ret);
sg                957 drivers/net/macsec.c 	ret = skb_to_sgvec(skb, sg, 0, skb->len);
sg                970 drivers/net/macsec.c 		aead_request_set_crypt(req, sg, sg, len, iv);
sg                979 drivers/net/macsec.c 		aead_request_set_crypt(req, sg, sg, icv_len, iv);
sg               1323 drivers/net/usb/usbnet.c 	urb->sg = kmalloc_array(num_sgs + 1, sizeof(struct scatterlist),
sg               1325 drivers/net/usb/usbnet.c 	if (!urb->sg)
sg               1329 drivers/net/usb/usbnet.c 	sg_init_table(urb->sg, urb->num_sgs + 1);
sg               1331 drivers/net/usb/usbnet.c 	sg_set_buf(&urb->sg[s++], skb->data, skb_headlen(skb));
sg               1338 drivers/net/usb/usbnet.c 		sg_set_page(&urb->sg[i + s], skb_frag_page(f), skb_frag_size(f),
sg               1405 drivers/net/usb/usbnet.c 					sg_set_buf(&urb->sg[urb->num_sgs++],
sg               1478 drivers/net/usb/usbnet.c 			kfree(urb->sg);
sg               1531 drivers/net/usb/usbnet.c 			kfree(entry->urb->sg);
sg               1899 drivers/net/usb/usbnet.c 				kfree(res->sg);
sg                123 drivers/net/virtio_net.c 	struct scatterlist sg[MAX_SKB_FRAGS + 2];
sg                154 drivers/net/virtio_net.c 	struct scatterlist sg[MAX_SKB_FRAGS + 2];
sg                467 drivers/net/virtio_net.c 	sg_init_one(sq->sg, xdpf->data, xdpf->len);
sg                469 drivers/net/virtio_net.c 	err = virtqueue_add_outbuf(sq->vq, sq->sg, 1, xdp_to_ptr(xdpf),
sg               1092 drivers/net/virtio_net.c 	sg_init_one(rq->sg, buf + VIRTNET_RX_PAD + xdp_headroom,
sg               1094 drivers/net/virtio_net.c 	err = virtqueue_add_inbuf_ctx(rq->vq, rq->sg, 1, buf, ctx, gfp);
sg               1107 drivers/net/virtio_net.c 	sg_init_table(rq->sg, MAX_SKB_FRAGS + 2);
sg               1117 drivers/net/virtio_net.c 		sg_set_buf(&rq->sg[i], page_address(first), PAGE_SIZE);
sg               1133 drivers/net/virtio_net.c 	sg_set_buf(&rq->sg[0], p, vi->hdr_len);
sg               1137 drivers/net/virtio_net.c 	sg_set_buf(&rq->sg[1], p + offset, PAGE_SIZE - offset);
sg               1141 drivers/net/virtio_net.c 	err = virtqueue_add_inbuf(rq->vq, rq->sg, MAX_SKB_FRAGS + 2,
sg               1199 drivers/net/virtio_net.c 	sg_init_one(rq->sg, buf, len);
sg               1201 drivers/net/virtio_net.c 	err = virtqueue_add_inbuf_ctx(rq->vq, rq->sg, 1, buf, ctx, gfp);
sg               1535 drivers/net/virtio_net.c 	sg_init_table(sq->sg, skb_shinfo(skb)->nr_frags + (can_push ? 1 : 2));
sg               1538 drivers/net/virtio_net.c 		num_sg = skb_to_sgvec(skb, sq->sg, 0, skb->len);
sg               1544 drivers/net/virtio_net.c 		sg_set_buf(sq->sg, hdr, hdr_len);
sg               1545 drivers/net/virtio_net.c 		num_sg = skb_to_sgvec(skb, sq->sg + 1, 0, skb->len);
sg               1550 drivers/net/virtio_net.c 	return virtqueue_add_outbuf(sq->vq, sq->sg, num_sg, skb, GFP_ATOMIC);
sg               1677 drivers/net/virtio_net.c 	struct scatterlist sg;
sg               1691 drivers/net/virtio_net.c 		sg_init_one(&sg, addr->sa_data, dev->addr_len);
sg               1693 drivers/net/virtio_net.c 					  VIRTIO_NET_CTRL_MAC_ADDR_SET, &sg)) {
sg               1767 drivers/net/virtio_net.c 	struct scatterlist sg;
sg               1774 drivers/net/virtio_net.c 	sg_init_one(&sg, &vi->ctrl->mq, sizeof(vi->ctrl->mq));
sg               1777 drivers/net/virtio_net.c 				  VIRTIO_NET_CTRL_MQ_VQ_PAIRS_SET, &sg)) {
sg               1821 drivers/net/virtio_net.c 	struct scatterlist sg[2];
sg               1836 drivers/net/virtio_net.c 	sg_init_one(sg, &vi->ctrl->promisc, sizeof(vi->ctrl->promisc));
sg               1839 drivers/net/virtio_net.c 				  VIRTIO_NET_CTRL_RX_PROMISC, sg))
sg               1843 drivers/net/virtio_net.c 	sg_init_one(sg, &vi->ctrl->allmulti, sizeof(vi->ctrl->allmulti));
sg               1846 drivers/net/virtio_net.c 				  VIRTIO_NET_CTRL_RX_ALLMULTI, sg))
sg               1859 drivers/net/virtio_net.c 	sg_init_table(sg, 2);
sg               1867 drivers/net/virtio_net.c 	sg_set_buf(&sg[0], mac_data,
sg               1878 drivers/net/virtio_net.c 	sg_set_buf(&sg[1], mac_data,
sg               1882 drivers/net/virtio_net.c 				  VIRTIO_NET_CTRL_MAC_TABLE_SET, sg))
sg               1892 drivers/net/virtio_net.c 	struct scatterlist sg;
sg               1895 drivers/net/virtio_net.c 	sg_init_one(&sg, &vi->ctrl->vid, sizeof(vi->ctrl->vid));
sg               1898 drivers/net/virtio_net.c 				  VIRTIO_NET_CTRL_VLAN_ADD, &sg))
sg               1907 drivers/net/virtio_net.c 	struct scatterlist sg;
sg               1910 drivers/net/virtio_net.c 	sg_init_one(&sg, &vi->ctrl->vid, sizeof(vi->ctrl->vid));
sg               1913 drivers/net/virtio_net.c 				  VIRTIO_NET_CTRL_VLAN_DEL, &sg))
sg               2370 drivers/net/virtio_net.c 	struct scatterlist sg;
sg               2373 drivers/net/virtio_net.c 	sg_init_one(&sg, &vi->ctrl->offloads, sizeof(vi->ctrl->offloads));
sg               2376 drivers/net/virtio_net.c 				  VIRTIO_NET_CTRL_GUEST_OFFLOADS_SET, &sg)) {
sg               2857 drivers/net/virtio_net.c 		sg_init_table(vi->rq[i].sg, ARRAY_SIZE(vi->rq[i].sg));
sg               2859 drivers/net/virtio_net.c 		sg_init_table(vi->sq[i].sg, ARRAY_SIZE(vi->sq[i].sg));
sg                226 drivers/net/wireless/ath/ath6kl/sdio.c 	struct scatterlist *sg;
sg                242 drivers/net/wireless/ath/ath6kl/sdio.c 	sg = scat_req->sgentries;
sg                243 drivers/net/wireless/ath/ath6kl/sdio.c 	sg_init_table(sg, scat_req->scat_entries);
sg                246 drivers/net/wireless/ath/ath6kl/sdio.c 	for (i = 0; i < scat_req->scat_entries; i++, sg++) {
sg                251 drivers/net/wireless/ath/ath6kl/sdio.c 		sg_set_buf(sg, scat_req->scat_list[i].buf,
sg                256 drivers/net/wireless/ath/ath6kl/sdio.c 	data->sg = scat_req->sgentries;
sg                432 drivers/net/wireless/broadcom/brcm80211/brcmfmac/bcmsdh.c 	mmc_dat.sg = sdiodev->sgtable.sgl;
sg               1640 drivers/net/wireless/cisco/airo.c 	struct scatterlist sg;
sg               1647 drivers/net/wireless/cisco/airo.c 	sg_init_one(&sg, context->coeff, sizeof(context->coeff));
sg               1651 drivers/net/wireless/cisco/airo.c 	skcipher_request_set_crypt(req, &sg, &sg, sizeof(context->coeff), iv);
sg                291 drivers/net/wireless/mediatek/mt76/usb.c 		sg_set_page(&urb->sg[i], page, q->buf_size, offset);
sg                298 drivers/net/wireless/mediatek/mt76/usb.c 			skb_free_frag(sg_virt(&urb->sg[j]));
sg                304 drivers/net/wireless/mediatek/mt76/usb.c 	sg_init_marker(urb->sg, urb->num_sgs);
sg                339 drivers/net/wireless/mediatek/mt76/usb.c 		e->urb->sg = (struct scatterlist *)(e->urb + 1);
sg                362 drivers/net/wireless/mediatek/mt76/usb.c 		skb_free_frag(sg_virt(&urb->sg[i]));
sg                463 drivers/net/wireless/mediatek/mt76/usb.c 	u8 *data = urb->num_sgs ? sg_virt(&urb->sg[0]) : urb->transfer_buffer;
sg                464 drivers/net/wireless/mediatek/mt76/usb.c 	int data_len = urb->num_sgs ? urb->sg[0].length : urb->actual_length;
sg                482 drivers/net/wireless/mediatek/mt76/usb.c 		data_len = min_t(int, len, urb->sg[nsgs].length);
sg                484 drivers/net/wireless/mediatek/mt76/usb.c 				sg_page(&urb->sg[nsgs]),
sg                485 drivers/net/wireless/mediatek/mt76/usb.c 				urb->sg[nsgs].offset,
sg                759 drivers/net/wireless/mediatek/mt76/usb.c 	sg_init_table(urb->sg, MT_TX_SG_MAX_SIZE);
sg                760 drivers/net/wireless/mediatek/mt76/usb.c 	urb->num_sgs = skb_to_sgvec(skb, urb->sg, 0, skb->len);
sg                 35 drivers/net/wireless/ti/wl12xx/main.c 	.sg = {
sg                305 drivers/net/wireless/ti/wl18xx/acx.c 		     wl->conf.sg.params[WL18XX_CONF_SG_TIME_SYNC],
sg                314 drivers/net/wireless/ti/wl18xx/acx.c 	acx->sync_mode = wl->conf.sg.params[WL18XX_CONF_SG_TIME_SYNC];
sg                165 drivers/net/wireless/ti/wl18xx/main.c 	.sg = {
sg                492 drivers/net/wireless/ti/wlcore/acx.c 		pta->enable = wl->conf.sg.state;
sg                510 drivers/net/wireless/ti/wlcore/acx.c 	struct conf_sg_settings *c = &wl->conf.sg;
sg               1131 drivers/net/wireless/ti/wlcore/conf.h 	struct conf_sg_settings sg;
sg                229 drivers/net/xen-netback/xenbus.c 	int sg;
sg                247 drivers/net/xen-netback/xenbus.c 	sg = 1;
sg                256 drivers/net/xen-netback/xenbus.c 		err = xenbus_printf(xbt, dev->nodename, "feature-sg", "%d", sg);
sg                263 drivers/net/xen-netback/xenbus.c 				    "%d", sg);
sg                270 drivers/net/xen-netback/xenbus.c 				    "%d", sg);
sg                 43 drivers/nvdimm/nd_virtio.c 	struct scatterlist *sgs[2], sg, ret;
sg                 57 drivers/nvdimm/nd_virtio.c 	sg_init_one(&sg, &req_data->req, sizeof(req_data->req));
sg                 58 drivers/nvdimm/nd_virtio.c 	sgs[0] = &sg;
sg                967 drivers/nvme/host/fc.c fc_map_sg(struct scatterlist *sg, int nents)
sg                972 drivers/nvme/host/fc.c 	WARN_ON(nents == 0 || sg[0].length == 0);
sg                974 drivers/nvme/host/fc.c 	for_each_sg(sg, s, nents, i) {
sg                984 drivers/nvme/host/fc.c fc_dma_map_sg(struct device *dev, struct scatterlist *sg, int nents,
sg                987 drivers/nvme/host/fc.c 	return dev ? dma_map_sg(dev, sg, nents, dir) : fc_map_sg(sg, nents);
sg                991 drivers/nvme/host/fc.c fc_dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nents,
sg                995 drivers/nvme/host/fc.c 		dma_unmap_sg(dev, sg, nents, dir);
sg                210 drivers/nvme/host/pci.c 	struct scatterlist *sg;
sg                501 drivers/nvme/host/pci.c 	return (void **)(iod->sg + blk_rq_nr_phys_segments(req));
sg                539 drivers/nvme/host/pci.c 	if (is_pci_p2pdma_page(sg_page(iod->sg)))
sg                540 drivers/nvme/host/pci.c 		pci_p2pdma_unmap_sg(dev->dev, iod->sg, iod->nents,
sg                543 drivers/nvme/host/pci.c 		dma_unmap_sg(dev->dev, iod->sg, iod->nents, rq_dma_dir(req));
sg                568 drivers/nvme/host/pci.c 	mempool_free(iod->sg, dev->iod_mempool);
sg                574 drivers/nvme/host/pci.c 	struct scatterlist *sg;
sg                576 drivers/nvme/host/pci.c 	for_each_sg(sgl, sg, nents, i) {
sg                577 drivers/nvme/host/pci.c 		dma_addr_t phys = sg_phys(sg);
sg                580 drivers/nvme/host/pci.c 			i, &phys, sg->offset, sg->length, &sg_dma_address(sg),
sg                581 drivers/nvme/host/pci.c 			sg_dma_len(sg));
sg                591 drivers/nvme/host/pci.c 	struct scatterlist *sg = iod->sg;
sg                592 drivers/nvme/host/pci.c 	int dma_len = sg_dma_len(sg);
sg                593 drivers/nvme/host/pci.c 	u64 dma_addr = sg_dma_address(sg);
sg                611 drivers/nvme/host/pci.c 		sg = sg_next(sg);
sg                612 drivers/nvme/host/pci.c 		dma_addr = sg_dma_address(sg);
sg                613 drivers/nvme/host/pci.c 		dma_len = sg_dma_len(sg);
sg                660 drivers/nvme/host/pci.c 		sg = sg_next(sg);
sg                661 drivers/nvme/host/pci.c 		dma_addr = sg_dma_address(sg);
sg                662 drivers/nvme/host/pci.c 		dma_len = sg_dma_len(sg);
sg                666 drivers/nvme/host/pci.c 	cmnd->dptr.prp1 = cpu_to_le64(sg_dma_address(iod->sg));
sg                672 drivers/nvme/host/pci.c 	WARN(DO_ONCE(nvme_print_sgl, iod->sg, iod->nents),
sg                679 drivers/nvme/host/pci.c 		struct scatterlist *sg)
sg                681 drivers/nvme/host/pci.c 	sge->addr = cpu_to_le64(sg_dma_address(sg));
sg                682 drivers/nvme/host/pci.c 	sge->length = cpu_to_le32(sg_dma_len(sg));
sg                705 drivers/nvme/host/pci.c 	struct scatterlist *sg = iod->sg;
sg                713 drivers/nvme/host/pci.c 		nvme_pci_sgl_set_data(&cmd->dptr.sgl, sg);
sg                751 drivers/nvme/host/pci.c 		nvme_pci_sgl_set_data(&sg_list[i++], sg);
sg                752 drivers/nvme/host/pci.c 		sg = sg_next(sg);
sg                818 drivers/nvme/host/pci.c 	iod->sg = mempool_alloc(dev->iod_mempool, GFP_ATOMIC);
sg                819 drivers/nvme/host/pci.c 	if (!iod->sg)
sg                821 drivers/nvme/host/pci.c 	sg_init_table(iod->sg, blk_rq_nr_phys_segments(req));
sg                822 drivers/nvme/host/pci.c 	iod->nents = blk_rq_map_sg(req->q, req, iod->sg);
sg                826 drivers/nvme/host/pci.c 	if (is_pci_p2pdma_page(sg_page(iod->sg)))
sg                827 drivers/nvme/host/pci.c 		nr_mapped = pci_p2pdma_map_sg_attrs(dev->dev, iod->sg,
sg                830 drivers/nvme/host/pci.c 		nr_mapped = dma_map_sg_attrs(dev->dev, iod->sg, iod->nents,
sg               1172 drivers/nvme/host/rdma.c 	struct nvme_keyed_sgl_desc *sg = &c->common.dptr.ksgl;
sg               1174 drivers/nvme/host/rdma.c 	sg->addr = 0;
sg               1175 drivers/nvme/host/rdma.c 	put_unaligned_le24(0, sg->length);
sg               1176 drivers/nvme/host/rdma.c 	put_unaligned_le32(0, sg->key);
sg               1177 drivers/nvme/host/rdma.c 	sg->type = NVME_KEY_SGL_FMT_DATA_DESC << 4;
sg               1185 drivers/nvme/host/rdma.c 	struct nvme_sgl_desc *sg = &c->common.dptr.sgl;
sg               1198 drivers/nvme/host/rdma.c 	sg->addr = cpu_to_le64(queue->ctrl->ctrl.icdoff);
sg               1199 drivers/nvme/host/rdma.c 	sg->length = cpu_to_le32(len);
sg               1200 drivers/nvme/host/rdma.c 	sg->type = (NVME_SGL_FMT_DATA_DESC << 4) | NVME_SGL_FMT_OFFSET;
sg               1209 drivers/nvme/host/rdma.c 	struct nvme_keyed_sgl_desc *sg = &c->common.dptr.ksgl;
sg               1211 drivers/nvme/host/rdma.c 	sg->addr = cpu_to_le64(sg_dma_address(req->sg_table.sgl));
sg               1212 drivers/nvme/host/rdma.c 	put_unaligned_le24(sg_dma_len(req->sg_table.sgl), sg->length);
sg               1213 drivers/nvme/host/rdma.c 	put_unaligned_le32(queue->device->pd->unsafe_global_rkey, sg->key);
sg               1214 drivers/nvme/host/rdma.c 	sg->type = NVME_KEY_SGL_FMT_DATA_DESC << 4;
sg               1222 drivers/nvme/host/rdma.c 	struct nvme_keyed_sgl_desc *sg = &c->common.dptr.ksgl;
sg               1255 drivers/nvme/host/rdma.c 	sg->addr = cpu_to_le64(req->mr->iova);
sg               1256 drivers/nvme/host/rdma.c 	put_unaligned_le24(req->mr->length, sg->length);
sg               1257 drivers/nvme/host/rdma.c 	put_unaligned_le32(req->mr->rkey, sg->key);
sg               1258 drivers/nvme/host/rdma.c 	sg->type = (NVME_KEY_SGL_FMT_DATA_DESC << 4) |
sg                286 drivers/nvme/host/tcp.c 	struct scatterlist sg;
sg                288 drivers/nvme/host/tcp.c 	sg_init_marker(&sg, 1);
sg                289 drivers/nvme/host/tcp.c 	sg_set_page(&sg, page, len, off);
sg                290 drivers/nvme/host/tcp.c 	ahash_request_set_crypt(hash, &sg, NULL, len);
sg                297 drivers/nvme/host/tcp.c 	struct scatterlist sg;
sg                299 drivers/nvme/host/tcp.c 	sg_init_one(&sg, pdu, len);
sg                300 drivers/nvme/host/tcp.c 	ahash_request_set_crypt(hash, &sg, pdu + len, len);
sg               1988 drivers/nvme/host/tcp.c 	struct nvme_sgl_desc *sg = &c->common.dptr.sgl;
sg               1990 drivers/nvme/host/tcp.c 	sg->addr = 0;
sg               1991 drivers/nvme/host/tcp.c 	sg->length = 0;
sg               1992 drivers/nvme/host/tcp.c 	sg->type = (NVME_TRANSPORT_SGL_DATA_DESC << 4) |
sg               1999 drivers/nvme/host/tcp.c 	struct nvme_sgl_desc *sg = &c->common.dptr.sgl;
sg               2001 drivers/nvme/host/tcp.c 	sg->addr = cpu_to_le64(queue->ctrl->ctrl.icdoff);
sg               2002 drivers/nvme/host/tcp.c 	sg->length = cpu_to_le32(data_len);
sg               2003 drivers/nvme/host/tcp.c 	sg->type = (NVME_SGL_FMT_DATA_DESC << 4) | NVME_SGL_FMT_OFFSET;
sg               2009 drivers/nvme/host/tcp.c 	struct nvme_sgl_desc *sg = &c->common.dptr.sgl;
sg               2011 drivers/nvme/host/tcp.c 	sg->addr = 0;
sg               2012 drivers/nvme/host/tcp.c 	sg->length = cpu_to_le32(data_len);
sg               2013 drivers/nvme/host/tcp.c 	sg->type = (NVME_TRANSPORT_SGL_DATA_DESC << 4) |
sg                559 drivers/nvme/target/admin-cmd.c 	if (sg_zero_buffer(req->sg, req->sg_cnt, NVME_IDENTIFY_DATA_SIZE - off,
sg                 91 drivers/nvme/target/core.c 	if (sg_pcopy_from_buffer(req->sg, req->sg_cnt, buf, len, off) != len) {
sg                100 drivers/nvme/target/core.c 	if (sg_pcopy_to_buffer(req->sg, req->sg_cnt, buf, len, off) != len) {
sg                109 drivers/nvme/target/core.c 	if (sg_zero_buffer(req->sg, req->sg_cnt, len, off) != len) {
sg                866 drivers/nvme/target/core.c 	req->sg = NULL;
sg                955 drivers/nvme/target/core.c 			req->sg = pci_p2pmem_alloc_sgl(p2p_dev, &req->sg_cnt,
sg                957 drivers/nvme/target/core.c 			if (req->sg) {
sg                969 drivers/nvme/target/core.c 	req->sg = sgl_alloc(req->transfer_len, GFP_KERNEL, &req->sg_cnt);
sg                970 drivers/nvme/target/core.c 	if (!req->sg)
sg                980 drivers/nvme/target/core.c 		pci_p2pmem_free_sgl(req->p2p_dev, req->sg);
sg                982 drivers/nvme/target/core.c 		sgl_free(req->sg);
sg                984 drivers/nvme/target/core.c 	req->sg = NULL;
sg                 44 drivers/nvme/target/fc.c 	struct scatterlist		sg[2];
sg                289 drivers/nvme/target/fc.c fc_map_sg(struct scatterlist *sg, int nents)
sg                294 drivers/nvme/target/fc.c 	WARN_ON(nents == 0 || sg[0].length == 0);
sg                296 drivers/nvme/target/fc.c 	for_each_sg(sg, s, nents, i) {
sg                306 drivers/nvme/target/fc.c fc_dma_map_sg(struct device *dev, struct scatterlist *sg, int nents,
sg                309 drivers/nvme/target/fc.c 	return dev ? dma_map_sg(dev, sg, nents, dir) : fc_map_sg(sg, nents);
sg                313 drivers/nvme/target/fc.c fc_dma_unmap_sg(struct device *dev, struct scatterlist *sg, int nents,
sg                317 drivers/nvme/target/fc.c 		dma_unmap_sg(dev, sg, nents, dir);
sg               1720 drivers/nvme/target/fc.c 	struct scatterlist *sg;
sg               1723 drivers/nvme/target/fc.c 	sg = sgl_alloc(fod->req.transfer_len, GFP_KERNEL, &nent);
sg               1724 drivers/nvme/target/fc.c 	if (!sg)
sg               1727 drivers/nvme/target/fc.c 	fod->data_sg = sg;
sg               1729 drivers/nvme/target/fc.c 	fod->data_sg_cnt = fc_dma_map_sg(fod->tgtport->dev, sg, nent,
sg               1878 drivers/nvme/target/fc.c 	struct scatterlist *sg = fod->next_sg;
sg               1897 drivers/nvme/target/fc.c 	fcpreq->sg = sg;
sg               1901 drivers/nvme/target/fc.c 	       tlen + sg_dma_len(sg) < NVMET_FC_MAX_SEQ_LENGTH) {
sg               1903 drivers/nvme/target/fc.c 		tlen += sg_dma_len(sg);
sg               1904 drivers/nvme/target/fc.c 		sg = sg_next(sg);
sg               1908 drivers/nvme/target/fc.c 		tlen += min_t(u32, sg_dma_len(sg), remaininglen);
sg               1909 drivers/nvme/target/fc.c 		sg = sg_next(sg);
sg               1912 drivers/nvme/target/fc.c 		fod->next_sg = sg;
sg               2218 drivers/nvme/target/fc.c 	fod->req.sg = fod->data_sg;
sg                656 drivers/nvme/target/fcloop.c 			fcloop_fcp_copy_data(op, tgt_fcpreq->sg,
sg                667 drivers/nvme/target/fcloop.c 			fcloop_fcp_copy_data(op, tgt_fcpreq->sg,
sg                149 drivers/nvme/target/io-cmd-bdev.c 	struct scatterlist *sg;
sg                167 drivers/nvme/target/io-cmd-bdev.c 	if (is_pci_p2pdma_page(sg_page(req->sg)))
sg                185 drivers/nvme/target/io-cmd-bdev.c 	for_each_sg(req->sg, sg, req->sg_cnt, i) {
sg                186 drivers/nvme/target/io-cmd-bdev.c 		while (bio_add_page(bio, sg_page(sg), sg->length, sg->offset)
sg                187 drivers/nvme/target/io-cmd-bdev.c 				!= sg->length) {
sg                199 drivers/nvme/target/io-cmd-bdev.c 		sector += sg->length >> 9;
sg                 83 drivers/nvme/target/io-cmd-file.c static void nvmet_file_init_bvec(struct bio_vec *bv, struct scatterlist *sg)
sg                 85 drivers/nvme/target/io-cmd-file.c 	bv->bv_page = sg_page(sg);
sg                 86 drivers/nvme/target/io-cmd-file.c 	bv->bv_offset = sg->offset;
sg                 87 drivers/nvme/target/io-cmd-file.c 	bv->bv_len = sg->length;
sg                143 drivers/nvme/target/io-cmd-file.c 	struct scatterlist *sg;
sg                155 drivers/nvme/target/io-cmd-file.c 	for_each_sg(req->sg, sg, req->sg_cnt, i) {
sg                156 drivers/nvme/target/io-cmd-file.c 		nvmet_file_init_bvec(&req->f.bvec[bv_cnt], sg);
sg                165 drivers/nvme/target/loop.c 		iod->req.sg = iod->sg_table.sgl;
sg                293 drivers/nvme/target/nvmet.h 	struct scatterlist	*sg;
sg                217 drivers/nvme/target/rdma.c 	struct scatterlist *sg;
sg                224 drivers/nvme/target/rdma.c 	sg = c->inline_sg;
sg                227 drivers/nvme/target/rdma.c 	for (i = 0; i < ndev->inline_page_count; i++, sg++, sge++) {
sg                231 drivers/nvme/target/rdma.c 		if (sg_page(sg))
sg                232 drivers/nvme/target/rdma.c 			__free_page(sg_page(sg));
sg                239 drivers/nvme/target/rdma.c 	struct scatterlist *sg;
sg                248 drivers/nvme/target/rdma.c 	sg = c->inline_sg;
sg                249 drivers/nvme/target/rdma.c 	sg_init_table(sg, ndev->inline_page_count);
sg                253 drivers/nvme/target/rdma.c 	for (i = 0; i < ndev->inline_page_count; i++, sg++, sge++) {
sg                257 drivers/nvme/target/rdma.c 		sg_assign_page(sg, pg);
sg                269 drivers/nvme/target/rdma.c 	for (; i >= 0; i--, sg--, sge--) {
sg                273 drivers/nvme/target/rdma.c 		if (sg_page(sg))
sg                274 drivers/nvme/target/rdma.c 			__free_page(sg_page(sg));
sg                507 drivers/nvme/target/rdma.c 				queue->cm_id->port_num, rsp->req.sg,
sg                511 drivers/nvme/target/rdma.c 	if (rsp->req.sg != rsp->cmd->inline_sg)
sg                591 drivers/nvme/target/rdma.c 			queue->cm_id->port_num, rsp->req.sg,
sg                613 drivers/nvme/target/rdma.c 	struct scatterlist *sg;
sg                616 drivers/nvme/target/rdma.c 	sg = rsp->cmd->inline_sg;
sg                617 drivers/nvme/target/rdma.c 	for (i = 0; i < sg_count; i++, sg++) {
sg                619 drivers/nvme/target/rdma.c 			sg_unmark_end(sg);
sg                621 drivers/nvme/target/rdma.c 			sg_mark_end(sg);
sg                622 drivers/nvme/target/rdma.c 		sg->offset = off;
sg                623 drivers/nvme/target/rdma.c 		sg->length = min_t(int, len, PAGE_SIZE - off);
sg                624 drivers/nvme/target/rdma.c 		len -= sg->length;
sg                629 drivers/nvme/target/rdma.c 	rsp->req.sg = rsp->cmd->inline_sg;
sg                679 drivers/nvme/target/rdma.c 			rsp->req.sg, rsp->req.sg_cnt, 0, addr, key,
sg                220 drivers/nvme/target/tcp.c 	struct scatterlist sg;
sg                222 drivers/nvme/target/tcp.c 	sg_init_one(&sg, pdu, len);
sg                223 drivers/nvme/target/tcp.c 	ahash_request_set_crypt(hash, &sg, pdu + len, len);
sg                272 drivers/nvme/target/tcp.c 	struct scatterlist *sg;
sg                275 drivers/nvme/target/tcp.c 	sg = &cmd->req.sg[cmd->sg_idx];
sg                278 drivers/nvme/target/tcp.c 		kunmap(sg_page(&sg[i]));
sg                284 drivers/nvme/target/tcp.c 	struct scatterlist *sg;
sg                292 drivers/nvme/target/tcp.c 	sg = &cmd->req.sg[cmd->sg_idx];
sg                295 drivers/nvme/target/tcp.c 		u32 iov_len = min_t(u32, length, sg->length - sg_offset);
sg                297 drivers/nvme/target/tcp.c 		iov->iov_base = kmap(sg_page(sg)) + sg->offset + sg_offset;
sg                301 drivers/nvme/target/tcp.c 		sg = sg_next(sg);
sg                337 drivers/nvme/target/tcp.c 	cmd->req.sg = sgl_alloc(len, GFP_KERNEL, &cmd->req.sg_cnt);
sg                338 drivers/nvme/target/tcp.c 	if (!cmd->req.sg)
sg                340 drivers/nvme/target/tcp.c 	cmd->cur_sg = cmd->req.sg;
sg                351 drivers/nvme/target/tcp.c 	sgl_free(cmd->req.sg);
sg                358 drivers/nvme/target/tcp.c 	ahash_request_set_crypt(hash, cmd->req.sg,
sg                562 drivers/nvme/target/tcp.c 		sgl_free(cmd->req.sg);
sg                593 drivers/nvme/target/tcp.c 	sgl_free(cmd->req.sg);
sg               1316 drivers/nvme/target/tcp.c 	sgl_free(cmd->req.sg);
sg                354 drivers/pci/controller/vmd.c static int vmd_map_sg(struct device *dev, struct scatterlist *sg, int nents,
sg                357 drivers/pci/controller/vmd.c 	return dma_map_sg_attrs(to_vmd_dev(dev), sg, nents, dir, attrs);
sg                360 drivers/pci/controller/vmd.c static void vmd_unmap_sg(struct device *dev, struct scatterlist *sg, int nents,
sg                363 drivers/pci/controller/vmd.c 	dma_unmap_sg_attrs(to_vmd_dev(dev), sg, nents, dir, attrs);
sg                378 drivers/pci/controller/vmd.c static void vmd_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg,
sg                381 drivers/pci/controller/vmd.c 	dma_sync_sg_for_cpu(to_vmd_dev(dev), sg, nents, dir);
sg                384 drivers/pci/controller/vmd.c static void vmd_sync_sg_for_device(struct device *dev, struct scatterlist *sg,
sg                387 drivers/pci/controller/vmd.c 	dma_sync_sg_for_device(to_vmd_dev(dev), sg, nents, dir);
sg                744 drivers/pci/p2pdma.c 	struct scatterlist *sg;
sg                747 drivers/pci/p2pdma.c 	sg = kzalloc(sizeof(*sg), GFP_KERNEL);
sg                748 drivers/pci/p2pdma.c 	if (!sg)
sg                751 drivers/pci/p2pdma.c 	sg_init_table(sg, 1);
sg                757 drivers/pci/p2pdma.c 	sg_set_buf(sg, addr, length);
sg                759 drivers/pci/p2pdma.c 	return sg;
sg                762 drivers/pci/p2pdma.c 	kfree(sg);
sg                774 drivers/pci/p2pdma.c 	struct scatterlist *sg;
sg                777 drivers/pci/p2pdma.c 	for_each_sg(sgl, sg, INT_MAX, count) {
sg                778 drivers/pci/p2pdma.c 		if (!sg)
sg                781 drivers/pci/p2pdma.c 		pci_free_p2pmem(pdev, sg_virt(sg), sg->length);
sg                816 drivers/pci/p2pdma.c 		struct device *dev, struct scatterlist *sg, int nents)
sg                832 drivers/pci/p2pdma.c 	for_each_sg(sg, s, nents, i) {
sg                855 drivers/pci/p2pdma.c int pci_p2pdma_map_sg_attrs(struct device *dev, struct scatterlist *sg,
sg                859 drivers/pci/p2pdma.c 		to_p2p_pgmap(sg_page(sg)->pgmap);
sg                869 drivers/pci/p2pdma.c 		return dma_map_sg_attrs(dev, sg, nents, dir, attrs);
sg                871 drivers/pci/p2pdma.c 		return __pci_p2pdma_map_sg(p2p_pgmap, dev, sg, nents);
sg                888 drivers/pci/p2pdma.c void pci_p2pdma_unmap_sg_attrs(struct device *dev, struct scatterlist *sg,
sg                892 drivers/pci/p2pdma.c 		to_p2p_pgmap(sg_page(sg)->pgmap);
sg                904 drivers/pci/p2pdma.c 		dma_unmap_sg_attrs(dev, sg, nents, dir, attrs);
sg                622 drivers/rapidio/devices/rio_mport_cdev.c 	tx_data.sg = sgt->sgl;
sg                683 drivers/rapidio/devices/tsi721.h 	struct scatterlist		*sg;
sg                333 drivers/rapidio/devices/tsi721_dma.c 		      struct scatterlist *sg, u32 sys_size)
sg                350 drivers/rapidio/devices/tsi721_dma.c 					(u64)sg_dma_address(sg) & 0xffffffff);
sg                351 drivers/rapidio/devices/tsi721_dma.c 	bd_ptr->t1.bufptr_hi = cpu_to_le32((u64)sg_dma_address(sg) >> 32);
sg                417 drivers/rapidio/devices/tsi721_dma.c 	struct scatterlist *sg;
sg                453 drivers/rapidio/devices/tsi721_dma.c 	for_each_sg(desc->sg, sg, desc->sg_len, i) {
sg                457 drivers/rapidio/devices/tsi721_dma.c 			(unsigned long long)sg_dma_address(sg), sg_dma_len(sg));
sg                459 drivers/rapidio/devices/tsi721_dma.c 		if (sg_dma_len(sg) > TSI721_BDMA_MAX_BCOUNT) {
sg                470 drivers/rapidio/devices/tsi721_dma.c 		if (next_addr == sg_dma_address(sg) &&
sg                471 drivers/rapidio/devices/tsi721_dma.c 		    bcount + sg_dma_len(sg) <= TSI721_BDMA_MAX_BCOUNT) {
sg                473 drivers/rapidio/devices/tsi721_dma.c 			bcount += sg_dma_len(sg);
sg                488 drivers/rapidio/devices/tsi721_dma.c 			desc->sg = sg;
sg                494 drivers/rapidio/devices/tsi721_dma.c 		err = tsi721_desc_fill_init(desc, bd_ptr, sg, sys_size);
sg                503 drivers/rapidio/devices/tsi721_dma.c 		next_addr = sg_dma_address(sg);
sg                504 drivers/rapidio/devices/tsi721_dma.c 		bcount = sg_dma_len(sg);
sg                514 drivers/rapidio/devices/tsi721_dma.c 		if (sg_is_last(sg)) {
sg                521 drivers/rapidio/devices/tsi721_dma.c 			rio_addr += sg_dma_len(sg);
sg                522 drivers/rapidio/devices/tsi721_dma.c 			next_addr += sg_dma_len(sg);
sg                877 drivers/rapidio/devices/tsi721_dma.c 		desc->sg	= sgl;
sg               1897 drivers/rapidio/rio.c 	return dmaengine_prep_rio_sg(dchan, data->sg, data->sg_len,
sg                199 drivers/rpmsg/virtio_rpmsg_bus.c rpmsg_sg_init(struct scatterlist *sg, void *cpu_addr, unsigned int len)
sg                202 drivers/rpmsg/virtio_rpmsg_bus.c 		sg_init_table(sg, 1);
sg                203 drivers/rpmsg/virtio_rpmsg_bus.c 		sg_set_page(sg, vmalloc_to_page(cpu_addr), len,
sg                207 drivers/rpmsg/virtio_rpmsg_bus.c 		sg_init_one(sg, cpu_addr, len);
sg                561 drivers/rpmsg/virtio_rpmsg_bus.c 	struct scatterlist sg;
sg                629 drivers/rpmsg/virtio_rpmsg_bus.c 	rpmsg_sg_init(&sg, msg, sizeof(*msg) + len);
sg                634 drivers/rpmsg/virtio_rpmsg_bus.c 	err = virtqueue_add_outbuf(vrp->svq, &sg, 1, msg, GFP_KERNEL);
sg                706 drivers/rpmsg/virtio_rpmsg_bus.c 	struct scatterlist sg;
sg                753 drivers/rpmsg/virtio_rpmsg_bus.c 	rpmsg_sg_init(&sg, msg, vrp->buf_size);
sg                756 drivers/rpmsg/virtio_rpmsg_bus.c 	err = virtqueue_add_inbuf(vrp->rvq, &sg, 1, msg, GFP_KERNEL);
sg                934 drivers/rpmsg/virtio_rpmsg_bus.c 		struct scatterlist sg;
sg                937 drivers/rpmsg/virtio_rpmsg_bus.c 		rpmsg_sg_init(&sg, cpu_addr, vrp->buf_size);
sg                939 drivers/rpmsg/virtio_rpmsg_bus.c 		err = virtqueue_add_inbuf(vrp->rvq, &sg, 1, cpu_addr,
sg                424 drivers/s390/scsi/zfcp_dbf.c 		  char *paytag, struct scatterlist *sg, u8 id, u16 len,
sg                441 drivers/s390/scsi/zfcp_dbf.c 	if (!sg)
sg                443 drivers/s390/scsi/zfcp_dbf.c 	rec_len = min_t(unsigned int, sg->length, ZFCP_DBF_SAN_MAX_PAYLOAD);
sg                444 drivers/s390/scsi/zfcp_dbf.c 	memcpy(rec->payload, sg_virt(sg), rec_len); /* part of 1st sg entry */
sg                456 drivers/s390/scsi/zfcp_dbf.c 	for (; sg && pay_sum < cap_len; sg = sg_next(sg)) {
sg                459 drivers/s390/scsi/zfcp_dbf.c 		while (offset < sg->length && pay_sum < cap_len) {
sg                461 drivers/s390/scsi/zfcp_dbf.c 				      (u16)(sg->length - offset));
sg                463 drivers/s390/scsi/zfcp_dbf.c 			memcpy(payload->data, sg_virt(sg) + offset, pay_len);
sg                592 drivers/s390/scsi/zfcp_dbf.c 	struct scatterlist sg;
sg                599 drivers/s390/scsi/zfcp_dbf.c 	sg_init_one(&sg, srb->payload.data, length);
sg                600 drivers/s390/scsi/zfcp_dbf.c 	zfcp_dbf_san(tag, dbf, "san_els", &sg, ZFCP_DBF_SAN_ELS, length,
sg                619 drivers/s390/scsi/zfcp_fc.c static void zfcp_fc_sg_free_table(struct scatterlist *sg, int count)
sg                623 drivers/s390/scsi/zfcp_fc.c 	for (i = 0; i < count; i++, sg = sg_next(sg))
sg                624 drivers/s390/scsi/zfcp_fc.c 		if (sg)
sg                625 drivers/s390/scsi/zfcp_fc.c 			free_page((unsigned long) sg_virt(sg));
sg                638 drivers/s390/scsi/zfcp_fc.c static int zfcp_fc_sg_setup_table(struct scatterlist *sg, int count)
sg                643 drivers/s390/scsi/zfcp_fc.c 	sg_init_table(sg, count);
sg                644 drivers/s390/scsi/zfcp_fc.c 	for (i = 0; i < count; i++, sg = sg_next(sg)) {
sg                647 drivers/s390/scsi/zfcp_fc.c 			zfcp_fc_sg_free_table(sg, i);
sg                650 drivers/s390/scsi/zfcp_fc.c 		sg_set_buf(sg, addr, PAGE_SIZE);
sg                715 drivers/s390/scsi/zfcp_fc.c 	struct scatterlist *sg = &fc_req->sg_rsp;
sg                716 drivers/s390/scsi/zfcp_fc.c 	struct fc_ct_hdr *hdr = sg_virt(sg);
sg                717 drivers/s390/scsi/zfcp_fc.c 	struct fc_gpn_ft_resp *acc = sg_virt(sg);
sg                745 drivers/s390/scsi/zfcp_fc.c 			acc = sg_virt(++sg);
sg                186 drivers/s390/scsi/zfcp_qdio.c 			    struct scatterlist *sg)
sg                194 drivers/s390/scsi/zfcp_qdio.c 	for (; sg; sg = sg_next(sg)) {
sg                202 drivers/s390/scsi/zfcp_qdio.c 		sbale->addr = sg_virt(sg);
sg                203 drivers/s390/scsi/zfcp_qdio.c 		sbale->length = sg->length;
sg                182 drivers/s390/scsi/zfcp_qdio.h int zfcp_qdio_sg_one_sbale(struct scatterlist *sg)
sg                184 drivers/s390/scsi/zfcp_qdio.h 	return sg_is_last(sg) && sg->length <= ZFCP_QDIO_SBALE_LEN;
sg                236 drivers/s390/scsi/zfcp_qdio.h unsigned int zfcp_qdio_real_bytes(struct scatterlist *sg)
sg                240 drivers/s390/scsi/zfcp_qdio.h 	for (; sg; sg = sg_next(sg))
sg                241 drivers/s390/scsi/zfcp_qdio.h 		real_bytes += sg->length;
sg               1812 drivers/scsi/3w-9xxx.c 	struct scatterlist *sglist = NULL, *sg;
sg               1866 drivers/scsi/3w-9xxx.c 				scsi_for_each_sg(srb, sg, sg_count, i) {
sg               1867 drivers/scsi/3w-9xxx.c 					command_packet->sg_list[i].address = TW_CPU_TO_SGL(sg_dma_address(sg));
sg               1868 drivers/scsi/3w-9xxx.c 					command_packet->sg_list[i].length = cpu_to_le32(sg_dma_len(sg));
sg                298 drivers/scsi/3w-sas.c 	struct scatterlist *sglist = NULL, *sg;
sg                342 drivers/scsi/3w-sas.c 			scsi_for_each_sg(srb, sg, sg_count, i) {
sg                343 drivers/scsi/3w-sas.c 				command_packet->sg_list[i].address = TW_CPU_TO_SGL(sg_dma_address(sg));
sg                344 drivers/scsi/3w-sas.c 				command_packet->sg_list[i].length = TW_CPU_TO_SGL(sg_dma_len(sg));
sg               1704 drivers/scsi/3w-xxxx.c 	struct scatterlist *sglist, *sg;
sg               1761 drivers/scsi/3w-xxxx.c 	scsi_for_each_sg(tw_dev->srb[request_id], sg, use_sg, i) {
sg               1762 drivers/scsi/3w-xxxx.c 		command_packet->byte8.io.sgl[i].address = sg_dma_address(sg);
sg               1763 drivers/scsi/3w-xxxx.c 		command_packet->byte8.io.sgl[i].length = sg_dma_len(sg);
sg               1238 drivers/scsi/53c700.c 			struct scatterlist *sg;
sg               1241 drivers/scsi/53c700.c 			scsi_for_each_sg(SCp, sg, scsi_sg_count(SCp) + 1, i) {
sg               1242 drivers/scsi/53c700.c 				printk(KERN_INFO " SG[%d].length = %d, move_insn=%08x, addr %08x\n", i, sg->length, ((struct NCR_700_command_slot *)SCp->host_scribble)->SG[i].ins, ((struct NCR_700_command_slot *)SCp->host_scribble)->SG[i].pAddr);
sg               1864 drivers/scsi/53c700.c 		struct scatterlist *sg;
sg               1870 drivers/scsi/53c700.c 		scsi_for_each_sg(SCp, sg, sg_count, i) {
sg               1871 drivers/scsi/53c700.c 			vPtr = sg_dma_address(sg);
sg               1872 drivers/scsi/53c700.c 			count = sg_dma_len(sg);
sg               3074 drivers/scsi/BusLogic.c 		struct scatterlist *sg;
sg               3086 drivers/scsi/BusLogic.c 		scsi_for_each_sg(command, sg, count, i) {
sg               3087 drivers/scsi/BusLogic.c 			ccb->sglist[i].segbytes = sg_dma_len(sg);
sg               3088 drivers/scsi/BusLogic.c 			ccb->sglist[i].segdata = sg_dma_address(sg);
sg                844 drivers/scsi/a100u2w.c 	struct scatterlist *sg;
sg                874 drivers/scsi/a100u2w.c 		scsi_for_each_sg(cmd, sg, count_sg, i) {
sg                875 drivers/scsi/a100u2w.c 			sgent->base = cpu_to_le32((u32) sg_dma_address(sg));
sg                876 drivers/scsi/a100u2w.c 			sgent->length = cpu_to_le32((u32) sg_dma_len(sg));
sg               1248 drivers/scsi/aacraid/aachba.c 		ret = aac_build_sgraw(cmd, &readcmd->sg);
sg               1253 drivers/scsi/aacraid/aachba.c 			((le32_to_cpu(readcmd->sg.count)-1) * sizeof(struct sgentryraw));
sg               1284 drivers/scsi/aacraid/aachba.c 	ret = aac_build_sg64(cmd, &readcmd->sg);
sg               1288 drivers/scsi/aacraid/aachba.c 		((le32_to_cpu(readcmd->sg.count) - 1) *
sg               1319 drivers/scsi/aacraid/aachba.c 	ret = aac_build_sg(cmd, &readcmd->sg);
sg               1323 drivers/scsi/aacraid/aachba.c 			((le32_to_cpu(readcmd->sg.count) - 1) *
sg               1382 drivers/scsi/aacraid/aachba.c 		ret = aac_build_sgraw(cmd, &writecmd->sg);
sg               1387 drivers/scsi/aacraid/aachba.c 			((le32_to_cpu(writecmd->sg.count)-1) * sizeof (struct sgentryraw));
sg               1418 drivers/scsi/aacraid/aachba.c 	ret = aac_build_sg64(cmd, &writecmd->sg);
sg               1422 drivers/scsi/aacraid/aachba.c 		((le32_to_cpu(writecmd->sg.count) - 1) *
sg               1452 drivers/scsi/aacraid/aachba.c 	writecmd->sg.count = cpu_to_le32(1);
sg               1455 drivers/scsi/aacraid/aachba.c 	ret = aac_build_sg(cmd, &writecmd->sg);
sg               1459 drivers/scsi/aacraid/aachba.c 		((le32_to_cpu(writecmd->sg.count) - 1) *
sg               1567 drivers/scsi/aacraid/aachba.c 	ret = aac_build_sg64(cmd, (struct sgmap64 *) &srbcmd->sg);
sg               1578 drivers/scsi/aacraid/aachba.c 		((le32_to_cpu(srbcmd->sg.count) & 0xff) *
sg               1598 drivers/scsi/aacraid/aachba.c 	ret = aac_build_sg(cmd, (struct sgmap *)&srbcmd->sg);
sg               1609 drivers/scsi/aacraid/aachba.c 		(((le32_to_cpu(srbcmd->sg.count) & 0xff) - 1) *
sg               1707 drivers/scsi/aacraid/aachba.c 	sg64 = (struct sgmap64 *)&srb->sg;
sg               1709 drivers/scsi/aacraid/aachba.c 	sg64->sg[0].addr[1]	= cpu_to_le32(upper_32_bits(addr));
sg               1710 drivers/scsi/aacraid/aachba.c 	sg64->sg[0].addr[0]	= cpu_to_le32(lower_32_bits(addr));
sg               1711 drivers/scsi/aacraid/aachba.c 	sg64->sg[0].count	= cpu_to_le32(xfer_len);
sg               3912 drivers/scsi/aacraid/aachba.c 	struct scatterlist *sg;
sg               3917 drivers/scsi/aacraid/aachba.c 	psg->sg[0].addr = 0;
sg               3918 drivers/scsi/aacraid/aachba.c 	psg->sg[0].count = 0;
sg               3926 drivers/scsi/aacraid/aachba.c 	scsi_for_each_sg(scsicmd, sg, nseg, i) {
sg               3927 drivers/scsi/aacraid/aachba.c 		psg->sg[i].addr = cpu_to_le32(sg_dma_address(sg));
sg               3928 drivers/scsi/aacraid/aachba.c 		psg->sg[i].count = cpu_to_le32(sg_dma_len(sg));
sg               3929 drivers/scsi/aacraid/aachba.c 		byte_count += sg_dma_len(sg);
sg               3933 drivers/scsi/aacraid/aachba.c 		u32 temp = le32_to_cpu(psg->sg[i-1].count) -
sg               3935 drivers/scsi/aacraid/aachba.c 		psg->sg[i-1].count = cpu_to_le32(temp);
sg               3953 drivers/scsi/aacraid/aachba.c 	struct scatterlist *sg;
sg               3958 drivers/scsi/aacraid/aachba.c 	psg->sg[0].addr[0] = 0;
sg               3959 drivers/scsi/aacraid/aachba.c 	psg->sg[0].addr[1] = 0;
sg               3960 drivers/scsi/aacraid/aachba.c 	psg->sg[0].count = 0;
sg               3966 drivers/scsi/aacraid/aachba.c 	scsi_for_each_sg(scsicmd, sg, nseg, i) {
sg               3967 drivers/scsi/aacraid/aachba.c 		int count = sg_dma_len(sg);
sg               3968 drivers/scsi/aacraid/aachba.c 		addr = sg_dma_address(sg);
sg               3969 drivers/scsi/aacraid/aachba.c 		psg->sg[i].addr[0] = cpu_to_le32(addr & 0xffffffff);
sg               3970 drivers/scsi/aacraid/aachba.c 		psg->sg[i].addr[1] = cpu_to_le32(addr>>32);
sg               3971 drivers/scsi/aacraid/aachba.c 		psg->sg[i].count = cpu_to_le32(count);
sg               3977 drivers/scsi/aacraid/aachba.c 		u32 temp = le32_to_cpu(psg->sg[i-1].count) -
sg               3979 drivers/scsi/aacraid/aachba.c 		psg->sg[i-1].count = cpu_to_le32(temp);
sg               3995 drivers/scsi/aacraid/aachba.c 	struct scatterlist *sg;
sg               4000 drivers/scsi/aacraid/aachba.c 	psg->sg[0].next = 0;
sg               4001 drivers/scsi/aacraid/aachba.c 	psg->sg[0].prev = 0;
sg               4002 drivers/scsi/aacraid/aachba.c 	psg->sg[0].addr[0] = 0;
sg               4003 drivers/scsi/aacraid/aachba.c 	psg->sg[0].addr[1] = 0;
sg               4004 drivers/scsi/aacraid/aachba.c 	psg->sg[0].count = 0;
sg               4005 drivers/scsi/aacraid/aachba.c 	psg->sg[0].flags = 0;
sg               4011 drivers/scsi/aacraid/aachba.c 	scsi_for_each_sg(scsicmd, sg, nseg, i) {
sg               4012 drivers/scsi/aacraid/aachba.c 		int count = sg_dma_len(sg);
sg               4013 drivers/scsi/aacraid/aachba.c 		u64 addr = sg_dma_address(sg);
sg               4014 drivers/scsi/aacraid/aachba.c 		psg->sg[i].next = 0;
sg               4015 drivers/scsi/aacraid/aachba.c 		psg->sg[i].prev = 0;
sg               4016 drivers/scsi/aacraid/aachba.c 		psg->sg[i].addr[1] = cpu_to_le32((u32)(addr>>32));
sg               4017 drivers/scsi/aacraid/aachba.c 		psg->sg[i].addr[0] = cpu_to_le32((u32)(addr & 0xffffffff));
sg               4018 drivers/scsi/aacraid/aachba.c 		psg->sg[i].count = cpu_to_le32(count);
sg               4019 drivers/scsi/aacraid/aachba.c 		psg->sg[i].flags = 0;
sg               4025 drivers/scsi/aacraid/aachba.c 		u32 temp = le32_to_cpu(psg->sg[i-1].count) -
sg               4027 drivers/scsi/aacraid/aachba.c 		psg->sg[i-1].count = cpu_to_le32(temp);
sg               4044 drivers/scsi/aacraid/aachba.c 	struct scatterlist *sg;
sg               4052 drivers/scsi/aacraid/aachba.c 	scsi_for_each_sg(scsicmd, sg, nseg, i) {
sg               4053 drivers/scsi/aacraid/aachba.c 		int count = sg_dma_len(sg);
sg               4054 drivers/scsi/aacraid/aachba.c 		u64 addr = sg_dma_address(sg);
sg               4162 drivers/scsi/aacraid/aachba.c 	struct scatterlist *sg;
sg               4178 drivers/scsi/aacraid/aachba.c 	scsi_for_each_sg(scsicmd, sg, nseg, i) {
sg               4179 drivers/scsi/aacraid/aachba.c 		int count = sg_dma_len(sg);
sg               4180 drivers/scsi/aacraid/aachba.c 		u64 addr = sg_dma_address(sg);
sg                507 drivers/scsi/aacraid/aacraid.h 	struct sgentry	sg[1];
sg                512 drivers/scsi/aacraid/aacraid.h 	struct user_sgentry	sg[1];
sg                517 drivers/scsi/aacraid/aacraid.h 	struct sgentry64 sg[1];
sg                522 drivers/scsi/aacraid/aacraid.h 	struct user_sgentry64 sg[1];
sg                527 drivers/scsi/aacraid/aacraid.h 	struct sgentryraw sg[1];
sg                532 drivers/scsi/aacraid/aacraid.h 	struct user_sgentryraw sg[1];
sg               1857 drivers/scsi/aacraid/aacraid.h 	struct sgmap	sg;	// Must be last in struct because it is variable
sg               1868 drivers/scsi/aacraid/aacraid.h 	struct sgmap64	sg;	// Must be last in struct because it is variable
sg               1884 drivers/scsi/aacraid/aacraid.h 	struct sgmap	sg;	// Must be last in struct because it is variable
sg               1895 drivers/scsi/aacraid/aacraid.h 	struct sgmap64	sg;	// Must be last in struct because it is variable
sg               1912 drivers/scsi/aacraid/aacraid.h 	struct sgmapraw	sg;
sg               1992 drivers/scsi/aacraid/aacraid.h 	struct	sgmap	sg;
sg               2011 drivers/scsi/aacraid/aacraid.h 	struct	user_sgmap	sg;
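The aacraid.h lines above show the pre-C99 idiom those "Must be last in struct" comments are guarding: a one-element trailing array that is really variable length, so the struct is allocated oversized and indexed past its nominal bound. A hedged sketch of the sizing rule (modern code would use a flexible array member, sg[]; sgmap_model and friends are stand-in names):

#include <stdint.h>
#include <stdlib.h>

struct sgentry_model { uint32_t addr, count; };

struct sgmap_model {
	uint32_t count;
	struct sgentry_model sg[1];	/* must be last: variably sized */
};

static struct sgmap_model *sgmap_alloc(uint32_t nents)
{
	struct sgmap_model *m;

	if (!nents)
		return NULL;
	/* sizeof() already covers sg[0], so add only nents - 1 extra entries */
	m = calloc(1, sizeof(*m) + (nents - 1) * sizeof(struct sgentry_model));
	if (m)
		m->count = nents;
	return m;
}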
sg                542 drivers/scsi/aacraid/commctrl.c 	if (user_srbcmd->sg.count > ARRAY_SIZE(sg_list)) {
sg                544 drivers/scsi/aacraid/commctrl.c 			user_srbcmd->sg.count));
sg                548 drivers/scsi/aacraid/commctrl.c 	if ((data_dir == DMA_NONE) && user_srbcmd->sg.count) {
sg                554 drivers/scsi/aacraid/commctrl.c 		((user_srbcmd->sg.count & 0xff) * sizeof(struct sgentry));
sg                555 drivers/scsi/aacraid/commctrl.c 	actual_fibsize64 = actual_fibsize + (user_srbcmd->sg.count & 0xff) *
sg                563 drivers/scsi/aacraid/commctrl.c 		  actual_fibsize, actual_fibsize64, user_srbcmd->sg.count,
sg                608 drivers/scsi/aacraid/commctrl.c 					cpu_to_le32(user_srbcmd->sg.count);
sg                610 drivers/scsi/aacraid/commctrl.c 			user_srbcmd->sg.count * sizeof(struct aac_hba_sgl);
sg                637 drivers/scsi/aacraid/commctrl.c 		struct user_sgmap *usg32 = &user_srbcmd->sg;
sg                639 drivers/scsi/aacraid/commctrl.c 			(struct user_sgmap64 *)&user_srbcmd->sg;
sg                646 drivers/scsi/aacraid/commctrl.c 				usg64->sg[i].count : usg32->sg[i].count;
sg                663 drivers/scsi/aacraid/commctrl.c 				addr = (u64)usg64->sg[i].addr[0];
sg                664 drivers/scsi/aacraid/commctrl.c 				addr += ((u64)usg64->sg[i].addr[1]) << 32;
sg                666 drivers/scsi/aacraid/commctrl.c 				addr = (u64)usg32->sg[i].addr;
sg                699 drivers/scsi/aacraid/commctrl.c 		struct user_sgmap64* upsg = (struct user_sgmap64*)&user_srbcmd->sg;
sg                700 drivers/scsi/aacraid/commctrl.c 		struct sgmap64* psg = (struct sgmap64*)&srbcmd->sg;
sg                711 drivers/scsi/aacraid/commctrl.c 				sg_count[i] = upsg->sg[i].count;
sg                728 drivers/scsi/aacraid/commctrl.c 				addr = (u64)upsg->sg[i].addr[0];
sg                729 drivers/scsi/aacraid/commctrl.c 				addr += ((u64)upsg->sg[i].addr[1]) << 32;
sg                745 drivers/scsi/aacraid/commctrl.c 				psg->sg[i].addr[0] = cpu_to_le32(addr & 0xffffffff);
sg                746 drivers/scsi/aacraid/commctrl.c 				psg->sg[i].addr[1] = cpu_to_le32(addr>>32);
sg                748 drivers/scsi/aacraid/commctrl.c 				psg->sg[i].count = cpu_to_le32(sg_count[i]);
sg                766 drivers/scsi/aacraid/commctrl.c 				sg_count[i] = usg->sg[i].count;
sg                785 drivers/scsi/aacraid/commctrl.c 				sg_user[i] = (void __user *)(uintptr_t)usg->sg[i].addr;
sg                801 drivers/scsi/aacraid/commctrl.c 				psg->sg[i].addr[0] = cpu_to_le32(addr & 0xffffffff);
sg                802 drivers/scsi/aacraid/commctrl.c 				psg->sg[i].addr[1] = cpu_to_le32(addr>>32);
sg                804 drivers/scsi/aacraid/commctrl.c 				psg->sg[i].count = cpu_to_le32(sg_count[i]);
sg                809 drivers/scsi/aacraid/commctrl.c 		if (user_srbcmd->sg.count)
sg                815 drivers/scsi/aacraid/commctrl.c 		struct user_sgmap* upsg = &user_srbcmd->sg;
sg                816 drivers/scsi/aacraid/commctrl.c 		struct sgmap* psg = &srbcmd->sg;
sg                824 drivers/scsi/aacraid/commctrl.c 				sg_count[i] = usg->sg[i].count;
sg                840 drivers/scsi/aacraid/commctrl.c 				addr = (u64)usg->sg[i].addr[0];
sg                841 drivers/scsi/aacraid/commctrl.c 				addr += ((u64)usg->sg[i].addr[1]) << 32;
sg                854 drivers/scsi/aacraid/commctrl.c 				addr = pci_map_single(dev->pdev, p, usg->sg[i].count, data_dir);
sg                856 drivers/scsi/aacraid/commctrl.c 				psg->sg[i].addr = cpu_to_le32(addr & 0xffffffff);
sg                857 drivers/scsi/aacraid/commctrl.c 				byte_count += usg->sg[i].count;
sg                858 drivers/scsi/aacraid/commctrl.c 				psg->sg[i].count = cpu_to_le32(sg_count[i]);
sg                865 drivers/scsi/aacraid/commctrl.c 				sg_count[i] = upsg->sg[i].count;
sg                881 drivers/scsi/aacraid/commctrl.c 				sg_user[i] = (void __user *)(uintptr_t)upsg->sg[i].addr;
sg                896 drivers/scsi/aacraid/commctrl.c 				psg->sg[i].addr = cpu_to_le32(addr);
sg                898 drivers/scsi/aacraid/commctrl.c 				psg->sg[i].count = cpu_to_le32(sg_count[i]);
sg                902 drivers/scsi/aacraid/commctrl.c 		if (user_srbcmd->sg.count)
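The commctrl.c excerpts above are the ioctl path deciding whether to trust a user-supplied SG count: it is bounded against a local table and the caller's declared FIB size is recomputed for both the 32-bit and 64-bit entry layouts before anything is copied. A compact model of that check, with all names and the MODEL_MAX_SG bound invented for illustration:

#include <stdint.h>
#include <stddef.h>

#define MODEL_MAX_SG	32

struct user_sgentry_model   { uint32_t addr, count; };
struct user_sgentry64_model { uint32_t addr[2], count; };

static int srb_size_ok(uint32_t sg_count, size_t base_size, size_t declared_size)
{
	size_t sz32, sz64;

	if (sg_count > MODEL_MAX_SG)
		return 0;	/* reject oversized user lists outright */

	sz32 = base_size + sg_count * sizeof(struct user_sgentry_model);
	sz64 = base_size + sg_count * sizeof(struct user_sgentry64_model);

	/* the real driver picks the layout from whichever size matches */
	return declared_size == sz32 || declared_size == sz64;
}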
sg               2334 drivers/scsi/aacraid/commsup.c 	sg64 = (struct sgmap64 *)&srbcmd->sg;
sg               2336 drivers/scsi/aacraid/commsup.c 	sg64->sg[0].addr[1] = cpu_to_le32((u32)(((addr) >> 16) >> 16));
sg               2337 drivers/scsi/aacraid/commsup.c 	sg64->sg[0].addr[0] = cpu_to_le32((u32)(addr & 0xffffffff));
sg               2338 drivers/scsi/aacraid/commsup.c 	sg64->sg[0].count = cpu_to_le32(datasize);
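Worth noting in the commsup.c excerpt: the high word is produced by two 16-bit shifts rather than one ">> 32". If the address type is only 32 bits wide on some configuration, a 32-bit shift is undefined behavior, while the staged shifts stay well defined (and yield zero). An isolated illustration:

#include <stdint.h>

static uint32_t addr_hi32(uint64_t addr)
{
	/* remains well defined even when addr's type is 32 bits on a config */
	return (uint32_t)((addr >> 16) >> 16);
}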
sg                363 drivers/scsi/advansys.c 	ASC_SG_LIST_Q sg;
sg               2149 drivers/scsi/aha152x.c 		struct scatterlist *sg = scsi_sglist(CURRENT_SC);
sg               2155 drivers/scsi/aha152x.c 		while (done > 0 && !sg_is_last(sg)) {
sg               2156 drivers/scsi/aha152x.c 			if (done < sg->length)
sg               2158 drivers/scsi/aha152x.c 			done -= sg->length;
sg               2159 drivers/scsi/aha152x.c 			sg = sg_next(sg);
sg               2162 drivers/scsi/aha152x.c 		CURRENT_SC->SCp.buffer = sg;
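The aha152x.c excerpt is the common "resume mid-list" walk: consume whole elements while the completed byte count still covers them, then park on the element holding the current position. Modeled here over a plain array instead of sg_next()/sg_is_last():

#include <stddef.h>

struct sg_model { size_t length; };

/* Returns the residual offset inside element *index. */
static size_t advance_sg(const struct sg_model *sg, size_t nents,
			 size_t done, size_t *index)
{
	size_t i = 0;

	while (done > 0 && i + 1 < nents && done >= sg[i].length)
		done -= sg[i++].length;

	*index = i;
	return done;
}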
sg                480 drivers/scsi/aha1542.c 		struct scatterlist *sg;
sg                484 drivers/scsi/aha1542.c 		scsi_for_each_sg(cmd, sg, sg_count, i) {
sg                485 drivers/scsi/aha1542.c 			any2scsi(acmd->chain[i].dataptr, sg_dma_address(sg));
sg                486 drivers/scsi/aha1542.c 			any2scsi(acmd->chain[i].datalen, sg_dma_len(sg));
sg                398 drivers/scsi/aha1740.c 		struct scatterlist *sg;
sg                403 drivers/scsi/aha1740.c 		host->ecb[ecbno].sg = 1;  /* SCSI Initiator Command
sg                406 drivers/scsi/aha1740.c 		scsi_for_each_sg(SCpnt, sg, nseg, i) {
sg                407 drivers/scsi/aha1740.c 			cptr[i].datalen = sg_dma_len (sg);
sg                408 drivers/scsi/aha1740.c 			cptr[i].dataptr = sg_dma_address (sg);
sg                111 drivers/scsi/aha1740.h 	:1, sg:1,		/* Scatter/Gather */
sg                412 drivers/scsi/aic7xxx/aic79xx_core.c 		struct ahd_dma64_seg *sg;
sg                414 drivers/scsi/aic7xxx/aic79xx_core.c 		sg = (struct ahd_dma64_seg *)sgptr;
sg                415 drivers/scsi/aic7xxx/aic79xx_core.c 		sg->addr = ahd_htole64(addr);
sg                416 drivers/scsi/aic7xxx/aic79xx_core.c 		sg->len = ahd_htole32(len | (last ? AHD_DMA_LAST_SEG : 0));
sg                417 drivers/scsi/aic7xxx/aic79xx_core.c 		return (sg + 1);
sg                419 drivers/scsi/aic7xxx/aic79xx_core.c 		struct ahd_dma_seg *sg;
sg                421 drivers/scsi/aic7xxx/aic79xx_core.c 		sg = (struct ahd_dma_seg *)sgptr;
sg                422 drivers/scsi/aic7xxx/aic79xx_core.c 		sg->addr = ahd_htole32(addr & 0xFFFFFFFF);
sg                423 drivers/scsi/aic7xxx/aic79xx_core.c 		sg->len = ahd_htole32(len | ((addr >> 8) & 0x7F000000)
sg                425 drivers/scsi/aic7xxx/aic79xx_core.c 		return (sg + 1);
sg                457 drivers/scsi/aic7xxx/aic79xx_core.c 		struct ahd_dma64_seg *sg;
sg                459 drivers/scsi/aic7xxx/aic79xx_core.c 		sg = (struct ahd_dma64_seg *)scb->sg_list;
sg                460 drivers/scsi/aic7xxx/aic79xx_core.c 		scb->hscb->dataptr = sg->addr;
sg                461 drivers/scsi/aic7xxx/aic79xx_core.c 		scb->hscb->datacnt = sg->len;
sg                463 drivers/scsi/aic7xxx/aic79xx_core.c 		struct ahd_dma_seg *sg;
sg                466 drivers/scsi/aic7xxx/aic79xx_core.c 		sg = (struct ahd_dma_seg *)scb->sg_list;
sg                468 drivers/scsi/aic7xxx/aic79xx_core.c 		dataptr_words[0] = sg->addr;
sg                473 drivers/scsi/aic7xxx/aic79xx_core.c 			high_addr = ahd_le32toh(sg->len) & 0x7F000000;
sg                476 drivers/scsi/aic7xxx/aic79xx_core.c 		scb->hscb->datacnt = sg->len;
sg                507 drivers/scsi/aic7xxx/aic79xx_core.c ahd_sg_virt_to_bus(struct ahd_softc *ahd, struct scb *scb, void *sg)
sg                512 drivers/scsi/aic7xxx/aic79xx_core.c 	sg_offset = ((uint8_t *)sg - (uint8_t *)scb->sg_list)
sg               1576 drivers/scsi/aic7xxx/aic79xx_core.c 				struct ahd_dma64_seg *sg;
sg               1578 drivers/scsi/aic7xxx/aic79xx_core.c 				sg = ahd_sg_bus_to_virt(ahd, scb, sgptr);
sg               1579 drivers/scsi/aic7xxx/aic79xx_core.c 				data_addr = sg->addr;
sg               1580 drivers/scsi/aic7xxx/aic79xx_core.c 				data_len = sg->len;
sg               1581 drivers/scsi/aic7xxx/aic79xx_core.c 				sgptr += sizeof(*sg);
sg               1583 drivers/scsi/aic7xxx/aic79xx_core.c 				struct	ahd_dma_seg *sg;
sg               1585 drivers/scsi/aic7xxx/aic79xx_core.c 				sg = ahd_sg_bus_to_virt(ahd, scb, sgptr);
sg               1586 drivers/scsi/aic7xxx/aic79xx_core.c 				data_addr = sg->len & AHD_SG_HIGH_ADDR_MASK;
sg               1588 drivers/scsi/aic7xxx/aic79xx_core.c 				data_addr |= sg->addr;
sg               1589 drivers/scsi/aic7xxx/aic79xx_core.c 				data_len = sg->len;
sg               1590 drivers/scsi/aic7xxx/aic79xx_core.c 				sgptr += sizeof(*sg);
sg               5743 drivers/scsi/aic7xxx/aic79xx_core.c 				struct ahd_dma64_seg *sg;
sg               5745 drivers/scsi/aic7xxx/aic79xx_core.c 				sg = ahd_sg_bus_to_virt(ahd, scb, sgptr);
sg               5751 drivers/scsi/aic7xxx/aic79xx_core.c 				sg--;
sg               5752 drivers/scsi/aic7xxx/aic79xx_core.c 				sglen = ahd_le32toh(sg->len) & AHD_SG_LEN_MASK;
sg               5753 drivers/scsi/aic7xxx/aic79xx_core.c 				if (sg != scb->sg_list
sg               5756 drivers/scsi/aic7xxx/aic79xx_core.c 					sg--;
sg               5757 drivers/scsi/aic7xxx/aic79xx_core.c 					sglen = ahd_le32toh(sg->len);
sg               5763 drivers/scsi/aic7xxx/aic79xx_core.c 					data_addr = ahd_le64toh(sg->addr)
sg               5771 drivers/scsi/aic7xxx/aic79xx_core.c 					sg++;
sg               5773 drivers/scsi/aic7xxx/aic79xx_core.c 								   sg);
sg               5776 drivers/scsi/aic7xxx/aic79xx_core.c 				struct ahd_dma_seg *sg;
sg               5778 drivers/scsi/aic7xxx/aic79xx_core.c 				sg = ahd_sg_bus_to_virt(ahd, scb, sgptr);
sg               5784 drivers/scsi/aic7xxx/aic79xx_core.c 				sg--;
sg               5785 drivers/scsi/aic7xxx/aic79xx_core.c 				sglen = ahd_le32toh(sg->len) & AHD_SG_LEN_MASK;
sg               5786 drivers/scsi/aic7xxx/aic79xx_core.c 				if (sg != scb->sg_list
sg               5789 drivers/scsi/aic7xxx/aic79xx_core.c 					sg--;
sg               5790 drivers/scsi/aic7xxx/aic79xx_core.c 					sglen = ahd_le32toh(sg->len);
sg               5796 drivers/scsi/aic7xxx/aic79xx_core.c 					data_addr = ahd_le32toh(sg->addr)
sg               5804 drivers/scsi/aic7xxx/aic79xx_core.c 					sg++;
sg               5806 drivers/scsi/aic7xxx/aic79xx_core.c 								  sg);
sg               5882 drivers/scsi/aic7xxx/aic79xx_core.c 		struct ahd_dma64_seg *sg;
sg               5884 drivers/scsi/aic7xxx/aic79xx_core.c 		sg = ahd_sg_bus_to_virt(ahd, scb, sgptr);
sg               5887 drivers/scsi/aic7xxx/aic79xx_core.c 		sg--;
sg               5889 drivers/scsi/aic7xxx/aic79xx_core.c 		dataptr = ahd_le64toh(sg->addr)
sg               5890 drivers/scsi/aic7xxx/aic79xx_core.c 			+ (ahd_le32toh(sg->len) & AHD_SG_LEN_MASK)
sg               5894 drivers/scsi/aic7xxx/aic79xx_core.c 		struct	 ahd_dma_seg *sg;
sg               5896 drivers/scsi/aic7xxx/aic79xx_core.c 		sg = ahd_sg_bus_to_virt(ahd, scb, sgptr);
sg               5899 drivers/scsi/aic7xxx/aic79xx_core.c 		sg--;
sg               5901 drivers/scsi/aic7xxx/aic79xx_core.c 		dataptr = ahd_le32toh(sg->addr)
sg               5902 drivers/scsi/aic7xxx/aic79xx_core.c 			+ (ahd_le32toh(sg->len) & AHD_SG_LEN_MASK)
sg               5905 drivers/scsi/aic7xxx/aic79xx_core.c 			 (ahd_le32toh(sg->len) & ~AHD_SG_LEN_MASK) >> 24);
sg               8976 drivers/scsi/aic7xxx/aic79xx_core.c 		struct ahd_dma_seg *sg;
sg               9003 drivers/scsi/aic7xxx/aic79xx_core.c 		sg = scb->sg_list;
sg               9016 drivers/scsi/aic7xxx/aic79xx_core.c 		sg = ahd_sg_setup(ahd, scb, sg, ahd_get_sense_bufaddr(ahd, scb),
sg               9149 drivers/scsi/aic7xxx/aic79xx_core.c 		struct ahd_dma_seg *sg;
sg               9156 drivers/scsi/aic7xxx/aic79xx_core.c 		sg = ahd_sg_bus_to_virt(ahd, scb, resid_sgptr & SG_PTR_MASK);
sg               9159 drivers/scsi/aic7xxx/aic79xx_core.c 		sg--;
sg               9166 drivers/scsi/aic7xxx/aic79xx_core.c 		while ((ahd_le32toh(sg->len) & AHD_DMA_LAST_SEG) == 0) {
sg               9167 drivers/scsi/aic7xxx/aic79xx_core.c 			sg++;
sg               9168 drivers/scsi/aic7xxx/aic79xx_core.c 			resid += ahd_le32toh(sg->len) & AHD_SG_LEN_MASK;
sg               1635 drivers/scsi/aic7xxx/aic79xx_osm.c 		void *sg = scb->sg_list;
sg               1648 drivers/scsi/aic7xxx/aic79xx_osm.c 			sg = ahd_sg_setup(ahd, scb, sg, addr, len,
sg                377 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_sg_virt_to_bus(struct scb *scb, struct ahc_dma_seg *sg)
sg                382 drivers/scsi/aic7xxx/aic7xxx_core.c 	sg_index = sg - &scb->sg_list[1];
sg               1051 drivers/scsi/aic7xxx/aic7xxx_core.c 			struct ahc_dma_seg *sg;
sg               1073 drivers/scsi/aic7xxx/aic7xxx_core.c 			sg = scb->sg_list;
sg               1085 drivers/scsi/aic7xxx/aic7xxx_core.c 			sg->addr = ahc_get_sense_bufaddr(ahc, scb);
sg               1086 drivers/scsi/aic7xxx/aic7xxx_core.c 			sg->len = ahc_get_sense_bufsize(ahc, scb);
sg               1087 drivers/scsi/aic7xxx/aic7xxx_core.c 			sg->len |= AHC_DMA_LAST_SEG;
sg               1090 drivers/scsi/aic7xxx/aic7xxx_core.c 			sg->addr = ahc_htole32(sg->addr);
sg               1091 drivers/scsi/aic7xxx/aic7xxx_core.c 			sg->len = ahc_htole32(sg->len);
sg               1100 drivers/scsi/aic7xxx/aic7xxx_core.c 			sc->length = sg->len;
sg               1132 drivers/scsi/aic7xxx/aic7xxx_core.c 			hscb->dataptr = sg->addr; 
sg               1133 drivers/scsi/aic7xxx/aic7xxx_core.c 			hscb->datacnt = sg->len;
sg               4187 drivers/scsi/aic7xxx/aic7xxx_core.c 			struct ahc_dma_seg *sg;
sg               4211 drivers/scsi/aic7xxx/aic7xxx_core.c 			sg = ahc_sg_bus_to_virt(scb, sgptr);
sg               4217 drivers/scsi/aic7xxx/aic7xxx_core.c 			sg--;
sg               4218 drivers/scsi/aic7xxx/aic7xxx_core.c 			sglen = ahc_le32toh(sg->len) & AHC_SG_LEN_MASK;
sg               4219 drivers/scsi/aic7xxx/aic7xxx_core.c 			if (sg != scb->sg_list
sg               4222 drivers/scsi/aic7xxx/aic7xxx_core.c 				sg--;
sg               4223 drivers/scsi/aic7xxx/aic7xxx_core.c 				sglen = ahc_le32toh(sg->len);
sg               4229 drivers/scsi/aic7xxx/aic7xxx_core.c 				data_addr = ahc_le32toh(sg->addr)
sg               4236 drivers/scsi/aic7xxx/aic7xxx_core.c 				sg++;
sg               4237 drivers/scsi/aic7xxx/aic7xxx_core.c 				sgptr = ahc_sg_virt_to_bus(scb, sg);
sg               4262 drivers/scsi/aic7xxx/aic7xxx_core.c 	struct	 ahc_dma_seg *sg;
sg               4276 drivers/scsi/aic7xxx/aic7xxx_core.c 	sg = ahc_sg_bus_to_virt(scb, sgptr);
sg               4279 drivers/scsi/aic7xxx/aic7xxx_core.c 	sg--;
sg               4285 drivers/scsi/aic7xxx/aic7xxx_core.c 	dataptr = ahc_le32toh(sg->addr)
sg               4286 drivers/scsi/aic7xxx/aic7xxx_core.c 		+ (ahc_le32toh(sg->len) & AHC_SG_LEN_MASK)
sg               4294 drivers/scsi/aic7xxx/aic7xxx_core.c 			 (ahc_le32toh(sg->len) >> 24) & SG_HIGH_ADDR_BITS);
sg               6667 drivers/scsi/aic7xxx/aic7xxx_core.c 		struct ahc_dma_seg *sg;
sg               6674 drivers/scsi/aic7xxx/aic7xxx_core.c 		sg = ahc_sg_bus_to_virt(scb, resid_sgptr & SG_PTR_MASK);
sg               6677 drivers/scsi/aic7xxx/aic7xxx_core.c 		sg--;
sg               6684 drivers/scsi/aic7xxx/aic7xxx_core.c 		while ((ahc_le32toh(sg->len) & AHC_DMA_LAST_SEG) == 0) {
sg               6685 drivers/scsi/aic7xxx/aic7xxx_core.c 			sg++;
sg               6686 drivers/scsi/aic7xxx/aic7xxx_core.c 			resid += ahc_le32toh(sg->len) & AHC_SG_LEN_MASK;
sg                455 drivers/scsi/aic7xxx/aic7xxx_osm.c 		 		      struct ahc_dma_seg *sg,
sg                471 drivers/scsi/aic7xxx/aic7xxx_osm.c 		  struct ahc_dma_seg *sg, dma_addr_t addr, bus_size_t len)
sg                480 drivers/scsi/aic7xxx/aic7xxx_osm.c 	sg->addr = ahc_htole32(addr & 0xFFFFFFFF);
sg                487 drivers/scsi/aic7xxx/aic7xxx_osm.c 	sg->len = ahc_htole32(len);
sg               1520 drivers/scsi/aic7xxx/aic7xxx_osm.c 		struct	ahc_dma_seg *sg;
sg               1525 drivers/scsi/aic7xxx/aic7xxx_osm.c 		sg = scb->sg_list;
sg               1538 drivers/scsi/aic7xxx/aic7xxx_osm.c 						     sg, addr, len);
sg               1539 drivers/scsi/aic7xxx/aic7xxx_osm.c 			sg += consumed;
sg               1542 drivers/scsi/aic7xxx/aic7xxx_osm.c 		sg--;
sg               1543 drivers/scsi/aic7xxx/aic7xxx_osm.c 		sg->len |= ahc_htole32(AHC_DMA_LAST_SEG);
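A thread running through the aic79xx/aic7xxx excerpts above is the 39-bit addressing trick: the low 32 address bits live in sg->addr, address bits 32..38 are folded into bits 24..30 of the length word (lengths never exceed 24 bits), and the 0x7F000000 mask recovers them. A self-contained model of the pack/unpack, with invented helper names:

#include <stdint.h>

#define SG_LEN_MASK		0x00ffffffu
#define SG_HIGH_ADDR_MASK	0x7f000000u

struct dma_seg_model { uint32_t addr, len; };

static void seg_pack(struct dma_seg_model *seg, uint64_t addr, uint32_t len)
{
	seg->addr = (uint32_t)addr;
	seg->len = (len & SG_LEN_MASK) |
		   ((uint32_t)(addr >> 8) & SG_HIGH_ADDR_MASK);
}

static uint64_t seg_unpack_addr(const struct dma_seg_model *seg)
{
	return ((uint64_t)(seg->len & SG_HIGH_ADDR_MASK) << 8) | seg->addr;
}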
sg                 80 drivers/scsi/aic94xx/aic94xx_task.c 			struct sg_el *sg =
sg                 82 drivers/scsi/aic94xx/aic94xx_task.c 			sg->bus_addr = cpu_to_le64((u64)sg_dma_address(sc));
sg                 83 drivers/scsi/aic94xx/aic94xx_task.c 			sg->size = cpu_to_le32((u32)sg_dma_len(sc));
sg                 85 drivers/scsi/aic94xx/aic94xx_task.c 				sg->flags |= ASD_SG_EL_LIST_EOL;
sg               1733 drivers/scsi/arcmsr/arcmsr_hba.c 	struct scatterlist *sg;
sg               1746 drivers/scsi/arcmsr/arcmsr_hba.c 	scsi_for_each_sg(pcmd, sg, nseg, i) {
sg               1748 drivers/scsi/arcmsr/arcmsr_hba.c 		length = cpu_to_le32(sg_dma_len(sg));
sg               1749 drivers/scsi/arcmsr/arcmsr_hba.c 		address_lo = cpu_to_le32(dma_addr_lo32(sg_dma_address(sg)));
sg               1750 drivers/scsi/arcmsr/arcmsr_hba.c 		address_hi = cpu_to_le32(dma_addr_hi32(sg_dma_address(sg)));
sg               2779 drivers/scsi/arcmsr/arcmsr_hba.c 	struct scatterlist *sg;
sg               2782 drivers/scsi/arcmsr/arcmsr_hba.c 	sg = scsi_sglist(cmd);
sg               2783 drivers/scsi/arcmsr/arcmsr_hba.c 	buffer = kmap_atomic(sg_page(sg)) + sg->offset;
sg               2788 drivers/scsi/arcmsr/arcmsr_hba.c 	transfer_len += sg->length;
sg               3015 drivers/scsi/arcmsr/arcmsr_hba.c 		struct scatterlist *sg = scsi_sglist(cmd);
sg               3016 drivers/scsi/arcmsr/arcmsr_hba.c 		kunmap_atomic(buffer - sg->offset);
sg               3045 drivers/scsi/arcmsr/arcmsr_hba.c 		struct scatterlist *sg;
sg               3066 drivers/scsi/arcmsr/arcmsr_hba.c 		sg = scsi_sglist(cmd);
sg               3067 drivers/scsi/arcmsr/arcmsr_hba.c 		buffer = kmap_atomic(sg_page(sg)) + sg->offset;
sg               3070 drivers/scsi/arcmsr/arcmsr_hba.c 		sg = scsi_sglist(cmd);
sg               3071 drivers/scsi/arcmsr/arcmsr_hba.c 		kunmap_atomic(buffer - sg->offset);
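The arcmsr excerpts above (and the ips.c and megaraid ones further down) pair kmap_atomic()/kunmap_atomic() around the first SG element, which explains the odd-looking "buffer - sg->offset": the unmap must get back the page-aligned address the map returned. A kernel-style sketch of the pairing (assumes kernel context; not standalone-buildable):

static void *map_first_sg(struct scsi_cmnd *cmd)
{
	struct scatterlist *sg = scsi_sglist(cmd);

	/* kmap_atomic() returns the page base; step in by the data offset */
	return kmap_atomic(sg_page(sg)) + sg->offset;
}

static void unmap_first_sg(struct scsi_cmnd *cmd, void *buffer)
{
	struct scatterlist *sg = scsi_sglist(cmd);

	/* hand back the page-aligned address, hence the subtraction */
	kunmap_atomic(buffer - sg->offset);
}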
sg                 78 drivers/scsi/arm/cumana_2.c 	struct scatterlist	sg[NR_SG];	/* Scatter DMA list	*/
sg                167 drivers/scsi/arm/cumana_2.c 		bufs = copy_SCp_to_sg(&info->sg[0], SCp, NR_SG);
sg                178 drivers/scsi/arm/cumana_2.c 		dma_map_sg(dev, info->sg, bufs, map_dir);
sg                181 drivers/scsi/arm/cumana_2.c 		set_dma_sg(dmach, info->sg, bufs);
sg                 74 drivers/scsi/arm/eesox.c 	struct scatterlist	sg[NR_SG];	/* Scatter DMA list	*/
sg                166 drivers/scsi/arm/eesox.c 		bufs = copy_SCp_to_sg(&info->sg[0], SCp, NR_SG);
sg                175 drivers/scsi/arm/eesox.c 		dma_map_sg(dev, info->sg, bufs, map_dir);
sg                178 drivers/scsi/arm/eesox.c 		set_dma_sg(dmach, info->sg, bufs);
sg                 61 drivers/scsi/arm/powertec.c 	struct scatterlist	sg[NR_SG];
sg                139 drivers/scsi/arm/powertec.c 		bufs = copy_SCp_to_sg(&info->sg[0], SCp, NR_SG);
sg                148 drivers/scsi/arm/powertec.c 		dma_map_sg(dev, info->sg, bufs, map_dir);
sg                151 drivers/scsi/arm/powertec.c 		set_dma_sg(dmach, info->sg, bufs);
sg                 24 drivers/scsi/arm/scsi.h static inline int copy_SCp_to_sg(struct scatterlist *sg, struct scsi_pointer *SCp, int max)
sg                 33 drivers/scsi/arm/scsi.h 	sg_set_buf(sg, SCp->ptr, SCp->this_residual);
sg                 40 drivers/scsi/arm/scsi.h 			*(++sg) = *src_sg;
sg                 41 drivers/scsi/arm/scsi.h 		sg_mark_end(sg);
sg                 97 drivers/scsi/arm/scsi.h 			struct scatterlist *sg;
sg                100 drivers/scsi/arm/scsi.h 			scsi_for_each_sg(SCpnt, sg, sg_count, i)
sg                101 drivers/scsi/arm/scsi.h 				len += sg->length;
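The three Acorn drivers above (cumana_2, eesox, powertec) follow one flow: snapshot the in-progress scsi_pointer into a driver-local SG array with copy_SCp_to_sg(), DMA-map it, and hand it to the board's DMA engine via set_dma_sg(). A kernel-style condensation; unlike the excerpts, this sketch also consumes the count dma_map_sg() returns:

static void start_sg_dma(struct device *dev, unsigned int dmach,
			 struct scatterlist *sg_array, int max_sg,
			 struct scsi_pointer *SCp,
			 enum dma_data_direction map_dir)
{
	int bufs = copy_SCp_to_sg(&sg_array[0], SCp, max_sg);

	bufs = dma_map_sg(dev, sg_array, bufs, map_dir);
	set_dma_sg(dmach, sg_array, bufs);
}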
sg               2074 drivers/scsi/be2iscsi/be_main.c hwi_write_sgl_v2(struct iscsi_wrb *pwrb, struct scatterlist *sg,
sg               2089 drivers/scsi/be2iscsi/be_main.c 	l_sg = sg;
sg               2091 drivers/scsi/be2iscsi/be_main.c 			sg = sg_next(sg)) {
sg               2093 drivers/scsi/be2iscsi/be_main.c 			sg_len = sg_dma_len(sg);
sg               2094 drivers/scsi/be2iscsi/be_main.c 			addr = (u64) sg_dma_address(sg);
sg               2108 drivers/scsi/be2iscsi/be_main.c 			sg_len = sg_dma_len(sg);
sg               2109 drivers/scsi/be2iscsi/be_main.c 			addr = (u64) sg_dma_address(sg);
sg               2148 drivers/scsi/be2iscsi/be_main.c 	sg = l_sg;
sg               2152 drivers/scsi/be2iscsi/be_main.c 	for (index = 0; index < num_sg; index++, sg = sg_next(sg), psgl++) {
sg               2153 drivers/scsi/be2iscsi/be_main.c 		sg_len = sg_dma_len(sg);
sg               2154 drivers/scsi/be2iscsi/be_main.c 		addr = (u64) sg_dma_address(sg);
sg               2169 drivers/scsi/be2iscsi/be_main.c hwi_write_sgl(struct iscsi_wrb *pwrb, struct scatterlist *sg,
sg               2184 drivers/scsi/be2iscsi/be_main.c 	l_sg = sg;
sg               2186 drivers/scsi/be2iscsi/be_main.c 							 sg = sg_next(sg)) {
sg               2188 drivers/scsi/be2iscsi/be_main.c 			sg_len = sg_dma_len(sg);
sg               2189 drivers/scsi/be2iscsi/be_main.c 			addr = (u64) sg_dma_address(sg);
sg               2200 drivers/scsi/be2iscsi/be_main.c 			sg_len = sg_dma_len(sg);
sg               2201 drivers/scsi/be2iscsi/be_main.c 			addr = (u64) sg_dma_address(sg);
sg               2236 drivers/scsi/be2iscsi/be_main.c 	sg = l_sg;
sg               2240 drivers/scsi/be2iscsi/be_main.c 	for (index = 0; index < num_sg; index++, sg = sg_next(sg), psgl++) {
sg               2241 drivers/scsi/be2iscsi/be_main.c 		sg_len = sg_dma_len(sg);
sg               2242 drivers/scsi/be2iscsi/be_main.c 		addr = (u64) sg_dma_address(sg);
sg               4518 drivers/scsi/be2iscsi/be_main.c static int beiscsi_iotask_v2(struct iscsi_task *task, struct scatterlist *sg,
sg               4558 drivers/scsi/be2iscsi/be_main.c 	hwi_write_sgl_v2(pwrb, sg, num_sg, io_task);
sg               4579 drivers/scsi/be2iscsi/be_main.c static int beiscsi_iotask(struct iscsi_task *task, struct scatterlist *sg,
sg               4618 drivers/scsi/be2iscsi/be_main.c 	hwi_write_sgl(pwrb, sg, num_sg, io_task);
sg               4763 drivers/scsi/be2iscsi/be_main.c 	struct scatterlist *sg;
sg               4801 drivers/scsi/be2iscsi/be_main.c 	sg = scsi_sglist(sc);
sg               4807 drivers/scsi/be2iscsi/be_main.c 	return phba->iotask_fn(task, sg, num_sg, xferlen, writedir);
sg                374 drivers/scsi/be2iscsi/be_main.h 			struct scatterlist *sg,
sg               2458 drivers/scsi/bfa/bfa_fcpim.c 	struct scatterlist *sg;
sg               2487 drivers/scsi/bfa/bfa_fcpim.c 	scsi_for_each_sg(cmnd, sg, ioim->nsges, i) {
sg               2490 drivers/scsi/bfa/bfa_fcpim.c 			addr = bfa_sgaddr_le(sg_dma_address(sg));
sg               2492 drivers/scsi/bfa/bfa_fcpim.c 			pgdlen = sg_dma_len(sg);
sg               2502 drivers/scsi/bfa/bfa_fcpim.c 			addr = bfa_sgaddr_le(sg_dma_address(sg));
sg               2504 drivers/scsi/bfa/bfa_fcpim.c 			sgpge->sg_len = sg_dma_len(sg);
sg               1650 drivers/scsi/bnx2fc/bnx2fc_io.c 	struct scatterlist *sg;
sg               1666 drivers/scsi/bnx2fc/bnx2fc_io.c 	scsi_for_each_sg(sc, sg, sg_count, i) {
sg               1667 drivers/scsi/bnx2fc/bnx2fc_io.c 		sg_len = sg_dma_len(sg);
sg               1668 drivers/scsi/bnx2fc/bnx2fc_io.c 		addr = sg_dma_address(sg);
sg                261 drivers/scsi/bnx2i/bnx2i.h 	struct scatterlist *sg;
sg                147 drivers/scsi/bnx2i/bnx2i_iscsi.c 	struct scatterlist *sg;
sg                159 drivers/scsi/bnx2i/bnx2i_iscsi.c 	scsi_for_each_sg(sc, sg, sg_count, i) {
sg                160 drivers/scsi/bnx2i/bnx2i_iscsi.c 		sg_len = sg_dma_len(sg);
sg                161 drivers/scsi/bnx2i/bnx2i_iscsi.c 		addr = (u64) sg_dma_address(sg);
sg               1492 drivers/scsi/csiostor/csio_scsi.c 	struct scatterlist *sg;
sg               1503 drivers/scsi/csiostor/csio_scsi.c 	sg = scsi_sglist(scmnd);
sg               1507 drivers/scsi/csiostor/csio_scsi.c 	while (bytes_left > 0 && sg && dma_buf) {
sg               1515 drivers/scsi/csiostor/csio_scsi.c 		if (start_off >= sg->length) {
sg               1516 drivers/scsi/csiostor/csio_scsi.c 			start_off -= sg->length;
sg               1517 drivers/scsi/csiostor/csio_scsi.c 			sg = sg_next(sg);
sg               1522 drivers/scsi/csiostor/csio_scsi.c 		sg_off = sg->offset + start_off;
sg               1524 drivers/scsi/csiostor/csio_scsi.c 				sg->length - start_off);
sg               1528 drivers/scsi/csiostor/csio_scsi.c 		sg_addr = kmap_atomic(sg_page(sg) + (sg_off >> PAGE_SHIFT));
sg               1531 drivers/scsi/csiostor/csio_scsi.c 				sg, req);
sg               1092 drivers/scsi/cxgbi/cxgb3i/cxgb3i.c 	struct scatterlist *sg = ttinfo->sgl;
sg               1109 drivers/scsi/cxgbi/cxgb3i/cxgb3i.c 		cxgbi_ddp_set_one_ppod(ppod, ttinfo, &sg,
sg               1977 drivers/scsi/cxgbi/cxgb4i/cxgb4i.c 	struct scatterlist *sg = ttinfo->sgl;
sg               1988 drivers/scsi/cxgbi/cxgb4i/cxgb4i.c 					   &sg, &offset);
sg               1232 drivers/scsi/cxgbi/libcxgbi.c 	struct scatterlist *sg = sg_pp ? *sg_pp : NULL;
sg               1240 drivers/scsi/cxgbi/libcxgbi.c 	if (sg) {
sg               1241 drivers/scsi/cxgbi/libcxgbi.c 		addr = sg_dma_address(sg);
sg               1242 drivers/scsi/cxgbi/libcxgbi.c 		len = sg_dma_len(sg);
sg               1246 drivers/scsi/cxgbi/libcxgbi.c 		if (sg) {
sg               1249 drivers/scsi/cxgbi/libcxgbi.c 			if (offset == (len + sg->offset)) {
sg               1251 drivers/scsi/cxgbi/libcxgbi.c 				sg = sg_next(sg);
sg               1252 drivers/scsi/cxgbi/libcxgbi.c 				if (sg) {
sg               1253 drivers/scsi/cxgbi/libcxgbi.c 					addr = sg_dma_address(sg);
sg               1254 drivers/scsi/cxgbi/libcxgbi.c 					len = sg_dma_len(sg);
sg               1267 drivers/scsi/cxgbi/libcxgbi.c 		*sg_pp = sg;
sg               1273 drivers/scsi/cxgbi/libcxgbi.c 		sg = sg_next(sg);
sg               1274 drivers/scsi/cxgbi/libcxgbi.c 		if (sg) {
sg               1275 drivers/scsi/cxgbi/libcxgbi.c 			addr = sg_dma_address(sg);
sg               1276 drivers/scsi/cxgbi/libcxgbi.c 			len = sg_dma_len(sg);
sg               1279 drivers/scsi/cxgbi/libcxgbi.c 	ppod->addr[i] = sg ? cpu_to_be64(addr + offset) : 0ULL;
sg               1318 drivers/scsi/cxgbi/libcxgbi.c 	struct scatterlist *sg = sgl;
sg               1320 drivers/scsi/cxgbi/libcxgbi.c 	for (i = 0; i < nents; i++, sg = sg_next(sg)) {
sg               1321 drivers/scsi/cxgbi/libcxgbi.c 		unsigned int len = sg->length + sg->offset;
sg               1323 drivers/scsi/cxgbi/libcxgbi.c 		if ((sg->offset & 0x3) || (i && sg->offset) ||
sg               1327 drivers/scsi/cxgbi/libcxgbi.c 				  i, nents, sg->offset, sg->length);
sg               1823 drivers/scsi/cxgbi/libcxgbi.c 	struct scatterlist *sg;
sg               1825 drivers/scsi/cxgbi/libcxgbi.c 	for_each_sg(sgl, sg, sgcnt, i) {
sg               1826 drivers/scsi/cxgbi/libcxgbi.c 		if (offset < sg->length) {
sg               1828 drivers/scsi/cxgbi/libcxgbi.c 			*sgp = sg;
sg               1831 drivers/scsi/cxgbi/libcxgbi.c 		offset -= sg->length;
sg               1836 drivers/scsi/cxgbi/libcxgbi.c static int sgl_read_to_frags(struct scatterlist *sg, unsigned int sgoffset,
sg               1841 drivers/scsi/cxgbi/libcxgbi.c 	unsigned int sglen = sg->length - sgoffset;
sg               1842 drivers/scsi/cxgbi/libcxgbi.c 	struct page *page = sg_page(sg);
sg               1850 drivers/scsi/cxgbi/libcxgbi.c 			sg = sg_next(sg);
sg               1851 drivers/scsi/cxgbi/libcxgbi.c 			if (!sg) {
sg               1857 drivers/scsi/cxgbi/libcxgbi.c 			sglen = sg->length;
sg               1858 drivers/scsi/cxgbi/libcxgbi.c 			page = sg_page(sg);
sg               1863 drivers/scsi/cxgbi/libcxgbi.c 		    sgoffset + sg->offset ==
sg               1874 drivers/scsi/cxgbi/libcxgbi.c 			frags[i].offset = sg->offset + sgoffset;
sg               1981 drivers/scsi/cxgbi/libcxgbi.c 		struct scatterlist *sg = NULL;
sg               1988 drivers/scsi/cxgbi/libcxgbi.c 					tdata->offset, &tdata->sgoffset, &sg);
sg               1994 drivers/scsi/cxgbi/libcxgbi.c 		err = sgl_read_to_frags(sg, tdata->sgoffset, tdata->count,
sg                571 drivers/scsi/cxlflash/main.c 	struct scatterlist *sg = scsi_sglist(scp);
sg                617 drivers/scsi/cxlflash/main.c 	if (likely(sg)) {
sg                618 drivers/scsi/cxlflash/main.c 		cmd->rcb.data_len = sg->length;
sg                619 drivers/scsi/cxlflash/main.c 		cmd->rcb.data_ea = (uintptr_t)sg_virt(sg);
sg                914 drivers/scsi/dc395x.c 		struct scatterlist *sg;
sg                924 drivers/scsi/dc395x.c 		scsi_for_each_sg(cmd, sg, srb->sg_count, i) {
sg                925 drivers/scsi/dc395x.c 			u32 busaddr = (u32)sg_dma_address(sg);
sg                926 drivers/scsi/dc395x.c 			u32 seglen = (u32)sg->length;
sg               3360 drivers/scsi/dc395x.c 		struct scatterlist* sg = scsi_sglist(cmd);
sg               3364 drivers/scsi/dc395x.c 		base = scsi_kmap_atomic_sg(sg, scsi_sg_count(cmd), &offset, &len);
sg               1717 drivers/scsi/dpt_i2o.c 		struct sg_simple_element *sg =  (struct sg_simple_element*) (msg+sg_offset);
sg               1728 drivers/scsi/dpt_i2o.c 			if (!(sg[i].flag_count & 0x10000000 /*I2O_SGL_FLAGS_SIMPLE_ADDRESS_ELEMENT*/)) {
sg               1729 drivers/scsi/dpt_i2o.c 				printk(KERN_DEBUG"%s:Bad SG element %d - not simple (%x)\n",pHba->name,i,  sg[i].flag_count);
sg               1733 drivers/scsi/dpt_i2o.c 			sg_size = sg[i].flag_count & 0xffffff;      
sg               1744 drivers/scsi/dpt_i2o.c 			if(sg[i].flag_count & 0x04000000 /*I2O_SGL_FLAGS_DIR*/) {
sg               1746 drivers/scsi/dpt_i2o.c 				if (copy_from_user(p,(void __user *)(ulong)sg[i].addr_bus, sg_size)) {
sg               1753 drivers/scsi/dpt_i2o.c 			sg[i].addr_bus = addr;
sg               1784 drivers/scsi/dpt_i2o.c 		struct sg_simple_element* sg;
sg               1808 drivers/scsi/dpt_i2o.c 		sg 	 = (struct sg_simple_element*)(msg + sg_offset);
sg               1811 drivers/scsi/dpt_i2o.c 			if(! (sg[j].flag_count & 0x4000000 /*I2O_SGL_FLAGS_DIR*/)) {
sg               1812 drivers/scsi/dpt_i2o.c 				sg_size = sg[j].flag_count & 0xffffff; 
sg               1814 drivers/scsi/dpt_i2o.c 				if (copy_to_user((void __user *)(ulong)sg[j].addr_bus,sg_list[j], sg_size)) {
sg               1815 drivers/scsi/dpt_i2o.c 					printk(KERN_WARNING"%s: Could not copy %p TO user %x\n",pHba->name, sg_list[j], sg[j].addr_bus);
sg               1839 drivers/scsi/dpt_i2o.c 		struct sg_simple_element *sg =
sg               1844 drivers/scsi/dpt_i2o.c 					sg[sg_index].flag_count & 0xffffff,
sg               1846 drivers/scsi/dpt_i2o.c 					sg[sg_index].addr_bus);
sg               2273 drivers/scsi/dpt_i2o.c 		struct scatterlist *sg;
sg               2276 drivers/scsi/dpt_i2o.c 		scsi_for_each_sg(cmd, sg, nseg, i) {
sg               2278 drivers/scsi/dpt_i2o.c 			*mptr++ = direction|0x10000000|sg_dma_len(sg);
sg               2279 drivers/scsi/dpt_i2o.c 			len+=sg_dma_len(sg);
sg               2280 drivers/scsi/dpt_i2o.c 			addr = sg_dma_address(sg);
sg               2286 drivers/scsi/dpt_i2o.c 				*lptr = direction|0xD0000000|sg_dma_len(sg);
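The dpt_i2o.c excerpts decode the I2O simple SG element: the low 24 bits of flag_count are the byte count, the top byte is flags (0x10 marks a simple address element, 0x04 the direction, and 0xD0 tags the final element in the build path). A small model of that encoding; the helper names are mine:

#include <stdint.h>

#define I2O_SGL_SIMPLE	0x10000000u
#define I2O_SGL_DIR	0x04000000u
#define I2O_SGL_LEN(fc)	((fc) & 0x00ffffffu)

struct sg_simple_model { uint32_t flag_count, addr_bus; };

static int sg_simple_decode(const struct sg_simple_model *e,
			    uint32_t *len, int *is_write)
{
	if (!(e->flag_count & I2O_SGL_SIMPLE))
		return -1;	/* not a simple address element */
	*len = I2O_SGL_LEN(e->flag_count);
	*is_write = !!(e->flag_count & I2O_SGL_DIR);
	return 0;
}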
sg                372 drivers/scsi/esp_scsi.c 	struct scatterlist *sg = scsi_sglist(cmd);
sg                395 drivers/scsi/esp_scsi.c 	spriv->cur_residue = sg_dma_len(sg);
sg                397 drivers/scsi/esp_scsi.c 	spriv->cur_sg = sg;
sg                322 drivers/scsi/fnic/fnic_scsi.c 	struct scatterlist *sg;
sg                336 drivers/scsi/fnic/fnic_scsi.c 		for_each_sg(scsi_sglist(sc), sg, sg_count, i) {
sg                337 drivers/scsi/fnic/fnic_scsi.c 			desc->addr = cpu_to_le64(sg_dma_address(sg));
sg                338 drivers/scsi/fnic/fnic_scsi.c 			desc->len = cpu_to_le32(sg_dma_len(sg));
sg                894 drivers/scsi/hisi_sas/hisi_sas_v1_hw.c 	struct scatterlist *sg;
sg                897 drivers/scsi/hisi_sas/hisi_sas_v1_hw.c 	for_each_sg(scatter, sg, n_elem, i) {
sg                900 drivers/scsi/hisi_sas/hisi_sas_v1_hw.c 		entry->addr = cpu_to_le64(sg_dma_address(sg));
sg                902 drivers/scsi/hisi_sas/hisi_sas_v1_hw.c 		entry->data_len = cpu_to_le32(sg_dma_len(sg));
sg               1674 drivers/scsi/hisi_sas/hisi_sas_v2_hw.c 	struct scatterlist *sg;
sg               1677 drivers/scsi/hisi_sas/hisi_sas_v2_hw.c 	for_each_sg(scatter, sg, n_elem, i) {
sg               1680 drivers/scsi/hisi_sas/hisi_sas_v2_hw.c 		entry->addr = cpu_to_le64(sg_dma_address(sg));
sg               1682 drivers/scsi/hisi_sas/hisi_sas_v2_hw.c 		entry->data_len = cpu_to_le32(sg_dma_len(sg));
sg               1055 drivers/scsi/hisi_sas/hisi_sas_v3_hw.c 	struct scatterlist *sg;
sg               1058 drivers/scsi/hisi_sas/hisi_sas_v3_hw.c 	for_each_sg(scatter, sg, n_elem, i) {
sg               1061 drivers/scsi/hisi_sas/hisi_sas_v3_hw.c 		entry->addr = cpu_to_le64(sg_dma_address(sg));
sg               1063 drivers/scsi/hisi_sas/hisi_sas_v3_hw.c 		entry->data_len = cpu_to_le32(sg_dma_len(sg));
sg               1079 drivers/scsi/hisi_sas/hisi_sas_v3_hw.c 	struct scatterlist *sg;
sg               1084 drivers/scsi/hisi_sas/hisi_sas_v3_hw.c 	for_each_sg(scatter, sg, n_elem, i) {
sg               1087 drivers/scsi/hisi_sas/hisi_sas_v3_hw.c 		entry->addr = cpu_to_le64(sg_dma_address(sg));
sg               1090 drivers/scsi/hisi_sas/hisi_sas_v3_hw.c 		entry->data_len = cpu_to_le32(sg_dma_len(sg));
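All three hisi_sas generations above (and fnic, ibmvfc, ibmvscsi nearby) reduce to the same template: for_each_sg() over the mapped list, one little-endian descriptor per element. A condensed kernel-style sketch with an invented entry layout:

struct prd_entry_model {
	__le64 addr;
	__le32 data_len;
};

static void fill_prd_table(struct prd_entry_model *table,
			   struct scatterlist *scatter, int n_elem)
{
	struct scatterlist *sg;
	int i;

	for_each_sg(scatter, sg, n_elem, i) {
		table[i].addr = cpu_to_le64(sg_dma_address(sg));
		table[i].data_len = cpu_to_le32(sg_dma_len(sg));
	}
}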
sg               2255 drivers/scsi/hpsa.c 	chain_size = le32_to_cpu(cp->sg[0].length);
sg               2260 drivers/scsi/hpsa.c 		cp->sg->address = 0;
sg               2263 drivers/scsi/hpsa.c 	cp->sg->address = cpu_to_le64(temp64);
sg               2274 drivers/scsi/hpsa.c 	chain_sg = cp->sg;
sg               2276 drivers/scsi/hpsa.c 	chain_size = le32_to_cpu(cp->sg[0].length);
sg               2583 drivers/scsi/hpsa.c 		(c2->sg[0].chain_indicator == IOACCEL2_CHAIN))
sg               4529 drivers/scsi/hpsa.c 				   struct scatterlist *sg)
sg               4531 drivers/scsi/hpsa.c 	u64 addr64 = (u64) sg_dma_address(sg);
sg               4532 drivers/scsi/hpsa.c 	unsigned int len = sg_dma_len(sg);
sg               4548 drivers/scsi/hpsa.c 	struct scatterlist *sg;
sg               4572 drivers/scsi/hpsa.c 	scsi_for_each_sg(cmd, sg, sg_limit, i) {
sg               4573 drivers/scsi/hpsa.c 		hpsa_set_sg_descriptor(curr_sg, sg);
sg               4586 drivers/scsi/hpsa.c 		for_each_sg(sg, sg, sg_limit, i) {
sg               4587 drivers/scsi/hpsa.c 			hpsa_set_sg_descriptor(curr_sg, sg);
sg               4709 drivers/scsi/hpsa.c 	struct scatterlist *sg;
sg               4749 drivers/scsi/hpsa.c 		scsi_for_each_sg(cmd, sg, use_sg, i) {
sg               4750 drivers/scsi/hpsa.c 			addr64 = (u64) sg_dma_address(sg);
sg               4751 drivers/scsi/hpsa.c 			len  = sg_dma_len(sg);
sg               4883 drivers/scsi/hpsa.c 	struct scatterlist *sg;
sg               4923 drivers/scsi/hpsa.c 		curr_sg = cp->sg;
sg               4936 drivers/scsi/hpsa.c 		scsi_for_each_sg(cmd, sg, use_sg, i) {
sg               4937 drivers/scsi/hpsa.c 			addr64 = (u64) sg_dma_address(sg);
sg               4938 drivers/scsi/hpsa.c 			len  = sg_dma_len(sg);
sg               4993 drivers/scsi/hpsa.c 		cp->sg[0].length = cpu_to_le32(use_sg * sizeof(cp->sg[0]));
sg               9153 drivers/scsi/hpsa.c 	BUILD_BUG_ON(offsetof(struct io_accel2_cmd, sg) >
sg               9269 drivers/scsi/hpsa.c 		BUILD_BUG_ON(offsetof(struct io_accel2_cmd, sg) != 64);
sg               9930 drivers/scsi/hpsa.c 	VERIFY_OFFSET(sg, 64);
sg                592 drivers/scsi/hpsa_cmd.h 	struct ioaccel2_sg_element sg[IOACCEL2_MAXSGENTRIES];
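Behind the hpsa excerpts above is two-level SG chaining: when a command needs more descriptors than the controller block holds, the last in-line descriptor is converted into a pointer to an external chain block, with its length field recording the chain's byte size. A simplified stand-in layout:

#include <stdint.h>

struct sg_desc_model {
	uint64_t address;
	uint32_t length;
	uint32_t chain_indicator;	/* nonzero: points at a chain block */
};

static void set_chain_desc(struct sg_desc_model *last,
			   uint64_t chain_dma, uint32_t nents_left)
{
	last->address = chain_dma;
	last->length = nents_left * sizeof(struct sg_desc_model);
	last->chain_indicator = 1;
}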
sg                851 drivers/scsi/hptiop.c 	struct scatterlist *sg;
sg                864 drivers/scsi/hptiop.c 	scsi_for_each_sg(scp, sg, HPT_SCP(scp)->sgcnt, idx) {
sg                865 drivers/scsi/hptiop.c 		psg[idx].pci_address = cpu_to_le64(sg_dma_address(sg)) |
sg                867 drivers/scsi/hptiop.c 		psg[idx].size = cpu_to_le32(sg_dma_len(sg));
sg               1301 drivers/scsi/ibmvscsi/ibmvfc.c 	struct scatterlist *sg;
sg               1303 drivers/scsi/ibmvscsi/ibmvfc.c 	scsi_for_each_sg(scmd, sg, nseg, i) {
sg               1304 drivers/scsi/ibmvscsi/ibmvfc.c 		md[i].va = cpu_to_be64(sg_dma_address(sg));
sg               1305 drivers/scsi/ibmvscsi/ibmvfc.c 		md[i].len = cpu_to_be32(sg_dma_len(sg));
sg                656 drivers/scsi/ibmvscsi/ibmvscsi.c 	struct scatterlist *sg;
sg                659 drivers/scsi/ibmvscsi/ibmvscsi.c 	scsi_for_each_sg(cmd, sg, nseg, i) {
sg                661 drivers/scsi/ibmvscsi/ibmvscsi.c 		descr->va = cpu_to_be64(sg_dma_address(sg));
sg                662 drivers/scsi/ibmvscsi/ibmvscsi.c 		descr->len = cpu_to_be32(sg_dma_len(sg));
sg                664 drivers/scsi/ibmvscsi/ibmvscsi.c 		total_length += sg_dma_len(sg);
sg               3203 drivers/scsi/ibmvscsi_tgt/ibmvscsi_tgt.c static int ibmvscsis_rdma(struct ibmvscsis_cmd *cmd, struct scatterlist *sg,
sg               3221 drivers/scsi/ibmvscsi_tgt/ibmvscsi_tgt.c 	sgp = sg;
sg                166 drivers/scsi/ibmvscsi_tgt/libsrp.c 	struct scatterlist *sg = NULL;
sg                171 drivers/scsi/ibmvscsi_tgt/libsrp.c 		sg = cmd->se_cmd.t_data_sg;
sg                172 drivers/scsi/ibmvscsi_tgt/libsrp.c 		nsg = dma_map_sg(iue->target->dev, sg, cmd->se_cmd.t_data_nents,
sg                184 drivers/scsi/ibmvscsi_tgt/libsrp.c 	err = rdma_io(cmd, sg, nsg, md, 1, dir, len);
sg                187 drivers/scsi/ibmvscsi_tgt/libsrp.c 		dma_unmap_sg(iue->target->dev, sg, nsg, DMA_BIDIRECTIONAL);
sg                199 drivers/scsi/ibmvscsi_tgt/libsrp.c 	struct scatterlist dummy, *sg = NULL;
sg                206 drivers/scsi/ibmvscsi_tgt/libsrp.c 		sg = cmd->se_cmd.t_data_sg;
sg                243 drivers/scsi/ibmvscsi_tgt/libsrp.c 		nsg = dma_map_sg(iue->target->dev, sg, cmd->se_cmd.t_data_nents,
sg                256 drivers/scsi/ibmvscsi_tgt/libsrp.c 	err = rdma_io(cmd, sg, nsg, md, nmd, dir, len);
sg                259 drivers/scsi/ibmvscsi_tgt/libsrp.c 		dma_unmap_sg(iue->target->dev, sg, nsg, DMA_BIDIRECTIONAL);
sg               2535 drivers/scsi/initio.c 	struct sg_entry *sg;		/* Pointer to SG list           */
sg               2586 drivers/scsi/initio.c 		sg = &cblk->sglist[0];
sg               2588 drivers/scsi/initio.c 			sg->data = cpu_to_le32((u32)sg_dma_address(sglist));
sg               2589 drivers/scsi/initio.c 			sg->len = cpu_to_le32((u32)sg_dma_len(sglist));
sg               2591 drivers/scsi/initio.c 			++sg;
sg               3904 drivers/scsi/ipr.c 	struct scatterlist *sg;
sg               3910 drivers/scsi/ipr.c 	sg = sglist->scatterlist;
sg               3912 drivers/scsi/ipr.c 	for (i = 0; i < (len / bsize_elem); i++, sg = sg_next(sg),
sg               3914 drivers/scsi/ipr.c 		struct page *page = sg_page(sg);
sg               3920 drivers/scsi/ipr.c 		sg->length = bsize_elem;
sg               3929 drivers/scsi/ipr.c 		struct page *page = sg_page(sg);
sg               3935 drivers/scsi/ipr.c 		sg->length = len % bsize_elem;
sg               3956 drivers/scsi/ipr.c 	struct scatterlist *sg;
sg               3965 drivers/scsi/ipr.c 	for_each_sg(scatterlist, sg, ipr_cmd->dma_use_sg, i) {
sg               3967 drivers/scsi/ipr.c 		ioadl64[i].data_len = cpu_to_be32(sg_dma_len(sg));
sg               3968 drivers/scsi/ipr.c 		ioadl64[i].address = cpu_to_be64(sg_dma_address(sg));
sg               3988 drivers/scsi/ipr.c 	struct scatterlist *sg;
sg               3998 drivers/scsi/ipr.c 	for_each_sg(scatterlist, sg, ipr_cmd->dma_use_sg, i) {
sg               4000 drivers/scsi/ipr.c 			cpu_to_be32(IPR_IOADL_FLAGS_WRITE | sg_dma_len(sg));
sg               4002 drivers/scsi/ipr.c 			cpu_to_be32(sg_dma_address(sg));
sg               5921 drivers/scsi/ipr.c 	struct scatterlist *sg;
sg               5951 drivers/scsi/ipr.c 	scsi_for_each_sg(scsi_cmd, sg, ipr_cmd->dma_use_sg, i) {
sg               5953 drivers/scsi/ipr.c 		ioadl64[i].data_len = cpu_to_be32(sg_dma_len(sg));
sg               5954 drivers/scsi/ipr.c 		ioadl64[i].address = cpu_to_be64(sg_dma_address(sg));
sg               5973 drivers/scsi/ipr.c 	struct scatterlist *sg;
sg               6012 drivers/scsi/ipr.c 	scsi_for_each_sg(scsi_cmd, sg, ipr_cmd->dma_use_sg, i) {
sg               6014 drivers/scsi/ipr.c 			cpu_to_be32(ioadl_flags | sg_dma_len(sg));
sg               6015 drivers/scsi/ipr.c 		ioadl[i].address = cpu_to_be32(sg_dma_address(sg));
sg               6906 drivers/scsi/ipr.c 	struct scatterlist *sg;
sg               6925 drivers/scsi/ipr.c 	for_each_sg(qc->sg, sg, qc->n_elem, si) {
sg               6927 drivers/scsi/ipr.c 		ioadl64->data_len = cpu_to_be32(sg_dma_len(sg));
sg               6928 drivers/scsi/ipr.c 		ioadl64->address = cpu_to_be64(sg_dma_address(sg));
sg               6952 drivers/scsi/ipr.c 	struct scatterlist *sg;
sg               6971 drivers/scsi/ipr.c 	for_each_sg(qc->sg, sg, qc->n_elem, si) {
sg               6972 drivers/scsi/ipr.c 		ioadl->flags_and_data_len = cpu_to_be32(ioadl_flags | sg_dma_len(sg));
sg               6973 drivers/scsi/ipr.c 		ioadl->address = cpu_to_be32(sg_dma_address(sg));
sg               1495 drivers/scsi/ips.c                 struct scatterlist *sg = scsi_sglist(SC);
sg               1501 drivers/scsi/ips.c                 buffer = kmap_atomic(sg_page(sg)) + sg->offset;
sg               1504 drivers/scsi/ips.c                         kunmap_atomic(buffer - sg->offset);
sg               1508 drivers/scsi/ips.c                 kunmap_atomic(buffer - sg->offset);
sg               1562 drivers/scsi/ips.c         struct scatterlist *sg = scsi_sglist(SC);
sg               1566 drivers/scsi/ips.c         scsi_for_each_sg(SC, sg, scsi_sg_count(SC), i)
sg               1567 drivers/scsi/ips.c 		length += sg->length;
sg               2663 drivers/scsi/ips.c 			struct scatterlist *sg;
sg               2668 drivers/scsi/ips.c                         scsi_for_each_sg(SC, sg, scb->sg_count, i) {
sg               2670 drivers/scsi/ips.c 				    (ha, sg_dma_address(sg), scb, i,
sg               2671 drivers/scsi/ips.c 				     sg_dma_len(sg)) < 0)
sg               3157 drivers/scsi/ips.c                         struct scatterlist *sg;
sg               3163 drivers/scsi/ips.c                         sg = scsi_sglist(scb->scsi_cmd);
sg               3168 drivers/scsi/ips.c                                 sg = sg_next(sg);
sg               3172 drivers/scsi/ips.c                                                sg_dma_address(sg),
sg               3174 drivers/scsi/ips.c                                                sg_dma_len(sg));
sg               3177 drivers/scsi/ips.c                              sg_dma_index++, sg = sg_next(sg)) {
sg               3180 drivers/scsi/ips.c                                      sg_dma_address(sg),
sg               3182 drivers/scsi/ips.c                                      sg_dma_len(sg)) < 0)
sg                105 drivers/scsi/isci/request.c static void init_sgl_element(struct scu_sgl_element *e, struct scatterlist *sg)
sg                107 drivers/scsi/isci/request.c 	e->length = sg_dma_len(sg);
sg                108 drivers/scsi/isci/request.c 	e->address_upper = upper_32_bits(sg_dma_address(sg));
sg                109 drivers/scsi/isci/request.c 	e->address_lower = lower_32_bits(sg_dma_address(sg));
sg                117 drivers/scsi/isci/request.c 	struct scatterlist *sg = NULL;
sg                124 drivers/scsi/isci/request.c 		sg = task->scatter;
sg                126 drivers/scsi/isci/request.c 		while (sg) {
sg                128 drivers/scsi/isci/request.c 			init_sgl_element(&scu_sg->A, sg);
sg                129 drivers/scsi/isci/request.c 			sg = sg_next(sg);
sg                130 drivers/scsi/isci/request.c 			if (sg) {
sg                131 drivers/scsi/isci/request.c 				init_sgl_element(&scu_sg->B, sg);
sg                132 drivers/scsi/isci/request.c 				sg = sg_next(sg);
sg               1415 drivers/scsi/isci/request.c 	struct scatterlist *sg;
sg               1424 drivers/scsi/isci/request.c 		sg = task->scatter;
sg               1427 drivers/scsi/isci/request.c 			struct page *page = sg_page(sg);
sg               1429 drivers/scsi/isci/request.c 			copy_len = min_t(int, total_len, sg_dma_len(sg));
sg               1431 drivers/scsi/isci/request.c 			memcpy(kaddr + sg->offset, src_addr, copy_len);
sg               1435 drivers/scsi/isci/request.c 			sg = sg_next(sg);
sg               1773 drivers/scsi/isci/request.c 		struct scatterlist *sg = &task->smp_task.smp_resp;
sg               1780 drivers/scsi/isci/request.c 		kaddr = kmap_atomic(sg_page(sg));
sg               1781 drivers/scsi/isci/request.c 		rsp = kaddr + sg->offset;
sg               1791 drivers/scsi/isci/request.c 			word_cnt = (sg->length/4)-1;
sg               2920 drivers/scsi/isci/request.c 		struct scatterlist *sg = &task->smp_task.smp_req;
sg               2924 drivers/scsi/isci/request.c 		dma_unmap_sg(&ihost->pdev->dev, sg, 1, DMA_TO_DEVICE);
sg               2927 drivers/scsi/isci/request.c 		kaddr = kmap_atomic(sg_page(sg));
sg               2928 drivers/scsi/isci/request.c 		smp_req = kaddr + sg->offset;
sg               2929 drivers/scsi/isci/request.c 		sci_swab32_cpy(smp_req, smp_req, sg->length / sizeof(u32));
sg               3188 drivers/scsi/isci/request.c 	struct scatterlist *sg = &task->smp_task.smp_req;
sg               3197 drivers/scsi/isci/request.c 	kaddr = kmap_atomic(sg_page(sg));
sg               3198 drivers/scsi/isci/request.c 	smp_req = kaddr + sg->offset;
sg               3221 drivers/scsi/isci/request.c 	sci_swab32_cpy(smp_req, smp_req, sg->length / sizeof(u32));
sg               3225 drivers/scsi/isci/request.c 	if (!dma_map_sg(dev, sg, 1, DMA_TO_DEVICE))
sg               3295 drivers/scsi/isci/request.c 	task_context->command_iu_upper = upper_32_bits(sg_dma_address(sg));
sg               3296 drivers/scsi/isci/request.c 	task_context->command_iu_lower = lower_32_bits(sg_dma_address(sg) + sizeof(u32));
sg                268 drivers/scsi/iscsi_tcp.c 		struct scatterlist *sg;
sg                281 drivers/scsi/iscsi_tcp.c 			sg = segment->sg;
sg                282 drivers/scsi/iscsi_tcp.c 			offset += segment->sg_offset + sg->offset;
sg                283 drivers/scsi/iscsi_tcp.c 			r = tcp_sw_conn->sendpage(sk, sg_page(sg), offset,
sg                456 drivers/scsi/iscsi_tcp.c iscsi_sw_tcp_send_data_prep(struct iscsi_conn *conn, struct scatterlist *sg,
sg                478 drivers/scsi/iscsi_tcp.c 				     sg, count, offset, len,
sg                485 drivers/scsi/libfc/fc_fcp.c 	struct scatterlist *sg;
sg                523 drivers/scsi/libfc/fc_fcp.c 	sg = scsi_sglist(sc);
sg                527 drivers/scsi/libfc/fc_fcp.c 		copy_len = fc_copy_buffer_to_sglist(buf, len, sg, &nents,
sg                531 drivers/scsi/libfc/fc_fcp.c 		copy_len = fc_copy_buffer_to_sglist(buf, len, sg, &nents,
sg                598 drivers/scsi/libfc/fc_fcp.c 	struct scatterlist *sg;
sg                650 drivers/scsi/libfc/fc_fcp.c 	sg = scsi_sglist(sc);
sg                652 drivers/scsi/libfc/fc_fcp.c 	while (remaining > 0 && sg) {
sg                653 drivers/scsi/libfc/fc_fcp.c 		if (offset >= sg->length) {
sg                654 drivers/scsi/libfc/fc_fcp.c 			offset -= sg->length;
sg                655 drivers/scsi/libfc/fc_fcp.c 			sg = sg_next(sg);
sg                677 drivers/scsi/libfc/fc_fcp.c 		off = offset + sg->offset;
sg                678 drivers/scsi/libfc/fc_fcp.c 		sg_bytes = min(tlen, sg->length - offset);
sg                681 drivers/scsi/libfc/fc_fcp.c 		page = sg_page(sg) + (off >> PAGE_SHIFT);
sg                100 drivers/scsi/libfc/fc_libfc.c 			     struct scatterlist *sg,
sg                107 drivers/scsi/libfc/fc_libfc.c 	while (remaining > 0 && sg) {
sg                111 drivers/scsi/libfc/fc_libfc.c 		if (*offset >= sg->length) {
sg                119 drivers/scsi/libfc/fc_libfc.c 			*offset -= sg->length;
sg                120 drivers/scsi/libfc/fc_libfc.c 			sg = sg_next(sg);
sg                123 drivers/scsi/libfc/fc_libfc.c 		sg_bytes = min(remaining, sg->length - *offset);
sg                129 drivers/scsi/libfc/fc_libfc.c 		off = *offset + sg->offset;
sg                132 drivers/scsi/libfc/fc_libfc.c 		page_addr = kmap_atomic(sg_page(sg) + (off >> PAGE_SHIFT));
sg                123 drivers/scsi/libfc/fc_libfc.h 			     struct scatterlist *sg,
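The fc_libfc.c excerpts are the core of fc_copy_buffer_to_sglist(): skip whole elements while the caller's offset lies past them, then copy in pieces that never cross a page, mapping each page with kmap_atomic(). A kernel-style condensation (the real helper also folds the bytes into a CRC, omitted here):

static size_t copy_to_sglist(const void *buf, size_t len,
			     struct scatterlist *sg, u32 *offset)
{
	size_t copied = 0;

	while (len > 0 && sg) {
		size_t off, sg_bytes;
		void *page_addr;

		if (*offset >= sg->length) {
			/* caller's offset is past this element: skip it */
			*offset -= sg->length;
			sg = sg_next(sg);
			continue;
		}
		sg_bytes = min_t(size_t, len, sg->length - *offset);
		off = *offset + sg->offset;
		/* never cross a page boundary in one memcpy */
		sg_bytes = min_t(size_t, sg_bytes,
				 PAGE_SIZE - (off & ~PAGE_MASK));
		page_addr = kmap_atomic(sg_page(sg) + (off >> PAGE_SHIFT));
		memcpy((char *)page_addr + (off & ~PAGE_MASK), buf, sg_bytes);
		kunmap_atomic(page_addr);
		buf = (const char *)buf + sg_bytes;
		*offset += sg_bytes;
		copied += sg_bytes;
		len -= sg_bytes;
	}
	return copied;
}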
sg                143 drivers/scsi/libfc/fc_lport.c 	struct scatterlist *sg;
sg               1911 drivers/scsi/libfc/fc_lport.c 		fc_copy_buffer_to_sglist(buf, len, info->sg, &info->nents,
sg               1979 drivers/scsi/libfc/fc_lport.c 	info->sg = job->reply_payload.sg_list;
sg               2039 drivers/scsi/libfc/fc_lport.c 	info->sg = job->reply_payload.sg_list;
sg                 91 drivers/scsi/libiscsi_tcp.c 			  struct scatterlist *sg, unsigned int offset)
sg                 93 drivers/scsi/libiscsi_tcp.c 	segment->sg = sg;
sg                 95 drivers/scsi/libiscsi_tcp.c 	segment->size = min(sg->length - offset,
sg                111 drivers/scsi/libiscsi_tcp.c 	struct scatterlist *sg;
sg                113 drivers/scsi/libiscsi_tcp.c 	if (segment->data != NULL || !segment->sg)
sg                116 drivers/scsi/libiscsi_tcp.c 	sg = segment->sg;
sg                118 drivers/scsi/libiscsi_tcp.c 	BUG_ON(sg->length == 0);
sg                131 drivers/scsi/libiscsi_tcp.c 	if (!recv && page_count(sg_page(sg)) >= 1 && !PageSlab(sg_page(sg)))
sg                136 drivers/scsi/libiscsi_tcp.c 		segment->sg_mapped = kmap_atomic(sg_page(sg));
sg                140 drivers/scsi/libiscsi_tcp.c 		segment->sg_mapped = kmap(sg_page(sg));
sg                143 drivers/scsi/libiscsi_tcp.c 	segment->data = segment->sg_mapped + sg->offset + segment->sg_offset;
sg                152 drivers/scsi/libiscsi_tcp.c 			kunmap(sg_page(segment->sg));
sg                170 drivers/scsi/libiscsi_tcp.c 	segment->sg = NULL;
sg                194 drivers/scsi/libiscsi_tcp.c 	struct scatterlist sg;
sg                208 drivers/scsi/libiscsi_tcp.c 			sg_init_table(&sg, 1);
sg                209 drivers/scsi/libiscsi_tcp.c 			sg_set_page(&sg, sg_page(segment->sg), copied,
sg                211 drivers/scsi/libiscsi_tcp.c 							segment->sg->offset);
sg                213 drivers/scsi/libiscsi_tcp.c 			sg_init_one(&sg, segment->data + segment->copied,
sg                215 drivers/scsi/libiscsi_tcp.c 		ahash_request_set_crypt(segment->hash, &sg, NULL, copied);
sg                237 drivers/scsi/libiscsi_tcp.c 		iscsi_tcp_segment_init_sg(segment, sg_next(segment->sg),
sg                317 drivers/scsi/libiscsi_tcp.c 	struct scatterlist sg;
sg                319 drivers/scsi/libiscsi_tcp.c 	sg_init_one(&sg, hdr, hdrlen);
sg                320 drivers/scsi/libiscsi_tcp.c 	ahash_request_set_crypt(hash, &sg, digest, hdrlen);
sg                376 drivers/scsi/libiscsi_tcp.c 	struct scatterlist *sg;
sg                380 drivers/scsi/libiscsi_tcp.c 	for_each_sg(sg_list, sg, sg_count, i) {
sg                381 drivers/scsi/libiscsi_tcp.c 		if (offset < sg->length) {
sg                382 drivers/scsi/libiscsi_tcp.c 			iscsi_tcp_segment_init_sg(segment, sg, offset);
sg                385 drivers/scsi/libiscsi_tcp.c 		offset -= sg->length;
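In the libiscsi_tcp.c excerpts, even a flat header buffer goes through a scatterlist before hashing, because the ahash API only takes scatterlists: sg_init_one() wraps the buffer, ahash_request_set_crypt() attaches it, and the digest lands in the result pointer. Kernel-style sketch, assuming the request was allocated and its tfm set up elsewhere:

static int digest_header(struct ahash_request *hash, void *hdr,
			 unsigned int hdrlen, unsigned char *digest)
{
	struct scatterlist sg;

	sg_init_one(&sg, hdr, hdrlen);
	ahash_request_set_crypt(hash, &sg, digest, hdrlen);
	return crypto_ahash_digest(hash);
}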
sg                165 drivers/scsi/libsas/sas_ata.c 	struct scatterlist *sg;
sg                204 drivers/scsi/libsas/sas_ata.c 		for_each_sg(qc->sg, sg, qc->n_elem, si)
sg                205 drivers/scsi/libsas/sas_ata.c 			xfer += sg_dma_len(sg);
sg                212 drivers/scsi/libsas/sas_ata.c 	task->scatter = qc->sg;
sg               2672 drivers/scsi/lpfc/lpfc_nvmet.c 		sgel = &rsp->sg[0];
sg               2903 drivers/scsi/lpfc/lpfc_nvmet.c 	for_each_sg(rsp->sg, sgel, rsp->sg_cnt, i) {
sg                660 drivers/scsi/megaraid.c 			struct scatterlist *sg;
sg                662 drivers/scsi/megaraid.c 			sg = scsi_sglist(cmd);
sg                663 drivers/scsi/megaraid.c 			buf = kmap_atomic(sg_page(sg)) + sg->offset;
sg                666 drivers/scsi/megaraid.c 			kunmap_atomic(buf - sg->offset);
sg               1720 drivers/scsi/megaraid.c 	struct scatterlist *sg;
sg               1741 drivers/scsi/megaraid.c 		sg = scsi_sglist(cmd);
sg               1742 drivers/scsi/megaraid.c 		scb->dma_h_bulkdata = sg_dma_address(sg);
sg               1744 drivers/scsi/megaraid.c 		*len = sg_dma_len(sg);
sg               1748 drivers/scsi/megaraid.c 	scsi_for_each_sg(cmd, sg, sgcnt, idx) {
sg               1750 drivers/scsi/megaraid.c 			scb->sgl64[idx].address = sg_dma_address(sg);
sg               1751 drivers/scsi/megaraid.c 			*len += scb->sgl64[idx].length = sg_dma_len(sg);
sg               1753 drivers/scsi/megaraid.c 			scb->sgl[idx].address = sg_dma_address(sg);
sg               1754 drivers/scsi/megaraid.c 			*len += scb->sgl[idx].length = sg_dma_len(sg);
sg                554 drivers/scsi/mvsas/mv_64xx.c 	struct scatterlist *sg;
sg                556 drivers/scsi/mvsas/mv_64xx.c 	for_each_sg(scatter, sg, nr, i) {
sg                557 drivers/scsi/mvsas/mv_64xx.c 		buf_prd->addr = cpu_to_le64(sg_dma_address(sg));
sg                558 drivers/scsi/mvsas/mv_64xx.c 		buf_prd->len = cpu_to_le32(sg_dma_len(sg));
sg                786 drivers/scsi/mvsas/mv_94xx.c 	struct scatterlist *sg;
sg                790 drivers/scsi/mvsas/mv_94xx.c 	for_each_sg(scatter, sg, nr, i) {
sg                791 drivers/scsi/mvsas/mv_94xx.c 		buf_prd->addr = cpu_to_le64(sg_dma_address(sg));
sg                792 drivers/scsi/mvsas/mv_94xx.c 		im_len.len = sg_dma_len(sg);
sg                192 drivers/scsi/mvumi.c 	struct scatterlist *sg;
sg                208 drivers/scsi/mvumi.c 	scsi_for_each_sg(scmd, sg, *sg_count, i) {
sg                209 drivers/scsi/mvumi.c 		busaddr = sg_dma_address(sg);
sg                213 drivers/scsi/mvumi.c 		sgd_setsz(mhba, m_sg, cpu_to_le32(sg_dma_len(sg)));
sg               7648 drivers/scsi/ncr53c8xx.c 		struct scatterlist *sg;
sg               7658 drivers/scsi/ncr53c8xx.c 		scsi_for_each_sg(cmd, sg, use_sg, segment) {
sg               7659 drivers/scsi/ncr53c8xx.c 			dma_addr_t baddr = sg_dma_address(sg);
sg               7660 drivers/scsi/ncr53c8xx.c 			unsigned int len = sg_dma_len(sg);
sg                854 drivers/scsi/nsp32.c 	struct scatterlist *sg;
sg                870 drivers/scsi/nsp32.c 		scsi_for_each_sg(SCpnt, sg, num, i) {
sg                874 drivers/scsi/nsp32.c 			sgt[i].addr = cpu_to_le32(sg_dma_address(sg));
sg                875 drivers/scsi/nsp32.c 			sgt[i].len  = cpu_to_le32(sg_dma_len(sg));
sg                412 drivers/scsi/pcmcia/sym53c500_cs.c 			struct scatterlist *sg;
sg                421 drivers/scsi/pcmcia/sym53c500_cs.c 			scsi_for_each_sg(curSC, sg, scsi_sg_count(curSC), i) {
sg                423 drivers/scsi/pcmcia/sym53c500_cs.c 				    sg_virt(sg), sg->length);
sg                431 drivers/scsi/pcmcia/sym53c500_cs.c 			struct scatterlist *sg;
sg                440 drivers/scsi/pcmcia/sym53c500_cs.c 			scsi_for_each_sg(curSC, sg, scsi_sg_count(curSC), i) {
sg                442 drivers/scsi/pcmcia/sym53c500_cs.c 					sg_virt(sg), sg->length);
sg               4215 drivers/scsi/pm8001/pm8001_hwi.c 	struct scatterlist *sg;
sg               4218 drivers/scsi/pm8001/pm8001_hwi.c 	for_each_sg(scatter, sg, nr, i) {
sg               4219 drivers/scsi/pm8001/pm8001_hwi.c 		buf_prd->addr = cpu_to_le64(sg_dma_address(sg));
sg               4220 drivers/scsi/pm8001/pm8001_hwi.c 		buf_prd->im_len.len = cpu_to_le32(sg_dma_len(sg));
sg               3258 drivers/scsi/pmcraid.c 	struct scatterlist *sg;
sg               3267 drivers/scsi/pmcraid.c 	sg = sglist->scatterlist;
sg               3269 drivers/scsi/pmcraid.c 	for (i = 0; i < (len / bsize_elem); i++, sg = sg_next(sg), buffer += bsize_elem) {
sg               3270 drivers/scsi/pmcraid.c 		struct page *page = sg_page(sg);
sg               3285 drivers/scsi/pmcraid.c 		sg->length = bsize_elem;
sg               3289 drivers/scsi/pmcraid.c 		struct page *page = sg_page(sg);
sg               3300 drivers/scsi/pmcraid.c 		sg->length = len % bsize_elem;
sg               3490 drivers/scsi/pmcraid.c 	struct scatterlist *sg = NULL;
sg               3519 drivers/scsi/pmcraid.c 	for_each_sg(sglist->scatterlist, sg, sglist->num_dma_sg, i) {
sg               3520 drivers/scsi/pmcraid.c 		ioadl[i].data_len = cpu_to_le32(sg_dma_len(sg));
sg               3521 drivers/scsi/pmcraid.c 		ioadl[i].address = cpu_to_le64(sg_dma_address(sg));
sg                485 drivers/scsi/qedf/qedf_io.c 	struct scatterlist *sg;
sg                495 drivers/scsi/qedf/qedf_io.c 	sg = scsi_sglist(sc);
sg                502 drivers/scsi/qedf/qedf_io.c 	scsi_for_each_sg(sc, sg, sg_count, i) {
sg                503 drivers/scsi/qedf/qedf_io.c 		sg_len = (u32)sg_dma_len(sg);
sg                504 drivers/scsi/qedf/qedf_io.c 		addr = (u64)sg_dma_address(sg);
sg               1842 drivers/scsi/qedi/qedi_fw.c 	struct scatterlist *sg;
sg               1860 drivers/scsi/qedi/qedi_fw.c 	sg = scsi_sglist(sc);
sg               1861 drivers/scsi/qedi/qedi_fw.c 	if ((sg_count == 1) && (sg_dma_len(sg) <= MAX_SGLEN_FOR_CACHESGL)) {
sg               1862 drivers/scsi/qedi/qedi_fw.c 		sg_len = sg_dma_len(sg);
sg               1863 drivers/scsi/qedi/qedi_fw.c 		addr = (u64)sg_dma_address(sg);
sg               1876 drivers/scsi/qedi/qedi_fw.c 	scsi_for_each_sg(sc, sg, sg_count, i) {
sg               1877 drivers/scsi/qedi/qedi_fw.c 		sg_len = sg_dma_len(sg);
sg               1878 drivers/scsi/qedi/qedi_fw.c 		addr = (u64)sg_dma_address(sg);
sg                181 drivers/scsi/qedi/qedi_iscsi.h 	struct scatterlist *sg;
sg               2852 drivers/scsi/qla1280.c 		struct scatterlist *sg, *s;
sg               2855 drivers/scsi/qla1280.c 		sg = scsi_sglist(cmd);
sg               2861 drivers/scsi/qla1280.c 		for_each_sg(sg, s, seg_cnt, cnt) {
sg               2892 drivers/scsi/qla1280.c 			sg = s;
sg               2918 drivers/scsi/qla1280.c 			for_each_sg(sg, s, remseg, cnt) {
sg               3107 drivers/scsi/qla1280.c 		struct scatterlist *sg, *s;
sg               3110 drivers/scsi/qla1280.c 		sg = scsi_sglist(cmd);
sg               3116 drivers/scsi/qla1280.c 		qla1280_dump_buffer(1, (char *)sg, 4 * 16);
sg               3119 drivers/scsi/qla1280.c 		for_each_sg(sg, s, seg_cnt, cnt) {
sg               3137 drivers/scsi/qla1280.c 			sg = s;
sg               3165 drivers/scsi/qla1280.c 			for_each_sg(sg, s, remseg, cnt) {
sg               3397 drivers/scsi/qla2xxx/qla_def.h 	struct scatterlist *sg;
sg                 12 drivers/scsi/qla2xxx/qla_dsd.h static inline void append_dsd32(struct dsd32 **dsd, struct scatterlist *sg)
sg                 14 drivers/scsi/qla2xxx/qla_dsd.h 	put_unaligned_le32(sg_dma_address(sg), &(*dsd)->address);
sg                 15 drivers/scsi/qla2xxx/qla_dsd.h 	put_unaligned_le32(sg_dma_len(sg),     &(*dsd)->length);
sg                 25 drivers/scsi/qla2xxx/qla_dsd.h static inline void append_dsd64(struct dsd64 **dsd, struct scatterlist *sg)
sg                 27 drivers/scsi/qla2xxx/qla_dsd.h 	put_unaligned_le64(sg_dma_address(sg), &(*dsd)->address);
sg                 28 drivers/scsi/qla2xxx/qla_dsd.h 	put_unaligned_le32(sg_dma_len(sg),     &(*dsd)->length);
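The qla_dsd.h helpers above store an element's DMA address and length through unaligned little-endian accessors and (in the full source, beyond the lines shown) advance the *dsd cursor. The qla_iocb.c call sites that follow use them in a loop roughly like this sketch, where fill_dsd64() is a hypothetical wrapper:

    static void fill_dsd64(struct scsi_cmnd *cmd, struct dsd64 *dsd, int tot_dsds)
    {
            struct scatterlist *sg;
            int i;

            scsi_for_each_sg(cmd, sg, tot_dsds, i)
                    append_dsd64(&dsd, sg); /* store addr+len, advance cursor */
    }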
sg                198 drivers/scsi/qla2xxx/qla_iocb.c 	struct scatterlist *sg;
sg                220 drivers/scsi/qla2xxx/qla_iocb.c 	scsi_for_each_sg(cmd, sg, tot_dsds, i) {
sg                234 drivers/scsi/qla2xxx/qla_iocb.c 		append_dsd32(&cur_dsd, sg);
sg                254 drivers/scsi/qla2xxx/qla_iocb.c 	struct scatterlist *sg;
sg                276 drivers/scsi/qla2xxx/qla_iocb.c 	scsi_for_each_sg(cmd, sg, tot_dsds, i) {
sg                290 drivers/scsi/qla2xxx/qla_iocb.c 		append_dsd64(&cur_dsd, sg);
sg                705 drivers/scsi/qla2xxx/qla_iocb.c 	struct scatterlist *sg;
sg                738 drivers/scsi/qla2xxx/qla_iocb.c 	scsi_for_each_sg(cmd, sg, tot_dsds, i) {
sg                752 drivers/scsi/qla2xxx/qla_iocb.c 		append_dsd64(&cur_dsd, sg);
sg                848 drivers/scsi/qla2xxx/qla_iocb.c 	struct scatterlist *sg;
sg                855 drivers/scsi/qla2xxx/qla_iocb.c 	sg = sgx->cur_sg;
sg                858 drivers/scsi/qla2xxx/qla_iocb.c 	sg_dma_addr = sg_dma_address(sg);
sg                859 drivers/scsi/qla2xxx/qla_iocb.c 	sg_len = sg_dma_len(sg);
sg                877 drivers/scsi/qla2xxx/qla_iocb.c 		sg = sg_next(sg);
sg                879 drivers/scsi/qla2xxx/qla_iocb.c 		sgx->cur_sg = sg;
sg                917 drivers/scsi/qla2xxx/qla_iocb.c 		sgx.cur_sg    = tc->sg;
sg               1008 drivers/scsi/qla2xxx/qla_iocb.c 	struct scatterlist *sg, *sgl;
sg               1018 drivers/scsi/qla2xxx/qla_iocb.c 		sgl = tc->sg;
sg               1025 drivers/scsi/qla2xxx/qla_iocb.c 	for_each_sg(sgl, sg, tot_dsds, i) {
sg               1069 drivers/scsi/qla2xxx/qla_iocb.c 		append_dsd64(&cur_dsd, sg);
sg               1085 drivers/scsi/qla2xxx/qla_iocb.c 	struct scatterlist *sg, *sgl;
sg               1120 drivers/scsi/qla2xxx/qla_iocb.c 		for_each_sg(sgl, sg, tot_dsds, i) {
sg               1121 drivers/scsi/qla2xxx/qla_iocb.c 			u64 sle_phys = sg_phys(sg);
sg               1124 drivers/scsi/qla2xxx/qla_iocb.c 			if (MSD(sle_phys + sg->length) ^ MSD(sle_phys)) {
sg               1127 drivers/scsi/qla2xxx/qla_iocb.c 				    __func__, sle_phys, sg->length);
sg               1162 drivers/scsi/qla2xxx/qla_iocb.c 		for_each_sg(sgl, sg, tot_dsds, i) {
sg               1163 drivers/scsi/qla2xxx/qla_iocb.c 			u32 sglen = sg_dma_len(sg);
sg               1167 drivers/scsi/qla2xxx/qla_iocb.c 			    __func__, i, (u64)sg_phys(sg), sglen, ldma_sg_len,
sg               1226 drivers/scsi/qla2xxx/qla_iocb.c 				    sg_is_last(sg)) {
sg               1321 drivers/scsi/qla2xxx/qla_iocb.c 		for_each_sg(sgl, sg, tot_dsds, i) {
sg               1366 drivers/scsi/qla2xxx/qla_iocb.c 			append_dsd64(&cur_dsd, sg);
sg               2932 drivers/scsi/qla2xxx/qla_iocb.c 	struct scatterlist *sg;
sg               2970 drivers/scsi/qla2xxx/qla_iocb.c 	for_each_sg(bsg_job->reply_payload.sg_list, sg, tot_dsds, index) {
sg               2986 drivers/scsi/qla2xxx/qla_iocb.c 		append_dsd64(&cur_dsd, sg);
sg               2999 drivers/scsi/qla2xxx/qla_iocb.c 	struct scatterlist *sg;
sg               3030 drivers/scsi/qla2xxx/qla_iocb.c 	for_each_sg(bsg_job->request_payload.sg_list, sg, cmd_dsds, index) {
sg               3044 drivers/scsi/qla2xxx/qla_iocb.c 		append_dsd64(&cur_dsd, sg);
sg               3050 drivers/scsi/qla2xxx/qla_iocb.c 	for_each_sg(bsg_job->reply_payload.sg_list, sg, rsp_dsds, index) {
sg               3064 drivers/scsi/qla2xxx/qla_iocb.c 		append_dsd64(&cur_dsd, sg);
sg               3645 drivers/scsi/qla2xxx/qla_iocb.c 	struct scatterlist *sg;
sg               3681 drivers/scsi/qla2xxx/qla_iocb.c 	for_each_sg(bsg_job->request_payload.sg_list, sg,
sg               3695 drivers/scsi/qla2xxx/qla_iocb.c 		append_dsd64(&cur_dsd, sg);
sg               3703 drivers/scsi/qla2xxx/qla_iocb.c 	for_each_sg(bsg_job->reply_payload.sg_list, sg,
sg               3717 drivers/scsi/qla2xxx/qla_iocb.c 		append_dsd64(&cur_dsd, sg);
sg               2186 drivers/scsi/qla2xxx/qla_isr.c 			struct scatterlist *sg;
sg               2190 drivers/scsi/qla2xxx/qla_isr.c 			scsi_for_each_prot_sg(cmd, sg,
sg               2192 drivers/scsi/qla2xxx/qla_isr.c 				num_ent = sg_dma_len(sg) / 8;
sg               2209 drivers/scsi/qla2xxx/qla_isr.c 			spt = page_address(sg_page(sg)) + sg->offset;
sg               3001 drivers/scsi/qla2xxx/qla_mr.c 	struct scatterlist *sg;
sg               3037 drivers/scsi/qla2xxx/qla_mr.c 	scsi_for_each_sg(cmd, sg, tot_dsds, i) {
sg               3052 drivers/scsi/qla2xxx/qla_mr.c 		append_dsd64(&cur_dsd, sg);
sg               3295 drivers/scsi/qla2xxx/qla_mr.c 		struct scatterlist *sg;
sg               3325 drivers/scsi/qla2xxx/qla_mr.c 			for_each_sg(bsg_job->request_payload.sg_list, sg,
sg               3344 drivers/scsi/qla2xxx/qla_mr.c 				append_dsd64(&cur_dsd, sg);
sg               3381 drivers/scsi/qla2xxx/qla_mr.c 			for_each_sg(bsg_job->reply_payload.sg_list, sg,
sg               3400 drivers/scsi/qla2xxx/qla_mr.c 				append_dsd64(&cur_dsd, sg);
sg                368 drivers/scsi/qla2xxx/qla_nvme.c 	struct scatterlist *sgl, *sg;
sg                473 drivers/scsi/qla2xxx/qla_nvme.c 	for_each_sg(sgl, sg, tot_dsds, i) {
sg                499 drivers/scsi/qla2xxx/qla_nvme.c 		append_dsd64(&cur_dsd, sg);
sg               2390 drivers/scsi/qla2xxx/qla_target.c 	prm->sg = (struct scatterlist *)cmd->sg;
sg               2391 drivers/scsi/qla2xxx/qla_target.c 	prm->seg_cnt = dma_map_sg(&cmd->qpair->pdev->dev, cmd->sg,
sg               2454 drivers/scsi/qla2xxx/qla_target.c 	dma_unmap_sg(&qpair->pdev->dev, cmd->sg, cmd->sg_cnt,
sg               2624 drivers/scsi/qla2xxx/qla_target.c 			append_dsd64(&cur_dsd, prm->sg);
sg               2625 drivers/scsi/qla2xxx/qla_target.c 			prm->sg = sg_next(prm->sg);
sg               2662 drivers/scsi/qla2xxx/qla_target.c 		append_dsd64(&cur_dsd, prm->sg);
sg               2663 drivers/scsi/qla2xxx/qla_target.c 		prm->sg = sg_next(prm->sg);
sg               2734 drivers/scsi/qla2xxx/qla_target.c 	prm->sg = NULL;
sg               3157 drivers/scsi/qla2xxx/qla_target.c 	tc.sg = cmd->sg;
sg               3365 drivers/scsi/qla2xxx/qla_target.c 	prm.sg = NULL;
sg               3802 drivers/scsi/qla2xxx/qla_target.c 		kfree(cmd->sg);
sg                885 drivers/scsi/qla2xxx/qla_target.h 	struct scatterlist *sg;	/* cmd data buffer SG vector */
sg                970 drivers/scsi/qla2xxx/qla_target.h 	struct scatterlist *sg;	/* cmd data buffer SG vector */
sg                387 drivers/scsi/qla2xxx/tcm_qla2xxx.c 	cmd->sg = se_cmd->t_data_sg;
sg                648 drivers/scsi/qla2xxx/tcm_qla2xxx.c 	cmd->sg = se_cmd->t_data_sg;
sg                685 drivers/scsi/qla2xxx/tcm_qla2xxx.c 	cmd->sg = NULL;
sg                156 drivers/scsi/qla4xxx/ql4_iocb.c 	struct scatterlist *sg;
sg                171 drivers/scsi/qla4xxx/ql4_iocb.c 	scsi_for_each_sg(cmd, sg, tot_dsds, i) {
sg                185 drivers/scsi/qla4xxx/ql4_iocb.c 		sle_dma = sg_dma_address(sg);
sg                188 drivers/scsi/qla4xxx/ql4_iocb.c 		cur_dsd->count = cpu_to_le32(sg_dma_len(sg));
sg                305 drivers/scsi/qlogicfas408.c 		struct scatterlist *sg;
sg                314 drivers/scsi/qlogicfas408.c 		scsi_for_each_sg(cmd, sg, scsi_sg_count(cmd), i) {
sg                320 drivers/scsi/qlogicfas408.c 			buf = sg_virt(sg);
sg                321 drivers/scsi/qlogicfas408.c 			if (ql_pdma(priv, phase, buf, sg->length))
sg                902 drivers/scsi/qlogicpti.c 	struct scatterlist *sg, *s;
sg                908 drivers/scsi/qlogicpti.c 		sg = scsi_sglist(Cmnd);
sg                909 drivers/scsi/qlogicpti.c 		sg_count = dma_map_sg(&qpti->op->dev, sg,
sg                920 drivers/scsi/qlogicpti.c 		for_each_sg(sg, s, n, i) {
sg                925 drivers/scsi/qlogicpti.c 		sg = s;
sg                944 drivers/scsi/qlogicpti.c 			for_each_sg(sg, s, n, i) {
sg                949 drivers/scsi/qlogicpti.c 			sg = s;
sg               1584 drivers/scsi/scsi_lib.c 	struct scatterlist *sg;
sg               1592 drivers/scsi/scsi_lib.c 	sg = (void *)cmd + sizeof(struct scsi_cmnd) + shost->hostt->cmd_size;
sg               1593 drivers/scsi/scsi_lib.c 	cmd->sdb.table.sgl = sg;
sg               1745 drivers/scsi/scsi_lib.c 	struct scatterlist *sg;
sg               1756 drivers/scsi/scsi_lib.c 		sg = (void *)cmd + sizeof(struct scsi_cmnd) +
sg               1758 drivers/scsi/scsi_lib.c 		cmd->prot_sdb = (void *)sg + scsi_mq_inline_sgl_size(shost);
sg               2861 drivers/scsi/scsi_lib.c 	struct scatterlist *sg;
sg               2866 drivers/scsi/scsi_lib.c 	for_each_sg(sgl, sg, sg_count, i) {
sg               2868 drivers/scsi/scsi_lib.c 		sg_len += sg->length;
sg               2882 drivers/scsi/scsi_lib.c 	*offset = *offset - len_complete + sg->offset;
sg               2885 drivers/scsi/scsi_lib.c 	page = nth_page(sg_page(sg), (*offset >> PAGE_SHIFT));
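The scsi_lib.c walk above locates the page backing an arbitrary byte offset of a scatterlist by accumulating element lengths, then indexing into the matching element's pages with nth_page(). A self-contained sketch of the same computation, with a hypothetical sg_offset_to_page() name:

    #include <linux/mm.h>
    #include <linux/scatterlist.h>

    /*
     * Return the page backing byte *offset of the list; *offset is
     * rewritten relative to that element's first page (mask with
     * ~PAGE_MASK for the in-page offset).
     */
    static struct page *sg_offset_to_page(struct scatterlist *sgl, int nents,
                                          size_t *offset)
    {
            struct scatterlist *sg;
            size_t len_complete = 0;
            int i;

            for_each_sg(sgl, sg, nents, i) {
                    if (len_complete + sg->length > *offset) {
                            *offset = *offset - len_complete + sg->offset;
                            return nth_page(sg_page(sg), *offset >> PAGE_SHIFT);
                    }
                    len_complete += sg->length;
            }
            return NULL;            /* offset past the end of the list */
    }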
sg               4777 drivers/scsi/smartpqi/smartpqi_init.c 	struct pqi_sg_descriptor *sg_descriptor, struct scatterlist *sg)
sg               4779 drivers/scsi/smartpqi/smartpqi_init.c 	u64 address = (u64)sg_dma_address(sg);
sg               4780 drivers/scsi/smartpqi/smartpqi_init.c 	unsigned int length = sg_dma_len(sg);
sg               4797 drivers/scsi/smartpqi/smartpqi_init.c 	struct scatterlist *sg;
sg               4810 drivers/scsi/smartpqi/smartpqi_init.c 	sg = scsi_sglist(scmd);
sg               4818 drivers/scsi/smartpqi/smartpqi_init.c 		pqi_set_sg_descriptor(sg_descriptor, sg);
sg               4838 drivers/scsi/smartpqi/smartpqi_init.c 		sg = sg_next(sg);
sg               4861 drivers/scsi/smartpqi/smartpqi_init.c 	struct scatterlist *sg;
sg               4875 drivers/scsi/smartpqi/smartpqi_init.c 	sg = scsi_sglist(scmd);
sg               4882 drivers/scsi/smartpqi/smartpqi_init.c 		pqi_set_sg_descriptor(sg_descriptor, sg);
sg               4902 drivers/scsi/smartpqi/smartpqi_init.c 		sg = sg_next(sg);
sg                168 drivers/scsi/snic/snic_scsi.c 	struct scatterlist *sg;
sg                180 drivers/scsi/snic/snic_scsi.c 		for_each_sg(scsi_sglist(sc), sg, sg_cnt, i) {
sg                181 drivers/scsi/snic/snic_scsi.c 			sgd->addr = cpu_to_le64(sg_dma_address(sg));
sg                182 drivers/scsi/snic/snic_scsi.c 			sgd->len = cpu_to_le32(sg_dma_len(sg));
sg                457 drivers/scsi/sr.c 		struct scatterlist *sg;
sg                460 drivers/scsi/sr.c 		scsi_for_each_sg(SCpnt, sg, sg_count, i)
sg                461 drivers/scsi/sr.c 			size += sg->length;
sg                429 drivers/scsi/stex.c 	struct scatterlist *sg;
sg                446 drivers/scsi/stex.c 		scsi_for_each_sg(cmd, sg, nseg, i) {
sg                447 drivers/scsi/stex.c 			table[i].count = cpu_to_le32((u32)sg_dma_len(sg));
sg                448 drivers/scsi/stex.c 			table[i].addr = cpu_to_le64(sg_dma_address(sg));
sg                461 drivers/scsi/stex.c 	struct scatterlist *sg;
sg                478 drivers/scsi/stex.c 		scsi_for_each_sg(cmd, sg, nseg, i) {
sg                479 drivers/scsi/stex.c 			table[i].count = cpu_to_le32((u32)sg_dma_len(sg));
sg                481 drivers/scsi/stex.c 				cpu_to_le32(sg_dma_address(sg) & 0xffffffff);
sg                483 drivers/scsi/stex.c 				cpu_to_le32((sg_dma_address(sg) >> 16) >> 16);
sg               1199 drivers/scsi/sym53c8xx_2/sym_fw1.h 		offsetof (struct sym_ccb, phys.pm0.sg),
sg               1219 drivers/scsi/sym53c8xx_2/sym_fw1.h 		offsetof (struct sym_ccb, phys.pm0.sg),
sg               1275 drivers/scsi/sym53c8xx_2/sym_fw1.h 		offsetof (struct sym_ccb, phys.pm1.sg),
sg               1295 drivers/scsi/sym53c8xx_2/sym_fw1.h 		offsetof (struct sym_ccb, phys.pm1.sg),
sg               1085 drivers/scsi/sym53c8xx_2/sym_fw2.h 		offsetof (struct sym_ccb, phys.pm0.sg),
sg               1105 drivers/scsi/sym53c8xx_2/sym_fw2.h 		offsetof (struct sym_ccb, phys.pm0.sg),
sg               1150 drivers/scsi/sym53c8xx_2/sym_fw2.h 		offsetof (struct sym_ccb, phys.pm1.sg),
sg               1170 drivers/scsi/sym53c8xx_2/sym_fw2.h 		offsetof (struct sym_ccb, phys.pm1.sg),
sg               1682 drivers/scsi/sym53c8xx_2/sym_fw2.h 		offsetof(struct sym_ccb, phys.pm0.sg.size),
sg               1684 drivers/scsi/sym53c8xx_2/sym_fw2.h 		offsetof(struct sym_ccb, phys.pm0.sg.addr),
sg               1712 drivers/scsi/sym53c8xx_2/sym_fw2.h 		offsetof(struct sym_ccb, phys.pm1.sg.size),
sg               1714 drivers/scsi/sym53c8xx_2/sym_fw2.h 		offsetof(struct sym_ccb, phys.pm1.sg.addr),
sg                254 drivers/scsi/sym53c8xx_2/sym_glue.c 		struct scatterlist *sg;
sg                265 drivers/scsi/sym53c8xx_2/sym_glue.c 		scsi_for_each_sg(cmd, sg, use_sg, segment) {
sg                266 drivers/scsi/sym53c8xx_2/sym_glue.c 			dma_addr_t baddr = sg_dma_address(sg);
sg                267 drivers/scsi/sym53c8xx_2/sym_glue.c 			unsigned int len = sg_dma_len(sg);
sg               2598 drivers/scsi/sym53c8xx_2/sym_hipd.c 	pm->sg.addr = cpu_to_scr(oadr + olen - rest);
sg               2599 drivers/scsi/sym53c8xx_2/sym_hipd.c 	pm->sg.size = cpu_to_scr(rest);
sg               2618 drivers/scsi/sym53c8xx_2/sym_hipd.c 		tmp = scr_to_cpu(pm->sg.addr);
sg               2620 drivers/scsi/sym53c8xx_2/sym_hipd.c 		pm->sg.addr = cpu_to_scr(tmp + 1);
sg               2621 drivers/scsi/sym53c8xx_2/sym_hipd.c 		tmp = scr_to_cpu(pm->sg.size);
sg               2623 drivers/scsi/sym53c8xx_2/sym_hipd.c 		pm->sg.size = cpu_to_scr(tmp - 1);
sg               2642 drivers/scsi/sym53c8xx_2/sym_hipd.c 			(unsigned)scr_to_cpu(pm->sg.addr),
sg               2643 drivers/scsi/sym53c8xx_2/sym_hipd.c 			(unsigned)scr_to_cpu(pm->sg.size),
sg               3649 drivers/scsi/sym53c8xx_2/sym_hipd.c 		dp_ofs -= scr_to_cpu(pm->sg.size) & 0x00ffffff;
sg               3816 drivers/scsi/sym53c8xx_2/sym_hipd.c 	pm->sg.addr = cpu_to_scr(tmp);
sg               3817 drivers/scsi/sym53c8xx_2/sym_hipd.c 	pm->sg.size = cpu_to_scr(-dp_ofs);
sg                557 drivers/scsi/sym53c8xx_2/sym_hipd.h 	struct	sym_tblmove sg;	/* Updated interrupted SG block	*/
sg               2109 drivers/scsi/ufs/ufshcd.c 	struct scatterlist *sg;
sg               2130 drivers/scsi/ufs/ufshcd.c 		scsi_for_each_sg(cmd, sg, sg_segments, i) {
sg               2132 drivers/scsi/ufs/ufshcd.c 				cpu_to_le32(((u32) sg_dma_len(sg))-1);
sg               2134 drivers/scsi/ufs/ufshcd.c 				cpu_to_le32(lower_32_bits(sg->dma_address));
sg               2136 drivers/scsi/ufs/ufshcd.c 				cpu_to_le32(upper_32_bits(sg->dma_address));
sg                234 drivers/scsi/virtio_scsi.c 	struct scatterlist sg;
sg                238 drivers/scsi/virtio_scsi.c 	sg_init_one(&sg, &event_node->event, sizeof(struct virtio_scsi_event));
sg                242 drivers/scsi/virtio_scsi.c 	err = virtqueue_add_inbuf(vscsi->event_vq.vq, &sg, 1, event_node,
sg                330 drivers/scsi/vmw_pvscsi.c 			     struct scatterlist *sg, unsigned count)
sg                338 drivers/scsi/vmw_pvscsi.c 	for (i = 0; i < count; i++, sg = sg_next(sg)) {
sg                339 drivers/scsi/vmw_pvscsi.c 		sge[i].addr   = sg_dma_address(sg);
sg                340 drivers/scsi/vmw_pvscsi.c 		sge[i].length = sg_dma_len(sg);
sg                355 drivers/scsi/vmw_pvscsi.c 	struct scatterlist *sg;
sg                362 drivers/scsi/vmw_pvscsi.c 	sg = scsi_sglist(cmd);
sg                372 drivers/scsi/vmw_pvscsi.c 			pvscsi_create_sg(ctx, sg, segs);
sg                386 drivers/scsi/vmw_pvscsi.c 			e->dataAddr = sg_dma_address(sg);
sg                392 drivers/scsi/vmw_pvscsi.c 		ctx->dataPA = dma_map_single(&adapter->dev->dev, sg, bufflen,
sg                255 drivers/scsi/wd719x.c 		struct scatterlist *sg;
sg                262 drivers/scsi/wd719x.c 		scsi_for_each_sg(cmd, sg, count_sg, i) {
sg                263 drivers/scsi/wd719x.c 			scb->sg_list[i].ptr = cpu_to_le32(sg_dma_address(sg));
sg                264 drivers/scsi/wd719x.c 			scb->sg_list[i].length = cpu_to_le32(sg_dma_len(sg));
sg                 87 drivers/scsi/xen-scsifront.c 	struct scsiif_request_segment *sg;	/* scatter/gather elements */
sg                244 drivers/scsi/xen-scsifront.c 	kfree(shadow->sg);
sg                403 drivers/scsi/xen-scsifront.c 	struct scatterlist *sg;
sg                409 drivers/scsi/xen-scsifront.c 	scsi_for_each_sg(sc, sg, scsi_sg_count(sc), i)
sg                410 drivers/scsi/xen-scsifront.c 		data_grants += PFN_UP(sg->offset + sg->length);
sg                419 drivers/scsi/xen-scsifront.c 		shadow->sg = kcalloc(data_grants,
sg                421 drivers/scsi/xen-scsifront.c 		if (!shadow->sg)
sg                424 drivers/scsi/xen-scsifront.c 	seg = shadow->sg ? : shadow->seg;
sg                429 drivers/scsi/xen-scsifront.c 		kfree(shadow->sg);
sg                462 drivers/scsi/xen-scsifront.c 	scsi_for_each_sg(sc, sg, scsi_sg_count(sc), i) {
sg                463 drivers/scsi/xen-scsifront.c 		page = sg_page(sg);
sg                464 drivers/scsi/xen-scsifront.c 		off = sg->offset;
sg                465 drivers/scsi/xen-scsifront.c 		len = sg->length;
sg                276 drivers/spi/spi-ep93xx.c 	struct scatterlist *sg;
sg                331 drivers/spi/spi-ep93xx.c 	for_each_sg(sgt->sgl, sg, sgt->nents, i) {
sg                335 drivers/spi/spi-ep93xx.c 			sg_set_page(sg, virt_to_page(pbuf), bytes,
sg                338 drivers/spi/spi-ep93xx.c 			sg_set_page(sg, virt_to_page(espi->zeropage),
sg                179 drivers/spi/spi-mxs.c 		struct scatterlist	sg;
sg                227 drivers/spi/spi-mxs.c 			sg_init_table(&dma_xfer[sg_count].sg, 1);
sg                228 drivers/spi/spi-mxs.c 			sg_set_page(&dma_xfer[sg_count].sg, vm_page,
sg                231 drivers/spi/spi-mxs.c 			sg_init_one(&dma_xfer[sg_count].sg, buf, min);
sg                234 drivers/spi/spi-mxs.c 		ret = dma_map_sg(ssp->dev, &dma_xfer[sg_count].sg, 1,
sg                254 drivers/spi/spi-mxs.c 				&dma_xfer[sg_count].sg, 1,
sg                290 drivers/spi/spi-mxs.c 		dma_unmap_sg(ssp->dev, &dma_xfer[sg_count].sg, 1,
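The spi-mxs.c entries above show the one-element case: sg_init_one() builds a single-entry list over a linear buffer, which is then mapped and unmapped around the transfer. A sketch of that shape, assuming the buffer is DMA-able (kmalloc'd, not on the stack and not vmalloc'd; the scatterlist struct itself may live on the stack):

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    static int xfer_one_buf(struct device *dev, void *buf, size_t len)
    {
            struct scatterlist sg;

            sg_init_one(&sg, buf, len);     /* one-entry list, end marked */
            if (!dma_map_sg(dev, &sg, 1, DMA_TO_DEVICE))
                    return -ENOMEM;

            /* hand sg_dma_address(&sg)/sg_dma_len(&sg) to the hardware ... */

            dma_unmap_sg(dev, &sg, 1, DMA_TO_DEVICE);
            return 0;
    }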
sg                262 drivers/spi/spi-pic32-sqi.c 	struct scatterlist *sg, *sgl;
sg                296 drivers/spi/spi-pic32-sqi.c 	for_each_sg(sgl, sg, nents, i) {
sg                305 drivers/spi/spi-pic32-sqi.c 		rdesc->xfer_len = sg_dma_len(sg);
sg                313 drivers/spi/spi-pic32-sqi.c 		bd->bd_addr = sg->dma_address;
sg                821 drivers/spi/spi-pl022.c 		struct scatterlist *sg;
sg                829 drivers/spi/spi-pl022.c 		for_each_sg(pl022->sgt_rx.sgl, sg, pl022->sgt_rx.nents, i) {
sg                835 drivers/spi/spi-pl022.c 				       sg_virt(sg),
sg                836 drivers/spi/spi-pl022.c 				       sg_dma_len(sg),
sg                839 drivers/spi/spi-pl022.c 		for_each_sg(pl022->sgt_tx.sgl, sg, pl022->sgt_tx.nents, i) {
sg                845 drivers/spi/spi-pl022.c 				       sg_virt(sg),
sg                846 drivers/spi/spi-pl022.c 				       sg_dma_len(sg),
sg                868 drivers/spi/spi-pl022.c 	struct scatterlist *sg;
sg                875 drivers/spi/spi-pl022.c 		for_each_sg(sgtab->sgl, sg, sgtab->nents, i) {
sg                886 drivers/spi/spi-pl022.c 			sg_set_page(sg, virt_to_page(bufp),
sg                896 drivers/spi/spi-pl022.c 		for_each_sg(sgtab->sgl, sg, sgtab->nents, i) {
sg                901 drivers/spi/spi-pl022.c 			sg_set_page(sg, virt_to_page(pl022->dummypage),
sg                428 drivers/spi/spi-qup.c 	struct scatterlist *sg;
sg                431 drivers/spi/spi-qup.c 	for (sg = sgl; sg; sg = sg_next(sg)) {
sg                432 drivers/spi/spi-qup.c 		unsigned int len = sg_dma_len(sg);
sg                490 drivers/spi/spi-sprd.c 			       struct sg_table *sg,
sg                504 drivers/spi/spi-sprd.c 	desc = dmaengine_prep_slave_sg(dma_chan, sg->sgl, sg->nents, dir, flags);
sg                463 drivers/spi/spi-ti-qspi.c 	struct scatterlist *sg;
sg                468 drivers/spi/spi-ti-qspi.c 	for_each_sg(rx_sg.sgl, sg, rx_sg.nents, i) {
sg                469 drivers/spi/spi-ti-qspi.c 		dma_dst = sg_dma_address(sg);
sg                470 drivers/spi/spi-ti-qspi.c 		len = sg_dma_len(sg);
sg                917 drivers/spi/spi-topcliff-pch.c 	struct scatterlist *sg;
sg               1007 drivers/spi/spi-topcliff-pch.c 	sg = dma->sg_rx_p;
sg               1008 drivers/spi/spi-topcliff-pch.c 	for (i = 0; i < num; i++, sg++) {
sg               1010 drivers/spi/spi-topcliff-pch.c 			sg->offset = size * i;
sg               1011 drivers/spi/spi-topcliff-pch.c 			sg->offset = sg->offset * (*bpw / 8);
sg               1012 drivers/spi/spi-topcliff-pch.c 			sg_set_page(sg, virt_to_page(dma->rx_buf_virt), rem,
sg               1013 drivers/spi/spi-topcliff-pch.c 				    sg->offset);
sg               1014 drivers/spi/spi-topcliff-pch.c 			sg_dma_len(sg) = rem;
sg               1016 drivers/spi/spi-topcliff-pch.c 			sg->offset = size * (i - 1) + rem;
sg               1017 drivers/spi/spi-topcliff-pch.c 			sg->offset = sg->offset * (*bpw / 8);
sg               1018 drivers/spi/spi-topcliff-pch.c 			sg_set_page(sg, virt_to_page(dma->rx_buf_virt), size,
sg               1019 drivers/spi/spi-topcliff-pch.c 				    sg->offset);
sg               1020 drivers/spi/spi-topcliff-pch.c 			sg_dma_len(sg) = size;
sg               1022 drivers/spi/spi-topcliff-pch.c 			sg->offset = size * i;
sg               1023 drivers/spi/spi-topcliff-pch.c 			sg->offset = sg->offset * (*bpw / 8);
sg               1024 drivers/spi/spi-topcliff-pch.c 			sg_set_page(sg, virt_to_page(dma->rx_buf_virt), size,
sg               1025 drivers/spi/spi-topcliff-pch.c 				    sg->offset);
sg               1026 drivers/spi/spi-topcliff-pch.c 			sg_dma_len(sg) = size;
sg               1028 drivers/spi/spi-topcliff-pch.c 		sg_dma_address(sg) = dma->rx_buf_dma + sg->offset;
sg               1030 drivers/spi/spi-topcliff-pch.c 	sg = dma->sg_rx_p;
sg               1031 drivers/spi/spi-topcliff-pch.c 	desc_rx = dmaengine_prep_slave_sg(dma->chan_rx, sg,
sg               1039 drivers/spi/spi-topcliff-pch.c 	dma_sync_sg_for_device(&data->master->dev, sg, num, DMA_FROM_DEVICE);
sg               1070 drivers/spi/spi-topcliff-pch.c 	sg = dma->sg_tx_p;
sg               1071 drivers/spi/spi-topcliff-pch.c 	for (i = 0; i < num; i++, sg++) {
sg               1073 drivers/spi/spi-topcliff-pch.c 			sg->offset = 0;
sg               1074 drivers/spi/spi-topcliff-pch.c 			sg_set_page(sg, virt_to_page(dma->tx_buf_virt), size + head,
sg               1075 drivers/spi/spi-topcliff-pch.c 				    sg->offset);
sg               1076 drivers/spi/spi-topcliff-pch.c 			sg_dma_len(sg) = size + head;
sg               1078 drivers/spi/spi-topcliff-pch.c 			sg->offset = head + size * i;
sg               1079 drivers/spi/spi-topcliff-pch.c 			sg->offset = sg->offset * (*bpw / 8);
sg               1080 drivers/spi/spi-topcliff-pch.c 			sg_set_page(sg, virt_to_page(dma->tx_buf_virt), rem,
sg               1081 drivers/spi/spi-topcliff-pch.c 				    sg->offset);
sg               1082 drivers/spi/spi-topcliff-pch.c 			sg_dma_len(sg) = rem;
sg               1084 drivers/spi/spi-topcliff-pch.c 			sg->offset = head + size * i;
sg               1085 drivers/spi/spi-topcliff-pch.c 			sg->offset = sg->offset * (*bpw / 8);
sg               1086 drivers/spi/spi-topcliff-pch.c 			sg_set_page(sg, virt_to_page(dma->tx_buf_virt), size,
sg               1087 drivers/spi/spi-topcliff-pch.c 				    sg->offset);
sg               1088 drivers/spi/spi-topcliff-pch.c 			sg_dma_len(sg) = size;
sg               1090 drivers/spi/spi-topcliff-pch.c 		sg_dma_address(sg) = dma->tx_buf_dma + sg->offset;
sg               1092 drivers/spi/spi-topcliff-pch.c 	sg = dma->sg_tx_p;
sg               1094 drivers/spi/spi-topcliff-pch.c 					sg, num, DMA_MEM_TO_DEV,
sg               1101 drivers/spi/spi-topcliff-pch.c 	dma_sync_sg_for_device(&data->master->dev, sg, num, DMA_TO_DEVICE);
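spi-topcliff-pch.c hands its hand-built lists to the DMA engine with dmaengine_prep_slave_sg(). The generic submit sequence looks roughly like this sketch, where submit_rx() is a hypothetical helper and the list is assumed already mapped:

    #include <linux/dmaengine.h>

    static int submit_rx(struct dma_chan *chan, struct scatterlist *sgl,
                         unsigned int nents, dma_async_tx_callback done,
                         void *arg)
    {
            struct dma_async_tx_descriptor *desc;

            desc = dmaengine_prep_slave_sg(chan, sgl, nents, DMA_DEV_TO_MEM,
                                           DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
            if (!desc)
                    return -EBUSY;

            desc->callback = done;
            desc->callback_param = arg;
            if (dma_submit_error(dmaengine_submit(desc)))
                    return -EIO;

            dma_async_issue_pending(chan);  /* actually start the engine */
            return 0;
    }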
sg                822 drivers/spi/spi.c 	struct scatterlist *sg;
sg                841 drivers/spi/spi.c 	sg = &sgt->sgl[0];
sg                861 drivers/spi/spi.c 			sg_set_page(sg, vm_page,
sg                866 drivers/spi/spi.c 			sg_set_buf(sg, sg_buf, min);
sg                871 drivers/spi/spi.c 		sg = sg_next(sg);
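The spi.c loop above distinguishes vmalloc'd buffers, which are only virtually contiguous and must be added one page at a time with sg_set_page(), from kmalloc'd ones that sg_set_buf() can cover in a single entry. A compressed sketch of that fill loop, assuming the table was allocated with enough entries for the buffer:

    #include <linux/mm.h>
    #include <linux/scatterlist.h>
    #include <linux/vmalloc.h>

    static void fill_sgt(struct sg_table *sgt, void *buf, size_t len)
    {
            struct scatterlist *sg = sgt->sgl;

            while (len) {
                    size_t chunk = len;

                    if (is_vmalloc_addr(buf)) {
                            /* page-at-a-time for vmalloc memory */
                            chunk = min_t(size_t, len,
                                          PAGE_SIZE - offset_in_page(buf));
                            sg_set_page(sg, vmalloc_to_page(buf), chunk,
                                        offset_in_page(buf));
                    } else {
                            sg_set_buf(sg, buf, chunk);     /* one chunk */
                    }
                    buf += chunk;
                    len -= chunk;
                    sg = sg_next(sg);
            }
    }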
sg                144 drivers/staging/android/ion/ion.c 	struct scatterlist *sg, *new_sg;
sg                157 drivers/staging/android/ion/ion.c 	for_each_sg(table->sgl, sg, table->nents, i) {
sg                158 drivers/staging/android/ion/ion.c 		memcpy(new_sg, sg, sizeof(*sg));
sg                 23 drivers/staging/android/ion/ion_heap.c 	struct scatterlist *sg;
sg                 41 drivers/staging/android/ion/ion_heap.c 	for_each_sg(table->sgl, sg, table->nents, i) {
sg                 42 drivers/staging/android/ion/ion_heap.c 		int npages_this_entry = PAGE_ALIGN(sg->length) / PAGE_SIZE;
sg                 43 drivers/staging/android/ion/ion_heap.c 		struct page *page = sg_page(sg);
sg                 70 drivers/staging/android/ion/ion_heap.c 	struct scatterlist *sg;
sg                 74 drivers/staging/android/ion/ion_heap.c 	for_each_sg(table->sgl, sg, table->nents, i) {
sg                 75 drivers/staging/android/ion/ion_heap.c 		struct page *page = sg_page(sg);
sg                 77 drivers/staging/android/ion/ion_heap.c 		unsigned long len = sg->length;
sg                 79 drivers/staging/android/ion/ion_heap.c 		if (offset >= sg->length) {
sg                 80 drivers/staging/android/ion/ion_heap.c 			offset -= sg->length;
sg                 84 drivers/staging/android/ion/ion_heap.c 			len = sg->length - offset;
sg                150 drivers/staging/android/ion/ion_heap.c 	struct scatterlist sg;
sg                152 drivers/staging/android/ion/ion_heap.c 	sg_init_table(&sg, 1);
sg                153 drivers/staging/android/ion/ion_heap.c 	sg_set_page(&sg, page, size, 0);
sg                154 drivers/staging/android/ion/ion_heap.c 	return ion_heap_sglist_zero(&sg, 1, pgprot);
sg                106 drivers/staging/android/ion/ion_system_heap.c 	struct scatterlist *sg;
sg                134 drivers/staging/android/ion/ion_system_heap.c 	sg = table->sgl;
sg                136 drivers/staging/android/ion/ion_system_heap.c 		sg_set_page(sg, page, page_size(page), 0);
sg                137 drivers/staging/android/ion/ion_system_heap.c 		sg = sg_next(sg);
sg                158 drivers/staging/android/ion/ion_system_heap.c 	struct scatterlist *sg;
sg                165 drivers/staging/android/ion/ion_system_heap.c 	for_each_sg(table->sgl, sg, table->nents, i)
sg                166 drivers/staging/android/ion/ion_system_heap.c 		free_buffer_page(sys_heap, buffer, sg_page(sg));
sg                262 drivers/staging/greybus/sdio.c 	struct scatterlist *sg = data->sg;
sg                282 drivers/staging/greybus/sdio.c 	copied = sg_pcopy_to_buffer(sg, sg_len, &request->data[0], len, skip);
sg                316 drivers/staging/greybus/sdio.c 	struct scatterlist *sg = data->sg;
sg                351 drivers/staging/greybus/sdio.c 	copied = sg_pcopy_from_buffer(sg, sg_len, &response->data[0], len,
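The greybus SDIO code above bounces between the MMC request's scatterlist and a linear message buffer with sg_pcopy_to_buffer()/sg_pcopy_from_buffer(), both of which accept a byte offset ("skip") into the list. A sketch of the two directions behind one hypothetical helper:

    #include <linux/scatterlist.h>

    static int sg_bounce(struct scatterlist *sgl, unsigned int nents,
                         void *lin, size_t len, off_t skip, bool to_linear)
    {
            size_t copied;

            if (to_linear)          /* gather: sg -> linear (write path) */
                    copied = sg_pcopy_to_buffer(sgl, nents, lin, len, skip);
            else                    /* scatter: linear -> sg (read path) */
                    copied = sg_pcopy_from_buffer(sgl, nents, lin, len, skip);

            return copied == len ? 0 : -EINVAL;   /* short copy: list too small */
    }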
sg                 28 drivers/staging/kpc2000/kpc_dma/fileops.c unsigned int  count_parts_for_sge(struct scatterlist *sg)
sg                 30 drivers/staging/kpc2000/kpc_dma/fileops.c 	return DIV_ROUND_UP(sg_dma_len(sg), 0x80000);
sg                 43 drivers/staging/kpc2000/kpc_dma/fileops.c 	struct scatterlist *sg;
sg                102 drivers/staging/kpc2000/kpc_dma/fileops.c 	for_each_sg(acd->sgt.sgl, sg, acd->mapped_entry_count, i) {
sg                103 drivers/staging/kpc2000/kpc_dma/fileops.c 		desc_needed += count_parts_for_sge(sg);
sg                125 drivers/staging/kpc2000/kpc_dma/fileops.c 	for_each_sg(acd->sgt.sgl, sg, acd->mapped_entry_count, i) {
sg                126 drivers/staging/kpc2000/kpc_dma/fileops.c 		pcnt = count_parts_for_sge(sg);
sg                134 drivers/staging/kpc2000/kpc_dma/fileops.c 				desc->DescByteCount = sg_dma_len(sg) - (p * 0x80000);
sg                148 drivers/staging/kpc2000/kpc_dma/fileops.c 			dma_addr  = sg_dma_address(sg) + (p * 0x80000);
sg                202 drivers/staging/media/ipu3/ipu3-dmamap.c 	struct scatterlist *sg;
sg                207 drivers/staging/media/ipu3/ipu3-dmamap.c 	for_each_sg(sglist, sg, nents, i) {
sg                208 drivers/staging/media/ipu3/ipu3-dmamap.c 		if (sg->offset)
sg                211 drivers/staging/media/ipu3/ipu3-dmamap.c 		if (i != nents - 1 && !PAGE_ALIGNED(sg->length))
sg                214 drivers/staging/media/ipu3/ipu3-dmamap.c 		size += sg->length;
sg                298 drivers/staging/media/ipu3/ipu3-mmu.c 		       struct scatterlist *sg, unsigned int nents)
sg                306 drivers/staging/media/ipu3/ipu3-mmu.c 	for_each_sg(sg, s, nents, i) {
sg                 35 drivers/staging/media/ipu3/ipu3-mmu.h 		       struct scatterlist *sg, unsigned int nents);
sg                307 drivers/staging/media/ipu3/ipu3-v4l2.c 	struct sg_table *sg = vb2_dma_sg_plane_desc(vb, 0);
sg                318 drivers/staging/media/ipu3/ipu3-v4l2.c 	return imgu_dmamap_map_sg(imgu, sg->sgl, sg->nents, &buf->map);
sg                140 drivers/staging/mt7621-dma/mtk-hsdma.c 	struct mtk_hsdma_sg sg[1];
sg                321 drivers/staging/mt7621-dma/mtk-hsdma.c 	struct mtk_hsdma_sg *sg;
sg                325 drivers/staging/mt7621-dma/mtk-hsdma.c 	sg = &chan->desc->sg[0];
sg                326 drivers/staging/mt7621-dma/mtk-hsdma.c 	len = sg->len;
sg                330 drivers/staging/mt7621-dma/mtk-hsdma.c 	src = sg->src_addr;
sg                360 drivers/staging/mt7621-dma/mtk-hsdma.c 	len = sg->len;
sg                361 drivers/staging/mt7621-dma/mtk-hsdma.c 	dst = sg->dst_addr;
sg                477 drivers/staging/mt7621-dma/mtk-hsdma.c 	desc->sg[0].src_addr = src;
sg                478 drivers/staging/mt7621-dma/mtk-hsdma.c 	desc->sg[0].dst_addr = dest;
sg                479 drivers/staging/mt7621-dma/mtk-hsdma.c 	desc->sg[0].len = len;
sg                430 drivers/staging/octeon-usb/octeon-hcd.c 	if (urb->num_sgs || urb->sg ||
sg                104 drivers/staging/ralink-gdma/ralink-gdma.c 	struct gdma_dma_sg sg[];
sg                274 drivers/staging/ralink-gdma/ralink-gdma.c 	struct gdma_dma_sg *sg;
sg                286 drivers/staging/ralink-gdma/ralink-gdma.c 	sg = &chan->desc->sg[chan->next_sg];
sg                288 drivers/staging/ralink-gdma/ralink-gdma.c 		src_addr = sg->src_addr;
sg                295 drivers/staging/ralink-gdma/ralink-gdma.c 		dst_addr = sg->dst_addr;
sg                304 drivers/staging/ralink-gdma/ralink-gdma.c 		src_addr = sg->src_addr;
sg                305 drivers/staging/ralink-gdma/ralink-gdma.c 		dst_addr = sg->dst_addr;
sg                315 drivers/staging/ralink-gdma/ralink-gdma.c 	ctrl0 |= (sg->len << GDMA_REG_CTRL0_TX_SHIFT) |
sg                351 drivers/staging/ralink-gdma/ralink-gdma.c 	struct gdma_dma_sg *sg;
sg                363 drivers/staging/ralink-gdma/ralink-gdma.c 	sg = &chan->desc->sg[chan->next_sg];
sg                365 drivers/staging/ralink-gdma/ralink-gdma.c 		src_addr = sg->src_addr;
sg                372 drivers/staging/ralink-gdma/ralink-gdma.c 		dst_addr = sg->dst_addr;
sg                378 drivers/staging/ralink-gdma/ralink-gdma.c 		src_addr = sg->src_addr;
sg                379 drivers/staging/ralink-gdma/ralink-gdma.c 		dst_addr = sg->dst_addr;
sg                390 drivers/staging/ralink-gdma/ralink-gdma.c 	ctrl0 |= (sg->len << GDMA_REG_CTRL0_TX_SHIFT) |
sg                445 drivers/staging/ralink-gdma/ralink-gdma.c 			desc->residue -= desc->sg[chan->next_sg - 1].len;
sg                520 drivers/staging/ralink-gdma/ralink-gdma.c 	struct scatterlist *sg;
sg                523 drivers/staging/ralink-gdma/ralink-gdma.c 	desc = kzalloc(struct_size(desc, sg, sg_len), GFP_ATOMIC);
sg                530 drivers/staging/ralink-gdma/ralink-gdma.c 	for_each_sg(sgl, sg, sg_len, i) {
sg                532 drivers/staging/ralink-gdma/ralink-gdma.c 			desc->sg[i].src_addr = sg_dma_address(sg);
sg                534 drivers/staging/ralink-gdma/ralink-gdma.c 			desc->sg[i].dst_addr = sg_dma_address(sg);
sg                541 drivers/staging/ralink-gdma/ralink-gdma.c 		if (unlikely(sg_dma_len(sg) > GDMA_REG_CTRL0_TX_MASK)) {
sg                543 drivers/staging/ralink-gdma/ralink-gdma.c 				sg_dma_len(sg));
sg                546 drivers/staging/ralink-gdma/ralink-gdma.c 		desc->sg[i].len = sg_dma_len(sg);
sg                547 drivers/staging/ralink-gdma/ralink-gdma.c 		desc->residue += sg_dma_len(sg);
sg                578 drivers/staging/ralink-gdma/ralink-gdma.c 	desc = kzalloc(struct_size(desc, sg, num_periods), GFP_ATOMIC);
sg                586 drivers/staging/ralink-gdma/ralink-gdma.c 		desc->sg[i].src_addr = src;
sg                587 drivers/staging/ralink-gdma/ralink-gdma.c 		desc->sg[i].dst_addr = dest;
sg                589 drivers/staging/ralink-gdma/ralink-gdma.c 			desc->sg[i].len = xfer_count;
sg                591 drivers/staging/ralink-gdma/ralink-gdma.c 			desc->sg[i].len = len;
sg                592 drivers/staging/ralink-gdma/ralink-gdma.c 		src += desc->sg[i].len;
sg                593 drivers/staging/ralink-gdma/ralink-gdma.c 		dest += desc->sg[i].len;
sg                594 drivers/staging/ralink-gdma/ralink-gdma.c 		len -= desc->sg[i].len;
sg                623 drivers/staging/ralink-gdma/ralink-gdma.c 	desc = kzalloc(struct_size(desc, sg, num_periods), GFP_ATOMIC);
sg                632 drivers/staging/ralink-gdma/ralink-gdma.c 			desc->sg[i].src_addr = buf_addr;
sg                634 drivers/staging/ralink-gdma/ralink-gdma.c 			desc->sg[i].dst_addr = buf_addr;
sg                640 drivers/staging/ralink-gdma/ralink-gdma.c 		desc->sg[i].len = period_len;
sg                679 drivers/staging/ralink-gdma/ralink-gdma.c 				((chan->next_sg - 1) * desc->sg[0].len);
sg                190 drivers/staging/rtl8192e/rtllib_crypt_ccmp.c 		struct scatterlist sg[2];
sg                203 drivers/staging/rtl8192e/rtllib_crypt_ccmp.c 		sg_init_table(sg, 2);
sg                204 drivers/staging/rtl8192e/rtllib_crypt_ccmp.c 		sg_set_buf(&sg[0], aad, aad_len);
sg                205 drivers/staging/rtl8192e/rtllib_crypt_ccmp.c 		sg_set_buf(&sg[1], skb->data + hdr_len + CCMP_HDR_LEN,
sg                210 drivers/staging/rtl8192e/rtllib_crypt_ccmp.c 		aead_request_set_crypt(req, sg, sg, data_len, iv);
sg                275 drivers/staging/rtl8192e/rtllib_crypt_ccmp.c 		struct scatterlist sg[2];
sg                286 drivers/staging/rtl8192e/rtllib_crypt_ccmp.c 		sg_init_table(sg, 2);
sg                287 drivers/staging/rtl8192e/rtllib_crypt_ccmp.c 		sg_set_buf(&sg[0], aad, aad_len);
sg                288 drivers/staging/rtl8192e/rtllib_crypt_ccmp.c 		sg_set_buf(&sg[1], pos, data_len);
sg                292 drivers/staging/rtl8192e/rtllib_crypt_ccmp.c 		aead_request_set_crypt(req, sg, sg, data_len, iv);
sg                292 drivers/staging/rtl8192e/rtllib_crypt_tkip.c 	struct scatterlist sg;
sg                343 drivers/staging/rtl8192e/rtllib_crypt_tkip.c 		sg_init_one(&sg, pos, len+4);
sg                349 drivers/staging/rtl8192e/rtllib_crypt_tkip.c 		skcipher_request_set_crypt(req, &sg, &sg, len + 4, NULL);
sg                379 drivers/staging/rtl8192e/rtllib_crypt_tkip.c 	struct scatterlist sg;
sg                442 drivers/staging/rtl8192e/rtllib_crypt_tkip.c 		sg_init_one(&sg, pos, plen+4);
sg                447 drivers/staging/rtl8192e/rtllib_crypt_tkip.c 		skcipher_request_set_crypt(req, &sg, &sg, plen + 4, NULL);
sg                 95 drivers/staging/rtl8192e/rtllib_crypt_wep.c 	struct scatterlist sg;
sg                144 drivers/staging/rtl8192e/rtllib_crypt_wep.c 		sg_init_one(&sg, pos, len+4);
sg                148 drivers/staging/rtl8192e/rtllib_crypt_wep.c 		skcipher_request_set_crypt(req, &sg, &sg, len + 4, NULL);
sg                175 drivers/staging/rtl8192e/rtllib_crypt_wep.c 	struct scatterlist sg;
sg                200 drivers/staging/rtl8192e/rtllib_crypt_wep.c 		sg_init_one(&sg, pos, plen+4);
sg                204 drivers/staging/rtl8192e/rtllib_crypt_wep.c 		skcipher_request_set_crypt(req, &sg, &sg, plen + 4, NULL);
sg                197 drivers/staging/rtl8192u/ieee80211/ieee80211_crypt_ccmp.c 		struct scatterlist sg[2];
sg                211 drivers/staging/rtl8192u/ieee80211/ieee80211_crypt_ccmp.c 		sg_init_table(sg, 2);
sg                212 drivers/staging/rtl8192u/ieee80211/ieee80211_crypt_ccmp.c 		sg_set_buf(&sg[0], aad, aad_len);
sg                213 drivers/staging/rtl8192u/ieee80211/ieee80211_crypt_ccmp.c 		sg_set_buf(&sg[1], skb->data + hdr_len + CCMP_HDR_LEN,
sg                218 drivers/staging/rtl8192u/ieee80211/ieee80211_crypt_ccmp.c 		aead_request_set_crypt(req, sg, sg, data_len, iv);
sg                284 drivers/staging/rtl8192u/ieee80211/ieee80211_crypt_ccmp.c 		struct scatterlist sg[2];
sg                296 drivers/staging/rtl8192u/ieee80211/ieee80211_crypt_ccmp.c 		sg_init_table(sg, 2);
sg                297 drivers/staging/rtl8192u/ieee80211/ieee80211_crypt_ccmp.c 		sg_set_buf(&sg[0], aad, aad_len);
sg                298 drivers/staging/rtl8192u/ieee80211/ieee80211_crypt_ccmp.c 		sg_set_buf(&sg[1], pos, data_len);
sg                302 drivers/staging/rtl8192u/ieee80211/ieee80211_crypt_ccmp.c 		aead_request_set_crypt(req, sg, sg, data_len, iv);
sg                296 drivers/staging/rtl8192u/ieee80211/ieee80211_crypt_tkip.c 	struct scatterlist sg;
sg                346 drivers/staging/rtl8192u/ieee80211/ieee80211_crypt_tkip.c 		sg_init_one(&sg, pos, len + 4);
sg                349 drivers/staging/rtl8192u/ieee80211/ieee80211_crypt_tkip.c 		skcipher_request_set_crypt(req, &sg, &sg, len + 4, NULL);
sg                379 drivers/staging/rtl8192u/ieee80211/ieee80211_crypt_tkip.c 	struct scatterlist sg;
sg                438 drivers/staging/rtl8192u/ieee80211/ieee80211_crypt_tkip.c 		sg_init_one(&sg, pos, plen + 4);
sg                442 drivers/staging/rtl8192u/ieee80211/ieee80211_crypt_tkip.c 		skcipher_request_set_crypt(req, &sg, &sg, plen + 4, NULL);
sg                 90 drivers/staging/rtl8192u/ieee80211/ieee80211_crypt_wep.c 	struct scatterlist sg;
sg                138 drivers/staging/rtl8192u/ieee80211/ieee80211_crypt_wep.c 		sg_init_one(&sg, pos, len + 4);
sg                142 drivers/staging/rtl8192u/ieee80211/ieee80211_crypt_wep.c 		skcipher_request_set_crypt(req, &sg, &sg, len + 4, NULL);
sg                169 drivers/staging/rtl8192u/ieee80211/ieee80211_crypt_wep.c 	struct scatterlist sg;
sg                195 drivers/staging/rtl8192u/ieee80211/ieee80211_crypt_wep.c 		sg_init_one(&sg, pos, plen + 4);
sg                199 drivers/staging/rtl8192u/ieee80211/ieee80211_crypt_wep.c 		skcipher_request_set_crypt(req, &sg, &sg, plen + 4, NULL);
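Every crypt_wep/crypt_tkip entry above follows one shape: sg_init_one() over the in-skb payload, then an in-place skcipher request with src == dst. A self-contained sketch using the "ecb(arc4)" cipher these drivers rely on; it treats the transform as synchronous and trims setkey error handling for brevity:

    #include <crypto/skcipher.h>
    #include <linux/scatterlist.h>

    static int rc4_crypt_in_place(const u8 *key, unsigned int klen,
                                  u8 *buf, unsigned int len)
    {
            struct crypto_skcipher *tfm;
            struct skcipher_request *req;
            struct scatterlist sg;
            int err;

            tfm = crypto_alloc_skcipher("ecb(arc4)", 0, 0);
            if (IS_ERR(tfm))
                    return PTR_ERR(tfm);

            req = skcipher_request_alloc(tfm, GFP_KERNEL);
            if (!req) {
                    crypto_free_skcipher(tfm);
                    return -ENOMEM;
            }

            crypto_skcipher_setkey(tfm, key, klen);
            sg_init_one(&sg, buf, len);          /* buf must not be on the stack */
            skcipher_request_set_crypt(req, &sg, &sg, len, NULL);
            err = crypto_skcipher_encrypt(req);  /* src == dst: in place */

            skcipher_request_free(req);
            crypto_free_skcipher(tfm);
            return err;
    }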
sg                 63 drivers/staging/rts5208/rtsx_transport.c 		struct scatterlist *sg =
sg                 75 drivers/staging/rts5208/rtsx_transport.c 			struct page *page = sg_page(sg) +
sg                 76 drivers/staging/rts5208/rtsx_transport.c 					((sg->offset + *offset) >> PAGE_SHIFT);
sg                 77 drivers/staging/rts5208/rtsx_transport.c 			unsigned int poff = (sg->offset + *offset) &
sg                 79 drivers/staging/rts5208/rtsx_transport.c 			unsigned int sglen = sg->length - *offset;
sg                 89 drivers/staging/rts5208/rtsx_transport.c 				++sg;
sg                314 drivers/staging/rts5208/rtsx_transport.c 					     struct scatterlist *sg, int num_sg,
sg                329 drivers/staging/rts5208/rtsx_transport.c 	if (!sg || (num_sg <= 0) || !offset || !index)
sg                358 drivers/staging/rts5208/rtsx_transport.c 	sg_cnt = dma_map_sg(&rtsx->pci->dev, sg, num_sg, dma_dir);
sg                361 drivers/staging/rts5208/rtsx_transport.c 	sg_ptr = sg;
sg                471 drivers/staging/rts5208/rtsx_transport.c 	dma_unmap_sg(&rtsx->pci->dev, sg, num_sg, dma_dir);
sg                480 drivers/staging/rts5208/rtsx_transport.c 				     struct scatterlist *sg, int num_sg,
sg                492 drivers/staging/rts5208/rtsx_transport.c 	if (!sg || (num_sg <= 0))
sg                521 drivers/staging/rts5208/rtsx_transport.c 	buf_cnt = dma_map_sg(&rtsx->pci->dev, sg, num_sg, dma_dir);
sg                523 drivers/staging/rts5208/rtsx_transport.c 	sg_ptr = sg;
sg                618 drivers/staging/rts5208/rtsx_transport.c 	dma_unmap_sg(&rtsx->pci->dev, sg, num_sg, dma_dir);
sg                722 drivers/staging/rts5208/rtsx_transport.c 		struct scatterlist *sg = buf;
sg                724 drivers/staging/rts5208/rtsx_transport.c 		err = rtsx_transfer_sglist_adma_partial(chip, card, sg, use_sg,
sg                515 drivers/staging/unisys/visorhba/visorhba_main.c 	struct scatterlist *sg = NULL;
sg                556 drivers/staging/unisys/visorhba/visorhba_main.c 	for_each_sg(sglist, sg, scsi_sg_count(scsicmd), i) {
sg                557 drivers/staging/unisys/visorhba/visorhba_main.c 		cmdrsp->scsi.gpi_list[i].address = sg_phys(sg);
sg                558 drivers/staging/unisys/visorhba/visorhba_main.c 		cmdrsp->scsi.gpi_list[i].length = sg->length;
sg                842 drivers/staging/unisys/visorhba/visorhba_main.c 	struct scatterlist *sg;
sg                874 drivers/staging/unisys/visorhba/visorhba_main.c 		scsi_for_each_sg(scsicmd, sg, scsi_sg_count(scsicmd), i) {
sg                875 drivers/staging/unisys/visorhba/visorhba_main.c 			this_page_orig = kmap_atomic(sg_page(sg));
sg                877 drivers/staging/unisys/visorhba/visorhba_main.c 					     sg->offset);
sg                878 drivers/staging/unisys/visorhba/visorhba_main.c 			memcpy(this_page, buf + bufind, sg->length);
sg                365 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_2835_arm.c 	struct scatterlist *scatterlist, *sg;
sg                496 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_2835_arm.c 	for_each_sg(scatterlist, sg, dma_buffers, i) {
sg                497 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_2835_arm.c 		u32 len = sg_dma_len(sg);
sg                498 drivers/staging/vc04_services/interface/vchiq_arm/vchiq_2835_arm.c 		u32 addr = sg_dma_address(sg);
sg                254 drivers/staging/wusbcore/host/whci/qset.c 	struct scatterlist *sg;
sg                261 drivers/staging/wusbcore/host/whci/qset.c 	sg = std->bounce_sg;
sg                267 drivers/staging/wusbcore/host/whci/qset.c 		len = min(sg->length - offset, remaining);
sg                268 drivers/staging/wusbcore/host/whci/qset.c 		memcpy(sg_virt(sg) + offset, bounce, len);
sg                274 drivers/staging/wusbcore/host/whci/qset.c 		if (offset >= sg->length) {
sg                275 drivers/staging/wusbcore/host/whci/qset.c 			sg = sg_next(sg);
sg                427 drivers/staging/wusbcore/host/whci/qset.c 	struct scatterlist *sg;
sg                438 drivers/staging/wusbcore/host/whci/qset.c 	for_each_sg(urb->sg, sg, urb->num_mapped_sgs, i) {
sg                448 drivers/staging/wusbcore/host/whci/qset.c 		dma_addr = sg_dma_address(sg);
sg                449 drivers/staging/wusbcore/host/whci/qset.c 		dma_remaining = min_t(size_t, sg_dma_len(sg), remaining);
sg                553 drivers/staging/wusbcore/host/whci/qset.c 	struct scatterlist *sg;
sg                561 drivers/staging/wusbcore/host/whci/qset.c 	for_each_sg(urb->sg, sg, urb->num_mapped_sgs, i) {
sg                570 drivers/staging/wusbcore/host/whci/qset.c 		sg_remaining = min_t(size_t, remaining, sg->length);
sg                571 drivers/staging/wusbcore/host/whci/qset.c 		orig = sg_virt(sg);
sg                581 drivers/staging/wusbcore/host/whci/qset.c 				std->bounce_sg = sg;
sg                582 drivers/staging/wusbcore/host/whci/qset.c 				std->bounce_offset = orig - sg_virt(sg);
sg                182 drivers/staging/wusbcore/wa-xfer.c 					kfree(seg->dto_urb->sg);
sg                746 drivers/staging/wusbcore/wa-xfer.c 	kfree(urb->sg);
sg                747 drivers/staging/wusbcore/wa-xfer.c 	urb->sg = NULL;
sg               1089 drivers/staging/wusbcore/wa-xfer.c 	seg->dto_urb->sg = NULL;
sg               1114 drivers/staging/wusbcore/wa-xfer.c 		seg->dto_urb->sg = NULL;
sg               1127 drivers/staging/wusbcore/wa-xfer.c 			seg->dto_urb->sg = NULL;
sg               1138 drivers/staging/wusbcore/wa-xfer.c 			seg->dto_urb->sg = wa_xfer_create_subset_sg(
sg               1139 drivers/staging/wusbcore/wa-xfer.c 				xfer->urb->sg,
sg               1142 drivers/staging/wusbcore/wa-xfer.c 			if (!(seg->dto_urb->sg))
sg               1830 drivers/staging/wusbcore/wa-xfer.c 	    && (urb->sg == NULL)
sg               2209 drivers/staging/wusbcore/wa-xfer.c 	buf_in_urb->sg = NULL;
sg               2233 drivers/staging/wusbcore/wa-xfer.c 		buf_in_urb->sg = NULL;
sg               2243 drivers/staging/wusbcore/wa-xfer.c 			buf_in_urb->sg = NULL;
sg               2250 drivers/staging/wusbcore/wa-xfer.c 			buf_in_urb->sg = wa_xfer_create_subset_sg(
sg               2251 drivers/staging/wusbcore/wa-xfer.c 				xfer->urb->sg,
sg               2256 drivers/staging/wusbcore/wa-xfer.c 			if (!(buf_in_urb->sg)) {
sg               2373 drivers/staging/wusbcore/wa-xfer.c 	kfree(buf_in_urb->sg);
sg               2374 drivers/staging/wusbcore/wa-xfer.c 	buf_in_urb->sg = NULL;
sg               2595 drivers/staging/wusbcore/wa-xfer.c 	kfree(urb->sg);
sg               2596 drivers/staging/wusbcore/wa-xfer.c 	urb->sg = NULL;
sg                 47 drivers/target/iscsi/cxgbit/cxgbit.h 	struct scatterlist sg;
sg                 13 drivers/target/iscsi/cxgbit/cxgbit_ddp.c 	struct scatterlist *sg = sg_pp ? *sg_pp : NULL;
sg                 21 drivers/target/iscsi/cxgbit/cxgbit_ddp.c 	if (sg) {
sg                 22 drivers/target/iscsi/cxgbit/cxgbit_ddp.c 		addr = sg_dma_address(sg);
sg                 23 drivers/target/iscsi/cxgbit/cxgbit_ddp.c 		len = sg_dma_len(sg);
sg                 27 drivers/target/iscsi/cxgbit/cxgbit_ddp.c 		if (sg) {
sg                 30 drivers/target/iscsi/cxgbit/cxgbit_ddp.c 			if (offset == (len + sg->offset)) {
sg                 32 drivers/target/iscsi/cxgbit/cxgbit_ddp.c 				sg = sg_next(sg);
sg                 33 drivers/target/iscsi/cxgbit/cxgbit_ddp.c 				if (sg) {
sg                 34 drivers/target/iscsi/cxgbit/cxgbit_ddp.c 					addr = sg_dma_address(sg);
sg                 35 drivers/target/iscsi/cxgbit/cxgbit_ddp.c 					len = sg_dma_len(sg);
sg                 48 drivers/target/iscsi/cxgbit/cxgbit_ddp.c 		*sg_pp = sg;
sg                 54 drivers/target/iscsi/cxgbit/cxgbit_ddp.c 		if (sg) {
sg                 55 drivers/target/iscsi/cxgbit/cxgbit_ddp.c 			sg = sg_next(sg);
sg                 56 drivers/target/iscsi/cxgbit/cxgbit_ddp.c 			if (sg)
sg                 57 drivers/target/iscsi/cxgbit/cxgbit_ddp.c 				addr = sg_dma_address(sg);
sg                 60 drivers/target/iscsi/cxgbit/cxgbit_ddp.c 	ppod->addr[i] = sg ? cpu_to_be64(addr + offset) : 0ULL;
sg                133 drivers/target/iscsi/cxgbit/cxgbit_ddp.c 	struct scatterlist *sg = ttinfo->sgl;
sg                144 drivers/target/iscsi/cxgbit/cxgbit_ddp.c 					      &sg, &offset);
sg                152 drivers/target/iscsi/cxgbit/cxgbit_ddp.c static int cxgbit_ddp_sgl_check(struct scatterlist *sg,
sg                158 drivers/target/iscsi/cxgbit/cxgbit_ddp.c 	for (i = 0; i < nents; i++, sg = sg_next(sg)) {
sg                159 drivers/target/iscsi/cxgbit/cxgbit_ddp.c 		unsigned int len = sg->length + sg->offset;
sg                161 drivers/target/iscsi/cxgbit/cxgbit_ddp.c 		if ((sg->offset & 0x3) || (i && sg->offset) ||
sg                288 drivers/target/iscsi/cxgbit/cxgbit_ddp.c 			put_page(sg_page(&ccmd->sg));
sg                362 drivers/target/iscsi/cxgbit/cxgbit_target.c 	struct scatterlist *sg;
sg                372 drivers/target/iscsi/cxgbit/cxgbit_target.c 	sg = &cmd->se_cmd.t_data_sg[data_offset / PAGE_SIZE];
sg                376 drivers/target/iscsi/cxgbit/cxgbit_target.c 		u32 cur_len = min_t(u32, data_length, sg->length - page_off);
sg                378 drivers/target/iscsi/cxgbit/cxgbit_target.c 		page = sg_page(sg);
sg                381 drivers/target/iscsi/cxgbit/cxgbit_target.c 		skb_fill_page_desc(skb, i, page, sg->offset + page_off,
sg                389 drivers/target/iscsi/cxgbit/cxgbit_target.c 		sg = sg_next(sg);
sg                828 drivers/target/iscsi/cxgbit/cxgbit_target.c cxgbit_skb_copy_to_sg(struct sk_buff *skb, struct scatterlist *sg,
sg                847 drivers/target/iscsi/cxgbit/cxgbit_target.c 		consumed += sg_pcopy_from_buffer(sg, nents, (void *)buf,
sg                901 drivers/target/iscsi/cxgbit/cxgbit_target.c 		sg_init_table(&ccmd->sg, 1);
sg                902 drivers/target/iscsi/cxgbit/cxgbit_target.c 		sg_set_page(&ccmd->sg, skb_frag_page(dfrag),
sg                906 drivers/target/iscsi/cxgbit/cxgbit_target.c 		cmd->se_cmd.t_data_sg = &ccmd->sg;
sg                911 drivers/target/iscsi/cxgbit/cxgbit_target.c 		struct scatterlist *sg = &cmd->se_cmd.t_data_sg[0];
sg                914 drivers/target/iscsi/cxgbit/cxgbit_target.c 		cxgbit_skb_copy_to_sg(csk->skb, sg, sg_nents, 0);
sg                887 drivers/target/iscsi/iscsi_target.c 	struct scatterlist *sg;
sg                903 drivers/target/iscsi/iscsi_target.c 	sg = &cmd->se_cmd.t_data_sg[ent];
sg                906 drivers/target/iscsi/iscsi_target.c 	cmd->first_data_sg = sg;
sg                912 drivers/target/iscsi/iscsi_target.c 		if (WARN_ON_ONCE(!sg || i >= nvec))
sg                915 drivers/target/iscsi/iscsi_target.c 		cur_len = min_t(u32, data_length, sg->length - page_off);
sg                917 drivers/target/iscsi/iscsi_target.c 		iov[i].iov_base = kmap(sg_page(sg)) + sg->offset + page_off;
sg                922 drivers/target/iscsi/iscsi_target.c 		sg = sg_next(sg);
sg                933 drivers/target/iscsi/iscsi_target.c 	for_each_sg(cmd->se_cmd.t_data_sg, sg,
sg                936 drivers/target/iscsi/iscsi_target.c 		       i, sg->offset, sg->length);
sg                944 drivers/target/iscsi/iscsi_target.c 	struct scatterlist *sg;
sg                946 drivers/target/iscsi/iscsi_target.c 	sg = cmd->first_data_sg;
sg                949 drivers/target/iscsi/iscsi_target.c 		kunmap(sg_page(&sg[i]));
sg               1381 drivers/target/iscsi/iscsi_target.c 	struct scatterlist *sg;
sg               1386 drivers/target/iscsi/iscsi_target.c 	sg = cmd->first_data_sg;
sg               1390 drivers/target/iscsi/iscsi_target.c 		u32 cur_len = min_t(u32, data_length, (sg->length - page_off));
sg               1392 drivers/target/iscsi/iscsi_target.c 		ahash_request_set_crypt(hash, sg, NULL, cur_len);
sg               1398 drivers/target/iscsi/iscsi_target.c 		sg = sg_next(sg);
sg               1420 drivers/target/iscsi/iscsi_target.c 	struct scatterlist sg[2];
sg               1422 drivers/target/iscsi/iscsi_target.c 	sg_init_table(sg, ARRAY_SIZE(sg));
sg               1423 drivers/target/iscsi/iscsi_target.c 	sg_set_buf(sg, buf, payload_length);
sg               1425 drivers/target/iscsi/iscsi_target.c 		sg_set_buf(sg + 1, pad_bytes, padding);
sg               1427 drivers/target/iscsi/iscsi_target.c 	ahash_request_set_crypt(hash, sg, data_crc, payload_length + padding);
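iscsi_target.c computes its data digests by pointing an ahash request at the command's scatterlist, element by element or padded via a small two-entry list. A one-shot sketch over a whole list with the crc32c transform; synchronous completion is assumed, an async driver would instead return -EINPROGRESS:

    #include <crypto/hash.h>
    #include <linux/scatterlist.h>

    /* one-shot digest of 'nbytes' starting at the head of the list */
    static int sg_crc32c(struct scatterlist *sgl, unsigned int nbytes, u32 *crc)
    {
            struct crypto_ahash *tfm;
            struct ahash_request *req;
            int err;

            tfm = crypto_alloc_ahash("crc32c", 0, 0);
            if (IS_ERR(tfm))
                    return PTR_ERR(tfm);

            req = ahash_request_alloc(tfm, GFP_KERNEL);
            if (!req) {
                    crypto_free_ahash(tfm);
                    return -ENOMEM;
            }

            ahash_request_set_callback(req, 0, NULL, NULL);
            ahash_request_set_crypt(req, sgl, (u8 *)crc, nbytes);
            err = crypto_ahash_digest(req);      /* init + update + final */

            ahash_request_free(req);
            crypto_free_ahash(tfm);
            return err;
    }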
sg               1096 drivers/target/iscsi/iscsi_target_util.c 	struct scatterlist *sg = cmd->first_data_sg;
sg               1137 drivers/target/iscsi/iscsi_target_util.c 		u32 space = (sg->length - offset);
sg               1141 drivers/target/iscsi/iscsi_target_util.c 					sg_page(sg), sg->offset + offset, sub_len, 0);
sg               1156 drivers/target/iscsi/iscsi_target_util.c 		sg = sg_next(sg);
sg                270 drivers/target/target_core_file.c 	struct scatterlist *sg;
sg                285 drivers/target/target_core_file.c 	for_each_sg(sgl, sg, sgl_nents, i) {
sg                286 drivers/target/target_core_file.c 		bvec[i].bv_page = sg_page(sg);
sg                287 drivers/target/target_core_file.c 		bvec[i].bv_len = sg->length;
sg                288 drivers/target/target_core_file.c 		bvec[i].bv_offset = sg->offset;
sg                290 drivers/target/target_core_file.c 		len += sg->length;
sg                322 drivers/target/target_core_file.c 	struct scatterlist *sg;
sg                335 drivers/target/target_core_file.c 	for_each_sg(sgl, sg, sgl_nents, i) {
sg                336 drivers/target/target_core_file.c 		bvec[i].bv_page = sg_page(sg);
sg                337 drivers/target/target_core_file.c 		bvec[i].bv_len = sg->length;
sg                338 drivers/target/target_core_file.c 		bvec[i].bv_offset = sg->offset;
sg                340 drivers/target/target_core_file.c 		len += sg->length;
sg                414 drivers/target/target_core_iblock.c 	struct scatterlist *sg = &cmd->t_data_sg[0];
sg                418 drivers/target/target_core_iblock.c 	buf = kmap(sg_page(sg)) + sg->offset;
sg                426 drivers/target/target_core_iblock.c 	kunmap(sg_page(sg));
sg                448 drivers/target/target_core_iblock.c 	struct scatterlist *sg;
sg                461 drivers/target/target_core_iblock.c 	sg = &cmd->t_data_sg[0];
sg                464 drivers/target/target_core_iblock.c 	    sg->length != cmd->se_dev->dev_attrib.block_size) {
sg                466 drivers/target/target_core_iblock.c 			" block_size: %u\n", cmd->t_data_nents, sg->length,
sg                491 drivers/target/target_core_iblock.c 		while (bio_add_page(bio, sg_page(sg), sg->length, sg->offset)
sg                492 drivers/target/target_core_iblock.c 				!= sg->length) {
sg                504 drivers/target/target_core_iblock.c 		block_lba += sg->length >> SECTOR_SHIFT;
sg                505 drivers/target/target_core_iblock.c 		sectors -= sg->length >> SECTOR_SHIFT;
sg                689 drivers/target/target_core_iblock.c 	struct scatterlist *sg;
sg                739 drivers/target/target_core_iblock.c 	for_each_sg(sgl, sg, sgl_nents, i) {
sg                745 drivers/target/target_core_iblock.c 		while (bio_add_page(bio, sg_page(sg), sg->length, sg->offset)
sg                746 drivers/target/target_core_iblock.c 				!= sg->length) {
sg                769 drivers/target/target_core_iblock.c 		block_lba += sg->length >> SECTOR_SHIFT;
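The iblock entries show the idiom for feeding sg pages into bios: bio_add_page() returns the number of bytes it accepted, so a short return means the bio is full and a fresh one must be allocated before retrying the same entry. A condensed sketch assuming the BIO_MAX_PAGES-era bio_alloc(); allocation-failure handling is elided and sgl_to_bios is an illustrative name:

    #include <linux/bio.h>
    #include <linux/blkdev.h>

    /* Sketch: push every sg entry into bios, starting a new bio when full. */
    static void sgl_to_bios(struct block_device *bdev, sector_t lba,
                            struct scatterlist *sgl, int nents)
    {
        struct scatterlist *sg;
        struct bio *bio;
        int i;

        bio = bio_alloc(GFP_NOIO, min(nents, BIO_MAX_PAGES));
        bio_set_dev(bio, bdev);
        bio->bi_opf = REQ_OP_WRITE;
        bio->bi_iter.bi_sector = lba;

        for_each_sg(sgl, sg, nents, i) {
            while (bio_add_page(bio, sg_page(sg), sg->length, sg->offset)
                   != sg->length) {
                submit_bio(bio);  /* full: flush and retry this entry */
                bio = bio_alloc(GFP_NOIO, min(nents - i, BIO_MAX_PAGES));
                bio_set_dev(bio, bdev);
                bio->bi_opf = REQ_OP_WRITE;
                bio->bi_iter.bi_sector = lba;
            }
            lba += sg->length >> SECTOR_SHIFT;
        }
        submit_bio(bio);
    }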
sg                852 drivers/target/target_core_pscsi.c 	struct scatterlist *sg;
sg                863 drivers/target/target_core_pscsi.c 	for_each_sg(sgl, sg, sgl_nents, i) {
sg                864 drivers/target/target_core_pscsi.c 		page = sg_page(sg);
sg                865 drivers/target/target_core_pscsi.c 		off = sg->offset;
sg                866 drivers/target/target_core_pscsi.c 		len = sg->length;
sg                 67 drivers/target/target_core_rd.c 	struct scatterlist *sg;
sg                 71 drivers/target/target_core_rd.c 		sg = sg_table[i].sg_table;
sg                 75 drivers/target/target_core_rd.c 			pg = sg_page(&sg[j]);
sg                 81 drivers/target/target_core_rd.c 		kfree(sg);
sg                119 drivers/target/target_core_rd.c 	struct scatterlist *sg;
sg                134 drivers/target/target_core_rd.c 		sg = kcalloc(sg_per_table + chain_entry, sizeof(*sg),
sg                136 drivers/target/target_core_rd.c 		if (!sg)
sg                139 drivers/target/target_core_rd.c 		sg_init_table(sg, sg_per_table + chain_entry);
sg                143 drivers/target/target_core_rd.c 				 max_sg_per_table + 1, sg);
sg                146 drivers/target/target_core_rd.c 		sg_table[i].sg_table = sg;
sg                159 drivers/target/target_core_rd.c 			sg_assign_page(&sg[j], pg);
sg                160 drivers/target/target_core_rd.c 			sg[j].length = PAGE_SIZE;
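target_core_rd's backstore is an array of sg tables whose entries each own one freshly allocated page; sg_assign_page() installs the page without touching length/offset, so length is set explicitly. A sketch of building one such table (rd_like_alloc is illustrative):

    #include <linux/scatterlist.h>
    #include <linux/slab.h>

    /* Sketch: an sg table where every entry owns one zeroed page. */
    static struct scatterlist *rd_like_alloc(int nents)
    {
        struct scatterlist *sg;
        int j;

        sg = kcalloc(nents, sizeof(*sg), GFP_KERNEL);
        if (!sg)
            return NULL;
        sg_init_table(sg, nents);

        for (j = 0; j < nents; j++) {
            struct page *pg = alloc_page(GFP_KERNEL | __GFP_ZERO);

            if (!pg)
                goto out_free;
            sg_assign_page(&sg[j], pg);
            sg[j].length = PAGE_SIZE;
        }
        return sg;

    out_free:
        while (j--)
            __free_page(sg_page(&sg[j]));
        kfree(sg);
        return NULL;
    }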
sg                346 drivers/target/target_core_sbc.c 	struct scatterlist *sg;
sg                385 drivers/target/target_core_sbc.c 	for_each_sg(cmd->t_bidi_data_sg, sg, cmd->t_bidi_data_nents, count) {
sg                386 drivers/target/target_core_sbc.c 		addr = kmap_atomic(sg_page(sg));
sg                392 drivers/target/target_core_sbc.c 		for (i = 0; i < sg->length; i++)
sg                393 drivers/target/target_core_sbc.c 			*(addr + sg->offset + i) ^= *(buf + offset + i);
sg                395 drivers/target/target_core_sbc.c 		offset += sg->length;
sg                442 drivers/target/target_core_sbc.c 	struct scatterlist *write_sg, *sg;
sg                502 drivers/target/target_core_sbc.c 	for_each_sg(cmd->t_bidi_data_sg, sg, cmd->t_bidi_data_nents, i) {
sg                503 drivers/target/target_core_sbc.c 		addr = (unsigned char *)kmap_atomic(sg_page(sg));
sg                509 drivers/target/target_core_sbc.c 		len = min(sg->length, compare_len);
sg                536 drivers/target/target_core_sbc.c 				    m.piter.sg->offset + block_size);
sg                540 drivers/target/target_core_sbc.c 				    m.piter.sg->offset);
sg               1331 drivers/target/target_core_sbc.c 		       struct scatterlist *sg, int sg_off)
sg               1339 drivers/target/target_core_sbc.c 	if (!sg)
sg               1350 drivers/target/target_core_sbc.c 			len = min(psg_len, sg->length - offset);
sg               1351 drivers/target/target_core_sbc.c 			addr = kmap_atomic(sg_page(sg)) + sg->offset + offset;
sg               1363 drivers/target/target_core_sbc.c 			kunmap_atomic(addr - sg->offset - offset);
sg               1365 drivers/target/target_core_sbc.c 			if (offset >= sg->length) {
sg               1366 drivers/target/target_core_sbc.c 				sg = sg_next(sg);
sg               2466 drivers/target/target_core_transport.c 	struct scatterlist *sg = cmd->t_data_sg;
sg               2478 drivers/target/target_core_transport.c 	BUG_ON(!sg);
sg               2480 drivers/target/target_core_transport.c 		return kmap(sg_page(sg)) + sg->offset;
sg               2488 drivers/target/target_core_transport.c 	for_each_sg(cmd->t_data_sg, sg, cmd->t_data_nents, i) {
sg               2489 drivers/target/target_core_transport.c 		pages[i] = sg_page(sg);
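transport_kmap_data_sg (the transport.c lines above) special-cases a single-entry list with a plain kmap() and otherwise gathers the sg pages and maps them contiguously. A sketch of the multi-entry half, assuming the entries are full, page-aligned pages as the target core arranges (map_sgl_contig is illustrative):

    #include <linux/scatterlist.h>
    #include <linux/slab.h>
    #include <linux/vmalloc.h>

    /* Sketch: map a multi-entry sgl into one contiguous kernel VA range. */
    static void *map_sgl_contig(struct scatterlist *sgl, unsigned int nents)
    {
        struct scatterlist *sg;
        struct page **pages;
        void *vaddr;
        unsigned int i;

        pages = kmalloc_array(nents, sizeof(*pages), GFP_KERNEL);
        if (!pages)
            return NULL;
        for_each_sg(sgl, sg, nents, i)
            pages[i] = sg_page(sg);

        vaddr = vmap(pages, nents, VM_MAP, PAGE_KERNEL);
        kfree(pages);  /* the array is only needed while vmap() runs */
        return vaddr;  /* caller unmaps with vunmap() */
    }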
sg                671 drivers/target/target_core_user.c 	struct scatterlist *sg;
sg                674 drivers/target/target_core_user.c 	for_each_sg(data_sg, sg, data_nents, i) {
sg                675 drivers/target/target_core_user.c 		int sg_remaining = sg->length;
sg                676 drivers/target/target_core_user.c 		from = kmap_atomic(sg_page(sg)) + sg->offset;
sg                723 drivers/target/target_core_user.c 				       from + sg->length - sg_remaining,
sg                731 drivers/target/target_core_user.c 		kunmap_atomic(from - sg->offset);
sg                746 drivers/target/target_core_user.c 	struct scatterlist *sg, *data_sg;
sg                769 drivers/target/target_core_user.c 	for_each_sg(data_sg, sg, data_nents, i) {
sg                770 drivers/target/target_core_user.c 		int sg_remaining = sg->length;
sg                771 drivers/target/target_core_user.c 		to = kmap_atomic(sg_page(sg)) + sg->offset;
sg                788 drivers/target/target_core_user.c 			memcpy(to + sg->length - sg_remaining, from + offset,
sg                795 drivers/target/target_core_user.c 		kunmap_atomic(to - sg->offset);
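target_core_user streams between the command sgl and its ring with kmap_atomic(); the unmap must be given the mapping base, hence the `from - sg->offset` above. A reduced sketch of the outbound direction, assuming each entry fits within one page (kmap_atomic maps exactly one page):

    #include <linux/highmem.h>
    #include <linux/scatterlist.h>

    /* Sketch: copy an sgl out to a flat buffer, one atomic map at a time. */
    static void sgl_copy_out(struct scatterlist *sgl, unsigned int nents,
                             u8 *dst)
    {
        struct scatterlist *sg;
        unsigned int i;

        for_each_sg(sgl, sg, nents, i) {
            void *from = kmap_atomic(sg_page(sg)) + sg->offset;

            memcpy(dst, from, sg->length);
            dst += sg->length;
            kunmap_atomic(from - sg->offset);  /* unmap the page base */
        }
    }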
sg                117 drivers/target/tcm_fc/tcm_fc.h 	struct scatterlist *sg;		/* Set only if DDP is setup */
sg                 37 drivers/target/tcm_fc/tfc_cmd.c 	struct scatterlist *sg;
sg                 48 drivers/target/tcm_fc/tfc_cmd.c 	for_each_sg(se_cmd->t_data_sg, sg, se_cmd->t_data_nents, count)
sg                 51 drivers/target/tcm_fc/tfc_cmd.c 			caller, cmd, sg,
sg                 52 drivers/target/tcm_fc/tfc_cmd.c 			sg_page(sg), sg->length, sg->offset);
sg                 48 drivers/target/tcm_fc/tfc_io.c 	struct scatterlist *sg = NULL;
sg                 82 drivers/target/tcm_fc/tfc_io.c 		sg = se_cmd->t_data_sg;
sg                 83 drivers/target/tcm_fc/tfc_io.c 		mem_len = sg->length;
sg                 84 drivers/target/tcm_fc/tfc_io.c 		mem_off = sg->offset;
sg                 85 drivers/target/tcm_fc/tfc_io.c 		page = sg_page(sg);
sg                100 drivers/target/tcm_fc/tfc_io.c 			sg = sg_next(sg);
sg                101 drivers/target/tcm_fc/tfc_io.c 			mem_len = min((size_t)sg->length, remaining);
sg                102 drivers/target/tcm_fc/tfc_io.c 			mem_off = sg->offset;
sg                103 drivers/target/tcm_fc/tfc_io.c 			page = sg_page(sg);
sg                203 drivers/target/tcm_fc/tfc_io.c 	struct scatterlist *sg = NULL;
sg                278 drivers/target/tcm_fc/tfc_io.c 		sg = se_cmd->t_data_sg;
sg                279 drivers/target/tcm_fc/tfc_io.c 		mem_len = sg->length;
sg                280 drivers/target/tcm_fc/tfc_io.c 		mem_off = sg->offset;
sg                281 drivers/target/tcm_fc/tfc_io.c 		page = sg_page(sg);
sg                286 drivers/target/tcm_fc/tfc_io.c 			sg = sg_next(sg);
sg                287 drivers/target/tcm_fc/tfc_io.c 			mem_len = sg->length;
sg                288 drivers/target/tcm_fc/tfc_io.c 			mem_off = sg->offset;
sg                289 drivers/target/tcm_fc/tfc_io.c 			page = sg_page(sg);
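tfc_io consumes the list with an explicit cursor instead of for_each_sg(): (page, mem_off, mem_len) describe the current position, and sg_next() refreshes the cursor whenever an entry is exhausted, which suits transmission in arbitrary frame sizes. Sketch (send_in_frames and the queueing step are illustrative):

    #include <linux/kernel.h>
    #include <linux/scatterlist.h>

    /* Sketch: hand an sgl to a fabric in at-most-frame_size chunks. */
    static void send_in_frames(struct scatterlist *sgl, size_t remaining,
                               size_t frame_size)
    {
        struct scatterlist *sg = sgl;
        size_t mem_len = min_t(size_t, sg->length, remaining);
        unsigned int mem_off = sg->offset;
        struct page *page = sg_page(sg);

        while (remaining) {
            size_t tlen;

            if (!mem_len) {  /* current entry fully consumed */
                sg = sg_next(sg);
                mem_len = min_t(size_t, sg->length, remaining);
                mem_off = sg->offset;
                page = sg_page(sg);
            }
            tlen = min3(mem_len, remaining, frame_size);
            /* ... queue (page, mem_off, tlen) for transmission ... */
            remaining -= tlen;
            mem_len -= tlen;
            mem_off += tlen;
        }
    }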
sg                305 drivers/thermal/tegra/soctherm.c 	const struct tegra_tsensor_group *sg;
sg                430 drivers/thermal/tegra/soctherm.c 	val = REG_GET_MASK(val, zone->sg->sensor_temp_mask);
sg                474 drivers/thermal/tegra/soctherm.c 			     const struct tegra_tsensor_group *sg,
sg                481 drivers/thermal/tegra/soctherm.c 	if (!sg || !sg->thermtrip_threshold_mask)
sg                487 drivers/thermal/tegra/soctherm.c 	r = REG_SET_MASK(r, sg->thermtrip_threshold_mask, temp);
sg                488 drivers/thermal/tegra/soctherm.c 	r = REG_SET_MASK(r, sg->thermtrip_enable_mask, 1);
sg                489 drivers/thermal/tegra/soctherm.c 	r = REG_SET_MASK(r, sg->thermtrip_any_en_mask, 0);
sg                513 drivers/thermal/tegra/soctherm.c 			     const struct tegra_tsensor_group *sg,
sg                522 drivers/thermal/tegra/soctherm.c 	if (!sg || !stc || !stc->init)
sg                529 drivers/thermal/tegra/soctherm.c 	reg_off = THERMCTL_LVL_REG(sg->thermctl_lvl0_offset, throt + 1);
sg                544 drivers/thermal/tegra/soctherm.c 	r = REG_SET_MASK(r, sg->thermctl_lvl0_up_thresh_mask, temp);
sg                545 drivers/thermal/tegra/soctherm.c 	r = REG_SET_MASK(r, sg->thermctl_lvl0_dn_thresh_mask, temp);
sg                589 drivers/thermal/tegra/soctherm.c 	const struct tegra_tsensor_group *sg = zone->sg;
sg                607 drivers/thermal/tegra/soctherm.c 		if (min_low_temp == tsensor_group_thermtrip_get(ts, sg->id))
sg                608 drivers/thermal/tegra/soctherm.c 			return thermtrip_program(dev, sg, temp);
sg                628 drivers/thermal/tegra/soctherm.c 			return throttrip_program(dev, sg, stc, temp);
sg                673 drivers/thermal/tegra/soctherm.c 	r = REG_SET_MASK(r, zn->sg->thermctl_isr_mask, TH_INTR_UP_DN_EN);
sg                685 drivers/thermal/tegra/soctherm.c 	r = REG_SET_MASK(r, zn->sg->thermctl_isr_mask, 0);
sg                697 drivers/thermal/tegra/soctherm.c 	r = readl(zone->ts->regs + zone->sg->thermctl_lvl0_offset);
sg                699 drivers/thermal/tegra/soctherm.c 	writel(r, zone->ts->regs + zone->sg->thermctl_lvl0_offset);
sg                705 drivers/thermal/tegra/soctherm.c 	r = REG_SET_MASK(r, zone->sg->thermctl_lvl0_up_thresh_mask, hi);
sg                706 drivers/thermal/tegra/soctherm.c 	r = REG_SET_MASK(r, zone->sg->thermctl_lvl0_dn_thresh_mask, lo);
sg                708 drivers/thermal/tegra/soctherm.c 	writel(r, zone->ts->regs + zone->sg->thermctl_lvl0_offset);
sg                770 drivers/thermal/tegra/soctherm.c 				      const struct tegra_tsensor_group *sg,
sg                778 drivers/thermal/tegra/soctherm.c 	temperature = tsensor_group_thermtrip_get(ts, sg->id);
sg                783 drivers/thermal/tegra/soctherm.c 	ret = thermtrip_program(dev, sg, temperature);
sg                785 drivers/thermal/tegra/soctherm.c 		dev_err(dev, "thermtrip: %s: error during enable\n", sg->name);
sg                790 drivers/thermal/tegra/soctherm.c 		 sg->name, temperature);
sg                795 drivers/thermal/tegra/soctherm.c 			 sg->name);
sg                811 drivers/thermal/tegra/soctherm.c 		ret = throttrip_program(dev, sg, stc, temperature);
sg                814 drivers/thermal/tegra/soctherm.c 				sg->name);
sg                820 drivers/thermal/tegra/soctherm.c 			 sg->name, temperature);
sg                826 drivers/thermal/tegra/soctherm.c 			 sg->name);
sg               2222 drivers/thermal/tegra/soctherm.c 		zone->sg = soc->ttgs[i];
sg                231 drivers/tty/serial/amba-pl011.c 	struct scatterlist sg;
sg                253 drivers/tty/serial/amba-pl011.c 	struct scatterlist	sg;
sg                371 drivers/tty/serial/amba-pl011.c static int pl011_sgbuf_init(struct dma_chan *chan, struct pl011_sgbuf *sg,
sg                376 drivers/tty/serial/amba-pl011.c 	sg->buf = dma_alloc_coherent(chan->device->dev,
sg                378 drivers/tty/serial/amba-pl011.c 	if (!sg->buf)
sg                381 drivers/tty/serial/amba-pl011.c 	sg_init_table(&sg->sg, 1);
sg                382 drivers/tty/serial/amba-pl011.c 	sg_set_page(&sg->sg, phys_to_page(dma_addr),
sg                384 drivers/tty/serial/amba-pl011.c 	sg_dma_address(&sg->sg) = dma_addr;
sg                385 drivers/tty/serial/amba-pl011.c 	sg_dma_len(&sg->sg) = PL011_DMA_BUFFER_SIZE;
sg                390 drivers/tty/serial/amba-pl011.c static void pl011_sgbuf_free(struct dma_chan *chan, struct pl011_sgbuf *sg,
sg                393 drivers/tty/serial/amba-pl011.c 	if (sg->buf) {
sg                395 drivers/tty/serial/amba-pl011.c 			PL011_DMA_BUFFER_SIZE, sg->buf,
sg                396 drivers/tty/serial/amba-pl011.c 			sg_dma_address(&sg->sg));
sg                557 drivers/tty/serial/amba-pl011.c 		dma_unmap_sg(dmatx->chan->device->dev, &dmatx->sg, 1,
sg                644 drivers/tty/serial/amba-pl011.c 	dmatx->sg.length = count;
sg                646 drivers/tty/serial/amba-pl011.c 	if (dma_map_sg(dma_dev->dev, &dmatx->sg, 1, DMA_TO_DEVICE) != 1) {
sg                652 drivers/tty/serial/amba-pl011.c 	desc = dmaengine_prep_slave_sg(chan, &dmatx->sg, 1, DMA_MEM_TO_DEV,
sg                655 drivers/tty/serial/amba-pl011.c 		dma_unmap_sg(dma_dev->dev, &dmatx->sg, 1, DMA_TO_DEVICE);
sg                819 drivers/tty/serial/amba-pl011.c 		dma_unmap_sg(uap->dmatx.chan->device->dev, &uap->dmatx.sg, 1,
sg                842 drivers/tty/serial/amba-pl011.c 	desc = dmaengine_prep_slave_sg(rxchan, &sgbuf->sg, 1,
sg                892 drivers/tty/serial/amba-pl011.c 		dmataken = sgbuf->sg.length - dmarx->last_residue;
sg                917 drivers/tty/serial/amba-pl011.c 		dmarx->last_residue = sgbuf->sg.length;
sg                977 drivers/tty/serial/amba-pl011.c 	pending = sgbuf->sg.length - state.residue;
sg               1023 drivers/tty/serial/amba-pl011.c 	pending = sgbuf->sg.length - state.residue;
sg               1079 drivers/tty/serial/amba-pl011.c 		dmataken = sgbuf->sg.length - dmarx->last_residue;
sg               1128 drivers/tty/serial/amba-pl011.c 	sg_init_one(&uap->dmatx.sg, uap->dmatx.buf, PL011_DMA_BUFFER_SIZE);
sg               1205 drivers/tty/serial/amba-pl011.c 			dma_unmap_sg(uap->dmatx.chan->device->dev, &uap->dmatx.sg, 1,
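The pl011 lines are the standard dmaengine TX sequence for a single buffer: describe it with a one-entry scatterlist, dma_map_sg(), dmaengine_prep_slave_sg(), submit, and unmap on failure (or later, on completion). A sketch assuming the channel was already configured with dmaengine_slave_config(); the scatterlist is passed in rather than stack-allocated because, as in pl011's own state struct, it must stay valid until the transfer completes:

    #include <linux/dmaengine.h>
    #include <linux/scatterlist.h>

    /* Sketch: submit one kernel buffer to a slave DMA channel (TX). */
    static int dma_tx_one(struct dma_chan *chan, struct scatterlist *sg,
                          void *buf, size_t len)
    {
        struct device *dev = chan->device->dev;
        struct dma_async_tx_descriptor *desc;

        sg_init_one(sg, buf, len);
        if (dma_map_sg(dev, sg, 1, DMA_TO_DEVICE) != 1)
            return -EBUSY;

        desc = dmaengine_prep_slave_sg(chan, sg, 1, DMA_MEM_TO_DEV,
                                       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
        if (!desc) {
            dma_unmap_sg(dev, sg, 1, DMA_TO_DEVICE);
            return -EBUSY;
        }
        /* set desc->callback here if completion notification matters */
        dmaengine_submit(desc);
        dma_async_issue_pending(chan);
        return 0;
    }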
sg                933 drivers/tty/serial/atmel_serial.c 	struct scatterlist sgl[2], *sg, *sg_tx = &atmel_port->sg_tx;
sg                969 drivers/tty/serial/atmel_serial.c 			sg = &sgl[sg_len++];
sg                970 drivers/tty/serial/atmel_serial.c 			sg_dma_address(sg) = phys_addr;
sg                971 drivers/tty/serial/atmel_serial.c 			sg_dma_len(sg) = part1_len;
sg                977 drivers/tty/serial/atmel_serial.c 			sg = &sgl[sg_len++];
sg                978 drivers/tty/serial/atmel_serial.c 			sg_dma_address(sg) = phys_addr;
sg                979 drivers/tty/serial/atmel_serial.c 			sg_dma_len(sg) = part2_len;
sg                782 drivers/tty/serial/pch_uart.c 	struct scatterlist *sg = priv->sg_tx_p;
sg                785 drivers/tty/serial/pch_uart.c 	for (i = 0; i < priv->nent; i++, sg++) {
sg                786 drivers/tty/serial/pch_uart.c 		xmit->tail += sg_dma_len(sg);
sg                787 drivers/tty/serial/pch_uart.c 		port->icount.tx += sg_dma_len(sg);
sg                791 drivers/tty/serial/pch_uart.c 	dma_unmap_sg(port->dev, sg, priv->orig_nent, DMA_TO_DEVICE);
sg                854 drivers/tty/serial/pch_uart.c 	struct scatterlist *sg;
sg                857 drivers/tty/serial/pch_uart.c 	sg = &priv->sg_rx;
sg                861 drivers/tty/serial/pch_uart.c 	sg_dma_len(sg) = priv->trigger_level;
sg                864 drivers/tty/serial/pch_uart.c 		     sg_dma_len(sg), offset_in_page(priv->rx_buf_virt));
sg                866 drivers/tty/serial/pch_uart.c 	sg_dma_address(sg) = priv->rx_buf_dma;
sg                869 drivers/tty/serial/pch_uart.c 			sg, 1, DMA_DEV_TO_MEM,
sg                933 drivers/tty/serial/pch_uart.c 	struct scatterlist *sg;
sg                998 drivers/tty/serial/pch_uart.c 	sg = priv->sg_tx_p;
sg               1000 drivers/tty/serial/pch_uart.c 	for (i = 0; i < num; i++, sg++) {
sg               1002 drivers/tty/serial/pch_uart.c 			sg_set_page(sg, virt_to_page(xmit->buf),
sg               1005 drivers/tty/serial/pch_uart.c 			sg_set_page(sg, virt_to_page(xmit->buf),
sg               1009 drivers/tty/serial/pch_uart.c 	sg = priv->sg_tx_p;
sg               1010 drivers/tty/serial/pch_uart.c 	nent = dma_map_sg(port->dev, sg, num, DMA_TO_DEVICE);
sg               1018 drivers/tty/serial/pch_uart.c 	for (i = 0; i < nent; i++, sg++) {
sg               1019 drivers/tty/serial/pch_uart.c 		sg->offset = (xmit->tail & (UART_XMIT_SIZE - 1)) +
sg               1021 drivers/tty/serial/pch_uart.c 		sg_dma_address(sg) = (sg_dma_address(sg) &
sg               1022 drivers/tty/serial/pch_uart.c 				    ~(UART_XMIT_SIZE - 1)) + sg->offset;
sg               1024 drivers/tty/serial/pch_uart.c 			sg_dma_len(sg) = rem;
sg               1026 drivers/tty/serial/pch_uart.c 			sg_dma_len(sg) = size;
sg               1362 drivers/tty/serial/sh-sci.c 		struct scatterlist *sg = &s->sg_rx[i];
sg               1366 drivers/tty/serial/sh-sci.c 			sg, 1, DMA_DEV_TO_MEM,
sg               1631 drivers/tty/serial/sh-sci.c 			struct scatterlist *sg = &s->sg_rx[i];
sg               1633 drivers/tty/serial/sh-sci.c 			sg_init_table(sg, 1);
sg               1635 drivers/tty/serial/sh-sci.c 			sg_dma_address(sg) = dma;
sg               1636 drivers/tty/serial/sh-sci.c 			sg_dma_len(sg) = s->buf_len_rx;
sg                903 drivers/usb/cdns3/gadget.c 				? trb_dma : request->sg[sg_iter].dma_address);
sg                908 drivers/usb/cdns3/gadget.c 			length = request->sg[sg_iter].length;
sg                401 drivers/usb/core/devio.c 		if (sg_page(&as->urb->sg[i]))
sg                402 drivers/usb/core/devio.c 			kfree(sg_virt(&as->urb->sg[i]));
sg                405 drivers/usb/core/devio.c 	kfree(as->urb->sg);
sg                528 drivers/usb/core/devio.c 			sg_virt(&urb->sg[i]), size, 1);
sg                550 drivers/usb/core/devio.c 		if (copy_to_user(userbuffer, sg_virt(&urb->sg[i]), size))
sg               1692 drivers/usb/core/devio.c 		as->urb->sg = kmalloc_array(num_sgs,
sg               1695 drivers/usb/core/devio.c 		if (!as->urb->sg) {
sg               1700 drivers/usb/core/devio.c 		sg_init_table(as->urb->sg, as->urb->num_sgs);
sg               1710 drivers/usb/core/devio.c 			sg_set_buf(&as->urb->sg[i], buf, u);
sg               1360 drivers/usb/core/hcd.c 				urb->sg,
sg               1467 drivers/usb/core/hcd.c 						urb->sg,
sg               1478 drivers/usb/core/hcd.c 			} else if (urb->sg) {
sg               1479 drivers/usb/core/hcd.c 				struct scatterlist *sg = urb->sg;
sg               1482 drivers/usb/core/hcd.c 						sg_page(sg),
sg               1483 drivers/usb/core/hcd.c 						sg->offset,
sg                368 drivers/usb/core/message.c 		unsigned pipe, unsigned	period, struct scatterlist *sg,
sg                375 drivers/usb/core/message.c 	if (!io || !dev || !sg
sg                402 drivers/usb/core/message.c 	for_each_sg(sg, sg, io->entries, i) {
sg                419 drivers/usb/core/message.c 		urb->sg = sg;
sg                432 drivers/usb/core/message.c 				for_each_sg(sg, sg2, nents, j)
sg                441 drivers/usb/core/message.c 			if (!PageHighMem(sg_page(sg)))
sg                442 drivers/usb/core/message.c 				urb->transfer_buffer = sg_virt(sg);
sg                446 drivers/usb/core/message.c 			len = sg->length;
sg                459 drivers/usb/core/urb.c 		struct scatterlist *sg;
sg                462 drivers/usb/core/urb.c 		for_each_sg(urb->sg, sg, urb->num_sgs - 1, i)
sg                463 drivers/usb/core/urb.c 			if (sg->length % max)
sg                843 drivers/usb/dwc2/gadget.c 	struct scatterlist *sg;
sg                858 drivers/usb/dwc2/gadget.c 	for_each_sg(ureq->sg, sg, ureq->num_sgs, i) {
sg                860 drivers/usb/dwc2/gadget.c 			sg_dma_address(sg) + sg->offset, sg_dma_len(sg),
sg                861 drivers/usb/dwc2/gadget.c 			sg_is_last(sg));
sg                983 drivers/usb/dwc2/gadget.c 			dma_addr = sg_dma_address(hs_req->req.sg);
sg               1446 drivers/usb/dwc2/gadget.c 				dma_addr = sg_dma_address(hs_req->req.sg);
sg               2507 drivers/usb/dwc2/hcd.c 	if (urb->num_sgs || urb->sg ||
sg                883 drivers/usb/dwc3/core.h 	struct scatterlist	*sg;
sg               1058 drivers/usb/dwc3/gadget.c 	struct scatterlist *sg = req->start_sg;
sg               1065 drivers/usb/dwc3/gadget.c 	for_each_sg(sg, s, remaining, i) {
sg               1205 drivers/usb/dwc3/gadget.c 		req->sg			= req->request.sg;
sg               1206 drivers/usb/dwc3/gadget.c 		req->start_sg		= req->sg;
sg               2474 drivers/usb/dwc3/gadget.c 	struct scatterlist *sg = req->sg;
sg               2480 drivers/usb/dwc3/gadget.c 	for_each_sg(sg, s, pending, i) {
sg               2483 drivers/usb/dwc3/gadget.c 		req->sg = sg_next(s);
sg               1061 drivers/usb/gadget/function/f_fs.c 			req->sg	= io_data->sgt.sgl;
sg               1105 drivers/usb/gadget/function/f_fs.c 			req->sg	= io_data->sgt.sgl;
sg                229 drivers/usb/gadget/function/f_tcm.c 		fu->bot_req_in->sg = se_cmd->t_data_sg;
sg                268 drivers/usb/gadget/function/f_tcm.c 		fu->bot_req_out->sg = se_cmd->t_data_sg;
sg                531 drivers/usb/gadget/function/f_tcm.c 		stream->req_in->sg = se_cmd->t_data_sg;
sg                991 drivers/usb/gadget/function/f_tcm.c 		req->sg = se_cmd->t_data_sg;
sg                823 drivers/usb/gadget/udc/core.c 		mapped = dma_map_sg(dev, req->sg, req->num_sgs,
sg                869 drivers/usb/gadget/udc/core.c 		dma_unmap_sg(dev, req->sg, req->num_sgs,
sg               1344 drivers/usb/gadget/udc/dummy_hcd.c 		sg_miter_start(miter, urb->sg, urb->num_sgs, flags);
sg                587 drivers/usb/host/ehci-q.c 	struct scatterlist	*sg;
sg                630 drivers/usb/host/ehci-q.c 		sg = urb->sg;
sg                631 drivers/usb/host/ehci-q.c 		buf = sg_dma_address(sg);
sg                636 drivers/usb/host/ehci-q.c 		this_sg_len = min_t(int, sg_dma_len(sg), len);
sg                638 drivers/usb/host/ehci-q.c 		sg = NULL;
sg                678 drivers/usb/host/ehci-q.c 			sg = sg_next(sg);
sg                679 drivers/usb/host/ehci-q.c 			buf = sg_dma_address(sg);
sg                680 drivers/usb/host/ehci-q.c 			this_sg_len = min_t(int, sg_dma_len(sg), len);
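ehci-q (like the fotg210, ohci, and uhci entries that follow) consumes an urb->sg that the USB core has already DMA-mapped: each transfer descriptor takes sg_dma_address()/sg_dma_len() clipped against the bytes still to move, and sg_next() advances once an entry is spent. The shape of that walk, with the descriptor programming left abstract:

    #include <linux/scatterlist.h>

    /* Sketch: walk a DMA-mapped sgl, emitting (dma_addr, chunk) pairs. */
    static void walk_mapped_sgl(struct scatterlist *sg, int len)
    {
        dma_addr_t buf = sg_dma_address(sg);
        int this_sg_len = min_t(int, sg_dma_len(sg), len);

        for (;;) {
            /* ... program one descriptor for (buf, this_sg_len) ... */
            len -= this_sg_len;
            if (len <= 0)
                break;
            sg = sg_next(sg);
            buf = sg_dma_address(sg);
            this_sg_len = min_t(int, sg_dma_len(sg), len);
        }
    }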
sg                320 drivers/usb/host/ehci-tegra.c 	if (urb->num_sgs || urb->sg ||
sg               2544 drivers/usb/host/fotg210-hcd.c 	struct scatterlist *sg;
sg               2587 drivers/usb/host/fotg210-hcd.c 		sg = urb->sg;
sg               2588 drivers/usb/host/fotg210-hcd.c 		buf = sg_dma_address(sg);
sg               2593 drivers/usb/host/fotg210-hcd.c 		this_sg_len = min_t(int, sg_dma_len(sg), len);
sg               2595 drivers/usb/host/fotg210-hcd.c 		sg = NULL;
sg               2635 drivers/usb/host/fotg210-hcd.c 			sg = sg_next(sg);
sg               2636 drivers/usb/host/fotg210-hcd.c 			buf = sg_dma_address(sg);
sg               2637 drivers/usb/host/fotg210-hcd.c 			this_sg_len = min_t(int, sg_dma_len(sg), len);
sg                120 drivers/usb/host/ohci-hcd.c 	struct scatterlist	*sg;
sg                127 drivers/usb/host/ohci-hcd.c 		sg = urb->sg;
sg                129 drivers/usb/host/ohci-hcd.c 			this_sg_len = min_t(int, sg_dma_len(sg), len);
sg                134 drivers/usb/host/ohci-hcd.c 			sg = sg_next(sg);
sg                604 drivers/usb/host/ohci-q.c 	struct scatterlist	*sg;
sg                620 drivers/usb/host/ohci-q.c 		sg = urb->sg;
sg                621 drivers/usb/host/ohci-q.c 		data = sg_dma_address(sg);
sg                627 drivers/usb/host/ohci-q.c 		this_sg_len = min_t(int, sg_dma_len(sg), data_len);
sg                629 drivers/usb/host/ohci-q.c 		sg = NULL;
sg                673 drivers/usb/host/ohci-q.c 				sg = sg_next(sg);
sg                674 drivers/usb/host/ohci-q.c 				data = sg_dma_address(sg);
sg                675 drivers/usb/host/ohci-q.c 				this_sg_len = min_t(int, sg_dma_len(sg),
sg                927 drivers/usb/host/uhci-q.c 	struct scatterlist  *sg;
sg                947 drivers/usb/host/uhci-q.c 		sg = urb->sg;
sg                948 drivers/usb/host/uhci-q.c 		data = sg_dma_address(sg);
sg                953 drivers/usb/host/uhci-q.c 		this_sg_len = min_t(int, sg_dma_len(sg), len);
sg                955 drivers/usb/host/uhci-q.c 		sg = NULL;
sg                994 drivers/usb/host/uhci-q.c 			sg = sg_next(sg);
sg                995 drivers/usb/host/uhci-q.c 			data = sg_dma_address(sg);
sg                996 drivers/usb/host/uhci-q.c 			this_sg_len = min_t(int, sg_dma_len(sg), len);
sg                698 drivers/usb/host/xhci-ring.c 	len = sg_pcopy_from_buffer(urb->sg, urb->num_sgs, seg->bounce_buf,
sg               3077 drivers/usb/host/xhci-ring.c 	struct scatterlist *sg;
sg               3082 drivers/usb/host/xhci-ring.c 	for_each_sg(urb->sg, sg, urb->num_mapped_sgs, i) {
sg               3083 drivers/usb/host/xhci-ring.c 		len = sg_dma_len(sg);
sg               3084 drivers/usb/host/xhci-ring.c 		num_trbs += count_trbs(sg_dma_address(sg), len);
sg               3264 drivers/usb/host/xhci-ring.c 		len = sg_pcopy_to_buffer(urb->sg, urb->num_sgs,
sg               3299 drivers/usb/host/xhci-ring.c 	struct scatterlist *sg = NULL;
sg               3318 drivers/usb/host/xhci-ring.c 		sg = urb->sg;
sg               3319 drivers/usb/host/xhci-ring.c 		addr = (u64) sg_dma_address(sg);
sg               3320 drivers/usb/host/xhci-ring.c 		block_len = sg_dma_len(sg);
sg               3420 drivers/usb/host/xhci-ring.c 		while (sg && sent_len >= block_len) {
sg               3424 drivers/usb/host/xhci-ring.c 			sg = sg_next(sg);
sg               3425 drivers/usb/host/xhci-ring.c 			if (num_sgs != 0 && sg) {
sg               3426 drivers/usb/host/xhci-ring.c 				block_len = sg_dma_len(sg);
sg               3427 drivers/usb/host/xhci-ring.c 				addr = (u64) sg_dma_address(sg);
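xhci's bounce path relies on sg_pcopy_to_buffer()/sg_pcopy_from_buffer(), which copy a byte range starting at an arbitrary offset within the list; that is exactly what is needed when only an unaligned tail segment has to be staged. A sketch of both directions (the bounce_* wrappers are illustrative):

    #include <linux/scatterlist.h>

    /* Sketch: OUT transfer, gather payload from the sgl into a bounce buf. */
    static size_t bounce_gather(struct scatterlist *sgl, unsigned int nents,
                                void *bounce, size_t off, size_t len)
    {
        return sg_pcopy_to_buffer(sgl, nents, bounce, len, off);
    }

    /* Sketch: IN transfer, scatter received bytes back into the sgl. */
    static size_t bounce_scatter(struct scatterlist *sgl, unsigned int nents,
                                 const void *bounce, size_t off, size_t len)
    {
        return sg_pcopy_from_buffer(sgl, nents, bounce, len, off);
    }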
sg                521 drivers/usb/misc/usbtest.c static void free_sglist(struct scatterlist *sg, int nents)
sg                525 drivers/usb/misc/usbtest.c 	if (!sg)
sg                528 drivers/usb/misc/usbtest.c 		if (!sg_page(&sg[i]))
sg                530 drivers/usb/misc/usbtest.c 		kfree(sg_virt(&sg[i]));
sg                532 drivers/usb/misc/usbtest.c 	kfree(sg);
sg                538 drivers/usb/misc/usbtest.c 	struct scatterlist	*sg;
sg                548 drivers/usb/misc/usbtest.c 	sg = kmalloc_array(nents, sizeof(*sg), GFP_KERNEL);
sg                549 drivers/usb/misc/usbtest.c 	if (!sg)
sg                551 drivers/usb/misc/usbtest.c 	sg_init_table(sg, nents);
sg                559 drivers/usb/misc/usbtest.c 			free_sglist(sg, i);
sg                564 drivers/usb/misc/usbtest.c 		sg_set_buf(&sg[i], buf, size);
sg                585 drivers/usb/misc/usbtest.c 	return sg;
sg                605 drivers/usb/misc/usbtest.c 	struct scatterlist	*sg,
sg                622 drivers/usb/misc/usbtest.c 				sg, nents, 0, GFP_KERNEL);
sg               2177 drivers/usb/misc/usbtest.c 	struct scatterlist	*sg;
sg               2274 drivers/usb/misc/usbtest.c 		sg = alloc_sglist(param->sglen, param->length,
sg               2276 drivers/usb/misc/usbtest.c 		if (!sg) {
sg               2282 drivers/usb/misc/usbtest.c 				&req, sg, param->sglen);
sg               2283 drivers/usb/misc/usbtest.c 		free_sglist(sg, param->sglen);
sg               2293 drivers/usb/misc/usbtest.c 		sg = alloc_sglist(param->sglen, param->length,
sg               2295 drivers/usb/misc/usbtest.c 		if (!sg) {
sg               2301 drivers/usb/misc/usbtest.c 				&req, sg, param->sglen);
sg               2302 drivers/usb/misc/usbtest.c 		free_sglist(sg, param->sglen);
sg               2311 drivers/usb/misc/usbtest.c 		sg = alloc_sglist(param->sglen, param->length,
sg               2313 drivers/usb/misc/usbtest.c 		if (!sg) {
sg               2319 drivers/usb/misc/usbtest.c 				&req, sg, param->sglen);
sg               2320 drivers/usb/misc/usbtest.c 		free_sglist(sg, param->sglen);
sg               2329 drivers/usb/misc/usbtest.c 		sg = alloc_sglist(param->sglen, param->length,
sg               2331 drivers/usb/misc/usbtest.c 		if (!sg) {
sg               2337 drivers/usb/misc/usbtest.c 				&req, sg, param->sglen);
sg               2338 drivers/usb/misc/usbtest.c 		free_sglist(sg, param->sglen);
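usbtest builds disposable scatterlists over individually kmalloc'd buffers and frees them back through sg_virt(); this works only because kmalloc memory is lowmem and each entry is virtually contiguous. Sketch (alloc_kmalloc_sgl is illustrative):

    #include <linux/scatterlist.h>
    #include <linux/slab.h>

    /* Sketch: an sgl over nents kmalloc'd buffers of size bytes each. */
    static struct scatterlist *alloc_kmalloc_sgl(int nents, size_t size)
    {
        struct scatterlist *sg;
        int i;

        sg = kmalloc_array(nents, sizeof(*sg), GFP_KERNEL);
        if (!sg)
            return NULL;
        sg_init_table(sg, nents);

        for (i = 0; i < nents; i++) {
            void *buf = kzalloc(size, GFP_KERNEL);

            if (!buf) {
                while (i--)
                    kfree(sg_virt(&sg[i]));
                kfree(sg);
                return NULL;
            }
            sg_set_buf(&sg[i], buf, size);
        }
        return sg;
    }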
sg                408 drivers/usb/mon/mon_bin.c 	struct scatterlist *sg;
sg                428 drivers/usb/mon/mon_bin.c 		for_each_sg(urb->sg, sg, urb->num_sgs, i) {
sg                429 drivers/usb/mon/mon_bin.c 			if (length == 0 || PageHighMem(sg_page(sg)))
sg                431 drivers/usb/mon/mon_bin.c 			this_len = min_t(unsigned int, sg->length, length);
sg                432 drivers/usb/mon/mon_bin.c 			offset = mon_copy_to_buff(rp, offset, sg_virt(sg),
sg                168 drivers/usb/mon/mon_text.c 		struct scatterlist *sg = urb->sg;
sg                170 drivers/usb/mon/mon_text.c 		if (PageHighMem(sg_page(sg)))
sg                174 drivers/usb/mon/mon_text.c 		len = min_t(int, sg->length, len);
sg                175 drivers/usb/mon/mon_text.c 		src = sg_virt(sg);
sg                837 drivers/usb/musb/musb_host.c 				sg_miter_start(&qh->sg_miter, urb->sg, 1,
sg                847 drivers/usb/musb/musb_host.c 				buf = qh->sg_miter.addr + urb->sg->offset +
sg               1963 drivers/usb/musb/musb_host.c 				sg_miter_start(&qh->sg_miter, urb->sg, 1,
sg               2610 drivers/usb/musb/musb_host.c 	if (urb->num_sgs || urb->sg ||
sg                 79 drivers/usb/musb/ux500_dma.c 	struct scatterlist sg;
sg                 93 drivers/usb/musb/ux500_dma.c 	sg_init_table(&sg, 1);
sg                 94 drivers/usb/musb/ux500_dma.c 	sg_set_page(&sg, pfn_to_page(PFN_DOWN(dma_addr)), len,
sg                 96 drivers/usb/musb/ux500_dma.c 	sg_dma_address(&sg) = dma_addr;
sg                 97 drivers/usb/musb/ux500_dma.c 	sg_dma_len(&sg) = len;
sg                114 drivers/usb/musb/ux500_dma.c 	dma_desc = dmaengine_prep_slave_sg(dma_chan, &sg, 1, direction,
sg                917 drivers/usb/storage/alauda.c 	struct scatterlist *sg;
sg                940 drivers/usb/storage/alauda.c 	sg = NULL;
sg                987 drivers/usb/storage/alauda.c 				&sg, &offset, TO_XFER_BUF);
sg               1010 drivers/usb/storage/alauda.c 	struct scatterlist *sg;
sg               1042 drivers/usb/storage/alauda.c 	sg = NULL;
sg               1059 drivers/usb/storage/alauda.c 				&sg, &offset, FROM_XFER_BUF);
sg                146 drivers/usb/storage/datafab.c 	struct scatterlist *sg = NULL;
sg                203 drivers/usb/storage/datafab.c 				 &sg, &sg_offset, TO_XFER_BUF);
sg                230 drivers/usb/storage/datafab.c 	struct scatterlist *sg = NULL;
sg                266 drivers/usb/storage/datafab.c 				&sg, &sg_offset, FROM_XFER_BUF);
sg                640 drivers/usb/storage/ene_ub6250.c 	struct scatterlist *sg = NULL;
sg                669 drivers/usb/storage/ene_ub6250.c 	usb_stor_access_xfer_buf(buf, 8, srb, &sg, &offset, TO_XFER_BUF);
sg               1492 drivers/usb/storage/ene_ub6250.c 	struct scatterlist *sg = NULL;
sg               1516 drivers/usb/storage/ene_ub6250.c 	usb_stor_access_xfer_buf(buf, 8, srb, &sg, &offset, TO_XFER_BUF);
sg                331 drivers/usb/storage/isd200.c 	struct scatterlist sg;
sg                455 drivers/usb/storage/isd200.c 		sg_init_one(&info->sg, buff, bufflen);
sg                458 drivers/usb/storage/isd200.c 	srb->sdb.table.sgl = buff ? &info->sg : NULL;
sg                164 drivers/usb/storage/jumpshot.c 	struct scatterlist *sg = NULL;
sg                215 drivers/usb/storage/jumpshot.c 				 &sg, &sg_offset, TO_XFER_BUF);
sg                241 drivers/usb/storage/jumpshot.c 	struct scatterlist *sg = NULL;
sg                271 drivers/usb/storage/jumpshot.c 				&sg, &sg_offset, FROM_XFER_BUF);
sg                126 drivers/usb/storage/protocol.c 	struct scatterlist *sg = *sgptr;
sg                130 drivers/usb/storage/protocol.c 	if (sg)
sg                131 drivers/usb/storage/protocol.c 		nents = sg_nents(sg);
sg                133 drivers/usb/storage/protocol.c 		sg = scsi_sglist(srb);
sg                135 drivers/usb/storage/protocol.c 	sg_miter_start(&miter, sg, nents, dir == FROM_XFER_BUF ?
sg                150 drivers/usb/storage/protocol.c 		if (*offset + len < miter.piter.sg->length) {
sg                152 drivers/usb/storage/protocol.c 			*sgptr = miter.piter.sg;
sg                155 drivers/usb/storage/protocol.c 			*sgptr = sg_next(miter.piter.sg);
sg                173 drivers/usb/storage/protocol.c 	struct scatterlist *sg = NULL;
sg                176 drivers/usb/storage/protocol.c 	buflen = usb_stor_access_xfer_buf(buffer, buflen, srb, &sg, &offset,
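usb_stor_access_xfer_buf (the protocol.c lines) is built on the sg mapping iterator: sg_miter_start()/sg_miter_next() hide the per-page kmap and the advance to the next entry, exposing one mapped run at a time through miter.addr/miter.length. A sketch of a flat-buffer-to-sgl copy on top of it:

    #include <linux/scatterlist.h>
    #include <linux/string.h>

    /* Sketch: copy a flat buffer into an sgl via the mapping iterator. */
    static size_t copy_to_sgl(struct scatterlist *sgl, unsigned int nents,
                              const u8 *buf, size_t buflen)
    {
        struct sg_mapping_iter miter;
        size_t done = 0;

        sg_miter_start(&miter, sgl, nents, SG_MITER_TO_SG | SG_MITER_ATOMIC);
        while (done < buflen && sg_miter_next(&miter)) {
            size_t len = min(miter.length, buflen - done);

            memcpy(miter.addr, buf + done, len);
            done += len;
        }
        sg_miter_stop(&miter);
        return done;
    }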
sg                741 drivers/usb/storage/sddr09.c 	struct scatterlist *sg;
sg                765 drivers/usb/storage/sddr09.c 	sg = NULL;
sg                813 drivers/usb/storage/sddr09.c 				&sg, &offset, TO_XFER_BUF);
sg                968 drivers/usb/storage/sddr09.c 	struct scatterlist *sg;
sg               1009 drivers/usb/storage/sddr09.c 	sg = NULL;
sg               1028 drivers/usb/storage/sddr09.c 				&sg, &offset, FROM_XFER_BUF);
sg                203 drivers/usb/storage/sddr55.c 	struct scatterlist *sg;
sg                215 drivers/usb/storage/sddr55.c 	sg = NULL;
sg                291 drivers/usb/storage/sddr55.c 				&sg, &offset, TO_XFER_BUF);
sg                324 drivers/usb/storage/sddr55.c 	struct scatterlist *sg;
sg                342 drivers/usb/storage/sddr55.c 	sg = NULL;
sg                360 drivers/usb/storage/sddr55.c 				&sg, &offset, FROM_XFER_BUF);
sg               1127 drivers/usb/storage/shuttle_usbat.c 	struct scatterlist *sg = NULL;
sg               1181 drivers/usb/storage/shuttle_usbat.c 					 &sg, &sg_offset, TO_XFER_BUF);
sg               1218 drivers/usb/storage/shuttle_usbat.c 	struct scatterlist *sg = NULL;
sg               1257 drivers/usb/storage/shuttle_usbat.c 					 &sg, &sg_offset, FROM_XFER_BUF);
sg               1298 drivers/usb/storage/shuttle_usbat.c 	struct scatterlist *sg = NULL;
sg               1386 drivers/usb/storage/shuttle_usbat.c 				 &sg, &sg_offset, TO_XFER_BUF);
sg                412 drivers/usb/storage/transport.c 		struct scatterlist *sg, int num_sg, unsigned int length,
sg                424 drivers/usb/storage/transport.c 			sg, num_sg, length, GFP_NOIO);
sg               1186 drivers/usb/storage/transport.c 			struct scatterlist *sg = NULL;
sg               1190 drivers/usb/storage/transport.c 					US_BULK_CS_WRAP_LEN, srb, &sg,
sg                462 drivers/usb/storage/uas.c 	urb->sg = sdb->table.sgl;
sg                313 drivers/usb/usbip/stub_main.c 			sgl_free(urb->sg);
sg                314 drivers/usb/usbip/stub_main.c 			urb->sg = NULL;
sg                459 drivers/usb/usbip/stub_rx.c 	struct scatterlist *sgl = NULL, *sg;
sg                541 drivers/usb/usbip/stub_rx.c 				priv->urbs[0]->sg = sgl;
sg                559 drivers/usb/usbip/stub_rx.c 		for_each_sg(sgl, sg, nents, i) {
sg                568 drivers/usb/usbip/stub_rx.c 			priv->urbs[i]->transfer_buffer = sg_virt(sg);
sg                569 drivers/usb/usbip/stub_rx.c 			priv->urbs[i]->transfer_buffer_length = sg->length;
sg                165 drivers/usb/usbip/stub_tx.c 		struct scatterlist *sg;
sg                244 drivers/usb/usbip/stub_tx.c 				for_each_sg(urb->sg, sg, urb->num_sgs, i) {
sg                248 drivers/usb/usbip/stub_tx.c 					if (copy < sg->length)
sg                251 drivers/usb/usbip/stub_tx.c 						size = sg->length;
sg                253 drivers/usb/usbip/stub_tx.c 					iov[iovnum].iov_base = sg_virt(sg);
sg                683 drivers/usb/usbip/usbip_common.c 	struct scatterlist *sg;
sg                714 drivers/usb/usbip/usbip_common.c 		for_each_sg(urb->sg, sg, urb->num_sgs, i) {
sg                717 drivers/usb/usbip/usbip_common.c 			if (copy < sg->length)
sg                720 drivers/usb/usbip/usbip_common.c 				recv_size = sg->length;
sg                722 drivers/usb/usbip/usbip_common.c 			recv = usbip_recv(ud->tcp_socket, sg_virt(sg),
sg                 56 drivers/usb/usbip/vhci_tx.c 	struct scatterlist *sg;
sg                108 drivers/usb/usbip/vhci_tx.c 				for_each_sg(urb->sg, sg, urb->num_sgs, i) {
sg                109 drivers/usb/usbip/vhci_tx.c 					iov[iovnum].iov_base = sg_virt(sg);
sg                110 drivers/usb/usbip/vhci_tx.c 					iov[iovnum].iov_len = sg->length;
sg                576 drivers/vhost/scsi.c 	struct scatterlist *sg, *prot_sg;
sg                594 drivers/vhost/scsi.c 	sg = cmd->tvc_sgl;
sg                598 drivers/vhost/scsi.c 	cmd->tvc_sgl = sg;
sg                628 drivers/vhost/scsi.c 	struct scatterlist *sg = sgl;
sg                643 drivers/vhost/scsi.c 		sg_set_page(sg++, pages[npages++], n, offset);
sg                673 drivers/vhost/scsi.c 		      struct scatterlist *sg, int sg_count)
sg                675 drivers/vhost/scsi.c 	struct scatterlist *p = sg;
sg                679 drivers/vhost/scsi.c 		ret = vhost_scsi_map_to_sgl(cmd, iter, sg, write);
sg                681 drivers/vhost/scsi.c 			while (p < sg) {
sg                688 drivers/vhost/scsi.c 		sg += ret;
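vhost-scsi converts pinned user pages into sg entries with sg_set_page(), giving only the first entry a nonzero offset, and walks back dropping page references if a later step fails. A sketch of the fill step with the pinning and error unwind elided (pages_to_sgl is illustrative):

    #include <linux/scatterlist.h>

    /* Sketch: fill sg entries from an array of already-pinned pages. */
    static int pages_to_sgl(struct scatterlist *sg, struct page **pages,
                            int npages, size_t len, unsigned int first_off)
    {
        unsigned int off = first_off;
        int i;

        for (i = 0; i < npages && len; i++) {
            unsigned int n = min_t(size_t, PAGE_SIZE - off, len);

            sg_set_page(sg++, pages[i], n, off);
            len -= n;
            off = 0;  /* only the first page carries an offset */
        }
        return i;  /* number of entries consumed */
    }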
sg                268 drivers/video/fbdev/mx3fb.c 	struct scatterlist		sg[2];
sg                396 drivers/video/fbdev/mx3fb.c 			to_tx_desc(mx3_fbi->txd), to_tx_desc(mx3_fbi->txd)->sg);
sg                403 drivers/video/fbdev/mx3fb.c 		      &mx3_fbi->sg[0], 1, DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT);
sg                801 drivers/video/fbdev/mx3fb.c 	struct scatterlist *sg = mx3_fbi->sg;
sg                818 drivers/video/fbdev/mx3fb.c 	sg_init_table(&sg[0], 1);
sg                819 drivers/video/fbdev/mx3fb.c 	sg_init_table(&sg[1], 1);
sg                821 drivers/video/fbdev/mx3fb.c 	sg_dma_address(&sg[0]) = fbi->fix.smem_start;
sg                822 drivers/video/fbdev/mx3fb.c 	sg_set_page(&sg[0], virt_to_page(fbi->screen_base),
sg               1141 drivers/video/fbdev/mx3fb.c 	struct scatterlist *sg = mx3_fbi->sg;
sg               1194 drivers/video/fbdev/mx3fb.c 	sg_dma_address(&sg[mx3_fbi->cur_ipu_buf]) = base;
sg               1195 drivers/video/fbdev/mx3fb.c 	sg_set_page(&sg[mx3_fbi->cur_ipu_buf],
sg               1202 drivers/video/fbdev/mx3fb.c 	txd = dmaengine_prep_slave_sg(dma_chan, sg +
sg                228 drivers/video/fbdev/via/via-core.c int viafb_dma_copy_out_sg(unsigned int offset, struct scatterlist *sg, int nsg)
sg                254 drivers/video/fbdev/via/via-core.c 	for_each_sg(sg, sgentry, nsg, i) {
sg                141 drivers/virtio/virtio_balloon.c 	struct scatterlist sg;
sg                144 drivers/virtio/virtio_balloon.c 	sg_init_one(&sg, vb->pfns, sizeof(vb->pfns[0]) * vb->num_pfns);
sg                147 drivers/virtio/virtio_balloon.c 	virtqueue_add_outbuf(vq, &sg, 1, vb, GFP_KERNEL);
sg                343 drivers/virtio/virtio_balloon.c 	struct scatterlist sg;
sg                351 drivers/virtio/virtio_balloon.c 	sg_init_one(&sg, vb->stats, sizeof(vb->stats[0]) * num_stats);
sg                352 drivers/virtio/virtio_balloon.c 	virtqueue_add_outbuf(vq, &sg, 1, vb, GFP_KERNEL);
sg                502 drivers/virtio/virtio_balloon.c 		struct scatterlist sg;
sg                512 drivers/virtio/virtio_balloon.c 		sg_init_one(&sg, vb->stats, sizeof(vb->stats[0]) * num_stats);
sg                513 drivers/virtio/virtio_balloon.c 		err = virtqueue_add_outbuf(vb->stats_vq, &sg, 1, vb,
sg                542 drivers/virtio/virtio_balloon.c 	struct scatterlist sg;
sg                552 drivers/virtio/virtio_balloon.c 	sg_init_one(&sg, &vb->cmd_id_active, sizeof(vb->cmd_id_active));
sg                553 drivers/virtio/virtio_balloon.c 	err = virtqueue_add_outbuf(vq, &sg, 1, &vb->cmd_id_active, GFP_KERNEL);
sg                561 drivers/virtio/virtio_balloon.c 	struct scatterlist sg;
sg                569 drivers/virtio/virtio_balloon.c 	sg_init_one(&sg, &vb->cmd_id_stop, sizeof(vb->cmd_id_stop));
sg                570 drivers/virtio/virtio_balloon.c 	err = virtqueue_add_outbuf(vq, &sg, 1, &vb->cmd_id_stop, GFP_KERNEL);
sg                580 drivers/virtio/virtio_balloon.c 	struct scatterlist sg;
sg                598 drivers/virtio/virtio_balloon.c 	sg_init_one(&sg, p, VIRTIO_BALLOON_FREE_PAGE_SIZE);
sg                601 drivers/virtio/virtio_balloon.c 		err = virtqueue_add_inbuf(vq, &sg, 1, p, GFP_KERNEL);
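Every virtio_balloon submission above is the same two-step idiom: wrap the payload in a one-entry scatterlist with sg_init_one(), queue it with virtqueue_add_outbuf() (or virtqueue_add_inbuf() for device-writable buffers), then kick. Sketch:

    #include <linux/scatterlist.h>
    #include <linux/virtio.h>

    /* Sketch: post one driver-to-device buffer on a virtqueue. */
    static int post_outbuf(struct virtqueue *vq, void *buf, size_t len,
                           void *token)
    {
        struct scatterlist sg;
        int err;

        sg_init_one(&sg, buf, len);
        err = virtqueue_add_outbuf(vq, &sg, 1, token, GFP_KERNEL);
        if (err)
            return err;
        virtqueue_kick(vq);  /* notify the device */
        return 0;
    }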
sg                 25 drivers/virtio/virtio_input.c 	struct scatterlist sg[1];
sg                 27 drivers/virtio/virtio_input.c 	sg_init_one(sg, evtbuf, sizeof(*evtbuf));
sg                 28 drivers/virtio/virtio_input.c 	virtqueue_add_inbuf(vi->evt, sg, 1, evtbuf, GFP_ATOMIC);
sg                 62 drivers/virtio/virtio_input.c 	struct scatterlist sg[1];
sg                 73 drivers/virtio/virtio_input.c 	sg_init_one(sg, stsbuf, sizeof(*stsbuf));
sg                 77 drivers/virtio/virtio_input.c 		rc = virtqueue_add_outbuf(vi->sts, sg, 1, stsbuf, GFP_ATOMIC);
sg                326 drivers/virtio/virtio_ring.c 				   struct scatterlist *sg,
sg                330 drivers/virtio/virtio_ring.c 		return (dma_addr_t)sg_phys(sg);
sg                338 drivers/virtio/virtio_ring.c 			    sg_page(sg), sg->offset, sg->length,
sg                425 drivers/virtio/virtio_ring.c 	struct scatterlist *sg;
sg                482 drivers/virtio/virtio_ring.c 		for (sg = sgs[n]; sg; sg = sg_next(sg)) {
sg                483 drivers/virtio/virtio_ring.c 			dma_addr_t addr = vring_map_one_sg(vq, sg, DMA_TO_DEVICE);
sg                489 drivers/virtio/virtio_ring.c 			desc[i].len = cpu_to_virtio32(_vq->vdev, sg->length);
sg                495 drivers/virtio/virtio_ring.c 		for (sg = sgs[n]; sg; sg = sg_next(sg)) {
sg                496 drivers/virtio/virtio_ring.c 			dma_addr_t addr = vring_map_one_sg(vq, sg, DMA_FROM_DEVICE);
sg                502 drivers/virtio/virtio_ring.c 			desc[i].len = cpu_to_virtio32(_vq->vdev, sg->length);
sg                988 drivers/virtio/virtio_ring.c 	struct scatterlist *sg;
sg               1008 drivers/virtio/virtio_ring.c 		for (sg = sgs[n]; sg; sg = sg_next(sg)) {
sg               1009 drivers/virtio/virtio_ring.c 			addr = vring_map_one_sg(vq, sg, n < out_sgs ?
sg               1017 drivers/virtio/virtio_ring.c 			desc[i].len = cpu_to_le32(sg->length);
sg               1102 drivers/virtio/virtio_ring.c 	struct scatterlist *sg;
sg               1147 drivers/virtio/virtio_ring.c 		for (sg = sgs[n]; sg; sg = sg_next(sg)) {
sg               1148 drivers/virtio/virtio_ring.c 			dma_addr_t addr = vring_map_one_sg(vq, sg, n < out_sgs ?
sg               1162 drivers/virtio/virtio_ring.c 			desc[i].len = cpu_to_le32(sg->length);
sg               1167 drivers/virtio/virtio_ring.c 				vq->packed.desc_extra[curr].len = sg->length;
sg               1735 drivers/virtio/virtio_ring.c 		struct scatterlist *sg;
sg               1737 drivers/virtio/virtio_ring.c 		for (sg = sgs[i]; sg; sg = sg_next(sg))
sg               1759 drivers/virtio/virtio_ring.c 			 struct scatterlist *sg, unsigned int num,
sg               1763 drivers/virtio/virtio_ring.c 	return virtqueue_add(vq, &sg, num, 1, 0, data, NULL, gfp);
sg               1781 drivers/virtio/virtio_ring.c 			struct scatterlist *sg, unsigned int num,
sg               1785 drivers/virtio/virtio_ring.c 	return virtqueue_add(vq, &sg, num, 0, 1, data, NULL, gfp);
sg               1804 drivers/virtio/virtio_ring.c 			struct scatterlist *sg, unsigned int num,
sg               1809 drivers/virtio/virtio_ring.c 	return virtqueue_add(vq, &sg, num, 0, 1, data, ctx, gfp);
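When a request has both driver-readable and device-writable parts, virtqueue_add_sgs() takes an array of scatterlist pointers: the first out_sgs lists go out, the remaining in_sgs come in, and virtio_ring walks each list with sg_next() as the entries above show. Sketch:

    #include <linux/scatterlist.h>
    #include <linux/virtio.h>

    /* Sketch: one call queues a request header out and a status byte in. */
    static int post_cmd(struct virtqueue *vq, void *hdr, size_t hdr_len,
                        u8 *status, void *token)
    {
        struct scatterlist hdr_sg, status_sg;
        struct scatterlist *sgs[2];

        sg_init_one(&hdr_sg, hdr, hdr_len);
        sg_init_one(&status_sg, status, sizeof(*status));
        sgs[0] = &hdr_sg;    /* out: driver -> device */
        sgs[1] = &status_sg; /* in:  device -> driver */

        return virtqueue_add_sgs(vq, sgs, 1, 1, token, GFP_KERNEL);
    }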
sg                469 drivers/xen/swiotlb-xen.c 	struct scatterlist *sg;
sg                474 drivers/xen/swiotlb-xen.c 	for_each_sg(sgl, sg, nelems, i)
sg                475 drivers/xen/swiotlb-xen.c 		xen_swiotlb_unmap_page(hwdev, sg->dma_address, sg_dma_len(sg),
sg                484 drivers/xen/swiotlb-xen.c 	struct scatterlist *sg;
sg                489 drivers/xen/swiotlb-xen.c 	for_each_sg(sgl, sg, nelems, i) {
sg                490 drivers/xen/swiotlb-xen.c 		sg->dma_address = xen_swiotlb_map_page(dev, sg_page(sg),
sg                491 drivers/xen/swiotlb-xen.c 				sg->offset, sg->length, dir, attrs);
sg                492 drivers/xen/swiotlb-xen.c 		if (sg->dma_address == DMA_MAPPING_ERROR)
sg                494 drivers/xen/swiotlb-xen.c 		sg_dma_len(sg) = sg->length;
sg                508 drivers/xen/swiotlb-xen.c 	struct scatterlist *sg;
sg                511 drivers/xen/swiotlb-xen.c 	for_each_sg(sgl, sg, nelems, i) {
sg                512 drivers/xen/swiotlb-xen.c 		xen_swiotlb_sync_single_for_cpu(dev, sg->dma_address,
sg                513 drivers/xen/swiotlb-xen.c 				sg->length, dir);
sg                521 drivers/xen/swiotlb-xen.c 	struct scatterlist *sg;
sg                524 drivers/xen/swiotlb-xen.c 	for_each_sg(sgl, sg, nelems, i) {
sg                525 drivers/xen/swiotlb-xen.c 		xen_swiotlb_sync_single_for_device(dev, sg->dma_address,
sg                526 drivers/xen/swiotlb-xen.c 				sg->length, dir);
sg                482 drivers/xen/xen-scsiback.c 	struct scatterlist *sg;
sg                563 drivers/xen/xen-scsiback.c 	for_each_sg(pending_req->sgl, sg, nr_segments, i) {
sg                564 drivers/xen/xen-scsiback.c 		sg_set_page(sg, pg[i], seg->length, seg->offset);
sg                574 drivers/xen/xen-scsiback.c 		if (sg->offset >= PAGE_SIZE ||
sg                575 drivers/xen/xen-scsiback.c 		    sg->length > PAGE_SIZE ||
sg                576 drivers/xen/xen-scsiback.c 		    sg->offset + sg->length > PAGE_SIZE)
sg               3570 fs/cifs/smb2ops.c static inline void smb2_sg_set_buf(struct scatterlist *sg, const void *buf,
sg               3581 fs/cifs/smb2ops.c 	sg_set_page(sg, addr, buflen, offset_in_page(buf));
sg               3594 fs/cifs/smb2ops.c 	struct scatterlist *sg;
sg               3604 fs/cifs/smb2ops.c 	sg = kmalloc_array(sg_len, sizeof(struct scatterlist), GFP_KERNEL);
sg               3605 fs/cifs/smb2ops.c 	if (!sg)
sg               3608 fs/cifs/smb2ops.c 	sg_init_table(sg, sg_len);
sg               3616 fs/cifs/smb2ops.c 			smb2_sg_set_buf(&sg[idx++],
sg               3625 fs/cifs/smb2ops.c 			sg_set_page(&sg[idx++], rqst[i].rq_pages[j], len, offset);
sg               3628 fs/cifs/smb2ops.c 	smb2_sg_set_buf(&sg[idx], sign, SMB2_SIGNATURE_SIZE);
sg               3629 fs/cifs/smb2ops.c 	return sg;
sg               3667 fs/cifs/smb2ops.c 	struct scatterlist *sg;
sg               3715 fs/cifs/smb2ops.c 	sg = init_sg(num_rqst, rqst, sign);
sg               3716 fs/cifs/smb2ops.c 	if (!sg) {
sg               3737 fs/cifs/smb2ops.c 	aead_request_set_crypt(req, sg, sg, crypt_len, iv);
sg               3751 fs/cifs/smb2ops.c 	kfree(sg);
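crypt_message builds one scatterlist covering every rq_iov buffer, the page payload, and the signature, then passes the same list as both source and destination to aead_request_set_crypt() for in-place encryption. A reduced sketch of in-place AEAD over a prepared sgl, assuming a synchronous keyed tfm and no associated data (encrypt_in_place is illustrative):

    #include <crypto/aead.h>

    /* Sketch: in-place AEAD encryption over an already-built sgl. */
    static int encrypt_in_place(struct crypto_aead *tfm,
                                struct scatterlist *sg, unsigned int len,
                                u8 *iv)
    {
        struct aead_request *req;
        int rc;

        req = aead_request_alloc(tfm, GFP_KERNEL);
        if (!req)
            return -ENOMEM;
        aead_request_set_callback(req, 0, NULL, NULL);
        aead_request_set_ad(req, 0);  /* no associated data in this sketch */
        aead_request_set_crypt(req, sg, sg, len, iv);
        rc = crypto_aead_encrypt(req);  /* synchronous tfm assumed */
        aead_request_free(req);
        return rc;
    }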
sg                988 fs/cifs/smbdirect.c 	struct scatterlist *sg;
sg                996 fs/cifs/smbdirect.c 	for_each_sg(sgl, sg, num_sgs, i) {
sg                998 fs/cifs/smbdirect.c 			ib_dma_map_page(info->id->device, sg_page(sg),
sg                999 fs/cifs/smbdirect.c 			       sg->offset, sg->length, DMA_TO_DEVICE);
sg               1006 fs/cifs/smbdirect.c 		request->sge[i+1].length = sg->length;
sg                 45 fs/crypto/fname.c 	struct scatterlist sg;
sg                 67 fs/crypto/fname.c 	sg_init_one(&sg, out, olen);
sg                 68 fs/crypto/fname.c 	skcipher_request_set_crypt(req, &sg, &sg, olen, &iv);
sg                247 fs/ecryptfs/crypto.c int virt_to_scatterlist(const void *addr, int size, struct scatterlist *sg,
sg                255 fs/ecryptfs/crypto.c 	sg_init_table(sg, sg_size);
sg                260 fs/ecryptfs/crypto.c 		sg_set_page(&sg[i], pg, 0, offset);
sg                263 fs/ecryptfs/crypto.c 			sg[i].length = remainder_of_page;
sg                267 fs/ecryptfs/crypto.c 			sg[i].length = size;
sg                577 fs/ecryptfs/ecryptfs_kernel.h int virt_to_scatterlist(const void *addr, int size, struct scatterlist *sg,
sg               2296 fs/ext4/mballoc.c 	} sg;
sg               2304 fs/ext4/mballoc.c 	i = (blocksize_bits + 2) * sizeof(sg.info.bb_counters[0]) +
sg               2318 fs/ext4/mballoc.c 	memcpy(&sg, ext4_get_group_info(sb, group), i);
sg               2323 fs/ext4/mballoc.c 	seq_printf(seq, "#%-5u: %-5u %-5u %-5u [", group, sg.info.bb_free,
sg               2324 fs/ext4/mballoc.c 			sg.info.bb_fragments, sg.info.bb_first_free);
sg               2327 fs/ext4/mballoc.c 				sg.info.bb_counters[i] : 0);
sg                322 fs/fuse/virtio_fs.c 	struct scatterlist sg;
sg                323 fs/fuse/virtio_fs.c 	struct scatterlist *sgs[] = {&sg};
sg                345 fs/fuse/virtio_fs.c 		sg_init_one(&sg, forget, sizeof(*forget));
sg                713 fs/fuse/virtio_fs.c 	struct scatterlist sg;
sg                714 fs/fuse/virtio_fs.c 	struct scatterlist *sgs[] = {&sg};
sg                742 fs/fuse/virtio_fs.c 	sg_init_one(&sg, forget, sizeof(*forget));
sg                826 fs/fuse/virtio_fs.c static unsigned int sg_init_fuse_pages(struct scatterlist *sg,
sg                836 fs/fuse/virtio_fs.c 		sg_init_table(&sg[i], 1);
sg                838 fs/fuse/virtio_fs.c 		sg_set_page(&sg[i], pages[i], this_len, page_descs[i].offset);
sg                846 fs/fuse/virtio_fs.c static unsigned int sg_init_fuse_args(struct scatterlist *sg,
sg                860 fs/fuse/virtio_fs.c 		sg_init_one(&sg[total_sgs++], argbuf, len);
sg                863 fs/fuse/virtio_fs.c 		total_sgs += sg_init_fuse_pages(&sg[total_sgs],
sg                882 fs/fuse/virtio_fs.c 	struct scatterlist *sg = stack_sg;
sg                898 fs/fuse/virtio_fs.c 		sg = kmalloc_array(total_sgs, sizeof(sg[0]), GFP_ATOMIC);
sg                899 fs/fuse/virtio_fs.c 		if (!sgs || !sg) {
sg                911 fs/fuse/virtio_fs.c 	sg_init_one(&sg[out_sgs++], &req->in.h, sizeof(req->in.h));
sg                912 fs/fuse/virtio_fs.c 	out_sgs += sg_init_fuse_args(&sg[out_sgs], req,
sg                919 fs/fuse/virtio_fs.c 		sg_init_one(&sg[out_sgs + in_sgs++],
sg                921 fs/fuse/virtio_fs.c 		in_sgs += sg_init_fuse_args(&sg[out_sgs + in_sgs], req,
sg                930 fs/fuse/virtio_fs.c 		sgs[i] = &sg[i];
sg                972 fs/fuse/virtio_fs.c 		kfree(sg);
sg                111 fs/verity/hash_algs.c 	struct scatterlist sg;
sg                143 fs/verity/hash_algs.c 	sg_init_one(&sg, padded_salt, padded_salt_size);
sg                147 fs/verity/hash_algs.c 	ahash_request_set_crypt(req, &sg, NULL, padded_salt_size);
sg                188 fs/verity/hash_algs.c 	struct scatterlist sg;
sg                195 fs/verity/hash_algs.c 	sg_init_table(&sg, 1);
sg                196 fs/verity/hash_algs.c 	sg_set_page(&sg, page, PAGE_SIZE, 0);
sg                200 fs/verity/hash_algs.c 	ahash_request_set_crypt(req, &sg, out, PAGE_SIZE);
sg                236 fs/verity/hash_algs.c 	struct scatterlist sg;
sg                244 fs/verity/hash_algs.c 	sg_init_one(&sg, data, size);
sg                248 fs/verity/hash_algs.c 	ahash_request_set_crypt(req, &sg, out, size);
sg                 84 include/crypto/algapi.h 	struct scatterlist *sg;
sg                327 include/crypto/algapi.h 	walk->in.sg = src;
sg                328 include/crypto/algapi.h 	walk->out.sg = dst;
sg                337 include/crypto/algapi.h 	walk->in.sg = src;
sg                338 include/crypto/algapi.h 	walk->out.sg = dst;
sg                 60 include/crypto/if_alg.h 	struct scatterlist sg[ALG_MAX_PAGES + 1];
sg                 69 include/crypto/if_alg.h 	struct scatterlist sg[0];	/* Array of SGs forming the SGL */
sg                 27 include/crypto/internal/hash.h 	struct scatterlist *sg;
sg                 20 include/crypto/scatterwalk.h 					    struct scatterlist *sg, int num)
sg                 22 include/crypto/scatterwalk.h 	if (sg)
sg                 23 include/crypto/scatterwalk.h 		sg_chain(head, num, sg);
sg                 30 include/crypto/scatterwalk.h 	unsigned int len = walk->sg->offset + walk->sg->length - walk->offset;
sg                 56 include/crypto/scatterwalk.h 	return sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT);
sg                 65 include/crypto/scatterwalk.h 				     struct scatterlist *sg)
sg                 67 include/crypto/scatterwalk.h 	walk->sg = sg;
sg                 68 include/crypto/scatterwalk.h 	walk->offset = sg->offset;
sg                 83 include/crypto/scatterwalk.h 		page = sg_page(walk->sg) + ((walk->offset - 1) >> PAGE_SHIFT);
sg                 92 include/crypto/scatterwalk.h 	if (more && walk->offset >= walk->sg->offset + walk->sg->length)
sg                 93 include/crypto/scatterwalk.h 		scatterwalk_start(walk, sg_next(walk->sg));
sg                 99 include/crypto/scatterwalk.h 	if (!more || walk->offset >= walk->sg->offset + walk->sg->length ||
sg                108 include/crypto/scatterwalk.h void scatterwalk_map_and_copy(void *buf, struct scatterlist *sg,
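scatterwalk is the crypto layer's low-level cursor over a scatterlist; many callers only need scatterwalk_map_and_copy(), declared above, which moves nbytes at a byte offset either out of or into the list, with the final flag selecting direction. Sketch (access_tag is an illustrative wrapper):

    #include <crypto/scatterwalk.h>

    /* Sketch: read or write a tag embedded inside a scatterlist. */
    static void access_tag(struct scatterlist *sg, unsigned int offset,
                           u8 *tag, unsigned int taglen, bool write)
    {
        /* out = 0: sg -> buf; out = 1: buf -> sg */
        scatterwalk_map_and_copy(tag, sg, offset, taglen, write ? 1 : 0);
    }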
sg                359 include/drm/drm_device.h 	struct drm_sg_mem *sg;
sg                104 include/drm/drm_prime.h void drm_prime_gem_destroy(struct drm_gem_object *obj, struct sg_table *sg);
sg                231 include/drm/ttm/ttm_bo_api.h 	struct sg_table *sg;
sg                528 include/drm/ttm/ttm_bo_api.h 			 struct sg_table *sg,
sg                573 include/drm/ttm/ttm_bo_api.h 		struct sg_table *sg, struct dma_resv *resv,
sg                111 include/drm/ttm/ttm_tt.h 	struct sg_table *sg; /* for SG objects via dma-buf */
sg                 33 include/linux/dma-debug.h extern void debug_dma_map_sg(struct device *dev, struct scatterlist *sg,
sg                 61 include/linux/dma-debug.h 				      struct scatterlist *sg,
sg                 65 include/linux/dma-debug.h 					 struct scatterlist *sg,
sg                 99 include/linux/dma-debug.h static inline void debug_dma_map_sg(struct device *dev, struct scatterlist *sg,
sg                145 include/linux/dma-debug.h 					     struct scatterlist *sg,
sg                151 include/linux/dma-debug.h 						struct scatterlist *sg,
sg                104 include/linux/dma-mapping.h 	int (*map_sg)(struct device *dev, struct scatterlist *sg,
sg                108 include/linux/dma-mapping.h 			 struct scatterlist *sg, int nents,
sg                124 include/linux/dma-mapping.h 				struct scatterlist *sg, int nents,
sg                127 include/linux/dma-mapping.h 				   struct scatterlist *sg, int nents,
sg                311 include/linux/dma-mapping.h static inline int dma_map_sg_attrs(struct device *dev, struct scatterlist *sg,
sg                320 include/linux/dma-mapping.h 		ents = dma_direct_map_sg(dev, sg, nents, dir, attrs);
sg                322 include/linux/dma-mapping.h 		ents = ops->map_sg(dev, sg, nents, dir, attrs);
sg                324 include/linux/dma-mapping.h 	debug_dma_map_sg(dev, sg, nents, ents, dir);
sg                329 include/linux/dma-mapping.h static inline void dma_unmap_sg_attrs(struct device *dev, struct scatterlist *sg,
sg                336 include/linux/dma-mapping.h 	debug_dma_unmap_sg(dev, sg, nents, dir);
sg                338 include/linux/dma-mapping.h 		dma_direct_unmap_sg(dev, sg, nents, dir, attrs);
sg                340 include/linux/dma-mapping.h 		ops->unmap_sg(dev, sg, nents, dir, attrs);
sg                408 include/linux/dma-mapping.h dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg,
sg                415 include/linux/dma-mapping.h 		dma_direct_sync_sg_for_cpu(dev, sg, nelems, dir);
sg                417 include/linux/dma-mapping.h 		ops->sync_sg_for_cpu(dev, sg, nelems, dir);
sg                418 include/linux/dma-mapping.h 	debug_dma_sync_sg_for_cpu(dev, sg, nelems, dir);
sg                422 include/linux/dma-mapping.h dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg,
sg                429 include/linux/dma-mapping.h 		dma_direct_sync_sg_for_device(dev, sg, nelems, dir);
sg                431 include/linux/dma-mapping.h 		ops->sync_sg_for_device(dev, sg, nelems, dir);
sg                432 include/linux/dma-mapping.h 	debug_dma_sync_sg_for_device(dev, sg, nelems, dir);
sg                479 include/linux/dma-mapping.h static inline int dma_map_sg_attrs(struct device *dev, struct scatterlist *sg,
sg                485 include/linux/dma-mapping.h 		struct scatterlist *sg, int nents, enum dma_data_direction dir,
sg                508 include/linux/dma-mapping.h 		struct scatterlist *sg, int nelems, enum dma_data_direction dir)
sg                512 include/linux/dma-mapping.h 		struct scatterlist *sg, int nelems, enum dma_data_direction dir)
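The dma-mapping.h entries spell out the contract every driver above follows: dma_map_sg() may coalesce entries, so the returned count (never the original nents) bounds the for_each_sg() walk that reads sg_dma_address()/sg_dma_len(), while dma_unmap_sg() must be passed the original nents. Sketch of the full cycle:

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    /* Sketch: the canonical map / use / unmap cycle for an sgl. */
    static int dma_roundtrip(struct device *dev, struct scatterlist *sgl,
                             int nents)
    {
        struct scatterlist *sg;
        int mapped, i;

        mapped = dma_map_sg(dev, sgl, nents, DMA_TO_DEVICE);
        if (!mapped)
            return -ENOMEM;

        for_each_sg(sgl, sg, mapped, i)  /* mapped, not nents */
            dev_dbg(dev, "seg %d: %pad + %u\n", i,
                    &sg_dma_address(sg), sg_dma_len(sg));

        dma_unmap_sg(dev, sgl, nents, DMA_TO_DEVICE);  /* original nents */
        return 0;
    }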
sg                146 include/linux/dma/ipu-dma.h 	struct scatterlist		*sg;	/* scatterlist for this */
sg                160 include/linux/dma/ipu-dma.h 	struct scatterlist	*sg[2];	/* scatterlist elements in buffer-0 and -1 */
sg                774 include/linux/dmaengine.h 		struct dma_chan *chan, struct scatterlist *sg,
sg                825 include/linux/dmaengine.h 	struct scatterlist sg;
sg                826 include/linux/dmaengine.h 	sg_init_table(&sg, 1);
sg                827 include/linux/dmaengine.h 	sg_dma_address(&sg) = buf;
sg                828 include/linux/dmaengine.h 	sg_dma_len(&sg) = len;
sg                833 include/linux/dmaengine.h 	return chan->device->device_prep_slave_sg(chan, &sg, 1,
sg                430 include/linux/iommu.h 			   struct scatterlist *sg,unsigned int nents, int prot);
sg                679 include/linux/iommu.h 				  unsigned long iova, struct scatterlist *sg,
sg                639 include/linux/libata.h 	struct scatterlist	*sg;
sg               1165 include/linux/libata.h extern void ata_sg_init(struct ata_queued_cmd *qc, struct scatterlist *sg,
sg               1713 include/linux/libata.h 	qc->sg = NULL;
sg                243 include/linux/memstick.h 		struct scatterlist sg;
sg                319 include/linux/memstick.h 			  const struct scatterlist *sg);
sg                142 include/linux/mmc/core.h 	struct scatterlist	*sg;		/* I/O scatter list */
sg                 56 include/linux/mpi.h int mpi_write_to_sgl(MPI a, struct scatterlist *sg, unsigned nbytes,
sg                128 include/linux/mtd/ubi.h 	struct scatterlist sg[UBI_MAX_SG_COUNT];
sg                616 include/linux/nvme-fc-driver.h 	struct scatterlist	*sg;
sg                 65 include/linux/pci-dma-compat.h pci_map_sg(struct pci_dev *hwdev, struct scatterlist *sg,
sg                 68 include/linux/pci-dma-compat.h 	return dma_map_sg(&hwdev->dev, sg, nents, (enum dma_data_direction)direction);
sg                 72 include/linux/pci-dma-compat.h pci_unmap_sg(struct pci_dev *hwdev, struct scatterlist *sg,
sg                 75 include/linux/pci-dma-compat.h 	dma_unmap_sg(&hwdev->dev, sg, nents, (enum dma_data_direction)direction);
sg                 93 include/linux/pci-dma-compat.h pci_dma_sync_sg_for_cpu(struct pci_dev *hwdev, struct scatterlist *sg,
sg                 96 include/linux/pci-dma-compat.h 	dma_sync_sg_for_cpu(&hwdev->dev, sg, nelems, (enum dma_data_direction)direction);
sg                100 include/linux/pci-dma-compat.h pci_dma_sync_sg_for_device(struct pci_dev *hwdev, struct scatterlist *sg,
sg                103 include/linux/pci-dma-compat.h 	dma_sync_sg_for_device(&hwdev->dev, sg, nelems, (enum dma_data_direction)direction);
sg                 33 include/linux/pci-p2pdma.h int pci_p2pdma_map_sg_attrs(struct device *dev, struct scatterlist *sg,
sg                 35 include/linux/pci-p2pdma.h void pci_p2pdma_unmap_sg_attrs(struct device *dev, struct scatterlist *sg,
sg                 87 include/linux/pci-p2pdma.h 		struct scatterlist *sg, int nents, enum dma_data_direction dir,
sg                 93 include/linux/pci-p2pdma.h 		struct scatterlist *sg, int nents, enum dma_data_direction dir,
sg                122 include/linux/pci-p2pdma.h static inline int pci_p2pdma_map_sg(struct device *dev, struct scatterlist *sg,
sg                125 include/linux/pci-p2pdma.h 	return pci_p2pdma_map_sg_attrs(dev, sg, nents, dir, 0);
sg                129 include/linux/pci-p2pdma.h 		struct scatterlist *sg, int nents, enum dma_data_direction dir)
sg                131 include/linux/pci-p2pdma.h 	pci_p2pdma_unmap_sg_attrs(dev, sg, nents, dir, 0);
sg                184 include/linux/platform_data/dma-ste-dma40.h 	struct scatterlist sg;
sg                185 include/linux/platform_data/dma-ste-dma40.h 	sg_init_table(&sg, 1);
sg                186 include/linux/platform_data/dma-ste-dma40.h 	sg.dma_address = addr;
sg                187 include/linux/platform_data/dma-ste-dma40.h 	sg.length = size;
sg                189 include/linux/platform_data/dma-ste-dma40.h 	return dmaengine_prep_slave_sg(chan, &sg, 1, direction, flags);
sg                508 include/linux/rio.h 	struct scatterlist	*sg;	/* I/O scatter list */
sg                 34 include/linux/scatterlist.h #define sg_dma_address(sg)	((sg)->dma_address)
sg                 37 include/linux/scatterlist.h #define sg_dma_len(sg)		((sg)->dma_length)
sg                 39 include/linux/scatterlist.h #define sg_dma_len(sg)		((sg)->length)
sg                 72 include/linux/scatterlist.h #define sg_is_chain(sg)		((sg)->page_link & SG_CHAIN)
sg                 73 include/linux/scatterlist.h #define sg_is_last(sg)		((sg)->page_link & SG_END)
sg                 74 include/linux/scatterlist.h #define sg_chain_ptr(sg)	\
sg                 75 include/linux/scatterlist.h 	((struct scatterlist *) ((sg)->page_link & ~(SG_CHAIN | SG_END)))
sg                 87 include/linux/scatterlist.h static inline void sg_assign_page(struct scatterlist *sg, struct page *page)
sg                 89 include/linux/scatterlist.h 	unsigned long page_link = sg->page_link & (SG_CHAIN | SG_END);
sg                 97 include/linux/scatterlist.h 	BUG_ON(sg_is_chain(sg));
sg                 99 include/linux/scatterlist.h 	sg->page_link = page_link | (unsigned long) page;
sg                116 include/linux/scatterlist.h static inline void sg_set_page(struct scatterlist *sg, struct page *page,
sg                119 include/linux/scatterlist.h 	sg_assign_page(sg, page);
sg                120 include/linux/scatterlist.h 	sg->offset = offset;
sg                121 include/linux/scatterlist.h 	sg->length = len;
sg                124 include/linux/scatterlist.h static inline struct page *sg_page(struct scatterlist *sg)
sg                127 include/linux/scatterlist.h 	BUG_ON(sg_is_chain(sg));
sg                129 include/linux/scatterlist.h 	return (struct page *)((sg)->page_link & ~(SG_CHAIN | SG_END));
sg                139 include/linux/scatterlist.h static inline void sg_set_buf(struct scatterlist *sg, const void *buf,
sg                145 include/linux/scatterlist.h 	sg_set_page(sg, virt_to_page(buf), buflen, offset_in_page(buf));
sg                151 include/linux/scatterlist.h #define for_each_sg(sglist, sg, nr, __i)	\
sg                152 include/linux/scatterlist.h 	for (__i = 0, sg = (sglist); __i < (nr); __i++, sg = sg_next(sg))
sg                190 include/linux/scatterlist.h static inline void sg_mark_end(struct scatterlist *sg)
sg                195 include/linux/scatterlist.h 	sg->page_link |= SG_END;
sg                196 include/linux/scatterlist.h 	sg->page_link &= ~SG_CHAIN;
sg                207 include/linux/scatterlist.h static inline void sg_unmark_end(struct scatterlist *sg)
sg                209 include/linux/scatterlist.h 	sg->page_link &= ~SG_END;
sg                222 include/linux/scatterlist.h static inline dma_addr_t sg_phys(struct scatterlist *sg)
sg                224 include/linux/scatterlist.h 	return page_to_phys(sg_page(sg)) + sg->offset;
sg                237 include/linux/scatterlist.h static inline void *sg_virt(struct scatterlist *sg)
sg                239 include/linux/scatterlist.h 	return page_address(sg_page(sg)) + sg->offset;
sg                254 include/linux/scatterlist.h int sg_nents(struct scatterlist *sg);
sg                255 include/linux/scatterlist.h int sg_nents_for_len(struct scatterlist *sg, u64 len);
sg                353 include/linux/scatterlist.h 	struct scatterlist	*sg;		/* sg holding the page */
sg                384 include/linux/scatterlist.h 	return nth_page(sg_page(piter->sg), piter->sg_pgoffset);
sg                395 include/linux/scatterlist.h 	return sg_dma_address(dma_iter->base.sg) +
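
Taken together, the scatterlist.h entries above are the whole core API: page_link carries the page pointer plus the SG_CHAIN/SG_END flag bits, sg_set_page()/sg_set_buf() fill an entry, and for_each_sg() walks a (possibly chained) list via sg_next(). A minimal sketch of building and walking a two-entry table; my_describe_bufs() and its arguments are hypothetical, and the buffers are assumed to be kmalloc'd (lowmem) so sg_set_buf() can use virt_to_page():

#include <linux/scatterlist.h>
#include <linux/printk.h>

static void my_describe_bufs(void *buf_a, unsigned int len_a,
                             void *buf_b, unsigned int len_b)
{
        struct scatterlist sgl[2], *sg;
        int i;

        sg_init_table(sgl, 2);          /* zero the entries, mark the last */
        sg_set_buf(&sgl[0], buf_a, len_a);
        sg_set_buf(&sgl[1], buf_b, len_b);

        for_each_sg(sgl, sg, 2, i)
                pr_info("entry %d: %u bytes at page offset %u\n",
                        i, sg->length, sg->offset);
}
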
sg                174 include/linux/sched/topology.h 	struct sched_group *__percpu *sg;
sg               1123 include/linux/skbuff.h int __must_check skb_to_sgvec_nomark(struct sk_buff *skb, struct scatterlist *sg,
sg               1125 include/linux/skbuff.h int __must_check skb_to_sgvec(struct sk_buff *skb, struct scatterlist *sg,
sg                 44 include/linux/skmsg.h 	struct sk_msg_sg		sg;
sg                132 include/linux/skmsg.h 	WARN_ON(i == msg->sg.end && bytes);
sg                166 include/linux/skmsg.h 	sk_msg_iter_var_prev(msg->sg.which)
sg                169 include/linux/skmsg.h 	sk_msg_iter_var_next(msg->sg.which)
sg                173 include/linux/skmsg.h 	memset(&msg->sg, 0, offsetofend(struct sk_msg_sg, copy));
sg                178 include/linux/skmsg.h 	BUILD_BUG_ON(ARRAY_SIZE(msg->sg.data) - 1 != NR_MSG_FRAG_IDS);
sg                180 include/linux/skmsg.h 	sg_init_marker(msg->sg.data, NR_MSG_FRAG_IDS);
sg                186 include/linux/skmsg.h 	dst->sg.data[which] = src->sg.data[which];
sg                187 include/linux/skmsg.h 	dst->sg.data[which].length  = size;
sg                188 include/linux/skmsg.h 	dst->sg.size		   += size;
sg                189 include/linux/skmsg.h 	src->sg.size		   -= size;
sg                190 include/linux/skmsg.h 	src->sg.data[which].length -= size;
sg                191 include/linux/skmsg.h 	src->sg.data[which].offset += size;
sg                202 include/linux/skmsg.h 	return sk_msg_iter_dist(msg->sg.start, msg->sg.end) == MAX_MSG_FRAGS;
sg                207 include/linux/skmsg.h 	return sk_msg_iter_dist(msg->sg.start, msg->sg.end);
sg                212 include/linux/skmsg.h 	return &msg->sg.data[which];
sg                217 include/linux/skmsg.h 	return msg->sg.data[which];
sg                232 include/linux/skmsg.h 	struct scatterlist *sge = sk_msg_elem(msg, msg->sg.start);
sg                234 include/linux/skmsg.h 	if (msg->sg.copy[msg->sg.start]) {
sg                249 include/linux/skmsg.h 	sge = sk_msg_elem(msg, msg->sg.end);
sg                253 include/linux/skmsg.h 	msg->sg.copy[msg->sg.end] = true;
sg                254 include/linux/skmsg.h 	msg->sg.size += len;
sg                261 include/linux/skmsg.h 		msg->sg.copy[i] = copy_state;
sg                263 include/linux/skmsg.h 		if (i == msg->sg.end)
sg                293 include/linux/spi/spi-mem.h 				       struct sg_table *sg);
sg                297 include/linux/spi/spi-mem.h 					  struct sg_table *sg);
sg                306 include/linux/spi/spi-mem.h 				   struct sg_table *sg)
sg                314 include/linux/spi/spi-mem.h 				     struct sg_table *sg)
sg                144 include/linux/tifm.h int tifm_map_sg(struct tifm_dev *sock, struct scatterlist *sg, int nents,
sg                146 include/linux/tifm.h void tifm_unmap_sg(struct tifm_dev *sock, struct scatterlist *sg, int nents,
sg               1571 include/linux/usb.h 	struct scatterlist *sg;		/* (in) scatter gather buffer list */
sg               1771 include/linux/usb.h 		      struct scatterlist *sg, int nents);
sg               1774 include/linux/usb.h 			   struct scatterlist *sg, int n_hw_ents);
sg               1777 include/linux/usb.h 			 struct scatterlist *sg, int n_hw_ents);
sg               1878 include/linux/usb.h 	struct scatterlist	*sg,
sg                102 include/linux/usb/gadget.h 	struct scatterlist	*sg;
sg                148 include/linux/via-core.h int viafb_dma_copy_out_sg(unsigned int offset, struct scatterlist *sg, int nsg);
sg                 39 include/linux/virtio.h 			 struct scatterlist sg[], unsigned int num,
sg                 44 include/linux/virtio.h 			struct scatterlist sg[], unsigned int num,
sg                 49 include/linux/virtio.h 			    struct scatterlist sg[], unsigned int num,
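
The virtqueue_add_* prototypes above all take a scatterlist describing the buffers exposed to the other side of the virtio ring. A hedged usage sketch; my_vq_send() and token are invented names:

#include <linux/virtio.h>
#include <linux/scatterlist.h>
#include <linux/gfp.h>

static int my_vq_send(struct virtqueue *vq, void *buf, unsigned int len,
                      void *token)
{
        struct scatterlist sg;
        int err;

        sg_init_one(&sg, buf, len);     /* one device-readable element */
        err = virtqueue_add_outbuf(vq, &sg, 1, token, GFP_ATOMIC);
        if (err)
                return err;             /* e.g. when the ring is full */

        virtqueue_kick(vq);             /* notify the other side */
        return 0;
}

The token is handed back by virtqueue_get_buf() once the host has consumed the buffer.
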
sg                398 include/net/tls.h 		struct scatterlist *sg, u16 first_offset,
sg               2411 include/rdma/ib_verbs.h 	int (*map_mr_sg)(struct ib_mr *mr, struct scatterlist *sg, int sg_nents,
sg               4004 include/rdma/ib_verbs.h 				struct scatterlist *sg, int nents,
sg               4007 include/rdma/ib_verbs.h 	return dma_map_sg(dev->dma_device, sg, nents, direction);
sg               4018 include/rdma/ib_verbs.h 				   struct scatterlist *sg, int nents,
sg               4021 include/rdma/ib_verbs.h 	dma_unmap_sg(dev->dma_device, sg, nents, direction);
sg               4025 include/rdma/ib_verbs.h 				      struct scatterlist *sg, int nents,
sg               4029 include/rdma/ib_verbs.h 	return dma_map_sg_attrs(dev->dma_device, sg, nents, direction,
sg               4034 include/rdma/ib_verbs.h 					 struct scatterlist *sg, int nents,
sg               4038 include/rdma/ib_verbs.h 	dma_unmap_sg_attrs(dev->dma_device, sg, nents, direction, dma_attrs);
sg               4333 include/rdma/ib_verbs.h int ib_map_mr_sg(struct ib_mr *mr, struct scatterlist *sg, int sg_nents,
sg               4341 include/rdma/ib_verbs.h ib_map_mr_sg_zbva(struct ib_mr *mr, struct scatterlist *sg, int sg_nents,
sg               4346 include/rdma/ib_verbs.h 	n = ib_map_mr_sg(mr, sg, sg_nents, sg_offset, page_size);
sg                 46 include/rdma/rw.h 		struct scatterlist *sg, u32 sg_cnt, u32 sg_offset,
sg                 49 include/rdma/rw.h 		struct scatterlist *sg, u32 sg_cnt,
sg                 53 include/rdma/rw.h 		u8 port_num, struct scatterlist *sg, u32 sg_cnt,
sg                 58 include/rdma/rw.h 		u8 port_num, struct scatterlist *sg, u32 sg_cnt,
sg                 36 include/scsi/libiscsi_tcp.h 	struct scatterlist	*sg;
sg                164 include/scsi/scsi_cmnd.h extern void *scsi_kmap_atomic_sg(struct scatterlist *sg, int sg_count,
sg                203 include/scsi/scsi_cmnd.h #define scsi_for_each_sg(cmd, sg, nseg, __i)			\
sg                204 include/scsi/scsi_cmnd.h 	for_each_sg(scsi_sglist(cmd), sg, nseg, __i)
sg                307 include/scsi/scsi_cmnd.h #define scsi_for_each_prot_sg(cmd, sg, nseg, __i)		\
sg                308 include/scsi/scsi_cmnd.h 	for_each_sg(scsi_prot_sglist(cmd), sg, nseg, __i)
sg                309 include/soc/fsl/dpaa2-fd.h static inline dma_addr_t dpaa2_sg_get_addr(const struct dpaa2_sg_entry *sg)
sg                311 include/soc/fsl/dpaa2-fd.h 	return (dma_addr_t)le64_to_cpu(sg->addr);
sg                319 include/soc/fsl/dpaa2-fd.h static inline void dpaa2_sg_set_addr(struct dpaa2_sg_entry *sg, dma_addr_t addr)
sg                321 include/soc/fsl/dpaa2-fd.h 	sg->addr = cpu_to_le64(addr);
sg                324 include/soc/fsl/dpaa2-fd.h static inline bool dpaa2_sg_short_len(const struct dpaa2_sg_entry *sg)
sg                326 include/soc/fsl/dpaa2-fd.h 	return !!((le16_to_cpu(sg->format_offset) >> SG_SHORT_LEN_FLAG_SHIFT)
sg                336 include/soc/fsl/dpaa2-fd.h static inline u32 dpaa2_sg_get_len(const struct dpaa2_sg_entry *sg)
sg                338 include/soc/fsl/dpaa2-fd.h 	if (dpaa2_sg_short_len(sg))
sg                339 include/soc/fsl/dpaa2-fd.h 		return le32_to_cpu(sg->len) & SG_SHORT_LEN_MASK;
sg                341 include/soc/fsl/dpaa2-fd.h 	return le32_to_cpu(sg->len);
sg                349 include/soc/fsl/dpaa2-fd.h static inline void dpaa2_sg_set_len(struct dpaa2_sg_entry *sg, u32 len)
sg                351 include/soc/fsl/dpaa2-fd.h 	sg->len = cpu_to_le32(len);
sg                360 include/soc/fsl/dpaa2-fd.h static inline u16 dpaa2_sg_get_offset(const struct dpaa2_sg_entry *sg)
sg                362 include/soc/fsl/dpaa2-fd.h 	return le16_to_cpu(sg->format_offset) & SG_OFFSET_MASK;
sg                370 include/soc/fsl/dpaa2-fd.h static inline void dpaa2_sg_set_offset(struct dpaa2_sg_entry *sg,
sg                373 include/soc/fsl/dpaa2-fd.h 	sg->format_offset &= cpu_to_le16(~SG_OFFSET_MASK);
sg                374 include/soc/fsl/dpaa2-fd.h 	sg->format_offset |= cpu_to_le16(offset);
sg                384 include/soc/fsl/dpaa2-fd.h 	dpaa2_sg_get_format(const struct dpaa2_sg_entry *sg)
sg                386 include/soc/fsl/dpaa2-fd.h 	return (enum dpaa2_sg_format)((le16_to_cpu(sg->format_offset)
sg                395 include/soc/fsl/dpaa2-fd.h static inline void dpaa2_sg_set_format(struct dpaa2_sg_entry *sg,
sg                398 include/soc/fsl/dpaa2-fd.h 	sg->format_offset &= cpu_to_le16(~(SG_FORMAT_MASK << SG_FORMAT_SHIFT));
sg                399 include/soc/fsl/dpaa2-fd.h 	sg->format_offset |= cpu_to_le16(format << SG_FORMAT_SHIFT);
sg                408 include/soc/fsl/dpaa2-fd.h static inline u16 dpaa2_sg_get_bpid(const struct dpaa2_sg_entry *sg)
sg                410 include/soc/fsl/dpaa2-fd.h 	return le16_to_cpu(sg->bpid) & SG_BPID_MASK;
sg                418 include/soc/fsl/dpaa2-fd.h static inline void dpaa2_sg_set_bpid(struct dpaa2_sg_entry *sg, u16 bpid)
sg                420 include/soc/fsl/dpaa2-fd.h 	sg->bpid &= cpu_to_le16(~(SG_BPID_MASK));
sg                421 include/soc/fsl/dpaa2-fd.h 	sg->bpid |= cpu_to_le16(bpid);
sg                430 include/soc/fsl/dpaa2-fd.h static inline bool dpaa2_sg_is_final(const struct dpaa2_sg_entry *sg)
sg                432 include/soc/fsl/dpaa2-fd.h 	return !!(le16_to_cpu(sg->format_offset) >> SG_FINAL_FLAG_SHIFT);
sg                440 include/soc/fsl/dpaa2-fd.h static inline void dpaa2_sg_set_final(struct dpaa2_sg_entry *sg, bool final)
sg                442 include/soc/fsl/dpaa2-fd.h 	sg->format_offset &= cpu_to_le16((~(SG_FINAL_FLAG_MASK
sg                444 include/soc/fsl/dpaa2-fd.h 	sg->format_offset |= cpu_to_le16(final << SG_FINAL_FLAG_SHIFT);
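
The dpaa2-fd.h accessors above exist because the hardware scatter-gather entry is little-endian and packs length, offset, format and the final flag into shared fields, so drivers should never poke struct dpaa2_sg_entry directly. A hedged sketch of filling one entry; my_fill_entry() is a made-up name, and dpaa2_sg_single is assumed to be the single-buffer format value from the enum in the same header:

#include <soc/fsl/dpaa2-fd.h>
#include <linux/string.h>

static void my_fill_entry(struct dpaa2_sg_entry *sg, dma_addr_t addr,
                          u32 len, u16 bpid, bool last)
{
        memset(sg, 0, sizeof(*sg));
        dpaa2_sg_set_addr(sg, addr);
        dpaa2_sg_set_len(sg, len);
        dpaa2_sg_set_bpid(sg, bpid);            /* buffer pool to release to */
        dpaa2_sg_set_format(sg, dpaa2_sg_single);
        dpaa2_sg_set_final(sg, last);           /* final flag on the last entry */
}
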
sg                201 include/soc/fsl/qman.h static inline dma_addr_t qm_sg_addr(const struct qm_sg_entry *sg)
sg                203 include/soc/fsl/qman.h 	return be64_to_cpu(sg->data) & 0xffffffffffLLU;
sg                206 include/soc/fsl/qman.h static inline u64 qm_sg_entry_get64(const struct qm_sg_entry *sg)
sg                208 include/soc/fsl/qman.h 	return be64_to_cpu(sg->data) & 0xffffffffffLLU;
sg                211 include/soc/fsl/qman.h static inline void qm_sg_entry_set64(struct qm_sg_entry *sg, u64 addr)
sg                213 include/soc/fsl/qman.h 	sg->addr_hi = upper_32_bits(addr);
sg                214 include/soc/fsl/qman.h 	sg->addr_lo = cpu_to_be32(lower_32_bits(addr));
sg                217 include/soc/fsl/qman.h static inline bool qm_sg_entry_is_final(const struct qm_sg_entry *sg)
sg                219 include/soc/fsl/qman.h 	return be32_to_cpu(sg->cfg) & QM_SG_FIN;
sg                222 include/soc/fsl/qman.h static inline bool qm_sg_entry_is_ext(const struct qm_sg_entry *sg)
sg                224 include/soc/fsl/qman.h 	return be32_to_cpu(sg->cfg) & QM_SG_EXT;
sg                227 include/soc/fsl/qman.h static inline int qm_sg_entry_get_len(const struct qm_sg_entry *sg)
sg                229 include/soc/fsl/qman.h 	return be32_to_cpu(sg->cfg) & QM_SG_LEN_MASK;
sg                232 include/soc/fsl/qman.h static inline void qm_sg_entry_set_len(struct qm_sg_entry *sg, int len)
sg                234 include/soc/fsl/qman.h 	sg->cfg = cpu_to_be32(len & QM_SG_LEN_MASK);
sg                237 include/soc/fsl/qman.h static inline void qm_sg_entry_set_f(struct qm_sg_entry *sg, int len)
sg                239 include/soc/fsl/qman.h 	sg->cfg = cpu_to_be32(QM_SG_FIN | (len & QM_SG_LEN_MASK));
sg                242 include/soc/fsl/qman.h static inline int qm_sg_entry_get_off(const struct qm_sg_entry *sg)
sg                244 include/soc/fsl/qman.h 	return be32_to_cpu(sg->offset) & QM_SG_OFF_MASK;
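
The QMan helpers above follow the same accessor pattern for the big-endian qm_sg_entry: qm_sg_entry_set64() writes the 40-bit address across addr_hi/addr_lo as shown, and qm_sg_entry_set_f() stores the length together with the QM_SG_FIN bit. A hedged two-entry example; my_build_qm_sg() is a made-up name:

#include <soc/fsl/qman.h>
#include <linux/string.h>

static void my_build_qm_sg(struct qm_sg_entry sg[2], dma_addr_t a0, int l0,
                           dma_addr_t a1, int l1)
{
        memset(sg, 0, 2 * sizeof(*sg));
        qm_sg_entry_set64(&sg[0], a0);
        qm_sg_entry_set_len(&sg[0], l0);
        qm_sg_entry_set64(&sg[1], a1);
        qm_sg_entry_set_f(&sg[1], l1);  /* length plus the final flag */
}
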
sg               1213 kernel/dma/debug.c static void check_sg_segment(struct device *dev, struct scatterlist *sg)
sg               1223 kernel/dma/debug.c 	if (sg->length > max_seg)
sg               1225 kernel/dma/debug.c 			   sg->length, max_seg);
sg               1231 kernel/dma/debug.c 	start = sg_dma_address(sg);
sg               1232 kernel/dma/debug.c 	end = start + sg_dma_len(sg) - 1;
sg               1346 kernel/dma/debug.c void debug_dma_map_sg(struct device *dev, struct scatterlist *sg,
sg               1356 kernel/dma/debug.c 	for_each_sg(sg, s, mapped_ents, i) {
sg               1579 kernel/dma/debug.c void debug_dma_sync_sg_for_cpu(struct device *dev, struct scatterlist *sg,
sg               1588 kernel/dma/debug.c 	for_each_sg(sg, s, nelems, i) {
sg               1612 kernel/dma/debug.c void debug_dma_sync_sg_for_device(struct device *dev, struct scatterlist *sg,
sg               1621 kernel/dma/debug.c 	for_each_sg(sg, s, nelems, i) {
sg                242 kernel/dma/direct.c 	struct scatterlist *sg;
sg                245 kernel/dma/direct.c 	for_each_sg(sgl, sg, nents, i) {
sg                246 kernel/dma/direct.c 		phys_addr_t paddr = dma_to_phys(dev, sg_dma_address(sg));
sg                249 kernel/dma/direct.c 			swiotlb_tbl_sync_single(dev, paddr, sg->length,
sg                253 kernel/dma/direct.c 			arch_sync_dma_for_device(dev, paddr, sg->length,
sg                281 kernel/dma/direct.c 	struct scatterlist *sg;
sg                284 kernel/dma/direct.c 	for_each_sg(sgl, sg, nents, i) {
sg                285 kernel/dma/direct.c 		phys_addr_t paddr = dma_to_phys(dev, sg_dma_address(sg));
sg                288 kernel/dma/direct.c 			arch_sync_dma_for_cpu(dev, paddr, sg->length, dir);
sg                291 kernel/dma/direct.c 			swiotlb_tbl_sync_single(dev, paddr, sg->length, dir,
sg                316 kernel/dma/direct.c 	struct scatterlist *sg;
sg                319 kernel/dma/direct.c 	for_each_sg(sgl, sg, nents, i)
sg                320 kernel/dma/direct.c 		dma_direct_unmap_page(dev, sg->dma_address, sg_dma_len(sg), dir,
sg                356 kernel/dma/direct.c 	struct scatterlist *sg;
sg                358 kernel/dma/direct.c 	for_each_sg(sgl, sg, nents, i) {
sg                359 kernel/dma/direct.c 		sg->dma_address = dma_direct_map_page(dev, sg_page(sg),
sg                360 kernel/dma/direct.c 				sg->offset, sg->length, dir, attrs);
sg                361 kernel/dma/direct.c 		if (sg->dma_address == DMA_MAPPING_ERROR)
sg                363 kernel/dma/direct.c 		sg_dma_len(sg) = sg->length;
sg                 42 kernel/dma/virt.c 	struct scatterlist *sg;
sg                 44 kernel/dma/virt.c 	for_each_sg(sgl, sg, nents, i) {
sg                 45 kernel/dma/virt.c 		BUG_ON(!sg_page(sg));
sg                 46 kernel/dma/virt.c 		sg_dma_address(sg) = (uintptr_t)sg_virt(sg);
sg                 47 kernel/dma/virt.c 		sg_dma_len(sg) = sg->length;
sg               7992 kernel/sched/fair.c group_smaller_min_cpu_capacity(struct sched_group *sg, struct sched_group *ref)
sg               7994 kernel/sched/fair.c 	return fits_capacity(sg->sgc->min_capacity, ref->sgc->min_capacity);
sg               8002 kernel/sched/fair.c group_smaller_max_cpu_capacity(struct sched_group *sg, struct sched_group *ref)
sg               8004 kernel/sched/fair.c 	return fits_capacity(sg->sgc->max_capacity, ref->sgc->max_capacity);
sg               8123 kernel/sched/fair.c 				   struct sched_group *sg,
sg               8135 kernel/sched/fair.c 	    (!group_smaller_max_cpu_capacity(sg, sds->local) ||
sg               8158 kernel/sched/fair.c 	    group_smaller_min_cpu_capacity(sds->local, sg))
sg               8182 kernel/sched/fair.c 	    sched_asym_prefer(env->dst_cpu, sg->asym_prefer_cpu)) {
sg               8188 kernel/sched/fair.c 				      sg->asym_prefer_cpu))
sg               8233 kernel/sched/fair.c 	struct sched_group *sg = env->sd->groups;
sg               8248 kernel/sched/fair.c 		local_group = cpumask_test_cpu(env->dst_cpu, sched_group_span(sg));
sg               8250 kernel/sched/fair.c 			sds->local = sg;
sg               8254 kernel/sched/fair.c 			    time_after_eq(jiffies, sg->sgc->next_update))
sg               8258 kernel/sched/fair.c 		update_sg_lb_stats(env, sg, sgs, &sg_status);
sg               8277 kernel/sched/fair.c 			sgs->group_type = group_classify(sg, sgs);
sg               8280 kernel/sched/fair.c 		if (update_sd_pick_busiest(env, sds, sg, sgs)) {
sg               8281 kernel/sched/fair.c 			sds->busiest = sg;
sg               8291 kernel/sched/fair.c 		sg = sg->next;
sg               8292 kernel/sched/fair.c 	} while (sg != env->sd->groups);
sg               8789 kernel/sched/fair.c 	struct sched_group *sg = env->sd->groups;
sg               8807 kernel/sched/fair.c 	for_each_cpu_and(cpu, group_balance_mask(sg), env->cpus) {
sg               8816 kernel/sched/fair.c 		balance_cpu = group_balance_cpu(sg);
sg               1428 kernel/sched/sched.h static inline struct cpumask *sched_group_span(struct sched_group *sg)
sg               1430 kernel/sched/sched.h 	return to_cpumask(sg->cpumask);
sg               1436 kernel/sched/sched.h static inline struct cpumask *group_balance_mask(struct sched_group *sg)
sg               1438 kernel/sched/sched.h 	return to_cpumask(sg->sgc->cpumask);
sg               1450 kernel/sched/sched.h extern int group_balance_cpu(struct sched_group *sg);
sg                558 kernel/sched/topology.c static void free_sched_groups(struct sched_group *sg, int free_sgc)
sg                562 kernel/sched/topology.c 	if (!sg)
sg                565 kernel/sched/topology.c 	first = sg;
sg                567 kernel/sched/topology.c 		tmp = sg->next;
sg                569 kernel/sched/topology.c 		if (free_sgc && atomic_dec_and_test(&sg->sgc->ref))
sg                570 kernel/sched/topology.c 			kfree(sg->sgc);
sg                572 kernel/sched/topology.c 		if (atomic_dec_and_test(&sg->ref))
sg                573 kernel/sched/topology.c 			kfree(sg);
sg                574 kernel/sched/topology.c 		sg = tmp;
sg                575 kernel/sched/topology.c 	} while (sg != first);
sg                728 kernel/sched/topology.c int group_balance_cpu(struct sched_group *sg)
sg                730 kernel/sched/topology.c 	return cpumask_first(group_balance_mask(sg));
sg                840 kernel/sched/topology.c build_balance_mask(struct sched_domain *sd, struct sched_group *sg, struct cpumask *mask)
sg                842 kernel/sched/topology.c 	const struct cpumask *sg_span = sched_group_span(sg);
sg                879 kernel/sched/topology.c 	struct sched_group *sg;
sg                882 kernel/sched/topology.c 	sg = kzalloc_node(sizeof(struct sched_group) + cpumask_size(),
sg                885 kernel/sched/topology.c 	if (!sg)
sg                888 kernel/sched/topology.c 	sg_span = sched_group_span(sg);
sg                894 kernel/sched/topology.c 	atomic_inc(&sg->ref);
sg                895 kernel/sched/topology.c 	return sg;
sg                899 kernel/sched/topology.c 				     struct sched_group *sg)
sg                906 kernel/sched/topology.c 	build_balance_mask(sd, sg, mask);
sg                907 kernel/sched/topology.c 	cpu = cpumask_first_and(sched_group_span(sg), mask);
sg                909 kernel/sched/topology.c 	sg->sgc = *per_cpu_ptr(sdd->sgc, cpu);
sg                910 kernel/sched/topology.c 	if (atomic_inc_return(&sg->sgc->ref) == 1)
sg                911 kernel/sched/topology.c 		cpumask_copy(group_balance_mask(sg), mask);
sg                913 kernel/sched/topology.c 		WARN_ON_ONCE(!cpumask_equal(group_balance_mask(sg), mask));
sg                920 kernel/sched/topology.c 	sg_span = sched_group_span(sg);
sg                921 kernel/sched/topology.c 	sg->sgc->capacity = SCHED_CAPACITY_SCALE * cpumask_weight(sg_span);
sg                922 kernel/sched/topology.c 	sg->sgc->min_capacity = SCHED_CAPACITY_SCALE;
sg                923 kernel/sched/topology.c 	sg->sgc->max_capacity = SCHED_CAPACITY_SCALE;
sg                929 kernel/sched/topology.c 	struct sched_group *first = NULL, *last = NULL, *sg;
sg                959 kernel/sched/topology.c 		sg = build_group_from_child_sched_domain(sibling, cpu);
sg                960 kernel/sched/topology.c 		if (!sg)
sg                963 kernel/sched/topology.c 		sg_span = sched_group_span(sg);
sg                966 kernel/sched/topology.c 		init_overlap_sched_group(sd, sg);
sg                969 kernel/sched/topology.c 			first = sg;
sg                971 kernel/sched/topology.c 			last->next = sg;
sg                972 kernel/sched/topology.c 		last = sg;
sg               1061 kernel/sched/topology.c 	struct sched_group *sg;
sg               1067 kernel/sched/topology.c 	sg = *per_cpu_ptr(sdd->sg, cpu);
sg               1068 kernel/sched/topology.c 	sg->sgc = *per_cpu_ptr(sdd->sgc, cpu);
sg               1071 kernel/sched/topology.c 	already_visited = atomic_inc_return(&sg->ref) > 1;
sg               1073 kernel/sched/topology.c 	WARN_ON(already_visited != (atomic_inc_return(&sg->sgc->ref) > 1));
sg               1077 kernel/sched/topology.c 		return sg;
sg               1080 kernel/sched/topology.c 		cpumask_copy(sched_group_span(sg), sched_domain_span(child));
sg               1081 kernel/sched/topology.c 		cpumask_copy(group_balance_mask(sg), sched_group_span(sg));
sg               1083 kernel/sched/topology.c 		cpumask_set_cpu(cpu, sched_group_span(sg));
sg               1084 kernel/sched/topology.c 		cpumask_set_cpu(cpu, group_balance_mask(sg));
sg               1087 kernel/sched/topology.c 	sg->sgc->capacity = SCHED_CAPACITY_SCALE * cpumask_weight(sched_group_span(sg));
sg               1088 kernel/sched/topology.c 	sg->sgc->min_capacity = SCHED_CAPACITY_SCALE;
sg               1089 kernel/sched/topology.c 	sg->sgc->max_capacity = SCHED_CAPACITY_SCALE;
sg               1091 kernel/sched/topology.c 	return sg;
sg               1116 kernel/sched/topology.c 		struct sched_group *sg;
sg               1121 kernel/sched/topology.c 		sg = get_group(i, sdd);
sg               1123 kernel/sched/topology.c 		cpumask_or(covered, covered, sched_group_span(sg));
sg               1126 kernel/sched/topology.c 			first = sg;
sg               1128 kernel/sched/topology.c 			last->next = sg;
sg               1129 kernel/sched/topology.c 		last = sg;
sg               1149 kernel/sched/topology.c 	struct sched_group *sg = sd->groups;
sg               1151 kernel/sched/topology.c 	WARN_ON(!sg);
sg               1156 kernel/sched/topology.c 		sg->group_weight = cpumask_weight(sched_group_span(sg));
sg               1161 kernel/sched/topology.c 		for_each_cpu(cpu, sched_group_span(sg)) {
sg               1167 kernel/sched/topology.c 		sg->asym_prefer_cpu = max_cpu;
sg               1170 kernel/sched/topology.c 		sg = sg->next;
sg               1171 kernel/sched/topology.c 	} while (sg != sd->groups);
sg               1173 kernel/sched/topology.c 	if (cpu != group_balance_cpu(sg))
sg               1271 kernel/sched/topology.c 	if (atomic_read(&(*per_cpu_ptr(sdd->sg, cpu))->ref))
sg               1272 kernel/sched/topology.c 		*per_cpu_ptr(sdd->sg, cpu) = NULL;
sg               1766 kernel/sched/topology.c 		sdd->sg = alloc_percpu(struct sched_group *);
sg               1767 kernel/sched/topology.c 		if (!sdd->sg)
sg               1777 kernel/sched/topology.c 			struct sched_group *sg;
sg               1794 kernel/sched/topology.c 			sg = kzalloc_node(sizeof(struct sched_group) + cpumask_size(),
sg               1796 kernel/sched/topology.c 			if (!sg)
sg               1799 kernel/sched/topology.c 			sg->next = sg;
sg               1801 kernel/sched/topology.c 			*per_cpu_ptr(sdd->sg, j) = sg;
sg               1839 kernel/sched/topology.c 			if (sdd->sg)
sg               1840 kernel/sched/topology.c 				kfree(*per_cpu_ptr(sdd->sg, j));
sg               1848 kernel/sched/topology.c 		free_percpu(sdd->sg);
sg               1849 kernel/sched/topology.c 		sdd->sg = NULL;
sg               1537 lib/iov_iter.c 	struct scatterlist sg;
sg               1541 lib/iov_iter.c 	sg_init_one(&sg, addr, copied);
sg               1542 lib/iov_iter.c 	ahash_request_set_crypt(hash, &sg, NULL, copied);
sg                 23 lib/scatterlist.c struct scatterlist *sg_next(struct scatterlist *sg)
sg                 25 lib/scatterlist.c 	if (sg_is_last(sg))
sg                 28 lib/scatterlist.c 	sg++;
sg                 29 lib/scatterlist.c 	if (unlikely(sg_is_chain(sg)))
sg                 30 lib/scatterlist.c 		sg = sg_chain_ptr(sg);
sg                 32 lib/scatterlist.c 	return sg;
sg                 45 lib/scatterlist.c int sg_nents(struct scatterlist *sg)
sg                 48 lib/scatterlist.c 	for (nents = 0; sg; sg = sg_next(sg))
sg                 68 lib/scatterlist.c int sg_nents_for_len(struct scatterlist *sg, u64 len)
sg                 76 lib/scatterlist.c 	for (nents = 0, total = 0; sg; sg = sg_next(sg)) {
sg                 78 lib/scatterlist.c 		total += sg->length;
sg                103 lib/scatterlist.c 	struct scatterlist *sg, *ret = NULL;
sg                106 lib/scatterlist.c 	for_each_sg(sgl, sg, nents, i)
sg                107 lib/scatterlist.c 		ret = sg;
sg                138 lib/scatterlist.c void sg_init_one(struct scatterlist *sg, const void *buf, unsigned int buflen)
sg                140 lib/scatterlist.c 	sg_init_table(sg, 1);
sg                141 lib/scatterlist.c 	sg_set_buf(sg, buf, buflen);
sg                169 lib/scatterlist.c static void sg_kfree(struct scatterlist *sg, unsigned int nents)
sg                172 lib/scatterlist.c 		kmemleak_free(sg);
sg                173 lib/scatterlist.c 		free_page((unsigned long) sg);
sg                175 lib/scatterlist.c 		kfree(sg);
sg                271 lib/scatterlist.c 	struct scatterlist *sg, *prv;
sg                299 lib/scatterlist.c 			sg = first_chunk;
sg                302 lib/scatterlist.c 			sg = alloc_fn(alloc_size, gfp_mask);
sg                304 lib/scatterlist.c 		if (unlikely(!sg)) {
sg                317 lib/scatterlist.c 		sg_init_table(sg, alloc_size);
sg                325 lib/scatterlist.c 			sg_chain(prv, prv_max_ents, sg);
sg                327 lib/scatterlist.c 			table->sgl = sg;
sg                333 lib/scatterlist.c 			sg_mark_end(&sg[sg_size - 1]);
sg                335 lib/scatterlist.c 		prv = sg;
sg                490 lib/scatterlist.c 	struct scatterlist *sgl, *sg;
sg                512 lib/scatterlist.c 	sg = sgl;
sg                521 lib/scatterlist.c 		sg_set_page(sg, page, elem_len, 0);
sg                523 lib/scatterlist.c 		sg = sg_next(sg);
sg                562 lib/scatterlist.c 	struct scatterlist *sg;
sg                566 lib/scatterlist.c 	for_each_sg(sgl, sg, nents, i) {
sg                567 lib/scatterlist.c 		if (!sg)
sg                569 lib/scatterlist.c 		page = sg_page(sg);
sg                607 lib/scatterlist.c 	piter->sg = sglist;
sg                612 lib/scatterlist.c static int sg_page_count(struct scatterlist *sg)
sg                614 lib/scatterlist.c 	return PAGE_ALIGN(sg->offset + sg->length) >> PAGE_SHIFT;
sg                619 lib/scatterlist.c 	if (!piter->__nents || !piter->sg)
sg                625 lib/scatterlist.c 	while (piter->sg_pgoffset >= sg_page_count(piter->sg)) {
sg                626 lib/scatterlist.c 		piter->sg_pgoffset -= sg_page_count(piter->sg);
sg                627 lib/scatterlist.c 		piter->sg = sg_next(piter->sg);
sg                628 lib/scatterlist.c 		if (!--piter->__nents || !piter->sg)
sg                636 lib/scatterlist.c static int sg_dma_page_count(struct scatterlist *sg)
sg                638 lib/scatterlist.c 	return PAGE_ALIGN(sg->offset + sg_dma_len(sg)) >> PAGE_SHIFT;
sg                645 lib/scatterlist.c 	if (!piter->__nents || !piter->sg)
sg                651 lib/scatterlist.c 	while (piter->sg_pgoffset >= sg_dma_page_count(piter->sg)) {
sg                652 lib/scatterlist.c 		piter->sg_pgoffset -= sg_dma_page_count(piter->sg);
sg                653 lib/scatterlist.c 		piter->sg = sg_next(piter->sg);
sg                654 lib/scatterlist.c 		if (!--piter->__nents || !piter->sg)
sg                688 lib/scatterlist.c 		struct scatterlist *sg;
sg                693 lib/scatterlist.c 		sg = miter->piter.sg;
sg                695 lib/scatterlist.c 		miter->__offset = miter->piter.sg_pgoffset ? 0 : sg->offset;
sg                698 lib/scatterlist.c 		miter->__remaining = sg->offset + sg->length -
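
The piter/miter code above is what backs sg_miter_next(): the page iterator steps sg_pgoffset across each entry's pages, and the mapping iterator layers an offset/remaining window on top so callers see one mapped chunk at a time. A hedged sketch of consuming it; my_zero_sgl() is an invented helper:

#include <linux/scatterlist.h>
#include <linux/string.h>

static void my_zero_sgl(struct scatterlist *sgl, unsigned int nents)
{
        struct sg_mapping_iter miter;

        sg_miter_start(&miter, sgl, nents, SG_MITER_TO_SG);
        while (sg_miter_next(&miter))           /* maps the next chunk */
                memset(miter.addr, 0, miter.length);
        sg_miter_stop(&miter);                  /* flush and unmap the last one */
}
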
sg                 28 lib/sg_split.c 	struct scatterlist *sg;
sg                 35 lib/sg_split.c 	for_each_sg(in, sg, nents, i) {
sg                 36 lib/sg_split.c 		sglen = mapped ? sg_dma_len(sg) : sg->length;
sg                 44 lib/sg_split.c 			curr->in_sg0 = sg;
sg                 57 lib/sg_split.c 			curr->in_sg0 = sg;
sg                 76 net/9p/trans_virtio.c 	struct scatterlist sg[VIRTQUEUE_NUM];
sg                167 net/9p/trans_virtio.c static int pack_sg_list(struct scatterlist *sg, int start,
sg                179 net/9p/trans_virtio.c 		sg_unmark_end(&sg[index]);
sg                180 net/9p/trans_virtio.c 		sg_set_buf(&sg[index++], data, s);
sg                185 net/9p/trans_virtio.c 		sg_mark_end(&sg[index - 1]);
sg                213 net/9p/trans_virtio.c pack_sg_list_p(struct scatterlist *sg, int start, int limit,
sg                231 net/9p/trans_virtio.c 		sg_unmark_end(&sg[index]);
sg                232 net/9p/trans_virtio.c 		sg_set_page(&sg[index++], pdata[i++], s, data_off);
sg                239 net/9p/trans_virtio.c 		sg_mark_end(&sg[index - 1]);
sg                267 net/9p/trans_virtio.c 	out = pack_sg_list(chan->sg, 0,
sg                270 net/9p/trans_virtio.c 		sgs[out_sgs++] = chan->sg;
sg                272 net/9p/trans_virtio.c 	in = pack_sg_list(chan->sg, out,
sg                275 net/9p/trans_virtio.c 		sgs[out_sgs + in_sgs++] = chan->sg + out;
sg                444 net/9p/trans_virtio.c 	out = pack_sg_list(chan->sg, 0,
sg                448 net/9p/trans_virtio.c 		sgs[out_sgs++] = chan->sg;
sg                451 net/9p/trans_virtio.c 		sgs[out_sgs++] = chan->sg + out;
sg                452 net/9p/trans_virtio.c 		out += pack_sg_list_p(chan->sg, out, VIRTQUEUE_NUM,
sg                463 net/9p/trans_virtio.c 	in = pack_sg_list(chan->sg, out,
sg                466 net/9p/trans_virtio.c 		sgs[out_sgs + in_sgs++] = chan->sg + out;
sg                469 net/9p/trans_virtio.c 		sgs[out_sgs + in_sgs++] = chan->sg + out + in;
sg                470 net/9p/trans_virtio.c 		in += pack_sg_list_p(chan->sg, out + in, VIRTQUEUE_NUM,
sg                583 net/9p/trans_virtio.c 	sg_init_table(chan->sg, VIRTQUEUE_NUM);
sg                163 net/ceph/crypto.c 	struct scatterlist *sg;
sg                192 net/ceph/crypto.c 	for_each_sg(sgt->sgl, sg, sgt->orig_nents, i) {
sg                201 net/ceph/crypto.c 		sg_set_page(sg, page, len, off);
sg               2232 net/core/filter.c 	i = msg->sg.start;
sg               2239 net/core/filter.c 	} while (i != msg->sg.end);
sg               2249 net/core/filter.c 	if (!msg->sg.copy[i] && bytes_sg_total <= len)
sg               2267 net/core/filter.c 	} while (i != msg->sg.end);
sg               2294 net/core/filter.c 	sg_set_page(&msg->sg.data[first_sge], page, copy, 0);
sg               2316 net/core/filter.c 		if (move_from == msg->sg.end)
sg               2319 net/core/filter.c 		msg->sg.data[i] = msg->sg.data[move_from];
sg               2320 net/core/filter.c 		msg->sg.data[move_from].length = 0;
sg               2321 net/core/filter.c 		msg->sg.data[move_from].page_link = 0;
sg               2322 net/core/filter.c 		msg->sg.data[move_from].offset = 0;
sg               2326 net/core/filter.c 	msg->sg.end = msg->sg.end - shift > msg->sg.end ?
sg               2327 net/core/filter.c 		      msg->sg.end - shift + NR_MSG_FRAG_IDS :
sg               2328 net/core/filter.c 		      msg->sg.end - shift;
sg               2330 net/core/filter.c 	msg->data = sg_virt(&msg->sg.data[first_sge]) + start - offset;
sg               2357 net/core/filter.c 	i = msg->sg.start;
sg               2365 net/core/filter.c 	} while (i != msg->sg.end);
sg               2380 net/core/filter.c 		copy = msg->sg.data[i].length;
sg               2439 net/core/filter.c 		while (i != msg->sg.end) {
sg               2440 net/core/filter.c 			msg->sg.data[i] = sge;
sg               2454 net/core/filter.c 	msg->sg.size += len;
sg               2455 net/core/filter.c 	msg->sg.copy[new] = false;
sg               2456 net/core/filter.c 	sg_set_page(&msg->sg.data[new], page, len + copy, 0);
sg               2460 net/core/filter.c 		msg->sg.data[new] = rsge;
sg               2484 net/core/filter.c 		msg->sg.data[prev] = msg->sg.data[i];
sg               2485 net/core/filter.c 	} while (i != msg->sg.end);
sg               2499 net/core/filter.c 	while (i != msg->sg.end) {
sg               2500 net/core/filter.c 		msg->sg.data[i] = sge;
sg               2518 net/core/filter.c 	i = msg->sg.start;
sg               2526 net/core/filter.c 	} while (i != msg->sg.end);
sg               2529 net/core/filter.c 	if (start >= offset + l || last >= msg->sg.size)
sg               2630 net/core/filter.c 	msg->sg.size -= (len - pop);
sg               8343 net/core/filter.c 	BUILD_BUG_ON(offsetof(struct sk_msg, sg) != 0);
sg               3666 net/core/skbuff.c 	bool csum, sg;
sg               3697 net/core/skbuff.c 	sg = !!(features & NETIF_F_SG);
sg               3700 net/core/skbuff.c 	if (sg && csum && (mss != GSO_BY_FRAGS))  {
sg               3763 net/core/skbuff.c 		if (hsize > len || !sg)
sg               3767 net/core/skbuff.c 		    (skb_headlen(list_skb) == len || sg)) {
sg               3838 net/core/skbuff.c 		if (!sg) {
sg               4162 net/core/skbuff.c __skb_to_sgvec(struct sk_buff *skb, struct scatterlist *sg, int offset, int len,
sg               4176 net/core/skbuff.c 		sg_set_buf(sg, skb->data + offset, copy);
sg               4191 net/core/skbuff.c 			if (unlikely(elt && sg_is_last(&sg[elt - 1])))
sg               4196 net/core/skbuff.c 			sg_set_page(&sg[elt], skb_frag_page(frag), copy,
sg               4213 net/core/skbuff.c 			if (unlikely(elt && sg_is_last(&sg[elt - 1])))
sg               4218 net/core/skbuff.c 			ret = __skb_to_sgvec(frag_iter, sg+elt, offset - start,
sg               4245 net/core/skbuff.c int skb_to_sgvec(struct sk_buff *skb, struct scatterlist *sg, int offset, int len)
sg               4247 net/core/skbuff.c 	int nsg = __skb_to_sgvec(skb, sg, offset, len, 0);
sg               4252 net/core/skbuff.c 	sg_mark_end(&sg[nsg - 1]);
sg               4277 net/core/skbuff.c int skb_to_sgvec_nomark(struct sk_buff *skb, struct scatterlist *sg,
sg               4280 net/core/skbuff.c 	return __skb_to_sgvec(skb, sg, offset, len, 0);
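
The two __skb_to_sgvec() wrappers above differ only in termination: skb_to_sgvec() marks the last entry with sg_mark_end(), while the _nomark variant leaves the list open so the caller can append further elements (the AH code later in this listing appends a sequence-number element that way). A hedged caller-side sketch; my_skb_to_sg() is a made-up name, and the table is sized for a linear head plus page frags (frag_list skbs need more entries):

#include <linux/skbuff.h>
#include <linux/scatterlist.h>

static int my_skb_to_sg(struct sk_buff *skb,
                        struct scatterlist sg[MAX_SKB_FRAGS + 1])
{
        sg_init_table(sg, MAX_SKB_FRAGS + 1);
        /* Returns the number of entries used, or a negative errno. */
        return skb_to_sgvec(skb, sg, 0, skb->len);
}
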
sg                 13 net/core/skmsg.c 	if (msg->sg.end > msg->sg.start &&
sg                 14 net/core/skmsg.c 	    elem_first_coalesce < msg->sg.end)
sg                 17 net/core/skmsg.c 	if (msg->sg.end < msg->sg.start &&
sg                 18 net/core/skmsg.c 	    (elem_first_coalesce > msg->sg.start ||
sg                 19 net/core/skmsg.c 	     elem_first_coalesce < msg->sg.end))
sg                 31 net/core/skmsg.c 	len -= msg->sg.size;
sg                 45 net/core/skmsg.c 		i = msg->sg.end;
sg                 47 net/core/skmsg.c 		sge = &msg->sg.data[i];
sg                 59 net/core/skmsg.c 			sge = &msg->sg.data[msg->sg.end];
sg                 67 net/core/skmsg.c 		msg->sg.size += use;
sg                 79 net/core/skmsg.c 	int i = src->sg.start;
sg                 89 net/core/skmsg.c 		if (i == src->sg.end && off)
sg                 99 net/core/skmsg.c 		if (dst->sg.end)
sg                100 net/core/skmsg.c 			sgd = sk_msg_elem(dst, dst->sg.end - 1);
sg                106 net/core/skmsg.c 			dst->sg.size += sge_len;
sg                118 net/core/skmsg.c 		if (i == src->sg.end && len)
sg                129 net/core/skmsg.c 	int i = msg->sg.start;
sg                146 net/core/skmsg.c 	} while (bytes && i != msg->sg.end);
sg                147 net/core/skmsg.c 	msg->sg.start = i;
sg                153 net/core/skmsg.c 	int i = msg->sg.start;
sg                156 net/core/skmsg.c 		struct scatterlist *sge = &msg->sg.data[i];
sg                162 net/core/skmsg.c 	} while (i != msg->sg.end);
sg                186 net/core/skmsg.c 	while (msg->sg.size) {
sg                187 net/core/skmsg.c 		msg->sg.size -= sge->length;
sg                190 net/core/skmsg.c 		sk_msg_check_to_free(msg, i, msg->sg.size);
sg                200 net/core/skmsg.c 	return __sk_msg_free(sk, msg, msg->sg.start, false);
sg                206 net/core/skmsg.c 	return __sk_msg_free(sk, msg, msg->sg.start, true);
sg                214 net/core/skmsg.c 	u32 i = msg->sg.start;
sg                225 net/core/skmsg.c 			msg->sg.size -= bytes;
sg                229 net/core/skmsg.c 		msg->sg.size -= sge->length;
sg                235 net/core/skmsg.c 	msg->sg.start = i;
sg                252 net/core/skmsg.c 	int trim = msg->sg.size - len;
sg                253 net/core/skmsg.c 	u32 i = msg->sg.end;
sg                261 net/core/skmsg.c 	msg->sg.size = len;
sg                262 net/core/skmsg.c 	while (msg->sg.data[i].length &&
sg                263 net/core/skmsg.c 	       trim >= msg->sg.data[i].length) {
sg                264 net/core/skmsg.c 		trim -= msg->sg.data[i].length;
sg                271 net/core/skmsg.c 	msg->sg.data[i].length -= trim;
sg                274 net/core/skmsg.c 	if (msg->sg.curr == i && msg->sg.copybreak > msg->sg.data[i].length)
sg                275 net/core/skmsg.c 		msg->sg.copybreak = msg->sg.data[i].length;
sg                278 net/core/skmsg.c 	msg->sg.end = i;
sg                286 net/core/skmsg.c 	if (!msg->sg.size) {
sg                287 net/core/skmsg.c 		msg->sg.curr = msg->sg.start;
sg                288 net/core/skmsg.c 		msg->sg.copybreak = 0;
sg                289 net/core/skmsg.c 	} else if (sk_msg_iter_dist(msg->sg.start, msg->sg.curr) >=
sg                290 net/core/skmsg.c 		   sk_msg_iter_dist(msg->sg.start, msg->sg.end)) {
sg                292 net/core/skmsg.c 		msg->sg.curr = i;
sg                293 net/core/skmsg.c 		msg->sg.copybreak = msg->sg.data[i].length;
sg                306 net/core/skmsg.c 	orig = msg->sg.size;
sg                324 net/core/skmsg.c 		msg->sg.size += copied;
sg                328 net/core/skmsg.c 			sg_set_page(&msg->sg.data[msg->sg.end],
sg                330 net/core/skmsg.c 			sg_unmark_end(&msg->sg.data[msg->sg.end]);
sg                343 net/core/skmsg.c 		msg->sg.copybreak = 0;
sg                344 net/core/skmsg.c 		msg->sg.curr = msg->sg.end;
sg                351 net/core/skmsg.c 		iov_iter_revert(from, msg->sg.size - orig);
sg                359 net/core/skmsg.c 	int ret = -ENOSPC, i = msg->sg.curr;
sg                367 net/core/skmsg.c 		if (msg->sg.copybreak >= sge->length) {
sg                368 net/core/skmsg.c 			msg->sg.copybreak = 0;
sg                370 net/core/skmsg.c 			if (i == msg->sg.end)
sg                375 net/core/skmsg.c 		buf_size = sge->length - msg->sg.copybreak;
sg                377 net/core/skmsg.c 		to = sg_virt(sge) + msg->sg.copybreak;
sg                378 net/core/skmsg.c 		msg->sg.copybreak += copy;
sg                390 net/core/skmsg.c 		msg->sg.copybreak = 0;
sg                392 net/core/skmsg.c 	} while (i != msg->sg.end);
sg                394 net/core/skmsg.c 	msg->sg.curr = i;
sg                414 net/core/skmsg.c 	num_sge = skb_to_sgvec(skb, msg->sg.data, 0, skb->len);
sg                422 net/core/skmsg.c 	msg->sg.start = 0;
sg                423 net/core/skmsg.c 	msg->sg.size = copied;
sg                424 net/core/skmsg.c 	msg->sg.end = num_sge;
sg                156 net/ipv4/ah4.c 	struct scatterlist *sg;
sg                187 net/ipv4/ah4.c 	sg = ah_req_sg(ahash, req);
sg                188 net/ipv4/ah4.c 	seqhisg = sg + nfrags;
sg                224 net/ipv4/ah4.c 	sg_init_table(sg, nfrags + sglists);
sg                225 net/ipv4/ah4.c 	err = skb_to_sgvec_nomark(skb, sg, 0, skb->len);
sg                234 net/ipv4/ah4.c 	ahash_request_set_crypt(req, sg, icv, skb->len + seqhi_len);
sg                314 net/ipv4/ah4.c 	struct scatterlist *sg;
sg                379 net/ipv4/ah4.c 	sg = ah_req_sg(ahash, req);
sg                380 net/ipv4/ah4.c 	seqhisg = sg + nfrags;
sg                399 net/ipv4/ah4.c 	sg_init_table(sg, nfrags + sglists);
sg                400 net/ipv4/ah4.c 	err = skb_to_sgvec_nomark(skb, sg, 0, skb->len);
sg                409 net/ipv4/ah4.c 	ahash_request_set_crypt(req, sg, icv, skb->len + seqhi_len);
sg                103 net/ipv4/esp4.c 	struct scatterlist *sg;
sg                116 net/ipv4/esp4.c 		for (sg = sg_next(req->src); sg; sg = sg_next(sg))
sg                117 net/ipv4/esp4.c 			put_page(sg_page(sg));
sg                377 net/ipv4/esp4.c 	struct scatterlist *sg, *dsg;
sg                400 net/ipv4/esp4.c 	sg = esp_req_sg(aead, req);
sg                403 net/ipv4/esp4.c 		dsg = sg;
sg                405 net/ipv4/esp4.c 		dsg = &sg[esp->nfrags];
sg                410 net/ipv4/esp4.c 	sg_init_table(sg, esp->nfrags);
sg                411 net/ipv4/esp4.c 	err = skb_to_sgvec(skb, sg,
sg                451 net/ipv4/esp4.c 	aead_request_set_crypt(req, sg, dsg, ivlen + esp->clen, iv);
sg                474 net/ipv4/esp4.c 	if (sg != dsg)
sg                709 net/ipv4/esp4.c 	struct scatterlist *sg;
sg                755 net/ipv4/esp4.c 	sg = esp_req_sg(aead, req);
sg                759 net/ipv4/esp4.c 	sg_init_table(sg, nfrags);
sg                760 net/ipv4/esp4.c 	err = skb_to_sgvec(skb, sg, 0, skb->len);
sg                773 net/ipv4/esp4.c 	aead_request_set_crypt(req, sg, sg, elen + ivlen, iv);
sg               3809 net/ipv4/tcp.c 	struct scatterlist sg;
sg               3818 net/ipv4/tcp.c 	sg_init_table(&sg, 1);
sg               3820 net/ipv4/tcp.c 	sg_set_buf(&sg, ((u8 *) tp) + header_len, head_data_len);
sg               3821 net/ipv4/tcp.c 	ahash_request_set_crypt(req, &sg, NULL, head_data_len);
sg               3830 net/ipv4/tcp.c 		sg_set_page(&sg, page, skb_frag_size(f),
sg               3832 net/ipv4/tcp.c 		ahash_request_set_crypt(req, &sg, NULL, skb_frag_size(f));
sg               3847 net/ipv4/tcp.c 	struct scatterlist sg;
sg               3849 net/ipv4/tcp.c 	sg_init_one(&sg, key->key, key->keylen);
sg               3850 net/ipv4/tcp.c 	ahash_request_set_crypt(hp->md5_req, &sg, NULL, key->keylen);
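
The tcp.c fragments above feed both packet data and the key itself through one ahash request, wrapping each piece in a single-entry scatterlist. The same three-step pattern in isolation; my_hash_buf() is an invented helper, and req is assumed to be an already-initialised ahash request:

#include <crypto/hash.h>
#include <linux/scatterlist.h>

static int my_hash_buf(struct ahash_request *req, const void *buf,
                       unsigned int len, u8 *out)
{
        struct scatterlist sg;

        sg_init_one(&sg, buf, len);
        ahash_request_set_crypt(req, &sg, out, len);
        return crypto_ahash_digest(req);        /* init + update + final */
}
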
sg                 62 net/ipv4/tcp_bpf.c 		i = msg_rx->sg.start;
sg                 74 net/ipv4/tcp_bpf.c 				msg_rx->sg.start = i;
sg                 83 net/ipv4/tcp_bpf.c 				msg_rx->sg.size -= copy;
sg                 96 net/ipv4/tcp_bpf.c 		} while (i != msg_rx->sg.end);
sg                103 net/ipv4/tcp_bpf.c 		msg_rx->sg.start = i;
sg                104 net/ipv4/tcp_bpf.c 		if (!sge->length && msg_rx->sg.start == msg_rx->sg.end) {
sg                178 net/ipv4/tcp_bpf.c 	tmp->sg.start = msg->sg.start;
sg                179 net/ipv4/tcp_bpf.c 	i = msg->sg.start;
sg                196 net/ipv4/tcp_bpf.c 		tmp->sg.end = i;
sg                202 net/ipv4/tcp_bpf.c 	} while (i != msg->sg.end);
sg                205 net/ipv4/tcp_bpf.c 		msg->sg.start = i;
sg                229 net/ipv4/tcp_bpf.c 		sge = sk_msg_elem(msg, msg->sg.start);
sg                250 net/ipv4/tcp_bpf.c 		msg->sg.size -= ret;
sg                264 net/ipv4/tcp_bpf.c 			if (msg->sg.start == msg->sg.end)
sg                318 net/ipv4/tcp_bpf.c 		delta = msg->sg.size;
sg                320 net/ipv4/tcp_bpf.c 		delta -= msg->sg.size;
sg                324 net/ipv4/tcp_bpf.c 	    msg->cork_bytes > msg->sg.size && !enospc) {
sg                325 net/ipv4/tcp_bpf.c 		psock->cork_bytes = msg->cork_bytes - msg->sg.size;
sg                336 net/ipv4/tcp_bpf.c 	tosend = msg->sg.size;
sg                390 net/ipv4/tcp_bpf.c 		    msg->sg.data[msg->sg.start].page_link &&
sg                391 net/ipv4/tcp_bpf.c 		    msg->sg.data[msg->sg.start].length)
sg                434 net/ipv4/tcp_bpf.c 		osize = msg_tx->sg.size;
sg                435 net/ipv4/tcp_bpf.c 		err = sk_msg_alloc(sk, msg_tx, msg_tx->sg.size + copy, msg_tx->sg.end - 1);
sg                440 net/ipv4/tcp_bpf.c 			copy = msg_tx->sg.size - osize;
sg               1189 net/ipv4/tcp_ipv4.c 	struct scatterlist sg;
sg               1203 net/ipv4/tcp_ipv4.c 	sg_init_one(&sg, bp, sizeof(*bp) + sizeof(*th));
sg               1204 net/ipv4/tcp_ipv4.c 	ahash_request_set_crypt(hp->md5_req, &sg, NULL,
sg                333 net/ipv6/ah6.c 	struct scatterlist *sg;
sg                370 net/ipv6/ah6.c 	sg = ah_req_sg(ahash, req);
sg                371 net/ipv6/ah6.c 	seqhisg = sg + nfrags;
sg                415 net/ipv6/ah6.c 	sg_init_table(sg, nfrags + sglists);
sg                416 net/ipv6/ah6.c 	err = skb_to_sgvec_nomark(skb, sg, 0, skb->len);
sg                425 net/ipv6/ah6.c 	ahash_request_set_crypt(req, sg, icv, skb->len + seqhi_len);
sg                519 net/ipv6/ah6.c 	struct scatterlist *sg;
sg                584 net/ipv6/ah6.c 	sg = ah_req_sg(ahash, req);
sg                585 net/ipv6/ah6.c 	seqhisg = sg + nfrags;
sg                600 net/ipv6/ah6.c 	sg_init_table(sg, nfrags + sglists);
sg                601 net/ipv6/ah6.c 	err = skb_to_sgvec_nomark(skb, sg, 0, skb->len);
sg                611 net/ipv6/ah6.c 	ahash_request_set_crypt(req, sg, icv, skb->len + seqhi_len);
sg                111 net/ipv6/esp6.c 	struct scatterlist *sg;
sg                123 net/ipv6/esp6.c 		for (sg = sg_next(req->src); sg; sg = sg_next(sg))
sg                124 net/ipv6/esp6.c 			put_page(sg_page(sg));
sg                320 net/ipv6/esp6.c 	struct scatterlist *sg, *dsg;
sg                342 net/ipv6/esp6.c 	sg = esp_req_sg(aead, req);
sg                345 net/ipv6/esp6.c 		dsg = sg;
sg                347 net/ipv6/esp6.c 		dsg = &sg[esp->nfrags];
sg                351 net/ipv6/esp6.c 	sg_init_table(sg, esp->nfrags);
sg                352 net/ipv6/esp6.c 	err = skb_to_sgvec(skb, sg,
sg                392 net/ipv6/esp6.c 	aead_request_set_crypt(req, sg, dsg, ivlen + esp->clen, iv);
sg                415 net/ipv6/esp6.c 	if (sg != dsg)
sg                605 net/ipv6/esp6.c 	struct scatterlist *sg;
sg                654 net/ipv6/esp6.c 	sg = esp_req_sg(aead, req);
sg                658 net/ipv6/esp6.c 	sg_init_table(sg, nfrags);
sg                659 net/ipv6/esp6.c 	ret = skb_to_sgvec(skb, sg, 0, skb->len);
sg                672 net/ipv6/esp6.c 	aead_request_set_crypt(req, sg, sg, elen + ivlen, iv);
sg                599 net/ipv6/tcp_ipv6.c 	struct scatterlist sg;
sg                613 net/ipv6/tcp_ipv6.c 	sg_init_one(&sg, bp, sizeof(*bp) + sizeof(*th));
sg                614 net/ipv6/tcp_ipv6.c 	ahash_request_set_crypt(hp->md5_req, &sg, NULL,
sg                 22 net/mac80211/aead_api.c 	struct scatterlist sg[3];
sg                 34 net/mac80211/aead_api.c 	sg_init_table(sg, 3);
sg                 35 net/mac80211/aead_api.c 	sg_set_buf(&sg[0], __aad, aad_len);
sg                 36 net/mac80211/aead_api.c 	sg_set_buf(&sg[1], data, data_len);
sg                 37 net/mac80211/aead_api.c 	sg_set_buf(&sg[2], mic, mic_len);
sg                 40 net/mac80211/aead_api.c 	aead_request_set_crypt(aead_req, sg, sg, data_len, b_0);
sg                 41 net/mac80211/aead_api.c 	aead_request_set_ad(aead_req, sg[0].length);
sg                 53 net/mac80211/aead_api.c 	struct scatterlist sg[3];
sg                 69 net/mac80211/aead_api.c 	sg_init_table(sg, 3);
sg                 70 net/mac80211/aead_api.c 	sg_set_buf(&sg[0], __aad, aad_len);
sg                 71 net/mac80211/aead_api.c 	sg_set_buf(&sg[1], data, data_len);
sg                 72 net/mac80211/aead_api.c 	sg_set_buf(&sg[2], mic, mic_len);
sg                 75 net/mac80211/aead_api.c 	aead_request_set_crypt(aead_req, sg, sg, data_len + mic_len, b_0);
sg                 76 net/mac80211/aead_api.c 	aead_request_set_ad(aead_req, sg[0].length);
sg                 20 net/mac80211/aes_gmac.c 	struct scatterlist sg[4];
sg                 36 net/mac80211/aes_gmac.c 	sg_init_table(sg, 4);
sg                 37 net/mac80211/aes_gmac.c 	sg_set_buf(&sg[0], __aad, GMAC_AAD_LEN);
sg                 38 net/mac80211/aes_gmac.c 	sg_set_buf(&sg[1], data, data_len - GMAC_MIC_LEN);
sg                 39 net/mac80211/aes_gmac.c 	sg_set_buf(&sg[2], zero, GMAC_MIC_LEN);
sg                 40 net/mac80211/aes_gmac.c 	sg_set_buf(&sg[3], mic, GMAC_MIC_LEN);
sg                 47 net/mac80211/aes_gmac.c 	aead_request_set_crypt(aead_req, sg, sg, 0, iv);
sg                655 net/mac802154/llsec.c 	struct scatterlist sg;
sg                672 net/mac802154/llsec.c 	sg_init_one(&sg, skb_mac_header(skb), assoclen + datalen + authlen);
sg                680 net/mac802154/llsec.c 	aead_request_set_crypt(req, &sg, &sg, datalen, iv);
sg                861 net/mac802154/llsec.c 	struct scatterlist sg;
sg                876 net/mac802154/llsec.c 	sg_init_one(&sg, skb_mac_header(skb), assoclen + datalen);
sg                884 net/mac802154/llsec.c 	aead_request_set_crypt(req, &sg, &sg, datalen, iv);
sg                332 net/rds/ib.h   	struct scatterlist *sg;
sg                335 net/rds/ib.h   	for_each_sg(sglist, sg, sg_dma_len, i) {
sg                336 net/rds/ib.h   		ib_dma_sync_single_for_cpu(dev, sg_dma_address(sg),
sg                337 net/rds/ib.h   					   sg_dma_len(sg), direction);
sg                347 net/rds/ib.h   	struct scatterlist *sg;
sg                350 net/rds/ib.h   	for_each_sg(sglist, sg, sg_dma_len, i) {
sg                351 net/rds/ib.h   		ib_dma_sync_single_for_device(dev, sg_dma_address(sg),
sg                352 net/rds/ib.h   					      sg_dma_len(sg), direction);
sg                432 net/rds/ib.h   		unsigned int hdr_off, unsigned int sg, unsigned int off);
sg                 99 net/rds/ib_fmr.c 			  struct rds_ib_mr *ibmr, struct scatterlist *sg,
sg                104 net/rds/ib_fmr.c 	struct scatterlist *scat = sg;
sg                112 net/rds/ib_fmr.c 	sg_dma_len = ib_dma_map_sg(dev, sg, nents, DMA_BIDIRECTIONAL);
sg                127 net/rds/ib_fmr.c 				ib_dma_unmap_sg(dev, sg, nents,
sg                136 net/rds/ib_fmr.c 				ib_dma_unmap_sg(dev, sg, nents,
sg                149 net/rds/ib_fmr.c 		ib_dma_unmap_sg(dev, sg, nents, DMA_BIDIRECTIONAL);
sg                156 net/rds/ib_fmr.c 		ib_dma_unmap_sg(dev, sg, nents, DMA_BIDIRECTIONAL);
sg                172 net/rds/ib_fmr.c 		ib_dma_unmap_sg(dev, sg, nents, DMA_BIDIRECTIONAL);
sg                181 net/rds/ib_fmr.c 	ibmr->sg = scat;
sg                199 net/rds/ib_fmr.c 				 struct scatterlist *sg,
sg                213 net/rds/ib_fmr.c 	ret = rds_ib_map_fmr(rds_ibdev, ibmr, sg, nents);
sg                134 net/rds/ib_frmr.c 	ret = ib_map_mr_sg_zbva(frmr->mr, ibmr->sg, ibmr->sg_len,
sg                190 net/rds/ib_frmr.c 			   struct scatterlist *sg, unsigned int sg_len)
sg                203 net/rds/ib_frmr.c 	ibmr->sg = sg;
sg                208 net/rds/ib_frmr.c 	ibmr->sg_dma_len = ib_dma_map_sg(dev, ibmr->sg, ibmr->sg_len,
sg                221 net/rds/ib_frmr.c 		unsigned int dma_len = sg_dma_len(&ibmr->sg[i]);
sg                222 net/rds/ib_frmr.c 		u64 dma_addr = sg_dma_address(&ibmr->sg[i]);
sg                260 net/rds/ib_frmr.c 	ib_dma_unmap_sg(rds_ibdev->dev, ibmr->sg, ibmr->sg_len,
sg                403 net/rds/ib_frmr.c 				  struct scatterlist *sg,
sg                426 net/rds/ib_frmr.c 	ret = rds_ib_map_frmr(rds_ibdev, ibmr->pool, ibmr, sg, nents);
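
rds_ib_map_frmr first DMA-maps the caller's list and then walks the mapped segments by index, as the ib_frmr.c entries show. A compressed sketch of that sequence; error handling is abbreviated and the return value is illustrative:

	ibmr->sg = sg;
	ibmr->sg_len = sg_len;
	ibmr->sg_dma_len = ib_dma_map_sg(dev, ibmr->sg, ibmr->sg_len,
					 DMA_BIDIRECTIONAL);
	if (!ibmr->sg_dma_len)
		return -EBUSY;

	for (i = 0; i < ibmr->sg_dma_len; i++) {
		unsigned int dma_len = sg_dma_len(&ibmr->sg[i]);
		u64 dma_addr = sg_dma_address(&ibmr->sg[i]);

		/* ... validate segment alignment, accumulate MR length ... */
	}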
sg                 80 net/rds/ib_mr.h 	struct scatterlist		*sg;
sg                123 net/rds/ib_mr.h void *rds_ib_get_mr(struct scatterlist *sg, unsigned long nents,
sg                145 net/rds/ib_mr.h 				  struct scatterlist *sg,
sg                218 net/rds/ib_rdma.c 		ib_dma_sync_sg_for_cpu(rds_ibdev->dev, ibmr->sg,
sg                222 net/rds/ib_rdma.c 		ib_dma_sync_sg_for_device(rds_ibdev->dev, ibmr->sg,
sg                234 net/rds/ib_rdma.c 				ibmr->sg, ibmr->sg_len,
sg                244 net/rds/ib_rdma.c 			struct page *page = sg_page(&ibmr->sg[i]);
sg                252 net/rds/ib_rdma.c 		kfree(ibmr->sg);
sg                254 net/rds/ib_rdma.c 		ibmr->sg = NULL;
sg                529 net/rds/ib_rdma.c void *rds_ib_get_mr(struct scatterlist *sg, unsigned long nents,
sg                553 net/rds/ib_rdma.c 		ibmr = rds_ib_reg_frmr(rds_ibdev, ic, sg, nents, key_ret);
sg                555 net/rds/ib_rdma.c 		ibmr = rds_ib_reg_fmr(rds_ibdev, sg, nents, key_ret);
sg                484 net/rds/ib_send.c 		unsigned int hdr_off, unsigned int sg, unsigned int off)
sg                512 net/rds/ib_send.c 		scat = &rm->data.op_sg[sg];
sg                 76 net/rds/loop.c 			 unsigned int hdr_off, unsigned int sg,
sg                 79 net/rds/loop.c 	struct scatterlist *sgp = &rm->data.op_sg[sg];
sg                 90 net/rds/loop.c 	BUG_ON(hdr_off || sg || off);
sg                365 net/rds/message.c 	struct scatterlist *sg;
sg                376 net/rds/message.c 	sg = rm->data.op_sg;
sg                409 net/rds/message.c 		sg_set_page(sg, pages, copied, start);
sg                411 net/rds/message.c 		sg++;
sg                426 net/rds/message.c 	struct scatterlist *sg;
sg                432 net/rds/message.c 	sg = rm->data.op_sg;
sg                439 net/rds/message.c 		if (!sg_page(sg)) {
sg                440 net/rds/message.c 			ret = rds_page_remainder_alloc(sg, iov_iter_count(from),
sg                449 net/rds/message.c 				sg->length - sg_off);
sg                452 net/rds/message.c 		nbytes = copy_page_from_iter(sg_page(sg), sg->offset + sg_off,
sg                459 net/rds/message.c 		if (sg_off == sg->length)
sg                460 net/rds/message.c 			sg++;
sg                469 net/rds/message.c 	struct scatterlist *sg;
sg                479 net/rds/message.c 	sg = rm->data.op_sg;
sg                485 net/rds/message.c 				sg->length - vec_off);
sg                489 net/rds/message.c 		ret = copy_page_to_iter(sg_page(sg), sg->offset + vec_off,
sg                497 net/rds/message.c 		if (vec_off == sg->length) {
sg                499 net/rds/message.c 			sg++;
sg                180 net/rds/rdma.c 	struct scatterlist *sg;
sg                255 net/rds/rdma.c 	sg = kcalloc(nents, sizeof(*sg), GFP_KERNEL);
sg                256 net/rds/rdma.c 	if (!sg) {
sg                261 net/rds/rdma.c 	sg_init_table(sg, nents);
sg                265 net/rds/rdma.c 		sg_set_page(&sg[i], pages[i], PAGE_SIZE, 0);
sg                273 net/rds/rdma.c 	trans_private = rs->rs_transport->get_mr(sg, nents, rs,
sg                279 net/rds/rdma.c 			put_page(sg_page(&sg[i]));
sg                280 net/rds/rdma.c 		kfree(sg);
sg                689 net/rds/rdma.c 			struct scatterlist *sg;
sg                691 net/rds/rdma.c 			sg = &op->op_sg[op->op_nents + j];
sg                692 net/rds/rdma.c 			sg_set_page(sg, pages[j],
sg                697 net/rds/rdma.c 			       sg->offset, sg->length, iov->addr, iov->bytes);
sg                699 net/rds/rdma.c 			iov->addr += sg->length;
sg                700 net/rds/rdma.c 			iov->bytes -= sg->length;
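
The first rdma.c block above builds a scatterlist over pages pinned from userspace; if registration fails, every page reference must be dropped before the array is freed. Sketched from the listed lines (the trailing get_mr arguments are elided in the listing and stay elided here):

	sg = kcalloc(nents, sizeof(*sg), GFP_KERNEL);
	if (!sg) {
		ret = -ENOMEM;
		goto out;
	}
	sg_init_table(sg, nents);
	for (i = 0; i < nents; i++)
		sg_set_page(&sg[i], pages[i], PAGE_SIZE, 0);

	trans_private = rs->rs_transport->get_mr(sg, nents, rs,
						 /* ... elided ... */);
	if (IS_ERR(trans_private)) {
		for (i = 0; i < nents; i++)
			put_page(sg_page(&sg[i]));	/* undo the pinning */
		kfree(sg);
		ret = PTR_ERR(trans_private);
		goto out;
	}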
sg                558 net/rds/rds.h  		    unsigned int hdr_off, unsigned int sg, unsigned int off);
sg                574 net/rds/rds.h  	void *(*get_mr)(struct scatterlist *sg, unsigned long nr_sg,
sg                143 net/rds/send.c 	struct scatterlist *sg;
sg                382 net/rds/send.c 			sg = &rm->data.op_sg[cp->cp_xmit_sg];
sg                384 net/rds/send.c 				tmp = min_t(int, ret, sg->length -
sg                388 net/rds/send.c 				if (cp->cp_xmit_data_off == sg->length) {
sg                390 net/rds/send.c 					sg++;
sg                 90 net/rds/tcp.h  		 unsigned int hdr_off, unsigned int sg, unsigned int off);
sg                 76 net/rds/tcp_send.c 		 unsigned int hdr_off, unsigned int sg, unsigned int off)
sg                120 net/rds/tcp_send.c 	while (sg < rm->data.op_nents) {
sg                124 net/rds/tcp_send.c 						sg_page(&rm->data.op_sg[sg]),
sg                125 net/rds/tcp_send.c 						rm->data.op_sg[sg].offset + off,
sg                126 net/rds/tcp_send.c 						rm->data.op_sg[sg].length - off,
sg                128 net/rds/tcp_send.c 		rdsdebug("tcp sendpage %p:%u:%u ret %d\n", (void *)sg_page(&rm->data.op_sg[sg]),
sg                129 net/rds/tcp_send.c 			 rm->data.op_sg[sg].offset + off, rm->data.op_sg[sg].length - off,
sg                136 net/rds/tcp_send.c 		if (off == rm->data.op_sg[sg].length) {
sg                138 net/rds/tcp_send.c 			sg++;
sg                140 net/rds/tcp_send.c 		if (sg == rm->data.op_nents - 1)
sg                105 net/rxrpc/rxkad.c 	struct scatterlist sg;
sg                133 net/rxrpc/rxkad.c 	sg_init_one(&sg, tmpbuf, tmpsize);
sg                136 net/rxrpc/rxkad.c 	skcipher_request_set_crypt(req, &sg, &sg, tmpsize, iv.x);
sg                187 net/rxrpc/rxkad.c 	struct scatterlist sg;
sg                201 net/rxrpc/rxkad.c 	sg_init_one(&sg, sechdr, 8);
sg                204 net/rxrpc/rxkad.c 	skcipher_request_set_crypt(req, &sg, &sg, 8, iv.x);
sg                225 net/rxrpc/rxkad.c 	struct scatterlist sg[16];
sg                244 net/rxrpc/rxkad.c 	sg_init_one(&sg[0], sechdr, sizeof(rxkhdr));
sg                247 net/rxrpc/rxkad.c 	skcipher_request_set_crypt(req, &sg[0], &sg[0], sizeof(rxkhdr), iv.x);
sg                258 net/rxrpc/rxkad.c 	sg_init_table(sg, ARRAY_SIZE(sg));
sg                259 net/rxrpc/rxkad.c 	err = skb_to_sgvec(skb, sg, 0, len);
sg                262 net/rxrpc/rxkad.c 	skcipher_request_set_crypt(req, sg, sg, len, iv.x);
sg                284 net/rxrpc/rxkad.c 	struct scatterlist sg;
sg                314 net/rxrpc/rxkad.c 	sg_init_one(&sg, call->crypto_buf, 8);
sg                317 net/rxrpc/rxkad.c 	skcipher_request_set_crypt(req, &sg, &sg, 8, iv.x);
sg                358 net/rxrpc/rxkad.c 	struct scatterlist sg[16];
sg                375 net/rxrpc/rxkad.c 	sg_init_table(sg, ARRAY_SIZE(sg));
sg                376 net/rxrpc/rxkad.c 	ret = skb_to_sgvec(skb, sg, offset, 8);
sg                385 net/rxrpc/rxkad.c 	skcipher_request_set_crypt(req, sg, sg, 8, iv.x);
sg                436 net/rxrpc/rxkad.c 	struct scatterlist _sg[4], *sg;
sg                453 net/rxrpc/rxkad.c 	sg = _sg;
sg                458 net/rxrpc/rxkad.c 		sg = kmalloc_array(nsg, sizeof(*sg), GFP_NOIO);
sg                459 net/rxrpc/rxkad.c 		if (!sg)
sg                463 net/rxrpc/rxkad.c 	sg_init_table(sg, nsg);
sg                464 net/rxrpc/rxkad.c 	ret = skb_to_sgvec(skb, sg, offset, len);
sg                466 net/rxrpc/rxkad.c 		if (sg != _sg)
sg                467 net/rxrpc/rxkad.c 			kfree(sg);
sg                477 net/rxrpc/rxkad.c 	skcipher_request_set_crypt(req, sg, sg, len, iv.x);
sg                480 net/rxrpc/rxkad.c 	if (sg != _sg)
sg                481 net/rxrpc/rxkad.c 		kfree(sg);
sg                533 net/rxrpc/rxkad.c 	struct scatterlist sg;
sg                557 net/rxrpc/rxkad.c 	sg_init_one(&sg, call->crypto_buf, 8);
sg                560 net/rxrpc/rxkad.c 	skcipher_request_set_crypt(req, &sg, &sg, 8, iv.x);
sg                786 net/rxrpc/rxkad.c 	struct scatterlist sg[1];
sg                795 net/rxrpc/rxkad.c 	sg_init_table(sg, 1);
sg                796 net/rxrpc/rxkad.c 	sg_set_buf(sg, &resp->encrypted, sizeof(resp->encrypted));
sg                799 net/rxrpc/rxkad.c 	skcipher_request_set_crypt(req, sg, sg, sizeof(resp->encrypted), iv.x);
sg                904 net/rxrpc/rxkad.c 	struct scatterlist sg[1];
sg                941 net/rxrpc/rxkad.c 	sg_init_one(&sg[0], ticket, ticket_len);
sg                943 net/rxrpc/rxkad.c 	skcipher_request_set_crypt(req, sg, sg, ticket_len, iv.x);
sg               1057 net/rxrpc/rxkad.c 	struct scatterlist sg[1];
sg               1070 net/rxrpc/rxkad.c 	sg_init_table(sg, 1);
sg               1071 net/rxrpc/rxkad.c 	sg_set_buf(sg, &resp->encrypted, sizeof(resp->encrypted));
sg               1074 net/rxrpc/rxkad.c 	skcipher_request_set_crypt(req, sg, sg, sizeof(resp->encrypted), iv.x);
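
rxkad's whole-packet verify path sizes its scatterlist at runtime: a small on-stack array covers the common case, with a kmalloc_array() fallback when skb_to_sgvec() needs more entries. Condensed from the rxkad.c lines above:

	struct scatterlist _sg[4], *sg;

	sg = _sg;
	if (unlikely(nsg > ARRAY_SIZE(_sg))) {
		sg = kmalloc_array(nsg, sizeof(*sg), GFP_NOIO);
		if (!sg)
			return -ENOMEM;
	}

	sg_init_table(sg, nsg);
	ret = skb_to_sgvec(skb, sg, offset, len);
	if (unlikely(ret < 0)) {
		if (sg != _sg)
			kfree(sg);
		return ret;
	}

	skcipher_request_set_crypt(req, sg, sg, len, iv.x);
	crypto_skcipher_decrypt(req);

	if (sg != _sg)
		kfree(sg);

The sg != _sg test keeps the free path correct whichever allocation was used.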
sg                404 net/smc/smc_ib.c 	struct scatterlist *sg;
sg                408 net/smc/smc_ib.c 	for_each_sg(buf_slot->sgt[SMC_SINGLE_LINK].sgl, sg,
sg                410 net/smc/smc_ib.c 		if (!sg_dma_len(sg))
sg                413 net/smc/smc_ib.c 					   sg_dma_address(sg),
sg                414 net/smc/smc_ib.c 					   sg_dma_len(sg),
sg                424 net/smc/smc_ib.c 	struct scatterlist *sg;
sg                428 net/smc/smc_ib.c 	for_each_sg(buf_slot->sgt[SMC_SINGLE_LINK].sgl, sg,
sg                430 net/smc/smc_ib.c 		if (!sg_dma_len(sg))
sg                433 net/smc/smc_ib.c 					      sg_dma_address(sg),
sg                434 net/smc/smc_ib.c 					      sg_dma_len(sg),
sg                 63 net/sunrpc/auth_gss/gss_krb5_crypto.c 	struct scatterlist sg[1];
sg                 80 net/sunrpc/auth_gss/gss_krb5_crypto.c 	sg_init_one(sg, out, length);
sg                 84 net/sunrpc/auth_gss/gss_krb5_crypto.c 	skcipher_request_set_crypt(req, sg, sg, length, local_iv);
sg                102 net/sunrpc/auth_gss/gss_krb5_crypto.c 	struct scatterlist sg[1];
sg                118 net/sunrpc/auth_gss/gss_krb5_crypto.c 	sg_init_one(sg, out, length);
sg                122 net/sunrpc/auth_gss/gss_krb5_crypto.c 	skcipher_request_set_crypt(req, sg, sg, length, local_iv);
sg                132 net/sunrpc/auth_gss/gss_krb5_crypto.c checksummer(struct scatterlist *sg, void *data)
sg                136 net/sunrpc/auth_gss/gss_krb5_crypto.c 	ahash_request_set_crypt(req, sg, NULL, sg->length);
sg                169 net/sunrpc/auth_gss/gss_krb5_crypto.c 	struct scatterlist              sg[1];
sg                217 net/sunrpc/auth_gss/gss_krb5_crypto.c 	sg_init_one(sg, rc4salt, 4);
sg                218 net/sunrpc/auth_gss/gss_krb5_crypto.c 	ahash_request_set_crypt(req, sg, NULL, 4);
sg                223 net/sunrpc/auth_gss/gss_krb5_crypto.c 	sg_init_one(sg, header, hdrlen);
sg                224 net/sunrpc/auth_gss/gss_krb5_crypto.c 	ahash_request_set_crypt(req, sg, NULL, hdrlen);
sg                248 net/sunrpc/auth_gss/gss_krb5_crypto.c 	sg_init_one(sg, checksumdata, crypto_ahash_digestsize(md5));
sg                249 net/sunrpc/auth_gss/gss_krb5_crypto.c 	ahash_request_set_crypt(req, sg, checksumdata,
sg                282 net/sunrpc/auth_gss/gss_krb5_crypto.c 	struct scatterlist              sg[1];
sg                324 net/sunrpc/auth_gss/gss_krb5_crypto.c 	sg_init_one(sg, header, hdrlen);
sg                325 net/sunrpc/auth_gss/gss_krb5_crypto.c 	ahash_request_set_crypt(req, sg, NULL, hdrlen);
sg                379 net/sunrpc/auth_gss/gss_krb5_crypto.c 	struct scatterlist sg[1];
sg                420 net/sunrpc/auth_gss/gss_krb5_crypto.c 		sg_init_one(sg, header, hdrlen);
sg                421 net/sunrpc/auth_gss/gss_krb5_crypto.c 		ahash_request_set_crypt(req, sg, NULL, hdrlen);
sg                465 net/sunrpc/auth_gss/gss_krb5_crypto.c encryptor(struct scatterlist *sg, void *data)
sg                472 net/sunrpc/auth_gss/gss_krb5_crypto.c 	int thislen = desc->fraglen + sg->length;
sg                486 net/sunrpc/auth_gss/gss_krb5_crypto.c 		in_page = sg_page(sg);
sg                488 net/sunrpc/auth_gss/gss_krb5_crypto.c 	sg_set_page(&desc->infrags[desc->fragno], in_page, sg->length,
sg                489 net/sunrpc/auth_gss/gss_krb5_crypto.c 		    sg->offset);
sg                490 net/sunrpc/auth_gss/gss_krb5_crypto.c 	sg_set_page(&desc->outfrags[desc->fragno], sg_page(sg), sg->length,
sg                491 net/sunrpc/auth_gss/gss_krb5_crypto.c 		    sg->offset);
sg                493 net/sunrpc/auth_gss/gss_krb5_crypto.c 	desc->fraglen += sg->length;
sg                494 net/sunrpc/auth_gss/gss_krb5_crypto.c 	desc->pos += sg->length;
sg                516 net/sunrpc/auth_gss/gss_krb5_crypto.c 		sg_set_page(&desc->outfrags[0], sg_page(sg), fraglen,
sg                517 net/sunrpc/auth_gss/gss_krb5_crypto.c 				sg->offset + sg->length - fraglen);
sg                567 net/sunrpc/auth_gss/gss_krb5_crypto.c decryptor(struct scatterlist *sg, void *data)
sg                570 net/sunrpc/auth_gss/gss_krb5_crypto.c 	int thislen = desc->fraglen + sg->length;
sg                578 net/sunrpc/auth_gss/gss_krb5_crypto.c 	sg_set_page(&desc->frags[desc->fragno], sg_page(sg), sg->length,
sg                579 net/sunrpc/auth_gss/gss_krb5_crypto.c 		    sg->offset);
sg                581 net/sunrpc/auth_gss/gss_krb5_crypto.c 	desc->fraglen += sg->length;
sg                601 net/sunrpc/auth_gss/gss_krb5_crypto.c 		sg_set_page(&desc->frags[0], sg_page(sg), fraglen,
sg                602 net/sunrpc/auth_gss/gss_krb5_crypto.c 				sg->offset + sg->length - fraglen);
sg                681 net/sunrpc/auth_gss/gss_krb5_crypto.c 	struct scatterlist sg[1];
sg                709 net/sunrpc/auth_gss/gss_krb5_crypto.c 	sg_init_one(sg, data, len);
sg                713 net/sunrpc/auth_gss/gss_krb5_crypto.c 	skcipher_request_set_crypt(req, sg, sg, len, iv);
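
checksummer() above is the actor that xdr_process_buf() (in net/sunrpc/xdr.c, further down) invokes once per buffer fragment; it only feeds the fragment into a pre-configured ahash request. Reconstructed from the two listed lines, with the hash update call assumed:

	static int checksummer(struct scatterlist *sg, void *data)
	{
		struct ahash_request *req = data;

		ahash_request_set_crypt(req, sg, NULL, sg->length);
		return crypto_ahash_update(req);	/* assumed follow-up */
	}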
sg               1558 net/sunrpc/xdr.c 	struct scatterlist      sg[1];
sg               1560 net/sunrpc/xdr.c 	sg_init_table(sg, 1);
sg               1568 net/sunrpc/xdr.c 		sg_set_buf(sg, buf->head[0].iov_base + offset, thislen);
sg               1569 net/sunrpc/xdr.c 		ret = actor(sg, data);
sg               1591 net/sunrpc/xdr.c 			sg_set_page(sg, buf->pages[i], thislen, page_offset);
sg               1592 net/sunrpc/xdr.c 			ret = actor(sg, data);
sg               1608 net/sunrpc/xdr.c 		sg_set_buf(sg, buf->tail[0].iov_base + offset, thislen);
sg               1609 net/sunrpc/xdr.c 		ret = actor(sg, data);
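
The xdr.c entries show the other half of that contract: a single reusable entry is re-pointed at the head iovec, each page, then the tail iovec, and the actor is called for every piece. A skeleton of the walk, with offset/length bookkeeping and return-value checks elided for brevity:

	struct scatterlist sg[1];
	int ret;

	sg_init_table(sg, 1);

	sg_set_buf(sg, buf->head[0].iov_base + offset, thislen);
	ret = actor(sg, data);			/* head fragment */

	for (i = 0; i < npages; i++) {		/* one call per page */
		sg_set_page(sg, buf->pages[i], thislen, page_offset);
		ret = actor(sg, data);
	}

	sg_set_buf(sg, buf->tail[0].iov_base + offset, thislen);
	ret = actor(sg, data);			/* tail fragment */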
sg                165 net/sunrpc/xprtrdma/frwr_ops.c 	struct scatterlist *sg;
sg                176 net/sunrpc/xprtrdma/frwr_ops.c 	sg = kcalloc(depth, sizeof(*sg), GFP_NOFS);
sg                177 net/sunrpc/xprtrdma/frwr_ops.c 	if (!sg)
sg                186 net/sunrpc/xprtrdma/frwr_ops.c 	sg_init_table(sg, depth);
sg                187 net/sunrpc/xprtrdma/frwr_ops.c 	mr->mr_sg = sg;
sg                356 net/sunrpc/xprtrdma/svc_rdma_rw.c 	struct scatterlist *sg = ctxt->rw_sg_table.sgl;
sg                358 net/sunrpc/xprtrdma/svc_rdma_rw.c 	sg_set_buf(&sg[0], info->wi_base, len);
sg                372 net/sunrpc/xprtrdma/svc_rdma_rw.c 	struct scatterlist *sg;
sg                380 net/sunrpc/xprtrdma/svc_rdma_rw.c 	sg = ctxt->rw_sg_table.sgl;
sg                385 net/sunrpc/xprtrdma/svc_rdma_rw.c 		sg_set_page(sg, *page, sge_bytes, page_off);
sg                388 net/sunrpc/xprtrdma/svc_rdma_rw.c 		sg = sg_next(sg);
sg                606 net/sunrpc/xprtrdma/svc_rdma_rw.c 	struct scatterlist *sg;
sg                615 net/sunrpc/xprtrdma/svc_rdma_rw.c 	sg = ctxt->rw_sg_table.sgl;
sg                625 net/sunrpc/xprtrdma/svc_rdma_rw.c 		sg_set_page(sg, rqstp->rq_pages[info->ri_pageno],
sg                627 net/sunrpc/xprtrdma/svc_rdma_rw.c 		sg = sg_next(sg);
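
svc_rdma's page-list conversion above fills a pre-allocated rw_sg_table one page at a time, advancing with sg_next() rather than pointer arithmetic so chained tables keep working. A sketch with the byte accounting spelled out (surrounding declarations elided):

	struct scatterlist *sg = ctxt->rw_sg_table.sgl;

	while (remaining) {
		unsigned int sge_bytes;

		sge_bytes = min_t(unsigned int, remaining,
				  PAGE_SIZE - page_off);
		sg_set_page(sg, *page, sge_bytes, page_off);

		remaining -= sge_bytes;
		sg = sg_next(sg);
		page_off = 0;		/* only the first page is offset */
		page++;
	}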
sg                775 net/tls/tls_device.c 	struct scatterlist sg[1];
sg                790 net/tls/tls_device.c 	sg_init_table(sg, 1);
sg                791 net/tls/tls_device.c 	sg_set_buf(&sg[0], buf,
sg                800 net/tls/tls_device.c 	err = decrypt_skb(sk, skb, sg);
sg                 37 net/tls/tls_device_fallback.c static void chain_to_walk(struct scatterlist *sg, struct scatter_walk *walk)
sg                 39 net/tls/tls_device_fallback.c 	struct scatterlist *src = walk->sg;
sg                 42 net/tls/tls_device_fallback.c 	sg_set_page(sg, sg_page(src),
sg                 45 net/tls/tls_device_fallback.c 	scatterwalk_crypto_chain(sg, sg_next(src), 2);
sg                101 net/tls/tls_main.c 		struct scatterlist *sg,
sg                111 net/tls/tls_main.c 	size = sg->length - offset;
sg                112 net/tls/tls_main.c 	offset += sg->offset;
sg                116 net/tls/tls_main.c 		if (sg_is_last(sg))
sg                121 net/tls/tls_main.c 		p = sg_page(sg);
sg                132 net/tls/tls_main.c 			offset -= sg->offset;
sg                134 net/tls/tls_main.c 			ctx->partially_sent_record = (void *)sg;
sg                140 net/tls/tls_main.c 		sk_mem_uncharge(sk, sg->length);
sg                141 net/tls/tls_main.c 		sg = sg_next(sg);
sg                142 net/tls/tls_main.c 		if (!sg)
sg                145 net/tls/tls_main.c 		offset = sg->offset;
sg                146 net/tls/tls_main.c 		size = sg->length;
sg                202 net/tls/tls_main.c 	struct scatterlist *sg;
sg                205 net/tls/tls_main.c 	sg = ctx->partially_sent_record;
sg                209 net/tls/tls_main.c 	return tls_push_sg(sk, ctx, sg, offset, flags);
sg                214 net/tls/tls_main.c 	struct scatterlist *sg;
sg                216 net/tls/tls_main.c 	for (sg = ctx->partially_sent_record; sg; sg = sg_next(sg)) {
sg                217 net/tls/tls_main.c 		put_page(sg_page(sg));
sg                218 net/tls/tls_main.c 		sk_mem_uncharge(sk, sg->length);
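
tls_main.c keeps a partially sent record as a scatterlist hanging off the context; abandoning it means walking the remaining entries with sg_next(), dropping each page reference, and returning the accounted memory. Per the listed lines:

	struct scatterlist *sg;

	for (sg = ctx->partially_sent_record; sg; sg = sg_next(sg)) {
		put_page(sg_page(sg));
		sk_mem_uncharge(sk, sg->length);
	}
	ctx->partially_sent_record = NULL;	/* assumed reset, not shown above */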
sg                159 net/tls/tls_sw.c 	struct scatterlist *sg;
sg                197 net/tls/tls_sw.c 		for_each_sg(sg_next(sgout), sg, UINT_MAX, pages) {
sg                198 net/tls/tls_sw.c 			if (!sg)
sg                200 net/tls/tls_sw.c 			put_page(sg_page(sg));
sg                303 net/tls/tls_sw.c 	len = required - msg_pl->sg.size;
sg                308 net/tls/tls_sw.c 	skip = prot->prepend_size + msg_pl->sg.size;
sg                403 net/tls/tls_sw.c 					 &msg_en->sg.data[msg_en->sg.curr],
sg                439 net/tls/tls_sw.c 	sge = sk_msg_elem(msg_en, msg_en->sg.curr);
sg                510 net/tls/tls_sw.c 	msg_en->sg.curr = start;
sg                552 net/tls/tls_sw.c 	u32 orig_size = msg_opl->sg.size;
sg                561 net/tls/tls_sw.c 	ret = sk_msg_alloc(sk, &new->msg_encrypted, msg_opl->sg.size +
sg                568 net/tls/tls_sw.c 	*orig_end = msg_opl->sg.end;
sg                569 net/tls/tls_sw.c 	i = msg_opl->sg.start;
sg                587 net/tls/tls_sw.c 		if (i == msg_opl->sg.end)
sg                592 net/tls/tls_sw.c 	msg_opl->sg.end = i;
sg                593 net/tls/tls_sw.c 	msg_opl->sg.curr = i;
sg                594 net/tls/tls_sw.c 	msg_opl->sg.copybreak = 0;
sg                596 net/tls/tls_sw.c 	msg_opl->sg.size = bytes;
sg                600 net/tls/tls_sw.c 	msg_npl->sg.size = orig_size - bytes;
sg                602 net/tls/tls_sw.c 	j = msg_npl->sg.start;
sg                622 net/tls/tls_sw.c 	msg_npl->sg.end = j;
sg                623 net/tls/tls_sw.c 	msg_npl->sg.curr = j;
sg                624 net/tls/tls_sw.c 	msg_npl->sg.copybreak = 0;
sg                638 net/tls/tls_sw.c 	i = msg_opl->sg.end;
sg                640 net/tls/tls_sw.c 	j = msg_npl->sg.start;
sg                651 net/tls/tls_sw.c 	msg_opl->sg.end = orig_end;
sg                652 net/tls/tls_sw.c 	msg_opl->sg.curr = orig_end;
sg                653 net/tls/tls_sw.c 	msg_opl->sg.copybreak = 0;
sg                654 net/tls/tls_sw.c 	msg_opl->apply_bytes = msg_opl->sg.size + msg_npl->sg.size;
sg                655 net/tls/tls_sw.c 	msg_opl->sg.size += msg_npl->sg.size;
sg                683 net/tls/tls_sw.c 	split = split_point && split_point < msg_pl->sg.size;
sg                685 net/tls/tls_sw.c 		      msg_pl->sg.size +
sg                686 net/tls/tls_sw.c 		      prot->overhead_size > msg_en->sg.size) ||
sg                689 net/tls/tls_sw.c 		      prot->overhead_size > msg_en->sg.size))) {
sg                691 net/tls/tls_sw.c 		split_point = msg_en->sg.size;
sg                704 net/tls/tls_sw.c 		if (!msg_pl->sg.size) {
sg                710 net/tls/tls_sw.c 		sk_msg_trim(sk, msg_en, msg_pl->sg.size +
sg                717 net/tls/tls_sw.c 	i = msg_pl->sg.end;
sg                725 net/tls/tls_sw.c 		sg_chain(msg_pl->sg.data, msg_pl->sg.end + 1,
sg                731 net/tls/tls_sw.c 	if (msg_pl->sg.end < msg_pl->sg.start) {
sg                732 net/tls/tls_sw.c 		sg_chain(&msg_pl->sg.data[msg_pl->sg.start],
sg                733 net/tls/tls_sw.c 			 MAX_SKB_FRAGS - msg_pl->sg.start + 1,
sg                734 net/tls/tls_sw.c 			 msg_pl->sg.data);
sg                737 net/tls/tls_sw.c 	i = msg_pl->sg.start;
sg                738 net/tls/tls_sw.c 	sg_chain(rec->sg_aead_in, 2, &msg_pl->sg.data[i]);
sg                740 net/tls/tls_sw.c 	i = msg_en->sg.end;
sg                744 net/tls/tls_sw.c 	i = msg_en->sg.start;
sg                745 net/tls/tls_sw.c 	sg_chain(rec->sg_aead_out, 2, &msg_en->sg.data[i]);
sg                747 net/tls/tls_sw.c 	tls_make_aad(rec->aad_space, msg_pl->sg.size + prot->tail_size,
sg                752 net/tls/tls_sw.c 			 page_address(sg_page(&msg_en->sg.data[i])) +
sg                753 net/tls/tls_sw.c 			 msg_en->sg.data[i].offset,
sg                754 net/tls/tls_sw.c 			 msg_pl->sg.size + prot->tail_size,
sg                760 net/tls/tls_sw.c 			       msg_pl->sg.size + prot->tail_size, i);
sg                774 net/tls/tls_sw.c 		sk_msg_trim(sk, msg_en, msg_pl->sg.size + prot->overhead_size);
sg                812 net/tls/tls_sw.c 		delta = msg->sg.size;
sg                814 net/tls/tls_sw.c 		delta -= msg->sg.size;
sg                816 net/tls/tls_sw.c 	if (msg->cork_bytes && msg->cork_bytes > msg->sg.size &&
sg                822 net/tls/tls_sw.c 	send = msg->sg.size;
sg                844 net/tls/tls_sw.c 		msg->sg.size -= send;
sg                850 net/tls/tls_sw.c 			msg->sg.size = 0;
sg                852 net/tls/tls_sw.c 		if (msg->sg.size == 0)
sg                862 net/tls/tls_sw.c 		if (msg->sg.size == 0)
sg                904 net/tls/tls_sw.c 	copied = msg_pl->sg.size;
sg                969 net/tls/tls_sw.c 		orig_size = msg_pl->sg.size;
sg                972 net/tls/tls_sw.c 		record_room = TLS_MAX_PAYLOAD_SIZE - msg_pl->sg.size;
sg                978 net/tls/tls_sw.c 		required_size = msg_pl->sg.size + try_to_copy +
sg                994 net/tls/tls_sw.c 			try_to_copy -= required_size - msg_en->sg.size;
sg                999 net/tls/tls_sw.c 			u32 first = msg_pl->sg.end;
sg               1028 net/tls/tls_sw.c 					msg_pl->sg.size - orig_size);
sg               1033 net/tls/tls_sw.c 		required_size = msg_pl->sg.size + try_to_copy;
sg               1044 net/tls/tls_sw.c 			try_to_copy -= required_size - msg_pl->sg.size;
sg               1047 net/tls/tls_sw.c 				    msg_pl->sg.size + prot->overhead_size);
sg               1092 net/tls/tls_sw.c 		if (ctx->open_rec && msg_en->sg.size < required_size)
sg               1176 net/tls/tls_sw.c 		record_room = TLS_MAX_PAYLOAD_SIZE - msg_pl->sg.size;
sg               1183 net/tls/tls_sw.c 		required_size = msg_pl->sg.size + copy + prot->overhead_size;
sg               1197 net/tls/tls_sw.c 			copy -= required_size - msg_pl->sg.size;
sg               1231 net/tls/tls_sw.c 				tls_trim_both_msgs(sk, msg_pl->sg.size);
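
tls_sw stitches one logical AEAD input out of several pieces with sg_chain(): if the sk_msg ring has wrapped it first chains the ring's tail back to its head, then splices a two-entry AAD block in front of both the plaintext and ciphertext rings. Condensed from the tls_sw.c lines above:

	if (msg_pl->sg.end < msg_pl->sg.start)
		sg_chain(&msg_pl->sg.data[msg_pl->sg.start],
			 MAX_SKB_FRAGS - msg_pl->sg.start + 1,
			 msg_pl->sg.data);		/* undo ring wrap */

	sg_chain(rec->sg_aead_in, 2,
		 &msg_pl->sg.data[msg_pl->sg.start]);	/* AAD + plaintext */
	sg_chain(rec->sg_aead_out, 2,
		 &msg_en->sg.data[msg_en->sg.start]);	/* AAD + ciphertext */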
sg                430 net/vmw_vsock/virtio_transport.c 	struct scatterlist sg;
sg                435 net/vmw_vsock/virtio_transport.c 	sg_init_one(&sg, event, sizeof(*event));
sg                437 net/vmw_vsock/virtio_transport.c 	return virtqueue_add_inbuf(vq, &sg, 1, event, GFP_KERNEL);
sg                195 net/wireless/lib80211_crypt_ccmp.c 	struct scatterlist sg[2];
sg                218 net/wireless/lib80211_crypt_ccmp.c 	sg_init_table(sg, 2);
sg                219 net/wireless/lib80211_crypt_ccmp.c 	sg_set_buf(&sg[0], aad, aad_len);
sg                220 net/wireless/lib80211_crypt_ccmp.c 	sg_set_buf(&sg[1], skb->data + hdr_len + CCMP_HDR_LEN,
sg                225 net/wireless/lib80211_crypt_ccmp.c 	aead_request_set_crypt(req, sg, sg, data_len, iv);
sg                260 net/wireless/lib80211_crypt_ccmp.c 	struct scatterlist sg[2];
sg                319 net/wireless/lib80211_crypt_ccmp.c 	sg_init_table(sg, 2);
sg                320 net/wireless/lib80211_crypt_ccmp.c 	sg_set_buf(&sg[0], aad, aad_len);
sg                321 net/wireless/lib80211_crypt_ccmp.c 	sg_set_buf(&sg[1], pos, data_len);
sg                325 net/wireless/lib80211_crypt_ccmp.c 	aead_request_set_crypt(req, sg, sg, data_len, iv);
sg                 26 samples/kfifo/dma-example.c 	struct scatterlist	sg[10];
sg                 62 samples/kfifo/dma-example.c 	sg_init_table(sg, ARRAY_SIZE(sg));
sg                 63 samples/kfifo/dma-example.c 	nents = kfifo_dma_in_prepare(&fifo, sg, ARRAY_SIZE(sg), FIFO_SIZE);
sg                 77 samples/kfifo/dma-example.c 			i, sg_page(&sg[i]), sg[i].offset, sg[i].length);
sg                 79 samples/kfifo/dma-example.c 		if (sg_is_last(&sg[i]))
sg                 93 samples/kfifo/dma-example.c 	nents = kfifo_dma_out_prepare(&fifo, sg, ARRAY_SIZE(sg), 8);
sg                106 samples/kfifo/dma-example.c 			i, sg_page(&sg[i]), sg[i].offset, sg[i].length);
sg                108 samples/kfifo/dma-example.c 		if (sg_is_last(&sg[i]))
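
The kfifo sample illustrates why a FIFO transfer needs at most two entries: a wrapped buffer contributes one segment at the end of the ring and one at the start. The prepare/inspect loop, condensed from the lines above:

	sg_init_table(sg, ARRAY_SIZE(sg));
	nents = kfifo_dma_in_prepare(&fifo, sg, ARRAY_SIZE(sg), FIFO_SIZE);

	for (i = 0; i < nents; i++) {
		printk(KERN_INFO "sg[%d] -> page %p offset 0x%x length 0x%x\n",
		       i, sg_page(&sg[i]), sg[i].offset, sg[i].length);
		if (sg_is_last(&sg[i]))
			break;
	}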
sg                839 samples/vfio-mdev/mbochs.c 	struct sg_table *sg;
sg                843 samples/vfio-mdev/mbochs.c 	sg = kzalloc(sizeof(*sg), GFP_KERNEL);
sg                844 samples/vfio-mdev/mbochs.c 	if (!sg)
sg                846 samples/vfio-mdev/mbochs.c 	if (sg_alloc_table_from_pages(sg, dmabuf->pages, dmabuf->pagecount,
sg                849 samples/vfio-mdev/mbochs.c 	if (!dma_map_sg(at->dev, sg->sgl, sg->nents, direction))
sg                852 samples/vfio-mdev/mbochs.c 	return sg;
sg                855 samples/vfio-mdev/mbochs.c 	sg_free_table(sg);
sg                857 samples/vfio-mdev/mbochs.c 	kfree(sg);
sg                863 samples/vfio-mdev/mbochs.c 				struct sg_table *sg,
sg                871 samples/vfio-mdev/mbochs.c 	sg_free_table(sg);
sg                872 samples/vfio-mdev/mbochs.c 	kfree(sg);
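
The mbochs sample maps a dma-buf by building an sg_table over already-allocated pages and then DMA-mapping it, unwinding each step on failure. A sketch of that sequence; the sg_alloc_table_from_pages() arguments past pagecount (offset, total size, gfp) are assumed, not shown in the listing:

	struct sg_table *sg;

	sg = kzalloc(sizeof(*sg), GFP_KERNEL);
	if (!sg)
		goto err1;
	if (sg_alloc_table_from_pages(sg, dmabuf->pages, dmabuf->pagecount,
				      0, dmabuf->pagecount << PAGE_SHIFT,
				      GFP_KERNEL))
		goto err2;
	if (!dma_map_sg(at->dev, sg->sgl, sg->nents, direction))
		goto err3;

	return sg;

err3:
	sg_free_table(sg);	/* undo table allocation */
err2:
	kfree(sg);
err1:
	return ERR_PTR(-ENOMEM);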
sg                212 security/integrity/ima/ima_crypto.c 	struct scatterlist sg[1];
sg                292 security/integrity/ima/ima_crypto.c 		sg_init_one(&sg[0], rbuf[active], rbuf_len);
sg                293 security/integrity/ima/ima_crypto.c 		ahash_request_set_crypt(req, sg, NULL, rbuf_len);
sg                536 security/integrity/ima/ima_crypto.c 	struct scatterlist sg;
sg                555 security/integrity/ima/ima_crypto.c 	sg_init_one(&sg, buf, len);
sg                556 security/integrity/ima/ima_crypto.c 	ahash_request_set_crypt(req, &sg, NULL, len);
sg                 26 security/keys/big_key.c 	struct scatterlist	*sg;
sg                117 security/keys/big_key.c 	aead_request_set_crypt(aead_req, buf->sg, buf->sg, datalen, zero_nonce);
sg                173 security/keys/big_key.c 	buf->sg = (void *)(buf->pages + npg);
sg                174 security/keys/big_key.c 	sg_init_table(buf->sg, npg);
sg                182 security/keys/big_key.c 		sg_set_page(&buf->sg[i], buf->pages[i], l, 0);
sg                143 sound/core/sgbuf.c 	struct snd_sg_buf *sg = dmab->private_data;
sg                149 sound/core/sgbuf.c 	pg = sg->table[start].addr >> PAGE_SHIFT;
sg                155 sound/core/sgbuf.c 		if ((sg->table[start].addr >> PAGE_SHIFT) != pg)
sg                112 sound/soc/sh/siu_pcm.c 	struct scatterlist sg;
sg                115 sound/soc/sh/siu_pcm.c 	sg_init_table(&sg, 1);
sg                116 sound/soc/sh/siu_pcm.c 	sg_set_page(&sg, pfn_to_page(PFN_DOWN(buff)),
sg                118 sound/soc/sh/siu_pcm.c 	sg_dma_len(&sg) = size;
sg                119 sound/soc/sh/siu_pcm.c 	sg_dma_address(&sg) = buff;
sg                122 sound/soc/sh/siu_pcm.c 		&sg, 1, DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
sg                160 sound/soc/sh/siu_pcm.c 	struct scatterlist sg;
sg                165 sound/soc/sh/siu_pcm.c 	sg_init_table(&sg, 1);
sg                166 sound/soc/sh/siu_pcm.c 	sg_set_page(&sg, pfn_to_page(PFN_DOWN(buff)),
sg                168 sound/soc/sh/siu_pcm.c 	sg_dma_len(&sg) = size;
sg                169 sound/soc/sh/siu_pcm.c 	sg_dma_address(&sg) = buff;
sg                172 sound/soc/sh/siu_pcm.c 		&sg, 1, DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
sg                144 sound/soc/sprd/sprd-pcm-compress.c 	struct scatterlist *sg, *sgt;
sg                163 sound/soc/sprd/sprd-pcm-compress.c 	sgt = sg = devm_kcalloc(dev, sg_num, sizeof(*sg), GFP_KERNEL);
sg                164 sound/soc/sprd/sprd-pcm-compress.c 	if (!sg) {
sg                238 sound/soc/sprd/sprd-pcm-compress.c 	dma->desc = dma->chan->device->device_prep_slave_sg(dma->chan, sg,
sg                253 sound/soc/sprd/sprd-pcm-compress.c 	devm_kfree(dev, sg);
sg                258 sound/soc/sprd/sprd-pcm-compress.c 	devm_kfree(dev, sg);
sg                206 sound/soc/sprd/sprd-pcm-dma.c 	struct scatterlist *sg;
sg                232 sound/soc/sprd/sprd-pcm-dma.c 	sg = devm_kcalloc(component->dev, sg_num, sizeof(*sg), GFP_KERNEL);
sg                233 sound/soc/sprd/sprd-pcm-dma.c 	if (!sg) {
sg                244 sound/soc/sprd/sprd-pcm-dma.c 		struct scatterlist *sgt = sg;
sg                290 sound/soc/sprd/sprd-pcm-dma.c 		data->desc = chan->device->device_prep_slave_sg(chan, sg,
sg                305 sound/soc/sprd/sprd-pcm-dma.c 	devm_kfree(component->dev, sg);
sg                310 sound/soc/sprd/sprd-pcm-dma.c 	devm_kfree(component->dev, sg);
sg                125 sound/soc/txx9/txx9aclc.c 	struct scatterlist sg;
sg                127 sound/soc/txx9/txx9aclc.c 	sg_init_table(&sg, 1);
sg                128 sound/soc/txx9/txx9aclc.c 	sg_set_page(&sg, pfn_to_page(PFN_DOWN(buf_dma_addr)),
sg                130 sound/soc/txx9/txx9aclc.c 	sg_dma_address(&sg) = buf_dma_addr;
sg                131 sound/soc/txx9/txx9aclc.c 	desc = dmaengine_prep_slave_sg(chan, &sg, 1,
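
Both the siu and txx9 audio drivers above submit periods as a one-entry scatterlist whose DMA address and length are filled in by hand before dmaengine_prep_slave_sg(). A sketch blending the siu/txx9 lines (the in-page offset computation is an assumption):

	struct scatterlist sg;
	struct dma_async_tx_descriptor *desc;

	sg_init_table(&sg, 1);
	sg_set_page(&sg, pfn_to_page(PFN_DOWN(buf_dma_addr)),
		    size, buf_dma_addr & (PAGE_SIZE - 1));
	sg_dma_address(&sg) = buf_dma_addr;	/* pre-mapped bus address */
	sg_dma_len(&sg) = size;

	desc = dmaengine_prep_slave_sg(chan, &sg, 1, DMA_MEM_TO_DEV,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);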
sg                 14 tools/virtio/linux/scatterlist.h #define sg_is_chain(sg)		((sg)->page_link & 0x01)
sg                 15 tools/virtio/linux/scatterlist.h #define sg_is_last(sg)		((sg)->page_link & 0x02)
sg                 16 tools/virtio/linux/scatterlist.h #define sg_chain_ptr(sg)	\
sg                 17 tools/virtio/linux/scatterlist.h 	((struct scatterlist *) ((sg)->page_link & ~0x03))
sg                 29 tools/virtio/linux/scatterlist.h static inline void sg_assign_page(struct scatterlist *sg, struct page *page)
sg                 31 tools/virtio/linux/scatterlist.h 	unsigned long page_link = sg->page_link & 0x3;
sg                 39 tools/virtio/linux/scatterlist.h 	BUG_ON(sg_is_chain(sg));
sg                 41 tools/virtio/linux/scatterlist.h 	sg->page_link = page_link | (unsigned long) page;
sg                 58 tools/virtio/linux/scatterlist.h static inline void sg_set_page(struct scatterlist *sg, struct page *page,
sg                 61 tools/virtio/linux/scatterlist.h 	sg_assign_page(sg, page);
sg                 62 tools/virtio/linux/scatterlist.h 	sg->offset = offset;
sg                 63 tools/virtio/linux/scatterlist.h 	sg->length = len;
sg                 66 tools/virtio/linux/scatterlist.h static inline struct page *sg_page(struct scatterlist *sg)
sg                 69 tools/virtio/linux/scatterlist.h 	BUG_ON(sg_is_chain(sg));
sg                 71 tools/virtio/linux/scatterlist.h 	return (struct page *)((sg)->page_link & ~0x3);
sg                 77 tools/virtio/linux/scatterlist.h #define for_each_sg(sglist, sg, nr, __i)	\
sg                 78 tools/virtio/linux/scatterlist.h 	for (__i = 0, sg = (sglist); __i < (nr); __i++, sg = sg_next(sg))
sg                115 tools/virtio/linux/scatterlist.h static inline void sg_mark_end(struct scatterlist *sg)
sg                120 tools/virtio/linux/scatterlist.h 	sg->page_link |= 0x02;
sg                121 tools/virtio/linux/scatterlist.h 	sg->page_link &= ~0x01;
sg                132 tools/virtio/linux/scatterlist.h static inline void sg_unmark_end(struct scatterlist *sg)
sg                134 tools/virtio/linux/scatterlist.h 	sg->page_link &= ~0x02;
sg                137 tools/virtio/linux/scatterlist.h static inline struct scatterlist *sg_next(struct scatterlist *sg)
sg                139 tools/virtio/linux/scatterlist.h 	if (sg_is_last(sg))
sg                142 tools/virtio/linux/scatterlist.h 	sg++;
sg                143 tools/virtio/linux/scatterlist.h 	if (unlikely(sg_is_chain(sg)))
sg                144 tools/virtio/linux/scatterlist.h 		sg = sg_chain_ptr(sg);
sg                146 tools/virtio/linux/scatterlist.h 	return sg;
sg                155 tools/virtio/linux/scatterlist.h static inline dma_addr_t sg_phys(struct scatterlist *sg)
sg                157 tools/virtio/linux/scatterlist.h 	return page_to_phys(sg_page(sg)) + sg->offset;
sg                160 tools/virtio/linux/scatterlist.h static inline void sg_set_buf(struct scatterlist *sg, const void *buf,
sg                163 tools/virtio/linux/scatterlist.h 	sg_set_page(sg, virt_to_page(buf), buflen, offset_in_page(buf));
sg                166 tools/virtio/linux/scatterlist.h static inline void sg_init_one(struct scatterlist *sg,
sg                169 tools/virtio/linux/scatterlist.h 	sg_init_table(sg, 1);
sg                170 tools/virtio/linux/scatterlist.h 	sg_set_buf(sg, buf, buflen);
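
The userspace shim above reimplements just enough of the kernel API for the virtio tests: the low page_link bits encode the chain and end markers, and sg_next() honours both. A tiny consumer, hypothetical but built only from the helpers defined above:

	/* Sum the byte length of a (possibly chained) scatterlist. */
	static unsigned int sg_total_length(struct scatterlist *sg)
	{
		unsigned int total = 0;

		for (; sg; sg = sg_next(sg))
			total += sg->length;
		return total;
	}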
sg                 37 tools/virtio/linux/virtio.h 			 struct scatterlist sg[], unsigned int num,
sg                 42 tools/virtio/linux/virtio.h 			struct scatterlist sg[], unsigned int num,
sg                328 tools/virtio/vringh_test.c 			struct scatterlist sg[4];
sg                353 tools/virtio/vringh_test.c 				sg_init_table(sg, num_sg = 3);
sg                354 tools/virtio/vringh_test.c 				sg_set_buf(&sg[0], (void *)dbuf, 1);
sg                355 tools/virtio/vringh_test.c 				sg_set_buf(&sg[1], (void *)dbuf + 1, 2);
sg                356 tools/virtio/vringh_test.c 				sg_set_buf(&sg[2], (void *)dbuf + 3, 1);
sg                359 tools/virtio/vringh_test.c 				sg_init_table(sg, num_sg = 2);
sg                360 tools/virtio/vringh_test.c 				sg_set_buf(&sg[0], (void *)dbuf, 1);
sg                361 tools/virtio/vringh_test.c 				sg_set_buf(&sg[1], (void *)dbuf + 1, 3);
sg                364 tools/virtio/vringh_test.c 				sg_init_table(sg, num_sg = 1);
sg                365 tools/virtio/vringh_test.c 				sg_set_buf(&sg[0], (void *)dbuf, 4);
sg                368 tools/virtio/vringh_test.c 				sg_init_table(sg, num_sg = 4);
sg                369 tools/virtio/vringh_test.c 				sg_set_buf(&sg[0], (void *)dbuf, 1);
sg                370 tools/virtio/vringh_test.c 				sg_set_buf(&sg[1], (void *)dbuf + 1, 1);
sg                371 tools/virtio/vringh_test.c 				sg_set_buf(&sg[2], (void *)dbuf + 2, 1);
sg                372 tools/virtio/vringh_test.c 				sg_set_buf(&sg[3], (void *)dbuf + 3, 1);
sg                380 tools/virtio/vringh_test.c 				err = virtqueue_add_outbuf(vq, sg, num_sg, dbuf,
sg                383 tools/virtio/vringh_test.c 				err = virtqueue_add_inbuf(vq, sg, num_sg,