sgc               255 drivers/crypto/caam/caampkc.c 	int sgc;
sgc               306 drivers/crypto/caam/caampkc.c 	sgc = dma_map_sg(dev, req_ctx->fixup_src, src_nents, DMA_TO_DEVICE);
sgc               307 drivers/crypto/caam/caampkc.c 	if (unlikely(!sgc)) {
sgc               312 drivers/crypto/caam/caampkc.c 	sgc = dma_map_sg(dev, req->dst, dst_nents, DMA_FROM_DEVICE);
sgc               313 drivers/crypto/caam/caampkc.c 	if (unlikely(!sgc)) {
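In caampkc.c, sgc holds the return value of dma_map_sg(): the number of scatterlist entries actually mapped, with zero meaning the mapping failed. A minimal sketch of that check pattern, assuming kernel context (the function name my_map_buffers is illustrative, not from caampkc.c):

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

/* Illustrative only: map an s/g list and treat a zero return as failure,
 * mirroring the unlikely(!sgc) checks above. */
static int my_map_buffers(struct device *dev, struct scatterlist *sgl,
			  int nents)
{
	int sgc;

	sgc = dma_map_sg(dev, sgl, nents, DMA_TO_DEVICE);
	if (unlikely(!sgc)) {
		dev_err(dev, "unable to map source s/g list\n");
		return -ENOMEM;
	}

	/* ... hand the sgc mapped entries to the hardware ... */

	dma_unmap_sg(dev, sgl, nents, DMA_TO_DEVICE);
	return 0;
}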
sgc               110 drivers/gpio/gpio-sa1100.c static void sa1100_update_edge_regs(struct sa1100_gpio_chip *sgc)
sgc               112 drivers/gpio/gpio-sa1100.c 	void *base = sgc->membase;
sgc               115 drivers/gpio/gpio-sa1100.c 	grer = sgc->irqrising & sgc->irqmask;
sgc               116 drivers/gpio/gpio-sa1100.c 	gfer = sgc->irqfalling & sgc->irqmask;
sgc               124 drivers/gpio/gpio-sa1100.c 	struct sa1100_gpio_chip *sgc = irq_data_get_irq_chip_data(d);
sgc               128 drivers/gpio/gpio-sa1100.c 		if ((sgc->irqrising | sgc->irqfalling) & mask)
sgc               134 drivers/gpio/gpio-sa1100.c 		sgc->irqrising |= mask;
sgc               136 drivers/gpio/gpio-sa1100.c 		sgc->irqrising &= ~mask;
sgc               138 drivers/gpio/gpio-sa1100.c 		sgc->irqfalling |= mask;
sgc               140 drivers/gpio/gpio-sa1100.c 		sgc->irqfalling &= ~mask;
sgc               142 drivers/gpio/gpio-sa1100.c 	sa1100_update_edge_regs(sgc);
sgc               152 drivers/gpio/gpio-sa1100.c 	struct sa1100_gpio_chip *sgc = irq_data_get_irq_chip_data(d);
sgc               154 drivers/gpio/gpio-sa1100.c 	writel_relaxed(BIT(d->hwirq), sgc->membase + R_GEDR);
sgc               159 drivers/gpio/gpio-sa1100.c 	struct sa1100_gpio_chip *sgc = irq_data_get_irq_chip_data(d);
sgc               162 drivers/gpio/gpio-sa1100.c 	sgc->irqmask &= ~mask;
sgc               164 drivers/gpio/gpio-sa1100.c 	sa1100_update_edge_regs(sgc);
sgc               169 drivers/gpio/gpio-sa1100.c 	struct sa1100_gpio_chip *sgc = irq_data_get_irq_chip_data(d);
sgc               172 drivers/gpio/gpio-sa1100.c 	sgc->irqmask |= mask;
sgc               174 drivers/gpio/gpio-sa1100.c 	sa1100_update_edge_regs(sgc);
sgc               179 drivers/gpio/gpio-sa1100.c 	struct sa1100_gpio_chip *sgc = irq_data_get_irq_chip_data(d);
sgc               183 drivers/gpio/gpio-sa1100.c 			sgc->irqwake |= BIT(d->hwirq);
sgc               185 drivers/gpio/gpio-sa1100.c 			sgc->irqwake &= ~BIT(d->hwirq);
sgc               205 drivers/gpio/gpio-sa1100.c 	struct sa1100_gpio_chip *sgc = d->host_data;
sgc               207 drivers/gpio/gpio-sa1100.c 	irq_set_chip_data(irq, sgc);
sgc               228 drivers/gpio/gpio-sa1100.c 	struct sa1100_gpio_chip *sgc = irq_desc_get_handler_data(desc);
sgc               230 drivers/gpio/gpio-sa1100.c 	void __iomem *gedr = sgc->membase + R_GEDR;
sgc               240 drivers/gpio/gpio-sa1100.c 		irq = sgc->irqbase;
sgc               254 drivers/gpio/gpio-sa1100.c 	struct sa1100_gpio_chip *sgc = &sa1100_gpio_chip;
sgc               259 drivers/gpio/gpio-sa1100.c 	writel_relaxed(sgc->irqwake & sgc->irqrising, sgc->membase + R_GRER);
sgc               260 drivers/gpio/gpio-sa1100.c 	writel_relaxed(sgc->irqwake & sgc->irqfalling, sgc->membase + R_GFER);
sgc               265 drivers/gpio/gpio-sa1100.c 	writel_relaxed(readl_relaxed(sgc->membase + R_GEDR),
sgc               266 drivers/gpio/gpio-sa1100.c 		       sgc->membase + R_GEDR);
sgc               308 drivers/gpio/gpio-sa1100.c 	struct sa1100_gpio_chip *sgc = &sa1100_gpio_chip;
sgc               312 drivers/gpio/gpio-sa1100.c 	writel_relaxed(0, sgc->membase + R_GFER);
sgc               313 drivers/gpio/gpio-sa1100.c 	writel_relaxed(0, sgc->membase + R_GRER);
sgc               314 drivers/gpio/gpio-sa1100.c 	writel_relaxed(-1, sgc->membase + R_GEDR);
sgc               320 drivers/gpio/gpio-sa1100.c 			&sa1100_gpio_irqdomain_ops, sgc);
sgc               324 drivers/gpio/gpio-sa1100.c 						 sa1100_gpio_handler, sgc);
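In gpio-sa1100.c, sgc is a struct sa1100_gpio_chip pointer; the driver keeps the requested rising/falling edges and the current interrupt mask in software and rewrites the edge registers from those caches, as sa1100_update_edge_regs() above shows. A condensed sketch of that cache-then-write idiom with an illustrative struct (the register offsets below are placeholders, not the SA-1100 values):

#include <linux/io.h>
#include <linux/types.h>

/* Illustrative stand-in for struct sa1100_gpio_chip: only the fields the
 * edge-update pattern needs. */
struct my_gpio_chip {
	void __iomem	*membase;
	u32		irqmask;	/* currently unmasked IRQs    */
	u32		irqrising;	/* IRQs wanting rising edges  */
	u32		irqfalling;	/* IRQs wanting falling edges */
};

#define R_GRER	0x10	/* placeholder offsets, not the SA-1100 values */
#define R_GFER	0x14

/* Only edges that are both requested and currently unmasked reach the
 * hardware registers. */
static void my_update_edge_regs(struct my_gpio_chip *sgc)
{
	writel_relaxed(sgc->irqrising & sgc->irqmask, sgc->membase + R_GRER);
	writel_relaxed(sgc->irqfalling & sgc->irqmask, sgc->membase + R_GFER);
}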
sgc               929 drivers/net/ethernet/intel/ixgbe/ixgbe.h 		       struct scatterlist *sgl, unsigned int sgc);
sgc               931 drivers/net/ethernet/intel/ixgbe/ixgbe.h 			  struct scatterlist *sgl, unsigned int sgc);
sgc                29 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 	ddp->sgc = 0;
sgc               110 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 		dma_unmap_sg(&adapter->pdev->dev, ddp->sgl, ddp->sgc,
sgc               133 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 				struct scatterlist *sgl, unsigned int sgc,
sgc               170 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 		      xid, ddp->sgl, ddp->sgc);
sgc               188 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 	dmacount = dma_map_sg(&adapter->pdev->dev, sgl, sgc, DMA_FROM_DEVICE);
sgc               202 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 	ddp->sgc = sgc;
sgc               323 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 	dma_unmap_sg(&adapter->pdev->dev, sgl, sgc, DMA_FROM_DEVICE);
sgc               344 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 		       struct scatterlist *sgl, unsigned int sgc)
sgc               346 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 	return ixgbe_fcoe_ddp_setup(netdev, xid, sgl, sgc, 0);
sgc               365 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 			    struct scatterlist *sgl, unsigned int sgc)
sgc               367 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 	return ixgbe_fcoe_ddp_setup(netdev, xid, sgl, sgc, 1);
sgc               442 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 			     ddp->sgc, DMA_FROM_DEVICE);
sgc               445 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.c 		ddp->sgc = 0;
sgc                38 drivers/net/ethernet/intel/ixgbe/ixgbe_fcoe.h 	unsigned int sgc;
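In the ixgbe FCoE code, sgc is the scatter-gather element count handed in through the DDP hooks; the driver maps the list, records both sgl and sgc in the per-exchange context, and reuses the recorded count when unmapping. A reduced sketch of that bookkeeping, assuming kernel context (struct and function names here are illustrative, not ixgbe's):

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/scatterlist.h>

/* Illustrative per-exchange context: keep exactly what the teardown path
 * needs to undo the mapping. */
struct my_ddp_ctx {
	struct scatterlist	*sgl;
	unsigned int		sgc;	/* count passed to dma_map_sg() */
};

static int my_ddp_setup(struct device *dev, struct my_ddp_ctx *ddp,
			struct scatterlist *sgl, unsigned int sgc)
{
	int dmacount = dma_map_sg(dev, sgl, sgc, DMA_FROM_DEVICE);

	if (!dmacount)
		return -ENOMEM;

	ddp->sgl = sgl;		/* remembered for my_ddp_done() */
	ddp->sgc = sgc;
	return dmacount;	/* mapped entries to program into the NIC */
}

static void my_ddp_done(struct device *dev, struct my_ddp_ctx *ddp)
{
	if (ddp->sgl)
		dma_unmap_sg(dev, ddp->sgl, ddp->sgc, DMA_FROM_DEVICE);
	ddp->sgl = NULL;
	ddp->sgc = 0;
}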
sgc               408 drivers/scsi/esas2r/esas2r.h 			      struct esas2r_sg_context *sgc);
sgc               598 drivers/scsi/esas2r/esas2r.h typedef u32 (*PGETPHYSADDR) (struct esas2r_sg_context *sgc, u64 *addr);
sgc               712 drivers/scsi/esas2r/esas2r.h 	struct esas2r_sg_context sgc;
sgc              1068 drivers/scsi/esas2r/esas2r.h 			     struct esas2r_sg_context *sgc);
sgc              1074 drivers/scsi/esas2r/esas2r.h 		   struct esas2r_request *rq, struct esas2r_sg_context *sgc);
sgc              1105 drivers/scsi/esas2r/esas2r.h 			      struct esas2r_sg_context *sgc);
sgc              1107 drivers/scsi/esas2r/esas2r.h 			      struct esas2r_sg_context *sgc);
sgc              1139 drivers/scsi/esas2r/esas2r.h 			      struct esas2r_sg_context *sgc);
sgc              1166 drivers/scsi/esas2r/esas2r.h static inline void esas2r_sgc_init(struct esas2r_sg_context *sgc,
sgc              1171 drivers/scsi/esas2r/esas2r.h 	sgc->adapter = a;
sgc              1172 drivers/scsi/esas2r/esas2r.h 	sgc->first_req = rq;
sgc              1178 drivers/scsi/esas2r/esas2r.h 	sgc->sge.a64.limit = (struct atto_vda_sge *)((u8 *)rq->vrq
sgc              1185 drivers/scsi/esas2r/esas2r.h 		sgc->sge.a64.last =
sgc              1186 drivers/scsi/esas2r/esas2r.h 			sgc->sge.a64.curr = first;
sgc              1191 drivers/scsi/esas2r/esas2r.h 		sgc->sge.a64.last =
sgc              1192 drivers/scsi/esas2r/esas2r.h 			sgc->sge.a64.curr = &rq->vrq->scsi.u.sge[0];
sgc              1196 drivers/scsi/esas2r/esas2r.h 	sgc->sge.a64.chain = NULL;
sgc              1303 drivers/scsi/esas2r/esas2r.h 					struct esas2r_sg_context *sgc)
sgc              1308 drivers/scsi/esas2r/esas2r.h 	return (*a->build_sgl)(a, sgc);
sgc                53 drivers/scsi/esas2r/esas2r_disc.c static u32 esas2r_disc_get_phys_addr(struct esas2r_sg_context *sgc, u64 *addr);
sgc               888 drivers/scsi/esas2r/esas2r_disc.c 	struct esas2r_sg_context sgc;
sgc               896 drivers/scsi/esas2r/esas2r_disc.c 	sgc.cur_offset = NULL;
sgc               897 drivers/scsi/esas2r/esas2r_disc.c 	sgc.get_phys_addr = (PGETPHYSADDR)esas2r_disc_get_phys_addr;
sgc               898 drivers/scsi/esas2r/esas2r_disc.c 	sgc.length = offsetof(struct atto_ioctl, data)
sgc               901 drivers/scsi/esas2r/esas2r_disc.c 	esas2r_sgc_init(&sgc, a, rq, rq->vrq->ioctl.sge);
sgc               903 drivers/scsi/esas2r/esas2r_disc.c 	esas2r_build_ioctl_req(a, rq, sgc.length, VDA_IOCTL_HBA);
sgc               905 drivers/scsi/esas2r/esas2r_disc.c 	if (!esas2r_build_sg_list(a, rq, &sgc)) {
sgc              1030 drivers/scsi/esas2r/esas2r_disc.c static u32 esas2r_disc_get_phys_addr(struct esas2r_sg_context *sgc, u64 *addr)
sgc              1032 drivers/scsi/esas2r/esas2r_disc.c 	struct esas2r_adapter *a = sgc->adapter;
sgc              1034 drivers/scsi/esas2r/esas2r_disc.c 	if (sgc->length > ESAS2R_DISC_BUF_LEN)
sgc              1040 drivers/scsi/esas2r/esas2r_disc.c 	return sgc->length;
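Throughout the esas2r driver, sgc is a struct esas2r_sg_context: a cursor holding the remaining length, the current buffer offset and a get_phys_addr callback, initialized with esas2r_sgc_init() and then consumed by esas2r_build_sg_list(). A condensed restatement of the esas2r_disc.c calls listed above (not standalone; it relies on esas2r.h and omits error and completion handling):

/* Condensed from the esas2r_disc.c lines above; relies on esas2r.h and is
 * not standalone. */
static void sketch_disc_request(struct esas2r_adapter *a,
				struct esas2r_request *rq, u32 datalen)
{
	struct esas2r_sg_context sgc;

	sgc.cur_offset = NULL;
	sgc.get_phys_addr = (PGETPHYSADDR)esas2r_disc_get_phys_addr;
	sgc.length = datalen;

	esas2r_sgc_init(&sgc, a, rq, rq->vrq->ioctl.sge);
	esas2r_build_ioctl_req(a, rq, sgc.length, VDA_IOCTL_HBA);

	if (!esas2r_build_sg_list(a, rq, &sgc)) {
		/* no room for the SG list: the real code fails the request */
	}
}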
sgc               144 drivers/scsi/esas2r/esas2r_flash.c 			if (fc->sgc.cur_offset == NULL)
sgc               181 drivers/scsi/esas2r/esas2r_flash.c 	struct esas2r_sg_context *sgc = &fc->sgc;
sgc               186 drivers/scsi/esas2r/esas2r_flash.c 		if (sgc->cur_offset)
sgc               187 drivers/scsi/esas2r/esas2r_flash.c 			cksum = esas2r_calc_byte_xor_cksum(sgc->cur_offset,
sgc               188 drivers/scsi/esas2r/esas2r_flash.c 							   sgc->length,
sgc               199 drivers/scsi/esas2r/esas2r_flash.c 			       sgc->length);
sgc               208 drivers/scsi/esas2r/esas2r_flash.c 	fc->curr_len = fc->sgc.length;
sgc               210 drivers/scsi/esas2r/esas2r_flash.c 	if (sgc->cur_offset) {
sgc               212 drivers/scsi/esas2r/esas2r_flash.c 		esas2r_sgc_init(sgc, a, rq, &rq->vrq->flash.data.sge[0]);
sgc               214 drivers/scsi/esas2r/esas2r_flash.c 		if (!esas2r_build_sg_list(a, rq, sgc)) {
sgc               219 drivers/scsi/esas2r/esas2r_flash.c 		fc->sgc.length = 0;
sgc               364 drivers/scsi/esas2r/esas2r_flash.c 			fc->sgc.length = FM_BUF_SZ;
sgc               366 drivers/scsi/esas2r/esas2r_flash.c 			fc->sgc.length = fc->cmp_len;
sgc               368 drivers/scsi/esas2r/esas2r_flash.c 		fc->sgc.cur_offset = fc->sgc_offset +
sgc               378 drivers/scsi/esas2r/esas2r_flash.c 	while (fc->sgc.length == 0) {
sgc               392 drivers/scsi/esas2r/esas2r_flash.c 			fc->sgc.length = ch->length;
sgc               393 drivers/scsi/esas2r/esas2r_flash.c 			fc->sgc.cur_offset = fc->sgc_offset +
sgc               406 drivers/scsi/esas2r/esas2r_flash.c 			fc->sgc.length = FM_BUF_SZ;
sgc               407 drivers/scsi/esas2r/esas2r_flash.c 			fc->sgc.cur_offset = fc->sgc_offset
sgc               430 drivers/scsi/esas2r/esas2r_flash.c 			fc->sgc.length = ch->length;
sgc               431 drivers/scsi/esas2r/esas2r_flash.c 			fc->sgc.cur_offset = fc->sgc_offset +
sgc               441 drivers/scsi/esas2r/esas2r_flash.c 			fc->sgc.length = FM_BUF_SZ;
sgc               442 drivers/scsi/esas2r/esas2r_flash.c 			fc->sgc.cur_offset = fc->sgc_offset
sgc               466 drivers/scsi/esas2r/esas2r_flash.c 			fc->sgc.length = ch->length;
sgc               467 drivers/scsi/esas2r/esas2r_flash.c 			fc->sgc.cur_offset = fc->sgc_offset +
sgc               477 drivers/scsi/esas2r/esas2r_flash.c 			fc->sgc.length = FM_BUF_SZ;
sgc               478 drivers/scsi/esas2r/esas2r_flash.c 			fc->sgc.cur_offset = fc->sgc_offset
sgc               500 drivers/scsi/esas2r/esas2r_flash.c 			fc->sgc.length = ch->length;
sgc               501 drivers/scsi/esas2r/esas2r_flash.c 			fc->sgc.cur_offset = fc->sgc_offset +
sgc               511 drivers/scsi/esas2r/esas2r_flash.c 			fc->sgc.length = FM_BUF_SZ;
sgc               512 drivers/scsi/esas2r/esas2r_flash.c 			fc->sgc.cur_offset = fc->sgc_offset
sgc               544 drivers/scsi/esas2r/esas2r_flash.c 		    && fc->sgc.length > fc->cmp_len)
sgc               545 drivers/scsi/esas2r/esas2r_flash.c 			fc->sgc.length = fc->cmp_len;
sgc               848 drivers/scsi/esas2r/esas2r_flash.c 			     struct esas2r_sg_context *sgc)
sgc               915 drivers/scsi/esas2r/esas2r_flash.c 		esas2r_sgc_init(sgc, a, rq, rq->vrq->flash.data.sge);
sgc               916 drivers/scsi/esas2r/esas2r_flash.c 		sgc->length = datalen;
sgc               918 drivers/scsi/esas2r/esas2r_flash.c 		if (!esas2r_build_sg_list(a, rq, sgc)) {
sgc              1391 drivers/scsi/esas2r/esas2r_flash.c 		   struct esas2r_request *rq, struct esas2r_sg_context *sgc)
sgc              1403 drivers/scsi/esas2r/esas2r_flash.c 	memcpy(&fc->sgc, sgc, sizeof(struct esas2r_sg_context));
sgc              1404 drivers/scsi/esas2r/esas2r_flash.c 	sgc = &fc->sgc;
sgc              1406 drivers/scsi/esas2r/esas2r_flash.c 	fc->sgc_offset = sgc->cur_offset;
sgc              1457 drivers/scsi/esas2r/esas2r_flash.c 		fc->sgc.length = FLS_LENGTH_BOOT;
sgc              1458 drivers/scsi/esas2r/esas2r_flash.c 		fc->sgc.cur_offset = NULL;
sgc               188 drivers/scsi/esas2r/esas2r_io.c 			      struct esas2r_sg_context *sgc)
sgc               190 drivers/scsi/esas2r/esas2r_io.c 	struct esas2r_request *rq = sgc->first_req;
sgc               193 drivers/scsi/esas2r/esas2r_io.c 	while (sgc->length) {
sgc               198 drivers/scsi/esas2r/esas2r_io.c 		len = (*sgc->get_phys_addr)(sgc, &addr);
sgc               204 drivers/scsi/esas2r/esas2r_io.c 		if (unlikely(len > sgc->length))
sgc               205 drivers/scsi/esas2r/esas2r_io.c 			len = sgc->length;
sgc               222 drivers/scsi/esas2r/esas2r_io.c 		if (unlikely(sgc->sge.a64.curr > sgc->sge.a64.limit)) {
sgc               237 drivers/scsi/esas2r/esas2r_io.c 			sgelen = (u8)((u8 *)sgc->sge.a64.curr
sgc               238 drivers/scsi/esas2r/esas2r_io.c 				      - (u8 *)sgc->sge.a64.last);
sgc               244 drivers/scsi/esas2r/esas2r_io.c 			memcpy(sgl->virt_addr, sgc->sge.a64.last, sgelen);
sgc               247 drivers/scsi/esas2r/esas2r_io.c 			sgc->sge.a64.curr =
sgc               252 drivers/scsi/esas2r/esas2r_io.c 			sgc->sge.a64.limit =
sgc               257 drivers/scsi/esas2r/esas2r_io.c 			sgc->sge.a64.last->length = cpu_to_le32(
sgc               259 drivers/scsi/esas2r/esas2r_io.c 			sgc->sge.a64.last->address =
sgc               268 drivers/scsi/esas2r/esas2r_io.c 			if (sgc->sge.a64.chain) {
sgc               269 drivers/scsi/esas2r/esas2r_io.c 				sgc->sge.a64.chain->length |=
sgc               271 drivers/scsi/esas2r/esas2r_io.c 						((u8 *)(sgc->sge.a64.
sgc               279 drivers/scsi/esas2r/esas2r_io.c 							 ((u8 *)sgc->
sgc               299 drivers/scsi/esas2r/esas2r_io.c 			sgc->sge.a64.chain = sgc->sge.a64.last;
sgc               306 drivers/scsi/esas2r/esas2r_io.c 		sgc->sge.a64.last = sgc->sge.a64.curr;
sgc               309 drivers/scsi/esas2r/esas2r_io.c 		sgc->sge.a64.curr->length = cpu_to_le32(SGE_ADDR_64 | len);
sgc               310 drivers/scsi/esas2r/esas2r_io.c 		sgc->sge.a64.curr->address = cpu_to_le32(addr);
sgc               311 drivers/scsi/esas2r/esas2r_io.c 		sgc->sge.a64.curr++;
sgc               312 drivers/scsi/esas2r/esas2r_io.c 		sgc->cur_offset += len;
sgc               313 drivers/scsi/esas2r/esas2r_io.c 		sgc->length -= len;
sgc               328 drivers/scsi/esas2r/esas2r_io.c 	sgc->sge.a64.last->length |= cpu_to_le32(SGE_LAST);
sgc               334 drivers/scsi/esas2r/esas2r_io.c 	if (sgc->sge.a64.chain) {
sgc               335 drivers/scsi/esas2r/esas2r_io.c 		sgc->sge.a64.chain->length |= cpu_to_le32(
sgc               336 drivers/scsi/esas2r/esas2r_io.c 			((u8 *)(sgc->sge.a64.curr) -
sgc               346 drivers/scsi/esas2r/esas2r_io.c 			((u16)((u8 *)sgc->sge.a64.last - (u8 *)vrq)
sgc               371 drivers/scsi/esas2r/esas2r_io.c 				  struct esas2r_sg_context *sgc)
sgc               373 drivers/scsi/esas2r/esas2r_io.c 	struct esas2r_request *rq = sgc->first_req;
sgc               380 drivers/scsi/esas2r/esas2r_io.c 	while (sgc->length) {
sgc               383 drivers/scsi/esas2r/esas2r_io.c 		len = (*sgc->get_phys_addr)(sgc, &addr);
sgc               390 drivers/scsi/esas2r/esas2r_io.c 		if (unlikely(len > sgc->length))
sgc               391 drivers/scsi/esas2r/esas2r_io.c 			len = sgc->length;
sgc               409 drivers/scsi/esas2r/esas2r_io.c 		if (sgc->sge.prd.sge_cnt == 0) {
sgc               410 drivers/scsi/esas2r/esas2r_io.c 			if (len == sgc->length) {
sgc               418 drivers/scsi/esas2r/esas2r_io.c 				sgc->sge.prd.curr->ctl_len = cpu_to_le32(
sgc               420 drivers/scsi/esas2r/esas2r_io.c 				sgc->sge.prd.curr->address = cpu_to_le64(addr);
sgc               423 drivers/scsi/esas2r/esas2r_io.c 				sgc->cur_offset += len;
sgc               424 drivers/scsi/esas2r/esas2r_io.c 				sgc->length -= len;
sgc               432 drivers/scsi/esas2r/esas2r_io.c 			if (sgc->sge.prd.chain) {
sgc               439 drivers/scsi/esas2r/esas2r_io.c 				sgc->sge.prd.chain->ctl_len |= cpu_to_le32(
sgc               440 drivers/scsi/esas2r/esas2r_io.c 					sgc->sge.prd.sgl_max_cnt);
sgc               467 drivers/scsi/esas2r/esas2r_io.c 			sgc->sge.prd.chain = sgc->sge.prd.curr;
sgc               469 drivers/scsi/esas2r/esas2r_io.c 			sgc->sge.prd.chain->ctl_len = cpu_to_le32(PRD_CHAIN);
sgc               470 drivers/scsi/esas2r/esas2r_io.c 			sgc->sge.prd.chain->address =
sgc               478 drivers/scsi/esas2r/esas2r_io.c 			sgc->sge.prd.curr =
sgc               482 drivers/scsi/esas2r/esas2r_io.c 			sgc->sge.prd.sge_cnt = sgc->sge.prd.sgl_max_cnt - 1;
sgc               485 drivers/scsi/esas2r/esas2r_io.c 		sgc->sge.prd.sge_cnt--;
sgc               487 drivers/scsi/esas2r/esas2r_io.c 		sgc->sge.prd.curr->ctl_len = cpu_to_le32(PRD_DATA | len);
sgc               488 drivers/scsi/esas2r/esas2r_io.c 		sgc->sge.prd.curr->address = cpu_to_le64(addr);
sgc               492 drivers/scsi/esas2r/esas2r_io.c 		sgc->sge.prd.curr++;
sgc               496 drivers/scsi/esas2r/esas2r_io.c 		sgc->cur_offset += len;
sgc               497 drivers/scsi/esas2r/esas2r_io.c 		sgc->length -= len;
sgc               513 drivers/scsi/esas2r/esas2r_io.c 		if (sgc->sge.prd.chain) {
sgc               514 drivers/scsi/esas2r/esas2r_io.c 			sgc->sge.prd.chain->ctl_len |=
sgc               515 drivers/scsi/esas2r/esas2r_io.c 				cpu_to_le32(sgc->sge.prd.sgl_max_cnt
sgc               516 drivers/scsi/esas2r/esas2r_io.c 					    - sgc->sge.prd.sge_cnt
sgc               525 drivers/scsi/esas2r/esas2r_io.c 			      struct esas2r_sg_context *sgc)
sgc               527 drivers/scsi/esas2r/esas2r_io.c 	struct esas2r_request *rq = sgc->first_req;
sgc               528 drivers/scsi/esas2r/esas2r_io.c 	u32 len = sgc->length;
sgc               596 drivers/scsi/esas2r/esas2r_io.c 			sgc->length = startlba * t->block_size;
sgc               602 drivers/scsi/esas2r/esas2r_io.c 			if (sgc->length > len)
sgc               603 drivers/scsi/esas2r/esas2r_io.c 				sgc->length = len;
sgc               605 drivers/scsi/esas2r/esas2r_io.c 			sgc->length = len;
sgc               608 drivers/scsi/esas2r/esas2r_io.c 		sgc->length = len;
sgc               614 drivers/scsi/esas2r/esas2r_io.c 		(struct atto_physical_region_description *)sgc->sge.a64.curr;
sgc               616 drivers/scsi/esas2r/esas2r_io.c 	sgc->sge.prd.sgl_max_cnt = sgl_page_size /
sgc               623 drivers/scsi/esas2r/esas2r_io.c 		sgc->sge.prd.sge_cnt = 0;
sgc               624 drivers/scsi/esas2r/esas2r_io.c 		sgc->sge.prd.chain = NULL;
sgc               625 drivers/scsi/esas2r/esas2r_io.c 		sgc->sge.prd.curr = curr_iblk_chn;
sgc               629 drivers/scsi/esas2r/esas2r_io.c 		len -= sgc->length;
sgc               633 drivers/scsi/esas2r/esas2r_io.c 		if (unlikely(!esas2r_build_prd_iblk(a, sgc)))
sgc               642 drivers/scsi/esas2r/esas2r_io.c 				sgc->length = t->inter_byte;
sgc               644 drivers/scsi/esas2r/esas2r_io.c 				sgc->length = len;
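The esas2r_io.c routines above are the consumer side: they loop until sgc->length is exhausted, asking the get_phys_addr callback for one (address, length) chunk at a time and emitting an SGE or PRD element per chunk. A self-contained model of that loop shape, with the element emission and chain-buffer handling elided (the struct below is an illustrative stand-in, not esas2r_sg_context):

#include <linux/types.h>

/* Minimal stand-in for esas2r_sg_context: just the cursor fields the build
 * loop touches. */
struct my_sg_ctx {
	u8	*cur_offset;		/* source-side cursor          */
	u32	length;			/* bytes still to be described */
	u32	(*get_phys_addr)(struct my_sg_ctx *sgc, u64 *addr);
};

/* Shape of the build loops above, with SGE/PRD emission and chain-buffer
 * handling elided. */
static bool my_build_sg_list(struct my_sg_ctx *sgc)
{
	while (sgc->length) {
		u64 addr;
		u32 len = (*sgc->get_phys_addr)(sgc, &addr);

		if (len > sgc->length)
			len = sgc->length;	/* clamp to what remains */
		if (!len)
			return false;		/* callback ran dry */

		/* ... emit one SGE/PRD element for (addr, len) ... */

		sgc->cur_offset += len;
		sgc->length -= len;
	}

	return true;
}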
sgc                90 drivers/scsi/esas2r/esas2r_ioctl.c static u32 get_physaddr_fm_api(struct esas2r_sg_context *sgc, u64 *addr)
sgc                92 drivers/scsi/esas2r/esas2r_ioctl.c 	struct esas2r_adapter *a = (struct esas2r_adapter *)sgc->adapter;
sgc                93 drivers/scsi/esas2r/esas2r_ioctl.c 	int offset = sgc->cur_offset - a->save_offset;
sgc                99 drivers/scsi/esas2r/esas2r_ioctl.c static u32 get_physaddr_fm_api_header(struct esas2r_sg_context *sgc, u64 *addr)
sgc               101 drivers/scsi/esas2r/esas2r_ioctl.c 	struct esas2r_adapter *a = (struct esas2r_adapter *)sgc->adapter;
sgc               102 drivers/scsi/esas2r/esas2r_ioctl.c 	int offset = sgc->cur_offset - a->save_offset;
sgc               189 drivers/scsi/esas2r/esas2r_ioctl.c static u32 get_physaddr_buffered_ioctl(struct esas2r_sg_context *sgc,
sgc               192 drivers/scsi/esas2r/esas2r_ioctl.c 	int offset = (u8 *)sgc->cur_offset - esas2r_buffered_ioctl;
sgc               209 drivers/scsi/esas2r/esas2r_ioctl.c 	struct esas2r_sg_context sgc;
sgc               263 drivers/scsi/esas2r/esas2r_ioctl.c 	sgc.cur_offset = esas2r_buffered_ioctl + bi->offset;
sgc               264 drivers/scsi/esas2r/esas2r_ioctl.c 	sgc.get_phys_addr = (PGETPHYSADDR)get_physaddr_buffered_ioctl;
sgc               265 drivers/scsi/esas2r/esas2r_ioctl.c 	sgc.length = esas2r_buffered_ioctl_size;
sgc               267 drivers/scsi/esas2r/esas2r_ioctl.c 	if (!(*bi->callback)(a, rq, &sgc, bi->context)) {
sgc               295 drivers/scsi/esas2r/esas2r_ioctl.c 			      struct esas2r_sg_context *sgc, void *context)
sgc               300 drivers/scsi/esas2r/esas2r_ioctl.c 	esas2r_sgc_init(sgc, a, rq, rq->vrq->ioctl.sge);
sgc               301 drivers/scsi/esas2r/esas2r_ioctl.c 	esas2r_build_ioctl_req(a, rq, sgc->length, VDA_IOCTL_SMP);
sgc               303 drivers/scsi/esas2r/esas2r_ioctl.c 	if (!esas2r_build_sg_list(a, rq, sgc)) {
sgc               344 drivers/scsi/esas2r/esas2r_ioctl.c 			      struct esas2r_sg_context *sgc,
sgc               353 drivers/scsi/esas2r/esas2r_ioctl.c 	esas2r_sgc_init(sgc, a, rq, rq->vrq->ioctl.sge);
sgc               354 drivers/scsi/esas2r/esas2r_ioctl.c 	esas2r_build_ioctl_req(a, rq, sgc->length, VDA_IOCTL_CSMI);
sgc               367 drivers/scsi/esas2r/esas2r_ioctl.c 	if (!esas2r_build_sg_list(a, rq, sgc))
sgc               392 drivers/scsi/esas2r/esas2r_ioctl.c 			       struct esas2r_sg_context *sgc, void *context)
sgc               514 drivers/scsi/esas2r/esas2r_ioctl.c 		if (!csmi_ioctl_tunnel(a, ioctl_csmi, rq, sgc,
sgc               585 drivers/scsi/esas2r/esas2r_ioctl.c 		if (!csmi_ioctl_tunnel(a, ioctl_csmi, rq, sgc,
sgc               670 drivers/scsi/esas2r/esas2r_ioctl.c 			     struct esas2r_sg_context *sgc)
sgc               672 drivers/scsi/esas2r/esas2r_ioctl.c 	esas2r_sgc_init(sgc, a, rq, rq->vrq->ioctl.sge);
sgc               674 drivers/scsi/esas2r/esas2r_ioctl.c 	esas2r_build_ioctl_req(a, rq, sgc->length, VDA_IOCTL_HBA);
sgc               676 drivers/scsi/esas2r/esas2r_ioctl.c 	if (!esas2r_build_sg_list(a, rq, sgc)) {
sgc               746 drivers/scsi/esas2r/esas2r_ioctl.c 			      struct esas2r_sg_context *sgc,
sgc               901 drivers/scsi/esas2r/esas2r_ioctl.c 			if (hba_ioctl_tunnel(a, hi, rq, sgc))
sgc               917 drivers/scsi/esas2r/esas2r_ioctl.c 			if (hba_ioctl_tunnel(a, hi, rq, sgc))
sgc               988 drivers/scsi/esas2r/esas2r_ioctl.c 			if (hba_ioctl_tunnel(a, hi, rq, sgc))
sgc              1005 drivers/scsi/esas2r/esas2r_ioctl.c 		esas2r_sgc_init(sgc, a, rq, NULL);
sgc              1007 drivers/scsi/esas2r/esas2r_ioctl.c 		sgc->length = hi->data_length;
sgc              1008 drivers/scsi/esas2r/esas2r_ioctl.c 		sgc->cur_offset += offsetof(struct atto_ioctl, data.byte)
sgc              1034 drivers/scsi/esas2r/esas2r_ioctl.c 			if (sgc->length) {
sgc              1047 drivers/scsi/esas2r/esas2r_ioctl.c 		if (!esas2r_build_sg_list(a, rq, sgc)) {
sgc              1064 drivers/scsi/esas2r/esas2r_ioctl.c 			if (hba_ioctl_tunnel(a, hi, rq, sgc))
sgc              1110 drivers/scsi/esas2r/esas2r_ioctl.c 		if (hba_ioctl_tunnel(a, hi, rq, sgc))
sgc              1160 drivers/scsi/esas2r/esas2r_ioctl.c 			if (hba_ioctl_tunnel(a, hi, rq, sgc))
sgc              1815 drivers/scsi/esas2r/esas2r_ioctl.c static u32 get_physaddr_vda(struct esas2r_sg_context *sgc, u64 *addr)
sgc              1817 drivers/scsi/esas2r/esas2r_ioctl.c 	struct esas2r_adapter *a = (struct esas2r_adapter *)sgc->adapter;
sgc              1818 drivers/scsi/esas2r/esas2r_ioctl.c 	int offset = (u8 *)sgc->cur_offset - (u8 *)a->vda_buffer;
sgc              1834 drivers/scsi/esas2r/esas2r_ioctl.c 		struct esas2r_sg_context sgc;
sgc              1852 drivers/scsi/esas2r/esas2r_ioctl.c 		sgc.first_req = rq;
sgc              1853 drivers/scsi/esas2r/esas2r_ioctl.c 		sgc.adapter = a;
sgc              1854 drivers/scsi/esas2r/esas2r_ioctl.c 		sgc.cur_offset = a->vda_buffer + VDA_BUFFER_HEADER_SZ;
sgc              1855 drivers/scsi/esas2r/esas2r_ioctl.c 		sgc.get_phys_addr = (PGETPHYSADDR)get_physaddr_vda;
sgc              1860 drivers/scsi/esas2r/esas2r_ioctl.c 			esas2r_process_vda_ioctl(a, vi, rq, &sgc);
sgc              1934 drivers/scsi/esas2r/esas2r_ioctl.c static u32 get_physaddr_fs_api(struct esas2r_sg_context *sgc, u64 *addr)
sgc              1936 drivers/scsi/esas2r/esas2r_ioctl.c 	struct esas2r_adapter *a = (struct esas2r_adapter *)sgc->adapter;
sgc              1939 drivers/scsi/esas2r/esas2r_ioctl.c 	u32 offset = (u8 *)sgc->cur_offset - (u8 *)fs;
sgc              1954 drivers/scsi/esas2r/esas2r_ioctl.c 		struct esas2r_sg_context sgc;
sgc              1983 drivers/scsi/esas2r/esas2r_ioctl.c 		sgc.cur_offset = fs->data;
sgc              1984 drivers/scsi/esas2r/esas2r_ioctl.c 		sgc.get_phys_addr = (PGETPHYSADDR)get_physaddr_fs_api;
sgc              1988 drivers/scsi/esas2r/esas2r_ioctl.c 		if (!esas2r_process_fs_ioctl(a, fs, rq, &sgc)) {
sgc               753 drivers/scsi/esas2r/esas2r_main.c static u32 get_physaddr_from_sgc(struct esas2r_sg_context *sgc, u64 *addr)
sgc               757 drivers/scsi/esas2r/esas2r_main.c 	if (likely(sgc->cur_offset == sgc->exp_offset)) {
sgc               763 drivers/scsi/esas2r/esas2r_main.c 		if (sgc->sgel_count < sgc->num_sgel) {
sgc               765 drivers/scsi/esas2r/esas2r_main.c 			if (sgc->exp_offset > (u8 *)0) {
sgc               767 drivers/scsi/esas2r/esas2r_main.c 				sgc->cur_sgel = sg_next(sgc->cur_sgel);
sgc               768 drivers/scsi/esas2r/esas2r_main.c 				++(sgc->sgel_count);
sgc               772 drivers/scsi/esas2r/esas2r_main.c 			len = sg_dma_len(sgc->cur_sgel);
sgc               773 drivers/scsi/esas2r/esas2r_main.c 			(*addr) = sg_dma_address(sgc->cur_sgel);
sgc               776 drivers/scsi/esas2r/esas2r_main.c 			sgc->exp_offset += len;
sgc               781 drivers/scsi/esas2r/esas2r_main.c 	} else if (sgc->cur_offset < sgc->exp_offset) {
sgc               787 drivers/scsi/esas2r/esas2r_main.c 		len = sg_dma_len(sgc->cur_sgel);
sgc               788 drivers/scsi/esas2r/esas2r_main.c 		(*addr) = sg_dma_address(sgc->cur_sgel);
sgc               790 drivers/scsi/esas2r/esas2r_main.c 		sgc->exp_offset -= len;
sgc               794 drivers/scsi/esas2r/esas2r_main.c 			(sgc->cur_offset - sgc->exp_offset);
sgc               796 drivers/scsi/esas2r/esas2r_main.c 		sgc->exp_offset += len;
sgc               800 drivers/scsi/esas2r/esas2r_main.c 			sgc->exp_offset - sgc->cur_offset);
sgc               817 drivers/scsi/esas2r/esas2r_main.c 	struct esas2r_sg_context sgc;
sgc               852 drivers/scsi/esas2r/esas2r_main.c 	esas2r_sgc_init(&sgc, a, rq, NULL);
sgc               854 drivers/scsi/esas2r/esas2r_main.c 	sgc.length = bufflen;
sgc               855 drivers/scsi/esas2r/esas2r_main.c 	sgc.cur_offset = NULL;
sgc               857 drivers/scsi/esas2r/esas2r_main.c 	sgc.cur_sgel = scsi_sglist(cmd);
sgc               858 drivers/scsi/esas2r/esas2r_main.c 	sgc.exp_offset = NULL;
sgc               859 drivers/scsi/esas2r/esas2r_main.c 	sgc.num_sgel = scsi_dma_map(cmd);
sgc               860 drivers/scsi/esas2r/esas2r_main.c 	sgc.sgel_count = 0;
sgc               862 drivers/scsi/esas2r/esas2r_main.c 	if (unlikely(sgc.num_sgel < 0)) {
sgc               867 drivers/scsi/esas2r/esas2r_main.c 	sgc.get_phys_addr = (PGETPHYSADDR)get_physaddr_from_sgc;
sgc               869 drivers/scsi/esas2r/esas2r_main.c 	if (unlikely(!esas2r_build_sg_list(a, rq, &sgc))) {
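get_physaddr_from_sgc() in esas2r_main.c is the producer side for SCSI commands: it walks the command's DMA-mapped scatterlist and returns the address and length of the element at the current offset. A simplified model of its likely path (the real code also reconciles cur_offset against exp_offset for retried requests, which is omitted here; the struct is a cut-down illustration, not esas2r's):

#include <linux/scatterlist.h>
#include <linux/types.h>

/* Cut-down cursor for the simplified model below. */
struct my_scsi_cursor {
	u8			*exp_offset;	/* NULL before the first call */
	struct scatterlist	*cur_sgel;
	int			sgel_count;
	int			num_sgel;	/* from scsi_dma_map()        */
};

/* Likely path of get_physaddr_from_sgc() above: step to the next mapped
 * element and report its DMA address and length. */
static u32 my_get_physaddr(struct my_scsi_cursor *c, u64 *addr)
{
	u32 len;

	if (c->sgel_count >= c->num_sgel)
		return 0;			/* scatterlist exhausted */

	if (c->exp_offset)			/* not the very first chunk */
		c->cur_sgel = sg_next(c->cur_sgel);
	c->sgel_count++;

	len = sg_dma_len(c->cur_sgel);
	*addr = sg_dma_address(c->cur_sgel);
	c->exp_offset += len;

	return len;
}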
sgc                68 drivers/scsi/esas2r/esas2r_vda.c 			      struct esas2r_sg_context *sgc)
sgc               135 drivers/scsi/esas2r/esas2r_vda.c 		u8 *cmdcurr_offset = sgc->cur_offset
sgc               162 drivers/scsi/esas2r/esas2r_vda.c 				cmdcurr_offset = sgc->cur_offset;
sgc               175 drivers/scsi/esas2r/esas2r_vda.c 				esas2r_sgc_init(sgc, a, rq,
sgc               177 drivers/scsi/esas2r/esas2r_vda.c 				sgc->length = payldlen;
sgc               179 drivers/scsi/esas2r/esas2r_vda.c 				if (!esas2r_build_sg_list(a, rq, sgc)) {
sgc               195 drivers/scsi/esas2r/esas2r_vda.c 		sgc->cur_offset = cmdcurr_offset;
sgc               255 drivers/scsi/esas2r/esas2r_vda.c 		esas2r_sgc_init(sgc, a, rq, firstsg);
sgc               256 drivers/scsi/esas2r/esas2r_vda.c 		sgc->length = datalen;
sgc               258 drivers/scsi/esas2r/esas2r_vda.c 		if (!esas2r_build_sg_list(a, rq, sgc)) {
sgc              1063 drivers/scsi/fcoe/fcoe.c 			  struct scatterlist *sgl, unsigned int sgc)
sgc              1070 drivers/scsi/fcoe/fcoe.c 							      sgc);
sgc              1085 drivers/scsi/fcoe/fcoe.c 			   struct scatterlist *sgl, unsigned int sgc)
sgc              1091 drivers/scsi/fcoe/fcoe.c 							       sgl, sgc);
sgc              1337 include/linux/netdevice.h 						      unsigned int sgc);
sgc              1343 include/linux/netdevice.h 						       unsigned int sgc);
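In include/linux/netdevice.h, sgc is the scatterlist entry count taken by the FCoE DDP members of net_device_ops; fcoe.c above reaches these hooks through the device's netdev_ops. A sketch of how a driver might wire them up (the hook bodies are illustrative; the ndo_fcoe_* fields exist only when CONFIG_FCOE is enabled, hence the guard):

#include <linux/netdevice.h>
#include <linux/scatterlist.h>

/* Illustrative hook bodies matching the prototypes above. */
static int my_fcoe_ddp_setup(struct net_device *netdev, u16 xid,
			     struct scatterlist *sgl, unsigned int sgc)
{
	/* map the sgc entries of sgl and set up DDP for exchange xid */
	return 0;
}

static int my_fcoe_ddp_target(struct net_device *netdev, u16 xid,
			      struct scatterlist *sgl, unsigned int sgc)
{
	/* as above, but for target-mode exchanges */
	return 0;
}

static const struct net_device_ops my_netdev_ops = {
#if IS_ENABLED(CONFIG_FCOE)
	.ndo_fcoe_ddp_setup	= my_fcoe_ddp_setup,
	.ndo_fcoe_ddp_target	= my_fcoe_ddp_target,
#endif
};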
sgc               175 include/linux/sched/topology.h 	struct sched_group_capacity *__percpu *sgc;
sgc              5629 kernel/sched/fair.c 					group->sgc->capacity;
sgc              5631 kernel/sched/fair.c 					group->sgc->capacity;
sgc              7805 kernel/sched/fair.c 	sdg->sgc->capacity = capacity;
sgc              7806 kernel/sched/fair.c 	sdg->sgc->min_capacity = capacity;
sgc              7807 kernel/sched/fair.c 	sdg->sgc->max_capacity = capacity;
sgc              7819 kernel/sched/fair.c 	sdg->sgc->next_update = jiffies + interval;
sgc              7837 kernel/sched/fair.c 			struct sched_group_capacity *sgc;
sgc              7854 kernel/sched/fair.c 				sgc = rq->sd->groups->sgc;
sgc              7855 kernel/sched/fair.c 				capacity += sgc->capacity;
sgc              7869 kernel/sched/fair.c 			struct sched_group_capacity *sgc = group->sgc;
sgc              7871 kernel/sched/fair.c 			capacity += sgc->capacity;
sgc              7872 kernel/sched/fair.c 			min_capacity = min(sgc->min_capacity, min_capacity);
sgc              7873 kernel/sched/fair.c 			max_capacity = max(sgc->max_capacity, max_capacity);
sgc              7878 kernel/sched/fair.c 	sdg->sgc->capacity = capacity;
sgc              7879 kernel/sched/fair.c 	sdg->sgc->min_capacity = min_capacity;
sgc              7880 kernel/sched/fair.c 	sdg->sgc->max_capacity = max_capacity;
sgc              7938 kernel/sched/fair.c 	return group->sgc->imbalance;
sgc              7994 kernel/sched/fair.c 	return fits_capacity(sg->sgc->min_capacity, ref->sgc->min_capacity);
sgc              8004 kernel/sched/fair.c 	return fits_capacity(sg->sgc->max_capacity, ref->sgc->max_capacity);
sgc              8096 kernel/sched/fair.c 	sgs->group_capacity = group->sgc->capacity;
sgc              8254 kernel/sched/fair.c 			    time_after_eq(jiffies, sg->sgc->next_update))
sgc              8965 kernel/sched/fair.c 			int *group_imbalance = &sd_parent->groups->sgc->imbalance;
sgc              9066 kernel/sched/fair.c 		int *group_imbalance = &sd_parent->groups->sgc->imbalance;
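In the scheduler, sgc points at the struct sched_group_capacity shared by a sched_group; the fair.c lines above fold the children's values into the parent by summing capacity and taking the min/max of min_capacity/max_capacity. A stand-alone model of that fold with cut-down types (not the kernel's actual structures):

/* Cut-down stand-ins for sched_group / sched_group_capacity: just enough
 * to show the aggregation performed in fair.c above. */
struct my_sgc {
	unsigned long	capacity;
	unsigned long	min_capacity;
	unsigned long	max_capacity;
};

struct my_group {
	struct my_group	*next;		/* circular list of sibling groups */
	struct my_sgc	*sgc;
};

static void my_update_group_capacity(struct my_group *parent,
				     struct my_group *first_child)
{
	unsigned long capacity = 0, min_capacity = ~0UL, max_capacity = 0;
	struct my_group *g = first_child;

	do {
		struct my_sgc *sgc = g->sgc;

		capacity += sgc->capacity;
		if (sgc->min_capacity < min_capacity)
			min_capacity = sgc->min_capacity;
		if (sgc->max_capacity > max_capacity)
			max_capacity = sgc->max_capacity;
		g = g->next;
	} while (g != first_child);

	parent->sgc->capacity = capacity;
	parent->sgc->min_capacity = min_capacity;
	parent->sgc->max_capacity = max_capacity;
}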
sgc              1415 kernel/sched/sched.h 	struct sched_group_capacity *sgc;
sgc              1438 kernel/sched/sched.h 	return to_cpumask(sg->sgc->cpumask);
sgc                78 kernel/sched/topology.c 				group->sgc->id,
sgc                87 kernel/sched/topology.c 		if (group->sgc->capacity != SCHED_CAPACITY_SCALE)
sgc                88 kernel/sched/topology.c 			printk(KERN_CONT " cap=%lu", group->sgc->capacity);
sgc               569 kernel/sched/topology.c 		if (free_sgc && atomic_dec_and_test(&sg->sgc->ref))
sgc               570 kernel/sched/topology.c 			kfree(sg->sgc);
sgc               909 kernel/sched/topology.c 	sg->sgc = *per_cpu_ptr(sdd->sgc, cpu);
sgc               910 kernel/sched/topology.c 	if (atomic_inc_return(&sg->sgc->ref) == 1)
sgc               921 kernel/sched/topology.c 	sg->sgc->capacity = SCHED_CAPACITY_SCALE * cpumask_weight(sg_span);
sgc               922 kernel/sched/topology.c 	sg->sgc->min_capacity = SCHED_CAPACITY_SCALE;
sgc               923 kernel/sched/topology.c 	sg->sgc->max_capacity = SCHED_CAPACITY_SCALE;
sgc              1068 kernel/sched/topology.c 	sg->sgc = *per_cpu_ptr(sdd->sgc, cpu);
sgc              1073 kernel/sched/topology.c 	WARN_ON(already_visited != (atomic_inc_return(&sg->sgc->ref) > 1));
sgc              1087 kernel/sched/topology.c 	sg->sgc->capacity = SCHED_CAPACITY_SCALE * cpumask_weight(sched_group_span(sg));
sgc              1088 kernel/sched/topology.c 	sg->sgc->min_capacity = SCHED_CAPACITY_SCALE;
sgc              1089 kernel/sched/topology.c 	sg->sgc->max_capacity = SCHED_CAPACITY_SCALE;
sgc              1274 kernel/sched/topology.c 	if (atomic_read(&(*per_cpu_ptr(sdd->sgc, cpu))->ref))
sgc              1275 kernel/sched/topology.c 		*per_cpu_ptr(sdd->sgc, cpu) = NULL;
sgc              1770 kernel/sched/topology.c 		sdd->sgc = alloc_percpu(struct sched_group_capacity *);
sgc              1771 kernel/sched/topology.c 		if (!sdd->sgc)
sgc              1778 kernel/sched/topology.c 			struct sched_group_capacity *sgc;
sgc              1803 kernel/sched/topology.c 			sgc = kzalloc_node(sizeof(struct sched_group_capacity) + cpumask_size(),
sgc              1805 kernel/sched/topology.c 			if (!sgc)
sgc              1809 kernel/sched/topology.c 			sgc->id = j;
sgc              1812 kernel/sched/topology.c 			*per_cpu_ptr(sdd->sgc, j) = sgc;
sgc              1841 kernel/sched/topology.c 			if (sdd->sgc)
sgc              1842 kernel/sched/topology.c 				kfree(*per_cpu_ptr(sdd->sgc, j));
sgc              1850 kernel/sched/topology.c 		free_percpu(sdd->sgc);
sgc              1851 kernel/sched/topology.c 		sdd->sgc = NULL;
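kernel/sched/topology.c allocates one sched_group_capacity per CPU into the per-CPU array sdd->sgc and lets several groups share it through an atomic refcount, with only the first referencer doing the one-time capacity initialization. A small sketch of that share-and-init-once idiom, assuming kernel context (the types and the 1024 scale factor are illustrative stand-ins):

#include <linux/atomic.h>

/* Illustrative stand-in: several groups share one capacity object and
 * atomic_inc_return() elects the first referencer to initialize it. */
struct my_shared_sgc {
	atomic_t	ref;
	unsigned long	capacity;
};

static void my_attach_sgc(struct my_shared_sgc **slot,
			  struct my_shared_sgc *shared,
			  unsigned int nr_cpus)
{
	*slot = shared;

	if (atomic_inc_return(&shared->ref) > 1)
		return;			/* already initialized by another group */

	shared->capacity = 1024UL * nr_cpus;	/* cf. SCHED_CAPACITY_SCALE */
}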
sgc               401 net/8021q/vlan_dev.c 				   struct scatterlist *sgl, unsigned int sgc)
sgc               408 net/8021q/vlan_dev.c 		rc = ops->ndo_fcoe_ddp_setup(real_dev, xid, sgl, sgc);
sgc               448 net/8021q/vlan_dev.c 				    struct scatterlist *sgl, unsigned int sgc)
sgc               455 net/8021q/vlan_dev.c 		rc = ops->ndo_fcoe_ddp_target(real_dev, xid, sgl, sgc);
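Finally, net/8021q/vlan_dev.c treats sgc as opaque and simply forwards the DDP calls to the underlying real device if it implements the corresponding hook. A minimal sketch of that pass-through, assuming kernel context (how real_dev is obtained from the VLAN device is left out):

#include <linux/netdevice.h>
#include <linux/scatterlist.h>

#if IS_ENABLED(CONFIG_FCOE)
/* Delegate DDP setup to the lower device, as vlan_dev.c does above. */
static int my_vlan_fcoe_ddp_setup(struct net_device *real_dev, u16 xid,
				  struct scatterlist *sgl, unsigned int sgc)
{
	const struct net_device_ops *ops = real_dev->netdev_ops;

	if (ops->ndo_fcoe_ddp_setup)
		return ops->ndo_fcoe_ddp_setup(real_dev, xid, sgl, sgc);

	return 0;			/* lower device has no DDP support */
}
#endif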