sg_used 469 drivers/crypto/ccp/ccp-dev.h unsigned int sg_used;
sg_used 86 drivers/crypto/ccp/ccp-ops.c wa->sg_used = 0;
sg_used 111 drivers/crypto/ccp/ccp-ops.c wa->sg_used += nbytes;
sg_used 113 drivers/crypto/ccp/ccp-ops.c if (wa->sg_used == wa->sg->length) {
sg_used 115 drivers/crypto/ccp/ccp-ops.c wa->sg_used = 0;
sg_used 296 drivers/crypto/ccp/ccp-ops.c scatterwalk_map_and_copy(dm_wa->address, sg_wa->sg, sg_wa->sg_used,
sg_used 302 drivers/crypto/ccp/ccp-ops.c nbytes = min(sg_wa->sg->length - sg_wa->sg_used,
sg_used 334 drivers/crypto/ccp/ccp-ops.c sg_src_len = sg_dma_len(src->sg_wa.sg) - src->sg_wa.sg_used;
sg_used 338 drivers/crypto/ccp/ccp-ops.c sg_dst_len = sg_dma_len(dst->sg_wa.sg) - dst->sg_wa.sg_used;
sg_used 369 drivers/crypto/ccp/ccp-ops.c op->src.u.dma.offset = src->sg_wa.sg_used;
sg_used 390 drivers/crypto/ccp/ccp-ops.c op->dst.u.dma.offset = dst->sg_wa.sg_used;
sg_used 2027 drivers/crypto/ccp/ccp-ops.c dst.sg_wa.sg_used = 0;
sg_used 2047 drivers/crypto/ccp/ccp-ops.c op.dst.u.dma.offset = dst.sg_wa.sg_used;
sg_used 2056 drivers/crypto/ccp/ccp-ops.c dst.sg_wa.sg_used += src.sg_wa.sg->length;
sg_used 2057 drivers/crypto/ccp/ccp-ops.c if (dst.sg_wa.sg_used == dst.sg_wa.sg->length) {
sg_used 2059 drivers/crypto/ccp/ccp-ops.c dst.sg_wa.sg_used = 0;
sg_used 537 drivers/dma/xilinx/xilinx_dma.c dma_addr_t buf_addr, size_t sg_used,
sg_used 541 drivers/dma/xilinx/xilinx_dma.c hw->buf_addr = lower_32_bits(buf_addr + sg_used + period_len);
sg_used 542 drivers/dma/xilinx/xilinx_dma.c hw->buf_addr_msb = upper_32_bits(buf_addr + sg_used +
sg_used 545 drivers/dma/xilinx/xilinx_dma.c hw->buf_addr = buf_addr + sg_used + period_len;
sg_used 1786 drivers/dma/xilinx/xilinx_dma.c size_t sg_used;
sg_used 1802 drivers/dma/xilinx/xilinx_dma.c sg_used = 0;
sg_used 1805 drivers/dma/xilinx/xilinx_dma.c while (sg_used < sg_dma_len(sg)) {
sg_used 1818 drivers/dma/xilinx/xilinx_dma.c sg_used);
sg_used 1823 drivers/dma/xilinx/xilinx_dma.c sg_used, 0);
sg_used 1833 drivers/dma/xilinx/xilinx_dma.c sg_used += copy;
sg_used 1882 drivers/dma/xilinx/xilinx_dma.c size_t copy, sg_used;
sg_used 1908 drivers/dma/xilinx/xilinx_dma.c sg_used = 0;
sg_used 1910 drivers/dma/xilinx/xilinx_dma.c while (sg_used < period_len) {
sg_used 1923 drivers/dma/xilinx/xilinx_dma.c sg_used);
sg_used 1925 drivers/dma/xilinx/xilinx_dma.c xilinx_axidma_buf(chan, hw, buf_addr, sg_used,
sg_used 1933 drivers/dma/xilinx/xilinx_dma.c sg_used += copy;
sg_used 2788 drivers/scsi/hpsa.c int sg_used, enum dma_data_direction data_direction)
sg_used 2792 drivers/scsi/hpsa.c for (i = 0; i < sg_used; i++)
sg_used 6446 drivers/scsi/hpsa.c BYTE sg_used = 0;
sg_used 6489 drivers/scsi/hpsa.c buff_size[sg_used] = sz;
sg_used 6490 drivers/scsi/hpsa.c buff[sg_used] = kmalloc(sz, GFP_KERNEL);
sg_used 6491 drivers/scsi/hpsa.c if (buff[sg_used] == NULL) {
sg_used 6496 drivers/scsi/hpsa.c if (copy_from_user(buff[sg_used], data_ptr, sz)) {
sg_used 6501 drivers/scsi/hpsa.c memset(buff[sg_used], 0, sz);
sg_used 6504 drivers/scsi/hpsa.c sg_used++;
sg_used 6511 drivers/scsi/hpsa.c c->Header.SGList = (u8) sg_used;
sg_used 6512 drivers/scsi/hpsa.c c->Header.SGTotal = cpu_to_le16(sg_used);
sg_used 6517 drivers/scsi/hpsa.c for (i = 0; i < sg_used; i++) {
sg_used 6537 drivers/scsi/hpsa.c if (sg_used)
sg_used 6538 drivers/scsi/hpsa.c hpsa_pci_unmap(h->pdev, c, sg_used, DMA_BIDIRECTIONAL);
sg_used 6556 drivers/scsi/hpsa.c for (i = 0; i < sg_used; i++) {
sg_used 6571 drivers/scsi/hpsa.c for (i = 0; i < sg_used; i++)
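Across these hits, sg_used plays two roles: in ccp-ops.c and xilinx_dma.c it is a byte offset into the current scatterlist entry (advanced by each processed chunk and reset to 0 when the entry is exhausted), while in hpsa.c it counts how many scatter-gather entries have been filled so far. The following is a minimal, self-contained sketch of the byte-offset pattern only; it is illustrative, not code from any of the files above, and sg_entry, walk_sg, and max_chunk are hypothetical stand-ins for struct scatterlist and the drivers' own helpers.

#include <stddef.h>
#include <stdio.h>

/* Hypothetical stand-in for struct scatterlist (illustration only). */
struct sg_entry {
	size_t length;          /* bytes available in this entry */
	struct sg_entry *next;  /* next entry, or NULL at the end of the list */
};

/*
 * Walk a scatter-gather list in chunks of at most max_chunk bytes,
 * tracking progress within the current entry in sg_used, in the same
 * spirit as the ccp-ops.c and xilinx_dma.c hits above.
 */
static void walk_sg(struct sg_entry *sg, size_t max_chunk)
{
	size_t sg_used = 0;     /* bytes already consumed from *sg */

	while (sg) {
		size_t nbytes = sg->length - sg_used;

		if (nbytes > max_chunk)
			nbytes = max_chunk;

		printf("entry length %zu: offset %zu, chunk %zu bytes\n",
		       sg->length, sg_used, nbytes);

		sg_used += nbytes;
		if (sg_used == sg->length) {    /* entry exhausted: advance, reset offset */
			sg = sg->next;
			sg_used = 0;
		}
	}
}

int main(void)
{
	struct sg_entry b = { .length = 300, .next = NULL };
	struct sg_entry a = { .length = 100, .next = &b };

	walk_sg(&a, 128);       /* 400 bytes split into chunks of at most 128 */
	return 0;
}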