nchunks            54 drivers/crypto/cavium/cpt/cptvf.h 	u32 nchunks; /* Number of command chunks */
nchunks           195 drivers/crypto/cavium/cpt/cptvf_main.c 		queue->nchunks = 0;
nchunks           242 drivers/crypto/cavium/cpt/cptvf_main.c 					i, queue->nchunks);
nchunks           248 drivers/crypto/cavium/cpt/cptvf_main.c 			if (queue->nchunks == 0) {
nchunks           257 drivers/crypto/cavium/cpt/cptvf_main.c 			queue->nchunks++;
nchunks            81 drivers/dma/sh/rcar-dmac.c 	unsigned int nchunks;
nchunks           356 drivers/dma/sh/rcar-dmac.c 			chan->index, desc, desc->nchunks, &desc->hwdescs.dma);
nchunks           370 drivers/dma/sh/rcar-dmac.c 				     RCAR_DMACHCRB_DCNT(desc->nchunks - 1) |
nchunks           727 drivers/dma/sh/rcar-dmac.c 	rcar_dmac_realloc_hwdesc(chan, desc, desc->nchunks * sizeof(*hwdesc));
nchunks           902 drivers/dma/sh/rcar-dmac.c 	unsigned int nchunks = 0;
nchunks           992 drivers/dma/sh/rcar-dmac.c 			nchunks++;
nchunks           996 drivers/dma/sh/rcar-dmac.c 	desc->nchunks = nchunks;
nchunks          1007 drivers/dma/sh/rcar-dmac.c 	desc->hwdescs.use = !cross_boundary && nchunks > 1;
nchunks          1367 drivers/dma/sh/rcar-dmac.c 			dptr = desc->nchunks;
nchunks          1369 drivers/dma/sh/rcar-dmac.c 		WARN_ON(dptr >= desc->nchunks);
nchunks          1376 drivers/dma/sh/rcar-dmac.c 		if (chunk == running || ++dptr == desc->nchunks)
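The rcar-dmac hits above use nchunks as the number of transfer chunks backing one DMA descriptor: the prep path splits each scatterlist entry into chunks, stores the total in desc->nchunks, sizes the hardware descriptor array from it, and only enables hardware-descriptor mode when nchunks > 1 and no cross_boundary condition was hit. A minimal userspace sketch of that split-and-count step, assuming an invented MAX_CHUNK_SIZE rather than the driver's real limits:

#include <stdint.h>
#include <stdio.h>

#define MAX_CHUNK_SIZE 0x1000000u   /* illustrative per-chunk limit, not the driver's */

/* Count how many chunks an (addr, len) transfer splits into when no chunk
 * may exceed MAX_CHUNK_SIZE or cross a 4 GiB address boundary. */
static unsigned int count_chunks(uint64_t addr, uint64_t len)
{
    unsigned int nchunks = 0;

    while (len) {
        uint64_t size = len < MAX_CHUNK_SIZE ? len : MAX_CHUNK_SIZE;

        /* shrink the chunk so it stops at the next 4 GiB boundary */
        if ((addr >> 32) != ((addr + size - 1) >> 32))
            size = (1ULL << 32) - (addr & 0xffffffffULL);

        addr += size;
        len -= size;
        nchunks++;
    }

    return nchunks;
}

int main(void)
{
    /* 32 MiB transfer starting 1 MiB below a 4 GiB boundary -> 3 chunks */
    printf("nchunks = %u\n", count_chunks(0xfff00000ULL, 0x2000000ULL));
    return 0;
}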
nchunks           462 drivers/gpu/drm/amd/amdgpu/amdgpu.h 	unsigned		nchunks;
nchunks           145 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	p->nchunks = cs->in.num_chunks;
nchunks           146 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	p->chunks = kmalloc_array(p->nchunks, sizeof(struct amdgpu_cs_chunk),
nchunks           153 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	for (i = 0; i < p->nchunks; i++) {
nchunks           248 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	i = p->nchunks - 1;
nchunks           254 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	p->nchunks = 0;
nchunks           772 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	for (i = 0; i < parser->nchunks; i++)
nchunks           799 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		for (i = 0, j = 0; i < p->nchunks && j < p->job->num_ibs; i++) {
nchunks           949 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	for (i = 0, j = 0; i < parser->nchunks && j < parser->job->num_ibs; i++) {
nchunks          1216 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	for (i = 0; i < p->nchunks; ++i) {
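In amdgpu_cs.c, nchunks holds the user-supplied chunk count of one command submission: the chunk array is allocated from it with kmalloc_array(), every later pass iterates for (i = 0; i < p->nchunks; i++), and the error path walks back from i = p->nchunks - 1 before resetting nchunks to 0 (the radeon_cs.c hits below follow the same shape with kcalloc()). A compact userspace sketch of that allocate/iterate/unwind pattern, with a hypothetical struct chunk standing in for the real ioctl payloads:

#include <stdlib.h>

struct chunk {
    void *kdata;          /* copied-in chunk payload (hypothetical) */
    size_t length_dw;
};

struct parser {
    unsigned int nchunks;
    struct chunk *chunks;
};

static int parser_init_chunks(struct parser *p, unsigned int num_chunks)
{
    unsigned int i;

    p->nchunks = num_chunks;
    p->chunks = calloc(p->nchunks, sizeof(*p->chunks));   /* overflow-checked */
    if (!p->chunks)
        return -1;

    for (i = 0; i < p->nchunks; i++) {
        p->chunks[i].length_dw = 16;                       /* placeholder size */
        p->chunks[i].kdata = malloc(p->chunks[i].length_dw * 4);
        if (!p->chunks[i].kdata)
            goto free_partial;
    }
    return 0;

free_partial:
    /* free what was set up so far, newest first, then forget the array */
    while (i--)
        free(p->chunks[i].kdata);
    free(p->chunks);
    p->chunks = NULL;
    p->nchunks = 0;
    return -1;
}

int main(void)
{
    struct parser p;

    if (parser_init_chunks(&p, 4))
        return 1;
    for (unsigned int i = 0; i < p.nchunks; i++)
        free(p.chunks[i].kdata);
    free(p.chunks);
    return 0;
}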
nchunks          1062 drivers/gpu/drm/radeon/radeon.h 	unsigned		nchunks;
nchunks           302 drivers/gpu/drm/radeon/radeon_cs.c 	p->nchunks = cs->num_chunks;
nchunks           303 drivers/gpu/drm/radeon/radeon_cs.c 	p->chunks = kcalloc(p->nchunks, sizeof(struct radeon_cs_chunk), GFP_KERNEL);
nchunks           307 drivers/gpu/drm/radeon/radeon_cs.c 	for (i = 0; i < p->nchunks; i++) {
nchunks           452 drivers/gpu/drm/radeon/radeon_cs.c 	for (i = 0; i < parser->nchunks; i++)
nchunks           181 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c 	cmd->nchunks = npages;
nchunks           461 drivers/infiniband/hw/vmw_pvrdma/pvrdma_dev_api.h 	u32 nchunks;
nchunks           483 drivers/infiniband/hw/vmw_pvrdma/pvrdma_dev_api.h 	u32 nchunks;
nchunks           515 drivers/infiniband/hw/vmw_pvrdma/pvrdma_dev_api.h 	u32 nchunks;
nchunks           171 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	cmd->nchunks = npages;
nchunks           243 drivers/infiniband/hw/vmw_pvrdma/pvrdma_mr.c 	cmd->nchunks = max_num_sg;
nchunks           176 drivers/infiniband/hw/vmw_pvrdma/pvrdma_srq.c 	cmd->nchunks = srq->npages;
nchunks           257 drivers/mtd/nand/raw/marvell_nand.c 	int nchunks;
nchunks           272 drivers/mtd/nand/raw/marvell_nand.c 		.nchunks = nc,						\
nchunks          1212 drivers/mtd/nand/raw/marvell_nand.c 	for (chunk = 0; chunk < lt->nchunks; chunk++) {
nchunks          1271 drivers/mtd/nand/raw/marvell_nand.c 	else if (chunk < lt->nchunks - 1)
nchunks          1329 drivers/mtd/nand/raw/marvell_nand.c 	for (chunk = 0; chunk < lt->nchunks; chunk++) {
nchunks          1372 drivers/mtd/nand/raw/marvell_nand.c 	for (chunk = 0; chunk < lt->nchunks; chunk++) {
nchunks          1465 drivers/mtd/nand/raw/marvell_nand.c 	for (chunk = 0; chunk < lt->nchunks; chunk++) {
nchunks          1525 drivers/mtd/nand/raw/marvell_nand.c 		if (lt->nchunks == 1)
nchunks          1535 drivers/mtd/nand/raw/marvell_nand.c 	} else if (chunk < lt->nchunks - 1) {
nchunks          1542 drivers/mtd/nand/raw/marvell_nand.c 	if (chunk == lt->nchunks - 1)
nchunks          1582 drivers/mtd/nand/raw/marvell_nand.c 	for (chunk = 0; chunk < lt->nchunks; chunk++) {
nchunks          2214 drivers/mtd/nand/raw/marvell_nand.c 	ecc->steps = l->nchunks;
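The marvell_nand.c layout struct appears to describe a page as nchunks ECC chunks where the last chunk may carry a different byte count, which is why the loops above single out chunk < lt->nchunks - 1 and chunk == lt->nchunks - 1, and why ecc->steps is set straight from nchunks. A small sketch of walking such a layout, with made-up sizes rather than any real NAND geometry:

#include <stdio.h>

/* Illustrative page layout: full chunks followed by one shorter last chunk. */
struct nand_layout {
    int nchunks;
    int data_bytes;          /* data bytes in a full chunk */
    int last_data_bytes;     /* data bytes in the last chunk */
};

static int layout_page_size(const struct nand_layout *lt)
{
    int chunk, total = 0;

    for (chunk = 0; chunk < lt->nchunks; chunk++) {
        if (chunk < lt->nchunks - 1)
            total += lt->data_bytes;        /* full chunk */
        else
            total += lt->last_data_bytes;   /* shorter final chunk */
    }

    return total;
}

int main(void)
{
    const struct nand_layout lt = { .nchunks = 4, .data_bytes = 1024,
                                    .last_data_bytes = 512 };

    printf("page data = %d bytes over %d ECC steps\n",
           layout_page_size(&lt), lt.nchunks);
    return 0;
}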
nchunks           347 drivers/mtd/nand/raw/sunxi_nand.c 				    int chunksize, int nchunks,
nchunks           361 drivers/mtd/nand/raw/sunxi_nand.c 	sg_init_one(sg, buf, nchunks * chunksize);
nchunks           374 drivers/mtd/nand/raw/sunxi_nand.c 	writel(nchunks, nfc->regs + NFC_REG_SECTOR_NUM);
nchunks           377 drivers/mtd/nand/raw/sunxi_nand.c 		writel(chunksize * nchunks, nfc->regs + NFC_REG_MDMA_CNT);
nchunks           905 drivers/mtd/nand/raw/sunxi_nand.c 					    int nchunks)
nchunks           920 drivers/mtd/nand/raw/sunxi_nand.c 	ret = sunxi_nfc_dma_op_prepare(nfc, buf, ecc->size, nchunks,
nchunks           951 drivers/mtd/nand/raw/sunxi_nand.c 	for (i = 0; i < nchunks; i++) {
nchunks           983 drivers/mtd/nand/raw/sunxi_nand.c 		for (i = 0; i < nchunks; i++) {
nchunks          1185 drivers/mtd/nand/raw/sunxi_nand.c 	int nchunks = DIV_ROUND_UP(data_offs + readlen, nand->ecc.size);
nchunks          1192 drivers/mtd/nand/raw/sunxi_nand.c 	ret = sunxi_nfc_hw_ecc_read_chunks_dma(nand, buf, false, page, nchunks);
nchunks           380 lib/bitmap.c   	int c, old_c, totaldigits, ndigits, nchunks, nbits;
nchunks           386 lib/bitmap.c   	nchunks = nbits = totaldigits = c = 0;
nchunks           432 lib/bitmap.c   		if (nchunks == 0 && chunk == 0)
nchunks           437 lib/bitmap.c   		nchunks++;
nchunks           438 lib/bitmap.c   		nbits += (nchunks == 1) ? nbits_to_hold_value(chunk) : CHUNKSZ;
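In the __bitmap_parse() hits from lib/bitmap.c, the input is consumed as comma-separated 32-bit hex chunks: each accepted chunk shifts the accumulated mask left by CHUNKSZ, nchunks counts the chunks, and only the first (most significant) chunk contributes its exact width to nbits while every later one adds a full CHUNKSZ. A standalone sketch of that accounting, assuming well-formed hex input and a single 64-bit accumulator instead of a real bitmap (the user-copy, whitespace and overflow checks of the real function are left out):

#include <stdint.h>
#include <stdio.h>

#define CHUNKSZ 32

/* Width in bits needed to represent a nonzero value (0 for value 0). */
static int nbits_to_hold_value(uint32_t val)
{
    int bits = 0;

    while (val) {
        bits++;
        val >>= 1;
    }
    return bits;
}

int main(void)
{
    const char *in = "1f,ffffffff";   /* two 32-bit hex chunks */
    uint64_t mask = 0;
    int nchunks = 0, nbits = 0;

    for (const char *p = in; *p; ) {
        uint32_t chunk = 0;

        /* gather one comma-separated hex chunk */
        for (; *p && *p != ','; p++)
            chunk = (chunk << 4) |
                    (uint32_t)(*p <= '9' ? *p - '0' : (*p | 0x20) - 'a' + 10);
        if (*p == ',')
            p++;

        mask = (mask << CHUNKSZ) | chunk;
        nchunks++;
        nbits += (nchunks == 1) ? nbits_to_hold_value(chunk) : CHUNKSZ;
    }

    printf("mask=%#llx nchunks=%d nbits=%d\n",
           (unsigned long long)mask, nchunks, nbits);
    return 0;
}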
nchunks           764 net/sctp/auth.c 	__u16 nchunks;
nchunks           773 net/sctp/auth.c 	nchunks = param_len - sizeof(struct sctp_paramhdr);
nchunks           774 net/sctp/auth.c 	if (nchunks == SCTP_NUM_CHUNK_TYPES)
nchunks           777 net/sctp/auth.c 	p->chunks[nchunks] = chunk_id;
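sctp_auth_ep_add_chunkid() keeps no separate counter: nchunks is recomputed as the parameter length minus its header, the new chunk id is stored at that index after checking against SCTP_NUM_CHUNK_TYPES, and the parameter length is then bumped in a line not shown here. A sketch of that length-encoded array, using a hypothetical TLV-ish layout rather than the real struct sctp_chunks_param:

#include <stdint.h>
#include <stdio.h>

#define NUM_CHUNK_TYPES 256   /* illustrative capacity limit */

/* Hypothetical TLV-style parameter: length covers header + chunk id bytes. */
struct chunks_param {
    uint16_t type;
    uint16_t length;                   /* header + number of chunk ids */
    uint8_t  chunks[NUM_CHUNK_TYPES];
};

static int add_chunkid(struct chunks_param *p, uint8_t chunk_id)
{
    uint16_t nchunks = p->length - (uint16_t)(2 * sizeof(uint16_t));

    if (nchunks == NUM_CHUNK_TYPES)
        return -1;                     /* parameter already full */

    p->chunks[nchunks] = chunk_id;
    p->length++;                       /* one more chunk id byte */
    return 0;
}

int main(void)
{
    struct chunks_param p = { .type = 1, .length = 4 };   /* empty: header only */

    add_chunkid(&p, 10);
    add_chunkid(&p, 14);
    printf("ids stored: %u\n", p.length - 4);
    return 0;
}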
nchunks           448 net/sunrpc/xprtrdma/rpc_rdma.c 	int nsegs, nchunks;
nchunks           468 net/sunrpc/xprtrdma/rpc_rdma.c 	nchunks = 0;
nchunks           480 net/sunrpc/xprtrdma/rpc_rdma.c 		nchunks++;
nchunks           485 net/sunrpc/xprtrdma/rpc_rdma.c 	*segcount = cpu_to_be32(nchunks);
nchunks           511 net/sunrpc/xprtrdma/rpc_rdma.c 	int nsegs, nchunks;
nchunks           529 net/sunrpc/xprtrdma/rpc_rdma.c 	nchunks = 0;
nchunks           541 net/sunrpc/xprtrdma/rpc_rdma.c 		nchunks++;
nchunks           546 net/sunrpc/xprtrdma/rpc_rdma.c 	*segcount = cpu_to_be32(nchunks);
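In the rpc_rdma.c encoders, nchunks counts RDMA segments as they are written into the wire header; since the count is not known until the loop finishes, a 32-bit slot is reserved first and backfilled with cpu_to_be32(nchunks) at the end. A userspace sketch of that reserve-then-backfill idiom, with an invented segment layout and htonl() standing in for cpu_to_be32():

#include <arpa/inet.h>
#include <stdint.h>
#include <stdio.h>

/*
 * Reserve a 32-bit slot for the segment count, emit a variable number of
 * segments, then backfill the slot once nchunks is known.  The segment
 * contents here are invented purely for illustration.
 */
int main(void)
{
    uint32_t wire[16];
    uint32_t *segcount = &wire[0];   /* slot reserved before the loop */
    unsigned int pos = 1;
    int nsegs = 5, nchunks = 0;

    while (nsegs > 0 && pos + 2 <= 16) {
        wire[pos++] = htonl(0xdead0000u + nchunks);   /* fake handle */
        wire[pos++] = htonl(4096);                    /* fake length */
        nchunks++;
        nsegs--;
    }

    *segcount = htonl(nchunks);

    printf("encoded %d chunks, %u words\n", nchunks, pos);
    return 0;
}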
nchunks           677 sound/drivers/vx/vx_pcm.c 				     struct vx_pipe *pipe, int nchunks)
nchunks           684 sound/drivers/vx/vx_pcm.c 	for (i = 0; i < nchunks; i++) {