chunks            149 arch/mips/ar7/prom.c 	struct psp_env_chunk *chunks = (struct psp_env_chunk *)psp_env_data;
chunks            151 arch/mips/ar7/prom.c 	memcpy_fromio(chunks, psp_env_base, PSP_ENV_SIZE);
chunks            156 arch/mips/ar7/prom.c 		if ((chunks[i].num == 0xff) || ((i + chunks[i].len) > n))
chunks            158 arch/mips/ar7/prom.c 		value = chunks[i].data;
chunks            159 arch/mips/ar7/prom.c 		if (chunks[i].num) {
chunks            160 arch/mips/ar7/prom.c 			name = lookup_psp_var_map(chunks[i].num);
chunks            167 arch/mips/ar7/prom.c 		i += chunks[i].len;
chunks            285 arch/x86/kernel/cpu/resctrl/internal.h 	u64	chunks;
chunks            219 arch/x86/kernel/cpu/resctrl/monitor.c 	u64 shift = 64 - MBM_CNTR_WIDTH, chunks;
chunks            221 arch/x86/kernel/cpu/resctrl/monitor.c 	chunks = (cur_msr << shift) - (prev_msr << shift);
chunks            222 arch/x86/kernel/cpu/resctrl/monitor.c 	return chunks >>= shift;
chunks            228 arch/x86/kernel/cpu/resctrl/monitor.c 	u64 chunks, tval;
chunks            259 arch/x86/kernel/cpu/resctrl/monitor.c 	chunks = mbm_overflow_count(m->prev_msr, tval);
chunks            260 arch/x86/kernel/cpu/resctrl/monitor.c 	m->chunks += chunks;
chunks            263 arch/x86/kernel/cpu/resctrl/monitor.c 	rr->val += m->chunks;
chunks            275 arch/x86/kernel/cpu/resctrl/monitor.c 	u64 tval, cur_bw, chunks;
chunks            281 arch/x86/kernel/cpu/resctrl/monitor.c 	chunks = mbm_overflow_count(m->prev_bw_msr, tval);
chunks            282 arch/x86/kernel/cpu/resctrl/monitor.c 	m->chunks_bw += chunks;
chunks            283 arch/x86/kernel/cpu/resctrl/monitor.c 	m->chunks = m->chunks_bw;
chunks            284 arch/x86/kernel/cpu/resctrl/monitor.c 	cur_bw = (chunks * r->mon_scale) >> 20;
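
The arch/x86 resctrl hits above difference two samples of a bandwidth counter that is only MBM_CNTR_WIDTH bits wide. A minimal userspace sketch of the same shift trick, assuming a 24-bit width (the real width is hardware-defined):

#include <stdint.h>
#include <stdio.h>

/* Assumed stand-in for MBM_CNTR_WIDTH. */
#define CNTR_WIDTH 24

/* Wrap-safe delta of two samples of a CNTR_WIDTH-bit counter: shifting
 * both samples to the top of a u64 discards stale high bits, the
 * subtraction then wraps correctly, and shifting back yields the delta. */
static uint64_t overflow_count(uint64_t prev, uint64_t cur)
{
	unsigned int shift = 64 - CNTR_WIDTH;

	return ((cur << shift) - (prev << shift)) >> shift;
}

int main(void)
{
	/* counter wrapped past zero between samples: delta is still 0x20 */
	printf("0x%llx\n", (unsigned long long)overflow_count(0xfffff0, 0x10));
	return 0;
}
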
chunks            366 drivers/dma/ioat/dma.c 	int i, chunks;
chunks            373 drivers/dma/ioat/dma.c 	ioat_chan->desc_chunks = chunks = (total_descs * IOAT_DESC_SZ) / SZ_2M;
chunks            375 drivers/dma/ioat/dma.c 	for (i = 0; i < chunks; i++) {
chunks            434 drivers/dma/ioat/dma.c 		if (chunks == 1)
chunks             79 drivers/dma/sh/rcar-dmac.c 	struct list_head chunks;
chunks            107 drivers/dma/sh/rcar-dmac.c 		struct rcar_dmac_xfer_chunk chunks[0];
chunks            115 drivers/dma/sh/rcar-dmac.c 	((PAGE_SIZE - offsetof(struct rcar_dmac_desc_page, chunks)) /	\
chunks            351 drivers/dma/sh/rcar-dmac.c 			list_first_entry(&desc->chunks,
chunks            475 drivers/dma/sh/rcar-dmac.c 	desc->running = list_first_entry(&desc->chunks,
chunks            508 drivers/dma/sh/rcar-dmac.c 		INIT_LIST_HEAD(&desc->chunks);
chunks            539 drivers/dma/sh/rcar-dmac.c 	list_splice_tail_init(&desc->chunks, &chan->desc.chunks_free);
chunks            635 drivers/dma/sh/rcar-dmac.c 		struct rcar_dmac_xfer_chunk *chunk = &page->chunks[i];
chunks            733 drivers/dma/sh/rcar-dmac.c 	list_for_each_entry(chunk, &desc->chunks, node) {
chunks            991 drivers/dma/sh/rcar-dmac.c 			list_add_tail(&chunk->node, &desc->chunks);
chunks           1375 drivers/dma/sh/rcar-dmac.c 	list_for_each_entry_reverse(chunk, &desc->chunks, node) {
chunks           1505 drivers/dma/sh/rcar-dmac.c 		if (!list_is_last(&desc->running->node, &desc->chunks)) {
chunks           1518 drivers/dma/sh/rcar-dmac.c 				list_first_entry(&desc->chunks,
chunks             97 drivers/dma/sh/shdma-base.c 		if (chunk->chunks == 1) {
chunks            356 drivers/dma/sh/shdma-base.c 		if (desc->mark == DESC_COMPLETED && desc->chunks == 1) {
chunks            372 drivers/dma/sh/shdma-base.c 			BUG_ON(desc->chunks != 1);
chunks            567 drivers/dma/sh/shdma-base.c 	int chunks = 0;
chunks            572 drivers/dma/sh/shdma-base.c 		chunks += DIV_ROUND_UP(sg_dma_len(sg), schan->max_xfer_len);
chunks            612 drivers/dma/sh/shdma-base.c 				new->chunks = 1;
chunks            614 drivers/dma/sh/shdma-base.c 				new->chunks = chunks--;
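
The shdma-base hits above size a transfer by rounding every scatter-gather segment up to the controller's per-descriptor limit; the count is then handed out so that only the final descriptor carries chunks == 1, which is how completion handling spots the end of a transfer. A small sketch of the counting step, with made-up segment lengths and limit:

#include <stdio.h>

#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

int main(void)
{
	unsigned int seg_len[] = { 4096, 100, 70000 };	/* illustrative */
	unsigned int max_xfer_len = 65536;	/* assumed per-descriptor limit */
	unsigned int chunks = 0;
	size_t i;

	for (i = 0; i < sizeof(seg_len) / sizeof(seg_len[0]); i++)
		chunks += DIV_ROUND_UP(seg_len[i], max_xfer_len);

	printf("descriptors needed: %u\n", chunks);	/* 1 + 1 + 2 = 4 */
	return 0;
}
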
chunks            463 drivers/gpu/drm/amd/amdgpu/amdgpu.h 	struct amdgpu_cs_chunk	*chunks;
chunks            138 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	chunk_array_user = u64_to_user_ptr(cs->in.chunks);
chunks            146 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	p->chunks = kmalloc_array(p->nchunks, sizeof(struct amdgpu_cs_chunk),
chunks            148 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	if (!p->chunks) {
chunks            165 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		p->chunks[i].chunk_id = user_chunk.chunk_id;
chunks            166 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		p->chunks[i].length_dw = user_chunk.length_dw;
chunks            168 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		size = p->chunks[i].length_dw;
chunks            171 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		p->chunks[i].kdata = kvmalloc_array(size, sizeof(uint32_t), GFP_KERNEL);
chunks            172 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		if (p->chunks[i].kdata == NULL) {
chunks            178 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		if (copy_from_user(p->chunks[i].kdata, cdata, size)) {
chunks            183 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		switch (p->chunks[i].chunk_id) {
chunks            190 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 			if (p->chunks[i].length_dw * sizeof(uint32_t) < size) {
chunks            195 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 			ret = amdgpu_cs_user_fence_chunk(p, p->chunks[i].kdata,
chunks            204 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 			if (p->chunks[i].length_dw * sizeof(uint32_t) < size) {
chunks            209 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 			ret = amdgpu_cs_bo_handles_chunk(p, p->chunks[i].kdata);
chunks            251 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		kvfree(p->chunks[i].kdata);
chunks            252 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	kfree(p->chunks);
chunks            253 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	p->chunks = NULL;
chunks            773 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		kvfree(parser->chunks[i].kdata);
chunks            774 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	kfree(parser->chunks);
chunks            808 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 			chunk = &p->chunks[i];
chunks            955 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		chunk = &parser->chunks[i];
chunks           1219 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 		chunk = &p->chunks[i];
chunks           1063 drivers/gpu/drm/radeon/radeon.h 	struct radeon_cs_chunk	*chunks;
chunks            296 drivers/gpu/drm/radeon/radeon_cs.c 	chunk_array_ptr = (uint64_t *)(unsigned long)(cs->chunks);
chunks            303 drivers/gpu/drm/radeon/radeon_cs.c 	p->chunks = kcalloc(p->nchunks, sizeof(struct radeon_cs_chunk), GFP_KERNEL);
chunks            304 drivers/gpu/drm/radeon/radeon_cs.c 	if (p->chunks == NULL) {
chunks            317 drivers/gpu/drm/radeon/radeon_cs.c 		p->chunks[i].length_dw = user_chunk.length_dw;
chunks            319 drivers/gpu/drm/radeon/radeon_cs.c 			p->chunk_relocs = &p->chunks[i];
chunks            322 drivers/gpu/drm/radeon/radeon_cs.c 			p->chunk_ib = &p->chunks[i];
chunks            324 drivers/gpu/drm/radeon/radeon_cs.c 			if (p->chunks[i].length_dw == 0)
chunks            328 drivers/gpu/drm/radeon/radeon_cs.c 			p->chunk_const_ib = &p->chunks[i];
chunks            330 drivers/gpu/drm/radeon/radeon_cs.c 			if (p->chunks[i].length_dw == 0)
chunks            334 drivers/gpu/drm/radeon/radeon_cs.c 			p->chunk_flags = &p->chunks[i];
chunks            336 drivers/gpu/drm/radeon/radeon_cs.c 			if (p->chunks[i].length_dw == 0)
chunks            340 drivers/gpu/drm/radeon/radeon_cs.c 		size = p->chunks[i].length_dw;
chunks            342 drivers/gpu/drm/radeon/radeon_cs.c 		p->chunks[i].user_ptr = cdata;
chunks            351 drivers/gpu/drm/radeon/radeon_cs.c 		p->chunks[i].kdata = kvmalloc_array(size, sizeof(uint32_t), GFP_KERNEL);
chunks            353 drivers/gpu/drm/radeon/radeon_cs.c 		if (p->chunks[i].kdata == NULL) {
chunks            356 drivers/gpu/drm/radeon/radeon_cs.c 		if (copy_from_user(p->chunks[i].kdata, cdata, size)) {
chunks            360 drivers/gpu/drm/radeon/radeon_cs.c 			p->cs_flags = p->chunks[i].kdata[0];
chunks            361 drivers/gpu/drm/radeon/radeon_cs.c 			if (p->chunks[i].length_dw > 1)
chunks            362 drivers/gpu/drm/radeon/radeon_cs.c 				ring = p->chunks[i].kdata[1];
chunks            363 drivers/gpu/drm/radeon/radeon_cs.c 			if (p->chunks[i].length_dw > 2)
chunks            364 drivers/gpu/drm/radeon/radeon_cs.c 				priority = (s32)p->chunks[i].kdata[2];
chunks            453 drivers/gpu/drm/radeon/radeon_cs.c 		kvfree(parser->chunks[i].kdata);
chunks            454 drivers/gpu/drm/radeon/radeon_cs.c 	kfree(parser->chunks);
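
Both drm parsers above (amdgpu_cs.c and radeon_cs.c) share a shape: copy an array of user chunk descriptors, then validate each declared length before copying its payload into kernel memory. A hedged userspace sketch of that validation step; the chunk_hdr layout here is illustrative, not the actual drm ABI:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Illustrative descriptor: an id plus a payload length in 32-bit words,
 * loosely modeled on the chunk arrays in the listings above. */
struct chunk_hdr {
	uint32_t chunk_id;
	uint32_t length_dw;
};

/* Copy one chunk payload out of an untrusted buffer. The declared
 * length is bounds-checked against the buffer before anything moves. */
static uint32_t *copy_chunk(const uint8_t *buf, size_t buf_len,
			    size_t off, const struct chunk_hdr *hdr)
{
	size_t size = (size_t)hdr->length_dw * sizeof(uint32_t);
	uint32_t *kdata;

	if (off > buf_len || size > buf_len - off)
		return NULL;	/* declared length overruns the buffer */

	kdata = malloc(size);
	if (!kdata)
		return NULL;
	memcpy(kdata, buf + off, size);
	return kdata;
}

int main(void)
{
	uint8_t buf[16] = { 0 };
	struct chunk_hdr ok = { 1, 2 }, bad = { 1, 100 };
	uint32_t *k = copy_chunk(buf, sizeof(buf), 0, &ok);

	printf("ok=%d bad=%d\n", k != NULL,
	       copy_chunk(buf, sizeof(buf), 0, &bad) != NULL);
	free(k);
	return 0;
}
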
chunks             94 drivers/infiniband/hw/efa/efa_verbs.c 	struct pbl_chunk *chunks;
chunks           1110 drivers/infiniband/hw/efa/efa_verbs.c 	chunk_list->chunks = kcalloc(chunk_list_size,
chunks           1111 drivers/infiniband/hw/efa/efa_verbs.c 				     sizeof(*chunk_list->chunks),
chunks           1113 drivers/infiniband/hw/efa/efa_verbs.c 	if (!chunk_list->chunks)
chunks           1122 drivers/infiniband/hw/efa/efa_verbs.c 		chunk_list->chunks[i].buf = kzalloc(EFA_CHUNK_SIZE, GFP_KERNEL);
chunks           1123 drivers/infiniband/hw/efa/efa_verbs.c 		if (!chunk_list->chunks[i].buf)
chunks           1126 drivers/infiniband/hw/efa/efa_verbs.c 		chunk_list->chunks[i].length = EFA_CHUNK_USED_SIZE;
chunks           1128 drivers/infiniband/hw/efa/efa_verbs.c 	chunk_list->chunks[chunk_list_size - 1].length =
chunks           1135 drivers/infiniband/hw/efa/efa_verbs.c 	cur_chunk_buf = chunk_list->chunks[0].buf;
chunks           1143 drivers/infiniband/hw/efa/efa_verbs.c 			cur_chunk_buf = chunk_list->chunks[chunk_idx].buf;
chunks           1151 drivers/infiniband/hw/efa/efa_verbs.c 					  chunk_list->chunks[i].buf,
chunks           1152 drivers/infiniband/hw/efa/efa_verbs.c 					  chunk_list->chunks[i].length,
chunks           1160 drivers/infiniband/hw/efa/efa_verbs.c 		chunk_list->chunks[i].dma_addr = dma_addr;
chunks           1167 drivers/infiniband/hw/efa/efa_verbs.c 		prev_chunk_buf = chunk_list->chunks[i - 1].buf;
chunks           1171 drivers/infiniband/hw/efa/efa_verbs.c 		ctrl_buf->length = chunk_list->chunks[i].length;
chunks           1182 drivers/infiniband/hw/efa/efa_verbs.c 		dma_unmap_single(&dev->pdev->dev, chunk_list->chunks[i].dma_addr,
chunks           1183 drivers/infiniband/hw/efa/efa_verbs.c 				 chunk_list->chunks[i].length, DMA_TO_DEVICE);
chunks           1187 drivers/infiniband/hw/efa/efa_verbs.c 		kfree(chunk_list->chunks[i].buf);
chunks           1189 drivers/infiniband/hw/efa/efa_verbs.c 	kfree(chunk_list->chunks);
chunks           1199 drivers/infiniband/hw/efa/efa_verbs.c 		dma_unmap_single(&dev->pdev->dev, chunk_list->chunks[i].dma_addr,
chunks           1200 drivers/infiniband/hw/efa/efa_verbs.c 				 chunk_list->chunks[i].length, DMA_TO_DEVICE);
chunks           1201 drivers/infiniband/hw/efa/efa_verbs.c 		kfree(chunk_list->chunks[i].buf);
chunks           1204 drivers/infiniband/hw/efa/efa_verbs.c 	kfree(chunk_list->chunks);
chunks           1381 drivers/infiniband/hw/efa/efa_verbs.c 			pbl->phys.indirect.chunk_list.chunks[0].length;
chunks           1383 drivers/infiniband/hw/efa/efa_verbs.c 		efa_com_set_dma_addr(pbl->phys.indirect.chunk_list.chunks[0].dma_addr,
chunks             45 drivers/infiniband/hw/usnic/usnic_vnic.c 	struct usnic_vnic_res_chunk	chunks[USNIC_VNIC_RES_TYPE_MAX];
chunks            118 drivers/infiniband/hw/usnic/usnic_vnic.c 	for (i = 0; i < ARRAY_SIZE(vnic->chunks); i++) {
chunks            119 drivers/infiniband/hw/usnic/usnic_vnic.c 		chunk = &vnic->chunks[i];
chunks            223 drivers/infiniband/hw/usnic/usnic_vnic.c 	return vnic->chunks[type].cnt;
chunks            229 drivers/infiniband/hw/usnic/usnic_vnic.c 	return vnic->chunks[type].free_cnt;
chunks            255 drivers/infiniband/hw/usnic/usnic_vnic.c 		src = &vnic->chunks[type];
chunks            287 drivers/infiniband/hw/usnic/usnic_vnic.c 			vnic->chunks[res->type].free_cnt++;
chunks            383 drivers/infiniband/hw/usnic/usnic_vnic.c 						&vnic->chunks[res_type]);
chunks            392 drivers/infiniband/hw/usnic/usnic_vnic.c 		usnic_vnic_free_res_chunk(&vnic->chunks[res_type]);
chunks            428 drivers/infiniband/hw/usnic/usnic_vnic.c 		usnic_vnic_free_res_chunk(&vnic->chunks[res_type]);
chunks            779 drivers/md/md-bitmap.c 				   unsigned long chunks, int with_super,
chunks            786 drivers/md/md-bitmap.c 	bytes = DIV_ROUND_UP(chunks, 8);
chunks           1061 drivers/md/md-bitmap.c 	unsigned long i, chunks, index, oldindex, bit, node_offset = 0;
chunks           1071 drivers/md/md-bitmap.c 	chunks = bitmap->counts.chunks;
chunks           1078 drivers/md/md-bitmap.c 		for (i = 0; i < chunks ; i++) {
chunks           1109 drivers/md/md-bitmap.c 	for (i = 0; i < chunks; i++) {
chunks           1173 drivers/md/md-bitmap.c 		 bit_cnt, chunks);
chunks           1293 drivers/md/md-bitmap.c 	for (j = 0; j < counts->chunks; j++) {
chunks           1996 drivers/md/md-bitmap.c 	for (j = 0; j < counts->chunks; j++) {
chunks           2068 drivers/md/md-bitmap.c 	unsigned long chunks;
chunks           2092 drivers/md/md-bitmap.c 			bytes = DIV_ROUND_UP(bitmap->counts.chunks, 8);
chunks           2103 drivers/md/md-bitmap.c 			chunks = DIV_ROUND_UP_SECTOR_T(blocks, 1 << chunkshift);
chunks           2104 drivers/md/md-bitmap.c 			bytes = DIV_ROUND_UP(chunks, 8);
chunks           2111 drivers/md/md-bitmap.c 	chunks = DIV_ROUND_UP_SECTOR_T(blocks, 1 << chunkshift);
chunks           2114 drivers/md/md-bitmap.c 		ret = md_bitmap_storage_alloc(&store, chunks,
chunks           2123 drivers/md/md-bitmap.c 	pages = DIV_ROUND_UP(chunks, PAGE_COUNTER_RATIO);
chunks           2151 drivers/md/md-bitmap.c 	bitmap->counts.chunks = chunks;
chunks           2155 drivers/md/md-bitmap.c 	blocks = min(old_counts.chunks << old_counts.chunkshift,
chunks           2156 drivers/md/md-bitmap.c 		     chunks << chunkshift);
chunks           2177 drivers/md/md-bitmap.c 				bitmap->counts.chunks = old_counts.chunks;
chunks           2180 drivers/md/md-bitmap.c 				blocks = old_counts.chunks << old_counts.chunkshift;
chunks           2227 drivers/md/md-bitmap.c 		while (block < (chunks << chunkshift)) {
chunks            191 drivers/md/md-bitmap.h 		unsigned long chunks;		/* Total number of data
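
The md-bitmap hits above size on-disk storage as one bit per counter chunk, with the chunk count derived from the device size and chunkshift. A sketch of that arithmetic with illustrative values (the kernel uses DIV_ROUND_UP_SECTOR_T for the sector_t division):

#include <stdio.h>

#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

int main(void)
{
	unsigned long long blocks = 1ULL << 31;	/* device size in sectors */
	unsigned int chunkshift = 16;		/* 2^16 sectors per chunk */
	unsigned long long chunks = DIV_ROUND_UP(blocks, 1ULL << chunkshift);
	unsigned long long bytes = DIV_ROUND_UP(chunks, 8);	/* 1 bit/chunk */

	printf("chunks=%llu bitmap bytes=%llu\n", chunks, bytes);
	return 0;
}
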
chunks           2839 drivers/md/raid10.c 	int extra_chunk, chunks;
chunks           2853 drivers/md/raid10.c 	chunks = conf->geo.raid_disks / conf->geo.near_copies;
chunks           2858 drivers/md/raid10.c 	window_size = (chunks + extra_chunk) * conf->mddev->chunk_sectors;
chunks            465 drivers/misc/habanalabs/command_submission.c static int _hl_cs_ioctl(struct hl_fpriv *hpriv, void __user *chunks,
chunks            495 drivers/misc/habanalabs/command_submission.c 	if (copy_from_user(cs_chunk_array, chunks, size_to_copy)) {
chunks            615 drivers/misc/habanalabs/command_submission.c 	void __user *chunks;
chunks            634 drivers/misc/habanalabs/command_submission.c 		chunks = (void __user *)(uintptr_t)args->in.chunks_restore;
chunks            668 drivers/misc/habanalabs/command_submission.c 			rc = _hl_cs_ioctl(hpriv, chunks, num_chunks,
chunks            710 drivers/misc/habanalabs/command_submission.c 	chunks = (void __user *)(uintptr_t)args->in.chunks_execute;
chunks            721 drivers/misc/habanalabs/command_submission.c 	rc = _hl_cs_ioctl(hpriv, chunks, num_chunks, &cs_seq);
chunks            660 drivers/mtd/nand/raw/davinci_nand.c 		int chunks = mtd->writesize / 512;
chunks            662 drivers/mtd/nand/raw/davinci_nand.c 		if (!chunks || mtd->oobsize < 16) {
chunks            671 drivers/mtd/nand/raw/davinci_nand.c 		if (chunks == 1) {
chunks            673 drivers/mtd/nand/raw/davinci_nand.c 		} else if (chunks == 4 || chunks == 8) {
chunks            504 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 	} *chunks;
chunks            516 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 	chunks = kzalloc(array_size(sizeof(*chunks), nseg), GFP_KERNEL);
chunks            517 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 	if (!chunks)
chunks            525 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 		chunks[i].chunk = kmalloc(chunk_size,
chunks            527 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 		if (!chunks[i].chunk)
chunks            530 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 		chunks[i].len = min_t(u64, chunk_size, max_size - off);
chunks            535 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 			memcpy(chunks[i].chunk, arg->in_buf + off, coff);
chunks            537 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 		memset(chunks[i].chunk + coff, 0, chunk_size - coff);
chunks            539 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 		off += chunks[i].len;
chunks            547 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 		addr = dma_map_single(dev, chunks[i].chunk, chunks[i].len,
chunks            549 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 		chunks[i].dma_addr = addr;
chunks            556 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 			      round_down(addr + chunks[i].len - 1, dma_size),
chunks            558 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 			      &addr, chunks[i].len, dma_size)) {
chunks            574 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 		desc->descs[i].size = cpu_to_le32(chunks[i].len);
chunks            575 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 		desc->descs[i].addr = cpu_to_le64(chunks[i].dma_addr);
chunks            594 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 		len = min_t(u64, chunks[i].len, arg->out_size - off);
chunks            595 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 		memcpy(arg->out_buf + off, chunks[i].chunk, len);
chunks            608 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 		dma_unmap_single(dev, chunks[i].dma_addr, chunks[i].len,
chunks            613 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 		kfree(chunks[i].chunk);
chunks            614 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c 	kfree(chunks);
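
The nfp_nsp hits above split a flat command buffer across fixed-size chunks (each then DMA-mapped and described to the device) and gather the response back out. A sketch of just the split and gather, with an illustrative chunk size and error handling elided:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define CHUNK_SIZE 8	/* illustrative; the driver computes this */

int main(void)
{
	const char in[] = "split me across several chunks";
	size_t len = sizeof(in), off = 0, i;
	size_t nseg = (len + CHUNK_SIZE - 1) / CHUNK_SIZE;
	char **chunks = calloc(nseg, sizeof(*chunks));
	char out[sizeof(in)];

	/* scatter: copy the input across fixed-size chunk buffers */
	for (i = 0; i < nseg; i++) {
		size_t n = len - off < CHUNK_SIZE ? len - off : CHUNK_SIZE;

		chunks[i] = malloc(CHUNK_SIZE);
		memcpy(chunks[i], in + off, n);
		off += n;
	}

	/* gather: reassemble, as the driver does into arg->out_buf */
	off = 0;
	for (i = 0; i < nseg; i++) {
		size_t n = len - off < CHUNK_SIZE ? len - off : CHUNK_SIZE;

		memcpy(out + off, chunks[i], n);
		off += n;
		free(chunks[i]);
	}
	free(chunks);
	puts(out);
	return 0;
}
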
chunks           1687 drivers/net/wireless/ath/ath10k/wmi-tlv.c 	void *chunks;
chunks           1720 drivers/net/wireless/ath/ath10k/wmi-tlv.c 	chunks = (void *)tlv->value;
chunks           1795 drivers/net/wireless/ath/ath10k/wmi-tlv.c 	ath10k_wmi_tlv_put_host_mem_chunks(ar, chunks);
chunks           6424 drivers/net/wireless/ath/ath10k/wmi.c 				    struct wmi_host_mem_chunks *chunks)
chunks           6429 drivers/net/wireless/ath/ath10k/wmi.c 	chunks->count = __cpu_to_le32(ar->wmi.num_mem_chunks);
chunks           6432 drivers/net/wireless/ath/ath10k/wmi.c 		chunk = &chunks->items[i];
chunks           7310 drivers/net/wireless/ath/ath10k/wmi.h 				    struct wmi_host_mem_chunks *chunks);
chunks            193 drivers/net/wireless/ath/carl9170/tx.c 	unsigned int chunks;
chunks            198 drivers/net/wireless/ath/carl9170/tx.c 	chunks = DIV_ROUND_UP(skb->len, ar->fw.mem_block_size);
chunks            199 drivers/net/wireless/ath/carl9170/tx.c 	if (unlikely(atomic_sub_return(chunks, &ar->mem_free_blocks) < 0)) {
chunks            200 drivers/net/wireless/ath/carl9170/tx.c 		atomic_add(chunks, &ar->mem_free_blocks);
chunks            209 drivers/net/wireless/ath/carl9170/tx.c 		atomic_add(chunks, &ar->mem_free_blocks);
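
The carl9170 hits above reserve device memory blocks optimistically: subtract first, then roll back with an add if the pool went negative. A sketch with C11 atomics; atomic_fetch_sub() returns the old value, so the new count that the kernel's atomic_sub_return() would give is recomputed by hand:

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))

static atomic_int mem_free_blocks = 32;	/* assumed pool size */

/* Reserve the blocks a frame needs, backing out on failure. */
static bool reserve_mem_blocks(unsigned int len, unsigned int block_size)
{
	int chunks = DIV_ROUND_UP(len, block_size);

	if (atomic_fetch_sub(&mem_free_blocks, chunks) - chunks < 0) {
		atomic_fetch_add(&mem_free_blocks, chunks);
		return false;
	}
	return true;
}

int main(void)
{
	printf("%d\n", reserve_mem_blocks(4000, 128));	/* 32 blocks: ok */
	printf("%d\n", reserve_mem_blocks(1, 128));	/* pool empty: fails */
	return 0;
}
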
chunks            237 drivers/net/wireless/ti/wlcore/boot.c 	u32 chunks, addr, len;
chunks            242 drivers/net/wireless/ti/wlcore/boot.c 	chunks = be32_to_cpup((__be32 *) fw);
chunks            245 drivers/net/wireless/ti/wlcore/boot.c 	wl1271_debug(DEBUG_BOOT, "firmware chunks to be uploaded: %u", chunks);
chunks            247 drivers/net/wireless/ti/wlcore/boot.c 	while (chunks--) {
chunks            258 drivers/net/wireless/ti/wlcore/boot.c 			     chunks, addr, len);
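
The wlcore loader above reads a big-endian chunk count and then uploads (addr, len) chunks one by one. A sketch of walking such an image; the record layout is inferred from the listing, not a verified wl1271 format:

#include <stdint.h>
#include <stdio.h>

/* Decode a big-endian u32 from a byte stream. */
static uint32_t be32(const uint8_t *p)
{
	return ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
	       ((uint32_t)p[2] << 8) | (uint32_t)p[3];
}

static void parse_fw(const uint8_t *fw)
{
	uint32_t chunks = be32(fw);
	const uint8_t *p = fw + 4;

	while (chunks--) {
		uint32_t addr = be32(p);
		uint32_t len = be32(p + 4);

		printf("chunk @0x%08x, %u bytes\n", addr, len);
		p += 8 + len;	/* skip header and payload */
	}
}

int main(void)
{
	/* one chunk: addr 0x1000, len 4, payload de ad be ef */
	uint8_t img[] = { 0,0,0,1, 0,0,0x10,0, 0,0,0,4, 0xde,0xad,0xbe,0xef };

	parse_fw(img);
	return 0;
}
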
chunks           1177 drivers/nvme/host/lightnvm.c static NVM_DEV_ATTR_20_RO(chunks);
chunks            326 drivers/video/fbdev/matrox/matroxfb_base.c 	pos += minfo->curr.ydstorg.chunks;
chunks            777 drivers/video/fbdev/matrox/matroxfb_base.c 		minfo->curr.ydstorg.chunks = ydstorg >> (isInterleave(minfo) ? 3 : 2);
chunks            812 drivers/video/fbdev/matrox/matroxfb_base.c 			pos += minfo->curr.ydstorg.chunks;
chunks            237 drivers/video/fbdev/matrox/matroxfb_base.h 		unsigned int chunks;
chunks            355 drivers/virt/vboxguest/vboxguest_core.c 	u32 i, chunks;
chunks            383 drivers/virt/vboxguest/vboxguest_core.c 	chunks = req->balloon_chunks;
chunks            384 drivers/virt/vboxguest/vboxguest_core.c 	if (chunks > gdev->mem_balloon.max_chunks) {
chunks            386 drivers/virt/vboxguest/vboxguest_core.c 			__func__, chunks, gdev->mem_balloon.max_chunks);
chunks            390 drivers/virt/vboxguest/vboxguest_core.c 	if (chunks > gdev->mem_balloon.chunks) {
chunks            392 drivers/virt/vboxguest/vboxguest_core.c 		for (i = gdev->mem_balloon.chunks; i < chunks; i++) {
chunks            397 drivers/virt/vboxguest/vboxguest_core.c 			gdev->mem_balloon.chunks++;
chunks            401 drivers/virt/vboxguest/vboxguest_core.c 		for (i = gdev->mem_balloon.chunks; i-- > chunks;) {
chunks            406 drivers/virt/vboxguest/vboxguest_core.c 			gdev->mem_balloon.chunks--;
chunks           1466 drivers/virt/vboxguest/vboxguest_core.c 	balloon_info->u.out.balloon_chunks = gdev->mem_balloon.chunks;
chunks             29 drivers/virt/vboxguest/vboxguest_core.h 	u32 chunks;
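
The vboxguest hits above move the balloon toward a host-requested chunk count, inflating or deflating one chunk at a time and bumping mem_balloon.chunks only after each step succeeds. A sketch of that walk; inflate() and deflate() are hypothetical stand-ins for the real request code:

#include <stdbool.h>
#include <stdio.h>

static unsigned int balloon_chunks;

static bool inflate(unsigned int i) { printf("inflate %u\n", i); return true; }
static bool deflate(unsigned int i) { printf("deflate %u\n", i); return true; }

static void set_balloon_size(unsigned int chunks)
{
	unsigned int i;

	if (chunks > balloon_chunks) {
		for (i = balloon_chunks; i < chunks; i++) {
			if (!inflate(i))
				break;
			balloon_chunks++;
		}
	} else {
		/* deflate from the top down, as the kernel loop does */
		for (i = balloon_chunks; i-- > chunks;) {
			if (!deflate(i))
				break;
			balloon_chunks--;
		}
	}
}

int main(void)
{
	set_balloon_size(3);
	set_balloon_size(1);
	return 0;
}
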
chunks            378 fs/btrfs/inode.c 	struct async_chunk chunks[];
chunks           1235 fs/btrfs/inode.c 	ctx = kvmalloc(struct_size(ctx, chunks, num_chunks), GFP_KERNEL);
chunks           1251 fs/btrfs/inode.c 	async_chunk = ctx->chunks;
chunks            346 fs/ocfs2/quota_local.c 	int i, chunks = le32_to_cpu(ldinfo->dqi_chunks);
chunks            349 fs/ocfs2/quota_local.c 	for (i = 0; i < chunks; i++) {
chunks            732 fs/xfs/xfs_buf_item.c 	int			chunks;
chunks            772 fs/xfs/xfs_buf_item.c 		chunks = DIV_ROUND_UP(BBTOB(bp->b_maps[i].bm_len),
chunks            774 fs/xfs/xfs_buf_item.c 		map_size = DIV_ROUND_UP(chunks, NBWORD);
chunks             60 include/linux/genalloc.h 	struct list_head chunks;	/* list of chunks in this pool */
chunks            320 include/linux/sctp.h 	__u8 chunks[0];
chunks            332 include/linux/sctp.h 	__u8 chunks[0];
chunks             52 include/linux/shdma-base.h 	int chunks;
chunks           4110 include/linux/skbuff.h 	u8 chunks;		/* same */
chunks            364 include/net/sctp/structs.h 	struct sctp_chunks_param *chunks;
chunks            522 include/net/sctp/structs.h 	struct list_head chunks;
chunks            557 include/uapi/drm/amdgpu_drm.h 	__u64		chunks;
chunks            987 include/uapi/drm/radeon_drm.h 	__u64		chunks;
chunks             17 kernel/audit_tree.c 	struct list_head chunks;
chunks            101 kernel/audit_tree.c 		INIT_LIST_HEAD(&tree->chunks);
chunks            437 kernel/audit_tree.c 	list_add(&chunk->owners[0].list, &tree->chunks);
chunks            509 kernel/audit_tree.c 	list_add(&p->list, &tree->chunks);
chunks            574 kernel/audit_tree.c 	while (!list_empty(&victim->chunks)) {
chunks            579 kernel/audit_tree.c 		p = list_first_entry(&victim->chunks, struct node, list);
chunks            620 kernel/audit_tree.c 	for (p = tree->chunks.next; p != &tree->chunks; p = q) {
chunks            625 kernel/audit_tree.c 			list_add(p, &tree->chunks);
chunks            708 kernel/audit_tree.c 		list_for_each_entry(node, &tree->chunks, list) {
chunks            847 kernel/audit_tree.c 		list_for_each_entry(node, &tree->chunks, list)
chunks            949 kernel/audit_tree.c 			list_for_each_entry(node, &tree->chunks, list)
chunks            936 lib/debugobjects.c 	unsigned long flags, oaddr, saddr, eaddr, paddr, chunks;
chunks            947 lib/debugobjects.c 	chunks = ((eaddr - paddr) + (ODEBUG_CHUNK_SIZE - 1));
chunks            948 lib/debugobjects.c 	chunks >>= ODEBUG_CHUNK_SHIFT;
chunks            950 lib/debugobjects.c 	for (;chunks > 0; chunks--, paddr += ODEBUG_CHUNK_SIZE) {
chunks            158 lib/genalloc.c 		INIT_LIST_HEAD(&pool->chunks);
chunks            201 lib/genalloc.c 	list_add_rcu(&chunk->next_chunk, &pool->chunks);
chunks            221 lib/genalloc.c 	list_for_each_entry_rcu(chunk, &pool->chunks, next_chunk) {
chunks            247 lib/genalloc.c 	list_for_each_safe(_chunk, _next_chunk, &pool->chunks) {
chunks            295 lib/genalloc.c 	list_for_each_entry_rcu(chunk, &pool->chunks, next_chunk) {
chunks            501 lib/genalloc.c 	list_for_each_entry_rcu(chunk, &pool->chunks, next_chunk) {
chunks            536 lib/genalloc.c 	list_for_each_entry_rcu(chunk, &(pool)->chunks, next_chunk)
chunks            559 lib/genalloc.c 	list_for_each_entry_rcu(chunk, &(pool)->chunks, next_chunk) {
chunks            583 lib/genalloc.c 	list_for_each_entry_rcu(chunk, &pool->chunks, next_chunk)
chunks            602 lib/genalloc.c 	list_for_each_entry_rcu(chunk, &pool->chunks, next_chunk)
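
The lib/genalloc hits above keep a pool as a list of address-range chunks and answer queries by walking pool->chunks. A sketch of the walk, without the kernel's RCU protection or allocation bitmaps:

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

struct chunk {
	unsigned long start, end;	/* inclusive address range */
	struct chunk *next;
};

struct pool {
	struct chunk *chunks;		/* list of chunks in this pool */
};

/* Return true if addr falls inside any chunk owned by the pool. */
static bool pool_has_addr(const struct pool *p, unsigned long addr)
{
	const struct chunk *c;

	for (c = p->chunks; c; c = c->next)
		if (addr >= c->start && addr <= c->end)
			return true;
	return false;
}

int main(void)
{
	struct chunk c2 = { 0x9000, 0x9fff, NULL };
	struct chunk c1 = { 0x1000, 0x1fff, &c2 };
	struct pool p = { &c1 };

	printf("%d %d\n", pool_has_addr(&p, 0x1800), pool_has_addr(&p, 0x5000));
	return 0;
}
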
chunks            394 lib/scatterlist.c 	unsigned int chunks, cur_page, seg_len, i;
chunks            402 lib/scatterlist.c 	chunks = 1;
chunks            408 lib/scatterlist.c 			chunks++;
chunks            413 lib/scatterlist.c 	ret = sg_alloc_table(sgt, chunks, gfp_mask);
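
The lib/scatterlist hits above count how many table entries a page array needs by starting with one chunk and opening a new one at every physical discontinuity (the real code also splits on a maximum segment size, omitted here). A sketch using page frame numbers:

#include <stdio.h>

/* Physically consecutive pages share one entry; each gap starts a new one. */
static unsigned int count_chunks(const unsigned long *pfn, unsigned int n)
{
	unsigned int i, chunks = 1;

	for (i = 1; i < n; i++)
		if (pfn[i] != pfn[i - 1] + 1)
			chunks++;
	return chunks;
}

int main(void)
{
	unsigned long pfn[] = { 10, 11, 12, 40, 41, 99 };

	printf("%u\n", count_chunks(pfn, 6));	/* 3 contiguous runs */
	return 0;
}
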
chunks            661 mm/z3fold.c    	int chunks = size_to_chunks(size), i;
chunks            666 mm/z3fold.c    	for_each_unbuddied_list(i, chunks) {
chunks            722 mm/z3fold.c    			l = &unbuddied[chunks];
chunks            867 mm/z3fold.c    	int chunks = size_to_chunks(size);
chunks            887 mm/z3fold.c    				    chunks >= zhdr->start_middle)
chunks            961 mm/z3fold.c    		zhdr->first_chunks = chunks;
chunks            963 mm/z3fold.c    		zhdr->last_chunks = chunks;
chunks            965 mm/z3fold.c    		zhdr->middle_chunks = chunks;
chunks            357 mm/zbud.c      	int chunks, i, freechunks;
chunks            366 mm/zbud.c      	chunks = size_to_chunks(size);
chunks            371 mm/zbud.c      	for_each_unbuddied_list(i, chunks) {
chunks            396 mm/zbud.c      		zhdr->first_chunks = chunks;
chunks            398 mm/zbud.c      		zhdr->last_chunks = chunks;
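
The z3fold and zbud hits above express object sizes in fixed chunks and keep "unbuddied" free lists indexed by how many free chunks a page has, so a fit is found by scanning upward from the requested size. A sketch with an assumed 64-byte chunk:

#include <stdio.h>

#define CHUNK_SHIFT 6			/* assumed: 64-byte chunks */
#define CHUNK_SIZE  (1u << CHUNK_SHIFT)
#define NCHUNKS     (4096 / CHUNK_SIZE)	/* chunks per 4 KiB page */

/* Round an object size up to whole chunks, as size_to_chunks() does. */
static unsigned int size_to_chunks(unsigned int size)
{
	return (size + CHUNK_SIZE - 1) >> CHUNK_SHIFT;
}

/* Scan the free lists from the needed size upward, the way
 * for_each_unbuddied_list() does: index i holds pages with exactly
 * i free chunks, so the first non-empty list >= chunks fits. */
static int find_unbuddied(const unsigned int nfree[], unsigned int chunks)
{
	unsigned int i;

	for (i = chunks; i < NCHUNKS; i++)
		if (nfree[i])
			return (int)i;
	return -1;
}

int main(void)
{
	unsigned int nfree[NCHUNKS] = { 0 };

	nfree[10] = 1;	/* one page with 10 free chunks */
	printf("%u -> list %d\n", size_to_chunks(500),
	       find_unbuddied(nfree, size_to_chunks(500)));
	return 0;
}
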
chunks           6003 net/core/skbuff.c 	memcpy(new, old, old->chunks * SKB_EXT_ALIGN_VALUE);
chunks           6048 net/core/skbuff.c 		newoff = new->chunks;
chunks           6058 net/core/skbuff.c 	new->chunks = newlen;
chunks            186 net/sctp/auth.c 			struct sctp_chunks_param *chunks,
chunks            197 net/sctp/auth.c 	if (chunks)
chunks            198 net/sctp/auth.c 		chunks_len = ntohs(chunks->param_hdr.length);
chunks            209 net/sctp/auth.c 	if (chunks) {
chunks            210 net/sctp/auth.c 		memcpy(new->data + offset, chunks, chunks_len);
chunks            655 net/sctp/auth.c 		switch (param->chunks[i]) {
chunks            663 net/sctp/auth.c 			if (param->chunks[i] == chunk)
chunks            777 net/sctp/auth.c 	p->chunks[nchunks] = chunk_id;
chunks             43 net/sctp/chunk.c 	INIT_LIST_HEAD(&msg->chunks);
chunks             65 net/sctp/chunk.c 	list_for_each_entry(chunk, &msg->chunks, frag_list)
chunks             84 net/sctp/chunk.c 	list_for_each_safe(pos, temp, &msg->chunks) {
chunks            282 net/sctp/chunk.c 		list_add_tail(&chunk->frag_list, &msg->chunks);
chunks            291 net/sctp/chunk.c 	list_for_each_safe(pos, temp, &msg->chunks) {
chunks           1552 net/sctp/sm_make_chunk.c 	list_for_each_entry(lchunk, &msg->chunks, frag_list) {
chunks           1983 net/sctp/sm_make_chunk.c 		switch (param.ext->chunks[i]) {
chunks           2015 net/sctp/sm_make_chunk.c 		switch (param.ext->chunks[i]) {
chunks           1098 net/sctp/sm_sideeffect.c 	list_for_each_entry(chunk, &msg->chunks, frag_list)
chunks            155 net/sctp/socket.c 	list_for_each_entry(c, &msg->chunks, frag_list) {	\
chunks           1868 net/sctp/socket.c 	list_for_each_entry(chunk, &datamsg->chunks, frag_list) {
chunks           7015 net/sctp/socket.c 	if (copy_to_user(to, ch->chunks, num_chunks))
chunks           7065 net/sctp/socket.c 	if (copy_to_user(to, ch->chunks, num_chunks))
chunks             61 net/sctp/stream_interleave.c 	list_for_each_entry(lchunk, &chunk->msg->chunks, frag_list) {
chunks            230 net/sctp/stream_sched.c 	if (!list_is_last(&ch->frag_list, &ch->msg->chunks) &&
chunks            240 net/sctp/stream_sched_prio.c 	ch = list_first_entry(&msg->chunks, struct sctp_chunk, frag_list);
chunks            105 net/sctp/stream_sched_rr.c 	ch = list_first_entry(&msg->chunks, struct sctp_chunk, frag_list);
chunks            345 net/xdp/xdp_umem.c 	unsigned int chunks, chunks_per_page;
chunks            379 net/xdp/xdp_umem.c 	chunks = (unsigned int)div_u64(size, chunk_size);
chunks            380 net/xdp/xdp_umem.c 	if (chunks == 0)
chunks            385 net/xdp/xdp_umem.c 		if (chunks < chunks_per_page || chunks % chunks_per_page)
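
The xdp_umem hits above validate a UMEM layout: the area must contain at least one chunk, and chunks must pack evenly into pages. A sketch of those checks with an assumed 4 KiB page:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define PAGE_SIZE 4096u

static bool umem_layout_ok(uint64_t size, unsigned int chunk_size)
{
	unsigned int chunks = (unsigned int)(size / chunk_size);
	unsigned int chunks_per_page = PAGE_SIZE / chunk_size;

	if (chunks == 0)
		return false;	/* area too small for even one chunk */
	if (chunks < chunks_per_page || chunks % chunks_per_page)
		return false;	/* chunks must fill whole pages */
	return true;
}

int main(void)
{
	printf("%d\n", umem_layout_ok(64 * 4096, 2048));	/* ok */
	printf("%d\n", umem_layout_ok(4096 + 2048, 2048));	/* rejected */
	return 0;
}
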