chunk_size 204 arch/arm/mm/cache-uniphier.c unsigned long chunk_size = min_t(unsigned long, size,
chunk_size 207 arch/arm/mm/cache-uniphier.c __uniphier_cache_maint_common(data, start, chunk_size,
chunk_size 210 arch/arm/mm/cache-uniphier.c start += chunk_size;
chunk_size 211 arch/arm/mm/cache-uniphier.c size -= chunk_size;
chunk_size 474 arch/x86/kernel/cpu/mtrr/cleanup.c u64 chunk_size, u64 gran_size)
chunk_size 483 arch/x86/kernel/cpu/mtrr/cleanup.c var_state.chunk_sizek = chunk_size >> 10;
chunk_size 590 arch/x86/kernel/cpu/mtrr/cleanup.c mtrr_calc_range_state(u64 chunk_size, u64 gran_size,
chunk_size 607 arch/x86/kernel/cpu/mtrr/cleanup.c num_reg = x86_setup_var_mtrrs(range, nr_range, chunk_size, gran_size);
chunk_size 615 arch/x86/kernel/cpu/mtrr/cleanup.c result[i].chunk_sizek = chunk_size >> 10;
chunk_size 687 arch/x86/kernel/cpu/mtrr/cleanup.c u64 chunk_size, gran_size;
chunk_size 758 arch/x86/kernel/cpu/mtrr/cleanup.c for (chunk_size = gran_size; chunk_size < (1ULL<<32);
chunk_size 759 arch/x86/kernel/cpu/mtrr/cleanup.c chunk_size <<= 1) {
chunk_size 764 arch/x86/kernel/cpu/mtrr/cleanup.c mtrr_calc_range_state(chunk_size, gran_size,
chunk_size 784 arch/x86/kernel/cpu/mtrr/cleanup.c chunk_size = result[i].chunk_sizek;
chunk_size 785 arch/x86/kernel/cpu/mtrr/cleanup.c chunk_size <<= 10;
chunk_size 788 arch/x86/kernel/cpu/mtrr/cleanup.c x86_setup_var_mtrrs(range, nr_range, chunk_size, gran_size);
chunk_size 131 arch/x86/platform/olpc/olpc_dt.c const size_t chunk_size = max(PAGE_SIZE, size);
chunk_size 139 arch/x86/platform/olpc/olpc_dt.c res = memblock_alloc(chunk_size, SMP_CACHE_BYTES);
chunk_size 142 arch/x86/platform/olpc/olpc_dt.c chunk_size);
chunk_size 144 arch/x86/platform/olpc/olpc_dt.c prom_early_allocated += chunk_size;
chunk_size 145 arch/x86/platform/olpc/olpc_dt.c memset(res, 0, chunk_size);
chunk_size 146 arch/x86/platform/olpc/olpc_dt.c free_mem = chunk_size;
chunk_size 743 arch/x86/xen/setup.c phys_addr_t mem_end, addr, size, chunk_size;
chunk_size 826 arch/x86/xen/setup.c chunk_size = size;
chunk_size 831 arch/x86/xen/setup.c chunk_size = min(size, mem_end - addr);
chunk_size 833 arch/x86/xen/setup.c chunk_size = min(size, PFN_PHYS(extra_pages));
chunk_size 835 arch/x86/xen/setup.c n_pfns = PFN_DOWN(addr + chunk_size) - pfn_s;
chunk_size 844 arch/x86/xen/setup.c xen_align_and_add_e820_region(addr, chunk_size, type);
chunk_size 846 arch/x86/xen/setup.c addr += chunk_size;
chunk_size 847 arch/x86/xen/setup.c size -= chunk_size;
chunk_size 460 drivers/crypto/qat/qat_common/icp_qat_uclo.h u64 chunk_size;
chunk_size 1254 drivers/crypto/qat/qat_common/qat_uclo.c (*desc))->chunk_size;
chunk_size 1282 drivers/crypto/qat/qat_common/qat_uclo.c auth_chunk->chunk_size = img_desc.dram_size;
chunk_size 112 drivers/dma/uniphier-mdmac.c u32 src_mode, src_addr, dest_mode, dest_addr, chunk_size;
chunk_size 128 drivers/dma/uniphier-mdmac.c chunk_size = sg_dma_len(sg);
chunk_size 134 drivers/dma/uniphier-mdmac.c writel(chunk_size, mc->reg_ch_base + UNIPHIER_MDMAC_CH_SIZE);
chunk_size 502 drivers/gpu/drm/amd/amdkfd/kfd_device.c unsigned int chunk_size);
chunk_size 965 drivers/gpu/drm/amd/amdkfd/kfd_device.c unsigned int chunk_size)
chunk_size 969 drivers/gpu/drm/amd/amdkfd/kfd_device.c if (WARN_ON(buf_size < chunk_size))
chunk_size 973 drivers/gpu/drm/amd/amdkfd/kfd_device.c if (WARN_ON(chunk_size == 0))
chunk_size 976 drivers/gpu/drm/amd/amdkfd/kfd_device.c kfd->gtt_sa_chunk_size = chunk_size;
chunk_size 977 drivers/gpu/drm/amd/amdkfd/kfd_device.c kfd->gtt_sa_num_of_chunks = buf_size / chunk_size;
chunk_size 1004 drivers/gpu/drm/amd/amdkfd/kfd_device.c unsigned int chunk_size)
chunk_size 1006 drivers/gpu/drm/amd/amdkfd/kfd_device.c return start_addr + bit_num * chunk_size;
chunk_size 1011 drivers/gpu/drm/amd/amdkfd/kfd_device.c unsigned int chunk_size)
chunk_size 1013 drivers/gpu/drm/amd/amdkfd/kfd_device.c return (uint32_t *) ((uint64_t) start_addr + bit_num * chunk_size);
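
The arch/arm/mm/cache-uniphier.c and arch/x86/xen/setup.c hits above all follow the kernel's standard chunked-iteration idiom: clamp the remaining work to a maximum chunk, process it, advance the cursor, and shrink the remainder. A minimal standalone sketch of that idiom, where process_one_chunk() is a hypothetical stand-in for the driver-specific work:

#include <stddef.h>

/* Hypothetical per-chunk worker standing in for the driver-specific call. */
extern void process_one_chunk(unsigned long start, size_t len);

/* Clamp to a maximum chunk, process, advance, shrink the remainder. */
void process_in_chunks(unsigned long start, size_t size, size_t max_chunk)
{
	while (size > 0) {
		size_t chunk_size = size < max_chunk ? size : max_chunk;

		process_one_chunk(start, chunk_size);
		start += chunk_size;
		size -= chunk_size;
	}
}
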
chunk_size 556 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.c CHUNK_SIZE, rq_regs->rq_regs_l.chunk_size,
chunk_size 565 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.c CHUNK_SIZE_C, rq_regs->rq_regs_c.chunk_size,
chunk_size 1027 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.c CHUNK_SIZE, &rq_regs->rq_regs_l.chunk_size,
chunk_size 1037 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hubp.c CHUNK_SIZE_C, &rq_regs->rq_regs_c.chunk_size,
chunk_size 174 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c rq_regs->crq_expansion_mode, rq_regs->plane1_base_address, rq_regs->rq_regs_l.chunk_size,
chunk_size 178 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer.c rq_regs->rq_regs_l.pte_row_height_linear, rq_regs->rq_regs_c.chunk_size, rq_regs->rq_regs_c.min_chunk_size,
chunk_size 213 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c rq_regs->crq_expansion_mode, rq_regs->plane1_base_address, rq_regs->rq_regs_l.chunk_size,
chunk_size 217 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_hw_sequencer_debug.c rq_regs->rq_regs_l.pte_row_height_linear, rq_regs->rq_regs_c.chunk_size, rq_regs->rq_regs_c.min_chunk_size,
chunk_size 204 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.c CHUNK_SIZE, rq_regs->rq_regs_l.chunk_size,
chunk_size 213 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.c CHUNK_SIZE_C, rq_regs->rq_regs_c.chunk_size,
chunk_size 1225 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.c CHUNK_SIZE, &rq_regs->rq_regs_l.chunk_size,
chunk_size 1235 drivers/gpu/drm/amd/display/dc/dcn20/dcn20_hubp.c CHUNK_SIZE_C, &rq_regs->rq_regs_c.chunk_size,
chunk_size 130 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.c CHUNK_SIZE, rq_regs->rq_regs_l.chunk_size,
chunk_size 139 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_hubp.c CHUNK_SIZE_C, rq_regs->rq_regs_c.chunk_size,
chunk_size 172 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20.c rq_regs->chunk_size = dml_log2(rq_sizing.chunk_bytes) - 10;
chunk_size 172 drivers/gpu/drm/amd/display/dc/dml/dcn20/display_rq_dlg_calc_20v2.c rq_regs->chunk_size = dml_log2(rq_sizing.chunk_bytes) - 10;
chunk_size 150 drivers/gpu/drm/amd/display/dc/dml/dcn21/display_rq_dlg_calc_21.c rq_regs->chunk_size = dml_log2(rq_sizing.chunk_bytes) - 10;
chunk_size 484 drivers/gpu/drm/amd/display/dc/dml/display_mode_structs.h unsigned int chunk_size;
chunk_size 163 drivers/gpu/drm/amd/display/dc/dml/display_rq_dlg_helpers.c dml_print("DML_RQ_DLG_CALC: chunk_size = 0x%0x\n", rq_regs.chunk_size);
chunk_size 214 drivers/gpu/drm/amd/display/dc/dml/dml1_display_rq_dlg_calc.c rq_regs->chunk_size = dml_log2(rq_sizing.chunk_bytes) - 10;
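
The DML hits above program CHUNK_SIZE as dml_log2(rq_sizing.chunk_bytes) - 10, i.e. the register holds log2 of the chunk size in kilobytes. A worked example of that encoding, with log2_int() as a local stand-in for dml_log2(), which is assumed here to behave as an integer log2:

#include <assert.h>

/* Integer log2 for powers of two; a stand-in for the DML dml_log2() helper. */
static int log2_int(unsigned int v)
{
	int r = 0;

	while (v >>= 1)
		r++;
	return r;
}

int main(void)
{
	unsigned int chunk_bytes = 8192;       /* an 8 KiB request chunk */
	int reg = log2_int(chunk_bytes) - 10;  /* log2(bytes) - log2(1024) */

	assert(reg == 3);                      /* 2^3 KiB == 8 KiB */
	return 0;
}
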
chunk_size 125 drivers/gpu/drm/drm_dsc.c pps_payload->chunk_size = cpu_to_be16(dsc_cfg->slice_chunk_size);
chunk_size 483 drivers/gpu/drm/i915/display/intel_gmbus.c unsigned int chunk_size = len;
chunk_size 495 drivers/gpu/drm/i915/display/intel_gmbus.c (chunk_size << GMBUS_BYTE_COUNT_SHIFT) |
chunk_size 92 drivers/gpu/drm/i915/i915_buddy.c int i915_buddy_init(struct i915_buddy_mm *mm, u64 size, u64 chunk_size)
chunk_size 97 drivers/gpu/drm/i915/i915_buddy.c if (size < chunk_size)
chunk_size 100 drivers/gpu/drm/i915/i915_buddy.c if (chunk_size < PAGE_SIZE)
chunk_size 103 drivers/gpu/drm/i915/i915_buddy.c if (!is_power_of_2(chunk_size))
chunk_size 106 drivers/gpu/drm/i915/i915_buddy.c size = round_down(size, chunk_size);
chunk_size 109 drivers/gpu/drm/i915/i915_buddy.c mm->chunk_size = chunk_size;
chunk_size 110 drivers/gpu/drm/i915/i915_buddy.c mm->max_order = ilog2(size) - ilog2(chunk_size);
chunk_size 144 drivers/gpu/drm/i915/i915_buddy.c order = ilog2(root_size) - ilog2(chunk_size);
chunk_size 153 drivers/gpu/drm/i915/i915_buddy.c GEM_BUG_ON(i915_buddy_block_size(mm, root) < chunk_size);
chunk_size 200 drivers/gpu/drm/i915/i915_buddy.c offset + (mm->chunk_size << block_order));
chunk_size 348 drivers/gpu/drm/i915/i915_buddy.c if (size < mm->chunk_size)
chunk_size 351 drivers/gpu/drm/i915/i915_buddy.c if (!IS_ALIGNED(size | start, mm->chunk_size))
chunk_size 66 drivers/gpu/drm/i915/i915_buddy.h u64 chunk_size;
chunk_size 110 drivers/gpu/drm/i915/i915_buddy.h return mm->chunk_size << i915_buddy_block_order(block);
chunk_size 113 drivers/gpu/drm/i915/i915_buddy.h int i915_buddy_init(struct i915_buddy_mm *mm, u64 size, u64 chunk_size);
chunk_size 60 drivers/gpu/drm/i915/selftests/i915_buddy.c if (block_size < mm->chunk_size) {
chunk_size 70 drivers/gpu/drm/i915/selftests/i915_buddy.c if (!IS_ALIGNED(block_size, mm->chunk_size)) {
chunk_size 75 drivers/gpu/drm/i915/selftests/i915_buddy.c if (!IS_ALIGNED(offset, mm->chunk_size)) {
chunk_size 281 drivers/gpu/drm/i915/selftests/i915_buddy.c static void igt_mm_config(u64 *size, u64 *chunk_size)
chunk_size 294 drivers/gpu/drm/i915/selftests/i915_buddy.c *chunk_size = ms;
chunk_size 302 drivers/gpu/drm/i915/selftests/i915_buddy.c u64 chunk_size;
chunk_size 306 drivers/gpu/drm/i915/selftests/i915_buddy.c igt_mm_config(&mm_size, &chunk_size);
chunk_size 308 drivers/gpu/drm/i915/selftests/i915_buddy.c pr_info("buddy_init with size=%llx, chunk_size=%llx\n", mm_size, chunk_size);
chunk_size 310 drivers/gpu/drm/i915/selftests/i915_buddy.c err = i915_buddy_init(&mm, mm_size, chunk_size);
chunk_size 618 drivers/gpu/drm/i915/selftests/i915_buddy.c u64 chunk_size;
chunk_size 624 drivers/gpu/drm/i915/selftests/i915_buddy.c igt_mm_config(&size, &chunk_size);
chunk_size 626 drivers/gpu/drm/i915/selftests/i915_buddy.c pr_info("buddy_init with size=%llx, chunk_size=%llx\n", size, chunk_size);
chunk_size 628 drivers/gpu/drm/i915/selftests/i915_buddy.c err = i915_buddy_init(&mm, size, chunk_size);
chunk_size 647 drivers/gpu/drm/i915/selftests/i915_buddy.c size = min(page_num * mm.chunk_size, rem);
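
In the i915_buddy.c hits above, a power-of-two chunk_size makes the order arithmetic pure bit math: max_order = ilog2(size) - ilog2(chunk_size), and a block of a given order spans chunk_size << order bytes. A small self-checking sketch of that relationship, with ilog2_u64() standing in for the kernel's ilog2():

#include <assert.h>
#include <stdint.h>

/* Integer log2 of a power of two; equivalent to the kernel's ilog2() here. */
static unsigned int ilog2_u64(uint64_t v)
{
	unsigned int r = 0;

	while (v >>= 1)
		r++;
	return r;
}

int main(void)
{
	uint64_t chunk_size = 4096;                 /* must be a power of two */
	uint64_t size = 1ULL << 20;                 /* 1 MiB, already chunk-aligned */
	unsigned int max_order = ilog2_u64(size) - ilog2_u64(chunk_size);

	assert(max_order == 8);                     /* 2^8 chunks of 4 KiB */
	assert((chunk_size << max_order) == size);  /* top-order block covers it all */
	return 0;
}
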
chunk_size 36 drivers/gpu/drm/qxl/qxl_image.c unsigned int chunk_size)
chunk_size 45 drivers/gpu/drm/qxl/qxl_image.c ret = qxl_alloc_bo_reserved(qdev, release, chunk_size, &chunk->bo);
chunk_size 411 drivers/gpu/drm/vmwgfx/vmwgfx_fifo.c uint32_t chunk_size = max - next_cmd;
chunk_size 416 drivers/gpu/drm/vmwgfx/vmwgfx_fifo.c if (bytes < chunk_size)
chunk_size 417 drivers/gpu/drm/vmwgfx/vmwgfx_fifo.c chunk_size = bytes;
chunk_size 421 drivers/gpu/drm/vmwgfx/vmwgfx_fifo.c memcpy(fifo_mem + (next_cmd >> 2), buffer, chunk_size);
chunk_size 422 drivers/gpu/drm/vmwgfx/vmwgfx_fifo.c rest = bytes - chunk_size;
chunk_size 424 drivers/gpu/drm/vmwgfx/vmwgfx_fifo.c memcpy(fifo_mem + (min >> 2), buffer + (chunk_size >> 2), rest);
chunk_size 235 drivers/infiniband/hw/hns/hns_roce_hem.c u32 chunk_size;
chunk_size 249 drivers/infiniband/hw/hns/hns_roce_hem.c chunk_size = table->type < HEM_TYPE_MTT ? mhop->buf_chunk_size :
chunk_size 252 drivers/infiniband/hw/hns/hns_roce_hem.c (chunk_size / table->obj_size);
chunk_size 2967 drivers/infiniband/hw/i40iw/i40iw_ctrl.c LS_64(info->chunk_size, I40IW_CQPSQ_STAG_LPBLSIZE) |
chunk_size 3015 drivers/infiniband/hw/i40iw/i40iw_ctrl.c if (info->chunk_size && (info->first_pm_pbl_index >= pble_obj_cnt))
chunk_size 3035 drivers/infiniband/hw/i40iw/i40iw_ctrl.c if (!info->chunk_size) {
chunk_size 3048 drivers/infiniband/hw/i40iw/i40iw_ctrl.c LS_64(info->chunk_size, I40IW_CQPSQ_STAG_LPBLSIZE) |
chunk_size 3292 drivers/infiniband/hw/i40iw/i40iw_ctrl.c LS_64(info->chunk_size, I40IWQPSQ_LPBLSIZE) |
chunk_size 773 drivers/infiniband/hw/i40iw/i40iw_type.h u32 chunk_size;
chunk_size 790 drivers/infiniband/hw/i40iw/i40iw_type.h u32 chunk_size;
chunk_size 809 drivers/infiniband/hw/i40iw/i40iw_type.h u32 chunk_size;
chunk_size 1704 drivers/infiniband/hw/i40iw/i40iw_verbs.c stag_info->chunk_size = 1;
chunk_size 1707 drivers/infiniband/hw/i40iw/i40iw_verbs.c stag_info->chunk_size = 3;
chunk_size 2257 drivers/infiniband/hw/i40iw/i40iw_verbs.c info.chunk_size = 1;
chunk_size 364 drivers/infiniband/hw/mthca/mthca_memfree.c unsigned chunk_size;
chunk_size 386 drivers/infiniband/hw/mthca/mthca_memfree.c chunk_size = MTHCA_TABLE_CHUNK_SIZE;
chunk_size 388 drivers/infiniband/hw/mthca/mthca_memfree.c chunk_size = nobj * obj_size - i * MTHCA_TABLE_CHUNK_SIZE;
chunk_size 390 drivers/infiniband/hw/mthca/mthca_memfree.c table->icm[i] = mthca_alloc_icm(dev, chunk_size >> PAGE_SHIFT,
chunk_size 717 drivers/input/evdev.c size_t chunk_size = compat ? sizeof(compat_long_t) : sizeof(long);
chunk_size 720 drivers/input/evdev.c if (maxlen % chunk_size)
chunk_size 724 drivers/input/evdev.c len *= chunk_size;
chunk_size 418 drivers/input/touchscreen/wdt87xx_i2c.c u32 chunk_id, chunk_size;
chunk_size 426 drivers/input/touchscreen/wdt87xx_i2c.c chunk_size = get_unaligned_le32(fw->data +
chunk_size 428 drivers/input/touchscreen/wdt87xx_i2c.c pos += chunk_size + 2 * sizeof(u32); /* chunk ID + size */
chunk_size 145 drivers/md/dm-exception-store.c unsigned chunk_size;
chunk_size 147 drivers/md/dm-exception-store.c if (kstrtouint(chunk_size_arg, 10, &chunk_size)) {
chunk_size 152 drivers/md/dm-exception-store.c if (!chunk_size) {
chunk_size 153 drivers/md/dm-exception-store.c store->chunk_size = store->chunk_mask = store->chunk_shift = 0;
chunk_size 157 drivers/md/dm-exception-store.c return dm_exception_store_set_chunk_size(store, chunk_size, error);
chunk_size 161 drivers/md/dm-exception-store.c unsigned chunk_size,
chunk_size 165 drivers/md/dm-exception-store.c if (!is_power_of_2(chunk_size)) {
chunk_size 171 drivers/md/dm-exception-store.c if (chunk_size %
chunk_size 173 drivers/md/dm-exception-store.c chunk_size %
chunk_size 179 drivers/md/dm-exception-store.c if (chunk_size > INT_MAX >> SECTOR_SHIFT) {
chunk_size 184 drivers/md/dm-exception-store.c store->chunk_size = chunk_size;
chunk_size 185 drivers/md/dm-exception-store.c store->chunk_mask = chunk_size - 1;
chunk_size 186 drivers/md/dm-exception-store.c store->chunk_shift = __ffs(chunk_size);
chunk_size 121 drivers/md/dm-exception-store.h unsigned chunk_size;
chunk_size 184 drivers/md/dm-exception-store.h unsigned chunk_size,
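
dm_exception_store_set_chunk_size() in the drivers/md/dm-exception-store.c hits above rejects non-power-of-two sizes and then derives chunk_mask and chunk_shift so later code can replace division and modulo with shifts and masks. A reduced userspace sketch of that bookkeeping, where struct chunk_store is a hypothetical stand-in for the dm_exception_store fields:

#include <assert.h>

/* Reduced, hypothetical stand-in for the dm_exception_store chunk fields. */
struct chunk_store {
	unsigned int chunk_size;   /* in sectors; must be a power of two */
	unsigned int chunk_mask;   /* chunk_size - 1 */
	unsigned int chunk_shift;  /* log2(chunk_size) */
};

static int set_chunk_size(struct chunk_store *store, unsigned int chunk_size)
{
	/* Reject zero and non-powers-of-two, as the dm code does. */
	if (!chunk_size || (chunk_size & (chunk_size - 1)))
		return -1;

	store->chunk_size = chunk_size;
	store->chunk_mask = chunk_size - 1;
	/* Trailing-zero count equals __ffs() for a power of two. */
	store->chunk_shift = (unsigned int)__builtin_ctz(chunk_size);
	return 0;
}

int main(void)
{
	struct chunk_store s;

	assert(set_chunk_size(&s, 16) == 0);
	assert((37u >> s.chunk_shift) == 2);  /* sector 37 lives in chunk 2 */
	assert((37u & s.chunk_mask) == 5);    /* at offset 5 within that chunk */
	assert(set_chunk_size(&s, 24) != 0);  /* 24 is not a power of two */
	return 0;
}
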
chunk_size 81 drivers/md/dm-snap-persistent.c __le32 chunk_size;
chunk_size 172 drivers/md/dm-snap-persistent.c len = ps->store->chunk_size << SECTOR_SHIFT;
chunk_size 234 drivers/md/dm-snap-persistent.c .sector = ps->store->chunk_size * chunk,
chunk_size 235 drivers/md/dm-snap-persistent.c .count = ps->store->chunk_size,
chunk_size 301 drivers/md/dm-snap-persistent.c memset(ps->area, 0, ps->store->chunk_size << SECTOR_SHIFT);
chunk_size 314 drivers/md/dm-snap-persistent.c unsigned chunk_size;
chunk_size 322 drivers/md/dm-snap-persistent.c if (!ps->store->chunk_size) {
chunk_size 323 drivers/md/dm-snap-persistent.c ps->store->chunk_size = max(DM_CHUNK_SIZE_DEFAULT_SECTORS,
chunk_size 326 drivers/md/dm-snap-persistent.c ps->store->chunk_mask = ps->store->chunk_size - 1;
chunk_size 327 drivers/md/dm-snap-persistent.c ps->store->chunk_shift = __ffs(ps->store->chunk_size);
chunk_size 359 drivers/md/dm-snap-persistent.c chunk_size = le32_to_cpu(dh->chunk_size);
chunk_size 361 drivers/md/dm-snap-persistent.c if (ps->store->chunk_size == chunk_size)
chunk_size 367 drivers/md/dm-snap-persistent.c chunk_size, ps->store->chunk_size);
chunk_size 372 drivers/md/dm-snap-persistent.c r = dm_exception_store_set_chunk_size(ps->store, chunk_size,
chunk_size 376 drivers/md/dm-snap-persistent.c chunk_size, chunk_err);
chunk_size 392 drivers/md/dm-snap-persistent.c memset(ps->header_area, 0, ps->store->chunk_size << SECTOR_SHIFT);
chunk_size 398 drivers/md/dm-snap-persistent.c dh->chunk_size = cpu_to_le32(ps->store->chunk_size);
chunk_size 503 drivers/md/dm-snap-persistent.c ps->store->chunk_size << SECTOR_SHIFT,
chunk_size 548 drivers/md/dm-snap-persistent.c memcpy(ps->area, area, ps->store->chunk_size << SECTOR_SHIFT);
chunk_size 582 drivers/md/dm-snap-persistent.c *sectors_allocated = ps->next_free * store->chunk_size;
chunk_size 591 drivers/md/dm-snap-persistent.c store->chunk_size;
chunk_size 629 drivers/md/dm-snap-persistent.c ps->exceptions_per_area = (ps->store->chunk_size << SECTOR_SHIFT) /
chunk_size 683 drivers/md/dm-snap-persistent.c if (size < ((ps->next_free + 1) * store->chunk_size))
chunk_size 917 drivers/md/dm-snap-persistent.c (unsigned long long)store->chunk_size);
chunk_size 45 drivers/md/dm-snap-transient.c if (size < (tc->next_free + store->chunk_size))
chunk_size 49 drivers/md/dm-snap-transient.c tc->next_free += store->chunk_size;
chunk_size 97 drivers/md/dm-snap-transient.c DMEMIT(" N %llu", (unsigned long long)store->chunk_size);
chunk_size 535 drivers/md/dm-snap.c if (l->store->chunk_size < s->store->chunk_size)
chunk_size 852 drivers/md/dm-snap.c unsigned chunk_size = 0;
chunk_size 856 drivers/md/dm-snap.c chunk_size = min_not_zero(chunk_size,
chunk_size 857 drivers/md/dm-snap.c snap->store->chunk_size);
chunk_size 859 drivers/md/dm-snap.c return (uint32_t) chunk_size;
chunk_size 1012 drivers/md/dm-snap.c sector_t sector, unsigned chunk_size);
chunk_size 1078 drivers/md/dm-snap.c io_size = linear_chunks * s->store->chunk_size;
chunk_size 1373 drivers/md/dm-snap.c s->store->chunk_size = 0;
chunk_size 1387 drivers/md/dm-snap.c if (!s->store->chunk_size) {
chunk_size 1392 drivers/md/dm-snap.c r = dm_set_target_max_io_len(ti, s->store->chunk_size);
chunk_size 1452 drivers/md/dm-snap.c snap_dest->ti->max_io_len = snap_dest->store->chunk_size;
chunk_size 1798 drivers/md/dm-snap.c src.count = min((sector_t)s->store->chunk_size, dev_size - src.sector);
chunk_size 1927 drivers/md/dm-snap.c dest.count = s->store->chunk_size;
chunk_size 1938 drivers/md/dm-snap.c (s->store->chunk_size << SECTOR_SHIFT);
chunk_size 2398 drivers/md/dm-snap.c limits->discard_granularity = snap->store->chunk_size;
chunk_size 2399 drivers/md/dm-snap.c limits->max_discard_sectors = snap->store->chunk_size;
chunk_size 35 drivers/md/dm-stripe.c uint32_t chunk_size;
chunk_size 103 drivers/md/dm-stripe.c uint32_t chunk_size;
chunk_size 117 drivers/md/dm-stripe.c if (kstrtouint(argv[1], 10, &chunk_size) || !chunk_size) {
chunk_size 130 drivers/md/dm-stripe.c if (sector_div(tmp_len, chunk_size)) {
chunk_size 164 drivers/md/dm-stripe.c r = dm_set_target_max_io_len(ti, chunk_size);
chunk_size 176 drivers/md/dm-stripe.c sc->chunk_size = chunk_size;
chunk_size 177 drivers/md/dm-stripe.c if (chunk_size & (chunk_size - 1))
chunk_size 180 drivers/md/dm-stripe.c sc->chunk_size_shift = __ffs(chunk_size);
chunk_size 223 drivers/md/dm-stripe.c chunk_offset = sector_div(chunk, sc->chunk_size);
chunk_size 225 drivers/md/dm-stripe.c chunk_offset = chunk & (sc->chunk_size - 1);
chunk_size 237 drivers/md/dm-stripe.c chunk *= sc->chunk_size;
chunk_size 256 drivers/md/dm-stripe.c *result -= sector_div(sector, sc->chunk_size);
chunk_size 258 drivers/md/dm-stripe.c *result = sector & ~(sector_t)(sc->chunk_size - 1);
chunk_size 261 drivers/md/dm-stripe.c *result += sc->chunk_size; /* next chunk */
chunk_size 417 drivers/md/dm-stripe.c (unsigned long long)sc->chunk_size);
chunk_size 481 drivers/md/dm-stripe.c unsigned chunk_size = sc->chunk_size << SECTOR_SHIFT;
chunk_size 483 drivers/md/dm-stripe.c blk_limits_io_min(limits, chunk_size);
chunk_size 484 drivers/md/dm-stripe.c blk_limits_io_opt(limits, chunk_size * sc->stripes);
chunk_size 21 drivers/md/dm-unstripe.c uint32_t chunk_size;
chunk_size 61 drivers/md/dm-unstripe.c if (kstrtouint(argv[1], 10, &uc->chunk_size) || !uc->chunk_size) {
chunk_size 87 drivers/md/dm-unstripe.c uc->unstripe_offset = uc->unstripe * uc->chunk_size;
chunk_size 88 drivers/md/dm-unstripe.c uc->unstripe_width = (uc->stripes - 1) * uc->chunk_size;
chunk_size 89 drivers/md/dm-unstripe.c uc->chunk_shift = is_power_of_2(uc->chunk_size) ? fls(uc->chunk_size) - 1 : 0;
chunk_size 92 drivers/md/dm-unstripe.c if (sector_div(tmp_len, uc->chunk_size)) {
chunk_size 97 drivers/md/dm-unstripe.c if (dm_set_target_max_io_len(ti, uc->chunk_size)) {
chunk_size 126 drivers/md/dm-unstripe.c sector_div(tmp_sector, uc->chunk_size);
chunk_size 156 drivers/md/dm-unstripe.c uc->stripes, (unsigned long long)uc->chunk_size, uc->unstripe,
chunk_size 175 drivers/md/dm-unstripe.c limits->chunk_sectors = uc->chunk_size;
chunk_size 1229 drivers/md/md.c mddev->chunk_sectors = sb->chunk_size >> 9;
chunk_size 1408 drivers/md/md.c sb->chunk_size = mddev->chunk_sectors << 9;
chunk_size 4137 drivers/md/md.c __ATTR(chunk_size, S_IRUGO|S_IWUSR, chunk_size_show, chunk_size_store);
chunk_size 6414 drivers/md/md.c info.chunk_size = mddev->chunk_sectors << 9;
chunk_size 6956 drivers/md/md.c mddev->chunk_sectors = info->chunk_size >> 9;
chunk_size 7105 drivers/md/md.c mddev->chunk_sectors != info->chunk_size >> 9 ||
chunk_size 3726 drivers/md/raid10.c int i, disk_idx, chunk_size;
chunk_size 3762 drivers/md/raid10.c chunk_size = mddev->chunk_sectors << 9;
chunk_size 3768 drivers/md/raid10.c blk_queue_io_min(mddev->queue, chunk_size);
chunk_size 3770 drivers/md/raid10.c blk_queue_io_opt(mddev->queue, chunk_size * conf->geo.raid_disks);
chunk_size 3772 drivers/md/raid10.c blk_queue_io_opt(mddev->queue, chunk_size *
chunk_size 7425 drivers/md/raid5.c int chunk_size;
chunk_size 7436 drivers/md/raid5.c chunk_size = mddev->chunk_sectors << 9;
chunk_size 7437 drivers/md/raid5.c blk_queue_io_min(mddev->queue, chunk_size);
chunk_size 7438 drivers/md/raid5.c blk_queue_io_opt(mddev->queue, chunk_size *
chunk_size 235 drivers/media/dvb-frontends/dib9000.c u32 chunk_size = 126;
chunk_size 261 drivers/media/dvb-frontends/dib9000.c l = len < chunk_size ? len : chunk_size;
chunk_size 319 drivers/media/dvb-frontends/dib9000.c u32 chunk_size = 126;
chunk_size 345 drivers/media/dvb-frontends/dib9000.c l = len < chunk_size ? len : chunk_size;
chunk_size 49 drivers/media/dvb-frontends/drxk.h int chunk_size;
chunk_size 6781 drivers/media/dvb-frontends/drxk_hard.c state->m_chunk_size = config->chunk_size;
chunk_size 57 drivers/media/usb/dvb-usb-v2/az6007.c .chunk_size = 64,
chunk_size 70 drivers/media/usb/dvb-usb-v2/az6007.c .chunk_size = 64,
chunk_size 416 drivers/media/usb/em28xx/em28xx-dvb.c .chunk_size = 56,
chunk_size 425 drivers/media/usb/em28xx/em28xx-dvb.c .chunk_size = 54,
chunk_size 438 drivers/media/usb/em28xx/em28xx-dvb.c .chunk_size = 62,
chunk_size 447 drivers/media/usb/em28xx/em28xx-dvb.c .chunk_size = 58,
chunk_size 1197 drivers/mtd/nand/raw/marvell_nand.c int chunk_size = lt->data_bytes + lt->spare_bytes + lt->ecc_bytes;
chunk_size 1221 drivers/mtd/nand/raw/marvell_nand.c nand_change_read_column_op(chip, chunk * chunk_size,
chunk_size 1150 drivers/net/ethernet/chelsio/cxgb3/t3_hw.c unsigned int chunk_size = min(size, 256U);
chunk_size 1152 drivers/net/ethernet/chelsio/cxgb3/t3_hw.c ret = t3_write_flash(adapter, addr, chunk_size, fw_data);
chunk_size 1156 drivers/net/ethernet/chelsio/cxgb3/t3_hw.c addr += chunk_size;
chunk_size 1157 drivers/net/ethernet/chelsio/cxgb3/t3_hw.c fw_data += chunk_size;
chunk_size 1158 drivers/net/ethernet/chelsio/cxgb3/t3_hw.c size -= chunk_size;
chunk_size 3111 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c u32 chunk_size = min_t(u32, tot_len, CUDBG_CHUNK_SIZE);
chunk_size 3113 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c rc = cudbg_get_buff(pdbg_init, dbg_buff, chunk_size,
chunk_size 3120 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c memcpy(temp_buff.data, data + cur_off, chunk_size);
chunk_size 3121 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c tot_len -= chunk_size;
chunk_size 3122 drivers/net/ethernet/chelsio/cxgb4/cudbg_lib.c cur_off += chunk_size;
chunk_size 3049 drivers/net/ethernet/emulex/benet/be_cmds.c u32 chunk_size = 0;
chunk_size 3074 drivers/net/ethernet/emulex/benet/be_cmds.c chunk_size = min_t(u32, image_size, LANCER_FW_DOWNLOAD_CHUNK);
chunk_size 3077 drivers/net/ethernet/emulex/benet/be_cmds.c memcpy(dest_image_ptr, data_ptr, chunk_size);
chunk_size 3080 drivers/net/ethernet/emulex/benet/be_cmds.c chunk_size, offset,
chunk_size 267 drivers/net/ethernet/emulex/benet/be_ethtool.c u32 read_len = 0, total_read_len = 0, chunk_size;
chunk_size 283 drivers/net/ethernet/emulex/benet/be_ethtool.c chunk_size = min_t(u32, (buf_len - total_read_len),
chunk_size 285 drivers/net/ethernet/emulex/benet/be_ethtool.c chunk_size = ALIGN(chunk_size, 4);
chunk_size 286 drivers/net/ethernet/emulex/benet/be_ethtool.c status = lancer_cmd_read_object(adapter, &read_cmd, chunk_size,
chunk_size 809 drivers/net/ethernet/intel/ice/ice_adminq_cmd.h u8 chunk_size;
chunk_size 421 drivers/net/ethernet/mellanox/mlx4/icm.c unsigned chunk_size;
chunk_size 443 drivers/net/ethernet/mellanox/mlx4/icm.c chunk_size = MLX4_TABLE_CHUNK_SIZE;
chunk_size 445 drivers/net/ethernet/mellanox/mlx4/icm.c chunk_size = PAGE_ALIGN(size -
chunk_size 448 drivers/net/ethernet/mellanox/mlx4/icm.c table->icm[i] = mlx4_alloc_icm(dev, chunk_size >> PAGE_SHIFT,
chunk_size 62 drivers/net/ethernet/mellanox/mlx5/core/en/params.c frag_sz = max_t(u32, frag_sz, xsk->chunk_size);
chunk_size 11 drivers/net/ethernet/mellanox/mlx5/core/en/params.h u16 chunk_size;
chunk_size 18 drivers/net/ethernet/mellanox/mlx5/core/en/xsk/setup.c if (xsk->chunk_size > PAGE_SIZE ||
chunk_size 19 drivers/net/ethernet/mellanox/mlx5/core/en/xsk/setup.c xsk->chunk_size < MLX5E_MIN_XSK_CHUNK_SIZE)
chunk_size 23 drivers/net/ethernet/mellanox/mlx5/core/en/xsk/setup.c if (mlx5e_rx_get_min_frag_sz(params, xsk) > xsk->chunk_size)
chunk_size 99 drivers/net/ethernet/mellanox/mlx5/core/en/xsk/umem.c xsk->chunk_size = umem->chunk_size_nohr + umem->headroom;
chunk_size 3905 drivers/net/ethernet/mellanox/mlx5/core/en_main.c max_mtu_frame = MLX5E_HW2SW_MTU(new_params, xsk.chunk_size - hr);
chunk_size 298 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_icm_pool.c enum mlx5dr_icm_chunk_size chunk_size)
chunk_size 305 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_icm_pool.c bucket->num_of_entries = mlx5dr_icm_pool_chunk_size_to_entries(chunk_size);
chunk_size 450 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_icm_pool.c enum mlx5dr_icm_chunk_size chunk_size)
chunk_size 457 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_icm_pool.c if (chunk_size > pool->max_log_chunk_sz)
chunk_size 460 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_icm_pool.c bucket = &pool->buckets[chunk_size];
chunk_size 329 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_rule.c cur_entries = mlx5dr_icm_pool_chunk_size_to_entries(cur_htbl->chunk_size);
chunk_size 481 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_rule.c new_size = mlx5dr_icm_next_higher_chunk(cur_htbl->chunk_size);
chunk_size 484 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_rule.c if (new_size == cur_htbl->chunk_size)
chunk_size 616 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_rule.c if (dmn->info.max_log_sw_icm_sz <= htbl->chunk_size)
chunk_size 622 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_rule.c if (dr_get_bits_per_mask(htbl->byte_mask) * BITS_PER_BYTE <= htbl->chunk_size)
chunk_size 830 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_rule.c cur_htbl->chunk_size);
chunk_size 654 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c if (htbl->chunk_size == DR_CHUNK_SIZE_MAX - 1 || !htbl->byte_mask)
chunk_size 658 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c num_of_entries = mlx5dr_icm_pool_chunk_size_to_entries(htbl->chunk_size);
chunk_size 663 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c enum mlx5dr_icm_chunk_size chunk_size,
chunk_size 674 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c chunk = mlx5dr_icm_alloc_chunk(pool, chunk_size);
chunk_size 697 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_ste.c htbl->chunk_size = chunk_size;
chunk_size 159 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h enum mlx5dr_icm_chunk_size chunk_size;
chunk_size 196 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h enum mlx5dr_icm_chunk_size chunk_size,
chunk_size 820 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h mlx5dr_icm_pool_chunk_size_to_entries(enum mlx5dr_icm_chunk_size chunk_size)
chunk_size 822 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h return 1 << chunk_size;
chunk_size 826 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h mlx5dr_icm_pool_chunk_size_to_byte(enum mlx5dr_icm_chunk_size chunk_size,
chunk_size 837 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h num_of_entries = mlx5dr_icm_pool_chunk_size_to_entries(chunk_size);
chunk_size 950 drivers/net/ethernet/mellanox/mlx5/core/steering/dr_types.h enum mlx5dr_icm_chunk_size chunk_size);
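
The mlx5dr steering hits above store an ICM chunk size as a log2-encoded enum, so converting it to an entry count is a single shift (mlx5dr_icm_pool_chunk_size_to_entries() is literally return 1 << chunk_size). A worked example of the encoding; the 64-byte entry size is illustrative, not taken from the driver:

#include <assert.h>

int main(void)
{
	int chunk_size = 9;                   /* log2-encoded: 2^9 entries */
	int num_of_entries = 1 << chunk_size; /* as in chunk_size_to_entries() */
	int entry_size = 64;                  /* illustrative entry size in bytes */

	assert(num_of_entries == 512);
	assert(num_of_entries * entry_size == 32768); /* chunk footprint in bytes */
	return 0;
}
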
chunk_size 319 drivers/net/ethernet/mellanox/mlxsw/i2c.c int off = mlxsw_i2c->cmd.mb_off_in, chunk_size, i, j;
chunk_size 333 drivers/net/ethernet/mellanox/mlxsw/i2c.c chunk_size = (in_mbox_size > mlxsw_i2c->block_size) ?
chunk_size 335 drivers/net/ethernet/mellanox/mlxsw/i2c.c write_tran.len = MLXSW_I2C_ADDR_WIDTH + chunk_size;
chunk_size 338 drivers/net/ethernet/mellanox/mlxsw/i2c.c mlxsw_i2c->block_size * i, chunk_size);
chunk_size 358 drivers/net/ethernet/mellanox/mlxsw/i2c.c off += chunk_size;
chunk_size 359 drivers/net/ethernet/mellanox/mlxsw/i2c.c in_mbox_size -= chunk_size;
chunk_size 398 drivers/net/ethernet/mellanox/mlxsw/i2c.c int num, chunk_size, reg_size, i, j;
chunk_size 446 drivers/net/ethernet/mellanox/mlxsw/i2c.c chunk_size = (reg_size > mlxsw_i2c->block_size) ?
chunk_size 448 drivers/net/ethernet/mellanox/mlxsw/i2c.c read_tran[1].len = chunk_size;
chunk_size 470 drivers/net/ethernet/mellanox/mlxsw/i2c.c off += chunk_size;
chunk_size 471 drivers/net/ethernet/mellanox/mlxsw/i2c.c reg_size -= chunk_size;
chunk_size 472 drivers/net/ethernet/mellanox/mlxsw/i2c.c read_tran[1].buf += chunk_size;
chunk_size 505 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c size_t chunk_size, dma_size;
chunk_size 512 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c chunk_size = BIT_ULL(chunk_order);
chunk_size 514 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c nseg = DIV_ROUND_UP(max_size, chunk_size);
chunk_size 525 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c chunks[i].chunk = kmalloc(chunk_size,
chunk_size 530 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c chunks[i].len = min_t(u64, chunk_size, max_size - off);
chunk_size 534 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c coff = min_t(u64, arg->in_size - off, chunk_size);
chunk_size 537 drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c memset(chunks[i].chunk + coff, 0, chunk_size - coff);
chunk_size 238 drivers/net/wireless/ath/wcn36xx/dxe.c int i, chunk_size = pool->chunk_size;
chunk_size 249 drivers/net/wireless/ath/wcn36xx/dxe.c bd_phy_addr += chunk_size;
chunk_size 250 drivers/net/wireless/ath/wcn36xx/dxe.c bd_cpu_addr += chunk_size;
chunk_size 625 drivers/net/wireless/ath/wcn36xx/dxe.c wcn->mgmt_mem_pool.chunk_size = WCN36XX_BD_CHUNK_SIZE +
chunk_size 628 drivers/net/wireless/ath/wcn36xx/dxe.c s = wcn->mgmt_mem_pool.chunk_size * WCN36XX_DXE_CH_DESC_NUMB_TX_H;
chunk_size 640 drivers/net/wireless/ath/wcn36xx/dxe.c wcn->data_mem_pool.chunk_size = WCN36XX_BD_CHUNK_SIZE +
chunk_size 643 drivers/net/wireless/ath/wcn36xx/dxe.c s = wcn->data_mem_pool.chunk_size * WCN36XX_DXE_CH_DESC_NUMB_TX_L;
chunk_size 663 drivers/net/wireless/ath/wcn36xx/dxe.c dma_free_coherent(wcn->dev, wcn->mgmt_mem_pool.chunk_size *
chunk_size 669 drivers/net/wireless/ath/wcn36xx/dxe.c dma_free_coherent(wcn->dev, wcn->data_mem_pool.chunk_size *
chunk_size 449 drivers/net/wireless/ath/wcn36xx/dxe.h int chunk_size;
chunk_size 211 drivers/net/wireless/intel/iwlwifi/mvm/nvm.c int chunk_size, ret;
chunk_size 213 drivers/net/wireless/intel/iwlwifi/mvm/nvm.c chunk_size = min(IWL_NVM_DEFAULT_CHUNK_SIZE,
chunk_size 217 drivers/net/wireless/intel/iwlwifi/mvm/nvm.c chunk_size, data + offset);
chunk_size 221 drivers/net/wireless/intel/iwlwifi/mvm/nvm.c offset += chunk_size;
chunk_size 444 drivers/net/wireless/marvell/libertas/if_sdio.c u32 chunk_size;
chunk_size 473 drivers/net/wireless/marvell/libertas/if_sdio.c chunk_size = min_t(size_t, size, 60);
chunk_size 475 drivers/net/wireless/marvell/libertas/if_sdio.c *((__le32*)chunk_buffer) = cpu_to_le32(chunk_size);
chunk_size 476 drivers/net/wireless/marvell/libertas/if_sdio.c memcpy(chunk_buffer + 4, firmware, chunk_size);
chunk_size 485 drivers/net/wireless/marvell/libertas/if_sdio.c firmware += chunk_size;
chunk_size 486 drivers/net/wireless/marvell/libertas/if_sdio.c size -= chunk_size;
chunk_size 540 drivers/net/wireless/marvell/libertas/if_sdio.c u32 chunk_size;
chunk_size 611 drivers/net/wireless/marvell/libertas/if_sdio.c chunk_size = min_t(size_t, req_size, 512);
chunk_size 613 drivers/net/wireless/marvell/libertas/if_sdio.c memcpy(chunk_buffer, firmware, chunk_size);
chunk_size 619 drivers/net/wireless/marvell/libertas/if_sdio.c chunk_buffer, roundup(chunk_size, 32));
chunk_size 623 drivers/net/wireless/marvell/libertas/if_sdio.c firmware += chunk_size;
chunk_size 624 drivers/net/wireless/marvell/libertas/if_sdio.c size -= chunk_size;
chunk_size 625 drivers/net/wireless/marvell/libertas/if_sdio.c req_size -= chunk_size;
chunk_size 901 drivers/ntb/test/ntb_perf.c u64 total_size, chunk_size;
chunk_size 906 drivers/ntb/test/ntb_perf.c chunk_size = 1ULL << chunk_order;
chunk_size 907 drivers/ntb/test/ntb_perf.c chunk_size = min_t(u64, peer->outbuf_size, chunk_size);
chunk_size 917 drivers/ntb/test/ntb_perf.c ret = perf_copy_chunk(pthr, flt_dst, flt_src, chunk_size);
chunk_size 924 drivers/ntb/test/ntb_perf.c pthr->copied += chunk_size;
chunk_size 926 drivers/ntb/test/ntb_perf.c flt_dst += chunk_size;
chunk_size 927 drivers/ntb/test/ntb_perf.c flt_src += chunk_size;
chunk_size 411 drivers/nvdimm/btt.c size_t chunk_size = SZ_2M;
chunk_size 414 drivers/nvdimm/btt.c zerobuf = kzalloc(chunk_size, GFP_KERNEL);
chunk_size 427 drivers/nvdimm/btt.c size_t size = min(mapsize, chunk_size);
chunk_size 453 drivers/nvdimm/btt.c size_t chunk_size = SZ_4K, offset = 0;
chunk_size 459 drivers/nvdimm/btt.c zerobuf = kzalloc(chunk_size, GFP_KERNEL);
chunk_size 471 drivers/nvdimm/btt.c size_t size = min(logsize, chunk_size);
chunk_size 1678 drivers/nvme/host/core.c u32 chunk_size = (((u32)ns->noiob) << (ns->lba_shift - 9));
chunk_size 1679 drivers/nvme/host/core.c blk_queue_chunk_sectors(ns->queue, rounddown_pow_of_two(chunk_size));
chunk_size 1896 drivers/nvme/host/pci.c u32 chunk_size)
chunk_size 1905 drivers/nvme/host/pci.c tmp = (preferred + chunk_size - 1);
chunk_size 1906 drivers/nvme/host/pci.c do_div(tmp, chunk_size);
chunk_size 1924 drivers/nvme/host/pci.c len = min_t(u64, chunk_size, preferred - size);
chunk_size 1965 drivers/nvme/host/pci.c u32 chunk_size;
chunk_size 1968 drivers/nvme/host/pci.c for (chunk_size = min_t(u64, preferred, PAGE_SIZE * MAX_ORDER_NR_PAGES);
chunk_size 1969 drivers/nvme/host/pci.c chunk_size >= max_t(u32, dev->ctrl.hmminds * 4096, PAGE_SIZE * 2);
chunk_size 1970 drivers/nvme/host/pci.c chunk_size /= 2) {
chunk_size 1971 drivers/nvme/host/pci.c if (!__nvme_alloc_host_mem(dev, preferred, chunk_size)) {
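
The drivers/nvme/host/pci.c hits above show a descending-allocation fallback: start with the largest convenient chunk size and halve it until the host memory buffer allocation succeeds or the chunk drops below a floor. A hedged sketch of the same loop shape; try_alloc_host_mem() and both bounds are illustrative, not the driver's exact constants:

#include <stdint.h>

/* Hypothetical allocator: returns 0 on success, nonzero on failure. */
extern int try_alloc_host_mem(uint64_t preferred, uint32_t chunk_size);

int alloc_host_mem_descending(uint64_t preferred)
{
	uint32_t chunk_size;

	/* Start large and halve; both bounds are illustrative constants. */
	for (chunk_size = 1u << 22;     /* 4 MiB */
	     chunk_size >= 2u * 4096;   /* stop below two 4 KiB pages */
	     chunk_size /= 2) {
		if (!try_alloc_host_mem(preferred, chunk_size))
			return 0;       /* succeeded at this granularity */
	}
	return -1;                      /* even the smallest chunk failed */
}
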
chunk_size 792 drivers/rpmsg/qcom_glink_native.c __le32 chunk_size;
chunk_size 795 drivers/rpmsg/qcom_glink_native.c unsigned int chunk_size;
chunk_size 808 drivers/rpmsg/qcom_glink_native.c chunk_size = le32_to_cpu(hdr.chunk_size);
chunk_size 811 drivers/rpmsg/qcom_glink_native.c if (avail < sizeof(hdr) + chunk_size) {
chunk_size 834 drivers/rpmsg/qcom_glink_native.c intent->data = kmalloc(chunk_size + left_size,
chunk_size 842 drivers/rpmsg/qcom_glink_native.c intent->size = chunk_size + left_size;
chunk_size 864 drivers/rpmsg/qcom_glink_native.c if (intent->size - intent->offset < chunk_size) {
chunk_size 872 drivers/rpmsg/qcom_glink_native.c sizeof(hdr), chunk_size);
chunk_size 873 drivers/rpmsg/qcom_glink_native.c intent->offset += chunk_size;
chunk_size 894 drivers/rpmsg/qcom_glink_native.c qcom_glink_rx_advance(glink, ALIGN(sizeof(hdr) + chunk_size, 8));
chunk_size 1268 drivers/rpmsg/qcom_glink_native.c __le32 chunk_size;
chunk_size 1309 drivers/rpmsg/qcom_glink_native.c req.chunk_size = cpu_to_le32(len);
chunk_size 328 drivers/rtc/rtc-isl12026.c size_t chunk_size, num_written;
chunk_size 348 drivers/rtc/rtc-isl12026.c chunk_size = round_down(offset, ISL12026_PAGESIZE) +
chunk_size 350 drivers/rtc/rtc-isl12026.c chunk_size = min(bytes, chunk_size);
chunk_size 355 drivers/rtc/rtc-isl12026.c memcpy(payload + 2, v + num_written, chunk_size);
chunk_size 358 drivers/rtc/rtc-isl12026.c msgs[0].len = chunk_size + 2;
chunk_size 368 drivers/rtc/rtc-isl12026.c bytes -= chunk_size;
chunk_size 369 drivers/rtc/rtc-isl12026.c offset += chunk_size;
chunk_size 370 drivers/rtc/rtc-isl12026.c num_written += chunk_size;
chunk_size 1123 drivers/s390/cio/css.c size_t chunk_size = chunk->end_addr - chunk->start_addr + 1;
chunk_size 1125 drivers/s390/cio/css.c dma_free_coherent((struct device *) data, chunk_size,
chunk_size 1153 drivers/s390/cio/css.c size_t chunk_size;
chunk_size 1159 drivers/s390/cio/css.c chunk_size = round_up(size, PAGE_SIZE);
chunk_size 1161 drivers/s390/cio/css.c chunk_size, &dma_addr, CIO_DMA_GFP);
chunk_size 1164 drivers/s390/cio/css.c gen_pool_add_virt(gp_dma, addr, dma_addr, chunk_size, -1);
chunk_size 745 drivers/scsi/megaraid/megaraid_sas_fusion.c u32 chunk_size, array_size, offset;
chunk_size 748 drivers/scsi/megaraid/megaraid_sas_fusion.c chunk_size = fusion->reply_alloc_sz * RDPQ_MAX_INDEX_IN_ONE_CHUNK;
chunk_size 765 drivers/scsi/megaraid/megaraid_sas_fusion.c chunk_size, 16, 0);
chunk_size 769 drivers/scsi/megaraid/megaraid_sas_fusion.c chunk_size,
chunk_size 770 drivers/scsi/megaraid/megaraid_sas_fusion.c roundup_pow_of_two(chunk_size),
chunk_size 812 drivers/scsi/megaraid/megaraid_sas_fusion.c chunk_size)) {
chunk_size 2914 drivers/scsi/qla2xxx/qla_nx2.c uint32_t chunk_size, read_size;
chunk_size 2950 drivers/scsi/qla2xxx/qla_nx2.c dma_desc.cmd.read_data_size = chunk_size = ISP8044_PEX_DMA_READ_SIZE;
chunk_size 2960 drivers/scsi/qla2xxx/qla_nx2.c chunk_size = (m_hdr->read_data_size - read_size);
chunk_size 2961 drivers/scsi/qla2xxx/qla_nx2.c dma_desc.cmd.read_data_size = chunk_size;
chunk_size 2978 drivers/scsi/qla2xxx/qla_nx2.c "(chunk_size 0x%x).\n", __func__, chunk_size);
chunk_size 2985 drivers/scsi/qla2xxx/qla_nx2.c memcpy(data_ptr, rdmem_buffer, chunk_size);
chunk_size 2986 drivers/scsi/qla2xxx/qla_nx2.c data_ptr += chunk_size;
chunk_size 2987 drivers/scsi/qla2xxx/qla_nx2.c read_size += chunk_size;
chunk_size 7588 drivers/scsi/smartpqi/smartpqi_init.c u32 total_size, u32 chunk_size)
chunk_size 7599 drivers/scsi/smartpqi/smartpqi_init.c sg_count = (total_size + chunk_size - 1);
chunk_size 7600 drivers/scsi/smartpqi/smartpqi_init.c sg_count /= chunk_size;
chunk_size 7604 drivers/scsi/smartpqi/smartpqi_init.c if (sg_count*chunk_size < total_size)
chunk_size 7612 drivers/scsi/smartpqi/smartpqi_init.c for (size = 0, i = 0; size < total_size; size += chunk_size, i++) {
chunk_size 7616 drivers/scsi/smartpqi/smartpqi_init.c dma_alloc_coherent(dev, chunk_size, &dma_handle,
chunk_size 7624 drivers/scsi/smartpqi/smartpqi_init.c put_unaligned_le32 (chunk_size, &mem_descriptor->length);
chunk_size 7639 drivers/scsi/smartpqi/smartpqi_init.c dma_free_coherent(dev, chunk_size,
chunk_size 902 drivers/staging/gasket/gasket_core.c ulong chunk_size, mapped_bytes = 0;
chunk_size 939 drivers/staging/gasket/gasket_core.c chunk_size = min(max_chunk_size, map_length - mapped_bytes);
chunk_size 944 drivers/staging/gasket/gasket_core.c PAGE_SHIFT, chunk_size,
chunk_size 951 drivers/staging/gasket/gasket_core.c mapped_bytes += chunk_size;
chunk_size 158 drivers/staging/uwb/i1480/dfu/mac.c size_t chunk_size;
chunk_size 162 drivers/staging/uwb/i1480/dfu/mac.c chunk_size = size < i1480->buf_size ? size : i1480->buf_size;
chunk_size 163 drivers/staging/uwb/i1480/dfu/mac.c result = i1480->read(i1480, hdr->address + src_itr, chunk_size);
chunk_size 4980 fs/btrfs/volumes.c u64 chunk_size;
chunk_size 5172 fs/btrfs/volumes.c chunk_size = stripe_size * data_stripes;
chunk_size 5174 fs/btrfs/volumes.c trace_btrfs_chunk_alloc(info, map, start, chunk_size);
chunk_size 5185 fs/btrfs/volumes.c em->len = chunk_size;
chunk_size 5200 fs/btrfs/volumes.c ret = btrfs_make_block_group(trans, 0, type, start, chunk_size);
chunk_size 5236 fs/btrfs/volumes.c u64 chunk_offset, u64 chunk_size)
chunk_size 5253 fs/btrfs/volumes.c em = btrfs_get_chunk_map(fs_info, chunk_offset, chunk_size);
chunk_size 5304 fs/btrfs/volumes.c btrfs_set_stack_chunk_length(chunk, chunk_size);
chunk_size 493 fs/btrfs/volumes.h u64 chunk_offset, u64 chunk_size);
chunk_size 119 fs/ext2/dir.c unsigned chunk_size = ext2_chunk_size(dir);
chunk_size 129 fs/ext2/dir.c if (limit & (chunk_size - 1))
chunk_size 144 fs/ext2/dir.c if (unlikely(((offs + rec_len - 1) ^ offs) & ~(chunk_size-1)))
chunk_size 466 fs/ext2/dir.c unsigned chunk_size = ext2_chunk_size(dir);
chunk_size 498 fs/ext2/dir.c rec_len = chunk_size;
chunk_size 499 fs/ext2/dir.c de->rec_len = ext2_rec_len_to_disk(chunk_size);
chunk_size 606 fs/ext2/dir.c unsigned chunk_size = ext2_chunk_size(inode);
chunk_size 614 fs/ext2/dir.c err = ext2_prepare_chunk(page, 0, chunk_size);
chunk_size 620 fs/ext2/dir.c memset(kaddr, 0, chunk_size);
chunk_size 630 fs/ext2/dir.c de->rec_len = ext2_rec_len_to_disk(chunk_size - EXT2_DIR_REC_LEN(1));
chunk_size 635 fs/ext2/dir.c err = ext2_commit_chunk(page, 0, chunk_size);
chunk_size 3126 fs/ext4/mballoc.c #define NRL_CHECK_SIZE(req, size, max, chunk_size) \
chunk_size 3127 fs/ext4/mballoc.c (req <= (size) || max <= (chunk_size))
chunk_size 364 fs/gfs2/rgrp.c u32 chunk_size;
chunk_size 386 fs/gfs2/rgrp.c chunk_size = ((ptr == NULL) ? bytes : (ptr - start));
chunk_size 387 fs/gfs2/rgrp.c chunk_size *= GFS2_NBBY;
chunk_size 388 fs/gfs2/rgrp.c BUG_ON(len < chunk_size);
chunk_size 389 fs/gfs2/rgrp.c len -= chunk_size;
chunk_size 391 fs/gfs2/rgrp.c if (gfs2_rbm_from_block(&rbm, block + chunk_size)) {
chunk_size 86 fs/minix/dir.c unsigned chunk_size = sbi->s_dirsize;
chunk_size 92 fs/minix/dir.c ctx->pos = pos = ALIGN(pos, chunk_size);
chunk_size 107 fs/minix/dir.c limit = kaddr + minix_last_byte(inode, n) - chunk_size;
chunk_size 128 fs/minix/dir.c ctx->pos += chunk_size;
chunk_size 80 fs/nfs/blocklayout/blocklayout.h u64 chunk_size;
chunk_size 109 fs/nfs/blocklayout/blocklayout.h u64 chunk_size;
chunk_size 127 fs/nfs/blocklayout/dev.c p = xdr_decode_hyper(p, &b->stripe.chunk_size);
chunk_size 204 fs/nfs/blocklayout/dev.c chunk = div_u64(offset, dev->chunk_size);
chunk_size 209 fs/nfs/blocklayout/dev.c __func__, chunk_idx, offset, dev->chunk_size);
chunk_size 215 fs/nfs/blocklayout/dev.c offset = chunk * dev->chunk_size;
chunk_size 225 fs/nfs/blocklayout/dev.c map->len = dev->chunk_size;
chunk_size 469 fs/nfs/blocklayout/dev.c d->chunk_size = v->stripe.chunk_size;
chunk_size 120 fs/nilfs2/dir.c unsigned int chunk_size = nilfs_chunk_size(dir);
chunk_size 129 fs/nilfs2/dir.c if (limit & (chunk_size - 1))
chunk_size 144 fs/nilfs2/dir.c if (((offs + rec_len - 1) ^ offs) & ~(chunk_size-1))
chunk_size 443 fs/nilfs2/dir.c unsigned int chunk_size = nilfs_chunk_size(dir);
chunk_size 475 fs/nilfs2/dir.c rec_len = chunk_size;
chunk_size 476 fs/nilfs2/dir.c de->rec_len = nilfs_rec_len_to_disk(chunk_size);
chunk_size 583 fs/nilfs2/dir.c unsigned int chunk_size = nilfs_chunk_size(inode);
chunk_size 591 fs/nilfs2/dir.c err = nilfs_prepare_chunk(page, 0, chunk_size);
chunk_size 597 fs/nilfs2/dir.c memset(kaddr, 0, chunk_size);
chunk_size 607 fs/nilfs2/dir.c de->rec_len = nilfs_rec_len_to_disk(chunk_size - NILFS_DIR_REC_LEN(1));
chunk_size 612 fs/nilfs2/dir.c nilfs_commit_chunk(page, mapping, 0, chunk_size);
chunk_size 312 fs/ufs/dir.c const unsigned int chunk_size = UFS_SB(sb)->s_uspi->s_dirblksize;
chunk_size 345 fs/ufs/dir.c rec_len = chunk_size;
chunk_size 346 fs/ufs/dir.c de->d_reclen = cpu_to_fs16(sb, chunk_size);
chunk_size 548 fs/ufs/dir.c const unsigned int chunk_size = UFS_SB(sb)->s_uspi->s_dirblksize;
chunk_size 556 fs/ufs/dir.c err = ufs_prepare_chunk(page, 0, chunk_size);
chunk_size 577 fs/ufs/dir.c de->d_reclen = cpu_to_fs16(sb, chunk_size - UFS_DIR_REC_LEN(1));
chunk_size 582 fs/ufs/dir.c err = ufs_commit_chunk(page, 0, chunk_size);
chunk_size 363 include/drm/drm_dsc.h __be16 chunk_size;
chunk_size 70 include/uapi/linux/if_xdp.h __u32 chunk_size;
chunk_size 193 include/uapi/linux/raid/md_p.h __u32 chunk_size; /* 1 chunk size in bytes */
chunk_size 106 include/uapi/linux/raid/md_u.h int chunk_size; /* 1 chunk size in bytes */
chunk_size 152 include/uapi/linux/raid/md_u.h int chunk_size; /* in bytes */
chunk_size 64 include/uapi/linux/xdp_diag.h __u32 chunk_size;
chunk_size 202 init/do_mounts_md.c ainfo.chunk_size = md_setup_args[ent].chunk;
chunk_size 251 lib/genalloc.c end_bit = chunk_size(chunk) >> order;
chunk_size 300 lib/genalloc.c end_bit = chunk_size(chunk) >> order;
chunk_size 603 lib/genalloc.c size += chunk_size(chunk);
chunk_size 420 lib/scatterlist.c unsigned int j, chunk_size;
chunk_size 432 lib/scatterlist.c chunk_size = ((j - cur_page) << PAGE_SHIFT) - offset;
chunk_size 434 lib/scatterlist.c min_t(unsigned long, size, chunk_size), offset);
chunk_size 435 lib/scatterlist.c size -= chunk_size;
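
The fs/ext2/dir.c and fs/nilfs2/dir.c hits above use an XOR trick to detect a directory record that crosses a chunk boundary: if the offsets of the record's first and last bytes differ in any bit above the chunk mask, the record spans two chunks. chunk_size must be a power of two for this to work. A self-checking sketch:

#include <assert.h>
#include <stdbool.h>

/* True if a record crosses a chunk boundary; chunk_size must be a power of two. */
static bool crosses_chunk(unsigned int offs, unsigned int rec_len,
			  unsigned int chunk_size)
{
	return (((offs + rec_len - 1) ^ offs) & ~(chunk_size - 1)) != 0;
}

int main(void)
{
	assert(!crosses_chunk(0, 1024, 1024));   /* exactly fills chunk 0 */
	assert(crosses_chunk(1000, 100, 1024));  /* byte 1099 spills into chunk 1 */
	return 0;
}
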
chunk_size 3749 net/core/devlink.c u8 *chunk, u32 chunk_size,
chunk_size 3759 net/core/devlink.c err = nla_put(msg, DEVLINK_ATTR_REGION_CHUNK_DATA, chunk_size, chunk);
chunk_size 343 net/xdp/xdp_umem.c u32 chunk_size = mr->chunk_size, headroom = mr->headroom;
chunk_size 348 net/xdp/xdp_umem.c if (chunk_size < XDP_UMEM_MIN_CHUNK_SIZE || chunk_size > PAGE_SIZE) {
chunk_size 362 net/xdp/xdp_umem.c if (!unaligned_chunks && !is_power_of_2(chunk_size))
chunk_size 379 net/xdp/xdp_umem.c chunks = (unsigned int)div_u64(size, chunk_size);
chunk_size 384 net/xdp/xdp_umem.c chunks_per_page = PAGE_SIZE / chunk_size;
chunk_size 389 net/xdp/xdp_umem.c if (headroom >= chunk_size - XDP_PACKET_HEADROOM)
chunk_size 394 net/xdp/xdp_umem.c : ~((u64)chunk_size - 1);
chunk_size 397 net/xdp/xdp_umem.c umem->chunk_size_nohr = chunk_size - headroom;
chunk_size 730 net/xdp/xsk.c __u32 chunk_size;
chunk_size 59 net/xdp/xsk_diag.c du.chunk_size = umem->chunk_size_nohr + umem->headroom;
chunk_size 70 tools/include/uapi/linux/if_xdp.h __u32 chunk_size;
chunk_size 235 tools/lib/bpf/xsk.c mr.chunk_size = umem->config.frame_size;
chunk_size 85 tools/testing/selftests/net/tcp_mmap.c static int chunk_size = 512*1024;
chunk_size 139 tools/testing/selftests/net/tcp_mmap.c buffer = malloc(chunk_size);
chunk_size 145 tools/testing/selftests/net/tcp_mmap.c addr = mmap(NULL, chunk_size, PROT_READ, flags, fd, 0);
chunk_size 159 tools/testing/selftests/net/tcp_mmap.c zc.length = chunk_size;
chunk_size 167 tools/testing/selftests/net/tcp_mmap.c assert(zc.length <= chunk_size);
chunk_size 174 tools/testing/selftests/net/tcp_mmap.c assert(zc.recv_skip_hint <= chunk_size);
chunk_size 185 tools/testing/selftests/net/tcp_mmap.c while (sub < chunk_size) {
chunk_size 186 tools/testing/selftests/net/tcp_mmap.c lu = read(fd, buffer + sub, chunk_size - sub);
chunk_size 225 tools/testing/selftests/net/tcp_mmap.c munmap(addr, chunk_size);
chunk_size 274 tools/testing/selftests/net/tcp_mmap.c &chunk_size, sizeof(chunk_size)) == -1) {
chunk_size 384 tools/testing/selftests/net/tcp_mmap.c buffer = mmap(NULL, chunk_size, PROT_READ | PROT_WRITE,
chunk_size 422 tools/testing/selftests/net/tcp_mmap.c if (wr > chunk_size)
chunk_size 423 tools/testing/selftests/net/tcp_mmap.c wr = chunk_size;
chunk_size 431 tools/testing/selftests/net/tcp_mmap.c munmap(buffer, chunk_size);
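
Finally, the net/xdp/xdp_umem.c hits above validate a UMEM chunk configuration: the chunk must fit in a page, meet the minimum size, be a power of two unless unaligned chunks are enabled, and leave room for the packet after the requested headroom. A reduced sketch of those checks; the constants mirror the uapi names but are hard-coded here for illustration:

#include <stdbool.h>
#include <stdint.h>

#define MIN_CHUNK_SIZE 2048u  /* mirrors XDP_UMEM_MIN_CHUNK_SIZE */
#define PAGE_SZ        4096u  /* illustrative page size */
#define PKT_HEADROOM   256u   /* mirrors XDP_PACKET_HEADROOM */

static bool chunk_config_valid(uint32_t chunk_size, uint32_t headroom,
			       bool unaligned_chunks)
{
	/* The chunk must fit in a page and meet the minimum size. */
	if (chunk_size < MIN_CHUNK_SIZE || chunk_size > PAGE_SZ)
		return false;
	/* Aligned mode additionally requires a power-of-two chunk. */
	if (!unaligned_chunks && (chunk_size & (chunk_size - 1)))
		return false;
	/* Headroom must leave room for the packet itself. */
	if (headroom >= chunk_size - PKT_HEADROOM)
		return false;
	return true;
}
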