arch/arc/mm/cache.c:1193: write_aux_reg(ARC_REG_IO_COH_AP0_SIZE, order_base_2(mem_sz >> 10) - 2);
arch/powerpc/kvm/book3s_hv_builtin.c:74: VM_BUG_ON(order_base_2(nr_pages) < KVM_CMA_CHUNK_ORDER - PAGE_SHIFT);
arch/powerpc/kvm/book3s_hv_builtin.c:76: return cma_alloc(kvm_cma, nr_pages, order_base_2(HPT_ALIGN_PAGES),
arch/powerpc/platforms/pseries/iommu.c:1067: len = order_base_2(max_addr);
arch/x86/kvm/x86.c:10084: return hash_32(gfn & 0xffffffff, order_base_2(ASYNC_PF_PER_VCPU));
block/blk-throttle.c:355: clamp_t(int, order_base_2(sectors) - 3, 0, LATENCY_BUCKET_SIZE - 1)
drivers/clk/mvebu/armada-37xx-periph.c:653: rate->width = order_base_2(table_size);
drivers/clk/sunxi/clk-sun9i-core.c:161: _p = order_base_2(DIV_ROUND_UP(req->parent_rate, req->rate));
drivers/clk/sunxi/clk-sun9i-core.c:247: req->p = order_base_2(div);
drivers/clk/sunxi/clk-sunxi.c:266: div = order_base_2(DIV_ROUND_UP(req->parent_rate, req->rate));
drivers/gpu/drm/amd/amdgpu/amdgpu_device.c:755: u32 rbar_size = order_base_2(((space_needed >> 20) | 1)) - 1;
drivers/gpu/drm/amd/amdgpu/amdgpu_ih.c:48: rb_bufsz = order_base_2(ring_size / 4);
drivers/gpu/drm/amd/amdgpu/cik_ih.c:127: rb_bufsz = order_base_2(adev->irq.ih.ring_size / 4);
drivers/gpu/drm/amd/amdgpu/cik_sdma.c:462: rb_bufsz = order_base_2(ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/cz_ih.c:129: rb_bufsz = order_base_2(adev->irq.ih.ring_size / 4);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c:1583: (order_base_2(num_sc) << PA_SC_TILE_STEERING_OVERRIDE__NUM_SC__SHIFT) &
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c:1586: (order_base_2(num_rb_per_sc) << PA_SC_TILE_STEERING_OVERRIDE__NUM_RB_PER_SC__SHIFT) &
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c:1589: (order_base_2(num_packer_per_sc) << PA_SC_TILE_STEERING_OVERRIDE__NUM_PACKER_PER_SC__SHIFT) &
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c:2831: rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c:2872: rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c:3074: rb_bufsz = order_base_2(ring->ring_size / 4) - 1;
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c:3282: (order_base_2(GFX10_MEC_HPD_SIZE / 4) - 1));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c:3329: (order_base_2(ring->ring_size / 4) - 1));
drivers/gpu/drm/amd/amdgpu/gfx_v10_0.c:3331: ((order_base_2(AMDGPU_GPU_PAGE_SIZE / 4) - 1) << 8));
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c:2110: rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c:2111: tmp = (order_base_2(AMDGPU_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c:2199: rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c:2200: tmp = (order_base_2(AMDGPU_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c:2219: rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/amd/amdgpu/gfx_v6_0.c:2220: tmp = (order_base_2(AMDGPU_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c:2622: rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c:2623: tmp = (order_base_2(AMDGPU_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c:2897: tmp |= order_base_2(GFX7_MEC_HPD_SIZE / 8);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c:2973: order_base_2(ring->ring_size / 8);
drivers/gpu/drm/amd/amdgpu/gfx_v7_0.c:2975: (order_base_2(AMDGPU_GPU_PAGE_SIZE/8) << 8);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c:4306: rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c:4482: (order_base_2(GFX8_MEC_HPD_SIZE / 4) - 1));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c:4511: (order_base_2(ring->ring_size / 4) - 1));
drivers/gpu/drm/amd/amdgpu/gfx_v8_0.c:4513: ((order_base_2(AMDGPU_GPU_PAGE_SIZE / 4) - 1) << 8));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c:3214: rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c:3443: (order_base_2(GFX9_MEC_HPD_SIZE / 4) - 1));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c:3490: (order_base_2(ring->ring_size / 4) - 1));
drivers/gpu/drm/amd/amdgpu/gfx_v9_0.c:3492: ((order_base_2(AMDGPU_GPU_PAGE_SIZE / 4) - 1) << 8));
drivers/gpu/drm/amd/amdgpu/iceland_ih.c:129: rb_bufsz = order_base_2(adev->irq.ih.ring_size / 4);
drivers/gpu/drm/amd/amdgpu/navi10_ih.c:78: int rb_bufsz = order_base_2(ih->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/sdma_v2_4.c:438: rb_bufsz = order_base_2(ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/sdma_v3_0.c:676: rb_bufsz = order_base_2(ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c:963: uint32_t rb_bufsz = order_base_2(ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/sdma_v5_0.c:641: rb_bufsz = order_base_2(ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/si_dma.c:145: rb_bufsz = order_base_2(ring->ring_size / 4);
drivers/gpu/drm/amd/amdgpu/si_ih.c:75: rb_bufsz = order_base_2(adev->irq.ih.ring_size / 4);
drivers/gpu/drm/amd/amdgpu/tonga_ih.c:125: rb_bufsz = order_base_2(adev->irq.ih.ring_size / 4);
drivers/gpu/drm/amd/amdgpu/uvd_v4_2.c:367: rb_bufsz = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/uvd_v5_0.c:392: rb_bufsz = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c:811: rb_bufsz = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c:894: size = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/uvd_v7_0.c:1061: rb_bufsz = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c:904: rb_bufsz = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/vcn_v1_0.c:1077: rb_bufsz = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c:1021: rb_bufsz = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/vcn_v2_0.c:1186: rb_bufsz = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/vcn_v2_5.c:847: rb_bufsz = order_base_2(ring->ring_size);
drivers/gpu/drm/amd/amdgpu/vega10_ih.c:168: int rb_bufsz = order_base_2(ih->ring_size / 4);
drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_cik.c:205: m->cp_hqd_pq_control |= order_base_2(q->queue_size / 4) - 1;
drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_cik.c:241: m->sdma_rlc_rb_cntl = order_base_2(q->queue_size / 4)
drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_cik.c:328: m->cp_hqd_pq_control |= order_base_2(q->queue_size / 4) - 1;
drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_v9.c:202: m->cp_hqd_pq_control |= order_base_2(q->queue_size / 4) - 1;
drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_v9.c:231: order_base_2(q->eop_ring_buffer_size / 4) - 1);
drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_v9.c:373: m->sdmax_rlcx_rb_cntl = order_base_2(q->queue_size / 4)
drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_vi.c:180: m->cp_hqd_pq_control |= order_base_2(q->queue_size / 4) - 1;
drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_vi.c:212: order_base_2(q->eop_ring_buffer_size / 4) - 1);
drivers/gpu/drm/amd/amdkfd/kfd_mqd_manager_vi.c:354: m->sdmax_rlcx_rb_cntl = order_base_2(q->queue_size / 4)
drivers/gpu/drm/drm_bufs.c:253: map->size, order_base_2(map->size), map->handle);
drivers/gpu/drm/drm_bufs.c:733: order = order_base_2(request->size);
drivers/gpu/drm/drm_bufs.c:904: order = order_base_2(request->size);
drivers/gpu/drm/drm_bufs.c:1108: order = order_base_2(request->size);
drivers/gpu/drm/drm_bufs.c:1400: order = order_base_2(request->size);
drivers/gpu/drm/etnaviv/etnaviv_cmdbuf.c:93: order = order_base_2(ALIGN(size, SUBALLOC_GRANULE) / SUBALLOC_GRANULE);
drivers/gpu/drm/etnaviv/etnaviv_cmdbuf.c:121: int order = order_base_2(ALIGN(cmdbuf->size, SUBALLOC_GRANULE) /
drivers/gpu/drm/nouveau/nvkm/core/ramht.c:153: ramht->bits = order_base_2(ramht->size);
drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifog84.c:69: ilength = order_base_2(args->v0.ilength / 8);
drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogf100.c:264: ilength = order_base_2(args->v0.ilength / 8);
drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogk104.c:299: ilength = order_base_2(ilength / 8);
drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifogv100.c:179: ilength = order_base_2(ilength / 8);
drivers/gpu/drm/nouveau/nvkm/engine/fifo/gpfifonv50.c:69: ilength = order_base_2(args->v0.ilength / 8);
drivers/gpu/drm/nouveau/nvkm/subdev/instmem/nv50.c:357: u8 page = max(order_base_2(align), 12);
drivers/gpu/drm/nouveau/nvkm/subdev/mmu/base.c:89: ptp->shift = order_base_2(size);
drivers/gpu/drm/r128/r128_cce.c:550: dev_priv->ring.size_l2qw = order_base_2(init->ring_size / 8);
drivers/gpu/drm/radeon/cik.c:4083: rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/radeon/cik.c:4084: tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
drivers/gpu/drm/radeon/cik.c:4563: tmp |= order_base_2(MEC_HPD_SIZE / 8);
drivers/gpu/drm/radeon/cik.c:4677: order_base_2(rdev->ring[idx].ring_size / 8);
drivers/gpu/drm/radeon/cik.c:4679: (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8);
drivers/gpu/drm/radeon/cik.c:6984: rb_bufsz = order_base_2(rdev->ih.ring_size / 4);
drivers/gpu/drm/radeon/cik_sdma.c:388: rb_bufsz = order_base_2(ring->ring_size / 4);
drivers/gpu/drm/radeon/evergreen.c:3083: rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/radeon/evergreen.c:3084: tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
drivers/gpu/drm/radeon/ni.c:1694: rb_cntl = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/radeon/ni.c:1695: rb_cntl |= order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8;
drivers/gpu/drm/radeon/ni_dma.c:210: rb_bufsz = order_base_2(ring->ring_size / 4);
drivers/gpu/drm/radeon/r100.c:1135: rb_bufsz = order_base_2(ring_size / 8);
drivers/gpu/drm/radeon/r600.c:2729: rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/radeon/r600.c:2730: tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
drivers/gpu/drm/radeon/r600.c:2785: rb_bufsz = order_base_2(ring_size / 8);
drivers/gpu/drm/radeon/r600.c:3474: rb_bufsz = order_base_2(ring_size / 4);
drivers/gpu/drm/radeon/r600.c:3711: rb_bufsz = order_base_2(rdev->ih.ring_size / 4);
drivers/gpu/drm/radeon/r600_dma.c:131: rb_bufsz = order_base_2(ring->ring_size / 4);
drivers/gpu/drm/radeon/si.c:3669: rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/radeon/si.c:3670: tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
drivers/gpu/drm/radeon/si.c:3700: rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/radeon/si.c:3701: tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
drivers/gpu/drm/radeon/si.c:3724: rb_bufsz = order_base_2(ring->ring_size / 8);
drivers/gpu/drm/radeon/si.c:3725: tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
drivers/gpu/drm/radeon/si.c:6012: rb_bufsz = order_base_2(rdev->ih.ring_size / 4);
drivers/gpu/drm/radeon/uvd_v1_0.c:377: rb_bufsz = order_base_2(ring->ring_size);
drivers/infiniband/sw/rxe/rxe_queue.c:108: q->log2_elem_size = order_base_2(elem_size);
drivers/iommu/iova.c:940: unsigned int log_size = order_base_2(size);
drivers/iommu/iova.c:996: unsigned int log_size = order_base_2(size);
drivers/iommu/virtio-iommu.c:1118: order_base_2(viommu->geometry.aperture_end));
drivers/mmc/host/usdhi6rol0.c:886: val = order_base_2(ticks) - 13;
drivers/mtd/spi-nor/spi-nor.c:1561: pow = ilog2(mtd->size) - order_base_2(lock_len);
drivers/net/ethernet/intel/ice/ice_lib.c:926: pow = order_base_2(qcount_rx);
drivers/net/ethernet/mellanox/mlx4/main.c:3914: log_num_mac = order_base_2(saved_value.vu32);
drivers/net/ethernet/mellanox/mlx5/core/en.h:87: MLX5_MPWRQ_LOG_STRIDE_SZ(mdev, order_base_2(MLX5E_RX_MAX_HEAD))
drivers/net/ethernet/mellanox/mlx5/core/en.h:100: #define MLX5E_ORDER2_MAX_PACKET_MTU (order_base_2(10 * 1024))
drivers/net/ethernet/mellanox/mlx5/core/en/params.c:72: return MLX5_MPWRQ_LOG_WQE_SZ - order_base_2(linear_frag_sz);
drivers/net/ethernet/mellanox/mlx5/core/en/params.c:100: if (order_base_2(linear_frag_sz) > MLX5_MAX_MPWQE_LOG_WQE_STRIDE_SZ)
drivers/net/ethernet/mellanox/mlx5/core/en/params.c:106: log_num_strides = MLX5_MPWRQ_LOG_WQE_SZ - order_base_2(linear_frag_sz);
drivers/net/ethernet/mellanox/mlx5/core/en/params.c:131: return order_base_2(mlx5e_rx_get_linear_frag_sz(params, xsk));
drivers/net/ethernet/mellanox/mlx5/core/en_ethtool.c:355: log_rq_size = order_base_2(param->rx_pending);
drivers/net/ethernet/mellanox/mlx5/core/en_ethtool.c:356: log_sq_size = order_base_2(param->tx_pending);
drivers/net/ethernet/mellanox/mlx5/core/en_main.c:2123: info->log_num_frags = order_base_2(info->num_frags);
drivers/net/ethernet/mellanox/mlx5/core/en_main.c:2138: return order_base_2(sz);
drivers/net/ethernet/mellanox/mlx5/core/en_main.c:2321: return order_base_2(MLX5E_UMR_WQEBBS) +
drivers/net/ethernet/netronome/nfp/abm/cls.c:127: bits_per_prio = roundup_pow_of_two(order_base_2(abm->num_bands));
drivers/net/ethernet/netronome/nfp/abm/cls.c:132: base_shift = 8 - order_base_2(abm->num_prios);
drivers/net/ethernet/netronome/nfp/abm/ctrl.c:319: size = roundup_pow_of_two(order_base_2(abm->num_bands));
drivers/net/ethernet/netronome/nfp/abm/ctrl.c:392: abm->dscp_mask = GENMASK(7, 8 - order_base_2(abm->num_prios));
drivers/net/ethernet/netronome/nfp/nfpcore/nfp_nsp.c:632: buf_order = order_base_2(roundup_pow_of_two(max_size));
drivers/net/ethernet/ti/cpsw_ale.c:834: ale->port_num_bits = order_base_2(ale->params.ale_ports);
drivers/pci/controller/dwc/pcie-designware-host.c:218: order_base_2(nr_irqs));
drivers/pci/controller/dwc/pcie-designware-host.c:244: order_base_2(nr_irqs));
drivers/pci/controller/pcie-rcar.c:720: order_base_2(no_irqs));
drivers/pci/controller/pcie-rcar.c:842: desc->msi_attrib.multiple = order_base_2(nvec);
drivers/pci/endpoint/pci-epc-core.c:264: encode_int = order_base_2(interrupts);
drivers/pinctrl/intel/pinctrl-intel.c:742: v = order_base_2(debounce * NSEC_PER_USEC / DEBOUNCE_PERIOD_NSEC);
drivers/pinctrl/qcom/pinctrl-msm.c:168: mask = GENMASK(g->mux_bit + order_base_2(g->nfuncs) - 1, g->mux_bit);
drivers/soc/ti/knav_qmss_queue.c:1723: kdev->inst_shift = order_base_2(size);
drivers/staging/media/allegro-dvt/nal-h264.c:709: order_base_2(pps->num_slice_groups_minus1 + 1),
drivers/watchdog/imgpdc_wdt.c:120: val |= order_base_2(wdt->wdt_dev.timeout * clk_rate) - 1;
drivers/watchdog/imgpdc_wdt.c:238: if (order_base_2(clk_rate) > PDC_WDT_CONFIG_DELAY_MASK + 1) {
drivers/watchdog/imgpdc_wdt.c:243: if (order_base_2(clk_rate) == 0)
drivers/watchdog/mlx_wdt.c:119: hw_timeout = order_base_2(timeout * MLXREG_WDT_CLOCK_SCALE);
fs/ext4/indirect.c:674: blk_bits = order_base_2(lblock);
fs/ext4/mballoc.c:2549: int blocksize_bits = order_base_2(size);
fs/jbd2/journal.c:2299: int i = order_base_2(size) - 10;
fs/jbd2/journal.c:2329: int i = order_base_2(size) - 10;
include/linux/log2.h:230: return order_base_2(n) + 1;
include/linux/log2.h:231: return order_base_2(n);
include/linux/mm_types.h:232: #define STRUCT_PAGE_MAX_SHIFT (order_base_2(sizeof(struct page)))
kernel/events/ring_buffer.c:766: if (order_base_2(size) >= PAGE_SHIFT+MAX_ORDER)
mm/percpu-km.c:59: pages = alloc_pages(gfp, order_base_2(nr_pages));
mm/percpu-km.c:92: __free_pages(chunk->data, order_base_2(nr_pages));
net/ipv4/tcp_metrics.c:1000: tcp_metrics_hash_log = order_base_2(slots);
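For reference, a minimal user-space sketch of the semantics these call sites rely on: order_base_2(n) is the rounded-up base-2 order of n, i.e. the smallest k such that 2^k >= n, with both 0 and 1 mapping to 0. The re-implementation and the sample values below are illustrative assumptions for this listing, not code taken from any of the files above.

#include <stdio.h>

/*
 * Stand-in mirroring the documented behaviour of the kernel's
 * order_base_2(): smallest k with (1 << k) >= n; 0 and 1 both give 0.
 */
static unsigned int order_base_2(unsigned long n)
{
	unsigned int order = 0;

	while ((1UL << order) < n)
		order++;
	return order;
}

int main(void)
{
	/*
	 * The dominant pattern in the listing: ring buffers are programmed
	 * with log2 of their entry count, e.g.
	 * rb_bufsz = order_base_2(ring->ring_size / 4).
	 * The ring size here is an arbitrary example value.
	 */
	unsigned long ring_size = 256 * 1024;	/* bytes */

	printf("order_base_2(%lu) = %u\n",
	       ring_size / 4, order_base_2(ring_size / 4));	/* 65536 -> 16 */
	printf("order_base_2(33) = %u\n", order_base_2(33));	/* -> 6 */
	printf("order_base_2(1)  = %u\n", order_base_2(1));	/* -> 0 */
	return 0;
}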