aligned_start 751 arch/arm/kernel/setup.c u64 aligned_start;
aligned_start 757 arch/arm/kernel/setup.c aligned_start = PAGE_ALIGN(start);
aligned_start 758 arch/arm/kernel/setup.c if (aligned_start > start + size)
aligned_start 761 arch/arm/kernel/setup.c size -= aligned_start - start;
aligned_start 764 arch/arm/kernel/setup.c if (aligned_start > ULONG_MAX) {
aligned_start 770 arch/arm/kernel/setup.c if (aligned_start + size > ULONG_MAX) {
aligned_start 778 arch/arm/kernel/setup.c size = ULONG_MAX - aligned_start;
aligned_start 782 arch/arm/kernel/setup.c if (aligned_start < PHYS_OFFSET) {
aligned_start 783 arch/arm/kernel/setup.c if (aligned_start + size <= PHYS_OFFSET) {
aligned_start 785 arch/arm/kernel/setup.c aligned_start, aligned_start + size);
aligned_start 790 arch/arm/kernel/setup.c aligned_start, (u64)PHYS_OFFSET);
aligned_start 792 arch/arm/kernel/setup.c size -= PHYS_OFFSET - aligned_start;
aligned_start 793 arch/arm/kernel/setup.c aligned_start = PHYS_OFFSET;
aligned_start 796 arch/arm/kernel/setup.c start = aligned_start;
aligned_start 586 arch/arm64/mm/init.c unsigned long aligned_start, aligned_end;
aligned_start 588 arch/arm64/mm/init.c aligned_start = __virt_to_phys(start) & PAGE_MASK;
aligned_start 590 arch/arm64/mm/init.c memblock_free(aligned_start, aligned_end - aligned_start);
aligned_start 696 arch/powerpc/mm/book3s64/radix_pgtable.c unsigned long aligned_start;
aligned_start 709 arch/powerpc/mm/book3s64/radix_pgtable.c pte_clear(&init_mm, params->aligned_start, params->pte);
aligned_start 710 arch/powerpc/mm/book3s64/radix_pgtable.c create_physical_mapping(__pa(params->aligned_start), __pa(params->start), -1);
aligned_start 751 arch/powerpc/mm/book3s64/radix_pgtable.c unsigned long aligned_start = addr & mask;
aligned_start 764 arch/powerpc/mm/book3s64/radix_pgtable.c if (overlaps_kernel_text(aligned_start, addr) ||
aligned_start 780 arch/powerpc/mm/book3s64/radix_pgtable.c params.aligned_start = addr & ~(size - 1);
aligned_start 76 arch/sh/mm/cache-sh5.c unsigned long long ullend, addr, aligned_start;
aligned_start 77 arch/sh/mm/cache-sh5.c aligned_start = (unsigned long long)(signed long long)(signed long) start;
aligned_start 78 arch/sh/mm/cache-sh5.c addr = L1_CACHE_ALIGN(aligned_start);
aligned_start 158 arch/sh/mm/cache-sh5.c unsigned long aligned_start;
aligned_start 173 arch/sh/mm/cache-sh5.c aligned_start = start & PAGE_MASK;
aligned_start 176 arch/sh/mm/cache-sh5.c while (aligned_start < after_last_page_start) {
aligned_start 179 arch/sh/mm/cache-sh5.c vma = find_vma(mm, aligned_start);
aligned_start 180 arch/sh/mm/cache-sh5.c if (!vma || (aligned_start <= vma->vm_end)) {
aligned_start 182 arch/sh/mm/cache-sh5.c aligned_start += PAGE_SIZE;
aligned_start 188 arch/sh/mm/cache-sh5.c eaddr = aligned_start;
aligned_start 194 arch/sh/mm/cache-sh5.c aligned_start = vma->vm_end; /* Skip to start of next region */
aligned_start 210 arch/sh/mm/cache-sh5.c unsigned long long aligned_start;
aligned_start 222 arch/sh/mm/cache-sh5.c aligned_start = L1_CACHE_ALIGN(start);
aligned_start 223 arch/sh/mm/cache-sh5.c addr = aligned_start;
aligned_start 16 arch/sh/mm/flush-sh4.c reg_size_t aligned_start, v, cnt, end;
aligned_start 18 arch/sh/mm/flush-sh4.c aligned_start = register_align(start);
aligned_start 19 arch/sh/mm/flush-sh4.c v = aligned_start & ~(L1_CACHE_BYTES-1);
aligned_start 20 arch/sh/mm/flush-sh4.c end = (aligned_start + size + L1_CACHE_BYTES-1)
aligned_start 50 arch/sh/mm/flush-sh4.c reg_size_t aligned_start, v, cnt, end;
aligned_start 52 arch/sh/mm/flush-sh4.c aligned_start = register_align(start);
aligned_start 53 arch/sh/mm/flush-sh4.c v = aligned_start & ~(L1_CACHE_BYTES-1);
aligned_start 54 arch/sh/mm/flush-sh4.c end = (aligned_start + size + L1_CACHE_BYTES-1)
aligned_start 80 arch/sh/mm/flush-sh4.c reg_size_t aligned_start, v, cnt, end;
aligned_start 82 arch/sh/mm/flush-sh4.c aligned_start = register_align(start);
aligned_start 83 arch/sh/mm/flush-sh4.c v = aligned_start & ~(L1_CACHE_BYTES-1);
aligned_start 84 arch/sh/mm/flush-sh4.c end = (aligned_start + size + L1_CACHE_BYTES-1)
aligned_start 1038 drivers/misc/cxl/pci.c loff_t aligned_start, aligned_end;
aligned_start 1048 drivers/misc/cxl/pci.c aligned_start = round_down(off, 8);
aligned_start 1050 drivers/misc/cxl/pci.c aligned_length = aligned_end - aligned_start;
aligned_start 1064 drivers/misc/cxl/pci.c memcpy_fromio(tbuf, ebuf + aligned_start, aligned_length);
aligned_start 625 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c u8 *buffer_start, *aligned_start;
aligned_start 634 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c aligned_start = PTR_ALIGN(buffer_start - DPAA2_ETH_TX_BUF_ALIGN,
aligned_start 636 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c if (aligned_start >= skb->head)
aligned_start 637 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c buffer_start = aligned_start;
aligned_start 1857 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c void *buffer_start, *aligned_start;
aligned_start 1876 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c aligned_start = PTR_ALIGN(buffer_start - DPAA2_ETH_TX_BUF_ALIGN,
aligned_start 1878 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c if (aligned_start >= xdpf->data - xdpf->headroom)
aligned_start 1879 drivers/net/ethernet/freescale/dpaa2/dpaa2-eth.c buffer_start = aligned_start;
aligned_start 1239 fs/btrfs/extent-tree.c u64 aligned_start = ALIGN(start, 1 << 9);
aligned_start 1241 fs/btrfs/extent-tree.c if (WARN_ON(start != aligned_start)) {
aligned_start 1242 fs/btrfs/extent-tree.c len -= aligned_start - start;
aligned_start 1244 fs/btrfs/extent-tree.c start = aligned_start;
aligned_start 248 fs/jffs2/file.c unsigned aligned_start = start & ~3;
aligned_start 267 fs/jffs2/file.c aligned_start = 0;
aligned_start 292 fs/jffs2/file.c ret = jffs2_write_inode_range(c, f, ri, page_address(pg) + aligned_start,
aligned_start 293 fs/jffs2/file.c (pg->index << PAGE_SHIFT) + aligned_start,
aligned_start 294 fs/jffs2/file.c end - aligned_start, &writtenlen);
aligned_start 304 fs/jffs2/file.c writtenlen -= min(writtenlen, (start - aligned_start));
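
Note: most of the call sites listed above share the same idiom: round a (start, size) range outward to a power-of-two boundary (PAGE_ALIGN, PTR_ALIGN, round_down, L1_CACHE_ALIGN, or a raw mask such as start & ~3) and then adjust the length to match. The following is a minimal userspace sketch of that idiom, not code from any of the files above; the helper names align_down/align_up are hypothetical stand-ins for the kernel macros.

#include <stdint.h>
#include <stdio.h>

/* Round x down to a multiple of a; a must be a power of two. */
static uint64_t align_down(uint64_t x, uint64_t a)
{
	return x & ~(a - 1);
}

/* Round x up to a multiple of a; a must be a power of two. */
static uint64_t align_up(uint64_t x, uint64_t a)
{
	return (x + a - 1) & ~(a - 1);
}

int main(void)
{
	uint64_t start = 0x1234, size = 0x100, align = 64;

	/* Widen [start, start + size) to the enclosing aligned range. */
	uint64_t aligned_start = align_down(start, align);
	uint64_t aligned_end   = align_up(start + size, align);

	printf("aligned_start=0x%llx aligned_len=0x%llx\n",
	       (unsigned long long)aligned_start,
	       (unsigned long long)(aligned_end - aligned_start));
	return 0;
}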