Searched refs:aligned_start (Results 1 – 5 of 5) sorted by relevance
/linux-4.1.27/arch/sh/mm/

flush-sh4.c
    15  reg_size_t aligned_start, v, cnt, end;           in sh4__flush_wback_region() local
    17  aligned_start = register_align(start);           in sh4__flush_wback_region()
    18  v = aligned_start & ~(L1_CACHE_BYTES-1);         in sh4__flush_wback_region()
    19  end = (aligned_start + size + L1_CACHE_BYTES-1)  in sh4__flush_wback_region()
    49  reg_size_t aligned_start, v, cnt, end;           in sh4__flush_purge_region() local
    51  aligned_start = register_align(start);           in sh4__flush_purge_region()
    52  v = aligned_start & ~(L1_CACHE_BYTES-1);         in sh4__flush_purge_region()
    53  end = (aligned_start + size + L1_CACHE_BYTES-1)  in sh4__flush_purge_region()
    79  reg_size_t aligned_start, v, cnt, end;           in sh4__flush_invalidate_region() local
    81  aligned_start = register_align(start);           in sh4__flush_invalidate_region()
    [all …]
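
The three SH-4 flush routines above share one pattern: align the start of the region down to a cache-line boundary, round the end up, and step through it one cache line at a time. A minimal userspace sketch of that arithmetic, assuming a 32-byte L1_CACHE_BYTES and arbitrary example addresses (the real values come from the kernel headers and the caller):

    /* Sketch of the alignment arithmetic in sh4__flush_wback_region():
     * align start down to a cache line, round the end up, so every line
     * touched by [start, start + size) is covered exactly once. */
    #include <stdio.h>
    #include <stdint.h>

    #define L1_CACHE_BYTES 32UL   /* assumed SH-4 cache-line size */

    int main(void)
    {
        uintptr_t start = 0x8c001234;   /* arbitrary example address */
        size_t size = 100;

        uintptr_t v   = start & ~(L1_CACHE_BYTES - 1);           /* align down */
        uintptr_t end = (start + size + L1_CACHE_BYTES - 1)
                        & ~(L1_CACHE_BYTES - 1);                 /* align up   */
        size_t cnt = (end - v) / L1_CACHE_BYTES;                 /* lines to flush */

        printf("flush %zu cache lines from %#lx to %#lx\n",
               cnt, (unsigned long)v, (unsigned long)end);
        return 0;
    }
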
cache-sh5.c
    76  unsigned long long ullend, addr, aligned_start;                             in sh64_icache_inv_kernel_range() local
    77  aligned_start = (unsigned long long)(signed long long)(signed long) start;  in sh64_icache_inv_kernel_range()
    78  addr = L1_CACHE_ALIGN(aligned_start);                                       in sh64_icache_inv_kernel_range()
   158  unsigned long aligned_start;                     in sh64_icache_inv_user_page_range() local
   173  aligned_start = start & PAGE_MASK;               in sh64_icache_inv_user_page_range()
   176  while (aligned_start < after_last_page_start) {  in sh64_icache_inv_user_page_range()
   179  vma = find_vma(mm, aligned_start);               in sh64_icache_inv_user_page_range()
   180  if (!vma || (aligned_start <= vma->vm_end)) {    in sh64_icache_inv_user_page_range()
   182  aligned_start += PAGE_SIZE;                      in sh64_icache_inv_user_page_range()
   188  eaddr = aligned_start;                           in sh64_icache_inv_user_page_range()
    [all …]
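
sh64_icache_inv_user_page_range() masks the start with PAGE_MASK and then walks the range one page at a time. A rough sketch of that page-walk skeleton, with a 4 KiB PAGE_SIZE assumed and a placeholder invalidate_page() standing in for the real cache operation and the find_vma() checks the kernel performs per page:

    /* Sketch of the page-granular walk in sh64_icache_inv_user_page_range():
     * round start down to a page boundary and step by PAGE_SIZE until the
     * page containing (end - 1) has been covered. */
    #include <stdio.h>
    #include <stdint.h>

    #define PAGE_SIZE ((uintptr_t)4096)
    #define PAGE_MASK (~(PAGE_SIZE - 1))

    static void invalidate_page(uintptr_t addr)   /* placeholder for the cache op */
    {
        printf("invalidate icache for page at %#lx\n", (unsigned long)addr);
    }

    int main(void)
    {
        uintptr_t start = 0x400123, end = 0x403456;   /* arbitrary user range */
        uintptr_t aligned_start = start & PAGE_MASK;
        uintptr_t after_last_page_start = (end + PAGE_SIZE - 1) & PAGE_MASK;

        while (aligned_start < after_last_page_start) {
            /* the kernel checks find_vma() here and skips unmapped pages */
            invalidate_page(aligned_start);
            aligned_start += PAGE_SIZE;
        }
        return 0;
    }
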
/linux-4.1.27/arch/arm/kernel/

setup.c
   671  u64 aligned_start;                       in arm_add_memory() local
   677  aligned_start = PAGE_ALIGN(start);       in arm_add_memory()
   678  if (aligned_start > start + size)        in arm_add_memory()
   681  size -= aligned_start - start;           in arm_add_memory()
   684  if (aligned_start > ULONG_MAX) {         in arm_add_memory()
   690  if (aligned_start + size > ULONG_MAX) {  in arm_add_memory()
   698  size = ULONG_MAX - aligned_start;        in arm_add_memory()
   702  if (aligned_start < PHYS_OFFSET) {       in arm_add_memory()
   703  if (aligned_start + size <= PHYS_OFFSET) {  in arm_add_memory()
   705  aligned_start, aligned_start + size);    in arm_add_memory()
    [all …]
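
arm_add_memory() rounds the start of a memory bank up with PAGE_ALIGN(), shrinks the size by the bytes skipped, and truncates the bank if it would run past what a 32-bit unsigned long can address. A sketch of that trimming logic, using UINT32_MAX as a stand-in for ULONG_MAX on 32-bit ARM and a local PAGE_ALIGN macro:

    /* Sketch of the bank-trimming arithmetic in arm_add_memory(). */
    #include <stdio.h>
    #include <stdint.h>

    #define PAGE_SIZE 4096ULL
    #define PAGE_ALIGN(x) (((x) + PAGE_SIZE - 1) & ~(PAGE_SIZE - 1))

    static int add_memory(uint64_t start, uint64_t size)
    {
        uint64_t aligned_start = PAGE_ALIGN(start);

        if (aligned_start > start + size)       /* bank smaller than one page     */
            return -1;
        size -= aligned_start - start;          /* drop the unaligned head        */

        if (aligned_start > UINT32_MAX)         /* bank entirely above 4 GiB      */
            return -1;
        if (aligned_start + size > UINT32_MAX)  /* bank crosses the 32-bit limit  */
            size = UINT32_MAX - aligned_start;  /* ...so truncate it              */

        printf("memory bank: %#llx + %#llx\n",
               (unsigned long long)aligned_start, (unsigned long long)size);
        return 0;
    }

    int main(void)
    {
        add_memory(0x80000123ULL, 0x10000000ULL);   /* arbitrary example bank */
        return 0;
    }
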
/linux-4.1.27/fs/jffs2/

file.c
   250  unsigned aligned_start = start & ~3;                                        in jffs2_write_end() local
   269  aligned_start = 0;                                                          in jffs2_write_end()
   294  ret = jffs2_write_inode_range(c, f, ri, page_address(pg) + aligned_start,   in jffs2_write_end()
   295  (pg->index << PAGE_CACHE_SHIFT) + aligned_start,                            in jffs2_write_end()
   296  end - aligned_start, &writtenlen);                                          in jffs2_write_end()
   306  writtenlen -= min(writtenlen, (start - aligned_start));                     in jffs2_write_end()
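
jffs2_write_end() widens the write range down to a 4-byte boundary (start & ~3) so the data handed to jffs2_write_inode_range() starts word-aligned, then subtracts the extra head bytes from the written length it reports back. A simplified sketch of just that bookkeeping, with the flash write itself stubbed out and example byte offsets made up:

    /* Sketch of the 4-byte alignment bookkeeping in jffs2_write_end(). */
    #include <stdio.h>
    #include <stddef.h>

    int main(void)
    {
        unsigned start = 6, end = 23;            /* byte range touched by this write */

        unsigned aligned_start = start & ~3u;    /* widen down to a word boundary */
        size_t writtenlen = end - aligned_start; /* assume the widened range was written */

        /* the real code writes page_address(pg) + aligned_start,
         * length end - aligned_start, into the flash node here */

        /* report only what the caller actually asked to write */
        unsigned head = start - aligned_start;
        writtenlen -= (writtenlen < head) ? writtenlen : head;

        printf("aligned_start=%u, reported written length=%zu\n",
               aligned_start, writtenlen);
        return 0;
    }
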
/linux-4.1.27/arch/arm64/mm/

mmu.c
   400  unsigned long aligned_start = round_down(__pa(_stext),        in fixup_executable() local
   403  create_mapping(aligned_start, __phys_to_virt(aligned_start),  in fixup_executable()
   404  __pa(_stext) - aligned_start,                                 in fixup_executable()
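
fixup_executable() rounds the physical address of _stext down to the start of its mapping block and remaps the gap in front of it separately via create_mapping(). A small sketch of the round_down() step, assuming a 2 MiB block size and a made-up _stext address (the real values come from the linker script and the arm64 page-table configuration):

    /* Sketch of the round_down() step in fixup_executable(). */
    #include <stdio.h>
    #include <stdint.h>

    #define BLOCK_SIZE (2ULL * 1024 * 1024)       /* assumed 2 MiB mapping block  */
    #define round_down(x, y) ((x) & ~((y) - 1))   /* same result as the kernel
                                                     helper for power-of-two y    */

    int main(void)
    {
        uint64_t pa_stext = 0x40281000ULL;        /* hypothetical __pa(_stext)    */
        uint64_t aligned_start = round_down(pa_stext, BLOCK_SIZE);

        /* the kernel would call create_mapping(aligned_start,
         * __phys_to_virt(aligned_start), pa_stext - aligned_start, ...) here */
        printf("map %#llx..%#llx (%llu bytes) ahead of _stext\n",
               (unsigned long long)aligned_start,
               (unsigned long long)pa_stext,
               (unsigned long long)(pa_stext - aligned_start));
        return 0;
    }
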