SHMLBA 18 arch/arc/mm/mmap.c ((((addr) + SHMLBA - 1) & ~(SHMLBA - 1)) + \
SHMLBA 19 arch/arc/mm/mmap.c (((pgoff) << PAGE_SHIFT) & (SHMLBA - 1)))
SHMLBA 49 arch/arc/mm/mmap.c (addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1))
SHMLBA 73 arch/arc/mm/mmap.c info.align_mask = do_align ? (PAGE_MASK & (SHMLBA - 1)) : 0;
SHMLBA 17 arch/arm/include/asm/cacheflush.h #define CACHE_COLOUR(vaddr) ((vaddr & (SHMLBA - 1)) >> PAGE_SHIFT)
SHMLBA 20 arch/arm/mm/copypage-v6.c #if SHMLBA > 16384
SHMLBA 17 arch/arm/mm/mmap.c ((((addr)+SHMLBA-1)&~(SHMLBA-1)) + \
SHMLBA 18 arch/arm/mm/mmap.c (((pgoff)<<PAGE_SHIFT) & (SHMLBA-1)))
SHMLBA 51 arch/arm/mm/mmap.c (addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1))
SHMLBA 75 arch/arm/mm/mmap.c info.align_mask = do_align ? (PAGE_MASK & (SHMLBA - 1)) : 0;
SHMLBA 105 arch/arm/mm/mmap.c (addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1))
SHMLBA 126 arch/arm/mm/mmap.c info.align_mask = do_align ? (PAGE_MASK & (SHMLBA - 1)) : 0;
SHMLBA 11 arch/csky/abiv1/inc/abi/page.h return (addr1 ^ addr2) & (SHMLBA-1);
SHMLBA 13 arch/csky/abiv1/mmap.c ((((addr)+SHMLBA-1)&~(SHMLBA-1)) + \
SHMLBA 14 arch/csky/abiv1/mmap.c (((pgoff)<<PAGE_SHIFT) & (SHMLBA-1)))
SHMLBA 44 arch/csky/abiv1/mmap.c (addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1))
SHMLBA 68 arch/csky/abiv1/mmap.c info.align_mask = do_align ? (PAGE_MASK & (SHMLBA - 1)) : 0;
SHMLBA 59 arch/ia64/kernel/sys_ia64.c align_mask = PAGE_MASK & (SHMLBA - 1);
SHMLBA 83 arch/nds32/mm/cacheflush.c return ((addr & PAGE_MASK) ^ page) & (SHMLBA - 1);
SHMLBA 9 arch/nds32/mm/mmap.c ((((addr)+SHMLBA-1)&~(SHMLBA-1)) + \
SHMLBA 10 arch/nds32/mm/mmap.c (((pgoff)<<PAGE_SHIFT) & (SHMLBA-1)))
SHMLBA 46 arch/nds32/mm/mmap.c (addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1))
SHMLBA 70 arch/nds32/mm/mmap.c info.align_mask = do_align ? (PAGE_MASK & (SHMLBA - 1)) : 0;
SHMLBA 51 arch/sparc/kernel/sys_sparc_32.c ((addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1)))
SHMLBA 67 arch/sparc/kernel/sys_sparc_32.c (PAGE_MASK & (SHMLBA - 1)) : 0;
SHMLBA 84 arch/sparc/kernel/sys_sparc_64.c unsigned long base = (addr+SHMLBA-1)&~(SHMLBA-1);
SHMLBA 85 arch/sparc/kernel/sys_sparc_64.c unsigned long off = (pgoff<<PAGE_SHIFT) & (SHMLBA-1);
SHMLBA 103 arch/sparc/kernel/sys_sparc_64.c ((addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1)))
SHMLBA 133 arch/sparc/kernel/sys_sparc_64.c info.align_mask = do_color_align ? (PAGE_MASK & (SHMLBA - 1)) : 0;
SHMLBA 167 arch/sparc/kernel/sys_sparc_64.c ((addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1)))
SHMLBA 196 arch/sparc/kernel/sys_sparc_64.c info.align_mask = do_color_align ? (PAGE_MASK & (SHMLBA - 1)) : 0;
SHMLBA 393 arch/sparc/kernel/sys_sparc_64.c err = do_shmat(first, ptr, (int)second, &raddr, SHMLBA);
SHMLBA 16 arch/unicore32/include/asm/cacheflush.h #define CACHE_COLOUR(vaddr) ((vaddr & (SHMLBA - 1)) >> PAGE_SHIFT)
SHMLBA 40 arch/xtensa/kernel/syscall.c ((((addr) + SHMLBA - 1) & ~(SHMLBA - 1)) + \
SHMLBA 41 arch/xtensa/kernel/syscall.c (((pgoff) << PAGE_SHIFT) & (SHMLBA - 1)))
SHMLBA 48 arch/xtensa/kernel/syscall.c err = do_shmat(shmid, shmaddr, shmflg, &ret, SHMLBA);
SHMLBA 71 arch/xtensa/kernel/syscall.c ((addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1)))
SHMLBA 113 drivers/gpu/drm/drm_bufs.c if (shm && (SHMLBA > PAGE_SIZE)) {
SHMLBA 114 drivers/gpu/drm/drm_bufs.c int bits = ilog2(SHMLBA >> PAGE_SHIFT) + 1;
SHMLBA 175 drivers/infiniband/sw/rdmavt/mmap.c rdi->mmap_offset = ALIGN(PAGE_SIZE, SHMLBA);
SHMLBA 177 drivers/infiniband/sw/rdmavt/mmap.c rdi->mmap_offset += ALIGN(size, SHMLBA);
SHMLBA 161 drivers/infiniband/sw/rxe/rxe_mmap.c rxe->mmap_offset = ALIGN(PAGE_SIZE, SHMLBA);
SHMLBA 164 drivers/infiniband/sw/rxe/rxe_mmap.c rxe->mmap_offset += ALIGN(size, SHMLBA);
SHMLBA 1596 ipc/shm.c err = do_shmat(shmid, shmaddr, shmflg, &ret, SHMLBA);
SHMLBA 1606 ipc/shm.c #define COMPAT_SHMLBA SHMLBA
SHMLBA 86 ipc/syscall.c second, &raddr, SHMLBA);
SHMLBA 121 ipc/syscall.c #define COMPAT_SHMLBA SHMLBA
SHMLBA 2627 mm/vmalloc.c return __vmalloc_node_range(size, SHMLBA, VMALLOC_START, VMALLOC_END,
SHMLBA 2734 mm/vmalloc.c return __vmalloc_node_range(size, SHMLBA, VMALLOC_START, VMALLOC_END,
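Most of the arch mmap.c hits above are the same cache-colouring pattern: a COLOUR_ALIGN-style expression rounds the hint address up to an SHMLBA boundary and adds the colour implied by the file offset, while the (addr - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1) check rejects a fixed address whose colour disagrees with pgoff, and PAGE_MASK & (SHMLBA - 1) feeds the allocator's align_mask. The sketch below reproduces that arithmetic as a standalone userspace program; the SHMLBA and PAGE_SHIFT values and the sample addr/pgoff are illustrative assumptions, not taken from any particular architecture.

#include <stdio.h>

/* Illustrative values only: 4 KiB pages and an SHMLBA of four pages.
 * Real kernels define both per architecture. */
#define PAGE_SHIFT 12
#define PAGE_SIZE  (1UL << PAGE_SHIFT)
#define PAGE_MASK  (~(PAGE_SIZE - 1))
#define SHMLBA     (4 * PAGE_SIZE)

/* Same expression as the COLOUR_ALIGN macros listed above: round addr up
 * to an SHMLBA boundary, then add the colour (offset within SHMLBA)
 * implied by the file offset pgoff. */
static unsigned long colour_align(unsigned long addr, unsigned long pgoff)
{
	return ((addr + SHMLBA - 1) & ~(SHMLBA - 1)) +
	       ((pgoff << PAGE_SHIFT) & (SHMLBA - 1));
}

int main(void)
{
	unsigned long addr  = 0x7f0000001000UL; /* hypothetical hint address */
	unsigned long pgoff = 3;                /* file offset, in pages */
	unsigned long aligned = colour_align(addr, pgoff);

	/* The fixed-address sanity check seen at several call sites: the
	 * mapping is acceptable only if addr and pgoff agree on the colour. */
	int colour_ok = ((aligned - (pgoff << PAGE_SHIFT)) & (SHMLBA - 1)) == 0;

	printf("aligned    = %#lx (colour match: %d)\n", aligned, colour_ok);
	printf("align_mask = %#lx\n", PAGE_MASK & (SHMLBA - 1));
	return 0;
}

With these assumed values the hint 0x7f0000001000 is moved to 0x7f0000007000, which shares its offset within SHMLBA with pgoff << PAGE_SHIFT, so the colour check passes and align_mask comes out as 0x3000.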