Lines matching refs: next (each entry: source line number, matching line, containing function; entries marked "local" are the lines that declare next as a local variable)
272 struct vm_area_struct *next = vma->vm_next; in remove_vma() local
281 return next; in remove_vma()
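
In remove_vma() the local next only caches vma->vm_next before the VMA itself is freed, so callers can keep walking the list while tearing it down (these functions are the VMA-list management code in the kernel's mm/mmap.c). A minimal user-space sketch of that pattern, using a hypothetical stripped-down vm_area_struct that carries nothing but the forward link:

#include <stdlib.h>

/* Hypothetical, stripped-down VMA: only the forward list link. */
struct vm_area_struct {
        struct vm_area_struct *vm_next;
};

/* Free one VMA and hand back its successor, mirroring remove_vma(). */
struct vm_area_struct *remove_vma_sketch(struct vm_area_struct *vma)
{
        struct vm_area_struct *next = vma->vm_next;   /* cache before freeing */

        free(vma);                      /* the kernel uses kmem_cache_free() */
        return next;
}

/* Typical caller: destroy the whole list, the way exit_mmap() walks it. */
void destroy_list_sketch(struct vm_area_struct *vma)
{
        while (vma)
                vma = remove_vma_sketch(vma);
}
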
709 struct vm_area_struct *next; in __vma_unlink() local
712 prev->vm_next = next = vma->vm_next; in __vma_unlink()
713 if (next) in __vma_unlink()
714 next->vm_prev = prev; in __vma_unlink()
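
__vma_unlink() is the doubly linked half of the same bookkeeping: the predecessor takes over vma's successor, and if that successor exists its back pointer is rewired to prev (only the list-pointer part shows up in this listing). The three listed lines as a self-contained sketch, with a hypothetical minimal struct and the same assumption the listed code makes that prev is non-NULL:

/* Hypothetical minimal VMA carrying both list links. */
struct vma {
        struct vma *vm_next;
        struct vma *vm_prev;
};

/* Detach @vma, which is known to directly follow @prev in the list. */
void vma_unlink_sketch(struct vma *prev, struct vma *vma)
{
        struct vma *next;

        prev->vm_next = next = vma->vm_next;
        if (next)
                next->vm_prev = prev;
}
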
731 struct vm_area_struct *next = vma->vm_next; in vma_adjust() local
741 if (next && !insert) { in vma_adjust()
744 if (end >= next->vm_end) { in vma_adjust()
749 again: remove_next = 1 + (end > next->vm_end); in vma_adjust()
750 end = next->vm_end; in vma_adjust()
751 exporter = next; in vma_adjust()
753 } else if (end > next->vm_start) { in vma_adjust()
758 adjust_next = (end - next->vm_start) >> PAGE_SHIFT; in vma_adjust()
759 exporter = next; in vma_adjust()
769 importer = next; in vma_adjust()
793 uprobe_munmap(next, next->vm_start, next->vm_end); in vma_adjust()
811 anon_vma = next->anon_vma; in vma_adjust()
813 VM_BUG_ON_VMA(adjust_next && next->anon_vma && in vma_adjust()
814 anon_vma != next->anon_vma, next); in vma_adjust()
818 anon_vma_interval_tree_pre_update_vma(next); in vma_adjust()
825 vma_interval_tree_remove(next, root); in vma_adjust()
838 next->vm_start += adjust_next << PAGE_SHIFT; in vma_adjust()
839 next->vm_pgoff += adjust_next; in vma_adjust()
844 vma_interval_tree_insert(next, root); in vma_adjust()
854 __vma_unlink(mm, next, vma); in vma_adjust()
856 __remove_shared_vm_struct(next, file, mapping); in vma_adjust()
868 if (!next) in vma_adjust()
871 vma_gap_update(next); in vma_adjust()
878 anon_vma_interval_tree_post_update_vma(next); in vma_adjust()
888 uprobe_mmap(next); in vma_adjust()
893 uprobe_munmap(next, next->vm_start, next->vm_end); in vma_adjust()
896 if (next->anon_vma) in vma_adjust()
897 anon_vma_merge(vma, next); in vma_adjust()
899 mpol_put(vma_policy(next)); in vma_adjust()
900 kmem_cache_free(vm_area_cachep, next); in vma_adjust()
906 next = vma->vm_next; in vma_adjust()
909 else if (next) in vma_adjust()
910 vma_gap_update(next); in vma_adjust()
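
The big cluster in vma_adjust() is where most of the work on next happens. When next exists and no new VMA is being inserted, the function classifies how the new end relates to next: reaching to or past next->vm_end means next is swallowed whole and must be removed (remove_next, bumped to 2 when even more than next is covered, the case where a second VMA goes too), while merely overlapping next's head yields adjust_next, the number of pages by which next->vm_start and next->vm_pgoff are later shifted up (lines 838-839). A hedged user-space sketch of just that classification, with hypothetical field names and the shrinking case (negative adjust_next, where next becomes the importer at line 769) left out:

#define PAGE_SHIFT 12

/* Hypothetical minimal VMA: just the range and the forward link. */
struct vma {
        unsigned long vm_start, vm_end;
        struct vma *vm_next;
};

/*
 * Classify how growing @vma to @end affects its successor:
 *   *remove_next != 0  - next is covered entirely (2: more than next),
 *   *adjust_next  > 0  - next survives but loses that many leading pages.
 */
void classify_next_sketch(struct vma *vma, unsigned long end,
                          int *remove_next, long *adjust_next)
{
        struct vma *next = vma->vm_next;

        *remove_next = 0;
        *adjust_next = 0;
        if (!next)
                return;

        if (end >= next->vm_end) {
                /* vma expands over all of next, perhaps beyond it. */
                *remove_next = 1 + (end > next->vm_end);
        } else if (end > next->vm_start) {
                /* Partial overlap: next's start (and pgoff) move up. */
                *adjust_next = (long)((end - next->vm_start) >> PAGE_SHIFT);
        }
}

Once the ranges are settled, the remaining listed lines unlink a swallowed next from the list and interval trees, merge its anon_vma, drop its mempolicy and free it; for remove_next == 2 the code reloads next = vma->vm_next (line 906) and jumps back to the again: label to take out the second VMA as well, otherwise it just refreshes the gap information of whatever now follows.
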
1048 struct vm_area_struct *area, *next; in vma_merge() local
1059 next = prev->vm_next; in vma_merge()
1061 next = mm->mmap; in vma_merge()
1062 area = next; in vma_merge()
1063 if (next && next->vm_end == end) /* cases 6, 7, 8 */ in vma_merge()
1064 next = next->vm_next; in vma_merge()
1077 if (next && end == next->vm_start && in vma_merge()
1078 mpol_equal(policy, vma_policy(next)) && in vma_merge()
1079 can_vma_merge_before(next, vm_flags, in vma_merge()
1084 next->anon_vma, NULL)) { in vma_merge()
1087 next->vm_end, prev->vm_pgoff, NULL); in vma_merge()
1100 if (next && end == next->vm_start && in vma_merge()
1101 mpol_equal(policy, vma_policy(next)) && in vma_merge()
1102 can_vma_merge_before(next, vm_flags, in vma_merge()
1109 err = vma_adjust(area, addr, next->vm_end, in vma_merge()
1110 next->vm_pgoff - pglen, NULL); in vma_merge()
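
In vma_merge(), next is the candidate for the "append before the following VMA" half of the test: cases 6-8 first step next past a VMA that ends exactly at end, and then both merge paths require that the new range ends exactly where next starts, that the mempolicies are equal, and that can_vma_merge_before() accepts it (same flags, same file, and a file offset that continues seamlessly). A simplified sketch of that adjacency test, with hypothetical fields and the mempolicy/anon_vma checks left out:

#include <stdbool.h>

#define PAGE_SHIFT 12

/* Hypothetical minimal VMA for the adjacency/compatibility check. */
struct vma {
        unsigned long vm_start, vm_end;
        unsigned long vm_flags;
        unsigned long vm_pgoff;         /* offset into vm_file, in pages */
        void *vm_file;
};

/* Can a new mapping [addr, end) with @flags/@file/@pgoff merge into @next? */
bool can_merge_with_next_sketch(const struct vma *next,
                                unsigned long addr, unsigned long end,
                                unsigned long flags, void *file,
                                unsigned long pgoff)
{
        unsigned long pglen = (end - addr) >> PAGE_SHIFT;

        if (!next || end != next->vm_start)
                return false;           /* not immediately adjacent */
        if (next->vm_flags != flags || next->vm_file != file)
                return false;           /* incompatible mapping */
        /* File offsets must be contiguous: next picks up where we stop. */
        return next->vm_pgoff == pgoff + pglen;
}

When the check passes and prev can merge as well, the vma_adjust() call ending at line 1087 extends prev over both ranges; otherwise the call at lines 1109-1110 grows the following VMA downwards to addr by handing vma_adjust() a pgoff reduced by pglen.
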
2306 struct vm_area_struct *next; in expand_stack() local
2309 next = vma->vm_next; in expand_stack()
2310 if (next && next->vm_start == address + PAGE_SIZE) { in expand_stack()
2311 if (!(next->vm_flags & VM_GROWSUP)) in expand_stack()
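
expand_stack() (the CONFIG_STACK_GROWSUP flavour in this kernel) peeks at next only to avoid growing the stack right up against the following mapping: if the next VMA would begin exactly one page past the faulting address, i.e. immediately after the expanded stack, and is not itself VM_GROWSUP, the expansion is refused. A sketch of that guard test (hypothetical struct and flag value):

#include <errno.h>

#define PAGE_SIZE  4096UL
#define VM_GROWSUP 0x00000200UL         /* illustrative flag value */

/* Hypothetical minimal VMA. */
struct vma {
        unsigned long vm_start;
        unsigned long vm_flags;
        struct vma *vm_next;
};

/*
 * Refuse an upwards stack expansion to @address when it would run straight
 * into the following mapping, unless that mapping can itself grow up.
 */
int stack_guard_check_sketch(struct vma *vma, unsigned long address)
{
        struct vma *next = vma->vm_next;

        if (next && next->vm_start == address + PAGE_SIZE &&
            !(next->vm_flags & VM_GROWSUP))
                return -ENOMEM;         /* would collide with the next VMA */
        return 0;                       /* caller goes on to expand */
}
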
2404 struct vm_area_struct *next = prev ? prev->vm_next : mm->mmap; in unmap_region() local
2412 next ? next->vm_start : USER_PGTABLES_CEILING); in unmap_region()
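
unmap_region() needs next only to bound the page-table teardown after the VMAs in the hole are gone: page tables may be freed up to the start of the first VMA that survives above the unmapped range, or all the way to USER_PGTABLES_CEILING when nothing follows. The upper-bound computation in isolation (hypothetical ceiling value; the kernel's is architecture-defined):

#define USER_PGTABLES_CEILING 0x0000800000000000UL   /* illustrative */

struct vma {
        unsigned long vm_start;
        struct vma *vm_next;
};

/* Highest address up to which page tables may be freed after the unmap. */
unsigned long pgtable_ceiling_sketch(struct vma *mmap_head, struct vma *prev)
{
        struct vma *next = prev ? prev->vm_next : mmap_head;

        return next ? next->vm_start : USER_PGTABLES_CEILING;
}
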
2675 struct vm_area_struct *next; in SYSCALL_DEFINE5() local
2677 for (next = vma->vm_next; next; next = next->vm_next) { in SYSCALL_DEFINE5()
2679 if (next->vm_start != next->vm_prev->vm_end) in SYSCALL_DEFINE5()
2682 if (next->vm_file != vma->vm_file) in SYSCALL_DEFINE5()
2685 if (next->vm_flags != vma->vm_flags) in SYSCALL_DEFINE5()
2688 if (start + size <= next->vm_end) in SYSCALL_DEFINE5()
2692 if (!next) in SYSCALL_DEFINE5()
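
The last cluster is the remap_file_pages() syscall emulation: when the requested range extends past the first VMA, the loop walks forward through next, insisting that each VMA starts exactly where its predecessor ends and shares the original VMA's file and flags, and stops once [start, start + size) is fully covered. A self-contained sketch of that walk, with a hypothetical minimal struct and a boolean result standing in for the goto-out error path:

#include <stdbool.h>

/* Hypothetical minimal VMA for the contiguity walk. */
struct vma {
        unsigned long vm_start, vm_end;
        unsigned long vm_flags;
        void *vm_file;
        struct vma *vm_next, *vm_prev;
};

/*
 * Does a gap-free run of VMAs, all with @vma's file and flags, cover
 * [start, start + size)?
 */
bool range_is_one_compatible_run(struct vma *vma,
                                 unsigned long start, unsigned long size)
{
        struct vma *next;

        if (start + size <= vma->vm_end)
                return true;            /* a single VMA already covers it */

        for (next = vma->vm_next; next; next = next->vm_next) {
                if (next->vm_start != next->vm_prev->vm_end)
                        return false;   /* hole between VMAs */
                if (next->vm_file != vma->vm_file)
                        return false;   /* different backing file */
                if (next->vm_flags != vma->vm_flags)
                        return false;   /* mapping flags changed */
                if (start + size <= next->vm_end)
                        return true;    /* range fully covered */
        }
        return false;                   /* ran off the end of the list */
}
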