Lines Matching refs:prev
61 struct vm_area_struct *vma, struct vm_area_struct *prev,
381 unsigned long prev = 0, pend = 0; in browse_rb() local
386 if (vma->vm_start < prev) { in browse_rb()
388 vma->vm_start, prev); in browse_rb()
409 prev = vma->vm_start; in browse_rb()
660 struct vm_area_struct *prev, struct rb_node **rb_link, in __vma_link() argument
663 __vma_link_list(mm, vma, prev, rb_parent); in __vma_link()
668 struct vm_area_struct *prev, struct rb_node **rb_link, in vma_link() argument
678 __vma_link(mm, vma, prev, rb_link, rb_parent); in vma_link()
694 struct vm_area_struct *prev; in __insert_vm_struct() local
698 &prev, &rb_link, &rb_parent)) in __insert_vm_struct()
700 __vma_link(mm, vma, prev, rb_link, rb_parent); in __insert_vm_struct()
706 struct vm_area_struct *prev) in __vma_unlink() argument
711 prev->vm_next = next = vma->vm_next; in __vma_unlink()
713 next->vm_prev = prev; in __vma_unlink()
1033 struct vm_area_struct *prev, unsigned long addr, in vma_merge() argument
1049 if (prev) in vma_merge()
1050 next = prev->vm_next; in vma_merge()
1060 if (prev && prev->vm_end == addr && in vma_merge()
1061 mpol_equal(vma_policy(prev), policy) && in vma_merge()
1062 can_vma_merge_after(prev, vm_flags, in vma_merge()
1071 is_mergeable_anon_vma(prev->anon_vma, in vma_merge()
1074 err = vma_adjust(prev, prev->vm_start, in vma_merge()
1075 next->vm_end, prev->vm_pgoff, NULL); in vma_merge()
1077 err = vma_adjust(prev, prev->vm_start, in vma_merge()
1078 end, prev->vm_pgoff, NULL); in vma_merge()
1081 khugepaged_enter_vma_merge(prev, vm_flags); in vma_merge()
1082 return prev; in vma_merge()
1092 if (prev && addr < prev->vm_end) /* case 4 */ in vma_merge()
1093 err = vma_adjust(prev, prev->vm_start, in vma_merge()
1094 addr, prev->vm_pgoff, NULL); in vma_merge()
1534 struct vm_area_struct *vma, *prev; in mmap_region() local
1558 while (find_vma_links(mm, addr, addr + len, &prev, &rb_link, in mmap_region()
1577 vma = vma_merge(mm, prev, addr, addr + len, vm_flags, NULL, file, pgoff, in mmap_region()
1640 vma_link(mm, vma, prev, rb_link, rb_parent); in mmap_region()
1682 unmap_region(mm, vma, prev, vma->vm_start, vma->vm_end); in mmap_region()
1766 struct rb_node *prev = &vma->vm_rb; in unmapped_area() local
1767 if (!rb_parent(prev)) in unmapped_area()
1769 vma = rb_entry(rb_parent(prev), in unmapped_area()
1771 if (prev == vma->vm_rb.rb_left) { in unmapped_area()
1869 struct rb_node *prev = &vma->vm_rb; in unmapped_area_topdown() local
1870 if (!rb_parent(prev)) in unmapped_area_topdown()
1872 vma = rb_entry(rb_parent(prev), in unmapped_area_topdown()
1874 if (prev == vma->vm_rb.rb_right) { in unmapped_area_topdown()
2305 struct vm_area_struct *vma, *prev; in find_extend_vma() local
2308 vma = find_vma_prev(mm, addr, &prev); in find_extend_vma()
2311 if (!prev || expand_stack(prev, addr)) in find_extend_vma()
2313 if (prev->vm_flags & VM_LOCKED) in find_extend_vma()
2314 populate_vma_page_range(prev, addr, prev->vm_end, NULL); in find_extend_vma()
2315 return prev; in find_extend_vma()
2320 struct vm_area_struct *prev; in expand_stack() local
2323 prev = vma->vm_prev; in expand_stack()
2324 if (prev && prev->vm_end == address) { in expand_stack()
2325 if (!(prev->vm_flags & VM_GROWSDOWN)) in expand_stack()
2386 struct vm_area_struct *vma, struct vm_area_struct *prev, in unmap_region() argument
2389 struct vm_area_struct *next = prev ? prev->vm_next : mm->mmap; in unmap_region()
2396 free_pgtables(&tlb, vma, prev ? prev->vm_end : FIRST_USER_ADDRESS, in unmap_region()
2407 struct vm_area_struct *prev, unsigned long end) in detach_vmas_to_be_unmapped() argument
2412 insertion_point = (prev ? &prev->vm_next : &mm->mmap); in detach_vmas_to_be_unmapped()
2422 vma->vm_prev = prev; in detach_vmas_to_be_unmapped()
2425 mm->highest_vm_end = prev ? prev->vm_end : 0; in detach_vmas_to_be_unmapped()
2521 struct vm_area_struct *vma, *prev, *last; in do_munmap() local
2534 prev = vma->vm_prev; in do_munmap()
2563 prev = vma; in do_munmap()
2573 vma = prev ? prev->vm_next : mm->mmap; in do_munmap()
2592 detach_vmas_to_be_unmapped(mm, vma, prev, end); in do_munmap()
2593 unmap_region(mm, vma, prev, start, end); in do_munmap()
2732 struct vm_area_struct *vma, *prev; in do_brk() local
2761 while (find_vma_links(mm, addr, addr + len, &prev, &rb_link, in do_brk()
2778 vma = vma_merge(mm, prev, addr, addr + len, flags, in do_brk()
2799 vma_link(mm, vma, prev, rb_link, rb_parent); in do_brk()
2878 struct vm_area_struct *prev; in insert_vm_struct() local
2898 &prev, &rb_link, &rb_parent)) in insert_vm_struct()
2904 vma_link(mm, vma, prev, rb_link, rb_parent); in insert_vm_struct()
2919 struct vm_area_struct *new_vma, *prev; in copy_vma() local
2932 if (find_vma_links(mm, addr, addr + len, &prev, &rb_link, &rb_parent)) in copy_vma()
2934 new_vma = vma_merge(mm, prev, addr, addr + len, vma->vm_flags, in copy_vma()
2974 vma_link(mm, new_vma, prev, rb_link, rb_parent); in copy_vma()
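Taken together, the matches above revolve around keeping the per-mm VMA list's prev pointers consistent while VMAs are linked (__vma_link), unlinked (__vma_unlink), merged (vma_merge) and detached for unmapping (detach_vmas_to_be_unmapped). What follows is a minimal, self-contained sketch of that doubly-linked-list discipline; struct vma, vma_link_list(), vma_unlink() and the mmap_list head are simplified stand-ins invented for this illustration, not the kernel's actual definitions.

#include <stdio.h>

/*
 * Simplified stand-in for struct vm_area_struct: only the fields the
 * matches above touch directly (vm_start, vm_end, vm_next, vm_prev).
 * The rbtree linkage and mm_struct bookkeeping are deliberately omitted.
 */
struct vma {
	unsigned long vm_start;
	unsigned long vm_end;
	struct vma *vm_next;
	struct vma *vm_prev;
};

/* List head playing the role of mm->mmap. */
static struct vma *mmap_list;

/*
 * Insertion in the spirit of what __vma_link() (line 663) delegates to
 * __vma_link_list(): place @vma after @prev, or at the head of the list
 * when @prev is NULL, and fix the neighbouring prev/next pointers.
 */
static void vma_link_list(struct vma *vma, struct vma *prev)
{
	struct vma *next;

	vma->vm_prev = prev;
	if (prev) {
		next = prev->vm_next;
		prev->vm_next = vma;
	} else {
		next = mmap_list;
		mmap_list = vma;
	}
	vma->vm_next = next;
	if (next)
		next->vm_prev = vma;
}

/*
 * The same splice as __vma_unlink() at lines 706-713; unlike the kernel
 * code, which can rely on @prev being non-NULL there, this sketch also
 * handles removal of the list head.
 */
static void vma_unlink(struct vma *vma, struct vma *prev)
{
	struct vma *next = vma->vm_next;

	if (prev)
		prev->vm_next = next;
	else
		mmap_list = next;
	if (next)
		next->vm_prev = prev;
}

int main(void)
{
	struct vma a = { .vm_start = 0x1000, .vm_end = 0x2000 };
	struct vma b = { .vm_start = 0x3000, .vm_end = 0x4000 };

	vma_link_list(&a, NULL);	/* a becomes the list head     */
	vma_link_list(&b, &a);		/* b is inserted after a       */
	vma_unlink(&a, NULL);		/* a is removed; b is now head */

	printf("head: %#lx-%#lx, vm_prev=%p\n",
	       mmap_list->vm_start, mmap_list->vm_end,
	       (void *)mmap_list->vm_prev);
	return 0;
}

The same prev bookkeeping is what vma_merge() exploits at line 1060 (prev->vm_end == addr means prev ends exactly where the new range begins) and what detach_vmas_to_be_unmapped() repairs at lines 2412-2425 after cutting a range of VMAs out of the list.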