Lines matching references to the identifier "vma" in the Linux kernel's mm/mlock.c. The leading number on each entry is the line in that file, and the trailing "argument" / "local" marks how vma is used inside the named function.
360 struct vm_area_struct *vma, int zoneid, unsigned long start, in __munlock_pagevec_fill() argument
371 pte = get_locked_pte(vma->vm_mm, start, &ptl); in __munlock_pagevec_fill()
383 page = vm_normal_page(vma, start, *pte); in __munlock_pagevec_fill()
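For context, a condensed sketch of __munlock_pagevec_fill() around the two calls listed above, reconstructed from the mm/mlock.c of the same era (roughly v3.12). Only the lines quoted in the listing are certain; the addr_end clamping and the pagevec loop are filled in from that file and may differ in detail from the exact tree.

static unsigned long __munlock_pagevec_fill(struct pagevec *pvec,
                struct vm_area_struct *vma, int zoneid,
                unsigned long start, unsigned long end)
{
        pte_t *pte;
        spinlock_t *ptl;

        /* Start the pte walk at the page the caller has already pinned. */
        pte = get_locked_pte(vma->vm_mm, start, &ptl);

        /* Do not cross a page-table boundary within one locked walk. */
        end = pgd_addr_end(start, end);
        end = pud_addr_end(start, end);
        end = pmd_addr_end(start, end);

        /* The page after the pinned one is the first we try to add. */
        start += PAGE_SIZE;
        while (start < end) {
                struct page *page = NULL;

                pte++;
                if (pte_present(*pte))
                        page = vm_normal_page(vma, start, *pte);
                /* Stop at a hole, a special mapping, or a zone change. */
                if (!page || page_zone_id(page) != zoneid)
                        break;

                get_page(page);
                start += PAGE_SIZE;
                if (pagevec_add(pvec, page) == 0)
                        break;
        }
        pte_unmap_unlock(pte, ptl);
        return start;
}

The design point is that the pte lock taken by get_locked_pte() is held for the whole walk, so the helper can cheaply gather a pagevec's worth of same-zone pages for the caller to munlock as a batch.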
422 void munlock_vma_pages_range(struct vm_area_struct *vma, in munlock_vma_pages_range() argument
425 vma->vm_flags &= ~VM_LOCKED; in munlock_vma_pages_range()
443 page = follow_page_mask(vma, start, FOLL_GET | FOLL_DUMP, in munlock_vma_pages_range()
474 start = __munlock_pagevec_fill(&pvec, vma, in munlock_vma_pages_range()
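The four references above come from the main loop of munlock_vma_pages_range(). Below is a condensed sketch of that loop, again assuming the same-era mm/mlock.c: the THP branch, munlock_vma_page(), __munlock_pagevec() and the page_mask stepping are not in the listing and are reconstructed, so details may differ from the exact tree.

void munlock_vma_pages_range(struct vm_area_struct *vma,
                             unsigned long start, unsigned long end)
{
        /* Clear VM_LOCKED first so concurrent faults stop mlocking pages. */
        vma->vm_flags &= ~VM_LOCKED;

        while (start < end) {
                struct page *page;
                unsigned int page_mask;
                struct pagevec pvec;

                pagevec_init(&pvec, 0);
                /*
                 * FOLL_DUMP makes the lookup fail on holes such as
                 * never-faulted anon memory; FOLL_GET pins any page found.
                 */
                page = follow_page_mask(vma, start, FOLL_GET | FOLL_DUMP,
                                        &page_mask);
                if (page && !IS_ERR(page)) {
                        if (PageTransHuge(page)) {
                                /* Munlock the whole compound page at once. */
                                lock_page(page);
                                page_mask = munlock_vma_page(page);
                                unlock_page(page);
                                put_page(page); /* follow_page_mask() ref */
                        } else {
                                /*
                                 * Small page: put it and the rest of its pte
                                 * table into one pagevec, then munlock the
                                 * whole batch.
                                 */
                                pagevec_add(&pvec, page);
                                start = __munlock_pagevec_fill(&pvec, vma,
                                                page_zone_id(page), start, end);
                                __munlock_pagevec(&pvec, page_zone(page));
                                goto next;
                        }
                }
                start += (page_mask + 1) * PAGE_SIZE;
next:
                cond_resched();
        }
}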
498 static int mlock_fixup(struct vm_area_struct *vma, struct vm_area_struct **prev, in mlock_fixup() argument
501 struct mm_struct *mm = vma->vm_mm; in mlock_fixup()
507 if (newflags == vma->vm_flags || (vma->vm_flags & VM_SPECIAL) || in mlock_fixup()
508 is_vm_hugetlb_page(vma) || vma == get_gate_vma(current->mm)) in mlock_fixup()
511 pgoff = vma->vm_pgoff + ((start - vma->vm_start) >> PAGE_SHIFT); in mlock_fixup()
512 *prev = vma_merge(mm, *prev, start, end, newflags, vma->anon_vma, in mlock_fixup()
513 vma->vm_file, pgoff, vma_policy(vma)); in mlock_fixup()
515 vma = *prev; in mlock_fixup()
519 if (start != vma->vm_start) { in mlock_fixup()
520 ret = split_vma(mm, vma, start, 1); in mlock_fixup()
525 if (end != vma->vm_end) { in mlock_fixup()
526 ret = split_vma(mm, vma, end, 0); in mlock_fixup()
547 vma->vm_flags = newflags; in mlock_fixup()
549 munlock_vma_pages_range(vma, start, end); in mlock_fixup()
552 *prev = vma; in mlock_fixup()
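Read together, the mlock_fixup() references trace the standard fixup pattern: skip vmas that must never be (un)locked, try to merge the flag change into a neighbour, split at the range boundaries, then either set VM_LOCKED or hand the range to munlock_vma_pages_range(). Here is a sketch with the control flow filled in; the locked_vm accounting and the goto labels are not in the listing and are reconstructed from the same-era file.

static int mlock_fixup(struct vm_area_struct *vma, struct vm_area_struct **prev,
                       unsigned long start, unsigned long end, vm_flags_t newflags)
{
        struct mm_struct *mm = vma->vm_mm;
        pgoff_t pgoff;
        int nr_pages;
        int ret = 0;
        int lock = !!(newflags & VM_LOCKED);

        /* No change requested, or a vma that may not be mlocked at all. */
        if (newflags == vma->vm_flags || (vma->vm_flags & VM_SPECIAL) ||
            is_vm_hugetlb_page(vma) || vma == get_gate_vma(current->mm))
                goto out;

        /* First try to merge the new flags into an adjacent vma ... */
        pgoff = vma->vm_pgoff + ((start - vma->vm_start) >> PAGE_SHIFT);
        *prev = vma_merge(mm, *prev, start, end, newflags, vma->anon_vma,
                          vma->vm_file, pgoff, vma_policy(vma));
        if (*prev) {
                vma = *prev;
                goto success;
        }

        /* ... otherwise split so that [start, end) gets its own vma. */
        if (start != vma->vm_start) {
                ret = split_vma(mm, vma, start, 1);
                if (ret)
                        goto out;
        }
        if (end != vma->vm_end) {
                ret = split_vma(mm, vma, end, 0);
                if (ret)
                        goto out;
        }

success:
        /* Keep mm->locked_vm in sync, then apply or undo VM_LOCKED. */
        nr_pages = (end - start) >> PAGE_SHIFT;
        if (!lock)
                nr_pages = -nr_pages;
        mm->locked_vm += nr_pages;

        if (lock)
                vma->vm_flags = newflags;
        else
                munlock_vma_pages_range(vma, start, end);

out:
        *prev = vma;
        return ret;
}

In kernels of this era, locking only sets vm_flags here; the pages themselves are faulted in and mlocked later via __mm_populate() in the syscall path.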
559 struct vm_area_struct * vma, * prev; in do_mlock() local
569 vma = find_vma(current->mm, start); in do_mlock()
570 if (!vma || vma->vm_start > start) in do_mlock()
573 prev = vma->vm_prev; in do_mlock()
574 if (start > vma->vm_start) in do_mlock()
575 prev = vma; in do_mlock()
582 newflags = vma->vm_flags & ~VM_LOCKED; in do_mlock()
586 tmp = vma->vm_end; in do_mlock()
589 error = mlock_fixup(vma, &prev, nstart, tmp, newflags); in do_mlock()
598 vma = prev->vm_next; in do_mlock()
599 if (!vma || vma->vm_start != nstart) { in do_mlock()
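The do_mlock() references show a walk over every vma covering [start, start+len), applying mlock_fixup() to each piece. Below is a sketch of the whole loop, with the range checks and error handling that the listing omits filled in (reconstructed; the syscall wrappers in the same file page-align start/len and check RLIMIT_MEMLOCK before calling this).

static int do_mlock(unsigned long start, size_t len, int on)
{
        unsigned long nstart, end, tmp;
        struct vm_area_struct *vma, *prev;
        int error;

        end = start + len;
        if (end < start)
                return -EINVAL;
        if (end == start)
                return 0;

        vma = find_vma(current->mm, start);
        if (!vma || vma->vm_start > start)
                return -ENOMEM;

        prev = vma->vm_prev;
        if (start > vma->vm_start)
                prev = vma;

        for (nstart = start; ; ) {
                vm_flags_t newflags;

                /* Here vma->vm_start <= nstart < vma->vm_end. */
                newflags = vma->vm_flags & ~VM_LOCKED;
                if (on)
                        newflags |= VM_LOCKED;

                /* Clamp this step to the end of the current vma. */
                tmp = vma->vm_end;
                if (tmp > end)
                        tmp = end;
                error = mlock_fixup(vma, &prev, nstart, tmp, newflags);
                if (error)
                        break;

                nstart = tmp;
                if (nstart < prev->vm_end)
                        nstart = prev->vm_end;
                if (nstart >= end)
                        break;

                /* The locked range must be covered by contiguous vmas. */
                vma = prev->vm_next;
                if (!vma || vma->vm_start != nstart) {
                        error = -ENOMEM;
                        break;
                }
        }
        return error;
}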
659 struct vm_area_struct * vma, * prev = NULL; in do_mlockall() local
668 for (vma = current->mm->mmap; vma ; vma = prev->vm_next) { in do_mlockall()
671 newflags = vma->vm_flags & ~VM_LOCKED; in do_mlockall()
676 mlock_fixup(vma, &prev, vma->vm_start, vma->vm_end, newflags); in do_mlockall()
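Finally, do_mlockall() applies the same fixup to every vma of the current mm. A sketch assuming the same era's MCL_CURRENT/MCL_FUTURE handling; the def_flags manipulation at the top is not in the listing and is reconstructed.

static int do_mlockall(int flags)
{
        struct vm_area_struct *vma, *prev = NULL;

        /* MCL_FUTURE only changes the default flags for future mappings. */
        if (flags & MCL_FUTURE)
                current->mm->def_flags |= VM_LOCKED;
        else
                current->mm->def_flags &= ~VM_LOCKED;
        if (flags == MCL_FUTURE)
                goto out;

        for (vma = current->mm->mmap; vma; vma = prev->vm_next) {
                vm_flags_t newflags;

                newflags = vma->vm_flags & ~VM_LOCKED;
                if (flags & MCL_CURRENT)
                        newflags |= VM_LOCKED;

                /* Errors are deliberately ignored; mlock_fixup() sets *prev,
                 * which is what advances the loop. */
                mlock_fixup(vma, &prev, vma->vm_start, vma->vm_end, newflags);
                cond_resched();
        }
out:
        return 0;
}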