Lines matching refs:vma (all hits shown are in mm/mlock.c)
360 struct vm_area_struct *vma, int zoneid, unsigned long start, in __munlock_pagevec_fill() argument
371 pte = get_locked_pte(vma->vm_mm, start, &ptl); in __munlock_pagevec_fill()
383 page = vm_normal_page(vma, start, *pte); in __munlock_pagevec_fill()
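
The three hits above are the vma references inside __munlock_pagevec_fill(): it takes the page-table lock for the range via get_locked_pte(), resolves each PTE to a page with vm_normal_page(), and collects those pages into a pagevec, returning the address at which the walk stopped so the caller can resume. Below is a hypothetical, userspace-only sketch of that "fill a fixed-capacity batch, return the resume address" pattern; batch, batch_fill and BATCH_MAX are made-up names, not kernel APIs.

/*
 * Hypothetical sketch only: fill a fixed-size batch while scanning a
 * range page by page, and report where the scan stopped.
 */
#include <stdio.h>

#define BATCH_MAX 14            /* assumed capacity, akin to a pagevec */
#define PAGE_SZ   4096UL

struct batch {
    unsigned long addr[BATCH_MAX];
    int nr;
};

/*
 * Scan [start, end) one page at a time.  Stop when the batch is full or
 * when a page cannot be handled here (stand-in for vm_normal_page()
 * returning NULL).  Return the address at which the caller must resume.
 */
static unsigned long batch_fill(struct batch *b, unsigned long start,
                                unsigned long end)
{
    b->nr = 0;
    while (start < end && b->nr < BATCH_MAX) {
        if ((start / PAGE_SZ) % 7 == 0)   /* pretend: a "special" page */
            break;
        b->addr[b->nr++] = start;
        start += PAGE_SZ;
    }
    return start;
}

int main(void)
{
    struct batch b;
    unsigned long start = PAGE_SZ, end = 0x20000;

    while (start < end) {
        unsigned long next = batch_fill(&b, start, end);

        printf("batched %d page(s) from %#lx\n", b.nr, start);
        if (next == start)      /* no progress: step over the blocker */
            next += PAGE_SZ;
        start = next;
    }
    return 0;
}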
422 void munlock_vma_pages_range(struct vm_area_struct *vma, in munlock_vma_pages_range() argument
425 vma->vm_flags &= VM_LOCKED_CLEAR_MASK; in munlock_vma_pages_range()
443 page = follow_page_mask(vma, start, FOLL_GET | FOLL_DUMP, in munlock_vma_pages_range()
474 start = __munlock_pagevec_fill(&pvec, vma, in munlock_vma_pages_range()
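
The references above belong to munlock_vma_pages_range(): it first strips the lock bits from vma->vm_flags with VM_LOCKED_CLEAR_MASK, then walks the range, taking a reference on each page via follow_page_mask(..., FOLL_GET | FOLL_DUMP, ...) and batching the rest of each page-table block through __munlock_pagevec_fill(). From userspace this is the work behind munlock(2); a small demo that watches the VmLck counter in /proc/self/status (assumes Linux and a RLIMIT_MEMLOCK large enough for eight pages):

/* Demo: munlock() on part of a locked mapping lowers VmLck accordingly. */
#include <stdio.h>
#include <string.h>
#include <sys/mman.h>
#include <unistd.h>

static void show_vmlck(const char *tag)
{
    char line[256];
    FILE *f = fopen("/proc/self/status", "r");

    if (!f)
        return;
    while (fgets(line, sizeof(line), f))
        if (!strncmp(line, "VmLck:", 6))
            printf("%-16s %s", tag, line);
    fclose(f);
}

int main(void)
{
    long psz = sysconf(_SC_PAGESIZE);
    size_t len = 8 * psz;
    char *p = mmap(NULL, len, PROT_READ | PROT_WRITE,
                   MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);

    if (p == MAP_FAILED) { perror("mmap"); return 1; }

    if (mlock(p, len)) { perror("mlock"); return 1; }
    show_vmlck("after mlock:");

    /* Unlock the first half; the kernel unlocks those pages in batches. */
    if (munlock(p, len / 2)) { perror("munlock"); return 1; }
    show_vmlck("after munlock:");

    munmap(p, len);
    return 0;
}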
498 static int mlock_fixup(struct vm_area_struct *vma, struct vm_area_struct **prev, in mlock_fixup() argument
501 struct mm_struct *mm = vma->vm_mm; in mlock_fixup()
507 if (newflags == vma->vm_flags || (vma->vm_flags & VM_SPECIAL) || in mlock_fixup()
508 is_vm_hugetlb_page(vma) || vma == get_gate_vma(current->mm)) in mlock_fixup()
512 pgoff = vma->vm_pgoff + ((start - vma->vm_start) >> PAGE_SHIFT); in mlock_fixup()
513 *prev = vma_merge(mm, *prev, start, end, newflags, vma->anon_vma, in mlock_fixup()
514 vma->vm_file, pgoff, vma_policy(vma), in mlock_fixup()
515 vma->vm_userfaultfd_ctx); in mlock_fixup()
517 vma = *prev; in mlock_fixup()
521 if (start != vma->vm_start) { in mlock_fixup()
522 ret = split_vma(mm, vma, start, 1); in mlock_fixup()
527 if (end != vma->vm_end) { in mlock_fixup()
528 ret = split_vma(mm, vma, end, 0); in mlock_fixup()
549 vma->vm_flags = newflags; in mlock_fixup()
551 munlock_vma_pages_range(vma, start, end); in mlock_fixup()
554 *prev = vma; in mlock_fixup()
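
mlock_fixup() confines a flag change to exactly [start, end): it first tries vma_merge() with the new flags, and when the range does not span the whole VMA it calls split_vma() at start and/or end before writing vma->vm_flags = newflags (and, when the lock bit is being dropped, calling munlock_vma_pages_range()). One user-visible effect of the split path is that locking only the middle of a mapping should leave three VMAs behind, each on its own line in /proc/self/maps. A small demo of that:

/*
 * Demo: mlock()ing only the middle page of a 3-page anonymous mapping
 * forces a VMA split; /proc/self/maps should then show three adjacent
 * entries where there was a single mapping.
 */
#include <stdio.h>
#include <sys/mman.h>
#include <unistd.h>

static void dump_maps(void)
{
    char buf[4096];
    size_t n;
    FILE *f = fopen("/proc/self/maps", "r");

    if (!f)
        return;
    while ((n = fread(buf, 1, sizeof(buf), f)) > 0)
        fwrite(buf, 1, n, stdout);
    fclose(f);
}

int main(void)
{
    long psz = sysconf(_SC_PAGESIZE);
    char *p = mmap(NULL, 3 * psz, PROT_READ | PROT_WRITE,
                   MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);

    if (p == MAP_FAILED) { perror("mmap"); return 1; }
    printf("mapping starts at %p\n\n", (void *)p);

    if (mlock(p + psz, psz)) { perror("mlock"); return 1; }

    dump_maps();   /* look for three adjacent entries around the address above */
    munmap(p, 3 * psz);
    return 0;
}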
562 struct vm_area_struct * vma, * prev; in apply_vma_lock_flags() local
572 vma = find_vma(current->mm, start); in apply_vma_lock_flags()
573 if (!vma || vma->vm_start > start) in apply_vma_lock_flags()
576 prev = vma->vm_prev; in apply_vma_lock_flags()
577 if (start > vma->vm_start) in apply_vma_lock_flags()
578 prev = vma; in apply_vma_lock_flags()
581 vm_flags_t newflags = vma->vm_flags & VM_LOCKED_CLEAR_MASK; in apply_vma_lock_flags()
586 tmp = vma->vm_end; in apply_vma_lock_flags()
589 error = mlock_fixup(vma, &prev, nstart, tmp, newflags); in apply_vma_lock_flags()
598 vma = prev->vm_next; in apply_vma_lock_flags()
599 if (!vma || vma->vm_start != nstart) { in apply_vma_lock_flags()
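
apply_vma_lock_flags() walks the VMAs covering [start, start + len), builds newflags by clearing the old lock bits (VM_LOCKED_CLEAR_MASK) and OR-ing in the requested ones, and feeds each piece to mlock_fixup(); the vma->vm_start != nstart check means any hole in the range aborts the call. That matches the documented mlock(2) behaviour of failing with ENOMEM when part of the range is unmapped, which the demo below provokes by punching a one-page hole before locking:

/* Demo: mlock() over a range containing an unmapped hole fails with ENOMEM. */
#include <errno.h>
#include <stdio.h>
#include <string.h>
#include <sys/mman.h>
#include <unistd.h>

int main(void)
{
    long psz = sysconf(_SC_PAGESIZE);
    char *p = mmap(NULL, 3 * psz, PROT_READ | PROT_WRITE,
                   MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);

    if (p == MAP_FAILED) { perror("mmap"); return 1; }

    /* Punch a hole: unmap the middle page of the 3-page mapping. */
    if (munmap(p + psz, psz)) { perror("munmap"); return 1; }

    errno = 0;
    if (mlock(p, 3 * psz))
        printf("mlock failed as expected: %s\n", strerror(errno));
    else
        printf("mlock unexpectedly succeeded\n");

    munmap(p, psz);
    munmap(p + 2 * psz, psz);
    return 0;
}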
687 struct vm_area_struct * vma, * prev = NULL; in apply_mlockall_flags() local
707 for (vma = current->mm->mmap; vma ; vma = prev->vm_next) { in apply_mlockall_flags()
710 newflags = vma->vm_flags & VM_LOCKED_CLEAR_MASK; in apply_mlockall_flags()
714 mlock_fixup(vma, &prev, vma->vm_start, vma->vm_end, newflags); in apply_mlockall_flags()
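
apply_mlockall_flags() is the back end of mlockall(2)/munlockall(2): it loops over every VMA on current->mm->mmap, recomputes newflags from VM_LOCKED_CLEAR_MASK plus whatever lock bits the requested MCL_* flags translate to, and pushes each VMA through mlock_fixup(). A minimal userspace exercise of that path (mlockall() may need CAP_IPC_LOCK or a sufficiently large RLIMIT_MEMLOCK):

/* Demo: mlockall()/munlockall() drive the per-VMA fixup over the whole mm. */
#include <stdio.h>
#include <string.h>
#include <sys/mman.h>

static void show_vmlck(const char *tag)
{
    char line[256];
    FILE *f = fopen("/proc/self/status", "r");

    if (!f)
        return;
    while (fgets(line, sizeof(line), f))
        if (!strncmp(line, "VmLck:", 6))
            printf("%-18s %s", tag, line);
    fclose(f);
}

int main(void)
{
    if (mlockall(MCL_CURRENT)) {   /* often needs privilege or a raised limit */
        perror("mlockall");
        return 1;
    }
    show_vmlck("after mlockall:");

    if (munlockall()) {
        perror("munlockall");
        return 1;
    }
    show_vmlck("after munlockall:");
    return 0;
}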