Lines matching refs:walk in mm/pagewalk.c
7 struct mm_walk *walk) in walk_pte_range() argument
14 err = walk->pte_entry(pte, addr, addr + PAGE_SIZE, walk); in walk_pte_range()
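The lines above show walk_pte_range() invoking walk->pte_entry once per PTE, with the covered range being exactly [addr, addr + PAGE_SIZE). A minimal sketch of what such a callback might look like against this era of the API; the callback name and the use of walk->private as a counter are illustrative assumptions, not part of the file:

#include <linux/mm.h>

/* Hypothetical ->pte_entry callback: walk_pte_range() calls this once
 * per PTE, with next == addr + PAGE_SIZE. */
static int count_young_ptes(pte_t *pte, unsigned long addr,
			    unsigned long next, struct mm_walk *walk)
{
	unsigned long *young = walk->private;

	if (pte_young(*pte))		/* accessed bit set on this PTE */
		(*young)++;

	return 0;			/* non-zero would abort the walk */
}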
28 struct mm_walk *walk) in walk_pmd_range() argument
38 if (pmd_none(*pmd) || !walk->vma) { in walk_pmd_range()
39 if (walk->pte_hole) in walk_pmd_range()
40 err = walk->pte_hole(addr, next, walk); in walk_pmd_range()
49 if (walk->pmd_entry) in walk_pmd_range()
50 err = walk->pmd_entry(pmd, addr, next, walk); in walk_pmd_range()
58 if (!walk->pte_entry) in walk_pmd_range()
61 split_huge_page_pmd_mm(walk->mm, addr, pmd); in walk_pmd_range()
64 err = walk_pte_range(pmd, addr, next, walk); in walk_pmd_range()
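Taken together, the walk_pmd_range() lines trace the PMD-level loop: an empty PMD (or a walk with no VMA attached) is reported through ->pte_hole, a populated PMD is handed to ->pmd_entry, and a transparent huge page is only split, and the walk only descends into walk_pte_range(), when the caller installed a ->pte_entry callback. A hedged reconstruction of that control flow, simplified (the real function also re-checks the PMD after splitting):

static int walk_pmd_range(pud_t *pud, unsigned long addr, unsigned long end,
			  struct mm_walk *walk)
{
	pmd_t *pmd;
	unsigned long next;
	int err = 0;

	pmd = pmd_offset(pud, addr);
	do {
		next = pmd_addr_end(addr, end);
		if (pmd_none(*pmd) || !walk->vma) {
			/* unmapped range: report it as a hole */
			if (walk->pte_hole)
				err = walk->pte_hole(addr, next, walk);
			if (err)
				break;
			continue;
		}
		/* ->pmd_entry sees huge PMDs as well as page-table PMDs */
		if (walk->pmd_entry)
			err = walk->pmd_entry(pmd, addr, next, walk);
		if (err)
			break;
		/* only break down huge pages if the caller wants PTEs */
		if (!walk->pte_entry)
			continue;
		split_huge_page_pmd_mm(walk->mm, addr, pmd);
		err = walk_pte_range(pmd, addr, next, walk);
		if (err)
			break;
	} while (pmd++, addr = next, addr != end);

	return err;
}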
73 struct mm_walk *walk) in walk_pud_range() argument
83 if (walk->pte_hole) in walk_pud_range()
84 err = walk->pte_hole(addr, next, walk); in walk_pud_range()
89 if (walk->pmd_entry || walk->pte_entry) in walk_pud_range()
90 err = walk_pmd_range(pud, addr, next, walk); in walk_pud_range()
99 struct mm_walk *walk) in walk_pgd_range() argument
105 pgd = pgd_offset(walk->mm, addr); in walk_pgd_range()
109 if (walk->pte_hole) in walk_pgd_range()
110 err = walk->pte_hole(addr, next, walk); in walk_pgd_range()
115 if (walk->pmd_entry || walk->pte_entry) in walk_pgd_range()
116 err = walk_pud_range(pgd, addr, next, walk); in walk_pgd_range()
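The PUD and PGD levels follow the same pattern one step up: a missing or bad entry is reported through ->pte_hole, and the walk descends only if a lower-level callback (->pmd_entry or ->pte_entry) is installed, so the descent here is pgd -> pud -> pmd -> pte. A sketch of the PGD-level loop under those assumptions:

static int walk_pgd_range(unsigned long addr, unsigned long end,
			  struct mm_walk *walk)
{
	pgd_t *pgd;
	unsigned long next;
	int err = 0;

	pgd = pgd_offset(walk->mm, addr);
	do {
		next = pgd_addr_end(addr, end);
		if (pgd_none_or_clear_bad(pgd)) {
			/* nothing mapped at this top-level entry */
			if (walk->pte_hole)
				err = walk->pte_hole(addr, next, walk);
			if (err)
				break;
			continue;
		}
		/* descend only if some lower-level callback exists */
		if (walk->pmd_entry || walk->pte_entry)
			err = walk_pud_range(pgd, addr, next, walk);
		if (err)
			break;
	} while (pgd++, addr = next, addr != end);

	return err;
}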
133 struct mm_walk *walk) in walk_hugetlb_range() argument
135 struct vm_area_struct *vma = walk->vma; in walk_hugetlb_range()
144 pte = huge_pte_offset(walk->mm, addr & hmask); in walk_hugetlb_range()
145 if (pte && walk->hugetlb_entry) in walk_hugetlb_range()
146 err = walk->hugetlb_entry(pte, hmask, addr, next, walk); in walk_hugetlb_range()
156 struct mm_walk *walk) in walk_hugetlb_range() argument
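For hugetlb VMAs the walk calls ->hugetlb_entry once per huge page, passing the huge PTE, the huge-page alignment mask (hmask) and the covered range; the second walk_hugetlb_range() signature in the listing comes from the stub built when CONFIG_HUGETLB_PAGE is disabled. A minimal sketch of such a callback; the name and private counter are illustrative:

#include <linux/mm.h>
#include <linux/hugetlb.h>

/* Hypothetical ->hugetlb_entry callback: invoked once per huge page,
 * with hmask giving the mask for the huge page size. */
static int count_present_hugetlb(pte_t *pte, unsigned long hmask,
				 unsigned long addr, unsigned long next,
				 struct mm_walk *walk)
{
	unsigned long *present = walk->private;

	if (pte_present(huge_ptep_get(pte)))
		(*present)++;

	return 0;
}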
170 struct mm_walk *walk) in walk_page_test() argument
172 struct vm_area_struct *vma = walk->vma; in walk_page_test()
174 if (walk->test_walk) in walk_page_test()
175 return walk->test_walk(start, end, walk); in walk_page_test()
187 if (walk->pte_hole) in walk_page_test()
188 err = walk->pte_hole(start, end, walk); in walk_page_test()
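walk_page_test() gives the caller a veto over each VMA: if a ->test_walk callback is installed it decides the outcome, and otherwise VM_PFNMAP VMAs are reported through ->pte_hole rather than walked. The return convention for ->test_walk is 0 to walk the VMA, 1 to skip it, and a negative errno to abort the whole walk. A sketch of a callback that skips unreadable VMAs; the predicate is an illustrative assumption:

#include <linux/mm.h>

/* Hypothetical ->test_walk callback: decide per VMA whether to walk it.
 * Return 0 to walk, 1 to skip this VMA, negative errno to abort. */
static int skip_unreadable_vmas(unsigned long start, unsigned long end,
				struct mm_walk *walk)
{
	struct vm_area_struct *vma = walk->vma;

	if (!(vma->vm_flags & VM_READ))
		return 1;	/* skip, but keep walking later VMAs */

	return 0;		/* walk this VMA */
}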
195 struct mm_walk *walk) in __walk_page_range() argument
198 struct vm_area_struct *vma = walk->vma; in __walk_page_range()
201 if (walk->hugetlb_entry) in __walk_page_range()
202 err = walk_hugetlb_range(start, end, walk); in __walk_page_range()
204 err = walk_pgd_range(start, end, walk); in __walk_page_range()
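__walk_page_range() is the per-VMA dispatcher: a hugetlb VMA goes through walk_hugetlb_range() (and only when ->hugetlb_entry is set), everything else descends from the PGD. A sketch of that dispatch, reconstructed from the lines above:

static int __walk_page_range(unsigned long start, unsigned long end,
			     struct mm_walk *walk)
{
	struct vm_area_struct *vma = walk->vma;
	int err = 0;

	if (vma && is_vm_hugetlb_page(vma)) {
		if (walk->hugetlb_entry)
			err = walk_hugetlb_range(start, end, walk);
	} else {
		err = walk_pgd_range(start, end, walk);
	}

	return err;
}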
240 struct mm_walk *walk) in walk_page_range() argument
249 if (!walk->mm) in walk_page_range()
252 VM_BUG_ON_MM(!rwsem_is_locked(&walk->mm->mmap_sem), walk->mm); in walk_page_range()
254 vma = find_vma(walk->mm, start); in walk_page_range()
257 walk->vma = NULL; in walk_page_range()
260 walk->vma = NULL; in walk_page_range()
263 walk->vma = vma; in walk_page_range()
267 err = walk_page_test(start, next, walk); in walk_page_range()
280 if (walk->vma || walk->pte_hole) in walk_page_range()
281 err = __walk_page_range(start, next, walk); in walk_page_range()
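walk_page_range() is the range-oriented entry point: it requires walk->mm to be set, asserts that mmap_sem is held (the VM_BUG_ON_MM above), and slices [start, end) into per-VMA pieces, setting walk->vma (NULL over unmapped gaps) before running walk_page_test() and __walk_page_range() on each piece. A minimal caller sketch against this era of the API; the helper and callback names and the counting logic are illustrative assumptions:

#include <linux/mm.h>
#include <linux/rwsem.h>

static int count_present_pte(pte_t *pte, unsigned long addr,
			     unsigned long next, struct mm_walk *walk)
{
	unsigned long *present = walk->private;

	if (pte_present(*pte))
		(*present)++;
	return 0;
}

/* Hypothetical helper: count present pages in [start, end) of @mm. */
static unsigned long count_present_pages(struct mm_struct *mm,
					 unsigned long start,
					 unsigned long end)
{
	unsigned long present = 0;
	struct mm_walk walk = {
		.pte_entry	= count_present_pte,
		.mm		= mm,
		.private	= &present,
	};

	down_read(&mm->mmap_sem);	/* walk_page_range() asserts this */
	walk_page_range(start, end, &walk);
	up_read(&mm->mmap_sem);

	return present;
}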
288 int walk_page_vma(struct vm_area_struct *vma, struct mm_walk *walk) in walk_page_vma() argument
292 if (!walk->mm) in walk_page_vma()
295 VM_BUG_ON(!rwsem_is_locked(&walk->mm->mmap_sem)); in walk_page_vma()
297 walk->vma = vma; in walk_page_vma()
298 err = walk_page_test(vma->vm_start, vma->vm_end, walk); in walk_page_vma()
303 return __walk_page_range(vma->vm_start, vma->vm_end, walk); in walk_page_vma()
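walk_page_vma() is the single-VMA variant: it performs the same mm and mmap_sem checks, runs walk_page_test() on the VMA, and then walks exactly vma->vm_start to vma->vm_end. A short caller sketch under the same assumptions as the previous example; mmap_sem must already be held for read:

#include <linux/mm.h>

/* Hypothetical per-PTE callback, as in the earlier sketches. */
static int touch_pte(pte_t *pte, unsigned long addr,
		     unsigned long next, struct mm_walk *walk)
{
	return 0;	/* inspect *pte here */
}

/* Walk one VMA; the caller holds mmap_sem for read. */
static int walk_one_vma(struct vm_area_struct *vma)
{
	struct mm_walk walk = {
		.pte_entry	= touch_pte,
		.mm		= vma->vm_mm,
	};

	return walk_page_vma(vma, &walk);
}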