src_mm            311 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	struct drm_mm_node *src_mm, *dst_mm;
src_mm            324 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	src_mm = amdgpu_find_mm_node(src->mem, &src->offset);
src_mm            325 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	src_node_start = amdgpu_mm_node_addr(src->bo, src_mm, src->mem) +
src_mm            327 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 	src_node_size = (src_mm->size << PAGE_SHIFT) - src->offset;
src_mm            392 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 			src_node_start = amdgpu_mm_node_addr(src->bo, ++src_mm,
src_mm            394 drivers/gpu/drm/amd/amdgpu/amdgpu_ttm.c 			src_node_size = (src_mm->size << PAGE_SHIFT);
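
In the amdgpu_ttm.c hits above, src_mm is a struct drm_mm_node * (one piece of a possibly non-contiguous VRAM/GTT allocation), not an mm_struct: the copy code looks up the node containing the source offset and steps to the next node once the current one is exhausted. Below is a minimal sketch of that advance pattern, assuming a hypothetical issue_copy_window() helper in place of the real copy submission; the loop framing is illustrative, not the driver's exact control flow.

/*
 * Hedged sketch (not the driver's code): walking successive drm_mm_node
 * entries of a split allocation, based on the amdgpu_ttm.c lines listed
 * above.  issue_copy_window() is a hypothetical stand-in for the real
 * copy submission.
 */
static void copy_across_nodes_sketch(struct amdgpu_copy_mem *src,
				     struct drm_mm_node *src_mm,
				     u64 src_node_start, u64 src_node_size,
				     u64 size)
{
	while (size) {
		u64 cur = min(src_node_size, size);

		issue_copy_window(src_node_start, cur);	/* hypothetical */

		size -= cur;
		src_node_size -= cur;
		if (!src_node_size && size) {
			/* current node exhausted: advance to the next one */
			src_node_start = amdgpu_mm_node_addr(src->bo, ++src_mm,
							     src->mem);
			src_node_size = src_mm->size << PAGE_SHIFT;
		} else {
			src_node_start += cur;
		}
	}
}
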
src_mm             11 include/linux/huge_mm.h extern int copy_huge_pmd(struct mm_struct *dst_mm, struct mm_struct *src_mm,
src_mm             15 include/linux/huge_mm.h extern int copy_huge_pud(struct mm_struct *dst_mm, struct mm_struct *src_mm,
src_mm            978 mm/huge_memory.c int copy_huge_pmd(struct mm_struct *dst_mm, struct mm_struct *src_mm,
src_mm            997 mm/huge_memory.c 	src_ptl = pmd_lockptr(src_mm, src_pmd);
src_mm           1013 mm/huge_memory.c 			set_pmd_at(src_mm, addr, src_pmd, pmd);
src_mm           1055 mm/huge_memory.c 	pmdp_set_wrprotect(src_mm, addr, src_pmd);
src_mm           1118 mm/huge_memory.c int copy_huge_pud(struct mm_struct *dst_mm, struct mm_struct *src_mm,
src_mm           1127 mm/huge_memory.c 	src_ptl = pud_lockptr(src_mm, src_pud);
src_mm           1144 mm/huge_memory.c 	pudp_set_wrprotect(src_mm, addr, src_pud);
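
The huge_mm.h declarations and the huge_memory.c hits are the fork()-time THP copy path: copy_huge_pmd()/copy_huge_pud() take both page-table locks, write-protect the parent's entry via pmdp_set_wrprotect()/pudp_set_wrprotect(), and install the now read-only entry for the child so the first write on either side triggers copy-on-write. A minimal sketch of that sequence for the PMD case follows, under the signature declared in huge_mm.h above; error handling, refcounting and the swap/migration branch (the set_pmd_at() back into src_mm) are omitted.

/*
 * Hedged sketch of the src_mm-side steps visible in the huge_memory.c
 * lines above, not the full copy_huge_pmd() implementation.
 */
static int copy_huge_pmd_sketch(struct mm_struct *dst_mm, struct mm_struct *src_mm,
				pmd_t *dst_pmd, pmd_t *src_pmd, unsigned long addr)
{
	spinlock_t *dst_ptl, *src_ptl;
	pmd_t pmd;

	dst_ptl = pmd_lock(dst_mm, dst_pmd);
	src_ptl = pmd_lockptr(src_mm, src_pmd);
	spin_lock_nested(src_ptl, SINGLE_DEPTH_NESTING);

	pmd = *src_pmd;
	/* write-protect the parent's entry so either side COWs on write */
	pmdp_set_wrprotect(src_mm, addr, src_pmd);
	pmd = pmd_mkold(pmd_wrprotect(pmd));
	/* install the now read-only entry for the child */
	set_pmd_at(dst_mm, addr, dst_pmd, pmd);

	spin_unlock(src_ptl);
	spin_unlock(dst_ptl);
	return 0;
}
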
src_mm            678 mm/memory.c    copy_one_pte(struct mm_struct *dst_mm, struct mm_struct *src_mm,
src_mm            699 mm/memory.c    							&src_mm->mmlist);
src_mm            718 mm/memory.c    				set_pte_at(src_mm, addr, src_pte, pte);
src_mm            747 mm/memory.c    				set_pte_at(src_mm, addr, src_pte, pte);
src_mm            758 mm/memory.c    		ptep_set_wrprotect(src_mm, addr, src_pte);
src_mm            784 mm/memory.c    static int copy_pte_range(struct mm_struct *dst_mm, struct mm_struct *src_mm,
src_mm            802 mm/memory.c    	src_ptl = pte_lockptr(src_mm, src_pmd);
src_mm            823 mm/memory.c    		entry.val = copy_one_pte(dst_mm, src_mm, dst_pte, src_pte,
src_mm            847 mm/memory.c    static inline int copy_pmd_range(struct mm_struct *dst_mm, struct mm_struct *src_mm,
src_mm            864 mm/memory.c    			err = copy_huge_pmd(dst_mm, src_mm,
src_mm            874 mm/memory.c    		if (copy_pte_range(dst_mm, src_mm, dst_pmd, src_pmd,
src_mm            881 mm/memory.c    static inline int copy_pud_range(struct mm_struct *dst_mm, struct mm_struct *src_mm,
src_mm            898 mm/memory.c    			err = copy_huge_pud(dst_mm, src_mm,
src_mm            908 mm/memory.c    		if (copy_pmd_range(dst_mm, src_mm, dst_pud, src_pud,
src_mm            915 mm/memory.c    static inline int copy_p4d_range(struct mm_struct *dst_mm, struct mm_struct *src_mm,
src_mm            930 mm/memory.c    		if (copy_pud_range(dst_mm, src_mm, dst_p4d, src_p4d,
src_mm            937 mm/memory.c    int copy_page_range(struct mm_struct *dst_mm, struct mm_struct *src_mm,
src_mm            959 mm/memory.c    		return copy_hugetlb_page_range(dst_mm, src_mm, vma);
src_mm            981 mm/memory.c    					0, vma, src_mm, addr, end);
src_mm            987 mm/memory.c    	src_pgd = pgd_offset(src_mm, addr);
src_mm            992 mm/memory.c    		if (unlikely(copy_p4d_range(dst_mm, src_mm, dst_pgd, src_pgd,
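
The mm/memory.c hits are the corresponding non-huge path: copy_page_range() walks the parent's page tables pgd -> p4d -> pud -> pmd -> pte, diverting huge entries to copy_huge_pmd()/copy_huge_pud() and ending in copy_one_pte(), which applies the same ptep_set_wrprotect() share-and-COW treatment per PTE. A minimal sketch of one level of that descent is below, modeled on the copy_pud_range() signature shown above; the huge-PUD branch and error unwinding are left out.

/*
 * Hedged sketch of one level of the descent, not the full copy_pud_range().
 * pud_alloc(), pud_offset(), pud_addr_end() and pud_none_or_clear_bad()
 * are the standard page-table walkers of this kernel era.
 */
static inline int copy_pud_range_sketch(struct mm_struct *dst_mm,
		struct mm_struct *src_mm, p4d_t *dst_p4d, p4d_t *src_p4d,
		struct vm_area_struct *vma, unsigned long addr, unsigned long end)
{
	pud_t *src_pud, *dst_pud;
	unsigned long next;

	dst_pud = pud_alloc(dst_mm, dst_p4d, addr);
	if (!dst_pud)
		return -ENOMEM;
	src_pud = pud_offset(src_p4d, addr);
	do {
		next = pud_addr_end(addr, end);
		if (pud_none_or_clear_bad(src_pud))
			continue;
		/* descend one level; copy_pmd_range() is the helper listed above */
		if (copy_pmd_range(dst_mm, src_mm, dst_pud, src_pud,
				   vma, addr, next))
			return -ENOMEM;
	} while (dst_pud++, src_pud++, addr = next, addr != end);
	return 0;
}
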