vm_next           527 arch/parisc/kernel/cache.c 	for (vma = mm->mmap; vma; vma = vma->vm_next)
vm_next           563 arch/parisc/kernel/cache.c 		for (vma = mm->mmap; vma; vma = vma->vm_next) {
vm_next           573 arch/parisc/kernel/cache.c 	for (vma = mm->mmap; vma; vma = vma->vm_next) {
vm_next           131 arch/parisc/mm/fault.c 					if (prev->vm_next == NULL)
vm_next           133 arch/parisc/mm/fault.c 					if (prev->vm_next->vm_start > addr)
vm_next           133 arch/powerpc/mm/book3s32/tlb.c 	for (mp = mm->mmap; mp != NULL; mp = mp->vm_next)
vm_next           167 arch/powerpc/mm/book3s64/subpage_prot.c 		vma = vma->vm_next;
vm_next           336 arch/powerpc/oprofile/cell/spu_task_sync.c 	for (vma = mm->mmap; vma; vma = vma->vm_next) {
vm_next          2494 arch/s390/mm/gmap.c 	for (vma = mm->mmap; vma != NULL; vma = vma->vm_next) {
vm_next          2609 arch/s390/mm/gmap.c 	for (vma = mm->mmap; vma; vma = vma->vm_next) {
vm_next           583 arch/um/kernel/tlb.c 		vma = vma->vm_next;
vm_next           594 arch/um/kernel/tlb.c 		vma = vma->vm_next;
vm_next            69 arch/unicore32/include/asm/mmu_context.h 		BUG_ON(high_vma->vm_next);  /* it should be last */ \
vm_next            71 arch/unicore32/include/asm/mmu_context.h 			high_vma->vm_prev->vm_next = NULL; \
vm_next           266 arch/x86/entry/vdso/vma.c 	for (vma = mm->mmap; vma; vma = vma->vm_next) {
vm_next           677 arch/x86/mm/mpx.c 		vma = vma->vm_next;
vm_next           804 arch/x86/mm/mpx.c 		next = next->vm_next;
vm_next           910 arch/x86/mm/mpx.c 		vma = vma->vm_next;
vm_next            86 arch/xtensa/kernel/syscall.c 	for (vmm = find_vma(current->mm, addr); ; vmm = vmm->vm_next) {
vm_next           325 drivers/misc/cxl/fault.c 	for (vma = mm->mmap; vma; vma = vma->vm_next) {
vm_next           260 drivers/oprofile/buffer_sync.c 	for (vma = find_vma(mm, addr); vma; vma = vma->vm_next) {
vm_next           546 drivers/tee/optee/call.c 		vma = vma->vm_next;
vm_next          1608 fs/binfmt_elf.c 	for (vma = current->mm->mmap; vma != NULL; vma = vma->vm_next) {
vm_next          2153 fs/binfmt_elf.c 	ret = this_vma->vm_next;
vm_next          1497 fs/binfmt_elf_fdpic.c 	for (vma = current->mm->mmap; vma; vma = vma->vm_next) {
vm_next          1535 fs/binfmt_elf_fdpic.c 	for (vma = current->mm->mmap; vma; vma = vma->vm_next)
vm_next          1727 fs/binfmt_elf_fdpic.c 	for (vma = current->mm->mmap; vma; vma = vma->vm_next) {
vm_next           666 fs/exec.c      			vma->vm_next ? vma->vm_next->vm_start : USER_PGTABLES_CEILING);
vm_next           675 fs/exec.c      			vma->vm_next ? vma->vm_next->vm_start : USER_PGTABLES_CEILING);
vm_next          2210 fs/proc/base.c 	for (vma = mm->mmap, pos = 2; vma; vma = vma->vm_next) {
vm_next           140 fs/proc/task_mmu.c 	return vma->vm_next ?: priv->tail_vma;
vm_next           189 fs/proc/task_mmu.c 			vma = vma->vm_next;
vm_next           882 fs/proc/task_mmu.c 	for (vma = priv->mm->mmap; vma; vma = vma->vm_next) {
vm_next          1189 fs/proc/task_mmu.c 			for (vma = mm->mmap; vma; vma = vma->vm_next) {
vm_next          1215 fs/proc/task_mmu.c 				for (vma = mm->mmap; vma; vma = vma->vm_next) {
vm_next           644 fs/userfaultfd.c 		for (vma = mm->mmap; vma; vma = vma->vm_next)
vm_next           833 fs/userfaultfd.c 	for ( ; vma && vma->vm_start < end; vma = vma->vm_next) {
vm_next           901 fs/userfaultfd.c 	for (vma = mm->mmap; vma; vma = vma->vm_next) {
vm_next          1381 fs/userfaultfd.c 	for (cur = vma; cur && cur->vm_start < end; cur = cur->vm_next) {
vm_next          1494 fs/userfaultfd.c 		vma = vma->vm_next;
vm_next          1570 fs/userfaultfd.c 	for (cur = vma; cur && cur->vm_start < end; cur = cur->vm_next) {
vm_next          1656 fs/userfaultfd.c 		vma = vma->vm_next;
vm_next           300 include/linux/mm_types.h 	struct vm_area_struct *vm_next, *vm_prev;
vm_next          1669 ipc/shm.c      		next = vma->vm_next;
vm_next          1708 ipc/shm.c      		next = vma->vm_next;
vm_next           546 kernel/acct.c  			vma = vma->vm_next;
vm_next          9220 kernel/events/core.c 	for (vma = mm->mmap; vma; vma = vma->vm_next) {
vm_next           366 kernel/events/uprobes.c 	for (tmp = mm->mmap; tmp; tmp = tmp->vm_next)
vm_next          1250 kernel/events/uprobes.c 	for (vma = mm->mmap; vma; vma = vma->vm_next) {
vm_next          2000 kernel/events/uprobes.c 	for (vma = mm->mmap; vma; vma = vma->vm_next) {
vm_next           518 kernel/fork.c  	for (mpnt = oldmm->mmap; mpnt; mpnt = mpnt->vm_next) {
vm_next           559 kernel/fork.c  		tmp->vm_next = tmp->vm_prev = NULL;
vm_next           591 kernel/fork.c  		pprev = &tmp->vm_next;
vm_next          2552 kernel/sched/fair.c 	for (; vma; vma = vma->vm_next) {
vm_next          1847 kernel/sys.c   		for (vma = mm->mmap; vma; vma = vma->vm_next) {
vm_next           127 mm/debug.c     		vma, (void *)vma->vm_start, (void *)vma->vm_end, vma->vm_next,
vm_next          1269 mm/gup.c       			vma = vma->vm_next;
vm_next          2400 mm/huge_memory.c 		struct vm_area_struct *next = vma->vm_next;
vm_next          1939 mm/khugepaged.c 	for (; vma; vma = vma->vm_next) {
vm_next           980 mm/ksm.c       		for (vma = mm->mmap; vma; vma = vma->vm_next) {
vm_next          2294 mm/ksm.c       	for (; vma; vma = vma->vm_next) {
vm_next          1137 mm/madvise.c   			vma = prev->vm_next;
vm_next           374 mm/memory.c    		struct vm_area_struct *next = vma->vm_next;
vm_next           394 mm/memory.c    				next = vma->vm_next;
vm_next          1313 mm/memory.c    	for ( ; vma && vma->vm_start < end_addr; vma = vma->vm_next)
vm_next          1338 mm/memory.c    	for ( ; vma && vma->vm_start < range.end; vma = vma->vm_next)
vm_next           383 mm/mempolicy.c 	for (vma = mm->mmap; vma; vma = vma->vm_next)
vm_next           635 mm/mempolicy.c 		if (!vma->vm_next && vma->vm_end < end)
vm_next           751 mm/mempolicy.c 		next = vma->vm_next;
vm_next           765 mm/mempolicy.c 			next = vma->vm_next;
vm_next          1175 mm/mempolicy.c 		vma = vma->vm_next;
vm_next           623 mm/mlock.c     		vma = prev->vm_next;
vm_next           652 mm/mlock.c     	for (; vma ; vma = vma->vm_next) {
vm_next           785 mm/mlock.c     	for (vma = current->mm->mmap; vma ; vma = prev->vm_next) {
vm_next           173 mm/mmap.c      	struct vm_area_struct *next = vma->vm_next;
vm_next           406 mm/mmap.c      		vma = vma->vm_next;
vm_next           575 mm/mmap.c      	for (vma = vma->vm_next; vma; vma = vma->vm_next) {
vm_next           592 mm/mmap.c      	if (vma->vm_next)
vm_next           593 mm/mmap.c      		vma_gap_update(vma->vm_next);
vm_next           686 mm/mmap.c      	next = vma->vm_next;
vm_next           688 mm/mmap.c      		prev->vm_next = next;
vm_next           692 mm/mmap.c      			prev->vm_next = next;
vm_next           722 mm/mmap.c      	struct vm_area_struct *next = vma->vm_next, *orig_vma = vma;
vm_next           763 mm/mmap.c      					   end != next->vm_next->vm_end);
vm_next           778 mm/mmap.c      				exporter = next->vm_next;
vm_next           954 mm/mmap.c      			next = vma->vm_next;
vm_next          1154 mm/mmap.c      		next = prev->vm_next;
vm_next          1159 mm/mmap.c      		next = next->vm_next;
vm_next          1296 mm/mmap.c      	near = vma->vm_next;
vm_next          2362 mm/mmap.c      	next = vma->vm_next;
vm_next          2410 mm/mmap.c      				if (vma->vm_next)
vm_next          2411 mm/mmap.c      					vma_gap_update(vma->vm_next);
vm_next          2609 mm/mmap.c      	struct vm_area_struct *next = prev ? prev->vm_next : mm->mmap;
vm_next          2632 mm/mmap.c      	insertion_point = (prev ? &prev->vm_next : &mm->mmap);
vm_next          2638 mm/mmap.c      		vma = vma->vm_next;
vm_next          2646 mm/mmap.c      	tail_vma->vm_next = NULL;
vm_next          2797 mm/mmap.c      	vma = prev ? prev->vm_next : mm->mmap;
vm_next          2825 mm/mmap.c      			tmp = tmp->vm_next;
vm_next          2930 mm/mmap.c      		for (next = vma->vm_next; next; next = next->vm_next) {
vm_next          2961 mm/mmap.c      				tmp = tmp->vm_next) {
vm_next          3136 mm/mmap.c      			vma = vma->vm_next;
vm_next          3556 mm/mmap.c      	for (vma = mm->mmap; vma; vma = vma->vm_next) {
vm_next          3564 mm/mmap.c      	for (vma = mm->mmap; vma; vma = vma->vm_next) {
vm_next          3572 mm/mmap.c      	for (vma = mm->mmap; vma; vma = vma->vm_next) {
vm_next          3635 mm/mmap.c      	for (vma = mm->mmap; vma; vma = vma->vm_next) {
vm_next           597 mm/mprotect.c  		vma = prev->vm_next;
vm_next           422 mm/mremap.c    			vma->vm_next->vm_flags |= VM_ACCOUNT;
vm_next           580 mm/mremap.c    	if (vma->vm_next && vma->vm_next->vm_start < end) /* intersection */
vm_next           103 mm/msync.c     			vma = vma->vm_next;
vm_next           681 mm/nommu.c     		vma->vm_prev->vm_next = vma->vm_next;
vm_next           683 mm/nommu.c     		mm->mmap = vma->vm_next;
vm_next           685 mm/nommu.c     	if (vma->vm_next)
vm_next           686 mm/nommu.c     		vma->vm_next->vm_prev = vma->vm_prev;
vm_next           717 mm/nommu.c     	for (vma = mm->mmap; vma; vma = vma->vm_next) {
vm_next           766 mm/nommu.c     	for (vma = mm->mmap; vma; vma = vma->vm_next) {
vm_next          1517 mm/nommu.c     			vma = vma->vm_next;
vm_next          1575 mm/nommu.c     		mm->mmap = vma->vm_next;
vm_next           525 mm/oom_kill.c  	for (vma = mm->mmap ; vma; vma = vma->vm_next) {
vm_next           333 mm/pagewalk.c  			vma = vma->vm_next;
vm_next          2074 mm/swapfile.c  	for (vma = mm->mmap; vma; vma = vma->vm_next) {
vm_next           280 mm/util.c      		next = prev->vm_next;
vm_next           281 mm/util.c      		prev->vm_next = vma;
vm_next           290 mm/util.c      	vma->vm_next = next;
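
Nearly every hit above is the same traversal idiom: starting at mm->mmap and following vm_next until NULL, occasionally bounded by a vm_start < end check. The following is a minimal userspace sketch of that pattern; struct vma and struct mm here are simplified hypothetical stand-ins for the kernel's vm_area_struct and mm_struct, not the real definitions from include/linux/mm_types.h.

/*
 * Simplified model of the singly-linked VMA walk seen throughout the
 * listing, e.g. "for (vma = mm->mmap; vma; vma = vma->vm_next)".
 * The types below are illustrative stand-ins, not kernel structures.
 */
#include <stdio.h>

struct vma {				/* stand-in for vm_area_struct */
	unsigned long vm_start;
	unsigned long vm_end;
	struct vma *vm_next;		/* next VMA in address order */
	struct vma *vm_prev;		/* previous VMA */
};

struct mm {				/* stand-in for mm_struct */
	struct vma *mmap;		/* head of the VMA list */
};

/* Walk every VMA in the list, as most of the loops above do. */
static void print_vmas(const struct mm *mm)
{
	const struct vma *vma;

	for (vma = mm->mmap; vma; vma = vma->vm_next)
		printf("%#lx-%#lx\n", vma->vm_start, vma->vm_end);
}

int main(void)
{
	struct vma a = { 0x1000, 0x2000, NULL, NULL };
	struct vma b = { 0x3000, 0x5000, NULL, &a };
	struct mm mm = { &a };

	a.vm_next = &b;			/* chain the two VMAs */
	print_vmas(&mm);
	return 0;
}

The bounded variants in the listing (fs/userfaultfd.c, mm/memory.c) simply add a "vma->vm_start < end" condition to the same loop so the walk stops at the first VMA beyond the range of interest.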