mm_users 654 arch/alpha/kernel/smp.c if (atomic_read(&mm->mm_users) <= 1) { mm_users 701 arch/alpha/kernel/smp.c if (atomic_read(&mm->mm_users) <= 1) { mm_users 755 arch/alpha/kernel/smp.c if (atomic_read(&mm->mm_users) <= 1) { mm_users 301 arch/arc/mm/tlb.c if (atomic_read(&mm->mm_users) == 0) mm_users 255 arch/arm64/include/asm/pgtable.h if (mm != current->active_mm && atomic_read(&mm->mm_users) <= 1) mm_users 85 arch/ia64/include/asm/tlbflush.h if (atomic_read(&mm->mm_users) == 0) mm_users 298 arch/ia64/kernel/smp.c if (likely(mm == current->active_mm && atomic_read(&mm->mm_users) == 1)) mm_users 187 arch/mips/kernel/asm-offsets.c OFFSET(MM_USERS, mm_struct, mm_users); mm_users 538 arch/mips/kernel/smp.c } else if ((atomic_read(&mm->mm_users) != 1) || (current->mm != mm)) { mm_users 588 arch/mips/kernel/smp.c } else if ((atomic_read(&mm->mm_users) != 1) || (current->mm != mm)) { mm_users 655 arch/mips/kernel/smp.c } else if ((atomic_read(&vma->vm_mm->mm_users) != 1) || mm_users 25 arch/parisc/include/asm/mmu_context.h BUG_ON(atomic_read(&mm->mm_users) != 1); mm_users 619 arch/powerpc/mm/book3s64/radix_tlb.c if (atomic_read(&mm->mm_users) <= 1 && current->mm == mm) mm_users 279 arch/powerpc/mm/hugetlbpage.c if (atomic_read(&tlb->mm->mm_users) < 2 || mm_users 364 arch/sh/kernel/smp.c if ((atomic_read(&mm->mm_users) != 1) || (current->mm != mm)) { mm_users 396 arch/sh/kernel/smp.c if ((atomic_read(&mm->mm_users) != 1) || (current->mm != mm)) { mm_users 439 arch/sh/kernel/smp.c if ((atomic_read(&vma->vm_mm->mm_users) != 1) || mm_users 1085 arch/sparc/kernel/smp_64.c if (atomic_read(&mm->mm_users) == 1) { mm_users 1123 arch/sparc/kernel/smp_64.c if (mm == current->mm && atomic_read(&mm->mm_users) == 1) mm_users 1139 arch/sparc/kernel/smp_64.c if (mm == current->mm && atomic_read(&mm->mm_users) == 1) mm_users 1693 arch/sparc/mm/srmmu.c if (atomic_read(&mm->mm_users) == 1 && current->active_mm == mm) mm_users 528 arch/um/kernel/tlb.c if (atomic_read(&current->mm->mm_users) 
== 0) mm_users 556 arch/um/kernel/tlb.c if (atomic_read(&mm->mm_users) == 0) mm_users 92 arch/x86/kernel/tboot.c .mm_users = ATOMIC_INIT(2), mm_users 112 arch/xtensa/kernel/asm-offsets.c DEFINE(MM_USERS, offsetof(struct mm_struct, mm_users)); mm_users 61 drivers/firmware/efi/efi.c .mm_users = ATOMIC_INIT(2), mm_users 464 drivers/iommu/intel-svm.c else if (atomic_read(&svm->mm->mm_users) > 0) mm_users 203 drivers/misc/cxl/fault.c if (!atomic_inc_not_zero(&ctx->mm->mm_users)) mm_users 381 fs/coredump.c if (atomic_read(&mm->mm_users) == nr + 1) mm_users 1079 fs/proc/base.c if (atomic_read(&p->mm->mm_users) > 1) { mm_users 410 include/linux/mm_types.h atomic_t mm_users; mm_users 95 include/linux/sched/mm.h atomic_inc(&mm->mm_users); mm_users 100 include/linux/sched/mm.h return atomic_inc_not_zero(&mm->mm_users); mm_users 1436 kernel/events/uprobes.c if (!atomic_read(&vma->vm_mm->mm_users)) /* called by mmput() ? */ mm_users 363 kernel/exit.c if (atomic_read(&mm->mm_users) <= 1) { mm_users 1009 kernel/fork.c atomic_set(&mm->mm_users, 1); mm_users 1073 kernel/fork.c VM_BUG_ON(atomic_read(&mm->mm_users)); mm_users 1099 kernel/fork.c if (atomic_dec_and_test(&mm->mm_users)) mm_users 1115 kernel/fork.c if (atomic_dec_and_test(&mm->mm_users)) { mm_users 1300 kernel/fork.c atomic_read(&mm->mm_users) > 1) { mm_users 2628 kernel/sched/fair.c int mm_users = 0; mm_users 2632 kernel/sched/fair.c mm_users = atomic_read(&mm->mm_users); mm_users 2633 kernel/sched/fair.c if (mm_users == 1) { mm_users 2664 kernel/sched/fair.c current->numa_scan_period * mm_users * NSEC_PER_MSEC); mm_users 150 kernel/sched/membarrier.c if (atomic_read(&mm->mm_users) == 1 || num_online_cpus() == 1) mm_users 206 kernel/sched/membarrier.c if (atomic_read(&mm->mm_users) == 1 || num_online_cpus() == 1) { mm_users 25 lib/is_single_threaded.c if (atomic_read(&mm->mm_users) == 1) mm_users 171 mm/debug.c mm->pgd, atomic_read(&mm->mm_users), mm_users 32 mm/init-mm.c .mm_users = ATOMIC_INIT(2), mm_users 404 
mm/khugepaged.c return atomic_read(&mm->mm_users) == 0; mm_users 452 mm/ksm.c return atomic_read(&mm->mm_users) == 0; mm_users 255 mm/mmu_notifier.c BUG_ON(atomic_read(&mm->mm_users) <= 0); mm_users 305 mm/mmu_notifier.c BUG_ON(atomic_read(&mm->mm_users) <= 0); mm_users 65 mm/mprotect.c atomic_read(&vma->vm_mm->mm_users) == 1) mm_users 833 mm/oom_kill.c if (atomic_read(&mm->mm_users) <= 1)