loaded_mm 2122 arch/x86/events/core.c load_mm_cr4_irqsoff(this_cpu_read(cpu_tlbstate.loaded_mm));
loaded_mm  350 arch/x86/include/asm/mmu_context.h unsigned long cr3 = build_cr3(this_cpu_read(cpu_tlbstate.loaded_mm)->pgd,
loaded_mm  382 arch/x86/include/asm/mmu_context.h temp_state.mm = this_cpu_read(cpu_tlbstate.loaded_mm);
loaded_mm  168 arch/x86/include/asm/tlbflush.h struct mm_struct *loaded_mm;
loaded_mm  254 arch/x86/include/asm/tlbflush.h struct mm_struct *loaded_mm = this_cpu_read(cpu_tlbstate.loaded_mm);
loaded_mm  257 arch/x86/include/asm/tlbflush.h VM_WARN_ON_ONCE(!loaded_mm);
loaded_mm  269 arch/x86/include/asm/tlbflush.h if (loaded_mm != current_mm)
loaded_mm   56 arch/x86/kernel/ldt.c if (this_cpu_read(cpu_tlbstate.loaded_mm) != mm)
loaded_mm  955 arch/x86/mm/init.c .loaded_mm = &init_mm,
loaded_mm  134 arch/x86/mm/tlb.c struct mm_struct *loaded_mm = this_cpu_read(cpu_tlbstate.loaded_mm);
loaded_mm  144 arch/x86/mm/tlb.c if (loaded_mm == &init_mm)
loaded_mm  278 arch/x86/mm/tlb.c struct mm_struct *real_prev = this_cpu_read(cpu_tlbstate.loaded_mm);
loaded_mm  410 arch/x86/mm/tlb.c this_cpu_write(cpu_tlbstate.loaded_mm, LOADED_MM_SWITCHING);
loaded_mm  439 arch/x86/mm/tlb.c this_cpu_write(cpu_tlbstate.loaded_mm, next);
loaded_mm  463 arch/x86/mm/tlb.c if (this_cpu_read(cpu_tlbstate.loaded_mm) == &init_mm)
loaded_mm  485 arch/x86/mm/tlb.c struct mm_struct *mm = this_cpu_read(cpu_tlbstate.loaded_mm);
loaded_mm  533 arch/x86/mm/tlb.c struct mm_struct *loaded_mm = this_cpu_read(cpu_tlbstate.loaded_mm);
loaded_mm  535 arch/x86/mm/tlb.c u64 mm_tlb_gen = atomic64_read(&loaded_mm->context.tlb_gen);
loaded_mm  541 arch/x86/mm/tlb.c if (unlikely(loaded_mm == &init_mm))
loaded_mm  545 arch/x86/mm/tlb.c loaded_mm->context.ctx_id);
loaded_mm  651 arch/x86/mm/tlb.c if (f->mm && f->mm != this_cpu_read(cpu_tlbstate.loaded_mm))
loaded_mm  790 arch/x86/mm/tlb.c if (mm == this_cpu_read(cpu_tlbstate.loaded_mm)) {
loaded_mm  995 arch/x86/xen/mmu_pv.c if (this_cpu_read(cpu_tlbstate.loaded_mm) == mm)
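Nearly every hit above follows the same pattern: the per-CPU cpu_tlbstate.loaded_mm pointer, declared in arch/x86/include/asm/tlbflush.h and initialized to &init_mm in arch/x86/mm/init.c, is read with this_cpu_read() and compared against a target mm (or against &init_mm) before any TLB, LDT, or CR4 work, while only the context-switch path in arch/x86/mm/tlb.c updates it with this_cpu_write(). Below is a minimal sketch of that read-and-compare pattern, assuming kernel context with <asm/tlbflush.h> available; the helper name mm_is_loaded_on_this_cpu() is hypothetical and does not exist in the tree.

    /*
     * Sketch only: mirrors the checks seen in arch/x86/kernel/ldt.c and
     * arch/x86/xen/mmu_pv.c above. The helper name is hypothetical.
     */
    #include <linux/mm_types.h>
    #include <asm/tlbflush.h>

    static inline bool mm_is_loaded_on_this_cpu(struct mm_struct *mm)
    {
    	/* loaded_mm records which mm's page tables this CPU currently has active */
    	return this_cpu_read(cpu_tlbstate.loaded_mm) == mm;
    }

Callers that take this fast path must run with preemption (and usually interrupts) disabled, since loaded_mm is per-CPU state and a migration between the read and the subsequent work would make the comparison meaningless.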