ia64_need_tlb_flush   47  arch/ia64/include/asm/mmu_context.h  DECLARE_PER_CPU(u8, ia64_need_tlb_flush);
ia64_need_tlb_flush   69  arch/ia64/include/asm/mmu_context.h  if (unlikely(__ia64_per_cpu_var(ia64_need_tlb_flush))) {
ia64_need_tlb_flush   71  arch/ia64/include/asm/mmu_context.h  if (__ia64_per_cpu_var(ia64_need_tlb_flush)) {
ia64_need_tlb_flush   73  arch/ia64/include/asm/mmu_context.h  __ia64_per_cpu_var(ia64_need_tlb_flush) = 0;
ia64_need_tlb_flush   49  arch/ia64/mm/tlb.c                   DEFINE_PER_CPU(u8, ia64_need_tlb_flush);
ia64_need_tlb_flush  102  arch/ia64/mm/tlb.c                   per_cpu(ia64_need_tlb_flush, i) = 1;
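
Taken together, these occurrences describe a deferred-local-flush pattern: arch/ia64/mm/tlb.c defines a per-CPU u8 flag and sets it to 1 for each CPU, while arch/ia64/include/asm/mmu_context.h tests the flag on a fast path (with an unlikely() hint), flushes the local TLB when it is set, and clears it back to 0. Below is a minimal sketch of that pattern in kernel-style C. Only the lines shown in the listing are taken from the source; the function names mark_all_cpus_for_flush()/delayed_tlb_flush(), the ia64_ctx.lock spinlock, and the exact locking sequence are assumptions added for illustration, not a reconstruction of the actual file contents.

    #include <linux/percpu.h>
    #include <linux/spinlock.h>
    #include <linux/cpumask.h>

    /* arch/ia64/mm/tlb.c: one "needs local flush" flag per CPU
     * (this definition appears verbatim in the listing above). */
    DEFINE_PER_CPU(u8, ia64_need_tlb_flush);

    /* Hypothetical caller: when the MMU context counter is recycled,
     * mark every online CPU as needing a local TLB flush instead of
     * flushing remote CPUs immediately. */
    static void mark_all_cpus_for_flush(void)
    {
    	int i;

    	for_each_online_cpu(i)
    		per_cpu(ia64_need_tlb_flush, i) = 1;     /* matches tlb.c:102 */
    }

    /* Hypothetical fast-path check, run on the current CPU (e.g. at
     * context-switch time).  The flag is re-tested under a lock so the
     * flush-and-clear happens exactly once even if the marking code
     * runs concurrently; ia64_ctx.lock is an assumed lock name. */
    static inline void delayed_tlb_flush(void)
    {
    	unsigned long flags;

    	if (unlikely(__ia64_per_cpu_var(ia64_need_tlb_flush))) {   /* mmu_context.h:69 */
    		spin_lock_irqsave(&ia64_ctx.lock, flags);
    		if (__ia64_per_cpu_var(ia64_need_tlb_flush)) {      /* mmu_context.h:71 */
    			local_flush_tlb_all();
    			__ia64_per_cpu_var(ia64_need_tlb_flush) = 0; /* mmu_context.h:73 */
    		}
    		spin_unlock_irqrestore(&ia64_ctx.lock, flags);
    	}
    }

The unlikely() fast-path test plus the re-test under the lock is a common kernel idiom: the uncontended case costs a single per-CPU load, and the lock is only taken when a flush is actually pending.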