need_flush 367 arch/arm/mm/pmsa-v7.c unsigned int subregions, bool need_flush)
need_flush 386 arch/arm/mm/pmsa-v7.c if (need_flush)
need_flush 440 arch/arm/mm/pmsa-v7.c bool need_flush = region == PMSAv7_RAM_REGION;
need_flush 447 arch/arm/mm/pmsa-v7.c xip[i].subreg, need_flush);
need_flush 210 arch/sparc/kernel/iommu-common.c (n < pool->hint || need_flush(iommu))) {
need_flush 89 arch/x86/kernel/amd_gart_64.c static bool need_flush; /* global flush state. set for each gart wrap */
need_flush 107 arch/x86/kernel/amd_gart_64.c need_flush = true;
need_flush 116 arch/x86/kernel/amd_gart_64.c need_flush = true;
need_flush 120 arch/x86/kernel/amd_gart_64.c need_flush = true;
need_flush 145 arch/x86/kernel/amd_gart_64.c if (need_flush) {
need_flush 147 arch/x86/kernel/amd_gart_64.c need_flush = false;
need_flush 1882 arch/x86/kvm/mmu.c int need_flush = 0;
need_flush 1895 arch/x86/kvm/mmu.c need_flush = 1;
need_flush 1914 arch/x86/kvm/mmu.c if (need_flush && kvm_available_flush_tlb_with_range()) {
need_flush 1919 arch/x86/kvm/mmu.c return need_flush;
need_flush 77 arch/x86/mm/tlb.c u16 *new_asid, bool *need_flush)
need_flush 83 arch/x86/mm/tlb.c *need_flush = true;
need_flush 96 arch/x86/mm/tlb.c *need_flush = (this_cpu_read(cpu_tlbstate.ctxs[asid].tlb_gen) <
need_flush 110 arch/x86/mm/tlb.c *need_flush = true;
need_flush 113 arch/x86/mm/tlb.c static void load_new_mm_cr3(pgd_t *pgdir, u16 new_asid, bool need_flush)
need_flush 117 arch/x86/mm/tlb.c if (need_flush) {
need_flush 283 arch/x86/mm/tlb.c bool need_flush;
need_flush 371 arch/x86/mm/tlb.c need_flush = true;
need_flush 407 arch/x86/mm/tlb.c choose_new_asid(next, next_tlb_gen, &new_asid, &need_flush);
need_flush 414 arch/x86/mm/tlb.c if (need_flush) {
need_flush 325 drivers/gpu/drm/etnaviv/etnaviv_buffer.c bool need_flush = switch_mmu_context || gpu->flush_seq != new_flush_seq;
need_flush 341 drivers/gpu/drm/etnaviv/etnaviv_buffer.c if (need_flush || switch_context) {
need_flush 348 drivers/gpu/drm/etnaviv/etnaviv_buffer.c if (need_flush) {
need_flush 378 drivers/gpu/drm/etnaviv/etnaviv_buffer.c if (need_flush) {
need_flush 218 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c unsigned int n, m, need_flush;
need_flush 221 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c err = i915_gem_object_prepare_write(obj, &need_flush);
need_flush 388 drivers/infiniband/hw/mlx4/cm.c int need_flush = 0;
need_flush 396 drivers/infiniband/hw/mlx4/cm.c need_flush |= !cancel_delayed_work(&map->timeout);
need_flush 402 drivers/infiniband/hw/mlx4/cm.c if (need_flush)
need_flush 903 drivers/md/dm-writecache.c bool need_flush = false;
need_flush 967 drivers/md/dm-writecache.c need_flush = true;
need_flush 988 drivers/md/dm-writecache.c need_flush = true;
need_flush 995 drivers/md/dm-writecache.c if (need_flush) {
need_flush 4440 drivers/md/raid10.c int need_flush = 0;
need_flush 4481 drivers/md/raid10.c need_flush = 1;
need_flush 4503 drivers/md/raid10.c need_flush = 1;
need_flush 4513 drivers/md/raid10.c if (need_flush ||
need_flush 1375 fs/ceph/caps.c if (capsnap->need_flush) {
need_flush 1475 fs/ceph/caps.c BUG_ON(!capsnap->need_flush);
need_flush 2865 fs/ceph/caps.c if (!capsnap->need_flush &&
need_flush 518 fs/ceph/snap.c capsnap->need_flush = true;
need_flush 530 fs/ceph/snap.c capsnap->need_flush ? "" : "no_flush");
need_flush 215 fs/ceph/super.h bool need_flush;
need_flush 1719 fs/xfs/xfs_log.c bool need_flush)
need_flush 1753 fs/xfs/xfs_log.c if (need_flush)
need_flush 1868 fs/xfs/xfs_log.c bool need_flush = true, split = false;
need_flush 1928 fs/xfs/xfs_log.c need_flush = false;
need_flush 1932 fs/xfs/xfs_log.c xlog_write_iclog(log, iclog, bno, count, need_flush);
need_flush 166 mm/highmem.c int need_flush = 0;
need_flush 197 mm/highmem.c need_flush = 1;
need_flush 199 mm/highmem.c if (need_flush)