ish               301 arch/arm/include/asm/assembler.h 	ALT_SMP(dmb	ish)
ish               303 arch/arm/include/asm/assembler.h 	ALT_SMP(W(dmb)	ish)
ish                77 arch/arm/include/asm/barrier.h #define __smp_mb()	dmb(ish)
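
The three hits above are the SMP memory-barrier plumbing on 32-bit ARM: the
assembler.h lines patch a "dmb ish" into the smp_dmb assembly macro only on
SMP kernels (via the ALT_SMP alternatives), and barrier.h builds __smp_mb()
on the same instruction. A minimal freestanding sketch of how the dmb(ish)
macro expands, assuming an ARMv7-or-later or AArch64 target (my_smp_mb is a
hypothetical name; the real macro lives in arch/arm/include/asm/barrier.h):

    /* Stringize the barrier option into the instruction, as the kernel
     * macro does; "ish" limits the barrier to the inner-shareable
     * domain, i.e. the set of cores an SMP kernel can run on. */
    #define dmb(opt)    asm volatile("dmb " #opt ::: "memory")

    static inline void my_smp_mb(void)
    {
        dmb(ish);   /* full read/write barrier, inner-shareable only */
    }
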
ish                14 arch/arm/include/asm/switch_to.h #define __complete_pending_tlbi()	dsb(ish)
ish               362 arch/arm/include/asm/tlbflush.h 		dsb(ish);
ish               416 arch/arm/include/asm/tlbflush.h 		dsb(ish);
ish               476 arch/arm/include/asm/tlbflush.h 		dsb(ish);
ish               526 arch/arm/include/asm/tlbflush.h 		dsb(ish);
ish                83 arch/arm/kernel/smp_tlb.c 	dsb(ish);
ish                90 arch/arm/kernel/smp_tlb.c 	dsb(ish);
ish                37 arch/arm/kvm/hyp/tlb.c 	dsb(ish);
ish                67 arch/arm/kvm/hyp/tlb.c 	dsb(ish);
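
Every dsb(ish) in the switch_to.h, tlbflush.h, smp_tlb.c and kvm/hyp/tlb.c
hits above plays the same role: it sits after a broadcast TLB invalidation
and stalls until that invalidation has completed on every core in the
inner-shareable domain. A sketch of the pattern, assuming ARMv7 SMP
(sketch_flush_tlb_page is a hypothetical name; the real helpers are the
tlb_op() users in arch/arm/include/asm/tlbflush.h):

    #define dsb(opt)    asm volatile("dsb " #opt ::: "memory")

    static inline void sketch_flush_tlb_page(unsigned long mva)
    {
        /* TLBIMVAIS: invalidate by MVA, broadcast to the inner-shareable
         * domain (CP15 c8/c3 is the IS variant of TLB maintenance). */
        asm volatile("mcr p15, 0, %0, c8, c3, 1" : : "r" (mva));
        dsb(ish);   /* wait until all IS cores have dropped the entry */
    }
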
ish               430 arch/arm64/include/asm/assembler.h 	dsb	ish
ish               102 arch/arm64/include/asm/atomic_ll_sc.h 	ATOMIC_OP_RETURN(        , dmb ish,  , l, "memory", __VA_ARGS__)\
ish               106 arch/arm64/include/asm/atomic_ll_sc.h 	ATOMIC_FETCH_OP (        , dmb ish,  , l, "memory", __VA_ARGS__)\
ish               117 arch/arm64/include/asm/atomic_ll_sc.h 	ATOMIC_FETCH_OP (        , dmb ish,  , l, "memory", __VA_ARGS__)\
ish               201 arch/arm64/include/asm/atomic_ll_sc.h 	ATOMIC64_OP_RETURN(, dmb ish,  , l, "memory", __VA_ARGS__)	\
ish               205 arch/arm64/include/asm/atomic_ll_sc.h 	ATOMIC64_FETCH_OP (, dmb ish,  , l, "memory", __VA_ARGS__)	\
ish               216 arch/arm64/include/asm/atomic_ll_sc.h 	ATOMIC64_FETCH_OP (, dmb ish,  , l, "memory", __VA_ARGS__)	\
ish               311 arch/arm64/include/asm/atomic_ll_sc.h __CMPXCHG_CASE(w, b,  mb_,  8, dmb ish,  , l, "memory", K)
ish               312 arch/arm64/include/asm/atomic_ll_sc.h __CMPXCHG_CASE(w, h,  mb_, 16, dmb ish,  , l, "memory", K)
ish               313 arch/arm64/include/asm/atomic_ll_sc.h __CMPXCHG_CASE(w,  ,  mb_, 32, dmb ish,  , l, "memory", K)
ish               314 arch/arm64/include/asm/atomic_ll_sc.h __CMPXCHG_CASE( ,  ,  mb_, 64, dmb ish,  , l, "memory", L)
ish               348 arch/arm64/include/asm/atomic_ll_sc.h __CMPXCHG_DBL(_mb, dmb ish, l, "memory")
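
The atomic_ll_sc.h hits are all instances of one idiom: the full-barrier
("mb_") variants of the LL/SC atomics use a release store-exclusive (the
"l" column) followed by a trailing "dmb ish", which together give full
barrier semantics without making the load half acquire. A freestanding
sketch of what the generated add-and-return looks like
(sketch_atomic_add_return is a hypothetical name):

    /* LL/SC add-and-return with full ordering, AArch64. */
    static inline int sketch_atomic_add_return(int i, int *v)
    {
        unsigned long tmp;
        int result;

        asm volatile(
        "1: ldxr    %w0, %2\n"          /* load-exclusive, no acquire   */
        "   add     %w0, %w0, %w3\n"
        "   stlxr   %w1, %w0, %2\n"     /* store-exclusive with release */
        "   cbnz    %w1, 1b\n"          /* retry if we lost the monitor */
        "   dmb     ish\n"              /* upgrade release to full mb   */
        : "=&r" (result), "=&r" (tmp), "+Q" (*v)
        : "Ir" (i)
        : "memory");

        return result;
    }
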
ish                60 arch/arm64/include/asm/barrier.h #define __smp_mb()	dmb(ish)
ish               154 arch/arm64/include/asm/cacheflush.h 	dsb(ish);
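
The cacheflush.h hit is the tail of instruction-cache maintenance: after a
broadcast I-cache invalidate, dsb(ish) guarantees completion across the
inner-shareable domain. A sketch in the shape of arm64's
__flush_icache_all() (the helper name here is hypothetical):

    #define dsb(opt)    asm volatile("dsb " #opt ::: "memory")

    static inline void sketch_flush_icache_all(void)
    {
        asm volatile("ic ialluis"); /* invalidate all I-caches to PoU, IS */
        dsb(ish);                   /* wait for the broadcast to finish   */
    }
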
ish                57 arch/arm64/include/asm/cmpxchg.h __XCHG_CASE(w, b,  mb_,  8, dmb ish, nop,  , a, l, "memory")
ish                58 arch/arm64/include/asm/cmpxchg.h __XCHG_CASE(w, h,  mb_, 16, dmb ish, nop,  , a, l, "memory")
ish                59 arch/arm64/include/asm/cmpxchg.h __XCHG_CASE(w,  ,  mb_, 32, dmb ish, nop,  , a, l, "memory")
ish                60 arch/arm64/include/asm/cmpxchg.h __XCHG_CASE( ,  ,  mb_, 64, dmb ish, nop,  , a, l, "memory")
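
The __XCHG_CASE rows pair "dmb ish" with "nop" because each case is
generated twice: the LL/SC leg needs the trailing barrier, while the LSE
leg (swpal) is a single fully-ordered instruction and patches in a nop
instead. A sketch of the LL/SC leg of a full-barrier 32-bit exchange
(sketch_xchg32 is a hypothetical name):

    static inline unsigned int sketch_xchg32(unsigned int new,
                                             unsigned int *ptr)
    {
        unsigned int old;
        unsigned long tmp;

        asm volatile(
        "1: ldxr    %w0, %2\n"          /* load-exclusive              */
        "   stlxr   %w1, %w3, %2\n"     /* release store-exclusive     */
        "   cbnz    %w1, 1b\n"          /* retry on contention         */
        "   dmb     ish\n"              /* the "dmb ish" column above  */
        : "=&r" (old), "=&r" (tmp), "+Q" (*ptr)
        : "r" (new)
        : "memory");

        return old;
    }
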
ish               143 arch/arm64/include/asm/tlbflush.h 	dsb(ish);
ish               154 arch/arm64/include/asm/tlbflush.h 	dsb(ish);
ish               171 arch/arm64/include/asm/tlbflush.h 	dsb(ish);
ish               211 arch/arm64/include/asm/tlbflush.h 	dsb(ish);
ish               239 arch/arm64/include/asm/tlbflush.h 	dsb(ish);
ish               253 arch/arm64/include/asm/tlbflush.h 	dsb(ish);
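
The six tlbflush.h hits share the ordering skeleton the header's own
comments describe: a dsb(ishst) so the page-table update is visible before
the TLBI, the broadcast TLBI itself, then dsb(ish) so the invalidation has
finished everywhere before the flush returns. A simplified sketch, assuming
the caller has already packed the ASID into bits 63:48 as __TLBI_VADDR()
does (sketch_flush_tlb_asid is a hypothetical name):

    #define dsb(opt)    asm volatile("dsb " #opt ::: "memory")

    static inline void sketch_flush_tlb_asid(unsigned long asid_field)
    {
        dsb(ishst);     /* PTE stores visible to the walker before TLBI */
        asm volatile("tlbi aside1is, %0" : : "r" (asid_field));
        dsb(ish);       /* TLBI complete on every inner-shareable core  */
    }
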
ish                24 arch/arm64/include/asm/vdso/compat_barrier.h #define aarch32_smp_mb()	dmb(ish)
ish                28 arch/arm64/include/asm/vdso/compat_barrier.h #define aarch32_smp_mb()	dmb(ish)
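
The two identical aarch32_smp_mb() hits are not listing noise: the compat
vDSO header defines the macro once in each branch of a preprocessor
conditional, and both branches appear to resolve the full barrier to the
same dmb(ish).
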
ish               195 arch/arm64/kernel/alternative.c 		dsb(ish);
ish               524 arch/arm64/kernel/process.c 	dsb(ish);
ish                41 arch/arm64/kernel/sys_compat.c 			dsb(ish);
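
Three scattered users of the same completion barrier: alternative.c issues
dsb(ish) while publishing runtime-patched kernel text, process.c issues it
in __switch_to() so any pending TLB or cache maintenance completes before a
task can migrate to another CPU (a barrier the membarrier() system call
also relies on), and sys_compat.c issues it in the compat cacheflush
syscall path. A sketch of the generic publish-patched-text sequence behind
the alternative.c hit (names are hypothetical, and the data-cache clean to
the point of unification is assumed to have happened already):

    #define dsb(opt)    asm volatile("dsb " #opt ::: "memory")
    #define isb()       asm volatile("isb" ::: "memory")

    static void sketch_publish_patched_text(void)
    {
        dsb(ish);                   /* patched stores visible everywhere */
        asm volatile("ic ialluis"); /* drop stale I-cache lines, IS-wide */
        dsb(ish);                   /* wait for the invalidate to finish */
        isb();                      /* refetch instructions on this CPU  */
    }
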
ish               138 arch/arm64/kvm/hyp/tlb.c 	dsb(ish);
ish               140 arch/arm64/kvm/hyp/tlb.c 	dsb(ish);
ish               179 arch/arm64/kvm/hyp/tlb.c 	dsb(ish);
ish               217 arch/arm64/kvm/hyp/tlb.c 	dsb(ish);
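
The kvm/hyp/tlb.c hits bracket the stage-2 TLB maintenance done on behalf
of guests. A sketch of the core of __kvm_tlb_flush_vmid_ipa(), with the
VTTBR switching and erratum workarounds omitted (the helper name is
hypothetical):

    #define dsb(opt)    asm volatile("dsb " #opt ::: "memory")

    static void sketch_kvm_flush_ipa(unsigned long ipa)
    {
        asm volatile("tlbi ipas2e1is, %0" : : "r" (ipa >> 12));
        dsb(ish);   /* stage-2 entries gone before touching stage-1 */
        asm volatile("tlbi vmalle1is");
        dsb(ish);   /* combined stage-1/2 walks invalidated IS-wide */
        asm volatile("isb");
    }
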