ia64_fetchadd      98 arch/ia64/include/asm/atomic.h 		? ia64_fetchadd(__ia64_aar_i, &(v)->counter, acq)	\
ia64_fetchadd     106 arch/ia64/include/asm/atomic.h 		? ia64_fetchadd(-__ia64_asr_i, &(v)->counter, acq)	\
ia64_fetchadd     183 arch/ia64/include/asm/atomic.h 		? ia64_fetchadd(__ia64_aar_i, &(v)->counter, acq)	\
ia64_fetchadd     191 arch/ia64/include/asm/atomic.h 		? ia64_fetchadd(-__ia64_asr_i, &(v)->counter, acq)	\
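The four atomic.h hits above appear to be the fast paths of the 32-bit and 64-bit atomic fetch-add/fetch-sub macros: when the increment is one of the few constants the ia64 fetchadd instruction accepts (plus or minus 1, 4, 8, 16), the macro uses ia64_fetchadd directly, which returns the counter's value from before the addition with acquire ordering. Below is a minimal user-space analogue using C11 atomics rather than the kernel API; the names are illustrative.

#include <stdatomic.h>
#include <stdio.h>

/* Rough analogue of the fast path above: an acquire-ordered fetch-and-add
 * that returns the value the counter held before the addition, which is
 * what ia64_fetchadd(i, &(v)->counter, acq) provides. atomic_int and the
 * C11 calls stand in for the kernel's atomic_t and the ia64 intrinsic. */
static int fetch_add_acq(atomic_int *counter, int i)
{
	return atomic_fetch_add_explicit(counter, i, memory_order_acquire);
}

int main(void)
{
	atomic_int v;
	atomic_init(&v, 40);
	int old = fetch_add_acq(&v, 4);		/* 4 is an increment the hardware accepts */
	printf("old=%d new=%d\n", old, old + 4);	/* old=40 new=44 */
	return 0;
}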
ia64_fetchadd      47 arch/ia64/include/asm/spinlock.h 	ticket = ia64_fetchadd(1, p, acq);
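The spinlock.h hit at line 47 is the ticket-lock acquire path: each CPU takes a ticket with an acquire-ordered fetch-and-add. The ia64 implementation packs the ticket and now-serving fields into a single lock word, which this single hit does not show; the sketch below uses two separate counters purely to illustrate the pattern, not the kernel's layout.

#include <stdatomic.h>

/* Generic two-counter ticket lock sketched with C11 atomics. */
struct ticket_lock {
	atomic_uint next;	/* next ticket to hand out */
	atomic_uint serving;	/* ticket currently allowed in */
};

static void ticket_lock(struct ticket_lock *l)
{
	/* The acquire-ordered fetch-and-add returns our ticket (the old value),
	 * mirroring ticket = ia64_fetchadd(1, p, acq) above. */
	unsigned int ticket = atomic_fetch_add_explicit(&l->next, 1,
							memory_order_acquire);
	while (atomic_load_explicit(&l->serving, memory_order_acquire) != ticket)
		;	/* spin; a real lock would use a cpu_relax()-style pause */
}

static void ticket_unlock(struct ticket_lock *l)
{
	atomic_fetch_add_explicit(&l->serving, 1, memory_order_release);
}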
ia64_fetchadd     171 arch/ia64/include/asm/spinlock.h 	while (unlikely(ia64_fetchadd(1, (int *) __read_lock_ptr, acq) < 0)) {		\
ia64_fetchadd     172 arch/ia64/include/asm/spinlock.h 		ia64_fetchadd(-1, (int *) __read_lock_ptr, rel);			\
ia64_fetchadd     183 arch/ia64/include/asm/spinlock.h 	ia64_fetchadd(-1, (int *) __read_lock_ptr, rel);	\
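The spinlock.h hits at lines 171, 172 and 183 are the reader side of the read-write lock: a reader optimistically bumps the count with acquire semantics, and if the old value comes back negative (a writer is assumed to have driven the counter negative; the writer path is not in this listing) it undoes the increment with release semantics and spins before retrying. Read-unlock is a plain release-ordered decrement. A C11 sketch of that reader protocol:

#include <stdatomic.h>

/* Reader side of a sign-bit read-write lock in the style of the macros
 * above, using C11 atomics. The writer side (assumed to claim the lock by
 * making the counter negative) is omitted, as it is not in the listing. */
static void read_lock(atomic_int *rw)
{
	for (;;) {
		/* Optimistically register as a reader; the returned old value
		 * tells us whether a writer was already there (negative). */
		if (atomic_fetch_add_explicit(rw, 1, memory_order_acquire) >= 0)
			return;
		/* Writer active: back out our increment and wait. */
		atomic_fetch_sub_explicit(rw, 1, memory_order_release);
		while (atomic_load_explicit(rw, memory_order_relaxed) < 0)
			;	/* spin until the writer releases */
	}
}

static void read_unlock(atomic_int *rw)
{
	atomic_fetch_sub_explicit(rw, 1, memory_order_release);
}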
ia64_fetchadd      84 arch/ia64/include/uapi/asm/intrinsics.h #define ia64_fetch_and_add(i,v)	(ia64_fetchadd(i, v, rel) + (i)) /* return new value */
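The intrinsics.h wrapper makes the return-value convention explicit: ia64_fetchadd hands back the value from before the addition, so ia64_fetch_and_add adds the increment again to report the new value. In C11 terms (not the kernel API), the equivalence looks like this:

#include <stdatomic.h>

/* C11 analogue of the wrapper above: the primitive returns the old value,
 * and the wrapper adds the increment back to yield the new one. */
static long fetch_and_add(atomic_long *v, long i)
{
	long old = atomic_fetch_add_explicit(v, i, memory_order_release);
	return old + i;	/* "return new value", as the kernel comment says */
}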
ia64_fetchadd     249 arch/ia64/kernel/smp.c 	ia64_fetchadd(1, &local_tlb_flush_counts[smp_processor_id()].count, acq);
ia64_fetchadd     319 arch/ia64/kernel/traps.c 			if ((last.count & 15) < 5 && (ia64_fetchadd(1, &last.count, acq) & 15) < 5) {
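The traps.c hit uses the low four bits of a shared counter as a cheap rate limiter: a message is emitted only while that nibble is below 5, so at most about five reports get through before the counter is reset elsewhere (the reset path is not in this listing). The first test is an inexpensive pre-check that skips the atomic increment once the limit is clearly reached; the value returned by the increment is what actually decides. A C11 sketch of the idiom, with illustrative names:

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

/* Rate-limiting idiom in the spirit of the traps.c line above: allow
 * roughly five reports while the low four bits of the event counter stay
 * below 5. The periodic reset of the counter is assumed to happen
 * elsewhere and is omitted here. Names are illustrative, not the kernel's. */
static atomic_uint event_count;

static bool should_report(void)
{
	/* Cheap pre-check: skip the atomic op once we are over the limit. */
	if ((atomic_load_explicit(&event_count, memory_order_relaxed) & 15) >= 5)
		return false;
	/* The old value returned by the increment makes the real decision. */
	return (atomic_fetch_add_explicit(&event_count, 1,
					  memory_order_acquire) & 15) < 5;
}

int main(void)
{
	for (int i = 0; i < 20; i++)
		if (should_report())
			printf("event %d reported\n", i);	/* prints events 0..4 */
	return 0;
}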
ia64_fetchadd     125 arch/ia64/mm/tlb.c 	unsigned long t = ia64_fetchadd(1, &ss->ticket, acq), serve;
ia64_fetchadd     142 arch/ia64/mm/tlb.c 	ia64_fetchadd(1, &ss->serve, rel);
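The two mm/tlb.c hits are the acquire and release halves of a spinaphore-style construct: a ticket counter bumped with acquire semantics on lock and a serve counter bumped with release semantics on unlock. Below is a minimal C11 sketch of such a spinning counting semaphore, assuming the serve counter starts at the number of allowed concurrent holders and that wraparound is handled with a time_before()-style signed comparison; the kernel's actual structure and spin loop are not part of this listing.

#include <stdatomic.h>
#include <stdbool.h>

/* Spinaphore-style counting construct consistent with the two calls above:
 * lock takes a ticket with acquire ordering, unlock advances the serve
 * counter with release ordering. Initialisation and the wraparound-safe
 * comparison are assumptions of this sketch. */
struct spinaphore {
	atomic_ulong ticket;	/* next ticket to hand out */
	atomic_ulong serve;	/* tickets below this value may proceed */
};

static void spinaphore_init(struct spinaphore *ss, unsigned long max_holders)
{
	atomic_init(&ss->ticket, 0);
	atomic_init(&ss->serve, max_holders);
}

/* Wraparound-tolerant "a before b", in the spirit of the kernel's time_before(). */
static bool before(unsigned long a, unsigned long b)
{
	return (long)(a - b) < 0;
}

static void spinaphore_lock(struct spinaphore *ss)
{
	/* Mirrors ia64_fetchadd(1, &ss->ticket, acq): the old value is our ticket. */
	unsigned long t = atomic_fetch_add_explicit(&ss->ticket, 1,
						    memory_order_acquire);
	/* Spin until the serve counter has moved past our ticket. */
	while (!before(t, atomic_load_explicit(&ss->serve, memory_order_acquire)))
		;	/* a real implementation would pause/relax here */
}

static void spinaphore_unlock(struct spinaphore *ss)
{
	/* Mirrors ia64_fetchadd(1, &ss->serve, rel): admit one more holder. */
	atomic_fetch_add_explicit(&ss->serve, 1, memory_order_release);
}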