__atomic_add       55 arch/s390/include/asm/atomic.h 	__atomic_add(i, &v->counter);
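The atomic.h hit is the generic atomic_add() wrapper handing its work to __atomic_add(). A minimal sketch of the surrounding function, assuming the usual atomic_t with an int counter field; the exact shape at this site varies across kernel versions, some of which also special-case small constant increments:

	static inline void atomic_add(int i, atomic_t *v)
	{
		/* The returned old value is not needed here; atomic_add()
		 * is used purely for its side effect on v->counter. */
		__atomic_add(i, &v->counter);
	}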
__atomic_add       30 arch/s390/include/asm/atomic_ops.h __ATOMIC_OPS(__atomic_add, int, "laa")
__atomic_add       83 arch/s390/include/asm/atomic_ops.h __ATOMIC_OPS(__atomic_add, "ar")
__atomic_add      116 arch/s390/include/asm/atomic_ops.h #define __atomic_add_const(val, ptr)		__atomic_add(val, ptr)
__atomic_add      117 arch/s390/include/asm/atomic_ops.h #define __atomic_add_const_barrier(val, ptr)	__atomic_add(val, ptr)
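The two __ATOMIC_OPS() hits are the two definitions of __atomic_add: on z196 and newer machines it is generated around the interlocked-access instruction "laa" (load and add), while the "ar" (add register) variant is the pre-z196 fallback built on a compare-and-swap loop. The two #define hits belong to that fallback path, where the constant-add helpers simply alias the plain add. A sketch of roughly what the "laa" expansion produces (non-barrier variant only; the real macro also emits a __atomic_add_barrier twin, and details differ by kernel version):

	/* "laa" atomically adds val to *ptr and returns the previous
	 * contents of *ptr. */
	static inline int __atomic_add(int val, int *ptr)
	{
		int old;

		asm volatile(
			"	laa	%[old],%[val],%[ptr]\n"
			: [old] "=d" (old), [ptr] "+Q" (*ptr)
			: [val] "d" (val)
			: "cc", "memory");
		return old;
	}

Note that __atomic_add() returns the old value of the memory location; the call sites below rely on either the side effect alone or on that return value.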
__atomic_add       58 arch/s390/include/asm/preempt.h 		__atomic_add(val, &S390_lowcore.preempt_count);
__atomic_add       68 arch/s390/include/asm/preempt.h 	return __atomic_add(-1, &S390_lowcore.preempt_count) == 1;
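The preempt.h hits show both calling patterns: the first ignores the return value and just adjusts the per-CPU preempt count in the lowcore, while the second relies on __atomic_add() returning the old value, so a result of 1 means the stored count has just dropped to 0. A sketch of the surrounding helpers, assuming they follow the generic preempt API; the helper names are not confirmed by the listing itself:

	static inline void __preempt_count_add(int val)
	{
		__atomic_add(val, &S390_lowcore.preempt_count);
	}

	static inline bool __preempt_count_dec_and_test(void)
	{
		/* Old value 1 means the count just became 0. */
		return __atomic_add(-1, &S390_lowcore.preempt_count) == 1;
	}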
__atomic_add      116 arch/s390/include/asm/spinlock.h 	old = __atomic_add(1, &rw->cnts);
__atomic_add      296 arch/s390/lib/spinlock.c 	__atomic_add(0x20000, &rw->cnts);
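The rwlock hits touch both halves of the cnts encoding: the spinlock.h hit is the read-lock fast path, which bumps the reader count and inspects the returned old value for writer bits, while the spinlock.c hit adds 0x20000 in the slow path, an increment landing in the upper half of cnts rather than in the reader field. A sketch of the fast path, assuming cnts keeps readers in its low 16 bits and writer state above them; the wait-helper name and the writer mask are assumptions consistent with that layout:

	static inline void arch_read_lock(arch_rwlock_t *rw)
	{
		int old;

		/* Optimistically take a reader reference. */
		old = __atomic_add(1, &rw->cnts);
		/* If a writer held or wanted the lock at that moment,
		 * back off into the slow path. */
		if (old & 0xffff0000)
			arch_read_lock_wait(rw);
	}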