__sl_cas   30  arch/sh/include/asm/spinlock-cas.h  while (!__sl_cas(&lock->lock, 1, 0));
__sl_cas   35  arch/sh/include/asm/spinlock-cas.h  __sl_cas(&lock->lock, 0, 1);
__sl_cas   40  arch/sh/include/asm/spinlock-cas.h  return __sl_cas(&lock->lock, 1, 0);
__sl_cas   56  arch/sh/include/asm/spinlock-cas.h  while (!old || __sl_cas(&rw->lock, old, old-1) != old);
__sl_cas   63  arch/sh/include/asm/spinlock-cas.h  while (__sl_cas(&rw->lock, old, old+1) != old);
__sl_cas   68  arch/sh/include/asm/spinlock-cas.h  while (__sl_cas(&rw->lock, RW_LOCK_BIAS, 0) != RW_LOCK_BIAS);
__sl_cas   73  arch/sh/include/asm/spinlock-cas.h  __sl_cas(&rw->lock, 0, RW_LOCK_BIAS);
__sl_cas   80  arch/sh/include/asm/spinlock-cas.h  while (old && __sl_cas(&rw->lock, old, old-1) != old);
__sl_cas   86  arch/sh/include/asm/spinlock-cas.h  return __sl_cas(&rw->lock, RW_LOCK_BIAS, 0) == RW_LOCK_BIAS;
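The call sites above cover the whole CAS-based locking scheme in arch/sh/include/asm/spinlock-cas.h: the plain spinlock toggles its word between 1 (free) and 0 (held), and the reader/writer lock counts readers down from RW_LOCK_BIAS while a writer swaps the full bias to 0 in one step. Below is a minimal sketch of that pattern using C11 atomics; the names cas_u32, DEMO_RW_LOCK_BIAS, and the demo_* functions are illustrative stand-ins, not the kernel API, and the real __sl_cas wraps an architecture-specific compare-and-swap instruction rather than <stdatomic.h>.

#include <stdatomic.h>
#include <stdbool.h>

/*
 * Illustrative stand-in for __sl_cas(): atomically replace *p with newv
 * only if it still holds old, and return the value that was observed.
 */
static unsigned cas_u32(_Atomic unsigned *p, unsigned old, unsigned newv)
{
    unsigned expected = old;
    atomic_compare_exchange_strong(p, &expected, newv);
    return expected;            /* == old on success, current value on failure */
}

#define DEMO_RW_LOCK_BIAS 0x01000000    /* assumed bias value for the sketch */

struct demo_spinlock { _Atomic unsigned lock; };  /* 1 = free, 0 = held */
struct demo_rwlock   { _Atomic unsigned lock; };  /* BIAS = free, minus one per reader */

/* Spin until we win the 1 -> 0 transition (cf. the call site at line 30). */
static void demo_spin_lock(struct demo_spinlock *l)
{
    while (cas_u32(&l->lock, 1, 0) != 1)
        ;
}

/* Release by restoring 1 (cf. line 35). */
static void demo_spin_unlock(struct demo_spinlock *l)
{
    cas_u32(&l->lock, 0, 1);
}

/* A reader decrements the counter only while it is non-zero, i.e. no writer holds it
 * (cf. line 56). */
static void demo_read_lock(struct demo_rwlock *rw)
{
    unsigned old;
    do
        old = atomic_load(&rw->lock);
    while (!old || cas_u32(&rw->lock, old, old - 1) != old);
}

/* A writer succeeds only when the full bias is present, i.e. no readers and no writer
 * (cf. line 86). */
static bool demo_write_trylock(struct demo_rwlock *rw)
{
    return cas_u32(&rw->lock, DEMO_RW_LOCK_BIAS, 0) == DEMO_RW_LOCK_BIAS;
}

The remaining entries in the listing follow the same shape: read_unlock adds 1 back (line 63), write_lock spins on the trylock condition (line 68), write_unlock restores the bias (line 73), and read_trylock decrements only if the counter was non-zero (line 80).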