/*
 * Atomic xchg and cmpxchg operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2001 - 2005 Tensilica Inc.
 */

#ifndef _XTENSA_CMPXCHG_H
#define _XTENSA_CMPXCHG_H

#ifndef __ASSEMBLY__

#include <linux/stringify.h>

/*
 * cmpxchg
 */

static inline unsigned long
__cmpxchg_u32(volatile int *p, int old, int new)
{
#if XCHAL_HAVE_S32C1I
	__asm__ __volatile__(
			"       wsr     %2, scompare1\n"
			"       s32c1i  %0, %1, 0\n"
			: "+a" (new)
			: "a" (p), "a" (old)
			: "memory"
			);

	return new;
#else
	__asm__ __volatile__(
			"       rsil    a15, "__stringify(LOCKLEVEL)"\n"
			"       l32i    %0, %1, 0\n"
			"       bne     %0, %2, 1f\n"
			"       s32i    %3, %1, 0\n"
			"1:\n"
			"       wsr     a15, ps\n"
			"       rsync\n"
			: "=&a" (old)
			: "a" (p), "a" (old), "r" (new)
			: "a15", "memory");
	return old;
#endif
}
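
/*
 * Illustrative sketch only: plain C that mirrors what __cmpxchg_u32()
 * does, assuming a single-threaded view of memory.  The real routine
 * above performs the load/compare/store atomically (via s32c1i, or by
 * raising the interrupt level on cores without it); ordinary C cannot
 * guarantee that.
 *
 *	static unsigned long cmpxchg_u32_sketch(volatile int *p,
 *						int old, int new)
 *	{
 *		int cur = *p;		// value currently in memory
 *		if (cur == old)		// store only if it matches 'old'
 *			*p = new;
 *		return cur;		// always return the prior value
 *	}
 */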

/*
 * This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid cmpxchg().
 */
extern void __cmpxchg_called_with_bad_pointer(void);

static inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long old, unsigned long new, int size)
{
	switch (size) {
	case 4:  return __cmpxchg_u32(ptr, old, new);
	default: __cmpxchg_called_with_bad_pointer();
		 return old;
	}
}

#define cmpxchg(ptr,o,n)						      \
	({ __typeof__(*(ptr)) _o_ = (o);				      \
	   __typeof__(*(ptr)) _n_ = (n);				      \
	   (__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_,	      \
					(unsigned long)_n_, sizeof (*(ptr))); \
	})
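
/*
 * Typical use (illustrative; 'obj->owner' and 'my_id' are hypothetical):
 * atomically claim ownership by changing 'owner' from 0 to the caller's
 * id only if nobody else got there first.  cmpxchg() returns the value
 * that was in memory before the attempt, so a return of 0 means we won.
 *
 *	if (cmpxchg(&obj->owner, 0, my_id) == 0) {
 *		// we installed my_id; proceed as the owner
 *	} else {
 *		// someone else already owns obj
 *	}
 */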

#include <asm-generic/cmpxchg-local.h>

static inline unsigned long __cmpxchg_local(volatile void *ptr,
				      unsigned long old,
				      unsigned long new, int size)
{
	switch (size) {
	case 4:
		return __cmpxchg_u32(ptr, old, new);
	default:
		return __cmpxchg_local_generic(ptr, old, new, size);
	}
}

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)					       \
	((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o),       \
			(unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
#define cmpxchg64(ptr, o, n)    cmpxchg64_local((ptr), (o), (n))
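
/*
 * Illustrative sketch ('my_counter' is a hypothetical variable that is
 * only ever modified by the local CPU): cmpxchg_local() is atomic only
 * with respect to the current CPU, which is enough when the data is
 * shared between task and interrupt context on the same CPU.
 *
 *	unsigned long old, seen;
 *
 *	do {
 *		old = my_counter;
 *		seen = cmpxchg_local(&my_counter, old, old + 1);
 *	} while (seen != old);
 */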

/*
 * xchg_u32
 *
 * Note that a15 is used here because the register allocation
 * done by the compiler is not under our control, and a window
 * overflow must not occur between the rsil and wsr instructions.
 * By using a15 in the rsil, the machine is guaranteed to be in
 * a state where no register reference will cause an overflow.
 */

static inline unsigned long xchg_u32(volatile int * m, unsigned long val)
{
#if XCHAL_HAVE_S32C1I
	unsigned long tmp, result;
	__asm__ __volatile__(
			"1:     l32i    %1, %2, 0\n"
			"       mov     %0, %3\n"
			"       wsr     %1, scompare1\n"
			"       s32c1i  %0, %2, 0\n"
			"       bne     %0, %1, 1b\n"
			: "=&a" (result), "=&a" (tmp)
			: "a" (m), "a" (val)
			: "memory"
			);
	return result;
#else
	unsigned long tmp;
	__asm__ __volatile__(
			"       rsil    a15, "__stringify(LOCKLEVEL)"\n"
			"       l32i    %0, %1, 0\n"
			"       s32i    %2, %1, 0\n"
			"       wsr     a15, ps\n"
			"       rsync\n"
			: "=&a" (tmp)
			: "a" (m), "a" (val)
			: "a15", "memory");
	return tmp;
#endif
}

#define xchg(ptr,x) \
	((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
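
/*
 * Typical use (illustrative; 'pending_events' and 'handle_events' are
 * hypothetical): atomically take the pending value and reset it, so no
 * update can be lost between the read and the clear.  xchg() returns
 * the old contents of the location.
 *
 *	unsigned long pending = xchg(&pending_events, 0);
 *
 *	if (pending)
 *		handle_events(pending);
 */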

/*
 * This only works if the compiler isn't horribly bad at optimizing.
 * gcc-2.5.8 reportedly can't handle this, but I define that one to
 * be dead anyway.
 */

extern void __xchg_called_with_bad_pointer(void);

static inline unsigned long
__xchg(unsigned long x, volatile void * ptr, int size)
{
	switch (size) {
	case 4:
		return xchg_u32(ptr, x);
	}
	__xchg_called_with_bad_pointer();
	return x;
}

#endif /* __ASSEMBLY__ */

#endif /* _XTENSA_CMPXCHG_H */