This source file includes the following definitions:
- __cmpxchg (LLOCK/SCOND variant)
- __cmpxchg (spinlock variant)
- __cmpxchg (EZNPS variant)
- __xchg (native EX variant)
- __xchg (EZNPS variant)
#ifndef __ASM_ARC_CMPXCHG_H
#define __ASM_ARC_CMPXCHG_H

#include <linux/types.h>

#include <asm/barrier.h>
#include <asm/smp.h>

#ifdef CONFIG_ARC_HAS_LLSC

static inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long expected, unsigned long new)
{
	unsigned long prev;

	/*
	 * Explicit full memory barrier needed before/after as
	 * LLOCK/SCOND themselves don't provide any such semantics
	 */
	smp_mb();

	__asm__ __volatile__(
	"1:	llock   %0, [%1]	\n"
	"	brne    %0, %2, 2f	\n"
	"	scond   %3, [%1]	\n"
	"	bnz     1b		\n"
	"2:				\n"
	: "=&r"(prev)	/* early clobber, to prevent reg reuse */
	: "r"(ptr),	/* not "m": llock only supports reg direct addr mode */
	  "ir"(expected),
	  "r"(new)	/* can't be "ir": scond can't take a LIMM operand */
	: "cc", "memory");	/* so gcc knows memory is written here */

	smp_mb();

	return prev;
}
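
/*
 * Semantics sketch (illustrative only): the LLOCK/SCOND loop above is the
 * atomic equivalent of the plain C below; SCOND fails, and the loop retries,
 * whenever another CPU wrote [ptr] between the LLOCK and the SCOND:
 *
 *	prev = *(volatile unsigned long *)ptr;
 *	if (prev == expected)
 *		*(volatile unsigned long *)ptr = new;
 *	return prev;
 */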

#elif !defined(CONFIG_ARC_PLAT_EZNPS)

static inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long expected, unsigned long new)
{
	unsigned long flags;
	unsigned long prev;
	volatile unsigned long *p = ptr;

	/*
	 * spin lock/unlock provides the needed smp_mb() before/after
	 */
	atomic_ops_lock(flags);
	prev = *p;
	if (prev == expected)
		*p = new;
	atomic_ops_unlock(flags);
	return prev;
}

#else /* CONFIG_ARC_PLAT_EZNPS */

static inline unsigned long
__cmpxchg(volatile void *ptr, unsigned long expected, unsigned long new)
{
	/*
	 * Explicit full memory barrier needed before/after
	 */
	smp_mb();

	/* expected value is staged in an aux reg for the custom insn below */
	write_aux_reg(CTOP_AUX_GPA1, expected);

	__asm__ __volatile__(
	"	mov r2, %0\n"
	"	mov r3, %1\n"
	"	.word %2\n"	/* custom CTOP compare-exchange insn, emitted as a raw .word */
	"	mov %0, r2"
	: "+r"(new)
	: "r"(ptr), "i"(CTOP_INST_EXC_DI_R2_R2_R3)
	: "r2", "r3", "memory");

	smp_mb();

	return new;
}

#endif /* CONFIG_ARC_HAS_LLSC */

#define cmpxchg(ptr, o, n) ({				\
	(typeof(*(ptr)))__cmpxchg((ptr),		\
				  (unsigned long)(o),	\
				  (unsigned long)(n));	\
})
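
/*
 * Usage sketch (hypothetical caller, not part of this header): a lock-free
 * increment built on cmpxchg(), retried until no other CPU races in between
 * the read and the swap:
 *
 *	static inline void lockfree_inc(unsigned long *p)
 *	{
 *		unsigned long cur, old;
 *
 *		cur = *p;
 *		while ((old = cmpxchg(p, cur, cur + 1)) != cur)
 *			cur = old;
 *	}
 */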

/*
 * atomic_cmpxchg() is the same operation as cmpxchg():
 *   LLSC: only the data type differs, semantics are identical
 *  !LLSC: cmpxchg() takes atomic_ops_lock, the same lock the other
 *         atomic_xxx() helpers use, so they serialize correctly
 */
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
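
/*
 * Usage sketch (hypothetical, mirrors the generic add-unless pattern):
 * add @a to @v unless @v is @u, built purely on atomic_cmpxchg().
 * my_add_unless() is an illustrative name, not a kernel API:
 *
 *	static inline int my_add_unless(atomic_t *v, int a, int u)
 *	{
 *		int c = atomic_read(v);
 *
 *		while (c != u) {
 *			int old = atomic_cmpxchg(v, c, c + a);
 *			if (old == c)
 *				return 1;
 *			c = old;
 *		}
 *		return 0;
 *	}
 */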

#ifndef CONFIG_ARC_PLAT_EZNPS

/*
 * xchg (reg with memory) based on "Native atomic" EX insn
 */
static inline unsigned long __xchg(unsigned long val, volatile void *ptr,
				   int size)
{
	extern unsigned long __xchg_bad_pointer(void);

	switch (size) {
	case 4:
		smp_mb();

		__asm__ __volatile__(
		"	ex  %0, [%1]	\n"	/* EX atomically swaps reg <-> mem */
		: "+r"(val)
		: "r"(ptr)
		: "memory");

		smp_mb();

		return val;
	}
	return __xchg_bad_pointer();
}

#define _xchg(ptr, with) ((typeof(*(ptr)))__xchg((unsigned long)(with), (ptr), \
						 sizeof(*(ptr))))
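
/*
 * Size-dispatch sketch (illustrative): only 4-byte objects are supported.
 * Anything else falls through the switch to __xchg_bad_pointer(), which is
 * declared but never defined, so misuse is caught at link time:
 *
 *	unsigned long v = 1;
 *	_xchg(&v, 2UL);		// OK: sizeof is 4 on 32-bit ARC
 *
 *	u64 w = 1;
 *	_xchg(&w, 2ULL);	// link error: __xchg_bad_pointer undefined
 */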

/*
 * xchg() maps directly to the ARC EX instruction, which guarantees atomicity.
 * However, in the !LLSC config it also needs to take @atomic_ops_lock, for a
 * subtle reason: !LLSC cmpxchg() uses that lock (see above), and plenty of
 * kernel code calls xchg()/cmpxchg() on the same data (see llist.h), so
 * xchg() has to follow the same locking rules.
 *
 * Technically the lock is also needed on UP (it boils down to IRQ
 * save/restore), but we can cheat a bit: cmpxchg()'s atomic_ops_lock()
 * disables IRQs, so it can't be interrupted/preempted/clobbered by xchg();
 * the other way around, xchg() is a single instruction anyway, so it can't
 * be interrupted as such.
 */
#if !defined(CONFIG_ARC_HAS_LLSC) && defined(CONFIG_SMP)

#define xchg(ptr, with)			\
({					\
	unsigned long flags;		\
	typeof(*(ptr)) old_val;		\
					\
	atomic_ops_lock(flags);		\
	old_val = _xchg(ptr, with);	\
	atomic_ops_unlock(flags);	\
	old_val;			\
})

#else

#define xchg(ptr, with) _xchg(ptr, with)

#endif

#else /* CONFIG_ARC_PLAT_EZNPS */

static inline unsigned long __xchg(unsigned long val, volatile void *ptr,
				   int size)
{
	extern unsigned long __xchg_bad_pointer(void);

	switch (size) {
	case 4:
		/*
		 * Explicit full memory barrier needed before/after
		 */
		smp_mb();

		__asm__ __volatile__(
		"	mov r2, %0\n"
		"	mov r3, %1\n"
		"	.word %2\n"	/* custom CTOP exchange insn, emitted as a raw .word */
		"	mov %0, r2\n"
		: "+r"(val)
		: "r"(ptr), "i"(CTOP_INST_XEX_DI_R2_R2_R3)
		: "r2", "r3", "memory");

		smp_mb();

		return val;
	}
	return __xchg_bad_pointer();
}

#define xchg(ptr, with) ({				\
	(typeof(*(ptr)))__xchg((unsigned long)(with),	\
			       (ptr),			\
			       sizeof(*(ptr)));		\
})

#endif /* CONFIG_ARC_PLAT_EZNPS */
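
/*
 * Usage sketch (hypothetical helpers, not part of this header): a minimal
 * test-and-set lock built on xchg(); 0 means free, 1 means taken. Whichever
 * variant of xchg() is configured above, the swap itself is atomic:
 *
 *	static inline void my_lock(volatile unsigned long *l)
 *	{
 *		while (xchg(l, 1UL))
 *			cpu_relax();	// spin until we swap a 0 out
 *	}
 *
 *	static inline void my_unlock(volatile unsigned long *l)
 *	{
 *		smp_mb();		// order critical section before release
 *		*l = 0;
 *	}
 */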

/*
 * "atomic" variant of xchg()
 * REQ: It needs to follow the same serialization rules as other atomic_xxx()
 * Since xchg() doesn't always do that, it would seem that the following
 * definition is incorrect. But here's the rationale:
 *   SMP : Even xchg() takes the atomic_ops_lock, so OK.
 *   LLSC: atomic_ops_lock is not relevant at all (even if SMP), since
 *         LLSC-based atomics are natively SMP safe: no serialization needed.
 *   UP  : other atomics disable IRQ, so no way a different ctxt atomic_xchg()
 *         could clobber them. atomic_xchg() itself would be 1 insn, so it
 *         can't be clobbered by others. Thus no serialization required when
 *         atomic_ops_lock is not held.
 */
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
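
/*
 * Usage sketch (hypothetical caller): consume a one-shot flag exactly once,
 * even if several CPUs race on it; only the winner sees the old value 1.
 * work_pending is an atomic_t and do_the_work() an illustrative helper:
 *
 *	if (atomic_xchg(&work_pending, 0))
 *		do_the_work();
 */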

#endif /* __ASM_ARC_CMPXCHG_H */