/*
 * This source file includes the following definitions:
 * - __xchg
 * - __cmpxchg
 * - __cmpxchg64
 */
#ifndef __ASM_CMPXCHG_H
#define __ASM_CMPXCHG_H

#include <linux/bug.h>
#include <linux/irqflags.h>
#include <asm/compiler.h>
#include <asm/war.h>

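/*
 * Using a branch-likely instruction to check the result of an sc instruction
 * works around a bug present in R10000 CPUs prior to revision 3.0 that could
 * cause ll-sc sequences to execute non-atomically.
 */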
#if R10000_LLSC_WAR
# define __scbeqz "beqzl"
#else
# define __scbeqz "beqz"
#endif

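/*
 * These functions don't exist, so if they are called you'll either:
 *
 * - Get an error at compile time due to __compiletime_error, if supported by
 *   your compiler.
 *
 * or:
 *
 * - Get an error at link time due to the call to the missing function.
 */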
extern unsigned long __cmpxchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for cmpxchg");
extern unsigned long __cmpxchg64_unsupported(void)
	__compiletime_error("cmpxchg64 not available; cpu_has_64bits may be false");
extern unsigned long __xchg_called_with_bad_pointer(void)
	__compiletime_error("Bad argument size for xchg");

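/*
 * Editorial note (not in the original file): in __xchg_asm and __cmpxchg_asm
 * below, the "ld" and "st" arguments name the load-linked/store-conditional
 * mnemonics to use: ll/sc for 32 bit operands, lld/scd for 64 bit operands.
 * When the kernel cannot use LL/SC, the fallback disables interrupts around
 * a plain load & store, which is atomic only with respect to the local CPU.
 */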
#define __xchg_asm(ld, st, m, val)					\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		loongson_llsc_mb();					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"1:	" ld "	%0, %2		# __xchg_asm	\n"	\
		"	.set	pop				\n"	\
		"	move	$1, %z3				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __scbeqz "	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (val)			\
		: __LLSC_CLOBBER);					\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		*m = val;						\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})

extern unsigned long __xchg_small(volatile void *ptr, unsigned long val,
				  unsigned int size);

static __always_inline
unsigned long __xchg(volatile void *ptr, unsigned long x, int size)
{
	switch (size) {
	case 1:
	case 2:
		return __xchg_small(ptr, x, size);

	case 4:
		return __xchg_asm("ll", "sc", (volatile u32 *)ptr, x);

	case 8:
		if (!IS_ENABLED(CONFIG_64BIT))
			return __xchg_called_with_bad_pointer();

		return __xchg_asm("lld", "scd", (volatile u64 *)ptr, x);

	default:
		return __xchg_called_with_bad_pointer();
	}
}

#define xchg(ptr, x)							\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	smp_mb__before_llsc();						\
									\
	__res = (__typeof__(*(ptr)))					\
		__xchg((ptr), (unsigned long)(x), sizeof(*(ptr)));	\
									\
	smp_llsc_mb();							\
									\
	__res;								\
})

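/*
 * Usage sketch (illustrative, not part of the original file; "pending" and
 * process() are hypothetical): xchg() returns the previously stored value,
 * so a flag can be set and tested in one atomic step.
 *
 *	static unsigned long pending;
 *
 *	if (xchg(&pending, 1) == 0)
 *		process();	// previous value was 0, we set the flag first
 */
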
#define __cmpxchg_asm(ld, st, m, old, new)				\
({									\
	__typeof(*(m)) __ret;						\
									\
	if (kernel_uses_llsc) {						\
		loongson_llsc_mb();					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"1:	" ld "	%0, %2		# __cmpxchg_asm	\n"	\
		"	bne	%0, %z3, 2f			\n"	\
		"	.set	pop				\n"	\
		"	move	$1, %z4				\n"	\
		"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"	\
		"	" st "	$1, %1				\n"	\
		"\t" __scbeqz "	$1, 1b				\n"	\
		"	.set	pop				\n"	\
		"2:						\n"	\
		: "=&r" (__ret), "=" GCC_OFF_SMALL_ASM() (*m)		\
		: GCC_OFF_SMALL_ASM() (*m), "Jr" (old), "Jr" (new)	\
		: __LLSC_CLOBBER);					\
		loongson_llsc_mb();					\
	} else {							\
		unsigned long __flags;					\
									\
		raw_local_irq_save(__flags);				\
		__ret = *m;						\
		if (__ret == old)					\
			*m = new;					\
		raw_local_irq_restore(__flags);				\
	}								\
									\
	__ret;								\
})

extern unsigned long __cmpxchg_small(volatile void *ptr, unsigned long old,
				     unsigned long new, unsigned int size);

static __always_inline
unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
			unsigned long new, unsigned int size)
{
	switch (size) {
	case 1:
	case 2:
		return __cmpxchg_small(ptr, old, new, size);

	case 4:
		return __cmpxchg_asm("ll", "sc", (volatile u32 *)ptr,
				     (u32)old, new);

	case 8:
		/* lld/scd are only available for MIPS64 */
		if (!IS_ENABLED(CONFIG_64BIT))
			return __cmpxchg_called_with_bad_pointer();

		return __cmpxchg_asm("lld", "scd", (volatile u64 *)ptr,
				     (u64)old, new);

	default:
		return __cmpxchg_called_with_bad_pointer();
	}
}

#define cmpxchg_local(ptr, old, new)					\
	((__typeof__(*(ptr)))						\
		__cmpxchg((ptr),					\
			  (unsigned long)(__typeof__(*(ptr)))(old),	\
			  (unsigned long)(__typeof__(*(ptr)))(new),	\
			  sizeof(*(ptr))))

#define cmpxchg(ptr, old, new)						\
({									\
	__typeof__(*(ptr)) __res;					\
									\
	smp_mb__before_llsc();						\
	__res = cmpxchg_local((ptr), (old), (new));			\
	smp_llsc_mb();							\
									\
	__res;								\
})

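/*
 * Usage sketch (illustrative, not part of the original file; "refs" and
 * SATURATED are hypothetical): the canonical compare-and-swap retry loop,
 * here implementing a saturating increment.
 *
 *	static u32 refs;
 *	u32 old, new;
 *
 *	do {
 *		old = READ_ONCE(refs);
 *		if (old == SATURATED)
 *			break;
 *		new = old + 1;
 *	} while (cmpxchg(&refs, old, new) != old);
 */
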
#ifdef CONFIG_64BIT
#define cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg_local((ptr), (o), (n));					\
})

#define cmpxchg64(ptr, o, n)						\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	cmpxchg((ptr), (o), (n));					\
})
#else

# include <asm-generic/cmpxchg-local.h>
# define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

# ifdef CONFIG_SMP

static inline unsigned long long __cmpxchg64(volatile void *ptr,
					     unsigned long long old,
					     unsigned long long new)
{
	unsigned long long tmp, ret;
	unsigned long flags;

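	/*
	 * The assembly below has to combine 32 bit values in a 64 bit
	 * register, and split 64 bit values from one register into two. If
	 * we were to take an interrupt in the middle of this we'd only save
	 * the least significant 32 bits of each register & probably clobber
	 * the most significant 32 bits of the 64 bit values we're using. In
	 * order to avoid this we must disable interrupts.
	 */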
	local_irq_save(flags);

	loongson_llsc_mb();
	asm volatile(
	"	.set	push				\n"
	"	.set	" MIPS_ISA_ARCH_LEVEL "		\n"
	/* Load 64 bits from ptr */
	"1:	lld	%L0, %3		# __cmpxchg64	\n"
	/*
	 * Split the 64 bit value we loaded into the 2 registers that hold
	 * the ret variable.
	 */
	"	dsra	%M0, %L0, 32			\n"
	"	sll	%L0, %L0, 0			\n"
	/*
	 * Compare ret against old, breaking out of the loop if they don't
	 * match.
	 */
	"	bne	%M0, %M4, 2f			\n"
	"	bne	%L0, %L4, 2f			\n"
	/*
	 * Combine the 32 bit halves from the 2 registers that hold the new
	 * variable into a single 64 bit register.
	 */
#  if MIPS_ISA_REV >= 2
	"	move	%L1, %L5			\n"
	"	dins	%L1, %M5, 32, 32		\n"
#  else
	"	dsll	%L1, %L5, 32			\n"
	"	dsrl	%L1, %L1, 32			\n"
	"	.set	noat				\n"
	"	dsll	$at, %M5, 32			\n"
	"	or	%L1, %L1, $at			\n"
	"	.set	at				\n"
#  endif
	/* Attempt to store new at ptr */
	"	scd	%L1, %2				\n"
	/* If we failed, loop! */
	"\t" __scbeqz "	%L1, 1b				\n"
	"	.set	pop				\n"
	"2:						\n"
	: "=&r"(ret),
	  "=&r"(tmp),
	  "=" GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr)
	: GCC_OFF_SMALL_ASM() (*(unsigned long long *)ptr),
	  "r" (old),
	  "r" (new)
	: "memory");
	loongson_llsc_mb();

	local_irq_restore(flags);
	return ret;
}

# define cmpxchg64(ptr, o, n) ({					\
	unsigned long long __old = (__typeof__(*(ptr)))(o);		\
	unsigned long long __new = (__typeof__(*(ptr)))(n);		\
	__typeof__(*(ptr)) __res;					\
									\
	/*								\
	 * We can only take the fast path if the CPU has 64 bit	\
	 * registers & LL/SC; otherwise we call the error stub, which	\
	 * turns into a build-time failure.				\
	 */								\
	if (cpu_has_64bits && kernel_uses_llsc) {			\
		smp_mb__before_llsc();					\
		__res = __cmpxchg64((ptr), __old, __new);		\
		smp_llsc_mb();						\
	} else {							\
		__res = __cmpxchg64_unsupported();			\
	}								\
									\
	__res;								\
})

# else
# define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))
# endif
#endif
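
/*
 * Usage sketch (illustrative, not part of the original file; "counter" is
 * hypothetical): a lock-free 64 bit increment that works with whichever
 * cmpxchg64() definition applies above.
 *
 *	static u64 counter;
 *	u64 old, new;
 *
 *	do {
 *		old = counter;
 *		new = old + 1;
 *	} while (cmpxchg64(&counter, old, new) != old);
 */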

#undef __scbeqz

#endif /* __ASM_CMPXCHG_H */