#ifndef __ASM_CSKY_CMPXCHG_H
#define __ASM_CSKY_CMPXCHG_H

#ifdef CONFIG_CPU_HAS_LDSTEX
#include <asm/barrier.h>

extern void __bad_xchg(void);

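/*
 * ldex.w/stex.w are C-SKY's load-exclusive/store-exclusive pair (an
 * LL/SC-style primitive): the store-exclusive only takes effect if the
 * reservation from the matching load-exclusive is still held, so each
 * sequence below loops until the store succeeds.  The smp_mb() calls
 * before and after the loop keep these operations fully ordered.  Only
 * 32-bit (size 4) operands are supported; any other size resolves to the
 * undefined __bad_xchg() and fails at link time.
 */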
#define __xchg(new, ptr, size) \
({ \
	__typeof__(ptr) __ptr = (ptr); \
	__typeof__(new) __new = (new); \
	__typeof__(*(ptr)) __ret; \
	unsigned long tmp; \
	switch (size) { \
	case 4: \
		smp_mb(); \
		asm volatile ( \
		"1:	ldex.w	%0, (%3) \n" \
		"	mov	%1, %2   \n" \
		"	stex.w	%1, (%3) \n" \
		"	bez	%1, 1b   \n" \
			: "=&r" (__ret), "=&r" (tmp) \
			: "r" (__new), "r"(__ptr) \
			:); \
		smp_mb(); \
		break; \
	default: \
		__bad_xchg(); \
	} \
	__ret; \
})

#define xchg(ptr, x)	(__xchg((x), (ptr), sizeof(*(ptr))))

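/*
 * Usage sketch for xchg() (illustrative only; the lock variable below is
 * hypothetical):
 *
 *	unsigned long lock = 0;
 *	unsigned long old;
 *
 *	old = xchg(&lock, 1UL);
 *
 * The new value is stored atomically and the previous contents of the
 * location are returned, so old == 0 exactly when this caller performed
 * the 0 -> 1 transition.
 */
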
#define __cmpxchg(ptr, old, new, size) \
({ \
	__typeof__(ptr) __ptr = (ptr); \
	__typeof__(new) __new = (new); \
	__typeof__(new) __tmp; \
	__typeof__(old) __old = (old); \
	__typeof__(*(ptr)) __ret; \
	switch (size) { \
	case 4: \
		smp_mb(); \
		asm volatile ( \
		"1:	ldex.w	%0, (%3) \n" \
		"	cmpne	%0, %4   \n" \
		"	bt	2f       \n" \
		"	mov	%1, %2   \n" \
		"	stex.w	%1, (%3) \n" \
		"	bez	%1, 1b   \n" \
		"2:			 \n" \
			: "=&r" (__ret), "=&r" (__tmp) \
			: "r" (__new), "r"(__ptr), "r"(__old) \
			:); \
		smp_mb(); \
		break; \
	default: \
		__bad_xchg(); \
	} \
	__ret; \
})

#define cmpxchg(ptr, o, n) \
	(__cmpxchg((ptr), (o), (n), sizeof(*(ptr))))
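
/*
 * Usage sketch for cmpxchg() (illustrative only; p and the increment are
 * hypothetical): a classic compare-and-swap retry loop.  cmpxchg()
 * returns the value that was found at *p, so the loop exits once the
 * update lands without interference.
 *
 *	u32 old, new;
 *
 *	do {
 *		old = READ_ONCE(*p);
 *		new = old + 1;
 *	} while (cmpxchg(p, old, new) != old);
 */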
#else
#include <asm-generic/cmpxchg.h>
#endif /* CONFIG_CPU_HAS_LDSTEX */

#endif /* __ASM_CSKY_CMPXCHG_H */