1
2 #ifndef TOOLS_ASM_X86_CMPXCHG_H
3 #define TOOLS_ASM_X86_CMPXCHG_H
4
5 #include <linux/compiler.h>
6
7
8
9
10
11 extern void __cmpxchg_wrong_size(void)
12 __compiletime_error("Bad argument size for cmpxchg");
13
14
15
16
17
18
19
20
21 #define __X86_CASE_B 1
22 #define __X86_CASE_W 2
23 #define __X86_CASE_L 4
24 #ifdef __x86_64__
25 #define __X86_CASE_Q 8
26 #else
27 #define __X86_CASE_Q -1
28 #endif
29
30
31
32
33
34
35 #define __raw_cmpxchg(ptr, old, new, size, lock) \
36 ({ \
37 __typeof__(*(ptr)) __ret; \
38 __typeof__(*(ptr)) __old = (old); \
39 __typeof__(*(ptr)) __new = (new); \
40 switch (size) { \
41 case __X86_CASE_B: \
42 { \
43 volatile u8 *__ptr = (volatile u8 *)(ptr); \
44 asm volatile(lock "cmpxchgb %2,%1" \
45 : "=a" (__ret), "+m" (*__ptr) \
46 : "q" (__new), "0" (__old) \
47 : "memory"); \
48 break; \
49 } \
50 case __X86_CASE_W: \
51 { \
52 volatile u16 *__ptr = (volatile u16 *)(ptr); \
53 asm volatile(lock "cmpxchgw %2,%1" \
54 : "=a" (__ret), "+m" (*__ptr) \
55 : "r" (__new), "0" (__old) \
56 : "memory"); \
57 break; \
58 } \
59 case __X86_CASE_L: \
60 { \
61 volatile u32 *__ptr = (volatile u32 *)(ptr); \
62 asm volatile(lock "cmpxchgl %2,%1" \
63 : "=a" (__ret), "+m" (*__ptr) \
64 : "r" (__new), "0" (__old) \
65 : "memory"); \
66 break; \
67 } \
68 case __X86_CASE_Q: \
69 { \
70 volatile u64 *__ptr = (volatile u64 *)(ptr); \
71 asm volatile(lock "cmpxchgq %2,%1" \
72 : "=a" (__ret), "+m" (*__ptr) \
73 : "r" (__new), "0" (__old) \
74 : "memory"); \
75 break; \
76 } \
77 default: \
78 __cmpxchg_wrong_size(); \
79 } \
80 __ret; \
81 })
82
83 #define __cmpxchg(ptr, old, new, size) \
84 __raw_cmpxchg((ptr), (old), (new), (size), LOCK_PREFIX)
85
86 #define cmpxchg(ptr, old, new) \
87 __cmpxchg(ptr, old, new, sizeof(*(ptr)))
88
89
90 #endif