#ifndef _TOOLS_LINUX_ASM_AARCH64_BARRIER_H
#define _TOOLS_LINUX_ASM_AARCH64_BARRIER_H

/*
 * Memory barriers for arm64, implemented with DMB on the inner shareable
 * domain: "dmb ish" orders all memory accesses, "dmb ishst" orders stores
 * against later stores, and "dmb ishld" orders loads against later loads
 * and stores.
 */
#define mb()		asm volatile("dmb ish" ::: "memory")
#define wmb()		asm volatile("dmb ishst" ::: "memory")
#define rmb()		asm volatile("dmb ishld" ::: "memory")

/*
 * The SMP variants are identical to mb()/wmb()/rmb() above: plain DMB
 * instructions on the inner shareable domain.
 */
#define smp_mb()	asm volatile("dmb ish" ::: "memory")
#define smp_wmb()	asm volatile("dmb ishst" ::: "memory")
#define smp_rmb()	asm volatile("dmb ishld" ::: "memory")
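
/*
 * Illustrative pairing, not part of the original header (data, ready and
 * use() are placeholder names): the producer orders its data store before
 * the flag store, and the consumer orders the flag read before the data
 * read.
 *
 *	// producer			// consumer
 *	data = value;			if (ready) {
 *	smp_wmb();				smp_rmb();
 *	ready = 1;				use(data);
 *					}
 */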

#define smp_store_release(p, v)						\
do {									\
	union { typeof(*p) __val; char __c[1]; } __u =			\
		{ .__val = (v) };					\
									\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("stlrb %w1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u8_alias_t *)__u.__c)	\
				: "memory");				\
		break;							\
	case 2:								\
		asm volatile ("stlrh %w1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u16_alias_t *)__u.__c)	\
				: "memory");				\
		break;							\
	case 4:								\
		asm volatile ("stlr %w1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u32_alias_t *)__u.__c)	\
				: "memory");				\
		break;							\
	case 8:								\
		asm volatile ("stlr %1, %0"				\
				: "=Q" (*p)				\
				: "r" (*(__u64_alias_t *)__u.__c)	\
				: "memory");				\
		break;							\
	default:							\
		/* Other sizes are not expected here. */		\
		mb();							\
		break;							\
	}								\
} while (0)
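
/*
 * Illustrative use, not from the original header (shared, payload and
 * compute() are placeholder names): publish data so that all earlier
 * stores are visible before the flag itself.
 *
 *	shared->payload = compute();
 *	smp_store_release(&shared->ready, 1);
 */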

#define smp_load_acquire(p)						\
({									\
	union { typeof(*p) __val; char __c[1]; } __u =			\
		{ .__c = { 0 } };					\
									\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("ldarb %w0, %1"				\
			: "=r" (*(__u8_alias_t *)__u.__c)		\
			: "Q" (*p) : "memory");				\
		break;							\
	case 2:								\
		asm volatile ("ldarh %w0, %1"				\
			: "=r" (*(__u16_alias_t *)__u.__c)		\
			: "Q" (*p) : "memory");				\
		break;							\
	case 4:								\
		asm volatile ("ldar %w0, %1"				\
			: "=r" (*(__u32_alias_t *)__u.__c)		\
			: "Q" (*p) : "memory");				\
		break;							\
	case 8:								\
		asm volatile ("ldar %0, %1"				\
			: "=r" (*(__u64_alias_t *)__u.__c)		\
			: "Q" (*p) : "memory");				\
		break;							\
	default:							\
		/* Other sizes are not expected here. */		\
		mb();							\
		break;							\
	}								\
	__u.__val;							\
})
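
/*
 * Illustrative consumer side, pairing with the smp_store_release() sketch
 * above (again with placeholder names): if the acquire load observes
 * ready == 1, the earlier payload store is guaranteed to be visible.
 *
 *	if (smp_load_acquire(&shared->ready))
 *		consume(shared->payload);
 */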

#endif /* _TOOLS_LINUX_ASM_AARCH64_BARRIER_H */