This source file includes the following definitions:
- set_my_cpu_offset
- __my_cpu_offset
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2013 ARM Ltd.
 */
#ifndef __ASM_PERCPU_H
#define __ASM_PERCPU_H

#include <linux/preempt.h>

#include <asm/alternative.h>
#include <asm/cmpxchg.h>
#include <asm/stack_pointer.h>

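/*
 * The per-cpu offset for the current CPU lives in a thread-ID system
 * register: TPIDR_EL1 normally, or TPIDR_EL2 when the kernel runs at EL2
 * with the Virtualization Host Extensions. The ALTERNATIVE() below is
 * patched at boot, once the CPU features are known, to access the right
 * register.
 */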
static inline void set_my_cpu_offset(unsigned long off)
{
	asm volatile(ALTERNATIVE("msr tpidr_el1, %0",
				 "msr tpidr_el2, %0",
				 ARM64_HAS_VIRT_HOST_EXTN)
			:: "r" (off) : "memory");
}

static inline unsigned long __my_cpu_offset(void)
{
	unsigned long off;

	/*
	 * We want to allow caching the value, so avoid using volatile and
	 * instead use a fake stack read to hazard against barrier().
	 */
	asm(ALTERNATIVE("mrs %0, tpidr_el1",
			"mrs %0, tpidr_el2",
			ARM64_HAS_VIRT_HOST_EXTN)
	    : "=r" (off) :
	    "Q" (*(const unsigned long *)current_stack_pointer));

	return off;
}
#define __my_cpu_offset __my_cpu_offset()

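/*
 * Plain, sized read/write helpers. READ_ONCE()/WRITE_ONCE() guarantee a
 * single, non-torn access of the right width; the this_cpu_*() wrappers
 * below take care of preemption.
 */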
#define PERCPU_RW_OPS(sz)						\
static inline unsigned long __percpu_read_##sz(void *ptr)		\
{									\
	return READ_ONCE(*(u##sz *)ptr);				\
}									\
									\
static inline void __percpu_write_##sz(void *ptr, unsigned long val)	\
{									\
	WRITE_ONCE(*(u##sz *)ptr, (u##sz)val);				\
}

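/*
 * Generate a non-returning per-cpu read-modify-write operation.
 * ARM64_LSE_ATOMIC_INSN() is patched at boot to pick one of the two
 * sequences: an exclusive load/store (LL/SC) retry loop, or a single LSE
 * atomic store padded with __nops() so both alternatives occupy the same
 * number of instructions.
 */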
#define __PERCPU_OP_CASE(w, sfx, name, sz, op_llsc, op_lse)		\
static inline void							\
__percpu_##name##_case_##sz(void *ptr, unsigned long val)		\
{									\
	unsigned int loop;						\
	u##sz tmp;							\
									\
	asm volatile (ARM64_LSE_ATOMIC_INSN(				\
	/* LL/SC */							\
	"1:	ldxr" #sfx "\t%" #w "[tmp], %[ptr]\n"			\
		#op_llsc "\t%" #w "[tmp], %" #w "[tmp], %" #w "[val]\n"	\
	"	stxr" #sfx "\t%w[loop], %" #w "[tmp], %[ptr]\n"		\
	"	cbnz	%w[loop], 1b",					\
	/* LSE atomics */						\
		#op_lse "\t%" #w "[val], %[ptr]\n"			\
		__nops(3))						\
	: [loop] "=&r" (loop), [tmp] "=&r" (tmp),			\
	  [ptr] "+Q"(*(u##sz *)ptr)					\
	: [val] "r" ((u##sz)(val)));					\
}

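/*
 * As above, but returning the new value. Note that the LSE form uses a
 * value-returning atomic (e.g. ldadd), which yields the *old* value, so
 * op_llsc is reused afterwards to compute the new value in a register.
 */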
#define __PERCPU_RET_OP_CASE(w, sfx, name, sz, op_llsc, op_lse)	\
static inline u##sz							\
__percpu_##name##_return_case_##sz(void *ptr, unsigned long val)	\
{									\
	unsigned int loop;						\
	u##sz ret;							\
									\
	asm volatile (ARM64_LSE_ATOMIC_INSN(				\
	/* LL/SC */							\
	"1:	ldxr" #sfx "\t%" #w "[ret], %[ptr]\n"			\
		#op_llsc "\t%" #w "[ret], %" #w "[ret], %" #w "[val]\n"	\
	"	stxr" #sfx "\t%w[loop], %" #w "[ret], %[ptr]\n"		\
	"	cbnz	%w[loop], 1b",					\
	/* LSE atomics */						\
		#op_lse "\t%" #w "[val], %" #w "[ret], %[ptr]\n"	\
		#op_llsc "\t%" #w "[ret], %" #w "[ret], %" #w "[val]\n"	\
		__nops(2))						\
	: [loop] "=&r" (loop), [ret] "=&r" (ret),			\
	  [ptr] "+Q"(*(u##sz *)ptr)					\
	: [val] "r" ((u##sz)(val)));					\
									\
	return ret;							\
}

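/*
 * Stamp out the four access sizes for each operation: the 8/16/32-bit
 * cases use W registers (with the b/h suffix for byte and halfword
 * exclusives), the 64-bit case uses X registers.
 */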
#define PERCPU_OP(name, op_llsc, op_lse)				\
	__PERCPU_OP_CASE(w, b, name,  8, op_llsc, op_lse)		\
	__PERCPU_OP_CASE(w, h, name, 16, op_llsc, op_lse)		\
	__PERCPU_OP_CASE(w,  , name, 32, op_llsc, op_lse)		\
	__PERCPU_OP_CASE( ,  , name, 64, op_llsc, op_lse)

#define PERCPU_RET_OP(name, op_llsc, op_lse)				\
	__PERCPU_RET_OP_CASE(w, b, name,  8, op_llsc, op_lse)		\
	__PERCPU_RET_OP_CASE(w, h, name, 16, op_llsc, op_lse)		\
	__PERCPU_RET_OP_CASE(w,  , name, 32, op_llsc, op_lse)		\
	__PERCPU_RET_OP_CASE( ,  , name, 64, op_llsc, op_lse)

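/*
 * Instantiate the helpers, e.g. __percpu_read_32(), __percpu_add_case_64(),
 * __percpu_andnot_case_16() and __percpu_add_return_case_8(). Only "add"
 * needs a value-returning variant.
 */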
PERCPU_RW_OPS(8)
PERCPU_RW_OPS(16)
PERCPU_RW_OPS(32)
PERCPU_RW_OPS(64)
PERCPU_OP(add, add, stadd)
PERCPU_OP(andnot, bic, stclr)
PERCPU_OP(or, orr, stset)
PERCPU_RET_OP(add, add, ldadd)

#undef PERCPU_RW_OPS
#undef __PERCPU_OP_CASE
#undef __PERCPU_RET_OP_CASE
#undef PERCPU_OP
#undef PERCPU_RET_OP

/*
 * Back this_cpu_cmpxchg_double() for the 8-byte case: compare-and-swap
 * two adjacent 64-bit per-cpu words as a single atomic operation.
 * Preemption is disabled around the call so that both raw_cpu_ptr()
 * lookups and the exchange itself happen on the same CPU.
 */
#define this_cpu_cmpxchg_double_8(ptr1, ptr2, o1, o2, n1, n2)		\
({									\
	int __ret;							\
	preempt_disable_notrace();					\
	__ret = cmpxchg_double_local(	raw_cpu_ptr(&(ptr1)),		\
					raw_cpu_ptr(&(ptr2)),		\
					o1, o2, n1, n2);		\
	preempt_enable_notrace();					\
	__ret;								\
})

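/*
 * Run an operation on this CPU's copy of a per-cpu variable with preemption
 * disabled, so the task cannot migrate between resolving raw_cpu_ptr() and
 * performing the access. The notrace variants keep these wrappers safe to
 * use from within the tracing code itself.
 */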
#define _pcp_protect(op, pcp, ...)					\
({									\
	preempt_disable_notrace();					\
	op(raw_cpu_ptr(&(pcp)), __VA_ARGS__);				\
	preempt_enable_notrace();					\
})

#define _pcp_protect_return(op, pcp, args...)				\
({									\
	typeof(pcp) __retval;						\
	preempt_disable_notrace();					\
	__retval = (typeof(pcp))op(raw_cpu_ptr(&(pcp)), ##args);	\
	preempt_enable_notrace();					\
	__retval;							\
})

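/*
 * Preemption-safe this_cpu_*() accessors, one per access size. The generic
 * percpu layer selects among them by the size of the variable.
 */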
#define this_cpu_read_1(pcp)		\
	_pcp_protect_return(__percpu_read_8, pcp)
#define this_cpu_read_2(pcp)		\
	_pcp_protect_return(__percpu_read_16, pcp)
#define this_cpu_read_4(pcp)		\
	_pcp_protect_return(__percpu_read_32, pcp)
#define this_cpu_read_8(pcp)		\
	_pcp_protect_return(__percpu_read_64, pcp)

#define this_cpu_write_1(pcp, val)	\
	_pcp_protect(__percpu_write_8, pcp, (unsigned long)val)
#define this_cpu_write_2(pcp, val)	\
	_pcp_protect(__percpu_write_16, pcp, (unsigned long)val)
#define this_cpu_write_4(pcp, val)	\
	_pcp_protect(__percpu_write_32, pcp, (unsigned long)val)
#define this_cpu_write_8(pcp, val)	\
	_pcp_protect(__percpu_write_64, pcp, (unsigned long)val)

#define this_cpu_add_1(pcp, val)	\
	_pcp_protect(__percpu_add_case_8, pcp, val)
#define this_cpu_add_2(pcp, val)	\
	_pcp_protect(__percpu_add_case_16, pcp, val)
#define this_cpu_add_4(pcp, val)	\
	_pcp_protect(__percpu_add_case_32, pcp, val)
#define this_cpu_add_8(pcp, val)	\
	_pcp_protect(__percpu_add_case_64, pcp, val)

#define this_cpu_add_return_1(pcp, val)	\
	_pcp_protect_return(__percpu_add_return_case_8, pcp, val)
#define this_cpu_add_return_2(pcp, val)	\
	_pcp_protect_return(__percpu_add_return_case_16, pcp, val)
#define this_cpu_add_return_4(pcp, val)	\
	_pcp_protect_return(__percpu_add_return_case_32, pcp, val)
#define this_cpu_add_return_8(pcp, val)	\
	_pcp_protect_return(__percpu_add_return_case_64, pcp, val)

#define this_cpu_and_1(pcp, val)	\
	_pcp_protect(__percpu_andnot_case_8, pcp, ~val)
#define this_cpu_and_2(pcp, val)	\
	_pcp_protect(__percpu_andnot_case_16, pcp, ~val)
#define this_cpu_and_4(pcp, val)	\
	_pcp_protect(__percpu_andnot_case_32, pcp, ~val)
#define this_cpu_and_8(pcp, val)	\
	_pcp_protect(__percpu_andnot_case_64, pcp, ~val)

#define this_cpu_or_1(pcp, val)		\
	_pcp_protect(__percpu_or_case_8, pcp, val)
#define this_cpu_or_2(pcp, val)		\
	_pcp_protect(__percpu_or_case_16, pcp, val)
#define this_cpu_or_4(pcp, val)		\
	_pcp_protect(__percpu_or_case_32, pcp, val)
#define this_cpu_or_8(pcp, val)		\
	_pcp_protect(__percpu_or_case_64, pcp, val)

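/*
 * xchg/cmpxchg need no per-cpu asm of their own: the generic relaxed
 * primitives from <asm/cmpxchg.h> are already suitable once preemption is
 * disabled around them.
 */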
#define this_cpu_xchg_1(pcp, val)	\
	_pcp_protect_return(xchg_relaxed, pcp, val)
#define this_cpu_xchg_2(pcp, val)	\
	_pcp_protect_return(xchg_relaxed, pcp, val)
#define this_cpu_xchg_4(pcp, val)	\
	_pcp_protect_return(xchg_relaxed, pcp, val)
#define this_cpu_xchg_8(pcp, val)	\
	_pcp_protect_return(xchg_relaxed, pcp, val)

#define this_cpu_cmpxchg_1(pcp, o, n)	\
	_pcp_protect_return(cmpxchg_relaxed, pcp, o, n)
#define this_cpu_cmpxchg_2(pcp, o, n)	\
	_pcp_protect_return(cmpxchg_relaxed, pcp, o, n)
#define this_cpu_cmpxchg_4(pcp, o, n)	\
	_pcp_protect_return(cmpxchg_relaxed, pcp, o, n)
#define this_cpu_cmpxchg_8(pcp, o, n)	\
	_pcp_protect_return(cmpxchg_relaxed, pcp, o, n)

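/*
 * Example usage (illustrative sketch only; DEFINE_PER_CPU and the generic
 * this_cpu_*() wrappers come from the common percpu API, not this file):
 *
 *	DEFINE_PER_CPU(u64, my_counter);
 *	...
 *	this_cpu_add(my_counter, 1);
 *	u64 snapshot = this_cpu_read(my_counter);
 *
 * The generic layer dispatches by sizeof() to the sized helpers above,
 * e.g. this_cpu_add() on a u64 ends up in this_cpu_add_8().
 */
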
#include <asm-generic/percpu.h>

#endif	/* __ASM_PERCPU_H */