/*
 * Copyright (C) 2013 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASM_PERCPU_H
#define __ASM_PERCPU_H

#ifdef CONFIG_SMP

/*
 * The current CPU's per-cpu offset is cached in the tpidr_el1 system
 * register so that it can be read back without touching memory.
 */
static inline void set_my_cpu_offset(unsigned long off)
{
	asm volatile("msr tpidr_el1, %0" :: "r" (off) : "memory");
}

static inline unsigned long __my_cpu_offset(void)
{
	unsigned long off;

	/*
	 * We want to allow caching the value, so avoid using volatile and
	 * instead use a fake stack read to hazard against barrier().
	 */
	asm("mrs %0, tpidr_el1" : "=r" (off) :
		"Q" (*(const unsigned long *)current_stack_pointer));

	return off;
}
#define __my_cpu_offset __my_cpu_offset()

#else	/* !CONFIG_SMP */

#define set_my_cpu_offset(x)	do { } while (0)

#endif /* CONFIG_SMP */
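
/*
 * Illustrative only: set_my_cpu_offset() is expected to be called once on
 * each CPU early during bringup, before any per-cpu accessors are used,
 * along the lines of (hypothetical call site, not defined here):
 *
 *	set_my_cpu_offset(per_cpu_offset(smp_processor_id()));
 *
 * After that, __my_cpu_offset reads the offset straight out of tpidr_el1.
 */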

/*
 * PERCPU_OP() generates a preemption-unsafe read-modify-write helper,
 * __percpu_<op>(), which applies <asm_op> to a 1-, 2-, 4- or 8-byte
 * per-cpu variable with an LDXR/STXR sequence, retrying until the
 * store-exclusive succeeds, and returns the newly written value.
 * "constraint" is an optional immediate constraint for the value
 * operand; it is stringified and prepended to "r".
 */
#define PERCPU_OP(op, asm_op, constraint)				\
static inline unsigned long __percpu_##op(void *ptr,			\
			unsigned long val, int size)			\
{									\
	unsigned long loop, ret;					\
									\
	switch (size) {							\
	case 1:								\
		do {							\
			asm ("//__per_cpu_" #op "_1\n"			\
			"ldxrb	  %w[ret], %[ptr]\n"			\
			#asm_op " %w[ret], %w[ret], %w[val]\n"		\
			"stxrb	  %w[loop], %w[ret], %[ptr]\n"		\
			: [loop] "=&r" (loop), [ret] "=&r" (ret),	\
			  [ptr] "+Q"(*(u8 *)ptr)			\
			: [val] #constraint "r" (val));			\
		} while (loop);						\
		break;							\
	case 2:								\
		do {							\
			asm ("//__per_cpu_" #op "_2\n"			\
			"ldxrh	  %w[ret], %[ptr]\n"			\
			#asm_op " %w[ret], %w[ret], %w[val]\n"		\
			"stxrh	  %w[loop], %w[ret], %[ptr]\n"		\
			: [loop] "=&r" (loop), [ret] "=&r" (ret),	\
			  [ptr] "+Q"(*(u16 *)ptr)			\
			: [val] #constraint "r" (val));			\
		} while (loop);						\
		break;							\
	case 4:								\
		do {							\
			asm ("//__per_cpu_" #op "_4\n"			\
			"ldxr	  %w[ret], %[ptr]\n"			\
			#asm_op " %w[ret], %w[ret], %w[val]\n"		\
			"stxr	  %w[loop], %w[ret], %[ptr]\n"		\
			: [loop] "=&r" (loop), [ret] "=&r" (ret),	\
			  [ptr] "+Q"(*(u32 *)ptr)			\
			: [val] #constraint "r" (val));			\
		} while (loop);						\
		break;							\
	case 8:								\
		do {							\
			asm ("//__per_cpu_" #op "_8\n"			\
			"ldxr	  %[ret], %[ptr]\n"			\
			#asm_op " %[ret], %[ret], %[val]\n"		\
			"stxr	  %w[loop], %[ret], %[ptr]\n"		\
			: [loop] "=&r" (loop), [ret] "=&r" (ret),	\
			  [ptr] "+Q"(*(u64 *)ptr)			\
			: [val] #constraint "r" (val));			\
		} while (loop);						\
		break;							\
	default:							\
		BUILD_BUG();						\
	}								\
									\
	return ret;							\
}

PERCPU_OP(add, add, I)
/*
 * Arithmetic immediates ("I") are not in general valid logical
 * immediates, so the logical ops take their operand in a register only.
 */
PERCPU_OP(and, and, )
PERCPU_OP(or, orr, )
#undef PERCPU_OP
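
/*
 * A minimal usage sketch (illustrative only; "cnt" stands for any
 * per-cpu variable and is not defined in this header):
 *
 *	u64 *p = raw_cpu_ptr(&cnt);
 *	unsigned long new = __percpu_add(p, 1, sizeof(*p));
 *
 * The caller must already be pinned to a CPU; the _pcp_protect()
 * wrapper below adds the required preemption protection.
 */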

static inline unsigned long __percpu_read(void *ptr, int size)
{
	unsigned long ret;

	switch (size) {
	case 1:
		ret = ACCESS_ONCE(*(u8 *)ptr);
		break;
	case 2:
		ret = ACCESS_ONCE(*(u16 *)ptr);
		break;
	case 4:
		ret = ACCESS_ONCE(*(u32 *)ptr);
		break;
	case 8:
		ret = ACCESS_ONCE(*(u64 *)ptr);
		break;
	default:
		BUILD_BUG();
	}

	return ret;
}

static inline void __percpu_write(void *ptr, unsigned long val, int size)
{
	switch (size) {
	case 1:
		ACCESS_ONCE(*(u8 *)ptr) = (u8)val;
		break;
	case 2:
		ACCESS_ONCE(*(u16 *)ptr) = (u16)val;
		break;
	case 4:
		ACCESS_ONCE(*(u32 *)ptr) = (u32)val;
		break;
	case 8:
		ACCESS_ONCE(*(u64 *)ptr) = (u64)val;
		break;
	default:
		BUILD_BUG();
	}
}

/*
 * Preemption-unsafe exchange: store "val" into the per-cpu location and
 * return whatever was there before.
 */
static inline unsigned long __percpu_xchg(void *ptr, unsigned long val,
						int size)
{
	unsigned long ret, loop;

	switch (size) {
	case 1:
		do {
			asm ("//__percpu_xchg_1\n"
			"ldxrb %w[ret], %[ptr]\n"
			"stxrb %w[loop], %w[val], %[ptr]\n"
			: [loop] "=&r"(loop), [ret] "=&r"(ret),
			  [ptr] "+Q"(*(u8 *)ptr)
			: [val] "r" (val));
		} while (loop);
		break;
	case 2:
		do {
			asm ("//__percpu_xchg_2\n"
			"ldxrh %w[ret], %[ptr]\n"
			"stxrh %w[loop], %w[val], %[ptr]\n"
			: [loop] "=&r"(loop), [ret] "=&r"(ret),
			  [ptr] "+Q"(*(u16 *)ptr)
			: [val] "r" (val));
		} while (loop);
		break;
	case 4:
		do {
			asm ("//__percpu_xchg_4\n"
			"ldxr %w[ret], %[ptr]\n"
			"stxr %w[loop], %w[val], %[ptr]\n"
			: [loop] "=&r"(loop), [ret] "=&r"(ret),
			  [ptr] "+Q"(*(u32 *)ptr)
			: [val] "r" (val));
		} while (loop);
		break;
	case 8:
		do {
			asm ("//__percpu_xchg_8\n"
			"ldxr %[ret], %[ptr]\n"
			"stxr %w[loop], %[val], %[ptr]\n"
			: [loop] "=&r"(loop), [ret] "=&r"(ret),
			  [ptr] "+Q"(*(u64 *)ptr)
			: [val] "r" (val));
		} while (loop);
		break;
	default:
		BUILD_BUG();
	}

	return ret;
}
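
/*
 * Illustrative only ("flag" is a hypothetical per-cpu variable):
 *
 *	old = __percpu_xchg(raw_cpu_ptr(&flag), 1, sizeof(flag));
 *
 * returns the previous contents while storing the new value.
 */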

/*
 * The preempt_disable()/preempt_enable() pairs below prevent the task
 * from being migrated between computing raw_cpu_ptr() and performing
 * the access, which would otherwise hit another CPU's copy.
 */
#define _percpu_read(pcp)						\
({									\
	typeof(pcp) __retval;						\
	preempt_disable();						\
	__retval = (typeof(pcp))__percpu_read(raw_cpu_ptr(&(pcp)),	\
					      sizeof(pcp));		\
	preempt_enable();						\
	__retval;							\
})

#define _percpu_write(pcp, val)						\
do {									\
	preempt_disable();						\
	__percpu_write(raw_cpu_ptr(&(pcp)), (unsigned long)(val),	\
				sizeof(pcp));				\
	preempt_enable();						\
} while (0)

#define _pcp_protect(operation, pcp, val)			\
({								\
	typeof(pcp) __retval;					\
	preempt_disable();					\
	__retval = (typeof(pcp))operation(raw_cpu_ptr(&(pcp)),	\
					  (val), sizeof(pcp));	\
	preempt_enable();					\
	__retval;						\
})
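
/*
 * For illustration, _percpu_add(x, 1) below expands (roughly) to:
 *
 *	preempt_disable();
 *	__retval = (typeof(x))__percpu_add(raw_cpu_ptr(&(x)),
 *					   (1), sizeof(x));
 *	preempt_enable();
 */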
#define _percpu_add(pcp, val) \
	_pcp_protect(__percpu_add, pcp, val)

/* __percpu_add() returns the new value, so it also serves as add_return. */
#define _percpu_add_return(pcp, val) _percpu_add(pcp, val)

#define _percpu_and(pcp, val) \
	_pcp_protect(__percpu_and, pcp, val)

#define _percpu_or(pcp, val) \
	_pcp_protect(__percpu_or, pcp, val)

#define _percpu_xchg(pcp, val) (typeof(pcp)) \
	_pcp_protect(__percpu_xchg, pcp, (unsigned long)(val))

#define this_cpu_add_1(pcp, val) _percpu_add(pcp, val)
#define this_cpu_add_2(pcp, val) _percpu_add(pcp, val)
#define this_cpu_add_4(pcp, val) _percpu_add(pcp, val)
#define this_cpu_add_8(pcp, val) _percpu_add(pcp, val)

#define this_cpu_add_return_1(pcp, val) _percpu_add_return(pcp, val)
#define this_cpu_add_return_2(pcp, val) _percpu_add_return(pcp, val)
#define this_cpu_add_return_4(pcp, val) _percpu_add_return(pcp, val)
#define this_cpu_add_return_8(pcp, val) _percpu_add_return(pcp, val)

#define this_cpu_and_1(pcp, val) _percpu_and(pcp, val)
#define this_cpu_and_2(pcp, val) _percpu_and(pcp, val)
#define this_cpu_and_4(pcp, val) _percpu_and(pcp, val)
#define this_cpu_and_8(pcp, val) _percpu_and(pcp, val)

#define this_cpu_or_1(pcp, val) _percpu_or(pcp, val)
#define this_cpu_or_2(pcp, val) _percpu_or(pcp, val)
#define this_cpu_or_4(pcp, val) _percpu_or(pcp, val)
#define this_cpu_or_8(pcp, val) _percpu_or(pcp, val)

#define this_cpu_read_1(pcp) _percpu_read(pcp)
#define this_cpu_read_2(pcp) _percpu_read(pcp)
#define this_cpu_read_4(pcp) _percpu_read(pcp)
#define this_cpu_read_8(pcp) _percpu_read(pcp)

#define this_cpu_write_1(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_2(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_4(pcp, val) _percpu_write(pcp, val)
#define this_cpu_write_8(pcp, val) _percpu_write(pcp, val)

#define this_cpu_xchg_1(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_2(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_4(pcp, val) _percpu_xchg(pcp, val)
#define this_cpu_xchg_8(pcp, val) _percpu_xchg(pcp, val)
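
/*
 * A hedged usage sketch ("hits" is a hypothetical per-cpu variable, not
 * part of this header); the generic per-cpu layer routes this_cpu_*()
 * calls to the size-suffixed definitions above:
 *
 *	DEFINE_PER_CPU(unsigned long, hits);
 *
 *	this_cpu_add(hits, 1);
 *	pr_debug("%lu\n", this_cpu_read(hits));
 */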

#include <asm-generic/percpu.h>

#endif /* __ASM_PERCPU_H */