This source file includes following definitions.
- __cntlz
- ffz
- __ffs
- ffs
- fls
- __fls
- set_bit
- clear_bit
- change_bit
- test_and_set_bit
- test_and_clear_bit
- test_and_change_bit
- set_bit
- clear_bit
- change_bit
- test_and_set_bit
- test_and_clear_bit
- test_and_change_bit
1
2
3
4
5
6
7
8
9
10
11
12
13 #ifndef _XTENSA_BITOPS_H
14 #define _XTENSA_BITOPS_H
15
16 #ifndef _LINUX_BITOPS_H
17 #error only <linux/bitops.h> can be included directly
18 #endif
19
20 #include <asm/processor.h>
21 #include <asm/byteorder.h>
22 #include <asm/barrier.h>
23
24 #include <asm-generic/bitops/non-atomic.h>
25
26 #if XCHAL_HAVE_NSA
27
/*
 * __cntlz - count leading zeros of @x.
 *
 * Uses the Xtensa NSAU (Normalization Shift Amount Unsigned) instruction,
 * which yields the number of leading zero bits of its operand; NSAU of 0
 * is 32.  Only compiled when the core has the NSA option (XCHAL_HAVE_NSA).
 */
static inline unsigned long __cntlz (unsigned long x)
{
	int lz;
	asm ("nsau %0, %1" : "=r" (lz) : "r" (x));
	return lz;
}
34
35
36
37
38
39
/*
 * ffz - find first (least significant) zero bit in @x, 0-based.
 *
 * Undefined if no zero bit exists, so callers should check for ~0 first.
 */
static inline int ffz(unsigned long x)
{
	/* ~x & -~x isolates the lowest clear bit of x as a one-hot mask. */
	unsigned long lowest_zero = ~x & -~x;

	return 31 - __cntlz(lowest_zero);
}
44
45
46
47
48
/*
 * __ffs - find first (least significant) set bit in @x, 0-based.
 *
 * Undefined if @x is zero, so callers should check against 0 first.
 */
static inline unsigned long __ffs(unsigned long x)
{
	/* x & -x isolates the lowest set bit as a one-hot mask. */
	unsigned long lowest_set = x & -x;

	return 31 - __cntlz(lowest_set);
}
53
54
55
56
57
58
59
/*
 * ffs - find first set bit in @x, libc convention.
 *
 * Returns the 1-based index of the least significant set bit,
 * or 0 if @x is zero (NSAU of 0 is 32, giving 32 - 32 == 0).
 */
static inline int ffs(unsigned long x)
{
	/* One-hot mask of the lowest set bit; 0 when x == 0. */
	unsigned long lowest_set = x & -x;

	return 32 - __cntlz(lowest_set);
}
64
65
66
67
68
69
/*
 * fls - find last (most significant) set bit in @x, libc convention.
 *
 * Returns the 1-based index of the most significant set bit,
 * or 0 if @x is zero (NSAU of 0 is 32, giving 32 - 32 == 0).
 */
static inline int fls (unsigned int x)
{
	int leading_zeros = __cntlz(x);

	return 32 - leading_zeros;
}
74
75
76
77
78
79
80
/*
 * __fls - find last (most significant) set bit in @word, 0-based.
 *
 * Undefined if @word is zero, so callers should check against 0 first.
 */
static inline unsigned long __fls(unsigned long word)
{
	unsigned long leading_zeros = __cntlz(word);

	return 31 - leading_zeros;
}
85 #else
86
87
88
89 # include <asm-generic/bitops/ffs.h>
90 # include <asm-generic/bitops/__ffs.h>
91 # include <asm-generic/bitops/ffz.h>
92 # include <asm-generic/bitops/fls.h>
93 # include <asm-generic/bitops/__fls.h>
94
95 #endif
96
97 #include <asm-generic/bitops/fls64.h>
98
99 #if XCHAL_HAVE_EXCLUSIVE
100
/*
 * set_bit - atomically set bit @bit in the bitmap at @p.
 *
 * Exclusive-access variant: l32ex loads the word and arms the exclusive
 * monitor, s32ex attempts the store, and getex fetches the store's
 * success flag into the register; the loop retries until the exclusive
 * store succeeds.  No memory barrier is implied.
 */
static inline void set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp;
	unsigned long mask = 1UL << (bit & 31);	/* bit position within one word */

	p += bit >> 5;	/* advance to the 32-bit word containing @bit */

	__asm__ __volatile__(
			"1: l32ex %0, %2\n"
			" or %0, %0, %1\n"
			" s32ex %0, %2\n"
			" getex %0\n"
			" beqz %0, 1b\n"
			: "=&a" (tmp)
			: "a" (mask), "a" (p)
			: "memory");
}
118
/*
 * clear_bit - atomically clear bit @bit in the bitmap at @p.
 *
 * Same exclusive-access retry loop as set_bit(), but ANDs with the
 * inverted mask (~mask is passed as the asm input) to drop the bit.
 * No memory barrier is implied.
 */
static inline void clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp;
	unsigned long mask = 1UL << (bit & 31);	/* bit position within one word */

	p += bit >> 5;	/* advance to the 32-bit word containing @bit */

	__asm__ __volatile__(
			"1: l32ex %0, %2\n"
			" and %0, %0, %1\n"
			" s32ex %0, %2\n"
			" getex %0\n"
			" beqz %0, 1b\n"
			: "=&a" (tmp)
			: "a" (~mask), "a" (p)
			: "memory");
}
136
/*
 * change_bit - atomically toggle bit @bit in the bitmap at @p.
 *
 * Same exclusive-access retry loop as set_bit(), using XOR to flip
 * the bit.  No memory barrier is implied.
 */
static inline void change_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp;
	unsigned long mask = 1UL << (bit & 31);	/* bit position within one word */

	p += bit >> 5;	/* advance to the 32-bit word containing @bit */

	__asm__ __volatile__(
			"1: l32ex %0, %2\n"
			" xor %0, %0, %1\n"
			" s32ex %0, %2\n"
			" getex %0\n"
			" beqz %0, 1b\n"
			: "=&a" (tmp)
			: "a" (mask), "a" (p)
			: "memory");
}
154
/*
 * test_and_set_bit - atomically set bit @bit at @p and return its old state.
 *
 * The exclusive load keeps the pre-modification word in @value; after the
 * retry loop succeeds, (value & mask) is nonzero iff the bit was already
 * set.  Note the return is the masked word, not normalized to 0/1.
 */
static inline int
test_and_set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);	/* bit position within one word */

	p += bit >> 5;	/* advance to the 32-bit word containing @bit */

	__asm__ __volatile__(
			"1: l32ex %1, %3\n"
			" or %0, %1, %2\n"
			" s32ex %0, %3\n"
			" getex %0\n"
			" beqz %0, 1b\n"
			: "=&a" (tmp), "=&a" (value)
			: "a" (mask), "a" (p)
			: "memory");

	/* old word ANDed with the mask: nonzero iff the bit was set */
	return value & mask;
}
175
/*
 * test_and_clear_bit - atomically clear bit @bit at @p and return its
 * old state.
 *
 * Same structure as test_and_set_bit(), but ANDs with ~mask to clear
 * the bit.  Returns the masked old word (nonzero iff the bit was set),
 * not a normalized 0/1.
 */
static inline int
test_and_clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);	/* bit position within one word */

	p += bit >> 5;	/* advance to the 32-bit word containing @bit */

	__asm__ __volatile__(
			"1: l32ex %1, %3\n"
			" and %0, %1, %2\n"
			" s32ex %0, %3\n"
			" getex %0\n"
			" beqz %0, 1b\n"
			: "=&a" (tmp), "=&a" (value)
			: "a" (~mask), "a" (p)
			: "memory");

	/* old word ANDed with the mask: nonzero iff the bit was set */
	return value & mask;
}
196
/*
 * test_and_change_bit - atomically toggle bit @bit at @p and return its
 * old state.
 *
 * Same structure as test_and_set_bit(), using XOR to flip the bit.
 * Returns the masked old word (nonzero iff the bit was set), not a
 * normalized 0/1.
 */
static inline int
test_and_change_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);	/* bit position within one word */

	p += bit >> 5;	/* advance to the 32-bit word containing @bit */

	__asm__ __volatile__(
			"1: l32ex %1, %3\n"
			" xor %0, %1, %2\n"
			" s32ex %0, %3\n"
			" beqz %0, 1b\n"
			: "=&a" (tmp), "=&a" (value)
			: "a" (mask), "a" (p)
			: "memory");

	/* old word ANDed with the mask: nonzero iff the bit was set */
	return value & mask;
}
217
218 #elif XCHAL_HAVE_S32C1I
219
/*
 * set_bit - atomically set bit @bit in the bitmap at @p.
 *
 * S32C1I (compare-and-swap) variant: the old word is loaded into @value
 * and placed in SCOMPARE1; s32c1i stores the OR-ed word only if memory
 * still equals SCOMPARE1, returning the observed memory word in @tmp.
 * The loop retries until the observed word matches the expected one.
 * No memory barrier is implied.
 */
static inline void set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);	/* bit position within one word */

	p += bit >> 5;	/* advance to the 32-bit word containing @bit */

	__asm__ __volatile__(
			"1: l32i %1, %3, 0\n"
			" wsr %1, scompare1\n"
			" or %0, %1, %2\n"
			" s32c1i %0, %3, 0\n"
			" bne %0, %1, 1b\n"
			: "=&a" (tmp), "=&a" (value)
			: "a" (mask), "a" (p)
			: "memory");
}
237
/*
 * clear_bit - atomically clear bit @bit in the bitmap at @p.
 *
 * Same S32C1I compare-and-swap loop as set_bit(), but ANDs with the
 * inverted mask (~mask is passed as the asm input) to drop the bit.
 * No memory barrier is implied.
 */
static inline void clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);	/* bit position within one word */

	p += bit >> 5;	/* advance to the 32-bit word containing @bit */

	__asm__ __volatile__(
			"1: l32i %1, %3, 0\n"
			" wsr %1, scompare1\n"
			" and %0, %1, %2\n"
			" s32c1i %0, %3, 0\n"
			" bne %0, %1, 1b\n"
			: "=&a" (tmp), "=&a" (value)
			: "a" (~mask), "a" (p)
			: "memory");
}
255
/*
 * change_bit - atomically toggle bit @bit in the bitmap at @p.
 *
 * Same S32C1I compare-and-swap loop as set_bit(), using XOR to flip
 * the bit.  No memory barrier is implied.
 */
static inline void change_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);	/* bit position within one word */

	p += bit >> 5;	/* advance to the 32-bit word containing @bit */

	__asm__ __volatile__(
			"1: l32i %1, %3, 0\n"
			" wsr %1, scompare1\n"
			" xor %0, %1, %2\n"
			" s32c1i %0, %3, 0\n"
			" bne %0, %1, 1b\n"
			: "=&a" (tmp), "=&a" (value)
			: "a" (mask), "a" (p)
			: "memory");
}
273
/*
 * test_and_set_bit - atomically set bit @bit at @p and return its old state.
 *
 * S32C1I writes the word it observed in memory back into @tmp; the loop
 * exits only when that observed word equals the expected old word, so on
 * exit @tmp holds the pre-modification word and (tmp & mask) is nonzero
 * iff the bit was already set.  Return is the masked word, not 0/1.
 */
static inline int
test_and_set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);	/* bit position within one word */

	p += bit >> 5;	/* advance to the 32-bit word containing @bit */

	__asm__ __volatile__(
			"1: l32i %1, %3, 0\n"
			" wsr %1, scompare1\n"
			" or %0, %1, %2\n"
			" s32c1i %0, %3, 0\n"
			" bne %0, %1, 1b\n"
			: "=&a" (tmp), "=&a" (value)
			: "a" (mask), "a" (p)
			: "memory");

	/* old word ANDed with the mask: nonzero iff the bit was set */
	return tmp & mask;
}
294
/*
 * test_and_clear_bit - atomically clear bit @bit at @p and return its
 * old state.
 *
 * Same S32C1I structure as test_and_set_bit(), but ANDs with ~mask to
 * clear the bit.  On loop exit @tmp holds the pre-modification word;
 * the return is the masked old word (nonzero iff the bit was set).
 */
static inline int
test_and_clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);	/* bit position within one word */

	p += bit >> 5;	/* advance to the 32-bit word containing @bit */

	__asm__ __volatile__(
			"1: l32i %1, %3, 0\n"
			" wsr %1, scompare1\n"
			" and %0, %1, %2\n"
			" s32c1i %0, %3, 0\n"
			" bne %0, %1, 1b\n"
			: "=&a" (tmp), "=&a" (value)
			: "a" (~mask), "a" (p)
			: "memory");

	/* old word ANDed with the mask: nonzero iff the bit was set */
	return tmp & mask;
}
315
/*
 * test_and_change_bit - atomically toggle bit @bit at @p and return its
 * old state.
 *
 * Same S32C1I structure as test_and_set_bit(), using XOR to flip the
 * bit.  On loop exit @tmp holds the pre-modification word; the return
 * is the masked old word (nonzero iff the bit was set).
 */
static inline int
test_and_change_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long tmp, value;
	unsigned long mask = 1UL << (bit & 31);	/* bit position within one word */

	p += bit >> 5;	/* advance to the 32-bit word containing @bit */

	__asm__ __volatile__(
			"1: l32i %1, %3, 0\n"
			" wsr %1, scompare1\n"
			" xor %0, %1, %2\n"
			" s32c1i %0, %3, 0\n"
			" bne %0, %1, 1b\n"
			: "=&a" (tmp), "=&a" (value)
			: "a" (mask), "a" (p)
			: "memory");

	/* old word ANDed with the mask: nonzero iff the bit was set */
	return tmp & mask;
}
336
337 #else
338
339 #include <asm-generic/bitops/atomic.h>
340
341 #endif
342
343 #include <asm-generic/bitops/find.h>
344 #include <asm-generic/bitops/le.h>
345
346 #include <asm-generic/bitops/ext2-atomic-setbit.h>
347
348 #include <asm-generic/bitops/hweight.h>
349 #include <asm-generic/bitops/lock.h>
350 #include <asm-generic/bitops/sched.h>
351
352 #endif