1/*
2 * include/asm-xtensa/bitops.h
3 *
 * Atomic operations that C can't guarantee us.  Useful for resource counting etc.
5 *
6 * This file is subject to the terms and conditions of the GNU General Public
7 * License.  See the file "COPYING" in the main directory of this archive
8 * for more details.
9 *
10 * Copyright (C) 2001 - 2007 Tensilica Inc.
11 */
12
13#ifndef _XTENSA_BITOPS_H
14#define _XTENSA_BITOPS_H
15
16#ifdef __KERNEL__
17
18#ifndef _LINUX_BITOPS_H
19#error only <linux/bitops.h> can be included directly
20#endif
21
22#include <asm/processor.h>
23#include <asm/byteorder.h>
24#include <asm/barrier.h>
25
26#include <asm-generic/bitops/non-atomic.h>
27
28#if XCHAL_HAVE_NSA
29
30static inline unsigned long __cntlz (unsigned long x)
31{
32	int lz;
33	asm ("nsau %0, %1" : "=r" (lz) : "r" (x));
34	return lz;
35}
36
37/*
38 * ffz: Find first zero in word. Undefined if no zero exists.
39 * bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
40 */
41
/*
 * ffz - find first zero bit in a word (undefined if none exists).
 * Bit 0 is the LSB of addr; bit 32 is the LSB of (addr+1).
 */
static inline int ffz(unsigned long x)
{
	unsigned long inv = ~x;

	/* inv & -inv isolates the lowest set bit of ~x, i.e. the
	 * lowest clear bit of x; convert its leading-zero count to
	 * a 0-based index. */
	return 31 - __cntlz(inv & -inv);
}
46
47/*
48 * __ffs: Find first bit set in word. Return 0 for bit 0
49 */
50
/*
 * __ffs - find first set bit in a word, 0-based (returns 0 for bit 0).
 * Undefined if no bit is set.
 */
static inline int __ffs(unsigned long x)
{
	unsigned long lsb = x & -x;	/* isolate the lowest set bit */

	return 31 - __cntlz(lsb);
}
55
56/*
57 * ffs: Find first bit set in word. This is defined the same way as
58 * the libc and compiler builtin ffs routines, therefore
59 * differs in spirit from the above ffz (man ffs).
60 */
61
/*
 * ffs - find first set bit, 1-based, libc/builtin convention:
 * ffs(0) == 0, ffs(1) == 1 (see man ffs; differs in spirit from ffz).
 */
static inline int ffs(unsigned long x)
{
	unsigned long lsb = x & -x;	/* lowest set bit, or 0 */

	/* __cntlz(0) == 32, so x == 0 correctly yields 0 here. */
	return 32 - __cntlz(lsb);
}
66
67/*
68 * fls: Find last (most-significant) bit set in word.
69 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
70 */
71
/*
 * fls - find last (most-significant) set bit, 1-based.
 * fls(0) == 0, fls(1) == 1, fls(0x80000000) == 32.
 */
static inline int fls(unsigned int x)
{
	unsigned long lz = __cntlz(x);

	return 32 - lz;
}
76
77/**
78 * __fls - find last (most-significant) set bit in a long word
79 * @word: the word to search
80 *
81 * Undefined if no set bit exists, so code should check against 0 first.
82 */
/**
 * __fls - find last (most-significant) set bit in a long word, 0-based
 * @word: the word to search
 *
 * Undefined if no bit is set, so callers must check against 0 first.
 */
static inline unsigned long __fls(unsigned long word)
{
	unsigned long lz = __cntlz(word);

	return 31 - lz;
}
87#else
88
89/* Use the generic implementation if we don't have the nsa/nsau instructions. */
90
91# include <asm-generic/bitops/ffs.h>
92# include <asm-generic/bitops/__ffs.h>
93# include <asm-generic/bitops/ffz.h>
94# include <asm-generic/bitops/fls.h>
95# include <asm-generic/bitops/__fls.h>
96
97#endif
98
99#include <asm-generic/bitops/fls64.h>
100
101#if XCHAL_HAVE_S32C1I
102
/*
 * set_bit - atomically set a bit in a bitmap.
 * @bit: bit number (0-based across the whole bitmap)
 * @p:   base address of the bitmap
 *
 * Implemented as an S32C1I compare-and-swap retry loop: load the word,
 * arm scompare1 with the observed value, store the OR'd value only if
 * the word is still unchanged, and retry otherwise.
 */
static inline void set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long newval, oldval;
	unsigned long mask;

	p += bit >> 5;			/* word that holds the bit */
	mask = 1UL << (bit & 31);	/* bit position within that word */

	__asm__ __volatile__(
			"1:     l32i    %1, %3, 0\n"
			"       wsr     %1, scompare1\n"
			"       or      %0, %1, %2\n"
			"       s32c1i  %0, %3, 0\n"
			"       bne     %0, %1, 1b\n"
			: "=&a" (newval), "=&a" (oldval)
			: "a" (mask), "a" (p)
			: "memory");
}
120
/*
 * clear_bit - atomically clear a bit in a bitmap.
 * @bit: bit number (0-based across the whole bitmap)
 * @p:   base address of the bitmap
 *
 * Same S32C1I retry loop as set_bit, but ANDs with the inverted mask.
 */
static inline void clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long newval, oldval;
	unsigned long mask;

	p += bit >> 5;			/* word that holds the bit */
	mask = 1UL << (bit & 31);	/* bit position within that word */

	__asm__ __volatile__(
			"1:     l32i    %1, %3, 0\n"
			"       wsr     %1, scompare1\n"
			"       and     %0, %1, %2\n"
			"       s32c1i  %0, %3, 0\n"
			"       bne     %0, %1, 1b\n"
			: "=&a" (newval), "=&a" (oldval)
			: "a" (~mask), "a" (p)
			: "memory");
}
138
/*
 * change_bit - atomically toggle a bit in a bitmap.
 * @bit: bit number (0-based across the whole bitmap)
 * @p:   base address of the bitmap
 *
 * Same S32C1I retry loop as set_bit, but XORs with the mask.
 */
static inline void change_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long newval, oldval;
	unsigned long mask;

	p += bit >> 5;			/* word that holds the bit */
	mask = 1UL << (bit & 31);	/* bit position within that word */

	__asm__ __volatile__(
			"1:     l32i    %1, %3, 0\n"
			"       wsr     %1, scompare1\n"
			"       xor     %0, %1, %2\n"
			"       s32c1i  %0, %3, 0\n"
			"       bne     %0, %1, 1b\n"
			: "=&a" (newval), "=&a" (oldval)
			: "a" (mask), "a" (p)
			: "memory");
}
156
/*
 * test_and_set_bit - atomically set a bit and return its previous state.
 * @bit: bit number (0-based across the whole bitmap)
 * @p:   base address of the bitmap
 *
 * Returns non-zero if the bit was already set, zero otherwise.
 * On loop exit %0 holds the word value observed before the update.
 */
static inline int
test_and_set_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long res, old;
	unsigned long mask;

	p += bit >> 5;			/* word that holds the bit */
	mask = 1UL << (bit & 31);	/* bit position within that word */

	__asm__ __volatile__(
			"1:     l32i    %1, %3, 0\n"
			"       wsr     %1, scompare1\n"
			"       or      %0, %1, %2\n"
			"       s32c1i  %0, %3, 0\n"
			"       bne     %0, %1, 1b\n"
			: "=&a" (res), "=&a" (old)
			: "a" (mask), "a" (p)
			: "memory");

	return res & mask;
}
177
/*
 * test_and_clear_bit - atomically clear a bit and return its previous state.
 * @bit: bit number (0-based across the whole bitmap)
 * @p:   base address of the bitmap
 *
 * Returns non-zero if the bit was set before clearing, zero otherwise.
 * On loop exit %0 holds the word value observed before the update.
 */
static inline int
test_and_clear_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long res, old;
	unsigned long mask;

	p += bit >> 5;			/* word that holds the bit */
	mask = 1UL << (bit & 31);	/* bit position within that word */

	__asm__ __volatile__(
			"1:     l32i    %1, %3, 0\n"
			"       wsr     %1, scompare1\n"
			"       and     %0, %1, %2\n"
			"       s32c1i  %0, %3, 0\n"
			"       bne     %0, %1, 1b\n"
			: "=&a" (res), "=&a" (old)
			: "a" (~mask), "a" (p)
			: "memory");

	return res & mask;
}
198
/*
 * test_and_change_bit - atomically toggle a bit and return its previous state.
 * @bit: bit number (0-based across the whole bitmap)
 * @p:   base address of the bitmap
 *
 * Returns non-zero if the bit was set before toggling, zero otherwise.
 * On loop exit %0 holds the word value observed before the update.
 */
static inline int
test_and_change_bit(unsigned int bit, volatile unsigned long *p)
{
	unsigned long res, old;
	unsigned long mask;

	p += bit >> 5;			/* word that holds the bit */
	mask = 1UL << (bit & 31);	/* bit position within that word */

	__asm__ __volatile__(
			"1:     l32i    %1, %3, 0\n"
			"       wsr     %1, scompare1\n"
			"       xor     %0, %1, %2\n"
			"       s32c1i  %0, %3, 0\n"
			"       bne     %0, %1, 1b\n"
			: "=&a" (res), "=&a" (old)
			: "a" (mask), "a" (p)
			: "memory");

	return res & mask;
}
219
220#else
221
222#include <asm-generic/bitops/atomic.h>
223
224#endif /* XCHAL_HAVE_S32C1I */
225
226#include <asm-generic/bitops/find.h>
227#include <asm-generic/bitops/le.h>
228
229#include <asm-generic/bitops/ext2-atomic-setbit.h>
230
231#include <asm-generic/bitops/hweight.h>
232#include <asm-generic/bitops/lock.h>
233#include <asm-generic/bitops/sched.h>
234
235#endif	/* __KERNEL__ */
236
237#endif	/* _XTENSA_BITOPS_H */
238