This source file includes the following definitions:
- __set_bit
- __clear_bit
- __change_bit
- __test_and_set_bit
- __test_and_clear_bit
- __test_and_change_bit
- test_bit
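For orientation, a minimal usage sketch of these non-atomic helpers follows, assuming a caller-owned bitmap and single-threaded access; the function name example_usage and the chosen bit numbers are illustrative only and are not part of this file:

static void example_usage(void)
{
	DECLARE_BITMAP(flags, 64);		/* bitmap helpers come from <linux/types.h> / <linux/bitmap.h> */

	bitmap_zero(flags, 64);
	__set_bit(3, flags);			/* constant nr: can take the single-instruction path */
	if (__test_and_clear_bit(3, flags))	/* returns the old value of bit 3, then clears it */
		__change_bit(5, flags);		/* toggles bit 5 */
	if (test_bit(5, flags))			/* plain, non-destructive read */
		__clear_bit(5, flags);
}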
#ifndef __ASM_SH_BITOPS_OP32_H
#define __ASM_SH_BITOPS_OP32_H

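/*
 * Helpers for the byte-wise 32-bit-opcode bit instructions
 * (bset.b/bclr.b/bxor.b): BYTE_NUMBER() selects the byte that holds
 * bit 'nr' and BYTE_OFFSET() the bit position within that byte.
 * On big-endian, the bit number is XORed with BITOP_LE_SWIZZLE to
 * mirror the byte index within the word, compensating for byte order.
 */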
#if defined(__BIG_ENDIAN)
#define BITOP_LE_SWIZZLE	((BITS_PER_LONG-1) & ~0x7)
#define BYTE_NUMBER(nr)		((nr ^ BITOP_LE_SWIZZLE) / BITS_PER_BYTE)
#define BYTE_OFFSET(nr)		((nr ^ BITOP_LE_SWIZZLE) % BITS_PER_BYTE)
#else
#define BYTE_NUMBER(nr)		((nr) / BITS_PER_BYTE)
#define BYTE_OFFSET(nr)		((nr) % BITS_PER_BYTE)
#endif

#define IS_IMMEDIATE(nr)	(__builtin_constant_p(nr))

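/*
 * __set_bit - Set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * Non-atomic: unlike set_bit(), this may be reordered and must not be
 * used on data that can be modified concurrently. For a compile-time
 * constant bit number the single-instruction bset.b path is used,
 * otherwise a plain read-modify-write of the containing word.
 */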
static inline void __set_bit(int nr, volatile unsigned long *addr)
{
	if (IS_IMMEDIATE(nr)) {
		__asm__ __volatile__ (
			"bset.b %1, @(%O2,%0) ! __set_bit\n\t"
			: "+r" (addr)
			: "i" (BYTE_OFFSET(nr)), "i" (BYTE_NUMBER(nr))
			: "t", "memory"
		);
	} else {
		unsigned long mask = BIT_MASK(nr);
		unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);

		*p |= mask;
	}
}

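/*
 * __clear_bit - Clear a bit in memory
 * @nr: the bit to clear
 * @addr: the address to start counting from
 *
 * Non-atomic counterpart of clear_bit(): uses bclr.b when the bit number
 * is a compile-time constant, otherwise masks the bit out of the word.
 */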
static inline void __clear_bit(int nr, volatile unsigned long *addr)
{
	if (IS_IMMEDIATE(nr)) {
		__asm__ __volatile__ (
			"bclr.b %1, @(%O2,%0) ! __clear_bit\n\t"
			: "+r" (addr)
			: "i" (BYTE_OFFSET(nr)),
			  "i" (BYTE_NUMBER(nr))
			: "t", "memory"
		);
	} else {
		unsigned long mask = BIT_MASK(nr);
		unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);

		*p &= ~mask;
	}
}

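/*
 * __change_bit - Toggle a bit in memory
 * @nr: the bit to toggle
 * @addr: the address to start counting from
 *
 * Non-atomic counterpart of change_bit(): uses bxor.b when the bit number
 * is a compile-time constant, otherwise XORs the mask into the word.
 */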
static inline void __change_bit(int nr, volatile unsigned long *addr)
{
	if (IS_IMMEDIATE(nr)) {
		__asm__ __volatile__ (
			"bxor.b %1, @(%O2,%0) ! __change_bit\n\t"
			: "+r" (addr)
			: "i" (BYTE_OFFSET(nr)),
			  "i" (BYTE_NUMBER(nr))
			: "t", "memory"
		);
	} else {
		unsigned long mask = BIT_MASK(nr);
		unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);

		*p ^= mask;
	}
}

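/*
 * __test_and_set_bit - Set a bit and return its old value
 * @nr: the bit to set
 * @addr: the address to count from
 *
 * Non-atomic read-modify-write: callers must serialize access to the
 * bitmap themselves.
 */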
static inline int __test_and_set_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long old = *p;

	*p = old | mask;
	return (old & mask) != 0;
}

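/*
 * __test_and_clear_bit - Clear a bit and return its old value
 * @nr: the bit to clear
 * @addr: the address to count from
 *
 * Non-atomic read-modify-write: callers must serialize access to the
 * bitmap themselves.
 */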
static inline int __test_and_clear_bit(int nr, volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long old = *p;

	*p = old & ~mask;
	return (old & mask) != 0;
}

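/* __test_and_change_bit - Toggle a bit and return its old value; non-atomic. */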
static inline int __test_and_change_bit(int nr,
					volatile unsigned long *addr)
{
	unsigned long mask = BIT_MASK(nr);
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
	unsigned long old = *p;

	*p = old ^ mask;
	return (old & mask) != 0;
}

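/*
 * test_bit - Determine whether a bit is set
 * @nr: bit number to test
 * @addr: address to start counting from
 */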
static inline int test_bit(int nr, const volatile unsigned long *addr)
{
	return 1UL & (addr[BIT_WORD(nr)] >> (nr & (BITS_PER_LONG-1)));
}

#endif /* __ASM_SH_BITOPS_OP32_H */