This source file includes the following definitions:
- cvmx_spinlock_init
- cvmx_spinlock_locked
- cvmx_spinlock_unlock
- cvmx_spinlock_trylock
- cvmx_spinlock_lock
- cvmx_spinlock_bit_lock
- cvmx_spinlock_bit_trylock
- cvmx_spinlock_bit_unlock
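
The basic lock is a word-sized spinlock taken with cvmx_spinlock_lock() or cvmx_spinlock_trylock() and released with cvmx_spinlock_unlock(). A minimal usage sketch, assuming the header is reachable as <asm/octeon/cvmx-spinlock.h>; the lock, counter, and helper names below are illustrative only and not part of this file:

#include <asm/octeon/cvmx-spinlock.h>

static cvmx_spinlock_t counter_lock = CVMX_SPINLOCK_UNLOCKED_INITIALIZER;
static uint64_t shared_counter;

static void counter_increment(void)
{
	cvmx_spinlock_lock(&counter_lock);       /* spins until acquired */
	shared_counter++;
	cvmx_spinlock_unlock(&counter_lock);     /* barrier, then release */
}

static int counter_try_increment(void)
{
	if (cvmx_spinlock_trylock(&counter_lock) != 0)
		return -1;                       /* lock was already held */
	shared_counter++;
	cvmx_spinlock_unlock(&counter_lock);
	return 0;
}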
#ifndef __CVMX_SPINLOCK_H__
#define __CVMX_SPINLOCK_H__

#include <asm/octeon/cvmx-asm.h>

/**
 * Spinlock structure. The lock is free when value equals
 * CVMX_SPINLOCK_UNLOCKED_VAL and held when it equals
 * CVMX_SPINLOCK_LOCKED_VAL.
 */
typedef struct {
	volatile uint32_t value;
} cvmx_spinlock_t;

#define CVMX_SPINLOCK_UNLOCKED_VAL 0
#define CVMX_SPINLOCK_LOCKED_VAL   1

/* Initializer for a statically allocated, unlocked spinlock. */
#define CVMX_SPINLOCK_UNLOCKED_INITIALIZER {CVMX_SPINLOCK_UNLOCKED_VAL}

/**
 * Initialize a spinlock to the unlocked state.
 *
 * @param lock   lock to initialize
 */
static inline void cvmx_spinlock_init(cvmx_spinlock_t *lock)
{
	lock->value = CVMX_SPINLOCK_UNLOCKED_VAL;
}

/**
 * Return non-zero if the lock is currently held.
 *
 * @param lock   lock to check
 */
static inline int cvmx_spinlock_locked(cvmx_spinlock_t *lock)
{
	return lock->value != CVMX_SPINLOCK_UNLOCKED_VAL;
}

/**
 * Release the lock. The CVMX_SYNCWS barriers make sure all stores
 * made while the lock was held are visible before it is released.
 *
 * @param lock   lock to release
 */
static inline void cvmx_spinlock_unlock(cvmx_spinlock_t *lock)
{
	CVMX_SYNCWS;
	lock->value = CVMX_SPINLOCK_UNLOCKED_VAL;
	CVMX_SYNCWS;
}

/**
 * Attempt to take the lock without spinning.
 *
 * @param lock   lock to attempt
 *
 * @return 0 if the lock was acquired, non-zero if it was already held.
 */
static inline unsigned int cvmx_spinlock_trylock(cvmx_spinlock_t *lock)
{
	unsigned int tmp;

	__asm__ __volatile__(".set noreorder          \n"
			     "1: ll   %[tmp], %[val]  \n"
			     /* If the lock is already held, give up. */
			     "   bnez %[tmp], 2f      \n"
			     "   li   %[tmp], 1       \n"
			     "   sc   %[tmp], %[val]  \n"
			     "   beqz %[tmp], 1b      \n"
			     "   li   %[tmp], 0       \n"
			     "2:                      \n"
			     ".set reorder            \n"
			     : [val] "+m"(lock->value), [tmp] "=&r"(tmp)
			     : : "memory");

	return tmp != 0;
}

/**
 * Take the lock, spinning until it is available.
 *
 * @param lock   lock to take
 */
static inline void cvmx_spinlock_lock(cvmx_spinlock_t *lock)
{
	unsigned int tmp;

	__asm__ __volatile__(".set noreorder          \n"
			     "1: ll   %[tmp], %[val]  \n"
			     "   bnez %[tmp], 1b      \n"
			     "   li   %[tmp], 1       \n"
			     "   sc   %[tmp], %[val]  \n"
			     "   beqz %[tmp], 1b      \n"
			     "   nop                  \n"
			     ".set reorder            \n"
			     : [val] "+m"(lock->value), [tmp] "=&r"(tmp)
			     : : "memory");
}

/*
 * Bit spinlocks: these use only bit 31 of a caller-supplied 32-bit
 * word, leaving the other 31 bits available for unrelated data.
 */

/**
 * Take the bit lock, spinning until bit 31 of *word can be set.
 *
 * @param word   word whose most-significant bit is used as the lock
 */
static inline void cvmx_spinlock_bit_lock(uint32_t *word)
{
	unsigned int tmp;

	__asm__ __volatile__(".set noreorder            \n"
			     ".set noat                 \n"
			     "1: ll   %[tmp], %[val]    \n"
			     /* Spin while the lock bit is already set. */
			     "   bbit1 %[tmp], 31, 1b   \n"
			     "   li   $at, 1            \n"
			     "   ins  %[tmp], $at, 31, 1 \n"
			     "   sc   %[tmp], %[val]    \n"
			     "   beqz %[tmp], 1b        \n"
			     "   nop                    \n"
			     ".set at                   \n"
			     ".set reorder              \n"
			     : [val] "+m"(*word), [tmp] "=&r"(tmp)
			     : : "memory");
}

/**
 * Attempt to take the bit lock without spinning.
 *
 * @param word   word whose most-significant bit is used as the lock
 *
 * @return 0 if the lock was acquired, non-zero if it was already held.
 */
static inline unsigned int cvmx_spinlock_bit_trylock(uint32_t *word)
{
	unsigned int tmp;

	__asm__ __volatile__(".set noreorder            \n"
			     ".set noat                 \n"
			     "1: ll   %[tmp], %[val]    \n"
			     /* If the lock bit is already set, give up. */
			     "   bbit1 %[tmp], 31, 2f   \n"
			     "   li   $at, 1            \n"
			     "   ins  %[tmp], $at, 31, 1 \n"
			     "   sc   %[tmp], %[val]    \n"
			     "   beqz %[tmp], 1b        \n"
			     "   li   %[tmp], 0         \n"
			     "2:                        \n"
			     ".set at                   \n"
			     ".set reorder              \n"
			     : [val] "+m"(*word), [tmp] "=&r"(tmp)
			     : : "memory");

	return tmp != 0;
}

/**
 * Release the bit lock by clearing bit 31 of *word. The CVMX_SYNCWS
 * barriers make sure all stores made while the lock was held are
 * visible before the bit is cleared.
 *
 * @param word   word whose most-significant bit is used as the lock
 */
static inline void cvmx_spinlock_bit_unlock(uint32_t *word)
{
	CVMX_SYNCWS;
	*word &= ~(1UL << 31);
	CVMX_SYNCWS;
}

#endif /* __CVMX_SPINLOCK_H__ */
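
The bit-lock functions treat bit 31 of any 32-bit word as the lock, so a single word can double as a lock and a small data field. A minimal sketch of guarding a counter with such a word; the names below (table_lock_word, table_entries, and the two helpers) are illustrative only:

#include <asm/octeon/cvmx-spinlock.h>

static uint32_t table_lock_word;        /* bit 31 is the lock */
static int table_entries;               /* data protected by the lock */

static void table_add_entry(void)
{
	cvmx_spinlock_bit_lock(&table_lock_word);      /* spins on bit 31 */
	table_entries++;
	cvmx_spinlock_bit_unlock(&table_lock_word);    /* clears bit 31 */
}

static int table_try_add_entry(void)
{
	if (cvmx_spinlock_bit_trylock(&table_lock_word) != 0)
		return -1;                             /* bit 31 already set */
	table_entries++;
	cvmx_spinlock_bit_unlock(&table_lock_word);
	return 0;
}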