/*
 *  ARMv7-M cache maintenance operations, based on cache-v7.S.
 *
 *  On V7M the cache maintenance operations are performed through the
 *  memory mapped System Control Block (SCB) registers rather than
 *  through CP15 coprocessor instructions.
 */
#include <linux/linkage.h>
#include <linux/init.h>
#include <asm/assembler.h>
#include <asm/errno.h>
#include <asm/unwind.h>
#include <asm/v7m.h>

#include "proc-macros.S"

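/* Generic V7M read/write macros for memory mapped cache operations */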
.macro v7m_cache_read, rt, reg
	movw	\rt, #:lower16:BASEADDR_V7M_SCB + \reg
	movt	\rt, #:upper16:BASEADDR_V7M_SCB + \reg
	ldr	\rt, [\rt]
.endm

.macro v7m_cacheop, rt, tmp, op, c = al
	movw\c	\tmp, #:lower16:BASEADDR_V7M_SCB + \op
	movt\c	\tmp, #:upper16:BASEADDR_V7M_SCB + \op
	str\c	\rt, [\tmp]
.endm

.macro read_ccsidr, rt
	v7m_cache_read \rt, V7M_SCB_CCSIDR
.endm

.macro read_clidr, rt
	v7m_cache_read \rt, V7M_SCB_CLIDR
.endm

.macro write_csselr, rt, tmp
	v7m_cacheop \rt, \tmp, V7M_SCB_CSSELR
.endm
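/*
 * dcisw: Invalidate data cache by set/way
 */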
.macro dcisw, rt, tmp
	v7m_cacheop \rt, \tmp, V7M_SCB_DCISW
.endm

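/*
 * dccisw: Clean and invalidate data cache by set/way
 */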
.macro dccisw, rt, tmp
	v7m_cacheop \rt, \tmp, V7M_SCB_DCCISW
.endm

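/*
 * dccimvac: Clean and invalidate data cache line by MVA to PoC
 */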
.irp	c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
.macro dccimvac\c, rt, tmp
	v7m_cacheop \rt, \tmp, V7M_SCB_DCCIMVAC, \c
.endm
.endr

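/*
 * dcimvac: Invalidate data cache line by MVA to PoC
 */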
.irp	c,,eq,ne,cs,cc,mi,pl,vs,vc,hi,ls,ge,lt,gt,le,hs,lo
.macro dcimvac\c, rt, tmp
	v7m_cacheop \rt, \tmp, V7M_SCB_DCIMVAC, \c
.endm
.endr

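/*
 * dccmvau: Clean data cache line by MVA to PoU
 */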
.macro dccmvau, rt, tmp
	v7m_cacheop \rt, \tmp, V7M_SCB_DCCMVAU
.endm

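/*
 * dccmvac: Clean data cache line by MVA to PoC
 */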
.macro dccmvac, rt, tmp
	v7m_cacheop \rt, \tmp, V7M_SCB_DCCMVAC
.endm

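/*
 * icimvau: Invalidate instruction cache line by MVA to PoU
 */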
.macro icimvau, rt, tmp
	v7m_cacheop \rt, \tmp, V7M_SCB_ICIMVAU
.endm

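/*
 * Invalidate the whole I-cache.
 * rt data is ignored by ICIALLU, so rt can be used for the address.
 */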
.macro invalidate_icache, rt
	v7m_cacheop \rt, \rt, V7M_SCB_ICIALLU
	mov	\rt, #0
.endm

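/*
 * Invalidate the branch predictor.
 * rt data is ignored by BPIALL, so rt can be used for the address.
 */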
.macro invalidate_bp, rt
	v7m_cacheop \rt, \rt, V7M_SCB_BPIALL
	mov	\rt, #0
.endm

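/*
 *	v7m_invalidate_l1()
 *
 *	Invalidate the whole L1 data cache by set/way, discarding rather
 *	than writing back any dirty lines.
 */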
ENTRY(v7m_invalidate_l1)
	mov	r0, #0

	write_csselr r0, r1		@ select the L1 data/unified cache
	read_ccsidr r0			@ read its geometry from CCSIDR

	movw	r1, #0x7fff
	and	r2, r1, r0, lsr #13	@ NumSets - 1

	movw	r1, #0x3ff

	and	r3, r1, r0, lsr #3	@ NumWays - 1
	add	r2, r2, #1		@ NumSets

	and	r0, r0, #0x7
	add	r0, r0, #4		@ SetShift

	clz	r1, r3			@ WayShift
	add	r4, r3, #1		@ NumWays
1:	sub	r2, r2, #1		@ NumSets--
	mov	r3, r4			@ Temp = NumWays
2:	subs	r3, r3, #1		@ Temp--
	mov	r5, r3, lsl r1
	mov	r6, r2, lsl r0
	orr	r5, r5, r6		@ Reg = (Temp<<WayShift)|(NumSets<<SetShift)
	dcisw	r5, r6
	bgt	2b
	cmp	r2, #0
	bgt	1b
	dsb	st
	isb
	ret	lr
ENDPROC(v7m_invalidate_l1)
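/*
 *	v7m_flush_icache_all()
 *
 *	Flush the whole I-cache.
 *
 *	Registers:
 *	r0 - set to 0
 */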
ENTRY(v7m_flush_icache_all)
	invalidate_icache r0
	ret	lr
ENDPROC(v7m_flush_icache_all)

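/*
 *	v7m_flush_dcache_all()
 *
 *	Flush the whole D-cache.
 *
 *	Corrupted registers: r0-r7, r9-r11
 */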
ENTRY(v7m_flush_dcache_all)
	dmb					@ ensure ordering with previous memory accesses
	read_clidr r0
	mov	r3, r0, lsr #23			@ move LoC into position
	ands	r3, r3, #7 << 1			@ extract LoC*2 from clidr
	beq	finished			@ if loc is 0, then no need to clean
start_flush_levels:
	mov	r10, #0				@ start clean at cache level 0
flush_levels:
	add	r2, r10, r10, lsr #1		@ work out 3x current cache level
	mov	r1, r0, lsr r2			@ extract cache type bits from clidr
	and	r1, r1, #7			@ mask off the bits for the current cache only
	cmp	r1, #2				@ see what cache we have at this level
	blt	skip				@ skip if no cache, or just i-cache
#ifdef CONFIG_PREEMPT
	save_and_disable_irqs_notrace r9	@ make cssr&csidr read atomic
#endif
	write_csselr r10, r1			@ set current cache level
	isb					@ isb to sync the new cssr&csidr
	read_ccsidr r1				@ read the new csidr
#ifdef CONFIG_PREEMPT
	restore_irqs_notrace r9
#endif
	and	r2, r1, #7			@ extract the length of the cache lines
	add	r2, r2, #4			@ add 4 (line length offset)
	movw	r4, #0x3ff
	ands	r4, r4, r1, lsr #3		@ find maximum way index (NumWays - 1)
	clz	r5, r4				@ find bit position of way size increment
	movw	r7, #0x7fff
	ands	r7, r7, r1, lsr #13		@ extract maximum set index (NumSets - 1)
loop1:
	mov	r9, r7				@ create working copy of max index
loop2:
	lsl	r6, r4, r5
	orr	r11, r10, r6			@ factor way and cache number into r11
	lsl	r6, r9, r2
	orr	r11, r11, r6			@ factor index number into r11
	dccisw	r11, r6				@ clean/invalidate by set/way
	subs	r9, r9, #1			@ decrement the index
	bge	loop2
	subs	r4, r4, #1			@ decrement the way
	bge	loop1
skip:
	add	r10, r10, #2			@ increment cache number
	cmp	r3, r10
	bgt	flush_levels
finished:
	mov	r10, #0				@ switch back to cache level 0
	write_csselr r10, r3			@ select current cache level in cssr
	dsb	st
	isb
	ret	lr
ENDPROC(v7m_flush_dcache_all)
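/*
 *	v7m_flush_kern_cache_all()
 *
 *	Flush the entire cache system.  The data cache flush is achieved
 *	using atomic clean/invalidate operations working outwards from the
 *	L1 cache, using set/way based cache maintenance.  The instruction
 *	cache is invalidated back to the point of unification in a single
 *	operation.
 */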
ENTRY(v7m_flush_kern_cache_all)
	stmfd	sp!, {r4-r7, r9-r11, lr}
	bl	v7m_flush_dcache_all
	invalidate_icache r0
	ldmfd	sp!, {r4-r7, r9-r11, lr}
	ret	lr
ENDPROC(v7m_flush_kern_cache_all)

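/*
 *	v7m_flush_user_cache_all()
 *
 *	Flush all the cache entries in a particular address space.
 */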
ENTRY(v7m_flush_user_cache_all)
	/* FALLTHROUGH */

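/*
 *	v7m_flush_user_cache_range(start, end, flags)
 *
 *	Flush a range of cache entries in the specified address space.
 *
 *	- start - start address (may not be aligned)
 *	- end   - end address (exclusive, may not be aligned)
 *	- flags - vm_area_struct flags describing address space
 */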
ENTRY(v7m_flush_user_cache_range)
	ret	lr
ENDPROC(v7m_flush_user_cache_all)
ENDPROC(v7m_flush_user_cache_range)
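/*
 *	v7m_coherent_kern_range(start, end)
 *
 *	Ensure that the I and D caches are coherent within the specified
 *	region.  This is typically used when code has been written to
 *	a memory region, and will be executed.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */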
ENTRY(v7m_coherent_kern_range)
	/* FALLTHROUGH */

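/*
 *	v7m_coherent_user_range(start, end)
 *
 *	Ensure that the I and D caches are coherent within the specified
 *	region.  This is typically used when code has been written to
 *	a memory region, and will be executed.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */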
ENTRY(v7m_coherent_user_range)
 UNWIND(.fnstart	)
	dcache_line_size r2, r3
	sub	r3, r2, #1
	bic	r12, r0, r3
1:
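	@ clean each D line in the range to the point of unification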
	dccmvau	r12, r3
	add	r12, r12, r2
	cmp	r12, r1
	blo	1b
	dsb	ishst
	icache_line_size r2, r3
	sub	r3, r2, #1
	bic	r12, r0, r3
2:
	icimvau	r12, r3
	add	r12, r12, r2
	cmp	r12, r1
	blo	2b
	invalidate_bp r0
	dsb	ishst
	isb
	ret	lr
 UNWIND(.fnend		)
ENDPROC(v7m_coherent_kern_range)
ENDPROC(v7m_coherent_user_range)
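/*
 *	v7m_flush_kern_dcache_area(void *addr, size_t size)
 *
 *	Ensure that the data held in the region described by addr and
 *	size is written back to memory.
 *
 *	- addr	- kernel address
 *	- size	- region size
 */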
ENTRY(v7m_flush_kern_dcache_area)
	dcache_line_size r2, r3
	add	r1, r0, r1
	sub	r3, r2, #1
	bic	r0, r0, r3
1:
	dccimvac r0, r3		@ clean & invalidate D line / unified line
	add	r0, r0, r2
	cmp	r0, r1
	blo	1b
	dsb	st
	ret	lr
ENDPROC(v7m_flush_kern_dcache_area)
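/*
 *	v7m_dma_inv_range(start, end)
 *
 *	Invalidate the data cache within the specified region; we will
 *	be performing a DMA operation in this region and we want to
 *	purge old data in the cache.  Partial lines at the boundaries are
 *	cleaned first so that data outside the range is not lost.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */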
v7m_dma_inv_range:
	dcache_line_size r2, r3
	sub	r3, r2, #1
	tst	r0, r3
	bic	r0, r0, r3
	dccimvacne r0, r3	@ clean & invalidate any partial line at the start
	addne	r0, r0, r2
	subne	r3, r2, #1	@ restore r3, corrupted by v7m's dccimvac
	tst	r1, r3
	bic	r1, r1, r3
	dccimvacne r1, r3	@ clean & invalidate any partial line at the end
	cmp	r0, r1
1:
	dcimvaclo r0, r3	@ invalidate the wholly covered lines
	addlo	r0, r0, r2
	cmplo	r0, r1
	blo	1b
	dsb	st
	ret	lr
ENDPROC(v7m_dma_inv_range)
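/*
 *	v7m_dma_clean_range(start, end)
 *
 *	Clean (write back) the data cache within the specified region.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */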
v7m_dma_clean_range:
	dcache_line_size r2, r3
	sub	r3, r2, #1
	bic	r0, r0, r3
1:
	dccmvac r0, r3			@ clean D / U line
	add	r0, r0, r2
	cmp	r0, r1
	blo	1b
	dsb	st
	ret	lr
ENDPROC(v7m_dma_clean_range)
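/*
 *	v7m_dma_flush_range(start, end)
 *
 *	Clean and invalidate the data cache within the specified region.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */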
ENTRY(v7m_dma_flush_range)
	dcache_line_size r2, r3
	sub	r3, r2, #1
	bic	r0, r0, r3
1:
	dccimvac r0, r3			@ clean & invalidate D / U line
	add	r0, r0, r2
	cmp	r0, r1
	blo	1b
	dsb	st
	ret	lr
ENDPROC(v7m_dma_flush_range)
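/*
 *	v7m_dma_map_area(start, size, dir)
 *
 *	- start	- kernel virtual start address
 *	- size	- size of region
 *	- dir	- DMA direction
 */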
ENTRY(v7m_dma_map_area)
	add	r1, r1, r0		@ turn (start, size) into (start, end)
	teq	r2, #DMA_FROM_DEVICE
	beq	v7m_dma_inv_range
	b	v7m_dma_clean_range
ENDPROC(v7m_dma_map_area)
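/*
 *	v7m_dma_unmap_area(start, size, dir)
 *
 *	- start	- kernel virtual start address
 *	- size	- size of region
 *	- dir	- DMA direction
 */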
ENTRY(v7m_dma_unmap_area)
	add	r1, r1, r0		@ turn (start, size) into (start, end)
	teq	r2, #DMA_TO_DEVICE
	bne	v7m_dma_inv_range
	ret	lr
ENDPROC(v7m_dma_unmap_area)

	.globl	v7m_flush_kern_cache_louis
	.equ	v7m_flush_kern_cache_louis, v7m_flush_kern_cache_all

	__INITDATA

	@ define struct cpu_cache_fns (see <asm/cacheflush.h> and proc-macros.S)
	define_cache_functions v7m