1
2
3
4
5
6
7
8
9 #include <linux/errno.h>
10 #include <linux/linkage.h>
11 #include <linux/init.h>
12 #include <asm/assembler.h>
13 #include <asm/cpufeature.h>
14 #include <asm/alternative.h>
15 #include <asm/asm-uaccess.h>
16
17
18
19
20
21
22
23
24
25
26
/*
 * __flush_icache_range(start, end)
 *
 * Make the I and D caches coherent for the [start, end) address range,
 * so that instructions written through the data side become visible to
 * instruction fetch.  Falls straight through into
 * __flush_cache_user_range below — the two entry points share one body.
 *
 * x0 - start of region (inclusive)
 * x1 - end of region (exclusive)
 * Returns: x0 = 0 on success, -EFAULT if a user access faulted
 */
27 ENTRY(__flush_icache_range)
28
29
30
31
32
33
34
35
36
37
38
39
/*
 * __flush_cache_user_range(start, end)
 *
 * Same operation, but the range may be user memory: TTBR0 user access
 * is enabled around the loops and the cache-maintenance instructions
 * carry exception-fixup entries that branch to 9f on a fault.
 */
40 ENTRY(__flush_cache_user_range)
41 uaccess_ttbr0_enable x2, x3, x4
	/*
	 * If the CPU reports IDC (D-cache clean not required for I/D
	 * coherence), a store barrier is sufficient; skip the D-cache
	 * clean loop entirely.
	 */
42 alternative_if ARM64_HAS_CACHE_IDC
43 dsb ishst
44 b 7f
45 alternative_else_nop_endif
	/* x2 = D-cache line size; x3 = line-size mask; x4 = aligned cursor */
46 dcache_line_size x2, x3
47 sub x3, x2, #1
48 bic x4, x0, x3
49 1:
	/*
	 * Clean each line to PoU; on CPUs with the CLEAN_CACHE erratum,
	 * use clean+invalidate to PoC instead.  Faults jump to 9f.
	 */
50 user_alt 9f, "dc cvau, x4", "dc civac, x4", ARM64_WORKAROUND_CLEAN_CACHE
51 add x4, x4, x2
52 cmp x4, x1
53 b.lo 1b
	/* Complete all cache maintenance before touching the I-side. */
54 dsb ish
55
56 7:
	/*
	 * If the CPU reports DIC (I-cache invalidation not required for
	 * I/D coherence), an ISB is enough; skip the invalidate loop.
	 */
57 alternative_if ARM64_HAS_CACHE_DIC
58 isb
59 b 8f
60 alternative_else_nop_endif
	/* Invalidate the I-cache by line; faults jump to 9f. */
61 invalidate_icache_by_line x0, x1, x2, x3, 9f
62 8: mov x0, #0
	/* Common exit: drop user access and return x0 (0 or -EFAULT). */
63 1:
64 uaccess_ttbr0_disable x1, x2
65 ret
	/* Fault fixup target: report -EFAULT through the common exit. */
66 9:
67 mov x0, #-EFAULT
68 b 1b
69 ENDPROC(__flush_icache_range)
70 ENDPROC(__flush_cache_user_range)
71
72
73
74
75
76
77
78
79
/*
 * invalidate_icache_range(start, end)
 *
 * Invalidate the I-cache for the [start, end) address range.  The range
 * may be user memory, so the by-line loop carries a fault fixup (2f).
 *
 * x0 - start of region (inclusive)
 * x1 - end of region (exclusive)
 * Returns: x0 = 0 on success, -EFAULT if a user access faulted
 */
80 ENTRY(invalidate_icache_range)
	/*
	 * On CPUs with DIC, I-cache invalidation is not needed for I/D
	 * coherence — an ISB alone suffices.
	 */
81 alternative_if ARM64_HAS_CACHE_DIC
82 mov x0, xzr
83 isb
84 ret
85 alternative_else_nop_endif
86
	/* Allow faulting accesses to user mappings during the loop. */
87 uaccess_ttbr0_enable x2, x3, x4
88
89 invalidate_icache_by_line x0, x1, x2, x3, 2f
90 mov x0, xzr
	/* Common exit: restore TTBR0 state and return x0. */
91 1:
92 uaccess_ttbr0_disable x1, x2
93 ret
	/* Fault fixup target. */
94 2:
95 mov x0, #-EFAULT
96 b 1b
97 ENDPROC(invalidate_icache_range)
98
99
100
101
102
103
104
105
106
107
/*
 * __flush_dcache_area(kaddr, size)
 *
 * Clean and invalidate the D-cache (dc civac, full-system barrier) for
 * the region [kaddr, kaddr + size).
 *
 * x0 - kaddr (start of region)
 * x1 - size of region in bytes
 */
108 ENTRY(__flush_dcache_area)
109 dcache_by_line_op civac, sy, x0, x1, x2, x3
110 ret
111 ENDPIPROC(__flush_dcache_area)
112
113
114
115
116
117
118
119
120
121
/*
 * __clean_dcache_area_pou(kaddr, size)
 *
 * Clean the D-cache to the Point of Unification (dc cvau) for the
 * region [kaddr, kaddr + size).
 *
 * x0 - kaddr (start of region)
 * x1 - size of region in bytes
 */
122 ENTRY(__clean_dcache_area_pou)
	/* With IDC, cleaning to PoU is unnecessary; a store barrier suffices. */
123 alternative_if ARM64_HAS_CACHE_IDC
124 dsb ishst
125 ret
126 alternative_else_nop_endif
127 dcache_by_line_op cvau, ish, x0, x1, x2, x3
128 ret
129 ENDPROC(__clean_dcache_area_pou)
130
131
132
133
134
135
136
137
138
139
140
/*
 * __inval_dcache_area(kaddr, size)
 * __dma_inv_area(start, size)
 *
 * Invalidate the D-cache for [kaddr, kaddr + size).  Cache lines that
 * straddle either edge of the region are cleaned & invalidated
 * (dc civac) instead of merely invalidated, so data outside the region
 * that shares a line is not lost.
 *
 * x0 - start address (kaddr)
 * x1 - size of region in bytes (converted to end address below)
 */
141 ENTRY(__inval_dcache_area)
142
143
144
145
146
147
148
149 __dma_inv_area:
	/* x1 = end = start + size */
150 add x1, x1, x0
	/* x2 = D-cache line size, x3 = line mask */
151 dcache_line_size x2, x3
152 sub x3, x2, #1
	/* If 'end' is mid-line, clean+invalidate that partial trailing line. */
153 tst x1, x3
154 bic x1, x1, x3
155 b.eq 1f
156 dc civac, x1
	/* If 'start' is mid-line, clean+invalidate the partial leading line. */
157 1: tst x0, x3
158 bic x0, x0, x3
159 b.eq 2f
160 dc civac, x0
161 b 3f
	/* Interior lines are fully covered: plain invalidate is safe. */
162 2: dc ivac, x0
163 3: add x0, x0, x2
164 cmp x0, x1
165 b.lo 2b
166 dsb sy
167 ret
168 ENDPIPROC(__inval_dcache_area)
169 ENDPROC(__dma_inv_area)
170
171
172
173
174
175
176
177
178
179
/*
 * __clean_dcache_area_poc(kaddr, size)
 * __dma_clean_area(start, size)
 *
 * Clean the D-cache to the Point of Coherency (dc cvac) for the region
 * [kaddr, kaddr + size).  Both entry points share the same body.
 *
 * x0 - start address (kaddr)
 * x1 - size of region in bytes
 */
180 ENTRY(__clean_dcache_area_poc)
181
182
183
184
185
186
187
188 __dma_clean_area:
189 dcache_by_line_op cvac, sy, x0, x1, x2, x3
190 ret
191 ENDPIPROC(__clean_dcache_area_poc)
192 ENDPROC(__dma_clean_area)
193
194
195
196
197
198
199
200
201
202
/*
 * __clean_dcache_area_pop(kaddr, size)
 *
 * Clean the D-cache to the Point of Persistence (dc cvap) for the
 * region [kaddr, kaddr + size).  On CPUs without the DC CVAP
 * instruction (ARMv8.2 DCPOP), fall back to cleaning to the Point of
 * Coherency instead.
 *
 * x0 - kaddr (start of region)
 * x1 - size of region in bytes
 */
203 ENTRY(__clean_dcache_area_pop)
204 alternative_if_not ARM64_HAS_DCPOP
205 b __clean_dcache_area_poc
206 alternative_else_nop_endif
207 dcache_by_line_op cvap, sy, x0, x1, x2, x3
208 ret
209 ENDPIPROC(__clean_dcache_area_pop)
210
211
212
213
214
215
216
217
218
/*
 * __dma_flush_area(start, size)
 *
 * Clean and invalidate the D-cache (dc civac) for the DMA region
 * [start, start + size).
 *
 * x0 - start address
 * x1 - size of region in bytes
 */
219 ENTRY(__dma_flush_area)
220 dcache_by_line_op civac, sy, x0, x1, x2, x3
221 ret
222 ENDPIPROC(__dma_flush_area)
223
224
225
226
227
228
229
/*
 * __dma_map_area(start, size, dir)
 *
 * Cache maintenance before a DMA transfer: for DMA_FROM_DEVICE the
 * region is invalidated (device will write it); otherwise it is
 * cleaned to PoC so the device sees the CPU's data.  Tail-calls into
 * the shared helpers above.
 *
 * x0 - start address
 * x1 - size of region in bytes
 * w2 - DMA direction
 */
230 ENTRY(__dma_map_area)
231 cmp w2, #DMA_FROM_DEVICE
232 b.eq __dma_inv_area
233 b __dma_clean_area
234 ENDPIPROC(__dma_map_area)
235
236
237
238
239
240
241
/*
 * __dma_unmap_area(start, size, dir)
 *
 * Cache maintenance after a DMA transfer: unless the transfer was
 * CPU-to-device only (DMA_TO_DEVICE), invalidate the region so the CPU
 * re-reads the device's data; for DMA_TO_DEVICE nothing is needed.
 *
 * x0 - start address
 * x1 - size of region in bytes
 * w2 - DMA direction
 */
242 ENTRY(__dma_unmap_area)
243 cmp w2, #DMA_TO_DEVICE
244 b.ne __dma_inv_area
245 ret
246 ENDPIPROC(__dma_unmap_area)