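/*
 * Templates for KVM's paravirt instruction patching on PowerPC. When
 * running as a KVM guest with the magic page enabled, privileged MSR
 * accesses are replaced by copies of these snippets, which operate on
 * the shared magic page instead of trapping into the hypervisor.
 */
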
#include <asm/ppc_asm.h>
#include <asm/kvm_asm.h>
#include <asm/reg.h>
#include <asm/page.h>
#include <asm/asm-offsets.h>
#include <asm/asm-compat.h>

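/* The magic page occupies the last 4k of the guest's effective address space */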
#define KVM_MAGIC_PAGE	(-4096)

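/*
 * 64-bit load/store of a magic page field; 32-bit kernels only access
 * the low word (offset + 4 in the big-endian layout).
 */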
#ifdef CONFIG_64BIT
#define LL64(reg, offs, reg2)	ld	reg, (offs)(reg2)
#define STL64(reg, offs, reg2)	std	reg, (offs)(reg2)
#else
#define LL64(reg, offs, reg2)	lwz	reg, (offs + 4)(reg2)
#define STL64(reg, offs, reg2)	stw	reg, (offs + 4)(reg2)
#endif

#define SCRATCH_SAVE							\
	/* Enable the critical section: we are critical if		\
	   shared->critical == r1 */					\
	STL64(r1, KVM_MAGIC_PAGE + KVM_MAGIC_CRITICAL, 0);		\
									\
	/* Save volatile state (r31, r30, CR) to the scratch fields */	\
	PPC_STL	r31, (KVM_MAGIC_PAGE + KVM_MAGIC_SCRATCH1)(0);		\
	PPC_STL	r30, (KVM_MAGIC_PAGE + KVM_MAGIC_SCRATCH2)(0);		\
	mfcr	r31;							\
	stw	r31, (KVM_MAGIC_PAGE + KVM_MAGIC_SCRATCH3)(0);

#define SCRATCH_RESTORE							\
	/* Restore the saved state */					\
	PPC_LL	r31, (KVM_MAGIC_PAGE + KVM_MAGIC_SCRATCH1)(0);		\
	lwz	r30, (KVM_MAGIC_PAGE + KVM_MAGIC_SCRATCH3)(0);		\
	mtcr	r30;							\
	PPC_LL	r30, (KVM_MAGIC_PAGE + KVM_MAGIC_SCRATCH2)(0);		\
									\
	/* Disable the critical section: we are critical if		\
	   shared->critical == r1, and r2 is never equal to r1 */	\
	STL64(r2, KVM_MAGIC_PAGE + KVM_MAGIC_CRITICAL, 0);

.global kvm_template_start
kvm_template_start:

.global kvm_emulate_mtmsrd
kvm_emulate_mtmsrd:

	SCRATCH_SAVE

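	/* Build MSR & ~(MSR_EE|MSR_RI) in r31 */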
	LL64(r31, KVM_MAGIC_PAGE + KVM_MAGIC_MSR, 0)
	lis	r30, (~(MSR_EE | MSR_RI))@h
	ori	r30, r30, (~(MSR_EE | MSR_RI))@l
	and	r31, r31, r30

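	/* OR the target register's (MSR_EE|MSR_RI) bits into it */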
kvm_emulate_mtmsrd_reg:
	ori	r30, r0, 0
	andi.	r30, r30, (MSR_EE|MSR_RI)
	or	r31, r31, r30

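	/* Write the new MSR back to the magic page */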
	STL64(r31, KVM_MAGIC_PAGE + KVM_MAGIC_MSR, 0)

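	/* Check whether an interrupt needs to be fetched */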
	lwz	r31, (KVM_MAGIC_PAGE + KVM_MAGIC_INT)(0)
	cmpwi	r31, 0
	beq+	no_check

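	/* Only do so if the new MSR enables external interrupts */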
	andi.	r30, r30, MSR_EE
	beq	no_check

	SCRATCH_RESTORE

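	/* Nag the hypervisor: the patcher replaces this placeholder with the original, trapping instruction */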
kvm_emulate_mtmsrd_orig_ins:
	tlbsync

	b	kvm_emulate_mtmsrd_branch

no_check:

	SCRATCH_RESTORE

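	/* Go back to the caller */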
kvm_emulate_mtmsrd_branch:
	b	.
kvm_emulate_mtmsrd_end:

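/*
 * Word offsets into the template above; the patching code uses them to
 * locate the register operand, the original instruction and the final
 * branch in each copied instance.
 */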
.global kvm_emulate_mtmsrd_branch_offs
kvm_emulate_mtmsrd_branch_offs:
	.long (kvm_emulate_mtmsrd_branch - kvm_emulate_mtmsrd) / 4

.global kvm_emulate_mtmsrd_reg_offs
kvm_emulate_mtmsrd_reg_offs:
	.long (kvm_emulate_mtmsrd_reg - kvm_emulate_mtmsrd) / 4

.global kvm_emulate_mtmsrd_orig_ins_offs
kvm_emulate_mtmsrd_orig_ins_offs:
	.long (kvm_emulate_mtmsrd_orig_ins - kvm_emulate_mtmsrd) / 4

.global kvm_emulate_mtmsrd_len
kvm_emulate_mtmsrd_len:
	.long (kvm_emulate_mtmsrd_end - kvm_emulate_mtmsrd) / 4

#define MSR_SAFE_BITS (MSR_EE | MSR_RI)
#define MSR_CRITICAL_BITS ~MSR_SAFE_BITS

.global kvm_emulate_mtmsr
kvm_emulate_mtmsr:

	SCRATCH_SAVE

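	/* Fetch the old MSR into r31 */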
	LL64(r31, KVM_MAGIC_PAGE + KVM_MAGIC_MSR, 0)

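	/* Find the bits that differ between the old and the new MSR */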
kvm_emulate_mtmsr_reg1:
	ori	r30, r0, 0
	xor	r31, r30, r31

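	/* Check whether we really need to execute mtmsr */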
	LOAD_REG_IMMEDIATE(r30, MSR_CRITICAL_BITS)
	and.	r31, r31, r30

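	/* No critical bits changed? Then we may be able to stay in the guest. */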
	beq	maybe_stay_in_guest

do_mtmsr:

	SCRATCH_RESTORE

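	/* A critical bit changed, so fire off the real mtmsr */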
kvm_emulate_mtmsr_orig_ins:
	mtmsr	r0

	b	kvm_emulate_mtmsr_branch

maybe_stay_in_guest:

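	/* Get the target register value into r30 */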
kvm_emulate_mtmsr_reg2:
	ori	r30, r0, 0

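	/* Put the MSR into the magic page because we don't call mtmsr */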
	STL64(r30, KVM_MAGIC_PAGE + KVM_MAGIC_MSR, 0)

	lwz	r31, (KVM_MAGIC_PAGE + KVM_MAGIC_INT)(0)
	cmpwi	r31, 0
	beq+	no_mtmsr

	andi.	r31, r30, MSR_EE
	bne	do_mtmsr

no_mtmsr:

	SCRATCH_RESTORE

kvm_emulate_mtmsr_branch:
	b	.
kvm_emulate_mtmsr_end:

.global kvm_emulate_mtmsr_branch_offs
kvm_emulate_mtmsr_branch_offs:
	.long (kvm_emulate_mtmsr_branch - kvm_emulate_mtmsr) / 4

.global kvm_emulate_mtmsr_reg1_offs
kvm_emulate_mtmsr_reg1_offs:
	.long (kvm_emulate_mtmsr_reg1 - kvm_emulate_mtmsr) / 4

.global kvm_emulate_mtmsr_reg2_offs
kvm_emulate_mtmsr_reg2_offs:
	.long (kvm_emulate_mtmsr_reg2 - kvm_emulate_mtmsr) / 4

.global kvm_emulate_mtmsr_orig_ins_offs
kvm_emulate_mtmsr_orig_ins_offs:
	.long (kvm_emulate_mtmsr_orig_ins - kvm_emulate_mtmsr) / 4

.global kvm_emulate_mtmsr_len
kvm_emulate_mtmsr_len:
	.long (kvm_emulate_mtmsr_end - kvm_emulate_mtmsr) / 4

#ifdef CONFIG_BOOKE

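/* This template is also used for "wrteei 1" */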
.global kvm_emulate_wrtee
kvm_emulate_wrtee:

	SCRATCH_SAVE

	LL64(r31, KVM_MAGIC_PAGE + KVM_MAGIC_MSR, 0)

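	/* Insert the new MSR[EE] value */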
kvm_emulate_wrtee_reg:
	ori	r30, r0, 0
	rlwimi	r31, r30, 0, MSR_EE

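	/*
	 * If MSR[EE] is now being set, check for a pending interrupt.
	 * We could skip this when MSR[EE] was already on, but that
	 * should be rare, so don't bother.
	 */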
	andi.	r30, r30, MSR_EE

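	/* Put the MSR into the magic page because we don't execute wrtee */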
	STL64(r31, KVM_MAGIC_PAGE + KVM_MAGIC_MSR, 0)

	beq	no_wrtee

	lwz	r30, (KVM_MAGIC_PAGE + KVM_MAGIC_INT)(0)
	cmpwi	r30, 0
	bne	do_wrtee

no_wrtee:
	SCRATCH_RESTORE

kvm_emulate_wrtee_branch:
	b	.

do_wrtee:
	SCRATCH_RESTORE

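	/* An interrupt is pending, so fire off the real wrtee */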
kvm_emulate_wrtee_orig_ins:
	wrtee	r0

	b	kvm_emulate_wrtee_branch

kvm_emulate_wrtee_end:

.global kvm_emulate_wrtee_branch_offs
kvm_emulate_wrtee_branch_offs:
	.long (kvm_emulate_wrtee_branch - kvm_emulate_wrtee) / 4

.global kvm_emulate_wrtee_reg_offs
kvm_emulate_wrtee_reg_offs:
	.long (kvm_emulate_wrtee_reg - kvm_emulate_wrtee) / 4

.global kvm_emulate_wrtee_orig_ins_offs
kvm_emulate_wrtee_orig_ins_offs:
	.long (kvm_emulate_wrtee_orig_ins - kvm_emulate_wrtee) / 4

.global kvm_emulate_wrtee_len
kvm_emulate_wrtee_len:
	.long (kvm_emulate_wrtee_end - kvm_emulate_wrtee) / 4

.global kvm_emulate_wrteei_0
kvm_emulate_wrteei_0:
	SCRATCH_SAVE

	LL64(r31, KVM_MAGIC_PAGE + KVM_MAGIC_MSR, 0)

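	/* Clear MSR_EE in the old MSR value and write it back */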
	rlwinm	r31, r31, 0, ~MSR_EE

	STL64(r31, KVM_MAGIC_PAGE + KVM_MAGIC_MSR, 0)

	SCRATCH_RESTORE

kvm_emulate_wrteei_0_branch:
	b	.
kvm_emulate_wrteei_0_end:

.global kvm_emulate_wrteei_0_branch_offs
kvm_emulate_wrteei_0_branch_offs:
	.long (kvm_emulate_wrteei_0_branch - kvm_emulate_wrteei_0) / 4

.global kvm_emulate_wrteei_0_len
kvm_emulate_wrteei_0_len:
	.long (kvm_emulate_wrteei_0_end - kvm_emulate_wrteei_0) / 4

#endif

#ifdef CONFIG_PPC_BOOK3S_32

.global kvm_emulate_mtsrin
kvm_emulate_mtsrin:

	SCRATCH_SAVE

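	/*
	 * With address translation off (IR/DR clear) the new segment
	 * register value can simply be cached in the magic page;
	 * otherwise the original, trapping mtsrin is executed.
	 */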
	LL64(r31, KVM_MAGIC_PAGE + KVM_MAGIC_MSR, 0)
	andi.	r31, r31, MSR_DR | MSR_IR
	beq	kvm_emulate_mtsrin_reg1

	SCRATCH_RESTORE

kvm_emulate_mtsrin_orig_ins:
	nop
	b	kvm_emulate_mtsrin_branch

kvm_emulate_mtsrin_reg1:
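	/* r30 = segment register number * 4 (rX >> 26) */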
	rlwinm	r30, r0, 6, 26, 29

kvm_emulate_mtsrin_reg2:
	stw	r0, (KVM_MAGIC_PAGE + KVM_MAGIC_SR)(r30)

	SCRATCH_RESTORE

kvm_emulate_mtsrin_branch:
	b	.
kvm_emulate_mtsrin_end:

.global kvm_emulate_mtsrin_branch_offs
kvm_emulate_mtsrin_branch_offs:
	.long (kvm_emulate_mtsrin_branch - kvm_emulate_mtsrin) / 4

.global kvm_emulate_mtsrin_reg1_offs
kvm_emulate_mtsrin_reg1_offs:
	.long (kvm_emulate_mtsrin_reg1 - kvm_emulate_mtsrin) / 4

.global kvm_emulate_mtsrin_reg2_offs
kvm_emulate_mtsrin_reg2_offs:
	.long (kvm_emulate_mtsrin_reg2 - kvm_emulate_mtsrin) / 4

.global kvm_emulate_mtsrin_orig_ins_offs
kvm_emulate_mtsrin_orig_ins_offs:
	.long (kvm_emulate_mtsrin_orig_ins - kvm_emulate_mtsrin) / 4

.global kvm_emulate_mtsrin_len
kvm_emulate_mtsrin_len:
	.long (kvm_emulate_mtsrin_end - kvm_emulate_mtsrin) / 4

#endif

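/* 64k scratch area that the patching code copies fixed-up template instances into */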
.balign 4
.global kvm_tmp
kvm_tmp:
	.space	(64 * 1024)

.global kvm_tmp_end
kvm_tmp_end:

.global kvm_template_end
kvm_template_end: