/* SPDX-License-Identifier: GPL-2.0 */
#include <linux/linkage.h>
#include <asm/asm.h>
#include <asm/bitsperlong.h>
#include <asm/kvm_vcpu_regs.h>
#include <asm/nospec-branch.h>

#define WORD_SIZE (BITS_PER_LONG / 8)
9
#define VCPU_RAX __VCPU_REGS_RAX * WORD_SIZE
#define VCPU_RCX __VCPU_REGS_RCX * WORD_SIZE
#define VCPU_RDX __VCPU_REGS_RDX * WORD_SIZE
#define VCPU_RBX __VCPU_REGS_RBX * WORD_SIZE
/* RSP is intentionally omitted, it's context switched by hardware. */
#define VCPU_RBP __VCPU_REGS_RBP * WORD_SIZE
#define VCPU_RSI __VCPU_REGS_RSI * WORD_SIZE
#define VCPU_RDI __VCPU_REGS_RDI * WORD_SIZE

#ifdef CONFIG_X86_64
#define VCPU_R8  __VCPU_REGS_R8  * WORD_SIZE
#define VCPU_R9  __VCPU_REGS_R9  * WORD_SIZE
#define VCPU_R10 __VCPU_REGS_R10 * WORD_SIZE
#define VCPU_R11 __VCPU_REGS_R11 * WORD_SIZE
#define VCPU_R12 __VCPU_REGS_R12 * WORD_SIZE
#define VCPU_R13 __VCPU_REGS_R13 * WORD_SIZE
#define VCPU_R14 __VCPU_REGS_R14 * WORD_SIZE
#define VCPU_R15 __VCPU_REGS_R15 * WORD_SIZE
#endif

	.text
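
/**
 * vmx_vmenter - VM-Enter the current loaded VMCS
 *
 * %RFLAGS.ZF:	!VMCS.LAUNCHED, i.e. controls VMLAUNCH vs. VMRESUME
 *
 * Returns:
 *	%RFLAGS.CF is set on VM-Fail Invalid
 *	%RFLAGS.ZF is set on VM-Fail Valid
 *	%RFLAGS.{CF,ZF} are cleared on VM-Success, i.e. VM-Exit
 *
 * Note that VMRESUME/VMLAUNCH do not return here on success; on VM-Exit
 * the CPU transfers control to vmx_vmexit via the VMCS host-state fields.
 */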
ENTRY(vmx_vmenter)
	/* EFLAGS.ZF is set if VMCS.LAUNCHED == 0 */
	je 2f

1:	vmresume
	ret

2:	vmlaunch
	ret

	/* VMLAUNCH/VMRESUME faulted, only BUG if the kernel isn't rebooting. */
3:	cmpb $0, kvm_rebooting
	je 4f
	ret
4:	ud2

	.pushsection .fixup, "ax"
5:	jmp 3b
	.popsection

	_ASM_EXTABLE(1b, 5b)
	_ASM_EXTABLE(2b, 5b)

ENDPROC(vmx_vmenter)
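
/**
 * vmx_vmexit - Handle a VMX VM-Exit
 *
 * Returns:
 *	%RFLAGS.{CF,ZF} are cleared on VM-Success, i.e. VM-Exit
 *
 * This is vmx_vmenter's counterpart: the VMCS HOST_RIP field points here,
 * so the CPU resumes at vmx_vmexit on VM-Exit and RETs back to the caller
 * of vmx_vmenter, stuffing the RSB first when retpolines are enabled.
 */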
ENTRY(vmx_vmexit)
#ifdef CONFIG_RETPOLINE
	ALTERNATIVE "jmp .Lvmexit_skip_rsb", "", X86_FEATURE_RETPOLINE

	/* Preserve guest's RAX, it's used to stuff the RSB. */
	push %_ASM_AX

	/* Stuff the RSB so guest-controlled return targets can't be speculated. */
	FILL_RETURN_BUFFER %_ASM_AX, RSB_CLEAR_LOOPS, X86_FEATURE_RETPOLINE

	/* Clear RFLAGS.CF and RFLAGS.ZF to preserve VM-Exit, i.e. !VM-Fail. */
	or $1, %_ASM_AX

	pop %_ASM_AX
.Lvmexit_skip_rsb:
#endif
	ret
ENDPROC(vmx_vmexit)
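
/**
 * __vmx_vcpu_run - Run a vCPU via a transition to VMX guest mode
 * @vmx:	struct vcpu_vmx *, forwarded to vmx_update_host_rsp()
 * @regs:	unsigned long * (pointer to the guest register array)
 * @launched:	%true if the VMCS has been launched
 *
 * Returns:
 *	0 on VM-Exit, 1 on VM-Fail
 */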
ENTRY(__vmx_vcpu_run)
	push %_ASM_BP
	mov  %_ASM_SP, %_ASM_BP
#ifdef CONFIG_X86_64
	push %r15
	push %r14
	push %r13
	push %r12
#else
	push %edi
	push %esi
#endif
	push %_ASM_BX

	/*
	 * Save @regs, _ASM_ARG2 may be clobbered by vmx_update_host_rsp() and
	 * @regs is needed after VM-Exit to save the guest's register values.
	 */
	push %_ASM_ARG2

	/* Copy @launched to BL, _ASM_ARG3 is volatile. */
	mov %_ASM_ARG3B, %bl

	/* Adjust RSP to account for the CALL to vmx_vmenter(). */
	lea -WORD_SIZE(%_ASM_SP), %_ASM_ARG2
	call vmx_update_host_rsp

	/* Load @regs to RAX. */
	mov (%_ASM_SP), %_ASM_AX

	/* Check if vmlaunch or vmresume is needed. */
	cmpb $0, %bl

	/* Load guest registers.  Don't clobber flags. */
	mov VCPU_RBX(%_ASM_AX), %_ASM_BX
	mov VCPU_RCX(%_ASM_AX), %_ASM_CX
	mov VCPU_RDX(%_ASM_AX), %_ASM_DX
	mov VCPU_RSI(%_ASM_AX), %_ASM_SI
	mov VCPU_RDI(%_ASM_AX), %_ASM_DI
	mov VCPU_RBP(%_ASM_AX), %_ASM_BP
#ifdef CONFIG_X86_64
	mov VCPU_R8 (%_ASM_AX), %r8
	mov VCPU_R9 (%_ASM_AX), %r9
	mov VCPU_R10(%_ASM_AX), %r10
	mov VCPU_R11(%_ASM_AX), %r11
	mov VCPU_R12(%_ASM_AX), %r12
	mov VCPU_R13(%_ASM_AX), %r13
	mov VCPU_R14(%_ASM_AX), %r14
	mov VCPU_R15(%_ASM_AX), %r15
#endif

	/* Load guest RAX last, this kills the @regs pointer! */
	mov VCPU_RAX(%_ASM_AX), %_ASM_AX

	/* Enter guest mode. */
	call vmx_vmenter

	/* Jump on VM-Fail. */
	jbe 2f

	/* Temporarily save guest's RAX. */
	push %_ASM_AX

	/* Reload @regs to RAX. */
	mov WORD_SIZE(%_ASM_SP), %_ASM_AX

	/* Save all guest registers, including RAX from the stack. */
	__ASM_SIZE(pop) VCPU_RAX(%_ASM_AX)
	mov %_ASM_BX, VCPU_RBX(%_ASM_AX)
	mov %_ASM_CX, VCPU_RCX(%_ASM_AX)
	mov %_ASM_DX, VCPU_RDX(%_ASM_AX)
	mov %_ASM_SI, VCPU_RSI(%_ASM_AX)
	mov %_ASM_DI, VCPU_RDI(%_ASM_AX)
	mov %_ASM_BP, VCPU_RBP(%_ASM_AX)
#ifdef CONFIG_X86_64
	mov %r8,  VCPU_R8 (%_ASM_AX)
	mov %r9,  VCPU_R9 (%_ASM_AX)
	mov %r10, VCPU_R10(%_ASM_AX)
	mov %r11, VCPU_R11(%_ASM_AX)
	mov %r12, VCPU_R12(%_ASM_AX)
	mov %r13, VCPU_R13(%_ASM_AX)
	mov %r14, VCPU_R14(%_ASM_AX)
	mov %r15, VCPU_R15(%_ASM_AX)
#endif

	/* Clear RAX to indicate VM-Exit (as opposed to VM-Fail). */
	xor %eax, %eax

	/*
	 * Clear all general purpose registers except RSP and RAX to prevent
	 * speculative use of the guest's values, even those that are reloaded
	 * via the stack.  In theory, an L1 cache miss when restoring registers
	 * could lead to speculative execution with the guest's values.
	 * Zeroing XORs are dirt cheap, i.e. the extra paranoia is essentially
	 * free.  RSP and RAX are exempt as RSP is restored by hardware during
	 * VM-Exit and RAX is explicitly loaded with 0 or 1 to return VM-Fail.
	 */
1:	xor %ebx, %ebx
	xor %ecx, %ecx
	xor %edx, %edx
	xor %esi, %esi
	xor %edi, %edi
	xor %ebp, %ebp
#ifdef CONFIG_X86_64
	xor %r8d, %r8d
	xor %r9d, %r9d
	xor %r10d, %r10d
	xor %r11d, %r11d
	xor %r12d, %r12d
	xor %r13d, %r13d
	xor %r14d, %r14d
	xor %r15d, %r15d
#endif

	/* "POP" @regs. */
	add $WORD_SIZE, %_ASM_SP
	pop %_ASM_BX

#ifdef CONFIG_X86_64
	pop %r12
	pop %r13
	pop %r14
	pop %r15
#else
	pop %esi
	pop %edi
#endif
	pop %_ASM_BP
	ret

	/* VM-Fail.  Out-of-line to avoid a taken Jcc after VM-Exit. */
2:	mov $1, %eax
	jmp 1b
ENDPROC(__vmx_vcpu_run)
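
/**
 * vmread_error_trampoline - Trampoline from inline asm to vmread_error()
 * @field:	VMCS field that failed on VMREAD
 * @fault:	%true if the VMREAD faulted, %false if it failed
 *
 * Save and restore volatile registers across a call to vmread_error(); both
 * parameters are passed on the stack by the inline-asm caller.
 */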
ENTRY(vmread_error_trampoline)
	push %_ASM_BP
	mov  %_ASM_SP, %_ASM_BP

	/* Save the volatile (caller-saved) registers across the C call. */
	push %_ASM_AX
	push %_ASM_CX
	push %_ASM_DX
#ifdef CONFIG_X86_64
	push %rdi
	push %rsi
	push %r8
	push %r9
	push %r10
	push %r11
#endif
#ifdef CONFIG_X86_64
	/* Load @field and @fault to arg1 and arg2 respectively. */
	mov 3*WORD_SIZE(%rbp), %_ASM_ARG2
	mov 2*WORD_SIZE(%rbp), %_ASM_ARG1
#else
	/* Parameters are passed on the stack for 32-bit kernels. */
	push 3*WORD_SIZE(%ebp)
	push 2*WORD_SIZE(%ebp)
#endif

	call vmread_error

#ifndef CONFIG_X86_64
	/* Unwind the two stack-passed parameters. */
	add $8, %esp
#endif

	/*
	 * Zero out @fault on the stack; the inline-asm caller pops this slot
	 * as the result of the failed VMREAD, i.e. the access reads back '0'.
	 */
	_ASM_MOV $0, 3*WORD_SIZE(%_ASM_BP)

#ifdef CONFIG_X86_64
	pop %r11
	pop %r10
	pop %r9
	pop %r8
	pop %rsi
	pop %rdi
#endif
	pop %_ASM_DX
	pop %_ASM_CX
	pop %_ASM_AX
	pop %_ASM_BP

	ret
ENDPROC(vmread_error_trampoline)
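
/*
 * For reference, a minimal sketch of the C-side declarations these entry
 * points pair with, inferred from the register and stack usage above (the
 * authoritative prototypes live in the C sources, not here):
 *
 *	bool __vmx_vcpu_run(struct vcpu_vmx *vmx, unsigned long *regs,
 *			    bool launched);
 *	void vmx_update_host_rsp(struct vcpu_vmx *vmx, unsigned long host_rsp);
 *	void vmread_error(unsigned long field, bool fault);
 */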