/*
 * Copyright (C) 2012,2013 - ARM Ltd
 * Author: Marc Zyngier <marc.zyngier@arm.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <linux/linkage.h>
#include <linux/irqchip/arm-gic-v3.h>

#include <asm/assembler.h>
#include <asm/memory.h>
#include <asm/asm-offsets.h>
#include <asm/kvm.h>
#include <asm/kvm_asm.h>
#include <asm/kvm_arm.h>

	.text
	.pushsection	.hyp.text, "ax"

/*
 * We store LRs in reverse order to let the CPU deal with streaming
 * access. Use this macro to make it look saner...
 */
#define LR_OFFSET(n)	(VGIC_V3_CPU_LR + (15 - n) * 8)
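/*
 * For example, LR_OFFSET(15) is VGIC_V3_CPU_LR + 0 and LR_OFFSET(0) is
 * VGIC_V3_CPU_LR + 120, so ICH_LR15_EL2 lands at the lowest address and
 * ICH_LR0_EL2 at the highest, 8 bytes apart.
 */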

/*
 * Save the VGIC CPU state into memory
 * x0: Register pointing to VCPU struct
 * Do not corrupt x1!!!
 */
.macro	save_vgic_v3_state
	// Compute the address of struct vgic_cpu
	add	x3, x0, #VCPU_VGIC_CPU

	// Make sure stores to the GIC via the memory mapped interface
	// are now visible to the system register interface
	dsb	st

	// Save all interesting registers
	mrs_s	x4, ICH_HCR_EL2
	mrs_s	x5, ICH_VMCR_EL2
	mrs_s	x6, ICH_MISR_EL2
	mrs_s	x7, ICH_EISR_EL2
	mrs_s	x8, ICH_ELSR_EL2

	str	w4, [x3, #VGIC_V3_CPU_HCR]
	str	w5, [x3, #VGIC_V3_CPU_VMCR]
	str	w6, [x3, #VGIC_V3_CPU_MISR]
	str	w7, [x3, #VGIC_V3_CPU_EISR]
	str	w8, [x3, #VGIC_V3_CPU_ELRSR]

	msr_s	ICH_HCR_EL2, xzr

	mrs_s	x21, ICH_VTR_EL2
	mvn	w22, w21
	ubfiz	w23, w22, 2, 4	// w23 = (15 - ListRegs) * 4

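	// ICH_VTR_EL2[4:0] holds the number of implemented LRs minus one, so
	// w23 counts the unimplemented LRs in units of one 4-byte instruction.
	// Adding it to the label below makes the branch skip the accesses to
	// LRs that do not exist (e.g. with 4 LRs, w23 = 48 and we land on the
	// ICH_LR3_EL2 read).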
	adr	x24, 1f
	add	x24, x24, x23
	br	x24

1:
	mrs_s	x20, ICH_LR15_EL2
	mrs_s	x19, ICH_LR14_EL2
	mrs_s	x18, ICH_LR13_EL2
	mrs_s	x17, ICH_LR12_EL2
	mrs_s	x16, ICH_LR11_EL2
	mrs_s	x15, ICH_LR10_EL2
	mrs_s	x14, ICH_LR9_EL2
	mrs_s	x13, ICH_LR8_EL2
	mrs_s	x12, ICH_LR7_EL2
	mrs_s	x11, ICH_LR6_EL2
	mrs_s	x10, ICH_LR5_EL2
	mrs_s	x9, ICH_LR4_EL2
	mrs_s	x8, ICH_LR3_EL2
	mrs_s	x7, ICH_LR2_EL2
	mrs_s	x6, ICH_LR1_EL2
	mrs_s	x5, ICH_LR0_EL2

	adr	x24, 1f
	add	x24, x24, x23
	br	x24

1:
	str	x20, [x3, #LR_OFFSET(15)]
	str	x19, [x3, #LR_OFFSET(14)]
	str	x18, [x3, #LR_OFFSET(13)]
	str	x17, [x3, #LR_OFFSET(12)]
	str	x16, [x3, #LR_OFFSET(11)]
	str	x15, [x3, #LR_OFFSET(10)]
	str	x14, [x3, #LR_OFFSET(9)]
	str	x13, [x3, #LR_OFFSET(8)]
	str	x12, [x3, #LR_OFFSET(7)]
	str	x11, [x3, #LR_OFFSET(6)]
	str	x10, [x3, #LR_OFFSET(5)]
	str	x9, [x3, #LR_OFFSET(4)]
	str	x8, [x3, #LR_OFFSET(3)]
	str	x7, [x3, #LR_OFFSET(2)]
	str	x6, [x3, #LR_OFFSET(1)]
	str	x5, [x3, #LR_OFFSET(0)]

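	// ICH_VTR_EL2.PRIbits (bits [31:29]) encodes the number of priority
	// bits minus one, which determines how many active priority registers
	// exist: 5 bits -> AP0R0/AP1R0 only, 6 bits -> two of each,
	// 7 bits -> all four.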
	tbnz	w21, #29, 6f	// 6 bits
	tbz	w21, #30, 5f	// 5 bits
				// 7 bits
	mrs_s	x20, ICH_AP0R3_EL2
	str	w20, [x3, #(VGIC_V3_CPU_AP0R + 3*4)]
	mrs_s	x19, ICH_AP0R2_EL2
	str	w19, [x3, #(VGIC_V3_CPU_AP0R + 2*4)]
6:	mrs_s	x18, ICH_AP0R1_EL2
	str	w18, [x3, #(VGIC_V3_CPU_AP0R + 1*4)]
5:	mrs_s	x17, ICH_AP0R0_EL2
	str	w17, [x3, #VGIC_V3_CPU_AP0R]

	tbnz	w21, #29, 6f	// 6 bits
	tbz	w21, #30, 5f	// 5 bits
				// 7 bits
	mrs_s	x20, ICH_AP1R3_EL2
	str	w20, [x3, #(VGIC_V3_CPU_AP1R + 3*4)]
	mrs_s	x19, ICH_AP1R2_EL2
	str	w19, [x3, #(VGIC_V3_CPU_AP1R + 2*4)]
6:	mrs_s	x18, ICH_AP1R1_EL2
	str	w18, [x3, #(VGIC_V3_CPU_AP1R + 1*4)]
5:	mrs_s	x17, ICH_AP1R0_EL2
	str	w17, [x3, #VGIC_V3_CPU_AP1R]

	// Restore SRE_EL1 access and re-enable SRE at EL1.
	mrs_s	x5, ICC_SRE_EL2
	orr	x5, x5, #ICC_SRE_EL2_ENABLE
	msr_s	ICC_SRE_EL2, x5
	isb
	mov	x5, #1
	msr_s	ICC_SRE_EL1, x5
.endm

/*
 * Restore the VGIC CPU state from memory
 * x0: Register pointing to VCPU struct
 */
.macro	restore_vgic_v3_state
	// Compute the address of struct vgic_cpu
	add	x3, x0, #VCPU_VGIC_CPU

	// Restore all interesting registers
	ldr	w4, [x3, #VGIC_V3_CPU_HCR]
	ldr	w5, [x3, #VGIC_V3_CPU_VMCR]
	ldr	w25, [x3, #VGIC_V3_CPU_SRE]
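	// x25 now holds the guest's ICC_SRE_EL1 value; it is checked again at
	// the end of this macro, so it must not be clobbered in between.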

	msr_s	ICC_SRE_EL1, x25

	// make sure SRE is valid before writing the other registers
	isb

	msr_s	ICH_HCR_EL2, x4
	msr_s	ICH_VMCR_EL2, x5

	mrs_s	x21, ICH_VTR_EL2

	tbnz	w21, #29, 6f	// 6 bits
	tbz	w21, #30, 5f	// 5 bits
				// 7 bits
	ldr	w20, [x3, #(VGIC_V3_CPU_AP1R + 3*4)]
	msr_s	ICH_AP1R3_EL2, x20
	ldr	w19, [x3, #(VGIC_V3_CPU_AP1R + 2*4)]
	msr_s	ICH_AP1R2_EL2, x19
6:	ldr	w18, [x3, #(VGIC_V3_CPU_AP1R + 1*4)]
	msr_s	ICH_AP1R1_EL2, x18
5:	ldr	w17, [x3, #VGIC_V3_CPU_AP1R]
	msr_s	ICH_AP1R0_EL2, x17

	tbnz	w21, #29, 6f	// 6 bits
	tbz	w21, #30, 5f	// 5 bits
				// 7 bits
	ldr	w20, [x3, #(VGIC_V3_CPU_AP0R + 3*4)]
	msr_s	ICH_AP0R3_EL2, x20
	ldr	w19, [x3, #(VGIC_V3_CPU_AP0R + 2*4)]
	msr_s	ICH_AP0R2_EL2, x19
6:	ldr	w18, [x3, #(VGIC_V3_CPU_AP0R + 1*4)]
	msr_s	ICH_AP0R1_EL2, x18
5:	ldr	w17, [x3, #VGIC_V3_CPU_AP0R]
	msr_s	ICH_AP0R0_EL2, x17

	and	w22, w21, #0xf
	mvn	w22, w21
	ubfiz	w23, w22, 2, 4	// w23 = (15 - ListRegs) * 4

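	// Same computed-branch trick as in save_vgic_v3_state: skip over the
	// loads and writes for LRs that are not implemented.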
	adr	x24, 1f
	add	x24, x24, x23
	br	x24

1:
	ldr	x20, [x3, #LR_OFFSET(15)]
	ldr	x19, [x3, #LR_OFFSET(14)]
	ldr	x18, [x3, #LR_OFFSET(13)]
	ldr	x17, [x3, #LR_OFFSET(12)]
	ldr	x16, [x3, #LR_OFFSET(11)]
	ldr	x15, [x3, #LR_OFFSET(10)]
	ldr	x14, [x3, #LR_OFFSET(9)]
	ldr	x13, [x3, #LR_OFFSET(8)]
	ldr	x12, [x3, #LR_OFFSET(7)]
	ldr	x11, [x3, #LR_OFFSET(6)]
	ldr	x10, [x3, #LR_OFFSET(5)]
	ldr	x9, [x3, #LR_OFFSET(4)]
	ldr	x8, [x3, #LR_OFFSET(3)]
	ldr	x7, [x3, #LR_OFFSET(2)]
	ldr	x6, [x3, #LR_OFFSET(1)]
	ldr	x5, [x3, #LR_OFFSET(0)]

	adr	x24, 1f
	add	x24, x24, x23
	br	x24

1:
	msr_s	ICH_LR15_EL2, x20
	msr_s	ICH_LR14_EL2, x19
	msr_s	ICH_LR13_EL2, x18
	msr_s	ICH_LR12_EL2, x17
	msr_s	ICH_LR11_EL2, x16
	msr_s	ICH_LR10_EL2, x15
	msr_s	ICH_LR9_EL2,  x14
	msr_s	ICH_LR8_EL2,  x13
	msr_s	ICH_LR7_EL2,  x12
	msr_s	ICH_LR6_EL2,  x11
	msr_s	ICH_LR5_EL2,  x10
	msr_s	ICH_LR4_EL2,   x9
	msr_s	ICH_LR3_EL2,   x8
	msr_s	ICH_LR2_EL2,   x7
	msr_s	ICH_LR1_EL2,   x6
	msr_s	ICH_LR0_EL2,   x5

	// Ensure that the above will have reached the
	// (re)distributors. This ensures the guest will read
	// the correct values from the memory-mapped interface.
	isb
	dsb	sy

	// Prevent the guest from touching the GIC system registers
	// if SRE isn't enabled for GICv3 emulation
	cbnz	x25, 1f
	mrs_s	x5, ICC_SRE_EL2
	and	x5, x5, #~ICC_SRE_EL2_ENABLE
	msr_s	ICC_SRE_EL2, x5
1:
.endm

ENTRY(__save_vgic_v3_state)
	save_vgic_v3_state
	ret
ENDPROC(__save_vgic_v3_state)

ENTRY(__restore_vgic_v3_state)
	restore_vgic_v3_state
	ret
ENDPROC(__restore_vgic_v3_state)

ENTRY(__vgic_v3_get_ich_vtr_el2)
	mrs_s	x0, ICH_VTR_EL2
	ret
ENDPROC(__vgic_v3_get_ich_vtr_el2)

	.popsection