/*
 * Copyright (C) 2013 Imagination Technologies
 * Author: Paul Burton <paul.burton@imgtec.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation;  either version 2 of the  License, or (at your
 * option) any later version.
 */
#ifndef _ASM_MSA_H
#define _ASM_MSA_H

#include <asm/mipsregs.h>

#ifndef __ASSEMBLY__

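/*
 * MSA context helpers implemented outside this header: _save_msa() and
 * _restore_msa() save/restore a task's MSA vector context, while
 * _init_msa_upper() initialises the upper portion of the vector registers.
 */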
extern void _save_msa(struct task_struct *);
extern void _restore_msa(struct task_struct *);
extern void _init_msa_upper(void);

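/*
 * Enable MSA on the current CPU by setting Config5.MSAEn, then resolve the
 * resulting execution hazard. Does nothing on CPUs without MSA.
 */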
static inline void enable_msa(void)
{
	if (cpu_has_msa) {
		set_c0_config5(MIPS_CONF5_MSAEN);
		enable_fpu_hazard();
	}
}

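/*
 * Disable MSA on the current CPU by clearing Config5.MSAEn. Does nothing
 * on CPUs without MSA.
 */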
static inline void disable_msa(void)
{
	if (cpu_has_msa) {
		clear_c0_config5(MIPS_CONF5_MSAEN);
		disable_fpu_hazard();
	}
}

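/*
 * Return non-zero if MSA is currently enabled (Config5.MSAEn set) on the
 * current CPU, or zero otherwise, including on CPUs without MSA.
 */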
static inline int is_msa_enabled(void)
{
	if (!cpu_has_msa)
		return 0;

	return read_c0_config5() & MIPS_CONF5_MSAEN;
}

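/*
 * Return non-zero if the current task has live MSA vector context, i.e.
 * TIF_MSA_CTX_LIVE is set.
 */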
static inline int thread_msa_context_live(void)
{
	/*
	 * Check cpu_has_msa only if it's a constant. This will allow the
	 * compiler to optimise out code for CPUs without MSA without adding
	 * an extra redundant check for CPUs with MSA.
	 */
	if (__builtin_constant_p(cpu_has_msa) && !cpu_has_msa)
		return 0;

	return test_thread_flag(TIF_MSA_CTX_LIVE);
}

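/* Save the MSA vector context of task t, if the CPU supports MSA. */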
static inline void save_msa(struct task_struct *t)
{
	if (cpu_has_msa)
		_save_msa(t);
}

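/* Restore the MSA vector context of task t, if the CPU supports MSA. */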
static inline void restore_msa(struct task_struct *t)
{
	if (cpu_has_msa)
		_restore_msa(t);
}

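/*
 * Initialise the upper portion of the current task's MSA vector registers,
 * if the CPU supports MSA.
 */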
static inline void init_msa_upper(void)
{
	/*
	 * Check cpu_has_msa only if it's a constant. This will allow the
	 * compiler to optimise out code for CPUs without MSA without adding
	 * an extra redundant check for CPUs with MSA.
	 */
	if (__builtin_constant_p(cpu_has_msa) && !cpu_has_msa)
		return;

	_init_msa_upper();
}

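/*
 * __BUILD_MSA_CTL_REG(name, cs) generates read_msa_<name>() and
 * write_msa_<name>() accessors for MSA control register number cs, using
 * the cfcmsa and ctcmsa instructions respectively.
 */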
#ifdef TOOLCHAIN_SUPPORTS_MSA

#define __BUILD_MSA_CTL_REG(name, cs)				\
static inline unsigned int read_msa_##name(void)		\
{								\
	unsigned int reg;					\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	"	.set	msa\n"					\
	"	cfcmsa	%0, $" #cs "\n"				\
	"	.set	pop\n"					\
	: "=r"(reg));						\
	return reg;						\
}								\
								\
static inline void write_msa_##name(unsigned int val)		\
{								\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	"	.set	msa\n"					\
	"	ctcmsa	$" #cs ", %0\n"				\
	"	.set	pop\n"					\
	: : "r"(val));						\
}

#else /* !TOOLCHAIN_SUPPORTS_MSA */

/*
 * Define functions using .word for the c[ft]cmsa instructions in order to
 * allow compilation with toolchains that do not support MSA. Once all
 * toolchains in use support MSA these can be removed.
 */
#ifdef CONFIG_CPU_MICROMIPS
#define CFC_MSA_INSN	0x587e0056
#define CTC_MSA_INSN	0x583e0816
#else
#define CFC_MSA_INSN	0x787e0059
#define CTC_MSA_INSN	0x783e0819
#endif
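/*
 * In the .word encodings below the control register number is OR'd into
 * the instruction at bit 11 for cfcmsa and bit 6 for ctcmsa, and $1 ($at)
 * is used as the GPR operand, hence the .set noat.
 */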

#define __BUILD_MSA_CTL_REG(name, cs)				\
static inline unsigned int read_msa_##name(void)		\
{								\
	unsigned int reg;					\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	"	.set	noat\n"					\
	"	.insn\n"					\
	"	.word	%1 | (" #cs " << 11)\n"			\
	"	move	%0, $1\n"				\
	"	.set	pop\n"					\
	: "=r"(reg) : "i"(CFC_MSA_INSN));			\
	return reg;						\
}								\
								\
static inline void write_msa_##name(unsigned int val)		\
{								\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	"	.set	noat\n"					\
	"	move	$1, %0\n"				\
	"	.insn\n"					\
	"	.word	%1 | (" #cs " << 6)\n"			\
	"	.set	pop\n"					\
	: : "r"(val), "i"(CTC_MSA_INSN));			\
}

#endif /* !TOOLCHAIN_SUPPORTS_MSA */

__BUILD_MSA_CTL_REG(ir, 0)
__BUILD_MSA_CTL_REG(csr, 1)
__BUILD_MSA_CTL_REG(access, 2)
__BUILD_MSA_CTL_REG(save, 3)
__BUILD_MSA_CTL_REG(modify, 4)
__BUILD_MSA_CTL_REG(request, 5)
__BUILD_MSA_CTL_REG(map, 6)
__BUILD_MSA_CTL_REG(unmap, 7)
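/*
 * For example, __BUILD_MSA_CTL_REG(csr, 1) above provides read_msa_csr()
 * and write_msa_csr() for the MSA Control & Status Register (MSACSR).
 */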

#endif /* !__ASSEMBLY__ */

#define MSA_IR		0
#define MSA_CSR		1
#define MSA_ACCESS	2
#define MSA_SAVE	3
#define MSA_MODIFY	4
#define MSA_REQUEST	5
#define MSA_MAP		6
#define MSA_UNMAP	7

/* MSA Implementation Register (MSAIR) */
#define MSA_IR_REVB		0
#define MSA_IR_REVF		(_ULCAST_(0xff) << MSA_IR_REVB)
#define MSA_IR_PROCB		8
#define MSA_IR_PROCF		(_ULCAST_(0xff) << MSA_IR_PROCB)
#define MSA_IR_WRPB		16
#define MSA_IR_WRPF		(_ULCAST_(0x1) << MSA_IR_WRPB)

/* MSA Control & Status Register (MSACSR) */
#define MSA_CSR_RMB		0
#define MSA_CSR_RMF		(_ULCAST_(0x3) << MSA_CSR_RMB)
#define MSA_CSR_RM_NEAREST	0
#define MSA_CSR_RM_TO_ZERO	1
#define MSA_CSR_RM_TO_POS	2
#define MSA_CSR_RM_TO_NEG	3
#define MSA_CSR_FLAGSB		2
#define MSA_CSR_FLAGSF		(_ULCAST_(0x1f) << MSA_CSR_FLAGSB)
#define MSA_CSR_FLAGS_IB	2
#define MSA_CSR_FLAGS_IF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_IB)
#define MSA_CSR_FLAGS_UB	3
#define MSA_CSR_FLAGS_UF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_UB)
#define MSA_CSR_FLAGS_OB	4
#define MSA_CSR_FLAGS_OF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_OB)
#define MSA_CSR_FLAGS_ZB	5
#define MSA_CSR_FLAGS_ZF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_ZB)
#define MSA_CSR_FLAGS_VB	6
#define MSA_CSR_FLAGS_VF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_VB)
#define MSA_CSR_ENABLESB	7
#define MSA_CSR_ENABLESF	(_ULCAST_(0x1f) << MSA_CSR_ENABLESB)
#define MSA_CSR_ENABLES_IB	7
#define MSA_CSR_ENABLES_IF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_IB)
#define MSA_CSR_ENABLES_UB	8
#define MSA_CSR_ENABLES_UF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_UB)
#define MSA_CSR_ENABLES_OB	9
#define MSA_CSR_ENABLES_OF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_OB)
#define MSA_CSR_ENABLES_ZB	10
#define MSA_CSR_ENABLES_ZF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_ZB)
#define MSA_CSR_ENABLES_VB	11
#define MSA_CSR_ENABLES_VF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_VB)
#define MSA_CSR_CAUSEB		12
#define MSA_CSR_CAUSEF		(_ULCAST_(0x3f) << MSA_CSR_CAUSEB)
#define MSA_CSR_CAUSE_IB	12
#define MSA_CSR_CAUSE_IF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_IB)
#define MSA_CSR_CAUSE_UB	13
#define MSA_CSR_CAUSE_UF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_UB)
#define MSA_CSR_CAUSE_OB	14
#define MSA_CSR_CAUSE_OF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_OB)
#define MSA_CSR_CAUSE_ZB	15
#define MSA_CSR_CAUSE_ZF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_ZB)
#define MSA_CSR_CAUSE_VB	16
#define MSA_CSR_CAUSE_VF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_VB)
#define MSA_CSR_CAUSE_EB	17
#define MSA_CSR_CAUSE_EF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_EB)
#define MSA_CSR_NXB		18
#define MSA_CSR_NXF		(_ULCAST_(0x1) << MSA_CSR_NXB)
#define MSA_CSR_FSB		24
#define MSA_CSR_FSF		(_ULCAST_(0x1) << MSA_CSR_FSB)
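/*
 * Illustrative example: select round-towards-zero while preserving the
 * remaining MSACSR fields:
 *
 *	write_msa_csr((read_msa_csr() & ~MSA_CSR_RMF) |
 *		      (MSA_CSR_RM_TO_ZERO << MSA_CSR_RMB));
 */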

#endif /* _ASM_MSA_H */