/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Copyright (C) 2013 Imagination Technologies
 * Author: Paul Burton <paul.burton@mips.com>
 */
#ifndef _ASM_MSA_H
#define _ASM_MSA_H

#include <asm/mipsregs.h>

#ifndef __ASSEMBLY__

#include <asm/inst.h>

extern void _save_msa(struct task_struct *);
extern void _restore_msa(struct task_struct *);
extern void _init_msa_upper(void);

extern void read_msa_wr_b(unsigned idx, union fpureg *to);
extern void read_msa_wr_h(unsigned idx, union fpureg *to);
extern void read_msa_wr_w(unsigned idx, union fpureg *to);
extern void read_msa_wr_d(unsigned idx, union fpureg *to);

/**
 * read_msa_wr() - Read a single MSA vector register
 * @idx:	The index of the vector register to read
 * @to:		The FPU register union to store the register's value in
 * @fmt:	The format of the data in the vector register
 *
 * Read the value of MSA vector register idx into the FPU register
 * union to, using the format fmt.
 */
static inline void read_msa_wr(unsigned idx, union fpureg *to,
			       enum msa_2b_fmt fmt)
{
	switch (fmt) {
	case msa_fmt_b:
		read_msa_wr_b(idx, to);
		break;

	case msa_fmt_h:
		read_msa_wr_h(idx, to);
		break;

	case msa_fmt_w:
		read_msa_wr_w(idx, to);
		break;

	case msa_fmt_d:
		read_msa_wr_d(idx, to);
		break;

	default:
		BUG();
	}
}
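
/*
 * Illustrative usage sketch (hypothetical caller, not part of this
 * header): read vector register $w4 as doublewords while MSA is
 * known to be enabled and preemption is off.
 *
 *	union fpureg val;
 *
 *	preempt_disable();
 *	if (is_msa_enabled())
 *		read_msa_wr(4, &val, msa_fmt_d);
 *	preempt_enable();
 */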

extern void write_msa_wr_b(unsigned idx, union fpureg *from);
extern void write_msa_wr_h(unsigned idx, union fpureg *from);
extern void write_msa_wr_w(unsigned idx, union fpureg *from);
extern void write_msa_wr_d(unsigned idx, union fpureg *from);

/**
 * write_msa_wr() - Write a single MSA vector register
 * @idx:	The index of the vector register to write
 * @from:	The FPU register union to take the register's value from
 * @fmt:	The format of the data in the vector register
 *
 * Write the value from the FPU register union from into MSA vector
 * register idx, using the format fmt.
 */
static inline void write_msa_wr(unsigned idx, union fpureg *from,
				enum msa_2b_fmt fmt)
{
	switch (fmt) {
	case msa_fmt_b:
		write_msa_wr_b(idx, from);
		break;

	case msa_fmt_h:
		write_msa_wr_h(idx, from);
		break;

	case msa_fmt_w:
		write_msa_wr_w(idx, from);
		break;

	case msa_fmt_d:
		write_msa_wr_d(idx, from);
		break;

	default:
		BUG();
	}
}
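
/*
 * Illustrative usage sketch (hypothetical caller): the mirror of the
 * read example above, writing a caller-filled value back into vector
 * register $w4 as doublewords.
 *
 *	union fpureg val;	(filled in by the caller)
 *
 *	preempt_disable();
 *	if (is_msa_enabled())
 *		write_msa_wr(4, &val, msa_fmt_d);
 *	preempt_enable();
 */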

static inline void enable_msa(void)
{
	if (cpu_has_msa) {
		set_c0_config5(MIPS_CONF5_MSAEN);
		enable_fpu_hazard();
	}
}

static inline void disable_msa(void)
{
	if (cpu_has_msa) {
		clear_c0_config5(MIPS_CONF5_MSAEN);
		disable_fpu_hazard();
	}
}

static inline int is_msa_enabled(void)
{
	if (!cpu_has_msa)
		return 0;

	return read_c0_config5() & MIPS_CONF5_MSAEN;
}
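
/*
 * Illustrative sketch of the typical pattern (hypothetical caller,
 * assumed to run with preemption disabled so the enable state cannot
 * change underneath it):
 *
 *	if (!is_msa_enabled())
 *		enable_msa();
 *
 *	(... access MSA context ...)
 *
 *	disable_msa();
 */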

static inline int thread_msa_context_live(void)
{
	/*
	 * Check cpu_has_msa only if it's a constant. This will allow the
	 * compiler to optimise out code for CPUs without MSA without adding
	 * an extra redundant check for CPUs with MSA.
	 */
	if (__builtin_constant_p(cpu_has_msa) && !cpu_has_msa)
		return 0;

	return test_thread_flag(TIF_MSA_CTX_LIVE);
}
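
/*
 * Illustrative sketch of that optimisation: on a platform whose
 * cpu-feature-overrides.h pins the feature off (hypothetical override
 * shown), the early return above is taken at compile time:
 *
 *	#define cpu_has_msa	0
 *
 * __builtin_constant_p(cpu_has_msa) is then true, so the function
 * reduces to "return 0" and no TIF_MSA_CTX_LIVE test is emitted.
 */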

static inline void save_msa(struct task_struct *t)
{
	if (cpu_has_msa)
		_save_msa(t);
}

static inline void restore_msa(struct task_struct *t)
{
	if (cpu_has_msa)
		_restore_msa(t);
}
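
/*
 * Illustrative sketch (hypothetical context-switch caller): the vector
 * context of the outgoing task only needs saving if it was ever made
 * live.
 *
 *	if (thread_msa_context_live())
 *		save_msa(prev);
 */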

static inline void init_msa_upper(void)
{
	/*
	 * Check cpu_has_msa only if it's a constant. This will allow the
	 * compiler to optimise out code for CPUs without MSA without adding
	 * an extra redundant check for CPUs with MSA.
	 */
	if (__builtin_constant_p(cpu_has_msa) && !cpu_has_msa)
		return;

	_init_msa_upper();
}

#ifndef TOOLCHAIN_SUPPORTS_MSA
/*
 * Define assembler macros using .word for the c[ft]cmsa instructions in order
 * to allow compilation with toolchains that do not support MSA. Once all
 * toolchains in use support MSA, these can be removed.
 */

#define _ASM_SET_CFCMSA							\
	_ASM_MACRO_2R(cfcmsa, rd, cs,					\
		      _ASM_INSN_IF_MIPS(0x787e0019 | __cs << 11 | __rd << 6)	\
		      _ASM_INSN32_IF_MM(0x587e0016 | __cs << 11 | __rd << 6))
#define _ASM_UNSET_CFCMSA ".purgem cfcmsa\n\t"
#define _ASM_SET_CTCMSA							\
	_ASM_MACRO_2R(ctcmsa, cd, rs,					\
		      _ASM_INSN_IF_MIPS(0x783e0019 | __rs << 11 | __cd << 6)	\
		      _ASM_INSN32_IF_MM(0x583e0016 | __rs << 11 | __cd << 6))
#define _ASM_UNSET_CTCMSA ".purgem ctcmsa\n\t"
#else /* TOOLCHAIN_SUPPORTS_MSA */
#define _ASM_SET_CFCMSA						\
		".set\tfp=64\n\t"				\
		".set\tmsa\n\t"
#define _ASM_UNSET_CFCMSA
#define _ASM_SET_CTCMSA						\
		".set\tfp=64\n\t"				\
		".set\tmsa\n\t"
#define _ASM_UNSET_CTCMSA
#endif
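
/*
 * Illustrative worked example of the .word encoding above: with
 * rd = $2 and cs = $1, the MIPS32/MIPS64 case expands to
 *
 *	0x787e0019 | (1 << 11) | (2 << 6) == 0x787e0899
 *
 * which is the machine code an MSA-aware assembler would emit for
 * "cfcmsa $2, $1".
 */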

#define __BUILD_MSA_CTL_REG(name, cs)				\
static inline unsigned int read_msa_##name(void)		\
{								\
	unsigned int reg;					\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	_ASM_SET_CFCMSA						\
	"	cfcmsa	%0, $" #cs "\n"				\
	_ASM_UNSET_CFCMSA					\
	"	.set	pop\n"					\
	: "=r"(reg));						\
	return reg;						\
}								\
								\
static inline void write_msa_##name(unsigned int val)		\
{								\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	_ASM_SET_CTCMSA						\
	"	ctcmsa	$" #cs ", %0\n"				\
	_ASM_UNSET_CTCMSA					\
	"	.set	pop\n"					\
	: : "r"(val));						\
}

__BUILD_MSA_CTL_REG(ir, 0)
__BUILD_MSA_CTL_REG(csr, 1)
__BUILD_MSA_CTL_REG(access, 2)
__BUILD_MSA_CTL_REG(save, 3)
__BUILD_MSA_CTL_REG(modify, 4)
__BUILD_MSA_CTL_REG(request, 5)
__BUILD_MSA_CTL_REG(map, 6)
__BUILD_MSA_CTL_REG(unmap, 7)
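
/*
 * Illustrative sketch of the expansion: __BUILD_MSA_CTL_REG(csr, 1)
 * above defines read_msa_csr() and write_msa_csr(), which move MSACSR
 * ($1) to and from a GPR. A hypothetical caller clearing the sticky
 * flag bits might do:
 *
 *	unsigned int csr = read_msa_csr();
 *
 *	write_msa_csr(csr & ~MSA_CSR_FLAGSF);
 */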

#endif /* !__ASSEMBLY__ */

#define MSA_IR		0
#define MSA_CSR		1
#define MSA_ACCESS	2
#define MSA_SAVE	3
#define MSA_MODIFY	4
#define MSA_REQUEST	5
#define MSA_MAP		6
#define MSA_UNMAP	7

/* MSA Implementation Register (MSAIR) */
#define MSA_IR_REVB		0
#define MSA_IR_REVF		(_ULCAST_(0xff) << MSA_IR_REVB)
#define MSA_IR_PROCB		8
#define MSA_IR_PROCF		(_ULCAST_(0xff) << MSA_IR_PROCB)
#define MSA_IR_WRPB		16
#define MSA_IR_WRPF		(_ULCAST_(0x1) << MSA_IR_WRPB)

/* MSA Control & Status Register (MSACSR) */
#define MSA_CSR_RMB		0
#define MSA_CSR_RMF		(_ULCAST_(0x3) << MSA_CSR_RMB)
#define MSA_CSR_RM_NEAREST	0
#define MSA_CSR_RM_TO_ZERO	1
#define MSA_CSR_RM_TO_POS	2
#define MSA_CSR_RM_TO_NEG	3
#define MSA_CSR_FLAGSB		2
#define MSA_CSR_FLAGSF		(_ULCAST_(0x1f) << MSA_CSR_FLAGSB)
#define MSA_CSR_FLAGS_IB	2
#define MSA_CSR_FLAGS_IF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_IB)
#define MSA_CSR_FLAGS_UB	3
#define MSA_CSR_FLAGS_UF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_UB)
#define MSA_CSR_FLAGS_OB	4
#define MSA_CSR_FLAGS_OF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_OB)
#define MSA_CSR_FLAGS_ZB	5
#define MSA_CSR_FLAGS_ZF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_ZB)
#define MSA_CSR_FLAGS_VB	6
#define MSA_CSR_FLAGS_VF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_VB)
#define MSA_CSR_ENABLESB	7
#define MSA_CSR_ENABLESF	(_ULCAST_(0x1f) << MSA_CSR_ENABLESB)
#define MSA_CSR_ENABLES_IB	7
#define MSA_CSR_ENABLES_IF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_IB)
#define MSA_CSR_ENABLES_UB	8
#define MSA_CSR_ENABLES_UF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_UB)
#define MSA_CSR_ENABLES_OB	9
#define MSA_CSR_ENABLES_OF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_OB)
#define MSA_CSR_ENABLES_ZB	10
#define MSA_CSR_ENABLES_ZF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_ZB)
#define MSA_CSR_ENABLES_VB	11
#define MSA_CSR_ENABLES_VF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_VB)
#define MSA_CSR_CAUSEB		12
#define MSA_CSR_CAUSEF		(_ULCAST_(0x3f) << MSA_CSR_CAUSEB)
#define MSA_CSR_CAUSE_IB	12
#define MSA_CSR_CAUSE_IF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_IB)
#define MSA_CSR_CAUSE_UB	13
#define MSA_CSR_CAUSE_UF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_UB)
#define MSA_CSR_CAUSE_OB	14
#define MSA_CSR_CAUSE_OF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_OB)
#define MSA_CSR_CAUSE_ZB	15
#define MSA_CSR_CAUSE_ZF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_ZB)
#define MSA_CSR_CAUSE_VB	16
#define MSA_CSR_CAUSE_VF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_VB)
#define MSA_CSR_CAUSE_EB	17
#define MSA_CSR_CAUSE_EF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_EB)
#define MSA_CSR_NXB		18
#define MSA_CSR_NXF		(_ULCAST_(0x1) << MSA_CSR_NXB)
#define MSA_CSR_FSB		24
#define MSA_CSR_FSF		(_ULCAST_(0x1) << MSA_CSR_FSB)
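
/*
 * Illustrative sketch (hypothetical snippet) of extracting fields from
 * an MSACSR value using the definitions above:
 *
 *	unsigned int csr = read_msa_csr();
 *	unsigned int rm = (csr & MSA_CSR_RMF) >> MSA_CSR_RMB;
 *	bool inexact = !!(csr & MSA_CSR_CAUSE_IF);
 */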

#endif /* _ASM_MSA_H */