/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Cache maintenance
 *
 * Copyright (C) 2001 Deep Blue Solutions Ltd.
 * Copyright (C) 2012 ARM Ltd.
 */

#include <linux/errno.h>
#include <linux/linkage.h>
#include <linux/init.h>
#include <asm/assembler.h>
#include <asm/cpufeature.h>
#include <asm/alternative.h>
#include <asm/asm-uaccess.h>

/*
 *	caches_clean_inval_pou_macro(start,end) [fixup]
 *
 *	Ensure that the I and D caches are coherent within specified region.
 *	This is typically used when code has been written to a memory region,
 *	and will be executed.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 *	- fixup   - optional label to branch to on user fault
 */
.macro	caches_clean_inval_pou_macro, fixup
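/*
 * CTR_EL0.IDC: D-cache clean to the PoU is not required for
 * instruction-to-data coherence, so completing the earlier stores
 * with a DSB is enough and the clean can be skipped.
 */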
alternative_if ARM64_HAS_CACHE_IDC
	dsb     ishst
	b       .Ldc_skip_\@
alternative_else_nop_endif
	mov     x2, x0
	mov     x3, x1
	dcache_by_line_op cvau, ish, x2, x3, x4, x5, \fixup
.Ldc_skip_\@:
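/*
 * CTR_EL0.DIC: I-cache invalidation is not required for
 * instruction-to-data coherence, so an ISB to synchronize the fetch
 * stream is enough.
 */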
alternative_if ARM64_HAS_CACHE_DIC
	isb
	b	.Lic_skip_\@
alternative_else_nop_endif
	invalidate_icache_by_line x0, x1, x2, x3, \fixup
.Lic_skip_\@:
.endm

/*
 *	caches_clean_inval_pou(start,end)
 *
 *	Ensure that the I and D caches are coherent within specified region.
 *	This is typically used when code has been written to a memory region,
 *	and will be executed.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
SYM_FUNC_START(caches_clean_inval_pou)
	caches_clean_inval_pou_macro
	ret
SYM_FUNC_END(caches_clean_inval_pou)
SYM_FUNC_ALIAS(__pi_caches_clean_inval_pou, caches_clean_inval_pou)

/*
 *	caches_clean_inval_user_pou(start,end)
 *
 *	Ensure that the I and D caches are coherent within specified region.
 *	This is typically used when code has been written to a memory region,
 *	and will be executed.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
SYM_FUNC_START(caches_clean_inval_user_pou)
	uaccess_ttbr0_enable x2, x3, x4

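	/*
	 * If a user address faults during the maintenance below, the fixup
	 * label 2f is taken and the function returns -EFAULT; otherwise x0
	 * is cleared to report success.
	 */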
	caches_clean_inval_pou_macro 2f
	mov	x0, xzr
1:
	uaccess_ttbr0_disable x1, x2
	ret
2:
	mov	x0, #-EFAULT
	b	1b
SYM_FUNC_END(caches_clean_inval_user_pou)

/*
 *	icache_inval_pou(start,end)
 *
 *	Ensure that the I cache is invalidated within specified region.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
SYM_FUNC_START(icache_inval_pou)
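/* CTR_EL0.DIC: no I-cache invalidation is needed for coherence; an ISB is enough. */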
alternative_if ARM64_HAS_CACHE_DIC
	isb
	ret
alternative_else_nop_endif

	invalidate_icache_by_line x0, x1, x2, x3
	ret
SYM_FUNC_END(icache_inval_pou)

/*
 *	dcache_clean_inval_poc(start, end)
 *
 *	Ensure that any D-cache lines for the interval [start, end)
 *	are cleaned and invalidated to the PoC.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
SYM_FUNC_START(__pi_dcache_clean_inval_poc)
	dcache_by_line_op civac, sy, x0, x1, x2, x3
	ret
SYM_FUNC_END(__pi_dcache_clean_inval_poc)
SYM_FUNC_ALIAS(dcache_clean_inval_poc, __pi_dcache_clean_inval_poc)

/*
 *	dcache_clean_pou(start, end)
 *
 * 	Ensure that any D-cache lines for the interval [start, end)
 * 	are cleaned to the PoU.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
SYM_FUNC_START(dcache_clean_pou)
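/* CTR_EL0.IDC: no D-cache clean to the PoU is needed; a DSB orders the earlier stores. */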
alternative_if ARM64_HAS_CACHE_IDC
	dsb	ishst
	ret
alternative_else_nop_endif
	dcache_by_line_op cvau, ish, x0, x1, x2, x3
	ret
SYM_FUNC_END(dcache_clean_pou)

/*
 *	dcache_inval_poc(start, end)
 *
 * 	Ensure that any D-cache lines for the interval [start, end)
 * 	are invalidated. Any partial lines at the ends of the interval are
 *	also cleaned to PoC to prevent data loss.
 *
 *	- start   - kernel start address of region
 *	- end     - kernel end address of region
 */
SYM_FUNC_START(__pi_dcache_inval_poc)
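	/*
	 * Lines that are only partially covered at either end of the range
	 * are cleaned and invalidated (dc civac) so that data outside the
	 * range is not lost; fully covered lines are simply invalidated
	 * (dc ivac).
	 */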
	dcache_line_size x2, x3
	sub	x3, x2, #1
	tst	x1, x3				// end cache line aligned?
	bic	x1, x1, x3
	b.eq	1f
	dc	civac, x1			// clean & invalidate D / U line
1:	tst	x0, x3				// start cache line aligned?
	bic	x0, x0, x3
	b.eq	2f
	dc	civac, x0			// clean & invalidate D / U line
	b	3f
2:	dc	ivac, x0			// invalidate D / U line
3:	add	x0, x0, x2
	cmp	x0, x1
	b.lo	2b
	dsb	sy
	ret
SYM_FUNC_END(__pi_dcache_inval_poc)
SYM_FUNC_ALIAS(dcache_inval_poc, __pi_dcache_inval_poc)

/*
 *	dcache_clean_poc(start, end)
 *
 * 	Ensure that any D-cache lines for the interval [start, end)
 * 	are cleaned to the PoC.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
SYM_FUNC_START(__pi_dcache_clean_poc)
	dcache_by_line_op cvac, sy, x0, x1, x2, x3
	ret
SYM_FUNC_END(__pi_dcache_clean_poc)
SYM_FUNC_ALIAS(dcache_clean_poc, __pi_dcache_clean_poc)

/*
 *	dcache_clean_pop(start, end)
 *
 * 	Ensure that any D-cache lines for the interval [start, end)
 * 	are cleaned to the PoP.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
SYM_FUNC_START(__pi_dcache_clean_pop)
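	/* Without FEAT_DCPOP there is no DC CVAP; fall back to a clean to the PoC. */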
	alternative_if_not ARM64_HAS_DCPOP
	b	dcache_clean_poc
	alternative_else_nop_endif
	dcache_by_line_op cvap, sy, x0, x1, x2, x3
	ret
SYM_FUNC_END(__pi_dcache_clean_pop)
SYM_FUNC_ALIAS(dcache_clean_pop, __pi_dcache_clean_pop)