/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Memory copy functions for 32-bit PowerPC.
 *
 * Copyright (C) 1996-2005 Paul Mackerras.
 */
#include <asm/code-patching-asm.h>
#define COPY_16_BYTES_WITHEX(n)		\
8 ## n ## 1:				\

#define COPY_16_BYTES_EXCODE(n)		\
9 ## n ## 0:				\
	addi	r5,r5,-(16 * n);	\
	b	104f;			\
9 ## n ## 1:				\
	addi	r5,r5,-(16 * n);	\
	b	105f;			\
	EX_TABLE(8 ## n ## 0b,9 ## n ## 0b);	\
	EX_TABLE(8 ## n ## 1b,9 ## n ## 0b);	\
	EX_TABLE(8 ## n ## 2b,9 ## n ## 0b);	\
	EX_TABLE(8 ## n ## 3b,9 ## n ## 0b);	\
	EX_TABLE(8 ## n ## 4b,9 ## n ## 1b);	\
	EX_TABLE(8 ## n ## 5b,9 ## n ## 1b);	\
	EX_TABLE(8 ## n ## 6b,9 ## n ## 1b);	\
	EX_TABLE(8 ## n ## 7b,9 ## n ## 1b)
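/*
 * Illustrative C model of the fixup scheme above (names invented, not part
 * of this file): a fault at any load site 8##n##0..8##n##3 lands on
 * 9##n##0, a fault at any store site 8##n##4..8##n##7 lands on 9##n##1.
 * Each landing pad charges its block's offset (16 * n) to the residual
 * byte count and then dispatches to the shared read or write fixup, so a
 * single pair of fixup routines serves every block of the unrolled loop:
 *
 *	// inside a hypothetical fixup path, for unrolled block n:
 *	r5 -= 16 * n;			// addi r5,r5,-(16 * n)
 *	if (store_faulted)
 *		goto write_fixup;	// b 105f  (sites 8n4..8n7)
 *	else
 *		goto read_fixup;	// b 104f  (sites 8n0..8n3)
 */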
CACHELINE_BYTES = L1_CACHE_BYTES
LG_CACHELINE_BYTES = L1_CACHE_SHIFT
CACHELINE_MASK = (L1_CACHE_BYTES-1)
_GLOBAL(memset16)
	rlwinm.	r0, r5, 31, 1, 31	/* r0 = n >> 1: number of word stores */
	addi	r6, r3, -4
	beq-	2f
	rlwimi	r4, r4, 16, 0, 15	/* replicate the halfword into both halves */
	mtctr	r0
1:	stwu	r4, 4(r6)
	bdnz	1b
2:	andi.	r0, r5, 1		/* odd halfword left over? */
	beqlr
	sth	r4, 4(r6)
	blr
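/*
 * A C sketch of memset16 above (illustrative; it mirrors the asm, including
 * word stores through a 2-byte-aligned pointer, which PowerPC tolerates):
 * rlwinm. r0,r5,31,1,31 is r5 >> 1, and rlwimi r4,r4,16,0,15 replicates
 * the halfword into both halves of a word.
 *
 *	#include <stdint.h>
 *	#include <stddef.h>
 *
 *	void memset16_sketch(uint16_t *p, uint16_t v, size_t n)
 *	{
 *		uint32_t w = ((uint32_t)v << 16) | v;	// both halves = v
 *		uint32_t *q = (uint32_t *)p;
 *		size_t i;
 *
 *		for (i = 0; i < n / 2; i++)	// one stwu per halfword pair
 *			*q++ = w;
 *		if (n & 1)			// odd count: trailing sth
 *			*(uint16_t *)q = v;
 *	}
 */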
/*
 * Use dcbz on the complete cache lines in the destination to set them to
 * zero.  This requires that the destination
 * area is cacheable.  -- paulus
 *
 * During early init, cache might not be active yet, so the optimised path
 * below is initially branched around; that branch is
 * replaced by a nop once cache is active. This is done in machine_init()
 */
	blt	7f			/* fewer than 4 bytes: byte loop */

5:	b	2f
	patch_site	5b, patch__memset_nocache

	clrlwi	r7,r6,32-LG_CACHELINE_BYTES	/* offset within first cacheline */
	add	r8,r7,r5
	srwi	r9,r8,LG_CACHELINE_BYTES
	addic.	r9,r9,-1	/* total number of complete cachelines */
	ble	2f

	beq	3f

	clrlwi	r5,r8,32-LG_CACHELINE_BYTES	/* tail left after the dcbz lines */

	bdz	6f
1:	stwu	r4,4(r6)
	bdnz	1b

8:	stbu	r4,1(r6)

	addi	r6,r3,-1
9:	stbu	r4,1(r6)
	bdnz	9b
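/*
 * The strategy above in C terms (a sketch, not kernel code; dcbz_line() is
 * an invented wrapper for the dcbz instruction): store up to the first
 * cacheline boundary, clear whole lines with dcbz, then finish the tail.
 * dcbz only makes sense for a zero fill value, and it raises an alignment
 * exception on non-cacheable memory, which is why this path is branched
 * around until the caches are enabled.
 *
 *	void zero_sketch(char *p, unsigned long n)
 *	{
 *		while (n && ((unsigned long)p & CACHELINE_MASK)) {
 *			*p++ = 0;		// head, byte at a time
 *			n--;			// (the asm uses word stores)
 *		}
 *		while (n >= L1_CACHE_BYTES) {
 *			dcbz_line(p);		// zero one full cacheline
 *			p += L1_CACHE_BYTES;
 *			n -= L1_CACHE_BYTES;
 *		}
 *		while (n--)
 *			*p++ = 0;		// tail
 *	}
 */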
/*
 * This version uses dcbz on the complete cache lines in the
 * destination area to reduce memory traffic.  This requires that
 * the destination area is cacheable.
 * We only use this version if the source and dest don't overlap.
 * -- paulus.
 *
 * During early init, cache might not be active yet, so do not use
 * dcbz and jump to generic_memcpy, which doesn't use dcbz. This jump is
 * replaced by a nop once cache is active. This is done in machine_init()
 */
1:	b	generic_memcpy
	patch_site	1b, patch__memcpy_nocache
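/*
 * What the patch_site pair above amounts to (sketch only; the kernel does
 * this through patch_instruction_site() from machine_init(), and
 * enable_cached_memcpy()/PPC_NOP here are invented names): the address of
 * the "b generic_memcpy" is recorded at build time, and once the caches
 * are on the branch is overwritten with a nop so the dcbz-based body below
 * becomes reachable.
 *
 *	#define PPC_NOP	0x60000000u	// "ori r0,r0,0"
 *
 *	static void enable_cached_memcpy(unsigned int *site)
 *	{
 *		*site = PPC_NOP;	// replace "b generic_memcpy"
 *		flush_icache_range((unsigned long)site,
 *				   (unsigned long)site + 4);
 *	}
 */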
	cmplw	1,r3,r8			/* second half of the overlap test */

	addi	r4,r4,-4
	addi	r6,r3,-4

	beq	58f

	blt	63f			/* if not much to do */
	andi.	r8,r0,3			/* get it word-aligned first */

	beq+	61f

	addi	r4,r4,1
	addi	r6,r6,1

	beq	58f
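/*
 * The alignment prologue in C terms (sketch; u32 stands for a 32-bit
 * unsigned type): copy 0-3 single bytes until the destination is word
 * aligned, then whole words up to the first cacheline boundary, so the
 * main loop can dcbz and copy entire lines.  The blt 63f path bails out
 * to the short-copy tail when there is less to do than that.
 *
 *	unsigned long to_line = (0 - (unsigned long)dst) & CACHELINE_MASK;
 *	unsigned long head = to_line & 3;	// the andi. r8,r0,3 above
 *	while (head--) {
 *		*dst++ = *src++;		// byte loop
 *		len--;
 *	}
 *	for (to_line >>= 2; to_line; to_line--) {	// word loop
 *		*(u32 *)dst = *(const u32 *)src;
 *		dst += 4;  src += 4;  len -= 4;
 *	}
 */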
	clrlwi	r5,r5,32-LG_CACHELINE_BYTES	/* bytes left after whole cachelines */
	beq	63f

	beq	64f

	beq+	65f

40:	lbzu	r0,1(r4)
	stbu	r0,1(r6)
	addi	r6,r3,-4
	addi	r4,r4,-4
	beq	2f			/* if less than 8 bytes to do */

	bne	5f			/* destination not word aligned */
1:	lwz	r7,4(r4)
	lwzu	r8,8(r4)
	stw	r7,4(r6)
	stwu	r8,8(r6)
	bdnz	1b

	blt	3f

	addi	r5,r5,-4

4:	lbzu	r0,1(r4)
	stbu	r0,1(r6)

	addi	r4,r4,1
	addi	r6,r6,1

	rlwinm.	r7,r5,32-3,3,31		/* r7 = r5 >> 3 */

	b	1b
_GLOBAL(backwards_memcpy)
	rlwinm.	r7,r5,32-3,3,31		/* r7 = r5 >> 3 */
	add	r6,r3,r5		/* point past the end; copy runs downwards */
	add	r4,r4,r5
	beq	2f

	bne	5f
1:	lwz	r7,-4(r4)
	lwzu	r8,-8(r4)
	stw	r7,-4(r6)
	stwu	r8,-8(r6)
	bdnz	1b

	blt	3f
	lwzu	r0,-4(r4)

	stwu	r0,-4(r6)

4:	lbzu	r0,-1(r4)
	stbu	r0,-1(r6)

6:	lbzu	r7,-1(r4)
	stbu	r7,-1(r6)

	rlwinm.	r7,r5,32-3,3,31
	beq	2f
	mtctr	r7
	b	1b
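/*
 * Why a backwards variant exists, in C terms (illustrative): for an
 * overlapping move with dst > src, a forward copy would overwrite source
 * bytes before reading them, so the backwards variant is used when the
 * destination lies inside the source.  The rlwinm above computes len >> 3
 * because the main loop moves 8 bytes (two lwz/stw pairs) per iteration.
 *
 *	void move_sketch(char *dst, const char *src, unsigned long n)
 *	{
 *		if (dst <= src || dst >= src + n) {	// forward is safe
 *			while (n--)
 *				*dst++ = *src++;
 *		} else {				// dst overlaps the tail
 *			dst += n;  src += n;
 *			while (n--)
 *				*--dst = *--src;
 *		}
 *	}
 */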
_GLOBAL(__copy_tofrom_user)
	addi	r4,r4,-4
	addi	r6,r3,-4

	beq	58f

	blt	63f			/* if not much to do */
	andi.	r8,r0,3			/* get it word-aligned first */

	beq+	61f

	addi	r4,r4,1
	addi	r6,r6,1

	beq	58f
	EX_TABLE(70b,100f)
	EX_TABLE(71b,101f)
	EX_TABLE(72b,102f)
	EX_TABLE(73b,103f)
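/*
 * Each EX_TABLE(a, b) pair above records "if the instruction at label a
 * faults, resume at label b".  Conceptually the table is an array the
 * exception handler searches by faulting address (sketched below with
 * absolute addresses; current kernels actually store compact pc-relative
 * offsets):
 *
 *	struct exception_table_entry {
 *		unsigned long insn;	// address of 70:, 71:, ...
 *		unsigned long fixup;	// address of 100:, 101:, ...
 *	};
 */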
	clrlwi	r5,r5,32-LG_CACHELINE_BYTES	/* bytes left after whole cachelines */
	beq	63f

	/* Here we decide how far ahead to prefetch the source */
	cmpwi	r0,1
	ble	114f
	li	r7,1
#if MAX_COPY_PREFETCH > 1
	/* Heuristically, for large transfers we prefetch
	   MAX_COPY_PREFETCH cachelines ahead.  For small transfers
	   we prefetch 1 cacheline ahead. */
	cmpwi	r0,MAX_COPY_PREFETCH
	ble	112f
	li	r7,MAX_COPY_PREFETCH
#endif /* MAX_COPY_PREFETCH > 1 */
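/*
 * The prefetch-depth choice in C terms (sketch; dcbt_line() is an invented
 * wrapper for the dcbt touch instruction, and nlines is the cacheline
 * count tested by the cmpwi above): short copies run one line ahead of
 * the loop, long copies run MAX_COPY_PREFETCH lines ahead so the loads
 * stay cache-hot.
 *
 *	unsigned int depth = 1;
 *	if (nlines > MAX_COPY_PREFETCH)
 *		depth = MAX_COPY_PREFETCH;
 *	for (unsigned int i = 0; i < depth; i++)	// prime the prefetch
 *		dcbt_line(src + i * L1_CACHE_BYTES);
 */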
	EX_TABLE(54b,105f)

	COPY_16_BYTES_WITHEX(1)
	beq	64f

	beq+	65f

	addi	r4,r4,1
	addi	r6,r6,1
/* read fault, initial single-byte copy */
100:	li	r9,0
	b	90f
/* write fault, initial single-byte copy */
101:	li	r9,1
	b	99f
/* read fault, initial word copy */
102:	li	r9,0
	b	91f
/* write fault, initial word copy */
103:	li	r9,1
	b	99f

/*
 * this stuff handles faults in the cacheline loop and branches to either
 * 104f (if in read part) or 105f (if in write part), after updating r5
 */
	COPY_16_BYTES_EXCODE(1)
/* read fault in cacheline loop */
104:	li	r9,0
	b	92f
/* fault on dcbz (effectively a write fault) */
105:	li	r9,1
	b	106f
/* read fault in final word loop */
108:	li	r9,0
	b	93f
/* write fault in final word loop */
109:	li	r9,1
	b	99f
/* read fault in final byte loop */
110:	li	r9,0
	b	94f
/* write fault in final byte loop */
111:	li	r9,1
/*
 * The number of bytes not copied at this point is
 * r5 + (ctr << r3), and r9 is 0 for read or 1 for write.
 */
	beq	120f		/* shouldn't happen */
	cmpwi	0,r9,0		/* was the fault on a load? */
	bne	120f
/* for a read fault, first try to continue the copy one byte at a time */
	mtctr	r3

	addi	r4,r4,1
	addi	r6,r6,1
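/*
 * Worked example of the fixup arithmetic (illustrative): r3 holds log2 of
 * the faulting loop's stride (0 for the byte loops, 2 for the word loops,
 * LG_CACHELINE_BYTES for the cacheline loop) and ctr holds the iterations
 * that never ran, so the bytes not copied are
 *
 *	not_copied = r5 + (ctr << r3);
 *
 * For instance, a fault with r5 = 3 and ctr = 5 in a word loop (r3 = 2)
 * leaves 3 + (5 << 2) = 23 bytes uncopied.  On a read fault the code above
 * then retries one byte at a time, so the caller gets back the exact count
 * of bytes that could not be copied.
 */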