Lines Matching +full:rx +full:- +full:input
1 /* SPDX-License-Identifier: GPL-2.0-or-later */
3 * Cast5 Cipher 16-way parallel algorithm (AVX/x86_64)
6 * <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
14 .file "cast5-avx-x86_64-asm_64.S"
26 /* s-boxes */
33 16-way AVX cast5
46 #define RX %xmm8
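
A note on the register names (an inference from the listing, not stated in the file): each 128-bit XMM register can hold the 32-bit left or right halves of four 64-bit CAST5 blocks, so four register pairs RL1..RL4/RR1..RR4 cover all 16 blocks of the "16-way" interface, with RX (%xmm8) serving as a scratch register for the round function. A minimal C sketch of that assumed layout, where load_16way() is a hypothetical helper:

    #include <immintrin.h>
    #include <stdint.h>

    /* Assumed layout: rl[i]/rr[i] hold the left/right 32-bit halves of
     * blocks 4*i .. 4*i+3. */
    struct cast5_16way {
            __m128i rl[4];
            __m128i rr[4];
    };

    static void load_16way(struct cast5_16way *st, const uint64_t src[16])
    {
            uint32_t l[4], r[4];

            for (int i = 0; i < 4; i++) {
                    for (int j = 0; j < 4; j++) {
                            l[j] = (uint32_t)(src[4 * i + j] >> 32);
                            r[j] = (uint32_t)src[4 * i + j];
                    }
                    st->rl[i] = _mm_loadu_si128((const __m128i *)l);
                    st->rr[i] = _mm_loadu_si128((const __m128i *)r);
            }
    }
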
130 F_head(b1, RX, RGI1, RGI2, op0); \
131 F_head(b2, RX, RGI3, RGI4, op0); \
133 F_tail(b1, RX, RGI1, RGI2, op1, op2, op3); \
136 vpxor a1, RX, a1; \
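
F_head and F_tail split the CAST5 round function of RFC 2144 in two: F_head combines the masked key with the input half and performs the key-dependent rotation, F_tail does the four S-box lookups and folds them together, and the final vpxor applies the result to the other half. The op0..op3 parameters select add/xor/sub so one macro serves all three round types. A scalar sketch of the type-1 round (the variant with op0 = vpaddd), with s1..s4 the cipher's four 8x32 S-boxes:

    #include <stdint.h>

    extern const uint32_t s1[256], s2[256], s3[256], s4[256];

    static uint32_t rol32(uint32_t v, unsigned n)
    {
            n &= 31;
            return n ? (v << n) | (v >> (32 - n)) : v;
    }

    /* RFC 2144 type-1 round: I = (Km + D) <<< Kr,
     * f = ((S1[Ia] ^ S2[Ib]) - S3[Ic]) + S4[Id], Ia = most significant byte */
    static uint32_t cast5_f1(uint32_t d, uint32_t km, uint8_t kr)
    {
            uint32_t i = rol32(km + d, kr);

            return ((s1[i >> 24] ^ s2[(i >> 16) & 0xff]) -
                    s3[(i >> 8) & 0xff]) + s4[i & 0xff];
    }
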
159 /* add 16-bit rotation to key rotations (mod 32) */ \
164 /* add 16-bit rotation to key rotations (mod 32) */ \
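
The repeated comment deserves a note: rotation counts are 5-bit quantities, and for a 5-bit value XOR with 16 equals addition of 16 mod 32, because 16 is the top bit and no carry can occur. A single vpxor of the stored kr bytes against a 0x10 byte mask (the likely implementation, given the comment) therefore folds a fixed extra 16-bit rotation into every per-round rotation amount. Checking the identity:

    #include <assert.h>

    int main(void)
    {
            /* for 5-bit rotation counts, kr ^ 16 == (kr + 16) mod 32 */
            for (unsigned kr = 0; kr < 32; kr++)
                    assert((kr ^ 16) == ((kr + 16) & 31));
            return 0;
    }
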
216 /* input:
247 inpack_blocks(RL1, RR1, RTMP, RX, RKM);
248 inpack_blocks(RL2, RR2, RTMP, RX, RKM);
249 inpack_blocks(RL3, RR3, RTMP, RX, RKM);
250 inpack_blocks(RL4, RR4, RTMP, RX, RKM);
280 outunpack_blocks(RR1, RL1, RTMP, RX, RKM);
281 outunpack_blocks(RR2, RL2, RTMP, RX, RKM);
282 outunpack_blocks(RR3, RL3, RTMP, RX, RKM);
283 outunpack_blocks(RR4, RL4, RTMP, RX, RKM);
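
CAST5 is specified on big-endian 32-bit words, so a plausible reading of inpack_blocks/outunpack_blocks is byte-swapping the halves on the way in and out, with RKM holding the shuffle mask and RTMP/RX as scratch. Note also that the output calls pass RR* before RL*: they undo the final Feistel half-swap. A scalar equivalent for one 64-bit block, under those assumptions:

    #include <stdint.h>

    static void inpack_block(const uint8_t src[8], uint32_t *l, uint32_t *r)
    {
            /* big-endian block: left half first */
            *l = (uint32_t)src[0] << 24 | (uint32_t)src[1] << 16 |
                 (uint32_t)src[2] << 8  | src[3];
            *r = (uint32_t)src[4] << 24 | (uint32_t)src[5] << 16 |
                 (uint32_t)src[6] << 8  | src[7];
    }

    static void outunpack_block(uint8_t dst[8], uint32_t l, uint32_t r)
    {
            dst[0] = l >> 24; dst[1] = l >> 16; dst[2] = l >> 8; dst[3] = l;
            dst[4] = r >> 24; dst[5] = r >> 16; dst[6] = r >> 8; dst[7] = r;
    }
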
289 /* input:
320 inpack_blocks(RL1, RR1, RTMP, RX, RKM);
321 inpack_blocks(RL2, RR2, RTMP, RX, RKM);
322 inpack_blocks(RL3, RR3, RTMP, RX, RKM);
323 inpack_blocks(RL4, RR4, RTMP, RX, RKM);
352 outunpack_blocks(RR1, RL1, RTMP, RX, RKM);
353 outunpack_blocks(RR2, RL2, RTMP, RX, RKM);
354 outunpack_blocks(RR3, RL3, RTMP, RX, RKM);
355 outunpack_blocks(RR4, RL4, RTMP, RX, RKM);
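
The decryption path repeats the same inpack/outunpack pattern because a Feistel network decrypts by running the identical rounds with the subkey order reversed; only the round sequence between these calls differs. A sketch of that symmetry, where cast5_subround() is a hypothetical stand-in for the F1/F2/F3 subrounds and 16 rounds are assumed (full-length keys):

    #include <stdint.h>

    uint32_t cast5_subround(int type, uint32_t d, uint32_t km, uint8_t kr);

    static void cast5_crypt(uint32_t *l, uint32_t *r, const uint32_t km[16],
                            const uint8_t kr[16], int decrypt)
    {
            /* RFC 2144 round-type schedule: 1,2,3 repeating */
            static const int type[16] = { 1, 2, 3, 1, 2, 3, 1, 2,
                                          3, 1, 2, 3, 1, 2, 3, 1 };

            for (int i = 0; i < 16; i++) {
                    int k = decrypt ? 15 - i : i;
                    uint32_t t = *l ^ cast5_subround(type[k], *r, km[k], kr[k]);

                    *l = *r;
                    *r = t;
            }
            /* the result comes out half-swapped as (r, l), which is why the
             * listing stores via outunpack_blocks(RR*, RL*, ...) */
    }
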
365 /* input:
402 /* input:
440 /* input:
465 vmovq (%r12), RX;
466 vpshufd $0x4f, RX, RX;
467 vpxor RX, RR1, RR1;
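
These three instructions apply the chaining value in CBC decryption, where P_i = D(C_i) ^ C_{i-1} and C_0 = IV: the 64-bit IV at (%r12) is loaded, positioned with vpshufd, and xored into the first decrypted block; the remaining blocks are xored against the preceding ciphertext words elsewhere in the function (not shown in this listing). The scalar shape of the mode, with cast5_decrypt_block() a hypothetical stand-in for the 16-way core:

    #include <stddef.h>
    #include <stdint.h>

    uint64_t cast5_decrypt_block(uint64_t c);

    static void cbc_decrypt(uint64_t *dst, const uint64_t *src,
                            size_t nblocks, uint64_t *iv)
    {
            uint64_t prev = *iv;

            for (size_t i = 0; i < nblocks; i++) {
                    uint64_t c = src[i];

                    dst[i] = cast5_decrypt_block(c) ^ prev;
                    prev = c;
            }
            *iv = prev; /* last ciphertext block chains into the next call */
    }
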
492 /* input:
507 vpsrldq $8, RTMP, RTMP; /* low: -1, high: 0 */
510 vpaddq RKR, RKR, RKR; /* low: -2, high: -2 */
515 vmovq (%rcx), RX;
516 vpshufb R1ST, RX, RX;
519 vpsubq RTMP, RX, RX; /* le: IV1, IV0 */
520 vpshufb RKM, RX, RL1; /* be: IV0, IV1 */
521 vpsubq RKR, RX, RX;
522 vpshufb RKM, RX, RR1; /* be: IV2, IV3 */
523 vpsubq RKR, RX, RX;
524 vpshufb RKM, RX, RL2; /* be: IV4, IV5 */
525 vpsubq RKR, RX, RX;
526 vpshufb RKM, RX, RR2; /* be: IV6, IV7 */
527 vpsubq RKR, RX, RX;
528 vpshufb RKM, RX, RL3; /* be: IV8, IV9 */
529 vpsubq RKR, RX, RX;
530 vpshufb RKM, RX, RR3; /* be: IV10, IV11 */
531 vpsubq RKR, RX, RX;
532 vpshufb RKM, RX, RL4; /* be: IV12, IV13 */
533 vpsubq RKR, RX, RX;
534 vpshufb RKM, RX, RR4; /* be: IV14, IV15 */
537 vpsubq RTMP, RX, RX; /* le: IV16, IV14 */
538 vpshufb R1ST, RX, RX; /* be: IV16, IV16 */
539 vmovq RX, (%rcx);
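
The counter construction runs in little-endian arithmetic: vpcmpeqd yields all-ones (-1 per lane), vpsrldq $8 leaves {low: -1, high: 0}, and vpaddq doubles -1 into -2 in both qwords, so the vpsubq instructions add 1 to the low counter and then 2 to both counters per step; the vpshufb masks (R1ST, RKM) translate between the big-endian IV in memory and the little-endian counters, and the last three instructions store IV16 as the next IV. A hedged intrinsics rendering, with bswap64() standing in for the shuffle masks:

    #include <immintrin.h>
    #include <stdint.h>

    static uint64_t bswap64(uint64_t v)
    {
            return __builtin_bswap64(v);
    }

    /* Build eight registers holding big-endian counter pairs (IV0,IV1) ...
     * (IV14,IV15) from a 64-bit big-endian IV, and return IV16. */
    static void cast5_ctr_ivs(uint64_t iv_be, __m128i out_be[8],
                              uint64_t *next_iv_be)
    {
            __m128i ones = _mm_cmpeq_epi32(_mm_setzero_si128(),
                                           _mm_setzero_si128());
            __m128i m1_lo = _mm_srli_si128(ones, 8);    /* low: -1, high: 0 */
            __m128i m2    = _mm_add_epi64(ones, ones);  /* low: -2, high: -2 */

            uint64_t iv = bswap64(iv_be);
            __m128i x = _mm_set1_epi64x((long long)iv);

            x = _mm_sub_epi64(x, m1_lo);                /* le: IV1, IV0 */
            for (int i = 0; i < 8; i++) {
                    uint64_t lo = (uint64_t)_mm_cvtsi128_si64(x);
                    uint64_t hi = (uint64_t)_mm_extract_epi64(x, 1);

                    /* be: IV2i, IV2i+1 */
                    out_be[i] = _mm_set_epi64x((long long)bswap64(lo),
                                               (long long)bswap64(hi));
                    x = _mm_sub_epi64(x, m2);           /* both counters += 2 */
            }
            *next_iv_be = bswap64(iv + 16);
    }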