Lines matching refs:RTMP0

25 #define RTMP0        %xmm3
186 transpose_4x4(RA0, RA1, RA2, RA3, RTMP0, RTMP1);
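
Here RTMP0 and RTMP1 serve as the two scratch registers of transpose_4x4, which reorders four 128-bit registers of packed 32-bit words so that each register ends up holding the same state word from all four blocks. A minimal C-intrinsics sketch of the usual unpack-based 4x4 transpose; the macro body is not shown in this listing, so the exact instruction choice is an assumption, and t0..t3 play the scratch role of RTMP0/RTMP1:

#include <immintrin.h>

static inline void transpose_4x4_sketch(__m128i *r0, __m128i *r1,
					__m128i *r2, __m128i *r3)
{
	__m128i t0 = _mm_unpacklo_epi32(*r0, *r1);	/* a0 b0 a1 b1 */
	__m128i t1 = _mm_unpackhi_epi32(*r0, *r1);	/* a2 b2 a3 b3 */
	__m128i t2 = _mm_unpacklo_epi32(*r2, *r3);	/* c0 d0 c1 d1 */
	__m128i t3 = _mm_unpackhi_epi32(*r2, *r3);	/* c2 d2 c3 d3 */

	*r0 = _mm_unpacklo_epi64(t0, t2);		/* a0 b0 c0 d0 */
	*r1 = _mm_unpackhi_epi64(t0, t2);		/* a1 b1 c1 d1 */
	*r2 = _mm_unpacklo_epi64(t1, t3);		/* a2 b2 c2 d2 */
	*r3 = _mm_unpackhi_epi64(t1, t3);		/* a3 b3 c3 d3 */
}
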
195 transform_pre(RX0, RTMP4, RB0, MASK_4BIT, RTMP0); \
197 transform_post(RX0, RB1, RB2, MASK_4BIT, RTMP0); \
200 vpshufb RB3, RX0, RTMP0; \
201 vpxor RTMP0, s0, s0; /* s0 ^ x */ \
203 vpxor RTMP1, RTMP0, RTMP0; /* x ^ rol(x,8) */ \
205 vpxor RTMP1, RTMP0, RTMP0; /* x ^ rol(x,8) ^ rol(x,16) */ \
208 vpslld $2, RTMP0, RTMP1; \
209 vpsrld $30, RTMP0, RTMP0; \
210 vpxor RTMP0, s0, s0; \
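
Lines 200-210 accumulate SM4's linear transformation L(x) = x ^ rol(x,2) ^ rol(x,10) ^ rol(x,18) ^ rol(x,24) into the state word s0. Per the comments, the vpshufb shuffles supply the byte-granular rotations rol(x,8), rol(x,16) and rol(x,24), while the vpslld $2 / vpsrld $30 pair rotates the combined value in RTMP0 left by 2, with the two halves xored into s0 separately. A scalar C sketch of the same computation, with names of my own choosing:

#include <stdint.h>

static inline uint32_t rol32(uint32_t x, int n)
{
	return (x << n) | (x >> (32 - n));
}

/* Scalar equivalent of the vector code; t corresponds to RTMP0. */
static inline uint32_t sm4_linear(uint32_t x)
{
	uint32_t t = x ^ rol32(x, 8) ^ rol32(x, 16);

	/* rol(t,2) = rol(x,2) ^ rol(x,10) ^ rol(x,18); the asm splits
	 * it into the vpslld $2 and vpsrld $30 halves. */
	return x ^ rol32(x, 24) ^ rol32(t, 2);
}

In the round function this value is xored into the state word, matching the running "s0 ^ ..." comments above.
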
229 transpose_4x4(RA0, RA1, RA2, RA3, RTMP0, RTMP1);
273 transpose_4x4(RA0, RA1, RA2, RA3, RTMP0, RTMP1);
274 transpose_4x4(RB0, RB1, RB2, RB3, RTMP0, RTMP1);
291 transform_pre(RX0, RTMP4, RTMP1, MASK_4BIT, RTMP0); \
292 transform_pre(RX1, RTMP4, RTMP1, MASK_4BIT, RTMP0); \
296 transform_post(RX0, RTMP2, RTMP3, MASK_4BIT, RTMP0); \
297 transform_post(RX1, RTMP2, RTMP3, MASK_4BIT, RTMP0); \
300 vpshufb RTMP4, RX0, RTMP0; \
301 vpxor RTMP0, s0, s0; /* s0 ^ x */ \
306 vpxor RTMP1, RTMP0, RTMP0; /* x ^ rol(x,8) */ \
311 vpxor RTMP1, RTMP0, RTMP0; /* x ^ rol(x,8) ^ rol(x,16) */ \
318 vpslld $2, RTMP0, RTMP1; \
319 vpsrld $30, RTMP0, RTMP0; \
320 vpxor RTMP0, s0, s0; \
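
The 8-block variant repeats the same linear-transformation pattern across two register banks (RX0/RX1); its other uses of RTMP0 as scratch are in transform_pre/transform_post at lines 291-297, which bracket the AES-NI S-box core with affine transforms so that it computes SM4's S-box instead. The standard way to apply such an 8-bit affine map with vpshufb is to split each byte into nibbles using MASK_4BIT (0x0f repeated) and combine two 16-entry table lookups. A C-intrinsics sketch of that idiom; lo_lut/hi_lut stand in for the pre/post filter constants, which are not part of this listing:

#include <immintrin.h>

static inline __m128i affine_shufb(__m128i x, __m128i lo_lut, __m128i hi_lut)
{
	const __m128i mask4 = _mm_set1_epi8(0x0f);	/* MASK_4BIT */
	__m128i lo = _mm_and_si128(x, mask4);
	__m128i hi = _mm_and_si128(_mm_srli_epi16(x, 4), mask4);

	/* One 16-entry lookup per nibble; xor merges the two halves. */
	return _mm_xor_si128(_mm_shuffle_epi8(lo_lut, lo),
			     _mm_shuffle_epi8(hi_lut, hi));
}

Two vpshufb lookups plus a xor can realize any affine byte map over GF(2), which is what lets the pre/post filters re-express SM4's S-box in terms of the AES one.
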
345 transpose_4x4(RA0, RA1, RA2, RA3, RTMP0, RTMP1);
346 transpose_4x4(RB0, RB1, RB2, RB3, RTMP0, RTMP1);
432 vpshufb RBSWAP, RA0, RTMP0; /* be => le */
444 inc_le128(RTMP0, RNOT, RTMP2); /* +1 */
445 vpshufb RBSWAP, RTMP0, RA1;
446 inc_le128(RTMP0, RNOT, RTMP2); /* +2 */
447 vpshufb RBSWAP, RTMP0, RA2;
448 inc_le128(RTMP0, RNOT, RTMP2); /* +3 */
449 vpshufb RBSWAP, RTMP0, RA3;
450 inc_le128(RTMP0, RNOT, RTMP2); /* +4 */
451 vpshufb RBSWAP, RTMP0, RB0;
452 inc_le128(RTMP0, RNOT, RTMP2); /* +5 */
453 vpshufb RBSWAP, RTMP0, RB1;
454 inc_le128(RTMP0, RNOT, RTMP2); /* +6 */
455 vpshufb RBSWAP, RTMP0, RB2;
456 inc_le128(RTMP0, RNOT, RTMP2); /* +7 */
457 vpshufb RBSWAP, RTMP0, RB3;
458 inc_le128(RTMP0, RNOT, RTMP2); /* +8 */
459 vpshufb RBSWAP, RTMP0, RTMP1;
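
Lines 432-459 set up consecutive counter blocks for CTR mode: vpshufb with RBSWAP converts the big-endian IV to little-endian lane order, inc_le128 adds one to the 128-bit little-endian value (RNOT and RTMP2 appear to serve as constant and scratch), and a second vpshufb converts each incremented counter back to big-endian. RA0 keeps the current counter (+0), +1 through +7 fill RA1..RB3, and +8 is parked in RTMP1 for the next chunk. A scalar C sketch of the equivalent counter arithmetic; the function and parameter names are illustrative, not from the source:

#include <stdint.h>
#include <string.h>

/*
 * The be => le swap, 128-bit little-endian increment, and le => be
 * swap together amount to incrementing a big-endian 128-bit counter.
 * blocks[0..7] receive ctr+1 .. ctr+8.
 */
static void ctr_next_blocks(uint8_t ctr[16], uint8_t blocks[8][16])
{
	uint64_t hi = 0, lo = 0;
	int b, i;

	for (i = 0; i < 8; i++) {		/* be => host order */
		hi = (hi << 8) | ctr[i];
		lo = (lo << 8) | ctr[8 + i];
	}

	for (b = 0; b < 8; b++) {
		if (++lo == 0)			/* inc_le128: add 1 with carry */
			hi++;
		for (i = 0; i < 8; i++) {	/* host order => be */
			blocks[b][i]     = (uint8_t)(hi >> (56 - 8 * i));
			blocks[b][8 + i] = (uint8_t)(lo >> (56 - 8 * i));
		}
	}
	memcpy(ctr, blocks[7], 16);	/* +8 carried over, as RTMP1 is */
}
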