Lines Matching refs:RTMP2

27 #define RTMP2        %xmm5
172 vmovdqa .Lbswap32_mask rRIP, RTMP2;
173 vpshufb RTMP2, RA0, RA0;
174 vpshufb RTMP2, RA1, RA1;
175 vpshufb RTMP2, RA2, RA2;
176 vpshufb RTMP2, RA3, RA3;
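
The .Lbswap32_mask load at line 172 feeds the four vpshufb instructions that follow: a single shuffle reverses the bytes inside every 32-bit lane, converting the big-endian input words into host (little-endian) order. A minimal C intrinsics sketch of the same idiom, assuming the mask bytes match libgcrypt's .Lbswap32_mask (3,2,1,0, 7,6,5,4, ...):

    /* build: cc -mssse3 bswap32.c */
    #include <stdio.h>
    #include <stdint.h>
    #include <tmmintrin.h>   /* SSSE3: _mm_shuffle_epi8 */

    int main(void)
    {
        /* Assumed byte pattern of .Lbswap32_mask: reverse each 32-bit lane. */
        const __m128i bswap32_mask = _mm_set_epi8(12, 13, 14, 15,  8,  9, 10, 11,
                                                   4,  5,  6,  7,  0,  1,  2,  3);
        uint32_t in[4] = { 0x11223344, 0x55667788, 0x99aabbcc, 0xddeeff00 };
        uint32_t out[4];

        __m128i v = _mm_loadu_si128((const __m128i *)in);
        v = _mm_shuffle_epi8(v, bswap32_mask);   /* vpshufb RTMP2, RA0, RA0 */
        _mm_storeu_si128((__m128i *)out, v);

        printf("%08x %08x %08x %08x\n", out[0], out[1], out[2], out[3]);
        /* prints: 44332211 88776655 ccbbaa99 00ffeedd */
        return 0;
    }

Loading the mask into RTMP2 once and reusing it across RA0..RA3 saves three memory operands, which is why RTMP2 is held across the four shuffles here.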
184 vmovdqa .Linv_shift_row_rol_8 rRIP, RTMP2;
202 vpshufb RTMP2, RX0, RTMP1; \
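
Line 184 loads .Linv_shift_row_rol_8 into RTMP2, and line 202 applies it to the AES-NI (vaesenclast) output, writing to RTMP1. The point of that mask is fusion: it undoes AES's ShiftRows and rotates each 32-bit word left by 8 in one vpshufb. A hypothetical sketch of how such a fused mask can be derived by composing the two byte permutations; the table values below follow the standard AES inverse-ShiftRows layout for a column-major state, but treat them as illustrative rather than as the file's exact constants:

    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
        /* AES inverse ShiftRows as a byte permutation (column-major state). */
        const uint8_t inv_shift_row[16] = {
            0, 13, 10, 7, 4, 1, 14, 11, 8, 5, 2, 15, 12, 9, 6, 3
        };
        /* rol(x, 8) on each 32-bit little-endian lane as a byte permutation. */
        const uint8_t rol_8[16] = {
            3, 0, 1, 2, 7, 4, 5, 6, 11, 8, 9, 10, 15, 12, 13, 14
        };
        uint8_t fused[16];

        /* pshufb(pshufb(v, a), b)[i] == v[a[b[i]]]: compose the index tables. */
        for (int i = 0; i < 16; i++)
            fused[i] = inv_shift_row[rol_8[i]];

        /* Candidate byte values for an .Linv_shift_row_rol_8-style mask. */
        for (int i = 0; i < 16; i++)
            printf("%2d%s", fused[i], i == 15 ? "\n" : ", ");
        return 0;
    }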
227 vmovdqa .Lbswap128_mask rRIP, RTMP2;
230 vpshufb RTMP2, RA0, RA0;
231 vpshufb RTMP2, RA1, RA1;
232 vpshufb RTMP2, RA2, RA2;
233 vpshufb RTMP2, RA3, RA3;
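
Here .Lbswap128_mask reverses all 16 bytes of each block, flipping a whole 128-bit value between big- and little-endian representation; the same mask reappears around the CTR counter arithmetic further down. A short sketch, assuming the mask is the full byte reversal 15, 14, ..., 0:

    /* build: cc -mssse3 bswap128.c */
    #include <stdio.h>
    #include <tmmintrin.h>

    int main(void)
    {
        /* e0..e15 = 15, 14, ..., 0: reverse the whole 16-byte block. */
        const __m128i bswap128_mask = _mm_set_epi8(0, 1,  2,  3,  4,  5,  6,  7,
                                                   8, 9, 10, 11, 12, 13, 14, 15);
        unsigned char in[16], out[16];
        for (int i = 0; i < 16; i++)
            in[i] = (unsigned char)i;

        __m128i v = _mm_loadu_si128((const __m128i *)in);
        v = _mm_shuffle_epi8(v, bswap128_mask);   /* vpshufb RTMP2, RA0, RA0 */
        _mm_storeu_si128((__m128i *)out, v);

        for (int i = 0; i < 16; i++)
            printf("%02x%s", out[i], i == 15 ? "\n" : " ");  /* 0f 0e ... 00 */
        return 0;
    }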
262 vmovdqa .Lbswap32_mask rRIP, RTMP2;
263 vpshufb RTMP2, RA0, RA0;
264 vpshufb RTMP2, RA1, RA1;
265 vpshufb RTMP2, RA2, RA2;
266 vpshufb RTMP2, RA3, RA3;
267 vpshufb RTMP2, RB0, RB0;
268 vpshufb RTMP2, RB1, RB1;
269 vpshufb RTMP2, RB2, RB2;
270 vpshufb RTMP2, RB3, RB3;
284 vmovdqa .Lpost_tf_lo_s rRIP, RTMP2; \
296 transform_post(RX0, RTMP2, RTMP3, MASK_4BIT, RTMP0); \
297 transform_post(RX1, RTMP2, RTMP3, MASK_4BIT, RTMP0); \
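
At lines 296-297, RTMP2 and RTMP3 hold the low/high lookup tables (.Lpost_tf_lo_s loaded at line 284) for transform_post, which maps the AES S-box output back into the SM4 S-box domain. The underlying technique is a nibble-split table lookup: MASK_4BIT separates each byte into its low and high nibble, each nibble indexes a 16-entry vpshufb table, and the two results are XORed. A minimal C model of the idiom; the identity tables in main are placeholders, not the real affine constants:

    /* build: cc -mssse3 post_tf.c */
    #include <stdio.h>
    #include <tmmintrin.h>

    /* Model of transform_post: byte b maps to lo_t[b & 0xf] ^ hi_t[b >> 4].
     * The asm clears low nibbles with vpandn before its 32-bit vpsrld; the
     * 16-bit shift plus mask below gives the same per-byte result. */
    static __m128i transform_post(__m128i x, __m128i lo_t, __m128i hi_t)
    {
        const __m128i mask4bit = _mm_set1_epi8(0x0f);
        __m128i lo = _mm_and_si128(x, mask4bit);                     /* vpand   */
        __m128i hi = _mm_and_si128(_mm_srli_epi16(x, 4), mask4bit);  /* vpandn + vpsrld $4 */
        lo = _mm_shuffle_epi8(lo_t, lo);                             /* vpshufb */
        hi = _mm_shuffle_epi8(hi_t, hi);                             /* vpshufb */
        return _mm_xor_si128(lo, hi);                                /* vpxor   */
    }

    int main(void)
    {
        /* Placeholder tables: lo_t[i] = i, hi_t[i] = i << 4 give the identity. */
        unsigned char lo_b[16], hi_b[16], out[16];
        for (int i = 0; i < 16; i++) {
            lo_b[i] = (unsigned char)i;
            hi_b[i] = (unsigned char)(i << 4);
        }
        __m128i lo_t = _mm_loadu_si128((const __m128i *)lo_b);
        __m128i hi_t = _mm_loadu_si128((const __m128i *)hi_b);
        __m128i y = transform_post(_mm_set1_epi8(0x5a), lo_t, hi_t);
        _mm_storeu_si128((__m128i *)out, y);
        printf("%02x\n", out[0]);  /* prints 5a */
        return 0;
    }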
302 vpshufb RTMP4, RX1, RTMP2; \
304 vpxor RTMP2, r0, r0; /* r0 ^ x */ \
309 vpxor RTMP3, RTMP2, RTMP2; /* x ^ rol(x,8) */ \
314 vpxor RTMP3, RTMP2, RTMP2; /* x ^ rol(x,8) ^ rol(x,16) */ \
325 vpslld $2, RTMP2, RTMP3; \
326 vpsrld $30, RTMP2, RTMP2; \
327 vpxor RTMP2, r0, r0; \
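
Lines 325-327 build a rotate-left-by-2 from a shift pair: vpslld $2 and vpsrld $30 produce the two halves of rol(x,2) (there is no SSE/AVX 32-bit rotate), and both halves are XORed into the accumulator. Together with the rol(x,8)/rol(x,16) comments above, the rotation set matches SM4's linear transform L(x) = x ^ rol(x,2) ^ rol(x,10) ^ rol(x,18) ^ rol(x,24), which is presumably what this macro computes. A plain-C sketch of the idiom (rol32 and sm4_L are illustrative helpers):

    #include <stdio.h>
    #include <stdint.h>

    static uint32_t rol32(uint32_t x, unsigned n)
    {
        return (x << n) | (x >> (32 - n));
    }

    /* SM4's linear transform for the data rounds. */
    static uint32_t sm4_L(uint32_t x)
    {
        return x ^ rol32(x, 2) ^ rol32(x, 10) ^ rol32(x, 18) ^ rol32(x, 24);
    }

    int main(void)
    {
        uint32_t x = 0x12345678, r0 = 0;

        /* The shift-pair idiom from lines 325-327, per 32-bit lane: */
        uint32_t tmp3 = x << 2;   /* vpslld $2,  RTMP2, RTMP3 */
        uint32_t tmp2 = x >> 30;  /* vpsrld $30, RTMP2, RTMP2 */
        r0 ^= tmp2;               /* vpxor RTMP2, r0, r0 */
        r0 ^= tmp3;               /* ...and the matching XOR of RTMP3 */

        printf("%08x == %08x\n", r0, rol32(x, 2));  /* both halves of rol(x,2) */
        printf("L(x) = %08x\n", sm4_L(x));
        return 0;
    }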
343 vmovdqa .Lbswap128_mask rRIP, RTMP2;
347 vpshufb RTMP2, RA0, RA0;
348 vpshufb RTMP2, RA1, RA1;
349 vpshufb RTMP2, RA2, RA2;
350 vpshufb RTMP2, RA3, RA3;
351 vpshufb RTMP2, RB0, RB0;
352 vpshufb RTMP2, RB1, RB1;
353 vpshufb RTMP2, RB2, RB2;
354 vpshufb RTMP2, RB3, RB3;
444 inc_le128(RTMP0, RNOT, RTMP2); /* +1 */
446 inc_le128(RTMP0, RNOT, RTMP2); /* +2 */
448 inc_le128(RTMP0, RNOT, RTMP2); /* +3 */
450 inc_le128(RTMP0, RNOT, RTMP2); /* +4 */
452 inc_le128(RTMP0, RNOT, RTMP2); /* +5 */
454 inc_le128(RTMP0, RNOT, RTMP2); /* +6 */
456 inc_le128(RTMP0, RNOT, RTMP2); /* +7 */
458 inc_le128(RTMP0, RNOT, RTMP2); /* +8 */
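
The eight inc_le128 calls at lines 444-458 bump the CTR counter once per block, with RTMP2 as scratch. Because the counter has been byte-reversed into little-endian order by the .Lbswap128_mask shuffles above, the increment reduces to 64-bit lane arithmetic: add one to the low qword and carry into the high qword only when the low qword was all ones, which the usual vpcmpeqq/vpsubq macro body detects with the compare performed before the add. A plain-C model under that assumption (the struct and function names are illustrative):

    #include <stdio.h>
    #include <stdint.h>
    #include <inttypes.h>

    typedef struct { uint64_t lo, hi; } le128;

    static void inc_le128(le128 *x)
    {
        /* Carry out of the low half iff it was all ones: the condition
           vpcmpeqq tests against the minus-one constant in RNOT. */
        uint64_t carry = (x->lo == UINT64_MAX);
        x->lo += 1;
        x->hi += carry;
    }

    int main(void)
    {
        le128 ctr = { UINT64_MAX - 1, 0 };
        for (int i = 1; i <= 8; i++) {        /* the +1 .. +8 sequence */
            inc_le128(&ctr);
            printf("+%d: hi=%016" PRIx64 " lo=%016" PRIx64 "\n",
                   i, ctr.hi, ctr.lo);
        }
        return 0;
    }

After the eight increments, the blocks are shuffled back through .Lbswap128_mask so the stored counters are big-endian again, matching the earlier bswap128 lines.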