Lines Matching refs: RTMP0

26 #define RTMP0        %ymm3
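RTMP0 is one of the temporary ymm registers used as scratch space throughout. The matches below appear to come from the Linux kernel's SM4 AVX2 implementation (arch/x86/crypto/sm4-aesni-avx2-asm_64.S); the file name and the surrounding register map are inferred from the instruction patterns, not stated by this listing.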
179 transpose_4x4(RA0, RA1, RA2, RA3, RTMP0, RTMP1);
180 transpose_4x4(RB0, RB1, RB2, RB3, RTMP0, RTMP1);
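The two transpose_4x4 calls reorder the loaded blocks from block-major to word-major form, so that each ymm register ends up holding the same 32-bit word position of all blocks, with RTMP0/RTMP1 as scratch. A minimal C sketch of the per-lane effect (names illustrative, not taken from the source):

    #include <stdint.h>

    /* Transpose a 4x4 matrix of 32-bit words in place. */
    static void transpose_4x4(uint32_t m[4][4])
    {
        for (int i = 0; i < 4; i++) {
            for (int j = i + 1; j < 4; j++) {
                uint32_t t = m[i][j];
                m[i][j] = m[j][i];
                m[j][i] = t;
            }
        }
    }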
197 transform_pre(RX0, RTMP4, RTMP1, MASK_4BIT, RTMP0); \
198 transform_pre(RX1, RTMP4, RTMP1, MASK_4BIT, RTMP0); \
208 transform_post(RX0, RTMP2, RTMP3, MASK_4BIT, RTMP0); \
209 transform_post(RX1, RTMP2, RTMP3, MASK_4BIT, RTMP0); \
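transform_pre and transform_post bracket the S-box step; MASK_4BIT (0x0f in every byte) splits each byte into its low and high nibbles so that vpshufb can act as a pair of 16-entry lookup tables, with RTMP0 again serving as scratch. A C sketch of the nibble-split idiom, with placeholder tables rather than the kernel's affine-transform constants:

    #include <stdint.h>

    /* Look up one byte via two 16-entry tables, one per nibble;
     * tbl_lo/tbl_hi are illustrative placeholders. */
    static uint8_t nibble_lookup(uint8_t x, const uint8_t tbl_lo[16],
                                 const uint8_t tbl_hi[16])
    {
        return tbl_lo[x & 0x0f] ^ tbl_hi[x >> 4]; /* 0x0f == MASK_4BIT */
    }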
212 vpshufb RTMP4, RX0, RTMP0; \
213 vpxor RTMP0, s0, s0; /* s0 ^ x */ \
218 vpxor RTMP1, RTMP0, RTMP0; /* x ^ rol(x,8) */ \
223 vpxor RTMP1, RTMP0, RTMP0; /* x ^ rol(x,8) ^ rol(x,16) */ \
229 vpslld $2, RTMP0, RTMP1; \
230 vpsrld $30, RTMP0, RTMP0; \
231 vpxor RTMP0, s0, s0; \
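These matches (source lines 212-231) accumulate SM4's linear transform. RTMP0 first collects x ^ rol(x,8) ^ rol(x,16), the rotations done with vpshufb byte-shuffle masks, and the vpslld $2 / vpsrld $30 pair then rotates that sum left by 2, which expands to rol(x,2) ^ rol(x,10) ^ rol(x,18); the remaining rol(x,24) term is added on neighboring lines that this search does not match. A C model of the same staging:

    #include <stdint.h>

    static uint32_t rol32(uint32_t x, int n)
    {
        return (x << n) | (x >> (32 - n));
    }

    /* SM4 linear transform L, staged as in the vector code:
     * t = x ^ rol(x,8) ^ rol(x,16); rotating t left by 2 supplies the
     * rol 2/10/18 terms in a single rotate. */
    static uint32_t sm4_L(uint32_t x)
    {
        uint32_t t = x ^ rol32(x, 8) ^ rol32(x, 16);
        return x ^ rol32(x, 24) ^ rol32(t, 2);
    }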
257 transpose_4x4(RA0, RA1, RA2, RA3, RTMP0, RTMP1);
258 transpose_4x4(RB0, RB1, RB2, RB3, RTMP0, RTMP1);
306 vinserti128 $1, RTMP4x, RTMP0, RTMP0;
307 vpshufb RTMP3, RTMP0, RA0; /* +1 ; +0 */
314 vpsubq RTMP2, RTMP0, RTMP0; /* +3 ; +2 */
315 vpshufb RTMP3, RTMP0, RA1;
316 vpsubq RTMP2, RTMP0, RTMP0; /* +5 ; +4 */
317 vpshufb RTMP3, RTMP0, RA2;
318 vpsubq RTMP2, RTMP0, RTMP0; /* +7 ; +6 */
319 vpshufb RTMP3, RTMP0, RA3;
320 vpsubq RTMP2, RTMP0, RTMP0; /* +9 ; +8 */
321 vpshufb RTMP3, RTMP0, RB0;
322 vpsubq RTMP2, RTMP0, RTMP0; /* +11 ; +10 */
323 vpshufb RTMP3, RTMP0, RB1;
324 vpsubq RTMP2, RTMP0, RTMP0; /* +13 ; +12 */
325 vpshufb RTMP3, RTMP0, RB2;
326 vpsubq RTMP2, RTMP0, RTMP0; /* +15 ; +14 */
327 vpshufb RTMP3, RTMP0, RB3;
328 vpsubq RTMP2, RTMP0, RTMP0; /* +16 */
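Source lines 306-328 are the CTR fast path. The counter is kept byte-swapped (as native little-endian qwords) in RTMP0, so a vpsubq with a per-lane constant of -2 advances both 128-bit halves by two blocks at once, and each vpshufb with the byte-swap mask (RTMP3) converts a pair back into big-endian counter blocks; this path is only taken when the low 64 bits cannot wrap across the 16 blocks. A scalar C sketch of the counter generation:

    #include <stdint.h>

    /* Produce 16 consecutive 64-bit counter values, two lanes at a time,
     * assuming the low 64 bits do not overflow (the fast-path guard). */
    static void ctr_fast_path(uint64_t ctr, uint64_t out[16])
    {
        uint64_t lo = ctr;      /* low 128-bit lane:  +0 */
        uint64_t hi = ctr + 1;  /* high 128-bit lane: +1 */

        for (int i = 0; i < 8; i++) {
            out[2 * i]     = lo;
            out[2 * i + 1] = hi;
            lo -= (uint64_t)-2; /* vpsubq with -2 == add 2 */
            hi -= (uint64_t)-2;
        }
        /* on exit, lo == ctr + 16: the value written back as the next IV */
    }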
335 inc_le128(RTMP0, RNOT, RTMP1);
336 inc_le128(RTMP0, RNOT, RTMP1);
337 vpshufb RTMP3, RTMP0, RA1; /* +3 ; +2 */
338 inc_le128(RTMP0, RNOT, RTMP1);
339 inc_le128(RTMP0, RNOT, RTMP1);
340 vpshufb RTMP3, RTMP0, RA2; /* +5 ; +4 */
341 inc_le128(RTMP0, RNOT, RTMP1);
342 inc_le128(RTMP0, RNOT, RTMP1);
343 vpshufb RTMP3, RTMP0, RA3; /* +7 ; +6 */
344 inc_le128(RTMP0, RNOT, RTMP1);
345 inc_le128(RTMP0, RNOT, RTMP1);
346 vpshufb RTMP3, RTMP0, RB0; /* +9 ; +8 */
347 inc_le128(RTMP0, RNOT, RTMP1);
348 inc_le128(RTMP0, RNOT, RTMP1);
349 vpshufb RTMP3, RTMP0, RB1; /* +11 ; +10 */
350 inc_le128(RTMP0, RNOT, RTMP1);
351 inc_le128(RTMP0, RNOT, RTMP1);
352 vpshufb RTMP3, RTMP0, RB2; /* +13 ; +12 */
353 inc_le128(RTMP0, RNOT, RTMP1);
354 inc_le128(RTMP0, RNOT, RTMP1);
355 vpshufb RTMP3, RTMP0, RB3; /* +15 ; +14 */
356 inc_le128(RTMP0, RNOT, RTMP1);
357 vextracti128 $1, RTMP0, RTMP0x;
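Source lines 335-357 are the carry-handling slow path: when the low qword may wrap, each step goes through inc_le128, a 128-bit little-endian increment that propagates carry from the low qword into the high one (done branchlessly in the assembly with a compare-generated mask), and the final vextracti128 pulls the high lane of RTMP0 out as the next IV. A minimal C model of the increment:

    #include <stdint.h>

    struct le128 { uint64_t lo, hi; };

    /* 128-bit increment, carrying from the low half into the high half. */
    static void inc_le128(struct le128 *x)
    {
        if (++x->lo == 0)  /* low qword wrapped */
            x->hi++;
    }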