Lines Matching refs:m0

588 	s_mov_b32	s_save_m0, m0
602 s_lshr_b32 m0, s_wave_size, S_WAVE_SIZE
603 s_and_b32 m0, m0, 1
604 s_cmp_eq_u32 m0, 1
671 s_mov_b32 m0, 0x0 //Next lane of v2 to write to
753 s_mov_b32 m0, 0x0 //SGPR initial index value =0
779 s_add_u32 m0, m0, 16 //next sgpr index
780 s_cmp_lt_u32 m0, 96 //scc = (m0 < first 96 SGPR) ? 1 : 0
804 s_lshr_b32 m0, s_wave_size, S_WAVE_SIZE
805 s_and_b32 m0, m0, 1
806 s_cmp_eq_u32 m0, 1
843 s_lshr_b32 m0, s_wave_size, S_WAVE_SIZE
844 s_and_b32 m0, m0, 1
845 s_cmp_eq_u32 m0, 1
846 s_mov_b32 m0, 0x0
860 s_add_u32 m0, m0, 128 //every buffer_store_lds does 128 bytes
862 s_cmp_lt_u32 m0, s_save_alloc_size //scc=(m0 < s_save_alloc_size) ? 1 : 0
879 s_add_u32 m0, m0, s3 //every buffer_store_lds does 128 bytes
882 s_cmp_lt_u32 m0, s_save_alloc_size //scc=(m0 < s_save_alloc_size) ? 1 : 0
898 s_add_u32 m0, m0, 256 //every buffer_store_lds does 256 bytes
900 s_cmp_lt_u32 m0, s_save_alloc_size //scc=(m0 < s_save_alloc_size) ? 1 : 0
917 s_add_u32 m0, m0, s3 //every buffer_store_lds does 256 bytes
920 s_cmp_lt_u32 m0, s_save_alloc_size //scc=(m0 < s_save_alloc_size) ? 1 : 0
928 s_lshr_b32 m0, s_wave_size, S_WAVE_SIZE
929 s_and_b32 m0, m0, 1
930 s_cmp_eq_u32 m0, 1
943 s_lshr_b32 m0, s_wave_size, S_WAVE_SIZE
944 s_and_b32 m0, m0, 1
945 s_cmp_eq_u32 m0, 1
953 s_mov_b32 m0, 0x4 //VGPR initial index value =4
954 s_cmp_lt_u32 m0, s_save_alloc_size
969 s_add_u32 m0, m0, 4
970 s_cmp_lt_u32 m0, s_save_alloc_size
987 s_add_u32 m0, m0, 4 //next vgpr index
989 s_cmp_lt_u32 m0, s_save_alloc_size //scc = (m0 < s_save_alloc_size) ? 1 : 0
998 s_mov_b32 m0, 0x4 //VGPR initial index value =4
999 s_cmp_lt_u32 m0, s_save_alloc_size
1014 s_add_u32 m0, m0, 4
1015 s_cmp_lt_u32 m0, s_save_alloc_size
1032 s_add_u32 m0, m0, 4 //next vgpr index
1034 s_cmp_lt_u32 m0, s_save_alloc_size //scc = (m0 < s_save_alloc_size) ? 1 : 0
1045 s_add_u32 s_save_alloc_size, s_save_alloc_size, m0
1058 s_add_u32 m0, m0, 1
1059 s_cmp_lt_u32 m0, s_save_alloc_size
1068 s_add_u32 m0, m0, 1 //next vgpr index
1070 s_cmp_lt_u32 m0, s_save_alloc_size //scc = (m0 < s_save_alloc_size) ? 1 : 0
1098 s_lshr_b32 m0, s_restore_size, S_WAVE_SIZE
1099 s_and_b32 m0, m0, 1
1100 s_cmp_eq_u32 m0, 1
1123 s_lshr_b32 m0, s_restore_size, S_WAVE_SIZE
1124 s_and_b32 m0, m0, 1
1125 s_cmp_eq_u32 m0, 1
1126 s_mov_b32 m0, 0x0
1137 s_add_u32 m0, m0, 128 // 128 DW
1139 s_cmp_lt_u32 m0, s_restore_alloc_size //scc=(m0 < s_restore_alloc_size) ? 1 : 0
1151 s_add_u32 m0, m0, 256 // 256 DW
1153 s_cmp_lt_u32 m0, s_restore_alloc_size //scc=(m0 < s_restore_alloc_size) ? 1 : 0
1161 s_lshr_b32 m0, s_restore_size, S_WAVE_SIZE
1162 s_and_b32 m0, m0, 1
1163 s_cmp_eq_u32 m0, 1
1174 s_lshr_b32 m0, s_restore_size, S_WAVE_SIZE
1175 s_and_b32 m0, m0, 1
1176 s_cmp_eq_u32 m0, 1
1184 s_mov_b32 m0, 4 //VGPR initial index value = 4
1185 s_cmp_lt_u32 m0, s_restore_alloc_size
1198 s_add_u32 m0, m0, 4 //next vgpr index
1200 s_cmp_lt_u32 m0, s_restore_alloc_size //scc = (m0 < s_restore_alloc_size) ? 1 : 0
1218 s_mov_b32 m0, 4 //VGPR initial index value = 4
1219 s_cmp_lt_u32 m0, s_restore_alloc_size
1232 s_add_u32 m0, m0, 4 //next vgpr index
1234 s_cmp_lt_u32 m0, s_restore_alloc_size //scc = (m0 < s_restore_alloc_size) ? 1 : 0
1245 s_add_u32 s_restore_alloc_size, s_restore_alloc_size, m0
1252 s_add_u32 m0, m0, 1 //next vgpr index
1254 s_cmp_lt_u32 m0, s_restore_alloc_size //scc = (m0 < s_restore_alloc_size) ? 1 : 0
1279 s_mov_b32 m0, s_sgpr_save_num
1284 s_sub_u32 m0, m0, 4 // Restore from S[0] to S[104]
1293 s_sub_u32 m0, m0, 8 // Restore from S[0] to S[96]
1305 s_sub_u32 m0, m0, 16 // Restore from S[n] to S[0]
1317 s_cmp_eq_u32 m0, 0 //scc = (m0 == 0) ? 1 : 0, i.e. all s_sgpr_save_num SGPRs restored
1399 s_mov_b32 m0, s_restore_m0
1472 v_writelane_b32 v2, s, m0
1473 s_add_u32 m0, m0, 0x1
1475 s_mov_b32 exec_lo, m0
1476 s_mov_b32 m0, s_mem_offset
1477 s_buffer_store_dword s, s_rsrc, m0 S_COHERENCE
1479 s_mov_b32 m0, exec_lo