Lines Matching refs:m0

393     s_mov_b32	    s_save_m0,		m0								    //save M0
464 s_mov_b32 m0, 0x0 //SGPR initial index value =0
468 s_movrels_b64 s0, s0 //s0 = s[0+m0], s1 = s[1+m0]
469 s_movrels_b64 s2, s2 //s2 = s[2+m0], s3 = s[3+m0]
470 s_movrels_b64 s4, s4 //s4 = s[4+m0], s5 = s[5+m0]
471 s_movrels_b64 s6, s6 //s6 = s[6+m0], s7 = s[7+m0]
472 s_movrels_b64 s8, s8 //s8 = s[8+m0], s9 = s[9+m0]
473 s_movrels_b64 s10, s10 //s10 = s[10+m0], s11 = s[11+m0]
474 s_movrels_b64 s12, s12 //s12 = s[12+m0], s13 = s[13+m0]
475 s_movrels_b64 s14, s14 //s14 = s[14+m0], s15 = s[15+m0]
478 s_add_u32 m0, m0, 16 //next sgpr index
479 s_cmp_lt_u32 m0, s_save_alloc_size //scc = (m0 < s_save_alloc_size) ? 1 : 0
548 s_mov_b32 m0, 0x0 //lds_offset initial value = 0
576 s_mov_b32 m0, 0x10000
612 s_mov_b32 m0, 0x4 //VGPR initial index value = 4 (presumably v0-v3 handled separately — confirm against full source)
613 s_cmp_lt_u32 m0, s_save_alloc_size
617 s_set_gpr_idx_on m0, 0x1 //M0[7:0] = M0[7:0] and M0[15:12] = 0x1
618 s_add_u32 s_save_alloc_size, s_save_alloc_size, 0x1000 //add 0x1000 since we compare m0 against it later
627 s_add_u32 m0, m0, 4
628 s_cmp_lt_u32 m0, s_save_alloc_size
636 v_mov_b32 v0, v0 //v0 = v[0+m0]
637 v_mov_b32 v1, v1 //v1 = v[1+m0]
638 v_mov_b32 v2, v2 //v2 = v[2+m0]
639 v_mov_b32 v3, v3 //v3 = v[3+m0]
643 s_add_u32 m0, m0, 4 //next vgpr index
645 s_cmp_lt_u32 m0, s_save_alloc_size //scc = (m0 < s_save_alloc_size) ? 1 : 0
659 s_add_u32 s_save_alloc_size, s_save_alloc_size, 0x1000 //add 0x1000 since we compare m0 against it later
662 s_mov_b32 m0, 0x0 //VGPR initial index value =0
663 s_set_gpr_idx_on m0, 0x1 //M0[7:0] = M0[7:0] and M0[15:12] = 0x1
671 v_accvgpr_read v[vgpr], acc[vgpr] // v[N] = acc[N+m0]
676 s_add_u32 m0, m0, 4
677 s_cmp_lt_u32 m0, s_save_alloc_size
686 v_accvgpr_read v[vgpr], acc[vgpr] // v[N] = acc[N+m0]
691 s_add_u32 m0, m0, 4
693 s_cmp_lt_u32 m0, s_save_alloc_size
746 s_mov_b32 m0, 0x0 //lds_offset initial value = 0
751 s_add_u32 m0, m0, 256*2 // 128 DW
753 s_cmp_lt_u32 m0, s_restore_alloc_size //scc=(m0 < s_restore_alloc_size) ? 1 : 0
766 s_add_u32 s_restore_alloc_size, s_restore_alloc_size, 0x8000 //add 0x8000 since we compare m0 against it later
772 s_mov_b32 m0, 4 //VGPR initial index value = 4
773 s_set_gpr_idx_on m0, 0x8 //M0[7:0] = M0[7:0] and M0[15:12] = 0x8
777 v_mov_b32 v0, v0 //v[0+m0] = v0
781 s_add_u32 m0, m0, 4 //next vgpr index
783 s_cmp_lt_u32 m0, s_restore_alloc_size //scc = (m0 < s_restore_alloc_size) ? 1 : 0
791 s_add_u32 s_restore_alloc_size, s_restore_alloc_size, 0x8000 //add 0x8000 since we compare m0 against it later
796 s_mov_b32 m0, 0
797 s_set_gpr_idx_on m0, 0x8 //M0[7:0] = M0[7:0] and M0[15:12] = 0x8
806 s_add_u32 m0, m0, 4 //next vgpr index
808 s_cmp_lt_u32 m0, s_restore_alloc_size //scc = (m0 < s_restore_alloc_size) ? 1 : 0
835 s_mov_b32 m0, s_restore_alloc_size
841 s_sub_u32 m0, m0, 16 // Restore from S[n] to S[0]
844 s_movreld_b64 s0, s0 //s[0+m0] = s0
853 s_cmp_eq_u32 m0, 0 //scc = (m0 == 0) ? 1 : 0
883 s_mov_b32 m0, s_restore_m0
935 s_mov_b32 exec_lo, m0 //assuming exec_lo is not needed anymore from this point on
936 s_mov_b32 m0, s_mem_offset
937 s_buffer_store_dword s, s_rsrc, m0 glc:1
940 s_mov_b32 m0, exec_lo