Directory: /freebsd-13-stable/contrib/llvm-project/llvm/lib/Target/AMDGPU/

Lines Matching refs:TII
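
Context for the matches below: TII is the SIInstrInfo reference the selector caches from the subtarget in its constructor (the match at source line 57; see also the const SIInstrInfo &TII parameters near lines 3456 and 3465). Almost every hit is one of two patterns: rewriting an instruction in place with I.setDesc(TII.get(NewOpc)), or emitting a new instruction with BuildMI(*BB, &I, DL, TII.get(Opc), ...) and then calling constrainSelectedInstRegOperands() on it. The following is a minimal sketch of the second pattern, not code from the file; the helper name, opcode, and operands are placeholders standing in for the many concrete cases above (S_ADD_U32, V_CNDMASK_B32_e64, COPY, and so on).

```cpp
// Hypothetical helper, for illustration only: emit `Opcode DstReg, SrcReg`
// in front of the instruction being replaced, delete the original, and
// constrain the new instruction's virtual registers to classes the chosen
// opcode accepts. This mirrors the BuildMI + constrain pattern seen in the
// matches (e.g. source lines 353-357).
#include "SIInstrInfo.h"
#include "llvm/CodeGen/GlobalISel/Utils.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"

using namespace llvm;

static bool emitViaTII(MachineBasicBlock &BB, MachineInstr &I,
                       const SIInstrInfo &TII, const SIRegisterInfo &TRI,
                       const RegisterBankInfo &RBI, unsigned Opcode,
                       Register DstReg, Register SrcReg) {
  const DebugLoc &DL = I.getDebugLoc();
  // TII.get(Opcode) supplies the MCInstrDesc that BuildMI uses to create
  // the replacement machine instruction.
  MachineInstr *NewMI =
      BuildMI(BB, &I, DL, TII.get(Opcode), DstReg).addReg(SrcReg);
  I.eraseFromParent();
  // Pin the operands' register classes; returns false if that fails,
  // which the selector treats as a failed selection.
  return constrainSelectedInstRegOperands(*NewMI, TII, TRI, RBI);
}
```

The in-place variant skips the rebuild entirely: the selector calls I.setDesc(TII.get(NewOpc)) to swap the opcode and then constrains the same instruction, as in the matches at source lines 98, 300, and 701-702.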

57     : InstructionSelector(), TII(*STI.getInstrInfo()),
98 MI.setDesc(TII.get(NewOpc));
123 I.setDesc(TII.get(TargetOpcode::COPY));
155 BuildMI(*BB, &I, DL, TII.get(AndOpc), MaskedReg)
158 BuildMI(*BB, &I, DL, TII.get(AMDGPU::V_CMP_NE_U32_e64), DstReg)
236 I.setDesc(TII.get(TargetOpcode::PHI));
252 BuildMI(*BB, MI, MI->getDebugLoc(), TII.get(AMDGPU::COPY), DstReg)
300 I.setDesc(TII.get(InstOpc));
323 I.setDesc(TII.get(InstOpc));
329 return constrainSelectedInstRegOperands(I, TII, TRI, RBI);
353 BuildMI(*BB, &I, DL, TII.get(Opc), DstReg)
357 return constrainSelectedInstRegOperands(*Add, TII, TRI, RBI);
362 I.setDesc(TII.get(Opc));
365 return constrainSelectedInstRegOperands(I, TII, TRI, RBI);
372 = BuildMI(*BB, &I, DL, TII.get(Opc), DstReg)
378 return constrainSelectedInstRegOperands(*Add, TII, TRI, RBI);
397 BuildMI(*BB, &I, DL, TII.get(AMDGPU::S_ADD_U32), DstLo)
400 BuildMI(*BB, &I, DL, TII.get(AMDGPU::S_ADDC_U32), DstHi)
406 BuildMI(*BB, &I, DL, TII.get(AMDGPU::V_ADD_I32_e64), DstLo)
411 MachineInstr *Addc = BuildMI(*BB, &I, DL, TII.get(AMDGPU::V_ADDC_U32_e64), DstHi)
418 if (!constrainSelectedInstRegOperands(*Addc, TII, TRI, RBI))
422 BuildMI(*BB, &I, DL, TII.get(AMDGPU::REG_SEQUENCE), DstReg)
454 I.setDesc(TII.get(HasCarryIn ? CarryOpc : NoCarryOpc));
457 return constrainSelectedInstRegOperands(I, TII, TRI, RBI);
464 BuildMI(*BB, &I, DL, TII.get(AMDGPU::COPY), AMDGPU::SCC)
471 BuildMI(*BB, &I, DL, TII.get(HasCarryIn ? CarryOpc : NoCarryOpc), Dst0Reg)
474 BuildMI(*BB, &I, DL, TII.get(AMDGPU::COPY), Dst1Reg)
530 SrcReg = constrainOperandRegClass(*MF, TRI, *MRI, TII, RBI, I,
533 BuildMI(*BB, &I, DL, TII.get(TargetOpcode::COPY), DstReg)
560 BuildMI(*BB, &MI, DL, TII.get(TargetOpcode::REG_SEQUENCE), DstReg);
608 BuildMI(*BB, &MI, DL, TII.get(TargetOpcode::COPY), Dst.getReg())
654 MI.setDesc(TII.get(AMDGPU::COPY));
693 auto MIB = BuildMI(*BB, &MI, DL, TII.get(AMDGPU::S_LSHR_B32), Dst)
698 return constrainSelectedInstRegOperands(*MIB, TII, TRI, RBI);
701 MI.setDesc(TII.get(Opc));
702 return constrainSelectedInstRegOperands(MI, TII, TRI, RBI);
717 I.setDesc(TII.get(TargetOpcode::IMPLICIT_DEF));
770 BuildMI(*BB, &I, DL, TII.get(TargetOpcode::INSERT_SUBREG), DstReg)
802 BuildMI(*MBB, &MI, DL, TII.get(AMDGPU::COPY), AMDGPU::M0)
804 BuildMI(*MBB, &MI, DL, TII.get(AMDGPU::V_INTERP_MOV_F32), InterpMov)
809 BuildMI(*MBB, &MI, DL, TII.get(AMDGPU::V_INTERP_P1LV_F16), Dst)
848 auto MIB = BuildMI(*MBB, &MI, DL, TII.get(Opc), Dst0)
855 return constrainSelectedInstRegOperands(*MIB, TII, TRI, RBI);
866 BuildMI(*BB, &I, I.getDebugLoc(), TII.get(AMDGPU::SI_IF_BREAK))
989 MachineInstr *ICmp = BuildMI(*BB, &I, DL, TII.get(Opcode))
992 BuildMI(*BB, &I, DL, TII.get(AMDGPU::COPY), CCReg)
995 constrainSelectedInstRegOperands(*ICmp, TII, TRI, RBI) &&
1005 MachineInstr *ICmp = BuildMI(*BB, &I, DL, TII.get(Opcode),
1011 bool Ret = constrainSelectedInstRegOperands(*ICmp, TII, TRI, RBI);
1034 MachineInstr *ICmp = BuildMI(*BB, &I, DL, TII.get(Opcode), Dst)
1039 bool Ret = constrainSelectedInstRegOperands(*ICmp, TII, TRI, RBI);
1061 BuildMI(*BB, &I, DL, TII.get(Opcode), DstReg).addImm(0);
1064 BuildMI(*BB, &I, DL, TII.get(AMDGPU::COPY), DstReg).addReg(SrcReg);
1069 BuildMI(*BB, &I, DL, TII.get(AMDGPU::COPY), DstReg).addReg(SrcReg);
1080 BuildMI(*BB, &MI, MI.getDebugLoc(), TII.get(AMDGPU::SI_END_CF))
1156 BuildMI(*MBB, &MI, DL, TII.get(AMDGPU::COPY), AMDGPU::M0)
1162 BuildMI(*MBB, &MI, DL, TII.get(AMDGPU::DS_ORDERED_COUNT), DstReg)
1170 bool Ret = constrainSelectedInstRegOperands(*DS, TII, TRI, RBI);
1235 BuildMI(*MBB, &MI, DL, TII.get(AMDGPU::S_MOV_B32), AMDGPU::M0)
1256 BuildMI(*MBB, &MI, DL, TII.get(AMDGPU::S_LSHL_B32), M0Base)
1260 BuildMI(*MBB, &MI, DL, TII.get(AMDGPU::COPY), AMDGPU::M0)
1267 auto MIB = BuildMI(*MBB, &MI, DL, TII.get(gwsIntrinToOpcode(IID)));
1303 BuildMI(*MBB, &MI, DL, TII.get(AMDGPU::COPY), AMDGPU::M0)
1305 BuildMI(*MBB, &MI, DL, TII.get(Opc), MI.getOperand(0).getReg())
1536 auto MIB = BuildMI(*MBB, &MI, DL, TII.get(Opcode))
1548 BuildMI(*MBB, &MI, DL, TII.get(AMDGPU::COPY), VDataOut)
1594 return constrainSelectedInstRegOperands(*MIB, TII, TRI, RBI);
1638 MachineInstr *CopySCC = BuildMI(*BB, &I, DL, TII.get(AMDGPU::COPY), AMDGPU::SCC)
1646 MachineInstr *Select = BuildMI(*BB, &I, DL, TII.get(SelectOpcode), DstReg)
1650 bool Ret = constrainSelectedInstRegOperands(*Select, TII, TRI, RBI) |
1651 constrainSelectedInstRegOperands(*CopySCC, TII, TRI, RBI);
1661 BuildMI(*BB, &I, DL, TII.get(AMDGPU::V_CNDMASK_B32_e64), DstReg)
1668 bool Ret = constrainSelectedInstRegOperands(*Select, TII, TRI, RBI);
1742 BuildMI(*MBB, I, DL, TII.get(AMDGPU::COPY), LoReg)
1744 BuildMI(*MBB, I, DL, TII.get(AMDGPU::COPY), HiReg)
1751 BuildMI(*MBB, I, DL, TII.get(AMDGPU::V_MOV_B32_sdwa), DstReg)
1765 BuildMI(*MBB, I, DL, TII.get(AMDGPU::V_LSHLREV_B32_e64), TmpReg0)
1769 BuildMI(*MBB, I, DL, TII.get(AMDGPU::S_LSHL_B32), TmpReg0)
1778 BuildMI(*MBB, I, DL, TII.get(MovOpc), ImmReg)
1780 BuildMI(*MBB, I, DL, TII.get(AndOpc), TmpReg1)
1783 BuildMI(*MBB, I, DL, TII.get(OrOpc), DstReg)
1815 I.setDesc(TII.get(TargetOpcode::COPY));
1869 BuildMI(MBB, I, DL, TII.get(AMDGPU::V_AND_B32_e32), DstReg)
1873 return constrainSelectedInstRegOperands(*ExtI, TII, TRI, RBI);
1878 BuildMI(MBB, I, DL, TII.get(BFE), DstReg)
1883 return constrainSelectedInstRegOperands(*ExtI, TII, TRI, RBI);
1895 BuildMI(MBB, I, DL, TII.get(SextOpc), DstReg)
1911 BuildMI(MBB, I, DL, TII.get(AMDGPU::IMPLICIT_DEF), UndefReg);
1912 BuildMI(MBB, I, DL, TII.get(AMDGPU::REG_SEQUENCE), ExtReg)
1918 BuildMI(MBB, I, DL, TII.get(BFE64), DstReg)
1928 BuildMI(MBB, I, DL, TII.get(AMDGPU::S_AND_B32), DstReg)
1932 BuildMI(MBB, I, DL, TII.get(BFE32), DstReg)
1974 I.setDesc(TII.get(Opcode));
1976 return constrainSelectedInstRegOperands(I, TII, TRI, RBI);
1984 if (IsSgpr && TII.isInlineConstant(Imm)) {
1985 ResInst = BuildMI(*BB, &I, DL, TII.get(AMDGPU::S_MOV_B64), DstReg)
1993 BuildMI(*BB, &I, DL, TII.get(Opcode), LoReg)
1996 BuildMI(*BB, &I, DL, TII.get(Opcode), HiReg)
1999 ResInst = BuildMI(*BB, &I, DL, TII.get(AMDGPU::REG_SEQUENCE), DstReg)
2050 BuildMI(*BB, &MI, DL, TII.get(AMDGPU::COPY), LoReg)
2052 BuildMI(*BB, &MI, DL, TII.get(AMDGPU::COPY), HiReg)
2054 BuildMI(*BB, &MI, DL, TII.get(AMDGPU::S_MOV_B32), ConstReg)
2059 BuildMI(*BB, &MI, DL, TII.get(Opc), OpReg)
2062 BuildMI(*BB, &MI, DL, TII.get(AMDGPU::REG_SEQUENCE), Dst)
2091 BuildMI(*BB, &MI, DL, TII.get(AMDGPU::COPY), LoReg)
2093 BuildMI(*BB, &MI, DL, TII.get(AMDGPU::COPY), HiReg)
2095 BuildMI(*BB, &MI, DL, TII.get(AMDGPU::S_MOV_B32), ConstReg)
2100 BuildMI(*BB, &MI, DL, TII.get(AMDGPU::S_AND_B32), OpReg)
2103 BuildMI(*BB, &MI, DL, TII.get(AMDGPU::REG_SEQUENCE), Dst)
2189 BuildMI(*BB, &I, I.getDebugLoc(), TII.get(AMDGPU::S_MOV_B32), AMDGPU::M0)
2232 auto MIB = BuildMI(*BB, &MI, DL, TII.get(Opcode), TmpReg)
2248 BuildMI(*BB, &MI, DL, TII.get(AMDGPU::COPY), DstReg)
2255 return constrainSelectedInstRegOperands(*MIB, TII, TRI, RBI);
2295 BuildMI(*BB, &I, DL, TII.get(AMDGPU::COPY), CondPhysReg)
2297 BuildMI(*BB, &I, DL, TII.get(BrOpcode))
2309 I.setDesc(TII.get(IsVGPR ? AMDGPU::V_MOV_B32_e32 : AMDGPU::S_MOV_B32));
2353 BuildMI(*BB, &I, DL, TII.get(NewOpc), DstReg)
2364 BuildMI(*BB, &I, DL, TII.get(AMDGPU::COPY), LoReg)
2366 BuildMI(*BB, &I, DL, TII.get(AMDGPU::COPY), HiReg)
2385 BuildMI(*BB, &I, DL, TII.get(AMDGPU::COPY), MaskLo)
2387 BuildMI(*BB, &I, DL, TII.get(NewOpc), MaskedLo)
2399 BuildMI(*BB, &I, DL, TII.get(AMDGPU::COPY), MaskHi)
2401 BuildMI(*BB, &I, DL, TII.get(NewOpc), MaskedHi)
2406 BuildMI(*BB, &I, DL, TII.get(AMDGPU::REG_SEQUENCE), DstReg)
2486 BuildMI(*BB, &MI, DL, TII.get(AMDGPU::COPY), AMDGPU::M0)
2490 BuildMI(*BB, &MI, DL, TII.get(Opc), DstReg)
2501 BuildMI(*BB, &MI, DL, TII.get(AMDGPU::COPY), AMDGPU::M0)
2503 BuildMI(*BB, &MI, DL, TII.get(AMDGPU::V_MOVRELS_B32_e32), DstReg)
2510 BuildMI(*BB, MI, DL, TII.get(AMDGPU::S_SET_GPR_IDX_ON))
2513 BuildMI(*BB, MI, DL, TII.get(AMDGPU::V_MOV_B32_e32), DstReg)
2517 BuildMI(*BB, MI, DL, TII.get(AMDGPU::S_SET_GPR_IDX_OFF));
2572 BuildMI(*BB, MI, DL, TII.get(AMDGPU::S_SET_GPR_IDX_ON))
2576 BuildMI(*BB, &MI, DL, TII.get(AMDGPU::COPY), AMDGPU::M0)
2581 = TII.getIndirectRegWritePseudo(VecSize, ValSize,
2589 BuildMI(*BB, MI, DL, TII.get(AMDGPU::S_SET_GPR_IDX_OFF));
2653 BuildMI(*MBB, MI, DL, TII.get(AMDGPU::IMPLICIT_DEF), DstReg);
2669 BuildMI(*MBB, MI, DL, TII.get(AMDGPU::COPY), DstReg)
2678 BuildMI(*MBB, MI, DL, TII.get(AMDGPU::V_LSHRREV_B32_e64), DstReg)
2682 BuildMI(*MBB, MI, DL, TII.get(AMDGPU::S_LSHR_B32), DstReg)
2688 BuildMI(*MBB, MI, DL, TII.get(AMDGPU::V_LSHLREV_B32_e64), DstReg)
2692 BuildMI(*MBB, MI, DL, TII.get(AMDGPU::S_LSHL_B32), DstReg)
2700 BuildMI(*MBB, MI, DL, TII.get(AMDGPU::V_MOV_B32_sdwa), DstReg)
2710 BuildMI(*MBB, MI, DL, TII.get(AMDGPU::S_PACK_LL_B32_B16), DstReg)
2718 BuildMI(*MBB, MI, DL, TII.get(AMDGPU::V_MOV_B32_sdwa), DstReg)
2728 BuildMI(*MBB, MI, DL, TII.get(AMDGPU::S_PACK_HH_B32_B16), DstReg)
2734 BuildMI(*MBB, MI, DL, TII.get(AMDGPU::V_ALIGNBIT_B32), DstReg)
2740 BuildMI(*MBB, MI, DL, TII.get(AMDGPU::S_LSHR_B32), TmpReg)
2743 BuildMI(*MBB, MI, DL, TII.get(AMDGPU::S_PACK_LL_B32_B16), DstReg)
2914 TII.get(AMDGPU::COPY), VGPRSrc)
3106 BuildMI(*MBB, MI, MI->getDebugLoc(), TII.get(AMDGPU::S_MOV_B32), OffsetReg)
3138 if (!TII.isLegalFLATOffset(Offset.getValue(), AddrSpace, Signed))
3179 BuildMI(*MBB, MI, MI->getDebugLoc(), TII.get(AMDGPU::V_MOV_B32_e32),
3456 const SIInstrInfo &TII, Register BasePtr) {
3457 uint64_t DefaultFormat = TII.getDefaultRsrcDataFormat();
3465 const SIInstrInfo &TII, Register BasePtr) {
3466 uint64_t DefaultFormat = TII.getDefaultRsrcDataFormat();
3577 RSrcReg = buildAddr64RSrc(B, *MRI, TII, SRDPtr);
3597 RSrcReg = buildOffsetSrc(B, *MRI, TII, SRDPtr);
3849 return TII.isInlineConstant(Imm);