//===- ARCInstrInfo.cpp - ARC Instruction Information -----------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains the ARC implementation of the TargetInstrInfo class.
//
//===----------------------------------------------------------------------===//

#include "ARCInstrInfo.h"
#include "ARC.h"
#include "ARCMachineFunctionInfo.h"
#include "ARCSubtarget.h"
#include "MCTargetDesc/ARCInfo.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineMemOperand.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/TargetRegistry.h"

using namespace llvm;

#define GET_INSTRINFO_CTOR_DTOR
#include "ARCGenInstrInfo.inc"

#define DEBUG_TYPE "arc-inst-info"

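// Addressing-mode values and TSFlags masks packed into MCInstrDesc::TSFlags;
// decoded by isPostIncrement() and isPreIncrement() below.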
enum AddrIncType {
  NoAddInc = 0,
  PreInc   = 1,
  PostInc  = 2,
  Scaled   = 3
};

enum TSFlagsConstants {
  TSF_AddrModeOff = 0,
  TSF_AddModeMask = 3
};

// Pin the vtable to this file.
void ARCInstrInfo::anchor() {}

ARCInstrInfo::ARCInstrInfo()
    : ARCGenInstrInfo(ARC::ADJCALLSTACKDOWN, ARC::ADJCALLSTACKUP), RI() {}

static bool isZeroImm(const MachineOperand &Op) {
  return Op.isImm() && Op.getImm() == 0;
}

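// Helpers that recognize the simple reg+s9 load/store forms; these are the
// only forms treated as direct stack-slot accesses below.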
static bool isLoad(int Opcode) {
  return Opcode == ARC::LD_rs9 || Opcode == ARC::LDH_rs9 ||
         Opcode == ARC::LDB_rs9;
}

static bool isStore(int Opcode) {
  return Opcode == ARC::ST_rs9 || Opcode == ARC::STH_rs9 ||
         Opcode == ARC::STB_rs9;
}

/// If the specified machine instruction is a direct
/// load from a stack slot, return the virtual or physical register number of
/// the destination along with the FrameIndex of the loaded stack slot.  If
/// not, return 0.  This predicate must return 0 if the instruction has
/// any side effects other than loading from the stack slot.
unsigned ARCInstrInfo::isLoadFromStackSlot(const MachineInstr &MI,
                                           int &FrameIndex) const {
  int Opcode = MI.getOpcode();
  if (isLoad(Opcode)) {
    if ((MI.getOperand(1).isFI()) &&  // is a stack slot
        (MI.getOperand(2).isImm()) && // the imm is zero
        (isZeroImm(MI.getOperand(2)))) {
      FrameIndex = MI.getOperand(1).getIndex();
      return MI.getOperand(0).getReg();
    }
  }
  return 0;
}

/// If the specified machine instruction is a direct
/// store to a stack slot, return the virtual or physical register number of
/// the source reg along with the FrameIndex of the stored stack slot.  If
/// not, return 0.  This predicate must return 0 if the instruction has
/// any side effects other than storing to the stack slot.
unsigned ARCInstrInfo::isStoreToStackSlot(const MachineInstr &MI,
                                          int &FrameIndex) const {
  int Opcode = MI.getOpcode();
  if (isStore(Opcode)) {
    if ((MI.getOperand(1).isFI()) &&  // is a stack slot
        (MI.getOperand(2).isImm()) && // the imm is zero
        (isZeroImm(MI.getOperand(2)))) {
      FrameIndex = MI.getOperand(1).getIndex();
      return MI.getOperand(0).getReg();
    }
  }
  return 0;
}

/// Return the inverse of the passed condition, e.g. turning ARCCC::EQ into
/// ARCCC::NE.
static ARCCC::CondCode GetOppositeBranchCondition(ARCCC::CondCode CC) {
  switch (CC) {
  default:
    llvm_unreachable("Illegal condition code!");
  case ARCCC::EQ:
    return ARCCC::NE;
  case ARCCC::NE:
    return ARCCC::EQ;
  case ARCCC::LO:
    return ARCCC::HS;
  case ARCCC::HS:
    return ARCCC::LO;
  case ARCCC::GT:
    return ARCCC::LE;
  case ARCCC::GE:
    return ARCCC::LT;
  case ARCCC::VS:
    return ARCCC::VC;
  case ARCCC::VC:
    return ARCCC::VS;
  case ARCCC::LT:
    return ARCCC::GE;
  case ARCCC::LE:
    return ARCCC::GT;
  case ARCCC::HI:
    return ARCCC::LS;
  case ARCCC::LS:
    return ARCCC::HI;
  case ARCCC::NZ:
    return ARCCC::Z;
  case ARCCC::Z:
    return ARCCC::NZ;
  }
}

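// Opcode classification helpers used by the branch analysis routines below.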
static bool isUncondBranchOpcode(int Opc) { return Opc == ARC::BR; }

static bool isCondBranchOpcode(int Opc) {
  return Opc == ARC::BRcc_rr_p || Opc == ARC::BRcc_ru6_p;
}

static bool isJumpOpcode(int Opc) { return Opc == ARC::J; }

/// Analyze the branching code at the end of MBB, returning
/// true if it cannot be understood (e.g. it's a switch dispatch or isn't
/// implemented for a target).  Upon success, this returns false and returns
/// with the following information in various cases:
///
/// 1. If this block ends with no branches (it just falls through to its succ)
///    just return false, leaving TBB/FBB null.
/// 2. If this block ends with only an unconditional branch, it sets TBB to be
///    the destination block.
/// 3. If this block ends with a conditional branch and it falls through to a
///    successor block, it sets TBB to be the branch destination block and
///    Cond to a list of operands that evaluate the condition. These operands
///    can be passed to other TargetInstrInfo methods to create new branches.
/// 4. If this block ends with a conditional branch followed by an
///    unconditional branch, it returns the 'true' destination in TBB, the
///    'false' destination in FBB, and a list of operands that evaluate the
///    condition.  These operands can be passed to other TargetInstrInfo
///    methods to create new branches.
///
/// Note that removeBranch and insertBranch must be implemented to support
/// cases where this method returns success.
///
/// If AllowModify is true, then this routine is allowed to modify the basic
/// block (e.g. delete instructions after the unconditional branch).

bool ARCInstrInfo::analyzeBranch(MachineBasicBlock &MBB,
                                 MachineBasicBlock *&TBB,
                                 MachineBasicBlock *&FBB,
                                 SmallVectorImpl<MachineOperand> &Cond,
                                 bool AllowModify) const {
  TBB = FBB = nullptr;
  MachineBasicBlock::iterator I = MBB.end();
  if (I == MBB.begin())
    return false;
  --I;

  while (isPredicated(*I) || I->isTerminator() || I->isDebugValue()) {
    // Flag to be raised on unanalyzable instructions. This is useful in cases
    // where we want to clean up the end of the basic block before we bail
    // out.
    bool CantAnalyze = false;

    // Skip over DEBUG values and predicated nonterminators.
    while (I->isDebugInstr() || !I->isTerminator()) {
      if (I == MBB.begin())
        return false;
      --I;
    }

    if (isJumpOpcode(I->getOpcode())) {
      // Indirect branches and jump tables can't be analyzed, but we still want
      // to clean up any instructions at the tail of the basic block.
      CantAnalyze = true;
    } else if (isUncondBranchOpcode(I->getOpcode())) {
      TBB = I->getOperand(0).getMBB();
    } else if (isCondBranchOpcode(I->getOpcode())) {
      // Bail out if we encounter multiple conditional branches.
      if (!Cond.empty())
        return true;

      assert(!FBB && "FBB should have been null.");
      FBB = TBB;
      TBB = I->getOperand(0).getMBB();
      Cond.push_back(I->getOperand(1));
      Cond.push_back(I->getOperand(2));
      Cond.push_back(I->getOperand(3));
    } else if (I->isReturn()) {
      // Returns can't be analyzed, but we should run cleanup.
      CantAnalyze = !isPredicated(*I);
    } else {
      // We encountered an unrecognized terminator. Bail out immediately.
      return true;
    }

    // Cleanup code - to be run for unpredicated unconditional branches and
    //                returns.
    if (!isPredicated(*I) && (isUncondBranchOpcode(I->getOpcode()) ||
                              isJumpOpcode(I->getOpcode()) || I->isReturn())) {
      // Forget any previous conditional branch information - it no longer
      // applies.
      Cond.clear();
      FBB = nullptr;

      // If we can modify the function, delete everything below this
      // unconditional branch.
      if (AllowModify) {
        MachineBasicBlock::iterator DI = std::next(I);
        while (DI != MBB.end()) {
          MachineInstr &InstToDelete = *DI;
          ++DI;
          InstToDelete.eraseFromParent();
        }
      }
    }

    if (CantAnalyze)
      return true;

    if (I == MBB.begin())
      return false;

    --I;
  }

  // We made it past the terminators without bailing out - we must have
  // analyzed this branch successfully.
  return false;
}

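/// Remove the branch(es) at the end of MBB: a trailing unconditional or
/// conditional branch, possibly preceded by a conditional branch. Returns
/// the number of instructions removed (0, 1, or 2).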
unsigned ARCInstrInfo::removeBranch(MachineBasicBlock &MBB,
                                    int *BytesRemoved) const {
  assert(!BytesRemoved && "Code size not handled");
  MachineBasicBlock::iterator I = MBB.getLastNonDebugInstr();
  if (I == MBB.end())
    return 0;

  if (!isUncondBranchOpcode(I->getOpcode()) &&
      !isCondBranchOpcode(I->getOpcode()))
    return 0;

  // Remove the branch.
  I->eraseFromParent();

  I = MBB.end();

  if (I == MBB.begin())
    return 1;
  --I;
  if (!isCondBranchOpcode(I->getOpcode()))
    return 1;

  // Remove the branch.
  I->eraseFromParent();
  return 2;
}

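/// Copy SrcReg into DestReg with a MOV_rr. Only GPR32-to-GPR32 copies are
/// supported.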
void ARCInstrInfo::copyPhysReg(MachineBasicBlock &MBB,
                               MachineBasicBlock::iterator I,
                               const DebugLoc &dl, MCRegister DestReg,
                               MCRegister SrcReg, bool KillSrc) const {
  assert(ARC::GPR32RegClass.contains(SrcReg) &&
         "Only GPR32 src copy supported.");
  assert(ARC::GPR32RegClass.contains(DestReg) &&
         "Only GPR32 dest copy supported.");
  BuildMI(MBB, I, dl, get(ARC::MOV_rr), DestReg)
      .addReg(SrcReg, getKillRegState(KillSrc));
}

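/// Spill SrcReg to the stack slot FrameIndex with a 4-byte ST_rs9 at offset
/// zero. Only GPR32 registers are supported.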
void ARCInstrInfo::storeRegToStackSlot(MachineBasicBlock &MBB,
                                       MachineBasicBlock::iterator I,
                                       unsigned SrcReg, bool isKill,
                                       int FrameIndex,
                                       const TargetRegisterClass *RC,
                                       const TargetRegisterInfo *TRI) const {
  DebugLoc dl = MBB.findDebugLoc(I);
  MachineFunction &MF = *MBB.getParent();
  MachineFrameInfo &MFI = MF.getFrameInfo();
  unsigned Align = MFI.getObjectAlignment(FrameIndex);

  MachineMemOperand *MMO = MF.getMachineMemOperand(
      MachinePointerInfo::getFixedStack(MF, FrameIndex),
      MachineMemOperand::MOStore, MFI.getObjectSize(FrameIndex), Align);

  assert(MMO && "Couldn't get MachineMemOperand for store to stack.");
  assert(TRI->getSpillSize(*RC) == 4 &&
         "Only support 4-byte stores to stack now.");
  assert(ARC::GPR32RegClass.hasSubClassEq(RC) &&
         "Only support GPR32 stores to stack now.");
  LLVM_DEBUG(dbgs() << "Created store reg=" << printReg(SrcReg, TRI)
                    << " to FrameIndex=" << FrameIndex << "\n");
  BuildMI(MBB, I, dl, get(ARC::ST_rs9))
      .addReg(SrcReg, getKillRegState(isKill))
      .addFrameIndex(FrameIndex)
      .addImm(0)
      .addMemOperand(MMO);
}

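/// Reload DestReg from the stack slot FrameIndex with a 4-byte LD_rs9 at
/// offset zero. Only GPR32 registers are supported.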
void ARCInstrInfo::loadRegFromStackSlot(MachineBasicBlock &MBB,
                                        MachineBasicBlock::iterator I,
                                        unsigned DestReg, int FrameIndex,
                                        const TargetRegisterClass *RC,
                                        const TargetRegisterInfo *TRI) const {
  DebugLoc dl = MBB.findDebugLoc(I);
  MachineFunction &MF = *MBB.getParent();
  MachineFrameInfo &MFI = MF.getFrameInfo();
  unsigned Align = MFI.getObjectAlignment(FrameIndex);
  MachineMemOperand *MMO = MF.getMachineMemOperand(
      MachinePointerInfo::getFixedStack(MF, FrameIndex),
      MachineMemOperand::MOLoad, MFI.getObjectSize(FrameIndex), Align);

  assert(MMO && "Couldn't get MachineMemOperand for load from stack.");
  assert(TRI->getSpillSize(*RC) == 4 &&
         "Only support 4-byte loads from stack now.");
  assert(ARC::GPR32RegClass.hasSubClassEq(RC) &&
         "Only support GPR32 loads from stack now.");
  LLVM_DEBUG(dbgs() << "Created load reg=" << printReg(DestReg, TRI)
                    << " from FrameIndex=" << FrameIndex << "\n");
  BuildMI(MBB, I, dl, get(ARC::LD_rs9))
      .addReg(DestReg, RegState::Define)
      .addFrameIndex(FrameIndex)
      .addImm(0)
      .addMemOperand(MMO);
}

/// Reverse the branch condition stored in Cond, e.g. turning EQ into NE, so
/// the branch takes the opposite path.
bool ARCInstrInfo::reverseBranchCondition(
    SmallVectorImpl<MachineOperand> &Cond) const {
  assert((Cond.size() == 3) && "Invalid ARC branch condition!");
  Cond[2].setImm(GetOppositeBranchCondition((ARCCC::CondCode)Cond[2].getImm()));
  return false;
}

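/// Emit instructions to materialize Value into Reg, returning an iterator to
/// the emitted instruction. Only values that fit a signed 12-bit immediate
/// (MOV_rs12) are currently handled.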
MachineBasicBlock::iterator
ARCInstrInfo::loadImmediate(MachineBasicBlock &MBB,
                            MachineBasicBlock::iterator MI, unsigned Reg,
                            uint64_t Value) const {
  DebugLoc dl = MBB.findDebugLoc(MI);
  if (isInt<12>(Value)) {
    return BuildMI(MBB, MI, dl, get(ARC::MOV_rs12), Reg)
        .addImm(Value)
        .getInstr();
  }
  llvm_unreachable("Need ARC long immediate instructions.");
}

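/// Insert an unconditional (BR) and/or conditional (BRcc) branch at the end
/// of MBB and return the number of instructions inserted.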
unsigned ARCInstrInfo::insertBranch(MachineBasicBlock &MBB,
                                    MachineBasicBlock *TBB,
                                    MachineBasicBlock *FBB,
                                    ArrayRef<MachineOperand> Cond,
                                    const DebugLoc &dl, int *BytesAdded) const {
  assert(!BytesAdded && "Code size not handled.");

  // Shouldn't be a fall through.
  assert(TBB && "insertBranch must not be told to insert a fallthrough");
  assert((Cond.size() == 3 || Cond.size() == 0) &&
         "ARC branch conditions have three components!");

  if (Cond.empty()) {
    BuildMI(&MBB, dl, get(ARC::BR)).addMBB(TBB);
    return 1;
  }
  int BccOpc = Cond[1].isImm() ? ARC::BRcc_ru6_p : ARC::BRcc_rr_p;
  MachineInstrBuilder MIB = BuildMI(&MBB, dl, get(BccOpc));
  MIB.addMBB(TBB);
  for (unsigned i = 0; i < 3; i++) {
    MIB.add(Cond[i]);
  }

  // One-way conditional branch.
  if (!FBB) {
    return 1;
  }

  // Two-way conditional branch.
  BuildMI(&MBB, dl, get(ARC::BR)).addMBB(FBB);
  return 2;
}

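/// Return the size of MI in bytes, estimating the length of inline assembly
/// from its string using the target's MCAsmInfo.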
unsigned ARCInstrInfo::getInstSizeInBytes(const MachineInstr &MI) const {
  if (MI.isInlineAsm()) {
    const MachineFunction *MF = MI.getParent()->getParent();
    const char *AsmStr = MI.getOperand(0).getSymbolName();
    return getInlineAsmLength(AsmStr, *MF->getTarget().getMCAsmInfo());
  }
  return MI.getDesc().getSize();
}

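/// Return true if MI uses the post-increment addressing mode, as encoded in
/// its TSFlags.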
bool ARCInstrInfo::isPostIncrement(const MachineInstr &MI) const {
  const MCInstrDesc &MID = MI.getDesc();
  const uint64_t F = MID.TSFlags;
  return ((F >> TSF_AddrModeOff) & TSF_AddModeMask) == PostInc;
}

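/// Return true if MI uses the pre-increment addressing mode, as encoded in
/// its TSFlags.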
bool ARCInstrInfo::isPreIncrement(const MachineInstr &MI) const {
  const MCInstrDesc &MID = MI.getDesc();
  const uint64_t F = MID.TSFlags;
  return ((F >> TSF_AddrModeOff) & TSF_AddModeMask) == PreInc;
}

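/// For a load or store, set BasePos/OffsetPos to the operand indices of the
/// base register and immediate offset. Pre- and post-increment forms carry an
/// extra leading operand, so both positions shift by one. Returns false if MI
/// is not a memory access or does not have the expected operands.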
bool ARCInstrInfo::getBaseAndOffsetPosition(const MachineInstr &MI,
                                            unsigned &BasePos,
                                            unsigned &OffsetPos) const {
  if (!MI.mayLoad() && !MI.mayStore())
    return false;

  BasePos = 1;
  OffsetPos = 2;

  if (isPostIncrement(MI) || isPreIncrement(MI)) {
    BasePos++;
    OffsetPos++;
  }

  if (!MI.getOperand(BasePos).isReg() || !MI.getOperand(OffsetPos).isImm())
    return false;

  return true;
}