Lines matching refs: VirtReg

139   LiveRangeStage getStage(const LiveInterval &VirtReg) const {
140 return ExtraRegInfo[VirtReg.reg].Stage;
143 void setStage(const LiveInterval &VirtReg, LiveRangeStage Stage) {
145 ExtraRegInfo[VirtReg.reg].Stage = Stage;
266 unsigned canReassign(LiveInterval &VirtReg, unsigned PhysReg);
359 bool RAGreedy::LRE_CanEraseVirtReg(unsigned VirtReg) {
360 if (VRM->hasPhys(VirtReg)) {
361 Matrix->unassign(LIS->getInterval(VirtReg));
369 void RAGreedy::LRE_WillShrinkVirtReg(unsigned VirtReg) {
370 if (!VRM->hasPhys(VirtReg))
374 LiveInterval &LI = LIS->getInterval(VirtReg);
455 /// tryAssign - Try to assign VirtReg to an available register.
456 unsigned RAGreedy::tryAssign(LiveInterval &VirtReg,
462 if (!Matrix->checkInterference(VirtReg, PhysReg))
471 if (unsigned Hint = MRI->getSimpleHint(VirtReg.reg))
475 if (canEvictInterference(VirtReg, Hint, true, MaxCost)) {
476 evictInterference(VirtReg, Hint, NewVRegs);
490 unsigned CheapReg = tryEvict(VirtReg, Order, NewVRegs, Cost);
499 unsigned RAGreedy::canReassign(LiveInterval &VirtReg, unsigned PrevReg) {
500 AllocationOrder Order(VirtReg.reg, *VRM, RegClassInfo);
509 LiveIntervalUnion::Query subQ(&VirtReg, &Matrix->getLiveUnions()[*Units]);
518 DEBUG(dbgs() << "can reassign: " << VirtReg << " from "
549 /// canEvictInterference - Return true if all interferences between VirtReg and
552 /// @param VirtReg Live range that is about to be assigned.
554 /// @param IsHint True when PhysReg is VirtReg's preferred register.
558 bool RAGreedy::canEvictInterference(LiveInterval &VirtReg, unsigned PhysReg,
561 if (Matrix->checkInterference(VirtReg, PhysReg) > LiveRegMatrix::IK_VirtReg)
564 bool IsLocal = LIS->intervalIsInOneMBB(VirtReg);
566 // Find VirtReg's cascade number. This will be unassigned if VirtReg was never
573 unsigned Cascade = ExtraRegInfo[VirtReg.reg].Cascade;
579 LiveIntervalUnion::Query &Q = Matrix->query(VirtReg, *Units);
598 bool Urgent = !VirtReg.isSpillable() &&
600 RegClassInfo.getNumAllocatableRegs(MRI->getRegClass(VirtReg.reg)) <
629 if (!shouldEvict(VirtReg, IsHint, *Intf, BreaksHint))
637 /// evictInterference - Evict any interfering registers that prevent VirtReg
640 void RAGreedy::evictInterference(LiveInterval &VirtReg, unsigned PhysReg,
642 // Make sure that VirtReg has a cascade number, and assign that cascade
645 unsigned Cascade = ExtraRegInfo[VirtReg.reg].Cascade;
647 Cascade = ExtraRegInfo[VirtReg.reg].Cascade = NextCascade++;
655 LiveIntervalUnion::Query &Q = Matrix->query(VirtReg, *Units);
664 // The same VirtReg may be present in multiple RegUnits. Skip duplicates.
669 VirtReg.isSpillable() < Intf->isSpillable()) &&
678 /// @param VirtReg Currently unassigned virtual register.
680 /// @return Physreg to assign VirtReg, or 0.
681 unsigned RAGreedy::tryEvict(LiveInterval &VirtReg,
696 BestCost.MaxWeight = VirtReg.weight;
699 const TargetRegisterClass *RC = MRI->getRegClass(VirtReg.reg);
729 if (!canEvictInterference(VirtReg, PhysReg, false, BestCost))
743 evictInterference(VirtReg, BestPhys, NewVRegs);
1172 unsigned RAGreedy::tryRegionSplit(LiveInterval &VirtReg, AllocationOrder &Order,
1264 LiveRangeEdit LREdit(&VirtReg, NewVRegs, *MF, *LIS, VRM, this);
1307 unsigned RAGreedy::tryBlockSplit(LiveInterval &VirtReg, AllocationOrder &Order,
1309 assert(&SA->getParent() == &VirtReg && "Live range wasn't analyzed");
1310 unsigned Reg = VirtReg.reg;
1312 LiveRangeEdit LREdit(&VirtReg, NewVRegs, *MF, *LIS, VRM, this);
1359 RAGreedy::tryInstructionSplit(LiveInterval &VirtReg, AllocationOrder &Order,
1362 if (!RegClassInfo.isProperSubClass(MRI->getRegClass(VirtReg.reg)))
1367 LiveRangeEdit LREdit(&VirtReg, NewVRegs, *MF, *LIS, VRM, this);
1396 DebugVars->splitRegister(VirtReg.reg, LREdit.regs(), *LIS);
1436 // We know that VirtReg is a continuous interval from FirstInstr to
1490 /// tryLocalSplit - Try to split VirtReg into smaller intervals inside its only
1493 unsigned RAGreedy::tryLocalSplit(LiveInterval &VirtReg, AllocationOrder &Order,
1517 // If VirtReg is live across any register mask operands, compute a list of
1520 if (Matrix->checkRegMaskInterference(VirtReg)) {
1524 // Constrain to VirtReg's live range.
1565 bool ProgressRequired = getStage(VirtReg) >= RS_Split2;
1584 if (Matrix->checkRegMaskInterference(VirtReg, PhysReg))
1679 LiveRangeEdit LREdit(&VirtReg, NewVRegs, *MF, *LIS, VRM, this);
1688 DebugVars->splitRegister(VirtReg.reg, LREdit.regs(), *LIS);
1715 /// trySplit - Try to split VirtReg or one of its interferences, making it
1717 /// @return Physreg when VirtReg may be assigned and/or new NewVRegs.
1718 unsigned RAGreedy::trySplit(LiveInterval &VirtReg, AllocationOrder &Order,
1721 if (getStage(VirtReg) >= RS_Spill)
1725 if (LIS->intervalIsInOneMBB(VirtReg)) {
1727 SA->analyze(&VirtReg);
1728 unsigned PhysReg = tryLocalSplit(VirtReg, Order, NewVRegs);
1731 return tryInstructionSplit(VirtReg, Order, NewVRegs);
1736 SA->analyze(&VirtReg);
1743 // VirtReg has changed, so all cached queries are invalid.
1745 if (unsigned PhysReg = tryAssign(VirtReg, Order, NewVRegs))
1752 if (getStage(VirtReg) < RS_Split2) {
1753 unsigned PhysReg = tryRegionSplit(VirtReg, Order, NewVRegs);
1759 return tryBlockSplit(VirtReg, Order, NewVRegs);
1767 unsigned RAGreedy::selectOrSplit(LiveInterval &VirtReg,
1770 AllocationOrder Order(VirtReg.reg, *VRM, RegClassInfo);
1771 if (unsigned PhysReg = tryAssign(VirtReg, Order, NewVRegs))
1774 LiveRangeStage Stage = getStage(VirtReg);
1776 << " Cascade " << ExtraRegInfo[VirtReg.reg].Cascade << '\n');
1782 if (unsigned PhysReg = tryEvict(VirtReg, Order, NewVRegs))
1791 setStage(VirtReg, RS_Split);
1793 NewVRegs.push_back(VirtReg.reg);
1799 if (Stage >= RS_Done || !VirtReg.isSpillable())
1802 // Try splitting VirtReg or interferences.
1803 unsigned PhysReg = trySplit(VirtReg, Order, NewVRegs);
1807 // Finally spill VirtReg itself.
1809 LiveRangeEdit LRE(&VirtReg, NewVRegs, *MF, *LIS, VRM, this);