MachineFunction.cpp revision 296417
//===-- MachineFunction.cpp -----------------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Collect native machine code information for a function. This allows
// target-specific information about the generated code to be stored with each
// function.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/Analysis/EHPersonalities.h"
#include "llvm/CodeGen/MachineConstantPool.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunctionInitializer.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineJumpTableInfo.h"
#include "llvm/CodeGen/MachineModuleInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/PseudoSourceValue.h"
#include "llvm/CodeGen/WinEHFuncInfo.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugInfo.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ModuleSlotTracker.h"
#include "llvm/MC/MCAsmInfo.h"
#include "llvm/MC/MCContext.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/GraphWriter.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetFrameLowering.h"
#include "llvm/Target/TargetLowering.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetSubtargetInfo.h"
using namespace llvm;

#define DEBUG_TYPE "codegen"

// Debugging/testing knob: when nonzero, overrides the computed alignment of
// every function (see the constructor below).
static cl::opt<unsigned>
AlignAllFunctions("align-all-functions",
                  cl::desc("Force the alignment of all functions."),
                  cl::init(0), cl::Hidden);

void MachineFunctionInitializer::anchor() {}

//===----------------------------------------------------------------------===//
// MachineFunction implementation
//===----------------------------------------------------------------------===//

// Out-of-line virtual method.
MachineFunctionInfo::~MachineFunctionInfo() {}

void ilist_traits<MachineBasicBlock>::deleteNode(MachineBasicBlock *MBB) {
  MBB->getParent()->DeleteMachineBasicBlock(MBB);
}

/// Construct the per-function machine-code container. Most sub-objects
/// (MachineRegisterInfo, MachineFrameInfo, MachineConstantPool, ...) are
/// placement-new'ed into this function's BumpPtrAllocator and torn down
/// explicitly in the destructor below.
MachineFunction::MachineFunction(const Function *F, const TargetMachine &TM,
                                 unsigned FunctionNum, MachineModuleInfo &mmi)
    : Fn(F), Target(TM), STI(TM.getSubtargetImpl(*F)), Ctx(mmi.getContext()),
      MMI(mmi) {
  // A subtarget may legitimately have no register info; in that case no
  // MachineRegisterInfo is created.
  if (STI->getRegisterInfo())
    RegInfo = new (Allocator) MachineRegisterInfo(this);
  else
    RegInfo = nullptr;

  MFInfo = nullptr;
  // Stack realignment is permitted only if the target supports it AND the
  // function does not carry the "no-realign-stack" attribute.
  FrameInfo = new (Allocator)
      MachineFrameInfo(STI->getFrameLowering()->getStackAlignment(),
                       STI->getFrameLowering()->isStackRealignable(),
                       !F->hasFnAttribute("no-realign-stack"));

  if (Fn->hasFnAttribute(Attribute::StackAlignment))
    FrameInfo->ensureMaxAlignment(Fn->getFnStackAlignment());

  ConstantPool = new (Allocator) MachineConstantPool(getDataLayout());
  Alignment = STI->getTargetLowering()->getMinFunctionAlignment();

  // FIXME: Shouldn't use pref alignment if explicit alignment is set on Fn.
  // FIXME: Use Function::optForSize().
  if (!Fn->hasFnAttribute(Attribute::OptimizeForSize))
    Alignment = std::max(Alignment,
                         STI->getTargetLowering()->getPrefFunctionAlignment());

  // The -align-all-functions flag overrides everything computed above.
  if (AlignAllFunctions)
    Alignment = AlignAllFunctions;

  FunctionNumber = FunctionNum;
  JumpTableInfo = nullptr;

  // Funclet-style EH personalities (MSVC C++ / SEH) carry extra per-function
  // state in WinEHFuncInfo; allocate it up front for those personalities.
  if (isFuncletEHPersonality(classifyEHPersonality(
          F->hasPersonalityFn() ? F->getPersonalityFn() : nullptr))) {
    WinEHInfo = new (Allocator) WinEHFuncInfo();
  }

  assert(TM.isCompatibleDataLayout(getDataLayout()) &&
         "Can't create a MachineFunction using a Module with a "
         "Target-incompatible DataLayout attached\n");

  PSVManager = llvm::make_unique<PseudoSourceValueManager>();
}

MachineFunction::~MachineFunction() {
  // Don't call destructors on MachineInstr and MachineOperand. All of their
  // memory comes from the BumpPtrAllocator which is about to be purged.
  //
  // Do call MachineBasicBlock destructors, it contains std::vectors.
  for (iterator I = begin(), E = end(); I != E; I = BasicBlocks.erase(I))
    I->Insts.clearAndLeakNodesUnsafely();

  InstructionRecycler.clear(Allocator);
  OperandRecycler.clear(Allocator);
  BasicBlockRecycler.clear(Allocator);
  // The sub-objects below were placement-new'ed into Allocator in the
  // constructor, so destroy them explicitly; plain `delete` would be wrong.
  if (RegInfo) {
    RegInfo->~MachineRegisterInfo();
    Allocator.Deallocate(RegInfo);
  }
  if (MFInfo) {
    MFInfo->~MachineFunctionInfo();
    Allocator.Deallocate(MFInfo);
  }

  FrameInfo->~MachineFrameInfo();
  Allocator.Deallocate(FrameInfo);

  ConstantPool->~MachineConstantPool();
  Allocator.Deallocate(ConstantPool);

  if (JumpTableInfo) {
    JumpTableInfo->~MachineJumpTableInfo();
    Allocator.Deallocate(JumpTableInfo);
  }

  if (WinEHInfo) {
    WinEHInfo->~WinEHFuncInfo();
    Allocator.Deallocate(WinEHInfo);
  }
}

/// Return the DataLayout of the module this function lives in.
const DataLayout &MachineFunction::getDataLayout() const {
  return Fn->getParent()->getDataLayout();
}

/// Get the JumpTableInfo for this function.
/// If it does not already exist, allocate one.
MachineJumpTableInfo *MachineFunction::
getOrCreateJumpTableInfo(unsigned EntryKind) {
  // Lazily allocate from this function's allocator; the entry kind of an
  // existing table is NOT checked against EntryKind.
  if (JumpTableInfo) return JumpTableInfo;

  JumpTableInfo = new (Allocator)
    MachineJumpTableInfo((MachineJumpTableInfo::JTEntryKind)EntryKind);
  return JumpTableInfo;
}

/// Should we be emitting segmented stack stuff for the function
bool MachineFunction::shouldSplitStack() const {
  return getFunction()->hasFnAttribute("split-stack");
}

/// This discards all of the MachineBasicBlock numbers and recomputes them.
/// This guarantees that the MBB numbers are sequential, dense, and match the
/// ordering of the blocks within the function. If a specific MachineBasicBlock
/// is specified, only that block and those after it are renumbered.
void MachineFunction::RenumberBlocks(MachineBasicBlock *MBB) {
  if (empty()) { MBBNumbering.clear(); return; }
  MachineFunction::iterator MBBI, E = end();
  if (MBB == nullptr)
    MBBI = begin();
  else
    MBBI = MBB->getIterator();

  // Figure out the block number this should have.
  unsigned BlockNo = 0;
  if (MBBI != begin())
    BlockNo = std::prev(MBBI)->getNumber() + 1;

  for (; MBBI != E; ++MBBI, ++BlockNo) {
    if (MBBI->getNumber() != (int)BlockNo) {
      // Remove use of the old number.
      if (MBBI->getNumber() != -1) {
        assert(MBBNumbering[MBBI->getNumber()] == &*MBBI &&
               "MBB number mismatch!");
        MBBNumbering[MBBI->getNumber()] = nullptr;
      }

      // If BlockNo is already taken, set that block's number to -1.
      if (MBBNumbering[BlockNo])
        MBBNumbering[BlockNo]->setNumber(-1);

      MBBNumbering[BlockNo] = &*MBBI;
      MBBI->setNumber(BlockNo);
    }
  }

  // Okay, all the blocks are renumbered. If we have compactified the block
  // numbering, shrink MBBNumbering now.
  assert(BlockNo <= MBBNumbering.size() && "Mismatch!");
  MBBNumbering.resize(BlockNo);
}

/// Allocate a new MachineInstr.
Use this instead of `new MachineInstr'. 212MachineInstr * 213MachineFunction::CreateMachineInstr(const MCInstrDesc &MCID, 214 DebugLoc DL, bool NoImp) { 215 return new (InstructionRecycler.Allocate<MachineInstr>(Allocator)) 216 MachineInstr(*this, MCID, DL, NoImp); 217} 218 219/// Create a new MachineInstr which is a copy of the 'Orig' instruction, 220/// identical in all ways except the instruction has no parent, prev, or next. 221MachineInstr * 222MachineFunction::CloneMachineInstr(const MachineInstr *Orig) { 223 return new (InstructionRecycler.Allocate<MachineInstr>(Allocator)) 224 MachineInstr(*this, *Orig); 225} 226 227/// Delete the given MachineInstr. 228/// 229/// This function also serves as the MachineInstr destructor - the real 230/// ~MachineInstr() destructor must be empty. 231void 232MachineFunction::DeleteMachineInstr(MachineInstr *MI) { 233 // Strip it for parts. The operand array and the MI object itself are 234 // independently recyclable. 235 if (MI->Operands) 236 deallocateOperandArray(MI->CapOperands, MI->Operands); 237 // Don't call ~MachineInstr() which must be trivial anyway because 238 // ~MachineFunction drops whole lists of MachineInstrs wihout calling their 239 // destructors. 240 InstructionRecycler.Deallocate(Allocator, MI); 241} 242 243/// Allocate a new MachineBasicBlock. Use this instead of 244/// `new MachineBasicBlock'. 245MachineBasicBlock * 246MachineFunction::CreateMachineBasicBlock(const BasicBlock *bb) { 247 return new (BasicBlockRecycler.Allocate<MachineBasicBlock>(Allocator)) 248 MachineBasicBlock(*this, bb); 249} 250 251/// Delete the given MachineBasicBlock. 
void
MachineFunction::DeleteMachineBasicBlock(MachineBasicBlock *MBB) {
  assert(MBB->getParent() == this && "MBB parent mismatch!");
  // Blocks are recycler-allocated (see CreateMachineBasicBlock): run the
  // destructor explicitly, then return the storage to the recycler.
  MBB->~MachineBasicBlock();
  BasicBlockRecycler.Deallocate(Allocator, MBB);
}

/// Allocate a new MachineMemOperand from this function's allocator; its
/// lifetime matches the function's.
MachineMemOperand *
MachineFunction::getMachineMemOperand(MachinePointerInfo PtrInfo, unsigned f,
                                      uint64_t s, unsigned base_alignment,
                                      const AAMDNodes &AAInfo,
                                      const MDNode *Ranges) {
  return new (Allocator) MachineMemOperand(PtrInfo, f, s, base_alignment,
                                           AAInfo, Ranges);
}

/// Clone MMO, adjusting its pointer by Offset and overriding its size.
/// Flags and base alignment are preserved; AA info and ranges are not copied.
MachineMemOperand *
MachineFunction::getMachineMemOperand(const MachineMemOperand *MMO,
                                      int64_t Offset, uint64_t Size) {
  if (MMO->getValue())
    return new (Allocator)
               MachineMemOperand(MachinePointerInfo(MMO->getValue(),
                                                    MMO->getOffset()+Offset),
                                 MMO->getFlags(), Size,
                                 MMO->getBaseAlignment());
  return new (Allocator)
             MachineMemOperand(MachinePointerInfo(MMO->getPseudoValue(),
                                                  MMO->getOffset()+Offset),
                               MMO->getFlags(), Size,
                               MMO->getBaseAlignment());
}

/// Allocate an uninitialized array of Num MachineMemOperand pointers from
/// this function's allocator.
MachineInstr::mmo_iterator
MachineFunction::allocateMemRefsArray(unsigned long Num) {
  return Allocator.Allocate<MachineMemOperand *>(Num);
}

/// Extract the load memrefs from [Begin, End). MMOs that are both a load and
/// a store are cloned with the store flag cleared; pure loads are reused.
std::pair<MachineInstr::mmo_iterator, MachineInstr::mmo_iterator>
MachineFunction::extractLoadMemRefs(MachineInstr::mmo_iterator Begin,
                                    MachineInstr::mmo_iterator End) {
  // Count the number of load mem refs.
  unsigned Num = 0;
  for (MachineInstr::mmo_iterator I = Begin; I != End; ++I)
    if ((*I)->isLoad())
      ++Num;

  // Allocate a new array and populate it with the load information.
  MachineInstr::mmo_iterator Result = allocateMemRefsArray(Num);
  unsigned Index = 0;
  for (MachineInstr::mmo_iterator I = Begin; I != End; ++I) {
    if ((*I)->isLoad()) {
      if (!(*I)->isStore())
        // Reuse the MMO.
        Result[Index] = *I;
      else {
        // Clone the MMO and unset the store flag.
        MachineMemOperand *JustLoad =
          getMachineMemOperand((*I)->getPointerInfo(),
                               (*I)->getFlags() & ~MachineMemOperand::MOStore,
                               (*I)->getSize(), (*I)->getBaseAlignment(),
                               (*I)->getAAInfo());
        Result[Index] = JustLoad;
      }
      ++Index;
    }
  }
  return std::make_pair(Result, Result + Num);
}

/// Extract the store memrefs from [Begin, End). MMOs that are both a load and
/// a store are cloned with the load flag cleared; pure stores are reused.
std::pair<MachineInstr::mmo_iterator, MachineInstr::mmo_iterator>
MachineFunction::extractStoreMemRefs(MachineInstr::mmo_iterator Begin,
                                     MachineInstr::mmo_iterator End) {
  // Count the number of store mem refs.
  unsigned Num = 0;
  for (MachineInstr::mmo_iterator I = Begin; I != End; ++I)
    if ((*I)->isStore())
      ++Num;

  // Allocate a new array and populate it with the store information.
  MachineInstr::mmo_iterator Result = allocateMemRefsArray(Num);
  unsigned Index = 0;
  for (MachineInstr::mmo_iterator I = Begin; I != End; ++I) {
    if ((*I)->isStore()) {
      if (!(*I)->isLoad())
        // Reuse the MMO.
        Result[Index] = *I;
      else {
        // Clone the MMO and unset the load flag.
        MachineMemOperand *JustStore =
          getMachineMemOperand((*I)->getPointerInfo(),
                               (*I)->getFlags() & ~MachineMemOperand::MOLoad,
                               (*I)->getSize(), (*I)->getBaseAlignment(),
                               (*I)->getAAInfo());
        Result[Index] = JustStore;
      }
      ++Index;
    }
  }
  return std::make_pair(Result, Result + Num);
}

/// Copy Name into function-lifetime storage and return a NUL-terminated
/// C string pointing at it.
const char *MachineFunction::createExternalSymbolName(StringRef Name) {
  char *Dest = Allocator.Allocate<char>(Name.size() + 1);
  std::copy(Name.begin(), Name.end(), Dest);
  Dest[Name.size()] = 0;
  return Dest;
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
void MachineFunction::dump() const {
  print(dbgs());
}
#endif

StringRef MachineFunction::getName() const {
  assert(getFunction() && "No function!");
  return getFunction()->getName();
}

/// Print the machine code for this function (frame objects, jump tables,
/// constant pool, live-ins, then every basic block) to OS.
void MachineFunction::print(raw_ostream &OS, SlotIndexes *Indexes) const {
  OS << "# Machine code for function " << getName() << ": ";
  if (RegInfo) {
    OS << (RegInfo->isSSA() ? "SSA" : "Post SSA");
    if (!RegInfo->tracksLiveness())
      OS << ", not tracking liveness";
  }
  OS << '\n';

  // Print Frame Information
  FrameInfo->print(*this, OS);

  // Print JumpTable Information
  if (JumpTableInfo)
    JumpTableInfo->print(OS);

  // Print Constant Pool
  ConstantPool->print(OS);

  const TargetRegisterInfo *TRI = getSubtarget().getRegisterInfo();

  if (RegInfo && !RegInfo->livein_empty()) {
    OS << "Function Live Ins: ";
    for (MachineRegisterInfo::livein_iterator
         I = RegInfo->livein_begin(), E = RegInfo->livein_end(); I != E; ++I) {
      OS << PrintReg(I->first, TRI);
      if (I->second)
        OS << " in " << PrintReg(I->second, TRI);
      if (std::next(I) != E)
        OS << ", ";
    }
    OS << '\n';
  }

  ModuleSlotTracker MST(getFunction()->getParent());
  MST.incorporateFunction(*getFunction());
  for (const auto &BB : *this) {
    OS << '\n';
    BB.print(OS, MST, Indexes);
  }

  OS << "\n# End machine code for function " << getName() << ".\n\n";
}

// GraphWriter traits so viewCFG()/viewCFGOnly() can render the machine CFG.
namespace llvm {
  template<>
  struct DOTGraphTraits<const MachineFunction*> : public DefaultDOTGraphTraits {

    DOTGraphTraits (bool isSimple=false) : DefaultDOTGraphTraits(isSimple) {}

    static std::string getGraphName(const MachineFunction *F) {
      return ("CFG for '" + F->getName() + "' function").str();
    }

    std::string getNodeLabel(const MachineBasicBlock *Node,
                             const MachineFunction *Graph) {
      std::string OutStr;
      {
        raw_string_ostream OSS(OutStr);

        if (isSimple()) {
          OSS << "BB#" << Node->getNumber();
          if (const BasicBlock *BB = Node->getBasicBlock())
            OSS << ": " << BB->getName();
        } else
          Node->print(OSS);
      }

      if (OutStr[0] == '\n') OutStr.erase(OutStr.begin());

      // Process string output to make it nicer...
      for (unsigned i = 0; i != OutStr.length(); ++i)
        if (OutStr[i] == '\n') {                            // Left justify
          OutStr[i] = '\\';
          OutStr.insert(OutStr.begin()+i+1, 'l');
        }
      return OutStr;
    }
  };
}

void MachineFunction::viewCFG() const
{
#ifndef NDEBUG
  ViewGraph(this, "mf" + getName());
#else
  errs() << "MachineFunction::viewCFG is only available in debug builds on "
         << "systems with Graphviz or gv!\n";
#endif // NDEBUG
}

void MachineFunction::viewCFGOnly() const
{
#ifndef NDEBUG
  ViewGraph(this, "mf" + getName(), true);
#else
  errs() << "MachineFunction::viewCFGOnly is only available in debug builds on "
         << "systems with Graphviz or gv!\n";
#endif // NDEBUG
}

/// Add the specified physical register as a live-in value and
/// create a corresponding virtual register for it.
unsigned MachineFunction::addLiveIn(unsigned PReg,
                                    const TargetRegisterClass *RC) {
  MachineRegisterInfo &MRI = getRegInfo();
  unsigned VReg = MRI.getLiveInVirtReg(PReg);
  if (VReg) {
    const TargetRegisterClass *VRegRC = MRI.getRegClass(VReg);
    (void)VRegRC;
    // A physical register can be added several times.
    // Between two calls, the register class of the related virtual register
    // may have been constrained to match some operation constraints.
    // In that case, check that the current register class includes the
    // physical register and is a sub class of the specified RC.
    assert((VRegRC == RC || (VRegRC->contains(PReg) &&
                             RC->hasSubClassEq(VRegRC))) &&
           "Register class mismatch!");
    return VReg;
  }
  VReg = MRI.createVirtualRegister(RC);
  MRI.addLiveIn(PReg, VReg);
  return VReg;
}

/// Return the MCSymbol for the specified non-empty jump table.
/// If isLinkerPrivate is specified, an 'l' label is returned, otherwise a
/// normal 'L' label is returned.
MCSymbol *MachineFunction::getJTISymbol(unsigned JTI, MCContext &Ctx,
                                        bool isLinkerPrivate) const {
  const DataLayout &DL = getDataLayout();
  assert(JumpTableInfo && "No jump tables");
  assert(JTI < JumpTableInfo->getJumpTables().size() && "Invalid JTI!");

  const char *Prefix = isLinkerPrivate ? DL.getLinkerPrivateGlobalPrefix()
                                       : DL.getPrivateGlobalPrefix();
  // Symbol shape: <prefix>JTI<function number>_<table index>.
  SmallString<60> Name;
  raw_svector_ostream(Name)
    << Prefix << "JTI" << getFunctionNumber() << '_' << JTI;
  return Ctx.getOrCreateSymbol(Name);
}

/// Return a function-local symbol to represent the PIC base.
MCSymbol *MachineFunction::getPICBaseSymbol() const {
  const DataLayout &DL = getDataLayout();
  return Ctx.getOrCreateSymbol(Twine(DL.getPrivateGlobalPrefix()) +
                               Twine(getFunctionNumber()) + "$pb");
}

//===----------------------------------------------------------------------===//
// MachineFrameInfo implementation
//===----------------------------------------------------------------------===//

/// Make sure the function is at least Align bytes aligned.
void MachineFrameInfo::ensureMaxAlignment(unsigned Align) {
  // Targets that cannot realign the stack cannot honor an alignment larger
  // than the incoming stack alignment; this is a programmer error, so assert.
  if (!StackRealignable || !RealignOption)
    assert(Align <= StackAlignment &&
           "For targets without stack realignment, Align is out of limit!");
  if (MaxAlignment < Align) MaxAlignment = Align;
}

/// Clamp the alignment if requested and emit a warning.
static inline unsigned clampStackAlignment(bool ShouldClamp, unsigned Align,
                                           unsigned StackAlign) {
  if (!ShouldClamp || Align <= StackAlign)
    return Align;
  // Debug-build diagnostic only; release builds clamp silently.
  DEBUG(dbgs() << "Warning: requested alignment " << Align
               << " exceeds the stack alignment " << StackAlign
               << " when stack realignment is off" << '\n');
  return StackAlign;
}

/// Create a new statically sized stack object, returning a nonnegative
/// identifier to represent it.
int MachineFrameInfo::CreateStackObject(uint64_t Size, unsigned Alignment,
                                        bool isSS, const AllocaInst *Alloca) {
  assert(Size != 0 && "Cannot allocate zero size stack objects!");
  Alignment = clampStackAlignment(!StackRealignable || !RealignOption,
                                  Alignment, StackAlignment);
  // Non-spill-slot objects are considered aliased (last argument).
  Objects.push_back(StackObject(Size, Alignment, 0, false, isSS, Alloca,
                                !isSS));
  int Index = (int)Objects.size() - NumFixedObjects - 1;
  assert(Index >= 0 && "Bad frame index!");
  ensureMaxAlignment(Alignment);
  return Index;
}

/// Create a new statically sized stack object that represents a spill slot,
/// returning a nonnegative identifier to represent it.
int MachineFrameInfo::CreateSpillStackObject(uint64_t Size,
                                             unsigned Alignment) {
  Alignment = clampStackAlignment(!StackRealignable || !RealignOption,
                                  Alignment, StackAlignment);
  // The new object is the last entry in Objects; recompute its index here.
  CreateStackObject(Size, Alignment, true);
  int Index = (int)Objects.size() - NumFixedObjects - 1;
  ensureMaxAlignment(Alignment);
  return Index;
}

/// Notify the MachineFrameInfo object that a variable sized object has been
/// created. This must be created whenever a variable sized object is created,
/// whether or not the index returned is actually used.
int MachineFrameInfo::CreateVariableSizedObject(unsigned Alignment,
                                                const AllocaInst *Alloca) {
  HasVarSizedObjects = true;
  Alignment = clampStackAlignment(!StackRealignable || !RealignOption,
                                  Alignment, StackAlignment);
  // Size 0 marks the object as variable sized.
  Objects.push_back(StackObject(0, Alignment, 0, false, false, Alloca, true));
  ensureMaxAlignment(Alignment);
  return (int)Objects.size()-NumFixedObjects-1;
}

/// Create a new object at a fixed location on the stack.
/// All fixed objects should be created before other objects are created for
/// efficiency. By default, fixed objects are immutable. This returns an
/// index with a negative value.
int MachineFrameInfo::CreateFixedObject(uint64_t Size, int64_t SPOffset,
                                        bool Immutable, bool isAliased) {
  assert(Size != 0 && "Cannot allocate zero size fixed stack objects!");
  // The alignment of the frame index can be determined from its offset from
  // the incoming frame position. If the frame object is at offset 32 and
  // the stack is guaranteed to be 16-byte aligned, then we know that the
  // object is 16-byte aligned.
  unsigned Align = MinAlign(SPOffset, StackAlignment);
  Align = clampStackAlignment(!StackRealignable || !RealignOption, Align,
                              StackAlignment);
  // Fixed objects go at the front of Objects and are addressed with
  // negative frame indices.
  Objects.insert(Objects.begin(), StackObject(Size, Align, SPOffset, Immutable,
                                              /*isSS*/   false,
                                              /*Alloca*/ nullptr, isAliased));
  return -++NumFixedObjects;
}

/// Create a spill slot at a fixed location on the stack.
/// Returns an index with a negative value.
int MachineFrameInfo::CreateFixedSpillStackObject(uint64_t Size,
                                                  int64_t SPOffset) {
  unsigned Align = MinAlign(SPOffset, StackAlignment);
  Align = clampStackAlignment(!StackRealignable || !RealignOption, Align,
                              StackAlignment);
  Objects.insert(Objects.begin(), StackObject(Size, Align, SPOffset,
                                              /*Immutable*/ true,
                                              /*isSS*/ true,
                                              /*Alloca*/ nullptr,
                                              /*isAliased*/ false));
  return -++NumFixedObjects;
}

/// Return a BitVector of the callee-saved registers that are "pristine":
/// callee-saved but not actually saved by this function's prologue.
BitVector MachineFrameInfo::getPristineRegs(const MachineFunction &MF) const {
  const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();
  BitVector BV(TRI->getNumRegs());

  // Before CSI is calculated, no registers are considered pristine. They can be
  // freely used and PEI will make sure they are saved.
  if (!isCalleeSavedInfoValid())
    return BV;

  // Start with every callee-saved register...
  for (const MCPhysReg *CSR = TRI->getCalleeSavedRegs(&MF); CSR && *CSR; ++CSR)
    BV.set(*CSR);

  // Saved CSRs are not pristine.
  for (auto &I : getCalleeSavedInfo())
    for (MCSubRegIterator S(I.getReg(), TRI, true); S.isValid(); ++S)
      BV.reset(*S);

  return BV;
}

/// Estimate the final size of this function's stack frame (in bytes) before
/// frame layout has been finalized.
unsigned MachineFrameInfo::estimateStackSize(const MachineFunction &MF) const {
  const TargetFrameLowering *TFI = MF.getSubtarget().getFrameLowering();
  const TargetRegisterInfo *RegInfo = MF.getSubtarget().getRegisterInfo();
  unsigned MaxAlign = getMaxAlignment();
  int Offset = 0;

  // This code is very, very similar to PEI::calculateFrameObjectOffsets().
  // It really should be refactored to share code. Until then, changes
  // should keep in mind that there's tight coupling between the two.

  for (int i = getObjectIndexBegin(); i != 0; ++i) {
    int FixedOff = -getObjectOffset(i);
    if (FixedOff > Offset) Offset = FixedOff;
  }
  for (unsigned i = 0, e = getObjectIndexEnd(); i != e; ++i) {
    if (isDeadObjectIndex(i))
      continue;
    Offset += getObjectSize(i);
    unsigned Align = getObjectAlignment(i);
    // Adjust to alignment boundary
    Offset = (Offset+Align-1)/Align*Align;

    MaxAlign = std::max(Align, MaxAlign);
  }

  if (adjustsStack() && TFI->hasReservedCallFrame(MF))
    Offset += getMaxCallFrameSize();

  // Round up the size to a multiple of the alignment. If the function has
  // any calls or alloca's, align to the target's StackAlignment value to
  // ensure that the callee's frame or the alloca data is suitably aligned;
  // otherwise, for leaf functions, align to the TransientStackAlignment
  // value.
  unsigned StackAlign;
  if (adjustsStack() || hasVarSizedObjects() ||
      (RegInfo->needsStackRealignment(MF) && getObjectIndexEnd() != 0))
    StackAlign = TFI->getStackAlignment();
  else
    StackAlign = TFI->getTransientStackAlignment();

  // If the frame pointer is eliminated, all frame offsets will be relative to
  // SP not FP. Align to MaxAlign so this works.
  StackAlign = std::max(StackAlign, MaxAlign);
  unsigned AlignMask = StackAlign - 1;
  Offset = (Offset + AlignMask) & ~uint64_t(AlignMask);

  return (unsigned)Offset;
}

/// Print a human-readable listing of this function's frame objects to OS.
void MachineFrameInfo::print(const MachineFunction &MF, raw_ostream &OS) const{
  if (Objects.empty()) return;

  const TargetFrameLowering *FI = MF.getSubtarget().getFrameLowering();
  int ValOffset = (FI ? FI->getOffsetOfLocalArea() : 0);

  OS << "Frame Objects:\n";

  for (unsigned i = 0, e = Objects.size(); i != e; ++i) {
    const StackObject &SO = Objects[i];
    OS << "  fi#" << (int)(i-NumFixedObjects) << ": ";
    // A size of ~0ULL marks a dead (removed) object.
    if (SO.Size == ~0ULL) {
      OS << "dead\n";
      continue;
    }
    if (SO.Size == 0)
      OS << "variable sized";
    else
      OS << "size=" << SO.Size;
    OS << ", align=" << SO.Alignment;

    if (i < NumFixedObjects)
      OS << ", fixed";
    if (i < NumFixedObjects || SO.SPOffset != -1) {
      int64_t Off = SO.SPOffset - ValOffset;
      OS << ", at location [SP";
      if (Off > 0)
        OS << "+" << Off;
      else if (Off < 0)
        OS << Off;
      OS << "]";
    }
    OS << "\n";
  }
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
void MachineFrameInfo::dump(const MachineFunction &MF) const {
  print(MF, dbgs());
}
#endif

//===----------------------------------------------------------------------===//
// MachineJumpTableInfo implementation
//===----------------------------------------------------------------------===//

/// Return the size of each entry in the jump table.
unsigned MachineJumpTableInfo::getEntrySize(const DataLayout &TD) const {
  // The size of a jump table entry is 4 bytes unless the entry is just the
  // address of a block, in which case it is the pointer size.
  switch (getEntryKind()) {
  case MachineJumpTableInfo::EK_BlockAddress:
    return TD.getPointerSize();
  case MachineJumpTableInfo::EK_GPRel64BlockAddress:
    return 8;
  case MachineJumpTableInfo::EK_GPRel32BlockAddress:
  case MachineJumpTableInfo::EK_LabelDifference32:
  case MachineJumpTableInfo::EK_Custom32:
    return 4;
  case MachineJumpTableInfo::EK_Inline:
    return 0;
  }
  llvm_unreachable("Unknown jump table encoding!");
}

/// Return the alignment of each entry in the jump table.
unsigned MachineJumpTableInfo::getEntryAlignment(const DataLayout &TD) const {
  // The alignment of a jump table entry is the alignment of int32 unless the
  // entry is just the address of a block, in which case it is the pointer
  // alignment.
  switch (getEntryKind()) {
  case MachineJumpTableInfo::EK_BlockAddress:
    return TD.getPointerABIAlignment();
  case MachineJumpTableInfo::EK_GPRel64BlockAddress:
    return TD.getABIIntegerTypeAlignment(64);
  case MachineJumpTableInfo::EK_GPRel32BlockAddress:
  case MachineJumpTableInfo::EK_LabelDifference32:
  case MachineJumpTableInfo::EK_Custom32:
    return TD.getABIIntegerTypeAlignment(32);
  case MachineJumpTableInfo::EK_Inline:
    return 1;
  }
  llvm_unreachable("Unknown jump table encoding!");
}

/// Create a new jump table entry in the jump table info.
unsigned MachineJumpTableInfo::createJumpTableIndex(
                               const std::vector<MachineBasicBlock*> &DestBBs) {
  assert(!DestBBs.empty() && "Cannot create an empty jump table!");
  JumpTables.push_back(MachineJumpTableEntry(DestBBs));
  return JumpTables.size()-1;
}

/// If Old is the target of any jump tables, update the jump tables to branch
/// to New instead.
782bool MachineJumpTableInfo::ReplaceMBBInJumpTables(MachineBasicBlock *Old, 783 MachineBasicBlock *New) { 784 assert(Old != New && "Not making a change?"); 785 bool MadeChange = false; 786 for (size_t i = 0, e = JumpTables.size(); i != e; ++i) 787 ReplaceMBBInJumpTable(i, Old, New); 788 return MadeChange; 789} 790 791/// If Old is a target of the jump tables, update the jump table to branch to 792/// New instead. 793bool MachineJumpTableInfo::ReplaceMBBInJumpTable(unsigned Idx, 794 MachineBasicBlock *Old, 795 MachineBasicBlock *New) { 796 assert(Old != New && "Not making a change?"); 797 bool MadeChange = false; 798 MachineJumpTableEntry &JTE = JumpTables[Idx]; 799 for (size_t j = 0, e = JTE.MBBs.size(); j != e; ++j) 800 if (JTE.MBBs[j] == Old) { 801 JTE.MBBs[j] = New; 802 MadeChange = true; 803 } 804 return MadeChange; 805} 806 807void MachineJumpTableInfo::print(raw_ostream &OS) const { 808 if (JumpTables.empty()) return; 809 810 OS << "Jump Tables:\n"; 811 812 for (unsigned i = 0, e = JumpTables.size(); i != e; ++i) { 813 OS << " jt#" << i << ": "; 814 for (unsigned j = 0, f = JumpTables[i].MBBs.size(); j != f; ++j) 815 OS << " BB#" << JumpTables[i].MBBs[j]->getNumber(); 816 } 817 818 OS << '\n'; 819} 820 821#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP) 822void MachineJumpTableInfo::dump() const { print(dbgs()); } 823#endif 824 825 826//===----------------------------------------------------------------------===// 827// MachineConstantPool implementation 828//===----------------------------------------------------------------------===// 829 830void MachineConstantPoolValue::anchor() { } 831 832Type *MachineConstantPoolEntry::getType() const { 833 if (isMachineConstantPoolEntry()) 834 return Val.MachineCPVal->getType(); 835 return Val.ConstVal->getType(); 836} 837 838bool MachineConstantPoolEntry::needsRelocation() const { 839 if (isMachineConstantPoolEntry()) 840 return true; 841 return Val.ConstVal->needsRelocation(); 842} 843 844SectionKind 
845MachineConstantPoolEntry::getSectionKind(const DataLayout *DL) const { 846 if (needsRelocation()) 847 return SectionKind::getReadOnlyWithRel(); 848 switch (DL->getTypeAllocSize(getType())) { 849 case 4: 850 return SectionKind::getMergeableConst4(); 851 case 8: 852 return SectionKind::getMergeableConst8(); 853 case 16: 854 return SectionKind::getMergeableConst16(); 855 default: 856 return SectionKind::getReadOnly(); 857 } 858} 859 860MachineConstantPool::~MachineConstantPool() { 861 for (unsigned i = 0, e = Constants.size(); i != e; ++i) 862 if (Constants[i].isMachineConstantPoolEntry()) 863 delete Constants[i].Val.MachineCPVal; 864 for (DenseSet<MachineConstantPoolValue*>::iterator I = 865 MachineCPVsSharingEntries.begin(), E = MachineCPVsSharingEntries.end(); 866 I != E; ++I) 867 delete *I; 868} 869 870/// Test whether the given two constants can be allocated the same constant pool 871/// entry. 872static bool CanShareConstantPoolEntry(const Constant *A, const Constant *B, 873 const DataLayout &DL) { 874 // Handle the trivial case quickly. 875 if (A == B) return true; 876 877 // If they have the same type but weren't the same constant, quickly 878 // reject them. 879 if (A->getType() == B->getType()) return false; 880 881 // We can't handle structs or arrays. 882 if (isa<StructType>(A->getType()) || isa<ArrayType>(A->getType()) || 883 isa<StructType>(B->getType()) || isa<ArrayType>(B->getType())) 884 return false; 885 886 // For now, only support constants with the same size. 887 uint64_t StoreSize = DL.getTypeStoreSize(A->getType()); 888 if (StoreSize != DL.getTypeStoreSize(B->getType()) || StoreSize > 128) 889 return false; 890 891 Type *IntTy = IntegerType::get(A->getContext(), StoreSize*8); 892 893 // Try constant folding a bitcast of both instructions to an integer. If we 894 // get two identical ConstantInt's, then we are good to share them. We use 895 // the constant folding APIs to do this so that we get the benefit of 896 // DataLayout. 
897 if (isa<PointerType>(A->getType())) 898 A = ConstantFoldInstOperands(Instruction::PtrToInt, IntTy, 899 const_cast<Constant *>(A), DL); 900 else if (A->getType() != IntTy) 901 A = ConstantFoldInstOperands(Instruction::BitCast, IntTy, 902 const_cast<Constant *>(A), DL); 903 if (isa<PointerType>(B->getType())) 904 B = ConstantFoldInstOperands(Instruction::PtrToInt, IntTy, 905 const_cast<Constant *>(B), DL); 906 else if (B->getType() != IntTy) 907 B = ConstantFoldInstOperands(Instruction::BitCast, IntTy, 908 const_cast<Constant *>(B), DL); 909 910 return A == B; 911} 912 913/// Create a new entry in the constant pool or return an existing one. 914/// User must specify the log2 of the minimum required alignment for the object. 915unsigned MachineConstantPool::getConstantPoolIndex(const Constant *C, 916 unsigned Alignment) { 917 assert(Alignment && "Alignment must be specified!"); 918 if (Alignment > PoolAlignment) PoolAlignment = Alignment; 919 920 // Check to see if we already have this constant. 921 // 922 // FIXME, this could be made much more efficient for large constant pools. 923 for (unsigned i = 0, e = Constants.size(); i != e; ++i) 924 if (!Constants[i].isMachineConstantPoolEntry() && 925 CanShareConstantPoolEntry(Constants[i].Val.ConstVal, C, DL)) { 926 if ((unsigned)Constants[i].getAlignment() < Alignment) 927 Constants[i].Alignment = Alignment; 928 return i; 929 } 930 931 Constants.push_back(MachineConstantPoolEntry(C, Alignment)); 932 return Constants.size()-1; 933} 934 935unsigned MachineConstantPool::getConstantPoolIndex(MachineConstantPoolValue *V, 936 unsigned Alignment) { 937 assert(Alignment && "Alignment must be specified!"); 938 if (Alignment > PoolAlignment) PoolAlignment = Alignment; 939 940 // Check to see if we already have this constant. 941 // 942 // FIXME, this could be made much more efficient for large constant pools. 
943 int Idx = V->getExistingMachineCPValue(this, Alignment); 944 if (Idx != -1) { 945 MachineCPVsSharingEntries.insert(V); 946 return (unsigned)Idx; 947 } 948 949 Constants.push_back(MachineConstantPoolEntry(V, Alignment)); 950 return Constants.size()-1; 951} 952 953void MachineConstantPool::print(raw_ostream &OS) const { 954 if (Constants.empty()) return; 955 956 OS << "Constant Pool:\n"; 957 for (unsigned i = 0, e = Constants.size(); i != e; ++i) { 958 OS << " cp#" << i << ": "; 959 if (Constants[i].isMachineConstantPoolEntry()) 960 Constants[i].Val.MachineCPVal->print(OS); 961 else 962 Constants[i].Val.ConstVal->printAsOperand(OS, /*PrintType=*/false); 963 OS << ", align=" << Constants[i].getAlignment(); 964 OS << "\n"; 965 } 966} 967 968#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP) 969void MachineConstantPool::dump() const { print(dbgs()); } 970#endif 971