// DwarfEHPrepare.cpp — LLVM revision 208954
1//===-- DwarfEHPrepare - Prepare exception handling for code generation ---===// 2// 3// The LLVM Compiler Infrastructure 4// 5// This file is distributed under the University of Illinois Open Source 6// License. See LICENSE.TXT for details. 7// 8//===----------------------------------------------------------------------===// 9// 10// This pass mulches exception handling code into a form adapted to code 11// generation. Required if using dwarf exception handling. 12// 13//===----------------------------------------------------------------------===// 14 15#define DEBUG_TYPE "dwarfehprepare" 16#include "llvm/Function.h" 17#include "llvm/Instructions.h" 18#include "llvm/IntrinsicInst.h" 19#include "llvm/Module.h" 20#include "llvm/Pass.h" 21#include "llvm/ADT/Statistic.h" 22#include "llvm/Analysis/Dominators.h" 23#include "llvm/CodeGen/Passes.h" 24#include "llvm/MC/MCAsmInfo.h" 25#include "llvm/Target/TargetLowering.h" 26#include "llvm/Transforms/Utils/BasicBlockUtils.h" 27#include "llvm/Transforms/Utils/PromoteMemToReg.h" 28using namespace llvm; 29 30STATISTIC(NumLandingPadsSplit, "Number of landing pads split"); 31STATISTIC(NumUnwindsLowered, "Number of unwind instructions lowered"); 32STATISTIC(NumExceptionValuesMoved, "Number of eh.exception calls moved"); 33STATISTIC(NumStackTempsIntroduced, "Number of stack temporaries introduced"); 34 35namespace { 36 class DwarfEHPrepare : public FunctionPass { 37 const TargetMachine *TM; 38 const TargetLowering *TLI; 39 bool CompileFast; 40 41 // The eh.exception intrinsic. 42 Function *ExceptionValueIntrinsic; 43 44 // The eh.selector intrinsic. 45 Function *SelectorIntrinsic; 46 47 // _Unwind_Resume_or_Rethrow call. 48 Constant *URoR; 49 50 // The EH language-specific catch-all type. 51 GlobalVariable *EHCatchAllValue; 52 53 // _Unwind_Resume or the target equivalent. 54 Constant *RewindFunction; 55 56 // Dominator info is used when turning stack temporaries into registers. 
57 DominatorTree *DT; 58 DominanceFrontier *DF; 59 60 // The function we are running on. 61 Function *F; 62 63 // The landing pads for this function. 64 typedef SmallPtrSet<BasicBlock*, 8> BBSet; 65 BBSet LandingPads; 66 67 // Stack temporary used to hold eh.exception values. 68 AllocaInst *ExceptionValueVar; 69 70 bool NormalizeLandingPads(); 71 bool LowerUnwinds(); 72 bool MoveExceptionValueCalls(); 73 bool FinishStackTemporaries(); 74 bool PromoteStackTemporaries(); 75 76 Instruction *CreateExceptionValueCall(BasicBlock *BB); 77 Instruction *CreateValueLoad(BasicBlock *BB); 78 79 /// CreateReadOfExceptionValue - Return the result of the eh.exception 80 /// intrinsic by calling the intrinsic if in a landing pad, or loading it 81 /// from the exception value variable otherwise. 82 Instruction *CreateReadOfExceptionValue(BasicBlock *BB) { 83 return LandingPads.count(BB) ? 84 CreateExceptionValueCall(BB) : CreateValueLoad(BB); 85 } 86 87 /// CleanupSelectors - Any remaining eh.selector intrinsic calls which still 88 /// use the ".llvm.eh.catch.all.value" call need to convert to using its 89 /// initializer instead. 90 bool CleanupSelectors(); 91 92 /// FindAllCleanupSelectors - Find all eh.selector calls that are clean-ups. 93 void FindAllCleanupSelectors(SmallPtrSet<IntrinsicInst*, 32> &Sels); 94 95 /// FindAllURoRInvokes - Find all URoR invokes in the function. 96 void FindAllURoRInvokes(SmallPtrSet<InvokeInst*, 32> &URoRInvokes); 97 98 /// HandleURoRInvokes - Handle invokes of "_Unwind_Resume_or_Rethrow" 99 /// calls. The "unwind" part of these invokes jump to a landing pad within 100 /// the current function. This is a candidate to merge the selector 101 /// associated with the URoR invoke with the one from the URoR's landing 102 /// pad. 103 bool HandleURoRInvokes(); 104 105 /// FindSelectorAndURoR - Find the eh.selector call and URoR call associated 106 /// with the eh.exception call. 
This recursively looks past instructions 107 /// which don't change the EH pointer value, like casts or PHI nodes. 108 bool FindSelectorAndURoR(Instruction *Inst, bool &URoRInvoke, 109 SmallPtrSet<IntrinsicInst*, 8> &SelCalls); 110 111 /// DoMem2RegPromotion - Take an alloca call and promote it from memory to a 112 /// register. 113 bool DoMem2RegPromotion(Value *V) { 114 AllocaInst *AI = dyn_cast<AllocaInst>(V); 115 if (!AI || !isAllocaPromotable(AI)) return false; 116 117 // Turn the alloca into a register. 118 std::vector<AllocaInst*> Allocas(1, AI); 119 PromoteMemToReg(Allocas, *DT, *DF); 120 return true; 121 } 122 123 /// PromoteStoreInst - Perform Mem2Reg on a StoreInst. 124 bool PromoteStoreInst(StoreInst *SI) { 125 if (!SI || !DT || !DF) return false; 126 if (DoMem2RegPromotion(SI->getOperand(1))) 127 return true; 128 return false; 129 } 130 131 /// PromoteEHPtrStore - Promote the storing of an EH pointer into a 132 /// register. This should get rid of the store and subsequent loads. 133 bool PromoteEHPtrStore(IntrinsicInst *II) { 134 if (!DT || !DF) return false; 135 136 bool Changed = false; 137 StoreInst *SI; 138 139 while (1) { 140 SI = 0; 141 for (Value::use_iterator 142 I = II->use_begin(), E = II->use_end(); I != E; ++I) { 143 SI = dyn_cast<StoreInst>(I); 144 if (SI) break; 145 } 146 147 if (!PromoteStoreInst(SI)) 148 break; 149 150 Changed = true; 151 } 152 153 return false; 154 } 155 156 public: 157 static char ID; // Pass identification, replacement for typeid. 158 DwarfEHPrepare(const TargetMachine *tm, bool fast) : 159 FunctionPass(&ID), TM(tm), TLI(TM->getTargetLowering()), 160 CompileFast(fast), 161 ExceptionValueIntrinsic(0), SelectorIntrinsic(0), 162 URoR(0), EHCatchAllValue(0), RewindFunction(0) {} 163 164 virtual bool runOnFunction(Function &Fn); 165 166 // getAnalysisUsage - We need dominance frontiers for memory promotion. 
167 virtual void getAnalysisUsage(AnalysisUsage &AU) const { 168 if (!CompileFast) 169 AU.addRequired<DominatorTree>(); 170 AU.addPreserved<DominatorTree>(); 171 if (!CompileFast) 172 AU.addRequired<DominanceFrontier>(); 173 AU.addPreserved<DominanceFrontier>(); 174 } 175 176 const char *getPassName() const { 177 return "Exception handling preparation"; 178 } 179 180 }; 181} // end anonymous namespace 182 183char DwarfEHPrepare::ID = 0; 184 185FunctionPass *llvm::createDwarfEHPass(const TargetMachine *tm, bool fast) { 186 return new DwarfEHPrepare(tm, fast); 187} 188 189/// FindAllCleanupSelectors - Find all eh.selector calls that are clean-ups. 190void DwarfEHPrepare:: 191FindAllCleanupSelectors(SmallPtrSet<IntrinsicInst*, 32> &Sels) { 192 for (Value::use_iterator 193 I = SelectorIntrinsic->use_begin(), 194 E = SelectorIntrinsic->use_end(); I != E; ++I) { 195 IntrinsicInst *SI = cast<IntrinsicInst>(I); 196 if (!SI || SI->getParent()->getParent() != F) continue; 197 198 unsigned NumOps = SI->getNumOperands(); 199 if (NumOps > 4) continue; 200 bool IsCleanUp = (NumOps == 3); 201 202 if (!IsCleanUp) 203 if (ConstantInt *CI = dyn_cast<ConstantInt>(SI->getOperand(3))) 204 IsCleanUp = (CI->getZExtValue() == 0); 205 206 if (IsCleanUp) 207 Sels.insert(SI); 208 } 209} 210 211/// FindAllURoRInvokes - Find all URoR invokes in the function. 212void DwarfEHPrepare:: 213FindAllURoRInvokes(SmallPtrSet<InvokeInst*, 32> &URoRInvokes) { 214 for (Value::use_iterator 215 I = URoR->use_begin(), 216 E = URoR->use_end(); I != E; ++I) { 217 if (InvokeInst *II = dyn_cast<InvokeInst>(I)) 218 URoRInvokes.insert(II); 219 } 220} 221 222/// CleanupSelectors - Any remaining eh.selector intrinsic calls which still use 223/// the ".llvm.eh.catch.all.value" call need to convert to using its 224/// initializer instead. 
225bool DwarfEHPrepare::CleanupSelectors() { 226 if (!EHCatchAllValue) return false; 227 228 if (!SelectorIntrinsic) { 229 SelectorIntrinsic = 230 Intrinsic::getDeclaration(F->getParent(), Intrinsic::eh_selector); 231 if (!SelectorIntrinsic) return false; 232 } 233 234 bool Changed = false; 235 for (Value::use_iterator 236 I = SelectorIntrinsic->use_begin(), 237 E = SelectorIntrinsic->use_end(); I != E; ++I) { 238 IntrinsicInst *Sel = dyn_cast<IntrinsicInst>(I); 239 if (!Sel || Sel->getParent()->getParent() != F) continue; 240 241 // Index of the ".llvm.eh.catch.all.value" variable. 242 unsigned OpIdx = Sel->getNumOperands() - 1; 243 GlobalVariable *GV = dyn_cast<GlobalVariable>(Sel->getOperand(OpIdx)); 244 if (GV != EHCatchAllValue) continue; 245 Sel->setOperand(OpIdx, EHCatchAllValue->getInitializer()); 246 Changed = true; 247 } 248 249 return Changed; 250} 251 252/// FindSelectorAndURoR - Find the eh.selector call associated with the 253/// eh.exception call. And indicate if there is a URoR "invoke" associated with 254/// the eh.exception call. This recursively looks past instructions which don't 255/// change the EH pointer value, like casts or PHI nodes. 
256bool 257DwarfEHPrepare::FindSelectorAndURoR(Instruction *Inst, bool &URoRInvoke, 258 SmallPtrSet<IntrinsicInst*, 8> &SelCalls) { 259 SmallPtrSet<PHINode*, 32> SeenPHIs; 260 bool Changed = false; 261 262 restart: 263 for (Value::use_iterator 264 I = Inst->use_begin(), E = Inst->use_end(); I != E; ++I) { 265 Instruction *II = dyn_cast<Instruction>(I); 266 if (!II || II->getParent()->getParent() != F) continue; 267 268 if (IntrinsicInst *Sel = dyn_cast<IntrinsicInst>(II)) { 269 if (Sel->getIntrinsicID() == Intrinsic::eh_selector) 270 SelCalls.insert(Sel); 271 } else if (InvokeInst *Invoke = dyn_cast<InvokeInst>(II)) { 272 if (Invoke->getCalledFunction() == URoR) 273 URoRInvoke = true; 274 } else if (CastInst *CI = dyn_cast<CastInst>(II)) { 275 Changed |= FindSelectorAndURoR(CI, URoRInvoke, SelCalls); 276 } else if (StoreInst *SI = dyn_cast<StoreInst>(II)) { 277 if (!PromoteStoreInst(SI)) continue; 278 Changed = true; 279 SeenPHIs.clear(); 280 goto restart; // Uses may have changed, restart loop. 281 } else if (PHINode *PN = dyn_cast<PHINode>(II)) { 282 if (SeenPHIs.insert(PN)) 283 // Don't process a PHI node more than once. 284 Changed |= FindSelectorAndURoR(PN, URoRInvoke, SelCalls); 285 } 286 } 287 288 return Changed; 289} 290 291/// HandleURoRInvokes - Handle invokes of "_Unwind_Resume_or_Rethrow" calls. The 292/// "unwind" part of these invokes jump to a landing pad within the current 293/// function. This is a candidate to merge the selector associated with the URoR 294/// invoke with the one from the URoR's landing pad. 295bool DwarfEHPrepare::HandleURoRInvokes() { 296 if (!DT) return CleanupSelectors(); // We require DominatorTree information. 
297 298 if (!EHCatchAllValue) { 299 EHCatchAllValue = 300 F->getParent()->getNamedGlobal(".llvm.eh.catch.all.value"); 301 if (!EHCatchAllValue) return false; 302 } 303 304 if (!SelectorIntrinsic) { 305 SelectorIntrinsic = 306 Intrinsic::getDeclaration(F->getParent(), Intrinsic::eh_selector); 307 if (!SelectorIntrinsic) return false; 308 } 309 310 if (!URoR) { 311 URoR = F->getParent()->getFunction("_Unwind_Resume_or_Rethrow"); 312 if (!URoR) return CleanupSelectors(); 313 } 314 315 SmallPtrSet<IntrinsicInst*, 32> Sels; 316 SmallPtrSet<InvokeInst*, 32> URoRInvokes; 317 FindAllCleanupSelectors(Sels); 318 FindAllURoRInvokes(URoRInvokes); 319 320 SmallPtrSet<IntrinsicInst*, 32> SelsToConvert; 321 322 for (SmallPtrSet<IntrinsicInst*, 32>::iterator 323 SI = Sels.begin(), SE = Sels.end(); SI != SE; ++SI) { 324 const BasicBlock *SelBB = (*SI)->getParent(); 325 for (SmallPtrSet<InvokeInst*, 32>::iterator 326 UI = URoRInvokes.begin(), UE = URoRInvokes.end(); UI != UE; ++UI) { 327 const BasicBlock *URoRBB = (*UI)->getParent(); 328 if (SelBB == URoRBB || DT->dominates(SelBB, URoRBB)) { 329 SelsToConvert.insert(*SI); 330 break; 331 } 332 } 333 } 334 335 bool Changed = false; 336 337 if (Sels.size() != SelsToConvert.size()) { 338 // If we haven't been able to convert all of the clean-up selectors, then 339 // loop through the slow way to see if they still need to be converted. 
340 if (!ExceptionValueIntrinsic) { 341 ExceptionValueIntrinsic = 342 Intrinsic::getDeclaration(F->getParent(), Intrinsic::eh_exception); 343 if (!ExceptionValueIntrinsic) return CleanupSelectors(); 344 } 345 346 for (Value::use_iterator 347 I = ExceptionValueIntrinsic->use_begin(), 348 E = ExceptionValueIntrinsic->use_end(); I != E; ++I) { 349 IntrinsicInst *EHPtr = dyn_cast<IntrinsicInst>(I); 350 if (!EHPtr || EHPtr->getParent()->getParent() != F) continue; 351 352 Changed |= PromoteEHPtrStore(EHPtr); 353 354 bool URoRInvoke = false; 355 SmallPtrSet<IntrinsicInst*, 8> SelCalls; 356 Changed |= FindSelectorAndURoR(EHPtr, URoRInvoke, SelCalls); 357 358 if (URoRInvoke) { 359 // This EH pointer is being used by an invoke of an URoR instruction and 360 // an eh.selector intrinsic call. If the eh.selector is a 'clean-up', we 361 // need to convert it to a 'catch-all'. 362 for (SmallPtrSet<IntrinsicInst*, 8>::iterator 363 SI = SelCalls.begin(), SE = SelCalls.end(); SI != SE; ++SI) { 364 IntrinsicInst *II = *SI; 365 unsigned NumOps = II->getNumOperands(); 366 367 if (NumOps <= 4) { 368 bool IsCleanUp = (NumOps == 3); 369 370 if (!IsCleanUp) 371 if (ConstantInt *CI = dyn_cast<ConstantInt>(II->getOperand(3))) 372 IsCleanUp = (CI->getZExtValue() == 0); 373 374 if (IsCleanUp) 375 SelsToConvert.insert(II); 376 } 377 } 378 } 379 } 380 } 381 382 if (!SelsToConvert.empty()) { 383 // Convert all clean-up eh.selectors, which are associated with "invokes" of 384 // URoR calls, into catch-all eh.selectors. 385 Changed = true; 386 387 for (SmallPtrSet<IntrinsicInst*, 8>::iterator 388 SI = SelsToConvert.begin(), SE = SelsToConvert.end(); 389 SI != SE; ++SI) { 390 IntrinsicInst *II = *SI; 391 SmallVector<Value*, 8> Args; 392 393 // Use the exception object pointer and the personality function 394 // from the original selector. 395 Args.push_back(II->getOperand(1)); // Exception object pointer. 396 Args.push_back(II->getOperand(2)); // Personality function. 
397 Args.push_back(EHCatchAllValue->getInitializer()); // Catch-all indicator. 398 399 CallInst *NewSelector = 400 CallInst::Create(SelectorIntrinsic, Args.begin(), Args.end(), 401 "eh.sel.catch.all", II); 402 403 NewSelector->setTailCall(II->isTailCall()); 404 NewSelector->setAttributes(II->getAttributes()); 405 NewSelector->setCallingConv(II->getCallingConv()); 406 407 II->replaceAllUsesWith(NewSelector); 408 II->eraseFromParent(); 409 } 410 } 411 412 Changed |= CleanupSelectors(); 413 return Changed; 414} 415 416/// NormalizeLandingPads - Normalize and discover landing pads, noting them 417/// in the LandingPads set. A landing pad is normal if the only CFG edges 418/// that end at it are unwind edges from invoke instructions. If we inlined 419/// through an invoke we could have a normal branch from the previous 420/// unwind block through to the landing pad for the original invoke. 421/// Abnormal landing pads are fixed up by redirecting all unwind edges to 422/// a new basic block which falls through to the original. 423bool DwarfEHPrepare::NormalizeLandingPads() { 424 bool Changed = false; 425 426 const MCAsmInfo *MAI = TM->getMCAsmInfo(); 427 bool usingSjLjEH = MAI->getExceptionHandlingType() == ExceptionHandling::SjLj; 428 429 for (Function::iterator I = F->begin(), E = F->end(); I != E; ++I) { 430 TerminatorInst *TI = I->getTerminator(); 431 if (!isa<InvokeInst>(TI)) 432 continue; 433 BasicBlock *LPad = TI->getSuccessor(1); 434 // Skip landing pads that have already been normalized. 435 if (LandingPads.count(LPad)) 436 continue; 437 438 // Check that only invoke unwind edges end at the landing pad. 439 bool OnlyUnwoundTo = true; 440 bool SwitchOK = usingSjLjEH; 441 for (pred_iterator PI = pred_begin(LPad), PE = pred_end(LPad); 442 PI != PE; ++PI) { 443 TerminatorInst *PT = (*PI)->getTerminator(); 444 // The SjLj dispatch block uses a switch instruction. This is effectively 445 // an unwind edge, so we can disregard it here. 
There will only ever 446 // be one dispatch, however, so if there are multiple switches, one 447 // of them truly is a normal edge, not an unwind edge. 448 if (SwitchOK && isa<SwitchInst>(PT)) { 449 SwitchOK = false; 450 continue; 451 } 452 if (!isa<InvokeInst>(PT) || LPad == PT->getSuccessor(0)) { 453 OnlyUnwoundTo = false; 454 break; 455 } 456 } 457 458 if (OnlyUnwoundTo) { 459 // Only unwind edges lead to the landing pad. Remember the landing pad. 460 LandingPads.insert(LPad); 461 continue; 462 } 463 464 // At least one normal edge ends at the landing pad. Redirect the unwind 465 // edges to a new basic block which falls through into this one. 466 467 // Create the new basic block. 468 BasicBlock *NewBB = BasicBlock::Create(F->getContext(), 469 LPad->getName() + "_unwind_edge"); 470 471 // Insert it into the function right before the original landing pad. 472 LPad->getParent()->getBasicBlockList().insert(LPad, NewBB); 473 474 // Redirect unwind edges from the original landing pad to NewBB. 475 for (pred_iterator PI = pred_begin(LPad), PE = pred_end(LPad); PI != PE; ) { 476 TerminatorInst *PT = (*PI++)->getTerminator(); 477 if (isa<InvokeInst>(PT) && PT->getSuccessor(1) == LPad) 478 // Unwind to the new block. 479 PT->setSuccessor(1, NewBB); 480 } 481 482 // If there are any PHI nodes in LPad, we need to update them so that they 483 // merge incoming values from NewBB instead. 484 for (BasicBlock::iterator II = LPad->begin(); isa<PHINode>(II); ++II) { 485 PHINode *PN = cast<PHINode>(II); 486 pred_iterator PB = pred_begin(NewBB), PE = pred_end(NewBB); 487 488 // Check to see if all of the values coming in via unwind edges are the 489 // same. If so, we don't need to create a new PHI node. 
490 Value *InVal = PN->getIncomingValueForBlock(*PB); 491 for (pred_iterator PI = PB; PI != PE; ++PI) { 492 if (PI != PB && InVal != PN->getIncomingValueForBlock(*PI)) { 493 InVal = 0; 494 break; 495 } 496 } 497 498 if (InVal == 0) { 499 // Different unwind edges have different values. Create a new PHI node 500 // in NewBB. 501 PHINode *NewPN = PHINode::Create(PN->getType(), PN->getName()+".unwind", 502 NewBB); 503 // Add an entry for each unwind edge, using the value from the old PHI. 504 for (pred_iterator PI = PB; PI != PE; ++PI) 505 NewPN->addIncoming(PN->getIncomingValueForBlock(*PI), *PI); 506 507 // Now use this new PHI as the common incoming value for NewBB in PN. 508 InVal = NewPN; 509 } 510 511 // Revector exactly one entry in the PHI node to come from NewBB 512 // and delete all other entries that come from unwind edges. If 513 // there are both normal and unwind edges from the same predecessor, 514 // this leaves an entry for the normal edge. 515 for (pred_iterator PI = PB; PI != PE; ++PI) 516 PN->removeIncomingValue(*PI); 517 PN->addIncoming(InVal, NewBB); 518 } 519 520 // Add a fallthrough from NewBB to the original landing pad. 521 BranchInst::Create(LPad, NewBB); 522 523 // Now update DominatorTree and DominanceFrontier analysis information. 524 if (DT) 525 DT->splitBlock(NewBB); 526 if (DF) 527 DF->splitBlock(NewBB); 528 529 // Remember the newly constructed landing pad. The original landing pad 530 // LPad is no longer a landing pad now that all unwind edges have been 531 // revectored to NewBB. 532 LandingPads.insert(NewBB); 533 ++NumLandingPadsSplit; 534 Changed = true; 535 } 536 537 return Changed; 538} 539 540/// LowerUnwinds - Turn unwind instructions into calls to _Unwind_Resume, 541/// rethrowing any previously caught exception. This will crash horribly 542/// at runtime if there is no such exception: using unwind to throw a new 543/// exception is currently not supported. 
544bool DwarfEHPrepare::LowerUnwinds() { 545 SmallVector<TerminatorInst*, 16> UnwindInsts; 546 547 for (Function::iterator I = F->begin(), E = F->end(); I != E; ++I) { 548 TerminatorInst *TI = I->getTerminator(); 549 if (isa<UnwindInst>(TI)) 550 UnwindInsts.push_back(TI); 551 } 552 553 if (UnwindInsts.empty()) return false; 554 555 // Find the rewind function if we didn't already. 556 if (!RewindFunction) { 557 LLVMContext &Ctx = UnwindInsts[0]->getContext(); 558 std::vector<const Type*> 559 Params(1, Type::getInt8PtrTy(Ctx)); 560 FunctionType *FTy = FunctionType::get(Type::getVoidTy(Ctx), 561 Params, false); 562 const char *RewindName = TLI->getLibcallName(RTLIB::UNWIND_RESUME); 563 RewindFunction = F->getParent()->getOrInsertFunction(RewindName, FTy); 564 } 565 566 bool Changed = false; 567 568 for (SmallVectorImpl<TerminatorInst*>::iterator 569 I = UnwindInsts.begin(), E = UnwindInsts.end(); I != E; ++I) { 570 TerminatorInst *TI = *I; 571 572 // Replace the unwind instruction with a call to _Unwind_Resume (or the 573 // appropriate target equivalent) followed by an UnreachableInst. 574 575 // Create the call... 576 CallInst *CI = CallInst::Create(RewindFunction, 577 CreateReadOfExceptionValue(TI->getParent()), 578 "", TI); 579 CI->setCallingConv(TLI->getLibcallCallingConv(RTLIB::UNWIND_RESUME)); 580 // ...followed by an UnreachableInst. 581 new UnreachableInst(TI->getContext(), TI); 582 583 // Nuke the unwind instruction. 584 TI->eraseFromParent(); 585 ++NumUnwindsLowered; 586 Changed = true; 587 } 588 589 return Changed; 590} 591 592/// MoveExceptionValueCalls - Ensure that eh.exception is only ever called from 593/// landing pads by replacing calls outside of landing pads with loads from a 594/// stack temporary. Move eh.exception calls inside landing pads to the start 595/// of the landing pad (optional, but may make things simpler for later passes). 
596bool DwarfEHPrepare::MoveExceptionValueCalls() { 597 // If the eh.exception intrinsic is not declared in the module then there is 598 // nothing to do. Speed up compilation by checking for this common case. 599 if (!ExceptionValueIntrinsic && 600 !F->getParent()->getFunction(Intrinsic::getName(Intrinsic::eh_exception))) 601 return false; 602 603 bool Changed = false; 604 605 for (Function::iterator BB = F->begin(), E = F->end(); BB != E; ++BB) { 606 for (BasicBlock::iterator II = BB->begin(), E = BB->end(); II != E;) 607 if (IntrinsicInst *CI = dyn_cast<IntrinsicInst>(II++)) 608 if (CI->getIntrinsicID() == Intrinsic::eh_exception) { 609 if (!CI->use_empty()) { 610 Value *ExceptionValue = CreateReadOfExceptionValue(BB); 611 if (CI == ExceptionValue) { 612 // The call was at the start of a landing pad - leave it alone. 613 assert(LandingPads.count(BB) && 614 "Created eh.exception call outside landing pad!"); 615 continue; 616 } 617 CI->replaceAllUsesWith(ExceptionValue); 618 } 619 CI->eraseFromParent(); 620 ++NumExceptionValuesMoved; 621 Changed = true; 622 } 623 } 624 625 return Changed; 626} 627 628/// FinishStackTemporaries - If we introduced a stack variable to hold the 629/// exception value then initialize it in each landing pad. 630bool DwarfEHPrepare::FinishStackTemporaries() { 631 if (!ExceptionValueVar) 632 // Nothing to do. 633 return false; 634 635 bool Changed = false; 636 637 // Make sure that there is a store of the exception value at the start of 638 // each landing pad. 639 for (BBSet::iterator LI = LandingPads.begin(), LE = LandingPads.end(); 640 LI != LE; ++LI) { 641 Instruction *ExceptionValue = CreateReadOfExceptionValue(*LI); 642 Instruction *Store = new StoreInst(ExceptionValue, ExceptionValueVar); 643 Store->insertAfter(ExceptionValue); 644 Changed = true; 645 } 646 647 return Changed; 648} 649 650/// PromoteStackTemporaries - Turn any stack temporaries we introduced into 651/// registers if possible. 
652bool DwarfEHPrepare::PromoteStackTemporaries() { 653 if (ExceptionValueVar && DT && DF && isAllocaPromotable(ExceptionValueVar)) { 654 // Turn the exception temporary into registers and phi nodes if possible. 655 std::vector<AllocaInst*> Allocas(1, ExceptionValueVar); 656 PromoteMemToReg(Allocas, *DT, *DF); 657 return true; 658 } 659 return false; 660} 661 662/// CreateExceptionValueCall - Insert a call to the eh.exception intrinsic at 663/// the start of the basic block (unless there already is one, in which case 664/// the existing call is returned). 665Instruction *DwarfEHPrepare::CreateExceptionValueCall(BasicBlock *BB) { 666 Instruction *Start = BB->getFirstNonPHIOrDbg(); 667 // Is this a call to eh.exception? 668 if (IntrinsicInst *CI = dyn_cast<IntrinsicInst>(Start)) 669 if (CI->getIntrinsicID() == Intrinsic::eh_exception) 670 // Reuse the existing call. 671 return Start; 672 673 // Find the eh.exception intrinsic if we didn't already. 674 if (!ExceptionValueIntrinsic) 675 ExceptionValueIntrinsic = Intrinsic::getDeclaration(F->getParent(), 676 Intrinsic::eh_exception); 677 678 // Create the call. 679 return CallInst::Create(ExceptionValueIntrinsic, "eh.value.call", Start); 680} 681 682/// CreateValueLoad - Insert a load of the exception value stack variable 683/// (creating it if necessary) at the start of the basic block (unless 684/// there already is a load, in which case the existing load is returned). 685Instruction *DwarfEHPrepare::CreateValueLoad(BasicBlock *BB) { 686 Instruction *Start = BB->getFirstNonPHIOrDbg(); 687 // Is this a load of the exception temporary? 688 if (ExceptionValueVar) 689 if (LoadInst* LI = dyn_cast<LoadInst>(Start)) 690 if (LI->getPointerOperand() == ExceptionValueVar) 691 // Reuse the existing load. 692 return Start; 693 694 // Create the temporary if we didn't already. 
695 if (!ExceptionValueVar) { 696 ExceptionValueVar = new AllocaInst(PointerType::getUnqual( 697 Type::getInt8Ty(BB->getContext())), "eh.value", F->begin()->begin()); 698 ++NumStackTempsIntroduced; 699 } 700 701 // Load the value. 702 return new LoadInst(ExceptionValueVar, "eh.value.load", Start); 703} 704 705bool DwarfEHPrepare::runOnFunction(Function &Fn) { 706 bool Changed = false; 707 708 // Initialize internal state. 709 DT = getAnalysisIfAvailable<DominatorTree>(); 710 DF = getAnalysisIfAvailable<DominanceFrontier>(); 711 ExceptionValueVar = 0; 712 F = &Fn; 713 714 // Ensure that only unwind edges end at landing pads (a landing pad is a 715 // basic block where an invoke unwind edge ends). 716 Changed |= NormalizeLandingPads(); 717 718 // Turn unwind instructions into libcalls. 719 Changed |= LowerUnwinds(); 720 721 // TODO: Move eh.selector calls to landing pads and combine them. 722 723 // Move eh.exception calls to landing pads. 724 Changed |= MoveExceptionValueCalls(); 725 726 // Initialize any stack temporaries we introduced. 727 Changed |= FinishStackTemporaries(); 728 729 // Turn any stack temporaries into registers if possible. 730 if (!CompileFast) 731 Changed |= PromoteStackTemporaries(); 732 733 Changed |= HandleURoRInvokes(); 734 735 LandingPads.clear(); 736 737 return Changed; 738} 739