//===--- CGCleanup.cpp - Bookkeeping and code emission for cleanups ------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file contains code dealing with the IR generation for cleanups
// and related information.
//
// A "cleanup" is a piece of code which needs to be executed whenever
// control transfers out of a particular scope. This can be
// conditionalized to occur only on exceptional control flow, only on
// normal control flow, or both.
//
//===----------------------------------------------------------------------===//

#include "CGCleanup.h"
#include "CodeGenFunction.h"

using namespace clang;
using namespace CodeGen;

bool DominatingValue<RValue>::saved_type::needsSaving(RValue rv) {
  if (rv.isScalar())
    return DominatingLLVMValue::needsSaving(rv.getScalarVal());
  if (rv.isAggregate())
    return DominatingLLVMValue::needsSaving(rv.getAggregateAddr());
  return true;
}

DominatingValue<RValue>::saved_type
DominatingValue<RValue>::saved_type::save(CodeGenFunction &CGF, RValue rv) {
  if (rv.isScalar()) {
    llvm::Value *V = rv.getScalarVal();

    // These automatically dominate and don't need to be saved.
    if (!DominatingLLVMValue::needsSaving(V))
      return saved_type(V, ScalarLiteral);

    // Everything else needs an alloca.
    llvm::Value *addr = CGF.CreateTempAlloca(V->getType(), "saved-rvalue");
    CGF.Builder.CreateStore(V, addr);
    return saved_type(addr, ScalarAddress);
  }

  if (rv.isComplex()) {
    CodeGenFunction::ComplexPairTy V = rv.getComplexVal();
    llvm::Type *ComplexTy =
      llvm::StructType::get(V.first->getType(), V.second->getType(),
                            (void*) 0);
    llvm::Value *addr = CGF.CreateTempAlloca(ComplexTy, "saved-complex");
    CGF.Builder.CreateStore(V.first, CGF.Builder.CreateStructGEP(addr, 0));
    CGF.Builder.CreateStore(V.second, CGF.Builder.CreateStructGEP(addr, 1));
    return saved_type(addr, ComplexAddress);
  }

  assert(rv.isAggregate());
  llvm::Value *V = rv.getAggregateAddr(); // TODO: volatile?
  if (!DominatingLLVMValue::needsSaving(V))
    return saved_type(V, AggregateLiteral);

  llvm::Value *addr = CGF.CreateTempAlloca(V->getType(), "saved-rvalue");
  CGF.Builder.CreateStore(V, addr);
  return saved_type(addr, AggregateAddress);
}

/// Given a saved r-value produced by SaveRValue, emit the code
/// necessary to restore it to usability at the current insertion
/// point.
RValue DominatingValue<RValue>::saved_type::restore(CodeGenFunction &CGF) {
  switch (K) {
  case ScalarLiteral:
    return RValue::get(Value);
  case ScalarAddress:
    return RValue::get(CGF.Builder.CreateLoad(Value));
  case AggregateLiteral:
    return RValue::getAggregate(Value);
  case AggregateAddress:
    return RValue::getAggregate(CGF.Builder.CreateLoad(Value));
  case ComplexAddress: {
    llvm::Value *real =
      CGF.Builder.CreateLoad(CGF.Builder.CreateStructGEP(Value, 0));
    llvm::Value *imag =
      CGF.Builder.CreateLoad(CGF.Builder.CreateStructGEP(Value, 1));
    return RValue::getComplex(real, imag);
  }
  }

  llvm_unreachable("bad saved r-value kind");
}

/// Push an entry of the given size onto this protected-scope stack.
char *EHScopeStack::allocate(size_t Size) {
  if (!StartOfBuffer) {
    unsigned Capacity = 1024;
    while (Capacity < Size) Capacity *= 2;
    StartOfBuffer = new char[Capacity];
    StartOfData = EndOfBuffer = StartOfBuffer + Capacity;
  } else if (static_cast<size_t>(StartOfData - StartOfBuffer) < Size) {
    unsigned CurrentCapacity = EndOfBuffer - StartOfBuffer;
    unsigned UsedCapacity = CurrentCapacity - (StartOfData - StartOfBuffer);

    unsigned NewCapacity = CurrentCapacity;
    do {
      NewCapacity *= 2;
    } while (NewCapacity < UsedCapacity + Size);

    char *NewStartOfBuffer = new char[NewCapacity];
    char *NewEndOfBuffer = NewStartOfBuffer + NewCapacity;
    char *NewStartOfData = NewEndOfBuffer - UsedCapacity;
    memcpy(NewStartOfData, StartOfData, UsedCapacity);
    delete [] StartOfBuffer;
    StartOfBuffer = NewStartOfBuffer;
    EndOfBuffer = NewEndOfBuffer;
    StartOfData = NewStartOfData;
  }

  assert(StartOfBuffer + Size <= StartOfData);
  StartOfData -= Size;
  return StartOfData;
}

EHScopeStack::stable_iterator
EHScopeStack::getInnermostActiveNormalCleanup() const {
  for (stable_iterator si = getInnermostNormalCleanup(), se = stable_end();
         si != se; ) {
    EHCleanupScope &cleanup = cast<EHCleanupScope>(*find(si));
    if (cleanup.isActive()) return si;
    si = cleanup.getEnclosingNormalCleanup();
  }
  return stable_end();
}

EHScopeStack::stable_iterator EHScopeStack::getInnermostActiveEHScope() const {
  for (stable_iterator si = getInnermostEHScope(), se = stable_end();
         si != se; ) {
    // Skip over inactive cleanups.
    EHCleanupScope *cleanup = dyn_cast<EHCleanupScope>(&*find(si));
    if (cleanup && !cleanup->isActive()) {
      si = cleanup->getEnclosingEHScope();
      continue;
    }

    // All other scopes are always active.
    return si;
  }

  return stable_end();
}


void *EHScopeStack::pushCleanup(CleanupKind Kind, size_t Size) {
  assert(((Size % sizeof(void*)) == 0) && "cleanup type is misaligned");
  char *Buffer = allocate(EHCleanupScope::getSizeForCleanupSize(Size));
  bool IsNormalCleanup = Kind & NormalCleanup;
  bool IsEHCleanup = Kind & EHCleanup;
  bool IsActive = !(Kind & InactiveCleanup);
  EHCleanupScope *Scope =
    new (Buffer) EHCleanupScope(IsNormalCleanup,
                                IsEHCleanup,
                                IsActive,
                                Size,
                                BranchFixups.size(),
                                InnermostNormalCleanup,
                                InnermostEHScope);
  if (IsNormalCleanup)
    InnermostNormalCleanup = stable_begin();
  if (IsEHCleanup)
    InnermostEHScope = stable_begin();

  return Scope->getCleanupBuffer();
}

void EHScopeStack::popCleanup() {
  assert(!empty() && "popping exception stack when not empty");

  assert(isa<EHCleanupScope>(*begin()));
  EHCleanupScope &Cleanup = cast<EHCleanupScope>(*begin());
  InnermostNormalCleanup = Cleanup.getEnclosingNormalCleanup();
  InnermostEHScope = Cleanup.getEnclosingEHScope();
  StartOfData += Cleanup.getAllocatedSize();

  // Destroy the cleanup.
  Cleanup.~EHCleanupScope();

  // Check whether we can shrink the branch-fixups stack.
  if (!BranchFixups.empty()) {
    // If we no longer have any normal cleanups, all the fixups are
    // complete.
    if (!hasNormalCleanups())
      BranchFixups.clear();

    // Otherwise we can still trim out unnecessary nulls.
    else
      popNullFixups();
  }
}

EHFilterScope *EHScopeStack::pushFilter(unsigned numFilters) {
  assert(getInnermostEHScope() == stable_end());
  char *buffer = allocate(EHFilterScope::getSizeForNumFilters(numFilters));
  EHFilterScope *filter = new (buffer) EHFilterScope(numFilters);
  InnermostEHScope = stable_begin();
  return filter;
}

void EHScopeStack::popFilter() {
  assert(!empty() && "popping exception stack when not empty");

  EHFilterScope &filter = cast<EHFilterScope>(*begin());
  StartOfData += EHFilterScope::getSizeForNumFilters(filter.getNumFilters());

  InnermostEHScope = filter.getEnclosingEHScope();
}

EHCatchScope *EHScopeStack::pushCatch(unsigned numHandlers) {
  char *buffer = allocate(EHCatchScope::getSizeForNumHandlers(numHandlers));
  EHCatchScope *scope =
    new (buffer) EHCatchScope(numHandlers, InnermostEHScope);
  InnermostEHScope = stable_begin();
  return scope;
}

void EHScopeStack::pushTerminate() {
  char *Buffer = allocate(EHTerminateScope::getSize());
  new (Buffer) EHTerminateScope(InnermostEHScope);
  InnermostEHScope = stable_begin();
}

/// Remove any 'null' fixups on the stack. However, we can't pop more
/// fixups than the fixup depth on the innermost normal cleanup, or
/// else fixups that we try to add to that cleanup will end up in the
/// wrong place. We *could* try to shrink fixup depths, but that's
/// actually a lot of work for little benefit.
void EHScopeStack::popNullFixups() {
  // We expect this to only be called when there's still an innermost
  // normal cleanup; otherwise there really shouldn't be any fixups.
  assert(hasNormalCleanups());

  EHScopeStack::iterator it = find(InnermostNormalCleanup);
  unsigned MinSize = cast<EHCleanupScope>(*it).getFixupDepth();
  assert(BranchFixups.size() >= MinSize && "fixup stack out of order");

  while (BranchFixups.size() > MinSize &&
         BranchFixups.back().Destination == 0)
    BranchFixups.pop_back();
}

void CodeGenFunction::initFullExprCleanup() {
  // Create a variable to decide whether the cleanup needs to be run.
  llvm::AllocaInst *active
    = CreateTempAlloca(Builder.getInt1Ty(), "cleanup.cond");

  // Initialize it to false at a site that's guaranteed to be run
  // before each evaluation.
  setBeforeOutermostConditional(Builder.getFalse(), active);

  // Initialize it to true at the current location.
  Builder.CreateStore(Builder.getTrue(), active);

  // Set that as the active flag in the cleanup.
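  // (Note: this assumes the cleanup being initialized is the innermost
  // scope on the EH stack; it is fetched via EHStack.begin() below.)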
  EHCleanupScope &cleanup = cast<EHCleanupScope>(*EHStack.begin());
  assert(cleanup.getActiveFlag() == 0 && "cleanup already has active flag?");
  cleanup.setActiveFlag(active);

  if (cleanup.isNormalCleanup()) cleanup.setTestFlagInNormalCleanup();
  if (cleanup.isEHCleanup()) cleanup.setTestFlagInEHCleanup();
}

void EHScopeStack::Cleanup::anchor() {}

/// All the branch fixups on the EH stack have propagated out past the
/// outermost normal cleanup; resolve them all by adding cases to the
/// given switch instruction.
static void ResolveAllBranchFixups(CodeGenFunction &CGF,
                                   llvm::SwitchInst *Switch,
                                   llvm::BasicBlock *CleanupEntry) {
  llvm::SmallPtrSet<llvm::BasicBlock*, 4> CasesAdded;

  for (unsigned I = 0, E = CGF.EHStack.getNumBranchFixups(); I != E; ++I) {
    // Skip this fixup if its destination isn't set.
    BranchFixup &Fixup = CGF.EHStack.getBranchFixup(I);
    if (Fixup.Destination == 0) continue;

    // If there isn't an OptimisticBranchBlock, then InitialBranch is
    // still pointing directly to its destination; forward it to the
    // appropriate cleanup entry. This is required in the specific
    // case of
    //   { std::string s; goto lbl; }
    //   lbl:
    // i.e. where there's an unresolved fixup inside a single cleanup
    // entry which we're currently popping.
    if (Fixup.OptimisticBranchBlock == 0) {
      new llvm::StoreInst(CGF.Builder.getInt32(Fixup.DestinationIndex),
                          CGF.getNormalCleanupDestSlot(),
                          Fixup.InitialBranch);
      Fixup.InitialBranch->setSuccessor(0, CleanupEntry);
    }

    // Don't add this case to the switch statement twice.
    if (!CasesAdded.insert(Fixup.Destination)) continue;

    Switch->addCase(CGF.Builder.getInt32(Fixup.DestinationIndex),
                    Fixup.Destination);
  }

  CGF.EHStack.clearFixups();
}

/// Transitions the terminator of the given exit-block of a cleanup to
/// be a cleanup switch.
static llvm::SwitchInst *TransitionToCleanupSwitch(CodeGenFunction &CGF,
                                                   llvm::BasicBlock *Block) {
  // If it's a branch, turn it into a switch whose default
  // destination is its original target.
  llvm::TerminatorInst *Term = Block->getTerminator();
  assert(Term && "can't transition block without terminator");

  if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
    assert(Br->isUnconditional());
    llvm::LoadInst *Load =
      new llvm::LoadInst(CGF.getNormalCleanupDestSlot(), "cleanup.dest", Term);
    llvm::SwitchInst *Switch =
      llvm::SwitchInst::Create(Load, Br->getSuccessor(0), 4, Block);
    Br->eraseFromParent();
    return Switch;
  } else {
    return cast<llvm::SwitchInst>(Term);
  }
}

void CodeGenFunction::ResolveBranchFixups(llvm::BasicBlock *Block) {
  assert(Block && "resolving a null target block");
  if (!EHStack.getNumBranchFixups()) return;

  assert(EHStack.hasNormalCleanups() &&
         "branch fixups exist with no normal cleanups on stack");

  llvm::SmallPtrSet<llvm::BasicBlock*, 4> ModifiedOptimisticBlocks;
  bool ResolvedAny = false;

  for (unsigned I = 0, E = EHStack.getNumBranchFixups(); I != E; ++I) {
    // Skip this fixup if its destination doesn't match.
    BranchFixup &Fixup = EHStack.getBranchFixup(I);
    if (Fixup.Destination != Block) continue;

    Fixup.Destination = 0;
    ResolvedAny = true;

    // If it doesn't have an optimistic branch block, InitialBranch is
    // already pointing to the right place.
    llvm::BasicBlock *BranchBB = Fixup.OptimisticBranchBlock;
    if (!BranchBB)
      continue;

    // Don't process the same optimistic branch block twice.
    if (!ModifiedOptimisticBlocks.insert(BranchBB))
      continue;

    llvm::SwitchInst *Switch = TransitionToCleanupSwitch(*this, BranchBB);

    // Add a case to the switch.
    Switch->addCase(Builder.getInt32(Fixup.DestinationIndex), Block);
  }

  if (ResolvedAny)
    EHStack.popNullFixups();
}

/// Pops cleanup blocks until the given savepoint is reached.
void CodeGenFunction::PopCleanupBlocks(EHScopeStack::stable_iterator Old) {
  assert(Old.isValid());

  while (EHStack.stable_begin() != Old) {
    EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());

    // As long as Old strictly encloses the scope's enclosing normal
    // cleanup, we're going to emit another normal cleanup which
    // fallthrough can propagate through.
    bool FallThroughIsBranchThrough =
      Old.strictlyEncloses(Scope.getEnclosingNormalCleanup());

    PopCleanupBlock(FallThroughIsBranchThrough);
  }
}

/// Pops cleanup blocks until the given savepoint is reached, then adds the
/// cleanups from the given savepoint in the lifetime-extended cleanups stack.
void
CodeGenFunction::PopCleanupBlocks(EHScopeStack::stable_iterator Old,
                                  size_t OldLifetimeExtendedSize) {
  PopCleanupBlocks(Old);

  // Move our deferred cleanups onto the EH stack.
  for (size_t I = OldLifetimeExtendedSize,
              E = LifetimeExtendedCleanupStack.size(); I != E; /**/) {
    // Alignment should be guaranteed by the vptrs in the individual cleanups.
    assert((I % llvm::alignOf<LifetimeExtendedCleanupHeader>() == 0) &&
           "misaligned cleanup stack entry");

    LifetimeExtendedCleanupHeader &Header =
        reinterpret_cast<LifetimeExtendedCleanupHeader&>(
            LifetimeExtendedCleanupStack[I]);
    I += sizeof(Header);

    EHStack.pushCopyOfCleanup(Header.getKind(),
                              &LifetimeExtendedCleanupStack[I],
                              Header.getSize());
    I += Header.getSize();
  }
  LifetimeExtendedCleanupStack.resize(OldLifetimeExtendedSize);
}

static llvm::BasicBlock *CreateNormalEntry(CodeGenFunction &CGF,
                                           EHCleanupScope &Scope) {
  assert(Scope.isNormalCleanup());
  llvm::BasicBlock *Entry = Scope.getNormalBlock();
  if (!Entry) {
    Entry = CGF.createBasicBlock("cleanup");
    Scope.setNormalBlock(Entry);
  }
  return Entry;
}

/// Attempts to reduce a cleanup's entry block to a fallthrough. This
/// is basically llvm::MergeBlockIntoPredecessor, except
/// simplified/optimized for the tighter constraints on cleanup blocks.
///
/// Returns the new block, whatever it is.
static llvm::BasicBlock *SimplifyCleanupEntry(CodeGenFunction &CGF,
                                              llvm::BasicBlock *Entry) {
  llvm::BasicBlock *Pred = Entry->getSinglePredecessor();
  if (!Pred) return Entry;

  llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Pred->getTerminator());
  if (!Br || Br->isConditional()) return Entry;
  assert(Br->getSuccessor(0) == Entry);

  // If we were previously inserting at the end of the cleanup entry
  // block, we'll need to continue inserting at the end of the
  // predecessor.
  bool WasInsertBlock = CGF.Builder.GetInsertBlock() == Entry;
  assert(!WasInsertBlock || CGF.Builder.GetInsertPoint() == Entry->end());

  // Kill the branch.
  Br->eraseFromParent();

  // Replace all uses of the entry with the predecessor, in case there
  // are phis in the cleanup.
  Entry->replaceAllUsesWith(Pred);

  // Merge the blocks.
  Pred->getInstList().splice(Pred->end(), Entry->getInstList());

  // Kill the entry block.
  Entry->eraseFromParent();

  if (WasInsertBlock)
    CGF.Builder.SetInsertPoint(Pred);

  return Pred;
}

static void EmitCleanup(CodeGenFunction &CGF,
                        EHScopeStack::Cleanup *Fn,
                        EHScopeStack::Cleanup::Flags flags,
                        llvm::Value *ActiveFlag) {
  // EH cleanups always occur within a terminate scope.
  if (flags.isForEHCleanup()) CGF.EHStack.pushTerminate();

  // If there's an active flag, load it and skip the cleanup if it's
  // false.
  llvm::BasicBlock *ContBB = 0;
  if (ActiveFlag) {
    ContBB = CGF.createBasicBlock("cleanup.done");
    llvm::BasicBlock *CleanupBB = CGF.createBasicBlock("cleanup.action");
    llvm::Value *IsActive
      = CGF.Builder.CreateLoad(ActiveFlag, "cleanup.is_active");
    CGF.Builder.CreateCondBr(IsActive, CleanupBB, ContBB);
    CGF.EmitBlock(CleanupBB);
  }

  // Ask the cleanup to emit itself.
  Fn->Emit(CGF, flags);
  assert(CGF.HaveInsertPoint() && "cleanup ended with no insertion point?");

  // Emit the continuation block if there was an active flag.
  if (ActiveFlag)
    CGF.EmitBlock(ContBB);

  // Leave the terminate scope.
  if (flags.isForEHCleanup()) CGF.EHStack.popTerminate();
}

static void ForwardPrebranchedFallthrough(llvm::BasicBlock *Exit,
                                          llvm::BasicBlock *From,
                                          llvm::BasicBlock *To) {
  // Exit is the exit block of a cleanup, so it always terminates in
  // an unconditional branch or a switch.
  llvm::TerminatorInst *Term = Exit->getTerminator();

  if (llvm::BranchInst *Br = dyn_cast<llvm::BranchInst>(Term)) {
    assert(Br->isUnconditional() && Br->getSuccessor(0) == From);
    Br->setSuccessor(0, To);
  } else {
    llvm::SwitchInst *Switch = cast<llvm::SwitchInst>(Term);
    for (unsigned I = 0, E = Switch->getNumSuccessors(); I != E; ++I)
      if (Switch->getSuccessor(I) == From)
        Switch->setSuccessor(I, To);
  }
}

/// We don't need a normal entry block for the given cleanup.
/// Optimistic fixup branches can cause these blocks to come into
/// existence anyway; if so, destroy it.
///
/// The validity of this transformation is very much specific to the
/// exact ways in which we form branches to cleanup entries.
static void destroyOptimisticNormalEntry(CodeGenFunction &CGF,
                                         EHCleanupScope &scope) {
  llvm::BasicBlock *entry = scope.getNormalBlock();
  if (!entry) return;

  // Replace all the uses with unreachable.
  llvm::BasicBlock *unreachableBB = CGF.getUnreachableBlock();
  for (llvm::BasicBlock::use_iterator
         i = entry->use_begin(), e = entry->use_end(); i != e; ) {
    llvm::Use &use = i.getUse();
    ++i;

    use.set(unreachableBB);

    // The only uses should be fixup switches.
    llvm::SwitchInst *si = cast<llvm::SwitchInst>(use.getUser());
    if (si->getNumCases() == 1 && si->getDefaultDest() == unreachableBB) {
      // Replace the switch with a branch.
      llvm::BranchInst::Create(si->case_begin().getCaseSuccessor(), si);

      // The switch operand is a load from the cleanup-dest alloca.
      llvm::LoadInst *condition = cast<llvm::LoadInst>(si->getCondition());

      // Destroy the switch.
      si->eraseFromParent();

      // Destroy the load.
      assert(condition->getOperand(0) == CGF.NormalCleanupDest);
      assert(condition->use_empty());
      condition->eraseFromParent();
    }
  }

  assert(entry->use_empty());
  delete entry;
}

/// Pops a cleanup block. If the block includes a normal cleanup, the
/// current insertion point is threaded through the cleanup, as are
/// any branch fixups on the cleanup.
void CodeGenFunction::PopCleanupBlock(bool FallthroughIsBranchThrough) {
  assert(!EHStack.empty() && "cleanup stack is empty!");
  assert(isa<EHCleanupScope>(*EHStack.begin()) && "top not a cleanup!");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.begin());
  assert(Scope.getFixupDepth() <= EHStack.getNumBranchFixups());

  // Remember activation information.
  bool IsActive = Scope.isActive();
  llvm::Value *NormalActiveFlag =
    Scope.shouldTestFlagInNormalCleanup() ? Scope.getActiveFlag() : 0;
  llvm::Value *EHActiveFlag =
    Scope.shouldTestFlagInEHCleanup() ? Scope.getActiveFlag() : 0;

  // Check whether we need an EH cleanup. This is only true if we've
  // generated a lazy EH cleanup block.
  llvm::BasicBlock *EHEntry = Scope.getCachedEHDispatchBlock();
  assert(Scope.hasEHBranches() == (EHEntry != 0));
  bool RequiresEHCleanup = (EHEntry != 0);
  EHScopeStack::stable_iterator EHParent = Scope.getEnclosingEHScope();

  // Check the three conditions which might require a normal cleanup:

  // - whether there are branch fix-ups through this cleanup
  unsigned FixupDepth = Scope.getFixupDepth();
  bool HasFixups = EHStack.getNumBranchFixups() != FixupDepth;

  // - whether there are branch-throughs or branch-afters
  bool HasExistingBranches = Scope.hasBranches();

  // - whether there's a fallthrough
  llvm::BasicBlock *FallthroughSource = Builder.GetInsertBlock();
  bool HasFallthrough = (FallthroughSource != 0 && IsActive);

  // Branch-through fall-throughs leave the insertion point set to the
  // end of the last cleanup, which points to the current scope. The
  // rest of IR gen doesn't need to worry about this; it only happens
  // during the execution of PopCleanupBlocks().
  bool HasPrebranchedFallthrough =
    (FallthroughSource && FallthroughSource->getTerminator());

  // If this is a normal cleanup, then having a prebranched
  // fallthrough implies that the fallthrough source unconditionally
  // jumps here.
  assert(!Scope.isNormalCleanup() || !HasPrebranchedFallthrough ||
         (Scope.getNormalBlock() &&
          FallthroughSource->getTerminator()->getSuccessor(0)
            == Scope.getNormalBlock()));

  bool RequiresNormalCleanup = false;
  if (Scope.isNormalCleanup() &&
      (HasFixups || HasExistingBranches || HasFallthrough)) {
    RequiresNormalCleanup = true;
  }

  // If we have a prebranched fallthrough into an inactive normal
  // cleanup, rewrite it so that it leads to the appropriate place.
  if (Scope.isNormalCleanup() && HasPrebranchedFallthrough && !IsActive) {
    llvm::BasicBlock *prebranchDest;

    // If the prebranch is semantically branching through the next
    // cleanup, just forward it to the next block, leaving the
    // insertion point in the prebranched block.
    if (FallthroughIsBranchThrough) {
      EHScope &enclosing = *EHStack.find(Scope.getEnclosingNormalCleanup());
      prebranchDest = CreateNormalEntry(*this, cast<EHCleanupScope>(enclosing));

    // Otherwise, we need to make a new block. If the normal cleanup
    // isn't being used at all, we could actually reuse the normal
    // entry block, but this is simpler, and it avoids conflicts with
    // dead optimistic fixup branches.
    } else {
      prebranchDest = createBasicBlock("forwarded-prebranch");
      EmitBlock(prebranchDest);
    }

    llvm::BasicBlock *normalEntry = Scope.getNormalBlock();
    assert(normalEntry && !normalEntry->use_empty());

    ForwardPrebranchedFallthrough(FallthroughSource,
                                  normalEntry, prebranchDest);
  }

  // If we don't need the cleanup at all, we're done.
  if (!RequiresNormalCleanup && !RequiresEHCleanup) {
    destroyOptimisticNormalEntry(*this, Scope);
    EHStack.popCleanup(); // safe because there are no fixups
    assert(EHStack.getNumBranchFixups() == 0 ||
           EHStack.hasNormalCleanups());
    return;
  }

  // Copy the cleanup emission data out. Note that SmallVector
  // guarantees maximal alignment for its buffer regardless of its
  // type parameter.
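  // (The copy is needed because popping the cleanup below destroys the
  // EHCleanupScope that owns the buffer, while the Cleanup object must
  // remain usable for emitting both the normal and the EH paths.)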
  SmallVector<char, 8*sizeof(void*)> CleanupBuffer;
  CleanupBuffer.reserve(Scope.getCleanupSize());
  memcpy(CleanupBuffer.data(),
         Scope.getCleanupBuffer(), Scope.getCleanupSize());
  CleanupBuffer.set_size(Scope.getCleanupSize());
  EHScopeStack::Cleanup *Fn =
    reinterpret_cast<EHScopeStack::Cleanup*>(CleanupBuffer.data());

  EHScopeStack::Cleanup::Flags cleanupFlags;
  if (Scope.isNormalCleanup())
    cleanupFlags.setIsNormalCleanupKind();
  if (Scope.isEHCleanup())
    cleanupFlags.setIsEHCleanupKind();

  if (!RequiresNormalCleanup) {
    destroyOptimisticNormalEntry(*this, Scope);
    EHStack.popCleanup();
  } else {
    // If we have a fallthrough and no other need for the cleanup,
    // emit it directly.
    if (HasFallthrough && !HasPrebranchedFallthrough &&
        !HasFixups && !HasExistingBranches) {

      destroyOptimisticNormalEntry(*this, Scope);
      EHStack.popCleanup();

      EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);

    // Otherwise, the best approach is to thread everything through
    // the cleanup block and then try to clean up after ourselves.
    } else {
      // Force the entry block to exist.
      llvm::BasicBlock *NormalEntry = CreateNormalEntry(*this, Scope);

      // I. Set up the fallthrough edge in.

      CGBuilderTy::InsertPoint savedInactiveFallthroughIP;

      // If there's a fallthrough, we need to store the cleanup
      // destination index. For fall-throughs this is always zero.
      if (HasFallthrough) {
        if (!HasPrebranchedFallthrough)
          Builder.CreateStore(Builder.getInt32(0), getNormalCleanupDestSlot());

      // Otherwise, save and clear the IP if we don't have fallthrough
      // because the cleanup is inactive.
      } else if (FallthroughSource) {
        assert(!IsActive && "source without fallthrough for active cleanup");
        savedInactiveFallthroughIP = Builder.saveAndClearIP();
      }

      // II. Emit the entry block. This implicitly branches to it if
      // we have fallthrough. All the fixups and existing branches
      // should already be branched to it.
      EmitBlock(NormalEntry);

      // III. Figure out where we're going and build the cleanup
      // epilogue.

      bool HasEnclosingCleanups =
        (Scope.getEnclosingNormalCleanup() != EHStack.stable_end());

      // Compute the branch-through dest if we need it:
      //   - if there are branch-throughs threaded through the scope
      //   - if fall-through is a branch-through
      //   - if there are fixups that will be optimistically forwarded
      //     to the enclosing cleanup
      llvm::BasicBlock *BranchThroughDest = 0;
      if (Scope.hasBranchThroughs() ||
          (FallthroughSource && FallthroughIsBranchThrough) ||
          (HasFixups && HasEnclosingCleanups)) {
        assert(HasEnclosingCleanups);
        EHScope &S = *EHStack.find(Scope.getEnclosingNormalCleanup());
        BranchThroughDest = CreateNormalEntry(*this, cast<EHCleanupScope>(S));
      }

      llvm::BasicBlock *FallthroughDest = 0;
      SmallVector<llvm::Instruction*, 2> InstsToAppend;

      // If there's exactly one branch-after and no other threads,
      // we can route it without a switch.
      if (!Scope.hasBranchThroughs() && !HasFixups && !HasFallthrough &&
          Scope.getNumBranchAfters() == 1) {
        assert(!BranchThroughDest || !IsActive);

        // TODO: clean up the possibly dead stores to the cleanup dest slot.
        llvm::BasicBlock *BranchAfter = Scope.getBranchAfterBlock(0);
        InstsToAppend.push_back(llvm::BranchInst::Create(BranchAfter));

      // Build a switch-out if we need it:
      //   - if there are branch-afters threaded through the scope
      //   - if fall-through is a branch-after
      //   - if there are fixups that have nowhere left to go and
      //     so must be immediately resolved
      } else if (Scope.getNumBranchAfters() ||
                 (HasFallthrough && !FallthroughIsBranchThrough) ||
                 (HasFixups && !HasEnclosingCleanups)) {

        llvm::BasicBlock *Default =
          (BranchThroughDest ? BranchThroughDest : getUnreachableBlock());

        // TODO: base this on the number of branch-afters and fixups
        const unsigned SwitchCapacity = 10;

        llvm::LoadInst *Load =
          new llvm::LoadInst(getNormalCleanupDestSlot(), "cleanup.dest");
        llvm::SwitchInst *Switch =
          llvm::SwitchInst::Create(Load, Default, SwitchCapacity);

        InstsToAppend.push_back(Load);
        InstsToAppend.push_back(Switch);

        // Branch-after fallthrough.
        if (FallthroughSource && !FallthroughIsBranchThrough) {
          FallthroughDest = createBasicBlock("cleanup.cont");
          if (HasFallthrough)
            Switch->addCase(Builder.getInt32(0), FallthroughDest);
        }

        for (unsigned I = 0, E = Scope.getNumBranchAfters(); I != E; ++I) {
          Switch->addCase(Scope.getBranchAfterIndex(I),
                          Scope.getBranchAfterBlock(I));
        }

        // If there aren't any enclosing cleanups, we can resolve all
        // the fixups now.
        if (HasFixups && !HasEnclosingCleanups)
          ResolveAllBranchFixups(*this, Switch, NormalEntry);
      } else {
        // We should always have a branch-through destination in this case.
        assert(BranchThroughDest);
        InstsToAppend.push_back(llvm::BranchInst::Create(BranchThroughDest));
      }

      // IV. Pop the cleanup and emit it.
      EHStack.popCleanup();
      assert(EHStack.hasNormalCleanups() == HasEnclosingCleanups);

      EmitCleanup(*this, Fn, cleanupFlags, NormalActiveFlag);

      // Append the prepared cleanup prologue from above.
      llvm::BasicBlock *NormalExit = Builder.GetInsertBlock();
      for (unsigned I = 0, E = InstsToAppend.size(); I != E; ++I)
        NormalExit->getInstList().push_back(InstsToAppend[I]);

      // Optimistically hope that any fixups will continue falling through.
      for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
           I < E; ++I) {
        BranchFixup &Fixup = EHStack.getBranchFixup(I);
        if (!Fixup.Destination) continue;
        if (!Fixup.OptimisticBranchBlock) {
          new llvm::StoreInst(Builder.getInt32(Fixup.DestinationIndex),
                              getNormalCleanupDestSlot(),
                              Fixup.InitialBranch);
          Fixup.InitialBranch->setSuccessor(0, NormalEntry);
        }
        Fixup.OptimisticBranchBlock = NormalExit;
      }

      // V. Set up the fallthrough edge out.

      // Case 1: a fallthrough source exists but doesn't branch to the
      // cleanup because the cleanup is inactive.
      if (!HasFallthrough && FallthroughSource) {
        // Prebranched fallthrough was forwarded earlier.
        // Non-prebranched fallthrough doesn't need to be forwarded.
        // Either way, all we need to do is restore the IP we cleared before.
        assert(!IsActive);
        Builder.restoreIP(savedInactiveFallthroughIP);

      // Case 2: a fallthrough source exists and should branch to the
      // cleanup, but we're not supposed to branch through to the next
      // cleanup.
      } else if (HasFallthrough && FallthroughDest) {
        assert(!FallthroughIsBranchThrough);
        EmitBlock(FallthroughDest);

      // Case 3: a fallthrough source exists and should branch to the
      // cleanup and then through to the next.
      } else if (HasFallthrough) {
        // Everything is already set up for this.

      // Case 4: no fallthrough source exists.
      } else {
        Builder.ClearInsertionPoint();
      }

      // VI. Assorted cleaning.

      // Check whether we can merge NormalEntry into a single predecessor.
      // This might invalidate (non-IR) pointers to NormalEntry.
      llvm::BasicBlock *NewNormalEntry =
        SimplifyCleanupEntry(*this, NormalEntry);

      // If it did invalidate those pointers, and NormalEntry was the same
      // as NormalExit, go back and patch up the fixups.
      if (NewNormalEntry != NormalEntry && NormalEntry == NormalExit)
        for (unsigned I = FixupDepth, E = EHStack.getNumBranchFixups();
             I < E; ++I)
          EHStack.getBranchFixup(I).OptimisticBranchBlock = NewNormalEntry;
    }
  }

  assert(EHStack.hasNormalCleanups() || EHStack.getNumBranchFixups() == 0);

  // Emit the EH cleanup if required.
  if (RequiresEHCleanup) {
    if (CGDebugInfo *DI = getDebugInfo())
      DI->EmitLocation(Builder, CurEHLocation);

    CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();

    EmitBlock(EHEntry);

    // We only actually emit the cleanup code if the cleanup is either
    // active or was used before it was deactivated.
    if (EHActiveFlag || IsActive) {
      cleanupFlags.setIsForEHCleanup();
      EmitCleanup(*this, Fn, cleanupFlags, EHActiveFlag);
    }

    Builder.CreateBr(getEHDispatchBlock(EHParent));

    Builder.restoreIP(SavedIP);

    SimplifyCleanupEntry(*this, EHEntry);
  }
}

/// isObviouslyBranchWithoutCleanups - Return true if a branch to the
/// specified destination obviously has no cleanups to run. 'false' is always
/// a conservatively correct answer for this method.
bool CodeGenFunction::isObviouslyBranchWithoutCleanups(JumpDest Dest) const {
  assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
         && "stale jump destination");

  // Calculate the innermost active normal cleanup.
  EHScopeStack::stable_iterator TopCleanup =
    EHStack.getInnermostActiveNormalCleanup();

  // If we're not in an active normal cleanup scope, or if the
  // destination scope is within the innermost active normal cleanup
  // scope, we don't need to worry about fixups.
  if (TopCleanup == EHStack.stable_end() ||
      TopCleanup.encloses(Dest.getScopeDepth())) // works for invalid
    return true;

  // Otherwise, we might need some cleanups.
  return false;
}


/// Terminate the current block by emitting a branch which might leave
/// the current cleanup-protected scope. The target scope may not yet
/// be known, in which case this will require a fixup.
///
/// As a side-effect, this method clears the insertion point.
void CodeGenFunction::EmitBranchThroughCleanup(JumpDest Dest) {
  assert(Dest.getScopeDepth().encloses(EHStack.stable_begin())
         && "stale jump destination");

  if (!HaveInsertPoint())
    return;

  // Create the branch.
  llvm::BranchInst *BI = Builder.CreateBr(Dest.getBlock());

  // Calculate the innermost active normal cleanup.
  EHScopeStack::stable_iterator
    TopCleanup = EHStack.getInnermostActiveNormalCleanup();

  // If we're not in an active normal cleanup scope, or if the
  // destination scope is within the innermost active normal cleanup
  // scope, we don't need to worry about fixups.
  if (TopCleanup == EHStack.stable_end() ||
      TopCleanup.encloses(Dest.getScopeDepth())) { // works for invalid
    Builder.ClearInsertionPoint();
    return;
  }

  // If we can't resolve the destination cleanup scope, just add this
  // to the current cleanup scope as a branch fixup.
  if (!Dest.getScopeDepth().isValid()) {
    BranchFixup &Fixup = EHStack.addBranchFixup();
    Fixup.Destination = Dest.getBlock();
    Fixup.DestinationIndex = Dest.getDestIndex();
    Fixup.InitialBranch = BI;
    Fixup.OptimisticBranchBlock = 0;

    Builder.ClearInsertionPoint();
    return;
  }

  // Otherwise, thread through all the normal cleanups in scope.

  // Store the index at the start.
  llvm::ConstantInt *Index = Builder.getInt32(Dest.getDestIndex());
  new llvm::StoreInst(Index, getNormalCleanupDestSlot(), BI);

  // Adjust BI to point to the first cleanup block.
  {
    EHCleanupScope &Scope =
      cast<EHCleanupScope>(*EHStack.find(TopCleanup));
    BI->setSuccessor(0, CreateNormalEntry(*this, Scope));
  }

  // Add this destination to all the scopes involved.
  EHScopeStack::stable_iterator I = TopCleanup;
  EHScopeStack::stable_iterator E = Dest.getScopeDepth();
  if (E.strictlyEncloses(I)) {
    while (true) {
      EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(I));
      assert(Scope.isNormalCleanup());
      I = Scope.getEnclosingNormalCleanup();

      // If this is the last cleanup we're propagating through, tell it
      // that there's a resolved jump moving through it.
      if (!E.strictlyEncloses(I)) {
        Scope.addBranchAfter(Index, Dest.getBlock());
        break;
      }

      // Otherwise, tell the scope that there's a jump propagating
      // through it. If this isn't new information, all the rest of
      // the work has been done before.
      if (!Scope.addBranchThrough(Dest.getBlock()))
        break;
    }
  }

  Builder.ClearInsertionPoint();
}

static bool IsUsedAsNormalCleanup(EHScopeStack &EHStack,
                                  EHScopeStack::stable_iterator C) {
  // If we needed a normal block for any reason, that counts.
  if (cast<EHCleanupScope>(*EHStack.find(C)).getNormalBlock())
    return true;

  // Check whether any enclosed cleanups were needed.
  for (EHScopeStack::stable_iterator
         I = EHStack.getInnermostNormalCleanup();
         I != C; ) {
    assert(C.strictlyEncloses(I));
    EHCleanupScope &S = cast<EHCleanupScope>(*EHStack.find(I));
    if (S.getNormalBlock()) return true;
    I = S.getEnclosingNormalCleanup();
  }

  return false;
}

static bool IsUsedAsEHCleanup(EHScopeStack &EHStack,
                              EHScopeStack::stable_iterator cleanup) {
  // If we needed an EH block for any reason, that counts.
  if (EHStack.find(cleanup)->hasEHBranches())
    return true;

  // Check whether any enclosed cleanups were needed.
  for (EHScopeStack::stable_iterator
         i = EHStack.getInnermostEHScope(); i != cleanup; ) {
    assert(cleanup.strictlyEncloses(i));

    EHScope &scope = *EHStack.find(i);
    if (scope.hasEHBranches())
      return true;

    i = scope.getEnclosingEHScope();
  }

  return false;
}

enum ForActivation_t {
  ForActivation,
  ForDeactivation
};

/// The given cleanup block is changing activation state. Configure a
/// cleanup variable if necessary.
///
/// It would be good if we had some way of determining if there were
/// extra uses *after* the change-over point.
static void SetupCleanupBlockActivation(CodeGenFunction &CGF,
                                        EHScopeStack::stable_iterator C,
                                        ForActivation_t kind,
                                        llvm::Instruction *dominatingIP) {
  EHCleanupScope &Scope = cast<EHCleanupScope>(*CGF.EHStack.find(C));

  // We always need the flag if we're activating the cleanup in a
  // conditional context, because we have to assume that the current
  // location doesn't necessarily dominate the cleanup's code.
  bool isActivatedInConditional =
    (kind == ForActivation && CGF.isInConditionalBranch());

  bool needFlag = false;

  // Calculate whether the cleanup was used:

  //   - as a normal cleanup
  if (Scope.isNormalCleanup() &&
      (isActivatedInConditional || IsUsedAsNormalCleanup(CGF.EHStack, C))) {
    Scope.setTestFlagInNormalCleanup();
    needFlag = true;
  }

  //   - as an EH cleanup
  if (Scope.isEHCleanup() &&
      (isActivatedInConditional || IsUsedAsEHCleanup(CGF.EHStack, C))) {
    Scope.setTestFlagInEHCleanup();
    needFlag = true;
  }

  // If it hasn't yet been used as either, we're done.
  if (!needFlag) return;

  llvm::AllocaInst *var = Scope.getActiveFlag();
  if (!var) {
    var = CGF.CreateTempAlloca(CGF.Builder.getInt1Ty(), "cleanup.isactive");
    Scope.setActiveFlag(var);

    assert(dominatingIP && "no existing variable and no dominating IP!");

    // Initialize to true or false depending on whether it was
    // active up to this point.
    llvm::Value *value = CGF.Builder.getInt1(kind == ForDeactivation);

    // If we're in a conditional block, ignore the dominating IP and
    // use the outermost conditional branch.
    if (CGF.isInConditionalBranch()) {
      CGF.setBeforeOutermostConditional(value, var);
    } else {
      new llvm::StoreInst(value, var, dominatingIP);
    }
  }

  CGF.Builder.CreateStore(CGF.Builder.getInt1(kind == ForActivation), var);
}

/// Activate a cleanup that was created in an inactivated state.
void CodeGenFunction::ActivateCleanupBlock(EHScopeStack::stable_iterator C,
                                           llvm::Instruction *dominatingIP) {
  assert(C != EHStack.stable_end() && "activating bottom of stack?");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
  assert(!Scope.isActive() && "double activation");

  SetupCleanupBlockActivation(*this, C, ForActivation, dominatingIP);

  Scope.setActive(true);
}

/// Deactivate a cleanup that was created in an active state.
void CodeGenFunction::DeactivateCleanupBlock(EHScopeStack::stable_iterator C,
                                             llvm::Instruction *dominatingIP) {
  assert(C != EHStack.stable_end() && "deactivating bottom of stack?");
  EHCleanupScope &Scope = cast<EHCleanupScope>(*EHStack.find(C));
  assert(Scope.isActive() && "double deactivation");

  // If it's the top of the stack, just pop it.
  if (C == EHStack.stable_begin()) {
    // If it's a normal cleanup, we need to pretend that the
    // fallthrough is unreachable.
    CGBuilderTy::InsertPoint SavedIP = Builder.saveAndClearIP();
    PopCleanupBlock();
    Builder.restoreIP(SavedIP);
    return;
  }

  // Otherwise, follow the general case.
  SetupCleanupBlockActivation(*this, C, ForDeactivation, dominatingIP);

  Scope.setActive(false);
}

llvm::Value *CodeGenFunction::getNormalCleanupDestSlot() {
  if (!NormalCleanupDest)
    NormalCleanupDest =
      CreateTempAlloca(Builder.getInt32Ty(), "cleanup.dest.slot");
  return NormalCleanupDest;
}

/// Emits all the code to cause the given temporary to be cleaned up.
void CodeGenFunction::EmitCXXTemporary(const CXXTemporary *Temporary,
                                       QualType TempType,
                                       llvm::Value *Ptr) {
  pushDestroy(NormalAndEHCleanup, Ptr, TempType, destroyCXXObject,
              /*useEHCleanup*/ true);
}