//===- ObjCARCContract.cpp - ObjC ARC Optimization ------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This file defines late ObjC ARC optimizations. ARC stands for Automatic
/// Reference Counting and is a system for managing reference counts for objects
/// in Objective-C.
///
/// This specific file mainly deals with ``contracting'' multiple lower level
/// operations into singular higher level operations through pattern matching.
///
/// WARNING: This file knows about certain library functions. It recognizes them
/// by name, and hardwires knowledge of their semantics.
///
/// WARNING: This file knows about how certain Objective-C library functions are
/// used. Naive LLVM IR transformations which would otherwise be
/// behavior-preserving may break these assumptions.
///
//===----------------------------------------------------------------------===//

// TODO: ObjCARCContract could insert PHI nodes when uses aren't
// dominated by single calls.

#include "ARCRuntimeEntryPoints.h"
#include "DependencyAnalysis.h"
#include "ObjCARC.h"
#include "ProvenanceAnalysis.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/EHPersonalities.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Operator.h"
#include "llvm/InitializePasses.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;
using namespace llvm::objcarc;

#define DEBUG_TYPE "objc-arc-contract"

STATISTIC(NumPeeps,        "Number of calls peephole-optimized");
STATISTIC(NumStoreStrongs, "Number of objc_storeStrong calls formed");

//===----------------------------------------------------------------------===//
//                                Declarations
//===----------------------------------------------------------------------===//

namespace {
  /// Late ARC optimizations
  ///
  /// These change the IR in a way that makes it difficult for ObjCARCOpt to
  /// analyze, so this pass is run late.
  class ObjCARCContract : public FunctionPass {
    bool Changed;
    AliasAnalysis *AA;
    DominatorTree *DT;
    ProvenanceAnalysis PA;
    ARCRuntimeEntryPoints EP;

    /// A flag indicating whether this optimization pass should run.
    bool Run;

    /// The inline asm string to insert between calls and RetainRV calls to make
    /// the optimization work on targets which need it.
    const MDString *RVInstMarker;

    /// The set of inserted objc_storeStrong calls. If at the end of walking the
    /// function we have found no alloca instructions, these calls can be marked
    /// "tail".
    SmallPtrSet<CallInst *, 8> StoreStrongCalls;

    /// Returns true if we eliminated Inst.
    bool tryToPeepholeInstruction(
        Function &F, Instruction *Inst, inst_iterator &Iter,
        SmallPtrSetImpl<Instruction *> &DepInsts,
        SmallPtrSetImpl<const BasicBlock *> &Visited,
        bool &TailOkForStoreStrong,
        const DenseMap<BasicBlock *, ColorVector> &BlockColors);

    bool optimizeRetainCall(Function &F, Instruction *Retain);

    bool
    contractAutorelease(Function &F, Instruction *Autorelease,
                        ARCInstKind Class,
                        SmallPtrSetImpl<Instruction *> &DependingInstructions,
                        SmallPtrSetImpl<const BasicBlock *> &Visited);

    void tryToContractReleaseIntoStoreStrong(
        Instruction *Release, inst_iterator &Iter,
        const DenseMap<BasicBlock *, ColorVector> &BlockColors);

    void getAnalysisUsage(AnalysisUsage &AU) const override;
    bool doInitialization(Module &M) override;
    bool runOnFunction(Function &F) override;

  public:
    static char ID;
    ObjCARCContract() : FunctionPass(ID) {
      initializeObjCARCContractPass(*PassRegistry::getPassRegistry());
    }
  };
}

//===----------------------------------------------------------------------===//
//                               Implementation
//===----------------------------------------------------------------------===//

/// Turn objc_retain into objc_retainAutoreleasedReturnValue if the operand is a
/// return value. We do this late so we do not disrupt the dataflow analysis in
/// ObjCARCOpt.
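///
/// A minimal sketch (illustrative LLVM IR; @foo is a placeholder for any
/// callee whose result is retained):
///
///   %call = call i8* @foo()
///   %0 = call i8* @objc_retain(i8* %call)
///
/// becomes
///
///   %call = call i8* @foo()
///   %0 = call i8* @objc_retainAutoreleasedReturnValue(i8* %call)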
bool ObjCARCContract::optimizeRetainCall(Function &F, Instruction *Retain) {
  const auto *Call = dyn_cast<CallBase>(GetArgRCIdentityRoot(Retain));
  if (!Call)
    return false;
  if (Call->getParent() != Retain->getParent())
    return false;

  // Check that the call is next to the retain.
  BasicBlock::const_iterator I = ++Call->getIterator();
  while (IsNoopInstruction(&*I))
    ++I;
  if (&*I != Retain)
    return false;

  // Turn it into an objc_retainAutoreleasedReturnValue.
  Changed = true;
  ++NumPeeps;

  LLVM_DEBUG(
      dbgs() << "Transforming objc_retain => "
                "objc_retainAutoreleasedReturnValue since the operand is a "
                "return value.\nOld: "
             << *Retain << "\n");
  // We do not have to worry about the tail-call or does-not-throw properties,
  // since retain and retainRV have the same properties.
  Function *Decl = EP.get(ARCRuntimeEntryPointKind::RetainRV);
  cast<CallInst>(Retain)->setCalledFunction(Decl);

  LLVM_DEBUG(dbgs() << "New: " << *Retain << "\n");
  return true;
}

/// Merge an autorelease with a retain into a fused call.
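///
/// A minimal sketch (illustrative LLVM IR; %x stands for any pointer with the
/// same RCIdentityRoot in both calls):
///
///   %0 = call i8* @objc_retain(i8* %x)
///   %1 = call i8* @objc_autorelease(i8* %x)
///
/// becomes the single call
///
///   %0 = call i8* @objc_retainAutorelease(i8* %x)
///
/// (or @objc_retainAutoreleaseReturnValue when fusing an autoreleaseRV).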
bool ObjCARCContract::contractAutorelease(
    Function &F, Instruction *Autorelease, ARCInstKind Class,
    SmallPtrSetImpl<Instruction *> &DependingInstructions,
    SmallPtrSetImpl<const BasicBlock *> &Visited) {
  const Value *Arg = GetArgRCIdentityRoot(Autorelease);

  // Check that there are no instructions between the retain and the autorelease
  // (such as an autorelease_pop) which may change the count.
  CallInst *Retain = nullptr;
  if (Class == ARCInstKind::AutoreleaseRV)
    FindDependencies(RetainAutoreleaseRVDep, Arg,
                     Autorelease->getParent(), Autorelease,
                     DependingInstructions, Visited, PA);
  else
    FindDependencies(RetainAutoreleaseDep, Arg,
                     Autorelease->getParent(), Autorelease,
                     DependingInstructions, Visited, PA);

  Visited.clear();
  if (DependingInstructions.size() != 1) {
    DependingInstructions.clear();
    return false;
  }

  Retain = dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
  DependingInstructions.clear();

  if (!Retain || GetBasicARCInstKind(Retain) != ARCInstKind::Retain ||
      GetArgRCIdentityRoot(Retain) != Arg)
    return false;

  Changed = true;
  ++NumPeeps;

  LLVM_DEBUG(dbgs() << "    Fusing retain/autorelease!\n"
                       "        Autorelease:"
                    << *Autorelease
                    << "\n"
                       "        Retain: "
                    << *Retain << "\n");

  Function *Decl = EP.get(Class == ARCInstKind::AutoreleaseRV
                              ? ARCRuntimeEntryPointKind::RetainAutoreleaseRV
                              : ARCRuntimeEntryPointKind::RetainAutorelease);
  Retain->setCalledFunction(Decl);

  LLVM_DEBUG(dbgs() << "        New RetainAutorelease: " << *Retain << "\n");

  EraseInstruction(Autorelease);
  return true;
}

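/// Scan forward from \p Load looking for the store into Load's pointer operand
/// and for \p Release, which may appear in either order. Return the store if
/// the scan proves that contracting the sequence into an objc_storeStrong is
/// safe, and null otherwise.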
static StoreInst *findSafeStoreForStoreStrongContraction(LoadInst *Load,
                                                         Instruction *Release,
                                                         ProvenanceAnalysis &PA,
                                                         AliasAnalysis *AA) {
  StoreInst *Store = nullptr;
  bool SawRelease = false;

  // Get the location associated with Load.
  MemoryLocation Loc = MemoryLocation::get(Load);
  auto *LocPtr = Loc.Ptr->stripPointerCasts();

  // Walk down to find the store and the release, which may be in either order.
  for (auto I = std::next(BasicBlock::iterator(Load)),
            E = Load->getParent()->end();
       I != E; ++I) {
    // If we found the store we were looking for and saw the release,
    // break. There is no more work to be done.
    if (Store && SawRelease)
      break;

    // At this point we know we have not yet seen both the store and the
    // release. If I is the release, mark that we saw the release and continue.
    Instruction *Inst = &*I;
    if (Inst == Release) {
      SawRelease = true;
      continue;
    }

    // Otherwise, we check if Inst is a "good" store. Grab the instruction class
    // of Inst.
    ARCInstKind Class = GetBasicARCInstKind(Inst);

    // If Inst is an unrelated retain, we don't care about it.
    //
    // TODO: This is one area where the optimization could be made more
    // aggressive.
    if (IsRetain(Class))
      continue;

    // If we have seen the store, but not the release...
    if (Store) {
      // We need to make sure that it is safe to move the release from its
      // current position to the store. This implies proving that any
      // instruction in between Store and the Release conservatively cannot use
      // the RCIdentityRoot of Release. If we can prove that Inst cannot use it,
      // we can ignore Inst and continue...
      if (!CanUse(Inst, Load, PA, Class)) {
        continue;
      }

      // Otherwise, be conservative and return nullptr.
      return nullptr;
    }

    // Ok, now we know we have not seen a store yet. Check whether Inst can
    // write to our load location; if it cannot, just ignore the instruction.
    if (!isModSet(AA->getModRefInfo(Inst, Loc)))
      continue;

    Store = dyn_cast<StoreInst>(Inst);

    // If Inst can write to the location, check whether it is a simple store. If
    // Inst is not a store, or is a store that is not simple, then we have
    // something we do not understand writing to this memory, implying we cannot
    // move the load over the write to any subsequent store that we may find.
    if (!Store || !Store->isSimple())
      return nullptr;

    // Then make sure that the pointer we are storing to is Ptr. If so, we
    // found our Store!
    if (Store->getPointerOperand()->stripPointerCasts() == LocPtr)
      continue;

    // Otherwise, we have an unknown store to some other ptr that clobbers
    // Loc.Ptr. Bail!
    return nullptr;
  }

  // If we did not find the store or did not see the release, fail.
  if (!Store || !SawRelease)
    return nullptr;

  // We succeeded!
  return Store;
}

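/// Walk up from \p Store looking for a retain of \p New. Return the retain if
/// nothing between it and the store, other than \p Release itself, could
/// decrement a reference count; return null otherwise.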
static Instruction *
findRetainForStoreStrongContraction(Value *New, StoreInst *Store,
                                    Instruction *Release,
                                    ProvenanceAnalysis &PA) {
  // Walk up from the Store to find the retain.
  BasicBlock::iterator I = Store->getIterator();
  BasicBlock::iterator Begin = Store->getParent()->begin();
  while (I != Begin && GetBasicARCInstKind(&*I) != ARCInstKind::Retain) {
    Instruction *Inst = &*I;

    // It is only safe to move the retain to the store if we can prove
    // conservatively that nothing besides the release can decrement reference
    // counts in between the retain and the store.
    if (CanDecrementRefCount(Inst, New, PA) && Inst != Release)
      return nullptr;
    --I;
  }
  Instruction *Retain = &*I;
  if (GetBasicARCInstKind(Retain) != ARCInstKind::Retain)
    return nullptr;
  if (GetArgRCIdentityRoot(Retain) != New)
    return nullptr;
  return Retain;
}

/// Create a call instruction with the correct funclet token. Should be used
/// instead of calling CallInst::Create directly.
static CallInst *
createCallInst(FunctionType *FTy, Value *Func, ArrayRef<Value *> Args,
               const Twine &NameStr, Instruction *InsertBefore,
               const DenseMap<BasicBlock *, ColorVector> &BlockColors) {
  SmallVector<OperandBundleDef, 1> OpBundles;
  if (!BlockColors.empty()) {
    const ColorVector &CV = BlockColors.find(InsertBefore->getParent())->second;
    assert(CV.size() == 1 && "non-unique color for block!");
    Instruction *EHPad = CV.front()->getFirstNonPHI();
    if (EHPad->isEHPad())
      OpBundles.emplace_back("funclet", EHPad);
  }

  return CallInst::Create(FTy, Func, Args, OpBundles, NameStr, InsertBefore);
}

static CallInst *
createCallInst(FunctionCallee Func, ArrayRef<Value *> Args, const Twine &NameStr,
               Instruction *InsertBefore,
               const DenseMap<BasicBlock *, ColorVector> &BlockColors) {
  return createCallInst(Func.getFunctionType(), Func.getCallee(), Args, NameStr,
                        InsertBefore, BlockColors);
}

/// Attempt to merge an objc_release with a store, load, and objc_retain to form
/// an objc_storeStrong. An objc_storeStrong:
///
///   objc_storeStrong(i8** %old_ptr, i8* %new_value)
///
/// is equivalent to the following IR sequence:
///
///   ; Load old value.
///   %old_value = load i8** %old_ptr               (1)
///
///   ; Increment the new value and then release the old value. This must occur
///   ; in order in case old_value releases new_value in its destructor causing
///   ; us to potentially have a dangling ptr.
///   tail call i8* @objc_retain(i8* %new_value)    (2)
///   tail call void @objc_release(i8* %old_value)  (3)
///
///   ; Store the new_value into old_ptr
///   store i8* %new_value, i8** %old_ptr           (4)
///
/// The safety of this optimization is based around the following
/// considerations:
///
///  1. We are forming the store strong at the store. Thus to perform this
///     optimization it must be safe to move the retain, load, and release to
///     (4).
///  2. We need to make sure that any re-orderings of (1), (2), (3), (4) are
///     safe.
void ObjCARCContract::tryToContractReleaseIntoStoreStrong(
    Instruction *Release, inst_iterator &Iter,
    const DenseMap<BasicBlock *, ColorVector> &BlockColors) {
  // See if we are releasing something that we just loaded.
  auto *Load = dyn_cast<LoadInst>(GetArgRCIdentityRoot(Release));
  if (!Load || !Load->isSimple())
    return;

  // For now, require everything to be in one basic block.
  BasicBlock *BB = Release->getParent();
  if (Load->getParent() != BB)
    return;

  // First scan down the BB from Load, looking for a store to the location that
  // Load loads from.
  StoreInst *Store =
      findSafeStoreForStoreStrongContraction(Load, Release, PA, AA);
  // If we fail, bail.
  if (!Store)
    return;

  // Then find new_value's RCIdentityRoot.
  Value *New = GetRCIdentityRoot(Store->getValueOperand());

  // Then walk up the BB and look for a retain on New without any intervening
  // instructions which conservatively might decrement ref counts.
  Instruction *Retain =
      findRetainForStoreStrongContraction(New, Store, Release, PA);

  // If we fail, bail.
  if (!Retain)
    return;

  Changed = true;
  ++NumStoreStrongs;

  LLVM_DEBUG(
      llvm::dbgs() << "    Contracting retain, release into objc_storeStrong.\n"
                   << "        Old:\n"
                   << "            Store:   " << *Store << "\n"
                   << "            Release: " << *Release << "\n"
                   << "            Retain:  " << *Retain << "\n"
                   << "            Load:    " << *Load << "\n");

  LLVMContext &C = Release->getContext();
  Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
  Type *I8XX = PointerType::getUnqual(I8X);

  Value *Args[] = { Load->getPointerOperand(), New };
  if (Args[0]->getType() != I8XX)
    Args[0] = new BitCastInst(Args[0], I8XX, "", Store);
  if (Args[1]->getType() != I8X)
    Args[1] = new BitCastInst(Args[1], I8X, "", Store);
  Function *Decl = EP.get(ARCRuntimeEntryPointKind::StoreStrong);
  CallInst *StoreStrong = createCallInst(Decl, Args, "", Store, BlockColors);
  StoreStrong->setDoesNotThrow();
  StoreStrong->setDebugLoc(Store->getDebugLoc());

  // We can't set the tail flag yet, because we haven't yet determined
  // whether there are any escaping allocas. Remember this call, so that
  // we can set the tail flag once we know it's safe.
  StoreStrongCalls.insert(StoreStrong);

  LLVM_DEBUG(llvm::dbgs() << "        New Store Strong: " << *StoreStrong
                          << "\n");

  if (&*Iter == Retain) ++Iter;
  if (&*Iter == Store) ++Iter;
  Store->eraseFromParent();
  Release->eraseFromParent();
  EraseInstruction(Retain);
  if (Load->use_empty())
    Load->eraseFromParent();
}

bool ObjCARCContract::tryToPeepholeInstruction(
    Function &F, Instruction *Inst, inst_iterator &Iter,
    SmallPtrSetImpl<Instruction *> &DependingInsts,
    SmallPtrSetImpl<const BasicBlock *> &Visited, bool &TailOkForStoreStrongs,
    const DenseMap<BasicBlock *, ColorVector> &BlockColors) {
  // Only these library routines return their argument. In particular,
  // objc_retainBlock does not necessarily return its argument.
  ARCInstKind Class = GetBasicARCInstKind(Inst);
  switch (Class) {
  case ARCInstKind::FusedRetainAutorelease:
  case ARCInstKind::FusedRetainAutoreleaseRV:
    return false;
  case ARCInstKind::Autorelease:
  case ARCInstKind::AutoreleaseRV:
    return contractAutorelease(F, Inst, Class, DependingInsts, Visited);
  case ARCInstKind::Retain:
    // Attempt to convert retains to retainrvs if they are next to function
    // calls.
    if (!optimizeRetainCall(F, Inst))
      return false;
    // If we succeed in our optimization, fall through.
    LLVM_FALLTHROUGH;
  case ARCInstKind::RetainRV:
  case ARCInstKind::ClaimRV: {
    // If we're compiling for a target which needs a special inline-asm
    // marker to do the return value optimization, insert it now.
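    //
    // The marker string comes from the
    // "clang.arc.retainAutoreleasedReturnValueMarker" module flag (see
    // doInitialization below). For example, on some ARM targets it is a
    // distinctive no-op such as "mov r7, r7" that the runtime recognizes.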
    if (!RVInstMarker)
      return false;
    BasicBlock::iterator BBI = Inst->getIterator();
    BasicBlock *InstParent = Inst->getParent();

    // Step up to see if the call immediately precedes the RV call.
    // If it's an invoke, we have to cross a block boundary. And we have
    // to carefully dodge no-op instructions.
    do {
      if (BBI == InstParent->begin()) {
        BasicBlock *Pred = InstParent->getSinglePredecessor();
        if (!Pred)
          goto decline_rv_optimization;
        BBI = Pred->getTerminator()->getIterator();
        break;
      }
      --BBI;
    } while (IsNoopInstruction(&*BBI));

    if (&*BBI == GetArgRCIdentityRoot(Inst)) {
      LLVM_DEBUG(dbgs() << "Adding inline asm marker for the return value "
                           "optimization.\n");
      Changed = true;
      InlineAsm *IA =
          InlineAsm::get(FunctionType::get(Type::getVoidTy(Inst->getContext()),
                                           /*isVarArg=*/false),
                         RVInstMarker->getString(),
                         /*Constraints=*/"", /*hasSideEffects=*/true);

      createCallInst(IA, None, "", Inst, BlockColors);
    }
  decline_rv_optimization:
    return false;
  }
  case ARCInstKind::InitWeak: {
    // objc_initWeak(p, null) => *p = null
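    //
    // A minimal sketch (illustrative LLVM IR):
    //
    //   %0 = call i8* @objc_initWeak(i8** %p, i8* null)
    //
    // becomes
    //
    //   store i8* null, i8** %p
    //
    // with all uses of %0 replaced by null.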
    CallInst *CI = cast<CallInst>(Inst);
    if (IsNullOrUndef(CI->getArgOperand(1))) {
      Value *Null = ConstantPointerNull::get(cast<PointerType>(CI->getType()));
      Changed = true;
      new StoreInst(Null, CI->getArgOperand(0), CI);

      LLVM_DEBUG(dbgs() << "OBJCARCContract: Old = " << *CI << "\n"
                        << "                 New = " << *Null << "\n");

      CI->replaceAllUsesWith(Null);
      CI->eraseFromParent();
    }
    return true;
  }
  case ARCInstKind::Release:
    // Try to form an objc_storeStrong from our release. Whether or not we
    // succeed, there is nothing further to do below, so return true.
    tryToContractReleaseIntoStoreStrong(Inst, Iter, BlockColors);
    return true;
  case ARCInstKind::User:
    // Be conservative if the function has any alloca instructions.
    // Technically we only care about escaping alloca instructions,
    // but this is sufficient to handle some interesting cases.
    if (isa<AllocaInst>(Inst))
      TailOkForStoreStrongs = false;
    return true;
  case ARCInstKind::IntrinsicUser:
    // Remove calls to @llvm.objc.clang.arc.use(...).
    Changed = true;
    Inst->eraseFromParent();
    return true;
  default:
    return true;
  }
}

//===----------------------------------------------------------------------===//
//                              Top Level Driver
//===----------------------------------------------------------------------===//

bool ObjCARCContract::runOnFunction(Function &F) {
  if (!EnableARCOpts)
    return false;

  // If nothing in the Module uses ARC, don't do anything.
  if (!Run)
    return false;

  Changed = false;
  AA = &getAnalysis<AAResultsWrapperPass>().getAAResults();
  DT = &getAnalysis<DominatorTreeWrapperPass>().getDomTree();

  PA.setAA(&getAnalysis<AAResultsWrapperPass>().getAAResults());

  DenseMap<BasicBlock *, ColorVector> BlockColors;
  if (F.hasPersonalityFn() &&
      isScopedEHPersonality(classifyEHPersonality(F.getPersonalityFn())))
    BlockColors = colorEHFunclets(F);

  LLVM_DEBUG(llvm::dbgs() << "**** ObjCARC Contract ****\n");

  // Track whether it's ok to mark objc_storeStrong calls with the "tail"
  // keyword. Be conservative if the function has variadic arguments.
  // Functions which "return twice" are also unsafe for the "tail" keyword,
  // because they call setjmp, which may need to return to an earlier stack
  // state.
  bool TailOkForStoreStrongs =
      !F.isVarArg() && !F.callsFunctionThatReturnsTwice();

  // For ObjC library calls which return their argument, replace uses of the
  // argument with uses of the call return value, if it dominates the use. This
  // reduces register pressure.
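  //
  // For example (illustrative LLVM IR): given
  //
  //   %y = call i8* @objc_retain(i8* %x)
  //
  // uses of %x dominated by the call are rewritten to use %y instead, since
  // objc_retain returns its argument.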
  SmallPtrSet<Instruction *, 4> DependingInstructions;
  SmallPtrSet<const BasicBlock *, 4> Visited;

  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E;) {
    Instruction *Inst = &*I++;

    LLVM_DEBUG(dbgs() << "Visiting: " << *Inst << "\n");

    // First try to peephole Inst. If there is nothing further we can do in
    // terms of undoing objc-arc-expand, process the next inst.
    if (tryToPeepholeInstruction(F, Inst, I, DependingInstructions, Visited,
                                 TailOkForStoreStrongs, BlockColors))
      continue;

    // Otherwise, try to undo objc-arc-expand.

    // Don't use GetArgRCIdentityRoot because we don't want to look through
    // bitcasts and such; to do the replacement, the argument must have type
    // i8*.

    // Function for replacing uses of Arg dominated by Inst.
    auto ReplaceArgUses = [Inst, this](Value *Arg) {
      // If we're compiling bugpointed code, don't get in trouble.
      if (!isa<Instruction>(Arg) && !isa<Argument>(Arg))
        return;

      // Look through the uses of the pointer.
      for (Value::use_iterator UI = Arg->use_begin(), UE = Arg->use_end();
           UI != UE; ) {
        // Increment UI now, because we may unlink its element.
        Use &U = *UI++;
        unsigned OperandNo = U.getOperandNo();

        // If the call's return value dominates a use of the call's argument
        // value, rewrite the use to use the return value. We check for
        // reachability here because an unreachable call is considered to
        // trivially dominate itself, which would lead us to rewriting its
        // argument in terms of its return value, which would lead to
        // infinite loops in GetArgRCIdentityRoot.
        if (!DT->isReachableFromEntry(U) || !DT->dominates(Inst, U))
          continue;

        Changed = true;
        Instruction *Replacement = Inst;
        Type *UseTy = U.get()->getType();
        if (PHINode *PHI = dyn_cast<PHINode>(U.getUser())) {
          // For PHI nodes, insert the bitcast in the predecessor block.
          unsigned ValNo = PHINode::getIncomingValueNumForOperand(OperandNo);
          BasicBlock *IncomingBB = PHI->getIncomingBlock(ValNo);
          if (Replacement->getType() != UseTy) {
            // A catchswitch is both a pad and a terminator, meaning a basic
            // block with a catchswitch has no insertion point. Keep going up
            // the dominator tree until we find a non-catchswitch.
            BasicBlock *InsertBB = IncomingBB;
            while (isa<CatchSwitchInst>(InsertBB->getFirstNonPHI())) {
              InsertBB = DT->getNode(InsertBB)->getIDom()->getBlock();
            }

            assert(DT->dominates(Inst, &InsertBB->back()) &&
                   "Invalid insertion point for bitcast");
            Replacement =
                new BitCastInst(Replacement, UseTy, "", &InsertBB->back());
          }

          // While we're here, rewrite all edges for this PHI, rather
          // than just one use at a time, to minimize the number of
          // bitcasts we emit.
          for (unsigned i = 0, e = PHI->getNumIncomingValues(); i != e; ++i)
            if (PHI->getIncomingBlock(i) == IncomingBB) {
              // Keep the UI iterator valid.
              if (UI != UE &&
                  &PHI->getOperandUse(
                      PHINode::getOperandNumForIncomingValue(i)) == &*UI)
                ++UI;
              PHI->setIncomingValue(i, Replacement);
            }
        } else {
          if (Replacement->getType() != UseTy)
            Replacement = new BitCastInst(Replacement, UseTy, "",
                                          cast<Instruction>(U.getUser()));
          U.set(Replacement);
        }
      }
    };

    Value *Arg = cast<CallInst>(Inst)->getArgOperand(0);
    Value *OrigArg = Arg;

    // TODO: Change this to a do-while.
    for (;;) {
      ReplaceArgUses(Arg);

      // If Arg is a no-op casted pointer, strip one level of casts and iterate.
      if (const BitCastInst *BI = dyn_cast<BitCastInst>(Arg))
        Arg = BI->getOperand(0);
      else if (isa<GEPOperator>(Arg) &&
               cast<GEPOperator>(Arg)->hasAllZeroIndices())
        Arg = cast<GEPOperator>(Arg)->getPointerOperand();
      else if (isa<GlobalAlias>(Arg) &&
               !cast<GlobalAlias>(Arg)->isInterposable())
        Arg = cast<GlobalAlias>(Arg)->getAliasee();
      else {
        // If Arg is a PHI node, get PHIs that are equivalent to it and replace
        // their uses.
        if (PHINode *PN = dyn_cast<PHINode>(Arg)) {
          SmallVector<Value *, 1> PHIList;
          getEquivalentPHIs(*PN, PHIList);
          for (Value *PHI : PHIList)
            ReplaceArgUses(PHI);
        }
        break;
      }
    }

    // Replace bitcast users of Arg that are dominated by Inst.
    SmallVector<BitCastInst *, 2> BitCastUsers;

    // Add all bitcast users of the function argument first.
    for (User *U : OrigArg->users())
      if (auto *BC = dyn_cast<BitCastInst>(U))
        BitCastUsers.push_back(BC);

    // Replace the bitcasts with the call return. Iterate until the list is
    // empty.
    while (!BitCastUsers.empty()) {
      auto *BC = BitCastUsers.pop_back_val();
      for (User *U : BC->users())
        if (auto *B = dyn_cast<BitCastInst>(U))
          BitCastUsers.push_back(B);

      ReplaceArgUses(BC);
    }
  }

  // If this function has no escaping allocas or suspicious vararg usage,
  // objc_storeStrong calls can be marked with the "tail" keyword.
  if (TailOkForStoreStrongs)
    for (CallInst *CI : StoreStrongCalls)
      CI->setTailCall();
  StoreStrongCalls.clear();

  return Changed;
}

//===----------------------------------------------------------------------===//
//                             Misc Pass Manager
//===----------------------------------------------------------------------===//

char ObjCARCContract::ID = 0;
INITIALIZE_PASS_BEGIN(ObjCARCContract, "objc-arc-contract",
                      "ObjC ARC contraction", false, false)
INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_END(ObjCARCContract, "objc-arc-contract",
                    "ObjC ARC contraction", false, false)

void ObjCARCContract::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<AAResultsWrapperPass>();
  AU.addRequired<DominatorTreeWrapperPass>();
  AU.setPreservesCFG();
}

Pass *llvm::createObjCARCContractPass() { return new ObjCARCContract(); }

bool ObjCARCContract::doInitialization(Module &M) {
  // If nothing in the Module uses ARC, don't do anything.
  Run = ModuleHasARC(M);
  if (!Run)
    return false;

  EP.init(&M);

  // Initialize RVInstMarker.
  const char *MarkerKey = "clang.arc.retainAutoreleasedReturnValueMarker";
  RVInstMarker = dyn_cast_or_null<MDString>(M.getModuleFlag(MarkerKey));

  return false;
}