  1. //===- ObjCARCContract.cpp - ObjC ARC Optimization ------------------------===//
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. /// \file
  10. /// This file defines late ObjC ARC optimizations. ARC stands for Automatic
  11. /// Reference Counting and is a system for managing reference counts for objects
  12. /// in Objective C.
  13. ///
  14. /// This specific file mainly deals with ``contracting'' multiple lower level
  15. /// operations into singular higher level operations through pattern matching.
  16. ///
  17. /// WARNING: This file knows about certain library functions. It recognizes them
  18. /// by name, and hardwires knowledge of their semantics.
  19. ///
  20. /// WARNING: This file knows about how certain Objective-C library functions are
  21. /// used. Naive LLVM IR transformations which would otherwise be
  22. /// behavior-preserving may break these assumptions.
  23. ///
  24. //===----------------------------------------------------------------------===//
  25. // TODO: ObjCARCContract could insert PHI nodes when uses aren't
  26. // dominated by single calls.
  27. #include "ObjCARC.h"
  28. #include "ARCRuntimeEntryPoints.h"
  29. #include "DependencyAnalysis.h"
  30. #include "ProvenanceAnalysis.h"
  31. #include "llvm/ADT/Statistic.h"
  32. #include "llvm/IR/Dominators.h"
  33. #include "llvm/IR/InlineAsm.h"
  34. #include "llvm/IR/Operator.h"
  35. #include "llvm/Support/Debug.h"
  36. #include "llvm/Support/raw_ostream.h"
  37. using namespace llvm;
  38. using namespace llvm::objcarc;
  39. #define DEBUG_TYPE "objc-arc-contract"
  40. STATISTIC(NumPeeps, "Number of calls peephole-optimized");
  41. STATISTIC(NumStoreStrongs, "Number objc_storeStrong calls formed");
  42. //===----------------------------------------------------------------------===//
  43. // Declarations
  44. //===----------------------------------------------------------------------===//
namespace {
  /// \brief Late ARC optimizations
  ///
  /// These change the IR in a way that makes it difficult to be analyzed by
  /// ObjCARCOpt, so it's run late.
  class ObjCARCContract : public FunctionPass {
    /// Set whenever this pass modifies the function; returned by
    /// runOnFunction.
    bool Changed;
    /// Cached analysis results, refreshed per function in runOnFunction.
    AliasAnalysis *AA;
    DominatorTree *DT;
    ProvenanceAnalysis PA;
    /// Declarations of the ObjC runtime entry points this pass emits calls to
    /// (storeStrong, retainRV, fused retain+autorelease, ...).
    ARCRuntimeEntryPoints EP;

    /// A flag indicating whether this optimization pass should run.
    bool Run;

    /// The inline asm string to insert between calls and RetainRV calls to make
    /// the optimization work on targets which need it.
    const MDString *RetainRVMarker;

    /// The set of inserted objc_storeStrong calls. If at the end of walking the
    /// function we have found no alloca instructions, these calls can be marked
    /// "tail".
    SmallPtrSet<CallInst *, 8> StoreStrongCalls;

    /// Returns true if we eliminated Inst.
    bool tryToPeepholeInstruction(Function &F, Instruction *Inst,
                                  inst_iterator &Iter,
                                  SmallPtrSetImpl<Instruction *> &DepInsts,
                                  SmallPtrSetImpl<const BasicBlock *> &Visited,
                                  bool &TailOkForStoreStrong);

    /// Turn objc_retain into objc_retainAutoreleasedReturnValue when its
    /// operand is the result of the immediately preceding call.
    bool optimizeRetainCall(Function &F, Instruction *Retain);

    /// Merge a retain/autorelease pair on the same object into a single fused
    /// runtime call.
    bool
    contractAutorelease(Function &F, Instruction *Autorelease,
                        ARCInstKind Class,
                        SmallPtrSetImpl<Instruction *> &DependingInstructions,
                        SmallPtrSetImpl<const BasicBlock *> &Visited);

    /// Merge a load/retain/release/store sequence into an objc_storeStrong
    /// call, erasing the original instructions.
    void tryToContractReleaseIntoStoreStrong(Instruction *Release,
                                             inst_iterator &Iter);

    void getAnalysisUsage(AnalysisUsage &AU) const override;
    bool doInitialization(Module &M) override;
    bool runOnFunction(Function &F) override;

  public:
    static char ID;
    ObjCARCContract() : FunctionPass(ID) {
      initializeObjCARCContractPass(*PassRegistry::getPassRegistry());
    }
  };
}
  89. //===----------------------------------------------------------------------===//
  90. // Implementation
  91. //===----------------------------------------------------------------------===//
  92. /// Turn objc_retain into objc_retainAutoreleasedReturnValue if the operand is a
  93. /// return value. We do this late so we do not disrupt the dataflow analysis in
  94. /// ObjCARCOpt.
  95. bool ObjCARCContract::optimizeRetainCall(Function &F, Instruction *Retain) {
  96. ImmutableCallSite CS(GetArgRCIdentityRoot(Retain));
  97. const Instruction *Call = CS.getInstruction();
  98. if (!Call)
  99. return false;
  100. if (Call->getParent() != Retain->getParent())
  101. return false;
  102. // Check that the call is next to the retain.
  103. BasicBlock::const_iterator I = Call;
  104. ++I;
  105. while (IsNoopInstruction(I)) ++I;
  106. if (&*I != Retain)
  107. return false;
  108. // Turn it to an objc_retainAutoreleasedReturnValue.
  109. Changed = true;
  110. ++NumPeeps;
  111. DEBUG(dbgs() << "Transforming objc_retain => "
  112. "objc_retainAutoreleasedReturnValue since the operand is a "
  113. "return value.\nOld: "<< *Retain << "\n");
  114. // We do not have to worry about tail calls/does not throw since
  115. // retain/retainRV have the same properties.
  116. Constant *Decl = EP.get(ARCRuntimeEntryPointKind::RetainRV);
  117. cast<CallInst>(Retain)->setCalledFunction(Decl);
  118. DEBUG(dbgs() << "New: " << *Retain << "\n");
  119. return true;
  120. }
  121. /// Merge an autorelease with a retain into a fused call.
  122. bool ObjCARCContract::contractAutorelease(
  123. Function &F, Instruction *Autorelease, ARCInstKind Class,
  124. SmallPtrSetImpl<Instruction *> &DependingInstructions,
  125. SmallPtrSetImpl<const BasicBlock *> &Visited) {
  126. const Value *Arg = GetArgRCIdentityRoot(Autorelease);
  127. // Check that there are no instructions between the retain and the autorelease
  128. // (such as an autorelease_pop) which may change the count.
  129. CallInst *Retain = nullptr;
  130. if (Class == ARCInstKind::AutoreleaseRV)
  131. FindDependencies(RetainAutoreleaseRVDep, Arg,
  132. Autorelease->getParent(), Autorelease,
  133. DependingInstructions, Visited, PA);
  134. else
  135. FindDependencies(RetainAutoreleaseDep, Arg,
  136. Autorelease->getParent(), Autorelease,
  137. DependingInstructions, Visited, PA);
  138. Visited.clear();
  139. if (DependingInstructions.size() != 1) {
  140. DependingInstructions.clear();
  141. return false;
  142. }
  143. Retain = dyn_cast_or_null<CallInst>(*DependingInstructions.begin());
  144. DependingInstructions.clear();
  145. if (!Retain || GetBasicARCInstKind(Retain) != ARCInstKind::Retain ||
  146. GetArgRCIdentityRoot(Retain) != Arg)
  147. return false;
  148. Changed = true;
  149. ++NumPeeps;
  150. DEBUG(dbgs() << " Fusing retain/autorelease!\n"
  151. " Autorelease:" << *Autorelease << "\n"
  152. " Retain: " << *Retain << "\n");
  153. Constant *Decl = EP.get(Class == ARCInstKind::AutoreleaseRV
  154. ? ARCRuntimeEntryPointKind::RetainAutoreleaseRV
  155. : ARCRuntimeEntryPointKind::RetainAutorelease);
  156. Retain->setCalledFunction(Decl);
  157. DEBUG(dbgs() << " New RetainAutorelease: " << *Retain << "\n");
  158. EraseInstruction(Autorelease);
  159. return true;
  160. }
/// Scan forward from \p Load for a simple store to Load's pointer operand and
/// for \p Release, in either order, proving along the way that moving the
/// release down to the store is safe. Returns the store on success, or nullptr
/// if any intervening instruction makes the contraction unsafe.
static StoreInst *findSafeStoreForStoreStrongContraction(LoadInst *Load,
                                                         Instruction *Release,
                                                         ProvenanceAnalysis &PA,
                                                         AliasAnalysis *AA) {
  StoreInst *Store = nullptr;
  bool SawRelease = false;

  // Get the location associated with Load.
  MemoryLocation Loc = MemoryLocation::get(Load);

  // Walk down to find the store and the release, which may be in either order.
  for (auto I = std::next(BasicBlock::iterator(Load)),
            E = Load->getParent()->end();
       I != E; ++I) {
    // If we found the store we were looking for and saw the release,
    // break. There is no more work to be done.
    if (Store && SawRelease)
      break;

    // Now we know that we have not seen either the store or the release. If I
    // is the release, mark that we saw the release and continue.
    Instruction *Inst = &*I;
    if (Inst == Release) {
      SawRelease = true;
      continue;
    }

    // Otherwise, we check if Inst is a "good" store. Grab the instruction class
    // of Inst.
    ARCInstKind Class = GetBasicARCInstKind(Inst);

    // If Inst is an unrelated retain, we don't care about it.
    //
    // TODO: This is one area where the optimization could be made more
    // aggressive.
    if (IsRetain(Class))
      continue;

    // If we have seen the store, but not the release...
    if (Store) {
      // We need to make sure that it is safe to move the release from its
      // current position to the store. This implies proving that any
      // instruction in between Store and the Release conservatively can not use
      // the RCIdentityRoot of Release. If we can prove we can ignore Inst, so
      // continue...
      if (!CanUse(Inst, Load, PA, Class)) {
        continue;
      }

      // Otherwise, be conservative and return nullptr.
      return nullptr;
    }

    // Ok, now we know we have not seen a store yet. See if Inst can write to
    // our load location, if it can not, just ignore the instruction.
    if (!(AA->getModRefInfo(Inst, Loc) & AliasAnalysis::Mod))
      continue;

    Store = dyn_cast<StoreInst>(Inst);

    // If Inst can, then check if Inst is a simple store. If Inst is not a
    // store or a store that is not simple, then we have some we do not
    // understand writing to this memory implying we can not move the load
    // over the write to any subsequent store that we may find.
    if (!Store || !Store->isSimple())
      return nullptr;

    // Then make sure that the pointer we are storing to is Ptr. If so, we
    // found our Store!
    if (Store->getPointerOperand() == Loc.Ptr)
      continue;

    // Otherwise, we have an unknown store to some other ptr that clobbers
    // Loc.Ptr. Bail!
    return nullptr;
  }

  // If we did not find the store or did not see the release, fail.
  if (!Store || !SawRelease)
    return nullptr;

  // We succeeded!
  return Store;
}
  231. static Instruction *
  232. findRetainForStoreStrongContraction(Value *New, StoreInst *Store,
  233. Instruction *Release,
  234. ProvenanceAnalysis &PA) {
  235. // Walk up from the Store to find the retain.
  236. BasicBlock::iterator I = Store;
  237. BasicBlock::iterator Begin = Store->getParent()->begin();
  238. while (I != Begin && GetBasicARCInstKind(I) != ARCInstKind::Retain) {
  239. Instruction *Inst = &*I;
  240. // It is only safe to move the retain to the store if we can prove
  241. // conservatively that nothing besides the release can decrement reference
  242. // counts in between the retain and the store.
  243. if (CanDecrementRefCount(Inst, New, PA) && Inst != Release)
  244. return nullptr;
  245. --I;
  246. }
  247. Instruction *Retain = I;
  248. if (GetBasicARCInstKind(Retain) != ARCInstKind::Retain)
  249. return nullptr;
  250. if (GetArgRCIdentityRoot(Retain) != New)
  251. return nullptr;
  252. return Retain;
  253. }
/// Attempt to merge an objc_release with a store, load, and objc_retain to form
/// an objc_storeStrong. An objc_storeStrong:
///
///   objc_storeStrong(i8** %old_ptr, i8* new_value)
///
/// is equivalent to the following IR sequence:
///
///   ; Load old value.
///   %old_value = load i8** %old_ptr               (1)
///
///   ; Increment the new value and then release the old value. This must occur
///   ; in order in case old_value releases new_value in its destructor causing
///   ; us to potentially have a dangling ptr.
///   tail call i8* @objc_retain(i8* %new_value)    (2)
///   tail call void @objc_release(i8* %old_value)  (3)
///
///   ; Store the new_value into old_ptr
///   store i8* %new_value, i8** %old_ptr           (4)
///
/// The safety of this optimization is based around the following
/// considerations:
///
///  1. We are forming the store strong at the store. Thus to perform this
///     optimization it must be safe to move the retain, load, and release to
///     (4).
///  2. We need to make sure that any re-orderings of (1), (2), (3), (4) are
///     safe.
void ObjCARCContract::tryToContractReleaseIntoStoreStrong(Instruction *Release,
                                                          inst_iterator &Iter) {
  // See if we are releasing something that we just loaded.
  auto *Load = dyn_cast<LoadInst>(GetArgRCIdentityRoot(Release));
  if (!Load || !Load->isSimple())
    return;

  // For now, require everything to be in one basic block.
  BasicBlock *BB = Release->getParent();
  if (Load->getParent() != BB)
    return;

  // First scan down the BB from Load, looking for a store of the RCIdentityRoot
  // of Load's
  StoreInst *Store =
      findSafeStoreForStoreStrongContraction(Load, Release, PA, AA);
  // If we fail, bail.
  if (!Store)
    return;

  // Then find what new_value's RCIdentity Root is.
  Value *New = GetRCIdentityRoot(Store->getValueOperand());

  // Then walk up the BB and look for a retain on New without any intervening
  // instructions which conservatively might decrement ref counts.
  Instruction *Retain =
      findRetainForStoreStrongContraction(New, Store, Release, PA);

  // If we fail, bail.
  if (!Retain)
    return;

  Changed = true;
  ++NumStoreStrongs;

  DEBUG(
      llvm::dbgs() << " Contracting retain, release into objc_storeStrong.\n"
                   << " Old:\n"
                   << " Store: " << *Store << "\n"
                   << " Release: " << *Release << "\n"
                   << " Retain: " << *Retain << "\n"
                   << " Load: " << *Load << "\n");

  LLVMContext &C = Release->getContext();
  Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
  Type *I8XX = PointerType::getUnqual(I8X);

  // The runtime entry point takes (i8** dst, i8* new); bitcast the operands
  // if they are typed differently.
  Value *Args[] = { Load->getPointerOperand(), New };
  if (Args[0]->getType() != I8XX)
    Args[0] = new BitCastInst(Args[0], I8XX, "", Store);
  if (Args[1]->getType() != I8X)
    Args[1] = new BitCastInst(Args[1], I8X, "", Store);
  Constant *Decl = EP.get(ARCRuntimeEntryPointKind::StoreStrong);
  CallInst *StoreStrong = CallInst::Create(Decl, Args, "", Store);
  StoreStrong->setDoesNotThrow();
  StoreStrong->setDebugLoc(Store->getDebugLoc());

  // We can't set the tail flag yet, because we haven't yet determined
  // whether there are any escaping allocas. Remember this call, so that
  // we can set the tail flag once we know it's safe.
  StoreStrongCalls.insert(StoreStrong);

  DEBUG(llvm::dbgs() << " New Store Strong: " << *StoreStrong << "\n");

  // Advance the caller's instruction iterator past the store before erasing
  // it, so the iterator stays valid; then delete the contracted sequence.
  if (&*Iter == Store) ++Iter;
  Store->eraseFromParent();
  Release->eraseFromParent();
  EraseInstruction(Retain);
  if (Load->use_empty())
    Load->eraseFromParent();
}
/// Dispatch on \p Inst's ARC instruction class and attempt the matching
/// peephole. Returns false when the caller should additionally try to undo
/// objc-arc-expand on this instruction, true when no further processing of
/// Inst is needed.
bool ObjCARCContract::tryToPeepholeInstruction(
    Function &F, Instruction *Inst, inst_iterator &Iter,
    SmallPtrSetImpl<Instruction *> &DependingInsts,
    SmallPtrSetImpl<const BasicBlock *> &Visited,
    bool &TailOkForStoreStrongs) {
  // Only these library routines return their argument. In particular,
  // objc_retainBlock does not necessarily return its argument.
  ARCInstKind Class = GetBasicARCInstKind(Inst);
  switch (Class) {
  case ARCInstKind::FusedRetainAutorelease:
  case ARCInstKind::FusedRetainAutoreleaseRV:
    return false;
  case ARCInstKind::Autorelease:
  case ARCInstKind::AutoreleaseRV:
    return contractAutorelease(F, Inst, Class, DependingInsts, Visited);
  case ARCInstKind::Retain:
    // Attempt to convert retains to retainrvs if they are next to function
    // calls.
    if (!optimizeRetainCall(F, Inst))
      return false;
    // If we succeed in our optimization, fall through.
    // FALLTHROUGH
  case ARCInstKind::RetainRV: {
    // If we're compiling for a target which needs a special inline-asm
    // marker to do the retainAutoreleasedReturnValue optimization,
    // insert it now.
    if (!RetainRVMarker)
      return false;
    BasicBlock::iterator BBI = Inst;
    BasicBlock *InstParent = Inst->getParent();

    // Step up to see if the call immediately precedes the RetainRV call.
    // If it's an invoke, we have to cross a block boundary. And we have
    // to carefully dodge no-op instructions.
    do {
      if (&*BBI == InstParent->begin()) {
        // At the top of the block: only continue the scan into a unique
        // predecessor (the invoke case).
        BasicBlock *Pred = InstParent->getSinglePredecessor();
        if (!Pred)
          goto decline_rv_optimization;
        BBI = Pred->getTerminator();
        break;
      }
      --BBI;
    } while (IsNoopInstruction(BBI));

    if (&*BBI == GetArgRCIdentityRoot(Inst)) {
      DEBUG(dbgs() << "Adding inline asm marker for "
                      "retainAutoreleasedReturnValue optimization.\n");
      Changed = true;
      // Emit the target-specific marker as a side-effecting inline asm call
      // immediately before the retainRV.
      InlineAsm *IA =
          InlineAsm::get(FunctionType::get(Type::getVoidTy(Inst->getContext()),
                                           /*isVarArg=*/false),
                         RetainRVMarker->getString(),
                         /*Constraints=*/"", /*hasSideEffects=*/true);
      CallInst::Create(IA, "", Inst);
    }
  decline_rv_optimization:
    return false;
  }
  case ARCInstKind::InitWeak: {
    // objc_initWeak(p, null) => *p = null
    CallInst *CI = cast<CallInst>(Inst);
    if (IsNullOrUndef(CI->getArgOperand(1))) {
      Value *Null =
          ConstantPointerNull::get(cast<PointerType>(CI->getType()));
      Changed = true;
      new StoreInst(Null, CI->getArgOperand(0), CI);

      DEBUG(dbgs() << "OBJCARCContract: Old = " << *CI << "\n"
                   << " New = " << *Null << "\n");

      CI->replaceAllUsesWith(Null);
      CI->eraseFromParent();
    }
    return true;
  }
  case ARCInstKind::Release:
    // Try to form an objc store strong from our release. If we fail, there is
    // nothing further to do below, so continue.
    tryToContractReleaseIntoStoreStrong(Inst, Iter);
    return true;
  case ARCInstKind::User:
    // Be conservative if the function has any alloca instructions.
    // Technically we only care about escaping alloca instructions,
    // but this is sufficient to handle some interesting cases.
    if (isa<AllocaInst>(Inst))
      TailOkForStoreStrongs = false;
    return true;
  case ARCInstKind::IntrinsicUser:
    // Remove calls to @clang.arc.use(...).
    Inst->eraseFromParent();
    return true;
  default:
    // Nothing to peephole for the remaining classes.
    return true;
  }
}
  432. //===----------------------------------------------------------------------===//
  433. // Top Level Driver
  434. //===----------------------------------------------------------------------===//
  435. bool ObjCARCContract::runOnFunction(Function &F) {
  436. if (!EnableARCOpts)
  437. return false;
  438. // If nothing in the Module uses ARC, don't do anything.
  439. if (!Run)
  440. return false;
  441. Changed = false;
  442. AA = &getAnalysis<AliasAnalysis>();
  443. DT = &getAnalysis<DominatorTreeWrapperPass>().getDomTree();
  444. PA.setAA(&getAnalysis<AliasAnalysis>());
  445. DEBUG(llvm::dbgs() << "**** ObjCARC Contract ****\n");
  446. // Track whether it's ok to mark objc_storeStrong calls with the "tail"
  447. // keyword. Be conservative if the function has variadic arguments.
  448. // It seems that functions which "return twice" are also unsafe for the
  449. // "tail" argument, because they are setjmp, which could need to
  450. // return to an earlier stack state.
  451. bool TailOkForStoreStrongs =
  452. !F.isVarArg() && !F.callsFunctionThatReturnsTwice();
  453. // For ObjC library calls which return their argument, replace uses of the
  454. // argument with uses of the call return value, if it dominates the use. This
  455. // reduces register pressure.
  456. SmallPtrSet<Instruction *, 4> DependingInstructions;
  457. SmallPtrSet<const BasicBlock *, 4> Visited;
  458. for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E;) {
  459. Instruction *Inst = &*I++;
  460. DEBUG(dbgs() << "Visiting: " << *Inst << "\n");
  461. // First try to peephole Inst. If there is nothing further we can do in
  462. // terms of undoing objc-arc-expand, process the next inst.
  463. if (tryToPeepholeInstruction(F, Inst, I, DependingInstructions, Visited,
  464. TailOkForStoreStrongs))
  465. continue;
  466. // Otherwise, try to undo objc-arc-expand.
  467. // Don't use GetArgRCIdentityRoot because we don't want to look through bitcasts
  468. // and such; to do the replacement, the argument must have type i8*.
  469. Value *Arg = cast<CallInst>(Inst)->getArgOperand(0);
  470. // TODO: Change this to a do-while.
  471. for (;;) {
  472. // If we're compiling bugpointed code, don't get in trouble.
  473. if (!isa<Instruction>(Arg) && !isa<Argument>(Arg))
  474. break;
  475. // Look through the uses of the pointer.
  476. for (Value::use_iterator UI = Arg->use_begin(), UE = Arg->use_end();
  477. UI != UE; ) {
  478. // Increment UI now, because we may unlink its element.
  479. Use &U = *UI++;
  480. unsigned OperandNo = U.getOperandNo();
  481. // If the call's return value dominates a use of the call's argument
  482. // value, rewrite the use to use the return value. We check for
  483. // reachability here because an unreachable call is considered to
  484. // trivially dominate itself, which would lead us to rewriting its
  485. // argument in terms of its return value, which would lead to
  486. // infinite loops in GetArgRCIdentityRoot.
  487. if (DT->isReachableFromEntry(U) && DT->dominates(Inst, U)) {
  488. Changed = true;
  489. Instruction *Replacement = Inst;
  490. Type *UseTy = U.get()->getType();
  491. if (PHINode *PHI = dyn_cast<PHINode>(U.getUser())) {
  492. // For PHI nodes, insert the bitcast in the predecessor block.
  493. unsigned ValNo = PHINode::getIncomingValueNumForOperand(OperandNo);
  494. BasicBlock *BB = PHI->getIncomingBlock(ValNo);
  495. if (Replacement->getType() != UseTy)
  496. Replacement = new BitCastInst(Replacement, UseTy, "",
  497. &BB->back());
  498. // While we're here, rewrite all edges for this PHI, rather
  499. // than just one use at a time, to minimize the number of
  500. // bitcasts we emit.
  501. for (unsigned i = 0, e = PHI->getNumIncomingValues(); i != e; ++i)
  502. if (PHI->getIncomingBlock(i) == BB) {
  503. // Keep the UI iterator valid.
  504. if (UI != UE &&
  505. &PHI->getOperandUse(
  506. PHINode::getOperandNumForIncomingValue(i)) == &*UI)
  507. ++UI;
  508. PHI->setIncomingValue(i, Replacement);
  509. }
  510. } else {
  511. if (Replacement->getType() != UseTy)
  512. Replacement = new BitCastInst(Replacement, UseTy, "",
  513. cast<Instruction>(U.getUser()));
  514. U.set(Replacement);
  515. }
  516. }
  517. }
  518. // If Arg is a no-op casted pointer, strip one level of casts and iterate.
  519. if (const BitCastInst *BI = dyn_cast<BitCastInst>(Arg))
  520. Arg = BI->getOperand(0);
  521. else if (isa<GEPOperator>(Arg) &&
  522. cast<GEPOperator>(Arg)->hasAllZeroIndices())
  523. Arg = cast<GEPOperator>(Arg)->getPointerOperand();
  524. else if (isa<GlobalAlias>(Arg) &&
  525. !cast<GlobalAlias>(Arg)->mayBeOverridden())
  526. Arg = cast<GlobalAlias>(Arg)->getAliasee();
  527. else
  528. break;
  529. }
  530. }
  531. // If this function has no escaping allocas or suspicious vararg usage,
  532. // objc_storeStrong calls can be marked with the "tail" keyword.
  533. if (TailOkForStoreStrongs)
  534. for (CallInst *CI : StoreStrongCalls)
  535. CI->setTailCall();
  536. StoreStrongCalls.clear();
  537. return Changed;
  538. }
//===----------------------------------------------------------------------===//
//                             Misc Pass Manager
//===----------------------------------------------------------------------===//

// Pass identification token; its address is what uniquely identifies the pass.
char ObjCARCContract::ID = 0;

// Register the pass and its analysis dependencies with the pass registry.
INITIALIZE_PASS_BEGIN(ObjCARCContract, "objc-arc-contract",
                      "ObjC ARC contraction", false, false)
INITIALIZE_AG_DEPENDENCY(AliasAnalysis)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_END(ObjCARCContract, "objc-arc-contract",
                    "ObjC ARC contraction", false, false)
  549. void ObjCARCContract::getAnalysisUsage(AnalysisUsage &AU) const {
  550. AU.addRequired<AliasAnalysis>();
  551. AU.addRequired<DominatorTreeWrapperPass>();
  552. AU.setPreservesCFG();
  553. }
/// Public factory used by the pass-manager builder to instantiate this pass.
Pass *llvm::createObjCARCContractPass() { return new ObjCARCContract(); }
  555. bool ObjCARCContract::doInitialization(Module &M) {
  556. // If nothing in the Module uses ARC, don't do anything.
  557. Run = ModuleHasARC(M);
  558. if (!Run)
  559. return false;
  560. EP.init(&M);
  561. // Initialize RetainRVMarker.
  562. RetainRVMarker = nullptr;
  563. if (NamedMDNode *NMD =
  564. M.getNamedMetadata("clang.arc.retainAutoreleasedReturnValueMarker"))
  565. if (NMD->getNumOperands() == 1) {
  566. const MDNode *N = NMD->getOperand(0);
  567. if (N->getNumOperands() == 1)
  568. if (const MDString *S = dyn_cast<MDString>(N->getOperand(0)))
  569. RetainRVMarker = S;
  570. }
  571. return false;
  572. }