//===- CodeMetrics.cpp - Code cost measurements ---------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements code cost measurement utilities.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/CodeMetrics.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"

#define DEBUG_TYPE "code-metrics"

using namespace llvm;

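// Added note: starting from a worklist seeded with the values already known
// to be ephemeral (the @llvm.assume calls themselves), transitively mark as
// ephemeral every value whose users are all ephemeral, queueing each such
// value's speculatable operands for the same treatment.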
static void completeEphemeralValues(SmallVector<const Value *, 16> &WorkSet,
                                    SmallPtrSetImpl<const Value*> &EphValues) {
  SmallPtrSet<const Value *, 32> Visited;

  // Make sure that all of the items in WorkSet are in our EphValues set.
  EphValues.insert(WorkSet.begin(), WorkSet.end());

  // Note: We don't speculate PHIs here, so we'll miss instruction chains kept
  // alive only by ephemeral values.

  while (!WorkSet.empty()) {
    const Value *V = WorkSet.front();
    WorkSet.erase(WorkSet.begin());

    if (!Visited.insert(V).second)
      continue;

    // If all uses of this value are ephemeral, then so is this value.
    bool FoundNEUse = false;
    for (const User *I : V->users())
      if (!EphValues.count(I)) {
        FoundNEUse = true;
        break;
      }

    if (FoundNEUse)
      continue;

    EphValues.insert(V);
    DEBUG(dbgs() << "Ephemeral Value: " << *V << "\n");

    if (const User *U = dyn_cast<User>(V))
      for (const Value *J : U->operands()) {
        if (isSafeToSpeculativelyExecute(J))
          WorkSet.push_back(J);
      }
  }
}

// Find all ephemeral values.
void CodeMetrics::collectEphemeralValues(
    const Loop *L, AssumptionCache *AC,
    SmallPtrSetImpl<const Value *> &EphValues) {
  SmallVector<const Value *, 16> WorkSet;

  for (auto &AssumeVH : AC->assumptions()) {
    if (!AssumeVH)
      continue;
    Instruction *I = cast<Instruction>(AssumeVH);

    // Filter out call sites outside of the loop so we don't do a function's
    // worth of work for each of its loops (and, in the common case, ephemeral
    // values in the loop are likely due to @llvm.assume calls in the loop).
    if (!L->contains(I->getParent()))
      continue;

    WorkSet.push_back(I);
  }

  completeEphemeralValues(WorkSet, EphValues);
}

void CodeMetrics::collectEphemeralValues(
    const Function *F, AssumptionCache *AC,
    SmallPtrSetImpl<const Value *> &EphValues) {
  SmallVector<const Value *, 16> WorkSet;

  for (auto &AssumeVH : AC->assumptions()) {
    if (!AssumeVH)
      continue;
    Instruction *I = cast<Instruction>(AssumeVH);
    assert(I->getParent()->getParent() == F &&
           "Found assumption for the wrong function!");
    WorkSet.push_back(I);
  }

  completeEphemeralValues(WorkSet, EphValues);
}

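// Added note: ephemeral values exist only to feed @llvm.assume and produce no
// real code in the final output, so the per-block analysis below deliberately
// leaves them out of its size and cost counts.
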
/// analyzeBasicBlock - Fill in the current structure with information gleaned
/// from the specified block.
void CodeMetrics::analyzeBasicBlock(const BasicBlock *BB,
                                    const TargetTransformInfo &TTI,
                                    SmallPtrSetImpl<const Value*> &EphValues) {
  ++NumBlocks;
  unsigned NumInstsBeforeThisBB = NumInsts;
  for (BasicBlock::const_iterator II = BB->begin(), E = BB->end();
       II != E; ++II) {
    // Skip ephemeral values.
    if (EphValues.count(II))
      continue;

    // Special handling for calls.
    if (isa<CallInst>(II) || isa<InvokeInst>(II)) {
      ImmutableCallSite CS(cast<Instruction>(II));

      if (const Function *F = CS.getCalledFunction()) {
        // If a function is both internal and has a single use, then it is
        // extremely likely to get inlined in the future (it was probably
        // exposed by an interleaved devirtualization pass).
        if (!CS.isNoInline() && F->hasInternalLinkage() && F->hasOneUse())
          ++NumInlineCandidates;

        // If this call is to the function itself, then the function is
        // recursive. Inlining it into other functions is a bad idea, because
        // this is basically just a form of loop peeling, and our metrics
        // aren't useful for that case.
        if (F == BB->getParent())
          isRecursive = true;

        if (TTI.isLoweredToCall(F))
          ++NumCalls;
      } else {
        // We don't want inline asm to count as a call - that would prevent
        // loop unrolling. The argument setup cost is still real, though.
        if (!isa<InlineAsm>(CS.getCalledValue()))
          ++NumCalls;
      }
    }

    if (const AllocaInst *AI = dyn_cast<AllocaInst>(II)) {
      if (!AI->isStaticAlloca())
        this->usesDynamicAlloca = true;
    }

    if (isa<ExtractElementInst>(II) || II->getType()->isVectorTy())
      ++NumVectorInsts;

    if (const CallInst *CI = dyn_cast<CallInst>(II))
      if (CI->cannotDuplicate())
        notDuplicatable = true;

    if (const InvokeInst *InvI = dyn_cast<InvokeInst>(II))
      if (InvI->cannotDuplicate())
        notDuplicatable = true;

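    // Added note: charge this instruction using the target's user-cost
    // estimate rather than a flat count of one, so instructions the target
    // expects to fold away for free (TTI::TCC_Free == 0) don't inflate the
    // size metric.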
    NumInsts += TTI.getUserCost(&*II);
  }

  if (isa<ReturnInst>(BB->getTerminator()))
    ++NumRets;

  // We never want to inline functions that contain an indirectbr. Inlining
  // them would be incorrect because all of the blockaddresses (in static
  // global initializers, for example) would still refer to the original
  // function, so the indirect jump would branch from the inlined copy of the
  // function into the original function, which is undefined behavior.
  // FIXME: This logic isn't really right; we can safely inline functions
  // with indirectbr's as long as no other function or global references the
  // blockaddress of a block within the current function. And as a QOI issue,
  // if someone is using a blockaddress without an indirectbr, and that
  // reference somehow ends up in another function or global, we probably
  // don't want to inline this function.
  notDuplicatable |= isa<IndirectBrInst>(BB->getTerminator());

  // Remember NumInsts for this BB.
  NumBBInsts[BB] = NumInsts - NumInstsBeforeThisBB;
}
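
// Illustrative sketch (added; not part of the original file): a client pass
// would typically seed the ephemeral-value set once and then feed each block
// to analyzeBasicBlock, reading the accumulated fields afterwards. Here `L`,
// `AC`, `TTI`, and `Threshold` stand for the caller's Loop, AssumptionCache,
// TargetTransformInfo, and size limit; those names are assumed for the
// example.
//
//   CodeMetrics Metrics;
//   SmallPtrSet<const Value *, 32> EphValues;
//   CodeMetrics::collectEphemeralValues(L, &AC, EphValues);
//   for (BasicBlock *BB : L->blocks())
//     Metrics.analyzeBasicBlock(BB, TTI, EphValues);
//   if (Metrics.notDuplicatable || Metrics.NumInsts > Threshold)
//     return;  // e.g. too large or unsafe to duplicate for unrolling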