//===-- MachineFunction.cpp -----------------------------------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// Collect native machine code information for a function. This allows
// target-specific information about the generated code to be stored with each
// function.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/CodeGen/MachineConstantPool.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunctionInitializer.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineJumpTableInfo.h"
#include "llvm/CodeGen/MachineModuleInfo.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugInfo.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/ModuleSlotTracker.h"
#include "llvm/MC/MCAsmInfo.h"
#include "llvm/MC/MCContext.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/GraphWriter.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Target/TargetFrameLowering.h"
#include "llvm/Target/TargetLowering.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetSubtargetInfo.h"

using namespace llvm;

#define DEBUG_TYPE "codegen"

void MachineFunctionInitializer::anchor() {}

//===----------------------------------------------------------------------===//
// MachineFunction implementation
//===----------------------------------------------------------------------===//

// Out-of-line virtual method.
MachineFunctionInfo::~MachineFunctionInfo() {}

void ilist_traits<MachineBasicBlock>::deleteNode(MachineBasicBlock *MBB) {
  MBB->getParent()->DeleteMachineBasicBlock(MBB);
}

MachineFunction::MachineFunction(const Function *F, const TargetMachine &TM,
                                 unsigned FunctionNum, MachineModuleInfo &mmi)
    : Fn(F), Target(TM), STI(TM.getSubtargetImpl(*F)), Ctx(mmi.getContext()),
      MMI(mmi) {
  if (STI->getRegisterInfo())
    RegInfo = new (Allocator) MachineRegisterInfo(this);
  else
    RegInfo = nullptr;

  MFInfo = nullptr;
  FrameInfo = new (Allocator)
      MachineFrameInfo(STI->getFrameLowering()->getStackAlignment(),
                       STI->getFrameLowering()->isStackRealignable(),
                       !F->hasFnAttribute("no-realign-stack"));

  if (Fn->hasFnAttribute(Attribute::StackAlignment))
    FrameInfo->ensureMaxAlignment(Fn->getFnStackAlignment());

  ConstantPool = new (Allocator) MachineConstantPool(getDataLayout());
  Alignment = STI->getTargetLowering()->getMinFunctionAlignment();

  // FIXME: Shouldn't use pref alignment if explicit alignment is set on Fn.
  if (!Fn->hasFnAttribute(Attribute::OptimizeForSize))
    Alignment = std::max(Alignment,
                         STI->getTargetLowering()->getPrefFunctionAlignment());

  FunctionNumber = FunctionNum;
  JumpTableInfo = nullptr;
}

MachineFunction::~MachineFunction() {
  // Don't call destructors on MachineInstr and MachineOperand. All of their
  // memory comes from the BumpPtrAllocator which is about to be purged.
  //
  // Do call MachineBasicBlock destructors; they contain std::vectors.
  for (iterator I = begin(), E = end(); I != E; I = BasicBlocks.erase(I))
    I->Insts.clearAndLeakNodesUnsafely();

  InstructionRecycler.clear(Allocator);
  OperandRecycler.clear(Allocator);
  BasicBlockRecycler.clear(Allocator);
  if (RegInfo) {
    RegInfo->~MachineRegisterInfo();
    Allocator.Deallocate(RegInfo);
  }
  if (MFInfo) {
    MFInfo->~MachineFunctionInfo();
    Allocator.Deallocate(MFInfo);
  }

  FrameInfo->~MachineFrameInfo();
  Allocator.Deallocate(FrameInfo);

  ConstantPool->~MachineConstantPool();
  Allocator.Deallocate(ConstantPool);

  if (JumpTableInfo) {
    JumpTableInfo->~MachineJumpTableInfo();
    Allocator.Deallocate(JumpTableInfo);
  }
}

const DataLayout &MachineFunction::getDataLayout() const {
  return Fn->getParent()->getDataLayout();
}

/// Get the JumpTableInfo for this function.
/// If it does not already exist, allocate one.
MachineJumpTableInfo *MachineFunction::
getOrCreateJumpTableInfo(unsigned EntryKind) {
  if (JumpTableInfo) return JumpTableInfo;

  JumpTableInfo = new (Allocator)
    MachineJumpTableInfo((MachineJumpTableInfo::JTEntryKind)EntryKind);
  return JumpTableInfo;
}

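// A minimal usage sketch (illustrative only, not code from this file): a
// target's lowering code typically asks for the table lazily and then fills
// it in, e.g.
//   MachineJumpTableInfo *JTI =
//       MF.getOrCreateJumpTableInfo(MachineJumpTableInfo::EK_LabelDifference32);
//   unsigned JTIdx = JTI->createJumpTableIndex(DestBBs);
// where MF and DestBBs are assumed to be a MachineFunction and a
// std::vector<MachineBasicBlock*> supplied by the caller.
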
/// Should we be emitting segmented stack code for this function?
bool MachineFunction::shouldSplitStack() {
  return getFunction()->hasFnAttribute("split-stack");
}

/// This discards all of the MachineBasicBlock numbers and recomputes them.
/// This guarantees that the MBB numbers are sequential, dense, and match the
/// ordering of the blocks within the function. If a specific MachineBasicBlock
/// is specified, only that block and those after it are renumbered.
void MachineFunction::RenumberBlocks(MachineBasicBlock *MBB) {
  if (empty()) { MBBNumbering.clear(); return; }
  MachineFunction::iterator MBBI, E = end();
  if (MBB == nullptr)
    MBBI = begin();
  else
    MBBI = MBB;

  // Figure out the block number this should have.
  unsigned BlockNo = 0;
  if (MBBI != begin())
    BlockNo = std::prev(MBBI)->getNumber() + 1;

  for (; MBBI != E; ++MBBI, ++BlockNo) {
    if (MBBI->getNumber() != (int)BlockNo) {
      // Remove use of the old number.
      if (MBBI->getNumber() != -1) {
        assert(MBBNumbering[MBBI->getNumber()] == &*MBBI &&
               "MBB number mismatch!");
        MBBNumbering[MBBI->getNumber()] = nullptr;
      }

      // If BlockNo is already taken, set that block's number to -1.
      if (MBBNumbering[BlockNo])
        MBBNumbering[BlockNo]->setNumber(-1);

      MBBNumbering[BlockNo] = MBBI;
      MBBI->setNumber(BlockNo);
    }
  }

  // Okay, all the blocks are renumbered. If we have compactified the block
  // numbering, shrink MBBNumbering now.
  assert(BlockNo <= MBBNumbering.size() && "Mismatch!");
  MBBNumbering.resize(BlockNo);
}

/// Allocate a new MachineInstr. Use this instead of `new MachineInstr'.
MachineInstr *
MachineFunction::CreateMachineInstr(const MCInstrDesc &MCID,
                                    DebugLoc DL, bool NoImp) {
  return new (InstructionRecycler.Allocate<MachineInstr>(Allocator))
    MachineInstr(*this, MCID, DL, NoImp);
}

/// Create a new MachineInstr which is a copy of the 'Orig' instruction,
/// identical in all ways except the instruction has no parent, prev, or next.
MachineInstr *
MachineFunction::CloneMachineInstr(const MachineInstr *Orig) {
  return new (InstructionRecycler.Allocate<MachineInstr>(Allocator))
             MachineInstr(*this, *Orig);
}

/// Delete the given MachineInstr.
///
/// This function also serves as the MachineInstr destructor - the real
/// ~MachineInstr() destructor must be empty.
void
MachineFunction::DeleteMachineInstr(MachineInstr *MI) {
  // Strip it for parts. The operand array and the MI object itself are
  // independently recyclable.
  if (MI->Operands)
    deallocateOperandArray(MI->CapOperands, MI->Operands);

  // Don't call ~MachineInstr() which must be trivial anyway because
  // ~MachineFunction drops whole lists of MachineInstrs without calling their
  // destructors.
  InstructionRecycler.Deallocate(Allocator, MI);
}

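// A rough sketch of the intended lifecycle (illustrative only; most code goes
// through BuildMI rather than calling these directly):
//   MachineInstr *MI = MF.CreateMachineInstr(TII->get(Opcode), DL);
//   MBB->insert(InsertPt, MI);   // ownership stays with the MachineFunction
//   ...
//   MBB->erase(MI);              // ends up back in DeleteMachineInstr
// Here MF, TII, Opcode, DL, MBB and InsertPt are assumed to come from the
// surrounding pass.
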
/// Allocate a new MachineBasicBlock. Use this instead of
/// `new MachineBasicBlock'.
MachineBasicBlock *
MachineFunction::CreateMachineBasicBlock(const BasicBlock *bb) {
  return new (BasicBlockRecycler.Allocate<MachineBasicBlock>(Allocator))
             MachineBasicBlock(*this, bb);
}

/// Delete the given MachineBasicBlock.
void
MachineFunction::DeleteMachineBasicBlock(MachineBasicBlock *MBB) {
  assert(MBB->getParent() == this && "MBB parent mismatch!");
  MBB->~MachineBasicBlock();
  BasicBlockRecycler.Deallocate(Allocator, MBB);
}

MachineMemOperand *
MachineFunction::getMachineMemOperand(MachinePointerInfo PtrInfo, unsigned f,
                                      uint64_t s, unsigned base_alignment,
                                      const AAMDNodes &AAInfo,
                                      const MDNode *Ranges) {
  return new (Allocator) MachineMemOperand(PtrInfo, f, s, base_alignment,
                                           AAInfo, Ranges);
}

MachineMemOperand *
MachineFunction::getMachineMemOperand(const MachineMemOperand *MMO,
                                      int64_t Offset, uint64_t Size) {
  if (MMO->getValue())
    return new (Allocator)
               MachineMemOperand(MachinePointerInfo(MMO->getValue(),
                                                    MMO->getOffset()+Offset),
                                 MMO->getFlags(), Size,
                                 MMO->getBaseAlignment());
  return new (Allocator)
             MachineMemOperand(MachinePointerInfo(MMO->getPseudoValue(),
                                                  MMO->getOffset()+Offset),
                               MMO->getFlags(), Size,
                               MMO->getBaseAlignment());
}

MachineInstr::mmo_iterator
MachineFunction::allocateMemRefsArray(unsigned long Num) {
  return Allocator.Allocate<MachineMemOperand *>(Num);
}

std::pair<MachineInstr::mmo_iterator, MachineInstr::mmo_iterator>
MachineFunction::extractLoadMemRefs(MachineInstr::mmo_iterator Begin,
                                    MachineInstr::mmo_iterator End) {
  // Count the number of load mem refs.
  unsigned Num = 0;
  for (MachineInstr::mmo_iterator I = Begin; I != End; ++I)
    if ((*I)->isLoad())
      ++Num;

  // Allocate a new array and populate it with the load information.
  MachineInstr::mmo_iterator Result = allocateMemRefsArray(Num);
  unsigned Index = 0;
  for (MachineInstr::mmo_iterator I = Begin; I != End; ++I) {
    if ((*I)->isLoad()) {
      if (!(*I)->isStore())
        // Reuse the MMO.
        Result[Index] = *I;
      else {
        // Clone the MMO and unset the store flag.
        MachineMemOperand *JustLoad =
          getMachineMemOperand((*I)->getPointerInfo(),
                               (*I)->getFlags() & ~MachineMemOperand::MOStore,
                               (*I)->getSize(), (*I)->getBaseAlignment(),
                               (*I)->getAAInfo());
        Result[Index] = JustLoad;
      }
      ++Index;
    }
  }
  return std::make_pair(Result, Result + Num);
}

std::pair<MachineInstr::mmo_iterator, MachineInstr::mmo_iterator>
MachineFunction::extractStoreMemRefs(MachineInstr::mmo_iterator Begin,
                                     MachineInstr::mmo_iterator End) {
  // Count the number of store mem refs.
  unsigned Num = 0;
  for (MachineInstr::mmo_iterator I = Begin; I != End; ++I)
    if ((*I)->isStore())
      ++Num;

  // Allocate a new array and populate it with the store information.
  MachineInstr::mmo_iterator Result = allocateMemRefsArray(Num);
  unsigned Index = 0;
  for (MachineInstr::mmo_iterator I = Begin; I != End; ++I) {
    if ((*I)->isStore()) {
      if (!(*I)->isLoad())
        // Reuse the MMO.
        Result[Index] = *I;
      else {
        // Clone the MMO and unset the load flag.
        MachineMemOperand *JustStore =
          getMachineMemOperand((*I)->getPointerInfo(),
                               (*I)->getFlags() & ~MachineMemOperand::MOLoad,
                               (*I)->getSize(), (*I)->getBaseAlignment(),
                               (*I)->getAAInfo());
        Result[Index] = JustStore;
      }
      ++Index;
    }
  }
  return std::make_pair(Result, Result + Num);
}

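// Illustrative use (an assumption about typical callers, not code from this
// file): when a pass splits a load-store instruction into separate load and
// store instructions, it can divide the original memory operands like so:
//   auto LoadRefs  = MF.extractLoadMemRefs(MI->memoperands_begin(),
//                                          MI->memoperands_end());
//   auto StoreRefs = MF.extractStoreMemRefs(MI->memoperands_begin(),
//                                           MI->memoperands_end());
//   LoadMI->setMemRefs(LoadRefs.first, LoadRefs.second);
//   StoreMI->setMemRefs(StoreRefs.first, StoreRefs.second);
// MF, MI, LoadMI and StoreMI are assumed to come from the calling pass.
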
#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
void MachineFunction::dump() const {
  print(dbgs());
}
#endif

StringRef MachineFunction::getName() const {
  assert(getFunction() && "No function!");
  return getFunction()->getName();
}

void MachineFunction::print(raw_ostream &OS, SlotIndexes *Indexes) const {
  OS << "# Machine code for function " << getName() << ": ";
  if (RegInfo) {
    OS << (RegInfo->isSSA() ? "SSA" : "Post SSA");
    if (!RegInfo->tracksLiveness())
      OS << ", not tracking liveness";
  }
  OS << '\n';

  // Print Frame Information
  FrameInfo->print(*this, OS);

  // Print JumpTable Information
  if (JumpTableInfo)
    JumpTableInfo->print(OS);

  // Print Constant Pool
  ConstantPool->print(OS);

  const TargetRegisterInfo *TRI = getSubtarget().getRegisterInfo();

  if (RegInfo && !RegInfo->livein_empty()) {
    OS << "Function Live Ins: ";
    for (MachineRegisterInfo::livein_iterator
         I = RegInfo->livein_begin(), E = RegInfo->livein_end(); I != E; ++I) {
      OS << PrintReg(I->first, TRI);
      if (I->second)
        OS << " in " << PrintReg(I->second, TRI);
      if (std::next(I) != E)
        OS << ", ";
    }
    OS << '\n';
  }

  ModuleSlotTracker MST(getFunction()->getParent());
  MST.incorporateFunction(*getFunction());
  for (const auto &BB : *this) {
    OS << '\n';
    BB.print(OS, MST, Indexes);
  }

  OS << "\n# End machine code for function " << getName() << ".\n\n";
}

namespace llvm {
  template<>
  struct DOTGraphTraits<const MachineFunction*> : public DefaultDOTGraphTraits {

    DOTGraphTraits (bool isSimple=false) : DefaultDOTGraphTraits(isSimple) {}

    static std::string getGraphName(const MachineFunction *F) {
      return ("CFG for '" + F->getName() + "' function").str();
    }

    std::string getNodeLabel(const MachineBasicBlock *Node,
                             const MachineFunction *Graph) {
      std::string OutStr;
      {
        raw_string_ostream OSS(OutStr);

        if (isSimple()) {
          OSS << "BB#" << Node->getNumber();
          if (const BasicBlock *BB = Node->getBasicBlock())
            OSS << ": " << BB->getName();
        } else
          Node->print(OSS);
      }

      if (OutStr[0] == '\n') OutStr.erase(OutStr.begin());

      // Process string output to make it nicer...
      for (unsigned i = 0; i != OutStr.length(); ++i)
        if (OutStr[i] == '\n') {                            // Left justify
          OutStr[i] = '\\';
          OutStr.insert(OutStr.begin()+i+1, 'l');
        }

      return OutStr;
    }
  };
}

void MachineFunction::viewCFG() const
{
#ifndef NDEBUG
  ViewGraph(this, "mf" + getName());
#else
  errs() << "MachineFunction::viewCFG is only available in debug builds on "
         << "systems with Graphviz or gv!\n";
#endif // NDEBUG
}

void MachineFunction::viewCFGOnly() const
{
#ifndef NDEBUG
  ViewGraph(this, "mf" + getName(), true);
#else
  errs() << "MachineFunction::viewCFGOnly is only available in debug builds on "
         << "systems with Graphviz or gv!\n";
#endif // NDEBUG
}

/// Add the specified physical register as a live-in value and
/// create a corresponding virtual register for it.
unsigned MachineFunction::addLiveIn(unsigned PReg,
                                    const TargetRegisterClass *RC) {
  MachineRegisterInfo &MRI = getRegInfo();
  unsigned VReg = MRI.getLiveInVirtReg(PReg);
  if (VReg) {
    const TargetRegisterClass *VRegRC = MRI.getRegClass(VReg);
    (void)VRegRC;
    // A physical register can be added several times.
    // Between two calls, the register class of the related virtual register
    // may have been constrained to match some operation constraints.
    // In that case, check that the current register class includes the
    // physical register and is a sub class of the specified RC.
    assert((VRegRC == RC || (VRegRC->contains(PReg) &&
                             RC->hasSubClassEq(VRegRC))) &&
           "Register class mismatch!");
    return VReg;
  }
  VReg = MRI.createVirtualRegister(RC);
  MRI.addLiveIn(PReg, VReg);
  return VReg;
}

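// Typical use (a sketch of how targets usually lower incoming arguments; the
// register and class names are placeholders, not taken from this file):
//   unsigned VReg = MF.addLiveIn(IncomingPReg, &SomeTarget::GPRRegClass);
//   SDValue Arg = DAG.getCopyFromReg(Chain, DL, VReg, MVT::i32);
// The physical register becomes a function live-in and all later uses go
// through the returned virtual register.
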
/// Return the MCSymbol for the specified non-empty jump table.
/// If isLinkerPrivate is specified, an 'l' label is returned, otherwise a
/// normal 'L' label is returned.
MCSymbol *MachineFunction::getJTISymbol(unsigned JTI, MCContext &Ctx,
                                        bool isLinkerPrivate) const {
  const DataLayout &DL = getDataLayout();
  assert(JumpTableInfo && "No jump tables");
  assert(JTI < JumpTableInfo->getJumpTables().size() && "Invalid JTI!");

  const char *Prefix = isLinkerPrivate ? DL.getLinkerPrivateGlobalPrefix()
                                       : DL.getPrivateGlobalPrefix();
  SmallString<60> Name;
  raw_svector_ostream(Name)
    << Prefix << "JTI" << getFunctionNumber() << '_' << JTI;
  return Ctx.getOrCreateSymbol(Name);
}

/// Return a function-local symbol to represent the PIC base.
MCSymbol *MachineFunction::getPICBaseSymbol() const {
  const DataLayout &DL = getDataLayout();
  return Ctx.getOrCreateSymbol(Twine(DL.getPrivateGlobalPrefix()) +
                               Twine(getFunctionNumber()) + "$pb");
}

//===----------------------------------------------------------------------===//
//  MachineFrameInfo implementation
//===----------------------------------------------------------------------===//

/// Make sure the function is at least Align bytes aligned.
void MachineFrameInfo::ensureMaxAlignment(unsigned Align) {
  if (!StackRealignable || !RealignOption)
    assert(Align <= StackAlignment &&
           "For targets without stack realignment, Align is out of limit!");
  if (MaxAlignment < Align) MaxAlignment = Align;
}

/// Clamp the alignment if requested and emit a warning.
static inline unsigned clampStackAlignment(bool ShouldClamp, unsigned Align,
                                           unsigned StackAlign) {
  if (!ShouldClamp || Align <= StackAlign)
    return Align;
  DEBUG(dbgs() << "Warning: requested alignment " << Align
               << " exceeds the stack alignment " << StackAlign
               << " when stack realignment is off" << '\n');
  return StackAlign;
}

/// Create a new statically sized stack object, returning a nonnegative
/// identifier to represent it.
int MachineFrameInfo::CreateStackObject(uint64_t Size, unsigned Alignment,
                                        bool isSS, const AllocaInst *Alloca) {
  assert(Size != 0 && "Cannot allocate zero size stack objects!");
  Alignment = clampStackAlignment(!StackRealignable || !RealignOption,
                                  Alignment, StackAlignment);
  Objects.push_back(StackObject(Size, Alignment, 0, false, isSS, Alloca,
                                !isSS));
  int Index = (int)Objects.size() - NumFixedObjects - 1;
  assert(Index >= 0 && "Bad frame index!");
  ensureMaxAlignment(Alignment);
  return Index;
}

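// Sketch of typical caller-side use (assumed context, not part of this file):
// reserving an 8-byte, 8-byte-aligned slot and addressing it via its frame
// index, e.g.
//   int FI = MF.getFrameInfo()->CreateStackObject(/*Size=*/8, /*Alignment=*/8,
//                                                 /*isSS=*/false);
//   SDValue Slot = DAG.getFrameIndex(FI, TLI.getPointerTy(DAG.getDataLayout()));
// Spill slots would instead go through CreateSpillStackObject or
// CreateFixedSpillStackObject, defined below.
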
/// Create a new statically sized stack object that represents a spill slot,
/// returning a nonnegative identifier to represent it.
int MachineFrameInfo::CreateSpillStackObject(uint64_t Size,
                                             unsigned Alignment) {
  Alignment = clampStackAlignment(!StackRealignable || !RealignOption,
                                  Alignment, StackAlignment);
  CreateStackObject(Size, Alignment, true);
  int Index = (int)Objects.size() - NumFixedObjects - 1;
  ensureMaxAlignment(Alignment);
  return Index;
}

/// Notify the MachineFrameInfo object that a variable sized object has been
/// created. This must be called whenever a variable sized object is created,
/// whether or not the index returned is actually used.
int MachineFrameInfo::CreateVariableSizedObject(unsigned Alignment,
                                                const AllocaInst *Alloca) {
  HasVarSizedObjects = true;
  Alignment = clampStackAlignment(!StackRealignable || !RealignOption,
                                  Alignment, StackAlignment);
  Objects.push_back(StackObject(0, Alignment, 0, false, false, Alloca, true));
  ensureMaxAlignment(Alignment);
  return (int)Objects.size()-NumFixedObjects-1;
}

/// Create a new object at a fixed location on the stack.
/// All fixed objects should be created before other objects are created for
/// efficiency. By default, fixed objects are immutable. This returns an
/// index with a negative value.
int MachineFrameInfo::CreateFixedObject(uint64_t Size, int64_t SPOffset,
                                        bool Immutable, bool isAliased) {
  assert(Size != 0 && "Cannot allocate zero size fixed stack objects!");
  // The alignment of the frame index can be determined from its offset from
  // the incoming frame position. If the frame object is at offset 32 and
  // the stack is guaranteed to be 16-byte aligned, then we know that the
  // object is 16-byte aligned.
  unsigned Align = MinAlign(SPOffset, StackAlignment);
  Align = clampStackAlignment(!StackRealignable || !RealignOption, Align,
                              StackAlignment);
  Objects.insert(Objects.begin(), StackObject(Size, Align, SPOffset, Immutable,
                                              /*isSS*/   false,
                                              /*Alloca*/ nullptr, isAliased));
  return -++NumFixedObjects;
}

/// Create a spill slot at a fixed location on the stack.
/// Returns an index with a negative value.
int MachineFrameInfo::CreateFixedSpillStackObject(uint64_t Size,
                                                  int64_t SPOffset) {
  unsigned Align = MinAlign(SPOffset, StackAlignment);
  Align = clampStackAlignment(!StackRealignable || !RealignOption, Align,
                              StackAlignment);
  Objects.insert(Objects.begin(), StackObject(Size, Align, SPOffset,
                                              /*Immutable*/ true,
                                              /*isSS*/ true,
                                              /*Alloca*/ nullptr,
                                              /*isAliased*/ false));
  return -++NumFixedObjects;
}

BitVector MachineFrameInfo::getPristineRegs(const MachineFunction &MF) const {
  const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();
  BitVector BV(TRI->getNumRegs());

  // Before CSI is calculated, no registers are considered pristine. They can
  // be freely used and PEI will make sure they are saved.
  if (!isCalleeSavedInfoValid())
    return BV;

  for (const MCPhysReg *CSR = TRI->getCalleeSavedRegs(&MF); CSR && *CSR; ++CSR)
    BV.set(*CSR);

  // Saved CSRs are not pristine.
  const std::vector<CalleeSavedInfo> &CSI = getCalleeSavedInfo();
  for (std::vector<CalleeSavedInfo>::const_iterator I = CSI.begin(),
         E = CSI.end(); I != E; ++I)
    BV.reset(I->getReg());

  return BV;
}

unsigned MachineFrameInfo::estimateStackSize(const MachineFunction &MF) const {
  const TargetFrameLowering *TFI = MF.getSubtarget().getFrameLowering();
  const TargetRegisterInfo *RegInfo = MF.getSubtarget().getRegisterInfo();
  unsigned MaxAlign = getMaxAlignment();
  int Offset = 0;

  // This code is very, very similar to PEI::calculateFrameObjectOffsets().
  // It really should be refactored to share code. Until then, changes
  // should keep in mind that there's tight coupling between the two.

  for (int i = getObjectIndexBegin(); i != 0; ++i) {
    int FixedOff = -getObjectOffset(i);
    if (FixedOff > Offset) Offset = FixedOff;
  }
  for (unsigned i = 0, e = getObjectIndexEnd(); i != e; ++i) {
    if (isDeadObjectIndex(i))
      continue;
    Offset += getObjectSize(i);
    unsigned Align = getObjectAlignment(i);
    // Adjust to alignment boundary
    Offset = (Offset+Align-1)/Align*Align;

    MaxAlign = std::max(Align, MaxAlign);
  }

  if (adjustsStack() && TFI->hasReservedCallFrame(MF))
    Offset += getMaxCallFrameSize();

  // Round up the size to a multiple of the alignment. If the function has
  // any calls or alloca's, align to the target's StackAlignment value to
  // ensure that the callee's frame or the alloca data is suitably aligned;
  // otherwise, for leaf functions, align to the TransientStackAlignment
  // value.
  unsigned StackAlign;
  if (adjustsStack() || hasVarSizedObjects() ||
      (RegInfo->needsStackRealignment(MF) && getObjectIndexEnd() != 0))
    StackAlign = TFI->getStackAlignment();
  else
    StackAlign = TFI->getTransientStackAlignment();

  // If the frame pointer is eliminated, all frame offsets will be relative to
  // SP not FP. Align to MaxAlign so this works.
  StackAlign = std::max(StackAlign, MaxAlign);
  unsigned AlignMask = StackAlign - 1;
  Offset = (Offset + AlignMask) & ~uint64_t(AlignMask);

  return (unsigned)Offset;
}

void MachineFrameInfo::print(const MachineFunction &MF, raw_ostream &OS) const {
  if (Objects.empty()) return;

  const TargetFrameLowering *FI = MF.getSubtarget().getFrameLowering();
  int ValOffset = (FI ? FI->getOffsetOfLocalArea() : 0);

  OS << "Frame Objects:\n";

  for (unsigned i = 0, e = Objects.size(); i != e; ++i) {
    const StackObject &SO = Objects[i];
    OS << "  fi#" << (int)(i-NumFixedObjects) << ": ";
    if (SO.Size == ~0ULL) {
      OS << "dead\n";
      continue;
    }
    if (SO.Size == 0)
      OS << "variable sized";
    else
      OS << "size=" << SO.Size;
    OS << ", align=" << SO.Alignment;

    if (i < NumFixedObjects)
      OS << ", fixed";
    if (i < NumFixedObjects || SO.SPOffset != -1) {
      int64_t Off = SO.SPOffset - ValOffset;
      OS << ", at location [SP";
      if (Off > 0)
        OS << "+" << Off;
      else if (Off < 0)
        OS << Off;
      OS << "]";
    }
    OS << "\n";
  }
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
void MachineFrameInfo::dump(const MachineFunction &MF) const {
  print(MF, dbgs());
}
#endif

//===----------------------------------------------------------------------===//
//  MachineJumpTableInfo implementation
//===----------------------------------------------------------------------===//

/// Return the size of each entry in the jump table.
unsigned MachineJumpTableInfo::getEntrySize(const DataLayout &TD) const {
  // The size of a jump table entry is 4 bytes unless the entry is just the
  // address of a block, in which case it is the pointer size.
  switch (getEntryKind()) {
  case MachineJumpTableInfo::EK_BlockAddress:
    return TD.getPointerSize();
  case MachineJumpTableInfo::EK_GPRel64BlockAddress:
    return 8;
  case MachineJumpTableInfo::EK_GPRel32BlockAddress:
  case MachineJumpTableInfo::EK_LabelDifference32:
  case MachineJumpTableInfo::EK_Custom32:
    return 4;
  case MachineJumpTableInfo::EK_Inline:
    return 0;
  }
  llvm_unreachable("Unknown jump table encoding!");
}

/// Return the alignment of each entry in the jump table.
unsigned MachineJumpTableInfo::getEntryAlignment(const DataLayout &TD) const {
  // The alignment of a jump table entry is the alignment of int32 unless the
  // entry is just the address of a block, in which case it is the pointer
  // alignment.
  switch (getEntryKind()) {
  case MachineJumpTableInfo::EK_BlockAddress:
    return TD.getPointerABIAlignment();
  case MachineJumpTableInfo::EK_GPRel64BlockAddress:
    return TD.getABIIntegerTypeAlignment(64);
  case MachineJumpTableInfo::EK_GPRel32BlockAddress:
  case MachineJumpTableInfo::EK_LabelDifference32:
  case MachineJumpTableInfo::EK_Custom32:
    return TD.getABIIntegerTypeAlignment(32);
  case MachineJumpTableInfo::EK_Inline:
    return 1;
  }
  llvm_unreachable("Unknown jump table encoding!");
}

/// Create a new jump table entry in the jump table info.
unsigned MachineJumpTableInfo::createJumpTableIndex(
                               const std::vector<MachineBasicBlock*> &DestBBs) {
  assert(!DestBBs.empty() && "Cannot create an empty jump table!");
  JumpTables.push_back(MachineJumpTableEntry(DestBBs));
  return JumpTables.size()-1;
}

/// If Old is the target of any jump tables, update the jump tables to branch
/// to New instead.
bool MachineJumpTableInfo::ReplaceMBBInJumpTables(MachineBasicBlock *Old,
                                                  MachineBasicBlock *New) {
  assert(Old != New && "Not making a change?");
  bool MadeChange = false;
  for (size_t i = 0, e = JumpTables.size(); i != e; ++i)
    MadeChange |= ReplaceMBBInJumpTable(i, Old, New);
  return MadeChange;
}

/// If Old is a target of the jump tables, update the jump table to branch to
/// New instead.
bool MachineJumpTableInfo::ReplaceMBBInJumpTable(unsigned Idx,
                                                 MachineBasicBlock *Old,
                                                 MachineBasicBlock *New) {
  assert(Old != New && "Not making a change?");
  bool MadeChange = false;
  MachineJumpTableEntry &JTE = JumpTables[Idx];
  for (size_t j = 0, e = JTE.MBBs.size(); j != e; ++j)
    if (JTE.MBBs[j] == Old) {
      JTE.MBBs[j] = New;
      MadeChange = true;
    }
  return MadeChange;
}

void MachineJumpTableInfo::print(raw_ostream &OS) const {
  if (JumpTables.empty()) return;

  OS << "Jump Tables:\n";

  for (unsigned i = 0, e = JumpTables.size(); i != e; ++i) {
    OS << "  jt#" << i << ": ";
    for (unsigned j = 0, f = JumpTables[i].MBBs.size(); j != f; ++j)
      OS << " BB#" << JumpTables[i].MBBs[j]->getNumber();
  }

  OS << '\n';
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
void MachineJumpTableInfo::dump() const { print(dbgs()); }
#endif

//===----------------------------------------------------------------------===//
//  MachineConstantPool implementation
//===----------------------------------------------------------------------===//

void MachineConstantPoolValue::anchor() { }

Type *MachineConstantPoolEntry::getType() const {
  if (isMachineConstantPoolEntry())
    return Val.MachineCPVal->getType();
  return Val.ConstVal->getType();
}

unsigned MachineConstantPoolEntry::getRelocationInfo() const {
  if (isMachineConstantPoolEntry())
    return Val.MachineCPVal->getRelocationInfo();
  return Val.ConstVal->getRelocationInfo();
}

SectionKind
MachineConstantPoolEntry::getSectionKind(const DataLayout *DL) const {
  SectionKind Kind;
  switch (getRelocationInfo()) {
  default:
    llvm_unreachable("Unknown section kind");
  case Constant::GlobalRelocations:
    Kind = SectionKind::getReadOnlyWithRel();
    break;
  case Constant::LocalRelocation:
    Kind = SectionKind::getReadOnlyWithRelLocal();
    break;
  case Constant::NoRelocation:
    switch (DL->getTypeAllocSize(getType())) {
    case 4:
      Kind = SectionKind::getMergeableConst4();
      break;
    case 8:
      Kind = SectionKind::getMergeableConst8();
      break;
    case 16:
      Kind = SectionKind::getMergeableConst16();
      break;
    default:
      Kind = SectionKind::getReadOnly();
      break;
    }
  }
  return Kind;
}

MachineConstantPool::~MachineConstantPool() {
  for (unsigned i = 0, e = Constants.size(); i != e; ++i)
    if (Constants[i].isMachineConstantPoolEntry())
      delete Constants[i].Val.MachineCPVal;
  for (DenseSet<MachineConstantPoolValue*>::iterator I =
       MachineCPVsSharingEntries.begin(), E = MachineCPVsSharingEntries.end();
       I != E; ++I)
    delete *I;
}

/// Test whether the given two constants can be allocated the same constant
/// pool entry.
static bool CanShareConstantPoolEntry(const Constant *A, const Constant *B,
                                      const DataLayout &DL) {
  // Handle the trivial case quickly.
  if (A == B) return true;

  // If they have the same type but weren't the same constant, quickly
  // reject them.
  if (A->getType() == B->getType()) return false;

  // We can't handle structs or arrays.
  if (isa<StructType>(A->getType()) || isa<ArrayType>(A->getType()) ||
      isa<StructType>(B->getType()) || isa<ArrayType>(B->getType()))
    return false;

  // For now, only support constants with the same size.
  uint64_t StoreSize = DL.getTypeStoreSize(A->getType());
  if (StoreSize != DL.getTypeStoreSize(B->getType()) || StoreSize > 128)
    return false;

  Type *IntTy = IntegerType::get(A->getContext(), StoreSize*8);

  // Try constant folding a bitcast of both instructions to an integer. If we
  // get two identical ConstantInt's, then we are good to share them. We use
  // the constant folding APIs to do this so that we get the benefit of
  // DataLayout.
  if (isa<PointerType>(A->getType()))
    A = ConstantFoldInstOperands(Instruction::PtrToInt, IntTy,
                                 const_cast<Constant *>(A), DL);
  else if (A->getType() != IntTy)
    A = ConstantFoldInstOperands(Instruction::BitCast, IntTy,
                                 const_cast<Constant *>(A), DL);
  if (isa<PointerType>(B->getType()))
    B = ConstantFoldInstOperands(Instruction::PtrToInt, IntTy,
                                 const_cast<Constant *>(B), DL);
  else if (B->getType() != IntTy)
    B = ConstantFoldInstOperands(Instruction::BitCast, IntTy,
                                 const_cast<Constant *>(B), DL);

  return A == B;
}

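// For example (an illustration, not code from this file), a float constant
// 1.0f and an i32 constant 1065353216 occupy the same four bytes, so both
// bitcast-fold to the same ConstantInt and can share one pool entry; pointer
// constants are folded through ptrtoint in the same way.
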
/// Create a new entry in the constant pool or return an existing one.
/// User must specify the minimum required alignment (in bytes) for the object.
unsigned MachineConstantPool::getConstantPoolIndex(const Constant *C,
                                                   unsigned Alignment) {
  assert(Alignment && "Alignment must be specified!");
  if (Alignment > PoolAlignment) PoolAlignment = Alignment;

  // Check to see if we already have this constant.
  //
  // FIXME, this could be made much more efficient for large constant pools.
  for (unsigned i = 0, e = Constants.size(); i != e; ++i)
    if (!Constants[i].isMachineConstantPoolEntry() &&
        CanShareConstantPoolEntry(Constants[i].Val.ConstVal, C, DL)) {
      if ((unsigned)Constants[i].getAlignment() < Alignment)
        Constants[i].Alignment = Alignment;
      return i;
    }

  Constants.push_back(MachineConstantPoolEntry(C, Alignment));
  return Constants.size()-1;
}

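// Illustrative caller (assumed instruction-selection context, not part of
// this file): materializing a constant through the pool might look roughly
// like
//   unsigned Align = DL.getPrefTypeAlignment(C->getType());
//   unsigned CPI = MF.getConstantPool()->getConstantPoolIndex(C, Align);
// after which the target references the index via a ConstantPoolSDNode or a
// constant-pool-relative load.
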
unsigned MachineConstantPool::getConstantPoolIndex(MachineConstantPoolValue *V,
                                                   unsigned Alignment) {
  assert(Alignment && "Alignment must be specified!");
  if (Alignment > PoolAlignment) PoolAlignment = Alignment;

  // Check to see if we already have this constant.
  //
  // FIXME, this could be made much more efficient for large constant pools.
  int Idx = V->getExistingMachineCPValue(this, Alignment);
  if (Idx != -1) {
    MachineCPVsSharingEntries.insert(V);
    return (unsigned)Idx;
  }

  Constants.push_back(MachineConstantPoolEntry(V, Alignment));
  return Constants.size()-1;
}

void MachineConstantPool::print(raw_ostream &OS) const {
  if (Constants.empty()) return;

  OS << "Constant Pool:\n";
  for (unsigned i = 0, e = Constants.size(); i != e; ++i) {
    OS << "  cp#" << i << ": ";
    if (Constants[i].isMachineConstantPoolEntry())
      Constants[i].Val.MachineCPVal->print(OS);
    else
      Constants[i].Val.ConstVal->printAsOperand(OS, /*PrintType=*/false);
    OS << ", align=" << Constants[i].getAlignment();
    OS << "\n";
  }
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
void MachineConstantPool::dump() const { print(dbgs()); }
#endif