// LiveRangeEdit.cpp (14 KB)
  1. //===-- LiveRangeEdit.cpp - Basic tools for editing a register live range -===//
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // The LiveRangeEdit class represents changes done to a virtual register when it
  11. // is spilled or split.
  12. //===----------------------------------------------------------------------===//
  13. #include "llvm/CodeGen/LiveRangeEdit.h"
  14. #include "llvm/ADT/Statistic.h"
  15. #include "llvm/CodeGen/CalcSpillWeights.h"
  16. #include "llvm/CodeGen/LiveIntervalAnalysis.h"
  17. #include "llvm/CodeGen/MachineRegisterInfo.h"
  18. #include "llvm/CodeGen/VirtRegMap.h"
  19. #include "llvm/Support/Debug.h"
  20. #include "llvm/Support/raw_ostream.h"
  21. #include "llvm/Target/TargetInstrInfo.h"
  22. using namespace llvm;
  23. #define DEBUG_TYPE "regalloc"
  24. STATISTIC(NumDCEDeleted, "Number of instructions deleted by DCE");
  25. STATISTIC(NumDCEFoldedLoads, "Number of single use loads folded after DCE");
  26. STATISTIC(NumFracRanges, "Number of live ranges fractured by DCE");
  27. void LiveRangeEdit::Delegate::anchor() { }
  28. LiveInterval &LiveRangeEdit::createEmptyIntervalFrom(unsigned OldReg) {
  29. unsigned VReg = MRI.createVirtualRegister(MRI.getRegClass(OldReg));
  30. if (VRM) {
  31. VRM->setIsSplitFromReg(VReg, VRM->getOriginal(OldReg));
  32. }
  33. LiveInterval &LI = LIS.createEmptyInterval(VReg);
  34. return LI;
  35. }
  36. unsigned LiveRangeEdit::createFrom(unsigned OldReg) {
  37. unsigned VReg = MRI.createVirtualRegister(MRI.getRegClass(OldReg));
  38. if (VRM) {
  39. VRM->setIsSplitFromReg(VReg, VRM->getOriginal(OldReg));
  40. }
  41. return VReg;
  42. }
  43. bool LiveRangeEdit::checkRematerializable(VNInfo *VNI,
  44. const MachineInstr *DefMI,
  45. AliasAnalysis *aa) {
  46. assert(DefMI && "Missing instruction");
  47. ScannedRemattable = true;
  48. if (!TII.isTriviallyReMaterializable(DefMI, aa))
  49. return false;
  50. Remattable.insert(VNI);
  51. return true;
  52. }
  53. void LiveRangeEdit::scanRemattable(AliasAnalysis *aa) {
  54. for (VNInfo *VNI : getParent().valnos) {
  55. if (VNI->isUnused())
  56. continue;
  57. MachineInstr *DefMI = LIS.getInstructionFromIndex(VNI->def);
  58. if (!DefMI)
  59. continue;
  60. checkRematerializable(VNI, DefMI, aa);
  61. }
  62. ScannedRemattable = true;
  63. }
  64. bool LiveRangeEdit::anyRematerializable(AliasAnalysis *aa) {
  65. if (!ScannedRemattable)
  66. scanRemattable(aa);
  67. return !Remattable.empty();
  68. }
  69. /// allUsesAvailableAt - Return true if all registers used by OrigMI at
  70. /// OrigIdx are also available with the same value at UseIdx.
  71. bool LiveRangeEdit::allUsesAvailableAt(const MachineInstr *OrigMI,
  72. SlotIndex OrigIdx,
  73. SlotIndex UseIdx) const {
  74. OrigIdx = OrigIdx.getRegSlot(true);
  75. UseIdx = UseIdx.getRegSlot(true);
  76. for (unsigned i = 0, e = OrigMI->getNumOperands(); i != e; ++i) {
  77. const MachineOperand &MO = OrigMI->getOperand(i);
  78. if (!MO.isReg() || !MO.getReg() || !MO.readsReg())
  79. continue;
  80. // We can't remat physreg uses, unless it is a constant.
  81. if (TargetRegisterInfo::isPhysicalRegister(MO.getReg())) {
  82. if (MRI.isConstantPhysReg(MO.getReg(), *OrigMI->getParent()->getParent()))
  83. continue;
  84. return false;
  85. }
  86. LiveInterval &li = LIS.getInterval(MO.getReg());
  87. const VNInfo *OVNI = li.getVNInfoAt(OrigIdx);
  88. if (!OVNI)
  89. continue;
  90. // Don't allow rematerialization immediately after the original def.
  91. // It would be incorrect if OrigMI redefines the register.
  92. // See PR14098.
  93. if (SlotIndex::isSameInstr(OrigIdx, UseIdx))
  94. return false;
  95. if (OVNI != li.getVNInfoAt(UseIdx))
  96. return false;
  97. }
  98. return true;
  99. }
  100. bool LiveRangeEdit::canRematerializeAt(Remat &RM,
  101. SlotIndex UseIdx,
  102. bool cheapAsAMove) {
  103. assert(ScannedRemattable && "Call anyRematerializable first");
  104. // Use scanRemattable info.
  105. if (!Remattable.count(RM.ParentVNI))
  106. return false;
  107. // No defining instruction provided.
  108. SlotIndex DefIdx;
  109. if (RM.OrigMI)
  110. DefIdx = LIS.getInstructionIndex(RM.OrigMI);
  111. else {
  112. DefIdx = RM.ParentVNI->def;
  113. RM.OrigMI = LIS.getInstructionFromIndex(DefIdx);
  114. assert(RM.OrigMI && "No defining instruction for remattable value");
  115. }
  116. // If only cheap remats were requested, bail out early.
  117. if (cheapAsAMove && !TII.isAsCheapAsAMove(RM.OrigMI))
  118. return false;
  119. // Verify that all used registers are available with the same values.
  120. if (!allUsesAvailableAt(RM.OrigMI, DefIdx, UseIdx))
  121. return false;
  122. return true;
  123. }
  124. SlotIndex LiveRangeEdit::rematerializeAt(MachineBasicBlock &MBB,
  125. MachineBasicBlock::iterator MI,
  126. unsigned DestReg,
  127. const Remat &RM,
  128. const TargetRegisterInfo &tri,
  129. bool Late) {
  130. assert(RM.OrigMI && "Invalid remat");
  131. TII.reMaterialize(MBB, MI, DestReg, 0, RM.OrigMI, tri);
  132. Rematted.insert(RM.ParentVNI);
  133. return LIS.getSlotIndexes()->insertMachineInstrInMaps(--MI, Late)
  134. .getRegSlot();
  135. }
  136. void LiveRangeEdit::eraseVirtReg(unsigned Reg) {
  137. if (TheDelegate && TheDelegate->LRE_CanEraseVirtReg(Reg))
  138. LIS.removeInterval(Reg);
  139. }
  140. bool LiveRangeEdit::foldAsLoad(LiveInterval *LI,
  141. SmallVectorImpl<MachineInstr*> &Dead) {
  142. MachineInstr *DefMI = nullptr, *UseMI = nullptr;
  143. // Check that there is a single def and a single use.
  144. for (MachineOperand &MO : MRI.reg_nodbg_operands(LI->reg)) {
  145. MachineInstr *MI = MO.getParent();
  146. if (MO.isDef()) {
  147. if (DefMI && DefMI != MI)
  148. return false;
  149. if (!MI->canFoldAsLoad())
  150. return false;
  151. DefMI = MI;
  152. } else if (!MO.isUndef()) {
  153. if (UseMI && UseMI != MI)
  154. return false;
  155. // FIXME: Targets don't know how to fold subreg uses.
  156. if (MO.getSubReg())
  157. return false;
  158. UseMI = MI;
  159. }
  160. }
  161. if (!DefMI || !UseMI)
  162. return false;
  163. // Since we're moving the DefMI load, make sure we're not extending any live
  164. // ranges.
  165. if (!allUsesAvailableAt(DefMI,
  166. LIS.getInstructionIndex(DefMI),
  167. LIS.getInstructionIndex(UseMI)))
  168. return false;
  169. // We also need to make sure it is safe to move the load.
  170. // Assume there are stores between DefMI and UseMI.
  171. bool SawStore = true;
  172. if (!DefMI->isSafeToMove(nullptr, SawStore))
  173. return false;
  174. DEBUG(dbgs() << "Try to fold single def: " << *DefMI
  175. << " into single use: " << *UseMI);
  176. SmallVector<unsigned, 8> Ops;
  177. if (UseMI->readsWritesVirtualRegister(LI->reg, &Ops).second)
  178. return false;
  179. MachineInstr *FoldMI = TII.foldMemoryOperand(UseMI, Ops, DefMI);
  180. if (!FoldMI)
  181. return false;
  182. DEBUG(dbgs() << " folded: " << *FoldMI);
  183. LIS.ReplaceMachineInstrInMaps(UseMI, FoldMI);
  184. UseMI->eraseFromParent();
  185. DefMI->addRegisterDead(LI->reg, nullptr);
  186. Dead.push_back(DefMI);
  187. ++NumDCEFoldedLoads;
  188. return true;
  189. }
  190. bool LiveRangeEdit::useIsKill(const LiveInterval &LI,
  191. const MachineOperand &MO) const {
  192. const MachineInstr *MI = MO.getParent();
  193. SlotIndex Idx = LIS.getInstructionIndex(MI).getRegSlot();
  194. if (LI.Query(Idx).isKill())
  195. return true;
  196. const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
  197. unsigned SubReg = MO.getSubReg();
  198. unsigned LaneMask = TRI.getSubRegIndexLaneMask(SubReg);
  199. for (const LiveInterval::SubRange &S : LI.subranges()) {
  200. if ((S.LaneMask & LaneMask) != 0 && S.Query(Idx).isKill())
  201. return true;
  202. }
  203. return false;
  204. }
/// Find all live intervals that need to shrink, then remove the instruction.
///
/// MI must have only dead defs (checked by the assert). Virtual registers
/// whose def is removed are added to ToShrink so the caller can shrink them;
/// registers whose interval becomes empty are erased (via eraseVirtReg) at
/// the end. If MI reads unreserved physregs it is converted to a KILL instead
/// of being erased, to keep physreg live ranges consistent.
void LiveRangeEdit::eliminateDeadDef(MachineInstr *MI, ToShrinkSet &ToShrink) {
  assert(MI->allDefsAreDead() && "Def isn't really dead");
  SlotIndex Idx = LIS.getInstructionIndex(MI).getRegSlot();

  // Never delete a bundled instruction.
  if (MI->isBundled()) {
    return;
  }
  // Never delete inline asm.
  if (MI->isInlineAsm()) {
    DEBUG(dbgs() << "Won't delete: " << Idx << '\t' << *MI);
    return;
  }

  // Use the same criteria as DeadMachineInstructionElim.
  bool SawStore = false;
  if (!MI->isSafeToMove(nullptr, SawStore)) {
    DEBUG(dbgs() << "Can't delete: " << Idx << '\t' << *MI);
    return;
  }

  DEBUG(dbgs() << "Deleting dead def " << Idx << '\t' << *MI);

  // Collect virtual registers to be erased after MI is gone.
  SmallVector<unsigned, 8> RegsToErase;
  bool ReadsPhysRegs = false;

  // Check for live intervals that may shrink
  for (MachineInstr::mop_iterator MOI = MI->operands_begin(),
         MOE = MI->operands_end(); MOI != MOE; ++MOI) {
    if (!MOI->isReg())
      continue;
    unsigned Reg = MOI->getReg();
    if (!TargetRegisterInfo::isVirtualRegister(Reg)) {
      // Check if MI reads any unreserved physregs.
      if (Reg && MOI->readsReg() && !MRI.isReserved(Reg))
        ReadsPhysRegs = true;
      else if (MOI->isDef())
        // Dead physreg defs can be removed from the live range immediately.
        LIS.removePhysRegDefAt(Reg, Idx);
      continue;
    }
    LiveInterval &LI = LIS.getInterval(Reg);

    // Shrink read registers, unless it is likely to be expensive and
    // unlikely to change anything. We typically don't want to shrink the
    // PIC base register that has lots of uses everywhere.
    // Always shrink COPY uses that probably come from live range splitting.
    if ((MI->readsVirtualRegister(Reg) && (MI->isCopy() || MOI->isDef())) ||
        (MOI->readsReg() && (MRI.hasOneNonDBGUse(Reg) || useIsKill(LI, *MOI))))
      ToShrink.insert(&LI);

    // Remove defined value.
    if (MOI->isDef()) {
      // Notify the delegate before the def disappears so it can update its
      // own data structures.
      if (TheDelegate && LI.getVNInfoAt(Idx) != nullptr)
        TheDelegate->LRE_WillShrinkVirtReg(LI.reg);
      LIS.removeVRegDefAt(LI, Idx);
      if (LI.empty())
        RegsToErase.push_back(Reg);
    }
  }

  // Currently, we don't support DCE of physreg live ranges. If MI reads
  // any unreserved physregs, don't erase the instruction, but turn it into
  // a KILL instead. This way, the physreg live ranges don't end up
  // dangling.
  // FIXME: It would be better to have something like shrinkToUses() for
  // physregs. That could potentially enable more DCE and it would free up
  // the physreg. It would not happen often, though.
  if (ReadsPhysRegs) {
    MI->setDesc(TII.get(TargetOpcode::KILL));
    // Remove all operands that aren't physregs.
    // Iterate backwards so operand removal doesn't shift unvisited indices.
    for (unsigned i = MI->getNumOperands(); i; --i) {
      const MachineOperand &MO = MI->getOperand(i-1);
      if (MO.isReg() && TargetRegisterInfo::isPhysicalRegister(MO.getReg()))
        continue;
      MI->RemoveOperand(i-1);
    }
    DEBUG(dbgs() << "Converted physregs to:\t" << *MI);
  } else {
    if (TheDelegate)
      TheDelegate->LRE_WillEraseInstruction(MI);
    LIS.RemoveMachineInstrFromMaps(MI);
    MI->eraseFromParent();
    ++NumDCEDeleted;
  }

  // Erase any virtregs that are now empty and unused. There may be <undef>
  // uses around. Keep the empty live range in that case.
  for (unsigned i = 0, e = RegsToErase.size(); i != e; ++i) {
    unsigned Reg = RegsToErase[i];
    if (LIS.hasInterval(Reg) && MRI.reg_nodbg_empty(Reg)) {
      ToShrink.remove(&LIS.getInterval(Reg));
      eraseVirtReg(Reg);
    }
  }
}
  293. void LiveRangeEdit::eliminateDeadDefs(SmallVectorImpl<MachineInstr*> &Dead,
  294. ArrayRef<unsigned> RegsBeingSpilled) {
  295. ToShrinkSet ToShrink;
  296. for (;;) {
  297. // Erase all dead defs.
  298. while (!Dead.empty())
  299. eliminateDeadDef(Dead.pop_back_val(), ToShrink);
  300. if (ToShrink.empty())
  301. break;
  302. // Shrink just one live interval. Then delete new dead defs.
  303. LiveInterval *LI = ToShrink.back();
  304. ToShrink.pop_back();
  305. if (foldAsLoad(LI, Dead))
  306. continue;
  307. if (TheDelegate)
  308. TheDelegate->LRE_WillShrinkVirtReg(LI->reg);
  309. if (!LIS.shrinkToUses(LI, &Dead))
  310. continue;
  311. // Don't create new intervals for a register being spilled.
  312. // The new intervals would have to be spilled anyway so its not worth it.
  313. // Also they currently aren't spilled so creating them and not spilling
  314. // them results in incorrect code.
  315. bool BeingSpilled = false;
  316. for (unsigned i = 0, e = RegsBeingSpilled.size(); i != e; ++i) {
  317. if (LI->reg == RegsBeingSpilled[i]) {
  318. BeingSpilled = true;
  319. break;
  320. }
  321. }
  322. if (BeingSpilled) continue;
  323. // LI may have been separated, create new intervals.
  324. LI->RenumberValues();
  325. ConnectedVNInfoEqClasses ConEQ(LIS);
  326. unsigned NumComp = ConEQ.Classify(LI);
  327. if (NumComp <= 1)
  328. continue;
  329. ++NumFracRanges;
  330. bool IsOriginal = VRM && VRM->getOriginal(LI->reg) == LI->reg;
  331. DEBUG(dbgs() << NumComp << " components: " << *LI << '\n');
  332. SmallVector<LiveInterval*, 8> Dups(1, LI);
  333. for (unsigned i = 1; i != NumComp; ++i) {
  334. Dups.push_back(&createEmptyIntervalFrom(LI->reg));
  335. // If LI is an original interval that hasn't been split yet, make the new
  336. // intervals their own originals instead of referring to LI. The original
  337. // interval must contain all the split products, and LI doesn't.
  338. if (IsOriginal)
  339. VRM->setIsSplitFromReg(Dups.back()->reg, 0);
  340. if (TheDelegate)
  341. TheDelegate->LRE_DidCloneVirtReg(Dups.back()->reg, LI->reg);
  342. }
  343. ConEQ.Distribute(&Dups[0], MRI);
  344. DEBUG({
  345. for (unsigned i = 0; i != NumComp; ++i)
  346. dbgs() << '\t' << *Dups[i] << '\n';
  347. });
  348. }
  349. }
  350. // Keep track of new virtual registers created via
  351. // MachineRegisterInfo::createVirtualRegister.
  352. void
  353. LiveRangeEdit::MRI_NoteNewVirtualRegister(unsigned VReg)
  354. {
  355. if (VRM)
  356. VRM->grow();
  357. NewRegs.push_back(VReg);
  358. }
  359. void
  360. LiveRangeEdit::calculateRegClassAndHint(MachineFunction &MF,
  361. const MachineLoopInfo &Loops,
  362. const MachineBlockFrequencyInfo &MBFI) {
  363. VirtRegAuxInfo VRAI(MF, LIS, Loops, MBFI);
  364. for (unsigned I = 0, Size = size(); I < Size; ++I) {
  365. LiveInterval &LI = LIS.getInterval(get(I));
  366. if (MRI.recomputeRegClass(LI.reg))
  367. DEBUG({
  368. const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();
  369. dbgs() << "Inflated " << PrintReg(LI.reg) << " to "
  370. << TRI->getRegClassName(MRI.getRegClass(LI.reg)) << '\n';
  371. });
  372. VRAI.calculateSpillWeightAndHint(LI);
  373. }
  374. }