2
0

AggressiveAntiDepBreaker.cpp 35 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970
  1. //===----- AggressiveAntiDepBreaker.cpp - Anti-dep breaker ----------------===//
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // This file implements the AggressiveAntiDepBreaker class, which
  11. // implements register anti-dependence breaking during post-RA
  12. // scheduling. It attempts to break all anti-dependencies within a
  13. // block.
  14. //
  15. //===----------------------------------------------------------------------===//
  16. #include "AggressiveAntiDepBreaker.h"
  17. #include "llvm/CodeGen/MachineBasicBlock.h"
  18. #include "llvm/CodeGen/MachineFrameInfo.h"
  19. #include "llvm/CodeGen/MachineInstr.h"
  20. #include "llvm/CodeGen/RegisterClassInfo.h"
  21. #include "llvm/Support/CommandLine.h"
  22. #include "llvm/Support/Debug.h"
  23. #include "llvm/Support/ErrorHandling.h"
  24. #include "llvm/Support/raw_ostream.h"
  25. #include "llvm/Target/TargetInstrInfo.h"
  26. #include "llvm/Target/TargetRegisterInfo.h"
  27. using namespace llvm;
#define DEBUG_TYPE "post-RA-sched"

// If DebugDiv > 0 then only break antidep with (ID % DebugDiv) == DebugMod
// These two hidden options together select a subset of the candidate
// renames (see the DebugDiv filter in FindSuitableFreeRegisters) so a
// miscompile can be bisected down to a single rename.
static cl::opt<int>
DebugDiv("agg-antidep-debugdiv",
         cl::desc("Debug control for aggressive anti-dep breaker"),
         cl::init(0), cl::Hidden);

static cl::opt<int>
DebugMod("agg-antidep-debugmod",
         cl::desc("Debug control for aggressive anti-dep breaker"),
         cl::init(0), cl::Hidden);
  38. AggressiveAntiDepState::AggressiveAntiDepState(const unsigned TargetRegs,
  39. MachineBasicBlock *BB) :
  40. NumTargetRegs(TargetRegs), GroupNodes(TargetRegs, 0),
  41. GroupNodeIndices(TargetRegs, 0),
  42. KillIndices(TargetRegs, 0),
  43. DefIndices(TargetRegs, 0)
  44. {
  45. const unsigned BBSize = BB->size();
  46. for (unsigned i = 0; i < NumTargetRegs; ++i) {
  47. // Initialize all registers to be in their own group. Initially we
  48. // assign the register to the same-indexed GroupNode.
  49. GroupNodeIndices[i] = i;
  50. // Initialize the indices to indicate that no registers are live.
  51. KillIndices[i] = ~0u;
  52. DefIndices[i] = BBSize;
  53. }
  54. }
  55. unsigned AggressiveAntiDepState::GetGroup(unsigned Reg) {
  56. unsigned Node = GroupNodeIndices[Reg];
  57. while (GroupNodes[Node] != Node)
  58. Node = GroupNodes[Node];
  59. return Node;
  60. }
  61. void AggressiveAntiDepState::GetGroupRegs(
  62. unsigned Group,
  63. std::vector<unsigned> &Regs,
  64. std::multimap<unsigned, AggressiveAntiDepState::RegisterReference> *RegRefs)
  65. {
  66. for (unsigned Reg = 0; Reg != NumTargetRegs; ++Reg) {
  67. if ((GetGroup(Reg) == Group) && (RegRefs->count(Reg) > 0))
  68. Regs.push_back(Reg);
  69. }
  70. }
  71. unsigned AggressiveAntiDepState::UnionGroups(unsigned Reg1, unsigned Reg2)
  72. {
  73. assert(GroupNodes[0] == 0 && "GroupNode 0 not parent!");
  74. assert(GroupNodeIndices[0] == 0 && "Reg 0 not in Group 0!");
  75. // find group for each register
  76. unsigned Group1 = GetGroup(Reg1);
  77. unsigned Group2 = GetGroup(Reg2);
  78. // if either group is 0, then that must become the parent
  79. unsigned Parent = (Group1 == 0) ? Group1 : Group2;
  80. unsigned Other = (Parent == Group1) ? Group2 : Group1;
  81. GroupNodes.at(Other) = Parent;
  82. return Parent;
  83. }
  84. unsigned AggressiveAntiDepState::LeaveGroup(unsigned Reg)
  85. {
  86. // Create a new GroupNode for Reg. Reg's existing GroupNode must
  87. // stay as is because there could be other GroupNodes referring to
  88. // it.
  89. unsigned idx = GroupNodes.size();
  90. GroupNodes.push_back(idx);
  91. GroupNodeIndices[Reg] = idx;
  92. return idx;
  93. }
  94. bool AggressiveAntiDepState::IsLive(unsigned Reg)
  95. {
  96. // KillIndex must be defined and DefIndex not defined for a register
  97. // to be live.
  98. return((KillIndices[Reg] != ~0u) && (DefIndices[Reg] == ~0u));
  99. }
// Construct the breaker for MFi. CriticalPathRCs lists the register
// classes whose anti-dependencies are only broken when the defining
// instruction lies on the critical path.
AggressiveAntiDepBreaker::AggressiveAntiDepBreaker(
    MachineFunction &MFi, const RegisterClassInfo &RCI,
    TargetSubtargetInfo::RegClassVector &CriticalPathRCs)
    : AntiDepBreaker(), MF(MFi), MRI(MF.getRegInfo()),
      TII(MF.getSubtarget().getInstrInfo()),
      TRI(MF.getSubtarget().getRegisterInfo()), RegClassInfo(RCI),
      State(nullptr) {
  /* Collect a bitset of all registers that are only broken if they
     are on the critical path. */
  for (unsigned i = 0, e = CriticalPathRCs.size(); i < e; ++i) {
    BitVector CPSet = TRI->getAllocatableSet(MF, CriticalPathRCs[i]);
    // First class encountered: assign (also sizes the empty BitVector);
    // subsequent classes: accumulate with OR.
    if (CriticalPathSet.none())
      CriticalPathSet = CPSet;
    else
      CriticalPathSet |= CPSet;
  }

  DEBUG(dbgs() << "AntiDep Critical-Path Registers:");
  DEBUG(for (int r = CriticalPathSet.find_first(); r != -1;
             r = CriticalPathSet.find_next(r))
          dbgs() << " " << TRI->getName(r));
  DEBUG(dbgs() << '\n');
}
AggressiveAntiDepBreaker::~AggressiveAntiDepBreaker() {
  // State is allocated in StartBlock and normally freed in FinishBlock;
  // deleting here covers the case where FinishBlock was never reached.
  delete State;
}
// Initialize per-block state for BB: build a fresh AggressiveAntiDepState
// and mark every register that is live out of the block (successor
// live-ins plus relevant callee-saved registers) as live and un-renamable.
void AggressiveAntiDepBreaker::StartBlock(MachineBasicBlock *BB) {
  assert(!State);
  State = new AggressiveAntiDepState(TRI->getNumRegs(), BB);

  bool IsReturnBlock = (!BB->empty() && BB->back().isReturn());
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();

  // Examine the live-in regs of all successors.
  for (MachineBasicBlock::succ_iterator SI = BB->succ_begin(),
         SE = BB->succ_end(); SI != SE; ++SI)
    for (MachineBasicBlock::livein_iterator I = (*SI)->livein_begin(),
           E = (*SI)->livein_end(); I != E; ++I) {
      for (MCRegAliasIterator AI(*I, TRI, true); AI.isValid(); ++AI) {
        unsigned Reg = *AI;
        // Union into group 0 so a live-out register is never renamed,
        // and mark it live through the bottom of the block.
        State->UnionGroups(Reg, 0);
        KillIndices[Reg] = BB->size();
        DefIndices[Reg] = ~0u;
      }
    }

  // Mark live-out callee-saved registers. In a return block this is
  // all callee-saved registers. In non-return this is any
  // callee-saved register that is not saved in the prolog.
  const MachineFrameInfo *MFI = MF.getFrameInfo();
  BitVector Pristine = MFI->getPristineRegs(MF);
  for (const MCPhysReg *I = TRI->getCalleeSavedRegs(&MF); *I; ++I) {
    unsigned Reg = *I;
    if (!IsReturnBlock && !Pristine.test(Reg)) continue;
    // Same treatment as successor live-ins: group 0 + live to block end.
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      unsigned AliasReg = *AI;
      State->UnionGroups(AliasReg, 0);
      KillIndices[AliasReg] = BB->size();
      DefIndices[AliasReg] = ~0u;
    }
  }
}
void AggressiveAntiDepBreaker::FinishBlock() {
  // Drop the per-block tracking state; the next StartBlock call will
  // allocate a fresh one (and asserts State is null).
  delete State;
  State = nullptr;
}
// Update liveness/group information for MI, an instruction at index Count
// that is being observed but not considered for anti-dep breaking
// (e.g. it lies outside the current scheduling region).
void AggressiveAntiDepBreaker::Observe(MachineInstr *MI, unsigned Count,
                                       unsigned InsertPosIndex) {
  assert(Count < InsertPosIndex && "Instruction index out of expected range!");

  std::set<unsigned> PassthruRegs;
  GetPassthruRegs(MI, PassthruRegs);
  // Process defs then uses exactly as the main scan does, so the state
  // stays consistent across region boundaries.
  PrescanInstruction(MI, Count, PassthruRegs);
  ScanInstruction(MI, Count);

  DEBUG(dbgs() << "Observe: ");
  DEBUG(MI->dump());
  DEBUG(dbgs() << "\tRegs:");

  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  for (unsigned Reg = 0; Reg != TRI->getNumRegs(); ++Reg) {
    // If Reg is current live, then mark that it can't be renamed as
    // we don't know the extent of its live-range anymore (now that it
    // has been scheduled). If it is not live but was defined in the
    // previous schedule region, then set its def index to the most
    // conservative location (i.e. the beginning of the previous
    // schedule region).
    if (State->IsLive(Reg)) {
      DEBUG(if (State->GetGroup(Reg) != 0)
              dbgs() << " " << TRI->getName(Reg) << "=g" <<
                State->GetGroup(Reg) << "->g0(region live-out)");
      State->UnionGroups(Reg, 0);
    } else if ((DefIndices[Reg] < InsertPosIndex)
               && (DefIndices[Reg] >= Count)) {
      DefIndices[Reg] = Count;
    }
  }

  DEBUG(dbgs() << '\n');
}
  193. bool AggressiveAntiDepBreaker::IsImplicitDefUse(MachineInstr *MI,
  194. MachineOperand& MO)
  195. {
  196. if (!MO.isReg() || !MO.isImplicit())
  197. return false;
  198. unsigned Reg = MO.getReg();
  199. if (Reg == 0)
  200. return false;
  201. MachineOperand *Op = nullptr;
  202. if (MO.isDef())
  203. Op = MI->findRegisterUseOperand(Reg, true);
  204. else
  205. Op = MI->findRegisterDefOperand(Reg);
  206. return(Op && Op->isImplicit());
  207. }
  208. void AggressiveAntiDepBreaker::GetPassthruRegs(MachineInstr *MI,
  209. std::set<unsigned>& PassthruRegs) {
  210. for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
  211. MachineOperand &MO = MI->getOperand(i);
  212. if (!MO.isReg()) continue;
  213. if ((MO.isDef() && MI->isRegTiedToUseOperand(i)) ||
  214. IsImplicitDefUse(MI, MO)) {
  215. const unsigned Reg = MO.getReg();
  216. for (MCSubRegIterator SubRegs(Reg, TRI, /*IncludeSelf=*/true);
  217. SubRegs.isValid(); ++SubRegs)
  218. PassthruRegs.insert(*SubRegs);
  219. }
  220. }
  221. }
  222. /// AntiDepEdges - Return in Edges the anti- and output- dependencies
  223. /// in SU that we want to consider for breaking.
  224. static void AntiDepEdges(const SUnit *SU, std::vector<const SDep*>& Edges) {
  225. SmallSet<unsigned, 4> RegSet;
  226. for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
  227. P != PE; ++P) {
  228. if ((P->getKind() == SDep::Anti) || (P->getKind() == SDep::Output)) {
  229. if (RegSet.insert(P->getReg()).second)
  230. Edges.push_back(&*P);
  231. }
  232. }
  233. }
  234. /// CriticalPathStep - Return the next SUnit after SU on the bottom-up
  235. /// critical path.
  236. static const SUnit *CriticalPathStep(const SUnit *SU) {
  237. const SDep *Next = nullptr;
  238. unsigned NextDepth = 0;
  239. // Find the predecessor edge with the greatest depth.
  240. if (SU) {
  241. for (SUnit::const_pred_iterator P = SU->Preds.begin(), PE = SU->Preds.end();
  242. P != PE; ++P) {
  243. const SUnit *PredSU = P->getSUnit();
  244. unsigned PredLatency = P->getLatency();
  245. unsigned PredTotalLatency = PredSU->getDepth() + PredLatency;
  246. // In the case of a latency tie, prefer an anti-dependency edge over
  247. // other types of edges.
  248. if (NextDepth < PredTotalLatency ||
  249. (NextDepth == PredTotalLatency && P->getKind() == SDep::Anti)) {
  250. NextDepth = PredTotalLatency;
  251. Next = &*P;
  252. }
  253. }
  254. }
  255. return (Next) ? Next->getSUnit() : nullptr;
  256. }
// Record that the bottom-up scan has reached the last use of Reg (and of
// its dead subregisters): mark Reg killed at KillIdx, clear its def
// index and references, and split it into a fresh rename group. The
// tag/header/footer strings only affect debug output; header is nulled
// after first print so it appears at most once.
void AggressiveAntiDepBreaker::HandleLastUse(unsigned Reg, unsigned KillIdx,
                                             const char *tag,
                                             const char *header,
                                             const char *footer) {
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // FIXME: We must leave subregisters of live super registers as live, so that
  // we don't clear out the register tracking information for subregisters of
  // super registers we're still tracking (and with which we're unioning
  // subregister definitions).
  for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI)
    if (TRI->isSuperRegister(Reg, *AI) && State->IsLive(*AI)) {
      DEBUG(if (!header && footer) dbgs() << footer);
      return;
    }

  if (!State->IsLive(Reg)) {
    KillIndices[Reg] = KillIdx;
    DefIndices[Reg] = ~0u;
    RegRefs.erase(Reg);
    State->LeaveGroup(Reg);
    DEBUG(if (header) {
        dbgs() << header << TRI->getName(Reg); header = nullptr; });
    DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << tag);
  }

  // Repeat for subregisters: any subregister that is not itself live
  // gets the same kill/def/group reset as Reg above.
  for (MCSubRegIterator SubRegs(Reg, TRI); SubRegs.isValid(); ++SubRegs) {
    unsigned SubregReg = *SubRegs;
    if (!State->IsLive(SubregReg)) {
      KillIndices[SubregReg] = KillIdx;
      DefIndices[SubregReg] = ~0u;
      RegRefs.erase(SubregReg);
      State->LeaveGroup(SubregReg);
      DEBUG(if (header) {
          dbgs() << header << TRI->getName(Reg); header = nullptr; });
      DEBUG(dbgs() << " " << TRI->getName(SubregReg) << "->g" <<
            State->GetGroup(SubregReg) << tag);
    }
  }

  DEBUG(if (!header && footer) dbgs() << footer);
}
// Process the register defs of MI (at bottom-up index Count): simulate
// last-uses for dead defs, merge def registers with their live aliases
// into common rename groups, record each def reference, and finally
// update the def indices for live-range tracking.
void AggressiveAntiDepBreaker::PrescanInstruction(MachineInstr *MI,
                                                  unsigned Count,
                                                  std::set<unsigned>& PassthruRegs) {
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // Handle dead defs by simulating a last-use of the register just
  // after the def. A dead def can occur because the def is truly
  // dead, or because only a subregister is live at the def. If we
  // don't do this the dead def will be incorrectly merged into the
  // previous def.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    HandleLastUse(Reg, Count + 1, "", "\tDead Def: ", "\n");
  }

  DEBUG(dbgs() << "\tDef Groups:");
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" << State->GetGroup(Reg));

    // If MI's defs have a special allocation requirement, don't allow
    // any def registers to be changed. Also assume all registers
    // defined in a call must not be changed (ABI).
    if (MI->isCall() || MI->hasExtraDefRegAllocReq() ||
        TII->isPredicated(MI)) {
      DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
      State->UnionGroups(Reg, 0);
    }

    // Any aliases that are live at this point are completely or
    // partially defined here, so group those aliases with Reg.
    for (MCRegAliasIterator AI(Reg, TRI, false); AI.isValid(); ++AI) {
      unsigned AliasReg = *AI;
      if (State->IsLive(AliasReg)) {
        State->UnionGroups(Reg, AliasReg);
        DEBUG(dbgs() << "->g" << State->GetGroup(Reg) << "(via " <<
              TRI->getName(AliasReg) << ")");
      }
    }

    // Note register reference... Only operands within the MCInstrDesc's
    // declared operand count have a register-class constraint.
    const TargetRegisterClass *RC = nullptr;
    if (i < MI->getDesc().getNumOperands())
      RC = TII->getRegClass(MI->getDesc(), i, TRI, MF);
    AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
    RegRefs.insert(std::make_pair(Reg, RR));
  }

  DEBUG(dbgs() << '\n');

  // Scan the register defs for this instruction and update
  // live-ranges.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isDef()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;
    // Ignore KILLs and passthru registers for liveness...
    if (MI->isKill() || (PassthruRegs.count(Reg) != 0))
      continue;

    // Update def for Reg and aliases.
    for (MCRegAliasIterator AI(Reg, TRI, true); AI.isValid(); ++AI) {
      // We need to be careful here not to define already-live super registers.
      // If the super register is already live, then this definition is not
      // a definition of the whole super register (just a partial insertion
      // into it). Earlier subregister definitions (which we've not yet visited
      // because we're iterating bottom-up) need to be linked to the same group
      // as this definition.
      if (TRI->isSuperRegister(Reg, *AI) && State->IsLive(*AI))
        continue;

      DefIndices[*AI] = Count;
    }
  }
}
// Process the register uses of MI (at bottom-up index Count): start new
// live-ranges at last uses, pin use registers of "special" instructions
// to group 0, record each use reference, and union all operands of a
// KILL instruction into a single rename group.
void AggressiveAntiDepBreaker::ScanInstruction(MachineInstr *MI,
                                               unsigned Count) {
  DEBUG(dbgs() << "\tUse Groups:");
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // If MI's uses have special allocation requirement, don't allow
  // any use registers to be changed. Also assume all registers
  // used in a call must not be changed (ABI).
  // FIXME: The issue with predicated instruction is more complex. We are being
  // conservative here because the kill markers cannot be trusted after
  // if-conversion:
  // %R6<def> = LDR %SP, %reg0, 92, pred:14, pred:%reg0; mem:LD4[FixedStack14]
  // ...
  // STR %R0, %R6<kill>, %reg0, 0, pred:0, pred:%CPSR; mem:ST4[%395]
  // %R6<def> = LDR %SP, %reg0, 100, pred:0, pred:%CPSR; mem:LD4[FixedStack12]
  // STR %R0, %R6<kill>, %reg0, 0, pred:14, pred:%reg0; mem:ST4[%396](align=8)
  //
  // The first R6 kill is not really a kill since it's killed by a predicated
  // instruction which may not be executed. The second R6 def may or may not
  // re-define R6 so it's not safe to change it since the last R6 use cannot be
  // changed.
  bool Special = MI->isCall() ||
    MI->hasExtraSrcRegAllocReq() ||
    TII->isPredicated(MI);

  // Scan the register uses for this instruction and update
  // live-ranges, groups and RegRefs.
  for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
    MachineOperand &MO = MI->getOperand(i);
    if (!MO.isReg() || !MO.isUse()) continue;
    unsigned Reg = MO.getReg();
    if (Reg == 0) continue;

    DEBUG(dbgs() << " " << TRI->getName(Reg) << "=g" <<
          State->GetGroup(Reg));

    // It wasn't previously live but now it is, this is a kill. Forget
    // the previous live-range information and start a new live-range
    // for the register.
    HandleLastUse(Reg, Count, "(last-use)");

    if (Special) {
      DEBUG(if (State->GetGroup(Reg) != 0) dbgs() << "->g0(alloc-req)");
      State->UnionGroups(Reg, 0);
    }

    // Note register reference... Only operands within the MCInstrDesc's
    // declared operand count have a register-class constraint.
    const TargetRegisterClass *RC = nullptr;
    if (i < MI->getDesc().getNumOperands())
      RC = TII->getRegClass(MI->getDesc(), i, TRI, MF);
    AggressiveAntiDepState::RegisterReference RR = { &MO, RC };
    RegRefs.insert(std::make_pair(Reg, RR));
  }

  DEBUG(dbgs() << '\n');

  // Form a group of all defs and uses of a KILL instruction to ensure
  // that all registers are renamed as a group.
  if (MI->isKill()) {
    DEBUG(dbgs() << "\tKill Group:");

    // Union every register operand with the first one encountered.
    unsigned FirstReg = 0;
    for (unsigned i = 0, e = MI->getNumOperands(); i != e; ++i) {
      MachineOperand &MO = MI->getOperand(i);
      if (!MO.isReg()) continue;
      unsigned Reg = MO.getReg();
      if (Reg == 0) continue;

      if (FirstReg != 0) {
        DEBUG(dbgs() << "=" << TRI->getName(Reg));
        State->UnionGroups(FirstReg, Reg);
      } else {
        DEBUG(dbgs() << " " << TRI->getName(Reg));
        FirstReg = Reg;
      }
    }

    DEBUG(dbgs() << "->g" << State->GetGroup(FirstReg) << '\n');
  }
}
  444. BitVector AggressiveAntiDepBreaker::GetRenameRegisters(unsigned Reg) {
  445. BitVector BV(TRI->getNumRegs(), false);
  446. bool first = true;
  447. // Check all references that need rewriting for Reg. For each, use
  448. // the corresponding register class to narrow the set of registers
  449. // that are appropriate for renaming.
  450. std::pair<std::multimap<unsigned,
  451. AggressiveAntiDepState::RegisterReference>::iterator,
  452. std::multimap<unsigned,
  453. AggressiveAntiDepState::RegisterReference>::iterator>
  454. Range = State->GetRegRefs().equal_range(Reg);
  455. for (std::multimap<unsigned,
  456. AggressiveAntiDepState::RegisterReference>::iterator Q = Range.first,
  457. QE = Range.second; Q != QE; ++Q) {
  458. const TargetRegisterClass *RC = Q->second.RC;
  459. if (!RC) continue;
  460. BitVector RCBV = TRI->getAllocatableSet(MF, RC);
  461. if (first) {
  462. BV |= RCBV;
  463. first = false;
  464. } else {
  465. BV &= RCBV;
  466. }
  467. DEBUG(dbgs() << " " << TRI->getRegClassName(RC));
  468. }
  469. return BV;
  470. }
// Try to find a replacement register for every register in the rename
// group AntiDepGroupIndex. On success, fills RenameMap (old reg -> new
// reg) and returns true; RenameOrder maintains the round-robin starting
// position per register class across calls.
bool AggressiveAntiDepBreaker::FindSuitableFreeRegisters(
                                unsigned AntiDepGroupIndex,
                                RenameOrderType& RenameOrder,
                                std::map<unsigned, unsigned> &RenameMap) {
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // Collect all referenced registers in the same group as
  // AntiDepReg. These all need to be renamed together if we are to
  // break the anti-dependence.
  std::vector<unsigned> Regs;
  State->GetGroupRegs(AntiDepGroupIndex, Regs, &RegRefs);
  assert(Regs.size() > 0 && "Empty register group!");
  if (Regs.size() == 0)
    return false;

  // Find the "superest" register in the group. At the same time,
  // collect the BitVector of registers that can be used to rename
  // each register.
  DEBUG(dbgs() << "\tRename Candidates for Group g" << AntiDepGroupIndex
        << ":\n");
  std::map<unsigned, BitVector> RenameRegisterMap;
  unsigned SuperReg = 0;
  for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
    unsigned Reg = Regs[i];
    if ((SuperReg == 0) || TRI->isSuperRegister(SuperReg, Reg))
      SuperReg = Reg;

    // If Reg has any references, then collect possible rename regs
    if (RegRefs.count(Reg) > 0) {
      DEBUG(dbgs() << "\t\t" << TRI->getName(Reg) << ":");

      BitVector BV = GetRenameRegisters(Reg);
      RenameRegisterMap.insert(std::pair<unsigned, BitVector>(Reg, BV));

      DEBUG(dbgs() << " ::");
      DEBUG(for (int r = BV.find_first(); r != -1; r = BV.find_next(r))
              dbgs() << " " << TRI->getName(r));
      DEBUG(dbgs() << "\n");
    }
  }

  // All group registers should be a subreg of SuperReg.
  for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
    unsigned Reg = Regs[i];
    if (Reg == SuperReg) continue;
    bool IsSub = TRI->isSubRegister(SuperReg, Reg);
    // FIXME: remove this once PR18663 has been properly fixed. For now,
    // return a conservative answer:
    // assert(IsSub && "Expecting group subregister");
    if (!IsSub)
      return false;
  }

#ifndef NDEBUG
  // If DebugDiv > 0 then only rename (renamecnt % DebugDiv) == DebugMod
  // (bisection aid; see the DebugDiv/DebugMod options above).
  if (DebugDiv > 0) {
    static int renamecnt = 0;
    if (renamecnt++ % DebugDiv != DebugMod)
      return false;

    dbgs() << "*** Performing rename " << TRI->getName(SuperReg) <<
      " for debug ***\n";
  }
#endif

  // Check each possible rename register for SuperReg in round-robin
  // order. If that register is available, and the corresponding
  // registers are available for the other group subregisters, then we
  // can use those registers to rename.

  // FIXME: Using getMinimalPhysRegClass is very conservative. We should
  // check every use of the register and find the largest register class
  // that can be used in all of them.
  const TargetRegisterClass *SuperRC =
    TRI->getMinimalPhysRegClass(SuperReg, MVT::Other);

  ArrayRef<MCPhysReg> Order = RegClassInfo.getOrder(SuperRC);
  if (Order.empty()) {
    DEBUG(dbgs() << "\tEmpty Super Regclass!!\n");
    return false;
  }

  DEBUG(dbgs() << "\tFind Registers:");

  // insert() is a no-op if SuperRC is already present, so OrigR resumes
  // where the previous call to this function left off.
  RenameOrder.insert(RenameOrderType::value_type(SuperRC, Order.size()));

  unsigned OrigR = RenameOrder[SuperRC];
  unsigned EndR = ((OrigR == Order.size()) ? 0 : OrigR);
  unsigned R = OrigR;
  do {
    if (R == 0) R = Order.size();
    --R;
    const unsigned NewSuperReg = Order[R];
    // Don't consider non-allocatable registers
    if (!MRI.isAllocatable(NewSuperReg)) continue;
    // Don't replace a register with itself.
    if (NewSuperReg == SuperReg) continue;

    DEBUG(dbgs() << " [" << TRI->getName(NewSuperReg) << ':');
    RenameMap.clear();

    // For each referenced group register (which must be a SuperReg or
    // a subregister of SuperReg), find the corresponding subregister
    // of NewSuperReg and make sure it is free to be renamed.
    for (unsigned i = 0, e = Regs.size(); i != e; ++i) {
      unsigned Reg = Regs[i];
      unsigned NewReg = 0;
      if (Reg == SuperReg) {
        NewReg = NewSuperReg;
      } else {
        unsigned NewSubRegIdx = TRI->getSubRegIndex(SuperReg, Reg);
        if (NewSubRegIdx != 0)
          NewReg = TRI->getSubReg(NewSuperReg, NewSubRegIdx);
      }

      DEBUG(dbgs() << " " << TRI->getName(NewReg));

      // Check if Reg can be renamed to NewReg.
      BitVector BV = RenameRegisterMap[Reg];
      if (!BV.test(NewReg)) {
        DEBUG(dbgs() << "(no rename)");
        goto next_super_reg;
      }

      // If NewReg is dead and NewReg's most recent def is not before
      // Regs's kill, it's safe to replace Reg with NewReg. We
      // must also check all aliases of NewReg, because we can't define a
      // register when any sub or super is already live.
      if (State->IsLive(NewReg) || (KillIndices[Reg] > DefIndices[NewReg])) {
        DEBUG(dbgs() << "(live)");
        goto next_super_reg;
      } else {
        bool found = false;
        for (MCRegAliasIterator AI(NewReg, TRI, false); AI.isValid(); ++AI) {
          unsigned AliasReg = *AI;
          if (State->IsLive(AliasReg) ||
              (KillIndices[Reg] > DefIndices[AliasReg])) {
            DEBUG(dbgs() << "(alias " << TRI->getName(AliasReg) << " live)");
            found = true;
            break;
          }
        }
        if (found)
          goto next_super_reg;
      }

      // We cannot rename 'Reg' to 'NewReg' if one of the uses of 'Reg' also
      // defines 'NewReg' via an early-clobber operand.
      auto Range = RegRefs.equal_range(Reg);
      for (auto Q = Range.first, QE = Range.second; Q != QE; ++Q) {
        auto UseMI = Q->second.Operand->getParent();
        int Idx = UseMI->findRegisterDefOperandIdx(NewReg, false, true, TRI);
        if (Idx == -1)
          continue;

        if (UseMI->getOperand(Idx).isEarlyClobber()) {
          DEBUG(dbgs() << "(ec)");
          goto next_super_reg;
        }
      }

      // Record that 'Reg' can be renamed to 'NewReg'.
      RenameMap.insert(std::pair<unsigned, unsigned>(Reg, NewReg));
    }

    // If we fall-out here, then every register in the group can be
    // renamed, as recorded in RenameMap. Remember R so the next call
    // continues the round-robin from here.
    RenameOrder.erase(SuperRC);
    RenameOrder.insert(RenameOrderType::value_type(SuperRC, R));
    DEBUG(dbgs() << "]\n");
    return true;

  next_super_reg:
    DEBUG(dbgs() << ']');
  } while (R != EndR);

  DEBUG(dbgs() << '\n');

  // No registers are free and available!
  return false;
}
/// BreakAntiDependencies - Identify anti-dependencies within the
/// ScheduleDAG and break them by renaming registers.
///
/// Walks the scheduling region bottom-up (End back to Begin), maintaining
/// register liveness/grouping state in *State, and for each candidate
/// anti-/output-dependence edge tries to rename the defining register's
/// whole rename group to free registers found by FindSuitableFreeRegisters.
///
/// \param SUnits         scheduling units for this region.
/// \param Begin, End     machine-instruction range being scanned.
/// \param InsertPosIndex index just past the last instruction; instruction
///                       counts decrease from InsertPosIndex-1 going up.
/// \param DbgValues      DBG_VALUE/parent-MI pairs; updated when a renamed
///                       register appears in a tracked debug value.
/// \returns the number of anti-dependence edges broken.
unsigned AggressiveAntiDepBreaker::BreakAntiDependencies(
                              const std::vector<SUnit>& SUnits,
                              MachineBasicBlock::iterator Begin,
                              MachineBasicBlock::iterator End,
                              unsigned InsertPosIndex,
                              DbgValueVector &DbgValues) {
  // Per-register bookkeeping owned by the shared state object; these are
  // references, so all updates below are visible to the helper methods.
  std::vector<unsigned> &KillIndices = State->GetKillIndices();
  std::vector<unsigned> &DefIndices = State->GetDefIndices();
  std::multimap<unsigned, AggressiveAntiDepState::RegisterReference>&
    RegRefs = State->GetRegRefs();

  // The code below assumes that there is at least one instruction,
  // so just duck out immediately if the block is empty.
  if (SUnits.empty()) return 0;

  // For each regclass the next register to use for renaming.
  RenameOrderType RenameOrder;

  // ...need a map from MI to SUnit.
  std::map<MachineInstr *, const SUnit *> MISUnitMap;
  for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
    const SUnit *SU = &SUnits[i];
    MISUnitMap.insert(std::pair<MachineInstr *, const SUnit *>(SU->getInstr(),
                                                               SU));
  }

  // Track progress along the critical path through the SUnit graph as
  // we walk the instructions. This is needed for regclasses that only
  // break critical-path anti-dependencies.
  const SUnit *CriticalPathSU = nullptr;
  MachineInstr *CriticalPathMI = nullptr;
  if (CriticalPathSet.any()) {
    // The head of the critical path is the SUnit with the maximum
    // depth + latency.
    for (unsigned i = 0, e = SUnits.size(); i != e; ++i) {
      const SUnit *SU = &SUnits[i];
      if (!CriticalPathSU ||
          ((SU->getDepth() + SU->Latency) >
           (CriticalPathSU->getDepth() + CriticalPathSU->Latency))) {
        CriticalPathSU = SU;
      }
    }
    CriticalPathMI = CriticalPathSU->getInstr();
  }

#ifndef NDEBUG
  DEBUG(dbgs() << "\n===== Aggressive anti-dependency breaking\n");
  DEBUG(dbgs() << "Available regs:");
  for (unsigned Reg = 0; Reg < TRI->getNumRegs(); ++Reg) {
    if (!State->IsLive(Reg))
      DEBUG(dbgs() << " " << TRI->getName(Reg));
  }
  DEBUG(dbgs() << '\n');
#endif

  // Attempt to break anti-dependence edges. Walk the instructions
  // from the bottom up, tracking information about liveness as we go
  // to help determine which registers are available.
  unsigned Broken = 0;
  unsigned Count = InsertPosIndex - 1;
  for (MachineBasicBlock::iterator I = End, E = Begin;
       I != E; --Count) {
    MachineInstr *MI = --I;

    // Debug values carry no scheduling constraints of their own.
    if (MI->isDebugValue())
      continue;

    DEBUG(dbgs() << "Anti: ");
    DEBUG(MI->dump());

    std::set<unsigned> PassthruRegs;
    GetPassthruRegs(MI, PassthruRegs);

    // Process the defs in MI...
    PrescanInstruction(MI, Count, PassthruRegs);

    // The dependence edges that represent anti- and output-
    // dependencies that are candidates for breaking.
    std::vector<const SDep *> Edges;
    const SUnit *PathSU = MISUnitMap[MI];
    AntiDepEdges(PathSU, Edges);

    // If MI is not on the critical path, then we don't rename
    // registers in the CriticalPathSet.
    BitVector *ExcludeRegs = nullptr;
    if (MI == CriticalPathMI) {
      // Advance the critical-path cursor to the next SUnit on the path.
      CriticalPathSU = CriticalPathStep(CriticalPathSU);
      CriticalPathMI = (CriticalPathSU) ? CriticalPathSU->getInstr() : nullptr;
    } else if (CriticalPathSet.any()) {
      ExcludeRegs = &CriticalPathSet;
    }

    // Ignore KILL instructions (they form a group in ScanInstruction
    // but don't cause any anti-dependence breaking themselves)
    if (!MI->isKill()) {
      // Attempt to break each anti-dependency...
      for (unsigned i = 0, e = Edges.size(); i != e; ++i) {
        const SDep *Edge = Edges[i];
        SUnit *NextSU = Edge->getSUnit();

        // Only anti- and output-dependencies are candidates for breaking.
        if ((Edge->getKind() != SDep::Anti) &&
            (Edge->getKind() != SDep::Output)) continue;

        unsigned AntiDepReg = Edge->getReg();
        DEBUG(dbgs() << "\tAntidep reg: " << TRI->getName(AntiDepReg));
        assert(AntiDepReg != 0 && "Anti-dependence on reg0?");

        if (!MRI.isAllocatable(AntiDepReg)) {
          // Don't break anti-dependencies on non-allocatable registers.
          DEBUG(dbgs() << " (non-allocatable)\n");
          continue;
        } else if (ExcludeRegs && ExcludeRegs->test(AntiDepReg)) {
          // Don't break anti-dependencies for critical path registers
          // if not on the critical path
          DEBUG(dbgs() << " (not critical-path)\n");
          continue;
        } else if (PassthruRegs.count(AntiDepReg) != 0) {
          // If the anti-dep register liveness "passes-thru", then
          // don't try to change it. It will be changed along with
          // the use if required to break an earlier antidep.
          DEBUG(dbgs() << " (passthru)\n");
          continue;
        } else {
          // No anti-dep breaking for implicit deps
          MachineOperand *AntiDepOp = MI->findRegisterDefOperand(AntiDepReg);
          assert(AntiDepOp && "Can't find index for defined register operand");
          // NOTE(review): the null check is redundant after the assert in
          // release builds' terms, but kept as defensive belt-and-braces.
          if (!AntiDepOp || AntiDepOp->isImplicit()) {
            DEBUG(dbgs() << " (implicit)\n");
            continue;
          }

          // If the SUnit has other dependencies on the SUnit that
          // it anti-depends on, don't bother breaking the
          // anti-dependency since those edges would prevent such
          // units from being scheduled past each other
          // regardless.
          //
          // Also, if there are dependencies on other SUnits with the
          // same register as the anti-dependency, don't attempt to
          // break it.
          for (SUnit::const_pred_iterator P = PathSU->Preds.begin(),
                 PE = PathSU->Preds.end(); P != PE; ++P) {
            // For preds on NextSU: any non-anti edge (or an anti edge on a
            // different register) disqualifies. For preds on other SUnits:
            // a data dependence on the same register disqualifies.
            if (P->getSUnit() == NextSU ?
                (P->getKind() != SDep::Anti || P->getReg() != AntiDepReg) :
                (P->getKind() == SDep::Data && P->getReg() == AntiDepReg)) {
              AntiDepReg = 0;
              break;
            }
          }
          // Second pass over the same preds, reporting the reason in debug
          // output (distinguishes "real" vs "other" dependency).
          for (SUnit::const_pred_iterator P = PathSU->Preds.begin(),
                 PE = PathSU->Preds.end(); P != PE; ++P) {
            if ((P->getSUnit() == NextSU) && (P->getKind() != SDep::Anti) &&
                (P->getKind() != SDep::Output)) {
              DEBUG(dbgs() << " (real dependency)\n");
              AntiDepReg = 0;
              break;
            } else if ((P->getSUnit() != NextSU) &&
                       (P->getKind() == SDep::Data) &&
                       (P->getReg() == AntiDepReg)) {
              DEBUG(dbgs() << " (other dependency)\n");
              AntiDepReg = 0;
              break;
            }
          }

          if (AntiDepReg == 0) continue;
        }

        assert(AntiDepReg != 0);
        if (AntiDepReg == 0) continue;

        // Determine AntiDepReg's register group.
        const unsigned GroupIndex = State->GetGroup(AntiDepReg);
        if (GroupIndex == 0) {
          // Group 0 is never renamed.
          DEBUG(dbgs() << " (zero group)\n");
          continue;
        }

        DEBUG(dbgs() << '\n');

        // Look for a suitable register to use to break the anti-dependence.
        // RenameMap maps each register of the group to its replacement.
        std::map<unsigned, unsigned> RenameMap;
        if (FindSuitableFreeRegisters(GroupIndex, RenameOrder, RenameMap)) {
          DEBUG(dbgs() << "\tBreaking anti-dependence edge on "
                << TRI->getName(AntiDepReg) << ":");

          // Handle each group register...
          for (std::map<unsigned, unsigned>::iterator
                 S = RenameMap.begin(), E = RenameMap.end(); S != E; ++S) {
            unsigned CurrReg = S->first;
            unsigned NewReg = S->second;

            DEBUG(dbgs() << " " << TRI->getName(CurrReg) << "->" <<
                  TRI->getName(NewReg) << "(" <<
                  RegRefs.count(CurrReg) << " refs)");

            // Update the references to the old register CurrReg to
            // refer to the new register NewReg.
            std::pair<std::multimap<unsigned,
                           AggressiveAntiDepState::RegisterReference>::iterator,
                      std::multimap<unsigned,
                           AggressiveAntiDepState::RegisterReference>::iterator>
              Range = RegRefs.equal_range(CurrReg);
            for (std::multimap<unsigned,
                 AggressiveAntiDepState::RegisterReference>::iterator
                   Q = Range.first, QE = Range.second; Q != QE; ++Q) {
              Q->second.Operand->setReg(NewReg);
              // If the SU for the instruction being updated has debug
              // information related to the anti-dependency register, make
              // sure to update that as well.
              const SUnit *SU = MISUnitMap[Q->second.Operand->getParent()];
              if (!SU) continue;
              for (DbgValueVector::iterator DVI = DbgValues.begin(),
                     DVE = DbgValues.end(); DVI != DVE; ++DVI)
                if (DVI->second == Q->second.Operand->getParent())
                  UpdateDbgValue(DVI->first, AntiDepReg, NewReg);
            }

            // We just went back in time and modified history; the
            // liveness information for CurrReg is now inconsistent. Set
            // the state as if it were dead.
            // NewReg inherits CurrReg's liveness indices...
            State->UnionGroups(NewReg, 0);
            RegRefs.erase(NewReg);
            DefIndices[NewReg] = DefIndices[CurrReg];
            KillIndices[NewReg] = KillIndices[CurrReg];

            // ...and CurrReg is marked dead (def index set, kill cleared).
            State->UnionGroups(CurrReg, 0);
            RegRefs.erase(CurrReg);
            DefIndices[CurrReg] = KillIndices[CurrReg];
            KillIndices[CurrReg] = ~0u;
            assert(((KillIndices[CurrReg] == ~0u) !=
                    (DefIndices[CurrReg] == ~0u)) &&
                   "Kill and Def maps aren't consistent for AntiDepReg!");
          }

          ++Broken;
          DEBUG(dbgs() << '\n');
        }
      }
    }

    // Update liveness state for MI's uses/defs before moving up.
    ScanInstruction(MI, Count);
  }

  return Broken;
}