// RegAllocPBQP.h
  1. //===-- RegAllocPBQP.h ------------------------------------------*- C++ -*-===//
  2. //
  3. // The LLVM Compiler Infrastructure
  4. //
  5. // This file is distributed under the University of Illinois Open Source
  6. // License. See LICENSE.TXT for details.
  7. //
  8. //===----------------------------------------------------------------------===//
  9. //
  10. // This file defines the PBQPBuilder interface, for classes which build PBQP
  11. // instances to represent register allocation problems, and the RegAllocPBQP
  12. // interface.
  13. //
  14. //===----------------------------------------------------------------------===//
  15. #ifndef LLVM_CODEGEN_REGALLOCPBQP_H
  16. #define LLVM_CODEGEN_REGALLOCPBQP_H
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/Hashing.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/PBQP/CostAllocator.h"
#include "llvm/CodeGen/PBQP/ReductionRules.h"
#include "llvm/CodeGen/PBQPRAConstraint.h"
#include "llvm/Support/ErrorHandling.h"
#include <algorithm>
#include <limits>
#include <memory>
#include <set>
#include <vector>
  22. namespace llvm {
  23. class raw_ostream;
  24. namespace PBQP {
  25. namespace RegAlloc {
  26. /// @brief Spill option index.
  27. inline unsigned getSpillOptionIdx() { return 0; }
  28. /// \brief Metadata to speed allocatability test.
  29. ///
  30. /// Keeps track of the number of infinities in each row and column.
  31. class MatrixMetadata {
  32. private:
  33. MatrixMetadata(const MatrixMetadata&);
  34. void operator=(const MatrixMetadata&);
  35. public:
  36. MatrixMetadata(const Matrix& M)
  37. : WorstRow(0), WorstCol(0),
  38. UnsafeRows(new bool[M.getRows() - 1]()),
  39. UnsafeCols(new bool[M.getCols() - 1]()) {
  40. unsigned* ColCounts = new unsigned[M.getCols() - 1]();
  41. for (unsigned i = 1; i < M.getRows(); ++i) {
  42. unsigned RowCount = 0;
  43. for (unsigned j = 1; j < M.getCols(); ++j) {
  44. if (M[i][j] == std::numeric_limits<PBQPNum>::infinity()) {
  45. ++RowCount;
  46. ++ColCounts[j - 1];
  47. UnsafeRows[i - 1] = true;
  48. UnsafeCols[j - 1] = true;
  49. }
  50. }
  51. WorstRow = std::max(WorstRow, RowCount);
  52. }
  53. unsigned WorstColCountForCurRow =
  54. *std::max_element(ColCounts, ColCounts + M.getCols() - 1);
  55. WorstCol = std::max(WorstCol, WorstColCountForCurRow);
  56. delete[] ColCounts;
  57. }
  58. unsigned getWorstRow() const { return WorstRow; }
  59. unsigned getWorstCol() const { return WorstCol; }
  60. const bool* getUnsafeRows() const { return UnsafeRows.get(); }
  61. const bool* getUnsafeCols() const { return UnsafeCols.get(); }
  62. private:
  63. unsigned WorstRow, WorstCol;
  64. std::unique_ptr<bool[]> UnsafeRows;
  65. std::unique_ptr<bool[]> UnsafeCols;
  66. };
  67. /// \brief Holds a vector of the allowed physical regs for a vreg.
  68. class AllowedRegVector {
  69. friend hash_code hash_value(const AllowedRegVector &);
  70. public:
  71. AllowedRegVector() : NumOpts(0), Opts(nullptr) {}
  72. AllowedRegVector(const std::vector<unsigned> &OptVec)
  73. : NumOpts(OptVec.size()), Opts(new unsigned[NumOpts]) {
  74. std::copy(OptVec.begin(), OptVec.end(), Opts.get());
  75. }
  76. AllowedRegVector(const AllowedRegVector &Other)
  77. : NumOpts(Other.NumOpts), Opts(new unsigned[NumOpts]) {
  78. std::copy(Other.Opts.get(), Other.Opts.get() + NumOpts, Opts.get());
  79. }
  80. AllowedRegVector(AllowedRegVector &&Other)
  81. : NumOpts(std::move(Other.NumOpts)), Opts(std::move(Other.Opts)) {}
  82. AllowedRegVector& operator=(const AllowedRegVector &Other) {
  83. NumOpts = Other.NumOpts;
  84. Opts.reset(new unsigned[NumOpts]);
  85. std::copy(Other.Opts.get(), Other.Opts.get() + NumOpts, Opts.get());
  86. return *this;
  87. }
  88. AllowedRegVector& operator=(AllowedRegVector &&Other) {
  89. NumOpts = std::move(Other.NumOpts);
  90. Opts = std::move(Other.Opts);
  91. return *this;
  92. }
  93. unsigned size() const { return NumOpts; }
  94. unsigned operator[](size_t I) const { return Opts[I]; }
  95. bool operator==(const AllowedRegVector &Other) const {
  96. if (NumOpts != Other.NumOpts)
  97. return false;
  98. return std::equal(Opts.get(), Opts.get() + NumOpts, Other.Opts.get());
  99. }
  100. bool operator!=(const AllowedRegVector &Other) const {
  101. return !(*this == Other);
  102. }
  103. private:
  104. unsigned NumOpts;
  105. std::unique_ptr<unsigned[]> Opts;
  106. };
  107. inline hash_code hash_value(const AllowedRegVector &OptRegs) {
  108. unsigned *OStart = OptRegs.Opts.get();
  109. unsigned *OEnd = OptRegs.Opts.get() + OptRegs.NumOpts;
  110. return hash_combine(OptRegs.NumOpts,
  111. hash_combine_range(OStart, OEnd));
  112. }
  113. /// \brief Holds graph-level metadata relevent to PBQP RA problems.
  114. class GraphMetadata {
  115. private:
  116. typedef ValuePool<AllowedRegVector> AllowedRegVecPool;
  117. public:
  118. typedef AllowedRegVecPool::PoolRef AllowedRegVecRef;
  119. GraphMetadata(MachineFunction &MF,
  120. LiveIntervals &LIS,
  121. MachineBlockFrequencyInfo &MBFI)
  122. : MF(MF), LIS(LIS), MBFI(MBFI) {}
  123. MachineFunction &MF;
  124. LiveIntervals &LIS;
  125. MachineBlockFrequencyInfo &MBFI;
  126. void setNodeIdForVReg(unsigned VReg, GraphBase::NodeId NId) {
  127. VRegToNodeId[VReg] = NId;
  128. }
  129. GraphBase::NodeId getNodeIdForVReg(unsigned VReg) const {
  130. auto VRegItr = VRegToNodeId.find(VReg);
  131. if (VRegItr == VRegToNodeId.end())
  132. return GraphBase::invalidNodeId();
  133. return VRegItr->second;
  134. }
  135. void eraseNodeIdForVReg(unsigned VReg) {
  136. VRegToNodeId.erase(VReg);
  137. }
  138. AllowedRegVecRef getAllowedRegs(AllowedRegVector Allowed) {
  139. return AllowedRegVecs.getValue(std::move(Allowed));
  140. }
  141. private:
  142. DenseMap<unsigned, GraphBase::NodeId> VRegToNodeId;
  143. AllowedRegVecPool AllowedRegVecs;
  144. };
  145. /// \brief Holds solver state and other metadata relevant to each PBQP RA node.
  146. class NodeMetadata {
  147. public:
  148. typedef RegAlloc::AllowedRegVector AllowedRegVector;
  149. // The node's reduction state. The order in this enum is important,
  150. // as it is assumed nodes can only progress up (i.e. towards being
  151. // optimally reducible) when reducing the graph.
  152. typedef enum {
  153. Unprocessed,
  154. NotProvablyAllocatable,
  155. ConservativelyAllocatable,
  156. OptimallyReducible
  157. } ReductionState;
  158. NodeMetadata()
  159. : RS(Unprocessed), NumOpts(0), DeniedOpts(0), OptUnsafeEdges(nullptr),
  160. VReg(0)
  161. #ifndef NDEBUG
  162. , everConservativelyAllocatable(false)
  163. #endif
  164. {}
  165. // FIXME: Re-implementing default behavior to work around MSVC. Remove once
  166. // MSVC synthesizes move constructors properly.
  167. NodeMetadata(const NodeMetadata &Other)
  168. : RS(Other.RS), NumOpts(Other.NumOpts), DeniedOpts(Other.DeniedOpts),
  169. OptUnsafeEdges(new unsigned[NumOpts]), VReg(Other.VReg),
  170. AllowedRegs(Other.AllowedRegs)
  171. #ifndef NDEBUG
  172. , everConservativelyAllocatable(Other.everConservativelyAllocatable)
  173. #endif
  174. {
  175. if (NumOpts > 0) {
  176. std::copy(&Other.OptUnsafeEdges[0], &Other.OptUnsafeEdges[NumOpts],
  177. &OptUnsafeEdges[0]);
  178. }
  179. }
  180. // FIXME: Re-implementing default behavior to work around MSVC. Remove once
  181. // MSVC synthesizes move constructors properly.
  182. NodeMetadata(NodeMetadata &&Other)
  183. : RS(Other.RS), NumOpts(Other.NumOpts), DeniedOpts(Other.DeniedOpts),
  184. OptUnsafeEdges(std::move(Other.OptUnsafeEdges)), VReg(Other.VReg),
  185. AllowedRegs(std::move(Other.AllowedRegs))
  186. #ifndef NDEBUG
  187. , everConservativelyAllocatable(Other.everConservativelyAllocatable)
  188. #endif
  189. {}
  190. // FIXME: Re-implementing default behavior to work around MSVC. Remove once
  191. // MSVC synthesizes move constructors properly.
  192. NodeMetadata& operator=(const NodeMetadata &Other) {
  193. RS = Other.RS;
  194. NumOpts = Other.NumOpts;
  195. DeniedOpts = Other.DeniedOpts;
  196. OptUnsafeEdges.reset(new unsigned[NumOpts]);
  197. std::copy(Other.OptUnsafeEdges.get(), Other.OptUnsafeEdges.get() + NumOpts,
  198. OptUnsafeEdges.get());
  199. VReg = Other.VReg;
  200. AllowedRegs = Other.AllowedRegs;
  201. #ifndef NDEBUG
  202. everConservativelyAllocatable = Other.everConservativelyAllocatable;
  203. #endif
  204. return *this;
  205. }
  206. // FIXME: Re-implementing default behavior to work around MSVC. Remove once
  207. // MSVC synthesizes move constructors properly.
  208. NodeMetadata& operator=(NodeMetadata &&Other) {
  209. RS = Other.RS;
  210. NumOpts = Other.NumOpts;
  211. DeniedOpts = Other.DeniedOpts;
  212. OptUnsafeEdges = std::move(Other.OptUnsafeEdges);
  213. VReg = Other.VReg;
  214. AllowedRegs = std::move(Other.AllowedRegs);
  215. #ifndef NDEBUG
  216. everConservativelyAllocatable = Other.everConservativelyAllocatable;
  217. #endif
  218. return *this;
  219. }
  220. void setVReg(unsigned VReg) { this->VReg = VReg; }
  221. unsigned getVReg() const { return VReg; }
  222. void setAllowedRegs(GraphMetadata::AllowedRegVecRef AllowedRegs) {
  223. this->AllowedRegs = std::move(AllowedRegs);
  224. }
  225. const AllowedRegVector& getAllowedRegs() const { return *AllowedRegs; }
  226. void setup(const Vector& Costs) {
  227. NumOpts = Costs.getLength() - 1;
  228. OptUnsafeEdges = std::unique_ptr<unsigned[]>(new unsigned[NumOpts]());
  229. }
  230. ReductionState getReductionState() const { return RS; }
  231. void setReductionState(ReductionState RS) {
  232. assert(RS >= this->RS && "A node's reduction state can not be downgraded");
  233. this->RS = RS;
  234. #ifndef NDEBUG
  235. // Remember this state to assert later that a non-infinite register
  236. // option was available.
  237. if (RS == ConservativelyAllocatable)
  238. everConservativelyAllocatable = true;
  239. #endif
  240. }
  241. void handleAddEdge(const MatrixMetadata& MD, bool Transpose) {
  242. DeniedOpts += Transpose ? MD.getWorstRow() : MD.getWorstCol();
  243. const bool* UnsafeOpts =
  244. Transpose ? MD.getUnsafeCols() : MD.getUnsafeRows();
  245. for (unsigned i = 0; i < NumOpts; ++i)
  246. OptUnsafeEdges[i] += UnsafeOpts[i];
  247. }
  248. void handleRemoveEdge(const MatrixMetadata& MD, bool Transpose) {
  249. DeniedOpts -= Transpose ? MD.getWorstRow() : MD.getWorstCol();
  250. const bool* UnsafeOpts =
  251. Transpose ? MD.getUnsafeCols() : MD.getUnsafeRows();
  252. for (unsigned i = 0; i < NumOpts; ++i)
  253. OptUnsafeEdges[i] -= UnsafeOpts[i];
  254. }
  255. bool isConservativelyAllocatable() const {
  256. return (DeniedOpts < NumOpts) ||
  257. (std::find(&OptUnsafeEdges[0], &OptUnsafeEdges[NumOpts], 0) !=
  258. &OptUnsafeEdges[NumOpts]);
  259. }
  260. #ifndef NDEBUG
  261. bool wasConservativelyAllocatable() const {
  262. return everConservativelyAllocatable;
  263. }
  264. #endif
  265. private:
  266. ReductionState RS;
  267. unsigned NumOpts;
  268. unsigned DeniedOpts;
  269. std::unique_ptr<unsigned[]> OptUnsafeEdges;
  270. unsigned VReg;
  271. GraphMetadata::AllowedRegVecRef AllowedRegs;
  272. #ifndef NDEBUG
  273. bool everConservativelyAllocatable;
  274. #endif
  275. };
/// PBQP solver implementation specialized for register allocation problems.
/// Buckets nodes into three worklists by reduction state and applies the
/// R0/R1/R2 reduction rules to produce a reduction order for the graph.
class RegAllocSolverImpl {
private:
  typedef MDMatrix<MatrixMetadata> RAMatrix;
public:
  typedef PBQP::Vector RawVector;
  typedef PBQP::Matrix RawMatrix;
  typedef PBQP::Vector Vector;
  // Edge cost matrices carry MatrixMetadata so allocatability can be
  // re-tested quickly as edges come and go.
  typedef RAMatrix Matrix;
  typedef PBQP::PoolCostAllocator<Vector, Matrix> CostAllocator;
  typedef GraphBase::NodeId NodeId;
  typedef GraphBase::EdgeId EdgeId;
  typedef RegAlloc::NodeMetadata NodeMetadata;
  struct EdgeMetadata { };
  typedef RegAlloc::GraphMetadata GraphMetadata;
  typedef PBQP::Graph<RegAllocSolverImpl> Graph;

  RegAllocSolverImpl(Graph &G) : G(G) {}

  /// Solve the graph: bucket nodes, reduce to a node stack, then
  /// back-propagate selections to build the final Solution.
  Solution solve() {
    G.setSolver(*this);
    Solution S;
    setup();
    S = backpropagate(G, reduce());
    G.unsetSolver();
    return S;
  }

  /// Graph callback: initialize per-node metadata when a node is added.
  void handleAddNode(NodeId NId) {
    assert(G.getNodeCosts(NId).getLength() > 1 &&
           "PBQP Graph should not contain single or zero-option nodes");
    G.getNodeMetadata(NId).setup(G.getNodeCosts(NId));
  }
  void handleRemoveNode(NodeId NId) {}
  void handleSetNodeCosts(NodeId NId, const Vector& newCosts) {}

  /// Graph callback: fold a new edge's metadata into both endpoints.
  void handleAddEdge(EdgeId EId) {
    handleReconnectEdge(EId, G.getEdgeNode1Id(EId));
    handleReconnectEdge(EId, G.getEdgeNode2Id(EId));
  }

  /// Graph callback: remove an edge's contribution from both endpoints.
  void handleRemoveEdge(EdgeId EId) {
    handleDisconnectEdge(EId, G.getEdgeNode1Id(EId));
    handleDisconnectEdge(EId, G.getEdgeNode2Id(EId));
  }

  /// Subtract EId's metadata from NId's counters and re-bucket NId.
  /// Matrix rows belong to the edge's first node, so Transpose is passed
  /// when NId is the edge's second node.
  void handleDisconnectEdge(EdgeId EId, NodeId NId) {
    NodeMetadata& NMd = G.getNodeMetadata(NId);
    const MatrixMetadata& MMd = G.getEdgeCosts(EId).getMetadata();
    NMd.handleRemoveEdge(MMd, NId == G.getEdgeNode2Id(EId));
    promote(NId, NMd);
  }

  /// Add EId's metadata into NId's counters (inverse of
  /// handleDisconnectEdge, but without re-bucketing).
  void handleReconnectEdge(EdgeId EId, NodeId NId) {
    NodeMetadata& NMd = G.getNodeMetadata(NId);
    const MatrixMetadata& MMd = G.getEdgeCosts(EId).getMetadata();
    NMd.handleAddEdge(MMd, NId == G.getEdgeNode2Id(EId));
  }

  /// Graph callback: an edge's cost matrix is being replaced. Metadata are
  /// maintained incrementally: subtract the old matrix's contribution, add
  /// the new one's, then re-bucket both endpoints.
  void handleUpdateCosts(EdgeId EId, const Matrix& NewCosts) {
    NodeId N1Id = G.getEdgeNode1Id(EId);
    NodeId N2Id = G.getEdgeNode2Id(EId);
    NodeMetadata& N1Md = G.getNodeMetadata(N1Id);
    NodeMetadata& N2Md = G.getNodeMetadata(N2Id);
    // NOTE(review): N1Id was just read from getEdgeNode1Id above, so
    // Transpose is always false here -- i.e. node 1 is assumed to own the
    // matrix rows. Confirm this is intentional rather than a leftover.
    bool Transpose = N1Id != G.getEdgeNode1Id(EId);
    // Metadata are computed incrementally. First, update them
    // by removing the old cost.
    const MatrixMetadata& OldMMd = G.getEdgeCosts(EId).getMetadata();
    N1Md.handleRemoveEdge(OldMMd, Transpose);
    N2Md.handleRemoveEdge(OldMMd, !Transpose);
    // And update now the metadata with the new cost.
    const MatrixMetadata& MMd = NewCosts.getMetadata();
    N1Md.handleAddEdge(MMd, Transpose);
    N2Md.handleAddEdge(MMd, !Transpose);
    // As the metadata may have changed with the update, the nodes may have
    // become ConservativelyAllocatable or OptimallyReducible.
    promote(N1Id, N1Md);
    promote(N2Id, N2Md);
  }

private:
  /// Move NId to a "better" worklist if its metadata now permits it.
  void promote(NodeId NId, NodeMetadata& NMd) {
    if (G.getNodeDegree(NId) == 3) {
      // This node is becoming optimally reducible.
      // NOTE(review): degree 3 here appears to mean "about to drop to 2"
      // when invoked from an edge removal -- confirm against the Graph
      // class's edge-removal ordering.
      moveToOptimallyReducibleNodes(NId);
    } else if (NMd.getReductionState() ==
               NodeMetadata::NotProvablyAllocatable &&
               NMd.isConservativelyAllocatable()) {
      // This node just became conservatively allocatable.
      moveToConservativelyAllocatableNodes(NId);
    }
  }

  /// Remove NId from whichever worklist its reduction state indicates.
  void removeFromCurrentSet(NodeId NId) {
    switch (G.getNodeMetadata(NId).getReductionState()) {
    case NodeMetadata::Unprocessed: break;
    case NodeMetadata::OptimallyReducible:
      assert(OptimallyReducibleNodes.find(NId) !=
             OptimallyReducibleNodes.end() &&
             "Node not in optimally reducible set.");
      OptimallyReducibleNodes.erase(NId);
      break;
    case NodeMetadata::ConservativelyAllocatable:
      assert(ConservativelyAllocatableNodes.find(NId) !=
             ConservativelyAllocatableNodes.end() &&
             "Node not in conservatively allocatable set.");
      ConservativelyAllocatableNodes.erase(NId);
      break;
    case NodeMetadata::NotProvablyAllocatable:
      assert(NotProvablyAllocatableNodes.find(NId) !=
             NotProvablyAllocatableNodes.end() &&
             "Node not in not-provably-allocatable set.");
      NotProvablyAllocatableNodes.erase(NId);
      break;
    }
  }

  void moveToOptimallyReducibleNodes(NodeId NId) {
    removeFromCurrentSet(NId);
    OptimallyReducibleNodes.insert(NId);
    G.getNodeMetadata(NId).setReductionState(
        NodeMetadata::OptimallyReducible);
  }

  void moveToConservativelyAllocatableNodes(NodeId NId) {
    removeFromCurrentSet(NId);
    ConservativelyAllocatableNodes.insert(NId);
    G.getNodeMetadata(NId).setReductionState(
        NodeMetadata::ConservativelyAllocatable);
  }

  void moveToNotProvablyAllocatableNodes(NodeId NId) {
    removeFromCurrentSet(NId);
    NotProvablyAllocatableNodes.insert(NId);
    G.getNodeMetadata(NId).setReductionState(
        NodeMetadata::NotProvablyAllocatable);
  }

  /// Bucket every node into its initial worklist.
  void setup() {
    // Set up worklists.
    for (auto NId : G.nodeIds()) {
      if (G.getNodeDegree(NId) < 3)
        moveToOptimallyReducibleNodes(NId);
      else if (G.getNodeMetadata(NId).isConservativelyAllocatable())
        moveToConservativelyAllocatableNodes(NId);
      else
        moveToNotProvablyAllocatableNodes(NId);
    }
  }

  // Compute a reduction order for the graph by iteratively applying PBQP
  // reduction rules. Locally optimal rules are applied whenever possible (R0,
  // R1, R2). If no locally-optimal rules apply then any conservatively
  // allocatable node is reduced. Finally, if no conservatively allocatable
  // node exists then the node with the lowest spill-cost:degree ratio is
  // selected.
  std::vector<GraphBase::NodeId> reduce() {
    assert(!G.empty() && "Cannot reduce empty graph.");
    typedef GraphBase::NodeId NodeId;
    std::vector<NodeId> NodeStack;
    // Consume worklists.
    while (true) {
      if (!OptimallyReducibleNodes.empty()) {
        NodeSet::iterator NItr = OptimallyReducibleNodes.begin();
        NodeId NId = *NItr;
        OptimallyReducibleNodes.erase(NItr);
        NodeStack.push_back(NId);
        // Degrees 0/1/2 map to reduction rules R0 (nothing to do), R1, R2.
        switch (G.getNodeDegree(NId)) {
        case 0:
          break;
        case 1:
          applyR1(G, NId);
          break;
        case 2:
          applyR2(G, NId);
          break;
        default: llvm_unreachable("Not an optimally reducible node.");
        }
      } else if (!ConservativelyAllocatableNodes.empty()) {
        // Conservatively allocatable nodes will never spill. For now just
        // take the first node in the set and push it on the stack. When we
        // start optimizing more heavily for register preferencing, it may
        // would be better to push nodes with lower 'expected' or worst-case
        // register costs first (since early nodes are the most
        // constrained).
        NodeSet::iterator NItr = ConservativelyAllocatableNodes.begin();
        NodeId NId = *NItr;
        ConservativelyAllocatableNodes.erase(NItr);
        NodeStack.push_back(NId);
        G.disconnectAllNeighborsFromNode(NId);
      } else if (!NotProvablyAllocatableNodes.empty()) {
        // Spill candidate: the node with the lowest spill cost (ties broken
        // by lower degree -- see SpillCostComparator below).
        NodeSet::iterator NItr =
            std::min_element(NotProvablyAllocatableNodes.begin(),
                             NotProvablyAllocatableNodes.end(),
                             SpillCostComparator(G));
        NodeId NId = *NItr;
        NotProvablyAllocatableNodes.erase(NItr);
        NodeStack.push_back(NId);
        G.disconnectAllNeighborsFromNode(NId);
      } else
        break;
    }
    return NodeStack;
  }

  /// Orders nodes by spill cost (cost vector entry 0), breaking ties by
  /// node degree. Note: this compares raw spill cost, not the
  /// spill-cost:degree ratio mentioned in the comment on reduce().
  class SpillCostComparator {
  public:
    SpillCostComparator(const Graph& G) : G(G) {}
    bool operator()(NodeId N1Id, NodeId N2Id) {
      PBQPNum N1SC = G.getNodeCosts(N1Id)[0];
      PBQPNum N2SC = G.getNodeCosts(N2Id)[0];
      if (N1SC == N2SC)
        return G.getNodeDegree(N1Id) < G.getNodeDegree(N2Id);
      return N1SC < N2SC;
    }
  private:
    const Graph& G;
  };

  Graph& G;
  typedef std::set<NodeId> NodeSet;
  NodeSet OptimallyReducibleNodes;
  NodeSet ConservativelyAllocatableNodes;
  NodeSet NotProvablyAllocatableNodes;
};
  483. class PBQPRAGraph : public PBQP::Graph<RegAllocSolverImpl> {
  484. private:
  485. typedef PBQP::Graph<RegAllocSolverImpl> BaseT;
  486. public:
  487. PBQPRAGraph(GraphMetadata Metadata) : BaseT(Metadata) {}
  488. /// @brief Dump this graph to dbgs().
  489. void dump() const;
  490. /// @brief Dump this graph to an output stream.
  491. /// @param OS Output stream to print on.
  492. void dump(raw_ostream &OS) const;
  493. /// @brief Print a representation of this graph in DOT format.
  494. /// @param OS Output stream to print on.
  495. void printDot(raw_ostream &OS) const;
  496. };
  497. inline Solution solve(PBQPRAGraph& G) {
  498. if (G.empty())
  499. return Solution();
  500. RegAllocSolverImpl RegAllocSolver(G);
  501. return RegAllocSolver.solve();
  502. }
  503. } // namespace RegAlloc
  504. } // namespace PBQP
/// @brief Create a PBQP register allocator instance.
/// @param customPassID Optional pass ID. NOTE(review): presumably used to
/// schedule an extra pass before allocation -- confirm against the
/// definition in RegAllocPBQP.cpp.
FunctionPass *
createPBQPRegisterAllocator(char *customPassID = nullptr);
  508. } // namespace llvm
  509. #endif /* LLVM_CODEGEN_REGALLOCPBQP_H */