  1. /**
  2. * @file
  3. * @brief Network Simplex algorithm for ranking nodes of a DAG, @ref rank, @ref rank2
  4. * @ingroup common_render
  5. */
  6. /*************************************************************************
  7. * Copyright (c) 2011 AT&T Intellectual Property
  8. * All rights reserved. This program and the accompanying materials
  9. * are made available under the terms of the Eclipse Public License v1.0
  10. * which accompanies this distribution, and is available at
  11. * https://www.eclipse.org/legal/epl-v10.html
  12. *
  13. * Contributors: Details at https://graphviz.org
  14. *************************************************************************/
  15. #include <assert.h>
  16. #include <cgraph/list.h>
  17. #include <common/render.h>
  18. #include <limits.h>
  19. #include <stdbool.h>
  20. #include <stddef.h>
  21. #include <stdint.h>
  22. #include <stdio.h>
  23. #include <util/alloc.h>
  24. #include <util/exit.h>
  25. #include <util/overflow.h>
  26. #include <util/prisize_t.h>
  27. #include <util/streq.h>
/* forward declarations for the DFS helpers defined later in this file */
static void dfs_cutval(node_t * v, edge_t * par);
static int dfs_range_init(node_t * v, edge_t * par, int low);
static int dfs_range(node_t * v, edge_t * par, int low);
static int x_val(edge_t * e, node_t * v, int dir);
#ifdef DEBUG
static void check_cycles(graph_t * g);
#endif
/// rank span of an edge: head rank minus tail rank
#define LENGTH(e) (ND_rank(aghead(e)) - ND_rank(agtail(e)))
/// slack of an edge: how much longer it is than its minimum length
#define SLACK(e) (LENGTH(e) - ED_minlen(e))
/// is b within the closed interval [a, c]?
#define SEQ(a,b,c) ((a) <= (b) && (b) <= (c))
/// is this edge currently part of the spanning tree?
#define TREE_EDGE(e) (ED_tree_index(e) >= 0)
static graph_t *G; ///< the graph currently being ranked
static size_t N_nodes, N_edges; ///< node and edge counts of G
static size_t S_i; /* search index for enter_edge */
static int Search_size; ///< max candidates examined per leave_edge() call
#define SEARCHSIZE 30
static nlist_t Tree_node; ///< nodes of the current spanning tree
static elist Tree_edge; ///< edges of the current spanning tree
  46. static int add_tree_edge(edge_t * e)
  47. {
  48. node_t *n;
  49. if (TREE_EDGE(e)) {
  50. agerrorf("add_tree_edge: missing tree edge\n");
  51. return -1;
  52. }
  53. assert(Tree_edge.size <= INT_MAX);
  54. ED_tree_index(e) = (int)Tree_edge.size;
  55. Tree_edge.list[Tree_edge.size++] = e;
  56. if (!ND_mark(agtail(e)))
  57. Tree_node.list[Tree_node.size++] = agtail(e);
  58. if (!ND_mark(aghead(e)))
  59. Tree_node.list[Tree_node.size++] = aghead(e);
  60. n = agtail(e);
  61. ND_mark(n) = true;
  62. ND_tree_out(n).list[ND_tree_out(n).size++] = e;
  63. ND_tree_out(n).list[ND_tree_out(n).size] = NULL;
  64. if (ND_out(n).list[ND_tree_out(n).size - 1] == 0) {
  65. agerrorf("add_tree_edge: empty outedge list\n");
  66. return -1;
  67. }
  68. n = aghead(e);
  69. ND_mark(n) = true;
  70. ND_tree_in(n).list[ND_tree_in(n).size++] = e;
  71. ND_tree_in(n).list[ND_tree_in(n).size] = NULL;
  72. if (ND_in(n).list[ND_tree_in(n).size - 1] == 0) {
  73. agerrorf("add_tree_edge: empty inedge list\n");
  74. return -1;
  75. }
  76. return 0;
  77. }
  78. /**
  79. * Invalidate DFS attributes by walking up the tree from to_node till lca
  80. * (inclusively). Called when updating tree to improve pruning in dfs_range().
  81. * Assigns ND_low(n) = -1 for the affected nodes.
  82. */
  83. static void invalidate_path(node_t *lca, node_t *to_node) {
  84. while (true) {
  85. if (ND_low(to_node) == -1)
  86. break;
  87. ND_low(to_node) = -1;
  88. edge_t *e = ND_par(to_node);
  89. if (e == NULL)
  90. break;
  91. if (ND_lim(to_node) >= ND_lim(lca)) {
  92. if (to_node != lca)
  93. agerrorf("invalidate_path: skipped over LCA\n");
  94. break;
  95. }
  96. if (ND_lim(agtail(e)) > ND_lim(aghead(e)))
  97. to_node = agtail(e);
  98. else
  99. to_node = aghead(e);
  100. }
  101. }
/// replace tree edge e by edge f, swapping their tree bookkeeping
///
/// f inherits e's slot in Tree_edge, and the endpoints' tree_in/tree_out
/// lists are updated accordingly.
static void exchange_tree_edges(edge_t * e, edge_t * f)
{
    node_t *n;
    ED_tree_index(f) = ED_tree_index(e);
    Tree_edge.list[ED_tree_index(e)] = f;
    ED_tree_index(e) = -1;
    // remove e from its tail's tree_out list (swap with the last element)
    n = agtail(e);
    size_t i = --ND_tree_out(n).size;
    size_t j;
    for (j = 0; j <= i; j++)
	if (ND_tree_out(n).list[j] == e)
	    break;
    ND_tree_out(n).list[j] = ND_tree_out(n).list[i];
    ND_tree_out(n).list[i] = NULL;
    // remove e from its head's tree_in list (swap with the last element)
    n = aghead(e);
    i = --ND_tree_in(n).size;
    for (j = 0; j <= i; j++)
	if (ND_tree_in(n).list[j] == e)
	    break;
    ND_tree_in(n).list[j] = ND_tree_in(n).list[i];
    ND_tree_in(n).list[i] = NULL;
    // append f to its endpoints' tree lists (NULL-terminated)
    n = agtail(f);
    ND_tree_out(n).list[ND_tree_out(n).size++] = f;
    ND_tree_out(n).list[ND_tree_out(n).size] = NULL;
    n = aghead(f);
    ND_tree_in(n).list[ND_tree_in(n).size++] = f;
    ND_tree_in(n).list[ND_tree_in(n).size] = NULL;
}
  130. DEFINE_LIST(node_queue, node_t *)
  131. static
  132. void init_rank(void)
  133. {
  134. int i;
  135. node_t *v;
  136. edge_t *e;
  137. node_queue_t Q = {0};
  138. node_queue_reserve(&Q, N_nodes);
  139. size_t ctr = 0;
  140. for (v = GD_nlist(G); v; v = ND_next(v)) {
  141. if (ND_priority(v) == 0)
  142. node_queue_push_back(&Q, v);
  143. }
  144. while (!node_queue_is_empty(&Q)) {
  145. v = node_queue_pop_front(&Q);
  146. ND_rank(v) = 0;
  147. ctr++;
  148. for (i = 0; (e = ND_in(v).list[i]); i++)
  149. ND_rank(v) = MAX(ND_rank(v), ND_rank(agtail(e)) + ED_minlen(e));
  150. for (i = 0; (e = ND_out(v).list[i]); i++) {
  151. if (--ND_priority(aghead(e)) <= 0)
  152. node_queue_push_back(&Q, aghead(e));
  153. }
  154. }
  155. if (ctr != N_nodes) {
  156. agerrorf("trouble in init_rank\n");
  157. for (v = GD_nlist(G); v; v = ND_next(v))
  158. if (ND_priority(v))
  159. agerr(AGPREV, "\t%s %d\n", agnameof(v), ND_priority(v));
  160. }
  161. node_queue_free(&Q);
  162. }
  163. static edge_t *leave_edge(void)
  164. {
  165. edge_t *f, *rv = NULL;
  166. int cnt = 0;
  167. size_t j = S_i;
  168. while (S_i < Tree_edge.size) {
  169. if (ED_cutvalue(f = Tree_edge.list[S_i]) < 0) {
  170. if (rv) {
  171. if (ED_cutvalue(rv) > ED_cutvalue(f))
  172. rv = f;
  173. } else
  174. rv = Tree_edge.list[S_i];
  175. if (++cnt >= Search_size)
  176. return rv;
  177. }
  178. S_i++;
  179. }
  180. if (j > 0) {
  181. S_i = 0;
  182. while (S_i < j) {
  183. if (ED_cutvalue(f = Tree_edge.list[S_i]) < 0) {
  184. if (rv) {
  185. if (ED_cutvalue(rv) > ED_cutvalue(f))
  186. rv = f;
  187. } else
  188. rv = Tree_edge.list[S_i];
  189. if (++cnt >= Search_size)
  190. return rv;
  191. }
  192. S_i++;
  193. }
  194. }
  195. return rv;
  196. }
static edge_t *Enter; ///< best entering edge found by the dfs_enter_* search
static int Low, Lim, Slack; ///< search root's DFS interval and best slack so far
/// DFS over the tree component rooted below v, recording in Enter/Slack the
/// minimum-slack non-tree out-edge whose head lies outside [Low, Lim]
/// (i.e. in the other component)
static void dfs_enter_outedge(node_t * v)
{
    int i, slack;
    edge_t *e;
    for (i = 0; (e = ND_out(v).list[i]); i++) {
	if (!TREE_EDGE(e)) {
	    if (!SEQ(Low, ND_lim(aghead(e)), Lim)) {
		slack = SLACK(e);
		// prefer the smallest slack; take the first candidate seen
		// as a fallback
		if (slack < Slack || Enter == NULL) {
		    Enter = e;
		    Slack = slack;
		}
	    }
	} else if (ND_lim(aghead(e)) < ND_lim(v))
	    dfs_enter_outedge(aghead(e));
    }
    // also descend through tree in-edges, but only while a strictly tighter
    // candidate could still exist
    for (i = 0; (e = ND_tree_in(v).list[i]) && (Slack > 0); i++)
	if (ND_lim(agtail(e)) < ND_lim(v))
	    dfs_enter_outedge(agtail(e));
}
/// mirror of dfs_enter_outedge(): record in Enter/Slack the minimum-slack
/// non-tree in-edge whose tail lies outside [Low, Lim]
static void dfs_enter_inedge(node_t * v)
{
    int i, slack;
    edge_t *e;
    for (i = 0; (e = ND_in(v).list[i]); i++) {
	if (!TREE_EDGE(e)) {
	    if (!SEQ(Low, ND_lim(agtail(e)), Lim)) {
		slack = SLACK(e);
		if (slack < Slack || Enter == NULL) {
		    Enter = e;
		    Slack = slack;
		}
	    }
	} else if (ND_lim(agtail(e)) < ND_lim(v))
	    dfs_enter_inedge(agtail(e));
    }
    // descend through tree out-edges only while a tighter candidate may exist
    for (i = 0; (e = ND_tree_out(v).list[i]) && Slack > 0; i++)
	if (ND_lim(aghead(e)) < ND_lim(v))
	    dfs_enter_inedge(aghead(e));
}
  239. static edge_t *enter_edge(edge_t * e)
  240. {
  241. node_t *v;
  242. bool outsearch;
  243. /* v is the down node */
  244. if (ND_lim(agtail(e)) < ND_lim(aghead(e))) {
  245. v = agtail(e);
  246. outsearch = false;
  247. } else {
  248. v = aghead(e);
  249. outsearch = true;
  250. }
  251. Enter = NULL;
  252. Slack = INT_MAX;
  253. Low = ND_low(v);
  254. Lim = ND_lim(v);
  255. if (outsearch)
  256. dfs_enter_outedge(v);
  257. else
  258. dfs_enter_inedge(v);
  259. return Enter;
  260. }
/// (re)compute DFS ranges and cut values over the whole spanning tree,
/// rooted at the first node of G
static void init_cutvalues(void)
{
    dfs_range_init(GD_nlist(G), NULL, 1);
    dfs_cutval(GD_nlist(G), NULL);
}
/* functions for initial tight tree construction */
// borrow field from network simplex - overwritten in init_cutvalues() forgive me
#define ND_subtree(n) (subtree_t*)ND_par(n)
#define ND_subtree_set(n,value) (ND_par(n) = (edge_t*)value)
/// a maximal tight subtree found during initial tree construction; doubles as
/// a union-find set element while subtrees are merged
typedef struct subtree_s {
    node_t *rep; /* some node in the tree */
    int size; /* total tight tree size */
    size_t heap_index; ///< required to find non-min elts when merged
    struct subtree_s *par; /* union find */
} subtree_t;
/// is this subtree stored in an STheap?
static bool on_heap(const subtree_t *tree) {
    return tree->heap_index != SIZE_MAX;
}
  280. /* find initial tight subtrees */
  281. static int tight_subtree_search(Agnode_t *v, subtree_t *st)
  282. {
  283. Agedge_t *e;
  284. int i;
  285. int rv;
  286. rv = 1;
  287. ND_subtree_set(v,st);
  288. for (i = 0; (e = ND_in(v).list[i]); i++) {
  289. if (TREE_EDGE(e)) continue;
  290. if (ND_subtree(agtail(e)) == 0 && SLACK(e) == 0) {
  291. if (add_tree_edge(e) != 0) {
  292. return -1;
  293. }
  294. rv += tight_subtree_search(agtail(e),st);
  295. }
  296. }
  297. for (i = 0; (e = ND_out(v).list[i]); i++) {
  298. if (TREE_EDGE(e)) continue;
  299. if (ND_subtree(aghead(e)) == 0 && SLACK(e) == 0) {
  300. if (add_tree_edge(e) != 0) {
  301. return -1;
  302. }
  303. rv += tight_subtree_search(aghead(e),st);
  304. }
  305. }
  306. return rv;
  307. }
  308. static subtree_t *find_tight_subtree(Agnode_t *v)
  309. {
  310. subtree_t *rv;
  311. rv = gv_alloc(sizeof(subtree_t));
  312. rv->rep = v;
  313. rv->size = tight_subtree_search(v,rv);
  314. if (rv->size < 0) {
  315. free(rv);
  316. return NULL;
  317. }
  318. rv->par = rv;
  319. return rv;
  320. }
/// min-heap of subtrees, keyed on subtree size
typedef struct STheap_s {
    subtree_t **elt;
    size_t size;
} STheap_t;
  325. static subtree_t *STsetFind(Agnode_t *n0)
  326. {
  327. subtree_t *s0 = ND_subtree(n0);
  328. while (s0->par && s0->par != s0) {
  329. if (s0->par->par) {s0->par = s0->par->par;} /* path compression for the code weary */
  330. s0 = s0->par;
  331. }
  332. return s0;
  333. }
  334. static subtree_t *STsetUnion(subtree_t *s0, subtree_t *s1)
  335. {
  336. subtree_t *r0, *r1, *r;
  337. for (r0 = s0; r0->par && r0->par != r0; r0 = r0->par);
  338. for (r1 = s1; r1->par && r1->par != r1; r1 = r1->par);
  339. if (r0 == r1) return r0; /* safety code but shouldn't happen */
  340. assert(on_heap(r0) || on_heap(r1));
  341. if (!on_heap(r1)) r = r0;
  342. else if (!on_heap(r0)) r = r1;
  343. else if (r1->size < r0->size) r = r0;
  344. else r = r1;
  345. r0->par = r1->par = r;
  346. r->size = r0->size + r1->size;
  347. assert(on_heap(r));
  348. return r;
  349. }
  350. /* find tightest edge to another tree incident on the given tree */
  351. static Agedge_t *inter_tree_edge_search(Agnode_t *v, Agnode_t *from, Agedge_t *best)
  352. {
  353. int i;
  354. Agedge_t *e;
  355. subtree_t *ts = STsetFind(v);
  356. if (best && SLACK(best) == 0) return best;
  357. for (i = 0; (e = ND_out(v).list[i]); i++) {
  358. if (TREE_EDGE(e)) {
  359. if (aghead(e) == from) continue; // do not search back in tree
  360. best = inter_tree_edge_search(aghead(e),v,best); // search forward in tree
  361. }
  362. else {
  363. if (STsetFind(aghead(e)) != ts) { // encountered candidate edge
  364. if (best == 0 || SLACK(e) < SLACK(best)) best = e;
  365. }
  366. /* else ignore non-tree edge between nodes in the same tree */
  367. }
  368. }
  369. /* the following code must mirror the above, but for in-edges */
  370. for (i = 0; (e = ND_in(v).list[i]); i++) {
  371. if (TREE_EDGE(e)) {
  372. if (agtail(e) == from) continue;
  373. best = inter_tree_edge_search(agtail(e),v,best);
  374. }
  375. else {
  376. if (STsetFind(agtail(e)) != ts) {
  377. if (best == 0 || SLACK(e) < SLACK(best)) best = e;
  378. }
  379. }
  380. }
  381. return best;
  382. }
  383. static Agedge_t *inter_tree_edge(subtree_t *tree)
  384. {
  385. Agedge_t *rv;
  386. rv = inter_tree_edge_search(tree->rep, NULL, NULL);
  387. return rv;
  388. }
/// number of subtrees currently live on the heap
static size_t STheapsize(const STheap_t *heap) { return heap->size; }
  390. static void STheapify(STheap_t *heap, size_t i) {
  391. subtree_t **elt = heap->elt;
  392. do {
  393. const size_t left = 2 * (i + 1) - 1;
  394. const size_t right = 2 * (i + 1);
  395. size_t smallest = i;
  396. if (left < heap->size && elt[left]->size < elt[smallest]->size) smallest = left;
  397. if (right < heap->size && elt[right]->size < elt[smallest]->size) smallest = right;
  398. if (smallest != i) {
  399. subtree_t *temp;
  400. temp = elt[i];
  401. elt[i] = elt[smallest];
  402. elt[smallest] = temp;
  403. elt[i]->heap_index = i;
  404. elt[smallest]->heap_index = smallest;
  405. i = smallest;
  406. }
  407. else break;
  408. } while (i < heap->size);
  409. }
  410. static STheap_t *STbuildheap(subtree_t **elt, size_t size) {
  411. STheap_t *heap = gv_alloc(sizeof(STheap_t));
  412. heap->elt = elt;
  413. heap->size = size;
  414. for (size_t i = 0; i < heap->size; i++) heap->elt[i]->heap_index = i;
  415. for (size_t i = heap->size / 2; i != SIZE_MAX; i--)
  416. STheapify(heap,i);
  417. return heap;
  418. }
  419. static
  420. subtree_t *STextractmin(STheap_t *heap)
  421. {
  422. subtree_t *rv;
  423. rv = heap->elt[0];
  424. rv->heap_index = SIZE_MAX;
  425. // mark this as not participating in the heap anymore
  426. heap->elt[0] = heap->elt[heap->size - 1];
  427. heap->elt[0]->heap_index = 0;
  428. heap->elt[heap->size -1] = rv; /* needed to free storage later */
  429. heap->size--;
  430. STheapify(heap,0);
  431. return rv;
  432. }
  433. static
  434. void tree_adjust(Agnode_t *v, Agnode_t *from, int delta)
  435. {
  436. int i;
  437. Agedge_t *e;
  438. Agnode_t *w;
  439. ND_rank(v) = ND_rank(v) + delta;
  440. for (i = 0; (e = ND_tree_in(v).list[i]); i++) {
  441. w = agtail(e);
  442. if (w != from)
  443. tree_adjust(w, v, delta);
  444. }
  445. for (i = 0; (e = ND_tree_out(v).list[i]); i++) {
  446. w = aghead(e);
  447. if (w != from)
  448. tree_adjust(w, v, delta);
  449. }
  450. }
  451. static
  452. subtree_t *merge_trees(Agedge_t *e) /* entering tree edge */
  453. {
  454. int delta;
  455. subtree_t *t0, *t1, *rv;
  456. assert(!TREE_EDGE(e));
  457. t0 = STsetFind(agtail(e));
  458. t1 = STsetFind(aghead(e));
  459. if (!on_heap(t0)) { // move t0
  460. delta = SLACK(e);
  461. if (delta != 0)
  462. tree_adjust(t0->rep,NULL,delta);
  463. }
  464. else { // move t1
  465. delta = -SLACK(e);
  466. if (delta != 0)
  467. tree_adjust(t1->rep,NULL,delta);
  468. }
  469. if (add_tree_edge(e) != 0) {
  470. return NULL;
  471. }
  472. rv = STsetUnion(t0,t1);
  473. return rv;
  474. }
/* Construct initial tight tree. Graph must be connected, feasible.
 * Adjust ND_rank(v) as needed. add_tree_edge() on tight tree edges.
 * trees are basically lists of nodes stored in `node_queue_t`s.
 * Return 1 if input graph is not connected; 0 on success.
 */
static
int feasible_tree(void)
{
    Agedge_t *ee;
    size_t subtree_count = 0;
    STheap_t *heap = NULL;
    int error = 0;
    /* initialization */
    for (Agnode_t *n = GD_nlist(G); n != NULL; n = ND_next(n)) {
	ND_subtree_set(n,0);
    }
    subtree_t **tree = gv_calloc(N_nodes, sizeof(subtree_t *));
    /* given init_rank, find all tight subtrees */
    for (Agnode_t *n = GD_nlist(G); n != NULL; n = ND_next(n)) {
	if (ND_subtree(n) == 0) {
	    tree[subtree_count] = find_tight_subtree(n);
	    if (tree[subtree_count] == NULL) {
		error = 2;
		goto end;
	    }
	    subtree_count++;
	}
    }
    /* incrementally merge subtrees */
    heap = STbuildheap(tree,subtree_count);
    while (STheapsize(heap) > 1) {
	// pull the smallest subtree and attach it to a neighboring tree via
	// the tightest connecting edge
	subtree_t *tree0 = STextractmin(heap);
	if (!(ee = inter_tree_edge(tree0))) {
	    error = 1; // no connecting edge: the graph is not connected
	    break;
	}
	subtree_t *tree1 = merge_trees(ee);
	if (tree1 == NULL) {
	    error = 2;
	    break;
	}
	STheapify(heap,tree1->heap_index);
    }
end:
    free(heap);
    for (size_t i = 0; i < subtree_count; i++) free(tree[i]);
    free(tree);
    if (error) return error;
    // a spanning tree of a connected graph has exactly N_nodes - 1 edges
    assert(Tree_edge.size == N_nodes - 1);
    init_cutvalues();
    return 0;
}
  527. /* walk up from v to LCA(v,w), setting new cutvalues. */
  528. static Agnode_t *treeupdate(Agnode_t * v, Agnode_t * w, int cutvalue, int dir)
  529. {
  530. edge_t *e;
  531. int d;
  532. while (!SEQ(ND_low(v), ND_lim(w), ND_lim(v))) {
  533. e = ND_par(v);
  534. if (v == agtail(e))
  535. d = dir;
  536. else
  537. d = !dir;
  538. if (d)
  539. ED_cutvalue(e) += cutvalue;
  540. else
  541. ED_cutvalue(e) -= cutvalue;
  542. if (ND_lim(agtail(e)) > ND_lim(aghead(e)))
  543. v = agtail(e);
  544. else
  545. v = aghead(e);
  546. }
  547. return v;
  548. }
  549. static void rerank(Agnode_t * v, int delta)
  550. {
  551. int i;
  552. edge_t *e;
  553. ND_rank(v) -= delta;
  554. for (i = 0; (e = ND_tree_out(v).list[i]); i++)
  555. if (e != ND_par(v))
  556. rerank(aghead(e), delta);
  557. for (i = 0; (e = ND_tree_in(v).list[i]); i++)
  558. if (e != ND_par(v))
  559. rerank(agtail(e), delta);
  560. }
/* e is the tree edge that is leaving and f is the nontree edge that
 * is entering. compute new cut values, ranks, and exchange e and f.
 */
static int
update(edge_t * e, edge_t * f)
{
    int cutvalue, delta;
    Agnode_t *lca;
    delta = SLACK(f);
    /* "for (v = in nodes in tail side of e) do ND_rank(v) -= delta;" */
    if (delta > 0) {
	// rerank the cheaper side of the cut: a side whose endpoint touches
	// only one tree edge is a leaf component and is cheapest to move
	size_t s = ND_tree_in(agtail(e)).size + ND_tree_out(agtail(e)).size;
	if (s == 1)
	    rerank(agtail(e), delta);
	else {
	    s = ND_tree_in(aghead(e)).size + ND_tree_out(aghead(e)).size;
	    if (s == 1)
		rerank(aghead(e), -delta);
	    else {
		if (ND_lim(agtail(e)) < ND_lim(aghead(e)))
		    rerank(agtail(e), delta);
		else
		    rerank(aghead(e), -delta);
	    }
	}
    }
    cutvalue = ED_cutvalue(e);
    // propagate the removed edge's cut value from both endpoints of f up to
    // their lowest common ancestor; both walks must meet at the same node
    lca = treeupdate(agtail(f), aghead(f), cutvalue, 1);
    if (treeupdate(aghead(f), agtail(f), cutvalue, 0) != lca) {
	agerrorf("update: mismatched lca in treeupdates\n");
	return 2;
    }
    // invalidate paths from LCA till affected nodes:
    int lca_low = ND_low(lca);
    invalidate_path(lca, aghead(f));
    invalidate_path(lca, agtail(f));
    ED_cutvalue(f) = -cutvalue;
    ED_cutvalue(e) = 0;
    exchange_tree_edges(e, f);
    // recompute DFS ranges only for the subtree below the LCA
    dfs_range(lca, ND_par(lca), lca_low);
    return 0;
}
  603. static int scan_and_normalize(void) {
  604. node_t *n;
  605. int Minrank = INT_MAX;
  606. int Maxrank = INT_MIN;
  607. for (n = GD_nlist(G); n; n = ND_next(n)) {
  608. if (ND_node_type(n) == NORMAL) {
  609. Minrank = MIN(Minrank, ND_rank(n));
  610. Maxrank = MAX(Maxrank, ND_rank(n));
  611. }
  612. }
  613. for (n = GD_nlist(G); n; n = ND_next(n))
  614. ND_rank(n) -= Minrank;
  615. Maxrank -= Minrank;
  616. return Maxrank;
  617. }
/// release the global tree node/edge lists and reset them to empty
static void reset_lists(void) {
    free(Tree_node.list);
    Tree_node = (nlist_t){0};
    free(Tree_edge.list);
    Tree_edge = (elist){0};
}
  624. static void
  625. freeTreeList (graph_t* g)
  626. {
  627. node_t *n;
  628. for (n = GD_nlist(g); n; n = ND_next(n)) {
  629. free_list(ND_tree_in(n));
  630. free_list(ND_tree_out(n));
  631. ND_mark(n) = false;
  632. }
  633. reset_lists();
  634. }
  635. static void LR_balance(void)
  636. {
  637. int delta;
  638. edge_t *e, *f;
  639. for (size_t i = 0; i < Tree_edge.size; i++) {
  640. e = Tree_edge.list[i];
  641. if (ED_cutvalue(e) == 0) {
  642. f = enter_edge(e);
  643. if (f == NULL)
  644. continue;
  645. delta = SLACK(f);
  646. if (delta <= 1)
  647. continue;
  648. if (ND_lim(agtail(e)) < ND_lim(aghead(e)))
  649. rerank(agtail(e), delta / 2);
  650. else
  651. rerank(aghead(e), -delta / 2);
  652. }
  653. }
  654. freeTreeList (G);
  655. }
  656. static int decreasingrankcmpf(const void *x, const void *y) {
  657. // Suppress Clang/GCC -Wcast-qual warning. Casting away const here is acceptable
  658. // as the later usage is const. We need the cast because the macros use
  659. // non-const pointers for genericity.
  660. #ifdef __GNUC__
  661. #pragma GCC diagnostic push
  662. #pragma GCC diagnostic ignored "-Wcast-qual"
  663. #endif
  664. node_t **n0 = (node_t**)x;
  665. node_t **n1 = (node_t**)y;
  666. #ifdef __GNUC__
  667. #pragma GCC diagnostic pop
  668. #endif
  669. if (ND_rank(*n1) < ND_rank(*n0)) {
  670. return -1;
  671. }
  672. if (ND_rank(*n1) > ND_rank(*n0)) {
  673. return 1;
  674. }
  675. return 0;
  676. }
  677. static int increasingrankcmpf(const void *x, const void *y) {
  678. // Suppress Clang/GCC -Wcast-qual warning. Casting away const here is acceptable
  679. // as the later usage is const. We need the cast because the macros use
  680. // non-const pointers for genericity.
  681. #ifdef __GNUC__
  682. #pragma GCC diagnostic push
  683. #pragma GCC diagnostic ignored "-Wcast-qual"
  684. #endif
  685. node_t **n0 = (node_t **)x;
  686. node_t **n1 = (node_t **)y;
  687. #ifdef __GNUC__
  688. #pragma GCC diagnostic pop
  689. #endif
  690. if (ND_rank(*n0) < ND_rank(*n1)) {
  691. return -1;
  692. }
  693. if (ND_rank(*n0) > ND_rank(*n1)) {
  694. return 1;
  695. }
  696. return 0;
  697. }
/// balance top-bottom: move nodes whose in-weight equals their out-weight to
/// the rank selected by the graph's "TBbalance" attribute ("min"/"max"), or
/// otherwise to the least populated feasible rank
static void TB_balance(void)
{
    node_t *n;
    edge_t *e;
    int low, high, choice;
    int inweight, outweight;
    int adj = 0; // 0: balance by population; 1: pull to min rank; 2: push to max
    char *s;
    const int Maxrank = scan_and_normalize();
    /* find nodes that are not tight and move to less populated ranks */
    assert(Maxrank >= 0);
    // nrank[r] counts NORMAL nodes currently on rank r
    int *nrank = gv_calloc((size_t)Maxrank + 1, sizeof(int));
    if ( (s = agget(G,"TBbalance")) ) {
	if (streq(s,"min")) adj = 1;
	else if (streq(s,"max")) adj = 2;
	// sources go to rank 0 under "min"; sinks to Maxrank under "max"
	if (adj) for (n = GD_nlist(G); n; n = ND_next(n))
	    if (ND_node_type(n) == NORMAL) {
		if (ND_in(n).size == 0 && adj == 1) {
		    ND_rank(n) = 0;
		}
		if (ND_out(n).size == 0 && adj == 2) {
		    ND_rank(n) = Maxrank;
		}
	    }
    }
    // reuse Tree_node as scratch space for a rank-sorted list of all nodes
    size_t ii;
    for (ii = 0, n = GD_nlist(G); n; ii++, n = ND_next(n)) {
	Tree_node.list[ii] = n;
    }
    Tree_node.size = ii;
    qsort(Tree_node.list, Tree_node.size, sizeof(Tree_node.list[0]),
	  adj > 1 ? decreasingrankcmpf: increasingrankcmpf);
    for (size_t i = 0; i < Tree_node.size; i++) {
	n = Tree_node.list[i];
	if (ND_node_type(n) == NORMAL)
	    nrank[ND_rank(n)]++;
    }
    for (ii = 0; ii < Tree_node.size; ii++) {
	n = Tree_node.list[ii];
	if (ND_node_type(n) != NORMAL)
	    continue;
	inweight = outweight = 0;
	low = 0;
	high = Maxrank;
	// [low, high] becomes the feasible rank interval for n given its edges
	for (size_t i = 0; (e = ND_in(n).list[i]); i++) {
	    inweight += ED_weight(e);
	    low = MAX(low, ND_rank(agtail(e)) + ED_minlen(e));
	}
	for (size_t i = 0; (e = ND_out(n).list[i]); i++) {
	    outweight += ED_weight(e);
	    high = MIN(high, ND_rank(aghead(e)) - ED_minlen(e));
	}
	if (low < 0)
	    low = 0; /* vnodes can have ranks < 0 */
	// only nodes with balanced in- and out-weight may move without
	// changing the total weighted edge length
	if (adj) {
	    if (inweight == outweight)
		ND_rank(n) = (adj == 1? low : high);
	}
	else {
	    if (inweight == outweight) {
		// pick the least populated rank in the feasible interval
		choice = low;
		for (int i = low + 1; i <= high; i++)
		    if (nrank[i] < nrank[choice])
			choice = i;
		nrank[ND_rank(n)]--;
		nrank[choice]++;
		ND_rank(n) = choice;
	    }
	}
	// clear per-node tree state while iterating anyway
	free_list(ND_tree_in(n));
	free_list(ND_tree_out(n));
	ND_mark(n) = false;
    }
    free(nrank);
}
/// initialize global ranking state for graph g: count nodes and edges, reset
/// marks, cut values and priorities, and allocate tree bookkeeping lists
///
/// @return true if the current ranks already satisfy every minlen constraint
static bool init_graph(graph_t *g) {
    node_t *n;
    edge_t *e;
    G = g;
    N_nodes = N_edges = S_i = 0;
    for (n = GD_nlist(g); n; n = ND_next(n)) {
	ND_mark(n) = false;
	N_nodes++;
	for (size_t i = 0; (e = ND_out(n).list[i]); i++)
	    N_edges++;
    }
    Tree_node.list = gv_calloc(N_nodes, sizeof(node_t *));
    Tree_edge.list = gv_calloc(N_nodes, sizeof(edge_t *));
    bool feasible = true;
    for (n = GD_nlist(g); n; n = ND_next(n)) {
	// ND_priority counts unprocessed in-edges (consumed by init_rank)
	ND_priority(n) = 0;
	size_t i;
	for (i = 0; (e = ND_in(n).list[i]); i++) {
	    ND_priority(n)++;
	    ED_cutvalue(e) = 0;
	    ED_tree_index(e) = -1;
	    // an edge shorter than its minimum length makes the current
	    // ranking infeasible
	    if (ND_rank(aghead(e)) - ND_rank(agtail(e)) < ED_minlen(e))
		feasible = false;
	}
	// tree lists are sized for the worst case (all edges in the tree)
	// plus a NULL terminator
	ND_tree_in(n).list = gv_calloc(i + 1, sizeof(edge_t *));
	ND_tree_in(n).size = 0;
	for (i = 0; (e = ND_out(n).list[i]); i++);
	ND_tree_out(n).list = gv_calloc(i + 1, sizeof(edge_t *));
	ND_tree_out(n).size = 0;
    }
    return feasible;
}
  805. /* graphSize:
  806. * Compute no. of nodes and edges in the graph
  807. */
  808. static void
  809. graphSize (graph_t * g, int* nn, int* ne)
  810. {
  811. int i, nnodes, nedges;
  812. node_t *n;
  813. edge_t *e;
  814. nnodes = nedges = 0;
  815. for (n = GD_nlist(g); n; n = ND_next(n)) {
  816. nnodes++;
  817. for (i = 0; (e = ND_out(n).list[i]); i++) {
  818. nedges++;
  819. }
  820. }
  821. *nn = nnodes;
  822. *ne = nedges;
  823. }
/* rank:
 * Apply network simplex to rank the nodes in a graph.
 * Uses ED_minlen as the internode constraint: if a->b with minlen=ml,
 * rank b - rank a >= ml.
 * Assumes the graph has the following additional structure:
 * A list of all nodes, starting at GD_nlist, and linked using ND_next.
 * Out and in edges lists stored in ND_out and ND_in, even if the node
 * doesn't have any out or in edges.
 * The node rank values are stored in ND_rank.
 * Returns 0 if successful; returns 1 if the graph was not connected;
 * returns 2 if something seriously wrong;
 */
int rank2(graph_t * g, int balance, int maxiter, int search_size)
{
    int iter = 0;
    char *ns = "network simplex: ";
    edge_t *e, *f;
#ifdef DEBUG
    check_cycles(g);
#endif
    if (Verbose) {
	int nn, ne;
	graphSize (g, &nn, &ne);
	fprintf(stderr, "%s %d nodes %d edges maxiter=%d balance=%d\n", ns,
	    nn, ne, maxiter, balance);
	start_timer();
    }
    bool feasible = init_graph(g);
    if (!feasible)
	init_rank(); // current ranks violate a constraint; recompute from scratch
    if (search_size >= 0)
	Search_size = search_size;
    else
	Search_size = SEARCHSIZE;
    {
	int err = feasible_tree();
	if (err != 0) {
	    freeTreeList (g);
	    return err;
	}
    }
    if (maxiter <= 0) {
	freeTreeList (g);
	return 0;
    }
    // simplex iterations: repeatedly replace a tree edge with negative cut
    // value by the tightest non-tree edge reconnecting the two components
    while ((e = leave_edge())) {
	int err;
	f = enter_edge(e);
	err = update(e, f);
	if (err != 0) {
	    freeTreeList (g);
	    return err;
	}
	iter++;
	if (Verbose && iter % 100 == 0) {
	    if (iter % 1000 == 100)
		fputs(ns, stderr);
	    fprintf(stderr, "%d ", iter);
	    if (iter % 1000 == 0)
		fputc('\n', stderr);
	}
	if (iter >= maxiter)
	    break;
    }
    // post-processing chosen by the balance mode
    switch (balance) {
    case 1:
	TB_balance();
	reset_lists();
	break;
    case 2:
	LR_balance();
	break;
    default:
	(void)scan_and_normalize();
	freeTreeList (G);
	break;
    }
    if (Verbose) {
	if (iter >= 100)
	    fputc('\n', stderr);
	fprintf(stderr, "%s%" PRISIZE_T " nodes %" PRISIZE_T " edges %d iter %.2f sec\n",
	    ns, N_nodes, N_edges, iter, elapsed_sec());
    }
    return 0;
}
  909. int rank(graph_t * g, int balance, int maxiter)
  910. {
  911. char *s;
  912. int search_size;
  913. if ((s = agget(g, "searchsize")))
  914. search_size = atoi(s);
  915. else
  916. search_size = SEARCHSIZE;
  917. return rank2 (g, balance, maxiter, search_size);
  918. }
  919. /* set cut value of f, assuming values of edges on one side were already set */
  920. static void x_cutval(edge_t * f)
  921. {
  922. node_t *v;
  923. edge_t *e;
  924. int i, sum, dir;
  925. /* set v to the node on the side of the edge already searched */
  926. if (ND_par(agtail(f)) == f) {
  927. v = agtail(f);
  928. dir = 1;
  929. } else {
  930. v = aghead(f);
  931. dir = -1;
  932. }
  933. sum = 0;
  934. for (i = 0; (e = ND_out(v).list[i]); i++)
  935. if (sadd_overflow(sum, x_val(e, v, dir), &sum)) {
  936. agerrorf("overflow when computing edge weight sum\n");
  937. graphviz_exit(EXIT_FAILURE);
  938. }
  939. for (i = 0; (e = ND_in(v).list[i]); i++)
  940. if (sadd_overflow(sum, x_val(e, v, dir), &sum)) {
  941. agerrorf("overflow when computing edge weight sum\n");
  942. graphviz_exit(EXIT_FAILURE);
  943. }
  944. ED_cutvalue(f) = sum;
  945. }
  946. static int x_val(edge_t * e, node_t * v, int dir)
  947. {
  948. node_t *other;
  949. int d, rv, f;
  950. if (agtail(e) == v)
  951. other = aghead(e);
  952. else
  953. other = agtail(e);
  954. if (!(SEQ(ND_low(v), ND_lim(other), ND_lim(v)))) {
  955. f = 1;
  956. rv = ED_weight(e);
  957. } else {
  958. f = 0;
  959. if (TREE_EDGE(e))
  960. rv = ED_cutvalue(e);
  961. else
  962. rv = 0;
  963. rv -= ED_weight(e);
  964. }
  965. if (dir > 0) {
  966. if (aghead(e) == v)
  967. d = 1;
  968. else
  969. d = -1;
  970. } else {
  971. if (agtail(e) == v)
  972. d = 1;
  973. else
  974. d = -1;
  975. }
  976. if (f)
  977. d = -d;
  978. if (d < 0)
  979. rv = -rv;
  980. return rv;
  981. }
  982. static void dfs_cutval(node_t * v, edge_t * par)
  983. {
  984. int i;
  985. edge_t *e;
  986. for (i = 0; (e = ND_tree_out(v).list[i]); i++)
  987. if (e != par)
  988. dfs_cutval(aghead(e), e);
  989. for (i = 0; (e = ND_tree_in(v).list[i]); i++)
  990. if (e != par)
  991. dfs_cutval(agtail(e), e);
  992. if (par)
  993. x_cutval(par);
  994. }
  995. /*
  996. * Initializes DFS range attributes (par, low, lim) over tree nodes such that:
  997. * ND_par(n) - parent tree edge
  998. * ND_low(n) - min DFS index for nodes in sub-tree (>= 1)
  999. * ND_lim(n) - max DFS index for nodes in sub-tree
  1000. */
  1001. static int dfs_range_init(node_t *v, edge_t *par, int low) {
  1002. int i, lim;
  1003. lim = low;
  1004. ND_par(v) = par;
  1005. ND_low(v) = low;
  1006. for (i = 0; ND_tree_out(v).list[i]; i++) {
  1007. edge_t *e = ND_tree_out(v).list[i];
  1008. if (e != par) {
  1009. lim = dfs_range_init(aghead(e), e, lim);
  1010. }
  1011. }
  1012. for (i = 0; ND_tree_in(v).list[i]; i++) {
  1013. edge_t *e = ND_tree_in(v).list[i];
  1014. if (e != par) {
  1015. lim = dfs_range_init(agtail(e), e, lim);
  1016. }
  1017. }
  1018. ND_lim(v) = lim;
  1019. return lim + 1;
  1020. }
  1021. /*
  1022. * Incrementally updates DFS range attributes
  1023. */
  1024. static int dfs_range(node_t * v, edge_t * par, int low)
  1025. {
  1026. edge_t *e;
  1027. int i, lim;
  1028. if (ND_par(v) == par && ND_low(v) == low) {
  1029. return ND_lim(v) + 1;
  1030. }
  1031. lim = low;
  1032. ND_par(v) = par;
  1033. ND_low(v) = low;
  1034. for (i = 0; (e = ND_tree_out(v).list[i]); i++)
  1035. if (e != par)
  1036. lim = dfs_range(aghead(e), e, lim);
  1037. for (i = 0; (e = ND_tree_in(v).list[i]); i++)
  1038. if (e != par)
  1039. lim = dfs_range(agtail(e), e, lim);
  1040. ND_lim(v) = lim;
  1041. return lim + 1;
  1042. }
  1043. #ifdef DEBUG
  1044. void tchk(void)
  1045. {
  1046. int i;
  1047. node_t *n;
  1048. edge_t *e;
  1049. size_t n_cnt = 0;
  1050. size_t e_cnt = 0;
  1051. for (n = agfstnode(G); n; n = agnxtnode(G, n)) {
  1052. n_cnt++;
  1053. for (i = 0; (e = ND_tree_out(n).list[i]); i++) {
  1054. e_cnt++;
  1055. if (SLACK(e) > 0)
  1056. fprintf(stderr, "not a tight tree %p", e);
  1057. }
  1058. }
  1059. if (n_cnt != Tree_node.size || e_cnt != Tree_edge.size)
  1060. fprintf(stderr, "something missing\n");
  1061. }
  1062. void check_fast_node(node_t * n)
  1063. {
  1064. node_t *nptr;
  1065. nptr = GD_nlist(agraphof(n));
  1066. while (nptr && nptr != n)
  1067. nptr = ND_next(nptr);
  1068. assert(nptr != NULL);
  1069. }
  1070. static void dump_node(FILE *sink, node_t *n) {
  1071. if (ND_node_type(n)) {
  1072. fprintf(sink, "%p", n);
  1073. }
  1074. else
  1075. fputs(agnameof(n), sink);
  1076. }
  1077. static void dump_graph (graph_t* g)
  1078. {
  1079. int i;
  1080. edge_t *e;
  1081. node_t *n,*w;
  1082. FILE* fp = fopen ("ns.gv", "w");
  1083. fprintf (fp, "digraph \"%s\" {\n", agnameof(g));
  1084. for (n = GD_nlist(g); n; n = ND_next(n)) {
  1085. fputs(" \"", fp);
  1086. dump_node(fp, n);
  1087. fputs("\"\n", fp);
  1088. }
  1089. for (n = GD_nlist(g); n; n = ND_next(n)) {
  1090. for (i = 0; (e = ND_out(n).list[i]); i++) {
  1091. fputs(" \"", fp);
  1092. dump_node(fp, n);
  1093. fputs("\"", fp);
  1094. w = aghead(e);
  1095. fputs(" -> \"", fp);
  1096. dump_node(fp, w);
  1097. fputs("\"\n", fp);
  1098. }
  1099. }
  1100. fprintf (fp, "}\n");
  1101. fclose (fp);
  1102. }
  1103. static node_t *checkdfs(graph_t* g, node_t * n)
  1104. {
  1105. edge_t *e;
  1106. node_t *w,*x;
  1107. int i;
  1108. if (ND_mark(n))
  1109. return 0;
  1110. ND_mark(n) = true;
  1111. ND_onstack(n) = true;
  1112. for (i = 0; (e = ND_out(n).list[i]); i++) {
  1113. w = aghead(e);
  1114. if (ND_onstack(w)) {
  1115. dump_graph (g);
  1116. fprintf(stderr, "cycle: last edge %lx %s(%lx) %s(%lx)\n",
  1117. (uint64_t)e,
  1118. agnameof(n), (uint64_t)n,
  1119. agnameof(w), (uint64_t)w);
  1120. return w;
  1121. }
  1122. else {
  1123. if (!ND_mark(w)) {
  1124. x = checkdfs(g, w);
  1125. if (x) {
  1126. fprintf(stderr,"unwind %lx %s(%lx)\n",
  1127. (uint64_t)e,
  1128. agnameof(n), (uint64_t)n);
  1129. if (x != n) return x;
  1130. fprintf(stderr,"unwound to root\n");
  1131. fflush(stderr);
  1132. abort();
  1133. return 0;
  1134. }
  1135. }
  1136. }
  1137. }
  1138. ND_onstack(n) = false;
  1139. return 0;
  1140. }
  1141. void check_cycles(graph_t * g)
  1142. {
  1143. node_t *n;
  1144. for (n = GD_nlist(g); n; n = ND_next(n)) {
  1145. ND_mark(n) = false;
  1146. ND_onstack(n) = false;
  1147. }
  1148. for (n = GD_nlist(g); n; n = ND_next(n))
  1149. checkdfs(g, n);
  1150. }
  1151. #endif /* DEBUG */