/*************************************************************************
 * Copyright (c) 2011 AT&T Intellectual Property
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * https://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors: Details at https://graphviz.org
 *************************************************************************/

#include <assert.h>
#include <dotgen/dot.h>
#include <stdbool.h>
#include <stddef.h>
#include <util/alloc.h>

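/* map_interclust_node:
 * Return n itself unless n belongs to a cluster that has not yet been
 * expanded; in that case return the cluster's rank leader on n's rank,
 * which stands in for the cluster when routing inter-cluster edges.
 */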
static node_t*
map_interclust_node(node_t * n)
{
    node_t *rv;

    if ((ND_clust(n) == NULL) || GD_expanded(ND_clust(n)))
        rv = n;
    else
        rv = GD_rankleader(ND_clust(n))[ND_rank(n)];
    return rv;
}

/* make d slots starting at position pos (where 1 already exists) */
static void
make_slots(graph_t * root, int r, int pos, int d)
{
    int i;
    node_t *v, **vlist;

    vlist = GD_rank(root)[r].v;
    if (d <= 0) {
        for (i = pos - d + 1; i < GD_rank(root)[r].n; i++) {
            v = vlist[i];
            ND_order(v) = i + d - 1;
            vlist[ND_order(v)] = v;
        }
        for (i = GD_rank(root)[r].n + d - 1; i < GD_rank(root)[r].n; i++)
            vlist[i] = NULL;
    } else {
        /*assert(ND_rank(root)[r].n + d - 1 <= ND_rank(root)[r].an);*/
        for (i = GD_rank(root)[r].n - 1; i > pos; i--) {
            v = vlist[i];
            ND_order(v) = i + d - 1;
            vlist[ND_order(v)] = v;
        }
        for (i = pos + 1; i < pos + d; i++)
            vlist[i] = NULL;
    }
    GD_rank(root)[r].n += d - 1;
}

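/* clone_vn:
 * Duplicate virtual node vn, opening a new slot immediately to its right
 * on the same rank. The clone copies vn's rank and left/right widths.
 */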
static node_t*
clone_vn(graph_t * g, node_t * vn)
{
    node_t *rv;
    int r;

    r = ND_rank(vn);
    make_slots(g, r, ND_order(vn), 2);
    rv = virtual_node(g);
    ND_lw(rv) = ND_lw(vn);
    ND_rw(rv) = ND_rw(vn);
    ND_rank(rv) = ND_rank(vn);
    ND_order(rv) = ND_order(vn) + 1;
    GD_rank(g)[r].v[ND_order(rv)] = rv;
    return rv;
}

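/* map_path:
 * Re-route the virtual chain ve of the original edge orig so that it runs
 * from 'from' to 'to', marking its edges with the given type. If the chain
 * is shared (ED_count > 1), a private chain is split off by cloning the
 * shared virtual nodes; otherwise the existing chain is spliced onto the
 * new endpoints, merging with an equivalent fast edge when the endpoints
 * lie on adjacent ranks.
 */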
static void
map_path(node_t * from, node_t * to, edge_t * orig, edge_t * ve, int type)
{
    int r;
    node_t *u, *v;
    edge_t *e;

    assert(ND_rank(from) < ND_rank(to));

    if ((agtail(ve) == from) && (aghead(ve) == to))
        return;

    if (ED_count(ve) > 1) {
        ED_to_virt(orig) = NULL;
        if (ND_rank(to) - ND_rank(from) == 1) {
            if ((e = find_fast_edge(from, to)) && ports_eq(orig, e)) {
                merge_oneway(orig, e);
                if ((ND_node_type(from) == NORMAL)
                    && (ND_node_type(to) == NORMAL))
                    other_edge(orig);
                return;
            }
        }
        u = from;
        for (r = ND_rank(from); r < ND_rank(to); r++) {
            if (r < ND_rank(to) - 1)
                v = clone_vn(dot_root(from), aghead(ve));
            else
                v = to;
            e = virtual_edge(u, v, orig);
            ED_edge_type(e) = type;
            u = v;
            ED_count(ve)--;
            ve = ND_out(aghead(ve)).list[0];
        }
    } else {
        if (ND_rank(to) - ND_rank(from) == 1) {
            if ((ve = find_fast_edge(from, to)) && ports_eq(orig, ve)) {
                /*ED_to_orig(ve) = orig; */
                ED_to_virt(orig) = ve;
                ED_edge_type(ve) = type;
                ED_count(ve)++;
                if ((ND_node_type(from) == NORMAL)
                    && (ND_node_type(to) == NORMAL))
                    other_edge(orig);
            } else {
                ED_to_virt(orig) = NULL;
                ve = virtual_edge(from, to, orig);
                ED_edge_type(ve) = type;
            }
        }
        if (ND_rank(to) - ND_rank(from) > 1) {
            if (agtail(ve) != from) {
                ED_to_virt(orig) = NULL;
                e = ED_to_virt(orig) = virtual_edge(from, aghead(ve), orig);
                delete_fast_edge(ve);
            } else
                e = ve;
            while (ND_rank(aghead(e)) != ND_rank(to))
                e = ND_out(aghead(e)).list[0];
            if (aghead(e) != to) {
                ve = e;
                e = virtual_edge(agtail(e), to, orig);
                ED_edge_type(e) = type;
                delete_fast_edge(ve);
            }
        }
    }
}

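/* make_interclust_chain:
 * Map the endpoints of orig onto their cluster representatives (rank
 * leaders) and re-route its virtual chain between them. The chain is
 * typed CLUSTER_EDGE if either endpoint was remapped, VIRTUAL otherwise.
 */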
static void make_interclust_chain(node_t * from, node_t * to, edge_t * orig) {
    int newtype;
    node_t *u, *v;

    u = map_interclust_node(from);
    v = map_interclust_node(to);
    if ((u == from) && (v == to))
        newtype = VIRTUAL;
    else
        newtype = CLUSTER_EDGE;
    map_path(u, v, orig, ED_to_virt(orig), newtype);
}

/*
 * attach and install edges between clusters.
 * essentially, class2() for interclust edges.
 */
static void interclexp(graph_t * subg)
{
    graph_t *g;
    node_t *n;
    edge_t *e, *prev, *next;

    g = dot_root(subg);
    for (n = agfstnode(subg); n; n = agnxtnode(subg, n)) {

        /* N.B. n may be in a sub-cluster of subg */
        prev = NULL;
        for (e = agfstedge(g, n); e; e = next) {
            next = agnxtedge(g, e, n);
            if (agcontains(subg, e))
                continue;

            /* canonicalize edge */
            e = AGMKOUT(e);

            /* short/flat multi edges */
            if (mergeable(prev, e)) {
                if (ND_rank(agtail(e)) == ND_rank(aghead(e)))
                    ED_to_virt(e) = prev;
                else
                    ED_to_virt(e) = NULL;
                if (ED_to_virt(prev) == NULL)
                    continue;   /* internal edge */
                ED_to_virt(e) = NULL;
                merge_chain(subg, e, ED_to_virt(prev), false);
                safe_other_edge(e);
                continue;
            }

            /* flat edges */
            if (ND_rank(agtail(e)) == ND_rank(aghead(e))) {
                edge_t* fe;
                if ((fe = find_flat_edge(agtail(e), aghead(e))) == NULL) {
                    flat_edge(g, e);
                    prev = e;
                } else if (e != fe) {
                    safe_other_edge(e);
                    if (!ED_to_virt(e)) merge_oneway(e, fe);
                }
                continue;
            }

            /* forward edges */
            if (ND_rank(aghead(e)) > ND_rank(agtail(e))) {
                make_interclust_chain(agtail(e), aghead(e), e);
                prev = e;
                continue;
            }

            /* backward edges */
            else {
                /*
                 * I think that make_interclust_chain should call other_edge(e)
                 * anyway:
                 *   if (agcontains(subg, agtail(e))
                 *       && agfindedge(g, aghead(e), agtail(e))) other_edge(e);
                 */
                make_interclust_chain(aghead(e), agtail(e), e);
                prev = e;
            }
        }
    }
}

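/* merge_ranks:
 * Splice the cluster's rank arrays into the root graph's ranks at the
 * position of each rank leader, moving the nodes onto the root's fast
 * node list. Affected ranks are invalidated for re-sorting and the
 * cluster is marked as expanded.
 */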
static void
merge_ranks(graph_t * subg)
{
    int i, d, r, pos, ipos;
    node_t *v;
    graph_t *root;

    assert(GD_minrank(subg) <= GD_maxrank(subg) && "corrupted rank bounds");

    root = dot_root(subg);
    if (GD_minrank(subg) > 0)
        GD_rank(root)[GD_minrank(subg) - 1].valid = false;
    for (r = GD_minrank(subg); r <= GD_maxrank(subg); r++) {
        d = GD_rank(subg)[r].n;
        ipos = pos = ND_order(GD_rankleader(subg)[r]);
        make_slots(root, r, pos, d);
        for (i = 0; i < GD_rank(subg)[r].n; i++) {
            v = GD_rank(root)[r].v[pos] = GD_rank(subg)[r].v[i];
            ND_order(v) = pos++;
            /* real nodes automatically have v->root = root graph */
            if (ND_node_type(v) == VIRTUAL)
                v->root = agroot(root);
            delete_fast_node(subg, v);
            fast_node(root, v);
        }
        GD_rank(subg)[r].v = GD_rank(root)[r].v + ipos;
        GD_rank(root)[r].valid = false;
    }
    if (r < GD_maxrank(root))
        GD_rank(root)[r].valid = false;
    GD_expanded(subg) = true;
}

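/* remove_rankleaders:
 * Dismantle the cluster's skeleton: delete and free the virtual edge
 * chain attached to each rank leader, then free the leader nodes and
 * clear the GD_rankleader entries.
 */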
static void
remove_rankleaders(graph_t * g)
{
    int r;
    node_t *v;
    edge_t *e;

    for (r = GD_minrank(g); r <= GD_maxrank(g); r++) {
        v = GD_rankleader(g)[r];

        /* remove the entire chain */
        while ((e = ND_out(v).list[0])) {
            delete_fast_edge(e);
            free(e->base.data);
            free(e);
        }
        while ((e = ND_in(v).list[0])) {
            delete_fast_edge(e);
            free(e);
        }
        delete_fast_node(dot_root(g), v);
        free(ND_in(v).list);
        free(ND_out(v).list);
        free(v->base.data);
        free(v);
        GD_rankleader(g)[r] = NULL;
    }
}

/* delete virtual nodes of a cluster, and install real nodes or sub-clusters */
void expand_cluster(graph_t * subg)
{
    /* build internal structure of the cluster */
    class2(subg);
    GD_comp(subg).size = 1;
    GD_comp(subg).list[0] = GD_nlist(subg);
    allocate_ranks(subg);
    ints_t scratch = {0};
    build_ranks(subg, 0, &scratch);
    ints_free(&scratch);
    merge_ranks(subg);

    /* build external structure of the cluster */
    interclexp(subg);
    remove_rankleaders(subg);
}

/* this function marks every node in <g> with its top-level cluster under <g> */
void mark_clusters(graph_t * g)
{
    int c;
    node_t *n, *nn, *vn;
    edge_t *orig, *e;
    graph_t *clust;

    /* remove sub-clusters below this level */
    for (n = agfstnode(g); n; n = agnxtnode(g, n)) {
        if (ND_ranktype(n) == CLUSTER)
            UF_singleton(n);
        ND_clust(n) = NULL;
    }

    for (c = 1; c <= GD_n_cluster(g); c++) {
        clust = GD_clust(g)[c];
        for (n = agfstnode(clust); n; n = nn) {
            nn = agnxtnode(clust, n);
            if (ND_ranktype(n) != NORMAL) {
                agwarningf(
                    "%s was already in a rankset, deleted from cluster %s\n",
                    agnameof(n), agnameof(g));
                agdelete(clust, n);
                continue;
            }
            UF_setname(n, GD_leader(clust));
            ND_clust(n) = clust;
            ND_ranktype(n) = CLUSTER;

            /* here we mark the vnodes of edges in the cluster */
            for (orig = agfstout(clust, n); orig;
                 orig = agnxtout(clust, orig)) {
                if ((e = ED_to_virt(orig))) {
                    while (e && ND_node_type(vn = aghead(e)) == VIRTUAL) {
                        ND_clust(vn) = clust;
                        e = ND_out(aghead(e)).list[0];
                        /* trouble if concentrators and clusters are mixed */
                    }
                }
            }
        }
    }
}

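/* build_skeleton:
 * Build the cluster skeleton: one virtual rank leader per rank of subg,
 * chained by virtual edges whose crossing penalty is scaled by CL_CROSS.
 * The leaders' union-find sizes and the chain's edge counts are then set
 * to reflect the nodes and edges the cluster contains.
 */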
void build_skeleton(graph_t * g, graph_t * subg)
{
    int r;
    node_t *v, *prev, *rl;
    edge_t *e;

    prev = NULL;
    GD_rankleader(subg) = gv_calloc(GD_maxrank(subg) + 2, sizeof(node_t*));
    for (r = GD_minrank(subg); r <= GD_maxrank(subg); r++) {
        v = GD_rankleader(subg)[r] = virtual_node(g);
        ND_rank(v) = r;
        ND_ranktype(v) = CLUSTER;
        ND_clust(v) = subg;
        if (prev) {
            e = virtual_edge(prev, v, NULL);
            ED_xpenalty(e) *= CL_CROSS;
        }
        prev = v;
    }

    /* set the counts on virtual edges of the cluster skeleton */
    for (v = agfstnode(subg); v; v = agnxtnode(subg, v)) {
        rl = GD_rankleader(subg)[ND_rank(v)];
        ND_UF_size(rl)++;
        for (e = agfstout(subg, v); e; e = agnxtout(subg, e)) {
            for (r = ND_rank(agtail(e)); r < ND_rank(aghead(e)); r++) {
                ED_count(ND_out(rl).list[0])++;
            }
        }
    }

    for (r = GD_minrank(subg); r <= GD_maxrank(subg); r++) {
        rl = GD_rankleader(subg)[r];
        if (ND_UF_size(rl) > 1)
            ND_UF_size(rl)--;
    }
}

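/* install_cluster:
 * Install the rank leaders of the cluster containing n into the global
 * rank order, once per pass, and enqueue their neighbors for the
 * ordering pass.
 */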
void install_cluster(graph_t *g, node_t *n, int pass, node_queue_t *q) {
    int r;
    graph_t *clust;

    clust = ND_clust(n);
    if (GD_installed(clust) != pass + 1) {
        for (r = GD_minrank(clust); r <= GD_maxrank(clust); r++)
            install_in_rank(g, GD_rankleader(clust)[r]);
        for (r = GD_minrank(clust); r <= GD_maxrank(clust); r++)
            enqueue_neighbors(q, GD_rankleader(clust)[r], pass);
        GD_installed(clust) = pass + 1;
    }
}

static void mark_lowcluster_basic(Agraph_t * g);

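/* mark_lowclusters:
 * Clear any previous cluster labeling on nodes and on the virtual nodes
 * of their edge chains, then label every node with its lowest (innermost)
 * enclosing cluster.
 */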
void mark_lowclusters(Agraph_t * root)
{
    Agnode_t *n, *vn;
    Agedge_t *orig, *e;

    /* first, zap any previous cluster labelings */
    for (n = agfstnode(root); n; n = agnxtnode(root, n)) {
        ND_clust(n) = NULL;
        for (orig = agfstout(root, n); orig; orig = agnxtout(root, orig)) {
            if ((e = ED_to_virt(orig))) {
                while (e && (ND_node_type(vn = aghead(e))) == VIRTUAL) {
                    ND_clust(vn) = NULL;
                    e = ND_out(aghead(e)).list[0];
                }
            }
        }
    }

    /* do the recursion */
    mark_lowcluster_basic(root);
}

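/* mark_lowcluster_basic:
 * Recursive worker for mark_lowclusters. Clusters are processed depth
 * first, so any node or virtual node not already claimed by a deeper
 * cluster is assigned to g.
 */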
static void mark_lowcluster_basic(Agraph_t * g)
{
    Agraph_t *clust;
    Agnode_t *n, *vn;
    Agedge_t *orig, *e;
    int c;

    for (c = 1; c <= GD_n_cluster(g); c++) {
        clust = GD_clust(g)[c];
        mark_lowcluster_basic(clust);
    }
    /* see what belongs to this graph that wasn't already marked */
    for (n = agfstnode(g); n; n = agnxtnode(g, n)) {
        if (ND_clust(n) == NULL)
            ND_clust(n) = g;
        for (orig = agfstout(g, n); orig; orig = agnxtout(g, orig)) {
            if ((e = ED_to_virt(orig))) {
                while (e && (ND_node_type(vn = aghead(e))) == VIRTUAL) {
                    if (ND_clust(vn) == NULL)
                        ND_clust(vn) = g;
                    e = ND_out(aghead(e)).list[0];
                }
            }
        }
    }
}