#!/usr/bin/perl -w
use warnings;
use strict;
use File::Path;
use Text::Wrap;
$Text::Wrap::huge = 'overflow';
my $projectfullname = 'Simple Directmedia Layer';
my $projectshortname = 'SDL';
my $wikisubdir = '';
my $incsubdir = 'include';
my $readmesubdir = undef;
my $apiprefixregex = undef;
my $versionfname = 'include/SDL_version.h';
my $versionmajorregex = '\A\#define\s+SDL_MAJOR_VERSION\s+(\d+)\Z';
my $versionminorregex = '\A\#define\s+SDL_MINOR_VERSION\s+(\d+)\Z';
my $versionpatchregex = '\A\#define\s+SDL_PATCHLEVEL\s+(\d+)\Z';
my $mainincludefname = 'SDL.h';
my $selectheaderregex = '\ASDL.*?\.h\Z';
my $projecturl = 'https://libsdl.org/';
my $wikiurl = 'https://wiki.libsdl.org';
my $bugreporturl = 'https://github.com/libsdl-org/sdlwiki/issues/new';
my $srcpath = undef;
my $wikipath = undef;
my $wikireadmesubdir = 'README';
my $warn_about_missing = 0;
my $copy_direction = 0;
my $optionsfname = undef;
my $wikipreamble = undef;
my $changeformat = undef;
my $manpath = undef;
my $gitrev = undef;
foreach (@ARGV) {
    $warn_about_missing = 1, next if $_ eq '--warn-about-missing';
    $copy_direction = 1, next if $_ eq '--copy-to-headers';
    $copy_direction = 1, next if $_ eq '--copy-to-header';
    $copy_direction = -1, next if $_ eq '--copy-to-wiki';
    $copy_direction = -2, next if $_ eq '--copy-to-manpages';
    if (/\A--options=(.*)\Z/) {
        $optionsfname = $1;
        next;
    } elsif (/\A--changeformat=(.*)\Z/) {
        $changeformat = $1;
        next;
    } elsif (/\A--manpath=(.*)\Z/) {
        $manpath = $1;
        next;
    } elsif (/\A--rev=(.*)\Z/) {
        $gitrev = $1;
        next;
    }
    $srcpath = $_, next if not defined $srcpath;
    $wikipath = $_, next if not defined $wikipath;
}
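# A sketch of typical invocations, based on the flags parsed above and the
# usage() string below; the paths here are purely illustrative:
#
#   ./wikiheaders.pl /path/to/source /path/to/wiki --copy-to-wiki
#   ./wikiheaders.pl /path/to/source /path/to/wiki --copy-to-headers
#   ./wikiheaders.pl /path/to/source /path/to/wiki --copy-to-manpages --manpath=/tmp/man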
my $default_optionsfname = '.wikiheaders-options';
$default_optionsfname = "$srcpath/$default_optionsfname" if defined $srcpath;
if ((not defined $optionsfname) && (-f $default_optionsfname)) {
    $optionsfname = $default_optionsfname;
}
if (defined $optionsfname) {
    open OPTIONS, '<', $optionsfname or die("Failed to open options file '$optionsfname': $!\n");
    while (<OPTIONS>) {
        chomp;
        if (/\A(.*?)\=(.*)\Z/) {
            my $key = $1;
            my $val = $2;
            $key =~ s/\A\s+//;
            $key =~ s/\s+\Z//;
            $val =~ s/\A\s+//;
            $val =~ s/\s+\Z//;
            $warn_about_missing = int($val), next if $key eq 'warn_about_missing';
            $srcpath = $val, next if $key eq 'srcpath';
            $wikipath = $val, next if $key eq 'wikipath';
            $apiprefixregex = $val, next if $key eq 'apiprefixregex';
            $projectfullname = $val, next if $key eq 'projectfullname';
            $projectshortname = $val, next if $key eq 'projectshortname';
            $wikisubdir = $val, next if $key eq 'wikisubdir';
            $incsubdir = $val, next if $key eq 'incsubdir';
            $readmesubdir = $val, next if $key eq 'readmesubdir';
            $versionmajorregex = $val, next if $key eq 'versionmajorregex';
            $versionminorregex = $val, next if $key eq 'versionminorregex';
            $versionpatchregex = $val, next if $key eq 'versionpatchregex';
            $versionfname = $val, next if $key eq 'versionfname';
            $mainincludefname = $val, next if $key eq 'mainincludefname';
            $selectheaderregex = $val, next if $key eq 'selectheaderregex';
            $projecturl = $val, next if $key eq 'projecturl';
            $wikiurl = $val, next if $key eq 'wikiurl';
            $bugreporturl = $val, next if $key eq 'bugreporturl';
            $wikipreamble = $val, next if $key eq 'wikipreamble';
        }
    }
    close(OPTIONS);
}
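# A minimal sketch of a .wikiheaders-options file; the keys are the ones
# recognized in the loop above, the values below are hypothetical:
#
#   projectfullname = My Project
#   projectshortname = MYPROJ
#   incsubdir = include
#   apiprefixregex = MYPROJ_
#   wikipreamble = (This is draft documentation)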
my $wordwrap_mode = 'mediawiki';
sub wordwrap_atom { # don't call this directly.
    my $str = shift;
    my $retval = '';
    # wordwrap but leave links intact, even if they overflow.
    if ($wordwrap_mode eq 'mediawiki') {
        while ($str =~ s/(.*?)\s*(\[https?\:\/\/.*?\s+.*?\])\s*//ms) {
            $retval .= fill('', '', $1); # wrap it.
            $retval .= "\n$2\n"; # don't wrap it.
        }
    } elsif ($wordwrap_mode eq 'md') {
        while ($str =~ s/(.*?)\s*(\[.*?\]\(https?\:\/\/.*?\))\s*//ms) {
            $retval .= fill('', '', $1); # wrap it.
            $retval .= "\n$2\n"; # don't wrap it.
        }
    }
    return $retval . fill('', '', $str);
}
sub wordwrap_with_bullet_indent { # don't call this directly.
    my $bullet = shift;
    my $str = shift;
    my $retval = '';
    #print("WORDWRAP BULLET ('$bullet'):\n\n$str\n\n");
    # You _can't_ (at least with Pandoc) have a bullet item with a newline in
    # MediaWiki, so _remove_ wrapping!
    if ($wordwrap_mode eq 'mediawiki') {
        $retval = "$bullet$str";
        $retval =~ s/\n/ /gms;
        $retval =~ s/\s+$//gms;
        #print("WORDWRAP BULLET DONE:\n\n$retval\n\n");
        return "$retval\n";
    }
    my $bulletlen = length($bullet);
    # wrap it and then indent each line to be under the bullet.
    $Text::Wrap::columns -= $bulletlen;
    my @wrappedlines = split /\n/, wordwrap_atom($str);
    $Text::Wrap::columns += $bulletlen;
    my $prefix = $bullet;
    my $usual_prefix = ' ' x $bulletlen;
    foreach (@wrappedlines) {
        s/\s*\Z//;
        $retval .= "$prefix$_\n";
        $prefix = $usual_prefix;
    }
    return $retval;
}
sub wordwrap_one_paragraph { # don't call this directly.
    my $retval = '';
    my $p = shift;
    #print "\n\n\nPARAGRAPH: [$p]\n\n\n";
    if ($p =~ s/\A([\*\-] )//) { # bullet list, starts with "* " or "- ".
        my $bullet = $1;
        my $item = '';
        my @items = split /\n/, $p;
        foreach (@items) {
            if (s/\A([\*\-] )//) {
                $retval .= wordwrap_with_bullet_indent($bullet, $item);
                $item = '';
            }
            s/\A\s*//;
            $item .= "$_\n"; # accumulate lines until we hit the end or another bullet.
        }
        if ($item ne '') {
            $retval .= wordwrap_with_bullet_indent($bullet, $item);
        }
    } else {
        $retval = wordwrap_atom($p) . "\n";
    }
    return $retval;
}
sub wordwrap_paragraphs { # don't call this directly.
    my $str = shift;
    my $retval = '';
    my @paragraphs = split /\n\n/, $str;
    foreach (@paragraphs) {
        next if $_ eq '';
        $retval .= wordwrap_one_paragraph($_);
        $retval .= "\n";
    }
    return $retval;
}
my $wordwrap_default_columns = 76;
sub wordwrap {
    my $str = shift;
    my $columns = shift;
    $columns = $wordwrap_default_columns if not defined $columns;
    $columns += $wordwrap_default_columns if $columns < 0;
    $Text::Wrap::columns = $columns;
    my $retval = '';
    #print("\n\nWORDWRAP:\n\n$str\n\n\n");
    $str =~ s/\A\n+//ms;
    while ($str =~ s/(.*?)(\`\`\`.*?\`\`\`|\<syntaxhighlight.*?\<\/syntaxhighlight\>)//ms) {
        #print("\n\nWORDWRAP BLOCK:\n\n$1\n\n ===\n\n$2\n\n\n");
        $retval .= wordwrap_paragraphs($1); # wrap it.
        $retval .= "$2\n\n"; # don't wrap it.
    }
    $retval .= wordwrap_paragraphs($str); # wrap what's left.
    $retval =~ s/\n+\Z//ms;
    #print("\n\nWORDWRAP DONE:\n\n$retval\n\n\n");
    return $retval;
}
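# A quick sketch of how the column argument is used by callers later in this
# script: wordwrap($text) wraps to $wordwrap_default_columns (76), while a
# negative count such as wordwrap($text, -8) wraps to 76 - 8 = 68 columns,
# leaving room for a prefix like "\param x " that the caller prepends to the
# first line.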
# This assumes you're moving from Markdown (in the Doxygen data) to Wiki, which
# is why the 'md' section is so sparse.
sub wikify_chunk {
    my $wikitype = shift;
    my $str = shift;
    my $codelang = shift;
    my $code = shift;
    #print("\n\nWIKIFY CHUNK:\n\n$str\n\n\n");
    if ($wikitype eq 'mediawiki') {
        # convert `code` things first, so they aren't mistaken for other markdown items.
        my $codedstr = '';
        while ($str =~ s/\A(.*?)\`(.*?)\`//ms) {
            my $codeblock = $2;
            $codedstr .= wikify_chunk($wikitype, $1, undef, undef);
            if (defined $apiprefixregex) {
                # Convert obvious API things to wikilinks, even inside `code` blocks.
                $codeblock =~ s/\b($apiprefixregex[a-zA-Z0-9_]+)/[[$1]]/gms;
            }
            $codedstr .= "<code>$codeblock</code>";
        }
        # Convert obvious API things to wikilinks.
        if (defined $apiprefixregex) {
            $str =~ s/\b($apiprefixregex[a-zA-Z0-9_]+)/[[$1]]/gms;
        }
        # Make some Markdown things into MediaWiki...
        # links
        $str =~ s/\[(.*?)\]\((https?\:\/\/.*?)\)/\[$2 $1\]/g;
        # bold+italic
        $str =~ s/\*\*\*(.*?)\*\*\*/'''''$1'''''/gms;
        # bold
        $str =~ s/\*\*(.*?)\*\*/'''$1'''/gms;
        # italic
        $str =~ s/\*(.*?)\*/''$1''/gms;
        # bullets
        $str =~ s/^\- /* /gm;
        $str = $codedstr . $str;
        if (defined $code) {
            $str .= "<syntaxhighlight lang='$codelang'>$code<\/syntaxhighlight>";
        }
    } elsif ($wikitype eq 'md') {
        # convert `code` things first, so they aren't mistaken for other markdown items.
        my $codedstr = '';
        while ($str =~ s/\A(.*?)(\`.*?\`)//ms) {
            my $codeblock = $2;
            $codedstr .= wikify_chunk($wikitype, $1, undef, undef);
            if (defined $apiprefixregex) {
                # Convert obvious API things to wikilinks, even inside `code` blocks,
                # BUT ONLY IF the entire code block is the API thing,
                # So something like "just call `SDL_Whatever`" will become
                # "just call [`SDL_Whatever`](SDL_Whatever)", but
                # "just call `SDL_Whatever(7)`" will not. It's just the safest
                # way to do this without resorting to wrapping things in html <code> tags.
                $codeblock =~ s/\A\`($apiprefixregex[a-zA-Z0-9_]+)\`\Z/[`$1`]($1)/gms;
            }
            $codedstr .= $codeblock;
        }
        # Convert obvious API things to wikilinks.
        if (defined $apiprefixregex) {
            $str =~ s/\b($apiprefixregex[a-zA-Z0-9_]+)/[$1]($1)/gms;
        }
        $str = $codedstr . $str;
        if (defined $code) {
            $str .= "```$codelang$code```";
        }
    }
    #print("\n\nWIKIFY CHUNK DONE:\n\n$str\n\n\n");
    return $str;
}
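# An illustrative before/after for the Markdown -> MediaWiki path above,
# assuming $apiprefixregex matches 'SDL_' (the function name is hypothetical):
#
#   in:  Call **SDL_DoSomething** first; see [the docs](https://example.com/).
#   out: Call '''[[SDL_DoSomething]]''' first; see [https://example.com/ the docs].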
sub wikify {
    my $wikitype = shift;
    my $str = shift;
    my $retval = '';
    #print("WIKIFY WHOLE:\n\n$str\n\n\n");
    while ($str =~ s/\A(.*?)\`\`\`(c\+\+|c)(.*?)\`\`\`//ms) {
        $retval .= wikify_chunk($wikitype, $1, $2, $3);
    }
    $retval .= wikify_chunk($wikitype, $str, undef, undef);
    #print("WIKIFY WHOLE DONE:\n\n$retval\n\n\n");
    return $retval;
}
my $dewikify_mode = 'md';
my $dewikify_manpage_code_indent = 1;
sub dewikify_chunk {
    my $wikitype = shift;
    my $str = shift;
    my $codelang = shift;
    my $code = shift;
    #print("\n\nDEWIKIFY CHUNK:\n\n$str\n\n\n");
    if ($dewikify_mode eq 'md') {
        if ($wikitype eq 'mediawiki') {
            # Doxygen supports Markdown (and it just simply looks better than MediaWiki
            # when looking at the raw headers), so do some conversions here as necessary.
            # Dump obvious wikilinks.
            if (defined $apiprefixregex) {
                $str =~ s/\[\[($apiprefixregex[a-zA-Z0-9_]+)\]\]/$1/gms;
            }
            # links
            $str =~ s/\[(https?\:\/\/.*?)\s+(.*?)\]/\[$2\]\($1\)/g;
            # <code></code> is also popular. :/
            $str =~ s/\<code>(.*?)<\/code>/`$1`/gms;
            # bold+italic
            $str =~ s/'''''(.*?)'''''/***$1***/gms;
            # bold
            $str =~ s/'''(.*?)'''/**$1**/gms;
            # italic
            $str =~ s/''(.*?)''/*$1*/gms;
            # bullets
            $str =~ s/^\* /- /gm;
        } elsif ($wikitype eq 'md') {
            # Dump obvious wikilinks. The rest can just passthrough.
            if (defined $apiprefixregex) {
                $str =~ s/\[(\`?$apiprefixregex[a-zA-Z0-9_]+\`?)\]\($apiprefixregex[a-zA-Z0-9_]+\)/$1/gms;
            }
        }
        if (defined $code) {
            $str .= "```$codelang$code```";
        }
    } elsif ($dewikify_mode eq 'manpage') {
        $str =~ s/\./\\[char46]/gms; # make sure these can't become control codes.
        if ($wikitype eq 'mediawiki') {
            # Dump obvious wikilinks.
            if (defined $apiprefixregex) {
                $str =~ s/\s*\[\[($apiprefixregex[a-zA-Z0-9_]+)\]\]\s*/\n.BR $1\n/gms;
            }
            # links
            $str =~ s/\[(https?\:\/\/.*?)\s+(.*?)\]/\n.URL "$1" "$2"\n/g;
            # <code></code> is also popular. :/
            $str =~ s/\s*\<code>(.*?)<\/code>\s*/\n.BR $1\n/gms;
            # bold+italic (this looks bad, just make it bold).
            $str =~ s/\s*'''''(.*?)'''''\s*/\n.B $1\n/gms;
            # bold
            $str =~ s/\s*'''(.*?)'''\s*/\n.B $1\n/gms;
            # italic
            $str =~ s/\s*''(.*?)''\s*/\n.I $1\n/gms;
            # bullets
            $str =~ s/^\* /\n\\\(bu /gm;
        } elsif ($wikitype eq 'md') {
            # Dump obvious wikilinks.
            if (defined $apiprefixregex) {
                $str =~ s/\[(\`?$apiprefixregex[a-zA-Z0-9_]+\`?)\]\($apiprefixregex[a-zA-Z0-9_]+\)/\n.BR $1\n/gms;
            }
            # links
            $str =~ s/\[(.*?)]\((https?\:\/\/.*?)\)/\n.URL "$2" "$1"\n/g;
            # <code></code> is also popular. :/
            $str =~ s/\s*\`(.*?)\`\s*/\n.BR $1\n/gms;
            # bold+italic (this looks bad, just make it bold).
            $str =~ s/\s*\*\*\*(.*?)\*\*\*\s*/\n.B $1\n/gms;
            # bold
            $str =~ s/\s*\*\*(.*?)\*\*\s*/\n.B $1\n/gms;
            # italic
            $str =~ s/\s*\*(.*?)\*\s*/\n.I $1\n/gms;
            # bullets
            $str =~ s/^\- /\n\\\(bu /gm;
        } else {
            die("Unexpected wikitype when converting to manpages\n"); # !!! FIXME: need to handle Markdown wiki pages.
        }
        if (defined $code) {
            $code =~ s/\A\n+//gms;
            $code =~ s/\n+\Z//gms;
            if ($dewikify_manpage_code_indent) {
                $str .= "\n.IP\n"
            } else {
                $str .= "\n.PP\n"
            }
            $str .= ".EX\n$code\n.EE\n.PP\n";
        }
    } else {
        die("Unexpected dewikify_mode\n");
    }
    #print("\n\nDEWIKIFY CHUNK DONE:\n\n$str\n\n\n");
    return $str;
}
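# An illustrative conversion for the MediaWiki -> manpage path above, again
# assuming $apiprefixregex matches 'SDL_' and a hypothetical function name:
#
#   in:  See [[SDL_DoSomething]] for details.
#   out: See
#        .BR SDL_DoSomething
#        for details\[char46]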
sub dewikify {
    my $wikitype = shift;
    my $str = shift;
    return '' if not defined $str;
    #print("DEWIKIFY WHOLE:\n\n$str\n\n\n");
    $str =~ s/\A[\s\n]*\= .*? \=\s*?\n+//ms;
    $str =~ s/\A[\s\n]*\=\= .*? \=\=\s*?\n+//ms;
    my $retval = '';
    while ($str =~ s/\A(.*?)<syntaxhighlight lang='?(.*?)'?>(.*?)<\/syntaxhighlight\>//ms) {
        $retval .= dewikify_chunk($wikitype, $1, $2, $3);
    }
    $retval .= dewikify_chunk($wikitype, $str, undef, undef);
    #print("DEWIKIFY WHOLE DONE:\n\n$retval\n\n\n");
    return $retval;
}
sub filecopy {
    my $src = shift;
    my $dst = shift;
    my $endline = shift;
    $endline = "\n" if not defined $endline;
    open(COPYIN, '<', $src) or die("Failed to open '$src' for reading: $!\n");
    open(COPYOUT, '>', $dst) or die("Failed to open '$dst' for writing: $!\n");
    while (<COPYIN>) {
        chomp;
        s/[ \t\r\n]*\Z//;
        print COPYOUT "$_$endline";
    }
    close(COPYOUT);
    close(COPYIN);
}
sub usage {
    die("USAGE: $0 <source code git clone path> <wiki git clone path> [--copy-to-headers|--copy-to-wiki|--copy-to-manpages] [--warn-about-missing] [--manpath=<man path>]\n\n");
}
usage() if not defined $srcpath;
usage() if not defined $wikipath;
#usage() if $copy_direction == 0;
if (not defined $manpath) {
    $manpath = "$srcpath/man";
}
my @standard_wiki_sections = (
    'Draft',
    '[Brief]',
    'Deprecated',
    'Syntax',
    'Function Parameters',
    'Return Value',
    'Remarks',
    'Thread Safety',
    'Version',
    'Code Examples',
    'Related Functions'
);
# Sections that only ever exist in the wiki and shouldn't be deleted when
# not found in the headers.
my %only_wiki_sections = ( # The values don't mean anything; I just need to check for key existence.
    'Draft', 1,
    'Code Examples', 1
);
my %headers = (); # $headers{"SDL_audio.h"} -> reference to an array of all lines of text in SDL_audio.h.
my %headerfuncs = (); # $headerfuncs{"SDL_OpenAudio"} -> string of header documentation for SDL_OpenAudio, with comment '*' bits stripped from the start. Newlines embedded!
my %headerdecls = ();
my %headerfuncslocation = (); # $headerfuncslocation{"SDL_OpenAudio"} -> name of header holding SDL_OpenAudio define ("SDL_audio.h" in this case).
my %headerfuncschunk = (); # $headerfuncschunk{"SDL_OpenAudio"} -> offset in array in %headers that should be replaced for this function.
my %headerfuncshasdoxygen = (); # $headerfuncshasdoxygen{"SDL_OpenAudio"} -> 1 if there was existing doxygen for this function, 0 if not.
my $incpath = "$srcpath";
$incpath .= "/$incsubdir" if $incsubdir ne '';
my $wikireadmepath = "$wikipath/$wikireadmesubdir";
my $readmepath = undef;
if (defined $readmesubdir) {
    $readmepath = "$srcpath/$readmesubdir";
}
opendir(DH, $incpath) or die("Can't opendir '$incpath': $!\n");
while (my $d = readdir(DH)) {
    my $dent = $d;
    next if not $dent =~ /$selectheaderregex/; # just selected headers.
    open(FH, '<', "$incpath/$dent") or die("Can't open '$incpath/$dent': $!\n");
    my @contents = ();
    while (<FH>) {
        chomp;
        my $decl;
        my @templines;
        my $str;
        my $has_doxygen = 1;
        if (/\A\s*extern\s+(SDL_DEPRECATED\s+|)DECLSPEC/) { # a function declaration without a doxygen comment?
            @templines = ();
            $decl = $_;
            $str = '';
            $has_doxygen = 0;
        } elsif (not /\A\/\*\*\s*\Z/) { # not doxygen comment start?
            push @contents, $_;
            next;
        } else { # Start of a doxygen comment, parse it out.
            @templines = ( $_ );
            while (<FH>) {
                chomp;
                push @templines, $_;
                last if /\A\s*\*\/\Z/;
                if (s/\A\s*\*\s*\`\`\`/```/) { # this is a hack, but a lot of other code relies on the whitespace being trimmed, but we can't trim it in code blocks...
                    $str .= "$_\n";
                    while (<FH>) {
                        chomp;
                        push @templines, $_;
                        s/\A\s*\*\s?//;
                        if (s/\A\s*\`\`\`/```/) {
                            $str .= "$_\n";
                            last;
                        } else {
                            $str .= "$_\n";
                        }
                    }
                } else {
                    s/\A\s*\*\s*//;
                    $str .= "$_\n";
                }
            }
            $decl = <FH>;
            $decl = '' if not defined $decl;
            chomp($decl);
            if (not $decl =~ /\A\s*extern\s+(SDL_DEPRECATED\s+|)DECLSPEC/) {
                #print "Found doxygen but no function sig:\n$str\n\n";
                foreach (@templines) {
                    push @contents, $_;
                }
                push @contents, $decl;
                next;
            }
        }
        my @decllines = ( $decl );
        if (not $decl =~ /\)\s*;/) {
            while (<FH>) {
                chomp;
                push @decllines, $_;
                s/\A\s+//;
                s/\s+\Z//;
                $decl .= " $_";
                last if /\)\s*;/;
            }
        }
        $decl =~ s/\s+\);\Z/);/;
        $decl =~ s/\s+\Z//;
        #print("DECL: [$decl]\n");
        my $fn = '';
        if ($decl =~ /\A\s*extern\s+(SDL_DEPRECATED\s+|)DECLSPEC\s+(const\s+|)(unsigned\s+|)(.*?)\s*(\*?)\s*SDLCALL\s+(.*?)\s*\((.*?)\);/) {
            $fn = $6;
            #$decl =~ s/\A\s*extern\s+DECLSPEC\s+(.*?)\s+SDLCALL/$1/;
        } else {
            #print "Found doxygen but no function sig:\n$str\n\n";
            foreach (@templines) {
                push @contents, $_;
            }
            foreach (@decllines) {
                push @contents, $_;
            }
            next;
        }
        $decl = ''; # build this with the line breaks, since it looks better for syntax highlighting.
        foreach (@decllines) {
            if ($decl eq '') {
                $decl = $_;
                $decl =~ s/\Aextern\s+(SDL_DEPRECATED\s+|)DECLSPEC\s+(.*?)\s+(\*?)SDLCALL\s+/$2$3 /;
            } else {
                my $trimmed = $_;
                # !!! FIXME: trim space for SDL_DEPRECATED if it was used, too.
                $trimmed =~ s/\A\s{24}//; # 24 for shrinking to match the removed "extern DECLSPEC SDLCALL "
                $decl .= $trimmed;
            }
            $decl .= "\n";
        }
        #print("$fn:\n$str\n\n");
        # There might be multiple declarations of a function due to #ifdefs,
        # and only one of them will have documentation. If we hit an
        # undocumented one before, delete the placeholder line we left for
        # it so it doesn't accumulate a new blank line on each run.
        my $skipfn = 0;
        if (defined $headerfuncshasdoxygen{$fn}) {
            if ($headerfuncshasdoxygen{$fn} == 0) { # An undocumented declaration already exists, nuke its placeholder line.
                delete $contents[$headerfuncschunk{$fn}]; # delete DOES NOT RENUMBER existing elements!
            } else { # documented function already existed?
                $skipfn = 1; # don't add this copy to the list of functions.
                if ($has_doxygen) {
                    print STDERR "WARNING: Function '$fn' appears to be documented in multiple locations. Only keeping the first one we saw!\n";
                }
                push @contents, join("\n", @decllines); # just put the existing declaration in as-is.
            }
        }
        if (!$skipfn) {
            $headerfuncs{$fn} = $str;
            $headerdecls{$fn} = $decl;
            $headerfuncslocation{$fn} = $dent;
            $headerfuncschunk{$fn} = scalar(@contents);
            $headerfuncshasdoxygen{$fn} = $has_doxygen;
            push @contents, join("\n", @templines);
            push @contents, join("\n", @decllines);
        }
    }
    close(FH);
    $headers{$dent} = \@contents;
}
closedir(DH);
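# A sketch of the declaration shape the loop above is looking for; the
# function and its comment here are purely hypothetical:
#
#   /**
#    * Do something with a thing.
#    *
#    * \param thing the thing to do something with
#    * \returns 0 on success, -1 on error
#    */
#   extern DECLSPEC int SDLCALL SDL_DoSomething(int thing);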
# !!! FIXME: we need to parse enums and typedefs and structs and defines and and and and and...
# !!! FIXME: (but functions are good enough for now.)
my %wikitypes = (); # contains string of wiki page extension, like $wikitypes{"SDL_OpenAudio"} == 'mediawiki'
my %wikifuncs = (); # contains references to hash of strings, each string being the full contents of a section of a wiki page, like $wikifuncs{"SDL_OpenAudio"}{"Remarks"}.
my %wikisectionorder = (); # contains references to array, each array item being a key to a wikipage section in the correct order, like $wikisectionorder{"SDL_OpenAudio"}[2] == 'Remarks'
opendir(DH, $wikipath) or die("Can't opendir '$wikipath': $!\n");
while (my $d = readdir(DH)) {
    my $dent = $d;
    my $type = '';
    if ($dent =~ /\.(md|mediawiki)\Z/) {
        $type = $1;
    } else {
        next; # only dealing with wiki pages.
    }
    my $fn = $dent;
    $fn =~ s/\..*\Z//;
    # Ignore FrontPage.
    next if $fn eq 'FrontPage';
    # Ignore "Category*" pages.
    next if ($fn =~ /\ACategory/);
    open(FH, '<', "$wikipath/$dent") or die("Can't open '$wikipath/$dent': $!\n");
    my $current_section = '[start]';
    my @section_order = ( $current_section );
    my %sections = ();
    $sections{$current_section} = '';
    my $firstline = 1;
    while (<FH>) {
        chomp;
        my $orig = $_;
        s/\A\s*//;
        s/\s*\Z//;
        if ($type eq 'mediawiki') {
            if (defined($wikipreamble) && $firstline && /\A\=\=\=\=\=\= (.*?) \=\=\=\=\=\=\Z/ && ($1 eq $wikipreamble)) {
                $firstline = 0; # skip this.
                next;
            } elsif (/\A\= (.*?) \=\Z/) {
                $firstline = 0;
                $current_section = ($1 eq $fn) ? '[Brief]' : $1;
                die("Doubly-defined section '$current_section' in '$dent'!\n") if defined $sections{$current_section};
                push @section_order, $current_section;
                $sections{$current_section} = '';
            } elsif (/\A\=\= (.*?) \=\=\Z/) {
                $firstline = 0;
                $current_section = ($1 eq $fn) ? '[Brief]' : $1;
                die("Doubly-defined section '$current_section' in '$dent'!\n") if defined $sections{$current_section};
                push @section_order, $current_section;
                $sections{$current_section} = '';
                next;
            } elsif (/\A\-\-\-\-\Z/) {
                $firstline = 0;
                $current_section = '[footer]';
                die("Doubly-defined section '$current_section' in '$dent'!\n") if defined $sections{$current_section};
                push @section_order, $current_section;
                $sections{$current_section} = '';
                next;
            }
        } elsif ($type eq 'md') {
            if (defined($wikipreamble) && $firstline && /\A\#\#\#\#\#\# (.*?)\Z/ && ($1 eq $wikipreamble)) {
                $firstline = 0; # skip this.
                next;
            } elsif (/\A\#+ (.*?)\Z/) {
                $firstline = 0;
                $current_section = ($1 eq $fn) ? '[Brief]' : $1;
                die("Doubly-defined section '$current_section' in '$dent'!\n") if defined $sections{$current_section};
                push @section_order, $current_section;
                $sections{$current_section} = '';
                next;
            } elsif (/\A\-\-\-\-\Z/) {
                $firstline = 0;
                $current_section = '[footer]';
                die("Doubly-defined section '$current_section' in '$dent'!\n") if defined $sections{$current_section};
                push @section_order, $current_section;
                $sections{$current_section} = '';
                next;
            }
        } else {
            die("Unexpected wiki file type. Fixme!\n");
        }
        if ($firstline) {
            $firstline = ($_ ne '');
        }
        if (!$firstline) {
            $sections{$current_section} .= "$orig\n";
        }
    }
    close(FH);
    foreach (keys %sections) {
        $sections{$_} =~ s/\A\n+//;
        $sections{$_} =~ s/\n+\Z//;
        $sections{$_} .= "\n";
    }
    if (0) {
        foreach (@section_order) {
            print("$fn SECTION '$_':\n");
            print($sections{$_});
            print("\n\n");
        }
    }
    $wikitypes{$fn} = $type;
    $wikifuncs{$fn} = \%sections;
    $wikisectionorder{$fn} = \@section_order;
}
closedir(DH);
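# A sketch of the Markdown wiki page layout the loop above splits into sections
# (the function name is hypothetical; MediaWiki pages use '=' headings instead):
#
#   # SDL_DoSomething
#
#   Do something with a thing.
#
#   ## Syntax
#   ...
#
#   ## Remarks
#   ...
#
#   ----
#   [CategoryAPI](CategoryAPI)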
delete $wikifuncs{"Undocumented"};
{
    my $path = "$wikipath/Undocumented.md";
    open(FH, '>', $path) or die("Can't open '$path': $!\n");
    print FH "# Undocumented\n\n";
    print FH "## Functions defined in the headers, but not in the wiki\n\n";
    my $header_only_func = 0;
    foreach (sort keys %headerfuncs) {
        my $fn = $_;
        if (not defined $wikifuncs{$fn}) {
            print FH "- [$fn]($fn)\n";
            $header_only_func = 1;
        }
    }
    if (!$header_only_func) {
        print FH "(none)\n";
    }
    print FH "\n";
    print FH "## Functions defined in the wiki, but not in the headers\n\n";
    my $wiki_only_func = 0;
    foreach (sort keys %wikifuncs) {
        my $fn = $_;
        if (not defined $headerfuncs{$fn}) {
            print FH "- [$fn]($fn)\n";
            $wiki_only_func = 1;
        }
    }
    if (!$wiki_only_func) {
        print FH "(none)\n";
    }
    print FH "\n";
    close(FH);
}
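# The generated Undocumented.md ends up looking roughly like this (the
# function name is hypothetical):
#
#   # Undocumented
#
#   ## Functions defined in the headers, but not in the wiki
#
#   - [SDL_DoSomething](SDL_DoSomething)
#
#   ## Functions defined in the wiki, but not in the headers
#
#   (none)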
if ($warn_about_missing) {
    foreach (keys %wikifuncs) {
        my $fn = $_;
        if (not defined $headerfuncs{$fn}) {
            print("WARNING: $fn defined in the wiki but not the headers!\n");
        }
    }
    foreach (keys %headerfuncs) {
        my $fn = $_;
        if (not defined $wikifuncs{$fn}) {
            print("WARNING: $fn defined in the headers but not the wiki!\n");
        }
    }
}
if ($copy_direction == 1) { # --copy-to-headers
    my %changed_headers = ();
    $dewikify_mode = 'md';
    $wordwrap_mode = 'md'; # the headers use Markdown format.
    foreach (keys %headerfuncs) {
        my $fn = $_;
        next if not defined $wikifuncs{$fn}; # don't have a page for that function, skip it.
        my $wikitype = $wikitypes{$fn};
        my $sectionsref = $wikifuncs{$fn};
        my $remarks = $sectionsref->{'Remarks'};
        my $params = $sectionsref->{'Function Parameters'};
        my $returns = $sectionsref->{'Return Value'};
        my $threadsafety = $sectionsref->{'Thread Safety'};
        my $version = $sectionsref->{'Version'};
        my $related = $sectionsref->{'Related Functions'};
        my $deprecated = $sectionsref->{'Deprecated'};
        my $brief = $sectionsref->{'[Brief]'};
        my $addblank = 0;
        my $str = '';
        $headerfuncshasdoxygen{$fn} = 1; # Added/changed doxygen for this header.
        $brief = dewikify($wikitype, $brief);
        $brief =~ s/\A(.*?\.) /$1\n/; # \brief should only be one sentence, delimited by a period+space. Split if necessary.
        my @briefsplit = split /\n/, $brief;
        $brief = shift @briefsplit;
        if (defined $remarks) {
            $remarks = join("\n", @briefsplit) . dewikify($wikitype, $remarks);
        }
        if (defined $brief) {
            $str .= "\n" if $addblank; $addblank = 1;
            $str .= wordwrap($brief) . "\n";
        }
        if (defined $remarks) {
            $str .= "\n" if $addblank; $addblank = 1;
            $str .= wordwrap($remarks) . "\n";
        }
        if (defined $deprecated) {
            # !!! FIXME: lots of code duplication in all of these.
            $str .= "\n" if $addblank; $addblank = 1;
            my $v = dewikify($wikitype, $deprecated);
            my $whitespacelen = length("\\deprecated") + 1;
            my $whitespace = ' ' x $whitespacelen;
            $v = wordwrap($v, -$whitespacelen);
            my @desclines = split /\n/, $v;
            my $firstline = shift @desclines;
            $str .= "\\deprecated $firstline\n";
            foreach (@desclines) {
                $str .= "${whitespace}$_\n";
            }
        }
        if (defined $params) {
            $str .= "\n" if $addblank; $addblank = (defined $returns) ? 0 : 1;
            my @lines = split /\n/, dewikify($wikitype, $params);
            if ($wikitype eq 'mediawiki') {
                die("Unexpected data parsing MediaWiki table") if (shift @lines ne '{|'); # Dump the '{|' start
                while (scalar(@lines) >= 3) {
                    my $name = shift @lines;
                    my $desc = shift @lines;
                    my $terminator = shift @lines; # the '|-' or '|}' line.
                    last if ($terminator ne '|-') and ($terminator ne '|}'); # we seem to have run out of table.
                    $name =~ s/\A\|\s*//;
                    $name =~ s/\A\*\*(.*?)\*\*/$1/;
                    $name =~ s/\A\'\'\'(.*?)\'\'\'/$1/;
                    $desc =~ s/\A\|\s*//;
                    #print STDERR "FN: $fn NAME: $name DESC: $desc TERM: $terminator\n";
                    my $whitespacelen = length($name) + 8;
                    my $whitespace = ' ' x $whitespacelen;
                    $desc = wordwrap($desc, -$whitespacelen);
                    my @desclines = split /\n/, $desc;
                    my $firstline = shift @desclines;
                    $str .= "\\param $name $firstline\n";
                    foreach (@desclines) {
                        $str .= "${whitespace}$_\n";
                    }
                }
            } elsif ($wikitype eq 'md') {
                my $l;
                $l = shift @lines;
                die("Unexpected data parsing Markdown table") if (not $l =~ /\A\s*\|\s*\|\s*\|\s*\Z/);
                $l = shift @lines;
                die("Unexpected data parsing Markdown table") if (not $l =~ /\A\s*\|\s*\-*\s*\|\s*\-*\s*\|\s*\Z/);
                while (scalar(@lines) >= 1) {
                    $l = shift @lines;
                    if ($l =~ /\A\s*\|\s*(.*?)\s*\|\s*(.*?)\s*\|\s*\Z/) {
                        my $name = $1;
                        my $desc = $2;
                        $name =~ s/\A\*\*(.*?)\*\*/$1/;
                        $name =~ s/\A\'\'\'(.*?)\'\'\'/$1/;
                        #print STDERR "FN: $fn NAME: $name DESC: $desc\n";
                        my $whitespacelen = length($name) + 8;
                        my $whitespace = ' ' x $whitespacelen;
                        $desc = wordwrap($desc, -$whitespacelen);
                        my @desclines = split /\n/, $desc;
                        my $firstline = shift @desclines;
                        $str .= "\\param $name $firstline\n";
                        foreach (@desclines) {
                            $str .= "${whitespace}$_\n";
                        }
                    } else {
                        last; # we seem to have run out of table.
                    }
                }
            } else {
                die("write me");
            }
        }
        if (defined $returns) {
            $str .= "\n" if $addblank; $addblank = 1;
            my $r = dewikify($wikitype, $returns);
            my $retstr = "\\returns";
            if ($r =~ s/\AReturn(s?) //) {
                $retstr = "\\return$1";
            }
            my $whitespacelen = length($retstr) + 1;
            my $whitespace = ' ' x $whitespacelen;
            $r = wordwrap($r, -$whitespacelen);
            my @desclines = split /\n/, $r;
            my $firstline = shift @desclines;
            $str .= "$retstr $firstline\n";
            foreach (@desclines) {
                $str .= "${whitespace}$_\n";
            }
        }
        if (defined $threadsafety) {
            # !!! FIXME: lots of code duplication in all of these.
            $str .= "\n" if $addblank; $addblank = 1;
            my $v = dewikify($wikitype, $threadsafety);
            my $whitespacelen = length("\\threadsafety") + 1;
            my $whitespace = ' ' x $whitespacelen;
            $v = wordwrap($v, -$whitespacelen);
            my @desclines = split /\n/, $v;
            my $firstline = shift @desclines;
            $str .= "\\threadsafety $firstline\n";
            foreach (@desclines) {
                $str .= "${whitespace}$_\n";
            }
        }
        if (defined $version) {
            # !!! FIXME: lots of code duplication in all of these.
            $str .= "\n" if $addblank; $addblank = 1;
            my $v = dewikify($wikitype, $version);
            my $whitespacelen = length("\\since") + 1;
            my $whitespace = ' ' x $whitespacelen;
            $v = wordwrap($v, -$whitespacelen);
            my @desclines = split /\n/, $v;
            my $firstline = shift @desclines;
            $str .= "\\since $firstline\n";
            foreach (@desclines) {
                $str .= "${whitespace}$_\n";
            }
        }
        if (defined $related) {
            # !!! FIXME: lots of code duplication in all of these.
            $str .= "\n" if $addblank; $addblank = 1;
            my $v = dewikify($wikitype, $related);
            my @desclines = split /\n/, $v;
            foreach (@desclines) {
                s/\A(\:|\* )//;
                s/\(\)\Z//; # Convert "SDL_Func()" to "SDL_Func"
                s/\[\[(.*?)\]\]/$1/; # in case some wikilinks remain.
                s/\[(.*?)\]\(.*?\)/$1/; # in case some wikilinks remain.
                s/\A\/*//;
                $str .= "\\sa $_\n";
            }
        }
        my $header = $headerfuncslocation{$fn};
        my $contentsref = $headers{$header};
        my $chunk = $headerfuncschunk{$fn};
        my @lines = split /\n/, $str;
        my $addnewline = (($chunk > 0) && ($$contentsref[$chunk-1] ne '')) ? "\n" : '';
        my $output = "$addnewline/**\n";
        foreach (@lines) {
            chomp;
            s/\s*\Z//;
            if ($_ eq '') {
                $output .= " *\n";
            } else {
                $output .= " * $_\n";
            }
        }
        $output .= " */";
        #print("$fn:\n$output\n\n");
        $$contentsref[$chunk] = $output;
        #$$contentsref[$chunk+1] = $headerdecls{$fn};
        $changed_headers{$header} = 1;
    }
    foreach (keys %changed_headers) {
        my $header = $_;
        # this is kinda inefficient, but oh well.
        my @removelines = ();
        foreach (keys %headerfuncslocation) {
            my $fn = $_;
            next if $headerfuncshasdoxygen{$fn};
            next if $headerfuncslocation{$fn} ne $header;
            # the index of the blank line we put before the function declaration in case we needed to replace it with new content from the wiki.
            push @removelines, $headerfuncschunk{$fn};
        }
        my $contentsref = $headers{$header};
        foreach (@removelines) {
            delete $$contentsref[$_]; # delete DOES NOT RENUMBER existing elements!
        }
        my $path = "$incpath/$header.tmp";
        open(FH, '>', $path) or die("Can't open '$path': $!\n");
        foreach (@$contentsref) {
            print FH "$_\n" if defined $_;
        }
        close(FH);
        rename($path, "$incpath/$header") or die("Can't rename '$path' to '$incpath/$header': $!\n");
    }
    if (defined $readmepath) {
        if ( -d $wikireadmepath ) {
            mkdir($readmepath); # just in case
            opendir(DH, $wikireadmepath) or die("Can't opendir '$wikireadmepath': $!\n");
            while (readdir(DH)) {
                my $dent = $_;
                if ($dent =~ /\A(.*?)\.md\Z/) { # we only bridge Markdown files here.
                    next if $1 eq 'FrontPage';
                    filecopy("$wikireadmepath/$dent", "$readmepath/README-$dent", "\r\n");
                }
            }
            closedir(DH);
        }
    }
} elsif ($copy_direction == -1) { # --copy-to-wiki
    if (defined $changeformat) {
        $dewikify_mode = $changeformat;
        $wordwrap_mode = $changeformat;
    }
    foreach (keys %headerfuncs) {
        my $fn = $_;
        next if not $headerfuncshasdoxygen{$fn};
        my $origwikitype = defined $wikitypes{$fn} ? $wikitypes{$fn} : 'md'; # default to MarkDown for new stuff.
        my $wikitype = (defined $changeformat) ? $changeformat : $origwikitype;
        die("Unexpected wikitype '$wikitype'\n") if (($wikitype ne 'mediawiki') and ($wikitype ne 'md') and ($wikitype ne 'manpage'));
        #print("$fn\n"); next;
        $wordwrap_mode = $wikitype;
        my $raw = $headerfuncs{$fn}; # raw doxygen text with comment characters stripped from start/end and start of each line.
        next if not defined $raw;
        $raw =~ s/\A\s*\\brief\s+//; # Technically we don't need \brief (please turn on JAVADOC_AUTOBRIEF if you use Doxygen), so just in case one is present, strip it.
        my @doxygenlines = split /\n/, $raw;
        my $brief = '';
        while (@doxygenlines) {
            last if $doxygenlines[0] =~ /\A\\/; # some sort of doxygen command, assume we're past the general remarks.
            last if $doxygenlines[0] =~ /\A\s*\Z/; # blank line? End of paragraph, done.
            my $l = shift @doxygenlines;
            chomp($l);
            $l =~ s/\A\s*//;
            $l =~ s/\s*\Z//;
            $brief .= "$l ";
        }
        $brief =~ s/\A(.*?\.) /$1\n\n/; # \brief should only be one sentence, delimited by a period+space. Split if necessary.
        my @briefsplit = split /\n/, $brief;
        $brief = wikify($wikitype, shift @briefsplit) . "\n";
        @doxygenlines = (@briefsplit, @doxygenlines);
        my $remarks = '';
        # !!! FIXME: wordwrap and wikify might handle this, now.
        while (@doxygenlines) {
            last if $doxygenlines[0] =~ /\A\\/; # some sort of doxygen command, assume we're past the general remarks.
            my $l = shift @doxygenlines;
            if ($l =~ /\A\`\`\`/) { # syntax highlighting, don't reformat.
                $remarks .= "$l\n";
                while ((@doxygenlines) && (not $l =~ /\`\`\`\Z/)) {
                    $l = shift @doxygenlines;
                    $remarks .= "$l\n";
                }
            } else {
                $l =~ s/\A\s*//;
                $l =~ s/\s*\Z//;
                $remarks .= "$l\n";
            }
        }
        #print("REMARKS:\n\n $remarks\n\n");
        $remarks = wordwrap(wikify($wikitype, $remarks));
        $remarks =~ s/\A\s*//;
        $remarks =~ s/\s*\Z//;
        my $decl = $headerdecls{$fn};
        #$decl =~ s/\*\s+SDLCALL/ *SDLCALL/; # Try to make "void * Function" become "void *Function"
        #$decl =~ s/\A\s*extern\s+(SDL_DEPRECATED\s+|)DECLSPEC\s+(.*?)\s+(\*?)SDLCALL/$2$3/;
        my $syntax = '';
        if ($wikitype eq 'mediawiki') {
            $syntax = "<syntaxhighlight lang='c'>\n$decl</syntaxhighlight>\n";
        } elsif ($wikitype eq 'md') {
            $syntax = "```c\n$decl\n```\n";
        } else { die("Expected wikitype '$wikitype'\n"); }
        my %sections = ();
        $sections{'[Brief]'} = $brief; # include this section even if blank so we get a title line.
        $sections{'Remarks'} = "$remarks\n" if $remarks ne '';
        $sections{'Syntax'} = $syntax;
        my @params = (); # have to parse these and build up the wiki tables after, since Markdown needs to know the length of the largest string. :/
        while (@doxygenlines) {
            my $l = shift @doxygenlines;
            if ($l =~ /\A\\param\s+(.*?)\s+(.*)\Z/) {
                my $arg = $1;
                my $desc = $2;
                while (@doxygenlines) {
                    my $subline = $doxygenlines[0];
                    $subline =~ s/\A\s*//;
                    last if $subline =~ /\A\\/; # some sort of doxygen command, assume we're past this thing.
                    shift @doxygenlines; # dump this line from the array; we're using it.
                    if ($subline eq '') { # empty line, make sure it keeps the newline char.
                        $desc .= "\n";
                    } else {
                        $desc .= " $subline";
                    }
                }
                $desc =~ s/[\s\n]+\Z//ms;
                # We need to know the length of the longest string to make Markdown tables, so we just store these off until everything is parsed.
                push @params, $arg;
                push @params, $desc;
            } elsif ($l =~ /\A\\r(eturns?)\s+(.*)\Z/) {
                my $retstr = "R$1"; # "Return" or "Returns"
                my $desc = $2;
                while (@doxygenlines) {
                    my $subline = $doxygenlines[0];
                    $subline =~ s/\A\s*//;
                    last if $subline =~ /\A\\/; # some sort of doxygen command, assume we're past this thing.
                    shift @doxygenlines; # dump this line from the array; we're using it.
                    if ($subline eq '') { # empty line, make sure it keeps the newline char.
                        $desc .= "\n";
                    } else {
                        $desc .= " $subline";
                    }
                }
                $desc =~ s/[\s\n]+\Z//ms;
                $sections{'Return Value'} = wordwrap("$retstr " . wikify($wikitype, $desc)) . "\n";
            } elsif ($l =~ /\A\\deprecated\s+(.*)\Z/) {
                my $desc = $1;
                while (@doxygenlines) {
                    my $subline = $doxygenlines[0];
                    $subline =~ s/\A\s*//;
                    last if $subline =~ /\A\\/; # some sort of doxygen command, assume we're past this thing.
                    shift @doxygenlines; # dump this line from the array; we're using it.
                    if ($subline eq '') { # empty line, make sure it keeps the newline char.
                        $desc .= "\n";
                    } else {
                        $desc .= " $subline";
                    }
                }
                $desc =~ s/[\s\n]+\Z//ms;
                $sections{'Deprecated'} = wordwrap(wikify($wikitype, $desc)) . "\n";
            } elsif ($l =~ /\A\\since\s+(.*)\Z/) {
                my $desc = $1;
                while (@doxygenlines) {
                    my $subline = $doxygenlines[0];
                    $subline =~ s/\A\s*//;
                    last if $subline =~ /\A\\/; # some sort of doxygen command, assume we're past this thing.
                    shift @doxygenlines; # dump this line from the array; we're using it.
                    if ($subline eq '') { # empty line, make sure it keeps the newline char.
                        $desc .= "\n";
                    } else {
                        $desc .= " $subline";
                    }
                }
                $desc =~ s/[\s\n]+\Z//ms;
                $sections{'Version'} = wordwrap(wikify($wikitype, $desc)) . "\n";
            } elsif ($l =~ /\A\\threadsafety\s+(.*)\Z/) {
                my $desc = $1;
                while (@doxygenlines) {
                    my $subline = $doxygenlines[0];
                    $subline =~ s/\A\s*//;
                    last if $subline =~ /\A\\/; # some sort of doxygen command, assume we're past this thing.
                    shift @doxygenlines; # dump this line from the array; we're using it.
                    if ($subline eq '') { # empty line, make sure it keeps the newline char.
                        $desc .= "\n";
                    } else {
                        $desc .= " $subline";
                    }
                }
                $desc =~ s/[\s\n]+\Z//ms;
                $sections{'Thread Safety'} = wordwrap(wikify($wikitype, $desc)) . "\n";
            } elsif ($l =~ /\A\\sa\s+(.*)\Z/) {
                my $sa = $1;
                $sa =~ s/\(\)\Z//; # Convert "SDL_Func()" to "SDL_Func"
                $sections{'Related Functions'} = '' if not defined $sections{'Related Functions'};
                if ($wikitype eq 'mediawiki') {
                    $sections{'Related Functions'} .= ":[[$sa]]\n";
                } elsif ($wikitype eq 'md') {
                    $sections{'Related Functions'} .= "* [$sa]($sa)\n";
                } else { die("Expected wikitype '$wikitype'\n"); }
            }
        }
        # Make sure this ends with a double-newline.
        $sections{'Related Functions'} .= "\n" if defined $sections{'Related Functions'};
        # We can build the wiki table now that we have all the data.
        if (scalar(@params) > 0) {
            my $str = '';
            if ($wikitype eq 'mediawiki') {
                while (scalar(@params) > 0) {
                    my $arg = shift @params;
                    my $desc = wikify($wikitype, shift @params);
                    $str .= ($str eq '') ? "{|\n" : "|-\n";
                    $str .= "|'''$arg'''\n";
                    $str .= "|$desc\n";
                }
                $str .= "|}\n";
            } elsif ($wikitype eq 'md') {
                my $longest_arg = 0;
                my $longest_desc = 0;
                my $which = 0;
                foreach (@params) {
                    if ($which == 0) {
                        my $len = length($_) + 4;
                        $longest_arg = $len if ($len > $longest_arg);
                        $which = 1;
                    } else {
                        my $len = length(wikify($wikitype, $_));
                        $longest_desc = $len if ($len > $longest_desc);
                        $which = 0;
                    }
                }
                # Markdown tables are sort of obnoxious.
                $str .= '| ' . (' ' x ($longest_arg+4)) . ' | ' . (' ' x $longest_desc) . " |\n";
                $str .= '| ' . ('-' x ($longest_arg+4)) . ' | ' . ('-' x $longest_desc) . " |\n";
                while (@params) {
                    my $arg = shift @params;
                    my $desc = wikify($wikitype, shift @params);
                    $str .= "| **$arg** " . (' ' x ($longest_arg - length($arg))) . "| $desc" . (' ' x ($longest_desc - length($desc))) . " |\n";
                }
            } else {
                die("Unexpected wikitype!\n"); # should have checked this elsewhere.
            }
            $sections{'Function Parameters'} = $str;
        }
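        # The Markdown branch above produces a table shaped roughly like this
        # (parameter name and description are hypothetical; the columns are
        # padded out to the longest entry):
        #
        #   |               |                                 |
        #   | ------------- | ------------------------------- |
        #   | **thing**     | the thing to do something with  |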
  1137. my $path = "$wikipath/$_.${wikitype}.tmp";
  1138. open(FH, '>', $path) or die("Can't open '$path': $!\n");
  1139. my $sectionsref = $wikifuncs{$fn};
  1140. foreach (@standard_wiki_sections) {
  1141. # drop sections we either replaced or removed from the original wiki's contents.
  1142. if (not defined $only_wiki_sections{$_}) {
  1143. delete($$sectionsref{$_});
  1144. }
  1145. }
  1146. my $wikisectionorderref = $wikisectionorder{$fn};
  1147. # Make sure there's a footer in the wiki that puts this function in CategoryAPI...
  1148. if (not $$sectionsref{'[footer]'}) {
  1149. $$sectionsref{'[footer]'} = '';
  1150. push @$wikisectionorderref, '[footer]';
  1151. }
  1152. # If changing format, convert things that otherwise are passed through unmolested.
  1153. if (defined $changeformat) {
  1154. if (($dewikify_mode eq 'md') and ($origwikitype eq 'mediawiki')) {
  1155. $$sectionsref{'[footer]'} =~ s/\[\[(Category[a-zA-Z0-9_]+)\]\]/[$1]($1)/g;
  1156. } elsif (($dewikify_mode eq 'mediawiki') and ($origwikitype eq 'md')) {
  1157. $$sectionsref{'[footer]'} =~ s/\[(Category[a-zA-Z0-9_]+)\]\(.*?\)/[[$1]]/g;
  1158. }
  1159. foreach (keys %only_wiki_sections) {
  1160. my $sect = $_;
  1161. if (defined $$sectionsref{$sect}) {
  1162. $$sectionsref{$sect} = wikify($wikitype, dewikify($origwikitype, $$sectionsref{$sect}));
  1163. }
  1164. }
  1165. }
  1166. # !!! FIXME: This won't be CategoryAPI if we eventually handle things other than functions.
  1167. my $footer = $$sectionsref{'[footer]'};
  1168. if ($wikitype eq 'mediawiki') {
  1169. $footer =~ s/\[\[CategoryAPI\]\],?\s*//g;
  1170. $footer = '[[CategoryAPI]]' . (($footer eq '') ? "\n" : ", $footer");
  1171. } elsif ($wikitype eq 'md') {
  1172. $footer =~ s/\[CategoryAPI\]\(CategoryAPI\),?\s*//g;
  1173. $footer = '[CategoryAPI](CategoryAPI)' . (($footer eq '') ? '' : ', ') . $footer;
  1174. } else { die("Unexpected wikitype '$wikitype'\n"); }
  1175. $$sectionsref{'[footer]'} = $footer;
  1176. if (defined $wikipreamble) {
  1177. my $wikified_preamble = wikify($wikitype, $wikipreamble);
  1178. if ($wikitype eq 'mediawiki') {
  1179. print FH "====== $wikified_preamble ======\n";
  1180. } elsif ($wikitype eq 'md') {
  1181. print FH "###### $wikified_preamble\n";
  1182. } else { die("Unexpected wikitype '$wikitype'\n"); }
  1183. }
  1184. my $prevsectstr = '';
  1185. my @ordered_sections = (@standard_wiki_sections, defined $wikisectionorderref ? @$wikisectionorderref : ()); # this copies the arrays into one.
  1186. foreach (@ordered_sections) {
  1187. my $sect = $_;
  1188. next if $sect eq '[start]';
  1189. next if (not defined $sections{$sect} and not defined $$sectionsref{$sect});
  1190. my $section = defined $sections{$sect} ? $sections{$sect} : $$sectionsref{$sect};
  1191. if ($sect eq '[footer]') {
  1192. # Make sure previous section ends with two newlines.
  1193. if (substr($prevsectstr, -1) ne "\n") {
  1194. print FH "\n\n";
  1195. } elsif (substr($prevsectstr, -2) ne "\n\n") {
  1196. print FH "\n";
  1197. }
  1198. print FH "----\n"; # It's the same in Markdown and MediaWiki.
  1199. } elsif ($sect eq '[Brief]') {
  1200. if ($wikitype eq 'mediawiki') {
  1201. print FH "= $fn =\n\n";
  1202. } elsif ($wikitype eq 'md') {
  1203. print FH "# $fn\n\n";
  1204. } else { die("Unexpected wikitype '$wikitype'\n"); }
  1205. } else {
  1206. if ($wikitype eq 'mediawiki') {
  1207. print FH "\n== $sect ==\n\n";
  1208. } elsif ($wikitype eq 'md') {
  1209. print FH "\n## $sect\n\n";
  1210. } else { die("Unexpected wikitype '$wikitype'\n"); }
  1211. }
  1212. my $sectstr = defined $sections{$sect} ? $sections{$sect} : $$sectionsref{$sect};
  1213. print FH $sectstr;
  1214. $prevsectstr = $sectstr;
  1215. # make sure these don't show up twice.
  1216. delete($sections{$sect});
  1217. delete($$sectionsref{$sect});
  1218. }
  1219. print FH "\n\n";
  1220. close(FH);
  1221. if (defined $changeformat and ($origwikitype ne $wikitype)) {
  1222. system("cd '$wikipath' ; git mv '$_.${origwikitype}' '$_.${wikitype}'");
  1223. unlink("$wikipath/$_.${origwikitype}");
  1224. }
  1225. rename($path, "$wikipath/$_.${wikitype}") or die("Can't rename '$path' to '$wikipath/$_.${wikitype}': $!\n");
  1226. }
  1227. if (defined $readmepath) {
  1228. if ( -d $readmepath ) {
  1229. mkdir($wikireadmepath); # just in case
  1230. opendir(DH, $readmepath) or die("Can't opendir '$readmepath': $!\n");
  1231. while (my $d = readdir(DH)) {
  1232. my $dent = $d;
  1233. if ($dent =~ /\AREADME\-(.*?\.md)\Z/) { # we only bridge Markdown files here.
  1234. my $wikifname = $1;
  1235. next if $wikifname eq 'FrontPage.md';
  1236. filecopy("$readmepath/$dent", "$wikireadmepath/$wikifname", "\n");
  1237. }
  1238. }
  1239. closedir(DH);
  1240. my @pages = ();
  1241. opendir(DH, $wikireadmepath) or die("Can't opendir '$wikireadmepath': $!\n");
  1242. while (my $d = readdir(DH)) {
  1243. my $dent = $d;
  1244. if ($dent =~ /\A(.*?)\.(mediawiki|md)\Z/) {
  1245. my $wikiname = $1;
  1246. next if $wikiname eq 'FrontPage';
  1247. push @pages, $wikiname;
  1248. }
  1249. }
  1250. closedir(DH);
  1251. open(FH, '>', "$wikireadmepath/FrontPage.md") or die("Can't open '$wikireadmepath/FrontPage.md': $!\n");
  1252. print FH "# All READMEs available here\n\n";
  1253. foreach (sort @pages) {
  1254. my $wikiname = $_;
  1255. print FH "- [$wikiname]($wikiname)\n";
  1256. }
  1257. close(FH);
  1258. }
  1259. }
  1260. } elsif ($copy_direction == -2) { # --copy-to-manpages
  1261. # This only takes from the wiki data, since it has sections we omit from the headers, like code examples.
  1262. $manpath .= "/man3";
  1263. File::Path::make_path($manpath);
  1264. $dewikify_mode = 'manpage';
  1265. $wordwrap_mode = 'manpage';
  1266. my $introtxt = '';
  1267. if (0) {
  1268. open(FH, '<', "$srcpath/LICENSE.txt") or die("Can't open '$srcpath/LICENSE.txt': $!\n");
  1269. while (<FH>) {
  1270. chomp;
  1271. $introtxt .= ".\\\" $_\n";
  1272. }
  1273. close(FH);
  1274. }
  1275. if (!$gitrev) {
  1276. $gitrev = `cd "$srcpath" ; git rev-list HEAD~..`;
  1277. chomp($gitrev);
  1278. }
    # !!! FIXME
    open(FH, '<', "$srcpath/$versionfname") or die("Can't open '$srcpath/$versionfname': $!\n");
    my $majorver = 0;
    my $minorver = 0;
    my $patchver = 0;
    while (<FH>) {
        chomp;
        if (/$versionmajorregex/) {
            $majorver = int($1);
        } elsif (/$versionminorregex/) {
            $minorver = int($1);
        } elsif (/$versionpatchregex/) {
            $patchver = int($1);
        }
    }
    close(FH);
    my $fullversion = "$majorver.$minorver.$patchver";
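    # The three version regexes (set earlier in this script) are expected to
    # match header lines shaped roughly like these (illustrative, SDL-style):
    #
    #   #define SDL_MAJOR_VERSION   2
    #   #define SDL_MINOR_VERSION   26
    #   #define SDL_PATCHLEVEL      5
    #
    # which would yield a $fullversion of "2.26.5".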

    foreach (keys %headerfuncs) {
        my $fn = $_;
        next if not defined $wikifuncs{$fn};  # don't have a page for that function, skip it.
        my $wikitype = $wikitypes{$fn};
        my $sectionsref = $wikifuncs{$fn};
        my $remarks = $sectionsref->{'Remarks'};
        my $params = $sectionsref->{'Function Parameters'};
        my $returns = $sectionsref->{'Return Value'};
        my $version = $sectionsref->{'Version'};
        my $threadsafety = $sectionsref->{'Thread Safety'};
        my $related = $sectionsref->{'Related Functions'};
        my $examples = $sectionsref->{'Code Examples'};
        my $deprecated = $sectionsref->{'Deprecated'};
        my $brief = $sectionsref->{'[Brief]'};
        my $decl = $headerdecls{$fn};
        my $str = '';

        $brief = "$brief";
        $brief =~ s/\A[\s\n]*\= .*? \=\s*?\n+//ms;
        $brief =~ s/\A[\s\n]*\=\= .*? \=\=\s*?\n+//ms;
        $brief =~ s/\A(.*?\.) /$1\n/;  # \brief should only be one sentence, delimited by a period+space. Split if necessary.
        my @briefsplit = split /\n/, $brief;
        $brief = shift @briefsplit;
        $brief = dewikify($wikitype, $brief);

        if (defined $remarks) {
            $remarks = dewikify($wikitype, join("\n", @briefsplit) . $remarks);
        }
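        # Net effect (illustrative): a wiki brief of
        #   "Create a window. See the remarks for caveats."
        # yields a one-sentence manpage brief ("Create a window.") with the
        # remainder folded into the front of the Remarks text above.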

        $str .= $introtxt;

        $str .= ".\\\" This manpage content is licensed under Creative Commons\n";
        $str .= ".\\\" Attribution 4.0 International (CC BY 4.0)\n";
        $str .= ".\\\" https://creativecommons.org/licenses/by/4.0/\n";
        $str .= ".\\\" This manpage was generated from ${projectshortname}'s wiki page for $fn:\n";
        $str .= ".\\\" $wikiurl/$fn\n";
        $str .= ".\\\" Generated with SDL/build-scripts/wikiheaders.pl\n";
        $str .= ".\\\" revision $gitrev\n" if $gitrev ne '';
        $str .= ".\\\" Please report issues in this manpage's content at:\n";
        $str .= ".\\\" $bugreporturl\n";
        $str .= ".\\\" Please report issues in the generation of this manpage from the wiki at:\n";
        $str .= ".\\\" https://github.com/libsdl-org/SDL/issues/new?title=Misgenerated%20manpage%20for%20$fn\n";
        $str .= ".\\\" $projectshortname can be found at $projecturl\n";

        # Define a .URL macro. The "www.tmac" thing decides if we're using GNU roff (which has a .URL macro already), and if so, overrides the macro we just created.
        # This wizardry is from https://web.archive.org/web/20060102165607/http://people.debian.org/~branden/talks/wtfm/wtfm.pdf
        $str .= ".de URL\n";
        $str .= '\\$2 \(laURL: \\$1 \(ra\\$3' . "\n";
        $str .= "..\n";
        $str .= '.if \n[.g] .mso www.tmac' . "\n";

        $str .= ".TH $fn 3 \"$projectshortname $fullversion\" \"$projectfullname\" \"$projectshortname$majorver FUNCTIONS\"\n";
        $str .= ".SH NAME\n";
        $str .= "$fn";
        $str .= " \\- $brief" if (defined $brief);
        $str .= "\n";
        $str .= ".SH SYNOPSIS\n";
        $str .= ".nf\n";
        $str .= ".B #include \\(dq$mainincludefname\\(dq\n";
        $str .= ".PP\n";

        my @decllines = split /\n/, $decl;
        foreach (@decllines) {
            $str .= ".BI \"$_\n";
        }
        $str .= ".fi\n";
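        # What the manpage header built so far renders as, roughly (an
        # illustrative sketch with a hypothetical function and placeholder
        # project values in CAPS):
        #
        #   .TH SDL_SomeFunction 3 "SHORTNAME VERSION" "FULLNAME" "SHORTNAMEMAJOR FUNCTIONS"
        #   .SH NAME
        #   SDL_SomeFunction \- one-sentence brief
        #   .SH SYNOPSIS
        #   .nf
        #   .B #include \(dqMAIN_INCLUDE_HEADER\(dq
        #   .PP
        #   .BI "int SDL_SomeFunction(void);
        #   .fi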

        if (defined $remarks) {
            $str .= ".SH DESCRIPTION\n";
            $str .= $remarks . "\n";
        }

        if (defined $deprecated) {
            $str .= ".SH DEPRECATED\n";
            $str .= dewikify($wikitype, $deprecated) . "\n";
        }

        if (defined $params) {
            $str .= ".SH FUNCTION PARAMETERS\n";
            my @lines = split /\n/, $params;
            if ($wikitype eq 'mediawiki') {
                die("Unexpected data parsing MediaWiki table") if (shift @lines ne '{|');  # Dump the '{|' start
                while (scalar(@lines) >= 3) {
                    my $name = shift @lines;
                    my $desc = shift @lines;
                    my $terminator = shift @lines;  # the '|-' or '|}' line.
                    last if ($terminator ne '|-') and ($terminator ne '|}');  # we seem to have run out of table.
                    $name =~ s/\A\|\s*//;
                    $name =~ s/\A\*\*(.*?)\*\*/$1/;
                    $name =~ s/\A\'\'\'(.*?)\'\'\'/$1/;
                    $desc =~ s/\A\|\s*//;
                    $desc = dewikify($wikitype, $desc);
                    #print STDERR "FN: $fn NAME: $name DESC: $desc TERM: $terminator\n";
                    $str .= ".TP\n";
                    $str .= ".I $name\n";
                    $str .= "$desc\n";
                }
            } elsif ($wikitype eq 'md') {
                my $l;
                $l = shift @lines;
                die("Unexpected data parsing Markdown table") if (not $l =~ /\A\s*\|\s*\|\s*\|\s*\Z/);
                $l = shift @lines;
                die("Unexpected data parsing Markdown table") if (not $l =~ /\A\s*\|\s*\-*\s*\|\s*\-*\s*\|\s*\Z/);
                while (scalar(@lines) >= 1) {
                    $l = shift @lines;
                    if ($l =~ /\A\s*\|\s*(.*?)\s*\|\s*(.*?)\s*\|\s*\Z/) {
                        my $name = $1;
                        my $desc = $2;
                        $name =~ s/\A\*\*(.*?)\*\*/$1/;
                        $name =~ s/\A\'\'\'(.*?)\'\'\'/$1/;
                        $desc = dewikify($wikitype, $desc);
                        $str .= ".TP\n";
                        $str .= ".I $name\n";
                        $str .= "$desc\n";
                    } else {
                        last;  # we seem to have run out of table.
                    }
                }
            } else {
                die("write me");
            }
        }
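        # The two table parsers above expect parameter tables shaped roughly
        # like these (illustrative):
        #
        #   MediaWiki:
        #     {|
        #     | '''param'''
        #     | what the parameter means
        #     |-
        #     | '''other'''
        #     | another description
        #     |}
        #
        #   Markdown:
        #     |           |                          |
        #     | --------- | ------------------------ |
        #     | **param** | what the parameter means |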

        if (defined $returns) {
            $str .= ".SH RETURN VALUE\n";
            $str .= dewikify($wikitype, $returns) . "\n";
        }

        if (defined $examples) {
            $str .= ".SH CODE EXAMPLES\n";
            $dewikify_manpage_code_indent = 0;
            $str .= dewikify($wikitype, $examples) . "\n";
            $dewikify_manpage_code_indent = 1;
        }
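        # ($dewikify_manpage_code_indent presumably suppresses dewikify()'s usual
        # indentation of code blocks while the examples are emitted under their
        # own .SH heading; it is switched back on immediately after.)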

        if (defined $threadsafety) {
            $str .= ".SH THREAD SAFETY\n";
            $str .= dewikify($wikitype, $threadsafety) . "\n";
        }

        if (defined $version) {
            $str .= ".SH AVAILABILITY\n";
            $str .= dewikify($wikitype, $version) . "\n";
        }

        if (defined $related) {
            $str .= ".SH SEE ALSO\n";
            # !!! FIXME: lots of code duplication in all of these.
            my $v = dewikify($wikitype, $related);
            my @desclines = split /\n/, $v;
            my $nextstr = '';
            foreach (@desclines) {
                s/\A(\:|\* )//;
                s/\(\)\Z//;  # Convert "SDL_Func()" to "SDL_Func"
                s/\[\[(.*?)\]\]/$1/;  # in case some wikilinks remain.
                s/\[(.*?)\]\(.*?\)/$1/;  # in case some wikilinks remain.
                s/\A\*\s*\Z//;
                s/\A\/*//;
                s/\A\.BR\s+//;  # dewikify added this, but we want to handle it.
                s/\A\.I\s+//;  # dewikify added this, but we want to handle it.
                s/\A\s+//;
                s/\s+\Z//;
                next if $_ eq '';
                $str .= "$nextstr.BR $_ (3)";
                $nextstr = ",\n";
            }
            $str .= "\n";
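            # The resulting SEE ALSO block looks roughly like this
            # (illustrative, hypothetical function names):
            #
            #   .SH SEE ALSO
            #   .BR SDL_SomeFunction (3),
            #   .BR SDL_OtherFunction (3)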
        }

        if (0) {
            $str .= ".SH COPYRIGHT\n";
            $str .= "This manpage is licensed under\n";
            $str .= ".UR https://creativecommons.org/licenses/by/4.0/\n";
            $str .= "Creative Commons Attribution 4.0 International (CC BY 4.0)\n";
            $str .= ".UE\n";
            $str .= ".PP\n";
            $str .= "This manpage was generated from\n";
            $str .= ".UR $wikiurl/$fn\n";
            $str .= "${projectshortname}'s wiki\n";
            $str .= ".UE\n";
            $str .= "using SDL/build-scripts/wikiheaders.pl";
            $str .= " revision $gitrev" if $gitrev ne '';
            $str .= ".\n";
            $str .= "Please report issues in this manpage at\n";
            $str .= ".UR $bugreporturl\n";
            $str .= "our bugtracker!\n";
            $str .= ".UE\n";
        }

        my $path = "$manpath/$_.3.tmp";
        open(FH, '>', $path) or die("Can't open '$path': $!\n");
        print FH $str;
        close(FH);
        rename($path, "$manpath/$_.3") or die("Can't rename '$path' to '$manpath/$_.3': $!\n");
    }
}

# end of wikiheaders.pl ...