summaryrefslogtreecommitdiff
path: root/ikiwiki
blob: 61fa4a713f52a6b3d75dc6d5fefbf3e6bc2247dc (plain)
#!/usr/bin/perl -T
use warnings;
use strict;
use File::Find;
use Memoize;
use File::Spec;
# Taint mode (-T) requires a sanitized PATH before any external command
# (svn, gcc) is run.
$ENV{PATH}="/usr/local/bin:/usr/bin:/bin";
BEGIN {
	# markdown is distributed as a blosxom plugin, not a proper module;
	# it expects $blosxom::version to exist, so fake one before loading.
	$blosxom::version="is a proper perl module too much to ask?";
	do "/usr/bin/markdown";
}
# Global state: the source/destination directories plus per-page tables:
# current outgoing links, links as of the previous run, mtime at last
# render, rendered output filename, and source filename.
my ($srcdir, $destdir, %links, %oldlinks, %oldpagemtime, %renderedfiles,
	%pagesources);
# A wikilink looks like [[page]]; no whitespace allowed in the name.
my $wiki_link_regexp=qr/\[\[([^\s]+)\]\]/;
# Characters allowed in wiki source filenames; the capture also untaints.
my $wiki_file_regexp=qr/(^[-A-Za-z0-9_.:\/+]+$)/;
# Files to skip entirely: .svn dirs, dotfiles, ".." traversal, and
# already-rendered html output.
my $wiki_file_prune_regexp=qr!((^|/).svn/|\.\.|^\.|\/\.|\.html?$)!;
# Defaults; overridable by command-line options below.
my $verbose=0;
my $wikiname="wiki";
my $default_pagetype=".mdwn";
my $cgi=0;
my $url="";
my $cgiurl="";
my $svn=1;
  24. sub usage {
  25. die "usage: ikiwiki [options] source dest\n";
  26. }
  27. sub error ($) {
  28. if ($cgi) {
  29. print "Content-type: text/html\n\n";
  30. print "Error: @_\n";
  31. exit 1;
  32. }
  33. else {
  34. die @_;
  35. }
  36. }
  37. sub debug ($) {
  38. print "@_\n" if $verbose;
  39. }
  40. sub mtime ($) {
  41. my $page=shift;
  42. return (stat($page))[9];
  43. }
  44. sub possibly_foolish_untaint ($) {
  45. my $tainted=shift;
  46. my ($untainted)=$tainted=~/(.*)/;
  47. return $untainted;
  48. }
  49. sub basename {
  50. my $file=shift;
  51. $file=~s!.*/!!;
  52. return $file;
  53. }
  54. sub dirname {
  55. my $file=shift;
  56. $file=~s!/?[^/]+$!!;
  57. return $file;
  58. }
  59. sub pagetype ($) {
  60. my $page=shift;
  61. if ($page =~ /\.mdwn$/) {
  62. return ".mdwn";
  63. }
  64. else {
  65. return "unknown";
  66. }
  67. }
  68. sub pagename ($) {
  69. my $file=shift;
  70. my $type=pagetype($file);
  71. my $page=$file;
  72. $page=~s/\Q$type\E*$// unless $type eq 'unknown';
  73. return $page;
  74. }
  75. sub htmlpage ($) {
  76. my $page=shift;
  77. return $page.".html";
  78. }
  79. sub readfile ($) {
  80. my $file=shift;
  81. local $/=undef;
  82. open (IN, "$file") || error("failed to read $file: $!");
  83. my $ret=<IN>;
  84. close IN;
  85. return $ret;
  86. }
  87. sub writefile ($$) {
  88. my $file=shift;
  89. my $content=shift;
  90. my $dir=dirname($file);
  91. if (! -d $dir) {
  92. my $d="";
  93. foreach my $s (split(m!/+!, $dir)) {
  94. $d.="$s/";
  95. if (! -d $d) {
  96. mkdir($d) || error("failed to create directory $d: $!");
  97. }
  98. }
  99. }
  100. open (OUT, ">$file") || error("failed to write $file: $!");
  101. print OUT $content;
  102. close OUT;
  103. }
  104. sub findlinks {
  105. my $content=shift;
  106. my @links;
  107. while ($content =~ /$wiki_link_regexp/g) {
  108. push @links, lc($1);
  109. }
  110. return @links;
  111. }
# Given a page and the text of a link on the page, determine which existing
# page that link best points to. Prefers pages under a subdirectory with
# the same name as the source page, failing that goes down the directory tree
# to the base looking for matching pages.
sub bestlink ($$) {
	my $page=shift;
	my $link=lc(shift);

	# Start in the source page's own namespace (so the first candidate
	# is "$page/$link"), then strip one path component per iteration
	# until the wiki root has been tried.
	my $cwd=$page;
	do {
		my $l=$cwd;
		$l.="/" if length $l;
		$l.=$link;
		if (exists $links{$l}) {
			#debug("for $page, \"$link\", use $l");
			return $l;
		}
	} while $cwd=~s!/?[^/]+$!!;	# drop last component; "" ends loop

	#print STDERR "warning: page $page, broken link: $link\n";
	# No existing page matches; caller treats "" as a broken link.
	return "";
}
  132. sub isinlinableimage ($) {
  133. my $file=shift;
  134. $file=~/\.(png|gif|jpg|jpeg)$/;
  135. }
# Return the HTML for a link named $link appearing on $page. Existing
# pages get a relative <a> (or an inline <img> for images, unless
# $noimagelink is set); missing pages get a "?" link to the CGI create
# form.
sub htmllink {
	my $page=shift;
	my $link=shift;
	my $noimagelink=shift;

	my $bestlink=bestlink($page, $link);

	# A page linking to itself needs no anchor at all.
	return $link if $page eq $bestlink;

	# TODO BUG: %renderedfiles may not have it, if the linked to page
	# was also added and isn't yet rendered! Note that this bug is
	# masked by the bug mentioned below that makes all new files
	# be rendered twice.
	if (! grep { $_ eq $bestlink } values %renderedfiles) {
		$bestlink=htmlpage($bestlink);
	}
	if (! grep { $_ eq $bestlink } values %renderedfiles) {
		# Target doesn't exist even as html; offer to create it.
		return "<a href=\"$cgiurl?do=create&page=$link&from=$page\">?</a>$link"
	}

	# Make the href relative to the linking page's directory.
	$bestlink=File::Spec->abs2rel($bestlink, dirname($page));

	if (! $noimagelink && isinlinableimage($bestlink)) {
		return "<img src=\"$bestlink\">";
	}
	return "<a href=\"$bestlink\">$link</a>";
}
  158. sub linkify ($$) {
  159. my $content=shift;
  160. my $file=shift;
  161. $content =~ s/$wiki_link_regexp/htmllink(pagename($file), $1)/eg;
  162. return $content;
  163. }
  164. sub htmlize ($$) {
  165. my $type=shift;
  166. my $content=shift;
  167. if ($type eq '.mdwn') {
  168. return Markdown::Markdown($content);
  169. }
  170. else {
  171. error("htmlization of $type not supported");
  172. }
  173. }
# Append a "Links:" footer to $content listing every page that links
# back to $page. Returns the (possibly unmodified) content.
sub linkbacks ($$) {
	my $content=shift;
	my $page=shift;

	my @links;
	foreach my $p (keys %links) {
		next if bestlink($page, $p) eq $page;	# skip self
		# Does any link on page $p resolve to $page?
		if (grep { length $_ && bestlink($p, $_) eq $page } @{$links{$p}}) {
			my $href=File::Spec->abs2rel(htmlpage($p), dirname($page));

			# Trim common dir prefixes from both pages.
			my $p_trimmed=$p;
			my $page_trimmed=$page;
			my $dir;
			# Peel one leading "dir/" off both names per iteration,
			# stopping as soon as the prefixes diverge.
			1 while (($dir)=$page_trimmed=~m!^([^/]+/)!) &&
			        defined $dir &&
			        $p_trimmed=~s/^\Q$dir\E// &&
			        $page_trimmed=~s/^\Q$dir\E//;

			push @links, "<a href=\"$href\">$p_trimmed</a>";
		}
	}

	$content.="<hr><p>Links: ".join(" ", sort @links)."</p>\n" if @links;
	return $content;
}
  196. sub finalize ($$) {
  197. my $content=shift;
  198. my $page=shift;
  199. my $title=basename($page);
  200. $title=~s/_/ /g;
  201. my $pagelink="";
  202. my $path="";
  203. foreach my $dir (reverse split("/", $page)) {
  204. if (length($pagelink)) {
  205. $pagelink="<a href=\"$path$dir.html\">$dir</a>/ $pagelink";
  206. }
  207. else {
  208. $pagelink=$dir;
  209. }
  210. $path.="../";
  211. }
  212. $path=~s/\.\.\/$/index.html/;
  213. $pagelink="<a href=\"$path\">$wikiname</a>/ $pagelink";
  214. my @actions;
  215. if (length $cgiurl) {
  216. push @actions, "<a href=\"$cgiurl?do=edit&page=$page\">Edit</a>";
  217. push @actions, "<a href=\"$cgiurl?do=recentchanges\">RecentChanges</a>";
  218. }
  219. $content="<html>\n<head><title>$title</title></head>\n<body>\n".
  220. "<h1>$pagelink</h1>\n".
  221. "@actions\n<hr>\n".
  222. $content.
  223. "</body>\n</html>\n";
  224. return $content;
  225. }
# Render one source file into the destination directory. Known page
# types are htmlized; everything else is copied through unchanged.
# Updates the global link/mtime/rendered tables as a side effect.
sub render ($) {
	my $file=shift;

	my $type=pagetype($file);
	my $content=readfile("$srcdir/$file");
	if ($type ne 'unknown') {
		my $page=pagename($file);

		# Record outgoing links before linkify rewrites them away.
		$links{$page}=[findlinks($content)];

		$content=linkify($content, $file);
		$content=htmlize($type, $content);
		$content=linkbacks($content, $page);
		$content=finalize($content, $page);

		writefile("$destdir/".htmlpage($page), $content);
		$oldpagemtime{$page}=time;
		$renderedfiles{$page}=htmlpage($page);
	}
	else {
		# Non-page file: copy verbatim; it has no links of its own.
		$links{$file}=[];
		writefile("$destdir/$file", $content);
		$oldpagemtime{$file}=time;
		$renderedfiles{$file}=$file;
	}
}
  248. sub loadindex () {
  249. open (IN, "$srcdir/.index") || return;
  250. while (<IN>) {
  251. $_=possibly_foolish_untaint($_);
  252. chomp;
  253. my ($mtime, $file, $rendered, @links)=split(' ', $_);
  254. my $page=pagename($file);
  255. $pagesources{$page}=$file;
  256. $oldpagemtime{$page}=$mtime;
  257. $oldlinks{$page}=[@links];
  258. $links{$page}=[@links];
  259. $renderedfiles{$page}=$rendered;
  260. }
  261. close IN;
  262. }
  263. sub saveindex () {
  264. open (OUT, ">$srcdir/.index") || error("cannot write to .index: $!");
  265. foreach my $page (keys %oldpagemtime) {
  266. print OUT "$oldpagemtime{$page} $pagesources{$page} $renderedfiles{$page} ".
  267. join(" ", @{$links{$page}})."\n"
  268. if $oldpagemtime{$page};
  269. }
  270. close OUT;
  271. }
# Bring the source directory up to date from revision control.
# No-op unless the srcdir is an svn checkout.
sub rcs_update () {
	if (-d "$srcdir/.svn") {
		# List-form system: no shell, so $srcdir needs no quoting.
		if (system("svn", "update", "--quiet", $srcdir) != 0) {
			warn("svn update failed\n");
		}
	}
}
# Commit all changes in the source directory with the given log message.
# The message may come from the CGI, so it is (foolishly) untainted here.
sub rcs_commit ($) {
	my $message=shift;
	if (-d "$srcdir/.svn") {
		if (system("svn", "commit", "--quiet", "-m",
		           possibly_foolish_untaint($message), $srcdir) != 0) {
			warn("svn commit failed\n");
		}
	}
}
# Schedule a new file for addition to revision control. Walks up the
# directory tree to the topmost unversioned parent, since "svn add" on
# that directory adds everything below it too.
sub rcs_add ($) {
	my $file=shift;
	if (-d "$srcdir/.svn") {
		my $parent=dirname($file);
		while (! -d "$srcdir/$parent/.svn") {
			$file=$parent;
			$parent=dirname($file);
		}
		if (system("svn", "add", "--quiet", "$srcdir/$file") != 0) {
			warn("svn add failed\n");
		}
	}
}
# Return up to $num recent changes parsed from "svn log -v" output.
# Each change is a hashref with rev, user, when (humanized age),
# message (html), and the list of wiki pages touched.
sub rcs_recentchanges ($) {
	my $num=shift;
	my @ret;

	# Lazy-load the date helpers only when recentchanges is requested.
	eval q{use Date::Parse};
	eval q{use Time::Duration};

	if (-d "$srcdir/.svn") {
		my $info=`LANG=C svn info $srcdir`;
		my ($svn_url)=$info=~/^URL: (.*)$/m;

		# FIXME: currently assumes that the wiki is somewhere
		# under trunk in svn, doesn't support other layouts.
		my ($svn_base)=$svn_url=~m!(/trunk(?:/.*)?)$!;

		my $div=qr/^--------------------+$/;
		my $infoline=qr/^r(\d+)\s+\|\s+([^\s]+)\s+\|\s+(\d+-\d+-\d+\s+\d+:\d+:\d+\s+[-+]?\d+).*/;

		# State machine over the log output: 'start' -> 'header'
		# (revision line plus changed paths) -> 'body' (log message);
		# each divider line closes one entry and starts the next header.
		my $state='start';
		my ($rev, $user, $when, @pages, $message);
		foreach (`LANG=C svn log -v '$svn_url'`) {
			chomp;
			if ($state eq 'start' && /$div/) {
				$state='header';
			}
			elsif ($state eq 'header' && /$infoline/) {
				$rev=$1;
				$user=$2;
				$when=concise(ago(time - str2time($3)));
			}
			elsif ($state eq 'header' && /^\s+[A-Z]\s+\Q$svn_base\E\/(.+)$/) {
				# A changed path under the wiki's base directory.
				push @pages, pagename($1) if length $1;
			}
			elsif ($state eq 'header' && /^$/) {
				$state='body';
			}
			elsif ($state eq 'body' && /$div/) {
				# Entry complete; keep it only if it touched pages.
				push @ret, { rev => $rev, user => $user,
					when => $when, message => $message,
					pages => [@pages] } if @pages;
				return @ret if @ret >= $num;
				$state='header';
				$message=$rev=$user=$when=undef;
				@pages=();
			}
			elsif ($state eq 'body') {
				$message.="$_<br>\n";
			}
		}
	}
	return @ret;
}
  348. sub prune ($) {
  349. my $file=shift;
  350. unlink($file);
  351. my $dir=dirname($file);
  352. while (rmdir($dir)) {
  353. $dir=dirname($dir);
  354. }
  355. }
# Incremental rebuild: scan srcdir, render new/changed files, delete
# output of removed pages, then do two fix-up passes for pages whose
# links or linkbacks are affected by the additions/removals.
sub refresh () {
	# Find existing pages.
	my %exists;
	my @files;
	find({
		no_chdir => 1,
		wanted => sub {
			if (/$wiki_file_prune_regexp/) {
				# Don't descend into pruned directories.
				$File::Find::prune=1;
			}
			elsif (! -d $_) {
				my ($f)=/$wiki_file_regexp/; # untaint
				if (! defined $f) {
					warn("skipping bad filename $_\n");
				}
				else {
					# Store filenames relative to srcdir.
					$f=~s/^\Q$srcdir\E\/?//;
					push @files, $f;
					$exists{pagename($f)}=1;
				}
			}
		},
	}, $srcdir);

	my %rendered;

	# check for added or removed pages
	my @add;
	foreach my $file (@files) {
		my $page=pagename($file);
		if (! $oldpagemtime{$page}) {
			debug("new page $page");
			push @add, $file;
			$links{$page}=[];
			$pagesources{$page}=$file;
		}
	}
	my @del;
	foreach my $page (keys %oldpagemtime) {
		if (! $exists{$page}) {
			debug("removing old page $page");
			push @del, $renderedfiles{$page};
			prune($destdir."/".$renderedfiles{$page});
			delete $renderedfiles{$page};
			# Keep the key with a zero mtime so saveindex drops it.
			$oldpagemtime{$page}=0;
			delete $pagesources{$page};
		}
	}

	# render any updated files
	foreach my $file (@files) {
		my $page=pagename($file);

		if (! exists $oldpagemtime{$page} ||
		    mtime("$srcdir/$file") > $oldpagemtime{$page}) {
			debug("rendering changed file $file");
			render($file);
			$rendered{$file}=1;
		}
	}

	# if any files were added or removed, check to see if each page
	# needs an update due to linking to them
	# TODO: inefficient; pages may get rendered above and again here;
	# problem is the bestlink may have changed and we won't know until
	# now
	if (@add || @del) {
FILE:		foreach my $file (@files) {
			my $page=pagename($file);
			foreach my $f (@add, @del) {
				my $p=pagename($f);
				foreach my $link (@{$links{$page}}) {
					if (bestlink($page, $link) eq $p) {
						debug("rendering $file, which links to $p");
						render($file);
						$rendered{$file}=1;
						next FILE;
					}
				}
			}
		}
	}

	# handle linkbacks; if a page has added/removed links, update the
	# pages it links to
	# TODO: inefficient; pages may get rendered above and again here;
	# problem is the linkbacks could be wrong in the first pass render
	# above
	if (%rendered) {
		my %linkchanged;
		foreach my $file (keys %rendered, @del) {
			my $page=pagename($file);

			# Links present now but not in the previous run.
			if (exists $links{$page}) {
				foreach my $link (@{$links{$page}}) {
					$link=bestlink($page, $link);
					if (length $link &&
					    ! exists $oldlinks{$page} ||
					    ! grep { $_ eq $link } @{$oldlinks{$page}}) {
						$linkchanged{$link}=1;
					}
				}
			}
			# Links present in the previous run but not now.
			if (exists $oldlinks{$page}) {
				foreach my $link (@{$oldlinks{$page}}) {
					$link=bestlink($page, $link);
					if (length $link &&
					    ! exists $links{$page} ||
					    ! grep { $_ eq $link } @{$links{$page}}) {
						$linkchanged{$link}=1;
					}
				}
			}
		}
		foreach my $link (keys %linkchanged) {
			my $linkfile=$pagesources{$link};
			if (defined $linkfile) {
				debug("rendering $linkfile, to update its linkbacks");
				render($linkfile);
			}
		}
	}
}
# Generates a C wrapper program for running ikiwiki in a specific way.
# The wrapper may be safely made suid.
sub gen_wrapper ($$) {
	my ($svn, $rebuild)=@_;

	eval q{use Cwd 'abs_path'};
	$srcdir=abs_path($srcdir);
	$destdir=abs_path($destdir);
	my $this=abs_path($0);
	if (! -x $this) {
		error("$this doesn't seem to be executable");
	}

	# Bake the current configuration into the wrapper's argument list.
	my @params=($srcdir, $destdir, "--wikiname=$wikiname");
	push @params, "--verbose" if $verbose;
	push @params, "--rebuild" if $rebuild;
	push @params, "--nosvn" if !$svn;
	push @params, "--cgi" if $cgi;
	push @params, "--url=$url" if $url;
	push @params, "--cgiurl=$cgiurl" if $cgiurl;
	my $params=join(" ", @params);

	# Build the execl() argument list; $this appears twice because the
	# first copy is the path to execute and the second is argv[0].
	my $call='';
	foreach my $p ($this, $this, @params) {
		$call.=qq{"$p", };
	}
	$call.="NULL";

	# Only a whitelist of environment variables survives into the
	# (possibly suid) wrapper; CGI variables pass through only when
	# generating a CGI wrapper.
	my @envsave;
	push @envsave, qw{REMOTE_ADDR QUERY_STRING REQUEST_METHOD REQUEST_URI
	                  CONTENT_TYPE CONTENT_LENGTH GATEWAY_INTERFACE} if $cgi;
	my $envsave="";
	foreach my $var (@envsave) {
		$envsave.=<<"EOF"
	if ((s=getenv("$var")))
		asprintf(&newenviron[i++], "%s=%s", "$var", s);
EOF
	}

	open(OUT, ">ikiwiki-wrap.c") || error("failed to write ikiwiki-wrap.c: $!");;
	print OUT <<"EOF";
/* A wrapper for ikiwiki, can be safely made suid. */
#define _GNU_SOURCE
#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>
#include <string.h>
extern char **environ;
int main (int argc, char **argv) {
	/* Sanitize environment. */
	char *s;
	char *newenviron[$#envsave+3];
	int i=0;
$envsave
	newenviron[i++]="HOME=$ENV{HOME}";
	newenviron[i]=NULL;
	environ=newenviron;
	if (argc == 2 && strcmp(argv[1], "--params") == 0) {
		printf("$params\\n");
		exit(0);
	}
	execl($call);
	perror("failed to run $this");
	exit(1);
}
EOF
	close OUT;
	if (system("gcc", "ikiwiki-wrap.c", "-o", "ikiwiki-wrap") != 0) {
		error("failed to compile ikiwiki-wrap.c");
	}
	unlink("ikiwiki-wrap.c");
	print "successfully generated ikiwiki-wrap\n";
	exit 0;
}
# Handle a CGI request, dispatching on the "do" parameter:
# recentchanges, create, edit, or save.
sub cgi () {
	eval q{use CGI};
	my $q=CGI->new;

	my $do=$q->param('do');
	if (! defined $do || ! length $do) {
		error("\"do\" parameter missing");
	}

	# RecentChanges takes no page parameter, so handle it before the
	# page-name validation below.
	if ($do eq 'recentchanges') {
		my $list="<ul>\n";
		foreach my $change (rcs_recentchanges(100)) {
			$list.="<li>";
			$list.=join(", ", map { htmllink("", $_, 1) } @{$change->{pages}});
			$list.="<br>\n";
			$list.="changed ".$change->{when}." by ".
			       htmllink("", $change->{user}, 1).
			       ": <i>".$change->{message}."</i>\n";
			$list.="</li>\n";
		}
		$list.="</ul>\n";

		print $q->header,
		      $q->start_html("RecentChanges"),
		      $q->h1("<a href=\"$url\">$wikiname</a>/ RecentChanges"),
		      $list,
		      $q->end_form,
		      $q->end_html;
		return;
	}

	# Validate and untaint the page name; reject names that round-trip
	# differently, match the prune pattern, or are absolute paths.
	my ($page)=$q->param('page')=~/$wiki_file_regexp/;
	if (! defined $page || ! length $page || $page ne $q->param('page') ||
	    $page=~/$wiki_file_prune_regexp/ || $page=~/^\//) {
		error("bad page name");
	}
	$page=lc($page);

	# Forms post back to this same URL, minus any query string.
	my $action=$q->request_uri;
	$action=~s/\?.*//;

	if ($do eq 'create') {
		if (exists $pagesources{lc($page)}) {
			# hmm, someone else made the page in the meantime?
			print $q->redirect("$url/".htmlpage($page));
		}

		# Offer candidate locations for the new page: as a sibling of
		# the linking page, as a subpage of it, and at each level up
		# to the wiki root.
		my @page_locs;
		my ($from)=$q->param('from')=~/$wiki_file_regexp/;
		if (! defined $from || ! length $from ||
		    $from ne $q->param('from') ||
		    $from=~/$wiki_file_prune_regexp/ || $from=~/^\//) {
			@page_locs=$page;
		}
		else {
			my $dir=$from."/";
			$dir=~s![^/]+/$!!;
			push @page_locs, $dir.$page;
			push @page_locs, "$from/$page";
			while (length $dir) {
				$dir=~s![^/]+/$!!;
				push @page_locs, $dir.$page;
			}
		}

		# The form posts back as a save request.
		$q->param("do", "save");
		print $q->header,
		      $q->start_html("Creating $page"),
		      $q->h1("<a href=\"$url\">$wikiname</a>/ Creating $page"),
		      $q->start_form(-action => $action),
		      $q->hidden('do'),
		      "Select page location:",
		      $q->popup_menu('page', \@page_locs),
		      $q->textarea(-name => 'content',
		                   -default => "",
		                   -rows => 20,
		                   -columns => 80),
		      $q->br,
		      "Optional comment about this change:",
		      $q->br,
		      $q->textfield(-name => "comments", -size => 80),
		      $q->br,
		      $q->submit("Save Page"),
		      $q->end_form,
		      $q->end_html;
	}
	elsif ($do eq 'edit') {
		# Pre-fill the form with the current source, using the CRLF
		# line endings that browser textareas expect.
		my $content="";
		if (exists $pagesources{lc($page)}) {
			$content=readfile("$srcdir/$pagesources{lc($page)}");
			$content=~s/\n/\r\n/g;
		}
		$q->param("do", "save");
		print $q->header,
		      $q->start_html("Editing $page"),
		      $q->h1("<a href=\"$url\">$wikiname</a>/ Editing $page"),
		      $q->start_form(-action => $action),
		      $q->hidden('do'),
		      $q->hidden('page'),
		      $q->textarea(-name => 'content',
		                   -default => $content,
		                   -rows => 20,
		                   -columns => 80),
		      $q->br,
		      "Optional comment about this change:",
		      $q->br,
		      $q->textfield(-name => "comments", -size => 80),
		      $q->br,
		      $q->submit("Save Page"),
		      $q->end_form,
		      $q->end_html;
	}
	elsif ($do eq 'save') {
		# Determine the source file; new pages get the default type.
		my $file=$page.$default_pagetype;
		my $newfile=1;
		if (exists $pagesources{lc($page)}) {
			$file=$pagesources{lc($page)};
			$newfile=0;
		}

		# Normalize browser CRLF (and any stray CR) line endings.
		my $content=$q->param('content');
		$content=~s/\r\n/\n/g;
		$content=~s/\r/\n/g;
		writefile("$srcdir/$file", $content);

		my $message="web commit from $ENV{REMOTE_ADDR}";
		if (defined $q->param('comments')) {
			$message.=": ".$q->param('comments');
		}

		if ($svn) {
			if ($newfile) {
				rcs_add($file);
			}
			# presumably the commit will trigger an update
			# of the wiki
			rcs_commit($message);
		}
		else {
			refresh();
		}

		print $q->redirect("$url/".htmlpage($page));
	}
	else {
		error("unknown do parameter");
	}
}
  677. my $rebuild=0;
  678. my $wrapper=0;
  679. if (grep /^-/, @ARGV) {
  680. eval {use Getopt::Long};
  681. GetOptions(
  682. "wikiname=s" => \$wikiname,
  683. "verbose|v" => \$verbose,
  684. "rebuild" => \$rebuild,
  685. "wrapper" => \$wrapper,
  686. "svn!" => \$svn,
  687. "cgi" => \$cgi,
  688. "url=s" => \$url,
  689. "cgiurl=s" => \$cgiurl,
  690. ) || usage();
  691. }
  692. usage() unless @ARGV == 2;
  693. ($srcdir) = possibly_foolish_untaint(shift);
  694. ($destdir) = possibly_foolish_untaint(shift);
  695. if ($cgi && ! length $url) {
  696. error("Must specify url to wiki with --url when using --cgi");
  697. }
  698. gen_wrapper($svn, $rebuild) if $wrapper;
  699. memoize('pagename');
  700. memoize('bestlink');
  701. loadindex() unless $rebuild;
  702. if ($cgi) {
  703. cgi();
  704. }
  705. else {
  706. rcs_update() if $svn;
  707. refresh();
  708. saveindex();
  709. }