summaryrefslogtreecommitdiff
path: root/ikiwiki
blob: daf9c6d9069b30a3b857edc7240634c249a758be (plain)
#!/usr/bin/perl -T
# ikiwiki: compiles a directory of markdown source pages into a static
# html wiki, optionally backed by subversion. Runs under taint mode (-T).
use warnings;
use strict;
use File::Find;
use Memoize;
use File::Spec;
BEGIN {
# Markdown.pl is a standalone script, not a module; loading it via do()
# presumably makes it behave as a blosxom plugin (defining
# Markdown::Markdown without acting as a filter) — TODO confirm against
# the installed /usr/bin/markdown.
$blosxom::version="is a proper perl module too much to ask?";
do "/usr/bin/markdown";
}
# Taint mode requires a known-safe PATH before any subcommands run.
$ENV{PATH}="/usr/local/bin:/usr/bin:/bin";
# Global wiki state, populated by loadindex()/refresh():
#   %links         page => array ref of raw link texts found on it
#   %oldlinks      page => links as of the last saved index
#   %oldpagemtime  page => mtime at last render (0 marks deletion)
#   %renderedfiles page => output file written for it
#   %pagesources   page => source file it was built from
my ($srcdir, $destdir, %links, %oldlinks, %oldpagemtime, %renderedfiles,
    %pagesources);
my $wiki_link_regexp=qr/\[\[([^\s]+)\]\]/;	# [[WikiLink]] markup
my $wiki_file_regexp=qr/(^[-A-Za-z0-9_.:\/+]+$)/;	# capture untaints
my $wiki_file_prune_regexp=qr!((^|/).svn/|\.\.|^\.|\/\.|\.html?$)!;	# skip list
my $verbose=0;
my $wikiname="wiki";
my $default_pagetype=".mdwn";
my $cgi=0;
my $url="";
my $cgiurl="";
my $historyurl="";
my $svn=1;
  25. sub usage { #{{{
  26. die "usage: ikiwiki [options] source dest\n";
  27. } #}}}
  28. sub error ($) { #{{{
  29. if ($cgi) {
  30. print "Content-type: text/html\n\n";
  31. print "Error: @_\n";
  32. exit 1;
  33. }
  34. else {
  35. die @_;
  36. }
  37. } #}}}
  38. sub debug ($) { #{{{
  39. print "@_\n" if $verbose;
  40. } #}}}
  41. sub mtime ($) { #{{{
  42. my $page=shift;
  43. return (stat($page))[9];
  44. } #}}}
  45. sub possibly_foolish_untaint ($) { #{{{
  46. my $tainted=shift;
  47. my ($untainted)=$tainted=~/(.*)/;
  48. return $untainted;
  49. } #}}}
  50. sub basename ($) { #{{{
  51. my $file=shift;
  52. $file=~s!.*/!!;
  53. return $file;
  54. } #}}}
  55. sub dirname ($) { #{{{
  56. my $file=shift;
  57. $file=~s!/?[^/]+$!!;
  58. return $file;
  59. } #}}}
  60. sub pagetype ($) { #{{{
  61. my $page=shift;
  62. if ($page =~ /\.mdwn$/) {
  63. return ".mdwn";
  64. }
  65. else {
  66. return "unknown";
  67. }
  68. } #}}}
  69. sub pagename ($) { #{{{
  70. my $file=shift;
  71. my $type=pagetype($file);
  72. my $page=$file;
  73. $page=~s/\Q$type\E*$// unless $type eq 'unknown';
  74. return $page;
  75. } #}}}
  76. sub htmlpage ($) { #{{{
  77. my $page=shift;
  78. return $page.".html";
  79. } #}}}
  80. sub readfile ($) { #{{{
  81. my $file=shift;
  82. local $/=undef;
  83. open (IN, "$file") || error("failed to read $file: $!");
  84. my $ret=<IN>;
  85. close IN;
  86. return $ret;
  87. } #}}}
  88. sub writefile ($$) { #{{{
  89. my $file=shift;
  90. my $content=shift;
  91. my $dir=dirname($file);
  92. if (! -d $dir) {
  93. my $d="";
  94. foreach my $s (split(m!/+!, $dir)) {
  95. $d.="$s/";
  96. if (! -d $d) {
  97. mkdir($d) || error("failed to create directory $d: $!");
  98. }
  99. }
  100. }
  101. open (OUT, ">$file") || error("failed to write $file: $!");
  102. print OUT $content;
  103. close OUT;
  104. } #}}}
  105. sub findlinks { #{{{
  106. my $content=shift;
  107. my @links;
  108. while ($content =~ /$wiki_link_regexp/g) {
  109. push @links, lc($1);
  110. }
  111. return @links;
  112. } #}}}
  113. # Given a page and the text of a link on the page, determine which existing
  114. # page that link best points to. Prefers pages under a subdirectory with
  115. # the same name as the source page, failing that goes down the directory tree
  116. # to the base looking for matching pages.
  117. sub bestlink ($$) { #{{{
  118. my $page=shift;
  119. my $link=lc(shift);
  120. my $cwd=$page;
  121. do {
  122. my $l=$cwd;
  123. $l.="/" if length $l;
  124. $l.=$link;
  125. if (exists $links{$l}) {
  126. #debug("for $page, \"$link\", use $l");
  127. return $l;
  128. }
  129. } while $cwd=~s!/?[^/]+$!!;
  130. #print STDERR "warning: page $page, broken link: $link\n";
  131. return "";
  132. } #}}}
  133. sub isinlinableimage ($) { #{{{
  134. my $file=shift;
  135. $file=~/\.(png|gif|jpg|jpeg)$/;
  136. } #}}}
# Produce the html for one link with text $link appearing on $page.
# Resolves the target via bestlink(); a missing target becomes a "?"
# link to the CGI create form. $noimagelink forces an <a> even for
# image targets.
sub htmllink { #{{{
    my $page=shift;
    my $link=shift;
    my $noimagelink=shift;	# true: never inline as an <img>
    my $bestlink=bestlink($page, $link);
    # A page linking to itself needs no anchor at all.
    return $link if $page eq $bestlink;
    # TODO BUG: %renderedfiles may not have it, if the linked to page
    # was also added and isn't yet rendered! Note that this bug is
    # masked by the bug mentioned below that makes all new files
    # be rendered twice.
    if (! grep { $_ eq $bestlink } values %renderedfiles) {
        $bestlink=htmlpage($bestlink);
    }
    if (! grep { $_ eq $bestlink } values %renderedfiles) {
        # No rendered target at all: offer to create the page.
        return "<a href=\"$cgiurl?do=create&page=$link&from=$page\">?</a>$link"
    }
    # Make the href relative to the linking page's directory.
    $bestlink=File::Spec->abs2rel($bestlink, dirname($page));
    if (! $noimagelink && isinlinableimage($bestlink)) {
        return "<img src=\"$bestlink\">";
    }
    return "<a href=\"$bestlink\">$link</a>";
} #}}}
  159. sub linkify ($$) { #{{{
  160. my $content=shift;
  161. my $file=shift;
  162. $content =~ s/$wiki_link_regexp/htmllink(pagename($file), $1)/eg;
  163. return $content;
  164. } #}}}
  165. sub htmlize ($$) { #{{{
  166. my $type=shift;
  167. my $content=shift;
  168. if ($type eq '.mdwn') {
  169. return Markdown::Markdown($content);
  170. }
  171. else {
  172. error("htmlization of $type not supported");
  173. }
  174. } #}}}
# Append a "Links:" footer to $content listing every page that links
# back to $page.
sub linkbacks ($$) { #{{{
    my $content=shift;
    my $page=shift;
    my @links;
    foreach my $p (keys %links) {
        next if bestlink($page, $p) eq $page;	# skip self references
        # Does any link recorded on page $p resolve to $page?
        if (grep { length $_ && bestlink($p, $_) eq $page } @{$links{$p}}) {
            my $href=File::Spec->abs2rel(htmlpage($p), dirname($page));
            # Trim common dir prefixes from both pages.
            my $p_trimmed=$p;
            my $page_trimmed=$page;
            my $dir;
            # Repeatedly peel an identical leading "dir/" component off
            # both names, so the displayed link text is as short as
            # possible; stops as soon as the prefixes diverge.
            1 while (($dir)=$page_trimmed=~m!^([^/]+/)!) &&
                    defined $dir &&
                    $p_trimmed=~s/^\Q$dir\E// &&
                    $page_trimmed=~s/^\Q$dir\E//;
            push @links, "<a href=\"$href\">$p_trimmed</a>";
        }
    }
    $content.="<hr><p>Links: ".join(" ", sort @links)."</p>\n" if @links;
    return $content;
} #}}}
# Wrap rendered page $content in the full html skeleton: title,
# breadcrumb trail of parent pages, and the Edit/RecentChanges/History
# action links.
sub finalize ($$) { #{{{
    my $content=shift;
    my $page=shift;
    my $title=basename($page);
    $title=~s/_/ /g;	# underscores in page names display as spaces
    my $pagelink="";
    my $path="";
    # Walk the page path from leaf to root building the breadcrumb; the
    # leaf is plain text, each ancestor becomes a relative link, and
    # $path accumulates the "../" needed to reach it.
    foreach my $dir (reverse split("/", $page)) {
        if (length($pagelink)) {
            $pagelink="<a href=\"$path$dir.html\">$dir</a>/ $pagelink";
        }
        else {
            $pagelink=$dir;
        }
        $path.="../";
    }
    $path=~s/\.\.\/$/index.html/;	# topmost crumb points at the wiki index
    $pagelink="<a href=\"$path\">$wikiname</a>/ $pagelink";
    my @actions;
    if (length $cgiurl) {
        push @actions, "<a href=\"$cgiurl?do=edit&page=$page\">Edit</a>";
        push @actions, "<a href=\"$cgiurl?do=recentchanges\">RecentChanges</a>";
    }
    if (length $historyurl) {
        # "[[]]" in the history url template stands for the source file.
        my $url=$historyurl;
        $url=~s/\[\[\]\]/$pagesources{$page}/g;
        push @actions, "<a href=\"$url\">History</a>";
    }
    $content="<html>\n<head><title>$title</title></head>\n<body>\n".
        "<h1>$pagelink</h1>\n".
        "@actions\n<hr>\n".
        $content.
        "</body>\n</html>\n";
    return $content;
} #}}}
# Render one source file (relative to $srcdir) into $destdir and record
# its metadata. Known page types go through the full linkify/htmlize/
# linkbacks/finalize pipeline; anything else is copied through verbatim.
sub render ($) { #{{{
    my $file=shift;
    my $type=pagetype($file);
    my $content=readfile("$srcdir/$file");
    if ($type ne 'unknown') {
        my $page=pagename($file);
        # Refresh this page's entry in the link table before rendering.
        $links{$page}=[findlinks($content)];
        $content=linkify($content, $file);
        $content=htmlize($type, $content);
        $content=linkbacks($content, $page);
        $content=finalize($content, $page);
        writefile("$destdir/".htmlpage($page), $content);
        $oldpagemtime{$page}=time;
        $renderedfiles{$page}=htmlpage($page);
    }
    else {
        # Not a wiki page: copy the raw file and record it with no links.
        $links{$file}=[];
        writefile("$destdir/$file", $content);
        $oldpagemtime{$file}=time;
        $renderedfiles{$file}=$file;
    }
} #}}}
  254. sub loadindex () { #{{{
  255. open (IN, "$srcdir/.index") || return;
  256. while (<IN>) {
  257. $_=possibly_foolish_untaint($_);
  258. chomp;
  259. my ($mtime, $file, $rendered, @links)=split(' ', $_);
  260. my $page=pagename($file);
  261. $pagesources{$page}=$file;
  262. $oldpagemtime{$page}=$mtime;
  263. $oldlinks{$page}=[@links];
  264. $links{$page}=[@links];
  265. $renderedfiles{$page}=$rendered;
  266. }
  267. close IN;
  268. } #}}}
  269. sub saveindex () { #{{{
  270. open (OUT, ">$srcdir/.index") || error("cannot write to .index: $!");
  271. foreach my $page (keys %oldpagemtime) {
  272. print OUT "$oldpagemtime{$page} $pagesources{$page} $renderedfiles{$page} ".
  273. join(" ", @{$links{$page}})."\n"
  274. if $oldpagemtime{$page};
  275. }
  276. close OUT;
  277. } #}}}
  278. sub rcs_update () { #{{{
  279. if (-d "$srcdir/.svn") {
  280. if (system("svn", "update", "--quiet", $srcdir) != 0) {
  281. warn("svn update failed\n");
  282. }
  283. }
  284. } #}}}
  285. sub rcs_commit ($) { #{{{
  286. my $message=shift;
  287. if (-d "$srcdir/.svn") {
  288. if (system("svn", "commit", "--quiet", "-m",
  289. possibly_foolish_untaint($message), $srcdir) != 0) {
  290. warn("svn commit failed\n");
  291. }
  292. }
  293. } #}}}
  294. sub rcs_add ($) { #{{{
  295. my $file=shift;
  296. if (-d "$srcdir/.svn") {
  297. my $parent=dirname($file);
  298. while (! -d "$srcdir/$parent/.svn") {
  299. $file=$parent;
  300. $parent=dirname($file);
  301. }
  302. if (system("svn", "add", "--quiet", "$srcdir/$file") != 0) {
  303. warn("svn add failed\n");
  304. }
  305. }
  306. } #}}}
# Return up to $num recent commits, newest first, as hashes with rev,
# user, when (human-readable relative age), message and pages keys.
# Parses "svn log -v" output; returns an empty list for non-svn dirs.
sub rcs_recentchanges ($) { #{{{
    my $num=shift;
    my @ret;
    # Lazy-load: these modules are only needed for RecentChanges.
    eval q{use Date::Parse};
    eval q{use Time::Duration};
    if (-d "$srcdir/.svn") {
        my $info=`LANG=C svn info $srcdir`;
        my ($svn_url)=$info=~/^URL: (.*)$/m;
        # FIXME: currently assumes that the wiki is somewhere
        # under trunk in svn, doesn't support other layouts.
        my ($svn_base)=$svn_url=~m!(/trunk(?:/.*)?)$!;
        my $div=qr/^--------------------+$/;	# svn log entry separator
        my $infoline=qr/^r(\d+)\s+\|\s+([^\s]+)\s+\|\s+(\d+-\d+-\d+\s+\d+:\d+:\d+\s+[-+]?\d+).*/;
        my $state='start';
        my ($rev, $user, $when, @pages, $message);
        # State machine over the log stream: start -> header (the rNNN
        # line plus changed paths) -> body (commit message) -> back to
        # header at the next separator.
        foreach (`LANG=C svn log -v '$svn_url'`) {
            chomp;
            if ($state eq 'start' && /$div/) {
                $state='header';
            }
            elsif ($state eq 'header' && /$infoline/) {
                $rev=$1;
                $user=$2;
                $when=concise(ago(time - str2time($3)));
            }
            elsif ($state eq 'header' && /^\s+[A-Z]\s+\Q$svn_base\E\/(.+)$/) {
                # A changed path inside the wiki: remember its page name.
                push @pages, pagename($1) if length $1;
            }
            elsif ($state eq 'header' && /^$/) {
                $state='body';
            }
            elsif ($state eq 'body' && /$div/) {
                # End of one entry; keep it only if it touched wiki pages.
                push @ret, { rev => $rev, user => $user,
                    when => $when, message => $message,
                    pages => [@pages] } if @pages;
                return @ret if @ret >= $num;
                $state='header';
                $message=$rev=$user=$when=undef;
                @pages=();
            }
            elsif ($state eq 'body') {
                $message.="$_<br>\n";
            }
        }
    }
    return @ret;
} #}}}
  354. sub prune ($) { #{{{
  355. my $file=shift;
  356. unlink($file);
  357. my $dir=dirname($file);
  358. while (rmdir($dir)) {
  359. $dir=dirname($dir);
  360. }
  361. } #}}}
# Scan the source dir and re-render everything that needs it: new,
# changed and removed pages, plus pages indirectly affected through
# link resolution and linkbacks.
sub refresh () { #{{{
    # Find existing pages.
    my %exists;
    my @files;
    find({
        no_chdir => 1,
        wanted => sub {
            if (/$wiki_file_prune_regexp/) {
                # Skip .svn dirs, dotfiles, html output, etc.
                $File::Find::prune=1;
            }
            elsif (! -d $_) {
                my ($f)=/$wiki_file_regexp/; # untaint
                if (! defined $f) {
                    warn("skipping bad filename $_\n");
                }
                else {
                    # Store paths relative to $srcdir.
                    $f=~s/^\Q$srcdir\E\/?//;
                    push @files, $f;
                    $exists{pagename($f)}=1;
                }
            }
        },
    }, $srcdir);
    my %rendered;
    # check for added or removed pages
    my @add;
    foreach my $file (@files) {
        my $page=pagename($file);
        if (! $oldpagemtime{$page}) {
            debug("new page $page");
            push @add, $file;
            $links{$page}=[];
            $pagesources{$page}=$file;
        }
    }
    my @del;
    foreach my $page (keys %oldpagemtime) {
        if (! $exists{$page}) {
            debug("removing old page $page");
            push @del, $renderedfiles{$page};
            prune($destdir."/".$renderedfiles{$page});
            delete $renderedfiles{$page};
            $oldpagemtime{$page}=0;	# 0 marks the page as deleted
            delete $pagesources{$page};
        }
    }
    # render any updated files
    foreach my $file (@files) {
        my $page=pagename($file);
        if (! exists $oldpagemtime{$page} ||
            mtime("$srcdir/$file") > $oldpagemtime{$page}) {
            debug("rendering changed file $file");
            render($file);
            $rendered{$file}=1;
        }
    }
    # if any files were added or removed, check to see if each page
    # needs an update due to linking to them
    # TODO: inefficient; pages may get rendered above and again here;
    # problem is the bestlink may have changed and we won't know until
    # now
    if (@add || @del) {
        FILE: foreach my $file (@files) {
            my $page=pagename($file);
            foreach my $f (@add, @del) {
                my $p=pagename($f);
                foreach my $link (@{$links{$page}}) {
                    if (bestlink($page, $link) eq $p) {
                        debug("rendering $file, which links to $p");
                        render($file);
                        $rendered{$file}=1;
                        next FILE;
                    }
                }
            }
        }
    }
    # handle linkbacks; if a page has added/removed links, update the
    # pages it links to
    # TODO: inefficient; pages may get rendered above and again here;
    # problem is the linkbacks could be wrong in the first pass render
    # above
    if (%rendered) {
        my %linkchanged;
        # For every re-rendered (or deleted) page, diff its current
        # resolved links against the previously indexed ones, in both
        # directions, collecting targets whose referrers changed.
        foreach my $file (keys %rendered, @del) {
            my $page=pagename($file);
            if (exists $links{$page}) {
                foreach my $link (@{$links{$page}}) {
                    $link=bestlink($page, $link);
                    if (length $link &&
                        ! exists $oldlinks{$page} ||
                        ! grep { $_ eq $link } @{$oldlinks{$page}}) {
                        $linkchanged{$link}=1;
                    }
                }
            }
            if (exists $oldlinks{$page}) {
                foreach my $link (@{$oldlinks{$page}}) {
                    $link=bestlink($page, $link);
                    if (length $link &&
                        ! exists $links{$page} ||
                        ! grep { $_ eq $link } @{$links{$page}}) {
                        $linkchanged{$link}=1;
                    }
                }
            }
        }
        # Re-render every page whose set of linkbacks changed.
        foreach my $link (keys %linkchanged) {
            my $linkfile=$pagesources{$link};
            if (defined $linkfile) {
                debug("rendering $linkfile, to update its linkbacks");
                render($linkfile);
            }
        }
    }
} #}}}
# Generates a C wrapper program for running ikiwiki in a specific way.
# The wrapper may be safely made suid.
sub gen_wrapper ($$) { #{{{
    my ($svn, $rebuild)=@_;
    eval q{use Cwd 'abs_path'};	# lazy-load; only needed here
    $srcdir=abs_path($srcdir);
    $destdir=abs_path($destdir);
    my $this=abs_path($0);
    if (! -x $this) {
        error("$this doesn't seem to be executable");
    }
    # The command line the wrapper will re-run ikiwiki with, frozen at
    # generation time.
    my @params=($srcdir, $destdir, "--wikiname=$wikiname");
    push @params, "--verbose" if $verbose;
    push @params, "--rebuild" if $rebuild;
    push @params, "--nosvn" if !$svn;
    push @params, "--cgi" if $cgi;
    push @params, "--url=$url" if $url;
    push @params, "--cgiurl=$cgiurl" if $cgiurl;
    push @params, "--historyurl=$historyurl" if $historyurl;
    my $params=join(" ", @params);
    # Build the execl() argument list: $this appears twice (argv[0] and
    # the program itself), then each parameter, NULL-terminated.
    my $call='';
    foreach my $p ($this, $this, @params) {
        $call.=qq{"$p", };
    }
    $call.="NULL";
    # The wrapper scrubs the environment down to a whitelist; CGI
    # request variables are preserved only for a CGI wrapper.
    my @envsave;
    push @envsave, qw{REMOTE_ADDR QUERY_STRING REQUEST_METHOD REQUEST_URI
        CONTENT_TYPE CONTENT_LENGTH GATEWAY_INTERFACE} if $cgi;
    my $envsave="";
    foreach my $var (@envsave) {
        $envsave.=<<"EOF"
if ((s=getenv("$var")))
asprintf(&newenviron[i++], "%s=%s", "$var", s);
EOF
    }
    open(OUT, ">ikiwiki-wrap.c") || error("failed to write ikiwiki-wrap.c: $!");;
    print OUT <<"EOF";
/* A wrapper for ikiwiki, can be safely made suid. */
#define _GNU_SOURCE
#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>
#include <string.h>
extern char **environ;
int main (int argc, char **argv) {
/* Sanitize environment. */
char *s;
char *newenviron[$#envsave+3];
int i=0;
$envsave
newenviron[i++]="HOME=$ENV{HOME}";
newenviron[i]=NULL;
environ=newenviron;
if (argc == 2 && strcmp(argv[1], "--params") == 0) {
printf("$params\\n");
exit(0);
}
execl($call);
perror("failed to run $this");
exit(1);
}
EOF
    close OUT;
    if (system("gcc", "ikiwiki-wrap.c", "-o", "ikiwiki-wrap") != 0) {
        error("failed to compile ikiwiki-wrap.c");
    }
    unlink("ikiwiki-wrap.c");
    print "successfully generated ikiwiki-wrap\n";
    # Wrapper generation is the whole run; never fall through to main.
    exit 0;
} #}}}
# Handle one CGI request. Dispatches on the "do" parameter:
# "recentchanges" lists recent commits; "create" and "edit" show a
# page form; "save" writes the page source and commits it.
sub cgi () { #{{{
    eval q{use CGI};	# lazy-load; only needed in --cgi mode
    my $q=CGI->new;
    my $do=$q->param('do');
    if (! defined $do || ! length $do) {
        error("\"do\" parameter missing");
    }
    if ($do eq 'recentchanges') {
        # Render the 100 most recent commits as an html list.
        my $list="<ul>\n";
        foreach my $change (rcs_recentchanges(100)) {
            $list.="<li>";
            $list.=join(", ", map { htmllink("", $_, 1) } @{$change->{pages}});
            $list.="<br>\n";
            $list.="changed ".$change->{when}." by ".
                htmllink("", $change->{user}, 1).
                ": <i>".$change->{message}."</i>\n";
            $list.="</li>\n";
        }
        $list.="</ul>\n";
        print $q->header,
            $q->start_html("RecentChanges"),
            $q->h1("<a href=\"$url\">$wikiname</a>/ RecentChanges"),
            $list,
            $q->end_form,
            $q->end_html;
        return;
    }
    # Every other action names a page; validate (and untaint) the name
    # with the same patterns applied to source files.
    my ($page)=$q->param('page')=~/$wiki_file_regexp/;
    if (! defined $page || ! length $page || $page ne $q->param('page') ||
        $page=~/$wiki_file_prune_regexp/ || $page=~/^\//) {
        error("bad page name");
    }
    $page=lc($page);
    # Form target: this CGI's own url, minus any query string.
    my $action=$q->request_uri;
    $action=~s/\?.*//;
    if ($do eq 'create') {
        if (exists $pagesources{lc($page)}) {
            # hmm, someone else made the page in the meantime?
            print $q->redirect("$url/".htmlpage($page));
        }
        # Offer candidate locations for the new page: beside the linking
        # page, beneath it, and at each directory level above it.
        my @page_locs;
        my ($from)=$q->param('from')=~/$wiki_file_regexp/;
        if (! defined $from || ! length $from ||
            $from ne $q->param('from') ||
            $from=~/$wiki_file_prune_regexp/ || $from=~/^\//) {
            @page_locs=$page;
        }
        else {
            my $dir=$from."/";
            $dir=~s![^/]+/$!!;
            push @page_locs, $dir.$page;
            push @page_locs, "$from/$page";
            while (length $dir) {
                $dir=~s![^/]+/$!!;
                push @page_locs, $dir.$page;
            }
        }
        # The rendered form submits back to us as a save.
        $q->param("do", "save");
        print $q->header,
            $q->start_html("Creating $page"),
            $q->h1("<a href=\"$url\">$wikiname</a>/ Creating $page"),
            $q->start_form(-action => $action),
            $q->hidden('do'),
            "Select page location:",
            $q->popup_menu('page', \@page_locs),
            $q->textarea(-name => 'content',
                -default => "",
                -rows => 20,
                -columns => 80),
            $q->br,
            "Optional comment about this change:",
            $q->br,
            $q->textfield(-name => "comments", -size => 80),
            $q->br,
            $q->submit("Save Page"),
            $q->end_form,
            $q->end_html;
    }
    elsif ($do eq 'edit') {
        # Preload current page source into the edit box, converting to
        # the CRLF line endings browsers send in textareas.
        my $content="";
        if (exists $pagesources{lc($page)}) {
            $content=readfile("$srcdir/$pagesources{lc($page)}");
            $content=~s/\n/\r\n/g;
        }
        # The rendered form submits back to us as a save.
        $q->param("do", "save");
        print $q->header,
            $q->start_html("Editing $page"),
            $q->h1("<a href=\"$url\">$wikiname</a>/ Editing $page"),
            $q->start_form(-action => $action),
            $q->hidden('do'),
            $q->hidden('page'),
            $q->textarea(-name => 'content',
                -default => $content,
                -rows => 20,
                -columns => 80),
            $q->br,
            "Optional comment about this change:",
            $q->br,
            $q->textfield(-name => "comments", -size => 80),
            $q->br,
            $q->submit("Save Page"),
            $q->end_form,
            $q->end_html;
    }
    elsif ($do eq 'save') {
        my $file=$page.$default_pagetype;
        my $newfile=1;
        if (exists $pagesources{lc($page)}) {
            # Page already exists: overwrite its source file instead.
            $file=$pagesources{lc($page)};
            $newfile=0;
        }
        # Normalize browser CRLF/CR line endings back to LF.
        my $content=$q->param('content');
        $content=~s/\r\n/\n/g;
        $content=~s/\r/\n/g;
        writefile("$srcdir/$file", $content);
        my $message="web commit from $ENV{REMOTE_ADDR}";
        if (defined $q->param('comments')) {
            $message.=": ".$q->param('comments');
        }
        if ($svn) {
            if ($newfile) {
                rcs_add($file);
            }
            # presumably the commit will trigger an update
            # of the wiki
            rcs_commit($message);
        }
        else {
            refresh();
        }
        print $q->redirect("$url/".htmlpage($page));
    }
    else {
        error("unknown do parameter");
    }
} #}}}
  684. # main {{{
  685. my $rebuild=0;
  686. my $wrapper=0;
  687. if (grep /^-/, @ARGV) {
  688. eval {use Getopt::Long};
  689. GetOptions(
  690. "wikiname=s" => \$wikiname,
  691. "verbose|v" => \$verbose,
  692. "rebuild" => \$rebuild,
  693. "wrapper" => \$wrapper,
  694. "svn!" => \$svn,
  695. "cgi" => \$cgi,
  696. "url=s" => \$url,
  697. "cgiurl=s" => \$cgiurl,
  698. "historyurl=s" => \$historyurl,
  699. ) || usage();
  700. }
  701. usage() unless @ARGV == 2;
  702. ($srcdir) = possibly_foolish_untaint(shift);
  703. ($destdir) = possibly_foolish_untaint(shift);
  704. if ($cgi && ! length $url) {
  705. error("Must specify url to wiki with --url when using --cgi");
  706. }
  707. gen_wrapper($svn, $rebuild) if $wrapper;
  708. memoize('pagename');
  709. memoize('bestlink');
  710. loadindex() unless $rebuild;
  711. if ($cgi) {
  712. cgi();
  713. }
  714. else {
  715. rcs_update() if $svn;
  716. refresh();
  717. saveindex();
  718. }
  719. #}}}