path: root/ikiwiki
blob: fcf081ccbb797a9863f3a281da4469609e8362f6
#!/usr/bin/perl -T

use warnings;
use strict;
use File::Find;
use Memoize;
use File::Spec;

$ENV{PATH}="/usr/local/bin:/usr/bin:/bin";
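
# Markdown.pl is written as a blosxom plugin rather than a proper perl
# module, so fake up the version variable it expects and pull it in with
# do at compile time.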
BEGIN {
	$blosxom::version="is a proper perl module too much to ask?";
	do "/usr/bin/markdown";
}

my ($srcdir, $destdir, %links, %oldlinks, %oldpagemtime, %renderedfiles,
    %pagesources);
my $wiki_link_regexp=qr/\[\[([^\s]+)\]\]/;
my $wiki_file_regexp=qr/(^[-A-Za-z0-9_.:\/+]+$)/;
my $wiki_file_prune_regexp=qr!((^|/)\.svn/|\.\.)!;
my $verbose=0;
my $wikiname="wiki";
my $default_pagetype=".mdwn";
my $cgi=0;
my $url="";
my $svn=1;

sub usage {
	die "usage: ikiwiki [options] source dest\n";
}

sub error ($) {
	if ($cgi) {
		print "Content-type: text/html\n\n";
		print "Error: @_\n";
		exit 1;
	}
	else {
		die @_;
	}
}

sub debug ($) {
	print "@_\n" if $verbose;
}

sub mtime ($) {
	my $page=shift;

	return (stat($page))[9];
}

sub possibly_foolish_untaint ($) {
	my $tainted=shift;
	my ($untainted)=$tainted=~/(.*)/;
	return $untainted;
}

sub basename {
	my $file=shift;

	$file=~s!.*/!!;
	return $file;
}

sub dirname {
	my $file=shift;

	$file=~s!/?[^/]+$!!;
	return $file;
}

sub pagetype ($) {
	my $page=shift;

	if ($page =~ /\.mdwn$/) {
		return ".mdwn";
	}
	else {
		return "unknown";
	}
}

sub pagename ($) {
	my $file=shift;

	my $type=pagetype($file);
	my $page=$file;
	$page=~s/\Q$type\E$// unless $type eq 'unknown';
	return $page;
}

sub htmlpage ($) {
	my $page=shift;

	return $page.".html";
}

sub readfile ($) {
	my $file=shift;

	local $/=undef;
	open (IN, "$file") || error("failed to read $file: $!");
	my $ret=<IN>;
	close IN;
	return $ret;
}

sub writefile ($$) {
	my $file=shift;
	my $content=shift;

	my $dir=dirname($file);
	if (! -d $dir) {
		my $d="";
		foreach my $s (split(m!/+!, $dir)) {
			$d.="$s/";
			if (! -d $d) {
				mkdir($d) || error("failed to create directory $d: $!");
			}
		}
	}

	open (OUT, ">$file") || error("failed to write $file: $!");
	print OUT $content;
	close OUT;
}

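# Returns the lowercased text of every [[wiki link]] in the content.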
sub findlinks {
	my $content=shift;

	my @links;
	while ($content =~ /$wiki_link_regexp/g) {
		push @links, lc($1);
	}
	return @links;
}

# Given a page and the text of a link on the page, determine which existing
# page that link best points to. Prefers pages under a subdirectory with
# the same name as the source page; failing that, it walks down the
# directory tree to the base looking for matching pages. For example, the
# link "baz" on page foo/bar resolves to foo/bar/baz if that page exists,
# then to foo/baz, then to baz.
sub bestlink ($$) {
	my $page=shift;
	my $link=lc(shift);

	my $cwd=$page;
	do {
		my $l=$cwd;
		$l.="/" if length $l;
		$l.=$link;

		if (exists $links{$l}) {
			#debug("for $page, \"$link\", use $l");
			return $l;
		}
	} while $cwd=~s!/?[^/]+$!!;

	#print STDERR "warning: page $page, broken link: $link\n";
	return "";
}

sub isinlinableimage ($) {
	my $file=shift;

	$file=~/\.(png|gif|jpg|jpeg)$/;
}

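# Generate an html fragment linking from the given page to the given link
# text: an inline <img> for images, an <a> for pages, and a "?" link for
# pages that do not exist yet.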
sub htmllink ($$) {
	my $page=shift;
	my $link=shift;

	my $bestlink=bestlink($page, $link);

	return $link if $page eq $bestlink;

	# TODO BUG: %renderedfiles may not have it, if the linked to page
	# was also added and isn't yet rendered! Note that this bug is
	# masked by the bug mentioned below that makes all new files
	# be rendered twice.
	if (! grep { $_ eq $bestlink } values %renderedfiles) {
		$bestlink=htmlpage($bestlink);
	}
	if (! grep { $_ eq $bestlink } values %renderedfiles) {
		return "<a href=\"?\">?</a>$link";
	}

	$bestlink=File::Spec->abs2rel($bestlink, dirname($page));

	if (isinlinableimage($bestlink)) {
		return "<img src=\"$bestlink\">";
	}
	return "<a href=\"$bestlink\">$link</a>";
}

sub linkify ($$) {
	my $content=shift;
	my $file=shift;

	$content =~ s/$wiki_link_regexp/htmllink(pagename($file), $1)/eg;

	return $content;
}

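# Convert the content of a page to html, dispatching on the page type.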
sub htmlize ($$) {
	my $type=shift;
	my $content=shift;

	if ($type eq '.mdwn') {
		return Markdown::Markdown($content);
	}
	else {
		error("htmlization of $type not supported");
	}
}

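# Append a list of "linkbacks" to the content: links back to every other
# page that links to this one.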
sub linkbacks ($$) {
	my $content=shift;
	my $page=shift;

	my @links;
	foreach my $p (keys %links) {
		next if bestlink($page, $p) eq $page;
		if (grep { length $_ && bestlink($p, $_) eq $page } @{$links{$p}}) {
			my $href=File::Spec->abs2rel(htmlpage($p), dirname($page));

			# Trim common dir prefixes from both pages.
			my $p_trimmed=$p;
			my $page_trimmed=$page;
			my $dir;
			1 while (($dir)=$page_trimmed=~m!^([^/]+/)!) &&
			        defined $dir &&
			        $p_trimmed=~s/^\Q$dir\E// &&
			        $page_trimmed=~s/^\Q$dir\E//;

			push @links, "<a href=\"$href\">$p_trimmed</a>";
		}
	}

	$content.="<hr><p>Links: ".join(" ", sort @links)."</p>\n" if @links;
	return $content;
}

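# Wrap the content in a full html page, with a title and a breadcrumb-style
# link path back up through the parent pages to the top of the wiki.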
sub finalize ($$) {
	my $content=shift;
	my $page=shift;

	my $title=basename($page);
	$title=~s/_/ /g;

	my $pagelink="";
	my $path="";
	foreach my $dir (reverse split("/", $page)) {
		if (length($pagelink)) {
			$pagelink="<a href=\"$path$dir.html\">$dir</a>/ $pagelink";
		}
		else {
			$pagelink=$dir;
		}
		$path.="../";
	}
	$path=~s/\.\.\/$/index.html/;
	$pagelink="<a href=\"$path\">$wikiname</a>/ $pagelink";

	$content="<html>\n<head><title>$title</title></head>\n<body>\n".
	         "<h1>$pagelink</h1>\n".
	         $content.
	         "</body>\n</html>\n";

	return $content;
}

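# Render a file from the srcdir into the destdir: pages are linkified and
# htmlized, anything of unknown type is copied through verbatim.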
sub render ($) {
	my $file=shift;

	my $type=pagetype($file);
	my $content=readfile("$srcdir/$file");
	if ($type ne 'unknown') {
		my $page=pagename($file);

		$links{$page}=[findlinks($content)];

		$content=linkify($content, $file);
		$content=htmlize($type, $content);
		$content=linkbacks($content, $page);
		$content=finalize($content, $page);

		writefile("$destdir/".htmlpage($page), $content);
		$oldpagemtime{$page}=time;
		$renderedfiles{$page}=htmlpage($page);
	}
	else {
		$links{$file}=[];
		writefile("$destdir/$file", $content);
		$oldpagemtime{$file}=time;
		$renderedfiles{$file}=$file;
	}
}

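# The .index file caches the wiki's state between runs, one
# whitespace-separated line per page: mtime, source file, rendered file,
# then the links found on the page (e.g. "1117651998 foo.mdwn foo.html bar").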
sub loadindex () {
	open (IN, "$srcdir/.index") || return;
	while (<IN>) {
		$_=possibly_foolish_untaint($_);
		chomp;
		my ($mtime, $file, $rendered, @links)=split(' ', $_);
		my $page=pagename($file);
		$pagesources{$page}=$file;
		$oldpagemtime{$page}=$mtime;
		$oldlinks{$page}=[@links];
		$links{$page}=[@links];
		$renderedfiles{$page}=$rendered;
	}
	close IN;
}

sub saveindex () {
	open (OUT, ">$srcdir/.index") || error("cannot write to .index: $!");
	foreach my $page (keys %oldpagemtime) {
		print OUT "$oldpagemtime{$page} $pagesources{$page} $renderedfiles{$page} ".
			join(" ", @{$links{$page}})."\n"
				if $oldpagemtime{$page};
	}
	close OUT;
}

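# Subversion support. Each of these is a no-op unless the srcdir is a svn
# checkout.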
sub rcs_update () {
	if (-d "$srcdir/.svn") {
		if (system("svn", "update", "--quiet", $srcdir) != 0) {
			warn("svn update failed\n");
		}
	}
}

sub rcs_commit ($) {
	my $message=shift;

	if (-d "$srcdir/.svn") {
		if (system("svn", "commit", "--quiet", "-m",
		           possibly_foolish_untaint($message), $srcdir) != 0) {
			warn("svn commit failed\n");
		}
	}
}

sub rcs_add ($) {
	my $file=shift;

	if (-d "$srcdir/.svn") {
		if (system("svn", "add", "--quiet", $file) != 0) {
			warn("svn add failed\n");
		}
	}
}

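# Delete a rendered file from the destdir, and prune any parent directories
# that are left empty.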
sub prune ($) {
	my $file=shift;

	unlink($file);
	my $dir=dirname($file);
	while (rmdir($dir)) {
		$dir=dirname($dir);
	}
}

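# Scan the srcdir and re-render everything that is new, changed, or
# affected by an added or removed page.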
sub refresh () {
	# Find existing pages.
	my %exists;
	my @files;
	find({
		no_chdir => 1,
		wanted => sub {
			if (/$wiki_file_prune_regexp/) {
				$File::Find::prune=1;
			}
			elsif (! -d $_ && ! /\.html$/ && ! /\/\./) {
				my ($f)=/$wiki_file_regexp/; # untaint
				if (! defined $f) {
					warn("skipping bad filename $_\n");
				}
				else {
					$f=~s/^\Q$srcdir\E\/?//;
					push @files, $f;
					$exists{pagename($f)}=1;
				}
			}
		},
	}, $srcdir);

	my %rendered;

	# check for added or removed pages
	my @add;
	foreach my $file (@files) {
		my $page=pagename($file);
		if (! $oldpagemtime{$page}) {
			debug("new page $page");
			push @add, $file;
			$links{$page}=[];
			$pagesources{$page}=$file;
		}
	}
	my @del;
	foreach my $page (keys %oldpagemtime) {
		if (! $exists{$page}) {
			debug("removing old page $page");
			push @del, $renderedfiles{$page};
			prune($destdir."/".$renderedfiles{$page});
			delete $renderedfiles{$page};
			$oldpagemtime{$page}=0;
			delete $pagesources{$page};
		}
	}

	# render any updated files
	foreach my $file (@files) {
		my $page=pagename($file);

		if (! exists $oldpagemtime{$page} ||
		    mtime("$srcdir/$file") > $oldpagemtime{$page}) {
			debug("rendering changed file $file");
			render($file);
			$rendered{$file}=1;
		}
	}

	# if any files were added or removed, check to see if each page
	# needs an update due to linking to them
	# TODO: inefficient; pages may get rendered above and again here;
	# problem is the bestlink may have changed and we won't know until
	# now
	if (@add || @del) {
FILE:		foreach my $file (@files) {
			my $page=pagename($file);
			foreach my $f (@add, @del) {
				my $p=pagename($f);
				foreach my $link (@{$links{$page}}) {
					if (bestlink($page, $link) eq $p) {
						debug("rendering $file, which links to $p");
						render($file);
						$rendered{$file}=1;
						next FILE;
					}
				}
			}
		}
	}

	# handle linkbacks; if a page has added/removed links, update the
	# pages it links to
	# TODO: inefficient; pages may get rendered above and again here;
	# problem is the linkbacks could be wrong in the first pass render
	# above
	if (%rendered) {
		my %linkchanged;
		foreach my $file (keys %rendered, @del) {
			my $page=pagename($file);
			if (exists $links{$page}) {
				foreach my $link (@{$links{$page}}) {
					$link=bestlink($page, $link);
					if (length $link &&
					    (! exists $oldlinks{$page} ||
					     ! grep { $_ eq $link } @{$oldlinks{$page}})) {
						$linkchanged{$link}=1;
					}
				}
			}
			if (exists $oldlinks{$page}) {
				foreach my $link (@{$oldlinks{$page}}) {
					$link=bestlink($page, $link);
					if (length $link &&
					    (! exists $links{$page} ||
					     ! grep { $_ eq $link } @{$links{$page}})) {
						$linkchanged{$link}=1;
					}
				}
			}
		}
		foreach my $link (keys %linkchanged) {
			my $linkfile=$pagesources{$link};
			if (defined $linkfile) {
				debug("rendering $linkfile, to update its linkbacks");
				render($linkfile);
			}
		}
	}
}

# Generates a C wrapper program for running ikiwiki in a specific way.
# The wrapper may be safely made suid.
sub gen_wrapper ($$) {
	my ($svn, $rebuild)=@_;

	eval q{use Cwd 'abs_path'};
	$srcdir=abs_path($srcdir);
	$destdir=abs_path($destdir);
	my $this=abs_path($0);
	if (! -x $this) {
		error("$this doesn't seem to be executable");
	}

	my @params=($srcdir, $destdir, "--wikiname=$wikiname");
	push @params, "--verbose" if $verbose;
	push @params, "--rebuild" if $rebuild;
	push @params, "--nosvn" if !$svn;
	push @params, "--cgi" if $cgi;
	push @params, "--url=$url" if $url;
	my $params=join(" ", @params);
	my $call='';
	# execl takes the program to run, then argv[0], then the arguments,
	# so $this is deliberately passed twice.
	foreach my $p ($this, $this, @params) {
		$call.=qq{"$p", };
	}
	$call.="NULL";

	my @envsave;
	push @envsave, qw{REMOTE_ADDR QUERY_STRING REQUEST_METHOD REQUEST_URI
	                  CONTENT_TYPE CONTENT_LENGTH GATEWAY_INTERFACE} if $cgi;
	my $envsave="";
	foreach my $var (@envsave) {
		$envsave.=<<"EOF"
	if ((s=getenv("$var")))
		asprintf(&newenviron[i++], "%s=%s", "$var", s);
EOF
	}

	open(OUT, ">ikiwiki-wrap.c") || error("failed to write ikiwiki-wrap.c: $!");
	print OUT <<"EOF";
/* A wrapper for ikiwiki, can be safely made suid. */
#define _GNU_SOURCE
#include <stdio.h>
#include <unistd.h>
#include <stdlib.h>
#include <string.h>

extern char **environ;

int main (int argc, char **argv) {
	/* Sanitize environment. */
	char *s;
	char *newenviron[$#envsave+3];
	int i=0;
$envsave
	newenviron[i++]="HOME=$ENV{HOME}";
	newenviron[i]=NULL;
	environ=newenviron;

	if (argc == 2 && strcmp(argv[1], "--params") == 0) {
		printf("$params\\n");
		exit(0);
	}

	execl($call);
	perror("failed to run $this");
	exit(1);
}
EOF
	close OUT;
	if (system("gcc", "ikiwiki-wrap.c", "-o", "ikiwiki-wrap") != 0) {
		error("failed to compile ikiwiki-wrap.c");
	}
	unlink("ikiwiki-wrap.c");
	print "successfully generated ikiwiki-wrap\n";
	exit 0;
}

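# CGI support: ?do=edit serves an edit form for a page, and ?do=save writes
# the posted content back to the srcdir and commits it.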
sub cgi () {
	eval q{use CGI};
	my $q=CGI->new;

	my $do=$q->param('do');
	if (! defined $do || ! length $do) {
		error("\"do\" parameter missing");
	}

	my ($page)=$q->param('page')=~/$wiki_file_regexp/; # untaint
	if (! defined $page || ! length $page || $page ne $q->param('page') ||
	    $page=~/$wiki_file_prune_regexp/ || $page=~/^\//) {
		error("bad page name");
	}

	my $action=$q->request_uri;
	$action=~s/\?.*//;

	if ($do eq 'edit') {
		my $content="";
		if (exists $pagesources{lc($page)}) {
			$content=readfile("$srcdir/$pagesources{lc($page)}");
			$content=~s/\n/\r\n/g;
		}
		$q->param("do", "save");
		print $q->header,
		      $q->start_html("$wikiname: Editing $page"),
		      $q->h1("$wikiname: Editing $page"),
		      $q->start_form(-action => $action),
		      $q->hidden('do'),
		      $q->hidden('page'),
		      $q->textarea(-name => 'content',
		                   -default => $content,
		                   -rows => 20,
		                   -columns => 80),
		      $q->br,
		      "Optional comment about this change",
		      $q->br,
		      $q->textfield(-name => "comments", -size => 80),
		      $q->br,
		      $q->submit("Save Changes"),
		      $q->end_form,
		      $q->end_html;
	}
	elsif ($do eq 'save') {
		my $file=$page.$default_pagetype;
		my $newfile=1;
		if (exists $pagesources{lc($page)}) {
			$file=$pagesources{lc($page)};
			$newfile=0;
		}

		my $content=$q->param('content');
		$content=~s/\r\n/\n/g;
		$content=~s/\r/\n/g;
		writefile("$srcdir/$file", $content);

		my $message="web commit from $ENV{REMOTE_ADDR}";
		if (defined $q->param('comments')) {
			$message.=": ".$q->param('comments');
		}

		if ($svn) {
			if ($newfile) {
				rcs_add($file);
			}
			# presumably the commit will trigger an update
			# of the wiki
			rcs_commit($message);
		}
		else {
			refresh();
		}

		print $q->redirect("$url/".htmlpage($page));
	}
	else {
		error("unknown do parameter");
	}
}

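# Main program: parse the command line, then either answer a CGI request or
# refresh the wiki and save its index. Illustrative invocations:
#   ikiwiki --rebuild srcdir destdir
#   ikiwiki --cgi --url=http://example.com/wiki srcdir destdir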
my $rebuild=0;
my $wrapper=0;
if (grep /^-/, @ARGV) {
	eval q{use Getopt::Long};
	GetOptions(
		"wikiname=s" => \$wikiname,
		"verbose|v" => \$verbose,
		"rebuild" => \$rebuild,
		"wrapper" => \$wrapper,
		"svn!" => \$svn,
		"cgi" => \$cgi,
		"url=s" => \$url,
	) || usage();
}
usage() unless @ARGV == 2;
($srcdir) = possibly_foolish_untaint(shift);
($destdir) = possibly_foolish_untaint(shift);

if ($cgi && ! length $url) {
	error("Must specify url to wiki with --url when using --cgi");
}

gen_wrapper($svn, $rebuild) if $wrapper;
memoize('pagename');
memoize('bestlink');
loadindex() unless $rebuild;
if ($cgi) {
	cgi();
}
else {
	rcs_update() if $svn;
	refresh();
	saveindex();
}